From 1e2c278a7932429bf07bee82f6b596a959e7fc3e Mon Sep 17 00:00:00 2001 From: Maxine Levesque <170461181+maxinelevesque@users.noreply.github.com> Date: Wed, 28 Jan 2026 11:57:14 -0800 Subject: [PATCH 1/6] added boto3 dependency that wasn't explicit before --- pyproject.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/pyproject.toml b/pyproject.toml index d1ca864..1487042 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -10,6 +10,7 @@ authors = [ requires-python = ">=3.12" dependencies = [ "atproto>=0.0.65", + "boto3>=1.41.5", "fastparquet>=2024.11.0", "libipld>=3.3.2", "msgpack>=1.1.2", From 47e928a935fd2ded1967c670307b4ab411c6ae7a Mon Sep 17 00:00:00 2001 From: Maxine Levesque <170461181+maxinelevesque@users.noreply.github.com> Date: Wed, 28 Jan 2026 12:51:24 -0800 Subject: [PATCH 2/6] fix(docs): update docstrings from Example: to Examples: format for proper code rendering Griffe's Google docstring parser treats Example: (singular) as an admonition, passing raw text through including :: markers. Using Examples: (plural) is recognized as a proper examples section with code parsing, enabling quartodoc to render code blocks with sourceCode/python/code-with-copy CSS classes. 
- Update all docstrings across 17 source files - Regenerate quartodoc .qmd files and Quarto HTML output - Update CLAUDE.md to document the correct docstring format Co-Authored-By: Claude Opus 4.5 --- .chainlink/issues.db | Bin 479232 -> 483328 bytes CHANGELOG.md | 3 + CLAUDE.md | 66 ++++---- docs/api/AbstractDataStore.html | 15 +- docs/api/AbstractIndex.html | 46 +++--- docs/api/AtUri.html | 21 ++- docs/api/AtmosphereClient.html | 15 +- docs/api/AtmosphereIndex.html | 27 ++- docs/api/BlobSource.html | 23 ++- docs/api/DataSource.html | 25 ++- docs/api/Dataset.html | 38 ++--- docs/api/DatasetDict.html | 21 ++- docs/api/DatasetLoader.html | 40 +++-- docs/api/DatasetPublisher.html | 31 ++-- docs/api/DictSample.html | 23 ++- docs/api/Lens.html | 98 ++++++----- docs/api/LensLoader.html | 21 ++- docs/api/LensPublisher.html | 41 +++-- docs/api/PDSBlobStore.html | 15 +- docs/api/Packable-protocol.html | 25 ++- docs/api/PackableSample.html | 23 ++- docs/api/S3Source.html | 55 +++---- docs/api/SampleBatch.html | 13 +- docs/api/SchemaLoader.html | 21 ++- docs/api/SchemaPublisher.html | 31 ++-- docs/api/URLSource.html | 13 +- docs/api/load_dataset.html | 39 +++-- docs/api/local.Index.html | 55 +++---- docs/api/packable.html | 23 ++- docs/api/promote_to_atmosphere.html | 15 +- docs/index.html | 12 +- docs/reference/architecture.html | 28 ++-- docs/reference/atmosphere.html | 44 ++--- docs/reference/datasets.html | 26 +-- docs/reference/lenses.html | 20 +-- docs/reference/load-dataset.html | 24 +-- docs/reference/local-storage.html | 22 +-- docs/reference/packable-samples.html | 24 +-- docs/reference/promotion.html | 14 +- docs/reference/protocols.html | 24 +-- docs/reference/uri-spec.html | 4 +- docs/search.json | 220 ++++++++++++------------- docs/sitemap.xml | 94 +++++------ docs/tutorials/atmosphere.html | 28 ++-- docs/tutorials/local-workflow.html | 16 +- docs/tutorials/promotion.html | 22 +-- docs/tutorials/quickstart.html | 12 +- docs_src/api/AbstractDataStore.qmd | 14 +- 
docs_src/api/AbstractIndex.qmd | 48 +++--- docs_src/api/AtUri.qmd | 20 +-- docs_src/api/AtmosphereClient.qmd | 14 +- docs_src/api/AtmosphereIndex.qmd | 28 ++-- docs_src/api/BlobSource.qmd | 24 +-- docs_src/api/DataSource.qmd | 26 +-- docs_src/api/Dataset.qmd | 38 ++--- docs_src/api/DatasetDict.qmd | 20 +-- docs_src/api/DatasetLoader.qmd | 40 ++--- docs_src/api/DatasetPublisher.qmd | 32 ++-- docs_src/api/DictSample.qmd | 24 +-- docs_src/api/Lens.qmd | 102 ++++++------ docs_src/api/LensLoader.qmd | 20 +-- docs_src/api/LensPublisher.qmd | 42 ++--- docs_src/api/PDSBlobStore.qmd | 14 +- docs_src/api/Packable-protocol.qmd | 26 +-- docs_src/api/PackableSample.qmd | 24 +-- docs_src/api/S3Source.qmd | 56 +++---- docs_src/api/SampleBatch.qmd | 12 +- docs_src/api/SchemaLoader.qmd | 20 +-- docs_src/api/SchemaPublisher.qmd | 32 ++-- docs_src/api/URLSource.qmd | 12 +- docs_src/api/load_dataset.qmd | 38 ++--- docs_src/api/local.Index.qmd | 58 +++---- docs_src/api/packable.qmd | 26 +-- docs_src/api/promote_to_atmosphere.qmd | 14 +- src/atdata/_cid.py | 56 +++---- src/atdata/_hf_api.py | 86 +++++----- src/atdata/_protocols.py | 126 +++++++------- src/atdata/_schema_codec.py | 62 +++---- src/atdata/_sources.py | 110 ++++++------- src/atdata/_stub_manager.py | 40 ++--- src/atdata/atmosphere/__init__.py | 42 +++-- src/atdata/atmosphere/_types.py | 18 +- src/atdata/atmosphere/client.py | 12 +- src/atdata/atmosphere/lens.py | 56 +++---- src/atdata/atmosphere/records.py | 64 ++++--- src/atdata/atmosphere/schema.py | 46 +++--- src/atdata/atmosphere/store.py | 34 ++-- src/atdata/dataset.py | 138 +++++++--------- src/atdata/lens.py | 92 +++++------ src/atdata/local.py | 72 ++++---- src/atdata/promote.py | 40 ++--- 91 files changed, 1636 insertions(+), 1798 deletions(-) diff --git a/.chainlink/issues.db b/.chainlink/issues.db index ebc4cd7ad0dd1b12a3d16902641f61e3024a884d..e7d74f7273ba4d09de48d496200617a5a425e0fe 100644 GIT binary patch delta 10640 
zcmbVSdwdgBzMsjQnPeuf+BV3ek3k+SkETib4$sn-hfrRnR$vj*G=)f;gd}aPAmJ1Q z1y+IH>v3ItBMPF+v*9i7b$8u$mu0!TdiSb;Uf0#D>!Q2va&guBo0+8T)JgAt?jLA= zJ@ftje&_ppoZp;e(^J`-E@bZ-o}Sl0QB-^4A5Br2_T3X*y1a%Xt0ouHrQMGg=FnTa z&lZj!Udm+C5nm+Y4+bL0$TGup{z#VLK}3JM|W3d9orxJ z!pyzFZ{bH863|LQiH&t|rwujBgZG|wQGMPDBqDSk|+=^34l?xwfO|KCmDA2-*~ ze;?0uvreK9(Z53MG`1rC+Yr4(r(>M}VRV^^N4r^u($Rb5|L)#a(?-KTUA(4^d39N# zm(n#_7hCq5-!_k+3<^RIZGCZaKP@l!EW)HFk)W95Jx<`t3GtO!k zs5&Zp7+qUiODemWTgjCxjET&DhZ#kTSDDOPT;)q#4!5J!T~c9pIb9`XWaJ$vgNQ2; zOJ-kX()6x2fr?St#CnC9N}hQeaDgkSar>_@Hhq3npw+~7wr@+TE0$ztl8mX1IFq@2YqZp?Ine1~Tdn!}kPDVWW){RU&k3}Q3 z9#6?8nUm1JB(aAkA?Ki2VKQK|aITT+#J+5jI~hIS51UQ?I2ol3L+%u0Gt9y{0yU*G zn@pa9>XOihrlM?ecuHz0&}typryw)miL;GV1Fp;_+*Gusp8{w4ccSkkznO~i1U=3& zQJBRA*+e%D?e2%kCda2~8a+G}jWCs!78jRPOmIL(IpUWcsD|&Ej_9;PdOwv(-JnoC zb;H_VdBOZQ^J3E;v5O)@6iK})cMEUHJhlAkR`8QOT;BXualFcB9C)H;BV zs6%B*=hN;4=i6Xe1NnnGlyAt#w<)#r^Ey(O=|JFrXf zTc}5qk}71wYw9%(L%|Or&(x!ILlJf=#eTLPO;tg&p($wi4A49QmtqGn%)nm5ccAGi z-0xE2Y{b$4G>yT90_6qhxCZ1^Or= zY=RV-v>cfryT&O6wWwS_ocNccEb`7sXcIe@qXG@<@=5T{a%AHt<9y)D3Qpf#j%M~V znoGRjOSRt&ve$=%jPl~5Ql%qc5$6fiO3<=*8?q~=Klxs2)00<#X*(V(r?ZXhzL(7) ztt(Jo(*C0>G)a@){uHp>1Uv?)Rw{q`QH03YyP#{z<$uRrn&mHJe{dJF31jeRxe_yI z@_CfCkzWBcY523dQlA%aa%T3dLbO;0lf+PJ3uUda+-Lr|`BszHc+kiRb=(2=A8ajp zmbt;)uG>vtqf4kQ{AGTg;gVsI{x|x?a8S+TaRHtsP!XW&@MdT>85oUB8h;O?9C8On znOfc3vl(TQT^JH356={+)ky_+Z$cwT=}5*xKE_b(&uxn{ z71+qfT_~L|!PSs5bkc#)7L>2r!Rak2HDr>%ZUGm@<0^q_O)40BKgv-lXuf|C1y9`% z;h)iufFENtq95FqlDvv8Fg+Gm3Y1SV?d%83BJ>tCBn8$+rfz{IIss2tiVPw*l#JM{ zayyGG-K?zuZ$^2Bad?{2{^h7&zP~T!AnIAfx>XwsQMZwaThT2RJO)pdHP|R=;#*eE zjv+fAMuL*`2OdVbDV)zF7avCH@!#)6n{>pp6?GX3@f3l&4>51=6$ zJxVtE_5;AX1G|;O>&plF4lnR`7+LsWYUrPTgjidV(@|XPNHi%su24EHG|6G)!h>j3 z(ho`5BimE!nzCJXqg-*LVfz3#j;4eT9p)&r7nQim9nORk<+vhS6P#L;xiu;MR-twZwY+h#C zXS`vYE$raG(68ewblJM!F%NQC?A_>?VNcwC7;U6=Xiog)Bj|@ZWEer+O=^#$W&s!B zm5D&s9q$X|Bgc_LEC1-D(6v524hdk#cgg4KF!JeE2>R#~C@Z;(++#31c~5AbHkKY} zYLA})`p4iE$~+N20h1n_ZWfYv3_7ULlM0x}<9s|{nblUlg31!5@``q;q`d;JUVH_m 
zlXY=)5J8XcAh{<|gvX=sDy83~9YHQdN&OLRC9vP&WYZC35^xD#3PiCGu`wr6Wpb}~ zpVTI5=SeiS5ZiErKvlt8oepZevZ+a|an*>8Ex|@H9Bd0TMZ^A(81+S>iMN~Ury=~$ zozm`>Z=6DTdVKtBJT?VeQ1}z6lqMY4+Ch zXCMf3&!99ILY4@WA8dKgprYi4WgkzT(YAKw4EX56b>O2kWLnOm=?cG|v)cT=cnZ4I zOJ`x?Da4DFVe8eieZ$tbXSIElW7}{J`u}jeNT6!LR`?tmqwt|W2Yj45r|lsihm-#~ z2VHM0UZ|WLmh-6hMQ;{yg(C=H0(o?N>QC-rsA>eY{ z0pv>aU1KFXQz7?EC98Q@q*bw*kIY~*puxk{fHb??yf=L}+S-5kJzS#dz1Dgjh>JDZz~_tSHwY4>(*gRdTjF7%Rw9f@0&e0%+Nwm08oY}(vG zunR%E;Wpf&FuQhcAG3fh&>~ef`Bw=x7Oz$2GV46HMA>7*JncQopwEV~oUPpHC@*#; zPPb9`Zlwjp=dryl01RgmT`fB-rS-~MHiIw2{zR`@QOi~(+1gjjYMv~z12-jH5ncm0 zX&xOipY0tjmd+mp_oMl++u?Y%ax?Xd`Rw?lIhkPNfCh4*BYccC+x* zuz65bUd6RtHGRb^!``o9HG2#kS~ikBlTC*NXjF2pY-V51$=K~P2f_YC!aA`>nXazR zWP9J&0jqgv$a6u*EGUGRrn84g#tio9o`tMI%t3|B2HiR9O3NwB7;~rT3FG_5p+XaX z&hVw7TE8zoHpJR=h<%LeBGba`Mgi`C?gd`Wz|u$eu{BASoEx>x%B|qRjS#>>+^O`B zeH+={cXfc}JQCC1s0?+-0P4c`DC2W|jO`ttB`7&Q81`a}O(%bdvL|4)&; zlZi}jXJ3>)S;!~t>?s~+b#Vf<6n6A%8#_U%!uQ&=b4%`}W81;0f-Y8ZsPAu%!frTA8&*)tM?LvC0p0C!<2v@U7s@GHuC2u!TdMtS(7~t-zU$D8PYpp z6^w{cvBe+oMZ{pU=nXdcM1MdG`$ECQyjZ$HJ0HNMO!D9cNTb{E2Ib(`x1sOg_|L&$ z1@{3g8?TqYjUcn{WAn-W%P?hDY=m=s&wbkL%f)o9lGU?irRAk1@tZN$u#3y0zHeB2 zD>0;TPo)_@=XJ)njTeot8jl$M)wquwPUCLBjenp2E&rNf8{fmfz(2zMCtnTJ`=2Pa1^$Y6<);F!MTTe2V ztOu=otq)l@SwqZ^t&P?t)|u9FYd&+_I^1fqd}Fz8xyJ0bT(-Ps>9M?Ed6aq7vfZ-5 z60oeYEM&Gb8!YJ-1YaL~Wd08(VE%>qtodd0Q|4XFDsz`PYHl{)X`an2HoMIe%{g{+ znwes%O<$NkFuiGd-E@*Eg|T?A=^@i5Q;5koHJX-~X7cTpa?3=Eh#ra`n$8I{r@P6d zGhDowMzQCO8T=(`dB6X zS5k_}UqZf{6lLl6D(SLH`l(8~pps6jq~j{-WtH?&??wqz&0V8+$3Lj%wj-TsdOS3T zyQGU*swvLf(jP-}Dv4D|sxpA@F4Z!vdY0-hQQxYhZxT{W`EK?fYDLsns)ui?r2kP# zH&oJ>1N8hwwfcoh`dpG?$!0)ZS1o=vK+&I6tDmZ*PgK%>tE7(x==q~+^+U6_|2Ncs zP%VC-lHMPn=snfyHI;N#C0*%u{C#u`DqKnOJ(N1@-Ws6zO%?ceD(SZ>>EBh-ZwBbO zq*{&rS|wgoNk3CbKN+CtHP!0BsiYsPq*qnaEB*Dv;0FNbRmxUMWc2b6q^b6T-sDze& zyO2xAwo;o}j?(RzQcLe%91KQ9ucs{nLq(G>5cN0veNApbaN5P{pf>_nQ-ReHQCePA zbC0Jr)Z%lm67wVQ*R~cf~*~QAfr@X;H6xu%oLB(c4mA0~5z#M= zXY!6)It5|Lnm(=~5?P^^HXvi1*c=SEdZJ==Y(Y((*bxk`1*PGz&l_#&bPELuiWFjG 
zonW^&1abQ9TI*W+)72WmsIbYE`H+x&{K#>2x_NaywGh4j1 zbaz!ys)H!hNn+nk12^6r2EP-oxy1>h+ntPlUqmAAfvI5^@9;sL$OTv+RsxqLPWfIq zBF3bohM+-0w6#K>d*spx`x@K)ElsfFHZO!G(%c4{mV-NcN$orl*bM=}fnW!)1$ChW zG!kqJdwo(q6e@u&lwK(cq;M2?gP~4{gj7KAvJpt85;O=YeG=td(7WrZd6o6mHTG60 zHmQ0P*Pz0DV!+pt*s(eWMJ~6)X)kqDRJamf-+ng?4J`9t;%1h+%EPr%%TO+h3={If?C1Sh7UIh34(BV8Gs3}jhE$jpG zT0Q52p~a{}#9=Kj@Eh4+qLg#cAmunLC-SzlYmwpV2 t?U2H2lfp~=zrvejOr2`~XMB_K*C!GnzP(KUmQ86*Pw09?y~mss{ujRQx26C9 delta 5801 zcma)AX>=6Vm9FZ3ud2Gbs#iga8-K}cpt)!hNk`5<02R{)6VIzJT@FNO>>*)(4{FcE_F3caDAr5N$Sw^zh(snIl zSi&G{lGw1Mp<(&jH4Vg0t!~TmhK7wx8pOjFy(C{5YaB-b!umF?ep_3rC%NPF&Zm0E z@g2+7DUZz`mN*`F7?1BcoXeKuX`vnQi&m{|Sh83+cS$Tw6-R}prHZFS+bB6MEp1oI z^P@uF4iIfg))uOV7rz(;OSpmb%nXtszRmUh85k`VeojMH6Gf#tqCx$j!5vQZmScr>xf}eF4?sqrKP1%&}NN`S+HiYr})_gtLs-S zX^6s-p!1HkFC3nSY98N-szJ&n58j{(D^bnOyVlGeHOUZ%CYZV;C^OB1yH+Dib>6c| zWy#%wa#wDVX47Kpca{fYx6o~Tt$(Knv{<{U#-Y3Ip{rS>*ru|zEY&X5u?#od@scL4 zV3v=ho3dp%^jB1H-iOYh0!PnLv&qb37q+r7Zuq5-9O1A!KMA`ZfnE3i0}PLvErF%j zh0Sb<8@m1E42LxZdckD)BtX<2ZQ-L`DQ3y4FlMtGo&?BDj+&gwQDOfjDbtr5%+7=t z?qU=%AQGI(B)V-AOR@`skd7&3%ZB{Rc1aD*!tp~@Co%uwJ$sj9)zT#=YddpE^oHGQyOl!A4#zbth-M3{b zTjY0SkF<<-P|@nNED}$Hw~V~2dc7=*W!nWNgYTpi3r7!<46ggbgTxO%v=SMH93p+- z>sI2>19_P&mj#hM78B#bJmQA;%E>BtFazC)n=U&|%ApKrnX0fEIliAkMsaClXOaw4 zngl6RP~_2>ctt4E=s9Z2FcwgSb%<)8NwPWW@NAL-Y6Wq@mu!ECE9$X z5^WyHGE`v^vR$tv-X69Ta8{vf52q7N3+86$W(J0Py?I_Q+?!7N#`gXX%V6oMuoVR? 
ztsz!p`+7YwRG%*^s^*LJq@U}@GaJE1 zsMwg@@SAFq+Y215LGZE~R5HH?of*!CtHLH^+gC&K;EfR8!f$IZ?EH3X(Q$sUwZtW5 zuwiI7Y&#<*g0TqiysnnSanajqF`nVRlfD=;;&-+9D_&I}_)RT|kp{D&s<0Ny%SeAv z=8zf8x;aGXpp`K&s zd?I>gNOOgFVX|QFvJ2`o+i~TQ5|rDd|8-Q;iUK;IlD;;#t8Z%7SAyPJOB` z*RE*9UZbA1J+alu>y-YMB+GwV_tAFpcTytCq4e`)hiH*zh1xHWV-~`EtPV;qk)@Dw z8Ee3C5hrxjWxOr1y%Q7mUpg_A zNI9{0&E?}RtrMGIn0HGj^4{Bp)AK?n1_gON4y9X6#3;CgShuCLRDhQ{NFiBUzhn*k zz5{VjI!L_13|4EVWQ65-w%iXseNA$Ly@SMX=2T?1`m!saK15z8EWoNE4MUJ(A7fvt`ls=xiytfvVrSj*|Xjvi;Fyi)ZDkP>O>3-5_b)k&+u^7>v1zb^Xc> z{0iPg08)C)AXXOcZ{`)17A#&2byqRbzZ*_n+)|W}E80zr0HVWvPJ>4`i2@}ziH^N_ zI`7T%ZjwlE4jXjIly9n&yg7bRMTy{l>bq@@;>Wq;jHD7#VJaBI%?`H-q|Ak7J-_Pr##29E3!Q~%S$QFbqNI?TV|5+R&h>>f zj|buHLYnwu5sy`>GJw_dp_N)hBafW5MHtOjO^y>qDB>p!ACt|!;JqT6!$r7@c^nB; z7o%z6pw||oK{{Kb3I)j9SWF`WJbwamyirWu@ZouEbbl&F58gSCjSeZ7lEPN=7`0D8 zeW??uI})e~XhRs#HX#Z?%B6VND(*4@BH8`GM$wu0#@=NUNMmd zx!gk&X?nzvcua%OCQ>b8P2{LcscZ$d2IzCjyxmDOG9|MoMU@q|E3o<(YYTax^nPpNtOXPC?baDb&+hMio!8W2$jX|3n|B9n);~ zI(3ijeH&3`$mgU#NICRqXmCA^wGctDyawae(;cDKQ*@{W9&DhO2uoU$iHny_2V|tL6EQoO%2!DCkydvs<%)(#G3^d zJ!(pNvxTqltGCcmCTbvjumz+3Mk4~`tr%vkP=`r5)uLt*V>l3BuJHY3+*TUk5ZD@l zsF`^_Z+alhn~8&LBesq^_=_#8SwNuqeD-K79dAMgz>IBXPMTV7Ex~T%&CK)$;h!53 zVxD8e=ZI&sSQEmF5&r5nx`4w+ZjZv3ZI2p>loO}Q4&FcUIU@;P*-jH+Jf4u&jwjmfo!!unApiLLZ_Hw-5Ys0|pvfZyyz3 zHf%wUb@ce1^|Y90U>M?Uh(cC2pxa>xQcfI!n|YJj-9RIcR|rUf`we(AsKW*xQ#{_j z2K}+8D_-_2-+6O3&@nw4{3dU_Dc94vlUfc-4~2uuNduG^>4ajl*nelgZGQ`{#7L7B z`$PK$`)T{J4$G4YF*MsL#Y$}}pJv0!cxh`}(%;^fuL+9z{1$rbmR=Xzj(wYIvHuxv z^p}S9Q>|{1KiaLu9mUScHKD)vm#l-D$VE}KeiP&P%JI8ZO0YIS;q;-5yx|m zZH{`!Vn>x@sw3Z#=@{Zjbi_D_@zi)^+%x{ec-OdQoHtGxFBz@IPGf_y%$Q@8*&2-^ zBgYtFc#OV=g74%W>-X`2?S1{0eo22-e_21Qx9HF6tMvJLg+56iJwo^DgYqCKZ=)9SUwT9r0c%MZ2ZOR5;UQ6P;L@x7=b!Z+h3JJ2HO z6Y)Ukev!1FSOg(dUn1SHbd>$&mbJ4?A|CNwp!L$vpgu;N27j%W{*~@htJR{=@paPQ zEaF($e^kl~b#IXT$23BMg7Q!>> result = my_function() - >>> print(result) - 'output' + Examples: + >>> result = my_function() + >>> print(result) + 'output' """ ``` **Key formatting rules:** -1. 
`Example:` with a colon, 4-space indented from the docstring margin -2. `::` on its own line, 8-space indented (4 more than `Example:`) -3. Blank line after `::` -4. Code examples indented 12 spaces (4 more than `::`) -5. Use `>>>` for Python prompts and `...` for continuation lines +1. Use `Examples:` (plural, not `Example:` singular) +2. Code examples are indented 8 spaces (4 more than `Examples:`) +3. Use `>>>` for Python prompts and `...` for continuation lines +4. No `::` marker needed - griffe handles the parsing automatically -**Incorrect format (will not render properly):** +**Incorrect format (will not render with syntax highlighting):** ```python - Example: - >>> code_here() # Wrong - missing :: and extra indentation + Example: # Wrong - singular form is treated as an admonition + :: # Wrong - reST literal block marker not needed + >>> code_here() ``` **Correct format:** ```python - Example: - :: - - >>> code_here() # Correct - has :: and proper indentation + Examples: + >>> code_here() # Correct - plural form, proper indentation ``` ### Multiple Examples -For multiple examples, use the same pattern: +For multiple examples, continue in the same section: ```python - Example: - :: - - >>> # First example - >>> x = create_thing() + Examples: + >>> # First example + >>> x = create_thing() - >>> # Second example - >>> y = other_thing() + >>> # Second example + >>> y = other_thing() ``` ### Class and Method Docstrings @@ -281,20 +275,16 @@ Apply the same format to class docstrings and method docstrings: class MyClass: """Class description. - Example: - :: - - >>> obj = MyClass() - >>> obj.do_something() + Examples: + >>> obj = MyClass() + >>> obj.do_something() """ def method(self): """Method description. 
- Example: - :: - - >>> self.method() + Examples: + >>> self.method() """ ``` diff --git a/docs/api/AbstractDataStore.html b/docs/api/AbstractDataStore.html index c7a021f..b4b9393 100644 --- a/docs/api/AbstractDataStore.html +++ b/docs/api/AbstractDataStore.html @@ -402,7 +402,7 @@

On this page

  • AbstractDataStore
      -
    • Example
    • +
    • Examples
    • Methods
      • read_url
      • @@ -426,13 +426,12 @@

        AbstractDataStore

        Protocol for data storage operations.

        This protocol abstracts over different storage backends for dataset data: - S3DataStore: S3-compatible object storage - PDSBlobStore: ATProto PDS blob storage (future)

        The separation of index (metadata) from data store (actual files) allows flexible deployment: local index with S3 storage, atmosphere index with S3 storage, or atmosphere index with PDS blobs.

        -
        -

        Example

        -

        ::

        -
        >>> store = S3DataStore(credentials, bucket="my-bucket")
        ->>> urls = store.write_shards(dataset, prefix="training/v1")
        ->>> print(urls)
        -['s3://my-bucket/training/v1/shard-000000.tar', ...]
        +
        +

        Examples

        +
        >>> store = S3DataStore(credentials, bucket="my-bucket")
        +>>> urls = store.write_shards(dataset, prefix="training/v1")
        +>>> print(urls)
        +['s3://my-bucket/training/v1/shard-000000.tar', ...]

        Methods

        diff --git a/docs/api/AbstractIndex.html b/docs/api/AbstractIndex.html index 23f75de..fa79527 100644 --- a/docs/api/AbstractIndex.html +++ b/docs/api/AbstractIndex.html @@ -403,7 +403,7 @@

        On this page

      • AbstractIndex
        • Optional Extensions
        • -
        • Example
        • +
        • Examples
        • Attributes
        • Methods
            @@ -436,21 +436,20 @@

            AbstractIndex

            Optional Extensions

            Some index implementations support additional features: - data_store: An AbstractDataStore for reading/writing dataset shards. If present, load_dataset will use it for S3 credential resolution.

      • -
        -

        Example

        -

        ::

        -
        >>> def publish_and_list(index: AbstractIndex) -> None:
        -...     # Publish schemas for different types
        -...     schema1 = index.publish_schema(ImageSample, version="1.0.0")
        -...     schema2 = index.publish_schema(TextSample, version="1.0.0")
        -...
        -...     # Insert datasets of different types
        -...     index.insert_dataset(image_ds, name="images")
        -...     index.insert_dataset(text_ds, name="texts")
        -...
        -...     # List all datasets (mixed types)
        -...     for entry in index.list_datasets():
        -...         print(f"{entry.name} -> {entry.schema_ref}")
        +
        +

        Examples

        +
        >>> def publish_and_list(index: AbstractIndex) -> None:
        +...     # Publish schemas for different types
        +...     schema1 = index.publish_schema(ImageSample, version="1.0.0")
        +...     schema2 = index.publish_schema(TextSample, version="1.0.0")
        +...
        +...     # Insert datasets of different types
        +...     index.insert_dataset(image_ds, name="images")
        +...     index.insert_dataset(text_ds, name="texts")
        +...
        +...     # List all datasets (mixed types)
        +...     for entry in index.list_datasets():
        +...         print(f"{entry.name} -> {entry.schema_ref}")

        Attributes

        @@ -596,14 +595,13 @@

        Rais

        -
        -

        Example

        -

        ::

        -
        >>> entry = index.get_dataset("my-dataset")
        ->>> SampleType = index.decode_schema(entry.schema_ref)
        ->>> ds = Dataset[SampleType](entry.data_urls[0])
        ->>> for sample in ds.ordered():
        -...     print(sample)  # sample is instance of SampleType
        +
        +

        Examples

        +
        >>> entry = index.get_dataset("my-dataset")
        +>>> SampleType = index.decode_schema(entry.schema_ref)
        +>>> ds = Dataset[SampleType](entry.data_urls[0])
        +>>> for sample in ds.ordered():
        +...     print(sample)  # sample is instance of SampleType
        diff --git a/docs/api/AtUri.html b/docs/api/AtUri.html index 30d6746..ffeb0bf 100644 --- a/docs/api/AtUri.html +++ b/docs/api/AtUri.html @@ -402,7 +402,7 @@

        On this page

        • AtUri
            -
          • Example
          • +
          • Examples
          • Attributes
          • Methods
              @@ -424,16 +424,15 @@

              AtUri

              atmosphere.AtUri(authority, collection, rkey)

              Parsed AT Protocol URI.

              AT URIs follow the format: at:////

              -
              -

              Example

              -

              ::

              -
              >>> uri = AtUri.parse("at://did:plc:abc123/ac.foundation.dataset.sampleSchema/xyz")
              ->>> uri.authority
              -'did:plc:abc123'
              ->>> uri.collection
              -'ac.foundation.dataset.sampleSchema'
              ->>> uri.rkey
              -'xyz'
              +
              +

              Examples

              +
              >>> uri = AtUri.parse("at://did:plc:abc123/ac.foundation.dataset.sampleSchema/xyz")
              +>>> uri.authority
              +'did:plc:abc123'
              +>>> uri.collection
              +'ac.foundation.dataset.sampleSchema'
              +>>> uri.rkey
              +'xyz'

              Attributes

              diff --git a/docs/api/AtmosphereClient.html b/docs/api/AtmosphereClient.html index 022bf6e..069dde8 100644 --- a/docs/api/AtmosphereClient.html +++ b/docs/api/AtmosphereClient.html @@ -402,7 +402,7 @@

              On this page

              • AtmosphereClient
                  -
                • Example
                • +
                • Examples
                • Note
                • Attributes
                • Methods @@ -438,13 +438,12 @@

                  AtmosphereClient

                  atmosphere.AtmosphereClient(base_url=None, *, _client=None)

                  ATProto client wrapper for atdata operations.

                  This class wraps the atproto SDK client and provides higher-level methods for working with atdata records (schemas, datasets, lenses).

                  -
                  -

                  Example

                  -

                  ::

                  -
                  >>> client = AtmosphereClient()
                  ->>> client.login("alice.bsky.social", "app-password")
                  ->>> print(client.did)
                  -'did:plc:...'
                  +
                  +

                  Examples

                  +
                  >>> client = AtmosphereClient()
                  +>>> client.login("alice.bsky.social", "app-password")
                  +>>> print(client.did)
                  +'did:plc:...'

                  Note

                  diff --git a/docs/api/AtmosphereIndex.html b/docs/api/AtmosphereIndex.html index 4657505..ddce3ed 100644 --- a/docs/api/AtmosphereIndex.html +++ b/docs/api/AtmosphereIndex.html @@ -402,7 +402,7 @@

                  On this page

                  • AtmosphereIndex
                      -
                    • Example
                    • +
                    • Examples
                    • Attributes
                    • Methods
                        @@ -431,19 +431,18 @@

                        AtmosphereIndex

                        ATProto index implementing AbstractIndex protocol.

                        Wraps SchemaPublisher/Loader and DatasetPublisher/Loader to provide a unified interface compatible with LocalIndex.

                        Optionally accepts a PDSBlobStore for writing dataset shards as ATProto blobs, enabling fully decentralized dataset storage.

                        -
                        -

                        Example

                        -

                        ::

                        -
                        >>> client = AtmosphereClient()
                        ->>> client.login("handle.bsky.social", "app-password")
                        ->>>
                        ->>> # Without blob storage (external URLs only)
                        ->>> index = AtmosphereIndex(client)
                        ->>>
                        ->>> # With PDS blob storage
                        ->>> store = PDSBlobStore(client)
                        ->>> index = AtmosphereIndex(client, data_store=store)
                        ->>> entry = index.insert_dataset(dataset, name="my-data")
                        +
                        +

                        Examples

                        +
                        >>> client = AtmosphereClient()
                        +>>> client.login("handle.bsky.social", "app-password")
                        +>>>
                        +>>> # Without blob storage (external URLs only)
                        +>>> index = AtmosphereIndex(client)
                        +>>>
                        +>>> # With PDS blob storage
                        +>>> store = PDSBlobStore(client)
                        +>>> index = AtmosphereIndex(client, data_store=store)
                        +>>> entry = index.insert_dataset(dataset, name="my-data")

                        Attributes

                        diff --git a/docs/api/BlobSource.html b/docs/api/BlobSource.html index 39bb233..5b15b69 100644 --- a/docs/api/BlobSource.html +++ b/docs/api/BlobSource.html @@ -403,7 +403,7 @@

                        On this page

                      • BlobSource
                        • Attributes
                        • -
                        • Example
                        • +
                        • Examples
                        • Methods
                          • from_refs
                          • @@ -451,17 +451,16 @@

                            -

                            Example

                            -

                            ::

                            -
                            >>> source = BlobSource(
                            -...     blob_refs=[
                            -...         {"did": "did:plc:abc123", "cid": "bafyrei..."},
                            -...         {"did": "did:plc:abc123", "cid": "bafyrei..."},
                            -...     ],
                            -... )
                            ->>> for shard_id, stream in source.shards:
                            -...     process(stream)
                            +
                            +

                            Examples

                            +
                            >>> source = BlobSource(
                            +...     blob_refs=[
                            +...         {"did": "did:plc:abc123", "cid": "bafyrei..."},
                            +...         {"did": "did:plc:abc123", "cid": "bafyrei..."},
                            +...     ],
                            +... )
                            +>>> for shard_id, stream in source.shards:
                            +...     process(stream)

                            Methods

                            diff --git a/docs/api/DataSource.html b/docs/api/DataSource.html index 8873daf..8d17596 100644 --- a/docs/api/DataSource.html +++ b/docs/api/DataSource.html @@ -402,7 +402,7 @@

                            On this page

                            • DataSource
                                -
                              • Example
                              • +
                              • Examples
                              • Attributes
                              • Methods
                                  @@ -426,18 +426,17 @@

                                  DataSource

                                  Protocol for data sources that provide streams to Dataset.

                                  A DataSource abstracts over different ways of accessing dataset shards: - URLSource: Standard WebDataset-compatible URLs (http, https, pipe, gs, etc.) - S3Source: S3-compatible storage with explicit credentials - BlobSource: ATProto blob references (future)

                                  The key method is shards(), which yields (identifier, stream) pairs. These are fed directly to WebDataset’s tar_file_expander, bypassing URL resolution entirely. This enables: - Private S3 repos with credentials - Custom endpoints (Cloudflare R2, MinIO) - ATProto blob streaming - Any other source that can provide file-like objects

                                  -
                                  -

                                  Example

                                  -

                                  ::

                                  -
                                  >>> source = S3Source(
                                  -...     bucket="my-bucket",
                                  -...     keys=["data-000.tar", "data-001.tar"],
                                  -...     endpoint="https://r2.example.com",
                                  -...     credentials=creds,
                                  -... )
                                  ->>> ds = Dataset[MySample](source)
                                  ->>> for sample in ds.ordered():
                                  -...     print(sample)
                                  +
                                  +

                                  Examples

                                  +
                                  >>> source = S3Source(
                                  +...     bucket="my-bucket",
                                  +...     keys=["data-000.tar", "data-001.tar"],
                                  +...     endpoint="https://r2.example.com",
                                  +...     credentials=creds,
                                  +... )
                                  +>>> ds = Dataset[MySample](source)
                                  +>>> for sample in ds.ordered():
                                  +...     print(sample)

                                  Attributes

                                  diff --git a/docs/api/Dataset.html b/docs/api/Dataset.html index 1315d8b..4c6d079 100644 --- a/docs/api/Dataset.html +++ b/docs/api/Dataset.html @@ -404,7 +404,7 @@

                                  On this page

                                  • Parameters
                                  • Attributes
                                  • -
                                  • Example
                                  • +
                                  • Examples
                                  • Note
                                  • Methods
                                      @@ -479,16 +479,15 @@

                                      -

                                      Example

                                      -

                                      ::

                                      -
                                      >>> ds = Dataset[MyData]("path/to/data-{000000..000009}.tar")
                                      ->>> for sample in ds.ordered(batch_size=32):
                                      -...     # sample is SampleBatch[MyData] with batch_size samples
                                      -...     embeddings = sample.embeddings  # shape: (32, ...)
                                      -...
                                      ->>> # Transform to a different view
                                      ->>> ds_view = ds.as_type(MyDataView)
                                      +
                                      +

                                      Examples

                                      +
                                      >>> ds = Dataset[MyData]("path/to/data-{000000..000009}.tar")
                                      +>>> for sample in ds.ordered(batch_size=32):
                                      +...     # sample is SampleBatch[MyData] with batch_size samples
                                      +...     embeddings = sample.embeddings  # shape: (32, ...)
                                      +...
                                      +>>> # Transform to a different view
                                      +>>> ds_view = ds.as_type(MyDataView)

                                      Note

                                      @@ -817,15 +816,14 @@

                                      Wa ds.to_parquet("output.parquet", maxcount=10000)

                                      This creates multiple parquet files: output-000000.parquet, output-000001.parquet, etc.

                                      -
                                      -

                                      Example

                                      -

                                      ::

                                      -
                                      >>> ds = Dataset[MySample]("data.tar")
                                      ->>> # Small dataset - load all at once
                                      ->>> ds.to_parquet("output.parquet")
                                      ->>>
                                      ->>> # Large dataset - process in chunks
                                      ->>> ds.to_parquet("output.parquet", maxcount=50000)
                                      +
                                      +

                                      Examples

                                      +
                                      >>> ds = Dataset[MySample]("data.tar")
                                      +>>> # Small dataset - load all at once
                                      +>>> ds.to_parquet("output.parquet")
                                      +>>>
                                      +>>> # Large dataset - process in chunks
                                      +>>> ds.to_parquet("output.parquet", maxcount=50000)
                                      diff --git a/docs/api/DatasetDict.html b/docs/api/DatasetDict.html index 494e146..cdfcb6a 100644 --- a/docs/api/DatasetDict.html +++ b/docs/api/DatasetDict.html @@ -403,7 +403,7 @@

                                      On this page

                                    • DatasetDict
                                    @@ -448,16 +448,15 @@

                                    -

                                    Example

                                    -

                                    ::

                                    -
                                    >>> ds_dict = load_dataset("path/to/data", MyData)
                                    ->>> train = ds_dict["train"]
                                    ->>> test = ds_dict["test"]
                                    ->>>
                                    ->>> # Iterate over all splits
                                    ->>> for split_name, dataset in ds_dict.items():
                                    -...     print(f"{split_name}: {len(dataset.shard_list)} shards")
                                    +
                                    +

                                    Examples

                                    +
                                    >>> ds_dict = load_dataset("path/to/data", MyData)
                                    +>>> train = ds_dict["train"]
                                    +>>> test = ds_dict["test"]
                                    +>>>
                                    +>>> # Iterate over all splits
                                    +>>> for split_name, dataset in ds_dict.items():
                                    +...     print(f"{split_name}: {len(dataset.shard_list)} shards")

                                    Attributes

                                    diff --git a/docs/api/DatasetLoader.html b/docs/api/DatasetLoader.html index 9c00efe..86c497d 100644 --- a/docs/api/DatasetLoader.html +++ b/docs/api/DatasetLoader.html @@ -402,7 +402,7 @@

                                    On this page

                                    • DatasetLoader
                                        -
                                      • Example
                                      • +
                                      • Examples
                                      • Methods
                                        • get
                                        • @@ -430,19 +430,18 @@

                                          DatasetLoader

                                          atmosphere.DatasetLoader(client)

                                          Loads dataset records from ATProto.

                                          This class fetches dataset index records and can create Dataset objects from them. Note that loading a dataset requires having the corresponding Python class for the sample type.

                                          -
                                          -

                                          Example

                                          -

                                          ::

                                          -
                                          >>> client = AtmosphereClient()
                                          ->>> loader = DatasetLoader(client)
                                          ->>>
                                          ->>> # List available datasets
                                          ->>> datasets = loader.list()
                                          ->>> for ds in datasets:
                                          -...     print(ds["name"], ds["schemaRef"])
                                          ->>>
                                          ->>> # Get a specific dataset record
                                          ->>> record = loader.get("at://did:plc:abc/ac.foundation.dataset.record/xyz")
                                          +
                                          +

                                          Examples

                                          +
                                          >>> client = AtmosphereClient()
                                          +>>> loader = DatasetLoader(client)
                                          +>>>
                                          +>>> # List available datasets
                                          +>>> datasets = loader.list()
                                          +>>> for ds in datasets:
                                          +...     print(ds["name"], ds["schemaRef"])
                                          +>>>
                                          +>>> # Get a specific dataset record
                                          +>>> record = loader.get("at://did:plc:abc/ac.foundation.dataset.record/xyz")

                                          Methods

                                          @@ -976,13 +975,12 @@

                                          Ra

                                          -
                                          -

                                          Example

                                          -

                                          ::

                                          -
                                          >>> loader = DatasetLoader(client)
                                          ->>> dataset = loader.to_dataset(uri, MySampleType)
                                          ->>> for batch in dataset.shuffled(batch_size=32):
                                          -...     process(batch)
                                          +
                                          +

                                          Examples

                                          +
                                          >>> loader = DatasetLoader(client)
                                          +>>> dataset = loader.to_dataset(uri, MySampleType)
                                          +>>> for batch in dataset.shuffled(batch_size=32):
                                          +...     process(batch)
                                          diff --git a/docs/api/DatasetPublisher.html b/docs/api/DatasetPublisher.html index 35e7171..2e69763 100644 --- a/docs/api/DatasetPublisher.html +++ b/docs/api/DatasetPublisher.html @@ -402,7 +402,7 @@

                                          On this page

                                          • DatasetPublisher
                                              -
                                            • Example
                                            • +
                                            • Examples
                                            • Methods
                                              • publish
                                              • @@ -425,21 +425,20 @@

                                                DatasetPublisher

                                                atmosphere.DatasetPublisher(client)

                                                Publishes dataset index records to ATProto.

                                                This class creates dataset records that reference a schema and point to external storage (WebDataset URLs) or ATProto blobs.

                                                -
                                                -

                                                Example

                                                -

                                                ::

                                                -
                                                >>> dataset = atdata.Dataset[MySample]("s3://bucket/data-{000000..000009}.tar")
                                                ->>>
                                                ->>> client = AtmosphereClient()
                                                ->>> client.login("handle", "password")
                                                ->>>
                                                ->>> publisher = DatasetPublisher(client)
                                                ->>> uri = publisher.publish(
                                                -...     dataset,
                                                -...     name="My Training Data",
                                                -...     description="Training data for my model",
                                                -...     tags=["computer-vision", "training"],
                                                -... )
                                                +
                                                +

                                                Examples

                                                +
                                                >>> dataset = atdata.Dataset[MySample]("s3://bucket/data-{000000..000009}.tar")
                                                +>>>
                                                +>>> client = AtmosphereClient()
                                                +>>> client.login("handle", "password")
                                                +>>>
                                                +>>> publisher = DatasetPublisher(client)
                                                +>>> uri = publisher.publish(
                                                +...     dataset,
                                                +...     name="My Training Data",
                                                +...     description="Training data for my model",
                                                +...     tags=["computer-vision", "training"],
                                                +... )

                                                Methods

                                                diff --git a/docs/api/DictSample.html b/docs/api/DictSample.html index 78aaeb7..2ed8903 100644 --- a/docs/api/DictSample.html +++ b/docs/api/DictSample.html @@ -402,7 +402,7 @@

                                                On this page

                                                • DictSample
                                                    -
                                                  • Example
                                                  • +
                                                  • Examples
                                                  • Note
                                                  • Attributes
                                                  • Methods @@ -433,17 +433,16 @@

                                                    DictSample

                                                    This class is the default sample type for datasets when no explicit type is specified. It stores the raw unpacked msgpack data and provides both attribute-style (sample.field) and dict-style (sample["field"]) access to fields.

                                                    DictSample is useful for: - Exploring datasets without defining a schema first - Working with datasets that have variable schemas - Prototyping before committing to a typed schema

                                                    To convert to a typed schema, use Dataset.as_type() with a @packable-decorated class. Every @packable class automatically registers a lens from DictSample, making this conversion seamless.

                                                    -
                                                    -

                                                    Example

                                                    -

                                                    ::

                                                    -
                                                    >>> ds = load_dataset("path/to/data.tar")  # Returns Dataset[DictSample]
                                                    ->>> for sample in ds.ordered():
                                                    -...     print(sample.some_field)      # Attribute access
                                                    -...     print(sample["other_field"])  # Dict access
                                                    -...     print(sample.keys())          # Inspect available fields
                                                    -...
                                                    ->>> # Convert to typed schema
                                                    ->>> typed_ds = ds.as_type(MyTypedSample)
                                                    +
                                                    +

                                                    Examples

                                                    +
                                                    >>> ds = load_dataset("path/to/data.tar")  # Returns Dataset[DictSample]
                                                    +>>> for sample in ds.ordered():
                                                    +...     print(sample.some_field)      # Attribute access
                                                    +...     print(sample["other_field"])  # Dict access
                                                    +...     print(sample.keys())          # Inspect available fields
                                                    +...
                                                    +>>> # Convert to typed schema
                                                    +>>> typed_ds = ds.as_type(MyTypedSample)

                                                    Note

                                                    diff --git a/docs/api/Lens.html b/docs/api/Lens.html index 8ad9ef8..17cf3b2 100644 --- a/docs/api/Lens.html +++ b/docs/api/Lens.html @@ -402,7 +402,7 @@

                                                    On this page

                                                    • lens
                                                        -
                                                      • Example
                                                      • +
                                                      • Examples
                                                      • Classes
                                                        • Lens
                                                        • @@ -435,30 +435,29 @@

                                                          lens

                                                        • @lens: Decorator to create and register lens transformations

                                                        Lenses support the functional programming concept of composable, well-behaved transformations that satisfy lens laws (GetPut and PutGet).

                                                        -
                                                        -

                                                        Example

                                                        -

                                                        ::

                                                        -
                                                        >>> @packable
                                                        -... class FullData:
                                                        -...     name: str
                                                        -...     age: int
                                                        -...     embedding: NDArray
                                                        -...
                                                        ->>> @packable
                                                        -... class NameOnly:
                                                        -...     name: str
                                                        -...
                                                        ->>> @lens
                                                        -... def name_view(full: FullData) -> NameOnly:
                                                        -...     return NameOnly(name=full.name)
                                                        -...
                                                        ->>> @name_view.putter
                                                        -... def name_view_put(view: NameOnly, source: FullData) -> FullData:
                                                        -...     return FullData(name=view.name, age=source.age,
                                                        -...                     embedding=source.embedding)
                                                        -...
                                                        ->>> ds = Dataset[FullData]("data.tar")
                                                        ->>> ds_names = ds.as_type(NameOnly)  # Uses registered lens
                                                        +
                                                        +

                                                        Examples

                                                        +
                                                        >>> @packable
                                                        +... class FullData:
                                                        +...     name: str
                                                        +...     age: int
                                                        +...     embedding: NDArray
                                                        +...
                                                        +>>> @packable
                                                        +... class NameOnly:
                                                        +...     name: str
                                                        +...
                                                        +>>> @lens
                                                        +... def name_view(full: FullData) -> NameOnly:
                                                        +...     return NameOnly(name=full.name)
                                                        +...
                                                        +>>> @name_view.putter
                                                        +... def name_view_put(view: NameOnly, source: FullData) -> FullData:
                                                        +...     return FullData(name=view.name, age=source.age,
                                                        +...                     embedding=source.embedding)
                                                        +...
                                                        +>>> ds = Dataset[FullData]("data.tar")
                                                        +>>> ds_names = ds.as_type(NameOnly)  # Uses registered lens

                                                        Classes

                                                        @@ -518,16 +517,15 @@

                                                        -

                                                        Example

                                                        -

                                                        ::

                                                        -
                                                        >>> @lens
                                                        -... def name_lens(full: FullData) -> NameOnly:
                                                        -...     return NameOnly(name=full.name)
                                                        -...
                                                        ->>> @name_lens.putter
                                                        -... def name_lens_put(view: NameOnly, source: FullData) -> FullData:
                                                        -...     return FullData(name=view.name, age=source.age)
                                                        +
                                                        +

                                                        Examples

                                                        +
                                                        >>> @lens
                                                        +... def name_lens(full: FullData) -> NameOnly:
                                                        +...     return NameOnly(name=full.name)
                                                        +...
                                                        +>>> @name_lens.putter
                                                        +... def name_lens_put(view: NameOnly, source: FullData) -> FullData:
                                                        +...     return FullData(name=view.name, age=source.age)

                                                        Methods

                                                        @@ -693,12 +691,11 @@
                                                        -
                                                        -
                                                        Example
                                                        -

                                                        ::

                                                        -
                                                        >>> @my_lens.putter
                                                        -... def my_lens_put(view: ViewType, source: SourceType) -> SourceType:
                                                        -...     return SourceType(...)
                                                        +
                                                        +
                                                        Examples
                                                        +
                                                        >>> @my_lens.putter
                                                        +... def my_lens_put(view: ViewType, source: SourceType) -> SourceType:
                                                        +...     return SourceType(field=view.field, other=source.other)
                                                        @@ -925,16 +922,15 @@

                                                        -
                                                        -

                                                        Example

                                                        -

                                                        ::

                                                        -
                                                        >>> @lens
                                                        -... def extract_name(full: FullData) -> NameOnly:
                                                        -...     return NameOnly(name=full.name)
                                                        -...
                                                        ->>> @extract_name.putter
                                                        -... def extract_name_put(view: NameOnly, source: FullData) -> FullData:
                                                        -...     return FullData(name=view.name, age=source.age)
                                                        +
                                                        +

                                                        Examples

                                                        +
                                                        >>> @lens
                                                        +... def extract_name(full: FullData) -> NameOnly:
                                                        +...     return NameOnly(name=full.name)
                                                        +...
                                                        +>>> @extract_name.putter
                                                        +... def extract_name_put(view: NameOnly, source: FullData) -> FullData:
                                                        +...     return FullData(name=view.name, age=source.age)
                                                        diff --git a/docs/api/LensLoader.html b/docs/api/LensLoader.html index 6cb314c..b3a3073 100644 --- a/docs/api/LensLoader.html +++ b/docs/api/LensLoader.html @@ -402,7 +402,7 @@

                                                        On this page

                                                        • LensLoader
                                                            -
                                                          • Example
                                                          • +
                                                          • Examples
                                                          • Methods
                                                            • find_by_schemas
                                                            • @@ -425,16 +425,15 @@

                                                              LensLoader

                                                              atmosphere.LensLoader(client)

                                                              Loads lens records from ATProto.

                                                              This class fetches lens transformation records. Note that actually using a lens requires installing the referenced code and importing it manually.

                                                              -
                                                              -

                                                              Example

                                                              -

                                                              ::

                                                              -
                                                              >>> client = AtmosphereClient()
                                                              ->>> loader = LensLoader(client)
                                                              ->>>
                                                              ->>> record = loader.get("at://did:plc:abc/ac.foundation.dataset.lens/xyz")
                                                              ->>> print(record["name"])
                                                              ->>> print(record["sourceSchema"])
                                                              ->>> print(record.get("getterCode", {}).get("repository"))
                                                              +
                                                              +

                                                              Examples

                                                              +
                                                              >>> client = AtmosphereClient()
                                                              +>>> loader = LensLoader(client)
                                                              +>>>
                                                              +>>> record = loader.get("at://did:plc:abc/ac.foundation.dataset.lens/xyz")
                                                              +>>> print(record["name"])
                                                              +>>> print(record["sourceSchema"])
                                                              +>>> print(record.get("getterCode", {}).get("repository"))

                                                              Methods

                                                              diff --git a/docs/api/LensPublisher.html b/docs/api/LensPublisher.html index 872329b..09f63c7 100644 --- a/docs/api/LensPublisher.html +++ b/docs/api/LensPublisher.html @@ -402,7 +402,7 @@

                                                              On this page

                                                              • LensPublisher
                                                                  -
                                                                • Example
                                                                • +
                                                                • Examples
                                                                • Security Note
                                                                • Methods
                                                                    @@ -425,26 +425,25 @@

                                                                    LensPublisher

                                                                    atmosphere.LensPublisher(client)

                                                                    Publishes Lens transformation records to ATProto.

                                                                    This class creates lens records that reference source and target schemas and point to the transformation code in a git repository.

                                                                    -
                                                                    -

                                                                    Example

                                                                    -

                                                                    ::

                                                                    -
                                                                    >>> @atdata.lens
                                                                    -... def my_lens(source: SourceType) -> TargetType:
                                                                    -...     return TargetType(field=source.other_field)
                                                                    ->>>
                                                                    ->>> client = AtmosphereClient()
                                                                    ->>> client.login("handle", "password")
                                                                    ->>>
                                                                    ->>> publisher = LensPublisher(client)
                                                                    ->>> uri = publisher.publish(
                                                                    -...     name="my_lens",
                                                                    -...     source_schema_uri="at://did:plc:abc/ac.foundation.dataset.sampleSchema/source",
                                                                    -...     target_schema_uri="at://did:plc:abc/ac.foundation.dataset.sampleSchema/target",
                                                                    -...     code_repository="https://github.com/user/repo",
                                                                    -...     code_commit="abc123def456",
                                                                    -...     getter_path="mymodule.lenses:my_lens",
                                                                    -...     putter_path="mymodule.lenses:my_lens_putter",
                                                                    -... )
                                                                    +
                                                                    +

                                                                    Examples

                                                                    +
                                                                    >>> @atdata.lens
                                                                    +... def my_lens(source: SourceType) -> TargetType:
                                                                    +...     return TargetType(field=source.other_field)
                                                                    +>>>
                                                                    +>>> client = AtmosphereClient()
                                                                    +>>> client.login("handle", "password")
                                                                    +>>>
                                                                    +>>> publisher = LensPublisher(client)
                                                                    +>>> uri = publisher.publish(
                                                                    +...     name="my_lens",
                                                                    +...     source_schema_uri="at://did:plc:abc/ac.foundation.dataset.sampleSchema/source",
                                                                    +...     target_schema_uri="at://did:plc:abc/ac.foundation.dataset.sampleSchema/target",
                                                                    +...     code_repository="https://github.com/user/repo",
                                                                    +...     code_commit="abc123def456",
                                                                    +...     getter_path="mymodule.lenses:my_lens",
                                                                    +...     putter_path="mymodule.lenses:my_lens_putter",
                                                                    +... )

                                                                    Security Note

                                                                    diff --git a/docs/api/PDSBlobStore.html b/docs/api/PDSBlobStore.html index 7daba83..0ae9920 100644 --- a/docs/api/PDSBlobStore.html +++ b/docs/api/PDSBlobStore.html @@ -403,7 +403,7 @@

                                                                    On this page

                                                                  • PDSBlobStore
                                                                    • Attributes
                                                                    • -
                                                                    • Example
                                                                    • +
                                                                    • Examples
                                                                    • Methods
                                                                      • create_source
                                                                      • @@ -452,13 +452,12 @@

                                                                        -

                                                                        Example

                                                                        -

                                                                        ::

                                                                        -
                                                                        >>> store = PDSBlobStore(client)
                                                                        ->>> urls = store.write_shards(dataset, prefix="training/v1")
                                                                        ->>> # Returns AT URIs like:
                                                                        ->>> # ['at://did:plc:abc/blob/bafyrei...', ...]
                                                                        +
                                                                        +

                                                                        Examples

                                                                        +
                                                                        >>> store = PDSBlobStore(client)
                                                                        +>>> urls = store.write_shards(dataset, prefix="training/v1")
                                                                        +>>> # Returns AT URIs like:
                                                                        +>>> # ['at://did:plc:abc/blob/bafyrei...', ...]

                                                                        Methods

                                                                        diff --git a/docs/api/Packable-protocol.html b/docs/api/Packable-protocol.html index 71903e3..9476015 100644 --- a/docs/api/Packable-protocol.html +++ b/docs/api/Packable-protocol.html @@ -402,7 +402,7 @@

                                                                        On this page

                                                                        • Packable
                                                                            -
                                                                          • Example
                                                                          • +
                                                                          • Examples
                                                                          • Attributes
                                                                          • Methods
                                                                              @@ -427,18 +427,17 @@

                                                                              Packable

                                                                              This protocol allows classes decorated with @packable to be recognized as valid types for lens transformations and schema operations, even though the decorator doesn’t change the class’s nominal type at static analysis time.

                                                                              Both PackableSample subclasses and @packable-decorated classes satisfy this protocol structurally.

                                                                              The protocol captures the full interface needed for: - Lens type transformations (as_wds, from_data) - Schema publishing (class introspection via dataclass fields) - Serialization/deserialization (packed, from_bytes)

                                                                              -
                                                                              -

                                                                              Example

                                                                              -

                                                                              ::

                                                                              -
                                                                              >>> @packable
                                                                              -... class MySample:
                                                                              -...     name: str
                                                                              -...     value: int
                                                                              -...
                                                                              ->>> def process(sample_type: Type[Packable]) -> None:
                                                                              -...     # Type checker knows sample_type has from_bytes, packed, etc.
                                                                              -...     instance = sample_type.from_bytes(data)
                                                                              -...     print(instance.packed)
                                                                              +
                                                                              +

                                                                              Examples

                                                                              +
                                                                              >>> @packable
                                                                              +... class MySample:
                                                                              +...     name: str
                                                                              +...     value: int
                                                                              +...
                                                                              +>>> def process(sample_type: Type[Packable]) -> None:
                                                                              +...     # Type checker knows sample_type has from_bytes, packed, etc.
                                                                              +...     instance = sample_type.from_bytes(data)
                                                                              +...     print(instance.packed)

                                                                              Attributes

                                                                              diff --git a/docs/api/PackableSample.html b/docs/api/PackableSample.html index 719174e..d94a348 100644 --- a/docs/api/PackableSample.html +++ b/docs/api/PackableSample.html @@ -402,7 +402,7 @@

                                                                              On this page

                                                                              • PackableSample
                                                                                  -
                                                                                • Example
                                                                                • +
                                                                                • Examples
                                                                                • Attributes
                                                                                • Methods
                                                                                    @@ -426,17 +426,16 @@

                                                                                    PackableSample

                                                                                    Base class for samples that can be serialized with msgpack.

                                                                                    This abstract base class provides automatic serialization/deserialization for dataclass-based samples. Fields annotated as NDArray or NDArray | None are automatically converted between numpy arrays and bytes during packing/unpacking.

                                                                                    Subclasses should be defined either by: 1. Direct inheritance with the @dataclass decorator 2. Using the @packable decorator (recommended)

                                                                                    -
                                                                                    -

                                                                                    Example

                                                                                    -

                                                                                    ::

                                                                                    -
                                                                                    >>> @packable
                                                                                    -... class MyData:
                                                                                    -...     name: str
                                                                                    -...     embeddings: NDArray
                                                                                    -...
                                                                                    ->>> sample = MyData(name="test", embeddings=np.array([1.0, 2.0]))
                                                                                    ->>> packed = sample.packed  # Serialize to bytes
                                                                                    ->>> restored = MyData.from_bytes(packed)  # Deserialize
                                                                                    +
                                                                                    +

                                                                                    Examples

                                                                                    +
                                                                                    >>> @packable
                                                                                    +... class MyData:
                                                                                    +...     name: str
                                                                                    +...     embeddings: NDArray
                                                                                    +...
                                                                                    +>>> sample = MyData(name="test", embeddings=np.array([1.0, 2.0]))
                                                                                    +>>> packed = sample.packed  # Serialize to bytes
                                                                                    +>>> restored = MyData.from_bytes(packed)  # Deserialize

                                                                                    Attributes

                                                                                    diff --git a/docs/api/S3Source.html b/docs/api/S3Source.html index 19c9bfc..7946d21 100644 --- a/docs/api/S3Source.html +++ b/docs/api/S3Source.html @@ -403,7 +403,7 @@

                                                                                    On this page

                                                                                  • S3Source
                                                                                    • Attributes
                                                                                    • -
                                                                                    • Example
                                                                                    • +
                                                                                    • Examples
                                                                                    • Methods
                                                                                      • from_credentials
                                                                                      • @@ -480,18 +480,17 @@

                                                                                        -

                                                                                        Example

                                                                                        -

                                                                                        ::

                                                                                        -
                                                                                        >>> source = S3Source(
                                                                                        -...     bucket="my-datasets",
                                                                                        -...     keys=["train/shard-000.tar", "train/shard-001.tar"],
                                                                                        -...     endpoint="https://abc123.r2.cloudflarestorage.com",
                                                                                        -...     access_key="AKIAIOSFODNN7EXAMPLE",
                                                                                        -...     secret_key="wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY",
                                                                                        -... )
                                                                                        ->>> for shard_id, stream in source.shards:
                                                                                        -...     process(stream)
                                                                                        +
                                                                                        +

                                                                                        Examples

                                                                                        +
                                                                                        >>> source = S3Source(
                                                                                        +...     bucket="my-datasets",
                                                                                        +...     keys=["train/shard-000.tar", "train/shard-001.tar"],
                                                                                        +...     endpoint="https://abc123.r2.cloudflarestorage.com",
                                                                                        +...     access_key="AKIAIOSFODNN7EXAMPLE",
                                                                                        +...     secret_key="wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY",
                                                                                        +... )
                                                                                        +>>> for shard_id, stream in source.shards:
                                                                                        +...     process(stream)

                                                                                        Methods

                                                                                        @@ -578,15 +577,14 @@

                                                                                        Re

                                                                                        -
                                                                                        -

                                                                                        Example

                                                                                        -

                                                                                        ::

                                                                                        -
                                                                                        >>> creds = {
                                                                                        -...     "AWS_ACCESS_KEY_ID": "...",
                                                                                        -...     "AWS_SECRET_ACCESS_KEY": "...",
                                                                                        -...     "AWS_ENDPOINT": "https://r2.example.com",
                                                                                        -... }
                                                                                        ->>> source = S3Source.from_credentials(creds, "my-bucket", ["data.tar"])
                                                                                        +
                                                                                        +

                                                                                        Examples

                                                                                        +
                                                                                        >>> creds = {
                                                                                        +...     "AWS_ACCESS_KEY_ID": "...",
                                                                                        +...     "AWS_SECRET_ACCESS_KEY": "...",
                                                                                        +...     "AWS_ENDPOINT": "https://r2.example.com",
                                                                                        +... }
                                                                                        +>>> source = S3Source.from_credentials(creds, "my-bucket", ["data.tar"])
                                                                                        @@ -684,13 +682,12 @@

                                                                                        Rais

                                                                                        -
                                                                                        -

                                                                                        Example

                                                                                        -

                                                                                        ::

                                                                                        -
                                                                                        >>> source = S3Source.from_urls(
                                                                                        -...     ["s3://my-bucket/train-000.tar", "s3://my-bucket/train-001.tar"],
                                                                                        -...     endpoint="https://r2.example.com",
                                                                                        -... )
                                                                                        +
                                                                                        +

                                                                                        Examples

                                                                                        +
                                                                                        >>> source = S3Source.from_urls(
                                                                                        +...     ["s3://my-bucket/train-000.tar", "s3://my-bucket/train-001.tar"],
                                                                                        +...     endpoint="https://r2.example.com",
                                                                                        +... )
                                                                                        diff --git a/docs/api/SampleBatch.html b/docs/api/SampleBatch.html index fc51fd9..de8c0a0 100644 --- a/docs/api/SampleBatch.html +++ b/docs/api/SampleBatch.html @@ -404,7 +404,7 @@

                                                                                        On this page

                                                                                      @@ -469,12 +469,11 @@

                                                                                      -

                                                                                      Example

                                                                                      -

                                                                                      ::

                                                                                      -
                                                                                      >>> batch = SampleBatch[MyData]([sample1, sample2, sample3])
                                                                                      ->>> batch.embeddings  # Returns stacked numpy array of shape (3, ...)
                                                                                      ->>> batch.names  # Returns list of names
                                                                                      +
                                                                                      +

                                                                                      Examples

                                                                                      +
                                                                                      >>> batch = SampleBatch[MyData]([sample1, sample2, sample3])
                                                                                      +>>> batch.embeddings  # Returns stacked numpy array of shape (3, ...)
                                                                                      +>>> batch.names  # Returns list of names

                                                                                      Note

                                                                                      diff --git a/docs/api/SchemaLoader.html b/docs/api/SchemaLoader.html index f96329b..465bf15 100644 --- a/docs/api/SchemaLoader.html +++ b/docs/api/SchemaLoader.html @@ -402,7 +402,7 @@

                                                                                      On this page

                                                                                      • SchemaLoader
                                                                                          -
                                                                                        • Example
                                                                                        • +
                                                                                        • Examples
                                                                                        • Methods
                                                                                          • get
                                                                                          • @@ -424,16 +424,15 @@

                                                                                            SchemaLoader

                                                                                            atmosphere.SchemaLoader(client)

                                                                                            Loads PackableSample schemas from ATProto.

                                                                                            This class fetches schema records from ATProto and can list available schemas from a repository.

                                                                                            -
                                                                                            -

                                                                                            Example

                                                                                            -

                                                                                            ::

                                                                                            -
                                                                                            >>> client = AtmosphereClient()
                                                                                            ->>> client.login("handle", "password")
                                                                                            ->>>
                                                                                            ->>> loader = SchemaLoader(client)
                                                                                            ->>> schema = loader.get("at://did:plc:.../ac.foundation.dataset.sampleSchema/...")
                                                                                            ->>> print(schema["name"])
                                                                                            -'MySample'
                                                                                            +
                                                                                            +

                                                                                            Examples

                                                                                            +
                                                                                            >>> client = AtmosphereClient()
                                                                                            +>>> client.login("handle", "password")
                                                                                            +>>>
                                                                                            +>>> loader = SchemaLoader(client)
                                                                                            +>>> schema = loader.get("at://did:plc:.../ac.foundation.dataset.sampleSchema/...")
                                                                                            +>>> print(schema["name"])
                                                                                            +'MySample'

                                                                                            Methods

                                                                                            diff --git a/docs/api/SchemaPublisher.html b/docs/api/SchemaPublisher.html index 7713f7f..f93e893 100644 --- a/docs/api/SchemaPublisher.html +++ b/docs/api/SchemaPublisher.html @@ -402,7 +402,7 @@

                                                                                            On this page

                                                                                            • SchemaPublisher
                                                                                                -
                                                                                              • Example
                                                                                              • +
                                                                                              • Examples
                                                                                              • Methods
                                                                                                • publish
                                                                                                • @@ -423,21 +423,20 @@

                                                                                                  SchemaPublisher

                                                                                                  atmosphere.SchemaPublisher(client)

                                                                                                  Publishes PackableSample schemas to ATProto.

                                                                                                  This class introspects a PackableSample class to extract its field definitions and publishes them as an ATProto schema record.

                                                                                                  -
                                                                                                  -

                                                                                                  Example

                                                                                                  -

                                                                                                  ::

                                                                                                  -
                                                                                                  >>> @atdata.packable
                                                                                                  -... class MySample:
                                                                                                  -...     image: NDArray
                                                                                                  -...     label: str
                                                                                                  -...
                                                                                                  ->>> client = AtmosphereClient()
                                                                                                  ->>> client.login("handle", "password")
                                                                                                  ->>>
                                                                                                  ->>> publisher = SchemaPublisher(client)
                                                                                                  ->>> uri = publisher.publish(MySample, version="1.0.0")
                                                                                                  ->>> print(uri)
                                                                                                  -at://did:plc:.../ac.foundation.dataset.sampleSchema/...
                                                                                                  +
                                                                                                  +

                                                                                                  Examples

                                                                                                  +
                                                                                                  >>> @atdata.packable
                                                                                                  +... class MySample:
                                                                                                  +...     image: NDArray
                                                                                                  +...     label: str
                                                                                                  +...
                                                                                                  +>>> client = AtmosphereClient()
                                                                                                  +>>> client.login("handle", "password")
                                                                                                  +>>>
                                                                                                  +>>> publisher = SchemaPublisher(client)
                                                                                                  +>>> uri = publisher.publish(MySample, version="1.0.0")
                                                                                                  +>>> print(uri)
                                                                                                  +at://did:plc:.../ac.foundation.dataset.sampleSchema/...

                                                                                                  Methods

                                                                                                  diff --git a/docs/api/URLSource.html b/docs/api/URLSource.html index 583ad43..e8526e8 100644 --- a/docs/api/URLSource.html +++ b/docs/api/URLSource.html @@ -403,7 +403,7 @@

                                                                                                  On this page

                                                                                                • URLSource
                                                                                                  • Attributes
                                                                                                  • -
                                                                                                  • Example
                                                                                                  • +
                                                                                                  • Examples
                                                                                                  • Methods
                                                                                                    • list_shards
                                                                                                    • @@ -445,12 +445,11 @@

                                                                                                      -

                                                                                                      Example

                                                                                                      -

                                                                                                      ::

                                                                                                      -
                                                                                                      >>> source = URLSource("https://example.com/train-{000..009}.tar")
                                                                                                      ->>> for shard_id, stream in source.shards:
                                                                                                      -...     print(f"Streaming {shard_id}")
                                                                                                      +
                                                                                                      +

                                                                                                      Examples

                                                                                                      +
                                                                                                      >>> source = URLSource("https://example.com/train-{000..009}.tar")
                                                                                                      +>>> for shard_id, stream in source.shards:
                                                                                                      +...     print(f"Streaming {shard_id}")

                                                                                                      Methods

                                                                                                      diff --git a/docs/api/load_dataset.html b/docs/api/load_dataset.html index 5fe56bf..1e4342d 100644 --- a/docs/api/load_dataset.html +++ b/docs/api/load_dataset.html @@ -405,7 +405,7 @@

                                                                                                      On this page

                                                                                                    • Parameters
                                                                                                    • Returns
                                                                                                    • Raises
                                                                                                    • -
                                                                                                    • Example
                                                                                                    • +
                                                                                                    • Examples
                                                                                                  @@ -540,25 +540,24 @@

                                                                                                  Rais

                                                                                                • -
                                                                                                  -

                                                                                                  Example

                                                                                                  -

                                                                                                  ::

                                                                                                  -
                                                                                                  >>> # Load without type - get DictSample for exploration
                                                                                                  ->>> ds = load_dataset("./data/train.tar", split="train")
                                                                                                  ->>> for sample in ds.ordered():
                                                                                                  -...     print(sample.keys())  # Explore fields
                                                                                                  -...     print(sample["text"]) # Dict-style access
                                                                                                  -...     print(sample.label)   # Attribute access
                                                                                                  ->>>
                                                                                                  ->>> # Convert to typed schema
                                                                                                  ->>> typed_ds = ds.as_type(TextData)
                                                                                                  ->>>
                                                                                                  ->>> # Or load with explicit type directly
                                                                                                  ->>> train_ds = load_dataset("./data/train-*.tar", TextData, split="train")
                                                                                                  ->>>
                                                                                                  ->>> # Load from index with auto-type resolution
                                                                                                  ->>> index = LocalIndex()
                                                                                                  ->>> ds = load_dataset("@local/my-dataset", index=index, split="train")
                                                                                                  +
                                                                                                  +

                                                                                                  Examples

                                                                                                  +
                                                                                                  >>> # Load without type - get DictSample for exploration
                                                                                                  +>>> ds = load_dataset("./data/train.tar", split="train")
                                                                                                  +>>> for sample in ds.ordered():
                                                                                                  +...     print(sample.keys())  # Explore fields
                                                                                                  +...     print(sample["text"]) # Dict-style access
                                                                                                  +...     print(sample.label)   # Attribute access
                                                                                                  +>>>
                                                                                                  +>>> # Convert to typed schema
                                                                                                  +>>> typed_ds = ds.as_type(TextData)
                                                                                                  +>>>
                                                                                                  +>>> # Or load with explicit type directly
                                                                                                  +>>> train_ds = load_dataset("./data/train-*.tar", TextData, split="train")
                                                                                                  +>>>
                                                                                                  +>>> # Load from index with auto-type resolution
                                                                                                  +>>> index = LocalIndex()
                                                                                                  +>>> ds = load_dataset("@local/my-dataset", index=index, split="train")
                                                                                                  diff --git a/docs/api/local.Index.html b/docs/api/local.Index.html index c50d10c..6505dd1 100644 --- a/docs/api/local.Index.html +++ b/docs/api/local.Index.html @@ -766,15 +766,14 @@

                                                                                                  -
                                                                                                  -

                                                                                                  Example

                                                                                                  -

                                                                                                  ::

                                                                                                  -
                                                                                                  >>> # After enabling auto_stubs and configuring IDE extraPaths:
                                                                                                  ->>> from local.MySample_1_0_0 import MySample
                                                                                                  ->>>
                                                                                                  ->>> # This gives full IDE autocomplete:
                                                                                                  ->>> DecodedType = index.decode_schema_as(ref, MySample)
                                                                                                  ->>> sample = DecodedType(text="hello", value=42)  # IDE knows signature!
                                                                                                  +
                                                                                                  +

                                                                                                  Examples

                                                                                                  +
                                                                                                  >>> # After enabling auto_stubs and configuring IDE extraPaths:
                                                                                                  +>>> from local.MySample_1_0_0 import MySample
                                                                                                  +>>>
                                                                                                  +>>> # This gives full IDE autocomplete:
                                                                                                  +>>> DecodedType = index.decode_schema_as(ref, MySample)
                                                                                                  +>>> sample = DecodedType(text="hello", value=42)  # IDE knows signature!

                                                                                                  Note

                                                                                                  @@ -1023,16 +1022,15 @@

                                                                                                  -
                                                                                                  -

                                                                                                  Example

                                                                                                  -

                                                                                                  ::

                                                                                                  -
                                                                                                  >>> index = LocalIndex(auto_stubs=True)
                                                                                                  ->>> ref = index.publish_schema(MySample, version="1.0.0")
                                                                                                  ->>> index.load_schema(ref)
                                                                                                  ->>> print(index.get_import_path(ref))
                                                                                                  -local.MySample_1_0_0
                                                                                                  ->>> # Then in your code:
                                                                                                  ->>> # from local.MySample_1_0_0 import MySample
                                                                                                  +
                                                                                                  +

                                                                                                  Examples

                                                                                                  +
                                                                                                  >>> index = LocalIndex(auto_stubs=True)
                                                                                                  +>>> ref = index.publish_schema(MySample, version="1.0.0")
                                                                                                  +>>> index.load_schema(ref)
                                                                                                  +>>> print(index.get_import_path(ref))
                                                                                                  +local.MySample_1_0_0
                                                                                                  +>>> # Then in your code:
                                                                                                  +>>> # from local.MySample_1_0_0 import MySample
                                                                                                  @@ -1389,16 +1387,15 @@

                                                                                                  Ra

                                                                                                  -
                                                                                                  -

                                                                                                  Example

                                                                                                  -

                                                                                                  ::

                                                                                                  -
                                                                                                  >>> # Load and use immediately
                                                                                                  ->>> MyType = index.load_schema("atdata://local/sampleSchema/MySample@1.0.0")
                                                                                                  ->>> sample = MyType(name="hello", value=42)
                                                                                                  ->>>
                                                                                                  ->>> # Or access later via namespace
                                                                                                  ->>> index.load_schema("atdata://local/sampleSchema/OtherType@1.0.0")
                                                                                                  ->>> other = index.types.OtherType(data="test")
                                                                                                  +
                                                                                                  +

                                                                                                  Examples

                                                                                                  +
                                                                                                  >>> # Load and use immediately
                                                                                                  +>>> MyType = index.load_schema("atdata://local/sampleSchema/MySample@1.0.0")
                                                                                                  +>>> sample = MyType(name="hello", value=42)
                                                                                                  +>>>
                                                                                                  +>>> # Or access later via namespace
                                                                                                  +>>> index.load_schema("atdata://local/sampleSchema/OtherType@1.0.0")
                                                                                                  +>>> other = index.types.OtherType(data="test")
                                                                                                  diff --git a/docs/api/packable.html b/docs/api/packable.html index 26b0659..ddf6668 100644 --- a/docs/api/packable.html +++ b/docs/api/packable.html @@ -474,18 +474,17 @@

                                                                                                  Re

                                                                                                  Examples

                                                                                                  -

                                                                                                  This is a test of the functionality::

                                                                                                  -
                                                                                                  @packable
                                                                                                  -class MyData:
                                                                                                  -    name: str
                                                                                                  -    values: NDArray
                                                                                                  -
                                                                                                  -sample = MyData(name="test", values=np.array([1, 2, 3]))
                                                                                                  -bytes_data = sample.packed
                                                                                                  -restored = MyData.from_bytes(bytes_data)
                                                                                                  -
                                                                                                  -# Works with Packable-typed APIs
                                                                                                  -index.publish_schema(MyData, version="1.0.0")  # Type-safe
                                                                                                  +
                                                                                                  >>> @packable
                                                                                                  +... class MyData:
                                                                                                  +...     name: str
                                                                                                  +...     values: NDArray
                                                                                                  +...
                                                                                                  +>>> sample = MyData(name="test", values=np.array([1, 2, 3]))
                                                                                                  +>>> bytes_data = sample.packed
                                                                                                  +>>> restored = MyData.from_bytes(bytes_data)
                                                                                                  +>>>
                                                                                                  +>>> # Works with Packable-typed APIs
                                                                                                  +>>> index.publish_schema(MyData, version="1.0.0")  # Type-safe
                                                                                                  diff --git a/docs/api/promote_to_atmosphere.html b/docs/api/promote_to_atmosphere.html index 2b756a2..f133608 100644 --- a/docs/api/promote_to_atmosphere.html +++ b/docs/api/promote_to_atmosphere.html @@ -405,7 +405,7 @@

                                                                                                  On this page

                                                                                                • Parameters
                                                                                                • Returns
                                                                                                • Raises
                                                                                                • -
                                                                                                • Example
                                                                                                • +
                                                                                                • Examples
                                                                                              @@ -538,13 +538,12 @@

                                                                                              Rais

                                                                                            -
                                                                                            -

                                                                                            Example

                                                                                            -

                                                                                            ::

                                                                                            -
                                                                                            >>> entry = local_index.get_dataset("mnist-train")
                                                                                            ->>> uri = promote_to_atmosphere(entry, local_index, client)
                                                                                            ->>> print(uri)
                                                                                            -at://did:plc:abc123/ac.foundation.dataset.datasetIndex/...
                                                                                            +
                                                                                            +

                                                                                            Examples

                                                                                            +
                                                                                            >>> entry = local_index.get_dataset("mnist-train")
                                                                                            +>>> uri = promote_to_atmosphere(entry, local_index, client)
                                                                                            +>>> print(uri)
                                                                                            +at://did:plc:abc123/ac.foundation.dataset.datasetIndex/...
                                                                                            diff --git a/docs/index.html b/docs/index.html index 4134cce..f2bfff4 100644 --- a/docs/index.html +++ b/docs/index.html @@ -666,7 +666,7 @@

                                                                                            Quick Example

                                                                                            1. Define a Sample Type

                                                                                            The @packable decorator creates a serializable dataclass:

                                                                                            -
                                                                                            +
                                                                                            import numpy as np
                                                                                             from numpy.typing import NDArray
                                                                                             import atdata
                                                                                            @@ -681,7 +681,7 @@ 

                                                                                            1. Define a Sample Ty

                                                                                            2. Create and Write Samples

                                                                                            Use WebDataset’s standard TarWriter:

                                                                                            -
                                                                                            +
                                                                                            import webdataset as wds
                                                                                             
                                                                                             samples = [
                                                                                            @@ -701,7 +701,7 @@ 

                                                                                            2. Create and Wri

                                                                                            3. Load and Iterate with Type Safety

                                                                                            The generic Dataset[T] provides typed access:

                                                                                            -
                                                                                            +
                                                                                            dataset = atdata.Dataset[ImageSample]("data-000000.tar")
                                                                                             
                                                                                             for batch in dataset.shuffled(batch_size=32):
                                                                                            @@ -716,7 +716,7 @@ 

                                                                                            Scaling Up

                                                                                            Team Storage with Redis + S3

                                                                                            When you’re ready to share with your team:

                                                                                            -
                                                                                            +
                                                                                            from atdata.local import LocalIndex, S3DataStore
                                                                                             
                                                                                             # Connect to team infrastructure
                                                                                            @@ -740,7 +740,7 @@ 

                                                                                            Team Storage wi

                                                                                            Federation with ATProto

                                                                                            For public or cross-organization sharing:

                                                                                            -
                                                                                            +
                                                                                            from atdata.atmosphere import AtmosphereClient, AtmosphereIndex, PDSBlobStore
                                                                                             from atdata.promote import promote_to_atmosphere
                                                                                             
                                                                                            @@ -762,7 +762,7 @@ 

                                                                                            Federation with AT

                                                                                            HuggingFace-Style Loading

                                                                                            For convenient access to datasets:

                                                                                            -
                                                                                            +
                                                                                            from atdata import load_dataset
                                                                                             
                                                                                             # Load from local files
                                                                                            diff --git a/docs/reference/architecture.html b/docs/reference/architecture.html
                                                                                            index 18b2fcf..e7ea4d2 100644
                                                                                            --- a/docs/reference/architecture.html
                                                                                            +++ b/docs/reference/architecture.html
                                                                                            @@ -657,7 +657,7 @@ 

                                                                                            Core Components

                                                                                            PackableSample: The Foundation

                                                                                            Everything in atdata starts with PackableSample—a base class that makes Python dataclasses serializable with msgpack:

                                                                                            -
                                                                                            +
                                                                                            @atdata.packable
                                                                                             class ImageSample:
                                                                                                 image: NDArray       # Automatically converted to/from bytes
                                                                                            @@ -680,7 +680,7 @@ 

                                                                                            PackableSamp

                                                                                            Dataset: Typed Iteration

                                                                                            The Dataset[T] class wraps WebDataset tar archives with type information:

                                                                                            -
                                                                                            +
                                                                                            dataset = atdata.Dataset[ImageSample]("data-{000000..000009}.tar")
                                                                                             
                                                                                             for batch in dataset.shuffled(batch_size=32):
                                                                                            @@ -704,7 +704,7 @@ 

                                                                                            Dataset: Typed Ite

                                                                                            SampleBatch: Automatic Aggregation

                                                                                            When iterating with batch_size, atdata returns SampleBatch[T] objects that aggregate sample attributes:

                                                                                            -
                                                                                            +
                                                                                            batch = SampleBatch[ImageSample](samples)
                                                                                             
                                                                                             # NDArray fields → stacked numpy array with batch dimension
                                                                                            @@ -718,7 +718,7 @@ 

                                                                                            SampleBa

                                                                                            Lens: Schema Transformations

                                                                                            Lenses enable viewing datasets through different schemas without duplicating data:

                                                                                            -
                                                                                            +
                                                                                            @atdata.packable
                                                                                             class SimplifiedSample:
                                                                                                 label: str
                                                                                            @@ -755,7 +755,7 @@ 

                                                                                            Local Index (Redis +
                                                                                          • WebDataset tar shards
                                                                                          • Any S3-compatible storage (AWS, MinIO, Cloudflare R2)
                                                                                          -
                                                                                          +
                                                                                          store = S3DataStore(credentials=creds, bucket="datasets")
                                                                                           index = LocalIndex(data_store=store)
                                                                                           
                                                                                          @@ -783,7 +783,7 @@ 

                                                                                          Atmosphere Index
                                                                                        • Store actual data shards as ATProto blobs
                                                                                        • Fully decentralized—no external dependencies
                                                                                        -
                                                                                        +
                                                                                        client = AtmosphereClient()
                                                                                         client.login("handle.bsky.social", "app-password")
                                                                                         
                                                                                        @@ -801,7 +801,7 @@ 

                                                                                        Protocol Abstraction

                                                                                        AbstractIndex

                                                                                        Common interface for both LocalIndex and AtmosphereIndex:

                                                                                        -
                                                                                        +
                                                                                        def process_dataset(index: AbstractIndex, name: str):
                                                                                             entry = index.get_dataset(name)
                                                                                             schema = index.decode_schema(entry.schema_ref)
                                                                                        @@ -817,7 +817,7 @@ 

                                                                                        AbstractIndex

                                                                                        AbstractDataStore

                                                                                        Common interface for S3DataStore and PDSBlobStore:

                                                                                        -
                                                                                        +
                                                                                        def write_to_store(store: AbstractDataStore, dataset: Dataset):
                                                                                             urls = store.write_shards(dataset, prefix="data/v1")
                                                                                             # Works with S3 or PDS blob storage
                                                                                        @@ -838,7 +838,7 @@

                                                                                        Data Flow: L

                                                                                        A typical workflow progresses through three stages:

                                                                                        Stage 1: Local Development

                                                                                        -
                                                                                        +
                                                                                        # Define type and create samples
                                                                                         @atdata.packable
                                                                                         class MySample:
                                                                                        @@ -856,7 +856,7 @@ 

                                                                                        Stage 1: Local D

                                                                                        Stage 2: Team Storage

                                                                                        -
                                                                                        +
                                                                                        # Set up team storage
                                                                                         store = S3DataStore(credentials=team_creds, bucket="team-datasets")
                                                                                         index = LocalIndex(data_store=store)
                                                                                        @@ -871,7 +871,7 @@ 

                                                                                        Stage 2: Team Storage

                                                                                        Stage 3: Federation

                                                                                        -
                                                                                        +
                                                                                        # Promote to atmosphere
                                                                                         client = AtmosphereClient()
                                                                                         client.login("handle.bsky.social", "app-password")
                                                                                        @@ -904,7 +904,7 @@ 

                                                                                        Extension Points

                                                                                        Custom DataSources

                                                                                        Implement the DataSource protocol to add new storage backends:

                                                                                        -
                                                                                        +
                                                                                        class MyCustomSource:
                                                                                             def list_shards(self) -> list[str]: ...
                                                                                             def open_shard(self, shard_id: str) -> IO[bytes]: ...
                                                                                        @@ -916,7 +916,7 @@ 

                                                                                        Custom DataSources

                                                                                        Custom Lenses

                                                                                        Register transformations between any PackableSample types:

                                                                                        -
                                                                                        +
                                                                                        @atdata.lens
                                                                                         def my_transform(src: SourceType) -> TargetType:
                                                                                             return TargetType(...)
                                                                                        @@ -929,7 +929,7 @@ 

                                                                                        Custom Lenses

                                                                                        Schema Extensions

                                                                                        The schema format supports custom metadata for domain-specific needs:

                                                                                        -
                                                                                        +
                                                                                        index.publish_schema(
                                                                                             MySample,
                                                                                             version="1.0.0",
                                                                                        diff --git a/docs/reference/atmosphere.html b/docs/reference/atmosphere.html
                                                                                        index 71c6ce9..11a6ce0 100644
                                                                                        --- a/docs/reference/atmosphere.html
                                                                                        +++ b/docs/reference/atmosphere.html
                                                                                        @@ -626,7 +626,7 @@ 

                                                                                        Overview

                                                                                        AtmosphereClient

                                                                                        The client handles authentication and record operations:

                                                                                        -
                                                                                        +
                                                                                        from atdata.atmosphere import AtmosphereClient
                                                                                         
                                                                                         client = AtmosphereClient()
                                                                                        @@ -653,7 +653,7 @@ 

                                                                                        AtmosphereClient

                                                                                        Session Management

                                                                                        Save and restore sessions to avoid re-authentication:

                                                                                        -
                                                                                        +
                                                                                        # Export session for later
                                                                                         session_string = client.export_session()
                                                                                         
                                                                                        @@ -665,7 +665,7 @@ 

                                                                                        Session Management

                                                                                        Custom PDS

                                                                                        Connect to a custom PDS instead of bsky.social:

                                                                                        -
                                                                                        +
                                                                                        client = AtmosphereClient(base_url="https://pds.example.com")
                                                                                        @@ -673,7 +673,7 @@

                                                                                        Custom PDS

                                                                                        PDSBlobStore

                                                                                        Store dataset shards as ATProto blobs for fully decentralized storage:

                                                                                        -
                                                                                        +
                                                                                        from atdata.atmosphere import AtmosphereClient, PDSBlobStore
                                                                                         
                                                                                         client = AtmosphereClient()
                                                                                        @@ -696,7 +696,7 @@ 

                                                                                        PDSBlobStore

                                                                                        Size Limits

                                                                                        PDS blobs typically have size limits (often 50MB-5GB depending on the PDS). Use maxcount and maxsize parameters to control shard sizes:

                                                                                        -
                                                                                        +
                                                                                        urls = store.write_shards(
                                                                                             dataset,
                                                                                             prefix="large-data/v1",
                                                                                        @@ -709,7 +709,7 @@ 

                                                                                        Size Limits

                                                                                        BlobSource

                                                                                        Read datasets stored as PDS blobs:

                                                                                        -
                                                                                        +
                                                                                        from atdata import BlobSource
                                                                                         
                                                                                         # From blob references
                                                                                        @@ -730,7 +730,7 @@ 

                                                                                        BlobSource

                                                                                        AtmosphereIndex

                                                                                        The unified interface for ATProto operations, implementing the AbstractIndex protocol:

                                                                                        -
                                                                                        +
                                                                                        from atdata.atmosphere import AtmosphereClient, AtmosphereIndex, PDSBlobStore
                                                                                         
                                                                                         client = AtmosphereClient()
                                                                                        @@ -745,7 +745,7 @@ 

                                                                                        AtmosphereIndex

                                                                                        Publishing Schemas

                                                                                        -
                                                                                        +
                                                                                        import atdata
                                                                                         from numpy.typing import NDArray
                                                                                         
                                                                                        @@ -766,7 +766,7 @@ 

                                                                                        Publishing Schemas

                                                                                        Publishing Datasets

                                                                                        -
                                                                                        +
                                                                                        dataset = atdata.Dataset[ImageSample]("data-{000000..000009}.tar")
                                                                                         
                                                                                         entry = index.insert_dataset(
                                                                                        @@ -784,7 +784,7 @@ 

                                                                                        Publishing Datasets

                                                                                        Listing and Retrieving

                                                                                        -
                                                                                        +
                                                                                        # List your datasets
                                                                                         for entry in index.list_datasets():
                                                                                             print(f"{entry.name}: {entry.schema_ref}")
                                                                                        @@ -810,7 +810,7 @@ 

                                                                                        Lower-Level Publish

                                                                                        For more control, use the individual publisher classes:

                                                                                        SchemaPublisher

                                                                                        -
                                                                                        +
                                                                                        from atdata.atmosphere import SchemaPublisher
                                                                                         
                                                                                         publisher = SchemaPublisher(client)
                                                                                        @@ -826,7 +826,7 @@ 

                                                                                        SchemaPublisher

                                                                                        DatasetPublisher

                                                                                        -
                                                                                        +
                                                                                        from atdata.atmosphere import DatasetPublisher
                                                                                         
                                                                                         publisher = DatasetPublisher(client)
                                                                                        @@ -846,7 +846,7 @@ 

                                                                                        Blob Storage

                                                                                        There are two approaches to storing data as ATProto blobs:

                                                                                        Approach 1: PDSBlobStore (Recommended)

                                                                                        Use PDSBlobStore with AtmosphereIndex for automatic shard management:

                                                                                        -
                                                                                        +
                                                                                        from atdata.atmosphere import PDSBlobStore, AtmosphereIndex
                                                                                         
                                                                                         store = PDSBlobStore(client)
                                                                                        @@ -865,7 +865,7 @@ 

                                                                                        Blob Storage

                                                                                        Approach 2: Manual Blob Publishing

                                                                                        For more control, use DatasetPublisher.publish_with_blobs() directly:

                                                                                        -
                                                                                        +
                                                                                        import io
                                                                                         import webdataset as wds
                                                                                         
                                                                                        @@ -885,7 +885,7 @@ 

                                                                                        Blob Storage

                                                                                        )

                                                                                        Loading Blob-Stored Datasets

                                                                                        -
                                                                                        +
                                                                                        from atdata.atmosphere import DatasetLoader
                                                                                         from atdata import BlobSource
                                                                                         
                                                                                        @@ -909,7 +909,7 @@ 

                                                                                        Blob Storage

                                                                                        LensPublisher

                                                                                        -
                                                                                        +
                                                                                        from atdata.atmosphere import LensPublisher
                                                                                         
                                                                                         publisher = LensPublisher(client)
                                                                                        @@ -952,7 +952,7 @@ 

                                                                                        Lower-Level LoadersFor direct access to records, use the loader classes:

                                                                                        SchemaLoader

                                                                                        -
                                                                                        +
                                                                                        from atdata.atmosphere import SchemaLoader
                                                                                         
                                                                                         loader = SchemaLoader(client)
                                                                                        @@ -968,7 +968,7 @@ 

                                                                                        SchemaLoader

                                                                                        DatasetLoader

                                                                                        -
                                                                                        +
                                                                                        from atdata.atmosphere import DatasetLoader
                                                                                         
                                                                                         loader = DatasetLoader(client)
                                                                                        @@ -996,7 +996,7 @@ 

                                                                                        DatasetLoader

                                                                                        LensLoader

                                                                                        -
                                                                                        +
                                                                                        from atdata.atmosphere import LensLoader
                                                                                         
                                                                                         loader = LensLoader(client)
                                                                                        @@ -1021,7 +1021,7 @@ 

                                                                                        LensLoader

                                                                                        AT URIs

                                                                                        ATProto records are identified by AT URIs:

                                                                                        -
                                                                                        +
                                                                                        from atdata.atmosphere import AtUri
                                                                                         
                                                                                         # Parse an AT URI
                                                                                        @@ -1088,7 +1088,7 @@ 

                                                                                        Supported Field Type

                                                                                        Complete Example

                                                                                        This example shows the full workflow using PDSBlobStore for decentralized storage:

                                                                                        -
                                                                                        +
                                                                                        import numpy as np
                                                                                         from numpy.typing import NDArray
                                                                                         import atdata
                                                                                        @@ -1159,7 +1159,7 @@ 

                                                                                        Complete Example

                                                                                        break

                                                                                        For external URL storage (without PDSBlobStore):

                                                                                        -
                                                                                        +
                                                                                        # Use AtmosphereIndex without data_store
                                                                                         index = AtmosphereIndex(client)
                                                                                         
                                                                                        diff --git a/docs/reference/datasets.html b/docs/reference/datasets.html
                                                                                        index 30f9dd9..40e4ed2 100644
                                                                                        --- a/docs/reference/datasets.html
                                                                                        +++ b/docs/reference/datasets.html
                                                                                        @@ -603,7 +603,7 @@ 

                                                                                        Datasets

                                                                                        The Dataset class provides typed iteration over WebDataset tar files with automatic batching and lens transformations.

                                                                                        Creating a Dataset

                                                                                        -
                                                                                        +
                                                                                        import atdata
                                                                                         from numpy.typing import NDArray
                                                                                         
                                                                                        @@ -626,7 +626,7 @@ 

                                                                                        Data Sources

                                                                                        URL Source (default)

                                                                                        When you pass a string to Dataset, it automatically wraps it in a URLSource:

                                                                                        -
                                                                                        +
                                                                                        # These are equivalent:
                                                                                         dataset = atdata.Dataset[ImageSample]("data-{000000..000009}.tar")
                                                                                         dataset = atdata.Dataset[ImageSample](atdata.URLSource("data-{000000..000009}.tar"))
                                                                                        @@ -635,7 +635,7 @@

                                                                                        URL Source (default)

                                                                                        S3 Source

                                                                                        For private S3 buckets or S3-compatible storage (Cloudflare R2, MinIO), use S3Source:

                                                                                        -
                                                                                        +
                                                                                        # From explicit credentials
                                                                                         source = atdata.S3Source(
                                                                                             bucket="my-bucket",
                                                                                        @@ -673,7 +673,7 @@ 

                                                                                        Iteration Modes

                                                                                        Ordered Iteration

                                                                                        Iterate through samples in their original order:

                                                                                        -
                                                                                        +
                                                                                        # With batching (default batch_size=1)
                                                                                         for batch in dataset.ordered(batch_size=32):
                                                                                             images = batch.image  # numpy array (32, H, W, C)
                                                                                        @@ -687,7 +687,7 @@ 

                                                                                        Ordered Iteration

                                                                                        Shuffled Iteration

                                                                                        Iterate with randomized order at both shard and sample levels:

                                                                                        -
                                                                                        +
                                                                                        for batch in dataset.shuffled(batch_size=32):
                                                                                             # Samples are shuffled
                                                                                             process(batch)
                                                                                        @@ -718,7 +718,7 @@ 

                                                                                        Shuffled Iteration

                                                                                        SampleBatch

                                                                                        When iterating with a batch_size, each iteration yields a SampleBatch with automatic attribute aggregation.

                                                                                        -
                                                                                        +
                                                                                        @atdata.packable
                                                                                         class Sample:
                                                                                             features: NDArray  # shape (256,)
                                                                                        @@ -738,7 +738,7 @@ 

                                                                                        SampleBatch

                                                                                        Type Transformations with Lenses

                                                                                        View a dataset through a different sample type using registered lenses:

                                                                                        -
                                                                                        +
                                                                                        @atdata.packable
                                                                                         class SimplifiedSample:
                                                                                             label: str
                                                                                        @@ -760,7 +760,7 @@ 

                                                                                        Dataset Properties

                                                                                        Shard List

                                                                                        Get the list of individual tar files:

                                                                                        -
                                                                                        +
                                                                                        dataset = atdata.Dataset[Sample]("data-{000000..000009}.tar")
                                                                                         shards = dataset.shard_list
                                                                                         # ['data-000000.tar', 'data-000001.tar', ..., 'data-000009.tar']
                                                                                        @@ -769,7 +769,7 @@

                                                                                        Shard List

                                                                                        Metadata

                                                                                        Datasets can have associated metadata from a URL:

                                                                                        -
                                                                                        +
                                                                                        dataset = atdata.Dataset[Sample](
                                                                                             "data-{000000..000009}.tar",
                                                                                             metadata_url="https://example.com/metadata.msgpack"
                                                                                        @@ -783,7 +783,7 @@ 

                                                                                        Metadata

                                                                                        Writing Datasets

                                                                                        Use WebDataset’s TarWriter or ShardWriter to create datasets:

                                                                                        -
                                                                                        +
                                                                                        import webdataset as wds
                                                                                         import numpy as np
                                                                                         
                                                                                        @@ -806,7 +806,7 @@ 

                                                                                        Writing Datasets

                                                                                        Parquet Export

                                                                                        Export dataset contents to parquet format:

                                                                                        -
                                                                                        +
                                                                                        # Export entire dataset
                                                                                         dataset.to_parquet("output.parquet")
                                                                                         
                                                                                        @@ -857,7 +857,7 @@ 

                                                                                        Dataset Properties

                                                                                        Source

                                                                                        Access the underlying DataSource:

                                                                                        -
                                                                                        +
                                                                                        dataset = atdata.Dataset[Sample]("data.tar")
                                                                                         source = dataset.source  # URLSource instance
                                                                                         print(source.shard_list)  # ['data.tar']
                                                                                        @@ -866,7 +866,7 @@

                                                                                        Source

                                                                                        Sample Type

                                                                                        Get the type parameter used to create the dataset:

                                                                                        -
                                                                                        +
                                                                                        dataset = atdata.Dataset[ImageSample]("data.tar")
                                                                                         print(dataset.sample_type)  # <class 'ImageSample'>
                                                                                         print(dataset.batch_type)   # SampleBatch[ImageSample]
                                                                                        diff --git a/docs/reference/lenses.html b/docs/reference/lenses.html index 1633abc..aba6415 100644 --- a/docs/reference/lenses.html +++ b/docs/reference/lenses.html @@ -595,7 +595,7 @@

                                                                                        Overview

                                                                                        Creating a Lens

                                                                                        Use the @lens decorator to define a getter:

                                                                                        -
                                                                                        +
                                                                                        import atdata
                                                                                         from numpy.typing import NDArray
                                                                                         
                                                                                        @@ -625,7 +625,7 @@ 

                                                                                        Creating a Lens

                                                                                        Adding a Putter

                                                                                        To enable bidirectional updates, add a putter:

                                                                                        -
                                                                                        +
                                                                                        @simplify.putter
                                                                                         def simplify_put(view: SimpleSample, source: FullSample) -> FullSample:
                                                                                             return FullSample(
                                                                                        @@ -645,7 +645,7 @@ 

                                                                                        Adding a Putter

                                                                                        Using Lenses with Datasets

                                                                                        Lenses integrate with Dataset.as_type():

                                                                                        -
                                                                                        +
                                                                                        dataset = atdata.Dataset[FullSample]("data-{000000..000009}.tar")
                                                                                         
                                                                                         # View through a different type
                                                                                        @@ -660,7 +660,7 @@ 

                                                                                        Using Lenses wi

                                                                                        Direct Lens Usage

                                                                                        Lenses can also be called directly:

                                                                                        -
                                                                                        +
                                                                                        import numpy as np
                                                                                         
                                                                                         full = FullSample(
                                                                                        @@ -689,21 +689,21 @@ 

                                                                                        Lens Laws

                                                                                        If you get a view and immediately put it back, the source is unchanged:

                                                                                        -
                                                                                        +
                                                                                        view = lens.get(source)
                                                                                         assert lens.put(view, source) == source

                                                                                        If you put a view, getting it back yields that view:

                                                                                        -
                                                                                        +
                                                                                        updated = lens.put(view, source)
                                                                                         assert lens.get(updated) == view

                                                                                        Putting twice is equivalent to putting once with the final value:

                                                                                        -
                                                                                        +
                                                                                        result1 = lens.put(v2, lens.put(v1, source))
                                                                                         result2 = lens.put(v2, source)
                                                                                         assert result1 == result2
                                                                                        @@ -715,7 +715,7 @@

                                                                                        Lens Laws

                                                                                        Trivial Putter

                                                                                        If no putter is defined, a trivial putter is used that ignores view updates:

                                                                                        -
                                                                                        +
                                                                                        @atdata.lens
                                                                                         def extract_label(src: FullSample) -> SimpleSample:
                                                                                             return SimpleSample(label=src.label, confidence=src.confidence)
                                                                                        @@ -729,7 +729,7 @@ 

                                                                                        Trivial Putter

                                                                                        LensNetwork Registry

                                                                                        The LensNetwork is a singleton that stores all registered lenses:

                                                                                        -
                                                                                        +
                                                                                        from atdata.lens import LensNetwork
                                                                                         
                                                                                         network = LensNetwork()
                                                                                        @@ -746,7 +746,7 @@ 

                                                                                        LensNetwork Registry<

                                                                                        Example: Feature Extraction

                                                                                        -
                                                                                        +
                                                                                        @atdata.packable
                                                                                         class RawSample:
                                                                                             audio: NDArray
                                                                                        diff --git a/docs/reference/load-dataset.html b/docs/reference/load-dataset.html
                                                                                        index a3cdca0..f606df7 100644
                                                                                        --- a/docs/reference/load-dataset.html
                                                                                        +++ b/docs/reference/load-dataset.html
                                                                                        @@ -604,7 +604,7 @@ 

                                                                                        Overview

                                                                                        Basic Usage

                                                                                        -
                                                                                        +
                                                                                        import atdata
                                                                                         from atdata import load_dataset
                                                                                         from numpy.typing import NDArray
                                                                                        @@ -627,7 +627,7 @@ 

                                                                                        Basic Usage

                                                                                        Path Formats

                                                                                        WebDataset Brace Notation

                                                                                        -
                                                                                        +
                                                                                        # Range notation
                                                                                         ds = load_dataset("data-{000000..000099}.tar", MySample, split="train")
                                                                                         
                                                                                        @@ -637,7 +637,7 @@ 

                                                                                        WebDataset Brace

                                                                                        Glob Patterns

                                                                                        -
                                                                                        +
                                                                                        # Match all tar files
                                                                                         ds = load_dataset("path/to/*.tar", MySample)
                                                                                         
                                                                                        @@ -647,14 +647,14 @@ 

                                                                                        Glob Patterns

                                                                                        Local Directory

                                                                                        -
                                                                                        +
                                                                                        # Scans for .tar files
                                                                                         ds = load_dataset("./my-dataset/", MySample)

                                                                                        Remote URLs

                                                                                        -
                                                                                        +
                                                                                        # S3 (public buckets)
                                                                                         ds = load_dataset("s3://bucket/data-{000..099}.tar", MySample, split="train")
                                                                                         
                                                                                        @@ -680,7 +680,7 @@ 

                                                                                        Remote URLs

                                                                                        Index Lookup

                                                                                        -
                                                                                        +
                                                                                        from atdata.local import LocalIndex
                                                                                         
                                                                                         index = LocalIndex()
                                                                                        @@ -747,7 +747,7 @@ 

                                                                                        Split Detection

                                                                                        DatasetDict

                                                                                        When loading without split=, returns a DatasetDict:

                                                                                        -
                                                                                        +
                                                                                        ds_dict = load_dataset("path/to/data/", MySample)
                                                                                         
                                                                                         # Access splits
                                                                                        @@ -767,7 +767,7 @@ 

                                                                                        DatasetDict

                                                                                        Explicit Data Files

                                                                                        Override automatic detection with data_files:

                                                                                        -
                                                                                        +
                                                                                        # Single pattern
                                                                                         ds = load_dataset(
                                                                                             "path/to/",
                                                                                        @@ -796,7 +796,7 @@ 

                                                                                        Explicit Data Files

                                                                                        Streaming Mode

                                                                                        The streaming parameter signals intent for streaming mode:

                                                                                        -
                                                                                        +
                                                                                        # Mark as streaming
                                                                                         ds_dict = load_dataset("path/to/data.tar", MySample, streaming=True)
                                                                                         
                                                                                        @@ -821,7 +821,7 @@ 

                                                                                        Streaming Mode

                                                                                        Auto Type Resolution

                                                                                        When using index lookup, the sample type can be resolved automatically:

                                                                                        -
                                                                                        +
                                                                                        from atdata.local import LocalIndex
                                                                                         
                                                                                         index = LocalIndex()
                                                                                        @@ -835,7 +835,7 @@ 

                                                                                        Auto Type Resolution<

                                                                                        Error Handling

                                                                                        -
                                                                                        +
                                                                                        try:
                                                                                             ds = load_dataset("path/to/data.tar", MySample, split="train")
                                                                                         except FileNotFoundError:
                                                                                        @@ -851,7 +851,7 @@ 

                                                                                        Error Handling

                                                                                        Complete Example

                                                                                        -
                                                                                        +
                                                                                        import numpy as np
                                                                                         from numpy.typing import NDArray
                                                                                         import atdata
                                                                                        diff --git a/docs/reference/local-storage.html b/docs/reference/local-storage.html
                                                                                        index 8883b97..f5c7f91 100644
                                                                                        --- a/docs/reference/local-storage.html
                                                                                        +++ b/docs/reference/local-storage.html
                                                                                        @@ -603,7 +603,7 @@ 

                                                                                        Overview

                                                                                        LocalIndex

                                                                                        The index tracks datasets in Redis:

                                                                                        -
                                                                                        +
                                                                                        from atdata.local import LocalIndex
                                                                                         
                                                                                         # Default connection (localhost:6379)
                                                                                        @@ -619,7 +619,7 @@ 

                                                                                        LocalIndex

                                                                                        Adding Entries

                                                                                        -
                                                                                        +
                                                                                        import atdata
                                                                                         from numpy.typing import NDArray
                                                                                         
                                                                                        @@ -644,7 +644,7 @@ 

                                                                                        Adding Entries

                                                                                        Listing and Retrieving

                                                                                        -
                                                                                        +
                                                                                        # Iterate all entries
                                                                                         for entry in index.entries:
                                                                                             print(f"{entry.name}: {entry.cid}")
                                                                                        @@ -676,7 +676,7 @@ 

                                                                                        Repo (Deprecated)

                                                                                        The Repo class combines S3 storage with Redis indexing:

                                                                                        -
                                                                                        +
                                                                                        from atdata.local import Repo
                                                                                         
                                                                                         # From credentials file
                                                                                        @@ -696,7 +696,7 @@ 

                                                                                        Repo (Deprecated)

                                                                                        )

                                                                                        Preferred approach - Use LocalIndex with S3DataStore:

                                                                                        -
                                                                                        +
                                                                                        from atdata.local import LocalIndex, S3DataStore
                                                                                         
                                                                                         store = S3DataStore(
                                                                                        @@ -734,7 +734,7 @@ 

                                                                                        Credentials File F

                                                                                        Inserting Datasets

                                                                                        -
                                                                                        +
                                                                                        import webdataset as wds
                                                                                         import numpy as np
                                                                                         
                                                                                        @@ -764,7 +764,7 @@ 

                                                                                        Inserting Datasets

                                                                                        Insert Options

                                                                                        -
                                                                                        +
                                                                                        entry, ds = repo.insert(
                                                                                             dataset,
                                                                                             name="my-dataset",
                                                                                        @@ -778,7 +778,7 @@ 

                                                                                        Insert Options

                                                                                        LocalDatasetEntry

                                                                                        Index entries provide content-addressable identification:

                                                                                        -
                                                                                        +
                                                                                        entry = index.get_entry_by_name("my-dataset")
                                                                                         
                                                                                         # Core properties (IndexEntry protocol)
                                                                                        @@ -811,7 +811,7 @@ 

                                                                                        LocalDatasetEntry

                                                                                        Schema Storage

                                                                                        Schemas can be stored and retrieved from the index:

                                                                                        -
                                                                                        +
                                                                                        # Publish a schema
                                                                                         schema_ref = index.publish_schema(
                                                                                             ImageSample,
                                                                                        @@ -842,7 +842,7 @@ 

                                                                                        Schema Storage

                                                                                        S3DataStore

                                                                                        For direct S3 operations without Redis indexing:

                                                                                        -
                                                                                        +
                                                                                        from atdata.local import S3DataStore
                                                                                         
                                                                                         store = S3DataStore(
                                                                                        @@ -864,7 +864,7 @@ 

                                                                                        S3DataStore

                                                                                        Complete Workflow Example

                                                                                        -
                                                                                        +
                                                                                        import numpy as np
                                                                                         from numpy.typing import NDArray
                                                                                         import atdata
                                                                                        diff --git a/docs/reference/packable-samples.html b/docs/reference/packable-samples.html
                                                                                        index 320a637..bae51df 100644
                                                                                        --- a/docs/reference/packable-samples.html
                                                                                        +++ b/docs/reference/packable-samples.html
                                                                                        @@ -598,7 +598,7 @@ 

                                                                                        Packable Samples

                                                                                        The @packable Decorator

                                                                                        The recommended way to define a sample type is with the @packable decorator:

                                                                                        -
                                                                                        +
                                                                                        import numpy as np
                                                                                         from numpy.typing import NDArray
                                                                                         import atdata
                                                                                        @@ -620,7 +620,7 @@ 

                                                                                        The @packable

                                                                                        Supported Field Types

                                                                                        Primitives

                                                                                        -
                                                                                        +
                                                                                        @atdata.packable
                                                                                         class PrimitiveSample:
                                                                                             name: str
                                                                                        @@ -633,7 +633,7 @@ 

                                                                                        Primitives

                                                                                        NumPy Arrays

                                                                                        Fields annotated as NDArray are automatically converted:

                                                                                        -
                                                                                        +
                                                                                        @atdata.packable
                                                                                         class ArraySample:
                                                                                             features: NDArray          # Required array
                                                                                        @@ -655,7 +655,7 @@ 

                                                                                        NumPy Arrays

                                                                                        Lists

                                                                                        -
                                                                                        +
                                                                                        @atdata.packable
                                                                                         class ListSample:
                                                                                             tags: list[str]
                                                                                        @@ -667,7 +667,7 @@ 

                                                                                        Lists

                                                                                        Serialization

                                                                                        Packing to Bytes

                                                                                        -
                                                                                        +
                                                                                        sample = ImageSample(
                                                                                             image=np.random.rand(224, 224, 3).astype(np.float32),
                                                                                             label="cat",
                                                                                        @@ -681,7 +681,7 @@ 

                                                                                        Packing to Bytes

                                                                                        Unpacking from Bytes

                                                                                        -
                                                                                        +
                                                                                        # Deserialize from bytes
                                                                                         restored = ImageSample.from_bytes(packed_bytes)
                                                                                         
                                                                                        @@ -693,12 +693,12 @@ 

                                                                                        Unpacking from Bytes<

                                                                                        WebDataset Format

                                                                                        The as_wds property returns a dict ready for WebDataset:

                                                                                        -
                                                                                        +
                                                                                        wds_dict = sample.as_wds
                                                                                         # {'__key__': '1234...', 'msgpack': b'...'}

                                                                                        Write samples to a tar file:

                                                                                        -
                                                                                        +
                                                                                        import webdataset as wds
                                                                                         
                                                                                         with wds.writer.TarWriter("data-000000.tar") as sink:
                                                                                        @@ -711,7 +711,7 @@ 

                                                                                        WebDataset Format

                                                                                        Direct Inheritance (Alternative)

                                                                                        You can also inherit directly from PackableSample:

                                                                                        -
                                                                                        +
                                                                                        from dataclasses import dataclass
                                                                                         
                                                                                         @dataclass
                                                                                        @@ -749,7 +749,7 @@ 

                                                                                        Serialization Flow

                                                                                        The _ensure_good() Method

                                                                                        This method runs automatically after construction and handles NDArray conversion:

                                                                                        -
                                                                                        +
                                                                                        def _ensure_good(self):
                                                                                             for field in dataclasses.fields(self):
                                                                                                 if _is_possibly_ndarray_type(field.type):
                                                                                        @@ -765,7 +765,7 @@ 

                                                                                        Best Practices

                                                                                        -
                                                                                        +
                                                                                        @atdata.packable
                                                                                         class GoodSample:
                                                                                             features: NDArray           # Clear type annotation
                                                                                        @@ -775,7 +775,7 @@ 

                                                                                        Best Practices

                                                                                        -
                                                                                        +
                                                                                        @atdata.packable
                                                                                         class BadSample:
                                                                                             # DON'T: Nested dataclasses not supported
                                                                                        diff --git a/docs/reference/promotion.html b/docs/reference/promotion.html
                                                                                        index 8b3bc17..bec72cb 100644
                                                                                        --- a/docs/reference/promotion.html
                                                                                        +++ b/docs/reference/promotion.html
                                                                                        @@ -594,7 +594,7 @@ 

                                                                                        Overview

                                                                                        Basic Usage

                                                                                        -
                                                                                        +
                                                                                        from atdata.local import LocalIndex
                                                                                         from atdata.atmosphere import AtmosphereClient
                                                                                         from atdata.promote import promote_to_atmosphere
                                                                                        @@ -614,7 +614,7 @@ 

                                                                                        Basic Usage

                                                                                        With Metadata

                                                                                        -
                                                                                        +
                                                                                        at_uri = promote_to_atmosphere(
                                                                                             entry,
                                                                                             local_index,
                                                                                        @@ -629,7 +629,7 @@ 

                                                                                        With Metadata

                                                                                        Schema Deduplication

                                                                                        The promotion workflow automatically checks for existing schemas:

                                                                                        -
                                                                                        +
                                                                                        # First promotion: publishes schema
                                                                                         uri1 = promote_to_atmosphere(entry1, local_index, client)
                                                                                         
                                                                                        @@ -649,7 +649,7 @@ 

                                                                                        Data Storage Options<

                                                                                        By default, promotion keeps the original data URLs:

                                                                                        -
                                                                                        +
                                                                                        # Data stays in original S3 location
                                                                                         at_uri = promote_to_atmosphere(entry, local_index, client)
                                                                                        @@ -662,7 +662,7 @@

                                                                                        Data Storage Options<

                                                                                        To copy data to a different storage location:

                                                                                        -
                                                                                        +
                                                                                        from atdata.local import S3DataStore
                                                                                         
                                                                                         # Create new data store
                                                                                        @@ -690,7 +690,7 @@ 

                                                                                        Data Storage Options<

                                                                                        Complete Workflow Example

                                                                                        -
                                                                                        +
                                                                                        import numpy as np
                                                                                         from numpy.typing import NDArray
                                                                                         import atdata
                                                                                        @@ -761,7 +761,7 @@ 

                                                                                        Complete Workflo

                                                                                        Error Handling

                                                                                        -
                                                                                        +
                                                                                        try:
                                                                                             at_uri = promote_to_atmosphere(entry, local_index, client)
                                                                                         except KeyError as e:
                                                                                        diff --git a/docs/reference/protocols.html b/docs/reference/protocols.html
                                                                                        index 8e4e0cc..71623f5 100644
                                                                                        --- a/docs/reference/protocols.html
                                                                                        +++ b/docs/reference/protocols.html
                                                                                        @@ -615,7 +615,7 @@ 

                                                                                        Overview

                                                                                        IndexEntry Protocol

                                                                                        Represents a dataset entry in any index:

                                                                                        -
                                                                                        +
                                                                                        from atdata._protocols import IndexEntry
                                                                                         
                                                                                         def process_entry(entry: IndexEntry) -> None:
                                                                                        @@ -669,7 +669,7 @@ 

                                                                                        Implementations

                                                                                        AbstractIndex Protocol

                                                                                        Defines operations for managing schemas and datasets:

                                                                                        -
                                                                                        +
                                                                                        from atdata._protocols import AbstractIndex
                                                                                         
                                                                                         def list_all_datasets(index: AbstractIndex) -> None:
                                                                                        @@ -679,7 +679,7 @@ 

                                                                                        AbstractIndex Proto

                                                                                        Dataset Operations

                                                                                        -
                                                                                        +
                                                                                        # Insert a dataset
                                                                                         entry = index.insert_dataset(
                                                                                             dataset,
                                                                                        @@ -697,7 +697,7 @@ 

                                                                                        Dataset Operations

                                                                                        Schema Operations

                                                                                        -
                                                                                        +
                                                                                        # Publish a schema
                                                                                         schema_ref = index.publish_schema(
                                                                                             MySample,
                                                                                        @@ -728,7 +728,7 @@ 

                                                                                        Implementations

                                                                                        AbstractDataStore Protocol

                                                                                        Abstracts over different storage backends:

                                                                                        -
                                                                                        +
                                                                                        from atdata._protocols import AbstractDataStore
                                                                                         
                                                                                         def write_dataset(store: AbstractDataStore, dataset) -> list[str]:
                                                                                        @@ -738,7 +738,7 @@ 

                                                                                        AbstractDataSto

                                                                                        Methods

                                                                                        -
                                                                                        +
                                                                                        # Write dataset shards
                                                                                         urls = store.write_shards(
                                                                                             dataset,
                                                                                        @@ -765,7 +765,7 @@ 

                                                                                        Implementations

                                                                                        DataSource Protocol

                                                                                        Abstracts over different data source backends for streaming dataset shards:

                                                                                        -
                                                                                        +
                                                                                        from atdata._protocols import DataSource
                                                                                         
                                                                                         def load_from_source(source: DataSource) -> None:
                                                                                        @@ -778,7 +778,7 @@ 

                                                                                        DataSource Protocol

                                                                                        Methods

                                                                                        -
                                                                                        +
                                                                                        # Get list of shard identifiers
                                                                                         shard_ids = source.shard_list  # ['data-000000.tar', 'data-000001.tar', ...]
                                                                                         
                                                                                        @@ -801,7 +801,7 @@ 

                                                                                        Implementations

                                                                                        Creating Custom Data Sources

                                                                                        Implement the DataSource protocol for custom backends:

                                                                                        -
                                                                                        +
                                                                                        from typing import Iterator, IO
                                                                                         from atdata._protocols import DataSource
                                                                                         
                                                                                        @@ -839,7 +839,7 @@ 

                                                                                        Creating Cust

                                                                                        Using Protocols for Polymorphism

                                                                                        Write code that works with any backend:

                                                                                        -
                                                                                        +
                                                                                        from atdata._protocols import AbstractIndex, IndexEntry
                                                                                         from atdata import Dataset
                                                                                         
                                                                                        @@ -910,7 +910,7 @@ 

                                                                                        Schema Reference

                                                                                        Type Checking

                                                                                        Protocols are runtime-checkable:

                                                                                        -
                                                                                        +
                                                                                        from atdata._protocols import IndexEntry, AbstractIndex
                                                                                         
                                                                                         # Check if object implements protocol
                                                                                        @@ -924,7 +924,7 @@ 

                                                                                        Type Checking

                                                                                        Complete Example

                                                                                        -
                                                                                        +
                                                                                        import atdata
                                                                                         from atdata.local import LocalIndex, S3DataStore
                                                                                         from atdata.atmosphere import AtmosphereClient, AtmosphereIndex
                                                                                        diff --git a/docs/reference/uri-spec.html b/docs/reference/uri-spec.html
                                                                                        index 1f44efa..abd6afa 100644
                                                                                        --- a/docs/reference/uri-spec.html
                                                                                        +++ b/docs/reference/uri-spec.html
                                                                                        @@ -685,7 +685,7 @@ 

                                                                                        Version Specifiers

                                                                                        Examples

                                                                                        Local Development

                                                                                        -
                                                                                        +
                                                                                        from atdata.local import Index
                                                                                         
                                                                                         index = Index()
                                                                                        @@ -704,7 +704,7 @@ 

                                                                                        Local Development

                                                                                        Atmosphere (ATProto Federation)

                                                                                        -
                                                                                        +
                                                                                        from atdata.atmosphere import Client
                                                                                         
                                                                                         client = Client()
                                                                                        diff --git a/docs/search.json b/docs/search.json
                                                                                        index 41f35fc..d899327 100644
                                                                                        --- a/docs/search.json
                                                                                        +++ b/docs/search.json
                                                                                        @@ -1072,14 +1072,14 @@
                                                                                             "href": "api/SchemaLoader.html",
                                                                                             "title": "SchemaLoader",
                                                                                             "section": "",
                                                                                        -    "text": "atmosphere.SchemaLoader(client)\nLoads PackableSample schemas from ATProto.\nThis class fetches schema records from ATProto and can list available schemas from a repository.\n\n\n::\n>>> client = AtmosphereClient()\n>>> client.login(\"handle\", \"password\")\n>>>\n>>> loader = SchemaLoader(client)\n>>> schema = loader.get(\"at://did:plc:.../ac.foundation.dataset.sampleSchema/...\")\n>>> print(schema[\"name\"])\n'MySample'\n\n\n\n\n\n\nName\nDescription\n\n\n\n\nget\nFetch a schema record by AT URI.\n\n\nlist_all\nList schema records from a repository.\n\n\n\n\n\natmosphere.SchemaLoader.get(uri)\nFetch a schema record by AT URI.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nuri\nstr | AtUri\nThe AT URI of the schema record.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\ndict\nThe schema record as a dictionary.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nValueError\nIf the record is not a schema record.\n\n\n\natproto.exceptions.AtProtocolError\nIf record not found.\n\n\n\n\n\n\n\natmosphere.SchemaLoader.list_all(repo=None, limit=100)\nList schema records from a repository.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nrepo\nOptional[str]\nThe DID of the repository. Defaults to authenticated user.\nNone\n\n\nlimit\nint\nMaximum number of records to return.\n100\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nlist[dict]\nList of schema records."
                                                                                        +    "text": "atmosphere.SchemaLoader(client)\nLoads PackableSample schemas from ATProto.\nThis class fetches schema records from ATProto and can list available schemas from a repository.\n\n\n>>> client = AtmosphereClient()\n>>> client.login(\"handle\", \"password\")\n>>>\n>>> loader = SchemaLoader(client)\n>>> schema = loader.get(\"at://did:plc:.../ac.foundation.dataset.sampleSchema/...\")\n>>> print(schema[\"name\"])\n'MySample'\n\n\n\n\n\n\nName\nDescription\n\n\n\n\nget\nFetch a schema record by AT URI.\n\n\nlist_all\nList schema records from a repository.\n\n\n\n\n\natmosphere.SchemaLoader.get(uri)\nFetch a schema record by AT URI.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nuri\nstr | AtUri\nThe AT URI of the schema record.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\ndict\nThe schema record as a dictionary.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nValueError\nIf the record is not a schema record.\n\n\n\natproto.exceptions.AtProtocolError\nIf record not found.\n\n\n\n\n\n\n\natmosphere.SchemaLoader.list_all(repo=None, limit=100)\nList schema records from a repository.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nrepo\nOptional[str]\nThe DID of the repository. Defaults to authenticated user.\nNone\n\n\nlimit\nint\nMaximum number of records to return.\n100\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nlist[dict]\nList of schema records."
                                                                                           },
                                                                                           {
                                                                                        -    "objectID": "api/SchemaLoader.html#example",
                                                                                        -    "href": "api/SchemaLoader.html#example",
                                                                                        +    "objectID": "api/SchemaLoader.html#examples",
                                                                                        +    "href": "api/SchemaLoader.html#examples",
                                                                                             "title": "SchemaLoader",
                                                                                             "section": "",
                                                                                        -    "text": "::\n>>> client = AtmosphereClient()\n>>> client.login(\"handle\", \"password\")\n>>>\n>>> loader = SchemaLoader(client)\n>>> schema = loader.get(\"at://did:plc:.../ac.foundation.dataset.sampleSchema/...\")\n>>> print(schema[\"name\"])\n'MySample'"
                                                                                        +    "text": ">>> client = AtmosphereClient()\n>>> client.login(\"handle\", \"password\")\n>>>\n>>> loader = SchemaLoader(client)\n>>> schema = loader.get(\"at://did:plc:.../ac.foundation.dataset.sampleSchema/...\")\n>>> print(schema[\"name\"])\n'MySample'"
                                                                                           },
                                                                                           {
                                                                                             "objectID": "api/SchemaLoader.html#methods",
                                                                                        @@ -1093,7 +1093,7 @@
                                                                                             "href": "api/BlobSource.html",
                                                                                             "title": "BlobSource",
                                                                                             "section": "",
                                                                                        -    "text": "BlobSource(blob_refs, pds_endpoint=None, _endpoint_cache=dict())\nData source for ATProto PDS blob storage.\nStreams dataset shards stored as blobs on an ATProto Personal Data Server. Each shard is identified by a blob reference containing the DID and CID.\nThis source resolves blob references to HTTP URLs and streams the content directly, supporting efficient iteration over shards without downloading everything upfront.\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\nblob_refs\nlist[dict[str, str]]\nList of blob reference dicts with ‘did’ and ‘cid’ keys.\n\n\npds_endpoint\nstr | None\nOptional PDS endpoint URL. If not provided, resolved from DID.\n\n\n\n\n\n\n::\n>>> source = BlobSource(\n...     blob_refs=[\n...         {\"did\": \"did:plc:abc123\", \"cid\": \"bafyrei...\"},\n...         {\"did\": \"did:plc:abc123\", \"cid\": \"bafyrei...\"},\n...     ],\n... )\n>>> for shard_id, stream in source.shards:\n...     
process(stream)\n\n\n\n\n\n\nName\nDescription\n\n\n\n\nfrom_refs\nCreate BlobSource from blob reference dicts.\n\n\nlist_shards\nReturn list of AT URI-style shard identifiers.\n\n\nopen_shard\nOpen a single shard by its AT URI.\n\n\n\n\n\nBlobSource.from_refs(refs, *, pds_endpoint=None)\nCreate BlobSource from blob reference dicts.\nAccepts blob references in the format returned by upload_blob: {\"$type\": \"blob\", \"ref\": {\"$link\": \"cid\"}, ...}\nAlso accepts simplified format: {\"did\": \"...\", \"cid\": \"...\"}\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nrefs\nlist[dict]\nList of blob reference dicts.\nrequired\n\n\npds_endpoint\nstr | None\nOptional PDS endpoint to use for all blobs.\nNone\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\n'BlobSource'\nConfigured BlobSource.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nValueError\nIf refs is empty or format is invalid.\n\n\n\n\n\n\n\nBlobSource.list_shards()\nReturn list of AT URI-style shard identifiers.\n\n\n\nBlobSource.open_shard(shard_id)\nOpen a single shard by its AT URI.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nshard_id\nstr\nAT URI of the shard (at://did/blob/cid).\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nIO[bytes]\nStreaming response body for reading the blob.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nKeyError\nIf shard_id is not in list_shards().\n\n\n\nValueError\nIf shard_id format is invalid."
                                                                                        +    "text": "BlobSource(blob_refs, pds_endpoint=None, _endpoint_cache=dict())\nData source for ATProto PDS blob storage.\nStreams dataset shards stored as blobs on an ATProto Personal Data Server. Each shard is identified by a blob reference containing the DID and CID.\nThis source resolves blob references to HTTP URLs and streams the content directly, supporting efficient iteration over shards without downloading everything upfront.\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\nblob_refs\nlist[dict[str, str]]\nList of blob reference dicts with ‘did’ and ‘cid’ keys.\n\n\npds_endpoint\nstr | None\nOptional PDS endpoint URL. If not provided, resolved from DID.\n\n\n\n\n\n\n>>> source = BlobSource(\n...     blob_refs=[\n...         {\"did\": \"did:plc:abc123\", \"cid\": \"bafyrei...\"},\n...         {\"did\": \"did:plc:abc123\", \"cid\": \"bafyrei...\"},\n...     ],\n... )\n>>> for shard_id, stream in source.shards:\n...     
process(stream)\n\n\n\n\n\n\nName\nDescription\n\n\n\n\nfrom_refs\nCreate BlobSource from blob reference dicts.\n\n\nlist_shards\nReturn list of AT URI-style shard identifiers.\n\n\nopen_shard\nOpen a single shard by its AT URI.\n\n\n\n\n\nBlobSource.from_refs(refs, *, pds_endpoint=None)\nCreate BlobSource from blob reference dicts.\nAccepts blob references in the format returned by upload_blob: {\"$type\": \"blob\", \"ref\": {\"$link\": \"cid\"}, ...}\nAlso accepts simplified format: {\"did\": \"...\", \"cid\": \"...\"}\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nrefs\nlist[dict]\nList of blob reference dicts.\nrequired\n\n\npds_endpoint\nstr | None\nOptional PDS endpoint to use for all blobs.\nNone\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\n'BlobSource'\nConfigured BlobSource.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nValueError\nIf refs is empty or format is invalid.\n\n\n\n\n\n\n\nBlobSource.list_shards()\nReturn list of AT URI-style shard identifiers.\n\n\n\nBlobSource.open_shard(shard_id)\nOpen a single shard by its AT URI.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nshard_id\nstr\nAT URI of the shard (at://did/blob/cid).\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nIO[bytes]\nStreaming response body for reading the blob.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nKeyError\nIf shard_id is not in list_shards().\n\n\n\nValueError\nIf shard_id format is invalid."
                                                                                           },
                                                                                           {
                                                                                             "objectID": "api/BlobSource.html#attributes",
                                                                                        @@ -1103,11 +1103,11 @@
                                                                                             "text": "Name\nType\nDescription\n\n\n\n\nblob_refs\nlist[dict[str, str]]\nList of blob reference dicts with ‘did’ and ‘cid’ keys.\n\n\npds_endpoint\nstr | None\nOptional PDS endpoint URL. If not provided, resolved from DID."
                                                                                           },
                                                                                           {
                                                                                        -    "objectID": "api/BlobSource.html#example",
                                                                                        -    "href": "api/BlobSource.html#example",
                                                                                        +    "objectID": "api/BlobSource.html#examples",
                                                                                        +    "href": "api/BlobSource.html#examples",
                                                                                             "title": "BlobSource",
                                                                                             "section": "",
                                                                                        -    "text": "::\n>>> source = BlobSource(\n...     blob_refs=[\n...         {\"did\": \"did:plc:abc123\", \"cid\": \"bafyrei...\"},\n...         {\"did\": \"did:plc:abc123\", \"cid\": \"bafyrei...\"},\n...     ],\n... )\n>>> for shard_id, stream in source.shards:\n...     process(stream)"
                                                                                        +    "text": ">>> source = BlobSource(\n...     blob_refs=[\n...         {\"did\": \"did:plc:abc123\", \"cid\": \"bafyrei...\"},\n...         {\"did\": \"did:plc:abc123\", \"cid\": \"bafyrei...\"},\n...     ],\n... )\n>>> for shard_id, stream in source.shards:\n...     process(stream)"
                                                                                           },
                                                                                           {
                                                                                             "objectID": "api/BlobSource.html#methods",
                                                                                        @@ -1121,14 +1121,14 @@
                                                                                             "href": "api/AtmosphereClient.html",
                                                                                             "title": "AtmosphereClient",
                                                                                             "section": "",
                                                                                        -    "text": "atmosphere.AtmosphereClient(base_url=None, *, _client=None)\nATProto client wrapper for atdata operations.\nThis class wraps the atproto SDK client and provides higher-level methods for working with atdata records (schemas, datasets, lenses).\n\n\n::\n>>> client = AtmosphereClient()\n>>> client.login(\"alice.bsky.social\", \"app-password\")\n>>> print(client.did)\n'did:plc:...'\n\n\n\nThe password should be an app-specific password, not your main account password. Create app passwords in your Bluesky account settings.\n\n\n\n\n\n\nName\nDescription\n\n\n\n\ndid\nGet the DID of the authenticated user.\n\n\nhandle\nGet the handle of the authenticated user.\n\n\nis_authenticated\nCheck if the client has a valid session.\n\n\n\n\n\n\n\n\n\nName\nDescription\n\n\n\n\ncreate_record\nCreate a record in the user’s repository.\n\n\ndelete_record\nDelete a record.\n\n\nexport_session\nExport the current session for later reuse.\n\n\nget_blob\nDownload a blob from a PDS.\n\n\nget_blob_url\nGet the direct URL for fetching a blob.\n\n\nget_record\nFetch a record by AT URI.\n\n\nlist_datasets\nList dataset records.\n\n\nlist_lenses\nList lens records.\n\n\nlist_records\nList records in a collection.\n\n\nlist_schemas\nList schema records.\n\n\nlogin\nAuthenticate with the ATProto PDS.\n\n\nlogin_with_session\nAuthenticate using an exported session string.\n\n\nput_record\nCreate or update a record at a specific key.\n\n\nupload_blob\nUpload binary data as a blob to the PDS.\n\n\n\n\n\natmosphere.AtmosphereClient.create_record(\n    collection,\n    record,\n    *,\n    rkey=None,\n    validate=False,\n)\nCreate a record in the user’s repository.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\ncollection\nstr\nThe NSID of the record collection (e.g., ‘ac.foundation.dataset.sampleSchema’).\nrequired\n\n\nrecord\ndict\nThe record data. 
Must include a ‘$type’ field.\nrequired\n\n\nrkey\nOptional[str]\nOptional explicit record key. If not provided, a TID is generated.\nNone\n\n\nvalidate\nbool\nWhether to validate against the Lexicon schema. Set to False for custom lexicons that the PDS doesn’t know about.\nFalse\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nAtUri\nThe AT URI of the created record.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nValueError\nIf not authenticated.\n\n\n\natproto.exceptions.AtProtocolError\nIf record creation fails.\n\n\n\n\n\n\n\natmosphere.AtmosphereClient.delete_record(uri, *, swap_commit=None)\nDelete a record.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nuri\nstr | AtUri\nThe AT URI of the record to delete.\nrequired\n\n\nswap_commit\nOptional[str]\nOptional CID for compare-and-swap delete.\nNone\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nValueError\nIf not authenticated.\n\n\n\natproto.exceptions.AtProtocolError\nIf deletion fails.\n\n\n\n\n\n\n\natmosphere.AtmosphereClient.export_session()\nExport the current session for later reuse.\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nstr\nSession string that can be passed to login_with_session().\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nValueError\nIf not authenticated.\n\n\n\n\n\n\n\natmosphere.AtmosphereClient.get_blob(did, cid)\nDownload a blob from a PDS.\nThis resolves the PDS endpoint from the DID document and fetches the blob directly from the PDS.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\ndid\nstr\nThe DID of the repository containing the blob.\nrequired\n\n\ncid\nstr\nThe CID of the blob.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nbytes\nThe blob data as bytes.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nValueError\nIf PDS endpoint cannot be resolved.\n\n\n\nrequests.HTTPError\nIf blob fetch fails.\n\n\n\n\n\n\n\natmosphere.AtmosphereClient.get_blob_url(did, cid)\nGet the direct URL for fetching a blob.\nThis is useful 
for passing to WebDataset or other HTTP clients.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\ndid\nstr\nThe DID of the repository containing the blob.\nrequired\n\n\ncid\nstr\nThe CID of the blob.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nstr\nThe full URL for fetching the blob.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nValueError\nIf PDS endpoint cannot be resolved.\n\n\n\n\n\n\n\natmosphere.AtmosphereClient.get_record(uri)\nFetch a record by AT URI.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nuri\nstr | AtUri\nThe AT URI of the record.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\ndict\nThe record data as a dictionary.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\natproto.exceptions.AtProtocolError\nIf record not found.\n\n\n\n\n\n\n\natmosphere.AtmosphereClient.list_datasets(repo=None, limit=100)\nList dataset records.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nrepo\nOptional[str]\nThe DID to query. Defaults to authenticated user.\nNone\n\n\nlimit\nint\nMaximum number to return.\n100\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nlist[dict]\nList of dataset records.\n\n\n\n\n\n\n\natmosphere.AtmosphereClient.list_lenses(repo=None, limit=100)\nList lens records.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nrepo\nOptional[str]\nThe DID to query. Defaults to authenticated user.\nNone\n\n\nlimit\nint\nMaximum number to return.\n100\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nlist[dict]\nList of lens records.\n\n\n\n\n\n\n\natmosphere.AtmosphereClient.list_records(\n    collection,\n    *,\n    repo=None,\n    limit=100,\n    cursor=None,\n)\nList records in a collection.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\ncollection\nstr\nThe NSID of the record collection.\nrequired\n\n\nrepo\nOptional[str]\nThe DID of the repository to query. 
Defaults to the authenticated user’s repository.\nNone\n\n\nlimit\nint\nMaximum number of records to return (default 100).\n100\n\n\ncursor\nOptional[str]\nPagination cursor from a previous call.\nNone\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nlist[dict]\nA tuple of (records, next_cursor). The cursor is None if there\n\n\n\nOptional[str]\nare no more records.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nValueError\nIf repo is None and not authenticated.\n\n\n\n\n\n\n\natmosphere.AtmosphereClient.list_schemas(repo=None, limit=100)\nList schema records.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nrepo\nOptional[str]\nThe DID to query. Defaults to authenticated user.\nNone\n\n\nlimit\nint\nMaximum number to return.\n100\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nlist[dict]\nList of schema records.\n\n\n\n\n\n\n\natmosphere.AtmosphereClient.login(handle, password)\nAuthenticate with the ATProto PDS.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nhandle\nstr\nYour Bluesky handle (e.g., ‘alice.bsky.social’).\nrequired\n\n\npassword\nstr\nApp-specific password (not your main password).\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\natproto.exceptions.AtProtocolError\nIf authentication fails.\n\n\n\n\n\n\n\natmosphere.AtmosphereClient.login_with_session(session_string)\nAuthenticate using an exported session string.\nThis allows reusing a session without re-authenticating, which helps avoid rate limits on session creation.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nsession_string\nstr\nSession string from export_session().\nrequired\n\n\n\n\n\n\n\natmosphere.AtmosphereClient.put_record(\n    collection,\n    rkey,\n    record,\n    *,\n    validate=False,\n    swap_commit=None,\n)\nCreate or update a record at a specific key.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\ncollection\nstr\nThe NSID of the record collection.\nrequired\n\n\nrkey\nstr\nThe record 
key.\nrequired\n\n\nrecord\ndict\nThe record data. Must include a ‘$type’ field.\nrequired\n\n\nvalidate\nbool\nWhether to validate against the Lexicon schema.\nFalse\n\n\nswap_commit\nOptional[str]\nOptional CID for compare-and-swap update.\nNone\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nAtUri\nThe AT URI of the record.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nValueError\nIf not authenticated.\n\n\n\natproto.exceptions.AtProtocolError\nIf operation fails.\n\n\n\n\n\n\n\natmosphere.AtmosphereClient.upload_blob(\n    data,\n    mime_type='application/octet-stream',\n)\nUpload binary data as a blob to the PDS.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\ndata\nbytes\nBinary data to upload.\nrequired\n\n\nmime_type\nstr\nMIME type of the data (for reference, not enforced by PDS).\n'application/octet-stream'\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\ndict\nA blob reference dict with keys: ‘$type’, ‘ref’, ‘mimeType’, ‘size’.\n\n\n\ndict\nThis can be embedded directly in record fields.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nValueError\nIf not authenticated.\n\n\n\natproto.exceptions.AtProtocolError\nIf upload fails."
                                                                                        +    "text": "atmosphere.AtmosphereClient(base_url=None, *, _client=None)\nATProto client wrapper for atdata operations.\nThis class wraps the atproto SDK client and provides higher-level methods for working with atdata records (schemas, datasets, lenses).\n\n\n>>> client = AtmosphereClient()\n>>> client.login(\"alice.bsky.social\", \"app-password\")\n>>> print(client.did)\n'did:plc:...'\n\n\n\nThe password should be an app-specific password, not your main account password. Create app passwords in your Bluesky account settings.\n\n\n\n\n\n\nName\nDescription\n\n\n\n\ndid\nGet the DID of the authenticated user.\n\n\nhandle\nGet the handle of the authenticated user.\n\n\nis_authenticated\nCheck if the client has a valid session.\n\n\n\n\n\n\n\n\n\nName\nDescription\n\n\n\n\ncreate_record\nCreate a record in the user’s repository.\n\n\ndelete_record\nDelete a record.\n\n\nexport_session\nExport the current session for later reuse.\n\n\nget_blob\nDownload a blob from a PDS.\n\n\nget_blob_url\nGet the direct URL for fetching a blob.\n\n\nget_record\nFetch a record by AT URI.\n\n\nlist_datasets\nList dataset records.\n\n\nlist_lenses\nList lens records.\n\n\nlist_records\nList records in a collection.\n\n\nlist_schemas\nList schema records.\n\n\nlogin\nAuthenticate with the ATProto PDS.\n\n\nlogin_with_session\nAuthenticate using an exported session string.\n\n\nput_record\nCreate or update a record at a specific key.\n\n\nupload_blob\nUpload binary data as a blob to the PDS.\n\n\n\n\n\natmosphere.AtmosphereClient.create_record(\n    collection,\n    record,\n    *,\n    rkey=None,\n    validate=False,\n)\nCreate a record in the user’s repository.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\ncollection\nstr\nThe NSID of the record collection (e.g., ‘ac.foundation.dataset.sampleSchema’).\nrequired\n\n\nrecord\ndict\nThe record data. 
Must include a ‘$type’ field.\nrequired\n\n\nrkey\nOptional[str]\nOptional explicit record key. If not provided, a TID is generated.\nNone\n\n\nvalidate\nbool\nWhether to validate against the Lexicon schema. Set to False for custom lexicons that the PDS doesn’t know about.\nFalse\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nAtUri\nThe AT URI of the created record.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nValueError\nIf not authenticated.\n\n\n\natproto.exceptions.AtProtocolError\nIf record creation fails.\n\n\n\n\n\n\n\natmosphere.AtmosphereClient.delete_record(uri, *, swap_commit=None)\nDelete a record.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nuri\nstr | AtUri\nThe AT URI of the record to delete.\nrequired\n\n\nswap_commit\nOptional[str]\nOptional CID for compare-and-swap delete.\nNone\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nValueError\nIf not authenticated.\n\n\n\natproto.exceptions.AtProtocolError\nIf deletion fails.\n\n\n\n\n\n\n\natmosphere.AtmosphereClient.export_session()\nExport the current session for later reuse.\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nstr\nSession string that can be passed to login_with_session().\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nValueError\nIf not authenticated.\n\n\n\n\n\n\n\natmosphere.AtmosphereClient.get_blob(did, cid)\nDownload a blob from a PDS.\nThis resolves the PDS endpoint from the DID document and fetches the blob directly from the PDS.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\ndid\nstr\nThe DID of the repository containing the blob.\nrequired\n\n\ncid\nstr\nThe CID of the blob.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nbytes\nThe blob data as bytes.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nValueError\nIf PDS endpoint cannot be resolved.\n\n\n\nrequests.HTTPError\nIf blob fetch fails.\n\n\n\n\n\n\n\natmosphere.AtmosphereClient.get_blob_url(did, cid)\nGet the direct URL for fetching a blob.\nThis is useful 
for passing to WebDataset or other HTTP clients.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\ndid\nstr\nThe DID of the repository containing the blob.\nrequired\n\n\ncid\nstr\nThe CID of the blob.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nstr\nThe full URL for fetching the blob.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nValueError\nIf PDS endpoint cannot be resolved.\n\n\n\n\n\n\n\natmosphere.AtmosphereClient.get_record(uri)\nFetch a record by AT URI.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nuri\nstr | AtUri\nThe AT URI of the record.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\ndict\nThe record data as a dictionary.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\natproto.exceptions.AtProtocolError\nIf record not found.\n\n\n\n\n\n\n\natmosphere.AtmosphereClient.list_datasets(repo=None, limit=100)\nList dataset records.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nrepo\nOptional[str]\nThe DID to query. Defaults to authenticated user.\nNone\n\n\nlimit\nint\nMaximum number to return.\n100\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nlist[dict]\nList of dataset records.\n\n\n\n\n\n\n\natmosphere.AtmosphereClient.list_lenses(repo=None, limit=100)\nList lens records.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nrepo\nOptional[str]\nThe DID to query. Defaults to authenticated user.\nNone\n\n\nlimit\nint\nMaximum number to return.\n100\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nlist[dict]\nList of lens records.\n\n\n\n\n\n\n\natmosphere.AtmosphereClient.list_records(\n    collection,\n    *,\n    repo=None,\n    limit=100,\n    cursor=None,\n)\nList records in a collection.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\ncollection\nstr\nThe NSID of the record collection.\nrequired\n\n\nrepo\nOptional[str]\nThe DID of the repository to query. 
Defaults to the authenticated user’s repository.\nNone\n\n\nlimit\nint\nMaximum number of records to return (default 100).\n100\n\n\ncursor\nOptional[str]\nPagination cursor from a previous call.\nNone\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nlist[dict]\nA tuple of (records, next_cursor). The cursor is None if there\n\n\n\nOptional[str]\nare no more records.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nValueError\nIf repo is None and not authenticated.\n\n\n\n\n\n\n\natmosphere.AtmosphereClient.list_schemas(repo=None, limit=100)\nList schema records.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nrepo\nOptional[str]\nThe DID to query. Defaults to authenticated user.\nNone\n\n\nlimit\nint\nMaximum number to return.\n100\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nlist[dict]\nList of schema records.\n\n\n\n\n\n\n\natmosphere.AtmosphereClient.login(handle, password)\nAuthenticate with the ATProto PDS.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nhandle\nstr\nYour Bluesky handle (e.g., ‘alice.bsky.social’).\nrequired\n\n\npassword\nstr\nApp-specific password (not your main password).\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\natproto.exceptions.AtProtocolError\nIf authentication fails.\n\n\n\n\n\n\n\natmosphere.AtmosphereClient.login_with_session(session_string)\nAuthenticate using an exported session string.\nThis allows reusing a session without re-authenticating, which helps avoid rate limits on session creation.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nsession_string\nstr\nSession string from export_session().\nrequired\n\n\n\n\n\n\n\natmosphere.AtmosphereClient.put_record(\n    collection,\n    rkey,\n    record,\n    *,\n    validate=False,\n    swap_commit=None,\n)\nCreate or update a record at a specific key.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\ncollection\nstr\nThe NSID of the record collection.\nrequired\n\n\nrkey\nstr\nThe record 
key.\nrequired\n\n\nrecord\ndict\nThe record data. Must include a ‘$type’ field.\nrequired\n\n\nvalidate\nbool\nWhether to validate against the Lexicon schema.\nFalse\n\n\nswap_commit\nOptional[str]\nOptional CID for compare-and-swap update.\nNone\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nAtUri\nThe AT URI of the record.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nValueError\nIf not authenticated.\n\n\n\natproto.exceptions.AtProtocolError\nIf operation fails.\n\n\n\n\n\n\n\natmosphere.AtmosphereClient.upload_blob(\n    data,\n    mime_type='application/octet-stream',\n)\nUpload binary data as a blob to the PDS.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\ndata\nbytes\nBinary data to upload.\nrequired\n\n\nmime_type\nstr\nMIME type of the data (for reference, not enforced by PDS).\n'application/octet-stream'\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\ndict\nA blob reference dict with keys: ‘$type’, ‘ref’, ‘mimeType’, ‘size’.\n\n\n\ndict\nThis can be embedded directly in record fields.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nValueError\nIf not authenticated.\n\n\n\natproto.exceptions.AtProtocolError\nIf upload fails."
                                                                                           },
                                                                                           {
                                                                                        -    "objectID": "api/AtmosphereClient.html#example",
                                                                                        -    "href": "api/AtmosphereClient.html#example",
                                                                                        +    "objectID": "api/AtmosphereClient.html#examples",
                                                                                        +    "href": "api/AtmosphereClient.html#examples",
                                                                                             "title": "AtmosphereClient",
                                                                                             "section": "",
                                                                                        -    "text": "::\n>>> client = AtmosphereClient()\n>>> client.login(\"alice.bsky.social\", \"app-password\")\n>>> print(client.did)\n'did:plc:...'"
                                                                                        +    "text": ">>> client = AtmosphereClient()\n>>> client.login(\"alice.bsky.social\", \"app-password\")\n>>> print(client.did)\n'did:plc:...'"
                                                                                           },
                                                                                           {
                                                                                             "objectID": "api/AtmosphereClient.html#note",
                                                                                        @@ -1156,7 +1156,7 @@
                                                                                             "href": "api/load_dataset.html",
                                                                                             "title": "load_dataset",
                                                                                             "section": "",
                                                                                        -    "text": "load_dataset(\n    path,\n    sample_type=None,\n    *,\n    split=None,\n    data_files=None,\n    streaming=False,\n    index=None,\n)\nLoad a dataset from local files, remote URLs, or an index.\nThis function provides a HuggingFace Datasets-style interface for loading atdata typed datasets. It handles path resolution, split detection, and returns either a single Dataset or a DatasetDict depending on the split parameter.\nWhen no sample_type is provided, returns a Dataset[DictSample] that provides dynamic dict-like access to fields. Use .as_type(MyType) to convert to a typed schema.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\npath\nstr\nPath to dataset. Can be: - Index lookup: “@handle/dataset-name” or “@local/dataset-name” - WebDataset brace notation: “path/to/{train,test}-{000..099}.tar” - Local directory: “./data/” (scans for .tar files) - Glob pattern: “path/to/.tar” - Remote URL: ”s3://bucket/path/data-.tar” - Single file: “path/to/data.tar”\nrequired\n\n\nsample_type\nType[ST] | None\nThe PackableSample subclass defining the schema. If None, returns Dataset[DictSample] with dynamic field access. Can also be resolved from an index when using @handle/dataset syntax.\nNone\n\n\nsplit\nstr | None\nWhich split to load. If None, returns a DatasetDict with all detected splits. If specified (e.g., “train”, “test”), returns a single Dataset for that split.\nNone\n\n\ndata_files\nstr | list[str] | dict[str, str | list[str]] | None\nOptional explicit mapping of data files. Can be: - str: Single file pattern - list[str]: List of file patterns (assigned to “train”) - dict[str, str | list[str]]: Explicit split -> files mapping\nNone\n\n\nstreaming\nbool\nIf True, explicitly marks the dataset for streaming mode. 
Note: atdata Datasets are already lazy/streaming via WebDataset pipelines, so this parameter primarily signals intent.\nFalse\n\n\nindex\nOptional['AbstractIndex']\nOptional AbstractIndex for dataset lookup. Required when using @handle/dataset syntax. When provided with an indexed path, the schema can be auto-resolved from the index.\nNone\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nDataset[ST] | DatasetDict[ST]\nIf split is None: DatasetDict with all detected splits.\n\n\n\nDataset[ST] | DatasetDict[ST]\nIf split is specified: Dataset for that split.\n\n\n\nDataset[ST] | DatasetDict[ST]\nType is ST if sample_type provided, otherwise DictSample.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nValueError\nIf the specified split is not found.\n\n\n\nFileNotFoundError\nIf no data files are found at the path.\n\n\n\nKeyError\nIf dataset not found in index.\n\n\n\n\n\n\n::\n>>> # Load without type - get DictSample for exploration\n>>> ds = load_dataset(\"./data/train.tar\", split=\"train\")\n>>> for sample in ds.ordered():\n...     print(sample.keys())  # Explore fields\n...     print(sample[\"text\"]) # Dict-style access\n...     print(sample.label)   # Attribute access\n>>>\n>>> # Convert to typed schema\n>>> typed_ds = ds.as_type(TextData)\n>>>\n>>> # Or load with explicit type directly\n>>> train_ds = load_dataset(\"./data/train-*.tar\", TextData, split=\"train\")\n>>>\n>>> # Load from index with auto-type resolution\n>>> index = LocalIndex()\n>>> ds = load_dataset(\"@local/my-dataset\", index=index, split=\"train\")"
                                                                                        +    "text": "load_dataset(\n    path,\n    sample_type=None,\n    *,\n    split=None,\n    data_files=None,\n    streaming=False,\n    index=None,\n)\nLoad a dataset from local files, remote URLs, or an index.\nThis function provides a HuggingFace Datasets-style interface for loading atdata typed datasets. It handles path resolution, split detection, and returns either a single Dataset or a DatasetDict depending on the split parameter.\nWhen no sample_type is provided, returns a Dataset[DictSample] that provides dynamic dict-like access to fields. Use .as_type(MyType) to convert to a typed schema.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\npath\nstr\nPath to dataset. Can be: - Index lookup: “@handle/dataset-name” or “@local/dataset-name” - WebDataset brace notation: “path/to/{train,test}-{000..099}.tar” - Local directory: “./data/” (scans for .tar files) - Glob pattern: “path/to/.tar” - Remote URL: ”s3://bucket/path/data-.tar” - Single file: “path/to/data.tar”\nrequired\n\n\nsample_type\nType[ST] | None\nThe PackableSample subclass defining the schema. If None, returns Dataset[DictSample] with dynamic field access. Can also be resolved from an index when using @handle/dataset syntax.\nNone\n\n\nsplit\nstr | None\nWhich split to load. If None, returns a DatasetDict with all detected splits. If specified (e.g., “train”, “test”), returns a single Dataset for that split.\nNone\n\n\ndata_files\nstr | list[str] | dict[str, str | list[str]] | None\nOptional explicit mapping of data files. Can be: - str: Single file pattern - list[str]: List of file patterns (assigned to “train”) - dict[str, str | list[str]]: Explicit split -> files mapping\nNone\n\n\nstreaming\nbool\nIf True, explicitly marks the dataset for streaming mode. 
Note: atdata Datasets are already lazy/streaming via WebDataset pipelines, so this parameter primarily signals intent.\nFalse\n\n\nindex\nOptional['AbstractIndex']\nOptional AbstractIndex for dataset lookup. Required when using @handle/dataset syntax. When provided with an indexed path, the schema can be auto-resolved from the index.\nNone\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nDataset[ST] | DatasetDict[ST]\nIf split is None: DatasetDict with all detected splits.\n\n\n\nDataset[ST] | DatasetDict[ST]\nIf split is specified: Dataset for that split.\n\n\n\nDataset[ST] | DatasetDict[ST]\nType is ST if sample_type provided, otherwise DictSample.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nValueError\nIf the specified split is not found.\n\n\n\nFileNotFoundError\nIf no data files are found at the path.\n\n\n\nKeyError\nIf dataset not found in index.\n\n\n\n\n\n\n>>> # Load without type - get DictSample for exploration\n>>> ds = load_dataset(\"./data/train.tar\", split=\"train\")\n>>> for sample in ds.ordered():\n...     print(sample.keys())  # Explore fields\n...     print(sample[\"text\"]) # Dict-style access\n...     print(sample.label)   # Attribute access\n>>>\n>>> # Convert to typed schema\n>>> typed_ds = ds.as_type(TextData)\n>>>\n>>> # Or load with explicit type directly\n>>> train_ds = load_dataset(\"./data/train-*.tar\", TextData, split=\"train\")\n>>>\n>>> # Load from index with auto-type resolution\n>>> index = LocalIndex()\n>>> ds = load_dataset(\"@local/my-dataset\", index=index, split=\"train\")"
                                                                                           },
                                                                                           {
                                                                                             "objectID": "api/load_dataset.html#parameters",
                                                                                        @@ -1180,18 +1180,18 @@
                                                                                             "text": "Name\nType\nDescription\n\n\n\n\n\nValueError\nIf the specified split is not found.\n\n\n\nFileNotFoundError\nIf no data files are found at the path.\n\n\n\nKeyError\nIf dataset not found in index."
                                                                                           },
                                                                                           {
                                                                                        -    "objectID": "api/load_dataset.html#example",
                                                                                        -    "href": "api/load_dataset.html#example",
                                                                                        +    "objectID": "api/load_dataset.html#examples",
                                                                                        +    "href": "api/load_dataset.html#examples",
                                                                                             "title": "load_dataset",
                                                                                             "section": "",
                                                                                        -    "text": "::\n>>> # Load without type - get DictSample for exploration\n>>> ds = load_dataset(\"./data/train.tar\", split=\"train\")\n>>> for sample in ds.ordered():\n...     print(sample.keys())  # Explore fields\n...     print(sample[\"text\"]) # Dict-style access\n...     print(sample.label)   # Attribute access\n>>>\n>>> # Convert to typed schema\n>>> typed_ds = ds.as_type(TextData)\n>>>\n>>> # Or load with explicit type directly\n>>> train_ds = load_dataset(\"./data/train-*.tar\", TextData, split=\"train\")\n>>>\n>>> # Load from index with auto-type resolution\n>>> index = LocalIndex()\n>>> ds = load_dataset(\"@local/my-dataset\", index=index, split=\"train\")"
                                                                                        +    "text": ">>> # Load without type - get DictSample for exploration\n>>> ds = load_dataset(\"./data/train.tar\", split=\"train\")\n>>> for sample in ds.ordered():\n...     print(sample.keys())  # Explore fields\n...     print(sample[\"text\"]) # Dict-style access\n...     print(sample.label)   # Attribute access\n>>>\n>>> # Convert to typed schema\n>>> typed_ds = ds.as_type(TextData)\n>>>\n>>> # Or load with explicit type directly\n>>> train_ds = load_dataset(\"./data/train-*.tar\", TextData, split=\"train\")\n>>>\n>>> # Load from index with auto-type resolution\n>>> index = LocalIndex()\n>>> ds = load_dataset(\"@local/my-dataset\", index=index, split=\"train\")"
                                                                                           },
                                                                                           {
                                                                                             "objectID": "api/promote_to_atmosphere.html",
                                                                                             "href": "api/promote_to_atmosphere.html",
                                                                                             "title": "promote_to_atmosphere",
                                                                                             "section": "",
                                                                                        -    "text": "promote.promote_to_atmosphere(\n    local_entry,\n    local_index,\n    atmosphere_client,\n    *,\n    data_store=None,\n    name=None,\n    description=None,\n    tags=None,\n    license=None,\n)\nPromote a local dataset to the atmosphere network.\nThis function takes a locally-indexed dataset and publishes it to ATProto, making it discoverable on the federated atmosphere network.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nlocal_entry\nLocalDatasetEntry\nThe LocalDatasetEntry to promote.\nrequired\n\n\nlocal_index\nLocalIndex\nLocal index containing the schema for this entry.\nrequired\n\n\natmosphere_client\nAtmosphereClient\nAuthenticated AtmosphereClient.\nrequired\n\n\ndata_store\nAbstractDataStore | None\nOptional data store for copying data to new location. If None, the existing data_urls are used as-is.\nNone\n\n\nname\nstr | None\nOverride name for the atmosphere record. Defaults to local name.\nNone\n\n\ndescription\nstr | None\nOptional description for the dataset.\nNone\n\n\ntags\nlist[str] | None\nOptional tags for discovery.\nNone\n\n\nlicense\nstr | None\nOptional license identifier.\nNone\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nstr\nAT URI of the created atmosphere dataset record.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nKeyError\nIf schema not found in local index.\n\n\n\nValueError\nIf local entry has no data URLs.\n\n\n\n\n\n\n::\n>>> entry = local_index.get_dataset(\"mnist-train\")\n>>> uri = promote_to_atmosphere(entry, local_index, client)\n>>> print(uri)\nat://did:plc:abc123/ac.foundation.dataset.datasetIndex/..."
                                                                                        +    "text": "promote.promote_to_atmosphere(\n    local_entry,\n    local_index,\n    atmosphere_client,\n    *,\n    data_store=None,\n    name=None,\n    description=None,\n    tags=None,\n    license=None,\n)\nPromote a local dataset to the atmosphere network.\nThis function takes a locally-indexed dataset and publishes it to ATProto, making it discoverable on the federated atmosphere network.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nlocal_entry\nLocalDatasetEntry\nThe LocalDatasetEntry to promote.\nrequired\n\n\nlocal_index\nLocalIndex\nLocal index containing the schema for this entry.\nrequired\n\n\natmosphere_client\nAtmosphereClient\nAuthenticated AtmosphereClient.\nrequired\n\n\ndata_store\nAbstractDataStore | None\nOptional data store for copying data to new location. If None, the existing data_urls are used as-is.\nNone\n\n\nname\nstr | None\nOverride name for the atmosphere record. Defaults to local name.\nNone\n\n\ndescription\nstr | None\nOptional description for the dataset.\nNone\n\n\ntags\nlist[str] | None\nOptional tags for discovery.\nNone\n\n\nlicense\nstr | None\nOptional license identifier.\nNone\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nstr\nAT URI of the created atmosphere dataset record.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nKeyError\nIf schema not found in local index.\n\n\n\nValueError\nIf local entry has no data URLs.\n\n\n\n\n\n\n>>> entry = local_index.get_dataset(\"mnist-train\")\n>>> uri = promote_to_atmosphere(entry, local_index, client)\n>>> print(uri)\nat://did:plc:abc123/ac.foundation.dataset.datasetIndex/..."
                                                                                           },
                                                                                           {
                                                                                             "objectID": "api/promote_to_atmosphere.html#parameters",
                                                                                        @@ -1215,25 +1215,25 @@
                                                                                             "text": "Name\nType\nDescription\n\n\n\n\n\nKeyError\nIf schema not found in local index.\n\n\n\nValueError\nIf local entry has no data URLs."
                                                                                           },
                                                                                           {
                                                                                        -    "objectID": "api/promote_to_atmosphere.html#example",
                                                                                        -    "href": "api/promote_to_atmosphere.html#example",
                                                                                        +    "objectID": "api/promote_to_atmosphere.html#examples",
                                                                                        +    "href": "api/promote_to_atmosphere.html#examples",
                                                                                             "title": "promote_to_atmosphere",
                                                                                             "section": "",
                                                                                        -    "text": "::\n>>> entry = local_index.get_dataset(\"mnist-train\")\n>>> uri = promote_to_atmosphere(entry, local_index, client)\n>>> print(uri)\nat://did:plc:abc123/ac.foundation.dataset.datasetIndex/..."
                                                                                        +    "text": ">>> entry = local_index.get_dataset(\"mnist-train\")\n>>> uri = promote_to_atmosphere(entry, local_index, client)\n>>> print(uri)\nat://did:plc:abc123/ac.foundation.dataset.datasetIndex/..."
                                                                                           },
                                                                                           {
                                                                                             "objectID": "api/SchemaPublisher.html",
                                                                                             "href": "api/SchemaPublisher.html",
                                                                                             "title": "SchemaPublisher",
                                                                                             "section": "",
                                                                                        -    "text": "atmosphere.SchemaPublisher(client)\nPublishes PackableSample schemas to ATProto.\nThis class introspects a PackableSample class to extract its field definitions and publishes them as an ATProto schema record.\n\n\n::\n>>> @atdata.packable\n... class MySample:\n...     image: NDArray\n...     label: str\n...\n>>> client = AtmosphereClient()\n>>> client.login(\"handle\", \"password\")\n>>>\n>>> publisher = SchemaPublisher(client)\n>>> uri = publisher.publish(MySample, version=\"1.0.0\")\n>>> print(uri)\nat://did:plc:.../ac.foundation.dataset.sampleSchema/...\n\n\n\n\n\n\nName\nDescription\n\n\n\n\npublish\nPublish a PackableSample schema to ATProto.\n\n\n\n\n\natmosphere.SchemaPublisher.publish(\n    sample_type,\n    *,\n    name=None,\n    version='1.0.0',\n    description=None,\n    metadata=None,\n    rkey=None,\n)\nPublish a PackableSample schema to ATProto.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nsample_type\nType[ST]\nThe PackableSample class to publish.\nrequired\n\n\nname\nOptional[str]\nHuman-readable name. Defaults to the class name.\nNone\n\n\nversion\nstr\nSemantic version string (e.g., ‘1.0.0’).\n'1.0.0'\n\n\ndescription\nOptional[str]\nHuman-readable description.\nNone\n\n\nmetadata\nOptional[dict]\nArbitrary metadata dictionary.\nNone\n\n\nrkey\nOptional[str]\nOptional explicit record key. If not provided, a TID is generated.\nNone\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nAtUri\nThe AT URI of the created schema record.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nValueError\nIf sample_type is not a dataclass or client is not authenticated.\n\n\n\nTypeError\nIf a field type is not supported."
                                                                                        +    "text": "atmosphere.SchemaPublisher(client)\nPublishes PackableSample schemas to ATProto.\nThis class introspects a PackableSample class to extract its field definitions and publishes them as an ATProto schema record.\n\n\n>>> @atdata.packable\n... class MySample:\n...     image: NDArray\n...     label: str\n...\n>>> client = AtmosphereClient()\n>>> client.login(\"handle\", \"password\")\n>>>\n>>> publisher = SchemaPublisher(client)\n>>> uri = publisher.publish(MySample, version=\"1.0.0\")\n>>> print(uri)\nat://did:plc:.../ac.foundation.dataset.sampleSchema/...\n\n\n\n\n\n\nName\nDescription\n\n\n\n\npublish\nPublish a PackableSample schema to ATProto.\n\n\n\n\n\natmosphere.SchemaPublisher.publish(\n    sample_type,\n    *,\n    name=None,\n    version='1.0.0',\n    description=None,\n    metadata=None,\n    rkey=None,\n)\nPublish a PackableSample schema to ATProto.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nsample_type\nType[ST]\nThe PackableSample class to publish.\nrequired\n\n\nname\nOptional[str]\nHuman-readable name. Defaults to the class name.\nNone\n\n\nversion\nstr\nSemantic version string (e.g., ‘1.0.0’).\n'1.0.0'\n\n\ndescription\nOptional[str]\nHuman-readable description.\nNone\n\n\nmetadata\nOptional[dict]\nArbitrary metadata dictionary.\nNone\n\n\nrkey\nOptional[str]\nOptional explicit record key. If not provided, a TID is generated.\nNone\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nAtUri\nThe AT URI of the created schema record.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nValueError\nIf sample_type is not a dataclass or client is not authenticated.\n\n\n\nTypeError\nIf a field type is not supported."
                                                                                           },
                                                                                           {
                                                                                        -    "objectID": "api/SchemaPublisher.html#example",
                                                                                        -    "href": "api/SchemaPublisher.html#example",
                                                                                        +    "objectID": "api/SchemaPublisher.html#examples",
                                                                                        +    "href": "api/SchemaPublisher.html#examples",
                                                                                             "title": "SchemaPublisher",
                                                                                             "section": "",
                                                                                        -    "text": "::\n>>> @atdata.packable\n... class MySample:\n...     image: NDArray\n...     label: str\n...\n>>> client = AtmosphereClient()\n>>> client.login(\"handle\", \"password\")\n>>>\n>>> publisher = SchemaPublisher(client)\n>>> uri = publisher.publish(MySample, version=\"1.0.0\")\n>>> print(uri)\nat://did:plc:.../ac.foundation.dataset.sampleSchema/..."
                                                                                        +    "text": ">>> @atdata.packable\n... class MySample:\n...     image: NDArray\n...     label: str\n...\n>>> client = AtmosphereClient()\n>>> client.login(\"handle\", \"password\")\n>>>\n>>> publisher = SchemaPublisher(client)\n>>> uri = publisher.publish(MySample, version=\"1.0.0\")\n>>> print(uri)\nat://did:plc:.../ac.foundation.dataset.sampleSchema/..."
                                                                                           },
                                                                                           {
                                                                                             "objectID": "api/SchemaPublisher.html#methods",
                                                                                        @@ -1247,14 +1247,14 @@
                                                                                             "href": "api/DatasetPublisher.html",
                                                                                             "title": "DatasetPublisher",
                                                                                             "section": "",
                                                                                        -    "text": "atmosphere.DatasetPublisher(client)\nPublishes dataset index records to ATProto.\nThis class creates dataset records that reference a schema and point to external storage (WebDataset URLs) or ATProto blobs.\n\n\n::\n>>> dataset = atdata.Dataset[MySample](\"s3://bucket/data-{000000..000009}.tar\")\n>>>\n>>> client = AtmosphereClient()\n>>> client.login(\"handle\", \"password\")\n>>>\n>>> publisher = DatasetPublisher(client)\n>>> uri = publisher.publish(\n...     dataset,\n...     name=\"My Training Data\",\n...     description=\"Training data for my model\",\n...     tags=[\"computer-vision\", \"training\"],\n... )\n\n\n\n\n\n\nName\nDescription\n\n\n\n\npublish\nPublish a dataset index record to ATProto.\n\n\npublish_with_blobs\nPublish a dataset with data stored as ATProto blobs.\n\n\npublish_with_urls\nPublish a dataset record with explicit URLs.\n\n\n\n\n\natmosphere.DatasetPublisher.publish(\n    dataset,\n    *,\n    name,\n    schema_uri=None,\n    description=None,\n    tags=None,\n    license=None,\n    auto_publish_schema=True,\n    schema_version='1.0.0',\n    rkey=None,\n)\nPublish a dataset index record to ATProto.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\ndataset\nDataset[ST]\nThe Dataset to publish.\nrequired\n\n\nname\nstr\nHuman-readable dataset name.\nrequired\n\n\nschema_uri\nOptional[str]\nAT URI of the schema record. 
If not provided and auto_publish_schema is True, the schema will be published.\nNone\n\n\ndescription\nOptional[str]\nHuman-readable description.\nNone\n\n\ntags\nOptional[list[str]]\nSearchable tags for discovery.\nNone\n\n\nlicense\nOptional[str]\nSPDX license identifier (e.g., ‘MIT’, ‘Apache-2.0’).\nNone\n\n\nauto_publish_schema\nbool\nIf True and schema_uri not provided, automatically publish the schema first.\nTrue\n\n\nschema_version\nstr\nVersion for auto-published schema.\n'1.0.0'\n\n\nrkey\nOptional[str]\nOptional explicit record key.\nNone\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nAtUri\nThe AT URI of the created dataset record.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nValueError\nIf schema_uri is not provided and auto_publish_schema is False.\n\n\n\n\n\n\n\natmosphere.DatasetPublisher.publish_with_blobs(\n    blobs,\n    schema_uri,\n    *,\n    name,\n    description=None,\n    tags=None,\n    license=None,\n    metadata=None,\n    mime_type='application/x-tar',\n    rkey=None,\n)\nPublish a dataset with data stored as ATProto blobs.\nThis method uploads the provided data as blobs to the PDS and creates a dataset record referencing them. 
Suitable for smaller datasets that fit within blob size limits (typically 50MB per blob, configurable).\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nblobs\nlist[bytes]\nList of binary data (e.g., tar shards) to upload as blobs.\nrequired\n\n\nschema_uri\nstr\nAT URI of the schema record.\nrequired\n\n\nname\nstr\nHuman-readable dataset name.\nrequired\n\n\ndescription\nOptional[str]\nHuman-readable description.\nNone\n\n\ntags\nOptional[list[str]]\nSearchable tags for discovery.\nNone\n\n\nlicense\nOptional[str]\nSPDX license identifier.\nNone\n\n\nmetadata\nOptional[dict]\nArbitrary metadata dictionary.\nNone\n\n\nmime_type\nstr\nMIME type for the blobs (default: application/x-tar).\n'application/x-tar'\n\n\nrkey\nOptional[str]\nOptional explicit record key.\nNone\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nAtUri\nThe AT URI of the created dataset record.\n\n\n\n\n\n\nBlobs are only retained by the PDS when referenced in a committed record. This method handles that automatically.\n\n\n\n\natmosphere.DatasetPublisher.publish_with_urls(\n    urls,\n    schema_uri,\n    *,\n    name,\n    description=None,\n    tags=None,\n    license=None,\n    metadata=None,\n    rkey=None,\n)\nPublish a dataset record with explicit URLs.\nThis method allows publishing a dataset record without having a Dataset object, useful for registering existing WebDataset files.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nurls\nlist[str]\nList of WebDataset URLs with brace notation.\nrequired\n\n\nschema_uri\nstr\nAT URI of the schema record.\nrequired\n\n\nname\nstr\nHuman-readable dataset name.\nrequired\n\n\ndescription\nOptional[str]\nHuman-readable description.\nNone\n\n\ntags\nOptional[list[str]]\nSearchable tags for discovery.\nNone\n\n\nlicense\nOptional[str]\nSPDX license identifier.\nNone\n\n\nmetadata\nOptional[dict]\nArbitrary metadata dictionary.\nNone\n\n\nrkey\nOptional[str]\nOptional explicit record 
key.\nNone\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nAtUri\nThe AT URI of the created dataset record."
                                                                                        +    "text": "atmosphere.DatasetPublisher(client)\nPublishes dataset index records to ATProto.\nThis class creates dataset records that reference a schema and point to external storage (WebDataset URLs) or ATProto blobs.\n\n\n>>> dataset = atdata.Dataset[MySample](\"s3://bucket/data-{000000..000009}.tar\")\n>>>\n>>> client = AtmosphereClient()\n>>> client.login(\"handle\", \"password\")\n>>>\n>>> publisher = DatasetPublisher(client)\n>>> uri = publisher.publish(\n...     dataset,\n...     name=\"My Training Data\",\n...     description=\"Training data for my model\",\n...     tags=[\"computer-vision\", \"training\"],\n... )\n\n\n\n\n\n\nName\nDescription\n\n\n\n\npublish\nPublish a dataset index record to ATProto.\n\n\npublish_with_blobs\nPublish a dataset with data stored as ATProto blobs.\n\n\npublish_with_urls\nPublish a dataset record with explicit URLs.\n\n\n\n\n\natmosphere.DatasetPublisher.publish(\n    dataset,\n    *,\n    name,\n    schema_uri=None,\n    description=None,\n    tags=None,\n    license=None,\n    auto_publish_schema=True,\n    schema_version='1.0.0',\n    rkey=None,\n)\nPublish a dataset index record to ATProto.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\ndataset\nDataset[ST]\nThe Dataset to publish.\nrequired\n\n\nname\nstr\nHuman-readable dataset name.\nrequired\n\n\nschema_uri\nOptional[str]\nAT URI of the schema record. 
If not provided and auto_publish_schema is True, the schema will be published.\nNone\n\n\ndescription\nOptional[str]\nHuman-readable description.\nNone\n\n\ntags\nOptional[list[str]]\nSearchable tags for discovery.\nNone\n\n\nlicense\nOptional[str]\nSPDX license identifier (e.g., ‘MIT’, ‘Apache-2.0’).\nNone\n\n\nauto_publish_schema\nbool\nIf True and schema_uri not provided, automatically publish the schema first.\nTrue\n\n\nschema_version\nstr\nVersion for auto-published schema.\n'1.0.0'\n\n\nrkey\nOptional[str]\nOptional explicit record key.\nNone\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nAtUri\nThe AT URI of the created dataset record.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nValueError\nIf schema_uri is not provided and auto_publish_schema is False.\n\n\n\n\n\n\n\natmosphere.DatasetPublisher.publish_with_blobs(\n    blobs,\n    schema_uri,\n    *,\n    name,\n    description=None,\n    tags=None,\n    license=None,\n    metadata=None,\n    mime_type='application/x-tar',\n    rkey=None,\n)\nPublish a dataset with data stored as ATProto blobs.\nThis method uploads the provided data as blobs to the PDS and creates a dataset record referencing them. 
Suitable for smaller datasets that fit within blob size limits (typically 50MB per blob, configurable).\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nblobs\nlist[bytes]\nList of binary data (e.g., tar shards) to upload as blobs.\nrequired\n\n\nschema_uri\nstr\nAT URI of the schema record.\nrequired\n\n\nname\nstr\nHuman-readable dataset name.\nrequired\n\n\ndescription\nOptional[str]\nHuman-readable description.\nNone\n\n\ntags\nOptional[list[str]]\nSearchable tags for discovery.\nNone\n\n\nlicense\nOptional[str]\nSPDX license identifier.\nNone\n\n\nmetadata\nOptional[dict]\nArbitrary metadata dictionary.\nNone\n\n\nmime_type\nstr\nMIME type for the blobs (default: application/x-tar).\n'application/x-tar'\n\n\nrkey\nOptional[str]\nOptional explicit record key.\nNone\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nAtUri\nThe AT URI of the created dataset record.\n\n\n\n\n\n\nBlobs are only retained by the PDS when referenced in a committed record. This method handles that automatically.\n\n\n\n\natmosphere.DatasetPublisher.publish_with_urls(\n    urls,\n    schema_uri,\n    *,\n    name,\n    description=None,\n    tags=None,\n    license=None,\n    metadata=None,\n    rkey=None,\n)\nPublish a dataset record with explicit URLs.\nThis method allows publishing a dataset record without having a Dataset object, useful for registering existing WebDataset files.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nurls\nlist[str]\nList of WebDataset URLs with brace notation.\nrequired\n\n\nschema_uri\nstr\nAT URI of the schema record.\nrequired\n\n\nname\nstr\nHuman-readable dataset name.\nrequired\n\n\ndescription\nOptional[str]\nHuman-readable description.\nNone\n\n\ntags\nOptional[list[str]]\nSearchable tags for discovery.\nNone\n\n\nlicense\nOptional[str]\nSPDX license identifier.\nNone\n\n\nmetadata\nOptional[dict]\nArbitrary metadata dictionary.\nNone\n\n\nrkey\nOptional[str]\nOptional explicit record 
key.\nNone\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nAtUri\nThe AT URI of the created dataset record."
                                                                                           },
                                                                                           {
                                                                                        -    "objectID": "api/DatasetPublisher.html#example",
                                                                                        -    "href": "api/DatasetPublisher.html#example",
                                                                                        +    "objectID": "api/DatasetPublisher.html#examples",
                                                                                        +    "href": "api/DatasetPublisher.html#examples",
                                                                                             "title": "DatasetPublisher",
                                                                                             "section": "",
                                                                                        -    "text": "::\n>>> dataset = atdata.Dataset[MySample](\"s3://bucket/data-{000000..000009}.tar\")\n>>>\n>>> client = AtmosphereClient()\n>>> client.login(\"handle\", \"password\")\n>>>\n>>> publisher = DatasetPublisher(client)\n>>> uri = publisher.publish(\n...     dataset,\n...     name=\"My Training Data\",\n...     description=\"Training data for my model\",\n...     tags=[\"computer-vision\", \"training\"],\n... )"
                                                                                        +    "text": ">>> dataset = atdata.Dataset[MySample](\"s3://bucket/data-{000000..000009}.tar\")\n>>>\n>>> client = AtmosphereClient()\n>>> client.login(\"handle\", \"password\")\n>>>\n>>> publisher = DatasetPublisher(client)\n>>> uri = publisher.publish(\n...     dataset,\n...     name=\"My Training Data\",\n...     description=\"Training data for my model\",\n...     tags=[\"computer-vision\", \"training\"],\n... )"
                                                                                           },
                                                                                           {
                                                                                             "objectID": "api/DatasetPublisher.html#methods",
                                                                                        @@ -1268,7 +1268,7 @@
                                                                                             "href": "api/URLSource.html",
                                                                                             "title": "URLSource",
                                                                                             "section": "",
                                                                                        -    "text": "URLSource(url)\nData source for WebDataset-compatible URLs.\nWraps WebDataset’s gopen to open URLs using built-in handlers for http, https, pipe, gs, hf, sftp, etc. Supports brace expansion for shard patterns like “data-{000..099}.tar”.\nThis is the default source type when a string URL is passed to Dataset.\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\nurl\nstr\nURL or brace pattern for the shards.\n\n\n\n\n\n\n::\n>>> source = URLSource(\"https://example.com/train-{000..009}.tar\")\n>>> for shard_id, stream in source.shards:\n...     print(f\"Streaming {shard_id}\")\n\n\n\n\n\n\nName\nDescription\n\n\n\n\nlist_shards\nExpand brace pattern and return list of shard URLs.\n\n\nopen_shard\nOpen a single shard by URL.\n\n\n\n\n\nURLSource.list_shards()\nExpand brace pattern and return list of shard URLs.\n\n\n\nURLSource.open_shard(shard_id)\nOpen a single shard by URL.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nshard_id\nstr\nURL of the shard to open.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nIO[bytes]\nFile-like stream from gopen.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nKeyError\nIf shard_id is not in list_shards()."
                                                                                        +    "text": "URLSource(url)\nData source for WebDataset-compatible URLs.\nWraps WebDataset’s gopen to open URLs using built-in handlers for http, https, pipe, gs, hf, sftp, etc. Supports brace expansion for shard patterns like “data-{000..099}.tar”.\nThis is the default source type when a string URL is passed to Dataset.\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\nurl\nstr\nURL or brace pattern for the shards.\n\n\n\n\n\n\n>>> source = URLSource(\"https://example.com/train-{000..009}.tar\")\n>>> for shard_id, stream in source.shards:\n...     print(f\"Streaming {shard_id}\")\n\n\n\n\n\n\nName\nDescription\n\n\n\n\nlist_shards\nExpand brace pattern and return list of shard URLs.\n\n\nopen_shard\nOpen a single shard by URL.\n\n\n\n\n\nURLSource.list_shards()\nExpand brace pattern and return list of shard URLs.\n\n\n\nURLSource.open_shard(shard_id)\nOpen a single shard by URL.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nshard_id\nstr\nURL of the shard to open.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nIO[bytes]\nFile-like stream from gopen.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nKeyError\nIf shard_id is not in list_shards()."
                                                                                           },
                                                                                           {
                                                                                             "objectID": "api/URLSource.html#attributes",
                                                                                        @@ -1278,11 +1278,11 @@
                                                                                             "text": "Name\nType\nDescription\n\n\n\n\nurl\nstr\nURL or brace pattern for the shards."
                                                                                           },
                                                                                           {
                                                                                        -    "objectID": "api/URLSource.html#example",
                                                                                        -    "href": "api/URLSource.html#example",
                                                                                        +    "objectID": "api/URLSource.html#examples",
                                                                                        +    "href": "api/URLSource.html#examples",
                                                                                             "title": "URLSource",
                                                                                             "section": "",
                                                                                        -    "text": "::\n>>> source = URLSource(\"https://example.com/train-{000..009}.tar\")\n>>> for shard_id, stream in source.shards:\n...     print(f\"Streaming {shard_id}\")"
                                                                                        +    "text": ">>> source = URLSource(\"https://example.com/train-{000..009}.tar\")\n>>> for shard_id, stream in source.shards:\n...     print(f\"Streaming {shard_id}\")"
                                                                                           },
                                                                                           {
                                                                                             "objectID": "api/URLSource.html#methods",
                                                                                        @@ -1366,7 +1366,7 @@
                                                                                             "href": "api/S3Source.html",
                                                                                             "title": "S3Source",
                                                                                             "section": "",
                                                                                        -    "text": "S3Source(\n    bucket,\n    keys,\n    endpoint=None,\n    access_key=None,\n    secret_key=None,\n    region=None,\n    _client=None,\n)\nData source for S3-compatible storage with explicit credentials.\nUses boto3 to stream directly from S3, supporting: - Standard AWS S3 - S3-compatible endpoints (Cloudflare R2, MinIO, etc.) - Private buckets with credentials - IAM role authentication (when keys not provided)\nUnlike URL-based approaches, this doesn’t require URL transformation or global gopen_schemes registration. Credentials are scoped to the source instance.\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\nbucket\nstr\nS3 bucket name.\n\n\nkeys\nlist[str]\nList of object keys (paths within bucket).\n\n\nendpoint\nstr | None\nOptional custom endpoint URL for S3-compatible services.\n\n\naccess_key\nstr | None\nOptional AWS access key ID.\n\n\nsecret_key\nstr | None\nOptional AWS secret access key.\n\n\nregion\nstr | None\nOptional AWS region (defaults to us-east-1).\n\n\n\n\n\n\n::\n>>> source = S3Source(\n...     bucket=\"my-datasets\",\n...     keys=[\"train/shard-000.tar\", \"train/shard-001.tar\"],\n...     endpoint=\"https://abc123.r2.cloudflarestorage.com\",\n...     access_key=\"AKIAIOSFODNN7EXAMPLE\",\n...     secret_key=\"wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY\",\n... )\n>>> for shard_id, stream in source.shards:\n...     
process(stream)\n\n\n\n\n\n\nName\nDescription\n\n\n\n\nfrom_credentials\nCreate S3Source from a credentials dictionary.\n\n\nfrom_urls\nCreate S3Source from s3:// URLs.\n\n\nlist_shards\nReturn list of S3 URIs for the shards.\n\n\nopen_shard\nOpen a single shard by S3 URI.\n\n\n\n\n\nS3Source.from_credentials(credentials, bucket, keys)\nCreate S3Source from a credentials dictionary.\nAccepts the same credential format used by S3DataStore.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\ncredentials\ndict[str, str]\nDict with AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, and optionally AWS_ENDPOINT.\nrequired\n\n\nbucket\nstr\nS3 bucket name.\nrequired\n\n\nkeys\nlist[str]\nList of object keys.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\n'S3Source'\nConfigured S3Source.\n\n\n\n\n\n\n::\n>>> creds = {\n...     \"AWS_ACCESS_KEY_ID\": \"...\",\n...     \"AWS_SECRET_ACCESS_KEY\": \"...\",\n...     \"AWS_ENDPOINT\": \"https://r2.example.com\",\n... }\n>>> source = S3Source.from_credentials(creds, \"my-bucket\", [\"data.tar\"])\n\n\n\n\nS3Source.from_urls(\n    urls,\n    *,\n    endpoint=None,\n    access_key=None,\n    secret_key=None,\n    region=None,\n)\nCreate S3Source from s3:// URLs.\nParses s3://bucket/key URLs and extracts bucket and keys. All URLs must be in the same bucket.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nurls\nlist[str]\nList of s3:// URLs.\nrequired\n\n\nendpoint\nstr | None\nOptional custom endpoint.\nNone\n\n\naccess_key\nstr | None\nOptional access key.\nNone\n\n\nsecret_key\nstr | None\nOptional secret key.\nNone\n\n\nregion\nstr | None\nOptional region.\nNone\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\n'S3Source'\nS3Source configured for the given URLs.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nValueError\nIf URLs are not valid s3:// URLs or span multiple buckets.\n\n\n\n\n\n\n::\n>>> source = S3Source.from_urls(\n...     
[\"s3://my-bucket/train-000.tar\", \"s3://my-bucket/train-001.tar\"],\n...     endpoint=\"https://r2.example.com\",\n... )\n\n\n\n\nS3Source.list_shards()\nReturn list of S3 URIs for the shards.\n\n\n\nS3Source.open_shard(shard_id)\nOpen a single shard by S3 URI.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nshard_id\nstr\nS3 URI of the shard (s3://bucket/key).\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nIO[bytes]\nStreamingBody for reading the object.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nKeyError\nIf shard_id is not in list_shards()."
                                                                                        +    "text": "S3Source(\n    bucket,\n    keys,\n    endpoint=None,\n    access_key=None,\n    secret_key=None,\n    region=None,\n    _client=None,\n)\nData source for S3-compatible storage with explicit credentials.\nUses boto3 to stream directly from S3, supporting: - Standard AWS S3 - S3-compatible endpoints (Cloudflare R2, MinIO, etc.) - Private buckets with credentials - IAM role authentication (when keys not provided)\nUnlike URL-based approaches, this doesn’t require URL transformation or global gopen_schemes registration. Credentials are scoped to the source instance.\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\nbucket\nstr\nS3 bucket name.\n\n\nkeys\nlist[str]\nList of object keys (paths within bucket).\n\n\nendpoint\nstr | None\nOptional custom endpoint URL for S3-compatible services.\n\n\naccess_key\nstr | None\nOptional AWS access key ID.\n\n\nsecret_key\nstr | None\nOptional AWS secret access key.\n\n\nregion\nstr | None\nOptional AWS region (defaults to us-east-1).\n\n\n\n\n\n\n>>> source = S3Source(\n...     bucket=\"my-datasets\",\n...     keys=[\"train/shard-000.tar\", \"train/shard-001.tar\"],\n...     endpoint=\"https://abc123.r2.cloudflarestorage.com\",\n...     access_key=\"AKIAIOSFODNN7EXAMPLE\",\n...     secret_key=\"wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY\",\n... )\n>>> for shard_id, stream in source.shards:\n...     
process(stream)\n\n\n\n\n\n\nName\nDescription\n\n\n\n\nfrom_credentials\nCreate S3Source from a credentials dictionary.\n\n\nfrom_urls\nCreate S3Source from s3:// URLs.\n\n\nlist_shards\nReturn list of S3 URIs for the shards.\n\n\nopen_shard\nOpen a single shard by S3 URI.\n\n\n\n\n\nS3Source.from_credentials(credentials, bucket, keys)\nCreate S3Source from a credentials dictionary.\nAccepts the same credential format used by S3DataStore.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\ncredentials\ndict[str, str]\nDict with AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, and optionally AWS_ENDPOINT.\nrequired\n\n\nbucket\nstr\nS3 bucket name.\nrequired\n\n\nkeys\nlist[str]\nList of object keys.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\n'S3Source'\nConfigured S3Source.\n\n\n\n\n\n\n>>> creds = {\n...     \"AWS_ACCESS_KEY_ID\": \"...\",\n...     \"AWS_SECRET_ACCESS_KEY\": \"...\",\n...     \"AWS_ENDPOINT\": \"https://r2.example.com\",\n... }\n>>> source = S3Source.from_credentials(creds, \"my-bucket\", [\"data.tar\"])\n\n\n\n\nS3Source.from_urls(\n    urls,\n    *,\n    endpoint=None,\n    access_key=None,\n    secret_key=None,\n    region=None,\n)\nCreate S3Source from s3:// URLs.\nParses s3://bucket/key URLs and extracts bucket and keys. All URLs must be in the same bucket.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nurls\nlist[str]\nList of s3:// URLs.\nrequired\n\n\nendpoint\nstr | None\nOptional custom endpoint.\nNone\n\n\naccess_key\nstr | None\nOptional access key.\nNone\n\n\nsecret_key\nstr | None\nOptional secret key.\nNone\n\n\nregion\nstr | None\nOptional region.\nNone\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\n'S3Source'\nS3Source configured for the given URLs.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nValueError\nIf URLs are not valid s3:// URLs or span multiple buckets.\n\n\n\n\n\n\n>>> source = S3Source.from_urls(\n...     [\"s3://my-bucket/train-000.tar\", \"s3://my-bucket/train-001.tar\"],\n...    
 endpoint=\"https://r2.example.com\",\n... )\n\n\n\n\nS3Source.list_shards()\nReturn list of S3 URIs for the shards.\n\n\n\nS3Source.open_shard(shard_id)\nOpen a single shard by S3 URI.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nshard_id\nstr\nS3 URI of the shard (s3://bucket/key).\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nIO[bytes]\nStreamingBody for reading the object.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nKeyError\nIf shard_id is not in list_shards()."
                                                                                           },
                                                                                           {
                                                                                             "objectID": "api/S3Source.html#attributes",
                                                                                        @@ -1376,18 +1376,18 @@
                                                                                             "text": "Name\nType\nDescription\n\n\n\n\nbucket\nstr\nS3 bucket name.\n\n\nkeys\nlist[str]\nList of object keys (paths within bucket).\n\n\nendpoint\nstr | None\nOptional custom endpoint URL for S3-compatible services.\n\n\naccess_key\nstr | None\nOptional AWS access key ID.\n\n\nsecret_key\nstr | None\nOptional AWS secret access key.\n\n\nregion\nstr | None\nOptional AWS region (defaults to us-east-1)."
                                                                                           },
                                                                                           {
                                                                                        -    "objectID": "api/S3Source.html#example",
                                                                                        -    "href": "api/S3Source.html#example",
                                                                                        +    "objectID": "api/S3Source.html#examples",
                                                                                        +    "href": "api/S3Source.html#examples",
                                                                                             "title": "S3Source",
                                                                                             "section": "",
                                                                                        -    "text": "::\n>>> source = S3Source(\n...     bucket=\"my-datasets\",\n...     keys=[\"train/shard-000.tar\", \"train/shard-001.tar\"],\n...     endpoint=\"https://abc123.r2.cloudflarestorage.com\",\n...     access_key=\"AKIAIOSFODNN7EXAMPLE\",\n...     secret_key=\"wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY\",\n... )\n>>> for shard_id, stream in source.shards:\n...     process(stream)"
                                                                                        +    "text": ">>> source = S3Source(\n...     bucket=\"my-datasets\",\n...     keys=[\"train/shard-000.tar\", \"train/shard-001.tar\"],\n...     endpoint=\"https://abc123.r2.cloudflarestorage.com\",\n...     access_key=\"AKIAIOSFODNN7EXAMPLE\",\n...     secret_key=\"wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY\",\n... )\n>>> for shard_id, stream in source.shards:\n...     process(stream)"
                                                                                           },
                                                                                           {
                                                                                             "objectID": "api/S3Source.html#methods",
                                                                                             "href": "api/S3Source.html#methods",
                                                                                             "title": "S3Source",
                                                                                             "section": "",
                                                                                        -    "text": "Name\nDescription\n\n\n\n\nfrom_credentials\nCreate S3Source from a credentials dictionary.\n\n\nfrom_urls\nCreate S3Source from s3:// URLs.\n\n\nlist_shards\nReturn list of S3 URIs for the shards.\n\n\nopen_shard\nOpen a single shard by S3 URI.\n\n\n\n\n\nS3Source.from_credentials(credentials, bucket, keys)\nCreate S3Source from a credentials dictionary.\nAccepts the same credential format used by S3DataStore.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\ncredentials\ndict[str, str]\nDict with AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, and optionally AWS_ENDPOINT.\nrequired\n\n\nbucket\nstr\nS3 bucket name.\nrequired\n\n\nkeys\nlist[str]\nList of object keys.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\n'S3Source'\nConfigured S3Source.\n\n\n\n\n\n\n::\n>>> creds = {\n...     \"AWS_ACCESS_KEY_ID\": \"...\",\n...     \"AWS_SECRET_ACCESS_KEY\": \"...\",\n...     \"AWS_ENDPOINT\": \"https://r2.example.com\",\n... }\n>>> source = S3Source.from_credentials(creds, \"my-bucket\", [\"data.tar\"])\n\n\n\n\nS3Source.from_urls(\n    urls,\n    *,\n    endpoint=None,\n    access_key=None,\n    secret_key=None,\n    region=None,\n)\nCreate S3Source from s3:// URLs.\nParses s3://bucket/key URLs and extracts bucket and keys. All URLs must be in the same bucket.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nurls\nlist[str]\nList of s3:// URLs.\nrequired\n\n\nendpoint\nstr | None\nOptional custom endpoint.\nNone\n\n\naccess_key\nstr | None\nOptional access key.\nNone\n\n\nsecret_key\nstr | None\nOptional secret key.\nNone\n\n\nregion\nstr | None\nOptional region.\nNone\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\n'S3Source'\nS3Source configured for the given URLs.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nValueError\nIf URLs are not valid s3:// URLs or span multiple buckets.\n\n\n\n\n\n\n::\n>>> source = S3Source.from_urls(\n...  
   [\"s3://my-bucket/train-000.tar\", \"s3://my-bucket/train-001.tar\"],\n...     endpoint=\"https://r2.example.com\",\n... )\n\n\n\n\nS3Source.list_shards()\nReturn list of S3 URIs for the shards.\n\n\n\nS3Source.open_shard(shard_id)\nOpen a single shard by S3 URI.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nshard_id\nstr\nS3 URI of the shard (s3://bucket/key).\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nIO[bytes]\nStreamingBody for reading the object.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nKeyError\nIf shard_id is not in list_shards()."
                                                                                        +    "text": "Name\nDescription\n\n\n\n\nfrom_credentials\nCreate S3Source from a credentials dictionary.\n\n\nfrom_urls\nCreate S3Source from s3:// URLs.\n\n\nlist_shards\nReturn list of S3 URIs for the shards.\n\n\nopen_shard\nOpen a single shard by S3 URI.\n\n\n\n\n\nS3Source.from_credentials(credentials, bucket, keys)\nCreate S3Source from a credentials dictionary.\nAccepts the same credential format used by S3DataStore.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\ncredentials\ndict[str, str]\nDict with AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, and optionally AWS_ENDPOINT.\nrequired\n\n\nbucket\nstr\nS3 bucket name.\nrequired\n\n\nkeys\nlist[str]\nList of object keys.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\n'S3Source'\nConfigured S3Source.\n\n\n\n\n\n\n>>> creds = {\n...     \"AWS_ACCESS_KEY_ID\": \"...\",\n...     \"AWS_SECRET_ACCESS_KEY\": \"...\",\n...     \"AWS_ENDPOINT\": \"https://r2.example.com\",\n... }\n>>> source = S3Source.from_credentials(creds, \"my-bucket\", [\"data.tar\"])\n\n\n\n\nS3Source.from_urls(\n    urls,\n    *,\n    endpoint=None,\n    access_key=None,\n    secret_key=None,\n    region=None,\n)\nCreate S3Source from s3:// URLs.\nParses s3://bucket/key URLs and extracts bucket and keys. All URLs must be in the same bucket.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nurls\nlist[str]\nList of s3:// URLs.\nrequired\n\n\nendpoint\nstr | None\nOptional custom endpoint.\nNone\n\n\naccess_key\nstr | None\nOptional access key.\nNone\n\n\nsecret_key\nstr | None\nOptional secret key.\nNone\n\n\nregion\nstr | None\nOptional region.\nNone\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\n'S3Source'\nS3Source configured for the given URLs.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nValueError\nIf URLs are not valid s3:// URLs or span multiple buckets.\n\n\n\n\n\n\n>>> source = S3Source.from_urls(\n...     
[\"s3://my-bucket/train-000.tar\", \"s3://my-bucket/train-001.tar\"],\n...     endpoint=\"https://r2.example.com\",\n... )\n\n\n\n\nS3Source.list_shards()\nReturn list of S3 URIs for the shards.\n\n\n\nS3Source.open_shard(shard_id)\nOpen a single shard by S3 URI.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nshard_id\nstr\nS3 URI of the shard (s3://bucket/key).\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nIO[bytes]\nStreamingBody for reading the object.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nKeyError\nIf shard_id is not in list_shards()."
                                                                                           },
                                                                                           {
                                                                                             "objectID": "api/local.LocalDatasetEntry.html",
                                                                                        @@ -1415,7 +1415,7 @@
                                                                                             "href": "api/AbstractIndex.html",
                                                                                             "title": "AbstractIndex",
                                                                                             "section": "",
                                                                                        -    "text": "AbstractIndex()\nProtocol for index operations - implemented by LocalIndex and AtmosphereIndex.\nThis protocol defines the common interface for managing dataset metadata: - Publishing and retrieving schemas - Inserting and listing datasets - (Future) Publishing and retrieving lenses\nA single index can hold datasets of many different sample types. The sample type is tracked via schema references, not as a generic parameter on the index.\n\n\nSome index implementations support additional features: - data_store: An AbstractDataStore for reading/writing dataset shards. If present, load_dataset will use it for S3 credential resolution.\n\n\n\n::\n>>> def publish_and_list(index: AbstractIndex) -> None:\n...     # Publish schemas for different types\n...     schema1 = index.publish_schema(ImageSample, version=\"1.0.0\")\n...     schema2 = index.publish_schema(TextSample, version=\"1.0.0\")\n...\n...     # Insert datasets of different types\n...     index.insert_dataset(image_ds, name=\"images\")\n...     index.insert_dataset(text_ds, name=\"texts\")\n...\n...     # List all datasets (mixed types)\n...     for entry in index.list_datasets():\n...         
print(f\"{entry.name} -> {entry.schema_ref}\")\n\n\n\n\n\n\nName\nDescription\n\n\n\n\ndata_store\nOptional data store for reading/writing shards.\n\n\ndatasets\nLazily iterate over all dataset entries in this index.\n\n\nschemas\nLazily iterate over all schema records in this index.\n\n\n\n\n\n\n\n\n\nName\nDescription\n\n\n\n\ndecode_schema\nReconstruct a Python Packable type from a stored schema.\n\n\nget_dataset\nGet a dataset entry by name or reference.\n\n\nget_schema\nGet a schema record by reference.\n\n\ninsert_dataset\nInsert a dataset into the index.\n\n\nlist_datasets\nGet all dataset entries as a materialized list.\n\n\nlist_schemas\nGet all schema records as a materialized list.\n\n\npublish_schema\nPublish a schema for a sample type.\n\n\n\n\n\nAbstractIndex.decode_schema(ref)\nReconstruct a Python Packable type from a stored schema.\nThis method enables loading datasets without knowing the sample type ahead of time. The index retrieves the schema record and dynamically generates a Packable class matching the schema definition.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nref\nstr\nSchema reference string (local:// or at://).\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nType[Packable]\nA dynamically generated Packable class with fields matching\n\n\n\nType[Packable]\nthe schema definition. The class can be used with\n\n\n\nType[Packable]\nDataset[T] to load and iterate over samples.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nKeyError\nIf schema not found.\n\n\n\nValueError\nIf schema cannot be decoded (unsupported field types).\n\n\n\n\n\n\n::\n>>> entry = index.get_dataset(\"my-dataset\")\n>>> SampleType = index.decode_schema(entry.schema_ref)\n>>> ds = Dataset[SampleType](entry.data_urls[0])\n>>> for sample in ds.ordered():\n...     
print(sample)  # sample is instance of SampleType\n\n\n\n\nAbstractIndex.get_dataset(ref)\nGet a dataset entry by name or reference.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nref\nstr\nDataset name, path, or full reference string.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nIndexEntry\nIndexEntry for the dataset.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nKeyError\nIf dataset not found.\n\n\n\n\n\n\n\nAbstractIndex.get_schema(ref)\nGet a schema record by reference.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nref\nstr\nSchema reference string (local:// or at://).\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\ndict\nSchema record as a dictionary with fields like ‘name’, ‘version’,\n\n\n\ndict\n‘fields’, etc.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nKeyError\nIf schema not found.\n\n\n\n\n\n\n\nAbstractIndex.insert_dataset(ds, *, name, schema_ref=None, **kwargs)\nInsert a dataset into the index.\nThe sample type is inferred from ds.sample_type. If schema_ref is not provided, the schema may be auto-published based on the sample type.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nds\nDataset\nThe Dataset to register in the index (any sample type).\nrequired\n\n\nname\nstr\nHuman-readable name for the dataset.\nrequired\n\n\nschema_ref\nOptional[str]\nOptional explicit schema reference. 
If not provided, the schema may be auto-published or inferred from ds.sample_type.\nNone\n\n\n**kwargs\n\nAdditional backend-specific options.\n{}\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nIndexEntry\nIndexEntry for the inserted dataset.\n\n\n\n\n\n\n\nAbstractIndex.list_datasets()\nGet all dataset entries as a materialized list.\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nlist[IndexEntry]\nList of IndexEntry for each dataset.\n\n\n\n\n\n\n\nAbstractIndex.list_schemas()\nGet all schema records as a materialized list.\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nlist[dict]\nList of schema records as dictionaries.\n\n\n\n\n\n\n\nAbstractIndex.publish_schema(sample_type, *, version='1.0.0', **kwargs)\nPublish a schema for a sample type.\nThe sample_type is accepted as type rather than Type[Packable] to support @packable-decorated classes, which satisfy the Packable protocol at runtime but cannot be statically verified by type checkers.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nsample_type\ntype\nA Packable type (PackableSample subclass or @packable-decorated). Validated at runtime via the @runtime_checkable Packable protocol.\nrequired\n\n\nversion\nstr\nSemantic version string for the schema.\n'1.0.0'\n\n\n**kwargs\n\nAdditional backend-specific options.\n{}\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nstr\nSchema reference string:\n\n\n\nstr\n- Local: ‘local://schemas/{module.Class}@version’\n\n\n\nstr\n- Atmosphere: ‘at://did:plc:…/ac.foundation.dataset.sampleSchema/…’"
                                                                                        +    "text": "AbstractIndex()\nProtocol for index operations - implemented by LocalIndex and AtmosphereIndex.\nThis protocol defines the common interface for managing dataset metadata: - Publishing and retrieving schemas - Inserting and listing datasets - (Future) Publishing and retrieving lenses\nA single index can hold datasets of many different sample types. The sample type is tracked via schema references, not as a generic parameter on the index.\n\n\nSome index implementations support additional features: - data_store: An AbstractDataStore for reading/writing dataset shards. If present, load_dataset will use it for S3 credential resolution.\n\n\n\n>>> def publish_and_list(index: AbstractIndex) -> None:\n...     # Publish schemas for different types\n...     schema1 = index.publish_schema(ImageSample, version=\"1.0.0\")\n...     schema2 = index.publish_schema(TextSample, version=\"1.0.0\")\n...\n...     # Insert datasets of different types\n...     index.insert_dataset(image_ds, name=\"images\")\n...     index.insert_dataset(text_ds, name=\"texts\")\n...\n...     # List all datasets (mixed types)\n...     for entry in index.list_datasets():\n...         
print(f\"{entry.name} -> {entry.schema_ref}\")\n\n\n\n\n\n\nName\nDescription\n\n\n\n\ndata_store\nOptional data store for reading/writing shards.\n\n\ndatasets\nLazily iterate over all dataset entries in this index.\n\n\nschemas\nLazily iterate over all schema records in this index.\n\n\n\n\n\n\n\n\n\nName\nDescription\n\n\n\n\ndecode_schema\nReconstruct a Python Packable type from a stored schema.\n\n\nget_dataset\nGet a dataset entry by name or reference.\n\n\nget_schema\nGet a schema record by reference.\n\n\ninsert_dataset\nInsert a dataset into the index.\n\n\nlist_datasets\nGet all dataset entries as a materialized list.\n\n\nlist_schemas\nGet all schema records as a materialized list.\n\n\npublish_schema\nPublish a schema for a sample type.\n\n\n\n\n\nAbstractIndex.decode_schema(ref)\nReconstruct a Python Packable type from a stored schema.\nThis method enables loading datasets without knowing the sample type ahead of time. The index retrieves the schema record and dynamically generates a Packable class matching the schema definition.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nref\nstr\nSchema reference string (local:// or at://).\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nType[Packable]\nA dynamically generated Packable class with fields matching\n\n\n\nType[Packable]\nthe schema definition. The class can be used with\n\n\n\nType[Packable]\nDataset[T] to load and iterate over samples.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nKeyError\nIf schema not found.\n\n\n\nValueError\nIf schema cannot be decoded (unsupported field types).\n\n\n\n\n\n\n>>> entry = index.get_dataset(\"my-dataset\")\n>>> SampleType = index.decode_schema(entry.schema_ref)\n>>> ds = Dataset[SampleType](entry.data_urls[0])\n>>> for sample in ds.ordered():\n...     
print(sample)  # sample is instance of SampleType\n\n\n\n\nAbstractIndex.get_dataset(ref)\nGet a dataset entry by name or reference.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nref\nstr\nDataset name, path, or full reference string.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nIndexEntry\nIndexEntry for the dataset.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nKeyError\nIf dataset not found.\n\n\n\n\n\n\n\nAbstractIndex.get_schema(ref)\nGet a schema record by reference.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nref\nstr\nSchema reference string (local:// or at://).\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\ndict\nSchema record as a dictionary with fields like ‘name’, ‘version’,\n\n\n\ndict\n‘fields’, etc.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nKeyError\nIf schema not found.\n\n\n\n\n\n\n\nAbstractIndex.insert_dataset(ds, *, name, schema_ref=None, **kwargs)\nInsert a dataset into the index.\nThe sample type is inferred from ds.sample_type. If schema_ref is not provided, the schema may be auto-published based on the sample type.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nds\nDataset\nThe Dataset to register in the index (any sample type).\nrequired\n\n\nname\nstr\nHuman-readable name for the dataset.\nrequired\n\n\nschema_ref\nOptional[str]\nOptional explicit schema reference. 
If not provided, the schema may be auto-published or inferred from ds.sample_type.\nNone\n\n\n**kwargs\n\nAdditional backend-specific options.\n{}\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nIndexEntry\nIndexEntry for the inserted dataset.\n\n\n\n\n\n\n\nAbstractIndex.list_datasets()\nGet all dataset entries as a materialized list.\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nlist[IndexEntry]\nList of IndexEntry for each dataset.\n\n\n\n\n\n\n\nAbstractIndex.list_schemas()\nGet all schema records as a materialized list.\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nlist[dict]\nList of schema records as dictionaries.\n\n\n\n\n\n\n\nAbstractIndex.publish_schema(sample_type, *, version='1.0.0', **kwargs)\nPublish a schema for a sample type.\nThe sample_type is accepted as type rather than Type[Packable] to support @packable-decorated classes, which satisfy the Packable protocol at runtime but cannot be statically verified by type checkers.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nsample_type\ntype\nA Packable type (PackableSample subclass or @packable-decorated). Validated at runtime via the @runtime_checkable Packable protocol.\nrequired\n\n\nversion\nstr\nSemantic version string for the schema.\n'1.0.0'\n\n\n**kwargs\n\nAdditional backend-specific options.\n{}\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nstr\nSchema reference string:\n\n\n\nstr\n- Local: ‘local://schemas/{module.Class}@version’\n\n\n\nstr\n- Atmosphere: ‘at://did:plc:…/ac.foundation.dataset.sampleSchema/…’"
                                                                                           },
                                                                                           {
                                                                                             "objectID": "api/AbstractIndex.html#optional-extensions",
                                                                                        @@ -1425,11 +1425,11 @@
                                                                                             "text": "Some index implementations support additional features: - data_store: An AbstractDataStore for reading/writing dataset shards. If present, load_dataset will use it for S3 credential resolution."
                                                                                           },
                                                                                           {
                                                                                        -    "objectID": "api/AbstractIndex.html#example",
                                                                                        -    "href": "api/AbstractIndex.html#example",
                                                                                        +    "objectID": "api/AbstractIndex.html#examples",
                                                                                        +    "href": "api/AbstractIndex.html#examples",
                                                                                             "title": "AbstractIndex",
                                                                                             "section": "",
                                                                                        -    "text": "::\n>>> def publish_and_list(index: AbstractIndex) -> None:\n...     # Publish schemas for different types\n...     schema1 = index.publish_schema(ImageSample, version=\"1.0.0\")\n...     schema2 = index.publish_schema(TextSample, version=\"1.0.0\")\n...\n...     # Insert datasets of different types\n...     index.insert_dataset(image_ds, name=\"images\")\n...     index.insert_dataset(text_ds, name=\"texts\")\n...\n...     # List all datasets (mixed types)\n...     for entry in index.list_datasets():\n...         print(f\"{entry.name} -> {entry.schema_ref}\")"
                                                                                        +    "text": ">>> def publish_and_list(index: AbstractIndex) -> None:\n...     # Publish schemas for different types\n...     schema1 = index.publish_schema(ImageSample, version=\"1.0.0\")\n...     schema2 = index.publish_schema(TextSample, version=\"1.0.0\")\n...\n...     # Insert datasets of different types\n...     index.insert_dataset(image_ds, name=\"images\")\n...     index.insert_dataset(text_ds, name=\"texts\")\n...\n...     # List all datasets (mixed types)\n...     for entry in index.list_datasets():\n...         print(f\"{entry.name} -> {entry.schema_ref}\")"
                                                                                           },
                                                                                           {
                                                                                             "objectID": "api/AbstractIndex.html#attributes",
                                                                                        @@ -1443,7 +1443,7 @@
                                                                                             "href": "api/AbstractIndex.html#methods",
                                                                                             "title": "AbstractIndex",
                                                                                             "section": "",
                                                                                        -    "text": "Name\nDescription\n\n\n\n\ndecode_schema\nReconstruct a Python Packable type from a stored schema.\n\n\nget_dataset\nGet a dataset entry by name or reference.\n\n\nget_schema\nGet a schema record by reference.\n\n\ninsert_dataset\nInsert a dataset into the index.\n\n\nlist_datasets\nGet all dataset entries as a materialized list.\n\n\nlist_schemas\nGet all schema records as a materialized list.\n\n\npublish_schema\nPublish a schema for a sample type.\n\n\n\n\n\nAbstractIndex.decode_schema(ref)\nReconstruct a Python Packable type from a stored schema.\nThis method enables loading datasets without knowing the sample type ahead of time. The index retrieves the schema record and dynamically generates a Packable class matching the schema definition.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nref\nstr\nSchema reference string (local:// or at://).\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nType[Packable]\nA dynamically generated Packable class with fields matching\n\n\n\nType[Packable]\nthe schema definition. The class can be used with\n\n\n\nType[Packable]\nDataset[T] to load and iterate over samples.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nKeyError\nIf schema not found.\n\n\n\nValueError\nIf schema cannot be decoded (unsupported field types).\n\n\n\n\n\n\n::\n>>> entry = index.get_dataset(\"my-dataset\")\n>>> SampleType = index.decode_schema(entry.schema_ref)\n>>> ds = Dataset[SampleType](entry.data_urls[0])\n>>> for sample in ds.ordered():\n...     
print(sample)  # sample is instance of SampleType\n\n\n\n\nAbstractIndex.get_dataset(ref)\nGet a dataset entry by name or reference.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nref\nstr\nDataset name, path, or full reference string.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nIndexEntry\nIndexEntry for the dataset.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nKeyError\nIf dataset not found.\n\n\n\n\n\n\n\nAbstractIndex.get_schema(ref)\nGet a schema record by reference.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nref\nstr\nSchema reference string (local:// or at://).\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\ndict\nSchema record as a dictionary with fields like ‘name’, ‘version’,\n\n\n\ndict\n‘fields’, etc.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nKeyError\nIf schema not found.\n\n\n\n\n\n\n\nAbstractIndex.insert_dataset(ds, *, name, schema_ref=None, **kwargs)\nInsert a dataset into the index.\nThe sample type is inferred from ds.sample_type. If schema_ref is not provided, the schema may be auto-published based on the sample type.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nds\nDataset\nThe Dataset to register in the index (any sample type).\nrequired\n\n\nname\nstr\nHuman-readable name for the dataset.\nrequired\n\n\nschema_ref\nOptional[str]\nOptional explicit schema reference. 
If not provided, the schema may be auto-published or inferred from ds.sample_type.\nNone\n\n\n**kwargs\n\nAdditional backend-specific options.\n{}\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nIndexEntry\nIndexEntry for the inserted dataset.\n\n\n\n\n\n\n\nAbstractIndex.list_datasets()\nGet all dataset entries as a materialized list.\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nlist[IndexEntry]\nList of IndexEntry for each dataset.\n\n\n\n\n\n\n\nAbstractIndex.list_schemas()\nGet all schema records as a materialized list.\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nlist[dict]\nList of schema records as dictionaries.\n\n\n\n\n\n\n\nAbstractIndex.publish_schema(sample_type, *, version='1.0.0', **kwargs)\nPublish a schema for a sample type.\nThe sample_type is accepted as type rather than Type[Packable] to support @packable-decorated classes, which satisfy the Packable protocol at runtime but cannot be statically verified by type checkers.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nsample_type\ntype\nA Packable type (PackableSample subclass or @packable-decorated). Validated at runtime via the @runtime_checkable Packable protocol.\nrequired\n\n\nversion\nstr\nSemantic version string for the schema.\n'1.0.0'\n\n\n**kwargs\n\nAdditional backend-specific options.\n{}\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nstr\nSchema reference string:\n\n\n\nstr\n- Local: ‘local://schemas/{module.Class}@version’\n\n\n\nstr\n- Atmosphere: ‘at://did:plc:…/ac.foundation.dataset.sampleSchema/…’"
                                                                                        +    "text": "Name\nDescription\n\n\n\n\ndecode_schema\nReconstruct a Python Packable type from a stored schema.\n\n\nget_dataset\nGet a dataset entry by name or reference.\n\n\nget_schema\nGet a schema record by reference.\n\n\ninsert_dataset\nInsert a dataset into the index.\n\n\nlist_datasets\nGet all dataset entries as a materialized list.\n\n\nlist_schemas\nGet all schema records as a materialized list.\n\n\npublish_schema\nPublish a schema for a sample type.\n\n\n\n\n\nAbstractIndex.decode_schema(ref)\nReconstruct a Python Packable type from a stored schema.\nThis method enables loading datasets without knowing the sample type ahead of time. The index retrieves the schema record and dynamically generates a Packable class matching the schema definition.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nref\nstr\nSchema reference string (local:// or at://).\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nType[Packable]\nA dynamically generated Packable class with fields matching\n\n\n\nType[Packable]\nthe schema definition. The class can be used with\n\n\n\nType[Packable]\nDataset[T] to load and iterate over samples.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nKeyError\nIf schema not found.\n\n\n\nValueError\nIf schema cannot be decoded (unsupported field types).\n\n\n\n\n\n\n>>> entry = index.get_dataset(\"my-dataset\")\n>>> SampleType = index.decode_schema(entry.schema_ref)\n>>> ds = Dataset[SampleType](entry.data_urls[0])\n>>> for sample in ds.ordered():\n...     
print(sample)  # sample is instance of SampleType\n\n\n\n\nAbstractIndex.get_dataset(ref)\nGet a dataset entry by name or reference.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nref\nstr\nDataset name, path, or full reference string.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nIndexEntry\nIndexEntry for the dataset.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nKeyError\nIf dataset not found.\n\n\n\n\n\n\n\nAbstractIndex.get_schema(ref)\nGet a schema record by reference.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nref\nstr\nSchema reference string (local:// or at://).\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\ndict\nSchema record as a dictionary with fields like ‘name’, ‘version’,\n\n\n\ndict\n‘fields’, etc.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nKeyError\nIf schema not found.\n\n\n\n\n\n\n\nAbstractIndex.insert_dataset(ds, *, name, schema_ref=None, **kwargs)\nInsert a dataset into the index.\nThe sample type is inferred from ds.sample_type. If schema_ref is not provided, the schema may be auto-published based on the sample type.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nds\nDataset\nThe Dataset to register in the index (any sample type).\nrequired\n\n\nname\nstr\nHuman-readable name for the dataset.\nrequired\n\n\nschema_ref\nOptional[str]\nOptional explicit schema reference. 
If not provided, the schema may be auto-published or inferred from ds.sample_type.\nNone\n\n\n**kwargs\n\nAdditional backend-specific options.\n{}\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nIndexEntry\nIndexEntry for the inserted dataset.\n\n\n\n\n\n\n\nAbstractIndex.list_datasets()\nGet all dataset entries as a materialized list.\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nlist[IndexEntry]\nList of IndexEntry for each dataset.\n\n\n\n\n\n\n\nAbstractIndex.list_schemas()\nGet all schema records as a materialized list.\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nlist[dict]\nList of schema records as dictionaries.\n\n\n\n\n\n\n\nAbstractIndex.publish_schema(sample_type, *, version='1.0.0', **kwargs)\nPublish a schema for a sample type.\nThe sample_type is accepted as type rather than Type[Packable] to support @packable-decorated classes, which satisfy the Packable protocol at runtime but cannot be statically verified by type checkers.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nsample_type\ntype\nA Packable type (PackableSample subclass or @packable-decorated). Validated at runtime via the @runtime_checkable Packable protocol.\nrequired\n\n\nversion\nstr\nSemantic version string for the schema.\n'1.0.0'\n\n\n**kwargs\n\nAdditional backend-specific options.\n{}\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nstr\nSchema reference string:\n\n\n\nstr\n- Local: ‘local://schemas/{module.Class}@version’\n\n\n\nstr\n- Atmosphere: ‘at://did:plc:…/ac.foundation.dataset.sampleSchema/…’"
                                                                                           },
                                                                                           {
                                                                                             "objectID": "api/AtmosphereIndexEntry.html",
                                                                                        @@ -1464,14 +1464,14 @@
                                                                                             "href": "api/LensPublisher.html",
                                                                                             "title": "LensPublisher",
                                                                                             "section": "",
                                                                                        -    "text": "atmosphere.LensPublisher(client)\nPublishes Lens transformation records to ATProto.\nThis class creates lens records that reference source and target schemas and point to the transformation code in a git repository.\n\n\n::\n>>> @atdata.lens\n... def my_lens(source: SourceType) -> TargetType:\n...     return TargetType(field=source.other_field)\n>>>\n>>> client = AtmosphereClient()\n>>> client.login(\"handle\", \"password\")\n>>>\n>>> publisher = LensPublisher(client)\n>>> uri = publisher.publish(\n...     name=\"my_lens\",\n...     source_schema_uri=\"at://did:plc:abc/ac.foundation.dataset.sampleSchema/source\",\n...     target_schema_uri=\"at://did:plc:abc/ac.foundation.dataset.sampleSchema/target\",\n...     code_repository=\"https://github.com/user/repo\",\n...     code_commit=\"abc123def456\",\n...     getter_path=\"mymodule.lenses:my_lens\",\n...     putter_path=\"mymodule.lenses:my_lens_putter\",\n... )\n\n\n\nLens code is stored as references to git repositories rather than inline code. This prevents arbitrary code execution from ATProto records. 
Users must manually install and trust lens implementations.\n\n\n\n\n\n\nName\nDescription\n\n\n\n\npublish\nPublish a lens transformation record to ATProto.\n\n\npublish_from_lens\nPublish a lens record from an existing Lens object.\n\n\n\n\n\natmosphere.LensPublisher.publish(\n    name,\n    source_schema_uri,\n    target_schema_uri,\n    description=None,\n    code_repository=None,\n    code_commit=None,\n    getter_path=None,\n    putter_path=None,\n    rkey=None,\n)\nPublish a lens transformation record to ATProto.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nname\nstr\nHuman-readable lens name.\nrequired\n\n\nsource_schema_uri\nstr\nAT URI of the source schema.\nrequired\n\n\ntarget_schema_uri\nstr\nAT URI of the target schema.\nrequired\n\n\ndescription\nOptional[str]\nWhat this transformation does.\nNone\n\n\ncode_repository\nOptional[str]\nGit repository URL containing the lens code.\nNone\n\n\ncode_commit\nOptional[str]\nGit commit hash for reproducibility.\nNone\n\n\ngetter_path\nOptional[str]\nModule path to the getter function (e.g., ‘mymodule.lenses:my_getter’).\nNone\n\n\nputter_path\nOptional[str]\nModule path to the putter function (e.g., ‘mymodule.lenses:my_putter’).\nNone\n\n\nrkey\nOptional[str]\nOptional explicit record key.\nNone\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nAtUri\nThe AT URI of the created lens record.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nValueError\nIf code references are incomplete.\n\n\n\n\n\n\n\natmosphere.LensPublisher.publish_from_lens(\n    lens_obj,\n    *,\n    name,\n    source_schema_uri,\n    target_schema_uri,\n    code_repository,\n    code_commit,\n    description=None,\n    rkey=None,\n)\nPublish a lens record from an existing Lens object.\nThis method extracts the getter and putter function names from the Lens object and publishes a record referencing them.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nlens_obj\nLens\nThe Lens object to 
publish.\nrequired\n\n\nname\nstr\nHuman-readable lens name.\nrequired\n\n\nsource_schema_uri\nstr\nAT URI of the source schema.\nrequired\n\n\ntarget_schema_uri\nstr\nAT URI of the target schema.\nrequired\n\n\ncode_repository\nstr\nGit repository URL.\nrequired\n\n\ncode_commit\nstr\nGit commit hash.\nrequired\n\n\ndescription\nOptional[str]\nWhat this transformation does.\nNone\n\n\nrkey\nOptional[str]\nOptional explicit record key.\nNone\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nAtUri\nThe AT URI of the created lens record."
                                                                                        +    "text": "atmosphere.LensPublisher(client)\nPublishes Lens transformation records to ATProto.\nThis class creates lens records that reference source and target schemas and point to the transformation code in a git repository.\n\n\n>>> @atdata.lens\n... def my_lens(source: SourceType) -> TargetType:\n...     return TargetType(field=source.other_field)\n>>>\n>>> client = AtmosphereClient()\n>>> client.login(\"handle\", \"password\")\n>>>\n>>> publisher = LensPublisher(client)\n>>> uri = publisher.publish(\n...     name=\"my_lens\",\n...     source_schema_uri=\"at://did:plc:abc/ac.foundation.dataset.sampleSchema/source\",\n...     target_schema_uri=\"at://did:plc:abc/ac.foundation.dataset.sampleSchema/target\",\n...     code_repository=\"https://github.com/user/repo\",\n...     code_commit=\"abc123def456\",\n...     getter_path=\"mymodule.lenses:my_lens\",\n...     putter_path=\"mymodule.lenses:my_lens_putter\",\n... )\n\n\n\nLens code is stored as references to git repositories rather than inline code. This prevents arbitrary code execution from ATProto records. 
Users must manually install and trust lens implementations.\n\n\n\n\n\n\nName\nDescription\n\n\n\n\npublish\nPublish a lens transformation record to ATProto.\n\n\npublish_from_lens\nPublish a lens record from an existing Lens object.\n\n\n\n\n\natmosphere.LensPublisher.publish(\n    name,\n    source_schema_uri,\n    target_schema_uri,\n    description=None,\n    code_repository=None,\n    code_commit=None,\n    getter_path=None,\n    putter_path=None,\n    rkey=None,\n)\nPublish a lens transformation record to ATProto.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nname\nstr\nHuman-readable lens name.\nrequired\n\n\nsource_schema_uri\nstr\nAT URI of the source schema.\nrequired\n\n\ntarget_schema_uri\nstr\nAT URI of the target schema.\nrequired\n\n\ndescription\nOptional[str]\nWhat this transformation does.\nNone\n\n\ncode_repository\nOptional[str]\nGit repository URL containing the lens code.\nNone\n\n\ncode_commit\nOptional[str]\nGit commit hash for reproducibility.\nNone\n\n\ngetter_path\nOptional[str]\nModule path to the getter function (e.g., ‘mymodule.lenses:my_getter’).\nNone\n\n\nputter_path\nOptional[str]\nModule path to the putter function (e.g., ‘mymodule.lenses:my_putter’).\nNone\n\n\nrkey\nOptional[str]\nOptional explicit record key.\nNone\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nAtUri\nThe AT URI of the created lens record.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nValueError\nIf code references are incomplete.\n\n\n\n\n\n\n\natmosphere.LensPublisher.publish_from_lens(\n    lens_obj,\n    *,\n    name,\n    source_schema_uri,\n    target_schema_uri,\n    code_repository,\n    code_commit,\n    description=None,\n    rkey=None,\n)\nPublish a lens record from an existing Lens object.\nThis method extracts the getter and putter function names from the Lens object and publishes a record referencing them.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nlens_obj\nLens\nThe Lens object to 
publish.\nrequired\n\n\nname\nstr\nHuman-readable lens name.\nrequired\n\n\nsource_schema_uri\nstr\nAT URI of the source schema.\nrequired\n\n\ntarget_schema_uri\nstr\nAT URI of the target schema.\nrequired\n\n\ncode_repository\nstr\nGit repository URL.\nrequired\n\n\ncode_commit\nstr\nGit commit hash.\nrequired\n\n\ndescription\nOptional[str]\nWhat this transformation does.\nNone\n\n\nrkey\nOptional[str]\nOptional explicit record key.\nNone\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nAtUri\nThe AT URI of the created lens record."
                                                                                           },
                                                                                           {
                                                                                        -    "objectID": "api/LensPublisher.html#example",
                                                                                        -    "href": "api/LensPublisher.html#example",
                                                                                        +    "objectID": "api/LensPublisher.html#examples",
                                                                                        +    "href": "api/LensPublisher.html#examples",
                                                                                             "title": "LensPublisher",
                                                                                             "section": "",
                                                                                        -    "text": "::\n>>> @atdata.lens\n... def my_lens(source: SourceType) -> TargetType:\n...     return TargetType(field=source.other_field)\n>>>\n>>> client = AtmosphereClient()\n>>> client.login(\"handle\", \"password\")\n>>>\n>>> publisher = LensPublisher(client)\n>>> uri = publisher.publish(\n...     name=\"my_lens\",\n...     source_schema_uri=\"at://did:plc:abc/ac.foundation.dataset.sampleSchema/source\",\n...     target_schema_uri=\"at://did:plc:abc/ac.foundation.dataset.sampleSchema/target\",\n...     code_repository=\"https://github.com/user/repo\",\n...     code_commit=\"abc123def456\",\n...     getter_path=\"mymodule.lenses:my_lens\",\n...     putter_path=\"mymodule.lenses:my_lens_putter\",\n... )"
                                                                                        +    "text": ">>> @atdata.lens\n... def my_lens(source: SourceType) -> TargetType:\n...     return TargetType(field=source.other_field)\n>>>\n>>> client = AtmosphereClient()\n>>> client.login(\"handle\", \"password\")\n>>>\n>>> publisher = LensPublisher(client)\n>>> uri = publisher.publish(\n...     name=\"my_lens\",\n...     source_schema_uri=\"at://did:plc:abc/ac.foundation.dataset.sampleSchema/source\",\n...     target_schema_uri=\"at://did:plc:abc/ac.foundation.dataset.sampleSchema/target\",\n...     code_repository=\"https://github.com/user/repo\",\n...     code_commit=\"abc123def456\",\n...     getter_path=\"mymodule.lenses:my_lens\",\n...     putter_path=\"mymodule.lenses:my_lens_putter\",\n... )"
                                                                                           },
                                                                                           {
                                                                                             "objectID": "api/LensPublisher.html#security-note",
                                                                                        @@ -1492,7 +1492,7 @@
                                                                                             "href": "api/SampleBatch.html",
                                                                                             "title": "SampleBatch",
                                                                                             "section": "",
                                                                                        -    "text": "SampleBatch(samples)\nA batch of samples with automatic attribute aggregation.\nThis class wraps a sequence of samples and provides magic __getattr__ access to aggregate sample attributes. When you access an attribute that exists on the sample type, it automatically aggregates values across all samples in the batch.\nNDArray fields are stacked into a numpy array with a batch dimension. Other fields are aggregated into a list.\n\n\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nDT\n\nThe sample type, must derive from PackableSample.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\nsamples\n\nThe list of sample instances in this batch.\n\n\n\n\n\n\n::\n>>> batch = SampleBatch[MyData]([sample1, sample2, sample3])\n>>> batch.embeddings  # Returns stacked numpy array of shape (3, ...)\n>>> batch.names  # Returns list of names\n\n\n\nThis class uses Python’s __orig_class__ mechanism to extract the type parameter at runtime. Instances must be created using the subscripted syntax SampleBatch[MyType](samples) rather than calling the constructor directly with an unsubscripted class."
                                                                                        +    "text": "SampleBatch(samples)\nA batch of samples with automatic attribute aggregation.\nThis class wraps a sequence of samples and provides magic __getattr__ access to aggregate sample attributes. When you access an attribute that exists on the sample type, it automatically aggregates values across all samples in the batch.\nNDArray fields are stacked into a numpy array with a batch dimension. Other fields are aggregated into a list.\n\n\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nDT\n\nThe sample type, must derive from PackableSample.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\nsamples\n\nThe list of sample instances in this batch.\n\n\n\n\n\n\n>>> batch = SampleBatch[MyData]([sample1, sample2, sample3])\n>>> batch.embeddings  # Returns stacked numpy array of shape (3, ...)\n>>> batch.names  # Returns list of names\n\n\n\nThis class uses Python’s __orig_class__ mechanism to extract the type parameter at runtime. Instances must be created using the subscripted syntax SampleBatch[MyType](samples) rather than calling the constructor directly with an unsubscripted class."
                                                                                           },
                                                                                           {
                                                                                             "objectID": "api/SampleBatch.html#parameters",
                                                                                        @@ -1509,11 +1509,11 @@
                                                                                             "text": "Name\nType\nDescription\n\n\n\n\nsamples\n\nThe list of sample instances in this batch."
                                                                                           },
                                                                                           {
                                                                                        -    "objectID": "api/SampleBatch.html#example",
                                                                                        -    "href": "api/SampleBatch.html#example",
                                                                                        +    "objectID": "api/SampleBatch.html#examples",
                                                                                        +    "href": "api/SampleBatch.html#examples",
                                                                                             "title": "SampleBatch",
                                                                                             "section": "",
                                                                                        -    "text": "::\n>>> batch = SampleBatch[MyData]([sample1, sample2, sample3])\n>>> batch.embeddings  # Returns stacked numpy array of shape (3, ...)\n>>> batch.names  # Returns list of names"
                                                                                        +    "text": ">>> batch = SampleBatch[MyData]([sample1, sample2, sample3])\n>>> batch.embeddings  # Returns stacked numpy array of shape (3, ...)\n>>> batch.names  # Returns list of names"
                                                                                           },
                                                                                           {
                                                                                             "objectID": "api/SampleBatch.html#note",
                                                                                        @@ -1637,7 +1637,7 @@
                                                                                             "href": "api/packable.html",
                                                                                             "title": "packable",
                                                                                             "section": "",
                                                                                        -    "text": "packable(cls)\nDecorator to convert a regular class into a PackableSample.\nThis decorator transforms a class into a dataclass that inherits from PackableSample, enabling automatic msgpack serialization/deserialization with special handling for NDArray fields.\nThe resulting class satisfies the Packable protocol, making it compatible with all atdata APIs that accept packable types (e.g., publish_schema, lens transformations, etc.).\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\ncls\ntype[_T]\nThe class to convert. Should have type annotations for its fields.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\ntype[_T]\nA new dataclass that inherits from PackableSample with the same\n\n\n\ntype[_T]\nname and annotations as the original class. The class satisfies the\n\n\n\ntype[_T]\nPackable protocol and can be used with Type[Packable] signatures.\n\n\n\n\n\n\nThis is a test of the functionality::\n@packable\nclass MyData:\n    name: str\n    values: NDArray\n\nsample = MyData(name=\"test\", values=np.array([1, 2, 3]))\nbytes_data = sample.packed\nrestored = MyData.from_bytes(bytes_data)\n\n# Works with Packable-typed APIs\nindex.publish_schema(MyData, version=\"1.0.0\")  # Type-safe"
                                                                                        +    "text": "packable(cls)\nDecorator to convert a regular class into a PackableSample.\nThis decorator transforms a class into a dataclass that inherits from PackableSample, enabling automatic msgpack serialization/deserialization with special handling for NDArray fields.\nThe resulting class satisfies the Packable protocol, making it compatible with all atdata APIs that accept packable types (e.g., publish_schema, lens transformations, etc.).\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\ncls\ntype[_T]\nThe class to convert. Should have type annotations for its fields.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\ntype[_T]\nA new dataclass that inherits from PackableSample with the same\n\n\n\ntype[_T]\nname and annotations as the original class. The class satisfies the\n\n\n\ntype[_T]\nPackable protocol and can be used with Type[Packable] signatures.\n\n\n\n\n\n\n>>> @packable\n... class MyData:\n...     name: str\n...     values: NDArray\n...\n>>> sample = MyData(name=\"test\", values=np.array([1, 2, 3]))\n>>> bytes_data = sample.packed\n>>> restored = MyData.from_bytes(bytes_data)\n>>>\n>>> # Works with Packable-typed APIs\n>>> index.publish_schema(MyData, version=\"1.0.0\")  # Type-safe"
                                                                                           },
                                                                                           {
                                                                                             "objectID": "api/packable.html#parameters",
                                                                                        @@ -1658,21 +1658,21 @@
                                                                                             "href": "api/packable.html#examples",
                                                                                             "title": "packable",
                                                                                             "section": "",
                                                                                        -    "text": "This is a test of the functionality::\n@packable\nclass MyData:\n    name: str\n    values: NDArray\n\nsample = MyData(name=\"test\", values=np.array([1, 2, 3]))\nbytes_data = sample.packed\nrestored = MyData.from_bytes(bytes_data)\n\n# Works with Packable-typed APIs\nindex.publish_schema(MyData, version=\"1.0.0\")  # Type-safe"
                                                                                        +    "text": ">>> @packable\n... class MyData:\n...     name: str\n...     values: NDArray\n...\n>>> sample = MyData(name=\"test\", values=np.array([1, 2, 3]))\n>>> bytes_data = sample.packed\n>>> restored = MyData.from_bytes(bytes_data)\n>>>\n>>> # Works with Packable-typed APIs\n>>> index.publish_schema(MyData, version=\"1.0.0\")  # Type-safe"
                                                                                           },
                                                                                           {
                                                                                             "objectID": "api/Packable-protocol.html",
                                                                                             "href": "api/Packable-protocol.html",
                                                                                             "title": "Packable",
                                                                                             "section": "",
                                                                                        -    "text": "Packable()\nStructural protocol for packable sample types.\nThis protocol allows classes decorated with @packable to be recognized as valid types for lens transformations and schema operations, even though the decorator doesn’t change the class’s nominal type at static analysis time.\nBoth PackableSample subclasses and @packable-decorated classes satisfy this protocol structurally.\nThe protocol captures the full interface needed for: - Lens type transformations (as_wds, from_data) - Schema publishing (class introspection via dataclass fields) - Serialization/deserialization (packed, from_bytes)\n\n\n::\n>>> @packable\n... class MySample:\n...     name: str\n...     value: int\n...\n>>> def process(sample_type: Type[Packable]) -> None:\n...     # Type checker knows sample_type has from_bytes, packed, etc.\n...     instance = sample_type.from_bytes(data)\n...     print(instance.packed)\n\n\n\n\n\n\nName\nDescription\n\n\n\n\nas_wds\nWebDataset-compatible representation with key and msgpack.\n\n\npacked\nPack this sample’s data into msgpack bytes.\n\n\n\n\n\n\n\n\n\nName\nDescription\n\n\n\n\nfrom_bytes\nCreate instance from raw msgpack bytes.\n\n\nfrom_data\nCreate instance from unpacked msgpack data dictionary.\n\n\n\n\n\nPackable.from_bytes(bs)\nCreate instance from raw msgpack bytes.\n\n\n\nPackable.from_data(data)\nCreate instance from unpacked msgpack data dictionary."
                                                                                        +    "text": "Packable()\nStructural protocol for packable sample types.\nThis protocol allows classes decorated with @packable to be recognized as valid types for lens transformations and schema operations, even though the decorator doesn’t change the class’s nominal type at static analysis time.\nBoth PackableSample subclasses and @packable-decorated classes satisfy this protocol structurally.\nThe protocol captures the full interface needed for: - Lens type transformations (as_wds, from_data) - Schema publishing (class introspection via dataclass fields) - Serialization/deserialization (packed, from_bytes)\n\n\n>>> @packable\n... class MySample:\n...     name: str\n...     value: int\n...\n>>> def process(sample_type: Type[Packable]) -> None:\n...     # Type checker knows sample_type has from_bytes, packed, etc.\n...     instance = sample_type.from_bytes(data)\n...     print(instance.packed)\n\n\n\n\n\n\nName\nDescription\n\n\n\n\nas_wds\nWebDataset-compatible representation with key and msgpack.\n\n\npacked\nPack this sample’s data into msgpack bytes.\n\n\n\n\n\n\n\n\n\nName\nDescription\n\n\n\n\nfrom_bytes\nCreate instance from raw msgpack bytes.\n\n\nfrom_data\nCreate instance from unpacked msgpack data dictionary.\n\n\n\n\n\nPackable.from_bytes(bs)\nCreate instance from raw msgpack bytes.\n\n\n\nPackable.from_data(data)\nCreate instance from unpacked msgpack data dictionary."
                                                                                           },
                                                                                           {
                                                                                        -    "objectID": "api/Packable-protocol.html#example",
                                                                                        -    "href": "api/Packable-protocol.html#example",
                                                                                        +    "objectID": "api/Packable-protocol.html#examples",
                                                                                        +    "href": "api/Packable-protocol.html#examples",
                                                                                             "title": "Packable",
                                                                                             "section": "",
                                                                                        -    "text": "::\n>>> @packable\n... class MySample:\n...     name: str\n...     value: int\n...\n>>> def process(sample_type: Type[Packable]) -> None:\n...     # Type checker knows sample_type has from_bytes, packed, etc.\n...     instance = sample_type.from_bytes(data)\n...     print(instance.packed)"
                                                                                        +    "text": ">>> @packable\n... class MySample:\n...     name: str\n...     value: int\n...\n>>> def process(sample_type: Type[Packable]) -> None:\n...     # Type checker knows sample_type has from_bytes, packed, etc.\n...     instance = sample_type.from_bytes(data)\n...     print(instance.packed)"
                                                                                           },
                                                                                           {
                                                                                             "objectID": "api/Packable-protocol.html#attributes",
                                                                                        @@ -1693,14 +1693,14 @@
                                                                                             "href": "api/AtUri.html",
                                                                                             "title": "AtUri",
                                                                                             "section": "",
                                                                                        -    "text": "atmosphere.AtUri(authority, collection, rkey)\nParsed AT Protocol URI.\nAT URIs follow the format: at:////\n\n\n::\n>>> uri = AtUri.parse(\"at://did:plc:abc123/ac.foundation.dataset.sampleSchema/xyz\")\n>>> uri.authority\n'did:plc:abc123'\n>>> uri.collection\n'ac.foundation.dataset.sampleSchema'\n>>> uri.rkey\n'xyz'\n\n\n\n\n\n\nName\nDescription\n\n\n\n\nauthority\nThe DID or handle of the repository owner.\n\n\ncollection\nThe NSID of the record collection.\n\n\nrkey\nThe record key within the collection.\n\n\n\n\n\n\n\n\n\nName\nDescription\n\n\n\n\nparse\nParse an AT URI string into components.\n\n\n\n\n\natmosphere.AtUri.parse(uri)\nParse an AT URI string into components.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nuri\nstr\nAT URI string in format at://<authority>/<collection>/<rkey>\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nAtUri\nParsed AtUri instance.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nValueError\nIf the URI format is invalid."
                                                                                        +    "text": "atmosphere.AtUri(authority, collection, rkey)\nParsed AT Protocol URI.\nAT URIs follow the format: at:////\n\n\n>>> uri = AtUri.parse(\"at://did:plc:abc123/ac.foundation.dataset.sampleSchema/xyz\")\n>>> uri.authority\n'did:plc:abc123'\n>>> uri.collection\n'ac.foundation.dataset.sampleSchema'\n>>> uri.rkey\n'xyz'\n\n\n\n\n\n\nName\nDescription\n\n\n\n\nauthority\nThe DID or handle of the repository owner.\n\n\ncollection\nThe NSID of the record collection.\n\n\nrkey\nThe record key within the collection.\n\n\n\n\n\n\n\n\n\nName\nDescription\n\n\n\n\nparse\nParse an AT URI string into components.\n\n\n\n\n\natmosphere.AtUri.parse(uri)\nParse an AT URI string into components.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nuri\nstr\nAT URI string in format at://<authority>/<collection>/<rkey>\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nAtUri\nParsed AtUri instance.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nValueError\nIf the URI format is invalid."
                                                                                           },
                                                                                           {
                                                                                        -    "objectID": "api/AtUri.html#example",
                                                                                        -    "href": "api/AtUri.html#example",
                                                                                        +    "objectID": "api/AtUri.html#examples",
                                                                                        +    "href": "api/AtUri.html#examples",
                                                                                             "title": "AtUri",
                                                                                             "section": "",
                                                                                        -    "text": "::\n>>> uri = AtUri.parse(\"at://did:plc:abc123/ac.foundation.dataset.sampleSchema/xyz\")\n>>> uri.authority\n'did:plc:abc123'\n>>> uri.collection\n'ac.foundation.dataset.sampleSchema'\n>>> uri.rkey\n'xyz'"
                                                                                        +    "text": ">>> uri = AtUri.parse(\"at://did:plc:abc123/ac.foundation.dataset.sampleSchema/xyz\")\n>>> uri.authority\n'did:plc:abc123'\n>>> uri.collection\n'ac.foundation.dataset.sampleSchema'\n>>> uri.rkey\n'xyz'"
                                                                                           },
                                                                                           {
                                                                                             "objectID": "api/AtUri.html#attributes",
                                                                                        @@ -1742,14 +1742,14 @@
                                                                                             "href": "api/AbstractDataStore.html",
                                                                                             "title": "AbstractDataStore",
                                                                                             "section": "",
                                                                                        -    "text": "AbstractDataStore()\nProtocol for data storage operations.\nThis protocol abstracts over different storage backends for dataset data: - S3DataStore: S3-compatible object storage - PDSBlobStore: ATProto PDS blob storage (future)\nThe separation of index (metadata) from data store (actual files) allows flexible deployment: local index with S3 storage, atmosphere index with S3 storage, or atmosphere index with PDS blobs.\n\n\n::\n>>> store = S3DataStore(credentials, bucket=\"my-bucket\")\n>>> urls = store.write_shards(dataset, prefix=\"training/v1\")\n>>> print(urls)\n['s3://my-bucket/training/v1/shard-000000.tar', ...]\n\n\n\n\n\n\nName\nDescription\n\n\n\n\nread_url\nResolve a storage URL for reading.\n\n\nsupports_streaming\nWhether this store supports streaming reads.\n\n\nwrite_shards\nWrite dataset shards to storage.\n\n\n\n\n\nAbstractDataStore.read_url(url)\nResolve a storage URL for reading.\nSome storage backends may need to transform URLs (e.g., signing S3 URLs or resolving blob references). 
This method returns a URL that can be used directly with WebDataset.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nurl\nstr\nStorage URL to resolve.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nstr\nWebDataset-compatible URL for reading.\n\n\n\n\n\n\n\nAbstractDataStore.supports_streaming()\nWhether this store supports streaming reads.\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nbool\nTrue if the store supports efficient streaming (like S3),\n\n\n\nbool\nFalse if data must be fully downloaded first.\n\n\n\n\n\n\n\nAbstractDataStore.write_shards(ds, *, prefix, **kwargs)\nWrite dataset shards to storage.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nds\nDataset\nThe Dataset to write.\nrequired\n\n\nprefix\nstr\nPath prefix for the shards (e.g., ‘datasets/mnist/v1’).\nrequired\n\n\n**kwargs\n\nBackend-specific options (e.g., maxcount for shard size).\n{}\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nlist[str]\nList of URLs for the written shards, suitable for use with\n\n\n\nlist[str]\nWebDataset or atdata.Dataset()."
                                                                                        +    "text": "AbstractDataStore()\nProtocol for data storage operations.\nThis protocol abstracts over different storage backends for dataset data: - S3DataStore: S3-compatible object storage - PDSBlobStore: ATProto PDS blob storage (future)\nThe separation of index (metadata) from data store (actual files) allows flexible deployment: local index with S3 storage, atmosphere index with S3 storage, or atmosphere index with PDS blobs.\n\n\n>>> store = S3DataStore(credentials, bucket=\"my-bucket\")\n>>> urls = store.write_shards(dataset, prefix=\"training/v1\")\n>>> print(urls)\n['s3://my-bucket/training/v1/shard-000000.tar', ...]\n\n\n\n\n\n\nName\nDescription\n\n\n\n\nread_url\nResolve a storage URL for reading.\n\n\nsupports_streaming\nWhether this store supports streaming reads.\n\n\nwrite_shards\nWrite dataset shards to storage.\n\n\n\n\n\nAbstractDataStore.read_url(url)\nResolve a storage URL for reading.\nSome storage backends may need to transform URLs (e.g., signing S3 URLs or resolving blob references). 
This method returns a URL that can be used directly with WebDataset.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nurl\nstr\nStorage URL to resolve.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nstr\nWebDataset-compatible URL for reading.\n\n\n\n\n\n\n\nAbstractDataStore.supports_streaming()\nWhether this store supports streaming reads.\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nbool\nTrue if the store supports efficient streaming (like S3),\n\n\n\nbool\nFalse if data must be fully downloaded first.\n\n\n\n\n\n\n\nAbstractDataStore.write_shards(ds, *, prefix, **kwargs)\nWrite dataset shards to storage.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nds\nDataset\nThe Dataset to write.\nrequired\n\n\nprefix\nstr\nPath prefix for the shards (e.g., ‘datasets/mnist/v1’).\nrequired\n\n\n**kwargs\n\nBackend-specific options (e.g., maxcount for shard size).\n{}\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nlist[str]\nList of URLs for the written shards, suitable for use with\n\n\n\nlist[str]\nWebDataset or atdata.Dataset()."
                                                                                           },
                                                                                           {
                                                                                        -    "objectID": "api/AbstractDataStore.html#example",
                                                                                        -    "href": "api/AbstractDataStore.html#example",
                                                                                        +    "objectID": "api/AbstractDataStore.html#examples",
                                                                                        +    "href": "api/AbstractDataStore.html#examples",
                                                                                             "title": "AbstractDataStore",
                                                                                             "section": "",
                                                                                        -    "text": "::\n>>> store = S3DataStore(credentials, bucket=\"my-bucket\")\n>>> urls = store.write_shards(dataset, prefix=\"training/v1\")\n>>> print(urls)\n['s3://my-bucket/training/v1/shard-000000.tar', ...]"
                                                                                        +    "text": ">>> store = S3DataStore(credentials, bucket=\"my-bucket\")\n>>> urls = store.write_shards(dataset, prefix=\"training/v1\")\n>>> print(urls)\n['s3://my-bucket/training/v1/shard-000000.tar', ...]"
                                                                                           },
                                                                                           {
                                                                                             "objectID": "api/AbstractDataStore.html#methods",
                                                                                        @@ -1763,7 +1763,7 @@
                                                                                             "href": "api/Dataset.html",
                                                                                             "title": "Dataset",
                                                                                             "section": "",
                                                                                        -    "text": "Dataset(source=None, metadata_url=None, *, url=None)\nA typed dataset built on WebDataset with lens transformations.\nThis class wraps WebDataset tar archives and provides type-safe iteration over samples of a specific PackableSample type. Samples are stored as msgpack-serialized data within WebDataset shards.\nThe dataset supports: - Ordered and shuffled iteration - Automatic batching with SampleBatch - Type transformations via the lens system (as_type()) - Export to parquet format\n\n\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nST\n\nThe sample type for this dataset, must derive from PackableSample.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\nurl\n\nWebDataset brace-notation URL for the tar file(s).\n\n\n\n\n\n\n::\n>>> ds = Dataset[MyData](\"path/to/data-{000000..000009}.tar\")\n>>> for sample in ds.ordered(batch_size=32):\n...     # sample is SampleBatch[MyData] with batch_size samples\n...     embeddings = sample.embeddings  # shape: (32, ...)\n...\n>>> # Transform to a different view\n>>> ds_view = ds.as_type(MyDataView)\n\n\n\nThis class uses Python’s __orig_class__ mechanism to extract the type parameter at runtime. 
Instances must be created using the subscripted syntax Dataset[MyType](url) rather than calling the constructor directly with an unsubscripted class.\n\n\n\n\n\n\nName\nDescription\n\n\n\n\nas_type\nView this dataset through a different sample type using a registered lens.\n\n\nlist_shards\nGet list of individual dataset shards.\n\n\nordered\nIterate over the dataset in order\n\n\nshuffled\nIterate over the dataset in random order.\n\n\nto_parquet\nExport dataset contents to parquet format.\n\n\nwrap\nWrap a raw msgpack sample into the appropriate dataset-specific type.\n\n\nwrap_batch\nWrap a batch of raw msgpack samples into a typed SampleBatch.\n\n\n\n\n\nDataset.as_type(other)\nView this dataset through a different sample type using a registered lens.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nother\nType[RT]\nThe target sample type to transform into. Must be a type derived from PackableSample.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nDataset[RT]\nA new Dataset instance that yields samples of type other\n\n\n\nDataset[RT]\nby applying the appropriate lens transformation from the global\n\n\n\nDataset[RT]\nLensNetwork registry.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nValueError\nIf no registered lens exists between the current sample type and the target type.\n\n\n\n\n\n\n\nDataset.list_shards()\nGet list of individual dataset shards.\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nlist[str]\nA full (non-lazy) list of the individual tar files within the\n\n\n\nlist[str]\nsource WebDataset.\n\n\n\n\n\n\n\nDataset.ordered(batch_size=None)\nIterate over the dataset in order\n\n\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nbatch_size (\n\nobj:int, optional): The size of iterated batches. Default: None (unbatched). 
If None, iterates over one sample at a time with no batch dimension.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nIterable[ST]\nobj:webdataset.DataPipeline A data pipeline that iterates over\n\n\n\nIterable[ST]\nthe dataset in its original sample order\n\n\n\n\n\n\n\nDataset.shuffled(buffer_shards=100, buffer_samples=10000, batch_size=None)\nIterate over the dataset in random order.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nbuffer_shards\nint\nNumber of shards to buffer for shuffling at the shard level. Larger values increase randomness but use more memory. Default: 100.\n100\n\n\nbuffer_samples\nint\nNumber of samples to buffer for shuffling within shards. Larger values increase randomness but use more memory. Default: 10,000.\n10000\n\n\nbatch_size\nint | None\nThe size of iterated batches. Default: None (unbatched). If None, iterates over one sample at a time with no batch dimension.\nNone\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nIterable[ST]\nA WebDataset data pipeline that iterates over the dataset in\n\n\n\nIterable[ST]\nrandomized order. If batch_size is not None, yields\n\n\n\nIterable[ST]\nSampleBatch[ST] instances; otherwise yields individual ST\n\n\n\nIterable[ST]\nsamples.\n\n\n\n\n\n\n\nDataset.to_parquet(path, sample_map=None, maxcount=None, **kwargs)\nExport dataset contents to parquet format.\nConverts all samples to a pandas DataFrame and saves to parquet file(s). Useful for interoperability with data analysis tools.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\npath\nPathlike\nOutput path for the parquet file. If maxcount is specified, files are named {stem}-{segment:06d}.parquet.\nrequired\n\n\nsample_map\nOptional[SampleExportMap]\nOptional function to convert samples to dictionaries. Defaults to dataclasses.asdict.\nNone\n\n\nmaxcount\nOptional[int]\nIf specified, split output into multiple files with at most this many samples each. 
Recommended for large datasets.\nNone\n\n\n**kwargs\n\nAdditional arguments passed to pandas.DataFrame.to_parquet(). Common options include compression, index, engine.\n{}\n\n\n\n\n\n\nMemory Usage: When maxcount=None (default), this method loads the entire dataset into memory as a pandas DataFrame before writing. For large datasets, this can cause memory exhaustion.\nFor datasets larger than available RAM, always specify maxcount::\n# Safe for large datasets - processes in chunks\nds.to_parquet(\"output.parquet\", maxcount=10000)\nThis creates multiple parquet files: output-000000.parquet, output-000001.parquet, etc.\n\n\n\n::\n>>> ds = Dataset[MySample](\"data.tar\")\n>>> # Small dataset - load all at once\n>>> ds.to_parquet(\"output.parquet\")\n>>>\n>>> # Large dataset - process in chunks\n>>> ds.to_parquet(\"output.parquet\", maxcount=50000)\n\n\n\n\nDataset.wrap(sample)\nWrap a raw msgpack sample into the appropriate dataset-specific type.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nsample\nWDSRawSample\nA dictionary containing at minimum a 'msgpack' key with serialized sample bytes.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nST\nA deserialized sample of type ST, optionally transformed through\n\n\n\nST\na lens if as_type() was called.\n\n\n\n\n\n\n\nDataset.wrap_batch(batch)\nWrap a batch of raw msgpack samples into a typed SampleBatch.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nbatch\nWDSRawBatch\nA dictionary containing a 'msgpack' key with a list of serialized sample bytes.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nSampleBatch[ST]\nA SampleBatch[ST] containing deserialized samples, optionally\n\n\n\nSampleBatch[ST]\ntransformed through a lens if as_type() was called.\n\n\n\n\n\n\nThis implementation deserializes samples one at a time, then aggregates them into a batch."
                                                                                        +    "text": "Dataset(source=None, metadata_url=None, *, url=None)\nA typed dataset built on WebDataset with lens transformations.\nThis class wraps WebDataset tar archives and provides type-safe iteration over samples of a specific PackableSample type. Samples are stored as msgpack-serialized data within WebDataset shards.\nThe dataset supports: - Ordered and shuffled iteration - Automatic batching with SampleBatch - Type transformations via the lens system (as_type()) - Export to parquet format\n\n\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nST\n\nThe sample type for this dataset, must derive from PackableSample.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\nurl\n\nWebDataset brace-notation URL for the tar file(s).\n\n\n\n\n\n\n>>> ds = Dataset[MyData](\"path/to/data-{000000..000009}.tar\")\n>>> for sample in ds.ordered(batch_size=32):\n...     # sample is SampleBatch[MyData] with batch_size samples\n...     embeddings = sample.embeddings  # shape: (32, ...)\n...\n>>> # Transform to a different view\n>>> ds_view = ds.as_type(MyDataView)\n\n\n\nThis class uses Python’s __orig_class__ mechanism to extract the type parameter at runtime. 
Instances must be created using the subscripted syntax Dataset[MyType](url) rather than calling the constructor directly with an unsubscripted class.\n\n\n\n\n\n\nName\nDescription\n\n\n\n\nas_type\nView this dataset through a different sample type using a registered lens.\n\n\nlist_shards\nGet list of individual dataset shards.\n\n\nordered\nIterate over the dataset in order\n\n\nshuffled\nIterate over the dataset in random order.\n\n\nto_parquet\nExport dataset contents to parquet format.\n\n\nwrap\nWrap a raw msgpack sample into the appropriate dataset-specific type.\n\n\nwrap_batch\nWrap a batch of raw msgpack samples into a typed SampleBatch.\n\n\n\n\n\nDataset.as_type(other)\nView this dataset through a different sample type using a registered lens.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nother\nType[RT]\nThe target sample type to transform into. Must be a type derived from PackableSample.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nDataset[RT]\nA new Dataset instance that yields samples of type other\n\n\n\nDataset[RT]\nby applying the appropriate lens transformation from the global\n\n\n\nDataset[RT]\nLensNetwork registry.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nValueError\nIf no registered lens exists between the current sample type and the target type.\n\n\n\n\n\n\n\nDataset.list_shards()\nGet list of individual dataset shards.\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nlist[str]\nA full (non-lazy) list of the individual tar files within the\n\n\n\nlist[str]\nsource WebDataset.\n\n\n\n\n\n\n\nDataset.ordered(batch_size=None)\nIterate over the dataset in order\n\n\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nbatch_size (\n\nobj:int, optional): The size of iterated batches. Default: None (unbatched). 
If None, iterates over one sample at a time with no batch dimension.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nIterable[ST]\nobj:webdataset.DataPipeline A data pipeline that iterates over\n\n\n\nIterable[ST]\nthe dataset in its original sample order\n\n\n\n\n\n\n\nDataset.shuffled(buffer_shards=100, buffer_samples=10000, batch_size=None)\nIterate over the dataset in random order.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nbuffer_shards\nint\nNumber of shards to buffer for shuffling at the shard level. Larger values increase randomness but use more memory. Default: 100.\n100\n\n\nbuffer_samples\nint\nNumber of samples to buffer for shuffling within shards. Larger values increase randomness but use more memory. Default: 10,000.\n10000\n\n\nbatch_size\nint | None\nThe size of iterated batches. Default: None (unbatched). If None, iterates over one sample at a time with no batch dimension.\nNone\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nIterable[ST]\nA WebDataset data pipeline that iterates over the dataset in\n\n\n\nIterable[ST]\nrandomized order. If batch_size is not None, yields\n\n\n\nIterable[ST]\nSampleBatch[ST] instances; otherwise yields individual ST\n\n\n\nIterable[ST]\nsamples.\n\n\n\n\n\n\n\nDataset.to_parquet(path, sample_map=None, maxcount=None, **kwargs)\nExport dataset contents to parquet format.\nConverts all samples to a pandas DataFrame and saves to parquet file(s). Useful for interoperability with data analysis tools.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\npath\nPathlike\nOutput path for the parquet file. If maxcount is specified, files are named {stem}-{segment:06d}.parquet.\nrequired\n\n\nsample_map\nOptional[SampleExportMap]\nOptional function to convert samples to dictionaries. Defaults to dataclasses.asdict.\nNone\n\n\nmaxcount\nOptional[int]\nIf specified, split output into multiple files with at most this many samples each. 
Recommended for large datasets.\nNone\n\n\n**kwargs\n\nAdditional arguments passed to pandas.DataFrame.to_parquet(). Common options include compression, index, engine.\n{}\n\n\n\n\n\n\nMemory Usage: When maxcount=None (default), this method loads the entire dataset into memory as a pandas DataFrame before writing. For large datasets, this can cause memory exhaustion.\nFor datasets larger than available RAM, always specify maxcount::\n# Safe for large datasets - processes in chunks\nds.to_parquet(\"output.parquet\", maxcount=10000)\nThis creates multiple parquet files: output-000000.parquet, output-000001.parquet, etc.\n\n\n\n>>> ds = Dataset[MySample](\"data.tar\")\n>>> # Small dataset - load all at once\n>>> ds.to_parquet(\"output.parquet\")\n>>>\n>>> # Large dataset - process in chunks\n>>> ds.to_parquet(\"output.parquet\", maxcount=50000)\n\n\n\n\nDataset.wrap(sample)\nWrap a raw msgpack sample into the appropriate dataset-specific type.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nsample\nWDSRawSample\nA dictionary containing at minimum a 'msgpack' key with serialized sample bytes.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nST\nA deserialized sample of type ST, optionally transformed through\n\n\n\nST\na lens if as_type() was called.\n\n\n\n\n\n\n\nDataset.wrap_batch(batch)\nWrap a batch of raw msgpack samples into a typed SampleBatch.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nbatch\nWDSRawBatch\nA dictionary containing a 'msgpack' key with a list of serialized sample bytes.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nSampleBatch[ST]\nA SampleBatch[ST] containing deserialized samples, optionally\n\n\n\nSampleBatch[ST]\ntransformed through a lens if as_type() was called.\n\n\n\n\n\n\nThis implementation deserializes samples one at a time, then aggregates them into a batch."
                                                                                           },
                                                                                           {
                                                                                             "objectID": "api/Dataset.html#parameters",
                                                                                        @@ -1780,11 +1780,11 @@
                                                                                             "text": "Name\nType\nDescription\n\n\n\n\nurl\n\nWebDataset brace-notation URL for the tar file(s)."
                                                                                           },
                                                                                           {
                                                                                        -    "objectID": "api/Dataset.html#example",
                                                                                        -    "href": "api/Dataset.html#example",
                                                                                        +    "objectID": "api/Dataset.html#examples",
                                                                                        +    "href": "api/Dataset.html#examples",
                                                                                             "title": "Dataset",
                                                                                             "section": "",
                                                                                        -    "text": "::\n>>> ds = Dataset[MyData](\"path/to/data-{000000..000009}.tar\")\n>>> for sample in ds.ordered(batch_size=32):\n...     # sample is SampleBatch[MyData] with batch_size samples\n...     embeddings = sample.embeddings  # shape: (32, ...)\n...\n>>> # Transform to a different view\n>>> ds_view = ds.as_type(MyDataView)"
                                                                                        +    "text": ">>> ds = Dataset[MyData](\"path/to/data-{000000..000009}.tar\")\n>>> for sample in ds.ordered(batch_size=32):\n...     # sample is SampleBatch[MyData] with batch_size samples\n...     embeddings = sample.embeddings  # shape: (32, ...)\n...\n>>> # Transform to a different view\n>>> ds_view = ds.as_type(MyDataView)"
                                                                                           },
                                                                                           {
                                                                                             "objectID": "api/Dataset.html#note",
                                                                                        @@ -1798,14 +1798,14 @@
                                                                                             "href": "api/Dataset.html#methods",
                                                                                             "title": "Dataset",
                                                                                             "section": "",
                                                                                        -    "text": "Name\nDescription\n\n\n\n\nas_type\nView this dataset through a different sample type using a registered lens.\n\n\nlist_shards\nGet list of individual dataset shards.\n\n\nordered\nIterate over the dataset in order\n\n\nshuffled\nIterate over the dataset in random order.\n\n\nto_parquet\nExport dataset contents to parquet format.\n\n\nwrap\nWrap a raw msgpack sample into the appropriate dataset-specific type.\n\n\nwrap_batch\nWrap a batch of raw msgpack samples into a typed SampleBatch.\n\n\n\n\n\nDataset.as_type(other)\nView this dataset through a different sample type using a registered lens.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nother\nType[RT]\nThe target sample type to transform into. Must be a type derived from PackableSample.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nDataset[RT]\nA new Dataset instance that yields samples of type other\n\n\n\nDataset[RT]\nby applying the appropriate lens transformation from the global\n\n\n\nDataset[RT]\nLensNetwork registry.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nValueError\nIf no registered lens exists between the current sample type and the target type.\n\n\n\n\n\n\n\nDataset.list_shards()\nGet list of individual dataset shards.\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nlist[str]\nA full (non-lazy) list of the individual tar files within the\n\n\n\nlist[str]\nsource WebDataset.\n\n\n\n\n\n\n\nDataset.ordered(batch_size=None)\nIterate over the dataset in order\n\n\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nbatch_size (\n\nobj:int, optional): The size of iterated batches. Default: None (unbatched). 
If None, iterates over one sample at a time with no batch dimension.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nIterable[ST]\nobj:webdataset.DataPipeline A data pipeline that iterates over\n\n\n\nIterable[ST]\nthe dataset in its original sample order\n\n\n\n\n\n\n\nDataset.shuffled(buffer_shards=100, buffer_samples=10000, batch_size=None)\nIterate over the dataset in random order.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nbuffer_shards\nint\nNumber of shards to buffer for shuffling at the shard level. Larger values increase randomness but use more memory. Default: 100.\n100\n\n\nbuffer_samples\nint\nNumber of samples to buffer for shuffling within shards. Larger values increase randomness but use more memory. Default: 10,000.\n10000\n\n\nbatch_size\nint | None\nThe size of iterated batches. Default: None (unbatched). If None, iterates over one sample at a time with no batch dimension.\nNone\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nIterable[ST]\nA WebDataset data pipeline that iterates over the dataset in\n\n\n\nIterable[ST]\nrandomized order. If batch_size is not None, yields\n\n\n\nIterable[ST]\nSampleBatch[ST] instances; otherwise yields individual ST\n\n\n\nIterable[ST]\nsamples.\n\n\n\n\n\n\n\nDataset.to_parquet(path, sample_map=None, maxcount=None, **kwargs)\nExport dataset contents to parquet format.\nConverts all samples to a pandas DataFrame and saves to parquet file(s). Useful for interoperability with data analysis tools.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\npath\nPathlike\nOutput path for the parquet file. If maxcount is specified, files are named {stem}-{segment:06d}.parquet.\nrequired\n\n\nsample_map\nOptional[SampleExportMap]\nOptional function to convert samples to dictionaries. Defaults to dataclasses.asdict.\nNone\n\n\nmaxcount\nOptional[int]\nIf specified, split output into multiple files with at most this many samples each. 
Recommended for large datasets.\nNone\n\n\n**kwargs\n\nAdditional arguments passed to pandas.DataFrame.to_parquet(). Common options include compression, index, engine.\n{}\n\n\n\n\n\n\nMemory Usage: When maxcount=None (default), this method loads the entire dataset into memory as a pandas DataFrame before writing. For large datasets, this can cause memory exhaustion.\nFor datasets larger than available RAM, always specify maxcount::\n# Safe for large datasets - processes in chunks\nds.to_parquet(\"output.parquet\", maxcount=10000)\nThis creates multiple parquet files: output-000000.parquet, output-000001.parquet, etc.\n\n\n\n::\n>>> ds = Dataset[MySample](\"data.tar\")\n>>> # Small dataset - load all at once\n>>> ds.to_parquet(\"output.parquet\")\n>>>\n>>> # Large dataset - process in chunks\n>>> ds.to_parquet(\"output.parquet\", maxcount=50000)\n\n\n\n\nDataset.wrap(sample)\nWrap a raw msgpack sample into the appropriate dataset-specific type.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nsample\nWDSRawSample\nA dictionary containing at minimum a 'msgpack' key with serialized sample bytes.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nST\nA deserialized sample of type ST, optionally transformed through\n\n\n\nST\na lens if as_type() was called.\n\n\n\n\n\n\n\nDataset.wrap_batch(batch)\nWrap a batch of raw msgpack samples into a typed SampleBatch.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nbatch\nWDSRawBatch\nA dictionary containing a 'msgpack' key with a list of serialized sample bytes.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nSampleBatch[ST]\nA SampleBatch[ST] containing deserialized samples, optionally\n\n\n\nSampleBatch[ST]\ntransformed through a lens if as_type() was called.\n\n\n\n\n\n\nThis implementation deserializes samples one at a time, then aggregates them into a batch."
                                                                                        +    "text": "Name\nDescription\n\n\n\n\nas_type\nView this dataset through a different sample type using a registered lens.\n\n\nlist_shards\nGet list of individual dataset shards.\n\n\nordered\nIterate over the dataset in order\n\n\nshuffled\nIterate over the dataset in random order.\n\n\nto_parquet\nExport dataset contents to parquet format.\n\n\nwrap\nWrap a raw msgpack sample into the appropriate dataset-specific type.\n\n\nwrap_batch\nWrap a batch of raw msgpack samples into a typed SampleBatch.\n\n\n\n\n\nDataset.as_type(other)\nView this dataset through a different sample type using a registered lens.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nother\nType[RT]\nThe target sample type to transform into. Must be a type derived from PackableSample.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nDataset[RT]\nA new Dataset instance that yields samples of type other\n\n\n\nDataset[RT]\nby applying the appropriate lens transformation from the global\n\n\n\nDataset[RT]\nLensNetwork registry.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nValueError\nIf no registered lens exists between the current sample type and the target type.\n\n\n\n\n\n\n\nDataset.list_shards()\nGet list of individual dataset shards.\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nlist[str]\nA full (non-lazy) list of the individual tar files within the\n\n\n\nlist[str]\nsource WebDataset.\n\n\n\n\n\n\n\nDataset.ordered(batch_size=None)\nIterate over the dataset in order\n\n\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nbatch_size (\n\nobj:int, optional): The size of iterated batches. Default: None (unbatched). 
If None, iterates over one sample at a time with no batch dimension.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nIterable[ST]\nobj:webdataset.DataPipeline A data pipeline that iterates over\n\n\n\nIterable[ST]\nthe dataset in its original sample order\n\n\n\n\n\n\n\nDataset.shuffled(buffer_shards=100, buffer_samples=10000, batch_size=None)\nIterate over the dataset in random order.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nbuffer_shards\nint\nNumber of shards to buffer for shuffling at the shard level. Larger values increase randomness but use more memory. Default: 100.\n100\n\n\nbuffer_samples\nint\nNumber of samples to buffer for shuffling within shards. Larger values increase randomness but use more memory. Default: 10,000.\n10000\n\n\nbatch_size\nint | None\nThe size of iterated batches. Default: None (unbatched). If None, iterates over one sample at a time with no batch dimension.\nNone\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nIterable[ST]\nA WebDataset data pipeline that iterates over the dataset in\n\n\n\nIterable[ST]\nrandomized order. If batch_size is not None, yields\n\n\n\nIterable[ST]\nSampleBatch[ST] instances; otherwise yields individual ST\n\n\n\nIterable[ST]\nsamples.\n\n\n\n\n\n\n\nDataset.to_parquet(path, sample_map=None, maxcount=None, **kwargs)\nExport dataset contents to parquet format.\nConverts all samples to a pandas DataFrame and saves to parquet file(s). Useful for interoperability with data analysis tools.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\npath\nPathlike\nOutput path for the parquet file. If maxcount is specified, files are named {stem}-{segment:06d}.parquet.\nrequired\n\n\nsample_map\nOptional[SampleExportMap]\nOptional function to convert samples to dictionaries. Defaults to dataclasses.asdict.\nNone\n\n\nmaxcount\nOptional[int]\nIf specified, split output into multiple files with at most this many samples each. 
Recommended for large datasets.\nNone\n\n\n**kwargs\n\nAdditional arguments passed to pandas.DataFrame.to_parquet(). Common options include compression, index, engine.\n{}\n\n\n\n\n\n\nMemory Usage: When maxcount=None (default), this method loads the entire dataset into memory as a pandas DataFrame before writing. For large datasets, this can cause memory exhaustion.\nFor datasets larger than available RAM, always specify maxcount::\n# Safe for large datasets - processes in chunks\nds.to_parquet(\"output.parquet\", maxcount=10000)\nThis creates multiple parquet files: output-000000.parquet, output-000001.parquet, etc.\n\n\n\n>>> ds = Dataset[MySample](\"data.tar\")\n>>> # Small dataset - load all at once\n>>> ds.to_parquet(\"output.parquet\")\n>>>\n>>> # Large dataset - process in chunks\n>>> ds.to_parquet(\"output.parquet\", maxcount=50000)\n\n\n\n\nDataset.wrap(sample)\nWrap a raw msgpack sample into the appropriate dataset-specific type.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nsample\nWDSRawSample\nA dictionary containing at minimum a 'msgpack' key with serialized sample bytes.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nST\nA deserialized sample of type ST, optionally transformed through\n\n\n\nST\na lens if as_type() was called.\n\n\n\n\n\n\n\nDataset.wrap_batch(batch)\nWrap a batch of raw msgpack samples into a typed SampleBatch.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nbatch\nWDSRawBatch\nA dictionary containing a 'msgpack' key with a list of serialized sample bytes.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nSampleBatch[ST]\nA SampleBatch[ST] containing deserialized samples, optionally\n\n\n\nSampleBatch[ST]\ntransformed through a lens if as_type() was called.\n\n\n\n\n\n\nThis implementation deserializes samples one at a time, then aggregates them into a batch."
                                                                                           },
                                                                                           {
                                                                                             "objectID": "api/local.Index.html",
                                                                                             "href": "api/local.Index.html",
                                                                                             "title": "local.Index",
                                                                                             "section": "",
                                                                                        -    "text": "local.Index(\n    redis=None,\n    data_store=None,\n    auto_stubs=False,\n    stub_dir=None,\n    **kwargs,\n)\nRedis-backed index for tracking datasets in a repository.\nImplements the AbstractIndex protocol. Maintains a registry of LocalDatasetEntry objects in Redis, allowing enumeration and lookup of stored datasets.\nWhen initialized with a data_store, insert_dataset() will write dataset shards to storage before indexing. Without a data_store, insert_dataset() only indexes existing URLs.\n\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n_redis\n\nRedis connection for index storage.\n\n\n_data_store\n\nOptional AbstractDataStore for writing dataset shards.\n\n\n\n\n\n\n\n\n\nName\nDescription\n\n\n\n\nadd_entry\nAdd a dataset to the index.\n\n\nclear_stubs\nRemove all auto-generated stub files.\n\n\ndecode_schema\nReconstruct a Python PackableSample type from a stored schema.\n\n\ndecode_schema_as\nDecode a schema with explicit type hint for IDE support.\n\n\nget_dataset\nGet a dataset entry by name (AbstractIndex protocol).\n\n\nget_entry\nGet an entry by its CID.\n\n\nget_entry_by_name\nGet an entry by its human-readable name.\n\n\nget_import_path\nGet the import path for a schema’s generated module.\n\n\nget_schema\nGet a schema record by reference (AbstractIndex protocol).\n\n\nget_schema_record\nGet a schema record as LocalSchemaRecord object.\n\n\ninsert_dataset\nInsert a dataset into the index (AbstractIndex protocol).\n\n\nlist_datasets\nGet all dataset entries as a materialized list (AbstractIndex protocol).\n\n\nlist_entries\nGet all index entries as a materialized list.\n\n\nlist_schemas\nGet all schema records as a materialized list (AbstractIndex protocol).\n\n\nload_schema\nLoad a schema and make it available in the types namespace.\n\n\npublish_schema\nPublish a schema for a sample type to Redis.\n\n\n\n\n\nlocal.Index.add_entry(ds, *, name, 
schema_ref=None, metadata=None)\nAdd a dataset to the index.\nCreates a LocalDatasetEntry for the dataset and persists it to Redis.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nds\nDataset\nThe dataset to add to the index.\nrequired\n\n\nname\nstr\nHuman-readable name for the dataset.\nrequired\n\n\nschema_ref\nstr | None\nOptional schema reference. If None, generates from sample type.\nNone\n\n\nmetadata\ndict | None\nOptional metadata dictionary. If None, uses ds._metadata if available.\nNone\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nLocalDatasetEntry\nThe created LocalDatasetEntry object.\n\n\n\n\n\n\n\nlocal.Index.clear_stubs()\nRemove all auto-generated stub files.\nOnly works if auto_stubs was enabled when creating the Index.\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nint\nNumber of stub files removed, or 0 if auto_stubs is disabled.\n\n\n\n\n\n\n\nlocal.Index.decode_schema(ref)\nReconstruct a Python PackableSample type from a stored schema.\nThis method enables loading datasets without knowing the sample type ahead of time. The index retrieves the schema record and dynamically generates a PackableSample subclass matching the schema definition.\nIf auto_stubs is enabled, a Python module will be generated and the class will be imported from it, providing full IDE autocomplete support. 
The returned class has proper type information that IDEs can understand.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nref\nstr\nSchema reference string (atdata://local/sampleSchema/… or legacy local://schemas/…).\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nType[Packable]\nA PackableSample subclass - either imported from a generated module\n\n\n\nType[Packable]\n(if auto_stubs is enabled) or dynamically created.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nKeyError\nIf schema not found.\n\n\n\nValueError\nIf schema cannot be decoded.\n\n\n\n\n\n\n\nlocal.Index.decode_schema_as(ref, type_hint)\nDecode a schema with explicit type hint for IDE support.\nThis is a typed wrapper around decode_schema() that preserves the type information for IDE autocomplete. Use this when you have a stub file for the schema and want full IDE support.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nref\nstr\nSchema reference string.\nrequired\n\n\ntype_hint\ntype[T]\nThe stub type to use for type hints. Import this from the generated stub file.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\ntype[T]\nThe decoded type, cast to match the type_hint for IDE support.\n\n\n\n\n\n\n::\n>>> # After enabling auto_stubs and configuring IDE extraPaths:\n>>> from local.MySample_1_0_0 import MySample\n>>>\n>>> # This gives full IDE autocomplete:\n>>> DecodedType = index.decode_schema_as(ref, MySample)\n>>> sample = DecodedType(text=\"hello\", value=42)  # IDE knows signature!\n\n\n\nThe type_hint is only used for static type checking - at runtime, the actual decoded type from the schema is returned. 
Ensure the stub matches the schema to avoid runtime surprises.\n\n\n\n\nlocal.Index.get_dataset(ref)\nGet a dataset entry by name (AbstractIndex protocol).\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nref\nstr\nDataset name.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nLocalDatasetEntry\nIndexEntry for the dataset.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nKeyError\nIf dataset not found.\n\n\n\n\n\n\n\nlocal.Index.get_entry(cid)\nGet an entry by its CID.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\ncid\nstr\nContent identifier of the entry.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nLocalDatasetEntry\nLocalDatasetEntry for the given CID.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nKeyError\nIf entry not found.\n\n\n\n\n\n\n\nlocal.Index.get_entry_by_name(name)\nGet an entry by its human-readable name.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nname\nstr\nHuman-readable name of the entry.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nLocalDatasetEntry\nLocalDatasetEntry with the given name.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nKeyError\nIf no entry with that name exists.\n\n\n\n\n\n\n\nlocal.Index.get_import_path(ref)\nGet the import path for a schema’s generated module.\nWhen auto_stubs is enabled, this returns the import path that can be used to import the schema type with full IDE support.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nref\nstr\nSchema reference string.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nstr | None\nImport path like “local.MySample_1_0_0”, or None if auto_stubs\n\n\n\nstr | None\nis disabled.\n\n\n\n\n\n\n::\n>>> index = LocalIndex(auto_stubs=True)\n>>> ref = index.publish_schema(MySample, version=\"1.0.0\")\n>>> index.load_schema(ref)\n>>> print(index.get_import_path(ref))\nlocal.MySample_1_0_0\n>>> # Then in your code:\n>>> # from local.MySample_1_0_0 import 
MySample\n\n\n\n\nlocal.Index.get_schema(ref)\nGet a schema record by reference (AbstractIndex protocol).\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nref\nstr\nSchema reference string. Supports both new format (atdata://local/sampleSchema/{name}@version) and legacy format (local://schemas/{module.Class}@version).\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\ndict\nSchema record as a dictionary with keys ‘name’, ‘version’,\n\n\n\ndict\n‘fields’, ‘$ref’, etc.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nKeyError\nIf schema not found.\n\n\n\nValueError\nIf reference format is invalid.\n\n\n\n\n\n\n\nlocal.Index.get_schema_record(ref)\nGet a schema record as LocalSchemaRecord object.\nUse this when you need the full LocalSchemaRecord with typed properties. For Protocol-compliant dict access, use get_schema() instead.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nref\nstr\nSchema reference string.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nLocalSchemaRecord\nLocalSchemaRecord with schema details.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nKeyError\nIf schema not found.\n\n\n\nValueError\nIf reference format is invalid.\n\n\n\n\n\n\n\nlocal.Index.insert_dataset(ds, *, name, schema_ref=None, **kwargs)\nInsert a dataset into the index (AbstractIndex protocol).\nIf a data_store was provided at initialization, writes dataset shards to storage first, then indexes the new URLs. 
Otherwise, indexes the dataset’s existing URL.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nds\nDataset\nThe Dataset to register.\nrequired\n\n\nname\nstr\nHuman-readable name for the dataset.\nrequired\n\n\nschema_ref\nstr | None\nOptional schema reference.\nNone\n\n\n**kwargs\n\nAdditional options: - metadata: Optional metadata dict - prefix: Storage prefix (default: dataset name) - cache_local: If True, cache writes locally first\n{}\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nLocalDatasetEntry\nIndexEntry for the inserted dataset.\n\n\n\n\n\n\n\nlocal.Index.list_datasets()\nGet all dataset entries as a materialized list (AbstractIndex protocol).\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nlist[LocalDatasetEntry]\nList of IndexEntry for each dataset.\n\n\n\n\n\n\n\nlocal.Index.list_entries()\nGet all index entries as a materialized list.\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nlist[LocalDatasetEntry]\nList of all LocalDatasetEntry objects in the index.\n\n\n\n\n\n\n\nlocal.Index.list_schemas()\nGet all schema records as a materialized list (AbstractIndex protocol).\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nlist[dict]\nList of schema records as dictionaries.\n\n\n\n\n\n\n\nlocal.Index.load_schema(ref)\nLoad a schema and make it available in the types namespace.\nThis method decodes the schema, optionally generates a Python module for IDE support (if auto_stubs is enabled), and registers the type in the :attr:types namespace for easy access.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nref\nstr\nSchema reference string (atdata://local/sampleSchema/… or legacy local://schemas/…).\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nType[Packable]\nThe decoded PackableSample subclass. 
Also available via\n\n\n\nType[Packable]\nindex.types.<ClassName> after this call.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nKeyError\nIf schema not found.\n\n\n\nValueError\nIf schema cannot be decoded.\n\n\n\n\n\n\n::\n>>> # Load and use immediately\n>>> MyType = index.load_schema(\"atdata://local/sampleSchema/MySample@1.0.0\")\n>>> sample = MyType(name=\"hello\", value=42)\n>>>\n>>> # Or access later via namespace\n>>> index.load_schema(\"atdata://local/sampleSchema/OtherType@1.0.0\")\n>>> other = index.types.OtherType(data=\"test\")\n\n\n\n\nlocal.Index.publish_schema(sample_type, *, version=None, description=None)\nPublish a schema for a sample type to Redis.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nsample_type\ntype\nA Packable type (@packable-decorated or PackableSample subclass).\nrequired\n\n\nversion\nstr | None\nSemantic version string (e.g., ‘1.0.0’). If None, auto-increments from the latest published version (patch bump), or starts at ‘1.0.0’ if no previous version exists.\nNone\n\n\ndescription\nstr | None\nOptional human-readable description. If None, uses the class docstring.\nNone\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nstr\nSchema reference string: ‘atdata://local/sampleSchema/{name}@version’.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nValueError\nIf sample_type is not a dataclass.\n\n\n\nTypeError\nIf sample_type doesn’t satisfy the Packable protocol, or if a field type is not supported."
                                                                                        +    "text": "local.Index(\n    redis=None,\n    data_store=None,\n    auto_stubs=False,\n    stub_dir=None,\n    **kwargs,\n)\nRedis-backed index for tracking datasets in a repository.\nImplements the AbstractIndex protocol. Maintains a registry of LocalDatasetEntry objects in Redis, allowing enumeration and lookup of stored datasets.\nWhen initialized with a data_store, insert_dataset() will write dataset shards to storage before indexing. Without a data_store, insert_dataset() only indexes existing URLs.\n\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n_redis\n\nRedis connection for index storage.\n\n\n_data_store\n\nOptional AbstractDataStore for writing dataset shards.\n\n\n\n\n\n\n\n\n\nName\nDescription\n\n\n\n\nadd_entry\nAdd a dataset to the index.\n\n\nclear_stubs\nRemove all auto-generated stub files.\n\n\ndecode_schema\nReconstruct a Python PackableSample type from a stored schema.\n\n\ndecode_schema_as\nDecode a schema with explicit type hint for IDE support.\n\n\nget_dataset\nGet a dataset entry by name (AbstractIndex protocol).\n\n\nget_entry\nGet an entry by its CID.\n\n\nget_entry_by_name\nGet an entry by its human-readable name.\n\n\nget_import_path\nGet the import path for a schema’s generated module.\n\n\nget_schema\nGet a schema record by reference (AbstractIndex protocol).\n\n\nget_schema_record\nGet a schema record as LocalSchemaRecord object.\n\n\ninsert_dataset\nInsert a dataset into the index (AbstractIndex protocol).\n\n\nlist_datasets\nGet all dataset entries as a materialized list (AbstractIndex protocol).\n\n\nlist_entries\nGet all index entries as a materialized list.\n\n\nlist_schemas\nGet all schema records as a materialized list (AbstractIndex protocol).\n\n\nload_schema\nLoad a schema and make it available in the types namespace.\n\n\npublish_schema\nPublish a schema for a sample type to Redis.\n\n\n\n\n\nlocal.Index.add_entry(ds, *, name, 
schema_ref=None, metadata=None)\nAdd a dataset to the index.\nCreates a LocalDatasetEntry for the dataset and persists it to Redis.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nds\nDataset\nThe dataset to add to the index.\nrequired\n\n\nname\nstr\nHuman-readable name for the dataset.\nrequired\n\n\nschema_ref\nstr | None\nOptional schema reference. If None, generates from sample type.\nNone\n\n\nmetadata\ndict | None\nOptional metadata dictionary. If None, uses ds._metadata if available.\nNone\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nLocalDatasetEntry\nThe created LocalDatasetEntry object.\n\n\n\n\n\n\n\nlocal.Index.clear_stubs()\nRemove all auto-generated stub files.\nOnly works if auto_stubs was enabled when creating the Index.\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nint\nNumber of stub files removed, or 0 if auto_stubs is disabled.\n\n\n\n\n\n\n\nlocal.Index.decode_schema(ref)\nReconstruct a Python PackableSample type from a stored schema.\nThis method enables loading datasets without knowing the sample type ahead of time. The index retrieves the schema record and dynamically generates a PackableSample subclass matching the schema definition.\nIf auto_stubs is enabled, a Python module will be generated and the class will be imported from it, providing full IDE autocomplete support. 
The returned class has proper type information that IDEs can understand.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nref\nstr\nSchema reference string (atdata://local/sampleSchema/… or legacy local://schemas/…).\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nType[Packable]\nA PackableSample subclass - either imported from a generated module\n\n\n\nType[Packable]\n(if auto_stubs is enabled) or dynamically created.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nKeyError\nIf schema not found.\n\n\n\nValueError\nIf schema cannot be decoded.\n\n\n\n\n\n\n\nlocal.Index.decode_schema_as(ref, type_hint)\nDecode a schema with explicit type hint for IDE support.\nThis is a typed wrapper around decode_schema() that preserves the type information for IDE autocomplete. Use this when you have a stub file for the schema and want full IDE support.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nref\nstr\nSchema reference string.\nrequired\n\n\ntype_hint\ntype[T]\nThe stub type to use for type hints. Import this from the generated stub file.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\ntype[T]\nThe decoded type, cast to match the type_hint for IDE support.\n\n\n\n\n\n\n>>> # After enabling auto_stubs and configuring IDE extraPaths:\n>>> from local.MySample_1_0_0 import MySample\n>>>\n>>> # This gives full IDE autocomplete:\n>>> DecodedType = index.decode_schema_as(ref, MySample)\n>>> sample = DecodedType(text=\"hello\", value=42)  # IDE knows signature!\n\n\n\nThe type_hint is only used for static type checking - at runtime, the actual decoded type from the schema is returned. 
Ensure the stub matches the schema to avoid runtime surprises.\n\n\n\n\nlocal.Index.get_dataset(ref)\nGet a dataset entry by name (AbstractIndex protocol).\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nref\nstr\nDataset name.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nLocalDatasetEntry\nIndexEntry for the dataset.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nKeyError\nIf dataset not found.\n\n\n\n\n\n\n\nlocal.Index.get_entry(cid)\nGet an entry by its CID.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\ncid\nstr\nContent identifier of the entry.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nLocalDatasetEntry\nLocalDatasetEntry for the given CID.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nKeyError\nIf entry not found.\n\n\n\n\n\n\n\nlocal.Index.get_entry_by_name(name)\nGet an entry by its human-readable name.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nname\nstr\nHuman-readable name of the entry.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nLocalDatasetEntry\nLocalDatasetEntry with the given name.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nKeyError\nIf no entry with that name exists.\n\n\n\n\n\n\n\nlocal.Index.get_import_path(ref)\nGet the import path for a schema’s generated module.\nWhen auto_stubs is enabled, this returns the import path that can be used to import the schema type with full IDE support.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nref\nstr\nSchema reference string.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nstr | None\nImport path like “local.MySample_1_0_0”, or None if auto_stubs\n\n\n\nstr | None\nis disabled.\n\n\n\n\n\n\n>>> index = LocalIndex(auto_stubs=True)\n>>> ref = index.publish_schema(MySample, version=\"1.0.0\")\n>>> index.load_schema(ref)\n>>> print(index.get_import_path(ref))\nlocal.MySample_1_0_0\n>>> # Then in your code:\n>>> # from local.MySample_1_0_0 import 
MySample\n\n\n\n\nlocal.Index.get_schema(ref)\nGet a schema record by reference (AbstractIndex protocol).\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nref\nstr\nSchema reference string. Supports both new format (atdata://local/sampleSchema/{name}@version) and legacy format (local://schemas/{module.Class}@version).\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\ndict\nSchema record as a dictionary with keys ‘name’, ‘version’,\n\n\n\ndict\n‘fields’, ‘$ref’, etc.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nKeyError\nIf schema not found.\n\n\n\nValueError\nIf reference format is invalid.\n\n\n\n\n\n\n\nlocal.Index.get_schema_record(ref)\nGet a schema record as LocalSchemaRecord object.\nUse this when you need the full LocalSchemaRecord with typed properties. For Protocol-compliant dict access, use get_schema() instead.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nref\nstr\nSchema reference string.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nLocalSchemaRecord\nLocalSchemaRecord with schema details.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nKeyError\nIf schema not found.\n\n\n\nValueError\nIf reference format is invalid.\n\n\n\n\n\n\n\nlocal.Index.insert_dataset(ds, *, name, schema_ref=None, **kwargs)\nInsert a dataset into the index (AbstractIndex protocol).\nIf a data_store was provided at initialization, writes dataset shards to storage first, then indexes the new URLs. 
Otherwise, indexes the dataset’s existing URL.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nds\nDataset\nThe Dataset to register.\nrequired\n\n\nname\nstr\nHuman-readable name for the dataset.\nrequired\n\n\nschema_ref\nstr | None\nOptional schema reference.\nNone\n\n\n**kwargs\n\nAdditional options: - metadata: Optional metadata dict - prefix: Storage prefix (default: dataset name) - cache_local: If True, cache writes locally first\n{}\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nLocalDatasetEntry\nIndexEntry for the inserted dataset.\n\n\n\n\n\n\n\nlocal.Index.list_datasets()\nGet all dataset entries as a materialized list (AbstractIndex protocol).\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nlist[LocalDatasetEntry]\nList of IndexEntry for each dataset.\n\n\n\n\n\n\n\nlocal.Index.list_entries()\nGet all index entries as a materialized list.\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nlist[LocalDatasetEntry]\nList of all LocalDatasetEntry objects in the index.\n\n\n\n\n\n\n\nlocal.Index.list_schemas()\nGet all schema records as a materialized list (AbstractIndex protocol).\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nlist[dict]\nList of schema records as dictionaries.\n\n\n\n\n\n\n\nlocal.Index.load_schema(ref)\nLoad a schema and make it available in the types namespace.\nThis method decodes the schema, optionally generates a Python module for IDE support (if auto_stubs is enabled), and registers the type in the :attr:types namespace for easy access.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nref\nstr\nSchema reference string (atdata://local/sampleSchema/… or legacy local://schemas/…).\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nType[Packable]\nThe decoded PackableSample subclass. 
Also available via\n\n\n\nType[Packable]\nindex.types.<ClassName> after this call.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nKeyError\nIf schema not found.\n\n\n\nValueError\nIf schema cannot be decoded.\n\n\n\n\n\n\n>>> # Load and use immediately\n>>> MyType = index.load_schema(\"atdata://local/sampleSchema/MySample@1.0.0\")\n>>> sample = MyType(name=\"hello\", value=42)\n>>>\n>>> # Or access later via namespace\n>>> index.load_schema(\"atdata://local/sampleSchema/OtherType@1.0.0\")\n>>> other = index.types.OtherType(data=\"test\")\n\n\n\n\nlocal.Index.publish_schema(sample_type, *, version=None, description=None)\nPublish a schema for a sample type to Redis.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nsample_type\ntype\nA Packable type (@packable-decorated or PackableSample subclass).\nrequired\n\n\nversion\nstr | None\nSemantic version string (e.g., ‘1.0.0’). If None, auto-increments from the latest published version (patch bump), or starts at ‘1.0.0’ if no previous version exists.\nNone\n\n\ndescription\nstr | None\nOptional human-readable description. If None, uses the class docstring.\nNone\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nstr\nSchema reference string: ‘atdata://local/sampleSchema/{name}@version’.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nValueError\nIf sample_type is not a dataclass.\n\n\n\nTypeError\nIf sample_type doesn’t satisfy the Packable protocol, or if a field type is not supported."
                                                                                           },
                                                                                           {
                                                                                             "objectID": "api/local.Index.html#attributes",
                                                                                        @@ -1819,70 +1819,70 @@
                                                                                             "href": "api/local.Index.html#methods",
                                                                                             "title": "local.Index",
                                                                                             "section": "",
                                                                                        -    "text": "Name\nDescription\n\n\n\n\nadd_entry\nAdd a dataset to the index.\n\n\nclear_stubs\nRemove all auto-generated stub files.\n\n\ndecode_schema\nReconstruct a Python PackableSample type from a stored schema.\n\n\ndecode_schema_as\nDecode a schema with explicit type hint for IDE support.\n\n\nget_dataset\nGet a dataset entry by name (AbstractIndex protocol).\n\n\nget_entry\nGet an entry by its CID.\n\n\nget_entry_by_name\nGet an entry by its human-readable name.\n\n\nget_import_path\nGet the import path for a schema’s generated module.\n\n\nget_schema\nGet a schema record by reference (AbstractIndex protocol).\n\n\nget_schema_record\nGet a schema record as LocalSchemaRecord object.\n\n\ninsert_dataset\nInsert a dataset into the index (AbstractIndex protocol).\n\n\nlist_datasets\nGet all dataset entries as a materialized list (AbstractIndex protocol).\n\n\nlist_entries\nGet all index entries as a materialized list.\n\n\nlist_schemas\nGet all schema records as a materialized list (AbstractIndex protocol).\n\n\nload_schema\nLoad a schema and make it available in the types namespace.\n\n\npublish_schema\nPublish a schema for a sample type to Redis.\n\n\n\n\n\nlocal.Index.add_entry(ds, *, name, schema_ref=None, metadata=None)\nAdd a dataset to the index.\nCreates a LocalDatasetEntry for the dataset and persists it to Redis.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nds\nDataset\nThe dataset to add to the index.\nrequired\n\n\nname\nstr\nHuman-readable name for the dataset.\nrequired\n\n\nschema_ref\nstr | None\nOptional schema reference. If None, generates from sample type.\nNone\n\n\nmetadata\ndict | None\nOptional metadata dictionary. 
If None, uses ds._metadata if available.\nNone\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nLocalDatasetEntry\nThe created LocalDatasetEntry object.\n\n\n\n\n\n\n\nlocal.Index.clear_stubs()\nRemove all auto-generated stub files.\nOnly works if auto_stubs was enabled when creating the Index.\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nint\nNumber of stub files removed, or 0 if auto_stubs is disabled.\n\n\n\n\n\n\n\nlocal.Index.decode_schema(ref)\nReconstruct a Python PackableSample type from a stored schema.\nThis method enables loading datasets without knowing the sample type ahead of time. The index retrieves the schema record and dynamically generates a PackableSample subclass matching the schema definition.\nIf auto_stubs is enabled, a Python module will be generated and the class will be imported from it, providing full IDE autocomplete support. The returned class has proper type information that IDEs can understand.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nref\nstr\nSchema reference string (atdata://local/sampleSchema/… or legacy local://schemas/…).\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nType[Packable]\nA PackableSample subclass - either imported from a generated module\n\n\n\nType[Packable]\n(if auto_stubs is enabled) or dynamically created.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nKeyError\nIf schema not found.\n\n\n\nValueError\nIf schema cannot be decoded.\n\n\n\n\n\n\n\nlocal.Index.decode_schema_as(ref, type_hint)\nDecode a schema with explicit type hint for IDE support.\nThis is a typed wrapper around decode_schema() that preserves the type information for IDE autocomplete. Use this when you have a stub file for the schema and want full IDE support.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nref\nstr\nSchema reference string.\nrequired\n\n\ntype_hint\ntype[T]\nThe stub type to use for type hints. 
Import this from the generated stub file.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\ntype[T]\nThe decoded type, cast to match the type_hint for IDE support.\n\n\n\n\n\n\n::\n>>> # After enabling auto_stubs and configuring IDE extraPaths:\n>>> from local.MySample_1_0_0 import MySample\n>>>\n>>> # This gives full IDE autocomplete:\n>>> DecodedType = index.decode_schema_as(ref, MySample)\n>>> sample = DecodedType(text=\"hello\", value=42)  # IDE knows signature!\n\n\n\nThe type_hint is only used for static type checking - at runtime, the actual decoded type from the schema is returned. Ensure the stub matches the schema to avoid runtime surprises.\n\n\n\n\nlocal.Index.get_dataset(ref)\nGet a dataset entry by name (AbstractIndex protocol).\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nref\nstr\nDataset name.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nLocalDatasetEntry\nIndexEntry for the dataset.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nKeyError\nIf dataset not found.\n\n\n\n\n\n\n\nlocal.Index.get_entry(cid)\nGet an entry by its CID.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\ncid\nstr\nContent identifier of the entry.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nLocalDatasetEntry\nLocalDatasetEntry for the given CID.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nKeyError\nIf entry not found.\n\n\n\n\n\n\n\nlocal.Index.get_entry_by_name(name)\nGet an entry by its human-readable name.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nname\nstr\nHuman-readable name of the entry.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nLocalDatasetEntry\nLocalDatasetEntry with the given name.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nKeyError\nIf no entry with that name exists.\n\n\n\n\n\n\n\nlocal.Index.get_import_path(ref)\nGet the import path for a schema’s generated module.\nWhen auto_stubs is enabled, this returns the import path that can be used 
to import the schema type with full IDE support.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nref\nstr\nSchema reference string.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nstr | None\nImport path like “local.MySample_1_0_0”, or None if auto_stubs\n\n\n\nstr | None\nis disabled.\n\n\n\n\n\n\n::\n>>> index = LocalIndex(auto_stubs=True)\n>>> ref = index.publish_schema(MySample, version=\"1.0.0\")\n>>> index.load_schema(ref)\n>>> print(index.get_import_path(ref))\nlocal.MySample_1_0_0\n>>> # Then in your code:\n>>> # from local.MySample_1_0_0 import MySample\n\n\n\n\nlocal.Index.get_schema(ref)\nGet a schema record by reference (AbstractIndex protocol).\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nref\nstr\nSchema reference string. Supports both new format (atdata://local/sampleSchema/{name}@version) and legacy format (local://schemas/{module.Class}@version).\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\ndict\nSchema record as a dictionary with keys ‘name’, ‘version’,\n\n\n\ndict\n‘fields’, ‘$ref’, etc.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nKeyError\nIf schema not found.\n\n\n\nValueError\nIf reference format is invalid.\n\n\n\n\n\n\n\nlocal.Index.get_schema_record(ref)\nGet a schema record as LocalSchemaRecord object.\nUse this when you need the full LocalSchemaRecord with typed properties. 
For Protocol-compliant dict access, use get_schema() instead.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nref\nstr\nSchema reference string.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nLocalSchemaRecord\nLocalSchemaRecord with schema details.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nKeyError\nIf schema not found.\n\n\n\nValueError\nIf reference format is invalid.\n\n\n\n\n\n\n\nlocal.Index.insert_dataset(ds, *, name, schema_ref=None, **kwargs)\nInsert a dataset into the index (AbstractIndex protocol).\nIf a data_store was provided at initialization, writes dataset shards to storage first, then indexes the new URLs. Otherwise, indexes the dataset’s existing URL.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nds\nDataset\nThe Dataset to register.\nrequired\n\n\nname\nstr\nHuman-readable name for the dataset.\nrequired\n\n\nschema_ref\nstr | None\nOptional schema reference.\nNone\n\n\n**kwargs\n\nAdditional options: - metadata: Optional metadata dict - prefix: Storage prefix (default: dataset name) - cache_local: If True, cache writes locally first\n{}\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nLocalDatasetEntry\nIndexEntry for the inserted dataset.\n\n\n\n\n\n\n\nlocal.Index.list_datasets()\nGet all dataset entries as a materialized list (AbstractIndex protocol).\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nlist[LocalDatasetEntry]\nList of IndexEntry for each dataset.\n\n\n\n\n\n\n\nlocal.Index.list_entries()\nGet all index entries as a materialized list.\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nlist[LocalDatasetEntry]\nList of all LocalDatasetEntry objects in the index.\n\n\n\n\n\n\n\nlocal.Index.list_schemas()\nGet all schema records as a materialized list (AbstractIndex protocol).\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nlist[dict]\nList of schema records as dictionaries.\n\n\n\n\n\n\n\nlocal.Index.load_schema(ref)\nLoad a schema and make it available in the types namespace.\nThis method decodes 
the schema, optionally generates a Python module for IDE support (if auto_stubs is enabled), and registers the type in the :attr:types namespace for easy access.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nref\nstr\nSchema reference string (atdata://local/sampleSchema/… or legacy local://schemas/…).\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nType[Packable]\nThe decoded PackableSample subclass. Also available via\n\n\n\nType[Packable]\nindex.types.<ClassName> after this call.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nKeyError\nIf schema not found.\n\n\n\nValueError\nIf schema cannot be decoded.\n\n\n\n\n\n\n::\n>>> # Load and use immediately\n>>> MyType = index.load_schema(\"atdata://local/sampleSchema/MySample@1.0.0\")\n>>> sample = MyType(name=\"hello\", value=42)\n>>>\n>>> # Or access later via namespace\n>>> index.load_schema(\"atdata://local/sampleSchema/OtherType@1.0.0\")\n>>> other = index.types.OtherType(data=\"test\")\n\n\n\n\nlocal.Index.publish_schema(sample_type, *, version=None, description=None)\nPublish a schema for a sample type to Redis.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nsample_type\ntype\nA Packable type (@packable-decorated or PackableSample subclass).\nrequired\n\n\nversion\nstr | None\nSemantic version string (e.g., ‘1.0.0’). If None, auto-increments from the latest published version (patch bump), or starts at ‘1.0.0’ if no previous version exists.\nNone\n\n\ndescription\nstr | None\nOptional human-readable description. If None, uses the class docstring.\nNone\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nstr\nSchema reference string: ‘atdata://local/sampleSchema/{name}@version’.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nValueError\nIf sample_type is not a dataclass.\n\n\n\nTypeError\nIf sample_type doesn’t satisfy the Packable protocol, or if a field type is not supported."
                                                                                        +    "text": "Name\nDescription\n\n\n\n\nadd_entry\nAdd a dataset to the index.\n\n\nclear_stubs\nRemove all auto-generated stub files.\n\n\ndecode_schema\nReconstruct a Python PackableSample type from a stored schema.\n\n\ndecode_schema_as\nDecode a schema with explicit type hint for IDE support.\n\n\nget_dataset\nGet a dataset entry by name (AbstractIndex protocol).\n\n\nget_entry\nGet an entry by its CID.\n\n\nget_entry_by_name\nGet an entry by its human-readable name.\n\n\nget_import_path\nGet the import path for a schema’s generated module.\n\n\nget_schema\nGet a schema record by reference (AbstractIndex protocol).\n\n\nget_schema_record\nGet a schema record as LocalSchemaRecord object.\n\n\ninsert_dataset\nInsert a dataset into the index (AbstractIndex protocol).\n\n\nlist_datasets\nGet all dataset entries as a materialized list (AbstractIndex protocol).\n\n\nlist_entries\nGet all index entries as a materialized list.\n\n\nlist_schemas\nGet all schema records as a materialized list (AbstractIndex protocol).\n\n\nload_schema\nLoad a schema and make it available in the types namespace.\n\n\npublish_schema\nPublish a schema for a sample type to Redis.\n\n\n\n\n\nlocal.Index.add_entry(ds, *, name, schema_ref=None, metadata=None)\nAdd a dataset to the index.\nCreates a LocalDatasetEntry for the dataset and persists it to Redis.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nds\nDataset\nThe dataset to add to the index.\nrequired\n\n\nname\nstr\nHuman-readable name for the dataset.\nrequired\n\n\nschema_ref\nstr | None\nOptional schema reference. If None, generates from sample type.\nNone\n\n\nmetadata\ndict | None\nOptional metadata dictionary. 
If None, uses ds._metadata if available.\nNone\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nLocalDatasetEntry\nThe created LocalDatasetEntry object.\n\n\n\n\n\n\n\nlocal.Index.clear_stubs()\nRemove all auto-generated stub files.\nOnly works if auto_stubs was enabled when creating the Index.\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nint\nNumber of stub files removed, or 0 if auto_stubs is disabled.\n\n\n\n\n\n\n\nlocal.Index.decode_schema(ref)\nReconstruct a Python PackableSample type from a stored schema.\nThis method enables loading datasets without knowing the sample type ahead of time. The index retrieves the schema record and dynamically generates a PackableSample subclass matching the schema definition.\nIf auto_stubs is enabled, a Python module will be generated and the class will be imported from it, providing full IDE autocomplete support. The returned class has proper type information that IDEs can understand.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nref\nstr\nSchema reference string (atdata://local/sampleSchema/… or legacy local://schemas/…).\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nType[Packable]\nA PackableSample subclass - either imported from a generated module\n\n\n\nType[Packable]\n(if auto_stubs is enabled) or dynamically created.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nKeyError\nIf schema not found.\n\n\n\nValueError\nIf schema cannot be decoded.\n\n\n\n\n\n\n\nlocal.Index.decode_schema_as(ref, type_hint)\nDecode a schema with explicit type hint for IDE support.\nThis is a typed wrapper around decode_schema() that preserves the type information for IDE autocomplete. Use this when you have a stub file for the schema and want full IDE support.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nref\nstr\nSchema reference string.\nrequired\n\n\ntype_hint\ntype[T]\nThe stub type to use for type hints. 
Import this from the generated stub file.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\ntype[T]\nThe decoded type, cast to match the type_hint for IDE support.\n\n\n\n\n\n\n>>> # After enabling auto_stubs and configuring IDE extraPaths:\n>>> from local.MySample_1_0_0 import MySample\n>>>\n>>> # This gives full IDE autocomplete:\n>>> DecodedType = index.decode_schema_as(ref, MySample)\n>>> sample = DecodedType(text=\"hello\", value=42)  # IDE knows signature!\n\n\n\nThe type_hint is only used for static type checking - at runtime, the actual decoded type from the schema is returned. Ensure the stub matches the schema to avoid runtime surprises.\n\n\n\n\nlocal.Index.get_dataset(ref)\nGet a dataset entry by name (AbstractIndex protocol).\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nref\nstr\nDataset name.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nLocalDatasetEntry\nIndexEntry for the dataset.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nKeyError\nIf dataset not found.\n\n\n\n\n\n\n\nlocal.Index.get_entry(cid)\nGet an entry by its CID.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\ncid\nstr\nContent identifier of the entry.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nLocalDatasetEntry\nLocalDatasetEntry for the given CID.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nKeyError\nIf entry not found.\n\n\n\n\n\n\n\nlocal.Index.get_entry_by_name(name)\nGet an entry by its human-readable name.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nname\nstr\nHuman-readable name of the entry.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nLocalDatasetEntry\nLocalDatasetEntry with the given name.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nKeyError\nIf no entry with that name exists.\n\n\n\n\n\n\n\nlocal.Index.get_import_path(ref)\nGet the import path for a schema’s generated module.\nWhen auto_stubs is enabled, this returns the import path that can be used to 
import the schema type with full IDE support.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nref\nstr\nSchema reference string.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nstr | None\nImport path like “local.MySample_1_0_0”, or None if auto_stubs\n\n\n\nstr | None\nis disabled.\n\n\n\n\n\n\n>>> index = LocalIndex(auto_stubs=True)\n>>> ref = index.publish_schema(MySample, version=\"1.0.0\")\n>>> index.load_schema(ref)\n>>> print(index.get_import_path(ref))\nlocal.MySample_1_0_0\n>>> # Then in your code:\n>>> # from local.MySample_1_0_0 import MySample\n\n\n\n\nlocal.Index.get_schema(ref)\nGet a schema record by reference (AbstractIndex protocol).\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nref\nstr\nSchema reference string. Supports both new format (atdata://local/sampleSchema/{name}@version) and legacy format (local://schemas/{module.Class}@version).\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\ndict\nSchema record as a dictionary with keys ‘name’, ‘version’,\n\n\n\ndict\n‘fields’, ‘$ref’, etc.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nKeyError\nIf schema not found.\n\n\n\nValueError\nIf reference format is invalid.\n\n\n\n\n\n\n\nlocal.Index.get_schema_record(ref)\nGet a schema record as LocalSchemaRecord object.\nUse this when you need the full LocalSchemaRecord with typed properties. 
For Protocol-compliant dict access, use get_schema() instead.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nref\nstr\nSchema reference string.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nLocalSchemaRecord\nLocalSchemaRecord with schema details.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nKeyError\nIf schema not found.\n\n\n\nValueError\nIf reference format is invalid.\n\n\n\n\n\n\n\nlocal.Index.insert_dataset(ds, *, name, schema_ref=None, **kwargs)\nInsert a dataset into the index (AbstractIndex protocol).\nIf a data_store was provided at initialization, writes dataset shards to storage first, then indexes the new URLs. Otherwise, indexes the dataset’s existing URL.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nds\nDataset\nThe Dataset to register.\nrequired\n\n\nname\nstr\nHuman-readable name for the dataset.\nrequired\n\n\nschema_ref\nstr | None\nOptional schema reference.\nNone\n\n\n**kwargs\n\nAdditional options: - metadata: Optional metadata dict - prefix: Storage prefix (default: dataset name) - cache_local: If True, cache writes locally first\n{}\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nLocalDatasetEntry\nIndexEntry for the inserted dataset.\n\n\n\n\n\n\n\nlocal.Index.list_datasets()\nGet all dataset entries as a materialized list (AbstractIndex protocol).\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nlist[LocalDatasetEntry]\nList of IndexEntry for each dataset.\n\n\n\n\n\n\n\nlocal.Index.list_entries()\nGet all index entries as a materialized list.\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nlist[LocalDatasetEntry]\nList of all LocalDatasetEntry objects in the index.\n\n\n\n\n\n\n\nlocal.Index.list_schemas()\nGet all schema records as a materialized list (AbstractIndex protocol).\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nlist[dict]\nList of schema records as dictionaries.\n\n\n\n\n\n\n\nlocal.Index.load_schema(ref)\nLoad a schema and make it available in the types namespace.\nThis method decodes 
the schema, optionally generates a Python module for IDE support (if auto_stubs is enabled), and registers the type in the :attr:types namespace for easy access.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nref\nstr\nSchema reference string (atdata://local/sampleSchema/… or legacy local://schemas/…).\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nType[Packable]\nThe decoded PackableSample subclass. Also available via\n\n\n\nType[Packable]\nindex.types.<ClassName> after this call.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nKeyError\nIf schema not found.\n\n\n\nValueError\nIf schema cannot be decoded.\n\n\n\n\n\n\n>>> # Load and use immediately\n>>> MyType = index.load_schema(\"atdata://local/sampleSchema/MySample@1.0.0\")\n>>> sample = MyType(name=\"hello\", value=42)\n>>>\n>>> # Or access later via namespace\n>>> index.load_schema(\"atdata://local/sampleSchema/OtherType@1.0.0\")\n>>> other = index.types.OtherType(data=\"test\")\n\n\n\n\nlocal.Index.publish_schema(sample_type, *, version=None, description=None)\nPublish a schema for a sample type to Redis.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nsample_type\ntype\nA Packable type (@packable-decorated or PackableSample subclass).\nrequired\n\n\nversion\nstr | None\nSemantic version string (e.g., ‘1.0.0’). If None, auto-increments from the latest published version (patch bump), or starts at ‘1.0.0’ if no previous version exists.\nNone\n\n\ndescription\nstr | None\nOptional human-readable description. If None, uses the class docstring.\nNone\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nstr\nSchema reference string: ‘atdata://local/sampleSchema/{name}@version’.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nValueError\nIf sample_type is not a dataclass.\n\n\n\nTypeError\nIf sample_type doesn’t satisfy the Packable protocol, or if a field type is not supported."
                                                                                           },
                                                                                           {
                                                                                             "objectID": "api/Lens.html",
                                                                                             "href": "api/Lens.html",
                                                                                             "title": "lens",
                                                                                             "section": "",
                                                                                        -    "text": "lens\nLens-based type transformations for datasets.\nThis module implements a lens system for bidirectional transformations between different sample types. Lenses enable viewing a dataset through different type schemas without duplicating the underlying data.\nKey components:\n\nLens: Bidirectional transformation with getter (S -> V) and optional putter (V, S -> S)\nLensNetwork: Global singleton registry for lens transformations\n@lens: Decorator to create and register lens transformations\n\nLenses support the functional programming concept of composable, well-behaved transformations that satisfy lens laws (GetPut and PutGet).\n\n\n::\n>>> @packable\n... class FullData:\n...     name: str\n...     age: int\n...     embedding: NDArray\n...\n>>> @packable\n... class NameOnly:\n...     name: str\n...\n>>> @lens\n... def name_view(full: FullData) -> NameOnly:\n...     return NameOnly(name=full.name)\n...\n>>> @name_view.putter\n... def name_view_put(view: NameOnly, source: FullData) -> FullData:\n...     return FullData(name=view.name, age=source.age,\n...                     embedding=source.embedding)\n...\n>>> ds = Dataset[FullData](\"data.tar\")\n>>> ds_names = ds.as_type(NameOnly)  # Uses registered lens\n\n\n\n\n\n\nName\nDescription\n\n\n\n\nLens\nA bidirectional transformation between two sample types.\n\n\nLensNetwork\nGlobal registry for lens transformations between sample types.\n\n\n\n\n\nlens.Lens(get, put=None)\nA bidirectional transformation between two sample types.\nA lens provides a way to view and update data of type S (source) as if it were type V (view). 
It consists of a getter that transforms S -> V and an optional putter that transforms (V, S) -> S, enabling updates to the view to be reflected back in the source.\n\n\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nS\n\nThe source type, must derive from PackableSample.\nrequired\n\n\nV\n\nThe view type, must derive from PackableSample.\nrequired\n\n\n\n\n\n\n::\n>>> @lens\n... def name_lens(full: FullData) -> NameOnly:\n...     return NameOnly(name=full.name)\n...\n>>> @name_lens.putter\n... def name_lens_put(view: NameOnly, source: FullData) -> FullData:\n...     return FullData(name=view.name, age=source.age)\n\n\n\n\n\n\nName\nDescription\n\n\n\n\nget\nTransform the source into the view type.\n\n\nput\nUpdate the source based on a modified view.\n\n\nputter\nDecorator to register a putter function for this lens.\n\n\n\n\n\nlens.Lens.get(s)\nTransform the source into the view type.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\ns\nS\nThe source sample of type S.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nV\nA view of the source as type V.\n\n\n\n\n\n\n\nlens.Lens.put(v, s)\nUpdate the source based on a modified view.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nv\nV\nThe modified view of type V.\nrequired\n\n\ns\nS\nThe original source of type S.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nS\nAn updated source of type S that reflects changes from the view.\n\n\n\n\n\n\n\nlens.Lens.putter(put)\nDecorator to register a putter function for this lens.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nput\nLensPutter[S, V]\nA function that takes a view of type V and source of type S, and returns an updated source of type S.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nLensPutter[S, V]\nThe putter function, allowing this to be used as a decorator.\n\n\n\n\n\n\n::\n>>> @my_lens.putter\n... def my_lens_put(view: ViewType, source: SourceType) -> SourceType:\n...     
return SourceType(...)\n\n\n\n\n\n\nlens.LensNetwork()\nGlobal registry for lens transformations between sample types.\nThis class implements a singleton pattern to maintain a global registry of all lenses decorated with @lens. It enables looking up transformations between different PackableSample types.\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n_instance\n\nThe singleton instance of this class.\n\n\n_registry\nDict[LensSignature, Lens]\nDictionary mapping (source_type, view_type) tuples to their corresponding Lens objects.\n\n\n\n\n\n\n\n\n\nName\nDescription\n\n\n\n\nregister\nRegister a lens as the canonical transformation between two types.\n\n\ntransform\nLook up the lens transformation between two sample types.\n\n\n\n\n\nlens.LensNetwork.register(_lens)\nRegister a lens as the canonical transformation between two types.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\n_lens\nLens\nThe lens to register. Will be stored in the registry under the key (_lens.source_type, _lens.view_type).\nrequired\n\n\n\n\n\n\nIf a lens already exists for the same type pair, it will be overwritten.\n\n\n\n\nlens.LensNetwork.transform(source, view)\nLook up the lens transformation between two sample types.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nsource\nDatasetType\nThe source sample type (must derive from PackableSample).\nrequired\n\n\nview\nDatasetType\nThe target view type (must derive from PackableSample).\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nLens\nThe registered Lens that transforms from source to view.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nValueError\nIf no lens has been registered for the given type pair.\n\n\n\n\n\n\nCurrently only supports direct transformations. 
Compositional transformations (chaining multiple lenses) are not yet implemented.\n\n\n\n\n\n\n\n\n\n\nName\nDescription\n\n\n\n\nlens\nDecorator to create and register a lens transformation.\n\n\n\n\n\nlens.lens(f)\nDecorator to create and register a lens transformation.\nThis decorator converts a getter function into a Lens object and automatically registers it in the global LensNetwork registry.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nf\nLensGetter[S, V]\nA getter function that transforms from source type S to view type V. Must have exactly one parameter with a type annotation.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nLens[S, V]\nA Lens[S, V] object that can be called to apply the transformation\n\n\n\nLens[S, V]\nor decorated with @lens_name.putter to add a putter function.\n\n\n\n\n\n\n::\n>>> @lens\n... def extract_name(full: FullData) -> NameOnly:\n...     return NameOnly(name=full.name)\n...\n>>> @extract_name.putter\n... def extract_name_put(view: NameOnly, source: FullData) -> FullData:\n...     return FullData(name=view.name, age=source.age)"
                                                                                        +    "text": "lens\nLens-based type transformations for datasets.\nThis module implements a lens system for bidirectional transformations between different sample types. Lenses enable viewing a dataset through different type schemas without duplicating the underlying data.\nKey components:\n\nLens: Bidirectional transformation with getter (S -> V) and optional putter (V, S -> S)\nLensNetwork: Global singleton registry for lens transformations\n@lens: Decorator to create and register lens transformations\n\nLenses support the functional programming concept of composable, well-behaved transformations that satisfy lens laws (GetPut and PutGet).\n\n\n>>> @packable\n... class FullData:\n...     name: str\n...     age: int\n...     embedding: NDArray\n...\n>>> @packable\n... class NameOnly:\n...     name: str\n...\n>>> @lens\n... def name_view(full: FullData) -> NameOnly:\n...     return NameOnly(name=full.name)\n...\n>>> @name_view.putter\n... def name_view_put(view: NameOnly, source: FullData) -> FullData:\n...     return FullData(name=view.name, age=source.age,\n...                     embedding=source.embedding)\n...\n>>> ds = Dataset[FullData](\"data.tar\")\n>>> ds_names = ds.as_type(NameOnly)  # Uses registered lens\n\n\n\n\n\n\nName\nDescription\n\n\n\n\nLens\nA bidirectional transformation between two sample types.\n\n\nLensNetwork\nGlobal registry for lens transformations between sample types.\n\n\n\n\n\nlens.Lens(get, put=None)\nA bidirectional transformation between two sample types.\nA lens provides a way to view and update data of type S (source) as if it were type V (view). 
It consists of a getter that transforms S -> V and an optional putter that transforms (V, S) -> S, enabling updates to the view to be reflected back in the source.\n\n\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nS\n\nThe source type, must derive from PackableSample.\nrequired\n\n\nV\n\nThe view type, must derive from PackableSample.\nrequired\n\n\n\n\n\n\n>>> @lens\n... def name_lens(full: FullData) -> NameOnly:\n...     return NameOnly(name=full.name)\n...\n>>> @name_lens.putter\n... def name_lens_put(view: NameOnly, source: FullData) -> FullData:\n...     return FullData(name=view.name, age=source.age)\n\n\n\n\n\n\nName\nDescription\n\n\n\n\nget\nTransform the source into the view type.\n\n\nput\nUpdate the source based on a modified view.\n\n\nputter\nDecorator to register a putter function for this lens.\n\n\n\n\n\nlens.Lens.get(s)\nTransform the source into the view type.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\ns\nS\nThe source sample of type S.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nV\nA view of the source as type V.\n\n\n\n\n\n\n\nlens.Lens.put(v, s)\nUpdate the source based on a modified view.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nv\nV\nThe modified view of type V.\nrequired\n\n\ns\nS\nThe original source of type S.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nS\nAn updated source of type S that reflects changes from the view.\n\n\n\n\n\n\n\nlens.Lens.putter(put)\nDecorator to register a putter function for this lens.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nput\nLensPutter[S, V]\nA function that takes a view of type V and source of type S, and returns an updated source of type S.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nLensPutter[S, V]\nThe putter function, allowing this to be used as a decorator.\n\n\n\n\n\n\n>>> @my_lens.putter\n... def my_lens_put(view: ViewType, source: SourceType) -> SourceType:\n...     
return SourceType(field=view.field, other=source.other)\n\n\n\n\n\n\nlens.LensNetwork()\nGlobal registry for lens transformations between sample types.\nThis class implements a singleton pattern to maintain a global registry of all lenses decorated with @lens. It enables looking up transformations between different PackableSample types.\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n_instance\n\nThe singleton instance of this class.\n\n\n_registry\nDict[LensSignature, Lens]\nDictionary mapping (source_type, view_type) tuples to their corresponding Lens objects.\n\n\n\n\n\n\n\n\n\nName\nDescription\n\n\n\n\nregister\nRegister a lens as the canonical transformation between two types.\n\n\ntransform\nLook up the lens transformation between two sample types.\n\n\n\n\n\nlens.LensNetwork.register(_lens)\nRegister a lens as the canonical transformation between two types.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\n_lens\nLens\nThe lens to register. Will be stored in the registry under the key (_lens.source_type, _lens.view_type).\nrequired\n\n\n\n\n\n\nIf a lens already exists for the same type pair, it will be overwritten.\n\n\n\n\nlens.LensNetwork.transform(source, view)\nLook up the lens transformation between two sample types.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nsource\nDatasetType\nThe source sample type (must derive from PackableSample).\nrequired\n\n\nview\nDatasetType\nThe target view type (must derive from PackableSample).\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nLens\nThe registered Lens that transforms from source to view.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nValueError\nIf no lens has been registered for the given type pair.\n\n\n\n\n\n\nCurrently only supports direct transformations. 
Compositional transformations (chaining multiple lenses) are not yet implemented.\n\n\n\n\n\n\n\n\n\n\nName\nDescription\n\n\n\n\nlens\nDecorator to create and register a lens transformation.\n\n\n\n\n\nlens.lens(f)\nDecorator to create and register a lens transformation.\nThis decorator converts a getter function into a Lens object and automatically registers it in the global LensNetwork registry.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nf\nLensGetter[S, V]\nA getter function that transforms from source type S to view type V. Must have exactly one parameter with a type annotation.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nLens[S, V]\nA Lens[S, V] object that can be called to apply the transformation\n\n\n\nLens[S, V]\nor decorated with @lens_name.putter to add a putter function.\n\n\n\n\n\n\n>>> @lens\n... def extract_name(full: FullData) -> NameOnly:\n...     return NameOnly(name=full.name)\n...\n>>> @extract_name.putter\n... def extract_name_put(view: NameOnly, source: FullData) -> FullData:\n...     return FullData(name=view.name, age=source.age)"
                                                                                           },
                                                                                           {
                                                                                        -    "objectID": "api/Lens.html#example",
                                                                                        -    "href": "api/Lens.html#example",
                                                                                        +    "objectID": "api/Lens.html#examples",
                                                                                        +    "href": "api/Lens.html#examples",
                                                                                             "title": "lens",
                                                                                             "section": "",
                                                                                        -    "text": "::\n>>> @packable\n... class FullData:\n...     name: str\n...     age: int\n...     embedding: NDArray\n...\n>>> @packable\n... class NameOnly:\n...     name: str\n...\n>>> @lens\n... def name_view(full: FullData) -> NameOnly:\n...     return NameOnly(name=full.name)\n...\n>>> @name_view.putter\n... def name_view_put(view: NameOnly, source: FullData) -> FullData:\n...     return FullData(name=view.name, age=source.age,\n...                     embedding=source.embedding)\n...\n>>> ds = Dataset[FullData](\"data.tar\")\n>>> ds_names = ds.as_type(NameOnly)  # Uses registered lens"
                                                                                        +    "text": ">>> @packable\n... class FullData:\n...     name: str\n...     age: int\n...     embedding: NDArray\n...\n>>> @packable\n... class NameOnly:\n...     name: str\n...\n>>> @lens\n... def name_view(full: FullData) -> NameOnly:\n...     return NameOnly(name=full.name)\n...\n>>> @name_view.putter\n... def name_view_put(view: NameOnly, source: FullData) -> FullData:\n...     return FullData(name=view.name, age=source.age,\n...                     embedding=source.embedding)\n...\n>>> ds = Dataset[FullData](\"data.tar\")\n>>> ds_names = ds.as_type(NameOnly)  # Uses registered lens"
                                                                                           },
                                                                                           {
                                                                                             "objectID": "api/Lens.html#classes",
                                                                                             "href": "api/Lens.html#classes",
                                                                                             "title": "lens",
                                                                                             "section": "",
                                                                                        -    "text": "Name\nDescription\n\n\n\n\nLens\nA bidirectional transformation between two sample types.\n\n\nLensNetwork\nGlobal registry for lens transformations between sample types.\n\n\n\n\n\nlens.Lens(get, put=None)\nA bidirectional transformation between two sample types.\nA lens provides a way to view and update data of type S (source) as if it were type V (view). It consists of a getter that transforms S -> V and an optional putter that transforms (V, S) -> S, enabling updates to the view to be reflected back in the source.\n\n\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nS\n\nThe source type, must derive from PackableSample.\nrequired\n\n\nV\n\nThe view type, must derive from PackableSample.\nrequired\n\n\n\n\n\n\n::\n>>> @lens\n... def name_lens(full: FullData) -> NameOnly:\n...     return NameOnly(name=full.name)\n...\n>>> @name_lens.putter\n... def name_lens_put(view: NameOnly, source: FullData) -> FullData:\n...     
return FullData(name=view.name, age=source.age)\n\n\n\n\n\n\nName\nDescription\n\n\n\n\nget\nTransform the source into the view type.\n\n\nput\nUpdate the source based on a modified view.\n\n\nputter\nDecorator to register a putter function for this lens.\n\n\n\n\n\nlens.Lens.get(s)\nTransform the source into the view type.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\ns\nS\nThe source sample of type S.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nV\nA view of the source as type V.\n\n\n\n\n\n\n\nlens.Lens.put(v, s)\nUpdate the source based on a modified view.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nv\nV\nThe modified view of type V.\nrequired\n\n\ns\nS\nThe original source of type S.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nS\nAn updated source of type S that reflects changes from the view.\n\n\n\n\n\n\n\nlens.Lens.putter(put)\nDecorator to register a putter function for this lens.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nput\nLensPutter[S, V]\nA function that takes a view of type V and source of type S, and returns an updated source of type S.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nLensPutter[S, V]\nThe putter function, allowing this to be used as a decorator.\n\n\n\n\n\n\n::\n>>> @my_lens.putter\n... def my_lens_put(view: ViewType, source: SourceType) -> SourceType:\n...     return SourceType(...)\n\n\n\n\n\n\nlens.LensNetwork()\nGlobal registry for lens transformations between sample types.\nThis class implements a singleton pattern to maintain a global registry of all lenses decorated with @lens. 
It enables looking up transformations between different PackableSample types.\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n_instance\n\nThe singleton instance of this class.\n\n\n_registry\nDict[LensSignature, Lens]\nDictionary mapping (source_type, view_type) tuples to their corresponding Lens objects.\n\n\n\n\n\n\n\n\n\nName\nDescription\n\n\n\n\nregister\nRegister a lens as the canonical transformation between two types.\n\n\ntransform\nLook up the lens transformation between two sample types.\n\n\n\n\n\nlens.LensNetwork.register(_lens)\nRegister a lens as the canonical transformation between two types.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\n_lens\nLens\nThe lens to register. Will be stored in the registry under the key (_lens.source_type, _lens.view_type).\nrequired\n\n\n\n\n\n\nIf a lens already exists for the same type pair, it will be overwritten.\n\n\n\n\nlens.LensNetwork.transform(source, view)\nLook up the lens transformation between two sample types.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nsource\nDatasetType\nThe source sample type (must derive from PackableSample).\nrequired\n\n\nview\nDatasetType\nThe target view type (must derive from PackableSample).\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nLens\nThe registered Lens that transforms from source to view.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nValueError\nIf no lens has been registered for the given type pair.\n\n\n\n\n\n\nCurrently only supports direct transformations. Compositional transformations (chaining multiple lenses) are not yet implemented."
                                                                                        +    "text": "Name\nDescription\n\n\n\n\nLens\nA bidirectional transformation between two sample types.\n\n\nLensNetwork\nGlobal registry for lens transformations between sample types.\n\n\n\n\n\nlens.Lens(get, put=None)\nA bidirectional transformation between two sample types.\nA lens provides a way to view and update data of type S (source) as if it were type V (view). It consists of a getter that transforms S -> V and an optional putter that transforms (V, S) -> S, enabling updates to the view to be reflected back in the source.\n\n\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nS\n\nThe source type, must derive from PackableSample.\nrequired\n\n\nV\n\nThe view type, must derive from PackableSample.\nrequired\n\n\n\n\n\n\n>>> @lens\n... def name_lens(full: FullData) -> NameOnly:\n...     return NameOnly(name=full.name)\n...\n>>> @name_lens.putter\n... def name_lens_put(view: NameOnly, source: FullData) -> FullData:\n...     
return FullData(name=view.name, age=source.age)\n\n\n\n\n\n\nName\nDescription\n\n\n\n\nget\nTransform the source into the view type.\n\n\nput\nUpdate the source based on a modified view.\n\n\nputter\nDecorator to register a putter function for this lens.\n\n\n\n\n\nlens.Lens.get(s)\nTransform the source into the view type.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\ns\nS\nThe source sample of type S.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nV\nA view of the source as type V.\n\n\n\n\n\n\n\nlens.Lens.put(v, s)\nUpdate the source based on a modified view.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nv\nV\nThe modified view of type V.\nrequired\n\n\ns\nS\nThe original source of type S.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nS\nAn updated source of type S that reflects changes from the view.\n\n\n\n\n\n\n\nlens.Lens.putter(put)\nDecorator to register a putter function for this lens.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nput\nLensPutter[S, V]\nA function that takes a view of type V and source of type S, and returns an updated source of type S.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nLensPutter[S, V]\nThe putter function, allowing this to be used as a decorator.\n\n\n\n\n\n\n>>> @my_lens.putter\n... def my_lens_put(view: ViewType, source: SourceType) -> SourceType:\n...     return SourceType(field=view.field, other=source.other)\n\n\n\n\n\n\nlens.LensNetwork()\nGlobal registry for lens transformations between sample types.\nThis class implements a singleton pattern to maintain a global registry of all lenses decorated with @lens. 
It enables looking up transformations between different PackableSample types.\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n_instance\n\nThe singleton instance of this class.\n\n\n_registry\nDict[LensSignature, Lens]\nDictionary mapping (source_type, view_type) tuples to their corresponding Lens objects.\n\n\n\n\n\n\n\n\n\nName\nDescription\n\n\n\n\nregister\nRegister a lens as the canonical transformation between two types.\n\n\ntransform\nLook up the lens transformation between two sample types.\n\n\n\n\n\nlens.LensNetwork.register(_lens)\nRegister a lens as the canonical transformation between two types.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\n_lens\nLens\nThe lens to register. Will be stored in the registry under the key (_lens.source_type, _lens.view_type).\nrequired\n\n\n\n\n\n\nIf a lens already exists for the same type pair, it will be overwritten.\n\n\n\n\nlens.LensNetwork.transform(source, view)\nLook up the lens transformation between two sample types.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nsource\nDatasetType\nThe source sample type (must derive from PackableSample).\nrequired\n\n\nview\nDatasetType\nThe target view type (must derive from PackableSample).\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nLens\nThe registered Lens that transforms from source to view.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nValueError\nIf no lens has been registered for the given type pair.\n\n\n\n\n\n\nCurrently only supports direct transformations. Compositional transformations (chaining multiple lenses) are not yet implemented."
                                                                                           },
                                                                                           {
                                                                                             "objectID": "api/Lens.html#functions",
                                                                                             "href": "api/Lens.html#functions",
                                                                                             "title": "lens",
                                                                                             "section": "",
                                                                                        -    "text": "Name\nDescription\n\n\n\n\nlens\nDecorator to create and register a lens transformation.\n\n\n\n\n\nlens.lens(f)\nDecorator to create and register a lens transformation.\nThis decorator converts a getter function into a Lens object and automatically registers it in the global LensNetwork registry.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nf\nLensGetter[S, V]\nA getter function that transforms from source type S to view type V. Must have exactly one parameter with a type annotation.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nLens[S, V]\nA Lens[S, V] object that can be called to apply the transformation\n\n\n\nLens[S, V]\nor decorated with @lens_name.putter to add a putter function.\n\n\n\n\n\n\n::\n>>> @lens\n... def extract_name(full: FullData) -> NameOnly:\n...     return NameOnly(name=full.name)\n...\n>>> @extract_name.putter\n... def extract_name_put(view: NameOnly, source: FullData) -> FullData:\n...     return FullData(name=view.name, age=source.age)"
                                                                                        +    "text": "Name\nDescription\n\n\n\n\nlens\nDecorator to create and register a lens transformation.\n\n\n\n\n\nlens.lens(f)\nDecorator to create and register a lens transformation.\nThis decorator converts a getter function into a Lens object and automatically registers it in the global LensNetwork registry.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nf\nLensGetter[S, V]\nA getter function that transforms from source type S to view type V. Must have exactly one parameter with a type annotation.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nLens[S, V]\nA Lens[S, V] object that can be called to apply the transformation\n\n\n\nLens[S, V]\nor decorated with @lens_name.putter to add a putter function.\n\n\n\n\n\n\n>>> @lens\n... def extract_name(full: FullData) -> NameOnly:\n...     return NameOnly(name=full.name)\n...\n>>> @extract_name.putter\n... def extract_name_put(view: NameOnly, source: FullData) -> FullData:\n...     return FullData(name=view.name, age=source.age)"
                                                                                           },
                                                                                           {
                                                                                             "objectID": "api/DatasetLoader.html",
                                                                                             "href": "api/DatasetLoader.html",
                                                                                             "title": "DatasetLoader",
                                                                                             "section": "",
                                                                                        -    "text": "atmosphere.DatasetLoader(client)\nLoads dataset records from ATProto.\nThis class fetches dataset index records and can create Dataset objects from them. Note that loading a dataset requires having the corresponding Python class for the sample type.\n\n\n::\n>>> client = AtmosphereClient()\n>>> loader = DatasetLoader(client)\n>>>\n>>> # List available datasets\n>>> datasets = loader.list()\n>>> for ds in datasets:\n...     print(ds[\"name\"], ds[\"schemaRef\"])\n>>>\n>>> # Get a specific dataset record\n>>> record = loader.get(\"at://did:plc:abc/ac.foundation.dataset.record/xyz\")\n\n\n\n\n\n\nName\nDescription\n\n\n\n\nget\nFetch a dataset record by AT URI.\n\n\nget_blob_urls\nGet fetchable URLs for blob-stored dataset shards.\n\n\nget_blobs\nGet the blob references from a dataset record.\n\n\nget_metadata\nGet the metadata from a dataset record.\n\n\nget_storage_type\nGet the storage type of a dataset record.\n\n\nget_urls\nGet the WebDataset URLs from a dataset record.\n\n\nlist_all\nList dataset records from a repository.\n\n\nto_dataset\nCreate a Dataset object from an ATProto record.\n\n\n\n\n\natmosphere.DatasetLoader.get(uri)\nFetch a dataset record by AT URI.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nuri\nstr | AtUri\nThe AT URI of the dataset record.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\ndict\nThe dataset record as a dictionary.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nValueError\nIf the record is not a dataset record.\n\n\n\n\n\n\n\natmosphere.DatasetLoader.get_blob_urls(uri)\nGet fetchable URLs for blob-stored dataset shards.\nThis resolves the PDS endpoint and constructs URLs that can be used to fetch the blob data directly.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nuri\nstr | AtUri\nThe AT URI of the dataset 
record.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nlist[str]\nList of URLs for fetching the blob data.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nValueError\nIf storage type is not blobs or PDS cannot be resolved.\n\n\n\n\n\n\n\natmosphere.DatasetLoader.get_blobs(uri)\nGet the blob references from a dataset record.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nuri\nstr | AtUri\nThe AT URI of the dataset record.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nlist[dict]\nList of blob reference dicts with keys: $type, ref, mimeType, size.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nValueError\nIf the storage type is not blobs.\n\n\n\n\n\n\n\natmosphere.DatasetLoader.get_metadata(uri)\nGet the metadata from a dataset record.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nuri\nstr | AtUri\nThe AT URI of the dataset record.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nOptional[dict]\nThe metadata dictionary, or None if no metadata.\n\n\n\n\n\n\n\natmosphere.DatasetLoader.get_storage_type(uri)\nGet the storage type of a dataset record.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nuri\nstr | AtUri\nThe AT URI of the dataset record.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nstr\nEither “external” or “blobs”.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nValueError\nIf storage type is unknown.\n\n\n\n\n\n\n\natmosphere.DatasetLoader.get_urls(uri)\nGet the WebDataset URLs from a dataset record.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nuri\nstr | AtUri\nThe AT URI of the dataset record.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nlist[str]\nList of WebDataset URLs.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nValueError\nIf the storage type is not external URLs.\n\n\n\n\n\n\n\natmosphere.DatasetLoader.list_all(repo=None, limit=100)\nList dataset records from a 
repository.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nrepo\nOptional[str]\nThe DID of the repository. Defaults to authenticated user.\nNone\n\n\nlimit\nint\nMaximum number of records to return.\n100\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nlist[dict]\nList of dataset records.\n\n\n\n\n\n\n\natmosphere.DatasetLoader.to_dataset(uri, sample_type)\nCreate a Dataset object from an ATProto record.\nThis method creates a Dataset instance from a published record. You must provide the sample type class, which should match the schema referenced by the record.\nSupports both external URL storage and ATProto blob storage.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nuri\nstr | AtUri\nThe AT URI of the dataset record.\nrequired\n\n\nsample_type\nType[ST]\nThe Python class for the sample type.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nDataset[ST]\nA Dataset instance configured from the record.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nValueError\nIf no storage URLs can be resolved.\n\n\n\n\n\n\n::\n>>> loader = DatasetLoader(client)\n>>> dataset = loader.to_dataset(uri, MySampleType)\n>>> for batch in dataset.shuffled(batch_size=32):\n...     process(batch)"
                                                                                        +    "text": "atmosphere.DatasetLoader(client)\nLoads dataset records from ATProto.\nThis class fetches dataset index records and can create Dataset objects from them. Note that loading a dataset requires having the corresponding Python class for the sample type.\n\n\n>>> client = AtmosphereClient()\n>>> loader = DatasetLoader(client)\n>>>\n>>> # List available datasets\n>>> datasets = loader.list()\n>>> for ds in datasets:\n...     print(ds[\"name\"], ds[\"schemaRef\"])\n>>>\n>>> # Get a specific dataset record\n>>> record = loader.get(\"at://did:plc:abc/ac.foundation.dataset.record/xyz\")\n\n\n\n\n\n\nName\nDescription\n\n\n\n\nget\nFetch a dataset record by AT URI.\n\n\nget_blob_urls\nGet fetchable URLs for blob-stored dataset shards.\n\n\nget_blobs\nGet the blob references from a dataset record.\n\n\nget_metadata\nGet the metadata from a dataset record.\n\n\nget_storage_type\nGet the storage type of a dataset record.\n\n\nget_urls\nGet the WebDataset URLs from a dataset record.\n\n\nlist_all\nList dataset records from a repository.\n\n\nto_dataset\nCreate a Dataset object from an ATProto record.\n\n\n\n\n\natmosphere.DatasetLoader.get(uri)\nFetch a dataset record by AT URI.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nuri\nstr | AtUri\nThe AT URI of the dataset record.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\ndict\nThe dataset record as a dictionary.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nValueError\nIf the record is not a dataset record.\n\n\n\n\n\n\n\natmosphere.DatasetLoader.get_blob_urls(uri)\nGet fetchable URLs for blob-stored dataset shards.\nThis resolves the PDS endpoint and constructs URLs that can be used to fetch the blob data directly.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nuri\nstr | AtUri\nThe AT URI of the dataset record.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nlist[str]\nList 
of URLs for fetching the blob data.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nValueError\nIf storage type is not blobs or PDS cannot be resolved.\n\n\n\n\n\n\n\natmosphere.DatasetLoader.get_blobs(uri)\nGet the blob references from a dataset record.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nuri\nstr | AtUri\nThe AT URI of the dataset record.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nlist[dict]\nList of blob reference dicts with keys: $type, ref, mimeType, size.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nValueError\nIf the storage type is not blobs.\n\n\n\n\n\n\n\natmosphere.DatasetLoader.get_metadata(uri)\nGet the metadata from a dataset record.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nuri\nstr | AtUri\nThe AT URI of the dataset record.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nOptional[dict]\nThe metadata dictionary, or None if no metadata.\n\n\n\n\n\n\n\natmosphere.DatasetLoader.get_storage_type(uri)\nGet the storage type of a dataset record.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nuri\nstr | AtUri\nThe AT URI of the dataset record.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nstr\nEither “external” or “blobs”.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nValueError\nIf storage type is unknown.\n\n\n\n\n\n\n\natmosphere.DatasetLoader.get_urls(uri)\nGet the WebDataset URLs from a dataset record.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nuri\nstr | AtUri\nThe AT URI of the dataset record.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nlist[str]\nList of WebDataset URLs.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nValueError\nIf the storage type is not external URLs.\n\n\n\n\n\n\n\natmosphere.DatasetLoader.list_all(repo=None, limit=100)\nList dataset records from a repository.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nrepo\nOptional[str]\nThe DID of the repository. 
Defaults to authenticated user.\nNone\n\n\nlimit\nint\nMaximum number of records to return.\n100\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nlist[dict]\nList of dataset records.\n\n\n\n\n\n\n\natmosphere.DatasetLoader.to_dataset(uri, sample_type)\nCreate a Dataset object from an ATProto record.\nThis method creates a Dataset instance from a published record. You must provide the sample type class, which should match the schema referenced by the record.\nSupports both external URL storage and ATProto blob storage.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nuri\nstr | AtUri\nThe AT URI of the dataset record.\nrequired\n\n\nsample_type\nType[ST]\nThe Python class for the sample type.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nDataset[ST]\nA Dataset instance configured from the record.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nValueError\nIf no storage URLs can be resolved.\n\n\n\n\n\n\n>>> loader = DatasetLoader(client)\n>>> dataset = loader.to_dataset(uri, MySampleType)\n>>> for batch in dataset.shuffled(batch_size=32):\n...     process(batch)"
                                                                                           },
                                                                                           {
                                                                                        -    "objectID": "api/DatasetLoader.html#example",
                                                                                        -    "href": "api/DatasetLoader.html#example",
                                                                                        +    "objectID": "api/DatasetLoader.html#examples",
                                                                                        +    "href": "api/DatasetLoader.html#examples",
                                                                                             "title": "DatasetLoader",
                                                                                             "section": "",
                                                                                        -    "text": "::\n>>> client = AtmosphereClient()\n>>> loader = DatasetLoader(client)\n>>>\n>>> # List available datasets\n>>> datasets = loader.list()\n>>> for ds in datasets:\n...     print(ds[\"name\"], ds[\"schemaRef\"])\n>>>\n>>> # Get a specific dataset record\n>>> record = loader.get(\"at://did:plc:abc/ac.foundation.dataset.record/xyz\")"
                                                                                        +    "text": ">>> client = AtmosphereClient()\n>>> loader = DatasetLoader(client)\n>>>\n>>> # List available datasets\n>>> datasets = loader.list()\n>>> for ds in datasets:\n...     print(ds[\"name\"], ds[\"schemaRef\"])\n>>>\n>>> # Get a specific dataset record\n>>> record = loader.get(\"at://did:plc:abc/ac.foundation.dataset.record/xyz\")"
                                                                                           },
                                                                                           {
                                                                                             "objectID": "api/DatasetLoader.html#methods",
                                                                                             "href": "api/DatasetLoader.html#methods",
                                                                                             "title": "DatasetLoader",
                                                                                             "section": "",
                                                                                        -    "text": "Name\nDescription\n\n\n\n\nget\nFetch a dataset record by AT URI.\n\n\nget_blob_urls\nGet fetchable URLs for blob-stored dataset shards.\n\n\nget_blobs\nGet the blob references from a dataset record.\n\n\nget_metadata\nGet the metadata from a dataset record.\n\n\nget_storage_type\nGet the storage type of a dataset record.\n\n\nget_urls\nGet the WebDataset URLs from a dataset record.\n\n\nlist_all\nList dataset records from a repository.\n\n\nto_dataset\nCreate a Dataset object from an ATProto record.\n\n\n\n\n\natmosphere.DatasetLoader.get(uri)\nFetch a dataset record by AT URI.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nuri\nstr | AtUri\nThe AT URI of the dataset record.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\ndict\nThe dataset record as a dictionary.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nValueError\nIf the record is not a dataset record.\n\n\n\n\n\n\n\natmosphere.DatasetLoader.get_blob_urls(uri)\nGet fetchable URLs for blob-stored dataset shards.\nThis resolves the PDS endpoint and constructs URLs that can be used to fetch the blob data directly.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nuri\nstr | AtUri\nThe AT URI of the dataset record.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nlist[str]\nList of URLs for fetching the blob data.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nValueError\nIf storage type is not blobs or PDS cannot be resolved.\n\n\n\n\n\n\n\natmosphere.DatasetLoader.get_blobs(uri)\nGet the blob references from a dataset record.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nuri\nstr | AtUri\nThe AT URI of the dataset record.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nlist[dict]\nList of blob reference dicts with keys: $type, ref, mimeType, size.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nValueError\nIf the storage type 
is not blobs.\n\n\n\n\n\n\n\natmosphere.DatasetLoader.get_metadata(uri)\nGet the metadata from a dataset record.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nuri\nstr | AtUri\nThe AT URI of the dataset record.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nOptional[dict]\nThe metadata dictionary, or None if no metadata.\n\n\n\n\n\n\n\natmosphere.DatasetLoader.get_storage_type(uri)\nGet the storage type of a dataset record.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nuri\nstr | AtUri\nThe AT URI of the dataset record.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nstr\nEither “external” or “blobs”.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nValueError\nIf storage type is unknown.\n\n\n\n\n\n\n\natmosphere.DatasetLoader.get_urls(uri)\nGet the WebDataset URLs from a dataset record.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nuri\nstr | AtUri\nThe AT URI of the dataset record.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nlist[str]\nList of WebDataset URLs.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nValueError\nIf the storage type is not external URLs.\n\n\n\n\n\n\n\natmosphere.DatasetLoader.list_all(repo=None, limit=100)\nList dataset records from a repository.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nrepo\nOptional[str]\nThe DID of the repository. Defaults to authenticated user.\nNone\n\n\nlimit\nint\nMaximum number of records to return.\n100\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nlist[dict]\nList of dataset records.\n\n\n\n\n\n\n\natmosphere.DatasetLoader.to_dataset(uri, sample_type)\nCreate a Dataset object from an ATProto record.\nThis method creates a Dataset instance from a published record. 
You must provide the sample type class, which should match the schema referenced by the record.\nSupports both external URL storage and ATProto blob storage.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nuri\nstr | AtUri\nThe AT URI of the dataset record.\nrequired\n\n\nsample_type\nType[ST]\nThe Python class for the sample type.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nDataset[ST]\nA Dataset instance configured from the record.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nValueError\nIf no storage URLs can be resolved.\n\n\n\n\n\n\n::\n>>> loader = DatasetLoader(client)\n>>> dataset = loader.to_dataset(uri, MySampleType)\n>>> for batch in dataset.shuffled(batch_size=32):\n...     process(batch)"
                                                                                        +    "text": "Name\nDescription\n\n\n\n\nget\nFetch a dataset record by AT URI.\n\n\nget_blob_urls\nGet fetchable URLs for blob-stored dataset shards.\n\n\nget_blobs\nGet the blob references from a dataset record.\n\n\nget_metadata\nGet the metadata from a dataset record.\n\n\nget_storage_type\nGet the storage type of a dataset record.\n\n\nget_urls\nGet the WebDataset URLs from a dataset record.\n\n\nlist_all\nList dataset records from a repository.\n\n\nto_dataset\nCreate a Dataset object from an ATProto record.\n\n\n\n\n\natmosphere.DatasetLoader.get(uri)\nFetch a dataset record by AT URI.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nuri\nstr | AtUri\nThe AT URI of the dataset record.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\ndict\nThe dataset record as a dictionary.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nValueError\nIf the record is not a dataset record.\n\n\n\n\n\n\n\natmosphere.DatasetLoader.get_blob_urls(uri)\nGet fetchable URLs for blob-stored dataset shards.\nThis resolves the PDS endpoint and constructs URLs that can be used to fetch the blob data directly.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nuri\nstr | AtUri\nThe AT URI of the dataset record.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nlist[str]\nList of URLs for fetching the blob data.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nValueError\nIf storage type is not blobs or PDS cannot be resolved.\n\n\n\n\n\n\n\natmosphere.DatasetLoader.get_blobs(uri)\nGet the blob references from a dataset record.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nuri\nstr | AtUri\nThe AT URI of the dataset record.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nlist[dict]\nList of blob reference dicts with keys: $type, ref, mimeType, size.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nValueError\nIf the storage type 
is not blobs.\n\n\n\n\n\n\n\natmosphere.DatasetLoader.get_metadata(uri)\nGet the metadata from a dataset record.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nuri\nstr | AtUri\nThe AT URI of the dataset record.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nOptional[dict]\nThe metadata dictionary, or None if no metadata.\n\n\n\n\n\n\n\natmosphere.DatasetLoader.get_storage_type(uri)\nGet the storage type of a dataset record.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nuri\nstr | AtUri\nThe AT URI of the dataset record.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nstr\nEither “external” or “blobs”.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nValueError\nIf storage type is unknown.\n\n\n\n\n\n\n\natmosphere.DatasetLoader.get_urls(uri)\nGet the WebDataset URLs from a dataset record.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nuri\nstr | AtUri\nThe AT URI of the dataset record.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nlist[str]\nList of WebDataset URLs.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nValueError\nIf the storage type is not external URLs.\n\n\n\n\n\n\n\natmosphere.DatasetLoader.list_all(repo=None, limit=100)\nList dataset records from a repository.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nrepo\nOptional[str]\nThe DID of the repository. Defaults to authenticated user.\nNone\n\n\nlimit\nint\nMaximum number of records to return.\n100\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nlist[dict]\nList of dataset records.\n\n\n\n\n\n\n\natmosphere.DatasetLoader.to_dataset(uri, sample_type)\nCreate a Dataset object from an ATProto record.\nThis method creates a Dataset instance from a published record. 
You must provide the sample type class, which should match the schema referenced by the record.\nSupports both external URL storage and ATProto blob storage.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nuri\nstr | AtUri\nThe AT URI of the dataset record.\nrequired\n\n\nsample_type\nType[ST]\nThe Python class for the sample type.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nDataset[ST]\nA Dataset instance configured from the record.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nValueError\nIf no storage URLs can be resolved.\n\n\n\n\n\n\n>>> loader = DatasetLoader(client)\n>>> dataset = loader.to_dataset(uri, MySampleType)\n>>> for batch in dataset.shuffled(batch_size=32):\n...     process(batch)"
                                                                                           },
                                                                                           {
                                                                                             "objectID": "api/DataSource.html",
                                                                                             "href": "api/DataSource.html",
                                                                                             "title": "DataSource",
                                                                                             "section": "",
                                                                                        -    "text": "DataSource()\nProtocol for data sources that provide streams to Dataset.\nA DataSource abstracts over different ways of accessing dataset shards: - URLSource: Standard WebDataset-compatible URLs (http, https, pipe, gs, etc.) - S3Source: S3-compatible storage with explicit credentials - BlobSource: ATProto blob references (future)\nThe key method is shards(), which yields (identifier, stream) pairs. These are fed directly to WebDataset’s tar_file_expander, bypassing URL resolution entirely. This enables: - Private S3 repos with credentials - Custom endpoints (Cloudflare R2, MinIO) - ATProto blob streaming - Any other source that can provide file-like objects\n\n\n::\n>>> source = S3Source(\n...     bucket=\"my-bucket\",\n...     keys=[\"data-000.tar\", \"data-001.tar\"],\n...     endpoint=\"https://r2.example.com\",\n...     credentials=creds,\n... )\n>>> ds = Dataset[MySample](source)\n>>> for sample in ds.ordered():\n...     print(sample)\n\n\n\n\n\n\nName\nDescription\n\n\n\n\nshards\nLazily yield (identifier, stream) pairs for each shard.\n\n\n\n\n\n\n\n\n\nName\nDescription\n\n\n\n\nlist_shards\nGet list of shard identifiers without opening streams.\n\n\nopen_shard\nOpen a single shard by its identifier.\n\n\n\n\n\nDataSource.list_shards()\nGet list of shard identifiers without opening streams.\nUsed for metadata queries like counting shards without actually streaming data. Implementations should return identifiers that match what shards would yield.\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nlist[str]\nList of shard identifier strings.\n\n\n\n\n\n\n\nDataSource.open_shard(shard_id)\nOpen a single shard by its identifier.\nThis method enables random access to individual shards, which is required for PyTorch DataLoader worker splitting. 
Each worker opens only its assigned shards rather than iterating all shards.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nshard_id\nstr\nShard identifier from shard_list.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nIO[bytes]\nFile-like stream for reading the shard.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nKeyError\nIf shard_id is not in shard_list."
                                                                                        +    "text": "DataSource()\nProtocol for data sources that provide streams to Dataset.\nA DataSource abstracts over different ways of accessing dataset shards: - URLSource: Standard WebDataset-compatible URLs (http, https, pipe, gs, etc.) - S3Source: S3-compatible storage with explicit credentials - BlobSource: ATProto blob references (future)\nThe key method is shards(), which yields (identifier, stream) pairs. These are fed directly to WebDataset’s tar_file_expander, bypassing URL resolution entirely. This enables: - Private S3 repos with credentials - Custom endpoints (Cloudflare R2, MinIO) - ATProto blob streaming - Any other source that can provide file-like objects\n\n\n>>> source = S3Source(\n...     bucket=\"my-bucket\",\n...     keys=[\"data-000.tar\", \"data-001.tar\"],\n...     endpoint=\"https://r2.example.com\",\n...     credentials=creds,\n... )\n>>> ds = Dataset[MySample](source)\n>>> for sample in ds.ordered():\n...     print(sample)\n\n\n\n\n\n\nName\nDescription\n\n\n\n\nshards\nLazily yield (identifier, stream) pairs for each shard.\n\n\n\n\n\n\n\n\n\nName\nDescription\n\n\n\n\nlist_shards\nGet list of shard identifiers without opening streams.\n\n\nopen_shard\nOpen a single shard by its identifier.\n\n\n\n\n\nDataSource.list_shards()\nGet list of shard identifiers without opening streams.\nUsed for metadata queries like counting shards without actually streaming data. Implementations should return identifiers that match what shards would yield.\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nlist[str]\nList of shard identifier strings.\n\n\n\n\n\n\n\nDataSource.open_shard(shard_id)\nOpen a single shard by its identifier.\nThis method enables random access to individual shards, which is required for PyTorch DataLoader worker splitting. 
Each worker opens only its assigned shards rather than iterating all shards.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nshard_id\nstr\nShard identifier from shard_list.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nIO[bytes]\nFile-like stream for reading the shard.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nKeyError\nIf shard_id is not in shard_list."
                                                                                           },
                                                                                           {
                                                                                        -    "objectID": "api/DataSource.html#example",
                                                                                        -    "href": "api/DataSource.html#example",
                                                                                        +    "objectID": "api/DataSource.html#examples",
                                                                                        +    "href": "api/DataSource.html#examples",
                                                                                             "title": "DataSource",
                                                                                             "section": "",
                                                                                        -    "text": "::\n>>> source = S3Source(\n...     bucket=\"my-bucket\",\n...     keys=[\"data-000.tar\", \"data-001.tar\"],\n...     endpoint=\"https://r2.example.com\",\n...     credentials=creds,\n... )\n>>> ds = Dataset[MySample](source)\n>>> for sample in ds.ordered():\n...     print(sample)"
                                                                                        +    "text": ">>> source = S3Source(\n...     bucket=\"my-bucket\",\n...     keys=[\"data-000.tar\", \"data-001.tar\"],\n...     endpoint=\"https://r2.example.com\",\n...     credentials=creds,\n... )\n>>> ds = Dataset[MySample](source)\n>>> for sample in ds.ordered():\n...     print(sample)"
                                                                                           },
                                                                                           {
                                                                                             "objectID": "api/DataSource.html#attributes",
                                                                                        @@ -1903,14 +1903,14 @@
                                                                                             "href": "api/AtmosphereIndex.html",
                                                                                             "title": "AtmosphereIndex",
                                                                                             "section": "",
                                                                                        -    "text": "atmosphere.AtmosphereIndex(client, *, data_store=None)\nATProto index implementing AbstractIndex protocol.\nWraps SchemaPublisher/Loader and DatasetPublisher/Loader to provide a unified interface compatible with LocalIndex.\nOptionally accepts a PDSBlobStore for writing dataset shards as ATProto blobs, enabling fully decentralized dataset storage.\n\n\n::\n>>> client = AtmosphereClient()\n>>> client.login(\"handle.bsky.social\", \"app-password\")\n>>>\n>>> # Without blob storage (external URLs only)\n>>> index = AtmosphereIndex(client)\n>>>\n>>> # With PDS blob storage\n>>> store = PDSBlobStore(client)\n>>> index = AtmosphereIndex(client, data_store=store)\n>>> entry = index.insert_dataset(dataset, name=\"my-data\")\n\n\n\n\n\n\nName\nDescription\n\n\n\n\ndata_store\nThe PDS blob store for writing shards, or None if not configured.\n\n\ndatasets\nLazily iterate over all dataset entries (AbstractIndex protocol).\n\n\nschemas\nLazily iterate over all schema records (AbstractIndex protocol).\n\n\n\n\n\n\n\n\n\nName\nDescription\n\n\n\n\ndecode_schema\nReconstruct a Python type from a schema record.\n\n\nget_dataset\nGet a dataset by AT URI.\n\n\nget_schema\nGet a schema record by AT URI.\n\n\ninsert_dataset\nInsert a dataset into ATProto.\n\n\nlist_datasets\nGet all dataset entries as a materialized list (AbstractIndex protocol).\n\n\nlist_schemas\nGet all schema records as a materialized list (AbstractIndex protocol).\n\n\npublish_schema\nPublish a schema to ATProto.\n\n\n\n\n\natmosphere.AtmosphereIndex.decode_schema(ref)\nReconstruct a Python type from a schema record.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nref\nstr\nAT URI of the schema record.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nType[Packable]\nDynamically generated Packable type.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nValueError\nIf schema cannot be 
decoded.\n\n\n\n\n\n\n\natmosphere.AtmosphereIndex.get_dataset(ref)\nGet a dataset by AT URI.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nref\nstr\nAT URI of the dataset record.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nAtmosphereIndexEntry\nAtmosphereIndexEntry for the dataset.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nValueError\nIf record is not a dataset.\n\n\n\n\n\n\n\natmosphere.AtmosphereIndex.get_schema(ref)\nGet a schema record by AT URI.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nref\nstr\nAT URI of the schema record.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\ndict\nSchema record dictionary.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nValueError\nIf record is not a schema.\n\n\n\n\n\n\n\natmosphere.AtmosphereIndex.insert_dataset(\n    ds,\n    *,\n    name,\n    schema_ref=None,\n    **kwargs,\n)\nInsert a dataset into ATProto.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nds\nDataset\nThe Dataset to publish.\nrequired\n\n\nname\nstr\nHuman-readable name.\nrequired\n\n\nschema_ref\nOptional[str]\nOptional schema AT URI. If None, auto-publishes schema.\nNone\n\n\n**kwargs\n\nAdditional options (description, tags, license).\n{}\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nAtmosphereIndexEntry\nAtmosphereIndexEntry for the inserted dataset.\n\n\n\n\n\n\n\natmosphere.AtmosphereIndex.list_datasets(repo=None)\nGet all dataset entries as a materialized list (AbstractIndex protocol).\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nrepo\nOptional[str]\nDID of repository. 
Defaults to authenticated user.\nNone\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nlist[AtmosphereIndexEntry]\nList of AtmosphereIndexEntry for each dataset.\n\n\n\n\n\n\n\natmosphere.AtmosphereIndex.list_schemas(repo=None)\nGet all schema records as a materialized list (AbstractIndex protocol).\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nrepo\nOptional[str]\nDID of repository. Defaults to authenticated user.\nNone\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nlist[dict]\nList of schema records as dictionaries.\n\n\n\n\n\n\n\natmosphere.AtmosphereIndex.publish_schema(\n    sample_type,\n    *,\n    version='1.0.0',\n    **kwargs,\n)\nPublish a schema to ATProto.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nsample_type\nType[Packable]\nA Packable type (PackableSample subclass or @packable-decorated).\nrequired\n\n\nversion\nstr\nSemantic version string.\n'1.0.0'\n\n\n**kwargs\n\nAdditional options (description, metadata).\n{}\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nstr\nAT URI of the schema record."
                                                                                        +    "text": "atmosphere.AtmosphereIndex(client, *, data_store=None)\nATProto index implementing AbstractIndex protocol.\nWraps SchemaPublisher/Loader and DatasetPublisher/Loader to provide a unified interface compatible with LocalIndex.\nOptionally accepts a PDSBlobStore for writing dataset shards as ATProto blobs, enabling fully decentralized dataset storage.\n\n\n>>> client = AtmosphereClient()\n>>> client.login(\"handle.bsky.social\", \"app-password\")\n>>>\n>>> # Without blob storage (external URLs only)\n>>> index = AtmosphereIndex(client)\n>>>\n>>> # With PDS blob storage\n>>> store = PDSBlobStore(client)\n>>> index = AtmosphereIndex(client, data_store=store)\n>>> entry = index.insert_dataset(dataset, name=\"my-data\")\n\n\n\n\n\n\nName\nDescription\n\n\n\n\ndata_store\nThe PDS blob store for writing shards, or None if not configured.\n\n\ndatasets\nLazily iterate over all dataset entries (AbstractIndex protocol).\n\n\nschemas\nLazily iterate over all schema records (AbstractIndex protocol).\n\n\n\n\n\n\n\n\n\nName\nDescription\n\n\n\n\ndecode_schema\nReconstruct a Python type from a schema record.\n\n\nget_dataset\nGet a dataset by AT URI.\n\n\nget_schema\nGet a schema record by AT URI.\n\n\ninsert_dataset\nInsert a dataset into ATProto.\n\n\nlist_datasets\nGet all dataset entries as a materialized list (AbstractIndex protocol).\n\n\nlist_schemas\nGet all schema records as a materialized list (AbstractIndex protocol).\n\n\npublish_schema\nPublish a schema to ATProto.\n\n\n\n\n\natmosphere.AtmosphereIndex.decode_schema(ref)\nReconstruct a Python type from a schema record.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nref\nstr\nAT URI of the schema record.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nType[Packable]\nDynamically generated Packable type.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nValueError\nIf schema cannot be 
decoded.\n\n\n\n\n\n\n\natmosphere.AtmosphereIndex.get_dataset(ref)\nGet a dataset by AT URI.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nref\nstr\nAT URI of the dataset record.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nAtmosphereIndexEntry\nAtmosphereIndexEntry for the dataset.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nValueError\nIf record is not a dataset.\n\n\n\n\n\n\n\natmosphere.AtmosphereIndex.get_schema(ref)\nGet a schema record by AT URI.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nref\nstr\nAT URI of the schema record.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\ndict\nSchema record dictionary.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nValueError\nIf record is not a schema.\n\n\n\n\n\n\n\natmosphere.AtmosphereIndex.insert_dataset(\n    ds,\n    *,\n    name,\n    schema_ref=None,\n    **kwargs,\n)\nInsert a dataset into ATProto.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nds\nDataset\nThe Dataset to publish.\nrequired\n\n\nname\nstr\nHuman-readable name.\nrequired\n\n\nschema_ref\nOptional[str]\nOptional schema AT URI. If None, auto-publishes schema.\nNone\n\n\n**kwargs\n\nAdditional options (description, tags, license).\n{}\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nAtmosphereIndexEntry\nAtmosphereIndexEntry for the inserted dataset.\n\n\n\n\n\n\n\natmosphere.AtmosphereIndex.list_datasets(repo=None)\nGet all dataset entries as a materialized list (AbstractIndex protocol).\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nrepo\nOptional[str]\nDID of repository. 
Defaults to authenticated user.\nNone\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nlist[AtmosphereIndexEntry]\nList of AtmosphereIndexEntry for each dataset.\n\n\n\n\n\n\n\natmosphere.AtmosphereIndex.list_schemas(repo=None)\nGet all schema records as a materialized list (AbstractIndex protocol).\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nrepo\nOptional[str]\nDID of repository. Defaults to authenticated user.\nNone\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nlist[dict]\nList of schema records as dictionaries.\n\n\n\n\n\n\n\natmosphere.AtmosphereIndex.publish_schema(\n    sample_type,\n    *,\n    version='1.0.0',\n    **kwargs,\n)\nPublish a schema to ATProto.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nsample_type\nType[Packable]\nA Packable type (PackableSample subclass or @packable-decorated).\nrequired\n\n\nversion\nstr\nSemantic version string.\n'1.0.0'\n\n\n**kwargs\n\nAdditional options (description, metadata).\n{}\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nstr\nAT URI of the schema record."
                                                                                           },
                                                                                           {
                                                                                        -    "objectID": "api/AtmosphereIndex.html#example",
                                                                                        -    "href": "api/AtmosphereIndex.html#example",
                                                                                        +    "objectID": "api/AtmosphereIndex.html#examples",
                                                                                        +    "href": "api/AtmosphereIndex.html#examples",
                                                                                             "title": "AtmosphereIndex",
                                                                                             "section": "",
                                                                                        -    "text": "::\n>>> client = AtmosphereClient()\n>>> client.login(\"handle.bsky.social\", \"app-password\")\n>>>\n>>> # Without blob storage (external URLs only)\n>>> index = AtmosphereIndex(client)\n>>>\n>>> # With PDS blob storage\n>>> store = PDSBlobStore(client)\n>>> index = AtmosphereIndex(client, data_store=store)\n>>> entry = index.insert_dataset(dataset, name=\"my-data\")"
                                                                                        +    "text": ">>> client = AtmosphereClient()\n>>> client.login(\"handle.bsky.social\", \"app-password\")\n>>>\n>>> # Without blob storage (external URLs only)\n>>> index = AtmosphereIndex(client)\n>>>\n>>> # With PDS blob storage\n>>> store = PDSBlobStore(client)\n>>> index = AtmosphereIndex(client, data_store=store)\n>>> entry = index.insert_dataset(dataset, name=\"my-data\")"
                                                                                           },
                                                                                           {
                                                                                             "objectID": "api/AtmosphereIndex.html#attributes",
                                                                                        @@ -1931,14 +1931,14 @@
                                                                                             "href": "api/LensLoader.html",
                                                                                             "title": "LensLoader",
                                                                                             "section": "",
                                                                                        -    "text": "atmosphere.LensLoader(client)\nLoads lens records from ATProto.\nThis class fetches lens transformation records. Note that actually using a lens requires installing the referenced code and importing it manually.\n\n\n::\n>>> client = AtmosphereClient()\n>>> loader = LensLoader(client)\n>>>\n>>> record = loader.get(\"at://did:plc:abc/ac.foundation.dataset.lens/xyz\")\n>>> print(record[\"name\"])\n>>> print(record[\"sourceSchema\"])\n>>> print(record.get(\"getterCode\", {}).get(\"repository\"))\n\n\n\n\n\n\nName\nDescription\n\n\n\n\nfind_by_schemas\nFind lenses that transform between specific schemas.\n\n\nget\nFetch a lens record by AT URI.\n\n\nlist_all\nList lens records from a repository.\n\n\n\n\n\natmosphere.LensLoader.find_by_schemas(\n    source_schema_uri,\n    target_schema_uri=None,\n    repo=None,\n)\nFind lenses that transform between specific schemas.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nsource_schema_uri\nstr\nAT URI of the source schema.\nrequired\n\n\ntarget_schema_uri\nOptional[str]\nOptional AT URI of the target schema. If not provided, returns all lenses from the source.\nNone\n\n\nrepo\nOptional[str]\nThe DID of the repository to search.\nNone\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nlist[dict]\nList of matching lens records.\n\n\n\n\n\n\n\natmosphere.LensLoader.get(uri)\nFetch a lens record by AT URI.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nuri\nstr | AtUri\nThe AT URI of the lens record.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\ndict\nThe lens record as a dictionary.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nValueError\nIf the record is not a lens record.\n\n\n\n\n\n\n\natmosphere.LensLoader.list_all(repo=None, limit=100)\nList lens records from a repository.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nrepo\nOptional[str]\nThe DID of the repository. 
Defaults to authenticated user.\nNone\n\n\nlimit\nint\nMaximum number of records to return.\n100\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nlist[dict]\nList of lens records."
                                                                                        +    "text": "atmosphere.LensLoader(client)\nLoads lens records from ATProto.\nThis class fetches lens transformation records. Note that actually using a lens requires installing the referenced code and importing it manually.\n\n\n>>> client = AtmosphereClient()\n>>> loader = LensLoader(client)\n>>>\n>>> record = loader.get(\"at://did:plc:abc/ac.foundation.dataset.lens/xyz\")\n>>> print(record[\"name\"])\n>>> print(record[\"sourceSchema\"])\n>>> print(record.get(\"getterCode\", {}).get(\"repository\"))\n\n\n\n\n\n\nName\nDescription\n\n\n\n\nfind_by_schemas\nFind lenses that transform between specific schemas.\n\n\nget\nFetch a lens record by AT URI.\n\n\nlist_all\nList lens records from a repository.\n\n\n\n\n\natmosphere.LensLoader.find_by_schemas(\n    source_schema_uri,\n    target_schema_uri=None,\n    repo=None,\n)\nFind lenses that transform between specific schemas.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nsource_schema_uri\nstr\nAT URI of the source schema.\nrequired\n\n\ntarget_schema_uri\nOptional[str]\nOptional AT URI of the target schema. If not provided, returns all lenses from the source.\nNone\n\n\nrepo\nOptional[str]\nThe DID of the repository to search.\nNone\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nlist[dict]\nList of matching lens records.\n\n\n\n\n\n\n\natmosphere.LensLoader.get(uri)\nFetch a lens record by AT URI.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nuri\nstr | AtUri\nThe AT URI of the lens record.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\ndict\nThe lens record as a dictionary.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nValueError\nIf the record is not a lens record.\n\n\n\n\n\n\n\natmosphere.LensLoader.list_all(repo=None, limit=100)\nList lens records from a repository.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nrepo\nOptional[str]\nThe DID of the repository. 
Defaults to authenticated user.\nNone\n\n\nlimit\nint\nMaximum number of records to return.\n100\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nlist[dict]\nList of lens records."
                                                                                           },
                                                                                           {
                                                                                        -    "objectID": "api/LensLoader.html#example",
                                                                                        -    "href": "api/LensLoader.html#example",
                                                                                        +    "objectID": "api/LensLoader.html#examples",
                                                                                        +    "href": "api/LensLoader.html#examples",
                                                                                             "title": "LensLoader",
                                                                                             "section": "",
                                                                                        -    "text": "::\n>>> client = AtmosphereClient()\n>>> loader = LensLoader(client)\n>>>\n>>> record = loader.get(\"at://did:plc:abc/ac.foundation.dataset.lens/xyz\")\n>>> print(record[\"name\"])\n>>> print(record[\"sourceSchema\"])\n>>> print(record.get(\"getterCode\", {}).get(\"repository\"))"
                                                                                        +    "text": ">>> client = AtmosphereClient()\n>>> loader = LensLoader(client)\n>>>\n>>> record = loader.get(\"at://did:plc:abc/ac.foundation.dataset.lens/xyz\")\n>>> print(record[\"name\"])\n>>> print(record[\"sourceSchema\"])\n>>> print(record.get(\"getterCode\", {}).get(\"repository\"))"
                                                                                           },
                                                                                           {
                                                                                             "objectID": "api/LensLoader.html#methods",
                                                                                        @@ -1952,14 +1952,14 @@
                                                                                             "href": "api/DictSample.html",
                                                                                             "title": "DictSample",
                                                                                             "section": "",
                                                                                        -    "text": "DictSample(_data=None, **kwargs)\nDynamic sample type providing dict-like access to raw msgpack data.\nThis class is the default sample type for datasets when no explicit type is specified. It stores the raw unpacked msgpack data and provides both attribute-style (sample.field) and dict-style (sample[\"field\"]) access to fields.\nDictSample is useful for: - Exploring datasets without defining a schema first - Working with datasets that have variable schemas - Prototyping before committing to a typed schema\nTo convert to a typed schema, use Dataset.as_type() with a @packable-decorated class. Every @packable class automatically registers a lens from DictSample, making this conversion seamless.\n\n\n::\n>>> ds = load_dataset(\"path/to/data.tar\")  # Returns Dataset[DictSample]\n>>> for sample in ds.ordered():\n...     print(sample.some_field)      # Attribute access\n...     print(sample[\"other_field\"])  # Dict access\n...     print(sample.keys())          # Inspect available fields\n...\n>>> # Convert to typed schema\n>>> typed_ds = ds.as_type(MyTypedSample)\n\n\n\nNDArray fields are stored as raw bytes in DictSample. 
They are only converted to numpy arrays when accessed through a typed sample class.\n\n\n\n\n\n\nName\nDescription\n\n\n\n\nas_wds\nPack this sample’s data for writing to WebDataset.\n\n\npacked\nPack this sample’s data into msgpack bytes.\n\n\n\n\n\n\n\n\n\nName\nDescription\n\n\n\n\nfrom_bytes\nCreate a DictSample from raw msgpack bytes.\n\n\nfrom_data\nCreate a DictSample from unpacked msgpack data.\n\n\nget\nGet a field value with optional default.\n\n\nitems\nReturn list of (field_name, value) tuples.\n\n\nkeys\nReturn list of field names.\n\n\nto_dict\nReturn a copy of the underlying data dictionary.\n\n\nvalues\nReturn list of field values.\n\n\n\n\n\nDictSample.from_bytes(bs)\nCreate a DictSample from raw msgpack bytes.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nbs\nbytes\nRaw bytes from a msgpack-serialized sample.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nDictSample\nNew DictSample instance with the unpacked data.\n\n\n\n\n\n\n\nDictSample.from_data(data)\nCreate a DictSample from unpacked msgpack data.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\ndata\ndict[str, Any]\nDictionary with field names as keys.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nDictSample\nNew DictSample instance wrapping the data.\n\n\n\n\n\n\n\nDictSample.get(key, default=None)\nGet a field value with optional default.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nkey\nstr\nField name to access.\nrequired\n\n\ndefault\nAny\nValue to return if field doesn’t exist.\nNone\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nAny\nThe field value or default.\n\n\n\n\n\n\n\nDictSample.items()\nReturn list of (field_name, value) tuples.\n\n\n\nDictSample.keys()\nReturn list of field names.\n\n\n\nDictSample.to_dict()\nReturn a copy of the underlying data dictionary.\n\n\n\nDictSample.values()\nReturn list of field values."
                                                                                        +    "text": "DictSample(_data=None, **kwargs)\nDynamic sample type providing dict-like access to raw msgpack data.\nThis class is the default sample type for datasets when no explicit type is specified. It stores the raw unpacked msgpack data and provides both attribute-style (sample.field) and dict-style (sample[\"field\"]) access to fields.\nDictSample is useful for: - Exploring datasets without defining a schema first - Working with datasets that have variable schemas - Prototyping before committing to a typed schema\nTo convert to a typed schema, use Dataset.as_type() with a @packable-decorated class. Every @packable class automatically registers a lens from DictSample, making this conversion seamless.\n\n\n>>> ds = load_dataset(\"path/to/data.tar\")  # Returns Dataset[DictSample]\n>>> for sample in ds.ordered():\n...     print(sample.some_field)      # Attribute access\n...     print(sample[\"other_field\"])  # Dict access\n...     print(sample.keys())          # Inspect available fields\n...\n>>> # Convert to typed schema\n>>> typed_ds = ds.as_type(MyTypedSample)\n\n\n\nNDArray fields are stored as raw bytes in DictSample. 
They are only converted to numpy arrays when accessed through a typed sample class.\n\n\n\n\n\n\nName\nDescription\n\n\n\n\nas_wds\nPack this sample’s data for writing to WebDataset.\n\n\npacked\nPack this sample’s data into msgpack bytes.\n\n\n\n\n\n\n\n\n\nName\nDescription\n\n\n\n\nfrom_bytes\nCreate a DictSample from raw msgpack bytes.\n\n\nfrom_data\nCreate a DictSample from unpacked msgpack data.\n\n\nget\nGet a field value with optional default.\n\n\nitems\nReturn list of (field_name, value) tuples.\n\n\nkeys\nReturn list of field names.\n\n\nto_dict\nReturn a copy of the underlying data dictionary.\n\n\nvalues\nReturn list of field values.\n\n\n\n\n\nDictSample.from_bytes(bs)\nCreate a DictSample from raw msgpack bytes.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nbs\nbytes\nRaw bytes from a msgpack-serialized sample.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nDictSample\nNew DictSample instance with the unpacked data.\n\n\n\n\n\n\n\nDictSample.from_data(data)\nCreate a DictSample from unpacked msgpack data.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\ndata\ndict[str, Any]\nDictionary with field names as keys.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nDictSample\nNew DictSample instance wrapping the data.\n\n\n\n\n\n\n\nDictSample.get(key, default=None)\nGet a field value with optional default.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nkey\nstr\nField name to access.\nrequired\n\n\ndefault\nAny\nValue to return if field doesn’t exist.\nNone\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nAny\nThe field value or default.\n\n\n\n\n\n\n\nDictSample.items()\nReturn list of (field_name, value) tuples.\n\n\n\nDictSample.keys()\nReturn list of field names.\n\n\n\nDictSample.to_dict()\nReturn a copy of the underlying data dictionary.\n\n\n\nDictSample.values()\nReturn list of field values."
                                                                                           },
                                                                                           {
                                                                                        -    "objectID": "api/DictSample.html#example",
                                                                                        -    "href": "api/DictSample.html#example",
                                                                                        +    "objectID": "api/DictSample.html#examples",
                                                                                        +    "href": "api/DictSample.html#examples",
                                                                                             "title": "DictSample",
                                                                                             "section": "",
                                                                                        -    "text": "::\n>>> ds = load_dataset(\"path/to/data.tar\")  # Returns Dataset[DictSample]\n>>> for sample in ds.ordered():\n...     print(sample.some_field)      # Attribute access\n...     print(sample[\"other_field\"])  # Dict access\n...     print(sample.keys())          # Inspect available fields\n...\n>>> # Convert to typed schema\n>>> typed_ds = ds.as_type(MyTypedSample)"
                                                                                        +    "text": ">>> ds = load_dataset(\"path/to/data.tar\")  # Returns Dataset[DictSample]\n>>> for sample in ds.ordered():\n...     print(sample.some_field)      # Attribute access\n...     print(sample[\"other_field\"])  # Dict access\n...     print(sample.keys())          # Inspect available fields\n...\n>>> # Convert to typed schema\n>>> typed_ds = ds.as_type(MyTypedSample)"
                                                                                           },
                                                                                           {
                                                                                             "objectID": "api/DictSample.html#note",
                                                                                        @@ -1987,7 +1987,7 @@
                                                                                             "href": "api/PDSBlobStore.html",
                                                                                             "title": "PDSBlobStore",
                                                                                             "section": "",
                                                                                        -    "text": "atmosphere.PDSBlobStore(client)\nPDS blob store implementing AbstractDataStore protocol.\nStores dataset shards as ATProto blobs, enabling decentralized dataset storage on the AT Protocol network.\nEach shard is written to a temporary tar file, then uploaded as a blob to the user’s PDS. The returned URLs are AT URIs that can be resolved to HTTP URLs for streaming.\n\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\nclient\n'AtmosphereClient'\nAuthenticated AtmosphereClient instance.\n\n\n\n\n\n\n::\n>>> store = PDSBlobStore(client)\n>>> urls = store.write_shards(dataset, prefix=\"training/v1\")\n>>> # Returns AT URIs like:\n>>> # ['at://did:plc:abc/blob/bafyrei...', ...]\n\n\n\n\n\n\nName\nDescription\n\n\n\n\ncreate_source\nCreate a BlobSource for reading these AT URIs.\n\n\nread_url\nResolve an AT URI blob reference to an HTTP URL.\n\n\nsupports_streaming\nPDS blobs support streaming via HTTP.\n\n\nwrite_shards\nWrite dataset shards as PDS blobs.\n\n\n\n\n\natmosphere.PDSBlobStore.create_source(urls)\nCreate a BlobSource for reading these AT URIs.\nThis is a convenience method for creating a DataSource that can stream the blobs written by this store.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nurls\nlist[str]\nList of AT URIs from write_shards().\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\n'BlobSource'\nBlobSource configured for the given URLs.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nValueError\nIf URLs are not valid AT URIs.\n\n\n\n\n\n\n\natmosphere.PDSBlobStore.read_url(url)\nResolve an AT URI blob reference to an HTTP URL.\nTransforms at://did/blob/cid URIs to HTTP URLs that can be streamed by WebDataset.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nurl\nstr\nAT URI in format at://{did}/blob/{cid}.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nstr\nHTTP URL for fetching the blob via PDS 
API.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nValueError\nIf URL format is invalid or PDS cannot be resolved.\n\n\n\n\n\n\n\natmosphere.PDSBlobStore.supports_streaming()\nPDS blobs support streaming via HTTP.\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nbool\nTrue.\n\n\n\n\n\n\n\natmosphere.PDSBlobStore.write_shards(\n    ds,\n    *,\n    prefix,\n    maxcount=10000,\n    maxsize=3000000000.0,\n    **kwargs,\n)\nWrite dataset shards as PDS blobs.\nCreates tar archives from the dataset and uploads each as a blob to the authenticated user’s PDS.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nds\n'Dataset'\nThe Dataset to write.\nrequired\n\n\nprefix\nstr\nLogical path prefix for naming (used in shard names only).\nrequired\n\n\nmaxcount\nint\nMaximum samples per shard (default: 10000).\n10000\n\n\nmaxsize\nfloat\nMaximum shard size in bytes (default: 3GB, PDS limit).\n3000000000.0\n\n\n**kwargs\nAny\nAdditional args passed to wds.ShardWriter.\n{}\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nlist[str]\nList of AT URIs for the written blobs, in format:\n\n\n\nlist[str]\nat://{did}/blob/{cid}\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nValueError\nIf not authenticated.\n\n\n\nRuntimeError\nIf no shards were written.\n\n\n\n\n\n\nPDS blobs have size limits (typically 50MB-5GB depending on PDS). Adjust maxcount/maxsize to stay within limits."
                                                                                        +    "text": "atmosphere.PDSBlobStore(client)\nPDS blob store implementing AbstractDataStore protocol.\nStores dataset shards as ATProto blobs, enabling decentralized dataset storage on the AT Protocol network.\nEach shard is written to a temporary tar file, then uploaded as a blob to the user’s PDS. The returned URLs are AT URIs that can be resolved to HTTP URLs for streaming.\n\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\nclient\n'AtmosphereClient'\nAuthenticated AtmosphereClient instance.\n\n\n\n\n\n\n>>> store = PDSBlobStore(client)\n>>> urls = store.write_shards(dataset, prefix=\"training/v1\")\n>>> # Returns AT URIs like:\n>>> # ['at://did:plc:abc/blob/bafyrei...', ...]\n\n\n\n\n\n\nName\nDescription\n\n\n\n\ncreate_source\nCreate a BlobSource for reading these AT URIs.\n\n\nread_url\nResolve an AT URI blob reference to an HTTP URL.\n\n\nsupports_streaming\nPDS blobs support streaming via HTTP.\n\n\nwrite_shards\nWrite dataset shards as PDS blobs.\n\n\n\n\n\natmosphere.PDSBlobStore.create_source(urls)\nCreate a BlobSource for reading these AT URIs.\nThis is a convenience method for creating a DataSource that can stream the blobs written by this store.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nurls\nlist[str]\nList of AT URIs from write_shards().\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\n'BlobSource'\nBlobSource configured for the given URLs.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nValueError\nIf URLs are not valid AT URIs.\n\n\n\n\n\n\n\natmosphere.PDSBlobStore.read_url(url)\nResolve an AT URI blob reference to an HTTP URL.\nTransforms at://did/blob/cid URIs to HTTP URLs that can be streamed by WebDataset.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nurl\nstr\nAT URI in format at://{did}/blob/{cid}.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nstr\nHTTP URL for fetching the blob via PDS 
API.\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nValueError\nIf URL format is invalid or PDS cannot be resolved.\n\n\n\n\n\n\n\natmosphere.PDSBlobStore.supports_streaming()\nPDS blobs support streaming via HTTP.\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nbool\nTrue.\n\n\n\n\n\n\n\natmosphere.PDSBlobStore.write_shards(\n    ds,\n    *,\n    prefix,\n    maxcount=10000,\n    maxsize=3000000000.0,\n    **kwargs,\n)\nWrite dataset shards as PDS blobs.\nCreates tar archives from the dataset and uploads each as a blob to the authenticated user’s PDS.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nds\n'Dataset'\nThe Dataset to write.\nrequired\n\n\nprefix\nstr\nLogical path prefix for naming (used in shard names only).\nrequired\n\n\nmaxcount\nint\nMaximum samples per shard (default: 10000).\n10000\n\n\nmaxsize\nfloat\nMaximum shard size in bytes (default: 3GB, PDS limit).\n3000000000.0\n\n\n**kwargs\nAny\nAdditional args passed to wds.ShardWriter.\n{}\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nlist[str]\nList of AT URIs for the written blobs, in format:\n\n\n\nlist[str]\nat://{did}/blob/{cid}\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nValueError\nIf not authenticated.\n\n\n\nRuntimeError\nIf no shards were written.\n\n\n\n\n\n\nPDS blobs have size limits (typically 50MB-5GB depending on PDS). Adjust maxcount/maxsize to stay within limits."
                                                                                           },
                                                                                           {
                                                                                             "objectID": "api/PDSBlobStore.html#attributes",
                                                                                        @@ -1997,11 +1997,11 @@
                                                                                             "text": "Name\nType\nDescription\n\n\n\n\nclient\n'AtmosphereClient'\nAuthenticated AtmosphereClient instance."
                                                                                           },
                                                                                           {
                                                                                        -    "objectID": "api/PDSBlobStore.html#example",
                                                                                        -    "href": "api/PDSBlobStore.html#example",
                                                                                        +    "objectID": "api/PDSBlobStore.html#examples",
                                                                                        +    "href": "api/PDSBlobStore.html#examples",
                                                                                             "title": "PDSBlobStore",
                                                                                             "section": "",
                                                                                        -    "text": "::\n>>> store = PDSBlobStore(client)\n>>> urls = store.write_shards(dataset, prefix=\"training/v1\")\n>>> # Returns AT URIs like:\n>>> # ['at://did:plc:abc/blob/bafyrei...', ...]"
                                                                                        +    "text": ">>> store = PDSBlobStore(client)\n>>> urls = store.write_shards(dataset, prefix=\"training/v1\")\n>>> # Returns AT URIs like:\n>>> # ['at://did:plc:abc/blob/bafyrei...', ...]"
                                                                                           },
                                                                                           {
                                                                                             "objectID": "api/PDSBlobStore.html#methods",
                                                                                        @@ -2015,14 +2015,14 @@
                                                                                             "href": "api/PackableSample.html",
                                                                                             "title": "PackableSample",
                                                                                             "section": "",
                                                                                        -    "text": "PackableSample()\nBase class for samples that can be serialized with msgpack.\nThis abstract base class provides automatic serialization/deserialization for dataclass-based samples. Fields annotated as NDArray or NDArray | None are automatically converted between numpy arrays and bytes during packing/unpacking.\nSubclasses should be defined either by: 1. Direct inheritance with the @dataclass decorator 2. Using the @packable decorator (recommended)\n\n\n::\n>>> @packable\n... class MyData:\n...     name: str\n...     embeddings: NDArray\n...\n>>> sample = MyData(name=\"test\", embeddings=np.array([1.0, 2.0]))\n>>> packed = sample.packed  # Serialize to bytes\n>>> restored = MyData.from_bytes(packed)  # Deserialize\n\n\n\n\n\n\nName\nDescription\n\n\n\n\nas_wds\nPack this sample’s data for writing to WebDataset.\n\n\npacked\nPack this sample’s data into msgpack bytes.\n\n\n\n\n\n\n\n\n\nName\nDescription\n\n\n\n\nfrom_bytes\nCreate a sample instance from raw msgpack bytes.\n\n\nfrom_data\nCreate a sample instance from unpacked msgpack data.\n\n\n\n\n\nPackableSample.from_bytes(bs)\nCreate a sample instance from raw msgpack bytes.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nbs\nbytes\nRaw bytes from a msgpack-serialized sample.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nSelf\nA new instance of this sample class deserialized from the bytes.\n\n\n\n\n\n\n\nPackableSample.from_data(data)\nCreate a sample instance from unpacked msgpack data.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\ndata\nWDSRawSample\nDictionary with keys matching the sample’s field names.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nSelf\nNew instance with NDArray fields auto-converted from bytes."
                                                                                        +    "text": "PackableSample()\nBase class for samples that can be serialized with msgpack.\nThis abstract base class provides automatic serialization/deserialization for dataclass-based samples. Fields annotated as NDArray or NDArray | None are automatically converted between numpy arrays and bytes during packing/unpacking.\nSubclasses should be defined either by: 1. Direct inheritance with the @dataclass decorator 2. Using the @packable decorator (recommended)\n\n\n>>> @packable\n... class MyData:\n...     name: str\n...     embeddings: NDArray\n...\n>>> sample = MyData(name=\"test\", embeddings=np.array([1.0, 2.0]))\n>>> packed = sample.packed  # Serialize to bytes\n>>> restored = MyData.from_bytes(packed)  # Deserialize\n\n\n\n\n\n\nName\nDescription\n\n\n\n\nas_wds\nPack this sample’s data for writing to WebDataset.\n\n\npacked\nPack this sample’s data into msgpack bytes.\n\n\n\n\n\n\n\n\n\nName\nDescription\n\n\n\n\nfrom_bytes\nCreate a sample instance from raw msgpack bytes.\n\n\nfrom_data\nCreate a sample instance from unpacked msgpack data.\n\n\n\n\n\nPackableSample.from_bytes(bs)\nCreate a sample instance from raw msgpack bytes.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nbs\nbytes\nRaw bytes from a msgpack-serialized sample.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nSelf\nA new instance of this sample class deserialized from the bytes.\n\n\n\n\n\n\n\nPackableSample.from_data(data)\nCreate a sample instance from unpacked msgpack data.\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\ndata\nWDSRawSample\nDictionary with keys matching the sample’s field names.\nrequired\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\n\n\n\n\n\nSelf\nNew instance with NDArray fields auto-converted from bytes."
                                                                                           },
                                                                                           {
                                                                                        -    "objectID": "api/PackableSample.html#example",
                                                                                        -    "href": "api/PackableSample.html#example",
                                                                                        +    "objectID": "api/PackableSample.html#examples",
                                                                                        +    "href": "api/PackableSample.html#examples",
                                                                                             "title": "PackableSample",
                                                                                             "section": "",
                                                                                        -    "text": "::\n>>> @packable\n... class MyData:\n...     name: str\n...     embeddings: NDArray\n...\n>>> sample = MyData(name=\"test\", embeddings=np.array([1.0, 2.0]))\n>>> packed = sample.packed  # Serialize to bytes\n>>> restored = MyData.from_bytes(packed)  # Deserialize"
                                                                                        +    "text": ">>> @packable\n... class MyData:\n...     name: str\n...     embeddings: NDArray\n...\n>>> sample = MyData(name=\"test\", embeddings=np.array([1.0, 2.0]))\n>>> packed = sample.packed  # Serialize to bytes\n>>> restored = MyData.from_bytes(packed)  # Deserialize"
                                                                                           },
                                                                                           {
                                                                                             "objectID": "api/PackableSample.html#attributes",
                                                                                        @@ -2043,7 +2043,7 @@
                                                                                             "href": "api/DatasetDict.html",
                                                                                             "title": "DatasetDict",
                                                                                             "section": "",
                                                                                        -    "text": "DatasetDict(splits=None, sample_type=None, streaming=False)\nA dictionary of split names to Dataset instances.\nSimilar to HuggingFace’s DatasetDict, this provides a container for multiple dataset splits (train, test, validation, etc.) with convenience methods that operate across all splits.\n\n\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nST\n\nThe sample type for all datasets in this dict.\nrequired\n\n\n\n\n\n\n::\n>>> ds_dict = load_dataset(\"path/to/data\", MyData)\n>>> train = ds_dict[\"train\"]\n>>> test = ds_dict[\"test\"]\n>>>\n>>> # Iterate over all splits\n>>> for split_name, dataset in ds_dict.items():\n...     print(f\"{split_name}: {len(dataset.shard_list)} shards\")\n\n\n\n\n\n\nName\nDescription\n\n\n\n\nnum_shards\nNumber of shards in each split.\n\n\nsample_type\nThe sample type for datasets in this dict.\n\n\nstreaming\nWhether this DatasetDict was loaded in streaming mode."
                                                                                        +    "text": "DatasetDict(splits=None, sample_type=None, streaming=False)\nA dictionary of split names to Dataset instances.\nSimilar to HuggingFace’s DatasetDict, this provides a container for multiple dataset splits (train, test, validation, etc.) with convenience methods that operate across all splits.\n\n\n\n\n\n\n\n\n\n\n\nName\nType\nDescription\nDefault\n\n\n\n\nST\n\nThe sample type for all datasets in this dict.\nrequired\n\n\n\n\n\n\n>>> ds_dict = load_dataset(\"path/to/data\", MyData)\n>>> train = ds_dict[\"train\"]\n>>> test = ds_dict[\"test\"]\n>>>\n>>> # Iterate over all splits\n>>> for split_name, dataset in ds_dict.items():\n...     print(f\"{split_name}: {len(dataset.shard_list)} shards\")\n\n\n\n\n\n\nName\nDescription\n\n\n\n\nnum_shards\nNumber of shards in each split.\n\n\nsample_type\nThe sample type for datasets in this dict.\n\n\nstreaming\nWhether this DatasetDict was loaded in streaming mode."
                                                                                           },
                                                                                           {
                                                                                             "objectID": "api/DatasetDict.html#parameters",
                                                                                        @@ -2053,11 +2053,11 @@
                                                                                             "text": "Name\nType\nDescription\nDefault\n\n\n\n\nST\n\nThe sample type for all datasets in this dict.\nrequired"
                                                                                           },
                                                                                           {
                                                                                        -    "objectID": "api/DatasetDict.html#example",
                                                                                        -    "href": "api/DatasetDict.html#example",
                                                                                        +    "objectID": "api/DatasetDict.html#examples",
                                                                                        +    "href": "api/DatasetDict.html#examples",
                                                                                             "title": "DatasetDict",
                                                                                             "section": "",
                                                                                        -    "text": "::\n>>> ds_dict = load_dataset(\"path/to/data\", MyData)\n>>> train = ds_dict[\"train\"]\n>>> test = ds_dict[\"test\"]\n>>>\n>>> # Iterate over all splits\n>>> for split_name, dataset in ds_dict.items():\n...     print(f\"{split_name}: {len(dataset.shard_list)} shards\")"
                                                                                        +    "text": ">>> ds_dict = load_dataset(\"path/to/data\", MyData)\n>>> train = ds_dict[\"train\"]\n>>> test = ds_dict[\"test\"]\n>>>\n>>> # Iterate over all splits\n>>> for split_name, dataset in ds_dict.items():\n...     print(f\"{split_name}: {len(dataset.shard_list)} shards\")"
                                                                                           },
                                                                                           {
                                                                                             "objectID": "api/DatasetDict.html#attributes",
                                                                                        diff --git a/docs/sitemap.xml b/docs/sitemap.xml
                                                                                        index 293deec..88c1bf1 100644
                                                                                        --- a/docs/sitemap.xml
                                                                                        +++ b/docs/sitemap.xml
                                                                                        @@ -2,183 +2,183 @@
                                                                                         
                                                                                           
                                                                                             https://github.com/your-org/atdata/reference/protocols.html
                                                                                        -    2026-01-22T19:31:03.723Z
                                                                                        +    2026-01-28T18:46:20.894Z
                                                                                           
                                                                                           
                                                                                             https://github.com/your-org/atdata/reference/datasets.html
                                                                                        -    2026-01-22T19:31:03.722Z
                                                                                        +    2026-01-28T18:46:20.893Z
                                                                                           
                                                                                           
                                                                                             https://github.com/your-org/atdata/reference/architecture.html
                                                                                        -    2026-01-27T06:13:33.690Z
                                                                                        +    2026-01-28T19:56:53.889Z
                                                                                           
                                                                                           
                                                                                             https://github.com/your-org/atdata/reference/atmosphere.html
                                                                                        -    2026-01-27T05:32:25.227Z
                                                                                        +    2026-01-28T19:56:53.889Z
                                                                                           
                                                                                           
                                                                                             https://github.com/your-org/atdata/reference/local-storage.html
                                                                                        -    2026-01-22T19:31:03.723Z
                                                                                        +    2026-01-28T18:46:20.894Z
                                                                                           
                                                                                           
                                                                                             https://github.com/your-org/atdata/reference/uri-spec.html
                                                                                        -    2026-01-22T19:31:03.723Z
                                                                                        +    2026-01-28T18:46:20.895Z
                                                                                           
                                                                                           
                                                                                             https://github.com/your-org/atdata/tutorials/quickstart.html
                                                                                        -    2026-01-27T06:16:24.980Z
                                                                                        +    2026-01-28T19:56:53.890Z
                                                                                           
                                                                                           
                                                                                             https://github.com/your-org/atdata/tutorials/atmosphere.html
                                                                                        -    2026-01-27T06:18:15.908Z
                                                                                        +    2026-01-28T19:56:53.889Z
                                                                                           
                                                                                           
                                                                                             https://github.com/your-org/atdata/api/SchemaLoader.html
                                                                                        -    2026-01-23T23:20:15.746Z
                                                                                        +    2026-01-28T20:31:19.270Z
                                                                                           
                                                                                           
                                                                                             https://github.com/your-org/atdata/api/BlobSource.html
                                                                                        -    2026-01-27T05:36:00.209Z
                                                                                        +    2026-01-28T20:31:19.167Z
                                                                                           
                                                                                           
                                                                                             https://github.com/your-org/atdata/api/AtmosphereClient.html
                                                                                        -    2026-01-23T23:20:15.723Z
                                                                                        +    2026-01-28T20:31:19.237Z
                                                                                           
                                                                                           
                                                                                             https://github.com/your-org/atdata/api/load_dataset.html
                                                                                        -    2026-01-24T19:19:45.334Z
                                                                                        +    2026-01-28T20:31:19.114Z
                                                                                           
                                                                                           
                                                                                             https://github.com/your-org/atdata/api/promote_to_atmosphere.html
                                                                                        -    2026-01-24T19:19:45.514Z
                                                                                        +    2026-01-28T20:31:19.321Z
                                                                                           
                                                                                           
                                                                                             https://github.com/your-org/atdata/api/SchemaPublisher.html
                                                                                        -    2026-01-23T23:20:15.742Z
                                                                                        +    2026-01-28T20:31:19.265Z
                                                                                           
                                                                                           
                                                                                             https://github.com/your-org/atdata/api/DatasetPublisher.html
                                                                                        -    2026-01-23T23:20:15.757Z
                                                                                        +    2026-01-28T20:31:19.283Z
                                                                                           
                                                                                           
                                                                                             https://github.com/your-org/atdata/api/URLSource.html
                                                                                        -    2026-01-24T19:19:45.367Z
                                                                                        +    2026-01-28T20:31:19.150Z
                                                                                           
                                                                                           
                                                                                             https://github.com/your-org/atdata/api/index.html
                                                                                        -    2026-01-27T06:39:59.502Z
                                                                                        +    2026-01-28T20:50:17.801Z
                                                                                           
                                                                                           
                                                                                             https://github.com/your-org/atdata/api/IndexEntry.html
                                                                                        -    2026-01-23T23:03:53.795Z
                                                                                        +    2026-01-28T19:56:53.885Z
                                                                                           
                                                                                           
                                                                                             https://github.com/your-org/atdata/api/S3Source.html
                                                                                        -    2026-01-24T19:19:45.376Z
                                                                                        +    2026-01-28T20:31:19.160Z
                                                                                           
                                                                                           
                                                                                             https://github.com/your-org/atdata/api/local.LocalDatasetEntry.html
                                                                                        -    2026-01-23T23:03:53.862Z
                                                                                        +    2026-01-28T19:56:53.887Z
                                                                                           
                                                                                           
                                                                                             https://github.com/your-org/atdata/api/AbstractIndex.html
                                                                                        -    2026-01-27T05:36:00.180Z
                                                                                        +    2026-01-28T20:31:19.135Z
                                                                                           
                                                                                           
                                                                                             https://github.com/your-org/atdata/api/AtmosphereIndexEntry.html
                                                                                        -    2026-01-23T23:03:53.910Z
                                                                                        +    2026-01-28T19:56:53.884Z
                                                                                           
                                                                                           
                                                                                             https://github.com/your-org/atdata/api/LensPublisher.html
                                                                                        -    2026-01-23T23:20:15.781Z
                                                                                        +    2026-01-28T20:31:19.307Z
                                                                                           
                                                                                           
                                                                                             https://github.com/your-org/atdata/api/SampleBatch.html
                                                                                        -    2026-01-23T23:20:15.589Z
                                                                                        +    2026-01-28T20:31:19.088Z
                                                                                           
                                                                                           
                                                                                             https://github.com/your-org/atdata/index.html
                                                                                        -    2026-01-27T06:14:32.068Z
                                                                                        +    2026-01-28T19:56:53.888Z
                                                                                           
                                                                                           
                                                                                             https://github.com/your-org/atdata/api/packable.html
                                                                                        -    2026-01-23T23:21:24.522Z
                                                                                        +    2026-01-28T20:31:19.057Z
                                                                                           
                                                                                           
                                                                                             https://github.com/your-org/atdata/api/Packable-protocol.html
                                                                                        -    2026-01-23T23:20:15.617Z
                                                                                        +    2026-01-28T20:31:19.119Z
                                                                                           
                                                                                           
                                                                                             https://github.com/your-org/atdata/api/AtUri.html
                                                                                        -    2026-01-23T23:20:15.791Z
                                                                                        +    2026-01-28T20:31:19.317Z
                                                                                           
                                                                                           
                                                                                             https://github.com/your-org/atdata/api/local.S3DataStore.html
                                                                                        -    2026-01-23T23:03:53.869Z
                                                                                        +    2026-01-28T19:56:53.887Z
                                                                                           
                                                                                           
                                                                                             https://github.com/your-org/atdata/api/AbstractDataStore.html
                                                                                        -    2026-01-23T23:20:15.638Z
                                                                                        +    2026-01-28T20:31:19.141Z
                                                                                           
                                                                                           
                                                                                             https://github.com/your-org/atdata/api/Dataset.html
                                                                                        -    2026-01-23T23:20:15.588Z
                                                                                        +    2026-01-28T20:31:19.086Z
                                                                                           
                                                                                           
                                                                                             https://github.com/your-org/atdata/api/local.Index.html
                                                                                        -    2026-01-27T05:36:00.238Z
                                                                                        +    2026-01-28T20:31:19.196Z
                                                                                           
                                                                                           
                                                                                             https://github.com/your-org/atdata/api/Lens.html
                                                                                        -    2026-01-27T06:39:59.563Z
                                                                                        +    2026-01-28T20:50:17.859Z
                                                                                           
                                                                                           
                                                                                             https://github.com/your-org/atdata/api/DatasetLoader.html
                                                                                        -    2026-01-23T23:20:15.773Z
                                                                                        +    2026-01-28T20:31:19.298Z
                                                                                           
                                                                                           
                                                                                             https://github.com/your-org/atdata/api/DataSource.html
                                                                                        -    2026-01-23T23:20:15.642Z
                                                                                        +    2026-01-28T20:31:19.146Z
                                                                                           
                                                                                           
                                                                                             https://github.com/your-org/atdata/api/AtmosphereIndex.html
                                                                                        -    2026-01-27T05:36:00.293Z
                                                                                        +    2026-01-28T20:31:19.251Z
                                                                                           
                                                                                           
                                                                                             https://github.com/your-org/atdata/api/LensLoader.html
                                                                                        -    2026-01-23T23:20:15.788Z
                                                                                        +    2026-01-28T20:31:19.314Z
                                                                                           
                                                                                           
                                                                                             https://github.com/your-org/atdata/api/DictSample.html
                                                                                        -    2026-01-23T23:20:15.573Z
                                                                                        +    2026-01-28T20:31:19.071Z
                                                                                           
                                                                                           
                                                                                             https://github.com/your-org/atdata/api/PDSBlobStore.html
                                                                                        -    2026-01-27T05:36:00.303Z
                                                                                        +    2026-01-28T20:31:19.261Z
                                                                                           
                                                                                           
                                                                                             https://github.com/your-org/atdata/api/PackableSample.html
                                                                                        -    2026-01-23T23:20:15.564Z
                                                                                        +    2026-01-28T20:31:19.062Z
                                                                                           
                                                                                           
                                                                                             https://github.com/your-org/atdata/api/DatasetDict.html
                                                                                        -    2026-01-24T19:19:45.336Z
                                                                                        +    2026-01-28T20:31:19.116Z
                                                                                           
                                                                                           
                                                                                             https://github.com/your-org/atdata/tutorials/promotion.html
                                                                                        -    2026-01-27T06:18:38.425Z
                                                                                        +    2026-01-28T19:56:53.890Z
                                                                                           
                                                                                           
                                                                                             https://github.com/your-org/atdata/tutorials/local-workflow.html
                                                                                        -    2026-01-27T06:17:20.489Z
                                                                                        +    2026-01-28T19:56:53.890Z
                                                                                           
                                                                                           
                                                                                             https://github.com/your-org/atdata/reference/promotion.html
                                                                                        -    2026-01-22T19:31:03.723Z
                                                                                        +    2026-01-28T18:46:20.894Z
                                                                                           
                                                                                           
                                                                                             https://github.com/your-org/atdata/reference/load-dataset.html
                                                                                        -    2026-01-22T19:31:03.722Z
                                                                                        +    2026-01-28T18:46:20.894Z
                                                                                           
                                                                                           
                                                                                             https://github.com/your-org/atdata/reference/lenses.html
                                                                                        @@ -190,10 +190,10 @@
                                                                                           
                                                                                           
                                                                                             https://github.com/your-org/atdata/reference/deployment.html
                                                                                        -    2026-01-22T20:19:56.455Z
                                                                                        +    2026-01-28T19:56:53.889Z
                                                                                           
                                                                                           
                                                                                             https://github.com/your-org/atdata/reference/troubleshooting.html
                                                                                        -    2026-01-22T20:18:56.494Z
                                                                                        +    2026-01-28T19:56:53.889Z
                                                                                           
                                                                                         
                                                                                        diff --git a/docs/tutorials/atmosphere.html b/docs/tutorials/atmosphere.html
                                                                                        index 520b2cc..883024e 100644
                                                                                        --- a/docs/tutorials/atmosphere.html
                                                                                        +++ b/docs/tutorials/atmosphere.html
                                                                                        @@ -658,7 +658,7 @@ 

                                                                                        Prerequisites

                                                                                        Setup

                                                                                        -
                                                                                        +
                                                                                        import numpy as np
                                                                                         from numpy.typing import NDArray
                                                                                         import atdata
                                                                                        @@ -678,7 +678,7 @@ 

                                                                                        Setup

                                                                                        Define Sample Types

                                                                                        -
                                                                                        +
                                                                                        @atdata.packable
                                                                                         class ImageSample:
                                                                                             """A sample containing image data with metadata."""
                                                                                        @@ -697,7 +697,7 @@ 

                                                                                        Define Sample Types

                                                                                        Type Introspection

                                                                                        See what information is available from a PackableSample type:

                                                                                        -
                                                                                        +
                                                                                        from dataclasses import fields, is_dataclass
                                                                                         
                                                                                         print(f"Sample type: {ImageSample.__name__}")
                                                                                        @@ -732,7 +732,7 @@ 

                                                                                        AT URI Parsing

                                                                                      Understanding AT URIs is essential for working with atmosphere datasets, as they’re how you reference schemas, datasets, and lenses.

                                                                                      ATProto records are identified by AT URIs:

                                                                                      -
                                                                                      +
                                                                                      uris = [
                                                                                           "at://did:plc:abc123/ac.foundation.dataset.sampleSchema/xyz789",
                                                                                           "at://alice.bsky.social/ac.foundation.dataset.record/my-dataset",
                                                                                      @@ -750,7 +750,7 @@ 

                                                                                      AT URI Parsing

                                                                                      Authentication

                                                                                      The AtmosphereClient handles ATProto authentication. When you authenticate, you’re proving ownership of your decentralized identity (DID), which gives you permission to create and modify records in your Personal Data Server (PDS).

                                                                                      Connect to ATProto:

                                                                                      -
                                                                                      +
                                                                                      client = AtmosphereClient()
                                                                                       client.login("your.handle.social", "your-app-password")
                                                                                       
                                                                                      @@ -761,7 +761,7 @@ 

                                                                                      Authentication

                                                                                      Publish a Schema

                                                                                      When you publish a schema to ATProto, it becomes a public, immutable record that others can reference. The schema CID ensures that anyone can verify they’re using exactly the same type definition you published.

                                                                                      -
                                                                                      +
                                                                                      schema_publisher = SchemaPublisher(client)
                                                                                       schema_uri = schema_publisher.publish(
                                                                                           ImageSample,
                                                                                      @@ -774,7 +774,7 @@ 

                                                                                      Publish a Schema

                                                                                      List Your Schemas

                                                                                      -
                                                                                      +
                                                                                      schema_loader = SchemaLoader(client)
                                                                                       schemas = schema_loader.list_all(limit=10)
                                                                                       print(f"Found {len(schemas)} schema(s)")
                                                                                      @@ -787,7 +787,7 @@ 

                                                                                      List Your Schemas

                                                                                      Publish a Dataset

                                                                                      With External URLs

                                                                                      -
                                                                                      +
                                                                                      dataset_publisher = DatasetPublisher(client)
                                                                                       dataset_uri = dataset_publisher.publish_with_urls(
                                                                                           urls=["s3://example-bucket/demo-data-{000000..000009}.tar"],
                                                                                      @@ -809,7 +809,7 @@ 

                                                                                      With PDS
                                                                                    • Federated replication: Relays can mirror your blobs for availability

                                                                                    For fully decentralized storage, use PDSBlobStore to store dataset shards directly as ATProto blobs in your PDS:

                                                                                    -
                                                                                    +
                                                                                    # Create store and index with blob storage
                                                                                     store = PDSBlobStore(client)
                                                                                     index = AtmosphereIndex(client, data_store=store)
                                                                                    @@ -853,7 +853,7 @@ 

                                                                                    With PDS

                                                                                    Use BlobSource to stream directly from PDS blobs:

                                                                                    -
                                                                                    +
                                                                                    # Create source from the blob URLs
                                                                                     source = store.create_source(entry.data_urls)
                                                                                     
                                                                                    @@ -874,7 +874,7 @@ 

                                                                                    With PDS

                                                                                    With External URLs

                                                                                    For larger datasets that exceed PDS blob limits, or when you already have data in object storage, you can publish a dataset record that references external URLs. The ATProto record serves as the index entry while the actual data lives elsewhere.

                                                                                    For larger datasets or when using existing object storage:

                                                                                    -
                                                                                    +
                                                                                    dataset_publisher = DatasetPublisher(client)
                                                                                     dataset_uri = dataset_publisher.publish_with_urls(
                                                                                         urls=["s3://example-bucket/demo-data-{000000..000009}.tar"],
                                                                                    @@ -890,7 +890,7 @@ 

                                                                                    With External URLs

                                                                                    List and Load Datasets

                                                                                    -
                                                                                    +
                                                                                    dataset_loader = DatasetLoader(client)
                                                                                     datasets = dataset_loader.list_all(limit=10)
                                                                                     print(f"Found {len(datasets)} dataset(s)")
                                                                                    @@ -905,7 +905,7 @@ 

                                                                                    List and Load Datas

                                                                                    Load a Dataset

                                                                                    -
                                                                                    +
                                                                                    # Check storage type
                                                                                     storage_type = dataset_loader.get_storage_type(str(blob_dataset_uri))
                                                                                     print(f"Storage type: {storage_type}")
                                                                                    @@ -933,7 +933,7 @@ 

                                                                                    Complete Publ

                                                                                    Notice how similar this is to the local workflow—the same sample types and patterns, just with a different storage backend.

                                                                                    This example shows the recommended workflow using PDSBlobStore for fully decentralized storage:

                                                                                    -
                                                                                    +
                                                                                    # 1. Define and create samples
                                                                                     @atdata.packable
                                                                                     class FeatureSample:
                                                                                    diff --git a/docs/tutorials/local-workflow.html b/docs/tutorials/local-workflow.html
                                                                                    index 8705637..554faf1 100644
                                                                                    --- a/docs/tutorials/local-workflow.html
                                                                                    +++ b/docs/tutorials/local-workflow.html
                                                                                    @@ -644,7 +644,7 @@ 

                                                                                    Prerequisites

                                                                                    Setup

                                                                                    -
                                                                                    +
                                                                                    import numpy as np
                                                                                     from numpy.typing import NDArray
                                                                                     import atdata
                                                                                    @@ -654,7 +654,7 @@ 

                                                                                    Setup

                                                                                    Define Sample Types

                                                                                    -
                                                                                    +
                                                                                    @atdata.packable
                                                                                     class TrainingSample:
                                                                                         """A sample containing features and label for training."""
                                                                                    @@ -678,7 +678,7 @@ 

                                                                                    LocalDatasetEntry

                                                                                  CIDs are computed from the entry’s schema reference and data URLs, so the same logical dataset will have the same CID regardless of where it’s stored.

                                                                                  Create entries with content-addressable CIDs:

                                                                                  -
                                                                                  +
                                                                                  # Create an entry manually
                                                                                   entry = LocalDatasetEntry(
                                                                                       _name="my-dataset",
                                                                                  @@ -711,7 +711,7 @@ 

                                                                                  LocalDatasetEntry

                                                                                  LocalIndex

                                                                                  The LocalIndex is your team’s dataset registry. It implements the AbstractIndex protocol, meaning code written against LocalIndex will also work with AtmosphereIndex when you’re ready for federated sharing.

                                                                                  The index tracks datasets in Redis:

                                                                                  -
                                                                                  +
                                                                                  from redis import Redis
                                                                                   
                                                                                   # Connect to Redis
                                                                                  @@ -724,7 +724,7 @@ 

                                                                                  LocalIndex

                                                                                  Schema Management

                                                                                  Schema publishing is how you ensure type consistency across your team. When you publish a schema, atdata stores the complete type definition (field names, types, metadata) so anyone can reconstruct the Python class from just the schema reference.

                                                                                  This enables a powerful workflow: share a dataset by sharing its name, and consumers can dynamically reconstruct the sample type without having the original Python code.

                                                                                  -
                                                                                  +
                                                                                  # Publish a schema
                                                                                   schema_ref = index.publish_schema(TrainingSample, version="1.0.0")
                                                                                   print(f"Published schema: {schema_ref}")
                                                                                  @@ -753,7 +753,7 @@ 

                                                                                  S3DataStore

                                                                                The data store handles uploading tar shards and creating signed URLs for streaming access.

                                                                                For direct S3 operations:

                                                                                -
                                                                                +
                                                                                creds = {
                                                                                     "AWS_ENDPOINT": "http://localhost:9000",
                                                                                     "AWS_ACCESS_KEY_ID": "minioadmin",
                                                                                @@ -779,7 +779,7 @@ 

                                                                                Complete Index Wor

                                                                                The index composition pattern (LocalIndex(data_store=S3DataStore(...))) is deliberate—it separates the concern of “where is metadata?” from “where is data?”, making it easy to swap storage backends.

                                                                                Use LocalIndex with S3DataStore to store datasets with S3 storage and Redis indexing:

                                                                                -
                                                                                +
                                                                                # 1. Create sample data
                                                                                 samples = [
                                                                                     TrainingSample(
                                                                                @@ -829,7 +829,7 @@ 

                                                                                Complete Index Wor

                                                                                Using load_dataset with Index

                                                                                The load_dataset() function provides a HuggingFace-style API that abstracts away the details of where data lives. When you pass an index, it can resolve @local/ prefixed paths to the actual data URLs and apply the correct credentials automatically.

                                                                                The load_dataset() function supports index lookup:

                                                                                -
                                                                                +
                                                                                from atdata import load_dataset
                                                                                 
                                                                                 # Load from local index
                                                                                diff --git a/docs/tutorials/promotion.html b/docs/tutorials/promotion.html
                                                                                index bc216bf..47abcc5 100644
                                                                                --- a/docs/tutorials/promotion.html
                                                                                +++ b/docs/tutorials/promotion.html
                                                                                @@ -621,7 +621,7 @@ 

                                                                                Overview

                                                                              Setup

                                                                              -
                                                                              +
                                                                              import numpy as np
                                                                               from numpy.typing import NDArray
                                                                               import atdata
                                                                              @@ -634,7 +634,7 @@ 

                                                                              Setup

                                                                              Prepare a Local Dataset

                                                                              First, set up a dataset in local storage:

                                                                              -
                                                                              +
                                                                              # 1. Define sample type
                                                                               @atdata.packable
                                                                               class ExperimentSample:
                                                                              @@ -684,7 +684,7 @@ 

                                                                              Prepare a Local Da

                                                                              Basic Promotion

                                                                              Promote the dataset to ATProto:

                                                                              -
                                                                              +
                                                                              # Connect to atmosphere
                                                                               client = AtmosphereClient()
                                                                               client.login("myhandle.bsky.social", "app-password")
                                                                              @@ -697,7 +697,7 @@ 

                                                                              Basic Promotion

                                                                              Promotion with Metadata

                                                                              Add description, tags, and license:

                                                                              -
                                                                              +
                                                                              at_uri = promote_to_atmosphere(
                                                                                   local_entry,
                                                                                   local_index,
                                                                              @@ -713,7 +713,7 @@ 

                                                                              Promotion with Met

                                                                              Schema Deduplication

                                                                              The promotion workflow automatically checks for existing schemas:

                                                                              -
                                                                              +
                                                                              from atdata.promote import _find_existing_schema
                                                                               
                                                                               # Check if schema already exists
                                                                              @@ -725,7 +725,7 @@ 

                                                                              Schema Deduplication< print("No existing schema found, will publish new one")

                                                                              When you promote multiple datasets with the same sample type:

                                                                              -
                                                                              +
                                                                              # First promotion: publishes schema
                                                                               uri1 = promote_to_atmosphere(entry1, local_index, client)
                                                                               
                                                                              @@ -740,7 +740,7 @@ 

                                                                              Data Migration Opti

                                                                              By default, promotion keeps the original data URLs:

                                                                              -
                                                                              +
                                                                              # Data stays in original S3 location
                                                                               at_uri = promote_to_atmosphere(local_entry, local_index, client)
                                                                              @@ -753,7 +753,7 @@

                                                                              Data Migration Opti

                                                                              To copy data to a different storage location:

                                                                              -
                                                                              +
                                                                              from atdata.local import S3DataStore
                                                                               
                                                                               # Create new data store
                                                                              @@ -783,7 +783,7 @@ 

                                                                              Data Migration Opti

                                                                              Verify on Atmosphere

                                                                              After promotion, verify the dataset is accessible:

                                                                              -
                                                                              +
                                                                              from atdata.atmosphere import AtmosphereIndex
                                                                               
                                                                               atm_index = AtmosphereIndex(client)
                                                                              @@ -804,7 +804,7 @@ 

                                                                              Verify on Atmosphere<

                                                                              Error Handling

                                                                              -
                                                                              +
                                                                              try:
                                                                                   at_uri = promote_to_atmosphere(local_entry, local_index, client)
                                                                               except KeyError as e:
                                                                              @@ -828,7 +828,7 @@ 

                                                                              Requirements Checkl

                                                                              Complete Workflow

                                                                              -
                                                                              +
                                                                              # Complete local-to-atmosphere workflow
                                                                               import numpy as np
                                                                               from numpy.typing import NDArray
                                                                              diff --git a/docs/tutorials/quickstart.html b/docs/tutorials/quickstart.html
                                                                              index c900714..5cffdb5 100644
                                                                              --- a/docs/tutorials/quickstart.html
                                                                              +++ b/docs/tutorials/quickstart.html
                                                                              @@ -606,7 +606,7 @@ 

                                                                              Define a Sample Type<
                                                                            • Round-trip fidelity: Data survives serialization without loss

                                                                            Use the @packable decorator to create a typed sample:

                                                                            -
                                                                            +
                                                                            import numpy as np
                                                                             from numpy.typing import NDArray
                                                                             import atdata
                                                                            @@ -627,7 +627,7 @@ 

                                                                            Define a Sample Type<

                                                                        Create Sample Instances

                                                                        -
                                                                        +
                                                                        # Create a single sample
                                                                         sample = ImageSample(
                                                                             image=np.random.rand(224, 224, 3).astype(np.float32),
                                                                        @@ -655,7 +655,7 @@ 

                                                                        Write a Dataset

                                                                      The as_wds property on your sample provides the dictionary format WebDataset expects:

                                                                      Use WebDataset’s TarWriter to create dataset files:

                                                                      -
                                                                      +
                                                                      import webdataset as wds
                                                                       
                                                                       # Create 100 samples
                                                                      @@ -686,7 +686,7 @@ 

                                                                      Load and Iterate

                                                                    This eliminates boilerplate collation code and works automatically with any PackableSample type.

                                                                    Create a typed Dataset and iterate with batching:

                                                                    -
                                                                    +
                                                                    # Load dataset with type
                                                                     dataset = atdata.Dataset[ImageSample]("my-dataset-000000.tar")
                                                                     
                                                                    @@ -713,7 +713,7 @@ 

                                                                    Shuffled Iteration

                                                                    This approach balances randomness with streaming efficiency—you get well-shuffled data without needing random access to the entire dataset.

                                                                    For training, use shuffled iteration:

                                                                    -
                                                                    +
                                                                    for batch in dataset.shuffled(batch_size=32):
                                                                         # Samples are shuffled at shard and sample level
                                                                         images = batch.image
                                                                    @@ -734,7 +734,7 @@ 

                                                                    Use Le
                                                                  • Derived features: Compute fields on-the-fly during iteration

                                                                  View datasets through different schemas:

                                                                  -
                                                                  +
                                                                  # Define a simplified view type
                                                                   @atdata.packable
                                                                   class SimplifiedSample:
                                                                  diff --git a/docs_src/api/AbstractDataStore.qmd b/docs_src/api/AbstractDataStore.qmd
                                                                  index 06bc319..197c57d 100644
                                                                  --- a/docs_src/api/AbstractDataStore.qmd
                                                                  +++ b/docs_src/api/AbstractDataStore.qmd
                                                                  @@ -14,14 +14,14 @@ The separation of index (metadata) from data store (actual files) allows
                                                                   flexible deployment: local index with S3 storage, atmosphere index with
                                                                   S3 storage, or atmosphere index with PDS blobs.
                                                                   
                                                                  -## Example {.doc-section .doc-section-example}
                                                                  +## Examples {.doc-section .doc-section-examples}
                                                                   
                                                                  -::
                                                                  -
                                                                  -    >>> store = S3DataStore(credentials, bucket="my-bucket")
                                                                  -    >>> urls = store.write_shards(dataset, prefix="training/v1")
                                                                  -    >>> print(urls)
                                                                  -    ['s3://my-bucket/training/v1/shard-000000.tar', ...]
                                                                  +```python
                                                                  +>>> store = S3DataStore(credentials, bucket="my-bucket")
                                                                  +>>> urls = store.write_shards(dataset, prefix="training/v1")
                                                                  +>>> print(urls)
                                                                  +['s3://my-bucket/training/v1/shard-000000.tar', ...]
                                                                  +```
                                                                   
                                                                   ## Methods
                                                                   
                                                                  diff --git a/docs_src/api/AbstractIndex.qmd b/docs_src/api/AbstractIndex.qmd
                                                                  index 860787f..c991bd8 100644
                                                                  --- a/docs_src/api/AbstractIndex.qmd
                                                                  +++ b/docs_src/api/AbstractIndex.qmd
                                                                  @@ -20,22 +20,22 @@ Some index implementations support additional features:
                                                                   - ``data_store``: An AbstractDataStore for reading/writing dataset shards.
                                                                     If present, ``load_dataset`` will use it for S3 credential resolution.
                                                                   
                                                                  -## Example {.doc-section .doc-section-example}
                                                                  -
                                                                  -::
                                                                  -
                                                                  -    >>> def publish_and_list(index: AbstractIndex) -> None:
                                                                  -    ...     # Publish schemas for different types
                                                                  -    ...     schema1 = index.publish_schema(ImageSample, version="1.0.0")
                                                                  -    ...     schema2 = index.publish_schema(TextSample, version="1.0.0")
                                                                  -    ...
                                                                  -    ...     # Insert datasets of different types
                                                                  -    ...     index.insert_dataset(image_ds, name="images")
                                                                  -    ...     index.insert_dataset(text_ds, name="texts")
                                                                  -    ...
                                                                  -    ...     # List all datasets (mixed types)
                                                                  -    ...     for entry in index.list_datasets():
                                                                  -    ...         print(f"{entry.name} -> {entry.schema_ref}")
                                                                  +## Examples {.doc-section .doc-section-examples}
                                                                  +
                                                                  +```python
                                                                  +>>> def publish_and_list(index: AbstractIndex) -> None:
                                                                  +...     # Publish schemas for different types
                                                                  +...     schema1 = index.publish_schema(ImageSample, version="1.0.0")
                                                                  +...     schema2 = index.publish_schema(TextSample, version="1.0.0")
                                                                  +...
                                                                  +...     # Insert datasets of different types
                                                                  +...     index.insert_dataset(image_ds, name="images")
                                                                  +...     index.insert_dataset(text_ds, name="texts")
                                                                  +...
                                                                  +...     # List all datasets (mixed types)
                                                                  +...     for entry in index.list_datasets():
                                                                  +...         print(f"{entry.name} -> {entry.schema_ref}")
                                                                  +```
                                                                   
                                                                   ## Attributes
                                                                   
                                                                  @@ -90,15 +90,15 @@ generates a Packable class matching the schema definition.
                                                                   |        | [KeyError](`KeyError`)     | If schema not found.                                   |
                                                                   |        | [ValueError](`ValueError`) | If schema cannot be decoded (unsupported field types). |
                                                                   
                                                                  -#### Example {.doc-section .doc-section-example}
                                                                  +#### Examples {.doc-section .doc-section-examples}
                                                                   
                                                                  -::
                                                                  -
                                                                  -    >>> entry = index.get_dataset("my-dataset")
                                                                  -    >>> SampleType = index.decode_schema(entry.schema_ref)
                                                                  -    >>> ds = Dataset[SampleType](entry.data_urls[0])
                                                                  -    >>> for sample in ds.ordered():
                                                                  -    ...     print(sample)  # sample is instance of SampleType
                                                                  +```python
                                                                  +>>> entry = index.get_dataset("my-dataset")
                                                                  +>>> SampleType = index.decode_schema(entry.schema_ref)
                                                                  +>>> ds = Dataset[SampleType](entry.data_urls[0])
                                                                  +>>> for sample in ds.ordered():
                                                                  +...     print(sample)  # sample is instance of SampleType
                                                                  +```
                                                                   
                                                                   ### get_dataset { #atdata.AbstractIndex.get_dataset }
                                                                   
                                                                  diff --git a/docs_src/api/AtUri.qmd b/docs_src/api/AtUri.qmd
                                                                  index e6b9330..84e7177 100644
                                                                  --- a/docs_src/api/AtUri.qmd
                                                                  +++ b/docs_src/api/AtUri.qmd
                                                                  @@ -8,17 +8,17 @@ Parsed AT Protocol URI.
                                                                   
                                                                   AT URIs follow the format: at:////
                                                                   
                                                                  -## Example {.doc-section .doc-section-example}
                                                                  +## Examples {.doc-section .doc-section-examples}
                                                                   
                                                                  -::
                                                                  -
                                                                  -    >>> uri = AtUri.parse("at://did:plc:abc123/ac.foundation.dataset.sampleSchema/xyz")
                                                                  -    >>> uri.authority
                                                                  -    'did:plc:abc123'
                                                                  -    >>> uri.collection
                                                                  -    'ac.foundation.dataset.sampleSchema'
                                                                  -    >>> uri.rkey
                                                                  -    'xyz'
                                                                  +```python
                                                                  +>>> uri = AtUri.parse("at://did:plc:abc123/ac.foundation.dataset.sampleSchema/xyz")
                                                                  +>>> uri.authority
                                                                  +'did:plc:abc123'
                                                                  +>>> uri.collection
                                                                  +'ac.foundation.dataset.sampleSchema'
                                                                  +>>> uri.rkey
                                                                  +'xyz'
                                                                  +```
                                                                   
                                                                   ## Attributes
                                                                   
                                                                  diff --git a/docs_src/api/AtmosphereClient.qmd b/docs_src/api/AtmosphereClient.qmd
                                                                  index dcbd7af..0fa96da 100644
                                                                  --- a/docs_src/api/AtmosphereClient.qmd
                                                                  +++ b/docs_src/api/AtmosphereClient.qmd
                                                                  @@ -9,14 +9,14 @@ ATProto client wrapper for atdata operations.
                                                                   This class wraps the atproto SDK client and provides higher-level methods
                                                                   for working with atdata records (schemas, datasets, lenses).
                                                                   
                                                                  -## Example {.doc-section .doc-section-example}
                                                                  +## Examples {.doc-section .doc-section-examples}
                                                                   
                                                                  -::
                                                                  -
                                                                  -    >>> client = AtmosphereClient()
                                                                  -    >>> client.login("alice.bsky.social", "app-password")
                                                                  -    >>> print(client.did)
                                                                  -    'did:plc:...'
                                                                  +```python
                                                                  +>>> client = AtmosphereClient()
                                                                  +>>> client.login("alice.bsky.social", "app-password")
                                                                  +>>> print(client.did)
                                                                  +'did:plc:...'
                                                                  +```
                                                                   
                                                                   ## Note {.doc-section .doc-section-note}
                                                                   
                                                                  diff --git a/docs_src/api/AtmosphereIndex.qmd b/docs_src/api/AtmosphereIndex.qmd
                                                                  index cbe33d6..45d8bdc 100644
                                                                  --- a/docs_src/api/AtmosphereIndex.qmd
                                                                  +++ b/docs_src/api/AtmosphereIndex.qmd
                                                                  @@ -12,20 +12,20 @@ a unified interface compatible with LocalIndex.
                                                                   Optionally accepts a ``PDSBlobStore`` for writing dataset shards as
                                                                   ATProto blobs, enabling fully decentralized dataset storage.
                                                                   
                                                                  -## Example {.doc-section .doc-section-example}
                                                                  -
                                                                  -::
                                                                  -
                                                                  -    >>> client = AtmosphereClient()
                                                                  -    >>> client.login("handle.bsky.social", "app-password")
                                                                  -    >>>
                                                                  -    >>> # Without blob storage (external URLs only)
                                                                  -    >>> index = AtmosphereIndex(client)
                                                                  -    >>>
                                                                  -    >>> # With PDS blob storage
                                                                  -    >>> store = PDSBlobStore(client)
                                                                  -    >>> index = AtmosphereIndex(client, data_store=store)
                                                                  -    >>> entry = index.insert_dataset(dataset, name="my-data")
                                                                  +## Examples {.doc-section .doc-section-examples}
                                                                  +
                                                                  +```python
                                                                  +>>> client = AtmosphereClient()
                                                                  +>>> client.login("handle.bsky.social", "app-password")
                                                                  +>>>
                                                                  +>>> # Without blob storage (external URLs only)
                                                                  +>>> index = AtmosphereIndex(client)
                                                                  +>>>
                                                                  +>>> # With PDS blob storage
                                                                  +>>> store = PDSBlobStore(client)
                                                                  +>>> index = AtmosphereIndex(client, data_store=store)
                                                                  +>>> entry = index.insert_dataset(dataset, name="my-data")
                                                                  +```
                                                                   
                                                                   ## Attributes
                                                                   
                                                                  diff --git a/docs_src/api/BlobSource.qmd b/docs_src/api/BlobSource.qmd
                                                                  index f006047..565d401 100644
                                                                  --- a/docs_src/api/BlobSource.qmd
                                                                  +++ b/docs_src/api/BlobSource.qmd
                                                                  @@ -20,18 +20,18 @@ everything upfront.
                                                                   | blob_refs    | [list](`list`)\[[dict](`dict`)\[[str](`str`), [str](`str`)\]\] | List of blob reference dicts with 'did' and 'cid' keys.        |
                                                                   | pds_endpoint | [str](`str`) \| None                                           | Optional PDS endpoint URL. If not provided, resolved from DID. |
                                                                   
                                                                  -## Example {.doc-section .doc-section-example}
                                                                  -
                                                                  -::
                                                                  -
                                                                  -    >>> source = BlobSource(
                                                                  -    ...     blob_refs=[
                                                                  -    ...         {"did": "did:plc:abc123", "cid": "bafyrei..."},
                                                                  -    ...         {"did": "did:plc:abc123", "cid": "bafyrei..."},
                                                                  -    ...     ],
                                                                  -    ... )
                                                                  -    >>> for shard_id, stream in source.shards:
                                                                  -    ...     process(stream)
                                                                  +## Examples {.doc-section .doc-section-examples}
                                                                  +
                                                                  +```python
                                                                  +>>> source = BlobSource(
                                                                  +...     blob_refs=[
                                                                  +...         {"did": "did:plc:abc123", "cid": "bafyrei..."},
                                                                  +...         {"did": "did:plc:abc123", "cid": "bafyrei..."},
                                                                  +...     ],
                                                                  +... )
                                                                  +>>> for shard_id, stream in source.shards:
                                                                  +...     process(stream)
                                                                  +```
                                                                   
                                                                   ## Methods
                                                                   
                                                                  diff --git a/docs_src/api/DataSource.qmd b/docs_src/api/DataSource.qmd
                                                                  index 103a539..1c1c066 100644
                                                                  --- a/docs_src/api/DataSource.qmd
                                                                  +++ b/docs_src/api/DataSource.qmd
                                                                  @@ -19,19 +19,19 @@ resolution entirely. This enables:
                                                                   - ATProto blob streaming
                                                                   - Any other source that can provide file-like objects
                                                                   
                                                                  -## Example {.doc-section .doc-section-example}
                                                                  -
                                                                  -::
                                                                  -
                                                                  -    >>> source = S3Source(
                                                                  -    ...     bucket="my-bucket",
                                                                  -    ...     keys=["data-000.tar", "data-001.tar"],
                                                                  -    ...     endpoint="https://r2.example.com",
                                                                  -    ...     credentials=creds,
                                                                  -    ... )
                                                                  -    >>> ds = Dataset[MySample](source)
                                                                  -    >>> for sample in ds.ordered():
                                                                  -    ...     print(sample)
                                                                  +## Examples {.doc-section .doc-section-examples}
                                                                  +
                                                                  +```python
                                                                  +>>> source = S3Source(
                                                                  +...     bucket="my-bucket",
                                                                  +...     keys=["data-000.tar", "data-001.tar"],
                                                                  +...     endpoint="https://r2.example.com",
                                                                  +...     credentials=creds,
                                                                  +... )
                                                                  +>>> ds = Dataset[MySample](source)
                                                                  +>>> for sample in ds.ordered():
                                                                  +...     print(sample)
                                                                  +```
                                                                   
                                                                   ## Attributes
                                                                   
                                                                  diff --git a/docs_src/api/Dataset.qmd b/docs_src/api/Dataset.qmd
                                                                  index ee80c09..34974de 100644
                                                                  --- a/docs_src/api/Dataset.qmd
                                                                  +++ b/docs_src/api/Dataset.qmd
                                                                  @@ -28,17 +28,17 @@ The dataset supports:
                                                                   |--------|--------|----------------------------------------------------|
                                                                   | url    |        | WebDataset brace-notation URL for the tar file(s). |
                                                                   
                                                                  -## Example {.doc-section .doc-section-example}
                                                                  +## Examples {.doc-section .doc-section-examples}
                                                                   
                                                                  -::
                                                                  -
                                                                  -    >>> ds = Dataset[MyData]("path/to/data-{000000..000009}.tar")
                                                                  -    >>> for sample in ds.ordered(batch_size=32):
                                                                  -    ...     # sample is SampleBatch[MyData] with batch_size samples
                                                                  -    ...     embeddings = sample.embeddings  # shape: (32, ...)
                                                                  -    ...
                                                                  -    >>> # Transform to a different view
                                                                  -    >>> ds_view = ds.as_type(MyDataView)
                                                                  +```python
                                                                  +>>> ds = Dataset[MyData]("path/to/data-{000000..000009}.tar")
                                                                  +>>> for sample in ds.ordered(batch_size=32):
                                                                  +...     # sample is SampleBatch[MyData] with batch_size samples
                                                                  +...     embeddings = sample.embeddings  # shape: (32, ...)
                                                                  +...
                                                                  +>>> # Transform to a different view
                                                                  +>>> ds_view = ds.as_type(MyDataView)
                                                                  +```
                                                                   
                                                                   ## Note {.doc-section .doc-section-note}
                                                                   
                                                                  @@ -182,16 +182,16 @@ For datasets larger than available RAM, always specify ``maxcount``::
                                                                   This creates multiple parquet files: ``output-000000.parquet``,
                                                                   ``output-000001.parquet``, etc.
                                                                   
                                                                  -#### Example {.doc-section .doc-section-example}
                                                                  +#### Examples {.doc-section .doc-section-examples}
                                                                   
                                                                  -::
                                                                  -
                                                                  -    >>> ds = Dataset[MySample]("data.tar")
                                                                  -    >>> # Small dataset - load all at once
                                                                  -    >>> ds.to_parquet("output.parquet")
                                                                  -    >>>
                                                                  -    >>> # Large dataset - process in chunks
                                                                  -    >>> ds.to_parquet("output.parquet", maxcount=50000)
                                                                  +```python
                                                                  +>>> ds = Dataset[MySample]("data.tar")
                                                                  +>>> # Small dataset - load all at once
                                                                  +>>> ds.to_parquet("output.parquet")
                                                                  +>>>
                                                                  +>>> # Large dataset - process in chunks
                                                                  +>>> ds.to_parquet("output.parquet", maxcount=50000)
                                                                  +```
                                                                   
                                                                   ### wrap { #atdata.Dataset.wrap }
                                                                   
                                                                  diff --git a/docs_src/api/DatasetDict.qmd b/docs_src/api/DatasetDict.qmd
                                                                  index 00eb12b..77ac7ce 100644
                                                                  --- a/docs_src/api/DatasetDict.qmd
                                                                  +++ b/docs_src/api/DatasetDict.qmd
                                                                  @@ -16,17 +16,17 @@ methods that operate across all splits.
                                                                   |--------|--------|------------------------------------------------|------------|
                                                                   | ST     |        | The sample type for all datasets in this dict. | _required_ |
                                                                   
                                                                  -## Example {.doc-section .doc-section-example}
                                                                  +## Examples {.doc-section .doc-section-examples}
                                                                   
                                                                  -::
                                                                  -
                                                                  -    >>> ds_dict = load_dataset("path/to/data", MyData)
                                                                  -    >>> train = ds_dict["train"]
                                                                  -    >>> test = ds_dict["test"]
                                                                  -    >>>
                                                                  -    >>> # Iterate over all splits
                                                                  -    >>> for split_name, dataset in ds_dict.items():
                                                                  -    ...     print(f"{split_name}: {len(dataset.shard_list)} shards")
                                                                  +```python
                                                                  +>>> ds_dict = load_dataset("path/to/data", MyData)
                                                                  +>>> train = ds_dict["train"]
                                                                  +>>> test = ds_dict["test"]
                                                                  +>>>
                                                                  +>>> # Iterate over all splits
                                                                  +>>> for split_name, dataset in ds_dict.items():
                                                                  +...     print(f"{split_name}: {len(dataset.shard_list)} shards")
                                                                  +```
                                                                   
                                                                   ## Attributes
                                                                   
                                                                  diff --git a/docs_src/api/DatasetLoader.qmd b/docs_src/api/DatasetLoader.qmd
                                                                  index 28c34f0..e830c4a 100644
                                                                  --- a/docs_src/api/DatasetLoader.qmd
                                                                  +++ b/docs_src/api/DatasetLoader.qmd
                                                                  @@ -10,20 +10,20 @@ This class fetches dataset index records and can create Dataset objects
                                                                   from them. Note that loading a dataset requires having the corresponding
                                                                   Python class for the sample type.
                                                                   
                                                                  -## Example {.doc-section .doc-section-example}
                                                                  +## Examples {.doc-section .doc-section-examples}
                                                                   
                                                                  -::
                                                                  -
                                                                  -    >>> client = AtmosphereClient()
                                                                  -    >>> loader = DatasetLoader(client)
                                                                  -    >>>
                                                                  -    >>> # List available datasets
                                                                  -    >>> datasets = loader.list()
                                                                  -    >>> for ds in datasets:
                                                                  -    ...     print(ds["name"], ds["schemaRef"])
                                                                  -    >>>
                                                                  -    >>> # Get a specific dataset record
                                                                  -    >>> record = loader.get("at://did:plc:abc/ac.foundation.dataset.record/xyz")
                                                                  +```python
                                                                  +>>> client = AtmosphereClient()
                                                                  +>>> loader = DatasetLoader(client)
                                                                  +>>>
                                                                  +>>> # List available datasets
                                                                  +>>> datasets = loader.list()
                                                                  +>>> for ds in datasets:
                                                                  +...     print(ds["name"], ds["schemaRef"])
                                                                  +>>>
                                                                  +>>> # Get a specific dataset record
                                                                  +>>> record = loader.get("at://did:plc:abc/ac.foundation.dataset.record/xyz")
                                                                  +```
                                                                   
                                                                   ## Methods
                                                                   
                                                                  @@ -245,11 +245,11 @@ Supports both external URL storage and ATProto blob storage.
                                                                   |--------|----------------------------|-------------------------------------|
                                                                   |        | [ValueError](`ValueError`) | If no storage URLs can be resolved. |
                                                                   
                                                                  -#### Example {.doc-section .doc-section-example}
                                                                  +#### Examples {.doc-section .doc-section-examples}
                                                                   
                                                                  -::
                                                                  -
                                                                  -    >>> loader = DatasetLoader(client)
                                                                  -    >>> dataset = loader.to_dataset(uri, MySampleType)
                                                                  -    >>> for batch in dataset.shuffled(batch_size=32):
                                                                  -    ...     process(batch)
                                                                  \ No newline at end of file
                                                                  +```python
                                                                  +>>> loader = DatasetLoader(client)
                                                                  +>>> dataset = loader.to_dataset(uri, MySampleType)
                                                                  +>>> for batch in dataset.shuffled(batch_size=32):
                                                                  +...     process(batch)
                                                                  +```
                                                                  \ No newline at end of file
                                                                  diff --git a/docs_src/api/DatasetPublisher.qmd b/docs_src/api/DatasetPublisher.qmd
                                                                  index a254ee1..726304b 100644
                                                                  --- a/docs_src/api/DatasetPublisher.qmd
                                                                  +++ b/docs_src/api/DatasetPublisher.qmd
                                                                  @@ -9,22 +9,22 @@ Publishes dataset index records to ATProto.
                                                                   This class creates dataset records that reference a schema and point to
                                                                   external storage (WebDataset URLs) or ATProto blobs.
                                                                   
                                                                  -## Example {.doc-section .doc-section-example}
                                                                  -
                                                                  -::
                                                                  -
                                                                  -    >>> dataset = atdata.Dataset[MySample]("s3://bucket/data-{000000..000009}.tar")
                                                                  -    >>>
                                                                  -    >>> client = AtmosphereClient()
                                                                  -    >>> client.login("handle", "password")
                                                                  -    >>>
                                                                  -    >>> publisher = DatasetPublisher(client)
                                                                  -    >>> uri = publisher.publish(
                                                                  -    ...     dataset,
                                                                  -    ...     name="My Training Data",
                                                                  -    ...     description="Training data for my model",
                                                                  -    ...     tags=["computer-vision", "training"],
                                                                  -    ... )
                                                                  +## Examples {.doc-section .doc-section-examples}
                                                                  +
                                                                  +```python
                                                                  +>>> dataset = atdata.Dataset[MySample]("s3://bucket/data-{000000..000009}.tar")
                                                                  +>>>
                                                                  +>>> client = AtmosphereClient()
                                                                  +>>> client.login("handle", "password")
                                                                  +>>>
                                                                  +>>> publisher = DatasetPublisher(client)
                                                                  +>>> uri = publisher.publish(
                                                                  +...     dataset,
                                                                  +...     name="My Training Data",
                                                                  +...     description="Training data for my model",
                                                                  +...     tags=["computer-vision", "training"],
                                                                  +... )
                                                                  +```
                                                                   
                                                                   ## Methods
                                                                   
                                                                  diff --git a/docs_src/api/DictSample.qmd b/docs_src/api/DictSample.qmd
                                                                  index c98dfc7..56fb8d6 100644
                                                                  --- a/docs_src/api/DictSample.qmd
                                                                  +++ b/docs_src/api/DictSample.qmd
                                                                  @@ -20,18 +20,18 @@ To convert to a typed schema, use ``Dataset.as_type()`` with a
                                                                   ``@packable``-decorated class. Every ``@packable`` class automatically
                                                                   registers a lens from ``DictSample``, making this conversion seamless.
                                                                   
                                                                  -## Example {.doc-section .doc-section-example}
                                                                  -
                                                                  -::
                                                                  -
                                                                  -    >>> ds = load_dataset("path/to/data.tar")  # Returns Dataset[DictSample]
                                                                  -    >>> for sample in ds.ordered():
                                                                  -    ...     print(sample.some_field)      # Attribute access
                                                                  -    ...     print(sample["other_field"])  # Dict access
                                                                  -    ...     print(sample.keys())          # Inspect available fields
                                                                  -    ...
                                                                  -    >>> # Convert to typed schema
                                                                  -    >>> typed_ds = ds.as_type(MyTypedSample)
                                                                  +## Examples {.doc-section .doc-section-examples}
                                                                  +
                                                                  +```python
                                                                  +>>> ds = load_dataset("path/to/data.tar")  # Returns Dataset[DictSample]
                                                                  +>>> for sample in ds.ordered():
                                                                  +...     print(sample.some_field)      # Attribute access
                                                                  +...     print(sample["other_field"])  # Dict access
                                                                  +...     print(sample.keys())          # Inspect available fields
                                                                  +...
                                                                  +>>> # Convert to typed schema
                                                                  +>>> typed_ds = ds.as_type(MyTypedSample)
                                                                  +```
                                                                   
                                                                   ## Note {.doc-section .doc-section-note}
                                                                   
                                                                  diff --git a/docs_src/api/Lens.qmd b/docs_src/api/Lens.qmd
                                                                  index ce1e8ce..9e532ba 100644
                                                                  --- a/docs_src/api/Lens.qmd
                                                                  +++ b/docs_src/api/Lens.qmd
                                                                  @@ -18,31 +18,31 @@ Key components:
                                                                   Lenses support the functional programming concept of composable, well-behaved
                                                                   transformations that satisfy lens laws (GetPut and PutGet).
                                                                   
                                                                  -## Example {.doc-section .doc-section-example}
                                                                  -
                                                                  -::
                                                                  -
                                                                  -    >>> @packable
                                                                  -    ... class FullData:
                                                                  -    ...     name: str
                                                                  -    ...     age: int
                                                                  -    ...     embedding: NDArray
                                                                  -    ...
                                                                  -    >>> @packable
                                                                  -    ... class NameOnly:
                                                                  -    ...     name: str
                                                                  -    ...
                                                                  -    >>> @lens
                                                                  -    ... def name_view(full: FullData) -> NameOnly:
                                                                  -    ...     return NameOnly(name=full.name)
                                                                  -    ...
                                                                  -    >>> @name_view.putter
                                                                  -    ... def name_view_put(view: NameOnly, source: FullData) -> FullData:
                                                                  -    ...     return FullData(name=view.name, age=source.age,
                                                                  -    ...                     embedding=source.embedding)
                                                                  -    ...
                                                                  -    >>> ds = Dataset[FullData]("data.tar")
                                                                  -    >>> ds_names = ds.as_type(NameOnly)  # Uses registered lens
                                                                  +## Examples {.doc-section .doc-section-examples}
                                                                  +
                                                                  +```python
                                                                  +>>> @packable
                                                                  +... class FullData:
                                                                  +...     name: str
                                                                  +...     age: int
                                                                  +...     embedding: NDArray
                                                                  +...
                                                                  +>>> @packable
                                                                  +... class NameOnly:
                                                                  +...     name: str
                                                                  +...
                                                                  +>>> @lens
                                                                  +... def name_view(full: FullData) -> NameOnly:
                                                                  +...     return NameOnly(name=full.name)
                                                                  +...
                                                                  +>>> @name_view.putter
                                                                  +... def name_view_put(view: NameOnly, source: FullData) -> FullData:
                                                                  +...     return FullData(name=view.name, age=source.age,
                                                                  +...                     embedding=source.embedding)
                                                                  +...
                                                                  +>>> ds = Dataset[FullData]("data.tar")
                                                                  +>>> ds_names = ds.as_type(NameOnly)  # Uses registered lens
                                                                  +```
                                                                   
                                                                   ## Classes
                                                                   
                                                                  @@ -71,17 +71,17 @@ the view to be reflected back in the source.
                                                                   | S      |        | The source type, must derive from ``PackableSample``. | _required_ |
                                                                   | V      |        | The view type, must derive from ``PackableSample``.   | _required_ |
                                                                   
                                                                  -#### Example {.doc-section .doc-section-example}
                                                                  +#### Examples {.doc-section .doc-section-examples}
                                                                   
                                                                  -::
                                                                  -
                                                                  -    >>> @lens
                                                                  -    ... def name_lens(full: FullData) -> NameOnly:
                                                                  -    ...     return NameOnly(name=full.name)
                                                                  -    ...
                                                                  -    >>> @name_lens.putter
                                                                  -    ... def name_lens_put(view: NameOnly, source: FullData) -> FullData:
                                                                  -    ...     return FullData(name=view.name, age=source.age)
                                                                  +```python
                                                                  +>>> @lens
                                                                  +... def name_lens(full: FullData) -> NameOnly:
                                                                  +...     return NameOnly(name=full.name)
                                                                  +...
                                                                  +>>> @name_lens.putter
                                                                  +... def name_lens_put(view: NameOnly, source: FullData) -> FullData:
                                                                  +...     return FullData(name=view.name, age=source.age)
                                                                  +```
                                                                   
                                                                   #### Methods
                                                                   
                                                                  @@ -152,13 +152,13 @@ Decorator to register a putter function for this lens.
                                                                   |--------|--------------------------------------------------------------------------------------|---------------------------------------------------------------|
                                                                   |        | [LensPutter](`atdata.lens.LensPutter`)\[[S](`atdata.lens.S`), [V](`atdata.lens.V`)\] | The putter function, allowing this to be used as a decorator. |
                                                                   
                                                                  -###### Example {.doc-section .doc-section-example}
                                                                  +###### Examples {.doc-section .doc-section-examples}
                                                                   
                                                                  -::
                                                                  -
                                                                  -    >>> @my_lens.putter
                                                                  -    ... def my_lens_put(view: ViewType, source: SourceType) -> SourceType:
                                                                  -    ...     return SourceType(...)
                                                                  +```python
                                                                  +>>> @my_lens.putter
                                                                  +... def my_lens_put(view: ViewType, source: SourceType) -> SourceType:
                                                                  +...     return SourceType(field=view.field, other=source.other)
                                                                  +```
                                                                   
                                                                   ### LensNetwork { #atdata.lens.LensNetwork }
                                                                   
                                                                  @@ -267,14 +267,14 @@ automatically registers it in the global ``LensNetwork`` registry.
                                                                   |        | [Lens](`atdata.lens.Lens`)\[[S](`atdata.lens.S`), [V](`atdata.lens.V`)\] | A ``Lens[S, V]`` object that can be called to apply the transformation |
                                                                   |        | [Lens](`atdata.lens.Lens`)\[[S](`atdata.lens.S`), [V](`atdata.lens.V`)\] | or decorated with ``@lens_name.putter`` to add a putter function.      |
                                                                   
                                                                  -#### Example {.doc-section .doc-section-example}
                                                                  +#### Examples {.doc-section .doc-section-examples}
                                                                   
                                                                  -::
                                                                  -
                                                                  -    >>> @lens
                                                                  -    ... def extract_name(full: FullData) -> NameOnly:
                                                                  -    ...     return NameOnly(name=full.name)
                                                                  -    ...
                                                                  -    >>> @extract_name.putter
                                                                  -    ... def extract_name_put(view: NameOnly, source: FullData) -> FullData:
                                                                  -    ...     return FullData(name=view.name, age=source.age)
                                                                  \ No newline at end of file
                                                                  +```python
                                                                  +>>> @lens
                                                                  +... def extract_name(full: FullData) -> NameOnly:
                                                                  +...     return NameOnly(name=full.name)
                                                                  +...
                                                                  +>>> @extract_name.putter
                                                                  +... def extract_name_put(view: NameOnly, source: FullData) -> FullData:
                                                                  +...     return FullData(name=view.name, age=source.age)
                                                                  +```
                                                                  \ No newline at end of file
                                                                  diff --git a/docs_src/api/LensLoader.qmd b/docs_src/api/LensLoader.qmd
                                                                  index 68e2b14..a9570e7 100644
                                                                  --- a/docs_src/api/LensLoader.qmd
                                                                  +++ b/docs_src/api/LensLoader.qmd
                                                                  @@ -10,17 +10,17 @@ This class fetches lens transformation records. Note that actually
                                                                   using a lens requires installing the referenced code and importing
                                                                   it manually.
                                                                   
                                                                  -## Example {.doc-section .doc-section-example}
                                                                  +## Examples {.doc-section .doc-section-examples}
                                                                   
                                                                  -::
                                                                  -
                                                                  -    >>> client = AtmosphereClient()
                                                                  -    >>> loader = LensLoader(client)
                                                                  -    >>>
                                                                  -    >>> record = loader.get("at://did:plc:abc/ac.foundation.dataset.lens/xyz")
                                                                  -    >>> print(record["name"])
                                                                  -    >>> print(record["sourceSchema"])
                                                                  -    >>> print(record.get("getterCode", {}).get("repository"))
                                                                  +```python
                                                                  +>>> client = AtmosphereClient()
                                                                  +>>> loader = LensLoader(client)
                                                                  +>>>
                                                                  +>>> record = loader.get("at://did:plc:abc/ac.foundation.dataset.lens/xyz")
                                                                  +>>> print(record["name"])
                                                                  +>>> print(record["sourceSchema"])
                                                                  +>>> print(record.get("getterCode", {}).get("repository"))
                                                                  +```
                                                                   
                                                                   ## Methods
                                                                   
                                                                  diff --git a/docs_src/api/LensPublisher.qmd b/docs_src/api/LensPublisher.qmd
                                                                  index be11302..4243c86 100644
                                                                  --- a/docs_src/api/LensPublisher.qmd
                                                                  +++ b/docs_src/api/LensPublisher.qmd
                                                                  @@ -9,27 +9,27 @@ Publishes Lens transformation records to ATProto.
                                                                   This class creates lens records that reference source and target schemas
                                                                   and point to the transformation code in a git repository.
                                                                   
                                                                  -## Example {.doc-section .doc-section-example}
                                                                  -
                                                                  -::
                                                                  -
                                                                  -    >>> @atdata.lens
                                                                  -    ... def my_lens(source: SourceType) -> TargetType:
                                                                  -    ...     return TargetType(field=source.other_field)
                                                                  -    >>>
                                                                  -    >>> client = AtmosphereClient()
                                                                  -    >>> client.login("handle", "password")
                                                                  -    >>>
                                                                  -    >>> publisher = LensPublisher(client)
                                                                  -    >>> uri = publisher.publish(
                                                                  -    ...     name="my_lens",
                                                                  -    ...     source_schema_uri="at://did:plc:abc/ac.foundation.dataset.sampleSchema/source",
                                                                  -    ...     target_schema_uri="at://did:plc:abc/ac.foundation.dataset.sampleSchema/target",
                                                                  -    ...     code_repository="https://github.com/user/repo",
                                                                  -    ...     code_commit="abc123def456",
                                                                  -    ...     getter_path="mymodule.lenses:my_lens",
                                                                  -    ...     putter_path="mymodule.lenses:my_lens_putter",
                                                                  -    ... )
                                                                  +## Examples {.doc-section .doc-section-examples}
                                                                  +
                                                                  +```python
                                                                  +>>> @atdata.lens
                                                                  +... def my_lens(source: SourceType) -> TargetType:
                                                                  +...     return TargetType(field=source.other_field)
                                                                  +>>>
                                                                  +>>> client = AtmosphereClient()
                                                                  +>>> client.login("handle", "password")
                                                                  +>>>
                                                                  +>>> publisher = LensPublisher(client)
                                                                  +>>> uri = publisher.publish(
                                                                  +...     name="my_lens",
                                                                  +...     source_schema_uri="at://did:plc:abc/ac.foundation.dataset.sampleSchema/source",
                                                                  +...     target_schema_uri="at://did:plc:abc/ac.foundation.dataset.sampleSchema/target",
                                                                  +...     code_repository="https://github.com/user/repo",
                                                                  +...     code_commit="abc123def456",
                                                                  +...     getter_path="mymodule.lenses:my_lens",
                                                                  +...     putter_path="mymodule.lenses:my_lens_putter",
                                                                  +... )
                                                                  +```
                                                                   
                                                                   ## Security Note {.doc-section .doc-section-security-note}
                                                                   
                                                                  diff --git a/docs_src/api/PDSBlobStore.qmd b/docs_src/api/PDSBlobStore.qmd
                                                                  index c322a3d..025bacd 100644
                                                                  --- a/docs_src/api/PDSBlobStore.qmd
                                                                  +++ b/docs_src/api/PDSBlobStore.qmd
                                                                  @@ -19,14 +19,14 @@ to HTTP URLs for streaming.
                                                                   |--------|----------------------|------------------------------------------|
                                                                   | client | \'AtmosphereClient\' | Authenticated AtmosphereClient instance. |
                                                                   
                                                                  -## Example {.doc-section .doc-section-example}
                                                                  +## Examples {.doc-section .doc-section-examples}
                                                                   
                                                                  -::
                                                                  -
                                                                  -    >>> store = PDSBlobStore(client)
                                                                  -    >>> urls = store.write_shards(dataset, prefix="training/v1")
                                                                  -    >>> # Returns AT URIs like:
                                                                  -    >>> # ['at://did:plc:abc/blob/bafyrei...', ...]
                                                                  +```python
                                                                  +>>> store = PDSBlobStore(client)
                                                                  +>>> urls = store.write_shards(dataset, prefix="training/v1")
                                                                  +>>> # Returns AT URIs like:
                                                                  +>>> # ['at://did:plc:abc/blob/bafyrei...', ...]
                                                                  +```
                                                                   
                                                                   ## Methods
                                                                   
                                                                  diff --git a/docs_src/api/Packable-protocol.qmd b/docs_src/api/Packable-protocol.qmd
                                                                  index 8e57c3b..9b4e763 100644
                                                                  --- a/docs_src/api/Packable-protocol.qmd
                                                                  +++ b/docs_src/api/Packable-protocol.qmd
                                                                  @@ -18,19 +18,19 @@ The protocol captures the full interface needed for:
                                                                   - Schema publishing (class introspection via dataclass fields)
                                                                   - Serialization/deserialization (packed, from_bytes)
                                                                   
                                                                  -## Example {.doc-section .doc-section-example}
                                                                  -
                                                                  -::
                                                                  -
                                                                  -    >>> @packable
                                                                  -    ... class MySample:
                                                                  -    ...     name: str
                                                                  -    ...     value: int
                                                                  -    ...
                                                                  -    >>> def process(sample_type: Type[Packable]) -> None:
                                                                  -    ...     # Type checker knows sample_type has from_bytes, packed, etc.
                                                                  -    ...     instance = sample_type.from_bytes(data)
                                                                  -    ...     print(instance.packed)
                                                                  +## Examples {.doc-section .doc-section-examples}
                                                                  +
                                                                  +```python
                                                                  +>>> @packable
                                                                  +... class MySample:
                                                                  +...     name: str
                                                                  +...     value: int
                                                                  +...
                                                                  +>>> def process(sample_type: Type[Packable]) -> None:
                                                                  +...     # Type checker knows sample_type has from_bytes, packed, etc.
                                                                  +...     instance = sample_type.from_bytes(data)
                                                                  +...     print(instance.packed)
                                                                  +```
                                                                   
                                                                   ## Attributes
                                                                   
                                                                  diff --git a/docs_src/api/PackableSample.qmd b/docs_src/api/PackableSample.qmd
                                                                  index e29861a..1316db9 100644
                                                                  --- a/docs_src/api/PackableSample.qmd
                                                                  +++ b/docs_src/api/PackableSample.qmd
                                                                  @@ -15,18 +15,18 @@ Subclasses should be defined either by:
                                                                   1. Direct inheritance with the ``@dataclass`` decorator
                                                                   2. Using the ``@packable`` decorator (recommended)
                                                                   
                                                                  -## Example {.doc-section .doc-section-example}
                                                                  -
                                                                  -::
                                                                  -
                                                                  -    >>> @packable
                                                                  -    ... class MyData:
                                                                  -    ...     name: str
                                                                  -    ...     embeddings: NDArray
                                                                  -    ...
                                                                  -    >>> sample = MyData(name="test", embeddings=np.array([1.0, 2.0]))
                                                                  -    >>> packed = sample.packed  # Serialize to bytes
                                                                  -    >>> restored = MyData.from_bytes(packed)  # Deserialize
                                                                  +## Examples {.doc-section .doc-section-examples}
                                                                  +
                                                                  +```python
                                                                  +>>> @packable
                                                                  +... class MyData:
                                                                  +...     name: str
                                                                  +...     embeddings: NDArray
                                                                  +...
                                                                  +>>> sample = MyData(name="test", embeddings=np.array([1.0, 2.0]))
                                                                  +>>> packed = sample.packed  # Serialize to bytes
                                                                  +>>> restored = MyData.from_bytes(packed)  # Deserialize
                                                                  +```
                                                                   
                                                                   ## Attributes
                                                                   
                                                                  diff --git a/docs_src/api/S3Source.qmd b/docs_src/api/S3Source.qmd
                                                                  index a297197..ab31042 100644
                                                                  --- a/docs_src/api/S3Source.qmd
                                                                  +++ b/docs_src/api/S3Source.qmd
                                                                  @@ -35,19 +35,19 @@ source instance.
                                                                   | secret_key | [str](`str`) \| None           | Optional AWS secret access key.                          |
                                                                   | region     | [str](`str`) \| None           | Optional AWS region (defaults to us-east-1).             |
                                                                   
                                                                  -## Example {.doc-section .doc-section-example}
                                                                  +## Examples {.doc-section .doc-section-examples}
                                                                   
                                                                  -::
                                                                  -
                                                                  -    >>> source = S3Source(
                                                                  -    ...     bucket="my-datasets",
                                                                  -    ...     keys=["train/shard-000.tar", "train/shard-001.tar"],
                                                                  -    ...     endpoint="https://abc123.r2.cloudflarestorage.com",
                                                                  -    ...     access_key="AKIAIOSFODNN7EXAMPLE",
                                                                  -    ...     secret_key="wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY",
                                                                  -    ... )
                                                                  -    >>> for shard_id, stream in source.shards:
                                                                  -    ...     process(stream)
                                                                  +```python
                                                                  +>>> source = S3Source(
                                                                  +...     bucket="my-datasets",
                                                                  +...     keys=["train/shard-000.tar", "train/shard-001.tar"],
                                                                  +...     endpoint="https://abc123.r2.cloudflarestorage.com",
                                                                  +...     access_key="AKIAIOSFODNN7EXAMPLE",
                                                                  +...     secret_key="wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY",
                                                                  +... )
                                                                  +>>> for shard_id, stream in source.shards:
                                                                  +...     process(stream)
                                                                  +```
                                                                   
                                                                   ## Methods
                                                                   
                                                                  @@ -82,16 +82,16 @@ Accepts the same credential format used by S3DataStore.
                                                                   |--------|--------------|----------------------|
                                                                   |        | \'S3Source\' | Configured S3Source. |
                                                                   
                                                                  -#### Example {.doc-section .doc-section-example}
                                                                  +#### Examples {.doc-section .doc-section-examples}
                                                                   
                                                                  -::
                                                                  -
                                                                  -    >>> creds = {
                                                                  -    ...     "AWS_ACCESS_KEY_ID": "...",
                                                                  -    ...     "AWS_SECRET_ACCESS_KEY": "...",
                                                                  -    ...     "AWS_ENDPOINT": "https://r2.example.com",
                                                                  -    ... }
                                                                  -    >>> source = S3Source.from_credentials(creds, "my-bucket", ["data.tar"])
                                                                  +```python
                                                                  +>>> creds = {
                                                                  +...     "AWS_ACCESS_KEY_ID": "...",
                                                                  +...     "AWS_SECRET_ACCESS_KEY": "...",
                                                                  +...     "AWS_ENDPOINT": "https://r2.example.com",
                                                                  +... }
                                                                  +>>> source = S3Source.from_credentials(creds, "my-bucket", ["data.tar"])
                                                                  +```
                                                                   
                                                                   ### from_urls { #atdata.S3Source.from_urls }
                                                                   
                                                                  @@ -133,14 +133,14 @@ All URLs must be in the same bucket.
                                                                   |--------|----------------------------|------------------------------------------------------------|
                                                                   |        | [ValueError](`ValueError`) | If URLs are not valid s3:// URLs or span multiple buckets. |
                                                                   
                                                                  -#### Example {.doc-section .doc-section-example}
                                                                  -
                                                                  -::
                                                                  +#### Examples {.doc-section .doc-section-examples}
                                                                   
                                                                  -    >>> source = S3Source.from_urls(
                                                                  -    ...     ["s3://my-bucket/train-000.tar", "s3://my-bucket/train-001.tar"],
                                                                  -    ...     endpoint="https://r2.example.com",
                                                                  -    ... )
                                                                  +```python
                                                                  +>>> source = S3Source.from_urls(
                                                                  +...     ["s3://my-bucket/train-000.tar", "s3://my-bucket/train-001.tar"],
                                                                  +...     endpoint="https://r2.example.com",
                                                                  +... )
                                                                  +```
                                                                   
                                                                   ### list_shards { #atdata.S3Source.list_shards }
                                                                   
                                                                  diff --git a/docs_src/api/SampleBatch.qmd b/docs_src/api/SampleBatch.qmd
                                                                  index 6e3eef0..4000b08 100644
                                                                  --- a/docs_src/api/SampleBatch.qmd
                                                                  +++ b/docs_src/api/SampleBatch.qmd
                                                                  @@ -26,13 +26,13 @@ Other fields are aggregated into a list.
                                                                   |---------|--------|---------------------------------------------|
                                                                   | samples |        | The list of sample instances in this batch. |
                                                                   
                                                                  -## Example {.doc-section .doc-section-example}
                                                                  +## Examples {.doc-section .doc-section-examples}
                                                                   
                                                                  -::
                                                                  -
                                                                  -    >>> batch = SampleBatch[MyData]([sample1, sample2, sample3])
                                                                  -    >>> batch.embeddings  # Returns stacked numpy array of shape (3, ...)
                                                                  -    >>> batch.names  # Returns list of names
                                                                  +```python
                                                                  +>>> batch = SampleBatch[MyData]([sample1, sample2, sample3])
                                                                  +>>> batch.embeddings  # Returns stacked numpy array of shape (3, ...)
                                                                  +>>> batch.names  # Returns list of names
                                                                  +```
                                                                   
                                                                   ## Note {.doc-section .doc-section-note}
                                                                   
                                                                  diff --git a/docs_src/api/SchemaLoader.qmd b/docs_src/api/SchemaLoader.qmd
                                                                  index 359b577..cec555f 100644
                                                                  --- a/docs_src/api/SchemaLoader.qmd
                                                                  +++ b/docs_src/api/SchemaLoader.qmd
                                                                  @@ -9,17 +9,17 @@ Loads PackableSample schemas from ATProto.
                                                                   This class fetches schema records from ATProto and can list available
                                                                   schemas from a repository.
                                                                   
                                                                  -## Example {.doc-section .doc-section-example}
                                                                  +## Examples {.doc-section .doc-section-examples}
                                                                   
                                                                  -::
                                                                  -
                                                                  -    >>> client = AtmosphereClient()
                                                                  -    >>> client.login("handle", "password")
                                                                  -    >>>
                                                                  -    >>> loader = SchemaLoader(client)
                                                                  -    >>> schema = loader.get("at://did:plc:.../ac.foundation.dataset.sampleSchema/...")
                                                                  -    >>> print(schema["name"])
                                                                  -    'MySample'
                                                                  +```python
                                                                  +>>> client = AtmosphereClient()
                                                                  +>>> client.login("handle", "password")
                                                                  +>>>
                                                                  +>>> loader = SchemaLoader(client)
                                                                  +>>> schema = loader.get("at://did:plc:.../ac.foundation.dataset.sampleSchema/...")
                                                                  +>>> print(schema["name"])
                                                                  +'MySample'
                                                                  +```
                                                                   
                                                                   ## Methods
                                                                   
                                                                  diff --git a/docs_src/api/SchemaPublisher.qmd b/docs_src/api/SchemaPublisher.qmd
                                                                  index 874d97a..8ef326a 100644
                                                                  --- a/docs_src/api/SchemaPublisher.qmd
                                                                  +++ b/docs_src/api/SchemaPublisher.qmd
                                                                  @@ -9,22 +9,22 @@ Publishes PackableSample schemas to ATProto.
                                                                   This class introspects a PackableSample class to extract its field
                                                                   definitions and publishes them as an ATProto schema record.
                                                                   
                                                                  -## Example {.doc-section .doc-section-example}
                                                                  -
                                                                  -::
                                                                  -
                                                                  -    >>> @atdata.packable
                                                                  -    ... class MySample:
                                                                  -    ...     image: NDArray
                                                                  -    ...     label: str
                                                                  -    ...
                                                                  -    >>> client = AtmosphereClient()
                                                                  -    >>> client.login("handle", "password")
                                                                  -    >>>
                                                                  -    >>> publisher = SchemaPublisher(client)
                                                                  -    >>> uri = publisher.publish(MySample, version="1.0.0")
                                                                  -    >>> print(uri)
                                                                  -    at://did:plc:.../ac.foundation.dataset.sampleSchema/...
                                                                  +## Examples {.doc-section .doc-section-examples}
                                                                  +
                                                                  +```python
                                                                  +>>> @atdata.packable
                                                                  +... class MySample:
                                                                  +...     image: NDArray
                                                                  +...     label: str
                                                                  +...
                                                                  +>>> client = AtmosphereClient()
                                                                  +>>> client.login("handle", "password")
                                                                  +>>>
                                                                  +>>> publisher = SchemaPublisher(client)
                                                                  +>>> uri = publisher.publish(MySample, version="1.0.0")
                                                                  +>>> print(uri)
                                                                  +at://did:plc:.../ac.foundation.dataset.sampleSchema/...
                                                                  +```
                                                                   
                                                                   ## Methods
                                                                   
                                                                  diff --git a/docs_src/api/URLSource.qmd b/docs_src/api/URLSource.qmd
                                                                  index 17a44ce..ce5a122 100644
                                                                  --- a/docs_src/api/URLSource.qmd
                                                                  +++ b/docs_src/api/URLSource.qmd
                                                                  @@ -18,13 +18,13 @@ This is the default source type when a string URL is passed to Dataset.
                                                                   |--------|--------------|--------------------------------------|
                                                                   | url    | [str](`str`) | URL or brace pattern for the shards. |
                                                                   
                                                                  -## Example {.doc-section .doc-section-example}
                                                                  +## Examples {.doc-section .doc-section-examples}
                                                                   
                                                                  -::
                                                                  -
                                                                  -    >>> source = URLSource("https://example.com/train-{000..009}.tar")
                                                                  -    >>> for shard_id, stream in source.shards:
                                                                  -    ...     print(f"Streaming {shard_id}")
                                                                  +```python
                                                                  +>>> source = URLSource("https://example.com/train-{000..009}.tar")
                                                                  +>>> for shard_id, stream in source.shards:
                                                                  +...     print(f"Streaming {shard_id}")
                                                                  +```
                                                                   
                                                                   ## Methods
                                                                   
                                                                  diff --git a/docs_src/api/load_dataset.qmd b/docs_src/api/load_dataset.qmd
                                                                  index 99e61bf..bfa2aee 100644
                                                                  --- a/docs_src/api/load_dataset.qmd
                                                                  +++ b/docs_src/api/load_dataset.qmd
                                                                  @@ -50,23 +50,23 @@ convert to a typed schema.
                                                                   |        | [FileNotFoundError](`FileNotFoundError`) | If no data files are found at the path. |
                                                                   |        | [KeyError](`KeyError`)                   | If dataset not found in index.          |
                                                                   
                                                                  -## Example {.doc-section .doc-section-example}
                                                                  +## Examples {.doc-section .doc-section-examples}
                                                                   
                                                                  -::
                                                                  -
                                                                  -    >>> # Load without type - get DictSample for exploration
                                                                  -    >>> ds = load_dataset("./data/train.tar", split="train")
                                                                  -    >>> for sample in ds.ordered():
                                                                  -    ...     print(sample.keys())  # Explore fields
                                                                  -    ...     print(sample["text"]) # Dict-style access
                                                                  -    ...     print(sample.label)   # Attribute access
                                                                  -    >>>
                                                                  -    >>> # Convert to typed schema
                                                                  -    >>> typed_ds = ds.as_type(TextData)
                                                                  -    >>>
                                                                  -    >>> # Or load with explicit type directly
                                                                  -    >>> train_ds = load_dataset("./data/train-*.tar", TextData, split="train")
                                                                  -    >>>
                                                                  -    >>> # Load from index with auto-type resolution
                                                                  -    >>> index = LocalIndex()
                                                                  -    >>> ds = load_dataset("@local/my-dataset", index=index, split="train")
                                                                  \ No newline at end of file
                                                                  +```python
                                                                  +>>> # Load without type - get DictSample for exploration
                                                                  +>>> ds = load_dataset("./data/train.tar", split="train")
                                                                  +>>> for sample in ds.ordered():
                                                                  +...     print(sample.keys())  # Explore fields
                                                                  +...     print(sample["text"]) # Dict-style access
                                                                  +...     print(sample.label)   # Attribute access
                                                                  +>>>
                                                                  +>>> # Convert to typed schema
                                                                  +>>> typed_ds = ds.as_type(TextData)
                                                                  +>>>
                                                                  +>>> # Or load with explicit type directly
                                                                  +>>> train_ds = load_dataset("./data/train-*.tar", TextData, split="train")
                                                                  +>>>
                                                                  +>>> # Load from index with auto-type resolution
                                                                  +>>> index = LocalIndex()
                                                                  +>>> ds = load_dataset("@local/my-dataset", index=index, split="train")
                                                                  +```
                                                                  \ No newline at end of file
                                                                  diff --git a/docs_src/api/local.Index.qmd b/docs_src/api/local.Index.qmd
                                                                  index 35f0cb6..82bdf01 100644
                                                                  --- a/docs_src/api/local.Index.qmd
                                                                  +++ b/docs_src/api/local.Index.qmd
                                                                  @@ -150,16 +150,16 @@ stub file for the schema and want full IDE support.
                                                                   |--------|-----------------------------------------|----------------------------------------------------------------|
                                                                   |        | [type](`type`)\[[T](`atdata.local.T`)\] | The decoded type, cast to match the type_hint for IDE support. |
                                                                   
                                                                  -#### Example {.doc-section .doc-section-example}
                                                                  +#### Examples {.doc-section .doc-section-examples}
                                                                   
                                                                  -::
                                                                  -
                                                                  -    >>> # After enabling auto_stubs and configuring IDE extraPaths:
                                                                  -    >>> from local.MySample_1_0_0 import MySample
                                                                  -    >>>
                                                                  -    >>> # This gives full IDE autocomplete:
                                                                  -    >>> DecodedType = index.decode_schema_as(ref, MySample)
                                                                  -    >>> sample = DecodedType(text="hello", value=42)  # IDE knows signature!
                                                                  +```python
                                                                  +>>> # After enabling auto_stubs and configuring IDE extraPaths:
                                                                  +>>> from local.MySample_1_0_0 import MySample
                                                                  +>>>
                                                                  +>>> # This gives full IDE autocomplete:
                                                                  +>>> DecodedType = index.decode_schema_as(ref, MySample)
                                                                  +>>> sample = DecodedType(text="hello", value=42)  # IDE knows signature!
                                                                  +```
                                                                   
                                                                   #### Note {.doc-section .doc-section-note}
                                                                   
                                                                  @@ -269,17 +269,17 @@ be used to import the schema type with full IDE support.
                                                                   |        | [str](`str`) \| None | Import path like "local.MySample_1_0_0", or None if auto_stubs |
                                                                   |        | [str](`str`) \| None | is disabled.                                                   |
                                                                   
                                                                  -#### Example {.doc-section .doc-section-example}
                                                                  +#### Examples {.doc-section .doc-section-examples}
                                                                   
                                                                  -::
                                                                  -
                                                                  -    >>> index = LocalIndex(auto_stubs=True)
                                                                  -    >>> ref = index.publish_schema(MySample, version="1.0.0")
                                                                  -    >>> index.load_schema(ref)
                                                                  -    >>> print(index.get_import_path(ref))
                                                                  -    local.MySample_1_0_0
                                                                  -    >>> # Then in your code:
                                                                  -    >>> # from local.MySample_1_0_0 import MySample
                                                                  +```python
                                                                  +>>> index = LocalIndex(auto_stubs=True)
                                                                  +>>> ref = index.publish_schema(MySample, version="1.0.0")
                                                                  +>>> index.load_schema(ref)
                                                                  +>>> print(index.get_import_path(ref))
                                                                  +local.MySample_1_0_0
                                                                  +>>> # Then in your code:
                                                                  +>>> # from local.MySample_1_0_0 import MySample
                                                                  +```
                                                                   
                                                                   ### get_schema { #atdata.local.Index.get_schema }
                                                                   
                                                                  @@ -440,17 +440,17 @@ in the :attr:`types` namespace for easy access.
                                                                   |        | [KeyError](`KeyError`)     | If schema not found.         |
                                                                   |        | [ValueError](`ValueError`) | If schema cannot be decoded. |
                                                                   
                                                                  -#### Example {.doc-section .doc-section-example}
                                                                  -
                                                                  -::
                                                                  +#### Examples {.doc-section .doc-section-examples}
                                                                   
                                                                  -    >>> # Load and use immediately
                                                                  -    >>> MyType = index.load_schema("atdata://local/sampleSchema/MySample@1.0.0")
                                                                  -    >>> sample = MyType(name="hello", value=42)
                                                                  -    >>>
                                                                  -    >>> # Or access later via namespace
                                                                  -    >>> index.load_schema("atdata://local/sampleSchema/OtherType@1.0.0")
                                                                  -    >>> other = index.types.OtherType(data="test")
                                                                  +```python
                                                                  +>>> # Load and use immediately
                                                                  +>>> MyType = index.load_schema("atdata://local/sampleSchema/MySample@1.0.0")
                                                                  +>>> sample = MyType(name="hello", value=42)
                                                                  +>>>
                                                                  +>>> # Or access later via namespace
                                                                  +>>> index.load_schema("atdata://local/sampleSchema/OtherType@1.0.0")
                                                                  +>>> other = index.types.OtherType(data="test")
                                                                  +```
                                                                   
                                                                   ### publish_schema { #atdata.local.Index.publish_schema }
                                                                   
                                                                  diff --git a/docs_src/api/packable.qmd b/docs_src/api/packable.qmd
                                                                  index a064e06..f866752 100644
                                                                  --- a/docs_src/api/packable.qmd
                                                                  +++ b/docs_src/api/packable.qmd
                                                                  @@ -30,16 +30,16 @@ lens transformations, etc.).
                                                                   
                                                                   ## Examples {.doc-section .doc-section-examples}
                                                                   
                                                                  -This is a test of the functionality::
                                                                  -
                                                                  -    @packable
                                                                  -    class MyData:
                                                                  -        name: str
                                                                  -        values: NDArray
                                                                  -    
                                                                  -    sample = MyData(name="test", values=np.array([1, 2, 3]))
                                                                  -    bytes_data = sample.packed
                                                                  -    restored = MyData.from_bytes(bytes_data)
                                                                  -    
                                                                  -    # Works with Packable-typed APIs
                                                                  -    index.publish_schema(MyData, version="1.0.0")  # Type-safe
                                                                  \ No newline at end of file
                                                                  +```python
                                                                  +>>> @packable
                                                                  +... class MyData:
                                                                  +...     name: str
                                                                  +...     values: NDArray
                                                                  +...
                                                                  +>>> sample = MyData(name="test", values=np.array([1, 2, 3]))
                                                                  +>>> bytes_data = sample.packed
                                                                  +>>> restored = MyData.from_bytes(bytes_data)
                                                                  +>>>
                                                                  +>>> # Works with Packable-typed APIs
                                                                  +>>> index.publish_schema(MyData, version="1.0.0")  # Type-safe
                                                                  +```
                                                                  \ No newline at end of file
                                                                  diff --git a/docs_src/api/promote_to_atmosphere.qmd b/docs_src/api/promote_to_atmosphere.qmd
                                                                  index ff0c7bd..357322b 100644
                                                                  --- a/docs_src/api/promote_to_atmosphere.qmd
                                                                  +++ b/docs_src/api/promote_to_atmosphere.qmd
                                                                  @@ -45,11 +45,11 @@ making it discoverable on the federated atmosphere network.
                                                                   |        | [KeyError](`KeyError`)     | If schema not found in local index. |
                                                                   |        | [ValueError](`ValueError`) | If local entry has no data URLs.    |
                                                                   
                                                                  -## Example {.doc-section .doc-section-example}
                                                                  +## Examples {.doc-section .doc-section-examples}
                                                                   
                                                                  -::
                                                                  -
                                                                  -    >>> entry = local_index.get_dataset("mnist-train")
                                                                  -    >>> uri = promote_to_atmosphere(entry, local_index, client)
                                                                  -    >>> print(uri)
                                                                  -    at://did:plc:abc123/ac.foundation.dataset.datasetIndex/...
                                                                  \ No newline at end of file
                                                                  +```python
                                                                  +>>> entry = local_index.get_dataset("mnist-train")
                                                                  +>>> uri = promote_to_atmosphere(entry, local_index, client)
                                                                  +>>> print(uri)
                                                                  +at://did:plc:abc123/ac.foundation.dataset.datasetIndex/...
                                                                  +```
                                                                  \ No newline at end of file
                                                                  diff --git a/src/atdata/_cid.py b/src/atdata/_cid.py
                                                                  index 933bdea..a34e265 100644
                                                                  --- a/src/atdata/_cid.py
                                                                  +++ b/src/atdata/_cid.py
                                                                  @@ -12,13 +12,11 @@
                                                                   This ensures compatibility with ATProto's CID requirements and enables
                                                                   seamless promotion from local storage to atmosphere (ATProto network).
                                                                   
                                                                  -Example:
                                                                  -    ::
                                                                  -
                                                                  -        >>> schema = {"name": "ImageSample", "version": "1.0.0", "fields": [...]}
                                                                  -        >>> cid = generate_cid(schema)
                                                                  -        >>> print(cid)
                                                                  -        bafyreihffx5a2e7k6r5zqgp5iwpjqr2gfyheqhzqtlxagvqjqyxzqpzqaa
                                                                  +Examples:
                                                                  +    >>> schema = {"name": "ImageSample", "version": "1.0.0", "fields": [...]}
                                                                  +    >>> cid = generate_cid(schema)
                                                                  +    >>> print(cid)
                                                                  +    bafyreihffx5a2e7k6r5zqgp5iwpjqr2gfyheqhzqtlxagvqjqyxzqpzqaa
                                                                   """
                                                                   
                                                                   import hashlib
                                                                  @@ -50,11 +48,9 @@ def generate_cid(data: Any) -> str:
                                                                       Raises:
                                                                           ValueError: If the data cannot be encoded as DAG-CBOR.
                                                                   
                                                                  -    Example:
                                                                  -        ::
                                                                  -
                                                                  -            >>> generate_cid({"name": "test", "value": 42})
                                                                  -            'bafyrei...'
                                                                  +    Examples:
                                                                  +        >>> generate_cid({"name": "test", "value": 42})
                                                                  +        'bafyrei...'
                                                                       """
                                                                       # Encode data as DAG-CBOR
                                                                       try:
                                                                  @@ -86,11 +82,9 @@ def generate_cid_from_bytes(data_bytes: bytes) -> str:
                                                                       Returns:
                                                                           CIDv1 string in base32 multibase format.
                                                                   
                                                                  -    Example:
                                                                  -        ::
                                                                  -
                                                                  -            >>> cbor_bytes = libipld.encode_dag_cbor({"key": "value"})
                                                                  -            >>> cid = generate_cid_from_bytes(cbor_bytes)
                                                                  +    Examples:
                                                                  +        >>> cbor_bytes = libipld.encode_dag_cbor({"key": "value"})
                                                                  +        >>> cid = generate_cid_from_bytes(cbor_bytes)
                                                                       """
                                                                       sha256_hash = hashlib.sha256(data_bytes).digest()
                                                                       raw_cid_bytes = bytes([CID_VERSION_1, CODEC_DAG_CBOR, HASH_SHA256, SHA256_SIZE]) + sha256_hash
                                                                  @@ -107,14 +101,12 @@ def verify_cid(cid: str, data: Any) -> bool:
                                                                       Returns:
                                                                           True if the CID matches the data, False otherwise.
                                                                   
                                                                  -    Example:
                                                                  -        ::
                                                                  -
                                                                  -            >>> cid = generate_cid({"name": "test"})
                                                                  -            >>> verify_cid(cid, {"name": "test"})
                                                                  -            True
                                                                  -            >>> verify_cid(cid, {"name": "different"})
                                                                  -            False
                                                                  +    Examples:
                                                                  +        >>> cid = generate_cid({"name": "test"})
                                                                  +        >>> verify_cid(cid, {"name": "test"})
                                                                  +        True
                                                                  +        >>> verify_cid(cid, {"name": "different"})
                                                                  +        False
                                                                       """
                                                                       expected_cid = generate_cid(data)
                                                                       return cid == expected_cid
                                                                  @@ -130,14 +122,12 @@ def parse_cid(cid: str) -> dict:
                                                                           Dictionary with 'version', 'codec', and 'hash' keys.
                                                                           The 'hash' value is itself a dict with 'code', 'size', and 'digest'.
                                                                   
                                                                  -    Example:
                                                                  -        ::
                                                                  -
                                                                  -            >>> info = parse_cid('bafyrei...')
                                                                  -            >>> info['version']
                                                                  -            1
                                                                  -            >>> info['codec']
                                                                  -            113  # 0x71 = dag-cbor
                                                                  +    Examples:
                                                                  +        >>> info = parse_cid('bafyrei...')
                                                                  +        >>> info['version']
                                                                  +        1
                                                                  +        >>> info['codec']
                                                                  +        113  # 0x71 = dag-cbor
                                                                       """
                                                                       return libipld.decode_cid(cid)
                                                                   
                                                                  diff --git a/src/atdata/_hf_api.py b/src/atdata/_hf_api.py
                                                                  index 0fe2495..35bdde3 100644
                                                                  --- a/src/atdata/_hf_api.py
                                                                  +++ b/src/atdata/_hf_api.py
                                                                  @@ -9,23 +9,21 @@
                                                                   - Built on WebDataset for efficient streaming of large datasets
                                                                   - No Arrow caching layer (WebDataset handles remote/local transparently)
                                                                   
                                                                  -Example:
                                                                  -    ::
                                                                  -
                                                                  -        >>> import atdata
                                                                  -        >>> from atdata import load_dataset
                                                                  -        >>>
                                                                  -        >>> @atdata.packable
                                                                  -        ... class MyData:
                                                                  -        ...     text: str
                                                                  -        ...     label: int
                                                                  -        >>>
                                                                  -        >>> # Load a single split
                                                                  -        >>> ds = load_dataset("path/to/train-{000000..000099}.tar", MyData, split="train")
                                                                  -        >>>
                                                                  -        >>> # Load all splits (returns DatasetDict)
                                                                  -        >>> ds_dict = load_dataset("path/to/{train,test}-*.tar", MyData)
                                                                  -        >>> train_ds = ds_dict["train"]
                                                                  +Examples:
                                                                  +    >>> import atdata
                                                                  +    >>> from atdata import load_dataset
                                                                  +    >>>
                                                                  +    >>> @atdata.packable
                                                                  +    ... class MyData:
                                                                  +    ...     text: str
                                                                  +    ...     label: int
                                                                  +    >>>
                                                                  +    >>> # Load a single split
                                                                  +    >>> ds = load_dataset("path/to/train-{000000..000099}.tar", MyData, split="train")
                                                                  +    >>>
                                                                  +    >>> # Load all splits (returns DatasetDict)
                                                                  +    >>> ds_dict = load_dataset("path/to/{train,test}-*.tar", MyData)
                                                                  +    >>> train_ds = ds_dict["train"]
                                                                   """
                                                                   
                                                                   from __future__ import annotations
                                                                  @@ -70,16 +68,14 @@ class DatasetDict(Generic[ST], dict):
                                                                       Parameters:
                                                                           ST: The sample type for all datasets in this dict.
                                                                   
                                                                  -    Example:
                                                                  -        ::
                                                                  -
                                                                  -            >>> ds_dict = load_dataset("path/to/data", MyData)
                                                                  -            >>> train = ds_dict["train"]
                                                                  -            >>> test = ds_dict["test"]
                                                                  -            >>>
                                                                  -            >>> # Iterate over all splits
                                                                  -            >>> for split_name, dataset in ds_dict.items():
                                                                  -            ...     print(f"{split_name}: {len(dataset.shard_list)} shards")
                                                                  +    Examples:
                                                                  +        >>> ds_dict = load_dataset("path/to/data", MyData)
                                                                  +        >>> train = ds_dict["train"]
                                                                  +        >>> test = ds_dict["test"]
                                                                  +        >>>
                                                                  +        >>> # Iterate over all splits
                                                                  +        >>> for split_name, dataset in ds_dict.items():
                                                                  +        ...     print(f"{split_name}: {len(dataset.shard_list)} shards")
                                                                       """
                                                                       # TODO The above has a line for "Parameters:" that should be "Type Parameters:"; this is a temporary fix for `quartodoc` auto-generation bugs.
                                                                   
                                                                  @@ -613,25 +609,23 @@ def load_dataset(
                                                                           FileNotFoundError: If no data files are found at the path.
                                                                           KeyError: If dataset not found in index.
                                                                   
                                                                  -    Example:
                                                                  -        ::
                                                                  -
                                                                  -            >>> # Load without type - get DictSample for exploration
                                                                  -            >>> ds = load_dataset("./data/train.tar", split="train")
                                                                  -            >>> for sample in ds.ordered():
                                                                  -            ...     print(sample.keys())  # Explore fields
                                                                  -            ...     print(sample["text"]) # Dict-style access
                                                                  -            ...     print(sample.label)   # Attribute access
                                                                  -            >>>
                                                                  -            >>> # Convert to typed schema
                                                                  -            >>> typed_ds = ds.as_type(TextData)
                                                                  -            >>>
                                                                  -            >>> # Or load with explicit type directly
                                                                  -            >>> train_ds = load_dataset("./data/train-*.tar", TextData, split="train")
                                                                  -            >>>
                                                                  -            >>> # Load from index with auto-type resolution
                                                                  -            >>> index = LocalIndex()
                                                                  -            >>> ds = load_dataset("@local/my-dataset", index=index, split="train")
                                                                  +    Examples:
                                                                  +        >>> # Load without type - get DictSample for exploration
                                                                  +        >>> ds = load_dataset("./data/train.tar", split="train")
                                                                  +        >>> for sample in ds.ordered():
                                                                  +        ...     print(sample.keys())  # Explore fields
                                                                  +        ...     print(sample["text"]) # Dict-style access
                                                                  +        ...     print(sample.label)   # Attribute access
                                                                  +        >>>
                                                                  +        >>> # Convert to typed schema
                                                                  +        >>> typed_ds = ds.as_type(TextData)
                                                                  +        >>>
                                                                  +        >>> # Or load with explicit type directly
                                                                  +        >>> train_ds = load_dataset("./data/train-*.tar", TextData, split="train")
                                                                  +        >>>
                                                                  +        >>> # Load from index with auto-type resolution
                                                                  +        >>> index = LocalIndex()
                                                                  +        >>> ds = load_dataset("@local/my-dataset", index=index, split="train")
                                                                       """
                                                                       # Handle @handle/dataset indexed path resolution
                                                                       if _is_indexed_path(path):
                                                                  diff --git a/src/atdata/_protocols.py b/src/atdata/_protocols.py
                                                                  index 7ed8b30..c89d544 100644
                                                                  --- a/src/atdata/_protocols.py
                                                                  +++ b/src/atdata/_protocols.py
                                                                  @@ -19,16 +19,14 @@
                                                                       AbstractIndex: Protocol for index operations (schemas, datasets, lenses)
                                                                       AbstractDataStore: Protocol for data storage operations
                                                                   
                                                                  -Example:
                                                                  -    ::
                                                                  -
                                                                  -        >>> def process_datasets(index: AbstractIndex) -> None:
                                                                  -        ...     for entry in index.list_datasets():
                                                                  -        ...         print(f"{entry.name}: {entry.data_urls}")
                                                                  -        ...
                                                                  -        >>> # Works with either LocalIndex or AtmosphereIndex
                                                                  -        >>> process_datasets(local_index)
                                                                  -        >>> process_datasets(atmosphere_index)
                                                                  +Examples:
                                                                  +    >>> def process_datasets(index: AbstractIndex) -> None:
                                                                  +    ...     for entry in index.list_datasets():
                                                                  +    ...         print(f"{entry.name}: {entry.data_urls}")
                                                                  +    ...
                                                                  +    >>> # Works with either LocalIndex or AtmosphereIndex
                                                                  +    >>> process_datasets(local_index)
                                                                  +    >>> process_datasets(atmosphere_index)
                                                                   """
                                                                   
                                                                   from typing import (
                                                                  @@ -67,18 +65,16 @@ class Packable(Protocol):
                                                                       - Schema publishing (class introspection via dataclass fields)
                                                                       - Serialization/deserialization (packed, from_bytes)
                                                                   
                                                                  -    Example:
                                                                  -        ::
                                                                  -
                                                                  -            >>> @packable
                                                                  -            ... class MySample:
                                                                  -            ...     name: str
                                                                  -            ...     value: int
                                                                  -            ...
                                                                  -            >>> def process(sample_type: Type[Packable]) -> None:
                                                                  -            ...     # Type checker knows sample_type has from_bytes, packed, etc.
                                                                  -            ...     instance = sample_type.from_bytes(data)
                                                                  -            ...     print(instance.packed)
                                                                  +    Examples:
                                                                  +        >>> @packable
                                                                  +        ... class MySample:
                                                                  +        ...     name: str
                                                                  +        ...     value: int
                                                                  +        ...
                                                                  +        >>> def process(sample_type: Type[Packable]) -> None:
                                                                  +        ...     # Type checker knows sample_type has from_bytes, packed, etc.
                                                                  +        ...     instance = sample_type.from_bytes(data)
                                                                  +        ...     print(instance.packed)
                                                                       """
                                                                   
                                                                       @classmethod
                                                                  @@ -169,21 +165,19 @@ class AbstractIndex(Protocol):
                                                                           - ``data_store``: An AbstractDataStore for reading/writing dataset shards.
                                                                             If present, ``load_dataset`` will use it for S3 credential resolution.
                                                                   
                                                                  -    Example:
                                                                  -        ::
                                                                  -
                                                                  -            >>> def publish_and_list(index: AbstractIndex) -> None:
                                                                  -            ...     # Publish schemas for different types
                                                                  -            ...     schema1 = index.publish_schema(ImageSample, version="1.0.0")
                                                                  -            ...     schema2 = index.publish_schema(TextSample, version="1.0.0")
                                                                  -            ...
                                                                  -            ...     # Insert datasets of different types
                                                                  -            ...     index.insert_dataset(image_ds, name="images")
                                                                  -            ...     index.insert_dataset(text_ds, name="texts")
                                                                  -            ...
                                                                  -            ...     # List all datasets (mixed types)
                                                                  -            ...     for entry in index.list_datasets():
                                                                  -            ...         print(f"{entry.name} -> {entry.schema_ref}")
                                                                  +    Examples:
                                                                  +        >>> def publish_and_list(index: AbstractIndex) -> None:
                                                                  +        ...     # Publish schemas for different types
                                                                  +        ...     schema1 = index.publish_schema(ImageSample, version="1.0.0")
                                                                  +        ...     schema2 = index.publish_schema(TextSample, version="1.0.0")
                                                                  +        ...
                                                                  +        ...     # Insert datasets of different types
                                                                  +        ...     index.insert_dataset(image_ds, name="images")
                                                                  +        ...     index.insert_dataset(text_ds, name="texts")
                                                                  +        ...
                                                                  +        ...     # List all datasets (mixed types)
                                                                  +        ...     for entry in index.list_datasets():
                                                                  +        ...         print(f"{entry.name} -> {entry.schema_ref}")
                                                                       """
                                                                   
                                                                       @property
                                                                  @@ -341,14 +335,12 @@ def decode_schema(self, ref: str) -> Type[Packable]:
                                                                               KeyError: If schema not found.
                                                                               ValueError: If schema cannot be decoded (unsupported field types).
                                                                   
                                                                  -        Example:
                                                                  -            ::
                                                                  -
                                                                  -                >>> entry = index.get_dataset("my-dataset")
                                                                  -                >>> SampleType = index.decode_schema(entry.schema_ref)
                                                                  -                >>> ds = Dataset[SampleType](entry.data_urls[0])
                                                                  -                >>> for sample in ds.ordered():
                                                                  -                ...     print(sample)  # sample is instance of SampleType
                                                                  +        Examples:
                                                                  +            >>> entry = index.get_dataset("my-dataset")
                                                                  +            >>> SampleType = index.decode_schema(entry.schema_ref)
                                                                  +            >>> ds = Dataset[SampleType](entry.data_urls[0])
                                                                  +            >>> for sample in ds.ordered():
                                                                  +            ...     print(sample)  # sample is instance of SampleType
                                                                           """
                                                                           ...
                                                                   
                                                                  @@ -368,13 +360,11 @@ class AbstractDataStore(Protocol):
                                                                       flexible deployment: local index with S3 storage, atmosphere index with
                                                                       S3 storage, or atmosphere index with PDS blobs.
                                                                   
                                                                  -    Example:
                                                                  -        ::
                                                                  -
                                                                  -            >>> store = S3DataStore(credentials, bucket="my-bucket")
                                                                  -            >>> urls = store.write_shards(dataset, prefix="training/v1")
                                                                  -            >>> print(urls)
                                                                  -            ['s3://my-bucket/training/v1/shard-000000.tar', ...]
                                                                  +    Examples:
                                                                  +        >>> store = S3DataStore(credentials, bucket="my-bucket")
                                                                  +        >>> urls = store.write_shards(dataset, prefix="training/v1")
                                                                  +        >>> print(urls)
                                                                  +        ['s3://my-bucket/training/v1/shard-000000.tar', ...]
                                                                       """
                                                                   
                                                                       def write_shards(
                                                                  @@ -443,18 +433,16 @@ class DataSource(Protocol):
                                                                       - ATProto blob streaming
                                                                       - Any other source that can provide file-like objects
                                                                   
                                                                  -    Example:
                                                                  -        ::
                                                                  -
                                                                  -            >>> source = S3Source(
                                                                  -            ...     bucket="my-bucket",
                                                                  -            ...     keys=["data-000.tar", "data-001.tar"],
                                                                  -            ...     endpoint="https://r2.example.com",
                                                                  -            ...     credentials=creds,
                                                                  -            ... )
                                                                  -            >>> ds = Dataset[MySample](source)
                                                                  -            >>> for sample in ds.ordered():
                                                                  -            ...     print(sample)
                                                                  +    Examples:
                                                                  +        >>> source = S3Source(
                                                                  +        ...     bucket="my-bucket",
                                                                  +        ...     keys=["data-000.tar", "data-001.tar"],
                                                                  +        ...     endpoint="https://r2.example.com",
                                                                  +        ...     credentials=creds,
                                                                  +        ... )
                                                                  +        >>> ds = Dataset[MySample](source)
                                                                  +        >>> for sample in ds.ordered():
                                                                  +        ...     print(sample)
                                                                       """
                                                                   
                                                                       @property
                                                                  @@ -467,12 +455,10 @@ def shards(self) -> Iterator[tuple[str, IO[bytes]]]:
                                                                           Yields:
                                                                               Tuple of (shard_identifier, file_like_stream).
                                                                   
                                                                  -        Example:
                                                                  -            ::
                                                                  -
                                                                  -                >>> for shard_id, stream in source.shards:
                                                                  -                ...     print(f"Processing {shard_id}")
                                                                  -                ...     data = stream.read()
                                                                  +        Examples:
                                                                  +            >>> for shard_id, stream in source.shards:
                                                                  +            ...     print(f"Processing {shard_id}")
                                                                  +            ...     data = stream.read()
                                                                           """
                                                                           ...
                                                                   
                                                                  diff --git a/src/atdata/_schema_codec.py b/src/atdata/_schema_codec.py
                                                                  index c8d046c..e0ebc33 100644
                                                                  --- a/src/atdata/_schema_codec.py
                                                                  +++ b/src/atdata/_schema_codec.py
                                                                  @@ -9,19 +9,17 @@
                                                                   ``atmosphere/_types.py``, with field types supporting primitives, ndarrays,
                                                                   arrays, and schema references.
                                                                   
                                                                  -Example:
                                                                  -    ::
                                                                  -
                                                                  -        >>> schema = {
                                                                  -        ...     "name": "ImageSample",
                                                                  -        ...     "version": "1.0.0",
                                                                  -        ...     "fields": [
                                                                  -        ...         {"name": "image", "fieldType": {"$type": "...#ndarray", "dtype": "float32"}, "optional": False},
                                                                  -        ...         {"name": "label", "fieldType": {"$type": "...#primitive", "primitive": "str"}, "optional": False},
                                                                  -        ...     ]
                                                                  -        ... }
                                                                  -        >>> ImageSample = schema_to_type(schema)
                                                                  -        >>> sample = ImageSample(image=np.zeros((64, 64)), label="cat")
                                                                  +Examples:
                                                                  +    >>> schema = {
                                                                  +    ...     "name": "ImageSample",
                                                                  +    ...     "version": "1.0.0",
                                                                  +    ...     "fields": [
                                                                  +    ...         {"name": "image", "fieldType": {"$type": "...#ndarray", "dtype": "float32"}, "optional": False},
                                                                  +    ...         {"name": "label", "fieldType": {"$type": "...#primitive", "primitive": "str"}, "optional": False},
                                                                  +    ...     ]
                                                                  +    ... }
                                                                  +    >>> ImageSample = schema_to_type(schema)
                                                                  +    >>> sample = ImageSample(image=np.zeros((64, 64)), label="cat")
                                                                   """
                                                                   
                                                                   from dataclasses import field, make_dataclass
                                                                  @@ -151,14 +149,12 @@ def schema_to_type(
                                                                       Raises:
                                                                           ValueError: If schema is malformed or contains unsupported types.
                                                                   
                                                                  -    Example:
                                                                  -        ::
                                                                  -
                                                                  -            >>> schema = index.get_schema("local://schemas/MySample@1.0.0")
                                                                  -            >>> MySample = schema_to_type(schema)
                                                                  -            >>> ds = Dataset[MySample]("data.tar")
                                                                  -            >>> for sample in ds.ordered():
                                                                  -            ...     print(sample)
                                                                  +    Examples:
                                                                  +        >>> schema = index.get_schema("local://schemas/MySample@1.0.0")
                                                                  +        >>> MySample = schema_to_type(schema)
                                                                  +        >>> ds = Dataset[MySample]("data.tar")
                                                                  +        >>> for sample in ds.ordered():
                                                                  +        ...     print(sample)
                                                                       """
                                                                       # Check cache first
                                                                       if use_cache:
                                                                  @@ -282,14 +278,12 @@ def generate_stub(schema: dict) -> str:
                                                                       Returns:
                                                                           String content for a .pyi stub file.
                                                                   
                                                                  -    Example:
                                                                  -        ::
                                                                  -
                                                                  -            >>> schema = index.get_schema("atdata://local/sampleSchema/MySample@1.0.0")
                                                                  -            >>> stub_content = generate_stub(schema.to_dict())
                                                                  -            >>> # Save to a stubs directory configured in your IDE
                                                                  -            >>> with open("stubs/my_sample.pyi", "w") as f:
                                                                  -            ...     f.write(stub_content)
                                                                  +    Examples:
                                                                  +        >>> schema = index.get_schema("atdata://local/sampleSchema/MySample@1.0.0")
                                                                  +        >>> stub_content = generate_stub(schema.to_dict())
                                                                  +        >>> # Save to a stubs directory configured in your IDE
                                                                  +        >>> with open("stubs/my_sample.pyi", "w") as f:
                                                                  +        ...     f.write(stub_content)
                                                                       """
                                                                       name = schema.get("name", "UnknownSample")
                                                                       version = schema.get("version", "1.0.0")
                                                                  @@ -360,12 +354,10 @@ def generate_module(schema: dict) -> str:
                                                                       Returns:
                                                                           String content for a .py module file.
                                                                   
                                                                  -    Example:
                                                                  -        ::
                                                                  -
                                                                  -            >>> schema = index.get_schema("atdata://local/sampleSchema/MySample@1.0.0")
                                                                  -            >>> module_content = generate_module(schema.to_dict())
                                                                  -            >>> # The module can be imported after being saved
                                                                  +    Examples:
                                                                  +        >>> schema = index.get_schema("atdata://local/sampleSchema/MySample@1.0.0")
                                                                  +        >>> module_content = generate_module(schema.to_dict())
                                                                  +        >>> # The module can be imported after being saved
                                                                       """
                                                                       name = schema.get("name", "UnknownSample")
                                                                       version = schema.get("version", "1.0.0")
                                                                  diff --git a/src/atdata/_sources.py b/src/atdata/_sources.py
                                                                  index 165d6f2..64bdf3d 100644
                                                                  --- a/src/atdata/_sources.py
                                                                  +++ b/src/atdata/_sources.py
                                                                  @@ -13,22 +13,20 @@
                                                                   By providing streams directly, we can support private repos, custom
                                                                   endpoints, and future backends like ATProto blobs.
                                                                   
                                                                  -Example:
                                                                  -    ::
                                                                  -
                                                                  -        >>> # Standard URL (uses WebDataset's gopen)
                                                                  -        >>> source = URLSource("https://example.com/data-{000..009}.tar")
                                                                  -        >>> ds = Dataset[MySample](source)
                                                                  -        >>>
                                                                  -        >>> # Private S3 with credentials
                                                                  -        >>> source = S3Source(
                                                                  -        ...     bucket="my-bucket",
                                                                  -        ...     keys=["train/shard-000.tar", "train/shard-001.tar"],
                                                                  -        ...     endpoint="https://my-r2.cloudflarestorage.com",
                                                                  -        ...     access_key="...",
                                                                  -        ...     secret_key="...",
                                                                  -        ... )
                                                                  -        >>> ds = Dataset[MySample](source)
                                                                  +Examples:
                                                                  +    >>> # Standard URL (uses WebDataset's gopen)
                                                                  +    >>> source = URLSource("https://example.com/data-{000..009}.tar")
                                                                  +    >>> ds = Dataset[MySample](source)
                                                                  +    >>>
                                                                  +    >>> # Private S3 with credentials
                                                                  +    >>> source = S3Source(
                                                                  +    ...     bucket="my-bucket",
                                                                  +    ...     keys=["train/shard-000.tar", "train/shard-001.tar"],
                                                                  +    ...     endpoint="https://my-r2.cloudflarestorage.com",
                                                                  +    ...     access_key="...",
                                                                  +    ...     secret_key="...",
                                                                  +    ... )
                                                                  +    >>> ds = Dataset[MySample](source)
                                                                   """
                                                                   
                                                                   from __future__ import annotations
                                                                  @@ -54,12 +52,10 @@ class URLSource:
                                                                       Attributes:
                                                                           url: URL or brace pattern for the shards.
                                                                   
                                                                  -    Example:
                                                                  -        ::
                                                                  -
                                                                  -            >>> source = URLSource("https://example.com/train-{000..009}.tar")
                                                                  -            >>> for shard_id, stream in source.shards:
                                                                  -            ...     print(f"Streaming {shard_id}")
                                                                  +    Examples:
                                                                  +        >>> source = URLSource("https://example.com/train-{000..009}.tar")
                                                                  +        >>> for shard_id, stream in source.shards:
                                                                  +        ...     print(f"Streaming {shard_id}")
                                                                       """
                                                                   
                                                                       url: str
                                                                  @@ -131,18 +127,16 @@ class S3Source:
                                                                           secret_key: Optional AWS secret access key.
                                                                           region: Optional AWS region (defaults to us-east-1).
                                                                   
                                                                  -    Example:
                                                                  -        ::
                                                                  -
                                                                  -            >>> source = S3Source(
                                                                  -            ...     bucket="my-datasets",
                                                                  -            ...     keys=["train/shard-000.tar", "train/shard-001.tar"],
                                                                  -            ...     endpoint="https://abc123.r2.cloudflarestorage.com",
                                                                  -            ...     access_key="AKIAIOSFODNN7EXAMPLE",
                                                                  -            ...     secret_key="wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY",
                                                                  -            ... )
                                                                  -            >>> for shard_id, stream in source.shards:
                                                                  -            ...     process(stream)
                                                                  +    Examples:
                                                                  +        >>> source = S3Source(
                                                                  +        ...     bucket="my-datasets",
                                                                  +        ...     keys=["train/shard-000.tar", "train/shard-001.tar"],
                                                                  +        ...     endpoint="https://abc123.r2.cloudflarestorage.com",
                                                                  +        ...     access_key="AKIAIOSFODNN7EXAMPLE",
                                                                  +        ...     secret_key="wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY",
                                                                  +        ... )
                                                                  +        >>> for shard_id, stream in source.shards:
                                                                  +        ...     process(stream)
                                                                       """
                                                                   
                                                                       bucket: str
                                                                  @@ -258,13 +252,11 @@ def from_urls(
                                                                           Raises:
                                                                               ValueError: If URLs are not valid s3:// URLs or span multiple buckets.
                                                                   
                                                                  -        Example:
                                                                  -            ::
                                                                  -
                                                                  -                >>> source = S3Source.from_urls(
                                                                  -                ...     ["s3://my-bucket/train-000.tar", "s3://my-bucket/train-001.tar"],
                                                                  -                ...     endpoint="https://r2.example.com",
                                                                  -                ... )
                                                                  +        Examples:
                                                                  +            >>> source = S3Source.from_urls(
                                                                  +            ...     ["s3://my-bucket/train-000.tar", "s3://my-bucket/train-001.tar"],
                                                                  +            ...     endpoint="https://r2.example.com",
                                                                  +            ... )
                                                                           """
                                                                           if not urls:
                                                                               raise ValueError("urls cannot be empty")
                                                                  @@ -317,15 +309,13 @@ def from_credentials(
                                                                           Returns:
                                                                               Configured S3Source.
                                                                   
                                                                  -        Example:
                                                                  -            ::
                                                                  -
                                                                  -                >>> creds = {
                                                                  -                ...     "AWS_ACCESS_KEY_ID": "...",
                                                                  -                ...     "AWS_SECRET_ACCESS_KEY": "...",
                                                                  -                ...     "AWS_ENDPOINT": "https://r2.example.com",
                                                                  -                ... }
                                                                  -                >>> source = S3Source.from_credentials(creds, "my-bucket", ["data.tar"])
                                                                  +        Examples:
                                                                  +            >>> creds = {
                                                                  +            ...     "AWS_ACCESS_KEY_ID": "...",
                                                                  +            ...     "AWS_SECRET_ACCESS_KEY": "...",
                                                                  +            ...     "AWS_ENDPOINT": "https://r2.example.com",
                                                                  +            ... }
                                                                  +            >>> source = S3Source.from_credentials(creds, "my-bucket", ["data.tar"])
                                                                           """
                                                                           return cls(
                                                                               bucket=bucket,
                                                                  @@ -352,17 +342,15 @@ class BlobSource:
                                                                           blob_refs: List of blob reference dicts with 'did' and 'cid' keys.
                                                                           pds_endpoint: Optional PDS endpoint URL. If not provided, resolved from DID.
                                                                   
                                                                  -    Example:
                                                                  -        ::
                                                                  -
                                                                  -            >>> source = BlobSource(
                                                                  -            ...     blob_refs=[
                                                                  -            ...         {"did": "did:plc:abc123", "cid": "bafyrei..."},
                                                                  -            ...         {"did": "did:plc:abc123", "cid": "bafyrei..."},
                                                                  -            ...     ],
                                                                  -            ... )
                                                                  -            >>> for shard_id, stream in source.shards:
                                                                  -            ...     process(stream)
                                                                  +    Examples:
                                                                  +        >>> source = BlobSource(
                                                                  +        ...     blob_refs=[
                                                                  +        ...         {"did": "did:plc:abc123", "cid": "bafyrei..."},
                                                                  +        ...         {"did": "did:plc:abc123", "cid": "bafyrei..."},
                                                                  +        ...     ],
                                                                  +        ... )
                                                                  +        >>> for shard_id, stream in source.shards:
                                                                  +        ...     process(stream)
                                                                       """
                                                                   
                                                                       blob_refs: list[dict[str, str]]
                                                                  diff --git a/src/atdata/_stub_manager.py b/src/atdata/_stub_manager.py
                                                                  index 7784dc4..0a2256a 100644
                                                                  --- a/src/atdata/_stub_manager.py
                                                                  +++ b/src/atdata/_stub_manager.py
                                                                  @@ -8,20 +8,18 @@
                                                                   can be imported at runtime. This allows ``decode_schema`` to return properly
                                                                   typed classes that work with both static type checkers and runtime.
                                                                   
                                                                  -Example:
                                                                  -    ::
                                                                  -
                                                                  -        >>> from atdata.local import Index
                                                                  -        >>>
                                                                  -        >>> # Enable auto-stub generation
                                                                  -        >>> index = Index(auto_stubs=True)
                                                                  -        >>>
                                                                  -        >>> # Modules are generated automatically on decode_schema
                                                                  -        >>> MyType = index.decode_schema("atdata://local/sampleSchema/MySample@1.0.0")
                                                                  -        >>> # MyType is now properly typed for IDE autocomplete!
                                                                  -        >>>
                                                                  -        >>> # Get the stub directory path for IDE configuration
                                                                  -        >>> print(f"Add to IDE: {index.stub_dir}")
                                                                  +Examples:
                                                                  +    >>> from atdata.local import Index
                                                                  +    >>>
                                                                  +    >>> # Enable auto-stub generation
                                                                  +    >>> index = Index(auto_stubs=True)
                                                                  +    >>>
                                                                  +    >>> # Modules are generated automatically on decode_schema
                                                                  +    >>> MyType = index.decode_schema("atdata://local/sampleSchema/MySample@1.0.0")
                                                                  +    >>> # MyType is now properly typed for IDE autocomplete!
                                                                  +    >>>
                                                                  +    >>> # Get the stub directory path for IDE configuration
                                                                  +    >>> print(f"Add to IDE: {index.stub_dir}")
                                                                   """
                                                                   
                                                                   from pathlib import Path
                                                                  @@ -101,14 +99,12 @@ class StubManager:
                                                                       Args:
                                                                           stub_dir: Directory to write module files. Defaults to ``~/.atdata/stubs/``.
                                                                   
                                                                  -    Example:
                                                                  -        ::
                                                                  -
                                                                  -            >>> manager = StubManager()
                                                                  -            >>> schema_dict = {"name": "MySample", "version": "1.0.0", "fields": [...]}
                                                                  -            >>> SampleClass = manager.ensure_module(schema_dict)
                                                                  -            >>> print(manager.stub_dir)
                                                                  -            /Users/you/.atdata/stubs
                                                                  +    Examples:
                                                                  +        >>> manager = StubManager()
                                                                  +        >>> schema_dict = {"name": "MySample", "version": "1.0.0", "fields": [...]}
                                                                  +        >>> SampleClass = manager.ensure_module(schema_dict)
                                                                  +        >>> print(manager.stub_dir)
                                                                  +        /Users/you/.atdata/stubs
                                                                       """
                                                                   
                                                                       def __init__(self, stub_dir: Optional[Union[str, Path]] = None):
                                                                  diff --git a/src/atdata/atmosphere/__init__.py b/src/atdata/atmosphere/__init__.py
                                                                  index 3916419..fb13578 100644
                                                                  --- a/src/atdata/atmosphere/__init__.py
                                                                  +++ b/src/atdata/atmosphere/__init__.py
                                                                  @@ -15,16 +15,14 @@
                                                                   to work unchanged. These features are opt-in for users who want to publish
                                                                   or discover datasets on the ATProto network.
                                                                   
                                                                  -Example:
                                                                  -    ::
                                                                  -
                                                                  -        >>> from atdata.atmosphere import AtmosphereClient, SchemaPublisher
                                                                  -        >>>
                                                                  -        >>> client = AtmosphereClient()
                                                                  -        >>> client.login("handle.bsky.social", "app-password")
                                                                  -        >>>
                                                                  -        >>> publisher = SchemaPublisher(client)
                                                                  -        >>> schema_uri = publisher.publish(MySampleType, version="1.0.0")
                                                                  +Examples:
                                                                  +    >>> from atdata.atmosphere import AtmosphereClient, SchemaPublisher
                                                                  +    >>>
                                                                  +    >>> client = AtmosphereClient()
                                                                  +    >>> client.login("handle.bsky.social", "app-password")
                                                                  +    >>>
                                                                  +    >>> publisher = SchemaPublisher(client)
                                                                  +    >>> schema_uri = publisher.publish(MySampleType, version="1.0.0")
                                                                   
                                                                   Note:
                                                                       This module requires the ``atproto`` package to be installed::
                                                                  @@ -106,19 +104,17 @@ class AtmosphereIndex:
                                                                       Optionally accepts a ``PDSBlobStore`` for writing dataset shards as
                                                                       ATProto blobs, enabling fully decentralized dataset storage.
                                                                   
                                                                  -    Example:
                                                                  -        ::
                                                                  -
                                                                  -            >>> client = AtmosphereClient()
                                                                  -            >>> client.login("handle.bsky.social", "app-password")
                                                                  -            >>>
                                                                  -            >>> # Without blob storage (external URLs only)
                                                                  -            >>> index = AtmosphereIndex(client)
                                                                  -            >>>
                                                                  -            >>> # With PDS blob storage
                                                                  -            >>> store = PDSBlobStore(client)
                                                                  -            >>> index = AtmosphereIndex(client, data_store=store)
                                                                  -            >>> entry = index.insert_dataset(dataset, name="my-data")
                                                                  +    Examples:
                                                                  +        >>> client = AtmosphereClient()
                                                                  +        >>> client.login("handle.bsky.social", "app-password")
                                                                  +        >>>
                                                                  +        >>> # Without blob storage (external URLs only)
                                                                  +        >>> index = AtmosphereIndex(client)
                                                                  +        >>>
                                                                  +        >>> # With PDS blob storage
                                                                  +        >>> store = PDSBlobStore(client)
                                                                  +        >>> index = AtmosphereIndex(client, data_store=store)
                                                                  +        >>> entry = index.insert_dataset(dataset, name="my-data")
                                                                       """
                                                                   
                                                                       def __init__(
                                                                  diff --git a/src/atdata/atmosphere/_types.py b/src/atdata/atmosphere/_types.py
                                                                  index 65a4463..2f70df6 100644
                                                                  --- a/src/atdata/atmosphere/_types.py
                                                                  +++ b/src/atdata/atmosphere/_types.py
                                                                  @@ -19,16 +19,14 @@ class AtUri:
                                                                   
                                                                    AT URIs follow the format: at://<authority>/<collection>/<rkey>
                                                                   
                                                                  -    Example:
                                                                  -        ::
                                                                  -
                                                                  -            >>> uri = AtUri.parse("at://did:plc:abc123/ac.foundation.dataset.sampleSchema/xyz")
                                                                  -            >>> uri.authority
                                                                  -            'did:plc:abc123'
                                                                  -            >>> uri.collection
                                                                  -            'ac.foundation.dataset.sampleSchema'
                                                                  -            >>> uri.rkey
                                                                  -            'xyz'
                                                                  +    Examples:
                                                                  +        >>> uri = AtUri.parse("at://did:plc:abc123/ac.foundation.dataset.sampleSchema/xyz")
                                                                  +        >>> uri.authority
                                                                  +        'did:plc:abc123'
                                                                  +        >>> uri.collection
                                                                  +        'ac.foundation.dataset.sampleSchema'
                                                                  +        >>> uri.rkey
                                                                  +        'xyz'
                                                                       """
                                                                   
                                                                       authority: str
                                                                  diff --git a/src/atdata/atmosphere/client.py b/src/atdata/atmosphere/client.py
                                                                  index d409ef9..15d8e07 100644
                                                                  --- a/src/atdata/atmosphere/client.py
                                                                  +++ b/src/atdata/atmosphere/client.py
                                                                  @@ -33,13 +33,11 @@ class AtmosphereClient:
                                                                       This class wraps the atproto SDK client and provides higher-level methods
                                                                       for working with atdata records (schemas, datasets, lenses).
                                                                   
                                                                  -    Example:
                                                                  -        ::
                                                                  -
                                                                  -            >>> client = AtmosphereClient()
                                                                  -            >>> client.login("alice.bsky.social", "app-password")
                                                                  -            >>> print(client.did)
                                                                  -            'did:plc:...'
                                                                  +    Examples:
                                                                  +        >>> client = AtmosphereClient()
                                                                  +        >>> client.login("alice.bsky.social", "app-password")
                                                                  +        >>> print(client.did)
                                                                  +        'did:plc:...'
                                                                   
                                                                       Note:
                                                                           The password should be an app-specific password, not your main account
                                                                  diff --git a/src/atdata/atmosphere/lens.py b/src/atdata/atmosphere/lens.py
                                                                  index b8b19e2..765690d 100644
                                                                  --- a/src/atdata/atmosphere/lens.py
                                                                  +++ b/src/atdata/atmosphere/lens.py
                                                                  @@ -31,26 +31,24 @@ class LensPublisher:
                                                                       This class creates lens records that reference source and target schemas
                                                                       and point to the transformation code in a git repository.
                                                                   
                                                                  -    Example:
                                                                  -        ::
                                                                  -
                                                                  -            >>> @atdata.lens
                                                                  -            ... def my_lens(source: SourceType) -> TargetType:
                                                                  -            ...     return TargetType(field=source.other_field)
                                                                  -            >>>
                                                                  -            >>> client = AtmosphereClient()
                                                                  -            >>> client.login("handle", "password")
                                                                  -            >>>
                                                                  -            >>> publisher = LensPublisher(client)
                                                                  -            >>> uri = publisher.publish(
                                                                  -            ...     name="my_lens",
                                                                  -            ...     source_schema_uri="at://did:plc:abc/ac.foundation.dataset.sampleSchema/source",
                                                                  -            ...     target_schema_uri="at://did:plc:abc/ac.foundation.dataset.sampleSchema/target",
                                                                  -            ...     code_repository="https://github.com/user/repo",
                                                                  -            ...     code_commit="abc123def456",
                                                                  -            ...     getter_path="mymodule.lenses:my_lens",
                                                                  -            ...     putter_path="mymodule.lenses:my_lens_putter",
                                                                  -            ... )
                                                                  +    Examples:
                                                                  +        >>> @atdata.lens
                                                                  +        ... def my_lens(source: SourceType) -> TargetType:
                                                                  +        ...     return TargetType(field=source.other_field)
                                                                  +        >>>
                                                                  +        >>> client = AtmosphereClient()
                                                                  +        >>> client.login("handle", "password")
                                                                  +        >>>
                                                                  +        >>> publisher = LensPublisher(client)
                                                                  +        >>> uri = publisher.publish(
                                                                  +        ...     name="my_lens",
                                                                  +        ...     source_schema_uri="at://did:plc:abc/ac.foundation.dataset.sampleSchema/source",
                                                                  +        ...     target_schema_uri="at://did:plc:abc/ac.foundation.dataset.sampleSchema/target",
                                                                  +        ...     code_repository="https://github.com/user/repo",
                                                                  +        ...     code_commit="abc123def456",
                                                                  +        ...     getter_path="mymodule.lenses:my_lens",
                                                                  +        ...     putter_path="mymodule.lenses:my_lens_putter",
                                                                  +        ... )
                                                                   
                                                                       Security Note:
                                                                           Lens code is stored as references to git repositories rather than
                                                                  @@ -195,16 +193,14 @@ class LensLoader:
                                                                       using a lens requires installing the referenced code and importing
                                                                       it manually.
                                                                   
                                                                  -    Example:
                                                                  -        ::
                                                                  -
                                                                  -            >>> client = AtmosphereClient()
                                                                  -            >>> loader = LensLoader(client)
                                                                  -            >>>
                                                                  -            >>> record = loader.get("at://did:plc:abc/ac.foundation.dataset.lens/xyz")
                                                                  -            >>> print(record["name"])
                                                                  -            >>> print(record["sourceSchema"])
                                                                  -            >>> print(record.get("getterCode", {}).get("repository"))
                                                                  +    Examples:
                                                                  +        >>> client = AtmosphereClient()
                                                                  +        >>> loader = LensLoader(client)
                                                                  +        >>>
                                                                  +        >>> record = loader.get("at://did:plc:abc/ac.foundation.dataset.lens/xyz")
                                                                  +        >>> print(record["name"])
                                                                  +        >>> print(record["sourceSchema"])
                                                                  +        >>> print(record.get("getterCode", {}).get("repository"))
                                                                       """
                                                                   
                                                                       def __init__(self, client: AtmosphereClient):
                                                                  diff --git a/src/atdata/atmosphere/records.py b/src/atdata/atmosphere/records.py
                                                                  index 78cd9d8..82d22c2 100644
                                                                  --- a/src/atdata/atmosphere/records.py
                                                                  +++ b/src/atdata/atmosphere/records.py
                                                                  @@ -31,21 +31,19 @@ class DatasetPublisher:
                                                                       This class creates dataset records that reference a schema and point to
                                                                       external storage (WebDataset URLs) or ATProto blobs.
                                                                   
                                                                  -    Example:
                                                                  -        ::
                                                                  -
                                                                  -            >>> dataset = atdata.Dataset[MySample]("s3://bucket/data-{000000..000009}.tar")
                                                                  -            >>>
                                                                  -            >>> client = AtmosphereClient()
                                                                  -            >>> client.login("handle", "password")
                                                                  -            >>>
                                                                  -            >>> publisher = DatasetPublisher(client)
                                                                  -            >>> uri = publisher.publish(
                                                                  -            ...     dataset,
                                                                  -            ...     name="My Training Data",
                                                                  -            ...     description="Training data for my model",
                                                                  -            ...     tags=["computer-vision", "training"],
                                                                  -            ... )
                                                                  +    Examples:
                                                                  +        >>> dataset = atdata.Dataset[MySample]("s3://bucket/data-{000000..000009}.tar")
                                                                  +        >>>
                                                                  +        >>> client = AtmosphereClient()
                                                                  +        >>> client.login("handle", "password")
                                                                  +        >>>
                                                                  +        >>> publisher = DatasetPublisher(client)
                                                                  +        >>> uri = publisher.publish(
                                                                  +        ...     dataset,
                                                                  +        ...     name="My Training Data",
                                                                  +        ...     description="Training data for my model",
                                                                  +        ...     tags=["computer-vision", "training"],
                                                                  +        ... )
                                                                       """
                                                                   
                                                                       def __init__(self, client: AtmosphereClient):
                                                                  @@ -267,19 +265,17 @@ class DatasetLoader:
                                                                       from them. Note that loading a dataset requires having the corresponding
                                                                       Python class for the sample type.
                                                                   
                                                                  -    Example:
                                                                  -        ::
                                                                  -
                                                                  -            >>> client = AtmosphereClient()
                                                                  -            >>> loader = DatasetLoader(client)
                                                                  -            >>>
                                                                  -            >>> # List available datasets
                                                                  -            >>> datasets = loader.list()
                                                                  -            >>> for ds in datasets:
                                                                  -            ...     print(ds["name"], ds["schemaRef"])
                                                                  -            >>>
                                                                  -            >>> # Get a specific dataset record
                                                                  -            >>> record = loader.get("at://did:plc:abc/ac.foundation.dataset.record/xyz")
                                                                  +    Examples:
                                                                  +        >>> client = AtmosphereClient()
                                                                  +        >>> loader = DatasetLoader(client)
                                                                  +        >>>
                                                                  +        >>> # List available datasets
                                                                  +        >>> datasets = loader.list()
                                                                  +        >>> for ds in datasets:
                                                                  +        ...     print(ds["name"], ds["schemaRef"])
                                                                  +        >>>
                                                                  +        >>> # Get a specific dataset record
                                                                  +        >>> record = loader.get("at://did:plc:abc/ac.foundation.dataset.record/xyz")
                                                                       """
                                                                   
                                                                       def __init__(self, client: AtmosphereClient):
                                                                  @@ -478,13 +474,11 @@ def to_dataset(
                                                                           Raises:
                                                                               ValueError: If no storage URLs can be resolved.
                                                                   
                                                                  -        Example:
                                                                  -            ::
                                                                  -
                                                                  -                >>> loader = DatasetLoader(client)
                                                                  -                >>> dataset = loader.to_dataset(uri, MySampleType)
                                                                  -                >>> for batch in dataset.shuffled(batch_size=32):
                                                                  -                ...     process(batch)
                                                                  +        Examples:
                                                                  +            >>> loader = DatasetLoader(client)
                                                                  +            >>> dataset = loader.to_dataset(uri, MySampleType)
                                                                  +            >>> for batch in dataset.shuffled(batch_size=32):
                                                                  +            ...     process(batch)
                                                                           """
                                                                           # Import here to avoid circular import
                                                                           from ..dataset import Dataset
                                                                  diff --git a/src/atdata/atmosphere/schema.py b/src/atdata/atmosphere/schema.py
                                                                  index ef75e7f..9eef3b4 100644
                                                                  --- a/src/atdata/atmosphere/schema.py
                                                                  +++ b/src/atdata/atmosphere/schema.py
                                                                  @@ -37,21 +37,19 @@ class SchemaPublisher:
                                                                       This class introspects a PackableSample class to extract its field
                                                                       definitions and publishes them as an ATProto schema record.
                                                                   
                                                                  -    Example:
                                                                  -        ::
                                                                  -
                                                                  -            >>> @atdata.packable
                                                                  -            ... class MySample:
                                                                  -            ...     image: NDArray
                                                                  -            ...     label: str
                                                                  -            ...
                                                                  -            >>> client = AtmosphereClient()
                                                                  -            >>> client.login("handle", "password")
                                                                  -            >>>
                                                                  -            >>> publisher = SchemaPublisher(client)
                                                                  -            >>> uri = publisher.publish(MySample, version="1.0.0")
                                                                  -            >>> print(uri)
                                                                  -            at://did:plc:.../ac.foundation.dataset.sampleSchema/...
                                                                  +    Examples:
                                                                  +        >>> @atdata.packable
                                                                  +        ... class MySample:
                                                                  +        ...     image: NDArray
                                                                  +        ...     label: str
                                                                  +        ...
                                                                  +        >>> client = AtmosphereClient()
                                                                  +        >>> client.login("handle", "password")
                                                                  +        >>>
                                                                  +        >>> publisher = SchemaPublisher(client)
                                                                  +        >>> uri = publisher.publish(MySample, version="1.0.0")
                                                                  +        >>> print(uri)
                                                                  +        at://did:plc:.../ac.foundation.dataset.sampleSchema/...
                                                                       """
                                                                   
                                                                       def __init__(self, client: AtmosphereClient):
                                                                  @@ -178,16 +176,14 @@ class SchemaLoader:
                                                                       This class fetches schema records from ATProto and can list available
                                                                       schemas from a repository.
                                                                   
                                                                  -    Example:
                                                                  -        ::
                                                                  -
                                                                  -            >>> client = AtmosphereClient()
                                                                  -            >>> client.login("handle", "password")
                                                                  -            >>>
                                                                  -            >>> loader = SchemaLoader(client)
                                                                  -            >>> schema = loader.get("at://did:plc:.../ac.foundation.dataset.sampleSchema/...")
                                                                  -            >>> print(schema["name"])
                                                                  -            'MySample'
                                                                  +    Examples:
                                                                  +        >>> client = AtmosphereClient()
                                                                  +        >>> client.login("handle", "password")
                                                                  +        >>>
                                                                  +        >>> loader = SchemaLoader(client)
                                                                  +        >>> schema = loader.get("at://did:plc:.../ac.foundation.dataset.sampleSchema/...")
                                                                  +        >>> print(schema["name"])
                                                                  +        'MySample'
                                                                       """
                                                                   
                                                                       def __init__(self, client: AtmosphereClient):
                                                                  diff --git a/src/atdata/atmosphere/store.py b/src/atdata/atmosphere/store.py
                                                                  index 8b398b2..e6913b5 100644
                                                                  --- a/src/atdata/atmosphere/store.py
                                                                  +++ b/src/atdata/atmosphere/store.py
                                                                  @@ -6,18 +6,16 @@
                                                                   This enables fully decentralized dataset storage where both metadata (records)
                                                                   and data (blobs) live on the AT Protocol network.
                                                                   
                                                                  -Example:
                                                                  -    ::
                                                                  -
                                                                  -        >>> from atdata.atmosphere import AtmosphereClient, PDSBlobStore
                                                                  -        >>>
                                                                  -        >>> client = AtmosphereClient()
                                                                  -        >>> client.login("handle.bsky.social", "app-password")
                                                                  -        >>>
                                                                  -        >>> store = PDSBlobStore(client)
                                                                  -        >>> urls = store.write_shards(dataset, prefix="mnist/v1")
                                                                  -        >>> print(urls)
                                                                  -        ['at://did:plc:.../blob/bafyrei...', ...]
                                                                  +Examples:
                                                                  +    >>> from atdata.atmosphere import AtmosphereClient, PDSBlobStore
                                                                  +    >>>
                                                                  +    >>> client = AtmosphereClient()
                                                                  +    >>> client.login("handle.bsky.social", "app-password")
                                                                  +    >>>
                                                                  +    >>> store = PDSBlobStore(client)
                                                                  +    >>> urls = store.write_shards(dataset, prefix="mnist/v1")
                                                                  +    >>> print(urls)
                                                                  +    ['at://did:plc:.../blob/bafyrei...', ...]
                                                                   """
                                                                   
                                                                   from __future__ import annotations
                                                                  @@ -48,13 +46,11 @@ class PDSBlobStore:
                                                                       Attributes:
                                                                           client: Authenticated AtmosphereClient instance.
                                                                   
                                                                  -    Example:
                                                                  -        ::
                                                                  -
                                                                  -            >>> store = PDSBlobStore(client)
                                                                  -            >>> urls = store.write_shards(dataset, prefix="training/v1")
                                                                  -            >>> # Returns AT URIs like:
                                                                  -            >>> # ['at://did:plc:abc/blob/bafyrei...', ...]
                                                                  +    Examples:
                                                                  +        >>> store = PDSBlobStore(client)
                                                                  +        >>> urls = store.write_shards(dataset, prefix="training/v1")
                                                                  +        >>> # Returns AT URIs like:
                                                                  +        >>> # ['at://did:plc:abc/blob/bafyrei...', ...]
                                                                       """
                                                                   
                                                                       client: "AtmosphereClient"
                                                                  diff --git a/src/atdata/dataset.py b/src/atdata/dataset.py
                                                                  index 0513b4b..81e2f8f 100644
                                                                  --- a/src/atdata/dataset.py
                                                                  +++ b/src/atdata/dataset.py
                                                                  @@ -13,18 +13,16 @@
                                                                   during serialization, enabling efficient storage of numerical data in WebDataset
                                                                   archives.
                                                                   
                                                                  -Example:
                                                                  -    ::
                                                                  -
                                                                  -        >>> @packable
                                                                  -        ... class ImageSample:
                                                                  -        ...     image: NDArray
                                                                  -        ...     label: str
                                                                  -        ...
                                                                  -        >>> ds = Dataset[ImageSample]("data-{000000..000009}.tar")
                                                                  -        >>> for batch in ds.shuffled(batch_size=32):
                                                                  -        ...     images = batch.image  # Stacked numpy array (32, H, W, C)
                                                                  -        ...     labels = batch.label  # List of 32 strings
                                                                  +Examples:
                                                                  +    >>> @packable
                                                                  +    ... class ImageSample:
                                                                  +    ...     image: NDArray
                                                                  +    ...     label: str
                                                                  +    ...
                                                                  +    >>> ds = Dataset[ImageSample]("data-{000000..000009}.tar")
                                                                  +    >>> for batch in ds.shuffled(batch_size=32):
                                                                  +    ...     images = batch.image  # Stacked numpy array (32, H, W, C)
                                                                  +    ...     labels = batch.label  # List of 32 strings
                                                                   """
                                                                   
                                                                   ##
                                                                  @@ -126,17 +124,15 @@ class DictSample:
                                                                       ``@packable``-decorated class. Every ``@packable`` class automatically
                                                                       registers a lens from ``DictSample``, making this conversion seamless.
                                                                   
                                                                  -    Example:
                                                                  -        ::
                                                                  -
                                                                  -            >>> ds = load_dataset("path/to/data.tar")  # Returns Dataset[DictSample]
                                                                  -            >>> for sample in ds.ordered():
                                                                  -            ...     print(sample.some_field)      # Attribute access
                                                                  -            ...     print(sample["other_field"])  # Dict access
                                                                  -            ...     print(sample.keys())          # Inspect available fields
                                                                  -            ...
                                                                  -            >>> # Convert to typed schema
                                                                  -            >>> typed_ds = ds.as_type(MyTypedSample)
                                                                  +    Examples:
                                                                  +        >>> ds = load_dataset("path/to/data.tar")  # Returns Dataset[DictSample]
                                                                  +        >>> for sample in ds.ordered():
                                                                  +        ...     print(sample.some_field)      # Attribute access
                                                                  +        ...     print(sample["other_field"])  # Dict access
                                                                  +        ...     print(sample.keys())          # Inspect available fields
                                                                  +        ...
                                                                  +        >>> # Convert to typed schema
                                                                  +        >>> typed_ds = ds.as_type(MyTypedSample)
                                                                   
                                                                       Note:
                                                                           NDArray fields are stored as raw bytes in DictSample. They are only
                                                                  @@ -289,17 +285,15 @@ class PackableSample( ABC ):
                                                                       1. Direct inheritance with the ``@dataclass`` decorator
                                                                       2. Using the ``@packable`` decorator (recommended)
                                                                   
                                                                  -    Example:
                                                                  -        ::
                                                                  -
                                                                  -            >>> @packable
                                                                  -            ... class MyData:
                                                                  -            ...     name: str
                                                                  -            ...     embeddings: NDArray
                                                                  -            ...
                                                                  -            >>> sample = MyData(name="test", embeddings=np.array([1.0, 2.0]))
                                                                  -            >>> packed = sample.packed  # Serialize to bytes
                                                                  -            >>> restored = MyData.from_bytes(packed)  # Deserialize
                                                                  +    Examples:
                                                                  +        >>> @packable
                                                                  +        ... class MyData:
                                                                  +        ...     name: str
                                                                  +        ...     embeddings: NDArray
                                                                  +        ...
                                                                  +        >>> sample = MyData(name="test", embeddings=np.array([1.0, 2.0]))
                                                                  +        >>> packed = sample.packed  # Serialize to bytes
                                                                  +        >>> restored = MyData.from_bytes(packed)  # Deserialize
                                                                       """
                                                                   
                                                                       def _ensure_good( self ):
                                                                  @@ -430,12 +424,10 @@ class SampleBatch( Generic[DT] ):
                                                                       Attributes:
                                                                           samples: The list of sample instances in this batch.
                                                                   
                                                                  -    Example:
                                                                  -        ::
                                                                  -
                                                                  -            >>> batch = SampleBatch[MyData]([sample1, sample2, sample3])
                                                                  -            >>> batch.embeddings  # Returns stacked numpy array of shape (3, ...)
                                                                  -            >>> batch.names  # Returns list of names
                                                                  +    Examples:
                                                                  +        >>> batch = SampleBatch[MyData]([sample1, sample2, sample3])
                                                                  +        >>> batch.embeddings  # Returns stacked numpy array of shape (3, ...)
                                                                  +        >>> batch.names  # Returns list of names
                                                                   
                                                                       Note:
                                                                           This class uses Python's ``__orig_class__`` mechanism to extract the
                                                                  @@ -557,16 +549,14 @@ class Dataset( Generic[ST] ):
                                                                       Attributes:
                                                                           url: WebDataset brace-notation URL for the tar file(s).
                                                                   
                                                                  -    Example:
                                                                  -        ::
                                                                  -
                                                                  -            >>> ds = Dataset[MyData]("path/to/data-{000000..000009}.tar")
                                                                  -            >>> for sample in ds.ordered(batch_size=32):
                                                                  -            ...     # sample is SampleBatch[MyData] with batch_size samples
                                                                  -            ...     embeddings = sample.embeddings  # shape: (32, ...)
                                                                  -            ...
                                                                  -            >>> # Transform to a different view
                                                                  -            >>> ds_view = ds.as_type(MyDataView)
                                                                  +    Examples:
                                                                  +        >>> ds = Dataset[MyData]("path/to/data-{000000..000009}.tar")
                                                                  +        >>> for sample in ds.ordered(batch_size=32):
                                                                  +        ...     # sample is SampleBatch[MyData] with batch_size samples
                                                                  +        ...     embeddings = sample.embeddings  # shape: (32, ...)
                                                                  +        ...
                                                                  +        >>> # Transform to a different view
                                                                  +        >>> ds_view = ds.as_type(MyDataView)
                                                                   
                                                                       Note:
                                                                           This class uses Python's ``__orig_class__`` mechanism to extract the
                                                                  @@ -679,11 +669,9 @@ def shards(self) -> Iterator[str]:
                                                                           Yields:
                                                                               Shard identifiers (e.g., 'train-000000.tar', 'train-000001.tar').
                                                                   
                                                                  -        Example:
                                                                  -            ::
                                                                  -
                                                                  -                >>> for shard in ds.shards:
                                                                  -                ...     print(f"Processing {shard}")
                                                                  +        Examples:
                                                                  +            >>> for shard in ds.shards:
                                                                  +            ...     print(f"Processing {shard}")
                                                                           """
                                                                           return iter(self._source.list_shards())
                                                                   
                                                                  @@ -851,15 +839,13 @@ def to_parquet( self, path: Pathlike,
                                                                               This creates multiple parquet files: ``output-000000.parquet``,
                                                                               ``output-000001.parquet``, etc.
                                                                   
                                                                  -        Example:
                                                                  -            ::
                                                                  -
                                                                  -                >>> ds = Dataset[MySample]("data.tar")
                                                                  -                >>> # Small dataset - load all at once
                                                                  -                >>> ds.to_parquet("output.parquet")
                                                                  -                >>>
                                                                  -                >>> # Large dataset - process in chunks
                                                                  -                >>> ds.to_parquet("output.parquet", maxcount=50000)
                                                                  +        Examples:
                                                                  +            >>> ds = Dataset[MySample]("data.tar")
                                                                  +            >>> # Small dataset - load all at once
                                                                  +            >>> ds.to_parquet("output.parquet")
                                                                  +            >>>
                                                                  +            >>> # Large dataset - process in chunks
                                                                  +            >>> ds.to_parquet("output.parquet", maxcount=50000)
                                                                           """
                                                                           ##
                                                                   
                                                                  @@ -984,19 +970,17 @@ def packable( cls: type[_T] ) -> type[_T]:
                                                                           ``Packable`` protocol and can be used with ``Type[Packable]`` signatures.
                                                                   
                                                                       Examples:
                                                                  -        This is a test of the functionality::
                                                                  -
                                                                  -            @packable
                                                                  -            class MyData:
                                                                  -                name: str
                                                                  -                values: NDArray
                                                                  -            
                                                                  -            sample = MyData(name="test", values=np.array([1, 2, 3]))
                                                                  -            bytes_data = sample.packed
                                                                  -            restored = MyData.from_bytes(bytes_data)
                                                                  -            
                                                                  -            # Works with Packable-typed APIs
                                                                  -            index.publish_schema(MyData, version="1.0.0")  # Type-safe
                                                                  +        >>> @packable
                                                                  +        ... class MyData:
                                                                  +        ...     name: str
                                                                  +        ...     values: NDArray
                                                                  +        ...
                                                                  +        >>> sample = MyData(name="test", values=np.array([1, 2, 3]))
                                                                  +        >>> bytes_data = sample.packed
                                                                  +        >>> restored = MyData.from_bytes(bytes_data)
                                                                  +        >>>
                                                                  +        >>> # Works with Packable-typed APIs
                                                                  +        >>> index.publish_schema(MyData, version="1.0.0")  # Type-safe
                                                                       """
                                                                   
                                                                       ##
                                                                  diff --git a/src/atdata/lens.py b/src/atdata/lens.py
                                                                  index 133dab4..fe1974c 100644
                                                                  --- a/src/atdata/lens.py
                                                                  +++ b/src/atdata/lens.py
                                                                  @@ -14,30 +14,28 @@
                                                                   Lenses support the functional programming concept of composable, well-behaved
                                                                   transformations that satisfy lens laws (GetPut and PutGet).
                                                                   
                                                                  -Example:
                                                                  -    ::
                                                                  -
                                                                  -        >>> @packable
                                                                  -        ... class FullData:
                                                                  -        ...     name: str
                                                                  -        ...     age: int
                                                                  -        ...     embedding: NDArray
                                                                  -        ...
                                                                  -        >>> @packable
                                                                  -        ... class NameOnly:
                                                                  -        ...     name: str
                                                                  -        ...
                                                                  -        >>> @lens
                                                                  -        ... def name_view(full: FullData) -> NameOnly:
                                                                  -        ...     return NameOnly(name=full.name)
                                                                  -        ...
                                                                  -        >>> @name_view.putter
                                                                  -        ... def name_view_put(view: NameOnly, source: FullData) -> FullData:
                                                                  -        ...     return FullData(name=view.name, age=source.age,
                                                                  -        ...                     embedding=source.embedding)
                                                                  -        ...
                                                                  -        >>> ds = Dataset[FullData]("data.tar")
                                                                  -        >>> ds_names = ds.as_type(NameOnly)  # Uses registered lens
                                                                  +Examples:
                                                                  +    >>> @packable
                                                                  +    ... class FullData:
                                                                  +    ...     name: str
                                                                  +    ...     age: int
                                                                  +    ...     embedding: NDArray
                                                                  +    ...
                                                                  +    >>> @packable
                                                                  +    ... class NameOnly:
                                                                  +    ...     name: str
                                                                  +    ...
                                                                  +    >>> @lens
                                                                  +    ... def name_view(full: FullData) -> NameOnly:
                                                                  +    ...     return NameOnly(name=full.name)
                                                                  +    ...
                                                                  +    >>> @name_view.putter
                                                                  +    ... def name_view_put(view: NameOnly, source: FullData) -> FullData:
                                                                  +    ...     return FullData(name=view.name, age=source.age,
                                                                  +    ...                     embedding=source.embedding)
                                                                  +    ...
                                                                  +    >>> ds = Dataset[FullData]("data.tar")
                                                                  +    >>> ds_names = ds.as_type(NameOnly)  # Uses registered lens
                                                                   """
                                                                   
                                                                   ##
                                                                  @@ -92,16 +90,14 @@ class Lens( Generic[S, V] ):
                                                                           S: The source type, must derive from ``PackableSample``.
                                                                           V: The view type, must derive from ``PackableSample``.
                                                                   
                                                                  -    Example:
                                                                  -        ::
                                                                  -
                                                                  -            >>> @lens
                                                                  -            ... def name_lens(full: FullData) -> NameOnly:
                                                                  -            ...     return NameOnly(name=full.name)
                                                                  -            ...
                                                                  -            >>> @name_lens.putter
                                                                  -            ... def name_lens_put(view: NameOnly, source: FullData) -> FullData:
                                                                  -            ...     return FullData(name=view.name, age=source.age)
                                                                  +    Examples:
                                                                  +        >>> @lens
                                                                  +        ... def name_lens(full: FullData) -> NameOnly:
                                                                  +        ...     return NameOnly(name=full.name)
                                                                  +        ...
                                                                  +        >>> @name_lens.putter
                                                                  +        ... def name_lens_put(view: NameOnly, source: FullData) -> FullData:
                                                                  +        ...     return FullData(name=view.name, age=source.age)
                                                                       """
                                                                       # TODO The above has a line for "Parameters:" that should be "Type Parameters:"; this is a temporary fix for `quartodoc` auto-generation bugs.
                                                                   
                                                                  @@ -163,12 +159,10 @@ def putter( self, put: LensPutter[S, V] ) -> LensPutter[S, V]:
                                                                           Returns:
                                                                               The putter function, allowing this to be used as a decorator.
                                                                   
                                                                  -        Example:
                                                                  -            ::
                                                                  -
                                                                  -                >>> @my_lens.putter
                                                                  -                ... def my_lens_put(view: ViewType, source: SourceType) -> SourceType:
                                                                  -                ...     return SourceType(...)
                                                                  +        Examples:
                                                                  +            >>> @my_lens.putter
                                                                  +            ... def my_lens_put(view: ViewType, source: SourceType) -> SourceType:
                                                                  +            ...     return SourceType(field=view.field, other=source.other)
                                                                           """
                                                                           ##
                                                                           self._putter = put
                                                                  @@ -218,16 +212,14 @@ def lens(  f: LensGetter[S, V] ) -> Lens[S, V]:
                                                                           A ``Lens[S, V]`` object that can be called to apply the transformation
                                                                           or decorated with ``@lens_name.putter`` to add a putter function.
                                                                   
                                                                  -    Example:
                                                                  -        ::
                                                                  -
                                                                  -            >>> @lens
                                                                  -            ... def extract_name(full: FullData) -> NameOnly:
                                                                  -            ...     return NameOnly(name=full.name)
                                                                  -            ...
                                                                  -            >>> @extract_name.putter
                                                                  -            ... def extract_name_put(view: NameOnly, source: FullData) -> FullData:
                                                                  -            ...     return FullData(name=view.name, age=source.age)
                                                                  +    Examples:
                                                                  +        >>> @lens
                                                                  +        ... def extract_name(full: FullData) -> NameOnly:
                                                                  +        ...     return NameOnly(name=full.name)
                                                                  +        ...
                                                                  +        >>> @extract_name.putter
                                                                  +        ... def extract_name_put(view: NameOnly, source: FullData) -> FullData:
                                                                  +        ...     return FullData(name=view.name, age=source.age)
                                                                       """
                                                                       ret = Lens[S, V]( f )
                                                                       _network.register( ret )
                                                                  diff --git a/src/atdata/local.py b/src/atdata/local.py
                                                                  index 2d0b6fd..ae50629 100644
                                                                  --- a/src/atdata/local.py
                                                                  +++ b/src/atdata/local.py
                                                                  @@ -84,12 +84,10 @@ class SchemaNamespace:
                                                                       loaded schema types. After calling ``index.load_schema(uri)``, the
                                                                       schema's class becomes available as an attribute on this namespace.
                                                                   
                                                                  -    Example:
                                                                  -        ::
                                                                  -
                                                                  -            >>> index.load_schema("atdata://local/sampleSchema/MySample@1.0.0")
                                                                  -            >>> MyType = index.types.MySample
                                                                  -            >>> sample = MyType(field1="hello", field2=42)
                                                                  +    Examples:
                                                                  +        >>> index.load_schema("atdata://local/sampleSchema/MySample@1.0.0")
                                                                  +        >>> MyType = index.types.MySample
                                                                  +        >>> sample = MyType(field1="hello", field2=42)
                                                                   
                                                                       The namespace supports:
                                                                       - Attribute access: ``index.types.MySample``
                                                                  @@ -1027,12 +1025,10 @@ def types(self) -> SchemaNamespace:
                                                                           After calling :meth:`load_schema`, schema types become available
                                                                           as attributes on this namespace.
                                                                   
                                                                  -        Example:
                                                                  -            ::
                                                                  -
                                                                  -                >>> index.load_schema("atdata://local/sampleSchema/MySample@1.0.0")
                                                                  -                >>> MyType = index.types.MySample
                                                                  -                >>> sample = MyType(name="hello", value=42)
                                                                  +        Examples:
                                                                  +            >>> index.load_schema("atdata://local/sampleSchema/MySample@1.0.0")
                                                                  +            >>> MyType = index.types.MySample
                                                                  +            >>> sample = MyType(name="hello", value=42)
                                                                   
                                                                           Returns:
                                                                               SchemaNamespace containing all loaded schema types.
                                                                  @@ -1058,16 +1054,14 @@ def load_schema(self, ref: str) -> Type[Packable]:
                                                                               KeyError: If schema not found.
                                                                               ValueError: If schema cannot be decoded.
                                                                   
                                                                  -        Example:
                                                                  -            ::
                                                                  -
                                                                  -                >>> # Load and use immediately
                                                                  -                >>> MyType = index.load_schema("atdata://local/sampleSchema/MySample@1.0.0")
                                                                  -                >>> sample = MyType(name="hello", value=42)
                                                                  -                >>>
                                                                  -                >>> # Or access later via namespace
                                                                  -                >>> index.load_schema("atdata://local/sampleSchema/OtherType@1.0.0")
                                                                  -                >>> other = index.types.OtherType(data="test")
                                                                  +        Examples:
                                                                  +            >>> # Load and use immediately
                                                                  +            >>> MyType = index.load_schema("atdata://local/sampleSchema/MySample@1.0.0")
                                                                  +            >>> sample = MyType(name="hello", value=42)
                                                                  +            >>>
                                                                  +            >>> # Or access later via namespace
                                                                  +            >>> index.load_schema("atdata://local/sampleSchema/OtherType@1.0.0")
                                                                  +            >>> other = index.types.OtherType(data="test")
                                                                           """
                                                                           # Decode the schema (uses generated module if auto_stubs enabled)
                                                                           cls = self.decode_schema(ref)
                                                                  @@ -1090,16 +1084,14 @@ def get_import_path(self, ref: str) -> str | None:
                                                                               Import path like "local.MySample_1_0_0", or None if auto_stubs
                                                                               is disabled.
                                                                   
                                                                  -        Example:
                                                                  -            ::
                                                                  -
                                                                  -                >>> index = LocalIndex(auto_stubs=True)
                                                                  -                >>> ref = index.publish_schema(MySample, version="1.0.0")
                                                                  -                >>> index.load_schema(ref)
                                                                  -                >>> print(index.get_import_path(ref))
                                                                  -                local.MySample_1_0_0
                                                                  -                >>> # Then in your code:
                                                                  -                >>> # from local.MySample_1_0_0 import MySample
                                                                  +        Examples:
                                                                  +            >>> index = LocalIndex(auto_stubs=True)
                                                                  +            >>> ref = index.publish_schema(MySample, version="1.0.0")
                                                                  +            >>> index.load_schema(ref)
                                                                  +            >>> print(index.get_import_path(ref))
                                                                  +            local.MySample_1_0_0
                                                                  +            >>> # Then in your code:
                                                                  +            >>> # from local.MySample_1_0_0 import MySample
                                                                           """
                                                                           if self._stub_manager is None:
                                                                               return None
                                                                  @@ -1551,15 +1543,13 @@ def decode_schema_as(self, ref: str, type_hint: type[T]) -> type[T]:
                                                                           Returns:
                                                                               The decoded type, cast to match the type_hint for IDE support.
                                                                   
                                                                  -        Example:
                                                                  -            ::
                                                                  -
                                                                  -                >>> # After enabling auto_stubs and configuring IDE extraPaths:
                                                                  -                >>> from local.MySample_1_0_0 import MySample
                                                                  -                >>>
                                                                  -                >>> # This gives full IDE autocomplete:
                                                                  -                >>> DecodedType = index.decode_schema_as(ref, MySample)
                                                                  -                >>> sample = DecodedType(text="hello", value=42)  # IDE knows signature!
                                                                  +        Examples:
                                                                  +            >>> # After enabling auto_stubs and configuring IDE extraPaths:
                                                                  +            >>> from local.MySample_1_0_0 import MySample
                                                                  +            >>>
                                                                  +            >>> # This gives full IDE autocomplete:
                                                                  +            >>> DecodedType = index.decode_schema_as(ref, MySample)
                                                                  +            >>> sample = DecodedType(text="hello", value=42)  # IDE knows signature!
                                                                   
                                                                           Note:
                                                                               The type_hint is only used for static type checking - at runtime,
                                                                  diff --git a/src/atdata/promote.py b/src/atdata/promote.py
                                                                  index 2e03a42..76e78be 100644
                                                                  --- a/src/atdata/promote.py
                                                                  +++ b/src/atdata/promote.py
                                                                  @@ -4,21 +4,19 @@
                                                                   ATProto atmosphere network. This enables sharing datasets with the broader
                                                                   federation while maintaining schema consistency.
                                                                   
                                                                  -Example:
                                                                  -    ::
                                                                  -
                                                                  -        >>> from atdata.local import LocalIndex, Repo
                                                                  -        >>> from atdata.atmosphere import AtmosphereClient, AtmosphereIndex
                                                                  -        >>> from atdata.promote import promote_to_atmosphere
                                                                  -        >>>
                                                                  -        >>> # Setup
                                                                  -        >>> local_index = LocalIndex()
                                                                  -        >>> client = AtmosphereClient()
                                                                  -        >>> client.login("handle.bsky.social", "app-password")
                                                                  -        >>>
                                                                  -        >>> # Promote a dataset
                                                                  -        >>> entry = local_index.get_dataset("my-dataset")
                                                                  -        >>> at_uri = promote_to_atmosphere(entry, local_index, client)
                                                                  +Examples:
                                                                  +    >>> from atdata.local import LocalIndex, Repo
                                                                  +    >>> from atdata.atmosphere import AtmosphereClient, AtmosphereIndex
                                                                  +    >>> from atdata.promote import promote_to_atmosphere
                                                                  +    >>>
                                                                  +    >>> # Setup
                                                                  +    >>> local_index = LocalIndex()
                                                                  +    >>> client = AtmosphereClient()
                                                                  +    >>> client.login("handle.bsky.social", "app-password")
                                                                  +    >>>
                                                                  +    >>> # Promote a dataset
                                                                  +    >>> entry = local_index.get_dataset("my-dataset")
                                                                  +    >>> at_uri = promote_to_atmosphere(entry, local_index, client)
                                                                   """
                                                                   
                                                                   from typing import TYPE_CHECKING, Type
                                                                  @@ -128,13 +126,11 @@ def promote_to_atmosphere(
                                                                           KeyError: If schema not found in local index.
                                                                           ValueError: If local entry has no data URLs.
                                                                   
                                                                  -    Example:
                                                                  -        ::
                                                                  -
                                                                  -            >>> entry = local_index.get_dataset("mnist-train")
                                                                  -            >>> uri = promote_to_atmosphere(entry, local_index, client)
                                                                  -            >>> print(uri)
                                                                  -            at://did:plc:abc123/ac.foundation.dataset.datasetIndex/...
                                                                  +    Examples:
                                                                  +        >>> entry = local_index.get_dataset("mnist-train")
                                                                  +        >>> uri = promote_to_atmosphere(entry, local_index, client)
                                                                  +        >>> print(uri)
                                                                  +        at://did:plc:abc123/ac.foundation.dataset.datasetIndex/...
                                                                       """
                                                                       from .atmosphere import DatasetPublisher
                                                                       from ._schema_codec import schema_to_type
                                                                  
                                                                  From d8bf84f67cd5c439ce45adfdcd3e03775711dd8c Mon Sep 17 00:00:00 2001
                                                                  From: Maxine Levesque <170461181+maxinelevesque@users.noreply.github.com>
                                                                  Date: Wed, 28 Jan 2026 13:00:04 -0800
                                                                  Subject: [PATCH 3/6] fix(types): add overload signatures to
                                                                   Dataset.ordered/shuffled for batch type inference
                                                                  
                                                                  Type checkers now correctly infer:
                                                                  - Iterable[ST] when batch_size is None (default)
                                                                  - Iterable[SampleBatch[ST]] when batch_size is int
                                                                  
                                                                  Closes #28
                                                                  
                                                                  Co-Authored-By: Claude Opus 4.5 
                                                                  ---
                                                                   .chainlink/issues.db  | Bin 483328 -> 483328 bytes
                                                                   CHANGELOG.md          |   1 +
                                                                   src/atdata/dataset.py |  64 ++++++++++++++++++++++++++++++++++--------
                                                                   3 files changed, 53 insertions(+), 12 deletions(-)
                                                                  
                                                                  diff --git a/.chainlink/issues.db b/.chainlink/issues.db
                                                                  index e7d74f7273ba4d09de48d496200617a5a425e0fe..95698542ace7413a0e8e6537380d856b952ef992 100644
                                                                  GIT binary patch
                                                                  delta 768
                                                                  zcmZvZO=uHA6vuaVvpa2*ZL6e8mFTomNSh|zY??G#wf4)8f~l<*ER?F7?zCA*vSDXa
                                                                  zKQ1=*5Ijg*65NwLc~B2(Ep(xX6hskER-t$hdhjR+9!gOW-F^rKJ^XnykKep`@BJs2
                                                                  zJ(J6xnS*;YilQcV1SrZa%{GUCHh<+<3y_NUS`L9i@p((r0g3Sdf(cQz3~@MFrj}L-
                                                                  z$zcLY&xH6lFkto7f%~9*0Pb&Sm^yrC!1`Ga$~{cMv2MW*z%#UCeW4DmfilR@G$?_B
                                                                  z^{WR!A5&s|Cd`5rn)O*9vtYo%u?%Gcg7j~M7@EJ_h}i7bZq4G3_-mH*5yPB
                                                                  z4__D@X+~FVbZ{4oRD#ru@gG`FXH&Rqv*_l_5m68$ej(@=ql2O#g~L)X6o|C7ib2sQ
                                                                  z2$CRJ4+C%rglwDlQ5#g%aOFa!lh<7DT}@6>@v5S_e5mXZ{0@7$BKw8ybj&iJ88b@Fjlkbs
                                                                  zAn)ljN5p93Xi6hk+4kU{euY>d9JaION;g~M49BEkED(vsB4INxI%@XrN(eg>ofP0D
                                                                  zU{;IpoprtyKB3L&5L`6ZL^yBupMX!zws!c~nr?#+xWx`A^S0}WIQ(6;9p~sjdCBL?
                                                                  
                                                                  delta 511
                                                                  zcmX|;O=uHg5Qb;=+x;fl>~1Y-lvtvsSSYnkvRO=)>Y+adLEPG&s+ZCTIaIKXEYX7q
                                                                  z0ihzLq%qR(q!&d<51K0W1Mv_L6g*i99*W>WJm^96BKT8uwc_D{f#G>yhN-p_)plaJ
                                                                  zyK@QvF8%`nx>EH42dCC%j;1l!xRu_A3yr7g{=GStK)>Ysv-5Ml8qru>nf3ij$;ZaS
                                                                  zjQ0gSIiAG381`sSZ|~LpOP>7EgW&{QP`4!wqsbm64}3tmeGKUkYiPEhGH%GNGES=X
                                                                  ziO@B>f&oeZ3s9w9(KcT)P5B;-==X_?nB
                                                                  zXvk>d$qJ9ijTP=(H_&Q=)wBl8v!)v?`%K{u(rxkDqfOaN(P^~t4#c3q%p1l>V>EIv
                                                                  z64s01)zHt-ajIGQj#J88R^w;12kI|%QrQJxp->-*%5|Hj(NYU=UWdyNiz%K5q8g>$
                                                                  zV#}taI$ti$3Cz+#(dE!9aUxGGaWhAc4MH$mXowG_*G4~sfn3PRvdNF_^$
                                                                  zcl=hC9PunmoopzRk>j6vw<7%`w4{nvm+o7^yUyuw1!B>^<>ZpP3CS
                                                                  
                                                                  diff --git a/CHANGELOG.md b/CHANGELOG.md
                                                                  index 1bdd95b..f3e51a4 100644
                                                                  --- a/CHANGELOG.md
                                                                  +++ b/CHANGELOG.md
                                                                  @@ -25,6 +25,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
                                                                   - **Comprehensive integration test suite**: 593 tests covering E2E flows, error handling, edge cases
                                                                   
                                                                   ### Changed
                                                                  +- Fix type signatures for Dataset.ordered and Dataset.shuffled (GH#28) (#404)
                                                                   - Investigate quartodoc Example section rendering - missing CSS classes on pre/code tags (#401)
                                                                   - Update all docstrings from Example: to Examples: format (#403)
                                                                   - Create GitHub issues for v0.3 roadmap feature domains (#402)
                                                                  diff --git a/src/atdata/dataset.py b/src/atdata/dataset.py
                                                                  index 81e2f8f..4ae3353 100644
                                                                  --- a/src/atdata/dataset.py
                                                                  +++ b/src/atdata/dataset.py
                                                                  @@ -64,6 +64,8 @@
                                                                       TypeVar,
                                                                       TypeAlias,
                                                                       dataclass_transform,
                                                                  +    overload,
                                                                  +    Literal,
                                                                   )
                                                                   from numpy.typing import NDArray
                                                                   
                                                                  @@ -721,20 +723,37 @@ def metadata( self ) -> dict[str, Any] | None:
                                                                           # Use our cached values
                                                                           return self._metadata
                                                                       
                                                                  +    @overload
                                                                  +    def ordered( self,
                                                                  +                batch_size: None = None,
                                                                  +            ) -> Iterable[ST]: ...
                                                                  +
                                                                  +    @overload
                                                                  +    def ordered( self,
                                                                  +                batch_size: int,
                                                                  +            ) -> Iterable[SampleBatch[ST]]: ...
                                                                  +
                                                                       def ordered( self,
                                                                                   batch_size: int | None = None,
                                                                  -            ) -> Iterable[ST]:
                                                                  -        """Iterate over the dataset in order
                                                                  +            ) -> Iterable[ST] | Iterable[SampleBatch[ST]]:
                                                                  +        """Iterate over the dataset in order.
                                                                   
                                                                           Args:
                                                                  -            batch_size (:obj:`int`, optional): The size of iterated batches.
                                                                  -                Default: None (unbatched). If ``None``, iterates over one
                                                                  -                sample at a time with no batch dimension.
                                                                  +            batch_size: The size of iterated batches. Default: None (unbatched).
                                                                  +                If ``None``, iterates over one sample at a time with no batch
                                                                  +                dimension.
                                                                   
                                                                           Returns:
                                                                  -            :obj:`webdataset.DataPipeline` A data pipeline that iterates over
                                                                  -            the dataset in its original sample order
                                                                  +            A data pipeline that iterates over the dataset in its original
                                                                  +            sample order. When ``batch_size`` is ``None``, yields individual
                                                                  +            samples of type ``ST``. When ``batch_size`` is an integer, yields
                                                                  +            ``SampleBatch[ST]`` instances containing that many samples.
                                                                   
                                                                  +        Examples:
                                                                  +            >>> for sample in ds.ordered():
                                                                  +            ...     process(sample)  # sample is ST
                                                                  +            >>> for batch in ds.ordered(batch_size=32):
                                                                  +            ...     process(batch)  # batch is SampleBatch[ST]
                                                                           """
                                                                           if batch_size is None:
                                                                               return wds.pipeline.DataPipeline(
                                                                  @@ -756,11 +775,26 @@ def ordered( self,
                                                                               wds.filters.map( self.wrap_batch ),
                                                                           )
                                                                   
                                                                  +    @overload
                                                                  +    def shuffled( self,
                                                                  +                buffer_shards: int = 100,
                                                                  +                buffer_samples: int = 10_000,
                                                                  +                batch_size: None = None,
                                                                  +            ) -> Iterable[ST]: ...
                                                                  +
                                                                  +    @overload
                                                                  +    def shuffled( self,
                                                                  +                buffer_shards: int = 100,
                                                                  +                buffer_samples: int = 10_000,
                                                                  +                *,
                                                                  +                batch_size: int,
                                                                  +            ) -> Iterable[SampleBatch[ST]]: ...
                                                                  +
                                                                       def shuffled( self,
                                                                                   buffer_shards: int = 100,
                                                                                   buffer_samples: int = 10_000,
                                                                                   batch_size: int | None = None,
                                                                  -            ) -> Iterable[ST]:
                                                                  +            ) -> Iterable[ST] | Iterable[SampleBatch[ST]]:
                                                                           """Iterate over the dataset in random order.
                                                                   
                                                                           Args:
                                                                  @@ -775,10 +809,16 @@ def shuffled( self,
                                                                                   dimension.
                                                                   
                                                                           Returns:
                                                                  -            A WebDataset data pipeline that iterates over the dataset in
                                                                  -            randomized order. If ``batch_size`` is not ``None``, yields
                                                                  -            ``SampleBatch[ST]`` instances; otherwise yields individual ``ST``
                                                                  -            samples.
                                                                  +            A data pipeline that iterates over the dataset in randomized order.
                                                                  +            When ``batch_size`` is ``None``, yields individual samples of type
                                                                  +            ``ST``. When ``batch_size`` is an integer, yields ``SampleBatch[ST]``
                                                                  +            instances containing that many samples.
                                                                  +
                                                                  +        Examples:
                                                                  +            >>> for sample in ds.shuffled():
                                                                  +            ...     process(sample)  # sample is ST
                                                                  +            >>> for batch in ds.shuffled(batch_size=32):
                                                                  +            ...     process(batch)  # batch is SampleBatch[ST]
                                                                           """
                                                                           if batch_size is None:
                                                                               return wds.pipeline.DataPipeline(
                                                                  
                                                                  From 926f4ab46f78904f0c2504d8a8303d9a53acbe0e Mon Sep 17 00:00:00 2001
                                                                  From: Maxine Levesque <170461181+maxinelevesque@users.noreply.github.com>
                                                                  Date: Wed, 28 Jan 2026 13:02:21 -0800
                                                                  Subject: [PATCH 4/6] bumped package version
                                                                  
                                                                  ---
                                                                   pyproject.toml | 2 +-
                                                                   1 file changed, 1 insertion(+), 1 deletion(-)
                                                                  
                                                                  diff --git a/pyproject.toml b/pyproject.toml
                                                                  index 1487042..cab39e1 100644
                                                                  --- a/pyproject.toml
                                                                  +++ b/pyproject.toml
                                                                  @@ -1,6 +1,6 @@
                                                                   [project]
                                                                   name = "atdata"
                                                                  -version = "0.2.2b1"
                                                                  +version = "0.2.3b1"
                                                                   description = "A loose federation of distributed, typed datasets"
                                                                   readme = "README.md"
                                                                   authors = [
                                                                  
                                                                  From 82c78a9b516f44db0957838b91bc82cb97efca98 Mon Sep 17 00:00:00 2001
                                                                  From: Maxine Levesque <170461181+maxinelevesque@users.noreply.github.com>
                                                                  Date: Wed, 28 Jan 2026 13:10:02 -0800
                                                                  Subject: [PATCH 5/6] ci: modernize GitHub Actions with caching, concurrency,
                                                                   lint checks, and trusted publishing
                                                                  
                                                                  - Add uv caching (enable-cache: true) for faster CI runs
                                                                  - Add concurrency control to cancel in-progress runs on new commits
                                                                  - Add ruff lint job (check + format) targeting src/ and tests/
                                                                  - Switch to --locked flag for reproducible dependency resolution
                                                                  - Add fail-fast: false to test matrix to see all failures
                                                                  - Enable Codecov coverage upload
                                                                  - Switch PyPI publishing to trusted publishing (OIDC)
                                                                  - Split publish workflow into build and publish jobs with artifacts
                                                                  - Fix all ruff lint issues (unused imports, undefined names)
                                                                  - Format entire codebase with ruff format (42 files)
                                                                  
                                                                  Co-Authored-By: Claude Opus 4.5 
                                                                  ---
                                                                   .chainlink/issues.db                      | Bin 483328 -> 483328 bytes
                                                                   .github/workflows/uv-publish-pypi.yml     |  61 ++-
                                                                   .github/workflows/uv-test.yml             |  59 ++-
                                                                   CHANGELOG.md                              |   1 +
                                                                   src/atdata/__init__.py                    |   2 +-
                                                                   src/atdata/_cid.py                        |   8 +-
                                                                   src/atdata/_helpers.py                    |  12 +-
                                                                   src/atdata/_hf_api.py                     |  12 +-
                                                                   src/atdata/_protocols.py                  |   1 -
                                                                   src/atdata/_schema_codec.py               |   8 +-
                                                                   src/atdata/_sources.py                    |  11 +-
                                                                   src/atdata/_stub_manager.py               |  17 +-
                                                                   src/atdata/_type_utils.py                 |  24 +-
                                                                   src/atdata/atmosphere/__init__.py         |   1 +
                                                                   src/atdata/atmosphere/_types.py           |   4 +-
                                                                   src/atdata/atmosphere/client.py           |   7 +-
                                                                   src/atdata/atmosphere/lens.py             |   1 +
                                                                   src/atdata/atmosphere/records.py          |   4 +-
                                                                   src/atdata/atmosphere/schema.py           |  16 +-
                                                                   src/atdata/atmosphere/store.py            |   2 +-
                                                                   src/atdata/cli/__init__.py                |  15 +-
                                                                   src/atdata/cli/diagnose.py                |  20 +-
                                                                   src/atdata/cli/local.py                   |   5 +-
                                                                   src/atdata/dataset.py                     | 367 +++++++++--------
                                                                   src/atdata/lens.py                        |  67 ++--
                                                                   src/atdata/local.py                       | 279 +++++++------
                                                                   tests/conftest.py                         |  25 +-
                                                                   tests/test_atmosphere.py                  | 200 +++++++---
                                                                   tests/test_cid.py                         |  15 +-
                                                                   tests/test_dataset.py                     | 411 ++++++++++---------
                                                                   tests/test_helpers.py                     |  38 +-
                                                                   tests/test_hf_api.py                      |   5 +-
                                                                   tests/test_integration.py                 |  25 +-
                                                                   tests/test_integration_atmosphere.py      |  94 +++--
                                                                   tests/test_integration_atmosphere_live.py |  26 +-
                                                                   tests/test_integration_cross_backend.py   |  41 +-
                                                                   tests/test_integration_dynamic_types.py   | 427 +++++++++++++++-----
                                                                   tests/test_integration_e2e.py             |  27 +-
                                                                   tests/test_integration_edge_cases.py      |  11 +-
                                                                   tests/test_integration_error_handling.py  |  20 +-
                                                                   tests/test_integration_lens.py            |  12 +-
                                                                   tests/test_integration_local.py           | 108 ++---
                                                                   tests/test_integration_promotion.py       |  78 +++-
                                                                   tests/test_lens.py                        | 149 ++++---
                                                                   tests/test_local.py                       | 460 +++++++++++++---------
                                                                   tests/test_promote.py                     |  55 ++-
                                                                   tests/test_protocols.py                   |  10 +-
                                                                   tests/test_sources.py                     |  57 +--
                                                                   48 files changed, 2081 insertions(+), 1217 deletions(-)
                                                                  
                                                                  diff --git a/.chainlink/issues.db b/.chainlink/issues.db
                                                                  index 95698542ace7413a0e8e6537380d856b952ef992..12b36ae2d0358448d5941f0e0e33262998fc97f3 100644
                                                                  GIT binary patch
                                                                  delta 716
                                                                  zcmZ8fO-$546z;U!7G#kSA&9#AG}#2*ZfU!~vW+1kM!{Gm>YhxD-F6lV-L~n^uABh(
                                                                  z03J*rxb8_$Uc8W~heqRpt6q&J8sovUka#d2Jm@TN(Mi6%mpAWy^UXK2w3S-gO06C3
                                                                  zUZE(ex(`HAJr~!799nsP=jp+cGD4<3>Fu{Gf!o1}+V}6ZWM>Mxdj_qNauOGF#ViGRjPcq3J>fp1^kI9|RQL_Ct
                                                                  zxwvfB+c=7@vl?nlRXiJ4uoGI&j0)MT?HcpggtD#A2w=F55&1rLjD=Pn`mQZNR0Tsf
                                                                  zW=X#QepD`lZ8;&#xg`P0x@Bj}dJw8*-43t-6RwF3w<TNc1xd~gD;Y^rC29FuohKK)cS}PLr)}L)Vcaw!{+{Iz(@Y;V+2kc&EKZUS
                                                                  z@!wj`)O!z5HLAa#(lqTlu3ESTm#lCyDnZTl=gYQR3qW^FBF1nl#6J@zrogIrzFXb3
                                                                  zb7Q@?w|ifgtPbVGQCX2kn)>a;p-v(-uMX$MLO!2Yc1u0~lnQ(5<6ja#cZ|GaN#p-?
                                                                  I?ZqebUsbo%UH||9
                                                                  
                                                                  delta 292
                                                                  zcmZoTAlq<2c7n8EJ_7?o85A=zFo;-9)G=nv-&`{X_PT2fZbvW&&4
                                                                  z#l@NVdBrT;tjv;3nZ?DWsl|*_HW~PSWDMNyr^vX3ku#88TvfGMzBh3DZzV=fS>|?@
                                                                  zuiM!j82OnuD=OS)-2T;tkzbp63aj>X_5#Ms+wT-Gda_Im;Aqz_Wdvd-AZ7+)mhIZ5
                                                                  ztT#_FvoXB`YG&KXxc!hLn*!s+0KVz(oY*WGr)<}7X1n9Ve1dJ)cJ>Ku6B&VqE3r;r
                                                                  zH;s*Z`_XA^_MJRTyw@1`R`OosP2#lxN^0;{@s8*O9p5#Q2swZgr3I)0Nz str:
                                                                       # Build raw CID bytes:
                                                                       # CIDv1 = version(1) + codec(dag-cbor) + multihash
                                                                       # Multihash = code(sha256) + size(32) + digest
                                                                  -    raw_cid_bytes = bytes([CID_VERSION_1, CODEC_DAG_CBOR, HASH_SHA256, SHA256_SIZE]) + sha256_hash
                                                                  +    raw_cid_bytes = (
                                                                  +        bytes([CID_VERSION_1, CODEC_DAG_CBOR, HASH_SHA256, SHA256_SIZE]) + sha256_hash
                                                                  +    )
                                                                   
                                                                       # Encode to base32 multibase string
                                                                       return libipld.encode_cid(raw_cid_bytes)
                                                                  @@ -87,7 +89,9 @@ def generate_cid_from_bytes(data_bytes: bytes) -> str:
                                                                           >>> cid = generate_cid_from_bytes(cbor_bytes)
                                                                       """
                                                                       sha256_hash = hashlib.sha256(data_bytes).digest()
                                                                  -    raw_cid_bytes = bytes([CID_VERSION_1, CODEC_DAG_CBOR, HASH_SHA256, SHA256_SIZE]) + sha256_hash
                                                                  +    raw_cid_bytes = (
                                                                  +        bytes([CID_VERSION_1, CODEC_DAG_CBOR, HASH_SHA256, SHA256_SIZE]) + sha256_hash
                                                                  +    )
                                                                       return libipld.encode_cid(raw_cid_bytes)
                                                                   
                                                                   
                                                                  diff --git a/src/atdata/_helpers.py b/src/atdata/_helpers.py
                                                                  index 749a936..79dba09 100644
                                                                  --- a/src/atdata/_helpers.py
                                                                  +++ b/src/atdata/_helpers.py
                                                                  @@ -22,7 +22,8 @@
                                                                   
                                                                   ##
                                                                   
                                                                  -def array_to_bytes( x: np.ndarray ) -> bytes:
                                                                  +
                                                                  +def array_to_bytes(x: np.ndarray) -> bytes:
                                                                       """Convert a numpy array to bytes for msgpack serialization.
                                                                   
                                                                       Uses numpy's native ``save()`` format to preserve array dtype and shape.
                                                                  @@ -37,10 +38,11 @@ def array_to_bytes( x: np.ndarray ) -> bytes:
                                                                           Uses ``allow_pickle=True`` to support object dtypes.
                                                                       """
                                                                       np_bytes = BytesIO()
                                                                  -    np.save( np_bytes, x, allow_pickle = True )
                                                                  +    np.save(np_bytes, x, allow_pickle=True)
                                                                       return np_bytes.getvalue()
                                                                   
                                                                  -def bytes_to_array( b: bytes ) -> np.ndarray:
                                                                  +
                                                                  +def bytes_to_array(b: bytes) -> np.ndarray:
                                                                       """Convert serialized bytes back to a numpy array.
                                                                   
                                                                       Reverses the serialization performed by ``array_to_bytes()``.
                                                                  @@ -54,5 +56,5 @@ def bytes_to_array( b: bytes ) -> np.ndarray:
                                                                       Note:
                                                                           Uses ``allow_pickle=True`` to support object dtypes.
                                                                       """
                                                                  -    np_bytes = BytesIO( b )
                                                                  -    return np.load( np_bytes, allow_pickle = True )
                                                                  \ No newline at end of file
                                                                  +    np_bytes = BytesIO(b)
                                                                  +    return np.load(np_bytes, allow_pickle=True)
                                                                  diff --git a/src/atdata/_hf_api.py b/src/atdata/_hf_api.py
                                                                  index 35bdde3..b810f9f 100644
                                                                  --- a/src/atdata/_hf_api.py
                                                                  +++ b/src/atdata/_hf_api.py
                                                                  @@ -46,7 +46,6 @@
                                                                   
                                                                   if TYPE_CHECKING:
                                                                       from ._protocols import AbstractIndex
                                                                  -    from .local import S3DataStore
                                                                   
                                                                   ##
                                                                   # Type variables
                                                                  @@ -77,6 +76,7 @@ class DatasetDict(Generic[ST], dict):
                                                                           >>> for split_name, dataset in ds_dict.items():
                                                                           ...     print(f"{split_name}: {len(dataset.shard_list)} shards")
                                                                       """
                                                                  +
                                                                       # TODO The above has a line for "Parameters:" that should be "Type Parameters:"; this is a temporary fix for `quartodoc` auto-generation bugs.
                                                                   
                                                                       def __init__(
                                                                  @@ -464,7 +464,7 @@ def _resolve_indexed_path(
                                                                       data_urls = entry.data_urls
                                                                   
                                                                       # Check if index has a data store
                                                                  -    if hasattr(index, 'data_store') and index.data_store is not None:
                                                                  +    if hasattr(index, "data_store") and index.data_store is not None:
                                                                           store = index.data_store
                                                                   
                                                                           # Import here to avoid circular imports at module level
                                                                  @@ -638,7 +638,9 @@ def load_dataset(
                                                                           source, schema_ref = _resolve_indexed_path(path, index)
                                                                   
                                                                           # Resolve sample_type from schema if not provided
                                                                  -        resolved_type: Type = sample_type if sample_type is not None else index.decode_schema(schema_ref)
                                                                  +        resolved_type: Type = (
                                                                  +            sample_type if sample_type is not None else index.decode_schema(schema_ref)
                                                                  +        )
                                                                   
                                                                           # Create dataset from the resolved source (includes credentials if S3)
                                                                           ds = Dataset[resolved_type](source)
                                                                  @@ -647,7 +649,9 @@ def load_dataset(
                                                                               # Indexed datasets are single-split by default
                                                                               return ds
                                                                   
                                                                  -        return DatasetDict({"train": ds}, sample_type=resolved_type, streaming=streaming)
                                                                  +        return DatasetDict(
                                                                  +            {"train": ds}, sample_type=resolved_type, streaming=streaming
                                                                  +        )
                                                                   
                                                                       # Use DictSample as default when no type specified
                                                                       resolved_type = sample_type if sample_type is not None else DictSample
                                                                  diff --git a/src/atdata/_protocols.py b/src/atdata/_protocols.py
                                                                  index c89d544..9067784 100644
                                                                  --- a/src/atdata/_protocols.py
                                                                  +++ b/src/atdata/_protocols.py
                                                                  @@ -32,7 +32,6 @@
                                                                   from typing import (
                                                                       IO,
                                                                       Any,
                                                                  -    ClassVar,
                                                                       Iterator,
                                                                       Optional,
                                                                       Protocol,
                                                                  diff --git a/src/atdata/_schema_codec.py b/src/atdata/_schema_codec.py
                                                                  index e0ebc33..ef733a2 100644
                                                                  --- a/src/atdata/_schema_codec.py
                                                                  +++ b/src/atdata/_schema_codec.py
                                                                  @@ -203,7 +203,9 @@ def schema_to_type(
                                                                           namespace={
                                                                               "__post_init__": lambda self: PackableSample.__post_init__(self),
                                                                               "__schema_version__": version,
                                                                  -            "__schema_ref__": schema.get("$ref", None),  # Store original ref if available
                                                                  +            "__schema_ref__": schema.get(
                                                                  +                "$ref", None
                                                                  +            ),  # Store original ref if available
                                                                           },
                                                                       )
                                                                   
                                                                  @@ -239,7 +241,9 @@ def _field_type_to_stub_str(field_type: dict, optional: bool = False) -> str:
                                                                   
                                                                       if kind == "primitive":
                                                                           primitive = field_type.get("primitive", "str")
                                                                  -        py_type = primitive  # str, int, float, bool, bytes are all valid Python type names
                                                                  +        py_type = (
                                                                  +            primitive  # str, int, float, bool, bytes are all valid Python type names
                                                                  +        )
                                                                       elif kind == "ndarray":
                                                                           py_type = "NDArray[Any]"
                                                                       elif kind == "array":
                                                                  diff --git a/src/atdata/_sources.py b/src/atdata/_sources.py
                                                                  index 64bdf3d..532add3 100644
                                                                  --- a/src/atdata/_sources.py
                                                                  +++ b/src/atdata/_sources.py
                                                                  @@ -167,7 +167,9 @@ def _get_client(self) -> Any:
                                                                               client_kwargs["region_name"] = self.region
                                                                           elif not self.endpoint:
                                                                               # Default region for AWS S3
                                                                  -            client_kwargs["region_name"] = os.environ.get("AWS_DEFAULT_REGION", "us-east-1")
                                                                  +            client_kwargs["region_name"] = os.environ.get(
                                                                  +                "AWS_DEFAULT_REGION", "us-east-1"
                                                                  +            )
                                                                   
                                                                           self._client = boto3.client("s3", **client_kwargs)
                                                                           return self._client
                                                                  @@ -219,7 +221,7 @@ def open_shard(self, shard_id: str) -> IO[bytes]:
                                                                           if not shard_id.startswith(f"s3://{self.bucket}/"):
                                                                               raise KeyError(f"Shard not in this bucket: {shard_id}")
                                                                   
                                                                  -        key = shard_id[len(f"s3://{self.bucket}/"):]
                                                                  +        key = shard_id[len(f"s3://{self.bucket}/") :]
                                                                           client = self._get_client()
                                                                           response = client.get_object(Bucket=self.bucket, Key=key)
                                                                           return response["Body"]
                                                                  @@ -355,7 +357,9 @@ class BlobSource:
                                                                   
                                                                       blob_refs: list[dict[str, str]]
                                                                       pds_endpoint: str | None = None
                                                                  -    _endpoint_cache: dict[str, str] = field(default_factory=dict, repr=False, compare=False)
                                                                  +    _endpoint_cache: dict[str, str] = field(
                                                                  +        default_factory=dict, repr=False, compare=False
                                                                  +    )
                                                                   
                                                                       def _resolve_pds_endpoint(self, did: str) -> str:
                                                                           """Resolve PDS endpoint for a DID, with caching."""
                                                                  @@ -447,6 +451,7 @@ def open_shard(self, shard_id: str) -> IO[bytes]:
                                                                           url = self._get_blob_url(did, cid)
                                                                   
                                                                           import requests
                                                                  +
                                                                           response = requests.get(url, stream=True, timeout=60)
                                                                           response.raise_for_status()
                                                                           return response.raw
                                                                  diff --git a/src/atdata/_stub_manager.py b/src/atdata/_stub_manager.py
                                                                  index 0a2256a..00d7faa 100644
                                                                  --- a/src/atdata/_stub_manager.py
                                                                  +++ b/src/atdata/_stub_manager.py
                                                                  @@ -153,7 +153,9 @@ def _stub_filename(self, name: str, version: str) -> str:
                                                                           """Alias for _module_filename for backwards compatibility."""
                                                                           return self._module_filename(name, version)
                                                                   
                                                                  -    def _module_path(self, name: str, version: str, authority: str = DEFAULT_AUTHORITY) -> Path:
                                                                  +    def _module_path(
                                                                  +        self, name: str, version: str, authority: str = DEFAULT_AUTHORITY
                                                                  +    ) -> Path:
                                                                           """Get full path to module file for a schema.
                                                                   
                                                                           Args:
                                                                  @@ -166,7 +168,9 @@ def _module_path(self, name: str, version: str, authority: str = DEFAULT_AUTHORI
                                                                           """
                                                                           return self._stub_dir / authority / self._module_filename(name, version)
                                                                   
                                                                  -    def _stub_path(self, name: str, version: str, authority: str = DEFAULT_AUTHORITY) -> Path:
                                                                  +    def _stub_path(
                                                                  +        self, name: str, version: str, authority: str = DEFAULT_AUTHORITY
                                                                  +    ) -> Path:
                                                                           """Alias for _module_path for backwards compatibility."""
                                                                           return self._module_path(name, version, authority)
                                                                   
                                                                  @@ -207,7 +211,9 @@ def _ensure_authority_package(self, authority: str) -> None:
                                                                           authority_dir.mkdir(parents=True, exist_ok=True)
                                                                           init_path = authority_dir / "__init__.py"
                                                                           if not init_path.exists():
                                                                  -            init_path.write_text(f'"""Auto-generated schema modules for {authority}."""\n')
                                                                  +            init_path.write_text(
                                                                  +                f'"""Auto-generated schema modules for {authority}."""\n'
                                                                  +            )
                                                                   
                                                                       def _write_module_atomic(self, path: Path, content: str, authority: str) -> None:
                                                                           """Write module file atomically using temp file and rename.
                                                                  @@ -355,7 +361,9 @@ def ensure_module(self, schema: dict) -> Optional[Type]:
                                                                   
                                                                           return cls
                                                                   
                                                                  -    def _import_class_from_module(self, module_path: Path, class_name: str) -> Optional[Type]:
                                                                  +    def _import_class_from_module(
                                                                  +        self, module_path: Path, class_name: str
                                                                  +    ) -> Optional[Type]:
                                                                           """Import a class from a generated module file.
                                                                   
                                                                           Uses importlib to dynamically load the module and extract the class.
                                                                  @@ -395,6 +403,7 @@ def _import_class_from_module(self, module_path: Path, class_name: str) -> Optio
                                                                       def _print_ide_hint(self) -> None:
                                                                           """Print a one-time hint about IDE configuration."""
                                                                           import sys as _sys
                                                                  +
                                                                           print(
                                                                               f"\n[atdata] Generated schema module in: {self._stub_dir}\n"
                                                                               f"[atdata] For IDE support, add this path to your type checker:\n"
                                                                  diff --git a/src/atdata/_type_utils.py b/src/atdata/_type_utils.py
                                                                  index 03b6a10..1cafadd 100644
                                                                  --- a/src/atdata/_type_utils.py
                                                                  +++ b/src/atdata/_type_utils.py
                                                                  @@ -9,15 +9,29 @@
                                                                   
                                                                   # Mapping from numpy dtype strings to schema dtype names
                                                                   NUMPY_DTYPE_MAP = {
                                                                  -    "float16": "float16", "float32": "float32", "float64": "float64",
                                                                  -    "int8": "int8", "int16": "int16", "int32": "int32", "int64": "int64",
                                                                  -    "uint8": "uint8", "uint16": "uint16", "uint32": "uint32", "uint64": "uint64",
                                                                  -    "bool": "bool", "complex64": "complex64", "complex128": "complex128",
                                                                  +    "float16": "float16",
                                                                  +    "float32": "float32",
                                                                  +    "float64": "float64",
                                                                  +    "int8": "int8",
                                                                  +    "int16": "int16",
                                                                  +    "int32": "int32",
                                                                  +    "int64": "int64",
                                                                  +    "uint8": "uint8",
                                                                  +    "uint16": "uint16",
                                                                  +    "uint32": "uint32",
                                                                  +    "uint64": "uint64",
                                                                  +    "bool": "bool",
                                                                  +    "complex64": "complex64",
                                                                  +    "complex128": "complex128",
                                                                   }
                                                                   
                                                                   # Mapping from Python primitive types to schema type names
                                                                   PRIMITIVE_TYPE_MAP = {
                                                                  -    str: "str", int: "int", float: "float", bool: "bool", bytes: "bytes",
                                                                  +    str: "str",
                                                                  +    int: "int",
                                                                  +    float: "float",
                                                                  +    bool: "bool",
                                                                  +    bytes: "bytes",
                                                                   }
                                                                   
                                                                   
                                                                  diff --git a/src/atdata/atmosphere/__init__.py b/src/atdata/atmosphere/__init__.py
                                                                  index fb13578..e5367bf 100644
                                                                  --- a/src/atdata/atmosphere/__init__.py
                                                                  +++ b/src/atdata/atmosphere/__init__.py
                                                                  @@ -84,6 +84,7 @@ def data_urls(self) -> list[str]:
                                                                       def metadata(self) -> Optional[dict]:
                                                                           """Metadata from the record, if any."""
                                                                           import msgpack
                                                                  +
                                                                           metadata_bytes = self._record.get("metadata")
                                                                           if metadata_bytes is None:
                                                                               return None
                                                                  diff --git a/src/atdata/atmosphere/_types.py b/src/atdata/atmosphere/_types.py
                                                                  index 2f70df6..f810a3d 100644
                                                                  --- a/src/atdata/atmosphere/_types.py
                                                                  +++ b/src/atdata/atmosphere/_types.py
                                                                  @@ -56,7 +56,9 @@ def parse(cls, uri: str) -> "AtUri":
                                                                   
                                                                           parts = uri[5:].split("/")
                                                                           if len(parts) < 3:
                                                                  -            raise ValueError(f"Invalid AT URI: expected authority/collection/rkey: {uri}")
                                                                  +            raise ValueError(
                                                                  +                f"Invalid AT URI: expected authority/collection/rkey: {uri}"
                                                                  +            )
                                                                   
                                                                           return cls(
                                                                               authority=parts[0],
                                                                  diff --git a/src/atdata/atmosphere/client.py b/src/atdata/atmosphere/client.py
                                                                  index 15d8e07..a5da320 100644
                                                                  --- a/src/atdata/atmosphere/client.py
                                                                  +++ b/src/atdata/atmosphere/client.py
                                                                  @@ -18,6 +18,7 @@ def _get_atproto_client_class():
                                                                       if _atproto_client_class is None:
                                                                           try:
                                                                               from atproto import Client
                                                                  +
                                                                               _atproto_client_class = Client
                                                                           except ImportError as e:
                                                                               raise ImportError(
                                                                  @@ -325,7 +326,11 @@ def upload_blob(
                                                                           # Convert to dict format suitable for embedding in records
                                                                           return {
                                                                               "$type": "blob",
                                                                  -            "ref": {"$link": blob_ref.ref.link if hasattr(blob_ref.ref, "link") else str(blob_ref.ref)},
                                                                  +            "ref": {
                                                                  +                "$link": blob_ref.ref.link
                                                                  +                if hasattr(blob_ref.ref, "link")
                                                                  +                else str(blob_ref.ref)
                                                                  +            },
                                                                               "mimeType": blob_ref.mime_type,
                                                                               "size": blob_ref.size,
                                                                           }
                                                                  diff --git a/src/atdata/atmosphere/lens.py b/src/atdata/atmosphere/lens.py
                                                                  index 765690d..837141b 100644
                                                                  --- a/src/atdata/atmosphere/lens.py
                                                                  +++ b/src/atdata/atmosphere/lens.py
                                                                  @@ -21,6 +21,7 @@
                                                                   
                                                                   # Import for type checking only
                                                                   from typing import TYPE_CHECKING
                                                                  +
                                                                   if TYPE_CHECKING:
                                                                       from ..lens import Lens
                                                                   
                                                                  diff --git a/src/atdata/atmosphere/records.py b/src/atdata/atmosphere/records.py
                                                                  index 82d22c2..eb34a94 100644
                                                                  --- a/src/atdata/atmosphere/records.py
                                                                  +++ b/src/atdata/atmosphere/records.py
                                                                  @@ -19,6 +19,7 @@
                                                                   
                                                                   # Import for type checking only to avoid circular imports
                                                                   from typing import TYPE_CHECKING
                                                                  +
                                                                   if TYPE_CHECKING:
                                                                       from ..dataset import PackableSample, Dataset
                                                                   
                                                                  @@ -394,8 +395,7 @@ def get_blobs(self, uri: str | AtUri) -> list[dict]:
                                                                               return storage.get("blobs", [])
                                                                           elif "storageExternal" in storage_type:
                                                                               raise ValueError(
                                                                  -                "Dataset uses external URL storage, not blobs. "
                                                                  -                "Use get_urls() instead."
                                                                  +                "Dataset uses external URL storage, not blobs. Use get_urls() instead."
                                                                               )
                                                                           else:
                                                                               raise ValueError(f"Unknown storage type: {storage_type}")
                                                                  diff --git a/src/atdata/atmosphere/schema.py b/src/atdata/atmosphere/schema.py
                                                                  index 9eef3b4..66e8cd1 100644
                                                                  --- a/src/atdata/atmosphere/schema.py
                                                                  +++ b/src/atdata/atmosphere/schema.py
                                                                  @@ -17,7 +17,6 @@
                                                                       LEXICON_NAMESPACE,
                                                                   )
                                                                   from .._type_utils import (
                                                                  -    numpy_dtype_to_string,
                                                                       unwrap_optional,
                                                                       is_ndarray_type,
                                                                       extract_ndarray_dtype,
                                                                  @@ -25,6 +24,7 @@
                                                                   
                                                                   # Import for type checking only to avoid circular imports
                                                                   from typing import TYPE_CHECKING
                                                                  +
                                                                   if TYPE_CHECKING:
                                                                       from ..dataset import PackableSample
                                                                   
                                                                  @@ -88,7 +88,9 @@ def publish(
                                                                               TypeError: If a field type is not supported.
                                                                           """
                                                                           if not is_dataclass(sample_type):
                                                                  -            raise ValueError(f"{sample_type.__name__} must be a dataclass (use @packable)")
                                                                  +            raise ValueError(
                                                                  +                f"{sample_type.__name__} must be a dataclass (use @packable)"
                                                                  +            )
                                                                   
                                                                           # Build the schema record
                                                                           schema_record = self._build_schema_record(
                                                                  @@ -153,12 +155,18 @@ def _python_type_to_field_type(self, python_type) -> FieldType:
                                                                               return FieldType(kind="primitive", primitive="bytes")
                                                                   
                                                                           if is_ndarray_type(python_type):
                                                                  -            return FieldType(kind="ndarray", dtype=extract_ndarray_dtype(python_type), shape=None)
                                                                  +            return FieldType(
                                                                  +                kind="ndarray", dtype=extract_ndarray_dtype(python_type), shape=None
                                                                  +            )
                                                                   
                                                                           origin = get_origin(python_type)
                                                                           if origin is list:
                                                                               args = get_args(python_type)
                                                                  -            items = self._python_type_to_field_type(args[0]) if args else FieldType(kind="primitive", primitive="str")
                                                                  +            items = (
                                                                  +                self._python_type_to_field_type(args[0])
                                                                  +                if args
                                                                  +                else FieldType(kind="primitive", primitive="str")
                                                                  +            )
                                                                               return FieldType(kind="array", items=items)
                                                                   
                                                                           if is_dataclass(python_type):
                                                                  diff --git a/src/atdata/atmosphere/store.py b/src/atdata/atmosphere/store.py
                                                                  index e6913b5..cb15b72 100644
                                                                  --- a/src/atdata/atmosphere/store.py
                                                                  +++ b/src/atdata/atmosphere/store.py
                                                                  @@ -20,7 +20,6 @@
                                                                   
                                                                   from __future__ import annotations
                                                                   
                                                                  -import io
                                                                   import tempfile
                                                                   from dataclasses import dataclass
                                                                   from typing import TYPE_CHECKING, Any
                                                                  @@ -29,6 +28,7 @@
                                                                   
                                                                   if TYPE_CHECKING:
                                                                       from ..dataset import Dataset
                                                                  +    from .._sources import BlobSource
                                                                       from .client import AtmosphereClient
                                                                   
                                                                   
                                                                  diff --git a/src/atdata/cli/__init__.py b/src/atdata/cli/__init__.py
                                                                  index 0686ef8..7661e11 100644
                                                                  --- a/src/atdata/cli/__init__.py
                                                                  +++ b/src/atdata/cli/__init__.py
                                                                  @@ -42,7 +42,8 @@ def main(argv: Sequence[str] | None = None) -> int:
                                                                           formatter_class=argparse.RawDescriptionHelpFormatter,
                                                                       )
                                                                       parser.add_argument(
                                                                  -        "--version", "-v",
                                                                  +        "--version",
                                                                  +        "-v",
                                                                           action="store_true",
                                                                           help="Show version information",
                                                                       )
                                                                  @@ -83,7 +84,8 @@ def main(argv: Sequence[str] | None = None) -> int:
                                                                           help="MinIO console port (default: 9001)",
                                                                       )
                                                                       up_parser.add_argument(
                                                                  -        "--detach", "-d",
                                                                  +        "--detach",
                                                                  +        "-d",
                                                                           action="store_true",
                                                                           default=True,
                                                                           help="Run containers in detached mode (default: True)",
                                                                  @@ -95,7 +97,8 @@ def main(argv: Sequence[str] | None = None) -> int:
                                                                           help="Stop local development containers",
                                                                       )
                                                                       down_parser.add_argument(
                                                                  -        "--volumes", "-v",
                                                                  +        "--volumes",
                                                                  +        "-v",
                                                                           action="store_true",
                                                                           help="Also remove volumes (deletes all data)",
                                                                       )
                                                                  @@ -165,10 +168,12 @@ def _cmd_version() -> int:
                                                                       """Show version information."""
                                                                       try:
                                                                           from atdata import __version__
                                                                  +
                                                                           version = __version__
                                                                       except ImportError:
                                                                           # Fallback to package metadata
                                                                           from importlib.metadata import version as pkg_version
                                                                  +
                                                                           version = pkg_version("atdata")
                                                                   
                                                                       print(f"atdata {version}")
                                                                  @@ -183,6 +188,7 @@ def _cmd_local_up(
                                                                   ) -> int:
                                                                       """Start local development infrastructure."""
                                                                       from .local import local_up
                                                                  +
                                                                       return local_up(
                                                                           redis_port=redis_port,
                                                                           minio_port=minio_port,
                                                                  @@ -194,18 +200,21 @@ def _cmd_local_up(
                                                                   def _cmd_local_down(remove_volumes: bool) -> int:
                                                                       """Stop local development infrastructure."""
                                                                       from .local import local_down
                                                                  +
                                                                       return local_down(remove_volumes=remove_volumes)
                                                                   
                                                                   
                                                                   def _cmd_local_status() -> int:
                                                                       """Show status of local infrastructure."""
                                                                       from .local import local_status
                                                                  +
                                                                       return local_status()
                                                                   
                                                                   
                                                                   def _cmd_diagnose(host: str, port: int) -> int:
                                                                       """Diagnose Redis configuration."""
                                                                       from .diagnose import diagnose_redis
                                                                  +
                                                                       return diagnose_redis(host=host, port=port)
                                                                   
                                                                   
                                                                  diff --git a/src/atdata/cli/diagnose.py b/src/atdata/cli/diagnose.py
                                                                  index c523f55..b826346 100644
                                                                  --- a/src/atdata/cli/diagnose.py
                                                                  +++ b/src/atdata/cli/diagnose.py
                                                                  @@ -5,7 +5,6 @@
                                                                   """
                                                                   
                                                                   import sys
                                                                  -from typing import Any
                                                                   
                                                                   
                                                                   def _print_status(label: str, ok: bool, detail: str = "") -> None:
                                                                  @@ -41,6 +40,7 @@ def diagnose_redis(host: str = "localhost", port: int = 6379) -> int:
                                                                       # Try to connect
                                                                       try:
                                                                           from redis import Redis
                                                                  +
                                                                           redis = Redis(host=host, port=port, socket_connect_timeout=5)
                                                                           redis.ping()
                                                                           _print_status("Connection", True, "connected")
                                                                  @@ -70,7 +70,7 @@ def diagnose_redis(host: str = "localhost", port: int = 6379) -> int:
                                                                           _print_status(
                                                                               "AOF Persistence",
                                                                               aof_ok,
                                                                  -            "enabled" if aof_ok else "DISABLED - data may be lost on restart!"
                                                                  +            "enabled" if aof_ok else "DISABLED - data may be lost on restart!",
                                                                           )
                                                                           if not aof_ok:
                                                                               issues_found = True
                                                                  @@ -85,7 +85,7 @@ def diagnose_redis(host: str = "localhost", port: int = 6379) -> int:
                                                                           _print_status(
                                                                               "RDB Persistence",
                                                                               rdb_ok,
                                                                  -            f"configured ({save_config})" if rdb_ok else "DISABLED"
                                                                  +            f"configured ({save_config})" if rdb_ok else "DISABLED",
                                                                           )
                                                                           # RDB disabled is only a warning if AOF is enabled
                                                                       except Exception as e:
                                                                  @@ -95,7 +95,13 @@ def diagnose_redis(host: str = "localhost", port: int = 6379) -> int:
                                                                       try:
                                                                           policy = redis.config_get("maxmemory-policy").get("maxmemory-policy", "unknown")
                                                                           # Safe policies that won't evict index data
                                                                  -        safe_policies = {"noeviction", "volatile-lru", "volatile-lfu", "volatile-ttl", "volatile-random"}
                                                                  +        safe_policies = {
                                                                  +            "noeviction",
                                                                  +            "volatile-lru",
                                                                  +            "volatile-lfu",
                                                                  +            "volatile-ttl",
                                                                  +            "volatile-random",
                                                                  +        }
                                                                           policy_ok = policy in safe_policies
                                                                   
                                                                           if policy_ok:
                                                                  @@ -104,7 +110,7 @@ def diagnose_redis(host: str = "localhost", port: int = 6379) -> int:
                                                                               _print_status(
                                                                                   "Memory Policy",
                                                                                   False,
                                                                  -                f"{policy} - may evict index data! Use 'noeviction' or 'volatile-*'"
                                                                  +                f"{policy} - may evict index data! Use 'noeviction' or 'volatile-*'",
                                                                               )
                                                                               issues_found = True
                                                                       except Exception as e:
                                                                  @@ -141,9 +147,7 @@ def diagnose_redis(host: str = "localhost", port: int = 6379) -> int:
                                                                           for key in redis.scan_iter(match="LocalSchema:*", count=100):
                                                                               schema_count += 1
                                                                           _print_status(
                                                                  -            "atdata Keys",
                                                                  -            True,
                                                                  -            f"{dataset_count} datasets, {schema_count} schemas"
                                                                  +            "atdata Keys", True, f"{dataset_count} datasets, {schema_count} schemas"
                                                                           )
                                                                       except Exception as e:
                                                                           _print_status("atdata Keys", False, f"check failed: {e}")
                                                                  diff --git a/src/atdata/cli/local.py b/src/atdata/cli/local.py
                                                                  index fa08026..5dd9216 100644
                                                                  --- a/src/atdata/cli/local.py
                                                                  +++ b/src/atdata/cli/local.py
                                                                  @@ -144,7 +144,9 @@ def _run_compose(
                                                                           elif shutil.which("docker-compose"):
                                                                               base_cmd = ["docker-compose"]
                                                                           else:
                                                                  -            raise RuntimeError("Neither 'docker compose' nor 'docker-compose' available")
                                                                  +            raise RuntimeError(
                                                                  +                "Neither 'docker compose' nor 'docker-compose' available"
                                                                  +            )
                                                                       else:
                                                                           raise RuntimeError("Docker not found")
                                                                   
                                                                  @@ -195,6 +197,7 @@ def local_up(
                                                                   
                                                                       # Wait a moment for containers to be healthy
                                                                       import time
                                                                  +
                                                                       time.sleep(2)
                                                                   
                                                                       # Show status
                                                                  diff --git a/src/atdata/dataset.py b/src/atdata/dataset.py
                                                                  index 4ae3353..cefda6f 100644
                                                                  --- a/src/atdata/dataset.py
                                                                  +++ b/src/atdata/dataset.py
                                                                  @@ -41,7 +41,7 @@
                                                                   )
                                                                   from abc import ABC
                                                                   
                                                                  -from ._sources import URLSource, S3Source
                                                                  +from ._sources import URLSource
                                                                   from ._protocols import DataSource
                                                                   
                                                                   from tqdm import tqdm
                                                                  @@ -65,7 +65,6 @@
                                                                       TypeAlias,
                                                                       dataclass_transform,
                                                                       overload,
                                                                  -    Literal,
                                                                   )
                                                                   from numpy.typing import NDArray
                                                                   
                                                                  @@ -85,30 +84,31 @@
                                                                   WDSRawBatch: TypeAlias = Dict[str, Any]
                                                                   
                                                                   SampleExportRow: TypeAlias = Dict[str, Any]
                                                                  -SampleExportMap: TypeAlias = Callable[['PackableSample'], SampleExportRow]
                                                                  +SampleExportMap: TypeAlias = Callable[["PackableSample"], SampleExportRow]
                                                                   
                                                                   
                                                                   ##
                                                                   # Main base classes
                                                                   
                                                                  -DT = TypeVar( 'DT' )
                                                                  +DT = TypeVar("DT")
                                                                   
                                                                   
                                                                  -def _make_packable( x ):
                                                                  +def _make_packable(x):
                                                                       """Convert numpy arrays to bytes; pass through other values unchanged."""
                                                                  -    if isinstance( x, np.ndarray ):
                                                                  -        return eh.array_to_bytes( x )
                                                                  +    if isinstance(x, np.ndarray):
                                                                  +        return eh.array_to_bytes(x)
                                                                       return x
                                                                   
                                                                   
                                                                  -def _is_possibly_ndarray_type( t ):
                                                                  +def _is_possibly_ndarray_type(t):
                                                                       """Return True if type annotation is NDArray or Optional[NDArray]."""
                                                                       if t == NDArray:
                                                                           return True
                                                                  -    if isinstance( t, types.UnionType ):
                                                                  -        return any( x == NDArray for x in t.__args__ )
                                                                  +    if isinstance(t, types.UnionType):
                                                                  +        return any(x == NDArray for x in t.__args__)
                                                                       return False
                                                                   
                                                                  +
                                                                   class DictSample:
                                                                       """Dynamic sample type providing dict-like access to raw msgpack data.
                                                                   
                                                                  @@ -141,7 +141,7 @@ class DictSample:
                                                                           converted to numpy arrays when accessed through a typed sample class.
                                                                       """
                                                                   
                                                                  -    __slots__ = ('_data',)
                                                                  +    __slots__ = ("_data",)
                                                                   
                                                                       def __init__(self, _data: dict[str, Any] | None = None, **kwargs: Any) -> None:
                                                                           """Create a DictSample from a dictionary or keyword arguments.
                                                                  @@ -151,12 +151,12 @@ def __init__(self, _data: dict[str, Any] | None = None, **kwargs: Any) -> None:
                                                                               **kwargs: Field values if _data is not provided.
                                                                           """
                                                                           if _data is not None:
                                                                  -            object.__setattr__(self, '_data', _data)
                                                                  +            object.__setattr__(self, "_data", _data)
                                                                           else:
                                                                  -            object.__setattr__(self, '_data', kwargs)
                                                                  +            object.__setattr__(self, "_data", kwargs)
                                                                   
                                                                       @classmethod
                                                                  -    def from_data(cls, data: dict[str, Any]) -> 'DictSample':
                                                                  +    def from_data(cls, data: dict[str, Any]) -> "DictSample":
                                                                           """Create a DictSample from unpacked msgpack data.
                                                                   
                                                                           Args:
                                                                  @@ -168,7 +168,7 @@ def from_data(cls, data: dict[str, Any]) -> 'DictSample':
                                                                           return cls(_data=data)
                                                                   
                                                                       @classmethod
                                                                  -    def from_bytes(cls, bs: bytes) -> 'DictSample':
                                                                  +    def from_bytes(cls, bs: bytes) -> "DictSample":
                                                                           """Create a DictSample from raw msgpack bytes.
                                                                   
                                                                           Args:
                                                                  @@ -192,7 +192,7 @@ def __getattr__(self, name: str) -> Any:
                                                                               AttributeError: If the field doesn't exist.
                                                                           """
                                                                           # Avoid infinite recursion for _data lookup
                                                                  -        if name == '_data':
                                                                  +        if name == "_data":
                                                                               raise AttributeError(name)
                                                                           try:
                                                                               return self._data[name]
                                                                  @@ -258,24 +258,24 @@ def packed(self) -> bytes:
                                                                           return msgpack.packb(self._data)
                                                                   
                                                                       @property
                                                                  -    def as_wds(self) -> 'WDSRawSample':
                                                                  +    def as_wds(self) -> "WDSRawSample":
                                                                           """Pack this sample's data for writing to WebDataset.
                                                                   
                                                                           Returns:
                                                                               A dictionary with ``__key__`` and ``msgpack`` fields.
                                                                           """
                                                                           return {
                                                                  -            '__key__': str(uuid.uuid1(0, 0)),
                                                                  -            'msgpack': self.packed,
                                                                  +            "__key__": str(uuid.uuid1(0, 0)),
                                                                  +            "msgpack": self.packed,
                                                                           }
                                                                   
                                                                       def __repr__(self) -> str:
                                                                  -        fields = ', '.join(f'{k}=...' for k in self._data.keys())
                                                                  -        return f'DictSample({fields})'
                                                                  +        fields = ", ".join(f"{k}=..." for k in self._data.keys())
                                                                  +        return f"DictSample({fields})"
                                                                   
                                                                   
                                                                   @dataclass
                                                                  -class PackableSample( ABC ):
                                                                  +class PackableSample(ABC):
                                                                       """Base class for samples that can be serialized with msgpack.
                                                                   
                                                                       This abstract base class provides automatic serialization/deserialization
                                                                  @@ -298,41 +298,41 @@ class PackableSample( ABC ):
                                                                           >>> restored = MyData.from_bytes(packed)  # Deserialize
                                                                       """
                                                                   
                                                                  -    def _ensure_good( self ):
                                                                  +    def _ensure_good(self):
                                                                           """Convert bytes to NDArray for fields annotated as NDArray or NDArray | None."""
                                                                   
                                                                           # Auto-convert known types when annotated
                                                                           # for var_name, var_type in vars( self.__class__ )['__annotations__'].items():
                                                                  -        for field in dataclasses.fields( self ):
                                                                  +        for field in dataclasses.fields(self):
                                                                               var_name = field.name
                                                                               var_type = field.type
                                                                   
                                                                               # Annotation for this variable is to be an NDArray
                                                                  -            if _is_possibly_ndarray_type( var_type ):
                                                                  +            if _is_possibly_ndarray_type(var_type):
                                                                                   # ... so, we'll always auto-convert to numpy
                                                                   
                                                                  -                var_cur_value = getattr( self, var_name )
                                                                  +                var_cur_value = getattr(self, var_name)
                                                                   
                                                                                   # Execute the appropriate conversion for intermediate data
                                                                                   # based on what is provided
                                                                   
                                                                  -                if isinstance( var_cur_value, np.ndarray ):
                                                                  +                if isinstance(var_cur_value, np.ndarray):
                                                                                       # Already the correct type, no conversion needed
                                                                                       continue
                                                                   
                                                                  -                elif isinstance( var_cur_value, bytes ):
                                                                  +                elif isinstance(var_cur_value, bytes):
                                                                                       # Design note: bytes in NDArray-typed fields are always interpreted
                                                                                       # as serialized arrays. This means raw bytes fields must not be
                                                                                       # annotated as NDArray.
                                                                  -                    setattr( self, var_name, eh.bytes_to_array( var_cur_value ) )
                                                                  +                    setattr(self, var_name, eh.bytes_to_array(var_cur_value))
                                                                   
                                                                  -    def __post_init__( self ):
                                                                  +    def __post_init__(self):
                                                                           self._ensure_good()
                                                                   
                                                                       ##
                                                                   
                                                                       @classmethod
                                                                  -    def from_data( cls, data: WDSRawSample ) -> Self:
                                                                  +    def from_data(cls, data: WDSRawSample) -> Self:
                                                                           """Create a sample instance from unpacked msgpack data.
                                                                   
                                                                           Args:
                                                                  @@ -341,10 +341,10 @@ def from_data( cls, data: WDSRawSample ) -> Self:
                                                                           Returns:
                                                                               New instance with NDArray fields auto-converted from bytes.
                                                                           """
                                                                  -        return cls( **data )
                                                                  -    
                                                                  +        return cls(**data)
                                                                  +
                                                                       @classmethod
                                                                  -    def from_bytes( cls, bs: bytes ) -> Self:
                                                                  +    def from_bytes(cls, bs: bytes) -> Self:
                                                                           """Create a sample instance from raw msgpack bytes.
                                                                   
                                                                           Args:
                                                                  @@ -353,10 +353,10 @@ def from_bytes( cls, bs: bytes ) -> Self:
                                                                           Returns:
                                                                               A new instance of this sample class deserialized from the bytes.
                                                                           """
                                                                  -        return cls.from_data( ormsgpack.unpackb( bs ) )
                                                                  +        return cls.from_data(ormsgpack.unpackb(bs))
                                                                   
                                                                       @property
                                                                  -    def packed( self ) -> bytes:
                                                                  +    def packed(self) -> bytes:
                                                                           """Pack this sample's data into msgpack bytes.
                                                                   
                                                                           NDArray fields are automatically converted to bytes before packing.
                                                                  @@ -371,20 +371,17 @@ def packed( self ) -> bytes:
                                                                   
                                                                           # Make sure that all of our (possibly unpackable) data is in a packable
                                                                           # format
                                                                  -        o = {
                                                                  -            k: _make_packable( v )
                                                                  -            for k, v in vars( self ).items()
                                                                  -        }
                                                                  +        o = {k: _make_packable(v) for k, v in vars(self).items()}
                                                                   
                                                                  -        ret = msgpack.packb( o )
                                                                  +        ret = msgpack.packb(o)
                                                                   
                                                                           if ret is None:
                                                                  -            raise RuntimeError( f'Failed to pack sample to bytes: {o}' )
                                                                  +            raise RuntimeError(f"Failed to pack sample to bytes: {o}")
                                                                   
                                                                           return ret
                                                                  -    
                                                                  +
                                                                       @property
                                                                  -    def as_wds( self ) -> WDSRawSample:
                                                                  +    def as_wds(self) -> WDSRawSample:
                                                                           """Pack this sample's data for writing to WebDataset.
                                                                   
                                                                           Returns:
                                                                  @@ -397,19 +394,21 @@ def as_wds( self ) -> WDSRawSample:
                                                                           """
                                                                           return {
                                                                               # Generates a UUID that is timelike-sortable
                                                                  -            '__key__': str( uuid.uuid1( 0, 0 ) ),
                                                                  -            'msgpack': self.packed,
                                                                  +            "__key__": str(uuid.uuid1(0, 0)),
                                                                  +            "msgpack": self.packed,
                                                                           }
                                                                   
                                                                  -def _batch_aggregate( xs: Sequence ):
                                                                  +
                                                                  +def _batch_aggregate(xs: Sequence):
                                                                       """Stack arrays into numpy array with batch dim; otherwise return list."""
                                                                       if not xs:
                                                                           return []
                                                                  -    if isinstance( xs[0], np.ndarray ):
                                                                  -        return np.array( list( xs ) )
                                                                  -    return list( xs )
                                                                  +    if isinstance(xs[0], np.ndarray):
                                                                  +        return np.array(list(xs))
                                                                  +    return list(xs)
                                                                  +
                                                                   
                                                                  -class SampleBatch( Generic[DT] ):
                                                                  +class SampleBatch(Generic[DT]):
                                                                       """A batch of samples with automatic attribute aggregation.
                                                                   
                                                                       This class wraps a sequence of samples and provides magic ``__getattr__``
                                                                  @@ -437,10 +436,11 @@ class SampleBatch( Generic[DT] ):
                                                                           subscripted syntax ``SampleBatch[MyType](samples)`` rather than
                                                                           calling the constructor directly with an unsubscripted class.
                                                                       """
                                                                  +
                                                                       # Design note: The docstring uses "Parameters:" for type parameters because
                                                                       # quartodoc doesn't yet support "Type Parameters:" sections in generated docs.
                                                                   
                                                                  -    def __init__( self, samples: Sequence[DT] ):
                                                                  +    def __init__(self, samples: Sequence[DT]):
                                                                           """Create a batch from a sequence of samples.
                                                                   
                                                                           Args:
                                                                  @@ -448,23 +448,23 @@ def __init__( self, samples: Sequence[DT] ):
                                                                                   Each sample must be an instance of a type derived from
                                                                                   ``PackableSample``.
                                                                           """
                                                                  -        self.samples = list( samples )
                                                                  +        self.samples = list(samples)
                                                                           self._aggregate_cache = dict()
                                                                           self._sample_type_cache: Type | None = None
                                                                   
                                                                       @property
                                                                  -    def sample_type( self ) -> Type:
                                                                  +    def sample_type(self) -> Type:
                                                                           """The type of each sample in this batch.
                                                                   
                                                                           Returns:
                                                                               The type parameter ``DT`` used when creating this ``SampleBatch[DT]``.
                                                                           """
                                                                           if self._sample_type_cache is None:
                                                                  -            self._sample_type_cache = typing.get_args( self.__orig_class__)[0]
                                                                  +            self._sample_type_cache = typing.get_args(self.__orig_class__)[0]
                                                                               assert self._sample_type_cache is not None
                                                                           return self._sample_type_cache
                                                                   
                                                                  -    def __getattr__( self, name ):
                                                                  +    def __getattr__(self, name):
                                                                           """Aggregate an attribute across all samples in the batch.
                                                                   
                                                                           This magic method enables attribute-style access to aggregated sample
                                                                  @@ -481,20 +481,19 @@ def __getattr__( self, name ):
                                                                               AttributeError: If the attribute doesn't exist on the sample type.
                                                                           """
                                                                           # Aggregate named params of sample type
                                                                  -        if name in vars( self.sample_type )['__annotations__']:
                                                                  +        if name in vars(self.sample_type)["__annotations__"]:
                                                                               if name not in self._aggregate_cache:
                                                                                   self._aggregate_cache[name] = _batch_aggregate(
                                                                  -                    [ getattr( x, name )
                                                                  -                      for x in self.samples ]
                                                                  +                    [getattr(x, name) for x in self.samples]
                                                                                   )
                                                                   
                                                                               return self._aggregate_cache[name]
                                                                   
                                                                  -        raise AttributeError( f'No sample attribute named {name}' )
                                                                  +        raise AttributeError(f"No sample attribute named {name}")
                                                                   
                                                                   
                                                                  -ST = TypeVar( 'ST', bound = PackableSample )
                                                                  -RT = TypeVar( 'RT', bound = PackableSample )
                                                                  +ST = TypeVar("ST", bound=PackableSample)
                                                                  +RT = TypeVar("RT", bound=PackableSample)
                                                                   
                                                                   
                                                                   class _ShardListStage(wds.utils.PipelineStage):
                                                                  @@ -532,7 +531,7 @@ def run(self, src):
                                                                               yield sample
                                                                   
                                                                   
                                                                  -class Dataset( Generic[ST] ):
                                                                  +class Dataset(Generic[ST]):
                                                                       """A typed dataset built on WebDataset with lens transformations.
                                                                   
                                                                       This class wraps WebDataset tar archives and provides type-safe iteration
                                                                  @@ -566,22 +565,24 @@ class Dataset( Generic[ST] ):
                                                                           subscripted syntax ``Dataset[MyType](url)`` rather than calling the
                                                                           constructor directly with an unsubscripted class.
                                                                       """
                                                                  +
                                                                       # Design note: The docstring uses "Parameters:" for type parameters because
                                                                       # quartodoc doesn't yet support "Type Parameters:" sections in generated docs.
                                                                   
                                                                       @property
                                                                  -    def sample_type( self ) -> Type:
                                                                  +    def sample_type(self) -> Type:
                                                                           """The type of each returned sample from this dataset's iterator.
                                                                   
                                                                           Returns:
                                                                               The type parameter ``ST`` used when creating this ``Dataset[ST]``.
                                                                           """
                                                                           if self._sample_type_cache is None:
                                                                  -            self._sample_type_cache = typing.get_args( self.__orig_class__ )[0]
                                                                  +            self._sample_type_cache = typing.get_args(self.__orig_class__)[0]
                                                                               assert self._sample_type_cache is not None
                                                                           return self._sample_type_cache
                                                                  +
                                                                       @property
                                                                  -    def batch_type( self ) -> Type:
                                                                  +    def batch_type(self) -> Type:
                                                                           """The type of batches produced by this dataset.
                                                                   
                                                                           Returns:
                                                                  @@ -589,12 +590,13 @@ def batch_type( self ) -> Type:
                                                                           """
                                                                           return SampleBatch[self.sample_type]
                                                                   
                                                                  -    def __init__( self,
                                                                  -                 source: DataSource | str | None = None,
                                                                  -                 metadata_url: str | None = None,
                                                                  -                 *,
                                                                  -                 url: str | None = None,
                                                                  -             ) -> None:
                                                                  +    def __init__(
                                                                  +        self,
                                                                  +        source: DataSource | str | None = None,
                                                                  +        metadata_url: str | None = None,
                                                                  +        *,
                                                                  +        url: str | None = None,
                                                                  +    ) -> None:
                                                                           """Create a dataset from a DataSource or URL.
                                                                   
                                                                           Args:
                                                                  @@ -642,7 +644,7 @@ def source(self) -> DataSource:
                                                                           """The underlying data source for this dataset."""
                                                                           return self._source
                                                                   
                                                                  -    def as_type( self, other: Type[RT] ) -> 'Dataset[RT]':
                                                                  +    def as_type(self, other: Type[RT]) -> "Dataset[RT]":
                                                                           """View this dataset through a different sample type using a registered lens.
                                                                   
                                                                           Args:
                                                                  @@ -658,10 +660,10 @@ def as_type( self, other: Type[RT] ) -> 'Dataset[RT]':
                                                                               ValueError: If no registered lens exists between the current
                                                                                   sample type and the target type.
                                                                           """
                                                                  -        ret = Dataset[other]( self._source )
                                                                  +        ret = Dataset[other](self._source)
                                                                           # Get the singleton lens registry
                                                                           lenses = LensNetwork()
                                                                  -        ret._output_lens = lenses.transform( self.sample_type, ret.sample_type )
                                                                  +        ret._output_lens = lenses.transform(self.sample_type, ret.sample_type)
                                                                           return ret
                                                                   
                                                                       @property
                                                                  @@ -695,6 +697,7 @@ def shard_list(self) -> list[str]:
                                                                               Use :meth:`list_shards` instead.
                                                                           """
                                                                           import warnings
                                                                  +
                                                                           warnings.warn(
                                                                               "shard_list is deprecated, use list_shards() instead",
                                                                               DeprecationWarning,
                                                                  @@ -703,7 +706,7 @@ def shard_list(self) -> list[str]:
                                                                           return self.list_shards()
                                                                   
                                                                       @property
                                                                  -    def metadata( self ) -> dict[str, Any] | None:
                                                                  +    def metadata(self) -> dict[str, Any] | None:
                                                                           """Fetch and cache metadata from metadata_url.
                                                                   
                                                                           Returns:
                                                                  @@ -716,26 +719,29 @@ def metadata( self ) -> dict[str, Any] | None:
                                                                               return None
                                                                   
                                                                           if self._metadata is None:
                                                                  -            with requests.get( self.metadata_url, stream = True ) as response:
                                                                  +            with requests.get(self.metadata_url, stream=True) as response:
                                                                                   response.raise_for_status()
                                                                  -                self._metadata = msgpack.unpackb( response.content, raw = False )
                                                                  -        
                                                                  +                self._metadata = msgpack.unpackb(response.content, raw=False)
                                                                  +
                                                                           # Use our cached values
                                                                           return self._metadata
                                                                  -    
                                                                  -    @overload
                                                                  -    def ordered( self,
                                                                  -                batch_size: None = None,
                                                                  -            ) -> Iterable[ST]: ...
                                                                   
                                                                       @overload
                                                                  -    def ordered( self,
                                                                  -                batch_size: int,
                                                                  -            ) -> Iterable[SampleBatch[ST]]: ...
                                                                  +    def ordered(
                                                                  +        self,
                                                                  +        batch_size: None = None,
                                                                  +    ) -> Iterable[ST]: ...
                                                                   
                                                                  -    def ordered( self,
                                                                  -                batch_size: int | None = None,
                                                                  -            ) -> Iterable[ST] | Iterable[SampleBatch[ST]]:
                                                                  +    @overload
                                                                  +    def ordered(
                                                                  +        self,
                                                                  +        batch_size: int,
                                                                  +    ) -> Iterable[SampleBatch[ST]]: ...
                                                                  +
                                                                  +    def ordered(
                                                                  +        self,
                                                                  +        batch_size: int | None = None,
                                                                  +    ) -> Iterable[ST] | Iterable[SampleBatch[ST]]:
                                                                           """Iterate over the dataset in order.
                                                                   
                                                                           Args:
                                                                  @@ -762,7 +768,7 @@ def ordered( self,
                                                                                   _StreamOpenerStage(self._source),
                                                                                   wds.tariterators.tar_file_expander,
                                                                                   wds.tariterators.group_by_keys,
                                                                  -                wds.filters.map( self.wrap ),
                                                                  +                wds.filters.map(self.wrap),
                                                                               )
                                                                   
                                                                           return wds.pipeline.DataPipeline(
                                                                  @@ -771,30 +777,33 @@ def ordered( self,
                                                                               _StreamOpenerStage(self._source),
                                                                               wds.tariterators.tar_file_expander,
                                                                               wds.tariterators.group_by_keys,
                                                                  -            wds.filters.batched( batch_size ),
                                                                  -            wds.filters.map( self.wrap_batch ),
                                                                  +            wds.filters.batched(batch_size),
                                                                  +            wds.filters.map(self.wrap_batch),
                                                                           )
                                                                   
                                                                       @overload
                                                                  -    def shuffled( self,
                                                                  -                buffer_shards: int = 100,
                                                                  -                buffer_samples: int = 10_000,
                                                                  -                batch_size: None = None,
                                                                  -            ) -> Iterable[ST]: ...
                                                                  +    def shuffled(
                                                                  +        self,
                                                                  +        buffer_shards: int = 100,
                                                                  +        buffer_samples: int = 10_000,
                                                                  +        batch_size: None = None,
                                                                  +    ) -> Iterable[ST]: ...
                                                                   
                                                                       @overload
                                                                  -    def shuffled( self,
                                                                  -                buffer_shards: int = 100,
                                                                  -                buffer_samples: int = 10_000,
                                                                  -                *,
                                                                  -                batch_size: int,
                                                                  -            ) -> Iterable[SampleBatch[ST]]: ...
                                                                  -
                                                                  -    def shuffled( self,
                                                                  -                buffer_shards: int = 100,
                                                                  -                buffer_samples: int = 10_000,
                                                                  -                batch_size: int | None = None,
                                                                  -            ) -> Iterable[ST] | Iterable[SampleBatch[ST]]:
                                                                  +    def shuffled(
                                                                  +        self,
                                                                  +        buffer_shards: int = 100,
                                                                  +        buffer_samples: int = 10_000,
                                                                  +        *,
                                                                  +        batch_size: int,
                                                                  +    ) -> Iterable[SampleBatch[ST]]: ...
                                                                  +
                                                                  +    def shuffled(
                                                                  +        self,
                                                                  +        buffer_shards: int = 100,
                                                                  +        buffer_samples: int = 10_000,
                                                                  +        batch_size: int | None = None,
                                                                  +    ) -> Iterable[ST] | Iterable[SampleBatch[ST]]:
                                                                           """Iterate over the dataset in random order.
                                                                   
                                                                           Args:
                                                                  @@ -823,34 +832,36 @@ def shuffled( self,
                                                                           if batch_size is None:
                                                                               return wds.pipeline.DataPipeline(
                                                                                   _ShardListStage(self._source),
                                                                  -                wds.filters.shuffle( buffer_shards ),
                                                                  +                wds.filters.shuffle(buffer_shards),
                                                                                   wds.shardlists.split_by_worker,
                                                                                   _StreamOpenerStage(self._source),
                                                                                   wds.tariterators.tar_file_expander,
                                                                                   wds.tariterators.group_by_keys,
                                                                  -                wds.filters.shuffle( buffer_samples ),
                                                                  -                wds.filters.map( self.wrap ),
                                                                  +                wds.filters.shuffle(buffer_samples),
                                                                  +                wds.filters.map(self.wrap),
                                                                               )
                                                                   
                                                                           return wds.pipeline.DataPipeline(
                                                                               _ShardListStage(self._source),
                                                                  -            wds.filters.shuffle( buffer_shards ),
                                                                  +            wds.filters.shuffle(buffer_shards),
                                                                               wds.shardlists.split_by_worker,
                                                                               _StreamOpenerStage(self._source),
                                                                               wds.tariterators.tar_file_expander,
                                                                               wds.tariterators.group_by_keys,
                                                                  -            wds.filters.shuffle( buffer_samples ),
                                                                  -            wds.filters.batched( batch_size ),
                                                                  -            wds.filters.map( self.wrap_batch ),
                                                                  +            wds.filters.shuffle(buffer_samples),
                                                                  +            wds.filters.batched(batch_size),
                                                                  +            wds.filters.map(self.wrap_batch),
                                                                           )
                                                                  -    
                                                                  +
                                                                       # Design note: Uses pandas for parquet export. Could be replaced with
                                                                       # direct fastparquet calls to reduce dependencies if needed.
                                                                  -    def to_parquet( self, path: Pathlike,
                                                                  -                sample_map: Optional[SampleExportMap] = None,
                                                                  -                maxcount: Optional[int] = None,
                                                                  -                **kwargs,
                                                                  -            ):
                                                                  +    def to_parquet(
                                                                  +        self,
                                                                  +        path: Pathlike,
                                                                  +        sample_map: Optional[SampleExportMap] = None,
                                                                  +        maxcount: Optional[int] = None,
                                                                  +        **kwargs,
                                                                  +    ):
                                                                           """Export dataset contents to parquet format.
                                                                   
                                                                           Converts all samples to a pandas DataFrame and saves to parquet file(s).
                                                                  @@ -890,50 +901,51 @@ def to_parquet( self, path: Pathlike,
                                                                           ##
                                                                   
                                                                           # Normalize args
                                                                  -        path = Path( path )
                                                                  +        path = Path(path)
                                                                           if sample_map is None:
                                                                               sample_map = asdict
                                                                  -        
                                                                  -        verbose = kwargs.get( 'verbose', False )
                                                                   
                                                                  -        it = self.ordered( batch_size = None )
                                                                  +        verbose = kwargs.get("verbose", False)
                                                                  +
                                                                  +        it = self.ordered(batch_size=None)
                                                                           if verbose:
                                                                  -            it = tqdm( it )
                                                                  +            it = tqdm(it)
                                                                   
                                                                           #
                                                                   
                                                                           if maxcount is None:
                                                                               # Load and save full dataset
                                                                  -            df = pd.DataFrame( [ sample_map( x )
                                                                  -                                 for x in self.ordered( batch_size = None ) ] )
                                                                  -            df.to_parquet( path, **kwargs )
                                                                  -        
                                                                  +            df = pd.DataFrame([sample_map(x) for x in self.ordered(batch_size=None)])
                                                                  +            df.to_parquet(path, **kwargs)
                                                                  +
                                                                           else:
                                                                               # Load and save dataset in segments of size `maxcount`
                                                                   
                                                                               cur_segment = 0
                                                                               cur_buffer = []
                                                                  -            path_template = (path.parent / f'{path.stem}-{{:06d}}{path.suffix}').as_posix()
                                                                  +            path_template = (
                                                                  +                path.parent / f"{path.stem}-{{:06d}}{path.suffix}"
                                                                  +            ).as_posix()
                                                                   
                                                                  -            for x in self.ordered( batch_size = None ):
                                                                  -                cur_buffer.append( sample_map( x ) )
                                                                  +            for x in self.ordered(batch_size=None):
                                                                  +                cur_buffer.append(sample_map(x))
                                                                   
                                                                  -                if len( cur_buffer ) >= maxcount:
                                                                  +                if len(cur_buffer) >= maxcount:
                                                                                       # Write current segment
                                                                  -                    cur_path = path_template.format( cur_segment )
                                                                  -                    df = pd.DataFrame( cur_buffer )
                                                                  -                    df.to_parquet( cur_path, **kwargs )
                                                                  +                    cur_path = path_template.format(cur_segment)
                                                                  +                    df = pd.DataFrame(cur_buffer)
                                                                  +                    df.to_parquet(cur_path, **kwargs)
                                                                   
                                                                                       cur_segment += 1
                                                                                       cur_buffer = []
                                                                  -                
                                                                  -            if len( cur_buffer ) > 0:
                                                                  +
                                                                  +            if len(cur_buffer) > 0:
                                                                                   # Write one last segment with remainder
                                                                  -                cur_path = path_template.format( cur_segment )
                                                                  -                df = pd.DataFrame( cur_buffer )
                                                                  -                df.to_parquet( cur_path, **kwargs )
                                                                  +                cur_path = path_template.format(cur_segment)
                                                                  +                df = pd.DataFrame(cur_buffer)
                                                                  +                df.to_parquet(cur_path, **kwargs)
                                                                   
                                                                  -    def wrap( self, sample: WDSRawSample ) -> ST:
                                                                  +    def wrap(self, sample: WDSRawSample) -> ST:
                                                                           """Wrap a raw msgpack sample into the appropriate dataset-specific type.
                                                                   
                                                                           Args:
                                                                  @@ -944,18 +956,22 @@ def wrap( self, sample: WDSRawSample ) -> ST:
                                                                               A deserialized sample of type ``ST``, optionally transformed through
                                                                               a lens if ``as_type()`` was called.
                                                                           """
                                                                  -        if 'msgpack' not in sample:
                                                                  -            raise ValueError(f"Sample missing 'msgpack' key, got keys: {list(sample.keys())}")
                                                                  -        if not isinstance(sample['msgpack'], bytes):
                                                                  -            raise ValueError(f"Expected sample['msgpack'] to be bytes, got {type(sample['msgpack']).__name__}")
                                                                  +        if "msgpack" not in sample:
                                                                  +            raise ValueError(
                                                                  +                f"Sample missing 'msgpack' key, got keys: {list(sample.keys())}"
                                                                  +            )
                                                                  +        if not isinstance(sample["msgpack"], bytes):
                                                                  +            raise ValueError(
                                                                  +                f"Expected sample['msgpack'] to be bytes, got {type(sample['msgpack']).__name__}"
                                                                  +            )
                                                                   
                                                                           if self._output_lens is None:
                                                                  -            return self.sample_type.from_bytes( sample['msgpack'] )
                                                                  +            return self.sample_type.from_bytes(sample["msgpack"])
                                                                   
                                                                  -        source_sample = self._output_lens.source_type.from_bytes( sample['msgpack'] )
                                                                  -        return self._output_lens( source_sample )
                                                                  +        source_sample = self._output_lens.source_type.from_bytes(sample["msgpack"])
                                                                  +        return self._output_lens(source_sample)
                                                                   
                                                                  -    def wrap_batch( self, batch: WDSRawBatch ) -> SampleBatch[ST]:
                                                                  +    def wrap_batch(self, batch: WDSRawBatch) -> SampleBatch[ST]:
                                                                           """Wrap a batch of raw msgpack samples into a typed SampleBatch.
                                                                   
                                                                           Args:
                                                                  @@ -971,26 +987,29 @@ def wrap_batch( self, batch: WDSRawBatch ) -> SampleBatch[ST]:
                                                                               aggregates them into a batch.
                                                                           """
                                                                   
                                                                  -        if 'msgpack' not in batch:
                                                                  -            raise ValueError(f"Batch missing 'msgpack' key, got keys: {list(batch.keys())}")
                                                                  +        if "msgpack" not in batch:
                                                                  +            raise ValueError(
                                                                  +                f"Batch missing 'msgpack' key, got keys: {list(batch.keys())}"
                                                                  +            )
                                                                   
                                                                           if self._output_lens is None:
                                                                  -            batch_unpacked = [ self.sample_type.from_bytes( bs )
                                                                  -                               for bs in batch['msgpack'] ]
                                                                  -            return SampleBatch[self.sample_type]( batch_unpacked )
                                                                  +            batch_unpacked = [
                                                                  +                self.sample_type.from_bytes(bs) for bs in batch["msgpack"]
                                                                  +            ]
                                                                  +            return SampleBatch[self.sample_type](batch_unpacked)
                                                                   
                                                                  -        batch_source = [ self._output_lens.source_type.from_bytes( bs )
                                                                  -                         for bs in batch['msgpack'] ]
                                                                  -        batch_view = [ self._output_lens( s )
                                                                  -                       for s in batch_source ]
                                                                  -        return SampleBatch[self.sample_type]( batch_view )
                                                                  +        batch_source = [
                                                                  +            self._output_lens.source_type.from_bytes(bs) for bs in batch["msgpack"]
                                                                  +        ]
                                                                  +        batch_view = [self._output_lens(s) for s in batch_source]
                                                                  +        return SampleBatch[self.sample_type](batch_view)
                                                                   
                                                                   
                                                                  -_T = TypeVar('_T')
                                                                  +_T = TypeVar("_T")
                                                                   
                                                                   
                                                                   @dataclass_transform()
                                                                  -def packable( cls: type[_T] ) -> type[_T]:
                                                                  +def packable(cls: type[_T]) -> type[_T]:
                                                                       """Decorator to convert a regular class into a ``PackableSample``.
                                                                   
                                                                       This decorator transforms a class into a dataclass that inherits from
                                                                  @@ -1029,14 +1048,14 @@ def packable( cls: type[_T] ) -> type[_T]:
                                                                       class_annotations = cls.__annotations__
                                                                   
                                                                       # Add in dataclass niceness to original class
                                                                  -    as_dataclass = dataclass( cls )
                                                                  +    as_dataclass = dataclass(cls)
                                                                   
                                                                       # This triggers a bunch of behind-the-scenes stuff for the newly annotated class
                                                                       @dataclass
                                                                  -    class as_packable( as_dataclass, PackableSample ):
                                                                  -        def __post_init__( self ):
                                                                  -            return PackableSample.__post_init__( self )
                                                                  -    
                                                                  +    class as_packable(as_dataclass, PackableSample):
                                                                  +        def __post_init__(self):
                                                                  +            return PackableSample.__post_init__(self)
                                                                  +
                                                                       # Restore original class identity for better repr/debugging
                                                                       as_packable.__name__ = class_name
                                                                       as_packable.__qualname__ = class_name
                                                                  @@ -1047,10 +1066,10 @@ def __post_init__( self ):
                                                                   
                                                                       # Fix qualnames of dataclass-generated methods so they don't show
                                                                       # 'packable..as_packable' in help() and IDE hints
                                                                  -    old_qualname_prefix = 'packable..as_packable'
                                                                  -    for attr_name in ('__init__', '__repr__', '__eq__', '__post_init__'):
                                                                  +    old_qualname_prefix = "packable..as_packable"
                                                                  +    for attr_name in ("__init__", "__repr__", "__eq__", "__post_init__"):
                                                                           attr = getattr(as_packable, attr_name, None)
                                                                  -        if attr is not None and hasattr(attr, '__qualname__'):
                                                                  +        if attr is not None and hasattr(attr, "__qualname__"):
                                                                               if attr.__qualname__.startswith(old_qualname_prefix):
                                                                                   attr.__qualname__ = attr.__qualname__.replace(
                                                                                       old_qualname_prefix, class_name, 1
                                                                  @@ -1066,4 +1085,4 @@ def _dict_to_typed(ds: DictSample) -> as_packable:
                                                                   
                                                                       ##
                                                                   
                                                                  -    return as_packable
                                                                  \ No newline at end of file
                                                                  +    return as_packable
                                                                  diff --git a/src/atdata/lens.py b/src/atdata/lens.py
                                                                  index fe1974c..97747ba 100644
                                                                  --- a/src/atdata/lens.py
                                                                  +++ b/src/atdata/lens.py
                                                                  @@ -54,7 +54,7 @@
                                                                       Optional,
                                                                       Generic,
                                                                       #
                                                                  -    TYPE_CHECKING
                                                                  +    TYPE_CHECKING,
                                                                   )
                                                                   
                                                                   if TYPE_CHECKING:
                                                                  @@ -66,11 +66,11 @@
                                                                   ##
                                                                   # Typing helpers
                                                                   
                                                                  -DatasetType: TypeAlias = Type['PackableSample']
                                                                  +DatasetType: TypeAlias = Type["PackableSample"]
                                                                   LensSignature: TypeAlias = Tuple[DatasetType, DatasetType]
                                                                   
                                                                  -S = TypeVar( 'S', bound = Packable )
                                                                  -V = TypeVar( 'V', bound = Packable )
                                                                  +S = TypeVar("S", bound=Packable)
                                                                  +V = TypeVar("V", bound=Packable)
                                                                   type LensGetter[S, V] = Callable[[S], V]
                                                                   type LensPutter[S, V] = Callable[[V, S], S]
                                                                   
                                                                  @@ -78,7 +78,8 @@
                                                                   ##
                                                                   # Shortcut decorators
                                                                   
                                                                  -class Lens( Generic[S, V] ):
                                                                  +
                                                                  +class Lens(Generic[S, V]):
                                                                       """A bidirectional transformation between two sample types.
                                                                   
                                                                       A lens provides a way to view and update data of type ``S`` (source) as if
                                                                  @@ -99,11 +100,12 @@ class Lens( Generic[S, V] ):
                                                                           ... def name_lens_put(view: NameOnly, source: FullData) -> FullData:
                                                                           ...     return FullData(name=view.name, age=source.age)
                                                                       """
                                                                  +
                                                                       # TODO The above has a line for "Parameters:" that should be "Type Parameters:"; this is a temporary fix for `quartodoc` auto-generation bugs.
                                                                   
                                                                  -    def __init__( self, get: LensGetter[S, V],
                                                                  -                put: Optional[LensPutter[S, V]] = None
                                                                  -            ) -> None:
                                                                  +    def __init__(
                                                                  +        self, get: LensGetter[S, V], put: Optional[LensPutter[S, V]] = None
                                                                  +    ) -> None:
                                                                           """Initialize a lens with a getter and optional putter function.
                                                                   
                                                                           Args:
                                                                  @@ -122,8 +124,8 @@ def __init__( self, get: LensGetter[S, V],
                                                                   
                                                                           # Check argument validity
                                                                   
                                                                  -        sig = inspect.signature( get )
                                                                  -        input_types = list( sig.parameters.values() )
                                                                  +        sig = inspect.signature(get)
                                                                  +        input_types = list(sig.parameters.values())
                                                                           if len(input_types) != 1:
                                                                               raise ValueError(
                                                                                   f"Lens getter must have exactly one parameter, got {len(input_types)}: "
                                                                  @@ -131,7 +133,7 @@ def __init__( self, get: LensGetter[S, V],
                                                                               )
                                                                   
                                                                           # Update function details for this object as returned by annotation
                                                                  -        functools.update_wrapper( self, get )
                                                                  +        functools.update_wrapper(self, get)
                                                                   
                                                                           self.source_type: Type[Packable] = input_types[0].annotation
                                                                           self.view_type: Type[Packable] = sig.return_annotation
                                                                  @@ -142,14 +144,15 @@ def __init__( self, get: LensGetter[S, V],
                                                                           # Determine and store the putter
                                                                           if put is None:
                                                                               # Trivial putter does not update the source
                                                                  -            def _trivial_put( v: V, s: S ) -> S:
                                                                  +            def _trivial_put(v: V, s: S) -> S:
                                                                                   return s
                                                                  +
                                                                               put = _trivial_put
                                                                           self._putter = put
                                                                  -    
                                                                  +
                                                                       #
                                                                   
                                                                  -    def putter( self, put: LensPutter[S, V] ) -> LensPutter[S, V]:
                                                                  +    def putter(self, put: LensPutter[S, V]) -> LensPutter[S, V]:
                                                                           """Decorator to register a putter function for this lens.
                                                                   
                                                                           Args:
                                                                  @@ -167,10 +170,10 @@ def putter( self, put: LensPutter[S, V] ) -> LensPutter[S, V]:
                                                                           ##
                                                                           self._putter = put
                                                                           return put
                                                                  -    
                                                                  +
                                                                       # Methods to actually execute transformations
                                                                   
                                                                  -    def put( self, v: V, s: S ) -> S:
                                                                  +    def put(self, v: V, s: S) -> S:
                                                                           """Update the source based on a modified view.
                                                                   
                                                                           Args:
                                                                  @@ -180,9 +183,9 @@ def put( self, v: V, s: S ) -> S:
                                                                           Returns:
                                                                               An updated source of type ``S`` that reflects changes from the view.
                                                                           """
                                                                  -        return self._putter( v, s )
                                                                  +        return self._putter(v, s)
                                                                   
                                                                  -    def get( self, s: S ) -> V:
                                                                  +    def get(self, s: S) -> V:
                                                                           """Transform the source into the view type.
                                                                   
                                                                           Args:
                                                                  @@ -191,14 +194,14 @@ def get( self, s: S ) -> V:
                                                                           Returns:
                                                                               A view of the source as type ``V``.
                                                                           """
                                                                  -        return self( s )
                                                                  +        return self(s)
                                                                   
                                                                  -    def __call__( self, s: S ) -> V:
                                                                  +    def __call__(self, s: S) -> V:
                                                                           """Apply the lens transformation (same as ``get()``)."""
                                                                  -        return self._getter( s )
                                                                  +        return self._getter(s)
                                                                   
                                                                   
                                                                  -def lens(  f: LensGetter[S, V] ) -> Lens[S, V]:
                                                                  +def lens(f: LensGetter[S, V]) -> Lens[S, V]:
                                                                       """Decorator to create and register a lens transformation.
                                                                   
                                                                       This decorator converts a getter function into a ``Lens`` object and
                                                                  @@ -221,8 +224,8 @@ def lens(  f: LensGetter[S, V] ) -> Lens[S, V]:
                                                                           ... def extract_name_put(view: NameOnly, source: FullData) -> FullData:
                                                                           ...     return FullData(name=view.name, age=source.age)
                                                                       """
                                                                  -    ret = Lens[S, V]( f )
                                                                  -    _network.register( ret )
                                                                  +    ret = Lens[S, V](f)
                                                                  +    _network.register(ret)
                                                                       return ret
                                                                   
                                                                   
                                                                  @@ -251,11 +254,11 @@ def __new__(cls, *args, **kwargs):
                                                                   
                                                                       def __init__(self):
                                                                           """Initialize the lens registry (only on first instantiation)."""
                                                                  -        if not hasattr(self, '_initialized'):  # Check if already initialized
                                                                  +        if not hasattr(self, "_initialized"):  # Check if already initialized
                                                                               self._registry: Dict[LensSignature, Lens] = dict()
                                                                               self._initialized = True
                                                                  -    
                                                                  -    def register( self, _lens: Lens ):
                                                                  +
                                                                  +    def register(self, _lens: Lens):
                                                                           """Register a lens as the canonical transformation between two types.
                                                                   
                                                                           Args:
                                                                  @@ -267,8 +270,8 @@ def register( self, _lens: Lens ):
                                                                               overwritten.
                                                                           """
                                                                           self._registry[_lens.source_type, _lens.view_type] = _lens
                                                                  -    
                                                                  -    def transform( self, source: DatasetType, view: DatasetType ) -> Lens:
                                                                  +
                                                                  +    def transform(self, source: DatasetType, view: DatasetType) -> Lens:
                                                                           """Look up the lens transformation between two sample types.
                                                                   
                                                                           Args:
                                                                  @@ -285,12 +288,12 @@ def transform( self, source: DatasetType, view: DatasetType ) -> Lens:
                                                                               Currently only supports direct transformations. Compositional
                                                                               transformations (chaining multiple lenses) are not yet implemented.
                                                                           """
                                                                  -        ret = self._registry.get( (source, view), None )
                                                                  +        ret = self._registry.get((source, view), None)
                                                                           if ret is None:
                                                                  -            raise ValueError( f'No registered lens from source {source} to view {view}' )
                                                                  +            raise ValueError(f"No registered lens from source {source} to view {view}")
                                                                   
                                                                           return ret
                                                                   
                                                                   
                                                                   # Global singleton registry instance
                                                                  -_network = LensNetwork()
                                                                  \ No newline at end of file
                                                                  +_network = LensNetwork()
                                                                  diff --git a/src/atdata/local.py b/src/atdata/local.py
                                                                  index ae50629..fec2a8d 100644
                                                                  --- a/src/atdata/local.py
                                                                  +++ b/src/atdata/local.py
                                                                  @@ -24,13 +24,12 @@
                                                                   )
                                                                   from atdata._cid import generate_cid
                                                                   from atdata._type_utils import (
                                                                  -    numpy_dtype_to_string,
                                                                       PRIMITIVE_TYPE_MAP,
                                                                       unwrap_optional,
                                                                       is_ndarray_type,
                                                                       extract_ndarray_dtype,
                                                                   )
                                                                  -from atdata._protocols import IndexEntry, AbstractDataStore, Packable
                                                                  +from atdata._protocols import AbstractDataStore, Packable
                                                                   
                                                                   from pathlib import Path
                                                                   from uuid import uuid4
                                                                  @@ -57,7 +56,6 @@
                                                                       Generator,
                                                                       Iterator,
                                                                       BinaryIO,
                                                                  -    Union,
                                                                       Optional,
                                                                       Literal,
                                                                       cast,
                                                                  @@ -70,7 +68,7 @@
                                                                   import json
                                                                   import warnings
                                                                   
                                                                  -T = TypeVar( 'T', bound = PackableSample )
                                                                  +T = TypeVar("T", bound=PackableSample)
                                                                   
                                                                   # Redis key prefixes for index entries and schemas
                                                                   REDIS_KEY_DATASET_ENTRY = "LocalDatasetEntry"
                                                                  @@ -355,9 +353,10 @@ def get(self, key: str, default: Any = None) -> Any:
                                                                   ##
                                                                   # Helpers
                                                                   
                                                                  -def _kind_str_for_sample_type( st: Type[Packable] ) -> str:
                                                                  +
                                                                  +def _kind_str_for_sample_type(st: Type[Packable]) -> str:
                                                                       """Return fully-qualified 'module.name' string for a sample type."""
                                                                  -    return f'{st.__module__}.{st.__name__}'
                                                                  +    return f"{st.__module__}.{st.__name__}"
                                                                   
                                                                   
                                                                   def _create_s3_write_callbacks(
                                                                  @@ -385,17 +384,17 @@ def _create_s3_write_callbacks(
                                                                           import boto3
                                                                   
                                                                           s3_client_kwargs = {
                                                                  -            'aws_access_key_id': credentials['AWS_ACCESS_KEY_ID'],
                                                                  -            'aws_secret_access_key': credentials['AWS_SECRET_ACCESS_KEY']
                                                                  +            "aws_access_key_id": credentials["AWS_ACCESS_KEY_ID"],
                                                                  +            "aws_secret_access_key": credentials["AWS_SECRET_ACCESS_KEY"],
                                                                           }
                                                                  -        if 'AWS_ENDPOINT' in credentials:
                                                                  -            s3_client_kwargs['endpoint_url'] = credentials['AWS_ENDPOINT']
                                                                  -        s3_client = boto3.client('s3', **s3_client_kwargs)
                                                                  +        if "AWS_ENDPOINT" in credentials:
                                                                  +            s3_client_kwargs["endpoint_url"] = credentials["AWS_ENDPOINT"]
                                                                  +        s3_client = boto3.client("s3", **s3_client_kwargs)
                                                                   
                                                                           def _writer_opener(p: str):
                                                                               local_path = Path(temp_dir) / p
                                                                               local_path.parent.mkdir(parents=True, exist_ok=True)
                                                                  -            return open(local_path, 'wb')
                                                                  +            return open(local_path, "wb")
                                                                   
                                                                           def _writer_post(p: str):
                                                                               local_path = Path(temp_dir) / p
                                                                  @@ -403,7 +402,7 @@ def _writer_post(p: str):
                                                                               bucket = path_parts[0]
                                                                               key = str(Path(*path_parts[1:]))
                                                                   
                                                                  -            with open(local_path, 'rb') as f_in:
                                                                  +            with open(local_path, "rb") as f_in:
                                                                                   s3_client.put_object(Bucket=bucket, Key=key, Body=f_in.read())
                                                                   
                                                                               local_path.unlink()
                                                                  @@ -417,7 +416,7 @@ def _writer_post(p: str):
                                                                           assert fs is not None, "S3FileSystem required when cache_local=False"
                                                                   
                                                                           def _direct_opener(s: str):
                                                                  -            return cast(BinaryIO, fs.open(f's3://{s}', 'wb'))
                                                                  +            return cast(BinaryIO, fs.open(f"s3://{s}", "wb"))
                                                                   
                                                                           def _direct_post(s: str):
                                                                               if add_s3_prefix:
                                                                  @@ -427,6 +426,7 @@ def _direct_post(s: str):
                                                                   
                                                                           return _direct_opener, _direct_post
                                                                   
                                                                  +
                                                                   ##
                                                                   # Schema helpers
                                                                   
                                                                  @@ -452,9 +452,9 @@ def _parse_schema_ref(ref: str) -> tuple[str, str]:
                                                                       and legacy format: 'local://schemas/{module.Class}@{version}'
                                                                       """
                                                                       if ref.startswith(_ATDATA_URI_PREFIX):
                                                                  -        path = ref[len(_ATDATA_URI_PREFIX):]
                                                                  +        path = ref[len(_ATDATA_URI_PREFIX) :]
                                                                       elif ref.startswith(_LEGACY_URI_PREFIX):
                                                                  -        path = ref[len(_LEGACY_URI_PREFIX):]
                                                                  +        path = ref[len(_LEGACY_URI_PREFIX) :]
                                                                       else:
                                                                           raise ValueError(f"Invalid schema reference: {ref}")
                                                                   
                                                                  @@ -485,7 +485,10 @@ def _increment_patch(version: str) -> str:
                                                                   def _python_type_to_field_type(python_type: Any) -> dict:
                                                                       """Convert Python type annotation to schema field type dict."""
                                                                       if python_type in PRIMITIVE_TYPE_MAP:
                                                                  -        return {"$type": "local#primitive", "primitive": PRIMITIVE_TYPE_MAP[python_type]}
                                                                  +        return {
                                                                  +            "$type": "local#primitive",
                                                                  +            "primitive": PRIMITIVE_TYPE_MAP[python_type],
                                                                  +        }
                                                                   
                                                                       if is_ndarray_type(python_type):
                                                                           return {"$type": "local#ndarray", "dtype": extract_ndarray_dtype(python_type)}
                                                                  @@ -493,7 +496,11 @@ def _python_type_to_field_type(python_type: Any) -> dict:
                                                                       origin = get_origin(python_type)
                                                                       if origin is list:
                                                                           args = get_args(python_type)
                                                                  -        items = _python_type_to_field_type(args[0]) if args else {"$type": "local#primitive", "primitive": "str"}
                                                                  +        items = (
                                                                  +            _python_type_to_field_type(args[0])
                                                                  +            if args
                                                                  +            else {"$type": "local#primitive", "primitive": "str"}
                                                                  +        )
                                                                           return {"$type": "local#array", "items": items}
                                                                   
                                                                       if is_dataclass(python_type):
                                                                  @@ -541,11 +548,13 @@ class docstring.
                                                                           field_type, is_optional = unwrap_optional(field_type)
                                                                           field_type_dict = _python_type_to_field_type(field_type)
                                                                   
                                                                  -        field_defs.append({
                                                                  -            "name": f.name,
                                                                  -            "fieldType": field_type_dict,
                                                                  -            "optional": is_optional,
                                                                  -        })
                                                                  +        field_defs.append(
                                                                  +            {
                                                                  +                "name": f.name,
                                                                  +                "fieldType": field_type_dict,
                                                                  +                "optional": is_optional,
                                                                  +            }
                                                                  +        )
                                                                   
                                                                       return {
                                                                           "name": sample_type.__name__,
                                                                  @@ -559,6 +568,7 @@ class docstring.
                                                                   ##
                                                                   # Redis object model
                                                                   
                                                                  +
                                                                   @dataclass
                                                                   class LocalDatasetEntry:
                                                                       """Index entry for a dataset stored in the local repository.
                                                                  @@ -577,6 +587,7 @@ class LocalDatasetEntry:
                                                                           data_urls: WebDataset URLs for the data.
                                                                           metadata: Arbitrary metadata dictionary, or None if not set.
                                                                       """
                                                                  +
                                                                       ##
                                                                   
                                                                       name: str
                                                                  @@ -638,17 +649,17 @@ def write_to(self, redis: Redis):
                                                                           Args:
                                                                               redis: Redis connection to write to.
                                                                           """
                                                                  -        save_key = f'{REDIS_KEY_DATASET_ENTRY}:{self.cid}'
                                                                  +        save_key = f"{REDIS_KEY_DATASET_ENTRY}:{self.cid}"
                                                                           data = {
                                                                  -            'name': self.name,
                                                                  -            'schema_ref': self.schema_ref,
                                                                  -            'data_urls': msgpack.packb(self.data_urls),  # Serialize list
                                                                  -            'cid': self.cid,
                                                                  +            "name": self.name,
                                                                  +            "schema_ref": self.schema_ref,
                                                                  +            "data_urls": msgpack.packb(self.data_urls),  # Serialize list
                                                                  +            "cid": self.cid,
                                                                           }
                                                                           if self.metadata is not None:
                                                                  -            data['metadata'] = msgpack.packb(self.metadata)
                                                                  +            data["metadata"] = msgpack.packb(self.metadata)
                                                                           if self._legacy_uuid is not None:
                                                                  -            data['legacy_uuid'] = self._legacy_uuid
                                                                  +            data["legacy_uuid"] = self._legacy_uuid
                                                                   
                                                                           redis.hset(save_key, mapping=data)  # type: ignore[arg-type]
                                                                   
                                                                  @@ -666,23 +677,23 @@ def from_redis(cls, redis: Redis, cid: str) -> "LocalDatasetEntry":
                                                                           Raises:
                                                                               KeyError: If entry not found.
                                                                           """
                                                                  -        save_key = f'{REDIS_KEY_DATASET_ENTRY}:{cid}'
                                                                  +        save_key = f"{REDIS_KEY_DATASET_ENTRY}:{cid}"
                                                                           raw_data = redis.hgetall(save_key)
                                                                           if not raw_data:
                                                                               raise KeyError(f"{REDIS_KEY_DATASET_ENTRY} not found: {cid}")
                                                                   
                                                                           # Decode string fields, keep binary fields as bytes for msgpack
                                                                           raw_data_typed = cast(dict[bytes, bytes], raw_data)
                                                                  -        name = raw_data_typed[b'name'].decode('utf-8')
                                                                  -        schema_ref = raw_data_typed[b'schema_ref'].decode('utf-8')
                                                                  -        cid_value = raw_data_typed.get(b'cid', b'').decode('utf-8') or None
                                                                  -        legacy_uuid = raw_data_typed.get(b'legacy_uuid', b'').decode('utf-8') or None
                                                                  +        name = raw_data_typed[b"name"].decode("utf-8")
                                                                  +        schema_ref = raw_data_typed[b"schema_ref"].decode("utf-8")
                                                                  +        cid_value = raw_data_typed.get(b"cid", b"").decode("utf-8") or None
                                                                  +        legacy_uuid = raw_data_typed.get(b"legacy_uuid", b"").decode("utf-8") or None
                                                                   
                                                                           # Deserialize msgpack fields (stored as raw bytes)
                                                                  -        data_urls = msgpack.unpackb(raw_data_typed[b'data_urls'])
                                                                  +        data_urls = msgpack.unpackb(raw_data_typed[b"data_urls"])
                                                                           metadata = None
                                                                  -        if b'metadata' in raw_data_typed:
                                                                  -            metadata = msgpack.unpackb(raw_data_typed[b'metadata'])
                                                                  +        if b"metadata" in raw_data_typed:
                                                                  +            metadata = msgpack.unpackb(raw_data_typed[b"metadata"])
                                                                   
                                                                           return cls(
                                                                               name=name,
                                                                  @@ -697,7 +708,8 @@ def from_redis(cls, redis: Redis, cid: str) -> "LocalDatasetEntry":
                                                                   # Backwards compatibility alias
                                                                   BasicIndexEntry = LocalDatasetEntry
                                                                   
                                                                  -def _s3_env( credentials_path: str | Path ) -> dict[str, Any]:
                                                                  +
                                                                  +def _s3_env(credentials_path: str | Path) -> dict[str, Any]:
                                                                       """Load S3 credentials from .env file.
                                                                   
                                                                       Args:
                                                                  @@ -710,28 +722,31 @@ def _s3_env( credentials_path: str | Path ) -> dict[str, Any]:
                                                                       Raises:
                                                                           ValueError: If any required key is missing from the .env file.
                                                                       """
                                                                  -    credentials_path = Path( credentials_path )
                                                                  -    env_values = dotenv_values( credentials_path )
                                                                  +    credentials_path = Path(credentials_path)
                                                                  +    env_values = dotenv_values(credentials_path)
                                                                   
                                                                  -    required_keys = ('AWS_ENDPOINT', 'AWS_ACCESS_KEY_ID', 'AWS_SECRET_ACCESS_KEY')
                                                                  +    required_keys = ("AWS_ENDPOINT", "AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY")
                                                                       missing = [k for k in required_keys if k not in env_values]
                                                                       if missing:
                                                                  -        raise ValueError(f"Missing required keys in {credentials_path}: {', '.join(missing)}")
                                                                  +        raise ValueError(
                                                                  +            f"Missing required keys in {credentials_path}: {', '.join(missing)}"
                                                                  +        )
                                                                   
                                                                       return {k: env_values[k] for k in required_keys}
                                                                   
                                                                  -def _s3_from_credentials( creds: str | Path | dict ) -> S3FileSystem:
                                                                  +
                                                                  +def _s3_from_credentials(creds: str | Path | dict) -> S3FileSystem:
                                                                       """Create S3FileSystem from credentials dict or .env file path."""
                                                                  -    if not isinstance( creds, dict ):
                                                                  -        creds = _s3_env( creds )
                                                                  +    if not isinstance(creds, dict):
                                                                  +        creds = _s3_env(creds)
                                                                   
                                                                       # Build kwargs, making endpoint_url optional
                                                                       kwargs = {
                                                                  -        'key': creds['AWS_ACCESS_KEY_ID'],
                                                                  -        'secret': creds['AWS_SECRET_ACCESS_KEY']
                                                                  +        "key": creds["AWS_ACCESS_KEY_ID"],
                                                                  +        "secret": creds["AWS_SECRET_ACCESS_KEY"],
                                                                       }
                                                                  -    if 'AWS_ENDPOINT' in creds:
                                                                  -        kwargs['endpoint_url'] = creds['AWS_ENDPOINT']
                                                                  +    if "AWS_ENDPOINT" in creds:
                                                                  +        kwargs["endpoint_url"] = creds["AWS_ENDPOINT"]
                                                                   
                                                                       return S3FileSystem(**kwargs)
                                                                   
                                                                  @@ -739,6 +754,7 @@ def _s3_from_credentials( creds: str | Path | dict ) -> S3FileSystem:
                                                                   ##
                                                                   # Classes
                                                                   
                                                                  +
                                                                   class Repo:
                                                                       """Repository for storing and managing atdata datasets.
                                                                   
                                                                  @@ -795,20 +811,20 @@ def __init__(
                                                                   
                                                                           if s3_credentials is None:
                                                                               self.s3_credentials = None
                                                                  -        elif isinstance( s3_credentials, dict ):
                                                                  +        elif isinstance(s3_credentials, dict):
                                                                               self.s3_credentials = s3_credentials
                                                                           else:
                                                                  -            self.s3_credentials = _s3_env( s3_credentials )
                                                                  +            self.s3_credentials = _s3_env(s3_credentials)
                                                                   
                                                                           if self.s3_credentials is None:
                                                                               self.bucket_fs = None
                                                                           else:
                                                                  -            self.bucket_fs = _s3_from_credentials( self.s3_credentials )
                                                                  +            self.bucket_fs = _s3_from_credentials(self.s3_credentials)
                                                                   
                                                                           if self.bucket_fs is not None:
                                                                               if hive_path is None:
                                                                  -                raise ValueError( 'Must specify hive path within bucket' )
                                                                  -            self.hive_path = Path( hive_path )
                                                                  +                raise ValueError("Must specify hive path within bucket")
                                                                  +            self.hive_path = Path(hive_path)
                                                                               self.hive_bucket = self.hive_path.parts[0]
                                                                           else:
                                                                               self.hive_path = None
                                                                  @@ -816,18 +832,19 @@ def __init__(
                                                                   
                                                                           #
                                                                   
                                                                  -        self.index = Index( redis = redis )
                                                                  +        self.index = Index(redis=redis)
                                                                   
                                                                       ##
                                                                   
                                                                  -    def insert(self,
                                                                  -               ds: Dataset[T],
                                                                  -               *,
                                                                  -               name: str,
                                                                  -               cache_local: bool = False,
                                                                  -               schema_ref: str | None = None,
                                                                  -               **kwargs
                                                                  -               ) -> tuple[LocalDatasetEntry, Dataset[T]]:
                                                                  +    def insert(
                                                                  +        self,
                                                                  +        ds: Dataset[T],
                                                                  +        *,
                                                                  +        name: str,
                                                                  +        cache_local: bool = False,
                                                                  +        schema_ref: str | None = None,
                                                                  +        **kwargs,
                                                                  +    ) -> tuple[LocalDatasetEntry, Dataset[T]]:
                                                                           """Insert a dataset into the repository.
                                                                   
                                                                           Writes the dataset to S3 as WebDataset tar files, stores metadata,
                                                                  @@ -851,35 +868,35 @@ def insert(self,
                                                                               RuntimeError: If no shards were written.
                                                                           """
                                                                           if self.s3_credentials is None:
                                                                  -            raise ValueError("S3 credentials required for insert(). Initialize Repo with s3_credentials.")
                                                                  +            raise ValueError(
                                                                  +                "S3 credentials required for insert(). Initialize Repo with s3_credentials."
                                                                  +            )
                                                                           if self.hive_bucket is None or self.hive_path is None:
                                                                  -            raise ValueError("hive_path required for insert(). Initialize Repo with hive_path.")
                                                                  +            raise ValueError(
                                                                  +                "hive_path required for insert(). Initialize Repo with hive_path."
                                                                  +            )
                                                                   
                                                                  -        new_uuid = str( uuid4() )
                                                                  +        new_uuid = str(uuid4())
                                                                   
                                                                  -        hive_fs = _s3_from_credentials( self.s3_credentials )
                                                                  +        hive_fs = _s3_from_credentials(self.s3_credentials)
                                                                   
                                                                           # Write metadata
                                                                           metadata_path = (
                                                                  -            self.hive_path
                                                                  -            / 'metadata'
                                                                  -            / f'atdata-metadata--{new_uuid}.msgpack'
                                                                  +            self.hive_path / "metadata" / f"atdata-metadata--{new_uuid}.msgpack"
                                                                           )
                                                                           # Note: S3 doesn't need directories created beforehand - s3fs handles this
                                                                   
                                                                           if ds.metadata is not None:
                                                                               # Use s3:// prefix to ensure s3fs treats this as an S3 path
                                                                  -            with cast( BinaryIO, hive_fs.open( f's3://{metadata_path.as_posix()}', 'wb' ) ) as f:
                                                                  -                meta_packed = msgpack.packb( ds.metadata )
                                                                  +            with cast(
                                                                  +                BinaryIO, hive_fs.open(f"s3://{metadata_path.as_posix()}", "wb")
                                                                  +            ) as f:
                                                                  +                meta_packed = msgpack.packb(ds.metadata)
                                                                                   assert meta_packed is not None
                                                                  -                f.write( cast( bytes, meta_packed ) )
                                                                  -
                                                                  +                f.write(cast(bytes, meta_packed))
                                                                   
                                                                           # Write data
                                                                  -        shard_pattern = (
                                                                  -            self.hive_path
                                                                  -            / f'atdata--{new_uuid}--%06d.tar'
                                                                  -        ).as_posix()
                                                                  +        shard_pattern = (self.hive_path / f"atdata--{new_uuid}--%06d.tar").as_posix()
                                                                   
                                                                           written_shards: list[str] = []
                                                                           with TemporaryDirectory() as temp_dir:
                                                                  @@ -902,24 +919,22 @@ def insert(self,
                                                                                       sink.write(sample.as_wds)
                                                                   
                                                                           # Make a new Dataset object for the written dataset copy
                                                                  -        if len( written_shards ) == 0:
                                                                  -            raise RuntimeError( 'Cannot form new dataset entry -- did not write any shards' )
                                                                  -        
                                                                  -        elif len( written_shards ) < 2:
                                                                  +        if len(written_shards) == 0:
                                                                  +            raise RuntimeError(
                                                                  +                "Cannot form new dataset entry -- did not write any shards"
                                                                  +            )
                                                                  +
                                                                  +        elif len(written_shards) < 2:
                                                                               new_dataset_url = (
                                                                  -                self.hive_path
                                                                  -                / ( Path( written_shards[0] ).name )
                                                                  +                self.hive_path / (Path(written_shards[0]).name)
                                                                               ).as_posix()
                                                                   
                                                                           else:
                                                                               shard_s3_format = (
                                                                  -                (
                                                                  -                    self.hive_path
                                                                  -                    / f'atdata--{new_uuid}'
                                                                  -                ).as_posix()
                                                                  -            ) + '--{shard_id}.tar'
                                                                  -            shard_id_braced = '{' + f'{0:06d}..{len( written_shards ) - 1:06d}' + '}'
                                                                  -            new_dataset_url = shard_s3_format.format( shard_id = shard_id_braced )
                                                                  +                (self.hive_path / f"atdata--{new_uuid}").as_posix()
                                                                  +            ) + "--{shard_id}.tar"
                                                                  +            shard_id_braced = "{" + f"{0:06d}..{len(written_shards) - 1:06d}" + "}"
                                                                  +            new_dataset_url = shard_s3_format.format(shard_id=shard_id_braced)
                                                                   
                                                                           new_dataset = Dataset[ds.sample_type](
                                                                               url=new_dataset_url,
                                                                  @@ -993,6 +1008,7 @@ def __init__(
                                                                           # Providing stub_dir implies auto_stubs=True
                                                                           if auto_stubs or stub_dir is not None:
                                                                               from ._stub_manager import StubManager
                                                                  +
                                                                               self._stub_manager: StubManager | None = StubManager(stub_dir=stub_dir)
                                                                           else:
                                                                               self._stub_manager = None
                                                                  @@ -1130,19 +1146,20 @@ def entries(self) -> Generator[LocalDatasetEntry, None, None]:
                                                                           Yields:
                                                                               LocalDatasetEntry objects from the index.
                                                                           """
                                                                  -        prefix = f'{REDIS_KEY_DATASET_ENTRY}:'
                                                                  -        for key in self._redis.scan_iter(match=f'{prefix}*'):
                                                                  -            key_str = key.decode('utf-8') if isinstance(key, bytes) else key
                                                                  -            cid = key_str[len(prefix):]
                                                                  +        prefix = f"{REDIS_KEY_DATASET_ENTRY}:"
                                                                  +        for key in self._redis.scan_iter(match=f"{prefix}*"):
                                                                  +            key_str = key.decode("utf-8") if isinstance(key, bytes) else key
                                                                  +            cid = key_str[len(prefix) :]
                                                                               yield LocalDatasetEntry.from_redis(self._redis, cid)
                                                                   
                                                                  -    def add_entry(self,
                                                                  -                  ds: Dataset,
                                                                  -                  *,
                                                                  -                  name: str,
                                                                  -                  schema_ref: str | None = None,
                                                                  -                  metadata: dict | None = None,
                                                                  -                  ) -> LocalDatasetEntry:
                                                                  +    def add_entry(
                                                                  +        self,
                                                                  +        ds: Dataset,
                                                                  +        *,
                                                                  +        name: str,
                                                                  +        schema_ref: str | None = None,
                                                                  +        metadata: dict | None = None,
                                                                  +    ) -> LocalDatasetEntry:
                                                                           """Add a dataset to the index.
                                                                   
                                                                           Creates a LocalDatasetEntry for the dataset and persists it to Redis.
                                                                  @@ -1158,7 +1175,9 @@ def add_entry(self,
                                                                           """
                                                                           ##
                                                                           if schema_ref is None:
                                                                  -            schema_ref = f"local://schemas/{_kind_str_for_sample_type(ds.sample_type)}@1.0.0"
                                                                  +            schema_ref = (
                                                                  +                f"local://schemas/{_kind_str_for_sample_type(ds.sample_type)}@1.0.0"
                                                                  +            )
                                                                   
                                                                           # Normalize URL to list
                                                                           data_urls = [ds.url]
                                                                  @@ -1237,12 +1256,12 @@ def insert_dataset(
                                                                           Returns:
                                                                               IndexEntry for the inserted dataset.
                                                                           """
                                                                  -        metadata = kwargs.get('metadata')
                                                                  +        metadata = kwargs.get("metadata")
                                                                   
                                                                           if self._data_store is not None:
                                                                               # Write shards to data store, then index the new URLs
                                                                  -            prefix = kwargs.get('prefix', name)
                                                                  -            cache_local = kwargs.get('cache_local', False)
                                                                  +            prefix = kwargs.get("prefix", name)
                                                                  +            cache_local = kwargs.get("cache_local", False)
                                                                   
                                                                               written_urls = self._data_store.write_shards(
                                                                                   ds,
                                                                  @@ -1306,10 +1325,10 @@ def _get_latest_schema_version(self, name: str) -> str | None:
                                                                           latest_version: tuple[int, int, int] | None = None
                                                                           latest_version_str: str | None = None
                                                                   
                                                                  -        prefix = f'{REDIS_KEY_SCHEMA}:'
                                                                  -        for key in self._redis.scan_iter(match=f'{prefix}*'):
                                                                  -            key_str = key.decode('utf-8') if isinstance(key, bytes) else key
                                                                  -            schema_id = key_str[len(prefix):]
                                                                  +        prefix = f"{REDIS_KEY_SCHEMA}:"
                                                                  +        for key in self._redis.scan_iter(match=f"{prefix}*"):
                                                                  +            key_str = key.decode("utf-8") if isinstance(key, bytes) else key
                                                                  +            schema_id = key_str[len(prefix) :]
                                                                   
                                                                               if "@" not in schema_id:
                                                                                   continue
                                                                  @@ -1361,10 +1380,12 @@ def publish_schema(
                                                                           # This catches non-packable types early with a clear error message
                                                                           try:
                                                                               # Check protocol compliance by verifying required methods exist
                                                                  -            if not (hasattr(sample_type, 'from_data') and
                                                                  -                    hasattr(sample_type, 'from_bytes') and
                                                                  -                    callable(getattr(sample_type, 'from_data', None)) and
                                                                  -                    callable(getattr(sample_type, 'from_bytes', None))):
                                                                  +            if not (
                                                                  +                hasattr(sample_type, "from_data")
                                                                  +                and hasattr(sample_type, "from_bytes")
                                                                  +                and callable(getattr(sample_type, "from_data", None))
                                                                  +                and callable(getattr(sample_type, "from_bytes", None))
                                                                  +            ):
                                                                                   raise TypeError(
                                                                                       f"{sample_type.__name__} does not satisfy the Packable protocol. "
                                                                                       "Use @packable decorator or inherit from PackableSample."
                                                                  @@ -1422,10 +1443,10 @@ def get_schema(self, ref: str) -> dict:
                                                                               raise KeyError(f"Schema not found: {ref}")
                                                                   
                                                                           if isinstance(schema_json, bytes):
                                                                  -            schema_json = schema_json.decode('utf-8')
                                                                  +            schema_json = schema_json.decode("utf-8")
                                                                   
                                                                           schema = json.loads(schema_json)
                                                                  -        schema['$ref'] = _make_schema_ref(name, version)
                                                                  +        schema["$ref"] = _make_schema_ref(name, version)
                                                                   
                                                                           # Auto-generate stub if enabled
                                                                           if self._stub_manager is not None:
                                                                  @@ -1460,29 +1481,29 @@ def schemas(self) -> Generator[LocalSchemaRecord, None, None]:
                                                                           Yields:
                                                                               LocalSchemaRecord for each schema.
                                                                           """
                                                                  -        prefix = f'{REDIS_KEY_SCHEMA}:'
                                                                  -        for key in self._redis.scan_iter(match=f'{prefix}*'):
                                                                  -            key_str = key.decode('utf-8') if isinstance(key, bytes) else key
                                                                  +        prefix = f"{REDIS_KEY_SCHEMA}:"
                                                                  +        for key in self._redis.scan_iter(match=f"{prefix}*"):
                                                                  +            key_str = key.decode("utf-8") if isinstance(key, bytes) else key
                                                                               # Extract name@version from key
                                                                  -            schema_id = key_str[len(prefix):]
                                                                  +            schema_id = key_str[len(prefix) :]
                                                                   
                                                                               schema_json = self._redis.get(key)
                                                                               if schema_json is None:
                                                                                   continue
                                                                   
                                                                               if isinstance(schema_json, bytes):
                                                                  -                schema_json = schema_json.decode('utf-8')
                                                                  +                schema_json = schema_json.decode("utf-8")
                                                                   
                                                                               schema = json.loads(schema_json)
                                                                               # Handle legacy keys that have module.Class format
                                                                               if "." in schema_id.split("@")[0]:
                                                                                   name = schema_id.split("@")[0].rsplit(".", 1)[1]
                                                                                   version = schema_id.split("@")[1]
                                                                  -                schema['$ref'] = _make_schema_ref(name, version)
                                                                  +                schema["$ref"] = _make_schema_ref(name, version)
                                                                               else:
                                                                                   # schema_id is already "name@version"
                                                                                   name, version = schema_id.rsplit("@", 1)
                                                                  -                schema['$ref'] = _make_schema_ref(name, version)
                                                                  +                schema["$ref"] = _make_schema_ref(name, version)
                                                                               yield LocalSchemaRecord.from_dict(schema)
                                                                   
                                                                       def list_schemas(self) -> list[dict]:
                                                                  @@ -1526,6 +1547,7 @@ class will be imported from it, providing full IDE autocomplete support.
                                                                   
                                                                           # Fall back to dynamic type generation
                                                                           from atdata._schema_codec import schema_to_type
                                                                  +
                                                                           return schema_to_type(schema_dict)
                                                                   
                                                                       def decode_schema_as(self, ref: str, type_hint: type[T]) -> type[T]:
                                                                  @@ -1557,6 +1579,7 @@ def decode_schema_as(self, ref: str, type_hint: type[T]) -> type[T]:
                                                                               stub matches the schema to avoid runtime surprises.
                                                                           """
                                                                           from typing import cast
                                                                  +
                                                                           return cast(type[T], self.decode_schema(ref))
                                                                   
                                                                       def clear_stubs(self) -> int:
                                                                  @@ -1677,11 +1700,11 @@ def read_url(self, url: str) -> str:
                                                                               HTTPS URL if custom endpoint is configured, otherwise unchanged.
                                                                               Example: 's3://bucket/path' -> 'https://endpoint.com/bucket/path'
                                                                           """
                                                                  -        endpoint = self.credentials.get('AWS_ENDPOINT')
                                                                  -        if endpoint and url.startswith('s3://'):
                                                                  +        endpoint = self.credentials.get("AWS_ENDPOINT")
                                                                  +        if endpoint and url.startswith("s3://"):
                                                                               # s3://bucket/path -> https://endpoint/bucket/path
                                                                               path = url[5:]  # Remove 's3://' prefix
                                                                  -            endpoint = endpoint.rstrip('/')
                                                                  +            endpoint = endpoint.rstrip("/")
                                                                               return f"{endpoint}/{path}"
                                                                           return url
                                                                   
                                                                  @@ -1694,4 +1717,4 @@ def supports_streaming(self) -> bool:
                                                                           return True
                                                                   
                                                                   
                                                                  -#
                                                                  \ No newline at end of file
                                                                  +#
                                                                  diff --git a/tests/conftest.py b/tests/conftest.py
                                                                  index 757f826..a418592 100644
                                                                  --- a/tests/conftest.py
                                                                  +++ b/tests/conftest.py
                                                                  @@ -3,12 +3,14 @@
                                                                   This module provides shared fixtures and sample types for the test suite.
                                                                   """
                                                                   
                                                                  -import pytest
                                                                  -from redis import Redis
                                                                  -from typing import Optional
                                                                  +from pathlib import Path
                                                                  +from typing import Optional, TypeVar
                                                                   
                                                                   import numpy as np
                                                                  +import pytest
                                                                  +import webdataset as wds
                                                                   from numpy.typing import NDArray
                                                                  +from redis import Redis
                                                                   
                                                                   import atdata
                                                                   
                                                                  @@ -41,6 +43,7 @@ class SharedBasicSample:
                                                                   
                                                                       Fields: name (str), value (int)
                                                                       """
                                                                  +
                                                                       name: str
                                                                       value: int
                                                                   
                                                                  @@ -51,6 +54,7 @@ class SharedNumpySample:
                                                                   
                                                                       Fields: data (NDArray), label (str)
                                                                       """
                                                                  +
                                                                       data: NDArray
                                                                       label: str
                                                                   
                                                                  @@ -61,6 +65,7 @@ class SharedOptionalSample:
                                                                   
                                                                       Fields: required (str), optional_int (int|None), optional_array (NDArray|None)
                                                                       """
                                                                  +
                                                                       required: str
                                                                       optional_int: Optional[int] = None
                                                                       optional_array: Optional[NDArray] = None
                                                                  @@ -72,6 +77,7 @@ class SharedAllTypesSample:
                                                                   
                                                                       Fields: str_field, int_field, float_field, bool_field, bytes_field
                                                                       """
                                                                  +
                                                                       str_field: str
                                                                       int_field: int
                                                                       float_field: float
                                                                  @@ -85,6 +91,7 @@ class SharedListSample:
                                                                   
                                                                       Fields: tags (list[str]), scores (list[float])
                                                                       """
                                                                  +
                                                                       tags: list[str]
                                                                       scores: list[float]
                                                                   
                                                                  @@ -95,6 +102,7 @@ class SharedMetadataSample:
                                                                   
                                                                       Fields: id (int), content (str), score (float)
                                                                       """
                                                                  +
                                                                       id: int
                                                                       content: str
                                                                       score: float
                                                                  @@ -109,10 +117,6 @@ class SharedMetadataSample:
                                                                   #
                                                                   # =============================================================================
                                                                   
                                                                  -import webdataset as wds
                                                                  -from pathlib import Path
                                                                  -from typing import Type, TypeVar
                                                                  -
                                                                   ST = TypeVar("ST")
                                                                   
                                                                   
                                                                  @@ -150,8 +154,7 @@ def create_basic_dataset(
                                                                       """
                                                                       tar_path = tmp_path / f"{name}-000000.tar"
                                                                       samples = [
                                                                  -        SharedBasicSample(name=f"sample_{i}", value=i * 10)
                                                                  -        for i in range(num_samples)
                                                                  +        SharedBasicSample(name=f"sample_{i}", value=i * 10) for i in range(num_samples)
                                                                       ]
                                                                       create_tar_with_samples(tar_path, samples)
                                                                       return atdata.Dataset[SharedBasicSample](url=str(tar_path))
                                                                  @@ -190,6 +193,7 @@ def create_numpy_dataset(
                                                                   # Fixtures
                                                                   # =============================================================================
                                                                   
                                                                  +
                                                                   @pytest.fixture
                                                                   def redis_connection():
                                                                       """Provide a Redis connection, skip test if Redis is not available."""
                                                                  @@ -208,8 +212,9 @@ def clean_redis(redis_connection):
                                                                       Clears LocalDatasetEntry, BasicIndexEntry (legacy), and LocalSchema keys
                                                                       before and after each test to ensure test isolation.
                                                                       """
                                                                  +
                                                                       def _clear_all():
                                                                  -        for pattern in ('LocalDatasetEntry:*', 'BasicIndexEntry:*', 'LocalSchema:*'):
                                                                  +        for pattern in ("LocalDatasetEntry:*", "BasicIndexEntry:*", "LocalSchema:*"):
                                                                               for key in redis_connection.scan_iter(match=pattern):
                                                                                   redis_connection.delete(key)
                                                                   
                                                                  diff --git a/tests/test_atmosphere.py b/tests/test_atmosphere.py
                                                                  index f6023e6..4836834 100644
                                                                  --- a/tests/test_atmosphere.py
                                                                  +++ b/tests/test_atmosphere.py
                                                                  @@ -44,6 +44,7 @@
                                                                   # Test Fixtures
                                                                   # =============================================================================
                                                                   
                                                                  +
                                                                   @pytest.fixture
                                                                   def mock_atproto_client():
                                                                       """Create a mock atproto SDK client."""
                                                                  @@ -75,6 +76,7 @@ def authenticated_client(mock_atproto_client):
                                                                   @atdata.packable
                                                                   class BasicSample:
                                                                       """Simple sample type for testing."""
                                                                  +
                                                                       name: str
                                                                       value: int
                                                                   
                                                                  @@ -82,6 +84,7 @@ class BasicSample:
                                                                   @atdata.packable
                                                                   class NumpySample:
                                                                       """Sample type with NDArray field."""
                                                                  +
                                                                       data: NDArray
                                                                       label: str
                                                                   
                                                                  @@ -89,6 +92,7 @@ class NumpySample:
                                                                   @atdata.packable
                                                                   class OptionalSample:
                                                                       """Sample type with optional fields."""
                                                                  +
                                                                       required_field: str
                                                                       optional_field: Optional[int]
                                                                       optional_array: Optional[NDArray]
                                                                  @@ -97,6 +101,7 @@ class OptionalSample:
                                                                   @atdata.packable
                                                                   class AllTypesSample:
                                                                       """Sample type with all primitive types."""
                                                                  +
                                                                       str_field: str
                                                                       int_field: int
                                                                       float_field: float
                                                                  @@ -108,6 +113,7 @@ class AllTypesSample:
                                                                   # Tests for _types.py - AtUri
                                                                   # =============================================================================
                                                                   
                                                                  +
                                                                   class TestAtUri:
                                                                       """Tests for AtUri parsing and formatting."""
                                                                   
                                                                  @@ -162,6 +168,7 @@ def test_parse_atdata_namespace(self):
                                                                   # Tests for _types.py - FieldType
                                                                   # =============================================================================
                                                                   
                                                                  +
                                                                   class TestFieldType:
                                                                       """Tests for FieldType dataclass."""
                                                                   
                                                                  @@ -203,6 +210,7 @@ def test_array_type(self):
                                                                   # Tests for _types.py - FieldDef
                                                                   # =============================================================================
                                                                   
                                                                  +
                                                                   class TestFieldDef:
                                                                       """Tests for FieldDef dataclass."""
                                                                   
                                                                  @@ -243,6 +251,7 @@ def test_field_with_description(self):
                                                                   # Tests for _types.py - SchemaRecord
                                                                   # =============================================================================
                                                                   
                                                                  +
                                                                   class TestSchemaRecord:
                                                                       """Tests for SchemaRecord dataclass and to_record()."""
                                                                   
                                                                  @@ -318,14 +327,19 @@ def test_to_record_field_types(self):
                                                                           # Check primitive field
                                                                           prim_field = record["fields"][0]
                                                                           assert prim_field["name"] == "primitive_field"
                                                                  -        assert prim_field["fieldType"]["$type"] == f"{LEXICON_NAMESPACE}.schemaType#primitive"
                                                                  +        assert (
                                                                  +            prim_field["fieldType"]["$type"]
                                                                  +            == f"{LEXICON_NAMESPACE}.schemaType#primitive"
                                                                  +        )
                                                                           assert prim_field["fieldType"]["primitive"] == "int"
                                                                           assert prim_field["optional"] is False
                                                                   
                                                                           # Check ndarray field
                                                                           arr_field = record["fields"][1]
                                                                           assert arr_field["name"] == "array_field"
                                                                  -        assert arr_field["fieldType"]["$type"] == f"{LEXICON_NAMESPACE}.schemaType#ndarray"
                                                                  +        assert (
                                                                  +            arr_field["fieldType"]["$type"] == f"{LEXICON_NAMESPACE}.schemaType#ndarray"
                                                                  +        )
                                                                           assert arr_field["fieldType"]["dtype"] == "float32"
                                                                           assert arr_field["optional"] is True
                                                                   
                                                                  @@ -334,6 +348,7 @@ def test_to_record_field_types(self):
                                                                   # Tests for _types.py - StorageLocation
                                                                   # =============================================================================
                                                                   
                                                                  +
                                                                   class TestStorageLocation:
                                                                       """Tests for StorageLocation dataclass."""
                                                                   
                                                                  @@ -364,6 +379,7 @@ def test_blob_storage(self):
                                                                   # Tests for _types.py - DatasetRecord
                                                                   # =============================================================================
                                                                   
                                                                  +
                                                                   class TestDatasetRecord:
                                                                       """Tests for DatasetRecord dataclass and to_record()."""
                                                                   
                                                                  @@ -382,7 +398,10 @@ def test_to_record_external_storage(self):
                                                                   
                                                                           assert record["$type"] == f"{LEXICON_NAMESPACE}.record"
                                                                           assert record["name"] == "TestDataset"
                                                                  -        assert record["schemaRef"] == "at://did:plc:abc/ac.foundation.dataset.sampleSchema/xyz"
                                                                  +        assert (
                                                                  +            record["schemaRef"]
                                                                  +            == "at://did:plc:abc/ac.foundation.dataset.sampleSchema/xyz"
                                                                  +        )
                                                                           assert record["storage"]["$type"] == f"{LEXICON_NAMESPACE}.storageExternal"
                                                                           assert record["storage"]["urls"] == ["s3://bucket/data.tar"]
                                                                   
                                                                  @@ -438,6 +457,7 @@ def test_to_record_with_metadata(self):
                                                                   # Tests for _types.py - LensRecord
                                                                   # =============================================================================
                                                                   
                                                                  +
                                                                   class TestLensRecord:
                                                                       """Tests for LensRecord dataclass and to_record()."""
                                                                   
                                                                  @@ -500,6 +520,7 @@ def test_to_record_with_code_references(self):
                                                                   # Tests for client.py - AtmosphereClient
                                                                   # =============================================================================
                                                                   
                                                                  +
                                                                   class TestAtmosphereClient:
                                                                       """Tests for AtmosphereClient."""
                                                                   
                                                                  @@ -539,7 +560,9 @@ def test_login_success(self, mock_atproto_client):
                                                                           assert client.is_authenticated
                                                                           assert client.did == "did:plc:test123456789"
                                                                           assert client.handle == "test.bsky.social"
                                                                  -        mock_atproto_client.login.assert_called_once_with("test.bsky.social", "password123")
                                                                  +        mock_atproto_client.login.assert_called_once_with(
                                                                  +            "test.bsky.social", "password123"
                                                                  +        )
                                                                   
                                                                       def test_login_with_session(self, mock_atproto_client):
                                                                           """Login with exported session string."""
                                                                  @@ -548,7 +571,9 @@ def test_login_with_session(self, mock_atproto_client):
                                                                           client.login_with_session("test-session-string")
                                                                   
                                                                           assert client.is_authenticated
                                                                  -        mock_atproto_client.login.assert_called_once_with(session_string="test-session-string")
                                                                  +        mock_atproto_client.login.assert_called_once_with(
                                                                  +            session_string="test-session-string"
                                                                  +        )
                                                                   
                                                                       def test_export_session(self, authenticated_client, mock_atproto_client):
                                                                           """Export session string."""
                                                                  @@ -625,7 +650,9 @@ def test_get_record(self, authenticated_client, mock_atproto_client):
                                                                   
                                                                           assert record["field"] == "value"
                                                                   
                                                                  -    def test_get_record_with_aturi_object(self, authenticated_client, mock_atproto_client):
                                                                  +    def test_get_record_with_aturi_object(
                                                                  +        self, authenticated_client, mock_atproto_client
                                                                  +    ):
                                                                           """Get a record using AtUri object."""
                                                                           mock_response = Mock()
                                                                           mock_response.value = {"$type": "test", "data": 123}
                                                                  @@ -653,7 +680,9 @@ def test_upload_blob(self, authenticated_client, mock_atproto_client):
                                                                           mock_response.blob = mock_blob_ref
                                                                           mock_atproto_client.upload_blob.return_value = mock_response
                                                                   
                                                                  -        result = authenticated_client.upload_blob(b"test data", mime_type="application/x-tar")
                                                                  +        result = authenticated_client.upload_blob(
                                                                  +            b"test data", mime_type="application/x-tar"
                                                                  +        )
                                                                   
                                                                           assert result["$type"] == "blob"
                                                                           assert result["ref"]["$link"] == "bafkreitest123"
                                                                  @@ -673,7 +702,10 @@ def test_get_blob(self, authenticated_client):
                                                                               mock_did_response = Mock()
                                                                               mock_did_response.json.return_value = {
                                                                                   "service": [
                                                                  -                    {"type": "AtprotoPersonalDataServer", "serviceEndpoint": "https://pds.example.com"}
                                                                  +                    {
                                                                  +                        "type": "AtprotoPersonalDataServer",
                                                                  +                        "serviceEndpoint": "https://pds.example.com",
                                                                  +                    }
                                                                                   ]
                                                                               }
                                                                               mock_did_response.raise_for_status = Mock()
                                                                  @@ -692,6 +724,7 @@ def test_get_blob(self, authenticated_client):
                                                                       def test_get_blob_pds_not_found(self, authenticated_client):
                                                                           """Get blob raises when PDS cannot be resolved."""
                                                                           import requests as req_module
                                                                  +
                                                                           with patch("requests.get") as mock_get:
                                                                               mock_get.side_effect = req_module.RequestException("Network error")
                                                                   
                                                                  @@ -704,7 +737,10 @@ def test_get_blob_url(self, authenticated_client):
                                                                               mock_response = Mock()
                                                                               mock_response.json.return_value = {
                                                                                   "service": [
                                                                  -                    {"type": "AtprotoPersonalDataServer", "serviceEndpoint": "https://pds.example.com"}
                                                                  +                    {
                                                                  +                        "type": "AtprotoPersonalDataServer",
                                                                  +                        "serviceEndpoint": "https://pds.example.com",
                                                                  +                    }
                                                                                   ]
                                                                               }
                                                                               mock_response.raise_for_status = Mock()
                                                                  @@ -712,11 +748,15 @@ def test_get_blob_url(self, authenticated_client):
                                                                   
                                                                               url = authenticated_client.get_blob_url("did:plc:abc", "bafkreitest")
                                                                   
                                                                  -            assert url == "https://pds.example.com/xrpc/com.atproto.sync.getBlob?did=did:plc:abc&cid=bafkreitest"
                                                                  +            assert (
                                                                  +                url
                                                                  +                == "https://pds.example.com/xrpc/com.atproto.sync.getBlob?did=did:plc:abc&cid=bafkreitest"
                                                                  +            )
                                                                   
                                                                       def test_get_blob_url_pds_not_found(self, authenticated_client):
                                                                           """Get blob URL raises when PDS cannot be resolved."""
                                                                           import requests as req_module
                                                                  +
                                                                           with patch("requests.get") as mock_get:
                                                                               mock_get.side_effect = req_module.RequestException("Network error")
                                                                   
                                                                  @@ -763,13 +803,16 @@ def test_list_schemas_convenience(self, authenticated_client, mock_atproto_clien
                                                                   # Tests for schema.py - SchemaPublisher
                                                                   # =============================================================================
                                                                   
                                                                  +
                                                                   class TestSchemaPublisher:
                                                                       """Tests for SchemaPublisher."""
                                                                   
                                                                       def test_publish_basic_sample(self, authenticated_client, mock_atproto_client):
                                                                           """Publish a basic sample type schema."""
                                                                           mock_response = Mock()
                                                                  -        mock_response.uri = f"at://did:plc:test123456789/{LEXICON_NAMESPACE}.sampleSchema/abc"
                                                                  +        mock_response.uri = (
                                                                  +            f"at://did:plc:test123456789/{LEXICON_NAMESPACE}.sampleSchema/abc"
                                                                  +        )
                                                                           mock_atproto_client.com.atproto.repo.create_record.return_value = mock_response
                                                                   
                                                                           publisher = SchemaPublisher(authenticated_client)
                                                                  @@ -833,7 +876,9 @@ def test_publish_optional_fields(self, authenticated_client, mock_atproto_client
                                                                           assert required["optional"] is False
                                                                           assert optional["optional"] is True
                                                                   
                                                                  -    def test_publish_all_primitive_types(self, authenticated_client, mock_atproto_client):
                                                                  +    def test_publish_all_primitive_types(
                                                                  +        self, authenticated_client, mock_atproto_client
                                                                  +    ):
                                                                           """Publish sample with all primitive types."""
                                                                           mock_response = Mock()
                                                                           mock_response.uri = f"at://did:plc:test/{LEXICON_NAMESPACE}.sampleSchema/abc"
                                                                  @@ -918,6 +963,7 @@ def test_list_all_schemas(self, authenticated_client, mock_atproto_client):
                                                                   # Tests for records.py - DatasetPublisher
                                                                   # =============================================================================
                                                                   
                                                                  +
                                                                   class TestDatasetPublisher:
                                                                       """Tests for DatasetPublisher."""
                                                                   
                                                                  @@ -950,11 +996,15 @@ def test_publish_auto_schema(self, authenticated_client, mock_atproto_client):
                                                                           """Publish dataset with auto schema publishing."""
                                                                           # Mock for schema creation
                                                                           schema_response = Mock()
                                                                  -        schema_response.uri = f"at://did:plc:test/{LEXICON_NAMESPACE}.sampleSchema/schema123"
                                                                  +        schema_response.uri = (
                                                                  +            f"at://did:plc:test/{LEXICON_NAMESPACE}.sampleSchema/schema123"
                                                                  +        )
                                                                   
                                                                           # Mock for dataset creation
                                                                           dataset_response = Mock()
                                                                  -        dataset_response.uri = f"at://did:plc:test/{LEXICON_NAMESPACE}.record/dataset456"
                                                                  +        dataset_response.uri = (
                                                                  +            f"at://did:plc:test/{LEXICON_NAMESPACE}.record/dataset456"
                                                                  +        )
                                                                   
                                                                           mock_atproto_client.com.atproto.repo.create_record.side_effect = [
                                                                               schema_response,
                                                                  @@ -977,7 +1027,9 @@ def test_publish_auto_schema(self, authenticated_client, mock_atproto_client):
                                                                           # Should have called create_record twice (schema + dataset)
                                                                           assert mock_atproto_client.com.atproto.repo.create_record.call_count == 2
                                                                   
                                                                  -    def test_publish_explicit_schema_uri(self, authenticated_client, mock_atproto_client):
                                                                  +    def test_publish_explicit_schema_uri(
                                                                  +        self, authenticated_client, mock_atproto_client
                                                                  +    ):
                                                                           """Publish dataset with explicit schema URI (no auto publish)."""
                                                                           mock_response = Mock()
                                                                           mock_response.uri = f"at://did:plc:test/{LEXICON_NAMESPACE}.record/abc"
                                                                  @@ -1026,8 +1078,12 @@ def test_publish_with_blobs(self, authenticated_client, mock_atproto_client):
                                                                   
                                                                           # Mock create_record response
                                                                           mock_create_response = Mock()
                                                                  -        mock_create_response.uri = f"at://did:plc:test/{LEXICON_NAMESPACE}.record/blobds"
                                                                  -        mock_atproto_client.com.atproto.repo.create_record.return_value = mock_create_response
                                                                  +        mock_create_response.uri = (
                                                                  +            f"at://did:plc:test/{LEXICON_NAMESPACE}.record/blobds"
                                                                  +        )
                                                                  +        mock_atproto_client.com.atproto.repo.create_record.return_value = (
                                                                  +            mock_create_response
                                                                  +        )
                                                                   
                                                                           publisher = DatasetPublisher(authenticated_client)
                                                                           uri = publisher.publish_with_blobs(
                                                                  @@ -1050,7 +1106,9 @@ def test_publish_with_blobs(self, authenticated_client, mock_atproto_client):
                                                                           assert record["name"] == "BlobStoredDataset"
                                                                           assert "storageBlobs" in record["storage"]["$type"]
                                                                   
                                                                  -    def test_publish_with_blobs_with_metadata(self, authenticated_client, mock_atproto_client):
                                                                  +    def test_publish_with_blobs_with_metadata(
                                                                  +        self, authenticated_client, mock_atproto_client
                                                                  +    ):
                                                                           """Publish with blobs includes metadata when provided."""
                                                                           mock_blob_ref = Mock()
                                                                           mock_blob_ref.ref = Mock(link="bafkreiblob456")
                                                                  @@ -1062,8 +1120,12 @@ def test_publish_with_blobs_with_metadata(self, authenticated_client, mock_atpro
                                                                           mock_atproto_client.upload_blob.return_value = mock_upload_response
                                                                   
                                                                           mock_create_response = Mock()
                                                                  -        mock_create_response.uri = f"at://did:plc:test/{LEXICON_NAMESPACE}.record/metads"
                                                                  -        mock_atproto_client.com.atproto.repo.create_record.return_value = mock_create_response
                                                                  +        mock_create_response.uri = (
                                                                  +            f"at://did:plc:test/{LEXICON_NAMESPACE}.record/metads"
                                                                  +        )
                                                                  +        mock_atproto_client.com.atproto.repo.create_record.return_value = (
                                                                  +            mock_create_response
                                                                  +        )
                                                                   
                                                                           publisher = DatasetPublisher(authenticated_client)
                                                                           publisher.publish_with_blobs(
                                                                  @@ -1123,7 +1185,10 @@ def test_get_urls(self, authenticated_client, mock_atproto_client):
                                                                               "schemaRef": "at://schema",
                                                                               "storage": {
                                                                                   "$type": f"{LEXICON_NAMESPACE}.storageExternal",
                                                                  -                "urls": ["s3://bucket/data-{000000..000009}.tar", "s3://bucket/extra.tar"],
                                                                  +                "urls": [
                                                                  +                    "s3://bucket/data-{000000..000009}.tar",
                                                                  +                    "s3://bucket/extra.tar",
                                                                  +                ],
                                                                               },
                                                                           }
                                                                           mock_atproto_client.com.atproto.repo.get_record.return_value = mock_response
                                                                  @@ -1134,7 +1199,9 @@ def test_get_urls(self, authenticated_client, mock_atproto_client):
                                                                           assert len(urls) == 2
                                                                           assert "data-{000000..000009}.tar" in urls[0]
                                                                   
                                                                  -    def test_get_urls_blob_storage_error(self, authenticated_client, mock_atproto_client):
                                                                  +    def test_get_urls_blob_storage_error(
                                                                  +        self, authenticated_client, mock_atproto_client
                                                                  +    ):
                                                                           """Get URLs raises for blob storage datasets."""
                                                                           mock_response = Mock()
                                                                           mock_response.value = {
                                                                  @@ -1170,7 +1237,9 @@ def test_get_metadata(self, authenticated_client, mock_atproto_client):
                                                                           mock_atproto_client.com.atproto.repo.get_record.return_value = mock_response
                                                                   
                                                                           loader = DatasetLoader(authenticated_client)
                                                                  -        metadata = loader.get_metadata(f"at://did:plc:abc/{LEXICON_NAMESPACE}.record/xyz")
                                                                  +        metadata = loader.get_metadata(
                                                                  +            f"at://did:plc:abc/{LEXICON_NAMESPACE}.record/xyz"
                                                                  +        )
                                                                   
                                                                           assert metadata["split"] == "train"
                                                                           assert metadata["samples"] == 10000
                                                                  @@ -1187,7 +1256,9 @@ def test_get_metadata_none(self, authenticated_client, mock_atproto_client):
                                                                           mock_atproto_client.com.atproto.repo.get_record.return_value = mock_response
                                                                   
                                                                           loader = DatasetLoader(authenticated_client)
                                                                  -        metadata = loader.get_metadata(f"at://did:plc:abc/{LEXICON_NAMESPACE}.record/xyz")
                                                                  +        metadata = loader.get_metadata(
                                                                  +            f"at://did:plc:abc/{LEXICON_NAMESPACE}.record/xyz"
                                                                  +        )
                                                                   
                                                                           assert metadata is None
                                                                   
                                                                  @@ -1221,7 +1292,9 @@ def test_get_storage_type_external(self, authenticated_client, mock_atproto_clie
                                                                           mock_atproto_client.com.atproto.repo.get_record.return_value = mock_response
                                                                   
                                                                           loader = DatasetLoader(authenticated_client)
                                                                  -        storage_type = loader.get_storage_type(f"at://did:plc:abc/{LEXICON_NAMESPACE}.record/xyz")
                                                                  +        storage_type = loader.get_storage_type(
                                                                  +            f"at://did:plc:abc/{LEXICON_NAMESPACE}.record/xyz"
                                                                  +        )
                                                                   
                                                                           assert storage_type == "external"
                                                                   
                                                                  @@ -1240,7 +1313,9 @@ def test_get_storage_type_blobs(self, authenticated_client, mock_atproto_client)
                                                                           mock_atproto_client.com.atproto.repo.get_record.return_value = mock_response
                                                                   
                                                                           loader = DatasetLoader(authenticated_client)
                                                                  -        storage_type = loader.get_storage_type(f"at://did:plc:abc/{LEXICON_NAMESPACE}.record/xyz")
                                                                  +        storage_type = loader.get_storage_type(
                                                                  +            f"at://did:plc:abc/{LEXICON_NAMESPACE}.record/xyz"
                                                                  +        )
                                                                   
                                                                           assert storage_type == "blobs"
                                                                   
                                                                  @@ -1265,8 +1340,16 @@ def test_get_storage_type_unknown(self, authenticated_client, mock_atproto_clien
                                                                       def test_get_blobs(self, authenticated_client, mock_atproto_client):
                                                                           """Get blobs returns blob references from storage."""
                                                                           blob_refs = [
                                                                  -            {"ref": {"$link": "bafkreitest1"}, "mimeType": "application/x-tar", "size": 1024},
                                                                  -            {"ref": {"$link": "bafkreitest2"}, "mimeType": "application/x-tar", "size": 2048},
                                                                  +            {
                                                                  +                "ref": {"$link": "bafkreitest1"},
                                                                  +                "mimeType": "application/x-tar",
                                                                  +                "size": 1024,
                                                                  +            },
                                                                  +            {
                                                                  +                "ref": {"$link": "bafkreitest2"},
                                                                  +                "mimeType": "application/x-tar",
                                                                  +                "size": 2048,
                                                                  +            },
                                                                           ]
                                                                           mock_response = Mock()
                                                                           mock_response.value = {
                                                                  @@ -1287,7 +1370,9 @@ def test_get_blobs(self, authenticated_client, mock_atproto_client):
                                                                           assert blobs[0]["ref"]["$link"] == "bafkreitest1"
                                                                           assert blobs[1]["ref"]["$link"] == "bafkreitest2"
                                                                   
                                                                  -    def test_get_blobs_external_storage_error(self, authenticated_client, mock_atproto_client):
                                                                  +    def test_get_blobs_external_storage_error(
                                                                  +        self, authenticated_client, mock_atproto_client
                                                                  +    ):
                                                                           """Get blobs raises for external URL storage datasets."""
                                                                           mock_response = Mock()
                                                                           mock_response.value = {
                                                                  @@ -1306,7 +1391,9 @@ def test_get_blobs_external_storage_error(self, authenticated_client, mock_atpro
                                                                           with pytest.raises(ValueError, match="external URL storage"):
                                                                               loader.get_blobs(f"at://did:plc:abc/{LEXICON_NAMESPACE}.record/xyz")
                                                                   
                                                                  -    def test_get_blobs_unknown_storage_error(self, authenticated_client, mock_atproto_client):
                                                                  +    def test_get_blobs_unknown_storage_error(
                                                                  +        self, authenticated_client, mock_atproto_client
                                                                  +    ):
                                                                           """Get blobs raises for unknown storage type."""
                                                                           mock_response = Mock()
                                                                           mock_response.value = {
                                                                  @@ -1346,21 +1433,28 @@ def test_get_blob_urls(self, authenticated_client, mock_atproto_client):
                                                                               mock_did_response = Mock()
                                                                               mock_did_response.json.return_value = {
                                                                                   "service": [
                                                                  -                    {"type": "AtprotoPersonalDataServer", "serviceEndpoint": "https://pds.example.com"}
                                                                  +                    {
                                                                  +                        "type": "AtprotoPersonalDataServer",
                                                                  +                        "serviceEndpoint": "https://pds.example.com",
                                                                  +                    }
                                                                                   ]
                                                                               }
                                                                               mock_did_response.raise_for_status = Mock()
                                                                               mock_get.return_value = mock_did_response
                                                                   
                                                                               loader = DatasetLoader(authenticated_client)
                                                                  -            urls = loader.get_blob_urls(f"at://did:plc:abc123/{LEXICON_NAMESPACE}.record/xyz")
                                                                  +            urls = loader.get_blob_urls(
                                                                  +                f"at://did:plc:abc123/{LEXICON_NAMESPACE}.record/xyz"
                                                                  +            )
                                                                   
                                                                               assert len(urls) == 2
                                                                               assert "bafkreitest1" in urls[0]
                                                                               assert "bafkreitest2" in urls[1]
                                                                               assert "did:plc:abc123" in urls[0]
                                                                   
                                                                  -    def test_get_urls_unknown_storage_error(self, authenticated_client, mock_atproto_client):
                                                                  +    def test_get_urls_unknown_storage_error(
                                                                  +        self, authenticated_client, mock_atproto_client
                                                                  +    ):
                                                                           """Get URLs raises for unknown storage type."""
                                                                           mock_response = Mock()
                                                                           mock_response.value = {
                                                                  @@ -1383,6 +1477,7 @@ def test_get_urls_unknown_storage_error(self, authenticated_client, mock_atproto
                                                                   # Tests for lens.py - LensPublisher
                                                                   # =============================================================================
                                                                   
                                                                  +
                                                                   class TestLensPublisher:
                                                                       """Tests for LensPublisher."""
                                                                   
                                                                  @@ -1508,12 +1603,20 @@ def test_list_all(self, authenticated_client, mock_atproto_client):
                                                                   
                                                                           assert len(lenses) == 1
                                                                   
                                                                  -    def test_find_by_schemas_source_only(self, authenticated_client, mock_atproto_client):
                                                                  +    def test_find_by_schemas_source_only(
                                                                  +        self, authenticated_client, mock_atproto_client
                                                                  +    ):
                                                                           """Find lenses by source schema only."""
                                                                           mock_records = [
                                                                  -            Mock(value={"sourceSchema": "at://schema/a", "targetSchema": "at://schema/b"}),
                                                                  -            Mock(value={"sourceSchema": "at://schema/a", "targetSchema": "at://schema/c"}),
                                                                  -            Mock(value={"sourceSchema": "at://schema/x", "targetSchema": "at://schema/y"}),
                                                                  +            Mock(
                                                                  +                value={"sourceSchema": "at://schema/a", "targetSchema": "at://schema/b"}
                                                                  +            ),
                                                                  +            Mock(
                                                                  +                value={"sourceSchema": "at://schema/a", "targetSchema": "at://schema/c"}
                                                                  +            ),
                                                                  +            Mock(
                                                                  +                value={"sourceSchema": "at://schema/x", "targetSchema": "at://schema/y"}
                                                                  +            ),
                                                                           ]
                                                                   
                                                                           mock_response = Mock()
                                                                  @@ -1529,8 +1632,12 @@ def test_find_by_schemas_source_only(self, authenticated_client, mock_atproto_cl
                                                                       def test_find_by_schemas_both(self, authenticated_client, mock_atproto_client):
                                                                           """Find lenses by both source and target schema."""
                                                                           mock_records = [
                                                                  -            Mock(value={"sourceSchema": "at://schema/a", "targetSchema": "at://schema/b"}),
                                                                  -            Mock(value={"sourceSchema": "at://schema/a", "targetSchema": "at://schema/c"}),
                                                                  +            Mock(
                                                                  +                value={"sourceSchema": "at://schema/a", "targetSchema": "at://schema/b"}
                                                                  +            ),
                                                                  +            Mock(
                                                                  +                value={"sourceSchema": "at://schema/a", "targetSchema": "at://schema/c"}
                                                                  +            ),
                                                                           ]
                                                                   
                                                                           mock_response = Mock()
                                                                  @@ -1691,6 +1798,7 @@ class UnsupportedSample:
                                                                   # AtmosphereIndex Tests
                                                                   # =============================================================================
                                                                   
                                                                  +
                                                                   class TestAtmosphereIndexEntry:
                                                                       """Tests for AtmosphereIndexEntry wrapper."""
                                                                   
                                                                  @@ -1738,13 +1846,13 @@ def test_has_protocol_methods(self, authenticated_client):
                                                                           """Index has all AbstractIndex protocol methods."""
                                                                           index = AtmosphereIndex(authenticated_client)
                                                                   
                                                                  -        assert hasattr(index, 'insert_dataset')
                                                                  -        assert hasattr(index, 'get_dataset')
                                                                  -        assert hasattr(index, 'list_datasets')
                                                                  -        assert hasattr(index, 'publish_schema')
                                                                  -        assert hasattr(index, 'get_schema')
                                                                  -        assert hasattr(index, 'list_schemas')
                                                                  -        assert hasattr(index, 'decode_schema')
                                                                  +        assert hasattr(index, "insert_dataset")
                                                                  +        assert hasattr(index, "get_dataset")
                                                                  +        assert hasattr(index, "list_datasets")
                                                                  +        assert hasattr(index, "publish_schema")
                                                                  +        assert hasattr(index, "get_schema")
                                                                  +        assert hasattr(index, "list_schemas")
                                                                  +        assert hasattr(index, "decode_schema")
                                                                   
                                                                       def test_publish_schema(self, authenticated_client, mock_atproto_client):
                                                                           """publish_schema delegates to SchemaPublisher."""
                                                                  diff --git a/tests/test_cid.py b/tests/test_cid.py
                                                                  index f13f310..96bc0c7 100644
                                                                  --- a/tests/test_cid.py
                                                                  +++ b/tests/test_cid.py
                                                                  @@ -176,12 +176,15 @@ def test_parse_cid_digest_matches(self):
                                                                           parsed = parse_cid(cid)
                                                                           assert parsed["hash"]["digest"] == expected_digest
                                                                   
                                                                  -    @pytest.mark.parametrize("malformed_cid", [
                                                                  -        "",           # empty
                                                                  -        "invalid",    # not a CID
                                                                  -        "bafy123",    # truncated CID
                                                                  -        "Qm123",      # v0 prefix but invalid
                                                                  -    ])
                                                                  +    @pytest.mark.parametrize(
                                                                  +        "malformed_cid",
                                                                  +        [
                                                                  +            "",  # empty
                                                                  +            "invalid",  # not a CID
                                                                  +            "bafy123",  # truncated CID
                                                                  +            "Qm123",  # v0 prefix but invalid
                                                                  +        ],
                                                                  +    )
                                                                       def test_parse_cid_malformed_raises_valueerror(self, malformed_cid):
                                                                           """Malformed CID strings raise ValueError."""
                                                                           with pytest.raises(ValueError, match="Failed to decode CID"):
                                                                  diff --git a/tests/test_dataset.py b/tests/test_dataset.py
                                                                  index efe77b6..672f4f3 100644
                                                                  --- a/tests/test_dataset.py
                                                                  +++ b/tests/test_dataset.py
                                                                  @@ -27,138 +27,144 @@
                                                                   ##
                                                                   # Sample test cases
                                                                   
                                                                  +
                                                                   @dataclass
                                                                  -class BasicTestSample( atdata.PackableSample ):
                                                                  +class BasicTestSample(atdata.PackableSample):
                                                                       name: str
                                                                       position: int
                                                                       value: float
                                                                   
                                                                  +
                                                                   @dataclass
                                                                  -class NumpyTestSample( atdata.PackableSample ):
                                                                  +class NumpyTestSample(atdata.PackableSample):
                                                                       label: int
                                                                       image: NDArray
                                                                   
                                                                  +
                                                                   @atdata.packable
                                                                   class BasicTestSampleDecorated:
                                                                       name: str
                                                                       position: int
                                                                       value: float
                                                                   
                                                                  +
                                                                   @atdata.packable
                                                                   class NumpyTestSampleDecorated:
                                                                       label: int
                                                                       image: NDArray
                                                                   
                                                                  +
                                                                   @atdata.packable
                                                                   class NumpyOptionalSampleDecorated:
                                                                       label: int
                                                                       image: NDArray
                                                                       embeddings: NDArray | None = None
                                                                   
                                                                  +
                                                                   test_cases = [
                                                                       {
                                                                  -        'SampleType': BasicTestSample,
                                                                  -        'sample_data': {
                                                                  -            'name': 'Hello, world!',
                                                                  -            'position': 42,
                                                                  -            'value': 1024.768,
                                                                  +        "SampleType": BasicTestSample,
                                                                  +        "sample_data": {
                                                                  +            "name": "Hello, world!",
                                                                  +            "position": 42,
                                                                  +            "value": 1024.768,
                                                                           },
                                                                  -        'sample_wds_stem': 'basic_test',
                                                                  -        'test_parquet': True,
                                                                  +        "sample_wds_stem": "basic_test",
                                                                  +        "test_parquet": True,
                                                                       },
                                                                       {
                                                                  -        'SampleType': NumpyTestSample,
                                                                  -        'sample_data':
                                                                  -        {
                                                                  -            'label': 9_001,
                                                                  -            'image': np.random.randn( 1024, 1024 ),
                                                                  +        "SampleType": NumpyTestSample,
                                                                  +        "sample_data": {
                                                                  +            "label": 9_001,
                                                                  +            "image": np.random.randn(1024, 1024),
                                                                           },
                                                                  -        'sample_wds_stem': 'numpy_test',
                                                                  -        'test_parquet': False,
                                                                  +        "sample_wds_stem": "numpy_test",
                                                                  +        "test_parquet": False,
                                                                       },
                                                                       {
                                                                  -        'SampleType': BasicTestSampleDecorated,
                                                                  -        'sample_data': {
                                                                  -            'name': 'Hello, world!',
                                                                  -            'position': 42,
                                                                  -            'value': 1024.768,
                                                                  +        "SampleType": BasicTestSampleDecorated,
                                                                  +        "sample_data": {
                                                                  +            "name": "Hello, world!",
                                                                  +            "position": 42,
                                                                  +            "value": 1024.768,
                                                                           },
                                                                  -        'sample_wds_stem': 'basic_test_decorated',
                                                                  -        'test_parquet': True,
                                                                  +        "sample_wds_stem": "basic_test_decorated",
                                                                  +        "test_parquet": True,
                                                                       },
                                                                       {
                                                                  -        'SampleType': NumpyTestSampleDecorated,
                                                                  -        'sample_data':
                                                                  -        {
                                                                  -            'label': 9_001,
                                                                  -            'image': np.random.randn( 1024, 1024 ),
                                                                  +        "SampleType": NumpyTestSampleDecorated,
                                                                  +        "sample_data": {
                                                                  +            "label": 9_001,
                                                                  +            "image": np.random.randn(1024, 1024),
                                                                           },
                                                                  -        'sample_wds_stem': 'numpy_test_decorated',
                                                                  -        'test_parquet': False,
                                                                  +        "sample_wds_stem": "numpy_test_decorated",
                                                                  +        "test_parquet": False,
                                                                       },
                                                                       {
                                                                  -        'SampleType': NumpyOptionalSampleDecorated,
                                                                  -        'sample_data':
                                                                  -        {
                                                                  -            'label': 9_001,
                                                                  -            'image': np.random.randn( 1024, 1024 ),
                                                                  -            'embeddings': np.random.randn( 512 ),
                                                                  +        "SampleType": NumpyOptionalSampleDecorated,
                                                                  +        "sample_data": {
                                                                  +            "label": 9_001,
                                                                  +            "image": np.random.randn(1024, 1024),
                                                                  +            "embeddings": np.random.randn(512),
                                                                           },
                                                                  -        'sample_wds_stem': 'numpy_optional_decorated',
                                                                  -        'test_parquet': False,
                                                                  +        "sample_wds_stem": "numpy_optional_decorated",
                                                                  +        "test_parquet": False,
                                                                       },
                                                                       {
                                                                  -        'SampleType': NumpyOptionalSampleDecorated,
                                                                  -        'sample_data':
                                                                  -        {
                                                                  -            'label': 9_001,
                                                                  -            'image': np.random.randn( 1024, 1024 ),
                                                                  -            'embeddings': None,
                                                                  +        "SampleType": NumpyOptionalSampleDecorated,
                                                                  +        "sample_data": {
                                                                  +            "label": 9_001,
                                                                  +            "image": np.random.randn(1024, 1024),
                                                                  +            "embeddings": None,
                                                                           },
                                                                  -        'sample_wds_stem': 'numpy_optional_decorated_none',
                                                                  -        'test_parquet': False,
                                                                  +        "sample_wds_stem": "numpy_optional_decorated_none",
                                                                  +        "test_parquet": False,
                                                                       },
                                                                   ]
                                                                   
                                                                   
                                                                   ## Tests
                                                                   
                                                                  +
                                                                   @pytest.mark.parametrize(
                                                                  -    ('SampleType', 'sample_data'),
                                                                  -    [ (case['SampleType'], case['sample_data'])
                                                                  -      for case in test_cases ]
                                                                  +    ("SampleType", "sample_data"),
                                                                  +    [(case["SampleType"], case["sample_data"]) for case in test_cases],
                                                                   )
                                                                   def test_create_sample(
                                                                  -            SampleType: Type[atdata.PackableSample],
                                                                  -            sample_data: atds.WDSRawSample,
                                                                  -        ):
                                                                  +    SampleType: Type[atdata.PackableSample],
                                                                  +    sample_data: atds.WDSRawSample,
                                                                  +):
                                                                       """Test our ability to create samples from semi-structured data"""
                                                                   
                                                                  -    sample = SampleType.from_data( sample_data )
                                                                  -    assert isinstance( sample, SampleType ), \
                                                                  -        f'Did not properly form sample for test type {SampleType}'
                                                                  +    sample = SampleType.from_data(sample_data)
                                                                  +    assert isinstance(sample, SampleType), (
                                                                  +        f"Did not properly form sample for test type {SampleType}"
                                                                  +    )
                                                                   
                                                                       for k, v in sample_data.items():
                                                                           cur_assertion: bool
                                                                  -        if isinstance( v, np.ndarray ):
                                                                  -            cur_assertion = np.all( getattr( sample, k ) == v )
                                                                  +        if isinstance(v, np.ndarray):
                                                                  +            cur_assertion = np.all(getattr(sample, k) == v)
                                                                           else:
                                                                  -            cur_assertion = getattr( sample, k ) == v
                                                                  -        assert cur_assertion, \
                                                                  -            f'Did not properly incorporate property {k} of test type {SampleType}'
                                                                  +            cur_assertion = getattr(sample, k) == v
                                                                  +        assert cur_assertion, (
                                                                  +            f"Did not properly incorporate property {k} of test type {SampleType}"
                                                                  +        )
                                                                   
                                                                   
                                                                   @pytest.mark.parametrize(
                                                                  -    ('SampleType', 'sample_data', 'sample_wds_stem'),
                                                                  -    [ (case['SampleType'], case['sample_data'], case['sample_wds_stem'])
                                                                  -      for case in test_cases ]
                                                                  +    ("SampleType", "sample_data", "sample_wds_stem"),
                                                                  +    [
                                                                  +        (case["SampleType"], case["sample_data"], case["sample_wds_stem"])
                                                                  +        for case in test_cases
                                                                  +    ],
                                                                   )
                                                                   def test_wds(
                                                                  -            SampleType: Type[atdata.PackableSample],
                                                                  -            sample_data: atds.WDSRawSample,
                                                                  -            sample_wds_stem: str,
                                                                  -            tmp_path
                                                                  -        ):
                                                                  +    SampleType: Type[atdata.PackableSample],
                                                                  +    sample_data: atds.WDSRawSample,
                                                                  +    sample_wds_stem: str,
                                                                  +    tmp_path,
                                                                  +):
                                                                       """Test our ability to write samples as `WebDatasets` to disk"""
                                                                   
                                                                       ## Testing hyperparameters
                                                                  @@ -170,179 +176,190 @@ def test_wds(
                                                                   
                                                                       ## Write sharded dataset
                                                                   
                                                                  -    file_pattern = (
                                                                  -        tmp_path
                                                                  -        / (f'{sample_wds_stem}' + '-{shard_id}.tar')
                                                                  -    ).as_posix()
                                                                  -    file_wds_pattern = file_pattern.format( shard_id = '%06d' )
                                                                  +    file_pattern = (tmp_path / (f"{sample_wds_stem}" + "-{shard_id}.tar")).as_posix()
                                                                  +    file_wds_pattern = file_pattern.format(shard_id="%06d")
                                                                   
                                                                       with wds.writer.ShardWriter(
                                                                  -        pattern = file_wds_pattern,
                                                                  -        maxcount = shard_maxcount,
                                                                  +        pattern=file_wds_pattern,
                                                                  +        maxcount=shard_maxcount,
                                                                       ) as sink:
                                                                  -        
                                                                  -        for i_sample in range( n_copies ):
                                                                  -            new_sample = SampleType.from_data( sample_data )
                                                                  -            assert isinstance( new_sample, SampleType ), \
                                                                  -                f'Did not properly form sample for test type {SampleType}'
                                                                  +        for i_sample in range(n_copies):
                                                                  +            new_sample = SampleType.from_data(sample_data)
                                                                  +            assert isinstance(new_sample, SampleType), (
                                                                  +                f"Did not properly form sample for test type {SampleType}"
                                                                  +            )
                                                                   
                                                                  -            sink.write( new_sample.as_wds )
                                                                  -    
                                                                  +            sink.write(new_sample.as_wds)
                                                                   
                                                                       ## Ordered
                                                                   
                                                                       # Read first shard, no batches
                                                                   
                                                                  -    first_filename = file_pattern.format( shard_id = f'{0:06d}' )
                                                                  -    dataset = atdata.Dataset[SampleType]( first_filename )
                                                                  +    first_filename = file_pattern.format(shard_id=f"{0:06d}")
                                                                  +    dataset = atdata.Dataset[SampleType](first_filename)
                                                                   
                                                                       iterations_run = 0
                                                                  -    for i_iterate, cur_sample in enumerate( dataset.ordered( batch_size = None ) ):
                                                                  +    for i_iterate, cur_sample in enumerate(dataset.ordered(batch_size=None)):
                                                                  +        assert isinstance(cur_sample, SampleType), (
                                                                  +            f"Single sample for {SampleType} written to `wds` is of wrong type"
                                                                  +        )
                                                                   
                                                                  -        assert isinstance( cur_sample, SampleType ), \
                                                                  -            f'Single sample for {SampleType} written to `wds` is of wrong type'
                                                                  -        
                                                                           # Check sample values
                                                                  -        
                                                                  +
                                                                           for k, v in sample_data.items():
                                                                  -            if isinstance( v, np.ndarray ):
                                                                  -                is_correct = np.all( getattr( cur_sample, k ) == v )
                                                                  +            if isinstance(v, np.ndarray):
                                                                  +                is_correct = np.all(getattr(cur_sample, k) == v)
                                                                               else:
                                                                  -                is_correct = getattr( cur_sample, k ) == v
                                                                  -            assert is_correct, \
                                                                  -                f'{SampleType}: Incorrect sample value found for {k} - {type( getattr( cur_sample, k ) )}'
                                                                  +                is_correct = getattr(cur_sample, k) == v
                                                                  +            assert is_correct, (
                                                                  +                f"{SampleType}: Incorrect sample value found for {k} - {type(getattr(cur_sample, k))}"
                                                                  +            )
                                                                   
                                                                           iterations_run += 1
                                                                           if iterations_run >= n_iterate:
                                                                               break
                                                                   
                                                                  -    assert iterations_run == n_iterate, \
                                                                  +    assert iterations_run == n_iterate, (
                                                                           f"Only found {iterations_run} samples, not {n_iterate}"
                                                                  +    )
                                                                   
                                                                       # Read all shards, batches
                                                                   
                                                                  -    start_id = f'{0:06d}'
                                                                  -    end_id = f'{9:06d}'
                                                                  -    first_filename = file_pattern.format( shard_id = '{' + start_id + '..' + end_id + '}' )
                                                                  -    dataset = atdata.Dataset[SampleType]( first_filename )
                                                                  +    start_id = f"{0:06d}"
                                                                  +    end_id = f"{9:06d}"
                                                                  +    first_filename = file_pattern.format(shard_id="{" + start_id + ".." + end_id + "}")
                                                                  +    dataset = atdata.Dataset[SampleType](first_filename)
                                                                   
                                                                       iterations_run = 0
                                                                  -    for i_iterate, cur_batch in enumerate( dataset.ordered( batch_size = batch_size ) ):
                                                                  -        
                                                                  -        assert isinstance( cur_batch, atdata.SampleBatch ), \
                                                                  -            f'{SampleType}: Batch sample is not correctly a batch'
                                                                  -        
                                                                  -        assert cur_batch.sample_type == SampleType, \
                                                                  -            f'{SampleType}: Batch `sample_type` is incorrect type'
                                                                  -        
                                                                  +    for i_iterate, cur_batch in enumerate(dataset.ordered(batch_size=batch_size)):
                                                                  +        assert isinstance(cur_batch, atdata.SampleBatch), (
                                                                  +            f"{SampleType}: Batch sample is not correctly a batch"
                                                                  +        )
                                                                  +
                                                                  +        assert cur_batch.sample_type == SampleType, (
                                                                  +            f"{SampleType}: Batch `sample_type` is incorrect type"
                                                                  +        )
                                                                  +
                                                                           if i_iterate == 0:
                                                                  -            cur_n = len( cur_batch.samples )
                                                                  -            assert cur_n == batch_size, \
                                                                  -                f'{SampleType}: Batch has {cur_n} samples, not {batch_size}'
                                                                  -        
                                                                  -        assert isinstance( cur_batch.samples[0], SampleType ), \
                                                                  -            f'{SampleType}: Batch sample of wrong type ({type( cur_batch.samples[0])})'
                                                                  -        
                                                                  +            cur_n = len(cur_batch.samples)
                                                                  +            assert cur_n == batch_size, (
                                                                  +                f"{SampleType}: Batch has {cur_n} samples, not {batch_size}"
                                                                  +            )
                                                                  +
                                                                  +        assert isinstance(cur_batch.samples[0], SampleType), (
                                                                  +            f"{SampleType}: Batch sample of wrong type ({type(cur_batch.samples[0])})"
                                                                  +        )
                                                                  +
                                                                           # Check batch values
                                                                           for k, v in sample_data.items():
                                                                  -            cur_batch_data = getattr( cur_batch, k )
                                                                  -
                                                                  -            if isinstance( v, np.ndarray ):
                                                                  -                assert isinstance( cur_batch_data, np.ndarray ), \
                                                                  -                    f'{SampleType}: `NDArray` not carried through to batch'
                                                                  -                
                                                                  -                is_correct = all( 
                                                                  -                    [ np.all( cur_batch_data[i] == v )
                                                                  -                      for i in range( cur_batch_data.shape[0] ) ]
                                                                  +            cur_batch_data = getattr(cur_batch, k)
                                                                  +
                                                                  +            if isinstance(v, np.ndarray):
                                                                  +                assert isinstance(cur_batch_data, np.ndarray), (
                                                                  +                    f"{SampleType}: `NDArray` not carried through to batch"
                                                                  +                )
                                                                  +
                                                                  +                is_correct = all(
                                                                  +                    [
                                                                  +                        np.all(cur_batch_data[i] == v)
                                                                  +                        for i in range(cur_batch_data.shape[0])
                                                                  +                    ]
                                                                                   )
                                                                   
                                                                               else:
                                                                  -                is_correct = all( 
                                                                  -                    [ cur_batch_data[i] == v
                                                                  -                      for i in range( len( cur_batch_data ) ) ]
                                                                  +                is_correct = all(
                                                                  +                    [cur_batch_data[i] == v for i in range(len(cur_batch_data))]
                                                                                   )
                                                                   
                                                                  -            assert is_correct, \
                                                                  -                f'{SampleType}: Incorrect sample value found for {k}'
                                                                  +            assert is_correct, f"{SampleType}: Incorrect sample value found for {k}"
                                                                   
                                                                           iterations_run += 1
                                                                           if iterations_run >= n_iterate:
                                                                               break
                                                                   
                                                                  -    assert iterations_run == n_iterate, \
                                                                  +    assert iterations_run == n_iterate, (
                                                                           f"Only found {iterations_run} samples, not {n_iterate}"
                                                                  -    
                                                                  +    )
                                                                   
                                                                       ## Shuffled
                                                                   
                                                                       # Read first shard, no batches
                                                                   
                                                                  -    first_filename = file_pattern.format( shard_id = f'{0:06d}' )
                                                                  -    dataset = atdata.Dataset[SampleType]( first_filename )
                                                                  +    first_filename = file_pattern.format(shard_id=f"{0:06d}")
                                                                  +    dataset = atdata.Dataset[SampleType](first_filename)
                                                                   
                                                                       iterations_run = 0
                                                                  -    for i_iterate, cur_sample in enumerate( dataset.shuffled( batch_size = None ) ):
                                                                  -        
                                                                  -        assert isinstance( cur_sample, SampleType ), \
                                                                  -            f'Single sample for {SampleType} written to `wds` is of wrong type'
                                                                  -        
                                                                  +    for i_iterate, cur_sample in enumerate(dataset.shuffled(batch_size=None)):
                                                                  +        assert isinstance(cur_sample, SampleType), (
                                                                  +            f"Single sample for {SampleType} written to `wds` is of wrong type"
                                                                  +        )
                                                                  +
                                                                           iterations_run += 1
                                                                           if iterations_run >= n_iterate:
                                                                               break
                                                                   
                                                                  -    assert iterations_run == n_iterate, \
                                                                  +    assert iterations_run == n_iterate, (
                                                                           f"Only found {iterations_run} samples, not {n_iterate}"
                                                                  +    )
                                                                   
                                                                       # Read all shards, batches
                                                                   
                                                                  -    start_id = f'{0:06d}'
                                                                  -    end_id = f'{9:06d}'
                                                                  -    first_filename = file_pattern.format( shard_id = '{' + start_id + '..' + end_id + '}' )
                                                                  -    dataset = atdata.Dataset[SampleType]( first_filename )
                                                                  +    start_id = f"{0:06d}"
                                                                  +    end_id = f"{9:06d}"
                                                                  +    first_filename = file_pattern.format(shard_id="{" + start_id + ".." + end_id + "}")
                                                                  +    dataset = atdata.Dataset[SampleType](first_filename)
                                                                   
                                                                       iterations_run = 0
                                                                  -    for i_iterate, cur_sample in enumerate( dataset.shuffled( batch_size = batch_size ) ):
                                                                  -        
                                                                  -        assert isinstance( cur_sample, atdata.SampleBatch ), \
                                                                  -            f'{SampleType}: Batch sample is not correctly a batch'
                                                                  -        
                                                                  -        assert cur_sample.sample_type == SampleType, \
                                                                  -            f'{SampleType}: Batch `sample_type` is incorrect type'
                                                                  -        
                                                                  +    for i_iterate, cur_sample in enumerate(dataset.shuffled(batch_size=batch_size)):
                                                                  +        assert isinstance(cur_sample, atdata.SampleBatch), (
                                                                  +            f"{SampleType}: Batch sample is not correctly a batch"
                                                                  +        )
                                                                  +
                                                                  +        assert cur_sample.sample_type == SampleType, (
                                                                  +            f"{SampleType}: Batch `sample_type` is incorrect type"
                                                                  +        )
                                                                  +
                                                                           if i_iterate == 0:
                                                                  -            cur_n = len( cur_sample.samples )
                                                                  -            assert cur_n == batch_size, \
                                                                  -                f'{SampleType}: Batch has {cur_n} samples, not {batch_size}'
                                                                  -        
                                                                  -        assert isinstance( cur_sample.samples[0], SampleType ), \
                                                                  -            f'{SampleType}: Batch sample of wrong type ({type( cur_sample.samples[0])})'
                                                                  -        
                                                                  +            cur_n = len(cur_sample.samples)
                                                                  +            assert cur_n == batch_size, (
                                                                  +                f"{SampleType}: Batch has {cur_n} samples, not {batch_size}"
                                                                  +            )
                                                                  +
                                                                  +        assert isinstance(cur_sample.samples[0], SampleType), (
                                                                  +            f"{SampleType}: Batch sample of wrong type ({type(cur_sample.samples[0])})"
                                                                  +        )
                                                                  +
                                                                           iterations_run += 1
                                                                           if iterations_run >= n_iterate:
                                                                               break
                                                                   
                                                                  -    assert iterations_run == n_iterate, \
                                                                  +    assert iterations_run == n_iterate, (
                                                                           f"Only found {iterations_run} samples, not {n_iterate}"
                                                                  +    )
                                                                  +
                                                                   
                                                                   #
                                                                   
                                                                  +
                                                                   @pytest.mark.parametrize(
                                                                  -    ('SampleType', 'sample_data', 'sample_wds_stem', 'test_parquet'),
                                                                  -    [ (
                                                                  -        case['SampleType'],
                                                                  -        case['sample_data'],
                                                                  -        case['sample_wds_stem'],
                                                                  -        case['test_parquet']
                                                                  -      )
                                                                  -      for case in test_cases ]
                                                                  +    ("SampleType", "sample_data", "sample_wds_stem", "test_parquet"),
                                                                  +    [
                                                                  +        (
                                                                  +            case["SampleType"],
                                                                  +            case["sample_data"],
                                                                  +            case["sample_wds_stem"],
                                                                  +            case["test_parquet"],
                                                                  +        )
                                                                  +        for case in test_cases
                                                                  +    ],
                                                                   )
                                                                   def test_parquet_export(
                                                                  -            SampleType: Type[atdata.PackableSample],
                                                                  -            sample_data: atds.WDSRawSample,
                                                                  -            sample_wds_stem: str,
                                                                  -            test_parquet: bool,
                                                                  -            tmp_path
                                                                  -        ):
                                                                  +    SampleType: Type[atdata.PackableSample],
                                                                  +    sample_data: atds.WDSRawSample,
                                                                  +    sample_wds_stem: str,
                                                                  +    test_parquet: bool,
                                                                  +    tmp_path,
                                                                  +):
                                                                       """Test our ability to export a dataset to `parquet` format"""
                                                                   
                                                                       # Skip irrelevant test cases
                                                                  @@ -356,20 +373,20 @@ def test_parquet_export(
                                                                   
                                                                       ## Start out by writing tar dataset
                                                                   
                                                                  -    wds_filename = (tmp_path / f'{sample_wds_stem}.tar').as_posix()
                                                                  -    with wds.writer.TarWriter( wds_filename ) as sink:
                                                                  -        for _ in range( n_copies_dataset ):
                                                                  -            new_sample = SampleType.from_data( sample_data )
                                                                  -            sink.write( new_sample.as_wds )
                                                                  -    
                                                                  +    wds_filename = (tmp_path / f"{sample_wds_stem}.tar").as_posix()
                                                                  +    with wds.writer.TarWriter(wds_filename) as sink:
                                                                  +        for _ in range(n_copies_dataset):
                                                                  +            new_sample = SampleType.from_data(sample_data)
                                                                  +            sink.write(new_sample.as_wds)
                                                                  +
                                                                       ## Now export to `parquet`
                                                                   
                                                                  -    dataset = atdata.Dataset[SampleType]( wds_filename )
                                                                  -    parquet_filename = tmp_path / f'{sample_wds_stem}.parquet'
                                                                  -    dataset.to_parquet( parquet_filename )
                                                                  +    dataset = atdata.Dataset[SampleType](wds_filename)
                                                                  +    parquet_filename = tmp_path / f"{sample_wds_stem}.parquet"
                                                                  +    dataset.to_parquet(parquet_filename)
                                                                   
                                                                  -    parquet_filename = tmp_path / f'{sample_wds_stem}-segments.parquet'
                                                                  -    dataset.to_parquet( parquet_filename, maxcount = n_per_file )
                                                                  +    parquet_filename = tmp_path / f"{sample_wds_stem}-segments.parquet"
                                                                  +    dataset.to_parquet(parquet_filename, maxcount=n_per_file)
                                                                   
                                                                   
                                                                   ##
                                                                  @@ -384,6 +401,7 @@ def test_batch_aggregate_empty():
                                                                   
                                                                   def test_sample_batch_attribute_error():
                                                                       """Test SampleBatch raises AttributeError for non-existent attributes."""
                                                                  +
                                                                       @atdata.packable
                                                                       class SimpleSample:
                                                                           name: str
                                                                  @@ -398,6 +416,7 @@ class SimpleSample:
                                                                   
                                                                   def test_sample_batch_type_property():
                                                                       """Test SampleBatch.sample_type property."""
                                                                  +
                                                                       @atdata.packable
                                                                       class TypedSample:
                                                                           data: str
                                                                  @@ -410,6 +429,7 @@ class TypedSample:
                                                                   
                                                                   def test_dataset_batch_type_property(tmp_path):
                                                                       """Test Dataset.batch_type property."""
                                                                  +
                                                                       @atdata.packable
                                                                       class BatchTypeSample:
                                                                           value: int
                                                                  @@ -429,6 +449,7 @@ class BatchTypeSample:
                                                                   
                                                                   def test_dataset_shard_list_property(tmp_path):
                                                                       """Test Dataset.shard_list property returns list of shard URLs."""
                                                                  +
                                                                       @atdata.packable
                                                                       class ShardListSample:
                                                                           value: int
                                                                  @@ -474,14 +495,15 @@ class MetadataSample:
                                                                   
                                                                       with patch("atdata.dataset.requests.get", return_value=mock_response) as mock_get:
                                                                           dataset = atdata.Dataset[MetadataSample](
                                                                  -            wds_filename,
                                                                  -            metadata_url="http://example.com/metadata.msgpack"
                                                                  +            wds_filename, metadata_url="http://example.com/metadata.msgpack"
                                                                           )
                                                                   
                                                                           # First call should fetch
                                                                           metadata = dataset.metadata
                                                                           assert metadata == mock_metadata
                                                                  -        mock_get.assert_called_once_with("http://example.com/metadata.msgpack", stream=True)
                                                                  +        mock_get.assert_called_once_with(
                                                                  +            "http://example.com/metadata.msgpack", stream=True
                                                                  +        )
                                                                   
                                                                           # Second call should use cache
                                                                           metadata2 = dataset.metadata
                                                                  @@ -491,6 +513,7 @@ class MetadataSample:
                                                                   
                                                                   def test_dataset_metadata_property_none(tmp_path):
                                                                       """Test Dataset.metadata returns None when no metadata_url is set."""
                                                                  +
                                                                       @atdata.packable
                                                                       class NoMetadataSample:
                                                                           value: int
                                                                  @@ -506,6 +529,7 @@ class NoMetadataSample:
                                                                   
                                                                   def test_parquet_export_with_remainder(tmp_path):
                                                                       """Test parquet export with maxcount that doesn't divide evenly."""
                                                                  +
                                                                       @atdata.packable
                                                                       class RemainderSample:
                                                                           name: str
                                                                  @@ -527,6 +551,7 @@ class RemainderSample:
                                                                   
                                                                       # Should have created 3 segment files
                                                                       import pandas as pd
                                                                  +
                                                                       segment_files = list(tmp_path.glob("remainder_output-*.parquet"))
                                                                       assert len(segment_files) == 3
                                                                   
                                                                  @@ -586,6 +611,7 @@ def extract_view(s: SourceSample) -> ViewSample:
                                                                   
                                                                   def test_from_bytes_invalid_msgpack():
                                                                       """Test from_bytes raises on invalid msgpack data."""
                                                                  +
                                                                       @atdata.packable
                                                                       class SimpleSample:
                                                                           value: int
                                                                  @@ -596,12 +622,14 @@ class SimpleSample:
                                                                   
                                                                   def test_from_bytes_missing_field():
                                                                       """Test from_bytes raises when required field is missing."""
                                                                  +
                                                                       @atdata.packable
                                                                       class RequiredFieldSample:
                                                                           name: str
                                                                           count: int
                                                                   
                                                                       import ormsgpack
                                                                  +
                                                                       # Only provide 'name', missing 'count'
                                                                       incomplete_data = ormsgpack.packb({"name": "test"})
                                                                   
                                                                  @@ -611,6 +639,7 @@ class RequiredFieldSample:
                                                                   
                                                                   def test_wrap_missing_msgpack_key(tmp_path):
                                                                       """Test wrap raises ValueError on sample missing msgpack key."""
                                                                  +
                                                                       @atdata.packable
                                                                       class WrapTestSample:
                                                                           value: int
                                                                  @@ -629,6 +658,7 @@ class WrapTestSample:
                                                                   
                                                                   def test_wrap_wrong_msgpack_type(tmp_path):
                                                                       """Test wrap raises ValueError when msgpack value is not bytes."""
                                                                  +
                                                                       @atdata.packable
                                                                       class WrapTypeSample:
                                                                           value: int
                                                                  @@ -647,6 +677,7 @@ class WrapTypeSample:
                                                                   
                                                                   def test_wrap_corrupted_msgpack(tmp_path):
                                                                       """Test wrap raises on corrupted msgpack bytes."""
                                                                  +
                                                                       @atdata.packable
                                                                       class CorruptedSample:
                                                                           value: int
                                                                  @@ -665,6 +696,7 @@ class CorruptedSample:
                                                                   
                                                                   def test_dataset_nonexistent_file():
                                                                       """Test Dataset raises on nonexistent tar file during iteration."""
                                                                  +
                                                                       @atdata.packable
                                                                       class NonexistentSample:
                                                                           value: int
                                                                  @@ -681,6 +713,7 @@ class NonexistentSample:
                                                                   
                                                                   def test_dataset_invalid_batch_size(tmp_path):
                                                                       """Test Dataset raises on invalid batch_size values."""
                                                                  +
                                                                       @atdata.packable
                                                                       class BatchSizeSample:
                                                                           value: int
                                                                  @@ -798,6 +831,7 @@ def test_dictsample_repr():
                                                                   
                                                                   def test_dictsample_dataset_iteration(tmp_path):
                                                                       """Test Dataset[DictSample] can iterate over data."""
                                                                  +
                                                                       # Create typed sample data
                                                                       @atdata.packable
                                                                       class SourceSample:
                                                                  @@ -824,6 +858,7 @@ class SourceSample:
                                                                   
                                                                   def test_dictsample_to_typed_via_as_type(tmp_path):
                                                                       """Test converting DictSample dataset to typed via as_type."""
                                                                  +
                                                                       @atdata.packable
                                                                       class TypedSample:
                                                                           text: str
                                                                  @@ -854,6 +889,7 @@ class TypedSample:
                                                                   
                                                                   def test_packable_auto_registers_dictsample_lens():
                                                                       """Test @packable decorator auto-registers lens from DictSample."""
                                                                  +
                                                                       @atdata.packable
                                                                       class AutoLensSample:
                                                                           name: str
                                                                  @@ -874,6 +910,7 @@ class AutoLensSample:
                                                                   
                                                                   def test_dictsample_batched_iteration(tmp_path):
                                                                       """Test Dataset[DictSample] works with batched iteration."""
                                                                  +
                                                                       @atdata.packable
                                                                       class BatchSource:
                                                                           text: str
                                                                  @@ -899,4 +936,4 @@ class BatchSource:
                                                                       assert batch_count == 3  # 10 samples / 4 per batch = 2 full + 1 partial
                                                                   
                                                                   
                                                                  -##
                                                                  \ No newline at end of file
                                                                  +##
                                                                  diff --git a/tests/test_helpers.py b/tests/test_helpers.py
                                                                  index b4cf3b6..b80977d 100644
                                                                  --- a/tests/test_helpers.py
                                                                  +++ b/tests/test_helpers.py
                                                                  @@ -9,15 +9,18 @@
                                                                   class TestArraySerialization:
                                                                       """Test array_to_bytes and bytes_to_array round-trip serialization."""
                                                                   
                                                                  -    @pytest.mark.parametrize("dtype", [
                                                                  -        np.float32,
                                                                  -        np.float64,
                                                                  -        np.int32,
                                                                  -        np.int64,
                                                                  -        np.uint8,
                                                                  -        np.bool_,
                                                                  -        np.complex64,
                                                                  -    ])
                                                                  +    @pytest.mark.parametrize(
                                                                  +        "dtype",
                                                                  +        [
                                                                  +            np.float32,
                                                                  +            np.float64,
                                                                  +            np.int32,
                                                                  +            np.int64,
                                                                  +            np.uint8,
                                                                  +            np.bool_,
                                                                  +            np.complex64,
                                                                  +        ],
                                                                  +    )
                                                                       def test_dtype_preservation(self, dtype):
                                                                           """Verify dtype is preserved through serialization."""
                                                                           original = np.array([1, 2, 3], dtype=dtype)
                                                                  @@ -27,12 +30,15 @@ def test_dtype_preservation(self, dtype):
                                                                           assert restored.dtype == original.dtype
                                                                           np.testing.assert_array_equal(restored, original)
                                                                   
                                                                  -    @pytest.mark.parametrize("shape", [
                                                                  -        (10,),
                                                                  -        (3, 4),
                                                                  -        (2, 3, 4),
                                                                  -        (1, 1, 1, 1),
                                                                  -    ])
                                                                  +    @pytest.mark.parametrize(
                                                                  +        "shape",
                                                                  +        [
                                                                  +            (10,),
                                                                  +            (3, 4),
                                                                  +            (2, 3, 4),
                                                                  +            (1, 1, 1, 1),
                                                                  +        ],
                                                                  +    )
                                                                       def test_shape_preservation(self, shape):
                                                                           """Verify shape is preserved through serialization."""
                                                                           original = np.random.rand(*shape).astype(np.float32)
                                                                  @@ -73,7 +79,7 @@ def test_contiguous_and_noncontiguous(self):
                                                                           original = np.random.rand(10, 10).astype(np.float32)
                                                                           non_contiguous = original[::2, ::2]  # Strided view
                                                                   
                                                                  -        assert not non_contiguous.flags['C_CONTIGUOUS']
                                                                  +        assert not non_contiguous.flags["C_CONTIGUOUS"]
                                                                   
                                                                           serialized = array_to_bytes(non_contiguous)
                                                                           restored = bytes_to_array(serialized)
                                                                  diff --git a/tests/test_hf_api.py b/tests/test_hf_api.py
                                                                  index 2de29e3..ace7ec6 100644
                                                                  --- a/tests/test_hf_api.py
                                                                  +++ b/tests/test_hf_api.py
                                                                  @@ -855,7 +855,10 @@ def test_indexed_path_creates_s3source_with_credentials(self):
                                                                           mock_index = Mock()
                                                                           mock_index.data_store = mock_store
                                                                           mock_entry = Mock()
                                                                  -        mock_entry.data_urls = ["s3://my-bucket/train-000.tar", "s3://my-bucket/train-001.tar"]
                                                                  +        mock_entry.data_urls = [
                                                                  +            "s3://my-bucket/train-000.tar",
                                                                  +            "s3://my-bucket/train-001.tar",
                                                                  +        ]
                                                                           mock_entry.schema_ref = "local://schemas/test@1.0.0"
                                                                           mock_index.get_dataset.return_value = mock_entry
                                                                   
                                                                  diff --git a/tests/test_integration.py b/tests/test_integration.py
                                                                  index 39fb3e9..32d22e5 100644
                                                                  --- a/tests/test_integration.py
                                                                  +++ b/tests/test_integration.py
                                                                  @@ -16,6 +16,7 @@
                                                                   @atdata.packable
                                                                   class IntegrationTestSample:
                                                                       """Sample type for integration tests."""
                                                                  +
                                                                       name: str
                                                                       value: int
                                                                   
                                                                  @@ -39,8 +40,16 @@ def test_promote_preserves_data_urls(self, tmp_path):
                                                                               "name": "test_integration.IntegrationTestSample",
                                                                               "version": "1.0.0",
                                                                               "fields": [
                                                                  -                {"name": "name", "fieldType": {"$type": "local#primitive", "primitive": "str"}, "optional": False},
                                                                  -                {"name": "value", "fieldType": {"$type": "local#primitive", "primitive": "int"}, "optional": False},
                                                                  +                {
                                                                  +                    "name": "name",
                                                                  +                    "fieldType": {"$type": "local#primitive", "primitive": "str"},
                                                                  +                    "optional": False,
                                                                  +                },
                                                                  +                {
                                                                  +                    "name": "value",
                                                                  +                    "fieldType": {"$type": "local#primitive", "primitive": "int"},
                                                                  +                    "optional": False,
                                                                  +                },
                                                                               ],
                                                                           }
                                                                   
                                                                  @@ -86,7 +95,11 @@ def test_promote_transfers_schema_metadata(self, tmp_path):
                                                                               "version": "2.1.0",
                                                                               "description": "A sample with specific version",
                                                                               "fields": [
                                                                  -                {"name": "value", "fieldType": {"$type": "local#primitive", "primitive": "int"}, "optional": False},
                                                                  +                {
                                                                  +                    "name": "value",
                                                                  +                    "fieldType": {"$type": "local#primitive", "primitive": "int"},
                                                                  +                    "optional": False,
                                                                  +                },
                                                                               ],
                                                                           }
                                                                   
                                                                  @@ -97,7 +110,9 @@ def test_promote_transfers_schema_metadata(self, tmp_path):
                                                                   
                                                                               with patch("atdata.atmosphere.DatasetPublisher") as MockPublisher:
                                                                                   mock_publisher = MockPublisher.return_value
                                                                  -                mock_publisher.publish_with_urls.return_value = Mock(__str__=lambda s: "at://result")
                                                                  +                mock_publisher.publish_with_urls.return_value = Mock(
                                                                  +                    __str__=lambda s: "at://result"
                                                                  +                )
                                                                   
                                                                                   promote_to_atmosphere(local_entry, mock_local_index, mock_client)
                                                                   
                                                                  @@ -173,7 +188,7 @@ def test_version_mismatch_creates_new_schema(self):
                                                                                       "value": {
                                                                                           "name": "test_integration.IntegrationTestSample",
                                                                                           "version": "1.0.0",  # Different version
                                                                  -                    }
                                                                  +                    },
                                                                                   }
                                                                               ]
                                                                   
                                                                  diff --git a/tests/test_integration_atmosphere.py b/tests/test_integration_atmosphere.py
                                                                  index 70ec5aa..27771b8 100644
                                                                  --- a/tests/test_integration_atmosphere.py
                                                                  +++ b/tests/test_integration_atmosphere.py
                                                                  @@ -33,6 +33,7 @@
                                                                   @atdata.packable
                                                                   class AtmoSample:
                                                                       """Sample for atmosphere tests."""
                                                                  +
                                                                       name: str
                                                                       value: int
                                                                   
                                                                  @@ -40,6 +41,7 @@ class AtmoSample:
                                                                   @atdata.packable
                                                                   class AtmoNDArraySample:
                                                                       """Sample with NDArray for atmosphere tests."""
                                                                  +
                                                                       label: str
                                                                       data: NDArray
                                                                   
                                                                  @@ -84,10 +86,14 @@ def test_login_publish_schema_publish_dataset(self, mock_atproto_client):
                                                                           """Full workflow: login → publish schema → publish dataset."""
                                                                           # Setup mock responses
                                                                           schema_response = Mock()
                                                                  -        schema_response.uri = f"at://did:plc:integration123/{LEXICON_NAMESPACE}.sampleSchema/schema123"
                                                                  +        schema_response.uri = (
                                                                  +            f"at://did:plc:integration123/{LEXICON_NAMESPACE}.sampleSchema/schema123"
                                                                  +        )
                                                                   
                                                                           dataset_response = Mock()
                                                                  -        dataset_response.uri = f"at://did:plc:integration123/{LEXICON_NAMESPACE}.dataset/dataset456"
                                                                  +        dataset_response.uri = (
                                                                  +            f"at://did:plc:integration123/{LEXICON_NAMESPACE}.dataset/dataset456"
                                                                  +        )
                                                                   
                                                                           mock_atproto_client.com.atproto.repo.create_record.side_effect = [
                                                                               schema_response,
                                                                  @@ -135,7 +141,9 @@ def test_login_with_session_restores_auth(self, mock_atproto_client):
                                                                           client.login_with_session("saved-session-string")
                                                                   
                                                                           assert client.is_authenticated
                                                                  -        mock_atproto_client.login.assert_called_with(session_string="saved-session-string")
                                                                  +        mock_atproto_client.login.assert_called_with(
                                                                  +            session_string="saved-session-string"
                                                                  +        )
                                                                   
                                                                       def test_session_round_trip(self, mock_atproto_client):
                                                                           """Export then import session should maintain auth."""
                                                                  @@ -188,8 +196,15 @@ def test_get_schema_by_uri(self, authenticated_client, mock_atproto_client):
                                                                               "name": "FoundSchema",
                                                                               "version": "2.0.0",
                                                                               "fields": [
                                                                  -                {"name": "field1", "fieldType": {"$type": f"{LEXICON_NAMESPACE}.schemaType#primitive", "primitive": "str"}, "optional": False}
                                                                  -            ]
                                                                  +                {
                                                                  +                    "name": "field1",
                                                                  +                    "fieldType": {
                                                                  +                        "$type": f"{LEXICON_NAMESPACE}.schemaType#primitive",
                                                                  +                        "primitive": "str",
                                                                  +                    },
                                                                  +                    "optional": False,
                                                                  +                }
                                                                  +            ],
                                                                           }
                                                                           mock_atproto_client.com.atproto.repo.get_record.return_value = mock_response
                                                                   
                                                                  @@ -203,14 +218,19 @@ def test_get_schema_by_uri(self, authenticated_client, mock_atproto_client):
                                                                   class TestAtmosphereIndex:
                                                                       """Tests for AtmosphereIndex AbstractIndex compliance."""
                                                                   
                                                                  -    def test_index_list_datasets_yields_entries(self, authenticated_client, mock_atproto_client):
                                                                  +    def test_index_list_datasets_yields_entries(
                                                                  +        self, authenticated_client, mock_atproto_client
                                                                  +    ):
                                                                           """list_datasets should yield AtmosphereIndexEntry objects."""
                                                                           mock_record = Mock()
                                                                           mock_record.uri = f"at://did:plc:test/{LEXICON_NAMESPACE}.dataset/d1"
                                                                           mock_record.value = {
                                                                               "name": "listed-dataset",
                                                                               "schemaRef": "at://schema",
                                                                  -            "storage": {"$type": f"{LEXICON_NAMESPACE}.storageExternal", "urls": ["s3://data.tar"]},
                                                                  +            "storage": {
                                                                  +                "$type": f"{LEXICON_NAMESPACE}.storageExternal",
                                                                  +                "urls": ["s3://data.tar"],
                                                                  +            },
                                                                           }
                                                                   
                                                                           mock_response = Mock()
                                                                  @@ -229,7 +249,10 @@ def test_entry_from_record_has_properties(self):
                                                                           record = {
                                                                               "name": "test-dataset",
                                                                               "schemaRef": "at://did:plc:schema/schema/key",
                                                                  -            "storage": {"$type": f"{LEXICON_NAMESPACE}.storageExternal", "urls": ["s3://data.tar"]},
                                                                  +            "storage": {
                                                                  +                "$type": f"{LEXICON_NAMESPACE}.storageExternal",
                                                                  +                "urls": ["s3://data.tar"],
                                                                  +            },
                                                                           }
                                                                   
                                                                           entry = AtmosphereIndexEntry("at://test/dataset/key", record)
                                                                  @@ -247,7 +270,10 @@ def test_entry_metadata_unpacking(self):
                                                                           record = {
                                                                               "name": "meta-dataset",
                                                                               "schemaRef": "at://schema",
                                                                  -            "storage": {"$type": f"{LEXICON_NAMESPACE}.storageExternal", "urls": ["s3://data.tar"]},
                                                                  +            "storage": {
                                                                  +                "$type": f"{LEXICON_NAMESPACE}.storageExternal",
                                                                  +                "urls": ["s3://data.tar"],
                                                                  +            },
                                                                               "metadata": packed_meta,
                                                                           }
                                                                   
                                                                  @@ -261,7 +287,10 @@ def test_entry_no_metadata_returns_none(self):
                                                                           record = {
                                                                               "name": "no-meta",
                                                                               "schemaRef": "at://schema",
                                                                  -            "storage": {"$type": f"{LEXICON_NAMESPACE}.storageExternal", "urls": ["s3://data.tar"]},
                                                                  +            "storage": {
                                                                  +                "$type": f"{LEXICON_NAMESPACE}.storageExternal",
                                                                  +                "urls": ["s3://data.tar"],
                                                                  +            },
                                                                           }
                                                                   
                                                                           entry = AtmosphereIndexEntry("at://test/dataset/key", record)
                                                                  @@ -300,7 +329,10 @@ def test_entry_extracts_external_urls(self):
                                                                               "schemaRef": "at://schema",
                                                                               "storage": {
                                                                                   "$type": f"{LEXICON_NAMESPACE}.storageExternal",
                                                                  -                "urls": ["https://cdn.example.com/data-000.tar", "https://cdn.example.com/data-001.tar"],
                                                                  +                "urls": [
                                                                  +                    "https://cdn.example.com/data-000.tar",
                                                                  +                    "https://cdn.example.com/data-001.tar",
                                                                  +                ],
                                                                               },
                                                                           }
                                                                   
                                                                  @@ -339,7 +371,9 @@ def test_publish_basic_schema(self, authenticated_client, mock_atproto_client):
                                                                       def test_publish_ndarray_schema(self, authenticated_client, mock_atproto_client):
                                                                           """Schema with NDArray field should publish correctly."""
                                                                           mock_response = Mock()
                                                                  -        mock_response.uri = f"at://did:plc:test/{LEXICON_NAMESPACE}.sampleSchema/ndarray"
                                                                  +        mock_response.uri = (
                                                                  +            f"at://did:plc:test/{LEXICON_NAMESPACE}.sampleSchema/ndarray"
                                                                  +        )
                                                                           mock_atproto_client.com.atproto.repo.create_record.return_value = mock_response
                                                                   
                                                                           publisher = SchemaPublisher(authenticated_client)
                                                                  @@ -438,7 +472,9 @@ def test_write_shards_requires_auth(self, mock_atproto_client):
                                                                           with pytest.raises(ValueError, match="Not authenticated"):
                                                                               store.write_shards(mock_ds, prefix="test")
                                                                   
                                                                  -    def test_write_shards_uploads_blobs(self, authenticated_client, mock_atproto_client, tmp_path):
                                                                  +    def test_write_shards_uploads_blobs(
                                                                  +        self, authenticated_client, mock_atproto_client, tmp_path
                                                                  +    ):
                                                                           """write_shards uploads each shard as a blob."""
                                                                           from atdata.atmosphere import PDSBlobStore
                                                                           import webdataset as wds
                                                                  @@ -452,12 +488,14 @@ def test_write_shards_uploads_blobs(self, authenticated_client, mock_atproto_cli
                                                                           ds = atdata.Dataset[AtmoSample](str(tar_path))
                                                                   
                                                                           # Mock upload_blob to return a blob reference
                                                                  -        authenticated_client.upload_blob = Mock(return_value={
                                                                  -            "$type": "blob",
                                                                  -            "ref": {"$link": "bafyrei123abc"},
                                                                  -            "mimeType": "application/x-tar",
                                                                  -            "size": 1024,
                                                                  -        })
                                                                  +        authenticated_client.upload_blob = Mock(
                                                                  +            return_value={
                                                                  +                "$type": "blob",
                                                                  +                "ref": {"$link": "bafyrei123abc"},
                                                                  +                "mimeType": "application/x-tar",
                                                                  +                "size": 1024,
                                                                  +            }
                                                                  +        )
                                                                   
                                                                           store = PDSBlobStore(client=authenticated_client)
                                                                           urls = store.write_shards(ds, prefix="test/v1", maxcount=100)
                                                                  @@ -473,7 +511,9 @@ def test_write_shards_uploads_blobs(self, authenticated_client, mock_atproto_cli
                                                                           # First arg should be bytes (tar data)
                                                                           assert isinstance(call_args.args[0], bytes)
                                                                   
                                                                  -    def test_read_url_transforms_at_uri(self, authenticated_client, mock_atproto_client):
                                                                  +    def test_read_url_transforms_at_uri(
                                                                  +        self, authenticated_client, mock_atproto_client
                                                                  +    ):
                                                                           """read_url transforms AT URIs to HTTP URLs."""
                                                                           from atdata.atmosphere import PDSBlobStore
                                                                   
                                                                  @@ -486,7 +526,9 @@ def test_read_url_transforms_at_uri(self, authenticated_client, mock_atproto_cli
                                                                   
                                                                           assert "https://pds.example.com" in url
                                                                           assert "bafyrei123" in url
                                                                  -        authenticated_client.get_blob_url.assert_called_once_with("did:plc:abc", "bafyrei123")
                                                                  +        authenticated_client.get_blob_url.assert_called_once_with(
                                                                  +            "did:plc:abc", "bafyrei123"
                                                                  +        )
                                                                   
                                                                       def test_read_url_passes_non_at_uri(self, authenticated_client):
                                                                           """read_url returns non-AT URIs unchanged."""
                                                                  @@ -512,10 +554,12 @@ def test_create_source_returns_blob_source(self, authenticated_client):
                                                                           from atdata._sources import BlobSource
                                                                   
                                                                           store = PDSBlobStore(client=authenticated_client)
                                                                  -        source = store.create_source([
                                                                  -            "at://did:plc:abc/blob/bafyrei111",
                                                                  -            "at://did:plc:abc/blob/bafyrei222",
                                                                  -        ])
                                                                  +        source = store.create_source(
                                                                  +            [
                                                                  +                "at://did:plc:abc/blob/bafyrei111",
                                                                  +                "at://did:plc:abc/blob/bafyrei222",
                                                                  +            ]
                                                                  +        )
                                                                   
                                                                           assert isinstance(source, BlobSource)
                                                                           assert len(source.blob_refs) == 2
                                                                  diff --git a/tests/test_integration_atmosphere_live.py b/tests/test_integration_atmosphere_live.py
                                                                  index d5b33d3..38f70e8 100644
                                                                  --- a/tests/test_integration_atmosphere_live.py
                                                                  +++ b/tests/test_integration_atmosphere_live.py
                                                                  @@ -52,7 +52,9 @@ def skip_if_no_credentials():
                                                                       """Skip test if credentials not available."""
                                                                       handle, password = get_test_credentials()
                                                                       if not handle or not password:
                                                                  -        pytest.skip("Live test credentials not configured (set ATDATA_TEST_HANDLE and ATDATA_TEST_APP_PASSWORD)")
                                                                  +        pytest.skip(
                                                                  +            "Live test credentials not configured (set ATDATA_TEST_HANDLE and ATDATA_TEST_APP_PASSWORD)"
                                                                  +        )
                                                                   
                                                                   
                                                                   ##
                                                                  @@ -62,6 +64,7 @@ def skip_if_no_credentials():
                                                                   @atdata.packable
                                                                   class LiveTestSample:
                                                                       """Simple sample for live tests."""
                                                                  +
                                                                       name: str
                                                                       value: int
                                                                   
                                                                  @@ -69,6 +72,7 @@ class LiveTestSample:
                                                                   @atdata.packable
                                                                   class LiveTestArraySample:
                                                                       """Sample with NDArray for live tests."""
                                                                  +
                                                                       label: str
                                                                       data: NDArray
                                                                   
                                                                  @@ -215,6 +219,7 @@ class TestLiveSchemaOperations:
                                                                   
                                                                       def test_publish_schema(self, live_client, unique_name):
                                                                           """Should publish a schema to ATProto."""
                                                                  +
                                                                           # Create a unique sample type for this test
                                                                           @atdata.packable
                                                                           class UniqueTestSample:
                                                                  @@ -241,6 +246,7 @@ def test_list_schemas(self, live_client):
                                                                   
                                                                       def test_publish_and_retrieve_schema(self, live_client, unique_name):
                                                                           """Should publish then retrieve a schema by URI."""
                                                                  +
                                                                           @atdata.packable
                                                                           class RetrievableTestSample:
                                                                               field1: str
                                                                  @@ -265,6 +271,7 @@ class RetrievableTestSample:
                                                                   
                                                                       def test_schema_with_ndarray_field(self, live_client, unique_name):
                                                                           """Should publish schema with NDArray field type."""
                                                                  +
                                                                           @atdata.packable
                                                                           class ArrayTestSample:
                                                                               label: str
                                                                  @@ -296,6 +303,7 @@ class TestLiveDatasetOperations:
                                                                   
                                                                       def test_publish_dataset_with_urls(self, live_client, unique_name):
                                                                           """Should publish a dataset record with external URLs."""
                                                                  +
                                                                           # First publish a schema
                                                                           @atdata.packable
                                                                           class DatasetTestSample:
                                                                  @@ -327,6 +335,7 @@ def test_list_datasets(self, live_client):
                                                                   
                                                                       def test_publish_and_retrieve_dataset(self, live_client, unique_name):
                                                                           """Should publish then retrieve a dataset."""
                                                                  +
                                                                           @atdata.packable
                                                                           class RetrievableDatasetSample:
                                                                               value: int
                                                                  @@ -360,7 +369,9 @@ class RetrievableDatasetSample:
                                                                           assert dataset["name"] == unique_name
                                                                           assert dataset["description"] == "Retrievable test dataset"
                                                                   
                                                                  -    def test_to_dataset_with_fake_urls_fails_on_iteration(self, live_client, unique_name):
                                                                  +    def test_to_dataset_with_fake_urls_fails_on_iteration(
                                                                  +        self, live_client, unique_name
                                                                  +    ):
                                                                           """Attempting to iterate a dataset with fake URLs should fail.
                                                                   
                                                                           This test documents a known limitation: we can publish and retrieve
                                                                  @@ -369,6 +380,7 @@ def test_to_dataset_with_fake_urls_fails_on_iteration(self, live_client, unique_
                                                                           1. Real external URLs (e.g., S3 with test data)
                                                                           2. ATProto blob storage support (not yet implemented)
                                                                           """
                                                                  +
                                                                           @atdata.packable
                                                                           class IterationTestSample:
                                                                               value: int
                                                                  @@ -560,6 +572,7 @@ def test_index_list_datasets(self, live_index):
                                                                   
                                                                       def test_index_publish_schema(self, live_index, unique_name):
                                                                           """Should publish schema via AtmosphereIndex."""
                                                                  +
                                                                           @atdata.packable
                                                                           class IndexTestSample:
                                                                               data: str
                                                                  @@ -573,6 +586,7 @@ class IndexTestSample:
                                                                   
                                                                       def test_index_get_schema(self, live_index, unique_name):
                                                                           """Should retrieve schema via AtmosphereIndex."""
                                                                  +
                                                                           @atdata.packable
                                                                           class GetSchemaTestSample:
                                                                               field: int
                                                                  @@ -599,7 +613,9 @@ def test_get_nonexistent_record(self, live_client):
                                                                           """Should raise on getting non-existent record."""
                                                                           loader = SchemaLoader(live_client)
                                                                   
                                                                  -        fake_uri = f"at://{live_client.did}/{LEXICON_NAMESPACE}.sampleSchema/nonexistent12345"
                                                                  +        fake_uri = (
                                                                  +            f"at://{live_client.did}/{LEXICON_NAMESPACE}.sampleSchema/nonexistent12345"
                                                                  +        )
                                                                   
                                                                           with pytest.raises(Exception):
                                                                               loader.get(fake_uri)
                                                                  @@ -629,7 +645,9 @@ def test_cleanup_test_records(self, live_client):
                                                                           schemas_deleted = cleanup_test_schemas(live_client)
                                                                           datasets_deleted = cleanup_test_datasets(live_client)
                                                                   
                                                                  -        print(f"\nCleanup: deleted {schemas_deleted} schemas, {datasets_deleted} datasets")
                                                                  +        print(
                                                                  +            f"\nCleanup: deleted {schemas_deleted} schemas, {datasets_deleted} datasets"
                                                                  +        )
                                                                   
                                                                           # Just verify cleanup ran without error
                                                                           assert True
                                                                  diff --git a/tests/test_integration_cross_backend.py b/tests/test_integration_cross_backend.py
                                                                  index 797a9e2..94ab173 100644
                                                                  --- a/tests/test_integration_cross_backend.py
                                                                  +++ b/tests/test_integration_cross_backend.py
                                                                  @@ -29,6 +29,7 @@
                                                                   @atdata.packable
                                                                   class CrossBackendSample:
                                                                       """Sample for cross-backend tests."""
                                                                  +
                                                                       name: str
                                                                       value: int
                                                                   
                                                                  @@ -36,6 +37,7 @@ class CrossBackendSample:
                                                                   @atdata.packable
                                                                   class CrossBackendArraySample:
                                                                       """Sample with NDArray for cross-backend tests."""
                                                                  +
                                                                       label: str
                                                                       data: NDArray
                                                                   
                                                                  @@ -116,12 +118,16 @@ def test_atmosphere_entry_satisfies_protocol(self):
                                                                   
                                                                           assert isinstance(entry, IndexEntry)
                                                                           assert entry.name == "atmo-dataset"
                                                                  -        assert entry.schema_ref == "at://did:plc:test/ac.foundation.dataset.sampleSchema/abc"
                                                                  +        assert (
                                                                  +            entry.schema_ref
                                                                  +            == "at://did:plc:test/ac.foundation.dataset.sampleSchema/abc"
                                                                  +        )
                                                                           assert entry.data_urls == ["s3://bucket/atmo.tar"]
                                                                           assert entry.metadata is None
                                                                   
                                                                       def test_entries_work_with_common_function(self):
                                                                           """Both entry types should work with functions accepting IndexEntry."""
                                                                  +
                                                                           def process_entry(entry: IndexEntry) -> dict:
                                                                               return {
                                                                                   "name": entry.name,
                                                                  @@ -294,16 +300,23 @@ def test_local_index_get_schema(self, local_index):
                                                                           assert schema["version"] == "2.0.0"
                                                                           assert len(schema["fields"]) == 2
                                                                   
                                                                  -    def test_atmosphere_index_get_schema(
                                                                  -        self, atmosphere_index, mock_atproto_client
                                                                  -    ):
                                                                  +    def test_atmosphere_index_get_schema(self, atmosphere_index, mock_atproto_client):
                                                                           """AtmosphereIndex should retrieve schemas."""
                                                                           mock_response = Mock()
                                                                           mock_response.value = {
                                                                               "$type": f"{LEXICON_NAMESPACE}.sampleSchema",
                                                                               "name": "RetrievedSchema",
                                                                               "version": "1.0.0",
                                                                  -            "fields": [{"name": "field1", "fieldType": {"$type": f"{LEXICON_NAMESPACE}.schemaType#primitive", "primitive": "str"}, "optional": False}],
                                                                  +            "fields": [
                                                                  +                {
                                                                  +                    "name": "field1",
                                                                  +                    "fieldType": {
                                                                  +                        "$type": f"{LEXICON_NAMESPACE}.schemaType#primitive",
                                                                  +                        "primitive": "str",
                                                                  +                    },
                                                                  +                    "optional": False,
                                                                  +                }
                                                                  +            ],
                                                                           }
                                                                           mock_atproto_client.com.atproto.repo.get_record.return_value = mock_response
                                                                   
                                                                  @@ -332,7 +345,9 @@ def test_schema_field_structure_matches(self, local_index):
                                                                   
                                                                       def test_ndarray_schema_field_structure(self, local_index):
                                                                           """NDArray fields should be represented consistently."""
                                                                  -        schema_ref = local_index.publish_schema(CrossBackendArraySample, version="1.0.0")
                                                                  +        schema_ref = local_index.publish_schema(
                                                                  +            CrossBackendArraySample, version="1.0.0"
                                                                  +        )
                                                                           schema = local_index.get_schema(schema_ref)
                                                                   
                                                                           field_names = {f["name"] for f in schema["fields"]}
                                                                  @@ -343,8 +358,10 @@ def test_ndarray_schema_field_structure(self, local_index):
                                                                           data_field = next(f for f in schema["fields"] if f["name"] == "data")
                                                                           field_type = data_field["fieldType"]
                                                                           # Field type should indicate it's an ndarray
                                                                  -        assert "ndarray" in field_type.get("$type", "").lower() or \
                                                                  -               field_type.get("primitive") == "ndarray"
                                                                  +        assert (
                                                                  +            "ndarray" in field_type.get("$type", "").lower()
                                                                  +            or field_type.get("primitive") == "ndarray"
                                                                  +        )
                                                                   
                                                                   
                                                                   class TestCrossBackendSchemaResolution:
                                                                  @@ -357,9 +374,7 @@ def test_local_schema_ref_format(self, local_index):
                                                                           assert schema_ref.startswith("atdata://local/sampleSchema/")
                                                                           assert "CrossBackendSample" in schema_ref
                                                                   
                                                                  -    def test_atmosphere_schema_ref_format(
                                                                  -        self, atmosphere_index, mock_atproto_client
                                                                  -    ):
                                                                  +    def test_atmosphere_schema_ref_format(self, atmosphere_index, mock_atproto_client):
                                                                           """Atmosphere schema refs should use at:// URI scheme."""
                                                                           mock_response = Mock()
                                                                           mock_response.uri = f"at://did:plc:test/{LEXICON_NAMESPACE}.sampleSchema/abc"
                                                                  @@ -483,9 +498,7 @@ def test_count_works_with_local(self, local_index, clean_redis):
                                                                           count = self.count_datasets(local_index)
                                                                           assert count >= 3
                                                                   
                                                                  -    def test_count_works_with_atmosphere(
                                                                  -        self, atmosphere_index, mock_atproto_client
                                                                  -    ):
                                                                  +    def test_count_works_with_atmosphere(self, atmosphere_index, mock_atproto_client):
                                                                           """Dataset count function should work with AtmosphereIndex."""
                                                                           mock_records = []
                                                                           for i in range(5):
                                                                  diff --git a/tests/test_integration_dynamic_types.py b/tests/test_integration_dynamic_types.py
                                                                  index 1901706..605effa 100644
                                                                  --- a/tests/test_integration_dynamic_types.py
                                                                  +++ b/tests/test_integration_dynamic_types.py
                                                                  @@ -16,7 +16,12 @@
                                                                   import webdataset as wds
                                                                   
                                                                   import atdata
                                                                  -from atdata._schema_codec import schema_to_type, generate_stub, clear_type_cache, get_cached_types
                                                                  +from atdata._schema_codec import (
                                                                  +    schema_to_type,
                                                                  +    generate_stub,
                                                                  +    clear_type_cache,
                                                                  +    get_cached_types,
                                                                  +)
                                                                   import atdata.local as atlocal
                                                                   
                                                                   
                                                                  @@ -27,6 +32,7 @@
                                                                   @dataclass
                                                                   class SimpleSample(atdata.PackableSample):
                                                                       """Simple sample for testing."""
                                                                  +
                                                                       name: str
                                                                       value: int
                                                                       score: float
                                                                  @@ -35,6 +41,7 @@ class SimpleSample(atdata.PackableSample):
                                                                   @dataclass
                                                                   class ArraySample(atdata.PackableSample):
                                                                       """Sample with NDArray field."""
                                                                  +
                                                                       label: str
                                                                       image: NDArray
                                                                   
                                                                  @@ -42,6 +49,7 @@ class ArraySample(atdata.PackableSample):
                                                                   @dataclass
                                                                   class OptionalSample(atdata.PackableSample):
                                                                       """Sample with optional fields."""
                                                                  +
                                                                       name: str
                                                                       value: int
                                                                       extra: str | None = None
                                                                  @@ -51,6 +59,7 @@ class OptionalSample(atdata.PackableSample):
                                                                   @dataclass
                                                                   class ListSample(atdata.PackableSample):
                                                                       """Sample with list fields."""
                                                                  +
                                                                       tags: list[str]
                                                                       scores: list[float]
                                                                   
                                                                  @@ -80,10 +89,22 @@ def test_simple_primitive_schema(self):
                                                                               "name": "SimpleSample",
                                                                               "version": "1.0.0",
                                                                               "fields": [
                                                                  -                {"name": "name", "fieldType": {"$type": "local#primitive", "primitive": "str"}, "optional": False},
                                                                  -                {"name": "value", "fieldType": {"$type": "local#primitive", "primitive": "int"}, "optional": False},
                                                                  -                {"name": "score", "fieldType": {"$type": "local#primitive", "primitive": "float"}, "optional": False},
                                                                  -            ]
                                                                  +                {
                                                                  +                    "name": "name",
                                                                  +                    "fieldType": {"$type": "local#primitive", "primitive": "str"},
                                                                  +                    "optional": False,
                                                                  +                },
                                                                  +                {
                                                                  +                    "name": "value",
                                                                  +                    "fieldType": {"$type": "local#primitive", "primitive": "int"},
                                                                  +                    "optional": False,
                                                                  +                },
                                                                  +                {
                                                                  +                    "name": "score",
                                                                  +                    "fieldType": {"$type": "local#primitive", "primitive": "float"},
                                                                  +                    "optional": False,
                                                                  +                },
                                                                  +            ],
                                                                           }
                                                                   
                                                                           SampleType = schema_to_type(schema)
                                                                  @@ -100,9 +121,17 @@ def test_ndarray_field_schema(self):
                                                                               "name": "ArraySample",
                                                                               "version": "1.0.0",
                                                                               "fields": [
                                                                  -                {"name": "label", "fieldType": {"$type": "local#primitive", "primitive": "str"}, "optional": False},
                                                                  -                {"name": "image", "fieldType": {"$type": "local#ndarray", "dtype": "float32"}, "optional": False},
                                                                  -            ]
                                                                  +                {
                                                                  +                    "name": "label",
                                                                  +                    "fieldType": {"$type": "local#primitive", "primitive": "str"},
                                                                  +                    "optional": False,
                                                                  +                },
                                                                  +                {
                                                                  +                    "name": "image",
                                                                  +                    "fieldType": {"$type": "local#ndarray", "dtype": "float32"},
                                                                  +                    "optional": False,
                                                                  +                },
                                                                  +            ],
                                                                           }
                                                                   
                                                                           SampleType = schema_to_type(schema)
                                                                  @@ -119,9 +148,17 @@ def test_optional_field_schema(self):
                                                                               "name": "OptionalSample",
                                                                               "version": "1.0.0",
                                                                               "fields": [
                                                                  -                {"name": "name", "fieldType": {"$type": "local#primitive", "primitive": "str"}, "optional": False},
                                                                  -                {"name": "extra", "fieldType": {"$type": "local#primitive", "primitive": "str"}, "optional": True},
                                                                  -            ]
                                                                  +                {
                                                                  +                    "name": "name",
                                                                  +                    "fieldType": {"$type": "local#primitive", "primitive": "str"},
                                                                  +                    "optional": False,
                                                                  +                },
                                                                  +                {
                                                                  +                    "name": "extra",
                                                                  +                    "fieldType": {"$type": "local#primitive", "primitive": "str"},
                                                                  +                    "optional": True,
                                                                  +                },
                                                                  +            ],
                                                                           }
                                                                   
                                                                           SampleType = schema_to_type(schema)
                                                                  @@ -141,9 +178,23 @@ def test_list_field_schema(self):
                                                                               "name": "ListSample",
                                                                               "version": "1.0.0",
                                                                               "fields": [
                                                                  -                {"name": "tags", "fieldType": {"$type": "local#array", "items": {"$type": "local#primitive", "primitive": "str"}}, "optional": False},
                                                                  -                {"name": "scores", "fieldType": {"$type": "local#array", "items": {"$type": "local#primitive", "primitive": "float"}}, "optional": False},
                                                                  -            ]
                                                                  +                {
                                                                  +                    "name": "tags",
                                                                  +                    "fieldType": {
                                                                  +                        "$type": "local#array",
                                                                  +                        "items": {"$type": "local#primitive", "primitive": "str"},
                                                                  +                    },
                                                                  +                    "optional": False,
                                                                  +                },
                                                                  +                {
                                                                  +                    "name": "scores",
                                                                  +                    "fieldType": {
                                                                  +                        "$type": "local#array",
                                                                  +                        "items": {"$type": "local#primitive", "primitive": "float"},
                                                                  +                    },
                                                                  +                    "optional": False,
                                                                  +                },
                                                                  +            ],
                                                                           }
                                                                   
                                                                           SampleType = schema_to_type(schema)
                                                                  @@ -158,12 +209,32 @@ def test_all_primitive_types(self):
                                                                               "name": "AllPrimitives",
                                                                               "version": "1.0.0",
                                                                               "fields": [
                                                                  -                {"name": "s", "fieldType": {"$type": "local#primitive", "primitive": "str"}, "optional": False},
                                                                  -                {"name": "i", "fieldType": {"$type": "local#primitive", "primitive": "int"}, "optional": False},
                                                                  -                {"name": "f", "fieldType": {"$type": "local#primitive", "primitive": "float"}, "optional": False},
                                                                  -                {"name": "b", "fieldType": {"$type": "local#primitive", "primitive": "bool"}, "optional": False},
                                                                  -                {"name": "raw", "fieldType": {"$type": "local#primitive", "primitive": "bytes"}, "optional": False},
                                                                  -            ]
                                                                  +                {
                                                                  +                    "name": "s",
                                                                  +                    "fieldType": {"$type": "local#primitive", "primitive": "str"},
                                                                  +                    "optional": False,
                                                                  +                },
                                                                  +                {
                                                                  +                    "name": "i",
                                                                  +                    "fieldType": {"$type": "local#primitive", "primitive": "int"},
                                                                  +                    "optional": False,
                                                                  +                },
                                                                  +                {
                                                                  +                    "name": "f",
                                                                  +                    "fieldType": {"$type": "local#primitive", "primitive": "float"},
                                                                  +                    "optional": False,
                                                                  +                },
                                                                  +                {
                                                                  +                    "name": "b",
                                                                  +                    "fieldType": {"$type": "local#primitive", "primitive": "bool"},
                                                                  +                    "optional": False,
                                                                  +                },
                                                                  +                {
                                                                  +                    "name": "raw",
                                                                  +                    "fieldType": {"$type": "local#primitive", "primitive": "bytes"},
                                                                  +                    "optional": False,
                                                                  +                },
                                                                  +            ],
                                                                           }
                                                                   
                                                                           SampleType = schema_to_type(schema)
                                                                  @@ -197,10 +268,22 @@ def test_load_dataset_with_dynamic_type(self, tmp_path):
                                                                               "name": "SimpleSample",
                                                                               "version": "1.0.0",
                                                                               "fields": [
                                                                  -                {"name": "name", "fieldType": {"$type": "local#primitive", "primitive": "str"}, "optional": False},
                                                                  -                {"name": "value", "fieldType": {"$type": "local#primitive", "primitive": "int"}, "optional": False},
                                                                  -                {"name": "score", "fieldType": {"$type": "local#primitive", "primitive": "float"}, "optional": False},
                                                                  -            ]
                                                                  +                {
                                                                  +                    "name": "name",
                                                                  +                    "fieldType": {"$type": "local#primitive", "primitive": "str"},
                                                                  +                    "optional": False,
                                                                  +                },
                                                                  +                {
                                                                  +                    "name": "value",
                                                                  +                    "fieldType": {"$type": "local#primitive", "primitive": "int"},
                                                                  +                    "optional": False,
                                                                  +                },
                                                                  +                {
                                                                  +                    "name": "score",
                                                                  +                    "fieldType": {"$type": "local#primitive", "primitive": "float"},
                                                                  +                    "optional": False,
                                                                  +                },
                                                                  +            ],
                                                                           }
                                                                   
                                                                           DynamicType = schema_to_type(schema)
                                                                  @@ -231,9 +314,17 @@ def test_load_dataset_with_ndarray_dynamic_type(self, tmp_path):
                                                                               "name": "ArraySample",
                                                                               "version": "1.0.0",
                                                                               "fields": [
                                                                  -                {"name": "label", "fieldType": {"$type": "local#primitive", "primitive": "str"}, "optional": False},
                                                                  -                {"name": "image", "fieldType": {"$type": "local#ndarray", "dtype": "float32"}, "optional": False},
                                                                  -            ]
                                                                  +                {
                                                                  +                    "name": "label",
                                                                  +                    "fieldType": {"$type": "local#primitive", "primitive": "str"},
                                                                  +                    "optional": False,
                                                                  +                },
                                                                  +                {
                                                                  +                    "name": "image",
                                                                  +                    "fieldType": {"$type": "local#ndarray", "dtype": "float32"},
                                                                  +                    "optional": False,
                                                                  +                },
                                                                  +            ],
                                                                           }
                                                                   
                                                                           DynamicType = schema_to_type(schema)
                                                                  @@ -258,10 +349,22 @@ def test_batch_iteration_with_dynamic_type(self, tmp_path):
                                                                               "name": "SimpleSample",
                                                                               "version": "1.0.0",
                                                                               "fields": [
                                                                  -                {"name": "name", "fieldType": {"$type": "local#primitive", "primitive": "str"}, "optional": False},
                                                                  -                {"name": "value", "fieldType": {"$type": "local#primitive", "primitive": "int"}, "optional": False},
                                                                  -                {"name": "score", "fieldType": {"$type": "local#primitive", "primitive": "float"}, "optional": False},
                                                                  -            ]
                                                                  +                {
                                                                  +                    "name": "name",
                                                                  +                    "fieldType": {"$type": "local#primitive", "primitive": "str"},
                                                                  +                    "optional": False,
                                                                  +                },
                                                                  +                {
                                                                  +                    "name": "value",
                                                                  +                    "fieldType": {"$type": "local#primitive", "primitive": "int"},
                                                                  +                    "optional": False,
                                                                  +                },
                                                                  +                {
                                                                  +                    "name": "score",
                                                                  +                    "fieldType": {"$type": "local#primitive", "primitive": "float"},
                                                                  +                    "optional": False,
                                                                  +                },
                                                                  +            ],
                                                                           }
                                                                   
                                                                           DynamicType = schema_to_type(schema)
                                                                  @@ -284,8 +387,12 @@ def test_same_schema_returns_cached_type(self):
                                                                               "name": "CachedSample",
                                                                               "version": "1.0.0",
                                                                               "fields": [
                                                                  -                {"name": "value", "fieldType": {"$type": "local#primitive", "primitive": "int"}, "optional": False},
                                                                  -            ]
                                                                  +                {
                                                                  +                    "name": "value",
                                                                  +                    "fieldType": {"$type": "local#primitive", "primitive": "int"},
                                                                  +                    "optional": False,
                                                                  +                },
                                                                  +            ],
                                                                           }
                                                                   
                                                                           Type1 = schema_to_type(schema)
                                                                  @@ -299,15 +406,23 @@ def test_different_version_different_type(self):
                                                                               "name": "VersionedSample",
                                                                               "version": "1.0.0",
                                                                               "fields": [
                                                                  -                {"name": "value", "fieldType": {"$type": "local#primitive", "primitive": "int"}, "optional": False},
                                                                  -            ]
                                                                  +                {
                                                                  +                    "name": "value",
                                                                  +                    "fieldType": {"$type": "local#primitive", "primitive": "int"},
                                                                  +                    "optional": False,
                                                                  +                },
                                                                  +            ],
                                                                           }
                                                                           schema2 = {
                                                                               "name": "VersionedSample",
                                                                               "version": "2.0.0",
                                                                               "fields": [
                                                                  -                {"name": "value", "fieldType": {"$type": "local#primitive", "primitive": "int"}, "optional": False},
                                                                  -            ]
                                                                  +                {
                                                                  +                    "name": "value",
                                                                  +                    "fieldType": {"$type": "local#primitive", "primitive": "int"},
                                                                  +                    "optional": False,
                                                                  +                },
                                                                  +            ],
                                                                           }
                                                                   
                                                                           Type1 = schema_to_type(schema1)
                                                                  @@ -322,15 +437,23 @@ def test_different_fields_different_type(self):
                                                                               "name": "FieldSample",
                                                                               "version": "1.0.0",
                                                                               "fields": [
                                                                  -                {"name": "a", "fieldType": {"$type": "local#primitive", "primitive": "int"}, "optional": False},
                                                                  -            ]
                                                                  +                {
                                                                  +                    "name": "a",
                                                                  +                    "fieldType": {"$type": "local#primitive", "primitive": "int"},
                                                                  +                    "optional": False,
                                                                  +                },
                                                                  +            ],
                                                                           }
                                                                           schema2 = {
                                                                               "name": "FieldSample",
                                                                               "version": "1.0.0",
                                                                               "fields": [
                                                                  -                {"name": "b", "fieldType": {"$type": "local#primitive", "primitive": "int"}, "optional": False},
                                                                  -            ]
                                                                  +                {
                                                                  +                    "name": "b",
                                                                  +                    "fieldType": {"$type": "local#primitive", "primitive": "int"},
                                                                  +                    "optional": False,
                                                                  +                },
                                                                  +            ],
                                                                           }
                                                                   
                                                                           Type1 = schema_to_type(schema1)
                                                                  @@ -344,8 +467,12 @@ def test_use_cache_false_bypasses_cache(self):
                                                                               "name": "NoCacheSample",
                                                                               "version": "1.0.0",
                                                                               "fields": [
                                                                  -                {"name": "value", "fieldType": {"$type": "local#primitive", "primitive": "int"}, "optional": False},
                                                                  -            ]
                                                                  +                {
                                                                  +                    "name": "value",
                                                                  +                    "fieldType": {"$type": "local#primitive", "primitive": "int"},
                                                                  +                    "optional": False,
                                                                  +                },
                                                                  +            ],
                                                                           }
                                                                   
                                                                           Type1 = schema_to_type(schema, use_cache=False)
                                                                  @@ -360,8 +487,12 @@ def test_clear_cache_removes_types(self):
                                                                               "name": "ClearableSample",
                                                                               "version": "1.0.0",
                                                                               "fields": [
                                                                  -                {"name": "value", "fieldType": {"$type": "local#primitive", "primitive": "int"}, "optional": False},
                                                                  -            ]
                                                                  +                {
                                                                  +                    "name": "value",
                                                                  +                    "fieldType": {"$type": "local#primitive", "primitive": "int"},
                                                                  +                    "optional": False,
                                                                  +                },
                                                                  +            ],
                                                                           }
                                                                   
                                                                           Type1 = schema_to_type(schema)
                                                                  @@ -377,8 +508,12 @@ def test_get_cached_types_returns_cache_copy(self):
                                                                               "name": "TrackedSample",
                                                                               "version": "1.0.0",
                                                                               "fields": [
                                                                  -                {"name": "value", "fieldType": {"$type": "local#primitive", "primitive": "int"}, "optional": False},
                                                                  -            ]
                                                                  +                {
                                                                  +                    "name": "value",
                                                                  +                    "fieldType": {"$type": "local#primitive", "primitive": "int"},
                                                                  +                    "optional": False,
                                                                  +                },
                                                                  +            ],
                                                                           }
                                                                   
                                                                           schema_to_type(schema)
                                                                  @@ -450,8 +585,12 @@ def test_schema_without_name_raises(self):
                                                                           schema = {
                                                                               "version": "1.0.0",
                                                                               "fields": [
                                                                  -                {"name": "value", "fieldType": {"$type": "local#primitive", "primitive": "int"}, "optional": False},
                                                                  -            ]
                                                                  +                {
                                                                  +                    "name": "value",
                                                                  +                    "fieldType": {"$type": "local#primitive", "primitive": "int"},
                                                                  +                    "optional": False,
                                                                  +                },
                                                                  +            ],
                                                                           }
                                                                   
                                                                           with pytest.raises(ValueError, match="must have a 'name'"):
                                                                  @@ -459,11 +598,7 @@ def test_schema_without_name_raises(self):
                                                                   
                                                                       def test_schema_without_fields_raises(self):
                                                                           """Schema without fields should raise ValueError."""
                                                                  -        schema = {
                                                                  -            "name": "EmptySample",
                                                                  -            "version": "1.0.0",
                                                                  -            "fields": []
                                                                  -        }
                                                                  +        schema = {"name": "EmptySample", "version": "1.0.0", "fields": []}
                                                                   
                                                                           with pytest.raises(ValueError, match="must have at least one field"):
                                                                               schema_to_type(schema)
                                                                  @@ -474,8 +609,11 @@ def test_field_without_name_raises(self):
                                                                               "name": "BadFieldSample",
                                                                               "version": "1.0.0",
                                                                               "fields": [
                                                                  -                {"fieldType": {"$type": "local#primitive", "primitive": "int"}, "optional": False},
                                                                  -            ]
                                                                  +                {
                                                                  +                    "fieldType": {"$type": "local#primitive", "primitive": "int"},
                                                                  +                    "optional": False,
                                                                  +                },
                                                                  +            ],
                                                                           }
                                                                   
                                                                           # Raises KeyError during cache key generation or ValueError during field processing
                                                                  @@ -488,8 +626,15 @@ def test_unknown_primitive_raises(self):
                                                                               "name": "UnknownPrimitive",
                                                                               "version": "1.0.0",
                                                                               "fields": [
                                                                  -                {"name": "value", "fieldType": {"$type": "local#primitive", "primitive": "complex128"}, "optional": False},
                                                                  -            ]
                                                                  +                {
                                                                  +                    "name": "value",
                                                                  +                    "fieldType": {
                                                                  +                        "$type": "local#primitive",
                                                                  +                        "primitive": "complex128",
                                                                  +                    },
                                                                  +                    "optional": False,
                                                                  +                },
                                                                  +            ],
                                                                           }
                                                                   
                                                                           with pytest.raises(ValueError, match="Unknown primitive type"):
                                                                  @@ -501,8 +646,12 @@ def test_unknown_field_type_raises(self):
                                                                               "name": "UnknownType",
                                                                               "version": "1.0.0",
                                                                               "fields": [
                                                                  -                {"name": "value", "fieldType": {"$type": "local#custom"}, "optional": False},
                                                                  -            ]
                                                                  +                {
                                                                  +                    "name": "value",
                                                                  +                    "fieldType": {"$type": "local#custom"},
                                                                  +                    "optional": False,
                                                                  +                },
                                                                  +            ],
                                                                           }
                                                                   
                                                                           with pytest.raises(ValueError, match="Unknown field type kind"):
                                                                  @@ -518,9 +667,17 @@ def test_optional_ndarray_schema(self, tmp_path):
                                                                               "name": "OptionalArraySample",
                                                                               "version": "1.0.0",
                                                                               "fields": [
                                                                  -                {"name": "name", "fieldType": {"$type": "local#primitive", "primitive": "str"}, "optional": False},
                                                                  -                {"name": "embedding", "fieldType": {"$type": "local#ndarray", "dtype": "float32"}, "optional": True},
                                                                  -            ]
                                                                  +                {
                                                                  +                    "name": "name",
                                                                  +                    "fieldType": {"$type": "local#primitive", "primitive": "str"},
                                                                  +                    "optional": False,
                                                                  +                },
                                                                  +                {
                                                                  +                    "name": "embedding",
                                                                  +                    "fieldType": {"$type": "local#ndarray", "dtype": "float32"},
                                                                  +                    "optional": True,
                                                                  +                },
                                                                  +            ],
                                                                           }
                                                                   
                                                                           DynamicType = schema_to_type(schema)
                                                                  @@ -530,7 +687,9 @@ def test_optional_ndarray_schema(self, tmp_path):
                                                                           with wds.writer.TarWriter(str(tar_path)) as sink:
                                                                               for i in range(6):
                                                                                   if i % 2 == 0:
                                                                  -                    sample = OptionalSample(name=f"s_{i}", value=i, embedding=np.zeros(4, dtype=np.float32))
                                                                  +                    sample = OptionalSample(
                                                                  +                        name=f"s_{i}", value=i, embedding=np.zeros(4, dtype=np.float32)
                                                                  +                    )
                                                                                   else:
                                                                                       sample = OptionalSample(name=f"s_{i}", value=i, embedding=None)
                                                                                   sink.write(sample.as_wds)
                                                                  @@ -550,11 +709,18 @@ def test_nested_list_schema(self):
                                                                               "name": "NestedListSample",
                                                                               "version": "1.0.0",
                                                                               "fields": [
                                                                  -                {"name": "matrix", "fieldType": {
                                                                  -                    "$type": "local#array",
                                                                  -                    "items": {"$type": "local#array", "items": {"$type": "local#primitive", "primitive": "int"}}
                                                                  -                }, "optional": False},
                                                                  -            ]
                                                                  +                {
                                                                  +                    "name": "matrix",
                                                                  +                    "fieldType": {
                                                                  +                        "$type": "local#array",
                                                                  +                        "items": {
                                                                  +                            "$type": "local#array",
                                                                  +                            "items": {"$type": "local#primitive", "primitive": "int"},
                                                                  +                        },
                                                                  +                    },
                                                                  +                    "optional": False,
                                                                  +                },
                                                                  +            ],
                                                                           }
                                                                   
                                                                           DynamicType = schema_to_type(schema)
                                                                  @@ -591,9 +757,17 @@ def test_basic_stub_generation(self):
                                                                               "name": "SimpleSample",
                                                                               "version": "1.0.0",
                                                                               "fields": [
                                                                  -                {"name": "name", "fieldType": {"$type": "local#primitive", "primitive": "str"}, "optional": False},
                                                                  -                {"name": "value", "fieldType": {"$type": "local#primitive", "primitive": "int"}, "optional": False},
                                                                  -            ]
                                                                  +                {
                                                                  +                    "name": "name",
                                                                  +                    "fieldType": {"$type": "local#primitive", "primitive": "str"},
                                                                  +                    "optional": False,
                                                                  +                },
                                                                  +                {
                                                                  +                    "name": "value",
                                                                  +                    "fieldType": {"$type": "local#primitive", "primitive": "int"},
                                                                  +                    "optional": False,
                                                                  +                },
                                                                  +            ],
                                                                           }
                                                                   
                                                                           stub = generate_stub(schema)
                                                                  @@ -609,8 +783,12 @@ def test_stub_with_ndarray_field(self):
                                                                               "name": "ArraySample",
                                                                               "version": "1.0.0",
                                                                               "fields": [
                                                                  -                {"name": "image", "fieldType": {"$type": "local#ndarray", "dtype": "float32"}, "optional": False},
                                                                  -            ]
                                                                  +                {
                                                                  +                    "name": "image",
                                                                  +                    "fieldType": {"$type": "local#ndarray", "dtype": "float32"},
                                                                  +                    "optional": False,
                                                                  +                },
                                                                  +            ],
                                                                           }
                                                                   
                                                                           stub = generate_stub(schema)
                                                                  @@ -624,9 +802,17 @@ def test_stub_with_optional_field(self):
                                                                               "name": "OptionalSample",
                                                                               "version": "1.0.0",
                                                                               "fields": [
                                                                  -                {"name": "name", "fieldType": {"$type": "local#primitive", "primitive": "str"}, "optional": False},
                                                                  -                {"name": "extra", "fieldType": {"$type": "local#primitive", "primitive": "str"}, "optional": True},
                                                                  -            ]
                                                                  +                {
                                                                  +                    "name": "name",
                                                                  +                    "fieldType": {"$type": "local#primitive", "primitive": "str"},
                                                                  +                    "optional": False,
                                                                  +                },
                                                                  +                {
                                                                  +                    "name": "extra",
                                                                  +                    "fieldType": {"$type": "local#primitive", "primitive": "str"},
                                                                  +                    "optional": True,
                                                                  +                },
                                                                  +            ],
                                                                           }
                                                                   
                                                                           stub = generate_stub(schema)
                                                                  @@ -641,8 +827,15 @@ def test_stub_with_list_field(self):
                                                                               "name": "ListSample",
                                                                               "version": "1.0.0",
                                                                               "fields": [
                                                                  -                {"name": "tags", "fieldType": {"$type": "local#array", "items": {"$type": "local#primitive", "primitive": "str"}}, "optional": False},
                                                                  -            ]
                                                                  +                {
                                                                  +                    "name": "tags",
                                                                  +                    "fieldType": {
                                                                  +                        "$type": "local#array",
                                                                  +                        "items": {"$type": "local#primitive", "primitive": "str"},
                                                                  +                    },
                                                                  +                    "optional": False,
                                                                  +                },
                                                                  +            ],
                                                                           }
                                                                   
                                                                           stub = generate_stub(schema)
                                                                  @@ -655,8 +848,12 @@ def test_stub_includes_header_comments(self):
                                                                               "name": "MySample",
                                                                               "version": "2.1.0",
                                                                               "fields": [
                                                                  -                {"name": "value", "fieldType": {"$type": "local#primitive", "primitive": "int"}, "optional": False},
                                                                  -            ]
                                                                  +                {
                                                                  +                    "name": "value",
                                                                  +                    "fieldType": {"$type": "local#primitive", "primitive": "int"},
                                                                  +                    "optional": False,
                                                                  +                },
                                                                  +            ],
                                                                           }
                                                                   
                                                                           stub = generate_stub(schema)
                                                                  @@ -671,8 +868,12 @@ def test_stub_includes_imports(self):
                                                                               "name": "ImportSample",
                                                                               "version": "1.0.0",
                                                                               "fields": [
                                                                  -                {"name": "value", "fieldType": {"$type": "local#primitive", "primitive": "int"}, "optional": False},
                                                                  -            ]
                                                                  +                {
                                                                  +                    "name": "value",
                                                                  +                    "fieldType": {"$type": "local#primitive", "primitive": "int"},
                                                                  +                    "optional": False,
                                                                  +                },
                                                                  +            ],
                                                                           }
                                                                   
                                                                           stub = generate_stub(schema)
                                                                  @@ -686,12 +887,32 @@ def test_stub_all_primitive_types(self):
                                                                               "name": "AllPrimitives",
                                                                               "version": "1.0.0",
                                                                               "fields": [
                                                                  -                {"name": "s", "fieldType": {"$type": "local#primitive", "primitive": "str"}, "optional": False},
                                                                  -                {"name": "i", "fieldType": {"$type": "local#primitive", "primitive": "int"}, "optional": False},
                                                                  -                {"name": "f", "fieldType": {"$type": "local#primitive", "primitive": "float"}, "optional": False},
                                                                  -                {"name": "b", "fieldType": {"$type": "local#primitive", "primitive": "bool"}, "optional": False},
                                                                  -                {"name": "raw", "fieldType": {"$type": "local#primitive", "primitive": "bytes"}, "optional": False},
                                                                  -            ]
                                                                  +                {
                                                                  +                    "name": "s",
                                                                  +                    "fieldType": {"$type": "local#primitive", "primitive": "str"},
                                                                  +                    "optional": False,
                                                                  +                },
                                                                  +                {
                                                                  +                    "name": "i",
                                                                  +                    "fieldType": {"$type": "local#primitive", "primitive": "int"},
                                                                  +                    "optional": False,
                                                                  +                },
                                                                  +                {
                                                                  +                    "name": "f",
                                                                  +                    "fieldType": {"$type": "local#primitive", "primitive": "float"},
                                                                  +                    "optional": False,
                                                                  +                },
                                                                  +                {
                                                                  +                    "name": "b",
                                                                  +                    "fieldType": {"$type": "local#primitive", "primitive": "bool"},
                                                                  +                    "optional": False,
                                                                  +                },
                                                                  +                {
                                                                  +                    "name": "raw",
                                                                  +                    "fieldType": {"$type": "local#primitive", "primitive": "bytes"},
                                                                  +                    "optional": False,
                                                                  +                },
                                                                  +            ],
                                                                           }
                                                                   
                                                                           stub = generate_stub(schema)
                                                                  @@ -708,11 +929,18 @@ def test_stub_with_nested_list(self):
                                                                               "name": "NestedSample",
                                                                               "version": "1.0.0",
                                                                               "fields": [
                                                                  -                {"name": "matrix", "fieldType": {
                                                                  -                    "$type": "local#array",
                                                                  -                    "items": {"$type": "local#array", "items": {"$type": "local#primitive", "primitive": "int"}}
                                                                  -                }, "optional": False},
                                                                  -            ]
                                                                  +                {
                                                                  +                    "name": "matrix",
                                                                  +                    "fieldType": {
                                                                  +                        "$type": "local#array",
                                                                  +                        "items": {
                                                                  +                            "$type": "local#array",
                                                                  +                            "items": {"$type": "local#primitive", "primitive": "int"},
                                                                  +                        },
                                                                  +                    },
                                                                  +                    "optional": False,
                                                                  +                },
                                                                  +            ],
                                                                           }
                                                                   
                                                                           stub = generate_stub(schema)
                                                                  @@ -725,8 +953,15 @@ def test_stub_with_ref_field_uses_any(self):
                                                                               "name": "RefSample",
                                                                               "version": "1.0.0",
                                                                               "fields": [
                                                                  -                {"name": "nested", "fieldType": {"$type": "local#ref", "ref": "local://schemas/Other@1.0.0"}, "optional": False},
                                                                  -            ]
                                                                  +                {
                                                                  +                    "name": "nested",
                                                                  +                    "fieldType": {
                                                                  +                        "$type": "local#ref",
                                                                  +                        "ref": "local://schemas/Other@1.0.0",
                                                                  +                    },
                                                                  +                    "optional": False,
                                                                  +                },
                                                                  +            ],
                                                                           }
                                                                   
                                                                           stub = generate_stub(schema)
                                                                  diff --git a/tests/test_integration_e2e.py b/tests/test_integration_e2e.py
                                                                  index d4fc840..36cfa23 100644
                                                                  --- a/tests/test_integration_e2e.py
                                                                  +++ b/tests/test_integration_e2e.py
                                                                  @@ -27,6 +27,7 @@
                                                                   @atdata.packable
                                                                   class SimpleSample:
                                                                       """Basic sample with primitive types only."""
                                                                  +
                                                                       name: str
                                                                       value: int
                                                                       score: float
                                                                  @@ -36,6 +37,7 @@ class SimpleSample:
                                                                   @atdata.packable
                                                                   class NDArraySample:
                                                                       """Sample with multiple NDArray fields of different shapes."""
                                                                  +
                                                                       label: int
                                                                       image: NDArray
                                                                       features: NDArray
                                                                  @@ -44,6 +46,7 @@ class NDArraySample:
                                                                   @atdata.packable
                                                                   class OptionalNDArraySample:
                                                                       """Sample with optional NDArray fields."""
                                                                  +
                                                                       label: int
                                                                       image: NDArray
                                                                       embeddings: NDArray | None = None
                                                                  @@ -52,6 +55,7 @@ class OptionalNDArraySample:
                                                                   @atdata.packable
                                                                   class BytesSample:
                                                                       """Sample with bytes field."""
                                                                  +
                                                                       name: str
                                                                       raw_data: bytes
                                                                   
                                                                  @@ -59,6 +63,7 @@ class BytesSample:
                                                                   @atdata.packable
                                                                   class ListSample:
                                                                       """Sample with list fields."""
                                                                  +
                                                                       tags: list[str]
                                                                       scores: list[float]
                                                                       ids: list[int]
                                                                  @@ -67,6 +72,7 @@ class ListSample:
                                                                   @dataclass
                                                                   class InheritanceSample(atdata.PackableSample):
                                                                       """Sample using inheritance syntax instead of decorator."""
                                                                  +
                                                                       title: str
                                                                       count: int
                                                                       measurements: NDArray
                                                                  @@ -101,13 +107,17 @@ def create_ndarray_samples(n: int, img_shape: tuple = (64, 64)) -> list[NDArrayS
                                                                       ]
                                                                   
                                                                   
                                                                  -def create_optional_samples(n: int, include_optional: bool) -> list[OptionalNDArraySample]:
                                                                  +def create_optional_samples(
                                                                  +    n: int, include_optional: bool
                                                                  +) -> list[OptionalNDArraySample]:
                                                                       """Create samples with or without optional embeddings."""
                                                                       return [
                                                                           OptionalNDArraySample(
                                                                               label=i,
                                                                               image=np.random.randn(32, 32).astype(np.float32),
                                                                  -            embeddings=np.random.randn(64).astype(np.float32) if include_optional else None,
                                                                  +            embeddings=np.random.randn(64).astype(np.float32)
                                                                  +            if include_optional
                                                                  +            else None,
                                                                           )
                                                                           for i in range(n)
                                                                       ]
                                                                  @@ -134,9 +144,7 @@ def write_multi_shard(
                                                                               sink.write(sample.as_wds)
                                                                   
                                                                       n_shards = (len(samples) + samples_per_shard - 1) // samples_per_shard
                                                                  -    brace_pattern = (
                                                                  -        base_path / f"shard-{{000000..{n_shards - 1:06d}}}.tar"
                                                                  -    ).as_posix()
                                                                  +    brace_pattern = (base_path / f"shard-{{000000..{n_shards - 1:06d}}}.tar").as_posix()
                                                                       return brace_pattern, n_shards
                                                                   
                                                                   
                                                                  @@ -241,7 +249,9 @@ def test_ndarray_serialization_roundtrip(self, tmp_path):
                                                                           for original, loaded_sample in zip(samples, loaded):
                                                                               assert loaded_sample.label == original.label
                                                                               np.testing.assert_array_almost_equal(loaded_sample.image, original.image)
                                                                  -            np.testing.assert_array_almost_equal(loaded_sample.features, original.features)
                                                                  +            np.testing.assert_array_almost_equal(
                                                                  +                loaded_sample.features, original.features
                                                                  +            )
                                                                   
                                                                       def test_ndarray_batch_stacking(self, tmp_path):
                                                                           """NDArray fields should stack into batch dimension."""
                                                                  @@ -309,6 +319,7 @@ def test_optional_ndarray_with_none(self, tmp_path):
                                                                   
                                                                       def test_mixed_dtypes(self, tmp_path):
                                                                           """Various numpy dtypes should serialize correctly."""
                                                                  +
                                                                           @atdata.packable
                                                                           class MultiDtypeSample:
                                                                               f32: NDArray
                                                                  @@ -657,7 +668,9 @@ def test_shuffled_changes_order(self, tmp_path):
                                                                   
                                                                           # At least two passes should differ (very high probability with 100 samples)
                                                                           # Note: This could theoretically fail, but probability is astronomically low
                                                                  -        assert passes[0] != passes[1] or passes[1] != passes[2] or passes[0] != passes[2]
                                                                  +        assert (
                                                                  +            passes[0] != passes[1] or passes[1] != passes[2] or passes[0] != passes[2]
                                                                  +        )
                                                                   
                                                                       def test_batch_size_one(self, tmp_path):
                                                                           """batch_size=1 should return single-element batches."""
                                                                  diff --git a/tests/test_integration_edge_cases.py b/tests/test_integration_edge_cases.py
                                                                  index f6c1e07..53d8184 100644
                                                                  --- a/tests/test_integration_edge_cases.py
                                                                  +++ b/tests/test_integration_edge_cases.py
                                                                  @@ -9,8 +9,6 @@
                                                                   - All primitive type variations
                                                                   """
                                                                   
                                                                  -from pathlib import Path
                                                                  -
                                                                   import numpy as np
                                                                   from numpy.typing import NDArray
                                                                   
                                                                  @@ -28,12 +26,14 @@
                                                                   @atdata.packable
                                                                   class EmptyCompatSample:
                                                                       """Sample type for empty dataset tests."""
                                                                  +
                                                                       id: int
                                                                   
                                                                   
                                                                   @atdata.packable
                                                                   class AllPrimitivesSample:
                                                                       """Sample with all primitive types."""
                                                                  +
                                                                       str_field: str
                                                                       int_field: int
                                                                       float_field: float
                                                                  @@ -44,6 +44,7 @@ class AllPrimitivesSample:
                                                                   @atdata.packable
                                                                   class OptionalFieldsSample:
                                                                       """Sample with optional fields."""
                                                                  +
                                                                       required_str: str
                                                                       optional_str: str | None
                                                                       optional_int: int | None
                                                                  @@ -54,6 +55,7 @@ class OptionalFieldsSample:
                                                                   @atdata.packable
                                                                   class ListFieldsSample:
                                                                       """Sample with list fields."""
                                                                  +
                                                                       str_list: list[str]
                                                                       int_list: list[int]
                                                                       float_list: list[float]
                                                                  @@ -63,6 +65,7 @@ class ListFieldsSample:
                                                                   @atdata.packable
                                                                   class UnicodeSample:
                                                                       """Sample with unicode content."""
                                                                  +
                                                                       text: str
                                                                       label: str
                                                                   
                                                                  @@ -70,6 +73,7 @@ class UnicodeSample:
                                                                   @atdata.packable
                                                                   class NDArraySample:
                                                                       """Sample with NDArray field."""
                                                                  +
                                                                       label: str
                                                                       data: NDArray
                                                                   
                                                                  @@ -368,8 +372,7 @@ def test_emoji(self, tmp_path):
                                                                           tar_path = tmp_path / "emoji-000000.tar"
                                                                   
                                                                           sample = UnicodeSample(
                                                                  -            text="Hello World! Have a great day!",
                                                                  -            label="with-emoji"
                                                                  +            text="Hello World! Have a great day!", label="with-emoji"
                                                                           )
                                                                           create_tar_with_samples(tar_path, [sample])
                                                                   
                                                                  diff --git a/tests/test_integration_error_handling.py b/tests/test_integration_error_handling.py
                                                                  index 4a24fff..8f13496 100644
                                                                  --- a/tests/test_integration_error_handling.py
                                                                  +++ b/tests/test_integration_error_handling.py
                                                                  @@ -28,6 +28,7 @@
                                                                   @atdata.packable
                                                                   class ErrorTestSample:
                                                                       """Sample for error handling tests."""
                                                                  +
                                                                       name: str
                                                                       value: int
                                                                   
                                                                  @@ -110,8 +111,6 @@ def test_invalid_msgpack_in_tar(self, tmp_path):
                                                                           """Tar with invalid msgpack should raise on iteration."""
                                                                           tar_path = tmp_path / "corrupted-000000.tar"
                                                                   
                                                                  -        import io
                                                                  -
                                                                           # Create tar with invalid msgpack data
                                                                           with tarfile.open(tar_path, "w") as tar:
                                                                               # Add a valid key file
                                                                  @@ -146,6 +145,7 @@ def test_truncated_tar_raises(self, tmp_path):
                                                                               info = tarfile.TarInfo(name="test.txt")
                                                                               info.size = len(data)
                                                                               import io
                                                                  +
                                                                               tar.addfile(info, fileobj=io.BytesIO(data))
                                                                   
                                                                           # Truncate the file
                                                                  @@ -183,7 +183,9 @@ def test_redis_connection_error(self):
                                                                           from redis import Redis, ConnectionError
                                                                   
                                                                           # Create index with invalid Redis connection
                                                                  -        bad_redis = Redis(host="nonexistent.invalid.host", port=9999, socket_timeout=0.1)
                                                                  +        bad_redis = Redis(
                                                                  +            host="nonexistent.invalid.host", port=9999, socket_timeout=0.1
                                                                  +        )
                                                                   
                                                                           index = LocalIndex(redis=bad_redis)
                                                                   
                                                                  @@ -227,6 +229,7 @@ def test_unauthenticated_publish_raises(self):
                                                                           assert not client.is_authenticated
                                                                   
                                                                           from atdata.atmosphere import SchemaPublisher
                                                                  +
                                                                           publisher = SchemaPublisher(client)
                                                                   
                                                                           with pytest.raises(ValueError, match="authenticated"):
                                                                  @@ -264,6 +267,7 @@ def test_api_error_response_handling(self):
                                                                           client._session = {"did": "did:plc:test123"}  # Mark as authenticated
                                                                   
                                                                           from atdata.atmosphere import SchemaPublisher
                                                                  +
                                                                           publisher = SchemaPublisher(client)
                                                                   
                                                                           # Should propagate the API error
                                                                  @@ -339,6 +343,7 @@ def test_auth_error_no_credential_leak(self):
                                                                           client = AtmosphereClient(_client=mock_client)
                                                                   
                                                                           from atdata.atmosphere import SchemaPublisher
                                                                  +
                                                                           publisher = SchemaPublisher(client)
                                                                   
                                                                           try:
                                                                  @@ -374,6 +379,7 @@ def test_can_continue_after_bad_sample(self, tmp_path, clean_redis):
                                                                           # Now use a good file - should still work
                                                                           good_tar = tmp_path / "good-000000.tar"
                                                                           import webdataset as wds
                                                                  +
                                                                           with wds.writer.TarWriter(str(good_tar)) as writer:
                                                                               sample = ErrorTestSample(name="good", value=42)
                                                                               writer.write(sample.as_wds)
                                                                  @@ -421,7 +427,9 @@ def test_special_chars_in_version(self, clean_redis):
                                                                           """Special characters in version should be handled."""
                                                                           index = LocalIndex(redis=clean_redis)
                                                                   
                                                                  -        schema_ref = index.publish_schema(ErrorTestSample, version="1.0.0-beta+build.123")
                                                                  +        schema_ref = index.publish_schema(
                                                                  +            ErrorTestSample, version="1.0.0-beta+build.123"
                                                                  +        )
                                                                           schema = index.get_schema(schema_ref)
                                                                   
                                                                           assert schema["version"] == "1.0.0-beta+build.123"
                                                                  @@ -615,7 +623,9 @@ def test_s3_connection_timeout_simulation(self):
                                                                           # Mock the client after source creation
                                                                           with patch.object(source, "_get_client") as mock_get_client:
                                                                               mock_client = Mock()
                                                                  -            mock_client.get_object.side_effect = ConnectTimeoutError(endpoint_url="s3://test")
                                                                  +            mock_client.get_object.side_effect = ConnectTimeoutError(
                                                                  +                endpoint_url="s3://test"
                                                                  +            )
                                                                               mock_get_client.return_value = mock_client
                                                                   
                                                                               # Use full S3 URI as returned by shard_list
                                                                  diff --git a/tests/test_integration_lens.py b/tests/test_integration_lens.py
                                                                  index 4ef2134..fe39332 100644
                                                                  --- a/tests/test_integration_lens.py
                                                                  +++ b/tests/test_integration_lens.py
                                                                  @@ -26,6 +26,7 @@
                                                                   @atdata.packable
                                                                   class FullRecord:
                                                                       """Complete record with all fields."""
                                                                  +
                                                                       id: int
                                                                       name: str
                                                                       email: str
                                                                  @@ -37,6 +38,7 @@ class FullRecord:
                                                                   @atdata.packable
                                                                   class ProfileView:
                                                                       """View with profile information only."""
                                                                  +
                                                                       name: str
                                                                       email: str
                                                                       age: int
                                                                  @@ -45,12 +47,14 @@ class ProfileView:
                                                                   @atdata.packable
                                                                   class NameView:
                                                                       """Minimal view with just name."""
                                                                  +
                                                                       name: str
                                                                   
                                                                   
                                                                   @atdata.packable
                                                                   class ScoredRecord:
                                                                       """Record with score and embedding."""
                                                                  +
                                                                       id: int
                                                                       score: float
                                                                       embedding: NDArray
                                                                  @@ -59,6 +63,7 @@ class ScoredRecord:
                                                                   @atdata.packable
                                                                   class OptionalFieldSample:
                                                                       """Sample with optional fields."""
                                                                  +
                                                                       name: str
                                                                       value: int
                                                                       extra: str | None = None
                                                                  @@ -68,6 +73,7 @@ class OptionalFieldSample:
                                                                   @atdata.packable
                                                                   class OptionalView:
                                                                       """View of optional sample."""
                                                                  +
                                                                       name: str
                                                                       extra: str | None = None
                                                                   
                                                                  @@ -148,7 +154,9 @@ def optional_to_view(opt: OptionalFieldSample) -> OptionalView:
                                                                   
                                                                   
                                                                   @optional_to_view.putter
                                                                  -def optional_to_view_put(view: OptionalView, source: OptionalFieldSample) -> OptionalFieldSample:
                                                                  +def optional_to_view_put(
                                                                  +    view: OptionalView, source: OptionalFieldSample
                                                                  +) -> OptionalFieldSample:
                                                                       """Update optional sample from view."""
                                                                       return OptionalFieldSample(
                                                                           name=view.name,
                                                                  @@ -486,6 +494,7 @@ def test_registered_lens_discoverable(self):
                                                                   
                                                                       def test_unregistered_lens_raises(self):
                                                                           """Querying unregistered lens should raise ValueError."""
                                                                  +
                                                                           @atdata.packable
                                                                           class UnknownSource:
                                                                               x: int
                                                                  @@ -536,6 +545,7 @@ def test_ndarray_field_preserved(self, tmp_path):
                                                                   
                                                                       def test_ndarray_transformation_lens(self):
                                                                           """Lens that transforms NDArray values."""
                                                                  +
                                                                           @atdata.packable
                                                                           class RawData:
                                                                               values: NDArray
                                                                  diff --git a/tests/test_integration_local.py b/tests/test_integration_local.py
                                                                  index 663bd0a..08dca88 100644
                                                                  --- a/tests/test_integration_local.py
                                                                  +++ b/tests/test_integration_local.py
                                                                  @@ -28,6 +28,7 @@
                                                                   @dataclass
                                                                   class WorkflowSample(atdata.PackableSample):
                                                                       """Sample for workflow tests."""
                                                                  +
                                                                       name: str
                                                                       value: int
                                                                       score: float
                                                                  @@ -36,6 +37,7 @@ class WorkflowSample(atdata.PackableSample):
                                                                   @dataclass
                                                                   class ArrayWorkflowSample(atdata.PackableSample):
                                                                       """Sample with array for workflow tests."""
                                                                  +
                                                                       label: str
                                                                       data: NDArray
                                                                   
                                                                  @@ -43,6 +45,7 @@ class ArrayWorkflowSample(atdata.PackableSample):
                                                                   @dataclass
                                                                   class MetadataSample(atdata.PackableSample):
                                                                       """Sample for metadata workflow tests."""
                                                                  +
                                                                       id: int
                                                                       content: str
                                                                   
                                                                  @@ -61,23 +64,21 @@ def mock_s3():
                                                                       """
                                                                       with mock_aws():
                                                                           import boto3
                                                                  -        creds = {
                                                                  -            'AWS_ACCESS_KEY_ID': 'testing',
                                                                  -            'AWS_SECRET_ACCESS_KEY': 'testing'
                                                                  -        }
                                                                  +
                                                                  +        creds = {"AWS_ACCESS_KEY_ID": "testing", "AWS_SECRET_ACCESS_KEY": "testing"}
                                                                           s3_client = boto3.client(
                                                                  -            's3',
                                                                  -            aws_access_key_id=creds['AWS_ACCESS_KEY_ID'],
                                                                  -            aws_secret_access_key=creds['AWS_SECRET_ACCESS_KEY'],
                                                                  -            region_name='us-east-1'
                                                                  +            "s3",
                                                                  +            aws_access_key_id=creds["AWS_ACCESS_KEY_ID"],
                                                                  +            aws_secret_access_key=creds["AWS_SECRET_ACCESS_KEY"],
                                                                  +            region_name="us-east-1",
                                                                           )
                                                                  -        bucket_name = 'integration-test-bucket'
                                                                  +        bucket_name = "integration-test-bucket"
                                                                           s3_client.create_bucket(Bucket=bucket_name)
                                                                           yield {
                                                                  -            'credentials': creds,
                                                                  -            'bucket': bucket_name,
                                                                  -            'hive_path': f'{bucket_name}/datasets',
                                                                  -            's3_client': s3_client
                                                                  +            "credentials": creds,
                                                                  +            "bucket": bucket_name,
                                                                  +            "hive_path": f"{bucket_name}/datasets",
                                                                  +            "s3_client": s3_client,
                                                                           }
                                                                   
                                                                   
                                                                  @@ -124,9 +125,9 @@ def test_init_publish_schema_insert_query(self, mock_s3, clean_redis, tmp_path):
                                                                           """Full workflow: init repo → publish schema → insert → query entry."""
                                                                           # Initialize repo
                                                                           repo = atlocal.Repo(
                                                                  -            s3_credentials=mock_s3['credentials'],
                                                                  -            hive_path=mock_s3['hive_path'],
                                                                  -            redis=clean_redis
                                                                  +            s3_credentials=mock_s3["credentials"],
                                                                  +            hive_path=mock_s3["hive_path"],
                                                                  +            redis=clean_redis,
                                                                           )
                                                                   
                                                                           # Publish schema first
                                                                  @@ -154,9 +155,9 @@ def test_init_publish_schema_insert_query(self, mock_s3, clean_redis, tmp_path):
                                                                       def test_multiple_datasets_same_schema(self, mock_s3, clean_redis, tmp_path):
                                                                           """Insert multiple datasets with same schema type."""
                                                                           repo = atlocal.Repo(
                                                                  -            s3_credentials=mock_s3['credentials'],
                                                                  -            hive_path=mock_s3['hive_path'],
                                                                  -            redis=clean_redis
                                                                  +            s3_credentials=mock_s3["credentials"],
                                                                  +            hive_path=mock_s3["hive_path"],
                                                                  +            redis=clean_redis,
                                                                           )
                                                                   
                                                                           # Create multiple datasets
                                                                  @@ -187,9 +188,9 @@ def test_multiple_datasets_same_schema(self, mock_s3, clean_redis, tmp_path):
                                                                       def test_different_schema_types(self, mock_s3, clean_redis, tmp_path):
                                                                           """Insert datasets with different schema types."""
                                                                           repo = atlocal.Repo(
                                                                  -            s3_credentials=mock_s3['credentials'],
                                                                  -            hive_path=mock_s3['hive_path'],
                                                                  -            redis=clean_redis
                                                                  +            s3_credentials=mock_s3["credentials"],
                                                                  +            hive_path=mock_s3["hive_path"],
                                                                  +            redis=clean_redis,
                                                                           )
                                                                   
                                                                           # Different sample types
                                                                  @@ -429,6 +430,7 @@ def test_entries_generator_is_lazy(self, clean_redis):
                                                                           # Should be a generator
                                                                           entries = index.entries
                                                                           import types
                                                                  +
                                                                           assert isinstance(entries, types.GeneratorType)
                                                                   
                                                                           # Can iterate partially
                                                                  @@ -449,9 +451,9 @@ class TestMetadataPersistence:
                                                                       def test_metadata_preserved_through_insert(self, mock_s3, clean_redis, tmp_path):
                                                                           """Metadata should be preserved when inserting dataset."""
                                                                           repo = atlocal.Repo(
                                                                  -            s3_credentials=mock_s3['credentials'],
                                                                  -            hive_path=mock_s3['hive_path'],
                                                                  -            redis=clean_redis
                                                                  +            s3_credentials=mock_s3["credentials"],
                                                                  +            hive_path=mock_s3["hive_path"],
                                                                  +            redis=clean_redis,
                                                                           )
                                                                   
                                                                           ds = create_workflow_dataset(tmp_path, n_samples=5)
                                                                  @@ -518,12 +520,14 @@ class TestCacheLocalModes:
                                                                       @pytest.mark.filterwarnings("ignore::pytest.PytestUnraisableExceptionWarning")
                                                                       @pytest.mark.filterwarnings("ignore:coroutine.*was never awaited:RuntimeWarning")
                                                                       @pytest.mark.filterwarnings("ignore:Repo is deprecated:DeprecationWarning")
                                                                  -    def test_cache_local_true_produces_valid_entry(self, mock_s3, clean_redis, tmp_path):
                                                                  +    def test_cache_local_true_produces_valid_entry(
                                                                  +        self, mock_s3, clean_redis, tmp_path
                                                                  +    ):
                                                                           """cache_local=True should produce valid index entry."""
                                                                           repo = atlocal.Repo(
                                                                  -            s3_credentials=mock_s3['credentials'],
                                                                  -            hive_path=mock_s3['hive_path'],
                                                                  -            redis=clean_redis
                                                                  +            s3_credentials=mock_s3["credentials"],
                                                                  +            hive_path=mock_s3["hive_path"],
                                                                  +            redis=clean_redis,
                                                                           )
                                                                   
                                                                           ds = create_workflow_dataset(tmp_path, n_samples=10)
                                                                  @@ -536,12 +540,14 @@ def test_cache_local_true_produces_valid_entry(self, mock_s3, clean_redis, tmp_p
                                                                       @pytest.mark.filterwarnings("ignore::pytest.PytestUnraisableExceptionWarning")
                                                                       @pytest.mark.filterwarnings("ignore:coroutine.*was never awaited:RuntimeWarning")
                                                                       @pytest.mark.filterwarnings("ignore:Repo is deprecated:DeprecationWarning")
                                                                  -    def test_cache_local_false_produces_valid_entry(self, mock_s3, clean_redis, tmp_path):
                                                                  +    def test_cache_local_false_produces_valid_entry(
                                                                  +        self, mock_s3, clean_redis, tmp_path
                                                                  +    ):
                                                                           """cache_local=False should produce valid index entry."""
                                                                           repo = atlocal.Repo(
                                                                  -            s3_credentials=mock_s3['credentials'],
                                                                  -            hive_path=mock_s3['hive_path'],
                                                                  -            redis=clean_redis
                                                                  +            s3_credentials=mock_s3["credentials"],
                                                                  +            hive_path=mock_s3["hive_path"],
                                                                  +            redis=clean_redis,
                                                                           )
                                                                   
                                                                           ds = create_workflow_dataset(tmp_path, n_samples=10)
                                                                  @@ -557,16 +563,18 @@ def test_cache_local_false_produces_valid_entry(self, mock_s3, clean_redis, tmp_
                                                                       def test_both_modes_produce_same_structure(self, mock_s3, clean_redis, tmp_path):
                                                                           """Both cache modes should produce entries with same structure."""
                                                                           repo = atlocal.Repo(
                                                                  -            s3_credentials=mock_s3['credentials'],
                                                                  -            hive_path=mock_s3['hive_path'],
                                                                  -            redis=clean_redis
                                                                  +            s3_credentials=mock_s3["credentials"],
                                                                  +            hive_path=mock_s3["hive_path"],
                                                                  +            redis=clean_redis,
                                                                           )
                                                                   
                                                                           ds1 = create_workflow_dataset(tmp_path / "cached", n_samples=10)
                                                                           ds2 = create_workflow_dataset(tmp_path / "direct", n_samples=10)
                                                                   
                                                                           entry1, _ = repo.insert(ds1, name="cached-mode", cache_local=True, maxcount=100)
                                                                  -        entry2, _ = repo.insert(ds2, name="direct-mode", cache_local=False, maxcount=100)
                                                                  +        entry2, _ = repo.insert(
                                                                  +            ds2, name="direct-mode", cache_local=False, maxcount=100
                                                                  +        )
                                                                   
                                                                           # Both should have valid structure
                                                                           assert entry1.schema_ref == entry2.schema_ref  # Same type
                                                                  @@ -598,11 +606,11 @@ def test_entry_has_required_properties(self):
                                                                           )
                                                                   
                                                                           # Required properties
                                                                  -        assert hasattr(entry, 'name')
                                                                  -        assert hasattr(entry, 'schema_ref')
                                                                  -        assert hasattr(entry, 'data_urls')
                                                                  -        assert hasattr(entry, 'metadata')
                                                                  -        assert hasattr(entry, 'cid')
                                                                  +        assert hasattr(entry, "name")
                                                                  +        assert hasattr(entry, "schema_ref")
                                                                  +        assert hasattr(entry, "data_urls")
                                                                  +        assert hasattr(entry, "metadata")
                                                                  +        assert hasattr(entry, "cid")
                                                                   
                                                                           # Values accessible
                                                                           assert entry.name == "props-test"
                                                                  @@ -629,12 +637,14 @@ class TestMultiShardStorage:
                                                                       @pytest.mark.filterwarnings("ignore::pytest.PytestUnraisableExceptionWarning")
                                                                       @pytest.mark.filterwarnings("ignore:coroutine.*was never awaited:RuntimeWarning")
                                                                       @pytest.mark.filterwarnings("ignore:Repo is deprecated:DeprecationWarning")
                                                                  -    def test_large_dataset_creates_multiple_shards(self, mock_s3, clean_redis, tmp_path):
                                                                  +    def test_large_dataset_creates_multiple_shards(
                                                                  +        self, mock_s3, clean_redis, tmp_path
                                                                  +    ):
                                                                           """Large dataset should create multiple shard files."""
                                                                           repo = atlocal.Repo(
                                                                  -            s3_credentials=mock_s3['credentials'],
                                                                  -            hive_path=mock_s3['hive_path'],
                                                                  -            redis=clean_redis
                                                                  +            s3_credentials=mock_s3["credentials"],
                                                                  +            hive_path=mock_s3["hive_path"],
                                                                  +            redis=clean_redis,
                                                                           )
                                                                   
                                                                           # Create dataset with many samples
                                                                  @@ -662,9 +672,9 @@ def test_large_dataset_creates_multiple_shards(self, mock_s3, clean_redis, tmp_p
                                                                       def test_single_shard_no_brace_notation(self, mock_s3, clean_redis, tmp_path):
                                                                           """Small dataset should result in single shard without brace notation."""
                                                                           repo = atlocal.Repo(
                                                                  -            s3_credentials=mock_s3['credentials'],
                                                                  -            hive_path=mock_s3['hive_path'],
                                                                  -            redis=clean_redis
                                                                  +            s3_credentials=mock_s3["credentials"],
                                                                  +            hive_path=mock_s3["hive_path"],
                                                                  +            redis=clean_redis,
                                                                           )
                                                                   
                                                                           ds = create_workflow_dataset(tmp_path, n_samples=5)
                                                                  diff --git a/tests/test_integration_promotion.py b/tests/test_integration_promotion.py
                                                                  index 9f37480..dec4716 100644
                                                                  --- a/tests/test_integration_promotion.py
                                                                  +++ b/tests/test_integration_promotion.py
                                                                  @@ -28,6 +28,7 @@
                                                                   @atdata.packable
                                                                   class PromotionSample:
                                                                       """Sample for promotion tests."""
                                                                  +
                                                                       name: str
                                                                       value: int
                                                                   
                                                                  @@ -35,6 +36,7 @@ class PromotionSample:
                                                                   @atdata.packable
                                                                   class PromotionArraySample:
                                                                       """Sample with NDArray for promotion tests."""
                                                                  +
                                                                       label: str
                                                                       features: NDArray
                                                                   
                                                                  @@ -110,10 +112,14 @@ def test_promote_local_to_atmosphere(
                                                                   
                                                                           # Setup mock responses for atmosphere operations
                                                                           schema_response = Mock()
                                                                  -        schema_response.uri = f"at://did:plc:test/{LEXICON_NAMESPACE}.sampleSchema/promoted-schema"
                                                                  +        schema_response.uri = (
                                                                  +            f"at://did:plc:test/{LEXICON_NAMESPACE}.sampleSchema/promoted-schema"
                                                                  +        )
                                                                   
                                                                           dataset_response = Mock()
                                                                  -        dataset_response.uri = f"at://did:plc:test/{LEXICON_NAMESPACE}.dataset/promoted-dataset"
                                                                  +        dataset_response.uri = (
                                                                  +            f"at://did:plc:test/{LEXICON_NAMESPACE}.dataset/promoted-dataset"
                                                                  +        )
                                                                   
                                                                           mock_atproto_client.com.atproto.repo.create_record.side_effect = [
                                                                               schema_response,
                                                                  @@ -124,7 +130,9 @@ def test_promote_local_to_atmosphere(
                                                                           mock_list_response = Mock()
                                                                           mock_list_response.records = []
                                                                           mock_list_response.cursor = None
                                                                  -        mock_atproto_client.com.atproto.repo.list_records.return_value = mock_list_response
                                                                  +        mock_atproto_client.com.atproto.repo.list_records.return_value = (
                                                                  +            mock_list_response
                                                                  +        )
                                                                   
                                                                           # Promote
                                                                           result_uri = promote_to_atmosphere(
                                                                  @@ -146,7 +154,9 @@ def test_promoted_dataset_preserves_name(
                                                                           mock_list_response = Mock()
                                                                           mock_list_response.records = []
                                                                           mock_list_response.cursor = None
                                                                  -        mock_atproto_client.com.atproto.repo.list_records.return_value = mock_list_response
                                                                  +        mock_atproto_client.com.atproto.repo.list_records.return_value = (
                                                                  +            mock_list_response
                                                                  +        )
                                                                   
                                                                           schema_response = Mock()
                                                                           schema_response.uri = f"at://did:plc:test/{LEXICON_NAMESPACE}.sampleSchema/s1"
                                                                  @@ -177,7 +187,9 @@ def test_promoted_dataset_preserves_data_urls(
                                                                           mock_list_response = Mock()
                                                                           mock_list_response.records = []
                                                                           mock_list_response.cursor = None
                                                                  -        mock_atproto_client.com.atproto.repo.list_records.return_value = mock_list_response
                                                                  +        mock_atproto_client.com.atproto.repo.list_records.return_value = (
                                                                  +            mock_list_response
                                                                  +        )
                                                                   
                                                                           schema_response = Mock()
                                                                           schema_response.uri = f"at://did:plc:test/{LEXICON_NAMESPACE}.sampleSchema/s1"
                                                                  @@ -215,12 +227,16 @@ def test_reuses_existing_schema(
                                                                   
                                                                           # Patch _find_existing_schema to return an existing schema URI
                                                                           with patch("atdata.promote._find_existing_schema") as mock_find:
                                                                  -            mock_find.return_value = f"at://did:plc:test/{LEXICON_NAMESPACE}.sampleSchema/existing"
                                                                  +            mock_find.return_value = (
                                                                  +                f"at://did:plc:test/{LEXICON_NAMESPACE}.sampleSchema/existing"
                                                                  +            )
                                                                   
                                                                               # Only dataset should be created (schema exists)
                                                                               dataset_response = Mock()
                                                                               dataset_response.uri = f"at://did:plc:test/{LEXICON_NAMESPACE}.dataset/d1"
                                                                  -            mock_atproto_client.com.atproto.repo.create_record.return_value = dataset_response
                                                                  +            mock_atproto_client.com.atproto.repo.create_record.return_value = (
                                                                  +                dataset_response
                                                                  +            )
                                                                   
                                                                               promote_to_atmosphere(local_entry, local_index, authenticated_client)
                                                                   
                                                                  @@ -228,7 +244,9 @@ def test_reuses_existing_schema(
                                                                               assert mock_atproto_client.com.atproto.repo.create_record.call_count == 1
                                                                   
                                                                               # Verify it was the dataset call
                                                                  -            call_kwargs = mock_atproto_client.com.atproto.repo.create_record.call_args.kwargs
                                                                  +            call_kwargs = (
                                                                  +                mock_atproto_client.com.atproto.repo.create_record.call_args.kwargs
                                                                  +            )
                                                                               assert "dataset" in call_kwargs["data"]["collection"]
                                                                   
                                                                       def test_creates_schema_when_not_found(
                                                                  @@ -241,7 +259,9 @@ def test_creates_schema_when_not_found(
                                                                           mock_list_response = Mock()
                                                                           mock_list_response.records = []
                                                                           mock_list_response.cursor = None
                                                                  -        mock_atproto_client.com.atproto.repo.list_records.return_value = mock_list_response
                                                                  +        mock_atproto_client.com.atproto.repo.list_records.return_value = (
                                                                  +            mock_list_response
                                                                  +        )
                                                                   
                                                                           # Both schema and dataset should be created
                                                                           schema_response = Mock()
                                                                  @@ -277,11 +297,15 @@ def test_version_mismatch_creates_new_schema(
                                                                           mock_list_response = Mock()
                                                                           mock_list_response.records = [existing_schema]
                                                                           mock_list_response.cursor = None
                                                                  -        mock_atproto_client.com.atproto.repo.list_records.return_value = mock_list_response
                                                                  +        mock_atproto_client.com.atproto.repo.list_records.return_value = (
                                                                  +            mock_list_response
                                                                  +        )
                                                                   
                                                                           # Both should be created (version mismatch)
                                                                           schema_response = Mock()
                                                                  -        schema_response.uri = f"at://did:plc:test/{LEXICON_NAMESPACE}.sampleSchema/v1new"
                                                                  +        schema_response.uri = (
                                                                  +            f"at://did:plc:test/{LEXICON_NAMESPACE}.sampleSchema/v1new"
                                                                  +        )
                                                                   
                                                                           dataset_response = Mock()
                                                                           dataset_response.uri = f"at://did:plc:test/{LEXICON_NAMESPACE}.dataset/d1"
                                                                  @@ -314,7 +338,9 @@ def test_metadata_included_in_promoted_dataset(
                                                                           mock_list_response = Mock()
                                                                           mock_list_response.records = []
                                                                           mock_list_response.cursor = None
                                                                  -        mock_atproto_client.com.atproto.repo.list_records.return_value = mock_list_response
                                                                  +        mock_atproto_client.com.atproto.repo.list_records.return_value = (
                                                                  +            mock_list_response
                                                                  +        )
                                                                   
                                                                           schema_response = Mock()
                                                                           schema_response.uri = f"at://did:plc:test/{LEXICON_NAMESPACE}.sampleSchema/s1"
                                                                  @@ -337,7 +363,9 @@ def test_metadata_included_in_promoted_dataset(
                                                                           # The metadata should be in the record (may be msgpack encoded)
                                                                           assert "metadata" in record
                                                                   
                                                                  -    def test_none_metadata_handled(self, clean_redis, authenticated_client, mock_atproto_client):
                                                                  +    def test_none_metadata_handled(
                                                                  +        self, clean_redis, authenticated_client, mock_atproto_client
                                                                  +    ):
                                                                           """Entry without metadata should promote successfully."""
                                                                           index = LocalIndex(redis=clean_redis)
                                                                           schema_ref = index.publish_schema(PromotionSample, version="1.0.0")
                                                                  @@ -354,7 +382,9 @@ def test_none_metadata_handled(self, clean_redis, authenticated_client, mock_atp
                                                                           mock_list_response = Mock()
                                                                           mock_list_response.records = []
                                                                           mock_list_response.cursor = None
                                                                  -        mock_atproto_client.com.atproto.repo.list_records.return_value = mock_list_response
                                                                  +        mock_atproto_client.com.atproto.repo.list_records.return_value = (
                                                                  +            mock_list_response
                                                                  +        )
                                                                   
                                                                           schema_response = Mock()
                                                                           schema_response.uri = f"at://did:plc:test/{LEXICON_NAMESPACE}.sampleSchema/s1"
                                                                  @@ -423,7 +453,9 @@ def mock_find_existing(client, name, version):
                                                                               dataset_responses[2],  # Third dataset
                                                                           ]
                                                                   
                                                                  -        with patch("atdata.promote._find_existing_schema", side_effect=mock_find_existing):
                                                                  +        with patch(
                                                                  +            "atdata.promote._find_existing_schema", side_effect=mock_find_existing
                                                                  +        ):
                                                                               # Promote all three
                                                                               for i, entry in enumerate(entries):
                                                                                   promote_to_atmosphere(entry, index, authenticated_client)
                                                                  @@ -462,7 +494,9 @@ def test_many_shards_promoted(
                                                                           mock_list_response = Mock()
                                                                           mock_list_response.records = []
                                                                           mock_list_response.cursor = None
                                                                  -        mock_atproto_client.com.atproto.repo.list_records.return_value = mock_list_response
                                                                  +        mock_atproto_client.com.atproto.repo.list_records.return_value = (
                                                                  +            mock_list_response
                                                                  +        )
                                                                   
                                                                           schema_response = Mock()
                                                                           schema_response.uri = f"at://did:plc:test/{LEXICON_NAMESPACE}.sampleSchema/s1"
                                                                  @@ -543,7 +577,9 @@ def test_custom_name_override(
                                                                           mock_list_response = Mock()
                                                                           mock_list_response.records = []
                                                                           mock_list_response.cursor = None
                                                                  -        mock_atproto_client.com.atproto.repo.list_records.return_value = mock_list_response
                                                                  +        mock_atproto_client.com.atproto.repo.list_records.return_value = (
                                                                  +            mock_list_response
                                                                  +        )
                                                                   
                                                                           schema_response = Mock()
                                                                           schema_response.uri = f"at://did:plc:test/{LEXICON_NAMESPACE}.sampleSchema/s1"
                                                                  @@ -578,7 +614,9 @@ def test_tags_and_license(
                                                                           mock_list_response = Mock()
                                                                           mock_list_response.records = []
                                                                           mock_list_response.cursor = None
                                                                  -        mock_atproto_client.com.atproto.repo.list_records.return_value = mock_list_response
                                                                  +        mock_atproto_client.com.atproto.repo.list_records.return_value = (
                                                                  +            mock_list_response
                                                                  +        )
                                                                   
                                                                           schema_response = Mock()
                                                                           schema_response.uri = f"at://did:plc:test/{LEXICON_NAMESPACE}.sampleSchema/s1"
                                                                  @@ -616,7 +654,9 @@ def test_description_passed(
                                                                           mock_list_response = Mock()
                                                                           mock_list_response.records = []
                                                                           mock_list_response.cursor = None
                                                                  -        mock_atproto_client.com.atproto.repo.list_records.return_value = mock_list_response
                                                                  +        mock_atproto_client.com.atproto.repo.list_records.return_value = (
                                                                  +            mock_list_response
                                                                  +        )
                                                                   
                                                                           schema_response = Mock()
                                                                           schema_response.uri = f"at://did:plc:test/{LEXICON_NAMESPACE}.sampleSchema/s1"
                                                                  diff --git a/tests/test_lens.py b/tests/test_lens.py
                                                                  index cfe514f..c558a1d 100644
                                                                  --- a/tests/test_lens.py
                                                                  +++ b/tests/test_lens.py
                                                                  @@ -16,6 +16,7 @@
                                                                   ##
                                                                   # Tests
                                                                   
                                                                  +
                                                                   def test_lens():
                                                                       """Test a lens between sample types"""
                                                                   
                                                                  @@ -31,140 +32,134 @@ class Source:
                                                                       class View:
                                                                           name: str
                                                                           height: float
                                                                  -    
                                                                  +
                                                                       @atdata.lens
                                                                  -    def polite( s: Source ) -> View:
                                                                  +    def polite(s: Source) -> View:
                                                                           return View(
                                                                  -            name = s.name,
                                                                  -            height = s.height,
                                                                  +            name=s.name,
                                                                  +            height=s.height,
                                                                           )
                                                                  -    
                                                                  +
                                                                       @polite.putter
                                                                  -    def polite_update( v: View, s: Source ) -> Source:
                                                                  +    def polite_update(v: View, s: Source) -> Source:
                                                                           return Source(
                                                                  -            name = v.name,
                                                                  -            height = v.height,
                                                                  +            name=v.name,
                                                                  +            height=v.height,
                                                                               #
                                                                  -            age = s.age,
                                                                  +            age=s.age,
                                                                           )
                                                                  -    
                                                                  +
                                                                       # Test with an example sample
                                                                   
                                                                       test_source = Source(
                                                                  -        name = 'Hello World',
                                                                  -        age = 42,
                                                                  -        height = 182.9,
                                                                  +        name="Hello World",
                                                                  +        age=42,
                                                                  +        height=182.9,
                                                                       )
                                                                       correct_view = View(
                                                                  -        name = test_source.name,
                                                                  -        height = test_source.height,
                                                                  +        name=test_source.name,
                                                                  +        height=test_source.height,
                                                                       )
                                                                   
                                                                  -    test_view = polite( test_source )
                                                                  -    assert test_view == correct_view, \
                                                                  -        f'Incorrect lens behavior: {test_view}, and not {correct_view}'
                                                                  +    test_view = polite(test_source)
                                                                  +    assert test_view == correct_view, (
                                                                  +        f"Incorrect lens behavior: {test_view}, and not {correct_view}"
                                                                  +    )
                                                                   
                                                                       # This lens should be well-behaved
                                                                   
                                                                       update_view = View(
                                                                  -        name = 'Now Taller',
                                                                  -        height = 192.9,
                                                                  +        name="Now Taller",
                                                                  +        height=192.9,
                                                                       )
                                                                   
                                                                  -    x = polite( polite.put( update_view, test_source ) )
                                                                  -    assert x == update_view, \
                                                                  -        f'Violation of GetPut: {x} =/= {update_view}'
                                                                  -    
                                                                  -    y = polite.put( polite( test_source ), test_source )
                                                                  -    assert y == test_source, \
                                                                  -        f'Violation of PutGet: {y} =/= {test_source}'
                                                                  +    x = polite(polite.put(update_view, test_source))
                                                                  +    assert x == update_view, f"Violation of GetPut: {x} =/= {update_view}"
                                                                  +
                                                                  +    y = polite.put(polite(test_source), test_source)
                                                                  +    assert y == test_source, f"Violation of PutGet: {y} =/= {test_source}"
                                                                   
                                                                       # PutPut law: put(v2, put(v1, s)) = put(v2, s)
                                                                       another_view = View(
                                                                  -        name = 'Different Name',
                                                                  -        height = 165.0,
                                                                  +        name="Different Name",
                                                                  +        height=165.0,
                                                                       )
                                                                  -    z1 = polite.put( another_view, polite.put( update_view, test_source ) )
                                                                  -    z2 = polite.put( another_view, test_source )
                                                                  -    assert z1 == z2, \
                                                                  -        f'Violation of PutPut: {z1} =/= {z2}'
                                                                  +    z1 = polite.put(another_view, polite.put(update_view, test_source))
                                                                  +    z2 = polite.put(another_view, test_source)
                                                                  +    assert z1 == z2, f"Violation of PutPut: {z1} =/= {z2}"
                                                                  +
                                                                   
                                                                  -def test_conversion( tmp_path ):
                                                                  +def test_conversion(tmp_path):
                                                                       """Test automatic interconversion between sample types"""
                                                                   
                                                                       @dataclass
                                                                  -    class Source( atdata.PackableSample ):
                                                                  +    class Source(atdata.PackableSample):
                                                                           name: str
                                                                           height: float
                                                                           favorite_pizza: str
                                                                           favorite_image: NDArray
                                                                  -    
                                                                  +
                                                                       @dataclass
                                                                  -    class View( atdata.PackableSample ):
                                                                  +    class View(atdata.PackableSample):
                                                                           name: str
                                                                           favorite_pizza: str
                                                                           favorite_image: NDArray
                                                                  -    
                                                                  +
                                                                       @atdata.lens
                                                                  -    def polite( s: Source ) -> View:
                                                                  +    def polite(s: Source) -> View:
                                                                           return View(
                                                                  -            name = s.name,
                                                                  -            favorite_pizza = s.favorite_pizza,
                                                                  -            favorite_image = s.favorite_image,
                                                                  +            name=s.name,
                                                                  +            favorite_pizza=s.favorite_pizza,
                                                                  +            favorite_image=s.favorite_image,
                                                                           )
                                                                  -    
                                                                  +
                                                                       # Map a test sample through the view
                                                                       test_source = Source(
                                                                  -        name = 'Larry',
                                                                  -        height = 42.,
                                                                  -        favorite_pizza = 'pineapple',
                                                                  -        favorite_image = np.random.randn( 224, 224 )
                                                                  +        name="Larry",
                                                                  +        height=42.0,
                                                                  +        favorite_pizza="pineapple",
                                                                  +        favorite_image=np.random.randn(224, 224),
                                                                       )
                                                                  -    test_view = polite( test_source )
                                                                  +    test_view = polite(test_source)
                                                                   
                                                                       # Create a test dataset
                                                                   
                                                                       k_test = 100
                                                                  -    test_filename = (
                                                                  -        tmp_path
                                                                  -        / 'test-source.tar'
                                                                  -    ).as_posix()
                                                                  +    test_filename = (tmp_path / "test-source.tar").as_posix()
                                                                   
                                                                  -    with wds.writer.TarWriter( test_filename ) as dest:
                                                                  -        for i in range( k_test ):
                                                                  +    with wds.writer.TarWriter(test_filename) as dest:
                                                                  +        for i in range(k_test):
                                                                               # Create a new copied sample
                                                                               cur_sample = Source(
                                                                  -                name = test_source.name,
                                                                  -                height = test_source.height,
                                                                  -                favorite_pizza = test_source.favorite_pizza,
                                                                  -                favorite_image = test_source.favorite_image,
                                                                  +                name=test_source.name,
                                                                  +                height=test_source.height,
                                                                  +                favorite_pizza=test_source.favorite_pizza,
                                                                  +                favorite_image=test_source.favorite_image,
                                                                               )
                                                                  -            dest.write( cur_sample.as_wds )
                                                                  -    
                                                                  +            dest.write(cur_sample.as_wds)
                                                                  +
                                                                       # Try reading the test dataset
                                                                   
                                                                  -    ds = (
                                                                  -        atdata.Dataset[Source]( test_filename )
                                                                  -            .as_type( View )
                                                                  -    )
                                                                  +    ds = atdata.Dataset[Source](test_filename).as_type(View)
                                                                   
                                                                  -    assert ds.sample_type == View, \
                                                                  -        'Auto-mapped'
                                                                  +    assert ds.sample_type == View, "Auto-mapped"
                                                                   
                                                                       sample: View | None = None
                                                                  -    for sample in ds.ordered( batch_size = None ):
                                                                  +    for sample in ds.ordered(batch_size=None):
                                                                           # Load only the first sample
                                                                           break
                                                                   
                                                                  -    assert sample is not None, \
                                                                  -        'Did not load any samples from `Source` dataset'
                                                                  +    assert sample is not None, "Did not load any samples from `Source` dataset"
                                                                   
                                                                  -    assert sample.name == test_view.name, \
                                                                  -        f'Divergence on auto-mapped dataset: `name` should be {test_view.name}, but is {sample.name}'
                                                                  -    assert sample.favorite_pizza == test_view.favorite_pizza, \
                                                                  -        f'Divergence on auto-mapped dataset: `favorite_pizza` should be {test_view.favorite_pizza}, but is {sample.favorite_pizza}'
                                                                  -    assert np.all( sample.favorite_image == test_view.favorite_image ), \
                                                                  -        'Divergence on auto-mapped dataset: `favorite_image`'
                                                                  +    assert sample.name == test_view.name, (
                                                                  +        f"Divergence on auto-mapped dataset: `name` should be {test_view.name}, but is {sample.name}"
                                                                  +    )
                                                                  +    assert sample.favorite_pizza == test_view.favorite_pizza, (
                                                                  +        f"Divergence on auto-mapped dataset: `favorite_pizza` should be {test_view.favorite_pizza}, but is {sample.favorite_pizza}"
                                                                  +    )
                                                                  +    assert np.all(sample.favorite_image == test_view.favorite_image), (
                                                                  +        "Divergence on auto-mapped dataset: `favorite_image`"
                                                                  +    )
                                                                   
                                                                   
                                                                   ##
                                                                  @@ -173,6 +168,7 @@ def polite( s: Source ) -> View:
                                                                   
                                                                   def test_lens_get_method():
                                                                       """Test calling lens.get() explicitly instead of lens()."""
                                                                  +
                                                                       @atdata.packable
                                                                       class GetSource:
                                                                           value: int
                                                                  @@ -197,6 +193,7 @@ def doubler(s: GetSource) -> GetView:
                                                                   
                                                                   def test_lens_trivial_putter():
                                                                       """Test lens without explicit putter uses trivial putter."""
                                                                  +
                                                                       @atdata.packable
                                                                       class TrivialSource:
                                                                           a: int
                                                                  @@ -237,4 +234,4 @@ class UnregisteredView:
                                                                           network.transform(UnregisteredSource, UnregisteredView)
                                                                   
                                                                   
                                                                  -##
                                                                  \ No newline at end of file
                                                                  +##
                                                                  diff --git a/tests/test_local.py b/tests/test_local.py
                                                                  index 3a91e8c..79665f3 100644
                                                                  --- a/tests/test_local.py
                                                                  +++ b/tests/test_local.py
                                                                  @@ -26,6 +26,7 @@
                                                                   ##
                                                                   # Test fixtures (redis_connection and clean_redis are in conftest.py)
                                                                   
                                                                  +
                                                                   @pytest.fixture
                                                                   def mock_s3():
                                                                       """Provide a mock S3 environment using moto.
                                                                  @@ -36,28 +37,26 @@ def mock_s3():
                                                                       """
                                                                       with mock_aws():
                                                                           # Create S3 credentials dict (no endpoint_url for moto)
                                                                  -        creds = {
                                                                  -            'AWS_ACCESS_KEY_ID': 'testing',
                                                                  -            'AWS_SECRET_ACCESS_KEY': 'testing'
                                                                  -        }
                                                                  +        creds = {"AWS_ACCESS_KEY_ID": "testing", "AWS_SECRET_ACCESS_KEY": "testing"}
                                                                   
                                                                           # Create S3 client and bucket
                                                                           import boto3
                                                                  +
                                                                           s3_client = boto3.client(
                                                                  -            's3',
                                                                  -            aws_access_key_id=creds['AWS_ACCESS_KEY_ID'],
                                                                  -            aws_secret_access_key=creds['AWS_SECRET_ACCESS_KEY'],
                                                                  -            region_name='us-east-1'
                                                                  +            "s3",
                                                                  +            aws_access_key_id=creds["AWS_ACCESS_KEY_ID"],
                                                                  +            aws_secret_access_key=creds["AWS_SECRET_ACCESS_KEY"],
                                                                  +            region_name="us-east-1",
                                                                           )
                                                                   
                                                                  -        bucket_name = 'test-bucket'
                                                                  +        bucket_name = "test-bucket"
                                                                           s3_client.create_bucket(Bucket=bucket_name)
                                                                   
                                                                           yield {
                                                                  -            'credentials': creds,
                                                                  -            'bucket': bucket_name,
                                                                  -            'hive_path': f'{bucket_name}/datasets',
                                                                  -            's3_client': s3_client
                                                                  +            "credentials": creds,
                                                                  +            "bucket": bucket_name,
                                                                  +            "hive_path": f"{bucket_name}/datasets",
                                                                  +            "s3_client": s3_client,
                                                                           }
                                                                   
                                                                   
                                                                  @@ -83,6 +82,7 @@ class SimpleTestSample(atdata.PackableSample):
                                                                       Note: This matches SharedBasicSample in conftest.py but is kept local
                                                                       because tests verify class name behavior.
                                                                       """
                                                                  +
                                                                       name: str
                                                                       value: int
                                                                   
                                                                  @@ -93,11 +93,14 @@ class ArrayTestSample(atdata.PackableSample):
                                                                   
                                                                       Note: Similar to SharedNumpySample but kept local for test isolation.
                                                                       """
                                                                  +
                                                                       label: str
                                                                       data: NDArray
                                                                   
                                                                   
                                                                  -def make_simple_dataset(tmp_path: Path, num_samples: int = 10, name: str = "test") -> atdata.Dataset:
                                                                  +def make_simple_dataset(
                                                                  +    tmp_path: Path, num_samples: int = 10, name: str = "test"
                                                                  +) -> atdata.Dataset:
                                                                       """Create a SimpleTestSample dataset for testing."""
                                                                       dataset_path = tmp_path / f"{name}-dataset-000000.tar"
                                                                       with wds.writer.TarWriter(str(dataset_path)) as sink:
                                                                  @@ -107,7 +110,9 @@ def make_simple_dataset(tmp_path: Path, num_samples: int = 10, name: str = "test
                                                                       return atdata.Dataset[SimpleTestSample](url=str(dataset_path))
                                                                   
                                                                   
                                                                  -def make_array_dataset(tmp_path: Path, num_samples: int = 3, array_shape: tuple = (10, 10)) -> atdata.Dataset:
                                                                  +def make_array_dataset(
                                                                  +    tmp_path: Path, num_samples: int = 3, array_shape: tuple = (10, 10)
                                                                  +) -> atdata.Dataset:
                                                                       """Create an ArrayTestSample dataset for testing."""
                                                                       dataset_path = tmp_path / "array-dataset-000000.tar"
                                                                       with wds.writer.TarWriter(str(dataset_path)) as sink:
                                                                  @@ -121,6 +126,7 @@ def make_array_dataset(tmp_path: Path, num_samples: int = 3, array_shape: tuple
                                                                   ##
                                                                   # Helper function tests
                                                                   
                                                                  +
                                                                   def test_kind_str_for_sample_type():
                                                                       """Test that sample types are converted to correct fully-qualified string identifiers.
                                                                   
                                                                  @@ -149,17 +155,29 @@ def test_s3_env_valid_credentials(tmp_path):
                                                                       result = atlocal._s3_env(env_file)
                                                                   
                                                                       assert result == {
                                                                  -        'AWS_ENDPOINT': 'http://localhost:9000',
                                                                  -        'AWS_ACCESS_KEY_ID': 'minioadmin',
                                                                  -        'AWS_SECRET_ACCESS_KEY': 'minioadmin'
                                                                  +        "AWS_ENDPOINT": "http://localhost:9000",
                                                                  +        "AWS_ACCESS_KEY_ID": "minioadmin",
                                                                  +        "AWS_SECRET_ACCESS_KEY": "minioadmin",
                                                                       }
                                                                   
                                                                   
                                                                  -@pytest.mark.parametrize("missing_field,env_content", [
                                                                  -    ("AWS_ENDPOINT", "AWS_ACCESS_KEY_ID=minioadmin\nAWS_SECRET_ACCESS_KEY=minioadmin\n"),
                                                                  -    ("AWS_ACCESS_KEY_ID", "AWS_ENDPOINT=http://localhost:9000\nAWS_SECRET_ACCESS_KEY=minioadmin\n"),
                                                                  -    ("AWS_SECRET_ACCESS_KEY", "AWS_ENDPOINT=http://localhost:9000\nAWS_ACCESS_KEY_ID=minioadmin\n"),
                                                                  -])
                                                                  +@pytest.mark.parametrize(
                                                                  +    "missing_field,env_content",
                                                                  +    [
                                                                  +        (
                                                                  +            "AWS_ENDPOINT",
                                                                  +            "AWS_ACCESS_KEY_ID=minioadmin\nAWS_SECRET_ACCESS_KEY=minioadmin\n",
                                                                  +        ),
                                                                  +        (
                                                                  +            "AWS_ACCESS_KEY_ID",
                                                                  +            "AWS_ENDPOINT=http://localhost:9000\nAWS_SECRET_ACCESS_KEY=minioadmin\n",
                                                                  +        ),
                                                                  +        (
                                                                  +            "AWS_SECRET_ACCESS_KEY",
                                                                  +            "AWS_ENDPOINT=http://localhost:9000\nAWS_ACCESS_KEY_ID=minioadmin\n",
                                                                  +        ),
                                                                  +    ],
                                                                  +)
                                                                   def test_s3_env_missing_required_field(tmp_path, missing_field, env_content):
                                                                       """Test that loading S3 credentials fails when a required field is missing.
                                                                   
                                                                  @@ -179,17 +197,17 @@ def test_s3_from_credentials_with_dict():
                                                                       Should create a properly configured S3FileSystem instance using dict credentials.
                                                                       """
                                                                       creds = {
                                                                  -        'AWS_ENDPOINT': 'http://localhost:9000',
                                                                  -        'AWS_ACCESS_KEY_ID': 'minioadmin',
                                                                  -        'AWS_SECRET_ACCESS_KEY': 'minioadmin'
                                                                  +        "AWS_ENDPOINT": "http://localhost:9000",
                                                                  +        "AWS_ACCESS_KEY_ID": "minioadmin",
                                                                  +        "AWS_SECRET_ACCESS_KEY": "minioadmin",
                                                                       }
                                                                   
                                                                       fs = atlocal._s3_from_credentials(creds)
                                                                   
                                                                       assert isinstance(fs, atlocal.S3FileSystem)
                                                                  -    assert fs.endpoint_url == 'http://localhost:9000'
                                                                  -    assert fs.key == 'minioadmin'
                                                                  -    assert fs.secret == 'minioadmin'
                                                                  +    assert fs.endpoint_url == "http://localhost:9000"
                                                                  +    assert fs.key == "minioadmin"
                                                                  +    assert fs.secret == "minioadmin"
                                                                   
                                                                   
                                                                   def test_s3_from_credentials_with_path(tmp_path):
                                                                  @@ -207,14 +225,15 @@ def test_s3_from_credentials_with_path(tmp_path):
                                                                       fs = atlocal._s3_from_credentials(env_file)
                                                                   
                                                                       assert isinstance(fs, atlocal.S3FileSystem)
                                                                  -    assert fs.endpoint_url == 'http://localhost:9000'
                                                                  -    assert fs.key == 'minioadmin'
                                                                  -    assert fs.secret == 'minioadmin'
                                                                  +    assert fs.endpoint_url == "http://localhost:9000"
                                                                  +    assert fs.key == "minioadmin"
                                                                  +    assert fs.secret == "minioadmin"
                                                                   
                                                                   
                                                                   ##
                                                                   # LocalDatasetEntry tests
                                                                   
                                                                  +
                                                                   def test_local_dataset_entry_creation():
                                                                       """Test creating a LocalDatasetEntry with explicit values.
                                                                   
                                                                  @@ -315,7 +334,9 @@ def test_local_dataset_entry_round_trip_redis(clean_redis):
                                                                       original_entry.write_to(clean_redis)
                                                                   
                                                                       # Read back from Redis
                                                                  -    retrieved_entry = atlocal.LocalDatasetEntry.from_redis(clean_redis, original_entry.cid)
                                                                  +    retrieved_entry = atlocal.LocalDatasetEntry.from_redis(
                                                                  +        clean_redis, original_entry.cid
                                                                  +    )
                                                                   
                                                                       assert retrieved_entry.name == original_entry.name
                                                                       assert retrieved_entry.schema_ref == original_entry.schema_ref
                                                                  @@ -356,13 +377,13 @@ def test_index_implements_abstract_index_protocol():
                                                                       index = atlocal.Index()
                                                                   
                                                                       # Check protocol methods exist
                                                                  -    assert hasattr(index, 'insert_dataset')
                                                                  -    assert hasattr(index, 'get_dataset')
                                                                  -    assert hasattr(index, 'list_datasets')
                                                                  -    assert hasattr(index, 'publish_schema')
                                                                  -    assert hasattr(index, 'get_schema')
                                                                  -    assert hasattr(index, 'list_schemas')
                                                                  -    assert hasattr(index, 'decode_schema')
                                                                  +    assert hasattr(index, "insert_dataset")
                                                                  +    assert hasattr(index, "get_dataset")
                                                                  +    assert hasattr(index, "list_datasets")
                                                                  +    assert hasattr(index, "publish_schema")
                                                                  +    assert hasattr(index, "get_schema")
                                                                  +    assert hasattr(index, "list_schemas")
                                                                  +    assert hasattr(index, "decode_schema")
                                                                   
                                                                       # Check they are callable
                                                                       assert callable(index.insert_dataset)
                                                                  @@ -373,6 +394,7 @@ def test_index_implements_abstract_index_protocol():
                                                                   ##
                                                                   # Index tests
                                                                   
                                                                  +
                                                                   def test_index_init_default_redis():
                                                                       """Test creating an Index with default Redis connection.
                                                                   
                                                                  @@ -401,7 +423,7 @@ def test_index_init_with_redis_kwargs():
                                                                   
                                                                       Should pass custom kwargs to Redis constructor when creating a new connection.
                                                                       """
                                                                  -    index = atlocal.Index(host='localhost', port=6379, db=0)
                                                                  +    index = atlocal.Index(host="localhost", port=6379, db=0)
                                                                   
                                                                       assert index._redis is not None
                                                                       assert isinstance(index._redis, Redis)
                                                                  @@ -415,8 +437,7 @@ def test_index_add_entry(clean_redis):
                                                                       index = atlocal.Index(redis=clean_redis)
                                                                   
                                                                       ds = atdata.Dataset[SimpleTestSample](
                                                                  -        url="s3://bucket/dataset.tar",
                                                                  -        metadata_url="s3://bucket/metadata.msgpack"
                                                                  +        url="s3://bucket/dataset.tar", metadata_url="s3://bucket/metadata.msgpack"
                                                                       )
                                                                   
                                                                       entry = index.add_entry(ds, name="test-dataset")
                                                                  @@ -442,9 +463,7 @@ def test_index_add_entry_with_schema_ref(clean_redis):
                                                                       ds = atdata.Dataset[SimpleTestSample](url="s3://bucket/dataset.tar")
                                                                   
                                                                       entry = index.add_entry(
                                                                  -        ds,
                                                                  -        name="test-dataset",
                                                                  -        schema_ref="local://schemas/custom.Schema@2.0.0"
                                                                  +        ds, name="test-dataset", schema_ref="local://schemas/custom.Schema@2.0.0"
                                                                       )
                                                                   
                                                                       assert entry.schema_ref == "local://schemas/custom.Schema@2.0.0"
                                                                  @@ -460,9 +479,7 @@ def test_index_add_entry_with_metadata(clean_redis):
                                                                       ds = atdata.Dataset[SimpleTestSample](url="s3://bucket/dataset.tar")
                                                                   
                                                                       entry = index.add_entry(
                                                                  -        ds,
                                                                  -        name="test-dataset",
                                                                  -        metadata={"version": "1.0", "author": "test"}
                                                                  +        ds, name="test-dataset", metadata={"version": "1.0", "author": "test"}
                                                                       )
                                                                   
                                                                       assert entry.metadata == {"version": "1.0", "author": "test"}
                                                                  @@ -593,6 +610,7 @@ def test_index_get_entry_by_name_not_found(clean_redis):
                                                                   ##
                                                                   # AbstractIndex protocol method tests
                                                                   
                                                                  +
                                                                   def test_index_insert_dataset(clean_redis):
                                                                       """Test insert_dataset protocol method."""
                                                                       index = atlocal.Index(redis=clean_redis)
                                                                  @@ -667,9 +685,9 @@ def test_repo_init_with_s3_dict():
                                                                       Should create a Repo with S3FileSystem and set hive_path and hive_bucket.
                                                                       """
                                                                       creds = {
                                                                  -        'AWS_ENDPOINT': 'http://localhost:9000',
                                                                  -        'AWS_ACCESS_KEY_ID': 'minioadmin',
                                                                  -        'AWS_SECRET_ACCESS_KEY': 'minioadmin'
                                                                  +        "AWS_ENDPOINT": "http://localhost:9000",
                                                                  +        "AWS_ACCESS_KEY_ID": "minioadmin",
                                                                  +        "AWS_SECRET_ACCESS_KEY": "minioadmin",
                                                                       }
                                                                   
                                                                       repo = atlocal.Repo(s3_credentials=creds, hive_path="test-bucket/datasets")
                                                                  @@ -710,9 +728,9 @@ def test_repo_init_s3_without_hive_path():
                                                                       Should raise ValueError when s3_credentials is provided but hive_path is None.
                                                                       """
                                                                       creds = {
                                                                  -        'AWS_ENDPOINT': 'http://localhost:9000',
                                                                  -        'AWS_ACCESS_KEY_ID': 'minioadmin',
                                                                  -        'AWS_SECRET_ACCESS_KEY': 'minioadmin'
                                                                  +        "AWS_ENDPOINT": "http://localhost:9000",
                                                                  +        "AWS_ACCESS_KEY_ID": "minioadmin",
                                                                  +        "AWS_SECRET_ACCESS_KEY": "minioadmin",
                                                                       }
                                                                   
                                                                       with pytest.raises(ValueError, match="Must specify hive path"):
                                                                  @@ -726,9 +744,9 @@ def test_repo_init_hive_path_parsing():
                                                                       Should set hive_bucket to the first component of hive_path.
                                                                       """
                                                                       creds = {
                                                                  -        'AWS_ENDPOINT': 'http://localhost:9000',
                                                                  -        'AWS_ACCESS_KEY_ID': 'minioadmin',
                                                                  -        'AWS_SECRET_ACCESS_KEY': 'minioadmin'
                                                                  +        "AWS_ENDPOINT": "http://localhost:9000",
                                                                  +        "AWS_ACCESS_KEY_ID": "minioadmin",
                                                                  +        "AWS_SECRET_ACCESS_KEY": "minioadmin",
                                                                       }
                                                                   
                                                                       repo = atlocal.Repo(s3_credentials=creds, hive_path="my-bucket/path/to/datasets")
                                                                  @@ -752,6 +770,7 @@ def test_repo_init_with_custom_redis():
                                                                   ##
                                                                   # Repo tests - Insert functionality
                                                                   
                                                                  +
                                                                   @pytest.mark.filterwarnings("ignore:Repo is deprecated:DeprecationWarning")
                                                                   def test_repo_insert_without_s3():
                                                                       """Test that inserting a dataset without S3 configured raises ValueError.
                                                                  @@ -775,12 +794,14 @@ def test_repo_insert_single_shard(mock_s3, clean_redis, sample_dataset):
                                                                       a new Dataset pointing to the stored copy with correct URL format.
                                                                       """
                                                                       repo = atlocal.Repo(
                                                                  -        s3_credentials=mock_s3['credentials'],
                                                                  -        hive_path=mock_s3['hive_path'],
                                                                  -        redis=clean_redis
                                                                  +        s3_credentials=mock_s3["credentials"],
                                                                  +        hive_path=mock_s3["hive_path"],
                                                                  +        redis=clean_redis,
                                                                       )
                                                                   
                                                                  -    entry, new_ds = repo.insert(sample_dataset, name="single-shard-dataset", maxcount=100)
                                                                  +    entry, new_ds = repo.insert(
                                                                  +        sample_dataset, name="single-shard-dataset", maxcount=100
                                                                  +    )
                                                                   
                                                                       assert entry.cid is not None
                                                                       assert entry.cid.startswith("bafy")
                                                                  @@ -788,8 +809,8 @@ def test_repo_insert_single_shard(mock_s3, clean_redis, sample_dataset):
                                                                       assert len(entry.data_urls) > 0
                                                                       assert "SimpleTestSample" in entry.schema_ref
                                                                       assert len(repo.index.all_entries) == 1
                                                                  -    assert '.tar' in new_ds.url
                                                                  -    assert new_ds.url.startswith(mock_s3['hive_path'])
                                                                  +    assert ".tar" in new_ds.url
                                                                  +    assert new_ds.url.startswith(mock_s3["hive_path"])
                                                                   
                                                                   
                                                                   @pytest.mark.filterwarnings("ignore::pytest.PytestUnraisableExceptionWarning")
                                                                  @@ -803,16 +824,16 @@ def test_repo_insert_multiple_shards(mock_s3, clean_redis, tmp_path):
                                                                       """
                                                                       ds = make_simple_dataset(tmp_path, num_samples=50, name="large")
                                                                       repo = atlocal.Repo(
                                                                  -        s3_credentials=mock_s3['credentials'],
                                                                  -        hive_path=mock_s3['hive_path'],
                                                                  -        redis=clean_redis
                                                                  +        s3_credentials=mock_s3["credentials"],
                                                                  +        hive_path=mock_s3["hive_path"],
                                                                  +        redis=clean_redis,
                                                                       )
                                                                   
                                                                       entry, new_ds = repo.insert(ds, name="multi-shard-dataset", maxcount=10)
                                                                   
                                                                       assert entry.cid is not None
                                                                       assert len(entry.data_urls) > 0
                                                                  -    assert '{' in new_ds.url and '}' in new_ds.url
                                                                  +    assert "{" in new_ds.url and "}" in new_ds.url
                                                                   
                                                                   
                                                                   @pytest.mark.filterwarnings("ignore::pytest.PytestUnraisableExceptionWarning")
                                                                  @@ -827,9 +848,9 @@ def test_repo_insert_with_metadata(mock_s3, clean_redis, tmp_path):
                                                                       ds._metadata = {"description": "test dataset", "version": "1.0"}
                                                                   
                                                                       repo = atlocal.Repo(
                                                                  -        s3_credentials=mock_s3['credentials'],
                                                                  -        hive_path=mock_s3['hive_path'],
                                                                  -        redis=clean_redis
                                                                  +        s3_credentials=mock_s3["credentials"],
                                                                  +        hive_path=mock_s3["hive_path"],
                                                                  +        redis=clean_redis,
                                                                       )
                                                                   
                                                                       entry, new_ds = repo.insert(ds, name="metadata-dataset", maxcount=100)
                                                                  @@ -849,9 +870,9 @@ def test_repo_insert_without_metadata(mock_s3, clean_redis, tmp_path):
                                                                       """
                                                                       ds = make_simple_dataset(tmp_path, num_samples=5)
                                                                       repo = atlocal.Repo(
                                                                  -        s3_credentials=mock_s3['credentials'],
                                                                  -        hive_path=mock_s3['hive_path'],
                                                                  -        redis=clean_redis
                                                                  +        s3_credentials=mock_s3["credentials"],
                                                                  +        hive_path=mock_s3["hive_path"],
                                                                  +        redis=clean_redis,
                                                                       )
                                                                   
                                                                       entry, new_ds = repo.insert(ds, name="no-metadata-dataset", maxcount=100)
                                                                  @@ -869,12 +890,14 @@ def test_repo_insert_cache_local_false(mock_s3, clean_redis, sample_dataset):
                                                                       Should write tar shards directly to S3 without local caching.
                                                                       """
                                                                       repo = atlocal.Repo(
                                                                  -        s3_credentials=mock_s3['credentials'],
                                                                  -        hive_path=mock_s3['hive_path'],
                                                                  -        redis=clean_redis
                                                                  +        s3_credentials=mock_s3["credentials"],
                                                                  +        hive_path=mock_s3["hive_path"],
                                                                  +        redis=clean_redis,
                                                                       )
                                                                   
                                                                  -    entry, new_ds = repo.insert(sample_dataset, name="direct-write", cache_local=False, maxcount=100)
                                                                  +    entry, new_ds = repo.insert(
                                                                  +        sample_dataset, name="direct-write", cache_local=False, maxcount=100
                                                                  +    )
                                                                   
                                                                       assert entry.cid is not None
                                                                       assert len(entry.data_urls) > 0
                                                                  @@ -890,12 +913,14 @@ def test_repo_insert_cache_local_true(mock_s3, clean_redis, sample_dataset):
                                                                       local cache files after copying.
                                                                       """
                                                                       repo = atlocal.Repo(
                                                                  -        s3_credentials=mock_s3['credentials'],
                                                                  -        hive_path=mock_s3['hive_path'],
                                                                  -        redis=clean_redis
                                                                  +        s3_credentials=mock_s3["credentials"],
                                                                  +        hive_path=mock_s3["hive_path"],
                                                                  +        redis=clean_redis,
                                                                       )
                                                                   
                                                                  -    entry, new_ds = repo.insert(sample_dataset, name="cached-write", cache_local=True, maxcount=100)
                                                                  +    entry, new_ds = repo.insert(
                                                                  +        sample_dataset, name="cached-write", cache_local=True, maxcount=100
                                                                  +    )
                                                                   
                                                                       assert entry.cid is not None
                                                                       assert len(entry.data_urls) > 0
                                                                  @@ -911,9 +936,9 @@ def test_repo_insert_creates_index_entry(mock_s3, clean_redis, sample_dataset):
                                                                       and CID.
                                                                       """
                                                                       repo = atlocal.Repo(
                                                                  -        s3_credentials=mock_s3['credentials'],
                                                                  -        hive_path=mock_s3['hive_path'],
                                                                  -        redis=clean_redis
                                                                  +        s3_credentials=mock_s3["credentials"],
                                                                  +        hive_path=mock_s3["hive_path"],
                                                                  +        redis=clean_redis,
                                                                       )
                                                                   
                                                                       entry, new_ds = repo.insert(sample_dataset, name="indexed-dataset", maxcount=100)
                                                                  @@ -936,9 +961,9 @@ def test_repo_insert_cid_generation(mock_s3, clean_redis, sample_dataset):
                                                                       Should create different CIDs for datasets with different URLs.
                                                                       """
                                                                       repo = atlocal.Repo(
                                                                  -        s3_credentials=mock_s3['credentials'],
                                                                  -        hive_path=mock_s3['hive_path'],
                                                                  -        redis=clean_redis
                                                                  +        s3_credentials=mock_s3["credentials"],
                                                                  +        hive_path=mock_s3["hive_path"],
                                                                  +        redis=clean_redis,
                                                                       )
                                                                   
                                                                       entry1, new_ds1 = repo.insert(sample_dataset, name="dataset1", maxcount=100)
                                                                  @@ -965,15 +990,15 @@ def test_repo_insert_empty_dataset(mock_s3, clean_redis, tmp_path):
                                                                   
                                                                       ds = atdata.Dataset[SimpleTestSample](url=str(dataset_path))
                                                                       repo = atlocal.Repo(
                                                                  -        s3_credentials=mock_s3['credentials'],
                                                                  -        hive_path=mock_s3['hive_path'],
                                                                  -        redis=clean_redis
                                                                  +        s3_credentials=mock_s3["credentials"],
                                                                  +        hive_path=mock_s3["hive_path"],
                                                                  +        redis=clean_redis,
                                                                       )
                                                                   
                                                                       # Empty datasets succeed because WebDataset creates a shard file regardless
                                                                       entry, new_ds = repo.insert(ds, name="empty-dataset", maxcount=100)
                                                                       assert entry.cid is not None
                                                                  -    assert '.tar' in new_ds.url
                                                                  +    assert ".tar" in new_ds.url
                                                                   
                                                                   
                                                                   @pytest.mark.filterwarnings("ignore::pytest.PytestUnraisableExceptionWarning")
                                                                  @@ -985,9 +1010,9 @@ def test_repo_insert_preserves_sample_type(mock_s3, clean_redis, sample_dataset)
                                                                       Should return a Dataset[T] with the same sample type as the input dataset.
                                                                       """
                                                                       repo = atlocal.Repo(
                                                                  -        s3_credentials=mock_s3['credentials'],
                                                                  -        hive_path=mock_s3['hive_path'],
                                                                  -        redis=clean_redis
                                                                  +        s3_credentials=mock_s3["credentials"],
                                                                  +        hive_path=mock_s3["hive_path"],
                                                                  +        redis=clean_redis,
                                                                       )
                                                                   
                                                                       entry, new_ds = repo.insert(sample_dataset, name="typed-dataset", maxcount=100)
                                                                  @@ -1006,14 +1031,14 @@ def test_repo_insert_with_shard_writer_kwargs(mock_s3, clean_redis, tmp_path):
                                                                       """
                                                                       ds = make_simple_dataset(tmp_path, num_samples=30, name="large")
                                                                       repo = atlocal.Repo(
                                                                  -        s3_credentials=mock_s3['credentials'],
                                                                  -        hive_path=mock_s3['hive_path'],
                                                                  -        redis=clean_redis
                                                                  +        s3_credentials=mock_s3["credentials"],
                                                                  +        hive_path=mock_s3["hive_path"],
                                                                  +        redis=clean_redis,
                                                                       )
                                                                   
                                                                       entry, new_ds = repo.insert(ds, name="sharded-dataset", maxcount=5)
                                                                   
                                                                  -    assert '{' in new_ds.url and '}' in new_ds.url
                                                                  +    assert "{" in new_ds.url and "}" in new_ds.url
                                                                   
                                                                   
                                                                   @pytest.mark.filterwarnings("ignore::pytest.PytestUnraisableExceptionWarning")
                                                                  @@ -1026,9 +1051,9 @@ def test_repo_insert_numpy_arrays(mock_s3, clean_redis, tmp_path):
                                                                       """
                                                                       ds = make_array_dataset(tmp_path, num_samples=3, array_shape=(10, 10))
                                                                       repo = atlocal.Repo(
                                                                  -        s3_credentials=mock_s3['credentials'],
                                                                  -        hive_path=mock_s3['hive_path'],
                                                                  -        redis=clean_redis
                                                                  +        s3_credentials=mock_s3["credentials"],
                                                                  +        hive_path=mock_s3["hive_path"],
                                                                  +        redis=clean_redis,
                                                                       )
                                                                   
                                                                       entry, new_ds = repo.insert(ds, name="array-dataset", maxcount=100)
                                                                  @@ -1040,6 +1065,7 @@ def test_repo_insert_numpy_arrays(mock_s3, clean_redis, tmp_path):
                                                                   ##
                                                                   # Integration tests
                                                                   
                                                                  +
                                                                   @pytest.mark.filterwarnings("ignore::pytest.PytestUnraisableExceptionWarning")
                                                                   @pytest.mark.filterwarnings("ignore:coroutine.*was never awaited:RuntimeWarning")
                                                                   @pytest.mark.filterwarnings("ignore:Repo is deprecated:DeprecationWarning")
                                                                  @@ -1050,9 +1076,9 @@ def test_repo_index_integration(mock_s3, clean_redis, sample_dataset):
                                                                       from the Index.
                                                                       """
                                                                       repo = atlocal.Repo(
                                                                  -        s3_credentials=mock_s3['credentials'],
                                                                  -        hive_path=mock_s3['hive_path'],
                                                                  -        redis=clean_redis
                                                                  +        s3_credentials=mock_s3["credentials"],
                                                                  +        hive_path=mock_s3["hive_path"],
                                                                  +        redis=clean_redis,
                                                                       )
                                                                   
                                                                       entry, new_ds = repo.insert(sample_dataset, name="integrated-dataset", maxcount=100)
                                                                  @@ -1073,9 +1099,9 @@ def test_multiple_datasets_same_type(mock_s3, clean_redis, sample_dataset):
                                                                       retrievable from the index.
                                                                       """
                                                                       repo = atlocal.Repo(
                                                                  -        s3_credentials=mock_s3['credentials'],
                                                                  -        hive_path=mock_s3['hive_path'],
                                                                  -        redis=clean_redis
                                                                  +        s3_credentials=mock_s3["credentials"],
                                                                  +        hive_path=mock_s3["hive_path"],
                                                                  +        redis=clean_redis,
                                                                       )
                                                                   
                                                                       entry1, _ = repo.insert(sample_dataset, name="dataset-a", maxcount=100)
                                                                  @@ -1105,9 +1131,9 @@ def test_multiple_datasets_different_types(mock_s3, clean_redis, tmp_path):
                                                                       array_ds = make_array_dataset(tmp_path, num_samples=3, array_shape=(5, 5))
                                                                   
                                                                       repo = atlocal.Repo(
                                                                  -        s3_credentials=mock_s3['credentials'],
                                                                  -        hive_path=mock_s3['hive_path'],
                                                                  -        redis=clean_redis
                                                                  +        s3_credentials=mock_s3["credentials"],
                                                                  +        hive_path=mock_s3["hive_path"],
                                                                  +        redis=clean_redis,
                                                                       )
                                                                   
                                                                       entry1, _ = repo.insert(simple_ds, name="simple-dataset", maxcount=100)
                                                                  @@ -1168,12 +1194,13 @@ def test_concurrent_index_access(clean_redis):
                                                                   ##
                                                                   # S3DataStore tests
                                                                   
                                                                  +
                                                                   def test_s3_datastore_init():
                                                                       """Test creating an S3DataStore."""
                                                                       creds = {
                                                                  -        'AWS_ENDPOINT': 'http://localhost:9000',
                                                                  -        'AWS_ACCESS_KEY_ID': 'minioadmin',
                                                                  -        'AWS_SECRET_ACCESS_KEY': 'minioadmin'
                                                                  +        "AWS_ENDPOINT": "http://localhost:9000",
                                                                  +        "AWS_ACCESS_KEY_ID": "minioadmin",
                                                                  +        "AWS_SECRET_ACCESS_KEY": "minioadmin",
                                                                       }
                                                                   
                                                                       store = atlocal.S3DataStore(credentials=creds, bucket="test-bucket")
                                                                  @@ -1185,10 +1212,7 @@ def test_s3_datastore_init():
                                                                   
                                                                   def test_s3_datastore_supports_streaming():
                                                                       """Test that S3DataStore reports streaming support."""
                                                                  -    creds = {
                                                                  -        'AWS_ACCESS_KEY_ID': 'test',
                                                                  -        'AWS_SECRET_ACCESS_KEY': 'test'
                                                                  -    }
                                                                  +    creds = {"AWS_ACCESS_KEY_ID": "test", "AWS_SECRET_ACCESS_KEY": "test"}
                                                                   
                                                                       store = atlocal.S3DataStore(credentials=creds, bucket="test")
                                                                   
                                                                  @@ -1197,10 +1221,7 @@ def test_s3_datastore_supports_streaming():
                                                                   
                                                                   def test_s3_datastore_read_url():
                                                                       """Test that read_url returns URL unchanged without custom endpoint."""
                                                                  -    creds = {
                                                                  -        'AWS_ACCESS_KEY_ID': 'test',
                                                                  -        'AWS_SECRET_ACCESS_KEY': 'test'
                                                                  -    }
                                                                  +    creds = {"AWS_ACCESS_KEY_ID": "test", "AWS_SECRET_ACCESS_KEY": "test"}
                                                                   
                                                                       store = atlocal.S3DataStore(credentials=creds, bucket="test")
                                                                   
                                                                  @@ -1211,9 +1232,9 @@ def test_s3_datastore_read_url():
                                                                   def test_s3_datastore_read_url_with_custom_endpoint():
                                                                       """Test that read_url transforms s3:// to https:// with custom endpoint."""
                                                                       creds = {
                                                                  -        'AWS_ACCESS_KEY_ID': 'test',
                                                                  -        'AWS_SECRET_ACCESS_KEY': 'test',
                                                                  -        'AWS_ENDPOINT': 'https://abc123.r2.cloudflarestorage.com'
                                                                  +        "AWS_ACCESS_KEY_ID": "test",
                                                                  +        "AWS_SECRET_ACCESS_KEY": "test",
                                                                  +        "AWS_ENDPOINT": "https://abc123.r2.cloudflarestorage.com",
                                                                       }
                                                                   
                                                                       store = atlocal.S3DataStore(credentials=creds, bucket="test")
                                                                  @@ -1224,9 +1245,12 @@ def test_s3_datastore_read_url_with_custom_endpoint():
                                                                       assert store.read_url(url) == expected
                                                                   
                                                                       # Trailing slash on endpoint should be handled
                                                                  -    creds['AWS_ENDPOINT'] = 'https://endpoint.example.com/'
                                                                  +    creds["AWS_ENDPOINT"] = "https://endpoint.example.com/"
                                                                       store2 = atlocal.S3DataStore(credentials=creds, bucket="test")
                                                                  -    assert store2.read_url(url) == "https://endpoint.example.com/my-bucket/path/to/data.tar"
                                                                  +    assert (
                                                                  +        store2.read_url(url)
                                                                  +        == "https://endpoint.example.com/my-bucket/path/to/data.tar"
                                                                  +    )
                                                                   
                                                                       # Non-s3 URLs should be passed through unchanged
                                                                       https_url = "https://example.com/data.tar"
                                                                  @@ -1240,15 +1264,14 @@ def test_s3_datastore_write_shards(mock_s3, tmp_path):
                                                                       ds = make_simple_dataset(tmp_path, num_samples=5)
                                                                   
                                                                       store = atlocal.S3DataStore(
                                                                  -        credentials=mock_s3['credentials'],
                                                                  -        bucket=mock_s3['bucket']
                                                                  +        credentials=mock_s3["credentials"], bucket=mock_s3["bucket"]
                                                                       )
                                                                   
                                                                       urls = store.write_shards(ds, prefix="test/data", maxcount=100)
                                                                   
                                                                       assert len(urls) >= 1
                                                                       assert all(url.startswith("s3://") for url in urls)
                                                                  -    assert all(mock_s3['bucket'] in url for url in urls)
                                                                  +    assert all(mock_s3["bucket"] in url for url in urls)
                                                                   
                                                                   
                                                                   @pytest.mark.filterwarnings("ignore::pytest.PytestUnraisableExceptionWarning")
                                                                  @@ -1258,8 +1281,7 @@ def test_s3_datastore_write_shards_cache_local(mock_s3, tmp_path):
                                                                       ds = make_simple_dataset(tmp_path, num_samples=5)
                                                                   
                                                                       store = atlocal.S3DataStore(
                                                                  -        credentials=mock_s3['credentials'],
                                                                  -        bucket=mock_s3['bucket']
                                                                  +        credentials=mock_s3["credentials"], bucket=mock_s3["bucket"]
                                                                       )
                                                                   
                                                                       urls = store.write_shards(ds, prefix="cached/data", cache_local=True, maxcount=100)
                                                                  @@ -1279,8 +1301,7 @@ def test_index_with_datastore_insert(mock_s3, clean_redis, tmp_path):
                                                                       ds = make_simple_dataset(tmp_path, num_samples=5)
                                                                   
                                                                       store = atlocal.S3DataStore(
                                                                  -        credentials=mock_s3['credentials'],
                                                                  -        bucket=mock_s3['bucket']
                                                                  +        credentials=mock_s3["credentials"], bucket=mock_s3["bucket"]
                                                                       )
                                                                       index = atlocal.Index(redis=clean_redis, data_store=store)
                                                                   
                                                                  @@ -1303,16 +1324,12 @@ def test_index_with_datastore_custom_prefix(mock_s3, clean_redis, tmp_path):
                                                                       ds = make_simple_dataset(tmp_path, num_samples=3)
                                                                   
                                                                       store = atlocal.S3DataStore(
                                                                  -        credentials=mock_s3['credentials'],
                                                                  -        bucket=mock_s3['bucket']
                                                                  +        credentials=mock_s3["credentials"], bucket=mock_s3["bucket"]
                                                                       )
                                                                       index = atlocal.Index(redis=clean_redis, data_store=store)
                                                                   
                                                                       entry = index.insert_dataset(
                                                                  -        ds,
                                                                  -        name="my-dataset",
                                                                  -        prefix="custom/path/v1",
                                                                  -        maxcount=100
                                                                  +        ds, name="my-dataset", prefix="custom/path/v1", maxcount=100
                                                                       )
                                                                   
                                                                       assert "custom/path/v1" in entry.data_urls[0]
                                                                  @@ -1334,8 +1351,7 @@ def test_index_without_datastore_indexes_existing_url(clean_redis, tmp_path):
                                                                   def test_index_data_store_property(mock_s3, clean_redis):
                                                                       """Test that Index.data_store property returns the data store."""
                                                                       store = atlocal.S3DataStore(
                                                                  -        credentials=mock_s3['credentials'],
                                                                  -        bucket=mock_s3['bucket']
                                                                  +        credentials=mock_s3["credentials"], bucket=mock_s3["bucket"]
                                                                       )
                                                                       index = atlocal.Index(redis=clean_redis, data_store=store)
                                                                   
                                                                  @@ -1352,6 +1368,7 @@ def test_index_data_store_property_none(clean_redis):
                                                                   ##
                                                                   # Schema storage tests
                                                                   
                                                                  +
                                                                   def test_publish_schema(clean_redis):
                                                                       """Test publishing a schema to Redis."""
                                                                       index = atlocal.Index(redis=clean_redis)
                                                                  @@ -1368,13 +1385,11 @@ def test_publish_schema_with_description(clean_redis):
                                                                       index = atlocal.Index(redis=clean_redis)
                                                                   
                                                                       schema_ref = index.publish_schema(
                                                                  -        SimpleTestSample,
                                                                  -        version="2.0.0",
                                                                  -        description="A simple test sample type"
                                                                  +        SimpleTestSample, version="2.0.0", description="A simple test sample type"
                                                                       )
                                                                   
                                                                       schema = index.get_schema(schema_ref)
                                                                  -    assert schema.get('description') == "A simple test sample type"
                                                                  +    assert schema.get("description") == "A simple test sample type"
                                                                   
                                                                   
                                                                   def test_publish_schema_auto_increment(clean_redis):
                                                                  @@ -1411,7 +1426,7 @@ def test_publish_schema_docstring_fallback(clean_redis):
                                                                       schema = index.get_schema(schema_ref)
                                                                   
                                                                       # Should use the class docstring
                                                                  -    assert schema.get('description') == SimpleTestSample.__doc__
                                                                  +    assert schema.get("description") == SimpleTestSample.__doc__
                                                                   
                                                                   
                                                                   def test_get_schema(clean_redis):
                                                                  @@ -1421,10 +1436,10 @@ def test_get_schema(clean_redis):
                                                                       schema_ref = index.publish_schema(SimpleTestSample, version="1.0.0")
                                                                       schema = index.get_schema(schema_ref)
                                                                   
                                                                  -    assert schema['name'] == 'SimpleTestSample'
                                                                  -    assert schema['version'] == '1.0.0'
                                                                  -    assert len(schema['fields']) == 2  # name and value fields
                                                                  -    assert schema['$ref'] == schema_ref
                                                                  +    assert schema["name"] == "SimpleTestSample"
                                                                  +    assert schema["version"] == "1.0.0"
                                                                  +    assert len(schema["fields"]) == 2  # name and value fields
                                                                  +    assert schema["$ref"] == schema_ref
                                                                   
                                                                   
                                                                   def test_get_schema_not_found(clean_redis):
                                                                  @@ -1461,9 +1476,9 @@ def test_list_schemas_multiple(clean_redis):
                                                                       schemas = list(index.list_schemas())
                                                                       assert len(schemas) == 2
                                                                   
                                                                  -    names = {s['name'] for s in schemas}
                                                                  -    assert 'SimpleTestSample' in names
                                                                  -    assert 'ArrayTestSample' in names
                                                                  +    names = {s["name"] for s in schemas}
                                                                  +    assert "SimpleTestSample" in names
                                                                  +    assert "ArrayTestSample" in names
                                                                   
                                                                   
                                                                   def test_schema_field_types(clean_redis):
                                                                  @@ -1474,13 +1489,13 @@ def test_schema_field_types(clean_redis):
                                                                       schema = index.get_schema(schema_ref)
                                                                   
                                                                       # Find name field (should be str)
                                                                  -    name_field = next(f for f in schema['fields'] if f['name'] == 'name')
                                                                  -    assert name_field['fieldType']['primitive'] == 'str'
                                                                  -    assert name_field['optional'] is False
                                                                  +    name_field = next(f for f in schema["fields"] if f["name"] == "name")
                                                                  +    assert name_field["fieldType"]["primitive"] == "str"
                                                                  +    assert name_field["optional"] is False
                                                                   
                                                                       # Find value field (should be int)
                                                                  -    value_field = next(f for f in schema['fields'] if f['name'] == 'value')
                                                                  -    assert value_field['fieldType']['primitive'] == 'int'
                                                                  +    value_field = next(f for f in schema["fields"] if f["name"] == "value")
                                                                  +    assert value_field["fieldType"]["primitive"] == "int"
                                                                   
                                                                   
                                                                   def test_schema_ndarray_field(clean_redis):
                                                                  @@ -1491,9 +1506,9 @@ def test_schema_ndarray_field(clean_redis):
                                                                       schema = index.get_schema(schema_ref)
                                                                   
                                                                       # Find data field (should be ndarray)
                                                                  -    data_field = next(f for f in schema['fields'] if f['name'] == 'data')
                                                                  -    assert data_field['fieldType']['$type'] == 'local#ndarray'
                                                                  -    assert data_field['fieldType']['dtype'] == 'float32'
                                                                  +    data_field = next(f for f in schema["fields"] if f["name"] == "data")
                                                                  +    assert data_field["fieldType"]["$type"] == "local#ndarray"
                                                                  +    assert data_field["fieldType"]["dtype"] == "float32"
                                                                   
                                                                   
                                                                   def test_decode_schema(clean_redis):
                                                                  @@ -1518,6 +1533,7 @@ def test_decode_schema_preserves_structure(clean_redis):
                                                                   
                                                                       # Check fields exist
                                                                       import numpy as np
                                                                  +
                                                                       instance = ReconstructedType(label="test", data=np.zeros((3, 3)))
                                                                       assert instance.label == "test"
                                                                       assert instance.data.shape == (3, 3)
                                                                  @@ -1556,13 +1572,14 @@ def test_schema_version_handling(clean_redis):
                                                                       schema_v1 = index.get_schema(ref_v1)
                                                                       schema_v2 = index.get_schema(ref_v2)
                                                                   
                                                                  -    assert schema_v1['version'] == '1.0.0'
                                                                  -    assert schema_v2['version'] == '2.0.0'
                                                                  +    assert schema_v1["version"] == "1.0.0"
                                                                  +    assert schema_v2["version"] == "2.0.0"
                                                                   
                                                                   
                                                                   ##
                                                                   # Schema codec tests
                                                                   
                                                                  +
                                                                   def test_schema_codec_type_caching():
                                                                       """Test that schema_to_type caches generated types."""
                                                                       from atdata._schema_codec import schema_to_type, clear_type_cache, get_cached_types
                                                                  @@ -1573,7 +1590,13 @@ def test_schema_codec_type_caching():
                                                                       schema = {
                                                                           "name": "CacheTestSample",
                                                                           "version": "1.0.0",
                                                                  -        "fields": [{"name": "value", "fieldType": {"$type": "local#primitive", "primitive": "int"}, "optional": False}],
                                                                  +        "fields": [
                                                                  +            {
                                                                  +                "name": "value",
                                                                  +                "fieldType": {"$type": "local#primitive", "primitive": "int"},
                                                                  +                "optional": False,
                                                                  +            }
                                                                  +        ],
                                                                       }
                                                                   
                                                                       # First call creates and caches type
                                                                  @@ -1596,7 +1619,13 @@ def test_schema_to_type_missing_name():
                                                                       clear_type_cache()
                                                                       schema = {
                                                                           "version": "1.0.0",
                                                                  -        "fields": [{"name": "value", "fieldType": {"$type": "#primitive", "primitive": "int"}, "optional": False}],
                                                                  +        "fields": [
                                                                  +            {
                                                                  +                "name": "value",
                                                                  +                "fieldType": {"$type": "#primitive", "primitive": "int"},
                                                                  +                "optional": False,
                                                                  +            }
                                                                  +        ],
                                                                       }
                                                                   
                                                                       with pytest.raises(ValueError, match="must have a 'name' field"):
                                                                  @@ -1626,7 +1655,12 @@ def test_schema_to_type_field_missing_name():
                                                                       schema = {
                                                                           "name": "BadFieldSample",
                                                                           "version": "1.0.0",
                                                                  -        "fields": [{"fieldType": {"$type": "#primitive", "primitive": "int"}, "optional": False}],
                                                                  +        "fields": [
                                                                  +            {
                                                                  +                "fieldType": {"$type": "#primitive", "primitive": "int"},
                                                                  +                "optional": False,
                                                                  +            }
                                                                  +        ],
                                                                       }
                                                                   
                                                                       # Raises KeyError from cache key generation (accesses f['name']) or
                                                                  @@ -1643,7 +1677,13 @@ def test_schema_to_type_unknown_primitive():
                                                                       schema = {
                                                                           "name": "UnknownPrimitiveSample",
                                                                           "version": "1.0.0",
                                                                  -        "fields": [{"name": "value", "fieldType": {"$type": "#primitive", "primitive": "unknown_type"}, "optional": False}],
                                                                  +        "fields": [
                                                                  +            {
                                                                  +                "name": "value",
                                                                  +                "fieldType": {"$type": "#primitive", "primitive": "unknown_type"},
                                                                  +                "optional": False,
                                                                  +            }
                                                                  +        ],
                                                                       }
                                                                   
                                                                       with pytest.raises(ValueError, match="Unknown primitive type"):
                                                                  @@ -1658,7 +1698,13 @@ def test_schema_to_type_unknown_field_kind():
                                                                       schema = {
                                                                           "name": "UnknownKindSample",
                                                                           "version": "1.0.0",
                                                                  -        "fields": [{"name": "value", "fieldType": {"$type": "#unknown_kind"}, "optional": False}],
                                                                  +        "fields": [
                                                                  +            {
                                                                  +                "name": "value",
                                                                  +                "fieldType": {"$type": "#unknown_kind"},
                                                                  +                "optional": False,
                                                                  +            }
                                                                  +        ],
                                                                       }
                                                                   
                                                                       with pytest.raises(ValueError, match="Unknown field type kind"):
                                                                  @@ -1673,7 +1719,13 @@ def test_schema_to_type_ref_not_supported():
                                                                       schema = {
                                                                           "name": "RefSample",
                                                                           "version": "1.0.0",
                                                                  -        "fields": [{"name": "other", "fieldType": {"$type": "#ref", "ref": "other.Schema"}, "optional": False}],
                                                                  +        "fields": [
                                                                  +            {
                                                                  +                "name": "other",
                                                                  +                "fieldType": {"$type": "#ref", "ref": "other.Schema"},
                                                                  +                "optional": False,
                                                                  +            }
                                                                  +        ],
                                                                       }
                                                                   
                                                                       with pytest.raises(ValueError, match="Schema references.*not yet supported"):
                                                                  @@ -1689,11 +1741,31 @@ def test_schema_to_type_all_primitives():
                                                                           "name": "AllPrimitivesSample",
                                                                           "version": "1.0.0",
                                                                           "fields": [
                                                                  -            {"name": "s", "fieldType": {"$type": "#primitive", "primitive": "str"}, "optional": False},
                                                                  -            {"name": "i", "fieldType": {"$type": "#primitive", "primitive": "int"}, "optional": False},
                                                                  -            {"name": "f", "fieldType": {"$type": "#primitive", "primitive": "float"}, "optional": False},
                                                                  -            {"name": "b", "fieldType": {"$type": "#primitive", "primitive": "bool"}, "optional": False},
                                                                  -            {"name": "by", "fieldType": {"$type": "#primitive", "primitive": "bytes"}, "optional": False},
                                                                  +            {
                                                                  +                "name": "s",
                                                                  +                "fieldType": {"$type": "#primitive", "primitive": "str"},
                                                                  +                "optional": False,
                                                                  +            },
                                                                  +            {
                                                                  +                "name": "i",
                                                                  +                "fieldType": {"$type": "#primitive", "primitive": "int"},
                                                                  +                "optional": False,
                                                                  +            },
                                                                  +            {
                                                                  +                "name": "f",
                                                                  +                "fieldType": {"$type": "#primitive", "primitive": "float"},
                                                                  +                "optional": False,
                                                                  +            },
                                                                  +            {
                                                                  +                "name": "b",
                                                                  +                "fieldType": {"$type": "#primitive", "primitive": "bool"},
                                                                  +                "optional": False,
                                                                  +            },
                                                                  +            {
                                                                  +                "name": "by",
                                                                  +                "fieldType": {"$type": "#primitive", "primitive": "bytes"},
                                                                  +                "optional": False,
                                                                  +            },
                                                                           ],
                                                                       }
                                                                   
                                                                  @@ -1716,8 +1788,16 @@ def test_schema_to_type_optional_fields():
                                                                           "name": "OptionalSample",
                                                                           "version": "1.0.0",
                                                                           "fields": [
                                                                  -            {"name": "required", "fieldType": {"$type": "#primitive", "primitive": "str"}, "optional": False},
                                                                  -            {"name": "optional_str", "fieldType": {"$type": "#primitive", "primitive": "str"}, "optional": True},
                                                                  +            {
                                                                  +                "name": "required",
                                                                  +                "fieldType": {"$type": "#primitive", "primitive": "str"},
                                                                  +                "optional": False,
                                                                  +            },
                                                                  +            {
                                                                  +                "name": "optional_str",
                                                                  +                "fieldType": {"$type": "#primitive", "primitive": "str"},
                                                                  +                "optional": True,
                                                                  +            },
                                                                           ],
                                                                       }
                                                                   
                                                                  @@ -1742,7 +1822,11 @@ def test_schema_to_type_ndarray_field():
                                                                           "name": "ArraySample",
                                                                           "version": "1.0.0",
                                                                           "fields": [
                                                                  -            {"name": "data", "fieldType": {"$type": "#ndarray", "dtype": "float32"}, "optional": False},
                                                                  +            {
                                                                  +                "name": "data",
                                                                  +                "fieldType": {"$type": "#ndarray", "dtype": "float32"},
                                                                  +                "optional": False,
                                                                  +            },
                                                                           ],
                                                                       }
                                                                   
                                                                  @@ -1762,7 +1846,14 @@ def test_schema_to_type_array_field():
                                                                           "name": "ListSample",
                                                                           "version": "1.0.0",
                                                                           "fields": [
                                                                  -            {"name": "tags", "fieldType": {"$type": "#array", "items": {"$type": "#primitive", "primitive": "str"}}, "optional": False},
                                                                  +            {
                                                                  +                "name": "tags",
                                                                  +                "fieldType": {
                                                                  +                    "$type": "#array",
                                                                  +                    "items": {"$type": "#primitive", "primitive": "str"},
                                                                  +                },
                                                                  +                "optional": False,
                                                                  +            },
                                                                           ],
                                                                       }
                                                                   
                                                                  @@ -1780,7 +1871,13 @@ def test_schema_to_type_use_cache_false():
                                                                       schema = {
                                                                           "name": "NoCacheSample",
                                                                           "version": "1.0.0",
                                                                  -        "fields": [{"name": "value", "fieldType": {"$type": "#primitive", "primitive": "int"}, "optional": False}],
                                                                  +        "fields": [
                                                                  +            {
                                                                  +                "name": "value",
                                                                  +                "fieldType": {"$type": "#primitive", "primitive": "int"},
                                                                  +                "optional": False,
                                                                  +            }
                                                                  +        ],
                                                                       }
                                                                   
                                                                       Type1 = schema_to_type(schema, use_cache=False)
                                                                  @@ -1818,7 +1915,7 @@ def test_stub_generated_on_get_schema(self, clean_redis, tmp_path):
                                                                           ref = index.publish_schema(SimpleTestSample, version="1.0.0")
                                                                   
                                                                           # Get schema should trigger stub generation
                                                                  -        schema = index.get_schema(ref)
                                                                  +        _schema = index.get_schema(ref)
                                                                   
                                                                           # Check stub was created (in local/ subdirectory for namespacing)
                                                                           stub_path = stub_dir / "local" / "SimpleTestSample_1_0_0.py"
                                                                  @@ -1859,6 +1956,7 @@ def test_stub_not_regenerated_if_current(self, clean_redis, tmp_path):
                                                                   
                                                                           # Small delay to ensure different mtime if regenerated
                                                                           import time
                                                                  +
                                                                           time.sleep(0.01)
                                                                   
                                                                           # Second call should not regenerate
                                                                  diff --git a/tests/test_promote.py b/tests/test_promote.py
                                                                  index ef37635..4e18709 100644
                                                                  --- a/tests/test_promote.py
                                                                  +++ b/tests/test_promote.py
                                                                  @@ -15,6 +15,7 @@
                                                                   @atdata.packable
                                                                   class PromoteTestSample:
                                                                       """Sample type for promotion tests."""
                                                                  +
                                                                       name: str
                                                                       value: int
                                                                   
                                                                  @@ -35,14 +36,12 @@ def test_finds_matching_schema(self):
                                                                                       "value": {
                                                                                           "name": "test_promote.PromoteTestSample",
                                                                                           "version": "1.0.0",
                                                                  -                    }
                                                                  +                    },
                                                                                   }
                                                                               ]
                                                                   
                                                                               result = _find_existing_schema(
                                                                  -                mock_client,
                                                                  -                "test_promote.PromoteTestSample",
                                                                  -                "1.0.0"
                                                                  +                mock_client, "test_promote.PromoteTestSample", "1.0.0"
                                                                               )
                                                                   
                                                                               assert result == "at://did:plc:test/ac.foundation.dataset.sampleSchema/abc"
                                                                  @@ -59,14 +58,12 @@ def test_returns_none_when_not_found(self):
                                                                                       "value": {
                                                                                           "name": "other.OtherSample",
                                                                                           "version": "1.0.0",
                                                                  -                    }
                                                                  +                    },
                                                                                   }
                                                                               ]
                                                                   
                                                                               result = _find_existing_schema(
                                                                  -                mock_client,
                                                                  -                "test_promote.PromoteTestSample",
                                                                  -                "1.0.0"
                                                                  +                mock_client, "test_promote.PromoteTestSample", "1.0.0"
                                                                               )
                                                                   
                                                                               assert result is None
                                                                  @@ -83,14 +80,12 @@ def test_returns_none_when_version_mismatch(self):
                                                                                       "value": {
                                                                                           "name": "test_promote.PromoteTestSample",
                                                                                           "version": "2.0.0",  # Different version
                                                                  -                    }
                                                                  +                    },
                                                                                   }
                                                                               ]
                                                                   
                                                                               result = _find_existing_schema(
                                                                  -                mock_client,
                                                                  -                "test_promote.PromoteTestSample",
                                                                  -                "1.0.0"
                                                                  +                mock_client, "test_promote.PromoteTestSample", "1.0.0"
                                                                               )
                                                                   
                                                                               assert result is None
                                                                  @@ -125,7 +120,9 @@ def test_publishes_new_schema_when_not_found(self):
                                                                   
                                                                               with patch("atdata.atmosphere.SchemaPublisher") as MockPublisher:
                                                                                   mock_publisher = MockPublisher.return_value
                                                                  -                mock_publisher.publish.return_value = Mock(__str__=lambda s: "at://new/schema/uri")
                                                                  +                mock_publisher.publish.return_value = Mock(
                                                                  +                    __str__=lambda s: "at://new/schema/uri"
                                                                  +                )
                                                                   
                                                                                   result = _find_or_publish_schema(
                                                                                       PromoteTestSample,
                                                                  @@ -167,8 +164,16 @@ def test_promotes_with_existing_urls(self):
                                                                               "name": "test_promote.PromoteTestSample",
                                                                               "version": "1.0.0",
                                                                               "fields": [
                                                                  -                {"name": "name", "fieldType": {"$type": "local#primitive", "primitive": "str"}, "optional": False},
                                                                  -                {"name": "value", "fieldType": {"$type": "local#primitive", "primitive": "int"}, "optional": False},
                                                                  +                {
                                                                  +                    "name": "name",
                                                                  +                    "fieldType": {"$type": "local#primitive", "primitive": "str"},
                                                                  +                    "optional": False,
                                                                  +                },
                                                                  +                {
                                                                  +                    "name": "value",
                                                                  +                    "fieldType": {"$type": "local#primitive", "primitive": "int"},
                                                                  +                    "optional": False,
                                                                  +                },
                                                                               ],
                                                                           }
                                                                   
                                                                  @@ -207,7 +212,11 @@ def test_promotes_with_custom_name(self):
                                                                               "name": "TestSample",
                                                                               "version": "1.0.0",
                                                                               "fields": [
                                                                  -                {"name": "value", "fieldType": {"$type": "local#primitive", "primitive": "int"}, "optional": False},
                                                                  +                {
                                                                  +                    "name": "value",
                                                                  +                    "fieldType": {"$type": "local#primitive", "primitive": "int"},
                                                                  +                    "optional": False,
                                                                  +                },
                                                                               ],
                                                                           }
                                                                   
                                                                  @@ -218,7 +227,9 @@ def test_promotes_with_custom_name(self):
                                                                   
                                                                               with patch("atdata.atmosphere.DatasetPublisher") as MockPublisher:
                                                                                   mock_publisher = MockPublisher.return_value
                                                                  -                mock_publisher.publish_with_urls.return_value = Mock(__str__=lambda s: "at://result")
                                                                  +                mock_publisher.publish_with_urls.return_value = Mock(
                                                                  +                    __str__=lambda s: "at://result"
                                                                  +                )
                                                                   
                                                                                   promote_to_atmosphere(
                                                                                       entry,
                                                                  @@ -247,7 +258,11 @@ def test_promotes_with_data_store(self):
                                                                               "name": "TestSample",
                                                                               "version": "1.0.0",
                                                                               "fields": [
                                                                  -                {"name": "value", "fieldType": {"$type": "local#primitive", "primitive": "int"}, "optional": False},
                                                                  +                {
                                                                  +                    "name": "value",
                                                                  +                    "fieldType": {"$type": "local#primitive", "primitive": "int"},
                                                                  +                    "optional": False,
                                                                  +                },
                                                                               ],
                                                                           }
                                                                   
                                                                  @@ -262,7 +277,9 @@ def test_promotes_with_data_store(self):
                                                                   
                                                                               with patch("atdata.atmosphere.DatasetPublisher") as MockPublisher:
                                                                                   mock_publisher = MockPublisher.return_value
                                                                  -                mock_publisher.publish_with_urls.return_value = Mock(__str__=lambda s: "at://result")
                                                                  +                mock_publisher.publish_with_urls.return_value = Mock(
                                                                  +                    __str__=lambda s: "at://result"
                                                                  +                )
                                                                   
                                                                                   with patch("atdata.dataset.Dataset"):
                                                                                       promote_to_atmosphere(
                                                                  diff --git a/tests/test_protocols.py b/tests/test_protocols.py
                                                                  index a9dec54..854ad5c 100644
                                                                  --- a/tests/test_protocols.py
                                                                  +++ b/tests/test_protocols.py
                                                                  @@ -309,7 +309,10 @@ def test_index_entry_in_dict_key(self):
                                                                                   {
                                                                                       "name": "dataset-b",
                                                                                       "schemaRef": "at://schema",
                                                                  -                    "storage": {"$type": "ac.foundation.dataset.storageExternal", "urls": ["url2"]},
                                                                  +                    "storage": {
                                                                  +                        "$type": "ac.foundation.dataset.storageExternal",
                                                                  +                        "urls": ["url2"],
                                                                  +                    },
                                                                                   },
                                                                               ),
                                                                           ]
                                                                  @@ -343,7 +346,10 @@ def collect_all_urls(entries: list[IndexEntry]) -> list[str]:
                                                                                   {
                                                                                       "name": "ds2",
                                                                                       "schemaRef": "at://s",
                                                                  -                    "storage": {"$type": "ac.foundation.dataset.storageExternal", "urls": ["s3://b/1.tar"]},
                                                                  +                    "storage": {
                                                                  +                        "$type": "ac.foundation.dataset.storageExternal",
                                                                  +                        "urls": ["s3://b/1.tar"],
                                                                  +                    },
                                                                                   },
                                                                               ),
                                                                           ]
                                                                  diff --git a/tests/test_sources.py b/tests/test_sources.py
                                                                  index f252621..5c81140 100644
                                                                  --- a/tests/test_sources.py
                                                                  +++ b/tests/test_sources.py
                                                                  @@ -1,7 +1,5 @@
                                                                   """Tests for data source implementations."""
                                                                   
                                                                  -import io
                                                                  -import tarfile
                                                                   from pathlib import Path
                                                                   from unittest.mock import Mock, patch, MagicMock
                                                                   
                                                                  @@ -17,6 +15,7 @@
                                                                   @atdata.packable
                                                                   class SourceTestSample:
                                                                       """Simple sample for testing data sources."""
                                                                  +
                                                                       name: str
                                                                       value: int
                                                                   
                                                                  @@ -98,10 +97,13 @@ def test_open_shard_not_found(self, tmp_path):
                                                                       def test_dataset_integration(self, tmp_path):
                                                                           """URLSource works with Dataset."""
                                                                           tar_path = tmp_path / "test.tar"
                                                                  -        create_test_tar(tar_path, [
                                                                  -            {"name": "sample1", "value": 1},
                                                                  -            {"name": "sample2", "value": 2},
                                                                  -        ])
                                                                  +        create_test_tar(
                                                                  +            tar_path,
                                                                  +            [
                                                                  +                {"name": "sample1", "value": 1},
                                                                  +                {"name": "sample2", "value": 2},
                                                                  +            ],
                                                                  +        )
                                                                   
                                                                           source = URLSource(str(tar_path))
                                                                           ds = atdata.Dataset[SourceTestSample](source)
                                                                  @@ -130,10 +132,12 @@ def test_shard_list(self):
                                                                   
                                                                       def test_from_urls(self):
                                                                           """from_urls parses S3 URLs correctly."""
                                                                  -        source = S3Source.from_urls([
                                                                  -            "s3://bucket/path/a.tar",
                                                                  -            "s3://bucket/path/b.tar",
                                                                  -        ])
                                                                  +        source = S3Source.from_urls(
                                                                  +            [
                                                                  +                "s3://bucket/path/a.tar",
                                                                  +                "s3://bucket/path/b.tar",
                                                                  +            ]
                                                                  +        )
                                                                   
                                                                           assert source.bucket == "bucket"
                                                                           assert source.keys == ["path/a.tar", "path/b.tar"]
                                                                  @@ -164,10 +168,12 @@ def test_from_urls_invalid_scheme(self):
                                                                       def test_from_urls_multiple_buckets(self):
                                                                           """from_urls raises when URLs span buckets."""
                                                                           with pytest.raises(ValueError, match="same bucket"):
                                                                  -            S3Source.from_urls([
                                                                  -                "s3://bucket-a/data.tar",
                                                                  -                "s3://bucket-b/data.tar",
                                                                  -            ])
                                                                  +            S3Source.from_urls(
                                                                  +                [
                                                                  +                    "s3://bucket-a/data.tar",
                                                                  +                    "s3://bucket-b/data.tar",
                                                                  +                ]
                                                                  +            )
                                                                   
                                                                       def test_from_credentials(self):
                                                                           """from_credentials creates source from dict."""
                                                                  @@ -299,10 +305,12 @@ def test_conforms_to_protocol(self):
                                                                   
                                                                       def test_list_shards(self):
                                                                           """list_shards returns AT URIs."""
                                                                  -        source = BlobSource(blob_refs=[
                                                                  -            {"did": "did:plc:abc", "cid": "bafyrei111"},
                                                                  -            {"did": "did:plc:abc", "cid": "bafyrei222"},
                                                                  -        ])
                                                                  +        source = BlobSource(
                                                                  +            blob_refs=[
                                                                  +                {"did": "did:plc:abc", "cid": "bafyrei111"},
                                                                  +                {"did": "did:plc:abc", "cid": "bafyrei222"},
                                                                  +            ]
                                                                  +        )
                                                                           assert source.list_shards() == [
                                                                               "at://did:plc:abc/blob/bafyrei111",
                                                                               "at://did:plc:abc/blob/bafyrei222",
                                                                  @@ -310,9 +318,11 @@ def test_list_shards(self):
                                                                   
                                                                       def test_from_refs_simple_format(self):
                                                                           """from_refs accepts simple {did, cid} format."""
                                                                  -        source = BlobSource.from_refs([
                                                                  -            {"did": "did:plc:abc", "cid": "bafyrei123"},
                                                                  -        ])
                                                                  +        source = BlobSource.from_refs(
                                                                  +            [
                                                                  +                {"did": "did:plc:abc", "cid": "bafyrei123"},
                                                                  +            ]
                                                                  +        )
                                                                           assert len(source.blob_refs) == 1
                                                                           assert source.blob_refs[0]["did"] == "did:plc:abc"
                                                                           assert source.blob_refs[0]["cid"] == "bafyrei123"
                                                                  @@ -368,7 +378,10 @@ def test_get_blob_url(self):
                                                                           )
                                                                   
                                                                           url = source._get_blob_url("did:plc:abc", "bafyrei123")
                                                                  -        assert url == "https://pds.example.com/xrpc/com.atproto.sync.getBlob?did=did:plc:abc&cid=bafyrei123"
                                                                  +        assert (
                                                                  +            url
                                                                  +            == "https://pds.example.com/xrpc/com.atproto.sync.getBlob?did=did:plc:abc&cid=bafyrei123"
                                                                  +        )
                                                                   
                                                                       def test_shards_fetches_blobs(self):
                                                                           """shards property fetches blobs via HTTP."""
                                                                  
                                                                  From 5d7799be5fcd197586fe051bc1022d8da2ed096e Mon Sep 17 00:00:00 2001
                                                                  From: Maxine Levesque <170461181+maxinelevesque@users.noreply.github.com>
                                                                  Date: Wed, 28 Jan 2026 13:15:23 -0800
                                                                  Subject: [PATCH 6/6] added lockfile to tracking
                                                                  
                                                                  ---
                                                                   .gitignore |    4 +-
                                                                   uv.lock    | 3504 ++++++++++++++++++++++++++++++++++++++++++++++++++++
                                                                   2 files changed, 3507 insertions(+), 1 deletion(-)
                                                                   create mode 100644 uv.lock
                                                                  
                                                                  diff --git a/.gitignore b/.gitignore
                                                                  index 1dfe222..75ac226 100644
                                                                  --- a/.gitignore
                                                                  +++ b/.gitignore
                                                                  @@ -5,8 +5,10 @@
                                                                   # Don't commit any .env files
                                                                   **/*.env
                                                                   # Don't commit `uv` lockfiles
                                                                  -**/uv.lock
                                                                  +# (SIKE it turns out we actually do want to commit)
                                                                  +# **/uv.lock
                                                                   # Development tooling (keep local, not in upstream)
                                                                  +# (SIKE it turns out we actually do want to commit)
                                                                   # .chainlink/
                                                                   # .claude/
                                                                   
                                                                  diff --git a/uv.lock b/uv.lock
                                                                  new file mode 100644
                                                                  index 0000000..d9f4a00
                                                                  --- /dev/null
                                                                  +++ b/uv.lock
                                                                  @@ -0,0 +1,3504 @@
                                                                  +version = 1
                                                                  +revision = 3
                                                                  +requires-python = ">=3.12"
                                                                  +resolution-markers = [
                                                                  +    "python_full_version >= '3.14'",
                                                                  +    "python_full_version < '3.14'",
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "aiobotocore"
                                                                  +version = "2.26.0"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +dependencies = [
                                                                  +    { name = "aiohttp" },
                                                                  +    { name = "aioitertools" },
                                                                  +    { name = "botocore" },
                                                                  +    { name = "jmespath" },
                                                                  +    { name = "multidict" },
                                                                  +    { name = "python-dateutil" },
                                                                  +    { name = "wrapt" },
                                                                  +]
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/4d/f8/99fa90d9c25b78292899fd4946fce97b6353838b5ecc139ad8ba1436e70c/aiobotocore-2.26.0.tar.gz", hash = "sha256:50567feaf8dfe2b653570b4491f5bc8c6e7fb9622479d66442462c021db4fadc", size = 122026, upload-time = "2025-11-28T07:54:59.956Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/b7/58/3bf0b7d474607dc7fd67dd1365c4e0f392c8177eaf4054e5ddee3ebd53b5/aiobotocore-2.26.0-py3-none-any.whl", hash = "sha256:a793db51c07930513b74ea7a95bd79aaa42f545bdb0f011779646eafa216abec", size = 87333, upload-time = "2025-11-28T07:54:58.457Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "aiohappyeyeballs"
                                                                  +version = "2.6.1"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/26/30/f84a107a9c4331c14b2b586036f40965c128aa4fee4dda5d3d51cb14ad54/aiohappyeyeballs-2.6.1.tar.gz", hash = "sha256:c3f9d0113123803ccadfdf3f0faa505bc78e6a72d1cc4806cbd719826e943558", size = 22760, upload-time = "2025-03-12T01:42:48.764Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/0f/15/5bf3b99495fb160b63f95972b81750f18f7f4e02ad051373b669d17d44f2/aiohappyeyeballs-2.6.1-py3-none-any.whl", hash = "sha256:f349ba8f4b75cb25c99c5c2d84e997e485204d2902a9597802b0371f09331fb8", size = 15265, upload-time = "2025-03-12T01:42:47.083Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "aiohttp"
                                                                  +version = "3.13.2"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +dependencies = [
                                                                  +    { name = "aiohappyeyeballs" },
                                                                  +    { name = "aiosignal" },
                                                                  +    { name = "attrs" },
                                                                  +    { name = "frozenlist" },
                                                                  +    { name = "multidict" },
                                                                  +    { name = "propcache" },
                                                                  +    { name = "yarl" },
                                                                  +]
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/1c/ce/3b83ebba6b3207a7135e5fcaba49706f8a4b6008153b4e30540c982fae26/aiohttp-3.13.2.tar.gz", hash = "sha256:40176a52c186aefef6eb3cad2cdd30cd06e3afbe88fe8ab2af9c0b90f228daca", size = 7837994, upload-time = "2025-10-28T20:59:39.937Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/29/9b/01f00e9856d0a73260e86dd8ed0c2234a466c5c1712ce1c281548df39777/aiohttp-3.13.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:b1e56bab2e12b2b9ed300218c351ee2a3d8c8fdab5b1ec6193e11a817767e47b", size = 737623, upload-time = "2025-10-28T20:56:30.797Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/5a/1b/4be39c445e2b2bd0aab4ba736deb649fabf14f6757f405f0c9685019b9e9/aiohttp-3.13.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:364e25edaabd3d37b1db1f0cbcee8c73c9a3727bfa262b83e5e4cf3489a2a9dc", size = 492664, upload-time = "2025-10-28T20:56:32.708Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/28/66/d35dcfea8050e131cdd731dff36434390479b4045a8d0b9d7111b0a968f1/aiohttp-3.13.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c5c94825f744694c4b8db20b71dba9a257cd2ba8e010a803042123f3a25d50d7", size = 491808, upload-time = "2025-10-28T20:56:34.57Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/00/29/8e4609b93e10a853b65f8291e64985de66d4f5848c5637cddc70e98f01f8/aiohttp-3.13.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ba2715d842ffa787be87cbfce150d5e88c87a98e0b62e0f5aa489169a393dbbb", size = 1738863, upload-time = "2025-10-28T20:56:36.377Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/9d/fa/4ebdf4adcc0def75ced1a0d2d227577cd7b1b85beb7edad85fcc87693c75/aiohttp-3.13.2-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:585542825c4bc662221fb257889e011a5aa00f1ae4d75d1d246a5225289183e3", size = 1700586, upload-time = "2025-10-28T20:56:38.034Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/da/04/73f5f02ff348a3558763ff6abe99c223381b0bace05cd4530a0258e52597/aiohttp-3.13.2-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:39d02cb6025fe1aabca329c5632f48c9532a3dabccd859e7e2f110668972331f", size = 1768625, upload-time = "2025-10-28T20:56:39.75Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/f8/49/a825b79ffec124317265ca7d2344a86bcffeb960743487cb11988ffb3494/aiohttp-3.13.2-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:e67446b19e014d37342f7195f592a2a948141d15a312fe0e700c2fd2f03124f6", size = 1867281, upload-time = "2025-10-28T20:56:41.471Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/b9/48/adf56e05f81eac31edcfae45c90928f4ad50ef2e3ea72cb8376162a368f8/aiohttp-3.13.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4356474ad6333e41ccefd39eae869ba15a6c5299c9c01dfdcfdd5c107be4363e", size = 1752431, upload-time = "2025-10-28T20:56:43.162Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/30/ab/593855356eead019a74e862f21523db09c27f12fd24af72dbc3555b9bfd9/aiohttp-3.13.2-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:eeacf451c99b4525f700f078becff32c32ec327b10dcf31306a8a52d78166de7", size = 1562846, upload-time = "2025-10-28T20:56:44.85Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/39/0f/9f3d32271aa8dc35036e9668e31870a9d3b9542dd6b3e2c8a30931cb27ae/aiohttp-3.13.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d8a9b889aeabd7a4e9af0b7f4ab5ad94d42e7ff679aaec6d0db21e3b639ad58d", size = 1699606, upload-time = "2025-10-28T20:56:46.519Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/2c/3c/52d2658c5699b6ef7692a3f7128b2d2d4d9775f2a68093f74bca06cf01e1/aiohttp-3.13.2-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:fa89cb11bc71a63b69568d5b8a25c3ca25b6d54c15f907ca1c130d72f320b76b", size = 1720663, upload-time = "2025-10-28T20:56:48.528Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/9b/d4/8f8f3ff1fb7fb9e3f04fcad4e89d8a1cd8fc7d05de67e3de5b15b33008ff/aiohttp-3.13.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:8aa7c807df234f693fed0ecd507192fc97692e61fee5702cdc11155d2e5cadc8", size = 1737939, upload-time = "2025-10-28T20:56:50.77Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/03/d3/ddd348f8a27a634daae39a1b8e291ff19c77867af438af844bf8b7e3231b/aiohttp-3.13.2-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:9eb3e33fdbe43f88c3c75fa608c25e7c47bbd80f48d012763cb67c47f39a7e16", size = 1555132, upload-time = "2025-10-28T20:56:52.568Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/39/b8/46790692dc46218406f94374903ba47552f2f9f90dad554eed61bfb7b64c/aiohttp-3.13.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:9434bc0d80076138ea986833156c5a48c9c7a8abb0c96039ddbb4afc93184169", size = 1764802, upload-time = "2025-10-28T20:56:54.292Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/ba/e4/19ce547b58ab2a385e5f0b8aa3db38674785085abcf79b6e0edd1632b12f/aiohttp-3.13.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ff15c147b2ad66da1f2cbb0622313f2242d8e6e8f9b79b5206c84523a4473248", size = 1719512, upload-time = "2025-10-28T20:56:56.428Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/70/30/6355a737fed29dcb6dfdd48682d5790cb5eab050f7b4e01f49b121d3acad/aiohttp-3.13.2-cp312-cp312-win32.whl", hash = "sha256:27e569eb9d9e95dbd55c0fc3ec3a9335defbf1d8bc1d20171a49f3c4c607b93e", size = 426690, upload-time = "2025-10-28T20:56:58.736Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/0a/0d/b10ac09069973d112de6ef980c1f6bb31cb7dcd0bc363acbdad58f927873/aiohttp-3.13.2-cp312-cp312-win_amd64.whl", hash = "sha256:8709a0f05d59a71f33fd05c17fc11fcb8c30140506e13c2f5e8ee1b8964e1b45", size = 453465, upload-time = "2025-10-28T20:57:00.795Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/bf/78/7e90ca79e5aa39f9694dcfd74f4720782d3c6828113bb1f3197f7e7c4a56/aiohttp-3.13.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:7519bdc7dfc1940d201651b52bf5e03f5503bda45ad6eacf64dda98be5b2b6be", size = 732139, upload-time = "2025-10-28T20:57:02.455Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/db/ed/1f59215ab6853fbaa5c8495fa6cbc39edfc93553426152b75d82a5f32b76/aiohttp-3.13.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:088912a78b4d4f547a1f19c099d5a506df17eacec3c6f4375e2831ec1d995742", size = 490082, upload-time = "2025-10-28T20:57:04.784Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/68/7b/fe0fe0f5e05e13629d893c760465173a15ad0039c0a5b0d0040995c8075e/aiohttp-3.13.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5276807b9de9092af38ed23ce120539ab0ac955547b38563a9ba4f5b07b95293", size = 489035, upload-time = "2025-10-28T20:57:06.894Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/d2/04/db5279e38471b7ac801d7d36a57d1230feeee130bbe2a74f72731b23c2b1/aiohttp-3.13.2-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1237c1375eaef0db4dcd7c2559f42e8af7b87ea7d295b118c60c36a6e61cb811", size = 1720387, upload-time = "2025-10-28T20:57:08.685Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/31/07/8ea4326bd7dae2bd59828f69d7fdc6e04523caa55e4a70f4a8725a7e4ed2/aiohttp-3.13.2-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:96581619c57419c3d7d78703d5b78c1e5e5fc0172d60f555bdebaced82ded19a", size = 1688314, upload-time = "2025-10-28T20:57:10.693Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/48/ab/3d98007b5b87ffd519d065225438cc3b668b2f245572a8cb53da5dd2b1bc/aiohttp-3.13.2-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a2713a95b47374169409d18103366de1050fe0ea73db358fc7a7acb2880422d4", size = 1756317, upload-time = "2025-10-28T20:57:12.563Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/97/3d/801ca172b3d857fafb7b50c7c03f91b72b867a13abca982ed6b3081774ef/aiohttp-3.13.2-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:228a1cd556b3caca590e9511a89444925da87d35219a49ab5da0c36d2d943a6a", size = 1858539, upload-time = "2025-10-28T20:57:14.623Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/f7/0d/4764669bdf47bd472899b3d3db91fffbe925c8e3038ec591a2fd2ad6a14d/aiohttp-3.13.2-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ac6cde5fba8d7d8c6ac963dbb0256a9854e9fafff52fbcc58fdf819357892c3e", size = 1739597, upload-time = "2025-10-28T20:57:16.399Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/c4/52/7bd3c6693da58ba16e657eb904a5b6decfc48ecd06e9ac098591653b1566/aiohttp-3.13.2-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:f2bef8237544f4e42878c61cef4e2839fee6346dc60f5739f876a9c50be7fcdb", size = 1555006, upload-time = "2025-10-28T20:57:18.288Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/48/30/9586667acec5993b6f41d2ebcf96e97a1255a85f62f3c653110a5de4d346/aiohttp-3.13.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:16f15a4eac3bc2d76c45f7ebdd48a65d41b242eb6c31c2245463b40b34584ded", size = 1683220, upload-time = "2025-10-28T20:57:20.241Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/71/01/3afe4c96854cfd7b30d78333852e8e851dceaec1c40fd00fec90c6402dd2/aiohttp-3.13.2-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:bb7fb776645af5cc58ab804c58d7eba545a97e047254a52ce89c157b5af6cd0b", size = 1712570, upload-time = "2025-10-28T20:57:22.253Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/11/2c/22799d8e720f4697a9e66fd9c02479e40a49de3de2f0bbe7f9f78a987808/aiohttp-3.13.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:e1b4951125ec10c70802f2cb09736c895861cd39fd9dcb35107b4dc8ae6220b8", size = 1733407, upload-time = "2025-10-28T20:57:24.37Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/34/cb/90f15dd029f07cebbd91f8238a8b363978b530cd128488085b5703683594/aiohttp-3.13.2-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:550bf765101ae721ee1d37d8095f47b1f220650f85fe1af37a90ce75bab89d04", size = 1550093, upload-time = "2025-10-28T20:57:26.257Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/69/46/12dce9be9d3303ecbf4d30ad45a7683dc63d90733c2d9fe512be6716cd40/aiohttp-3.13.2-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:fe91b87fc295973096251e2d25a811388e7d8adf3bd2b97ef6ae78bc4ac6c476", size = 1758084, upload-time = "2025-10-28T20:57:28.349Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/f9/c8/0932b558da0c302ffd639fc6362a313b98fdf235dc417bc2493da8394df7/aiohttp-3.13.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e0c8e31cfcc4592cb200160344b2fb6ae0f9e4effe06c644b5a125d4ae5ebe23", size = 1716987, upload-time = "2025-10-28T20:57:30.233Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/5d/8b/f5bd1a75003daed099baec373aed678f2e9b34f2ad40d85baa1368556396/aiohttp-3.13.2-cp313-cp313-win32.whl", hash = "sha256:0740f31a60848d6edb296a0df827473eede90c689b8f9f2a4cdde74889eb2254", size = 425859, upload-time = "2025-10-28T20:57:32.105Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/5d/28/a8a9fc6957b2cee8902414e41816b5ab5536ecf43c3b1843c10e82c559b2/aiohttp-3.13.2-cp313-cp313-win_amd64.whl", hash = "sha256:a88d13e7ca367394908f8a276b89d04a3652044612b9a408a0bb22a5ed976a1a", size = 452192, upload-time = "2025-10-28T20:57:34.166Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/9b/36/e2abae1bd815f01c957cbf7be817b3043304e1c87bad526292a0410fdcf9/aiohttp-3.13.2-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:2475391c29230e063ef53a66669b7b691c9bfc3f1426a0f7bcdf1216bdbac38b", size = 735234, upload-time = "2025-10-28T20:57:36.415Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/ca/e3/1ee62dde9b335e4ed41db6bba02613295a0d5b41f74a783c142745a12763/aiohttp-3.13.2-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:f33c8748abef4d8717bb20e8fb1b3e07c6adacb7fd6beaae971a764cf5f30d61", size = 490733, upload-time = "2025-10-28T20:57:38.205Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/1a/aa/7a451b1d6a04e8d15a362af3e9b897de71d86feac3babf8894545d08d537/aiohttp-3.13.2-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:ae32f24bbfb7dbb485a24b30b1149e2f200be94777232aeadba3eecece4d0aa4", size = 491303, upload-time = "2025-10-28T20:57:40.122Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/57/1e/209958dbb9b01174870f6a7538cd1f3f28274fdbc88a750c238e2c456295/aiohttp-3.13.2-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5d7f02042c1f009ffb70067326ef183a047425bb2ff3bc434ead4dd4a4a66a2b", size = 1717965, upload-time = "2025-10-28T20:57:42.28Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/08/aa/6a01848d6432f241416bc4866cae8dc03f05a5a884d2311280f6a09c73d6/aiohttp-3.13.2-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:93655083005d71cd6c072cdab54c886e6570ad2c4592139c3fb967bfc19e4694", size = 1667221, upload-time = "2025-10-28T20:57:44.869Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/87/4f/36c1992432d31bbc789fa0b93c768d2e9047ec8c7177e5cd84ea85155f36/aiohttp-3.13.2-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:0db1e24b852f5f664cd728db140cf11ea0e82450471232a394b3d1a540b0f906", size = 1757178, upload-time = "2025-10-28T20:57:47.216Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/ac/b4/8e940dfb03b7e0f68a82b88fd182b9be0a65cb3f35612fe38c038c3112cf/aiohttp-3.13.2-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:b009194665bcd128e23eaddef362e745601afa4641930848af4c8559e88f18f9", size = 1838001, upload-time = "2025-10-28T20:57:49.337Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/d7/ef/39f3448795499c440ab66084a9db7d20ca7662e94305f175a80f5b7e0072/aiohttp-3.13.2-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c038a8fdc8103cd51dbd986ecdce141473ffd9775a7a8057a6ed9c3653478011", size = 1716325, upload-time = "2025-10-28T20:57:51.327Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/d7/51/b311500ffc860b181c05d91c59a1313bdd05c82960fdd4035a15740d431e/aiohttp-3.13.2-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:66bac29b95a00db411cd758fea0e4b9bdba6d549dfe333f9a945430f5f2cc5a6", size = 1547978, upload-time = "2025-10-28T20:57:53.554Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/31/64/b9d733296ef79815226dab8c586ff9e3df41c6aff2e16c06697b2d2e6775/aiohttp-3.13.2-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:4ebf9cfc9ba24a74cf0718f04aac2a3bbe745902cc7c5ebc55c0f3b5777ef213", size = 1682042, upload-time = "2025-10-28T20:57:55.617Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/3f/30/43d3e0f9d6473a6db7d472104c4eff4417b1e9df01774cb930338806d36b/aiohttp-3.13.2-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:a4b88ebe35ce54205c7074f7302bd08a4cb83256a3e0870c72d6f68a3aaf8e49", size = 1680085, upload-time = "2025-10-28T20:57:57.59Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/16/51/c709f352c911b1864cfd1087577760ced64b3e5bee2aa88b8c0c8e2e4972/aiohttp-3.13.2-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:98c4fb90bb82b70a4ed79ca35f656f4281885be076f3f970ce315402b53099ae", size = 1728238, upload-time = "2025-10-28T20:57:59.525Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/19/e2/19bd4c547092b773caeb48ff5ae4b1ae86756a0ee76c16727fcfd281404b/aiohttp-3.13.2-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:ec7534e63ae0f3759df3a1ed4fa6bc8f75082a924b590619c0dd2f76d7043caa", size = 1544395, upload-time = "2025-10-28T20:58:01.914Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/cf/87/860f2803b27dfc5ed7be532832a3498e4919da61299b4a1f8eb89b8ff44d/aiohttp-3.13.2-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:5b927cf9b935a13e33644cbed6c8c4b2d0f25b713d838743f8fe7191b33829c4", size = 1742965, upload-time = "2025-10-28T20:58:03.972Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/67/7f/db2fc7618925e8c7a601094d5cbe539f732df4fb570740be88ed9e40e99a/aiohttp-3.13.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:88d6c017966a78c5265d996c19cdb79235be5e6412268d7e2ce7dee339471b7a", size = 1697585, upload-time = "2025-10-28T20:58:06.189Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/0c/07/9127916cb09bb38284db5036036042b7b2c514c8ebaeee79da550c43a6d6/aiohttp-3.13.2-cp314-cp314-win32.whl", hash = "sha256:f7c183e786e299b5d6c49fb43a769f8eb8e04a2726a2bd5887b98b5cc2d67940", size = 431621, upload-time = "2025-10-28T20:58:08.636Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/fb/41/554a8a380df6d3a2bba8a7726429a23f4ac62aaf38de43bb6d6cde7b4d4d/aiohttp-3.13.2-cp314-cp314-win_amd64.whl", hash = "sha256:fe242cd381e0fb65758faf5ad96c2e460df6ee5b2de1072fe97e4127927e00b4", size = 457627, upload-time = "2025-10-28T20:58:11Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/c7/8e/3824ef98c039d3951cb65b9205a96dd2b20f22241ee17d89c5701557c826/aiohttp-3.13.2-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:f10d9c0b0188fe85398c61147bbd2a657d616c876863bfeff43376e0e3134673", size = 767360, upload-time = "2025-10-28T20:58:13.358Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/a4/0f/6a03e3fc7595421274fa34122c973bde2d89344f8a881b728fa8c774e4f1/aiohttp-3.13.2-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:e7c952aefdf2460f4ae55c5e9c3e80aa72f706a6317e06020f80e96253b1accd", size = 504616, upload-time = "2025-10-28T20:58:15.339Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/c6/aa/ed341b670f1bc8a6f2c6a718353d13b9546e2cef3544f573c6a1ff0da711/aiohttp-3.13.2-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c20423ce14771d98353d2e25e83591fa75dfa90a3c1848f3d7c68243b4fbded3", size = 509131, upload-time = "2025-10-28T20:58:17.693Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/7f/f0/c68dac234189dae5c4bbccc0f96ce0cc16b76632cfc3a08fff180045cfa4/aiohttp-3.13.2-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e96eb1a34396e9430c19d8338d2ec33015e4a87ef2b4449db94c22412e25ccdf", size = 1864168, upload-time = "2025-10-28T20:58:20.113Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/8f/65/75a9a76db8364b5d0e52a0c20eabc5d52297385d9af9c35335b924fafdee/aiohttp-3.13.2-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:23fb0783bc1a33640036465019d3bba069942616a6a2353c6907d7fe1ccdaf4e", size = 1719200, upload-time = "2025-10-28T20:58:22.583Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/f5/55/8df2ed78d7f41d232f6bd3ff866b6f617026551aa1d07e2f03458f964575/aiohttp-3.13.2-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2e1a9bea6244a1d05a4e57c295d69e159a5c50d8ef16aa390948ee873478d9a5", size = 1843497, upload-time = "2025-10-28T20:58:24.672Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/e9/e0/94d7215e405c5a02ccb6a35c7a3a6cfff242f457a00196496935f700cde5/aiohttp-3.13.2-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0a3d54e822688b56e9f6b5816fb3de3a3a64660efac64e4c2dc435230ad23bad", size = 1935703, upload-time = "2025-10-28T20:58:26.758Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/0b/78/1eeb63c3f9b2d1015a4c02788fb543141aad0a03ae3f7a7b669b2483f8d4/aiohttp-3.13.2-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7a653d872afe9f33497215745da7a943d1dc15b728a9c8da1c3ac423af35178e", size = 1792738, upload-time = "2025-10-28T20:58:29.787Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/41/75/aaf1eea4c188e51538c04cc568040e3082db263a57086ea74a7d38c39e42/aiohttp-3.13.2-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:56d36e80d2003fa3fc0207fac644216d8532e9504a785ef9a8fd013f84a42c61", size = 1624061, upload-time = "2025-10-28T20:58:32.529Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/9b/c2/3b6034de81fbcc43de8aeb209073a2286dfb50b86e927b4efd81cf848197/aiohttp-3.13.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:78cd586d8331fb8e241c2dd6b2f4061778cc69e150514b39a9e28dd050475661", size = 1789201, upload-time = "2025-10-28T20:58:34.618Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/c9/38/c15dcf6d4d890217dae79d7213988f4e5fe6183d43893a9cf2fe9e84ca8d/aiohttp-3.13.2-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:20b10bbfbff766294fe99987f7bb3b74fdd2f1a2905f2562132641ad434dcf98", size = 1776868, upload-time = "2025-10-28T20:58:38.835Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/04/75/f74fd178ac81adf4f283a74847807ade5150e48feda6aef024403716c30c/aiohttp-3.13.2-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:9ec49dff7e2b3c85cdeaa412e9d438f0ecd71676fde61ec57027dd392f00c693", size = 1790660, upload-time = "2025-10-28T20:58:41.507Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/e7/80/7368bd0d06b16b3aba358c16b919e9c46cf11587dc572091031b0e9e3ef0/aiohttp-3.13.2-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:94f05348c4406450f9d73d38efb41d669ad6cd90c7ee194810d0eefbfa875a7a", size = 1617548, upload-time = "2025-10-28T20:58:43.674Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/7d/4b/a6212790c50483cb3212e507378fbe26b5086d73941e1ec4b56a30439688/aiohttp-3.13.2-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:fa4dcb605c6f82a80c7f95713c2b11c3b8e9893b3ebd2bc9bde93165ed6107be", size = 1817240, upload-time = "2025-10-28T20:58:45.787Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/ff/f7/ba5f0ba4ea8d8f3c32850912944532b933acbf0f3a75546b89269b9b7dde/aiohttp-3.13.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:cf00e5db968c3f67eccd2778574cf64d8b27d95b237770aa32400bd7a1ca4f6c", size = 1762334, upload-time = "2025-10-28T20:58:47.936Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/7e/83/1a5a1856574588b1cad63609ea9ad75b32a8353ac995d830bf5da9357364/aiohttp-3.13.2-cp314-cp314t-win32.whl", hash = "sha256:d23b5fe492b0805a50d3371e8a728a9134d8de5447dce4c885f5587294750734", size = 464685, upload-time = "2025-10-28T20:58:50.642Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/9f/4d/d22668674122c08f4d56972297c51a624e64b3ed1efaa40187607a7cb66e/aiohttp-3.13.2-cp314-cp314t-win_amd64.whl", hash = "sha256:ff0a7b0a82a7ab905cbda74006318d1b12e37c797eb1b0d4eb3e316cf47f658f", size = 498093, upload-time = "2025-10-28T20:58:52.782Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "aioitertools"
                                                                  +version = "0.13.0"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/fd/3c/53c4a17a05fb9ea2313ee1777ff53f5e001aefd5cc85aa2f4c2d982e1e38/aioitertools-0.13.0.tar.gz", hash = "sha256:620bd241acc0bbb9ec819f1ab215866871b4bbd1f73836a55f799200ee86950c", size = 19322, upload-time = "2025-11-06T22:17:07.609Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/10/a1/510b0a7fadc6f43a6ce50152e69dbd86415240835868bb0bd9b5b88b1e06/aioitertools-0.13.0-py3-none-any.whl", hash = "sha256:0be0292b856f08dfac90e31f4739432f4cb6d7520ab9eb73e143f4f2fa5259be", size = 24182, upload-time = "2025-11-06T22:17:06.502Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "aiosignal"
                                                                  +version = "1.4.0"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +dependencies = [
                                                                  +    { name = "frozenlist" },
                                                                  +    { name = "typing-extensions", marker = "python_full_version < '3.13'" },
                                                                  +]
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/61/62/06741b579156360248d1ec624842ad0edf697050bbaf7c3e46394e106ad1/aiosignal-1.4.0.tar.gz", hash = "sha256:f47eecd9468083c2029cc99945502cb7708b082c232f9aca65da147157b251c7", size = 25007, upload-time = "2025-07-03T22:54:43.528Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/fb/76/641ae371508676492379f16e2fa48f4e2c11741bd63c48be4b12a6b09cba/aiosignal-1.4.0-py3-none-any.whl", hash = "sha256:053243f8b92b990551949e63930a839ff0cf0b0ebbe0597b0f3fb19e1a0fe82e", size = 7490, upload-time = "2025-07-03T22:54:42.156Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "annotated-types"
                                                                  +version = "0.7.0"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081, upload-time = "2024-05-20T21:33:25.928Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643, upload-time = "2024-05-20T21:33:24.1Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "anyio"
                                                                  +version = "4.11.0"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +dependencies = [
                                                                  +    { name = "idna" },
                                                                  +    { name = "sniffio" },
                                                                  +    { name = "typing-extensions", marker = "python_full_version < '3.13'" },
                                                                  +]
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/c6/78/7d432127c41b50bccba979505f272c16cbcadcc33645d5fa3a738110ae75/anyio-4.11.0.tar.gz", hash = "sha256:82a8d0b81e318cc5ce71a5f1f8b5c4e63619620b63141ef8c995fa0db95a57c4", size = 219094, upload-time = "2025-09-23T09:19:12.58Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/15/b3/9b1a8074496371342ec1e796a96f99c82c945a339cd81a8e73de28b4cf9e/anyio-4.11.0-py3-none-any.whl", hash = "sha256:0287e96f4d26d4149305414d4e3bc32f0dcd0862365a4bddea19d7a1ec38c4fc", size = 109097, upload-time = "2025-09-23T09:19:10.601Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "appnope"
                                                                  +version = "0.1.4"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/35/5d/752690df9ef5b76e169e68d6a129fa6d08a7100ca7f754c89495db3c6019/appnope-0.1.4.tar.gz", hash = "sha256:1de3860566df9caf38f01f86f65e0e13e379af54f9e4bee1e66b48f2efffd1ee", size = 4170, upload-time = "2024-02-06T09:43:11.258Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/81/29/5ecc3a15d5a33e31b26c11426c45c501e439cb865d0bff96315d86443b78/appnope-0.1.4-py2.py3-none-any.whl", hash = "sha256:502575ee11cd7a28c0205f379b525beefebab9d161b7c964670864014ed7213c", size = 4321, upload-time = "2024-02-06T09:43:09.663Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "argon2-cffi"
                                                                  +version = "25.1.0"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +dependencies = [
                                                                  +    { name = "argon2-cffi-bindings" },
                                                                  +]
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/0e/89/ce5af8a7d472a67cc819d5d998aa8c82c5d860608c4db9f46f1162d7dab9/argon2_cffi-25.1.0.tar.gz", hash = "sha256:694ae5cc8a42f4c4e2bf2ca0e64e51e23a040c6a517a85074683d3959e1346c1", size = 45706, upload-time = "2025-06-03T06:55:32.073Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/4f/d3/a8b22fa575b297cd6e3e3b0155c7e25db170edf1c74783d6a31a2490b8d9/argon2_cffi-25.1.0-py3-none-any.whl", hash = "sha256:fdc8b074db390fccb6eb4a3604ae7231f219aa669a2652e0f20e16ba513d5741", size = 14657, upload-time = "2025-06-03T06:55:30.804Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "argon2-cffi-bindings"
                                                                  +version = "25.1.0"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +dependencies = [
                                                                  +    { name = "cffi" },
                                                                  +]
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/5c/2d/db8af0df73c1cf454f71b2bbe5e356b8c1f8041c979f505b3d3186e520a9/argon2_cffi_bindings-25.1.0.tar.gz", hash = "sha256:b957f3e6ea4d55d820e40ff76f450952807013d361a65d7f28acc0acbf29229d", size = 1783441, upload-time = "2025-07-30T10:02:05.147Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/60/97/3c0a35f46e52108d4707c44b95cfe2afcafc50800b5450c197454569b776/argon2_cffi_bindings-25.1.0-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:3d3f05610594151994ca9ccb3c771115bdb4daef161976a266f0dd8aa9996b8f", size = 54393, upload-time = "2025-07-30T10:01:40.97Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/9d/f4/98bbd6ee89febd4f212696f13c03ca302b8552e7dbf9c8efa11ea4a388c3/argon2_cffi_bindings-25.1.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:8b8efee945193e667a396cbc7b4fb7d357297d6234d30a489905d96caabde56b", size = 29328, upload-time = "2025-07-30T10:01:41.916Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/43/24/90a01c0ef12ac91a6be05969f29944643bc1e5e461155ae6559befa8f00b/argon2_cffi_bindings-25.1.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:3c6702abc36bf3ccba3f802b799505def420a1b7039862014a65db3205967f5a", size = 31269, upload-time = "2025-07-30T10:01:42.716Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/d4/d3/942aa10782b2697eee7af5e12eeff5ebb325ccfb86dd8abda54174e377e4/argon2_cffi_bindings-25.1.0-cp314-cp314t-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a1c70058c6ab1e352304ac7e3b52554daadacd8d453c1752e547c76e9c99ac44", size = 86558, upload-time = "2025-07-30T10:01:43.943Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/0d/82/b484f702fec5536e71836fc2dbc8c5267b3f6e78d2d539b4eaa6f0db8bf8/argon2_cffi_bindings-25.1.0-cp314-cp314t-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e2fd3bfbff3c5d74fef31a722f729bf93500910db650c925c2d6ef879a7e51cb", size = 92364, upload-time = "2025-07-30T10:01:44.887Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/c9/c1/a606ff83b3f1735f3759ad0f2cd9e038a0ad11a3de3b6c673aa41c24bb7b/argon2_cffi_bindings-25.1.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:c4f9665de60b1b0e99bcd6be4f17d90339698ce954cfd8d9cf4f91c995165a92", size = 85637, upload-time = "2025-07-30T10:01:46.225Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/44/b4/678503f12aceb0262f84fa201f6027ed77d71c5019ae03b399b97caa2f19/argon2_cffi_bindings-25.1.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:ba92837e4a9aa6a508c8d2d7883ed5a8f6c308c89a4790e1e447a220deb79a85", size = 91934, upload-time = "2025-07-30T10:01:47.203Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/f0/c7/f36bd08ef9bd9f0a9cff9428406651f5937ce27b6c5b07b92d41f91ae541/argon2_cffi_bindings-25.1.0-cp314-cp314t-win32.whl", hash = "sha256:84a461d4d84ae1295871329b346a97f68eade8c53b6ed9a7ca2d7467f3c8ff6f", size = 28158, upload-time = "2025-07-30T10:01:48.341Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/b3/80/0106a7448abb24a2c467bf7d527fe5413b7fdfa4ad6d6a96a43a62ef3988/argon2_cffi_bindings-25.1.0-cp314-cp314t-win_amd64.whl", hash = "sha256:b55aec3565b65f56455eebc9b9f34130440404f27fe21c3b375bf1ea4d8fbae6", size = 32597, upload-time = "2025-07-30T10:01:49.112Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/05/b8/d663c9caea07e9180b2cb662772865230715cbd573ba3b5e81793d580316/argon2_cffi_bindings-25.1.0-cp314-cp314t-win_arm64.whl", hash = "sha256:87c33a52407e4c41f3b70a9c2d3f6056d88b10dad7695be708c5021673f55623", size = 28231, upload-time = "2025-07-30T10:01:49.92Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/1d/57/96b8b9f93166147826da5f90376e784a10582dd39a393c99bb62cfcf52f0/argon2_cffi_bindings-25.1.0-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:aecba1723ae35330a008418a91ea6cfcedf6d31e5fbaa056a166462ff066d500", size = 54121, upload-time = "2025-07-30T10:01:50.815Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/0a/08/a9bebdb2e0e602dde230bdde8021b29f71f7841bd54801bcfd514acb5dcf/argon2_cffi_bindings-25.1.0-cp39-abi3-macosx_10_9_x86_64.whl", hash = "sha256:2630b6240b495dfab90aebe159ff784d08ea999aa4b0d17efa734055a07d2f44", size = 29177, upload-time = "2025-07-30T10:01:51.681Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/b6/02/d297943bcacf05e4f2a94ab6f462831dc20158614e5d067c35d4e63b9acb/argon2_cffi_bindings-25.1.0-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:7aef0c91e2c0fbca6fc68e7555aa60ef7008a739cbe045541e438373bc54d2b0", size = 31090, upload-time = "2025-07-30T10:01:53.184Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/c1/93/44365f3d75053e53893ec6d733e4a5e3147502663554b4d864587c7828a7/argon2_cffi_bindings-25.1.0-cp39-abi3-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1e021e87faa76ae0d413b619fe2b65ab9a037f24c60a1e6cc43457ae20de6dc6", size = 81246, upload-time = "2025-07-30T10:01:54.145Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/09/52/94108adfdd6e2ddf58be64f959a0b9c7d4ef2fa71086c38356d22dc501ea/argon2_cffi_bindings-25.1.0-cp39-abi3-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d3e924cfc503018a714f94a49a149fdc0b644eaead5d1f089330399134fa028a", size = 87126, upload-time = "2025-07-30T10:01:55.074Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/72/70/7a2993a12b0ffa2a9271259b79cc616e2389ed1a4d93842fac5a1f923ffd/argon2_cffi_bindings-25.1.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:c87b72589133f0346a1cb8d5ecca4b933e3c9b64656c9d175270a000e73b288d", size = 80343, upload-time = "2025-07-30T10:01:56.007Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/78/9a/4e5157d893ffc712b74dbd868c7f62365618266982b64accab26bab01edc/argon2_cffi_bindings-25.1.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:1db89609c06afa1a214a69a462ea741cf735b29a57530478c06eb81dd403de99", size = 86777, upload-time = "2025-07-30T10:01:56.943Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/74/cd/15777dfde1c29d96de7f18edf4cc94c385646852e7c7b0320aa91ccca583/argon2_cffi_bindings-25.1.0-cp39-abi3-win32.whl", hash = "sha256:473bcb5f82924b1becbb637b63303ec8d10e84c8d241119419897a26116515d2", size = 27180, upload-time = "2025-07-30T10:01:57.759Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/e2/c6/a759ece8f1829d1f162261226fbfd2c6832b3ff7657384045286d2afa384/argon2_cffi_bindings-25.1.0-cp39-abi3-win_amd64.whl", hash = "sha256:a98cd7d17e9f7ce244c0803cad3c23a7d379c301ba618a5fa76a67d116618b98", size = 31715, upload-time = "2025-07-30T10:01:58.56Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/42/b9/f8d6fa329ab25128b7e98fd83a3cb34d9db5b059a9847eddb840a0af45dd/argon2_cffi_bindings-25.1.0-cp39-abi3-win_arm64.whl", hash = "sha256:b0fdbcf513833809c882823f98dc2f931cf659d9a1429616ac3adebb49f5db94", size = 27149, upload-time = "2025-07-30T10:01:59.329Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "arrow"
                                                                  +version = "1.4.0"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +dependencies = [
                                                                  +    { name = "python-dateutil" },
                                                                  +    { name = "tzdata" },
                                                                  +]
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/b9/33/032cdc44182491aa708d06a68b62434140d8c50820a087fac7af37703357/arrow-1.4.0.tar.gz", hash = "sha256:ed0cc050e98001b8779e84d461b0098c4ac597e88704a655582b21d116e526d7", size = 152931, upload-time = "2025-10-18T17:46:46.761Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/ed/c9/d7977eaacb9df673210491da99e6a247e93df98c715fc43fd136ce1d3d33/arrow-1.4.0-py3-none-any.whl", hash = "sha256:749f0769958ebdc79c173ff0b0670d59051a535fa26e8eba02953dc19eb43205", size = 68797, upload-time = "2025-10-18T17:46:45.663Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "asttokens"
                                                                  +version = "3.0.1"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/be/a5/8e3f9b6771b0b408517c82d97aed8f2036509bc247d46114925e32fe33f0/asttokens-3.0.1.tar.gz", hash = "sha256:71a4ee5de0bde6a31d64f6b13f2293ac190344478f081c3d1bccfcf5eacb0cb7", size = 62308, upload-time = "2025-11-15T16:43:48.578Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/d2/39/e7eaf1799466a4aef85b6a4fe7bd175ad2b1c6345066aa33f1f58d4b18d0/asttokens-3.0.1-py3-none-any.whl", hash = "sha256:15a3ebc0f43c2d0a50eeafea25e19046c68398e487b9f1f5b517f7c0f40f976a", size = 27047, upload-time = "2025-11-15T16:43:16.109Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "async-lru"
                                                                  +version = "2.0.5"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/b2/4d/71ec4d3939dc755264f680f6c2b4906423a304c3d18e96853f0a595dfe97/async_lru-2.0.5.tar.gz", hash = "sha256:481d52ccdd27275f42c43a928b4a50c3bfb2d67af4e78b170e3e0bb39c66e5bb", size = 10380, upload-time = "2025-03-16T17:25:36.919Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/03/49/d10027df9fce941cb8184e78a02857af36360d33e1721df81c5ed2179a1a/async_lru-2.0.5-py3-none-any.whl", hash = "sha256:ab95404d8d2605310d345932697371a5f40def0487c03d6d0ad9138de52c9943", size = 6069, upload-time = "2025-03-16T17:25:35.422Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "atdata"
                                                                  +version = "0.2.3b1"
                                                                  +source = { editable = "." }
                                                                  +dependencies = [
                                                                  +    { name = "atproto" },
                                                                  +    { name = "boto3" },
                                                                  +    { name = "fastparquet" },
                                                                  +    { name = "libipld" },
                                                                  +    { name = "msgpack" },
                                                                  +    { name = "numpy" },
                                                                  +    { name = "ormsgpack" },
                                                                  +    { name = "pandas" },
                                                                  +    { name = "pydantic" },
                                                                  +    { name = "python-dotenv" },
                                                                  +    { name = "redis-om" },
                                                                  +    { name = "requests" },
                                                                  +    { name = "s3fs" },
                                                                  +    { name = "schemamodels" },
                                                                  +    { name = "tqdm" },
                                                                  +    { name = "webdataset" },
                                                                  +]
                                                                  +
                                                                  +[package.optional-dependencies]
                                                                  +atmosphere = [
                                                                  +    { name = "atproto" },
                                                                  +]
                                                                  +
                                                                  +[package.dev-dependencies]
                                                                  +dev = [
                                                                  +    { name = "jupyter" },
                                                                  +    { name = "moto", extra = ["s3"] },
                                                                  +    { name = "pytest" },
                                                                  +    { name = "pytest-cov" },
                                                                  +    { name = "quartodoc" },
                                                                  +    { name = "ruff" },
                                                                  +]
                                                                  +
                                                                  +[package.metadata]
                                                                  +requires-dist = [
                                                                  +    { name = "atproto", specifier = ">=0.0.65" },
                                                                  +    { name = "atproto", marker = "extra == 'atmosphere'", specifier = ">=0.0.55" },
                                                                  +    { name = "boto3", specifier = ">=1.41.5" },
                                                                  +    { name = "fastparquet", specifier = ">=2024.11.0" },
                                                                  +    { name = "libipld", specifier = ">=3.3.2" },
                                                                  +    { name = "msgpack", specifier = ">=1.1.2" },
                                                                  +    { name = "numpy", specifier = ">=2.3.4" },
                                                                  +    { name = "ormsgpack", specifier = ">=1.11.0" },
                                                                  +    { name = "pandas", specifier = ">=2.3.3" },
                                                                  +    { name = "pydantic", specifier = ">=2.12.5" },
                                                                  +    { name = "python-dotenv", specifier = ">=1.2.1" },
                                                                  +    { name = "redis-om", specifier = ">=0.3.5" },
                                                                  +    { name = "requests", specifier = ">=2.32.5" },
                                                                  +    { name = "s3fs", specifier = ">=2025.12.0" },
                                                                  +    { name = "schemamodels", specifier = ">=0.9.1" },
                                                                  +    { name = "tqdm", specifier = ">=4.67.1" },
                                                                  +    { name = "webdataset", specifier = ">=1.0.2" },
                                                                  +]
                                                                  +provides-extras = ["atmosphere"]
                                                                  +
                                                                  +[package.metadata.requires-dev]
                                                                  +dev = [
                                                                  +    { name = "jupyter", specifier = ">=1.1.1" },
                                                                  +    { name = "moto", extras = ["s3"], specifier = ">=5.0.29" },
                                                                  +    { name = "pytest", specifier = ">=8.4.2" },
                                                                  +    { name = "pytest-cov", specifier = ">=7.0.0" },
                                                                  +    { name = "quartodoc", specifier = ">=0.11.1" },
                                                                  +    { name = "ruff", specifier = ">=0.14.13" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "atproto"
                                                                  +version = "0.0.65"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +dependencies = [
                                                                  +    { name = "click" },
                                                                  +    { name = "cryptography" },
                                                                  +    { name = "dnspython" },
                                                                  +    { name = "httpx" },
                                                                  +    { name = "libipld" },
                                                                  +    { name = "pydantic" },
                                                                  +    { name = "typing-extensions" },
                                                                  +    { name = "websockets" },
                                                                  +]
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/b2/0f/b6e26f99ef730f1e5779f5833ba794343df78ee1e02041d3b05bd5005066/atproto-0.0.65.tar.gz", hash = "sha256:027c6ed98746a9e6f1bb24bc18db84b80b386037709ff3af9ef927dce3dd4938", size = 210996, upload-time = "2025-12-08T15:53:44.585Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/e3/d9/360149e7bd9bac580496ce9fddc0ef320b3813aadd72be6abc011600862d/atproto-0.0.65-py3-none-any.whl", hash = "sha256:ea53dea57454c9e56318b5d25ceb35854d60ba238b38b0e5ca79aa1a2df85846", size = 446650, upload-time = "2025-12-08T15:53:43.029Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "attrs"
                                                                  +version = "25.4.0"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/6b/5c/685e6633917e101e5dcb62b9dd76946cbb57c26e133bae9e0cd36033c0a9/attrs-25.4.0.tar.gz", hash = "sha256:16d5969b87f0859ef33a48b35d55ac1be6e42ae49d5e853b597db70c35c57e11", size = 934251, upload-time = "2025-10-06T13:54:44.725Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/3a/2a/7cc015f5b9f5db42b7d48157e23356022889fc354a2813c15934b7cb5c0e/attrs-25.4.0-py3-none-any.whl", hash = "sha256:adcf7e2a1fb3b36ac48d97835bb6d8ade15b8dcce26aba8bf1d14847b57a3373", size = 67615, upload-time = "2025-10-06T13:54:43.17Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "babel"
                                                                  +version = "2.17.0"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/7d/6b/d52e42361e1aa00709585ecc30b3f9684b3ab62530771402248b1b1d6240/babel-2.17.0.tar.gz", hash = "sha256:0c54cffb19f690cdcc52a3b50bcbf71e07a808d1c80d549f2459b9d2cf0afb9d", size = 9951852, upload-time = "2025-02-01T15:17:41.026Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/b7/b8/3fe70c75fe32afc4bb507f75563d39bc5642255d1d94f1f23604725780bf/babel-2.17.0-py3-none-any.whl", hash = "sha256:4d0b53093fdfb4b21c92b5213dba5a1b23885afa8383709427046b21c366e5f2", size = 10182537, upload-time = "2025-02-01T15:17:37.39Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "beartype"
                                                                  +version = "0.22.9"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/c7/94/1009e248bbfbab11397abca7193bea6626806be9a327d399810d523a07cb/beartype-0.22.9.tar.gz", hash = "sha256:8f82b54aa723a2848a56008d18875f91c1db02c32ef6a62319a002e3e25a975f", size = 1608866, upload-time = "2025-12-13T06:50:30.72Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/71/cc/18245721fa7747065ab478316c7fea7c74777d07f37ae60db2e84f8172e8/beartype-0.22.9-py3-none-any.whl", hash = "sha256:d16c9bbc61ea14637596c5f6fbff2ee99cbe3573e46a716401734ef50c3060c2", size = 1333658, upload-time = "2025-12-13T06:50:28.266Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "beautifulsoup4"
                                                                  +version = "4.14.2"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +dependencies = [
                                                                  +    { name = "soupsieve" },
                                                                  +    { name = "typing-extensions" },
                                                                  +]
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/77/e9/df2358efd7659577435e2177bfa69cba6c33216681af51a707193dec162a/beautifulsoup4-4.14.2.tar.gz", hash = "sha256:2a98ab9f944a11acee9cc848508ec28d9228abfd522ef0fad6a02a72e0ded69e", size = 625822, upload-time = "2025-09-29T10:05:42.613Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/94/fe/3aed5d0be4d404d12d36ab97e2f1791424d9ca39c2f754a6285d59a3b01d/beautifulsoup4-4.14.2-py3-none-any.whl", hash = "sha256:5ef6fa3a8cbece8488d66985560f97ed091e22bbc4e9c2338508a9d5de6d4515", size = 106392, upload-time = "2025-09-29T10:05:43.771Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "black"
                                                                  +version = "26.1.0"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +dependencies = [
                                                                  +    { name = "click" },
                                                                  +    { name = "mypy-extensions" },
                                                                  +    { name = "packaging" },
                                                                  +    { name = "pathspec" },
                                                                  +    { name = "platformdirs" },
                                                                  +    { name = "pytokens" },
                                                                  +]
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/13/88/560b11e521c522440af991d46848a2bde64b5f7202ec14e1f46f9509d328/black-26.1.0.tar.gz", hash = "sha256:d294ac3340eef9c9eb5d29288e96dc719ff269a88e27b396340459dd85da4c58", size = 658785, upload-time = "2026-01-18T04:50:11.993Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/f5/13/710298938a61f0f54cdb4d1c0baeb672c01ff0358712eddaf29f76d32a0b/black-26.1.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6eeca41e70b5f5c84f2f913af857cf2ce17410847e1d54642e658e078da6544f", size = 1878189, upload-time = "2026-01-18T04:59:30.682Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/79/a6/5179beaa57e5dbd2ec9f1c64016214057b4265647c62125aa6aeffb05392/black-26.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:dd39eef053e58e60204f2cdf059e2442e2eb08f15989eefe259870f89614c8b6", size = 1700178, upload-time = "2026-01-18T04:59:32.387Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/8c/04/c96f79d7b93e8f09d9298b333ca0d31cd9b2ee6c46c274fd0f531de9dc61/black-26.1.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9459ad0d6cd483eacad4c6566b0f8e42af5e8b583cee917d90ffaa3778420a0a", size = 1777029, upload-time = "2026-01-18T04:59:33.767Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/49/f9/71c161c4c7aa18bdda3776b66ac2dc07aed62053c7c0ff8bbda8c2624fe2/black-26.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:a19915ec61f3a8746e8b10adbac4a577c6ba9851fa4a9e9fbfbcf319887a5791", size = 1406466, upload-time = "2026-01-18T04:59:35.177Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/4a/8b/a7b0f974e473b159d0ac1b6bcefffeb6bec465898a516ee5cc989503cbc7/black-26.1.0-cp312-cp312-win_arm64.whl", hash = "sha256:643d27fb5facc167c0b1b59d0315f2674a6e950341aed0fc05cf307d22bf4954", size = 1216393, upload-time = "2026-01-18T04:59:37.18Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/79/04/fa2f4784f7237279332aa735cdfd5ae2e7730db0072fb2041dadda9ae551/black-26.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ba1d768fbfb6930fc93b0ecc32a43d8861ded16f47a40f14afa9bb04ab93d304", size = 1877781, upload-time = "2026-01-18T04:59:39.054Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/cf/ad/5a131b01acc0e5336740a039628c0ab69d60cf09a2c87a4ec49f5826acda/black-26.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2b807c240b64609cb0e80d2200a35b23c7df82259f80bef1b2c96eb422b4aac9", size = 1699670, upload-time = "2026-01-18T04:59:41.005Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/da/7c/b05f22964316a52ab6b4265bcd52c0ad2c30d7ca6bd3d0637e438fc32d6e/black-26.1.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1de0f7d01cc894066a1153b738145b194414cc6eeaad8ef4397ac9abacf40f6b", size = 1775212, upload-time = "2026-01-18T04:59:42.545Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/a6/a3/e8d1526bea0446e040193185353920a9506eab60a7d8beb062029129c7d2/black-26.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:91a68ae46bf07868963671e4d05611b179c2313301bd756a89ad4e3b3db2325b", size = 1409953, upload-time = "2026-01-18T04:59:44.357Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/c7/5a/d62ebf4d8f5e3a1daa54adaab94c107b57be1b1a2f115a0249b41931e188/black-26.1.0-cp313-cp313-win_arm64.whl", hash = "sha256:be5e2fe860b9bd9edbf676d5b60a9282994c03fbbd40fe8f5e75d194f96064ca", size = 1217707, upload-time = "2026-01-18T04:59:45.719Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/6a/83/be35a175aacfce4b05584ac415fd317dd6c24e93a0af2dcedce0f686f5d8/black-26.1.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:9dc8c71656a79ca49b8d3e2ce8103210c9481c57798b48deeb3a8bb02db5f115", size = 1871864, upload-time = "2026-01-18T04:59:47.586Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/a5/f5/d33696c099450b1274d925a42b7a030cd3ea1f56d72e5ca8bbed5f52759c/black-26.1.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:b22b3810451abe359a964cc88121d57f7bce482b53a066de0f1584988ca36e79", size = 1701009, upload-time = "2026-01-18T04:59:49.443Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/1b/87/670dd888c537acb53a863bc15abbd85b22b429237d9de1b77c0ed6b79c42/black-26.1.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:53c62883b3f999f14e5d30b5a79bd437236658ad45b2f853906c7cbe79de00af", size = 1767806, upload-time = "2026-01-18T04:59:50.769Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/fe/9c/cd3deb79bfec5bcf30f9d2100ffeec63eecce826eb63e3961708b9431ff1/black-26.1.0-cp314-cp314-win_amd64.whl", hash = "sha256:f016baaadc423dc960cdddf9acae679e71ee02c4c341f78f3179d7e4819c095f", size = 1433217, upload-time = "2026-01-18T04:59:52.218Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/4e/29/f3be41a1cf502a283506f40f5d27203249d181f7a1a2abce1c6ce188035a/black-26.1.0-cp314-cp314-win_arm64.whl", hash = "sha256:66912475200b67ef5a0ab665011964bf924745103f51977a78b4fb92a9fc1bf0", size = 1245773, upload-time = "2026-01-18T04:59:54.457Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/e4/3d/51bdb3ecbfadfaf825ec0c75e1de6077422b4afa2091c6c9ba34fbfc0c2d/black-26.1.0-py3-none-any.whl", hash = "sha256:1054e8e47ebd686e078c0bb0eaf31e6ce69c966058d122f2c0c950311f9f3ede", size = 204010, upload-time = "2026-01-18T04:50:09.978Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "bleach"
                                                                  +version = "6.3.0"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +dependencies = [
                                                                  +    { name = "webencodings" },
                                                                  +]
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/07/18/3c8523962314be6bf4c8989c79ad9531c825210dd13a8669f6b84336e8bd/bleach-6.3.0.tar.gz", hash = "sha256:6f3b91b1c0a02bb9a78b5a454c92506aa0fdf197e1d5e114d2e00c6f64306d22", size = 203533, upload-time = "2025-10-27T17:57:39.211Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/cd/3a/577b549de0cc09d95f11087ee63c739bba856cd3952697eec4c4bb91350a/bleach-6.3.0-py3-none-any.whl", hash = "sha256:fe10ec77c93ddf3d13a73b035abaac7a9f5e436513864ccdad516693213c65d6", size = 164437, upload-time = "2025-10-27T17:57:37.538Z" },
                                                                  +]
                                                                  +
                                                                  +[package.optional-dependencies]
                                                                  +css = [
                                                                  +    { name = "tinycss2" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "boto3"
                                                                  +version = "1.41.5"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +dependencies = [
                                                                  +    { name = "botocore" },
                                                                  +    { name = "jmespath" },
                                                                  +    { name = "s3transfer" },
                                                                  +]
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/5b/81/450cd4143864959264a3d80f9246175a20de8c1e50ec889c710eaa28cdd9/boto3-1.41.5.tar.gz", hash = "sha256:bc7806bee681dfdff2fe2b74967b107a56274f1e66ebe4d20dc8eee1ea408d17", size = 111594, upload-time = "2025-11-26T20:27:47.021Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/3c/56/f47a80254ed4991cce9a2f6d8ae8aafbc8df1c3270e966b2927289e5a12f/boto3-1.41.5-py3-none-any.whl", hash = "sha256:bb278111bfb4c33dca8342bda49c9db7685e43debbfa00cc2a5eb854dd54b745", size = 139344, upload-time = "2025-11-26T20:27:45.571Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "botocore"
                                                                  +version = "1.41.5"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +dependencies = [
                                                                  +    { name = "jmespath" },
                                                                  +    { name = "python-dateutil" },
                                                                  +    { name = "urllib3" },
                                                                  +]
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/90/22/7fe08c726a2e3b11a0aef8bf177e83891c9cb2dc1809d35c9ed91a9e60e6/botocore-1.41.5.tar.gz", hash = "sha256:0367622b811597d183bfcaab4a350f0d3ede712031ce792ef183cabdee80d3bf", size = 14668152, upload-time = "2025-11-26T20:27:38.026Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/4e/4e/21cd0b8f365449f1576f93de1ec8718ed18a7a3bc086dfbdeb79437bba7a/botocore-1.41.5-py3-none-any.whl", hash = "sha256:3fef7fcda30c82c27202d232cfdbd6782cb27f20f8e7e21b20606483e66ee73a", size = 14337008, upload-time = "2025-11-26T20:27:35.208Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "braceexpand"
                                                                  +version = "0.1.7"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/54/93/badd4f5ccf25209f3fef2573073da9fe4a45a3da99fca2f800f942130c0f/braceexpand-0.1.7.tar.gz", hash = "sha256:e6e539bd20eaea53547472ff94f4fb5c3d3bf9d0a89388c4b56663aba765f705", size = 7777, upload-time = "2021-05-07T13:49:07.323Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/fa/93/e8c04e80e82391a6e51f218ca49720f64236bc824e92152a2633b74cf7ab/braceexpand-0.1.7-py2.py3-none-any.whl", hash = "sha256:91332d53de7828103dcae5773fb43bc34950b0c8160e35e0f44c4427a3b85014", size = 5923, upload-time = "2021-05-07T13:49:05.146Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "certifi"
                                                                  +version = "2025.11.12"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/a2/8c/58f469717fa48465e4a50c014a0400602d3c437d7c0c468e17ada824da3a/certifi-2025.11.12.tar.gz", hash = "sha256:d8ab5478f2ecd78af242878415affce761ca6bc54a22a27e026d7c25357c3316", size = 160538, upload-time = "2025-11-12T02:54:51.517Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/70/7d/9bc192684cea499815ff478dfcdc13835ddf401365057044fb721ec6bddb/certifi-2025.11.12-py3-none-any.whl", hash = "sha256:97de8790030bbd5c2d96b7ec782fc2f7820ef8dba6db909ccf95449f2d062d4b", size = 159438, upload-time = "2025-11-12T02:54:49.735Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "cffi"
                                                                  +version = "2.0.0"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +dependencies = [
                                                                  +    { name = "pycparser", marker = "implementation_name != 'PyPy'" },
                                                                  +]
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/eb/56/b1ba7935a17738ae8453301356628e8147c79dbb825bcbc73dc7401f9846/cffi-2.0.0.tar.gz", hash = "sha256:44d1b5909021139fe36001ae048dbdde8214afa20200eda0f64c068cac5d5529", size = 523588, upload-time = "2025-09-08T23:24:04.541Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/ea/47/4f61023ea636104d4f16ab488e268b93008c3d0bb76893b1b31db1f96802/cffi-2.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6d02d6655b0e54f54c4ef0b94eb6be0607b70853c45ce98bd278dc7de718be5d", size = 185271, upload-time = "2025-09-08T23:22:44.795Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/df/a2/781b623f57358e360d62cdd7a8c681f074a71d445418a776eef0aadb4ab4/cffi-2.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8eca2a813c1cb7ad4fb74d368c2ffbbb4789d377ee5bb8df98373c2cc0dee76c", size = 181048, upload-time = "2025-09-08T23:22:45.938Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/ff/df/a4f0fbd47331ceeba3d37c2e51e9dfc9722498becbeec2bd8bc856c9538a/cffi-2.0.0-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:21d1152871b019407d8ac3985f6775c079416c282e431a4da6afe7aefd2bccbe", size = 212529, upload-time = "2025-09-08T23:22:47.349Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/d5/72/12b5f8d3865bf0f87cf1404d8c374e7487dcf097a1c91c436e72e6badd83/cffi-2.0.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b21e08af67b8a103c71a250401c78d5e0893beff75e28c53c98f4de42f774062", size = 220097, upload-time = "2025-09-08T23:22:48.677Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/c2/95/7a135d52a50dfa7c882ab0ac17e8dc11cec9d55d2c18dda414c051c5e69e/cffi-2.0.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:1e3a615586f05fc4065a8b22b8152f0c1b00cdbc60596d187c2a74f9e3036e4e", size = 207983, upload-time = "2025-09-08T23:22:50.06Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/3a/c8/15cb9ada8895957ea171c62dc78ff3e99159ee7adb13c0123c001a2546c1/cffi-2.0.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:81afed14892743bbe14dacb9e36d9e0e504cd204e0b165062c488942b9718037", size = 206519, upload-time = "2025-09-08T23:22:51.364Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/78/2d/7fa73dfa841b5ac06c7b8855cfc18622132e365f5b81d02230333ff26e9e/cffi-2.0.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3e17ed538242334bf70832644a32a7aae3d83b57567f9fd60a26257e992b79ba", size = 219572, upload-time = "2025-09-08T23:22:52.902Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/07/e0/267e57e387b4ca276b90f0434ff88b2c2241ad72b16d31836adddfd6031b/cffi-2.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3925dd22fa2b7699ed2617149842d2e6adde22b262fcbfada50e3d195e4b3a94", size = 222963, upload-time = "2025-09-08T23:22:54.518Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/b6/75/1f2747525e06f53efbd878f4d03bac5b859cbc11c633d0fb81432d98a795/cffi-2.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2c8f814d84194c9ea681642fd164267891702542f028a15fc97d4674b6206187", size = 221361, upload-time = "2025-09-08T23:22:55.867Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/7b/2b/2b6435f76bfeb6bbf055596976da087377ede68df465419d192acf00c437/cffi-2.0.0-cp312-cp312-win32.whl", hash = "sha256:da902562c3e9c550df360bfa53c035b2f241fed6d9aef119048073680ace4a18", size = 172932, upload-time = "2025-09-08T23:22:57.188Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/f8/ed/13bd4418627013bec4ed6e54283b1959cf6db888048c7cf4b4c3b5b36002/cffi-2.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:da68248800ad6320861f129cd9c1bf96ca849a2771a59e0344e88681905916f5", size = 183557, upload-time = "2025-09-08T23:22:58.351Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/95/31/9f7f93ad2f8eff1dbc1c3656d7ca5bfd8fb52c9d786b4dcf19b2d02217fa/cffi-2.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:4671d9dd5ec934cb9a73e7ee9676f9362aba54f7f34910956b84d727b0d73fb6", size = 177762, upload-time = "2025-09-08T23:22:59.668Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/4b/8d/a0a47a0c9e413a658623d014e91e74a50cdd2c423f7ccfd44086ef767f90/cffi-2.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:00bdf7acc5f795150faa6957054fbbca2439db2f775ce831222b66f192f03beb", size = 185230, upload-time = "2025-09-08T23:23:00.879Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/4a/d2/a6c0296814556c68ee32009d9c2ad4f85f2707cdecfd7727951ec228005d/cffi-2.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:45d5e886156860dc35862657e1494b9bae8dfa63bf56796f2fb56e1679fc0bca", size = 181043, upload-time = "2025-09-08T23:23:02.231Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/b0/1e/d22cc63332bd59b06481ceaac49d6c507598642e2230f201649058a7e704/cffi-2.0.0-cp313-cp313-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:07b271772c100085dd28b74fa0cd81c8fb1a3ba18b21e03d7c27f3436a10606b", size = 212446, upload-time = "2025-09-08T23:23:03.472Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/a9/f5/a2c23eb03b61a0b8747f211eb716446c826ad66818ddc7810cc2cc19b3f2/cffi-2.0.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d48a880098c96020b02d5a1f7d9251308510ce8858940e6fa99ece33f610838b", size = 220101, upload-time = "2025-09-08T23:23:04.792Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/f2/7f/e6647792fc5850d634695bc0e6ab4111ae88e89981d35ac269956605feba/cffi-2.0.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:f93fd8e5c8c0a4aa1f424d6173f14a892044054871c771f8566e4008eaa359d2", size = 207948, upload-time = "2025-09-08T23:23:06.127Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/cb/1e/a5a1bd6f1fb30f22573f76533de12a00bf274abcdc55c8edab639078abb6/cffi-2.0.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:dd4f05f54a52fb558f1ba9f528228066954fee3ebe629fc1660d874d040ae5a3", size = 206422, upload-time = "2025-09-08T23:23:07.753Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/98/df/0a1755e750013a2081e863e7cd37e0cdd02664372c754e5560099eb7aa44/cffi-2.0.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c8d3b5532fc71b7a77c09192b4a5a200ea992702734a2e9279a37f2478236f26", size = 219499, upload-time = "2025-09-08T23:23:09.648Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/50/e1/a969e687fcf9ea58e6e2a928ad5e2dd88cc12f6f0ab477e9971f2309b57c/cffi-2.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d9b29c1f0ae438d5ee9acb31cadee00a58c46cc9c0b2f9038c6b0b3470877a8c", size = 222928, upload-time = "2025-09-08T23:23:10.928Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/36/54/0362578dd2c9e557a28ac77698ed67323ed5b9775ca9d3fe73fe191bb5d8/cffi-2.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6d50360be4546678fc1b79ffe7a66265e28667840010348dd69a314145807a1b", size = 221302, upload-time = "2025-09-08T23:23:12.42Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/eb/6d/bf9bda840d5f1dfdbf0feca87fbdb64a918a69bca42cfa0ba7b137c48cb8/cffi-2.0.0-cp313-cp313-win32.whl", hash = "sha256:74a03b9698e198d47562765773b4a8309919089150a0bb17d829ad7b44b60d27", size = 172909, upload-time = "2025-09-08T23:23:14.32Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/37/18/6519e1ee6f5a1e579e04b9ddb6f1676c17368a7aba48299c3759bbc3c8b3/cffi-2.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:19f705ada2530c1167abacb171925dd886168931e0a7b78f5bffcae5c6b5be75", size = 183402, upload-time = "2025-09-08T23:23:15.535Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/cb/0e/02ceeec9a7d6ee63bb596121c2c8e9b3a9e150936f4fbef6ca1943e6137c/cffi-2.0.0-cp313-cp313-win_arm64.whl", hash = "sha256:256f80b80ca3853f90c21b23ee78cd008713787b1b1e93eae9f3d6a7134abd91", size = 177780, upload-time = "2025-09-08T23:23:16.761Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/92/c4/3ce07396253a83250ee98564f8d7e9789fab8e58858f35d07a9a2c78de9f/cffi-2.0.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:fc33c5141b55ed366cfaad382df24fe7dcbc686de5be719b207bb248e3053dc5", size = 185320, upload-time = "2025-09-08T23:23:18.087Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/59/dd/27e9fa567a23931c838c6b02d0764611c62290062a6d4e8ff7863daf9730/cffi-2.0.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c654de545946e0db659b3400168c9ad31b5d29593291482c43e3564effbcee13", size = 181487, upload-time = "2025-09-08T23:23:19.622Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/d6/43/0e822876f87ea8a4ef95442c3d766a06a51fc5298823f884ef87aaad168c/cffi-2.0.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:24b6f81f1983e6df8db3adc38562c83f7d4a0c36162885ec7f7b77c7dcbec97b", size = 220049, upload-time = "2025-09-08T23:23:20.853Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/b4/89/76799151d9c2d2d1ead63c2429da9ea9d7aac304603de0c6e8764e6e8e70/cffi-2.0.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:12873ca6cb9b0f0d3a0da705d6086fe911591737a59f28b7936bdfed27c0d47c", size = 207793, upload-time = "2025-09-08T23:23:22.08Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/bb/dd/3465b14bb9e24ee24cb88c9e3730f6de63111fffe513492bf8c808a3547e/cffi-2.0.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:d9b97165e8aed9272a6bb17c01e3cc5871a594a446ebedc996e2397a1c1ea8ef", size = 206300, upload-time = "2025-09-08T23:23:23.314Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/47/d9/d83e293854571c877a92da46fdec39158f8d7e68da75bf73581225d28e90/cffi-2.0.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:afb8db5439b81cf9c9d0c80404b60c3cc9c3add93e114dcae767f1477cb53775", size = 219244, upload-time = "2025-09-08T23:23:24.541Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/2b/0f/1f177e3683aead2bb00f7679a16451d302c436b5cbf2505f0ea8146ef59e/cffi-2.0.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:737fe7d37e1a1bffe70bd5754ea763a62a066dc5913ca57e957824b72a85e205", size = 222828, upload-time = "2025-09-08T23:23:26.143Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/c6/0f/cafacebd4b040e3119dcb32fed8bdef8dfe94da653155f9d0b9dc660166e/cffi-2.0.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:38100abb9d1b1435bc4cc340bb4489635dc2f0da7456590877030c9b3d40b0c1", size = 220926, upload-time = "2025-09-08T23:23:27.873Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/3e/aa/df335faa45b395396fcbc03de2dfcab242cd61a9900e914fe682a59170b1/cffi-2.0.0-cp314-cp314-win32.whl", hash = "sha256:087067fa8953339c723661eda6b54bc98c5625757ea62e95eb4898ad5e776e9f", size = 175328, upload-time = "2025-09-08T23:23:44.61Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/bb/92/882c2d30831744296ce713f0feb4c1cd30f346ef747b530b5318715cc367/cffi-2.0.0-cp314-cp314-win_amd64.whl", hash = "sha256:203a48d1fb583fc7d78a4c6655692963b860a417c0528492a6bc21f1aaefab25", size = 185650, upload-time = "2025-09-08T23:23:45.848Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/9f/2c/98ece204b9d35a7366b5b2c6539c350313ca13932143e79dc133ba757104/cffi-2.0.0-cp314-cp314-win_arm64.whl", hash = "sha256:dbd5c7a25a7cb98f5ca55d258b103a2054f859a46ae11aaf23134f9cc0d356ad", size = 180687, upload-time = "2025-09-08T23:23:47.105Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/3e/61/c768e4d548bfa607abcda77423448df8c471f25dbe64fb2ef6d555eae006/cffi-2.0.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:9a67fc9e8eb39039280526379fb3a70023d77caec1852002b4da7e8b270c4dd9", size = 188773, upload-time = "2025-09-08T23:23:29.347Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/2c/ea/5f76bce7cf6fcd0ab1a1058b5af899bfbef198bea4d5686da88471ea0336/cffi-2.0.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:7a66c7204d8869299919db4d5069a82f1561581af12b11b3c9f48c584eb8743d", size = 185013, upload-time = "2025-09-08T23:23:30.63Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/be/b4/c56878d0d1755cf9caa54ba71e5d049479c52f9e4afc230f06822162ab2f/cffi-2.0.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7cc09976e8b56f8cebd752f7113ad07752461f48a58cbba644139015ac24954c", size = 221593, upload-time = "2025-09-08T23:23:31.91Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/e0/0d/eb704606dfe8033e7128df5e90fee946bbcb64a04fcdaa97321309004000/cffi-2.0.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:92b68146a71df78564e4ef48af17551a5ddd142e5190cdf2c5624d0c3ff5b2e8", size = 209354, upload-time = "2025-09-08T23:23:33.214Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/d8/19/3c435d727b368ca475fb8742ab97c9cb13a0de600ce86f62eab7fa3eea60/cffi-2.0.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:b1e74d11748e7e98e2f426ab176d4ed720a64412b6a15054378afdb71e0f37dc", size = 208480, upload-time = "2025-09-08T23:23:34.495Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/d0/44/681604464ed9541673e486521497406fadcc15b5217c3e326b061696899a/cffi-2.0.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:28a3a209b96630bca57cce802da70c266eb08c6e97e5afd61a75611ee6c64592", size = 221584, upload-time = "2025-09-08T23:23:36.096Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/25/8e/342a504ff018a2825d395d44d63a767dd8ebc927ebda557fecdaca3ac33a/cffi-2.0.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:7553fb2090d71822f02c629afe6042c299edf91ba1bf94951165613553984512", size = 224443, upload-time = "2025-09-08T23:23:37.328Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/e1/5e/b666bacbbc60fbf415ba9988324a132c9a7a0448a9a8f125074671c0f2c3/cffi-2.0.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:6c6c373cfc5c83a975506110d17457138c8c63016b563cc9ed6e056a82f13ce4", size = 223437, upload-time = "2025-09-08T23:23:38.945Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/a0/1d/ec1a60bd1a10daa292d3cd6bb0b359a81607154fb8165f3ec95fe003b85c/cffi-2.0.0-cp314-cp314t-win32.whl", hash = "sha256:1fc9ea04857caf665289b7a75923f2c6ed559b8298a1b8c49e59f7dd95c8481e", size = 180487, upload-time = "2025-09-08T23:23:40.423Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/bf/41/4c1168c74fac325c0c8156f04b6749c8b6a8f405bbf91413ba088359f60d/cffi-2.0.0-cp314-cp314t-win_amd64.whl", hash = "sha256:d68b6cef7827e8641e8ef16f4494edda8b36104d79773a334beaa1e3521430f6", size = 191726, upload-time = "2025-09-08T23:23:41.742Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/ae/3a/dbeec9d1ee0844c679f6bb5d6ad4e9f198b1224f4e7a32825f47f6192b0c/cffi-2.0.0-cp314-cp314t-win_arm64.whl", hash = "sha256:0a1527a803f0a659de1af2e1fd700213caba79377e27e4693648c2923da066f9", size = 184195, upload-time = "2025-09-08T23:23:43.004Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "charset-normalizer"
                                                                  +version = "3.4.4"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/13/69/33ddede1939fdd074bce5434295f38fae7136463422fe4fd3e0e89b98062/charset_normalizer-3.4.4.tar.gz", hash = "sha256:94537985111c35f28720e43603b8e7b43a6ecfb2ce1d3058bbe955b73404e21a", size = 129418, upload-time = "2025-10-14T04:42:32.879Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/f3/85/1637cd4af66fa687396e757dec650f28025f2a2f5a5531a3208dc0ec43f2/charset_normalizer-3.4.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0a98e6759f854bd25a58a73fa88833fba3b7c491169f86ce1180c948ab3fd394", size = 208425, upload-time = "2025-10-14T04:40:53.353Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/9d/6a/04130023fef2a0d9c62d0bae2649b69f7b7d8d24ea5536feef50551029df/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b5b290ccc2a263e8d185130284f8501e3e36c5e02750fc6b6bdeb2e9e96f1e25", size = 148162, upload-time = "2025-10-14T04:40:54.558Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/78/29/62328d79aa60da22c9e0b9a66539feae06ca0f5a4171ac4f7dc285b83688/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74bb723680f9f7a6234dcf67aea57e708ec1fbdf5699fb91dfd6f511b0a320ef", size = 144558, upload-time = "2025-10-14T04:40:55.677Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/86/bb/b32194a4bf15b88403537c2e120b817c61cd4ecffa9b6876e941c3ee38fe/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f1e34719c6ed0b92f418c7c780480b26b5d9c50349e9a9af7d76bf757530350d", size = 161497, upload-time = "2025-10-14T04:40:57.217Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/19/89/a54c82b253d5b9b111dc74aca196ba5ccfcca8242d0fb64146d4d3183ff1/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2437418e20515acec67d86e12bf70056a33abdacb5cb1655042f6538d6b085a8", size = 159240, upload-time = "2025-10-14T04:40:58.358Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/c0/10/d20b513afe03acc89ec33948320a5544d31f21b05368436d580dec4e234d/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:11d694519d7f29d6cd09f6ac70028dba10f92f6cdd059096db198c283794ac86", size = 153471, upload-time = "2025-10-14T04:40:59.468Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/61/fa/fbf177b55bdd727010f9c0a3c49eefa1d10f960e5f09d1d887bf93c2e698/charset_normalizer-3.4.4-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ac1c4a689edcc530fc9d9aa11f5774b9e2f33f9a0c6a57864e90908f5208d30a", size = 150864, upload-time = "2025-10-14T04:41:00.623Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/05/12/9fbc6a4d39c0198adeebbde20b619790e9236557ca59fc40e0e3cebe6f40/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:21d142cc6c0ec30d2efee5068ca36c128a30b0f2c53c1c07bd78cb6bc1d3be5f", size = 150647, upload-time = "2025-10-14T04:41:01.754Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/ad/1f/6a9a593d52e3e8c5d2b167daf8c6b968808efb57ef4c210acb907c365bc4/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:5dbe56a36425d26d6cfb40ce79c314a2e4dd6211d51d6d2191c00bed34f354cc", size = 145110, upload-time = "2025-10-14T04:41:03.231Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/30/42/9a52c609e72471b0fc54386dc63c3781a387bb4fe61c20231a4ebcd58bdd/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:5bfbb1b9acf3334612667b61bd3002196fe2a1eb4dd74d247e0f2a4d50ec9bbf", size = 162839, upload-time = "2025-10-14T04:41:04.715Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/c4/5b/c0682bbf9f11597073052628ddd38344a3d673fda35a36773f7d19344b23/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:d055ec1e26e441f6187acf818b73564e6e6282709e9bcb5b63f5b23068356a15", size = 150667, upload-time = "2025-10-14T04:41:05.827Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/e4/24/a41afeab6f990cf2daf6cb8c67419b63b48cf518e4f56022230840c9bfb2/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:af2d8c67d8e573d6de5bc30cdb27e9b95e49115cd9baad5ddbd1a6207aaa82a9", size = 160535, upload-time = "2025-10-14T04:41:06.938Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/2a/e5/6a4ce77ed243c4a50a1fecca6aaaab419628c818a49434be428fe24c9957/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:780236ac706e66881f3b7f2f32dfe90507a09e67d1d454c762cf642e6e1586e0", size = 154816, upload-time = "2025-10-14T04:41:08.101Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/a8/ef/89297262b8092b312d29cdb2517cb1237e51db8ecef2e9af5edbe7b683b1/charset_normalizer-3.4.4-cp312-cp312-win32.whl", hash = "sha256:5833d2c39d8896e4e19b689ffc198f08ea58116bee26dea51e362ecc7cd3ed26", size = 99694, upload-time = "2025-10-14T04:41:09.23Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/3d/2d/1e5ed9dd3b3803994c155cd9aacb60c82c331bad84daf75bcb9c91b3295e/charset_normalizer-3.4.4-cp312-cp312-win_amd64.whl", hash = "sha256:a79cfe37875f822425b89a82333404539ae63dbdddf97f84dcbc3d339aae9525", size = 107131, upload-time = "2025-10-14T04:41:10.467Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/d0/d9/0ed4c7098a861482a7b6a95603edce4c0d9db2311af23da1fb2b75ec26fc/charset_normalizer-3.4.4-cp312-cp312-win_arm64.whl", hash = "sha256:376bec83a63b8021bb5c8ea75e21c4ccb86e7e45ca4eb81146091b56599b80c3", size = 100390, upload-time = "2025-10-14T04:41:11.915Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/97/45/4b3a1239bbacd321068ea6e7ac28875b03ab8bc0aa0966452db17cd36714/charset_normalizer-3.4.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:e1f185f86a6f3403aa2420e815904c67b2f9ebc443f045edd0de921108345794", size = 208091, upload-time = "2025-10-14T04:41:13.346Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/7d/62/73a6d7450829655a35bb88a88fca7d736f9882a27eacdca2c6d505b57e2e/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b39f987ae8ccdf0d2642338faf2abb1862340facc796048b604ef14919e55ed", size = 147936, upload-time = "2025-10-14T04:41:14.461Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/89/c5/adb8c8b3d6625bef6d88b251bbb0d95f8205831b987631ab0c8bb5d937c2/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3162d5d8ce1bb98dd51af660f2121c55d0fa541b46dff7bb9b9f86ea1d87de72", size = 144180, upload-time = "2025-10-14T04:41:15.588Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/91/ed/9706e4070682d1cc219050b6048bfd293ccf67b3d4f5a4f39207453d4b99/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:81d5eb2a312700f4ecaa977a8235b634ce853200e828fbadf3a9c50bab278328", size = 161346, upload-time = "2025-10-14T04:41:16.738Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/d5/0d/031f0d95e4972901a2f6f09ef055751805ff541511dc1252ba3ca1f80cf5/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5bd2293095d766545ec1a8f612559f6b40abc0eb18bb2f5d1171872d34036ede", size = 158874, upload-time = "2025-10-14T04:41:17.923Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/f5/83/6ab5883f57c9c801ce5e5677242328aa45592be8a00644310a008d04f922/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a8a8b89589086a25749f471e6a900d3f662d1d3b6e2e59dcecf787b1cc3a1894", size = 153076, upload-time = "2025-10-14T04:41:19.106Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/75/1e/5ff781ddf5260e387d6419959ee89ef13878229732732ee73cdae01800f2/charset_normalizer-3.4.4-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc7637e2f80d8530ee4a78e878bce464f70087ce73cf7c1caf142416923b98f1", size = 150601, upload-time = "2025-10-14T04:41:20.245Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/d7/57/71be810965493d3510a6ca79b90c19e48696fb1ff964da319334b12677f0/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f8bf04158c6b607d747e93949aa60618b61312fe647a6369f88ce2ff16043490", size = 150376, upload-time = "2025-10-14T04:41:21.398Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/e5/d5/c3d057a78c181d007014feb7e9f2e65905a6c4ef182c0ddf0de2924edd65/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:554af85e960429cf30784dd47447d5125aaa3b99a6f0683589dbd27e2f45da44", size = 144825, upload-time = "2025-10-14T04:41:22.583Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/e6/8c/d0406294828d4976f275ffbe66f00266c4b3136b7506941d87c00cab5272/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:74018750915ee7ad843a774364e13a3db91682f26142baddf775342c3f5b1133", size = 162583, upload-time = "2025-10-14T04:41:23.754Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/d7/24/e2aa1f18c8f15c4c0e932d9287b8609dd30ad56dbe41d926bd846e22fb8d/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:c0463276121fdee9c49b98908b3a89c39be45d86d1dbaa22957e38f6321d4ce3", size = 150366, upload-time = "2025-10-14T04:41:25.27Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/e4/5b/1e6160c7739aad1e2df054300cc618b06bf784a7a164b0f238360721ab86/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:362d61fd13843997c1c446760ef36f240cf81d3ebf74ac62652aebaf7838561e", size = 160300, upload-time = "2025-10-14T04:41:26.725Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/7a/10/f882167cd207fbdd743e55534d5d9620e095089d176d55cb22d5322f2afd/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9a26f18905b8dd5d685d6d07b0cdf98a79f3c7a918906af7cc143ea2e164c8bc", size = 154465, upload-time = "2025-10-14T04:41:28.322Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/89/66/c7a9e1b7429be72123441bfdbaf2bc13faab3f90b933f664db506dea5915/charset_normalizer-3.4.4-cp313-cp313-win32.whl", hash = "sha256:9b35f4c90079ff2e2edc5b26c0c77925e5d2d255c42c74fdb70fb49b172726ac", size = 99404, upload-time = "2025-10-14T04:41:29.95Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/c4/26/b9924fa27db384bdcd97ab83b4f0a8058d96ad9626ead570674d5e737d90/charset_normalizer-3.4.4-cp313-cp313-win_amd64.whl", hash = "sha256:b435cba5f4f750aa6c0a0d92c541fb79f69a387c91e61f1795227e4ed9cece14", size = 107092, upload-time = "2025-10-14T04:41:31.188Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/af/8f/3ed4bfa0c0c72a7ca17f0380cd9e4dd842b09f664e780c13cff1dcf2ef1b/charset_normalizer-3.4.4-cp313-cp313-win_arm64.whl", hash = "sha256:542d2cee80be6f80247095cc36c418f7bddd14f4a6de45af91dfad36d817bba2", size = 100408, upload-time = "2025-10-14T04:41:32.624Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/2a/35/7051599bd493e62411d6ede36fd5af83a38f37c4767b92884df7301db25d/charset_normalizer-3.4.4-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:da3326d9e65ef63a817ecbcc0df6e94463713b754fe293eaa03da99befb9a5bd", size = 207746, upload-time = "2025-10-14T04:41:33.773Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/10/9a/97c8d48ef10d6cd4fcead2415523221624bf58bcf68a802721a6bc807c8f/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8af65f14dc14a79b924524b1e7fffe304517b2bff5a58bf64f30b98bbc5079eb", size = 147889, upload-time = "2025-10-14T04:41:34.897Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/10/bf/979224a919a1b606c82bd2c5fa49b5c6d5727aa47b4312bb27b1734f53cd/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74664978bb272435107de04e36db5a9735e78232b85b77d45cfb38f758efd33e", size = 143641, upload-time = "2025-10-14T04:41:36.116Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/ba/33/0ad65587441fc730dc7bd90e9716b30b4702dc7b617e6ba4997dc8651495/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:752944c7ffbfdd10c074dc58ec2d5a8a4cd9493b314d367c14d24c17684ddd14", size = 160779, upload-time = "2025-10-14T04:41:37.229Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/67/ed/331d6b249259ee71ddea93f6f2f0a56cfebd46938bde6fcc6f7b9a3d0e09/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d1f13550535ad8cff21b8d757a3257963e951d96e20ec82ab44bc64aeb62a191", size = 159035, upload-time = "2025-10-14T04:41:38.368Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/67/ff/f6b948ca32e4f2a4576aa129d8bed61f2e0543bf9f5f2b7fc3758ed005c9/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ecaae4149d99b1c9e7b88bb03e3221956f68fd6d50be2ef061b2381b61d20838", size = 152542, upload-time = "2025-10-14T04:41:39.862Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/16/85/276033dcbcc369eb176594de22728541a925b2632f9716428c851b149e83/charset_normalizer-3.4.4-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:cb6254dc36b47a990e59e1068afacdcd02958bdcce30bb50cc1700a8b9d624a6", size = 149524, upload-time = "2025-10-14T04:41:41.319Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/9e/f2/6a2a1f722b6aba37050e626530a46a68f74e63683947a8acff92569f979a/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:c8ae8a0f02f57a6e61203a31428fa1d677cbe50c93622b4149d5c0f319c1d19e", size = 150395, upload-time = "2025-10-14T04:41:42.539Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/60/bb/2186cb2f2bbaea6338cad15ce23a67f9b0672929744381e28b0592676824/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:47cc91b2f4dd2833fddaedd2893006b0106129d4b94fdb6af1f4ce5a9965577c", size = 143680, upload-time = "2025-10-14T04:41:43.661Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/7d/a5/bf6f13b772fbb2a90360eb620d52ed8f796f3c5caee8398c3b2eb7b1c60d/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:82004af6c302b5d3ab2cfc4cc5f29db16123b1a8417f2e25f9066f91d4411090", size = 162045, upload-time = "2025-10-14T04:41:44.821Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/df/c5/d1be898bf0dc3ef9030c3825e5d3b83f2c528d207d246cbabe245966808d/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:2b7d8f6c26245217bd2ad053761201e9f9680f8ce52f0fcd8d0755aeae5b2152", size = 149687, upload-time = "2025-10-14T04:41:46.442Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/a5/42/90c1f7b9341eef50c8a1cb3f098ac43b0508413f33affd762855f67a410e/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:799a7a5e4fb2d5898c60b640fd4981d6a25f1c11790935a44ce38c54e985f828", size = 160014, upload-time = "2025-10-14T04:41:47.631Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/76/be/4d3ee471e8145d12795ab655ece37baed0929462a86e72372fd25859047c/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:99ae2cffebb06e6c22bdc25801d7b30f503cc87dbd283479e7b606f70aff57ec", size = 154044, upload-time = "2025-10-14T04:41:48.81Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/b0/6f/8f7af07237c34a1defe7defc565a9bc1807762f672c0fde711a4b22bf9c0/charset_normalizer-3.4.4-cp314-cp314-win32.whl", hash = "sha256:f9d332f8c2a2fcbffe1378594431458ddbef721c1769d78e2cbc06280d8155f9", size = 99940, upload-time = "2025-10-14T04:41:49.946Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/4b/51/8ade005e5ca5b0d80fb4aff72a3775b325bdc3d27408c8113811a7cbe640/charset_normalizer-3.4.4-cp314-cp314-win_amd64.whl", hash = "sha256:8a6562c3700cce886c5be75ade4a5db4214fda19fede41d9792d100288d8f94c", size = 107104, upload-time = "2025-10-14T04:41:51.051Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/da/5f/6b8f83a55bb8278772c5ae54a577f3099025f9ade59d0136ac24a0df4bde/charset_normalizer-3.4.4-cp314-cp314-win_arm64.whl", hash = "sha256:de00632ca48df9daf77a2c65a484531649261ec9f25489917f09e455cb09ddb2", size = 100743, upload-time = "2025-10-14T04:41:52.122Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/0a/4c/925909008ed5a988ccbb72dcc897407e5d6d3bd72410d69e051fc0c14647/charset_normalizer-3.4.4-py3-none-any.whl", hash = "sha256:7a32c560861a02ff789ad905a2fe94e3f840803362c84fecf1851cb4cf3dc37f", size = 53402, upload-time = "2025-10-14T04:42:31.76Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "click"
                                                                  +version = "8.3.1"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +dependencies = [
                                                                  +    { name = "colorama", marker = "sys_platform == 'win32'" },
                                                                  +]
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/3d/fa/656b739db8587d7b5dfa22e22ed02566950fbfbcdc20311993483657a5c0/click-8.3.1.tar.gz", hash = "sha256:12ff4785d337a1bb490bb7e9c2b1ee5da3112e94a8622f26a6c77f5d2fc6842a", size = 295065, upload-time = "2025-11-15T20:45:42.706Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/98/78/01c019cdb5d6498122777c1a43056ebb3ebfeef2076d9d026bfe15583b2b/click-8.3.1-py3-none-any.whl", hash = "sha256:981153a64e25f12d547d3426c367a4857371575ee7ad18df2a6183ab0545b2a6", size = 108274, upload-time = "2025-11-15T20:45:41.139Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "colorama"
                                                                  +version = "0.4.6"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "comm"
                                                                  +version = "0.2.3"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/4c/13/7d740c5849255756bc17888787313b61fd38a0a8304fc4f073dfc46122aa/comm-0.2.3.tar.gz", hash = "sha256:2dc8048c10962d55d7ad693be1e7045d891b7ce8d999c97963a5e3e99c055971", size = 6319, upload-time = "2025-07-25T14:02:04.452Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/60/97/891a0971e1e4a8c5d2b20bbe0e524dc04548d2307fee33cdeba148fd4fc7/comm-0.2.3-py3-none-any.whl", hash = "sha256:c615d91d75f7f04f095b30d1c1711babd43bdc6419c1be9886a85f2f4e489417", size = 7294, upload-time = "2025-07-25T14:02:02.896Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "coverage"
                                                                  +version = "7.12.0"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/89/26/4a96807b193b011588099c3b5c89fbb05294e5b90e71018e065465f34eb6/coverage-7.12.0.tar.gz", hash = "sha256:fc11e0a4e372cb5f282f16ef90d4a585034050ccda536451901abfb19a57f40c", size = 819341, upload-time = "2025-11-18T13:34:20.766Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/02/bf/638c0427c0f0d47638242e2438127f3c8ee3cfc06c7fdeb16778ed47f836/coverage-7.12.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:29644c928772c78512b48e14156b81255000dcfd4817574ff69def189bcb3647", size = 217704, upload-time = "2025-11-18T13:32:28.906Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/08/e1/706fae6692a66c2d6b871a608bbde0da6281903fa0e9f53a39ed441da36a/coverage-7.12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8638cbb002eaa5d7c8d04da667813ce1067080b9a91099801a0053086e52b736", size = 218064, upload-time = "2025-11-18T13:32:30.161Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/a9/8b/eb0231d0540f8af3ffda39720ff43cb91926489d01524e68f60e961366e4/coverage-7.12.0-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:083631eeff5eb9992c923e14b810a179798bb598e6a0dd60586819fc23be6e60", size = 249560, upload-time = "2025-11-18T13:32:31.835Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/e9/a1/67fb52af642e974d159b5b379e4d4c59d0ebe1288677fbd04bbffe665a82/coverage-7.12.0-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:99d5415c73ca12d558e07776bd957c4222c687b9f1d26fa0e1b57e3598bdcde8", size = 252318, upload-time = "2025-11-18T13:32:33.178Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/41/e5/38228f31b2c7665ebf9bdfdddd7a184d56450755c7e43ac721c11a4b8dab/coverage-7.12.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e949ebf60c717c3df63adb4a1a366c096c8d7fd8472608cd09359e1bd48ef59f", size = 253403, upload-time = "2025-11-18T13:32:34.45Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/ec/4b/df78e4c8188f9960684267c5a4897836f3f0f20a20c51606ee778a1d9749/coverage-7.12.0-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:6d907ddccbca819afa2cd014bc69983b146cca2735a0b1e6259b2a6c10be1e70", size = 249984, upload-time = "2025-11-18T13:32:35.747Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/ba/51/bb163933d195a345c6f63eab9e55743413d064c291b6220df754075c2769/coverage-7.12.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b1518ecbad4e6173f4c6e6c4a46e49555ea5679bf3feda5edb1b935c7c44e8a0", size = 251339, upload-time = "2025-11-18T13:32:37.352Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/15/40/c9b29cdb8412c837cdcbc2cfa054547dd83affe6cbbd4ce4fdb92b6ba7d1/coverage-7.12.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:51777647a749abdf6f6fd8c7cffab12de68ab93aab15efc72fbbb83036c2a068", size = 249489, upload-time = "2025-11-18T13:32:39.212Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/c8/da/b3131e20ba07a0de4437a50ef3b47840dfabf9293675b0cd5c2c7f66dd61/coverage-7.12.0-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:42435d46d6461a3b305cdfcad7cdd3248787771f53fe18305548cba474e6523b", size = 249070, upload-time = "2025-11-18T13:32:40.598Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/70/81/b653329b5f6302c08d683ceff6785bc60a34be9ae92a5c7b63ee7ee7acec/coverage-7.12.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:5bcead88c8423e1855e64b8057d0544e33e4080b95b240c2a355334bb7ced937", size = 250929, upload-time = "2025-11-18T13:32:42.915Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/a3/00/250ac3bca9f252a5fb1338b5ad01331ebb7b40223f72bef5b1b2cb03aa64/coverage-7.12.0-cp312-cp312-win32.whl", hash = "sha256:dcbb630ab034e86d2a0f79aefd2be07e583202f41e037602d438c80044957baa", size = 220241, upload-time = "2025-11-18T13:32:44.665Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/64/1c/77e79e76d37ce83302f6c21980b45e09f8aa4551965213a10e62d71ce0ab/coverage-7.12.0-cp312-cp312-win_amd64.whl", hash = "sha256:2fd8354ed5d69775ac42986a691fbf68b4084278710cee9d7c3eaa0c28fa982a", size = 221051, upload-time = "2025-11-18T13:32:46.008Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/31/f5/641b8a25baae564f9e52cac0e2667b123de961985709a004e287ee7663cc/coverage-7.12.0-cp312-cp312-win_arm64.whl", hash = "sha256:737c3814903be30695b2de20d22bcc5428fdae305c61ba44cdc8b3252984c49c", size = 219692, upload-time = "2025-11-18T13:32:47.372Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/b8/14/771700b4048774e48d2c54ed0c674273702713c9ee7acdfede40c2666747/coverage-7.12.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:47324fffca8d8eae7e185b5bb20c14645f23350f870c1649003618ea91a78941", size = 217725, upload-time = "2025-11-18T13:32:49.22Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/17/a7/3aa4144d3bcb719bf67b22d2d51c2d577bf801498c13cb08f64173e80497/coverage-7.12.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:ccf3b2ede91decd2fb53ec73c1f949c3e034129d1e0b07798ff1d02ea0c8fa4a", size = 218098, upload-time = "2025-11-18T13:32:50.78Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/fc/9c/b846bbc774ff81091a12a10203e70562c91ae71badda00c5ae5b613527b1/coverage-7.12.0-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:b365adc70a6936c6b0582dc38746b33b2454148c02349345412c6e743efb646d", size = 249093, upload-time = "2025-11-18T13:32:52.554Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/76/b6/67d7c0e1f400b32c883e9342de4a8c2ae7c1a0b57c5de87622b7262e2309/coverage-7.12.0-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:bc13baf85cd8a4cfcf4a35c7bc9d795837ad809775f782f697bf630b7e200211", size = 251686, upload-time = "2025-11-18T13:32:54.862Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/cc/75/b095bd4b39d49c3be4bffbb3135fea18a99a431c52dd7513637c0762fecb/coverage-7.12.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:099d11698385d572ceafb3288a5b80fe1fc58bf665b3f9d362389de488361d3d", size = 252930, upload-time = "2025-11-18T13:32:56.417Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/6e/f3/466f63015c7c80550bead3093aacabf5380c1220a2a93c35d374cae8f762/coverage-7.12.0-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:473dc45d69694069adb7680c405fb1e81f60b2aff42c81e2f2c3feaf544d878c", size = 249296, upload-time = "2025-11-18T13:32:58.074Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/27/86/eba2209bf2b7e28c68698fc13437519a295b2d228ba9e0ec91673e09fa92/coverage-7.12.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:583f9adbefd278e9de33c33d6846aa8f5d164fa49b47144180a0e037f0688bb9", size = 251068, upload-time = "2025-11-18T13:32:59.646Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/ec/55/ca8ae7dbba962a3351f18940b359b94c6bafdd7757945fdc79ec9e452dc7/coverage-7.12.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b2089cc445f2dc0af6f801f0d1355c025b76c24481935303cf1af28f636688f0", size = 249034, upload-time = "2025-11-18T13:33:01.481Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/7a/d7/39136149325cad92d420b023b5fd900dabdd1c3a0d1d5f148ef4a8cedef5/coverage-7.12.0-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:950411f1eb5d579999c5f66c62a40961f126fc71e5e14419f004471957b51508", size = 248853, upload-time = "2025-11-18T13:33:02.935Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/fe/b6/76e1add8b87ef60e00643b0b7f8f7bb73d4bf5249a3be19ebefc5793dd25/coverage-7.12.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:b1aab7302a87bafebfe76b12af681b56ff446dc6f32ed178ff9c092ca776e6bc", size = 250619, upload-time = "2025-11-18T13:33:04.336Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/95/87/924c6dc64f9203f7a3c1832a6a0eee5a8335dbe5f1bdadcc278d6f1b4d74/coverage-7.12.0-cp313-cp313-win32.whl", hash = "sha256:d7e0d0303c13b54db495eb636bc2465b2fb8475d4c8bcec8fe4b5ca454dfbae8", size = 220261, upload-time = "2025-11-18T13:33:06.493Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/91/77/dd4aff9af16ff776bf355a24d87eeb48fc6acde54c907cc1ea89b14a8804/coverage-7.12.0-cp313-cp313-win_amd64.whl", hash = "sha256:ce61969812d6a98a981d147d9ac583a36ac7db7766f2e64a9d4d059c2fe29d07", size = 221072, upload-time = "2025-11-18T13:33:07.926Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/70/49/5c9dc46205fef31b1b226a6e16513193715290584317fd4df91cdaf28b22/coverage-7.12.0-cp313-cp313-win_arm64.whl", hash = "sha256:bcec6f47e4cb8a4c2dc91ce507f6eefc6a1b10f58df32cdc61dff65455031dfc", size = 219702, upload-time = "2025-11-18T13:33:09.631Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/9b/62/f87922641c7198667994dd472a91e1d9b829c95d6c29529ceb52132436ad/coverage-7.12.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:459443346509476170d553035e4a3eed7b860f4fe5242f02de1010501956ce87", size = 218420, upload-time = "2025-11-18T13:33:11.153Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/85/dd/1cc13b2395ef15dbb27d7370a2509b4aee77890a464fb35d72d428f84871/coverage-7.12.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:04a79245ab2b7a61688958f7a855275997134bc84f4a03bc240cf64ff132abf6", size = 218773, upload-time = "2025-11-18T13:33:12.569Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/74/40/35773cc4bb1e9d4658d4fb669eb4195b3151bef3bbd6f866aba5cd5dac82/coverage-7.12.0-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:09a86acaaa8455f13d6a99221d9654df249b33937b4e212b4e5a822065f12aa7", size = 260078, upload-time = "2025-11-18T13:33:14.037Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/ec/ee/231bb1a6ffc2905e396557585ebc6bdc559e7c66708376d245a1f1d330fc/coverage-7.12.0-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:907e0df1b71ba77463687a74149c6122c3f6aac56c2510a5d906b2f368208560", size = 262144, upload-time = "2025-11-18T13:33:15.601Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/28/be/32f4aa9f3bf0b56f3971001b56508352c7753915345d45fab4296a986f01/coverage-7.12.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9b57e2d0ddd5f0582bae5437c04ee71c46cd908e7bc5d4d0391f9a41e812dd12", size = 264574, upload-time = "2025-11-18T13:33:17.354Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/68/7c/00489fcbc2245d13ab12189b977e0cf06ff3351cb98bc6beba8bd68c5902/coverage-7.12.0-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:58c1c6aa677f3a1411fe6fb28ec3a942e4f665df036a3608816e0847fad23296", size = 259298, upload-time = "2025-11-18T13:33:18.958Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/96/b4/f0760d65d56c3bea95b449e02570d4abd2549dc784bf39a2d4721a2d8ceb/coverage-7.12.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:4c589361263ab2953e3c4cd2a94db94c4ad4a8e572776ecfbad2389c626e4507", size = 262150, upload-time = "2025-11-18T13:33:20.644Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/c5/71/9a9314df00f9326d78c1e5a910f520d599205907432d90d1c1b7a97aa4b1/coverage-7.12.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:91b810a163ccad2e43b1faa11d70d3cf4b6f3d83f9fd5f2df82a32d47b648e0d", size = 259763, upload-time = "2025-11-18T13:33:22.189Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/10/34/01a0aceed13fbdf925876b9a15d50862eb8845454301fe3cdd1df08b2182/coverage-7.12.0-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:40c867af715f22592e0d0fb533a33a71ec9e0f73a6945f722a0c85c8c1cbe3a2", size = 258653, upload-time = "2025-11-18T13:33:24.239Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/8d/04/81d8fd64928acf1574bbb0181f66901c6c1c6279c8ccf5f84259d2c68ae9/coverage-7.12.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:68b0d0a2d84f333de875666259dadf28cc67858bc8fd8b3f1eae84d3c2bec455", size = 260856, upload-time = "2025-11-18T13:33:26.365Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/f2/76/fa2a37bfaeaf1f766a2d2360a25a5297d4fb567098112f6517475eee120b/coverage-7.12.0-cp313-cp313t-win32.whl", hash = "sha256:73f9e7fbd51a221818fd11b7090eaa835a353ddd59c236c57b2199486b116c6d", size = 220936, upload-time = "2025-11-18T13:33:28.165Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/f9/52/60f64d932d555102611c366afb0eb434b34266b1d9266fc2fe18ab641c47/coverage-7.12.0-cp313-cp313t-win_amd64.whl", hash = "sha256:24cff9d1f5743f67db7ba46ff284018a6e9aeb649b67aa1e70c396aa1b7cb23c", size = 222001, upload-time = "2025-11-18T13:33:29.656Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/77/df/c303164154a5a3aea7472bf323b7c857fed93b26618ed9fc5c2955566bb0/coverage-7.12.0-cp313-cp313t-win_arm64.whl", hash = "sha256:c87395744f5c77c866d0f5a43d97cc39e17c7f1cb0115e54a2fe67ca75c5d14d", size = 220273, upload-time = "2025-11-18T13:33:31.415Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/bf/2e/fc12db0883478d6e12bbd62d481210f0c8daf036102aa11434a0c5755825/coverage-7.12.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:a1c59b7dc169809a88b21a936eccf71c3895a78f5592051b1af8f4d59c2b4f92", size = 217777, upload-time = "2025-11-18T13:33:32.86Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/1f/c1/ce3e525d223350c6ec16b9be8a057623f54226ef7f4c2fee361ebb6a02b8/coverage-7.12.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:8787b0f982e020adb732b9f051f3e49dd5054cebbc3f3432061278512a2b1360", size = 218100, upload-time = "2025-11-18T13:33:34.532Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/15/87/113757441504aee3808cb422990ed7c8bcc2d53a6779c66c5adef0942939/coverage-7.12.0-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:5ea5a9f7dc8877455b13dd1effd3202e0bca72f6f3ab09f9036b1bcf728f69ac", size = 249151, upload-time = "2025-11-18T13:33:36.135Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/d9/1d/9529d9bd44049b6b05bb319c03a3a7e4b0a8a802d28fa348ad407e10706d/coverage-7.12.0-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:fdba9f15849534594f60b47c9a30bc70409b54947319a7c4fd0e8e3d8d2f355d", size = 251667, upload-time = "2025-11-18T13:33:37.996Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/11/bb/567e751c41e9c03dc29d3ce74b8c89a1e3396313e34f255a2a2e8b9ebb56/coverage-7.12.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a00594770eb715854fb1c57e0dea08cce6720cfbc531accdb9850d7c7770396c", size = 253003, upload-time = "2025-11-18T13:33:39.553Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/e4/b3/c2cce2d8526a02fb9e9ca14a263ca6fc074449b33a6afa4892838c903528/coverage-7.12.0-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:5560c7e0d82b42eb1951e4f68f071f8017c824ebfd5a6ebe42c60ac16c6c2434", size = 249185, upload-time = "2025-11-18T13:33:42.086Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/0e/a7/967f93bb66e82c9113c66a8d0b65ecf72fc865adfba5a145f50c7af7e58d/coverage-7.12.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:d6c2e26b481c9159c2773a37947a9718cfdc58893029cdfb177531793e375cfc", size = 251025, upload-time = "2025-11-18T13:33:43.634Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/b9/b2/f2f6f56337bc1af465d5b2dc1ee7ee2141b8b9272f3bf6213fcbc309a836/coverage-7.12.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:6e1a8c066dabcde56d5d9fed6a66bc19a2883a3fe051f0c397a41fc42aedd4cc", size = 248979, upload-time = "2025-11-18T13:33:46.04Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/f4/7a/bf4209f45a4aec09d10a01a57313a46c0e0e8f4c55ff2965467d41a92036/coverage-7.12.0-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:f7ba9da4726e446d8dd8aae5a6cd872511184a5d861de80a86ef970b5dacce3e", size = 248800, upload-time = "2025-11-18T13:33:47.546Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/b8/b7/1e01b8696fb0521810f60c5bbebf699100d6754183e6cc0679bf2ed76531/coverage-7.12.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:e0f483ab4f749039894abaf80c2f9e7ed77bbf3c737517fb88c8e8e305896a17", size = 250460, upload-time = "2025-11-18T13:33:49.537Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/71/ae/84324fb9cb46c024760e706353d9b771a81b398d117d8c1fe010391c186f/coverage-7.12.0-cp314-cp314-win32.whl", hash = "sha256:76336c19a9ef4a94b2f8dc79f8ac2da3f193f625bb5d6f51a328cd19bfc19933", size = 220533, upload-time = "2025-11-18T13:33:51.16Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/e2/71/1033629deb8460a8f97f83e6ac4ca3b93952e2b6f826056684df8275e015/coverage-7.12.0-cp314-cp314-win_amd64.whl", hash = "sha256:7c1059b600aec6ef090721f8f633f60ed70afaffe8ecab85b59df748f24b31fe", size = 221348, upload-time = "2025-11-18T13:33:52.776Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/0a/5f/ac8107a902f623b0c251abdb749be282dc2ab61854a8a4fcf49e276fce2f/coverage-7.12.0-cp314-cp314-win_arm64.whl", hash = "sha256:172cf3a34bfef42611963e2b661302a8931f44df31629e5b1050567d6b90287d", size = 219922, upload-time = "2025-11-18T13:33:54.316Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/79/6e/f27af2d4da367f16077d21ef6fe796c874408219fa6dd3f3efe7751bd910/coverage-7.12.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:aa7d48520a32cb21c7a9b31f81799e8eaec7239db36c3b670be0fa2403828d1d", size = 218511, upload-time = "2025-11-18T13:33:56.343Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/67/dd/65fd874aa460c30da78f9d259400d8e6a4ef457d61ab052fd248f0050558/coverage-7.12.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:90d58ac63bc85e0fb919f14d09d6caa63f35a5512a2205284b7816cafd21bb03", size = 218771, upload-time = "2025-11-18T13:33:57.966Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/55/e0/7c6b71d327d8068cb79c05f8f45bf1b6145f7a0de23bbebe63578fe5240a/coverage-7.12.0-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:ca8ecfa283764fdda3eae1bdb6afe58bf78c2c3ec2b2edcb05a671f0bba7b3f9", size = 260151, upload-time = "2025-11-18T13:33:59.597Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/49/ce/4697457d58285b7200de6b46d606ea71066c6e674571a946a6ea908fb588/coverage-7.12.0-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:874fe69a0785d96bd066059cd4368022cebbec1a8958f224f0016979183916e6", size = 262257, upload-time = "2025-11-18T13:34:01.166Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/2f/33/acbc6e447aee4ceba88c15528dbe04a35fb4d67b59d393d2e0d6f1e242c1/coverage-7.12.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5b3c889c0b8b283a24d721a9eabc8ccafcfc3aebf167e4cd0d0e23bf8ec4e339", size = 264671, upload-time = "2025-11-18T13:34:02.795Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/87/ec/e2822a795c1ed44d569980097be839c5e734d4c0c1119ef8e0a073496a30/coverage-7.12.0-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:8bb5b894b3ec09dcd6d3743229dc7f2c42ef7787dc40596ae04c0edda487371e", size = 259231, upload-time = "2025-11-18T13:34:04.397Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/72/c5/a7ec5395bb4a49c9b7ad97e63f0c92f6bf4a9e006b1393555a02dae75f16/coverage-7.12.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:79a44421cd5fba96aa57b5e3b5a4d3274c449d4c622e8f76882d76635501fd13", size = 262137, upload-time = "2025-11-18T13:34:06.068Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/67/0c/02c08858b764129f4ecb8e316684272972e60777ae986f3865b10940bdd6/coverage-7.12.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:33baadc0efd5c7294f436a632566ccc1f72c867f82833eb59820ee37dc811c6f", size = 259745, upload-time = "2025-11-18T13:34:08.04Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/5a/04/4fd32b7084505f3829a8fe45c1a74a7a728cb251aaadbe3bec04abcef06d/coverage-7.12.0-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:c406a71f544800ef7e9e0000af706b88465f3573ae8b8de37e5f96c59f689ad1", size = 258570, upload-time = "2025-11-18T13:34:09.676Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/48/35/2365e37c90df4f5342c4fa202223744119fe31264ee2924f09f074ea9b6d/coverage-7.12.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:e71bba6a40883b00c6d571599b4627f50c360b3d0d02bfc658168936be74027b", size = 260899, upload-time = "2025-11-18T13:34:11.259Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/05/56/26ab0464ca733fa325e8e71455c58c1c374ce30f7c04cebb88eabb037b18/coverage-7.12.0-cp314-cp314t-win32.whl", hash = "sha256:9157a5e233c40ce6613dead4c131a006adfda70e557b6856b97aceed01b0e27a", size = 221313, upload-time = "2025-11-18T13:34:12.863Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/da/1c/017a3e1113ed34d998b27d2c6dba08a9e7cb97d362f0ec988fcd873dcf81/coverage-7.12.0-cp314-cp314t-win_amd64.whl", hash = "sha256:e84da3a0fd233aeec797b981c51af1cabac74f9bd67be42458365b30d11b5291", size = 222423, upload-time = "2025-11-18T13:34:15.14Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/4c/36/bcc504fdd5169301b52568802bb1b9cdde2e27a01d39fbb3b4b508ab7c2c/coverage-7.12.0-cp314-cp314t-win_arm64.whl", hash = "sha256:01d24af36fedda51c2b1aca56e4330a3710f83b02a5ff3743a6b015ffa7c9384", size = 220459, upload-time = "2025-11-18T13:34:17.222Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/ce/a3/43b749004e3c09452e39bb56347a008f0a0668aad37324a99b5c8ca91d9e/coverage-7.12.0-py3-none-any.whl", hash = "sha256:159d50c0b12e060b15ed3d39f87ed43d4f7f7ad40b8a534f4dd331adbb51104a", size = 209503, upload-time = "2025-11-18T13:34:18.892Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "cramjam"
                                                                  +version = "2.11.0"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/14/12/34bf6e840a79130dfd0da7badfb6f7810b8fcfd60e75b0539372667b41b6/cramjam-2.11.0.tar.gz", hash = "sha256:5c82500ed91605c2d9781380b378397012e25127e89d64f460fea6aeac4389b4", size = 99100, upload-time = "2025-07-27T21:25:07.559Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/0b/0d/7c84c913a5fae85b773a9dcf8874390f9d68ba0fcc6630efa7ff1541b950/cramjam-2.11.0-cp312-cp312-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:dba5c14b8b4f73ea1e65720f5a3fe4280c1d27761238378be8274135c60bbc6e", size = 3553368, upload-time = "2025-07-27T21:22:27.162Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/2b/cc/4f6d185d8a744776f53035e72831ff8eefc2354f46ab836f4bd3c4f6c138/cramjam-2.11.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:11eb40722b3fcf3e6890fba46c711bf60f8dc26360a24876c85e52d76c33b25b", size = 1860014, upload-time = "2025-07-27T21:22:28.738Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/1c/a8/626c76263085c6d5ded0e71823b411e9522bfc93ba6cc59855a5869296e7/cramjam-2.11.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:aeb26e2898994b6e8319f19a4d37c481512acdcc6d30e1b5ecc9d8ec57e835cb", size = 1693512, upload-time = "2025-07-27T21:22:30.999Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/e9/52/0851a16a62447532e30ba95a80e638926fdea869a34b4b5b9d0a020083ba/cramjam-2.11.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:4f8d82081ed7d8fe52c982bd1f06e4c7631a73fe1fb6d4b3b3f2404f87dc40fe", size = 2025285, upload-time = "2025-07-27T21:22:32.954Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/98/76/122e444f59dbc216451d8e3d8282c9665dc79eaf822f5f1470066be1b695/cramjam-2.11.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:092a3ec26e0a679305018380e4f652eae1b6dfe3fc3b154ee76aa6b92221a17c", size = 1761327, upload-time = "2025-07-27T21:22:34.484Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/a3/bc/3a0189aef1af2b29632c039c19a7a1b752bc21a4053582a5464183a0ad3d/cramjam-2.11.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:529d6d667c65fd105d10bd83d1cd3f9869f8fd6c66efac9415c1812281196a92", size = 1854075, upload-time = "2025-07-27T21:22:36.157Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/2e/80/8a6343b13778ce52d94bb8d5365a30c3aa951276b1857201fe79d7e2ad25/cramjam-2.11.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:555eb9c90c450e0f76e27d9ff064e64a8b8c6478ab1a5594c91b7bc5c82fd9f0", size = 2032710, upload-time = "2025-07-27T21:22:38.17Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/df/6b/cd1778a207c29eda10791e3dfa018b588001928086e179fc71254793c625/cramjam-2.11.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5edf4c9e32493035b514cf2ba0c969d81ccb31de63bd05490cc8bfe3b431674e", size = 2068353, upload-time = "2025-07-27T21:22:39.615Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/dc/f0/5c2a5cd5711032f3b191ca50cb786c17689b4a9255f9f768866e6c9f04d9/cramjam-2.11.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fa2fe41f48c4d58d923803383b0737f048918b5a0d10390de9628bb6272b107", size = 1978104, upload-time = "2025-07-27T21:22:41.106Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/f9/8b/b363a5fb2c3347504fe9a64f8d0f1e276844f0e532aa7162c061cd1ffee4/cramjam-2.11.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9ca14cf1cabdb0b77d606db1bb9e9ca593b1dbd421fcaf251ec9a5431ec449f3", size = 2030779, upload-time = "2025-07-27T21:22:42.969Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/78/7b/d83dad46adb6c988a74361f81ad9c5c22642be53ad88616a19baedd06243/cramjam-2.11.0-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:309e95bf898829476bccf4fd2c358ec00e7ff73a12f95a3cdeeba4bb1d3683d5", size = 2155297, upload-time = "2025-07-27T21:22:44.6Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/1a/be/60d9be4cb33d8740a4aa94c7513f2ef3c4eba4fd13536f086facbafade71/cramjam-2.11.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:86dca35d2f15ef22922411496c220f3c9e315d5512f316fe417461971cc1648d", size = 2169255, upload-time = "2025-07-27T21:22:46.534Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/11/b0/4a595f01a243aec8ad272b160b161c44351190c35d98d7787919d962e9e5/cramjam-2.11.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:193c6488bd2f514cbc0bef5c18fad61a5f9c8d059dd56edf773b3b37f0e85496", size = 2155651, upload-time = "2025-07-27T21:22:48.46Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/38/47/7776659aaa677046b77f527106e53ddd47373416d8fcdb1e1a881ec5dc06/cramjam-2.11.0-cp312-cp312-win32.whl", hash = "sha256:514e2c008a8b4fa823122ca3ecab896eac41d9aa0f5fc881bd6264486c204e32", size = 1603568, upload-time = "2025-07-27T21:22:50.084Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/75/b1/d53002729cfd94c5844ddfaf1233c86d29f2dbfc1b764a6562c41c044199/cramjam-2.11.0-cp312-cp312-win_amd64.whl", hash = "sha256:53fed080476d5f6ad7505883ec5d1ec28ba36c2273db3b3e92d7224fe5e463db", size = 1709287, upload-time = "2025-07-27T21:22:51.534Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/0a/8b/406c5dc0f8e82385519d8c299c40fd6a56d97eca3fcd6f5da8dad48de75b/cramjam-2.11.0-cp313-cp313-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:2c289729cc1c04e88bafa48b51082fb462b0a57dbc96494eab2be9b14dca62af", size = 3553330, upload-time = "2025-07-27T21:22:53.124Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/00/ad/4186884083d6e4125b285903e17841827ab0d6d0cffc86216d27ed91e91d/cramjam-2.11.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:045201ee17147e36cf43d8ae2fa4b4836944ac672df5874579b81cf6d40f1a1f", size = 1859756, upload-time = "2025-07-27T21:22:54.821Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/54/01/91b485cf76a7efef638151e8a7d35784dae2c4ff221b1aec2c083e4b106d/cramjam-2.11.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:619cd195d74c9e1d2a3ad78d63451d35379c84bd851aec552811e30842e1c67a", size = 1693609, upload-time = "2025-07-27T21:22:56.331Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/cd/84/d0c80d279b2976870fc7d10f15dcb90a3c10c06566c6964b37c152694974/cramjam-2.11.0-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6eb3ae5ab72edb2ed68bdc0f5710f0a6cad7fd778a610ec2c31ee15e32d3921e", size = 2024912, upload-time = "2025-07-27T21:22:57.915Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/d6/70/88f2a5cb904281ed5d3c111b8f7d5366639817a5470f059bcd26833fc870/cramjam-2.11.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df7da3f4b19e3078f9635f132d31b0a8196accb2576e3213ddd7a77f93317c20", size = 1760715, upload-time = "2025-07-27T21:22:59.528Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/b2/06/cf5b02081132537d28964fb385fcef9ed9f8a017dd7d8c59d317e53ba50d/cramjam-2.11.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:57286b289cd557ac76c24479d8ecfb6c3d5b854cce54ccc7671f9a2f5e2a2708", size = 1853782, upload-time = "2025-07-27T21:23:01.07Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/57/27/63525087ed40a53d1867021b9c4858b80cc86274ffe7225deed067d88d92/cramjam-2.11.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:28952fbbf8b32c0cb7fa4be9bcccfca734bf0d0989f4b509dc7f2f70ba79ae06", size = 2032354, upload-time = "2025-07-27T21:23:03.021Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/c3/ef/dbba082c6ebfb6410da4dd39a64e654d7194fcfd4567f85991a83fa4ec32/cramjam-2.11.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:78ed2e4099812a438b545dfbca1928ec825e743cd253bc820372d6ef8c3adff4", size = 2068007, upload-time = "2025-07-27T21:23:04.526Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/35/ce/d902b9358a46a086938feae83b2251720e030f06e46006f4c1fc0ac9da20/cramjam-2.11.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d9aecd5c3845d415bd6c9957c93de8d93097e269137c2ecb0e5a5256374bdc8", size = 1977485, upload-time = "2025-07-27T21:23:06.058Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/e8/03/982f54553244b0afcbdb2ad2065d460f0ab05a72a96896a969a1ca136a1e/cramjam-2.11.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:362fcf4d6f5e1242a4540812455f5a594949190f6fbc04f2ffbfd7ae0266d788", size = 2030447, upload-time = "2025-07-27T21:23:07.679Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/74/5f/748e54cdb665ec098ec519e23caacc65fc5ae58718183b071e33fc1c45b4/cramjam-2.11.0-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:13240b3dea41b1174456cb9426843b085dc1a2bdcecd9ee2d8f65ac5703374b0", size = 2154949, upload-time = "2025-07-27T21:23:09.366Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/69/81/c4e6cb06ed69db0dc81f9a8b1dc74995ebd4351e7a1877143f7031ff2700/cramjam-2.11.0-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:c54eed83726269594b9086d827decc7d2015696e31b99bf9b69b12d9063584fe", size = 2168925, upload-time = "2025-07-27T21:23:10.976Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/13/5b/966365523ce8290a08e163e3b489626c5adacdff2b3da9da1b0823dfb14e/cramjam-2.11.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:f8195006fdd0fc0a85b19df3d64a3ef8a240e483ae1dfc7ac6a4316019eb5df2", size = 2154950, upload-time = "2025-07-27T21:23:12.514Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/3a/7d/7f8eb5c534b72b32c6eb79d74585bfee44a9a5647a14040bb65c31c2572d/cramjam-2.11.0-cp313-cp313-win32.whl", hash = "sha256:ccf30e3fe6d770a803dcdf3bb863fa44ba5dc2664d4610ba2746a3c73599f2e4", size = 1603199, upload-time = "2025-07-27T21:23:14.38Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/37/05/47b5e0bf7c41a3b1cdd3b7c2147f880c93226a6bef1f5d85183040cbdece/cramjam-2.11.0-cp313-cp313-win_amd64.whl", hash = "sha256:ee36348a204f0a68b03400f4736224e9f61d1c6a1582d7f875c1ca56f0254268", size = 1708924, upload-time = "2025-07-27T21:23:16.332Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/de/07/a1051cdbbe6d723df16d756b97f09da7c1adb69e29695c58f0392bc12515/cramjam-2.11.0-cp314-cp314-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:7ba5e38c9fbd06f086f4a5a64a1a5b7b417cd3f8fc07a20e5c03651f72f36100", size = 3554141, upload-time = "2025-07-27T21:23:17.938Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/74/66/58487d2e16ef3d04f51a7c7f0e69823e806744b4c21101e89da4873074bc/cramjam-2.11.0-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:b8adeee57b41fe08e4520698a4b0bd3cc76dbd81f99424b806d70a5256a391d3", size = 1860353, upload-time = "2025-07-27T21:23:19.593Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/67/b4/67f6254d166ffbcc9d5fa1b56876eaa920c32ebc8e9d3d525b27296b693b/cramjam-2.11.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:b96a74fa03a636c8a7d76f700d50e9a8bc17a516d6a72d28711225d641e30968", size = 1693832, upload-time = "2025-07-27T21:23:21.185Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/55/a3/4e0b31c0d454ae70c04684ed7c13d3c67b4c31790c278c1e788cb804fa4a/cramjam-2.11.0-cp314-cp314-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:c3811a56fa32e00b377ef79121c0193311fd7501f0fb378f254c7f083cc1fbe0", size = 2027080, upload-time = "2025-07-27T21:23:23.303Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/d9/c7/5e8eed361d1d3b8be14f38a54852c5370cc0ceb2c2d543b8ba590c34f080/cramjam-2.11.0-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5d927e87461f8a0d448e4ab5eb2bca9f31ca5d8ea86d70c6f470bb5bc666d7e", size = 1761543, upload-time = "2025-07-27T21:23:24.991Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/09/0c/06b7f8b0ce9fde89470505116a01fc0b6cb92d406c4fb1e46f168b5d3fa5/cramjam-2.11.0-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f1f5c450121430fd89cb5767e0a9728ecc65997768fd4027d069cb0368af62f9", size = 1854636, upload-time = "2025-07-27T21:23:26.987Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/6f/c6/6ebc02c9d5acdf4e5f2b1ec6e1252bd5feee25762246798ae823b3347457/cramjam-2.11.0-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:724aa7490be50235d97f07e2ca10067927c5d7f336b786ddbc868470e822aa25", size = 2032715, upload-time = "2025-07-27T21:23:28.603Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/a2/77/a122971c23f5ca4b53e4322c647ac7554626c95978f92d19419315dddd05/cramjam-2.11.0-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:54c4637122e7cfd7aac5c1d3d4c02364f446d6923ea34cf9d0e8816d6e7a4936", size = 2069039, upload-time = "2025-07-27T21:23:30.319Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/19/0f/f6121b90b86b9093c066889274d26a1de3f29969d45c2ed1ecbe2033cb78/cramjam-2.11.0-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17eb39b1696179fb471eea2de958fa21f40a2cd8bf6b40d428312d5541e19dc4", size = 1979566, upload-time = "2025-07-27T21:23:32.002Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/e0/a3/f95bc57fd7f4166ce6da816cfa917fb7df4bb80e669eb459d85586498414/cramjam-2.11.0-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:36aa5a798aa34e11813a80425a30d8e052d8de4a28f27bfc0368cfc454d1b403", size = 2030905, upload-time = "2025-07-27T21:23:33.696Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/fc/52/e429de4e8bc86ee65e090dae0f87f45abd271742c63fb2d03c522ffde28a/cramjam-2.11.0-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:449fca52774dc0199545fbf11f5128933e5a6833946707885cf7be8018017839", size = 2155592, upload-time = "2025-07-27T21:23:35.375Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/6c/6c/65a7a0207787ad39ad804af4da7f06a60149de19481d73d270b540657234/cramjam-2.11.0-cp314-cp314-musllinux_1_1_i686.whl", hash = "sha256:d87d37b3d476f4f7623c56a232045d25bd9b988314702ea01bd9b4a94948a778", size = 2170839, upload-time = "2025-07-27T21:23:37.197Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/b2/c5/5c5db505ba692bc844246b066e23901d5905a32baf2f33719c620e65887f/cramjam-2.11.0-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:26cb45c47d71982d76282e303931c6dd4baee1753e5d48f9a89b3a63e690b3a3", size = 2157236, upload-time = "2025-07-27T21:23:38.854Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/b0/22/88e6693e60afe98901e5bbe91b8dea193e3aa7f42e2770f9c3339f5c1065/cramjam-2.11.0-cp314-cp314-win32.whl", hash = "sha256:4efe919d443c2fd112fe25fe636a52f9628250c9a50d9bddb0488d8a6c09acc6", size = 1604136, upload-time = "2025-07-27T21:23:40.56Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/cc/f8/01618801cd59ccedcc99f0f96d20be67d8cfc3497da9ccaaad6b481781dd/cramjam-2.11.0-cp314-cp314-win_amd64.whl", hash = "sha256:ccec3524ea41b9abd5600e3e27001fd774199dbb4f7b9cb248fcee37d4bda84c", size = 1710272, upload-time = "2025-07-27T21:23:42.236Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/40/81/6cdb3ed222d13ae86bda77aafe8d50566e81a1169d49ed195b6263610704/cramjam-2.11.0-cp314-cp314t-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:966ac9358b23d21ecd895c418c048e806fd254e46d09b1ff0cdad2eba195ea3e", size = 3559671, upload-time = "2025-07-27T21:23:44.504Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/cb/43/52b7e54fe5ba1ef0270d9fdc43dabd7971f70ea2d7179be918c997820247/cramjam-2.11.0-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:387f09d647a0d38dcb4539f8a14281f8eb6bb1d3e023471eb18a5974b2121c86", size = 1867876, upload-time = "2025-07-27T21:23:46.987Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/9d/28/30d5b8d10acd30db3193bc562a313bff722888eaa45cfe32aa09389f2b24/cramjam-2.11.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:665b0d8fbbb1a7f300265b43926457ec78385200133e41fef19d85790fc1e800", size = 1695562, upload-time = "2025-07-27T21:23:48.644Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/d9/86/ec806f986e01b896a650655024ea52a13e25c3ac8a3a382f493089483cdc/cramjam-2.11.0-cp314-cp314t-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:ca905387c7a371531b9622d93471be4d745ef715f2890c3702479cd4fc85aa51", size = 2025056, upload-time = "2025-07-27T21:23:50.404Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/09/43/c2c17586b90848d29d63181f7d14b8bd3a7d00975ad46e3edf2af8af7e1f/cramjam-2.11.0-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c1aa56aef2c8af55a21ed39040a94a12b53fb23beea290f94d19a76027e2ffb", size = 1764084, upload-time = "2025-07-27T21:23:52.265Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/2b/a9/68bc334fadb434a61df10071dc8606702aa4f5b6cdb2df62474fc21d2845/cramjam-2.11.0-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e5db59c1cdfaa2ab85cc988e602d6919495f735ca8a5fd7603608eb1e23c26d5", size = 1854859, upload-time = "2025-07-27T21:23:54.085Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/5b/4e/b48e67835b5811ec5e9cb2e2bcba9c3fd76dab3e732569fe801b542c6ca9/cramjam-2.11.0-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b1f893014f00fe5e89a660a032e813bf9f6d91de74cd1490cdb13b2b59d0c9a3", size = 2035970, upload-time = "2025-07-27T21:23:55.758Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/c4/70/d2ac33d572b4d90f7f0f2c8a1d60fb48f06b128fdc2c05f9b49891bb0279/cramjam-2.11.0-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c26a1eb487947010f5de24943bd7c422dad955b2b0f8650762539778c380ca89", size = 2069320, upload-time = "2025-07-27T21:23:57.494Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/1d/4c/85cec77af4a74308ba5fca8e296c4e2f80ec465c537afc7ab1e0ca2f9a00/cramjam-2.11.0-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d5c8bfb438d94e7b892d1426da5fc4b4a5370cc360df9b8d9d77c33b896c37e", size = 1982668, upload-time = "2025-07-27T21:23:59.126Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/55/45/938546d1629e008cc3138df7c424ef892719b1796ff408a2ab8550032e5e/cramjam-2.11.0-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:cb1fb8c9337ab0da25a01c05d69a0463209c347f16512ac43be5986f3d1ebaf4", size = 2034028, upload-time = "2025-07-27T21:24:00.865Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/01/76/b5a53e20505555f1640e66dcf70394bcf51a1a3a072aa18ea35135a0f9ed/cramjam-2.11.0-cp314-cp314t-musllinux_1_1_armv7l.whl", hash = "sha256:1f6449f6de52dde3e2f1038284910c8765a397a25e2d05083870f3f5e7fc682c", size = 2155513, upload-time = "2025-07-27T21:24:02.92Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/84/12/8d3f6ceefae81bbe45a347fdfa2219d9f3ac75ebc304f92cd5fcb4fbddc5/cramjam-2.11.0-cp314-cp314t-musllinux_1_1_i686.whl", hash = "sha256:382dec4f996be48ed9c6958d4e30c2b89435d7c2c4dbf32480b3b8886293dd65", size = 2170035, upload-time = "2025-07-27T21:24:04.558Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/4b/85/3be6f0a1398f976070672be64f61895f8839857618a2d8cc0d3ab529d3dc/cramjam-2.11.0-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:d388bd5723732c3afe1dd1d181e4213cc4e1be210b080572e7d5749f6e955656", size = 2160229, upload-time = "2025-07-27T21:24:06.729Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/57/5e/66cfc3635511b20014bbb3f2ecf0095efb3049e9e96a4a9e478e4f3d7b78/cramjam-2.11.0-cp314-cp314t-win32.whl", hash = "sha256:0a70ff17f8e1d13f322df616505550f0f4c39eda62290acb56f069d4857037c8", size = 1610267, upload-time = "2025-07-27T21:24:08.428Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/ce/c6/c71e82e041c95ffe6a92ac707785500aa2a515a4339c2c7dd67e3c449249/cramjam-2.11.0-cp314-cp314t-win_amd64.whl", hash = "sha256:028400d699442d40dbda02f74158c73d05cb76587a12490d0bfedd958fd49188", size = 1713108, upload-time = "2025-07-27T21:24:10.147Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "cryptography"
                                                                  +version = "46.0.3"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +dependencies = [
                                                                  +    { name = "cffi", marker = "platform_python_implementation != 'PyPy'" },
                                                                  +]
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/9f/33/c00162f49c0e2fe8064a62cb92b93e50c74a72bc370ab92f86112b33ff62/cryptography-46.0.3.tar.gz", hash = "sha256:a8b17438104fed022ce745b362294d9ce35b4c2e45c1d958ad4a4b019285f4a1", size = 749258, upload-time = "2025-10-15T23:18:31.74Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/1d/42/9c391dd801d6cf0d561b5890549d4b27bafcc53b39c31a817e69d87c625b/cryptography-46.0.3-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:109d4ddfadf17e8e7779c39f9b18111a09efb969a301a31e987416a0191ed93a", size = 7225004, upload-time = "2025-10-15T23:16:52.239Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/1c/67/38769ca6b65f07461eb200e85fc1639b438bdc667be02cf7f2cd6a64601c/cryptography-46.0.3-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:09859af8466b69bc3c27bdf4f5d84a665e0f7ab5088412e9e2ec49758eca5cbc", size = 4296667, upload-time = "2025-10-15T23:16:54.369Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/5c/49/498c86566a1d80e978b42f0d702795f69887005548c041636df6ae1ca64c/cryptography-46.0.3-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:01ca9ff2885f3acc98c29f1860552e37f6d7c7d013d7334ff2a9de43a449315d", size = 4450807, upload-time = "2025-10-15T23:16:56.414Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/4b/0a/863a3604112174c8624a2ac3c038662d9e59970c7f926acdcfaed8d61142/cryptography-46.0.3-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:6eae65d4c3d33da080cff9c4ab1f711b15c1d9760809dad6ea763f3812d254cb", size = 4299615, upload-time = "2025-10-15T23:16:58.442Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/64/02/b73a533f6b64a69f3cd3872acb6ebc12aef924d8d103133bb3ea750dc703/cryptography-46.0.3-cp311-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e5bf0ed4490068a2e72ac03d786693adeb909981cc596425d09032d372bcc849", size = 4016800, upload-time = "2025-10-15T23:17:00.378Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/25/d5/16e41afbfa450cde85a3b7ec599bebefaef16b5c6ba4ec49a3532336ed72/cryptography-46.0.3-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:5ecfccd2329e37e9b7112a888e76d9feca2347f12f37918facbb893d7bb88ee8", size = 4984707, upload-time = "2025-10-15T23:17:01.98Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/c9/56/e7e69b427c3878352c2fb9b450bd0e19ed552753491d39d7d0a2f5226d41/cryptography-46.0.3-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:a2c0cd47381a3229c403062f764160d57d4d175e022c1df84e168c6251a22eec", size = 4482541, upload-time = "2025-10-15T23:17:04.078Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/78/f6/50736d40d97e8483172f1bb6e698895b92a223dba513b0ca6f06b2365339/cryptography-46.0.3-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:549e234ff32571b1f4076ac269fcce7a808d3bf98b76c8dd560e42dbc66d7d91", size = 4299464, upload-time = "2025-10-15T23:17:05.483Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/00/de/d8e26b1a855f19d9994a19c702fa2e93b0456beccbcfe437eda00e0701f2/cryptography-46.0.3-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:c0a7bb1a68a5d3471880e264621346c48665b3bf1c3759d682fc0864c540bd9e", size = 4950838, upload-time = "2025-10-15T23:17:07.425Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/8f/29/798fc4ec461a1c9e9f735f2fc58741b0daae30688f41b2497dcbc9ed1355/cryptography-46.0.3-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:10b01676fc208c3e6feeb25a8b83d81767e8059e1fe86e1dc62d10a3018fa926", size = 4481596, upload-time = "2025-10-15T23:17:09.343Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/15/8d/03cd48b20a573adfff7652b76271078e3045b9f49387920e7f1f631d125e/cryptography-46.0.3-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:0abf1ffd6e57c67e92af68330d05760b7b7efb243aab8377e583284dbab72c71", size = 4426782, upload-time = "2025-10-15T23:17:11.22Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/fa/b1/ebacbfe53317d55cf33165bda24c86523497a6881f339f9aae5c2e13e57b/cryptography-46.0.3-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a04bee9ab6a4da801eb9b51f1b708a1b5b5c9eb48c03f74198464c66f0d344ac", size = 4698381, upload-time = "2025-10-15T23:17:12.829Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/96/92/8a6a9525893325fc057a01f654d7efc2c64b9de90413adcf605a85744ff4/cryptography-46.0.3-cp311-abi3-win32.whl", hash = "sha256:f260d0d41e9b4da1ed1e0f1ce571f97fe370b152ab18778e9e8f67d6af432018", size = 3055988, upload-time = "2025-10-15T23:17:14.65Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/7e/bf/80fbf45253ea585a1e492a6a17efcb93467701fa79e71550a430c5e60df0/cryptography-46.0.3-cp311-abi3-win_amd64.whl", hash = "sha256:a9a3008438615669153eb86b26b61e09993921ebdd75385ddd748702c5adfddb", size = 3514451, upload-time = "2025-10-15T23:17:16.142Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/2e/af/9b302da4c87b0beb9db4e756386a7c6c5b8003cd0e742277888d352ae91d/cryptography-46.0.3-cp311-abi3-win_arm64.whl", hash = "sha256:5d7f93296ee28f68447397bf5198428c9aeeab45705a55d53a6343455dcb2c3c", size = 2928007, upload-time = "2025-10-15T23:17:18.04Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/f5/e2/a510aa736755bffa9d2f75029c229111a1d02f8ecd5de03078f4c18d91a3/cryptography-46.0.3-cp314-cp314t-macosx_10_9_universal2.whl", hash = "sha256:00a5e7e87938e5ff9ff5447ab086a5706a957137e6e433841e9d24f38a065217", size = 7158012, upload-time = "2025-10-15T23:17:19.982Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/73/dc/9aa866fbdbb95b02e7f9d086f1fccfeebf8953509b87e3f28fff927ff8a0/cryptography-46.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c8daeb2d2174beb4575b77482320303f3d39b8e81153da4f0fb08eb5fe86a6c5", size = 4288728, upload-time = "2025-10-15T23:17:21.527Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/c5/fd/bc1daf8230eaa075184cbbf5f8cd00ba9db4fd32d63fb83da4671b72ed8a/cryptography-46.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:39b6755623145ad5eff1dab323f4eae2a32a77a7abef2c5089a04a3d04366715", size = 4435078, upload-time = "2025-10-15T23:17:23.042Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/82/98/d3bd5407ce4c60017f8ff9e63ffee4200ab3e23fe05b765cab805a7db008/cryptography-46.0.3-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:db391fa7c66df6762ee3f00c95a89e6d428f4d60e7abc8328f4fe155b5ac6e54", size = 4293460, upload-time = "2025-10-15T23:17:24.885Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/26/e9/e23e7900983c2b8af7a08098db406cf989d7f09caea7897e347598d4cd5b/cryptography-46.0.3-cp314-cp314t-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:78a97cf6a8839a48c49271cdcbd5cf37ca2c1d6b7fdd86cc864f302b5e9bf459", size = 3995237, upload-time = "2025-10-15T23:17:26.449Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/91/15/af68c509d4a138cfe299d0d7ddb14afba15233223ebd933b4bbdbc7155d3/cryptography-46.0.3-cp314-cp314t-manylinux_2_28_ppc64le.whl", hash = "sha256:dfb781ff7eaa91a6f7fd41776ec37c5853c795d3b358d4896fdbb5df168af422", size = 4967344, upload-time = "2025-10-15T23:17:28.06Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/ca/e3/8643d077c53868b681af077edf6b3cb58288b5423610f21c62aadcbe99f4/cryptography-46.0.3-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:6f61efb26e76c45c4a227835ddeae96d83624fb0d29eb5df5b96e14ed1a0afb7", size = 4466564, upload-time = "2025-10-15T23:17:29.665Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/0e/43/c1e8726fa59c236ff477ff2b5dc071e54b21e5a1e51aa2cee1676f1c986f/cryptography-46.0.3-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:23b1a8f26e43f47ceb6d6a43115f33a5a37d57df4ea0ca295b780ae8546e8044", size = 4292415, upload-time = "2025-10-15T23:17:31.686Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/42/f9/2f8fefdb1aee8a8e3256a0568cffc4e6d517b256a2fe97a029b3f1b9fe7e/cryptography-46.0.3-cp314-cp314t-manylinux_2_34_ppc64le.whl", hash = "sha256:b419ae593c86b87014b9be7396b385491ad7f320bde96826d0dd174459e54665", size = 4931457, upload-time = "2025-10-15T23:17:33.478Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/79/30/9b54127a9a778ccd6d27c3da7563e9f2d341826075ceab89ae3b41bf5be2/cryptography-46.0.3-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:50fc3343ac490c6b08c0cf0d704e881d0d660be923fd3076db3e932007e726e3", size = 4466074, upload-time = "2025-10-15T23:17:35.158Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/ac/68/b4f4a10928e26c941b1b6a179143af9f4d27d88fe84a6a3c53592d2e76bf/cryptography-46.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:22d7e97932f511d6b0b04f2bfd818d73dcd5928db509460aaf48384778eb6d20", size = 4420569, upload-time = "2025-10-15T23:17:37.188Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/a3/49/3746dab4c0d1979888f125226357d3262a6dd40e114ac29e3d2abdf1ec55/cryptography-46.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:d55f3dffadd674514ad19451161118fd010988540cee43d8bc20675e775925de", size = 4681941, upload-time = "2025-10-15T23:17:39.236Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/fd/30/27654c1dbaf7e4a3531fa1fc77986d04aefa4d6d78259a62c9dc13d7ad36/cryptography-46.0.3-cp314-cp314t-win32.whl", hash = "sha256:8a6e050cb6164d3f830453754094c086ff2d0b2f3a897a1d9820f6139a1f0914", size = 3022339, upload-time = "2025-10-15T23:17:40.888Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/f6/30/640f34ccd4d2a1bc88367b54b926b781b5a018d65f404d409aba76a84b1c/cryptography-46.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:760f83faa07f8b64e9c33fc963d790a2edb24efb479e3520c14a45741cd9b2db", size = 3494315, upload-time = "2025-10-15T23:17:42.769Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/ba/8b/88cc7e3bd0a8e7b861f26981f7b820e1f46aa9d26cc482d0feba0ecb4919/cryptography-46.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:516ea134e703e9fe26bcd1277a4b59ad30586ea90c365a87781d7887a646fe21", size = 2919331, upload-time = "2025-10-15T23:17:44.468Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/fd/23/45fe7f376a7df8daf6da3556603b36f53475a99ce4faacb6ba2cf3d82021/cryptography-46.0.3-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:cb3d760a6117f621261d662bccc8ef5bc32ca673e037c83fbe565324f5c46936", size = 7218248, upload-time = "2025-10-15T23:17:46.294Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/27/32/b68d27471372737054cbd34c84981f9edbc24fe67ca225d389799614e27f/cryptography-46.0.3-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:4b7387121ac7d15e550f5cb4a43aef2559ed759c35df7336c402bb8275ac9683", size = 4294089, upload-time = "2025-10-15T23:17:48.269Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/26/42/fa8389d4478368743e24e61eea78846a0006caffaf72ea24a15159215a14/cryptography-46.0.3-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:15ab9b093e8f09daab0f2159bb7e47532596075139dd74365da52ecc9cb46c5d", size = 4440029, upload-time = "2025-10-15T23:17:49.837Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/5f/eb/f483db0ec5ac040824f269e93dd2bd8a21ecd1027e77ad7bdf6914f2fd80/cryptography-46.0.3-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:46acf53b40ea38f9c6c229599a4a13f0d46a6c3fa9ef19fc1a124d62e338dfa0", size = 4297222, upload-time = "2025-10-15T23:17:51.357Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/fd/cf/da9502c4e1912cb1da3807ea3618a6829bee8207456fbbeebc361ec38ba3/cryptography-46.0.3-cp38-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:10ca84c4668d066a9878890047f03546f3ae0a6b8b39b697457b7757aaf18dbc", size = 4012280, upload-time = "2025-10-15T23:17:52.964Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/6b/8f/9adb86b93330e0df8b3dcf03eae67c33ba89958fc2e03862ef1ac2b42465/cryptography-46.0.3-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:36e627112085bb3b81b19fed209c05ce2a52ee8b15d161b7c643a7d5a88491f3", size = 4978958, upload-time = "2025-10-15T23:17:54.965Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/d1/a0/5fa77988289c34bdb9f913f5606ecc9ada1adb5ae870bd0d1054a7021cc4/cryptography-46.0.3-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:1000713389b75c449a6e979ffc7dcc8ac90b437048766cef052d4d30b8220971", size = 4473714, upload-time = "2025-10-15T23:17:56.754Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/14/e5/fc82d72a58d41c393697aa18c9abe5ae1214ff6f2a5c18ac470f92777895/cryptography-46.0.3-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:b02cf04496f6576afffef5ddd04a0cb7d49cf6be16a9059d793a30b035f6b6ac", size = 4296970, upload-time = "2025-10-15T23:17:58.588Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/78/06/5663ed35438d0b09056973994f1aec467492b33bd31da36e468b01ec1097/cryptography-46.0.3-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:71e842ec9bc7abf543b47cf86b9a743baa95f4677d22baa4c7d5c69e49e9bc04", size = 4940236, upload-time = "2025-10-15T23:18:00.897Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/fc/59/873633f3f2dcd8a053b8dd1d38f783043b5fce589c0f6988bf55ef57e43e/cryptography-46.0.3-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:402b58fc32614f00980b66d6e56a5b4118e6cb362ae8f3fda141ba4689bd4506", size = 4472642, upload-time = "2025-10-15T23:18:02.749Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/3d/39/8e71f3930e40f6877737d6f69248cf74d4e34b886a3967d32f919cc50d3b/cryptography-46.0.3-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:ef639cb3372f69ec44915fafcd6698b6cc78fbe0c2ea41be867f6ed612811963", size = 4423126, upload-time = "2025-10-15T23:18:04.85Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/cd/c7/f65027c2810e14c3e7268353b1681932b87e5a48e65505d8cc17c99e36ae/cryptography-46.0.3-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:3b51b8ca4f1c6453d8829e1eb7299499ca7f313900dd4d89a24b8b87c0a780d4", size = 4686573, upload-time = "2025-10-15T23:18:06.908Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/0a/6e/1c8331ddf91ca4730ab3086a0f1be19c65510a33b5a441cb334e7a2d2560/cryptography-46.0.3-cp38-abi3-win32.whl", hash = "sha256:6276eb85ef938dc035d59b87c8a7dc559a232f954962520137529d77b18ff1df", size = 3036695, upload-time = "2025-10-15T23:18:08.672Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/90/45/b0d691df20633eff80955a0fc7695ff9051ffce8b69741444bd9ed7bd0db/cryptography-46.0.3-cp38-abi3-win_amd64.whl", hash = "sha256:416260257577718c05135c55958b674000baef9a1c7d9e8f306ec60d71db850f", size = 3501720, upload-time = "2025-10-15T23:18:10.632Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/e8/cb/2da4cc83f5edb9c3257d09e1e7ab7b23f049c7962cae8d842bbef0a9cec9/cryptography-46.0.3-cp38-abi3-win_arm64.whl", hash = "sha256:d89c3468de4cdc4f08a57e214384d0471911a3830fcdaf7a8cc587e42a866372", size = 2918740, upload-time = "2025-10-15T23:18:12.277Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "debugpy"
                                                                  +version = "1.8.17"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/15/ad/71e708ff4ca377c4230530d6a7aa7992592648c122a2cd2b321cf8b35a76/debugpy-1.8.17.tar.gz", hash = "sha256:fd723b47a8c08892b1a16b2c6239a8b96637c62a59b94bb5dab4bac592a58a8e", size = 1644129, upload-time = "2025-09-17T16:33:20.633Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/08/2b/9d8e65beb2751876c82e1aceb32f328c43ec872711fa80257c7674f45650/debugpy-1.8.17-cp312-cp312-macosx_15_0_universal2.whl", hash = "sha256:f14467edef672195c6f6b8e27ce5005313cb5d03c9239059bc7182b60c176e2d", size = 2549522, upload-time = "2025-09-17T16:33:38.466Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/b4/78/eb0d77f02971c05fca0eb7465b18058ba84bd957062f5eec82f941ac792a/debugpy-1.8.17-cp312-cp312-manylinux_2_34_x86_64.whl", hash = "sha256:24693179ef9dfa20dca8605905a42b392be56d410c333af82f1c5dff807a64cc", size = 4309417, upload-time = "2025-09-17T16:33:41.299Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/37/42/c40f1d8cc1fed1e75ea54298a382395b8b937d923fcf41ab0797a554f555/debugpy-1.8.17-cp312-cp312-win32.whl", hash = "sha256:6a4e9dacf2cbb60d2514ff7b04b4534b0139facbf2abdffe0639ddb6088e59cf", size = 5277130, upload-time = "2025-09-17T16:33:43.554Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/72/22/84263b205baad32b81b36eac076de0cdbe09fe2d0637f5b32243dc7c925b/debugpy-1.8.17-cp312-cp312-win_amd64.whl", hash = "sha256:e8f8f61c518952fb15f74a302e068b48d9c4691768ade433e4adeea961993464", size = 5319053, upload-time = "2025-09-17T16:33:53.033Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/50/76/597e5cb97d026274ba297af8d89138dfd9e695767ba0e0895edb20963f40/debugpy-1.8.17-cp313-cp313-macosx_15_0_universal2.whl", hash = "sha256:857c1dd5d70042502aef1c6d1c2801211f3ea7e56f75e9c335f434afb403e464", size = 2538386, upload-time = "2025-09-17T16:33:54.594Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/5f/60/ce5c34fcdfec493701f9d1532dba95b21b2f6394147234dce21160bd923f/debugpy-1.8.17-cp313-cp313-manylinux_2_34_x86_64.whl", hash = "sha256:3bea3b0b12f3946e098cce9b43c3c46e317b567f79570c3f43f0b96d00788088", size = 4292100, upload-time = "2025-09-17T16:33:56.353Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/e8/95/7873cf2146577ef71d2a20bf553f12df865922a6f87b9e8ee1df04f01785/debugpy-1.8.17-cp313-cp313-win32.whl", hash = "sha256:e34ee844c2f17b18556b5bbe59e1e2ff4e86a00282d2a46edab73fd7f18f4a83", size = 5277002, upload-time = "2025-09-17T16:33:58.231Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/46/11/18c79a1cee5ff539a94ec4aa290c1c069a5580fd5cfd2fb2e282f8e905da/debugpy-1.8.17-cp313-cp313-win_amd64.whl", hash = "sha256:6c5cd6f009ad4fca8e33e5238210dc1e5f42db07d4b6ab21ac7ffa904a196420", size = 5319047, upload-time = "2025-09-17T16:34:00.586Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/de/45/115d55b2a9da6de812696064ceb505c31e952c5d89c4ed1d9bb983deec34/debugpy-1.8.17-cp314-cp314-macosx_15_0_universal2.whl", hash = "sha256:045290c010bcd2d82bc97aa2daf6837443cd52f6328592698809b4549babcee1", size = 2536899, upload-time = "2025-09-17T16:34:02.657Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/5a/73/2aa00c7f1f06e997ef57dc9b23d61a92120bec1437a012afb6d176585197/debugpy-1.8.17-cp314-cp314-manylinux_2_34_x86_64.whl", hash = "sha256:b69b6bd9dba6a03632534cdf67c760625760a215ae289f7489a452af1031fe1f", size = 4268254, upload-time = "2025-09-17T16:34:04.486Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/86/b5/ed3e65c63c68a6634e3ba04bd10255c8e46ec16ebed7d1c79e4816d8a760/debugpy-1.8.17-cp314-cp314-win32.whl", hash = "sha256:5c59b74aa5630f3a5194467100c3b3d1c77898f9ab27e3f7dc5d40fc2f122670", size = 5277203, upload-time = "2025-09-17T16:34:06.65Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/b0/26/394276b71c7538445f29e792f589ab7379ae70fd26ff5577dfde71158e96/debugpy-1.8.17-cp314-cp314-win_amd64.whl", hash = "sha256:893cba7bb0f55161de4365584b025f7064e1f88913551bcd23be3260b231429c", size = 5318493, upload-time = "2025-09-17T16:34:08.483Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/b0/d0/89247ec250369fc76db477720a26b2fce7ba079ff1380e4ab4529d2fe233/debugpy-1.8.17-py2.py3-none-any.whl", hash = "sha256:60c7dca6571efe660ccb7a9508d73ca14b8796c4ed484c2002abba714226cfef", size = 5283210, upload-time = "2025-09-17T16:34:25.835Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "decorator"
                                                                  +version = "5.2.1"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/43/fa/6d96a0978d19e17b68d634497769987b16c8f4cd0a7a05048bec693caa6b/decorator-5.2.1.tar.gz", hash = "sha256:65f266143752f734b0a7cc83c46f4618af75b8c5911b00ccb61d0ac9b6da0360", size = 56711, upload-time = "2025-02-24T04:41:34.073Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/4e/8c/f3147f5c4b73e7550fe5f9352eaa956ae838d5c51eb58e7a25b9f3e2643b/decorator-5.2.1-py3-none-any.whl", hash = "sha256:d316bb415a2d9e2d2b3abcc4084c6502fc09240e292cd76a76afc106a1c8e04a", size = 9190, upload-time = "2025-02-24T04:41:32.565Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "defusedxml"
                                                                  +version = "0.7.1"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/0f/d5/c66da9b79e5bdb124974bfe172b4daf3c984ebd9c2a06e2b8a4dc7331c72/defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69", size = 75520, upload-time = "2021-03-08T10:59:26.269Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/07/6c/aa3f2f849e01cb6a001cd8554a88d4c77c5c1a31c95bdf1cf9301e6d9ef4/defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61", size = 25604, upload-time = "2021-03-08T10:59:24.45Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "dnspython"
                                                                  +version = "2.8.0"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/8c/8b/57666417c0f90f08bcafa776861060426765fdb422eb10212086fb811d26/dnspython-2.8.0.tar.gz", hash = "sha256:181d3c6996452cb1189c4046c61599b84a5a86e099562ffde77d26984ff26d0f", size = 368251, upload-time = "2025-09-07T18:58:00.022Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/ba/5a/18ad964b0086c6e62e2e7500f7edc89e3faa45033c71c1893d34eed2b2de/dnspython-2.8.0-py3-none-any.whl", hash = "sha256:01d9bbc4a2d76bf0db7c1f729812ded6d912bd318d3b1cf81d30c0f845dbf3af", size = 331094, upload-time = "2025-09-07T18:57:58.071Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "executing"
                                                                  +version = "2.2.1"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/cc/28/c14e053b6762b1044f34a13aab6859bbf40456d37d23aa286ac24cfd9a5d/executing-2.2.1.tar.gz", hash = "sha256:3632cc370565f6648cc328b32435bd120a1e4ebb20c77e3fdde9a13cd1e533c4", size = 1129488, upload-time = "2025-09-01T09:48:10.866Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/c1/ea/53f2148663b321f21b5a606bd5f191517cf40b7072c0497d3c92c4a13b1e/executing-2.2.1-py2.py3-none-any.whl", hash = "sha256:760643d3452b4d777d295bb167ccc74c64a81df23fb5e08eff250c425a4b2017", size = 28317, upload-time = "2025-09-01T09:48:08.5Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "fastjsonschema"
                                                                  +version = "2.21.2"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/20/b5/23b216d9d985a956623b6bd12d4086b60f0059b27799f23016af04a74ea1/fastjsonschema-2.21.2.tar.gz", hash = "sha256:b1eb43748041c880796cd077f1a07c3d94e93ae84bba5ed36800a33554ae05de", size = 374130, upload-time = "2025-08-14T18:49:36.666Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/cb/a8/20d0723294217e47de6d9e2e40fd4a9d2f7c4b6ef974babd482a59743694/fastjsonschema-2.21.2-py3-none-any.whl", hash = "sha256:1c797122d0a86c5cace2e54bf4e819c36223b552017172f32c5c024a6b77e463", size = 24024, upload-time = "2025-08-14T18:49:34.776Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "fastparquet"
                                                                  +version = "2024.11.0"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +dependencies = [
                                                                  +    { name = "cramjam" },
                                                                  +    { name = "fsspec" },
                                                                  +    { name = "numpy" },
                                                                  +    { name = "packaging" },
                                                                  +    { name = "pandas" },
                                                                  +]
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/b4/66/862da14f5fde4eff2cedc0f51a8dc34ba145088e5041b45b2d57ac54f922/fastparquet-2024.11.0.tar.gz", hash = "sha256:e3b1fc73fd3e1b70b0de254bae7feb890436cb67e99458b88cb9bd3cc44db419", size = 467192, upload-time = "2024-11-15T19:30:10.413Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/08/76/068ac7ec9b4fc783be21a75a6a90b8c0654da4d46934d969e524ce287787/fastparquet-2024.11.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:dbad4b014782bd38b58b8e9f514fe958cfa7a6c4e187859232d29fd5c5ddd849", size = 915968, upload-time = "2024-11-12T20:37:52.861Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/c7/9e/6d3b4188ad64ed51173263c07109a5f18f9c84a44fa39ab524fca7420cda/fastparquet-2024.11.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:403d31109d398b6be7ce84fa3483fc277c6a23f0b321348c0a505eb098a041cb", size = 685399, upload-time = "2024-11-12T20:37:54.899Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/8f/6c/809220bc9fbe83d107df2d664c3fb62fb81867be8f5218ac66c2e6b6a358/fastparquet-2024.11.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cbbb9057a26acf0abad7adf58781ee357258b7708ee44a289e3bee97e2f55d42", size = 1758557, upload-time = "2024-11-12T20:37:56.553Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/e0/2c/b3b3e6ca2e531484289024138cd4709c22512b3fe68066d7f9849da4a76c/fastparquet-2024.11.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63e0e416e25c15daa174aad8ba991c2e9e5b0dc347e5aed5562124261400f87b", size = 1781052, upload-time = "2024-11-12T20:37:58.339Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/21/fe/97ed45092d0311c013996dae633122b7a51c5d9fe8dcbc2c840dc491201e/fastparquet-2024.11.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0e2d7f02f57231e6c86d26e9ea71953737202f20e948790e5d4db6d6a1a150dc", size = 1715797, upload-time = "2024-11-12T20:38:00.694Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/24/df/02fa6aee6c0d53d1563b5bc22097076c609c4c5baa47056b0b4bed456fcf/fastparquet-2024.11.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:fbe4468146b633d8f09d7b196fea0547f213cb5ce5f76e9d1beb29eaa9593a93", size = 1795682, upload-time = "2024-11-12T20:38:02.38Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/b0/25/f4f87557589e1923ee0e3bebbc84f08b7c56962bf90f51b116ddc54f2c9f/fastparquet-2024.11.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:29d5c718817bcd765fc519b17f759cad4945974421ecc1931d3bdc3e05e57fa9", size = 1857842, upload-time = "2024-11-12T20:38:04.196Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/b1/f9/98cd0c39115879be1044d59c9b76e8292776e99bb93565bf990078fd11c4/fastparquet-2024.11.0-cp312-cp312-win_amd64.whl", hash = "sha256:74a0b3c40ab373442c0fda96b75a36e88745d8b138fcc3a6143e04682cbbb8ca", size = 673269, upload-time = "2024-12-11T21:22:48.073Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/47/e3/e7db38704be5db787270d43dde895eaa1a825ab25dc245e71df70860ec12/fastparquet-2024.11.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:59e5c5b51083d5b82572cdb7aed0346e3181e3ac9d2e45759da2e804bdafa7ee", size = 912523, upload-time = "2024-11-12T20:38:06.003Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/d3/66/e3387c99293dae441634e7724acaa425b27de19a00ee3d546775dace54a9/fastparquet-2024.11.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:bdadf7b6bad789125b823bfc5b0a719ba5c4a2ef965f973702d3ea89cff057f6", size = 683779, upload-time = "2024-11-12T20:38:07.442Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/0a/21/d112d0573d086b578bf04302a502e9a7605ea8f1244a7b8577cd945eec78/fastparquet-2024.11.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:46b2db02fc2a1507939d35441c8ab211d53afd75d82eec9767d1c3656402859b", size = 1751113, upload-time = "2024-11-12T20:38:09.36Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/6b/a7/040507cee3a7798954e8fdbca21d2dbc532774b02b882d902b8a4a6849ef/fastparquet-2024.11.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a3afdef2895c9f459135a00a7ed3ceafebfbce918a9e7b5d550e4fae39c1b64d", size = 1780496, upload-time = "2024-11-12T20:38:11.022Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/bc/75/d0d9f7533d780ec167eede16ad88073ee71696150511126c31940e7f73aa/fastparquet-2024.11.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:36b5c9bd2ffaaa26ff45d59a6cefe58503dd748e0c7fad80dd905749da0f2b9e", size = 1713608, upload-time = "2024-11-12T20:38:12.848Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/30/fa/1d95bc86e45e80669c4f374b2ca26a9e5895a1011bb05d6341b4a7414693/fastparquet-2024.11.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:6b7df5d3b61a19d76e209fe8d3133759af1c139e04ebc6d43f3cc2d8045ef338", size = 1792779, upload-time = "2024-11-12T20:38:14.5Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/13/3d/c076beeb926c79593374c04662a9422a76650eef17cd1c8e10951340764a/fastparquet-2024.11.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8b35823ac7a194134e5f82fa4a9659e42e8f9ad1f2d22a55fbb7b9e4053aabbb", size = 1851322, upload-time = "2024-11-12T20:38:16.231Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/09/5a/1d0d47e64816002824d4a876644e8c65540fa23f91b701f0daa726931545/fastparquet-2024.11.0-cp313-cp313-win_amd64.whl", hash = "sha256:d20632964e65530374ff7cddd42cc06aa0a1388934903693d6d22592a5ba827b", size = 673266, upload-time = "2024-11-12T20:38:17.661Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "fqdn"
                                                                  +version = "1.5.1"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/30/3e/a80a8c077fd798951169626cde3e239adeba7dab75deb3555716415bd9b0/fqdn-1.5.1.tar.gz", hash = "sha256:105ed3677e767fb5ca086a0c1f4bb66ebc3c100be518f0e0d755d9eae164d89f", size = 6015, upload-time = "2021-03-11T07:16:29.08Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/cf/58/8acf1b3e91c58313ce5cb67df61001fc9dcd21be4fadb76c1a2d540e09ed/fqdn-1.5.1-py3-none-any.whl", hash = "sha256:3a179af3761e4df6eb2e026ff9e1a3033d3587bf980a0b1b2e1e5d08d7358014", size = 9121, upload-time = "2021-03-11T07:16:28.351Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "frozenlist"
                                                                  +version = "1.8.0"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/2d/f5/c831fac6cc817d26fd54c7eaccd04ef7e0288806943f7cc5bbf69f3ac1f0/frozenlist-1.8.0.tar.gz", hash = "sha256:3ede829ed8d842f6cd48fc7081d7a41001a56f1f38603f9d49bf3020d59a31ad", size = 45875, upload-time = "2025-10-06T05:38:17.865Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/69/29/948b9aa87e75820a38650af445d2ef2b6b8a6fab1a23b6bb9e4ef0be2d59/frozenlist-1.8.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:78f7b9e5d6f2fdb88cdde9440dc147259b62b9d3b019924def9f6478be254ac1", size = 87782, upload-time = "2025-10-06T05:36:06.649Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/64/80/4f6e318ee2a7c0750ed724fa33a4bdf1eacdc5a39a7a24e818a773cd91af/frozenlist-1.8.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:229bf37d2e4acdaf808fd3f06e854a4a7a3661e871b10dc1f8f1896a3b05f18b", size = 50594, upload-time = "2025-10-06T05:36:07.69Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/2b/94/5c8a2b50a496b11dd519f4a24cb5496cf125681dd99e94c604ccdea9419a/frozenlist-1.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f833670942247a14eafbb675458b4e61c82e002a148f49e68257b79296e865c4", size = 50448, upload-time = "2025-10-06T05:36:08.78Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/6a/bd/d91c5e39f490a49df14320f4e8c80161cfcce09f1e2cde1edd16a551abb3/frozenlist-1.8.0-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:494a5952b1c597ba44e0e78113a7266e656b9794eec897b19ead706bd7074383", size = 242411, upload-time = "2025-10-06T05:36:09.801Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/8f/83/f61505a05109ef3293dfb1ff594d13d64a2324ac3482be2cedc2be818256/frozenlist-1.8.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:96f423a119f4777a4a056b66ce11527366a8bb92f54e541ade21f2374433f6d4", size = 243014, upload-time = "2025-10-06T05:36:11.394Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/d8/cb/cb6c7b0f7d4023ddda30cf56b8b17494eb3a79e3fda666bf735f63118b35/frozenlist-1.8.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3462dd9475af2025c31cc61be6652dfa25cbfb56cbbf52f4ccfe029f38decaf8", size = 234909, upload-time = "2025-10-06T05:36:12.598Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/31/c5/cd7a1f3b8b34af009fb17d4123c5a778b44ae2804e3ad6b86204255f9ec5/frozenlist-1.8.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c4c800524c9cd9bac5166cd6f55285957fcfc907db323e193f2afcd4d9abd69b", size = 250049, upload-time = "2025-10-06T05:36:14.065Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/c0/01/2f95d3b416c584a1e7f0e1d6d31998c4a795f7544069ee2e0962a4b60740/frozenlist-1.8.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d6a5df73acd3399d893dafc71663ad22534b5aa4f94e8a2fabfe856c3c1b6a52", size = 256485, upload-time = "2025-10-06T05:36:15.39Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/ce/03/024bf7720b3abaebcff6d0793d73c154237b85bdf67b7ed55e5e9596dc9a/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:405e8fe955c2280ce66428b3ca55e12b3c4e9c336fb2103a4937e891c69a4a29", size = 237619, upload-time = "2025-10-06T05:36:16.558Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/69/fa/f8abdfe7d76b731f5d8bd217827cf6764d4f1d9763407e42717b4bed50a0/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:908bd3f6439f2fef9e85031b59fd4f1297af54415fb60e4254a95f75b3cab3f3", size = 250320, upload-time = "2025-10-06T05:36:17.821Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/f5/3c/b051329f718b463b22613e269ad72138cc256c540f78a6de89452803a47d/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:294e487f9ec720bd8ffcebc99d575f7eff3568a08a253d1ee1a0378754b74143", size = 246820, upload-time = "2025-10-06T05:36:19.046Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/0f/ae/58282e8f98e444b3f4dd42448ff36fa38bef29e40d40f330b22e7108f565/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:74c51543498289c0c43656701be6b077f4b265868fa7f8a8859c197006efb608", size = 250518, upload-time = "2025-10-06T05:36:20.763Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/8f/96/007e5944694d66123183845a106547a15944fbbb7154788cbf7272789536/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:776f352e8329135506a1d6bf16ac3f87bc25b28e765949282dcc627af36123aa", size = 239096, upload-time = "2025-10-06T05:36:22.129Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/66/bb/852b9d6db2fa40be96f29c0d1205c306288f0684df8fd26ca1951d461a56/frozenlist-1.8.0-cp312-cp312-win32.whl", hash = "sha256:433403ae80709741ce34038da08511d4a77062aa924baf411ef73d1146e74faf", size = 39985, upload-time = "2025-10-06T05:36:23.661Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/b8/af/38e51a553dd66eb064cdf193841f16f077585d4d28394c2fa6235cb41765/frozenlist-1.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:34187385b08f866104f0c0617404c8eb08165ab1272e884abc89c112e9c00746", size = 44591, upload-time = "2025-10-06T05:36:24.958Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/a7/06/1dc65480ab147339fecc70797e9c2f69d9cea9cf38934ce08df070fdb9cb/frozenlist-1.8.0-cp312-cp312-win_arm64.whl", hash = "sha256:fe3c58d2f5db5fbd18c2987cba06d51b0529f52bc3a6cdc33d3f4eab725104bd", size = 40102, upload-time = "2025-10-06T05:36:26.333Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/2d/40/0832c31a37d60f60ed79e9dfb5a92e1e2af4f40a16a29abcc7992af9edff/frozenlist-1.8.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8d92f1a84bb12d9e56f818b3a746f3efba93c1b63c8387a73dde655e1e42282a", size = 85717, upload-time = "2025-10-06T05:36:27.341Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/30/ba/b0b3de23f40bc55a7057bd38434e25c34fa48e17f20ee273bbde5e0650f3/frozenlist-1.8.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:96153e77a591c8adc2ee805756c61f59fef4cf4073a9275ee86fe8cba41241f7", size = 49651, upload-time = "2025-10-06T05:36:28.855Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/0c/ab/6e5080ee374f875296c4243c381bbdef97a9ac39c6e3ce1d5f7d42cb78d6/frozenlist-1.8.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f21f00a91358803399890ab167098c131ec2ddd5f8f5fd5fe9c9f2c6fcd91e40", size = 49417, upload-time = "2025-10-06T05:36:29.877Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/d5/4e/e4691508f9477ce67da2015d8c00acd751e6287739123113a9fca6f1604e/frozenlist-1.8.0-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:fb30f9626572a76dfe4293c7194a09fb1fe93ba94c7d4f720dfae3b646b45027", size = 234391, upload-time = "2025-10-06T05:36:31.301Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/40/76/c202df58e3acdf12969a7895fd6f3bc016c642e6726aa63bd3025e0fc71c/frozenlist-1.8.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:eaa352d7047a31d87dafcacbabe89df0aa506abb5b1b85a2fb91bc3faa02d822", size = 233048, upload-time = "2025-10-06T05:36:32.531Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/f9/c0/8746afb90f17b73ca5979c7a3958116e105ff796e718575175319b5bb4ce/frozenlist-1.8.0-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:03ae967b4e297f58f8c774c7eabcce57fe3c2434817d4385c50661845a058121", size = 226549, upload-time = "2025-10-06T05:36:33.706Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/7e/eb/4c7eefc718ff72f9b6c4893291abaae5fbc0c82226a32dcd8ef4f7a5dbef/frozenlist-1.8.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f6292f1de555ffcc675941d65fffffb0a5bcd992905015f85d0592201793e0e5", size = 239833, upload-time = "2025-10-06T05:36:34.947Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/c2/4e/e5c02187cf704224f8b21bee886f3d713ca379535f16893233b9d672ea71/frozenlist-1.8.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:29548f9b5b5e3460ce7378144c3010363d8035cea44bc0bf02d57f5a685e084e", size = 245363, upload-time = "2025-10-06T05:36:36.534Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/1f/96/cb85ec608464472e82ad37a17f844889c36100eed57bea094518bf270692/frozenlist-1.8.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ec3cc8c5d4084591b4237c0a272cc4f50a5b03396a47d9caaf76f5d7b38a4f11", size = 229314, upload-time = "2025-10-06T05:36:38.582Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/5d/6f/4ae69c550e4cee66b57887daeebe006fe985917c01d0fff9caab9883f6d0/frozenlist-1.8.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:517279f58009d0b1f2e7c1b130b377a349405da3f7621ed6bfae50b10adf20c1", size = 243365, upload-time = "2025-10-06T05:36:40.152Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/7a/58/afd56de246cf11780a40a2c28dc7cbabbf06337cc8ddb1c780a2d97e88d8/frozenlist-1.8.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:db1e72ede2d0d7ccb213f218df6a078a9c09a7de257c2fe8fcef16d5925230b1", size = 237763, upload-time = "2025-10-06T05:36:41.355Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/cb/36/cdfaf6ed42e2644740d4a10452d8e97fa1c062e2a8006e4b09f1b5fd7d63/frozenlist-1.8.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:b4dec9482a65c54a5044486847b8a66bf10c9cb4926d42927ec4e8fd5db7fed8", size = 240110, upload-time = "2025-10-06T05:36:42.716Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/03/a8/9ea226fbefad669f11b52e864c55f0bd57d3c8d7eb07e9f2e9a0b39502e1/frozenlist-1.8.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:21900c48ae04d13d416f0e1e0c4d81f7931f73a9dfa0b7a8746fb2fe7dd970ed", size = 233717, upload-time = "2025-10-06T05:36:44.251Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/1e/0b/1b5531611e83ba7d13ccc9988967ea1b51186af64c42b7a7af465dcc9568/frozenlist-1.8.0-cp313-cp313-win32.whl", hash = "sha256:8b7b94a067d1c504ee0b16def57ad5738701e4ba10cec90529f13fa03c833496", size = 39628, upload-time = "2025-10-06T05:36:45.423Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/d8/cf/174c91dbc9cc49bc7b7aab74d8b734e974d1faa8f191c74af9b7e80848e6/frozenlist-1.8.0-cp313-cp313-win_amd64.whl", hash = "sha256:878be833caa6a3821caf85eb39c5ba92d28e85df26d57afb06b35b2efd937231", size = 43882, upload-time = "2025-10-06T05:36:46.796Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/c1/17/502cd212cbfa96eb1388614fe39a3fc9ab87dbbe042b66f97acb57474834/frozenlist-1.8.0-cp313-cp313-win_arm64.whl", hash = "sha256:44389d135b3ff43ba8cc89ff7f51f5a0bb6b63d829c8300f79a2fe4fe61bcc62", size = 39676, upload-time = "2025-10-06T05:36:47.8Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/d2/5c/3bbfaa920dfab09e76946a5d2833a7cbdf7b9b4a91c714666ac4855b88b4/frozenlist-1.8.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:e25ac20a2ef37e91c1b39938b591457666a0fa835c7783c3a8f33ea42870db94", size = 89235, upload-time = "2025-10-06T05:36:48.78Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/d2/d6/f03961ef72166cec1687e84e8925838442b615bd0b8854b54923ce5b7b8a/frozenlist-1.8.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:07cdca25a91a4386d2e76ad992916a85038a9b97561bf7a3fd12d5d9ce31870c", size = 50742, upload-time = "2025-10-06T05:36:49.837Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/1e/bb/a6d12b7ba4c3337667d0e421f7181c82dda448ce4e7ad7ecd249a16fa806/frozenlist-1.8.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:4e0c11f2cc6717e0a741f84a527c52616140741cd812a50422f83dc31749fb52", size = 51725, upload-time = "2025-10-06T05:36:50.851Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/bc/71/d1fed0ffe2c2ccd70b43714c6cab0f4188f09f8a67a7914a6b46ee30f274/frozenlist-1.8.0-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:b3210649ee28062ea6099cfda39e147fa1bc039583c8ee4481cb7811e2448c51", size = 284533, upload-time = "2025-10-06T05:36:51.898Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/c9/1f/fb1685a7b009d89f9bf78a42d94461bc06581f6e718c39344754a5d9bada/frozenlist-1.8.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:581ef5194c48035a7de2aefc72ac6539823bb71508189e5de01d60c9dcd5fa65", size = 292506, upload-time = "2025-10-06T05:36:53.101Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/e6/3b/b991fe1612703f7e0d05c0cf734c1b77aaf7c7d321df4572e8d36e7048c8/frozenlist-1.8.0-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3ef2d026f16a2b1866e1d86fc4e1291e1ed8a387b2c333809419a2f8b3a77b82", size = 274161, upload-time = "2025-10-06T05:36:54.309Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/ca/ec/c5c618767bcdf66e88945ec0157d7f6c4a1322f1473392319b7a2501ded7/frozenlist-1.8.0-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:5500ef82073f599ac84d888e3a8c1f77ac831183244bfd7f11eaa0289fb30714", size = 294676, upload-time = "2025-10-06T05:36:55.566Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/7c/ce/3934758637d8f8a88d11f0585d6495ef54b2044ed6ec84492a91fa3b27aa/frozenlist-1.8.0-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:50066c3997d0091c411a66e710f4e11752251e6d2d73d70d8d5d4c76442a199d", size = 300638, upload-time = "2025-10-06T05:36:56.758Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/fc/4f/a7e4d0d467298f42de4b41cbc7ddaf19d3cfeabaf9ff97c20c6c7ee409f9/frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:5c1c8e78426e59b3f8005e9b19f6ff46e5845895adbde20ece9218319eca6506", size = 283067, upload-time = "2025-10-06T05:36:57.965Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/dc/48/c7b163063d55a83772b268e6d1affb960771b0e203b632cfe09522d67ea5/frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:eefdba20de0d938cec6a89bd4d70f346a03108a19b9df4248d3cf0d88f1b0f51", size = 292101, upload-time = "2025-10-06T05:36:59.237Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/9f/d0/2366d3c4ecdc2fd391e0afa6e11500bfba0ea772764d631bbf82f0136c9d/frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:cf253e0e1c3ceb4aaff6df637ce033ff6535fb8c70a764a8f46aafd3d6ab798e", size = 289901, upload-time = "2025-10-06T05:37:00.811Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/b8/94/daff920e82c1b70e3618a2ac39fbc01ae3e2ff6124e80739ce5d71c9b920/frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:032efa2674356903cd0261c4317a561a6850f3ac864a63fc1583147fb05a79b0", size = 289395, upload-time = "2025-10-06T05:37:02.115Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/e3/20/bba307ab4235a09fdcd3cc5508dbabd17c4634a1af4b96e0f69bfe551ebd/frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6da155091429aeba16851ecb10a9104a108bcd32f6c1642867eadaee401c1c41", size = 283659, upload-time = "2025-10-06T05:37:03.711Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/fd/00/04ca1c3a7a124b6de4f8a9a17cc2fcad138b4608e7a3fc5877804b8715d7/frozenlist-1.8.0-cp313-cp313t-win32.whl", hash = "sha256:0f96534f8bfebc1a394209427d0f8a63d343c9779cda6fc25e8e121b5fd8555b", size = 43492, upload-time = "2025-10-06T05:37:04.915Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/59/5e/c69f733a86a94ab10f68e496dc6b7e8bc078ebb415281d5698313e3af3a1/frozenlist-1.8.0-cp313-cp313t-win_amd64.whl", hash = "sha256:5d63a068f978fc69421fb0e6eb91a9603187527c86b7cd3f534a5b77a592b888", size = 48034, upload-time = "2025-10-06T05:37:06.343Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/16/6c/be9d79775d8abe79b05fa6d23da99ad6e7763a1d080fbae7290b286093fd/frozenlist-1.8.0-cp313-cp313t-win_arm64.whl", hash = "sha256:bf0a7e10b077bf5fb9380ad3ae8ce20ef919a6ad93b4552896419ac7e1d8e042", size = 41749, upload-time = "2025-10-06T05:37:07.431Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/f1/c8/85da824b7e7b9b6e7f7705b2ecaf9591ba6f79c1177f324c2735e41d36a2/frozenlist-1.8.0-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:cee686f1f4cadeb2136007ddedd0aaf928ab95216e7691c63e50a8ec066336d0", size = 86127, upload-time = "2025-10-06T05:37:08.438Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/8e/e8/a1185e236ec66c20afd72399522f142c3724c785789255202d27ae992818/frozenlist-1.8.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:119fb2a1bd47307e899c2fac7f28e85b9a543864df47aa7ec9d3c1b4545f096f", size = 49698, upload-time = "2025-10-06T05:37:09.48Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/a1/93/72b1736d68f03fda5fdf0f2180fb6caaae3894f1b854d006ac61ecc727ee/frozenlist-1.8.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:4970ece02dbc8c3a92fcc5228e36a3e933a01a999f7094ff7c23fbd2beeaa67c", size = 49749, upload-time = "2025-10-06T05:37:10.569Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/a7/b2/fabede9fafd976b991e9f1b9c8c873ed86f202889b864756f240ce6dd855/frozenlist-1.8.0-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:cba69cb73723c3f329622e34bdbf5ce1f80c21c290ff04256cff1cd3c2036ed2", size = 231298, upload-time = "2025-10-06T05:37:11.993Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/3a/3b/d9b1e0b0eed36e70477ffb8360c49c85c8ca8ef9700a4e6711f39a6e8b45/frozenlist-1.8.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:778a11b15673f6f1df23d9586f83c4846c471a8af693a22e066508b77d201ec8", size = 232015, upload-time = "2025-10-06T05:37:13.194Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/dc/94/be719d2766c1138148564a3960fc2c06eb688da592bdc25adcf856101be7/frozenlist-1.8.0-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:0325024fe97f94c41c08872db482cf8ac4800d80e79222c6b0b7b162d5b13686", size = 225038, upload-time = "2025-10-06T05:37:14.577Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/e4/09/6712b6c5465f083f52f50cf74167b92d4ea2f50e46a9eea0523d658454ae/frozenlist-1.8.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:97260ff46b207a82a7567b581ab4190bd4dfa09f4db8a8b49d1a958f6aa4940e", size = 240130, upload-time = "2025-10-06T05:37:15.781Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/f8/d4/cd065cdcf21550b54f3ce6a22e143ac9e4836ca42a0de1022da8498eac89/frozenlist-1.8.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:54b2077180eb7f83dd52c40b2750d0a9f175e06a42e3213ce047219de902717a", size = 242845, upload-time = "2025-10-06T05:37:17.037Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/62/c3/f57a5c8c70cd1ead3d5d5f776f89d33110b1addae0ab010ad774d9a44fb9/frozenlist-1.8.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:2f05983daecab868a31e1da44462873306d3cbfd76d1f0b5b69c473d21dbb128", size = 229131, upload-time = "2025-10-06T05:37:18.221Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/6c/52/232476fe9cb64f0742f3fde2b7d26c1dac18b6d62071c74d4ded55e0ef94/frozenlist-1.8.0-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:33f48f51a446114bc5d251fb2954ab0164d5be02ad3382abcbfe07e2531d650f", size = 240542, upload-time = "2025-10-06T05:37:19.771Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/5f/85/07bf3f5d0fb5414aee5f47d33c6f5c77bfe49aac680bfece33d4fdf6a246/frozenlist-1.8.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:154e55ec0655291b5dd1b8731c637ecdb50975a2ae70c606d100750a540082f7", size = 237308, upload-time = "2025-10-06T05:37:20.969Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/11/99/ae3a33d5befd41ac0ca2cc7fd3aa707c9c324de2e89db0e0f45db9a64c26/frozenlist-1.8.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:4314debad13beb564b708b4a496020e5306c7333fa9a3ab90374169a20ffab30", size = 238210, upload-time = "2025-10-06T05:37:22.252Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/b2/60/b1d2da22f4970e7a155f0adde9b1435712ece01b3cd45ba63702aea33938/frozenlist-1.8.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:073f8bf8becba60aa931eb3bc420b217bb7d5b8f4750e6f8b3be7f3da85d38b7", size = 231972, upload-time = "2025-10-06T05:37:23.5Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/3f/ab/945b2f32de889993b9c9133216c068b7fcf257d8595a0ac420ac8677cab0/frozenlist-1.8.0-cp314-cp314-win32.whl", hash = "sha256:bac9c42ba2ac65ddc115d930c78d24ab8d4f465fd3fc473cdedfccadb9429806", size = 40536, upload-time = "2025-10-06T05:37:25.581Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/59/ad/9caa9b9c836d9ad6f067157a531ac48b7d36499f5036d4141ce78c230b1b/frozenlist-1.8.0-cp314-cp314-win_amd64.whl", hash = "sha256:3e0761f4d1a44f1d1a47996511752cf3dcec5bbdd9cc2b4fe595caf97754b7a0", size = 44330, upload-time = "2025-10-06T05:37:26.928Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/82/13/e6950121764f2676f43534c555249f57030150260aee9dcf7d64efda11dd/frozenlist-1.8.0-cp314-cp314-win_arm64.whl", hash = "sha256:d1eaff1d00c7751b7c6662e9c5ba6eb2c17a2306ba5e2a37f24ddf3cc953402b", size = 40627, upload-time = "2025-10-06T05:37:28.075Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/c0/c7/43200656ecc4e02d3f8bc248df68256cd9572b3f0017f0a0c4e93440ae23/frozenlist-1.8.0-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:d3bb933317c52d7ea5004a1c442eef86f426886fba134ef8cf4226ea6ee1821d", size = 89238, upload-time = "2025-10-06T05:37:29.373Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/d1/29/55c5f0689b9c0fb765055629f472c0de484dcaf0acee2f7707266ae3583c/frozenlist-1.8.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:8009897cdef112072f93a0efdce29cd819e717fd2f649ee3016efd3cd885a7ed", size = 50738, upload-time = "2025-10-06T05:37:30.792Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/ba/7d/b7282a445956506fa11da8c2db7d276adcbf2b17d8bb8407a47685263f90/frozenlist-1.8.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:2c5dcbbc55383e5883246d11fd179782a9d07a986c40f49abe89ddf865913930", size = 51739, upload-time = "2025-10-06T05:37:32.127Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/62/1c/3d8622e60d0b767a5510d1d3cf21065b9db874696a51ea6d7a43180a259c/frozenlist-1.8.0-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:39ecbc32f1390387d2aa4f5a995e465e9e2f79ba3adcac92d68e3e0afae6657c", size = 284186, upload-time = "2025-10-06T05:37:33.21Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/2d/14/aa36d5f85a89679a85a1d44cd7a6657e0b1c75f61e7cad987b203d2daca8/frozenlist-1.8.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:92db2bf818d5cc8d9c1f1fc56b897662e24ea5adb36ad1f1d82875bd64e03c24", size = 292196, upload-time = "2025-10-06T05:37:36.107Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/05/23/6bde59eb55abd407d34f77d39a5126fb7b4f109a3f611d3929f14b700c66/frozenlist-1.8.0-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:2dc43a022e555de94c3b68a4ef0b11c4f747d12c024a520c7101709a2144fb37", size = 273830, upload-time = "2025-10-06T05:37:37.663Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/d2/3f/22cff331bfad7a8afa616289000ba793347fcd7bc275f3b28ecea2a27909/frozenlist-1.8.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:cb89a7f2de3602cfed448095bab3f178399646ab7c61454315089787df07733a", size = 294289, upload-time = "2025-10-06T05:37:39.261Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/a4/89/5b057c799de4838b6c69aa82b79705f2027615e01be996d2486a69ca99c4/frozenlist-1.8.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:33139dc858c580ea50e7e60a1b0ea003efa1fd42e6ec7fdbad78fff65fad2fd2", size = 300318, upload-time = "2025-10-06T05:37:43.213Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/30/de/2c22ab3eb2a8af6d69dc799e48455813bab3690c760de58e1bf43b36da3e/frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:168c0969a329b416119507ba30b9ea13688fafffac1b7822802537569a1cb0ef", size = 282814, upload-time = "2025-10-06T05:37:45.337Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/59/f7/970141a6a8dbd7f556d94977858cfb36fa9b66e0892c6dd780d2219d8cd8/frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:28bd570e8e189d7f7b001966435f9dac6718324b5be2990ac496cf1ea9ddb7fe", size = 291762, upload-time = "2025-10-06T05:37:46.657Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/c1/15/ca1adae83a719f82df9116d66f5bb28bb95557b3951903d39135620ef157/frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:b2a095d45c5d46e5e79ba1e5b9cb787f541a8dee0433836cea4b96a2c439dcd8", size = 289470, upload-time = "2025-10-06T05:37:47.946Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/ac/83/dca6dc53bf657d371fbc88ddeb21b79891e747189c5de990b9dfff2ccba1/frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:eab8145831a0d56ec9c4139b6c3e594c7a83c2c8be25d5bcf2d86136a532287a", size = 289042, upload-time = "2025-10-06T05:37:49.499Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/96/52/abddd34ca99be142f354398700536c5bd315880ed0a213812bc491cff5e4/frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:974b28cf63cc99dfb2188d8d222bc6843656188164848c4f679e63dae4b0708e", size = 283148, upload-time = "2025-10-06T05:37:50.745Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/af/d3/76bd4ed4317e7119c2b7f57c3f6934aba26d277acc6309f873341640e21f/frozenlist-1.8.0-cp314-cp314t-win32.whl", hash = "sha256:342c97bf697ac5480c0a7ec73cd700ecfa5a8a40ac923bd035484616efecc2df", size = 44676, upload-time = "2025-10-06T05:37:52.222Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/89/76/c615883b7b521ead2944bb3480398cbb07e12b7b4e4d073d3752eb721558/frozenlist-1.8.0-cp314-cp314t-win_amd64.whl", hash = "sha256:06be8f67f39c8b1dc671f5d83aaefd3358ae5cdcf8314552c57e7ed3e6475bdd", size = 49451, upload-time = "2025-10-06T05:37:53.425Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/e0/a3/5982da14e113d07b325230f95060e2169f5311b1017ea8af2a29b374c289/frozenlist-1.8.0-cp314-cp314t-win_arm64.whl", hash = "sha256:102e6314ca4da683dca92e3b1355490fed5f313b768500084fbe6371fddfdb79", size = 42507, upload-time = "2025-10-06T05:37:54.513Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/9a/9a/e35b4a917281c0b8419d4207f4334c8e8c5dbf4f3f5f9ada73958d937dcc/frozenlist-1.8.0-py3-none-any.whl", hash = "sha256:0c18a16eab41e82c295618a77502e17b195883241c563b00f0aa5106fc4eaa0d", size = 13409, upload-time = "2025-10-06T05:38:16.721Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "fsspec"
                                                                  +version = "2025.12.0"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/b6/27/954057b0d1f53f086f681755207dda6de6c660ce133c829158e8e8fe7895/fsspec-2025.12.0.tar.gz", hash = "sha256:c505de011584597b1060ff778bb664c1bc022e87921b0e4f10cc9c44f9635973", size = 309748, upload-time = "2025-12-03T15:23:42.687Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/51/c7/b64cae5dba3a1b138d7123ec36bb5ccd39d39939f18454407e5468f4763f/fsspec-2025.12.0-py3-none-any.whl", hash = "sha256:8bf1fe301b7d8acfa6e8571e3b1c3d158f909666642431cc78a1b7b4dbc5ec5b", size = 201422, upload-time = "2025-12-03T15:23:41.434Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "griffe"
                                                                  +version = "1.15.0"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +dependencies = [
                                                                  +    { name = "colorama" },
                                                                  +]
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/0d/0c/3a471b6e31951dce2360477420d0a8d1e00dea6cf33b70f3e8c3ab6e28e1/griffe-1.15.0.tar.gz", hash = "sha256:7726e3afd6f298fbc3696e67958803e7ac843c1cfe59734b6251a40cdbfb5eea", size = 424112, upload-time = "2025-11-10T15:03:15.52Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/9c/83/3b1d03d36f224edded98e9affd0467630fc09d766c0e56fb1498cbb04a9b/griffe-1.15.0-py3-none-any.whl", hash = "sha256:6f6762661949411031f5fcda9593f586e6ce8340f0ba88921a0f2ef7a81eb9a3", size = 150705, upload-time = "2025-11-10T15:03:13.549Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "h11"
                                                                  +version = "0.16.0"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/01/ee/02a2c011bdab74c6fb3c75474d40b3052059d95df7e73351460c8588d963/h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1", size = 101250, upload-time = "2025-04-24T03:35:25.427Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86", size = 37515, upload-time = "2025-04-24T03:35:24.344Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "hiredis"
                                                                  +version = "3.3.0"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/65/82/d2817ce0653628e0a0cb128533f6af0dd6318a49f3f3a6a7bd1f2f2154af/hiredis-3.3.0.tar.gz", hash = "sha256:105596aad9249634361815c574351f1bd50455dc23b537c2940066c4a9dea685", size = 89048, upload-time = "2025-10-14T16:33:34.263Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/48/1c/ed28ae5d704f5c7e85b946fa327f30d269e6272c847fef7e91ba5fc86193/hiredis-3.3.0-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:5b8e1d6a2277ec5b82af5dce11534d3ed5dffeb131fd9b210bc1940643b39b5f", size = 82026, upload-time = "2025-10-14T16:32:12.004Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/f4/9b/79f30c5c40e248291023b7412bfdef4ad9a8a92d9e9285d65d600817dac7/hiredis-3.3.0-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:c4981de4d335f996822419e8a8b3b87367fcef67dc5fb74d3bff4df9f6f17783", size = 46217, upload-time = "2025-10-14T16:32:13.133Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/e7/c3/02b9ed430ad9087aadd8afcdf616717452d16271b701fa47edfe257b681e/hiredis-3.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1706480a683e328ae9ba5d704629dee2298e75016aa0207e7067b9c40cecc271", size = 41858, upload-time = "2025-10-14T16:32:13.98Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/f1/98/b2a42878b82130a535c7aa20bc937ba2d07d72e9af3ad1ad93e837c419b5/hiredis-3.3.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0a95cef9989736ac313639f8f545b76b60b797e44e65834aabbb54e4fad8d6c8", size = 170195, upload-time = "2025-10-14T16:32:14.728Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/66/1d/9dcde7a75115d3601b016113d9b90300726fa8e48aacdd11bf01a453c145/hiredis-3.3.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ca2802934557ccc28a954414c245ba7ad904718e9712cb67c05152cf6b9dd0a3", size = 181808, upload-time = "2025-10-14T16:32:15.622Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/56/a1/60f6bda9b20b4e73c85f7f5f046bc2c154a5194fc94eb6861e1fd97ced52/hiredis-3.3.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:fe730716775f61e76d75810a38ee4c349d3af3896450f1525f5a4034cf8f2ed7", size = 180578, upload-time = "2025-10-14T16:32:16.514Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/d9/01/859d21de65085f323a701824e23ea3330a0ac05f8e184544d7aa5c26128d/hiredis-3.3.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:749faa69b1ce1f741f5eaf743435ac261a9262e2d2d66089192477e7708a9abc", size = 172508, upload-time = "2025-10-14T16:32:17.411Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/99/a8/28fd526e554c80853d0fbf57ef2a3235f00e4ed34ce0e622e05d27d0f788/hiredis-3.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:95c9427f2ac3f1dd016a3da4e1161fa9d82f221346c8f3fdd6f3f77d4e28946c", size = 166341, upload-time = "2025-10-14T16:32:18.561Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/f2/91/ded746b7d2914f557fbbf77be55e90d21f34ba758ae10db6591927c642c8/hiredis-3.3.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c863ee44fe7bff25e41f3a5105c936a63938b76299b802d758f40994ab340071", size = 176765, upload-time = "2025-10-14T16:32:19.491Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/d6/4c/04aa46ff386532cb5f08ee495c2bf07303e93c0acf2fa13850e031347372/hiredis-3.3.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:2213c7eb8ad5267434891f3241c7776e3bafd92b5933fc57d53d4456247dc542", size = 170312, upload-time = "2025-10-14T16:32:20.404Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/90/6e/67f9d481c63f542a9cf4c9f0ea4e5717db0312fb6f37fb1f78f3a66de93c/hiredis-3.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a172bae3e2837d74530cd60b06b141005075db1b814d966755977c69bd882ce8", size = 167965, upload-time = "2025-10-14T16:32:21.259Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/7a/df/dde65144d59c3c0d85e43255798f1fa0c48d413e668cfd92b3d9f87924ef/hiredis-3.3.0-cp312-cp312-win32.whl", hash = "sha256:cb91363b9fd6d41c80df9795e12fffbaf5c399819e6ae8120f414dedce6de068", size = 20533, upload-time = "2025-10-14T16:32:22.192Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/f5/a9/55a4ac9c16fdf32e92e9e22c49f61affe5135e177ca19b014484e28950f7/hiredis-3.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:04ec150e95eea3de9ff8bac754978aa17b8bf30a86d4ab2689862020945396b0", size = 22379, upload-time = "2025-10-14T16:32:22.916Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/6d/39/2b789ebadd1548ccb04a2c18fbc123746ad1a7e248b7f3f3cac618ca10a6/hiredis-3.3.0-cp313-cp313-macosx_10_15_universal2.whl", hash = "sha256:b7048b4ec0d5dddc8ddd03da603de0c4b43ef2540bf6e4c54f47d23e3480a4fa", size = 82035, upload-time = "2025-10-14T16:32:23.715Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/85/74/4066d9c1093be744158ede277f2a0a4e4cd0fefeaa525c79e2876e9e5c72/hiredis-3.3.0-cp313-cp313-macosx_10_15_x86_64.whl", hash = "sha256:e5f86ce5a779319c15567b79e0be806e8e92c18bb2ea9153e136312fafa4b7d6", size = 46219, upload-time = "2025-10-14T16:32:24.554Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/fa/3f/f9e0f6d632f399d95b3635703e1558ffaa2de3aea4cfcbc2d7832606ba43/hiredis-3.3.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:fbdb97a942e66016fff034df48a7a184e2b7dc69f14c4acd20772e156f20d04b", size = 41860, upload-time = "2025-10-14T16:32:25.356Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/4a/c5/b7dde5ec390dabd1cabe7b364a509c66d4e26de783b0b64cf1618f7149fc/hiredis-3.3.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b0fb4bea72fe45ff13e93ddd1352b43ff0749f9866263b5cca759a4c960c776f", size = 170094, upload-time = "2025-10-14T16:32:26.148Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/3e/d6/7f05c08ee74d41613be466935688068e07f7b6c55266784b5ace7b35b766/hiredis-3.3.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:85b9baf98050e8f43c2826ab46aaf775090d608217baf7af7882596aef74e7f9", size = 181746, upload-time = "2025-10-14T16:32:27.844Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/0e/d2/aaf9f8edab06fbf5b766e0cae3996324297c0516a91eb2ca3bd1959a0308/hiredis-3.3.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:69079fb0f0ebb61ba63340b9c4bce9388ad016092ca157e5772eb2818209d930", size = 180465, upload-time = "2025-10-14T16:32:29.185Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/8d/1e/93ded8b9b484519b211fc71746a231af98c98928e3ebebb9086ed20bb1ad/hiredis-3.3.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c17f77b79031ea4b0967d30255d2ae6e7df0603ee2426ad3274067f406938236", size = 172419, upload-time = "2025-10-14T16:32:30.059Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/68/13/02880458e02bbfcedcaabb8f7510f9dda1c89d7c1921b1bb28c22bb38cbf/hiredis-3.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:45d14f745fc177bc05fc24bdf20e2b515e9a068d3d4cce90a0fb78d04c9c9d9a", size = 166400, upload-time = "2025-10-14T16:32:31.173Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/11/60/896e03267670570f19f61dc65a2137fcb2b06e83ab0911d58eeec9f3cb88/hiredis-3.3.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:ba063fdf1eff6377a0c409609cbe890389aefddfec109c2d20fcc19cfdafe9da", size = 176845, upload-time = "2025-10-14T16:32:32.12Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/f1/90/a1d4bd0cdcf251fda72ac0bd932f547b48ad3420f89bb2ef91bf6a494534/hiredis-3.3.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:1799cc66353ad066bfdd410135c951959da9f16bcb757c845aab2f21fc4ef099", size = 170365, upload-time = "2025-10-14T16:32:33.035Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/f1/9a/7c98f7bb76bdb4a6a6003cf8209721f083e65d2eed2b514f4a5514bda665/hiredis-3.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:2cbf71a121996ffac82436b6153290815b746afb010cac19b3290a1644381b07", size = 168022, upload-time = "2025-10-14T16:32:34.81Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/0d/ca/672ee658ffe9525558615d955b554ecd36aa185acd4431ccc9701c655c9b/hiredis-3.3.0-cp313-cp313-win32.whl", hash = "sha256:a7cbbc6026bf03659f0b25e94bbf6e64f6c8c22f7b4bc52fe569d041de274194", size = 20533, upload-time = "2025-10-14T16:32:35.7Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/20/93/511fd94f6a7b6d72a4cf9c2b159bf3d780585a9a1dca52715dd463825299/hiredis-3.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:a8def89dd19d4e2e4482b7412d453dec4a5898954d9a210d7d05f60576cedef6", size = 22387, upload-time = "2025-10-14T16:32:36.441Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/aa/b3/b948ee76a6b2bc7e45249861646f91f29704f743b52565cf64cee9c4658b/hiredis-3.3.0-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:c135bda87211f7af9e2fd4e046ab433c576cd17b69e639a0f5bb2eed5e0e71a9", size = 82105, upload-time = "2025-10-14T16:32:37.204Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/a2/9b/4210f4ebfb3ab4ada964b8de08190f54cbac147198fb463cd3c111cc13e0/hiredis-3.3.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:2f855c678230aed6fc29b962ce1cc67e5858a785ef3a3fd6b15dece0487a2e60", size = 46237, upload-time = "2025-10-14T16:32:38.07Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/b3/7a/e38bfd7d04c05036b4ccc6f42b86b1032185cf6ae426e112a97551fece14/hiredis-3.3.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:4059c78a930cbb33c391452ccce75b137d6f89e2eebf6273d75dafc5c2143c03", size = 41894, upload-time = "2025-10-14T16:32:38.929Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/28/d3/eae43d9609c5d9a6effef0586ee47e13a0d84b44264b688d97a75cd17ee5/hiredis-3.3.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:334a3f1d14c253bb092e187736c3384203bd486b244e726319bbb3f7dffa4a20", size = 170486, upload-time = "2025-10-14T16:32:40.147Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/c3/fd/34d664554880b27741ab2916d66207357563b1639e2648685f4c84cfb755/hiredis-3.3.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:fd137b147235447b3d067ec952c5b9b95ca54b71837e1b38dbb2ec03b89f24fc", size = 182031, upload-time = "2025-10-14T16:32:41.06Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/08/a3/0c69fdde3f4155b9f7acc64ccffde46f312781469260061b3bbaa487fd34/hiredis-3.3.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:8f88f4f2aceb73329ece86a1cb0794fdbc8e6d614cb5ca2d1023c9b7eb432db8", size = 180542, upload-time = "2025-10-14T16:32:42.993Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/68/7a/ad5da4d7bc241e57c5b0c4fe95aa75d1f2116e6e6c51577394d773216e01/hiredis-3.3.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:550f4d1538822fc75ebf8cf63adc396b23d4958bdbbad424521f2c0e3dfcb169", size = 172353, upload-time = "2025-10-14T16:32:43.965Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/4b/dc/c46eace64eb047a5b31acd5e4b0dc6d2f0390a4a3f6d507442d9efa570ad/hiredis-3.3.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:54b14211fbd5930fc696f6fcd1f1f364c660970d61af065a80e48a1fa5464dd6", size = 166435, upload-time = "2025-10-14T16:32:44.97Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/4a/ac/ad13a714e27883a2e4113c980c94caf46b801b810de5622c40f8d3e8335f/hiredis-3.3.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:c9e96f63dbc489fc86f69951e9f83dadb9582271f64f6822c47dcffa6fac7e4a", size = 177218, upload-time = "2025-10-14T16:32:45.936Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/c2/38/268fabd85b225271fe1ba82cb4a484fcc1bf922493ff2c74b400f1a6f339/hiredis-3.3.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:106e99885d46684d62ab3ec1d6b01573cc0e0083ac295b11aaa56870b536c7ec", size = 170477, upload-time = "2025-10-14T16:32:46.898Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/20/6b/02bb8af810ea04247334ab7148acff7a61c08a8832830c6703f464be83a9/hiredis-3.3.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:087e2ef3206361281b1a658b5b4263572b6ba99465253e827796964208680459", size = 167915, upload-time = "2025-10-14T16:32:47.847Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/83/94/901fa817e667b2e69957626395e6dee416e31609dca738f28e6b545ca6c2/hiredis-3.3.0-cp314-cp314-win32.whl", hash = "sha256:80638ebeab1cefda9420e9fedc7920e1ec7b4f0513a6b23d58c9d13c882f8065", size = 21165, upload-time = "2025-10-14T16:32:50.753Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/b1/7e/4881b9c1d0b4cdaba11bd10e600e97863f977ea9d67c5988f7ec8cd363e5/hiredis-3.3.0-cp314-cp314-win_amd64.whl", hash = "sha256:a68aaf9ba024f4e28cf23df9196ff4e897bd7085872f3a30644dca07fa787816", size = 22996, upload-time = "2025-10-14T16:32:51.543Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/a7/b6/d7e6c17da032665a954a89c1e6ee3bd12cb51cd78c37527842b03519981d/hiredis-3.3.0-cp314-cp314t-macosx_10_15_universal2.whl", hash = "sha256:f7f80442a32ce51ee5d89aeb5a84ee56189a0e0e875f1a57bbf8d462555ae48f", size = 83034, upload-time = "2025-10-14T16:32:52.395Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/27/6c/6751b698060cdd1b2d8427702cff367c9ed7a1705bcf3792eb5b896f149b/hiredis-3.3.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:a1a67530da714954ed50579f4fe1ab0ddbac9c43643b1721c2cb226a50dde263", size = 46701, upload-time = "2025-10-14T16:32:53.572Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/ce/8e/20a5cf2c83c7a7e08c76b9abab113f99f71cd57468a9c7909737ce6e9bf8/hiredis-3.3.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:616868352e47ab355559adca30f4f3859f9db895b4e7bc71e2323409a2add751", size = 42381, upload-time = "2025-10-14T16:32:54.762Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/be/0a/547c29c06e8c9c337d0df3eec39da0cf1aad701daf8a9658dd37f25aca66/hiredis-3.3.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e799b79f3150083e9702fc37e6243c0bd47a443d6eae3f3077b0b3f510d6a145", size = 180313, upload-time = "2025-10-14T16:32:55.644Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/89/8a/488de5469e3d0921a1c425045bf00e983d48b2111a90e47cf5769eaa536c/hiredis-3.3.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:9ef1dfb0d2c92c3701655e2927e6bbe10c499aba632c7ea57b6392516df3864b", size = 190488, upload-time = "2025-10-14T16:32:56.649Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/b5/59/8493edc3eb9ae0dbea2b2230c2041a52bc03e390b02ffa3ac0bca2af9aea/hiredis-3.3.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:c290da6bc2a57e854c7da9956cd65013483ede935677e84560da3b848f253596", size = 189210, upload-time = "2025-10-14T16:32:57.759Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/f0/de/8c9a653922057b32fb1e2546ecd43ef44c9aa1a7cf460c87cae507eb2bc7/hiredis-3.3.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fd8c438d9e1728f0085bf9b3c9484d19ec31f41002311464e75b69550c32ffa8", size = 180972, upload-time = "2025-10-14T16:32:58.737Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/e4/a3/51e6e6afaef2990986d685ca6e254ffbd191f1635a59b2d06c9e5d10c8a2/hiredis-3.3.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:1bbc6b8a88bbe331e3ebf6685452cebca6dfe6d38a6d4efc5651d7e363ba28bd", size = 175315, upload-time = "2025-10-14T16:32:59.774Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/96/54/e436312feb97601f70f8b39263b8da5ac4a5d18305ebdfb08ad7621f6119/hiredis-3.3.0-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:55d8c18fe9a05496c5c04e6eccc695169d89bf358dff964bcad95696958ec05f", size = 185653, upload-time = "2025-10-14T16:33:00.749Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/ed/a3/88e66030d066337c6c0f883a912c6d4b2d6d7173490fbbc113a6cbe414ff/hiredis-3.3.0-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:4ddc79afa76b805d364e202a754666cb3c4d9c85153cbfed522871ff55827838", size = 179032, upload-time = "2025-10-14T16:33:01.711Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/bc/1f/fb7375467e9adaa371cd617c2984fefe44bdce73add4c70b8dd8cab1b33a/hiredis-3.3.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:8e8a4b8540581dcd1b2b25827a54cfd538e0afeaa1a0e3ca87ad7126965981cc", size = 176127, upload-time = "2025-10-14T16:33:02.793Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/66/14/0dc2b99209c400f3b8f24067273e9c3cb383d894e155830879108fb19e98/hiredis-3.3.0-cp314-cp314t-win32.whl", hash = "sha256:298593bb08487753b3afe6dc38bac2532e9bac8dcee8d992ef9977d539cc6776", size = 22024, upload-time = "2025-10-14T16:33:03.812Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/b2/2f/8a0befeed8bbe142d5a6cf3b51e8cbe019c32a64a596b0ebcbc007a8f8f1/hiredis-3.3.0-cp314-cp314t-win_amd64.whl", hash = "sha256:b442b6ab038a6f3b5109874d2514c4edf389d8d8b553f10f12654548808683bc", size = 23808, upload-time = "2025-10-14T16:33:04.965Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "httpcore"
                                                                  +version = "1.0.9"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +dependencies = [
                                                                  +    { name = "certifi" },
                                                                  +    { name = "h11" },
                                                                  +]
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/06/94/82699a10bca87a5556c9c59b5963f2d039dbd239f25bc2a63907a05a14cb/httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8", size = 85484, upload-time = "2025-04-24T22:06:22.219Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55", size = 78784, upload-time = "2025-04-24T22:06:20.566Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "httpx"
                                                                  +version = "0.28.1"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +dependencies = [
                                                                  +    { name = "anyio" },
                                                                  +    { name = "certifi" },
                                                                  +    { name = "httpcore" },
                                                                  +    { name = "idna" },
                                                                  +]
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/b1/df/48c586a5fe32a0f01324ee087459e112ebb7224f646c0b5023f5e79e9956/httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc", size = 141406, upload-time = "2024-12-06T15:37:23.222Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517, upload-time = "2024-12-06T15:37:21.509Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "idna"
                                                                  +version = "3.11"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/6f/6d/0703ccc57f3a7233505399edb88de3cbd678da106337b9fcde432b65ed60/idna-3.11.tar.gz", hash = "sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902", size = 194582, upload-time = "2025-10-12T14:55:20.501Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea", size = 71008, upload-time = "2025-10-12T14:55:18.883Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "importlib-metadata"
                                                                  +version = "8.7.1"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +dependencies = [
                                                                  +    { name = "zipp" },
                                                                  +]
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/f3/49/3b30cad09e7771a4982d9975a8cbf64f00d4a1ececb53297f1d9a7be1b10/importlib_metadata-8.7.1.tar.gz", hash = "sha256:49fef1ae6440c182052f407c8d34a68f72efc36db9ca90dc0113398f2fdde8bb", size = 57107, upload-time = "2025-12-21T10:00:19.278Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/fa/5e/f8e9a1d23b9c20a551a8a02ea3637b4642e22c2626e3a13a9a29cdea99eb/importlib_metadata-8.7.1-py3-none-any.whl", hash = "sha256:5a1f80bf1daa489495071efbb095d75a634cf28a8bc299581244063b53176151", size = 27865, upload-time = "2025-12-21T10:00:18.329Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "importlib-resources"
                                                                  +version = "6.5.2"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/cf/8c/f834fbf984f691b4f7ff60f50b514cc3de5cc08abfc3295564dd89c5e2e7/importlib_resources-6.5.2.tar.gz", hash = "sha256:185f87adef5bcc288449d98fb4fba07cea78bc036455dd44c5fc4a2fe78fed2c", size = 44693, upload-time = "2025-01-03T18:51:56.698Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/a4/ed/1f1afb2e9e7f38a545d628f864d562a5ae64fe6f7a10e28ffb9b185b4e89/importlib_resources-6.5.2-py3-none-any.whl", hash = "sha256:789cfdc3ed28c78b67a06acb8126751ced69a3d5f79c095a98298cd8a760ccec", size = 37461, upload-time = "2025-01-03T18:51:54.306Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "iniconfig"
                                                                  +version = "2.3.0"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/72/34/14ca021ce8e5dfedc35312d08ba8bf51fdd999c576889fc2c24cb97f4f10/iniconfig-2.3.0.tar.gz", hash = "sha256:c76315c77db068650d49c5b56314774a7804df16fee4402c1f19d6d15d8c4730", size = 20503, upload-time = "2025-10-18T21:55:43.219Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/cb/b1/3846dd7f199d53cb17f49cba7e651e9ce294d8497c8c150530ed11865bb8/iniconfig-2.3.0-py3-none-any.whl", hash = "sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12", size = 7484, upload-time = "2025-10-18T21:55:41.639Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "ipykernel"
                                                                  +version = "7.1.0"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +dependencies = [
                                                                  +    { name = "appnope", marker = "sys_platform == 'darwin'" },
                                                                  +    { name = "comm" },
                                                                  +    { name = "debugpy" },
                                                                  +    { name = "ipython" },
                                                                  +    { name = "jupyter-client" },
                                                                  +    { name = "jupyter-core" },
                                                                  +    { name = "matplotlib-inline" },
                                                                  +    { name = "nest-asyncio" },
                                                                  +    { name = "packaging" },
                                                                  +    { name = "psutil" },
                                                                  +    { name = "pyzmq" },
                                                                  +    { name = "tornado" },
                                                                  +    { name = "traitlets" },
                                                                  +]
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/b9/a4/4948be6eb88628505b83a1f2f40d90254cab66abf2043b3c40fa07dfce0f/ipykernel-7.1.0.tar.gz", hash = "sha256:58a3fc88533d5930c3546dc7eac66c6d288acde4f801e2001e65edc5dc9cf0db", size = 174579, upload-time = "2025-10-27T09:46:39.471Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/a3/17/20c2552266728ceba271967b87919664ecc0e33efca29c3efc6baf88c5f9/ipykernel-7.1.0-py3-none-any.whl", hash = "sha256:763b5ec6c5b7776f6a8d7ce09b267693b4e5ce75cb50ae696aaefb3c85e1ea4c", size = 117968, upload-time = "2025-10-27T09:46:37.805Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "ipython"
                                                                  +version = "9.7.0"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +dependencies = [
                                                                  +    { name = "colorama", marker = "sys_platform == 'win32'" },
                                                                  +    { name = "decorator" },
                                                                  +    { name = "ipython-pygments-lexers" },
                                                                  +    { name = "jedi" },
                                                                  +    { name = "matplotlib-inline" },
                                                                  +    { name = "pexpect", marker = "sys_platform != 'emscripten' and sys_platform != 'win32'" },
                                                                  +    { name = "prompt-toolkit" },
                                                                  +    { name = "pygments" },
                                                                  +    { name = "stack-data" },
                                                                  +    { name = "traitlets" },
                                                                  +]
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/29/e6/48c74d54039241a456add616464ea28c6ebf782e4110d419411b83dae06f/ipython-9.7.0.tar.gz", hash = "sha256:5f6de88c905a566c6a9d6c400a8fed54a638e1f7543d17aae2551133216b1e4e", size = 4422115, upload-time = "2025-11-05T12:18:54.646Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/05/aa/62893d6a591d337aa59dcc4c6f6c842f1fe20cd72c8c5c1f980255243252/ipython-9.7.0-py3-none-any.whl", hash = "sha256:bce8ac85eb9521adc94e1845b4c03d88365fd6ac2f4908ec4ed1eb1b0a065f9f", size = 618911, upload-time = "2025-11-05T12:18:52.484Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "ipython-pygments-lexers"
                                                                  +version = "1.1.1"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +dependencies = [
                                                                  +    { name = "pygments" },
                                                                  +]
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/ef/4c/5dd1d8af08107f88c7f741ead7a40854b8ac24ddf9ae850afbcf698aa552/ipython_pygments_lexers-1.1.1.tar.gz", hash = "sha256:09c0138009e56b6854f9535736f4171d855c8c08a563a0dcd8022f78355c7e81", size = 8393, upload-time = "2025-01-17T11:24:34.505Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/d9/33/1f075bf72b0b747cb3288d011319aaf64083cf2efef8354174e3ed4540e2/ipython_pygments_lexers-1.1.1-py3-none-any.whl", hash = "sha256:a9462224a505ade19a605f71f8fa63c2048833ce50abc86768a0d81d876dc81c", size = 8074, upload-time = "2025-01-17T11:24:33.271Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "ipywidgets"
                                                                  +version = "8.1.8"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +dependencies = [
                                                                  +    { name = "comm" },
                                                                  +    { name = "ipython" },
                                                                  +    { name = "jupyterlab-widgets" },
                                                                  +    { name = "traitlets" },
                                                                  +    { name = "widgetsnbextension" },
                                                                  +]
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/4c/ae/c5ce1edc1afe042eadb445e95b0671b03cee61895264357956e61c0d2ac0/ipywidgets-8.1.8.tar.gz", hash = "sha256:61f969306b95f85fba6b6986b7fe45d73124d1d9e3023a8068710d47a22ea668", size = 116739, upload-time = "2025-11-01T21:18:12.393Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/56/6d/0d9848617b9f753b87f214f1c682592f7ca42de085f564352f10f0843026/ipywidgets-8.1.8-py3-none-any.whl", hash = "sha256:ecaca67aed704a338f88f67b1181b58f821ab5dc89c1f0f5ef99db43c1c2921e", size = 139808, upload-time = "2025-11-01T21:18:10.956Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "isoduration"
                                                                  +version = "20.11.0"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +dependencies = [
                                                                  +    { name = "arrow" },
                                                                  +]
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/7c/1a/3c8edc664e06e6bd06cce40c6b22da5f1429aa4224d0c590f3be21c91ead/isoduration-20.11.0.tar.gz", hash = "sha256:ac2f9015137935279eac671f94f89eb00584f940f5dc49462a0c4ee692ba1bd9", size = 11649, upload-time = "2020-11-01T11:00:00.312Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/7b/55/e5326141505c5d5e34c5e0935d2908a74e4561eca44108fbfb9c13d2911a/isoduration-20.11.0-py3-none-any.whl", hash = "sha256:b2904c2a4228c3d44f409c8ae8e2370eb21a26f7ac2ec5446df141dde3452042", size = 11321, upload-time = "2020-11-01T10:59:58.02Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "jedi"
                                                                  +version = "0.19.2"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +dependencies = [
                                                                  +    { name = "parso" },
                                                                  +]
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/72/3a/79a912fbd4d8dd6fbb02bf69afd3bb72cf0c729bb3063c6f4498603db17a/jedi-0.19.2.tar.gz", hash = "sha256:4770dc3de41bde3966b02eb84fbcf557fb33cce26ad23da12c742fb50ecb11f0", size = 1231287, upload-time = "2024-11-11T01:41:42.873Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/c0/5a/9cac0c82afec3d09ccd97c8b6502d48f165f9124db81b4bcb90b4af974ee/jedi-0.19.2-py2.py3-none-any.whl", hash = "sha256:a8ef22bde8490f57fe5c7681a3c83cb58874daf72b4784de3cce5b6ef6edb5b9", size = 1572278, upload-time = "2024-11-11T01:41:40.175Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "jinja2"
                                                                  +version = "3.1.6"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +dependencies = [
                                                                  +    { name = "markupsafe" },
                                                                  +]
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/df/bf/f7da0350254c0ed7c72f3e33cef02e048281fec7ecec5f032d4aac52226b/jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d", size = 245115, upload-time = "2025-03-05T20:05:02.478Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67", size = 134899, upload-time = "2025-03-05T20:05:00.369Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "jmespath"
                                                                  +version = "1.0.1"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/00/2a/e867e8531cf3e36b41201936b7fa7ba7b5702dbef42922193f05c8976cd6/jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe", size = 25843, upload-time = "2022-06-17T18:00:12.224Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/31/b4/b9b800c45527aadd64d5b442f9b932b00648617eb5d63d2c7a6587b7cafc/jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980", size = 20256, upload-time = "2022-06-17T18:00:10.251Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "json5"
                                                                  +version = "0.12.1"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/12/ae/929aee9619e9eba9015207a9d2c1c54db18311da7eb4dcf6d41ad6f0eb67/json5-0.12.1.tar.gz", hash = "sha256:b2743e77b3242f8d03c143dd975a6ec7c52e2f2afe76ed934e53503dd4ad4990", size = 52191, upload-time = "2025-08-12T19:47:42.583Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/85/e2/05328bd2621be49a6fed9e3030b1e51a2d04537d3f816d211b9cc53c5262/json5-0.12.1-py3-none-any.whl", hash = "sha256:d9c9b3bc34a5f54d43c35e11ef7cb87d8bdd098c6ace87117a7b7e83e705c1d5", size = 36119, upload-time = "2025-08-12T19:47:41.131Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "jsonpointer"
                                                                  +version = "3.0.0"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/6a/0a/eebeb1fa92507ea94016a2a790b93c2ae41a7e18778f85471dc54475ed25/jsonpointer-3.0.0.tar.gz", hash = "sha256:2b2d729f2091522d61c3b31f82e11870f60b68f43fbc705cb76bf4b832af59ef", size = 9114, upload-time = "2024-06-10T19:24:42.462Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/71/92/5e77f98553e9e75130c78900d000368476aed74276eb8ae8796f65f00918/jsonpointer-3.0.0-py2.py3-none-any.whl", hash = "sha256:13e088adc14fca8b6aa8177c044e12701e6ad4b28ff10e65f2267a90109c9942", size = 7595, upload-time = "2024-06-10T19:24:40.698Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "jsonschema"
                                                                  +version = "4.25.1"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +dependencies = [
                                                                  +    { name = "attrs" },
                                                                  +    { name = "jsonschema-specifications" },
                                                                  +    { name = "referencing" },
                                                                  +    { name = "rpds-py" },
                                                                  +]
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/74/69/f7185de793a29082a9f3c7728268ffb31cb5095131a9c139a74078e27336/jsonschema-4.25.1.tar.gz", hash = "sha256:e4a9655ce0da0c0b67a085847e00a3a51449e1157f4f75e9fb5aa545e122eb85", size = 357342, upload-time = "2025-08-18T17:03:50.038Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/bf/9c/8c95d856233c1f82500c2450b8c68576b4cf1c871db3afac5c34ff84e6fd/jsonschema-4.25.1-py3-none-any.whl", hash = "sha256:3fba0169e345c7175110351d456342c364814cfcf3b964ba4587f22915230a63", size = 90040, upload-time = "2025-08-18T17:03:48.373Z" },
                                                                  +]
                                                                  +
                                                                  +[package.optional-dependencies]
                                                                  +format-nongpl = [
                                                                  +    { name = "fqdn" },
                                                                  +    { name = "idna" },
                                                                  +    { name = "isoduration" },
                                                                  +    { name = "jsonpointer" },
                                                                  +    { name = "rfc3339-validator" },
                                                                  +    { name = "rfc3986-validator" },
                                                                  +    { name = "rfc3987-syntax" },
                                                                  +    { name = "uri-template" },
                                                                  +    { name = "webcolors" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "jsonschema-specifications"
                                                                  +version = "2025.9.1"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +dependencies = [
                                                                  +    { name = "referencing" },
                                                                  +]
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/19/74/a633ee74eb36c44aa6d1095e7cc5569bebf04342ee146178e2d36600708b/jsonschema_specifications-2025.9.1.tar.gz", hash = "sha256:b540987f239e745613c7a9176f3edb72b832a4ac465cf02712288397832b5e8d", size = 32855, upload-time = "2025-09-08T01:34:59.186Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/41/45/1a4ed80516f02155c51f51e8cedb3c1902296743db0bbc66608a0db2814f/jsonschema_specifications-2025.9.1-py3-none-any.whl", hash = "sha256:98802fee3a11ee76ecaca44429fda8a41bff98b00a0f2838151b113f210cc6fe", size = 18437, upload-time = "2025-09-08T01:34:57.871Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "jupyter"
                                                                  +version = "1.1.1"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +dependencies = [
                                                                  +    { name = "ipykernel" },
                                                                  +    { name = "ipywidgets" },
                                                                  +    { name = "jupyter-console" },
                                                                  +    { name = "jupyterlab" },
                                                                  +    { name = "nbconvert" },
                                                                  +    { name = "notebook" },
                                                                  +]
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/58/f3/af28ea964ab8bc1e472dba2e82627d36d470c51f5cd38c37502eeffaa25e/jupyter-1.1.1.tar.gz", hash = "sha256:d55467bceabdea49d7e3624af7e33d59c37fff53ed3a350e1ac957bed731de7a", size = 5714959, upload-time = "2024-08-30T07:15:48.299Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/38/64/285f20a31679bf547b75602702f7800e74dbabae36ef324f716c02804753/jupyter-1.1.1-py2.py3-none-any.whl", hash = "sha256:7a59533c22af65439b24bbe60373a4e95af8f16ac65a6c00820ad378e3f7cc83", size = 2657, upload-time = "2024-08-30T07:15:47.045Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "jupyter-client"
                                                                  +version = "8.6.3"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +dependencies = [
                                                                  +    { name = "jupyter-core" },
                                                                  +    { name = "python-dateutil" },
                                                                  +    { name = "pyzmq" },
                                                                  +    { name = "tornado" },
                                                                  +    { name = "traitlets" },
                                                                  +]
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/71/22/bf9f12fdaeae18019a468b68952a60fe6dbab5d67cd2a103cac7659b41ca/jupyter_client-8.6.3.tar.gz", hash = "sha256:35b3a0947c4a6e9d589eb97d7d4cd5e90f910ee73101611f01283732bd6d9419", size = 342019, upload-time = "2024-09-17T10:44:17.613Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/11/85/b0394e0b6fcccd2c1eeefc230978a6f8cb0c5df1e4cd3e7625735a0d7d1e/jupyter_client-8.6.3-py3-none-any.whl", hash = "sha256:e8a19cc986cc45905ac3362915f410f3af85424b4c0905e94fa5f2cb08e8f23f", size = 106105, upload-time = "2024-09-17T10:44:15.218Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "jupyter-console"
                                                                  +version = "6.6.3"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +dependencies = [
                                                                  +    { name = "ipykernel" },
                                                                  +    { name = "ipython" },
                                                                  +    { name = "jupyter-client" },
                                                                  +    { name = "jupyter-core" },
                                                                  +    { name = "prompt-toolkit" },
                                                                  +    { name = "pygments" },
                                                                  +    { name = "pyzmq" },
                                                                  +    { name = "traitlets" },
                                                                  +]
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/bd/2d/e2fd31e2fc41c14e2bcb6c976ab732597e907523f6b2420305f9fc7fdbdb/jupyter_console-6.6.3.tar.gz", hash = "sha256:566a4bf31c87adbfadf22cdf846e3069b59a71ed5da71d6ba4d8aaad14a53539", size = 34363, upload-time = "2023-03-06T14:13:31.02Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/ca/77/71d78d58f15c22db16328a476426f7ac4a60d3a5a7ba3b9627ee2f7903d4/jupyter_console-6.6.3-py3-none-any.whl", hash = "sha256:309d33409fcc92ffdad25f0bcdf9a4a9daa61b6f341177570fdac03de5352485", size = 24510, upload-time = "2023-03-06T14:13:28.229Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "jupyter-core"
                                                                  +version = "5.9.1"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +dependencies = [
                                                                  +    { name = "platformdirs" },
                                                                  +    { name = "traitlets" },
                                                                  +]
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/02/49/9d1284d0dc65e2c757b74c6687b6d319b02f822ad039e5c512df9194d9dd/jupyter_core-5.9.1.tar.gz", hash = "sha256:4d09aaff303b9566c3ce657f580bd089ff5c91f5f89cf7d8846c3cdf465b5508", size = 89814, upload-time = "2025-10-16T19:19:18.444Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/e7/e7/80988e32bf6f73919a113473a604f5a8f09094de312b9d52b79c2df7612b/jupyter_core-5.9.1-py3-none-any.whl", hash = "sha256:ebf87fdc6073d142e114c72c9e29a9d7ca03fad818c5d300ce2adc1fb0743407", size = 29032, upload-time = "2025-10-16T19:19:16.783Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "jupyter-events"
                                                                  +version = "0.12.0"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +dependencies = [
                                                                  +    { name = "jsonschema", extra = ["format-nongpl"] },
                                                                  +    { name = "packaging" },
                                                                  +    { name = "python-json-logger" },
                                                                  +    { name = "pyyaml" },
                                                                  +    { name = "referencing" },
                                                                  +    { name = "rfc3339-validator" },
                                                                  +    { name = "rfc3986-validator" },
                                                                  +    { name = "traitlets" },
                                                                  +]
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/9d/c3/306d090461e4cf3cd91eceaff84bede12a8e52cd821c2d20c9a4fd728385/jupyter_events-0.12.0.tar.gz", hash = "sha256:fc3fce98865f6784c9cd0a56a20644fc6098f21c8c33834a8d9fe383c17e554b", size = 62196, upload-time = "2025-02-03T17:23:41.485Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/e2/48/577993f1f99c552f18a0428731a755e06171f9902fa118c379eb7c04ea22/jupyter_events-0.12.0-py3-none-any.whl", hash = "sha256:6464b2fa5ad10451c3d35fabc75eab39556ae1e2853ad0c0cc31b656731a97fb", size = 19430, upload-time = "2025-02-03T17:23:38.643Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "jupyter-lsp"
                                                                  +version = "2.3.0"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +dependencies = [
                                                                  +    { name = "jupyter-server" },
                                                                  +]
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/eb/5a/9066c9f8e94ee517133cd98dba393459a16cd48bba71a82f16a65415206c/jupyter_lsp-2.3.0.tar.gz", hash = "sha256:458aa59339dc868fb784d73364f17dbce8836e906cd75fd471a325cba02e0245", size = 54823, upload-time = "2025-08-27T17:47:34.671Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/1a/60/1f6cee0c46263de1173894f0fafcb3475ded276c472c14d25e0280c18d6d/jupyter_lsp-2.3.0-py3-none-any.whl", hash = "sha256:e914a3cb2addf48b1c7710914771aaf1819d46b2e5a79b0f917b5478ec93f34f", size = 76687, upload-time = "2025-08-27T17:47:33.15Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "jupyter-server"
                                                                  +version = "2.17.0"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +dependencies = [
                                                                  +    { name = "anyio" },
                                                                  +    { name = "argon2-cffi" },
                                                                  +    { name = "jinja2" },
                                                                  +    { name = "jupyter-client" },
                                                                  +    { name = "jupyter-core" },
                                                                  +    { name = "jupyter-events" },
                                                                  +    { name = "jupyter-server-terminals" },
                                                                  +    { name = "nbconvert" },
                                                                  +    { name = "nbformat" },
                                                                  +    { name = "packaging" },
                                                                  +    { name = "prometheus-client" },
                                                                  +    { name = "pywinpty", marker = "os_name == 'nt'" },
                                                                  +    { name = "pyzmq" },
                                                                  +    { name = "send2trash" },
                                                                  +    { name = "terminado" },
                                                                  +    { name = "tornado" },
                                                                  +    { name = "traitlets" },
                                                                  +    { name = "websocket-client" },
                                                                  +]
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/5b/ac/e040ec363d7b6b1f11304cc9f209dac4517ece5d5e01821366b924a64a50/jupyter_server-2.17.0.tar.gz", hash = "sha256:c38ea898566964c888b4772ae1ed58eca84592e88251d2cfc4d171f81f7e99d5", size = 731949, upload-time = "2025-08-21T14:42:54.042Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/92/80/a24767e6ca280f5a49525d987bf3e4d7552bf67c8be07e8ccf20271f8568/jupyter_server-2.17.0-py3-none-any.whl", hash = "sha256:e8cb9c7db4251f51ed307e329b81b72ccf2056ff82d50524debde1ee1870e13f", size = 388221, upload-time = "2025-08-21T14:42:52.034Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "jupyter-server-terminals"
                                                                  +version = "0.5.3"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +dependencies = [
                                                                  +    { name = "pywinpty", marker = "os_name == 'nt'" },
                                                                  +    { name = "terminado" },
                                                                  +]
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/fc/d5/562469734f476159e99a55426d697cbf8e7eb5efe89fb0e0b4f83a3d3459/jupyter_server_terminals-0.5.3.tar.gz", hash = "sha256:5ae0295167220e9ace0edcfdb212afd2b01ee8d179fe6f23c899590e9b8a5269", size = 31430, upload-time = "2024-03-12T14:37:03.049Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/07/2d/2b32cdbe8d2a602f697a649798554e4f072115438e92249624e532e8aca6/jupyter_server_terminals-0.5.3-py3-none-any.whl", hash = "sha256:41ee0d7dc0ebf2809c668e0fc726dfaf258fcd3e769568996ca731b6194ae9aa", size = 13656, upload-time = "2024-03-12T14:37:00.708Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "jupyterlab"
                                                                  +version = "4.5.0"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +dependencies = [
                                                                  +    { name = "async-lru" },
                                                                  +    { name = "httpx" },
                                                                  +    { name = "ipykernel" },
                                                                  +    { name = "jinja2" },
                                                                  +    { name = "jupyter-core" },
                                                                  +    { name = "jupyter-lsp" },
                                                                  +    { name = "jupyter-server" },
                                                                  +    { name = "jupyterlab-server" },
                                                                  +    { name = "notebook-shim" },
                                                                  +    { name = "packaging" },
                                                                  +    { name = "setuptools" },
                                                                  +    { name = "tornado" },
                                                                  +    { name = "traitlets" },
                                                                  +]
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/df/e5/4fa382a796a6d8e2cd867816b64f1ff27f906e43a7a83ad9eb389e448cd8/jupyterlab-4.5.0.tar.gz", hash = "sha256:aec33d6d8f1225b495ee2cf20f0514f45e6df8e360bdd7ac9bace0b7ac5177ea", size = 23989880, upload-time = "2025-11-18T13:19:00.365Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/6c/1e/5a4d5498eba382fee667ed797cf64ae5d1b13b04356df62f067f48bb0f61/jupyterlab-4.5.0-py3-none-any.whl", hash = "sha256:88e157c75c1afff64c7dc4b801ec471450b922a4eae4305211ddd40da8201c8a", size = 12380641, upload-time = "2025-11-18T13:18:56.252Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "jupyterlab-pygments"
                                                                  +version = "0.3.0"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/90/51/9187be60d989df97f5f0aba133fa54e7300f17616e065d1ada7d7646b6d6/jupyterlab_pygments-0.3.0.tar.gz", hash = "sha256:721aca4d9029252b11cfa9d185e5b5af4d54772bb8072f9b7036f4170054d35d", size = 512900, upload-time = "2023-11-23T09:26:37.44Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/b1/dd/ead9d8ea85bf202d90cc513b533f9c363121c7792674f78e0d8a854b63b4/jupyterlab_pygments-0.3.0-py3-none-any.whl", hash = "sha256:841a89020971da1d8693f1a99997aefc5dc424bb1b251fd6322462a1b8842780", size = 15884, upload-time = "2023-11-23T09:26:34.325Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "jupyterlab-server"
                                                                  +version = "2.28.0"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +dependencies = [
                                                                  +    { name = "babel" },
                                                                  +    { name = "jinja2" },
                                                                  +    { name = "json5" },
                                                                  +    { name = "jsonschema" },
                                                                  +    { name = "jupyter-server" },
                                                                  +    { name = "packaging" },
                                                                  +    { name = "requests" },
                                                                  +]
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/d6/2c/90153f189e421e93c4bb4f9e3f59802a1f01abd2ac5cf40b152d7f735232/jupyterlab_server-2.28.0.tar.gz", hash = "sha256:35baa81898b15f93573e2deca50d11ac0ae407ebb688299d3a5213265033712c", size = 76996, upload-time = "2025-10-22T13:59:18.37Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/e0/07/a000fe835f76b7e1143242ab1122e6362ef1c03f23f83a045c38859c2ae0/jupyterlab_server-2.28.0-py3-none-any.whl", hash = "sha256:e4355b148fdcf34d312bbbc80f22467d6d20460e8b8736bf235577dd18506968", size = 59830, upload-time = "2025-10-22T13:59:16.767Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "jupyterlab-widgets"
                                                                  +version = "3.0.16"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/26/2d/ef58fed122b268c69c0aa099da20bc67657cdfb2e222688d5731bd5b971d/jupyterlab_widgets-3.0.16.tar.gz", hash = "sha256:423da05071d55cf27a9e602216d35a3a65a3e41cdf9c5d3b643b814ce38c19e0", size = 897423, upload-time = "2025-11-01T21:11:29.724Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/ab/b5/36c712098e6191d1b4e349304ef73a8d06aed77e56ceaac8c0a306c7bda1/jupyterlab_widgets-3.0.16-py3-none-any.whl", hash = "sha256:45fa36d9c6422cf2559198e4db481aa243c7a32d9926b500781c830c80f7ecf8", size = 914926, upload-time = "2025-11-01T21:11:28.008Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "lark"
                                                                  +version = "1.3.1"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/da/34/28fff3ab31ccff1fd4f6c7c7b0ceb2b6968d8ea4950663eadcb5720591a0/lark-1.3.1.tar.gz", hash = "sha256:b426a7a6d6d53189d318f2b6236ab5d6429eaf09259f1ca33eb716eed10d2905", size = 382732, upload-time = "2025-10-27T18:25:56.653Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/82/3d/14ce75ef66813643812f3093ab17e46d3a206942ce7376d31ec2d36229e7/lark-1.3.1-py3-none-any.whl", hash = "sha256:c629b661023a014c37da873b4ff58a817398d12635d3bbb2c5a03be7fe5d1e12", size = 113151, upload-time = "2025-10-27T18:25:54.882Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "libipld"
                                                                  +version = "3.3.2"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/83/2b/4e84e033268d2717c692e5034e016b1d82501736cd297586fd1c7378ccd5/libipld-3.3.2.tar.gz", hash = "sha256:7e85ccd9136110e63943d95232b193c893e369c406273d235160e5cc4b39c9ce", size = 4401259, upload-time = "2025-12-05T13:00:20.34Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/3d/0b/f65e7d56d0dec2804c1508aef4cf5d3a775273a090ae3047123f6f3e0f63/libipld-3.3.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:3f033e98c9e95e8448c97bbc904271908076974d790a895abade2ae89433715e", size = 269020, upload-time = "2025-12-05T12:58:26.503Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/19/20/01a3be66e8945aaef9959ce80a07bf959e31b2bd2216bd199b24b463235a/libipld-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:88ac549eb6c56287785ad20d0e7785d3e8b153b6a322fd5d7edf0e7fda2b182e", size = 260450, upload-time = "2025-12-05T12:58:27.735Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/af/06/a052e57bc99ec592d4b40c641d492f5fb225d25cc17f9edbf4f5918d7ff4/libipld-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:627035693460bae559d2e7f46bc577a27504d6e38e8715fcf9a8d905f6b1c72d", size = 280170, upload-time = "2025-12-05T12:58:28.977Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/eb/34/f20ff8a1b28a76d28f20895b1cb7d88422946e6ff6d8bc3d26a0b444e990/libipld-3.3.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:36be4ce9cb417eedec253eda9f55b92f29a35cbfcb24d108b496c72934fea7a2", size = 290219, upload-time = "2025-12-05T12:58:30.376Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/bb/0c/253c1d433e01c95d70c1b146e065fd5a3e1284ed0072f082603b5daf9223/libipld-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:908630dc28b16a517cf323293f0843f481b0872649cba7d4cfdbc6eb258f5674", size = 315833, upload-time = "2025-12-05T12:58:31.61Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/72/4a/2b8da906680e7379b31e1b31a4e49d90725a767e53510eb88f85f91e71c6/libipld-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2ac45e3aef416fe2eccbe84e562d81714416790bfd0756a1aa49ba895d4c7010", size = 330068, upload-time = "2025-12-05T12:58:32.94Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/0b/73/be4031e3e1f839c286a6d9277fcacd756160a18009aa649adee308531698/libipld-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3344f30d47dcab9cba41dd8f2243874af91939e38e3c31f20d586383ca74296e", size = 283716, upload-time = "2025-12-05T12:58:34.166Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/8f/f2/35ebdb7b53cc4a97a2a8d580d5c302bf30a66d918273a0d01c3cd77b9336/libipld-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4443d047fd1a9679534a87a6ee35c3a10793d4453801281341bb1e8390087c69", size = 309913, upload-time = "2025-12-05T12:58:35.392Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/9d/d7/a1ffdb1b2986e60dd59d094c86e5bb318739c6d709b9e8af255667b7c578/libipld-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:37ea7cb7afb94277e4e095bcc0ae888ed4b6e0fe8082c41dccd6e9487ccfd729", size = 463850, upload-time = "2025-12-05T12:58:36.702Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/1d/7d/440e372c3b8070cbf9200e1ddf3dff7409bcbc9243aade08e99c9e845e90/libipld-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:634d176664cf295360712157b5c5a83539da2f4416f3e0491340064d49e74fd8", size = 460370, upload-time = "2025-12-05T12:58:38.032Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/86/3e/cfcbbe21b30752482afa22fd528635a96901b39e517a10b73fc422f3d29b/libipld-3.3.2-cp312-cp312-win32.whl", hash = "sha256:071de5acf902e9a21d761572755afc8403cbaadd4b8199e7504ad52ee45b6b5e", size = 159380, upload-time = "2025-12-05T12:58:39.266Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/af/b5/b1cbc3347cf831c0599bb9b5579ed286939455d11d6f70110a3b8fb7d695/libipld-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:e35a8735b8a4bdd09b9edfbf1ae36e9ba9a804de50c99352c9a06aa3da109a62", size = 158896, upload-time = "2025-12-05T12:58:40.457Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/c2/cd/4ac32a0297c1d91d7147178927144dcb4456c35076388efb7c7f76e90695/libipld-3.3.2-cp312-cp312-win_arm64.whl", hash = "sha256:36fe9cd1b5a75a315cab30091579242d05df39692f773f7d8221250503753e3a", size = 149432, upload-time = "2025-12-05T12:58:41.691Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/67/a6/2bf577bde352fdb81ebe2e271e542b85f1aeae630405cae1b9d07a97b5e9/libipld-3.3.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:63bc6858d73c324e29d74155bdb339e14330a88bb1a8cc8fdc295048337dca09", size = 269326, upload-time = "2025-12-05T12:58:42.967Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/cc/83/850a0bb214c31c128447e29cdbea816225ee2c8fbb397a8c865f895198e4/libipld-3.3.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4140f030eb3cfff17d04b9481f13aaed0b2910d1371fe7489394120ed1d09ae5", size = 260709, upload-time = "2025-12-05T12:58:44.232Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/73/f8/0c02a2acb246603f5351d0a71055d0c835bc0bc5332c5ca5d29a1d95b04c/libipld-3.3.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a48bc2f7845825143a36f6a305680823a2816488593024803064d0803e3cee35", size = 280309, upload-time = "2025-12-05T12:58:46.137Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/f4/2e/ca50530aed1911d99a730f30ab73e7731da8299a933b909a96fcdbb1baf6/libipld-3.3.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e7627f371682160cae818f817eb846bc8c267a5daa028748a7c73103d8df00eb", size = 290446, upload-time = "2025-12-05T12:58:47.49Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/68/09/dd0f39cf78dbc7f5f2ca1208fc9ff284b56c2b90edf3dbf98c4b36491b6c/libipld-3.3.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1a7de390a3eb897d3194f6c96067c21338fbe6e0fc1145ab6b51af276aa7a08e", size = 316193, upload-time = "2025-12-05T12:58:49.057Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/bf/75/ca6fe1673c80f7f4164edf9647dd2cb622455a73890e96648c44c361c918/libipld-3.3.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:196a8fcd86ae0c8096cea85ff308edf315d77fbb677ef3dd9eff0be9da526499", size = 330556, upload-time = "2025-12-05T12:58:50.471Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/bd/41/aff762ccf5a80b66a911c576afcd850f0d64cb43d51cb63c29429dc68230/libipld-3.3.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b040dab7eb04b0ff730e68840f40eb225c9f14e73ad21238b76c7b8ded3ad99d", size = 283970, upload-time = "2025-12-05T12:58:52.131Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/2e/56/3a19a6067bde8827146cd771583e8930cf952709f036328579747647f38f/libipld-3.3.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8d7cd1e7e88b0fbc8f4aa267bdea2d10452c9dd0e1aafa82a5e0751427f222b0", size = 309885, upload-time = "2025-12-05T12:58:53.406Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/de/9b/0b4ee60ede82cdd301e2266a8172e8ee6f1b40c7dbd797510e632314ddf6/libipld-3.3.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:76731ebebd824fa45e51cc85506b108aa5da7322e43864909895f1779e9e4b41", size = 464028, upload-time = "2025-12-05T12:58:54.755Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/9d/c2/8edf65cf2c98bfbf6535b65f4bcc461ecec65ae6b9e3fb5a4308b9a5fb7a/libipld-3.3.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:7b8e7100bffbe579b7c92a3c6a8852ce333e0de171e696a2063e1e39ec9cc50a", size = 460526, upload-time = "2025-12-05T12:58:56.231Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/17/3f/d6d2aa42f07855be6b7e1fb43d76e39945469fc54fe9366bf8c9a81ca38e/libipld-3.3.2-cp313-cp313-win32.whl", hash = "sha256:06f766cec75f3d78339caa3ce3c6977e290e1a97f37e5f4ba358da2e77340196", size = 159501, upload-time = "2025-12-05T12:58:57.482Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/12/2a/83f634329f1d1912e5d37aec717396c76ef689fa8c8997b16cf0866a1985/libipld-3.3.2-cp313-cp313-win_amd64.whl", hash = "sha256:8be484f1dc5525453e17f07f02202180c708213f2b6ea06d3b9247a5702e0229", size = 159090, upload-time = "2025-12-05T12:58:58.628Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/d5/f4/5b55acce9f3626f8cbd54163f22a0917430d7307bf56fd30d88df7a0a897/libipld-3.3.2-cp313-cp313-win_arm64.whl", hash = "sha256:4446cae7584a446b58de66942f89f155d95c2cbfb9ad215af359086824d4e3b9", size = 149497, upload-time = "2025-12-05T12:59:00.191Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/de/d6/9ab52adf13ee501b50624ef1265657aa30b3267998dfadcb44d77bbeef42/libipld-3.3.2-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:5947e99b40e923170094a3313c9f3629c6ed475465ba95eadce6cdcf08f1f65a", size = 268909, upload-time = "2025-12-05T12:59:02.485Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/c2/12/d6f04fb3d6911a276940c89b5ad3e6168d79fda9ae79a812d4da91c433d6/libipld-3.3.2-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:f46179c722baf74c627c01c0bf85be7fcbde66bbf7c5f8c1bbb57bd3a17b861b", size = 261052, upload-time = "2025-12-05T12:59:03.829Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/d8/23/6cade33d39f00eb71fde1c8fe6f73c5db5274ef8abeac3d2e6d989e65718/libipld-3.3.2-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e3e9be4bdeb90dbc537a53f8d06e8b2c703f4b7868f9316958e1bbde526a143", size = 280280, upload-time = "2025-12-05T12:59:05.13Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/2c/42/50445b6c1c418a3514feb7d267d308e9fb9fe473fbbfaa205bc288ffe5ed/libipld-3.3.2-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b155c02626b194439f4b519a53985aedc8637ae56cf640ea6acf6172a37465de", size = 290306, upload-time = "2025-12-05T12:59:06.372Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/bf/b1/7c197e21f1635ba31b2f4e893d3368598a48d990cebc4308ba496bad1409/libipld-3.3.2-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9a1d84c630961cff188deaa2129c86d69f5779c8d02046fbe0c629ef162bc3df", size = 315801, upload-time = "2025-12-05T12:59:07.918Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/83/df/51a549e3017cc496a80852063124793007cb9b4cf2cae2e8a99f5c3dd814/libipld-3.3.2-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5393886a7e387751904681ecfa7e5912471b46043f044baa041a2b4772e4f839", size = 330420, upload-time = "2025-12-05T12:59:09.185Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/2e/f8/84107ad6431311283dadf697fd238ea271e0af1068a0d13e574be5027f32/libipld-3.3.2-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:44ca1ba44cb801686557e9544d248e013a2d5d1ab9fed796f090bb0d51d8f4ef", size = 283791, upload-time = "2025-12-05T12:59:10.481Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/35/c5/e3c5116b66383f7e54b9d1feb6d6e254a383311a4cce2940942f07d45893/libipld-3.3.2-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fd0877ef4a1bd6e42ba52659769b5b766583c67b3cfb4e7143f9d10b81fb7a74", size = 309401, upload-time = "2025-12-05T12:59:11.711Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/bd/b5/b9345d47569806e6f0041d339c9a1ec0be765fd8a3588308a7a40c383dd9/libipld-3.3.2-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:91b02da059a6ae7f783efa826f640ab1ca5eb5dd370bfd3f41071693a363c4fb", size = 463929, upload-time = "2025-12-05T12:59:13.344Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/92/4b/ae985a308191771e5a9e8e3108a3a4ed7090147e21a7cda0c0e345adc22a/libipld-3.3.2-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:95a2c4f507c88c01a797ec97ce10603bea684c03208227703e007485dc631971", size = 460308, upload-time = "2025-12-05T12:59:14.702Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/5c/d6/98aafc9721dd239e578e2826cbb1e9ef438d76c0ec125bce64346e439041/libipld-3.3.2-cp314-cp314-win32.whl", hash = "sha256:5a50cbf5b3b73164fbb88169573ed3e824024c12fbc5f9efd87fb5c8f236ccc1", size = 159315, upload-time = "2025-12-05T12:59:16.004Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/e2/9c/6b7b91a417162743d9ea109e142fe485b2f6dafadb276c6e5a393f772715/libipld-3.3.2-cp314-cp314-win_amd64.whl", hash = "sha256:c1f3ed8f70b215a294b5c6830e91af48acde96b3c8a6cae13304291f8240b939", size = 159168, upload-time = "2025-12-05T12:59:17.308Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/22/19/bb42dc53bb8855c1f40b4a431ed3cb2df257bd5a6af61842626712c83073/libipld-3.3.2-cp314-cp314-win_arm64.whl", hash = "sha256:08261503b7307c6d9acbd3b2a221da9294b457204dcefce446f627893abb077e", size = 149324, upload-time = "2025-12-05T12:59:18.815Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "markdown-it-py"
                                                                  +version = "4.0.0"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +dependencies = [
                                                                  +    { name = "mdurl" },
                                                                  +]
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/5b/f5/4ec618ed16cc4f8fb3b701563655a69816155e79e24a17b651541804721d/markdown_it_py-4.0.0.tar.gz", hash = "sha256:cb0a2b4aa34f932c007117b194e945bd74e0ec24133ceb5bac59009cda1cb9f3", size = 73070, upload-time = "2025-08-11T12:57:52.854Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/94/54/e7d793b573f298e1c9013b8c4dade17d481164aa517d1d7148619c2cedbf/markdown_it_py-4.0.0-py3-none-any.whl", hash = "sha256:87327c59b172c5011896038353a81343b6754500a08cd7a4973bb48c6d578147", size = 87321, upload-time = "2025-08-11T12:57:51.923Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "markupsafe"
                                                                  +version = "3.0.3"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/7e/99/7690b6d4034fffd95959cbe0c02de8deb3098cc577c67bb6a24fe5d7caa7/markupsafe-3.0.3.tar.gz", hash = "sha256:722695808f4b6457b320fdc131280796bdceb04ab50fe1795cd540799ebe1698", size = 80313, upload-time = "2025-09-27T18:37:40.426Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/5a/72/147da192e38635ada20e0a2e1a51cf8823d2119ce8883f7053879c2199b5/markupsafe-3.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d53197da72cc091b024dd97249dfc7794d6a56530370992a5e1a08983ad9230e", size = 11615, upload-time = "2025-09-27T18:36:30.854Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/9a/81/7e4e08678a1f98521201c3079f77db69fb552acd56067661f8c2f534a718/markupsafe-3.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1872df69a4de6aead3491198eaf13810b565bdbeec3ae2dc8780f14458ec73ce", size = 12020, upload-time = "2025-09-27T18:36:31.971Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/1e/2c/799f4742efc39633a1b54a92eec4082e4f815314869865d876824c257c1e/markupsafe-3.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3a7e8ae81ae39e62a41ec302f972ba6ae23a5c5396c8e60113e9066ef893da0d", size = 24332, upload-time = "2025-09-27T18:36:32.813Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/3c/2e/8d0c2ab90a8c1d9a24f0399058ab8519a3279d1bd4289511d74e909f060e/markupsafe-3.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d6dd0be5b5b189d31db7cda48b91d7e0a9795f31430b7f271219ab30f1d3ac9d", size = 22947, upload-time = "2025-09-27T18:36:33.86Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/2c/54/887f3092a85238093a0b2154bd629c89444f395618842e8b0c41783898ea/markupsafe-3.0.3-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:94c6f0bb423f739146aec64595853541634bde58b2135f27f61c1ffd1cd4d16a", size = 21962, upload-time = "2025-09-27T18:36:35.099Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/c9/2f/336b8c7b6f4a4d95e91119dc8521402461b74a485558d8f238a68312f11c/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:be8813b57049a7dc738189df53d69395eba14fb99345e0a5994914a3864c8a4b", size = 23760, upload-time = "2025-09-27T18:36:36.001Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/32/43/67935f2b7e4982ffb50a4d169b724d74b62a3964bc1a9a527f5ac4f1ee2b/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:83891d0e9fb81a825d9a6d61e3f07550ca70a076484292a70fde82c4b807286f", size = 21529, upload-time = "2025-09-27T18:36:36.906Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/89/e0/4486f11e51bbba8b0c041098859e869e304d1c261e59244baa3d295d47b7/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:77f0643abe7495da77fb436f50f8dab76dbc6e5fd25d39589a0f1fe6548bfa2b", size = 23015, upload-time = "2025-09-27T18:36:37.868Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/2f/e1/78ee7a023dac597a5825441ebd17170785a9dab23de95d2c7508ade94e0e/markupsafe-3.0.3-cp312-cp312-win32.whl", hash = "sha256:d88b440e37a16e651bda4c7c2b930eb586fd15ca7406cb39e211fcff3bf3017d", size = 14540, upload-time = "2025-09-27T18:36:38.761Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/aa/5b/bec5aa9bbbb2c946ca2733ef9c4ca91c91b6a24580193e891b5f7dbe8e1e/markupsafe-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:26a5784ded40c9e318cfc2bdb30fe164bdb8665ded9cd64d500a34fb42067b1c", size = 15105, upload-time = "2025-09-27T18:36:39.701Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/e5/f1/216fc1bbfd74011693a4fd837e7026152e89c4bcf3e77b6692fba9923123/markupsafe-3.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:35add3b638a5d900e807944a078b51922212fb3dedb01633a8defc4b01a3c85f", size = 13906, upload-time = "2025-09-27T18:36:40.689Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/38/2f/907b9c7bbba283e68f20259574b13d005c121a0fa4c175f9bed27c4597ff/markupsafe-3.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e1cf1972137e83c5d4c136c43ced9ac51d0e124706ee1c8aa8532c1287fa8795", size = 11622, upload-time = "2025-09-27T18:36:41.777Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/9c/d9/5f7756922cdd676869eca1c4e3c0cd0df60ed30199ffd775e319089cb3ed/markupsafe-3.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:116bb52f642a37c115f517494ea5feb03889e04df47eeff5b130b1808ce7c219", size = 12029, upload-time = "2025-09-27T18:36:43.257Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/00/07/575a68c754943058c78f30db02ee03a64b3c638586fba6a6dd56830b30a3/markupsafe-3.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:133a43e73a802c5562be9bbcd03d090aa5a1fe899db609c29e8c8d815c5f6de6", size = 24374, upload-time = "2025-09-27T18:36:44.508Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/a9/21/9b05698b46f218fc0e118e1f8168395c65c8a2c750ae2bab54fc4bd4e0e8/markupsafe-3.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ccfcd093f13f0f0b7fdd0f198b90053bf7b2f02a3927a30e63f3ccc9df56b676", size = 22980, upload-time = "2025-09-27T18:36:45.385Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/7f/71/544260864f893f18b6827315b988c146b559391e6e7e8f7252839b1b846a/markupsafe-3.0.3-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:509fa21c6deb7a7a273d629cf5ec029bc209d1a51178615ddf718f5918992ab9", size = 21990, upload-time = "2025-09-27T18:36:46.916Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/c2/28/b50fc2f74d1ad761af2f5dcce7492648b983d00a65b8c0e0cb457c82ebbe/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a4afe79fb3de0b7097d81da19090f4df4f8d3a2b3adaa8764138aac2e44f3af1", size = 23784, upload-time = "2025-09-27T18:36:47.884Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/ed/76/104b2aa106a208da8b17a2fb72e033a5a9d7073c68f7e508b94916ed47a9/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:795e7751525cae078558e679d646ae45574b47ed6e7771863fcc079a6171a0fc", size = 21588, upload-time = "2025-09-27T18:36:48.82Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/b5/99/16a5eb2d140087ebd97180d95249b00a03aa87e29cc224056274f2e45fd6/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8485f406a96febb5140bfeca44a73e3ce5116b2501ac54fe953e488fb1d03b12", size = 23041, upload-time = "2025-09-27T18:36:49.797Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/19/bc/e7140ed90c5d61d77cea142eed9f9c303f4c4806f60a1044c13e3f1471d0/markupsafe-3.0.3-cp313-cp313-win32.whl", hash = "sha256:bdd37121970bfd8be76c5fb069c7751683bdf373db1ed6c010162b2a130248ed", size = 14543, upload-time = "2025-09-27T18:36:51.584Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/05/73/c4abe620b841b6b791f2edc248f556900667a5a1cf023a6646967ae98335/markupsafe-3.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:9a1abfdc021a164803f4d485104931fb8f8c1efd55bc6b748d2f5774e78b62c5", size = 15113, upload-time = "2025-09-27T18:36:52.537Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/f0/3a/fa34a0f7cfef23cf9500d68cb7c32dd64ffd58a12b09225fb03dd37d5b80/markupsafe-3.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:7e68f88e5b8799aa49c85cd116c932a1ac15caaa3f5db09087854d218359e485", size = 13911, upload-time = "2025-09-27T18:36:53.513Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/e4/d7/e05cd7efe43a88a17a37b3ae96e79a19e846f3f456fe79c57ca61356ef01/markupsafe-3.0.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:218551f6df4868a8d527e3062d0fb968682fe92054e89978594c28e642c43a73", size = 11658, upload-time = "2025-09-27T18:36:54.819Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/99/9e/e412117548182ce2148bdeacdda3bb494260c0b0184360fe0d56389b523b/markupsafe-3.0.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3524b778fe5cfb3452a09d31e7b5adefeea8c5be1d43c4f810ba09f2ceb29d37", size = 12066, upload-time = "2025-09-27T18:36:55.714Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/bc/e6/fa0ffcda717ef64a5108eaa7b4f5ed28d56122c9a6d70ab8b72f9f715c80/markupsafe-3.0.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4e885a3d1efa2eadc93c894a21770e4bc67899e3543680313b09f139e149ab19", size = 25639, upload-time = "2025-09-27T18:36:56.908Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/96/ec/2102e881fe9d25fc16cb4b25d5f5cde50970967ffa5dddafdb771237062d/markupsafe-3.0.3-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8709b08f4a89aa7586de0aadc8da56180242ee0ada3999749b183aa23df95025", size = 23569, upload-time = "2025-09-27T18:36:57.913Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/4b/30/6f2fce1f1f205fc9323255b216ca8a235b15860c34b6798f810f05828e32/markupsafe-3.0.3-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:b8512a91625c9b3da6f127803b166b629725e68af71f8184ae7e7d54686a56d6", size = 23284, upload-time = "2025-09-27T18:36:58.833Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/58/47/4a0ccea4ab9f5dcb6f79c0236d954acb382202721e704223a8aafa38b5c8/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9b79b7a16f7fedff2495d684f2b59b0457c3b493778c9eed31111be64d58279f", size = 24801, upload-time = "2025-09-27T18:36:59.739Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/6a/70/3780e9b72180b6fecb83a4814d84c3bf4b4ae4bf0b19c27196104149734c/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:12c63dfb4a98206f045aa9563db46507995f7ef6d83b2f68eda65c307c6829eb", size = 22769, upload-time = "2025-09-27T18:37:00.719Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/98/c5/c03c7f4125180fc215220c035beac6b9cb684bc7a067c84fc69414d315f5/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:8f71bc33915be5186016f675cd83a1e08523649b0e33efdb898db577ef5bb009", size = 23642, upload-time = "2025-09-27T18:37:01.673Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/80/d6/2d1b89f6ca4bff1036499b1e29a1d02d282259f3681540e16563f27ebc23/markupsafe-3.0.3-cp313-cp313t-win32.whl", hash = "sha256:69c0b73548bc525c8cb9a251cddf1931d1db4d2258e9599c28c07ef3580ef354", size = 14612, upload-time = "2025-09-27T18:37:02.639Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/2b/98/e48a4bfba0a0ffcf9925fe2d69240bfaa19c6f7507b8cd09c70684a53c1e/markupsafe-3.0.3-cp313-cp313t-win_amd64.whl", hash = "sha256:1b4b79e8ebf6b55351f0d91fe80f893b4743f104bff22e90697db1590e47a218", size = 15200, upload-time = "2025-09-27T18:37:03.582Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/0e/72/e3cc540f351f316e9ed0f092757459afbc595824ca724cbc5a5d4263713f/markupsafe-3.0.3-cp313-cp313t-win_arm64.whl", hash = "sha256:ad2cf8aa28b8c020ab2fc8287b0f823d0a7d8630784c31e9ee5edea20f406287", size = 13973, upload-time = "2025-09-27T18:37:04.929Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/33/8a/8e42d4838cd89b7dde187011e97fe6c3af66d8c044997d2183fbd6d31352/markupsafe-3.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:eaa9599de571d72e2daf60164784109f19978b327a3910d3e9de8c97b5b70cfe", size = 11619, upload-time = "2025-09-27T18:37:06.342Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/b5/64/7660f8a4a8e53c924d0fa05dc3a55c9cee10bbd82b11c5afb27d44b096ce/markupsafe-3.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c47a551199eb8eb2121d4f0f15ae0f923d31350ab9280078d1e5f12b249e0026", size = 12029, upload-time = "2025-09-27T18:37:07.213Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/da/ef/e648bfd021127bef5fa12e1720ffed0c6cbb8310c8d9bea7266337ff06de/markupsafe-3.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f34c41761022dd093b4b6896d4810782ffbabe30f2d443ff5f083e0cbbb8c737", size = 24408, upload-time = "2025-09-27T18:37:09.572Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/41/3c/a36c2450754618e62008bf7435ccb0f88053e07592e6028a34776213d877/markupsafe-3.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:457a69a9577064c05a97c41f4e65148652db078a3a509039e64d3467b9e7ef97", size = 23005, upload-time = "2025-09-27T18:37:10.58Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/bc/20/b7fdf89a8456b099837cd1dc21974632a02a999ec9bf7ca3e490aacd98e7/markupsafe-3.0.3-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e8afc3f2ccfa24215f8cb28dcf43f0113ac3c37c2f0f0806d8c70e4228c5cf4d", size = 22048, upload-time = "2025-09-27T18:37:11.547Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/9a/a7/591f592afdc734f47db08a75793a55d7fbcc6902a723ae4cfbab61010cc5/markupsafe-3.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ec15a59cf5af7be74194f7ab02d0f59a62bdcf1a537677ce67a2537c9b87fcda", size = 23821, upload-time = "2025-09-27T18:37:12.48Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/7d/33/45b24e4f44195b26521bc6f1a82197118f74df348556594bd2262bda1038/markupsafe-3.0.3-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:0eb9ff8191e8498cca014656ae6b8d61f39da5f95b488805da4bb029cccbfbaf", size = 21606, upload-time = "2025-09-27T18:37:13.485Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/ff/0e/53dfaca23a69fbfbbf17a4b64072090e70717344c52eaaaa9c5ddff1e5f0/markupsafe-3.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:2713baf880df847f2bece4230d4d094280f4e67b1e813eec43b4c0e144a34ffe", size = 23043, upload-time = "2025-09-27T18:37:14.408Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/46/11/f333a06fc16236d5238bfe74daccbca41459dcd8d1fa952e8fbd5dccfb70/markupsafe-3.0.3-cp314-cp314-win32.whl", hash = "sha256:729586769a26dbceff69f7a7dbbf59ab6572b99d94576a5592625d5b411576b9", size = 14747, upload-time = "2025-09-27T18:37:15.36Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/28/52/182836104b33b444e400b14f797212f720cbc9ed6ba34c800639d154e821/markupsafe-3.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:bdc919ead48f234740ad807933cdf545180bfbe9342c2bb451556db2ed958581", size = 15341, upload-time = "2025-09-27T18:37:16.496Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/6f/18/acf23e91bd94fd7b3031558b1f013adfa21a8e407a3fdb32745538730382/markupsafe-3.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:5a7d5dc5140555cf21a6fefbdbf8723f06fcd2f63ef108f2854de715e4422cb4", size = 14073, upload-time = "2025-09-27T18:37:17.476Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/3c/f0/57689aa4076e1b43b15fdfa646b04653969d50cf30c32a102762be2485da/markupsafe-3.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:1353ef0c1b138e1907ae78e2f6c63ff67501122006b0f9abad68fda5f4ffc6ab", size = 11661, upload-time = "2025-09-27T18:37:18.453Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/89/c3/2e67a7ca217c6912985ec766c6393b636fb0c2344443ff9d91404dc4c79f/markupsafe-3.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:1085e7fbddd3be5f89cc898938f42c0b3c711fdcb37d75221de2666af647c175", size = 12069, upload-time = "2025-09-27T18:37:19.332Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/f0/00/be561dce4e6ca66b15276e184ce4b8aec61fe83662cce2f7d72bd3249d28/markupsafe-3.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1b52b4fb9df4eb9ae465f8d0c228a00624de2334f216f178a995ccdcf82c4634", size = 25670, upload-time = "2025-09-27T18:37:20.245Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/50/09/c419f6f5a92e5fadde27efd190eca90f05e1261b10dbd8cbcb39cd8ea1dc/markupsafe-3.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fed51ac40f757d41b7c48425901843666a6677e3e8eb0abcff09e4ba6e664f50", size = 23598, upload-time = "2025-09-27T18:37:21.177Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/22/44/a0681611106e0b2921b3033fc19bc53323e0b50bc70cffdd19f7d679bb66/markupsafe-3.0.3-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:f190daf01f13c72eac4efd5c430a8de82489d9cff23c364c3ea822545032993e", size = 23261, upload-time = "2025-09-27T18:37:22.167Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/5f/57/1b0b3f100259dc9fffe780cfb60d4be71375510e435efec3d116b6436d43/markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:e56b7d45a839a697b5eb268c82a71bd8c7f6c94d6fd50c3d577fa39a9f1409f5", size = 24835, upload-time = "2025-09-27T18:37:23.296Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/26/6a/4bf6d0c97c4920f1597cc14dd720705eca0bf7c787aebc6bb4d1bead5388/markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:f3e98bb3798ead92273dc0e5fd0f31ade220f59a266ffd8a4f6065e0a3ce0523", size = 22733, upload-time = "2025-09-27T18:37:24.237Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/14/c7/ca723101509b518797fedc2fdf79ba57f886b4aca8a7d31857ba3ee8281f/markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:5678211cb9333a6468fb8d8be0305520aa073f50d17f089b5b4b477ea6e67fdc", size = 23672, upload-time = "2025-09-27T18:37:25.271Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/fb/df/5bd7a48c256faecd1d36edc13133e51397e41b73bb77e1a69deab746ebac/markupsafe-3.0.3-cp314-cp314t-win32.whl", hash = "sha256:915c04ba3851909ce68ccc2b8e2cd691618c4dc4c4232fb7982bca3f41fd8c3d", size = 14819, upload-time = "2025-09-27T18:37:26.285Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/1a/8a/0402ba61a2f16038b48b39bccca271134be00c5c9f0f623208399333c448/markupsafe-3.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4faffd047e07c38848ce017e8725090413cd80cbc23d86e55c587bf979e579c9", size = 15426, upload-time = "2025-09-27T18:37:27.316Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/70/bc/6f1c2f612465f5fa89b95bead1f44dcb607670fd42891d8fdcd5d039f4f4/markupsafe-3.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:32001d6a8fc98c8cb5c947787c5d08b0a50663d139f1305bac5885d98d9b40fa", size = 14146, upload-time = "2025-09-27T18:37:28.327Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "matplotlib-inline"
                                                                  +version = "0.2.1"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +dependencies = [
                                                                  +    { name = "traitlets" },
                                                                  +]
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/c7/74/97e72a36efd4ae2bccb3463284300f8953f199b5ffbc04cbbb0ec78f74b1/matplotlib_inline-0.2.1.tar.gz", hash = "sha256:e1ee949c340d771fc39e241ea75683deb94762c8fa5f2927ec57c83c4dffa9fe", size = 8110, upload-time = "2025-10-23T09:00:22.126Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/af/33/ee4519fa02ed11a94aef9559552f3b17bb863f2ecfe1a35dc7f548cde231/matplotlib_inline-0.2.1-py3-none-any.whl", hash = "sha256:d56ce5156ba6085e00a9d54fead6ed29a9c47e215cd1bba2e976ef39f5710a76", size = 9516, upload-time = "2025-10-23T09:00:20.675Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "mdurl"
                                                                  +version = "0.1.2"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/d6/54/cfe61301667036ec958cb99bd3efefba235e65cdeb9c84d24a8293ba1d90/mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba", size = 8729, upload-time = "2022-08-14T12:40:10.846Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979, upload-time = "2022-08-14T12:40:09.779Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "mistune"
                                                                  +version = "3.1.4"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/d7/02/a7fb8b21d4d55ac93cdcde9d3638da5dd0ebdd3a4fed76c7725e10b81cbe/mistune-3.1.4.tar.gz", hash = "sha256:b5a7f801d389f724ec702840c11d8fc48f2b33519102fc7ee739e8177b672164", size = 94588, upload-time = "2025-08-29T07:20:43.594Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/7a/f0/8282d9641415e9e33df173516226b404d367a0fc55e1a60424a152913abc/mistune-3.1.4-py3-none-any.whl", hash = "sha256:93691da911e5d9d2e23bc54472892aff676df27a75274962ff9edc210364266d", size = 53481, upload-time = "2025-08-29T07:20:42.218Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "more-itertools"
                                                                  +version = "10.8.0"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/ea/5d/38b681d3fce7a266dd9ab73c66959406d565b3e85f21d5e66e1181d93721/more_itertools-10.8.0.tar.gz", hash = "sha256:f638ddf8a1a0d134181275fb5d58b086ead7c6a72429ad725c67503f13ba30bd", size = 137431, upload-time = "2025-09-02T15:23:11.018Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/a4/8e/469e5a4a2f5855992e425f3cb33804cc07bf18d48f2db061aec61ce50270/more_itertools-10.8.0-py3-none-any.whl", hash = "sha256:52d4362373dcf7c52546bc4af9a86ee7c4579df9a8dc268be0a2f949d376cc9b", size = 69667, upload-time = "2025-09-02T15:23:09.635Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "moto"
                                                                  +version = "5.1.19"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +dependencies = [
                                                                  +    { name = "boto3" },
                                                                  +    { name = "botocore" },
                                                                  +    { name = "cryptography" },
                                                                  +    { name = "jinja2" },
                                                                  +    { name = "python-dateutil" },
                                                                  +    { name = "requests" },
                                                                  +    { name = "responses" },
                                                                  +    { name = "werkzeug" },
                                                                  +    { name = "xmltodict" },
                                                                  +]
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/45/eb/100a04d1b49859d05a9c701815117cd31bc436c3d9e959d399d9d2ff7e9c/moto-5.1.19.tar.gz", hash = "sha256:a13423e402366b6affab07ed28e1df5f3fcc54ef68fc8d83dc9f824da7a4024e", size = 8361592, upload-time = "2025-12-28T20:14:57.211Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/89/07/5ca7ba79615b88ee2325224894667f263b992d266a52b83d215c4b3caa39/moto-5.1.19-py3-none-any.whl", hash = "sha256:7adb0caacf0e2d0dbb09550bcb49a7f158ee7c460a09cb54d4599a9a94cfef70", size = 6451569, upload-time = "2025-12-28T20:14:54.701Z" },
                                                                  +]
                                                                  +
                                                                  +[package.optional-dependencies]
                                                                  +s3 = [
                                                                  +    { name = "py-partiql-parser" },
                                                                  +    { name = "pyyaml" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "msgpack"
                                                                  +version = "1.1.2"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/4d/f2/bfb55a6236ed8725a96b0aa3acbd0ec17588e6a2c3b62a93eb513ed8783f/msgpack-1.1.2.tar.gz", hash = "sha256:3b60763c1373dd60f398488069bcdc703cd08a711477b5d480eecc9f9626f47e", size = 173581, upload-time = "2025-10-08T09:15:56.596Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/ad/bd/8b0d01c756203fbab65d265859749860682ccd2a59594609aeec3a144efa/msgpack-1.1.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:70a0dff9d1f8da25179ffcf880e10cf1aad55fdb63cd59c9a49a1b82290062aa", size = 81939, upload-time = "2025-10-08T09:15:01.472Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/34/68/ba4f155f793a74c1483d4bdef136e1023f7bcba557f0db4ef3db3c665cf1/msgpack-1.1.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:446abdd8b94b55c800ac34b102dffd2f6aa0ce643c55dfc017ad89347db3dbdb", size = 85064, upload-time = "2025-10-08T09:15:03.764Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/f2/60/a064b0345fc36c4c3d2c743c82d9100c40388d77f0b48b2f04d6041dbec1/msgpack-1.1.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c63eea553c69ab05b6747901b97d620bb2a690633c77f23feb0c6a947a8a7b8f", size = 417131, upload-time = "2025-10-08T09:15:05.136Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/65/92/a5100f7185a800a5d29f8d14041f61475b9de465ffcc0f3b9fba606e4505/msgpack-1.1.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:372839311ccf6bdaf39b00b61288e0557916c3729529b301c52c2d88842add42", size = 427556, upload-time = "2025-10-08T09:15:06.837Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/f5/87/ffe21d1bf7d9991354ad93949286f643b2bb6ddbeab66373922b44c3b8cc/msgpack-1.1.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2929af52106ca73fcb28576218476ffbb531a036c2adbcf54a3664de124303e9", size = 404920, upload-time = "2025-10-08T09:15:08.179Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/ff/41/8543ed2b8604f7c0d89ce066f42007faac1eaa7d79a81555f206a5cdb889/msgpack-1.1.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:be52a8fc79e45b0364210eef5234a7cf8d330836d0a64dfbb878efa903d84620", size = 415013, upload-time = "2025-10-08T09:15:09.83Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/41/0d/2ddfaa8b7e1cee6c490d46cb0a39742b19e2481600a7a0e96537e9c22f43/msgpack-1.1.2-cp312-cp312-win32.whl", hash = "sha256:1fff3d825d7859ac888b0fbda39a42d59193543920eda9d9bea44d958a878029", size = 65096, upload-time = "2025-10-08T09:15:11.11Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/8c/ec/d431eb7941fb55a31dd6ca3404d41fbb52d99172df2e7707754488390910/msgpack-1.1.2-cp312-cp312-win_amd64.whl", hash = "sha256:1de460f0403172cff81169a30b9a92b260cb809c4cb7e2fc79ae8d0510c78b6b", size = 72708, upload-time = "2025-10-08T09:15:12.554Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/c5/31/5b1a1f70eb0e87d1678e9624908f86317787b536060641d6798e3cf70ace/msgpack-1.1.2-cp312-cp312-win_arm64.whl", hash = "sha256:be5980f3ee0e6bd44f3a9e9dea01054f175b50c3e6cdb692bc9424c0bbb8bf69", size = 64119, upload-time = "2025-10-08T09:15:13.589Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/6b/31/b46518ecc604d7edf3a4f94cb3bf021fc62aa301f0cb849936968164ef23/msgpack-1.1.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:4efd7b5979ccb539c221a4c4e16aac1a533efc97f3b759bb5a5ac9f6d10383bf", size = 81212, upload-time = "2025-10-08T09:15:14.552Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/92/dc/c385f38f2c2433333345a82926c6bfa5ecfff3ef787201614317b58dd8be/msgpack-1.1.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:42eefe2c3e2af97ed470eec850facbe1b5ad1d6eacdbadc42ec98e7dcf68b4b7", size = 84315, upload-time = "2025-10-08T09:15:15.543Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/d3/68/93180dce57f684a61a88a45ed13047558ded2be46f03acb8dec6d7c513af/msgpack-1.1.2-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1fdf7d83102bf09e7ce3357de96c59b627395352a4024f6e2458501f158bf999", size = 412721, upload-time = "2025-10-08T09:15:16.567Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/5d/ba/459f18c16f2b3fc1a1ca871f72f07d70c07bf768ad0a507a698b8052ac58/msgpack-1.1.2-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fac4be746328f90caa3cd4bc67e6fe36ca2bf61d5c6eb6d895b6527e3f05071e", size = 424657, upload-time = "2025-10-08T09:15:17.825Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/38/f8/4398c46863b093252fe67368b44edc6c13b17f4e6b0e4929dbf0bdb13f23/msgpack-1.1.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:fffee09044073e69f2bad787071aeec727183e7580443dfeb8556cbf1978d162", size = 402668, upload-time = "2025-10-08T09:15:19.003Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/28/ce/698c1eff75626e4124b4d78e21cca0b4cc90043afb80a507626ea354ab52/msgpack-1.1.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5928604de9b032bc17f5099496417f113c45bc6bc21b5c6920caf34b3c428794", size = 419040, upload-time = "2025-10-08T09:15:20.183Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/67/32/f3cd1667028424fa7001d82e10ee35386eea1408b93d399b09fb0aa7875f/msgpack-1.1.2-cp313-cp313-win32.whl", hash = "sha256:a7787d353595c7c7e145e2331abf8b7ff1e6673a6b974ded96e6d4ec09f00c8c", size = 65037, upload-time = "2025-10-08T09:15:21.416Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/74/07/1ed8277f8653c40ebc65985180b007879f6a836c525b3885dcc6448ae6cb/msgpack-1.1.2-cp313-cp313-win_amd64.whl", hash = "sha256:a465f0dceb8e13a487e54c07d04ae3ba131c7c5b95e2612596eafde1dccf64a9", size = 72631, upload-time = "2025-10-08T09:15:22.431Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/e5/db/0314e4e2db56ebcf450f277904ffd84a7988b9e5da8d0d61ab2d057df2b6/msgpack-1.1.2-cp313-cp313-win_arm64.whl", hash = "sha256:e69b39f8c0aa5ec24b57737ebee40be647035158f14ed4b40e6f150077e21a84", size = 64118, upload-time = "2025-10-08T09:15:23.402Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/22/71/201105712d0a2ff07b7873ed3c220292fb2ea5120603c00c4b634bcdafb3/msgpack-1.1.2-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:e23ce8d5f7aa6ea6d2a2b326b4ba46c985dbb204523759984430db7114f8aa00", size = 81127, upload-time = "2025-10-08T09:15:24.408Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/1b/9f/38ff9e57a2eade7bf9dfee5eae17f39fc0e998658050279cbb14d97d36d9/msgpack-1.1.2-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:6c15b7d74c939ebe620dd8e559384be806204d73b4f9356320632d783d1f7939", size = 84981, upload-time = "2025-10-08T09:15:25.812Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/8e/a9/3536e385167b88c2cc8f4424c49e28d49a6fc35206d4a8060f136e71f94c/msgpack-1.1.2-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:99e2cb7b9031568a2a5c73aa077180f93dd2e95b4f8d3b8e14a73ae94a9e667e", size = 411885, upload-time = "2025-10-08T09:15:27.22Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/2f/40/dc34d1a8d5f1e51fc64640b62b191684da52ca469da9cd74e84936ffa4a6/msgpack-1.1.2-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:180759d89a057eab503cf62eeec0aa61c4ea1200dee709f3a8e9397dbb3b6931", size = 419658, upload-time = "2025-10-08T09:15:28.4Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/3b/ef/2b92e286366500a09a67e03496ee8b8ba00562797a52f3c117aa2b29514b/msgpack-1.1.2-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:04fb995247a6e83830b62f0b07bf36540c213f6eac8e851166d8d86d83cbd014", size = 403290, upload-time = "2025-10-08T09:15:29.764Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/78/90/e0ea7990abea5764e4655b8177aa7c63cdfa89945b6e7641055800f6c16b/msgpack-1.1.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:8e22ab046fa7ede9e36eeb4cfad44d46450f37bb05d5ec482b02868f451c95e2", size = 415234, upload-time = "2025-10-08T09:15:31.022Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/72/4e/9390aed5db983a2310818cd7d3ec0aecad45e1f7007e0cda79c79507bb0d/msgpack-1.1.2-cp314-cp314-win32.whl", hash = "sha256:80a0ff7d4abf5fecb995fcf235d4064b9a9a8a40a3ab80999e6ac1e30b702717", size = 66391, upload-time = "2025-10-08T09:15:32.265Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/6e/f1/abd09c2ae91228c5f3998dbd7f41353def9eac64253de3c8105efa2082f7/msgpack-1.1.2-cp314-cp314-win_amd64.whl", hash = "sha256:9ade919fac6a3e7260b7f64cea89df6bec59104987cbea34d34a2fa15d74310b", size = 73787, upload-time = "2025-10-08T09:15:33.219Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/6a/b0/9d9f667ab48b16ad4115c1935d94023b82b3198064cb84a123e97f7466c1/msgpack-1.1.2-cp314-cp314-win_arm64.whl", hash = "sha256:59415c6076b1e30e563eb732e23b994a61c159cec44deaf584e5cc1dd662f2af", size = 66453, upload-time = "2025-10-08T09:15:34.225Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/16/67/93f80545eb1792b61a217fa7f06d5e5cb9e0055bed867f43e2b8e012e137/msgpack-1.1.2-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:897c478140877e5307760b0ea66e0932738879e7aa68144d9b78ea4c8302a84a", size = 85264, upload-time = "2025-10-08T09:15:35.61Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/87/1c/33c8a24959cf193966ef11a6f6a2995a65eb066bd681fd085afd519a57ce/msgpack-1.1.2-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:a668204fa43e6d02f89dbe79a30b0d67238d9ec4c5bd8a940fc3a004a47b721b", size = 89076, upload-time = "2025-10-08T09:15:36.619Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/fc/6b/62e85ff7193663fbea5c0254ef32f0c77134b4059f8da89b958beb7696f3/msgpack-1.1.2-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5559d03930d3aa0f3aacb4c42c776af1a2ace2611871c84a75afe436695e6245", size = 435242, upload-time = "2025-10-08T09:15:37.647Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/c1/47/5c74ecb4cc277cf09f64e913947871682ffa82b3b93c8dad68083112f412/msgpack-1.1.2-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:70c5a7a9fea7f036b716191c29047374c10721c389c21e9ffafad04df8c52c90", size = 432509, upload-time = "2025-10-08T09:15:38.794Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/24/a4/e98ccdb56dc4e98c929a3f150de1799831c0a800583cde9fa022fa90602d/msgpack-1.1.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:f2cb069d8b981abc72b41aea1c580ce92d57c673ec61af4c500153a626cb9e20", size = 415957, upload-time = "2025-10-08T09:15:40.238Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/da/28/6951f7fb67bc0a4e184a6b38ab71a92d9ba58080b27a77d3e2fb0be5998f/msgpack-1.1.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:d62ce1f483f355f61adb5433ebfd8868c5f078d1a52d042b0a998682b4fa8c27", size = 422910, upload-time = "2025-10-08T09:15:41.505Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/f0/03/42106dcded51f0a0b5284d3ce30a671e7bd3f7318d122b2ead66ad289fed/msgpack-1.1.2-cp314-cp314t-win32.whl", hash = "sha256:1d1418482b1ee984625d88aa9585db570180c286d942da463533b238b98b812b", size = 75197, upload-time = "2025-10-08T09:15:42.954Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/15/86/d0071e94987f8db59d4eeb386ddc64d0bb9b10820a8d82bcd3e53eeb2da6/msgpack-1.1.2-cp314-cp314t-win_amd64.whl", hash = "sha256:5a46bf7e831d09470ad92dff02b8b1ac92175ca36b087f904a0519857c6be3ff", size = 85772, upload-time = "2025-10-08T09:15:43.954Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/81/f2/08ace4142eb281c12701fc3b93a10795e4d4dc7f753911d836675050f886/msgpack-1.1.2-cp314-cp314t-win_arm64.whl", hash = "sha256:d99ef64f349d5ec3293688e91486c5fdb925ed03807f64d98d205d2713c60b46", size = 70868, upload-time = "2025-10-08T09:15:44.959Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "multidict"
                                                                  +version = "6.7.0"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/80/1e/5492c365f222f907de1039b91f922b93fa4f764c713ee858d235495d8f50/multidict-6.7.0.tar.gz", hash = "sha256:c6e99d9a65ca282e578dfea819cfa9c0a62b2499d8677392e09feaf305e9e6f5", size = 101834, upload-time = "2025-10-06T14:52:30.657Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/c2/9e/9f61ac18d9c8b475889f32ccfa91c9f59363480613fc807b6e3023d6f60b/multidict-6.7.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:8a3862568a36d26e650a19bb5cbbba14b71789032aebc0423f8cc5f150730184", size = 76877, upload-time = "2025-10-06T14:49:20.884Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/38/6f/614f09a04e6184f8824268fce4bc925e9849edfa654ddd59f0b64508c595/multidict-6.7.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:960c60b5849b9b4f9dcc9bea6e3626143c252c74113df2c1540aebce70209b45", size = 45467, upload-time = "2025-10-06T14:49:22.054Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/b3/93/c4f67a436dd026f2e780c433277fff72be79152894d9fc36f44569cab1a6/multidict-6.7.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2049be98fb57a31b4ccf870bf377af2504d4ae35646a19037ec271e4c07998aa", size = 43834, upload-time = "2025-10-06T14:49:23.566Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/7f/f5/013798161ca665e4a422afbc5e2d9e4070142a9ff8905e482139cd09e4d0/multidict-6.7.0-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:0934f3843a1860dd465d38895c17fce1f1cb37295149ab05cd1b9a03afacb2a7", size = 250545, upload-time = "2025-10-06T14:49:24.882Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/71/2f/91dbac13e0ba94669ea5119ba267c9a832f0cb65419aca75549fcf09a3dc/multidict-6.7.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b3e34f3a1b8131ba06f1a73adab24f30934d148afcd5f5de9a73565a4404384e", size = 258305, upload-time = "2025-10-06T14:49:26.778Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/ef/b0/754038b26f6e04488b48ac621f779c341338d78503fb45403755af2df477/multidict-6.7.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:efbb54e98446892590dc2458c19c10344ee9a883a79b5cec4bc34d6656e8d546", size = 242363, upload-time = "2025-10-06T14:49:28.562Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/87/15/9da40b9336a7c9fa606c4cf2ed80a649dffeb42b905d4f63a1d7eb17d746/multidict-6.7.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a35c5fc61d4f51eb045061e7967cfe3123d622cd500e8868e7c0c592a09fedc4", size = 268375, upload-time = "2025-10-06T14:49:29.96Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/82/72/c53fcade0cc94dfaad583105fd92b3a783af2091eddcb41a6d5a52474000/multidict-6.7.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:29fe6740ebccba4175af1b9b87bf553e9c15cd5868ee967e010efcf94e4fd0f1", size = 269346, upload-time = "2025-10-06T14:49:31.404Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/0d/e2/9baffdae21a76f77ef8447f1a05a96ec4bc0a24dae08767abc0a2fe680b8/multidict-6.7.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:123e2a72e20537add2f33a79e605f6191fba2afda4cbb876e35c1a7074298a7d", size = 256107, upload-time = "2025-10-06T14:49:32.974Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/3c/06/3f06f611087dc60d65ef775f1fb5aca7c6d61c6db4990e7cda0cef9b1651/multidict-6.7.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b284e319754366c1aee2267a2036248b24eeb17ecd5dc16022095e747f2f4304", size = 253592, upload-time = "2025-10-06T14:49:34.52Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/20/24/54e804ec7945b6023b340c412ce9c3f81e91b3bf5fa5ce65558740141bee/multidict-6.7.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:803d685de7be4303b5a657b76e2f6d1240e7e0a8aa2968ad5811fa2285553a12", size = 251024, upload-time = "2025-10-06T14:49:35.956Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/14/48/011cba467ea0b17ceb938315d219391d3e421dfd35928e5dbdc3f4ae76ef/multidict-6.7.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:c04a328260dfd5db8c39538f999f02779012268f54614902d0afc775d44e0a62", size = 251484, upload-time = "2025-10-06T14:49:37.631Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/0d/2f/919258b43bb35b99fa127435cfb2d91798eb3a943396631ef43e3720dcf4/multidict-6.7.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:8a19cdb57cd3df4cd865849d93ee14920fb97224300c88501f16ecfa2604b4e0", size = 263579, upload-time = "2025-10-06T14:49:39.502Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/31/22/a0e884d86b5242b5a74cf08e876bdf299e413016b66e55511f7a804a366e/multidict-6.7.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:9b2fd74c52accced7e75de26023b7dccee62511a600e62311b918ec5c168fc2a", size = 259654, upload-time = "2025-10-06T14:49:41.32Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/b2/e5/17e10e1b5c5f5a40f2fcbb45953c9b215f8a4098003915e46a93f5fcaa8f/multidict-6.7.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3e8bfdd0e487acf992407a140d2589fe598238eaeffa3da8448d63a63cd363f8", size = 251511, upload-time = "2025-10-06T14:49:46.021Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/e3/9a/201bb1e17e7af53139597069c375e7b0dcbd47594604f65c2d5359508566/multidict-6.7.0-cp312-cp312-win32.whl", hash = "sha256:dd32a49400a2c3d52088e120ee00c1e3576cbff7e10b98467962c74fdb762ed4", size = 41895, upload-time = "2025-10-06T14:49:48.718Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/46/e2/348cd32faad84eaf1d20cce80e2bb0ef8d312c55bca1f7fa9865e7770aaf/multidict-6.7.0-cp312-cp312-win_amd64.whl", hash = "sha256:92abb658ef2d7ef22ac9f8bb88e8b6c3e571671534e029359b6d9e845923eb1b", size = 46073, upload-time = "2025-10-06T14:49:50.28Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/25/ec/aad2613c1910dce907480e0c3aa306905830f25df2e54ccc9dea450cb5aa/multidict-6.7.0-cp312-cp312-win_arm64.whl", hash = "sha256:490dab541a6a642ce1a9d61a4781656b346a55c13038f0b1244653828e3a83ec", size = 43226, upload-time = "2025-10-06T14:49:52.304Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/d2/86/33272a544eeb36d66e4d9a920602d1a2f57d4ebea4ef3cdfe5a912574c95/multidict-6.7.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:bee7c0588aa0076ce77c0ea5d19a68d76ad81fcd9fe8501003b9a24f9d4000f6", size = 76135, upload-time = "2025-10-06T14:49:54.26Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/91/1c/eb97db117a1ebe46d457a3d235a7b9d2e6dcab174f42d1b67663dd9e5371/multidict-6.7.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:7ef6b61cad77091056ce0e7ce69814ef72afacb150b7ac6a3e9470def2198159", size = 45117, upload-time = "2025-10-06T14:49:55.82Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/f1/d8/6c3442322e41fb1dd4de8bd67bfd11cd72352ac131f6368315617de752f1/multidict-6.7.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:9c0359b1ec12b1d6849c59f9d319610b7f20ef990a6d454ab151aa0e3b9f78ca", size = 43472, upload-time = "2025-10-06T14:49:57.048Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/75/3f/e2639e80325af0b6c6febdf8e57cc07043ff15f57fa1ef808f4ccb5ac4cd/multidict-6.7.0-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:cd240939f71c64bd658f186330603aac1a9a81bf6273f523fca63673cb7378a8", size = 249342, upload-time = "2025-10-06T14:49:58.368Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/5d/cc/84e0585f805cbeaa9cbdaa95f9a3d6aed745b9d25700623ac89a6ecff400/multidict-6.7.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a60a4d75718a5efa473ebd5ab685786ba0c67b8381f781d1be14da49f1a2dc60", size = 257082, upload-time = "2025-10-06T14:49:59.89Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/b0/9c/ac851c107c92289acbbf5cfb485694084690c1b17e555f44952c26ddc5bd/multidict-6.7.0-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:53a42d364f323275126aff81fb67c5ca1b7a04fda0546245730a55c8c5f24bc4", size = 240704, upload-time = "2025-10-06T14:50:01.485Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/50/cc/5f93e99427248c09da95b62d64b25748a5f5c98c7c2ab09825a1d6af0e15/multidict-6.7.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:3b29b980d0ddbecb736735ee5bef69bb2ddca56eff603c86f3f29a1128299b4f", size = 266355, upload-time = "2025-10-06T14:50:02.955Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/ec/0c/2ec1d883ceb79c6f7f6d7ad90c919c898f5d1c6ea96d322751420211e072/multidict-6.7.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:f8a93b1c0ed2d04b97a5e9336fd2d33371b9a6e29ab7dd6503d63407c20ffbaf", size = 267259, upload-time = "2025-10-06T14:50:04.446Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/c6/2d/f0b184fa88d6630aa267680bdb8623fb69cb0d024b8c6f0d23f9a0f406d3/multidict-6.7.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9ff96e8815eecacc6645da76c413eb3b3d34cfca256c70b16b286a687d013c32", size = 254903, upload-time = "2025-10-06T14:50:05.98Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/06/c9/11ea263ad0df7dfabcad404feb3c0dd40b131bc7f232d5537f2fb1356951/multidict-6.7.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:7516c579652f6a6be0e266aec0acd0db80829ca305c3d771ed898538804c2036", size = 252365, upload-time = "2025-10-06T14:50:07.511Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/41/88/d714b86ee2c17d6e09850c70c9d310abac3d808ab49dfa16b43aba9d53fd/multidict-6.7.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:040f393368e63fb0f3330e70c26bfd336656bed925e5cbe17c9da839a6ab13ec", size = 250062, upload-time = "2025-10-06T14:50:09.074Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/15/fe/ad407bb9e818c2b31383f6131ca19ea7e35ce93cf1310fce69f12e89de75/multidict-6.7.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b3bc26a951007b1057a1c543af845f1c7e3e71cc240ed1ace7bf4484aa99196e", size = 249683, upload-time = "2025-10-06T14:50:10.714Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/8c/a4/a89abdb0229e533fb925e7c6e5c40201c2873efebc9abaf14046a4536ee6/multidict-6.7.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:7b022717c748dd1992a83e219587aabe45980d88969f01b316e78683e6285f64", size = 261254, upload-time = "2025-10-06T14:50:12.28Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/8d/aa/0e2b27bd88b40a4fb8dc53dd74eecac70edaa4c1dd0707eb2164da3675b3/multidict-6.7.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:9600082733859f00d79dee64effc7aef1beb26adb297416a4ad2116fd61374bd", size = 257967, upload-time = "2025-10-06T14:50:14.16Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/d0/8e/0c67b7120d5d5f6d874ed85a085f9dc770a7f9d8813e80f44a9fec820bb7/multidict-6.7.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:94218fcec4d72bc61df51c198d098ce2b378e0ccbac41ddbed5ef44092913288", size = 250085, upload-time = "2025-10-06T14:50:15.639Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/ba/55/b73e1d624ea4b8fd4dd07a3bb70f6e4c7c6c5d9d640a41c6ffe5cdbd2a55/multidict-6.7.0-cp313-cp313-win32.whl", hash = "sha256:a37bd74c3fa9d00be2d7b8eca074dc56bd8077ddd2917a839bd989612671ed17", size = 41713, upload-time = "2025-10-06T14:50:17.066Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/32/31/75c59e7d3b4205075b4c183fa4ca398a2daf2303ddf616b04ae6ef55cffe/multidict-6.7.0-cp313-cp313-win_amd64.whl", hash = "sha256:30d193c6cc6d559db42b6bcec8a5d395d34d60c9877a0b71ecd7c204fcf15390", size = 45915, upload-time = "2025-10-06T14:50:18.264Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/31/2a/8987831e811f1184c22bc2e45844934385363ee61c0a2dcfa8f71b87e608/multidict-6.7.0-cp313-cp313-win_arm64.whl", hash = "sha256:ea3334cabe4d41b7ccd01e4d349828678794edbc2d3ae97fc162a3312095092e", size = 43077, upload-time = "2025-10-06T14:50:19.853Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/e8/68/7b3a5170a382a340147337b300b9eb25a9ddb573bcdfff19c0fa3f31ffba/multidict-6.7.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:ad9ce259f50abd98a1ca0aa6e490b58c316a0fce0617f609723e40804add2c00", size = 83114, upload-time = "2025-10-06T14:50:21.223Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/55/5c/3fa2d07c84df4e302060f555bbf539310980362236ad49f50eeb0a1c1eb9/multidict-6.7.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:07f5594ac6d084cbb5de2df218d78baf55ef150b91f0ff8a21cc7a2e3a5a58eb", size = 48442, upload-time = "2025-10-06T14:50:22.871Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/fc/56/67212d33239797f9bd91962bb899d72bb0f4c35a8652dcdb8ed049bef878/multidict-6.7.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:0591b48acf279821a579282444814a2d8d0af624ae0bc600aa4d1b920b6e924b", size = 46885, upload-time = "2025-10-06T14:50:24.258Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/46/d1/908f896224290350721597a61a69cd19b89ad8ee0ae1f38b3f5cd12ea2ac/multidict-6.7.0-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:749a72584761531d2b9467cfbdfd29487ee21124c304c4b6cb760d8777b27f9c", size = 242588, upload-time = "2025-10-06T14:50:25.716Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/ab/67/8604288bbd68680eee0ab568fdcb56171d8b23a01bcd5cb0c8fedf6e5d99/multidict-6.7.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b4c3d199f953acd5b446bf7c0de1fe25d94e09e79086f8dc2f48a11a129cdf1", size = 249966, upload-time = "2025-10-06T14:50:28.192Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/20/33/9228d76339f1ba51e3efef7da3ebd91964d3006217aae13211653193c3ff/multidict-6.7.0-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:9fb0211dfc3b51efea2f349ec92c114d7754dd62c01f81c3e32b765b70c45c9b", size = 228618, upload-time = "2025-10-06T14:50:29.82Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/f8/2d/25d9b566d10cab1c42b3b9e5b11ef79c9111eaf4463b8c257a3bd89e0ead/multidict-6.7.0-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a027ec240fe73a8d6281872690b988eed307cd7d91b23998ff35ff577ca688b5", size = 257539, upload-time = "2025-10-06T14:50:31.731Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/b6/b1/8d1a965e6637fc33de3c0d8f414485c2b7e4af00f42cab3d84e7b955c222/multidict-6.7.0-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d1d964afecdf3a8288789df2f5751dc0a8261138c3768d9af117ed384e538fad", size = 256345, upload-time = "2025-10-06T14:50:33.26Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/ba/0c/06b5a8adbdeedada6f4fb8d8f193d44a347223b11939b42953eeb6530b6b/multidict-6.7.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:caf53b15b1b7df9fbd0709aa01409000a2b4dd03a5f6f5cc548183c7c8f8b63c", size = 247934, upload-time = "2025-10-06T14:50:34.808Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/8f/31/b2491b5fe167ca044c6eb4b8f2c9f3b8a00b24c432c365358eadac5d7625/multidict-6.7.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:654030da3197d927f05a536a66186070e98765aa5142794c9904555d3a9d8fb5", size = 245243, upload-time = "2025-10-06T14:50:36.436Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/61/1a/982913957cb90406c8c94f53001abd9eafc271cb3e70ff6371590bec478e/multidict-6.7.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:2090d3718829d1e484706a2f525e50c892237b2bf9b17a79b059cb98cddc2f10", size = 235878, upload-time = "2025-10-06T14:50:37.953Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/be/c0/21435d804c1a1cf7a2608593f4d19bca5bcbd7a81a70b253fdd1c12af9c0/multidict-6.7.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:2d2cfeec3f6f45651b3d408c4acec0ebf3daa9bc8a112a084206f5db5d05b754", size = 243452, upload-time = "2025-10-06T14:50:39.574Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/54/0a/4349d540d4a883863191be6eb9a928846d4ec0ea007d3dcd36323bb058ac/multidict-6.7.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:4ef089f985b8c194d341eb2c24ae6e7408c9a0e2e5658699c92f497437d88c3c", size = 252312, upload-time = "2025-10-06T14:50:41.612Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/26/64/d5416038dbda1488daf16b676e4dbfd9674dde10a0cc8f4fc2b502d8125d/multidict-6.7.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:e93a0617cd16998784bf4414c7e40f17a35d2350e5c6f0bd900d3a8e02bd3762", size = 246935, upload-time = "2025-10-06T14:50:43.972Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/9f/8c/8290c50d14e49f35e0bd4abc25e1bc7711149ca9588ab7d04f886cdf03d9/multidict-6.7.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:f0feece2ef8ebc42ed9e2e8c78fc4aa3cf455733b507c09ef7406364c94376c6", size = 243385, upload-time = "2025-10-06T14:50:45.648Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/ef/a0/f83ae75e42d694b3fbad3e047670e511c138be747bc713cf1b10d5096416/multidict-6.7.0-cp313-cp313t-win32.whl", hash = "sha256:19a1d55338ec1be74ef62440ca9e04a2f001a04d0cc49a4983dc320ff0f3212d", size = 47777, upload-time = "2025-10-06T14:50:47.154Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/dc/80/9b174a92814a3830b7357307a792300f42c9e94664b01dee8e457551fa66/multidict-6.7.0-cp313-cp313t-win_amd64.whl", hash = "sha256:3da4fb467498df97e986af166b12d01f05d2e04f978a9c1c680ea1988e0bc4b6", size = 53104, upload-time = "2025-10-06T14:50:48.851Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/cc/28/04baeaf0428d95bb7a7bea0e691ba2f31394338ba424fb0679a9ed0f4c09/multidict-6.7.0-cp313-cp313t-win_arm64.whl", hash = "sha256:b4121773c49a0776461f4a904cdf6264c88e42218aaa8407e803ca8025872792", size = 45503, upload-time = "2025-10-06T14:50:50.16Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/e2/b1/3da6934455dd4b261d4c72f897e3a5728eba81db59959f3a639245891baa/multidict-6.7.0-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:3bab1e4aff7adaa34410f93b1f8e57c4b36b9af0426a76003f441ee1d3c7e842", size = 75128, upload-time = "2025-10-06T14:50:51.92Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/14/2c/f069cab5b51d175a1a2cb4ccdf7a2c2dabd58aa5bd933fa036a8d15e2404/multidict-6.7.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:b8512bac933afc3e45fb2b18da8e59b78d4f408399a960339598374d4ae3b56b", size = 44410, upload-time = "2025-10-06T14:50:53.275Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/42/e2/64bb41266427af6642b6b128e8774ed84c11b80a90702c13ac0a86bb10cc/multidict-6.7.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:79dcf9e477bc65414ebfea98ffd013cb39552b5ecd62908752e0e413d6d06e38", size = 43205, upload-time = "2025-10-06T14:50:54.911Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/02/68/6b086fef8a3f1a8541b9236c594f0c9245617c29841f2e0395d979485cde/multidict-6.7.0-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:31bae522710064b5cbeddaf2e9f32b1abab70ac6ac91d42572502299e9953128", size = 245084, upload-time = "2025-10-06T14:50:56.369Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/15/ee/f524093232007cd7a75c1d132df70f235cfd590a7c9eaccd7ff422ef4ae8/multidict-6.7.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4a0df7ff02397bb63e2fd22af2c87dfa39e8c7f12947bc524dbdc528282c7e34", size = 252667, upload-time = "2025-10-06T14:50:57.991Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/02/a5/eeb3f43ab45878f1895118c3ef157a480db58ede3f248e29b5354139c2c9/multidict-6.7.0-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:7a0222514e8e4c514660e182d5156a415c13ef0aabbd71682fc714e327b95e99", size = 233590, upload-time = "2025-10-06T14:50:59.589Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/6a/1e/76d02f8270b97269d7e3dbd45644b1785bda457b474315f8cf999525a193/multidict-6.7.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2397ab4daaf2698eb51a76721e98db21ce4f52339e535725de03ea962b5a3202", size = 264112, upload-time = "2025-10-06T14:51:01.183Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/76/0b/c28a70ecb58963847c2a8efe334904cd254812b10e535aefb3bcce513918/multidict-6.7.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:8891681594162635948a636c9fe0ff21746aeb3dd5463f6e25d9bea3a8a39ca1", size = 261194, upload-time = "2025-10-06T14:51:02.794Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/b4/63/2ab26e4209773223159b83aa32721b4021ffb08102f8ac7d689c943fded1/multidict-6.7.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:18706cc31dbf402a7945916dd5cddf160251b6dab8a2c5f3d6d5a55949f676b3", size = 248510, upload-time = "2025-10-06T14:51:04.724Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/93/cd/06c1fa8282af1d1c46fd55c10a7930af652afdce43999501d4d68664170c/multidict-6.7.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:f844a1bbf1d207dd311a56f383f7eda2d0e134921d45751842d8235e7778965d", size = 248395, upload-time = "2025-10-06T14:51:06.306Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/99/ac/82cb419dd6b04ccf9e7e61befc00c77614fc8134362488b553402ecd55ce/multidict-6.7.0-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:d4393e3581e84e5645506923816b9cc81f5609a778c7e7534054091acc64d1c6", size = 239520, upload-time = "2025-10-06T14:51:08.091Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/fa/f3/a0f9bf09493421bd8716a362e0cd1d244f5a6550f5beffdd6b47e885b331/multidict-6.7.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:fbd18dc82d7bf274b37aa48d664534330af744e03bccf696d6f4c6042e7d19e7", size = 245479, upload-time = "2025-10-06T14:51:10.365Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/8d/01/476d38fc73a212843f43c852b0eee266b6971f0e28329c2184a8df90c376/multidict-6.7.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:b6234e14f9314731ec45c42fc4554b88133ad53a09092cc48a88e771c125dadb", size = 258903, upload-time = "2025-10-06T14:51:12.466Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/49/6d/23faeb0868adba613b817d0e69c5f15531b24d462af8012c4f6de4fa8dc3/multidict-6.7.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:08d4379f9744d8f78d98c8673c06e202ffa88296f009c71bbafe8a6bf847d01f", size = 252333, upload-time = "2025-10-06T14:51:14.48Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/1e/cc/48d02ac22b30fa247f7dad82866e4b1015431092f4ba6ebc7e77596e0b18/multidict-6.7.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:9fe04da3f79387f450fd0061d4dd2e45a72749d31bf634aecc9e27f24fdc4b3f", size = 243411, upload-time = "2025-10-06T14:51:16.072Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/4a/03/29a8bf5a18abf1fe34535c88adbdfa88c9fb869b5a3b120692c64abe8284/multidict-6.7.0-cp314-cp314-win32.whl", hash = "sha256:fbafe31d191dfa7c4c51f7a6149c9fb7e914dcf9ffead27dcfd9f1ae382b3885", size = 40940, upload-time = "2025-10-06T14:51:17.544Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/82/16/7ed27b680791b939de138f906d5cf2b4657b0d45ca6f5dd6236fdddafb1a/multidict-6.7.0-cp314-cp314-win_amd64.whl", hash = "sha256:2f67396ec0310764b9222a1728ced1ab638f61aadc6226f17a71dd9324f9a99c", size = 45087, upload-time = "2025-10-06T14:51:18.875Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/cd/3c/e3e62eb35a1950292fe39315d3c89941e30a9d07d5d2df42965ab041da43/multidict-6.7.0-cp314-cp314-win_arm64.whl", hash = "sha256:ba672b26069957ee369cfa7fc180dde1fc6f176eaf1e6beaf61fbebbd3d9c000", size = 42368, upload-time = "2025-10-06T14:51:20.225Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/8b/40/cd499bd0dbc5f1136726db3153042a735fffd0d77268e2ee20d5f33c010f/multidict-6.7.0-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:c1dcc7524066fa918c6a27d61444d4ee7900ec635779058571f70d042d86ed63", size = 82326, upload-time = "2025-10-06T14:51:21.588Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/13/8a/18e031eca251c8df76daf0288e6790561806e439f5ce99a170b4af30676b/multidict-6.7.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:27e0b36c2d388dc7b6ced3406671b401e84ad7eb0656b8f3a2f46ed0ce483718", size = 48065, upload-time = "2025-10-06T14:51:22.93Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/40/71/5e6701277470a87d234e433fb0a3a7deaf3bcd92566e421e7ae9776319de/multidict-6.7.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:2a7baa46a22e77f0988e3b23d4ede5513ebec1929e34ee9495be535662c0dfe2", size = 46475, upload-time = "2025-10-06T14:51:24.352Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/fe/6a/bab00cbab6d9cfb57afe1663318f72ec28289ea03fd4e8236bb78429893a/multidict-6.7.0-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:7bf77f54997a9166a2f5675d1201520586439424c2511723a7312bdb4bcc034e", size = 239324, upload-time = "2025-10-06T14:51:25.822Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/2a/5f/8de95f629fc22a7769ade8b41028e3e5a822c1f8904f618d175945a81ad3/multidict-6.7.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e011555abada53f1578d63389610ac8a5400fc70ce71156b0aa30d326f1a5064", size = 246877, upload-time = "2025-10-06T14:51:27.604Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/23/b4/38881a960458f25b89e9f4a4fdcb02ac101cfa710190db6e5528841e67de/multidict-6.7.0-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:28b37063541b897fd6a318007373930a75ca6d6ac7c940dbe14731ffdd8d498e", size = 225824, upload-time = "2025-10-06T14:51:29.664Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/1e/39/6566210c83f8a261575f18e7144736059f0c460b362e96e9cf797a24b8e7/multidict-6.7.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:05047ada7a2fde2631a0ed706f1fd68b169a681dfe5e4cf0f8e4cb6618bbc2cd", size = 253558, upload-time = "2025-10-06T14:51:31.684Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/00/a3/67f18315100f64c269f46e6c0319fa87ba68f0f64f2b8e7fd7c72b913a0b/multidict-6.7.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:716133f7d1d946a4e1b91b1756b23c088881e70ff180c24e864c26192ad7534a", size = 252339, upload-time = "2025-10-06T14:51:33.699Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/c8/2a/1cb77266afee2458d82f50da41beba02159b1d6b1f7973afc9a1cad1499b/multidict-6.7.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d1bed1b467ef657f2a0ae62844a607909ef1c6889562de5e1d505f74457d0b96", size = 244895, upload-time = "2025-10-06T14:51:36.189Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/dd/72/09fa7dd487f119b2eb9524946ddd36e2067c08510576d43ff68469563b3b/multidict-6.7.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:ca43bdfa5d37bd6aee89d85e1d0831fb86e25541be7e9d376ead1b28974f8e5e", size = 241862, upload-time = "2025-10-06T14:51:41.291Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/65/92/bc1f8bd0853d8669300f732c801974dfc3702c3eeadae2f60cef54dc69d7/multidict-6.7.0-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:44b546bd3eb645fd26fb949e43c02a25a2e632e2ca21a35e2e132c8105dc8599", size = 232376, upload-time = "2025-10-06T14:51:43.55Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/09/86/ac39399e5cb9d0c2ac8ef6e10a768e4d3bc933ac808d49c41f9dc23337eb/multidict-6.7.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:a6ef16328011d3f468e7ebc326f24c1445f001ca1dec335b2f8e66bed3006394", size = 240272, upload-time = "2025-10-06T14:51:45.265Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/3d/b6/fed5ac6b8563ec72df6cb1ea8dac6d17f0a4a1f65045f66b6d3bf1497c02/multidict-6.7.0-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:5aa873cbc8e593d361ae65c68f85faadd755c3295ea2c12040ee146802f23b38", size = 248774, upload-time = "2025-10-06T14:51:46.836Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/6b/8d/b954d8c0dc132b68f760aefd45870978deec6818897389dace00fcde32ff/multidict-6.7.0-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:3d7b6ccce016e29df4b7ca819659f516f0bc7a4b3efa3bb2012ba06431b044f9", size = 242731, upload-time = "2025-10-06T14:51:48.541Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/16/9d/a2dac7009125d3540c2f54e194829ea18ac53716c61b655d8ed300120b0f/multidict-6.7.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:171b73bd4ee683d307599b66793ac80981b06f069b62eea1c9e29c9241aa66b0", size = 240193, upload-time = "2025-10-06T14:51:50.355Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/39/ca/c05f144128ea232ae2178b008d5011d4e2cea86e4ee8c85c2631b1b94802/multidict-6.7.0-cp314-cp314t-win32.whl", hash = "sha256:b2d7f80c4e1fd010b07cb26820aae86b7e73b681ee4889684fb8d2d4537aab13", size = 48023, upload-time = "2025-10-06T14:51:51.883Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/ba/8f/0a60e501584145588be1af5cc829265701ba3c35a64aec8e07cbb71d39bb/multidict-6.7.0-cp314-cp314t-win_amd64.whl", hash = "sha256:09929cab6fcb68122776d575e03c6cc64ee0b8fca48d17e135474b042ce515cd", size = 53507, upload-time = "2025-10-06T14:51:53.672Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/7f/ae/3148b988a9c6239903e786eac19c889fab607c31d6efa7fb2147e5680f23/multidict-6.7.0-cp314-cp314t-win_arm64.whl", hash = "sha256:cc41db090ed742f32bd2d2c721861725e6109681eddf835d0a82bd3a5c382827", size = 44804, upload-time = "2025-10-06T14:51:55.415Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/b7/da/7d22601b625e241d4f23ef1ebff8acfc60da633c9e7e7922e24d10f592b3/multidict-6.7.0-py3-none-any.whl", hash = "sha256:394fc5c42a333c9ffc3e421a4c85e08580d990e08b99f6bf35b4132114c5dcb3", size = 12317, upload-time = "2025-10-06T14:52:29.272Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "mypy-extensions"
                                                                  +version = "1.1.0"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/a2/6e/371856a3fb9d31ca8dac321cda606860fa4548858c0cc45d9d1d4ca2628b/mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558", size = 6343, upload-time = "2025-04-22T14:54:24.164Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/79/7b/2c79738432f5c924bef5071f933bcc9efd0473bac3b4aa584a6f7c1c8df8/mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505", size = 4963, upload-time = "2025-04-22T14:54:22.983Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "nbclient"
                                                                  +version = "0.10.2"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +dependencies = [
                                                                  +    { name = "jupyter-client" },
                                                                  +    { name = "jupyter-core" },
                                                                  +    { name = "nbformat" },
                                                                  +    { name = "traitlets" },
                                                                  +]
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/87/66/7ffd18d58eae90d5721f9f39212327695b749e23ad44b3881744eaf4d9e8/nbclient-0.10.2.tar.gz", hash = "sha256:90b7fc6b810630db87a6d0c2250b1f0ab4cf4d3c27a299b0cde78a4ed3fd9193", size = 62424, upload-time = "2024-12-19T10:32:27.164Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/34/6d/e7fa07f03a4a7b221d94b4d586edb754a9b0dc3c9e2c93353e9fa4e0d117/nbclient-0.10.2-py3-none-any.whl", hash = "sha256:4ffee11e788b4a27fabeb7955547e4318a5298f34342a4bfd01f2e1faaeadc3d", size = 25434, upload-time = "2024-12-19T10:32:24.139Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "nbconvert"
                                                                  +version = "7.16.6"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +dependencies = [
                                                                  +    { name = "beautifulsoup4" },
                                                                  +    { name = "bleach", extra = ["css"] },
                                                                  +    { name = "defusedxml" },
                                                                  +    { name = "jinja2" },
                                                                  +    { name = "jupyter-core" },
                                                                  +    { name = "jupyterlab-pygments" },
                                                                  +    { name = "markupsafe" },
                                                                  +    { name = "mistune" },
                                                                  +    { name = "nbclient" },
                                                                  +    { name = "nbformat" },
                                                                  +    { name = "packaging" },
                                                                  +    { name = "pandocfilters" },
                                                                  +    { name = "pygments" },
                                                                  +    { name = "traitlets" },
                                                                  +]
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/a3/59/f28e15fc47ffb73af68a8d9b47367a8630d76e97ae85ad18271b9db96fdf/nbconvert-7.16.6.tar.gz", hash = "sha256:576a7e37c6480da7b8465eefa66c17844243816ce1ccc372633c6b71c3c0f582", size = 857715, upload-time = "2025-01-28T09:29:14.724Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/cc/9a/cd673b2f773a12c992f41309ef81b99da1690426bd2f96957a7ade0d3ed7/nbconvert-7.16.6-py3-none-any.whl", hash = "sha256:1375a7b67e0c2883678c48e506dc320febb57685e5ee67faa51b18a90f3a712b", size = 258525, upload-time = "2025-01-28T09:29:12.551Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "nbformat"
                                                                  +version = "5.10.4"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +dependencies = [
                                                                  +    { name = "fastjsonschema" },
                                                                  +    { name = "jsonschema" },
                                                                  +    { name = "jupyter-core" },
                                                                  +    { name = "traitlets" },
                                                                  +]
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/6d/fd/91545e604bc3dad7dca9ed03284086039b294c6b3d75c0d2fa45f9e9caf3/nbformat-5.10.4.tar.gz", hash = "sha256:322168b14f937a5d11362988ecac2a4952d3d8e3a2cbeb2319584631226d5b3a", size = 142749, upload-time = "2024-04-04T11:20:37.371Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/a9/82/0340caa499416c78e5d8f5f05947ae4bc3cba53c9f038ab6e9ed964e22f1/nbformat-5.10.4-py3-none-any.whl", hash = "sha256:3b48d6c8fbca4b299bf3982ea7db1af21580e4fec269ad087b9e81588891200b", size = 78454, upload-time = "2024-04-04T11:20:34.895Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "nest-asyncio"
                                                                  +version = "1.6.0"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/83/f8/51569ac65d696c8ecbee95938f89d4abf00f47d58d48f6fbabfe8f0baefe/nest_asyncio-1.6.0.tar.gz", hash = "sha256:6f172d5449aca15afd6c646851f4e31e02c598d553a667e38cafa997cfec55fe", size = 7418, upload-time = "2024-01-21T14:25:19.227Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/a0/c4/c2971a3ba4c6103a3d10c4b0f24f461ddc027f0f09763220cf35ca1401b3/nest_asyncio-1.6.0-py3-none-any.whl", hash = "sha256:87af6efd6b5e897c81050477ef65c62e2b2f35d51703cae01aff2905b1852e1c", size = 5195, upload-time = "2024-01-21T14:25:17.223Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "notebook"
                                                                  +version = "7.5.0"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +dependencies = [
                                                                  +    { name = "jupyter-server" },
                                                                  +    { name = "jupyterlab" },
                                                                  +    { name = "jupyterlab-server" },
                                                                  +    { name = "notebook-shim" },
                                                                  +    { name = "tornado" },
                                                                  +]
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/89/ac/a97041621250a4fc5af379fb377942841eea2ca146aab166b8fcdfba96c2/notebook-7.5.0.tar.gz", hash = "sha256:3b27eaf9913033c28dde92d02139414c608992e1df4b969c843219acf2ff95e4", size = 14052074, upload-time = "2025-11-19T08:36:20.093Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/73/96/00df2a4760f10f5af0f45c4955573cae6189931f9a30265a35865f8c1031/notebook-7.5.0-py3-none-any.whl", hash = "sha256:3300262d52905ca271bd50b22617681d95f08a8360d099e097726e6d2efb5811", size = 14460968, upload-time = "2025-11-19T08:36:15.869Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "notebook-shim"
                                                                  +version = "0.2.4"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +dependencies = [
                                                                  +    { name = "jupyter-server" },
                                                                  +]
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/54/d2/92fa3243712b9a3e8bafaf60aac366da1cada3639ca767ff4b5b3654ec28/notebook_shim-0.2.4.tar.gz", hash = "sha256:b4b2cfa1b65d98307ca24361f5b30fe785b53c3fd07b7a47e89acb5e6ac638cb", size = 13167, upload-time = "2024-02-14T23:35:18.353Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/f9/33/bd5b9137445ea4b680023eb0469b2bb969d61303dedb2aac6560ff3d14a1/notebook_shim-0.2.4-py3-none-any.whl", hash = "sha256:411a5be4e9dc882a074ccbcae671eda64cceb068767e9a3419096986560e1cef", size = 13307, upload-time = "2024-02-14T23:35:16.286Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "numpy"
                                                                  +version = "2.3.5"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/76/65/21b3bc86aac7b8f2862db1e808f1ea22b028e30a225a34a5ede9bf8678f2/numpy-2.3.5.tar.gz", hash = "sha256:784db1dcdab56bf0517743e746dfb0f885fc68d948aba86eeec2cba234bdf1c0", size = 20584950, upload-time = "2025-11-16T22:52:42.067Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/44/37/e669fe6cbb2b96c62f6bbedc6a81c0f3b7362f6a59230b23caa673a85721/numpy-2.3.5-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:74ae7b798248fe62021dbf3c914245ad45d1a6b0cb4a29ecb4b31d0bfbc4cc3e", size = 16733873, upload-time = "2025-11-16T22:49:49.84Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/c5/65/df0db6c097892c9380851ab9e44b52d4f7ba576b833996e0080181c0c439/numpy-2.3.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ee3888d9ff7c14604052b2ca5535a30216aa0a58e948cdd3eeb8d3415f638769", size = 12259838, upload-time = "2025-11-16T22:49:52.863Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/5b/e1/1ee06e70eb2136797abe847d386e7c0e830b67ad1d43f364dd04fa50d338/numpy-2.3.5-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:612a95a17655e213502f60cfb9bf9408efdc9eb1d5f50535cc6eb365d11b42b5", size = 5088378, upload-time = "2025-11-16T22:49:55.055Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/6d/9c/1ca85fb86708724275103b81ec4cf1ac1d08f465368acfc8da7ab545bdae/numpy-2.3.5-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:3101e5177d114a593d79dd79658650fe28b5a0d8abeb8ce6f437c0e6df5be1a4", size = 6628559, upload-time = "2025-11-16T22:49:57.371Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/74/78/fcd41e5a0ce4f3f7b003da85825acddae6d7ecb60cf25194741b036ca7d6/numpy-2.3.5-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8b973c57ff8e184109db042c842423ff4f60446239bd585a5131cc47f06f789d", size = 14250702, upload-time = "2025-11-16T22:49:59.632Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/b6/23/2a1b231b8ff672b4c450dac27164a8b2ca7d9b7144f9c02d2396518352eb/numpy-2.3.5-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0d8163f43acde9a73c2a33605353a4f1bc4798745a8b1d73183b28e5b435ae28", size = 16606086, upload-time = "2025-11-16T22:50:02.127Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/a0/c5/5ad26fbfbe2012e190cc7d5003e4d874b88bb18861d0829edc140a713021/numpy-2.3.5-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:51c1e14eb1e154ebd80e860722f9e6ed6ec89714ad2db2d3aa33c31d7c12179b", size = 16025985, upload-time = "2025-11-16T22:50:04.536Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/d2/fa/dd48e225c46c819288148d9d060b047fd2a6fb1eb37eae25112ee4cb4453/numpy-2.3.5-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b46b4ec24f7293f23adcd2d146960559aaf8020213de8ad1909dba6c013bf89c", size = 18542976, upload-time = "2025-11-16T22:50:07.557Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/05/79/ccbd23a75862d95af03d28b5c6901a1b7da4803181513d52f3b86ed9446e/numpy-2.3.5-cp312-cp312-win32.whl", hash = "sha256:3997b5b3c9a771e157f9aae01dd579ee35ad7109be18db0e85dbdbe1de06e952", size = 6285274, upload-time = "2025-11-16T22:50:10.746Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/2d/57/8aeaf160312f7f489dea47ab61e430b5cb051f59a98ae68b7133ce8fa06a/numpy-2.3.5-cp312-cp312-win_amd64.whl", hash = "sha256:86945f2ee6d10cdfd67bcb4069c1662dd711f7e2a4343db5cecec06b87cf31aa", size = 12782922, upload-time = "2025-11-16T22:50:12.811Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/78/a6/aae5cc2ca78c45e64b9ef22f089141d661516856cf7c8a54ba434576900d/numpy-2.3.5-cp312-cp312-win_arm64.whl", hash = "sha256:f28620fe26bee16243be2b7b874da327312240a7cdc38b769a697578d2100013", size = 10194667, upload-time = "2025-11-16T22:50:16.16Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/db/69/9cde09f36da4b5a505341180a3f2e6fadc352fd4d2b7096ce9778db83f1a/numpy-2.3.5-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:d0f23b44f57077c1ede8c5f26b30f706498b4862d3ff0a7298b8411dd2f043ff", size = 16728251, upload-time = "2025-11-16T22:50:19.013Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/79/fb/f505c95ceddd7027347b067689db71ca80bd5ecc926f913f1a23e65cf09b/numpy-2.3.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:aa5bc7c5d59d831d9773d1170acac7893ce3a5e130540605770ade83280e7188", size = 12254652, upload-time = "2025-11-16T22:50:21.487Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/78/da/8c7738060ca9c31b30e9301ee0cf6c5ffdbf889d9593285a1cead337f9a5/numpy-2.3.5-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:ccc933afd4d20aad3c00bcef049cb40049f7f196e0397f1109dba6fed63267b0", size = 5083172, upload-time = "2025-11-16T22:50:24.562Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/a4/b4/ee5bb2537fb9430fd2ef30a616c3672b991a4129bb1c7dcc42aa0abbe5d7/numpy-2.3.5-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:afaffc4393205524af9dfa400fa250143a6c3bc646c08c9f5e25a9f4b4d6a903", size = 6622990, upload-time = "2025-11-16T22:50:26.47Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/95/03/dc0723a013c7d7c19de5ef29e932c3081df1c14ba582b8b86b5de9db7f0f/numpy-2.3.5-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9c75442b2209b8470d6d5d8b1c25714270686f14c749028d2199c54e29f20b4d", size = 14248902, upload-time = "2025-11-16T22:50:28.861Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/f5/10/ca162f45a102738958dcec8023062dad0cbc17d1ab99d68c4e4a6c45fb2b/numpy-2.3.5-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:11e06aa0af8c0f05104d56450d6093ee639e15f24ecf62d417329d06e522e017", size = 16597430, upload-time = "2025-11-16T22:50:31.56Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/2a/51/c1e29be863588db58175175f057286900b4b3327a1351e706d5e0f8dd679/numpy-2.3.5-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ed89927b86296067b4f81f108a2271d8926467a8868e554eaf370fc27fa3ccaf", size = 16024551, upload-time = "2025-11-16T22:50:34.242Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/83/68/8236589d4dbb87253d28259d04d9b814ec0ecce7cb1c7fed29729f4c3a78/numpy-2.3.5-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:51c55fe3451421f3a6ef9a9c1439e82101c57a2c9eab9feb196a62b1a10b58ce", size = 18533275, upload-time = "2025-11-16T22:50:37.651Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/40/56/2932d75b6f13465239e3b7b7e511be27f1b8161ca2510854f0b6e521c395/numpy-2.3.5-cp313-cp313-win32.whl", hash = "sha256:1978155dd49972084bd6ef388d66ab70f0c323ddee6f693d539376498720fb7e", size = 6277637, upload-time = "2025-11-16T22:50:40.11Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/0c/88/e2eaa6cffb115b85ed7c7c87775cb8bcf0816816bc98ca8dbfa2ee33fe6e/numpy-2.3.5-cp313-cp313-win_amd64.whl", hash = "sha256:00dc4e846108a382c5869e77c6ed514394bdeb3403461d25a829711041217d5b", size = 12779090, upload-time = "2025-11-16T22:50:42.503Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/8f/88/3f41e13a44ebd4034ee17baa384acac29ba6a4fcc2aca95f6f08ca0447d1/numpy-2.3.5-cp313-cp313-win_arm64.whl", hash = "sha256:0472f11f6ec23a74a906a00b48a4dcf3849209696dff7c189714511268d103ae", size = 10194710, upload-time = "2025-11-16T22:50:44.971Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/13/cb/71744144e13389d577f867f745b7df2d8489463654a918eea2eeb166dfc9/numpy-2.3.5-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:414802f3b97f3c1eef41e530aaba3b3c1620649871d8cb38c6eaff034c2e16bd", size = 16827292, upload-time = "2025-11-16T22:50:47.715Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/71/80/ba9dc6f2a4398e7f42b708a7fdc841bb638d353be255655498edbf9a15a8/numpy-2.3.5-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:5ee6609ac3604fa7780e30a03e5e241a7956f8e2fcfe547d51e3afa5247ac47f", size = 12378897, upload-time = "2025-11-16T22:50:51.327Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/2e/6d/db2151b9f64264bcceccd51741aa39b50150de9b602d98ecfe7e0c4bff39/numpy-2.3.5-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:86d835afea1eaa143012a2d7a3f45a3adce2d7adc8b4961f0b362214d800846a", size = 5207391, upload-time = "2025-11-16T22:50:54.542Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/80/ae/429bacace5ccad48a14c4ae5332f6aa8ab9f69524193511d60ccdfdc65fa/numpy-2.3.5-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:30bc11310e8153ca664b14c5f1b73e94bd0503681fcf136a163de856f3a50139", size = 6721275, upload-time = "2025-11-16T22:50:56.794Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/74/5b/1919abf32d8722646a38cd527bc3771eb229a32724ee6ba340ead9b92249/numpy-2.3.5-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1062fde1dcf469571705945b0f221b73928f34a20c904ffb45db101907c3454e", size = 14306855, upload-time = "2025-11-16T22:50:59.208Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/a5/87/6831980559434973bebc30cd9c1f21e541a0f2b0c280d43d3afd909b66d0/numpy-2.3.5-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ce581db493ea1a96c0556360ede6607496e8bf9b3a8efa66e06477267bc831e9", size = 16657359, upload-time = "2025-11-16T22:51:01.991Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/dd/91/c797f544491ee99fd00495f12ebb7802c440c1915811d72ac5b4479a3356/numpy-2.3.5-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:cc8920d2ec5fa99875b670bb86ddeb21e295cb07aa331810d9e486e0b969d946", size = 16093374, upload-time = "2025-11-16T22:51:05.291Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/74/a6/54da03253afcbe7a72785ec4da9c69fb7a17710141ff9ac5fcb2e32dbe64/numpy-2.3.5-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:9ee2197ef8c4f0dfe405d835f3b6a14f5fee7782b5de51ba06fb65fc9b36e9f1", size = 18594587, upload-time = "2025-11-16T22:51:08.585Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/80/e9/aff53abbdd41b0ecca94285f325aff42357c6b5abc482a3fcb4994290b18/numpy-2.3.5-cp313-cp313t-win32.whl", hash = "sha256:70b37199913c1bd300ff6e2693316c6f869c7ee16378faf10e4f5e3275b299c3", size = 6405940, upload-time = "2025-11-16T22:51:11.541Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/d5/81/50613fec9d4de5480de18d4f8ef59ad7e344d497edbef3cfd80f24f98461/numpy-2.3.5-cp313-cp313t-win_amd64.whl", hash = "sha256:b501b5fa195cc9e24fe102f21ec0a44dffc231d2af79950b451e0d99cea02234", size = 12920341, upload-time = "2025-11-16T22:51:14.312Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/bb/ab/08fd63b9a74303947f34f0bd7c5903b9c5532c2d287bead5bdf4c556c486/numpy-2.3.5-cp313-cp313t-win_arm64.whl", hash = "sha256:a80afd79f45f3c4a7d341f13acbe058d1ca8ac017c165d3fa0d3de6bc1a079d7", size = 10262507, upload-time = "2025-11-16T22:51:16.846Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/ba/97/1a914559c19e32d6b2e233cf9a6a114e67c856d35b1d6babca571a3e880f/numpy-2.3.5-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:bf06bc2af43fa8d32d30fae16ad965663e966b1a3202ed407b84c989c3221e82", size = 16735706, upload-time = "2025-11-16T22:51:19.558Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/57/d4/51233b1c1b13ecd796311216ae417796b88b0616cfd8a33ae4536330748a/numpy-2.3.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:052e8c42e0c49d2575621c158934920524f6c5da05a1d3b9bab5d8e259e045f0", size = 12264507, upload-time = "2025-11-16T22:51:22.492Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/45/98/2fe46c5c2675b8306d0b4a3ec3494273e93e1226a490f766e84298576956/numpy-2.3.5-cp314-cp314-macosx_14_0_arm64.whl", hash = "sha256:1ed1ec893cff7040a02c8aa1c8611b94d395590d553f6b53629a4461dc7f7b63", size = 5093049, upload-time = "2025-11-16T22:51:25.171Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/ce/0e/0698378989bb0ac5f1660c81c78ab1fe5476c1a521ca9ee9d0710ce54099/numpy-2.3.5-cp314-cp314-macosx_14_0_x86_64.whl", hash = "sha256:2dcd0808a421a482a080f89859a18beb0b3d1e905b81e617a188bd80422d62e9", size = 6626603, upload-time = "2025-11-16T22:51:27Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/5e/a6/9ca0eecc489640615642a6cbc0ca9e10df70df38c4d43f5a928ff18d8827/numpy-2.3.5-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:727fd05b57df37dc0bcf1a27767a3d9a78cbbc92822445f32cc3436ba797337b", size = 14262696, upload-time = "2025-11-16T22:51:29.402Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/c8/f6/07ec185b90ec9d7217a00eeeed7383b73d7e709dae2a9a021b051542a708/numpy-2.3.5-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fffe29a1ef00883599d1dc2c51aa2e5d80afe49523c261a74933df395c15c520", size = 16597350, upload-time = "2025-11-16T22:51:32.167Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/75/37/164071d1dde6a1a84c9b8e5b414fa127981bad47adf3a6b7e23917e52190/numpy-2.3.5-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:8f7f0e05112916223d3f438f293abf0727e1181b5983f413dfa2fefc4098245c", size = 16040190, upload-time = "2025-11-16T22:51:35.403Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/08/3c/f18b82a406b04859eb026d204e4e1773eb41c5be58410f41ffa511d114ae/numpy-2.3.5-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:2e2eb32ddb9ccb817d620ac1d8dae7c3f641c1e5f55f531a33e8ab97960a75b8", size = 18536749, upload-time = "2025-11-16T22:51:39.698Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/40/79/f82f572bf44cf0023a2fe8588768e23e1592585020d638999f15158609e1/numpy-2.3.5-cp314-cp314-win32.whl", hash = "sha256:66f85ce62c70b843bab1fb14a05d5737741e74e28c7b8b5a064de10142fad248", size = 6335432, upload-time = "2025-11-16T22:51:42.476Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/a3/2e/235b4d96619931192c91660805e5e49242389742a7a82c27665021db690c/numpy-2.3.5-cp314-cp314-win_amd64.whl", hash = "sha256:e6a0bc88393d65807d751a614207b7129a310ca4fe76a74e5c7da5fa5671417e", size = 12919388, upload-time = "2025-11-16T22:51:45.275Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/07/2b/29fd75ce45d22a39c61aad74f3d718e7ab67ccf839ca8b60866054eb15f8/numpy-2.3.5-cp314-cp314-win_arm64.whl", hash = "sha256:aeffcab3d4b43712bb7a60b65f6044d444e75e563ff6180af8f98dd4b905dfd2", size = 10476651, upload-time = "2025-11-16T22:51:47.749Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/17/e1/f6a721234ebd4d87084cfa68d081bcba2f5cfe1974f7de4e0e8b9b2a2ba1/numpy-2.3.5-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:17531366a2e3a9e30762c000f2c43a9aaa05728712e25c11ce1dbe700c53ad41", size = 16834503, upload-time = "2025-11-16T22:51:50.443Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/5c/1c/baf7ffdc3af9c356e1c135e57ab7cf8d247931b9554f55c467efe2c69eff/numpy-2.3.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:d21644de1b609825ede2f48be98dfde4656aefc713654eeee280e37cadc4e0ad", size = 12381612, upload-time = "2025-11-16T22:51:53.609Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/74/91/f7f0295151407ddc9ba34e699013c32c3c91944f9b35fcf9281163dc1468/numpy-2.3.5-cp314-cp314t-macosx_14_0_arm64.whl", hash = "sha256:c804e3a5aba5460c73955c955bdbd5c08c354954e9270a2c1565f62e866bdc39", size = 5210042, upload-time = "2025-11-16T22:51:56.213Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/2e/3b/78aebf345104ec50dd50a4d06ddeb46a9ff5261c33bcc58b1c4f12f85ec2/numpy-2.3.5-cp314-cp314t-macosx_14_0_x86_64.whl", hash = "sha256:cc0a57f895b96ec78969c34f682c602bf8da1a0270b09bc65673df2e7638ec20", size = 6724502, upload-time = "2025-11-16T22:51:58.584Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/02/c6/7c34b528740512e57ef1b7c8337ab0b4f0bddf34c723b8996c675bc2bc91/numpy-2.3.5-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:900218e456384ea676e24ea6a0417f030a3b07306d29d7ad843957b40a9d8d52", size = 14308962, upload-time = "2025-11-16T22:52:01.698Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/80/35/09d433c5262bc32d725bafc619e095b6a6651caf94027a03da624146f655/numpy-2.3.5-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:09a1bea522b25109bf8e6f3027bd810f7c1085c64a0c7ce050c1676ad0ba010b", size = 16655054, upload-time = "2025-11-16T22:52:04.267Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/7a/ab/6a7b259703c09a88804fa2430b43d6457b692378f6b74b356155283566ac/numpy-2.3.5-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:04822c00b5fd0323c8166d66c701dc31b7fbd252c100acd708c48f763968d6a3", size = 16091613, upload-time = "2025-11-16T22:52:08.651Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/c2/88/330da2071e8771e60d1038166ff9d73f29da37b01ec3eb43cb1427464e10/numpy-2.3.5-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:d6889ec4ec662a1a37eb4b4fb26b6100841804dac55bd9df579e326cdc146227", size = 18591147, upload-time = "2025-11-16T22:52:11.453Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/51/41/851c4b4082402d9ea860c3626db5d5df47164a712cb23b54be028b184c1c/numpy-2.3.5-cp314-cp314t-win32.whl", hash = "sha256:93eebbcf1aafdf7e2ddd44c2923e2672e1010bddc014138b229e49725b4d6be5", size = 6479806, upload-time = "2025-11-16T22:52:14.641Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/90/30/d48bde1dfd93332fa557cff1972fbc039e055a52021fbef4c2c4b1eefd17/numpy-2.3.5-cp314-cp314t-win_amd64.whl", hash = "sha256:c8a9958e88b65c3b27e22ca2a076311636850b612d6bbfb76e8d156aacde2aaf", size = 13105760, upload-time = "2025-11-16T22:52:17.975Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/2d/fd/4b5eb0b3e888d86aee4d198c23acec7d214baaf17ea93c1adec94c9518b9/numpy-2.3.5-cp314-cp314t-win_arm64.whl", hash = "sha256:6203fdf9f3dc5bdaed7319ad8698e685c7a3be10819f41d32a0723e611733b42", size = 10545459, upload-time = "2025-11-16T22:52:20.55Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "ormsgpack"
                                                                  +version = "1.12.0"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/6c/67/d5ef41c3b4a94400be801984ef7c7fc9623e1a82b643e74eeec367e7462b/ormsgpack-1.12.0.tar.gz", hash = "sha256:94be818fdbb0285945839b88763b269987787cb2f7ef280cad5d6ec815b7e608", size = 49959, upload-time = "2025-11-04T18:30:10.083Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/a2/f2/c1036b2775fcc0cfa5fd618c53bcd3b862ee07298fb627f03af4c7982f84/ormsgpack-1.12.0-cp312-cp312-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:e0c1e08b64d99076fee155276097489b82cc56e8d5951c03c721a65a32f44494", size = 369538, upload-time = "2025-11-04T18:29:37.125Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/d9/ca/526c4ae02f3cb34621af91bf8282a10d666757c2e0c6ff391ff5d403d607/ormsgpack-1.12.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3fd43bcb299131690b8e0677af172020b2ada8e625169034b42ac0c13adf84aa", size = 195872, upload-time = "2025-11-04T18:29:38.34Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/7f/0f/83bb7968e9715f6a85be53d041b1e6324a05428f56b8b980dac866886871/ormsgpack-1.12.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f0149d595341e22ead340bf281b2995c4cc7dc8d522a6b5f575fe17aa407604", size = 206469, upload-time = "2025-11-04T18:29:39.749Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/02/e3/9e93ca1065f2d4af035804a842b1ff3025bab580c7918239bb225cd1fee2/ormsgpack-1.12.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f19a1b27d169deb553c80fd10b589fc2be1fc14cee779fae79fcaf40db04de2b", size = 208273, upload-time = "2025-11-04T18:29:40.769Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/b3/d8/6d6ef901b3a8b8f3ab8836b135a56eb7f66c559003e251d9530bedb12627/ormsgpack-1.12.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6f28896942d655064940dfe06118b7ce1e3468d051483148bf02c99ec157483a", size = 377839, upload-time = "2025-11-04T18:29:42.092Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/4c/72/fcb704bfa4c2c3a37b647d597cc45a13cffc9d50baac635a9ad620731d29/ormsgpack-1.12.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:9396efcfa48b4abbc06e44c5dbc3c4574a8381a80cb4cd01eea15d28b38c554e", size = 471446, upload-time = "2025-11-04T18:29:43.133Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/84/f8/402e4e3eb997c2ee534c99bec4b5bb359c2a1f9edadf043e254a71e11378/ormsgpack-1.12.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:96586ed537a5fb386a162c4f9f7d8e6f76e07b38a990d50c73f11131e00ff040", size = 381783, upload-time = "2025-11-04T18:29:44.466Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/f0/8d/5897b700360bc00911b70ae5ef1134ee7abf5baa81a92a4be005917d3dfd/ormsgpack-1.12.0-cp312-cp312-win_amd64.whl", hash = "sha256:e70387112fb3870e4844de090014212cdcf1342f5022047aecca01ec7de05d7a", size = 112943, upload-time = "2025-11-04T18:29:45.468Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/5b/44/1e73649f79bb96d6cf9e5bcbac68b6216d238bba80af351c4c0cbcf7ee15/ormsgpack-1.12.0-cp312-cp312-win_arm64.whl", hash = "sha256:d71290a23de5d4829610c42665d816c661ecad8979883f3f06b2e3ab9639962e", size = 106688, upload-time = "2025-11-04T18:29:46.411Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/2e/e8/35f11ce9313111488b26b3035e4cbe55caa27909c0b6c8b5b5cd59f9661e/ormsgpack-1.12.0-cp313-cp313-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:766f2f3b512d85cd375b26a8b1329b99843560b50b93d3880718e634ad4a5de5", size = 369574, upload-time = "2025-11-04T18:29:47.431Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/61/b0/77461587f412d4e598d3687bafe23455ed0f26269f44be20252eddaa624e/ormsgpack-1.12.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:84b285b1f3f185aad7da45641b873b30acfd13084cf829cf668c4c6480a81583", size = 195893, upload-time = "2025-11-04T18:29:48.735Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/c6/67/e197ceb04c3b550589e5407fc9fdae10f4e2e2eba5fdac921a269e02e974/ormsgpack-1.12.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e23604fc79fe110292cb365f4c8232e64e63a34f470538be320feae3921f271b", size = 206503, upload-time = "2025-11-04T18:29:49.99Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/0b/b1/7fa8ba82a25cef678983c7976f85edeef5014f5c26495f338258e6a3cf1c/ormsgpack-1.12.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc32b156c113a0fae2975051417d8d9a7a5247c34b2d7239410c46b75ce9348a", size = 208257, upload-time = "2025-11-04T18:29:51.007Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/ce/b1/759e999390000d2589e6d0797f7265e6ec28378547075d28d3736248ab63/ormsgpack-1.12.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:94ac500dd10c20fa8b8a23bc55606250bfe711bf9716828d9f3d44dfd1f25668", size = 377852, upload-time = "2025-11-04T18:29:52.103Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/51/e7/0af737c94272494d9d84a3c29cc42c973ef7fd2342917020906596db863c/ormsgpack-1.12.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:c5201ff7ec24f721f813a182885a17064cffdbe46b2412685a52e6374a872c8f", size = 471456, upload-time = "2025-11-04T18:29:53.336Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/f4/ba/c81f0aa4f19fbf457213395945b672e6fde3ce777e3587456e7f0fca2147/ormsgpack-1.12.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a9740bb3839c9368aacae1cbcfc474ee6976458f41cc135372b7255d5206c953", size = 381813, upload-time = "2025-11-04T18:29:54.394Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/ce/15/429c72d64323503fd42cc4ca8398930ded8aa8b3470df8a86b3bbae7a35c/ormsgpack-1.12.0-cp313-cp313-win_amd64.whl", hash = "sha256:8ed37f29772432048b58174e920a1d4c4cde0404a5d448d3d8bbcc95d86a6918", size = 112949, upload-time = "2025-11-04T18:29:55.371Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/55/b9/e72c451a40f8c57bfc229e0b8e536ecea7203c8f0a839676df2ffb605c62/ormsgpack-1.12.0-cp313-cp313-win_arm64.whl", hash = "sha256:b03994bbec5d6d42e03d6604e327863f885bde67aa61e06107ce1fa5bdd3e71d", size = 106689, upload-time = "2025-11-04T18:29:56.262Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/13/16/13eab1a75da531b359105fdee90dda0b6bd1ca0a09880250cf91d8bdfdea/ormsgpack-1.12.0-cp314-cp314-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:0f3981ba3cba80656012090337e548e597799e14b41e3d0b595ab5ab05a23d7f", size = 369620, upload-time = "2025-11-04T18:29:57.255Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/a0/c1/cbcc38b7af4ce58d8893e56d3595c0c8dcd117093bf048f889cf351bdba0/ormsgpack-1.12.0-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:901f6f55184d6776dbd5183cbce14caf05bf7f467eef52faf9b094686980bf71", size = 195925, upload-time = "2025-11-04T18:29:58.34Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/5c/59/4fa4dc0681490e12b75333440a1c0fd9741b0ebff272b1db4a29d35c2021/ormsgpack-1.12.0-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e13b15412571422b711b40f45e3fe6d993ea3314b5e97d1a853fe99226c5effc", size = 206594, upload-time = "2025-11-04T18:29:59.329Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/39/67/249770896bc32bb91b22c30256961f935d0915cbcf6e289a7fc961d9b14c/ormsgpack-1.12.0-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:91fa8a452553a62e5fb3fbab471e7faf7b3bec3c87a2f355ebf3d7aab290fe4f", size = 208307, upload-time = "2025-11-04T18:30:00.377Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/07/0a/e041a248cd72f2f4c07e155913e0a3ede4c86cf21a40ae6cd79f135f2847/ormsgpack-1.12.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:74ec101f69624695eec4ce7c953192d97748254abe78fb01b591f06d529e1952", size = 377844, upload-time = "2025-11-04T18:30:01.389Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/d8/71/6f7773e4ffda73a358ce4bba69b3e8bee9d40a7a06315e4c1cd7a3ea9d02/ormsgpack-1.12.0-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:9bbf7896580848326c1f9bd7531f264e561f98db7e08e15aa75963d83832c717", size = 471572, upload-time = "2025-11-04T18:30:02.486Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/65/29/af6769a4289c07acc71e7bda1d64fb31800563147d73142686e185e82348/ormsgpack-1.12.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:7567917da613b8f8d591c1674e411fd3404bea41ef2b9a0e0a1e049c0f9406d7", size = 381842, upload-time = "2025-11-04T18:30:03.799Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/0b/dd/0a86195ee7a1a96c088aefc8504385e881cf56f4563ed81bafe21cbf1fb0/ormsgpack-1.12.0-cp314-cp314-win_amd64.whl", hash = "sha256:4e418256c5d8622b8bc92861936f7c6a0131355e7bcad88a42102ae8227f8a1c", size = 113008, upload-time = "2025-11-04T18:30:04.777Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/4c/57/fafc79e32f3087f6f26f509d80b8167516326bfea38d30502627c01617e0/ormsgpack-1.12.0-cp314-cp314-win_arm64.whl", hash = "sha256:433ace29aa02713554f714c62a4e4dcad0c9e32674ba4f66742c91a4c3b1b969", size = 106648, upload-time = "2025-11-04T18:30:05.708Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/b3/cf/5d58d9b132128d2fe5d586355dde76af386554abef00d608f66b913bff1f/ormsgpack-1.12.0-cp314-cp314t-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:e57164be4ca34b64e210ec515059193280ac84df4d6f31a6fcbfb2fc8436de55", size = 369803, upload-time = "2025-11-04T18:30:06.728Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/67/42/968a2da361eaff2e4cbb17c82c7599787babf16684110ad70409646cc1e4/ormsgpack-1.12.0-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:904f96289deaa92fc6440b122edc27c5bdc28234edd63717f6d853d88c823a83", size = 195991, upload-time = "2025-11-04T18:30:07.713Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/03/f0/9696c6c6cf8ad35170f0be8d0ef3523cc258083535f6c8071cb8235ebb8b/ormsgpack-1.12.0-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b291d086e524a1062d57d1b7b5a8bcaaf29caebf0212fec12fd86240bd33633", size = 208316, upload-time = "2025-11-04T18:30:08.663Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "packaging"
                                                                  +version = "25.0"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/a1/d4/1fc4078c65507b51b96ca8f8c3ba19e6a61c8253c72794544580a7b6c24d/packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f", size = 165727, upload-time = "2025-04-19T11:48:59.673Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469, upload-time = "2025-04-19T11:48:57.875Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "pandas"
                                                                  +version = "2.3.3"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +dependencies = [
                                                                  +    { name = "numpy" },
                                                                  +    { name = "python-dateutil" },
                                                                  +    { name = "pytz" },
                                                                  +    { name = "tzdata" },
                                                                  +]
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/33/01/d40b85317f86cf08d853a4f495195c73815fdf205eef3993821720274518/pandas-2.3.3.tar.gz", hash = "sha256:e05e1af93b977f7eafa636d043f9f94c7ee3ac81af99c13508215942e64c993b", size = 4495223, upload-time = "2025-09-29T23:34:51.853Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/9c/fb/231d89e8637c808b997d172b18e9d4a4bc7bf31296196c260526055d1ea0/pandas-2.3.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6d21f6d74eb1725c2efaa71a2bfc661a0689579b58e9c0ca58a739ff0b002b53", size = 11597846, upload-time = "2025-09-29T23:19:48.856Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/5c/bd/bf8064d9cfa214294356c2d6702b716d3cf3bb24be59287a6a21e24cae6b/pandas-2.3.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3fd2f887589c7aa868e02632612ba39acb0b8948faf5cc58f0850e165bd46f35", size = 10729618, upload-time = "2025-09-29T23:39:08.659Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/57/56/cf2dbe1a3f5271370669475ead12ce77c61726ffd19a35546e31aa8edf4e/pandas-2.3.3-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ecaf1e12bdc03c86ad4a7ea848d66c685cb6851d807a26aa245ca3d2017a1908", size = 11737212, upload-time = "2025-09-29T23:19:59.765Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/e5/63/cd7d615331b328e287d8233ba9fdf191a9c2d11b6af0c7a59cfcec23de68/pandas-2.3.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b3d11d2fda7eb164ef27ffc14b4fcab16a80e1ce67e9f57e19ec0afaf715ba89", size = 12362693, upload-time = "2025-09-29T23:20:14.098Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/a6/de/8b1895b107277d52f2b42d3a6806e69cfef0d5cf1d0ba343470b9d8e0a04/pandas-2.3.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a68e15f780eddf2b07d242e17a04aa187a7ee12b40b930bfdd78070556550e98", size = 12771002, upload-time = "2025-09-29T23:20:26.76Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/87/21/84072af3187a677c5893b170ba2c8fbe450a6ff911234916da889b698220/pandas-2.3.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:371a4ab48e950033bcf52b6527eccb564f52dc826c02afd9a1bc0ab731bba084", size = 13450971, upload-time = "2025-09-29T23:20:41.344Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/86/41/585a168330ff063014880a80d744219dbf1dd7a1c706e75ab3425a987384/pandas-2.3.3-cp312-cp312-win_amd64.whl", hash = "sha256:a16dcec078a01eeef8ee61bf64074b4e524a2a3f4b3be9326420cabe59c4778b", size = 10992722, upload-time = "2025-09-29T23:20:54.139Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/cd/4b/18b035ee18f97c1040d94debd8f2e737000ad70ccc8f5513f4eefad75f4b/pandas-2.3.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:56851a737e3470de7fa88e6131f41281ed440d29a9268dcbf0002da5ac366713", size = 11544671, upload-time = "2025-09-29T23:21:05.024Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/31/94/72fac03573102779920099bcac1c3b05975c2cb5f01eac609faf34bed1ca/pandas-2.3.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:bdcd9d1167f4885211e401b3036c0c8d9e274eee67ea8d0758a256d60704cfe8", size = 10680807, upload-time = "2025-09-29T23:21:15.979Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/16/87/9472cf4a487d848476865321de18cc8c920b8cab98453ab79dbbc98db63a/pandas-2.3.3-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e32e7cc9af0f1cc15548288a51a3b681cc2a219faa838e995f7dc53dbab1062d", size = 11709872, upload-time = "2025-09-29T23:21:27.165Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/15/07/284f757f63f8a8d69ed4472bfd85122bd086e637bf4ed09de572d575a693/pandas-2.3.3-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:318d77e0e42a628c04dc56bcef4b40de67918f7041c2b061af1da41dcff670ac", size = 12306371, upload-time = "2025-09-29T23:21:40.532Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/33/81/a3afc88fca4aa925804a27d2676d22dcd2031c2ebe08aabd0ae55b9ff282/pandas-2.3.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4e0a175408804d566144e170d0476b15d78458795bb18f1304fb94160cabf40c", size = 12765333, upload-time = "2025-09-29T23:21:55.77Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/8d/0f/b4d4ae743a83742f1153464cf1a8ecfafc3ac59722a0b5c8602310cb7158/pandas-2.3.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:93c2d9ab0fc11822b5eece72ec9587e172f63cff87c00b062f6e37448ced4493", size = 13418120, upload-time = "2025-09-29T23:22:10.109Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/4f/c7/e54682c96a895d0c808453269e0b5928a07a127a15704fedb643e9b0a4c8/pandas-2.3.3-cp313-cp313-win_amd64.whl", hash = "sha256:f8bfc0e12dc78f777f323f55c58649591b2cd0c43534e8355c51d3fede5f4dee", size = 10993991, upload-time = "2025-09-29T23:25:04.889Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/f9/ca/3f8d4f49740799189e1395812f3bf23b5e8fc7c190827d55a610da72ce55/pandas-2.3.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:75ea25f9529fdec2d2e93a42c523962261e567d250b0013b16210e1d40d7c2e5", size = 12048227, upload-time = "2025-09-29T23:22:24.343Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/0e/5a/f43efec3e8c0cc92c4663ccad372dbdff72b60bdb56b2749f04aa1d07d7e/pandas-2.3.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:74ecdf1d301e812db96a465a525952f4dde225fdb6d8e5a521d47e1f42041e21", size = 11411056, upload-time = "2025-09-29T23:22:37.762Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/46/b1/85331edfc591208c9d1a63a06baa67b21d332e63b7a591a5ba42a10bb507/pandas-2.3.3-cp313-cp313t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6435cb949cb34ec11cc9860246ccb2fdc9ecd742c12d3304989017d53f039a78", size = 11645189, upload-time = "2025-09-29T23:22:51.688Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/44/23/78d645adc35d94d1ac4f2a3c4112ab6f5b8999f4898b8cdf01252f8df4a9/pandas-2.3.3-cp313-cp313t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:900f47d8f20860de523a1ac881c4c36d65efcb2eb850e6948140fa781736e110", size = 12121912, upload-time = "2025-09-29T23:23:05.042Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/53/da/d10013df5e6aaef6b425aa0c32e1fc1f3e431e4bcabd420517dceadce354/pandas-2.3.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:a45c765238e2ed7d7c608fc5bc4a6f88b642f2f01e70c0c23d2224dd21829d86", size = 12712160, upload-time = "2025-09-29T23:23:28.57Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/bd/17/e756653095a083d8a37cbd816cb87148debcfcd920129b25f99dd8d04271/pandas-2.3.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:c4fc4c21971a1a9f4bdb4c73978c7f7256caa3e62b323f70d6cb80db583350bc", size = 13199233, upload-time = "2025-09-29T23:24:24.876Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/04/fd/74903979833db8390b73b3a8a7d30d146d710bd32703724dd9083950386f/pandas-2.3.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:ee15f284898e7b246df8087fc82b87b01686f98ee67d85a17b7ab44143a3a9a0", size = 11540635, upload-time = "2025-09-29T23:25:52.486Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/21/00/266d6b357ad5e6d3ad55093a7e8efc7dd245f5a842b584db9f30b0f0a287/pandas-2.3.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:1611aedd912e1ff81ff41c745822980c49ce4a7907537be8692c8dbc31924593", size = 10759079, upload-time = "2025-09-29T23:26:33.204Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/ca/05/d01ef80a7a3a12b2f8bbf16daba1e17c98a2f039cbc8e2f77a2c5a63d382/pandas-2.3.3-cp314-cp314-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6d2cefc361461662ac48810cb14365a365ce864afe85ef1f447ff5a1e99ea81c", size = 11814049, upload-time = "2025-09-29T23:27:15.384Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/15/b2/0e62f78c0c5ba7e3d2c5945a82456f4fac76c480940f805e0b97fcbc2f65/pandas-2.3.3-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ee67acbbf05014ea6c763beb097e03cd629961c8a632075eeb34247120abcb4b", size = 12332638, upload-time = "2025-09-29T23:27:51.625Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/c5/33/dd70400631b62b9b29c3c93d2feee1d0964dc2bae2e5ad7a6c73a7f25325/pandas-2.3.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:c46467899aaa4da076d5abc11084634e2d197e9460643dd455ac3db5856b24d6", size = 12886834, upload-time = "2025-09-29T23:28:21.289Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/d3/18/b5d48f55821228d0d2692b34fd5034bb185e854bdb592e9c640f6290e012/pandas-2.3.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:6253c72c6a1d990a410bc7de641d34053364ef8bcd3126f7e7450125887dffe3", size = 13409925, upload-time = "2025-09-29T23:28:58.261Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/a6/3d/124ac75fcd0ecc09b8fdccb0246ef65e35b012030defb0e0eba2cbbbe948/pandas-2.3.3-cp314-cp314-win_amd64.whl", hash = "sha256:1b07204a219b3b7350abaae088f451860223a52cfb8a6c53358e7948735158e5", size = 11109071, upload-time = "2025-09-29T23:32:27.484Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/89/9c/0e21c895c38a157e0faa1fb64587a9226d6dd46452cac4532d80c3c4a244/pandas-2.3.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:2462b1a365b6109d275250baaae7b760fd25c726aaca0054649286bcfbb3e8ec", size = 12048504, upload-time = "2025-09-29T23:29:31.47Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/d7/82/b69a1c95df796858777b68fbe6a81d37443a33319761d7c652ce77797475/pandas-2.3.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:0242fe9a49aa8b4d78a4fa03acb397a58833ef6199e9aa40a95f027bb3a1b6e7", size = 11410702, upload-time = "2025-09-29T23:29:54.591Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/f9/88/702bde3ba0a94b8c73a0181e05144b10f13f29ebfc2150c3a79062a8195d/pandas-2.3.3-cp314-cp314t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a21d830e78df0a515db2b3d2f5570610f5e6bd2e27749770e8bb7b524b89b450", size = 11634535, upload-time = "2025-09-29T23:30:21.003Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/a4/1e/1bac1a839d12e6a82ec6cb40cda2edde64a2013a66963293696bbf31fbbb/pandas-2.3.3-cp314-cp314t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2e3ebdb170b5ef78f19bfb71b0dc5dc58775032361fa188e814959b74d726dd5", size = 12121582, upload-time = "2025-09-29T23:30:43.391Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/44/91/483de934193e12a3b1d6ae7c8645d083ff88dec75f46e827562f1e4b4da6/pandas-2.3.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:d051c0e065b94b7a3cea50eb1ec32e912cd96dba41647eb24104b6c6c14c5788", size = 12699963, upload-time = "2025-09-29T23:31:10.009Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/70/44/5191d2e4026f86a2a109053e194d3ba7a31a2d10a9c2348368c63ed4e85a/pandas-2.3.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:3869faf4bd07b3b66a9f462417d0ca3a9df29a9f6abd5d0d0dbab15dac7abe87", size = 13202175, upload-time = "2025-09-29T23:31:59.173Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "pandocfilters"
                                                                  +version = "1.5.1"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/70/6f/3dd4940bbe001c06a65f88e36bad298bc7a0de5036115639926b0c5c0458/pandocfilters-1.5.1.tar.gz", hash = "sha256:002b4a555ee4ebc03f8b66307e287fa492e4a77b4ea14d3f934328297bb4939e", size = 8454, upload-time = "2024-01-18T20:08:13.726Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/ef/af/4fbc8cab944db5d21b7e2a5b8e9211a03a79852b1157e2c102fcc61ac440/pandocfilters-1.5.1-py2.py3-none-any.whl", hash = "sha256:93be382804a9cdb0a7267585f157e5d1731bbe5545a85b268d6f5fe6232de2bc", size = 8663, upload-time = "2024-01-18T20:08:11.28Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "parso"
                                                                  +version = "0.8.5"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/d4/de/53e0bcf53d13e005bd8c92e7855142494f41171b34c2536b86187474184d/parso-0.8.5.tar.gz", hash = "sha256:034d7354a9a018bdce352f48b2a8a450f05e9d6ee85db84764e9b6bd96dafe5a", size = 401205, upload-time = "2025-08-23T15:15:28.028Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/16/32/f8e3c85d1d5250232a5d3477a2a28cc291968ff175caeadaf3cc19ce0e4a/parso-0.8.5-py2.py3-none-any.whl", hash = "sha256:646204b5ee239c396d040b90f9e272e9a8017c630092bf59980beb62fd033887", size = 106668, upload-time = "2025-08-23T15:15:25.663Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "pathspec"
                                                                  +version = "1.0.3"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/4c/b2/bb8e495d5262bfec41ab5cb18f522f1012933347fb5d9e62452d446baca2/pathspec-1.0.3.tar.gz", hash = "sha256:bac5cf97ae2c2876e2d25ebb15078eb04d76e4b98921ee31c6f85ade8b59444d", size = 130841, upload-time = "2026-01-09T15:46:46.009Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/32/2b/121e912bd60eebd623f873fd090de0e84f322972ab25a7f9044c056804ed/pathspec-1.0.3-py3-none-any.whl", hash = "sha256:e80767021c1cc524aa3fb14bedda9c34406591343cc42797b386ce7b9354fb6c", size = 55021, upload-time = "2026-01-09T15:46:44.652Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "pexpect"
                                                                  +version = "4.9.0"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +dependencies = [
                                                                  +    { name = "ptyprocess" },
                                                                  +]
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/42/92/cc564bf6381ff43ce1f4d06852fc19a2f11d180f23dc32d9588bee2f149d/pexpect-4.9.0.tar.gz", hash = "sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f", size = 166450, upload-time = "2023-11-25T09:07:26.339Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/9e/c3/059298687310d527a58bb01f3b1965787ee3b40dce76752eda8b44e9a2c5/pexpect-4.9.0-py2.py3-none-any.whl", hash = "sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523", size = 63772, upload-time = "2023-11-25T06:56:14.81Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "platformdirs"
                                                                  +version = "4.5.0"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/61/33/9611380c2bdb1225fdef633e2a9610622310fed35ab11dac9620972ee088/platformdirs-4.5.0.tar.gz", hash = "sha256:70ddccdd7c99fc5942e9fc25636a8b34d04c24b335100223152c2803e4063312", size = 21632, upload-time = "2025-10-08T17:44:48.791Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/73/cb/ac7874b3e5d58441674fb70742e6c374b28b0c7cb988d37d991cde47166c/platformdirs-4.5.0-py3-none-any.whl", hash = "sha256:e578a81bb873cbb89a41fcc904c7ef523cc18284b7e3b3ccf06aca1403b7ebd3", size = 18651, upload-time = "2025-10-08T17:44:47.223Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "pluggy"
                                                                  +version = "1.6.0"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412, upload-time = "2025-05-15T12:30:07.975Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "plum-dispatch"
                                                                  +version = "2.6.1"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +dependencies = [
                                                                  +    { name = "beartype" },
                                                                  +    { name = "rich" },
                                                                  +    { name = "typing-extensions" },
                                                                  +]
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/a8/df/36f6677eff00a853c6a7d365316920ea411aa8015cf218612871082e25e7/plum_dispatch-2.6.1.tar.gz", hash = "sha256:05d14f31bf2ac8550d7742426d5c5a3fa532d8ed7cc12ffd695c4b452cffbdfa", size = 34952, upload-time = "2025-12-18T11:56:54.862Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/42/88/71fa06eb487ed9d4fab0ad173300b7a58706385f98fb66b1ccdc3ec3d4dd/plum_dispatch-2.6.1-py3-none-any.whl", hash = "sha256:49cd83027498e35eac32c7a93ecd6a99970d72d90f4141cc93be760c7ba831c4", size = 41456, upload-time = "2025-12-18T11:56:53.599Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "prometheus-client"
                                                                  +version = "0.23.1"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/23/53/3edb5d68ecf6b38fcbcc1ad28391117d2a322d9a1a3eff04bfdb184d8c3b/prometheus_client-0.23.1.tar.gz", hash = "sha256:6ae8f9081eaaaf153a2e959d2e6c4f4fb57b12ef76c8c7980202f1e57b48b2ce", size = 80481, upload-time = "2025-09-18T20:47:25.043Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/b8/db/14bafcb4af2139e046d03fd00dea7873e48eafe18b7d2797e73d6681f210/prometheus_client-0.23.1-py3-none-any.whl", hash = "sha256:dd1913e6e76b59cfe44e7a4b83e01afc9873c1bdfd2ed8739f1e76aeca115f99", size = 61145, upload-time = "2025-09-18T20:47:23.875Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "prompt-toolkit"
                                                                  +version = "3.0.52"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +dependencies = [
                                                                  +    { name = "wcwidth" },
                                                                  +]
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/a1/96/06e01a7b38dce6fe1db213e061a4602dd6032a8a97ef6c1a862537732421/prompt_toolkit-3.0.52.tar.gz", hash = "sha256:28cde192929c8e7321de85de1ddbe736f1375148b02f2e17edd840042b1be855", size = 434198, upload-time = "2025-08-27T15:24:02.057Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/84/03/0d3ce49e2505ae70cf43bc5bb3033955d2fc9f932163e84dc0779cc47f48/prompt_toolkit-3.0.52-py3-none-any.whl", hash = "sha256:9aac639a3bbd33284347de5ad8d68ecc044b91a762dc39b7c21095fcd6a19955", size = 391431, upload-time = "2025-08-27T15:23:59.498Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "propcache"
                                                                  +version = "0.4.1"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/9e/da/e9fc233cf63743258bff22b3dfa7ea5baef7b5bc324af47a0ad89b8ffc6f/propcache-0.4.1.tar.gz", hash = "sha256:f48107a8c637e80362555f37ecf49abe20370e557cc4ab374f04ec4423c97c3d", size = 46442, upload-time = "2025-10-08T19:49:02.291Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/a2/0f/f17b1b2b221d5ca28b4b876e8bb046ac40466513960646bda8e1853cdfa2/propcache-0.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e153e9cd40cc8945138822807139367f256f89c6810c2634a4f6902b52d3b4e2", size = 80061, upload-time = "2025-10-08T19:46:46.075Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/76/47/8ccf75935f51448ba9a16a71b783eb7ef6b9ee60f5d14c7f8a8a79fbeed7/propcache-0.4.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:cd547953428f7abb73c5ad82cbb32109566204260d98e41e5dfdc682eb7f8403", size = 46037, upload-time = "2025-10-08T19:46:47.23Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/0a/b6/5c9a0e42df4d00bfb4a3cbbe5cf9f54260300c88a0e9af1f47ca5ce17ac0/propcache-0.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f048da1b4f243fc44f205dfd320933a951b8d89e0afd4c7cacc762a8b9165207", size = 47324, upload-time = "2025-10-08T19:46:48.384Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/9e/d3/6c7ee328b39a81ee877c962469f1e795f9db87f925251efeb0545e0020d0/propcache-0.4.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ec17c65562a827bba85e3872ead335f95405ea1674860d96483a02f5c698fa72", size = 225505, upload-time = "2025-10-08T19:46:50.055Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/01/5d/1c53f4563490b1d06a684742cc6076ef944bc6457df6051b7d1a877c057b/propcache-0.4.1-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:405aac25c6394ef275dee4c709be43745d36674b223ba4eb7144bf4d691b7367", size = 230242, upload-time = "2025-10-08T19:46:51.815Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/20/e1/ce4620633b0e2422207c3cb774a0ee61cac13abc6217763a7b9e2e3f4a12/propcache-0.4.1-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0013cb6f8dde4b2a2f66903b8ba740bdfe378c943c4377a200551ceb27f379e4", size = 238474, upload-time = "2025-10-08T19:46:53.208Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/46/4b/3aae6835b8e5f44ea6a68348ad90f78134047b503765087be2f9912140ea/propcache-0.4.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:15932ab57837c3368b024473a525e25d316d8353016e7cc0e5ba9eb343fbb1cf", size = 221575, upload-time = "2025-10-08T19:46:54.511Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/6e/a5/8a5e8678bcc9d3a1a15b9a29165640d64762d424a16af543f00629c87338/propcache-0.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:031dce78b9dc099f4c29785d9cf5577a3faf9ebf74ecbd3c856a7b92768c3df3", size = 216736, upload-time = "2025-10-08T19:46:56.212Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/f1/63/b7b215eddeac83ca1c6b934f89d09a625aa9ee4ba158338854c87210cc36/propcache-0.4.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:ab08df6c9a035bee56e31af99be621526bd237bea9f32def431c656b29e41778", size = 213019, upload-time = "2025-10-08T19:46:57.595Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/57/74/f580099a58c8af587cac7ba19ee7cb418506342fbbe2d4a4401661cca886/propcache-0.4.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:4d7af63f9f93fe593afbf104c21b3b15868efb2c21d07d8732c0c4287e66b6a6", size = 220376, upload-time = "2025-10-08T19:46:59.067Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/c4/ee/542f1313aff7eaf19c2bb758c5d0560d2683dac001a1c96d0774af799843/propcache-0.4.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:cfc27c945f422e8b5071b6e93169679e4eb5bf73bbcbf1ba3ae3a83d2f78ebd9", size = 226988, upload-time = "2025-10-08T19:47:00.544Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/8f/18/9c6b015dd9c6930f6ce2229e1f02fb35298b847f2087ea2b436a5bfa7287/propcache-0.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:35c3277624a080cc6ec6f847cbbbb5b49affa3598c4535a0a4682a697aaa5c75", size = 215615, upload-time = "2025-10-08T19:47:01.968Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/80/9e/e7b85720b98c45a45e1fca6a177024934dc9bc5f4d5dd04207f216fc33ed/propcache-0.4.1-cp312-cp312-win32.whl", hash = "sha256:671538c2262dadb5ba6395e26c1731e1d52534bfe9ae56d0b5573ce539266aa8", size = 38066, upload-time = "2025-10-08T19:47:03.503Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/54/09/d19cff2a5aaac632ec8fc03737b223597b1e347416934c1b3a7df079784c/propcache-0.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:cb2d222e72399fcf5890d1d5cc1060857b9b236adff2792ff48ca2dfd46c81db", size = 41655, upload-time = "2025-10-08T19:47:04.973Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/68/ab/6b5c191bb5de08036a8c697b265d4ca76148efb10fa162f14af14fb5f076/propcache-0.4.1-cp312-cp312-win_arm64.whl", hash = "sha256:204483131fb222bdaaeeea9f9e6c6ed0cac32731f75dfc1d4a567fc1926477c1", size = 37789, upload-time = "2025-10-08T19:47:06.077Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/bf/df/6d9c1b6ac12b003837dde8a10231a7344512186e87b36e855bef32241942/propcache-0.4.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:43eedf29202c08550aac1d14e0ee619b0430aaef78f85864c1a892294fbc28cf", size = 77750, upload-time = "2025-10-08T19:47:07.648Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/8b/e8/677a0025e8a2acf07d3418a2e7ba529c9c33caf09d3c1f25513023c1db56/propcache-0.4.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:d62cdfcfd89ccb8de04e0eda998535c406bf5e060ffd56be6c586cbcc05b3311", size = 44780, upload-time = "2025-10-08T19:47:08.851Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/89/a4/92380f7ca60f99ebae761936bc48a72a639e8a47b29050615eef757cb2a7/propcache-0.4.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:cae65ad55793da34db5f54e4029b89d3b9b9490d8abe1b4c7ab5d4b8ec7ebf74", size = 46308, upload-time = "2025-10-08T19:47:09.982Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/2d/48/c5ac64dee5262044348d1d78a5f85dd1a57464a60d30daee946699963eb3/propcache-0.4.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:333ddb9031d2704a301ee3e506dc46b1fe5f294ec198ed6435ad5b6a085facfe", size = 208182, upload-time = "2025-10-08T19:47:11.319Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/c6/0c/cd762dd011a9287389a6a3eb43aa30207bde253610cca06824aeabfe9653/propcache-0.4.1-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:fd0858c20f078a32cf55f7e81473d96dcf3b93fd2ccdb3d40fdf54b8573df3af", size = 211215, upload-time = "2025-10-08T19:47:13.146Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/30/3e/49861e90233ba36890ae0ca4c660e95df565b2cd15d4a68556ab5865974e/propcache-0.4.1-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:678ae89ebc632c5c204c794f8dab2837c5f159aeb59e6ed0539500400577298c", size = 218112, upload-time = "2025-10-08T19:47:14.913Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/f1/8b/544bc867e24e1bd48f3118cecd3b05c694e160a168478fa28770f22fd094/propcache-0.4.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d472aeb4fbf9865e0c6d622d7f4d54a4e101a89715d8904282bb5f9a2f476c3f", size = 204442, upload-time = "2025-10-08T19:47:16.277Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/50/a6/4282772fd016a76d3e5c0df58380a5ea64900afd836cec2c2f662d1b9bb3/propcache-0.4.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4d3df5fa7e36b3225954fba85589da77a0fe6a53e3976de39caf04a0db4c36f1", size = 199398, upload-time = "2025-10-08T19:47:17.962Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/3e/ec/d8a7cd406ee1ddb705db2139f8a10a8a427100347bd698e7014351c7af09/propcache-0.4.1-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:ee17f18d2498f2673e432faaa71698032b0127ebf23ae5974eeaf806c279df24", size = 196920, upload-time = "2025-10-08T19:47:19.355Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/f6/6c/f38ab64af3764f431e359f8baf9e0a21013e24329e8b85d2da32e8ed07ca/propcache-0.4.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:580e97762b950f993ae618e167e7be9256b8353c2dcd8b99ec100eb50f5286aa", size = 203748, upload-time = "2025-10-08T19:47:21.338Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/d6/e3/fa846bd70f6534d647886621388f0a265254d30e3ce47e5c8e6e27dbf153/propcache-0.4.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:501d20b891688eb8e7aa903021f0b72d5a55db40ffaab27edefd1027caaafa61", size = 205877, upload-time = "2025-10-08T19:47:23.059Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/e2/39/8163fc6f3133fea7b5f2827e8eba2029a0277ab2c5beee6c1db7b10fc23d/propcache-0.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9a0bd56e5b100aef69bd8562b74b46254e7c8812918d3baa700c8a8009b0af66", size = 199437, upload-time = "2025-10-08T19:47:24.445Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/93/89/caa9089970ca49c7c01662bd0eeedfe85494e863e8043565aeb6472ce8fe/propcache-0.4.1-cp313-cp313-win32.whl", hash = "sha256:bcc9aaa5d80322bc2fb24bb7accb4a30f81e90ab8d6ba187aec0744bc302ad81", size = 37586, upload-time = "2025-10-08T19:47:25.736Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/f5/ab/f76ec3c3627c883215b5c8080debb4394ef5a7a29be811f786415fc1e6fd/propcache-0.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:381914df18634f5494334d201e98245c0596067504b9372d8cf93f4bb23e025e", size = 40790, upload-time = "2025-10-08T19:47:26.847Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/59/1b/e71ae98235f8e2ba5004d8cb19765a74877abf189bc53fc0c80d799e56c3/propcache-0.4.1-cp313-cp313-win_arm64.whl", hash = "sha256:8873eb4460fd55333ea49b7d189749ecf6e55bf85080f11b1c4530ed3034cba1", size = 37158, upload-time = "2025-10-08T19:47:27.961Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/83/ce/a31bbdfc24ee0dcbba458c8175ed26089cf109a55bbe7b7640ed2470cfe9/propcache-0.4.1-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:92d1935ee1f8d7442da9c0c4fa7ac20d07e94064184811b685f5c4fada64553b", size = 81451, upload-time = "2025-10-08T19:47:29.445Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/25/9c/442a45a470a68456e710d96cacd3573ef26a1d0a60067e6a7d5e655621ed/propcache-0.4.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:473c61b39e1460d386479b9b2f337da492042447c9b685f28be4f74d3529e566", size = 46374, upload-time = "2025-10-08T19:47:30.579Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/f4/bf/b1d5e21dbc3b2e889ea4327044fb16312a736d97640fb8b6aa3f9c7b3b65/propcache-0.4.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:c0ef0aaafc66fbd87842a3fe3902fd889825646bc21149eafe47be6072725835", size = 48396, upload-time = "2025-10-08T19:47:31.79Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/f4/04/5b4c54a103d480e978d3c8a76073502b18db0c4bc17ab91b3cb5092ad949/propcache-0.4.1-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f95393b4d66bfae908c3ca8d169d5f79cd65636ae15b5e7a4f6e67af675adb0e", size = 275950, upload-time = "2025-10-08T19:47:33.481Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/b4/c1/86f846827fb969c4b78b0af79bba1d1ea2156492e1b83dea8b8a6ae27395/propcache-0.4.1-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c07fda85708bc48578467e85099645167a955ba093be0a2dcba962195676e859", size = 273856, upload-time = "2025-10-08T19:47:34.906Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/36/1d/fc272a63c8d3bbad6878c336c7a7dea15e8f2d23a544bda43205dfa83ada/propcache-0.4.1-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:af223b406d6d000830c6f65f1e6431783fc3f713ba3e6cc8c024d5ee96170a4b", size = 280420, upload-time = "2025-10-08T19:47:36.338Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/07/0c/01f2219d39f7e53d52e5173bcb09c976609ba30209912a0680adfb8c593a/propcache-0.4.1-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a78372c932c90ee474559c5ddfffd718238e8673c340dc21fe45c5b8b54559a0", size = 263254, upload-time = "2025-10-08T19:47:37.692Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/2d/18/cd28081658ce597898f0c4d174d4d0f3c5b6d4dc27ffafeef835c95eb359/propcache-0.4.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:564d9f0d4d9509e1a870c920a89b2fec951b44bf5ba7d537a9e7c1ccec2c18af", size = 261205, upload-time = "2025-10-08T19:47:39.659Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/7a/71/1f9e22eb8b8316701c2a19fa1f388c8a3185082607da8e406a803c9b954e/propcache-0.4.1-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:17612831fda0138059cc5546f4d12a2aacfb9e47068c06af35c400ba58ba7393", size = 247873, upload-time = "2025-10-08T19:47:41.084Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/4a/65/3d4b61f36af2b4eddba9def857959f1016a51066b4f1ce348e0cf7881f58/propcache-0.4.1-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:41a89040cb10bd345b3c1a873b2bf36413d48da1def52f268a055f7398514874", size = 262739, upload-time = "2025-10-08T19:47:42.51Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/2a/42/26746ab087faa77c1c68079b228810436ccd9a5ce9ac85e2b7307195fd06/propcache-0.4.1-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:e35b88984e7fa64aacecea39236cee32dd9bd8c55f57ba8a75cf2399553f9bd7", size = 263514, upload-time = "2025-10-08T19:47:43.927Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/94/13/630690fe201f5502d2403dd3cfd451ed8858fe3c738ee88d095ad2ff407b/propcache-0.4.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6f8b465489f927b0df505cbe26ffbeed4d6d8a2bbc61ce90eb074ff129ef0ab1", size = 257781, upload-time = "2025-10-08T19:47:45.448Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/92/f7/1d4ec5841505f423469efbfc381d64b7b467438cd5a4bbcbb063f3b73d27/propcache-0.4.1-cp313-cp313t-win32.whl", hash = "sha256:2ad890caa1d928c7c2965b48f3a3815c853180831d0e5503d35cf00c472f4717", size = 41396, upload-time = "2025-10-08T19:47:47.202Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/48/f0/615c30622316496d2cbbc29f5985f7777d3ada70f23370608c1d3e081c1f/propcache-0.4.1-cp313-cp313t-win_amd64.whl", hash = "sha256:f7ee0e597f495cf415bcbd3da3caa3bd7e816b74d0d52b8145954c5e6fd3ff37", size = 44897, upload-time = "2025-10-08T19:47:48.336Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/fd/ca/6002e46eccbe0e33dcd4069ef32f7f1c9e243736e07adca37ae8c4830ec3/propcache-0.4.1-cp313-cp313t-win_arm64.whl", hash = "sha256:929d7cbe1f01bb7baffb33dc14eb5691c95831450a26354cd210a8155170c93a", size = 39789, upload-time = "2025-10-08T19:47:49.876Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/8e/5c/bca52d654a896f831b8256683457ceddd490ec18d9ec50e97dfd8fc726a8/propcache-0.4.1-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:3f7124c9d820ba5548d431afb4632301acf965db49e666aa21c305cbe8c6de12", size = 78152, upload-time = "2025-10-08T19:47:51.051Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/65/9b/03b04e7d82a5f54fb16113d839f5ea1ede58a61e90edf515f6577c66fa8f/propcache-0.4.1-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:c0d4b719b7da33599dfe3b22d3db1ef789210a0597bc650b7cee9c77c2be8c5c", size = 44869, upload-time = "2025-10-08T19:47:52.594Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/b2/fa/89a8ef0468d5833a23fff277b143d0573897cf75bd56670a6d28126c7d68/propcache-0.4.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:9f302f4783709a78240ebc311b793f123328716a60911d667e0c036bc5dcbded", size = 46596, upload-time = "2025-10-08T19:47:54.073Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/86/bd/47816020d337f4a746edc42fe8d53669965138f39ee117414c7d7a340cfe/propcache-0.4.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c80ee5802e3fb9ea37938e7eecc307fb984837091d5fd262bb37238b1ae97641", size = 206981, upload-time = "2025-10-08T19:47:55.715Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/df/f6/c5fa1357cc9748510ee55f37173eb31bfde6d94e98ccd9e6f033f2fc06e1/propcache-0.4.1-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ed5a841e8bb29a55fb8159ed526b26adc5bdd7e8bd7bf793ce647cb08656cdf4", size = 211490, upload-time = "2025-10-08T19:47:57.499Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/80/1e/e5889652a7c4a3846683401a48f0f2e5083ce0ec1a8a5221d8058fbd1adf/propcache-0.4.1-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:55c72fd6ea2da4c318e74ffdf93c4fe4e926051133657459131a95c846d16d44", size = 215371, upload-time = "2025-10-08T19:47:59.317Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/b2/f2/889ad4b2408f72fe1a4f6a19491177b30ea7bf1a0fd5f17050ca08cfc882/propcache-0.4.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8326e144341460402713f91df60ade3c999d601e7eb5ff8f6f7862d54de0610d", size = 201424, upload-time = "2025-10-08T19:48:00.67Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/27/73/033d63069b57b0812c8bd19f311faebeceb6ba31b8f32b73432d12a0b826/propcache-0.4.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:060b16ae65bc098da7f6d25bf359f1f31f688384858204fe5d652979e0015e5b", size = 197566, upload-time = "2025-10-08T19:48:02.604Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/dc/89/ce24f3dc182630b4e07aa6d15f0ff4b14ed4b9955fae95a0b54c58d66c05/propcache-0.4.1-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:89eb3fa9524f7bec9de6e83cf3faed9d79bffa560672c118a96a171a6f55831e", size = 193130, upload-time = "2025-10-08T19:48:04.499Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/a9/24/ef0d5fd1a811fb5c609278d0209c9f10c35f20581fcc16f818da959fc5b4/propcache-0.4.1-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:dee69d7015dc235f526fe80a9c90d65eb0039103fe565776250881731f06349f", size = 202625, upload-time = "2025-10-08T19:48:06.213Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/f5/02/98ec20ff5546f68d673df2f7a69e8c0d076b5abd05ca882dc7ee3a83653d/propcache-0.4.1-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:5558992a00dfd54ccbc64a32726a3357ec93825a418a401f5cc67df0ac5d9e49", size = 204209, upload-time = "2025-10-08T19:48:08.432Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/a0/87/492694f76759b15f0467a2a93ab68d32859672b646aa8a04ce4864e7932d/propcache-0.4.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:c9b822a577f560fbd9554812526831712c1436d2c046cedee4c3796d3543b144", size = 197797, upload-time = "2025-10-08T19:48:09.968Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/ee/36/66367de3575db1d2d3f3d177432bd14ee577a39d3f5d1b3d5df8afe3b6e2/propcache-0.4.1-cp314-cp314-win32.whl", hash = "sha256:ab4c29b49d560fe48b696cdcb127dd36e0bc2472548f3bf56cc5cb3da2b2984f", size = 38140, upload-time = "2025-10-08T19:48:11.232Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/0c/2a/a758b47de253636e1b8aef181c0b4f4f204bf0dd964914fb2af90a95b49b/propcache-0.4.1-cp314-cp314-win_amd64.whl", hash = "sha256:5a103c3eb905fcea0ab98be99c3a9a5ab2de60228aa5aceedc614c0281cf6153", size = 41257, upload-time = "2025-10-08T19:48:12.707Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/34/5e/63bd5896c3fec12edcbd6f12508d4890d23c265df28c74b175e1ef9f4f3b/propcache-0.4.1-cp314-cp314-win_arm64.whl", hash = "sha256:74c1fb26515153e482e00177a1ad654721bf9207da8a494a0c05e797ad27b992", size = 38097, upload-time = "2025-10-08T19:48:13.923Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/99/85/9ff785d787ccf9bbb3f3106f79884a130951436f58392000231b4c737c80/propcache-0.4.1-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:824e908bce90fb2743bd6b59db36eb4f45cd350a39637c9f73b1c1ea66f5b75f", size = 81455, upload-time = "2025-10-08T19:48:15.16Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/90/85/2431c10c8e7ddb1445c1f7c4b54d886e8ad20e3c6307e7218f05922cad67/propcache-0.4.1-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:c2b5e7db5328427c57c8e8831abda175421b709672f6cfc3d630c3b7e2146393", size = 46372, upload-time = "2025-10-08T19:48:16.424Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/01/20/b0972d902472da9bcb683fa595099911f4d2e86e5683bcc45de60dd05dc3/propcache-0.4.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:6f6ff873ed40292cd4969ef5310179afd5db59fdf055897e282485043fc80ad0", size = 48411, upload-time = "2025-10-08T19:48:17.577Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/e2/e3/7dc89f4f21e8f99bad3d5ddb3a3389afcf9da4ac69e3deb2dcdc96e74169/propcache-0.4.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:49a2dc67c154db2c1463013594c458881a069fcf98940e61a0569016a583020a", size = 275712, upload-time = "2025-10-08T19:48:18.901Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/20/67/89800c8352489b21a8047c773067644e3897f02ecbbd610f4d46b7f08612/propcache-0.4.1-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:005f08e6a0529984491e37d8dbc3dd86f84bd78a8ceb5fa9a021f4c48d4984be", size = 273557, upload-time = "2025-10-08T19:48:20.762Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/e2/a1/b52b055c766a54ce6d9c16d9aca0cad8059acd9637cdf8aa0222f4a026ef/propcache-0.4.1-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5c3310452e0d31390da9035c348633b43d7e7feb2e37be252be6da45abd1abcc", size = 280015, upload-time = "2025-10-08T19:48:22.592Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/48/c8/33cee30bd890672c63743049f3c9e4be087e6780906bfc3ec58528be59c1/propcache-0.4.1-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4c3c70630930447f9ef1caac7728c8ad1c56bc5015338b20fed0d08ea2480b3a", size = 262880, upload-time = "2025-10-08T19:48:23.947Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/0c/b1/8f08a143b204b418285c88b83d00edbd61afbc2c6415ffafc8905da7038b/propcache-0.4.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:8e57061305815dfc910a3634dcf584f08168a8836e6999983569f51a8544cd89", size = 260938, upload-time = "2025-10-08T19:48:25.656Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/cf/12/96e4664c82ca2f31e1c8dff86afb867348979eb78d3cb8546a680287a1e9/propcache-0.4.1-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:521a463429ef54143092c11a77e04056dd00636f72e8c45b70aaa3140d639726", size = 247641, upload-time = "2025-10-08T19:48:27.207Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/18/ed/e7a9cfca28133386ba52278136d42209d3125db08d0a6395f0cba0c0285c/propcache-0.4.1-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:120c964da3fdc75e3731aa392527136d4ad35868cc556fd09bb6d09172d9a367", size = 262510, upload-time = "2025-10-08T19:48:28.65Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/f5/76/16d8bf65e8845dd62b4e2b57444ab81f07f40caa5652b8969b87ddcf2ef6/propcache-0.4.1-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:d8f353eb14ee3441ee844ade4277d560cdd68288838673273b978e3d6d2c8f36", size = 263161, upload-time = "2025-10-08T19:48:30.133Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/e7/70/c99e9edb5d91d5ad8a49fa3c1e8285ba64f1476782fed10ab251ff413ba1/propcache-0.4.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:ab2943be7c652f09638800905ee1bab2c544e537edb57d527997a24c13dc1455", size = 257393, upload-time = "2025-10-08T19:48:31.567Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/08/02/87b25304249a35c0915d236575bc3574a323f60b47939a2262b77632a3ee/propcache-0.4.1-cp314-cp314t-win32.whl", hash = "sha256:05674a162469f31358c30bcaa8883cb7829fa3110bf9c0991fe27d7896c42d85", size = 42546, upload-time = "2025-10-08T19:48:32.872Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/cb/ef/3c6ecf8b317aa982f309835e8f96987466123c6e596646d4e6a1dfcd080f/propcache-0.4.1-cp314-cp314t-win_amd64.whl", hash = "sha256:990f6b3e2a27d683cb7602ed6c86f15ee6b43b1194736f9baaeb93d0016633b1", size = 46259, upload-time = "2025-10-08T19:48:34.226Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/c4/2d/346e946d4951f37eca1e4f55be0f0174c52cd70720f84029b02f296f4a38/propcache-0.4.1-cp314-cp314t-win_arm64.whl", hash = "sha256:ecef2343af4cc68e05131e45024ba34f6095821988a9d0a02aa7c73fcc448aa9", size = 40428, upload-time = "2025-10-08T19:48:35.441Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/5b/5a/bc7b4a4ef808fa59a816c17b20c4bef6884daebbdf627ff2a161da67da19/propcache-0.4.1-py3-none-any.whl", hash = "sha256:af2a6052aeb6cf17d3e46ee169099044fd8224cbaf75c76a2ef596e8163e2237", size = 13305, upload-time = "2025-10-08T19:49:00.792Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "psutil"
                                                                  +version = "7.1.3"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/e1/88/bdd0a41e5857d5d703287598cbf08dad90aed56774ea52ae071bae9071b6/psutil-7.1.3.tar.gz", hash = "sha256:6c86281738d77335af7aec228328e944b30930899ea760ecf33a4dba66be5e74", size = 489059, upload-time = "2025-11-02T12:25:54.619Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/bd/93/0c49e776b8734fef56ec9c5c57f923922f2cf0497d62e0f419465f28f3d0/psutil-7.1.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0005da714eee687b4b8decd3d6cc7c6db36215c9e74e5ad2264b90c3df7d92dc", size = 239751, upload-time = "2025-11-02T12:25:58.161Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/6f/8d/b31e39c769e70780f007969815195a55c81a63efebdd4dbe9e7a113adb2f/psutil-7.1.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:19644c85dcb987e35eeeaefdc3915d059dac7bd1167cdcdbf27e0ce2df0c08c0", size = 240368, upload-time = "2025-11-02T12:26:00.491Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/62/61/23fd4acc3c9eebbf6b6c78bcd89e5d020cfde4acf0a9233e9d4e3fa698b4/psutil-7.1.3-cp313-cp313t-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:95ef04cf2e5ba0ab9eaafc4a11eaae91b44f4ef5541acd2ee91d9108d00d59a7", size = 287134, upload-time = "2025-11-02T12:26:02.613Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/30/1c/f921a009ea9ceb51aa355cb0cc118f68d354db36eae18174bab63affb3e6/psutil-7.1.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1068c303be3a72f8e18e412c5b2a8f6d31750fb152f9cb106b54090296c9d251", size = 289904, upload-time = "2025-11-02T12:26:05.207Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/a6/82/62d68066e13e46a5116df187d319d1724b3f437ddd0f958756fc052677f4/psutil-7.1.3-cp313-cp313t-win_amd64.whl", hash = "sha256:18349c5c24b06ac5612c0428ec2a0331c26443d259e2a0144a9b24b4395b58fa", size = 249642, upload-time = "2025-11-02T12:26:07.447Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/df/ad/c1cd5fe965c14a0392112f68362cfceb5230819dbb5b1888950d18a11d9f/psutil-7.1.3-cp313-cp313t-win_arm64.whl", hash = "sha256:c525ffa774fe4496282fb0b1187725793de3e7c6b29e41562733cae9ada151ee", size = 245518, upload-time = "2025-11-02T12:26:09.719Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/2e/bb/6670bded3e3236eb4287c7bcdc167e9fae6e1e9286e437f7111caed2f909/psutil-7.1.3-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:b403da1df4d6d43973dc004d19cee3b848e998ae3154cc8097d139b77156c353", size = 239843, upload-time = "2025-11-02T12:26:11.968Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/b8/66/853d50e75a38c9a7370ddbeefabdd3d3116b9c31ef94dc92c6729bc36bec/psutil-7.1.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:ad81425efc5e75da3f39b3e636293360ad8d0b49bed7df824c79764fb4ba9b8b", size = 240369, upload-time = "2025-11-02T12:26:14.358Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/41/bd/313aba97cb5bfb26916dc29cf0646cbe4dd6a89ca69e8c6edce654876d39/psutil-7.1.3-cp314-cp314t-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8f33a3702e167783a9213db10ad29650ebf383946e91bc77f28a5eb083496bc9", size = 288210, upload-time = "2025-11-02T12:26:16.699Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/c2/fa/76e3c06e760927a0cfb5705eb38164254de34e9bd86db656d4dbaa228b04/psutil-7.1.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fac9cd332c67f4422504297889da5ab7e05fd11e3c4392140f7370f4208ded1f", size = 291182, upload-time = "2025-11-02T12:26:18.848Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/0f/1d/5774a91607035ee5078b8fd747686ebec28a962f178712de100d00b78a32/psutil-7.1.3-cp314-cp314t-win_amd64.whl", hash = "sha256:3792983e23b69843aea49c8f5b8f115572c5ab64c153bada5270086a2123c7e7", size = 250466, upload-time = "2025-11-02T12:26:21.183Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/00/ca/e426584bacb43a5cb1ac91fae1937f478cd8fbe5e4ff96574e698a2c77cd/psutil-7.1.3-cp314-cp314t-win_arm64.whl", hash = "sha256:31d77fcedb7529f27bb3a0472bea9334349f9a04160e8e6e5020f22c59893264", size = 245756, upload-time = "2025-11-02T12:26:23.148Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/ef/94/46b9154a800253e7ecff5aaacdf8ebf43db99de4a2dfa18575b02548654e/psutil-7.1.3-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:2bdbcd0e58ca14996a42adf3621a6244f1bb2e2e528886959c72cf1e326677ab", size = 238359, upload-time = "2025-11-02T12:26:25.284Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/68/3a/9f93cff5c025029a36d9a92fef47220ab4692ee7f2be0fba9f92813d0cb8/psutil-7.1.3-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:bc31fa00f1fbc3c3802141eede66f3a2d51d89716a194bf2cd6fc68310a19880", size = 239171, upload-time = "2025-11-02T12:26:27.23Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/ce/b1/5f49af514f76431ba4eea935b8ad3725cdeb397e9245ab919dbc1d1dc20f/psutil-7.1.3-cp36-abi3-manylinux2010_x86_64.manylinux_2_12_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3bb428f9f05c1225a558f53e30ccbad9930b11c3fc206836242de1091d3e7dd3", size = 263261, upload-time = "2025-11-02T12:26:29.48Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/e0/95/992c8816a74016eb095e73585d747e0a8ea21a061ed3689474fabb29a395/psutil-7.1.3-cp36-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:56d974e02ca2c8eb4812c3f76c30e28836fffc311d55d979f1465c1feeb2b68b", size = 264635, upload-time = "2025-11-02T12:26:31.74Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/55/4c/c3ed1a622b6ae2fd3c945a366e64eb35247a31e4db16cf5095e269e8eb3c/psutil-7.1.3-cp37-abi3-win_amd64.whl", hash = "sha256:f39c2c19fe824b47484b96f9692932248a54c43799a84282cfe58d05a6449efd", size = 247633, upload-time = "2025-11-02T12:26:33.887Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/c9/ad/33b2ccec09bf96c2b2ef3f9a6f66baac8253d7565d8839e024a6b905d45d/psutil-7.1.3-cp37-abi3-win_arm64.whl", hash = "sha256:bd0d69cee829226a761e92f28140bec9a5ee9d5b4fb4b0cc589068dbfff559b1", size = 244608, upload-time = "2025-11-02T12:26:36.136Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "ptyprocess"
                                                                  +version = "0.7.0"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/20/e5/16ff212c1e452235a90aeb09066144d0c5a6a8c0834397e03f5224495c4e/ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220", size = 70762, upload-time = "2020-12-28T15:15:30.155Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/22/a6/858897256d0deac81a172289110f31629fc4cee19b6f01283303e18c8db3/ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35", size = 13993, upload-time = "2020-12-28T15:15:28.35Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "pure-eval"
                                                                  +version = "0.2.3"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/cd/05/0a34433a064256a578f1783a10da6df098ceaa4a57bbeaa96a6c0352786b/pure_eval-0.2.3.tar.gz", hash = "sha256:5f4e983f40564c576c7c8635ae88db5956bb2229d7e9237d03b3c0b0190eaf42", size = 19752, upload-time = "2024-07-21T12:58:21.801Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/8e/37/efad0257dc6e593a18957422533ff0f87ede7c9c6ea010a2177d738fb82f/pure_eval-0.2.3-py3-none-any.whl", hash = "sha256:1db8e35b67b3d218d818ae653e27f06c3aa420901fa7b081ca98cbedc874e0d0", size = 11842, upload-time = "2024-07-21T12:58:20.04Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "py-partiql-parser"
                                                                  +version = "0.6.3"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/56/7a/a0f6bda783eb4df8e3dfd55973a1ac6d368a89178c300e1b5b91cd181e5e/py_partiql_parser-0.6.3.tar.gz", hash = "sha256:09cecf916ce6e3da2c050f0cb6106166de42c33d34a078ec2eb19377ea70389a", size = 17456, upload-time = "2025-10-18T13:56:13.441Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/c9/33/a7cbfccc39056a5cf8126b7aab4c8bafbedd4f0ca68ae40ecb627a2d2cd3/py_partiql_parser-0.6.3-py2.py3-none-any.whl", hash = "sha256:deb0769c3346179d2f590dcbde556f708cdb929059fb654bad75f4cf6e07f582", size = 23752, upload-time = "2025-10-18T13:56:12.256Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "pycparser"
                                                                  +version = "2.23"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/fe/cf/d2d3b9f5699fb1e4615c8e32ff220203e43b248e1dfcc6736ad9057731ca/pycparser-2.23.tar.gz", hash = "sha256:78816d4f24add8f10a06d6f05b4d424ad9e96cfebf68a4ddc99c65c0720d00c2", size = 173734, upload-time = "2025-09-09T13:23:47.91Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/a0/e3/59cd50310fc9b59512193629e1984c1f95e5c8ae6e5d8c69532ccc65a7fe/pycparser-2.23-py3-none-any.whl", hash = "sha256:e5c6e8d3fbad53479cab09ac03729e0a9faf2bee3db8208a550daf5af81a5934", size = 118140, upload-time = "2025-09-09T13:23:46.651Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "pydantic"
                                                                  +version = "2.12.5"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +dependencies = [
                                                                  +    { name = "annotated-types" },
                                                                  +    { name = "pydantic-core" },
                                                                  +    { name = "typing-extensions" },
                                                                  +    { name = "typing-inspection" },
                                                                  +]
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/69/44/36f1a6e523abc58ae5f928898e4aca2e0ea509b5aa6f6f392a5d882be928/pydantic-2.12.5.tar.gz", hash = "sha256:4d351024c75c0f085a9febbb665ce8c0c6ec5d30e903bdb6394b7ede26aebb49", size = 821591, upload-time = "2025-11-26T15:11:46.471Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/5a/87/b70ad306ebb6f9b585f114d0ac2137d792b48be34d732d60e597c2f8465a/pydantic-2.12.5-py3-none-any.whl", hash = "sha256:e561593fccf61e8a20fc46dfc2dfe075b8be7d0188df33f221ad1f0139180f9d", size = 463580, upload-time = "2025-11-26T15:11:44.605Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "pydantic-core"
                                                                  +version = "2.41.5"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +dependencies = [
                                                                  +    { name = "typing-extensions" },
                                                                  +]
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/71/70/23b021c950c2addd24ec408e9ab05d59b035b39d97cdc1130e1bce647bb6/pydantic_core-2.41.5.tar.gz", hash = "sha256:08daa51ea16ad373ffd5e7606252cc32f07bc72b28284b6bc9c6df804816476e", size = 460952, upload-time = "2025-11-04T13:43:49.098Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/5f/5d/5f6c63eebb5afee93bcaae4ce9a898f3373ca23df3ccaef086d0233a35a7/pydantic_core-2.41.5-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f41a7489d32336dbf2199c8c0a215390a751c5b014c2c1c5366e817202e9cdf7", size = 2110990, upload-time = "2025-11-04T13:39:58.079Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/aa/32/9c2e8ccb57c01111e0fd091f236c7b371c1bccea0fa85247ac55b1e2b6b6/pydantic_core-2.41.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:070259a8818988b9a84a449a2a7337c7f430a22acc0859c6b110aa7212a6d9c0", size = 1896003, upload-time = "2025-11-04T13:39:59.956Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/68/b8/a01b53cb0e59139fbc9e4fda3e9724ede8de279097179be4ff31f1abb65a/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e96cea19e34778f8d59fe40775a7a574d95816eb150850a85a7a4c8f4b94ac69", size = 1919200, upload-time = "2025-11-04T13:40:02.241Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/38/de/8c36b5198a29bdaade07b5985e80a233a5ac27137846f3bc2d3b40a47360/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed2e99c456e3fadd05c991f8f437ef902e00eedf34320ba2b0842bd1c3ca3a75", size = 2052578, upload-time = "2025-11-04T13:40:04.401Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/00/b5/0e8e4b5b081eac6cb3dbb7e60a65907549a1ce035a724368c330112adfdd/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:65840751b72fbfd82c3c640cff9284545342a4f1eb1586ad0636955b261b0b05", size = 2208504, upload-time = "2025-11-04T13:40:06.072Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/77/56/87a61aad59c7c5b9dc8caad5a41a5545cba3810c3e828708b3d7404f6cef/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e536c98a7626a98feb2d3eaf75944ef6f3dbee447e1f841eae16f2f0a72d8ddc", size = 2335816, upload-time = "2025-11-04T13:40:07.835Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/0d/76/941cc9f73529988688a665a5c0ecff1112b3d95ab48f81db5f7606f522d3/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eceb81a8d74f9267ef4081e246ffd6d129da5d87e37a77c9bde550cb04870c1c", size = 2075366, upload-time = "2025-11-04T13:40:09.804Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/d3/43/ebef01f69baa07a482844faaa0a591bad1ef129253ffd0cdaa9d8a7f72d3/pydantic_core-2.41.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d38548150c39b74aeeb0ce8ee1d8e82696f4a4e16ddc6de7b1d8823f7de4b9b5", size = 2171698, upload-time = "2025-11-04T13:40:12.004Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/b1/87/41f3202e4193e3bacfc2c065fab7706ebe81af46a83d3e27605029c1f5a6/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c23e27686783f60290e36827f9c626e63154b82b116d7fe9adba1fda36da706c", size = 2132603, upload-time = "2025-11-04T13:40:13.868Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/49/7d/4c00df99cb12070b6bccdef4a195255e6020a550d572768d92cc54dba91a/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:482c982f814460eabe1d3bb0adfdc583387bd4691ef00b90575ca0d2b6fe2294", size = 2329591, upload-time = "2025-11-04T13:40:15.672Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/cc/6a/ebf4b1d65d458f3cda6a7335d141305dfa19bdc61140a884d165a8a1bbc7/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:bfea2a5f0b4d8d43adf9d7b8bf019fb46fdd10a2e5cde477fbcb9d1fa08c68e1", size = 2319068, upload-time = "2025-11-04T13:40:17.532Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/49/3b/774f2b5cd4192d5ab75870ce4381fd89cf218af999515baf07e7206753f0/pydantic_core-2.41.5-cp312-cp312-win32.whl", hash = "sha256:b74557b16e390ec12dca509bce9264c3bbd128f8a2c376eaa68003d7f327276d", size = 1985908, upload-time = "2025-11-04T13:40:19.309Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/86/45/00173a033c801cacf67c190fef088789394feaf88a98a7035b0e40d53dc9/pydantic_core-2.41.5-cp312-cp312-win_amd64.whl", hash = "sha256:1962293292865bca8e54702b08a4f26da73adc83dd1fcf26fbc875b35d81c815", size = 2020145, upload-time = "2025-11-04T13:40:21.548Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/f9/22/91fbc821fa6d261b376a3f73809f907cec5ca6025642c463d3488aad22fb/pydantic_core-2.41.5-cp312-cp312-win_arm64.whl", hash = "sha256:1746d4a3d9a794cacae06a5eaaccb4b8643a131d45fbc9af23e353dc0a5ba5c3", size = 1976179, upload-time = "2025-11-04T13:40:23.393Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/87/06/8806241ff1f70d9939f9af039c6c35f2360cf16e93c2ca76f184e76b1564/pydantic_core-2.41.5-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:941103c9be18ac8daf7b7adca8228f8ed6bb7a1849020f643b3a14d15b1924d9", size = 2120403, upload-time = "2025-11-04T13:40:25.248Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/94/02/abfa0e0bda67faa65fef1c84971c7e45928e108fe24333c81f3bfe35d5f5/pydantic_core-2.41.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:112e305c3314f40c93998e567879e887a3160bb8689ef3d2c04b6cc62c33ac34", size = 1896206, upload-time = "2025-11-04T13:40:27.099Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/15/df/a4c740c0943e93e6500f9eb23f4ca7ec9bf71b19e608ae5b579678c8d02f/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cbaad15cb0c90aa221d43c00e77bb33c93e8d36e0bf74760cd00e732d10a6a0", size = 1919307, upload-time = "2025-11-04T13:40:29.806Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/9a/e3/6324802931ae1d123528988e0e86587c2072ac2e5394b4bc2bc34b61ff6e/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:03ca43e12fab6023fc79d28ca6b39b05f794ad08ec2feccc59a339b02f2b3d33", size = 2063258, upload-time = "2025-11-04T13:40:33.544Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/c9/d4/2230d7151d4957dd79c3044ea26346c148c98fbf0ee6ebd41056f2d62ab5/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc799088c08fa04e43144b164feb0c13f9a0bc40503f8df3e9fde58a3c0c101e", size = 2214917, upload-time = "2025-11-04T13:40:35.479Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/e6/9f/eaac5df17a3672fef0081b6c1bb0b82b33ee89aa5cec0d7b05f52fd4a1fa/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:97aeba56665b4c3235a0e52b2c2f5ae9cd071b8a8310ad27bddb3f7fb30e9aa2", size = 2332186, upload-time = "2025-11-04T13:40:37.436Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/cf/4e/35a80cae583a37cf15604b44240e45c05e04e86f9cfd766623149297e971/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:406bf18d345822d6c21366031003612b9c77b3e29ffdb0f612367352aab7d586", size = 2073164, upload-time = "2025-11-04T13:40:40.289Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/bf/e3/f6e262673c6140dd3305d144d032f7bd5f7497d3871c1428521f19f9efa2/pydantic_core-2.41.5-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b93590ae81f7010dbe380cdeab6f515902ebcbefe0b9327cc4804d74e93ae69d", size = 2179146, upload-time = "2025-11-04T13:40:42.809Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/75/c7/20bd7fc05f0c6ea2056a4565c6f36f8968c0924f19b7d97bbfea55780e73/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:01a3d0ab748ee531f4ea6c3e48ad9dac84ddba4b0d82291f87248f2f9de8d740", size = 2137788, upload-time = "2025-11-04T13:40:44.752Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/3a/8d/34318ef985c45196e004bc46c6eab2eda437e744c124ef0dbe1ff2c9d06b/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:6561e94ba9dacc9c61bce40e2d6bdc3bfaa0259d3ff36ace3b1e6901936d2e3e", size = 2340133, upload-time = "2025-11-04T13:40:46.66Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/9c/59/013626bf8c78a5a5d9350d12e7697d3d4de951a75565496abd40ccd46bee/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:915c3d10f81bec3a74fbd4faebe8391013ba61e5a1a8d48c4455b923bdda7858", size = 2324852, upload-time = "2025-11-04T13:40:48.575Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/1a/d9/c248c103856f807ef70c18a4f986693a46a8ffe1602e5d361485da502d20/pydantic_core-2.41.5-cp313-cp313-win32.whl", hash = "sha256:650ae77860b45cfa6e2cdafc42618ceafab3a2d9a3811fcfbd3bbf8ac3c40d36", size = 1994679, upload-time = "2025-11-04T13:40:50.619Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/9e/8b/341991b158ddab181cff136acd2552c9f35bd30380422a639c0671e99a91/pydantic_core-2.41.5-cp313-cp313-win_amd64.whl", hash = "sha256:79ec52ec461e99e13791ec6508c722742ad745571f234ea6255bed38c6480f11", size = 2019766, upload-time = "2025-11-04T13:40:52.631Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/73/7d/f2f9db34af103bea3e09735bb40b021788a5e834c81eedb541991badf8f5/pydantic_core-2.41.5-cp313-cp313-win_arm64.whl", hash = "sha256:3f84d5c1b4ab906093bdc1ff10484838aca54ef08de4afa9de0f5f14d69639cd", size = 1981005, upload-time = "2025-11-04T13:40:54.734Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/ea/28/46b7c5c9635ae96ea0fbb779e271a38129df2550f763937659ee6c5dbc65/pydantic_core-2.41.5-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:3f37a19d7ebcdd20b96485056ba9e8b304e27d9904d233d7b1015db320e51f0a", size = 2119622, upload-time = "2025-11-04T13:40:56.68Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/74/1a/145646e5687e8d9a1e8d09acb278c8535ebe9e972e1f162ed338a622f193/pydantic_core-2.41.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:1d1d9764366c73f996edd17abb6d9d7649a7eb690006ab6adbda117717099b14", size = 1891725, upload-time = "2025-11-04T13:40:58.807Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/23/04/e89c29e267b8060b40dca97bfc64a19b2a3cf99018167ea1677d96368273/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25e1c2af0fce638d5f1988b686f3b3ea8cd7de5f244ca147c777769e798a9cd1", size = 1915040, upload-time = "2025-11-04T13:41:00.853Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/84/a3/15a82ac7bd97992a82257f777b3583d3e84bdb06ba6858f745daa2ec8a85/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:506d766a8727beef16b7adaeb8ee6217c64fc813646b424d0804d67c16eddb66", size = 2063691, upload-time = "2025-11-04T13:41:03.504Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/74/9b/0046701313c6ef08c0c1cf0e028c67c770a4e1275ca73131563c5f2a310a/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4819fa52133c9aa3c387b3328f25c1facc356491e6135b459f1de698ff64d869", size = 2213897, upload-time = "2025-11-04T13:41:05.804Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/8a/cd/6bac76ecd1b27e75a95ca3a9a559c643b3afcd2dd62086d4b7a32a18b169/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2b761d210c9ea91feda40d25b4efe82a1707da2ef62901466a42492c028553a2", size = 2333302, upload-time = "2025-11-04T13:41:07.809Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/4c/d2/ef2074dc020dd6e109611a8be4449b98cd25e1b9b8a303c2f0fca2f2bcf7/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22f0fb8c1c583a3b6f24df2470833b40207e907b90c928cc8d3594b76f874375", size = 2064877, upload-time = "2025-11-04T13:41:09.827Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/18/66/e9db17a9a763d72f03de903883c057b2592c09509ccfe468187f2a2eef29/pydantic_core-2.41.5-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2782c870e99878c634505236d81e5443092fba820f0373997ff75f90f68cd553", size = 2180680, upload-time = "2025-11-04T13:41:12.379Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/d3/9e/3ce66cebb929f3ced22be85d4c2399b8e85b622db77dad36b73c5387f8f8/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:0177272f88ab8312479336e1d777f6b124537d47f2123f89cb37e0accea97f90", size = 2138960, upload-time = "2025-11-04T13:41:14.627Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/a6/62/205a998f4327d2079326b01abee48e502ea739d174f0a89295c481a2272e/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:63510af5e38f8955b8ee5687740d6ebf7c2a0886d15a6d65c32814613681bc07", size = 2339102, upload-time = "2025-11-04T13:41:16.868Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/3c/0d/f05e79471e889d74d3d88f5bd20d0ed189ad94c2423d81ff8d0000aab4ff/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:e56ba91f47764cc14f1daacd723e3e82d1a89d783f0f5afe9c364b8bb491ccdb", size = 2326039, upload-time = "2025-11-04T13:41:18.934Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/ec/e1/e08a6208bb100da7e0c4b288eed624a703f4d129bde2da475721a80cab32/pydantic_core-2.41.5-cp314-cp314-win32.whl", hash = "sha256:aec5cf2fd867b4ff45b9959f8b20ea3993fc93e63c7363fe6851424c8a7e7c23", size = 1995126, upload-time = "2025-11-04T13:41:21.418Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/48/5d/56ba7b24e9557f99c9237e29f5c09913c81eeb2f3217e40e922353668092/pydantic_core-2.41.5-cp314-cp314-win_amd64.whl", hash = "sha256:8e7c86f27c585ef37c35e56a96363ab8de4e549a95512445b85c96d3e2f7c1bf", size = 2015489, upload-time = "2025-11-04T13:41:24.076Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/4e/bb/f7a190991ec9e3e0ba22e4993d8755bbc4a32925c0b5b42775c03e8148f9/pydantic_core-2.41.5-cp314-cp314-win_arm64.whl", hash = "sha256:e672ba74fbc2dc8eea59fb6d4aed6845e6905fc2a8afe93175d94a83ba2a01a0", size = 1977288, upload-time = "2025-11-04T13:41:26.33Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/92/ed/77542d0c51538e32e15afe7899d79efce4b81eee631d99850edc2f5e9349/pydantic_core-2.41.5-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:8566def80554c3faa0e65ac30ab0932b9e3a5cd7f8323764303d468e5c37595a", size = 2120255, upload-time = "2025-11-04T13:41:28.569Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/bb/3d/6913dde84d5be21e284439676168b28d8bbba5600d838b9dca99de0fad71/pydantic_core-2.41.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:b80aa5095cd3109962a298ce14110ae16b8c1aece8b72f9dafe81cf597ad80b3", size = 1863760, upload-time = "2025-11-04T13:41:31.055Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/5a/f0/e5e6b99d4191da102f2b0eb9687aaa7f5bea5d9964071a84effc3e40f997/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3006c3dd9ba34b0c094c544c6006cc79e87d8612999f1a5d43b769b89181f23c", size = 1878092, upload-time = "2025-11-04T13:41:33.21Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/71/48/36fb760642d568925953bcc8116455513d6e34c4beaa37544118c36aba6d/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:72f6c8b11857a856bcfa48c86f5368439f74453563f951e473514579d44aa612", size = 2053385, upload-time = "2025-11-04T13:41:35.508Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/20/25/92dc684dd8eb75a234bc1c764b4210cf2646479d54b47bf46061657292a8/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5cb1b2f9742240e4bb26b652a5aeb840aa4b417c7748b6f8387927bc6e45e40d", size = 2218832, upload-time = "2025-11-04T13:41:37.732Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/e2/09/f53e0b05023d3e30357d82eb35835d0f6340ca344720a4599cd663dca599/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bd3d54f38609ff308209bd43acea66061494157703364ae40c951f83ba99a1a9", size = 2327585, upload-time = "2025-11-04T13:41:40Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/aa/4e/2ae1aa85d6af35a39b236b1b1641de73f5a6ac4d5a7509f77b814885760c/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ff4321e56e879ee8d2a879501c8e469414d948f4aba74a2d4593184eb326660", size = 2041078, upload-time = "2025-11-04T13:41:42.323Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/cd/13/2e215f17f0ef326fc72afe94776edb77525142c693767fc347ed6288728d/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d0d2568a8c11bf8225044aa94409e21da0cb09dcdafe9ecd10250b2baad531a9", size = 2173914, upload-time = "2025-11-04T13:41:45.221Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/02/7a/f999a6dcbcd0e5660bc348a3991c8915ce6599f4f2c6ac22f01d7a10816c/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:a39455728aabd58ceabb03c90e12f71fd30fa69615760a075b9fec596456ccc3", size = 2129560, upload-time = "2025-11-04T13:41:47.474Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/3a/b1/6c990ac65e3b4c079a4fb9f5b05f5b013afa0f4ed6780a3dd236d2cbdc64/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_armv7l.whl", hash = "sha256:239edca560d05757817c13dc17c50766136d21f7cd0fac50295499ae24f90fdf", size = 2329244, upload-time = "2025-11-04T13:41:49.992Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/d9/02/3c562f3a51afd4d88fff8dffb1771b30cfdfd79befd9883ee094f5b6c0d8/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:2a5e06546e19f24c6a96a129142a75cee553cc018ffee48a460059b1185f4470", size = 2331955, upload-time = "2025-11-04T13:41:54.079Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/5c/96/5fb7d8c3c17bc8c62fdb031c47d77a1af698f1d7a406b0f79aaa1338f9ad/pydantic_core-2.41.5-cp314-cp314t-win32.whl", hash = "sha256:b4ececa40ac28afa90871c2cc2b9ffd2ff0bf749380fbdf57d165fd23da353aa", size = 1988906, upload-time = "2025-11-04T13:41:56.606Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/22/ed/182129d83032702912c2e2d8bbe33c036f342cc735737064668585dac28f/pydantic_core-2.41.5-cp314-cp314t-win_amd64.whl", hash = "sha256:80aa89cad80b32a912a65332f64a4450ed00966111b6615ca6816153d3585a8c", size = 1981607, upload-time = "2025-11-04T13:41:58.889Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/9f/ed/068e41660b832bb0b1aa5b58011dea2a3fe0ba7861ff38c4d4904c1c1a99/pydantic_core-2.41.5-cp314-cp314t-win_arm64.whl", hash = "sha256:35b44f37a3199f771c3eaa53051bc8a70cd7b54f333531c59e29fd4db5d15008", size = 1974769, upload-time = "2025-11-04T13:42:01.186Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/09/32/59b0c7e63e277fa7911c2fc70ccfb45ce4b98991e7ef37110663437005af/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:7da7087d756b19037bc2c06edc6c170eeef3c3bafcb8f532ff17d64dc427adfd", size = 2110495, upload-time = "2025-11-04T13:42:49.689Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/aa/81/05e400037eaf55ad400bcd318c05bb345b57e708887f07ddb2d20e3f0e98/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:aabf5777b5c8ca26f7824cb4a120a740c9588ed58df9b2d196ce92fba42ff8dc", size = 1915388, upload-time = "2025-11-04T13:42:52.215Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/6e/0d/e3549b2399f71d56476b77dbf3cf8937cec5cd70536bdc0e374a421d0599/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c007fe8a43d43b3969e8469004e9845944f1a80e6acd47c150856bb87f230c56", size = 1942879, upload-time = "2025-11-04T13:42:56.483Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/f7/07/34573da085946b6a313d7c42f82f16e8920bfd730665de2d11c0c37a74b5/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76d0819de158cd855d1cbb8fcafdf6f5cf1eb8e470abe056d5d161106e38062b", size = 2139017, upload-time = "2025-11-04T13:42:59.471Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "pygments"
                                                                  +version = "2.19.2"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631, upload-time = "2025-06-21T13:39:12.283Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "pyjwt"
                                                                  +version = "2.10.1"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/e7/46/bd74733ff231675599650d3e47f361794b22ef3e3770998dda30d3b63726/pyjwt-2.10.1.tar.gz", hash = "sha256:3cc5772eb20009233caf06e9d8a0577824723b44e6648ee0a2aedb6cf9381953", size = 87785, upload-time = "2024-11-28T03:43:29.933Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/61/ad/689f02752eeec26aed679477e80e632ef1b682313be70793d798c1d5fc8f/PyJWT-2.10.1-py3-none-any.whl", hash = "sha256:dcdd193e30abefd5debf142f9adfcdd2b58004e644f25406ffaebd50bd98dacb", size = 22997, upload-time = "2024-11-28T03:43:27.893Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "pytest"
                                                                  +version = "9.0.1"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +dependencies = [
                                                                  +    { name = "colorama", marker = "sys_platform == 'win32'" },
                                                                  +    { name = "iniconfig" },
                                                                  +    { name = "packaging" },
                                                                  +    { name = "pluggy" },
                                                                  +    { name = "pygments" },
                                                                  +]
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/07/56/f013048ac4bc4c1d9be45afd4ab209ea62822fb1598f40687e6bf45dcea4/pytest-9.0.1.tar.gz", hash = "sha256:3e9c069ea73583e255c3b21cf46b8d3c56f6e3a1a8f6da94ccb0fcf57b9d73c8", size = 1564125, upload-time = "2025-11-12T13:05:09.333Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/0b/8b/6300fb80f858cda1c51ffa17075df5d846757081d11ab4aa35cef9e6258b/pytest-9.0.1-py3-none-any.whl", hash = "sha256:67be0030d194df2dfa7b556f2e56fb3c3315bd5c8822c6951162b92b32ce7dad", size = 373668, upload-time = "2025-11-12T13:05:07.379Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "pytest-cov"
                                                                  +version = "7.0.0"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +dependencies = [
                                                                  +    { name = "coverage" },
                                                                  +    { name = "pluggy" },
                                                                  +    { name = "pytest" },
                                                                  +]
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/5e/f7/c933acc76f5208b3b00089573cf6a2bc26dc80a8aece8f52bb7d6b1855ca/pytest_cov-7.0.0.tar.gz", hash = "sha256:33c97eda2e049a0c5298e91f519302a1334c26ac65c1a483d6206fd458361af1", size = 54328, upload-time = "2025-09-09T10:57:02.113Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/ee/49/1377b49de7d0c1ce41292161ea0f721913fa8722c19fb9c1e3aa0367eecb/pytest_cov-7.0.0-py3-none-any.whl", hash = "sha256:3b8e9558b16cc1479da72058bdecf8073661c7f57f7d3c5f22a1c23507f2d861", size = 22424, upload-time = "2025-09-09T10:57:00.695Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "python-dateutil"
                                                                  +version = "2.9.0.post0"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +dependencies = [
                                                                  +    { name = "six" },
                                                                  +]
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/66/c0/0c8b6ad9f17a802ee498c46e004a0eb49bc148f2fd230864601a86dcf6db/python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 342432, upload-time = "2024-03-01T18:36:20.211Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892, upload-time = "2024-03-01T18:36:18.57Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "python-dotenv"
                                                                  +version = "1.2.1"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/f0/26/19cadc79a718c5edbec86fd4919a6b6d3f681039a2f6d66d14be94e75fb9/python_dotenv-1.2.1.tar.gz", hash = "sha256:42667e897e16ab0d66954af0e60a9caa94f0fd4ecf3aaf6d2d260eec1aa36ad6", size = 44221, upload-time = "2025-10-26T15:12:10.434Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/14/1b/a298b06749107c305e1fe0f814c6c74aea7b2f1e10989cb30f544a1b3253/python_dotenv-1.2.1-py3-none-any.whl", hash = "sha256:b81ee9561e9ca4004139c6cbba3a238c32b03e4894671e181b671e8cb8425d61", size = 21230, upload-time = "2025-10-26T15:12:09.109Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "python-json-logger"
                                                                  +version = "4.0.0"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/29/bf/eca6a3d43db1dae7070f70e160ab20b807627ba953663ba07928cdd3dc58/python_json_logger-4.0.0.tar.gz", hash = "sha256:f58e68eb46e1faed27e0f574a55a0455eecd7b8a5b88b85a784519ba3cff047f", size = 17683, upload-time = "2025-10-06T04:15:18.984Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/51/e5/fecf13f06e5e5f67e8837d777d1bc43fac0ed2b77a676804df5c34744727/python_json_logger-4.0.0-py3-none-any.whl", hash = "sha256:af09c9daf6a813aa4cc7180395f50f2a9e5fa056034c9953aec92e381c5ba1e2", size = 15548, upload-time = "2025-10-06T04:15:17.553Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "python-ulid"
                                                                  +version = "1.1.0"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/e8/8b/0580d8ee0a73a3f3869488856737c429cbaa08b63c3506275f383c4771a8/python-ulid-1.1.0.tar.gz", hash = "sha256:5fb5e4a91db8ca93e8938a613360b3def299b60d41f847279a8c39c9b2e9c65e", size = 19992, upload-time = "2022-03-10T15:11:41.968Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/89/8e/c30b08ee9b8dc9b4a10e782c2a7fd5de55388201ddebfe0f7ab99dfbb349/python_ulid-1.1.0-py3-none-any.whl", hash = "sha256:88c952f6be133dbede19c907d72d26717d2691ec8421512b573144794d891e24", size = 9360, upload-time = "2022-03-10T15:11:40.405Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "pytokens"
                                                                  +version = "0.4.0"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/e5/16/4b9cfd90d55e66ffdb277d7ebe3bc25250c2311336ec3fc73b2673c794d5/pytokens-0.4.0.tar.gz", hash = "sha256:6b0b03e6ea7c9f9d47c5c61164b69ad30f4f0d70a5d9fe7eac4d19f24f77af2d", size = 15039, upload-time = "2026-01-19T07:59:50.623Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/3d/65/65460ebbfefd0bc1b160457904370d44f269e6e4582e0a9b6cba7c267b04/pytokens-0.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:cd8da894e5a29ba6b6da8be06a4f7589d7220c099b5e363cb0643234b9b38c2a", size = 159864, upload-time = "2026-01-19T07:59:08.908Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/25/70/a46669ec55876c392036b4da9808b5c3b1c5870bbca3d4cc923bf68bdbc1/pytokens-0.4.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:237ba7cfb677dbd3b01b09860810aceb448871150566b93cd24501d5734a04b1", size = 254448, upload-time = "2026-01-19T07:59:10.594Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/62/0b/c486fc61299c2fc3b7f88ee4e115d4c8b6ffd1a7f88dc94b398b5b1bc4b8/pytokens-0.4.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:01d1a61e36812e4e971cfe2c0e4c1f2d66d8311031dac8bf168af8a249fa04dd", size = 268863, upload-time = "2026-01-19T07:59:12.31Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/79/92/b036af846707d25feaff7cafbd5280f1bd6a1034c16bb06a7c910209c1ab/pytokens-0.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e47e2ef3ec6ee86909e520d79f965f9b23389fda47460303cf715d510a6fe544", size = 267181, upload-time = "2026-01-19T07:59:13.856Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/0d/c0/6d011fc00fefa74ce34816c84a923d2dd7c46b8dbc6ee52d13419786834c/pytokens-0.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:3d36954aba4557fd5a418a03cf595ecbb1cdcce119f91a49b19ef09d691a22ae", size = 102814, upload-time = "2026-01-19T07:59:15.288Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/98/63/627b7e71d557383da5a97f473ad50f8d9c2c1f55c7d3c2531a120c796f6e/pytokens-0.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:73eff3bdd8ad08da679867992782568db0529b887bed4c85694f84cdf35eafc6", size = 159744, upload-time = "2026-01-19T07:59:16.88Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/28/d7/16f434c37ec3824eba6bcb6e798e5381a8dc83af7a1eda0f95c16fe3ade5/pytokens-0.4.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d97cc1f91b1a8e8ebccf31c367f28225699bea26592df27141deade771ed0afb", size = 253207, upload-time = "2026-01-19T07:59:18.069Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/ab/96/04102856b9527701ae57d74a6393d1aca5bad18a1b1ca48ccffb3c93b392/pytokens-0.4.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a2c8952c537cb73a1a74369501a83b7f9d208c3cf92c41dd88a17814e68d48ce", size = 267452, upload-time = "2026-01-19T07:59:19.328Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/0e/ef/0936eb472b89ab2d2c2c24bb81c50417e803fa89c731930d9fb01176fe9f/pytokens-0.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5dbf56f3c748aed9310b310d5b8b14e2c96d3ad682ad5a943f381bdbbdddf753", size = 265965, upload-time = "2026-01-19T07:59:20.613Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/ae/f5/64f3d6f7df4a9e92ebda35ee85061f6260e16eac82df9396020eebbca775/pytokens-0.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:e131804513597f2dff2b18f9911d9b6276e21ef3699abeffc1c087c65a3d975e", size = 102813, upload-time = "2026-01-19T07:59:22.012Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/5f/f1/d07e6209f18ef378fc2ae9dee8d1dfe91fd2447c2e2dbfa32867b6dd30cf/pytokens-0.4.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:0d7374c917197106d3c4761374718bc55ea2e9ac0fb94171588ef5840ee1f016", size = 159968, upload-time = "2026-01-19T07:59:23.07Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/0a/73/0eb111400abd382a04f253b269819db9fcc748aa40748441cebdcb6d068f/pytokens-0.4.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0cd3fa1caf9e47a72ee134a29ca6b5bea84712724bba165d6628baa190c6ea5b", size = 253373, upload-time = "2026-01-19T07:59:24.381Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/bd/8d/9e4e2fdb5bcaba679e54afcc304e9f13f488eb4d626e6b613f9553e03dbd/pytokens-0.4.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9c6986576b7b07fe9791854caa5347923005a80b079d45b63b0be70d50cce5f1", size = 267024, upload-time = "2026-01-19T07:59:25.74Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/cb/b7/e0a370321af2deb772cff14ff337e1140d1eac2c29a8876bfee995f486f0/pytokens-0.4.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:9940f7c2e2f54fb1cb5fe17d0803c54da7a2bf62222704eb4217433664a186a7", size = 270912, upload-time = "2026-01-19T07:59:27.072Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/7c/54/4348f916c440d4c3e68b53b4ed0e66b292d119e799fa07afa159566dcc86/pytokens-0.4.0-cp314-cp314-win_amd64.whl", hash = "sha256:54691cf8f299e7efabcc25adb4ce715d3cef1491e1c930eaf555182f898ef66a", size = 103836, upload-time = "2026-01-19T07:59:28.112Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/e8/f8/a693c0cfa9c783a2a8c4500b7b2a8bab420f8ca4f2d496153226bf1c12e3/pytokens-0.4.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:94ff5db97a0d3cd7248a5b07ba2167bd3edc1db92f76c6db00137bbaf068ddf8", size = 167643, upload-time = "2026-01-19T07:59:29.292Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/c0/dd/a64eb1e9f3ec277b69b33ef1b40ffbcc8f0a3bafcde120997efc7bdefebf/pytokens-0.4.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d0dd6261cd9cc95fae1227b1b6ebee023a5fd4a4b6330b071c73a516f5f59b63", size = 289553, upload-time = "2026-01-19T07:59:30.537Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/df/22/06c1079d93dbc3bca5d013e1795f3d8b9ed6c87290acd6913c1c526a6bb2/pytokens-0.4.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0cdca8159df407dbd669145af4171a0d967006e0be25f3b520896bc7068f02c4", size = 302490, upload-time = "2026-01-19T07:59:32.352Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/8d/de/a6f5e43115b4fbf4b93aa87d6c83c79932cdb084f9711daae04549e1e4ad/pytokens-0.4.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:4b5770abeb2a24347380a1164a558f0ebe06e98aedbd54c45f7929527a5fb26e", size = 305652, upload-time = "2026-01-19T07:59:33.685Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/ab/3d/c136e057cb622e36e0c3ff7a8aaa19ff9720050c4078235691da885fe6ee/pytokens-0.4.0-cp314-cp314t-win_amd64.whl", hash = "sha256:74500d72c561dad14c037a9e86a657afd63e277dd5a3bb7570932ab7a3b12551", size = 115472, upload-time = "2026-01-19T07:59:34.734Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/7c/3c/6941a82f4f130af6e1c68c076b6789069ef10c04559bd4733650f902fd3b/pytokens-0.4.0-py3-none-any.whl", hash = "sha256:0508d11b4de157ee12063901603be87fb0253e8f4cb9305eb168b1202ab92068", size = 13224, upload-time = "2026-01-19T07:59:49.822Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "pytz"
                                                                  +version = "2025.2"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/f8/bf/abbd3cdfb8fbc7fb3d4d38d320f2441b1e7cbe29be4f23797b4a2b5d8aac/pytz-2025.2.tar.gz", hash = "sha256:360b9e3dbb49a209c21ad61809c7fb453643e048b38924c765813546746e81c3", size = 320884, upload-time = "2025-03-25T02:25:00.538Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/81/c4/34e93fe5f5429d7570ec1fa436f1986fb1f00c3e0f43a589fe2bbcd22c3f/pytz-2025.2-py2.py3-none-any.whl", hash = "sha256:5ddf76296dd8c44c26eb8f4b6f35488f3ccbf6fbbd7adee0b7262d43f0ec2f00", size = 509225, upload-time = "2025-03-25T02:24:58.468Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "pywinpty"
                                                                  +version = "3.0.2"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/f3/bb/a7cc2967c5c4eceb6cc49cfe39447d4bfc56e6c865e7c2249b6eb978935f/pywinpty-3.0.2.tar.gz", hash = "sha256:1505cc4cb248af42cb6285a65c9c2086ee9e7e574078ee60933d5d7fa86fb004", size = 30669, upload-time = "2025-10-03T21:16:29.205Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/02/4e/1098484e042c9485f56f16eb2b69b43b874bd526044ee401512234cf9e04/pywinpty-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:99fdd9b455f0ad6419aba6731a7a0d2f88ced83c3c94a80ff9533d95fa8d8a9e", size = 2050391, upload-time = "2025-10-03T21:19:01.642Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/fc/19/b757fe28008236a4a713e813283721b8a40aa60cd7d3f83549f2e25a3155/pywinpty-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:18f78b81e4cfee6aabe7ea8688441d30247b73e52cd9657138015c5f4ee13a51", size = 2050057, upload-time = "2025-10-03T21:19:26.732Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/cb/44/cbae12ecf6f4fa4129c36871fd09c6bef4f98d5f625ecefb5e2449765508/pywinpty-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:663383ecfab7fc382cc97ea5c4f7f0bb32c2f889259855df6ea34e5df42d305b", size = 2049874, upload-time = "2025-10-03T21:18:53.923Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/ca/15/f12c6055e2d7a617d4d5820e8ac4ceaff849da4cb124640ef5116a230771/pywinpty-3.0.2-cp314-cp314-win_amd64.whl", hash = "sha256:28297cecc37bee9f24d8889e47231972d6e9e84f7b668909de54f36ca785029a", size = 2050386, upload-time = "2025-10-03T21:18:50.477Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/de/24/c6907c5bb06043df98ad6a0a0ff5db2e0affcecbc3b15c42404393a3f72a/pywinpty-3.0.2-cp314-cp314t-win_amd64.whl", hash = "sha256:34b55ae9a1b671fe3eae071d86618110538e8eaad18fcb1531c0830b91a82767", size = 2049834, upload-time = "2025-10-03T21:19:25.688Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "pyyaml"
                                                                  +version = "6.0.3"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/05/8e/961c0007c59b8dd7729d542c61a4d537767a59645b82a0b521206e1e25c2/pyyaml-6.0.3.tar.gz", hash = "sha256:d76623373421df22fb4cf8817020cbb7ef15c725b9d5e45f17e189bfc384190f", size = 130960, upload-time = "2025-09-25T21:33:16.546Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/d1/33/422b98d2195232ca1826284a76852ad5a86fe23e31b009c9886b2d0fb8b2/pyyaml-6.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7f047e29dcae44602496db43be01ad42fc6f1cc0d8cd6c83d342306c32270196", size = 182063, upload-time = "2025-09-25T21:32:11.445Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/89/a0/6cf41a19a1f2f3feab0e9c0b74134aa2ce6849093d5517a0c550fe37a648/pyyaml-6.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fc09d0aa354569bc501d4e787133afc08552722d3ab34836a80547331bb5d4a0", size = 173973, upload-time = "2025-09-25T21:32:12.492Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/ed/23/7a778b6bd0b9a8039df8b1b1d80e2e2ad78aa04171592c8a5c43a56a6af4/pyyaml-6.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9149cad251584d5fb4981be1ecde53a1ca46c891a79788c0df828d2f166bda28", size = 775116, upload-time = "2025-09-25T21:32:13.652Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/65/30/d7353c338e12baef4ecc1b09e877c1970bd3382789c159b4f89d6a70dc09/pyyaml-6.0.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5fdec68f91a0c6739b380c83b951e2c72ac0197ace422360e6d5a959d8d97b2c", size = 844011, upload-time = "2025-09-25T21:32:15.21Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/8b/9d/b3589d3877982d4f2329302ef98a8026e7f4443c765c46cfecc8858c6b4b/pyyaml-6.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ba1cc08a7ccde2d2ec775841541641e4548226580ab850948cbfda66a1befcdc", size = 807870, upload-time = "2025-09-25T21:32:16.431Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/05/c0/b3be26a015601b822b97d9149ff8cb5ead58c66f981e04fedf4e762f4bd4/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8dc52c23056b9ddd46818a57b78404882310fb473d63f17b07d5c40421e47f8e", size = 761089, upload-time = "2025-09-25T21:32:17.56Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/be/8e/98435a21d1d4b46590d5459a22d88128103f8da4c2d4cb8f14f2a96504e1/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:41715c910c881bc081f1e8872880d3c650acf13dfa8214bad49ed4cede7c34ea", size = 790181, upload-time = "2025-09-25T21:32:18.834Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/74/93/7baea19427dcfbe1e5a372d81473250b379f04b1bd3c4c5ff825e2327202/pyyaml-6.0.3-cp312-cp312-win32.whl", hash = "sha256:96b533f0e99f6579b3d4d4995707cf36df9100d67e0c8303a0c55b27b5f99bc5", size = 137658, upload-time = "2025-09-25T21:32:20.209Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/86/bf/899e81e4cce32febab4fb42bb97dcdf66bc135272882d1987881a4b519e9/pyyaml-6.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:5fcd34e47f6e0b794d17de1b4ff496c00986e1c83f7ab2fb8fcfe9616ff7477b", size = 154003, upload-time = "2025-09-25T21:32:21.167Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/1a/08/67bd04656199bbb51dbed1439b7f27601dfb576fb864099c7ef0c3e55531/pyyaml-6.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:64386e5e707d03a7e172c0701abfb7e10f0fb753ee1d773128192742712a98fd", size = 140344, upload-time = "2025-09-25T21:32:22.617Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/d1/11/0fd08f8192109f7169db964b5707a2f1e8b745d4e239b784a5a1dd80d1db/pyyaml-6.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8da9669d359f02c0b91ccc01cac4a67f16afec0dac22c2ad09f46bee0697eba8", size = 181669, upload-time = "2025-09-25T21:32:23.673Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/b1/16/95309993f1d3748cd644e02e38b75d50cbc0d9561d21f390a76242ce073f/pyyaml-6.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2283a07e2c21a2aa78d9c4442724ec1eb15f5e42a723b99cb3d822d48f5f7ad1", size = 173252, upload-time = "2025-09-25T21:32:25.149Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/50/31/b20f376d3f810b9b2371e72ef5adb33879b25edb7a6d072cb7ca0c486398/pyyaml-6.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ee2922902c45ae8ccada2c5b501ab86c36525b883eff4255313a253a3160861c", size = 767081, upload-time = "2025-09-25T21:32:26.575Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/49/1e/a55ca81e949270d5d4432fbbd19dfea5321eda7c41a849d443dc92fd1ff7/pyyaml-6.0.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a33284e20b78bd4a18c8c2282d549d10bc8408a2a7ff57653c0cf0b9be0afce5", size = 841159, upload-time = "2025-09-25T21:32:27.727Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/74/27/e5b8f34d02d9995b80abcef563ea1f8b56d20134d8f4e5e81733b1feceb2/pyyaml-6.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0f29edc409a6392443abf94b9cf89ce99889a1dd5376d94316ae5145dfedd5d6", size = 801626, upload-time = "2025-09-25T21:32:28.878Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/f9/11/ba845c23988798f40e52ba45f34849aa8a1f2d4af4b798588010792ebad6/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f7057c9a337546edc7973c0d3ba84ddcdf0daa14533c2065749c9075001090e6", size = 753613, upload-time = "2025-09-25T21:32:30.178Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/3d/e0/7966e1a7bfc0a45bf0a7fb6b98ea03fc9b8d84fa7f2229e9659680b69ee3/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:eda16858a3cab07b80edaf74336ece1f986ba330fdb8ee0d6c0d68fe82bc96be", size = 794115, upload-time = "2025-09-25T21:32:31.353Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/de/94/980b50a6531b3019e45ddeada0626d45fa85cbe22300844a7983285bed3b/pyyaml-6.0.3-cp313-cp313-win32.whl", hash = "sha256:d0eae10f8159e8fdad514efdc92d74fd8d682c933a6dd088030f3834bc8e6b26", size = 137427, upload-time = "2025-09-25T21:32:32.58Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/97/c9/39d5b874e8b28845e4ec2202b5da735d0199dbe5b8fb85f91398814a9a46/pyyaml-6.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:79005a0d97d5ddabfeeea4cf676af11e647e41d81c9a7722a193022accdb6b7c", size = 154090, upload-time = "2025-09-25T21:32:33.659Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/73/e8/2bdf3ca2090f68bb3d75b44da7bbc71843b19c9f2b9cb9b0f4ab7a5a4329/pyyaml-6.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:5498cd1645aa724a7c71c8f378eb29ebe23da2fc0d7a08071d89469bf1d2defb", size = 140246, upload-time = "2025-09-25T21:32:34.663Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/9d/8c/f4bd7f6465179953d3ac9bc44ac1a8a3e6122cf8ada906b4f96c60172d43/pyyaml-6.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:8d1fab6bb153a416f9aeb4b8763bc0f22a5586065f86f7664fc23339fc1c1fac", size = 181814, upload-time = "2025-09-25T21:32:35.712Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/bd/9c/4d95bb87eb2063d20db7b60faa3840c1b18025517ae857371c4dd55a6b3a/pyyaml-6.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:34d5fcd24b8445fadc33f9cf348c1047101756fd760b4dacb5c3e99755703310", size = 173809, upload-time = "2025-09-25T21:32:36.789Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/92/b5/47e807c2623074914e29dabd16cbbdd4bf5e9b2db9f8090fa64411fc5382/pyyaml-6.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:501a031947e3a9025ed4405a168e6ef5ae3126c59f90ce0cd6f2bfc477be31b7", size = 766454, upload-time = "2025-09-25T21:32:37.966Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/02/9e/e5e9b168be58564121efb3de6859c452fccde0ab093d8438905899a3a483/pyyaml-6.0.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:b3bc83488de33889877a0f2543ade9f70c67d66d9ebb4ac959502e12de895788", size = 836355, upload-time = "2025-09-25T21:32:39.178Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/88/f9/16491d7ed2a919954993e48aa941b200f38040928474c9e85ea9e64222c3/pyyaml-6.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c458b6d084f9b935061bc36216e8a69a7e293a2f1e68bf956dcd9e6cbcd143f5", size = 794175, upload-time = "2025-09-25T21:32:40.865Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/dd/3f/5989debef34dc6397317802b527dbbafb2b4760878a53d4166579111411e/pyyaml-6.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7c6610def4f163542a622a73fb39f534f8c101d690126992300bf3207eab9764", size = 755228, upload-time = "2025-09-25T21:32:42.084Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/d7/ce/af88a49043cd2e265be63d083fc75b27b6ed062f5f9fd6cdc223ad62f03e/pyyaml-6.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:5190d403f121660ce8d1d2c1bb2ef1bd05b5f68533fc5c2ea899bd15f4399b35", size = 789194, upload-time = "2025-09-25T21:32:43.362Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/23/20/bb6982b26a40bb43951265ba29d4c246ef0ff59c9fdcdf0ed04e0687de4d/pyyaml-6.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:4a2e8cebe2ff6ab7d1050ecd59c25d4c8bd7e6f400f5f82b96557ac0abafd0ac", size = 156429, upload-time = "2025-09-25T21:32:57.844Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/f4/f4/a4541072bb9422c8a883ab55255f918fa378ecf083f5b85e87fc2b4eda1b/pyyaml-6.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:93dda82c9c22deb0a405ea4dc5f2d0cda384168e466364dec6255b293923b2f3", size = 143912, upload-time = "2025-09-25T21:32:59.247Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/7c/f9/07dd09ae774e4616edf6cda684ee78f97777bdd15847253637a6f052a62f/pyyaml-6.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:02893d100e99e03eda1c8fd5c441d8c60103fd175728e23e431db1b589cf5ab3", size = 189108, upload-time = "2025-09-25T21:32:44.377Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/4e/78/8d08c9fb7ce09ad8c38ad533c1191cf27f7ae1effe5bb9400a46d9437fcf/pyyaml-6.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c1ff362665ae507275af2853520967820d9124984e0f7466736aea23d8611fba", size = 183641, upload-time = "2025-09-25T21:32:45.407Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/7b/5b/3babb19104a46945cf816d047db2788bcaf8c94527a805610b0289a01c6b/pyyaml-6.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6adc77889b628398debc7b65c073bcb99c4a0237b248cacaf3fe8a557563ef6c", size = 831901, upload-time = "2025-09-25T21:32:48.83Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/8b/cc/dff0684d8dc44da4d22a13f35f073d558c268780ce3c6ba1b87055bb0b87/pyyaml-6.0.3-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a80cb027f6b349846a3bf6d73b5e95e782175e52f22108cfa17876aaeff93702", size = 861132, upload-time = "2025-09-25T21:32:50.149Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/b1/5e/f77dc6b9036943e285ba76b49e118d9ea929885becb0a29ba8a7c75e29fe/pyyaml-6.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:00c4bdeba853cc34e7dd471f16b4114f4162dc03e6b7afcc2128711f0eca823c", size = 839261, upload-time = "2025-09-25T21:32:51.808Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/ce/88/a9db1376aa2a228197c58b37302f284b5617f56a5d959fd1763fb1675ce6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:66e1674c3ef6f541c35191caae2d429b967b99e02040f5ba928632d9a7f0f065", size = 805272, upload-time = "2025-09-25T21:32:52.941Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/da/92/1446574745d74df0c92e6aa4a7b0b3130706a4142b2d1a5869f2eaa423c6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:16249ee61e95f858e83976573de0f5b2893b3677ba71c9dd36b9cf8be9ac6d65", size = 829923, upload-time = "2025-09-25T21:32:54.537Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/f0/7a/1c7270340330e575b92f397352af856a8c06f230aa3e76f86b39d01b416a/pyyaml-6.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4ad1906908f2f5ae4e5a8ddfce73c320c2a1429ec52eafd27138b7f1cbe341c9", size = 174062, upload-time = "2025-09-25T21:32:55.767Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/f1/12/de94a39c2ef588c7e6455cfbe7343d3b2dc9d6b6b2f40c4c6565744c873d/pyyaml-6.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:ebc55a14a21cb14062aa4162f906cd962b28e2e9ea38f9b4391244cd8de4ae0b", size = 149341, upload-time = "2025-09-25T21:32:56.828Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "pyzmq"
                                                                  +version = "27.1.0"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +dependencies = [
                                                                  +    { name = "cffi", marker = "implementation_name == 'pypy'" },
                                                                  +]
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/04/0b/3c9baedbdf613ecaa7aa07027780b8867f57b6293b6ee50de316c9f3222b/pyzmq-27.1.0.tar.gz", hash = "sha256:ac0765e3d44455adb6ddbf4417dcce460fc40a05978c08efdf2948072f6db540", size = 281750, upload-time = "2025-09-08T23:10:18.157Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/92/e7/038aab64a946d535901103da16b953c8c9cc9c961dadcbf3609ed6428d23/pyzmq-27.1.0-cp312-abi3-macosx_10_15_universal2.whl", hash = "sha256:452631b640340c928fa343801b0d07eb0c3789a5ffa843f6e1a9cee0ba4eb4fc", size = 1306279, upload-time = "2025-09-08T23:08:03.807Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/e8/5e/c3c49fdd0f535ef45eefcc16934648e9e59dace4a37ee88fc53f6cd8e641/pyzmq-27.1.0-cp312-abi3-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:1c179799b118e554b66da67d88ed66cd37a169f1f23b5d9f0a231b4e8d44a113", size = 895645, upload-time = "2025-09-08T23:08:05.301Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/f8/e5/b0b2504cb4e903a74dcf1ebae157f9e20ebb6ea76095f6cfffea28c42ecd/pyzmq-27.1.0-cp312-abi3-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3837439b7f99e60312f0c926a6ad437b067356dc2bc2ec96eb395fd0fe804233", size = 652574, upload-time = "2025-09-08T23:08:06.828Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/f8/9b/c108cdb55560eaf253f0cbdb61b29971e9fb34d9c3499b0e96e4e60ed8a5/pyzmq-27.1.0-cp312-abi3-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:43ad9a73e3da1fab5b0e7e13402f0b2fb934ae1c876c51d0afff0e7c052eca31", size = 840995, upload-time = "2025-09-08T23:08:08.396Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/c2/bb/b79798ca177b9eb0825b4c9998c6af8cd2a7f15a6a1a4272c1d1a21d382f/pyzmq-27.1.0-cp312-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:0de3028d69d4cdc475bfe47a6128eb38d8bc0e8f4d69646adfbcd840facbac28", size = 1642070, upload-time = "2025-09-08T23:08:09.989Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/9c/80/2df2e7977c4ede24c79ae39dcef3899bfc5f34d1ca7a5b24f182c9b7a9ca/pyzmq-27.1.0-cp312-abi3-musllinux_1_2_i686.whl", hash = "sha256:cf44a7763aea9298c0aa7dbf859f87ed7012de8bda0f3977b6fb1d96745df856", size = 2021121, upload-time = "2025-09-08T23:08:11.907Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/46/bd/2d45ad24f5f5ae7e8d01525eb76786fa7557136555cac7d929880519e33a/pyzmq-27.1.0-cp312-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:f30f395a9e6fbca195400ce833c731e7b64c3919aa481af4d88c3759e0cb7496", size = 1878550, upload-time = "2025-09-08T23:08:13.513Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/e6/2f/104c0a3c778d7c2ab8190e9db4f62f0b6957b53c9d87db77c284b69f33ea/pyzmq-27.1.0-cp312-abi3-win32.whl", hash = "sha256:250e5436a4ba13885494412b3da5d518cd0d3a278a1ae640e113c073a5f88edd", size = 559184, upload-time = "2025-09-08T23:08:15.163Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/fc/7f/a21b20d577e4100c6a41795842028235998a643b1ad406a6d4163ea8f53e/pyzmq-27.1.0-cp312-abi3-win_amd64.whl", hash = "sha256:9ce490cf1d2ca2ad84733aa1d69ce6855372cb5ce9223802450c9b2a7cba0ccf", size = 619480, upload-time = "2025-09-08T23:08:17.192Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/78/c2/c012beae5f76b72f007a9e91ee9401cb88c51d0f83c6257a03e785c81cc2/pyzmq-27.1.0-cp312-abi3-win_arm64.whl", hash = "sha256:75a2f36223f0d535a0c919e23615fc85a1e23b71f40c7eb43d7b1dedb4d8f15f", size = 552993, upload-time = "2025-09-08T23:08:18.926Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/60/cb/84a13459c51da6cec1b7b1dc1a47e6db6da50b77ad7fd9c145842750a011/pyzmq-27.1.0-cp313-cp313-android_24_arm64_v8a.whl", hash = "sha256:93ad4b0855a664229559e45c8d23797ceac03183c7b6f5b4428152a6b06684a5", size = 1122436, upload-time = "2025-09-08T23:08:20.801Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/dc/b6/94414759a69a26c3dd674570a81813c46a078767d931a6c70ad29fc585cb/pyzmq-27.1.0-cp313-cp313-android_24_x86_64.whl", hash = "sha256:fbb4f2400bfda24f12f009cba62ad5734148569ff4949b1b6ec3b519444342e6", size = 1156301, upload-time = "2025-09-08T23:08:22.47Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/a5/ad/15906493fd40c316377fd8a8f6b1f93104f97a752667763c9b9c1b71d42d/pyzmq-27.1.0-cp313-cp313t-macosx_10_15_universal2.whl", hash = "sha256:e343d067f7b151cfe4eb3bb796a7752c9d369eed007b91231e817071d2c2fec7", size = 1341197, upload-time = "2025-09-08T23:08:24.286Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/14/1d/d343f3ce13db53a54cb8946594e567410b2125394dafcc0268d8dda027e0/pyzmq-27.1.0-cp313-cp313t-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:08363b2011dec81c354d694bdecaef4770e0ae96b9afea70b3f47b973655cc05", size = 897275, upload-time = "2025-09-08T23:08:26.063Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/69/2d/d83dd6d7ca929a2fc67d2c3005415cdf322af7751d773524809f9e585129/pyzmq-27.1.0-cp313-cp313t-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d54530c8c8b5b8ddb3318f481297441af102517602b569146185fa10b63f4fa9", size = 660469, upload-time = "2025-09-08T23:08:27.623Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/3e/cd/9822a7af117f4bc0f1952dbe9ef8358eb50a24928efd5edf54210b850259/pyzmq-27.1.0-cp313-cp313t-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6f3afa12c392f0a44a2414056d730eebc33ec0926aae92b5ad5cf26ebb6cc128", size = 847961, upload-time = "2025-09-08T23:08:29.672Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/9a/12/f003e824a19ed73be15542f172fd0ec4ad0b60cf37436652c93b9df7c585/pyzmq-27.1.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:c65047adafe573ff023b3187bb93faa583151627bc9c51fc4fb2c561ed689d39", size = 1650282, upload-time = "2025-09-08T23:08:31.349Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/d5/4a/e82d788ed58e9a23995cee70dbc20c9aded3d13a92d30d57ec2291f1e8a3/pyzmq-27.1.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:90e6e9441c946a8b0a667356f7078d96411391a3b8f80980315455574177ec97", size = 2024468, upload-time = "2025-09-08T23:08:33.543Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/d9/94/2da0a60841f757481e402b34bf4c8bf57fa54a5466b965de791b1e6f747d/pyzmq-27.1.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:add071b2d25f84e8189aaf0882d39a285b42fa3853016ebab234a5e78c7a43db", size = 1885394, upload-time = "2025-09-08T23:08:35.51Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/4f/6f/55c10e2e49ad52d080dc24e37adb215e5b0d64990b57598abc2e3f01725b/pyzmq-27.1.0-cp313-cp313t-win32.whl", hash = "sha256:7ccc0700cfdf7bd487bea8d850ec38f204478681ea02a582a8da8171b7f90a1c", size = 574964, upload-time = "2025-09-08T23:08:37.178Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/87/4d/2534970ba63dd7c522d8ca80fb92777f362c0f321900667c615e2067cb29/pyzmq-27.1.0-cp313-cp313t-win_amd64.whl", hash = "sha256:8085a9fba668216b9b4323be338ee5437a235fe275b9d1610e422ccc279733e2", size = 641029, upload-time = "2025-09-08T23:08:40.595Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/f6/fa/f8aea7a28b0641f31d40dea42d7ef003fded31e184ef47db696bc74cd610/pyzmq-27.1.0-cp313-cp313t-win_arm64.whl", hash = "sha256:6bb54ca21bcfe361e445256c15eedf083f153811c37be87e0514934d6913061e", size = 561541, upload-time = "2025-09-08T23:08:42.668Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/87/45/19efbb3000956e82d0331bafca5d9ac19ea2857722fa2caacefb6042f39d/pyzmq-27.1.0-cp314-cp314t-macosx_10_15_universal2.whl", hash = "sha256:ce980af330231615756acd5154f29813d553ea555485ae712c491cd483df6b7a", size = 1341197, upload-time = "2025-09-08T23:08:44.973Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/48/43/d72ccdbf0d73d1343936296665826350cb1e825f92f2db9db3e61c2162a2/pyzmq-27.1.0-cp314-cp314t-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:1779be8c549e54a1c38f805e56d2a2e5c009d26de10921d7d51cfd1c8d4632ea", size = 897175, upload-time = "2025-09-08T23:08:46.601Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/2f/2e/a483f73a10b65a9ef0161e817321d39a770b2acf8bcf3004a28d90d14a94/pyzmq-27.1.0-cp314-cp314t-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7200bb0f03345515df50d99d3db206a0a6bee1955fbb8c453c76f5bf0e08fb96", size = 660427, upload-time = "2025-09-08T23:08:48.187Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/f5/d2/5f36552c2d3e5685abe60dfa56f91169f7a2d99bbaf67c5271022ab40863/pyzmq-27.1.0-cp314-cp314t-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:01c0e07d558b06a60773744ea6251f769cd79a41a97d11b8bf4ab8f034b0424d", size = 847929, upload-time = "2025-09-08T23:08:49.76Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/c4/2a/404b331f2b7bf3198e9945f75c4c521f0c6a3a23b51f7a4a401b94a13833/pyzmq-27.1.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:80d834abee71f65253c91540445d37c4c561e293ba6e741b992f20a105d69146", size = 1650193, upload-time = "2025-09-08T23:08:51.7Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/1c/0b/f4107e33f62a5acf60e3ded67ed33d79b4ce18de432625ce2fc5093d6388/pyzmq-27.1.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:544b4e3b7198dde4a62b8ff6685e9802a9a1ebf47e77478a5eb88eca2a82f2fd", size = 2024388, upload-time = "2025-09-08T23:08:53.393Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/0d/01/add31fe76512642fd6e40e3a3bd21f4b47e242c8ba33efb6809e37076d9b/pyzmq-27.1.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:cedc4c68178e59a4046f97eca31b148ddcf51e88677de1ef4e78cf06c5376c9a", size = 1885316, upload-time = "2025-09-08T23:08:55.702Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/c4/59/a5f38970f9bf07cee96128de79590bb354917914a9be11272cfc7ff26af0/pyzmq-27.1.0-cp314-cp314t-win32.whl", hash = "sha256:1f0b2a577fd770aa6f053211a55d1c47901f4d537389a034c690291485e5fe92", size = 587472, upload-time = "2025-09-08T23:08:58.18Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/70/d8/78b1bad170f93fcf5e3536e70e8fadac55030002275c9a29e8f5719185de/pyzmq-27.1.0-cp314-cp314t-win_amd64.whl", hash = "sha256:19c9468ae0437f8074af379e986c5d3d7d7bfe033506af442e8c879732bedbe0", size = 661401, upload-time = "2025-09-08T23:08:59.802Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/81/d6/4bfbb40c9a0b42fc53c7cf442f6385db70b40f74a783130c5d0a5aa62228/pyzmq-27.1.0-cp314-cp314t-win_arm64.whl", hash = "sha256:dc5dbf68a7857b59473f7df42650c621d7e8923fb03fa74a526890f4d33cc4d7", size = 575170, upload-time = "2025-09-08T23:09:01.418Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "quartodoc"
                                                                  +version = "0.11.1"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +dependencies = [
                                                                  +    { name = "black" },
                                                                  +    { name = "click" },
                                                                  +    { name = "griffe" },
                                                                  +    { name = "importlib-metadata" },
                                                                  +    { name = "importlib-resources" },
                                                                  +    { name = "plum-dispatch" },
                                                                  +    { name = "pydantic" },
                                                                  +    { name = "pyyaml" },
                                                                  +    { name = "requests" },
                                                                  +    { name = "sphobjinv" },
                                                                  +    { name = "tabulate" },
                                                                  +    { name = "typing-extensions" },
                                                                  +    { name = "watchdog" },
                                                                  +]
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/ff/b9/0bb44cc62a5d63728d04fd6b1e14bcb64945fabad4aa8c03b9d70315fb06/quartodoc-0.11.1.tar.gz", hash = "sha256:c121626e1a36392d168631f33c4d3e7fd48d185de178859f8eafbda14fbfe92f", size = 778611, upload-time = "2025-06-10T14:50:08.185Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/75/d9/0b48d4184f9ca6a996c4fac46897a968698c9d1e0f0e43a6906746201323/quartodoc-0.11.1-py3-none-any.whl", hash = "sha256:0776eb8e53d89385e2c9a8ae0ec08e8c307c1410dd1bd78bb28e8b1823dbb6ad", size = 88053, upload-time = "2025-06-10T14:50:06.443Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "redis"
                                                                  +version = "5.3.1"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +dependencies = [
                                                                  +    { name = "pyjwt" },
                                                                  +]
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/6a/cf/128b1b6d7086200c9f387bd4be9b2572a30b90745ef078bd8b235042dc9f/redis-5.3.1.tar.gz", hash = "sha256:ca49577a531ea64039b5a36db3d6cd1a0c7a60c34124d46924a45b956e8cf14c", size = 4626200, upload-time = "2025-07-25T08:06:27.778Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/7f/26/5c5fa0e83c3621db835cfc1f1d789b37e7fa99ed54423b5f519beb931aa7/redis-5.3.1-py3-none-any.whl", hash = "sha256:dc1909bd24669cc31b5f67a039700b16ec30571096c5f1f0d9d2324bff31af97", size = 272833, upload-time = "2025-07-25T08:06:26.317Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "redis-om"
                                                                  +version = "0.3.5"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +dependencies = [
                                                                  +    { name = "click" },
                                                                  +    { name = "hiredis" },
                                                                  +    { name = "more-itertools" },
                                                                  +    { name = "pydantic" },
                                                                  +    { name = "python-ulid" },
                                                                  +    { name = "redis" },
                                                                  +    { name = "setuptools" },
                                                                  +    { name = "types-redis" },
                                                                  +    { name = "typing-extensions" },
                                                                  +]
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/11/32/9bdcb86b88f5b53fd9f80019a62970ded91e4befb65c03fee17bdb2bc9f0/redis_om-0.3.5.tar.gz", hash = "sha256:fd152ccebc9b47604287a347628ef0d2c0051c13d5653f121193e801bb1cc4a7", size = 78939, upload-time = "2025-04-04T12:54:51.465Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/e4/60/2cc6753c2c36a2a5dded8c380c6cad67a26c5878cd7aad56de2eee1d63c8/redis_om-0.3.5-py3-none-any.whl", hash = "sha256:99ab40f696028ce47c5e2eb5118a1ffc1fd193005428df89c8cf77ad35a0177a", size = 86634, upload-time = "2025-04-04T12:54:50.07Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "referencing"
                                                                  +version = "0.37.0"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +dependencies = [
                                                                  +    { name = "attrs" },
                                                                  +    { name = "rpds-py" },
                                                                  +    { name = "typing-extensions", marker = "python_full_version < '3.13'" },
                                                                  +]
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/22/f5/df4e9027acead3ecc63e50fe1e36aca1523e1719559c499951bb4b53188f/referencing-0.37.0.tar.gz", hash = "sha256:44aefc3142c5b842538163acb373e24cce6632bd54bdb01b21ad5863489f50d8", size = 78036, upload-time = "2025-10-13T15:30:48.871Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/2c/58/ca301544e1fa93ed4f80d724bf5b194f6e4b945841c5bfd555878eea9fcb/referencing-0.37.0-py3-none-any.whl", hash = "sha256:381329a9f99628c9069361716891d34ad94af76e461dcb0335825aecc7692231", size = 26766, upload-time = "2025-10-13T15:30:47.625Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "requests"
                                                                  +version = "2.32.5"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +dependencies = [
                                                                  +    { name = "certifi" },
                                                                  +    { name = "charset-normalizer" },
                                                                  +    { name = "idna" },
                                                                  +    { name = "urllib3" },
                                                                  +]
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/c9/74/b3ff8e6c8446842c3f5c837e9c3dfcfe2018ea6ecef224c710c85ef728f4/requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf", size = 134517, upload-time = "2025-08-18T20:46:02.573Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6", size = 64738, upload-time = "2025-08-18T20:46:00.542Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "responses"
                                                                  +version = "0.25.8"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +dependencies = [
                                                                  +    { name = "pyyaml" },
                                                                  +    { name = "requests" },
                                                                  +    { name = "urllib3" },
                                                                  +]
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/0e/95/89c054ad70bfef6da605338b009b2e283485835351a9935c7bfbfaca7ffc/responses-0.25.8.tar.gz", hash = "sha256:9374d047a575c8f781b94454db5cab590b6029505f488d12899ddb10a4af1cf4", size = 79320, upload-time = "2025-08-08T19:01:46.709Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/1c/4c/cc276ce57e572c102d9542d383b2cfd551276581dc60004cb94fe8774c11/responses-0.25.8-py3-none-any.whl", hash = "sha256:0c710af92def29c8352ceadff0c3fe340ace27cf5af1bbe46fb71275bcd2831c", size = 34769, upload-time = "2025-08-08T19:01:45.018Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "rfc3339-validator"
                                                                  +version = "0.1.4"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +dependencies = [
                                                                  +    { name = "six" },
                                                                  +]
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/28/ea/a9387748e2d111c3c2b275ba970b735e04e15cdb1eb30693b6b5708c4dbd/rfc3339_validator-0.1.4.tar.gz", hash = "sha256:138a2abdf93304ad60530167e51d2dfb9549521a836871b88d7f4695d0022f6b", size = 5513, upload-time = "2021-05-12T16:37:54.178Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/7b/44/4e421b96b67b2daff264473f7465db72fbdf36a07e05494f50300cc7b0c6/rfc3339_validator-0.1.4-py2.py3-none-any.whl", hash = "sha256:24f6ec1eda14ef823da9e36ec7113124b39c04d50a4d3d3a3c2859577e7791fa", size = 3490, upload-time = "2021-05-12T16:37:52.536Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "rfc3986-validator"
                                                                  +version = "0.1.1"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/da/88/f270de456dd7d11dcc808abfa291ecdd3f45ff44e3b549ffa01b126464d0/rfc3986_validator-0.1.1.tar.gz", hash = "sha256:3d44bde7921b3b9ec3ae4e3adca370438eccebc676456449b145d533b240d055", size = 6760, upload-time = "2019-10-28T16:00:19.144Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/9e/51/17023c0f8f1869d8806b979a2bffa3f861f26a3f1a66b094288323fba52f/rfc3986_validator-0.1.1-py2.py3-none-any.whl", hash = "sha256:2f235c432ef459970b4306369336b9d5dbdda31b510ca1e327636e01f528bfa9", size = 4242, upload-time = "2019-10-28T16:00:13.976Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "rfc3987-syntax"
                                                                  +version = "1.1.0"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +dependencies = [
                                                                  +    { name = "lark" },
                                                                  +]
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/2c/06/37c1a5557acf449e8e406a830a05bf885ac47d33270aec454ef78675008d/rfc3987_syntax-1.1.0.tar.gz", hash = "sha256:717a62cbf33cffdd16dfa3a497d81ce48a660ea691b1ddd7be710c22f00b4a0d", size = 14239, upload-time = "2025-07-18T01:05:05.015Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/7e/71/44ce230e1b7fadd372515a97e32a83011f906ddded8d03e3c6aafbdedbb7/rfc3987_syntax-1.1.0-py3-none-any.whl", hash = "sha256:6c3d97604e4c5ce9f714898e05401a0445a641cfa276432b0a648c80856f6a3f", size = 8046, upload-time = "2025-07-18T01:05:03.843Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "rich"
                                                                  +version = "14.2.0"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +dependencies = [
                                                                  +    { name = "markdown-it-py" },
                                                                  +    { name = "pygments" },
                                                                  +]
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/fb/d2/8920e102050a0de7bfabeb4c4614a49248cf8d5d7a8d01885fbb24dc767a/rich-14.2.0.tar.gz", hash = "sha256:73ff50c7c0c1c77c8243079283f4edb376f0f6442433aecb8ce7e6d0b92d1fe4", size = 219990, upload-time = "2025-10-09T14:16:53.064Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/25/7a/b0178788f8dc6cafce37a212c99565fa1fe7872c70c6c9c1e1a372d9d88f/rich-14.2.0-py3-none-any.whl", hash = "sha256:76bc51fe2e57d2b1be1f96c524b890b816e334ab4c1e45888799bfaab0021edd", size = 243393, upload-time = "2025-10-09T14:16:51.245Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "rpds-py"
                                                                  +version = "0.29.0"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/98/33/23b3b3419b6a3e0f559c7c0d2ca8fc1b9448382b25245033788785921332/rpds_py-0.29.0.tar.gz", hash = "sha256:fe55fe686908f50154d1dc599232016e50c243b438c3b7432f24e2895b0e5359", size = 69359, upload-time = "2025-11-16T14:50:39.532Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/3c/50/bc0e6e736d94e420df79be4deb5c9476b63165c87bb8f19ef75d100d21b3/rpds_py-0.29.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a0891cfd8db43e085c0ab93ab7e9b0c8fee84780d436d3b266b113e51e79f954", size = 376000, upload-time = "2025-11-16T14:48:19.141Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/3e/3a/46676277160f014ae95f24de53bed0e3b7ea66c235e7de0b9df7bd5d68ba/rpds_py-0.29.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3897924d3f9a0361472d884051f9a2460358f9a45b1d85a39a158d2f8f1ad71c", size = 360575, upload-time = "2025-11-16T14:48:20.443Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/75/ba/411d414ed99ea1afdd185bbabeeaac00624bd1e4b22840b5e9967ade6337/rpds_py-0.29.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a21deb8e0d1571508c6491ce5ea5e25669b1dd4adf1c9d64b6314842f708b5d", size = 392159, upload-time = "2025-11-16T14:48:22.12Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/8f/b1/e18aa3a331f705467a48d0296778dc1fea9d7f6cf675bd261f9a846c7e90/rpds_py-0.29.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9efe71687d6427737a0a2de9ca1c0a216510e6cd08925c44162be23ed7bed2d5", size = 410602, upload-time = "2025-11-16T14:48:23.563Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/2f/6c/04f27f0c9f2299274c76612ac9d2c36c5048bb2c6c2e52c38c60bf3868d9/rpds_py-0.29.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:40f65470919dc189c833e86b2c4bd21bd355f98436a2cef9e0a9a92aebc8e57e", size = 515808, upload-time = "2025-11-16T14:48:24.949Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/83/56/a8412aa464fb151f8bc0d91fb0bb888adc9039bd41c1c6ba8d94990d8cf8/rpds_py-0.29.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:def48ff59f181130f1a2cb7c517d16328efac3ec03951cca40c1dc2049747e83", size = 416015, upload-time = "2025-11-16T14:48:26.782Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/04/4c/f9b8a05faca3d9e0a6397c90d13acb9307c9792b2bff621430c58b1d6e76/rpds_py-0.29.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad7bd570be92695d89285a4b373006930715b78d96449f686af422debb4d3949", size = 395325, upload-time = "2025-11-16T14:48:28.055Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/34/60/869f3bfbf8ed7b54f1ad9a5543e0fdffdd40b5a8f587fe300ee7b4f19340/rpds_py-0.29.0-cp312-cp312-manylinux_2_31_riscv64.whl", hash = "sha256:5a572911cd053137bbff8e3a52d31c5d2dba51d3a67ad902629c70185f3f2181", size = 410160, upload-time = "2025-11-16T14:48:29.338Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/91/aa/e5b496334e3aba4fe4c8a80187b89f3c1294c5c36f2a926da74338fa5a73/rpds_py-0.29.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d583d4403bcbf10cffc3ab5cee23d7643fcc960dff85973fd3c2d6c86e8dbb0c", size = 425309, upload-time = "2025-11-16T14:48:30.691Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/85/68/4e24a34189751ceb6d66b28f18159922828dd84155876551f7ca5b25f14f/rpds_py-0.29.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:070befbb868f257d24c3bb350dbd6e2f645e83731f31264b19d7231dd5c396c7", size = 574644, upload-time = "2025-11-16T14:48:31.964Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/8c/cf/474a005ea4ea9c3b4f17b6108b6b13cebfc98ebaff11d6e1b193204b3a93/rpds_py-0.29.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:fc935f6b20b0c9f919a8ff024739174522abd331978f750a74bb68abd117bd19", size = 601605, upload-time = "2025-11-16T14:48:33.252Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/f4/b1/c56f6a9ab8c5f6bb5c65c4b5f8229167a3a525245b0773f2c0896686b64e/rpds_py-0.29.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:8c5a8ecaa44ce2d8d9d20a68a2483a74c07f05d72e94a4dff88906c8807e77b0", size = 564593, upload-time = "2025-11-16T14:48:34.643Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/b3/13/0494cecce4848f68501e0a229432620b4b57022388b071eeff95f3e1e75b/rpds_py-0.29.0-cp312-cp312-win32.whl", hash = "sha256:ba5e1aeaf8dd6d8f6caba1f5539cddda87d511331714b7b5fc908b6cfc3636b7", size = 223853, upload-time = "2025-11-16T14:48:36.419Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/1f/6a/51e9aeb444a00cdc520b032a28b07e5f8dc7bc328b57760c53e7f96997b4/rpds_py-0.29.0-cp312-cp312-win_amd64.whl", hash = "sha256:b5f6134faf54b3cb83375db0f113506f8b7770785be1f95a631e7e2892101977", size = 239895, upload-time = "2025-11-16T14:48:37.956Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/d1/d4/8bce56cdad1ab873e3f27cb31c6a51d8f384d66b022b820525b879f8bed1/rpds_py-0.29.0-cp312-cp312-win_arm64.whl", hash = "sha256:b016eddf00dca7944721bf0cd85b6af7f6c4efaf83ee0b37c4133bd39757a8c7", size = 230321, upload-time = "2025-11-16T14:48:39.71Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/fd/d9/c5de60d9d371bbb186c3e9bf75f4fc5665e11117a25a06a6b2e0afb7380e/rpds_py-0.29.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:1585648d0760b88292eecab5181f5651111a69d90eff35d6b78aa32998886a61", size = 375710, upload-time = "2025-11-16T14:48:41.063Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/b3/b3/0860cdd012291dc21272895ce107f1e98e335509ba986dd83d72658b82b9/rpds_py-0.29.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:521807963971a23996ddaf764c682b3e46459b3c58ccd79fefbe16718db43154", size = 360582, upload-time = "2025-11-16T14:48:42.423Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/92/8a/a18c2f4a61b3407e56175f6aab6deacdf9d360191a3d6f38566e1eaf7266/rpds_py-0.29.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a8896986efaa243ab713c69e6491a4138410f0fe36f2f4c71e18bd5501e8014", size = 391172, upload-time = "2025-11-16T14:48:43.75Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/fd/49/e93354258508c50abc15cdcd5fcf7ac4117f67bb6233ad7859f75e7372a0/rpds_py-0.29.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1d24564a700ef41480a984c5ebed62b74e6ce5860429b98b1fede76049e953e6", size = 409586, upload-time = "2025-11-16T14:48:45.498Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/5a/8d/a27860dae1c19a6bdc901f90c81f0d581df1943355802961a57cdb5b6cd1/rpds_py-0.29.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e6596b93c010d386ae46c9fba9bfc9fc5965fa8228edeac51576299182c2e31c", size = 516339, upload-time = "2025-11-16T14:48:47.308Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/fc/ad/a75e603161e79b7110c647163d130872b271c6b28712c803c65d492100f7/rpds_py-0.29.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5cc58aac218826d054c7da7f95821eba94125d88be673ff44267bb89d12a5866", size = 416201, upload-time = "2025-11-16T14:48:48.615Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/b9/42/555b4ee17508beafac135c8b450816ace5a96194ce97fefc49d58e5652ea/rpds_py-0.29.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de73e40ebc04dd5d9556f50180395322193a78ec247e637e741c1b954810f295", size = 395095, upload-time = "2025-11-16T14:48:50.027Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/cd/f0/c90b671b9031e800ec45112be42ea9f027f94f9ac25faaac8770596a16a1/rpds_py-0.29.0-cp313-cp313-manylinux_2_31_riscv64.whl", hash = "sha256:295ce5ac7f0cf69a651ea75c8f76d02a31f98e5698e82a50a5f4d4982fbbae3b", size = 410077, upload-time = "2025-11-16T14:48:51.515Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/3d/80/9af8b640b81fe21e6f718e9dec36c0b5f670332747243130a5490f292245/rpds_py-0.29.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1ea59b23ea931d494459c8338056fe7d93458c0bf3ecc061cd03916505369d55", size = 424548, upload-time = "2025-11-16T14:48:53.237Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/e4/0b/b5647446e991736e6a495ef510e6710df91e880575a586e763baeb0aa770/rpds_py-0.29.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f49d41559cebd608042fdcf54ba597a4a7555b49ad5c1c0c03e0af82692661cd", size = 573661, upload-time = "2025-11-16T14:48:54.769Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/f7/b3/1b1c9576839ff583d1428efbf59f9ee70498d8ce6c0b328ac02f1e470879/rpds_py-0.29.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:05a2bd42768ea988294ca328206efbcc66e220d2d9b7836ee5712c07ad6340ea", size = 600937, upload-time = "2025-11-16T14:48:56.247Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/6c/7b/b6cfca2f9fee4c4494ce54f7fb1b9f578867495a9aa9fc0d44f5f735c8e0/rpds_py-0.29.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:33ca7bdfedd83339ca55da3a5e1527ee5870d4b8369456b5777b197756f3ca22", size = 564496, upload-time = "2025-11-16T14:48:57.691Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/b9/fb/ba29ec7f0f06eb801bac5a23057a9ff7670623b5e8013bd59bec4aa09de8/rpds_py-0.29.0-cp313-cp313-win32.whl", hash = "sha256:20c51ae86a0bb9accc9ad4e6cdeec58d5ebb7f1b09dd4466331fc65e1766aae7", size = 223126, upload-time = "2025-11-16T14:48:59.058Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/3c/6b/0229d3bed4ddaa409e6d90b0ae967ed4380e4bdd0dad6e59b92c17d42457/rpds_py-0.29.0-cp313-cp313-win_amd64.whl", hash = "sha256:6410e66f02803600edb0b1889541f4b5cc298a5ccda0ad789cc50ef23b54813e", size = 239771, upload-time = "2025-11-16T14:49:00.872Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/e4/38/d2868f058b164f8efd89754d85d7b1c08b454f5c07ac2e6cc2e9bd4bd05b/rpds_py-0.29.0-cp313-cp313-win_arm64.whl", hash = "sha256:56838e1cd9174dc23c5691ee29f1d1be9eab357f27efef6bded1328b23e1ced2", size = 229994, upload-time = "2025-11-16T14:49:02.673Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/52/91/5de91c5ec7d41759beec9b251630824dbb8e32d20c3756da1a9a9d309709/rpds_py-0.29.0-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:37d94eadf764d16b9a04307f2ab1d7af6dc28774bbe0535c9323101e14877b4c", size = 365886, upload-time = "2025-11-16T14:49:04.133Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/85/7c/415d8c1b016d5f47ecec5145d9d6d21002d39dce8761b30f6c88810b455a/rpds_py-0.29.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:d472cf73efe5726a067dce63eebe8215b14beabea7c12606fd9994267b3cfe2b", size = 355262, upload-time = "2025-11-16T14:49:05.543Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/3d/14/bf83e2daa4f980e4dc848aed9299792a8b84af95e12541d9e7562f84a6ef/rpds_py-0.29.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:72fdfd5ff8992e4636621826371e3ac5f3e3b8323e9d0e48378e9c13c3dac9d0", size = 384826, upload-time = "2025-11-16T14:49:07.301Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/33/b8/53330c50a810ae22b4fbba5e6cf961b68b9d72d9bd6780a7c0a79b070857/rpds_py-0.29.0-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2549d833abdf8275c901313b9e8ff8fba57e50f6a495035a2a4e30621a2f7cc4", size = 394234, upload-time = "2025-11-16T14:49:08.782Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/cc/32/01e2e9645cef0e584f518cfde4567563e57db2257244632b603f61b40e50/rpds_py-0.29.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4448dad428f28a6a767c3e3b80cde3446a22a0efbddaa2360f4bb4dc836d0688", size = 520008, upload-time = "2025-11-16T14:49:10.253Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/98/c3/0d1b95a81affae2b10f950782e33a1fd2edd6ce2a479966cac98c9a66f57/rpds_py-0.29.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:115f48170fd4296a33938d8c11f697f5f26e0472e43d28f35624764173a60e4d", size = 409569, upload-time = "2025-11-16T14:49:12.478Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/fa/60/aa3b8678f3f009f675b99174fa2754302a7fbfe749162e8043d111de2d88/rpds_py-0.29.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8e5bb73ffc029820f4348e9b66b3027493ae00bca6629129cd433fd7a76308ee", size = 385188, upload-time = "2025-11-16T14:49:13.88Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/92/02/5546c1c8aa89c18d40c1fcffdcc957ba730dee53fb7c3ca3a46f114761d2/rpds_py-0.29.0-cp313-cp313t-manylinux_2_31_riscv64.whl", hash = "sha256:b1581fcde18fcdf42ea2403a16a6b646f8eb1e58d7f90a0ce693da441f76942e", size = 398587, upload-time = "2025-11-16T14:49:15.339Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/6c/e0/ad6eeaf47e236eba052fa34c4073078b9e092bd44da6bbb35aaae9580669/rpds_py-0.29.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:16e9da2bda9eb17ea318b4c335ec9ac1818e88922cbe03a5743ea0da9ecf74fb", size = 416641, upload-time = "2025-11-16T14:49:16.832Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/1a/93/0acedfd50ad9cdd3879c615a6dc8c5f1ce78d2fdf8b87727468bb5bb4077/rpds_py-0.29.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:28fd300326dd21198f311534bdb6d7e989dd09b3418b3a91d54a0f384c700967", size = 566683, upload-time = "2025-11-16T14:49:18.342Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/62/53/8c64e0f340a9e801459fc6456821abc15b3582cb5dc3932d48705a9d9ac7/rpds_py-0.29.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:2aba991e041d031c7939e1358f583ae405a7bf04804ca806b97a5c0e0af1ea5e", size = 592730, upload-time = "2025-11-16T14:49:19.767Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/85/ef/3109b6584f8c4b0d2490747c916df833c127ecfa82be04d9a40a376f2090/rpds_py-0.29.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:7f437026dbbc3f08c99cc41a5b2570c6e1a1ddbe48ab19a9b814254128d4ea7a", size = 557361, upload-time = "2025-11-16T14:49:21.574Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/ff/3b/61586475e82d57f01da2c16edb9115a618afe00ce86fe1b58936880b15af/rpds_py-0.29.0-cp313-cp313t-win32.whl", hash = "sha256:6e97846e9800a5d0fe7be4d008f0c93d0feeb2700da7b1f7528dabafb31dfadb", size = 211227, upload-time = "2025-11-16T14:49:23.03Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/3b/3a/12dc43f13594a54ea0c9d7e9d43002116557330e3ad45bc56097ddf266e2/rpds_py-0.29.0-cp313-cp313t-win_amd64.whl", hash = "sha256:f49196aec7c4b406495f60e6f947ad71f317a765f956d74bbd83996b9edc0352", size = 225248, upload-time = "2025-11-16T14:49:24.841Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/89/b1/0b1474e7899371d9540d3bbb2a499a3427ae1fc39c998563fe9035a1073b/rpds_py-0.29.0-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:394d27e4453d3b4d82bb85665dc1fcf4b0badc30fc84282defed71643b50e1a1", size = 363731, upload-time = "2025-11-16T14:49:26.683Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/28/12/3b7cf2068d0a334ed1d7b385a9c3c8509f4c2bcba3d4648ea71369de0881/rpds_py-0.29.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:55d827b2ae95425d3be9bc9a5838b6c29d664924f98146557f7715e331d06df8", size = 354343, upload-time = "2025-11-16T14:49:28.24Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/eb/73/5afcf8924bc02a749416eda64e17ac9c9b28f825f4737385295a0e99b0c1/rpds_py-0.29.0-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc31a07ed352e5462d3ee1b22e89285f4ce97d5266f6d1169da1142e78045626", size = 385406, upload-time = "2025-11-16T14:49:29.943Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/c8/37/5db736730662508535221737a21563591b6f43c77f2e388951c42f143242/rpds_py-0.29.0-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c4695dd224212f6105db7ea62197144230b808d6b2bba52238906a2762f1d1e7", size = 396162, upload-time = "2025-11-16T14:49:31.833Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/70/0d/491c1017d14f62ce7bac07c32768d209a50ec567d76d9f383b4cfad19b80/rpds_py-0.29.0-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcae1770b401167f8b9e1e3f566562e6966ffa9ce63639916248a9e25fa8a244", size = 517719, upload-time = "2025-11-16T14:49:33.804Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/d7/25/b11132afcb17cd5d82db173f0c8dab270ffdfaba43e5ce7a591837ae9649/rpds_py-0.29.0-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:90f30d15f45048448b8da21c41703b31c61119c06c216a1bf8c245812a0f0c17", size = 409498, upload-time = "2025-11-16T14:49:35.222Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/0f/7d/e6543cedfb2e6403a1845710a5ab0e0ccf8fc288e0b5af9a70bfe2c12053/rpds_py-0.29.0-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:44a91e0ab77bdc0004b43261a4b8cd6d6b451e8d443754cfda830002b5745b32", size = 382743, upload-time = "2025-11-16T14:49:36.704Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/75/11/a4ebc9f654293ae9fefb83b2b6be7f3253e85ea42a5db2f77d50ad19aaeb/rpds_py-0.29.0-cp314-cp314-manylinux_2_31_riscv64.whl", hash = "sha256:4aa195e5804d32c682e453b34474f411ca108e4291c6a0f824ebdc30a91c973c", size = 400317, upload-time = "2025-11-16T14:49:39.132Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/52/18/97677a60a81c7f0e5f64e51fb3f8271c5c8fcabf3a2df18e97af53d7c2bf/rpds_py-0.29.0-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7971bdb7bf4ee0f7e6f67fa4c7fbc6019d9850cc977d126904392d363f6f8318", size = 416979, upload-time = "2025-11-16T14:49:40.575Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/f0/69/28ab391a9968f6c746b2a2db181eaa4d16afaa859fedc9c2f682d19f7e18/rpds_py-0.29.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:8ae33ad9ce580c7a47452c3b3f7d8a9095ef6208e0a0c7e4e2384f9fc5bf8212", size = 567288, upload-time = "2025-11-16T14:49:42.24Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/3b/d3/0c7afdcdb830eee94f5611b64e71354ffe6ac8df82d00c2faf2bfffd1d4e/rpds_py-0.29.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:c661132ab2fb4eeede2ef69670fd60da5235209874d001a98f1542f31f2a8a94", size = 593157, upload-time = "2025-11-16T14:49:43.782Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/e2/ac/a0fcbc2feed4241cf26d32268c195eb88ddd4bd862adfc9d4b25edfba535/rpds_py-0.29.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:bb78b3a0d31ac1bde132c67015a809948db751cb4e92cdb3f0b242e430b6ed0d", size = 554741, upload-time = "2025-11-16T14:49:45.557Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/0f/f1/fcc24137c470df8588674a677f33719d5800ec053aaacd1de8a5d5d84d9e/rpds_py-0.29.0-cp314-cp314-win32.whl", hash = "sha256:f475f103488312e9bd4000bc890a95955a07b2d0b6e8884aef4be56132adbbf1", size = 215508, upload-time = "2025-11-16T14:49:47.562Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/7b/c7/1d169b2045512eac019918fc1021ea07c30e84a4343f9f344e3e0aa8c788/rpds_py-0.29.0-cp314-cp314-win_amd64.whl", hash = "sha256:b9cf2359a4fca87cfb6801fae83a76aedf66ee1254a7a151f1341632acf67f1b", size = 228125, upload-time = "2025-11-16T14:49:49.064Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/be/36/0cec88aaba70ec4a6e381c444b0d916738497d27f0c30406e3d9fcbd3bc2/rpds_py-0.29.0-cp314-cp314-win_arm64.whl", hash = "sha256:9ba8028597e824854f0f1733d8b964e914ae3003b22a10c2c664cb6927e0feb9", size = 221992, upload-time = "2025-11-16T14:49:50.777Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/b1/fa/a2e524631717c9c0eb5d90d30f648cfba6b731047821c994acacb618406c/rpds_py-0.29.0-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:e71136fd0612556b35c575dc2726ae04a1669e6a6c378f2240312cf5d1a2ab10", size = 366425, upload-time = "2025-11-16T14:49:52.691Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/a2/a4/6d43ebe0746ff694a30233f63f454aed1677bd50ab7a59ff6b2bb5ac61f2/rpds_py-0.29.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:76fe96632d53f3bf0ea31ede2f53bbe3540cc2736d4aec3b3801b0458499ef3a", size = 355282, upload-time = "2025-11-16T14:49:54.292Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/fa/a7/52fd8270e0320b09eaf295766ae81dd175f65394687906709b3e75c71d06/rpds_py-0.29.0-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9459a33f077130dbb2c7c3cea72ee9932271fb3126404ba2a2661e4fe9eb7b79", size = 384968, upload-time = "2025-11-16T14:49:55.857Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/f4/7d/e6bc526b7a14e1ef80579a52c1d4ad39260a058a51d66c6039035d14db9d/rpds_py-0.29.0-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5c9546cfdd5d45e562cc0444b6dddc191e625c62e866bf567a2c69487c7ad28a", size = 394714, upload-time = "2025-11-16T14:49:57.343Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/c0/3f/f0ade3954e7db95c791e7eaf978aa7e08a756d2046e8bdd04d08146ed188/rpds_py-0.29.0-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:12597d11d97b8f7e376c88929a6e17acb980e234547c92992f9f7c058f1a7310", size = 520136, upload-time = "2025-11-16T14:49:59.162Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/87/b3/07122ead1b97009715ab9d4082be6d9bd9546099b2b03fae37c3116f72be/rpds_py-0.29.0-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28de03cf48b8a9e6ec10318f2197b83946ed91e2891f651a109611be4106ac4b", size = 409250, upload-time = "2025-11-16T14:50:00.698Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/c9/c6/dcbee61fd1dc892aedcb1b489ba661313101aa82ec84b1a015d4c63ebfda/rpds_py-0.29.0-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd7951c964069039acc9d67a8ff1f0a7f34845ae180ca542b17dc1456b1f1808", size = 384940, upload-time = "2025-11-16T14:50:02.312Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/47/11/914ecb6f3574cf9bf8b38aced4063e0f787d6e1eb30b181a7efbc6c1da9a/rpds_py-0.29.0-cp314-cp314t-manylinux_2_31_riscv64.whl", hash = "sha256:c07d107b7316088f1ac0177a7661ca0c6670d443f6fe72e836069025e6266761", size = 399392, upload-time = "2025-11-16T14:50:03.829Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/f5/fd/2f4bd9433f58f816434bb934313584caa47dbc6f03ce5484df8ac8980561/rpds_py-0.29.0-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1de2345af363d25696969befc0c1688a6cb5e8b1d32b515ef84fc245c6cddba3", size = 416796, upload-time = "2025-11-16T14:50:05.558Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/79/a5/449f0281af33efa29d5c71014399d74842342ae908d8cd38260320167692/rpds_py-0.29.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:00e56b12d2199ca96068057e1ae7f9998ab6e99cda82431afafd32f3ec98cca9", size = 566843, upload-time = "2025-11-16T14:50:07.243Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/ab/32/0a6a1ccee2e37fcb1b7ba9afde762b77182dbb57937352a729c6cd3cf2bb/rpds_py-0.29.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:3919a3bbecee589300ed25000b6944174e07cd20db70552159207b3f4bbb45b8", size = 593956, upload-time = "2025-11-16T14:50:09.029Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/4a/3d/eb820f95dce4306f07a495ede02fb61bef36ea201d9137d4fcd5ab94ec1e/rpds_py-0.29.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:e7fa2ccc312bbd91e43aa5e0869e46bc03278a3dddb8d58833150a18b0f0283a", size = 557288, upload-time = "2025-11-16T14:50:10.73Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/e9/f8/b8ff786f40470462a252918e0836e0db903c28e88e3eec66bc4a7856ee5d/rpds_py-0.29.0-cp314-cp314t-win32.whl", hash = "sha256:97c817863ffc397f1e6a6e9d2d89fe5408c0a9922dac0329672fb0f35c867ea5", size = 211382, upload-time = "2025-11-16T14:50:12.827Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/c9/7f/1a65ae870bc9d0576aebb0c501ea5dccf1ae2178fe2821042150ebd2e707/rpds_py-0.29.0-cp314-cp314t-win_amd64.whl", hash = "sha256:2023473f444752f0f82a58dfcbee040d0a1b3d1b3c2ec40e884bd25db6d117d2", size = 225919, upload-time = "2025-11-16T14:50:14.734Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "ruff"
                                                                  +version = "0.14.13"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/50/0a/1914efb7903174b381ee2ffeebb4253e729de57f114e63595114c8ca451f/ruff-0.14.13.tar.gz", hash = "sha256:83cd6c0763190784b99650a20fec7633c59f6ebe41c5cc9d45ee42749563ad47", size = 6059504, upload-time = "2026-01-15T20:15:16.918Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/c3/ae/0deefbc65ca74b0ab1fd3917f94dc3b398233346a74b8bbb0a916a1a6bf6/ruff-0.14.13-py3-none-linux_armv6l.whl", hash = "sha256:76f62c62cd37c276cb03a275b198c7c15bd1d60c989f944db08a8c1c2dbec18b", size = 13062418, upload-time = "2026-01-15T20:14:50.779Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/47/df/5916604faa530a97a3c154c62a81cb6b735c0cb05d1e26d5ad0f0c8ac48a/ruff-0.14.13-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:914a8023ece0528d5cc33f5a684f5f38199bbb566a04815c2c211d8f40b5d0ed", size = 13442344, upload-time = "2026-01-15T20:15:07.94Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/4c/f3/e0e694dd69163c3a1671e102aa574a50357536f18a33375050334d5cd517/ruff-0.14.13-py3-none-macosx_11_0_arm64.whl", hash = "sha256:d24899478c35ebfa730597a4a775d430ad0d5631b8647a3ab368c29b7e7bd063", size = 12354720, upload-time = "2026-01-15T20:15:09.854Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/c3/e8/67f5fcbbaee25e8fc3b56cc33e9892eca7ffe09f773c8e5907757a7e3bdb/ruff-0.14.13-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9aaf3870f14d925bbaf18b8a2347ee0ae7d95a2e490e4d4aea6813ed15ebc80e", size = 12774493, upload-time = "2026-01-15T20:15:20.908Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/6b/ce/d2e9cb510870b52a9565d885c0d7668cc050e30fa2c8ac3fb1fda15c083d/ruff-0.14.13-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ac5b7f63dd3b27cc811850f5ffd8fff845b00ad70e60b043aabf8d6ecc304e09", size = 12815174, upload-time = "2026-01-15T20:15:05.74Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/88/00/c38e5da58beebcf4fa32d0ddd993b63dfacefd02ab7922614231330845bf/ruff-0.14.13-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:78d2b1097750d90ba82ce4ba676e85230a0ed694178ca5e61aa9b459970b3eb9", size = 13680909, upload-time = "2026-01-15T20:15:14.537Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/61/61/cd37c9dd5bd0a3099ba79b2a5899ad417d8f3b04038810b0501a80814fd7/ruff-0.14.13-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:7d0bf87705acbbcb8d4c24b2d77fbb73d40210a95c3903b443cd9e30824a5032", size = 15144215, upload-time = "2026-01-15T20:15:22.886Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/56/8a/85502d7edbf98c2df7b8876f316c0157359165e16cdf98507c65c8d07d3d/ruff-0.14.13-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a3eb5da8e2c9e9f13431032fdcbe7681de9ceda5835efee3269417c13f1fed5c", size = 14706067, upload-time = "2026-01-15T20:14:48.271Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/7e/2f/de0df127feb2ee8c1e54354dc1179b4a23798f0866019528c938ba439aca/ruff-0.14.13-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:642442b42957093811cd8d2140dfadd19c7417030a7a68cf8d51fcdd5f217427", size = 14133916, upload-time = "2026-01-15T20:14:57.357Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/0d/77/9b99686bb9fe07a757c82f6f95e555c7a47801a9305576a9c67e0a31d280/ruff-0.14.13-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4acdf009f32b46f6e8864af19cbf6841eaaed8638e65c8dac845aea0d703c841", size = 13859207, upload-time = "2026-01-15T20:14:55.111Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/7d/46/2bdcb34a87a179a4d23022d818c1c236cb40e477faf0d7c9afb6813e5876/ruff-0.14.13-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:591a7f68860ea4e003917d19b5c4f5ac39ff558f162dc753a2c5de897fd5502c", size = 14043686, upload-time = "2026-01-15T20:14:52.841Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/1a/a9/5c6a4f56a0512c691cf143371bcf60505ed0f0860f24a85da8bd123b2bf1/ruff-0.14.13-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:774c77e841cc6e046fc3e91623ce0903d1cd07e3a36b1a9fe79b81dab3de506b", size = 12663837, upload-time = "2026-01-15T20:15:18.921Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/fe/bb/b920016ece7651fa7fcd335d9d199306665486694d4361547ccb19394c44/ruff-0.14.13-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:61f4e40077a1248436772bb6512db5fc4457fe4c49e7a94ea7c5088655dd21ae", size = 12805867, upload-time = "2026-01-15T20:14:59.272Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/7d/b3/0bd909851e5696cd21e32a8fc25727e5f58f1934b3596975503e6e85415c/ruff-0.14.13-py3-none-musllinux_1_2_i686.whl", hash = "sha256:6d02f1428357fae9e98ac7aa94b7e966fd24151088510d32cf6f902d6c09235e", size = 13208528, upload-time = "2026-01-15T20:15:03.732Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/3b/3b/e2d94cb613f6bbd5155a75cbe072813756363eba46a3f2177a1fcd0cd670/ruff-0.14.13-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:e399341472ce15237be0c0ae5fbceca4b04cd9bebab1a2b2c979e015455d8f0c", size = 13929242, upload-time = "2026-01-15T20:15:11.918Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/6a/c5/abd840d4132fd51a12f594934af5eba1d5d27298a6f5b5d6c3be45301caf/ruff-0.14.13-py3-none-win32.whl", hash = "sha256:ef720f529aec113968b45dfdb838ac8934e519711da53a0456038a0efecbd680", size = 12919024, upload-time = "2026-01-15T20:14:43.647Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/c2/55/6384b0b8ce731b6e2ade2b5449bf07c0e4c31e8a2e68ea65b3bafadcecc5/ruff-0.14.13-py3-none-win_amd64.whl", hash = "sha256:6070bd026e409734b9257e03e3ef18c6e1a216f0435c6751d7a8ec69cb59abef", size = 14097887, upload-time = "2026-01-15T20:15:01.48Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/4d/e1/7348090988095e4e39560cfc2f7555b1b2a7357deba19167b600fdf5215d/ruff-0.14.13-py3-none-win_arm64.whl", hash = "sha256:7ab819e14f1ad9fe39f246cfcc435880ef7a9390d81a2b6ac7e01039083dd247", size = 13080224, upload-time = "2026-01-15T20:14:45.853Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "s3fs"
                                                                  +version = "2025.12.0"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +dependencies = [
                                                                  +    { name = "aiobotocore" },
                                                                  +    { name = "aiohttp" },
                                                                  +    { name = "fsspec" },
                                                                  +]
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/cf/26/fff848df6a76d6fec20208e61548244639c46a741e296244c3404d6e7df0/s3fs-2025.12.0.tar.gz", hash = "sha256:8612885105ce14d609c5b807553f9f9956b45541576a17ff337d9435ed3eb01f", size = 81217, upload-time = "2025-12-03T15:34:04.754Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/44/8c/04797ebb53748b4d594d4c334b2d9a99f2d2e06e19ad505f1313ca5d56eb/s3fs-2025.12.0-py3-none-any.whl", hash = "sha256:89d51e0744256baad7ae5410304a368ca195affd93a07795bc8ba9c00c9effbb", size = 30726, upload-time = "2025-12-03T15:34:03.576Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "s3transfer"
                                                                  +version = "0.15.0"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +dependencies = [
                                                                  +    { name = "botocore" },
                                                                  +]
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/ca/bb/940d6af975948c1cc18f44545ffb219d3c35d78ec972b42ae229e8e37e08/s3transfer-0.15.0.tar.gz", hash = "sha256:d36fac8d0e3603eff9b5bfa4282c7ce6feb0301a633566153cbd0b93d11d8379", size = 152185, upload-time = "2025-11-20T20:28:56.327Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/5f/e1/5ef25f52973aa12a19cf4e1375d00932d7fb354ffd310487ba7d44225c1a/s3transfer-0.15.0-py3-none-any.whl", hash = "sha256:6f8bf5caa31a0865c4081186689db1b2534cef721d104eb26101de4b9d6a5852", size = 85984, upload-time = "2025-11-20T20:28:55.046Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "schemamodels"
                                                                  +version = "0.9.1"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/af/4e/5a0a31c8ba009b50189becd114bc8aaefda2d6e82c96d0f7196d42797f54/schemamodels-0.9.1.tar.gz", hash = "sha256:2816e9612f86b9368aeae57c5374b2008a2737b32ade5138463027e5a3e90144", size = 15980, upload-time = "2024-02-24T23:57:54.697Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/54/5e/5a1cd38db9cbf2f9422214323e16855e5d399746b866a48b93b2fdfa979b/schemamodels-0.9.1-py3-none-any.whl", hash = "sha256:2f675fb538d8e3f90c91b1b1b0e2fe9914a888fd6c7e3449dfe887d8c581aa42", size = 32167, upload-time = "2024-02-24T23:57:50.973Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "send2trash"
                                                                  +version = "1.8.3"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/fd/3a/aec9b02217bb79b87bbc1a21bc6abc51e3d5dcf65c30487ac96c0908c722/Send2Trash-1.8.3.tar.gz", hash = "sha256:b18e7a3966d99871aefeb00cfbcfdced55ce4871194810fc71f4aa484b953abf", size = 17394, upload-time = "2024-04-07T00:01:09.267Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/40/b0/4562db6223154aa4e22f939003cb92514c79f3d4dccca3444253fd17f902/Send2Trash-1.8.3-py3-none-any.whl", hash = "sha256:0c31227e0bd08961c7665474a3d1ef7193929fedda4233843689baa056be46c9", size = 18072, upload-time = "2024-04-07T00:01:07.438Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "setuptools"
                                                                  +version = "80.9.0"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/18/5d/3bf57dcd21979b887f014ea83c24ae194cfcd12b9e0fda66b957c69d1fca/setuptools-80.9.0.tar.gz", hash = "sha256:f36b47402ecde768dbfafc46e8e4207b4360c654f1f3bb84475f0a28628fb19c", size = 1319958, upload-time = "2025-05-27T00:56:51.443Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/a3/dc/17031897dae0efacfea57dfd3a82fdd2a2aeb58e0ff71b77b87e44edc772/setuptools-80.9.0-py3-none-any.whl", hash = "sha256:062d34222ad13e0cc312a4c02d73f059e86a4acbfbdea8f8f76b28c99f306922", size = 1201486, upload-time = "2025-05-27T00:56:49.664Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "six"
                                                                  +version = "1.17.0"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/94/e7/b2c673351809dca68a0e064b6af791aa332cf192da575fd474ed7d6f16a2/six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81", size = 34031, upload-time = "2024-12-04T17:35:28.174Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050, upload-time = "2024-12-04T17:35:26.475Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "sniffio"
                                                                  +version = "1.3.1"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372, upload-time = "2024-02-25T23:20:04.057Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235, upload-time = "2024-02-25T23:20:01.196Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "soupsieve"
                                                                  +version = "2.8"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/6d/e6/21ccce3262dd4889aa3332e5a119a3491a95e8f60939870a3a035aabac0d/soupsieve-2.8.tar.gz", hash = "sha256:e2dd4a40a628cb5f28f6d4b0db8800b8f581b65bb380b97de22ba5ca8d72572f", size = 103472, upload-time = "2025-08-27T15:39:51.78Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/14/a0/bb38d3b76b8cae341dad93a2dd83ab7462e6dbcdd84d43f54ee60a8dc167/soupsieve-2.8-py3-none-any.whl", hash = "sha256:0cc76456a30e20f5d7f2e14a98a4ae2ee4e5abdc7c5ea0aafe795f344bc7984c", size = 36679, upload-time = "2025-08-27T15:39:50.179Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "sphobjinv"
                                                                  +version = "2.3.1.3"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +dependencies = [
                                                                  +    { name = "attrs" },
                                                                  +    { name = "certifi" },
                                                                  +    { name = "jsonschema" },
                                                                  +]
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/77/9a/4887ebe7b46f4669a896dc286a3ac559101d2ceadbbea4614472960c2222/sphobjinv-2.3.1.3.tar.gz", hash = "sha256:a1d51e4cf3d968b9e0d3ed1cbccea0071e5e5795f24a2d7401a4e37d6bd75717", size = 268835, upload-time = "2025-05-26T15:18:16.994Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/63/f9/f48a8f489c8ae8930f12c558b4dd26da96791837747fca87e9da2643f12d/sphobjinv-2.3.1.3-py3-none-any.whl", hash = "sha256:41fc39f6f740a707cfe5b24c1a3a4a6e4ddbdd6429a59bf21f0b5ef1fddf932a", size = 50812, upload-time = "2025-05-26T15:18:10.636Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "stack-data"
                                                                  +version = "0.6.3"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +dependencies = [
                                                                  +    { name = "asttokens" },
                                                                  +    { name = "executing" },
                                                                  +    { name = "pure-eval" },
                                                                  +]
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/28/e3/55dcc2cfbc3ca9c29519eb6884dd1415ecb53b0e934862d3559ddcb7e20b/stack_data-0.6.3.tar.gz", hash = "sha256:836a778de4fec4dcd1dcd89ed8abff8a221f58308462e1c4aa2a3cf30148f0b9", size = 44707, upload-time = "2023-09-30T13:58:05.479Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/f1/7b/ce1eafaf1a76852e2ec9b22edecf1daa58175c090266e9f6c64afcd81d91/stack_data-0.6.3-py3-none-any.whl", hash = "sha256:d5558e0c25a4cb0853cddad3d77da9891a08cb85dd9f9f91b9f8cd66e511e695", size = 24521, upload-time = "2023-09-30T13:58:03.53Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "tabulate"
                                                                  +version = "0.9.0"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/ec/fe/802052aecb21e3797b8f7902564ab6ea0d60ff8ca23952079064155d1ae1/tabulate-0.9.0.tar.gz", hash = "sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c", size = 81090, upload-time = "2022-10-06T17:21:48.54Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/40/44/4a5f08c96eb108af5cb50b41f76142f0afa346dfa99d5296fe7202a11854/tabulate-0.9.0-py3-none-any.whl", hash = "sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f", size = 35252, upload-time = "2022-10-06T17:21:44.262Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "terminado"
                                                                  +version = "0.18.1"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +dependencies = [
                                                                  +    { name = "ptyprocess", marker = "os_name != 'nt'" },
                                                                  +    { name = "pywinpty", marker = "os_name == 'nt'" },
                                                                  +    { name = "tornado" },
                                                                  +]
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/8a/11/965c6fd8e5cc254f1fe142d547387da17a8ebfd75a3455f637c663fb38a0/terminado-0.18.1.tar.gz", hash = "sha256:de09f2c4b85de4765f7714688fff57d3e75bad1f909b589fde880460c753fd2e", size = 32701, upload-time = "2024-03-12T14:34:39.026Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/6a/9e/2064975477fdc887e47ad42157e214526dcad8f317a948dee17e1659a62f/terminado-0.18.1-py3-none-any.whl", hash = "sha256:a4468e1b37bb318f8a86514f65814e1afc977cf29b3992a4500d9dd305dcceb0", size = 14154, upload-time = "2024-03-12T14:34:36.569Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "tinycss2"
                                                                  +version = "1.4.0"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +dependencies = [
                                                                  +    { name = "webencodings" },
                                                                  +]
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/7a/fd/7a5ee21fd08ff70d3d33a5781c255cbe779659bd03278feb98b19ee550f4/tinycss2-1.4.0.tar.gz", hash = "sha256:10c0972f6fc0fbee87c3edb76549357415e94548c1ae10ebccdea16fb404a9b7", size = 87085, upload-time = "2024-10-24T14:58:29.895Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/e6/34/ebdc18bae6aa14fbee1a08b63c015c72b64868ff7dae68808ab500c492e2/tinycss2-1.4.0-py3-none-any.whl", hash = "sha256:3a49cf47b7675da0b15d0c6e1df8df4ebd96e9394bb905a5775adb0d884c5289", size = 26610, upload-time = "2024-10-24T14:58:28.029Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "tornado"
                                                                  +version = "6.5.2"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/09/ce/1eb500eae19f4648281bb2186927bb062d2438c2e5093d1360391afd2f90/tornado-6.5.2.tar.gz", hash = "sha256:ab53c8f9a0fa351e2c0741284e06c7a45da86afb544133201c5cc8578eb076a0", size = 510821, upload-time = "2025-08-08T18:27:00.78Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/f6/48/6a7529df2c9cc12efd2e8f5dd219516184d703b34c06786809670df5b3bd/tornado-6.5.2-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:2436822940d37cde62771cff8774f4f00b3c8024fe482e16ca8387b8a2724db6", size = 442563, upload-time = "2025-08-08T18:26:42.945Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/f2/b5/9b575a0ed3e50b00c40b08cbce82eb618229091d09f6d14bce80fc01cb0b/tornado-6.5.2-cp39-abi3-macosx_10_9_x86_64.whl", hash = "sha256:583a52c7aa94ee046854ba81d9ebb6c81ec0fd30386d96f7640c96dad45a03ef", size = 440729, upload-time = "2025-08-08T18:26:44.473Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/1b/4e/619174f52b120efcf23633c817fd3fed867c30bff785e2cd5a53a70e483c/tornado-6.5.2-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b0fe179f28d597deab2842b86ed4060deec7388f1fd9c1b4a41adf8af058907e", size = 444295, upload-time = "2025-08-08T18:26:46.021Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/95/fa/87b41709552bbd393c85dd18e4e3499dcd8983f66e7972926db8d96aa065/tornado-6.5.2-cp39-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b186e85d1e3536d69583d2298423744740986018e393d0321df7340e71898882", size = 443644, upload-time = "2025-08-08T18:26:47.625Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/f9/41/fb15f06e33d7430ca89420283a8762a4e6b8025b800ea51796ab5e6d9559/tornado-6.5.2-cp39-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e792706668c87709709c18b353da1f7662317b563ff69f00bab83595940c7108", size = 443878, upload-time = "2025-08-08T18:26:50.599Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/11/92/fe6d57da897776ad2e01e279170ea8ae726755b045fe5ac73b75357a5a3f/tornado-6.5.2-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:06ceb1300fd70cb20e43b1ad8aaee0266e69e7ced38fa910ad2e03285009ce7c", size = 444549, upload-time = "2025-08-08T18:26:51.864Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/9b/02/c8f4f6c9204526daf3d760f4aa555a7a33ad0e60843eac025ccfd6ff4a93/tornado-6.5.2-cp39-abi3-musllinux_1_2_i686.whl", hash = "sha256:74db443e0f5251be86cbf37929f84d8c20c27a355dd452a5cfa2aada0d001ec4", size = 443973, upload-time = "2025-08-08T18:26:53.625Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/ae/2d/f5f5707b655ce2317190183868cd0f6822a1121b4baeae509ceb9590d0bd/tornado-6.5.2-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:b5e735ab2889d7ed33b32a459cac490eda71a1ba6857b0118de476ab6c366c04", size = 443954, upload-time = "2025-08-08T18:26:55.072Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/e8/59/593bd0f40f7355806bf6573b47b8c22f8e1374c9b6fd03114bd6b7a3dcfd/tornado-6.5.2-cp39-abi3-win32.whl", hash = "sha256:c6f29e94d9b37a95013bb669616352ddb82e3bfe8326fccee50583caebc8a5f0", size = 445023, upload-time = "2025-08-08T18:26:56.677Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/c7/2a/f609b420c2f564a748a2d80ebfb2ee02a73ca80223af712fca591386cafb/tornado-6.5.2-cp39-abi3-win_amd64.whl", hash = "sha256:e56a5af51cc30dd2cae649429af65ca2f6571da29504a07995175df14c18f35f", size = 445427, upload-time = "2025-08-08T18:26:57.91Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/5e/4f/e1f65e8f8c76d73658b33d33b81eed4322fb5085350e4328d5c956f0c8f9/tornado-6.5.2-cp39-abi3-win_arm64.whl", hash = "sha256:d6c33dc3672e3a1f3618eb63b7ef4683a7688e7b9e6e8f0d9aa5726360a004af", size = 444456, upload-time = "2025-08-08T18:26:59.207Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "tqdm"
                                                                  +version = "4.67.1"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +dependencies = [
                                                                  +    { name = "colorama", marker = "sys_platform == 'win32'" },
                                                                  +]
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/a8/4b/29b4ef32e036bb34e4ab51796dd745cdba7ed47ad142a9f4a1eb8e0c744d/tqdm-4.67.1.tar.gz", hash = "sha256:f8aef9c52c08c13a65f30ea34f4e5aac3fd1a34959879d7e59e63027286627f2", size = 169737, upload-time = "2024-11-24T20:12:22.481Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/d0/30/dc54f88dd4a2b5dc8a0279bdd7270e735851848b762aeb1c1184ed1f6b14/tqdm-4.67.1-py3-none-any.whl", hash = "sha256:26445eca388f82e72884e0d580d5464cd801a3ea01e63e5601bdff9ba6a48de2", size = 78540, upload-time = "2024-11-24T20:12:19.698Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "traitlets"
                                                                  +version = "5.14.3"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/eb/79/72064e6a701c2183016abbbfedaba506d81e30e232a68c9f0d6f6fcd1574/traitlets-5.14.3.tar.gz", hash = "sha256:9ed0579d3502c94b4b3732ac120375cda96f923114522847de4b3bb98b96b6b7", size = 161621, upload-time = "2024-04-19T11:11:49.746Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/00/c0/8f5d070730d7836adc9c9b6408dec68c6ced86b304a9b26a14df072a6e8c/traitlets-5.14.3-py3-none-any.whl", hash = "sha256:b74e89e397b1ed28cc831db7aea759ba6640cb3de13090ca145426688ff1ac4f", size = 85359, upload-time = "2024-04-19T11:11:46.763Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "types-cffi"
                                                                  +version = "1.17.0.20250915"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +dependencies = [
                                                                  +    { name = "types-setuptools" },
                                                                  +]
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/2a/98/ea454cea03e5f351323af6a482c65924f3c26c515efd9090dede58f2b4b6/types_cffi-1.17.0.20250915.tar.gz", hash = "sha256:4362e20368f78dabd5c56bca8004752cc890e07a71605d9e0d9e069dbaac8c06", size = 17229, upload-time = "2025-09-15T03:01:25.31Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/aa/ec/092f2b74b49ec4855cdb53050deb9699f7105b8fda6fe034c0781b8687f3/types_cffi-1.17.0.20250915-py3-none-any.whl", hash = "sha256:cef4af1116c83359c11bb4269283c50f0688e9fc1d7f0eeb390f3661546da52c", size = 20112, upload-time = "2025-09-15T03:01:24.187Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "types-pyopenssl"
                                                                  +version = "24.1.0.20240722"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +dependencies = [
                                                                  +    { name = "cryptography" },
                                                                  +    { name = "types-cffi" },
                                                                  +]
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/93/29/47a346550fd2020dac9a7a6d033ea03fccb92fa47c726056618cc889745e/types-pyOpenSSL-24.1.0.20240722.tar.gz", hash = "sha256:47913b4678a01d879f503a12044468221ed8576263c1540dcb0484ca21b08c39", size = 8458, upload-time = "2024-07-22T02:32:22.558Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/98/05/c868a850b6fbb79c26f5f299b768ee0adc1f9816d3461dcf4287916f655b/types_pyOpenSSL-24.1.0.20240722-py3-none-any.whl", hash = "sha256:6a7a5d2ec042537934cfb4c9d4deb0e16c4c6250b09358df1f083682fe6fda54", size = 7499, upload-time = "2024-07-22T02:32:21.232Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "types-redis"
                                                                  +version = "4.6.0.20241004"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +dependencies = [
                                                                  +    { name = "cryptography" },
                                                                  +    { name = "types-pyopenssl" },
                                                                  +]
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/3a/95/c054d3ac940e8bac4ca216470c80c26688a0e79e09f520a942bb27da3386/types-redis-4.6.0.20241004.tar.gz", hash = "sha256:5f17d2b3f9091ab75384153bfa276619ffa1cf6a38da60e10d5e6749cc5b902e", size = 49679, upload-time = "2024-10-04T02:43:59.224Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/55/82/7d25dce10aad92d2226b269bce2f85cfd843b4477cd50245d7d40ecf8f89/types_redis-4.6.0.20241004-py3-none-any.whl", hash = "sha256:ef5da68cb827e5f606c8f9c0b49eeee4c2669d6d97122f301d3a55dc6a63f6ed", size = 58737, upload-time = "2024-10-04T02:43:57.968Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "types-setuptools"
                                                                  +version = "80.9.0.20250822"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/19/bd/1e5f949b7cb740c9f0feaac430e301b8f1c5f11a81e26324299ea671a237/types_setuptools-80.9.0.20250822.tar.gz", hash = "sha256:070ea7716968ec67a84c7f7768d9952ff24d28b65b6594797a464f1b3066f965", size = 41296, upload-time = "2025-08-22T03:02:08.771Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/b6/2d/475bf15c1cdc172e7a0d665b6e373ebfb1e9bf734d3f2f543d668b07a142/types_setuptools-80.9.0.20250822-py3-none-any.whl", hash = "sha256:53bf881cb9d7e46ed12c76ef76c0aaf28cfe6211d3fab12e0b83620b1a8642c3", size = 63179, upload-time = "2025-08-22T03:02:07.643Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "typing-extensions"
                                                                  +version = "4.15.0"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391, upload-time = "2025-08-25T13:49:26.313Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = "2025-08-25T13:49:24.86Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "typing-inspection"
                                                                  +version = "0.4.2"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +dependencies = [
                                                                  +    { name = "typing-extensions" },
                                                                  +]
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/55/e3/70399cb7dd41c10ac53367ae42139cf4b1ca5f36bb3dc6c9d33acdb43655/typing_inspection-0.4.2.tar.gz", hash = "sha256:ba561c48a67c5958007083d386c3295464928b01faa735ab8547c5692e87f464", size = 75949, upload-time = "2025-10-01T02:14:41.687Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/dc/9b/47798a6c91d8bdb567fe2698fe81e0c6b7cb7ef4d13da4114b41d239f65d/typing_inspection-0.4.2-py3-none-any.whl", hash = "sha256:4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7", size = 14611, upload-time = "2025-10-01T02:14:40.154Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "tzdata"
                                                                  +version = "2025.2"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/95/32/1a225d6164441be760d75c2c42e2780dc0873fe382da3e98a2e1e48361e5/tzdata-2025.2.tar.gz", hash = "sha256:b60a638fcc0daffadf82fe0f57e53d06bdec2f36c4df66280ae79bce6bd6f2b9", size = 196380, upload-time = "2025-03-23T13:54:43.652Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/5c/23/c7abc0ca0a1526a0774eca151daeb8de62ec457e77262b66b359c3c7679e/tzdata-2025.2-py2.py3-none-any.whl", hash = "sha256:1a403fada01ff9221ca8044d701868fa132215d84beb92242d9acd2147f667a8", size = 347839, upload-time = "2025-03-23T13:54:41.845Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "uri-template"
                                                                  +version = "1.3.0"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/31/c7/0336f2bd0bcbada6ccef7aaa25e443c118a704f828a0620c6fa0207c1b64/uri-template-1.3.0.tar.gz", hash = "sha256:0e00f8eb65e18c7de20d595a14336e9f337ead580c70934141624b6d1ffdacc7", size = 21678, upload-time = "2023-06-21T01:49:05.374Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/e7/00/3fca040d7cf8a32776d3d81a00c8ee7457e00f80c649f1e4a863c8321ae9/uri_template-1.3.0-py3-none-any.whl", hash = "sha256:a44a133ea12d44a0c0f06d7d42a52d71282e77e2f937d8abd5655b8d56fc1363", size = 11140, upload-time = "2023-06-21T01:49:03.467Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "urllib3"
                                                                  +version = "2.5.0"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/15/22/9ee70a2574a4f4599c47dd506532914ce044817c7752a79b6a51286319bc/urllib3-2.5.0.tar.gz", hash = "sha256:3fc47733c7e419d4bc3f6b3dc2b4f890bb743906a30d56ba4a5bfa4bbff92760", size = 393185, upload-time = "2025-06-18T14:07:41.644Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/a7/c2/fe1e52489ae3122415c51f387e221dd0773709bad6c6cdaa599e8a2c5185/urllib3-2.5.0-py3-none-any.whl", hash = "sha256:e6b01673c0fa6a13e374b50871808eb3bf7046c4b125b216f6bf1cc604cff0dc", size = 129795, upload-time = "2025-06-18T14:07:40.39Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "watchdog"
                                                                  +version = "6.0.0"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/db/7d/7f3d619e951c88ed75c6037b246ddcf2d322812ee8ea189be89511721d54/watchdog-6.0.0.tar.gz", hash = "sha256:9ddf7c82fda3ae8e24decda1338ede66e1c99883db93711d8fb941eaa2d8c282", size = 131220, upload-time = "2024-11-01T14:07:13.037Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/39/ea/3930d07dafc9e286ed356a679aa02d777c06e9bfd1164fa7c19c288a5483/watchdog-6.0.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:bdd4e6f14b8b18c334febb9c4425a878a2ac20efd1e0b231978e7b150f92a948", size = 96471, upload-time = "2024-11-01T14:06:37.745Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/12/87/48361531f70b1f87928b045df868a9fd4e253d9ae087fa4cf3f7113be363/watchdog-6.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:c7c15dda13c4eb00d6fb6fc508b3c0ed88b9d5d374056b239c4ad1611125c860", size = 88449, upload-time = "2024-11-01T14:06:39.748Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/5b/7e/8f322f5e600812e6f9a31b75d242631068ca8f4ef0582dd3ae6e72daecc8/watchdog-6.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6f10cb2d5902447c7d0da897e2c6768bca89174d0c6e1e30abec5421af97a5b0", size = 89054, upload-time = "2024-11-01T14:06:41.009Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/68/98/b0345cabdce2041a01293ba483333582891a3bd5769b08eceb0d406056ef/watchdog-6.0.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:490ab2ef84f11129844c23fb14ecf30ef3d8a6abafd3754a6f75ca1e6654136c", size = 96480, upload-time = "2024-11-01T14:06:42.952Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/85/83/cdf13902c626b28eedef7ec4f10745c52aad8a8fe7eb04ed7b1f111ca20e/watchdog-6.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:76aae96b00ae814b181bb25b1b98076d5fc84e8a53cd8885a318b42b6d3a5134", size = 88451, upload-time = "2024-11-01T14:06:45.084Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/fe/c4/225c87bae08c8b9ec99030cd48ae9c4eca050a59bf5c2255853e18c87b50/watchdog-6.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a175f755fc2279e0b7312c0035d52e27211a5bc39719dd529625b1930917345b", size = 89057, upload-time = "2024-11-01T14:06:47.324Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/a9/c7/ca4bf3e518cb57a686b2feb4f55a1892fd9a3dd13f470fca14e00f80ea36/watchdog-6.0.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:7607498efa04a3542ae3e05e64da8202e58159aa1fa4acddf7678d34a35d4f13", size = 79079, upload-time = "2024-11-01T14:06:59.472Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/5c/51/d46dc9332f9a647593c947b4b88e2381c8dfc0942d15b8edc0310fa4abb1/watchdog-6.0.0-py3-none-manylinux2014_armv7l.whl", hash = "sha256:9041567ee8953024c83343288ccc458fd0a2d811d6a0fd68c4c22609e3490379", size = 79078, upload-time = "2024-11-01T14:07:01.431Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/d4/57/04edbf5e169cd318d5f07b4766fee38e825d64b6913ca157ca32d1a42267/watchdog-6.0.0-py3-none-manylinux2014_i686.whl", hash = "sha256:82dc3e3143c7e38ec49d61af98d6558288c415eac98486a5c581726e0737c00e", size = 79076, upload-time = "2024-11-01T14:07:02.568Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/ab/cc/da8422b300e13cb187d2203f20b9253e91058aaf7db65b74142013478e66/watchdog-6.0.0-py3-none-manylinux2014_ppc64.whl", hash = "sha256:212ac9b8bf1161dc91bd09c048048a95ca3a4c4f5e5d4a7d1b1a7d5752a7f96f", size = 79077, upload-time = "2024-11-01T14:07:03.893Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/2c/3b/b8964e04ae1a025c44ba8e4291f86e97fac443bca31de8bd98d3263d2fcf/watchdog-6.0.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:e3df4cbb9a450c6d49318f6d14f4bbc80d763fa587ba46ec86f99f9e6876bb26", size = 79078, upload-time = "2024-11-01T14:07:05.189Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/62/ae/a696eb424bedff7407801c257d4b1afda455fe40821a2be430e173660e81/watchdog-6.0.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:2cce7cfc2008eb51feb6aab51251fd79b85d9894e98ba847408f662b3395ca3c", size = 79077, upload-time = "2024-11-01T14:07:06.376Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/b5/e8/dbf020b4d98251a9860752a094d09a65e1b436ad181faf929983f697048f/watchdog-6.0.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:20ffe5b202af80ab4266dcd3e91aae72bf2da48c0d33bdb15c66658e685e94e2", size = 79078, upload-time = "2024-11-01T14:07:07.547Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/07/f6/d0e5b343768e8bcb4cda79f0f2f55051bf26177ecd5651f84c07567461cf/watchdog-6.0.0-py3-none-win32.whl", hash = "sha256:07df1fdd701c5d4c8e55ef6cf55b8f0120fe1aef7ef39a1c6fc6bc2e606d517a", size = 79065, upload-time = "2024-11-01T14:07:09.525Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/db/d9/c495884c6e548fce18a8f40568ff120bc3a4b7b99813081c8ac0c936fa64/watchdog-6.0.0-py3-none-win_amd64.whl", hash = "sha256:cbafb470cf848d93b5d013e2ecb245d4aa1c8fd0504e863ccefa32445359d680", size = 79070, upload-time = "2024-11-01T14:07:10.686Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/33/e8/e40370e6d74ddba47f002a32919d91310d6074130fe4e17dabcafc15cbf1/watchdog-6.0.0-py3-none-win_ia64.whl", hash = "sha256:a1914259fa9e1454315171103c6a30961236f508b9b623eae470268bbcc6a22f", size = 79067, upload-time = "2024-11-01T14:07:11.845Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "wcwidth"
                                                                  +version = "0.2.14"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/24/30/6b0809f4510673dc723187aeaf24c7f5459922d01e2f794277a3dfb90345/wcwidth-0.2.14.tar.gz", hash = "sha256:4d478375d31bc5395a3c55c40ccdf3354688364cd61c4f6adacaa9215d0b3605", size = 102293, upload-time = "2025-09-22T16:29:53.023Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/af/b5/123f13c975e9f27ab9c0770f514345bd406d0e8d3b7a0723af9d43f710af/wcwidth-0.2.14-py2.py3-none-any.whl", hash = "sha256:a7bb560c8aee30f9957e5f9895805edd20602f2d7f720186dfd906e82b4982e1", size = 37286, upload-time = "2025-09-22T16:29:51.641Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "webcolors"
                                                                  +version = "25.10.0"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/1d/7a/eb316761ec35664ea5174709a68bbd3389de60d4a1ebab8808bfc264ed67/webcolors-25.10.0.tar.gz", hash = "sha256:62abae86504f66d0f6364c2a8520de4a0c47b80c03fc3a5f1815fedbef7c19bf", size = 53491, upload-time = "2025-10-31T07:51:03.977Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/e2/cc/e097523dd85c9cf5d354f78310927f1656c422bd7b2613b2db3e3f9a0f2c/webcolors-25.10.0-py3-none-any.whl", hash = "sha256:032c727334856fc0b968f63daa252a1ac93d33db2f5267756623c210e57a4f1d", size = 14905, upload-time = "2025-10-31T07:51:01.778Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "webdataset"
                                                                  +version = "1.0.2"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +dependencies = [
                                                                  +    { name = "braceexpand" },
                                                                  +    { name = "numpy" },
                                                                  +    { name = "pyyaml" },
                                                                  +]
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/5a/3a/68800d92e065cf4750ebecf973b13979c0c929b439e1293012938862038d/webdataset-1.0.2.tar.gz", hash = "sha256:7f0498be827cfa46cc5430a58768a24e2c6a410676a61be1838f53d61afdaab4", size = 80090, upload-time = "2025-06-19T23:26:21.945Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/d9/00/aca6beb3658dab4ed3dbb41a78e6e7f31342e0b41d28088f205525751601/webdataset-1.0.2-py3-none-any.whl", hash = "sha256:3dbfced32b25c0d199c6b9787937b6f85742bc3c84f652c846893075c1c082d9", size = 74956, upload-time = "2025-06-19T23:26:20.354Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "webencodings"
                                                                  +version = "0.5.1"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/0b/02/ae6ceac1baeda530866a85075641cec12989bd8d31af6d5ab4a3e8c92f47/webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923", size = 9721, upload-time = "2017-04-05T20:21:34.189Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/f4/24/2a3e3df732393fed8b3ebf2ec078f05546de641fe1b667ee316ec1dcf3b7/webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78", size = 11774, upload-time = "2017-04-05T20:21:32.581Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "websocket-client"
                                                                  +version = "1.9.0"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/2c/41/aa4bf9664e4cda14c3b39865b12251e8e7d239f4cd0e3cc1b6c2ccde25c1/websocket_client-1.9.0.tar.gz", hash = "sha256:9e813624b6eb619999a97dc7958469217c3176312b3a16a4bd1bc7e08a46ec98", size = 70576, upload-time = "2025-10-07T21:16:36.495Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/34/db/b10e48aa8fff7407e67470363eac595018441cf32d5e1001567a7aeba5d2/websocket_client-1.9.0-py3-none-any.whl", hash = "sha256:af248a825037ef591efbf6ed20cc5faa03d3b47b9e5a2230a529eeee1c1fc3ef", size = 82616, upload-time = "2025-10-07T21:16:34.951Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "websockets"
                                                                  +version = "15.0.1"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/21/e6/26d09fab466b7ca9c7737474c52be4f76a40301b08362eb2dbc19dcc16c1/websockets-15.0.1.tar.gz", hash = "sha256:82544de02076bafba038ce055ee6412d68da13ab47f0c60cab827346de828dee", size = 177016, upload-time = "2025-03-05T20:03:41.606Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/51/6b/4545a0d843594f5d0771e86463606a3988b5a09ca5123136f8a76580dd63/websockets-15.0.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:3e90baa811a5d73f3ca0bcbf32064d663ed81318ab225ee4f427ad4e26e5aff3", size = 175437, upload-time = "2025-03-05T20:02:16.706Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/f4/71/809a0f5f6a06522af902e0f2ea2757f71ead94610010cf570ab5c98e99ed/websockets-15.0.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:592f1a9fe869c778694f0aa806ba0374e97648ab57936f092fd9d87f8bc03665", size = 173096, upload-time = "2025-03-05T20:02:18.832Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/3d/69/1a681dd6f02180916f116894181eab8b2e25b31e484c5d0eae637ec01f7c/websockets-15.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0701bc3cfcb9164d04a14b149fd74be7347a530ad3bbf15ab2c678a2cd3dd9a2", size = 173332, upload-time = "2025-03-05T20:02:20.187Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/a6/02/0073b3952f5bce97eafbb35757f8d0d54812b6174ed8dd952aa08429bcc3/websockets-15.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8b56bdcdb4505c8078cb6c7157d9811a85790f2f2b3632c7d1462ab5783d215", size = 183152, upload-time = "2025-03-05T20:02:22.286Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/74/45/c205c8480eafd114b428284840da0b1be9ffd0e4f87338dc95dc6ff961a1/websockets-15.0.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0af68c55afbd5f07986df82831c7bff04846928ea8d1fd7f30052638788bc9b5", size = 182096, upload-time = "2025-03-05T20:02:24.368Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/14/8f/aa61f528fba38578ec553c145857a181384c72b98156f858ca5c8e82d9d3/websockets-15.0.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64dee438fed052b52e4f98f76c5790513235efaa1ef7f3f2192c392cd7c91b65", size = 182523, upload-time = "2025-03-05T20:02:25.669Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/ec/6d/0267396610add5bc0d0d3e77f546d4cd287200804fe02323797de77dbce9/websockets-15.0.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d5f6b181bb38171a8ad1d6aa58a67a6aa9d4b38d0f8c5f496b9e42561dfc62fe", size = 182790, upload-time = "2025-03-05T20:02:26.99Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/02/05/c68c5adbf679cf610ae2f74a9b871ae84564462955d991178f95a1ddb7dd/websockets-15.0.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:5d54b09eba2bada6011aea5375542a157637b91029687eb4fdb2dab11059c1b4", size = 182165, upload-time = "2025-03-05T20:02:30.291Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/29/93/bb672df7b2f5faac89761cb5fa34f5cec45a4026c383a4b5761c6cea5c16/websockets-15.0.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3be571a8b5afed347da347bfcf27ba12b069d9d7f42cb8c7028b5e98bbb12597", size = 182160, upload-time = "2025-03-05T20:02:31.634Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/ff/83/de1f7709376dc3ca9b7eeb4b9a07b4526b14876b6d372a4dc62312bebee0/websockets-15.0.1-cp312-cp312-win32.whl", hash = "sha256:c338ffa0520bdb12fbc527265235639fb76e7bc7faafbb93f6ba80d9c06578a9", size = 176395, upload-time = "2025-03-05T20:02:33.017Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/7d/71/abf2ebc3bbfa40f391ce1428c7168fb20582d0ff57019b69ea20fa698043/websockets-15.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:fcd5cf9e305d7b8338754470cf69cf81f420459dbae8a3b40cee57417f4614a7", size = 176841, upload-time = "2025-03-05T20:02:34.498Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/cb/9f/51f0cf64471a9d2b4d0fc6c534f323b664e7095640c34562f5182e5a7195/websockets-15.0.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ee443ef070bb3b6ed74514f5efaa37a252af57c90eb33b956d35c8e9c10a1931", size = 175440, upload-time = "2025-03-05T20:02:36.695Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/8a/05/aa116ec9943c718905997412c5989f7ed671bc0188ee2ba89520e8765d7b/websockets-15.0.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5a939de6b7b4e18ca683218320fc67ea886038265fd1ed30173f5ce3f8e85675", size = 173098, upload-time = "2025-03-05T20:02:37.985Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/ff/0b/33cef55ff24f2d92924923c99926dcce78e7bd922d649467f0eda8368923/websockets-15.0.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:746ee8dba912cd6fc889a8147168991d50ed70447bf18bcda7039f7d2e3d9151", size = 173329, upload-time = "2025-03-05T20:02:39.298Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/31/1d/063b25dcc01faa8fada1469bdf769de3768b7044eac9d41f734fd7b6ad6d/websockets-15.0.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:595b6c3969023ecf9041b2936ac3827e4623bfa3ccf007575f04c5a6aa318c22", size = 183111, upload-time = "2025-03-05T20:02:40.595Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/93/53/9a87ee494a51bf63e4ec9241c1ccc4f7c2f45fff85d5bde2ff74fcb68b9e/websockets-15.0.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c714d2fc58b5ca3e285461a4cc0c9a66bd0e24c5da9911e30158286c9b5be7f", size = 182054, upload-time = "2025-03-05T20:02:41.926Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/ff/b2/83a6ddf56cdcbad4e3d841fcc55d6ba7d19aeb89c50f24dd7e859ec0805f/websockets-15.0.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f3c1e2ab208db911594ae5b4f79addeb3501604a165019dd221c0bdcabe4db8", size = 182496, upload-time = "2025-03-05T20:02:43.304Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/98/41/e7038944ed0abf34c45aa4635ba28136f06052e08fc2168520bb8b25149f/websockets-15.0.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:229cf1d3ca6c1804400b0a9790dc66528e08a6a1feec0d5040e8b9eb14422375", size = 182829, upload-time = "2025-03-05T20:02:48.812Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/e0/17/de15b6158680c7623c6ef0db361da965ab25d813ae54fcfeae2e5b9ef910/websockets-15.0.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:756c56e867a90fb00177d530dca4b097dd753cde348448a1012ed6c5131f8b7d", size = 182217, upload-time = "2025-03-05T20:02:50.14Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/33/2b/1f168cb6041853eef0362fb9554c3824367c5560cbdaad89ac40f8c2edfc/websockets-15.0.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:558d023b3df0bffe50a04e710bc87742de35060580a293c2a984299ed83bc4e4", size = 182195, upload-time = "2025-03-05T20:02:51.561Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/86/eb/20b6cdf273913d0ad05a6a14aed4b9a85591c18a987a3d47f20fa13dcc47/websockets-15.0.1-cp313-cp313-win32.whl", hash = "sha256:ba9e56e8ceeeedb2e080147ba85ffcd5cd0711b89576b83784d8605a7df455fa", size = 176393, upload-time = "2025-03-05T20:02:53.814Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/1b/6c/c65773d6cab416a64d191d6ee8a8b1c68a09970ea6909d16965d26bfed1e/websockets-15.0.1-cp313-cp313-win_amd64.whl", hash = "sha256:e09473f095a819042ecb2ab9465aee615bd9c2028e4ef7d933600a8401c79561", size = 176837, upload-time = "2025-03-05T20:02:55.237Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/fa/a8/5b41e0da817d64113292ab1f8247140aac61cbf6cfd085d6a0fa77f4984f/websockets-15.0.1-py3-none-any.whl", hash = "sha256:f7a866fbc1e97b5c617ee4116daaa09b722101d4a3c170c787450ba409f9736f", size = 169743, upload-time = "2025-03-05T20:03:39.41Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "werkzeug"
                                                                  +version = "3.1.4"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +dependencies = [
                                                                  +    { name = "markupsafe" },
                                                                  +]
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/45/ea/b0f8eeb287f8df9066e56e831c7824ac6bab645dd6c7a8f4b2d767944f9b/werkzeug-3.1.4.tar.gz", hash = "sha256:cd3cd98b1b92dc3b7b3995038826c68097dcb16f9baa63abe35f20eafeb9fe5e", size = 864687, upload-time = "2025-11-29T02:15:22.841Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/2f/f9/9e082990c2585c744734f85bec79b5dae5df9c974ffee58fe421652c8e91/werkzeug-3.1.4-py3-none-any.whl", hash = "sha256:2ad50fb9ed09cc3af22c54698351027ace879a0b60a3b5edf5730b2f7d876905", size = 224960, upload-time = "2025-11-29T02:15:21.13Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "widgetsnbextension"
                                                                  +version = "4.0.15"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/bd/f4/c67440c7fb409a71b7404b7aefcd7569a9c0d6bd071299bf4198ae7a5d95/widgetsnbextension-4.0.15.tar.gz", hash = "sha256:de8610639996f1567952d763a5a41af8af37f2575a41f9852a38f947eb82a3b9", size = 1097402, upload-time = "2025-11-01T21:15:55.178Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/3f/0e/fa3b193432cfc60c93b42f3be03365f5f909d2b3ea410295cf36df739e31/widgetsnbextension-4.0.15-py3-none-any.whl", hash = "sha256:8156704e4346a571d9ce73b84bee86a29906c9abfd7223b7228a28899ccf3366", size = 2196503, upload-time = "2025-11-01T21:15:53.565Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "wrapt"
                                                                  +version = "1.17.3"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/95/8f/aeb76c5b46e273670962298c23e7ddde79916cb74db802131d49a85e4b7d/wrapt-1.17.3.tar.gz", hash = "sha256:f66eb08feaa410fe4eebd17f2a2c8e2e46d3476e9f8c783daa8e09e0faa666d0", size = 55547, upload-time = "2025-08-12T05:53:21.714Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/9f/41/cad1aba93e752f1f9268c77270da3c469883d56e2798e7df6240dcb2287b/wrapt-1.17.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:ab232e7fdb44cdfbf55fc3afa31bcdb0d8980b9b95c38b6405df2acb672af0e0", size = 53998, upload-time = "2025-08-12T05:51:47.138Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/60/f8/096a7cc13097a1869fe44efe68dace40d2a16ecb853141394047f0780b96/wrapt-1.17.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:9baa544e6acc91130e926e8c802a17f3b16fbea0fd441b5a60f5cf2cc5c3deba", size = 39020, upload-time = "2025-08-12T05:51:35.906Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/33/df/bdf864b8997aab4febb96a9ae5c124f700a5abd9b5e13d2a3214ec4be705/wrapt-1.17.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6b538e31eca1a7ea4605e44f81a48aa24c4632a277431a6ed3f328835901f4fd", size = 39098, upload-time = "2025-08-12T05:51:57.474Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/9f/81/5d931d78d0eb732b95dc3ddaeeb71c8bb572fb01356e9133916cd729ecdd/wrapt-1.17.3-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:042ec3bb8f319c147b1301f2393bc19dba6e176b7da446853406d041c36c7828", size = 88036, upload-time = "2025-08-12T05:52:34.784Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/ca/38/2e1785df03b3d72d34fc6252d91d9d12dc27a5c89caef3335a1bbb8908ca/wrapt-1.17.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3af60380ba0b7b5aeb329bc4e402acd25bd877e98b3727b0135cb5c2efdaefe9", size = 88156, upload-time = "2025-08-12T05:52:13.599Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/b3/8b/48cdb60fe0603e34e05cffda0b2a4adab81fd43718e11111a4b0100fd7c1/wrapt-1.17.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0b02e424deef65c9f7326d8c19220a2c9040c51dc165cddb732f16198c168396", size = 87102, upload-time = "2025-08-12T05:52:14.56Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/3c/51/d81abca783b58f40a154f1b2c56db1d2d9e0d04fa2d4224e357529f57a57/wrapt-1.17.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:74afa28374a3c3a11b3b5e5fca0ae03bef8450d6aa3ab3a1e2c30e3a75d023dc", size = 87732, upload-time = "2025-08-12T05:52:36.165Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/9e/b1/43b286ca1392a006d5336412d41663eeef1ad57485f3e52c767376ba7e5a/wrapt-1.17.3-cp312-cp312-win32.whl", hash = "sha256:4da9f45279fff3543c371d5ababc57a0384f70be244de7759c85a7f989cb4ebe", size = 36705, upload-time = "2025-08-12T05:53:07.123Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/28/de/49493f962bd3c586ab4b88066e967aa2e0703d6ef2c43aa28cb83bf7b507/wrapt-1.17.3-cp312-cp312-win_amd64.whl", hash = "sha256:e71d5c6ebac14875668a1e90baf2ea0ef5b7ac7918355850c0908ae82bcb297c", size = 38877, upload-time = "2025-08-12T05:53:05.436Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/f1/48/0f7102fe9cb1e8a5a77f80d4f0956d62d97034bbe88d33e94699f99d181d/wrapt-1.17.3-cp312-cp312-win_arm64.whl", hash = "sha256:604d076c55e2fdd4c1c03d06dc1a31b95130010517b5019db15365ec4a405fc6", size = 36885, upload-time = "2025-08-12T05:52:54.367Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/fc/f6/759ece88472157acb55fc195e5b116e06730f1b651b5b314c66291729193/wrapt-1.17.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:a47681378a0439215912ef542c45a783484d4dd82bac412b71e59cf9c0e1cea0", size = 54003, upload-time = "2025-08-12T05:51:48.627Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/4f/a9/49940b9dc6d47027dc850c116d79b4155f15c08547d04db0f07121499347/wrapt-1.17.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:54a30837587c6ee3cd1a4d1c2ec5d24e77984d44e2f34547e2323ddb4e22eb77", size = 39025, upload-time = "2025-08-12T05:51:37.156Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/45/35/6a08de0f2c96dcdd7fe464d7420ddb9a7655a6561150e5fc4da9356aeaab/wrapt-1.17.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:16ecf15d6af39246fe33e507105d67e4b81d8f8d2c6598ff7e3ca1b8a37213f7", size = 39108, upload-time = "2025-08-12T05:51:58.425Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/0c/37/6faf15cfa41bf1f3dba80cd3f5ccc6622dfccb660ab26ed79f0178c7497f/wrapt-1.17.3-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:6fd1ad24dc235e4ab88cda009e19bf347aabb975e44fd5c2fb22a3f6e4141277", size = 88072, upload-time = "2025-08-12T05:52:37.53Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/78/f2/efe19ada4a38e4e15b6dff39c3e3f3f73f5decf901f66e6f72fe79623a06/wrapt-1.17.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0ed61b7c2d49cee3c027372df5809a59d60cf1b6c2f81ee980a091f3afed6a2d", size = 88214, upload-time = "2025-08-12T05:52:15.886Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/40/90/ca86701e9de1622b16e09689fc24b76f69b06bb0150990f6f4e8b0eeb576/wrapt-1.17.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:423ed5420ad5f5529db9ce89eac09c8a2f97da18eb1c870237e84c5a5c2d60aa", size = 87105, upload-time = "2025-08-12T05:52:17.914Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/fd/e0/d10bd257c9a3e15cbf5523025252cc14d77468e8ed644aafb2d6f54cb95d/wrapt-1.17.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e01375f275f010fcbf7f643b4279896d04e571889b8a5b3f848423d91bf07050", size = 87766, upload-time = "2025-08-12T05:52:39.243Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/e8/cf/7d848740203c7b4b27eb55dbfede11aca974a51c3d894f6cc4b865f42f58/wrapt-1.17.3-cp313-cp313-win32.whl", hash = "sha256:53e5e39ff71b3fc484df8a522c933ea2b7cdd0d5d15ae82e5b23fde87d44cbd8", size = 36711, upload-time = "2025-08-12T05:53:10.074Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/57/54/35a84d0a4d23ea675994104e667ceff49227ce473ba6a59ba2c84f250b74/wrapt-1.17.3-cp313-cp313-win_amd64.whl", hash = "sha256:1f0b2f40cf341ee8cc1a97d51ff50dddb9fcc73241b9143ec74b30fc4f44f6cb", size = 38885, upload-time = "2025-08-12T05:53:08.695Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/01/77/66e54407c59d7b02a3c4e0af3783168fff8e5d61def52cda8728439d86bc/wrapt-1.17.3-cp313-cp313-win_arm64.whl", hash = "sha256:7425ac3c54430f5fc5e7b6f41d41e704db073309acfc09305816bc6a0b26bb16", size = 36896, upload-time = "2025-08-12T05:52:55.34Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/02/a2/cd864b2a14f20d14f4c496fab97802001560f9f41554eef6df201cd7f76c/wrapt-1.17.3-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:cf30f6e3c077c8e6a9a7809c94551203c8843e74ba0c960f4a98cd80d4665d39", size = 54132, upload-time = "2025-08-12T05:51:49.864Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/d5/46/d011725b0c89e853dc44cceb738a307cde5d240d023d6d40a82d1b4e1182/wrapt-1.17.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:e228514a06843cae89621384cfe3a80418f3c04aadf8a3b14e46a7be704e4235", size = 39091, upload-time = "2025-08-12T05:51:38.935Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/2e/9e/3ad852d77c35aae7ddebdbc3b6d35ec8013af7d7dddad0ad911f3d891dae/wrapt-1.17.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:5ea5eb3c0c071862997d6f3e02af1d055f381b1d25b286b9d6644b79db77657c", size = 39172, upload-time = "2025-08-12T05:51:59.365Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/c3/f7/c983d2762bcce2326c317c26a6a1e7016f7eb039c27cdf5c4e30f4160f31/wrapt-1.17.3-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:281262213373b6d5e4bb4353bc36d1ba4084e6d6b5d242863721ef2bf2c2930b", size = 87163, upload-time = "2025-08-12T05:52:40.965Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/e4/0f/f673f75d489c7f22d17fe0193e84b41540d962f75fce579cf6873167c29b/wrapt-1.17.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:dc4a8d2b25efb6681ecacad42fca8859f88092d8732b170de6a5dddd80a1c8fa", size = 87963, upload-time = "2025-08-12T05:52:20.326Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/df/61/515ad6caca68995da2fac7a6af97faab8f78ebe3bf4f761e1b77efbc47b5/wrapt-1.17.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:373342dd05b1d07d752cecbec0c41817231f29f3a89aa8b8843f7b95992ed0c7", size = 86945, upload-time = "2025-08-12T05:52:21.581Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/d3/bd/4e70162ce398462a467bc09e768bee112f1412e563620adc353de9055d33/wrapt-1.17.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:d40770d7c0fd5cbed9d84b2c3f2e156431a12c9a37dc6284060fb4bec0b7ffd4", size = 86857, upload-time = "2025-08-12T05:52:43.043Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/2b/b8/da8560695e9284810b8d3df8a19396a6e40e7518059584a1a394a2b35e0a/wrapt-1.17.3-cp314-cp314-win32.whl", hash = "sha256:fbd3c8319de8e1dc79d346929cd71d523622da527cca14e0c1d257e31c2b8b10", size = 37178, upload-time = "2025-08-12T05:53:12.605Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/db/c8/b71eeb192c440d67a5a0449aaee2310a1a1e8eca41676046f99ed2487e9f/wrapt-1.17.3-cp314-cp314-win_amd64.whl", hash = "sha256:e1a4120ae5705f673727d3253de3ed0e016f7cd78dc463db1b31e2463e1f3cf6", size = 39310, upload-time = "2025-08-12T05:53:11.106Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/45/20/2cda20fd4865fa40f86f6c46ed37a2a8356a7a2fde0773269311f2af56c7/wrapt-1.17.3-cp314-cp314-win_arm64.whl", hash = "sha256:507553480670cab08a800b9463bdb881b2edeed77dc677b0a5915e6106e91a58", size = 37266, upload-time = "2025-08-12T05:52:56.531Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/77/ed/dd5cf21aec36c80443c6f900449260b80e2a65cf963668eaef3b9accce36/wrapt-1.17.3-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:ed7c635ae45cfbc1a7371f708727bf74690daedc49b4dba310590ca0bd28aa8a", size = 56544, upload-time = "2025-08-12T05:51:51.109Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/8d/96/450c651cc753877ad100c7949ab4d2e2ecc4d97157e00fa8f45df682456a/wrapt-1.17.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:249f88ed15503f6492a71f01442abddd73856a0032ae860de6d75ca62eed8067", size = 40283, upload-time = "2025-08-12T05:51:39.912Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/d1/86/2fcad95994d9b572db57632acb6f900695a648c3e063f2cd344b3f5c5a37/wrapt-1.17.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:5a03a38adec8066d5a37bea22f2ba6bbf39fcdefbe2d91419ab864c3fb515454", size = 40366, upload-time = "2025-08-12T05:52:00.693Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/64/0e/f4472f2fdde2d4617975144311f8800ef73677a159be7fe61fa50997d6c0/wrapt-1.17.3-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:5d4478d72eb61c36e5b446e375bbc49ed002430d17cdec3cecb36993398e1a9e", size = 108571, upload-time = "2025-08-12T05:52:44.521Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/cc/01/9b85a99996b0a97c8a17484684f206cbb6ba73c1ce6890ac668bcf3838fb/wrapt-1.17.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:223db574bb38637e8230eb14b185565023ab624474df94d2af18f1cdb625216f", size = 113094, upload-time = "2025-08-12T05:52:22.618Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/25/02/78926c1efddcc7b3aa0bc3d6b33a822f7d898059f7cd9ace8c8318e559ef/wrapt-1.17.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:e405adefb53a435f01efa7ccdec012c016b5a1d3f35459990afc39b6be4d5056", size = 110659, upload-time = "2025-08-12T05:52:24.057Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/dc/ee/c414501ad518ac3e6fe184753632fe5e5ecacdcf0effc23f31c1e4f7bfcf/wrapt-1.17.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:88547535b787a6c9ce4086917b6e1d291aa8ed914fdd3a838b3539dc95c12804", size = 106946, upload-time = "2025-08-12T05:52:45.976Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/be/44/a1bd64b723d13bb151d6cc91b986146a1952385e0392a78567e12149c7b4/wrapt-1.17.3-cp314-cp314t-win32.whl", hash = "sha256:41b1d2bc74c2cac6f9074df52b2efbef2b30bdfe5f40cb78f8ca22963bc62977", size = 38717, upload-time = "2025-08-12T05:53:15.214Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/79/d9/7cfd5a312760ac4dd8bf0184a6ee9e43c33e47f3dadc303032ce012b8fa3/wrapt-1.17.3-cp314-cp314t-win_amd64.whl", hash = "sha256:73d496de46cd2cdbdbcce4ae4bcdb4afb6a11234a1df9c085249d55166b95116", size = 41334, upload-time = "2025-08-12T05:53:14.178Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/46/78/10ad9781128ed2f99dbc474f43283b13fea8ba58723e98844367531c18e9/wrapt-1.17.3-cp314-cp314t-win_arm64.whl", hash = "sha256:f38e60678850c42461d4202739f9bf1e3a737c7ad283638251e79cc49effb6b6", size = 38471, upload-time = "2025-08-12T05:52:57.784Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/1f/f6/a933bd70f98e9cf3e08167fc5cd7aaaca49147e48411c0bd5ae701bb2194/wrapt-1.17.3-py3-none-any.whl", hash = "sha256:7171ae35d2c33d326ac19dd8facb1e82e5fd04ef8c6c0e394d7af55a55051c22", size = 23591, upload-time = "2025-08-12T05:53:20.674Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "xmltodict"
                                                                  +version = "1.0.2"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/6a/aa/917ceeed4dbb80d2f04dbd0c784b7ee7bba8ae5a54837ef0e5e062cd3cfb/xmltodict-1.0.2.tar.gz", hash = "sha256:54306780b7c2175a3967cad1db92f218207e5bc1aba697d887807c0fb68b7649", size = 25725, upload-time = "2025-09-17T21:59:26.459Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/c0/20/69a0e6058bc5ea74892d089d64dfc3a62ba78917ec5e2cfa70f7c92ba3a5/xmltodict-1.0.2-py3-none-any.whl", hash = "sha256:62d0fddb0dcbc9f642745d8bbf4d81fd17d6dfaec5a15b5c1876300aad92af0d", size = 13893, upload-time = "2025-09-17T21:59:24.859Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "yarl"
                                                                  +version = "1.22.0"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +dependencies = [
                                                                  +    { name = "idna" },
                                                                  +    { name = "multidict" },
                                                                  +    { name = "propcache" },
                                                                  +]
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/57/63/0c6ebca57330cd313f6102b16dd57ffaf3ec4c83403dcb45dbd15c6f3ea1/yarl-1.22.0.tar.gz", hash = "sha256:bebf8557577d4401ba8bd9ff33906f1376c877aa78d1fe216ad01b4d6745af71", size = 187169, upload-time = "2025-10-06T14:12:55.963Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/75/ff/46736024fee3429b80a165a732e38e5d5a238721e634ab41b040d49f8738/yarl-1.22.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e340382d1afa5d32b892b3ff062436d592ec3d692aeea3bef3a5cfe11bbf8c6f", size = 142000, upload-time = "2025-10-06T14:09:44.631Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/5a/9a/b312ed670df903145598914770eb12de1bac44599549b3360acc96878df8/yarl-1.22.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f1e09112a2c31ffe8d80be1b0988fa6a18c5d5cad92a9ffbb1c04c91bfe52ad2", size = 94338, upload-time = "2025-10-06T14:09:46.372Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/ba/f5/0601483296f09c3c65e303d60c070a5c19fcdbc72daa061e96170785bc7d/yarl-1.22.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:939fe60db294c786f6b7c2d2e121576628468f65453d86b0fe36cb52f987bd74", size = 94909, upload-time = "2025-10-06T14:09:48.648Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/60/41/9a1fe0b73dbcefce72e46cf149b0e0a67612d60bfc90fb59c2b2efdfbd86/yarl-1.22.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e1651bf8e0398574646744c1885a41198eba53dc8a9312b954073f845c90a8df", size = 372940, upload-time = "2025-10-06T14:09:50.089Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/17/7a/795cb6dfee561961c30b800f0ed616b923a2ec6258b5def2a00bf8231334/yarl-1.22.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:b8a0588521a26bf92a57a1705b77b8b59044cdceccac7151bd8d229e66b8dedb", size = 345825, upload-time = "2025-10-06T14:09:52.142Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/d7/93/a58f4d596d2be2ae7bab1a5846c4d270b894958845753b2c606d666744d3/yarl-1.22.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:42188e6a615c1a75bcaa6e150c3fe8f3e8680471a6b10150c5f7e83f47cc34d2", size = 386705, upload-time = "2025-10-06T14:09:54.128Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/61/92/682279d0e099d0e14d7fd2e176bd04f48de1484f56546a3e1313cd6c8e7c/yarl-1.22.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:f6d2cb59377d99718913ad9a151030d6f83ef420a2b8f521d94609ecc106ee82", size = 396518, upload-time = "2025-10-06T14:09:55.762Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/db/0f/0d52c98b8a885aeda831224b78f3be7ec2e1aa4a62091f9f9188c3c65b56/yarl-1.22.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:50678a3b71c751d58d7908edc96d332af328839eea883bb554a43f539101277a", size = 377267, upload-time = "2025-10-06T14:09:57.958Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/22/42/d2685e35908cbeaa6532c1fc73e89e7f2efb5d8a7df3959ea8e37177c5a3/yarl-1.22.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1e8fbaa7cec507aa24ea27a01456e8dd4b6fab829059b69844bd348f2d467124", size = 365797, upload-time = "2025-10-06T14:09:59.527Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/a2/83/cf8c7bcc6355631762f7d8bdab920ad09b82efa6b722999dfb05afa6cfac/yarl-1.22.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:433885ab5431bc3d3d4f2f9bd15bfa1614c522b0f1405d62c4f926ccd69d04fa", size = 365535, upload-time = "2025-10-06T14:10:01.139Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/25/e1/5302ff9b28f0c59cac913b91fe3f16c59a033887e57ce9ca5d41a3a94737/yarl-1.22.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:b790b39c7e9a4192dc2e201a282109ed2985a1ddbd5ac08dc56d0e121400a8f7", size = 382324, upload-time = "2025-10-06T14:10:02.756Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/bf/cd/4617eb60f032f19ae3a688dc990d8f0d89ee0ea378b61cac81ede3e52fae/yarl-1.22.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:31f0b53913220599446872d757257be5898019c85e7971599065bc55065dc99d", size = 383803, upload-time = "2025-10-06T14:10:04.552Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/59/65/afc6e62bb506a319ea67b694551dab4a7e6fb7bf604e9bd9f3e11d575fec/yarl-1.22.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a49370e8f711daec68d09b821a34e1167792ee2d24d405cbc2387be4f158b520", size = 374220, upload-time = "2025-10-06T14:10:06.489Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/e7/3d/68bf18d50dc674b942daec86a9ba922d3113d8399b0e52b9897530442da2/yarl-1.22.0-cp312-cp312-win32.whl", hash = "sha256:70dfd4f241c04bd9239d53b17f11e6ab672b9f1420364af63e8531198e3f5fe8", size = 81589, upload-time = "2025-10-06T14:10:09.254Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/c8/9a/6ad1a9b37c2f72874f93e691b2e7ecb6137fb2b899983125db4204e47575/yarl-1.22.0-cp312-cp312-win_amd64.whl", hash = "sha256:8884d8b332a5e9b88e23f60bb166890009429391864c685e17bd73a9eda9105c", size = 87213, upload-time = "2025-10-06T14:10:11.369Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/44/c5/c21b562d1680a77634d748e30c653c3ca918beb35555cff24986fff54598/yarl-1.22.0-cp312-cp312-win_arm64.whl", hash = "sha256:ea70f61a47f3cc93bdf8b2f368ed359ef02a01ca6393916bc8ff877427181e74", size = 81330, upload-time = "2025-10-06T14:10:13.112Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/ea/f3/d67de7260456ee105dc1d162d43a019ecad6b91e2f51809d6cddaa56690e/yarl-1.22.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8dee9c25c74997f6a750cd317b8ca63545169c098faee42c84aa5e506c819b53", size = 139980, upload-time = "2025-10-06T14:10:14.601Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/01/88/04d98af0b47e0ef42597b9b28863b9060bb515524da0a65d5f4db160b2d5/yarl-1.22.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:01e73b85a5434f89fc4fe27dcda2aff08ddf35e4d47bbbea3bdcd25321af538a", size = 93424, upload-time = "2025-10-06T14:10:16.115Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/18/91/3274b215fd8442a03975ce6bee5fe6aa57a8326b29b9d3d56234a1dca244/yarl-1.22.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:22965c2af250d20c873cdbee8ff958fb809940aeb2e74ba5f20aaf6b7ac8c70c", size = 93821, upload-time = "2025-10-06T14:10:17.993Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/61/3a/caf4e25036db0f2da4ca22a353dfeb3c9d3c95d2761ebe9b14df8fc16eb0/yarl-1.22.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b4f15793aa49793ec8d1c708ab7f9eded1aa72edc5174cae703651555ed1b601", size = 373243, upload-time = "2025-10-06T14:10:19.44Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/6e/9e/51a77ac7516e8e7803b06e01f74e78649c24ee1021eca3d6a739cb6ea49c/yarl-1.22.0-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e5542339dcf2747135c5c85f68680353d5cb9ffd741c0f2e8d832d054d41f35a", size = 342361, upload-time = "2025-10-06T14:10:21.124Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/d4/f8/33b92454789dde8407f156c00303e9a891f1f51a0330b0fad7c909f87692/yarl-1.22.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:5c401e05ad47a75869c3ab3e35137f8468b846770587e70d71e11de797d113df", size = 387036, upload-time = "2025-10-06T14:10:22.902Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/d9/9a/c5db84ea024f76838220280f732970aa4ee154015d7f5c1bfb60a267af6f/yarl-1.22.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:243dda95d901c733f5b59214d28b0120893d91777cb8aa043e6ef059d3cddfe2", size = 397671, upload-time = "2025-10-06T14:10:24.523Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/11/c9/cd8538dc2e7727095e0c1d867bad1e40c98f37763e6d995c1939f5fdc7b1/yarl-1.22.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bec03d0d388060058f5d291a813f21c011041938a441c593374da6077fe21b1b", size = 377059, upload-time = "2025-10-06T14:10:26.406Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/a1/b9/ab437b261702ced75122ed78a876a6dec0a1b0f5e17a4ac7a9a2482d8abe/yarl-1.22.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b0748275abb8c1e1e09301ee3cf90c8a99678a4e92e4373705f2a2570d581273", size = 365356, upload-time = "2025-10-06T14:10:28.461Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/b2/9d/8e1ae6d1d008a9567877b08f0ce4077a29974c04c062dabdb923ed98e6fe/yarl-1.22.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:47fdb18187e2a4e18fda2c25c05d8251a9e4a521edaed757fef033e7d8498d9a", size = 361331, upload-time = "2025-10-06T14:10:30.541Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/ca/5a/09b7be3905962f145b73beb468cdd53db8aa171cf18c80400a54c5b82846/yarl-1.22.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:c7044802eec4524fde550afc28edda0dd5784c4c45f0be151a2d3ba017daca7d", size = 382590, upload-time = "2025-10-06T14:10:33.352Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/aa/7f/59ec509abf90eda5048b0bc3e2d7b5099dffdb3e6b127019895ab9d5ef44/yarl-1.22.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:139718f35149ff544caba20fce6e8a2f71f1e39b92c700d8438a0b1d2a631a02", size = 385316, upload-time = "2025-10-06T14:10:35.034Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/e5/84/891158426bc8036bfdfd862fabd0e0fa25df4176ec793e447f4b85cf1be4/yarl-1.22.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e1b51bebd221006d3d2f95fbe124b22b247136647ae5dcc8c7acafba66e5ee67", size = 374431, upload-time = "2025-10-06T14:10:37.76Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/bb/49/03da1580665baa8bef5e8ed34c6df2c2aca0a2f28bf397ed238cc1bbc6f2/yarl-1.22.0-cp313-cp313-win32.whl", hash = "sha256:d3e32536234a95f513bd374e93d717cf6b2231a791758de6c509e3653f234c95", size = 81555, upload-time = "2025-10-06T14:10:39.649Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/9a/ee/450914ae11b419eadd067c6183ae08381cfdfcb9798b90b2b713bbebddda/yarl-1.22.0-cp313-cp313-win_amd64.whl", hash = "sha256:47743b82b76d89a1d20b83e60d5c20314cbd5ba2befc9cda8f28300c4a08ed4d", size = 86965, upload-time = "2025-10-06T14:10:41.313Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/98/4d/264a01eae03b6cf629ad69bae94e3b0e5344741e929073678e84bf7a3e3b/yarl-1.22.0-cp313-cp313-win_arm64.whl", hash = "sha256:5d0fcda9608875f7d052eff120c7a5da474a6796fe4d83e152e0e4d42f6d1a9b", size = 81205, upload-time = "2025-10-06T14:10:43.167Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/88/fc/6908f062a2f77b5f9f6d69cecb1747260831ff206adcbc5b510aff88df91/yarl-1.22.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:719ae08b6972befcba4310e49edb1161a88cdd331e3a694b84466bd938a6ab10", size = 146209, upload-time = "2025-10-06T14:10:44.643Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/65/47/76594ae8eab26210b4867be6f49129861ad33da1f1ebdf7051e98492bf62/yarl-1.22.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:47d8a5c446df1c4db9d21b49619ffdba90e77c89ec6e283f453856c74b50b9e3", size = 95966, upload-time = "2025-10-06T14:10:46.554Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/ab/ce/05e9828a49271ba6b5b038b15b3934e996980dd78abdfeb52a04cfb9467e/yarl-1.22.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:cfebc0ac8333520d2d0423cbbe43ae43c8838862ddb898f5ca68565e395516e9", size = 97312, upload-time = "2025-10-06T14:10:48.007Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/d1/c5/7dffad5e4f2265b29c9d7ec869c369e4223166e4f9206fc2243ee9eea727/yarl-1.22.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4398557cbf484207df000309235979c79c4356518fd5c99158c7d38203c4da4f", size = 361967, upload-time = "2025-10-06T14:10:49.997Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/50/b2/375b933c93a54bff7fc041e1a6ad2c0f6f733ffb0c6e642ce56ee3b39970/yarl-1.22.0-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:2ca6fd72a8cd803be290d42f2dec5cdcd5299eeb93c2d929bf060ad9efaf5de0", size = 323949, upload-time = "2025-10-06T14:10:52.004Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/66/50/bfc2a29a1d78644c5a7220ce2f304f38248dc94124a326794e677634b6cf/yarl-1.22.0-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ca1f59c4e1ab6e72f0a23c13fca5430f889634166be85dbf1013683e49e3278e", size = 361818, upload-time = "2025-10-06T14:10:54.078Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/46/96/f3941a46af7d5d0f0498f86d71275696800ddcdd20426298e572b19b91ff/yarl-1.22.0-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:6c5010a52015e7c70f86eb967db0f37f3c8bd503a695a49f8d45700144667708", size = 372626, upload-time = "2025-10-06T14:10:55.767Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/c1/42/8b27c83bb875cd89448e42cd627e0fb971fa1675c9ec546393d18826cb50/yarl-1.22.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9d7672ecf7557476642c88497c2f8d8542f8e36596e928e9bcba0e42e1e7d71f", size = 341129, upload-time = "2025-10-06T14:10:57.985Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/49/36/99ca3122201b382a3cf7cc937b95235b0ac944f7e9f2d5331d50821ed352/yarl-1.22.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:3b7c88eeef021579d600e50363e0b6ee4f7f6f728cd3486b9d0f3ee7b946398d", size = 346776, upload-time = "2025-10-06T14:10:59.633Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/85/b4/47328bf996acd01a4c16ef9dcd2f59c969f495073616586f78cd5f2efb99/yarl-1.22.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:f4afb5c34f2c6fecdcc182dfcfc6af6cccf1aa923eed4d6a12e9d96904e1a0d8", size = 334879, upload-time = "2025-10-06T14:11:01.454Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/c2/ad/b77d7b3f14a4283bffb8e92c6026496f6de49751c2f97d4352242bba3990/yarl-1.22.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:59c189e3e99a59cf8d83cbb31d4db02d66cda5a1a4374e8a012b51255341abf5", size = 350996, upload-time = "2025-10-06T14:11:03.452Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/81/c8/06e1d69295792ba54d556f06686cbd6a7ce39c22307100e3fb4a2c0b0a1d/yarl-1.22.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:5a3bf7f62a289fa90f1990422dc8dff5a458469ea71d1624585ec3a4c8d6960f", size = 356047, upload-time = "2025-10-06T14:11:05.115Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/4b/b8/4c0e9e9f597074b208d18cef227d83aac36184bfbc6eab204ea55783dbc5/yarl-1.22.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:de6b9a04c606978fdfe72666fa216ffcf2d1a9f6a381058d4378f8d7b1e5de62", size = 342947, upload-time = "2025-10-06T14:11:08.137Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/e0/e5/11f140a58bf4c6ad7aca69a892bff0ee638c31bea4206748fc0df4ebcb3a/yarl-1.22.0-cp313-cp313t-win32.whl", hash = "sha256:1834bb90991cc2999f10f97f5f01317f99b143284766d197e43cd5b45eb18d03", size = 86943, upload-time = "2025-10-06T14:11:10.284Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/31/74/8b74bae38ed7fe6793d0c15a0c8207bbb819cf287788459e5ed230996cdd/yarl-1.22.0-cp313-cp313t-win_amd64.whl", hash = "sha256:ff86011bd159a9d2dfc89c34cfd8aff12875980e3bd6a39ff097887520e60249", size = 93715, upload-time = "2025-10-06T14:11:11.739Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/69/66/991858aa4b5892d57aef7ee1ba6b4d01ec3b7eb3060795d34090a3ca3278/yarl-1.22.0-cp313-cp313t-win_arm64.whl", hash = "sha256:7861058d0582b847bc4e3a4a4c46828a410bca738673f35a29ba3ca5db0b473b", size = 83857, upload-time = "2025-10-06T14:11:13.586Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/46/b3/e20ef504049f1a1c54a814b4b9bed96d1ac0e0610c3b4da178f87209db05/yarl-1.22.0-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:34b36c2c57124530884d89d50ed2c1478697ad7473efd59cfd479945c95650e4", size = 140520, upload-time = "2025-10-06T14:11:15.465Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/e4/04/3532d990fdbab02e5ede063676b5c4260e7f3abea2151099c2aa745acc4c/yarl-1.22.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:0dd9a702591ca2e543631c2a017e4a547e38a5c0f29eece37d9097e04a7ac683", size = 93504, upload-time = "2025-10-06T14:11:17.106Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/11/63/ff458113c5c2dac9a9719ac68ee7c947cb621432bcf28c9972b1c0e83938/yarl-1.22.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:594fcab1032e2d2cc3321bb2e51271e7cd2b516c7d9aee780ece81b07ff8244b", size = 94282, upload-time = "2025-10-06T14:11:19.064Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/a7/bc/315a56aca762d44a6aaaf7ad253f04d996cb6b27bad34410f82d76ea8038/yarl-1.22.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f3d7a87a78d46a2e3d5b72587ac14b4c16952dd0887dbb051451eceac774411e", size = 372080, upload-time = "2025-10-06T14:11:20.996Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/3f/3f/08e9b826ec2e099ea6e7c69a61272f4f6da62cb5b1b63590bb80ca2e4a40/yarl-1.22.0-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:852863707010316c973162e703bddabec35e8757e67fcb8ad58829de1ebc8590", size = 338696, upload-time = "2025-10-06T14:11:22.847Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/e3/9f/90360108e3b32bd76789088e99538febfea24a102380ae73827f62073543/yarl-1.22.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:131a085a53bfe839a477c0845acf21efc77457ba2bcf5899618136d64f3303a2", size = 387121, upload-time = "2025-10-06T14:11:24.889Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/98/92/ab8d4657bd5b46a38094cfaea498f18bb70ce6b63508fd7e909bd1f93066/yarl-1.22.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:078a8aefd263f4d4f923a9677b942b445a2be970ca24548a8102689a3a8ab8da", size = 394080, upload-time = "2025-10-06T14:11:27.307Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/f5/e7/d8c5a7752fef68205296201f8ec2bf718f5c805a7a7e9880576c67600658/yarl-1.22.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bca03b91c323036913993ff5c738d0842fc9c60c4648e5c8d98331526df89784", size = 372661, upload-time = "2025-10-06T14:11:29.387Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/b6/2e/f4d26183c8db0bb82d491b072f3127fb8c381a6206a3a56332714b79b751/yarl-1.22.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:68986a61557d37bb90d3051a45b91fa3d5c516d177dfc6dd6f2f436a07ff2b6b", size = 364645, upload-time = "2025-10-06T14:11:31.423Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/80/7c/428e5812e6b87cd00ee8e898328a62c95825bf37c7fa87f0b6bb2ad31304/yarl-1.22.0-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:4792b262d585ff0dff6bcb787f8492e40698443ec982a3568c2096433660c694", size = 355361, upload-time = "2025-10-06T14:11:33.055Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/ec/2a/249405fd26776f8b13c067378ef4d7dd49c9098d1b6457cdd152a99e96a9/yarl-1.22.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:ebd4549b108d732dba1d4ace67614b9545b21ece30937a63a65dd34efa19732d", size = 381451, upload-time = "2025-10-06T14:11:35.136Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/67/a8/fb6b1adbe98cf1e2dd9fad71003d3a63a1bc22459c6e15f5714eb9323b93/yarl-1.22.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:f87ac53513d22240c7d59203f25cc3beac1e574c6cd681bbfd321987b69f95fd", size = 383814, upload-time = "2025-10-06T14:11:37.094Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/d9/f9/3aa2c0e480fb73e872ae2814c43bc1e734740bb0d54e8cb2a95925f98131/yarl-1.22.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:22b029f2881599e2f1b06f8f1db2ee63bd309e2293ba2d566e008ba12778b8da", size = 370799, upload-time = "2025-10-06T14:11:38.83Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/50/3c/af9dba3b8b5eeb302f36f16f92791f3ea62e3f47763406abf6d5a4a3333b/yarl-1.22.0-cp314-cp314-win32.whl", hash = "sha256:6a635ea45ba4ea8238463b4f7d0e721bad669f80878b7bfd1f89266e2ae63da2", size = 82990, upload-time = "2025-10-06T14:11:40.624Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/ac/30/ac3a0c5bdc1d6efd1b41fa24d4897a4329b3b1e98de9449679dd327af4f0/yarl-1.22.0-cp314-cp314-win_amd64.whl", hash = "sha256:0d6e6885777af0f110b0e5d7e5dda8b704efed3894da26220b7f3d887b839a79", size = 88292, upload-time = "2025-10-06T14:11:42.578Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/df/0a/227ab4ff5b998a1b7410abc7b46c9b7a26b0ca9e86c34ba4b8d8bc7c63d5/yarl-1.22.0-cp314-cp314-win_arm64.whl", hash = "sha256:8218f4e98d3c10d683584cb40f0424f4b9fd6e95610232dd75e13743b070ee33", size = 82888, upload-time = "2025-10-06T14:11:44.863Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/06/5e/a15eb13db90abd87dfbefb9760c0f3f257ac42a5cac7e75dbc23bed97a9f/yarl-1.22.0-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:45c2842ff0e0d1b35a6bf1cd6c690939dacb617a70827f715232b2e0494d55d1", size = 146223, upload-time = "2025-10-06T14:11:46.796Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/18/82/9665c61910d4d84f41a5bf6837597c89e665fa88aa4941080704645932a9/yarl-1.22.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:d947071e6ebcf2e2bee8fce76e10faca8f7a14808ca36a910263acaacef08eca", size = 95981, upload-time = "2025-10-06T14:11:48.845Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/5d/9a/2f65743589809af4d0a6d3aa749343c4b5f4c380cc24a8e94a3c6625a808/yarl-1.22.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:334b8721303e61b00019474cc103bdac3d7b1f65e91f0bfedeec2d56dfe74b53", size = 97303, upload-time = "2025-10-06T14:11:50.897Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/b0/ab/5b13d3e157505c43c3b43b5a776cbf7b24a02bc4cccc40314771197e3508/yarl-1.22.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1e7ce67c34138a058fd092f67d07a72b8e31ff0c9236e751957465a24b28910c", size = 361820, upload-time = "2025-10-06T14:11:52.549Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/fb/76/242a5ef4677615cf95330cfc1b4610e78184400699bdda0acb897ef5e49a/yarl-1.22.0-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:d77e1b2c6d04711478cb1c4ab90db07f1609ccf06a287d5607fcd90dc9863acf", size = 323203, upload-time = "2025-10-06T14:11:54.225Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/8c/96/475509110d3f0153b43d06164cf4195c64d16999e0c7e2d8a099adcd6907/yarl-1.22.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c4647674b6150d2cae088fc07de2738a84b8bcedebef29802cf0b0a82ab6face", size = 363173, upload-time = "2025-10-06T14:11:56.069Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/c9/66/59db471aecfbd559a1fd48aedd954435558cd98c7d0da8b03cc6c140a32c/yarl-1.22.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:efb07073be061c8f79d03d04139a80ba33cbd390ca8f0297aae9cce6411e4c6b", size = 373562, upload-time = "2025-10-06T14:11:58.783Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/03/1f/c5d94abc91557384719da10ff166b916107c1b45e4d0423a88457071dd88/yarl-1.22.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e51ac5435758ba97ad69617e13233da53908beccc6cfcd6c34bbed8dcbede486", size = 339828, upload-time = "2025-10-06T14:12:00.686Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/5f/97/aa6a143d3afba17b6465733681c70cf175af89f76ec8d9286e08437a7454/yarl-1.22.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:33e32a0dd0c8205efa8e83d04fc9f19313772b78522d1bdc7d9aed706bfd6138", size = 347551, upload-time = "2025-10-06T14:12:02.628Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/43/3c/45a2b6d80195959239a7b2a8810506d4eea5487dce61c2a3393e7fc3c52e/yarl-1.22.0-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:bf4a21e58b9cde0e401e683ebd00f6ed30a06d14e93f7c8fd059f8b6e8f87b6a", size = 334512, upload-time = "2025-10-06T14:12:04.871Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/86/a0/c2ab48d74599c7c84cb104ebd799c5813de252bea0f360ffc29d270c2caa/yarl-1.22.0-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:e4b582bab49ac33c8deb97e058cd67c2c50dac0dd134874106d9c774fd272529", size = 352400, upload-time = "2025-10-06T14:12:06.624Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/32/75/f8919b2eafc929567d3d8411f72bdb1a2109c01caaab4ebfa5f8ffadc15b/yarl-1.22.0-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:0b5bcc1a9c4839e7e30b7b30dd47fe5e7e44fb7054ec29b5bb8d526aa1041093", size = 357140, upload-time = "2025-10-06T14:12:08.362Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/cf/72/6a85bba382f22cf78add705d8c3731748397d986e197e53ecc7835e76de7/yarl-1.22.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:c0232bce2170103ec23c454e54a57008a9a72b5d1c3105dc2496750da8cfa47c", size = 341473, upload-time = "2025-10-06T14:12:10.994Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/35/18/55e6011f7c044dc80b98893060773cefcfdbf60dfefb8cb2f58b9bacbd83/yarl-1.22.0-cp314-cp314t-win32.whl", hash = "sha256:8009b3173bcd637be650922ac455946197d858b3630b6d8787aa9e5c4564533e", size = 89056, upload-time = "2025-10-06T14:12:13.317Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/f9/86/0f0dccb6e59a9e7f122c5afd43568b1d31b8ab7dda5f1b01fb5c7025c9a9/yarl-1.22.0-cp314-cp314t-win_amd64.whl", hash = "sha256:9fb17ea16e972c63d25d4a97f016d235c78dd2344820eb35bc034bc32012ee27", size = 96292, upload-time = "2025-10-06T14:12:15.398Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/48/b7/503c98092fb3b344a179579f55814b613c1fbb1c23b3ec14a7b008a66a6e/yarl-1.22.0-cp314-cp314t-win_arm64.whl", hash = "sha256:9f6d73c1436b934e3f01df1e1b21ff765cd1d28c77dfb9ace207f746d4610ee1", size = 85171, upload-time = "2025-10-06T14:12:16.935Z" },
                                                                  +    { url = "https://files.pythonhosted.org/packages/73/ae/b48f95715333080afb75a4504487cbe142cae1268afc482d06692d605ae6/yarl-1.22.0-py3-none-any.whl", hash = "sha256:1380560bdba02b6b6c90de54133c81c9f2a453dee9912fe58c1dcced1edb7cff", size = 46814, upload-time = "2025-10-06T14:12:53.872Z" },
                                                                  +]
                                                                  +
                                                                  +[[package]]
                                                                  +name = "zipp"
                                                                  +version = "3.23.0"
                                                                  +source = { registry = "https://pypi.org/simple" }
                                                                  +sdist = { url = "https://files.pythonhosted.org/packages/e3/02/0f2892c661036d50ede074e376733dca2ae7c6eb617489437771209d4180/zipp-3.23.0.tar.gz", hash = "sha256:a07157588a12518c9d4034df3fbbee09c814741a33ff63c05fa29d26a2404166", size = 25547, upload-time = "2025-06-08T17:06:39.4Z" }
                                                                  +wheels = [
                                                                  +    { url = "https://files.pythonhosted.org/packages/2e/54/647ade08bf0db230bfea292f893923872fd20be6ac6f53b2b936ba839d75/zipp-3.23.0-py3-none-any.whl", hash = "sha256:071652d6115ed432f5ce1d34c336c0adfd6a884660d1e9712a256d3d3bd4b14e", size = 10276, upload-time = "2025-06-08T17:06:38.034Z" },
                                                                  +]