From 1dd90679eeaed7d0bddc09bc8a77f43298db026f Mon Sep 17 00:00:00 2001 From: Enes Hoxha Date: Thu, 19 Dec 2024 12:53:08 +0100 Subject: [PATCH 1/8] [v1/feature/70]: Implementation of Cassandra Add new project Cortex.States.Cassandra, Implement CassandraStateStore, Add KeyspaceConfigration --- Cortex.sln | 6 + src/Cortex.States.Cassandra/Assets/cortex.png | Bin 0 -> 7179 bytes src/Cortex.States.Cassandra/Assets/license.md | 20 ++ .../CassandraStateStore.cs | 220 ++++++++++++++++++ .../Cortex.States.Cassandra.csproj | 58 +++++ .../KeyspaceConfiguration.cs | 40 ++++ 6 files changed, 344 insertions(+) create mode 100644 src/Cortex.States.Cassandra/Assets/cortex.png create mode 100644 src/Cortex.States.Cassandra/Assets/license.md create mode 100644 src/Cortex.States.Cassandra/CassandraStateStore.cs create mode 100644 src/Cortex.States.Cassandra/Cortex.States.Cassandra.csproj create mode 100644 src/Cortex.States.Cassandra/KeyspaceConfiguration.cs diff --git a/Cortex.sln b/Cortex.sln index cb35459..9e5cdea 100644 --- a/Cortex.sln +++ b/Cortex.sln @@ -33,6 +33,8 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Cortex.Tests", "src\Cortex. EndProject Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Cortex.Streams.Files", "src\Cortex.Streams.Files\Cortex.Streams.Files.csproj", "{D376D6CA-3192-4EDC-B840-31F58B6457DD}" EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Cortex.States.Cassandra", "src\Cortex.States.Cassandra\Cortex.States.Cassandra.csproj", "{447970B9-C5AA-41D9-A07F-330A251597D0}" +EndProject Global GlobalSection(SolutionConfigurationPlatforms) = preSolution Debug|Any CPU = Debug|Any CPU @@ -98,6 +100,10 @@ Global {D376D6CA-3192-4EDC-B840-31F58B6457DD}.Debug|Any CPU.Build.0 = Debug|Any CPU {D376D6CA-3192-4EDC-B840-31F58B6457DD}.Release|Any CPU.ActiveCfg = Release|Any CPU {D376D6CA-3192-4EDC-B840-31F58B6457DD}.Release|Any CPU.Build.0 = Release|Any CPU + {447970B9-C5AA-41D9-A07F-330A251597D0}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {447970B9-C5AA-41D9-A07F-330A251597D0}.Debug|Any CPU.Build.0 = Debug|Any CPU + {447970B9-C5AA-41D9-A07F-330A251597D0}.Release|Any CPU.ActiveCfg = Release|Any CPU + {447970B9-C5AA-41D9-A07F-330A251597D0}.Release|Any CPU.Build.0 = Release|Any CPU EndGlobalSection GlobalSection(SolutionProperties) = preSolution HideSolutionNode = FALSE diff --git a/src/Cortex.States.Cassandra/Assets/cortex.png b/src/Cortex.States.Cassandra/Assets/cortex.png new file mode 100644 index 0000000000000000000000000000000000000000..a4f9727d04f91f61f20720bd64b9c139fb041236 GIT binary patch literal 7179 zcmV+m9Q5OfP)Px#1ZP1_K>z@;j|==^1poj532;bRa{vGf5&!@T5&_cPe*6Fc8=y%C^C!)jU*}Z_TUZ5zkPS!zgzWpiKoUY0LP$tR!Xokt%Ag{H3XX^@B8wnv03972 z_hwv1kf6A*-1mH+Tg$Cm_f}O`27H#NQDPYKIv zcTCHy`fO@u)s1PH)sbnLH4!`}kZK|c%WX|xUTdf>GJS=u8BkWepa#-RXr-xDk(t@H zW@WFA%*qkfWGlBRGCQ}KE6>y#u7Wj@IeBX$a|&84%`IF@nqxf9Eo!w@+!~oz+$N|! zGOwgPGT%~1WPT~v)1{qUWzc%A^7WAg4s}HqRBUi)VHMOZs3)?px+juI+89Z!G1VJc zB&xQLlmylF3mS+d)%Qn|>-%q{Gz@%}+OT;?VqO2Az$`8yqwdcMnN^=m&#Gn9GOO9N zWmSgoa}7%%RkMWUH7sFyEou6S8a90e9_zTW>Pa(9t!!{;=E_xE+0bgztQ@G(RxZ@U zX6H7t+4KlCv)TDgY<6BVo1M3Y&B-^_!sZmT(Bo4DE$penwd|>awfwQDmCY?QRMf`i z7PlFX?QCvIJDW%9VDpMQ_+#lhHowFWelF`|^GiF~)8*^A%D5JkcX3sij~m#6$_@0m zu(F#itm@{El|3x6Y9mXm>S2l1c-+Vmt9w~uO)pzi-OCo$^sz;?rZ%x8Q+55MWT<|? 
zR>L4ErD2dQZhVHNG;H}~QR9|B$4$wo*gJiBEt{HI$)=I2xEuhhIXqJqL=&J0vCala z5N(h%O%XsY0Gvq>4G}=VGamuY)|~uiL2C?vYdAnt09d$|;L#&sk_?c=ZERj~yDb1L z>0t9s0idC$%Q^*_5G9Z#17M~3$VC820LaAwn(8AZK>|nur~sJ;c=dyOfYHG-0D8c)!3BgD z0ANym|IN6m>E(B(W>gS3OJ2?cFb6y?Pk02*2B>qM3!dyehx-I@f^^Pv!7~6xXt|F=bO5v<25_GNQSUxC#H8AO4sJ?%xdNQ*J~@1Hcqd#$aK3;6*i@;(*!qp zN$K#ErDYCqdVq%egycmAs0*E1fQrN=5J1U&?wU^l&=op;GEa^BbZsAjqqfh>eP;6F z)Ao6|Z&6L30M3*Oo+ETR-KT=*2%XxtPvO3(3wF_Ye(o~>8t!w1PapS14NvJlvF#Jx zr)&H4@D^1Y;3h9EV~`Hsl#Ft1yQhN}IO}{ANsFp%P!{Yu=K-8E>wGlM^MYqOFId^& z2%iLQaft=a3!W})l0KJV3MhJsTZ0AEbxpuK9u{Elz-8;ROww zR?TOCTT;p_`2Zw{iqPrGJT3QmCoihd8BOM?bf27cD&6OV=tx|mt@-5SMRuQ8=yZio zXXvycikWBcaNHyroC=-<(Ct1seEPUg8#$OY0`0p9-Gr zJ{g`@!7jLOL4{YlXVrYeag)-D-QWax7C_l`CNRIYFQbZ{KY1z6$Kr!#EgZ*Wg$`|~Vo!z(PSdoO8a_bY<-k(PJvp>_%Gv>m7eXoO8y z=v38xA~-=V_sQW?)$U2~rleO;=quWCh;^O(o^87Do2U9N{K`5{e9LlrUu6j^#7=7= z^K^|iM3;Gn?{VS6|5NG*Skr-zS?{?!Y}3WxJ=J&q7q<4T&slowP6s>-T#)3&a-QwJ zg;hNOSL6bxgy$1FEqErlrDbgSx;<>mwFhkL2aIjG<}W;N@XCF*V*N{$o67aDm_{0CMM*lVjE#{Dj{8=BvSl58QYDE=yg#mCogYr)>Me!L!;uW!7o8S@;_g zl7_uQY-^OC2f{JS*1bsWqcU+(hEBQd6W{?{d}`5fJiw)yk^ly7`)qjlJuY(RJKqrq z`GfcguyO3dif(EzUGOZ31~>troplQEFvkURT|{^Q=UekNvpM)VZw)KievEA%dfrbnvSA!cLKpGf(cmS-Gnz;po5gd(_bb!i_sP^nlvAr;IF94NvVp2S5{E zIe{x2j!!EhSb%2%93eco6oY=f@81m-UN#$TMCJ2m2ny;!--6vrS|s-=;JMtVsQK{x z#ZBAU#4ZIzS;aOu?=L=xFyyPUm3?Au_nm}G zSbMn7S+E-pEG2LS!||zwHaG#E*!EfQ6af9;A!OnMw7>mzcz7Q$)_Ul3`k>r-s{YQf z&a>g+c?iv$uRaJBBI%f+2drT5@6;XkD29qGXyza-z)iHk1%yWcqj%nyCZ5l%AP$=9nz{{A|*f(qAK$Qw5&SJ zo?;tL{umCL@!pybd`vwo2)C-yhLO6Dw|gDjaV9t?JOP~4_C;OuneH?GpWBB}^e|le zag}$AW%vAz`a|7QB%*g8o`;LG_RW8Vt~&u6od!uYy_Dq0U1st~gFVI<0$vHgCFeW9 zY2hhCr&r?Qf@c7ngiJHNoWjXST(|Mu9k#gnSvqH6=P3$yBu=={wJ%+went7Uf(X!h z-~WXzZrVoeq^hse9xCz}qzwws23No)rt09S0S1Am1V|yXgw4pRV@44sR;1jC7Xs)LiHjQyTH$)pCo|DzJmknGB*FnHD z$os71*<)0+QxUHkDiUUvsUTYIo+5d105l-ZFV=!{!t(V?P4YG}AheuQ< z)a-emPmxfTfpUQ9H^!gPggt-s-G2;kgHCgTT|RX;9RB!N9ozB9ZX| zC?+lh(FY)Yh70OmycjxJ!EqZ-{m7DPHj(pOlXI8VSTHMgo+MZULl4-%l?OEJ5)_`9 z(Pp&nqIS~RQSCoP!Z}bY&#D&NJyVLBFFJ5unWq3x0WgUB;?oN0L#}=Ev(RZd;I$w5 znl1!QRFdFfPE1bke%62S4+_m(COm)VAtDT1xySPR-{571QM}KRM;lZ_MXEts6QFA; zw9b8|16!j67Zjc@boz9c;XFw15r)DzZxBWORmK|j4NgF5PGS zm+!Fw$N|sWdE)pWr0VN*zzZ}*!U@j?XipDx^|lm-0Y}oo696jUd1alV^A}LqXi93!NSS@!Xt^``O^oeP4KkLkM3S8lMvPE8BGbE_G&O zSOp#2xInA-f57@K{6;+-NMz8F40r&k255&)Kx;ev74^(xYZ0-#OziKJ0jff0xXe@P zaVZ!nI7}*&F!yQT0hY^wYabyvt@HHoT#1VeFSU>ga2wjm_3!?`+K+t0I!}Dhx=#K`zsKJ#c;`7hI8_PnDx<%n&g8Sr z!jJQb?hEfc?!GyX3>@hZz>@&tSiDhOd}r?l!k8HzeONU`<1)T0q+4D7<$MGH@|7DY0!)`sNf0U{Nc%2ryo3k zYk8F5MCl*~1Sr76`LGIe<9oNQf*FoLRtTr`T28yoHk`i6)*H~)9sibok2Z4f@_qjM zf^u?^wsN8?5{EPmb(x)1}`npzBZk zK+o?!^Aiu9M%Xmr;kebWen8VL(9`1XsMZV>1%xNpe5UigK7Hn?bf27c3htXV*5HC9E-H8v zQVLjNO&@DL^o0>3BNXanKw4qK>g9W^^Tc=LJcQTM9q-Uo1{xL`!*%Sgf?ZrmYrvb8 z+cXy7py=U=p_6`}TEx;@o?~r?zoa&5vz(0h=^8Jt0lwzN^VD`=`IvGYy9CcYNNZ1# za5+x{FL2wZSWPsB;6&*lx&d+-0FD2b?l|twAOi|v6&dNmiVZZ|T3k`<4bVGuqCYdJ z`@8_k?z7syoW@uNr-o;`58)D7R_oy}Jo_{KgdP}_8OU5QZBX8Xz_ovi1TR?ld~^kS z41qHh0GVslES5%vl{{;4J9? 
z7P1M+d2D6RJ~ntY+3(wPe{{`Su0WFVqlEuC$BTYv9L^HWA zV=cfwTA*TX1!4iyWSsz~+Unn3^U3i1O9l&`0dU5D1Govv`7A!Igw^i37<#IBw8HCP z?j*u8k`;R!kP)~~129qxa52`At{d)Zax&K5Asn8Q`3=YVpf$Fp2mId zn$OF9qVs~)e6bBKxq#X=EbZ;T9BwrYg!V9OB7$1h9~cut4myz&XdXp z^=9L-3+~B9IkX3H&zYN{lM#fxhJ9CQur&;L90Tapf5mE`YBpKtJeQ=5Q-bFRn-)NQ z$w1%)fYVpj#U{8XlXBUtye8IuG!i-)!EsHmen=MzdtuU+P9foKzIvbK558e7ht_UB zr)c*`8h~Ll&*@n)2o7x;QgQ6#720W7t$Hj%+YBPyqKm9x;Em8B6YwzKWogTE)WI!p zf2D`#?>?6U1GsN`R())NL$3&W{mY*_75XCcjPPfcTt7(R7TshxHzwFt?LHU!W-wRj zV4J9ksYP0NYJfI8ch;$cXE@JFUL4MgA#lhblk-?My0eDvg$ipBdi$}Z?h}i09o=O( z|BS2#w&smbL&L*Ofl+hJCPsg$1y2TOZg;E*oBUf+)HRyMd|DWY2|=M9{Q(l5YZS8LvDQJ_zf6|OeXj5+*L)g) zCd7mlF#-xlnb`aj=d?B?5Z$K>of`KAtogTb+Y7WWR~Tq?%(aKKqYuKZ ze15HjaXz$n*kl{i`@+JbLjT}DDNG|2b8rFSxfEcKmbUw-9vg5^BxchW*!8~8SJTKB z!%sM7>-C4MY{v--d3G`qPy)Os5_4#lSI?QBLc_yx)w|Ep#mD;*UFQqWbBctVbsBBo zSb)P_j&;qLfgHAdL3@bRGzc3wpZprZh4v0*LLSZ0#H69H?+M2h48BP-(5#TDa-XW* zbHTISr)>K&s~#^nTx3`si{QiV--Qc=wE~#Y>&zfS%1BnQlq;8Zh7C6Xi4VZuW0~!{ z>AVhjDhEpLQ#(-nT2sM&j}sj3YV`9}>^e0fXjmVBFOqmN(H|Nw#QB(>{5M$u7oS>8 zAunvmL^*?nBiObd=hVT|g-#2e!g-FwCF3!IgWGcZUgLLjm^<3oUJzfrFoD7&ptxVh z5ruBAj_xl+kEh!p>HlED{XIoMcrbKOj8F>0Z`>UN7C}Rz#)TNx@HGm z_%O)`2YsK~5c2}duOSOJ3cmJiaOe-#eDG81;|tOr;yD=g#Kkc z-20a~LZ`0g)3$vTqYn;uI(7@fnzXP*HZDZ%ix=qw2MK$~TCs6=&zakFEL6GcG_4N^ zbo`=R+A1F#0*vH6m+w4j?BnU&ghvT48uz7-GB`lPLWS11z6c$9Z~?HKDj~Bf&_$=( zET@rr@y%^EXe9gELzdI`nsJU`j>R`n4PJQ=DzxJmEJnd=B4nB?YhCbE;Zp@qRC@X7 zf#cm-6|DZ{%OeUoSgVcpg12k#*Kxe2(Hnv-4P~d8S>KX1&jw0t%=2Musm)mvHl=WbI^evqCKK8=qryV7NafQRsjtQ3W}d=6Ro47iAO zFNgnDF1Wt!@Yif+b|VeIn(&kWgSv0jz$NF?8^bPkVLmXqex(#`AKi z`wK~oR|13!+4|O3Y<6z5f2L`M`|MS0?V-=ZM~>@^zAKe?Zaa5SQUdgWH)XWJVdJwf zj>8AVG;r=*A85F^iP^@Qf?#88+x~%X47p5=KGq}1=aYr37~ilFcDxYBEUN9JbBX)6 zXu+w%rwDFo*(iZSm`TZ}FRG39F2(cFV~;m))?!|QBe11=*U29OK{JKtqW$9M3HCbp zE;;P663xws&x7qe(9=UniemR9EkN0MCcsezXAF=wzV;!#dB{x7l`@{weDD*RfEpx& zgd4GL41=)Ge55$1hY&IW0N)s54l(%&pNW5a<8SPlf4I*n3(whf;TM+C{=BfNR@&-c z0Z|9ghFCg!;2>Ol%u>ScbCu7Zrf>Jq2|KT@=z5vWSWy=U8l68mk2WQ(*>m1?zChZ2 zj=uX8#d(mB%p*V{jck-`dz)45R-Ds1_gPl5?Fh{Z11xkL$U8}dcHfdw1_$9T#RcXI z?)Ado@w^}zq$v9GUGeUXR>!P_YGw zSNL?mTU;^@ZuF#srv_-c&kLRfa2(udO9wAN=+wX)2lrT|f+qlUgiSki>flWp2lqIo zb6*sp(}b8-JPz)$OR)`z4xZM1odhs- z99)b@2TuSf!W#z{Lki+N6JToLIJnqS0C?lzVpv-D84y!AxI3-`#}y+|!6R^Y0Pd#y zfN{l+6z)qYycst!Iqv|5(%lD)D<&j`P4t{hOer`JH!(H)FB4OWzCm&4I%r%mq(a)B z1FR;d7W^d+|DH_Bc_Kcw;Pv?A{9DxKAz>L;Ea?u|fYF4My#E2S{{wP}i{H;-Y-|7k N002ovPDHLkV1i-n!nFVZ literal 0 HcmV?d00001 diff --git a/src/Cortex.States.Cassandra/Assets/license.md b/src/Cortex.States.Cassandra/Assets/license.md new file mode 100644 index 0000000..530f621 --- /dev/null +++ b/src/Cortex.States.Cassandra/Assets/license.md @@ -0,0 +1,20 @@ +The MIT License (MIT) + +Copyright (c) 2024 Buildersoft + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/src/Cortex.States.Cassandra/CassandraStateStore.cs b/src/Cortex.States.Cassandra/CassandraStateStore.cs new file mode 100644 index 0000000..2c358eb --- /dev/null +++ b/src/Cortex.States.Cassandra/CassandraStateStore.cs @@ -0,0 +1,220 @@ +using Cassandra; +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using System.Text.Json; +using System.Threading; +using System.Threading.Tasks; + +namespace Cortex.States.Cassandra +{ + public class CassandraStateStore : IStateStore + { + private readonly ISession _session; + private readonly string _keyspace; + private readonly string _tableName; + private readonly PreparedStatement _getStatement; + private readonly PreparedStatement _putStatement; + private readonly PreparedStatement _removeStatement; + private readonly PreparedStatement _getAllStatement; + private readonly PreparedStatement _getKeysStatement; + private readonly Func _keySerializer; + private readonly Func _valueSerializer; + private readonly Func _keyDeserializer; + private readonly Func _valueDeserializer; + + + // SemaphoreSlim for initialization synchronization + private static readonly SemaphoreSlim _initializationLock = new SemaphoreSlim(1, 1); + + // Flag to track initialization status + private volatile bool _isInitialized; + + // Cancellation token source for cleanup + private readonly CancellationTokenSource _cancellationTokenSource; + + public string Name { get; } + + + /// + /// Initializes a new instance of the CassandraStateStore. + /// + /// Name of the state store + /// Keyspace name + /// Table name + /// Cassandra session + /// Optional keyspace configuration + /// Read consistency level + /// Write consistency level + public CassandraStateStore( + string name, + string keyspace, + string tableName, + ISession session, + KeyspaceConfiguration keyspaceConfig = null, + ConsistencyLevel? readConsistency = null, + ConsistencyLevel? writeConsistency = null) + { + Name = name ?? throw new ArgumentNullException(nameof(name)); + _session = session ?? throw new ArgumentNullException(nameof(session)); + _keyspace = keyspace ?? throw new ArgumentNullException(nameof(keyspace)); + _tableName = tableName ?? throw new ArgumentNullException(nameof(tableName)); + _cancellationTokenSource = new CancellationTokenSource(); + + // Initialize with the provided configuration or default + InitializeAsync(keyspaceConfig ?? new KeyspaceConfiguration()).GetAwaiter().GetResult(); + + // Prepare statements with specified consistency levels + _getStatement = _session.Prepare( + $"SELECT value FROM {_keyspace}.{_tableName} WHERE key = ?") + .SetConsistencyLevel(readConsistency ?? ConsistencyLevel.Quorum); + + _putStatement = _session.Prepare( + $"INSERT INTO {_keyspace}.{_tableName} (key, value) VALUES (?, ?)") + .SetConsistencyLevel(writeConsistency ?? ConsistencyLevel.Quorum); + + _removeStatement = _session.Prepare( + $"DELETE FROM {_keyspace}.{_tableName} WHERE key = ?") + .SetConsistencyLevel(writeConsistency ?? ConsistencyLevel.Quorum); + + _getAllStatement = _session.Prepare( + $"SELECT key, value FROM {_keyspace}.{_tableName}") + .SetConsistencyLevel(readConsistency ?? 
ConsistencyLevel.Quorum); + + _getKeysStatement = _session.Prepare( + $"SELECT key FROM {_keyspace}.{_tableName}") + .SetConsistencyLevel(readConsistency ?? ConsistencyLevel.Quorum); + + _keySerializer = key => JsonSerializer.Serialize(key); + _valueSerializer = value => JsonSerializer.Serialize(value); + _keyDeserializer = str => JsonSerializer.Deserialize(str); + _valueDeserializer = str => JsonSerializer.Deserialize(str); + } + + private async Task InitializeAsync(KeyspaceConfiguration config) + { + if (_isInitialized) return; + + await _initializationLock.WaitAsync(); + try + { + if (_isInitialized) return; + + // Create keyspace using the provided configuration + var createKeyspaceQuery = config.GenerateCreateKeyspaceCql(_keyspace); + await _session.ExecuteAsync(new SimpleStatement(createKeyspaceQuery)); + + // Create table if it doesn't exist + await _session.ExecuteAsync(new SimpleStatement( + $@"CREATE TABLE IF NOT EXISTS {_keyspace}.{_tableName} ( + key text PRIMARY KEY, + value text + )")); + + _isInitialized = true; + } + finally + { + _initializationLock.Release(); + } + } + + public TValue Get(TKey key) + { + EnsureInitialized(); + + var serializedKey = _keySerializer(key); + var boundStatement = _getStatement.Bind(serializedKey); + + // Cassandra driver handles thread safety for execute operations + var row = _session.Execute(boundStatement).FirstOrDefault(); + + if (row == null) + return default; + + var serializedValue = row.GetValue("value"); + return _valueDeserializer(serializedValue); + } + + public void Put(TKey key, TValue value) + { + EnsureInitialized(); + + var serializedKey = _keySerializer(key); + var serializedValue = _valueSerializer(value); + var boundStatement = _putStatement.Bind(serializedKey, serializedValue); + + _session.Execute(boundStatement); + } + + public bool ContainsKey(TKey key) + { + EnsureInitialized(); + + var serializedKey = _keySerializer(key); + var boundStatement = _getStatement.Bind(serializedKey); + var row = _session.Execute(boundStatement).FirstOrDefault(); + return row != null; + } + + public void Remove(TKey key) + { + EnsureInitialized(); + + var serializedKey = _keySerializer(key); + var boundStatement = _removeStatement.Bind(serializedKey); + _session.Execute(boundStatement); + } + + public IEnumerable> GetAll() + { + EnsureInitialized(); + + var boundStatement = _getAllStatement.Bind(); + + // Execute and materialize results to avoid timeout issues during enumeration + var rows = _session.Execute(boundStatement).ToList(); + + foreach (var row in rows) + { + var serializedKey = row.GetValue("key"); + var serializedValue = row.GetValue("value"); + var key = _keyDeserializer(serializedKey); + var value = _valueDeserializer(serializedValue); + yield return new KeyValuePair(key, value); + } + } + + public IEnumerable GetKeys() + { + EnsureInitialized(); + + var boundStatement = _getKeysStatement.Bind(); + + // Execute and materialize results to avoid timeout issues during enumeration + var rows = _session.Execute(boundStatement).ToList(); + + foreach (var row in rows) + { + var serializedKey = row.GetValue("key"); + yield return _keyDeserializer(serializedKey); + } + } + + private void EnsureInitialized() + { + if (!_isInitialized) + { + throw new InvalidOperationException("CassandraStateStore is not properly initialized."); + } + } + + public void Dispose() + { + _cancellationTokenSource.Cancel(); + _cancellationTokenSource.Dispose(); + _initializationLock.Dispose(); + } + } +} diff --git 
a/src/Cortex.States.Cassandra/Cortex.States.Cassandra.csproj b/src/Cortex.States.Cassandra/Cortex.States.Cassandra.csproj new file mode 100644 index 0000000..30b781c --- /dev/null +++ b/src/Cortex.States.Cassandra/Cortex.States.Cassandra.csproj @@ -0,0 +1,58 @@ + + + + net9.0;net8.0;net7.0 + + 1.0.1 + 1.0.1 + Buildersoft Cortex Framework + Buildersoft + Buildersoft,EnesHoxha + Copyright © Buildersoft 2024 + + Cortex Data Framework is a robust, extensible platform designed to facilitate real-time data streaming, processing, and state management. It provides developers with a comprehensive suite of tools and libraries to build scalable, high-performance data pipelines tailored to diverse use cases. By abstracting underlying streaming technologies and state management solutions, Cortex Data Framework enables seamless integration, simplified development workflows, and enhanced maintainability for complex data-driven applications. + + + https://github.com/buildersoftio/cortex + cortex mediator eda streaming distributed streams states cassandra + + 1.0.1 + license.md + cortex.png + Cortex.States.Cassandra + True + True + True + + Just as the Cortex in our brains handles complex processing efficiently, Cortex Data Framework brings brainpower to your data management! + https://buildersoft.io/ + README.md + + + + + + + + + + + + + + + + True + \ + + + True + + + + True + + + + + diff --git a/src/Cortex.States.Cassandra/KeyspaceConfiguration.cs b/src/Cortex.States.Cassandra/KeyspaceConfiguration.cs new file mode 100644 index 0000000..b361a4c --- /dev/null +++ b/src/Cortex.States.Cassandra/KeyspaceConfiguration.cs @@ -0,0 +1,40 @@ +using System.Collections.Generic; +using System.Linq; + +namespace Cortex.States.Cassandra +{ + public class KeyspaceConfiguration + { + public string ReplicationStrategy { get; set; } = "SimpleStrategy"; + public int ReplicationFactor { get; set; } = 1; + public Dictionary ReplicationOptions { get; set; } + public bool DurableWrites { get; set; } = true; + + public string GenerateCreateKeyspaceCql(string keyspaceName) + { + var replicationConfig = new Dictionary + { + { "class", ReplicationStrategy } + }; + + if (ReplicationStrategy == "SimpleStrategy") + { + replicationConfig.Add("replication_factor", ReplicationFactor.ToString()); + } + else if (ReplicationStrategy == "NetworkTopologyStrategy" && ReplicationOptions != null) + { + foreach (var option in ReplicationOptions) + { + replicationConfig.Add(option.Key, option.Value); + } + } + + var replicationString = string.Join(", ", + replicationConfig.Select(kv => $"'{kv.Key}': '{kv.Value}'")); + + return $@"CREATE KEYSPACE IF NOT EXISTS {keyspaceName} + WITH replication = {{{replicationString}}} + AND durable_writes = {DurableWrites.ToString().ToLower()}"; + } + } +} From e65f178a2e1fb96afb535406fb5501814f546f4e Mon Sep 17 00:00:00 2001 From: Enes Hoxha Date: Thu, 19 Dec 2024 13:28:08 +0100 Subject: [PATCH 2/8] [v1/feature/71]: Add Support for MongoDb Create new project Cortex.States.MongoDb, Implement MongoDbStateStore, Add MongoStateEntity --- Cortex.sln | 6 + src/Cortex.States.MongoDb/Assets/cortex.png | Bin 0 -> 7179 bytes src/Cortex.States.MongoDb/Assets/license.md | 20 +++ .../Cortex.States.MongoDb.csproj | 56 ++++++++ .../MongoDbStateStore.cs | 123 ++++++++++++++++++ src/Cortex.States.MongoDb/MongoStateEntry.cs | 19 +++ 6 files changed, 224 insertions(+) create mode 100644 src/Cortex.States.MongoDb/Assets/cortex.png create mode 100644 src/Cortex.States.MongoDb/Assets/license.md create mode 100644 
src/Cortex.States.MongoDb/Cortex.States.MongoDb.csproj create mode 100644 src/Cortex.States.MongoDb/MongoDbStateStore.cs create mode 100644 src/Cortex.States.MongoDb/MongoStateEntry.cs diff --git a/Cortex.sln b/Cortex.sln index cb35459..e93b042 100644 --- a/Cortex.sln +++ b/Cortex.sln @@ -33,6 +33,8 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Cortex.Tests", "src\Cortex. EndProject Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Cortex.Streams.Files", "src\Cortex.Streams.Files\Cortex.Streams.Files.csproj", "{D376D6CA-3192-4EDC-B840-31F58B6457DD}" EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Cortex.States.MongoDb", "src\Cortex.States.MongoDb\Cortex.States.MongoDb.csproj", "{00358701-D117-4953-A673-D60625D38466}" +EndProject Global GlobalSection(SolutionConfigurationPlatforms) = preSolution Debug|Any CPU = Debug|Any CPU @@ -98,6 +100,10 @@ Global {D376D6CA-3192-4EDC-B840-31F58B6457DD}.Debug|Any CPU.Build.0 = Debug|Any CPU {D376D6CA-3192-4EDC-B840-31F58B6457DD}.Release|Any CPU.ActiveCfg = Release|Any CPU {D376D6CA-3192-4EDC-B840-31F58B6457DD}.Release|Any CPU.Build.0 = Release|Any CPU + {00358701-D117-4953-A673-D60625D38466}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {00358701-D117-4953-A673-D60625D38466}.Debug|Any CPU.Build.0 = Debug|Any CPU + {00358701-D117-4953-A673-D60625D38466}.Release|Any CPU.ActiveCfg = Release|Any CPU + {00358701-D117-4953-A673-D60625D38466}.Release|Any CPU.Build.0 = Release|Any CPU EndGlobalSection GlobalSection(SolutionProperties) = preSolution HideSolutionNode = FALSE diff --git a/src/Cortex.States.MongoDb/Assets/cortex.png b/src/Cortex.States.MongoDb/Assets/cortex.png new file mode 100644 index 0000000000000000000000000000000000000000..a4f9727d04f91f61f20720bd64b9c139fb041236 GIT binary patch literal 7179 zcmV+m9Q5OfP)Px#1ZP1_K>z@;j|==^1poj532;bRa{vGf5&!@T5&_cPe*6Fc8=y%C^C!)jU*}Z_TUZ5zkPS!zgzWpiKoUY0LP$tR!Xokt%Ag{H3XX^@B8wnv03972 z_hwv1kf6A*-1mH+Tg$Cm_f}O`27H#NQDPYKIv zcTCHy`fO@u)s1PH)sbnLH4!`}kZK|c%WX|xUTdf>GJS=u8BkWepa#-RXr-xDk(t@H zW@WFA%*qkfWGlBRGCQ}KE6>y#u7Wj@IeBX$a|&84%`IF@nqxf9Eo!w@+!~oz+$N|! zGOwgPGT%~1WPT~v)1{qUWzc%A^7WAg4s}HqRBUi)VHMOZs3)?px+juI+89Z!G1VJc zB&xQLlmylF3mS+d)%Qn|>-%q{Gz@%}+OT;?VqO2Az$`8yqwdcMnN^=m&#Gn9GOO9N zWmSgoa}7%%RkMWUH7sFyEou6S8a90e9_zTW>Pa(9t!!{;=E_xE+0bgztQ@G(RxZ@U zX6H7t+4KlCv)TDgY<6BVo1M3Y&B-^_!sZmT(Bo4DE$penwd|>awfwQDmCY?QRMf`i z7PlFX?QCvIJDW%9VDpMQ_+#lhHowFWelF`|^GiF~)8*^A%D5JkcX3sij~m#6$_@0m zu(F#itm@{El|3x6Y9mXm>S2l1c-+Vmt9w~uO)pzi-OCo$^sz;?rZ%x8Q+55MWT<|? 
zR>L4ErD2dQZhVHNG;H}~QR9|B$4$wo*gJiBEt{HI$)=I2xEuhhIXqJqL=&J0vCala z5N(h%O%XsY0Gvq>4G}=VGamuY)|~uiL2C?vYdAnt09d$|;L#&sk_?c=ZERj~yDb1L z>0t9s0idC$%Q^*_5G9Z#17M~3$VC820LaAwn(8AZK>|nur~sJ;c=dyOfYHG-0D8c)!3BgD z0ANym|IN6m>E(B(W>gS3OJ2?cFb6y?Pk02*2B>qM3!dyehx-I@f^^Pv!7~6xXt|F=bO5v<25_GNQSUxC#H8AO4sJ?%xdNQ*J~@1Hcqd#$aK3;6*i@;(*!qp zN$K#ErDYCqdVq%egycmAs0*E1fQrN=5J1U&?wU^l&=op;GEa^BbZsAjqqfh>eP;6F z)Ao6|Z&6L30M3*Oo+ETR-KT=*2%XxtPvO3(3wF_Ye(o~>8t!w1PapS14NvJlvF#Jx zr)&H4@D^1Y;3h9EV~`Hsl#Ft1yQhN}IO}{ANsFp%P!{Yu=K-8E>wGlM^MYqOFId^& z2%iLQaft=a3!W})l0KJV3MhJsTZ0AEbxpuK9u{Elz-8;ROww zR?TOCTT;p_`2Zw{iqPrGJT3QmCoihd8BOM?bf27cD&6OV=tx|mt@-5SMRuQ8=yZio zXXvycikWBcaNHyroC=-<(Ct1seEPUg8#$OY0`0p9-Gr zJ{g`@!7jLOL4{YlXVrYeag)-D-QWax7C_l`CNRIYFQbZ{KY1z6$Kr!#EgZ*Wg$`|~Vo!z(PSdoO8a_bY<-k(PJvp>_%Gv>m7eXoO8y z=v38xA~-=V_sQW?)$U2~rleO;=quWCh;^O(o^87Do2U9N{K`5{e9LlrUu6j^#7=7= z^K^|iM3;Gn?{VS6|5NG*Skr-zS?{?!Y}3WxJ=J&q7q<4T&slowP6s>-T#)3&a-QwJ zg;hNOSL6bxgy$1FEqErlrDbgSx;<>mwFhkL2aIjG<}W;N@XCF*V*N{$o67aDm_{0CMM*lVjE#{Dj{8=BvSl58QYDE=yg#mCogYr)>Me!L!;uW!7o8S@;_g zl7_uQY-^OC2f{JS*1bsWqcU+(hEBQd6W{?{d}`5fJiw)yk^ly7`)qjlJuY(RJKqrq z`GfcguyO3dif(EzUGOZ31~>troplQEFvkURT|{^Q=UekNvpM)VZw)KievEA%dfrbnvSA!cLKpGf(cmS-Gnz;po5gd(_bb!i_sP^nlvAr;IF94NvVp2S5{E zIe{x2j!!EhSb%2%93eco6oY=f@81m-UN#$TMCJ2m2ny;!--6vrS|s-=;JMtVsQK{x z#ZBAU#4ZIzS;aOu?=L=xFyyPUm3?Au_nm}G zSbMn7S+E-pEG2LS!||zwHaG#E*!EfQ6af9;A!OnMw7>mzcz7Q$)_Ul3`k>r-s{YQf z&a>g+c?iv$uRaJBBI%f+2drT5@6;XkD29qGXyza-z)iHk1%yWcqj%nyCZ5l%AP$=9nz{{A|*f(qAK$Qw5&SJ zo?;tL{umCL@!pybd`vwo2)C-yhLO6Dw|gDjaV9t?JOP~4_C;OuneH?GpWBB}^e|le zag}$AW%vAz`a|7QB%*g8o`;LG_RW8Vt~&u6od!uYy_Dq0U1st~gFVI<0$vHgCFeW9 zY2hhCr&r?Qf@c7ngiJHNoWjXST(|Mu9k#gnSvqH6=P3$yBu=={wJ%+went7Uf(X!h z-~WXzZrVoeq^hse9xCz}qzwws23No)rt09S0S1Am1V|yXgw4pRV@44sR;1jC7Xs)LiHjQyTH$)pCo|DzJmknGB*FnHD z$os71*<)0+QxUHkDiUUvsUTYIo+5d105l-ZFV=!{!t(V?P4YG}AheuQ< z)a-emPmxfTfpUQ9H^!gPggt-s-G2;kgHCgTT|RX;9RB!N9ozB9ZX| zC?+lh(FY)Yh70OmycjxJ!EqZ-{m7DPHj(pOlXI8VSTHMgo+MZULl4-%l?OEJ5)_`9 z(Pp&nqIS~RQSCoP!Z}bY&#D&NJyVLBFFJ5unWq3x0WgUB;?oN0L#}=Ev(RZd;I$w5 znl1!QRFdFfPE1bke%62S4+_m(COm)VAtDT1xySPR-{571QM}KRM;lZ_MXEts6QFA; zw9b8|16!j67Zjc@boz9c;XFw15r)DzZxBWORmK|j4NgF5PGS zm+!Fw$N|sWdE)pWr0VN*zzZ}*!U@j?XipDx^|lm-0Y}oo696jUd1alV^A}LqXi93!NSS@!Xt^``O^oeP4KkLkM3S8lMvPE8BGbE_G&O zSOp#2xInA-f57@K{6;+-NMz8F40r&k255&)Kx;ev74^(xYZ0-#OziKJ0jff0xXe@P zaVZ!nI7}*&F!yQT0hY^wYabyvt@HHoT#1VeFSU>ga2wjm_3!?`+K+t0I!}Dhx=#K`zsKJ#c;`7hI8_PnDx<%n&g8Sr z!jJQb?hEfc?!GyX3>@hZz>@&tSiDhOd}r?l!k8HzeONU`<1)T0q+4D7<$MGH@|7DY0!)`sNf0U{Nc%2ryo3k zYk8F5MCl*~1Sr76`LGIe<9oNQf*FoLRtTr`T28yoHk`i6)*H~)9sibok2Z4f@_qjM zf^u?^wsN8?5{EPmb(x)1}`npzBZk zK+o?!^Aiu9M%Xmr;kebWen8VL(9`1XsMZV>1%xNpe5UigK7Hn?bf27c3htXV*5HC9E-H8v zQVLjNO&@DL^o0>3BNXanKw4qK>g9W^^Tc=LJcQTM9q-Uo1{xL`!*%Sgf?ZrmYrvb8 z+cXy7py=U=p_6`}TEx;@o?~r?zoa&5vz(0h=^8Jt0lwzN^VD`=`IvGYy9CcYNNZ1# za5+x{FL2wZSWPsB;6&*lx&d+-0FD2b?l|twAOi|v6&dNmiVZZ|T3k`<4bVGuqCYdJ z`@8_k?z7syoW@uNr-o;`58)D7R_oy}Jo_{KgdP}_8OU5QZBX8Xz_ovi1TR?ld~^kS z41qHh0GVslES5%vl{{;4J9? 
z7P1M+d2D6RJ~ntY+3(wPe{{`Su0WFVqlEuC$BTYv9L^HWA zV=cfwTA*TX1!4iyWSsz~+Unn3^U3i1O9l&`0dU5D1Govv`7A!Igw^i37<#IBw8HCP z?j*u8k`;R!kP)~~129qxa52`At{d)Zax&K5Asn8Q`3=YVpf$Fp2mId zn$OF9qVs~)e6bBKxq#X=EbZ;T9BwrYg!V9OB7$1h9~cut4myz&XdXp z^=9L-3+~B9IkX3H&zYN{lM#fxhJ9CQur&;L90Tapf5mE`YBpKtJeQ=5Q-bFRn-)NQ z$w1%)fYVpj#U{8XlXBUtye8IuG!i-)!EsHmen=MzdtuU+P9foKzIvbK558e7ht_UB zr)c*`8h~Ll&*@n)2o7x;QgQ6#720W7t$Hj%+YBPyqKm9x;Em8B6YwzKWogTE)WI!p zf2D`#?>?6U1GsN`R())NL$3&W{mY*_75XCcjPPfcTt7(R7TshxHzwFt?LHU!W-wRj zV4J9ksYP0NYJfI8ch;$cXE@JFUL4MgA#lhblk-?My0eDvg$ipBdi$}Z?h}i09o=O( z|BS2#w&smbL&L*Ofl+hJCPsg$1y2TOZg;E*oBUf+)HRyMd|DWY2|=M9{Q(l5YZS8LvDQJ_zf6|OeXj5+*L)g) zCd7mlF#-xlnb`aj=d?B?5Z$K>of`KAtogTb+Y7WWR~Tq?%(aKKqYuKZ ze15HjaXz$n*kl{i`@+JbLjT}DDNG|2b8rFSxfEcKmbUw-9vg5^BxchW*!8~8SJTKB z!%sM7>-C4MY{v--d3G`qPy)Os5_4#lSI?QBLc_yx)w|Ep#mD;*UFQqWbBctVbsBBo zSb)P_j&;qLfgHAdL3@bRGzc3wpZprZh4v0*LLSZ0#H69H?+M2h48BP-(5#TDa-XW* zbHTISr)>K&s~#^nTx3`si{QiV--Qc=wE~#Y>&zfS%1BnQlq;8Zh7C6Xi4VZuW0~!{ z>AVhjDhEpLQ#(-nT2sM&j}sj3YV`9}>^e0fXjmVBFOqmN(H|Nw#QB(>{5M$u7oS>8 zAunvmL^*?nBiObd=hVT|g-#2e!g-FwCF3!IgWGcZUgLLjm^<3oUJzfrFoD7&ptxVh z5ruBAj_xl+kEh!p>HlED{XIoMcrbKOj8F>0Z`>UN7C}Rz#)TNx@HGm z_%O)`2YsK~5c2}duOSOJ3cmJiaOe-#eDG81;|tOr;yD=g#Kkc z-20a~LZ`0g)3$vTqYn;uI(7@fnzXP*HZDZ%ix=qw2MK$~TCs6=&zakFEL6GcG_4N^ zbo`=R+A1F#0*vH6m+w4j?BnU&ghvT48uz7-GB`lPLWS11z6c$9Z~?HKDj~Bf&_$=( zET@rr@y%^EXe9gELzdI`nsJU`j>R`n4PJQ=DzxJmEJnd=B4nB?YhCbE;Zp@qRC@X7 zf#cm-6|DZ{%OeUoSgVcpg12k#*Kxe2(Hnv-4P~d8S>KX1&jw0t%=2Musm)mvHl=WbI^evqCKK8=qryV7NafQRsjtQ3W}d=6Ro47iAO zFNgnDF1Wt!@Yif+b|VeIn(&kWgSv0jz$NF?8^bPkVLmXqex(#`AKi z`wK~oR|13!+4|O3Y<6z5f2L`M`|MS0?V-=ZM~>@^zAKe?Zaa5SQUdgWH)XWJVdJwf zj>8AVG;r=*A85F^iP^@Qf?#88+x~%X47p5=KGq}1=aYr37~ilFcDxYBEUN9JbBX)6 zXu+w%rwDFo*(iZSm`TZ}FRG39F2(cFV~;m))?!|QBe11=*U29OK{JKtqW$9M3HCbp zE;;P663xws&x7qe(9=UniemR9EkN0MCcsezXAF=wzV;!#dB{x7l`@{weDD*RfEpx& zgd4GL41=)Ge55$1hY&IW0N)s54l(%&pNW5a<8SPlf4I*n3(whf;TM+C{=BfNR@&-c z0Z|9ghFCg!;2>Ol%u>ScbCu7Zrf>Jq2|KT@=z5vWSWy=U8l68mk2WQ(*>m1?zChZ2 zj=uX8#d(mB%p*V{jck-`dz)45R-Ds1_gPl5?Fh{Z11xkL$U8}dcHfdw1_$9T#RcXI z?)Ado@w^}zq$v9GUGeUXR>!P_YGw zSNL?mTU;^@ZuF#srv_-c&kLRfa2(udO9wAN=+wX)2lrT|f+qlUgiSki>flWp2lqIo zb6*sp(}b8-JPz)$OR)`z4xZM1odhs- z99)b@2TuSf!W#z{Lki+N6JToLIJnqS0C?lzVpv-D84y!AxI3-`#}y+|!6R^Y0Pd#y zfN{l+6z)qYycst!Iqv|5(%lD)D<&j`P4t{hOer`JH!(H)FB4OWzCm&4I%r%mq(a)B z1FR;d7W^d+|DH_Bc_Kcw;Pv?A{9DxKAz>L;Ea?u|fYF4My#E2S{{wP}i{H;-Y-|7k N002ovPDHLkV1i-n!nFVZ literal 0 HcmV?d00001 diff --git a/src/Cortex.States.MongoDb/Assets/license.md b/src/Cortex.States.MongoDb/Assets/license.md new file mode 100644 index 0000000..530f621 --- /dev/null +++ b/src/Cortex.States.MongoDb/Assets/license.md @@ -0,0 +1,20 @@ +The MIT License (MIT) + +Copyright (c) 2024 Buildersoft + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/src/Cortex.States.MongoDb/Cortex.States.MongoDb.csproj b/src/Cortex.States.MongoDb/Cortex.States.MongoDb.csproj new file mode 100644 index 0000000..c4f83c9 --- /dev/null +++ b/src/Cortex.States.MongoDb/Cortex.States.MongoDb.csproj @@ -0,0 +1,56 @@ + + + + net8.0 + + 1.0.1 + 1.0.1 + Buildersoft Cortex Framework + Buildersoft + Buildersoft,EnesHoxha + Copyright © Buildersoft 2024 + + Cortex Data Framework is a robust, extensible platform designed to facilitate real-time data streaming, processing, and state management. It provides developers with a comprehensive suite of tools and libraries to build scalable, high-performance data pipelines tailored to diverse use cases. By abstracting underlying streaming technologies and state management solutions, Cortex Data Framework enables seamless integration, simplified development workflows, and enhanced maintainability for complex data-driven applications. + + + https://github.com/buildersoftio/cortex + cortex vortex mediator eda streaming distributed streams states mongodb + + 1.0.1 + license.md + cortex.png + Cortex.States.MongoDb + True + True + True + + Just as the Cortex in our brains handles complex processing efficiently, Cortex Data Framework brings brainpower to your data management! + https://buildersoft.io/ + README.md + + + + + + + + + + + + + + True + \ + + + True + + + + True + + + + + diff --git a/src/Cortex.States.MongoDb/MongoDbStateStore.cs b/src/Cortex.States.MongoDb/MongoDbStateStore.cs new file mode 100644 index 0000000..3cf999c --- /dev/null +++ b/src/Cortex.States.MongoDb/MongoDbStateStore.cs @@ -0,0 +1,123 @@ +using MongoDB.Driver; +using System; +using System.Collections.Generic; +using System.Linq; + +namespace Cortex.States.MongoDb +{ + /// + /// A state store implementation backed by a MongoDB collection. + /// This implementation ensures that all operations are thread-safe and production-ready. + /// + /// The type of the key. Must be serializable by MongoDB. + /// The type of the value. Must be serializable by MongoDB. + public class MongoDbStateStore : IStateStore + { + private readonly IMongoCollection> _collection; + public string Name { get; } + + /// + /// Creates a new MongoDbStateStore that uses the specified MongoDB database and collection. + /// + /// The MongoDB database instance to use. + /// The name of the collection to store key-value pairs in. + /// A friendly name for this store. + public MongoDbStateStore(string storeName, IMongoDatabase database, string collectionName) + { + if (database == null) throw new ArgumentNullException(nameof(database)); + if (string.IsNullOrWhiteSpace(collectionName)) throw new ArgumentException("Collection name must be provided", nameof(collectionName)); + if (string.IsNullOrWhiteSpace(storeName)) throw new ArgumentException("Store name must be provided", nameof(storeName)); + + Name = storeName; + + _collection = database.GetCollection>(collectionName); + // Ensure collection and index. _id is indexed by default, so no special action required. + // Additional indexes can be created here if needed. + } + + /// + /// Retrieves the value associated with the specified key. + /// + /// The key to lookup. + /// The value if found; otherwise the default value of TValue. 
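+ /// Example (illustrative): store.Get(missingKey) returns default(TValue) rather than throwing when the key is not stored.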
+ public TValue Get(TKey key) + { + if (key == null) throw new ArgumentNullException(nameof(key)); + + var filter = Builders>.Filter.Eq(e => e.Id, key); + var result = _collection.Find(filter).FirstOrDefault(); + return result != null ? result.Value : default; + } + + /// + /// Puts the specified key-value pair into the store. If the key already exists, its value is replaced. + /// + /// The key to store. + /// The value to store. + public void Put(TKey key, TValue value) + { + if (key == null) throw new ArgumentNullException(nameof(key)); + + var filter = Builders>.Filter.Eq(e => e.Id, key); + var replacement = new MongoStateEntry { Id = key, Value = value }; + + // Upsert ensures that if the key does not exist, it is created + _collection.ReplaceOne(filter, replacement, new ReplaceOptions { IsUpsert = true }); + } + + /// + /// Checks if the specified key exists in the store. + /// + /// The key to check for existence. + /// True if the key exists; otherwise false. + public bool ContainsKey(TKey key) + { + if (key == null) throw new ArgumentNullException(nameof(key)); + + var filter = Builders>.Filter.Eq(e => e.Id, key); + // Limit to 1 result for efficiency + var count = _collection.Find(filter).Limit(1).CountDocuments(); + return count > 0; + } + + /// + /// Removes the value associated with the specified key, if it exists. + /// + /// The key to remove. + public void Remove(TKey key) + { + if (key == null) throw new ArgumentNullException(nameof(key)); + + var filter = Builders>.Filter.Eq(e => e.Id, key); + _collection.DeleteOne(filter); + } + + /// + /// Retrieves all key-value pairs stored. + /// Note: This operation might be expensive if the collection is large. + /// + /// An IEnumerable of all key-value pairs. + public IEnumerable> GetAll() + { + // For large data sets, consider using a cursor-based approach or streaming. + // Here we just return all documents. + var allDocs = _collection.Find(Builders>.Filter.Empty).ToList(); + return allDocs.Select(d => new KeyValuePair(d.Id, d.Value)); + } + + /// + /// Retrieves all keys stored. + /// Note: This operation might be expensive if the collection is large. + /// + /// An IEnumerable of all keys. + public IEnumerable GetKeys() + { + // We can do a projection to only return the keys. + var projection = Builders>.Projection.Expression(d => d.Id); + var keys = _collection.Find(Builders>.Filter.Empty) + .Project(projection) + .ToList(); + return keys; + } + } +} diff --git a/src/Cortex.States.MongoDb/MongoStateEntry.cs b/src/Cortex.States.MongoDb/MongoStateEntry.cs new file mode 100644 index 0000000..0ed89f8 --- /dev/null +++ b/src/Cortex.States.MongoDb/MongoStateEntry.cs @@ -0,0 +1,19 @@ +using MongoDB.Bson.Serialization.Attributes; + +namespace Cortex.States.MongoDb +{ + /// + /// Represents a single key-value entry in MongoDB. + /// The _id field will serve as the key, ensuring uniqueness and indexing. + /// + /// The type of the key. + /// The type of the value. + internal class MongoStateEntry + { + [BsonId] + public TKey Id { get; set; } + + [BsonElement("value")] + public TValue Value { get; set; } + } +} From 95c02a1c6529105436e4243af04a7ac9527a77a4 Mon Sep 17 00:00:00 2001 From: Enes Hoxha Date: Thu, 19 Dec 2024 13:30:03 +0100 Subject: [PATCH 3/8] README file updated --- README.md | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/README.md b/README.md index d03f6d2..dbb9e96 100644 --- a/README.md +++ b/README.md @@ -75,6 +75,12 @@ - **Cortex.States.RocksDb:** Persistent state storage using RocksDB. 
[![NuGet Version](https://img.shields.io/nuget/v/Cortex.States.RocksDb?label=Cortex.States.RocksDb)](https://www.nuget.org/packages/Cortex.States.RocksDb) +- **Cortex.States.Cassandra:** Persistent state storage using Cassandra. +[![NuGet Version](https://img.shields.io/nuget/v/Cortex.States.Cassandra?label=Cortex.States.Cassandra)](https://www.nuget.org/packages/Cortex.States.Cassandra) + +- **Cortex.States.MongoDb:** Persistent state storage using MongoDb. +[![NuGet Version](https://img.shields.io/nuget/v/Cortex.States.MongoDb?label=Cortex.States.MongoDb)](https://www.nuget.org/packages/Cortex.States.MongoDb) + - **Cortex.Telemetry:** Core library to add support for Tracing and Matrics. [![NuGet Version](https://img.shields.io/nuget/v/Cortex.Telemetry?label=Cortex.Telemetry)](https://www.nuget.org/packages/Cortex.Telemetry) From 576133446ca572abaf08f127123cadd6fc5c9234 Mon Sep 17 00:00:00 2001 From: Enes Hoxha Date: Thu, 19 Dec 2024 14:17:41 +0100 Subject: [PATCH 4/8] [v1/feature/37]: Add support for FlatMap Implement FlatMapOperator, Add FlatMap as core action in IStreamBuilder and IBranchStreamBuilder, Implement FlatMap in StreamBuilder, Implement FlatMap in BranchStreamBuilder --- .../Abstractions/IBranchStreamBuilder.cs | 11 +- .../Abstractions/IStreamBuilder.cs | 9 ++ src/Cortex.Streams/BranchStreamBuilder.cs | 26 ++- .../Operators/FlatMapOperator.cs | 134 ++++++++++++++++ src/Cortex.Streams/StreamBuilder.cs | 18 +++ .../Streams/Tests/FlatMapOperatorTests.cs | 150 ++++++++++++++++++ 6 files changed, 344 insertions(+), 4 deletions(-) create mode 100644 src/Cortex.Streams/Operators/FlatMapOperator.cs create mode 100644 src/Cortex.Tests/Streams/Tests/FlatMapOperatorTests.cs diff --git a/src/Cortex.Streams/Abstractions/IBranchStreamBuilder.cs b/src/Cortex.Streams/Abstractions/IBranchStreamBuilder.cs index be172f3..1ccc642 100644 --- a/src/Cortex.Streams/Abstractions/IBranchStreamBuilder.cs +++ b/src/Cortex.Streams/Abstractions/IBranchStreamBuilder.cs @@ -25,9 +25,16 @@ public interface IBranchStreamBuilder /// The type of data after the transformation. /// A function to transform data. /// The branch stream builder with the new data type. - IBranchStreamBuilder Map(Func mapFunction); - + IBranchStreamBuilder Map(Func mapFunction); + /// + /// Adds a FlatMap operator to the stream. For each input element, it produces zero or more output elements. + /// + /// The current type of data in the stream. + /// The type of data emitted after flat-mapping. + /// A function that maps an input element to zero or more output elements. + /// A stream builder emitting elements of type TNext. + IBranchStreamBuilder FlatMap(Func> flatMapFunction); /// /// Groups the stream data by a specified key selector. diff --git a/src/Cortex.Streams/Abstractions/IStreamBuilder.cs b/src/Cortex.Streams/Abstractions/IStreamBuilder.cs index cf433e8..e49275d 100644 --- a/src/Cortex.Streams/Abstractions/IStreamBuilder.cs +++ b/src/Cortex.Streams/Abstractions/IStreamBuilder.cs @@ -40,6 +40,15 @@ public interface IStreamBuilder /// The stream builder with the new data type. IStreamBuilder Map(Func mapFunction); + /// + /// Adds a FlatMap operator to the stream. For each input element, it produces zero or more output elements. + /// + /// The current type of data in the stream. + /// The type of data emitted after flat-mapping. + /// A function that maps an input element to zero or more output elements. + /// A stream builder emitting elements of type TNext. 
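+ /// Example (illustrative): stream.FlatMap(line => line.Split(' ')) emits each word as a separate element; an empty or null result emits nothing.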
+ IStreamBuilder FlatMap(Func> flatMapFunction); + /// /// Adds a sink function to the stream. /// diff --git a/src/Cortex.Streams/BranchStreamBuilder.cs b/src/Cortex.Streams/BranchStreamBuilder.cs index 939bd2f..8b9bb63 100644 --- a/src/Cortex.Streams/BranchStreamBuilder.cs +++ b/src/Cortex.Streams/BranchStreamBuilder.cs @@ -53,7 +53,7 @@ public IBranchStreamBuilder Filter(Func predicate /// The type of data after the transformation. /// A function to transform data. /// The branch stream builder with the new data type. - public IBranchStreamBuilder Map(Func mapFunction) + public IBranchStreamBuilder Map(Func mapFunction) { var mapOperator = new MapOperator(mapFunction); @@ -68,7 +68,7 @@ public IBranchStreamBuilder Map(Func ma _lastOperator = mapOperator; } - return new BranchStreamBuilder(_name) + return new BranchStreamBuilder(_name) { _firstOperator = _firstOperator, _lastOperator = _lastOperator @@ -246,5 +246,27 @@ public IBranchStreamBuilder AggregateSilently(F }; } + public IBranchStreamBuilder FlatMap(Func> flatMapFunction) + { + var flatMapOperator = new FlatMapOperator(flatMapFunction); + + if (_firstOperator == null) + { + _firstOperator = flatMapOperator; + _lastOperator = flatMapOperator; + } + else + { + _lastOperator.SetNext(flatMapOperator); + _lastOperator = flatMapOperator; + } + + return new BranchStreamBuilder(_name) + { + _firstOperator = _firstOperator, + _lastOperator = _lastOperator, + _sourceAdded = _sourceAdded + }; + } } } diff --git a/src/Cortex.Streams/Operators/FlatMapOperator.cs b/src/Cortex.Streams/Operators/FlatMapOperator.cs new file mode 100644 index 0000000..040a5b7 --- /dev/null +++ b/src/Cortex.Streams/Operators/FlatMapOperator.cs @@ -0,0 +1,134 @@ +using Cortex.Telemetry; +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.Linq; +using System.Text; +using System.Threading.Tasks; + +namespace Cortex.Streams.Operators +{ + /// + /// The FlatMapOperator takes each input element, applies a function to produce zero or more output elements, + /// and emits each output element individually into the stream. + /// + /// The type of the input element. + /// The type of the output element(s) produced. + public class FlatMapOperator : IOperator, IHasNextOperators, ITelemetryEnabled + { + private readonly Func> _flatMapFunction; + private IOperator _nextOperator; + + // Telemetry fields + private ITelemetryProvider _telemetryProvider; + private ICounter _processedCounter; + private ICounter _emittedCounter; + private IHistogram _processingTimeHistogram; + private ITracer _tracer; + private Action _incrementProcessedCounter; + private Action _incrementEmittedCounter; + private Action _recordProcessingTime; + + public FlatMapOperator(Func> flatMapFunction) + { + _flatMapFunction = flatMapFunction ?? 
throw new ArgumentNullException(nameof(flatMapFunction)); + } + + public void SetTelemetryProvider(ITelemetryProvider telemetryProvider) + { + _telemetryProvider = telemetryProvider; + + if (_telemetryProvider != null) + { + var metrics = _telemetryProvider.GetMetricsProvider(); + _processedCounter = metrics.CreateCounter($"flatmap_operator_processed_{typeof(TInput).Name}_to_{typeof(TOutput).Name}", "Number of items processed by FlatMapOperator"); + _emittedCounter = metrics.CreateCounter($"flatmap_operator_emitted_{typeof(TInput).Name}_to_{typeof(TOutput).Name}", "Number of items emitted by FlatMapOperator"); + _processingTimeHistogram = metrics.CreateHistogram($"flatmap_operator_processing_time_{typeof(TInput).Name}_to_{typeof(TOutput).Name}", "Processing time for FlatMapOperator"); + _tracer = _telemetryProvider.GetTracingProvider().GetTracer($"FlatMapOperator_{typeof(TInput).Name}_to_{typeof(TOutput).Name}"); + + // Cache delegates + _incrementProcessedCounter = () => _processedCounter.Increment(); + _incrementEmittedCounter = () => _emittedCounter.Increment(); + _recordProcessingTime = value => _processingTimeHistogram.Record(value); + } + else + { + _incrementProcessedCounter = null; + _incrementEmittedCounter = null; + _recordProcessingTime = null; + } + + // Propagate telemetry to next operator + if (_nextOperator is ITelemetryEnabled telemetryEnabled) + { + telemetryEnabled.SetTelemetryProvider(telemetryProvider); + } + } + + public void Process(object input) + { + if (input == null) + throw new ArgumentNullException(nameof(input)); + + if (!(input is TInput typedInput)) + throw new ArgumentException($"Expected input of type {typeof(TInput).Name}, but received {input.GetType().Name}", nameof(input)); + + IEnumerable outputs; + + if (_telemetryProvider != null) + { + var stopwatch = Stopwatch.StartNew(); + using (var span = _tracer.StartSpan("FlatMapOperator.Process")) + { + try + { + outputs = _flatMapFunction(typedInput) ?? Array.Empty(); + span.SetAttribute("status", "success"); + span.SetAttribute("input_type", typeof(TInput).Name); + span.SetAttribute("output_type", typeof(TOutput).Name); + } + catch (Exception ex) + { + span.SetAttribute("status", "error"); + span.SetAttribute("exception", ex.ToString()); + throw; + } + finally + { + stopwatch.Stop(); + _recordProcessingTime?.Invoke(stopwatch.Elapsed.TotalMilliseconds); + _incrementProcessedCounter?.Invoke(); + } + } + } + else + { + outputs = _flatMapFunction(typedInput) ?? 
Array.Empty(); + } + + // Emit each output element + foreach (var output in outputs) + { + _incrementEmittedCounter?.Invoke(); + _nextOperator?.Process(output); + } + } + + public void SetNext(IOperator nextOperator) + { + _nextOperator = nextOperator; + + // Propagate telemetry + if (_nextOperator is ITelemetryEnabled nextTelemetryEnabled && _telemetryProvider != null) + { + nextTelemetryEnabled.SetTelemetryProvider(_telemetryProvider); + } + } + + public IEnumerable GetNextOperators() + { + if (_nextOperator != null) + yield return _nextOperator; + } + } +} diff --git a/src/Cortex.Streams/StreamBuilder.cs b/src/Cortex.Streams/StreamBuilder.cs index 3aa4181..46f4fb1 100644 --- a/src/Cortex.Streams/StreamBuilder.cs +++ b/src/Cortex.Streams/StreamBuilder.cs @@ -561,5 +561,23 @@ public IStreamBuilder SetNext(IOperator customOperator) return this; // Returns the current builder for method chaining } + + public IStreamBuilder FlatMap(Func> flatMapFunction) + { + var flatMapOperator = new FlatMapOperator(flatMapFunction); + + if (_firstOperator == null) + { + _firstOperator = flatMapOperator; + _lastOperator = flatMapOperator; + } + else + { + _lastOperator.SetNext(flatMapOperator); + _lastOperator = flatMapOperator; + } + + return new StreamBuilder(_name, _firstOperator, _lastOperator, _sourceAdded); + } } } diff --git a/src/Cortex.Tests/Streams/Tests/FlatMapOperatorTests.cs b/src/Cortex.Tests/Streams/Tests/FlatMapOperatorTests.cs new file mode 100644 index 0000000..22ba542 --- /dev/null +++ b/src/Cortex.Tests/Streams/Tests/FlatMapOperatorTests.cs @@ -0,0 +1,150 @@ +using Cortex.Streams; +using Cortex.Streams.Operators; + +namespace Cortex.Tests.Streams.Tests +{ + public class CollectingSink : ISinkOperator + { + private readonly List _collected = new List(); + public IReadOnlyList Collected => _collected; + + public void Start() { } + public void Stop() { } + + public void Process(TInput input) + { + _collected.Add(input); + } + } + + + public class FlatMapOperatorTests + { + [Fact] + public void Stream_FlatMap_SplitsInputIntoMultipleOutputs() + { + // Arrange + var collectingSink = new CollectingSink(); + + // Build the stream: + // Start a stream without a dedicated source, we will just Emit into it. 
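+ // Pipeline under test: in-memory source (via Emit) -> FlatMap (sentence split into words) -> collecting sink.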
+ var stream = StreamBuilder + .CreateNewStream("TestStream") + .Stream() + .FlatMap(line => line.Split(' ')) // Use FlatMap to split a sentence into words + .Sink(collectingSink) + .Build(); + + stream.Start(); + + // Act + stream.Emit("Hello world from stream"); + + // Assert + Assert.Equal(4, collectingSink.Collected.Count); + Assert.Contains("Hello", collectingSink.Collected); + Assert.Contains("world", collectingSink.Collected); + Assert.Contains("from", collectingSink.Collected); + Assert.Contains("stream", collectingSink.Collected); + + stream.Stop(); + } + + [Fact] + public void Stream_FlatMap_EmptyResult_EmitsNoOutput() + { + // Arrange + var collectingSink = new CollectingSink(); + + var stream = StreamBuilder + .CreateNewStream("EmptyResultStream") + .Stream() + .FlatMap(num => new int[0]) // Always empty + .Sink(collectingSink) + .Build(); + + stream.Start(); + + // Act + stream.Emit(42); + + // Assert + Assert.Empty(collectingSink.Collected); + + stream.Stop(); + } + + [Fact] + public void Stream_FlatMap_NullResult_TreatedAsEmpty() + { + // Arrange + var collectingSink = new CollectingSink(); + + var stream = StreamBuilder + .CreateNewStream("NullResultStream") + .Stream() + .FlatMap(num => null) // Always null + .Sink(collectingSink) + .Build(); + + stream.Start(); + + // Act + stream.Emit(10); + + // Assert + Assert.Empty(collectingSink.Collected); + + stream.Stop(); + } + + [Fact] + public void Stream_FlatMap_ExceptionInFunction_BubblesUp() + { + // Arrange + var collectingSink = new CollectingSink(); + + var stream = StreamBuilder + .CreateNewStream("ExceptionStream") + .Stream() + .FlatMap(num => throw new InvalidOperationException("Test exception")) + .Sink(collectingSink) + .Build(); + + stream.Start(); + + // Act & Assert + var ex = Assert.Throws(() => stream.Emit(5)); + Assert.Equal("Test exception", ex.Message); + + stream.Stop(); + } + + [Fact] + public void Stream_FlatMap_SingleOutputEmittedForEachInput() + { + // Arrange + var collectingSink = new CollectingSink(); + + var stream = StreamBuilder + .CreateNewStream("SingleOutputStream") + .Stream() + .FlatMap(line => new[] { line.ToUpper() }) // One-to-one mapping but via flatmap + .Sink(collectingSink) + .Build(); + + stream.Start(); + + // Act + stream.Emit("hello"); + stream.Emit("world"); + + // Assert + Assert.Equal(2, collectingSink.Collected.Count); + Assert.Contains("HELLO", collectingSink.Collected); + Assert.Contains("WORLD", collectingSink.Collected); + + stream.Stop(); + } + } +} From 65f399b8e6240b6800e53574326a5b86caf7ca7b Mon Sep 17 00:00:00 2001 From: Enes Hoxha Date: Thu, 19 Dec 2024 19:51:13 +0100 Subject: [PATCH 5/8] [v1/feature/75]: Add support for Microsoft SQL Server Implement SQLServerKeyValueStateStore based on JSON, Implement SQLServerStateStore will full schema management --- Cortex.sln | 24 +- README.md | 3 + .../Assets/cortex.png | Bin 0 -> 7179 bytes .../Assets/license.md | 20 + .../Cortex.States.MSSqlServer.csproj | 56 ++ .../ListPropertyMetadata.cs | 13 + .../PropertyConverter.cs | 91 +++ .../SchemaManager.cs | 469 ++++++++++++++ .../SqlServerKeyValueStateStore.cs | 256 ++++++++ .../SqlServerStateStore.cs | 585 ++++++++++++++++++ src/Cortex.States.MSSqlServer/TypeAnalyzer.cs | 108 ++++ .../Assets/cortex.png | Bin 0 -> 7179 bytes .../Assets/license.md | 20 + .../Cortex.States.PostgreSQL.csproj | 9 + 14 files changed, 1648 insertions(+), 6 deletions(-) create mode 100644 src/Cortex.States.MSSqlServer/Assets/cortex.png create mode 100644 src/Cortex.States.MSSqlServer/Assets/license.md 
create mode 100644 src/Cortex.States.MSSqlServer/Cortex.States.MSSqlServer.csproj create mode 100644 src/Cortex.States.MSSqlServer/ListPropertyMetadata.cs create mode 100644 src/Cortex.States.MSSqlServer/PropertyConverter.cs create mode 100644 src/Cortex.States.MSSqlServer/SchemaManager.cs create mode 100644 src/Cortex.States.MSSqlServer/SqlServerKeyValueStateStore.cs create mode 100644 src/Cortex.States.MSSqlServer/SqlServerStateStore.cs create mode 100644 src/Cortex.States.MSSqlServer/TypeAnalyzer.cs create mode 100644 src/Cortex.States.PostgreSQL/Assets/cortex.png create mode 100644 src/Cortex.States.PostgreSQL/Assets/license.md create mode 100644 src/Cortex.States.PostgreSQL/Cortex.States.PostgreSQL.csproj diff --git a/Cortex.sln b/Cortex.sln index a52aa69..4f6cbfb 100644 --- a/Cortex.sln +++ b/Cortex.sln @@ -1,4 +1,4 @@ - + Microsoft Visual Studio Solution File, Format Version 12.00 # Visual Studio Version 17 VisualStudioVersion = 17.10.34607.79 @@ -33,9 +33,13 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Cortex.Tests", "src\Cortex. EndProject Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Cortex.Streams.Files", "src\Cortex.Streams.Files\Cortex.Streams.Files.csproj", "{D376D6CA-3192-4EDC-B840-31F58B6457DD}" EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Cortex.States.Cassandra", "src\Cortex.States.Cassandra\Cortex.States.Cassandra.csproj", "{447970B9-C5AA-41D9-A07F-330A251597D0}" +EndProject Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Cortex.States.MongoDb", "src\Cortex.States.MongoDb\Cortex.States.MongoDb.csproj", "{00358701-D117-4953-A673-D60625D38466}" EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Cortex.States.Cassandra", "src\Cortex.States.Cassandra\Cortex.States.Cassandra.csproj", "{447970B9-C5AA-41D9-A07F-330A251597D0}" +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Cortex.States.MSSqlServer", "src\Cortex.States.MSSqlServer\Cortex.States.MSSqlServer.csproj", "{77AD462F-A248-43AF-9212-43031F22F23D}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Cortex.States.PostgreSQL", "src\Cortex.States.PostgreSQL\Cortex.States.PostgreSQL.csproj", "{980EDBFE-40C2-4EFD-96C2-FED1032FB5E6}" EndProject Global GlobalSection(SolutionConfigurationPlatforms) = preSolution @@ -102,14 +106,22 @@ Global {D376D6CA-3192-4EDC-B840-31F58B6457DD}.Debug|Any CPU.Build.0 = Debug|Any CPU {D376D6CA-3192-4EDC-B840-31F58B6457DD}.Release|Any CPU.ActiveCfg = Release|Any CPU {D376D6CA-3192-4EDC-B840-31F58B6457DD}.Release|Any CPU.Build.0 = Release|Any CPU - {00358701-D117-4953-A673-D60625D38466}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {00358701-D117-4953-A673-D60625D38466}.Debug|Any CPU.Build.0 = Debug|Any CPU - {00358701-D117-4953-A673-D60625D38466}.Release|Any CPU.ActiveCfg = Release|Any CPU - {00358701-D117-4953-A673-D60625D38466}.Release|Any CPU.Build.0 = Release|Any CPU {447970B9-C5AA-41D9-A07F-330A251597D0}.Debug|Any CPU.ActiveCfg = Debug|Any CPU {447970B9-C5AA-41D9-A07F-330A251597D0}.Debug|Any CPU.Build.0 = Debug|Any CPU {447970B9-C5AA-41D9-A07F-330A251597D0}.Release|Any CPU.ActiveCfg = Release|Any CPU {447970B9-C5AA-41D9-A07F-330A251597D0}.Release|Any CPU.Build.0 = Release|Any CPU + {00358701-D117-4953-A673-D60625D38466}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {00358701-D117-4953-A673-D60625D38466}.Debug|Any CPU.Build.0 = Debug|Any CPU + {00358701-D117-4953-A673-D60625D38466}.Release|Any CPU.ActiveCfg = Release|Any CPU + {00358701-D117-4953-A673-D60625D38466}.Release|Any CPU.Build.0 = Release|Any CPU + 
{77AD462F-A248-43AF-9212-43031F22F23D}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {77AD462F-A248-43AF-9212-43031F22F23D}.Debug|Any CPU.Build.0 = Debug|Any CPU + {77AD462F-A248-43AF-9212-43031F22F23D}.Release|Any CPU.ActiveCfg = Release|Any CPU + {77AD462F-A248-43AF-9212-43031F22F23D}.Release|Any CPU.Build.0 = Release|Any CPU + {980EDBFE-40C2-4EFD-96C2-FED1032FB5E6}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {980EDBFE-40C2-4EFD-96C2-FED1032FB5E6}.Debug|Any CPU.Build.0 = Debug|Any CPU + {980EDBFE-40C2-4EFD-96C2-FED1032FB5E6}.Release|Any CPU.ActiveCfg = Release|Any CPU + {980EDBFE-40C2-4EFD-96C2-FED1032FB5E6}.Release|Any CPU.Build.0 = Release|Any CPU EndGlobalSection GlobalSection(SolutionProperties) = preSolution HideSolutionNode = FALSE diff --git a/README.md b/README.md index dbb9e96..3e4bda1 100644 --- a/README.md +++ b/README.md @@ -81,6 +81,9 @@ - **Cortex.States.MongoDb:** Persistent state storage using MongoDb. [![NuGet Version](https://img.shields.io/nuget/v/Cortex.States.MongoDb?label=Cortex.States.MongoDb)](https://www.nuget.org/packages/Cortex.States.MongoDb) +- **Cortex.States.MSSqlServer:** Persistent state storage using Microsoft Sql Server. +[![NuGet Version](https://img.shields.io/nuget/v/Cortex.States.MSSqlServer?label=Cortex.States.MSSqlServer)](https://www.nuget.org/packages/Cortex.States.MSSqlServer) + - **Cortex.Telemetry:** Core library to add support for Tracing and Matrics. [![NuGet Version](https://img.shields.io/nuget/v/Cortex.Telemetry?label=Cortex.Telemetry)](https://www.nuget.org/packages/Cortex.Telemetry) diff --git a/src/Cortex.States.MSSqlServer/Assets/cortex.png b/src/Cortex.States.MSSqlServer/Assets/cortex.png new file mode 100644 index 0000000000000000000000000000000000000000..a4f9727d04f91f61f20720bd64b9c139fb041236 GIT binary patch literal 7179 zcmV+m9Q5OfP)Px#1ZP1_K>z@;j|==^1poj532;bRa{vGf5&!@T5&_cPe*6Fc8=y%C^C!)jU*}Z_TUZ5zkPS!zgzWpiKoUY0LP$tR!Xokt%Ag{H3XX^@B8wnv03972 z_hwv1kf6A*-1mH+Tg$Cm_f}O`27H#NQDPYKIv zcTCHy`fO@u)s1PH)sbnLH4!`}kZK|c%WX|xUTdf>GJS=u8BkWepa#-RXr-xDk(t@H zW@WFA%*qkfWGlBRGCQ}KE6>y#u7Wj@IeBX$a|&84%`IF@nqxf9Eo!w@+!~oz+$N|! zGOwgPGT%~1WPT~v)1{qUWzc%A^7WAg4s}HqRBUi)VHMOZs3)?px+juI+89Z!G1VJc zB&xQLlmylF3mS+d)%Qn|>-%q{Gz@%}+OT;?VqO2Az$`8yqwdcMnN^=m&#Gn9GOO9N zWmSgoa}7%%RkMWUH7sFyEou6S8a90e9_zTW>Pa(9t!!{;=E_xE+0bgztQ@G(RxZ@U zX6H7t+4KlCv)TDgY<6BVo1M3Y&B-^_!sZmT(Bo4DE$penwd|>awfwQDmCY?QRMf`i z7PlFX?QCvIJDW%9VDpMQ_+#lhHowFWelF`|^GiF~)8*^A%D5JkcX3sij~m#6$_@0m zu(F#itm@{El|3x6Y9mXm>S2l1c-+Vmt9w~uO)pzi-OCo$^sz;?rZ%x8Q+55MWT<|? 
zR>L4ErD2dQZhVHNG;H}~QR9|B$4$wo*gJiBEt{HI$)=I2xEuhhIXqJqL=&J0vCala z5N(h%O%XsY0Gvq>4G}=VGamuY)|~uiL2C?vYdAnt09d$|;L#&sk_?c=ZERj~yDb1L z>0t9s0idC$%Q^*_5G9Z#17M~3$VC820LaAwn(8AZK>|nur~sJ;c=dyOfYHG-0D8c)!3BgD z0ANym|IN6m>E(B(W>gS3OJ2?cFb6y?Pk02*2B>qM3!dyehx-I@f^^Pv!7~6xXt|F=bO5v<25_GNQSUxC#H8AO4sJ?%xdNQ*J~@1Hcqd#$aK3;6*i@;(*!qp zN$K#ErDYCqdVq%egycmAs0*E1fQrN=5J1U&?wU^l&=op;GEa^BbZsAjqqfh>eP;6F z)Ao6|Z&6L30M3*Oo+ETR-KT=*2%XxtPvO3(3wF_Ye(o~>8t!w1PapS14NvJlvF#Jx zr)&H4@D^1Y;3h9EV~`Hsl#Ft1yQhN}IO}{ANsFp%P!{Yu=K-8E>wGlM^MYqOFId^& z2%iLQaft=a3!W})l0KJV3MhJsTZ0AEbxpuK9u{Elz-8;ROww zR?TOCTT;p_`2Zw{iqPrGJT3QmCoihd8BOM?bf27cD&6OV=tx|mt@-5SMRuQ8=yZio zXXvycikWBcaNHyroC=-<(Ct1seEPUg8#$OY0`0p9-Gr zJ{g`@!7jLOL4{YlXVrYeag)-D-QWax7C_l`CNRIYFQbZ{KY1z6$Kr!#EgZ*Wg$`|~Vo!z(PSdoO8a_bY<-k(PJvp>_%Gv>m7eXoO8y z=v38xA~-=V_sQW?)$U2~rleO;=quWCh;^O(o^87Do2U9N{K`5{e9LlrUu6j^#7=7= z^K^|iM3;Gn?{VS6|5NG*Skr-zS?{?!Y}3WxJ=J&q7q<4T&slowP6s>-T#)3&a-QwJ zg;hNOSL6bxgy$1FEqErlrDbgSx;<>mwFhkL2aIjG<}W;N@XCF*V*N{$o67aDm_{0CMM*lVjE#{Dj{8=BvSl58QYDE=yg#mCogYr)>Me!L!;uW!7o8S@;_g zl7_uQY-^OC2f{JS*1bsWqcU+(hEBQd6W{?{d}`5fJiw)yk^ly7`)qjlJuY(RJKqrq z`GfcguyO3dif(EzUGOZ31~>troplQEFvkURT|{^Q=UekNvpM)VZw)KievEA%dfrbnvSA!cLKpGf(cmS-Gnz;po5gd(_bb!i_sP^nlvAr;IF94NvVp2S5{E zIe{x2j!!EhSb%2%93eco6oY=f@81m-UN#$TMCJ2m2ny;!--6vrS|s-=;JMtVsQK{x z#ZBAU#4ZIzS;aOu?=L=xFyyPUm3?Au_nm}G zSbMn7S+E-pEG2LS!||zwHaG#E*!EfQ6af9;A!OnMw7>mzcz7Q$)_Ul3`k>r-s{YQf z&a>g+c?iv$uRaJBBI%f+2drT5@6;XkD29qGXyza-z)iHk1%yWcqj%nyCZ5l%AP$=9nz{{A|*f(qAK$Qw5&SJ zo?;tL{umCL@!pybd`vwo2)C-yhLO6Dw|gDjaV9t?JOP~4_C;OuneH?GpWBB}^e|le zag}$AW%vAz`a|7QB%*g8o`;LG_RW8Vt~&u6od!uYy_Dq0U1st~gFVI<0$vHgCFeW9 zY2hhCr&r?Qf@c7ngiJHNoWjXST(|Mu9k#gnSvqH6=P3$yBu=={wJ%+went7Uf(X!h z-~WXzZrVoeq^hse9xCz}qzwws23No)rt09S0S1Am1V|yXgw4pRV@44sR;1jC7Xs)LiHjQyTH$)pCo|DzJmknGB*FnHD z$os71*<)0+QxUHkDiUUvsUTYIo+5d105l-ZFV=!{!t(V?P4YG}AheuQ< z)a-emPmxfTfpUQ9H^!gPggt-s-G2;kgHCgTT|RX;9RB!N9ozB9ZX| zC?+lh(FY)Yh70OmycjxJ!EqZ-{m7DPHj(pOlXI8VSTHMgo+MZULl4-%l?OEJ5)_`9 z(Pp&nqIS~RQSCoP!Z}bY&#D&NJyVLBFFJ5unWq3x0WgUB;?oN0L#}=Ev(RZd;I$w5 znl1!QRFdFfPE1bke%62S4+_m(COm)VAtDT1xySPR-{571QM}KRM;lZ_MXEts6QFA; zw9b8|16!j67Zjc@boz9c;XFw15r)DzZxBWORmK|j4NgF5PGS zm+!Fw$N|sWdE)pWr0VN*zzZ}*!U@j?XipDx^|lm-0Y}oo696jUd1alV^A}LqXi93!NSS@!Xt^``O^oeP4KkLkM3S8lMvPE8BGbE_G&O zSOp#2xInA-f57@K{6;+-NMz8F40r&k255&)Kx;ev74^(xYZ0-#OziKJ0jff0xXe@P zaVZ!nI7}*&F!yQT0hY^wYabyvt@HHoT#1VeFSU>ga2wjm_3!?`+K+t0I!}Dhx=#K`zsKJ#c;`7hI8_PnDx<%n&g8Sr z!jJQb?hEfc?!GyX3>@hZz>@&tSiDhOd}r?l!k8HzeONU`<1)T0q+4D7<$MGH@|7DY0!)`sNf0U{Nc%2ryo3k zYk8F5MCl*~1Sr76`LGIe<9oNQf*FoLRtTr`T28yoHk`i6)*H~)9sibok2Z4f@_qjM zf^u?^wsN8?5{EPmb(x)1}`npzBZk zK+o?!^Aiu9M%Xmr;kebWen8VL(9`1XsMZV>1%xNpe5UigK7Hn?bf27c3htXV*5HC9E-H8v zQVLjNO&@DL^o0>3BNXanKw4qK>g9W^^Tc=LJcQTM9q-Uo1{xL`!*%Sgf?ZrmYrvb8 z+cXy7py=U=p_6`}TEx;@o?~r?zoa&5vz(0h=^8Jt0lwzN^VD`=`IvGYy9CcYNNZ1# za5+x{FL2wZSWPsB;6&*lx&d+-0FD2b?l|twAOi|v6&dNmiVZZ|T3k`<4bVGuqCYdJ z`@8_k?z7syoW@uNr-o;`58)D7R_oy}Jo_{KgdP}_8OU5QZBX8Xz_ovi1TR?ld~^kS z41qHh0GVslES5%vl{{;4J9? 
z7P1M+d2D6RJ~ntY+3(wPe{{`Su0WFVqlEuC$BTYv9L^HWA zV=cfwTA*TX1!4iyWSsz~+Unn3^U3i1O9l&`0dU5D1Govv`7A!Igw^i37<#IBw8HCP z?j*u8k`;R!kP)~~129qxa52`At{d)Zax&K5Asn8Q`3=YVpf$Fp2mId zn$OF9qVs~)e6bBKxq#X=EbZ;T9BwrYg!V9OB7$1h9~cut4myz&XdXp z^=9L-3+~B9IkX3H&zYN{lM#fxhJ9CQur&;L90Tapf5mE`YBpKtJeQ=5Q-bFRn-)NQ z$w1%)fYVpj#U{8XlXBUtye8IuG!i-)!EsHmen=MzdtuU+P9foKzIvbK558e7ht_UB zr)c*`8h~Ll&*@n)2o7x;QgQ6#720W7t$Hj%+YBPyqKm9x;Em8B6YwzKWogTE)WI!p zf2D`#?>?6U1GsN`R())NL$3&W{mY*_75XCcjPPfcTt7(R7TshxHzwFt?LHU!W-wRj zV4J9ksYP0NYJfI8ch;$cXE@JFUL4MgA#lhblk-?My0eDvg$ipBdi$}Z?h}i09o=O( z|BS2#w&smbL&L*Ofl+hJCPsg$1y2TOZg;E*oBUf+)HRyMd|DWY2|=M9{Q(l5YZS8LvDQJ_zf6|OeXj5+*L)g) zCd7mlF#-xlnb`aj=d?B?5Z$K>of`KAtogTb+Y7WWR~Tq?%(aKKqYuKZ ze15HjaXz$n*kl{i`@+JbLjT}DDNG|2b8rFSxfEcKmbUw-9vg5^BxchW*!8~8SJTKB z!%sM7>-C4MY{v--d3G`qPy)Os5_4#lSI?QBLc_yx)w|Ep#mD;*UFQqWbBctVbsBBo zSb)P_j&;qLfgHAdL3@bRGzc3wpZprZh4v0*LLSZ0#H69H?+M2h48BP-(5#TDa-XW* zbHTISr)>K&s~#^nTx3`si{QiV--Qc=wE~#Y>&zfS%1BnQlq;8Zh7C6Xi4VZuW0~!{ z>AVhjDhEpLQ#(-nT2sM&j}sj3YV`9}>^e0fXjmVBFOqmN(H|Nw#QB(>{5M$u7oS>8 zAunvmL^*?nBiObd=hVT|g-#2e!g-FwCF3!IgWGcZUgLLjm^<3oUJzfrFoD7&ptxVh z5ruBAj_xl+kEh!p>HlED{XIoMcrbKOj8F>0Z`>UN7C}Rz#)TNx@HGm z_%O)`2YsK~5c2}duOSOJ3cmJiaOe-#eDG81;|tOr;yD=g#Kkc z-20a~LZ`0g)3$vTqYn;uI(7@fnzXP*HZDZ%ix=qw2MK$~TCs6=&zakFEL6GcG_4N^ zbo`=R+A1F#0*vH6m+w4j?BnU&ghvT48uz7-GB`lPLWS11z6c$9Z~?HKDj~Bf&_$=( zET@rr@y%^EXe9gELzdI`nsJU`j>R`n4PJQ=DzxJmEJnd=B4nB?YhCbE;Zp@qRC@X7 zf#cm-6|DZ{%OeUoSgVcpg12k#*Kxe2(Hnv-4P~d8S>KX1&jw0t%=2Musm)mvHl=WbI^evqCKK8=qryV7NafQRsjtQ3W}d=6Ro47iAO zFNgnDF1Wt!@Yif+b|VeIn(&kWgSv0jz$NF?8^bPkVLmXqex(#`AKi z`wK~oR|13!+4|O3Y<6z5f2L`M`|MS0?V-=ZM~>@^zAKe?Zaa5SQUdgWH)XWJVdJwf zj>8AVG;r=*A85F^iP^@Qf?#88+x~%X47p5=KGq}1=aYr37~ilFcDxYBEUN9JbBX)6 zXu+w%rwDFo*(iZSm`TZ}FRG39F2(cFV~;m))?!|QBe11=*U29OK{JKtqW$9M3HCbp zE;;P663xws&x7qe(9=UniemR9EkN0MCcsezXAF=wzV;!#dB{x7l`@{weDD*RfEpx& zgd4GL41=)Ge55$1hY&IW0N)s54l(%&pNW5a<8SPlf4I*n3(whf;TM+C{=BfNR@&-c z0Z|9ghFCg!;2>Ol%u>ScbCu7Zrf>Jq2|KT@=z5vWSWy=U8l68mk2WQ(*>m1?zChZ2 zj=uX8#d(mB%p*V{jck-`dz)45R-Ds1_gPl5?Fh{Z11xkL$U8}dcHfdw1_$9T#RcXI z?)Ado@w^}zq$v9GUGeUXR>!P_YGw zSNL?mTU;^@ZuF#srv_-c&kLRfa2(udO9wAN=+wX)2lrT|f+qlUgiSki>flWp2lqIo zb6*sp(}b8-JPz)$OR)`z4xZM1odhs- z99)b@2TuSf!W#z{Lki+N6JToLIJnqS0C?lzVpv-D84y!AxI3-`#}y+|!6R^Y0Pd#y zfN{l+6z)qYycst!Iqv|5(%lD)D<&j`P4t{hOer`JH!(H)FB4OWzCm&4I%r%mq(a)B z1FR;d7W^d+|DH_Bc_Kcw;Pv?A{9DxKAz>L;Ea?u|fYF4My#E2S{{wP}i{H;-Y-|7k N002ovPDHLkV1i-n!nFVZ literal 0 HcmV?d00001 diff --git a/src/Cortex.States.MSSqlServer/Assets/license.md b/src/Cortex.States.MSSqlServer/Assets/license.md new file mode 100644 index 0000000..530f621 --- /dev/null +++ b/src/Cortex.States.MSSqlServer/Assets/license.md @@ -0,0 +1,20 @@ +The MIT License (MIT) + +Copyright (c) 2024 Buildersoft + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/src/Cortex.States.MSSqlServer/Cortex.States.MSSqlServer.csproj b/src/Cortex.States.MSSqlServer/Cortex.States.MSSqlServer.csproj new file mode 100644 index 0000000..cc3a9af --- /dev/null +++ b/src/Cortex.States.MSSqlServer/Cortex.States.MSSqlServer.csproj @@ -0,0 +1,56 @@ + + + + net9.0;net8.0;net7.0 + + 1.0.1 + 1.0.1 + Buildersoft Cortex Framework + Buildersoft + Buildersoft,EnesHoxha + Copyright © Buildersoft 2024 + + Cortex Data Framework is a robust, extensible platform designed to facilitate real-time data streaming, processing, and state management. It provides developers with a comprehensive suite of tools and libraries to build scalable, high-performance data pipelines tailored to diverse use cases. By abstracting underlying streaming technologies and state management solutions, Cortex Data Framework enables seamless integration, simplified development workflows, and enhanced maintainability for complex data-driven applications. + + + https://github.com/buildersoftio/cortex + cortex vortex mediator eda streaming distributed streams states kafka pulsar rocksdb + + 1.0.1 + license.md + cortex.png + Cortex.States.MSSqlServer + True + True + True + + Just as the Cortex in our brains handles complex processing efficiently, Cortex Data Framework brings brainpower to your data management! + https://buildersoft.io/ + README.md + + + + + True + \ + + + True + + + + True + + + + + + + + + + + + + + diff --git a/src/Cortex.States.MSSqlServer/ListPropertyMetadata.cs b/src/Cortex.States.MSSqlServer/ListPropertyMetadata.cs new file mode 100644 index 0000000..087220c --- /dev/null +++ b/src/Cortex.States.MSSqlServer/ListPropertyMetadata.cs @@ -0,0 +1,13 @@ +using System; +using System.Reflection; + +namespace Cortex.States.MSSqlServer +{ + internal class ListPropertyMetadata + { + public PropertyInfo Property { get; set; } + public PropertyInfo[] ChildScalarProperties { get; set; } + public Type ChildItemType { get; set; } + public string TableName { get; set; } + } +} diff --git a/src/Cortex.States.MSSqlServer/PropertyConverter.cs b/src/Cortex.States.MSSqlServer/PropertyConverter.cs new file mode 100644 index 0000000..50fc985 --- /dev/null +++ b/src/Cortex.States.MSSqlServer/PropertyConverter.cs @@ -0,0 +1,91 @@ +using System; +using System.Globalization; + +namespace Cortex.States.MSSqlServer +{ + internal class PropertyConverter + { + public string ConvertToString(object value) + { + if (value == null) return null; + return value.ToString(); + } + + public object ConvertFromString(Type type, string str) + { + if (str == null) return null; + + if (type == typeof(string)) return str; + + // Numeric and other conversions + if (type == typeof(int) || type == typeof(int?)) + { + if (int.TryParse(str, NumberStyles.Any, CultureInfo.InvariantCulture, out var i)) return i; + return type == typeof(int) ? 0 : (int?)null; + } + if (type == typeof(long) || type == typeof(long?)) + { + if (long.TryParse(str, NumberStyles.Any, CultureInfo.InvariantCulture, out var l)) return l; + return type == typeof(long) ? 0L : (long?)null; + } + if (type == typeof(double) || type == typeof(double?)) + { + if (double.TryParse(str, NumberStyles.Any, CultureInfo.InvariantCulture, out var d)) return d; + return type == typeof(double) ? 
0.0 : (double?)null; + } + if (type == typeof(float) || type == typeof(float?)) + { + if (float.TryParse(str, NumberStyles.Any, CultureInfo.InvariantCulture, out var f)) return f; + return type == typeof(float) ? 0f : (float?)null; + } + if (type == typeof(bool) || type == typeof(bool?)) + { + if (bool.TryParse(str, out var b)) return b; + return type == typeof(bool) ? false : (bool?)null; + } + if (type == typeof(DateTime) || type == typeof(DateTime?)) + { + if (DateTime.TryParse(str, CultureInfo.InvariantCulture, DateTimeStyles.RoundtripKind, out var dt)) return dt; + return type == typeof(DateTime) ? DateTime.MinValue : (DateTime?)null; + } + if (type == typeof(Guid) || type == typeof(Guid?)) + { + if (Guid.TryParse(str, out var g)) return g; + return type == typeof(Guid) ? Guid.Empty : (Guid?)null; + } + if (type == typeof(decimal) || type == typeof(decimal?)) + { + if (decimal.TryParse(str, NumberStyles.Any, CultureInfo.InvariantCulture, out var dec)) return dec; + return type == typeof(decimal) ? 0m : (decimal?)null; + } + if (type == typeof(short) || type == typeof(short?)) + { + if (short.TryParse(str, NumberStyles.Any, CultureInfo.InvariantCulture, out var s)) return s; + return type == typeof(short) ? (short)0 : (short?)null; + } + if (type == typeof(ushort) || type == typeof(ushort?)) + { + if (ushort.TryParse(str, NumberStyles.Any, CultureInfo.InvariantCulture, out var us)) return us; + return type == typeof(ushort) ? (ushort)0 : (ushort?)null; + } + if (type == typeof(uint) || type == typeof(uint?)) + { + if (uint.TryParse(str, NumberStyles.Any, CultureInfo.InvariantCulture, out var ui)) return ui; + return type == typeof(uint) ? 0U : (uint?)null; + } + if (type == typeof(ulong) || type == typeof(ulong?)) + { + if (ulong.TryParse(str, NumberStyles.Any, CultureInfo.InvariantCulture, out var ul)) return ul; + return type == typeof(ulong) ? 0UL : (ulong?)null; + } + if (type == typeof(TimeSpan) || type == typeof(TimeSpan?)) + { + if (TimeSpan.TryParse(str, CultureInfo.InvariantCulture, out var ts)) return ts; + return type == typeof(TimeSpan) ? 
TimeSpan.Zero : (TimeSpan?)null; + } + + // If unknown type, just return the string + return str; + } + } +} diff --git a/src/Cortex.States.MSSqlServer/SchemaManager.cs b/src/Cortex.States.MSSqlServer/SchemaManager.cs new file mode 100644 index 0000000..10cb7d2 --- /dev/null +++ b/src/Cortex.States.MSSqlServer/SchemaManager.cs @@ -0,0 +1,469 @@ +using Microsoft.Data.SqlClient; +using System; +using System.Collections.Generic; +using System.Linq; +using System.Reflection; +using System.Text; + +namespace Cortex.States.MSSqlServer +{ + internal class SchemaManager + { + private readonly string _connectionString; + private readonly string _schemaName; + private readonly string _baseTableName; + private readonly TypeAnalyzer _typeAnalyzer; + private readonly bool _createOrUpdateTableSchema; + + public SchemaManager(string connectionString, string schemaName, string baseTableName, TypeAnalyzer typeAnalyzer, bool createOrUpdateTableSchema) + { + _connectionString = connectionString; + _schemaName = schemaName; + _baseTableName = baseTableName; + _typeAnalyzer = typeAnalyzer; + _createOrUpdateTableSchema = createOrUpdateTableSchema; + } + + public void EnsureSchemaAndTables() + { + using (var connection = new SqlConnection(_connectionString)) + { + connection.Open(); + EnsureSchema(connection); + + if (_typeAnalyzer.IsListType) + { + EnsureMainTableForListType(connection); + EnsureChildTableForListType(connection); + } + else + { + EnsureMainTable(connection); + + foreach (var lp in _typeAnalyzer.ListProperties) + { + EnsureChildTable(connection, lp); + } + } + } + } + + private void EnsureSchema(SqlConnection connection) + { + if (_schemaName.ToLower() != "dbo") + { + var createSchemaSql = $@" + IF NOT EXISTS (SELECT * FROM sys.schemas WHERE name = '{_schemaName}') + BEGIN + EXEC('CREATE SCHEMA [{_schemaName}]') + END"; + using (var cmd = new SqlCommand(createSchemaSql, connection)) + { + if (_createOrUpdateTableSchema) + { + cmd.ExecuteNonQuery(); + } + else + { + var checkSchemaSql = $"SELECT 1 FROM sys.schemas WHERE name = '{_schemaName}'"; + using (var checkCmd = new SqlCommand(checkSchemaSql, connection)) + { + var exists = checkCmd.ExecuteScalar(); + if (exists == null) throw new InvalidOperationException($"Schema {_schemaName} does not exist and createOrUpdateTableSchema=false."); + } + } + } + } + } + + private void EnsureMainTable(SqlConnection connection) + { + if (!TableExists(connection, _baseTableName)) + { + if (_createOrUpdateTableSchema) + { + var mainTableSql = BuildCreateMainTableSql(); + using (var cmd = new SqlCommand(mainTableSql, connection)) + { + cmd.ExecuteNonQuery(); + } + } + else + { + throw new InvalidOperationException($"Main table [{_schemaName}].[{_baseTableName}] does not exist and createOrUpdateTableSchema=false."); + } + } + else + { + // Check columns + var propMap = _typeAnalyzer.ScalarProperties.ToDictionary(p => p.Name, p => p); + EnsureColumns(connection, _baseTableName, propMap, isChildTable: false, isListType: false); + } + } + + private void EnsureMainTableForListType(SqlConnection connection) + { + if (!TableExists(connection, _baseTableName)) + { + if (_createOrUpdateTableSchema) + { + var sql = $@"CREATE TABLE [{_schemaName}].[{_baseTableName}] ( + [key] NVARCHAR(450) NOT NULL PRIMARY KEY + )"; + using (var cmd = new SqlCommand(sql, connection)) + { + cmd.ExecuteNonQuery(); + } + } + else + { + throw new InvalidOperationException($"Main table [{_schemaName}].[{_baseTableName}] does not exist and createOrUpdateTableSchema=false."); + } + } + else + { 
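+ // The table already exists: verify that the [key] column and its primary key are in place,
+ // repairing them below only when createOrUpdateTableSchema is enabled.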
+ // Ensure key column + var existingColumns = GetExistingColumns(connection, _baseTableName); + if (!existingColumns.Contains("key", StringComparer.OrdinalIgnoreCase)) + { + if (_createOrUpdateTableSchema) + { + var sql = $@"ALTER TABLE [{_schemaName}].[{_baseTableName}] ADD [key] NVARCHAR(450) NOT NULL"; + using (var cmd = new SqlCommand(sql, connection)) + { + cmd.ExecuteNonQuery(); + } + // Add primary key if missing + var pkCheck = $"SELECT 1 FROM sys.indexes WHERE object_id = OBJECT_ID('[{_schemaName}].[{_baseTableName}]') AND is_primary_key = 1"; + using (var pkCmd = new SqlCommand(pkCheck, connection)) + { + var pkExists = pkCmd.ExecuteScalar(); + if (pkExists == null) + { + var addPk = $@"ALTER TABLE [{_schemaName}].[{_baseTableName}] ADD CONSTRAINT [PK_{_baseTableName}] PRIMARY KEY ([key])"; + using (var addPkCmd = new SqlCommand(addPk, connection)) + { + addPkCmd.ExecuteNonQuery(); + } + } + } + } + else + { + throw new InvalidOperationException($"Column [key] is missing in [{_schemaName}].[{_baseTableName}] and createOrUpdateTableSchema=false."); + } + } + } + } + + private void EnsureChildTable(SqlConnection connection, ListPropertyMetadata lp) + { + if (!TableExists(connection, lp.TableName)) + { + if (_createOrUpdateTableSchema) + { + var childTableSql = BuildCreateChildTableSql(lp); + using (var cmd = new SqlCommand(childTableSql, connection)) + { + cmd.ExecuteNonQuery(); + } + } + else + { + throw new InvalidOperationException($"Child table [{_schemaName}].[{lp.TableName}] does not exist and createOrUpdateTableSchema=false."); + } + } + else + { + var propMap = lp.ChildScalarProperties.ToDictionary(p => p.Name, p => p); + // For child tables we know we have key, ItemIndex plus these properties. + // key and ItemIndex are fixed: key NVARCHAR(450), ItemIndex INT + // Add them to propMap? key and ItemIndex are not from properties, so handle them separately. + EnsureColumns(connection, lp.TableName, propMap, isChildTable: true, isListType: false); + } + } + + private void EnsureChildTableForListType(SqlConnection connection) + { + var tableName = _baseTableName + "_Child"; + + if (!TableExists(connection, tableName)) + { + if (_createOrUpdateTableSchema) + { + var childTableSql = BuildCreateChildTableSqlForListType(); + using (var cmd = new SqlCommand(childTableSql, connection)) + { + cmd.ExecuteNonQuery(); + } + } + else + { + throw new InvalidOperationException($"Child table [{_schemaName}].[{tableName}] does not exist and createOrUpdateTableSchema=false."); + } + } + else + { + var propMap = _typeAnalyzer.ChildScalarProperties.ToDictionary(p => p.Name, p => p); + EnsureColumns(connection, tableName, propMap, isChildTable: true, isListType: true); + } + } + + private bool TableExists(SqlConnection connection, string tableName) + { + var sql = $@"SELECT 1 FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_SCHEMA = @schema AND TABLE_NAME = @table"; + using (var cmd = new SqlCommand(sql, connection)) + { + cmd.Parameters.AddWithValue("@schema", _schemaName); + cmd.Parameters.AddWithValue("@table", tableName); + return cmd.ExecuteScalar() != null; + } + } + + /// + /// Ensures all required columns exist. If not, adds them if createOrUpdateTableSchema = true. 
+ /// For main table: key + scalar properties + /// For child table: key + ItemIndex + child scalar properties + /// For list type main table: just key + /// For list type child table: key + ItemIndex + child scalar props + /// + private void EnsureColumns(SqlConnection connection, string tableName, Dictionary propMap, bool isChildTable, bool isListType) + { + var existingColumns = GetExistingColumns(connection, tableName); + // Always ensure 'key' column + if (!existingColumns.Contains("key", StringComparer.OrdinalIgnoreCase)) + { + AddSpecialColumn(connection, tableName, "key", "NVARCHAR(450) NOT NULL", isPk: false); // We'll rely on PK existing or created at table creation. + } + + if (isChildTable || (isListType && tableName.EndsWith("_Child", StringComparison.OrdinalIgnoreCase))) + { + // Ensure ItemIndex + if (!existingColumns.Contains("ItemIndex", StringComparer.OrdinalIgnoreCase)) + { + AddSpecialColumn(connection, tableName, "ItemIndex", "INT NOT NULL", isPk: false); + } + } + + // Ensure property columns + foreach (var kvp in propMap) + { + var colName = kvp.Key; + var prop = kvp.Value; + if (!existingColumns.Contains(colName, StringComparer.OrdinalIgnoreCase)) + { + if (_createOrUpdateTableSchema) + { + AddColumnForProperty(connection, tableName, prop); + } + else + { + throw new InvalidOperationException($"Column [{colName}] is missing in [{_schemaName}].[{tableName}] and createOrUpdateTableSchema=false."); + } + } + } + } + + private HashSet GetExistingColumns(SqlConnection connection, string tableName) + { + var columns = new HashSet(StringComparer.OrdinalIgnoreCase); + var sql = $@"SELECT COLUMN_NAME FROM INFORMATION_SCHEMA.COLUMNS WHERE TABLE_SCHEMA = @schema AND TABLE_NAME = @table"; + using (var cmd = new SqlCommand(sql, connection)) + { + cmd.Parameters.AddWithValue("@schema", _schemaName); + cmd.Parameters.AddWithValue("@table", tableName); + using (var reader = cmd.ExecuteReader()) + { + while (reader.Read()) + { + columns.Add(reader.GetString(0)); + } + } + } + return columns; + } + + private void AddColumnForProperty(SqlConnection connection, string tableName, PropertyInfo prop) + { + var sqlType = GetSqlTypeForProperty(prop); + var sql = $@"ALTER TABLE [{_schemaName}].[{tableName}] ADD [{prop.Name}] {sqlType} NULL"; + using (var cmd = new SqlCommand(sql, connection)) + { + cmd.ExecuteNonQuery(); + } + } + + private void AddSpecialColumn(SqlConnection connection, string tableName, string columnName, string sqlTypeDeclaration, bool isPk) + { + if (!_createOrUpdateTableSchema) + { + throw new InvalidOperationException($"Column [{columnName}] is missing in [{_schemaName}].[{tableName}] and createOrUpdateTableSchema=false."); + } + var sql = $@"ALTER TABLE [{_schemaName}].[{tableName}] ADD [{columnName}] {sqlTypeDeclaration}"; + using (var cmd = new SqlCommand(sql, connection)) + { + cmd.ExecuteNonQuery(); + } + + if (isPk) + { + var pkSql = $@"ALTER TABLE [{_schemaName}].[{tableName}] ADD CONSTRAINT [PK_{tableName}] PRIMARY KEY ([{columnName}])"; + using (var pkCmd = new SqlCommand(pkSql, connection)) + { + pkCmd.ExecuteNonQuery(); + } + } + } + + private string BuildCreateMainTableSql() + { + var sb = new StringBuilder(); + sb.AppendLine($"CREATE TABLE [{_schemaName}].[{_baseTableName}] ("); + sb.AppendLine("[key] NVARCHAR(450) NOT NULL PRIMARY KEY,"); + + foreach (var prop in _typeAnalyzer.ScalarProperties) + { + var sqlType = GetSqlTypeForProperty(prop); + sb.AppendLine($"[{prop.Name}] {sqlType} NULL,"); + } + + if (_typeAnalyzer.ScalarProperties.Length > 0) + 
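+ // Trimming assumes the trailing "," plus a CRLF newline appended by AppendLine (3 characters in total).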
sb.Length -= 3; // remove last comma + else + sb.Length -= 2; // remove last newline if no scalar props + + sb.AppendLine(")"); + return sb.ToString(); + } + + private string BuildCreateChildTableSql(ListPropertyMetadata lp) + { + var sb = new StringBuilder(); + sb.AppendLine($"CREATE TABLE [{_schemaName}].[{lp.TableName}] ("); + sb.AppendLine("[key] NVARCHAR(450) NOT NULL,"); + sb.AppendLine("[ItemIndex] INT NOT NULL,"); + + foreach (var cprop in lp.ChildScalarProperties) + { + var sqlType = GetSqlTypeForProperty(cprop); + sb.AppendLine($"[{cprop.Name}] {sqlType} NULL,"); + } + + sb.AppendLine($"CONSTRAINT [PK_{lp.TableName}] PRIMARY KEY ([key], [ItemIndex])"); + sb.AppendLine(")"); + return sb.ToString(); + } + + private string BuildCreateChildTableSqlForListType() + { + var tableName = _baseTableName + "_Child"; + var sb = new StringBuilder(); + sb.AppendLine($"CREATE TABLE [{_schemaName}].[{tableName}] ("); + sb.AppendLine("[key] NVARCHAR(450) NOT NULL,"); + sb.AppendLine("[ItemIndex] INT NOT NULL,"); + + foreach (var cprop in _typeAnalyzer.ChildScalarProperties) + { + var sqlType = GetSqlTypeForProperty(cprop); + sb.AppendLine($"[{cprop.Name}] {sqlType} NULL,"); + } + + sb.AppendLine($"CONSTRAINT [PK_{tableName}] PRIMARY KEY ([key], [ItemIndex])"); + sb.AppendLine(")"); + return sb.ToString(); + } + + public string BuildUpsertMainSql() + { + var setClauses = string.Join(", ", _typeAnalyzer.ScalarProperties.Select(p => $"[{p.Name}] = @{p.Name}")); + var insertColumns = string.Join(", ", _typeAnalyzer.ScalarProperties.Select(p => $"[{p.Name}]")); + var insertValues = string.Join(", ", _typeAnalyzer.ScalarProperties.Select(p => $"@{p.Name}")); + return $@" + IF EXISTS (SELECT 1 FROM [{_schemaName}].[{_baseTableName}] WHERE [key] = @key) + BEGIN + UPDATE [{_schemaName}].[{_baseTableName}] SET {setClauses} WHERE [key] = @key; + END + ELSE + BEGIN + INSERT INTO [{_schemaName}].[{_baseTableName}] ([key]{(insertColumns.Length > 0 ? ", " + insertColumns : "")}) + VALUES (@key{(insertValues.Length > 0 ? ", " + insertValues : "")}); + END"; + } + + public string BuildUpsertMainSqlForListType() + { + // For list type: just ensure row with key + return $@" + IF NOT EXISTS (SELECT 1 FROM [{_schemaName}].[{_baseTableName}] WHERE [key] = @key) + BEGIN + INSERT INTO [{_schemaName}].[{_baseTableName}] ([key]) VALUES (@key); + END"; + } + + public string BuildInsertChildSql(ListPropertyMetadata lp) + { + var columns = new List { "[key]", "[ItemIndex]" }; + columns.AddRange(lp.ChildScalarProperties.Select(p => $"[{p.Name}]")); + var values = new List { "@key", "@ItemIndex" }; + values.AddRange(lp.ChildScalarProperties.Select(p => $"@{p.Name}")); + + return $@" + INSERT INTO [{_schemaName}].[{lp.TableName}] + ({string.Join(", ", columns)}) + VALUES ({string.Join(", ", values)})"; + } + + public string BuildInsertChildSqlForListType() + { + var tableName = _baseTableName + "_Child"; + var columns = new List { "[key]", "[ItemIndex]" }; + columns.AddRange(_typeAnalyzer.ChildScalarProperties.Select(p => $"[{p.Name}]")); + var values = new List { "@key", "@ItemIndex" }; + values.AddRange(_typeAnalyzer.ChildScalarProperties.Select(p => $"@{p.Name}")); + + return $@" + INSERT INTO [{_schemaName}].[{tableName}] + ({string.Join(", ", columns)}) + VALUES ({string.Join(", ", values)})"; + } + + /// + /// Maps C# property types to appropriate SQL types. You can refine this mapping as needed. 
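+ /// TimeSpan maps to BIGINT (stored as ticks) and unrecognized types fall back to NVARCHAR(MAX).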
+ /// + private string GetSqlTypeForProperty(PropertyInfo prop) + { + var type = prop.PropertyType; + // Unwrap nullable + Type underlying = Nullable.GetUnderlyingType(type) ?? type; + + if (underlying == typeof(int) || underlying == typeof(short) || underlying == typeof(byte)) + return "INT"; + if (underlying == typeof(long)) + return "BIGINT"; + if (underlying == typeof(bool)) + return "BIT"; + if (underlying == typeof(DateTime)) + return "DATETIME2"; + if (underlying == typeof(decimal)) + return "DECIMAL(18,2)"; + if (underlying == typeof(double)) + return "FLOAT"; + if (underlying == typeof(float)) + return "REAL"; + if (underlying == typeof(Guid)) + return "UNIQUEIDENTIFIER"; + if (underlying == typeof(TimeSpan)) + return "BIGINT"; + + // We could store TimeSpan as ticks in a BIGINT column, or a string. For simplicity, store as BIGINT (ticks). + // For TimeSpan conversion: store and retrieve ticks. + + // If string or unknown: + if (underlying == typeof(string)) + return "NVARCHAR(MAX)"; + + // Default to NVARCHAR(MAX) for unknown types + return "NVARCHAR(MAX)"; + } + } +} diff --git a/src/Cortex.States.MSSqlServer/SqlServerKeyValueStateStore.cs b/src/Cortex.States.MSSqlServer/SqlServerKeyValueStateStore.cs new file mode 100644 index 0000000..615fefc --- /dev/null +++ b/src/Cortex.States.MSSqlServer/SqlServerKeyValueStateStore.cs @@ -0,0 +1,256 @@ +using Microsoft.Data.SqlClient; +using System; +using System.Collections.Generic; +using System.Text.Json; +using System.Threading; +using System.Threading.Tasks; + +namespace Cortex.States.MSSqlServer +{ + public class SqlServerKeyValueStateStore : IStateStore, IDisposable + { + private readonly string _connectionString; + private readonly string _schemaName; + private readonly string _tableName; + private readonly Func _keySerializer; + private readonly Func _valueSerializer; + private readonly Func _keyDeserializer; + private readonly Func _valueDeserializer; + + private static readonly SemaphoreSlim _initializationLock = new SemaphoreSlim(1, 1); + private volatile bool _isInitialized; + + public string Name { get; } + + /// + /// Initializes a new instance of the SqlServerStateStore. + /// + /// A friendly name for the store. + /// The connection string to the SQL Server database. + /// The schema name under which the table will be created. Defaults to "dbo". + /// The name of the table to use for storing state entries. + /// Optional key serializer. If not provided, JSON serialization is used. + /// Optional value serializer. If not provided, JSON serialization is used. + /// Optional key deserializer. If not provided, JSON deserialization is used. + /// Optional value deserializer. If not provided, JSON deserialization is used. 
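+ /// Example (a minimal usage sketch; the store name, connection string and table name below are illustrative placeholders):
+ /// var store = new SqlServerKeyValueStateStore&lt;string, string&gt;(
+ ///     "orders-store", "Server=localhost;Database=AppState;Trusted_Connection=True;", "OrderState");
+ /// store.Put("order-1", "pending");
+ /// var status = store.Get("order-1");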
+ public SqlServerKeyValueStateStore( + string name, + string connectionString, + string tableName, + string schemaName = "dbo", + Func keySerializer = null, + Func valueSerializer = null, + Func keyDeserializer = null, + Func valueDeserializer = null) + { + if (string.IsNullOrWhiteSpace(name)) + throw new ArgumentNullException(nameof(name)); + if (string.IsNullOrWhiteSpace(connectionString)) + throw new ArgumentNullException(nameof(connectionString)); + if (string.IsNullOrWhiteSpace(tableName)) + throw new ArgumentNullException(nameof(tableName)); + + Name = name; + _connectionString = connectionString; + _schemaName = schemaName; + _tableName = tableName; + + _keySerializer = key => JsonSerializer.Serialize(key); + _valueSerializer = value => JsonSerializer.Serialize(value); + _keyDeserializer = str => JsonSerializer.Deserialize(str)!; + _valueDeserializer = str => JsonSerializer.Deserialize(str)!; + + // Initialize the table + InitializeAsync().GetAwaiter().GetResult(); + } + + private async Task InitializeAsync() + { + if (_isInitialized) return; + + await _initializationLock.WaitAsync().ConfigureAwait(false); + try + { + if (_isInitialized) return; + + using (var connection = new SqlConnection(_connectionString)) + { + await connection.OpenAsync().ConfigureAwait(false); + + // Ensure schema exists (if not dbo) + if (!string.Equals(_schemaName, "dbo", StringComparison.OrdinalIgnoreCase)) + { + var createSchemaSql = $@" + IF NOT EXISTS (SELECT * FROM sys.schemas WHERE name = '{_schemaName}') + BEGIN + EXEC('CREATE SCHEMA [{_schemaName}]') + END"; + using (var cmd = new SqlCommand(createSchemaSql, connection)) + { + await cmd.ExecuteNonQueryAsync().ConfigureAwait(false); + } + } + + // Ensure table exists + var createTableSql = $@" + IF NOT EXISTS (SELECT * FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_SCHEMA = '{_schemaName}' AND TABLE_NAME = '{_tableName}') + BEGIN + CREATE TABLE [{_schemaName}].[{_tableName}] ( + [key] NVARCHAR(450) NOT NULL PRIMARY KEY, + [value] NVARCHAR(MAX) NULL + ); + END"; + using (var cmd = new SqlCommand(createTableSql, connection)) + { + await cmd.ExecuteNonQueryAsync().ConfigureAwait(false); + } + } + + _isInitialized = true; + } + finally + { + _initializationLock.Release(); + } + } + + private void EnsureInitialized() + { + if (!_isInitialized) + { + throw new InvalidOperationException("SqlServerStateStore is not properly initialized."); + } + } + + public TValue Get(TKey key) + { + EnsureInitialized(); + + var serializedKey = _keySerializer(key); + + var sql = $@"SELECT [value] FROM [{_schemaName}].[{_tableName}] WHERE [key] = @key"; + using (var connection = new SqlConnection(_connectionString)) + using (var cmd = new SqlCommand(sql, connection)) + { + cmd.Parameters.AddWithValue("@key", (object)serializedKey ?? 
DBNull.Value); + connection.Open(); + var result = cmd.ExecuteScalar() as string; + if (result == null) + return default; + return _valueDeserializer(result); + } + } + + public void Put(TKey key, TValue value) + { + EnsureInitialized(); + + var serializedKey = _keySerializer(key); + var serializedValue = _valueSerializer(value); + + // Upsert pattern using MERGE or a simple IF EXISTS + var sql = $@" + IF EXISTS (SELECT 1 FROM [{_schemaName}].[{_tableName}] WHERE [key] = @key) + BEGIN + UPDATE [{_schemaName}].[{_tableName}] SET [value] = @value WHERE [key] = @key; + END + ELSE + BEGIN + INSERT INTO [{_schemaName}].[{_tableName}] ([key], [value]) VALUES (@key, @value); + END"; + + using (var connection = new SqlConnection(_connectionString)) + using (var cmd = new SqlCommand(sql, connection)) + { + cmd.Parameters.AddWithValue("@key", (object)serializedKey ?? DBNull.Value); + cmd.Parameters.AddWithValue("@value", (object)serializedValue ?? DBNull.Value); + connection.Open(); + cmd.ExecuteNonQuery(); + } + } + + public bool ContainsKey(TKey key) + { + EnsureInitialized(); + + var serializedKey = _keySerializer(key); + + var sql = $@"SELECT COUNT(*) FROM [{_schemaName}].[{_tableName}] WHERE [key] = @key"; + using (var connection = new SqlConnection(_connectionString)) + using (var cmd = new SqlCommand(sql, connection)) + { + cmd.Parameters.AddWithValue("@key", (object)serializedKey ?? DBNull.Value); + connection.Open(); + var count = (int)cmd.ExecuteScalar(); + return count > 0; + } + } + + public void Remove(TKey key) + { + EnsureInitialized(); + + var serializedKey = _keySerializer(key); + + var sql = $@"DELETE FROM [{_schemaName}].[{_tableName}] WHERE [key] = @key"; + using (var connection = new SqlConnection(_connectionString)) + using (var cmd = new SqlCommand(sql, connection)) + { + cmd.Parameters.AddWithValue("@key", (object)serializedKey ?? DBNull.Value); + connection.Open(); + cmd.ExecuteNonQuery(); + } + } + + public IEnumerable> GetAll() + { + EnsureInitialized(); + + var sql = $@"SELECT [key], [value] FROM [{_schemaName}].[{_tableName}]"; + using (var connection = new SqlConnection(_connectionString)) + using (var cmd = new SqlCommand(sql, connection)) + { + connection.Open(); + using (var reader = cmd.ExecuteReader()) + { + while (reader.Read()) + { + var serializedKey = reader.GetString(0); + var serializedValue = reader.IsDBNull(1) ? null : reader.GetString(1); + var key = _keyDeserializer(serializedKey); + var value = serializedValue == null ? default : _valueDeserializer(serializedValue); + yield return new KeyValuePair(key, value!); + } + } + } + } + + public IEnumerable GetKeys() + { + EnsureInitialized(); + + var sql = $@"SELECT [key] FROM [{_schemaName}].[{_tableName}]"; + using (var connection = new SqlConnection(_connectionString)) + using (var cmd = new SqlCommand(sql, connection)) + { + connection.Open(); + using (var reader = cmd.ExecuteReader()) + { + while (reader.Read()) + { + var serializedKey = reader.GetString(0); + yield return _keyDeserializer(serializedKey); + } + } + } + } + + public void Dispose() + { + // Nothing to dispose specifically here since we create new connections on each call. + // Just in case, we can attempt to release the initialization lock. 
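+ // Note that the semaphore is static, so it is shared by every instance of this store type;
+ // disposing one instance disposes the shared lock for all of them.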
+ _initializationLock.Dispose(); + } + + } +} diff --git a/src/Cortex.States.MSSqlServer/SqlServerStateStore.cs b/src/Cortex.States.MSSqlServer/SqlServerStateStore.cs new file mode 100644 index 0000000..807cd01 --- /dev/null +++ b/src/Cortex.States.MSSqlServer/SqlServerStateStore.cs @@ -0,0 +1,585 @@ +using Microsoft.Data.SqlClient; +using System; +using System.Collections.Generic; +using System.Data; +using System.Linq; +using System.Reflection; +using System.Threading; + +namespace Cortex.States.MSSqlServer +{ + public class SqlServerStateStore : IStateStore, IDisposable + where TValue : new() + { + private readonly string _connectionString; + private readonly string _schemaName; + private readonly string _baseTableName; + private readonly bool _createOrUpdateTableSchema; + + public string Name { get; } + + private static readonly SemaphoreSlim _initializationLock = new SemaphoreSlim(1, 1); + private volatile bool _isInitialized; + + private readonly TypeAnalyzer _typeAnalyzer; + private readonly PropertyConverter _propertyConverter; // may be less used now + private readonly SchemaManager _schemaManager; + + public SqlServerStateStore( + string name, + string connectionString, + string tableName, + string schemaName = "dbo", + bool createOrUpdateTableSchema = true) + { + if (string.IsNullOrWhiteSpace(name)) + throw new ArgumentNullException(nameof(name)); + if (string.IsNullOrWhiteSpace(connectionString)) + throw new ArgumentNullException(nameof(connectionString)); + if (string.IsNullOrWhiteSpace(tableName)) + throw new ArgumentNullException(nameof(tableName)); + + Name = name; + _connectionString = connectionString; + _schemaName = schemaName; + _baseTableName = tableName; + _createOrUpdateTableSchema = createOrUpdateTableSchema; + + _typeAnalyzer = new TypeAnalyzer(typeof(TValue), _baseTableName); + _propertyConverter = new PropertyConverter(); + _schemaManager = new SchemaManager(_connectionString, _schemaName, _baseTableName, _typeAnalyzer, _createOrUpdateTableSchema); + + Initialize(); + } + + private void Initialize() + { + if (_isInitialized) return; + _initializationLock.Wait(); + try + { + if (_isInitialized) return; + _schemaManager.EnsureSchemaAndTables(); + _isInitialized = true; + } + finally + { + _initializationLock.Release(); + } + } + + private void EnsureInitialized() + { + if (!_isInitialized) + throw new InvalidOperationException("SqlServerStateStore is not properly initialized."); + } + + #region Get and Put Helpers + + private string GetSqlTypeForProperty(PropertyInfo prop) + { + var type = prop.PropertyType; + var underlying = Nullable.GetUnderlyingType(type) ?? 
type; + + if (underlying == typeof(int) || underlying == typeof(short) || underlying == typeof(byte)) + return "INT"; + if (underlying == typeof(long)) + return "BIGINT"; + if (underlying == typeof(bool)) + return "BIT"; + if (underlying == typeof(DateTime)) + return "DATETIME2"; + if (underlying == typeof(decimal)) + return "DECIMAL(18,2)"; + if (underlying == typeof(double)) + return "FLOAT"; + if (underlying == typeof(float)) + return "REAL"; + if (underlying == typeof(Guid)) + return "UNIQUEIDENTIFIER"; + if (underlying == typeof(TimeSpan)) + return "BIGINT"; // stored as ticks + if (underlying == typeof(string)) + return "NVARCHAR(MAX)"; + + // default + return "NVARCHAR(MAX)"; + } + + private object ReadValueFromReader(SqlDataReader reader, PropertyInfo prop) + { + var colIndex = reader.GetOrdinal(prop.Name); + if (reader.IsDBNull(colIndex)) return null; + + var sqlType = GetSqlTypeForProperty(prop); + var underlying = Nullable.GetUnderlyingType(prop.PropertyType) ?? prop.PropertyType; + + // Read based on sqlType + switch (sqlType) + { + case "INT": + int iVal = reader.GetInt32(colIndex); + return ConvertToNullableIfNeeded(iVal, prop.PropertyType); + case "BIGINT": + long lVal = reader.GetInt64(colIndex); + if (underlying == typeof(TimeSpan)) + { + return ConvertToNullableIfNeeded(TimeSpan.FromTicks(lVal), prop.PropertyType); + } + return ConvertToNullableIfNeeded(lVal, prop.PropertyType); + case "BIT": + bool bVal = reader.GetBoolean(colIndex); + return ConvertToNullableIfNeeded(bVal, prop.PropertyType); + case "DATETIME2": + DateTime dtVal = reader.GetDateTime(colIndex); + return ConvertToNullableIfNeeded(dtVal, prop.PropertyType); + case "DECIMAL(18,2)": + decimal decVal = reader.GetDecimal(colIndex); + return ConvertToNullableIfNeeded(decVal, prop.PropertyType); + case "FLOAT": + double dVal = reader.GetDouble(colIndex); + return ConvertToNullableIfNeeded(dVal, prop.PropertyType); + case "REAL": + float fVal = (float)reader.GetDouble(colIndex); // or reader.GetFloat if available + return ConvertToNullableIfNeeded(fVal, prop.PropertyType); + case "UNIQUEIDENTIFIER": + Guid gVal = reader.GetGuid(colIndex); + return ConvertToNullableIfNeeded(gVal, prop.PropertyType); + default: + // NVARCHAR(MAX) or unknown + // For strings or unknown, we get string + string sVal = reader.GetString(colIndex); + // if property isn't string but something else unknown, attempt convert + return ConvertToNullableIfNeeded(ConvertFromStringFallback(sVal, prop.PropertyType), prop.PropertyType); + } + } + + private object ConvertFromStringFallback(string value, Type targetType) + { + // fallback if needed; just try basic converter or return string as is + if (targetType == typeof(string) || Nullable.GetUnderlyingType(targetType) == typeof(string)) + return value; + + // Attempt using propertyConverter as a fallback + return _propertyConverter.ConvertFromString(targetType, value); + } + + private object ConvertToNullableIfNeeded(T value, Type targetType) + { + // If targetType is nullable and value is default, handle if needed + // Actually no extra logic needed, just return value as object. + return value; + } + + private void SetParameterValueForProperty(SqlParameter param, PropertyInfo prop, object propValue) + { + if (propValue == null) + { + param.Value = DBNull.Value; + return; + } + + var sqlType = GetSqlTypeForProperty(prop); + var underlying = Nullable.GetUnderlyingType(prop.PropertyType) ?? 
prop.PropertyType; + + if (underlying == typeof(TimeSpan) && sqlType == "BIGINT") + { + var ts = (TimeSpan)propValue; + param.Value = ts.Ticks; + return; + } + + // For other types, just assign the value directly. ADO.NET will handle conversions. + // Make sure to assign the actual typed value if needed: + // For a decimal property and DECIMAL(18,2): param.Value = (decimal)propValue; + // For a GUID: param.Value = (Guid)propValue; + // For datetime: param.Value = (DateTime)propValue; + // etc. + + param.Value = propValue; + } + + #endregion + + public TValue Get(TKey key) + { + EnsureInitialized(); + var serializedKey = key.ToString(); + using (var connection = new SqlConnection(_connectionString)) + { + connection.Open(); + + if (_typeAnalyzer.IsListType) + { + var mainSql = $"SELECT [key] FROM [{_schemaName}].[{_baseTableName}] WHERE [key] = @key"; + bool exists; + using (var cmd = new SqlCommand(mainSql, connection)) + { + cmd.Parameters.AddWithValue("@key", serializedKey); + var res = cmd.ExecuteScalar(); + exists = (res != null); + } + + if (!exists) + return default; + + var listValue = Activator.CreateInstance(); + var listAsIList = (System.Collections.IList)listValue; + var childSql = $"SELECT * FROM [{_schemaName}].[{_baseTableName}_Child] WHERE [key] = @key ORDER BY [ItemIndex]"; + + using (var cmd = new SqlCommand(childSql, connection)) + { + cmd.Parameters.AddWithValue("@key", serializedKey); + using (var reader = cmd.ExecuteReader()) + { + while (reader.Read()) + { + var childInstance = Activator.CreateInstance(_typeAnalyzer.ChildItemType); + foreach (var cprop in _typeAnalyzer.ChildScalarProperties) + { + if (!ColumnExists(reader, cprop.Name)) continue; + var val = ReadValueFromReader(reader, cprop); + cprop.SetValue(childInstance, val); + } + listAsIList.Add(childInstance); + } + } + } + + return listValue; + } + else + { + var mainSql = $"SELECT * FROM [{_schemaName}].[{_baseTableName}] WHERE [key] = @key"; + TValue instance = new TValue(); + using (var cmd = new SqlCommand(mainSql, connection)) + { + cmd.Parameters.AddWithValue("@key", serializedKey); + using (var reader = cmd.ExecuteReader()) + { + if (!reader.Read()) + return default; + + // Set scalar properties + foreach (var prop in _typeAnalyzer.ScalarProperties) + { + if (!ColumnExists(reader, prop.Name)) continue; + var val = ReadValueFromReader(reader, prop); + prop.SetValue(instance, val); + } + } + } + + // Load list properties + foreach (var lp in _typeAnalyzer.ListProperties) + { + var listInstance = (System.Collections.IList)Activator.CreateInstance(lp.Property.PropertyType); + var childSql = $"SELECT * FROM [{_schemaName}].[{lp.TableName}] WHERE [key] = @key ORDER BY [ItemIndex]"; + using (var cmd = new SqlCommand(childSql, connection)) + { + cmd.Parameters.AddWithValue("@key", serializedKey); + using (var reader = cmd.ExecuteReader()) + { + while (reader.Read()) + { + var childInstance = Activator.CreateInstance(lp.ChildItemType); + foreach (var cprop in lp.ChildScalarProperties) + { + if (!ColumnExists(reader, cprop.Name)) continue; + var val = ReadValueFromReader(reader, cprop); + cprop.SetValue(childInstance, val); + } + listInstance.Add(childInstance); + } + } + } + + lp.Property.SetValue(instance, listInstance); + } + + return instance; + } + } + } + + private bool ColumnExists(SqlDataReader reader, string columnName) + { + try + { + return reader.GetOrdinal(columnName) >= 0; + } + catch + { + return false; + } + } + + public void Put(TKey key, TValue value) + { + EnsureInitialized(); + var 
serializedKey = key.ToString(); + + using (var connection = new SqlConnection(_connectionString)) + { + connection.Open(); + + if (_typeAnalyzer.IsListType) + { + var upsertMainSql = _schemaManager.BuildUpsertMainSqlForListType(); + using (var cmd = new SqlCommand(upsertMainSql, connection)) + { + cmd.Parameters.AddWithValue("@key", serializedKey); + cmd.ExecuteNonQuery(); + } + + var deleteChildSql = $"DELETE FROM [{_schemaName}].[{_baseTableName}_Child] WHERE [key] = @key"; + using (var deleteCmd = new SqlCommand(deleteChildSql, connection)) + { + deleteCmd.Parameters.AddWithValue("@key", serializedKey); + deleteCmd.ExecuteNonQuery(); + } + + var listAsIList = (System.Collections.IList)value; + var insertChildSql = _schemaManager.BuildInsertChildSqlForListType(); + using (var insertCmd = new SqlCommand(insertChildSql, connection)) + { + int index = 0; + foreach (var item in listAsIList) + { + insertCmd.Parameters.Clear(); + insertCmd.Parameters.AddWithValue("@key", serializedKey); + insertCmd.Parameters.AddWithValue("@ItemIndex", index); + foreach (var cprop in _typeAnalyzer.ChildScalarProperties) + { + var param = insertCmd.Parameters.Add("@" + cprop.Name, System.Data.SqlDbType.VarChar); // dummy + var val = cprop.GetValue(item); + SetParameterForProperty(param, cprop, val); + } + insertCmd.ExecuteNonQuery(); + index++; + } + } + } + else + { + var upsertMainSql = _schemaManager.BuildUpsertMainSql(); + using (var cmd = new SqlCommand(upsertMainSql, connection)) + { + cmd.Parameters.AddWithValue("@key", serializedKey); + foreach (var prop in _typeAnalyzer.ScalarProperties) + { + var param = cmd.Parameters.Add("@" + prop.Name, System.Data.SqlDbType.VarChar); // dummy type + var propVal = prop.GetValue(value); + SetParameterForProperty(param, prop, propVal); + } + cmd.ExecuteNonQuery(); + } + + // For each list property + foreach (var lp in _typeAnalyzer.ListProperties) + { + var deleteSql = $"DELETE FROM [{_schemaName}].[{lp.TableName}] WHERE [key] = @key"; + using (var deleteCmd = new SqlCommand(deleteSql, connection)) + { + deleteCmd.Parameters.AddWithValue("@key", serializedKey); + deleteCmd.ExecuteNonQuery(); + } + + var listValue = lp.Property.GetValue(value) as System.Collections.IEnumerable; + if (listValue != null) + { + int index = 0; + var insertSql = _schemaManager.BuildInsertChildSql(lp); + using (var insertCmd = new SqlCommand(insertSql, connection)) + { + foreach (var item in listValue) + { + insertCmd.Parameters.Clear(); + insertCmd.Parameters.AddWithValue("@key", serializedKey); + insertCmd.Parameters.AddWithValue("@ItemIndex", index); + foreach (var cprop in lp.ChildScalarProperties) + { + var param = insertCmd.Parameters.Add("@" + cprop.Name, System.Data.SqlDbType.Variant); + var val = cprop.GetValue(item); + SetParameterValueForProperty(param, cprop, val); + } + insertCmd.ExecuteNonQuery(); + index++; + } + } + } + } + } + } + } + + public bool ContainsKey(TKey key) + { + EnsureInitialized(); + var serializedKey = key.ToString(); + var sql = $"SELECT COUNT(*) FROM [{_schemaName}].[{_baseTableName}] WHERE [key] = @key"; + using (var connection = new SqlConnection(_connectionString)) + using (var cmd = new SqlCommand(sql, connection)) + { + cmd.Parameters.AddWithValue("@key", serializedKey); + connection.Open(); + var count = (int)cmd.ExecuteScalar(); + return count > 0; + } + } + + public void Remove(TKey key) + { + EnsureInitialized(); + var serializedKey = key.ToString(); + using (var connection = new SqlConnection(_connectionString)) + { + connection.Open(); + + if 
(_typeAnalyzer.IsListType) + { + var deleteChildSql = $"DELETE FROM [{_schemaName}].[{_baseTableName}_Child] WHERE [key] = @key"; + using (var cmd = new SqlCommand(deleteChildSql, connection)) + { + cmd.Parameters.AddWithValue("@key", serializedKey); + cmd.ExecuteNonQuery(); + } + + var mainSql = $"DELETE FROM [{_schemaName}].[{_baseTableName}] WHERE [key] = @key"; + using (var cmd = new SqlCommand(mainSql, connection)) + { + cmd.Parameters.AddWithValue("@key", serializedKey); + cmd.ExecuteNonQuery(); + } + } + else + { + // Remove from child tables + foreach (var lp in _typeAnalyzer.ListProperties) + { + var deleteChildSql = $"DELETE FROM [{_schemaName}].[{lp.TableName}] WHERE [key] = @key"; + using (var cmd = new SqlCommand(deleteChildSql, connection)) + { + cmd.Parameters.AddWithValue("@key", serializedKey); + cmd.ExecuteNonQuery(); + } + } + + var mainSql = $"DELETE FROM [{_schemaName}].[{_baseTableName}] WHERE [key] = @key"; + using (var cmd = new SqlCommand(mainSql, connection)) + { + cmd.Parameters.AddWithValue("@key", serializedKey); + cmd.ExecuteNonQuery(); + } + } + } + } + + public IEnumerable> GetAll() + { + EnsureInitialized(); + var keys = GetKeys().ToList(); + foreach (var k in keys) + { + yield return new KeyValuePair(k, Get(k)); + } + } + + public IEnumerable GetKeys() + { + EnsureInitialized(); + var sql = $"SELECT [key] FROM [{_schemaName}].[{_baseTableName}]"; + using (var connection = new SqlConnection(_connectionString)) + using (var cmd = new SqlCommand(sql, connection)) + { + connection.Open(); + using (var reader = cmd.ExecuteReader()) + { + var keyType = typeof(TKey); + while (reader.Read()) + { + var keyStr = reader.GetString(0); + yield return (TKey)Convert.ChangeType(keyStr, keyType); + } + } + } + } + + private SqlDbType GetSqlDbTypeForProperty(PropertyInfo prop) + { + var type = Nullable.GetUnderlyingType(prop.PropertyType) ?? prop.PropertyType; + + if (type == typeof(int) || type == typeof(short) || type == typeof(byte)) + return SqlDbType.Int; + if (type == typeof(long)) + return SqlDbType.BigInt; + if (type == typeof(bool)) + return SqlDbType.Bit; + if (type == typeof(DateTime)) + return SqlDbType.DateTime2; + if (type == typeof(decimal)) + return SqlDbType.Decimal; + if (type == typeof(double)) + return SqlDbType.Float; + if (type == typeof(float)) + return SqlDbType.Real; + if (type == typeof(Guid)) + return SqlDbType.UniqueIdentifier; + if (type == typeof(TimeSpan)) + return SqlDbType.BigInt; // storing ticks in BIGINT + if (type == typeof(string)) + return SqlDbType.NVarChar; + + // Default to NVarChar for unknown + return SqlDbType.NVarChar; + } + + private void SetParameterForProperty(SqlParameter param, PropertyInfo prop, object propValue) + { + param.SqlDbType = GetSqlDbTypeForProperty(prop); + + // If it's a string and we said NVarChar, we can set Size = -1 for NVARCHAR(MAX) + if (param.SqlDbType == SqlDbType.NVarChar) + { + param.Size = -1; + } + else if (param.SqlDbType == SqlDbType.Decimal) + { + // For DECIMAL(18,2) + param.Precision = 18; + param.Scale = 2; + } + + // Handle TimeSpan as ticks if needed + var underlying = Nullable.GetUnderlyingType(prop.PropertyType) ?? 
prop.PropertyType; + if (underlying == typeof(TimeSpan) && param.SqlDbType == SqlDbType.BigInt) + { + if (propValue == null) + { + param.Value = DBNull.Value; + } + else + { + TimeSpan ts = (TimeSpan)propValue; + param.Value = ts.Ticks; + } + return; + } + + // For other types, just assign directly + if (propValue == null) + { + param.Value = DBNull.Value; + } + else + { + param.Value = propValue; + } + } + + public void Dispose() + { + _initializationLock.Dispose(); + } + } +} diff --git a/src/Cortex.States.MSSqlServer/TypeAnalyzer.cs b/src/Cortex.States.MSSqlServer/TypeAnalyzer.cs new file mode 100644 index 0000000..9fb70d4 --- /dev/null +++ b/src/Cortex.States.MSSqlServer/TypeAnalyzer.cs @@ -0,0 +1,108 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Reflection; +using System.Text; +using System.Threading.Tasks; + +namespace Cortex.States.MSSqlServer +{ + internal class TypeAnalyzer + { + public PropertyInfo[] ScalarProperties { get; private set; } + public List ListProperties { get; private set; } + + // For List scenario + public bool IsListType { get; private set; } + public Type ChildItemType { get; private set; } + public PropertyInfo[] ChildScalarProperties { get; private set; } + + private Type _valueType; + private string _baseTableName; + + public TypeAnalyzer(Type valueType, string baseTableName) + { + _valueType = valueType; + _baseTableName = baseTableName; + AnalyzeType(); + } + + private void AnalyzeType() + { + // Check if valueType is List + if (IsGenericList(_valueType, out Type itemType)) + { + // Handle list scenario + IsListType = true; + ChildItemType = itemType; + ChildScalarProperties = GetScalarProperties(itemType); + ScalarProperties = new PropertyInfo[0]; + ListProperties = new List(); + } + else + { + // Regular object scenario + IsListType = false; + var props = _valueType.GetProperties(BindingFlags.Public | BindingFlags.Instance) + .Where(p => p.CanRead && p.CanWrite && p.GetIndexParameters().Length == 0) + .ToArray(); + + var scalarProps = new List(); + var listProps = new List(); + + foreach (var prop in props) + { + if (IsListProperty(prop, out Type childItemType)) + { + var childScalars = GetScalarProperties(childItemType); + listProps.Add(new ListPropertyMetadata + { + Property = prop, + ChildScalarProperties = childScalars, + ChildItemType = childItemType, + TableName = $"{_baseTableName}_{prop.Name}" + }); + } + else + { + scalarProps.Add(prop); + } + } + + ScalarProperties = scalarProps.ToArray(); + ListProperties = listProps; + } + } + + private bool IsListProperty(PropertyInfo prop, out Type itemType) + { + itemType = null; + if (!prop.PropertyType.IsGenericType) return false; + var genType = prop.PropertyType.GetGenericTypeDefinition(); + if (genType == typeof(List<>)) + { + itemType = prop.PropertyType.GetGenericArguments()[0]; + return true; + } + return false; + } + + private bool IsGenericList(Type type, out Type itemType) + { + itemType = null; + if (type.IsGenericType && type.GetGenericTypeDefinition() == typeof(List<>)) + { + itemType = type.GetGenericArguments()[0]; + return true; + } + return false; + } + + private PropertyInfo[] GetScalarProperties(Type t) + { + return t.GetProperties(BindingFlags.Public | BindingFlags.Instance) + .Where(p => p.CanRead && p.CanWrite && p.GetIndexParameters().Length == 0 && !IsListProperty(p, out _)) + .ToArray(); + } + } +} diff --git a/src/Cortex.States.PostgreSQL/Assets/cortex.png b/src/Cortex.States.PostgreSQL/Assets/cortex.png new file mode 100644 index 
0000000000000000000000000000000000000000..a4f9727d04f91f61f20720bd64b9c139fb041236 GIT binary patch literal 7179 zcmV+m9Q5OfP)Px#1ZP1_K>z@;j|==^1poj532;bRa{vGf5&!@T5&_cPe*6Fc8=y%C^C!)jU*}Z_TUZ5zkPS!zgzWpiKoUY0LP$tR!Xokt%Ag{H3XX^@B8wnv03972 z_hwv1kf6A*-1mH+Tg$Cm_f}O`27H#NQDPYKIv zcTCHy`fO@u)s1PH)sbnLH4!`}kZK|c%WX|xUTdf>GJS=u8BkWepa#-RXr-xDk(t@H zW@WFA%*qkfWGlBRGCQ}KE6>y#u7Wj@IeBX$a|&84%`IF@nqxf9Eo!w@+!~oz+$N|! zGOwgPGT%~1WPT~v)1{qUWzc%A^7WAg4s}HqRBUi)VHMOZs3)?px+juI+89Z!G1VJc zB&xQLlmylF3mS+d)%Qn|>-%q{Gz@%}+OT;?VqO2Az$`8yqwdcMnN^=m&#Gn9GOO9N zWmSgoa}7%%RkMWUH7sFyEou6S8a90e9_zTW>Pa(9t!!{;=E_xE+0bgztQ@G(RxZ@U zX6H7t+4KlCv)TDgY<6BVo1M3Y&B-^_!sZmT(Bo4DE$penwd|>awfwQDmCY?QRMf`i z7PlFX?QCvIJDW%9VDpMQ_+#lhHowFWelF`|^GiF~)8*^A%D5JkcX3sij~m#6$_@0m zu(F#itm@{El|3x6Y9mXm>S2l1c-+Vmt9w~uO)pzi-OCo$^sz;?rZ%x8Q+55MWT<|? zR>L4ErD2dQZhVHNG;H}~QR9|B$4$wo*gJiBEt{HI$)=I2xEuhhIXqJqL=&J0vCala z5N(h%O%XsY0Gvq>4G}=VGamuY)|~uiL2C?vYdAnt09d$|;L#&sk_?c=ZERj~yDb1L z>0t9s0idC$%Q^*_5G9Z#17M~3$VC820LaAwn(8AZK>|nur~sJ;c=dyOfYHG-0D8c)!3BgD z0ANym|IN6m>E(B(W>gS3OJ2?cFb6y?Pk02*2B>qM3!dyehx-I@f^^Pv!7~6xXt|F=bO5v<25_GNQSUxC#H8AO4sJ?%xdNQ*J~@1Hcqd#$aK3;6*i@;(*!qp zN$K#ErDYCqdVq%egycmAs0*E1fQrN=5J1U&?wU^l&=op;GEa^BbZsAjqqfh>eP;6F z)Ao6|Z&6L30M3*Oo+ETR-KT=*2%XxtPvO3(3wF_Ye(o~>8t!w1PapS14NvJlvF#Jx zr)&H4@D^1Y;3h9EV~`Hsl#Ft1yQhN}IO}{ANsFp%P!{Yu=K-8E>wGlM^MYqOFId^& z2%iLQaft=a3!W})l0KJV3MhJsTZ0AEbxpuK9u{Elz-8;ROww zR?TOCTT;p_`2Zw{iqPrGJT3QmCoihd8BOM?bf27cD&6OV=tx|mt@-5SMRuQ8=yZio zXXvycikWBcaNHyroC=-<(Ct1seEPUg8#$OY0`0p9-Gr zJ{g`@!7jLOL4{YlXVrYeag)-D-QWax7C_l`CNRIYFQbZ{KY1z6$Kr!#EgZ*Wg$`|~Vo!z(PSdoO8a_bY<-k(PJvp>_%Gv>m7eXoO8y z=v38xA~-=V_sQW?)$U2~rleO;=quWCh;^O(o^87Do2U9N{K`5{e9LlrUu6j^#7=7= z^K^|iM3;Gn?{VS6|5NG*Skr-zS?{?!Y}3WxJ=J&q7q<4T&slowP6s>-T#)3&a-QwJ zg;hNOSL6bxgy$1FEqErlrDbgSx;<>mwFhkL2aIjG<}W;N@XCF*V*N{$o67aDm_{0CMM*lVjE#{Dj{8=BvSl58QYDE=yg#mCogYr)>Me!L!;uW!7o8S@;_g zl7_uQY-^OC2f{JS*1bsWqcU+(hEBQd6W{?{d}`5fJiw)yk^ly7`)qjlJuY(RJKqrq z`GfcguyO3dif(EzUGOZ31~>troplQEFvkURT|{^Q=UekNvpM)VZw)KievEA%dfrbnvSA!cLKpGf(cmS-Gnz;po5gd(_bb!i_sP^nlvAr;IF94NvVp2S5{E zIe{x2j!!EhSb%2%93eco6oY=f@81m-UN#$TMCJ2m2ny;!--6vrS|s-=;JMtVsQK{x z#ZBAU#4ZIzS;aOu?=L=xFyyPUm3?Au_nm}G zSbMn7S+E-pEG2LS!||zwHaG#E*!EfQ6af9;A!OnMw7>mzcz7Q$)_Ul3`k>r-s{YQf z&a>g+c?iv$uRaJBBI%f+2drT5@6;XkD29qGXyza-z)iHk1%yWcqj%nyCZ5l%AP$=9nz{{A|*f(qAK$Qw5&SJ zo?;tL{umCL@!pybd`vwo2)C-yhLO6Dw|gDjaV9t?JOP~4_C;OuneH?GpWBB}^e|le zag}$AW%vAz`a|7QB%*g8o`;LG_RW8Vt~&u6od!uYy_Dq0U1st~gFVI<0$vHgCFeW9 zY2hhCr&r?Qf@c7ngiJHNoWjXST(|Mu9k#gnSvqH6=P3$yBu=={wJ%+went7Uf(X!h z-~WXzZrVoeq^hse9xCz}qzwws23No)rt09S0S1Am1V|yXgw4pRV@44sR;1jC7Xs)LiHjQyTH$)pCo|DzJmknGB*FnHD z$os71*<)0+QxUHkDiUUvsUTYIo+5d105l-ZFV=!{!t(V?P4YG}AheuQ< z)a-emPmxfTfpUQ9H^!gPggt-s-G2;kgHCgTT|RX;9RB!N9ozB9ZX| zC?+lh(FY)Yh70OmycjxJ!EqZ-{m7DPHj(pOlXI8VSTHMgo+MZULl4-%l?OEJ5)_`9 z(Pp&nqIS~RQSCoP!Z}bY&#D&NJyVLBFFJ5unWq3x0WgUB;?oN0L#}=Ev(RZd;I$w5 znl1!QRFdFfPE1bke%62S4+_m(COm)VAtDT1xySPR-{571QM}KRM;lZ_MXEts6QFA; zw9b8|16!j67Zjc@boz9c;XFw15r)DzZxBWORmK|j4NgF5PGS zm+!Fw$N|sWdE)pWr0VN*zzZ}*!U@j?XipDx^|lm-0Y}oo696jUd1alV^A}LqXi93!NSS@!Xt^``O^oeP4KkLkM3S8lMvPE8BGbE_G&O zSOp#2xInA-f57@K{6;+-NMz8F40r&k255&)Kx;ev74^(xYZ0-#OziKJ0jff0xXe@P zaVZ!nI7}*&F!yQT0hY^wYabyvt@HHoT#1VeFSU>ga2wjm_3!?`+K+t0I!}Dhx=#K`zsKJ#c;`7hI8_PnDx<%n&g8Sr z!jJQb?hEfc?!GyX3>@hZz>@&tSiDhOd}r?l!k8HzeONU`<1)T0q+4D7<$MGH@|7DY0!)`sNf0U{Nc%2ryo3k zYk8F5MCl*~1Sr76`LGIe<9oNQf*FoLRtTr`T28yoHk`i6)*H~)9sibok2Z4f@_qjM zf^u?^wsN8?5{EPmb(x)1}`npzBZk zK+o?!^Aiu9M%Xmr;kebWen8VL(9`1XsMZV>1%xNpe5UigK7Hn?bf27c3htXV*5HC9E-H8v zQVLjNO&@DL^o0>3BNXanKw4qK>g9W^^Tc=LJcQTM9q-Uo1{xL`!*%Sgf?ZrmYrvb8 
z+cXy7py=U=p_6`}TEx;@o?~r?zoa&5vz(0h=^8Jt0lwzN^VD`=`IvGYy9CcYNNZ1# za5+x{FL2wZSWPsB;6&*lx&d+-0FD2b?l|twAOi|v6&dNmiVZZ|T3k`<4bVGuqCYdJ z`@8_k?z7syoW@uNr-o;`58)D7R_oy}Jo_{KgdP}_8OU5QZBX8Xz_ovi1TR?ld~^kS z41qHh0GVslES5%vl{{;4J9? z7P1M+d2D6RJ~ntY+3(wPe{{`Su0WFVqlEuC$BTYv9L^HWA zV=cfwTA*TX1!4iyWSsz~+Unn3^U3i1O9l&`0dU5D1Govv`7A!Igw^i37<#IBw8HCP z?j*u8k`;R!kP)~~129qxa52`At{d)Zax&K5Asn8Q`3=YVpf$Fp2mId zn$OF9qVs~)e6bBKxq#X=EbZ;T9BwrYg!V9OB7$1h9~cut4myz&XdXp z^=9L-3+~B9IkX3H&zYN{lM#fxhJ9CQur&;L90Tapf5mE`YBpKtJeQ=5Q-bFRn-)NQ z$w1%)fYVpj#U{8XlXBUtye8IuG!i-)!EsHmen=MzdtuU+P9foKzIvbK558e7ht_UB zr)c*`8h~Ll&*@n)2o7x;QgQ6#720W7t$Hj%+YBPyqKm9x;Em8B6YwzKWogTE)WI!p zf2D`#?>?6U1GsN`R())NL$3&W{mY*_75XCcjPPfcTt7(R7TshxHzwFt?LHU!W-wRj zV4J9ksYP0NYJfI8ch;$cXE@JFUL4MgA#lhblk-?My0eDvg$ipBdi$}Z?h}i09o=O( z|BS2#w&smbL&L*Ofl+hJCPsg$1y2TOZg;E*oBUf+)HRyMd|DWY2|=M9{Q(l5YZS8LvDQJ_zf6|OeXj5+*L)g) zCd7mlF#-xlnb`aj=d?B?5Z$K>of`KAtogTb+Y7WWR~Tq?%(aKKqYuKZ ze15HjaXz$n*kl{i`@+JbLjT}DDNG|2b8rFSxfEcKmbUw-9vg5^BxchW*!8~8SJTKB z!%sM7>-C4MY{v--d3G`qPy)Os5_4#lSI?QBLc_yx)w|Ep#mD;*UFQqWbBctVbsBBo zSb)P_j&;qLfgHAdL3@bRGzc3wpZprZh4v0*LLSZ0#H69H?+M2h48BP-(5#TDa-XW* zbHTISr)>K&s~#^nTx3`si{QiV--Qc=wE~#Y>&zfS%1BnQlq;8Zh7C6Xi4VZuW0~!{ z>AVhjDhEpLQ#(-nT2sM&j}sj3YV`9}>^e0fXjmVBFOqmN(H|Nw#QB(>{5M$u7oS>8 zAunvmL^*?nBiObd=hVT|g-#2e!g-FwCF3!IgWGcZUgLLjm^<3oUJzfrFoD7&ptxVh z5ruBAj_xl+kEh!p>HlED{XIoMcrbKOj8F>0Z`>UN7C}Rz#)TNx@HGm z_%O)`2YsK~5c2}duOSOJ3cmJiaOe-#eDG81;|tOr;yD=g#Kkc z-20a~LZ`0g)3$vTqYn;uI(7@fnzXP*HZDZ%ix=qw2MK$~TCs6=&zakFEL6GcG_4N^ zbo`=R+A1F#0*vH6m+w4j?BnU&ghvT48uz7-GB`lPLWS11z6c$9Z~?HKDj~Bf&_$=( zET@rr@y%^EXe9gELzdI`nsJU`j>R`n4PJQ=DzxJmEJnd=B4nB?YhCbE;Zp@qRC@X7 zf#cm-6|DZ{%OeUoSgVcpg12k#*Kxe2(Hnv-4P~d8S>KX1&jw0t%=2Musm)mvHl=WbI^evqCKK8=qryV7NafQRsjtQ3W}d=6Ro47iAO zFNgnDF1Wt!@Yif+b|VeIn(&kWgSv0jz$NF?8^bPkVLmXqex(#`AKi z`wK~oR|13!+4|O3Y<6z5f2L`M`|MS0?V-=ZM~>@^zAKe?Zaa5SQUdgWH)XWJVdJwf zj>8AVG;r=*A85F^iP^@Qf?#88+x~%X47p5=KGq}1=aYr37~ilFcDxYBEUN9JbBX)6 zXu+w%rwDFo*(iZSm`TZ}FRG39F2(cFV~;m))?!|QBe11=*U29OK{JKtqW$9M3HCbp zE;;P663xws&x7qe(9=UniemR9EkN0MCcsezXAF=wzV;!#dB{x7l`@{weDD*RfEpx& zgd4GL41=)Ge55$1hY&IW0N)s54l(%&pNW5a<8SPlf4I*n3(whf;TM+C{=BfNR@&-c z0Z|9ghFCg!;2>Ol%u>ScbCu7Zrf>Jq2|KT@=z5vWSWy=U8l68mk2WQ(*>m1?zChZ2 zj=uX8#d(mB%p*V{jck-`dz)45R-Ds1_gPl5?Fh{Z11xkL$U8}dcHfdw1_$9T#RcXI z?)Ado@w^}zq$v9GUGeUXR>!P_YGw zSNL?mTU;^@ZuF#srv_-c&kLRfa2(udO9wAN=+wX)2lrT|f+qlUgiSki>flWp2lqIo zb6*sp(}b8-JPz)$OR)`z4xZM1odhs- z99)b@2TuSf!W#z{Lki+N6JToLIJnqS0C?lzVpv-D84y!AxI3-`#}y+|!6R^Y0Pd#y zfN{l+6z)qYycst!Iqv|5(%lD)D<&j`P4t{hOer`JH!(H)FB4OWzCm&4I%r%mq(a)B z1FR;d7W^d+|DH_Bc_Kcw;Pv?A{9DxKAz>L;Ea?u|fYF4My#E2S{{wP}i{H;-Y-|7k N002ovPDHLkV1i-n!nFVZ literal 0 HcmV?d00001 diff --git a/src/Cortex.States.PostgreSQL/Assets/license.md b/src/Cortex.States.PostgreSQL/Assets/license.md new file mode 100644 index 0000000..530f621 --- /dev/null +++ b/src/Cortex.States.PostgreSQL/Assets/license.md @@ -0,0 +1,20 @@ +The MIT License (MIT) + +Copyright (c) 2024 Buildersoft + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/src/Cortex.States.PostgreSQL/Cortex.States.PostgreSQL.csproj b/src/Cortex.States.PostgreSQL/Cortex.States.PostgreSQL.csproj new file mode 100644 index 0000000..fa71b7a --- /dev/null +++ b/src/Cortex.States.PostgreSQL/Cortex.States.PostgreSQL.csproj @@ -0,0 +1,9 @@ + + + + net8.0 + enable + enable + + + From 903f18bc99e40523f5b5e945e34ba7112f49b751 Mon Sep 17 00:00:00 2001 From: Enes Hoxha Date: Thu, 19 Dec 2024 20:34:14 +0100 Subject: [PATCH 6/8] [v1/feature/75]: Add support for PostgreSQL Implement PostgresStateStore with Schema Management, Implement PostgresKeyValueStateStore using Json KEY/VALUE --- README.md | 3 + .../Cortex.States.MSSqlServer.csproj | 2 +- .../Cortex.States.PostgreSQL.csproj | 56 ++- .../ListPropertyMetadata.cs | 12 + .../PostgresKeyValueStateStore.cs | 226 +++++++++ .../PostgresPropertyConverter.cs | 76 +++ .../PostgresSchemaManager.cs | 446 ++++++++++++++++ .../PostgresStateStore.cs | 474 ++++++++++++++++++ .../PostgresTypeAnalyzer.cs | 107 ++++ 9 files changed, 1396 insertions(+), 6 deletions(-) create mode 100644 src/Cortex.States.PostgreSQL/ListPropertyMetadata.cs create mode 100644 src/Cortex.States.PostgreSQL/PostgresKeyValueStateStore.cs create mode 100644 src/Cortex.States.PostgreSQL/PostgresPropertyConverter.cs create mode 100644 src/Cortex.States.PostgreSQL/PostgresSchemaManager.cs create mode 100644 src/Cortex.States.PostgreSQL/PostgresStateStore.cs create mode 100644 src/Cortex.States.PostgreSQL/PostgresTypeAnalyzer.cs diff --git a/README.md b/README.md index 3e4bda1..f9fc026 100644 --- a/README.md +++ b/README.md @@ -84,6 +84,9 @@ - **Cortex.States.MSSqlServer:** Persistent state storage using Microsoft Sql Server. [![NuGet Version](https://img.shields.io/nuget/v/Cortex.States.MSSqlServer?label=Cortex.States.MSSqlServer)](https://www.nuget.org/packages/Cortex.States.MSSqlServer) +- **Cortex.States.PostgreSQL:** Persistent state storage using PostgreSQL. +[![NuGet Version](https://img.shields.io/nuget/v/Cortex.States.PostgreSQL?label=Cortex.States.PostgreSQL)](https://www.nuget.org/packages/Cortex.States.PostgreSQL) + - **Cortex.Telemetry:** Core library to add support for Tracing and Matrics. 
[![NuGet Version](https://img.shields.io/nuget/v/Cortex.Telemetry?label=Cortex.Telemetry)](https://www.nuget.org/packages/Cortex.Telemetry) diff --git a/src/Cortex.States.MSSqlServer/Cortex.States.MSSqlServer.csproj b/src/Cortex.States.MSSqlServer/Cortex.States.MSSqlServer.csproj index cc3a9af..86bf0bc 100644 --- a/src/Cortex.States.MSSqlServer/Cortex.States.MSSqlServer.csproj +++ b/src/Cortex.States.MSSqlServer/Cortex.States.MSSqlServer.csproj @@ -14,7 +14,7 @@ https://github.com/buildersoftio/cortex - cortex vortex mediator eda streaming distributed streams states kafka pulsar rocksdb + cortex vortex mediator eda streaming distributed streams states mssql 1.0.1 license.md diff --git a/src/Cortex.States.PostgreSQL/Cortex.States.PostgreSQL.csproj b/src/Cortex.States.PostgreSQL/Cortex.States.PostgreSQL.csproj index fa71b7a..fd0a4eb 100644 --- a/src/Cortex.States.PostgreSQL/Cortex.States.PostgreSQL.csproj +++ b/src/Cortex.States.PostgreSQL/Cortex.States.PostgreSQL.csproj @@ -1,9 +1,55 @@  - - net8.0 - enable - enable - + + net9.0;net8.0;net7.0 + + 1.0.1 + 1.0.1 + Buildersoft Cortex Framework + Buildersoft + Buildersoft,EnesHoxha + Copyright © Buildersoft 2024 + + Cortex Data Framework is a robust, extensible platform designed to facilitate real-time data streaming, processing, and state management. It provides developers with a comprehensive suite of tools and libraries to build scalable, high-performance data pipelines tailored to diverse use cases. By abstracting underlying streaming technologies and state management solutions, Cortex Data Framework enables seamless integration, simplified development workflows, and enhanced maintainability for complex data-driven applications. + + + https://github.com/buildersoftio/cortex + cortex mediator eda streaming distributed streams states postgresql + + 1.0.1 + license.md + cortex.png + Cortex.States.PostgreSQL + True + True + True + + Just as the Cortex in our brains handles complex processing efficiently, Cortex Data Framework brings brainpower to your data management! 
+ https://buildersoft.io/ + README.md + + + + + True + \ + + + True + + + + True + + + + + + + + + + + diff --git a/src/Cortex.States.PostgreSQL/ListPropertyMetadata.cs b/src/Cortex.States.PostgreSQL/ListPropertyMetadata.cs new file mode 100644 index 0000000..47838f2 --- /dev/null +++ b/src/Cortex.States.PostgreSQL/ListPropertyMetadata.cs @@ -0,0 +1,12 @@ +using System.Reflection; + +namespace Cortex.States.PostgreSQL +{ + internal class ListPropertyMetadata + { + public PropertyInfo Property { get; set; } + public PropertyInfo[] ChildScalarProperties { get; set; } + public Type ChildItemType { get; set; } + public string TableName { get; set; } + } +} diff --git a/src/Cortex.States.PostgreSQL/PostgresKeyValueStateStore.cs b/src/Cortex.States.PostgreSQL/PostgresKeyValueStateStore.cs new file mode 100644 index 0000000..0bbf990 --- /dev/null +++ b/src/Cortex.States.PostgreSQL/PostgresKeyValueStateStore.cs @@ -0,0 +1,226 @@ +using Npgsql; +using System.Text.Json; + +namespace Cortex.States.PostgreSQL +{ + public class PostgresKeyValueStateStore : IStateStore, IDisposable + { + private readonly string _connectionString; + private readonly string _schemaName; + private readonly string _tableName; + + private readonly Func _keySerializer; + private readonly Func _valueSerializer; + private readonly Func _keyDeserializer; + private readonly Func _valueDeserializer; + + private static readonly SemaphoreSlim _initializationLock = new SemaphoreSlim(1, 1); + private bool _isInitialized = false; + + public string Name { get; } + + /// + /// Creates a new PostgresStateStore. + /// + /// A friendly name for the state store. + /// The Postgres connection string. + /// The schema where the table will be created. + /// The table name to store key-value pairs. + public PostgresKeyValueStateStore( + string name, + string connectionString, + string tableName, + string schemaName = "public") + { + Name = name ?? throw new ArgumentNullException(nameof(name)); + _connectionString = connectionString ?? throw new ArgumentNullException(nameof(connectionString)); + _schemaName = string.IsNullOrWhiteSpace(schemaName) ? "public" : schemaName; + _tableName = string.IsNullOrWhiteSpace(tableName) ? 
"state_store" : tableName; + + // Default JSON-based (de)serialization + _keySerializer = key => JsonSerializer.Serialize(key); + _valueSerializer = value => JsonSerializer.Serialize(value); + _keyDeserializer = str => JsonSerializer.Deserialize(str); + _valueDeserializer = str => JsonSerializer.Deserialize(str); + + // Initialize the schema/table + Initialize().GetAwaiter().GetResult(); + } + + private async Task Initialize() + { + if (_isInitialized) return; + + await _initializationLock.WaitAsync().ConfigureAwait(false); + try + { + if (_isInitialized) return; + + using var conn = new NpgsqlConnection(_connectionString); + await conn.OpenAsync().ConfigureAwait(false); + + // Create schema if not exists + var createSchemaCmd = new NpgsqlCommand($@" + CREATE SCHEMA IF NOT EXISTS ""{_schemaName}""; + ", conn); + + await createSchemaCmd.ExecuteNonQueryAsync().ConfigureAwait(false); + + // Create table if it doesn't exist + var createTableCmd = new NpgsqlCommand($@" + CREATE TABLE IF NOT EXISTS ""{_schemaName}"".""{_tableName}"" ( + key TEXT PRIMARY KEY, + value TEXT + ); + ", conn); + + await createTableCmd.ExecuteNonQueryAsync().ConfigureAwait(false); + + _isInitialized = true; + } + finally + { + _initializationLock.Release(); + } + } + + private void EnsureInitialized() + { + if (!_isInitialized) + { + throw new InvalidOperationException("PostgresStateStore is not initialized properly."); + } + } + + public TValue Get(TKey key) + { + EnsureInitialized(); + + var serializedKey = _keySerializer(key); + + using var conn = new NpgsqlConnection(_connectionString); + conn.Open(); + + using var cmd = new NpgsqlCommand($@" + SELECT value FROM ""{_schemaName}"".""{_tableName}"" WHERE key = @key; + ", conn); + + cmd.Parameters.AddWithValue("key", serializedKey); + + var result = cmd.ExecuteScalar() as string; + if (result == null) + return default; + + return _valueDeserializer(result); + } + + public void Put(TKey key, TValue value) + { + EnsureInitialized(); + + var serializedKey = _keySerializer(key); + var serializedValue = _valueSerializer(value); + + using var conn = new NpgsqlConnection(_connectionString); + conn.Open(); + + // Upsert logic (Postgres 9.5+) + using var cmd = new NpgsqlCommand($@" + INSERT INTO ""{_schemaName}"".""{_tableName}"" (key, value) + VALUES (@key, @value) + ON CONFLICT (key) DO UPDATE SET value = EXCLUDED.value; + ", conn); + + cmd.Parameters.AddWithValue("key", serializedKey); + cmd.Parameters.AddWithValue("value", serializedValue); + + cmd.ExecuteNonQuery(); + } + + public bool ContainsKey(TKey key) + { + EnsureInitialized(); + + var serializedKey = _keySerializer(key); + + using var conn = new NpgsqlConnection(_connectionString); + conn.Open(); + + using var cmd = new NpgsqlCommand($@" + SELECT 1 FROM ""{_schemaName}"".""{_tableName}"" + WHERE key = @key LIMIT 1; + ", conn); + cmd.Parameters.AddWithValue("key", serializedKey); + + var exists = cmd.ExecuteScalar(); + return exists != null; + } + + public void Remove(TKey key) + { + EnsureInitialized(); + + var serializedKey = _keySerializer(key); + + using var conn = new NpgsqlConnection(_connectionString); + conn.Open(); + + using var cmd = new NpgsqlCommand($@" + DELETE FROM ""{_schemaName}"".""{_tableName}"" + WHERE key = @key; + ", conn); + + cmd.Parameters.AddWithValue("key", serializedKey); + cmd.ExecuteNonQuery(); + } + + public IEnumerable> GetAll() + { + EnsureInitialized(); + + using var conn = new NpgsqlConnection(_connectionString); + conn.Open(); + + using var cmd = new NpgsqlCommand($@" + SELECT key, 
value FROM ""{_schemaName}"".""{_tableName}""; + ", conn); + + using var reader = cmd.ExecuteReader(); + while (reader.Read()) + { + var serializedKey = reader.GetString(0); + var serializedValue = reader.GetString(1); + + var key = _keyDeserializer(serializedKey); + var value = _valueDeserializer(serializedValue); + + yield return new KeyValuePair(key, value); + } + } + + public IEnumerable GetKeys() + { + EnsureInitialized(); + + using var conn = new NpgsqlConnection(_connectionString); + conn.Open(); + + using var cmd = new NpgsqlCommand($@" + SELECT key FROM ""{_schemaName}"".""{_tableName}""; + ", conn); + + using var reader = cmd.ExecuteReader(); + while (reader.Read()) + { + var serializedKey = reader.GetString(0); + yield return _keyDeserializer(serializedKey); + } + } + + public void Dispose() + { + // Since we create a new connection per operation, there's no connection-level resource to dispose. + // The semaphore does not need disposal as we share it statically. If needed, could be disposed at application end. + } + } +} \ No newline at end of file diff --git a/src/Cortex.States.PostgreSQL/PostgresPropertyConverter.cs b/src/Cortex.States.PostgreSQL/PostgresPropertyConverter.cs new file mode 100644 index 0000000..e8641c6 --- /dev/null +++ b/src/Cortex.States.PostgreSQL/PostgresPropertyConverter.cs @@ -0,0 +1,76 @@ +using System; +using System.Collections.Generic; +using System.Globalization; +using System.Linq; +using System.Text; +using System.Threading.Tasks; + +namespace Cortex.States.PostgreSQL +{ + internal class PostgresPropertyConverter + { + public string ConvertToString(object value) + { + if (value == null) return null; + return value.ToString(); + } + + public object ConvertFromString(Type type, string str) + { + if (str == null) return null; + + var underlying = Nullable.GetUnderlyingType(type) ?? 
type; + + if (underlying == typeof(string)) return str; + if (underlying == typeof(int)) + { + if (int.TryParse(str, NumberStyles.Any, CultureInfo.InvariantCulture, out var i)) return i; + return 0; + } + if (underlying == typeof(long)) + { + if (long.TryParse(str, NumberStyles.Any, CultureInfo.InvariantCulture, out var l)) return l; + return 0L; + } + if (underlying == typeof(bool)) + { + if (bool.TryParse(str, out var b)) return b; + return false; + } + if (underlying == typeof(DateTime)) + { + if (DateTime.TryParse(str, CultureInfo.InvariantCulture, DateTimeStyles.RoundtripKind, out var dt)) return dt; + return DateTime.MinValue; + } + if (underlying == typeof(decimal)) + { + if (decimal.TryParse(str, NumberStyles.Any, CultureInfo.InvariantCulture, out var dec)) return dec; + return 0m; + } + if (underlying == typeof(double)) + { + if (double.TryParse(str, NumberStyles.Any, CultureInfo.InvariantCulture, out var d)) return d; + return 0.0; + } + if (underlying == typeof(float)) + { + if (float.TryParse(str, NumberStyles.Any, CultureInfo.InvariantCulture, out var f)) return f; + return 0f; + } + if (underlying == typeof(Guid)) + { + if (Guid.TryParse(str, out var g)) return g; + return Guid.Empty; + } + if (underlying == typeof(TimeSpan)) + { + // Attempt parse + if (TimeSpan.TryParse(str, CultureInfo.InvariantCulture, out var ts)) return ts; + return TimeSpan.Zero; + } + + // default fallback + return str; + } + } +} diff --git a/src/Cortex.States.PostgreSQL/PostgresSchemaManager.cs b/src/Cortex.States.PostgreSQL/PostgresSchemaManager.cs new file mode 100644 index 0000000..20bc345 --- /dev/null +++ b/src/Cortex.States.PostgreSQL/PostgresSchemaManager.cs @@ -0,0 +1,446 @@ +using Npgsql; +using System; +using System.Collections.Generic; +using System.Linq; +using System.Reflection; +using System.Text; +using System.Threading.Tasks; + +namespace Cortex.States.PostgreSQL +{ + internal class PostgresSchemaManager + { + private readonly string _connectionString; + private readonly string _schemaName; + private readonly string _baseTableName; + private readonly PostgresTypeAnalyzer _typeAnalyzer; + private readonly bool _createOrUpdateTableSchema; + + public PostgresSchemaManager(string connectionString, string schemaName, string baseTableName, PostgresTypeAnalyzer typeAnalyzer, bool createOrUpdateTableSchema) + { + _connectionString = connectionString; + _schemaName = schemaName; + _baseTableName = baseTableName; + _typeAnalyzer = typeAnalyzer; + _createOrUpdateTableSchema = createOrUpdateTableSchema; + } + + public void EnsureSchemaAndTables() + { + using (var connection = new NpgsqlConnection(_connectionString)) + { + connection.Open(); + EnsureSchema(connection); + + if (_typeAnalyzer.IsListType) + { + EnsureMainTableForListType(connection); + EnsureChildTableForListType(connection); + } + else + { + EnsureMainTable(connection); + + foreach (var lp in _typeAnalyzer.ListProperties) + { + EnsureChildTable(connection, lp); + } + } + } + } + + private void EnsureSchema(NpgsqlConnection connection) + { + // CREATE SCHEMA IF NOT EXISTS + var createSchemaSql = $@"CREATE SCHEMA IF NOT EXISTS ""{_schemaName}"""; + using (var cmd = new NpgsqlCommand(createSchemaSql, connection)) + { + if (_createOrUpdateTableSchema) + { + cmd.ExecuteNonQuery(); + } + else + { + // Check existence + var checkSchemaSql = @"SELECT 1 FROM pg_namespace WHERE nspname = @schema"; + using (var checkCmd = new NpgsqlCommand(checkSchemaSql, connection)) + { + checkCmd.Parameters.AddWithValue("schema", _schemaName); + var exists 
= checkCmd.ExecuteScalar(); + if (exists == null) + throw new InvalidOperationException($"Schema {_schemaName} does not exist and createOrUpdateTableSchema=false."); + } + } + } + } + + private void EnsureMainTable(NpgsqlConnection connection) + { + if (!TableExists(connection, _baseTableName)) + { + if (_createOrUpdateTableSchema) + { + var mainTableSql = BuildCreateMainTableSql(); + using (var cmd = new NpgsqlCommand(mainTableSql, connection)) + { + cmd.ExecuteNonQuery(); + } + } + else + { + throw new InvalidOperationException($"Main table [{_schemaName}.{_baseTableName}] does not exist and createOrUpdateTableSchema=false."); + } + } + else + { + // Check columns + var propMap = _typeAnalyzer.ScalarProperties.ToDictionary(p => p.Name, p => p); + EnsureColumns(connection, _baseTableName, propMap, isChildTable: false, isListType: false); + } + } + + private void EnsureMainTableForListType(NpgsqlConnection connection) + { + if (!TableExists(connection, _baseTableName)) + { + if (_createOrUpdateTableSchema) + { + var sql = $@"CREATE TABLE ""{_schemaName}"".""{_baseTableName}"" ( + ""key"" TEXT NOT NULL PRIMARY KEY + )"; + using (var cmd = new NpgsqlCommand(sql, connection)) + { + cmd.ExecuteNonQuery(); + } + } + else + { + throw new InvalidOperationException($"Main table [{_schemaName}.{_baseTableName}] does not exist and createOrUpdateTableSchema=false."); + } + } + else + { + // Ensure key column exists + var existingColumns = GetExistingColumns(connection, _baseTableName); + if (!existingColumns.Contains("key")) + { + if (_createOrUpdateTableSchema) + { + var alter = $@"ALTER TABLE ""{_schemaName}"".""{_baseTableName}"" ADD COLUMN ""key"" TEXT NOT NULL"; + using (var cmd = new NpgsqlCommand(alter, connection)) + { + cmd.ExecuteNonQuery(); + } + // Ensure primary key + var pkSql = $@"ALTER TABLE ""{_schemaName}"".""{_baseTableName}"" ADD PRIMARY KEY(""key"")"; + using (var pkCmd = new NpgsqlCommand(pkSql, connection)) + { + pkCmd.ExecuteNonQuery(); + } + } + else + { + throw new InvalidOperationException($"Column [key] is missing in [{_schemaName}.{_baseTableName}] and createOrUpdateTableSchema=false."); + } + } + } + } + + private void EnsureChildTable(NpgsqlConnection connection, ListPropertyMetadata lp) + { + if (!TableExists(connection, lp.TableName)) + { + if (_createOrUpdateTableSchema) + { + var childTableSql = BuildCreateChildTableSql(lp); + using (var cmd = new NpgsqlCommand(childTableSql, connection)) + { + cmd.ExecuteNonQuery(); + } + } + else + { + throw new InvalidOperationException($"Child table [{_schemaName}.{lp.TableName}] does not exist and createOrUpdateTableSchema=false."); + } + } + else + { + var propMap = lp.ChildScalarProperties.ToDictionary(p => p.Name, p => p); + EnsureColumns(connection, lp.TableName, propMap, isChildTable: true, isListType: false); + } + } + + private void EnsureChildTableForListType(NpgsqlConnection connection) + { + var tableName = _baseTableName + "_Child"; + + if (!TableExists(connection, tableName)) + { + if (_createOrUpdateTableSchema) + { + var childTableSql = BuildCreateChildTableSqlForListType(); + using (var cmd = new NpgsqlCommand(childTableSql, connection)) + { + cmd.ExecuteNonQuery(); + } + } + else + { + throw new InvalidOperationException($"Child table [{_schemaName}.{tableName}] does not exist and createOrUpdateTableSchema=false."); + } + } + else + { + var propMap = _typeAnalyzer.ChildScalarProperties.ToDictionary(p => p.Name, p => p); + EnsureColumns(connection, tableName, propMap, isChildTable: true, isListType: true); + } + 
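                // Editor's note: a rough usage sketch for this list-value path (names such as
                // OrderLine and connStr are illustrative assumptions, not part of this patch):
                //
                //   var store = new PostgresStateStore<string, List<OrderLine>>(
                //       name: "order-lines",
                //       connectionString: connStr,
                //       tableName: "Orders");
                //
                // Because the value type itself is a List<T>, the analyzer reports IsListType = true
                // and this manager keeps two tables in sync: a main "Orders" table holding only the
                // keys, and an "Orders_Child" table with the composite key ("key", "ItemIndex") plus
                // one column per scalar property of OrderLine, as built by
                // BuildCreateChildTableSqlForListType.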
} + + private bool TableExists(NpgsqlConnection connection, string tableName) + { + var sql = @"SELECT 1 FROM information_schema.tables + WHERE table_schema = @schema AND table_name = @table"; + using (var cmd = new NpgsqlCommand(sql, connection)) + { + cmd.Parameters.AddWithValue("schema", _schemaName); + cmd.Parameters.AddWithValue("table", tableName); + return cmd.ExecuteScalar() != null; + } + } + + private void EnsureColumns(NpgsqlConnection connection, string tableName, Dictionary propMap, bool isChildTable, bool isListType) + { + var existingColumns = GetExistingColumns(connection, tableName); + + // Always ensure "key" + if (!existingColumns.Contains("key")) + { + AddSpecialColumn(connection, tableName, "key", "TEXT NOT NULL", isPk: false); + } + + if (isChildTable || (isListType && tableName.EndsWith("_Child", StringComparison.OrdinalIgnoreCase))) + { + // Ensure ItemIndex + if (!existingColumns.Contains("itemindex")) + { + AddSpecialColumn(connection, tableName, "ItemIndex", "INTEGER NOT NULL", isPk: false); + } + } + + // Ensure property columns + foreach (var kvp in propMap) + { + var colName = kvp.Key; + var prop = kvp.Value; + if (!existingColumns.Contains(colName.ToLower())) + { + if (_createOrUpdateTableSchema) + { + AddColumnForProperty(connection, tableName, prop); + } + else + { + throw new InvalidOperationException($"Column [{colName}] is missing in [{_schemaName}.{tableName}] and createOrUpdateTableSchema=false."); + } + } + } + } + + private HashSet GetExistingColumns(NpgsqlConnection connection, string tableName) + { + var columns = new HashSet(StringComparer.OrdinalIgnoreCase); + var sql = @"SELECT column_name FROM information_schema.columns + WHERE table_schema = @schema AND table_name = @table"; + using (var cmd = new NpgsqlCommand(sql, connection)) + { + cmd.Parameters.AddWithValue("schema", _schemaName); + cmd.Parameters.AddWithValue("table", tableName); + using (var reader = cmd.ExecuteReader()) + { + while (reader.Read()) + { + columns.Add(reader.GetString(0)); + } + } + } + return columns; + } + + private void AddColumnForProperty(NpgsqlConnection connection, string tableName, PropertyInfo prop) + { + var sqlType = GetPostgresTypeForProperty(prop); + var sql = $@"ALTER TABLE ""{_schemaName}"".""{tableName}"" ADD COLUMN ""{prop.Name}"" {sqlType} NULL"; + using (var cmd = new NpgsqlCommand(sql, connection)) + { + cmd.ExecuteNonQuery(); + } + } + + private void AddSpecialColumn(NpgsqlConnection connection, string tableName, string columnName, string sqlTypeDeclaration, bool isPk) + { + if (!_createOrUpdateTableSchema) + { + throw new InvalidOperationException($"Column [{columnName}] is missing in [{_schemaName}.{tableName}] and createOrUpdateTableSchema=false."); + } + var sql = $@"ALTER TABLE ""{_schemaName}"".""{tableName}"" ADD COLUMN ""{columnName}"" {sqlTypeDeclaration}"; + using (var cmd = new NpgsqlCommand(sql, connection)) + { + cmd.ExecuteNonQuery(); + } + + if (isPk) + { + var pkSql = $@"ALTER TABLE ""{_schemaName}"".""{tableName}"" ADD PRIMARY KEY(""{columnName}"")"; + using (var pkCmd = new NpgsqlCommand(pkSql, connection)) + { + pkCmd.ExecuteNonQuery(); + } + } + } + + private string BuildCreateMainTableSql() + { + var sb = new StringBuilder(); + sb.AppendLine($@"CREATE TABLE ""{_schemaName}"".""{_baseTableName}"" ("); + sb.AppendLine(@"""key"" TEXT NOT NULL PRIMARY KEY,"); + + foreach (var prop in _typeAnalyzer.ScalarProperties) + { + var sqlType = GetPostgresTypeForProperty(prop); + sb.AppendLine($@"""{prop.Name}"" {sqlType} NULL,"); + } + + if 
(_typeAnalyzer.ScalarProperties.Length > 0) + sb.Length -= 3; // remove last comma + else + sb.Length -= 2; // if no scalar props + + sb.AppendLine(")"); + return sb.ToString(); + } + + private string BuildCreateChildTableSql(ListPropertyMetadata lp) + { + var sb = new StringBuilder(); + sb.AppendLine($@"CREATE TABLE ""{_schemaName}"".""{lp.TableName}"" ("); + sb.AppendLine(@"""key"" TEXT NOT NULL,"); + sb.AppendLine(@"""ItemIndex"" INTEGER NOT NULL,"); + + foreach (var cprop in lp.ChildScalarProperties) + { + var sqlType = GetPostgresTypeForProperty(cprop); + sb.AppendLine($@"""{cprop.Name}"" {sqlType} NULL,"); + } + + sb.AppendLine($@"CONSTRAINT ""PK_{lp.TableName}"" PRIMARY KEY (""key"", ""ItemIndex"")"); + sb.AppendLine(")"); + return sb.ToString(); + } + + private string BuildCreateChildTableSqlForListType() + { + var tableName = _baseTableName + "_Child"; + var sb = new StringBuilder(); + sb.AppendLine($@"CREATE TABLE ""{_schemaName}"".""{tableName}"" ("); + sb.AppendLine(@"""key"" TEXT NOT NULL,"); + sb.AppendLine(@"""ItemIndex"" INTEGER NOT NULL,"); + + foreach (var cprop in _typeAnalyzer.ChildScalarProperties) + { + var sqlType = GetPostgresTypeForProperty(cprop); + sb.AppendLine($@"""{cprop.Name}"" {sqlType} NULL,"); + } + + sb.AppendLine($@"CONSTRAINT ""PK_{tableName}"" PRIMARY KEY (""key"", ""ItemIndex"")"); + sb.AppendLine(")"); + return sb.ToString(); + } + + public string BuildUpsertMainSql() + { + // Postgres UPSERT: use INSERT ... ON CONFLICT + var columns = _typeAnalyzer.ScalarProperties.Select(p => p.Name).ToArray(); + var insertCols = string.Join(", ", columns.Select(c => $@"""{c}""")); + var insertVals = string.Join(", ", columns.Select(c => "@" + c)); + var updateSet = string.Join(", ", columns.Select(c => $@"""{c}"" = EXCLUDED.""{c}""")); + + // If no scalar columns, we just insert key if not exists + if (columns.Length == 0) + { + return $@" + INSERT INTO ""{_schemaName}"".""{_baseTableName}"" (""key"") + VALUES (@key) + ON CONFLICT (""key"") DO NOTHING;"; + } + + return $@" + INSERT INTO ""{_schemaName}"".""{_baseTableName}"" (""key"", {insertCols}) + VALUES (@key, {insertVals}) + ON CONFLICT (""key"") DO UPDATE SET {updateSet};"; + } + + public string BuildUpsertMainSqlForListType() + { + return $@" + INSERT INTO ""{_schemaName}"".""{_baseTableName}"" (""key"") + VALUES (@key) + ON CONFLICT (""key"") DO NOTHING;"; + } + + public string BuildInsertChildSql(ListPropertyMetadata lp) + { + var columns = new List { "\"key\"", "\"ItemIndex\"" }; + columns.AddRange(lp.ChildScalarProperties.Select(p => $@"""{p.Name}""")); + var values = new List { "@key", "@ItemIndex" }; + values.AddRange(lp.ChildScalarProperties.Select(p => "@" + p.Name)); + + return $@" + INSERT INTO ""{_schemaName}"".""{lp.TableName}"" + ({string.Join(", ", columns)}) + VALUES ({string.Join(", ", values)})"; + } + + public string BuildInsertChildSqlForListType() + { + var tableName = _baseTableName + "_Child"; + var columns = new List { "\"key\"", "\"ItemIndex\"" }; + columns.AddRange(_typeAnalyzer.ChildScalarProperties.Select(p => $@"""{p.Name}""")); + var values = new List { "@key", "@ItemIndex" }; + values.AddRange(_typeAnalyzer.ChildScalarProperties.Select(p => "@" + p.Name)); + + return $@" + INSERT INTO ""{_schemaName}"".""{tableName}"" + ({string.Join(", ", columns)}) + VALUES ({string.Join(", ", values)})"; + } + + private string GetPostgresTypeForProperty(PropertyInfo prop) + { + var type = Nullable.GetUnderlyingType(prop.PropertyType) ?? 
prop.PropertyType; + + if (type == typeof(int) || type == typeof(short) || type == typeof(byte)) + return "INTEGER"; + if (type == typeof(long)) + return "BIGINT"; + if (type == typeof(bool)) + return "BOOLEAN"; + if (type == typeof(DateTime)) + return "TIMESTAMP"; + if (type == typeof(decimal)) + return "NUMERIC(18,2)"; + if (type == typeof(double)) + return "DOUBLE PRECISION"; + if (type == typeof(float)) + return "REAL"; + if (type == typeof(Guid)) + return "UUID"; + if (type == typeof(TimeSpan)) + return "INTERVAL"; + if (type == typeof(string)) + return "TEXT"; + + // fallback + return "TEXT"; + } + } +} diff --git a/src/Cortex.States.PostgreSQL/PostgresStateStore.cs b/src/Cortex.States.PostgreSQL/PostgresStateStore.cs new file mode 100644 index 0000000..f5beacb --- /dev/null +++ b/src/Cortex.States.PostgreSQL/PostgresStateStore.cs @@ -0,0 +1,474 @@ +using Npgsql; +using System; +using System.Collections.Generic; +using System.Linq; +using System.Reflection; +using System.Text; +using System.Threading.Tasks; + +namespace Cortex.States.PostgreSQL +{ + public class PostgresStateStore : IStateStore, IDisposable where TValue : new() + { + private readonly string _connectionString; + private readonly string _schemaName; + private readonly string _baseTableName; + private readonly bool _createOrUpdateTableSchema; + + public string Name { get; } + + private static readonly SemaphoreSlim _initializationLock = new SemaphoreSlim(1, 1); + private volatile bool _isInitialized; + + private readonly PostgresTypeAnalyzer _typeAnalyzer; + private readonly PostgresPropertyConverter _propertyConverter; + private readonly PostgresSchemaManager _schemaManager; + + public PostgresStateStore( + string name, + string connectionString, + string tableName, + string schemaName = "public", + bool createOrUpdateTableSchema = true) + { + if (string.IsNullOrWhiteSpace(name)) + throw new ArgumentNullException(nameof(name)); + if (string.IsNullOrWhiteSpace(connectionString)) + throw new ArgumentNullException(nameof(connectionString)); + if (string.IsNullOrWhiteSpace(tableName)) + throw new ArgumentNullException(nameof(tableName)); + + Name = name; + _connectionString = connectionString; + _schemaName = schemaName; + _baseTableName = tableName; + _createOrUpdateTableSchema = createOrUpdateTableSchema; + + _typeAnalyzer = new PostgresTypeAnalyzer(typeof(TValue), _baseTableName); + _propertyConverter = new PostgresPropertyConverter(); + _schemaManager = new PostgresSchemaManager(_connectionString, _schemaName, _baseTableName, _typeAnalyzer, _createOrUpdateTableSchema); + + Initialize(); + } + + private void Initialize() + { + if (_isInitialized) return; + _initializationLock.Wait(); + try + { + if (_isInitialized) return; + _schemaManager.EnsureSchemaAndTables(); + _isInitialized = true; + } + finally + { + _initializationLock.Release(); + } + } + + private void EnsureInitialized() + { + if (!_isInitialized) + throw new InvalidOperationException("PostgresStateStore is not properly initialized."); + } + + public TValue Get(TKey key) + { + EnsureInitialized(); + var serializedKey = key.ToString(); + using (var connection = new NpgsqlConnection(_connectionString)) + { + connection.Open(); + + if (_typeAnalyzer.IsListType) + { + var mainSql = $@"SELECT ""key"" FROM ""{_schemaName}"".""{_baseTableName}"" WHERE ""key"" = @key"; + bool exists; + using (var cmd = new NpgsqlCommand(mainSql, connection)) + { + cmd.Parameters.AddWithValue("key", serializedKey); + var res = cmd.ExecuteScalar(); + exists = (res != null); + } + + if 
(!exists) + return default; + + var listValue = Activator.CreateInstance(); + var listAsIList = (System.Collections.IList)listValue; + var childSql = $@"SELECT * FROM ""{_schemaName}"".""{_baseTableName}_Child"" WHERE ""key"" = @key ORDER BY ""ItemIndex"""; + + using (var cmd = new NpgsqlCommand(childSql, connection)) + { + cmd.Parameters.AddWithValue("key", serializedKey); + using (var reader = cmd.ExecuteReader()) + { + while (reader.Read()) + { + var childInstance = Activator.CreateInstance(_typeAnalyzer.ChildItemType); + foreach (var cprop in _typeAnalyzer.ChildScalarProperties) + { + if (!ColumnExists(reader, cprop.Name)) continue; + var val = ReadValueFromReader(reader, cprop); + cprop.SetValue(childInstance, val); + } + listAsIList.Add(childInstance); + } + } + } + + return listValue; + } + else + { + var mainSql = $@"SELECT * FROM ""{_schemaName}"".""{_baseTableName}"" WHERE ""key"" = @key"; + TValue instance = new TValue(); + using (var cmd = new NpgsqlCommand(mainSql, connection)) + { + cmd.Parameters.AddWithValue("key", serializedKey); + using (var reader = cmd.ExecuteReader()) + { + if (!reader.Read()) + return default; + + foreach (var prop in _typeAnalyzer.ScalarProperties) + { + if (!ColumnExists(reader, prop.Name)) continue; + var val = ReadValueFromReader(reader, prop); + prop.SetValue(instance, val); + } + } + } + + // Load list properties + foreach (var lp in _typeAnalyzer.ListProperties) + { + var listInstance = (System.Collections.IList)Activator.CreateInstance(lp.Property.PropertyType); + var childSql = $@"SELECT * FROM ""{_schemaName}"".""{lp.TableName}"" WHERE ""key"" = @key ORDER BY ""ItemIndex"""; + using (var cmd = new NpgsqlCommand(childSql, connection)) + { + cmd.Parameters.AddWithValue("key", serializedKey); + using (var reader = cmd.ExecuteReader()) + { + while (reader.Read()) + { + var childInstance = Activator.CreateInstance(lp.ChildItemType); + foreach (var cprop in lp.ChildScalarProperties) + { + if (!ColumnExists(reader, cprop.Name)) continue; + var val = ReadValueFromReader(reader, cprop); + cprop.SetValue(childInstance, val); + } + listInstance.Add(childInstance); + } + } + } + + lp.Property.SetValue(instance, listInstance); + } + + return instance; + } + } + } + + public void Put(TKey key, TValue value) + { + EnsureInitialized(); + var serializedKey = key.ToString(); + + using (var connection = new NpgsqlConnection(_connectionString)) + { + connection.Open(); + + if (_typeAnalyzer.IsListType) + { + var upsertMainSql = _schemaManager.BuildUpsertMainSqlForListType(); + using (var cmd = new NpgsqlCommand(upsertMainSql, connection)) + { + cmd.Parameters.AddWithValue("key", serializedKey); + cmd.ExecuteNonQuery(); + } + + var deleteChildSql = $@"DELETE FROM ""{_schemaName}"".""{_baseTableName}_Child"" WHERE ""key"" = @key"; + using (var deleteCmd = new NpgsqlCommand(deleteChildSql, connection)) + { + deleteCmd.Parameters.AddWithValue("key", serializedKey); + deleteCmd.ExecuteNonQuery(); + } + + var listAsIList = (System.Collections.IList)value; + var insertChildSql = _schemaManager.BuildInsertChildSqlForListType(); + using (var insertCmd = new NpgsqlCommand(insertChildSql, connection)) + { + int index = 0; + foreach (var item in listAsIList) + { + insertCmd.Parameters.Clear(); + insertCmd.Parameters.AddWithValue("key", serializedKey); + insertCmd.Parameters.AddWithValue("ItemIndex", index); + foreach (var cprop in _typeAnalyzer.ChildScalarProperties) + { + var val = cprop.GetValue(item); + SetParameterForProperty(insertCmd, cprop, val); + } + 
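                            // Editor's note (illustrative sketch, not part of the patch): each list item
                            // becomes one row of "<tableName>_Child", keyed by (key, ItemIndex). With a
                            // hypothetical two-item value:
                            //
                            //   store.Put("order-1", new List<OrderLine> { lineA, lineB });
                            //
                            // the statements issued above are an INSERT ... ON CONFLICT DO NOTHING for the
                            // key row in the main table and a DELETE of any existing "<tableName>_Child"
                            // rows for "order-1"; one INSERT per item, with ItemIndex 0 and 1, is then
                            // executed just below.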
insertCmd.ExecuteNonQuery(); + index++; + } + } + } + else + { + var upsertMainSql = _schemaManager.BuildUpsertMainSql(); + using (var cmd = new NpgsqlCommand(upsertMainSql, connection)) + { + cmd.Parameters.AddWithValue("key", serializedKey); + foreach (var prop in _typeAnalyzer.ScalarProperties) + { + var propVal = prop.GetValue(value); + SetParameterForProperty(cmd, prop, propVal); + } + cmd.ExecuteNonQuery(); + } + + // For each list property + foreach (var lp in _typeAnalyzer.ListProperties) + { + var deleteSql = $@"DELETE FROM ""{_schemaName}"".""{lp.TableName}"" WHERE ""key"" = @key"; + using (var deleteCmd = new NpgsqlCommand(deleteSql, connection)) + { + deleteCmd.Parameters.AddWithValue("key", serializedKey); + deleteCmd.ExecuteNonQuery(); + } + + var listValue = lp.Property.GetValue(value) as System.Collections.IEnumerable; + if (listValue != null) + { + int index = 0; + var insertSql = _schemaManager.BuildInsertChildSql(lp); + using (var insertCmd = new NpgsqlCommand(insertSql, connection)) + { + foreach (var item in listValue) + { + insertCmd.Parameters.Clear(); + insertCmd.Parameters.AddWithValue("key", serializedKey); + insertCmd.Parameters.AddWithValue("ItemIndex", index); + foreach (var cprop in lp.ChildScalarProperties) + { + var val = cprop.GetValue(item); + SetParameterForProperty(insertCmd, cprop, val); + } + insertCmd.ExecuteNonQuery(); + index++; + } + } + } + } + } + } + } + + public bool ContainsKey(TKey key) + { + EnsureInitialized(); + var serializedKey = key.ToString(); + var sql = $@"SELECT COUNT(*) FROM ""{_schemaName}"".""{_baseTableName}"" WHERE ""key"" = @key"; + using (var connection = new NpgsqlConnection(_connectionString)) + using (var cmd = new NpgsqlCommand(sql, connection)) + { + cmd.Parameters.AddWithValue("key", serializedKey); + connection.Open(); + var count = (long)cmd.ExecuteScalar(); + return count > 0; + } + } + + public void Remove(TKey key) + { + EnsureInitialized(); + var serializedKey = key.ToString(); + using (var connection = new NpgsqlConnection(_connectionString)) + { + connection.Open(); + + if (_typeAnalyzer.IsListType) + { + var deleteChildSql = $@"DELETE FROM ""{_schemaName}"".""{_baseTableName}_Child"" WHERE ""key"" = @key"; + using (var cmd = new NpgsqlCommand(deleteChildSql, connection)) + { + cmd.Parameters.AddWithValue("key", serializedKey); + cmd.ExecuteNonQuery(); + } + + var mainSql = $@"DELETE FROM ""{_schemaName}"".""{_baseTableName}"" WHERE ""key"" = @key"; + using (var cmd = new NpgsqlCommand(mainSql, connection)) + { + cmd.Parameters.AddWithValue("key", serializedKey); + cmd.ExecuteNonQuery(); + } + } + else + { + // Remove from child tables + foreach (var lp in _typeAnalyzer.ListProperties) + { + var deleteChildSql = $@"DELETE FROM ""{_schemaName}"".""{lp.TableName}"" WHERE ""key"" = @key"; + using (var cmd = new NpgsqlCommand(deleteChildSql, connection)) + { + cmd.Parameters.AddWithValue("key", serializedKey); + cmd.ExecuteNonQuery(); + } + } + + var mainSql = $@"DELETE FROM ""{_schemaName}"".""{_baseTableName}"" WHERE ""key"" = @key"; + using (var cmd = new NpgsqlCommand(mainSql, connection)) + { + cmd.Parameters.AddWithValue("key", serializedKey); + cmd.ExecuteNonQuery(); + } + } + } + } + + public IEnumerable> GetAll() + { + EnsureInitialized(); + var keys = GetKeys().ToList(); + foreach (var k in keys) + { + yield return new KeyValuePair(k, Get(k)); + } + } + + public IEnumerable GetKeys() + { + EnsureInitialized(); + var sql = $@"SELECT ""key"" FROM ""{_schemaName}"".""{_baseTableName}"""; + using (var 
connection = new NpgsqlConnection(_connectionString)) + using (var cmd = new NpgsqlCommand(sql, connection)) + { + connection.Open(); + using (var reader = cmd.ExecuteReader()) + { + var keyType = typeof(TKey); + while (reader.Read()) + { + var keyStr = reader.GetString(0); + yield return (TKey)Convert.ChangeType(keyStr, keyType); + } + } + } + } + + public void Dispose() + { + _initializationLock.Dispose(); + } + + #region Helper Methods + + private bool ColumnExists(NpgsqlDataReader reader, string columnName) + { + for (int i = 0; i < reader.FieldCount; i++) + { + if (reader.GetName(i).Equals(columnName, StringComparison.OrdinalIgnoreCase)) + return true; + } + return false; + } + + private object ReadValueFromReader(NpgsqlDataReader reader, PropertyInfo prop) + { + var index = reader.GetOrdinal(prop.Name); + if (reader.IsDBNull(index)) return null; + + var type = Nullable.GetUnderlyingType(prop.PropertyType) ?? prop.PropertyType; + + if (type == typeof(int) || type == typeof(short) || type == typeof(byte)) + return reader.GetInt32(index); + if (type == typeof(long)) + return reader.GetInt64(index); + if (type == typeof(bool)) + return reader.GetBoolean(index); + if (type == typeof(DateTime)) + return reader.GetDateTime(index); + if (type == typeof(decimal)) + return reader.GetDecimal(index); + if (type == typeof(double)) + return reader.GetDouble(index); + if (type == typeof(float)) + return (float)reader.GetDouble(index); + if (type == typeof(Guid)) + return reader.GetGuid(index); + if (type == typeof(TimeSpan)) + return reader.GetTimeSpan(index); // Npgsql supports TimeSpan for INTERVAL + if (type == typeof(string)) + return reader.GetString(index); + + // fallback to string convert + var strVal = reader.GetString(index); + return _propertyConverter.ConvertFromString(prop.PropertyType, strVal); + } + + private void SetParameterForProperty(NpgsqlCommand cmd, PropertyInfo prop, object propValue) + { + var paramName = "@" + prop.Name; + if (!cmd.Parameters.Contains(paramName)) + { + cmd.Parameters.Add(new NpgsqlParameter(paramName, ConvertToNpgsqlDbType(prop)) { Value = DBNull.Value }); + } + + var param = cmd.Parameters[paramName]; + + if (propValue == null) + { + param.Value = DBNull.Value; + return; + } + + var type = Nullable.GetUnderlyingType(prop.PropertyType) ?? prop.PropertyType; + + // Npgsql can handle direct assignment if we pick the right NpgsqlDbType + if (type == typeof(TimeSpan)) + { + param.Value = (TimeSpan)propValue; // stored as INTERVAL + } + else + { + param.Value = propValue; + } + } + + private NpgsqlTypes.NpgsqlDbType ConvertToNpgsqlDbType(PropertyInfo prop) + { + var type = Nullable.GetUnderlyingType(prop.PropertyType) ?? 
prop.PropertyType; + + if (type == typeof(int) || type == typeof(short) || type == typeof(byte)) + return NpgsqlTypes.NpgsqlDbType.Integer; + if (type == typeof(long)) + return NpgsqlTypes.NpgsqlDbType.Bigint; + if (type == typeof(bool)) + return NpgsqlTypes.NpgsqlDbType.Boolean; + if (type == typeof(DateTime)) + return NpgsqlTypes.NpgsqlDbType.Timestamp; + if (type == typeof(decimal)) + return NpgsqlTypes.NpgsqlDbType.Numeric; + if (type == typeof(double)) + return NpgsqlTypes.NpgsqlDbType.Double; + if (type == typeof(float)) + return NpgsqlTypes.NpgsqlDbType.Real; + if (type == typeof(Guid)) + return NpgsqlTypes.NpgsqlDbType.Uuid; + if (type == typeof(TimeSpan)) + return NpgsqlTypes.NpgsqlDbType.Interval; + if (type == typeof(string)) + return NpgsqlTypes.NpgsqlDbType.Text; + + // fallback to Text + return NpgsqlTypes.NpgsqlDbType.Text; + } + + #endregion + } +} diff --git a/src/Cortex.States.PostgreSQL/PostgresTypeAnalyzer.cs b/src/Cortex.States.PostgreSQL/PostgresTypeAnalyzer.cs new file mode 100644 index 0000000..592477e --- /dev/null +++ b/src/Cortex.States.PostgreSQL/PostgresTypeAnalyzer.cs @@ -0,0 +1,107 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Reflection; +using System.Text; +using System.Threading.Tasks; + +namespace Cortex.States.PostgreSQL +{ + internal class PostgresTypeAnalyzer + { + public PropertyInfo[] ScalarProperties { get; private set; } + public List ListProperties { get; private set; } + + // For List scenario + public bool IsListType { get; private set; } + public Type ChildItemType { get; private set; } + public PropertyInfo[] ChildScalarProperties { get; private set; } + + private readonly Type _valueType; + private readonly string _baseTableName; + + public PostgresTypeAnalyzer(Type valueType, string baseTableName) + { + _valueType = valueType; + _baseTableName = baseTableName; + AnalyzeType(); + } + + private void AnalyzeType() + { + if (IsGenericList(_valueType, out Type itemType)) + { + // Value is a List + IsListType = true; + ChildItemType = itemType; + ChildScalarProperties = GetScalarProperties(itemType); + ScalarProperties = new PropertyInfo[0]; + ListProperties = new List(); + } + else + { + // Normal object scenario + IsListType = false; + var props = _valueType.GetProperties(BindingFlags.Public | BindingFlags.Instance) + .Where(p => p.CanRead && p.CanWrite && p.GetIndexParameters().Length == 0) + .ToArray(); + + var scalarProps = new List(); + var listProps = new List(); + + foreach (var prop in props) + { + if (IsListProperty(prop, out Type childItemType)) + { + var childScalars = GetScalarProperties(childItemType); + listProps.Add(new ListPropertyMetadata + { + Property = prop, + ChildScalarProperties = childScalars, + ChildItemType = childItemType, + TableName = $"{_baseTableName}_{prop.Name}" + }); + } + else + { + scalarProps.Add(prop); + } + } + + ScalarProperties = scalarProps.ToArray(); + ListProperties = listProps; + } + } + + private bool IsListProperty(PropertyInfo prop, out Type itemType) + { + itemType = null; + if (!prop.PropertyType.IsGenericType) return false; + var genType = prop.PropertyType.GetGenericTypeDefinition(); + if (genType == typeof(List<>)) + { + itemType = prop.PropertyType.GetGenericArguments()[0]; + return true; + } + return false; + } + + private bool IsGenericList(Type type, out Type itemType) + { + itemType = null; + if (type.IsGenericType && type.GetGenericTypeDefinition() == typeof(List<>)) + { + itemType = type.GetGenericArguments()[0]; + return true; + } + return false; 
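            // Editor's note: only members (or a value type) declared exactly as List<T> are picked up
            // by this check; arrays, IList<T> and IEnumerable<T> are not the generic List<> definition
            // and therefore end up among the scalar properties instead. A hypothetical model for
            // illustration (Order and OrderLine are not part of this patch):
            //
            //   public class Order
            //   {
            //       public string Customer { get; set; }           // scalar column on the main table
            //       public List<OrderLine> Lines { get; set; }     // child table "<baseTableName>_Lines"
            //       public OrderLine[] ArchivedLines { get; set; } // not detected as a list property
            //   }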
+ } + + private PropertyInfo[] GetScalarProperties(Type t) + { + return t.GetProperties(BindingFlags.Public | BindingFlags.Instance) + .Where(p => p.CanRead && p.CanWrite && p.GetIndexParameters().Length == 0 && !IsListProperty(p, out _)) + .ToArray(); + } + } +} From f348a2523644c48bbb76e7f715ac2cd55a7d875d Mon Sep 17 00:00:00 2001 From: Enes Hoxha Date: Tue, 24 Dec 2024 19:07:45 +0100 Subject: [PATCH 7/8] [v1/feature/75]: Add Support for ClickHouse State Add ClickHouseStateStore, Add SchemaVerification and Creation --- Cortex.sln | 6 + .../Assets/cortex.png | Bin 0 -> 7179 bytes .../Assets/license.md | 20 + .../ClickHouseConfiguration.cs | 25 ++ .../ClickHousePropertyConverter.cs | 75 ++++ .../ClickHouseSchemaManager.cs | 221 ++++++++++ .../ClickHouseStateStore.cs | 378 ++++++++++++++++++ .../ClickHouseTypeAnalyzer.cs | 51 +++ .../Cortex.States.ClickHouse.csproj | 56 +++ .../Cortex.States.MongoDb.csproj | 2 +- .../ListPropertyMetadata.cs | 3 +- .../PostgresKeyValueStateStore.cs | 4 + .../PostgresStateStore.cs | 3 +- 13 files changed, 840 insertions(+), 4 deletions(-) create mode 100644 src/Cortex.States.ClickHouse/Assets/cortex.png create mode 100644 src/Cortex.States.ClickHouse/Assets/license.md create mode 100644 src/Cortex.States.ClickHouse/ClickHouseConfiguration.cs create mode 100644 src/Cortex.States.ClickHouse/ClickHousePropertyConverter.cs create mode 100644 src/Cortex.States.ClickHouse/ClickHouseSchemaManager.cs create mode 100644 src/Cortex.States.ClickHouse/ClickHouseStateStore.cs create mode 100644 src/Cortex.States.ClickHouse/ClickHouseTypeAnalyzer.cs create mode 100644 src/Cortex.States.ClickHouse/Cortex.States.ClickHouse.csproj diff --git a/Cortex.sln b/Cortex.sln index 4f6cbfb..a9ad30f 100644 --- a/Cortex.sln +++ b/Cortex.sln @@ -41,6 +41,8 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Cortex.States.MSSqlServer", EndProject Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Cortex.States.PostgreSQL", "src\Cortex.States.PostgreSQL\Cortex.States.PostgreSQL.csproj", "{980EDBFE-40C2-4EFD-96C2-FED1032FB5E6}" EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Cortex.States.ClickHouse", "src\Cortex.States.ClickHouse\Cortex.States.ClickHouse.csproj", "{0F9FCB99-D00F-4396-8E2B-6E627076ADA0}" +EndProject Global GlobalSection(SolutionConfigurationPlatforms) = preSolution Debug|Any CPU = Debug|Any CPU @@ -122,6 +124,10 @@ Global {980EDBFE-40C2-4EFD-96C2-FED1032FB5E6}.Debug|Any CPU.Build.0 = Debug|Any CPU {980EDBFE-40C2-4EFD-96C2-FED1032FB5E6}.Release|Any CPU.ActiveCfg = Release|Any CPU {980EDBFE-40C2-4EFD-96C2-FED1032FB5E6}.Release|Any CPU.Build.0 = Release|Any CPU + {0F9FCB99-D00F-4396-8E2B-6E627076ADA0}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {0F9FCB99-D00F-4396-8E2B-6E627076ADA0}.Debug|Any CPU.Build.0 = Debug|Any CPU + {0F9FCB99-D00F-4396-8E2B-6E627076ADA0}.Release|Any CPU.ActiveCfg = Release|Any CPU + {0F9FCB99-D00F-4396-8E2B-6E627076ADA0}.Release|Any CPU.Build.0 = Release|Any CPU EndGlobalSection GlobalSection(SolutionProperties) = preSolution HideSolutionNode = FALSE diff --git a/src/Cortex.States.ClickHouse/Assets/cortex.png b/src/Cortex.States.ClickHouse/Assets/cortex.png new file mode 100644 index 0000000000000000000000000000000000000000..a4f9727d04f91f61f20720bd64b9c139fb041236 GIT binary patch literal 7179 zcmV+m9Q5OfP)Px#1ZP1_K>z@;j|==^1poj532;bRa{vGf5&!@T5&_cPe*6Fc8=y%C^C!)jU*}Z_TUZ5zkPS!zgzWpiKoUY0LP$tR!Xokt%Ag{H3XX^@B8wnv03972 z_hwv1kf6A*-1mH+Tg$Cm_f}O`27H#NQDPYKIv zcTCHy`fO@u)s1PH)sbnLH4!`}kZK|c%WX|xUTdf>GJS=u8BkWepa#-RXr-xDk(t@H 
zW@WFA%*qkfWGlBRGCQ}KE6>y#u7Wj@IeBX$a|&84%`IF@nqxf9Eo!w@+!~oz+$N|! zGOwgPGT%~1WPT~v)1{qUWzc%A^7WAg4s}HqRBUi)VHMOZs3)?px+juI+89Z!G1VJc zB&xQLlmylF3mS+d)%Qn|>-%q{Gz@%}+OT;?VqO2Az$`8yqwdcMnN^=m&#Gn9GOO9N zWmSgoa}7%%RkMWUH7sFyEou6S8a90e9_zTW>Pa(9t!!{;=E_xE+0bgztQ@G(RxZ@U zX6H7t+4KlCv)TDgY<6BVo1M3Y&B-^_!sZmT(Bo4DE$penwd|>awfwQDmCY?QRMf`i z7PlFX?QCvIJDW%9VDpMQ_+#lhHowFWelF`|^GiF~)8*^A%D5JkcX3sij~m#6$_@0m zu(F#itm@{El|3x6Y9mXm>S2l1c-+Vmt9w~uO)pzi-OCo$^sz;?rZ%x8Q+55MWT<|? zR>L4ErD2dQZhVHNG;H}~QR9|B$4$wo*gJiBEt{HI$)=I2xEuhhIXqJqL=&J0vCala z5N(h%O%XsY0Gvq>4G}=VGamuY)|~uiL2C?vYdAnt09d$|;L#&sk_?c=ZERj~yDb1L z>0t9s0idC$%Q^*_5G9Z#17M~3$VC820LaAwn(8AZK>|nur~sJ;c=dyOfYHG-0D8c)!3BgD z0ANym|IN6m>E(B(W>gS3OJ2?cFb6y?Pk02*2B>qM3!dyehx-I@f^^Pv!7~6xXt|F=bO5v<25_GNQSUxC#H8AO4sJ?%xdNQ*J~@1Hcqd#$aK3;6*i@;(*!qp zN$K#ErDYCqdVq%egycmAs0*E1fQrN=5J1U&?wU^l&=op;GEa^BbZsAjqqfh>eP;6F z)Ao6|Z&6L30M3*Oo+ETR-KT=*2%XxtPvO3(3wF_Ye(o~>8t!w1PapS14NvJlvF#Jx zr)&H4@D^1Y;3h9EV~`Hsl#Ft1yQhN}IO}{ANsFp%P!{Yu=K-8E>wGlM^MYqOFId^& z2%iLQaft=a3!W})l0KJV3MhJsTZ0AEbxpuK9u{Elz-8;ROww zR?TOCTT;p_`2Zw{iqPrGJT3QmCoihd8BOM?bf27cD&6OV=tx|mt@-5SMRuQ8=yZio zXXvycikWBcaNHyroC=-<(Ct1seEPUg8#$OY0`0p9-Gr zJ{g`@!7jLOL4{YlXVrYeag)-D-QWax7C_l`CNRIYFQbZ{KY1z6$Kr!#EgZ*Wg$`|~Vo!z(PSdoO8a_bY<-k(PJvp>_%Gv>m7eXoO8y z=v38xA~-=V_sQW?)$U2~rleO;=quWCh;^O(o^87Do2U9N{K`5{e9LlrUu6j^#7=7= z^K^|iM3;Gn?{VS6|5NG*Skr-zS?{?!Y}3WxJ=J&q7q<4T&slowP6s>-T#)3&a-QwJ zg;hNOSL6bxgy$1FEqErlrDbgSx;<>mwFhkL2aIjG<}W;N@XCF*V*N{$o67aDm_{0CMM*lVjE#{Dj{8=BvSl58QYDE=yg#mCogYr)>Me!L!;uW!7o8S@;_g zl7_uQY-^OC2f{JS*1bsWqcU+(hEBQd6W{?{d}`5fJiw)yk^ly7`)qjlJuY(RJKqrq z`GfcguyO3dif(EzUGOZ31~>troplQEFvkURT|{^Q=UekNvpM)VZw)KievEA%dfrbnvSA!cLKpGf(cmS-Gnz;po5gd(_bb!i_sP^nlvAr;IF94NvVp2S5{E zIe{x2j!!EhSb%2%93eco6oY=f@81m-UN#$TMCJ2m2ny;!--6vrS|s-=;JMtVsQK{x z#ZBAU#4ZIzS;aOu?=L=xFyyPUm3?Au_nm}G zSbMn7S+E-pEG2LS!||zwHaG#E*!EfQ6af9;A!OnMw7>mzcz7Q$)_Ul3`k>r-s{YQf z&a>g+c?iv$uRaJBBI%f+2drT5@6;XkD29qGXyza-z)iHk1%yWcqj%nyCZ5l%AP$=9nz{{A|*f(qAK$Qw5&SJ zo?;tL{umCL@!pybd`vwo2)C-yhLO6Dw|gDjaV9t?JOP~4_C;OuneH?GpWBB}^e|le zag}$AW%vAz`a|7QB%*g8o`;LG_RW8Vt~&u6od!uYy_Dq0U1st~gFVI<0$vHgCFeW9 zY2hhCr&r?Qf@c7ngiJHNoWjXST(|Mu9k#gnSvqH6=P3$yBu=={wJ%+went7Uf(X!h z-~WXzZrVoeq^hse9xCz}qzwws23No)rt09S0S1Am1V|yXgw4pRV@44sR;1jC7Xs)LiHjQyTH$)pCo|DzJmknGB*FnHD z$os71*<)0+QxUHkDiUUvsUTYIo+5d105l-ZFV=!{!t(V?P4YG}AheuQ< z)a-emPmxfTfpUQ9H^!gPggt-s-G2;kgHCgTT|RX;9RB!N9ozB9ZX| zC?+lh(FY)Yh70OmycjxJ!EqZ-{m7DPHj(pOlXI8VSTHMgo+MZULl4-%l?OEJ5)_`9 z(Pp&nqIS~RQSCoP!Z}bY&#D&NJyVLBFFJ5unWq3x0WgUB;?oN0L#}=Ev(RZd;I$w5 znl1!QRFdFfPE1bke%62S4+_m(COm)VAtDT1xySPR-{571QM}KRM;lZ_MXEts6QFA; zw9b8|16!j67Zjc@boz9c;XFw15r)DzZxBWORmK|j4NgF5PGS zm+!Fw$N|sWdE)pWr0VN*zzZ}*!U@j?XipDx^|lm-0Y}oo696jUd1alV^A}LqXi93!NSS@!Xt^``O^oeP4KkLkM3S8lMvPE8BGbE_G&O zSOp#2xInA-f57@K{6;+-NMz8F40r&k255&)Kx;ev74^(xYZ0-#OziKJ0jff0xXe@P zaVZ!nI7}*&F!yQT0hY^wYabyvt@HHoT#1VeFSU>ga2wjm_3!?`+K+t0I!}Dhx=#K`zsKJ#c;`7hI8_PnDx<%n&g8Sr z!jJQb?hEfc?!GyX3>@hZz>@&tSiDhOd}r?l!k8HzeONU`<1)T0q+4D7<$MGH@|7DY0!)`sNf0U{Nc%2ryo3k zYk8F5MCl*~1Sr76`LGIe<9oNQf*FoLRtTr`T28yoHk`i6)*H~)9sibok2Z4f@_qjM zf^u?^wsN8?5{EPmb(x)1}`npzBZk zK+o?!^Aiu9M%Xmr;kebWen8VL(9`1XsMZV>1%xNpe5UigK7Hn?bf27c3htXV*5HC9E-H8v zQVLjNO&@DL^o0>3BNXanKw4qK>g9W^^Tc=LJcQTM9q-Uo1{xL`!*%Sgf?ZrmYrvb8 z+cXy7py=U=p_6`}TEx;@o?~r?zoa&5vz(0h=^8Jt0lwzN^VD`=`IvGYy9CcYNNZ1# za5+x{FL2wZSWPsB;6&*lx&d+-0FD2b?l|twAOi|v6&dNmiVZZ|T3k`<4bVGuqCYdJ z`@8_k?z7syoW@uNr-o;`58)D7R_oy}Jo_{KgdP}_8OU5QZBX8Xz_ovi1TR?ld~^kS z41qHh0GVslES5%vl{{;4J9? 
z7P1M+d2D6RJ~ntY+3(wPe{{`Su0WFVqlEuC$BTYv9L^HWA zV=cfwTA*TX1!4iyWSsz~+Unn3^U3i1O9l&`0dU5D1Govv`7A!Igw^i37<#IBw8HCP z?j*u8k`;R!kP)~~129qxa52`At{d)Zax&K5Asn8Q`3=YVpf$Fp2mId zn$OF9qVs~)e6bBKxq#X=EbZ;T9BwrYg!V9OB7$1h9~cut4myz&XdXp z^=9L-3+~B9IkX3H&zYN{lM#fxhJ9CQur&;L90Tapf5mE`YBpKtJeQ=5Q-bFRn-)NQ z$w1%)fYVpj#U{8XlXBUtye8IuG!i-)!EsHmen=MzdtuU+P9foKzIvbK558e7ht_UB zr)c*`8h~Ll&*@n)2o7x;QgQ6#720W7t$Hj%+YBPyqKm9x;Em8B6YwzKWogTE)WI!p zf2D`#?>?6U1GsN`R())NL$3&W{mY*_75XCcjPPfcTt7(R7TshxHzwFt?LHU!W-wRj zV4J9ksYP0NYJfI8ch;$cXE@JFUL4MgA#lhblk-?My0eDvg$ipBdi$}Z?h}i09o=O( z|BS2#w&smbL&L*Ofl+hJCPsg$1y2TOZg;E*oBUf+)HRyMd|DWY2|=M9{Q(l5YZS8LvDQJ_zf6|OeXj5+*L)g) zCd7mlF#-xlnb`aj=d?B?5Z$K>of`KAtogTb+Y7WWR~Tq?%(aKKqYuKZ ze15HjaXz$n*kl{i`@+JbLjT}DDNG|2b8rFSxfEcKmbUw-9vg5^BxchW*!8~8SJTKB z!%sM7>-C4MY{v--d3G`qPy)Os5_4#lSI?QBLc_yx)w|Ep#mD;*UFQqWbBctVbsBBo zSb)P_j&;qLfgHAdL3@bRGzc3wpZprZh4v0*LLSZ0#H69H?+M2h48BP-(5#TDa-XW* zbHTISr)>K&s~#^nTx3`si{QiV--Qc=wE~#Y>&zfS%1BnQlq;8Zh7C6Xi4VZuW0~!{ z>AVhjDhEpLQ#(-nT2sM&j}sj3YV`9}>^e0fXjmVBFOqmN(H|Nw#QB(>{5M$u7oS>8 zAunvmL^*?nBiObd=hVT|g-#2e!g-FwCF3!IgWGcZUgLLjm^<3oUJzfrFoD7&ptxVh z5ruBAj_xl+kEh!p>HlED{XIoMcrbKOj8F>0Z`>UN7C}Rz#)TNx@HGm z_%O)`2YsK~5c2}duOSOJ3cmJiaOe-#eDG81;|tOr;yD=g#Kkc z-20a~LZ`0g)3$vTqYn;uI(7@fnzXP*HZDZ%ix=qw2MK$~TCs6=&zakFEL6GcG_4N^ zbo`=R+A1F#0*vH6m+w4j?BnU&ghvT48uz7-GB`lPLWS11z6c$9Z~?HKDj~Bf&_$=( zET@rr@y%^EXe9gELzdI`nsJU`j>R`n4PJQ=DzxJmEJnd=B4nB?YhCbE;Zp@qRC@X7 zf#cm-6|DZ{%OeUoSgVcpg12k#*Kxe2(Hnv-4P~d8S>KX1&jw0t%=2Musm)mvHl=WbI^evqCKK8=qryV7NafQRsjtQ3W}d=6Ro47iAO zFNgnDF1Wt!@Yif+b|VeIn(&kWgSv0jz$NF?8^bPkVLmXqex(#`AKi z`wK~oR|13!+4|O3Y<6z5f2L`M`|MS0?V-=ZM~>@^zAKe?Zaa5SQUdgWH)XWJVdJwf zj>8AVG;r=*A85F^iP^@Qf?#88+x~%X47p5=KGq}1=aYr37~ilFcDxYBEUN9JbBX)6 zXu+w%rwDFo*(iZSm`TZ}FRG39F2(cFV~;m))?!|QBe11=*U29OK{JKtqW$9M3HCbp zE;;P663xws&x7qe(9=UniemR9EkN0MCcsezXAF=wzV;!#dB{x7l`@{weDD*RfEpx& zgd4GL41=)Ge55$1hY&IW0N)s54l(%&pNW5a<8SPlf4I*n3(whf;TM+C{=BfNR@&-c z0Z|9ghFCg!;2>Ol%u>ScbCu7Zrf>Jq2|KT@=z5vWSWy=U8l68mk2WQ(*>m1?zChZ2 zj=uX8#d(mB%p*V{jck-`dz)45R-Ds1_gPl5?Fh{Z11xkL$U8}dcHfdw1_$9T#RcXI z?)Ado@w^}zq$v9GUGeUXR>!P_YGw zSNL?mTU;^@ZuF#srv_-c&kLRfa2(udO9wAN=+wX)2lrT|f+qlUgiSki>flWp2lqIo zb6*sp(}b8-JPz)$OR)`z4xZM1odhs- z99)b@2TuSf!W#z{Lki+N6JToLIJnqS0C?lzVpv-D84y!AxI3-`#}y+|!6R^Y0Pd#y zfN{l+6z)qYycst!Iqv|5(%lD)D<&j`P4t{hOer`JH!(H)FB4OWzCm&4I%r%mq(a)B z1FR;d7W^d+|DH_Bc_Kcw;Pv?A{9DxKAz>L;Ea?u|fYF4My#E2S{{wP}i{H;-Y-|7k N002ovPDHLkV1i-n!nFVZ literal 0 HcmV?d00001 diff --git a/src/Cortex.States.ClickHouse/Assets/license.md b/src/Cortex.States.ClickHouse/Assets/license.md new file mode 100644 index 0000000..530f621 --- /dev/null +++ b/src/Cortex.States.ClickHouse/Assets/license.md @@ -0,0 +1,20 @@ +The MIT License (MIT) + +Copyright (c) 2024 Buildersoft + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/src/Cortex.States.ClickHouse/ClickHouseConfiguration.cs b/src/Cortex.States.ClickHouse/ClickHouseConfiguration.cs new file mode 100644 index 0000000..0d376cd --- /dev/null +++ b/src/Cortex.States.ClickHouse/ClickHouseConfiguration.cs @@ -0,0 +1,25 @@ +namespace Cortex.States.ClickHouse +{ + public enum ClickHouseTableEngine + { + MergeTree, + ReplacingMergeTree, + } + + public class ClickHouseConfiguration + { + public ClickHouseTableEngine TableEngine { get; set; } = ClickHouseTableEngine.MergeTree; + + /// + /// Additional raw text appended to the engine definition, e.g. PARTITION BY, ORDER BY clauses if needed. + /// For example: "ORDER BY key" or "ORDER BY (key) PRIMARY KEY (key) SETTINGS index_granularity = 8192" + /// + public string EngineModifiers { get; set; } = "ORDER BY key"; + + /// + /// If true, the table creation code will attempt to create or alter columns to match schema. + /// If false, throws if the table or columns do not exist. + /// + public bool CreateOrUpdateTableSchema { get; set; } = true; + } +} diff --git a/src/Cortex.States.ClickHouse/ClickHousePropertyConverter.cs b/src/Cortex.States.ClickHouse/ClickHousePropertyConverter.cs new file mode 100644 index 0000000..3b64a22 --- /dev/null +++ b/src/Cortex.States.ClickHouse/ClickHousePropertyConverter.cs @@ -0,0 +1,75 @@ +using System; +using System.Collections.Generic; +using System.Globalization; +using System.Linq; +using System.Text; +using System.Threading.Tasks; + +namespace Cortex.States.ClickHouse +{ + internal class ClickHousePropertyConverter + { + public string ConvertToString(object value) + { + if (value == null) return null; + return value.ToString(); + } + + public object ConvertFromString(Type type, string str) + { + if (str == null) return null; + + if (type == typeof(string)) return str; + + // Basic numeric and other conversions + if (type == typeof(int) || type == typeof(int?)) + { + if (int.TryParse(str, NumberStyles.Any, CultureInfo.InvariantCulture, out var i)) return i; + return type == typeof(int) ? 0 : (int?)null; + } + if (type == typeof(long) || type == typeof(long?)) + { + if (long.TryParse(str, NumberStyles.Any, CultureInfo.InvariantCulture, out var l)) return l; + return type == typeof(long) ? 0L : (long?)null; + } + if (type == typeof(double) || type == typeof(double?)) + { + if (double.TryParse(str, NumberStyles.Any, CultureInfo.InvariantCulture, out var d)) return d; + return type == typeof(double) ? 0.0 : (double?)null; + } + if (type == typeof(float) || type == typeof(float?)) + { + if (float.TryParse(str, NumberStyles.Any, CultureInfo.InvariantCulture, out var f)) return f; + return type == typeof(float) ? 0f : (float?)null; + } + if (type == typeof(bool) || type == typeof(bool?)) + { + if (bool.TryParse(str, out var b)) return b; + return type == typeof(bool) ? false : (bool?)null; + } + if (type == typeof(DateTime) || type == typeof(DateTime?)) + { + if (DateTime.TryParse(str, CultureInfo.InvariantCulture, DateTimeStyles.RoundtripKind, out var dt)) return dt; + return type == typeof(DateTime) ? DateTime.MinValue : (DateTime?)null; + } + if (type == typeof(Guid) || type == typeof(Guid?)) + { + if (Guid.TryParse(str, out var g)) return g; + return type == typeof(Guid) ? 
Guid.Empty : (Guid?)null; + } + if (type == typeof(decimal) || type == typeof(decimal?)) + { + if (decimal.TryParse(str, NumberStyles.Any, CultureInfo.InvariantCulture, out var dec)) return dec; + return type == typeof(decimal) ? 0m : (decimal?)null; + } + if (type == typeof(TimeSpan) || type == typeof(TimeSpan?)) + { + if (TimeSpan.TryParse(str, CultureInfo.InvariantCulture, out var ts)) return ts; + return type == typeof(TimeSpan) ? TimeSpan.Zero : (TimeSpan?)null; + } + + // If unknown type, just return the string + return str; + } + } +} diff --git a/src/Cortex.States.ClickHouse/ClickHouseSchemaManager.cs b/src/Cortex.States.ClickHouse/ClickHouseSchemaManager.cs new file mode 100644 index 0000000..229052d --- /dev/null +++ b/src/Cortex.States.ClickHouse/ClickHouseSchemaManager.cs @@ -0,0 +1,221 @@ +using ClickHouse.Client.ADO.Parameters; +using ClickHouse.Client.ADO; +using System; +using System.Collections.Generic; +using System.Linq; +using System.Reflection; +using System.Text; +using System.Threading.Tasks; + +namespace Cortex.States.ClickHouse +{ + internal class ClickHouseSchemaManager + { + private readonly string _connectionString; + private readonly string _tableName; + private readonly ClickHouseConfiguration _config; + private readonly ClickHouseTypeAnalyzer _typeAnalyzer; + + public ClickHouseSchemaManager( + string connectionString, + string tableName, + ClickHouseConfiguration config, + ClickHouseTypeAnalyzer typeAnalyzer) + { + _connectionString = connectionString; + _tableName = tableName; + _config = config; + _typeAnalyzer = typeAnalyzer; + } + + public void EnsureSchemaAndTable() + { + // 1. Check if table exists + if (!TableExists()) + { + if (_config.CreateOrUpdateTableSchema) + { + CreateTable(); + } + else + { + throw new InvalidOperationException( + $"Table {_tableName} does not exist and CreateOrUpdateTableSchema=false."); + } + } + else + { + // 2. Potentially check columns if table already exists. 
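+                // The check below reconciles the live schema with what TValue requires:
+                // BuildNeededColumns() produces the key and timestamp columns plus one
+                // column per scalar property and an Array(...) column per list property,
+                // GetExistingColumns() reads system.columns for this table, and every
+                // missing column is added via ALTER TABLE when CreateOrUpdateTableSchema
+                // is enabled (otherwise an InvalidOperationException is thrown).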
+ var existingColumns = GetExistingColumns(); + var neededColumns = BuildNeededColumns(); + foreach (var needed in neededColumns) + { + if (!existingColumns.Contains(needed.Key, StringComparer.OrdinalIgnoreCase)) + { + if (_config.CreateOrUpdateTableSchema) + AddColumn(needed); + else + throw new InvalidOperationException( + $"Column {needed.Key} is missing in table {_tableName} but CreateOrUpdateTableSchema=false."); + } + } + } + } + + private bool TableExists() + { + using var conn = new ClickHouseConnection(_connectionString); + conn.Open(); + var sql = @"SELECT count(*) + FROM system.tables + WHERE database = currentDatabase() + AND name = @table"; + using var cmd = conn.CreateCommand(); + cmd.CommandText = sql; + cmd.Parameters.Add(new ClickHouseDbParameter { ParameterName = "table", Value = _tableName }); + var result = cmd.ExecuteScalar(); + return Convert.ToInt64(result) > 0; + } + + private HashSet GetExistingColumns() + { + var columns = new HashSet(StringComparer.OrdinalIgnoreCase); + + using var conn = new ClickHouseConnection(_connectionString); + conn.Open(); + var sql = @"SELECT name + FROM system.columns + WHERE database = currentDatabase() + AND table = @table"; + using var cmd = conn.CreateCommand(); + cmd.CommandText = sql; + cmd.Parameters.Add(new ClickHouseDbParameter { ParameterName = "table", Value = _tableName }); + using var reader = cmd.ExecuteReader(); + while (reader.Read()) + { + columns.Add(reader.GetString(0)); + } + return columns; + } + + private Dictionary BuildNeededColumns() + { + // Always need a key column + var needed = new Dictionary(StringComparer.OrdinalIgnoreCase) + { + { "key", "String" } + }; + + // Also for a ReplacingMergeTree we typically need a version or an is_deleted or something. + // For simplicity, let's store a 'timestamp' column: + needed["timestamp"] = "DateTime64(3)"; // millisecond resolution + + // Map each scalar property + foreach (var prop in _typeAnalyzer.ScalarProperties) + { + var chType = GetClickhouseTypeForProperty(prop); + needed[prop.Name] = chType; + } + + // Map each list property to an Array(...) type + foreach (var listProp in _typeAnalyzer.ListProperties) + { + var itemType = listProp.PropertyType.GetGenericArguments()[0]; + var elementChType = GetClickhouseScalarType(itemType); + needed[listProp.Name] = $"Array({elementChType})"; + } + + return needed; + } + + private void CreateTable() + { + var needed = BuildNeededColumns(); + + // Build CREATE TABLE statement + var sb = new StringBuilder(); + sb.AppendLine($"CREATE TABLE {_tableName} ("); + // Add each column definition + foreach (var kv in needed) + { + sb.AppendLine($" `{kv.Key}` {kv.Value},"); + } + // Remove last comma + sb.Length -= 3; + sb.AppendLine(")"); + + sb.AppendLine("ENGINE = " + BuildEngineClause()); + + // Example: ReplacingMergeTree(timestamp) ORDER BY key + // or: MergeTree ORDER BY key + // or: ReplacingMergeTree ORDER BY (key) PARTITION BY ... + // etc. + + using var conn = new ClickHouseConnection(_connectionString); + conn.Open(); + using var cmd = conn.CreateCommand(); + cmd.CommandText = sb.ToString(); + cmd.ExecuteNonQuery(); + } + + private void AddColumn(KeyValuePair column) + { + var sql = $"ALTER TABLE {_tableName} ADD COLUMN `{column.Key}` {column.Value}"; + using var conn = new ClickHouseConnection(_connectionString); + conn.Open(); + using var cmd = conn.CreateCommand(); + cmd.CommandText = sql; + cmd.ExecuteNonQuery(); + } + + private string BuildEngineClause() + { + // e.g. 
"MergeTree ORDER BY key" + // or "ReplacingMergeTree(timestamp) ORDER BY key" + switch (_config.TableEngine) + { + case ClickHouseTableEngine.ReplacingMergeTree: + // For ReplacingMergeTree we usually specify a version or a column like "timestamp" + return $"ReplacingMergeTree(timestamp) {_config.EngineModifiers}"; + default: + // Default is MergeTree + return $"MergeTree {_config.EngineModifiers}"; + } + } + + private string GetClickhouseTypeForProperty(PropertyInfo prop) + { + var type = Nullable.GetUnderlyingType(prop.PropertyType) ?? prop.PropertyType; + return GetClickhouseScalarType(type); + } + + private string GetClickhouseScalarType(Type type) + { + // Basic mapping + if (type == typeof(int) || type == typeof(short) || type == typeof(byte)) + return "Int32"; + if (type == typeof(long)) + return "Int64"; + if (type == typeof(bool)) + return "UInt8"; // or use Int8, but bool->UInt8 is common + if (type == typeof(DateTime)) + return "DateTime64(3)"; + if (type == typeof(decimal)) + return "Decimal(18,4)"; // adjust as needed + if (type == typeof(double)) + return "Float64"; + if (type == typeof(float)) + return "Float32"; + if (type == typeof(Guid)) + return "String"; // ClickHouse doesn't have a native GUID type in older versions + if (type == typeof(TimeSpan)) + return "Int64"; // store ticks + + // default to String if unknown + if (type == typeof(string)) + return "String"; + + return "String"; + } + } +} diff --git a/src/Cortex.States.ClickHouse/ClickHouseStateStore.cs b/src/Cortex.States.ClickHouse/ClickHouseStateStore.cs new file mode 100644 index 0000000..9236d32 --- /dev/null +++ b/src/Cortex.States.ClickHouse/ClickHouseStateStore.cs @@ -0,0 +1,378 @@ +using ClickHouse.Client.ADO.Parameters; +using ClickHouse.Client.ADO; +using System; +using System.Collections.Generic; +using System.Linq; +using System.Reflection; +using System.Threading; + +namespace Cortex.States.ClickHouse +{ + public class ClickHouseStateStore : IStateStore, IDisposable + where TValue : new() + { + private readonly string _connectionString; + private readonly string _tableName; + private readonly ClickHouseConfiguration _config; + private readonly ClickHouseTypeAnalyzer _typeAnalyzer; + private readonly ClickHousePropertyConverter _propertyConverter; + private readonly ClickHouseSchemaManager _schemaManager; + + private static readonly SemaphoreSlim _initLock = new SemaphoreSlim(1, 1); + private volatile bool _initialized; + + public string Name { get; } + + public ClickHouseStateStore( + string name, + string connectionString, + string tableName, + ClickHouseConfiguration config = null) + { + if (string.IsNullOrWhiteSpace(name)) + throw new ArgumentNullException(nameof(name)); + if (string.IsNullOrWhiteSpace(connectionString)) + throw new ArgumentNullException(nameof(connectionString)); + if (string.IsNullOrWhiteSpace(tableName)) + throw new ArgumentNullException(nameof(tableName)); + + Name = name; + _connectionString = connectionString; + _tableName = tableName; + _config = config ?? 
new ClickHouseConfiguration(); // default config + + _typeAnalyzer = new ClickHouseTypeAnalyzer(typeof(TValue)); + _propertyConverter = new ClickHousePropertyConverter(); + _schemaManager = new ClickHouseSchemaManager( + _connectionString, + _tableName, + _config, + _typeAnalyzer); + + Initialize(); + } + + private void Initialize() + { + if (_initialized) return; + _initLock.Wait(); + try + { + if (_initialized) return; + _schemaManager.EnsureSchemaAndTable(); + _initialized = true; + } + finally + { + _initLock.Release(); + } + } + + private void EnsureInitialized() + { + if (!_initialized) + throw new InvalidOperationException("ClickhouseStateStore is not initialized yet."); + } + + public TValue Get(TKey key) + { + EnsureInitialized(); + var keyStr = key.ToString(); + + // For ReplacingMergeTree we might want the latest row; if we always delete then insert, + // there's only one row per key anyway. But let's be safe in case of leftover older rows: + string sql = $@" +SELECT * +FROM {_tableName} +WHERE key = @key +ORDER BY timestamp DESC +LIMIT 1"; + + using var conn = new ClickHouseConnection(_connectionString); + conn.Open(); + + using var cmd = conn.CreateCommand(); + cmd.CommandText = sql; + cmd.Parameters.Add(new ClickHouseDbParameter { ParameterName = "key", Value = keyStr }); + + using var reader = cmd.ExecuteReader(); + if (!reader.Read()) + return default; // not found + + var instance = new TValue(); + + // Index by column name -> ordinal + var schemaTable = reader.GetSchemaTable(); + var columnsDict = new Dictionary(StringComparer.OrdinalIgnoreCase); + for (int i = 0; i < reader.FieldCount; i++) + { + columnsDict[reader.GetName(i)] = i; + } + + // fill scalar properties + foreach (var prop in _typeAnalyzer.ScalarProperties) + { + if (!columnsDict.ContainsKey(prop.Name)) continue; + var ordinal = columnsDict[prop.Name]; + var val = reader.IsDBNull(ordinal) ? null : reader.GetValue(ordinal); + var converted = ConvertValueFromClickhouse(val, prop.PropertyType); + prop.SetValue(instance, converted); + } + + // fill list properties + foreach (var prop in _typeAnalyzer.ListProperties) + { + if (!columnsDict.ContainsKey(prop.Name)) continue; + var ordinal = columnsDict[prop.Name]; + if (reader.IsDBNull(ordinal)) + { + prop.SetValue(instance, null); + continue; + } + var arrayVal = reader.GetValue(ordinal); + // ClickHouse client typically returns object[] or something for Array columns. + var listInstance = Activator.CreateInstance(prop.PropertyType); // e.g. List + var addMethod = prop.PropertyType.GetMethod("Add"); + + var elementType = prop.PropertyType.GetGenericArguments()[0]; + + // Suppose the arrayVal is an object[] of the column's elements + if (arrayVal is System.Collections.IEnumerable items) + { + foreach (var item in items) + { + var convertedItem = ConvertValueFromClickhouse(item, elementType); + addMethod.Invoke(listInstance, new[] { convertedItem }); + } + } + + prop.SetValue(instance, listInstance); + } + + return instance; + } + + private object ConvertValueFromClickhouse(object val, Type targetType) + { + if (val == null) + return null; + + var underlying = Nullable.GetUnderlyingType(targetType) ?? 
targetType; + + // Special case for timespan stored as ticks + if (underlying == typeof(TimeSpan) && val is long lVal) + { + return TimeSpan.FromTicks(lVal); + } + + // If target is string, just call ToString + if (underlying == typeof(string)) + return val.ToString(); + + // If we got a DateTime for a DateTime64 + if (underlying == typeof(DateTime) && val is DateTime dt) + { + return dt; + } + + // if we got an integer or double etc. + // we can just Convert.ChangeType in many cases + // but let's do a fallback to the property converter if that fails + try + { + return Convert.ChangeType(val, underlying); + } + catch + { + // fallback + return _propertyConverter.ConvertFromString(targetType, val.ToString()); + } + } + + public void Put(TKey key, TValue value) + { + EnsureInitialized(); + + // We do a "delete" then an "insert" + Remove(key); + + var keyStr = key.ToString(); + + // Build parameter lists + var columnNames = new List { "key", "timestamp" }; + var paramNames = new List { "@key", "@timestamp" }; + var paramValues = new Dictionary + { + { "key", keyStr }, + { "timestamp", DateTime.UtcNow } // used for ReplacingMergeTree version + }; + + // scalar properties + foreach (var prop in _typeAnalyzer.ScalarProperties) + { + columnNames.Add(prop.Name); + var pName = "@" + prop.Name; + paramNames.Add(pName); + + var propVal = prop.GetValue(value); + object finalVal = ConvertValueToClickhouse(propVal, prop); + paramValues[prop.Name] = finalVal; + } + + // list properties + foreach (var prop in _typeAnalyzer.ListProperties) + { + columnNames.Add(prop.Name); + var pName = "@" + prop.Name; + paramNames.Add(pName); + + var propVal = prop.GetValue(value); + // propVal might be a List. We can store as object[] or typed array for Insert. + object finalVal = ConvertListToClickhouseArray(propVal, prop); + paramValues[prop.Name] = finalVal; + } + + var insertSql = $@" +INSERT INTO {_tableName} +({string.Join(",", columnNames)}) +VALUES ({string.Join(",", paramNames)})"; + + using var conn = new ClickHouseConnection(_connectionString); + conn.Open(); + using var cmd = conn.CreateCommand(); + cmd.CommandText = insertSql; + + foreach (var kvp in paramValues) + { + cmd.Parameters.Add(new ClickHouseDbParameter + { + ParameterName = kvp.Key, + Value = kvp.Value ?? DBNull.Value + }); + } + + cmd.ExecuteNonQuery(); + } + + private object ConvertValueToClickhouse(object value, PropertyInfo prop) + { + if (value == null) return null; + + var underlying = Nullable.GetUnderlyingType(prop.PropertyType) ?? prop.PropertyType; + + if (underlying == typeof(TimeSpan)) + { + // store as ticks + TimeSpan ts = (TimeSpan)value; + return ts.Ticks; + } + + return value; + } + + private object ConvertListToClickhouseArray(object listVal, PropertyInfo prop) + { + if (listVal == null) return null; + // it's a List, so let's get T + var elementType = prop.PropertyType.GetGenericArguments()[0]; + var enumer = listVal as System.Collections.IEnumerable; + if (enumer == null) return null; + + var result = new List(); + foreach (var item in enumer) + { + var converted = ConvertValueFromClickhouse(item, elementType); + // Actually we want to store "raw" form, so let's do ConvertValueToClickhouse + // so timespan etc. 
become ticks + converted = ConvertValueToClickhouse(item, prop); + result.Add(converted); + } + return result.ToArray(); + } + + public bool ContainsKey(TKey key) + { + EnsureInitialized(); + var keyStr = key.ToString(); + + var sql = $@" +SELECT count(*) +FROM {_tableName} +WHERE key = @key +"; + + using var conn = new ClickHouseConnection(_connectionString); + conn.Open(); + using var cmd = conn.CreateCommand(); + cmd.CommandText = sql; + cmd.Parameters.Add(new ClickHouseDbParameter + { + ParameterName = "key", + Value = keyStr + }); + + var count = Convert.ToInt64(cmd.ExecuteScalar()); + return count > 0; + } + + public void Remove(TKey key) + { + EnsureInitialized(); + + // For a pure MergeTree, we can do an actual DELETE. This is an async operation in newer ClickHouse versions. + // For ReplacingMergeTree, we might do an "is_deleted=1" approach. For simplicity, let's do a DELETE if we want immediate removal. + var keyStr = key.ToString(); + var sql = $"ALTER TABLE {_tableName} DELETE WHERE key = @key"; + + using var conn = new ClickHouseConnection(_connectionString); + conn.Open(); + using var cmd = conn.CreateCommand(); + cmd.CommandText = sql; + cmd.Parameters.Add(new ClickHouseDbParameter + { + ParameterName = "key", + Value = keyStr + }); + cmd.ExecuteNonQuery(); + } + + public IEnumerable> GetAll() + { + // For large tables, you'd likely do streaming or another approach. + // Here we just do SELECT distinct key, then do Get(key). + // For a big table, that might be huge. Use with caution. + EnsureInitialized(); + var allKeys = GetKeys().ToList(); + foreach (var k in allKeys) + { + yield return new KeyValuePair(k, Get(k)); + } + } + + public IEnumerable GetKeys() + { + EnsureInitialized(); + + var sql = $"SELECT distinct key FROM {_tableName}"; + + using var conn = new ClickHouseConnection(_connectionString); + conn.Open(); + using var cmd = conn.CreateCommand(); + cmd.CommandText = sql; + using var reader = cmd.ExecuteReader(); + var keyType = typeof(TKey); + + while (reader.Read()) + { + var keyObj = reader.GetValue(0); + // Convert from string to TKey if needed + yield return (TKey)Convert.ChangeType(keyObj, keyType); + } + } + + public void Dispose() + { + _initLock.Dispose(); + } + } +} diff --git a/src/Cortex.States.ClickHouse/ClickHouseTypeAnalyzer.cs b/src/Cortex.States.ClickHouse/ClickHouseTypeAnalyzer.cs new file mode 100644 index 0000000..ab5e551 --- /dev/null +++ b/src/Cortex.States.ClickHouse/ClickHouseTypeAnalyzer.cs @@ -0,0 +1,51 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Reflection; +using System.Text; +using System.Threading.Tasks; + +namespace Cortex.States.ClickHouse +{ + internal class ClickHouseTypeAnalyzer + { + public PropertyInfo[] ScalarProperties { get; private set; } + public PropertyInfo[] ListProperties { get; private set; } + + private readonly Type _valueType; + + public ClickHouseTypeAnalyzer(Type valueType) + { + _valueType = valueType; + AnalyzeType(); + } + + private void AnalyzeType() + { + // Retrieve all top-level properties + var props = _valueType.GetProperties(BindingFlags.Public | BindingFlags.Instance) + .Where(p => p.CanRead && p.CanWrite && p.GetIndexParameters().Length == 0) + .ToArray(); + + var scalarProps = new List(); + var listProps = new List(); + + foreach (var prop in props) + { + if (IsGenericList(prop.PropertyType)) + listProps.Add(prop); + else + scalarProps.Add(prop); + } + + ScalarProperties = scalarProps.ToArray(); + ListProperties = listProps.ToArray(); + } + + private bool 
IsGenericList(Type type) + { + if (!type.IsGenericType) return false; + return type.GetGenericTypeDefinition() == typeof(List<>); + } + } +} diff --git a/src/Cortex.States.ClickHouse/Cortex.States.ClickHouse.csproj b/src/Cortex.States.ClickHouse/Cortex.States.ClickHouse.csproj new file mode 100644 index 0000000..e40cbcb --- /dev/null +++ b/src/Cortex.States.ClickHouse/Cortex.States.ClickHouse.csproj @@ -0,0 +1,56 @@ + + + + net9.0;net8.0;net7.0;net6.0 + + 1.0.1 + 1.0.1 + Buildersoft Cortex Framework + Buildersoft + Buildersoft,EnesHoxha + Copyright © Buildersoft 2024 + + Cortex Data Framework is a robust, extensible platform designed to facilitate real-time data streaming, processing, and state management. It provides developers with a comprehensive suite of tools and libraries to build scalable, high-performance data pipelines tailored to diverse use cases. By abstracting underlying streaming technologies and state management solutions, Cortex Data Framework enables seamless integration, simplified development workflows, and enhanced maintainability for complex data-driven applications. + + + https://github.com/buildersoftio/cortex + cortex eda streaming distributed streams states clickhouse + + 1.0.1 + license.md + cortex.png + Cortex.States.ClickHouse + True + True + True + + Just as the Cortex in our brains handles complex processing efficiently, Cortex Data Framework brings brainpower to your data management! + https://buildersoft.io/ + README.md + + + + + + + + + + + + + + True + \ + + + True + + + + True + + + + + diff --git a/src/Cortex.States.MongoDb/Cortex.States.MongoDb.csproj b/src/Cortex.States.MongoDb/Cortex.States.MongoDb.csproj index c4f83c9..d65c4e7 100644 --- a/src/Cortex.States.MongoDb/Cortex.States.MongoDb.csproj +++ b/src/Cortex.States.MongoDb/Cortex.States.MongoDb.csproj @@ -52,5 +52,5 @@ - + diff --git a/src/Cortex.States.PostgreSQL/ListPropertyMetadata.cs b/src/Cortex.States.PostgreSQL/ListPropertyMetadata.cs index 47838f2..86fa7de 100644 --- a/src/Cortex.States.PostgreSQL/ListPropertyMetadata.cs +++ b/src/Cortex.States.PostgreSQL/ListPropertyMetadata.cs @@ -1,4 +1,5 @@ -using System.Reflection; +using System; +using System.Reflection; namespace Cortex.States.PostgreSQL { diff --git a/src/Cortex.States.PostgreSQL/PostgresKeyValueStateStore.cs b/src/Cortex.States.PostgreSQL/PostgresKeyValueStateStore.cs index 0bbf990..82578d9 100644 --- a/src/Cortex.States.PostgreSQL/PostgresKeyValueStateStore.cs +++ b/src/Cortex.States.PostgreSQL/PostgresKeyValueStateStore.cs @@ -1,5 +1,9 @@ using Npgsql; +using System; +using System.Collections.Generic; using System.Text.Json; +using System.Threading; +using System.Threading.Tasks; namespace Cortex.States.PostgreSQL { diff --git a/src/Cortex.States.PostgreSQL/PostgresStateStore.cs b/src/Cortex.States.PostgreSQL/PostgresStateStore.cs index f5beacb..03acb60 100644 --- a/src/Cortex.States.PostgreSQL/PostgresStateStore.cs +++ b/src/Cortex.States.PostgreSQL/PostgresStateStore.cs @@ -3,8 +3,7 @@ using System.Collections.Generic; using System.Linq; using System.Reflection; -using System.Text; -using System.Threading.Tasks; +using System.Threading; namespace Cortex.States.PostgreSQL { From ed5f1e1bc54df98ecbf6bd1f47aa771173dcca77 Mon Sep 17 00:00:00 2001 From: Enes Hoxha Date: Tue, 24 Dec 2024 20:26:56 +0100 Subject: [PATCH 8/8] [v1/feature/76]: Add support for HTTP Source and Sink Operators Implement HttpSourceOperator, Implement HttpSinkOperator, Implement HttpSinkOperatorAsync. 
Update README.md File --- Cortex.sln | 6 + README.md | 6 + src/Cortex.Streams.Http/Assets/cortex.png | Bin 0 -> 7179 bytes src/Cortex.Streams.Http/Assets/license.md | 20 ++ .../Cortex.Streams.Http.csproj | 53 ++++++ src/Cortex.Streams.Http/HttpSinkOperator.cs | 105 +++++++++++ .../HttpSinkOperatorAsync.cs | 176 ++++++++++++++++++ src/Cortex.Streams.Http/HttpSourceOperator.cs | 120 ++++++++++++ 8 files changed, 486 insertions(+) create mode 100644 src/Cortex.Streams.Http/Assets/cortex.png create mode 100644 src/Cortex.Streams.Http/Assets/license.md create mode 100644 src/Cortex.Streams.Http/Cortex.Streams.Http.csproj create mode 100644 src/Cortex.Streams.Http/HttpSinkOperator.cs create mode 100644 src/Cortex.Streams.Http/HttpSinkOperatorAsync.cs create mode 100644 src/Cortex.Streams.Http/HttpSourceOperator.cs diff --git a/Cortex.sln b/Cortex.sln index a9ad30f..a87ef46 100644 --- a/Cortex.sln +++ b/Cortex.sln @@ -43,6 +43,8 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Cortex.States.PostgreSQL", EndProject Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Cortex.States.ClickHouse", "src\Cortex.States.ClickHouse\Cortex.States.ClickHouse.csproj", "{0F9FCB99-D00F-4396-8E2B-6E627076ADA0}" EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Cortex.Streams.Http", "src\Cortex.Streams.Http\Cortex.Streams.Http.csproj", "{20BD7107-8199-4CA8-815B-4D156B522B82}" +EndProject Global GlobalSection(SolutionConfigurationPlatforms) = preSolution Debug|Any CPU = Debug|Any CPU @@ -128,6 +130,10 @@ Global {0F9FCB99-D00F-4396-8E2B-6E627076ADA0}.Debug|Any CPU.Build.0 = Debug|Any CPU {0F9FCB99-D00F-4396-8E2B-6E627076ADA0}.Release|Any CPU.ActiveCfg = Release|Any CPU {0F9FCB99-D00F-4396-8E2B-6E627076ADA0}.Release|Any CPU.Build.0 = Release|Any CPU + {20BD7107-8199-4CA8-815B-4D156B522B82}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {20BD7107-8199-4CA8-815B-4D156B522B82}.Debug|Any CPU.Build.0 = Debug|Any CPU + {20BD7107-8199-4CA8-815B-4D156B522B82}.Release|Any CPU.ActiveCfg = Release|Any CPU + {20BD7107-8199-4CA8-815B-4D156B522B82}.Release|Any CPU.Build.0 = Release|Any CPU EndGlobalSection GlobalSection(SolutionProperties) = preSolution HideSolutionNode = FALSE diff --git a/README.md b/README.md index f9fc026..4cd9b0e 100644 --- a/README.md +++ b/README.md @@ -69,6 +69,9 @@ - **Cortex.Streams.Files:** Implementation of File Source and Sink operators. [![NuGet Version](https://img.shields.io/nuget/v/Cortex.Streams.Files?label=Cortex.Streams.Files)](https://www.nuget.org/packages/Cortex.Streams.Files) +- **Cortex.Streams.Http:** Implementation of Http Source and Sink operators. +[![NuGet Version](https://img.shields.io/nuget/v/Cortex.Streams.Http?label=Cortex.Streams.Http)](https://www.nuget.org/packages/Cortex.Streams.Http) + - **Cortex.States:** Core state management functionalities. [![NuGet Version](https://img.shields.io/nuget/v/Cortex.States?label=Cortex.States)](https://www.nuget.org/packages/Cortex.States) @@ -87,6 +90,9 @@ - **Cortex.States.PostgreSQL:** Persistent state storage using PostgreSQL. [![NuGet Version](https://img.shields.io/nuget/v/Cortex.States.PostgreSQL?label=Cortex.States.PostgreSQL)](https://www.nuget.org/packages/Cortex.States.PostgreSQL) +- **Cortex.States.ClickHouse:** Persistent state storage using Clickhouse. +[![NuGet Version](https://img.shields.io/nuget/v/Cortex.States.ClickHouse?label=Cortex.States.ClickHouse)](https://www.nuget.org/packages/Cortex.States.ClickHouse) + - **Cortex.Telemetry:** Core library to add support for Tracing and Matrics. 
[![NuGet Version](https://img.shields.io/nuget/v/Cortex.Telemetry?label=Cortex.Telemetry)](https://www.nuget.org/packages/Cortex.Telemetry) diff --git a/src/Cortex.Streams.Http/Assets/cortex.png b/src/Cortex.Streams.Http/Assets/cortex.png new file mode 100644 index 0000000000000000000000000000000000000000..a4f9727d04f91f61f20720bd64b9c139fb041236 GIT binary patch literal 7179 zcmV+m9Q5OfP)Px#1ZP1_K>z@;j|==^1poj532;bRa{vGf5&!@T5&_cPe*6Fc8=y%C^C!)jU*}Z_TUZ5zkPS!zgzWpiKoUY0LP$tR!Xokt%Ag{H3XX^@B8wnv03972 z_hwv1kf6A*-1mH+Tg$Cm_f}O`27H#NQDPYKIv zcTCHy`fO@u)s1PH)sbnLH4!`}kZK|c%WX|xUTdf>GJS=u8BkWepa#-RXr-xDk(t@H zW@WFA%*qkfWGlBRGCQ}KE6>y#u7Wj@IeBX$a|&84%`IF@nqxf9Eo!w@+!~oz+$N|! zGOwgPGT%~1WPT~v)1{qUWzc%A^7WAg4s}HqRBUi)VHMOZs3)?px+juI+89Z!G1VJc zB&xQLlmylF3mS+d)%Qn|>-%q{Gz@%}+OT;?VqO2Az$`8yqwdcMnN^=m&#Gn9GOO9N zWmSgoa}7%%RkMWUH7sFyEou6S8a90e9_zTW>Pa(9t!!{;=E_xE+0bgztQ@G(RxZ@U zX6H7t+4KlCv)TDgY<6BVo1M3Y&B-^_!sZmT(Bo4DE$penwd|>awfwQDmCY?QRMf`i z7PlFX?QCvIJDW%9VDpMQ_+#lhHowFWelF`|^GiF~)8*^A%D5JkcX3sij~m#6$_@0m zu(F#itm@{El|3x6Y9mXm>S2l1c-+Vmt9w~uO)pzi-OCo$^sz;?rZ%x8Q+55MWT<|? zR>L4ErD2dQZhVHNG;H}~QR9|B$4$wo*gJiBEt{HI$)=I2xEuhhIXqJqL=&J0vCala z5N(h%O%XsY0Gvq>4G}=VGamuY)|~uiL2C?vYdAnt09d$|;L#&sk_?c=ZERj~yDb1L z>0t9s0idC$%Q^*_5G9Z#17M~3$VC820LaAwn(8AZK>|nur~sJ;c=dyOfYHG-0D8c)!3BgD z0ANym|IN6m>E(B(W>gS3OJ2?cFb6y?Pk02*2B>qM3!dyehx-I@f^^Pv!7~6xXt|F=bO5v<25_GNQSUxC#H8AO4sJ?%xdNQ*J~@1Hcqd#$aK3;6*i@;(*!qp zN$K#ErDYCqdVq%egycmAs0*E1fQrN=5J1U&?wU^l&=op;GEa^BbZsAjqqfh>eP;6F z)Ao6|Z&6L30M3*Oo+ETR-KT=*2%XxtPvO3(3wF_Ye(o~>8t!w1PapS14NvJlvF#Jx zr)&H4@D^1Y;3h9EV~`Hsl#Ft1yQhN}IO}{ANsFp%P!{Yu=K-8E>wGlM^MYqOFId^& z2%iLQaft=a3!W})l0KJV3MhJsTZ0AEbxpuK9u{Elz-8;ROww zR?TOCTT;p_`2Zw{iqPrGJT3QmCoihd8BOM?bf27cD&6OV=tx|mt@-5SMRuQ8=yZio zXXvycikWBcaNHyroC=-<(Ct1seEPUg8#$OY0`0p9-Gr zJ{g`@!7jLOL4{YlXVrYeag)-D-QWax7C_l`CNRIYFQbZ{KY1z6$Kr!#EgZ*Wg$`|~Vo!z(PSdoO8a_bY<-k(PJvp>_%Gv>m7eXoO8y z=v38xA~-=V_sQW?)$U2~rleO;=quWCh;^O(o^87Do2U9N{K`5{e9LlrUu6j^#7=7= z^K^|iM3;Gn?{VS6|5NG*Skr-zS?{?!Y}3WxJ=J&q7q<4T&slowP6s>-T#)3&a-QwJ zg;hNOSL6bxgy$1FEqErlrDbgSx;<>mwFhkL2aIjG<}W;N@XCF*V*N{$o67aDm_{0CMM*lVjE#{Dj{8=BvSl58QYDE=yg#mCogYr)>Me!L!;uW!7o8S@;_g zl7_uQY-^OC2f{JS*1bsWqcU+(hEBQd6W{?{d}`5fJiw)yk^ly7`)qjlJuY(RJKqrq z`GfcguyO3dif(EzUGOZ31~>troplQEFvkURT|{^Q=UekNvpM)VZw)KievEA%dfrbnvSA!cLKpGf(cmS-Gnz;po5gd(_bb!i_sP^nlvAr;IF94NvVp2S5{E zIe{x2j!!EhSb%2%93eco6oY=f@81m-UN#$TMCJ2m2ny;!--6vrS|s-=;JMtVsQK{x z#ZBAU#4ZIzS;aOu?=L=xFyyPUm3?Au_nm}G zSbMn7S+E-pEG2LS!||zwHaG#E*!EfQ6af9;A!OnMw7>mzcz7Q$)_Ul3`k>r-s{YQf z&a>g+c?iv$uRaJBBI%f+2drT5@6;XkD29qGXyza-z)iHk1%yWcqj%nyCZ5l%AP$=9nz{{A|*f(qAK$Qw5&SJ zo?;tL{umCL@!pybd`vwo2)C-yhLO6Dw|gDjaV9t?JOP~4_C;OuneH?GpWBB}^e|le zag}$AW%vAz`a|7QB%*g8o`;LG_RW8Vt~&u6od!uYy_Dq0U1st~gFVI<0$vHgCFeW9 zY2hhCr&r?Qf@c7ngiJHNoWjXST(|Mu9k#gnSvqH6=P3$yBu=={wJ%+went7Uf(X!h z-~WXzZrVoeq^hse9xCz}qzwws23No)rt09S0S1Am1V|yXgw4pRV@44sR;1jC7Xs)LiHjQyTH$)pCo|DzJmknGB*FnHD z$os71*<)0+QxUHkDiUUvsUTYIo+5d105l-ZFV=!{!t(V?P4YG}AheuQ< z)a-emPmxfTfpUQ9H^!gPggt-s-G2;kgHCgTT|RX;9RB!N9ozB9ZX| zC?+lh(FY)Yh70OmycjxJ!EqZ-{m7DPHj(pOlXI8VSTHMgo+MZULl4-%l?OEJ5)_`9 z(Pp&nqIS~RQSCoP!Z}bY&#D&NJyVLBFFJ5unWq3x0WgUB;?oN0L#}=Ev(RZd;I$w5 znl1!QRFdFfPE1bke%62S4+_m(COm)VAtDT1xySPR-{571QM}KRM;lZ_MXEts6QFA; zw9b8|16!j67Zjc@boz9c;XFw15r)DzZxBWORmK|j4NgF5PGS zm+!Fw$N|sWdE)pWr0VN*zzZ}*!U@j?XipDx^|lm-0Y}oo696jUd1alV^A}LqXi93!NSS@!Xt^``O^oeP4KkLkM3S8lMvPE8BGbE_G&O zSOp#2xInA-f57@K{6;+-NMz8F40r&k255&)Kx;ev74^(xYZ0-#OziKJ0jff0xXe@P zaVZ!nI7}*&F!yQT0hY^wYabyvt@HHoT#1VeFSU>ga2wjm_3!?`+K+t0I!}Dhx=#K`zsKJ#c;`7hI8_PnDx<%n&g8Sr 
z!jJQb?hEfc?!GyX3>@hZz>@&tSiDhOd}r?l!k8HzeONU`<1)T0q+4D7<$MGH@|7DY0!)`sNf0U{Nc%2ryo3k zYk8F5MCl*~1Sr76`LGIe<9oNQf*FoLRtTr`T28yoHk`i6)*H~)9sibok2Z4f@_qjM zf^u?^wsN8?5{EPmb(x)1}`npzBZk zK+o?!^Aiu9M%Xmr;kebWen8VL(9`1XsMZV>1%xNpe5UigK7Hn?bf27c3htXV*5HC9E-H8v zQVLjNO&@DL^o0>3BNXanKw4qK>g9W^^Tc=LJcQTM9q-Uo1{xL`!*%Sgf?ZrmYrvb8 z+cXy7py=U=p_6`}TEx;@o?~r?zoa&5vz(0h=^8Jt0lwzN^VD`=`IvGYy9CcYNNZ1# za5+x{FL2wZSWPsB;6&*lx&d+-0FD2b?l|twAOi|v6&dNmiVZZ|T3k`<4bVGuqCYdJ z`@8_k?z7syoW@uNr-o;`58)D7R_oy}Jo_{KgdP}_8OU5QZBX8Xz_ovi1TR?ld~^kS z41qHh0GVslES5%vl{{;4J9? z7P1M+d2D6RJ~ntY+3(wPe{{`Su0WFVqlEuC$BTYv9L^HWA zV=cfwTA*TX1!4iyWSsz~+Unn3^U3i1O9l&`0dU5D1Govv`7A!Igw^i37<#IBw8HCP z?j*u8k`;R!kP)~~129qxa52`At{d)Zax&K5Asn8Q`3=YVpf$Fp2mId zn$OF9qVs~)e6bBKxq#X=EbZ;T9BwrYg!V9OB7$1h9~cut4myz&XdXp z^=9L-3+~B9IkX3H&zYN{lM#fxhJ9CQur&;L90Tapf5mE`YBpKtJeQ=5Q-bFRn-)NQ z$w1%)fYVpj#U{8XlXBUtye8IuG!i-)!EsHmen=MzdtuU+P9foKzIvbK558e7ht_UB zr)c*`8h~Ll&*@n)2o7x;QgQ6#720W7t$Hj%+YBPyqKm9x;Em8B6YwzKWogTE)WI!p zf2D`#?>?6U1GsN`R())NL$3&W{mY*_75XCcjPPfcTt7(R7TshxHzwFt?LHU!W-wRj zV4J9ksYP0NYJfI8ch;$cXE@JFUL4MgA#lhblk-?My0eDvg$ipBdi$}Z?h}i09o=O( z|BS2#w&smbL&L*Ofl+hJCPsg$1y2TOZg;E*oBUf+)HRyMd|DWY2|=M9{Q(l5YZS8LvDQJ_zf6|OeXj5+*L)g) zCd7mlF#-xlnb`aj=d?B?5Z$K>of`KAtogTb+Y7WWR~Tq?%(aKKqYuKZ ze15HjaXz$n*kl{i`@+JbLjT}DDNG|2b8rFSxfEcKmbUw-9vg5^BxchW*!8~8SJTKB z!%sM7>-C4MY{v--d3G`qPy)Os5_4#lSI?QBLc_yx)w|Ep#mD;*UFQqWbBctVbsBBo zSb)P_j&;qLfgHAdL3@bRGzc3wpZprZh4v0*LLSZ0#H69H?+M2h48BP-(5#TDa-XW* zbHTISr)>K&s~#^nTx3`si{QiV--Qc=wE~#Y>&zfS%1BnQlq;8Zh7C6Xi4VZuW0~!{ z>AVhjDhEpLQ#(-nT2sM&j}sj3YV`9}>^e0fXjmVBFOqmN(H|Nw#QB(>{5M$u7oS>8 zAunvmL^*?nBiObd=hVT|g-#2e!g-FwCF3!IgWGcZUgLLjm^<3oUJzfrFoD7&ptxVh z5ruBAj_xl+kEh!p>HlED{XIoMcrbKOj8F>0Z`>UN7C}Rz#)TNx@HGm z_%O)`2YsK~5c2}duOSOJ3cmJiaOe-#eDG81;|tOr;yD=g#Kkc z-20a~LZ`0g)3$vTqYn;uI(7@fnzXP*HZDZ%ix=qw2MK$~TCs6=&zakFEL6GcG_4N^ zbo`=R+A1F#0*vH6m+w4j?BnU&ghvT48uz7-GB`lPLWS11z6c$9Z~?HKDj~Bf&_$=( zET@rr@y%^EXe9gELzdI`nsJU`j>R`n4PJQ=DzxJmEJnd=B4nB?YhCbE;Zp@qRC@X7 zf#cm-6|DZ{%OeUoSgVcpg12k#*Kxe2(Hnv-4P~d8S>KX1&jw0t%=2Musm)mvHl=WbI^evqCKK8=qryV7NafQRsjtQ3W}d=6Ro47iAO zFNgnDF1Wt!@Yif+b|VeIn(&kWgSv0jz$NF?8^bPkVLmXqex(#`AKi z`wK~oR|13!+4|O3Y<6z5f2L`M`|MS0?V-=ZM~>@^zAKe?Zaa5SQUdgWH)XWJVdJwf zj>8AVG;r=*A85F^iP^@Qf?#88+x~%X47p5=KGq}1=aYr37~ilFcDxYBEUN9JbBX)6 zXu+w%rwDFo*(iZSm`TZ}FRG39F2(cFV~;m))?!|QBe11=*U29OK{JKtqW$9M3HCbp zE;;P663xws&x7qe(9=UniemR9EkN0MCcsezXAF=wzV;!#dB{x7l`@{weDD*RfEpx& zgd4GL41=)Ge55$1hY&IW0N)s54l(%&pNW5a<8SPlf4I*n3(whf;TM+C{=BfNR@&-c z0Z|9ghFCg!;2>Ol%u>ScbCu7Zrf>Jq2|KT@=z5vWSWy=U8l68mk2WQ(*>m1?zChZ2 zj=uX8#d(mB%p*V{jck-`dz)45R-Ds1_gPl5?Fh{Z11xkL$U8}dcHfdw1_$9T#RcXI z?)Ado@w^}zq$v9GUGeUXR>!P_YGw zSNL?mTU;^@ZuF#srv_-c&kLRfa2(udO9wAN=+wX)2lrT|f+qlUgiSki>flWp2lqIo zb6*sp(}b8-JPz)$OR)`z4xZM1odhs- z99)b@2TuSf!W#z{Lki+N6JToLIJnqS0C?lzVpv-D84y!AxI3-`#}y+|!6R^Y0Pd#y zfN{l+6z)qYycst!Iqv|5(%lD)D<&j`P4t{hOer`JH!(H)FB4OWzCm&4I%r%mq(a)B z1FR;d7W^d+|DH_Bc_Kcw;Pv?A{9DxKAz>L;Ea?u|fYF4My#E2S{{wP}i{H;-Y-|7k N002ovPDHLkV1i-n!nFVZ literal 0 HcmV?d00001 diff --git a/src/Cortex.Streams.Http/Assets/license.md b/src/Cortex.Streams.Http/Assets/license.md new file mode 100644 index 0000000..530f621 --- /dev/null +++ b/src/Cortex.Streams.Http/Assets/license.md @@ -0,0 +1,20 @@ +The MIT License (MIT) + +Copyright (c) 2024 Buildersoft + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or 
sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/src/Cortex.Streams.Http/Cortex.Streams.Http.csproj b/src/Cortex.Streams.Http/Cortex.Streams.Http.csproj new file mode 100644 index 0000000..24b5d95 --- /dev/null +++ b/src/Cortex.Streams.Http/Cortex.Streams.Http.csproj @@ -0,0 +1,53 @@ + + + + net9.0;net8.0;net7.0;net6.0 + + 1.0.1 + 1.0.1 + Buildersoft Cortex Framework + Buildersoft + Buildersoft,EnesHoxha + Copyright © Buildersoft 2024 + + Cortex Data Framework is a robust, extensible platform designed to facilitate real-time data streaming, processing, and state management. It provides developers with a comprehensive suite of tools and libraries to build scalable, high-performance data pipelines tailored to diverse use cases. By abstracting underlying streaming technologies and state management solutions, Cortex Data Framework enables seamless integration, simplified development workflows, and enhanced maintainability for complex data-driven applications. + + + https://github.com/buildersoftio/cortex + cortex streaming distributed streams http + + 1.0.1 + license.md + cortex.png + Cortex.States.Http + True + True + True + + Just as the Cortex in our brains handles complex processing efficiently, Cortex Data Framework brings brainpower to your data management! + https://buildersoft.io/ + README.md + + + + + + + + + + + True + \ + + + True + + + + True + + + + + diff --git a/src/Cortex.Streams.Http/HttpSinkOperator.cs b/src/Cortex.Streams.Http/HttpSinkOperator.cs new file mode 100644 index 0000000..2518cd9 --- /dev/null +++ b/src/Cortex.Streams.Http/HttpSinkOperator.cs @@ -0,0 +1,105 @@ +using Cortex.Streams.Operators; +using System; +using System.Net.Http; +using System.Text; +using System.Text.Json; +using System.Threading.Tasks; + +namespace Cortex.Streams.Http +{ + /// + /// A sink operator that pushes data to an HTTP endpoint. + /// + /// The type of data consumed by the HTTP sink operator. + public class HttpSinkOperator : ISinkOperator + { + private readonly string _endpoint; + private readonly HttpClient _httpClient; + private readonly JsonSerializerOptions _jsonOptions; + + // Retry configuration + private readonly int _maxRetries; + private readonly TimeSpan _initialDelay; + + /// + /// Creates a new HttpSinkOperator. + /// + /// The endpoint where data should be posted. + /// Number of max consecutive retries on failure before giving up. + /// Initial backoff delay when retrying. + /// Optional HttpClient. If null, a new HttpClient will be created. + /// Optional JsonSerializerOptions for serializing JSON. + public HttpSinkOperator( + string endpoint, + int maxRetries = 3, + TimeSpan? initialDelay = null, + HttpClient httpClient = null, + JsonSerializerOptions jsonOptions = null) + { + _endpoint = endpoint ?? 
throw new ArgumentNullException(nameof(endpoint)); + _maxRetries = maxRetries; + _initialDelay = initialDelay ?? TimeSpan.FromMilliseconds(500); + + _httpClient = httpClient ?? new HttpClient(); + _jsonOptions = jsonOptions ?? new JsonSerializerOptions { PropertyNamingPolicy = JsonNamingPolicy.CamelCase }; + } + + /// + /// Called once when the sink operator is started. + /// + public void Start() + { + // Any initialization or connection setup if needed + } + + /// + /// Processes each incoming item from the stream, pushing it to the HTTP endpoint with retries. + /// + /// The data to send. + public void Process(TInput input) + { + // Synchronous approach + // For a synchronous approach, we do blocking calls + int attempt = 0; + TimeSpan delay = _initialDelay; + + while (true) + { + try + { + var json = JsonSerializer.Serialize(input, _jsonOptions); + var content = new StringContent(json, Encoding.UTF8, "application/json"); + + using var response = _httpClient.PostAsync(_endpoint, content).Result; + response.EnsureSuccessStatusCode(); + + // success + break; + } + catch (Exception ex) + { + attempt++; + if (attempt > _maxRetries) + { + Console.WriteLine($"HttpSinkOperator: Exhausted retries for endpoint {_endpoint}. Error: {ex.Message}"); + break; + } + + Console.WriteLine($"HttpSinkOperator: Error sending data (attempt {attempt} of {_maxRetries}). Retrying in {delay}. Error: {ex.Message}"); + Task.Delay(delay).Wait(); + + // Exponential backoff + delay = TimeSpan.FromMilliseconds(delay.TotalMilliseconds * 2); + } + } + } + + /// + /// Called once when the sink operator is stopped. + /// + public void Stop() + { + // Cleanup if needed + } + } +} \ No newline at end of file diff --git a/src/Cortex.Streams.Http/HttpSinkOperatorAsync.cs b/src/Cortex.Streams.Http/HttpSinkOperatorAsync.cs new file mode 100644 index 0000000..15b62b4 --- /dev/null +++ b/src/Cortex.Streams.Http/HttpSinkOperatorAsync.cs @@ -0,0 +1,176 @@ +using Cortex.Streams.Operators; +using System; +using System.Collections.Concurrent; +using System.Net.Http; +using System.Text; +using System.Text.Json; +using System.Threading; +using System.Threading.Tasks; + +namespace Cortex.Streams.Http +{ + /// + /// A sink operator that pushes data to an HTTP endpoint asynchronously. + /// + /// Type of data consumed by this sink. + public class HttpSinkOperatorAsync : ISinkOperator + { + private readonly string _endpoint; + private readonly HttpClient _httpClient; + private readonly JsonSerializerOptions _jsonOptions; + + // Retry configuration + private readonly int _maxRetries; + private readonly TimeSpan _initialDelay; + + // Internal queue and background worker + private BlockingCollection _messageQueue; + private CancellationTokenSource _cts; + private Task _workerTask; + + /// + /// Constructs an asynchronous HTTP sink operator. + /// + /// The HTTP endpoint to which data should be posted. + /// Max consecutive retries on failure before giving up. + /// Initial backoff delay for retries. + /// + /// Optional . + /// If null, a new HttpClient will be created (but consider in production). + /// + /// Optional JSON serialization options. + public HttpSinkOperatorAsync( + string endpoint, + int maxRetries = 3, + TimeSpan? initialDelay = null, + HttpClient httpClient = null, + JsonSerializerOptions jsonOptions = null) + { + _endpoint = endpoint ?? throw new ArgumentNullException(nameof(endpoint)); + _maxRetries = maxRetries; + _initialDelay = initialDelay ?? TimeSpan.FromMilliseconds(500); + + _httpClient = httpClient ?? 
new HttpClient(); + _jsonOptions = jsonOptions ?? new JsonSerializerOptions { PropertyNamingPolicy = JsonNamingPolicy.CamelCase }; + } + + /// + /// Called once when the sink operator starts. Spawns a background worker. + /// + public void Start() + { + // Prepare the queue and cancellation token + _messageQueue = new BlockingCollection(boundedCapacity: 10000); + _cts = new CancellationTokenSource(); + + // Launch the worker that processes messages asynchronously + _workerTask = Task.Run(() => WorkerLoopAsync(_cts.Token)); + } + + /// + /// Queues incoming data for asynchronous sending. + /// + /// The data to be sent to the HTTP endpoint. + public void Process(TInput input) + { + // Enqueue the item. If the queue is full (bounded), this will block briefly. + // If you want a non-blocking approach, consider _messageQueue.TryAdd(...). + _messageQueue.Add(input); + } + + /// + /// Called once when the sink operator stops. Waits for the background worker to finish. + /// + public void Stop() + { + _cts.Cancel(); + _messageQueue.CompleteAdding(); + + // Safely wait for the worker task to exit + try + { + _workerTask.Wait(); + } + catch (AggregateException ex) + { + // If the worker loop was canceled or faulted, handle if needed + Console.WriteLine($"HttpSinkOperatorAsync: Worker stopped with exception: {ex.Message}"); + } + + _cts.Dispose(); + _messageQueue.Dispose(); + } + + /// + /// Continuously consumes messages from the queue and sends them via HTTP asynchronously. + /// + private async Task WorkerLoopAsync(CancellationToken token) + { + while (!token.IsCancellationRequested && !_messageQueue.IsCompleted) + { + TInput item; + try + { + // Blocks until an item is available or cancellation is requested + item = _messageQueue.Take(token); + } + catch (OperationCanceledException) + { + // Gracefully exit when canceled + break; + } + catch (InvalidOperationException) + { + // The collection has been marked as CompleteAdding + break; + } + + // Send the item asynchronously (with retries) + await SendAsync(item, token); + } + } + + /// + /// Sends one item to the configured HTTP endpoint using exponential backoff. + /// + private async Task SendAsync(TInput item, CancellationToken token) + { + int attempt = 0; + TimeSpan delay = _initialDelay; + + while (!token.IsCancellationRequested) + { + try + { + var json = JsonSerializer.Serialize(item, _jsonOptions); + var content = new StringContent(json, Encoding.UTF8, "application/json"); + + using var response = await _httpClient.PostAsync(_endpoint, content, token); + response.EnsureSuccessStatusCode(); + + // Success; break out of the loop + break; + } + catch (Exception ex) when (!(ex is OperationCanceledException)) + { + attempt++; + if (attempt > _maxRetries) + { + Console.WriteLine($"HttpSinkOperatorAsync: Exhausted retries for {_endpoint}. Error: {ex.Message}"); + break; + } + + Console.WriteLine($"HttpSinkOperatorAsync: Error sending data (attempt {attempt} of {_maxRetries}). " + + $"Retrying in {delay}. 
Error: {ex.Message}");
+
+                    // Exponential backoff, but only if not canceled
+                    if (!token.IsCancellationRequested)
+                    {
+                        await Task.Delay(delay, token);
+                        delay = TimeSpan.FromMilliseconds(delay.TotalMilliseconds * 2);
+                    }
+                }
+            }
+        }
+    }
+}
diff --git a/src/Cortex.Streams.Http/HttpSourceOperator.cs b/src/Cortex.Streams.Http/HttpSourceOperator.cs
new file mode 100644
index 0000000..6173b62
--- /dev/null
+++ b/src/Cortex.Streams.Http/HttpSourceOperator.cs
@@ -0,0 +1,120 @@
+using Cortex.Streams.Operators;
+using System;
+using System.Net.Http;
+using System.Text.Json;
+using System.Threading;
+using System.Threading.Tasks;
+
+namespace Cortex.Streams.Http
+{
+    /// <summary>
+    /// A source operator that periodically calls an HTTP endpoint and emits the response into the stream.
+    /// </summary>
+    /// <typeparam name="TOutput">The type of data emitted by the HTTP source operator.</typeparam>
+    public class HttpSourceOperator<TOutput> : ISourceOperator<TOutput>
+    {
+        private readonly string _endpoint;
+        private readonly TimeSpan _pollInterval;
+        private readonly HttpClient _httpClient;
+        private readonly JsonSerializerOptions _jsonOptions;
+        private Timer _timer;
+        private CancellationTokenSource _cts;
+
+        // Retry configuration
+        private readonly int _maxRetries;
+        private readonly TimeSpan _initialDelay;
+
+        /// <summary>
+        /// Creates a new HttpSourceOperator.
+        /// </summary>
+        /// <param name="endpoint">The HTTP endpoint to call.</param>
+        /// <param name="pollInterval">How often the endpoint should be polled.</param>
+        /// <param name="maxRetries">Number of max consecutive retries on failure before giving up.</param>
+        /// <param name="initialDelay">Initial backoff delay when retrying.</param>
+        /// <param name="httpClient">Optional HttpClient. If null, a new HttpClient will be created.</param>
+        /// <param name="jsonOptions">Optional JsonSerializerOptions for parsing JSON.</param>
+        public HttpSourceOperator(
+            string endpoint,
+            TimeSpan pollInterval,
+            int maxRetries = 3,
+            TimeSpan? initialDelay = null,
+            HttpClient httpClient = null,
+            JsonSerializerOptions jsonOptions = null)
+        {
+            _endpoint = endpoint ?? throw new ArgumentNullException(nameof(endpoint));
+            _pollInterval = pollInterval;
+            _maxRetries = maxRetries;
+            _initialDelay = initialDelay ?? TimeSpan.FromMilliseconds(500);
+
+            _httpClient = httpClient ?? new HttpClient();
+            _jsonOptions = jsonOptions ?? new JsonSerializerOptions { PropertyNameCaseInsensitive = true };
+        }
+
+        /// <summary>
+        /// Starts the source operator, periodically calling the endpoint and emitting data.
+        /// </summary>
+        /// <param name="emit">Action that receives the deserialized data.</param>
+        public void Start(Action<TOutput> emit)
+        {
+            _cts = new CancellationTokenSource();
+
+            // Create a timer that fires at the given interval
+            _timer = new Timer(async _ => await PollAndEmitAsync(emit, _cts.Token), null, TimeSpan.Zero, _pollInterval);
+        }
+
+        /// <summary>
+        /// Stops polling the endpoint.
+        /// </summary>
+        public void Stop()
+        {
+            _cts.Cancel();
+            _timer?.Dispose();
+        }
+
+        /// <summary>
+        /// Calls the configured HTTP endpoint, deserializes the result, and emits it.
+        /// Includes simple retry with exponential backoff.
+        /// </summary>
+        private async Task PollAndEmitAsync(Action<TOutput> emit, CancellationToken token)
+        {
+            int attempt = 0;
+            TimeSpan delay = _initialDelay;
+
+            while (true)
+            {
+                if (token.IsCancellationRequested)
+                    return;
+
+                try
+                {
+                    using var response = await _httpClient.GetAsync(_endpoint, token);
+                    response.EnsureSuccessStatusCode();
+
+                    var content = await response.Content.ReadAsStringAsync(token);
+                    var data = JsonSerializer.Deserialize<TOutput>(content, _jsonOptions);
+                    emit(data);
+
+                    // If we succeed, break out of the retry loop
+                    break;
+                }
+                catch (Exception ex)
+                {
+                    attempt++;
+                    if (attempt > _maxRetries)
+                    {
+                        // We exceeded maximum retries; optionally log or re-throw
+                        Console.WriteLine($"HttpSourceOperator: Exhausted retries for endpoint {_endpoint}. Error: {ex.Message}");
+                        break;
+                    }
+
+                    // Exponential backoff
+                    Console.WriteLine($"HttpSourceOperator: Error calling HTTP endpoint (attempt {attempt} of {_maxRetries}). Retrying in {delay}. Error: {ex.Message}");
+                    await Task.Delay(delay, token);
+
+                    // Increase the delay
+                    delay = TimeSpan.FromMilliseconds(delay.TotalMilliseconds * 2);
+                }
+            }
+        }
+    }
+}
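
A minimal end-to-end sketch of the new operators follows. It assumes a hypothetical WeatherReading payload type and example URLs, and wires the HTTP source straight into the HTTP sink through the emit callback; in a real application the operators would normally be attached to a Cortex stream pipeline rather than driven by hand.

    using System;
    using Cortex.Streams.Http;

    // Hypothetical payload type used only for this sketch.
    public class WeatherReading
    {
        public string City { get; set; }
        public double Temperature { get; set; }
    }

    public static class HttpOperatorsSketch
    {
        public static void Main()
        {
            // Poll an example endpoint every 30 seconds and deserialize the JSON body.
            var source = new HttpSourceOperator<WeatherReading>(
                endpoint: "https://example.com/api/weather/latest",
                pollInterval: TimeSpan.FromSeconds(30));

            // Forward each reading to another example endpoint as JSON, with retries.
            var sink = new HttpSinkOperator<WeatherReading>(
                endpoint: "https://example.com/api/weather/archive",
                maxRetries: 5);

            sink.Start();
            source.Start(reading => sink.Process(reading)); // emit callback feeds the sink

            Console.WriteLine("Polling... press Enter to stop.");
            Console.ReadLine();

            source.Stop();
            sink.Stop();
        }
    }

The synchronous HttpSinkOperator blocks on each POST; for higher throughput, the queue-backed HttpSinkOperatorAsync accepts the same constructor arguments and can be swapped in without other changes.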