diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 0000000..f91f646 --- /dev/null +++ b/.gitattributes @@ -0,0 +1,12 @@ +# +# https://help.github.com/articles/dealing-with-line-endings/ +# +# Linux start script should use lf +/gradlew text eol=lf + +# These are Windows script files and should use crlf +*.bat text eol=crlf + +# Binary files should be left untouched +*.jar binary + diff --git a/.gitignore b/.gitignore index d4fb281..898b0ad 100644 --- a/.gitignore +++ b/.gitignore @@ -1,41 +1,38 @@ -# Prerequisites -*.d - -# Compiled Object files -*.slo -*.lo -*.o -*.obj - -# Precompiled Headers -*.gch -*.pch - -# Linker files -*.ilk - -# Debugger Files -*.pdb - -# Compiled Dynamic libraries -*.so -*.dylib -*.dll - -# Fortran module files -*.mod -*.smod - -# Compiled Static libraries -*.lai -*.la -*.a -*.lib - -# Executables -*.exe -*.out -*.app - -# debug information files -*.dwo +# Java build artifacts +*.class +*.log +out/ +build/ + +# VS Code settings (keep launch.json) + +# OS-specific files +.DS_Store +Thumbs.db + +# ignore lib folder except .jar files +libs/* + +# ignore Gradle project-specific cache directory +.gradle + +# ignore Gradle build output directory +build + + +# ignore resources +resources/project/ + +# ignore the presentation and report +project_presentation +project_report + +# ignore the polypheny-all.jar and app.jar files +app\build\libs + +# ignore the matlab add-on folder structure +matlab-polypheny-connector/jar/polypheny-all.jar +matlab-polypheny-connector/resources +matlab-polypheny-connector/libs/polypheny-jdbc-driver-2.3.jar +matlab-polypheny-connector/Toolbox1/PolyphenyConnector.mltbx +matlab-polypheny-connector/Toolbox1/release/*.mltbx diff --git a/.vscode/Polypheny-Style.xml b/.vscode/Polypheny-Style.xml new file mode 100644 index 0000000..e8365c4 --- /dev/null +++ b/.vscode/Polypheny-Style.xml @@ -0,0 +1,380 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/.vscode/launch.json b/.vscode/launch.json new file mode 100644 index 0000000..3865e57 --- /dev/null +++ b/.vscode/launch.json @@ -0,0 +1,24 @@ +{ + "version": "0.2.0", + "configurations": [ + { + "type": "java", + "name": "Main", + "request": "launch", + "mainClass": "Main", + "projectName": "Matlab_Connector" + }, + { + "type": "java", + "name": "PolyphenyConnection", + "request": "launch", + "mainClass": "PolyphenyConnection", + "projectName": "Learning Contract_7fe425cb", + "vmArgs": "-Djava.library.path=C:\\Programme\\MATLAB\\R2025a\\bin\\win64", + "env": { + "PATH": "C:\\Programme\\MATLAB\\R2025a\\bin\\win64;C:\\Programme\\MATLAB\\R2025a\\extern\\bin\\win64;${env:PATH}" + }, + "console": "internalConsole" + } + ] +} diff --git a/.vscode/settings.json b/.vscode/settings.json new file mode 100644 index 0000000..282e471 --- /dev/null +++ b/.vscode/settings.json @@ -0,0 +1,48 @@ +{ + "java.project.sourcePaths": [ + "app/src/main/java" + ], + "java.project.referencedLibraries": [], + "cSpell.words": [ + "Afterall", + "disp", + "elems", + "empid", + "emps", + "HASPOLYPHENY", + "Insertand", + "iscell", + "ischar", + "isscalar", + "istable", + "JDBC", + "johnrengelman", + "Matlabtoolbox", + "mongoql", + "mongotest", + "mtlbx", + "Multimodel", + "multiquery", + "myexecutor", + 
"mypath", + "Polpyheny's", + "polyconnection", + "Polypheny", + "polyphenyconnector", + "POLYWRAPPER", + "QUERYLIST", + "QUERYSTR", + "stringbuilder", + "varargin", + "VARCHAR", + "wrongpass", + "wronguser" + ], + "java.format.enabled": true, + "java.format.settings.url": ".vscode/Polypheny-Style.xml", + "java.format.settings.profile": "Polypheny Code Style", + "[java]": { + "editor.defaultFormatter": "redhat.java", + "editor.formatOnSave": true + } +} diff --git a/LICENSE b/LICENSE index 261eeb9..845ef47 100644 --- a/LICENSE +++ b/LICENSE @@ -186,7 +186,7 @@ same "printed page" as the copyright notice for easier identification within third-party archives. - Copyright [yyyy] [name of copyright owner] + Copyright [2025] [Fynn Gohlke] Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. diff --git a/Polypheny_Connector.prj b/Polypheny_Connector.prj new file mode 100644 index 0000000..6b95f98 --- /dev/null +++ b/Polypheny_Connector.prj @@ -0,0 +1,2 @@ + + diff --git a/app/bin/main/.gitignore b/app/bin/main/.gitignore new file mode 100644 index 0000000..b733afd --- /dev/null +++ b/app/bin/main/.gitignore @@ -0,0 +1,14 @@ +# Java build artifacts +*.class +*.log +out/ +build/ + +# VS Code settings (keep launch.json) + +# OS-specific files +.DS_Store +Thumbs.db + +# Ignore lib folder except .jar files +lib/* \ No newline at end of file diff --git a/app/bin/main/ClassTest.m b/app/bin/main/ClassTest.m new file mode 100644 index 0000000..47f32fc --- /dev/null +++ b/app/bin/main/ClassTest.m @@ -0,0 +1,9 @@ +clear all; clear classes +%conn = javaObject('polyphenyconnector.PolyphenyConnection', 'localhost', int32(20590), 'pa', ''); +%exec = javaObject('polyphenyconnector.QueryExecutor', conn); + +%res = exec.execute('sql', 'SELECT 1 AS x'); +results = runtests('PolyphenyWrapperTest'); +disp(results) + +%disp(res); diff --git a/app/bin/main/LICENSE b/app/bin/main/LICENSE new file mode 100644 index 
0000000..261eeb9 --- /dev/null +++ b/app/bin/main/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. 
Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative 
Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/app/bin/main/Polypheny.m b/app/bin/main/Polypheny.m new file mode 100644 index 0000000..dfd0721 --- /dev/null +++ b/app/bin/main/Polypheny.m @@ -0,0 +1,138 @@ +classdef Polypheny < handle +% POLYPHENY MATLAB wrapper for the Polypheny Java connector. Wraps polyphenyconnector.PolyphenyConnection +% and polyphenyconnector.QueryExecutor to run queries from MATLAB + properties ( Access = private ) + polyConnection % Java PolyphenyConnection + queryExecutor % Java QueryExecutor + + end + + methods + + function PolyWrapper = Polypheny( host, port, user, password ) + % Polypheny( LANGUAGE, HOST, PORT, USER, PASSWORD ): Set up Java connection + executor + % LANGUAGE: The database language ( 'sql', 'mongo', 'cypher' ) + % HOST: Database host ( e.g. 
'localhost' ) + % PORT: Database port ( integer ) + % USER: Username + % PASSWORD: Password + + % This makes sure that Matlab sees Java classes supplied by the .jar files in the Matlabtoolbox PolyphenyConnector.mtlbx + try + if ~polypheny.Polypheny.hasPolypheny( ) + startup( ); + end + PolyWrapper.polyConnection = javaObject( "polyphenyconnector.PolyphenyConnection",host, int32( port ), user, password ); + PolyWrapper.queryExecutor = javaObject( "polyphenyconnector.QueryExecutor", PolyWrapper.polyConnection ); + + catch ME %Matlab Exception + disp( "Error: " + ME.message ) + end + + end + + + + function matlab_result = query( PolyWrapper, language, namespace, queryStr ) + % query( POLYWRAPPER, QUERYSTR ): Execute query via QueryExecutor.java + % POLYWRAPPER: The PolyWrapper Matlab object + % LANGUAGE: The language of the query string -> SQL, mongo, Cypher + % QUERYSTR: The queryStr set by the user + % @return matlab_result: The result of the query -> return type differs for SQL,Mongo and Cypher + + + try + java_result = PolyWrapper.queryExecutor.execute( string( language ), string( namespace ), queryStr ); + + switch lower( language ) + case "sql" + if isempty( java_result ) + matlab_result = []; + elseif isscalar( java_result ) + matlab_result = java_result; + elseif isa( java_result,'java.lang.Object[]' ) && numel( java_result )==2 + tmp = cell( java_result ); + colNames = cell( tmp{1} ); + data = cell( tmp{2} ); + matlab_result = cell2table( data, 'VariableNames', colNames ); + else + matlab_result = []; + end + + case "mongo" + if isa( java_result, 'java.util.List' ) + % Current driver behavior: always returns List of JSON docs + matlab_result = string(java_result); + elseif isnumeric( java_result ) + % Not observed in current driver, but kept for forward compatibility + % (e.g. 
if Polypheny ever returns scalar counts directly) + matlab_result = java_result; + else + error( "Unexpected Mongo result type: %s", class( java_result ) ); + end + + case "cypher" + % TODO: integrate once Cypher executor is ready + error( "Cypher not supported yet." ); + + otherwise + error( "Unsupported language: %s", language ); + end + + catch ME + error( "Query execution failed: %s", ME.message ); + end + end + + function matlab_result = queryBatch( PolyWrapper, language, namespace, queryList ) + % queryBatch( POLYWRAPPER, QUERYLIST ): Execute batch of non-SELECT statements + % QUERYLIST: A cell array of SQL strings ( INSERT, UPDATE, DELETE, etc. ) + % + % Returns: int array with rows affected per statement + + if ~iscell( queryList ) + error( 'queryBatch expects a cell array of query strings' ); + end + + javaList = java.util.ArrayList(); + for i = 1:numel( queryList ) + javaList.add( string(queryList{i} ) ); + end + + switch lower(language) + case "sql" + java_result = PolyWrapper.queryExecutor.executeBatchSql( javaList ); + %matlab_result = double(java_result(:))'; + vals = double(java_result(:)); % convert Java int[] to MATLAB column vector + matlab_result = array2table(vals, 'VariableNames', {'RowsAffected'}); + + case "mongo" + java_result = PolyWrapper.queryExecutor.executeBatchMongo( string(namespace), javaList ); + matlab_result = string( java_result ); % outer list + + case "cypher" + error( "Batch execution for Cypher not yet implemented." 
); + + otherwise + error( "Unsupported language: %s", language ); + end + + end + + function close( PolyWrapper ) + % close( POLYWRAPPER ): Close the Java connection + % POLYWRAPPER: The PolyWrapper Matlab object + PolyWrapper.polyConnection.close( ); + end + end + + methods ( Static ) + function flag = hasPolypheny( ) + % HASPOLYPHENY Returns true if Polypheny Java classes are available because the exist( 'polyphenyconnector.PolyphenyConnection','class' ) + % returns 8 if Matlab sees the Java class and 0 otherwise. + flag = ( exist( 'polyphenyconnector.PolyphenyConnection','class' ) == 8 ); + end + + end + +end diff --git a/app/bin/main/PolyphenyWrapperTest.m b/app/bin/main/PolyphenyWrapperTest.m new file mode 100644 index 0000000..68b1909 --- /dev/null +++ b/app/bin/main/PolyphenyWrapperTest.m @@ -0,0 +1,69 @@ +classdef PolyphenyWrapperTest < matlab.unittest.TestCase + properties + conn + end + + methods (TestMethodSetup) + function setupConnection(testCase) + testCase.conn = polypheny.Polypheny('localhost', int32(20590), 'pa', '' ); + end + end + + methods (TestMethodTeardown) + function closeConnection(testCase) + testCase.conn.close(); + end + end + + methods (Test) + function testScalar(testCase) + r = testCase.conn.query( "sql" , "" , "SELECT 1 AS x"); + testCase.verifyEqual(r, 1); + end + + function testTable(testCase) + testCase.conn.query("sql" , "" , "DROP TABLE IF EXISTS wrapper_test"); + testCase.conn.query("sql" , "" , "CREATE TABLE wrapper_test (id INTEGER PRIMARY KEY, name VARCHAR)"); + testCase.conn.query("sql" , "" , "INSERT INTO wrapper_test VALUES (1,'Alice'),(2,'Bob')"); + + T = testCase.conn.query("sql" , "" , "SELECT * FROM wrapper_test ORDER BY id"); + + if istable(T) + % Expected: table output with column "name" + testCase.verifyEqual(T.name, {'Alice'; 'Bob'}); + elseif iscell(T) + % Fallback: check the raw cell contents + testCase.verifyEqual(T(:,2), {'Alice','Bob'}'); + else + testCase.verifyFail("Unexpected return type: " + class(T)); + 
end + end + + function testEmpty(testCase) + T = testCase.conn.query("sql" , "" , "SELECT * FROM wrapper_test WHERE id=999"); + testCase.verifyEmpty(T); + end + + function testBatchInsert(testCase) + % Prepare table + testCase.conn.query("sql" , "" , "DROP TABLE IF EXISTS batch_test"); + testCase.conn.query("sql" , "" , "CREATE TABLE batch_test (id INTEGER PRIMARY KEY, name VARCHAR)"); + + % Batch insert 2 rows + queries = { ... + "INSERT INTO batch_test VALUES (1,'Alice')", ... + "INSERT INTO batch_test VALUES (2,'Bob')" ... + }; + result = testCase.conn.queryBatch("sql" , "" , queries); + + % Verify JDBC return codes + testCase.verifyEqual(result.RowsAffected', [1 1]); + + % Verify table contents + T = testCase.conn.query("sql" , "" ,"SELECT id, name FROM batch_test ORDER BY id"); + testCase.verifyEqual(T.id, [1; 2]); + testCase.verifyEqual(string(T.name), ["Alice"; "Bob"]); + end + + end +end \ No newline at end of file diff --git a/app/bin/main/PolyphenyWrapperTestMQL.m b/app/bin/main/PolyphenyWrapperTestMQL.m new file mode 100644 index 0000000..e992fc1 --- /dev/null +++ b/app/bin/main/PolyphenyWrapperTestMQL.m @@ -0,0 +1,245 @@ +classdef PolyphenyWrapperTestMQL < matlab.unittest.TestCase + properties + conn + end + + methods(TestClassSetup) + function setUpNamespaceAndCollection(testCase) + clc; + % open connection once + testCase.conn = polypheny.Polypheny("localhost",20590,"pa",""); + + % try create collection + try + testCase.conn.query("mongo","mongotest", ... + 'db.createCollection("unittest_collection")'); + catch + end + end + end + + methods(TestClassTeardown) + function tearDownNamespaceAndCollection(testCase) + try + testCase.conn.query("mongo","mongotest", ... + 'db.unittest_collection.drop()'); + catch + end + testCase.conn.close(); + end + end + + methods(TestMethodSetup) + function clearCollection(testCase) + try + testCase.conn.query("mongo","mongotest", ... 
+ 'db.unittest_collection.deleteMany({})'); + catch + end + end + end + + methods(TestMethodTeardown) + function clearCollectionAfter(testCase) + try + testCase.conn.query("mongo","mongotest", ... + 'db.unittest_collection.deleteMany({})'); + catch + end + end + end + + methods(Test) + + function testDeleteManyRemovesAllDocs(testCase) + % Drop & recreate collection + testCase.conn.query("mongo","mongotest", 'db.unittest_collection.drop()'); + testCase.conn.query("mongo","mongotest", 'db.createCollection("unittest_collection")'); + + % Insert three documents + testCase.conn.query("mongo","mongotest", 'db.unittest_collection.insertOne({"id":1,"name":"Alice"})'); + testCase.conn.query("mongo","mongotest", 'db.unittest_collection.insertOne({"id":2,"name":"Bob"})'); + testCase.conn.query("mongo","mongotest", 'db.unittest_collection.insertOne({"id":3,"name":"Ciri"})'); + + % Call deleteMany({}) + ack = testCase.conn.query("mongo","mongotest", 'db.unittest_collection.deleteMany({})'); + disp("Ack from deleteMany:"); + disp(ack); + + % Verify collection is empty + docs = testCase.conn.query("mongo","mongotest", 'db.unittest_collection.find({})'); + docs = jsondecode(docs); + testCase.verifyEmpty(docs, "Collection should be empty after deleteMany({})"); + end + + function testInsertManyAndNestedDocument(testCase) + testCase.conn.query("mongo","mongotest",'db.unittest_collection.insertOne({"age":14})'); + testCase.conn.query("mongo","mongotest",'db.unittest_collection.insertOne({"age":20})'); + testCase.conn.query("mongo","mongotest",'db.unittest_collection.insertOne({"age":24})'); + testCase.conn.query("mongo","mongotest",'db.unittest_collection.insertOne({"age":30,"adress":{"Country":"Switzerland","Code":4051}})'); + docs = testCase.conn.query("mongo","mongotest",'db.unittest_collection.find({"age":{$gt:29}})'); + disp(docs) + decoded = jsondecode(docs); + disp(decoded) + testCase.verifyEqual(numel(docs),1); + testCase.verifyTrue(contains(docs(1),'"age":30')); + end + + 
function testBooleanField(testCase) + testCase.conn.query("mongo","mongotest",'db.unittest_collection.insertOne({"flag":true})'); + docs = testCase.conn.query("mongo","mongotest",'db.unittest_collection.find({})'); + disp(docs) + decoded = jsondecode(docs); + disp(decoded) + testCase.verifyTrue(contains(docs(1),'"flag":true')); + testCase.verifyClass(decoded.flag, 'logical'); % asserts that class(decoded.flag) == logical + end + + function testIntegerAgeField(testCase) + testCase.conn.query("mongo","mongotest",'db.unittest_collection.insertOne({"age":42})'); + docs = testCase.conn.query("mongo","mongotest",'db.unittest_collection.find({})'); + testCase.verifyEqual(numel(docs),1); + testCase.verifyTrue(contains(docs(1),'"age":42')); + end + + function testStringField(testCase) + testCase.conn.query("mongo","mongotest",'db.unittest_collection.insertOne({"name":"Alice"})'); + docs = testCase.conn.query("mongo","mongotest",'db.unittest_collection.find({})'); + testCase.verifyEqual(numel(docs),1); + testCase.verifyTrue(contains(docs(1),'"name":"Alice"')); + end + + function testLongField(testCase) + testCase.conn.query("mongo","mongotest",'db.unittest_collection.insertOne({"big":1111111111111111111})'); + docs = testCase.conn.query("mongo","mongotest",'db.unittest_collection.find({})'); + testCase.verifyEqual(numel(docs),1); + testCase.verifyTrue(contains(docs(1),'"big":1111111111111111111')); + end + + function testDoubleField(testCase) + testCase.conn.query("mongo","mongotest",'db.unittest_collection.insertOne({"pi":3.14159})'); + docs = testCase.conn.query("mongo","mongotest",'db.unittest_collection.find({})'); + testCase.verifyEqual(numel(docs),1); + testCase.verifyTrue(contains(docs(1),'"pi":3.14159')); + end + + function testInsertAndQueryTwoDocsRawJson(testCase) + % Clean collection + testCase.conn.query("mongo","mongotest",'db.unittest_collection.drop()'); + testCase.conn.query("mongo","mongotest",'db.createCollection("unittest_collection")'); + + % Insert two 
docs + testCase.conn.query("mongo","mongotest", ... + 'db.unittest_collection.insertOne({"id":1,"name":"Alice"})'); + testCase.conn.query("mongo","mongotest", ... + 'db.unittest_collection.insertOne({"id":2,"name":"Bob"})'); + + % Query back + docs = testCase.conn.query("mongo","mongotest",'db.unittest_collection.find({})'); + disp("Raw JSON:"); + disp(docs); + decoded = jsondecode(docs); + disp(decoded) + + % Assert raw JSON is exactly what we want + testCase.verifyTrue(contains(docs(1),'"name":"Alice"')); + testCase.verifyTrue(contains(docs(1),'"name":"Bob"')); + end + + + function testCountDocuments(testCase) + testCase.conn.query("mongo","mongotest",'db.unittest_collection.insertOne({"name":"Bob"})'); + docs = testCase.conn.query("mongo","mongotest",'db.unittest_collection.countDocuments({})'); + disp(docs) + testCase.verifyEqual(numel(docs),1); + testCase.verifyTrue(contains(docs(1),'{"count":1}')); + end + + function testArrayField(testCase) + testCase.conn.query("mongo","mongotest",'db.unittest_collection.insertOne({"scores":[1,2,3]})'); + docs = testCase.conn.query("mongo","mongotest",'db.unittest_collection.find({})'); + disp(docs) + testCase.verifyEqual(numel(docs),1); + testCase.verifyTrue(contains(docs,'"scores":[1,2,3]')); + end + + function testFindOnEmptyCollection(testCase) + docs = testCase.conn.query("mongo","mongotest",'db.unittest_collection.find({})'); + disp(docs) + testCase.verifyEqual(docs,"[]"); + end + + function testInsertManyAndFindMultiple(testCase) + testCase.conn.query("mongo","mongotest",'db.unittest_collection.insertOne({"id":10,"name":"A"})'); + testCase.conn.query("mongo","mongotest",'db.unittest_collection.insertOne({"id":11,"name":"B"})'); + docs = testCase.conn.query("mongo","mongotest",'db.unittest_collection.find({})'); + disp(docs) + testCase.verifyTrue(contains(docs,'"id":10')) + testCase.verifyTrue(contains(docs,'"name":"A"')) + testCase.verifyTrue(contains(docs,'"id":11')) + 
testCase.verifyTrue(contains(docs,'"name":"B"')) + end + + function testBatchInsertAndFind(testCase) + queries = { ... + 'db.unittest_collection.insertOne({"name":"Alice","age":25})', ... + 'db.unittest_collection.insertOne({"name":"Alice","age":20})', ... + 'db.unittest_collection.insertOne({"name":"Bob","age":30})' }; + ignore = testCase.conn.queryBatch("mongo","mongotest",queries); + queries2 = { ... + 'db.unittest_collection.find({"name":"Alice"})', ... + 'db.unittest_collection.find({"name":"Alice","age":20})', ... + 'db.unittest_collection.find({"name":"Bob","age":30})' }; + docs = testCase.conn.queryBatch("mongo","mongotest", queries2); + disp(docs) + decoded = jsondecode(docs); + disp(decoded) + testCase.verifyEqual(numel(decoded{1}), 2); % 2 docs in first query + + % check names + names = {decoded{1}.name}; % cell of names + disp(names) + testCase.verifyEqual(string(names), ["Alice","Alice"]); + + end + + + function testBatchMixedOps(testCase) + queries = { ... + 'db.unittest_collection.insertOne({"name":"Charlie","active":true})', ... + 'db.unittest_collection.countDocuments({})' }; + docs = testCase.conn.queryBatch("mongo","mongotest",queries); + testCase.verifyEqual(numel(docs),1); + decoded = jsondecode(docs) + varname = fieldnames(decoded{2}) + disp(decoded{2}.count) + testCase.verifyTrue(decoded{2}.count==1); + end + + function testSyntaxErrorThrows(testCase) + badQuery = 'db.unittest_collection.insertOne({"foo":123)'; % invalid JSON + testCase.verifyError(@() testCase.conn.query("mongo","mongotest",badQuery),?MException); + end + + function testMultiStatementFails(testCase) + badMulti = [ ... + 'db.people.insertOne({"name":"Alice","age":20}); ' ... + 'db.people.insertOne({"name":"Bob","age":24}); ' ... + 'db.people.find({})' ]; + testCase.verifyError(@() testCase.conn.query("mongo","mongotest",badMulti),?MException); + end + + function testBatchRollback(testCase) + queries = { ... + 'db.unittest_collection.insertOne({"id":1,"name":"Alice"})', ... 
+ 'db.unittest_collection.insertOne({"id":2,"name":"Bob"})', ... + 'db.unittest_collection.insertOne({"id":3,"name":"Janice"' }; % broken + testCase.verifyError(@() testCase.conn.queryBatch("mongo","mongotest",queries),?MException); + docs = testCase.conn.query("mongo","mongotest",'db.unittest_collection.find({})'); + disp(docs) + testCase.verifyEqual(numel(docs),1); + testCase.verifyEqual(docs,"[]") + end + + end +end diff --git a/app/bin/main/PolyphenyWrapperTestSQL.m b/app/bin/main/PolyphenyWrapperTestSQL.m new file mode 100644 index 0000000..0b25e6b --- /dev/null +++ b/app/bin/main/PolyphenyWrapperTestSQL.m @@ -0,0 +1,220 @@ +classdef PolyphenyWrapperTestSQL < matlab.unittest.TestCase + properties + conn % polypheny.Polypheny wrapper + end + + methods(TestClassSetup) + function setUpNamespaceAndTable(testCase) + clc; + % Open connection once for all tests + testCase.conn = polypheny.Polypheny("localhost",20590,"pa",""); + + % Drop leftovers if they exist + try + testCase.conn.query("sql","unittest_namespace", ... + "DROP TABLE IF EXISTS unittest_namespace.unittest_table"); + testCase.conn.query("sql","unittest_namespace", ... + "DROP NAMESPACE IF EXISTS unittest_namespace"); + catch + end + + % Create namespace + table for execute() + testCase.conn.query("sql","", ... + "CREATE NAMESPACE unittest_namespace"); + testCase.conn.query("sql","unittest_namespace", ... + "CREATE TABLE unittest_namespace.unittest_table (id INT NOT NULL, name VARCHAR(100), PRIMARY KEY(id))"); + + % Drop and recreate batch_table + try + testCase.conn.query("sql","unittest_namespace", ... + "DROP TABLE IF EXISTS unittest_namespace.batch_table"); + catch + end + testCase.conn.query("sql","unittest_namespace", ... + "CREATE TABLE unittest_namespace.batch_table (" + ... + "emp_id INT NOT NULL, " + ... + "name VARCHAR(100), " + ... + "gender VARCHAR(10), " + ... + "birthday DATE, " + ... + "employee_id INT, " + ... 
+ "PRIMARY KEY(emp_id))"); + end + end + + methods(TestClassTeardown) + function tearDownNamespaceAndTable(testCase) + % Cleanup after all tests + try + testCase.conn.query("sql","unittest_namespace", ... + "DROP TABLE IF EXISTS unittest_namespace.unittest_table"); + testCase.conn.query("sql","unittest_namespace", ... + "DROP TABLE IF EXISTS unittest_namespace.batch_table"); + testCase.conn.query("sql","", ... + "DROP NAMESPACE IF EXISTS unittest_namespace"); + catch + end + testCase.conn.close(); + end + end + + methods(TestMethodSetup) + function clearTables(testCase) + % Clear before each test + try + testCase.conn.query("sql","unittest_namespace", ... + "DELETE FROM unittest_namespace.unittest_table"); + testCase.conn.query("sql","unittest_namespace", ... + "DELETE FROM unittest_namespace.batch_table"); + catch + end + end + end + + methods(TestMethodTeardown) + function clearTablesAfter(testCase) + % Clear again after each test + try + testCase.conn.query("sql","unittest_namespace", ... + "DELETE FROM unittest_namespace.unittest_table"); + testCase.conn.query("sql","unittest_namespace", ... 
+ "DELETE FROM unittest_namespace.batch_table"); + catch + end + end + end + + methods(Test) + function testScalarLiteral(testCase) + r = testCase.conn.query("sql","","SELECT 42 AS answer"); + testCase.verifyEqual(r,42); + end + + function testEmptyLiteral(testCase) + r = testCase.conn.query("sql","","SELECT * FROM (SELECT 1) t WHERE 1=0"); + testCase.verifyEmpty(r); + end + + function testTableLiteral(testCase) + r = testCase.conn.query("sql","unittest_namespace","SELECT 1 AS a, 2 AS b UNION ALL SELECT 3,4"); + testCase.verifyTrue(istable(r)); + testCase.verifyEqual(r.Properties.VariableNames,{'a','b'}); + testCase.verifyEqual(height(r),2); + testCase.verifyEqual(table2cell(r(1,:)),{1,2}); + testCase.verifyEqual(table2cell(r(2,:)),{3,4}); + end + + function testInsert(testCase) + r = testCase.conn.query("sql","unittest_namespace","INSERT INTO unittest_namespace.unittest_table VALUES (1,'Alice')"); + testCase.verifyEqual(r,1); + end + + function testInsertAndSelect(testCase) + testCase.conn.query("sql","unittest_namespace","INSERT INTO unittest_namespace.unittest_table VALUES (1,'Alice')"); + r = testCase.conn.query("sql","unittest_namespace","SELECT id,name FROM unittest_namespace.unittest_table"); + testCase.verifyTrue(istable(r)); + testCase.verifyEqual(r.Properties.VariableNames,{'id','name'}); + testCase.verifyEqual(height(r),1); + testCase.verifyEqual(table2cell(r),{1,'Alice'}); + end + + function testScalarFromTable(testCase) + testCase.conn.query("sql","unittest_namespace","INSERT INTO unittest_namespace.unittest_table VALUES (2,'Carol')"); + r = testCase.conn.query("sql","unittest_namespace","SELECT id FROM unittest_namespace.unittest_table WHERE name='Carol'"); + testCase.verifyEqual(r,2); + end + + function testInsertAndSelectMultipleRows(testCase) + testCase.conn.query("sql","unittest_namespace","INSERT INTO unittest_namespace.unittest_table VALUES (1,'Alice')"); + testCase.conn.query("sql","unittest_namespace","INSERT INTO 
unittest_namespace.unittest_table VALUES (2,'Bob')"); + r = testCase.conn.query("sql","unittest_namespace","SELECT id,name FROM unittest_namespace.unittest_table ORDER BY id"); + testCase.verifyTrue(istable(r)); + testCase.verifyEqual(height(r),2); + testCase.verifyEqual(table2cell(r(1,:)),{1,'Alice'}); + testCase.verifyEqual(table2cell(r(2,:)),{2,'Bob'}); + end + + function testDeleteFromTable(testCase) + testCase.conn.query("sql","unittest_namespace","INSERT INTO unittest_namespace.unittest_table VALUES (2,'Bob')"); + testCase.conn.query("sql","unittest_namespace","DELETE FROM unittest_namespace.unittest_table"); + r = testCase.conn.query("sql","unittest_namespace","SELECT * FROM unittest_namespace.unittest_table"); + testCase.verifyEmpty(r); + end + + function testBatchInsertEmployees(testCase) + queries = { + "INSERT INTO unittest_namespace.batch_table VALUES (1,'Alice','F',DATE '1990-01-15',1001)" + "INSERT INTO unittest_namespace.batch_table VALUES (2,'Bob','M',DATE '1989-05-12',1002)" + "INSERT INTO unittest_namespace.batch_table VALUES (3,'Jane','F',DATE '1992-07-23',1003)" + "INSERT INTO unittest_namespace.batch_table VALUES (4,'Tim','M',DATE '1991-03-03',1004)" + "INSERT INTO unittest_namespace.batch_table VALUES (5,'Alex','M',DATE '1994-11-11',1005)" + "INSERT INTO unittest_namespace.batch_table VALUES (6,'Mason','M',DATE '1988-04-22',1006)" + "INSERT INTO unittest_namespace.batch_table VALUES (7,'Rena','F',DATE '1995-06-17',1007)" + "INSERT INTO unittest_namespace.batch_table VALUES (8,'Christopher','M',DATE '1987-08-09',1008)" + "INSERT INTO unittest_namespace.batch_table VALUES (9,'Lexi','F',DATE '1996-09-30',1009)" + "INSERT INTO unittest_namespace.batch_table VALUES (10,'Baen','M',DATE '1990-10-05',1010)" + "INSERT INTO unittest_namespace.batch_table VALUES (11,'Ricardo','M',DATE '1986-12-12',1011)" + "INSERT INTO unittest_namespace.batch_table VALUES (12,'Tim','M',DATE '1993-02-02',1012)" + "INSERT INTO unittest_namespace.batch_table VALUES 
(13,'Beya','F',DATE '1994-05-25',1013)" + }; + counts = testCase.conn.queryBatch("sql","unittest_namespace",queries); + testCase.verifyEqual(height(counts),13); + disp(counts) + testCase.verifyTrue(all(counts.RowsAffected == 1)); + r = testCase.conn.query("sql","unittest_namespace","SELECT COUNT(*) FROM unittest_namespace.batch_table"); + testCase.verifyEqual(r,13); + end + + function testBatchRollbackOnFailure(testCase) + queries = { + "INSERT INTO unittest_namespace.batch_table VALUES (1,'Alice','F',DATE '1990-01-15',1001)" + "BROKEN QUERY" + }; + testCase.verifyError(@() testCase.conn.queryBatch("sql","unittest_namespace",queries),?MException); + r = testCase.conn.query("sql","unittest_namespace","SELECT * FROM unittest_namespace.batch_table"); + testCase.verifyEmpty(r); + end + + function testSyntaxError(testCase) + testCase.verifyError(@() testCase.conn.query("sql","unittest_namespace","SELEC WRONG FROM nowhere"),?MException); + end + + + function testQueryWithSpaces(testCase) + % Insert with leading spaces + testCase.conn.query("sql","unittest_namespace", ... + " INSERT INTO unittest_namespace.unittest_table VALUES (1,'Alice')"); + testCase.conn.query("sql","unittest_namespace", ... + " INSERT INTO unittest_namespace.unittest_table VALUES (2,'Bob')"); + + r = testCase.conn.query("sql","unittest_namespace", ... + "SELECT id,name FROM unittest_namespace.unittest_table ORDER BY id"); + + testCase.verifyTrue(istable(r)); + testCase.verifyEqual(r.Properties.VariableNames,{'id','name'}); + testCase.verifyEqual(height(r),2); + testCase.verifyEqual(table2cell(r(1,:)),{1,'Alice'}); + testCase.verifyEqual(table2cell(r(2,:)),{2,'Bob'}); + end + + function testConnectionFailure(testCase) + testCase.verifyError(@() ... + polypheny.Polypheny("localhost",9999,"pa","").query("sql","unittest_namespace","SELECT 1"), ... 
+ ?MException); + end + + function testCommitFailureRollback(testCase) + queries = { + "INSERT INTO unittest_namespace.batch_table VALUES (1,'Alice','F',DATE '1990-01-15',1001)" + "Intentional nonsense to produce a failure" + }; + testCase.verifyError(@() ... + testCase.conn.queryBatch("sql","unittest_namespace",queries),?MException); + + r = testCase.conn.query("sql","unittest_namespace", ... + "SELECT * FROM unittest_namespace.batch_table"); + testCase.verifyEmpty(r); + end + + end + +end \ No newline at end of file diff --git a/README.md b/app/bin/main/README.md similarity index 100% rename from README.md rename to app/bin/main/README.md diff --git a/app/bin/main/startup.m b/app/bin/main/startup.m new file mode 100644 index 0000000..c4dad7b --- /dev/null +++ b/app/bin/main/startup.m @@ -0,0 +1,33 @@ +function startup + % Get root folder of the toolbox + root = fileparts(mfilename('fullpath')); + + % Paths to JARs + jarPaths = { ... + fullfile(root, 'jar', 'polypheny-all.jar'), ... + fullfile(root, 'libs', 'polypheny-jdbc-driver-2.3.jar') ... 
+ }; + + % Add JARs if not already on classpath + for i = 1:numel(jarPaths) + if ~any(strcmp(jarPaths{i}, javaclasspath('-all'))) + javaaddpath(jarPaths{i}); + end + end + + % Try to register the JDBC driver dynamically + try + %java.lang.Class.forName('org.polypheny.jdbc.PolyphenyDriver'); + driver = javaObject('org.polypheny.jdbc.PolyphenyDriver'); + java.sql.DriverManager.registerDriver(driver); + catch e + warning('Could not register Polypheny JDBC driver dynamically: %s', char(e.message)); + end + + % Add MATLAB namespace folder (+polypheny) + if exist(fullfile(root, '+polypheny'), 'dir') + addpath(root); + end + + disp('Polypheny connector initialized.'); +end diff --git a/app/build.gradle b/app/build.gradle new file mode 100644 index 0000000..8fbf696 --- /dev/null +++ b/app/build.gradle @@ -0,0 +1,44 @@ +plugins { + id 'application' + id 'com.github.johnrengelman.shadow' version '8.1.1' //necessary plugin to create the .jar file for matlab later +} + +application { mainClass = 'QuickTest' } // currently running QuickTest.java as application + +repositories { mavenCentral() } + +dependencies { + // SLF4J implementation to silence warnings + implementation 'org.slf4j:slf4j-simple:2.0.16' + + // Polypheny JDBC driver + implementation files(rootProject.file('libs/polypheny-jdbc-driver-2.3.jar')) + testImplementation files(rootProject.file('libs/polypheny-jdbc-driver-2.3.jar')) + + // Multimodel extension JAR (if you really need it) + runtimeOnly files(rootProject.file('libs/polypheny-jdbc-multimodel-2.3.jar')) + testRuntimeOnly files(rootProject.file('libs/polypheny-jdbc-multimodel-2.3.jar')) + + // JUnit 5 API + engine + testImplementation 'org.junit.jupiter:junit-jupiter-api:5.10.2' + testRuntimeOnly 'org.junit.jupiter:junit-jupiter-engine:5.10.2' +} + +shadowJar { + archiveBaseName.set('polypheny') // final jar name base + archiveClassifier.set('all') // adds -all suffix + archiveVersion.set('') // => app/build/libs/polypheny-all.jar +} + +java { + toolchain 
{ languageVersion = JavaLanguageVersion.of(8) } +} + +test { + useJUnitPlatform() + testLogging { + events "passed", "skipped", "failed" + } + outputs.upToDateWhen { false } +} + diff --git a/app/src/main/java/.gitignore b/app/src/main/java/.gitignore new file mode 100644 index 0000000..b733afd --- /dev/null +++ b/app/src/main/java/.gitignore @@ -0,0 +1,14 @@ +# Java build artifacts +*.class +*.log +out/ +build/ + +# VS Code settings (keep launch.json) + +# OS-specific files +.DS_Store +Thumbs.db + +# Ignore lib folder except .jar files +lib/* \ No newline at end of file diff --git a/app/src/main/java/ClassTest.m b/app/src/main/java/ClassTest.m new file mode 100644 index 0000000..47f32fc --- /dev/null +++ b/app/src/main/java/ClassTest.m @@ -0,0 +1,9 @@ +clear all; clear classes +%conn = javaObject('polyphenyconnector.PolyphenyConnection', 'localhost', int32(20590), 'pa', ''); +%exec = javaObject('polyphenyconnector.QueryExecutor', conn); + +%res = exec.execute('sql', 'SELECT 1 AS x'); +results = runtests('PolyphenyWrapperTest'); +disp(results) + +%disp(res); diff --git a/app/src/main/java/LICENSE b/app/src/main/java/LICENSE new file mode 100644 index 0000000..261eeb9 --- /dev/null +++ b/app/src/main/java/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/app/src/main/java/Main.java b/app/src/main/java/Main.java new file mode 100644 index 0000000..f7bb97e --- /dev/null +++ b/app/src/main/java/Main.java @@ -0,0 +1,22 @@ +import polyphenyconnector.PolyphenyConnection; +import polyphenyconnector.QueryExecutor; + +public class Main { + + public static void main( String[] args ) { + try { + String host = "localhost"; + int port = 205090; + String user = "pa"; + String pass = ""; + + PolyphenyConnection conn = new PolyphenyConnection( host, port, user, pass ); + QueryExecutor executor = new QueryExecutor( conn ); + executor.execute( "sql", "emps", "SELECT * FROM emps;" ); + conn.close(); + } catch ( Exception e ) { + e.printStackTrace(); + } + } + +} diff --git a/app/src/main/java/Polypheny.m b/app/src/main/java/Polypheny.m new file mode 100644 index 0000000..dfd0721 --- /dev/null +++ b/app/src/main/java/Polypheny.m @@ -0,0 +1,138 @@ +classdef Polypheny < handle +% POLYPHENY MATLAB wrapper for the Polypheny Java connector. 
Wraps polyphenyconnector.PolyphenyConnection +% and polyphenyconnector.QueryExecutor to run queries from MATLAB + properties ( Access = private ) + polyConnection % Java PolyphenyConnection + queryExecutor % Java QueryExecutor + + end + + methods + + function PolyWrapper = Polypheny( host, port, user, password ) + % Polypheny( LANGUAGE, HOST, PORT, USER, PASSWORD ): Set up Java connection + executor + % LANGUAGE: The database language ( 'sql', 'mongo', 'cypher' ) + % HOST: Database host ( e.g. 'localhost' ) + % PORT: Database port ( integer ) + % USER: Username + % PASSWORD: Password + + % This makes sure that Matlab sees Java classes supplied by the .jar files in the Matlabtoolbox PolyphenyConnector.mtlbx + try + if ~polypheny.Polypheny.hasPolypheny( ) + startup( ); + end + PolyWrapper.polyConnection = javaObject( "polyphenyconnector.PolyphenyConnection",host, int32( port ), user, password ); + PolyWrapper.queryExecutor = javaObject( "polyphenyconnector.QueryExecutor", PolyWrapper.polyConnection ); + + catch ME %Matlab Exception + disp( "Error: " + ME.message ) + end + + end + + + + function matlab_result = query( PolyWrapper, language, namespace, queryStr ) + % query( POLYWRAPPER, QUERYSTR ): Execute query via QueryExecutor.java + % POLYWRAPPER: The PolyWrapper Matlab object + % LANGUAGE: The language of the query string -> SQL, mongo, Cypher + % QUERYSTR: The queryStr set by the user + % @return matlab_result: The result of the query -> return type differs for SQL,Mongo and Cypher + + + try + java_result = PolyWrapper.queryExecutor.execute( string( language ), string( namespace ), queryStr ); + + switch lower( language ) + case "sql" + if isempty( java_result ) + matlab_result = []; + elseif isscalar( java_result ) + matlab_result = java_result; + elseif isa( java_result,'java.lang.Object[]' ) && numel( java_result )==2 + tmp = cell( java_result ); + colNames = cell( tmp{1} ); + data = cell( tmp{2} ); + matlab_result = cell2table( data, 'VariableNames', colNames 
); + else + matlab_result = []; + end + + case "mongo" + if isa( java_result, 'java.util.List' ) + % Current driver behavior: always returns List of JSON docs + matlab_result = string(java_result); + elseif isnumeric( java_result ) + % Not observed in current driver, but kept for forward compatibility + % (e.g. if Polypheny ever returns scalar counts directly) + matlab_result = java_result; + else + error( "Unexpected Mongo result type: %s", class( java_result ) ); + end + + case "cypher" + % TODO: integrate once Cypher executor is ready + error( "Cypher not supported yet." ); + + otherwise + error( "Unsupported language: %s", language ); + end + + catch ME + error( "Query execution failed: %s", ME.message ); + end + end + + function matlab_result = queryBatch( PolyWrapper, language, namespace, queryList ) + % queryBatch( POLYWRAPPER, QUERYLIST ): Execute batch of non-SELECT statements + % QUERYLIST: A cell array of SQL strings ( INSERT, UPDATE, DELETE, etc. ) + % + % Returns: int array with rows affected per statement + + if ~iscell( queryList ) + error( 'queryBatch expects a cell array of query strings' ); + end + + javaList = java.util.ArrayList(); + for i = 1:numel( queryList ) + javaList.add( string(queryList{i} ) ); + end + + switch lower(language) + case "sql" + java_result = PolyWrapper.queryExecutor.executeBatchSql( javaList ); + %matlab_result = double(java_result(:))'; + vals = double(java_result(:)); % convert Java int[] to MATLAB column vector + matlab_result = array2table(vals, 'VariableNames', {'RowsAffected'}); + + case "mongo" + java_result = PolyWrapper.queryExecutor.executeBatchMongo( string(namespace), javaList ); + matlab_result = string( java_result ); % outer list + + case "cypher" + error( "Batch execution for Cypher not yet implemented." 
); + + otherwise + error( "Unsupported language: %s", language ); + end + + end + + function close( PolyWrapper ) + % close( POLYWRAPPER ): Close the Java connection + % POLYWRAPPER: The PolyWrapper Matlab object + PolyWrapper.polyConnection.close( ); + end + end + + methods ( Static ) + function flag = hasPolypheny( ) + % HASPOLYPHENY Returns true if Polypheny Java classes are available because the exist( 'polyphenyconnector.PolyphenyConnection','class' ) + % returns 8 if Matlab sees the Java class and 0 otherwise. + flag = ( exist( 'polyphenyconnector.PolyphenyConnection','class' ) == 8 ); + end + + end + +end diff --git a/app/src/main/java/PolyphenyWrapperTest.m b/app/src/main/java/PolyphenyWrapperTest.m new file mode 100644 index 0000000..68b1909 --- /dev/null +++ b/app/src/main/java/PolyphenyWrapperTest.m @@ -0,0 +1,69 @@ +classdef PolyphenyWrapperTest < matlab.unittest.TestCase + properties + conn + end + + methods (TestMethodSetup) + function setupConnection(testCase) + testCase.conn = polypheny.Polypheny('localhost', int32(20590), 'pa', '' ); + end + end + + methods (TestMethodTeardown) + function closeConnection(testCase) + testCase.conn.close(); + end + end + + methods (Test) + function testScalar(testCase) + r = testCase.conn.query( "sql" , "" , "SELECT 1 AS x"); + testCase.verifyEqual(r, 1); + end + + function testTable(testCase) + testCase.conn.query("sql" , "" , "DROP TABLE IF EXISTS wrapper_test"); + testCase.conn.query("sql" , "" , "CREATE TABLE wrapper_test (id INTEGER PRIMARY KEY, name VARCHAR)"); + testCase.conn.query("sql" , "" , "INSERT INTO wrapper_test VALUES (1,'Alice'),(2,'Bob')"); + + T = testCase.conn.query("sql" , "" , "SELECT * FROM wrapper_test ORDER BY id"); + + if istable(T) + % Expected: table output with column "name" + testCase.verifyEqual(T.name, {'Alice'; 'Bob'}); + elseif iscell(T) + % Fallback: check the raw cell contents + testCase.verifyEqual(T(:,2), {'Alice','Bob'}'); + else + testCase.verifyFail("Unexpected return type: " 
+ class(T)); + end + end + + function testEmpty(testCase) + T = testCase.conn.query("sql" , "" , "SELECT * FROM wrapper_test WHERE id=999"); + testCase.verifyEmpty(T); + end + + function testBatchInsert(testCase) + % Prepare table + testCase.conn.query("sql" , "" , "DROP TABLE IF EXISTS batch_test"); + testCase.conn.query("sql" , "" , "CREATE TABLE batch_test (id INTEGER PRIMARY KEY, name VARCHAR)"); + + % Batch insert 2 rows + queries = { ... + "INSERT INTO batch_test VALUES (1,'Alice')", ... + "INSERT INTO batch_test VALUES (2,'Bob')" ... + }; + result = testCase.conn.queryBatch("sql" , "" , queries); + + % Verify JDBC return codes + testCase.verifyEqual(result.RowsAffected', [1 1]); + + % Verify table contents + T = testCase.conn.query("sql" , "" ,"SELECT id, name FROM batch_test ORDER BY id"); + testCase.verifyEqual(T.id, [1; 2]); + testCase.verifyEqual(string(T.name), ["Alice"; "Bob"]); + end + + end +end \ No newline at end of file diff --git a/app/src/main/java/PolyphenyWrapperTestMQL.m b/app/src/main/java/PolyphenyWrapperTestMQL.m new file mode 100644 index 0000000..e992fc1 --- /dev/null +++ b/app/src/main/java/PolyphenyWrapperTestMQL.m @@ -0,0 +1,245 @@ +classdef PolyphenyWrapperTestMQL < matlab.unittest.TestCase + properties + conn + end + + methods(TestClassSetup) + function setUpNamespaceAndCollection(testCase) + clc; + % open connection once + testCase.conn = polypheny.Polypheny("localhost",20590,"pa",""); + + % try create collection + try + testCase.conn.query("mongo","mongotest", ... + 'db.createCollection("unittest_collection")'); + catch + end + end + end + + methods(TestClassTeardown) + function tearDownNamespaceAndCollection(testCase) + try + testCase.conn.query("mongo","mongotest", ... + 'db.unittest_collection.drop()'); + catch + end + testCase.conn.close(); + end + end + + methods(TestMethodSetup) + function clearCollection(testCase) + try + testCase.conn.query("mongo","mongotest", ... 
+ 'db.unittest_collection.deleteMany({})'); + catch + end + end + end + + methods(TestMethodTeardown) + function clearCollectionAfter(testCase) + try + testCase.conn.query("mongo","mongotest", ... + 'db.unittest_collection.deleteMany({})'); + catch + end + end + end + + methods(Test) + + function testDeleteManyRemovesAllDocs(testCase) + % Drop & recreate collection + testCase.conn.query("mongo","mongotest", 'db.unittest_collection.drop()'); + testCase.conn.query("mongo","mongotest", 'db.createCollection("unittest_collection")'); + + % Insert three documents + testCase.conn.query("mongo","mongotest", 'db.unittest_collection.insertOne({"id":1,"name":"Alice"})'); + testCase.conn.query("mongo","mongotest", 'db.unittest_collection.insertOne({"id":2,"name":"Bob"})'); + testCase.conn.query("mongo","mongotest", 'db.unittest_collection.insertOne({"id":3,"name":"Ciri"})'); + + % Call deleteMany({}) + ack = testCase.conn.query("mongo","mongotest", 'db.unittest_collection.deleteMany({})'); + disp("Ack from deleteMany:"); + disp(ack); + + % Verify collection is empty + docs = testCase.conn.query("mongo","mongotest", 'db.unittest_collection.find({})'); + docs = jsondecode(docs); + testCase.verifyEmpty(docs, "Collection should be empty after deleteMany({})"); + end + + function testInsertManyAndNestedDocument(testCase) + testCase.conn.query("mongo","mongotest",'db.unittest_collection.insertOne({"age":14})'); + testCase.conn.query("mongo","mongotest",'db.unittest_collection.insertOne({"age":20})'); + testCase.conn.query("mongo","mongotest",'db.unittest_collection.insertOne({"age":24})'); + testCase.conn.query("mongo","mongotest",'db.unittest_collection.insertOne({"age":30,"adress":{"Country":"Switzerland","Code":4051}})'); + docs = testCase.conn.query("mongo","mongotest",'db.unittest_collection.find({"age":{$gt:29}})'); + disp(docs) + decoded = jsondecode(docs); + disp(decoded) + testCase.verifyEqual(numel(docs),1); + testCase.verifyTrue(contains(docs(1),'"age":30')); + end + + 
function testBooleanField(testCase) + testCase.conn.query("mongo","mongotest",'db.unittest_collection.insertOne({"flag":true})'); + docs = testCase.conn.query("mongo","mongotest",'db.unittest_collection.find({})'); + disp(docs) + decoded = jsondecode(docs); + disp(decoded) + testCase.verifyTrue(contains(docs(1),'"flag":true')); + testCase.verifyClass(decoded.flag, 'logical'); % asserts that class(decoded.flag) == logical + end + + function testIntegerAgeField(testCase) + testCase.conn.query("mongo","mongotest",'db.unittest_collection.insertOne({"age":42})'); + docs = testCase.conn.query("mongo","mongotest",'db.unittest_collection.find({})'); + testCase.verifyEqual(numel(docs),1); + testCase.verifyTrue(contains(docs(1),'"age":42')); + end + + function testStringField(testCase) + testCase.conn.query("mongo","mongotest",'db.unittest_collection.insertOne({"name":"Alice"})'); + docs = testCase.conn.query("mongo","mongotest",'db.unittest_collection.find({})'); + testCase.verifyEqual(numel(docs),1); + testCase.verifyTrue(contains(docs(1),'"name":"Alice"')); + end + + function testLongField(testCase) + testCase.conn.query("mongo","mongotest",'db.unittest_collection.insertOne({"big":1111111111111111111})'); + docs = testCase.conn.query("mongo","mongotest",'db.unittest_collection.find({})'); + testCase.verifyEqual(numel(docs),1); + testCase.verifyTrue(contains(docs(1),'"big":1111111111111111111')); + end + + function testDoubleField(testCase) + testCase.conn.query("mongo","mongotest",'db.unittest_collection.insertOne({"pi":3.14159})'); + docs = testCase.conn.query("mongo","mongotest",'db.unittest_collection.find({})'); + testCase.verifyEqual(numel(docs),1); + testCase.verifyTrue(contains(docs(1),'"pi":3.14159')); + end + + function testInsertAndQueryTwoDocsRawJson(testCase) + % Clean collection + testCase.conn.query("mongo","mongotest",'db.unittest_collection.drop()'); + testCase.conn.query("mongo","mongotest",'db.createCollection("unittest_collection")'); + + % Insert two 
docs + testCase.conn.query("mongo","mongotest", ... + 'db.unittest_collection.insertOne({"id":1,"name":"Alice"})'); + testCase.conn.query("mongo","mongotest", ... + 'db.unittest_collection.insertOne({"id":2,"name":"Bob"})'); + + % Query back + docs = testCase.conn.query("mongo","mongotest",'db.unittest_collection.find({})'); + disp("Raw JSON:"); + disp(docs); + decoded = jsondecode(docs); + disp(decoded) + + % Assert raw JSON is exactly what we want + testCase.verifyTrue(contains(docs(1),'"name":"Alice"')); + testCase.verifyTrue(contains(docs(1),'"name":"Bob"')); + end + + + function testCountDocuments(testCase) + testCase.conn.query("mongo","mongotest",'db.unittest_collection.insertOne({"name":"Bob"})'); + docs = testCase.conn.query("mongo","mongotest",'db.unittest_collection.countDocuments({})'); + disp(docs) + testCase.verifyEqual(numel(docs),1); + testCase.verifyTrue(contains(docs(1),'{"count":1}')); + end + + function testArrayField(testCase) + testCase.conn.query("mongo","mongotest",'db.unittest_collection.insertOne({"scores":[1,2,3]})'); + docs = testCase.conn.query("mongo","mongotest",'db.unittest_collection.find({})'); + disp(docs) + testCase.verifyEqual(numel(docs),1); + testCase.verifyTrue(contains(docs,'"scores":[1,2,3]')); + end + + function testFindOnEmptyCollection(testCase) + docs = testCase.conn.query("mongo","mongotest",'db.unittest_collection.find({})'); + disp(docs) + testCase.verifyEqual(docs,"[]"); + end + + function testInsertManyAndFindMultiple(testCase) + testCase.conn.query("mongo","mongotest",'db.unittest_collection.insertOne({"id":10,"name":"A"})'); + testCase.conn.query("mongo","mongotest",'db.unittest_collection.insertOne({"id":11,"name":"B"})'); + docs = testCase.conn.query("mongo","mongotest",'db.unittest_collection.find({})'); + disp(docs) + testCase.verifyTrue(contains(docs,'"id":10')) + testCase.verifyTrue(contains(docs,'"name":"A"')) + testCase.verifyTrue(contains(docs,'"id":11')) + 
testCase.verifyTrue(contains(docs,'"name":"B"')) + end + + function testBatchInsertAndFind(testCase) + queries = { ... + 'db.unittest_collection.insertOne({"name":"Alice","age":25})', ... + 'db.unittest_collection.insertOne({"name":"Alice","age":20})', ... + 'db.unittest_collection.insertOne({"name":"Bob","age":30})' }; + ignore = testCase.conn.queryBatch("mongo","mongotest",queries); + queries2 = { ... + 'db.unittest_collection.find({"name":"Alice"})', ... + 'db.unittest_collection.find({"name":"Alice","age":20})', ... + 'db.unittest_collection.find({"name":"Bob","age":30})' }; + docs = testCase.conn.queryBatch("mongo","mongotest", queries2); + disp(docs) + decoded = jsondecode(docs); + disp(decoded) + testCase.verifyEqual(numel(decoded{1}), 2); % 2 docs in first query + + % check names + names = {decoded{1}.name}; % cell of names + disp(names) + testCase.verifyEqual(string(names), ["Alice","Alice"]); + + end + + + function testBatchMixedOps(testCase) + queries = { ... + 'db.unittest_collection.insertOne({"name":"Charlie","active":true})', ... + 'db.unittest_collection.countDocuments({})' }; + docs = testCase.conn.queryBatch("mongo","mongotest",queries); + testCase.verifyEqual(numel(docs),1); + decoded = jsondecode(docs) + varname = fieldnames(decoded{2}) + disp(decoded{2}.count) + testCase.verifyTrue(decoded{2}.count==1); + end + + function testSyntaxErrorThrows(testCase) + badQuery = 'db.unittest_collection.insertOne({"foo":123)'; % invalid JSON + testCase.verifyError(@() testCase.conn.query("mongo","mongotest",badQuery),?MException); + end + + function testMultiStatementFails(testCase) + badMulti = [ ... + 'db.people.insertOne({"name":"Alice","age":20}); ' ... + 'db.people.insertOne({"name":"Bob","age":24}); ' ... + 'db.people.find({})' ]; + testCase.verifyError(@() testCase.conn.query("mongo","mongotest",badMulti),?MException); + end + + function testBatchRollback(testCase) + queries = { ... + 'db.unittest_collection.insertOne({"id":1,"name":"Alice"})', ... 
+ 'db.unittest_collection.insertOne({"id":2,"name":"Bob"})', ... + 'db.unittest_collection.insertOne({"id":3,"name":"Janice"' }; % broken + testCase.verifyError(@() testCase.conn.queryBatch("mongo","mongotest",queries),?MException); + docs = testCase.conn.query("mongo","mongotest",'db.unittest_collection.find({})'); + disp(docs) + testCase.verifyEqual(numel(docs),1); + testCase.verifyEqual(docs,"[]") + end + + end +end diff --git a/app/src/main/java/PolyphenyWrapperTestSQL.m b/app/src/main/java/PolyphenyWrapperTestSQL.m new file mode 100644 index 0000000..0b25e6b --- /dev/null +++ b/app/src/main/java/PolyphenyWrapperTestSQL.m @@ -0,0 +1,220 @@ +classdef PolyphenyWrapperTestSQL < matlab.unittest.TestCase + properties + conn % polypheny.Polypheny wrapper + end + + methods(TestClassSetup) + function setUpNamespaceAndTable(testCase) + clc; + % Open connection once for all tests + testCase.conn = polypheny.Polypheny("localhost",20590,"pa",""); + + % Drop leftovers if they exist + try + testCase.conn.query("sql","unittest_namespace", ... + "DROP TABLE IF EXISTS unittest_namespace.unittest_table"); + testCase.conn.query("sql","unittest_namespace", ... + "DROP NAMESPACE IF EXISTS unittest_namespace"); + catch + end + + % Create namespace + table for execute() + testCase.conn.query("sql","", ... + "CREATE NAMESPACE unittest_namespace"); + testCase.conn.query("sql","unittest_namespace", ... + "CREATE TABLE unittest_namespace.unittest_table (id INT NOT NULL, name VARCHAR(100), PRIMARY KEY(id))"); + + % Drop and recreate batch_table + try + testCase.conn.query("sql","unittest_namespace", ... + "DROP TABLE IF EXISTS unittest_namespace.batch_table"); + catch + end + testCase.conn.query("sql","unittest_namespace", ... + "CREATE TABLE unittest_namespace.batch_table (" + ... + "emp_id INT NOT NULL, " + ... + "name VARCHAR(100), " + ... + "gender VARCHAR(10), " + ... + "birthday DATE, " + ... + "employee_id INT, " + ... 
+ "PRIMARY KEY(emp_id))"); + end + end + + methods(TestClassTeardown) + function tearDownNamespaceAndTable(testCase) + % Cleanup after all tests + try + testCase.conn.query("sql","unittest_namespace", ... + "DROP TABLE IF EXISTS unittest_namespace.unittest_table"); + testCase.conn.query("sql","unittest_namespace", ... + "DROP TABLE IF EXISTS unittest_namespace.batch_table"); + testCase.conn.query("sql","", ... + "DROP NAMESPACE IF EXISTS unittest_namespace"); + catch + end + testCase.conn.close(); + end + end + + methods(TestMethodSetup) + function clearTables(testCase) + % Clear before each test + try + testCase.conn.query("sql","unittest_namespace", ... + "DELETE FROM unittest_namespace.unittest_table"); + testCase.conn.query("sql","unittest_namespace", ... + "DELETE FROM unittest_namespace.batch_table"); + catch + end + end + end + + methods(TestMethodTeardown) + function clearTablesAfter(testCase) + % Clear again after each test + try + testCase.conn.query("sql","unittest_namespace", ... + "DELETE FROM unittest_namespace.unittest_table"); + testCase.conn.query("sql","unittest_namespace", ... 
+ "DELETE FROM unittest_namespace.batch_table"); + catch + end + end + end + + methods(Test) + function testScalarLiteral(testCase) + r = testCase.conn.query("sql","","SELECT 42 AS answer"); + testCase.verifyEqual(r,42); + end + + function testEmptyLiteral(testCase) + r = testCase.conn.query("sql","","SELECT * FROM (SELECT 1) t WHERE 1=0"); + testCase.verifyEmpty(r); + end + + function testTableLiteral(testCase) + r = testCase.conn.query("sql","unittest_namespace","SELECT 1 AS a, 2 AS b UNION ALL SELECT 3,4"); + testCase.verifyTrue(istable(r)); + testCase.verifyEqual(r.Properties.VariableNames,{'a','b'}); + testCase.verifyEqual(height(r),2); + testCase.verifyEqual(table2cell(r(1,:)),{1,2}); + testCase.verifyEqual(table2cell(r(2,:)),{3,4}); + end + + function testInsert(testCase) + r = testCase.conn.query("sql","unittest_namespace","INSERT INTO unittest_namespace.unittest_table VALUES (1,'Alice')"); + testCase.verifyEqual(r,1); + end + + function testInsertAndSelect(testCase) + testCase.conn.query("sql","unittest_namespace","INSERT INTO unittest_namespace.unittest_table VALUES (1,'Alice')"); + r = testCase.conn.query("sql","unittest_namespace","SELECT id,name FROM unittest_namespace.unittest_table"); + testCase.verifyTrue(istable(r)); + testCase.verifyEqual(r.Properties.VariableNames,{'id','name'}); + testCase.verifyEqual(height(r),1); + testCase.verifyEqual(table2cell(r),{1,'Alice'}); + end + + function testScalarFromTable(testCase) + testCase.conn.query("sql","unittest_namespace","INSERT INTO unittest_namespace.unittest_table VALUES (2,'Carol')"); + r = testCase.conn.query("sql","unittest_namespace","SELECT id FROM unittest_namespace.unittest_table WHERE name='Carol'"); + testCase.verifyEqual(r,2); + end + + function testInsertAndSelectMultipleRows(testCase) + testCase.conn.query("sql","unittest_namespace","INSERT INTO unittest_namespace.unittest_table VALUES (1,'Alice')"); + testCase.conn.query("sql","unittest_namespace","INSERT INTO 
unittest_namespace.unittest_table VALUES (2,'Bob')"); + r = testCase.conn.query("sql","unittest_namespace","SELECT id,name FROM unittest_namespace.unittest_table ORDER BY id"); + testCase.verifyTrue(istable(r)); + testCase.verifyEqual(height(r),2); + testCase.verifyEqual(table2cell(r(1,:)),{1,'Alice'}); + testCase.verifyEqual(table2cell(r(2,:)),{2,'Bob'}); + end + + function testDeleteFromTable(testCase) + testCase.conn.query("sql","unittest_namespace","INSERT INTO unittest_namespace.unittest_table VALUES (2,'Bob')"); + testCase.conn.query("sql","unittest_namespace","DELETE FROM unittest_namespace.unittest_table"); + r = testCase.conn.query("sql","unittest_namespace","SELECT * FROM unittest_namespace.unittest_table"); + testCase.verifyEmpty(r); + end + + function testBatchInsertEmployees(testCase) + queries = { + "INSERT INTO unittest_namespace.batch_table VALUES (1,'Alice','F',DATE '1990-01-15',1001)" + "INSERT INTO unittest_namespace.batch_table VALUES (2,'Bob','M',DATE '1989-05-12',1002)" + "INSERT INTO unittest_namespace.batch_table VALUES (3,'Jane','F',DATE '1992-07-23',1003)" + "INSERT INTO unittest_namespace.batch_table VALUES (4,'Tim','M',DATE '1991-03-03',1004)" + "INSERT INTO unittest_namespace.batch_table VALUES (5,'Alex','M',DATE '1994-11-11',1005)" + "INSERT INTO unittest_namespace.batch_table VALUES (6,'Mason','M',DATE '1988-04-22',1006)" + "INSERT INTO unittest_namespace.batch_table VALUES (7,'Rena','F',DATE '1995-06-17',1007)" + "INSERT INTO unittest_namespace.batch_table VALUES (8,'Christopher','M',DATE '1987-08-09',1008)" + "INSERT INTO unittest_namespace.batch_table VALUES (9,'Lexi','F',DATE '1996-09-30',1009)" + "INSERT INTO unittest_namespace.batch_table VALUES (10,'Baen','M',DATE '1990-10-05',1010)" + "INSERT INTO unittest_namespace.batch_table VALUES (11,'Ricardo','M',DATE '1986-12-12',1011)" + "INSERT INTO unittest_namespace.batch_table VALUES (12,'Tim','M',DATE '1993-02-02',1012)" + "INSERT INTO unittest_namespace.batch_table VALUES 
(13,'Beya','F',DATE '1994-05-25',1013)" + }; + counts = testCase.conn.queryBatch("sql","unittest_namespace",queries); + testCase.verifyEqual(height(counts),13); + disp(counts) + testCase.verifyTrue(all(counts.RowsAffected == 1)); + r = testCase.conn.query("sql","unittest_namespace","SELECT COUNT(*) FROM unittest_namespace.batch_table"); + testCase.verifyEqual(r,13); + end + + function testBatchRollbackOnFailure(testCase) + queries = { + "INSERT INTO unittest_namespace.batch_table VALUES (1,'Alice','F',DATE '1990-01-15',1001)" + "BROKEN QUERY" + }; + testCase.verifyError(@() testCase.conn.queryBatch("sql","unittest_namespace",queries),?MException); + r = testCase.conn.query("sql","unittest_namespace","SELECT * FROM unittest_namespace.batch_table"); + testCase.verifyEmpty(r); + end + + function testSyntaxError(testCase) + testCase.verifyError(@() testCase.conn.query("sql","unittest_namespace","SELEC WRONG FROM nowhere"),?MException); + end + + + function testQueryWithSpaces(testCase) + % Insert with leading spaces + testCase.conn.query("sql","unittest_namespace", ... + " INSERT INTO unittest_namespace.unittest_table VALUES (1,'Alice')"); + testCase.conn.query("sql","unittest_namespace", ... + " INSERT INTO unittest_namespace.unittest_table VALUES (2,'Bob')"); + + r = testCase.conn.query("sql","unittest_namespace", ... + "SELECT id,name FROM unittest_namespace.unittest_table ORDER BY id"); + + testCase.verifyTrue(istable(r)); + testCase.verifyEqual(r.Properties.VariableNames,{'id','name'}); + testCase.verifyEqual(height(r),2); + testCase.verifyEqual(table2cell(r(1,:)),{1,'Alice'}); + testCase.verifyEqual(table2cell(r(2,:)),{2,'Bob'}); + end + + function testConnectionFailure(testCase) + testCase.verifyError(@() ... + polypheny.Polypheny("localhost",9999,"pa","").query("sql","unittest_namespace","SELECT 1"), ... 
+ ?MException); + end + + function testCommitFailureRollback(testCase) + queries = { + "INSERT INTO unittest_namespace.batch_table VALUES (1,'Alice','F',DATE '1990-01-15',1001)" + "Intentional nonsense to produce a failure" + }; + testCase.verifyError(@() ... + testCase.conn.queryBatch("sql","unittest_namespace",queries),?MException); + + r = testCase.conn.query("sql","unittest_namespace", ... + "SELECT * FROM unittest_namespace.batch_table"); + testCase.verifyEmpty(r); + end + + end + +end \ No newline at end of file diff --git a/app/src/main/java/README.md b/app/src/main/java/README.md new file mode 100644 index 0000000..5a16b89 --- /dev/null +++ b/app/src/main/java/README.md @@ -0,0 +1,2 @@ +# Matlab-Connector +Addon for Matlab to connect and query a Polypheny database. diff --git a/app/src/main/java/org/example/App.java b/app/src/main/java/org/example/App.java new file mode 100644 index 0000000..e7e1af9 --- /dev/null +++ b/app/src/main/java/org/example/App.java @@ -0,0 +1,14 @@ +/* + * This source file was generated by the Gradle 'init' task + */ +package org.example; + +public class App { + public String getGreeting() { + return "Hello World!"; + } + + public static void main(String[] args) { + System.out.println(new App().getGreeting()); + } +} diff --git a/app/src/main/java/polyphenyconnector/PolyphenyConnection.java b/app/src/main/java/polyphenyconnector/PolyphenyConnection.java new file mode 100644 index 0000000..b2edf6d --- /dev/null +++ b/app/src/main/java/polyphenyconnector/PolyphenyConnection.java @@ -0,0 +1,147 @@ +package polyphenyconnector; + +import java.sql.*; + +public class PolyphenyConnection { + + private Connection connection; + private final String host, url, username, password; + private final int port; + + + /** + * @Description + * - Constructor supporting lazy-open: Stores logins; connects on first use to protect server resources. 
+ * + * @param host: the host that should be used for the connection + * @param port: the port that should be used for the connection + * @param username: username to access the database with + * @param password: password to the corresponding username + * + **/ + public PolyphenyConnection( String host, int port, String username, String password ) { + this.host = host; + this.port = port; + this.url = "jdbc:polypheny://" + host + ":" + port; + this.username = username; + this.password = password; + this.connection = null; // The connection is established later on when needed. Lazy open + // prevents accidental resource leaks induced by user + } + + + /** + * @Description + * - Opens the server connection to Polypheny if needed (reuse otherwise). Checking the + * if-clause in java is a lot faster, than iterative opening and closing of the connection after every + * use for large numbers of queries, as it eliminates the ~10ms matlab-java crossover that opening and + * closing a connection from matlab would create. For 1M queries that avoids 1M*10ms = ~10 000sec=2.8 hrs + * of overhead. + * + **/ + public void openIfNeeded() { + if ( connection == null ) { + try { + connection = DriverManager.getConnection( url, username, password ); + } catch ( SQLException e ) { + throw new RuntimeException( "Failed to open connection", e ); + } + } + } + + + /** + * @Description + * - Getter function for the host + * @return host The host passed to the PolyphenyConnection object. + */ + public String getHost() { + return host; + } + + + /** + * @Description + * - Getter function for the port + * @return port The port passed to the PolyphenyConnection object. 
+ */ + public int getPort() { + return port; + } + + + /** + * @Description + * - Closes connection if open + * + **/ + public void close() { + try { + if ( connection != null && !connection.isClosed() ) { + connection.close(); + } + } catch ( SQLException e ) { + throw new RuntimeException( "Failed to close connection: " + e.getMessage() ); + } finally { + connection = null; + } + } + + + /** + * @Description + * - Getter function for the connection variable of PolyphenyConnection + * + * @return + * - Connection connection variable of the PolyphenyConnection class + **/ + public Connection getConnection() { + return this.connection; + } + + + /** + * @Description + * - Begins Database transaction. This is necessary to expose here because we need it to control flow in + * Batch queries handled in the QueryExecutor class later. + * + * @throws SQLException + */ + public void beginTransaction() throws SQLException { + openIfNeeded(); + connection.setAutoCommit( false ); + } + + + /** + * @Description + * - Commits Database transaction. This is necessary to expose here because we need it to control flow in + * Batch queries handled in the QueryExecutor class later. + * + * @throws SQLException + */ + public void commitTransaction() throws SQLException { + connection.commit(); + connection.setAutoCommit( true ); + + } + + + /** + * @Description + * - Rolls back Database transaction. This is necessary to expose here because we need it to control flow in + * Batch queries handled in the QueryExecutor class later. 
+     *
+     * @throws SQLException
+     */
+    public void rollbackTransaction() throws SQLException {
+        connection.rollback();
+        connection.setAutoCommit( true );
+    }
+
+
+    public void setAutoCommit( boolean AutoCommitMode ) throws SQLException {
+        connection.setAutoCommit( AutoCommitMode );
+    }
+
+}
diff --git a/app/src/main/java/polyphenyconnector/QueryExecutor.java b/app/src/main/java/polyphenyconnector/QueryExecutor.java
new file mode 100644
index 0000000..5a4660e
--- /dev/null
+++ b/app/src/main/java/polyphenyconnector/QueryExecutor.java
@@ -0,0 +1,447 @@
+package polyphenyconnector;
+
+import java.sql.*;
+import java.util.*;
+
+import org.polypheny.jdbc.PolyConnection;
+import org.polypheny.jdbc.multimodel.*;
+import org.polypheny.jdbc.types.*;
+
+public class QueryExecutor {
+
+    private PolyphenyConnection polyconnection;
+
+
+    /**
+     * @Description
+     * - Constructor
+     *
+     * @param polyconnection: PolyphenyConnection object that holds the connection
+     * details to the Database. It's used to execute queries
+     **/
+    public QueryExecutor( PolyphenyConnection polyconnection ) {
+        this.polyconnection = polyconnection;
+    }
+
+
+    /**
+     * @Description
+     * - Executes the query depending on the language given by the user
+     *
+     * @param language: The database language that is used (e.g. SQL, Mongo, Cypher)
+     * @param namespace: The namespace in the query string. For SQL this argument has no effect as we use JDBC's
+     * executeQuery(...)/executeUpdate(...). For MQL this argument will be passed to JDBC's execute(...) function. For further
+     * information consult Polypheny's web documentation.
+     * @param query: The query-text to be executed (e.g. FROM emps SELECT *)
+     *
+     * @return: ResultToMatlab(rs) which is a Matlab compatible object that is returned to the Matlab user.
+ **/ + public Object execute( String language, String namespace, String query ) { + polyconnection.openIfNeeded(); + + switch ( language.toLowerCase() ) { + default: + throw new UnsupportedOperationException( "Unsupported language: " + language ); + + case "sql": + try ( Statement stmt = polyconnection.getConnection().createStatement() ) { + + String first = query.trim().toUpperCase(); + + // SELECT statements + if ( first.startsWith( "SELECT" ) ) { + try ( ResultSet rs = stmt.executeQuery( query ) ) { + return SQLResultToMatlab( rs ); + } + + // INSERT, UPDATE, DELETE, CREATE, DROP, ... statements + } else { + int rs = stmt.executeUpdate( query ); + return rs; + + } + + } catch ( SQLException e ) { + throw translateException( e ); + } catch ( Exception e ) { + throw new RuntimeException( "SQL execution failed: " + e.getMessage(), e ); + } + + case "mongo": + try { + + // Get the connection variable from the PolyphenyConnection.java class using the getter + Connection connection = polyconnection.getConnection(); + + // Unwrap Connection connection to the JDBC Driver-supplied PolyConnection polyConnection + PolyConnection polyConnection = connection.unwrap( PolyConnection.class ); + + // Create a PolyStatement object to call .execute(...) method of the JDBC-Driver on + PolyStatement polyStatement = polyConnection.createPolyStatement(); + + // Call the execute(...) function on the polyStatement + Result result = polyStatement.execute( namespace, language, query ); + + switch ( result.getResultType() ) { + + case DOCUMENT: + // Unwrapping according to → https://docs.polypheny.com/en/latest/drivers/jdbc/extensions/result + DocumentResult documentResult = result.unwrap( DocumentResult.class ); + return DocumentToMatlab( documentResult ); + + // This case was never used in any of the JUnit Tests, as every Mongo Query currently seems to be wrapped as + // PolyDocument by the JDBC Driver. 
The case was still left in as security based on the official documentation: + // → https://docs.polypheny.com/en/latest/drivers/jdbc/extensions/result + case SCALAR: + ScalarResult scalarResult = result.unwrap( ScalarResult.class ); + long scalar = scalarResult.getScalar(); + return scalar; + + default: + throw new UnsupportedOperationException( "Unhandled result type: " + result.getResultType() ); + } + } catch ( SQLException e ) { + throw translateException( e ); + } catch ( Exception e ) { + throw new RuntimeException( "Mongo execution failed: " + e.getMessage(), e ); + } + case "cypher": + throw new UnsupportedOperationException( "Cypher execution not yet implemented." ); + } + + } + + + /** + * @Description + * This function is capable of executing a List of non-SELECT SQL statements in one single Matlab-Java crossing. + * All SQL statements except SELECT are supported. For further information consult the Polypheny JDBC Driver documentation + * → https://docs.polypheny.com/en/latest/drivers/jdbc/relational/statement + * + * @param queries The list of SQL query strings to be executed. + * @return List result A list of integers, where the i-th entry will denote for the i-th query how many rows were touched, e.g. + * n: n rows were updated, 0: no rows were touched. + */ + public int[] executeBatchSql( List queries ) { + polyconnection.openIfNeeded(); + try { + polyconnection.beginTransaction(); + try ( Statement stmt = polyconnection.getConnection().createStatement() ) { + for ( String query : queries ) { + String first = query.trim().toUpperCase(); + if ( first.startsWith( "SELECT" ) ) { + throw new UnsupportedOperationException( "Batch execution does not support SELECT statements." 
); + } + stmt.addBatch( query ); + } + int[] result = stmt.executeBatch(); + polyconnection.commitTransaction(); + return result; // return directly + } catch ( SQLException e ) { + try { + polyconnection.rollbackTransaction(); + } catch ( Exception rollbackException ) { + // Propagate both the batch failure AND the rollback failure → User must be made + throw new RuntimeException( "SQL batch failed AND rollback failed: " + rollbackException.getMessage(), e ); + } + throw translateException( e ); + } catch ( Exception e ) { + try { + polyconnection.rollbackTransaction(); + } catch ( Exception rollbackEx ) { + // Propagate both the batch failure AND the rollback failure → User must be made + throw new RuntimeException( "SQL batch failed AND rollback failed: " + rollbackEx.getMessage(), e ); + } + throw new RuntimeException( "SQL batch execution failed. Transaction was rolled back: " + e.getMessage(), e ); + } + + } catch ( SQLException e ) { + throw new RuntimeException( "Failed to manage transaction", e ); + } + } + + + /** + * @Description + * This function is capable of executing a List of Mongo statements in one single Matlab-Java crossing. + * Each query is executed individually via the execute(...) method. The result of each query will be a List + * containing the JSON-encoded documents or scalars (as JSON strings). All individual query results are then grouped + * into an outer List, which represents the batch result. + * + * @param namespace The Mongo namespace (e.g. database / collection context). + * @param queries The list of Mongo query strings to be executed. + * @return List> result An outer list with one entry per query. Each entry is a List containing + * the documents or scalar results (as JSON strings) returned by the respective query. 
+ */ + public List> executeBatchMongo( String namespace, List queries ) { + polyconnection.openIfNeeded(); + List> results = new ArrayList<>(); + try { + polyconnection.beginTransaction(); + + for ( String query : queries ) { + @SuppressWarnings("unchecked") List result = (List) execute( "mongo", namespace, query ); + results.add( result ); + } + + polyconnection.commitTransaction(); // commit if all succeeded + return results; + + } catch ( Exception e ) { + try { + polyconnection.rollbackTransaction(); // rollback if anything failed + } catch ( Exception rollbackEx ) { + throw new RuntimeException( "Rollback failed after batch error", rollbackEx ); + } + throw new RuntimeException( "Batch execution failed", e ); + } + } + + + /** + * @Description + * - Casts the result of SQL queries to MatlabObjects + * + * @param rs: The result object of the query of type ResultSet + * + * @return: Result from the query which is either null/scalar/table + **/ + public Object SQLResultToMatlab( ResultSet rs ) throws Exception { + + ResultSetMetaData meta = rs.getMetaData(); + int colCount = meta.getColumnCount(); + Object[][] ResultArray; + + // ───────────────────────────── + // Case 1: Empty Result + // ───────────────────────────── + if ( !rs.next() ) { + //System.out.println( "Empty result set." ); + return null; + } + + // ───────────────────────────── + // Case 2: Scalar Result + // ───────────────────────────── + if ( colCount == 1 && rs.isLast() ) { + //System.out.println( "Scalar result set." 
); + Object scalar = rs.getObject( 1 ); + return scalar; + } + + // ───────────────────────────── + // Case 3: Tabular Result (≥1 column, ≥1 row) + // ───────────────────────────── + String[] colNames = new String[colCount]; // get the column names to name the columns later + + for ( int i = 1; i <= colCount; i++ ) { + colNames[i - 1] = meta.getColumnName( i ); // assign the column names to the array + } + + List rows = new ArrayList<>(); // List of arrays to store the rows returned by the query + do { + Object[] row = new Object[colCount]; // Creates new array that will store the queries entries + for ( int i = 0; i < colCount; i++ ) { + row[i] = rs.getObject( i + 1 ); // Saves each entry + } + rows.add( row ); // Append row to the List + } while ( rs.next() ); // First row already fetched above with rs.next() so we use do while + + // Ensure that the colNames and rows have the same number of columns + if ( colNames.length != rows.get( 0 ).length ) { + throw new RuntimeException( "Mismatch: colNames and rowData column count don't match" ); + } + + ResultArray = rows.toArray( new Object[rows.size()][] ); + + return new Object[]{ colNames, ResultArray }; + } + + + private List DocumentToMatlab( DocumentResult documentResult ) { + List docs = new ArrayList<>(); + Iterator documentIterator = documentResult.iterator(); + while ( documentIterator.hasNext() ) { + PolyDocument document = documentIterator.next(); + docs.add( NestedPolyDocumentToString( document ) ); // at the most outer layer everything must be wrapped as PolyDocument + } + return docs; + } + + + private String NestedPolyDocumentToString( PolyDocument document ) { + StringBuilder sb = new StringBuilder(); + sb.append( "{" ); + Iterator> it = document.entrySet().iterator(); + while ( it.hasNext() ) { + Map.Entry entry = it.next(); + sb.append( "\"" ).append( entry.getKey() ).append( "\":" ); + sb.append( anyToJson( entry.getValue() ) ); + if ( it.hasNext() ) + sb.append( "," ); + } + sb.append( "}" ); + 
return sb.toString(); + } + + + private String NestedArrayToString( Array array ) { + StringBuilder sb = new StringBuilder(); + sb.append( "[" ); + try { + Object[] elems = (Object[]) array.getArray(); + for ( int i = 0; i < elems.length; i++ ) { + sb.append( anyToJson( elems[i] ) ); + if ( i < elems.length - 1 ) + sb.append( "," ); + } + } catch ( SQLException e ) { + throw new RuntimeException( "List serialization error: " + e.getMessage(), e ); + } + sb.append( "]" ); + return sb.toString(); + } + + + private String anyToJson( Object result ) { + if ( result == null ) { + return "null"; + } + try { + if ( result instanceof TypedValue ) { + TypedValue value = (TypedValue) result; + switch ( value.getValueCase() ) { + case DOCUMENT: + return NestedPolyDocumentToString( value.asDocument() ); + case LIST: + return NestedArrayToString( value.asArray() ); + case BOOLEAN: + return String.valueOf( value.asBoolean() ); + case INTEGER: + return String.valueOf( value.asInt() ); + case LONG: + return String.valueOf( value.asLong() ); + case DOUBLE: + return String.valueOf( value.asDouble() ); + case FLOAT: + return String.valueOf( value.asFloat() ); + case BIG_DECIMAL: + return value.asBigDecimal().toPlainString(); + case STRING: + return "\"" + escapeJson( value.asString() ) + "\""; + case DATE: + return "\"" + value.asDate().toString() + "\""; + case TIME: + return "\"" + value.asTime().toString() + "\""; + case TIMESTAMP: + return "\"" + value.asTimestamp().toString() + "\""; + case INTERVAL: + return "\"" + value.asInterval().toString() + "\""; + case BINARY: + return "\"" + Base64.getEncoder().encodeToString( value.asBytes() ) + "\""; + case FILE: + return "\"" + escapeJson( String.valueOf( value.asBlob() ) ) + "\""; + case NULL: + return "null"; + default: + return "\"" + escapeJson( value.toString() ) + "\""; + } + } else if ( result instanceof PolyDocument ) { + return NestedPolyDocumentToString( (PolyDocument) result ); + } else if ( result instanceof Array ) { + 
return NestedArrayToString( (Array) result ); + } else if ( result instanceof String ) { + return "\"" + escapeJson( (String) result ) + "\""; + } else if ( result instanceof java.sql.Date + || result instanceof java.sql.Time + || result instanceof java.sql.Timestamp ) { + return "\"" + result.toString() + "\""; + } else if ( result instanceof byte[] ) { + return "\"" + Base64.getEncoder().encodeToString( (byte[]) result ) + "\""; + } else { + // numbers, booleans, anything else + return String.valueOf( result ); + } + } catch ( SQLException e ) { + throw new RuntimeException( "Serialization error: " + e.getMessage(), e ); + } + } + + + /** + * @Description + * This function takes makes sure that the escapes of queries are handled correctly in Strings when appending. + * e.g. " C\mypath " must be converted into " C\\mypath " because "\" is an operator sign like in Latex + * @param string + * @return string: The string with the proper escape sequences + */ + private static String escapeJson( String string ) { + return string.replace( "\\", "\\\\" ).replace( "\"", "\\\"" ); + } + + + /** + * @Description + * This method ensures that exceptions thrown by Polypheny (and propagated through the JDBC driver) due to user fault when calling + * execute or executeBatch are translated in a user-interpretable error to be propagated to Matlab, instead of just failing + * with an obscure error/exception. + * + * @param e The exception caught. 
08 denotes an error with the Polypheny connection, 42 denotes a + * @return + */ + private RuntimeException translateException( SQLException e ) { + String state = e.getSQLState(); + if ( state != null ) { + if ( state.startsWith( "08" ) ) { + return new RuntimeException( "Connection error: " + e.getMessage(), e ); + } + if ( state.startsWith( "42" ) ) { + return new RuntimeException( "Syntax error in query: " + e.getMessage(), e ); + } + } + return new RuntimeException( "Query execution failed: " + e.getMessage(), e ); + } + + /* + * This function might be used in the future to automatically detect the namespace in MQL queries. + + private static void checkNamespace( String language, String namespace ) { + if ( namespace == null || namespace.isEmpty() ) { + if ( language.equalsIgnoreCase( "sql" ) ) { + // fine: default namespace is used implicitly + } else { + throw new IllegalArgumentException( + "For " + language + " queries a namespace must be specified" + ); + } + } + } + */ + + /** + * @Description + * This function determines the operation (e.g. "find" or "insertOne") of a Mongo query. This is important to distinguish + * whether to use executeUpdate or executeQuery. Functionality was moved to a function (instead of handling it like for SQL) + * because in Mongo queries the operation isn't as easy to determine. + * MQL queries are always of the form ..() + * + * @param q The query text of type String + * @return + **/ + /* + private static String extractMongoOperation( String q ) { + String query = q.trim(); + int paren = query.indexOf( '(' ); // get the position of the first "(" in the query. + if ( paren < 0 ) { + return ""; // return an empty String if no ( was found + } + int lastDot = query.lastIndexOf( '.', paren ); // get the position of the last "." before the "(". 
+ if ( lastDot < 0 ) { + return ""; // return an empty string if no dot was found + } + String operation = query.substring( lastDot + 1, paren ).trim(); + return operation; // return the + } + */ + +} diff --git a/app/src/main/java/startup.m b/app/src/main/java/startup.m new file mode 100644 index 0000000..c4dad7b --- /dev/null +++ b/app/src/main/java/startup.m @@ -0,0 +1,33 @@ +function startup + % Get root folder of the toolbox + root = fileparts(mfilename('fullpath')); + + % Paths to JARs + jarPaths = { ... + fullfile(root, 'jar', 'polypheny-all.jar'), ... + fullfile(root, 'libs', 'polypheny-jdbc-driver-2.3.jar') ... + }; + + % Add JARs if not already on classpath + for i = 1:numel(jarPaths) + if ~any(strcmp(jarPaths{i}, javaclasspath('-all'))) + javaaddpath(jarPaths{i}); + end + end + + % Try to register the JDBC driver dynamically + try + %java.lang.Class.forName('org.polypheny.jdbc.PolyphenyDriver'); + driver = javaObject('org.polypheny.jdbc.PolyphenyDriver'); + java.sql.DriverManager.registerDriver(driver); + catch e + warning('Could not register Polypheny JDBC driver dynamically: %s', char(e.message)); + end + + % Add MATLAB namespace folder (+polypheny) + if exist(fullfile(root, '+polypheny'), 'dir') + addpath(root); + end + + disp('Polypheny connector initialized.'); +end diff --git a/app/src/test/java/polyphenyconnector/PolyphenyConnectionTest.java b/app/src/test/java/polyphenyconnector/PolyphenyConnectionTest.java new file mode 100644 index 0000000..e47f614 --- /dev/null +++ b/app/src/test/java/polyphenyconnector/PolyphenyConnectionTest.java @@ -0,0 +1,109 @@ +package polyphenyconnector; + +import java.sql.SQLException; +import org.junit.jupiter.api.*; +import static org.junit.jupiter.api.Assertions.*; + +public class PolyphenyConnectionTest { + + private PolyphenyConnection conn; + private String host, username, password; + private int port; + + + @BeforeAll + static void waitForPolypheny() throws Exception { + 
PolyphenyConnectionTestHelper.waitForPolypheny(); + } + + + @BeforeEach + void setUp() { + host = "localhost"; + port = 20590; + username = "pa"; + password = ""; + conn = new PolyphenyConnection( host, port, username, password ); + } + + + @AfterEach + void tearDown() { + conn.close(); + } + + + @Test + void getHostPort() { + assertEquals( port, conn.getPort(), "Port must be 20590." ); + assertEquals( host, conn.getHost(), "Host must be localhost." ); + + } + + + @Test + void testLazyOpen() { + assertNull( conn.getConnection(), "Connection should start as null" ); + conn.openIfNeeded(); + assertNotNull( conn.getConnection(), "Connection should be established after openIfNeeded()" ); + } + + + @Test + void testBeginCommitRollback() throws SQLException { + conn.openIfNeeded(); + + // Test AutoCommit is enabled in the standard 1-Batch query case. This is important so myQueryExecutor.execute(...) can AutoCommit + assertTrue( conn.getConnection().getAutoCommit(), "AutoCommit should be true in the standard 1-Batch query case." ); + + // Test beginTransaction() actually disables the AutoCommit setting in openIfNeeded(). Important for its use in N-Batching. + conn.beginTransaction(); + assertFalse( conn.getConnection().getAutoCommit(), "AutoCommit should be false in transaction" ); + + // Test that AutoCommit is set to true again after commitTransaction() executes, so the standard 1-Batch query case can AutoCommit. + conn.commitTransaction(); + assertTrue( conn.getConnection().getAutoCommit(), "AutoCommit should be true after commit" ); + + // Test that after the rollbackTransaction() AutoCommit is true again, so the standard 1-Batch query case can AutoCommit. + conn.beginTransaction(); + conn.rollbackTransaction(); + assertTrue( conn.getConnection().getAutoCommit(), "AutoCommit should be true after rollback" ); + } + + + @Test + void testOpen() { + + // Test that openIfNeeded() twice doesn't change the existing connection. 
+ conn.openIfNeeded(); + java.sql.Connection firstConnection = conn.getConnection(); + conn.close(); + conn.openIfNeeded(); + java.sql.Connection secondConnection = conn.getConnection(); + assertNotSame( firstConnection, secondConnection, "A new connection should be created after close()" ); + + // Test executing openIfNeeded() twice doesn't throw an Exception + assertDoesNotThrow( () -> conn.openIfNeeded(), "Opening twice should not throw an exception" ); + + } + + + @Test + void testClose() { + // Test that opening twice doesn't throw an exception + + conn.close(); + assertDoesNotThrow( () -> conn.close(), "Closing twice should not throw an exception" ); + assertNull( conn.getConnection(), "Connection should be null after close" ); + } + + + @Test + void testOpenWithInvalidCredentials() { + + // Tests that a RuntimeException is thrown. Makes sure false Username and Password are treated with an Exception + PolyphenyConnection badConn = new PolyphenyConnection( host, port, "wronguser", "wrongpass" ); + assertThrows( RuntimeException.class, badConn::openIfNeeded, "Opening with bad credentials should fail" ); + } + +} diff --git a/app/src/test/java/polyphenyconnector/PolyphenyConnectionTestHelper.java b/app/src/test/java/polyphenyconnector/PolyphenyConnectionTestHelper.java new file mode 100644 index 0000000..2a680df --- /dev/null +++ b/app/src/test/java/polyphenyconnector/PolyphenyConnectionTestHelper.java @@ -0,0 +1,41 @@ +package polyphenyconnector; + +import java.sql.DriverManager; + +public class PolyphenyConnectionTestHelper { + + static void waitForPolypheny() throws Exception { + String url = "jdbc:polypheny://localhost:20590"; + String user = "pa"; + String pass = ""; + + long deadline = System.currentTimeMillis() + 7000; // 7s timeout + int attempt = 1; + boolean ready = false; + + while ( System.currentTimeMillis() < deadline ) { + try ( java.sql.Connection conn = DriverManager.getConnection( url, user, pass ) ) { + if ( conn != null && !conn.isClosed() ) 
{ + ready = true; + break; + } + } catch ( Exception e ) { + System.out.println( "Polypheny not ready (attempt " + attempt + ")" ); + } + attempt++; + Thread.sleep( 1000 ); // wait 1s before retry + } + + if ( !ready ) { + throw new RuntimeException( "Polypheny did not become available within 7 seconds." ); + } + } + + /* + public static void ensurePostgresAdapter( PolyphenyConnection conn ) throws SQLException { + QueryExecutor exec = new QueryExecutor( conn ); + exec.execute( "sql", "unittest_namespace", "CREATE ADAPTER IF NOT EXISTS postgresql1 USING postgresql ..." ); + } + */ + +} diff --git a/app/src/test/java/polyphenyconnector/QueryExecutorTestMQL.java b/app/src/test/java/polyphenyconnector/QueryExecutorTestMQL.java new file mode 100644 index 0000000..7052cec --- /dev/null +++ b/app/src/test/java/polyphenyconnector/QueryExecutorTestMQL.java @@ -0,0 +1,361 @@ +package polyphenyconnector; + +import org.junit.jupiter.api.*; +import org.polypheny.jdbc.PolyConnection; +import org.polypheny.jdbc.multimodel.*; + +import static org.junit.jupiter.api.Assertions.*; + +import java.sql.Connection; +import java.util.ArrayList; +import java.util.List; + +public class QueryExecutorTestMQL { + + private static PolyphenyConnection myconnection; + private static QueryExecutor myexecutor; + + + @BeforeAll + static void setUpNamespaceAndCollection() throws Exception { + PolyphenyConnectionTestHelper.waitForPolypheny(); + Thread.sleep( 4000 ); + myconnection = new PolyphenyConnection( "localhost", 20590, "pa", "" ); + myexecutor = new QueryExecutor( myconnection ); + + try { + myexecutor.execute( "mongo", "mongotest", "db.createCollection(\"unittest_collection\")" ); + } catch ( Exception ignoredException ) { + } + } + + + @AfterAll + static void tearDownNamespaceAndCollection() { + try { + myexecutor.execute( "mongo", "mongotest", "db.unittest_collection.drop()" ); + } catch ( Exception ignored ) { + } + myconnection.close(); + } + + + @BeforeEach + void clearCollection() { + 
try { + myexecutor.execute( "mongo", "mongotest", "db.unittest_collection.deleteMany({})" ); + } catch ( Exception ignored ) { + } + } + + + @AfterEach + void clearCollectionAfter() { + try { + myexecutor.execute( "mongo", "mongotest", "db.unittest_collection.deleteMany({})" ); + } catch ( Exception ignored ) { + } + } + + + @Test + void testDeleteManyRemovesAllDocs() { + // Arrange: create namespace & collection + myconnection.openIfNeeded(); + myexecutor.execute( "mongo", "mongotest", "db.unittest_collection.drop()" ); + myexecutor.execute( "mongo", "mongotest", "db.createCollection(\"unittest_collection\")" ); + + // Insert 3 docs + myexecutor.execute( "mongo", "mongotest", "db.unittest_collection.insertOne({\"id\":1,\"name\":\"Alice\"})" ); + myexecutor.execute( "mongo", "mongotest", "db.unittest_collection.insertOne({\"id\":2,\"name\":\"Bob\"})" ); + myexecutor.execute( "mongo", "mongotest", "db.unittest_collection.insertOne({\"id\":3,\"name\":\"Ciri\"})" ); + + // Act: delete all + Object ack = myexecutor.execute( "mongo", "mongotest", "db.unittest_collection.deleteMany({})" ); + + // Assert: ack JSON contains deletedCount:3 + assertTrue( ack.toString().contains( "\"updateCount\":3" ), "Expected 3 deletions, got: " + ack ); + // Verify collection is empty + @SuppressWarnings("unchecked") List docs = (List) myexecutor.execute( "mongo", "mongotest", "db.unittest_collection.find({})" ); + assertEquals( 0, docs.size(), "Collection should be empty after deleteMany({})" ); + } + + + @Test + void testInsertandDrop() { + Object result = myexecutor.execute( "mongo", "mongotest", "db.unittest_collection.find({})" ); + assertTrue( result instanceof List, "Expected a List" ); + List docs = (List) result; + assertEquals( 0, docs.size() ); + + myexecutor.execute( "mongo", "mongotest", "db.unittest_collection.insertOne({\"age\":14})" ); + result = myexecutor.execute( "mongo", "mongotest", "db.unittest_collection.find({\"age\":14})" ); + + docs = (List) result; + 
assertEquals( 1, docs.size() ); + assertTrue( docs.get( 0 ).contains( "\"age\":14" ) ); + + myexecutor.execute( "mongo", "mongotest", "db.unittest_collection.deleteMany({})" ); + result = myexecutor.execute( "mongo", "mongotest", "db.unittest_collection.find({\"age\": {\"$eq\": 14}})" ); + docs = (List) result; + assertEquals( 0, docs.size() ); + } + + + @Test + void testInsertManyAndNestedDocument() { + myexecutor.execute( "mongo", "mongotest", "db.unittest_collection.insertOne({\"age\":14})" ); + myexecutor.execute( "mongo", "mongotest", "db.unittest_collection.insertOne({\"age\":20})" ); + myexecutor.execute( "mongo", "mongotest", "db.unittest_collection.insertOne({\"age\":24})" ); + myexecutor.execute( "mongo", "mongotest", "db.unittest_collection.insertOne({\"age\":30, \"adress\": {\"Country\": \"Switzerland\", \"Code\": 4051}})" ); + Object result = myexecutor.execute( "mongo", "mongotest", "db.unittest_collection.find({\"age\": {$gt:29}})" ); + + assertTrue( result instanceof List, "expected a List for DocumentResult" ); + @SuppressWarnings("unchecked") List docs = (List) result; + assertEquals( 1, docs.size() ); + assertTrue( docs.get( 0 ).contains( "\"age\":30" ) ); + } + + + @Test + void testBooleanField() { + myexecutor.execute( "mongo", "mongotest", "db.unittest_collection.insertOne({\"flag\":true})" ); + @SuppressWarnings("unchecked") List docs = (List) myexecutor.execute( "mongo", "mongotest", "db.unittest_collection.find({})" ); + assertTrue( docs.get( 0 ).contains( "\"flag\":true" ) ); + } + + + @Test + void testIntegerAgeField() { + myexecutor.execute( "mongo", "mongotest", "db.unittest_collection.insertOne({\"age\":42})" ); + @SuppressWarnings("unchecked") List docs = (List) myexecutor.execute( "mongo", "mongotest", "db.unittest_collection.find({})" ); + assertEquals( 1, docs.size() ); + assertTrue( docs.get( 0 ).contains( "\"age\":42" ) ); + } + + + @Test + void testStringField() { + myexecutor.execute( "mongo", "mongotest", 
"db.unittest_collection.insertOne({\"name\":\"Alice\"})" ); + @SuppressWarnings("unchecked") List docs = (List) myexecutor.execute( "mongo", "mongotest", "db.unittest_collection.find({})" ); + assertEquals( 1, docs.size() ); + assertTrue( docs.get( 0 ).contains( "\"name\":\"Alice\"" ) ); + } + + + @Test + void testLongField() { + myexecutor.execute( "mongo", "mongotest", "db.unittest_collection.insertOne({\"big\":1111111111111111111})" ); + Object r = myexecutor.execute( "mongo", "mongotest", "db.unittest_collection.find({})" ); + @SuppressWarnings("unchecked") List docs = (List) r; + assertEquals( 1, docs.size() ); + assertTrue( docs.get( 0 ).contains( "\"big\":1111111111111111111" ) ); + } + + + @Test + void testDoubleField() { + myexecutor.execute( "mongo", "mongotest", "db.unittest_collection.insertOne({\"pi\":3.14159})" ); + Object r = myexecutor.execute( "mongo", "mongotest", "db.unittest_collection.find({})" ); + @SuppressWarnings("unchecked") List docs = (List) r; + assertEquals( 1, docs.size() ); + assertTrue( docs.get( 0 ).contains( "\"pi\":3.14159" ) ); + } + + + @Test + void testCountDocumentsReturnsStringArray() { + myexecutor.execute( "mongo", "mongotest", + "db.unittest_collection.insertOne({\"name\":\"Bob\"})" ); + + Object result = myexecutor.execute( "mongo", "mongotest", + "db.unittest_collection.countDocuments({})" ); + + assertTrue( result instanceof List, "result should be a List" ); + @SuppressWarnings("unchecked") List docs = (List) result; + assertEquals( 1, docs.size() ); + assertEquals( "{\"count\":1}", docs.get( 0 ) ); + } + + + @Test + void testListElementClasses() { + myexecutor.execute( "mongo", "mongotest", "db.unittest_collection.insertOne({\"mixed\":[{\"bar\":2},1,\"foo\"]})" ); + Object result = myexecutor.execute( "mongo", "mongotest", "db.unittest_collection.find({})" ); + @SuppressWarnings("unchecked") List docs = (List) result; + System.out.println( docs.get( 0 ) ); + } + + + @Test + void testArrayField() { + 
myexecutor.execute( "mongo", "mongotest", "db.unittest_collection.insertOne({\"scores\":[1,2,3]})" ); + Object result = myexecutor.execute( "mongo", "mongotest", "db.unittest_collection.find({})" ); + @SuppressWarnings("unchecked") List docs = (List) result; + assertEquals( 1, docs.size() ); + assertTrue( docs.get( 0 ).contains( "\"scores\":[1,2,3]" ) ); + } + + + @SuppressWarnings("unchecked") + @Test + void testFindOnEmptyCollectionReturnsEmptyArray() { + Object result = myexecutor.execute( "mongo", "mongotest", "db.unittest_collection.find({})" ); + assertTrue( result instanceof List, "expected List even for empty cursor" ); + List docs = (List) result; + assertEquals( 0, docs.size() ); + } + + + @Test + void testInsertManyAndFindMultiple() { + myexecutor.execute( "mongo", "mongotest", + "db.unittest_collection.insertOne({\"id\":10,\"name\":\"A\"})" ); + myexecutor.execute( "mongo", "mongotest", + "db.unittest_collection.insertOne({\"id\":11,\"name\":\"B\"})" ); + + Object result = myexecutor.execute( "mongo", "mongotest", + "db.unittest_collection.find({})" ); + + assertTrue( result instanceof List ); + @SuppressWarnings("unchecked") List docs = (List) result; + assertEquals( 2, docs.size() ); + } + + + @Test + void testBatchInsertAndFind() { + List queries = new ArrayList<>(); + queries.add( "db.unittest_collection.insertOne({\"name\":\"Alice\",\"age\":25})" ); + queries.add( "db.unittest_collection.insertOne({\"name\":\"Bob\",\"age\":30})" ); + + Object result = myexecutor.executeBatchMongo( "mongotest", queries ); + + assertTrue( result instanceof List, "Expected a List of results" ); + @SuppressWarnings("unchecked") List> results = (List>) result; + + assertEquals( 2, results.size(), "Expected two results (one per insert)" ); + assertTrue( results.get( 0 ) instanceof List, "First insert result should be a List" ); + assertTrue( results.get( 1 ) instanceof List, "Second insert result should be a List" ); + assertEquals( 1, results.get( 0 ).size(), "Each 
insert should yield a singleton list" ); + assertEquals( 1, results.get( 1 ).size(), "Each insert should yield a singleton list" ); + } + + + @Test + void testBatchMixedOps() { + List queries = new ArrayList<>(); + queries.add( "db.unittest_collection.insertOne({\"name\":\"Charlie\",\"active\":true})" ); + queries.add( "db.unittest_collection.countDocuments({})" ); + + Object result = myexecutor.executeBatchMongo( "mongotest", queries ); + + assertTrue( result instanceof List, "Expected a List of results" ); + @SuppressWarnings("unchecked") List> results = (List>) result; + + assertEquals( 2, results.size(), "Expected two results" ); + assertTrue( results.get( 0 ) instanceof List, "First result should be a List" ); + assertTrue( results.get( 1 ) instanceof List, "Second result should be a List" ); + + // Count → singleton list with a number string + assertEquals( 1, results.get( 0 ).size(), "Insert should yield a singleton list" ); + assertEquals( 1, results.get( 1 ).size(), "Count should yield a singleton list" ); + + // Content → Check results + assertTrue( results.get( 1 ).get( 0 ).contains( "1" ), "Count result should include '1'" ); + } + + + @Test + void testSyntaxErrorThrows() { + // Missing closing brace makes this invalid JSON + String badQuery = "db.unittest_collection.insertOne({\"foo\":123)"; // typo + + RuntimeException runtimeException = assertThrows( RuntimeException.class, () -> { + myexecutor.execute( "mongo", "mongotest", badQuery ); + } ); + assertTrue( runtimeException.getMessage().contains( "Syntax error" ) || + runtimeException.getMessage().contains( "execution failed" ), + "Exception message should indicate syntax error" ); + + assertThrows( Exception.class, () -> { + myexecutor.execute( "mongo", "mongotest", badQuery ); + } ); + } + + + @Test + void testMultiStatementMongoFails() { + String illegal_multiquery = "" + + "db.people.insertOne({\"name\":\"Alice\",\"age\":20}); " + + "db.people.insertOne({\"name\":\"Bob\",\"age\":24}); " + + 
"db.people.find({})"; + + assertThrows( Exception.class, () -> { + myexecutor.execute( "mongo", "mongotest", illegal_multiquery ); + }, "Polypheny should not support multi-statement MongoQL with ';'" ); + } + + + @Test + void testMongoRollbackSupport() throws Exception { + // Sanity check to verify rollback on Polypheny server side + myconnection.openIfNeeded(); + Connection conn = myconnection.getConnection(); + + try { + conn.setAutoCommit( false ); + PolyConnection polyConn = conn.unwrap( PolyConnection.class ); + PolyStatement stmt = polyConn.createPolyStatement(); + + // Insert Alice with id=1 + stmt.execute( "mongotest", "mongo", "db.mongotest.insertOne({\"id\": 1, \"name\": \"Alice\"})" ); + + // Verify Alice is visible before rollback + Result preRes = stmt.execute( "mongotest", "mongo", "db.mongotest.find({\"id\": 1})" ); + DocumentResult preDocs = preRes.unwrap( DocumentResult.class ); + assertTrue( preDocs.iterator().hasNext(), "Inserted document should be visible before rollback" ); + + // Roll back instead of commit + conn.rollback(); + + } finally { + conn.setAutoCommit( true ); + } + + // After rollback, Alice should not exist + PolyConnection polyConn = conn.unwrap( PolyConnection.class ); + PolyStatement ps = polyConn.createPolyStatement(); + Result res = ps.execute( "mongotest", "mongo", "db.mongotest.find({\"id\": 1})" ); + + DocumentResult docs = res.unwrap( DocumentResult.class ); + boolean hasDoc = docs.iterator().hasNext(); + + assertFalse( hasDoc, + "Rollback did not remove Mongo document with id=1" ); + } + + + @Test + void testExecuteBatchMongoRollback() { + myconnection.openIfNeeded(); + + // Prepare batch: 2 valid inserts + 1 faulty insert + List queries = new ArrayList<>(); + queries.add( "db.unittest_collection.insertOne({\"id\": 1, \"name\": \"Alice\"})" ); + queries.add( "db.unittest_collection.insertOne({\"id\": 2, \"name\": \"Bob\"})" ); + queries.add( "db.unittest_collection.insertOne({\"id\": 3, \"name\": \"Janice\"" ); // → 
missing closing }) brace + + // Expect the batch to throw (rollback triggered) + assertThrows( RuntimeException.class, () -> { + myexecutor.executeBatchMongo( "mongotest", queries ); + } ); + + // After rollback, none of the documents should exist + @SuppressWarnings("unchecked") List docs = (List) myexecutor.execute( + "mongo", "mongotest", "db.unittest_collection.find({\"id\": {\"$gte\": 0, \"$lte\": 100}})" ); + + assertEquals( 0, docs.size(), "Rollback should have undone all inserts when one failed" ); + } + +} diff --git a/app/src/test/java/polyphenyconnector/QueryExecutorTestSQL.java b/app/src/test/java/polyphenyconnector/QueryExecutorTestSQL.java new file mode 100644 index 0000000..d55df42 --- /dev/null +++ b/app/src/test/java/polyphenyconnector/QueryExecutorTestSQL.java @@ -0,0 +1,365 @@ +package polyphenyconnector; + +import org.junit.jupiter.api.*; +import org.polypheny.jdbc.PolyConnection; +import org.polypheny.jdbc.multimodel.*; +import org.polypheny.jdbc.types.TypedValue; + +import static org.junit.jupiter.api.Assertions.*; + +import java.sql.Connection; +import java.util.Arrays; +import java.util.List; +import java.util.Iterator; + +public class QueryExecutorTestSQL { + + private static PolyphenyConnection myconnection; + private static QueryExecutor myexecutor; + + + @BeforeAll + static void setUpNamespaceAndTable() throws Exception { + + // Wait for Polypheny to be available and connect to localhost. We do this because we run all the JUnit tests on our local machine. + PolyphenyConnectionTestHelper.waitForPolypheny(); + Thread.sleep( 4000 ); + myconnection = new PolyphenyConnection( "localhost", 20590, "pa", "" ); + myexecutor = new QueryExecutor( myconnection ); + + // 1. Setup tables for .execute() + // Delete any TABLE called and any NAMESPACE if it exists. This is important so we can insert it + // cleanly, in case tests break mid run the cleanup "@Afterall" might not have been executed properly. 
+ try { + myexecutor.execute( "sql", "unittest_namespace", "DROP TABLE IF EXISTS unittest_namespace.unittest_table" ); + myexecutor.execute( "sql", "unittest_namespace", "DROP NAMESPACE IF EXISTS unittest_namespace" ); + } catch ( Exception ignored ) { + } + + // Creates the NAMESPACE and TABLE . + myexecutor.execute( "sql", "unittest_namespace", "CREATE NAMESPACE unittest_namespace" ); + myexecutor.execute( "sql", "unittest_namespace", "CREATE TABLE unittest_namespace.unittest_table (id INT NOT NULL, name VARCHAR(100), PRIMARY KEY(id))" ); + + // 2. Setup tables for executeBatch() + // Delete any tables that might still exist as described before. + try { + myexecutor.execute( "sql", "unittest_namespace", "DROP TABLE IF EXISTS unittest_namespace.batch_table" ); + } catch ( Exception ignored ) { + } + myexecutor.execute( "sql", "unittest_namespace", "CREATE TABLE unittest_namespace.batch_table (" + "emp_id INT NOT NULL, " + "name VARCHAR(100), " + "gender VARCHAR(10), " + "birthday DATE, " + "employee_id INT, " + "PRIMARY KEY(emp_id))" ); + + } + + + @AfterAll + static void tearDownNamespaceAndTable() { + // Cleans up the TABLE and NAMESPACE we created so we leave no trace after the tests. 
+ myexecutor.execute( "sql", "unittest_namespace", "DROP TABLE IF EXISTS unittest_namespace.unittest_table" ); + myexecutor.execute( "sql", "unittest_namespace", "DROP TABLE IF EXISTS unittest_namespace.batch_table" ); + myexecutor.execute( "sql", "unittest_namespace", "DROP NAMESPACE IF EXISTS unittest_namespace" ); + myconnection.close(); + } + + + @BeforeEach + void clearTable() { + myexecutor.execute( "sql", "unittest_namespace", "DELETE FROM unittest_namespace.unittest_table" ); + myexecutor.execute( "sql", "unittest_namespace", "DELETE FROM unittest_namespace.batch_table" ); + } + + + @AfterEach + void clearTableAfter() { + myexecutor.execute( "sql", "unittest_namespace", "DELETE FROM unittest_namespace.unittest_table" ); + myexecutor.execute( "sql", "unittest_namespace", "DELETE FROM unittest_namespace.batch_table" ); + } + + // ───────────────────────────── + // Isolated branch tests (no table needed) + // ───────────────────────────── + + + @Test + void testScalarLiteral() { + Object result = myexecutor.execute( "sql", "", "SELECT 42 AS answer" ); + assertTrue( result instanceof Integer, "Expected an integer scalar" ); + assertEquals( 42, result ); + } + + + @Test + void testEmptyLiteral() { + Object result = myexecutor.execute( "sql", "", "SELECT * FROM (SELECT 1) t WHERE 1=0" ); + assertNull( result, "Query with no rows should return null" ); + } + + + @Test + void testTableLiteral() { + Object result = myexecutor.execute( "sql", "unittest_namespace", "SELECT 1 AS a, 2 AS b UNION ALL SELECT 3, 4" ); + assertTrue( result instanceof Object[], "Expected tabular result" ); + + Object[] arr = (Object[]) result; + String[] colNames = (String[]) arr[0]; + Object[][] data = (Object[][]) arr[1]; + + assertArrayEquals( new String[]{ "a", "b" }, colNames, "Column names must match" ); + assertEquals( 2, data.length, "Should have 2 rows" ); + assertArrayEquals( new Object[]{ 1, 2 }, data[0] ); + assertArrayEquals( new Object[]{ 3, 4 }, data[1] ); + } + + // 
───────────────────────────── + // Realistic integration tests (use unittest_namespace.unittest_table) + // ───────────────────────────── + + + @Test + void testInsert() { + // Insert id = 1 and name = Alice into the table. + Object result = myexecutor.execute( "sql", "unittest_namespace", "INSERT INTO unittest_namespace.unittest_table VALUES (1, 'Alice')" ); + assertTrue( result instanceof Integer, "Expected an integer." ); + assertEquals( result, 1, "result should equal 1." ); + } + + + @Test + void testInsertAndSelect() { + // Insert id = 1 and name = Alice into the table. + myexecutor.execute( "sql", "unittest_namespace", "INSERT INTO unittest_namespace.unittest_table VALUES (1, 'Alice')" ); + + // Query the result from the table. + Object result = myexecutor.execute( "sql", "unittest_namespace", "SELECT id, name FROM unittest_namespace.unittest_table" ); + + // Test that the result comes back as array. + System.out.println( "Result is: " + result ); + assertTrue( result instanceof Object[], "Expected tabular result" ); + + // Test that the contents of the query are correct. + Object[] arr = (Object[]) result; + String[] colNames = (String[]) arr[0]; + Object[][] data = (Object[][]) arr[1]; + assertArrayEquals( new String[]{ "id", "name" }, colNames, "Column names must match" ); + assertEquals( 1, data.length, "Should have one row" ); + assertArrayEquals( new Object[]{ 1, "Alice" }, data[0], "Row must match inserted values" ); + + } + + + @Test + void testScalarFromTable() { + myexecutor.execute( "sql", "unittest_namespace", "INSERT INTO unittest_namespace.unittest_table VALUES (2, 'Carol')" ); + Object result = myexecutor.execute( "sql", "unittest_namespace", "SELECT id FROM unittest_namespace.unittest_table WHERE name = 'Carol'" ); + assertTrue( result instanceof Integer, "Expected scalar integer result" ); + assertEquals( 2, result ); + } + + + @Test + void testInsertAndSelectMultipleRows() { + // Insert id = 1,2 and name = Alice, Bob into the table. 
+ myexecutor.execute( "sql", "unittest_namespace", "INSERT INTO unittest_namespace.unittest_table VALUES (1, 'Alice')" ); + myexecutor.execute( "sql", "unittest_namespace", "INSERT INTO unittest_namespace.unittest_table VALUES (2, 'Bob')" ); + + // Query the result from the table. + Object result = myexecutor.execute( "sql", "unittest_namespace", "SELECT id, name FROM unittest_namespace.unittest_table ORDER BY id" ); + + // Check the contents of the query are correct. + Object[] arr = (Object[]) result; + String[] colNames = (String[]) arr[0]; + Object[][] data = (Object[][]) arr[1]; + + // Test the column names match. + assertArrayEquals( new String[]{ "id", "name" }, colNames ); + + // Test the array has indeed length 2 (2 rows for Alice and Bob) + assertEquals( 2, data.length ); + + // Test the contents of each row are correct. + assertArrayEquals( new Object[]{ 1, "Alice" }, data[0] ); + assertArrayEquals( new Object[]{ 2, "Bob" }, data[1] ); + } + + + @Test + void testQueryWithSpaces() { + // Insert Bob into table. + // Insert id = 1,2 and name = Alice, Bob into the table. + myexecutor.execute( "sql", "unittest_namespace", " INSERT INTO unittest_namespace.unittest_table VALUES (1, 'Alice')" ); + myexecutor.execute( "sql", "unittest_namespace", " INSERT INTO unittest_namespace.unittest_table VALUES (2, 'Bob')" ); + + // Query the result from the table. + Object result = myexecutor.execute( "sql", "unittest_namespace", "SELECT id, name FROM unittest_namespace.unittest_table ORDER BY id" ); + + // Check the contents of the query are correct. + Object[] arr = (Object[]) result; + String[] colNames = (String[]) arr[0]; + Object[][] data = (Object[][]) arr[1]; + + // Test the column names match. + assertArrayEquals( new String[]{ "id", "name" }, colNames ); + + // Test the array has indeed length 2 (2 rows for Alice and Bob) + assertEquals( 2, data.length ); + + // Test the contents of each row are correct. 
+ assertArrayEquals( new Object[]{ 1, "Alice" }, data[0] ); + assertArrayEquals( new Object[]{ 2, "Bob" }, data[1] ); + } + + + @Test + void testDeleteFromTable() { + + // Insert Bob into table. + myexecutor.execute( "sql", "unittest_namespace", "INSERT INTO unittest_namespace.unittest_table VALUES (2, 'Bob')" ); + + // Delete Bob from table. + myexecutor.execute( "sql", "unittest_namespace", "DELETE FROM unittest_namespace.unittest_table" ); + + // Test that the query comes back null. + Object result = myexecutor.execute( "sql", "unittest_namespace", "SELECT * FROM unittest_namespace.unittest_table WHERE name = 'Bob'" ); + assertNull( result, "After DELETE the table should be empty" ); + } + + + @Test + void testBatchInsertEmployees() { + + // Insert the List of queries + List queries = Arrays.asList( + "INSERT INTO unittest_namespace.batch_table VALUES (1, 'Alice', 'F', DATE '1990-01-15', 1001)", + "INSERT INTO unittest_namespace.batch_table VALUES (2, 'Bob', 'M', DATE '1989-05-12', 1002)", + "INSERT INTO unittest_namespace.batch_table VALUES (3, 'Jane', 'F', DATE '1992-07-23', 1003)", + "INSERT INTO unittest_namespace.batch_table VALUES (4, 'Tim', 'M', DATE '1991-03-03', 1004)", + "INSERT INTO unittest_namespace.batch_table VALUES (5, 'Alex', 'M', DATE '1994-11-11', 1005)", + "INSERT INTO unittest_namespace.batch_table VALUES (6, 'Mason', 'M', DATE '1988-04-22', 1006)", + "INSERT INTO unittest_namespace.batch_table VALUES (7, 'Rena', 'F', DATE '1995-06-17', 1007)", + "INSERT INTO unittest_namespace.batch_table VALUES (8, 'Christopher', 'M', DATE '1987-08-09', 1008)", + "INSERT INTO unittest_namespace.batch_table VALUES (9, 'Lexi', 'F', DATE '1996-09-30', 1009)", + "INSERT INTO unittest_namespace.batch_table VALUES (10, 'Baen', 'M', DATE '1990-10-05', 1010)", + "INSERT INTO unittest_namespace.batch_table VALUES (11, 'Ricardo', 'M', DATE '1986-12-12', 1011)", + "INSERT INTO unittest_namespace.batch_table VALUES (12, 'Tim', 'M', DATE '1993-02-02', 1012)", + "INSERT 
INTO unittest_namespace.batch_table VALUES (13, 'Beya', 'F', DATE '1994-05-25', 1013)" + ); + + // Do the batch execution using executeBatch(...) + int[] counts = myexecutor.executeBatchSql( queries ); + + // Test that the length of the counts vector is 13 (for 13 queries in the queries list). + assertEquals( 13, counts.length, "Batch should return 13 results" ); + + // Test the i-th entry in the counts vector is actually 1 (because the i-th query changed exactly 1 row) + for ( Object c : counts ) { + assertEquals( 1, c, "Each INSERT should affect exactly 1 row" ); + } + + // Test the result has the correct type + Object result = myexecutor.execute( "sql", "unittest_namespace", "SELECT COUNT(*) FROM unittest_namespace.batch_table" ); + assertTrue( result instanceof Long || result instanceof Integer ); + + // Test the rowcount is correct. + int rowCount = ((Number) result).intValue(); + assertEquals( 13, rowCount, "Table should contain 13 rows after batch insert" ); + } + + + @Test + void testBatchRollbackOnFailure() { + + // Prepare one correct and one ill posed SQL statement to query as batch later. + List queries = Arrays.asList( + "INSERT INTO unittest_namespace.batch_table VALUES (1, 'Alice')", + "Purposefully messed up query message to produce a failure" // PK violation → id missing + ); + + // Run the ill posed batch query and test an exception is thrown. + assertThrows( RuntimeException.class, () -> { + myexecutor.executeBatchSql( queries ); + } ); + + // Query the whole table to make sure it is really empty. + Object result = myexecutor.execute( "sql", "unittest_namespace", "SELECt * FROM unittest_namespace.batch_table" ); + + // Test the query comes back as null i.e. 
the executeBatch has indeed been rolled back and the table is unchanged + assertNull( result ); + } + + + @Test + void testConnectionFailure() { + assertThrows( RuntimeException.class, () -> { + PolyphenyConnection badConn = new PolyphenyConnection( "localhost", 9999, "pa", "" ); + QueryExecutor badExec = new QueryExecutor( badConn ); + badExec.execute( "sql", "unittest_namespace", "SELECT 1" ); // should fail to connect + } ); + } + + + @Test + void testSyntaxError() { + RuntimeException runtimeException = assertThrows( RuntimeException.class, () -> { + myexecutor.execute( "sql", "unittest_namespace", "SELEC WRONG FROM nowhere" ); // typo: SELEC + } ); + assertTrue( runtimeException.getMessage().contains( "Syntax error" ) || + runtimeException.getMessage().contains( "execution failed" ), + "Exception message should indicate syntax error" ); + } + + + @Test + void testCommitFailureRollback() { + List queries = Arrays.asList( + "INSERT INTO unittest_namespace.batch_table VALUES (1, 'Alice', 'F', DATE '1990-01-15', 1001)", + "Intentional nonsense to produce a failure" // PK violation → id missing + ); + + assertThrows( RuntimeException.class, () -> { + myexecutor.executeBatchSql( queries ); + } ); + + Object result = myexecutor.execute( "sql", "unittest_namespace", "SELECT * FROM unittest_namespace.batch_table" ); + assertNull( result, "Batch should have rolled back and left the table empty" ); + } + + + @Test + // This test asserts that the column names aren't stored in the first row of the table for relational results. + // Thought that this might maybe be how it's implemented for relational results in execute(...) 
+ void testRelationalResultFirstRowDirectly() throws Exception { + // Insert a row we can recognize + myexecutor.execute( + "sql", + "unittest_namespace", + "INSERT INTO unittest_namespace.unittest_table (id, name) VALUES (1, 'Alice')" + ); + + // Unwrap to PolyConnection and PolyStatement + Connection jdbcConn = myconnection.getConnection(); + PolyConnection polyConn = jdbcConn.unwrap( PolyConnection.class ); + PolyStatement polyStmt = polyConn.createPolyStatement(); + + // Run query directly through multimodel API + Result result = polyStmt.execute( + "unittest_namespace", + "sql", + "SELECT * FROM unittest_namespace.unittest_table" + ); + + assertEquals( Result.ResultType.RELATIONAL, result.getResultType() ); + + RelationalResult rr = result.unwrap( RelationalResult.class ); + Iterator it = rr.iterator(); + assertTrue( it.hasNext(), "Expected at least one row" ); + + PolyRow firstRow = it.next(); + assertEquals( 2, firstRow.getColumnCount(), "Expected 2 columns (id, name)" ); + + TypedValue idVal = firstRow.getValue( 0 ); + TypedValue nameVal = firstRow.getValue( 1 ); + + assertEquals( 1, idVal.asInt() ); + assertEquals( "Alice", nameVal.asString() ); + } + +} diff --git a/documentation/dev/README.md b/documentation/dev/README.md new file mode 100644 index 0000000..ca044cc --- /dev/null +++ b/documentation/dev/README.md @@ -0,0 +1,93 @@ +--- +layout: page +title: "MATLAB Connector (Developers)" +toc: true +docs_area: "devs" +doc_type: doc +tags: Matlab Connector, Matlab Toolbox, Matlab, Connector +search: false +lang: en +--- + +# MATLAB Connector (Developer Documentation) + +This page documents developer-specific details for maintaining and extending the MATLAB Connector for Polypheny. 
+ +## Requirements + +- **Java:** JDK 17 or older + (newer versions are not supported by the MATLAB Engine) + https://www.mathworks.com/support/requirements/language-interfaces.html +- **Polypheny:** must be running locally +- **MATLAB:** R2023b or newer (ships with Java 8) + +## Repository Contents + +The following components are **not included** in this repository: + +- `polypheny.jar` (see **Build**) +- `polypheny-jdbc-driver-2.3.jar` (see **Build**) +- Polypheny server startup (see **Tests**) +- Packaged MATLAB toolbox (`.mltbx`) (see **Packaging**) + +## Tests + +Before running or building tests, the Polypheny server must be started manually. + +MATLAB uses Java 8 internally, while the Polypheny server requires Java 17. +To avoid version conflicts, the Polypheny server is run **outside** the build process. + +Automatic startup was evaluated but rejected due to: +- cross-platform instability +- environment-specific Java configuration issues + +If the server is not running, the build will fail. + +## Build + +To build the project, ensure the following: + +1. The `libs/` directory contains: + - `polypheny.jar` + - `polypheny-jdbc-driver-2.3.jar` + +2. The Polypheny server is running locally. + +3. Gradle is installed and configured. + +4. Versions used: + - **MATLAB:** R2023b+ + - **Polypheny JDBC driver:** 2.3 + - **Polypheny server:** tested with 1.9 + +## Packaging (MATLAB Toolbox) + +The connector is distributed as a MATLAB toolbox (`.mltbx`). + +### Steps +0. create a `.jar` file with your latest version of the Polypheny Connector by compiling the project. (**Tip**: The compiled `polypheny-all.jar` will be stored under ) +1. Open MATLAB +2. Go to **Home** +3. Open **Add-Ons → Package Toolbox** +4. Select the `matlab-polypheny-connector` folder +5. Select the project **Toolbox1** +6. Add JARs: + - `polypheny-all.jar` → `jar/` + - `polypheny-jdbc-driver-2.3.jar` → `libs/` +7. Delete any existing `PolyphenyConnector.mltbx` in `Toolbox1/release` + +8. 
Before we finally package the Toolbox you must verify the following are true: + +- **Output file:** `PolyphenyConnector.mltbx` +- **Output location:** `Toolbox1/release` +- **MATLAB path:** `/` +- **Class path must include:** + + jar/polypheny-all.jar + + libs/polypheny-jdbc-driver-2.3.jar + + +9. Click **Package Toolbox** (top right corner in MATLAB) + +MATLAB will generate the toolbox file in `Toolbox1/release`. \ No newline at end of file diff --git a/documentation/user/README.md b/documentation/user/README.md new file mode 100644 index 0000000..bbf0f99 --- /dev/null +++ b/documentation/user/README.md @@ -0,0 +1,360 @@ +--- +layout: page +title: "MATLAB Connector (Users)" +toc: true +docs_area: "users" +doc_type: doc +tags: Matlab Connector, Matlab Toolbox, Matlab, Connector +search: true +lang: en +--- + +# MATLAB Connector (User Documentation) + +## Setting up your connector +The Polypheny-Matlab connector is supplied using a PolyphenyConnector.mltbx file that will automatically install the necessary package in your Matlab environment. To install it: +- open Matlab and wait until it is ready to run +- double-click the supplied PolyphenyConnector.mltbx and wait a few seconds. Matlab should inform you the package was successfully installed. + +> **Note** +> +> MATLAB provides a graphical UI to manage installed Add-Ons. In practice, this UI can be unreliable. +> If uninstalling the Polypheny Connector via the UI fails (e.g. when installing an updated version), +> the toolbox can be force-removed manually. + +### Manual Uninstall (Force Removal) + +#### 1. Locate the installed toolbox + +Run the following in the MATLAB console: + + which polypheny.Polypheny -all + +This returns the folder where the toolbox is installed, typically of the form: + + \MathWorks\MATLAB Add-Ons\Toolboxes\PolyphenyConnector + +#### 2. Delete the toolbox folder + +Delete the entire `PolyphenyConnector` directory from disk. 
+If files are locked on Windows, close MATLAB first and then delete the folder. + +#### 3. Remove the toolbox from the MATLAB search path + + rmpath('') + +#### 4. Update MATLAB’s internal toolbox cache + + rehash toolboxcache + +#### 5. Persist the updated search path + + savepath + +#### Fallback (only if the path is badly polluted) + + restoredefaultpath + rehash toolboxcache + savepath + +This procedure bypasses MATLAB’s Add-On Manager and ensures the toolbox is fully removed +from disk, the search path, and MATLAB’s internal cache. Note the PolyphenyConnector might not show up in Matlabs Add-on UI anymore, even if the installation was successful. + + +## MATLAB Connector – Usage Examples +This section demonstrates how to use the Polypheny MATLAB connector. +All examples assume a local Polypheny instance running on port `20590`. + +--- + +### Opening and Closing a Connection +Create a connection by specifying host, port, username, and password and close the connection afterwards +```matlab +conn = polypheny.Polypheny( 'host', 'port', 'username', 'password' ); +% your code goes here +conn.close(); +``` +To test this with your local machine as host we could do +```matlab +conn = polypheny.Polypheny( 'localhost', int32(20590), 'username', 'pa' ); +% your code goes here +conn.close(); +``` +### Executing queries +Queries are executed using +```matlab +result = conn.query( 'language', 'namespace', queryString ); +``` +where `language` is an element of `{'sql', 'mongo', 'cypher'}`, `'namespace'` is the name of the namespace the query targets in the database and `queryString` is the string passed to the database. + +> **Note:** +> For Mongo queries the namespace argument is necessary for the creation and deletion of data structures in the backend. For SQL queries the namespace argument has no consequence and thus does not need to be set. In the following examples we will therefore use `""` as namespace argument for SQL. 
+ +### Executing SQL-queries +>**Note:** For relational query +results most primitive types used in our tests were cast to Matlab types. Edge cases were returned as Java objects in the table and will currently still have to be handled by the user. + +Let us look at some practical examples +```matlab +conn.query( "sql", "", "DROP TABLE IF EXISTS test" ); +conn.query( "sql", "", "CREATE TABLE test (id INTEGER PRIMARY KEY, name VARCHAR)" ); +conn.query( "sql", "", "INSERT INTO test VALUES (1,'Alice'),(2,'Bob')" ); +``` + +#### Scalar results +If a query returns a single value, the result is returned as a MATLAB scalar. +```matlab +x = conn.query( "sql", "", "SELECT COUNT(*) FROM test" ) +``` +will produce the output +```matlab +x = 2 +``` + +#### Tabular results +Queries returning multiple rows and columns are returned as a matlab `table` +```matlab +T = conn.query( "sql", "", "SELECT * FROM test ORDER BY id" ); +``` +It is possible to access the columns directly by doing +```matlab +T.id +T.name +``` +which in our example yields the output +```matlab +T.id = [1; 2] +T.name = {'Alice'; 'Bob'} +``` + +#### Empty results +Empty results +```matlab +T = conn.query( "sql", "~", "SELECT * FROM test WHERE id = 999" ); +``` +will be returned as empty MATLAB array: +```matlab +T = [] +``` + + +### SQL Batch Queries +Multiple non-SELECT statements can be executed using `queryBatch` +```matlab +conn.query( "sql", "", "DROP TABLE IF EXISTS test" ); +conn.query( "sql", "", "CREATE TABLE test (id INTEGER PRIMARY KEY, name VARCHAR)" ); + +queries = { + "INSERT INTO test VALUES (1,'Alice')" + "INSERT INTO test VALUES (2,'Bob')" +}; + +result = conn.queryBatch( "sql", "", queries ); +``` +where for `queries` containing `n` single queries, the result will be a `n x 1` vector with the i-th entry representing how many rows in the table the i-th query affected. 
Since each query inserts exactly one entry into one row, our example this yields the output +```matlab + RowsAffected + ____________ + + 1 + 1 +``` +Should single queries of a batch fail a rollback will be triggered (all or nothing principle). +### Executing Mongo Queries + +The Polypheny MATLAB connector supports Mongo-style queries via Polypheny’s document model. Mongo queries return **raw JSON documents** as MATLAB strings. Automatic decoding is intentionally not performed. + +```matlab + result = conn.query( 'mongo', namespace, queryString ); +``` + +- `namespace` – Mongo adapter / namespace (**required**) +- `queryString` – Mongo-style query (**single statement only**) + +**Note:**: Unlike SQL, the `namespace` argument is required for Mongo queries. + +--- + +#### Return Type Semantics + +| Operation type | MATLAB return value | +|--------------------------|--------------------------| +| `find(...)` | string array (JSON docs) | +| `insertOne(...)` | acknowledgment JSON | +| `deleteMany(...)` | acknowledgment JSON | +| `countDocuments(...)` | acknowledgment JSON | +| empty result | `"[]"` (JSON string) | +| syntax / multi-statement | error | + + +#### Creating a Collection +```matlab +conn.query( "mongo", "demo", 'db.patients.drop()' ); +conn.query( "mongo", "demo", 'db.createCollection("patients")' ); +``` + +#### Inserting and Querying Documents +Documents can be inserted with +```matlab +conn.query( "mongo", "demo", 'db.patients.insertOne({"name":"Alice","age":34,"icu":true})' ); +``` +and queried using +```matlab +docs = conn.query( "mongo", "demo", 'db.patients.find({})' ); +disp(docs) +``` +will generate the output: +```text +"{""name"":""Alice"",""age"":34,""icu"":true}" +``` +and can be decoded using `jsondecode( docs ) ` which will return +```text + struct with fields: + + name: "Alice" + age: 34 + icu: true +``` +#### Decoding JSON in MATLAB using `jsondecode` +Since document queries can return a variety of results included nested documents, we will 
present some examples to illustrate the usage and possibilities of MATLAB's struct type. + +##### Example 1: Common Document Structure + +This example demonstrates working with **homogeneous Mongo documents** that all share the same schema. +After decoding, `jsondecode` returns a **struct array**, enabling vectorized access and logical indexing. + +```matlab +conn.query("mongo","demo", 'db.patients_mongo.drop()'); +conn.query("mongo","demo", 'db.createCollection("patients_mongo")'); + +conn.query("mongo","demo", ... + 'db.patients_mongo.insertOne({"name":"Alice Keller","age":34,"sex":"F", ... + "hospital":{"name":"Universitätsspital Basel","address":{"city":"Basel","zip":4051}}, ... + "infection_date":"2025-03-15","variant":"Omicron","vaccinated":true, ... + "icu":true,"recovered":true,"death":false})'); + +docs_all = conn.query("mongo","demo",'db.patients_mongo.find({})'); +decoded_all = jsondecode(docs_all); + ``` +will return +```text +1×1 struct array with fields: + + recovered + death + sex + infection_date + name + variant + icu + x_id + vaccinated + hospital + age +``` +The contents of the structure can be used through +```text +decoded_all.name + +ans = + 1×1 string array + "Alice Keller" +``` +and nested documents can equivalently accessed by +```text +decoded_all(1).hospital.name + +ans = + "Universitätsspital Basel" +``` + +#### Example 2: Nested Documents and Arrays +Insertions can consist of nested documents, e.g. 
the following code +```matlab +conn.query( "mongo", "demo",'db.patients.insertOne({"name":"Bob","meta":{"insurance":"LiveForeverInsurance","allergies":["nuts","penicillin"]}})' ); + +docs = conn.query( "mongo", "demo", 'db.patients.find({})' ); +decoded = jsondecode(docs); + +decoded(1).meta.insurance +decoded(1).meta.allergies +``` +will create the outputs: +```text +decoded = + + struct with fields: + + name: "Bob" + meta: [1×1 struct] + +ans = + "LiveForeverInsurance" + +ans = + 2×1 cell array + {'nuts'} + {'penicillin'} +``` + +#### Counting Documents +Counting documents using Mongo +```matlab +count_json = conn.query( "mongo", "demo", 'db.patients.countDocuments({})' ); +disp(count_json) +``` +will generate: + + {"count":1} +--- + +#### Empty Results +```matlab +docs = conn.query( "mongo", "demo", 'db.patients.find({})' ); +``` +Returned value: + + "[]" + +--- + +#### Batch Execution (Mongo) + + queries = { + 'db.patients.insertOne({"name":"Alice","age":25})' + 'db.patients.insertOne({"name":"Bob","age":30})' + }; + + conn.queryBatch( "mongo", "demo", queries ); + +Batch queries returning results: + + docs = conn.queryBatch( "mongo", "demo", { + 'db.patients.find({"name":"Alice"})' + 'db.patients.find({"name":"Bob"})' + }); + + decoded = jsondecode( docs ); + +--- + +### Error Handling + +The following conditions raise MATLAB errors: +- invalid JSON syntax +- multiple statements in a single query string +- unsupported operations + + badQuery = 'db.patients.insertOne({"foo":123)'; + conn.query( "mongo", "demo", badQuery ); + +--- + +### Design Rationale + +- Mongo results are returned as raw JSON +- Automatic decoding is avoided +- Users explicitly control conversion via jsondecode + +### Executing Cypher Queries +The Connector currently does not support Cypher queries yet. 
diff --git a/gradle.properties b/gradle.properties new file mode 100644 index 0000000..377538c --- /dev/null +++ b/gradle.properties @@ -0,0 +1,5 @@ +# This file was generated by the Gradle 'init' task. +# https://docs.gradle.org/current/userguide/build_environment.html#sec:gradle_configuration_properties + +org.gradle.configuration-cache=true + diff --git a/gradle/libs.versions.toml b/gradle/libs.versions.toml new file mode 100644 index 0000000..afde022 --- /dev/null +++ b/gradle/libs.versions.toml @@ -0,0 +1,10 @@ +# This file was generated by the Gradle 'init' task. +# https://docs.gradle.org/current/userguide/platforms.html#sub::toml-dependencies-format + +[versions] +guava = "33.3.1-jre" +junit-jupiter = "5.11.3" + +[libraries] +guava = { module = "com.google.guava:guava", version.ref = "guava" } +junit-jupiter = { module = "org.junit.jupiter:junit-jupiter", version.ref = "junit-jupiter" } diff --git a/gradle/wrapper/gradle-wrapper.jar b/gradle/wrapper/gradle-wrapper.jar new file mode 100644 index 0000000..9bbc975 Binary files /dev/null and b/gradle/wrapper/gradle-wrapper.jar differ diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties new file mode 100644 index 0000000..37f853b --- /dev/null +++ b/gradle/wrapper/gradle-wrapper.properties @@ -0,0 +1,7 @@ +distributionBase=GRADLE_USER_HOME +distributionPath=wrapper/dists +distributionUrl=https\://services.gradle.org/distributions/gradle-8.13-bin.zip +networkTimeout=10000 +validateDistributionUrl=true +zipStoreBase=GRADLE_USER_HOME +zipStorePath=wrapper/dists diff --git a/gradlew b/gradlew new file mode 100644 index 0000000..faf9300 --- /dev/null +++ b/gradlew @@ -0,0 +1,251 @@ +#!/bin/sh + +# +# Copyright © 2015-2021 the original authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# SPDX-License-Identifier: Apache-2.0 +# + +############################################################################## +# +# Gradle start up script for POSIX generated by Gradle. +# +# Important for running: +# +# (1) You need a POSIX-compliant shell to run this script. If your /bin/sh is +# noncompliant, but you have some other compliant shell such as ksh or +# bash, then to run this script, type that shell name before the whole +# command line, like: +# +# ksh Gradle +# +# Busybox and similar reduced shells will NOT work, because this script +# requires all of these POSIX shell features: +# * functions; +# * expansions «$var», «${var}», «${var:-default}», «${var+SET}», +# «${var#prefix}», «${var%suffix}», and «$( cmd )»; +# * compound commands having a testable exit status, especially «case»; +# * various built-in commands including «command», «set», and «ulimit». +# +# Important for patching: +# +# (2) This script targets any POSIX shell, so it avoids extensions provided +# by Bash, Ksh, etc; in particular arrays are avoided. +# +# The "traditional" practice of packing multiple parameters into a +# space-separated string is a well documented source of bugs and security +# problems, so this is (mostly) avoided, by progressively accumulating +# options in "$@", and eventually passing that to Java. +# +# Where the inherited environment variables (DEFAULT_JVM_OPTS, JAVA_OPTS, +# and GRADLE_OPTS) rely on word-splitting, this is performed explicitly; +# see the in-line comments for details. 
+# +# There are tweaks for specific operating systems such as AIX, CygWin, +# Darwin, MinGW, and NonStop. +# +# (3) This script is generated from the Groovy template +# https://github.com/gradle/gradle/blob/HEAD/platforms/jvm/plugins-application/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt +# within the Gradle project. +# +# You can find Gradle at https://github.com/gradle/gradle/. +# +############################################################################## + +# Attempt to set APP_HOME + +# Resolve links: $0 may be a link +app_path=$0 + +# Need this for daisy-chained symlinks. +while + APP_HOME=${app_path%"${app_path##*/}"} # leaves a trailing /; empty if no leading path + [ -h "$app_path" ] +do + ls=$( ls -ld "$app_path" ) + link=${ls#*' -> '} + case $link in #( + /*) app_path=$link ;; #( + *) app_path=$APP_HOME$link ;; + esac +done + +# This is normally unused +# shellcheck disable=SC2034 +APP_BASE_NAME=${0##*/} +# Discard cd standard output in case $CDPATH is set (https://github.com/gradle/gradle/issues/25036) +APP_HOME=$( cd -P "${APP_HOME:-./}" > /dev/null && printf '%s\n' "$PWD" ) || exit + +# Use the maximum available, or set MAX_FD != -1 to use that value. +MAX_FD=maximum + +warn () { + echo "$*" +} >&2 + +die () { + echo + echo "$*" + echo + exit 1 +} >&2 + +# OS specific support (must be 'true' or 'false'). +cygwin=false +msys=false +darwin=false +nonstop=false +case "$( uname )" in #( + CYGWIN* ) cygwin=true ;; #( + Darwin* ) darwin=true ;; #( + MSYS* | MINGW* ) msys=true ;; #( + NONSTOP* ) nonstop=true ;; +esac + +CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar + + +# Determine the Java command to use to start the JVM. +if [ -n "$JAVA_HOME" ] ; then + if [ -x "$JAVA_HOME/jre/sh/java" ] ; then + # IBM's JDK on AIX uses strange locations for the executables + JAVACMD=$JAVA_HOME/jre/sh/java + else + JAVACMD=$JAVA_HOME/bin/java + fi + if [ ! 
-x "$JAVACMD" ] ; then + die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME + +Please set the JAVA_HOME variable in your environment to match the +location of your Java installation." + fi +else + JAVACMD=java + if ! command -v java >/dev/null 2>&1 + then + die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. + +Please set the JAVA_HOME variable in your environment to match the +location of your Java installation." + fi +fi + +# Increase the maximum file descriptors if we can. +if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then + case $MAX_FD in #( + max*) + # In POSIX sh, ulimit -H is undefined. That's why the result is checked to see if it worked. + # shellcheck disable=SC2039,SC3045 + MAX_FD=$( ulimit -H -n ) || + warn "Could not query maximum file descriptor limit" + esac + case $MAX_FD in #( + '' | soft) :;; #( + *) + # In POSIX sh, ulimit -n is undefined. That's why the result is checked to see if it worked. + # shellcheck disable=SC2039,SC3045 + ulimit -n "$MAX_FD" || + warn "Could not set maximum file descriptor limit to $MAX_FD" + esac +fi + +# Collect all arguments for the java command, stacking in reverse order: +# * args from the command line +# * the main class name +# * -classpath +# * -D...appname settings +# * --module-path (only if needed) +# * DEFAULT_JVM_OPTS, JAVA_OPTS, and GRADLE_OPTS environment variables. 
+ +# For Cygwin or MSYS, switch paths to Windows format before running java +if "$cygwin" || "$msys" ; then + APP_HOME=$( cygpath --path --mixed "$APP_HOME" ) + CLASSPATH=$( cygpath --path --mixed "$CLASSPATH" ) + + JAVACMD=$( cygpath --unix "$JAVACMD" ) + + # Now convert the arguments - kludge to limit ourselves to /bin/sh + for arg do + if + case $arg in #( + -*) false ;; # don't mess with options #( + /?*) t=${arg#/} t=/${t%%/*} # looks like a POSIX filepath + [ -e "$t" ] ;; #( + *) false ;; + esac + then + arg=$( cygpath --path --ignore --mixed "$arg" ) + fi + # Roll the args list around exactly as many times as the number of + # args, so each arg winds up back in the position where it started, but + # possibly modified. + # + # NB: a `for` loop captures its iteration list before it begins, so + # changing the positional parameters here affects neither the number of + # iterations, nor the values presented in `arg`. + shift # remove old arg + set -- "$@" "$arg" # push replacement arg + done +fi + + +# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. +DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"' + +# Collect all arguments for the java command: +# * DEFAULT_JVM_OPTS, JAVA_OPTS, and optsEnvironmentVar are not allowed to contain shell fragments, +# and any embedded shellness will be escaped. +# * For example: A user cannot expect ${Hostname} to be expanded, as it is an environment variable and will be +# treated as '${Hostname}' itself on the command line. + +set -- \ + "-Dorg.gradle.appname=$APP_BASE_NAME" \ + -classpath "$CLASSPATH" \ + org.gradle.wrapper.GradleWrapperMain \ + "$@" + +# Stop when "xargs" is not available. +if ! command -v xargs >/dev/null 2>&1 +then + die "xargs is not available" +fi + +# Use "xargs" to parse quoted args. +# +# With -n1 it outputs one arg per line, with the quotes and backslashes removed. 
+# +# In Bash we could simply go: +# +# readarray ARGS < <( xargs -n1 <<<"$var" ) && +# set -- "${ARGS[@]}" "$@" +# +# but POSIX shell has neither arrays nor command substitution, so instead we +# post-process each arg (as a line of input to sed) to backslash-escape any +# character that might be a shell metacharacter, then use eval to reverse +# that process (while maintaining the separation between arguments), and wrap +# the whole thing up as a single "set" statement. +# +# This will of course break if any of these variables contains a newline or +# an unmatched quote. +# + +eval "set -- $( + printf '%s\n' "$DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS" | + xargs -n1 | + sed ' s~[^-[:alnum:]+,./:=@_]~\\&~g; ' | + tr '\n' ' ' + )" '"$@"' + +exec "$JAVACMD" "$@" diff --git a/gradlew.bat b/gradlew.bat new file mode 100644 index 0000000..9d21a21 --- /dev/null +++ b/gradlew.bat @@ -0,0 +1,94 @@ +@rem +@rem Copyright 2015 the original author or authors. +@rem +@rem Licensed under the Apache License, Version 2.0 (the "License"); +@rem you may not use this file except in compliance with the License. +@rem You may obtain a copy of the License at +@rem +@rem https://www.apache.org/licenses/LICENSE-2.0 +@rem +@rem Unless required by applicable law or agreed to in writing, software +@rem distributed under the License is distributed on an "AS IS" BASIS, +@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +@rem See the License for the specific language governing permissions and +@rem limitations under the License. 
+@rem +@rem SPDX-License-Identifier: Apache-2.0 +@rem + +@if "%DEBUG%"=="" @echo off +@rem ########################################################################## +@rem +@rem Gradle startup script for Windows +@rem +@rem ########################################################################## + +@rem Set local scope for the variables with windows NT shell +if "%OS%"=="Windows_NT" setlocal + +set DIRNAME=%~dp0 +if "%DIRNAME%"=="" set DIRNAME=. +@rem This is normally unused +set APP_BASE_NAME=%~n0 +set APP_HOME=%DIRNAME% + +@rem Resolve any "." and ".." in APP_HOME to make it shorter. +for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi + +@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. +set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m" + +@rem Find java.exe +if defined JAVA_HOME goto findJavaFromJavaHome + +set JAVA_EXE=java.exe +%JAVA_EXE% -version >NUL 2>&1 +if %ERRORLEVEL% equ 0 goto execute + +echo. 1>&2 +echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 1>&2 +echo. 1>&2 +echo Please set the JAVA_HOME variable in your environment to match the 1>&2 +echo location of your Java installation. 1>&2 + +goto fail + +:findJavaFromJavaHome +set JAVA_HOME=%JAVA_HOME:"=% +set JAVA_EXE=%JAVA_HOME%/bin/java.exe + +if exist "%JAVA_EXE%" goto execute + +echo. 1>&2 +echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% 1>&2 +echo. 1>&2 +echo Please set the JAVA_HOME variable in your environment to match the 1>&2 +echo location of your Java installation. 
1>&2 + +goto fail + +:execute +@rem Setup the command line + +set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar + + +@rem Execute Gradle +"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %* + +:end +@rem End local scope for the variables with windows NT shell +if %ERRORLEVEL% equ 0 goto mainEnd + +:fail +rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of +rem the _cmd.exe /c_ return code! +set EXIT_CODE=%ERRORLEVEL% +if %EXIT_CODE% equ 0 set EXIT_CODE=1 +if not ""=="%GRADLE_EXIT_CONSOLE%" exit %EXIT_CODE% +exit /b %EXIT_CODE% + +:mainEnd +if "%OS%"=="Windows_NT" endlocal + +:omega diff --git a/matlab-polypheny-connector/+polypheny/Polypheny.m b/matlab-polypheny-connector/+polypheny/Polypheny.m new file mode 100644 index 0000000..dfd0721 --- /dev/null +++ b/matlab-polypheny-connector/+polypheny/Polypheny.m @@ -0,0 +1,138 @@ +classdef Polypheny < handle +% POLYPHENY MATLAB wrapper for the Polypheny Java connector. Wraps polyphenyconnector.PolyphenyConnection +% and polyphenyconnector.QueryExecutor to run queries from MATLAB + properties ( Access = private ) + polyConnection % Java PolyphenyConnection + queryExecutor % Java QueryExecutor + + end + + methods + + function PolyWrapper = Polypheny( host, port, user, password ) + % Polypheny( LANGUAGE, HOST, PORT, USER, PASSWORD ): Set up Java connection + executor + % LANGUAGE: The database language ( 'sql', 'mongo', 'cypher' ) + % HOST: Database host ( e.g. 
'localhost' ) + % PORT: Database port ( integer ) + % USER: Username + % PASSWORD: Password + + % This makes sure that Matlab sees Java classes supplied by the .jar files in the Matlabtoolbox PolyphenyConnector.mtlbx + try + if ~polypheny.Polypheny.hasPolypheny( ) + startup( ); + end + PolyWrapper.polyConnection = javaObject( "polyphenyconnector.PolyphenyConnection",host, int32( port ), user, password ); + PolyWrapper.queryExecutor = javaObject( "polyphenyconnector.QueryExecutor", PolyWrapper.polyConnection ); + + catch ME %Matlab Exception + disp( "Error: " + ME.message ) + end + + end + + + + function matlab_result = query( PolyWrapper, language, namespace, queryStr ) + % query( POLYWRAPPER, QUERYSTR ): Execute query via QueryExecutor.java + % POLYWRAPPER: The PolyWrapper Matlab object + % LANGUAGE: The language of the query string -> SQL, mongo, Cypher + % QUERYSTR: The queryStr set by the user + % @return matlab_result: The result of the query -> return type differs for SQL,Mongo and Cypher + + + try + java_result = PolyWrapper.queryExecutor.execute( string( language ), string( namespace ), queryStr ); + + switch lower( language ) + case "sql" + if isempty( java_result ) + matlab_result = []; + elseif isscalar( java_result ) + matlab_result = java_result; + elseif isa( java_result,'java.lang.Object[]' ) && numel( java_result )==2 + tmp = cell( java_result ); + colNames = cell( tmp{1} ); + data = cell( tmp{2} ); + matlab_result = cell2table( data, 'VariableNames', colNames ); + else + matlab_result = []; + end + + case "mongo" + if isa( java_result, 'java.util.List' ) + % Current driver behavior: always returns List of JSON docs + matlab_result = string(java_result); + elseif isnumeric( java_result ) + % Not observed in current driver, but kept for forward compatibility + % (e.g. 
if Polypheny ever returns scalar counts directly) + matlab_result = java_result; + else + error( "Unexpected Mongo result type: %s", class( java_result ) ); + end + + case "cypher" + % TODO: integrate once Cypher executor is ready + error( "Cypher not supported yet." ); + + otherwise + error( "Unsupported language: %s", language ); + end + + catch ME + error( "Query execution failed: %s", ME.message ); + end + end + + function matlab_result = queryBatch( PolyWrapper, language, namespace, queryList ) + % queryBatch( POLYWRAPPER, QUERYLIST ): Execute batch of non-SELECT statements + % QUERYLIST: A cell array of SQL strings ( INSERT, UPDATE, DELETE, etc. ) + % + % Returns: int array with rows affected per statement + + if ~iscell( queryList ) + error( 'queryBatch expects a cell array of query strings' ); + end + + javaList = java.util.ArrayList(); + for i = 1:numel( queryList ) + javaList.add( string(queryList{i} ) ); + end + + switch lower(language) + case "sql" + java_result = PolyWrapper.queryExecutor.executeBatchSql( javaList ); + %matlab_result = double(java_result(:))'; + vals = double(java_result(:)); % convert Java int[] to MATLAB column vector + matlab_result = array2table(vals, 'VariableNames', {'RowsAffected'}); + + case "mongo" + java_result = PolyWrapper.queryExecutor.executeBatchMongo( string(namespace), javaList ); + matlab_result = string( java_result ); % outer list + + case "cypher" + error( "Batch execution for Cypher not yet implemented." 
); + + otherwise + error( "Unsupported language: %s", language ); + end + + end + + function close( PolyWrapper ) + % close( POLYWRAPPER ): Close the Java connection + % POLYWRAPPER: The PolyWrapper Matlab object + PolyWrapper.polyConnection.close( ); + end + end + + methods ( Static ) + function flag = hasPolypheny( ) + % HASPOLYPHENY Returns true if Polypheny Java classes are available because the exist( 'polyphenyconnector.PolyphenyConnection','class' ) + % returns 8 if Matlab sees the Java class and 0 otherwise. + flag = ( exist( 'polyphenyconnector.PolyphenyConnection','class' ) == 8 ); + end + + end + +end diff --git a/matlab-polypheny-connector/ClassTest.m b/matlab-polypheny-connector/ClassTest.m new file mode 100644 index 0000000..47f32fc --- /dev/null +++ b/matlab-polypheny-connector/ClassTest.m @@ -0,0 +1,9 @@ +clear all; clear classes +%conn = javaObject('polyphenyconnector.PolyphenyConnection', 'localhost', int32(20590), 'pa', ''); +%exec = javaObject('polyphenyconnector.QueryExecutor', conn); + +%res = exec.execute('sql', 'SELECT 1 AS x'); +results = runtests('PolyphenyWrapperTest'); +disp(results) + +%disp(res); diff --git a/matlab-polypheny-connector/LICENSE b/matlab-polypheny-connector/LICENSE new file mode 100644 index 0000000..845ef47 --- /dev/null +++ b/matlab-polypheny-connector/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [2025] [Fynn Gohlke] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/matlab-polypheny-connector/PolyphenyWrapperTest.m b/matlab-polypheny-connector/PolyphenyWrapperTest.m new file mode 100644 index 0000000..68b1909 --- /dev/null +++ b/matlab-polypheny-connector/PolyphenyWrapperTest.m @@ -0,0 +1,69 @@ +classdef PolyphenyWrapperTest < matlab.unittest.TestCase + properties + conn + end + + methods (TestMethodSetup) + function setupConnection(testCase) + testCase.conn = polypheny.Polypheny('localhost', int32(20590), 'pa', '' ); + end + end + + methods (TestMethodTeardown) + function closeConnection(testCase) + testCase.conn.close(); + end + end + + methods (Test) + function testScalar(testCase) + r = testCase.conn.query( "sql" , "" , "SELECT 1 AS x"); + testCase.verifyEqual(r, 1); + end + + function testTable(testCase) + testCase.conn.query("sql" , "" , "DROP TABLE IF EXISTS wrapper_test"); + testCase.conn.query("sql" , "" , "CREATE TABLE wrapper_test (id INTEGER PRIMARY KEY, name VARCHAR)"); + testCase.conn.query("sql" , "" , "INSERT INTO wrapper_test VALUES (1,'Alice'),(2,'Bob')"); + + T = testCase.conn.query("sql" , "" , "SELECT * FROM wrapper_test ORDER BY id"); + + if istable(T) + % Expected: table output with column "name" + testCase.verifyEqual(T.name, {'Alice'; 'Bob'}); + elseif iscell(T) + % Fallback: check the raw cell contents + testCase.verifyEqual(T(:,2), {'Alice','Bob'}'); + else + testCase.verifyFail("Unexpected return type: " + class(T)); + 
end + end + + function testEmpty(testCase) + T = testCase.conn.query("sql" , "" , "SELECT * FROM wrapper_test WHERE id=999"); + testCase.verifyEmpty(T); + end + + function testBatchInsert(testCase) + % Prepare table + testCase.conn.query("sql" , "" , "DROP TABLE IF EXISTS batch_test"); + testCase.conn.query("sql" , "" , "CREATE TABLE batch_test (id INTEGER PRIMARY KEY, name VARCHAR)"); + + % Batch insert 2 rows + queries = { ... + "INSERT INTO batch_test VALUES (1,'Alice')", ... + "INSERT INTO batch_test VALUES (2,'Bob')" ... + }; + result = testCase.conn.queryBatch("sql" , "" , queries); + + % Verify JDBC return codes + testCase.verifyEqual(result.RowsAffected', [1 1]); + + % Verify table contents + T = testCase.conn.query("sql" , "" ,"SELECT id, name FROM batch_test ORDER BY id"); + testCase.verifyEqual(T.id, [1; 2]); + testCase.verifyEqual(string(T.name), ["Alice"; "Bob"]); + end + + end +end \ No newline at end of file diff --git a/matlab-polypheny-connector/PolyphenyWrapperTestMQL.m b/matlab-polypheny-connector/PolyphenyWrapperTestMQL.m new file mode 100644 index 0000000..e992fc1 --- /dev/null +++ b/matlab-polypheny-connector/PolyphenyWrapperTestMQL.m @@ -0,0 +1,245 @@ +classdef PolyphenyWrapperTestMQL < matlab.unittest.TestCase + properties + conn + end + + methods(TestClassSetup) + function setUpNamespaceAndCollection(testCase) + clc; + % open connection once + testCase.conn = polypheny.Polypheny("localhost",20590,"pa",""); + + % try create collection + try + testCase.conn.query("mongo","mongotest", ... + 'db.createCollection("unittest_collection")'); + catch + end + end + end + + methods(TestClassTeardown) + function tearDownNamespaceAndCollection(testCase) + try + testCase.conn.query("mongo","mongotest", ... + 'db.unittest_collection.drop()'); + catch + end + testCase.conn.close(); + end + end + + methods(TestMethodSetup) + function clearCollection(testCase) + try + testCase.conn.query("mongo","mongotest", ... 
+ 'db.unittest_collection.deleteMany({})'); + catch + end + end + end + + methods(TestMethodTeardown) + function clearCollectionAfter(testCase) + try + testCase.conn.query("mongo","mongotest", ... + 'db.unittest_collection.deleteMany({})'); + catch + end + end + end + + methods(Test) + + function testDeleteManyRemovesAllDocs(testCase) + % Drop & recreate collection + testCase.conn.query("mongo","mongotest", 'db.unittest_collection.drop()'); + testCase.conn.query("mongo","mongotest", 'db.createCollection("unittest_collection")'); + + % Insert three documents + testCase.conn.query("mongo","mongotest", 'db.unittest_collection.insertOne({"id":1,"name":"Alice"})'); + testCase.conn.query("mongo","mongotest", 'db.unittest_collection.insertOne({"id":2,"name":"Bob"})'); + testCase.conn.query("mongo","mongotest", 'db.unittest_collection.insertOne({"id":3,"name":"Ciri"})'); + + % Call deleteMany({}) + ack = testCase.conn.query("mongo","mongotest", 'db.unittest_collection.deleteMany({})'); + disp("Ack from deleteMany:"); + disp(ack); + + % Verify collection is empty + docs = testCase.conn.query("mongo","mongotest", 'db.unittest_collection.find({})'); + docs = jsondecode(docs); + testCase.verifyEmpty(docs, "Collection should be empty after deleteMany({})"); + end + + function testInsertManyAndNestedDocument(testCase) + testCase.conn.query("mongo","mongotest",'db.unittest_collection.insertOne({"age":14})'); + testCase.conn.query("mongo","mongotest",'db.unittest_collection.insertOne({"age":20})'); + testCase.conn.query("mongo","mongotest",'db.unittest_collection.insertOne({"age":24})'); + testCase.conn.query("mongo","mongotest",'db.unittest_collection.insertOne({"age":30,"adress":{"Country":"Switzerland","Code":4051}})'); + docs = testCase.conn.query("mongo","mongotest",'db.unittest_collection.find({"age":{$gt:29}})'); + disp(docs) + decoded = jsondecode(docs); + disp(decoded) + testCase.verifyEqual(numel(docs),1); + testCase.verifyTrue(contains(docs(1),'"age":30')); + end + + 
function testBooleanField(testCase) + testCase.conn.query("mongo","mongotest",'db.unittest_collection.insertOne({"flag":true})'); + docs = testCase.conn.query("mongo","mongotest",'db.unittest_collection.find({})'); + disp(docs) + decoded = jsondecode(docs); + disp(decoded) + testCase.verifyTrue(contains(docs(1),'"flag":true')); + testCase.verifyClass(decoded.flag, 'logical'); % asserts that class(decoded.flag) == logical + end + + function testIntegerAgeField(testCase) + testCase.conn.query("mongo","mongotest",'db.unittest_collection.insertOne({"age":42})'); + docs = testCase.conn.query("mongo","mongotest",'db.unittest_collection.find({})'); + testCase.verifyEqual(numel(docs),1); + testCase.verifyTrue(contains(docs(1),'"age":42')); + end + + function testStringField(testCase) + testCase.conn.query("mongo","mongotest",'db.unittest_collection.insertOne({"name":"Alice"})'); + docs = testCase.conn.query("mongo","mongotest",'db.unittest_collection.find({})'); + testCase.verifyEqual(numel(docs),1); + testCase.verifyTrue(contains(docs(1),'"name":"Alice"')); + end + + function testLongField(testCase) + testCase.conn.query("mongo","mongotest",'db.unittest_collection.insertOne({"big":1111111111111111111})'); + docs = testCase.conn.query("mongo","mongotest",'db.unittest_collection.find({})'); + testCase.verifyEqual(numel(docs),1); + testCase.verifyTrue(contains(docs(1),'"big":1111111111111111111')); + end + + function testDoubleField(testCase) + testCase.conn.query("mongo","mongotest",'db.unittest_collection.insertOne({"pi":3.14159})'); + docs = testCase.conn.query("mongo","mongotest",'db.unittest_collection.find({})'); + testCase.verifyEqual(numel(docs),1); + testCase.verifyTrue(contains(docs(1),'"pi":3.14159')); + end + + function testInsertAndQueryTwoDocsRawJson(testCase) + % Clean collection + testCase.conn.query("mongo","mongotest",'db.unittest_collection.drop()'); + testCase.conn.query("mongo","mongotest",'db.createCollection("unittest_collection")'); + + % Insert two 
docs + testCase.conn.query("mongo","mongotest", ... + 'db.unittest_collection.insertOne({"id":1,"name":"Alice"})'); + testCase.conn.query("mongo","mongotest", ... + 'db.unittest_collection.insertOne({"id":2,"name":"Bob"})'); + + % Query back + docs = testCase.conn.query("mongo","mongotest",'db.unittest_collection.find({})'); + disp("Raw JSON:"); + disp(docs); + decoded = jsondecode(docs); + disp(decoded) + + % Assert raw JSON is exactly what we want + testCase.verifyTrue(contains(docs(1),'"name":"Alice"')); + testCase.verifyTrue(contains(docs(1),'"name":"Bob"')); + end + + + function testCountDocuments(testCase) + testCase.conn.query("mongo","mongotest",'db.unittest_collection.insertOne({"name":"Bob"})'); + docs = testCase.conn.query("mongo","mongotest",'db.unittest_collection.countDocuments({})'); + disp(docs) + testCase.verifyEqual(numel(docs),1); + testCase.verifyTrue(contains(docs(1),'{"count":1}')); + end + + function testArrayField(testCase) + testCase.conn.query("mongo","mongotest",'db.unittest_collection.insertOne({"scores":[1,2,3]})'); + docs = testCase.conn.query("mongo","mongotest",'db.unittest_collection.find({})'); + disp(docs) + testCase.verifyEqual(numel(docs),1); + testCase.verifyTrue(contains(docs,'"scores":[1,2,3]')); + end + + function testFindOnEmptyCollection(testCase) + docs = testCase.conn.query("mongo","mongotest",'db.unittest_collection.find({})'); + disp(docs) + testCase.verifyEqual(docs,"[]"); + end + + function testInsertManyAndFindMultiple(testCase) + testCase.conn.query("mongo","mongotest",'db.unittest_collection.insertOne({"id":10,"name":"A"})'); + testCase.conn.query("mongo","mongotest",'db.unittest_collection.insertOne({"id":11,"name":"B"})'); + docs = testCase.conn.query("mongo","mongotest",'db.unittest_collection.find({})'); + disp(docs) + testCase.verifyTrue(contains(docs,'"id":10')) + testCase.verifyTrue(contains(docs,'"name":"A"')) + testCase.verifyTrue(contains(docs,'"id":11')) + 
testCase.verifyTrue(contains(docs,'"name":"B"')) + end + + function testBatchInsertAndFind(testCase) + queries = { ... + 'db.unittest_collection.insertOne({"name":"Alice","age":25})', ... + 'db.unittest_collection.insertOne({"name":"Alice","age":20})', ... + 'db.unittest_collection.insertOne({"name":"Bob","age":30})' }; + ignore = testCase.conn.queryBatch("mongo","mongotest",queries); + queries2 = { ... + 'db.unittest_collection.find({"name":"Alice"})', ... + 'db.unittest_collection.find({"name":"Alice","age":20})', ... + 'db.unittest_collection.find({"name":"Bob","age":30})' }; + docs = testCase.conn.queryBatch("mongo","mongotest", queries2); + disp(docs) + decoded = jsondecode(docs); + disp(decoded) + testCase.verifyEqual(numel(decoded{1}), 2); % 2 docs in first query + + % check names + names = {decoded{1}.name}; % cell of names + disp(names) + testCase.verifyEqual(string(names), ["Alice","Alice"]); + + end + + + function testBatchMixedOps(testCase) + queries = { ... + 'db.unittest_collection.insertOne({"name":"Charlie","active":true})', ... + 'db.unittest_collection.countDocuments({})' }; + docs = testCase.conn.queryBatch("mongo","mongotest",queries); + testCase.verifyEqual(numel(docs),1); + decoded = jsondecode(docs) + varname = fieldnames(decoded{2}) + disp(decoded{2}.count) + testCase.verifyTrue(decoded{2}.count==1); + end + + function testSyntaxErrorThrows(testCase) + badQuery = 'db.unittest_collection.insertOne({"foo":123)'; % invalid JSON + testCase.verifyError(@() testCase.conn.query("mongo","mongotest",badQuery),?MException); + end + + function testMultiStatementFails(testCase) + badMulti = [ ... + 'db.people.insertOne({"name":"Alice","age":20}); ' ... + 'db.people.insertOne({"name":"Bob","age":24}); ' ... + 'db.people.find({})' ]; + testCase.verifyError(@() testCase.conn.query("mongo","mongotest",badMulti),?MException); + end + + function testBatchRollback(testCase) + queries = { ... + 'db.unittest_collection.insertOne({"id":1,"name":"Alice"})', ... 
+ 'db.unittest_collection.insertOne({"id":2,"name":"Bob"})', ... + 'db.unittest_collection.insertOne({"id":3,"name":"Janice"' }; % broken + testCase.verifyError(@() testCase.conn.queryBatch("mongo","mongotest",queries),?MException); + docs = testCase.conn.query("mongo","mongotest",'db.unittest_collection.find({})'); + disp(docs) + testCase.verifyEqual(numel(docs),1); + testCase.verifyEqual(docs,"[]") + end + + end +end diff --git a/matlab-polypheny-connector/PolyphenyWrapperTestSQL.m b/matlab-polypheny-connector/PolyphenyWrapperTestSQL.m new file mode 100644 index 0000000..0b25e6b --- /dev/null +++ b/matlab-polypheny-connector/PolyphenyWrapperTestSQL.m @@ -0,0 +1,220 @@ +classdef PolyphenyWrapperTestSQL < matlab.unittest.TestCase + properties + conn % polypheny.Polypheny wrapper + end + + methods(TestClassSetup) + function setUpNamespaceAndTable(testCase) + clc; + % Open connection once for all tests + testCase.conn = polypheny.Polypheny("localhost",20590,"pa",""); + + % Drop leftovers if they exist + try + testCase.conn.query("sql","unittest_namespace", ... + "DROP TABLE IF EXISTS unittest_namespace.unittest_table"); + testCase.conn.query("sql","unittest_namespace", ... + "DROP NAMESPACE IF EXISTS unittest_namespace"); + catch + end + + % Create namespace + table for execute() + testCase.conn.query("sql","", ... + "CREATE NAMESPACE unittest_namespace"); + testCase.conn.query("sql","unittest_namespace", ... + "CREATE TABLE unittest_namespace.unittest_table (id INT NOT NULL, name VARCHAR(100), PRIMARY KEY(id))"); + + % Drop and recreate batch_table + try + testCase.conn.query("sql","unittest_namespace", ... + "DROP TABLE IF EXISTS unittest_namespace.batch_table"); + catch + end + testCase.conn.query("sql","unittest_namespace", ... + "CREATE TABLE unittest_namespace.batch_table (" + ... + "emp_id INT NOT NULL, " + ... + "name VARCHAR(100), " + ... + "gender VARCHAR(10), " + ... + "birthday DATE, " + ... + "employee_id INT, " + ... 
+ "PRIMARY KEY(emp_id))"); + end + end + + methods(TestClassTeardown) + function tearDownNamespaceAndTable(testCase) + % Cleanup after all tests + try + testCase.conn.query("sql","unittest_namespace", ... + "DROP TABLE IF EXISTS unittest_namespace.unittest_table"); + testCase.conn.query("sql","unittest_namespace", ... + "DROP TABLE IF EXISTS unittest_namespace.batch_table"); + testCase.conn.query("sql","", ... + "DROP NAMESPACE IF EXISTS unittest_namespace"); + catch + end + testCase.conn.close(); + end + end + + methods(TestMethodSetup) + function clearTables(testCase) + % Clear before each test + try + testCase.conn.query("sql","unittest_namespace", ... + "DELETE FROM unittest_namespace.unittest_table"); + testCase.conn.query("sql","unittest_namespace", ... + "DELETE FROM unittest_namespace.batch_table"); + catch + end + end + end + + methods(TestMethodTeardown) + function clearTablesAfter(testCase) + % Clear again after each test + try + testCase.conn.query("sql","unittest_namespace", ... + "DELETE FROM unittest_namespace.unittest_table"); + testCase.conn.query("sql","unittest_namespace", ... 
+ "DELETE FROM unittest_namespace.batch_table"); + catch + end + end + end + + methods(Test) + function testScalarLiteral(testCase) + r = testCase.conn.query("sql","","SELECT 42 AS answer"); + testCase.verifyEqual(r,42); + end + + function testEmptyLiteral(testCase) + r = testCase.conn.query("sql","","SELECT * FROM (SELECT 1) t WHERE 1=0"); + testCase.verifyEmpty(r); + end + + function testTableLiteral(testCase) + r = testCase.conn.query("sql","unittest_namespace","SELECT 1 AS a, 2 AS b UNION ALL SELECT 3,4"); + testCase.verifyTrue(istable(r)); + testCase.verifyEqual(r.Properties.VariableNames,{'a','b'}); + testCase.verifyEqual(height(r),2); + testCase.verifyEqual(table2cell(r(1,:)),{1,2}); + testCase.verifyEqual(table2cell(r(2,:)),{3,4}); + end + + function testInsert(testCase) + r = testCase.conn.query("sql","unittest_namespace","INSERT INTO unittest_namespace.unittest_table VALUES (1,'Alice')"); + testCase.verifyEqual(r,1); + end + + function testInsertAndSelect(testCase) + testCase.conn.query("sql","unittest_namespace","INSERT INTO unittest_namespace.unittest_table VALUES (1,'Alice')"); + r = testCase.conn.query("sql","unittest_namespace","SELECT id,name FROM unittest_namespace.unittest_table"); + testCase.verifyTrue(istable(r)); + testCase.verifyEqual(r.Properties.VariableNames,{'id','name'}); + testCase.verifyEqual(height(r),1); + testCase.verifyEqual(table2cell(r),{1,'Alice'}); + end + + function testScalarFromTable(testCase) + testCase.conn.query("sql","unittest_namespace","INSERT INTO unittest_namespace.unittest_table VALUES (2,'Carol')"); + r = testCase.conn.query("sql","unittest_namespace","SELECT id FROM unittest_namespace.unittest_table WHERE name='Carol'"); + testCase.verifyEqual(r,2); + end + + function testInsertAndSelectMultipleRows(testCase) + testCase.conn.query("sql","unittest_namespace","INSERT INTO unittest_namespace.unittest_table VALUES (1,'Alice')"); + testCase.conn.query("sql","unittest_namespace","INSERT INTO 
unittest_namespace.unittest_table VALUES (2,'Bob')"); + r = testCase.conn.query("sql","unittest_namespace","SELECT id,name FROM unittest_namespace.unittest_table ORDER BY id"); + testCase.verifyTrue(istable(r)); + testCase.verifyEqual(height(r),2); + testCase.verifyEqual(table2cell(r(1,:)),{1,'Alice'}); + testCase.verifyEqual(table2cell(r(2,:)),{2,'Bob'}); + end + + function testDeleteFromTable(testCase) + testCase.conn.query("sql","unittest_namespace","INSERT INTO unittest_namespace.unittest_table VALUES (2,'Bob')"); + testCase.conn.query("sql","unittest_namespace","DELETE FROM unittest_namespace.unittest_table"); + r = testCase.conn.query("sql","unittest_namespace","SELECT * FROM unittest_namespace.unittest_table"); + testCase.verifyEmpty(r); + end + + function testBatchInsertEmployees(testCase) + queries = { + "INSERT INTO unittest_namespace.batch_table VALUES (1,'Alice','F',DATE '1990-01-15',1001)" + "INSERT INTO unittest_namespace.batch_table VALUES (2,'Bob','M',DATE '1989-05-12',1002)" + "INSERT INTO unittest_namespace.batch_table VALUES (3,'Jane','F',DATE '1992-07-23',1003)" + "INSERT INTO unittest_namespace.batch_table VALUES (4,'Tim','M',DATE '1991-03-03',1004)" + "INSERT INTO unittest_namespace.batch_table VALUES (5,'Alex','M',DATE '1994-11-11',1005)" + "INSERT INTO unittest_namespace.batch_table VALUES (6,'Mason','M',DATE '1988-04-22',1006)" + "INSERT INTO unittest_namespace.batch_table VALUES (7,'Rena','F',DATE '1995-06-17',1007)" + "INSERT INTO unittest_namespace.batch_table VALUES (8,'Christopher','M',DATE '1987-08-09',1008)" + "INSERT INTO unittest_namespace.batch_table VALUES (9,'Lexi','F',DATE '1996-09-30',1009)" + "INSERT INTO unittest_namespace.batch_table VALUES (10,'Baen','M',DATE '1990-10-05',1010)" + "INSERT INTO unittest_namespace.batch_table VALUES (11,'Ricardo','M',DATE '1986-12-12',1011)" + "INSERT INTO unittest_namespace.batch_table VALUES (12,'Tim','M',DATE '1993-02-02',1012)" + "INSERT INTO unittest_namespace.batch_table VALUES 
(13,'Beya','F',DATE '1994-05-25',1013)" + }; + counts = testCase.conn.queryBatch("sql","unittest_namespace",queries); + testCase.verifyEqual(height(counts),13); + disp(counts) + testCase.verifyTrue(all(counts.RowsAffected == 1)); + r = testCase.conn.query("sql","unittest_namespace","SELECT COUNT(*) FROM unittest_namespace.batch_table"); + testCase.verifyEqual(r,13); + end + + function testBatchRollbackOnFailure(testCase) + queries = { + "INSERT INTO unittest_namespace.batch_table VALUES (1,'Alice','F',DATE '1990-01-15',1001)" + "BROKEN QUERY" + }; + testCase.verifyError(@() testCase.conn.queryBatch("sql","unittest_namespace",queries),?MException); + r = testCase.conn.query("sql","unittest_namespace","SELECT * FROM unittest_namespace.batch_table"); + testCase.verifyEmpty(r); + end + + function testSyntaxError(testCase) + testCase.verifyError(@() testCase.conn.query("sql","unittest_namespace","SELEC WRONG FROM nowhere"),?MException); + end + + + function testQueryWithSpaces(testCase) + % Insert with leading spaces + testCase.conn.query("sql","unittest_namespace", ... + " INSERT INTO unittest_namespace.unittest_table VALUES (1,'Alice')"); + testCase.conn.query("sql","unittest_namespace", ... + " INSERT INTO unittest_namespace.unittest_table VALUES (2,'Bob')"); + + r = testCase.conn.query("sql","unittest_namespace", ... + "SELECT id,name FROM unittest_namespace.unittest_table ORDER BY id"); + + testCase.verifyTrue(istable(r)); + testCase.verifyEqual(r.Properties.VariableNames,{'id','name'}); + testCase.verifyEqual(height(r),2); + testCase.verifyEqual(table2cell(r(1,:)),{1,'Alice'}); + testCase.verifyEqual(table2cell(r(2,:)),{2,'Bob'}); + end + + function testConnectionFailure(testCase) + testCase.verifyError(@() ... + polypheny.Polypheny("localhost",9999,"pa","").query("sql","unittest_namespace","SELECT 1"), ... 
+ ?MException); + end + + function testCommitFailureRollback(testCase) + queries = { + "INSERT INTO unittest_namespace.batch_table VALUES (1,'Alice','F',DATE '1990-01-15',1001)" + "Intentional nonsense to produce a failure" + }; + testCase.verifyError(@() ... + testCase.conn.queryBatch("sql","unittest_namespace",queries),?MException); + + r = testCase.conn.query("sql","unittest_namespace", ... + "SELECT * FROM unittest_namespace.batch_table"); + testCase.verifyEmpty(r); + end + + end + +end \ No newline at end of file diff --git a/matlab-polypheny-connector/matlab-polypheny-connector.prj b/matlab-polypheny-connector/matlab-polypheny-connector.prj new file mode 100644 index 0000000..6b95f98 --- /dev/null +++ b/matlab-polypheny-connector/matlab-polypheny-connector.prj @@ -0,0 +1,2 @@ + + diff --git a/matlab-polypheny-connector/ode_demo.m b/matlab-polypheny-connector/ode_demo.m new file mode 100644 index 0000000..e69de29 diff --git a/matlab-polypheny-connector/startup.m b/matlab-polypheny-connector/startup.m new file mode 100644 index 0000000..c4dad7b --- /dev/null +++ b/matlab-polypheny-connector/startup.m @@ -0,0 +1,33 @@ +function startup + % Get root folder of the toolbox + root = fileparts(mfilename('fullpath')); + + % Paths to JARs + jarPaths = { ... + fullfile(root, 'jar', 'polypheny-all.jar'), ... + fullfile(root, 'libs', 'polypheny-jdbc-driver-2.3.jar') ... 
+ }; + + % Add JARs if not already on classpath + for i = 1:numel(jarPaths) + if ~any(strcmp(jarPaths{i}, javaclasspath('-all'))) + javaaddpath(jarPaths{i}); + end + end + + % Try to register the JDBC driver dynamically + try + %java.lang.Class.forName('org.polypheny.jdbc.PolyphenyDriver'); + driver = javaObject('org.polypheny.jdbc.PolyphenyDriver'); + java.sql.DriverManager.registerDriver(driver); + catch e + warning('Could not register Polypheny JDBC driver dynamically: %s', char(e.message)); + end + + % Add MATLAB namespace folder (+polypheny) + if exist(fullfile(root, '+polypheny'), 'dir') + addpath(root); + end + + disp('Polypheny connector initialized.'); +end diff --git a/matlab-polypheny-connector/toolbox.ignore b/matlab-polypheny-connector/toolbox.ignore new file mode 100644 index 0000000..d5161ed --- /dev/null +++ b/matlab-polypheny-connector/toolbox.ignore @@ -0,0 +1,32 @@ +% List the files in your toolbox folder to exclude from packaging. Specify the +% file path as a path relative to the toolbox folder. +% List only one exclude per line. +% +% For example: +% +% Exclude a specific file in the toolbox folder: +% file1.svn +% +% Exclude a specific file in a subfolder of the toolbox folder: +% example/file1.svn +% +% Exclude all files in a subfolder of the toolbox folder: +% example/* +% +% Exclude all files with a certain name in all subfolders of the toolbox folder: +% **/file1.svn +% +%Exclude all files matching a pattern in all subfolders of the toolbox folder: +% **/*.bak +% +% Exclude all top level files and folders beginning with the character "%": +% \%example/%file.svn +% +**/resources/project/**/* +resources +**/*.prj +**/*.prj.bak +**/.git/**/* +**/.svn/**/* +**/.buildtool/**/* +**/*.asv \ No newline at end of file diff --git a/settings.gradle b/settings.gradle new file mode 100644 index 0000000..7821301 --- /dev/null +++ b/settings.gradle @@ -0,0 +1,14 @@ +/* + * This file was generated by the Gradle 'init' task. 
+ * + * The settings file is used to specify which projects to include in your build. + * For more detailed information on multi-project builds, please refer to https://docs.gradle.org/8.13/userguide/multi_project_builds.html in the Gradle documentation. + */ + +plugins { + // Apply the foojay-resolver plugin to allow automatic download of JDKs + id 'org.gradle.toolchains.foojay-resolver-convention' version '0.9.0' +} + +rootProject.name = 'Learning Contract' +include('app') diff --git a/toolbox.ignore b/toolbox.ignore new file mode 100644 index 0000000..d5161ed --- /dev/null +++ b/toolbox.ignore @@ -0,0 +1,32 @@ +% List the files in your toolbox folder to exclude from packaging. Specify the +% file path as a path relative to the toolbox folder. +% List only one exclude per line. +% +% For example: +% +% Exclude a specific file in the toolbox folder: +% file1.svn +% +% Exclude a specific file in a subfolder of the toolbox folder: +% example/file1.svn +% +% Exclude all files in a subfolder of the toolbox folder: +% example/* +% +% Exclude all files with a certain name in all subfolders of the toolbox folder: +% **/file1.svn +% +%Exclude all files matching a pattern in all subfolders of the toolbox folder: +% **/*.bak +% +% Exclude all top level files and folders beginning with the character "%": +% \%example/%file.svn +% +**/resources/project/**/* +resources +**/*.prj +**/*.prj.bak +**/.git/**/* +**/.svn/**/* +**/.buildtool/**/* +**/*.asv \ No newline at end of file