From 7ce32fbe854bb82d9721800d3fdd8d6eb7f08a7e Mon Sep 17 00:00:00 2001
From: romanost03
Date: Sun, 23 Mar 2025 14:38:31 +0100
Subject: [PATCH 01/68] First prototype code implementation for an Oracle
 adapter. Not tested yet.

---
 plugins/oracle-adapter/build.gradle           |  10 ++
 plugins/oracle-adapter/gradle.properties      |  27 +++++
 .../OraclePlugin.java                         |  48 ++++++++
 .../OracleSqlDialect.java                     |  43 +++++++
 .../source/OracleSource.java                  | 108 ++++++++++++++++++
 .../store/OracleStore.java                    |  21 ++++
 settings.gradle                               |   1 +
 7 files changed, 258 insertions(+)
 create mode 100644 plugins/oracle-adapter/build.gradle
 create mode 100644 plugins/oracle-adapter/gradle.properties
 create mode 100644 plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/OraclePlugin.java
 create mode 100644 plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/OracleSqlDialect.java
 create mode 100644 plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/source/OracleSource.java
 create mode 100644 plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/store/OracleStore.java

diff --git a/plugins/oracle-adapter/build.gradle b/plugins/oracle-adapter/build.gradle
new file mode 100644
index 0000000000..d2b8926d58
--- /dev/null
+++ b/plugins/oracle-adapter/build.gradle
@@ -0,0 +1,10 @@
+group "org.polypheny"
+
+dependencies {
+    compileOnly project(":core")
+    compileOnly project(":plugins:sql-language")
+    compileOnly project(":plugins:jdbc-adapter-framework")
+
+    implementation group: 'com.oracle.database.jdbc', name: 'ojdbc8', version: '19.8.0.0'
+
+}
diff --git a/plugins/oracle-adapter/gradle.properties b/plugins/oracle-adapter/gradle.properties
new file mode 100644
index 0000000000..6af6dcaec6
--- /dev/null
+++ b/plugins/oracle-adapter/gradle.properties
@@ -0,0 +1,27 @@
+#
+# Copyright 2019-2025 The Polypheny Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+pluginVersion = 0.0.1
+
+pluginId = oracle-adapter
+pluginClass = org.polypheny.db.adapter.oracle.OraclePlugin
+pluginProvider = The Polypheny Project
+pluginDependencies = jdbc-adapter-framework, sql-language
+pluginUrlPath =
+pluginCategories = source
+pluginPolyDependencies =
+pluginIsSystemComponent = false
+pluginIsUiVisible = true
diff --git a/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/OraclePlugin.java b/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/OraclePlugin.java
new file mode 100644
index 0000000000..85eeebb29a
--- /dev/null
+++ b/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/OraclePlugin.java
@@ -0,0 +1,48 @@
+/*
+ * Copyright 2019-2025 The Polypheny Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.adapter.oracle; + + +import org.polypheny.db.adapter.AdapterManager; +import org.polypheny.db.adapter.oracle.source.OracleSource; +import org.polypheny.db.plugins.PluginContext; +import org.polypheny.db.plugins.PolyPlugin; +import org.polypheny.db.sql.language.SqlDialectRegistry; + +public class OraclePlugin extends PolyPlugin { + + + public static final String ADAPTER_NAME = "Oracle"; + private long sourceId; + + + public OraclePlugin( PluginContext context ) { super( context ); } + + + @Override + public void afterCatalogInit() { + SqlDialectRegistry.registerDialect( "Oracle", OracleSqlDialect.DEFAULT ); // TODO: Dialect might not be necessary. + this.sourceId = AdapterManager.addAdapterTemplate( OracleSource.class, ADAPTER_NAME, OracleSource::new ); + } + + + @Override + public void stop() { + SqlDialectRegistry.unregisterDialect( "Oracle" ); // TODO: if dialect is not necessary, unregistering dialect is redundant. + AdapterManager.removeAdapterTemplate( this.sourceId ); + } +} diff --git a/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/OracleSqlDialect.java b/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/OracleSqlDialect.java new file mode 100644 index 0000000000..1729e4b3c1 --- /dev/null +++ b/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/OracleSqlDialect.java @@ -0,0 +1,43 @@ +/* + * Copyright 2019-2025 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.adapter.oracle; + + +import org.polypheny.db.algebra.constant.NullCollation; +import org.polypheny.db.algebra.type.AlgDataTypeSystem; +import org.polypheny.db.sql.language.SqlDialect; + +public class OracleSqlDialect extends SqlDialect { + + + /* + TODO: Find out if this SqlDialect is really necessary (PostgreSql does have it, MySql doesn't). 
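+    (Editor's note: judging by the DEFAULT instance below, the dialect controls identifier
+    quoting, null collation and the type-system limits used when SQL is generated for this
+    source, so keeping a minimal dialect is presumably safer than registering none.)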
+ */ + private static final AlgDataTypeSystem ORACLE_TYPE_SYSTEM = null; + + + public static final SqlDialect DEFAULT = + new OracleSqlDialect( EMPTY_CONTEXT + .withNullCollation( NullCollation.HIGH ) + .withIdentifierQuoteString( "\"" ) + .withDataTypeSystem( ORACLE_TYPE_SYSTEM ) ); + + + + public OracleSqlDialect( Context context ) { super( context ); } + +} diff --git a/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/source/OracleSource.java b/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/source/OracleSource.java new file mode 100644 index 0000000000..0d87e7eb4a --- /dev/null +++ b/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/source/OracleSource.java @@ -0,0 +1,108 @@ +/* + * Copyright 2019-2025 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.adapter.oracle.source; + + +import lombok.extern.slf4j.Slf4j; +import org.polypheny.db.adapter.DeployMode; +import org.polypheny.db.adapter.annotations.AdapterProperties; +import org.polypheny.db.adapter.annotations.AdapterSettingInteger; +import org.polypheny.db.adapter.annotations.AdapterSettingList; +import org.polypheny.db.adapter.annotations.AdapterSettingString; +import org.polypheny.db.adapter.jdbc.sources.AbstractJdbcSource; +import org.polypheny.db.adapter.oracle.OracleSqlDialect; +import org.polypheny.db.catalog.entity.allocation.AllocationTableWrapper; +import org.polypheny.db.catalog.entity.logical.LogicalTableWrapper; +import org.polypheny.db.catalog.entity.physical.PhysicalEntity; +import org.polypheny.db.prepare.Context; +import org.polypheny.db.sql.language.SqlDialect; +import java.sql.SQLException; +import java.util.List; +import java.util.Map; + + +@Slf4j +@AdapterProperties( + name = "Oracle", + description = "Data source explicit for relational oracle database systems.", + usedModes = DeployMode.REMOTE, + defaultMode = DeployMode.REMOTE) +@AdapterSettingString(name = "host", defaultValue = "localhost", position = 1, //TODO: Find out correct hostname, IP. + description = "Hostname or IP address of the remote oracle instance.") +@AdapterSettingInteger(name = "port", defaultValue = 16345, position = 2, //TODO: Find out correct port number. 
+ description = "Port number of the remote oracle instance.") +@AdapterSettingString(name = "database", defaultValue = "polypheny", position = 3, + description = "Name of the database to connect with.") +@AdapterSettingString(name = "username", defaultValue = "polypheny", position = 4, + description = "Username used for authentication at the remote instance.") +@AdapterSettingString(name = "password", defaultValue = "polypheny", position = 5, + description = "Password used for authentication at the remote instance.") +@AdapterSettingInteger(name = "maxConnections", defaultValue = 25, + description = "Maximum number of concurrent connections.") +@AdapterSettingList(name = "transactionIsolation", options = { "SERIALIZABLE", "READ_UNCOMMITTED", "READ_COMMITTED", "REPEATABLE_READ" }, defaultValue = "SERIALIZABLE", + description = "Which level of transaction isolation should be used.") +@AdapterSettingString(name = "tables", defaultValue = "foo,bar", + description = "List of tables which should be imported. The names must be separated by a comma.") +public class OracleSource extends AbstractJdbcSource { + + public OracleSource( final long storeId, final String uniqueName, final Map settings, final DeployMode mode ) { + super( + storeId, + uniqueName, + settings, + mode, + "oracle.jdbc.OracleDriver", + OracleSqlDialect.DEFAULT, + false ); + } + + + @Override + protected String getConnectionUrl( final String dbHostname, final int dbPort, final String dbName ) { + return String.format( "jdbc:oracle:thin:@//%s:%d/%s", dbHostname, dbPort, dbName ); + } + + + @Override + protected boolean requiresSchema() { + return false; + } + + + @Override + public void shutdown() { + try { + removeInformationPage(); + connectionFactory.close(); + } catch ( SQLException e ) { + log.warn( "Exception while closing oracle connection {}", getUniqueName(), e ); + } + } + + + @Override + protected void reloadSettings( List updatedSettings ) { + //TODO: Implement disconnect and reconnect to Oracle instance. + } + + + @Override + public List createTable( Context context, LogicalTableWrapper logical, AllocationTableWrapper allocation ) { + return List.of(); + } + +} diff --git a/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/store/OracleStore.java b/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/store/OracleStore.java new file mode 100644 index 0000000000..b97d1cac2a --- /dev/null +++ b/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/store/OracleStore.java @@ -0,0 +1,21 @@ +/* + * Copyright 2019-2025 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.db.adapter.oracle.store; + +public class OracleStore { + +} diff --git a/settings.gradle b/settings.gradle index 583121783f..b355bc5f1b 100644 --- a/settings.gradle +++ b/settings.gradle @@ -56,6 +56,7 @@ include 'plugins:mongodb-adapter' include 'plugins:file-adapter' include 'plugins:google-sheet-adapter' include 'plugins:excel-adapter' +include 'plugins:oracle-adapter' // other plugins include 'plugins:notebooks' From ae898db8b38796caf513b028d99ecfa2e0f8286a Mon Sep 17 00:00:00 2001 From: romanost03 Date: Sun, 23 Mar 2025 15:10:27 +0100 Subject: [PATCH 02/68] First UI for schema discovery and code for communication between UI with HttpServer.java --- .../schemaui/src/app/app.component.html | 4 +++ .../schemaui/src/app/app.component.scss | 18 ++++++++++ .../schemaui/src/app/app.component.spec.ts | 29 +++++++++++++++ .../schemaui/src/app/app.component.ts | 35 +++++++++++++++++++ .../schemaui/src/app/app.config.ts | 8 +++++ .../schemaui/src/app/app.routes.ts | 3 ++ schemadiscoveryui/schemaui/src/index.html | 13 +++++++ schemadiscoveryui/schemaui/src/main.ts | 6 ++++ schemadiscoveryui/schemaui/src/styles.scss | 4 +++ .../java/org/polypheny/db/webui/Crud.java | 6 ++++ .../org/polypheny/db/webui/HttpServer.java | 2 ++ 11 files changed, 128 insertions(+) create mode 100644 schemadiscoveryui/schemaui/src/app/app.component.html create mode 100644 schemadiscoveryui/schemaui/src/app/app.component.scss create mode 100644 schemadiscoveryui/schemaui/src/app/app.component.spec.ts create mode 100644 schemadiscoveryui/schemaui/src/app/app.component.ts create mode 100644 schemadiscoveryui/schemaui/src/app/app.config.ts create mode 100644 schemadiscoveryui/schemaui/src/app/app.routes.ts create mode 100644 schemadiscoveryui/schemaui/src/index.html create mode 100644 schemadiscoveryui/schemaui/src/main.ts create mode 100644 schemadiscoveryui/schemaui/src/styles.scss diff --git a/schemadiscoveryui/schemaui/src/app/app.component.html b/schemadiscoveryui/schemaui/src/app/app.component.html new file mode 100644 index 0000000000..1e18f10b8f --- /dev/null +++ b/schemadiscoveryui/schemaui/src/app/app.component.html @@ -0,0 +1,4 @@ +
<div class="card">
+  <button class="button" (click)="sendRequest()">Send request</button>
+  <p>{{ message}}</p>
+</div>
diff --git a/schemadiscoveryui/schemaui/src/app/app.component.scss b/schemadiscoveryui/schemaui/src/app/app.component.scss new file mode 100644 index 0000000000..7a61a11045 --- /dev/null +++ b/schemadiscoveryui/schemaui/src/app/app.component.scss @@ -0,0 +1,18 @@ +.card { + display: block; + width: 100%; + min-height: 100px; + background-color: black; + align-content: center; +} + +.button { + width: 60%; + justify-content: center; + align-items: center; + margin-left: 200px; +} + +.button:hover { + background-color: #0056b3; +} diff --git a/schemadiscoveryui/schemaui/src/app/app.component.spec.ts b/schemadiscoveryui/schemaui/src/app/app.component.spec.ts new file mode 100644 index 0000000000..90e3ca7b66 --- /dev/null +++ b/schemadiscoveryui/schemaui/src/app/app.component.spec.ts @@ -0,0 +1,29 @@ +import { TestBed } from '@angular/core/testing'; +import { AppComponent } from './app.component'; + +describe('AppComponent', () => { + beforeEach(async () => { + await TestBed.configureTestingModule({ + imports: [AppComponent], + }).compileComponents(); + }); + + it('should create the app', () => { + const fixture = TestBed.createComponent(AppComponent); + const app = fixture.componentInstance; + expect(app).toBeTruthy(); + }); + + it(`should have the 'schemaui' title`, () => { + const fixture = TestBed.createComponent(AppComponent); + const app = fixture.componentInstance; + expect(app.title).toEqual('schemaui'); + }); + + it('should render title', () => { + const fixture = TestBed.createComponent(AppComponent); + fixture.detectChanges(); + const compiled = fixture.nativeElement as HTMLElement; + expect(compiled.querySelector('h1')?.textContent).toContain('Hello, schemaui'); + }); +}); diff --git a/schemadiscoveryui/schemaui/src/app/app.component.ts b/schemadiscoveryui/schemaui/src/app/app.component.ts new file mode 100644 index 0000000000..007a23d556 --- /dev/null +++ b/schemadiscoveryui/schemaui/src/app/app.component.ts @@ -0,0 +1,35 @@ +import { Component } from '@angular/core'; +import {HttpClient, HttpParams} from '@angular/common/http'; +import {CommonModule} from '@angular/common'; +import {HttpClientModule} from '@angular/common/http'; +import { RouterOutlet } from '@angular/router'; + +@Component({ + selector: 'app-root', + imports: [CommonModule, HttpClientModule], + templateUrl: './app.component.html', + styleUrls: ['./app.component.scss'], +}) +export class AppComponent { + title = 'schemaui'; + message: string = ''; + + constructor(private http: HttpClient) {} + + makeRequest() { + this.message = 'Button geklickt !!!'; + } + + + sendRequest(): void { + this.http.post('http://127.0.0.1:7659/confirm', {}, {responseType: 'text'}) + .subscribe(response => { + console.log('Server response:', response); + alert("Nachricht angekommen."); + this.message = response; + }, error => { + console.error('Error:', error); + alert("Nachricht nicht angekommen !!!"); + }); + } +} diff --git a/schemadiscoveryui/schemaui/src/app/app.config.ts b/schemadiscoveryui/schemaui/src/app/app.config.ts new file mode 100644 index 0000000000..a1e7d6f864 --- /dev/null +++ b/schemadiscoveryui/schemaui/src/app/app.config.ts @@ -0,0 +1,8 @@ +import { ApplicationConfig, provideZoneChangeDetection } from '@angular/core'; +import { provideRouter } from '@angular/router'; + +import { routes } from './app.routes'; + +export const appConfig: ApplicationConfig = { + providers: [provideZoneChangeDetection({ eventCoalescing: true }), provideRouter(routes)] +}; diff --git a/schemadiscoveryui/schemaui/src/app/app.routes.ts 
b/schemadiscoveryui/schemaui/src/app/app.routes.ts new file mode 100644 index 0000000000..dc39edb5f2 --- /dev/null +++ b/schemadiscoveryui/schemaui/src/app/app.routes.ts @@ -0,0 +1,3 @@ +import { Routes } from '@angular/router'; + +export const routes: Routes = []; diff --git a/schemadiscoveryui/schemaui/src/index.html b/schemadiscoveryui/schemaui/src/index.html new file mode 100644 index 0000000000..a92720ee84 --- /dev/null +++ b/schemadiscoveryui/schemaui/src/index.html @@ -0,0 +1,13 @@ + + + + + Schemaui + + + + + + + + diff --git a/schemadiscoveryui/schemaui/src/main.ts b/schemadiscoveryui/schemaui/src/main.ts new file mode 100644 index 0000000000..35b00f3463 --- /dev/null +++ b/schemadiscoveryui/schemaui/src/main.ts @@ -0,0 +1,6 @@ +import { bootstrapApplication } from '@angular/platform-browser'; +import { appConfig } from './app/app.config'; +import { AppComponent } from './app/app.component'; + +bootstrapApplication(AppComponent, appConfig) + .catch((err) => console.error(err)); diff --git a/schemadiscoveryui/schemaui/src/styles.scss b/schemadiscoveryui/schemaui/src/styles.scss new file mode 100644 index 0000000000..cc9a8d9ba8 --- /dev/null +++ b/schemadiscoveryui/schemaui/src/styles.scss @@ -0,0 +1,4 @@ +/* You can add global styles to this file, and also import other style files */ +p { + color: white; +} diff --git a/webui/src/main/java/org/polypheny/db/webui/Crud.java b/webui/src/main/java/org/polypheny/db/webui/Crud.java index 0b7edc9039..b7836ca7bf 100644 --- a/webui/src/main/java/org/polypheny/db/webui/Crud.java +++ b/webui/src/main/java/org/polypheny/db/webui/Crud.java @@ -868,6 +868,12 @@ void deleteTuple( final Context ctx ) { ctx.json( result ); } + void sendConfirmation( final Context ctx ) { + log.info( "Sending confirmation" ); + String result = "Angular confirmation message"; + ctx.result( result ); + } + /** * Update a row from a table. The row is determined by the value of every PK column in that row (conjunction). diff --git a/webui/src/main/java/org/polypheny/db/webui/HttpServer.java b/webui/src/main/java/org/polypheny/db/webui/HttpServer.java index aa6373a9d6..2465945c53 100644 --- a/webui/src/main/java/org/polypheny/db/webui/HttpServer.java +++ b/webui/src/main/java/org/polypheny/db/webui/HttpServer.java @@ -353,6 +353,8 @@ private void attachRoutes( Javalin webuiServer, Crud crud ) { webuiServer.get( "/product", ctx -> ctx.result( "Polypheny-DB" ) ); + webuiServer.post( "/confirm", crud::sendConfirmation ); + webuiServer.get( "/getPolyAlgRegistry", crud::getPolyAlgRegistry ); webuiServer.post( "/buildPolyPlan", crud::buildPlanFromPolyAlg ); From 85c6bd3abd9115b574f3138549c7adb6f9e701d7 Mon Sep 17 00:00:00 2001 From: romanost03 Date: Sun, 23 Mar 2025 18:42:50 +0100 Subject: [PATCH 03/68] Implement "createTable" in the oracle source and adding code for compilation in build.gradle of the oracle adapter. 
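
Editor's note, a minimal sketch of what the new createTable() below does, assuming the field
names (id, name) visible in the diff and LogicalColumn as the element type of logical.columns:

    // Derive id -> column-name and id -> column maps from the logical table wrapper ...
    Map<Long, String> names = logical.columns.stream()
            .collect( Collectors.toMap( c -> c.id, c -> c.name ) );
    Map<Long, LogicalColumn> columns = logical.columns.stream()
            .collect( Collectors.toMap( c -> c.id, c -> c ) );
    // ... then register the physical table and wrap it as a JDBC table.

Both maps, together with the primary-key ids and the allocation, are handed to
adapterCatalog.createTable(), and the resulting PhysicalTable is exposed through
currentJdbcSchema.createJdbcTable().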
---
 plugins/oracle-adapter/build.gradle           | 52 +++++++++++++++++++
 .../source/OracleSource.java                  | 18 +++++--
 2 files changed, 66 insertions(+), 4 deletions(-)

diff --git a/plugins/oracle-adapter/build.gradle b/plugins/oracle-adapter/build.gradle
index d2b8926d58..9296d11c7b 100644
--- a/plugins/oracle-adapter/build.gradle
+++ b/plugins/oracle-adapter/build.gradle
@@ -7,4 +7,56 @@
 
     implementation group: 'com.oracle.database.jdbc', name: 'ojdbc8', version: '19.8.0.0'
 
+    testImplementation project(path: ":core", configuration: "tests")
+}
+
+
+compileJava {
+    dependsOn(":core:processResources")
+    dependsOn(":plugins:sql-language:processResources")
+    dependsOn(":plugins:jdbc-adapter-framework:processResources")
+    mustRunAfter(":plugins:jdbc-adapter-framework:processResources")
+}
+
+sourceSets {
+    main {
+        java {
+            srcDirs = ["src/main/java"]
+        }
+        resources {
+            srcDirs = ["src/main/resources"]
+        }
+        output.resourcesDir = file(project.buildDir.absolutePath + "/classes")
+    }
+    test {
+        java {
+            srcDirs = ["src/test/java"]
+            destinationDirectory.set(file(project.buildDir.absolutePath + "/test-classes"))
+        }
+        resources {
+            srcDirs = ["src/test/resources"]
+        }
+        output.resourcesDir = file(project.buildDir.absolutePath + "/test-classes")
+    }
+}
+
+
+jar {
+    manifest {
+        attributes "Manifest-Version": "1.0"
+        attributes "Copyright": "The Polypheny Project (polypheny.org)"
+        attributes "Version": "$project.version"
+    }
+}
+
+
+java {
+    withJavadocJar()
+    withSourcesJar()
+}
+
+
+licensee {
+    allow('MIT')
+    allow('Apache-2.0')
+}
diff --git a/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/source/OracleSource.java b/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/source/OracleSource.java
index 0d87e7eb4a..72ad74ac29 100644
--- a/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/source/OracleSource.java
+++ b/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/source/OracleSource.java
@@ -28,12 +28,14 @@
 import org.polypheny.db.adapter.jdbc.sources.AbstractJdbcSource;
 import org.polypheny.db.adapter.oracle.OracleSqlDialect;
 import org.polypheny.db.catalog.entity.allocation.AllocationTableWrapper;
 import org.polypheny.db.catalog.entity.logical.LogicalTableWrapper;
 import org.polypheny.db.catalog.entity.physical.PhysicalEntity;
+import org.polypheny.db.catalog.entity.physical.PhysicalTable;
 import org.polypheny.db.prepare.Context;
 import org.polypheny.db.sql.language.SqlDialect;
 import java.sql.SQLException;
 import java.util.List;
 import java.util.Map;
-
+import java.util.stream.Collectors;
+// TODO: Find out oracle credentials to log in and adjust URL if necessary.
 
 @Slf4j
 @AdapterProperties(
@@ -43,7 +45,7 @@
 defaultMode = DeployMode.REMOTE)
 @AdapterSettingString(name = "host", defaultValue = "localhost", position = 1, //TODO: Find out correct hostname, IP.
     description = "Hostname or IP address of the remote oracle instance.")
-@AdapterSettingInteger(name = "port", defaultValue = 16345, position = 2, //TODO: Find out correct port number.
+@AdapterSettingInteger(name = "port", defaultValue = 1521, position = 2, //TODO: Find out correct port number.
description = "Port number of the remote oracle instance.") @AdapterSettingString(name = "database", defaultValue = "polypheny", position = 3, description = "Name of the database to connect with.") @@ -102,7 +104,15 @@ protected void reloadSettings( List updatedSettings ) { @Override public List createTable( Context context, LogicalTableWrapper logical, AllocationTableWrapper allocation ) { - return List.of(); + PhysicalTable table = adapterCatalog.createTable( + logical.table.getNamespaceName(), + logical.table.name, + logical.columns.stream().collect( Collectors.toMap( c -> c.id, c -> c.name ) ), + logical.table, + logical.columns.stream().collect( Collectors.toMap( t -> t.id, t -> t ) ), + logical.pkIds, allocation ); + + adapterCatalog.replacePhysical( currentJdbcSchema.createJdbcTable( table ) ); + return List.of( table ); } - } From 11684853fd2344517b4fe10d874a531485e3dd30 Mon Sep 17 00:00:00 2001 From: romanost03 Date: Wed, 26 Mar 2025 13:38:30 +0100 Subject: [PATCH 04/68] Bug fix in Oracle Adapter. It is now listed on adapter sources. --- .../org/polypheny/db/ddl/DdlManagerImpl.java | 2 +- plugins/oracle-adapter/build.gradle | 8 ++ .../OraclePlugin.java | 5 +- .../store/OracleStore.java | 86 ++++++++++++++++++- .../org/polypheny/db/webui/HttpServer.java | 3 + 5 files changed, 101 insertions(+), 3 deletions(-) diff --git a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java index c9bc72f0f3..7b3e171979 100644 --- a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java +++ b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java @@ -223,7 +223,7 @@ public void createSource( Transaction transaction, String uniqueName, String ada } // Create table, columns etc. for ( Map.Entry> entry : exportedColumns.entrySet() ) { - // Make sure the table name is unique + // Make sure the table name is uniqueString tableName = entry.getKey(); String tableName = entry.getKey(); if ( catalog.getSnapshot().rel().getTable( namespace, tableName ).isPresent() ) { int i = 0; diff --git a/plugins/oracle-adapter/build.gradle b/plugins/oracle-adapter/build.gradle index 9296d11c7b..e28a08c20c 100644 --- a/plugins/oracle-adapter/build.gradle +++ b/plugins/oracle-adapter/build.gradle @@ -41,6 +41,12 @@ sourceSets { } +delombok { + dependsOn(":plugins:sql-language:processResources") + dependsOn(":plugins:jdbc-adapter-framework:processResources") +} + + jar { manifest { attributes "Manifest-Version": "1.0" @@ -59,4 +65,6 @@ java { licensee { allow('MIT') allow('Apache-2.0') + // allow('https://www.oracle.com/downloads/licenses/oracle-free-license.html') + allowUrl('https://www.oracle.com/downloads/licenses/oracle-free-license.html') } diff --git a/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/OraclePlugin.java b/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/OraclePlugin.java index 85eeebb29a..6fa9e046f1 100644 --- a/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/OraclePlugin.java +++ b/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/OraclePlugin.java @@ -19,6 +19,7 @@ import org.polypheny.db.adapter.AdapterManager; import org.polypheny.db.adapter.oracle.source.OracleSource; +import org.polypheny.db.adapter.oracle.store.OracleStore; import org.polypheny.db.plugins.PluginContext; import org.polypheny.db.plugins.PolyPlugin; import org.polypheny.db.sql.language.SqlDialectRegistry; @@ -28,6 +29,7 @@ public class OraclePlugin extends PolyPlugin { public 
static final String ADAPTER_NAME = "Oracle"; private long sourceId; + private long storeId; public OraclePlugin( PluginContext context ) { super( context ); } @@ -35,8 +37,9 @@ public class OraclePlugin extends PolyPlugin { @Override public void afterCatalogInit() { - SqlDialectRegistry.registerDialect( "Oracle", OracleSqlDialect.DEFAULT ); // TODO: Dialect might not be necessary. + // SqlDialectRegistry.registerDialect( "Oracle", OracleSqlDialect.DEFAULT ); // TODO: Dialect might not be necessary. this.sourceId = AdapterManager.addAdapterTemplate( OracleSource.class, ADAPTER_NAME, OracleSource::new ); + this.storeId = AdapterManager.addAdapterTemplate( OracleStore.class, ADAPTER_NAME, OracleStore::new ); } diff --git a/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/store/OracleStore.java b/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/store/OracleStore.java index b97d1cac2a..bb04ca151b 100644 --- a/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/store/OracleStore.java +++ b/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/store/OracleStore.java @@ -16,6 +16,90 @@ package org.polypheny.db.adapter.oracle.store; -public class OracleStore { + +import lombok.extern.slf4j.Slf4j; +import org.polypheny.db.adapter.DeployMode; +import org.polypheny.db.adapter.DeployMode.DeploySetting; +import org.polypheny.db.adapter.annotations.AdapterProperties; +import org.polypheny.db.adapter.annotations.AdapterSettingInteger; +import org.polypheny.db.adapter.annotations.AdapterSettingString; +import org.polypheny.db.adapter.jdbc.sources.AbstractJdbcSource; +import org.polypheny.db.adapter.oracle.OracleSqlDialect; +import org.polypheny.db.catalog.entity.allocation.AllocationTableWrapper; +import org.polypheny.db.catalog.entity.logical.LogicalTableWrapper; +import org.polypheny.db.catalog.entity.physical.PhysicalEntity; +import org.polypheny.db.docker.DockerContainer; +import org.polypheny.db.prepare.Context; +import java.util.List; +import java.util.Map; + +@Slf4j +@AdapterProperties( + name = "Oracle", + description = "Data source explicit for relational oracle database systems.", + usedModes = { DeployMode.REMOTE, DeployMode.DOCKER }, + defaultMode = DeployMode.DOCKER) +@AdapterSettingString(name = "host", defaultValue = "localhost", position = 1, + description = "Hostname or IP address of the remote PostgreSQL instance.", appliesTo = DeploySetting.REMOTE) +@AdapterSettingInteger(name = "port", defaultValue = 1521, position = 2, + description = "JDBC port number on the remote PostgreSQL instance.", appliesTo = DeploySetting.REMOTE) +@AdapterSettingString(name = "database", defaultValue = "polypheny", position = 3, + description = "Name of the database to connect to.", appliesTo = DeploySetting.REMOTE) +@AdapterSettingString(name = "username", defaultValue = "polypheny", position = 4, + description = "Username to be used for authenticating at the remote instance.", appliesTo = DeploySetting.REMOTE) +@AdapterSettingString(name = "password", defaultValue = "polypheny", position = 5, + description = "Password to be used for authenticating at the remote instance.", appliesTo = DeploySetting.REMOTE) +@AdapterSettingInteger(name = "maxConnections", defaultValue = 25, position = 6, + description = "Maximum number of concurrent JDBC connections.") + + + +public class OracleStore extends AbstractJdbcSource { + private String host; + private int port; + private String database; + private String username; + private DockerContainer 
container; + + + public OracleStore( final long storeId, final String uniqueName, final Map settings, final DeployMode mode ) { + super( storeId, + uniqueName, + settings, + mode, + "oracle.jdbc.OralceDriver", + OracleSqlDialect.DEFAULT, + false ); + } + + + @Override + protected String getConnectionUrl( String dbHostname, int dbPort, String dbName ) { + return ""; + } + + + @Override + protected boolean requiresSchema() { + return false; + } + + + @Override + public void shutdown() { + + } + + + @Override + protected void reloadSettings( List updatedSettings ) { + + } + + + @Override + public List createTable( Context context, LogicalTableWrapper logical, AllocationTableWrapper allocation ) { + return List.of(); + } } diff --git a/webui/src/main/java/org/polypheny/db/webui/HttpServer.java b/webui/src/main/java/org/polypheny/db/webui/HttpServer.java index 2465945c53..51b3ff49d2 100644 --- a/webui/src/main/java/org/polypheny/db/webui/HttpServer.java +++ b/webui/src/main/java/org/polypheny/db/webui/HttpServer.java @@ -353,6 +353,9 @@ private void attachRoutes( Javalin webuiServer, Crud crud ) { webuiServer.get( "/product", ctx -> ctx.result( "Polypheny-DB" ) ); + /* + * Confirmation message sending to schema discovery UI + */ webuiServer.post( "/confirm", crud::sendConfirmation ); webuiServer.get( "/getPolyAlgRegistry", crud::getPolyAlgRegistry ); From 638b08197427db633834d20273c67145e1128c48 Mon Sep 17 00:00:00 2001 From: romanost03 Date: Sat, 29 Mar 2025 16:24:22 +0100 Subject: [PATCH 05/68] Add functionality to iterate for Postgresql-Database information and data structures for saving those information. --- build.gradle | 1 + plugins/jdbc-adapter-framework/build.gradle | 1 + .../java/org/polypheny/db/webui/Crud.java | 14 ++- .../DataHandling/AttributeInfo.java | 27 +++++ .../DataHandling/DatabaseInfo.java | 60 ++++++++++ .../DataHandling/SchemaInfo.java | 36 ++++++ .../DataHandling/TableInfo.java | 34 ++++++ .../db/webui/schemaDiscovery/JsonExport.java | 35 ++++++ .../schemaDiscovery/PostgreSqlConnection.java | 110 ++++++++++++++++++ 9 files changed, 315 insertions(+), 3 deletions(-) create mode 100644 webui/src/main/java/org/polypheny/db/webui/schemaDiscovery/DataHandling/AttributeInfo.java create mode 100644 webui/src/main/java/org/polypheny/db/webui/schemaDiscovery/DataHandling/DatabaseInfo.java create mode 100644 webui/src/main/java/org/polypheny/db/webui/schemaDiscovery/DataHandling/SchemaInfo.java create mode 100644 webui/src/main/java/org/polypheny/db/webui/schemaDiscovery/DataHandling/TableInfo.java create mode 100644 webui/src/main/java/org/polypheny/db/webui/schemaDiscovery/JsonExport.java create mode 100644 webui/src/main/java/org/polypheny/db/webui/schemaDiscovery/PostgreSqlConnection.java diff --git a/build.gradle b/build.gradle index c5fa517e72..7bcc030b0b 100644 --- a/build.gradle +++ b/build.gradle @@ -144,6 +144,7 @@ allprojects { implementation group: "org.slf4j", name: "slf4j-api", version: slf4j_api_version // MIT implementation group: "org.apache.logging.log4j", name: "log4j-slf4j2-impl", version: log4j_slf4j_impl_version // Apache 2.0 + implementation 'org.postgresql:postgresql:42.7.2' testCompileOnly(group: 'org.pf4j', name: 'pf4j', version: pf4j_version) { exclude group: "org.slf4j" diff --git a/plugins/jdbc-adapter-framework/build.gradle b/plugins/jdbc-adapter-framework/build.gradle index 720413682b..b5bc32df89 100644 --- a/plugins/jdbc-adapter-framework/build.gradle +++ b/plugins/jdbc-adapter-framework/build.gradle @@ -5,6 +5,7 @@ dependencies { compileOnly 
project(":core") compileOnly project(":plugins:sql-language") api group: 'org.apache.commons', name: 'commons-dbcp2', version: commons_dbcp2_version + implementation 'com.fasterxml.jackson.core:jackson-databind:2.16.1' // --- Test Compile --- testImplementation project(path: ":core", configuration: "tests") diff --git a/webui/src/main/java/org/polypheny/db/webui/Crud.java b/webui/src/main/java/org/polypheny/db/webui/Crud.java index b7836ca7bf..0861ed7ef4 100644 --- a/webui/src/main/java/org/polypheny/db/webui/Crud.java +++ b/webui/src/main/java/org/polypheny/db/webui/Crud.java @@ -44,6 +44,7 @@ import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Path; +import java.sql.SQLException; import java.text.DateFormat; import java.text.SimpleDateFormat; import java.util.ArrayList; @@ -209,7 +210,8 @@ import org.polypheny.db.webui.models.results.Result; import org.polypheny.db.webui.models.results.Result.ResultBuilder; import org.polypheny.db.webui.models.results.ResultType; - +import org.polypheny.db.webui.schemaDiscovery.DataHandling.DatabaseInfo; +import org.polypheny.db.webui.schemaDiscovery.PostgreSqlConnection; @Getter @Slf4j @@ -870,8 +872,14 @@ void deleteTuple( final Context ctx ) { void sendConfirmation( final Context ctx ) { log.info( "Sending confirmation" ); - String result = "Angular confirmation message"; - ctx.result( result ); + // String result = "Angular confirmation message"; + //ctx.result( result ); + try { + List dbs = PostgreSqlConnection.getDatabasesSchemasAndTables(); + ctx.json( dbs ); + } catch ( SQLException e ) { + System.err.println("Fehler bei der Schema-Erkennung: " + e.getMessage()); + } } diff --git a/webui/src/main/java/org/polypheny/db/webui/schemaDiscovery/DataHandling/AttributeInfo.java b/webui/src/main/java/org/polypheny/db/webui/schemaDiscovery/DataHandling/AttributeInfo.java new file mode 100644 index 0000000000..9654ddbd24 --- /dev/null +++ b/webui/src/main/java/org/polypheny/db/webui/schemaDiscovery/DataHandling/AttributeInfo.java @@ -0,0 +1,27 @@ +/* + * Copyright 2019-2025 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.webui.schemaDiscovery.DataHandling; + +public class AttributeInfo { + public String name; + public String type; + + public AttributeInfo( String name, String type ) { + this.name = name; + this.type = type; + } +} diff --git a/webui/src/main/java/org/polypheny/db/webui/schemaDiscovery/DataHandling/DatabaseInfo.java b/webui/src/main/java/org/polypheny/db/webui/schemaDiscovery/DataHandling/DatabaseInfo.java new file mode 100644 index 0000000000..664a931c64 --- /dev/null +++ b/webui/src/main/java/org/polypheny/db/webui/schemaDiscovery/DataHandling/DatabaseInfo.java @@ -0,0 +1,60 @@ +/* + * Copyright 2019-2025 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.polypheny.db.webui.schemaDiscovery.DataHandling;
+
+/*
+ * Data structure that stores the collected schema information per database, since keeping
+ * all database and schema information in two separate lists would cause more overhead.
+ */
+
+import java.util.ArrayList;
+import java.util.List;
+
+public class DatabaseInfo {
+    public String name;
+    public List<SchemaInfo> schemas;
+
+    public DatabaseInfo(String name) {
+        this.name = name;
+        this.schemas = new ArrayList<>();
+    }
+
+    @Override
+    public String toString() {
+        StringBuilder sb = new StringBuilder();
+        sb.append("📦 Database: ").append(name).append("\n");
+
+        for (SchemaInfo schema : schemas) {
+            sb.append("  📁 Schema: ").append(schema.name).append("\n");
+
+            for (TableInfo table : schema.tables) {
+                sb.append("    📄 Table: ").append(table.name).append("\n");
+
+                for (AttributeInfo attr : table.attributes) {
+                    sb.append("      🔹 Attribute: ")
+                            .append(attr.name)
+                            .append(" : ")
+                            .append(attr.type)
+                            .append("\n");
+                }
+            }
+        }
+
+        return sb.toString();
+    }
+}
+
diff --git a/webui/src/main/java/org/polypheny/db/webui/schemaDiscovery/DataHandling/SchemaInfo.java b/webui/src/main/java/org/polypheny/db/webui/schemaDiscovery/DataHandling/SchemaInfo.java
new file mode 100644
index 0000000000..48256ebee8
--- /dev/null
+++ b/webui/src/main/java/org/polypheny/db/webui/schemaDiscovery/DataHandling/SchemaInfo.java
@@ -0,0 +1,36 @@
+/*
+ * Copyright 2019-2025 The Polypheny Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.polypheny.db.webui.schemaDiscovery.DataHandling;
+
+import java.util.ArrayList;
+import java.util.List;
+
+public class SchemaInfo {
+    public String name;
+    public List<TableInfo> tables;
+
+    public SchemaInfo( String name ) {
+        this.name = name;
+        this.tables = new ArrayList<>();
+    }
+
+
+    public void addTable( TableInfo table ) {
+        tables.add( table );
+    }
+
+}
diff --git a/webui/src/main/java/org/polypheny/db/webui/schemaDiscovery/DataHandling/TableInfo.java b/webui/src/main/java/org/polypheny/db/webui/schemaDiscovery/DataHandling/TableInfo.java
new file mode 100644
index 0000000000..985fb04420
--- /dev/null
+++ b/webui/src/main/java/org/polypheny/db/webui/schemaDiscovery/DataHandling/TableInfo.java
@@ -0,0 +1,34 @@
+/*
+ * Copyright 2019-2025 The Polypheny Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.webui.schemaDiscovery.DataHandling; +import java.util.ArrayList; +import java.util.List; + +public class TableInfo { + public String name; + public List attributes; + + public TableInfo( String name ) { + this.name = name; + this.attributes = new ArrayList<>(); + } + + + public void addAttribute( AttributeInfo attr ) { + attributes.add( attr ); + } +} diff --git a/webui/src/main/java/org/polypheny/db/webui/schemaDiscovery/JsonExport.java b/webui/src/main/java/org/polypheny/db/webui/schemaDiscovery/JsonExport.java new file mode 100644 index 0000000000..22c66c1f51 --- /dev/null +++ b/webui/src/main/java/org/polypheny/db/webui/schemaDiscovery/JsonExport.java @@ -0,0 +1,35 @@ +/* + * Copyright 2019-2025 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.webui.schemaDiscovery; + +import com.fasterxml.jackson.databind.ObjectMapper; +import org.polypheny.db.webui.schemaDiscovery.DataHandling.DatabaseInfo; +import java.util.List; + +public class JsonExport { + + public static void printAsJson( List database ) { + try { + ObjectMapper mapper = new ObjectMapper(); + String json = mapper.writerWithDefaultPrettyPrinter().writeValueAsString( database ); + System.out.println( json ); + } catch ( Exception e ) { + System.err.println( "Fail to convert DatabaseInfo Object to JSON-format: " + e.getMessage() ); + } + } + +} diff --git a/webui/src/main/java/org/polypheny/db/webui/schemaDiscovery/PostgreSqlConnection.java b/webui/src/main/java/org/polypheny/db/webui/schemaDiscovery/PostgreSqlConnection.java new file mode 100644 index 0000000000..353c43c4cd --- /dev/null +++ b/webui/src/main/java/org/polypheny/db/webui/schemaDiscovery/PostgreSqlConnection.java @@ -0,0 +1,110 @@ +/* + * Copyright 2019-2025 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.db.webui.schemaDiscovery; + +import org.polypheny.db.webui.schemaDiscovery.DataHandling.AttributeInfo; +import org.polypheny.db.webui.schemaDiscovery.DataHandling.DatabaseInfo; +import org.polypheny.db.webui.schemaDiscovery.DataHandling.SchemaInfo; +import org.polypheny.db.webui.schemaDiscovery.DataHandling.TableInfo; +import java.sql.Connection; +import java.sql.DriverManager; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.Statement; +import java.util.ArrayList; +import java.util.List; + +import java.sql.*; +import java.util.ArrayList; +import java.util.List; + +public class PostgreSqlConnection { + + static String host = "localhost"; + static String port = "5432"; + static String user = "postgres"; + static String password = "password"; + + + public static List getDatabasesSchemasAndTables() throws SQLException { + List dbs = new ArrayList<>(); + + String metaUrl = "jdbc:postgresql://" + host + ":" + port + "/postgres"; + try ( Connection metaConn = DriverManager.getConnection( metaUrl, user, password ); + Statement stmt = metaConn.createStatement(); + ResultSet rs = stmt.executeQuery( "SELECT datname FROM pg_database WHERE datistemplate = false" ) ) { + + while ( rs.next() ) { + String dbName = rs.getString( "datname" ); + DatabaseInfo dbInfo = new DatabaseInfo( dbName ); + + String dbUrl = "jdbc:postgresql://" + host + ":" + port + "/" + dbName; + try ( Connection dbConn = DriverManager.getConnection( dbUrl, user, password ) ) { + DatabaseMetaData meta = dbConn.getMetaData(); + + ResultSet schemas = meta.getSchemas(); + while ( schemas.next() ) { + String schemaName = schemas.getString( "TABLE_SCHEM" ); + SchemaInfo schema = new SchemaInfo( schemaName ); + + ResultSet tables = meta.getTables( null, schemaName, "%", new String[]{ "TABLE" } ); + while ( tables.next() ) { + String tableName = tables.getString( "TABLE_NAME" ); + TableInfo table = new TableInfo( tableName ); + + ResultSet columns = meta.getColumns( null, schemaName, tableName, "%" ); + while ( columns.next() ) { + String columnName = columns.getString( "COLUMN_NAME" ); + String columnType = columns.getString( "TYPE_NAME" ); + table.attributes.add( new AttributeInfo( columnName, columnType ) ); + } + + schema.tables.add( table ); + } + + dbInfo.schemas.add( schema ); + } + + } catch ( SQLException e ) { + System.err.println( "Fehler beim Abrufen von Schemas fΓΌr DB " + dbName + ": " + e.getMessage() ); + } + + dbs.add( dbInfo ); + } + } + + return dbs; + } + + + + + /*public static void main(String[] args) { + try { + List dbs = getDatabasesSchemasAndTables(); + for (DatabaseInfo db : dbs) { + System.out.print(db.toString()); + } + JsonExport.printAsJson( dbs ); + + } catch (SQLException e) { + System.err.println("Fehler bei der Schema-Erkennung: " + e.getMessage()); + e.printStackTrace(); + } + }*/ + +} From d35f3ea5601d542f73da93e555719988a21c2e1f Mon Sep 17 00:00:00 2001 From: romanost03 Date: Sat, 29 Mar 2025 16:25:22 +0100 Subject: [PATCH 06/68] Typo correction in OracleStore.java --- .../java/org.polypheny.db.adapter.oracle/store/OracleStore.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/store/OracleStore.java b/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/store/OracleStore.java index bb04ca151b..8d8fe78608 100644 --- a/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/store/OracleStore.java +++ 
b/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/store/OracleStore.java @@ -67,7 +67,7 @@ public OracleStore( final long storeId, final String uniqueName, final Map Date: Wed, 2 Apr 2025 09:29:52 +0200 Subject: [PATCH 07/68] Remove first prototype of schema discovery UI. --- .../schemaui/src/app/app.component.html | 4 --- .../schemaui/src/app/app.component.scss | 18 ---------- .../schemaui/src/app/app.component.spec.ts | 29 --------------- .../schemaui/src/app/app.component.ts | 35 ------------------- .../schemaui/src/app/app.config.ts | 8 ----- .../schemaui/src/app/app.routes.ts | 3 -- schemadiscoveryui/schemaui/src/index.html | 13 ------- schemadiscoveryui/schemaui/src/main.ts | 6 ---- schemadiscoveryui/schemaui/src/styles.scss | 4 --- 9 files changed, 120 deletions(-) delete mode 100644 schemadiscoveryui/schemaui/src/app/app.component.html delete mode 100644 schemadiscoveryui/schemaui/src/app/app.component.scss delete mode 100644 schemadiscoveryui/schemaui/src/app/app.component.spec.ts delete mode 100644 schemadiscoveryui/schemaui/src/app/app.component.ts delete mode 100644 schemadiscoveryui/schemaui/src/app/app.config.ts delete mode 100644 schemadiscoveryui/schemaui/src/app/app.routes.ts delete mode 100644 schemadiscoveryui/schemaui/src/index.html delete mode 100644 schemadiscoveryui/schemaui/src/main.ts delete mode 100644 schemadiscoveryui/schemaui/src/styles.scss diff --git a/schemadiscoveryui/schemaui/src/app/app.component.html b/schemadiscoveryui/schemaui/src/app/app.component.html deleted file mode 100644 index 1e18f10b8f..0000000000 --- a/schemadiscoveryui/schemaui/src/app/app.component.html +++ /dev/null @@ -1,4 +0,0 @@ -
<div class="card">
-  <button class="button" (click)="sendRequest()">Send request</button>
-  <p>{{ message}}</p>
-</div>
diff --git a/schemadiscoveryui/schemaui/src/app/app.component.scss b/schemadiscoveryui/schemaui/src/app/app.component.scss deleted file mode 100644 index 7a61a11045..0000000000 --- a/schemadiscoveryui/schemaui/src/app/app.component.scss +++ /dev/null @@ -1,18 +0,0 @@ -.card { - display: block; - width: 100%; - min-height: 100px; - background-color: black; - align-content: center; -} - -.button { - width: 60%; - justify-content: center; - align-items: center; - margin-left: 200px; -} - -.button:hover { - background-color: #0056b3; -} diff --git a/schemadiscoveryui/schemaui/src/app/app.component.spec.ts b/schemadiscoveryui/schemaui/src/app/app.component.spec.ts deleted file mode 100644 index 90e3ca7b66..0000000000 --- a/schemadiscoveryui/schemaui/src/app/app.component.spec.ts +++ /dev/null @@ -1,29 +0,0 @@ -import { TestBed } from '@angular/core/testing'; -import { AppComponent } from './app.component'; - -describe('AppComponent', () => { - beforeEach(async () => { - await TestBed.configureTestingModule({ - imports: [AppComponent], - }).compileComponents(); - }); - - it('should create the app', () => { - const fixture = TestBed.createComponent(AppComponent); - const app = fixture.componentInstance; - expect(app).toBeTruthy(); - }); - - it(`should have the 'schemaui' title`, () => { - const fixture = TestBed.createComponent(AppComponent); - const app = fixture.componentInstance; - expect(app.title).toEqual('schemaui'); - }); - - it('should render title', () => { - const fixture = TestBed.createComponent(AppComponent); - fixture.detectChanges(); - const compiled = fixture.nativeElement as HTMLElement; - expect(compiled.querySelector('h1')?.textContent).toContain('Hello, schemaui'); - }); -}); diff --git a/schemadiscoveryui/schemaui/src/app/app.component.ts b/schemadiscoveryui/schemaui/src/app/app.component.ts deleted file mode 100644 index 007a23d556..0000000000 --- a/schemadiscoveryui/schemaui/src/app/app.component.ts +++ /dev/null @@ -1,35 +0,0 @@ -import { Component } from '@angular/core'; -import {HttpClient, HttpParams} from '@angular/common/http'; -import {CommonModule} from '@angular/common'; -import {HttpClientModule} from '@angular/common/http'; -import { RouterOutlet } from '@angular/router'; - -@Component({ - selector: 'app-root', - imports: [CommonModule, HttpClientModule], - templateUrl: './app.component.html', - styleUrls: ['./app.component.scss'], -}) -export class AppComponent { - title = 'schemaui'; - message: string = ''; - - constructor(private http: HttpClient) {} - - makeRequest() { - this.message = 'Button geklickt !!!'; - } - - - sendRequest(): void { - this.http.post('http://127.0.0.1:7659/confirm', {}, {responseType: 'text'}) - .subscribe(response => { - console.log('Server response:', response); - alert("Nachricht angekommen."); - this.message = response; - }, error => { - console.error('Error:', error); - alert("Nachricht nicht angekommen !!!"); - }); - } -} diff --git a/schemadiscoveryui/schemaui/src/app/app.config.ts b/schemadiscoveryui/schemaui/src/app/app.config.ts deleted file mode 100644 index a1e7d6f864..0000000000 --- a/schemadiscoveryui/schemaui/src/app/app.config.ts +++ /dev/null @@ -1,8 +0,0 @@ -import { ApplicationConfig, provideZoneChangeDetection } from '@angular/core'; -import { provideRouter } from '@angular/router'; - -import { routes } from './app.routes'; - -export const appConfig: ApplicationConfig = { - providers: [provideZoneChangeDetection({ eventCoalescing: true }), provideRouter(routes)] -}; diff --git 
a/schemadiscoveryui/schemaui/src/app/app.routes.ts b/schemadiscoveryui/schemaui/src/app/app.routes.ts deleted file mode 100644 index dc39edb5f2..0000000000 --- a/schemadiscoveryui/schemaui/src/app/app.routes.ts +++ /dev/null @@ -1,3 +0,0 @@ -import { Routes } from '@angular/router'; - -export const routes: Routes = []; diff --git a/schemadiscoveryui/schemaui/src/index.html b/schemadiscoveryui/schemaui/src/index.html deleted file mode 100644 index a92720ee84..0000000000 --- a/schemadiscoveryui/schemaui/src/index.html +++ /dev/null @@ -1,13 +0,0 @@ - - - - - Schemaui - - - - - - - - diff --git a/schemadiscoveryui/schemaui/src/main.ts b/schemadiscoveryui/schemaui/src/main.ts deleted file mode 100644 index 35b00f3463..0000000000 --- a/schemadiscoveryui/schemaui/src/main.ts +++ /dev/null @@ -1,6 +0,0 @@ -import { bootstrapApplication } from '@angular/platform-browser'; -import { appConfig } from './app/app.config'; -import { AppComponent } from './app/app.component'; - -bootstrapApplication(AppComponent, appConfig) - .catch((err) => console.error(err)); diff --git a/schemadiscoveryui/schemaui/src/styles.scss b/schemadiscoveryui/schemaui/src/styles.scss deleted file mode 100644 index cc9a8d9ba8..0000000000 --- a/schemadiscoveryui/schemaui/src/styles.scss +++ /dev/null @@ -1,4 +0,0 @@ -/* You can add global styles to this file, and also import other style files */ -p { - color: white; -} From f4a8412175b86dca688e1bb71da8259705075522 Mon Sep 17 00:00:00 2001 From: romanost03 Date: Sun, 6 Apr 2025 17:23:09 +0200 Subject: [PATCH 08/68] Correct the connection for the oracle adapter and open TODOS for this adapter for retrieving data correctly. --- build.gradle | 1 + .../jdbc/sources/AbstractJdbcSource.java | 1 + plugins/oracle-adapter/build.gradle | 3 - .../OraclePlugin.java | 4 +- .../OracleSqlDialect.java | 17 ++- .../source/OracleSource.java | 109 ++++++++++++++++-- .../postgres/source/PostgresqlSource.java | 6 +- .../java/org/polypheny/db/webui/Crud.java | 5 + .../org/polypheny/db/webui/WebSocket.java | 2 + .../schemaDiscovery/OracleConnection.java | 71 ++++++++++++ 10 files changed, 201 insertions(+), 18 deletions(-) create mode 100644 webui/src/main/java/org/polypheny/db/webui/schemaDiscovery/OracleConnection.java diff --git a/build.gradle b/build.gradle index 7bcc030b0b..388cc81eea 100644 --- a/build.gradle +++ b/build.gradle @@ -145,6 +145,7 @@ allprojects { implementation group: "org.slf4j", name: "slf4j-api", version: slf4j_api_version // MIT implementation group: "org.apache.logging.log4j", name: "log4j-slf4j2-impl", version: log4j_slf4j_impl_version // Apache 2.0 implementation 'org.postgresql:postgresql:42.7.2' + implementation group: 'com.oracle.database.jdbc', name: 'ojdbc8', version: '19.8.0.0' testCompileOnly(group: 'org.pf4j', name: 'pf4j', version: pf4j_version) { exclude group: "org.slf4j" diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/sources/AbstractJdbcSource.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/sources/AbstractJdbcSource.java index 47f361c263..e5053e519d 100644 --- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/sources/AbstractJdbcSource.java +++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/sources/AbstractJdbcSource.java @@ -204,6 +204,7 @@ public void rollback( PolyXid xid ) { protected abstract boolean requiresSchema(); + // TODO: Handle case if it is an oracle adapter (not possible to define a database). 
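+    //  (Editor's note: an Oracle thin-driver URL of the form jdbc:oracle:thin:@//host:port/service,
+    //  as used by this adapter, addresses a service rather than a database, so the source would
+    //  presumably have to filter on the schema/user instead of a database name.)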
@Override public Map> getExportedColumns() { Map> map = new HashMap<>(); diff --git a/plugins/oracle-adapter/build.gradle b/plugins/oracle-adapter/build.gradle index e28a08c20c..a1ab5e1af6 100644 --- a/plugins/oracle-adapter/build.gradle +++ b/plugins/oracle-adapter/build.gradle @@ -4,9 +4,6 @@ dependencies { compileOnly project(":core") compileOnly project(":plugins:sql-language") compileOnly project(":plugins:jdbc-adapter-framework") - - implementation group: 'com.oracle.database.jdbc', name: 'ojdbc8', version: '19.8.0.0' - testImplementation project(path: ":core", configuration: "tests") } diff --git a/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/OraclePlugin.java b/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/OraclePlugin.java index 6fa9e046f1..25f8661558 100644 --- a/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/OraclePlugin.java +++ b/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/OraclePlugin.java @@ -29,7 +29,7 @@ public class OraclePlugin extends PolyPlugin { public static final String ADAPTER_NAME = "Oracle"; private long sourceId; - private long storeId; + // private long storeId; public OraclePlugin( PluginContext context ) { super( context ); } @@ -39,7 +39,7 @@ public class OraclePlugin extends PolyPlugin { public void afterCatalogInit() { // SqlDialectRegistry.registerDialect( "Oracle", OracleSqlDialect.DEFAULT ); // TODO: Dialect might not be necessary. this.sourceId = AdapterManager.addAdapterTemplate( OracleSource.class, ADAPTER_NAME, OracleSource::new ); - this.storeId = AdapterManager.addAdapterTemplate( OracleStore.class, ADAPTER_NAME, OracleStore::new ); + // this.storeId = AdapterManager.addAdapterTemplate( OracleStore.class, ADAPTER_NAME, OracleStore::new ); } diff --git a/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/OracleSqlDialect.java b/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/OracleSqlDialect.java index 1729e4b3c1..a9da404e6d 100644 --- a/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/OracleSqlDialect.java +++ b/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/OracleSqlDialect.java @@ -19,7 +19,10 @@ import org.polypheny.db.algebra.constant.NullCollation; import org.polypheny.db.algebra.type.AlgDataTypeSystem; +import org.polypheny.db.algebra.type.AlgDataTypeSystemImpl; import org.polypheny.db.sql.language.SqlDialect; +import org.polypheny.db.type.PolyType; +import java.util.Objects; public class OracleSqlDialect extends SqlDialect { @@ -27,7 +30,19 @@ public class OracleSqlDialect extends SqlDialect { /* TODO: Find out if this SqlDialect is really necessary (PostgreSql does have it, MySql doesn't). */ - private static final AlgDataTypeSystem ORACLE_TYPE_SYSTEM = null; + private static final AlgDataTypeSystem ORACLE_TYPE_SYSTEM = + new AlgDataTypeSystemImpl() { + @Override + public int getMaxPrecision( PolyType typeName ) { + if ( Objects.requireNonNull( typeName ) == PolyType.VARCHAR ) {// From htup_details.h in postgresql: + // MaxAttrSize is a somewhat arbitrary upper limit on the declared size of data fields of char(n) and similar types. It need not have anything + // directly to do with the *actual* upper limit of varlena values, which is currently 1Gb (see TOAST structures in postgres.h). I've set it + // at 10Mb which seems like a reasonable number --- tgl 8/6/00. 
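+                    // (Editor's note: this comment and cap are carried over from the PostgreSQL dialect;
+                    // Oracle's default VARCHAR2 limit is 4000 bytes, so this value is presumably provisional.)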
+ return 10 * 1024 * 1024; + } + return super.getMaxPrecision( typeName ); + } + }; public static final SqlDialect DEFAULT = diff --git a/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/source/OracleSource.java b/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/source/OracleSource.java index 72ad74ac29..376e04ea6d 100644 --- a/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/source/OracleSource.java +++ b/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/source/OracleSource.java @@ -23,19 +23,28 @@ import org.polypheny.db.adapter.annotations.AdapterSettingInteger; import org.polypheny.db.adapter.annotations.AdapterSettingList; import org.polypheny.db.adapter.annotations.AdapterSettingString; +import org.polypheny.db.adapter.jdbc.connection.ConnectionHandler; +import org.polypheny.db.adapter.jdbc.connection.ConnectionHandlerException; import org.polypheny.db.adapter.jdbc.sources.AbstractJdbcSource; import org.polypheny.db.adapter.oracle.OracleSqlDialect; import org.polypheny.db.catalog.entity.allocation.AllocationTableWrapper; import org.polypheny.db.catalog.entity.logical.LogicalTableWrapper; import org.polypheny.db.catalog.entity.physical.PhysicalEntity; import org.polypheny.db.catalog.entity.physical.PhysicalTable; +import org.polypheny.db.catalog.exceptions.GenericRuntimeException; import org.polypheny.db.prepare.Context; -import org.polypheny.db.sql.language.SqlDialect; +import org.polypheny.db.transaction.PUID; +import org.polypheny.db.transaction.PolyXid; +import org.polypheny.db.type.PolyType; +import java.sql.Connection; +import java.sql.DatabaseMetaData; +import java.sql.ResultSet; import java.sql.SQLException; +import java.util.ArrayList; +import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.stream.Collectors; -// TODO: Find out oracle credentials to log in and adjust URL if necessary. @Slf4j @AdapterProperties( @@ -43,15 +52,15 @@ description = "Data source explicit for relational oracle database systems.", usedModes = DeployMode.REMOTE, defaultMode = DeployMode.REMOTE) -@AdapterSettingString(name = "host", defaultValue = "localhost", position = 1, //TODO: Find out correct hostname, IP. +@AdapterSettingString(name = "host", defaultValue = "localhost", position = 1, description = "Hostname or IP address of the remote oracle instance.") -@AdapterSettingInteger(name = "port", defaultValue = 1521, position = 2, //TODO: Find out correct port number. 
+@AdapterSettingInteger(name = "port", defaultValue = 1521, position = 2, description = "Port number of the remote oracle instance.") -@AdapterSettingString(name = "database", defaultValue = "polypheny", position = 3, - description = "Name of the database to connect with.") -@AdapterSettingString(name = "username", defaultValue = "polypheny", position = 4, +@AdapterSettingString(name = "database", defaultValue = "XE", position = 3, + description = "Name of the database to connect to.") +@AdapterSettingString(name = "username", defaultValue = "system", position = 4, description = "Username used for authentication at the remote instance.") -@AdapterSettingString(name = "password", defaultValue = "polypheny", position = 5, +@AdapterSettingString(name = "password", defaultValue = "roman123", position = 5, description = "Password used for authentication at the remote instance.") @AdapterSettingInteger(name = "maxConnections", defaultValue = 25, description = "Maximum number of concurrent connections.") @@ -75,7 +84,7 @@ public OracleSource( final long storeId, final String uniqueName, final Map @@ -109,4 +118,82 @@ public List<PhysicalEntity> createTable( Context context, LogicalTableWrapper lo adapterCatalog.replacePhysical( currentJdbcSchema.createJdbcTable( table ) ); return List.of( table ); } + + + @Override + public Map<String, List<ExportedColumn>> getExportedColumns() { + Map<String, List<ExportedColumn>> map = new HashMap<>(); + + PolyXid xid = PolyXid.generateLocalTransactionIdentifier( PUID.EMPTY_PUID, PUID.EMPTY_PUID); + try { + ConnectionHandler connectionHandler = connectionFactory.getOrCreateConnectionHandler(xid); + java.sql.Statement statement = connectionHandler.getStatement(); + Connection connection = statement.getConnection(); + DatabaseMetaData dbmd = connection.getMetaData(); + + // For Oracle: take the user (e.g. SYSTEM) as the schema + String schema = "SYSTEM"; // yields e.g.
SYSTEM + String tableName = "TEST"; // <- or fetch the name dynamically from the settings + + List<String> primaryKeyColumns = new ArrayList<>(); + try ( ResultSet pk = dbmd.getPrimaryKeys(null, schema, tableName)) { + while (pk.next()) { + primaryKeyColumns.add(pk.getString("COLUMN_NAME")); + } + } + + try (ResultSet columns = dbmd.getColumns(null, schema, tableName, "%")) { + List<ExportedColumn> exportedColumns = new ArrayList<>(); + + while (columns.next()) { + PolyType type = PolyType.getNameForJdbcType(columns.getInt("DATA_TYPE")); + Integer length = null; + Integer scale = null; + + switch (type) { + case DECIMAL: + length = columns.getInt("COLUMN_SIZE"); + scale = columns.getInt("DECIMAL_DIGITS"); + break; + case CHAR: + case VARCHAR: + type = PolyType.VARCHAR; + length = columns.getInt("COLUMN_SIZE"); + break; + case VARBINARY: + case BINARY: + type = PolyType.VARBINARY; + length = columns.getInt("COLUMN_SIZE"); + break; + case TIME: + case TIMESTAMP: + length = columns.getInt("DECIMAL_DIGITS"); + break; + default: + // other types without length/scale + break; + } + + exportedColumns.add(new ExportedColumn( + columns.getString("COLUMN_NAME").toLowerCase(), + type, + null, // no collection + length, + scale, + null, + null, + "YES".equalsIgnoreCase(columns.getString("IS_NULLABLE")), + schema, + tableName, + columns.getString("COLUMN_NAME"), + columns.getInt("ORDINAL_POSITION"), + primaryKeyColumns.contains(columns.getString("COLUMN_NAME")) + )); + } + + map.put(tableName, exportedColumns); + } + } catch ( SQLException | ConnectionHandlerException e) { + throw new GenericRuntimeException("Exception while collecting Oracle schema info", e); + } + + return map; + } + } diff --git a/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java b/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java index 8cb1b3c55b..b82e22c919 100644 --- a/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java +++ b/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java @@ -46,11 +46,11 @@ description = "Hostname or IP address of the remote PostgreSQL instance.") @AdapterSettingInteger(name = "port", defaultValue = 5432, position = 2, description = "JDBC port number on the remote PostgreSQL instance.") -@AdapterSettingString(name = "database", defaultValue = "polypheny", position = 3, +@AdapterSettingString(name = "database", defaultValue = "postgres", position = 3, description = "Name of the database to connect to.") -@AdapterSettingString(name = "username", defaultValue = "polypheny", position = 4, +@AdapterSettingString(name = "username", defaultValue = "postgres", position = 4, description = "Username to be used for authenticating at the remote instance.") -@AdapterSettingString(name = "password", defaultValue = "polypheny", position = 5, +@AdapterSettingString(name = "password", defaultValue = "password", position = 5, description = "Password to be used for authenticating at the remote instance.") @AdapterSettingInteger(name = "maxConnections", defaultValue = 25, description = "Maximum number of concurrent JDBC connections.") diff --git a/webui/src/main/java/org/polypheny/db/webui/Crud.java b/webui/src/main/java/org/polypheny/db/webui/Crud.java index 0861ed7ef4..b01e5f21c3 100644 --- a/webui/src/main/java/org/polypheny/db/webui/Crud.java +++ b/webui/src/main/java/org/polypheny/db/webui/Crud.java @@ -278,6 +278,7 @@ RelationalResult getTable(
final UIRequest request ) { } String fullTableName = getFullEntityName( request.entityId ); + // String fullTableName = "\"TEST\""; query.append( "SELECT * FROM " ) .append( fullTableName ) .append( where ) @@ -440,6 +441,7 @@ private LogicalNamespace getNamespace( EditTableRequest request ) { private String getFullEntityName( long entityId ) { LogicalTable table = Catalog.snapshot().rel().getTable( entityId ).orElseThrow(); + // TODO Change Namepsace for oraale !!! LogicalNamespace namespace = Catalog.snapshot().getNamespace( table.namespaceId ).orElseThrow(); return String.format( "\"%s\".\"%s\"", namespace.name, table.name ); } @@ -2634,6 +2636,9 @@ void getCatalog( final Context ctx ) { */ private long getTableSize( Transaction transaction, final UIRequest request ) { String tableId = getFullEntityName( request.entityId ); + // String tableId = "\"TEST\""; + + // TODO: Change sql query here to: tableID = ""system"."test"" String query = "SELECT count(*) FROM " + tableId; if ( request.filter != null ) { query += " " + filterTable( request.filter ); diff --git a/webui/src/main/java/org/polypheny/db/webui/WebSocket.java b/webui/src/main/java/org/polypheny/db/webui/WebSocket.java index 5445b9021a..c4cf31bacc 100644 --- a/webui/src/main/java/org/polypheny/db/webui/WebSocket.java +++ b/webui/src/main/java/org/polypheny/db/webui/WebSocket.java @@ -112,6 +112,7 @@ public void onMessage( final WsMessageContext ctx ) { if ( ctx.message().equals( "\"keepalive\"" ) ) { return; } + log.error( "UI message received: " + ctx.message() ); //close analyzers of a previous query that was sent over the same socket. Crud.cleanupOldSession( queryAnalyzers, ctx.getSessionId() ); @@ -246,6 +247,7 @@ public void onMessage( final WsMessageContext ctx ) { if ( result.xid != null ) { xIds.add( result.xid ); } + System.out.println( result ); ctx.send( result ); break; default: diff --git a/webui/src/main/java/org/polypheny/db/webui/schemaDiscovery/OracleConnection.java b/webui/src/main/java/org/polypheny/db/webui/schemaDiscovery/OracleConnection.java new file mode 100644 index 0000000000..9303060d1b --- /dev/null +++ b/webui/src/main/java/org/polypheny/db/webui/schemaDiscovery/OracleConnection.java @@ -0,0 +1,71 @@ +/* + * Copyright 2019-2025 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.db.webui.schemaDiscovery; + +import java.sql.Connection; +import java.sql.DriverManager; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.Statement; + +import java.sql.*; + +public class OracleConnection { + + public static void main(String[] args) { + // Connection settings + String url = "jdbc:oracle:thin:@localhost:1521/XE"; + String username = "system"; + String password = "roman123"; + + // SQL query + String query = "SELECT * FROM test"; + + try (Connection conn = DriverManager.getConnection(url, username, password); + Statement stmt = conn.createStatement(); + ResultSet rs = stmt.executeQuery(query)) { + + // Fetch metadata (e.g. column count and names) + ResultSetMetaData metaData = rs.getMetaData(); + int columnCount = metaData.getColumnCount(); + + // Iterate over all rows + while (rs.next()) { + for (int i = 1; i <= columnCount; i++) { + System.out.print(rs.getString(i)); + if (i < columnCount) System.out.print(" | "); + } + System.out.println(); + } + + } catch (SQLException e) { + System.out.println("Error while connecting or querying:"); + e.printStackTrace(); + } + } +} + +/* +// Check whether the ResultSet contains data + if (rs.next()) { + // Assuming the table has a column "spalte" (as an example) + int spalte = rs.getInt("spalte"); + System.out.println("Value from column: " + spalte); + } else { + System.out.println("No data found."); + } + */ From 94ed87eda0ec5c74dc88d69ffd49ef3afe266196 Mon Sep 17 00:00:00 2001 From: romanost03 Date: Thu, 10 Apr 2025 15:55:25 +0200 Subject: [PATCH 09/68] Changes in OracleSource.java, so the query in the "getExportedColumns" method searches with uppercase letters (lowercase doesn't work). --- .../source/OracleSource.java | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/source/OracleSource.java b/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/source/OracleSource.java index 376e04ea6d..09df11b474 100644 --- a/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/source/OracleSource.java +++ b/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/source/OracleSource.java @@ -144,7 +144,7 @@ public Map<String, List<ExportedColumn>> getExportedColumns() { List<String> primaryKeyColumns = new ArrayList<>(); try ( ResultSet pk = dbmd.getPrimaryKeys(null, schema, tableName)) { while (pk.next()) { - primaryKeyColumns.add(pk.getString("COLUMN_NAME")); + primaryKeyColumns.add(pk.getString("COLUMN_NAME").toUpperCase()); } } @@ -181,7 +181,7 @@ public Map<String, List<ExportedColumn>> getExportedColumns() { } exportedColumns.add(new ExportedColumn( - columns.getString("COLUMN_NAME").toLowerCase(), + columns.getString("COLUMN_NAME").toUpperCase(), type, null, // no collection length, scale, null, null, "YES".equalsIgnoreCase(columns.getString("IS_NULLABLE")), schema, tableName, - columns.getString("COLUMN_NAME"), + columns.getString("COLUMN_NAME").toUpperCase(), columns.getInt("ORDINAL_POSITION"), - primaryKeyColumns.contains(columns.getString("COLUMN_NAME")) + primaryKeyColumns.contains(columns.getString("COLUMN_NAME").toUpperCase()) )); } From cee60f8135ecce7367a2b30f6efdd653cbb0c0e0 Mon Sep 17 00:00:00 2001 From: romanost03 Date: Wed, 16 Apr 2025 17:44:22 +0200 Subject: [PATCH 10/68] New interface and functionality for adapters for retrieving their metadata.
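Editor's note: the commit below introduces the MetadataProvider contract and the generic Node tree it returns. As a hedged, minimal sketch of how an adapter is expected to implement it, with the class name and all schema/table/column values invented for illustration (real adapters, as the PostgreSQL source below shows, derive the tree from getExportedColumns()):

import java.util.List;
import java.util.Map;
import org.polypheny.db.schemaDiscovery.MetadataProvider;
import org.polypheny.db.schemaDiscovery.Node;

// Hypothetical adapter; only meant to show the shape of the contract.
public class DemoMetadataSource implements MetadataProvider {

    @Override
    public Node fetchMetadataTree() {
        Node root = new Node( "relational", "demo" );  // root: data model plus adapter name
        Node schema = new Node( "schema", "public" );
        Node table = new Node( "table", "users" );
        Node column = new Node( "column", "id" );
        column.addProperty( "type", "INTEGER" );
        column.addProperty( "primaryKey", true );
        table.addChild( column );
        schema.addChild( table );
        root.addChild( schema );
        return root;
    }

    @Override
    public Object fetchpreview() {
        // No sample rows in this sketch; a later commit changes the signature to take a row limit.
        return Map.of( "public.users", List.of() );
    }
}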
--- .../org/polypheny/db/adapter/DataSource.java | 2 + .../db/schemaDiscovery/MetadataProvider.java | 26 +++++++ .../polypheny/db/schemaDiscovery/Node.java | 59 +++++++++++++++ .../jdbc/sources/AbstractJdbcSource.java | 1 - .../OraclePlugin.java | 5 +- .../source/OracleSource.java | 62 ++++++++-------- .../postgres/source/PostgresqlSource.java | 66 +++++++++++++++++ .../java/org/polypheny/db/webui/Crud.java | 3 +- .../DataHandling/AttributeInfo.java | 5 ++ .../schemaDiscovery/PostgreSqlConnection.java | 74 ++++++++++++------- 10 files changed, 239 insertions(+), 64 deletions(-) create mode 100644 core/src/main/java/org/polypheny/db/schemaDiscovery/MetadataProvider.java create mode 100644 core/src/main/java/org/polypheny/db/schemaDiscovery/Node.java diff --git a/core/src/main/java/org/polypheny/db/adapter/DataSource.java b/core/src/main/java/org/polypheny/db/adapter/DataSource.java index 6234046bfd..eeea17e3c3 100644 --- a/core/src/main/java/org/polypheny/db/adapter/DataSource.java +++ b/core/src/main/java/org/polypheny/db/adapter/DataSource.java @@ -25,6 +25,7 @@ import org.pf4j.ExtensionPoint; import org.polypheny.db.catalog.catalogs.AdapterCatalog; import org.polypheny.db.catalog.entity.LogicalAdapter.AdapterType; +import org.polypheny.db.schemaDiscovery.MetadataProvider; import org.polypheny.db.type.PolyType; @Getter @@ -47,6 +48,7 @@ protected DataSource( final long adapterId, final String uniqueName, final Map children; + @Setter + @Getter + private Map properties; + + public Node(String type, String name) { + this.type = type; + this.name = name; + this.children = new ArrayList<>(); + this.properties = new HashMap<>(); + } + + + public void addChild(Node node) { + children.add(node); + } + + + public void addProperty(String key, Object value) { + properties.put(key, value); + } + + +} diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/sources/AbstractJdbcSource.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/sources/AbstractJdbcSource.java index e5053e519d..47f361c263 100644 --- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/sources/AbstractJdbcSource.java +++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/sources/AbstractJdbcSource.java @@ -204,7 +204,6 @@ public void rollback( PolyXid xid ) { protected abstract boolean requiresSchema(); - // TODO: Handle case if it is an oracle adapter (not possible to define a database). @Override public Map> getExportedColumns() { Map> map = new HashMap<>(); diff --git a/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/OraclePlugin.java b/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/OraclePlugin.java index 25f8661558..8dfdb0bc25 100644 --- a/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/OraclePlugin.java +++ b/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/OraclePlugin.java @@ -29,7 +29,7 @@ public class OraclePlugin extends PolyPlugin { public static final String ADAPTER_NAME = "Oracle"; private long sourceId; - // private long storeId; + private long storeId; public OraclePlugin( PluginContext context ) { super( context ); } @@ -39,7 +39,7 @@ public class OraclePlugin extends PolyPlugin { public void afterCatalogInit() { // SqlDialectRegistry.registerDialect( "Oracle", OracleSqlDialect.DEFAULT ); // TODO: Dialect might not be necessary. 
this.sourceId = AdapterManager.addAdapterTemplate( OracleSource.class, ADAPTER_NAME, OracleSource::new ); - // this.storeId = AdapterManager.addAdapterTemplate( OracleStore.class, ADAPTER_NAME, OracleStore::new ); + this.storeId = AdapterManager.addAdapterTemplate( OracleStore.class, ADAPTER_NAME, OracleStore::new ); } @@ -47,5 +47,6 @@ public void afterCatalogInit() { public void stop() { SqlDialectRegistry.unregisterDialect( "Oracle" ); // TODO: if dialect is not necessary, unregistering dialect is redundant. AdapterManager.removeAdapterTemplate( this.sourceId ); + AdapterManager.removeAdapterTemplate( this.storeId ); } } diff --git a/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/source/OracleSource.java b/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/source/OracleSource.java index 09df11b474..83dcb3786e 100644 --- a/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/source/OracleSource.java +++ b/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/source/OracleSource.java @@ -114,7 +114,8 @@ protected void reloadSettings( List<String> updatedSettings ) { @Override public List<PhysicalEntity> createTable( Context context, LogicalTableWrapper logical, AllocationTableWrapper allocation ) { PhysicalTable table = adapterCatalog.createTable( - logical.table.getNamespaceName(), + // logical.table.getNamespaceName(), + "SYSTEM", logical.table.name, logical.columns.stream().collect( Collectors.toMap( c -> c.id, c -> c.name ) ), logical.table, @@ -130,80 +131,77 @@ public List<PhysicalEntity> createTable( Context context, LogicalTableWrapper lo public Map<String, List<ExportedColumn>> getExportedColumns() { Map<String, List<ExportedColumn>> map = new HashMap<>(); - PolyXid xid = PolyXid.generateLocalTransactionIdentifier( PUID.EMPTY_PUID, PUID.EMPTY_PUID); + PolyXid xid = PolyXid.generateLocalTransactionIdentifier( PUID.EMPTY_PUID, PUID.EMPTY_PUID ); try { - ConnectionHandler connectionHandler = connectionFactory.getOrCreateConnectionHandler(xid); + ConnectionHandler connectionHandler = connectionFactory.getOrCreateConnectionHandler( xid ); java.sql.Statement statement = connectionHandler.getStatement(); Connection connection = statement.getConnection(); DatabaseMetaData dbmd = connection.getMetaData(); - // For Oracle: take the user (e.g. SYSTEM) as the schema - String schema = "SYSTEM"; // yields e.g.
SYSTEM - String tableName = "TEST"; // <- or fetch the name dynamically from the settings + String schema = "SYSTEM"; + String tableName = "TEST"; List<String> primaryKeyColumns = new ArrayList<>(); - try ( ResultSet pk = dbmd.getPrimaryKeys(null, schema, tableName)) { - while (pk.next()) { - primaryKeyColumns.add(pk.getString("COLUMN_NAME").toUpperCase()); + try ( ResultSet pk = dbmd.getPrimaryKeys( null, schema, tableName ) ) { + while ( pk.next() ) { + primaryKeyColumns.add( pk.getString( "COLUMN_NAME" ).toUpperCase() ); } } - - try (ResultSet columns = dbmd.getColumns(null, schema, tableName, "%")) { + try ( ResultSet columns = dbmd.getColumns( null, schema, tableName, "%" ) ) { List<ExportedColumn> exportedColumns = new ArrayList<>(); - - while (columns.next()) { - PolyType type = PolyType.getNameForJdbcType(columns.getInt("DATA_TYPE")); + while ( columns.next() ) { + PolyType type = PolyType.getNameForJdbcType( columns.getInt( "DATA_TYPE" ) ); Integer length = null; Integer scale = null; - switch (type) { + switch ( type ) { case DECIMAL: - length = columns.getInt("COLUMN_SIZE"); - scale = columns.getInt("DECIMAL_DIGITS"); + length = columns.getInt( "COLUMN_SIZE" ); + scale = columns.getInt( "DECIMAL_DIGITS" ); break; case CHAR: case VARCHAR: type = PolyType.VARCHAR; - length = columns.getInt("COLUMN_SIZE"); + length = columns.getInt( "COLUMN_SIZE" ); break; case VARBINARY: case BINARY: type = PolyType.VARBINARY; - length = columns.getInt("COLUMN_SIZE"); + length = columns.getInt( "COLUMN_SIZE" ); break; case TIME: case TIMESTAMP: - length = columns.getInt("DECIMAL_DIGITS"); + length = columns.getInt( "DECIMAL_DIGITS" ); break; default: - // other types without length/scale break; } - exportedColumns.add(new ExportedColumn( - columns.getString("COLUMN_NAME").toUpperCase(), + exportedColumns.add( new ExportedColumn( + columns.getString( "COLUMN_NAME" ).toUpperCase(), type, - null, // no collection + null, length, scale, null, null, - "YES".equalsIgnoreCase(columns.getString("IS_NULLABLE")), + "YES".equalsIgnoreCase( columns.getString( "IS_NULLABLE" ) ), schema, tableName, - columns.getString("COLUMN_NAME").toUpperCase(), - columns.getInt("ORDINAL_POSITION"), - primaryKeyColumns.contains(columns.getString("COLUMN_NAME").toUpperCase()) - )); + columns.getString( "COLUMN_NAME" ).toUpperCase(), + columns.getInt( "ORDINAL_POSITION" ), + primaryKeyColumns.contains( columns.getString( "COLUMN_NAME" ).toUpperCase() ) + ) ); } - map.put(tableName, exportedColumns); + map.put( tableName, exportedColumns ); } - } catch ( SQLException | ConnectionHandlerException e) { - throw new GenericRuntimeException("Exception while collecting Oracle schema info", e); + } catch ( SQLException | ConnectionHandlerException e ) { + throw new GenericRuntimeException( "Exception while collecting Oracle schema info", e ); } return map; } + } diff --git a/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java b/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java index b82e22c919..f3d412b727 100644 --- a/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java +++ b/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java @@ -18,6 +18,8 @@ import java.sql.SQLException; +import java.util.ArrayList; +import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.stream.Collectors; @@ -34,6 +36,8 @@ import
org.polypheny.db.catalog.entity.physical.PhysicalEntity; import org.polypheny.db.catalog.entity.physical.PhysicalTable; import org.polypheny.db.prepare.Context; +import org.polypheny.db.schemaDiscovery.MetadataProvider; +import org.polypheny.db.schemaDiscovery.Node; @Slf4j @@ -60,6 +64,66 @@ description = "List of tables which should be imported. The names must to be separated by a comma.") public class PostgresqlSource extends AbstractJdbcSource { + + + public Node fetchMetadataTree() { + Node root = new Node("relational", getUniqueName()); + + Map> exported = getExportedColumns(); + + Map>> grouped = new HashMap<>(); + for (Map.Entry> entry : exported.entrySet()) { + for (ExportedColumn col : entry.getValue()) { + grouped + .computeIfAbsent(col.physicalSchemaName, k -> new HashMap<>()) + .computeIfAbsent(col.physicalTableName, k -> new ArrayList<>()) + .add(col); + } + } + + for (Map.Entry>> schemaEntry : grouped.entrySet()) { + Node schemaNode = new Node("schema", schemaEntry.getKey()); + + for (Map.Entry> tableEntry : schemaEntry.getValue().entrySet()) { + Node tableNode = new Node("table", tableEntry.getKey()); + + for (ExportedColumn col : tableEntry.getValue()) { + Node colNode = new Node("column", col.getName()); + colNode.addProperty("type", col.type.getName()); + colNode.addProperty("nullable", col.nullable); + colNode.addProperty("primaryKey", col.primary); + + if (col.length != null) { + colNode.addProperty("length", col.length); + } + if (col.scale != null) { + colNode.addProperty("scale", col.scale); + } + + tableNode.addChild(colNode); + } + + schemaNode.addChild(tableNode); + } + + root.addChild(schemaNode); + } + + return root; + } + + + private void printTree(Node node, int depth) { + System.out.println(" ".repeat(depth) + node.getType() + ": " + node.getName()); + for (Map.Entry entry : node.getProperties().entrySet()) { + System.out.println(" ".repeat(depth + 1) + "- " + entry.getKey() + ": " + entry.getValue()); + } + for (Node child : node.getChildren()) { + printTree(child, depth + 1); + } + } + + public PostgresqlSource( final long storeId, final String uniqueName, final Map settings, final DeployMode mode ) { super( storeId, @@ -112,6 +176,8 @@ public List createTable( Context context, LogicalTableWrapper lo logical.pkIds, allocation ); adapterCatalog.replacePhysical( currentJdbcSchema.createJdbcTable( table ) ); + Node node = fetchMetadataTree(); + printTree( node, 0 ); return List.of( table ); } diff --git a/webui/src/main/java/org/polypheny/db/webui/Crud.java b/webui/src/main/java/org/polypheny/db/webui/Crud.java index b01e5f21c3..9ea35a8d9e 100644 --- a/webui/src/main/java/org/polypheny/db/webui/Crud.java +++ b/webui/src/main/java/org/polypheny/db/webui/Crud.java @@ -152,6 +152,8 @@ import org.polypheny.db.processing.ImplementationContext; import org.polypheny.db.processing.ImplementationContext.ExecutedContext; import org.polypheny.db.processing.QueryContext; +import org.polypheny.db.schemaDiscovery.MetadataProvider; +import org.polypheny.db.schemaDiscovery.Node; import org.polypheny.db.security.SecurityManager; import org.polypheny.db.transaction.Statement; import org.polypheny.db.transaction.Transaction; @@ -441,7 +443,6 @@ private LogicalNamespace getNamespace( EditTableRequest request ) { private String getFullEntityName( long entityId ) { LogicalTable table = Catalog.snapshot().rel().getTable( entityId ).orElseThrow(); - // TODO Change Namepsace for oraale !!! 
LogicalNamespace namespace = Catalog.snapshot().getNamespace( table.namespaceId ).orElseThrow(); return String.format( "\"%s\".\"%s\"", namespace.name, table.name ); } diff --git a/webui/src/main/java/org/polypheny/db/webui/schemaDiscovery/DataHandling/AttributeInfo.java b/webui/src/main/java/org/polypheny/db/webui/schemaDiscovery/DataHandling/AttributeInfo.java index 9654ddbd24..9dcd7faaad 100644 --- a/webui/src/main/java/org/polypheny/db/webui/schemaDiscovery/DataHandling/AttributeInfo.java +++ b/webui/src/main/java/org/polypheny/db/webui/schemaDiscovery/DataHandling/AttributeInfo.java @@ -16,12 +16,17 @@ package org.polypheny.db.webui.schemaDiscovery.DataHandling; +import java.util.ArrayList; +import java.util.List; + public class AttributeInfo { public String name; public String type; + public List sampleValues; public AttributeInfo( String name, String type ) { this.name = name; this.type = type; + this.sampleValues = new ArrayList<>(); } } diff --git a/webui/src/main/java/org/polypheny/db/webui/schemaDiscovery/PostgreSqlConnection.java b/webui/src/main/java/org/polypheny/db/webui/schemaDiscovery/PostgreSqlConnection.java index 353c43c4cd..5596709347 100644 --- a/webui/src/main/java/org/polypheny/db/webui/schemaDiscovery/PostgreSqlConnection.java +++ b/webui/src/main/java/org/polypheny/db/webui/schemaDiscovery/PostgreSqlConnection.java @@ -44,46 +44,63 @@ public static List getDatabasesSchemasAndTables() throws SQLExcept List dbs = new ArrayList<>(); String metaUrl = "jdbc:postgresql://" + host + ":" + port + "/postgres"; - try ( Connection metaConn = DriverManager.getConnection( metaUrl, user, password ); + try ( + Connection metaConn = DriverManager.getConnection(metaUrl, user, password); Statement stmt = metaConn.createStatement(); - ResultSet rs = stmt.executeQuery( "SELECT datname FROM pg_database WHERE datistemplate = false" ) ) { - - while ( rs.next() ) { - String dbName = rs.getString( "datname" ); - DatabaseInfo dbInfo = new DatabaseInfo( dbName ); + ResultSet rs = stmt.executeQuery("SELECT datname FROM pg_database WHERE datistemplate = false") + ) { + while (rs.next()) { + String dbName = rs.getString("datname"); + DatabaseInfo dbInfo = new DatabaseInfo(dbName); String dbUrl = "jdbc:postgresql://" + host + ":" + port + "/" + dbName; - try ( Connection dbConn = DriverManager.getConnection( dbUrl, user, password ) ) { + try (Connection dbConn = DriverManager.getConnection(dbUrl, user, password)) { DatabaseMetaData meta = dbConn.getMetaData(); ResultSet schemas = meta.getSchemas(); - while ( schemas.next() ) { - String schemaName = schemas.getString( "TABLE_SCHEM" ); - SchemaInfo schema = new SchemaInfo( schemaName ); - - ResultSet tables = meta.getTables( null, schemaName, "%", new String[]{ "TABLE" } ); - while ( tables.next() ) { - String tableName = tables.getString( "TABLE_NAME" ); - TableInfo table = new TableInfo( tableName ); - - ResultSet columns = meta.getColumns( null, schemaName, tableName, "%" ); - while ( columns.next() ) { - String columnName = columns.getString( "COLUMN_NAME" ); - String columnType = columns.getString( "TYPE_NAME" ); - table.attributes.add( new AttributeInfo( columnName, columnType ) ); + while (schemas.next()) { + String schemaName = schemas.getString("TABLE_SCHEM"); + SchemaInfo schema = new SchemaInfo(schemaName); + + ResultSet tables = meta.getTables(null, schemaName, "%", new String[]{"TABLE"}); + while (tables.next()) { + String tableName = tables.getString("TABLE_NAME"); + TableInfo table = new TableInfo(tableName); + + ResultSet columns 
= meta.getColumns(null, schemaName, tableName, "%"); + while (columns.next()) { + String columnName = columns.getString("COLUMN_NAME"); + String columnType = columns.getString("TYPE_NAME"); + + AttributeInfo attribute = new AttributeInfo(columnName, columnType); + + String sampleQuery = "SELECT \"" + columnName + "\" FROM \"" + schemaName + "\".\"" + tableName + "\" LIMIT 20"; + try ( + Statement sampleStmt = dbConn.createStatement(); + ResultSet sampleRs = sampleStmt.executeQuery(sampleQuery) + ) { + while (sampleRs.next()) { + Object value = sampleRs.getObject(columnName); + attribute.sampleValues.add(value != null ? value.toString() : "NULL"); + } + } catch (SQLException e) { + System.err.println("Error while fetching sample data for column " + columnName + ": " + e.getMessage()); + } + + table.attributes.add(attribute); } - schema.tables.add( table ); + schema.tables.add(table); } - dbInfo.schemas.add( schema ); + dbInfo.schemas.add(schema); } - } catch ( SQLException e ) { - System.err.println( "Error while fetching schemas for DB " + dbName + ": " + e.getMessage() ); + } catch (SQLException e) { + System.err.println("Error while fetching schemas for DB " + dbName + ": " + e.getMessage()); } - dbs.add( dbInfo ); + dbs.add(dbInfo); } } @@ -93,7 +110,8 @@ public static List<DatabaseInfo> getDatabasesSchemasAndTables() throws SQLExcept - /*public static void main(String[] args) { + + public static void main(String[] args) { try { List<DatabaseInfo> dbs = getDatabasesSchemasAndTables(); for (DatabaseInfo db : dbs) { @@ -105,6 +123,6 @@ public static List<DatabaseInfo> getDatabasesSchemasAndTables() throws SQLExcept System.err.println("Error during schema discovery: " + e.getMessage()); e.printStackTrace(); } - }*/ + } } From 913c6e0f7a5f3d8de28cdae099dbfe1347156668 Mon Sep 17 00:00:00 2001 From: romanost03 Date: Fri, 18 Apr 2025 17:52:33 +0200 Subject: [PATCH 11/68] First code changes in AdapterTemplate.java and HTTP request for preview added in Crud and HttpServer.
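Editor's note: a hedged sketch of the round trip this commit prepares. The UI would POST a JSON body matching PreviewRequest to the new /previewTable route, and the handler binds it with ctx.bodyAsClass( PreviewRequest.class ). All concrete values below are made up for illustration:

import java.util.Map;
import org.polypheny.db.catalog.entity.LogicalAdapter.AdapterType;
import org.polypheny.db.webui.models.requests.PreviewRequest;

public class PreviewRequestExample {

    // Builds the same payload the web UI would serialize to JSON.
    public static PreviewRequest sample() {
        return new PreviewRequest(
                "Postgresql",            // adapterName (assumed registration name)
                AdapterType.SOURCE,      // adapterType
                Map.of(
                        "host", "localhost",
                        "port", "5432",
                        "database", "postgres",
                        "username", "postgres",
                        "password", "password",
                        "tables", "public.emp" ),
                10 );                    // rowLimit: number of sample rows per table
    }
}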
--- .../db/adapter/java/AdapterTemplate.java | 13 ++++++ .../OraclePlugin.java | 4 +- .../postgres/source/PostgresqlSource.java | 4 ++ .../java/org/polypheny/db/webui/Crud.java | 9 ++++- .../org/polypheny/db/webui/HttpServer.java | 2 + .../webui/models/requests/PreviewRequest.java | 40 +++++++++++++++++++ 6 files changed, 69 insertions(+), 3 deletions(-) create mode 100644 webui/src/main/java/org/polypheny/db/webui/models/requests/PreviewRequest.java diff --git a/core/src/main/java/org/polypheny/db/adapter/java/AdapterTemplate.java b/core/src/main/java/org/polypheny/db/adapter/java/AdapterTemplate.java index f9f7cae86d..6561c454da 100644 --- a/core/src/main/java/org/polypheny/db/adapter/java/AdapterTemplate.java +++ b/core/src/main/java/org/polypheny/db/adapter/java/AdapterTemplate.java @@ -27,6 +27,7 @@ import org.polypheny.db.adapter.Adapter; import org.polypheny.db.adapter.AdapterManager; import org.polypheny.db.adapter.AdapterManager.Function5; +import org.polypheny.db.adapter.DataSource; import org.polypheny.db.adapter.DataStore; import org.polypheny.db.adapter.DeployMode; import org.polypheny.db.adapter.DeployMode.DeploySetting; @@ -100,4 +101,16 @@ public DeployMode getDefaultMode() { return clazz.getAnnotation( AdapterProperties.class ).defaultMode(); } + + public DataSource createEphemeral( Map settings ) { + String previewName = "_preview" + System.nanoTime(); + Adapter adapter = deployer.get( -1L, previewName, settings, DeployMode.REMOTE ); + + if ( !(adapter instanceof DataSource ds ) ) { + throw new GenericRuntimeException( "The adapter does not implement DataSource." ); + } + + return ds; + } + } diff --git a/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/OraclePlugin.java b/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/OraclePlugin.java index 8dfdb0bc25..eab1015fc5 100644 --- a/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/OraclePlugin.java +++ b/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/OraclePlugin.java @@ -39,7 +39,7 @@ public class OraclePlugin extends PolyPlugin { public void afterCatalogInit() { // SqlDialectRegistry.registerDialect( "Oracle", OracleSqlDialect.DEFAULT ); // TODO: Dialect might not be necessary. this.sourceId = AdapterManager.addAdapterTemplate( OracleSource.class, ADAPTER_NAME, OracleSource::new ); - this.storeId = AdapterManager.addAdapterTemplate( OracleStore.class, ADAPTER_NAME, OracleStore::new ); + // this.storeId = AdapterManager.addAdapterTemplate( OracleStore.class, ADAPTER_NAME, OracleStore::new ); } @@ -47,6 +47,6 @@ public void afterCatalogInit() { public void stop() { SqlDialectRegistry.unregisterDialect( "Oracle" ); // TODO: if dialect is not necessary, unregistering dialect is redundant. 
AdapterManager.removeAdapterTemplate( this.sourceId ); - AdapterManager.removeAdapterTemplate( this.storeId ); + // AdapterManager.removeAdapterTemplate( this.storeId ); } } diff --git a/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java b/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java index f3d412b727..9171fb4c39 100644 --- a/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java +++ b/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java @@ -181,5 +181,9 @@ public List createTable( Context context, LogicalTableWrapper lo return List.of( table ); } + public static void getPreview() { + log.error( "Methodenaufruf fΓΌr Postgresql-Preview funktioniert !!!" ); + } + } diff --git a/webui/src/main/java/org/polypheny/db/webui/Crud.java b/webui/src/main/java/org/polypheny/db/webui/Crud.java index 9ea35a8d9e..8b5708c97b 100644 --- a/webui/src/main/java/org/polypheny/db/webui/Crud.java +++ b/webui/src/main/java/org/polypheny/db/webui/Crud.java @@ -2077,6 +2077,14 @@ void getSources( final Context ctx ) { } + /** + * Get Metadata and preview of data before creating the adapter. + */ + void getMetadataAndPreview( final Context ctx ) { + + } + + /** * Deploy a new adapter */ @@ -2111,7 +2119,6 @@ void createAdapter( final Context ctx ) throws ServletException, IOException { } AdapterTemplate adapter = AdapterManager.getAdapterTemplate( a.adapterName, a.type ); Map allSettings = adapter.settings.stream().collect( Collectors.toMap( e -> e.name, e -> e ) ); - for ( Map.Entry entry : a.settings.entrySet() ) { AbstractAdapterSetting set = allSettings.get( entry.getKey() ); if ( set == null ) { diff --git a/webui/src/main/java/org/polypheny/db/webui/HttpServer.java b/webui/src/main/java/org/polypheny/db/webui/HttpServer.java index 51b3ff49d2..d2e1c43b40 100644 --- a/webui/src/main/java/org/polypheny/db/webui/HttpServer.java +++ b/webui/src/main/java/org/polypheny/db/webui/HttpServer.java @@ -358,6 +358,8 @@ private void attachRoutes( Javalin webuiServer, Crud crud ) { */ webuiServer.post( "/confirm", crud::sendConfirmation ); + webuiServer.post( "/previewTable", crud::getMetadataAndPreview ); + webuiServer.get( "/getPolyAlgRegistry", crud::getPolyAlgRegistry ); webuiServer.post( "/buildPolyPlan", crud::buildPlanFromPolyAlg ); diff --git a/webui/src/main/java/org/polypheny/db/webui/models/requests/PreviewRequest.java b/webui/src/main/java/org/polypheny/db/webui/models/requests/PreviewRequest.java new file mode 100644 index 0000000000..ba5b03dd7a --- /dev/null +++ b/webui/src/main/java/org/polypheny/db/webui/models/requests/PreviewRequest.java @@ -0,0 +1,40 @@ +/* + * Copyright 2019-2025 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.db.webui.models.requests; + +import org.polypheny.db.catalog.entity.LogicalAdapter.AdapterType; +import java.util.Map; + +public class PreviewRequest { + + public String adapterName; + public AdapterType adapterType; + public Map settings; + public int rowLimit; + + public PreviewRequest( + String adapterName, + AdapterType adapterType, + Map settings, + int rowLimit ) { + this.adapterName = adapterName; + this.adapterType = adapterType; + this.settings = settings; + this.rowLimit = rowLimit; + } + +} From 859b1660ed2f3c110b8d9d787d71659f18c2e469 Mon Sep 17 00:00:00 2001 From: romanost03 Date: Tue, 22 Apr 2025 18:48:53 +0200 Subject: [PATCH 12/68] Functionality for deriving metadata and preview from adapter (hardcoded for postgres first so far) --- .../db/adapter/java/AdapterTemplate.java | 31 +++++++++++++++++++ .../db/schemaDiscovery/MetadataProvider.java | 2 +- .../postgres/source/PostgresqlSource.java | 10 ++++-- .../java/org/polypheny/db/webui/Crud.java | 20 ++++++++++-- .../webui/models/requests/PreviewRequest.java | 2 ++ 5 files changed, 60 insertions(+), 5 deletions(-) diff --git a/core/src/main/java/org/polypheny/db/adapter/java/AdapterTemplate.java b/core/src/main/java/org/polypheny/db/adapter/java/AdapterTemplate.java index 6561c454da..f3ac35a85a 100644 --- a/core/src/main/java/org/polypheny/db/adapter/java/AdapterTemplate.java +++ b/core/src/main/java/org/polypheny/db/adapter/java/AdapterTemplate.java @@ -22,6 +22,7 @@ import java.util.List; import java.util.Map; import lombok.Value; +import lombok.extern.slf4j.Slf4j; import org.polypheny.db.adapter.AbstractAdapterSetting; import org.polypheny.db.adapter.AbstractAdapterSettingList; import org.polypheny.db.adapter.Adapter; @@ -35,7 +36,10 @@ import org.polypheny.db.catalog.entity.LogicalAdapter.AdapterType; import org.polypheny.db.catalog.exceptions.GenericRuntimeException; import org.polypheny.db.docker.DockerManager; +import org.polypheny.db.schemaDiscovery.MetadataProvider; +import org.polypheny.db.schemaDiscovery.Node; +@Slf4j @Value public class AdapterTemplate { @@ -104,6 +108,7 @@ public DeployMode getDefaultMode() { public DataSource createEphemeral( Map settings ) { String previewName = "_preview" + System.nanoTime(); + log.info( "Creating ephemeral adapter {} with name {}", clazz.getName(), previewName ); Adapter adapter = deployer.get( -1L, previewName, settings, DeployMode.REMOTE ); if ( !(adapter instanceof DataSource ds ) ) { @@ -113,4 +118,30 @@ public DataSource createEphemeral( Map settings ) { return ds; } + + public PreviewResult preview( Map settings, int limit ) { + DataSource tmp = createEphemeral( settings ); + log.info("Adapter class: {}", tmp.getClass().getName()); + log.info("Implements MetadataProvider: {}", tmp instanceof MetadataProvider); + try { + if ( tmp instanceof MetadataProvider mp ) { + log.info( "🎯 Adapter supports MetadataProvider. Fetching metadata and preview..." ); + Node meta = mp.fetchMetadataTree(); + Object rows = mp.fetchpreview( limit ); + return new PreviewResult( meta, rows ); + } + throw new GenericRuntimeException( "The adapter does not implement MetadataProvider." ); + } finally { + log.info( "πŸ”» Shutting down preview adapter." 
); + tmp.shutdown(); + } + } + + + @Value + public static class PreviewResult { + Node metadata; + Object preview; + } + } diff --git a/core/src/main/java/org/polypheny/db/schemaDiscovery/MetadataProvider.java b/core/src/main/java/org/polypheny/db/schemaDiscovery/MetadataProvider.java index dab2a97dcb..8c0a330647 100644 --- a/core/src/main/java/org/polypheny/db/schemaDiscovery/MetadataProvider.java +++ b/core/src/main/java/org/polypheny/db/schemaDiscovery/MetadataProvider.java @@ -20,7 +20,7 @@ public interface MetadataProvider { Node fetchMetadataTree(); - Object fetchpreview(); + Object fetchpreview(int limit); } diff --git a/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java b/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java index 9171fb4c39..99b575e7c1 100644 --- a/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java +++ b/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java @@ -62,10 +62,10 @@ description = "Which level of transaction isolation should be used.") @AdapterSettingString(name = "tables", defaultValue = "foo,bar", description = "List of tables which should be imported. The names must to be separated by a comma.") -public class PostgresqlSource extends AbstractJdbcSource { - +public class PostgresqlSource extends AbstractJdbcSource implements MetadataProvider { + @Override public Node fetchMetadataTree() { Node root = new Node("relational", getUniqueName()); @@ -113,6 +113,12 @@ public Node fetchMetadataTree() { } + @Override + public Object fetchpreview( int limit ) { + return null; + } + + private void printTree(Node node, int depth) { System.out.println(" ".repeat(depth) + node.getType() + ": " + node.getName()); for (Map.Entry entry : node.getProperties().entrySet()) { diff --git a/webui/src/main/java/org/polypheny/db/webui/Crud.java b/webui/src/main/java/org/polypheny/db/webui/Crud.java index 8b5708c97b..79a3a6a2a9 100644 --- a/webui/src/main/java/org/polypheny/db/webui/Crud.java +++ b/webui/src/main/java/org/polypheny/db/webui/Crud.java @@ -17,6 +17,7 @@ package org.polypheny.db.webui; +import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableList; @@ -26,6 +27,7 @@ import com.google.gson.JsonDeserializer; import com.google.gson.JsonObject; import com.google.gson.JsonParseException; +import com.google.gson.JsonParser; import com.j256.simplemagic.ContentInfo; import com.j256.simplemagic.ContentInfoUtil; import io.javalin.http.Context; @@ -87,6 +89,7 @@ import org.polypheny.db.adapter.DataStore.FunctionalIndexInfo; import org.polypheny.db.adapter.index.IndexManager; import org.polypheny.db.adapter.java.AdapterTemplate; +import org.polypheny.db.adapter.java.AdapterTemplate.PreviewResult; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.polyalg.PolyAlgRegistry; import org.polypheny.db.catalog.Catalog; @@ -206,6 +209,7 @@ import org.polypheny.db.webui.models.requests.PartitioningRequest; import org.polypheny.db.webui.models.requests.PartitioningRequest.ModifyPartitionRequest; import org.polypheny.db.webui.models.requests.PolyAlgRequest; +import org.polypheny.db.webui.models.requests.PreviewRequest; import org.polypheny.db.webui.models.requests.UIRequest; import 
org.polypheny.db.webui.models.results.RelationalResult; import org.polypheny.db.webui.models.results.RelationalResult.RelationalResultBuilder; @@ -2078,13 +2082,25 @@ void getSources( final Context ctx ) { /** - * Get Metadata and preview of data before creating the adapter. + * Get Metadata and preview of data before fully creating the adapter. */ - void getMetadataAndPreview( final Context ctx ) { + public void getMetadataAndPreview( final Context ctx ) { + try { + PreviewRequest req = ctx.bodyAsClass( PreviewRequest.class ); + log.info( "Parsed preview request: {}", req ); + + AdapterTemplate template = AdapterTemplate.fromString( req.adapterName, req.adapterType ); + PreviewResult result = template.preview( req.settings, req.rowLimit ); + ctx.json( result ); + } catch ( Exception e ) { + log.error( "πŸ”₯ Error while handling preview request", e ); + ctx.status( 500 ).json( Map.of( "error", "Internal error: " + e.getMessage() ) ); + } } + /** * Deploy a new adapter */ diff --git a/webui/src/main/java/org/polypheny/db/webui/models/requests/PreviewRequest.java b/webui/src/main/java/org/polypheny/db/webui/models/requests/PreviewRequest.java index ba5b03dd7a..13003c06c6 100644 --- a/webui/src/main/java/org/polypheny/db/webui/models/requests/PreviewRequest.java +++ b/webui/src/main/java/org/polypheny/db/webui/models/requests/PreviewRequest.java @@ -26,6 +26,8 @@ public class PreviewRequest { public Map settings; public int rowLimit; + public PreviewRequest() { } + public PreviewRequest( String adapterName, AdapterType adapterType, From cf625dfb2906c96969c74707b9809d160aa82b1c Mon Sep 17 00:00:00 2001 From: romanost03 Date: Fri, 25 Apr 2025 17:38:34 +0200 Subject: [PATCH 13/68] PostgreSQL adapter delivers metadata and preview of data properly and sends it correctly to the UI. --- .../db/adapter/java/AdapterTemplate.java | 13 +- .../db/schemaDiscovery/MetadataProvider.java | 2 +- .../db/schemaDiscovery/NodeSerializer.java | 48 +++++ .../jdbc/sources/AbstractJdbcSource.java | 2 +- plugins/postgres-adapter/build.gradle | 1 + .../postgres/source/PostgresqlSource.java | 202 +++++++++++------- .../java/org/polypheny/db/webui/Crud.java | 3 +- .../webui/models/requests/PreviewRequest.java | 4 +- 8 files changed, 187 insertions(+), 88 deletions(-) create mode 100644 core/src/main/java/org/polypheny/db/schemaDiscovery/NodeSerializer.java diff --git a/core/src/main/java/org/polypheny/db/adapter/java/AdapterTemplate.java b/core/src/main/java/org/polypheny/db/adapter/java/AdapterTemplate.java index f3ac35a85a..4bf8e0fcec 100644 --- a/core/src/main/java/org/polypheny/db/adapter/java/AdapterTemplate.java +++ b/core/src/main/java/org/polypheny/db/adapter/java/AdapterTemplate.java @@ -21,6 +21,7 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import com.fasterxml.jackson.annotation.JsonProperty; import lombok.Value; import lombok.extern.slf4j.Slf4j; import org.polypheny.db.adapter.AbstractAdapterSetting; @@ -38,6 +39,7 @@ import org.polypheny.db.docker.DockerManager; import org.polypheny.db.schemaDiscovery.MetadataProvider; import org.polypheny.db.schemaDiscovery.Node; +import org.polypheny.db.schemaDiscovery.NodeSerializer; @Slf4j @Value @@ -127,8 +129,11 @@ public PreviewResult preview( Map settings, int limit ) { if ( tmp instanceof MetadataProvider mp ) { log.info( "🎯 Adapter supports MetadataProvider. Fetching metadata and preview..." 
); Node meta = mp.fetchMetadataTree(); - Object rows = mp.fetchpreview( limit ); - return new PreviewResult( meta, rows ); + String json = NodeSerializer.serializeNode( meta ).toString(); + Object rows = mp.fetchPreview( limit ); + log.error( json ); + log.error( rows.toString() ); + return new PreviewResult( json, rows ); } throw new GenericRuntimeException( "The adapter does not implement MetadataProvider." ); } finally { @@ -140,7 +145,9 @@ public PreviewResult preview( Map settings, int limit ) { @Value public static class PreviewResult { - Node metadata; + @JsonProperty + String metadata; + @JsonProperty Object preview; } diff --git a/core/src/main/java/org/polypheny/db/schemaDiscovery/MetadataProvider.java b/core/src/main/java/org/polypheny/db/schemaDiscovery/MetadataProvider.java index 8c0a330647..be84d6f944 100644 --- a/core/src/main/java/org/polypheny/db/schemaDiscovery/MetadataProvider.java +++ b/core/src/main/java/org/polypheny/db/schemaDiscovery/MetadataProvider.java @@ -20,7 +20,7 @@ public interface MetadataProvider { Node fetchMetadataTree(); - Object fetchpreview(int limit); + Object fetchPreview(int limit); } diff --git a/core/src/main/java/org/polypheny/db/schemaDiscovery/NodeSerializer.java b/core/src/main/java/org/polypheny/db/schemaDiscovery/NodeSerializer.java new file mode 100644 index 0000000000..35bcbe5a1a --- /dev/null +++ b/core/src/main/java/org/polypheny/db/schemaDiscovery/NodeSerializer.java @@ -0,0 +1,48 @@ +/* + * Copyright 2019-2025 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.db.schemaDiscovery; + +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.node.ArrayNode; +import com.fasterxml.jackson.databind.node.ObjectNode; + +public class NodeSerializer { + + private static final ObjectMapper objectMapper = new ObjectMapper(); + + public static ObjectNode serializeNode(Node node) { + ObjectNode json = objectMapper.createObjectNode(); + json.put( "type", node.getType() ); + json.put( "name", node.getName() ); + + ObjectNode props = objectMapper.createObjectNode(); + node.getProperties().forEach((key, value) -> { + props.putPOJO(key, value); + }); + json.set("properties", props); + + // Children + ArrayNode children = objectMapper.createArrayNode(); + for (Node child : node.getChildren()) { + children.add(serializeNode(child)); + } + json.set("children", children); + + return json; + } + +} diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/sources/AbstractJdbcSource.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/sources/AbstractJdbcSource.java index 47f361c263..38be9c1530 100644 --- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/sources/AbstractJdbcSource.java +++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/sources/AbstractJdbcSource.java @@ -56,7 +56,7 @@ public abstract class AbstractJdbcSource extends DataSource implements ExtensionPoint { @Delegate(excludes = Exclude.class) - private final RelationalScanDelegate delegate; + protected final RelationalScanDelegate delegate; protected SqlDialect dialect; protected JdbcSchema currentJdbcSchema; diff --git a/plugins/postgres-adapter/build.gradle b/plugins/postgres-adapter/build.gradle index e87572c823..f42951bc53 100644 --- a/plugins/postgres-adapter/build.gradle +++ b/plugins/postgres-adapter/build.gradle @@ -6,6 +6,7 @@ dependencies { compileOnly project(":plugins:sql-language") compileOnly project(":plugins:jdbc-adapter-framework") + implementation(project(":core")) implementation group: "net.postgis", name: "postgis-jdbc", version: postgis_version implementation group: "org.postgresql", name: "postgresql", version: postgresql_version // BSD 2-clause diff --git a/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java b/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java index 99b575e7c1..35f9799526 100644 --- a/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java +++ b/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java @@ -19,7 +19,9 @@ import java.sql.SQLException; import java.util.ArrayList; +import java.util.Arrays; import java.util.HashMap; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.stream.Collectors; @@ -29,15 +31,19 @@ import org.polypheny.db.adapter.annotations.AdapterSettingInteger; import org.polypheny.db.adapter.annotations.AdapterSettingList; import org.polypheny.db.adapter.annotations.AdapterSettingString; +import org.polypheny.db.adapter.jdbc.connection.ConnectionHandler; import org.polypheny.db.adapter.jdbc.sources.AbstractJdbcSource; import org.polypheny.db.adapter.postgres.PostgresqlSqlDialect; import org.polypheny.db.catalog.entity.allocation.AllocationTableWrapper; import org.polypheny.db.catalog.entity.logical.LogicalTableWrapper; import 
org.polypheny.db.catalog.entity.physical.PhysicalEntity; import org.polypheny.db.catalog.entity.physical.PhysicalTable; +import org.polypheny.db.catalog.exceptions.GenericRuntimeException; import org.polypheny.db.prepare.Context; import org.polypheny.db.schemaDiscovery.MetadataProvider; import org.polypheny.db.schemaDiscovery.Node; +import org.polypheny.db.transaction.PUID; +import org.polypheny.db.transaction.PolyXid; @Slf4j @@ -67,129 +73,165 @@ public class PostgresqlSource extends AbstractJdbcSource implements MetadataProv @Override public Node fetchMetadataTree() { - Node root = new Node("relational", getUniqueName()); + String dbName = settings.get( "database" ); + Node root = new Node( "relational", dbName ); Map> exported = getExportedColumns(); Map>> grouped = new HashMap<>(); - for (Map.Entry> entry : exported.entrySet()) { - for (ExportedColumn col : entry.getValue()) { + for ( Map.Entry> entry : exported.entrySet() ) { + for ( ExportedColumn col : entry.getValue() ) { grouped - .computeIfAbsent(col.physicalSchemaName, k -> new HashMap<>()) - .computeIfAbsent(col.physicalTableName, k -> new ArrayList<>()) - .add(col); + .computeIfAbsent( col.physicalSchemaName, k -> new HashMap<>() ) + .computeIfAbsent( col.physicalTableName, k -> new ArrayList<>() ) + .add( col ); } } - for (Map.Entry>> schemaEntry : grouped.entrySet()) { - Node schemaNode = new Node("schema", schemaEntry.getKey()); + for ( Map.Entry>> schemaEntry : grouped.entrySet() ) { + Node schemaNode = new Node( "schema", schemaEntry.getKey() ); - for (Map.Entry> tableEntry : schemaEntry.getValue().entrySet()) { - Node tableNode = new Node("table", tableEntry.getKey()); + for ( Map.Entry> tableEntry : schemaEntry.getValue().entrySet() ) { + Node tableNode = new Node( "table", tableEntry.getKey() ); - for (ExportedColumn col : tableEntry.getValue()) { - Node colNode = new Node("column", col.getName()); - colNode.addProperty("type", col.type.getName()); - colNode.addProperty("nullable", col.nullable); - colNode.addProperty("primaryKey", col.primary); + for ( ExportedColumn col : tableEntry.getValue() ) { + Node colNode = new Node( "column", col.getName() ); + colNode.addProperty( "type", col.type.getName() ); + colNode.addProperty( "nullable", col.nullable ); + colNode.addProperty( "primaryKey", col.primary ); - if (col.length != null) { - colNode.addProperty("length", col.length); + if ( col.length != null ) { + colNode.addProperty( "length", col.length ); } - if (col.scale != null) { - colNode.addProperty("scale", col.scale); + if ( col.scale != null ) { + colNode.addProperty( "scale", col.scale ); } - tableNode.addChild(colNode); + tableNode.addChild( colNode ); } - schemaNode.addChild(tableNode); + schemaNode.addChild( tableNode ); } - root.addChild(schemaNode); + root.addChild( schemaNode ); } - return root; } @Override - public Object fetchpreview( int limit ) { - return null; - } + public Object fetchPreview( int limit ) { + Map>> preview = new LinkedHashMap<>(); + PolyXid xid = PolyXid.generateLocalTransactionIdentifier( PUID.EMPTY_PUID, PUID.EMPTY_PUID ); + try { + ConnectionHandler ch = connectionFactory.getOrCreateConnectionHandler( xid ); + java.sql.Connection conn = ch.getStatement().getConnection(); + + String[] tables = settings.get( "tables" ).split( "," ); + for ( String str : tables ) { + String[] parts = str.split( "\\." ); + String schema = parts.length == 2 ? parts[0] : null; + String table = parts.length == 2 ? parts[1] : parts[0]; + + String fqName = (schema != null ? schema + "." 
: "") + table; + List> rows = new ArrayList<>(); + + try ( var stmt = conn.createStatement(); + var rs = stmt.executeQuery( "SELECT * FROM " + fqName + " LIMIT " + limit ) ) { + + var meta = rs.getMetaData(); + while ( rs.next() ) { + Map row = new HashMap<>(); + for ( int i = 1; i <= meta.getColumnCount(); i++ ) { + row.put( meta.getColumnName( i ), rs.getObject( i ) ); + } + rows.add( row ); + } + } - private void printTree(Node node, int depth) { - System.out.println(" ".repeat(depth) + node.getType() + ": " + node.getName()); - for (Map.Entry entry : node.getProperties().entrySet()) { - System.out.println(" ".repeat(depth + 1) + "- " + entry.getKey() + ": " + entry.getValue()); - } - for (Node child : node.getChildren()) { - printTree(child, depth + 1); + preview.put( fqName, rows ); + } + } catch ( Exception e ) { + throw new GenericRuntimeException( "Error fetching preview data", e ); } + + return preview; } - public PostgresqlSource( final long storeId, final String uniqueName, final Map settings, final DeployMode mode ) { - super( - storeId, - uniqueName, - settings, - mode, - "org.postgresql.Driver", - PostgresqlSqlDialect.DEFAULT, - false ); + +private void printTree( Node node, int depth ) { + System.out.println( " ".repeat( depth ) + node.getType() + ": " + node.getName() ); + for ( Map.Entry entry : node.getProperties().entrySet() ) { + System.out.println( " ".repeat( depth + 1 ) + "- " + entry.getKey() + ": " + entry.getValue() ); } + for ( Node child : node.getChildren() ) { + printTree( child, depth + 1 ); + } +} - @Override - public void shutdown() { - try { - removeInformationPage(); - connectionFactory.close(); - } catch ( SQLException e ) { - log.warn( "Exception while shutting down {}", getUniqueName(), e ); - } - } +public PostgresqlSource( final long storeId, final String uniqueName, final Map settings, final DeployMode mode ) { + super( + storeId, + uniqueName, + settings, + mode, + "org.postgresql.Driver", + PostgresqlSqlDialect.DEFAULT, + false ); +} - @Override - protected void reloadSettings( List updatedSettings ) { - // TODO: Implement disconnect and reconnect to PostgreSQL instance. +@Override +public void shutdown() { + try { + removeInformationPage(); + connectionFactory.close(); + } catch ( SQLException e ) { + log.warn( "Exception while shutting down {}", getUniqueName(), e ); } +} - @Override - protected String getConnectionUrl( final String dbHostname, final int dbPort, final String dbName ) { - return String.format( "jdbc:postgresql://%s:%d/%s", dbHostname, dbPort, dbName ); - } +@Override +protected void reloadSettings( List updatedSettings ) { + // TODO: Implement disconnect and reconnect to PostgreSQL instance. 
+}

-@Override
-protected String getConnectionUrl( final String dbHostname, final int dbPort, final String dbName ) {
-    return String.format( "jdbc:postgresql://%s:%d/%s", dbHostname, dbPort, dbName );
-}

+@Override
+protected String getConnectionUrl( final String dbHostname, final int dbPort, final String dbName ) {
+    return String.format( "jdbc:postgresql://%s:%d/%s", dbHostname, dbPort, dbName );
+}

-@Override
-protected boolean requiresSchema() {
-    return true;
-}

+@Override
+protected boolean requiresSchema() {
+    return true;
+}

-@Override
-public List<PhysicalEntity> createTable( Context context, LogicalTableWrapper logical, AllocationTableWrapper allocation ) {
-    PhysicalTable table = adapterCatalog.createTable(
-            logical.table.getNamespaceName(),
-            logical.table.name,
-            logical.columns.stream().collect( Collectors.toMap( c -> c.id, c -> c.name ) ),
-            logical.table,
-            logical.columns.stream().collect( Collectors.toMap( t -> t.id, t -> t ) ),
-            logical.pkIds, allocation );
-
-    adapterCatalog.replacePhysical( currentJdbcSchema.createJdbcTable( table ) );
-    Node node = fetchMetadataTree();
-    printTree( node, 0 );
-    return List.of( table );
-}

+@Override
+public List<PhysicalEntity> createTable( Context context, LogicalTableWrapper logical, AllocationTableWrapper allocation ) {
+    PhysicalTable table = adapterCatalog.createTable(
+            logical.table.getNamespaceName(),
+            logical.table.name,
+            logical.columns.stream().collect( Collectors.toMap( c -> c.id, c -> c.name ) ),
+            logical.table,
+            logical.columns.stream().collect( Collectors.toMap( t -> t.id, t -> t ) ),
+            logical.pkIds, allocation );
+
+    adapterCatalog.replacePhysical( currentJdbcSchema.createJdbcTable( table ) );
+    Node node = fetchMetadataTree();
+    return List.of( table );
+}
+
+
+public static void getPreview() {
+    log.error( "Method call for the PostgreSQL preview works!" );
+}
 }
diff --git a/webui/src/main/java/org/polypheny/db/webui/Crud.java b/webui/src/main/java/org/polypheny/db/webui/Crud.java
index 79a3a6a2a9..72c8a908cf 100644
--- a/webui/src/main/java/org/polypheny/db/webui/Crud.java
+++ b/webui/src/main/java/org/polypheny/db/webui/Crud.java
@@ -2090,7 +2090,8 @@ public void getMetadataAndPreview( final Context ctx ) {
             log.info( "Parsed preview request: {}", req );
             AdapterTemplate template = AdapterTemplate.fromString( req.adapterName, req.adapterType );

-            PreviewResult result = template.preview( req.settings, req.rowLimit );
+            log.error( "Row limit: {}", req.limit );
+            PreviewResult result = template.preview( req.settings, req.limit );

             ctx.json( result );
         } catch ( Exception e ) {
diff --git a/webui/src/main/java/org/polypheny/db/webui/models/requests/PreviewRequest.java b/webui/src/main/java/org/polypheny/db/webui/models/requests/PreviewRequest.java
index 13003c06c6..716966aa67 100644
--- a/webui/src/main/java/org/polypheny/db/webui/models/requests/PreviewRequest.java
+++ b/webui/src/main/java/org/polypheny/db/webui/models/requests/PreviewRequest.java
@@ -24,7 +24,7 @@ public class PreviewRequest {
     public String adapterName;
     public AdapterType adapterType;
     public Map<String, String> settings;
-    public int rowLimit;
+    public int limit;

     public PreviewRequest() {
     }
@@ -36,7 +36,7 @@ public PreviewRequest(
         this.adapterName = adapterName;
         this.adapterType = adapterType;
         this.settings = settings;
-        this.rowLimit = rowLimit;
+        this.limit = rowLimit;
     }

}

From e31df9ce42661a5b3d47712623583c9630024381 Mon Sep 17 00:00:00 2001
From: romanost03
Date: Wed, 30 Apr 2025 17:53:16 +0200
Subject: [PATCH 14/68] Extended the Node class so that different types of
 nodes can be saved in the metadata tree structure, and added a new HTTP
 server command to handle selected attributes
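
For illustration, the new node types compose as follows (a minimal sketch with invented names; not part of the patch itself):

    AbstractNode root = new Node( "relational", "demo" );
    AbstractNode schema = new Node( "schema", "public" );
    AbstractNode table = new Node( "table", "orders" );
    AttributeNode column = new AttributeNode( "column", "order_id" );
    column.addProperty( "type", "int4" );
    column.setSelected( true );   // flag the attribute as chosen in the UI
    table.addChild( column );
    schema.addChild( table );
    root.addChild( schema );
    // NodeSerializer now accepts any AbstractNode:
    System.out.println( NodeSerializer.serializeNode( root ) );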
---
 .../db/adapter/java/AdapterTemplate.java      |  3 +-
 .../db/schemaDiscovery/AbstractNode.java      | 43 +++++++++++++++++++
 .../db/schemaDiscovery/AttributeNode.java     | 33 ++++++++++++++
 .../db/schemaDiscovery/MetadataProvider.java  |  2 +-
 .../polypheny/db/schemaDiscovery/Node.java    | 12 +++---
 .../db/schemaDiscovery/NodeSerializer.java    |  4 +-
 .../postgres/source/PostgresqlSource.java     | 22 ++++++----
 .../java/org/polypheny/db/webui/Crud.java     | 13 ++++++
 .../org/polypheny/db/webui/HttpServer.java    |  2 +
 9 files changed, 116 insertions(+), 18 deletions(-)
 create mode 100644 core/src/main/java/org/polypheny/db/schemaDiscovery/AbstractNode.java
 create mode 100644 core/src/main/java/org/polypheny/db/schemaDiscovery/AttributeNode.java

diff --git a/core/src/main/java/org/polypheny/db/adapter/java/AdapterTemplate.java b/core/src/main/java/org/polypheny/db/adapter/java/AdapterTemplate.java
index 4bf8e0fcec..5201625675 100644
--- a/core/src/main/java/org/polypheny/db/adapter/java/AdapterTemplate.java
+++ b/core/src/main/java/org/polypheny/db/adapter/java/AdapterTemplate.java
@@ -37,6 +37,7 @@
 import org.polypheny.db.catalog.entity.LogicalAdapter.AdapterType;
 import org.polypheny.db.catalog.exceptions.GenericRuntimeException;
 import org.polypheny.db.docker.DockerManager;
+import org.polypheny.db.schemaDiscovery.AbstractNode;
 import org.polypheny.db.schemaDiscovery.MetadataProvider;
 import org.polypheny.db.schemaDiscovery.Node;
 import org.polypheny.db.schemaDiscovery.NodeSerializer;
@@ -128,7 +129,7 @@ public PreviewResult preview( Map<String, String> settings, int limit ) {
         try {
             if ( tmp instanceof MetadataProvider mp ) {
                 log.info( "🎯 Adapter supports MetadataProvider. Fetching metadata and preview..." );
-                Node meta = mp.fetchMetadataTree();
+                AbstractNode meta = mp.fetchMetadataTree();
                 String json = NodeSerializer.serializeNode( meta ).toString();
                 Object rows = mp.fetchPreview( limit );
                 log.error( json );
diff --git a/core/src/main/java/org/polypheny/db/schemaDiscovery/AbstractNode.java b/core/src/main/java/org/polypheny/db/schemaDiscovery/AbstractNode.java
new file mode 100644
index 0000000000..eb5cf6e4f7
--- /dev/null
+++ b/core/src/main/java/org/polypheny/db/schemaDiscovery/AbstractNode.java
@@ -0,0 +1,43 @@
+/*
+ * Copyright 2019-2025 The Polypheny Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ + +package org.polypheny.db.schemaDiscovery; + +import java.util.List; +import java.util.Map; + +public interface AbstractNode { + + String type = ""; + String name = ""; + List children = null; + Map properties = null; + + void addChild(AbstractNode node); + void addProperty(String key, Object value); + + String getType(); + String getName(); + List getChildren(); + Map getProperties(); + + void setType(String type); + void setName(String name); + void setChildren(List children); + void setProperties(Map properties); + + +} diff --git a/core/src/main/java/org/polypheny/db/schemaDiscovery/AttributeNode.java b/core/src/main/java/org/polypheny/db/schemaDiscovery/AttributeNode.java new file mode 100644 index 0000000000..a5906e6f9d --- /dev/null +++ b/core/src/main/java/org/polypheny/db/schemaDiscovery/AttributeNode.java @@ -0,0 +1,33 @@ +/* + * Copyright 2019-2025 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.schemaDiscovery; + +import lombok.Getter; +import lombok.Setter; + +public class AttributeNode extends Node implements AbstractNode { + + @Getter + @Setter + private boolean isSelected; + + public AttributeNode( String type, String name ) { + super( type, name ); + this.isSelected = false; + } + +} diff --git a/core/src/main/java/org/polypheny/db/schemaDiscovery/MetadataProvider.java b/core/src/main/java/org/polypheny/db/schemaDiscovery/MetadataProvider.java index be84d6f944..9c353fbc79 100644 --- a/core/src/main/java/org/polypheny/db/schemaDiscovery/MetadataProvider.java +++ b/core/src/main/java/org/polypheny/db/schemaDiscovery/MetadataProvider.java @@ -18,7 +18,7 @@ public interface MetadataProvider { - Node fetchMetadataTree(); + AbstractNode fetchMetadataTree(); Object fetchPreview(int limit); diff --git a/core/src/main/java/org/polypheny/db/schemaDiscovery/Node.java b/core/src/main/java/org/polypheny/db/schemaDiscovery/Node.java index 1970169a2a..879b334d95 100644 --- a/core/src/main/java/org/polypheny/db/schemaDiscovery/Node.java +++ b/core/src/main/java/org/polypheny/db/schemaDiscovery/Node.java @@ -23,20 +23,20 @@ import java.util.List; import java.util.Map; -public class Node { +public class Node implements AbstractNode { @Setter @Getter - private String type; + protected String type; @Setter @Getter - private String name; + protected String name; @Setter @Getter - private List children; + protected List children; @Setter @Getter - private Map properties; + protected Map properties; public Node(String type, String name) { this.type = type; @@ -46,7 +46,7 @@ public Node(String type, String name) { } - public void addChild(Node node) { + public void addChild(AbstractNode node) { children.add(node); } diff --git a/core/src/main/java/org/polypheny/db/schemaDiscovery/NodeSerializer.java b/core/src/main/java/org/polypheny/db/schemaDiscovery/NodeSerializer.java index 35bcbe5a1a..15a1ab6fe6 100644 --- a/core/src/main/java/org/polypheny/db/schemaDiscovery/NodeSerializer.java +++ 
b/core/src/main/java/org/polypheny/db/schemaDiscovery/NodeSerializer.java @@ -24,7 +24,7 @@ public class NodeSerializer { private static final ObjectMapper objectMapper = new ObjectMapper(); - public static ObjectNode serializeNode(Node node) { + public static ObjectNode serializeNode(AbstractNode node) { ObjectNode json = objectMapper.createObjectNode(); json.put( "type", node.getType() ); json.put( "name", node.getName() ); @@ -37,7 +37,7 @@ public static ObjectNode serializeNode(Node node) { // Children ArrayNode children = objectMapper.createArrayNode(); - for (Node child : node.getChildren()) { + for (AbstractNode child : node.getChildren()) { children.add(serializeNode(child)); } json.set("children", children); diff --git a/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java b/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java index 35f9799526..4889362186 100644 --- a/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java +++ b/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java @@ -40,6 +40,8 @@ import org.polypheny.db.catalog.entity.physical.PhysicalTable; import org.polypheny.db.catalog.exceptions.GenericRuntimeException; import org.polypheny.db.prepare.Context; +import org.polypheny.db.schemaDiscovery.AbstractNode; +import org.polypheny.db.schemaDiscovery.AttributeNode; import org.polypheny.db.schemaDiscovery.MetadataProvider; import org.polypheny.db.schemaDiscovery.Node; import org.polypheny.db.transaction.PUID; @@ -70,9 +72,11 @@ description = "List of tables which should be imported. The names must to be separated by a comma.") public class PostgresqlSource extends AbstractJdbcSource implements MetadataProvider { + private AbstractNode metadataRoot; + @Override - public Node fetchMetadataTree() { + public AbstractNode fetchMetadataTree() { String dbName = settings.get( "database" ); Node root = new Node( "relational", dbName ); @@ -89,13 +93,13 @@ public Node fetchMetadataTree() { } for ( Map.Entry>> schemaEntry : grouped.entrySet() ) { - Node schemaNode = new Node( "schema", schemaEntry.getKey() ); + AbstractNode schemaNode = new Node( "schema", schemaEntry.getKey() ); for ( Map.Entry> tableEntry : schemaEntry.getValue().entrySet() ) { - Node tableNode = new Node( "table", tableEntry.getKey() ); + AbstractNode tableNode = new Node( "table", tableEntry.getKey() ); for ( ExportedColumn col : tableEntry.getValue() ) { - Node colNode = new Node( "column", col.getName() ); + AbstractNode colNode = new AttributeNode( "column", col.getName() ); colNode.addProperty( "type", col.type.getName() ); colNode.addProperty( "nullable", col.nullable ); colNode.addProperty( "primaryKey", col.primary ); @@ -115,7 +119,8 @@ public Node fetchMetadataTree() { root.addChild( schemaNode ); } - return root; + this.metadataRoot = root; + return this.metadataRoot; } @@ -161,12 +166,12 @@ public Object fetchPreview( int limit ) { -private void printTree( Node node, int depth ) { +private void printTree( AbstractNode node, int depth ) { System.out.println( " ".repeat( depth ) + node.getType() + ": " + node.getName() ); for ( Map.Entry entry : node.getProperties().entrySet() ) { System.out.println( " ".repeat( depth + 1 ) + "- " + entry.getKey() + ": " + entry.getValue() ); } - for ( Node child : node.getChildren() ) { + for ( AbstractNode child : node.getChildren() ) { printTree( child, depth + 1 ); } } @@ 
-181,6 +186,7 @@ public PostgresqlSource( final long storeId, final String uniqueName, final Map<
                 "org.postgresql.Driver",
                 PostgresqlSqlDialect.DEFAULT,
                 false );
+        this.metadataRoot = null;
     }

@@ -224,7 +230,7 @@ public List<PhysicalEntity> createTable( Context context, LogicalTableWrapper lo
                 logical.pkIds, allocation );

         adapterCatalog.replacePhysical( currentJdbcSchema.createJdbcTable( table ) );
-        Node node = fetchMetadataTree();
+        AbstractNode node = fetchMetadataTree();
         return List.of( table );
     }

diff --git a/webui/src/main/java/org/polypheny/db/webui/Crud.java b/webui/src/main/java/org/polypheny/db/webui/Crud.java
index 72c8a908cf..03c2329e11 100644
--- a/webui/src/main/java/org/polypheny/db/webui/Crud.java
+++ b/webui/src/main/java/org/polypheny/db/webui/Crud.java
@@ -227,6 +227,8 @@ public class Crud implements InformationObserver, PropertyChangeListener {
     public static final String ORIGIN = "Polypheny-UI";
     private final TransactionManager transactionManager;

+    private AdapterTemplate temp;
+
     public final LanguageCrud languageCrud;
     public final StatisticCrud statisticCrud;

@@ -245,6 +247,7 @@ public class Crud implements InformationObserver, PropertyChangeListener {
         this.statisticCrud = new StatisticCrud( this );
         this.catalogCrud = new CatalogCrud( this );
         this.authCrud = new AuthCrud( this );
+        this.temp = null;

         Catalog.afterInit( () -> Catalog.getInstance().addObserver( this ) );
     }
@@ -2101,6 +2104,16 @@ public void getMetadataAndPreview( final Context ctx ) {
     }

+    public void processAttributes( final Context ctx ) {
+        try {
+            String req = ctx.body();
+            log.error( "Attribute data: " + req );
+        } catch ( Exception e ) {
+            log.error( "No attribute data present." );
+        }
+
+    }

     /**
      * Deploy a new adapter
diff --git a/webui/src/main/java/org/polypheny/db/webui/HttpServer.java b/webui/src/main/java/org/polypheny/db/webui/HttpServer.java
index d2e1c43b40..26b155544d 100644
--- a/webui/src/main/java/org/polypheny/db/webui/HttpServer.java
+++ b/webui/src/main/java/org/polypheny/db/webui/HttpServer.java
@@ -360,6 +360,8 @@ private void attachRoutes( Javalin webuiServer, Crud crud ) {

         webuiServer.post( "/previewTable", crud::getMetadataAndPreview );

+        webuiServer.post( "/sendSelectedMetadata", crud::processAttributes );
+
         webuiServer.get( "/getPolyAlgRegistry", crud::getPolyAlgRegistry );

         webuiServer.post( "/buildPolyPlan", crud::buildPlanFromPolyAlg );

From f6d0b9979e45075023f4176644f5b660ed300da4 Mon Sep 17 00:00:00 2001
From: romanost03
Date: Fri, 2 May 2025 20:47:55 +0200
Subject: [PATCH 15/68] Removed hardcoded schema and table names from
 getExportedColumns in the oracle adapter
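
For reference, the table list is now derived from the `tables` adapter setting instead of the previous constants; each entry is split into a schema and a table name and upper-cased for the Oracle catalog lookup (a minimal sketch with illustrative values):

    // settings.get( "tables" ) == "SYSTEM.TEST,HR.EMPLOYEES"  (example value)
    String[] tables = "SYSTEM.TEST,HR.EMPLOYEES".split( "," );
    for ( String str : tables ) {
        String[] names = str.split( "\\." );          // e.g. [ "SYSTEM", "TEST" ]
        String schema = names[0].toUpperCase();
        String tableName = names[1].toUpperCase();
        // dbmd.getColumns( null, schema, tableName, "%" ) is then queried per table
    }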
"TEST"; + String[] tables = settings.get( "tables" ).split( "," ); + for ( String str : tables ) { + String[] names = str.split( "\\." ); - List primaryKeyColumns = new ArrayList<>(); - try ( ResultSet pk = dbmd.getPrimaryKeys( null, schema, tableName ) ) { - while ( pk.next() ) { - primaryKeyColumns.add( pk.getString( "COLUMN_NAME" ).toUpperCase() ); + if ( names.length == 0 || names.length > 2 || (requiresSchema() && names.length == 1) ) { + throw new GenericRuntimeException( "Invalid table name: " + tables ); + } + String schema; + String tableName; + + if ( requiresSchema() ) { + schema = names[0].toUpperCase(); + tableName = names[1].toUpperCase(); + } else { + schema = null; + tableName = names[0].toUpperCase(); } - } - try ( ResultSet columns = dbmd.getColumns( null, schema, tableName, "%" ) ) { - List exportedColumns = new ArrayList<>(); - while ( columns.next() ) { - PolyType type = PolyType.getNameForJdbcType( columns.getInt( "DATA_TYPE" ) ); - Integer length = null; - Integer scale = null; - - switch ( type ) { - case DECIMAL: - length = columns.getInt( "COLUMN_SIZE" ); - scale = columns.getInt( "DECIMAL_DIGITS" ); - break; - case CHAR: - case VARCHAR: - type = PolyType.VARCHAR; - length = columns.getInt( "COLUMN_SIZE" ); - break; - case VARBINARY: - case BINARY: - type = PolyType.VARBINARY; - length = columns.getInt( "COLUMN_SIZE" ); - break; - case TIME: - case TIMESTAMP: - length = columns.getInt( "DECIMAL_DIGITS" ); - break; - default: - break; - } - exportedColumns.add( new ExportedColumn( - columns.getString( "COLUMN_NAME" ).toUpperCase(), - type, - null, - length, - scale, - null, - null, - "YES".equalsIgnoreCase( columns.getString( "IS_NULLABLE" ) ), - schema, - tableName, - columns.getString( "COLUMN_NAME" ).toUpperCase(), - columns.getInt( "ORDINAL_POSITION" ), - primaryKeyColumns.contains( columns.getString( "COLUMN_NAME" ).toUpperCase() ) - ) ); + List primaryKeyColumns = new ArrayList<>(); + try ( ResultSet pk = dbmd.getPrimaryKeys( null, schema, tableName ) ) { + while ( pk.next() ) { + primaryKeyColumns.add( pk.getString( "COLUMN_NAME" ).toUpperCase() ); + } } + try ( ResultSet columns = dbmd.getColumns( null, schema, tableName, "%" ) ) { + List exportedColumns = new ArrayList<>(); + while ( columns.next() ) { + PolyType type = PolyType.getNameForJdbcType( columns.getInt( "DATA_TYPE" ) ); + Integer length = null; + Integer scale = null; + + switch ( type ) { + case DECIMAL: + length = columns.getInt( "COLUMN_SIZE" ); + scale = columns.getInt( "DECIMAL_DIGITS" ); + break; + case CHAR: + case VARCHAR: + type = PolyType.VARCHAR; + length = columns.getInt( "COLUMN_SIZE" ); + break; + case VARBINARY: + case BINARY: + type = PolyType.VARBINARY; + length = columns.getInt( "COLUMN_SIZE" ); + break; + case TIME: + case TIMESTAMP: + length = columns.getInt( "DECIMAL_DIGITS" ); + break; + default: + break; + } + + exportedColumns.add( new ExportedColumn( + columns.getString( "COLUMN_NAME" ).toUpperCase(), + type, + null, + length, + scale, + null, + null, + "YES".equalsIgnoreCase( columns.getString( "IS_NULLABLE" ) ), + schema, + tableName, + columns.getString( "COLUMN_NAME" ).toUpperCase(), + columns.getInt( "ORDINAL_POSITION" ), + primaryKeyColumns.contains( columns.getString( "COLUMN_NAME" ).toUpperCase() ) + ) ); + } - map.put( tableName, exportedColumns ); + map.put( tableName, exportedColumns ); + } } } catch ( SQLException | ConnectionHandlerException e ) { throw new GenericRuntimeException( "Exception while collecting Oracle schema info", e ); From 
---
 .../db/schemaDiscovery/MetadataProvider.java  |  10 +-
 .../org/polypheny/db/ddl/DdlManagerImpl.java  | 104 +++++--
 .../postgres/source/PostgresqlSource.java     | 285 ++++++++++++------
 .../java/org/polypheny/db/webui/Crud.java     |  41 ++-
 .../db/webui/models/catalog/AdapterModel.java |  10 +-
 5 files changed, 330 insertions(+), 120 deletions(-)

diff --git a/core/src/main/java/org/polypheny/db/schemaDiscovery/MetadataProvider.java b/core/src/main/java/org/polypheny/db/schemaDiscovery/MetadataProvider.java
index 9c353fbc79..c298d1d6d0 100644
--- a/core/src/main/java/org/polypheny/db/schemaDiscovery/MetadataProvider.java
+++ b/core/src/main/java/org/polypheny/db/schemaDiscovery/MetadataProvider.java
@@ -16,11 +16,19 @@

 package org.polypheny.db.schemaDiscovery;

+import java.util.List;
+
 public interface MetadataProvider {

     AbstractNode fetchMetadataTree();

-    Object fetchPreview(int limit);
+    Object fetchPreview( int limit );
+
+    void markSelectedAttributes( List<String> paths );
+
+    void printTree( AbstractNode node, int depth );
+
+    void setRoot( AbstractNode root );

 }
diff --git a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java
index 7b3e171979..8929df4fd1 100644
--- a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java
+++ b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java
@@ -32,6 +32,8 @@
 import java.util.function.Supplier;
 import java.util.stream.Collectors;
 import javax.annotation.Nullable;
+import com.google.gson.Gson;
+import com.google.gson.reflect.TypeToken;
 import lombok.extern.slf4j.Slf4j;
 import org.jetbrains.annotations.NotNull;
 import org.polypheny.db.adapter.Adapter;
@@ -108,6 +110,8 @@
 import org.polypheny.db.partition.raw.RawTemperaturePartitionInformation;
 import org.polypheny.db.processing.DataMigrator;
 import org.polypheny.db.routing.RoutingManager;
+import org.polypheny.db.schemaDiscovery.AbstractNode;
+import org.polypheny.db.schemaDiscovery.MetadataProvider;
 import org.polypheny.db.transaction.Statement;
 import org.polypheny.db.transaction.Transaction;
 import org.polypheny.db.transaction.TransactionException;
@@ -214,6 +218,27 @@ public void createSource( Transaction transaction, String uniqueName, String ada
         uniqueName = uniqueName.toLowerCase();
         DataSource adapter = (DataSource) AdapterManager.getInstance().addAdapter( adapterName, uniqueName, adapterType, mode, config );

+        String attributes = config.get( "selectedAttributes" );
+        Set<String> selectedAttributeNames = new HashSet<>();
+        log.error( "This is the attributes string: " + attributes );
+        if ( attributes != null ) {
+            List<String> selectedAttributes = new Gson().fromJson( attributes, new TypeToken<List<String>>() {
+            }.getType() );
+            selectedAttributeNames = selectedAttributes.stream()
+                    .map( s -> s.replaceFirst( " : .*", "" ) )
+                    .map( s -> s.substring( s.lastIndexOf( '.' ) + 1 ) )
+                    .collect( Collectors.toSet() );
+            log.error( "These are the attributes that have to be filtered: " + selectedAttributeNames );
+            if ( adapter instanceof MetadataProvider mp ) {
+                AbstractNode node = mp.fetchMetadataTree();
+                mp.setRoot( node );
+                mp.markSelectedAttributes( selectedAttributes );
+                log.error( "Selected attributes have been set from the DdlManager; the tree looks like this:" );
+                mp.printTree( null, 0 );
+            }
+
+        }
+
         Map<String, List<ExportedColumn>> exportedColumns;
         try {
             exportedColumns = adapter.getExportedColumns();
@@ -232,7 +257,6 @@ public void createSource( Transaction transaction, String uniqueName, String ada
             }
             tableName += i;
         }
-
         LogicalTable logical = catalog.getLogicalRel( namespace ).addTable( tableName, EntityType.SOURCE, !(adapter).isDataReadOnly() );

         List<LogicalColumn> columns = new ArrayList<>();
@@ -244,29 +268,61 @@ public void createSource( Transaction transaction, String uniqueName, String ada
             int colPos = 1;
             for ( ExportedColumn exportedColumn : entry.getValue() ) {
-                LogicalColumn column = catalog.getLogicalRel( namespace ).addColumn(
-                        exportedColumn.name,
-                        logical.id,
-                        colPos++,
-                        exportedColumn.type,
-                        exportedColumn.collectionsType,
-                        exportedColumn.length,
-                        exportedColumn.scale,
-                        exportedColumn.dimension,
-                        exportedColumn.cardinality,
-                        exportedColumn.nullable,
-                        Collation.getDefaultCollation() );
-
-                AllocationColumn allocationColumn = catalog.getAllocRel( namespace ).addColumn(
-                        placement.id,
-                        logical.id,
-                        column.id,
-                        adapter.adapterId,
-                        PlacementType.STATIC,
-                        exportedColumn.physicalPosition ); // Not a valid partitionGroupID --> placeholder
-
-                columns.add( column );
-                aColumns.add( allocationColumn );
+
+                if ( adapter instanceof MetadataProvider mp && (attributes != null) ) {
+                    if ( !selectedAttributeNames.contains( exportedColumn.name ) ) {
+                        continue;
+                    }
+                    LogicalColumn column = catalog.getLogicalRel( namespace ).addColumn(
+                            exportedColumn.name,
+                            logical.id,
+                            colPos++,
+                            exportedColumn.type,
+                            exportedColumn.collectionsType,
+                            exportedColumn.length,
+                            exportedColumn.scale,
+                            exportedColumn.dimension,
+                            exportedColumn.cardinality,
+                            exportedColumn.nullable,
+                            Collation.getDefaultCollation() );
+
+                    AllocationColumn allocationColumn = catalog.getAllocRel( namespace ).addColumn(
+                            placement.id,
+                            logical.id,
+                            column.id,
+                            adapter.adapterId,
+                            PlacementType.STATIC,
+                            exportedColumn.physicalPosition );
+
+                    columns.add( column );
+                    aColumns.add( allocationColumn );
+
+                } else {
+                    LogicalColumn column = catalog.getLogicalRel( namespace ).addColumn(
+                            exportedColumn.name,
+                            logical.id,
+                            colPos++,
+                            exportedColumn.type,
+                            exportedColumn.collectionsType,
+                            exportedColumn.length,
+                            exportedColumn.scale,
+                            exportedColumn.dimension,
+                            exportedColumn.cardinality,
+                            exportedColumn.nullable,
+                            Collation.getDefaultCollation() );
+
+                    AllocationColumn allocationColumn = catalog.getAllocRel( namespace ).addColumn(
+                            placement.id,
+                            logical.id,
+                            column.id,
+                            adapter.adapterId,
+                            PlacementType.STATIC,
+                            exportedColumn.physicalPosition );
+
+                    columns.add( column );
+                    aColumns.add( allocationColumn );
+                }
+
             }

             buildNamespace( Catalog.defaultNamespaceId, logical, adapter );
diff --git a/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java b/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java
index 4889362186..964760bf41 100644
--- a/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java
+++ 
b/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java @@ -17,14 +17,21 @@ package org.polypheny.db.adapter.postgres.source; +import java.sql.Connection; +import java.sql.DatabaseMetaData; +import java.sql.ResultSet; import java.sql.SQLException; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; +import java.util.HashSet; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; +import java.util.Optional; +import java.util.Set; import java.util.stream.Collectors; +import lombok.Setter; import lombok.extern.slf4j.Slf4j; import org.polypheny.db.adapter.DeployMode; import org.polypheny.db.adapter.annotations.AdapterProperties; @@ -32,6 +39,7 @@ import org.polypheny.db.adapter.annotations.AdapterSettingList; import org.polypheny.db.adapter.annotations.AdapterSettingString; import org.polypheny.db.adapter.jdbc.connection.ConnectionHandler; +import org.polypheny.db.adapter.jdbc.connection.ConnectionHandlerException; import org.polypheny.db.adapter.jdbc.sources.AbstractJdbcSource; import org.polypheny.db.adapter.postgres.PostgresqlSqlDialect; import org.polypheny.db.catalog.entity.allocation.AllocationTableWrapper; @@ -72,53 +80,103 @@ description = "List of tables which should be imported. The names must to be separated by a comma.") public class PostgresqlSource extends AbstractJdbcSource implements MetadataProvider { - private AbstractNode metadataRoot; + public AbstractNode metadataRoot; @Override - public AbstractNode fetchMetadataTree() { - String dbName = settings.get( "database" ); - Node root = new Node( "relational", dbName ); - - Map> exported = getExportedColumns(); + public void setRoot( AbstractNode root ) { + this.metadataRoot = root; + } - Map>> grouped = new HashMap<>(); - for ( Map.Entry> entry : exported.entrySet() ) { - for ( ExportedColumn col : entry.getValue() ) { - grouped - .computeIfAbsent( col.physicalSchemaName, k -> new HashMap<>() ) - .computeIfAbsent( col.physicalTableName, k -> new ArrayList<>() ) - .add( col ); - } - } - for ( Map.Entry>> schemaEntry : grouped.entrySet() ) { - AbstractNode schemaNode = new Node( "schema", schemaEntry.getKey() ); + @Override + public AbstractNode fetchMetadataTree() { - for ( Map.Entry> tableEntry : schemaEntry.getValue().entrySet() ) { - AbstractNode tableNode = new Node( "table", tableEntry.getKey() ); + String dbName = settings.get( "database" ); + Node root = new Node( "relational", dbName ); - for ( ExportedColumn col : tableEntry.getValue() ) { - AbstractNode colNode = new AttributeNode( "column", col.getName() ); - colNode.addProperty( "type", col.type.getName() ); - colNode.addProperty( "nullable", col.nullable ); - colNode.addProperty( "primaryKey", col.primary ); + PolyXid xid = PolyXid.generateLocalTransactionIdentifier( PUID.EMPTY_PUID, PUID.EMPTY_PUID ); - if ( col.length != null ) { - colNode.addProperty( "length", col.length ); - } - if ( col.scale != null ) { - colNode.addProperty( "scale", col.scale ); + try { + ConnectionHandler handler = connectionFactory.getOrCreateConnectionHandler( xid ); + java.sql.Statement stmt = handler.getStatement(); + Connection conn = stmt.getConnection(); + DatabaseMetaData meta = conn.getMetaData(); + + try ( ResultSet schemas = requiresSchema() + ? meta.getSchemas( dbName, "%" ) + : meta.getCatalogs() ) { + while ( schemas.next() ) { + + String schemaName = requiresSchema() + ? 
schemas.getString( "TABLE_SCHEM" ) + : schemas.getString( "TABLE_CAT" ); + + AbstractNode schemaNode = new Node( "schema", schemaName ); + + try ( ResultSet tables = meta.getTables( + dbName, + requiresSchema() ? schemaName : null, + "%", + new String[]{ "TABLE" } + ) ) { + while ( tables.next() ) { + + String tableName = tables.getString( "TABLE_NAME" ); + AbstractNode tableNode = new Node( "table", tableName ); + + Set pkCols = new HashSet<>(); + try ( ResultSet pk = meta.getPrimaryKeys( + dbName, + requiresSchema() ? schemaName : null, + tableName ) ) { + while ( pk.next() ) { + pkCols.add( pk.getString( "COLUMN_NAME" ) ); + } + } + + try ( ResultSet cols = meta.getColumns( + dbName, + requiresSchema() ? schemaName : null, + tableName, + "%" ) ) { + while ( cols.next() ) { + + String colName = cols.getString( "COLUMN_NAME" ); + String typeName = cols.getString( "TYPE_NAME" ); + boolean nullable = cols.getInt( "NULLABLE" ) == DatabaseMetaData.columnNullable; + boolean primary = pkCols.contains( colName ); + + AbstractNode colNode = new AttributeNode( "column", colName ); + colNode.addProperty( "type", typeName ); + colNode.addProperty( "nullable", nullable ); + colNode.addProperty( "primaryKey", primary ); + + Integer len = (Integer) cols.getObject( "COLUMN_SIZE" ); + Integer scale = (Integer) cols.getObject( "DECIMAL_DIGITS" ); + if ( len != null ) { + colNode.addProperty( "length", len ); + } + if ( scale != null ) { + colNode.addProperty( "scale", scale ); + } + + tableNode.addChild( colNode ); + } + } + + schemaNode.addChild( tableNode ); + } } - tableNode.addChild( colNode ); + root.addChild( schemaNode ); } - - schemaNode.addChild( tableNode ); } - root.addChild( schemaNode ); + } catch ( SQLException | ConnectionHandlerException ex ) { + throw new GenericRuntimeException( "Error while fetching metadata tree", ex ); } + this.metadataRoot = root; return this.metadataRoot; } @@ -165,79 +223,128 @@ public Object fetchPreview( int limit ) { } + @Override + public void markSelectedAttributes( List selectedPaths ) { -private void printTree( AbstractNode node, int depth ) { - System.out.println( " ".repeat( depth ) + node.getType() + ": " + node.getName() ); - for ( Map.Entry entry : node.getProperties().entrySet() ) { - System.out.println( " ".repeat( depth + 1 ) + "- " + entry.getKey() + ": " + entry.getValue() ); - } - for ( AbstractNode child : node.getChildren() ) { - printTree( child, depth + 1 ); - } -} + List> attributePaths = new ArrayList<>(); + for ( String path : selectedPaths ) { + String cleanPath = path.replaceFirst( " ?:.*$", "" ).trim(); -public PostgresqlSource( final long storeId, final String uniqueName, final Map settings, final DeployMode mode ) { - super( - storeId, - uniqueName, - settings, - mode, - "org.postgresql.Driver", - PostgresqlSqlDialect.DEFAULT, - false ); - this.metadataRoot = null; -} + List segments = Arrays.asList( cleanPath.split( "\\." 
) );
+            if ( !segments.isEmpty() && segments.get( 0 ).equals( metadataRoot.getName() ) ) {
+                segments = segments.subList( 1, segments.size() );
+            }
+
+            attributePaths.add( segments );
+        }
+
+        for ( List<String> pathSegments : attributePaths ) {
+            AbstractNode current = metadataRoot;
+
+            for ( int i = 0; i < pathSegments.size(); i++ ) {
+                String segment = pathSegments.get( i );
+
+                if ( i == pathSegments.size() - 1 ) {
+                    Optional<AbstractNode> attrNodeOpt = current.getChildren().stream()
+                            .filter( c -> c instanceof AttributeNode && segment.equals( c.getName() ) )
+                            .findFirst();
+
+                    if ( attrNodeOpt.isPresent() ) {
+                        ((AttributeNode) attrNodeOpt.get()).setSelected( true );
+                        log.info( "βœ… Attribute marked: " + String.join( ".", pathSegments ) );
+                    } else {
+                        log.warn( "❌ Attribute not found: " + String.join( ".", pathSegments ) );
+                    }
+                } else {
+                    Optional<AbstractNode> childOpt = current.getChildren().stream()
+                            .filter( c -> segment.equals( c.getName() ) )
+                            .findFirst();
+
+                    if ( childOpt.isPresent() ) {
+                        current = childOpt.get();
+                    } else {
+                        log.warn( "❌ Segment not found: " + segment + " in path " + String.join( ".", pathSegments ) );
+                        break;
+                    }
+                }
+            }
+        }

-@Override
-public void shutdown() {
-    try {
-        removeInformationPage();
-        connectionFactory.close();
-    } catch ( SQLException e ) {
-        log.warn( "Exception while shutting down {}", getUniqueName(), e );
-    }
-}
+    }

-@Override
-protected void reloadSettings( List updatedSettings ) {
-    // TODO: Implement disconnect and reconnect to PostgreSQL instance.
-}
+    @Override
+    public void printTree( AbstractNode node, int depth ) {
+        if ( node == null ) {
+            node = this.metadataRoot;
+        }
+        System.out.println( " ".repeat( depth ) + node.getType() + ": " + node.getName() );
+        for ( Map.Entry<String, Object> entry : node.getProperties().entrySet() ) {
+            System.out.println( " ".repeat( depth + 1 ) + "- " + entry.getKey() + ": " + entry.getValue() );
+        }
+        for ( AbstractNode child : node.getChildren() ) {
+            printTree( child, depth + 1 );
+        }
+    }

+    public PostgresqlSource( final long storeId, final String uniqueName, final Map<String, String> settings, final DeployMode mode ) {
+        super(
+                storeId,
+                uniqueName,
+                settings,
+                mode,
+                "org.postgresql.Driver",
+                PostgresqlSqlDialect.DEFAULT,
+                false );
+        this.metadataRoot = null;
+    }

+    @Override
+    public void shutdown() {
+        try {
+            removeInformationPage();
+            connectionFactory.close();
+        } catch ( SQLException e ) {
+            log.warn( "Exception while shutting down {}", getUniqueName(), e );
+        }
+    }

+    @Override
+    protected void reloadSettings( List<String> updatedSettings ) {
+        // TODO: Implement disconnect and reconnect to PostgreSQL instance.
+    }

-@Override
-protected String getConnectionUrl( final String dbHostname, final int dbPort, final String dbName ) {
-    return String.format( "jdbc:postgresql://%s:%d/%s", dbHostname, dbPort, dbName );
-}
+    @Override
+    protected String getConnectionUrl( final String dbHostname, final int dbPort, final String dbName ) {
+        return String.format( "jdbc:postgresql://%s:%d/%s", dbHostname, dbPort, dbName );
+    }

-@Override
-protected boolean requiresSchema() {
-    return true;
-}
+    @Override
+    protected boolean requiresSchema() {
+        return true;
+    }

-@Override
-public List<PhysicalEntity> createTable( Context context, LogicalTableWrapper logical, AllocationTableWrapper allocation ) {
-    PhysicalTable table = adapterCatalog.createTable(
-            logical.table.getNamespaceName(),
-            logical.table.name,
-            logical.columns.stream().collect( Collectors.toMap( c -> c.id, c -> c.name ) ),
-            logical.table,
-            logical.columns.stream().collect( Collectors.toMap( t -> t.id, t -> t ) ),
-            logical.pkIds, allocation );

-    adapterCatalog.replacePhysical( currentJdbcSchema.createJdbcTable( table ) );
-    AbstractNode node = fetchMetadataTree();
-    return List.of( table );
-}
+    @Override
+    public List<PhysicalEntity> createTable( Context context, LogicalTableWrapper logical, AllocationTableWrapper allocation ) {
+        PhysicalTable table = adapterCatalog.createTable(
+                logical.table.getNamespaceName(),
+                logical.table.name,
+                logical.columns.stream().collect( Collectors.toMap( c -> c.id, c -> c.name ) ),
+                logical.table,
+                logical.columns.stream().collect( Collectors.toMap( t -> t.id, t -> t ) ),
+                logical.pkIds, allocation );
+
+        adapterCatalog.replacePhysical( currentJdbcSchema.createJdbcTable( table ) );
+        log.error( "Postgres adapter ID is: " + this.adapterId );
+        return List.of( table );
+    }

-public static void getPreview() {
-    log.error( "Method call for the PostgreSQL preview works!" );
-}
 }
diff --git a/webui/src/main/java/org/polypheny/db/webui/Crud.java b/webui/src/main/java/org/polypheny/db/webui/Crud.java
index 03c2329e11..15925bb62d 100644
--- a/webui/src/main/java/org/polypheny/db/webui/Crud.java
+++ b/webui/src/main/java/org/polypheny/db/webui/Crud.java
@@ -17,7 +17,6 @@

 package org.polypheny.db.webui;

-import com.fasterxml.jackson.core.JsonProcessingException;
 import com.fasterxml.jackson.core.type.TypeReference;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.common.collect.ImmutableList;
@@ -27,7 +26,6 @@
 import com.google.gson.JsonDeserializer;
 import com.google.gson.JsonObject;
 import com.google.gson.JsonParseException;
-import com.google.gson.JsonParser;
 import com.j256.simplemagic.ContentInfo;
 import com.j256.simplemagic.ContentInfoUtil;
 import io.javalin.http.Context;
@@ -77,6 +75,7 @@
 import org.apache.commons.io.FileUtils;
 import org.apache.commons.io.IOUtils;
 import org.eclipse.jetty.websocket.api.Session;
+import org.jetbrains.annotations.NotNull;
 import org.polypheny.db.adapter.AbstractAdapterSetting;
 import org.polypheny.db.adapter.AbstractAdapterSettingDirectory;
 import org.polypheny.db.adapter.Adapter;
@@ -156,7 +155,6 @@
 import org.polypheny.db.processing.ImplementationContext.ExecutedContext;
 import org.polypheny.db.processing.QueryContext;
 import org.polypheny.db.schemaDiscovery.MetadataProvider;
-import org.polypheny.db.schemaDiscovery.Node;
 import org.polypheny.db.security.SecurityManager;
 import org.polypheny.db.transaction.Statement;
 import org.polypheny.db.transaction.Transaction;
@@ -880,6 +878,7 @@ void deleteTuple( final Context ctx ) {
     ctx.json( result );
 }

+
 void sendConfirmation( final Context ctx ) {
     log.info( "Sending confirmation" );
     // String result = "Angular confirmation message";
     try {
         List dbs = PostgreSqlConnection.getDatabasesSchemasAndTables();
         ctx.json( dbs );
     } catch ( SQLException e ) {
-        System.err.println("Error during schema discovery: " + e.getMessage());
+        System.err.println( "Error during schema discovery: " + e.getMessage() );
     }
 }
@@ -2141,6 +2140,26 @@ void createAdapter( final Context ctx ) throws ServletException, IOException {
         return;
     }

+        log.info( "AdapterModel received:" );
+        log.info( "Name: " + a.name );
+        log.info( "Adapter: " + a.adapterName );
+        log.info( "Type: " + a.type );
log.info( "Mode: " + a.mode ); + + log.info( "Settings:" ); + for ( Map.Entry entry : a.settings.entrySet() ) { + log.info( entry.getKey() + " = " + entry.getValue() ); + } + + if ( a.metadata != null && !a.metadata.isEmpty() ) { + log.info( "Metadaten:" ); + for ( String meta : a.metadata ) { + log.info( meta ); + } + } else { + log.info( "Keine Metadaten enthalten." ); + } + Map settings = new HashMap<>(); ConnectionMethod method = ConnectionMethod.UPLOAD; @@ -2175,6 +2194,9 @@ void createAdapter( final Context ctx ) throws ServletException, IOException { settings.put( "mode", a.mode.toString() ); + if ( a.metadata != null && !a.metadata.isEmpty() ) { + settings.put( "selectedAttributes", Crud.gson.toJson( a.metadata ) ); + } String query = String.format( "ALTER ADAPTERS ADD \"%s\" USING '%s' AS '%s' WITH '%s'", a.name, a.adapterName, a.type, Crud.gson.toJson( settings ) ); QueryLanguage language = QueryLanguage.from( "sql" ); Result res = LanguageCrud.anyQueryResult( @@ -2184,6 +2206,17 @@ void createAdapter( final Context ctx ) throws ServletException, IOException { .origin( ORIGIN ) .transactionManager( transactionManager ) .build(), UIRequest.builder().build() ).get( 0 ); + + //Setze selektierte Attribute auf der konkreten Adapterinstanz + /*if ( a.metadata != null && !a.metadata.isEmpty() ) { + @NotNull Optional> adapterInstance = AdapterManager.getInstance().getAdapter( a.name ); + if ( adapterInstance.isPresent() && adapterInstance.get() instanceof MetadataProvider yourAdapter ) { + yourAdapter.markSelectedAttributes( a.metadata ); + yourAdapter.printTree( null, 0 ); + } else { + log.warn( "Adapter-Instanz fΓΌr '" + a.name + "' nicht gefunden oder falscher Typ." ); + } + }*/ ctx.json( res ); } diff --git a/webui/src/main/java/org/polypheny/db/webui/models/catalog/AdapterModel.java b/webui/src/main/java/org/polypheny/db/webui/models/catalog/AdapterModel.java index 7a628ecc83..02b2d2103f 100644 --- a/webui/src/main/java/org/polypheny/db/webui/models/catalog/AdapterModel.java +++ b/webui/src/main/java/org/polypheny/db/webui/models/catalog/AdapterModel.java @@ -54,6 +54,9 @@ public class AdapterModel extends IdEntity { @JsonProperty public List indexMethods; + @JsonProperty + public List metadata; + public AdapterModel( @JsonProperty("id") @Nullable Long id, @@ -62,13 +65,15 @@ public AdapterModel( @JsonProperty("type") AdapterType type, @JsonProperty("settings") Map settings, @JsonProperty("mode") DeployMode mode, - @JsonProperty("indexMethods") List indexMethods ) { + @JsonProperty("indexMethods") List indexMethods, + @JsonProperty("metadata") List metadata ) { super( id, name ); this.adapterName = adapterName; this.type = type; this.settings = settings; this.mode = mode; this.indexMethods = indexMethods; + this.metadata = metadata; } @@ -84,7 +89,8 @@ public static AdapterModel from( LogicalAdapter adapter ) { adapter.type, settings, adapter.mode, - adapter.type == AdapterType.STORE ? ((DataStore) dataStore).getAvailableIndexMethods() : List.of() ) ).orElse( null ); + adapter.type == AdapterType.STORE ? ((DataStore) dataStore).getAvailableIndexMethods() : List.of(), + null) ).orElse( null ); } From a48d17b73f7b7ba1d2827a2981b817f5cd4c6923 Mon Sep 17 00:00:00 2001 From: romanost03 Date: Wed, 7 May 2025 19:13:33 +0200 Subject: [PATCH 17/68] Added new JDBC_TYPE_TO_NAME mapping in PolyType.java such that Mysql's datatype "LONGVARCHAR" is correctly recognized. 
---
 core/src/main/java/org/polypheny/db/type/PolyType.java | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/core/src/main/java/org/polypheny/db/type/PolyType.java b/core/src/main/java/org/polypheny/db/type/PolyType.java
index 0e7d2e09a0..64cb9edb9b 100644
--- a/core/src/main/java/org/polypheny/db/type/PolyType.java
+++ b/core/src/main/java/org/polypheny/db/type/PolyType.java
@@ -401,6 +401,9 @@ public enum PolyType {
             .put( ExtraPolyTypes.NCHAR, CHAR )
             .put( ExtraPolyTypes.NVARCHAR, VARCHAR )

+            .put( Types.LONGVARCHAR, VARCHAR )
+            .put( Types.LONGNVARCHAR, VARCHAR )
+
             .put( Types.BINARY, BINARY )
             .put( Types.VARBINARY, VARBINARY )

From de237fb10e0d3c6757df315d8662a221a8446f9e Mon Sep 17 00:00:00 2001
From: romanost03
Date: Wed, 7 May 2025 19:16:10 +0200
Subject: [PATCH 18/68] Oracle adapter now supports the MetadataProvider
 interface. Unnecessary metadata from the oracle source is not filtered yet.
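
The ORACLE_INTERNAL pattern introduced below filters dictionary and queue artifacts by name prefix; for example (illustrative names):

    // true  -> skipped as internal:
    ORACLE_INTERNAL.matcher( "SYS_IOT_OVER_71234" ).find();
    ORACLE_INTERNAL.matcher( "AQ$_QUEUE_TABLE" ).find();
    // false -> kept as a user table:
    ORACLE_INTERNAL.matcher( "CUSTOMERS" ).find();

Tables whose names contain "$" are additionally skipped regardless of prefix.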
---
 .../db/schemaDiscovery/MetadataProvider.java |   2 +-
 .../source/OracleSource.java                 | 231 +++++++++++++++++-
 2 files changed, 228 insertions(+), 5 deletions(-)

diff --git a/core/src/main/java/org/polypheny/db/schemaDiscovery/MetadataProvider.java b/core/src/main/java/org/polypheny/db/schemaDiscovery/MetadataProvider.java
index c298d1d6d0..1c444118c1 100644
--- a/core/src/main/java/org/polypheny/db/schemaDiscovery/MetadataProvider.java
+++ b/core/src/main/java/org/polypheny/db/schemaDiscovery/MetadataProvider.java
@@ -24,7 +24,7 @@ public interface MetadataProvider {

     Object fetchPreview( int limit );

-    void markSelectedAttributes( List<String> paths );
+    void markSelectedAttributes( List<String> selectedPaths );

     void printTree( AbstractNode node, int depth );

diff --git a/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/source/OracleSource.java b/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/source/OracleSource.java
index 51a7b7a581..e4f8bbaffa 100644
--- a/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/source/OracleSource.java
+++ b/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/source/OracleSource.java
@@ -33,6 +33,10 @@
 import org.polypheny.db.catalog.entity.physical.PhysicalTable;
 import org.polypheny.db.catalog.exceptions.GenericRuntimeException;
 import org.polypheny.db.prepare.Context;
+import org.polypheny.db.schemaDiscovery.AbstractNode;
+import org.polypheny.db.schemaDiscovery.AttributeNode;
+import org.polypheny.db.schemaDiscovery.MetadataProvider;
+import org.polypheny.db.schemaDiscovery.Node;
 import org.polypheny.db.transaction.PUID;
 import org.polypheny.db.transaction.PolyXid;
 import org.polypheny.db.type.PolyType;
@@ -41,9 +45,16 @@
 import java.sql.ResultSet;
 import java.sql.SQLException;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.HashMap;
+import java.util.HashSet;
+import java.util.LinkedHashMap;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
+import java.util.Optional;
+import java.util.Set;
+import java.util.regex.Pattern;
 import java.util.stream.Collectors;

 @Slf4j
@@ -68,7 +79,14 @@
         description = "Which level of transaction isolation should be used.")
 @AdapterSettingString(name = "tables", defaultValue = "foo,bar",
         description = "List of tables which should be imported. The names must be separated by a comma.")
-public class OracleSource extends AbstractJdbcSource {
+public class OracleSource extends AbstractJdbcSource implements MetadataProvider {
+
+    public AbstractNode metadataRoot;
+
+    private static final Pattern ORACLE_INTERNAL =
+            Pattern.compile( "^(AQ\\$|AQS\\$|SYS_|WRI\\$|MDSYS_|XDB_|CTXSYS_|OLAP\\$|LOG\\$|DBMS_|ORDDATA|ORDSYS)",
+                    Pattern.CASE_INSENSITIVE );
+

     public OracleSource( final long storeId, final String uniqueName, final Map<String, String> settings, final DeployMode mode ) {
         super(
@@ -89,9 +107,7 @@ protected String getConnectionUrl( final String dbHostname, final int dbPort, fi

     @Override
-    protected boolean requiresSchema() {
-        return true;
-    }
+    protected boolean requiresSchema() { return true; }

@@ -220,4 +236,211 @@ public Map<String, List<ExportedColumn>> getExportedColumns() {
     }

+    @Override
+    public AbstractNode fetchMetadataTree() {
+        Node root = new Node( "relational", settings.get( "database" ) );
+
+        PolyXid xid = PolyXid.generateLocalTransactionIdentifier( PUID.EMPTY_PUID, PUID.EMPTY_PUID );
+
+        try {
+            ConnectionHandler h = connectionFactory.getOrCreateConnectionHandler( xid );
+            DatabaseMetaData m = h.getStatement().getConnection().getMetaData();
+
+            String currentUser = m.getUserName(); // current user (= schema)
+
+            try ( ResultSet schemas = m.getSchemas() ) {
+                while ( schemas.next() ) {
+                    String schemaName = schemas.getString( "TABLE_SCHEM" );
+                    Node schemaNode = new Node( "schema", schemaName );
+
+                    try ( ResultSet tables =
+                            m.getTables( null, schemaName, "%", new String[]{ "TABLE" } ) ) {
+
+                        while ( tables.next() ) {
+                            String owner = tables.getString( "TABLE_SCHEM" ); // same as schemaName
+                            String tableName = tables.getString( "TABLE_NAME" );
+
+                            /* (a) only objects owned by the logged-in user */
+                            if ( !owner.equalsIgnoreCase( currentUser ) ) {
+                                continue;
+                            }
+                            /* (b) hide internal Oracle tables */
+                            if ( tableName.contains( "$" ) || ORACLE_INTERNAL.matcher( tableName ).find() ) {
+                                continue;
+                            }
+
+                            Node tableNode = new Node( "table", tableName );
+
+                            /* cache primary key columns */
+                            Set<String> pkCols = new HashSet<>();
+                            try ( ResultSet pk = m.getPrimaryKeys( null, schemaName, tableName ) ) {
+                                while ( pk.next() ) {
+                                    pkCols.add( pk.getString( "COLUMN_NAME" ) );
+                                }
+                            }
+
+                            /* read columns */
+                            try ( ResultSet cols =
+                                    m.getColumns( null, schemaName, tableName, "%" ) ) {
+
+                                while ( cols.next() ) {
+                                    String colName = cols.getString( "COLUMN_NAME" );
+                                    String typeName = cols.getString( "TYPE_NAME" );
+                                    boolean nullable =
+                                            cols.getInt( "NULLABLE" ) == DatabaseMetaData.columnNullable;
+                                    boolean primary = pkCols.contains( colName );
+
+                                    AbstractNode colNode = new AttributeNode( "column", colName );
+                                    colNode.addProperty( "type", typeName );
+                                    colNode.addProperty( "nullable", nullable );
+                                    colNode.addProperty( "primaryKey", primary );
+
+                                    Integer len = (Integer) cols.getObject( "COLUMN_SIZE" );
+                                    Integer scale = (Integer) cols.getObject( "DECIMAL_DIGITS" );
+                                    if ( len != null ) {
+                                        colNode.addProperty( "length", len );
+                                    }
+                                    if ( scale != null ) {
+                                        colNode.addProperty( "scale", scale );
+                                    }
+
+                                    tableNode.addChild( colNode );
+                                }
+                            }
+                            /* only keep tables with at least one visible column */
+                            if ( !tableNode.getChildren().isEmpty() ) {
+                                schemaNode.addChild( tableNode );
+                            }
+                        }
+                    }
+                    /* only attach the schema if at least one table was kept */
+                    if ( !schemaNode.getChildren().isEmpty() ) {
+                        root.addChild( schemaNode );
+                    }
+                }
+            }
+        } catch ( SQLException | ConnectionHandlerException e ) {
+            throw new GenericRuntimeException( "Error while fetching Oracle metadata", e );
+        }
+
+        this.metadataRoot = root;
+        return root;
+    }
+
+
+    @Override
+    public Object fetchPreview( int limit ) {
+        Map<String, List<Map<String, Object>>> preview = new LinkedHashMap<>();
+
+        PolyXid xid = PolyXid.generateLocalTransactionIdentifier( PUID.EMPTY_PUID, PUID.EMPTY_PUID );
+        try {
+            ConnectionHandler ch = connectionFactory.getOrCreateConnectionHandler( xid );
+            java.sql.Connection conn = ch.getStatement().getConnection();
+
+            String[] tables = settings.get( "tables" ).split( "," );
+            for ( String str : tables ) {
+                String[] parts = str.split( "\\." );
+                String schema = parts.length == 2 ? parts[0] : null;
+                String table = parts.length == 2 ? parts[1] : parts[0];
+
+                schema = schema != null ? schema.toUpperCase() : null; // guard against entries without a schema qualifier
+                table = table.toUpperCase();
+
+                String fqName = (schema != null ? schema + "." : "") + table;
+                List<Map<String, Object>> rows = new ArrayList<>();
+
+                try ( var stmt = conn.createStatement();
+                        var rs = stmt.executeQuery( "SELECT * FROM " + fqName + " FETCH FIRST " + limit + " ROWS ONLY" ) ) {
+
+                    var meta = rs.getMetaData();
+                    while ( rs.next() ) {
+                        Map<String, Object> row = new HashMap<>();
+                        for ( int i = 1; i <= meta.getColumnCount(); i++ ) {
+                            row.put( meta.getColumnName( i ), rs.getObject( i ) );
+                        }
+                        rows.add( row );
+                    }
+                }
+
+                preview.put( fqName, rows );
+            }
+        } catch ( Exception e ) {
+            throw new GenericRuntimeException( "Error fetching preview data", e );
+        }
+
+        return preview;
+    }
+
+
+    @Override
+    public void markSelectedAttributes( List<String> selectedPaths ) {
+        List<List<String>> attributePaths = new ArrayList<>();
+
+        for ( String path : selectedPaths ) {
+            String cleanPath = path.replaceFirst( " ?:.*$", "" ).trim();
+
+            List<String> segments = Arrays.asList( cleanPath.split( "\\."
 ) );
+            if ( !segments.isEmpty() && segments.get( 0 ).equals( metadataRoot.getName() ) ) {
+                segments = segments.subList( 1, segments.size() );
+            }
+
+            attributePaths.add( segments );
+        }
+
+        for ( List<String> pathSegments : attributePaths ) {
+            AbstractNode current = metadataRoot;
+
+            for ( int i = 0; i < pathSegments.size(); i++ ) {
+                String segment = pathSegments.get( i );
+
+                if ( i == pathSegments.size() - 1 ) {
+                    Optional<AbstractNode> attrNodeOpt = current.getChildren().stream()
+                            .filter( c -> c instanceof AttributeNode && segment.equals( c.getName() ) )
+                            .findFirst();
+
+                    if ( attrNodeOpt.isPresent() ) {
+                        ((AttributeNode) attrNodeOpt.get()).setSelected( true );
+                        log.info( "βœ… Attribute marked: " + String.join( ".", pathSegments ) );
+                    } else {
+                        log.warn( "❌ Attribute not found: " + String.join( ".", pathSegments ) );
+                    }
+
+                } else {
+                    Optional<AbstractNode> childOpt = current.getChildren().stream()
+                            .filter( c -> segment.equals( c.getName() ) )
+                            .findFirst();
+
+                    if ( childOpt.isPresent() ) {
+                        current = childOpt.get();
+                    } else {
+                        log.warn( "❌ Segment not found: " + segment + " in path " + String.join( ".", pathSegments ) );
+                        break;
+                    }
+                }
+            }
+        }
+    }
+
+
+    @Override
+    public void printTree( AbstractNode node, int depth ) {
+        if ( node == null ) {
+            node = this.metadataRoot;
+        }
+        System.out.println( " ".repeat( depth ) + node.getType() + ": " + node.getName() );
+        for ( Map.Entry<String, Object> entry : node.getProperties().entrySet() ) {
+            System.out.println( " ".repeat( depth + 1 ) + "- " + entry.getKey() + ": " + entry.getValue() );
+        }
+        for ( AbstractNode child : node.getChildren() ) {
+            printTree( child, depth + 1 );
+        }
+
+    }
+
+
+    @Override
+    public void setRoot( AbstractNode root ) {
+        this.metadataRoot = root;
+    }
+
 }

From 7b28a7676ff6f49acc1aac2674dac0ddae5df8f8 Mon Sep 17 00:00:00 2001
From: romanost03
Date: Thu, 8 May 2025 17:32:16 +0200
Subject: [PATCH 19/68] Changed the table setting of the monetdb adapter and
 overrode "getExportedColumns" in the monetdb adapter. The adapter now deploys
 and shows data.
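
For reference, deploying this source through the SQL interface takes the same form as the statement Crud.createAdapter assembles; the adapter name, type, and all setting values below are illustrative:

    ALTER ADAPTERS ADD "monet1" USING 'MonetDB' AS 'SOURCE'
    WITH '{"host":"localhost","port":"50000","database":"demo","username":"monetdb","password":"monetdb","maxConnections":"25","tables":"sys.testtable","mode":"remote"}'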
--- .../monetdb/sources/MonetdbSource.java | 124 +++++++++++++++++- 1 file changed, 120 insertions(+), 4 deletions(-) diff --git a/plugins/monetdb-adapter/src/main/java/org/polypheny/db/adapter/monetdb/sources/MonetdbSource.java b/plugins/monetdb-adapter/src/main/java/org/polypheny/db/adapter/monetdb/sources/MonetdbSource.java index cc69825079..7249f563b1 100644 --- a/plugins/monetdb-adapter/src/main/java/org/polypheny/db/adapter/monetdb/sources/MonetdbSource.java +++ b/plugins/monetdb-adapter/src/main/java/org/polypheny/db/adapter/monetdb/sources/MonetdbSource.java @@ -17,7 +17,12 @@ package org.polypheny.db.adapter.monetdb.sources; +import java.sql.Connection; +import java.sql.DatabaseMetaData; +import java.sql.ResultSet; import java.sql.SQLException; +import java.util.ArrayList; +import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.stream.Collectors; @@ -29,6 +34,8 @@ import org.polypheny.db.adapter.annotations.AdapterSettingString; import org.polypheny.db.adapter.jdbc.JdbcTable; import org.polypheny.db.adapter.jdbc.connection.ConnectionFactory; +import org.polypheny.db.adapter.jdbc.connection.ConnectionHandler; +import org.polypheny.db.adapter.jdbc.connection.ConnectionHandlerException; import org.polypheny.db.adapter.jdbc.connection.TransactionalConnectionFactory; import org.polypheny.db.adapter.jdbc.sources.AbstractJdbcSource; import org.polypheny.db.adapter.monetdb.MonetdbSqlDialect; @@ -36,8 +43,12 @@ import org.polypheny.db.catalog.entity.logical.LogicalTableWrapper; import org.polypheny.db.catalog.entity.physical.PhysicalEntity; import org.polypheny.db.catalog.entity.physical.PhysicalTable; +import org.polypheny.db.catalog.exceptions.GenericRuntimeException; import org.polypheny.db.prepare.Context; import org.polypheny.db.sql.language.SqlDialect; +import org.polypheny.db.transaction.PUID; +import org.polypheny.db.transaction.PolyXid; +import org.polypheny.db.type.PolyType; @Slf4j @@ -48,11 +59,11 @@ defaultMode = DeployMode.REMOTE) @AdapterSettingString(name = "host", defaultValue = "localhost", description = "Hostname or IP address of the remote MonetDB instance.", position = 1) @AdapterSettingInteger(name = "port", defaultValue = 50000, description = "JDBC port number on the remote MonetDB instance.", position = 2) -@AdapterSettingString(name = "database", defaultValue = "polypheny", description = "JDBC port number on the remote MonetDB instance.", position = 3) -@AdapterSettingString(name = "username", defaultValue = "polypheny", description = "Name of the database to connect to.", position = 4) -@AdapterSettingString(name = "password", defaultValue = "polypheny", description = "Username to be used for authenticating at the remote instance.", position = 5) +@AdapterSettingString(name = "database", defaultValue = "demo", description = "JDBC port number on the remote MonetDB instance.", position = 3) +@AdapterSettingString(name = "username", defaultValue = "monetdb", description = "Name of the database to connect to.", position = 4) +@AdapterSettingString(name = "password", defaultValue = "monetdb", description = "Username to be used for authenticating at the remote instance.", position = 5) @AdapterSettingInteger(name = "maxConnections", defaultValue = 25, description = "Password to be used for authenticating at the remote instance.") -@AdapterSettingString(name = "table", defaultValue = "public.foo,public.bar", description = "Maximum number of concurrent JDBC connections.") +@AdapterSettingString(name = "tables", defaultValue = 
"sys.testtable", description = "Maximum number of concurrent JDBC connections.") public class MonetdbSource extends AbstractJdbcSource { public MonetdbSource( final long storeId, final String uniqueName, final Map settings, final DeployMode mode ) { @@ -128,4 +139,109 @@ public List createTable( Context context, LogicalTableWrapper lo return List.of( physical ); } + + @Override + public Map> getExportedColumns() { + Map> map = new HashMap<>(); + PolyXid xid = PolyXid.generateLocalTransactionIdentifier( PUID.EMPTY_PUID, PUID.EMPTY_PUID ); + try { + ConnectionHandler connectionHandler = connectionFactory.getOrCreateConnectionHandler( xid ); + java.sql.Statement statement = connectionHandler.getStatement(); + Connection connection = statement.getConnection(); + DatabaseMetaData dbmd = connection.getMetaData(); + + String[] tables = settings.get( "tables" ).split( "," ); + for ( String str : tables ) { + String[] names = str.split( "\\." ); + if ( names.length == 0 || names.length > 2 || (requiresSchema() && names.length == 1) ) { + throw new GenericRuntimeException( "Invalid table name: " + str ); + } + String tableName; + String schemaPattern; + if ( requiresSchema() ) { + schemaPattern = names[0]; + tableName = names[1]; + } else { + schemaPattern = null; + tableName = names[0]; + } + List primaryKeyColumns = new ArrayList<>(); + try ( ResultSet row = dbmd.getPrimaryKeys( null, schemaPattern, tableName ) ) { + while ( row.next() ) { + primaryKeyColumns.add( row.getString( "COLUMN_NAME" ) ); + } + } + try ( ResultSet row = dbmd.getColumns( null, schemaPattern, tableName, "%" ) ) { + List list = new ArrayList<>(); + while ( row.next() ) { + PolyType type = PolyType.getNameForJdbcType( row.getInt( "DATA_TYPE" ) ); + Integer length = null; + Integer scale = null; + Integer dimension = null; + Integer cardinality = null; + switch ( type ) { + case BOOLEAN: + case TINYINT: + case SMALLINT: + case INTEGER: + case BIGINT: + case FLOAT: + case REAL: + case DOUBLE: + case DATE: + break; + case DECIMAL: + length = row.getInt( "COLUMN_SIZE" ); + scale = row.getInt( "DECIMAL_DIGITS" ); + break; + case TIME: + length = row.getInt( "DECIMAL_DIGITS" ); + if ( length > 3 ) { + throw new GenericRuntimeException( "Unsupported precision for data type time: " + length ); + } + break; + case TIMESTAMP: + length = row.getInt( "DECIMAL_DIGITS" ); + if ( length > 3 ) { + throw new GenericRuntimeException( "Unsupported precision for data type timestamp: " + length ); + } + break; + case CHAR: + case VARCHAR: + type = PolyType.VARCHAR; + length = row.getInt( "COLUMN_SIZE" ); + break; + case BINARY: + case VARBINARY: + type = PolyType.VARBINARY; + length = row.getInt( "COLUMN_SIZE" ); + break; + default: + throw new GenericRuntimeException( "Unsupported data type: " + type.getName() ); + } + list.add( new ExportedColumn( + row.getString( "COLUMN_NAME" ).toLowerCase(), + type, + null, + length, + scale, + dimension, + cardinality, + row.getString( "IS_NULLABLE" ).equalsIgnoreCase( "YES" ), + requiresSchema() ? row.getString( "TABLE_SCHEM" ) : row.getString( "TABLE_CAT" ), + row.getString( "TABLE_NAME" ), + row.getString( "COLUMN_NAME" ), + row.getInt( "ORDINAL_POSITION" ), + primaryKeyColumns.contains( row.getString( "COLUMN_NAME" ) ) + ) ); + } + map.put( tableName, list ); + } + } + } catch ( SQLException | ConnectionHandlerException e ) { + throw new GenericRuntimeException( "Exception while collecting schema information!" 
, e );
+ }
+ return map;
+ }
+
 }
From 9e071f18a0c5283e0215388b01c110ab85f0c5d1 Mon Sep 17 00:00:00 2001
From: romanost03
Date: Thu, 8 May 2025 18:14:25 +0200
Subject: [PATCH 20/68] MonetdbSource.java now implements the MetadataProvider
 interface.

---
 .../monetdb/sources/MonetdbSource.java | 213 +++++++++++++++++-
 1 file changed, 211 insertions(+), 2 deletions(-)

diff --git a/plugins/monetdb-adapter/src/main/java/org/polypheny/db/adapter/monetdb/sources/MonetdbSource.java b/plugins/monetdb-adapter/src/main/java/org/polypheny/db/adapter/monetdb/sources/MonetdbSource.java
index 7249f563b1..707e0f6cc4 100644
--- a/plugins/monetdb-adapter/src/main/java/org/polypheny/db/adapter/monetdb/sources/MonetdbSource.java
+++ b/plugins/monetdb-adapter/src/main/java/org/polypheny/db/adapter/monetdb/sources/MonetdbSource.java
@@ -22,9 +22,14 @@
 import java.sql.ResultSet;
 import java.sql.SQLException;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.HashMap;
+import java.util.HashSet;
+import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.Optional;
+import java.util.Set;
 import java.util.stream.Collectors;
 import lombok.extern.slf4j.Slf4j;
 import org.apache.commons.dbcp2.BasicDataSource;
@@ -45,6 +50,10 @@
 import org.polypheny.db.catalog.entity.physical.PhysicalTable;
 import org.polypheny.db.catalog.exceptions.GenericRuntimeException;
 import org.polypheny.db.prepare.Context;
+import org.polypheny.db.schemaDiscovery.AbstractNode;
+import org.polypheny.db.schemaDiscovery.AttributeNode;
+import org.polypheny.db.schemaDiscovery.MetadataProvider;
+import org.polypheny.db.schemaDiscovery.Node;
 import org.polypheny.db.sql.language.SqlDialect;
 import org.polypheny.db.transaction.PUID;
 import org.polypheny.db.transaction.PolyXid;
@@ -64,7 +73,10 @@
 @AdapterSettingString(name = "password", defaultValue = "monetdb", description = "Username to be used for authenticating at the remote instance.", position = 5)
 @AdapterSettingInteger(name = "maxConnections", defaultValue = 25, description = "Password to be used for authenticating at the remote instance.")
 @AdapterSettingString(name = "tables", defaultValue = "sys.testtable", description = "Maximum number of concurrent JDBC connections.")
-public class MonetdbSource extends AbstractJdbcSource {
+public class MonetdbSource extends AbstractJdbcSource implements MetadataProvider {
+
+ private AbstractNode metadataRoot;
+
 public MonetdbSource( final long storeId, final String uniqueName, final Map<String, String> settings, final DeployMode mode ) {
 super( storeId, uniqueName, settings, mode, "nl.cwi.monetdb.jdbc.MonetDriver", MonetdbSqlDialect.DEFAULT, false );
@@ -124,7 +136,7 @@ protected boolean requiresSchema() {
 @Override
 public List<PhysicalEntity> createTable( Context context, LogicalTableWrapper logical, AllocationTableWrapper allocation ) {
 PhysicalTable table = adapterCatalog.createTable(
- logical.table.getNamespaceName(),
+ "sys",
 logical.table.name,
 logical.columns.stream().collect( Collectors.toMap( c -> c.id, c -> c.name ) ),
 logical.table,
@@ -244,4 +256,201 @@ public Map<String, List<ExportedColumn>> getExportedColumns() {
 return map;
 }
+
+ @Override
+ public AbstractNode fetchMetadataTree() {
+ String dbName = settings.getOrDefault( "database", "monetdb" );
+ Node root = new Node( "relational", dbName );
+
+ PolyXid xid = PolyXid.generateLocalTransactionIdentifier( PUID.EMPTY_PUID, PUID.EMPTY_PUID );
+
+ try {
+ ConnectionHandler h = connectionFactory.getOrCreateConnectionHandler( xid );
+ DatabaseMetaData md = h.getStatement().getConnection().getMetaData();
+
+ 
try ( ResultSet schemas = md.getSchemas( null, "%" ) ) { + + while ( schemas.next() ) { + String schemaName = schemas.getString( "TABLE_SCHEM" ); + + AbstractNode schemaNode = new Node( "schema", schemaName ); + + try ( ResultSet tables = md.getTables( + null, + schemaName, + "%", + new String[]{ "TABLE" } ) ) { + + while ( tables.next() ) { + String tableName = tables.getString( "TABLE_NAME" ); + AbstractNode tableNode = new Node( "table", tableName ); + + Set pkCols = new HashSet<>(); + try ( ResultSet pk = md.getPrimaryKeys( + null, + schemaName, + tableName ) ) { + while ( pk.next() ) { + pkCols.add( pk.getString( "COLUMN_NAME" ) ); + } + } + + try ( ResultSet cols = md.getColumns( + null, + schemaName, + tableName, + "%" ) ) { + + while ( cols.next() ) { + String colName = cols.getString( "COLUMN_NAME" ); + String typeName = cols.getString( "TYPE_NAME" ); + boolean nullable = + cols.getInt( "NULLABLE" ) == DatabaseMetaData.columnNullable; + boolean primary = pkCols.contains( colName ); + + AbstractNode colNode = new AttributeNode( "column", colName ); + colNode.addProperty( "type", typeName ); + colNode.addProperty( "nullable", nullable ); + colNode.addProperty( "primaryKey", primary ); + + Integer len = (Integer) cols.getObject( "COLUMN_SIZE" ); + Integer scale = (Integer) cols.getObject( "DECIMAL_DIGITS" ); + if ( len != null ) { + colNode.addProperty( "length", len ); + } + if ( scale != null ) { + colNode.addProperty( "scale", scale ); + } + + tableNode.addChild( colNode ); + } + } + schemaNode.addChild( tableNode ); + } + } + root.addChild( schemaNode ); + } + } + } catch ( SQLException | ConnectionHandlerException ex ) { + throw new GenericRuntimeException( "Error while fetching metadata tree", ex ); + } + + this.metadataRoot = root; + return this.metadataRoot; + } + + + @Override + public Object fetchPreview( int limit ) { + Map>> preview = new LinkedHashMap<>(); + + PolyXid xid = PolyXid.generateLocalTransactionIdentifier( PUID.EMPTY_PUID, PUID.EMPTY_PUID ); + try { + ConnectionHandler ch = connectionFactory.getOrCreateConnectionHandler( xid ); + java.sql.Connection conn = ch.getStatement().getConnection(); + + String[] tables = settings.get( "tables" ).split( "," ); + for ( String str : tables ) { + String[] parts = str.split( "\\." ); + String schema = parts.length == 2 ? parts[0] : null; + String table = parts.length == 2 ? parts[1] : parts[0]; + + String fqName = (schema != null ? schema + "." : "") + table; + List> rows = new ArrayList<>(); + + try ( var stmt = conn.createStatement(); + var rs = stmt.executeQuery( "SELECT * FROM " + fqName + " LIMIT " + limit ) ) { + + var meta = rs.getMetaData(); + while ( rs.next() ) { + Map row = new HashMap<>(); + for ( int i = 1; i <= meta.getColumnCount(); i++ ) { + row.put( meta.getColumnName( i ), rs.getObject( i ) ); + } + rows.add( row ); + } + } + + preview.put( fqName, rows ); + } + } catch ( Exception e ) { + throw new GenericRuntimeException( "Error fetching preview data", e ); + } + + return preview; + } + + + @Override + public void markSelectedAttributes( List selectedPaths ) { + + List> attributePaths = new ArrayList<>(); + + for ( String path : selectedPaths ) { + String cleanPath = path.replaceFirst( " ?:.*$", "" ).trim(); + + List segments = Arrays.asList( cleanPath.split( "\\." 
) );
+ if ( !segments.isEmpty() && segments.get( 0 ).equals( metadataRoot.getName() ) ) {
+ segments = segments.subList( 1, segments.size() );
+ }
+
+ attributePaths.add( segments );
+ }
+
+ for ( List<String> pathSegments : attributePaths ) {
+ AbstractNode current = metadataRoot;
+
+ for ( int i = 0; i < pathSegments.size(); i++ ) {
+ String segment = pathSegments.get( i );
+
+ if ( i == pathSegments.size() - 1 ) {
+ Optional<AbstractNode> attrNodeOpt = current.getChildren().stream()
+ .filter( c -> c instanceof AttributeNode && segment.equals( c.getName() ) )
+ .findFirst();
+
+ if ( attrNodeOpt.isPresent() ) {
+ ((AttributeNode) attrNodeOpt.get()).setSelected( true );
+ log.info( "βœ… Attribute selected: " + String.join( ".", pathSegments ) );
+ } else {
+ log.warn( "❌ Attribute not found: " + String.join( ".", pathSegments ) );
+ }
+
+ } else {
+ Optional<AbstractNode> childOpt = current.getChildren().stream()
+ .filter( c -> segment.equals( c.getName() ) )
+ .findFirst();
+
+ if ( childOpt.isPresent() ) {
+ current = childOpt.get();
+ } else {
+ log.warn( "❌ Segment not found: " + segment + " in path " + String.join( ".", pathSegments ) );
+ break;
+ }
+ }
+ }
+ }
+
+ }
+
+
+ @Override
+ public void printTree( AbstractNode node, int depth ) {
+ if ( node == null ) {
+ node = this.metadataRoot;
+ }
+ System.out.println( " ".repeat( depth ) + node.getType() + ": " + node.getName() );
+ for ( Map.Entry<String, Object> entry : node.getProperties().entrySet() ) {
+ System.out.println( " ".repeat( depth + 1 ) + "- " + entry.getKey() + ": " + entry.getValue() );
+ }
+ for ( AbstractNode child : node.getChildren() ) {
+ printTree( child, depth + 1 );
+ }
+ }
+
+
+ @Override
+ public void setRoot( AbstractNode root ) {
+ this.metadataRoot = root;
+ }
+
 }
From c6dae6c4b497cc3c8b70e0f69f710e8a9fc9a3bd Mon Sep 17 00:00:00 2001
From: romanost03
Date: Fri, 9 May 2025 17:24:36 +0200
Subject: [PATCH 21/68] Change in "getExportedColumns" so the exported columns
 no longer rely on the "tables" setting. The "fetchPreview" method for the
 MetadataProvider instances now shows all metadata (working for PostgreSQL and
 MonetDB so far).
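The table list is now derived from the "selectedAttributes" setting whenever it is
present. A minimal standalone sketch of that parsing step follows; the sample entries
are made up for illustration, the real ones come from the UI:

    import java.util.HashSet;
    import java.util.Set;

    public class SelectedAttributesSketch {

        public static void main( String[] args ) {
            // Entries have the form "database.schema.table.column : TYPE".
            String selectedAttributes = "demo.sys.testtable.id : INTEGER,demo.sys.testtable.name : VARCHAR";
            Set<String> tableNames = new HashSet<>();
            for ( String s : selectedAttributes.split( "," ) ) {
                String attr = s.split( " : " )[0];      // drop the type suffix
                String[] parts = attr.split( "\\." );
                if ( parts.length >= 3 ) {
                    tableNames.add( parts[1] + "." + parts[2] );  // keep "schema.table"
                }
            }
            System.out.println( tableNames );           // prints [sys.testtable]
        }

    }

The deduplicated "schema.table" names then replace the comma-separated "tables" setting
as the input to getExportedColumns().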
---
 .../db/schemaDiscovery/MetadataProvider.java | 6 +-
 .../jdbc/sources/AbstractJdbcSource.java | 21 ++-
 .../monetdb/sources/MonetdbSource.java | 141 +++++++++++------
 .../postgres/source/PostgresqlSource.java | 81 +++++-----
 4 files changed, 159 insertions(+), 90 deletions(-)

diff --git a/core/src/main/java/org/polypheny/db/schemaDiscovery/MetadataProvider.java b/core/src/main/java/org/polypheny/db/schemaDiscovery/MetadataProvider.java
index 1c444118c1..adc4bb49bb 100644
--- a/core/src/main/java/org/polypheny/db/schemaDiscovery/MetadataProvider.java
+++ b/core/src/main/java/org/polypheny/db/schemaDiscovery/MetadataProvider.java
@@ -16,13 +16,15 @@
 package org.polypheny.db.schemaDiscovery;
+import java.sql.Connection;
 import java.util.List;
+import java.util.Map;
 public interface MetadataProvider {
 AbstractNode fetchMetadataTree();
- Object fetchPreview( int limit );
+ List<Map<String, Object>> fetchPreview( Connection conn, String fqName, int limit );
 void markSelectedAttributes( List<String> selectedPaths );
@@ -30,5 +32,7 @@
 void setRoot( AbstractNode root );
+ Object getPreview();
+
 }
diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/sources/AbstractJdbcSource.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/sources/AbstractJdbcSource.java
index 38be9c1530..e0ad2e0d79 100644
--- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/sources/AbstractJdbcSource.java
+++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/sources/AbstractJdbcSource.java
@@ -23,8 +23,10 @@
 import java.sql.SQLException;
 import java.util.ArrayList;
 import java.util.HashMap;
+import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
+import java.util.Set;
 import lombok.experimental.Delegate;
 import lombok.extern.slf4j.Slf4j;
 import org.apache.commons.dbcp2.BasicDataSource;
@@ -214,7 +216,24 @@ public Map<String, List<ExportedColumn>> getExportedColumns() {
 Connection connection = statement.getConnection();
 DatabaseMetaData dbmd = connection.getMetaData();
- String[] tables = settings.get( "tables" ).split( "," );
+ String[] tables;
+ if ( settings.get( "selectedAttributes" ) == null || settings.get( "selectedAttributes" ).isEmpty() ) {
+ tables = settings.get( "tables" ).split( "," );
+ } else {
+ String[] names2 = settings.get( "selectedAttributes" ).split( "," );
+ Set<String> tableNames = new HashSet<>();
+
+ for ( String s : names2 ) {
+ String attr = s.split( " : " )[0];
+
+ String[] parts = attr.split( "\\." );
+ if ( parts.length >= 3 ) {
+ String tableName = parts[1] + "." + parts[2];
+ tableNames.add( tableName );
+ }
+ }
+ tables = tableNames.toArray( new String[0] );
+ }
 for ( String str : tables ) {
 String[] names = str.split( "\\."
); if ( names.length == 0 || names.length > 2 || (requiresSchema() && names.length == 1) ) { diff --git a/plugins/monetdb-adapter/src/main/java/org/polypheny/db/adapter/monetdb/sources/MonetdbSource.java b/plugins/monetdb-adapter/src/main/java/org/polypheny/db/adapter/monetdb/sources/MonetdbSource.java index 707e0f6cc4..4c33616859 100644 --- a/plugins/monetdb-adapter/src/main/java/org/polypheny/db/adapter/monetdb/sources/MonetdbSource.java +++ b/plugins/monetdb-adapter/src/main/java/org/polypheny/db/adapter/monetdb/sources/MonetdbSource.java @@ -20,7 +20,9 @@ import java.sql.Connection; import java.sql.DatabaseMetaData; import java.sql.ResultSet; +import java.sql.ResultSetMetaData; import java.sql.SQLException; +import java.sql.Statement; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; @@ -37,6 +39,7 @@ import org.polypheny.db.adapter.annotations.AdapterProperties; import org.polypheny.db.adapter.annotations.AdapterSettingInteger; import org.polypheny.db.adapter.annotations.AdapterSettingString; +import org.polypheny.db.adapter.java.SchemaFilter; import org.polypheny.db.adapter.jdbc.JdbcTable; import org.polypheny.db.adapter.jdbc.connection.ConnectionFactory; import org.polypheny.db.adapter.jdbc.connection.ConnectionHandler; @@ -76,6 +79,7 @@ public class MonetdbSource extends AbstractJdbcSource implements MetadataProvider { private AbstractNode metadataRoot; + private Map>> previewByTable = new LinkedHashMap<>(); public MonetdbSource( final long storeId, final String uniqueName, final Map settings, final DeployMode mode ) { @@ -162,7 +166,24 @@ public Map> getExportedColumns() { Connection connection = statement.getConnection(); DatabaseMetaData dbmd = connection.getMetaData(); - String[] tables = settings.get( "tables" ).split( "," ); + String[] tables; + if (settings.get("selectedAttributes").equals("")){ + tables = settings.get( "tables" ).split( "," ); + } else { + String[] names2 = settings.get("selectedAttributes").split(","); + Set tableNames = new HashSet<>(); + + for (String s : names2){ + String attr = s.split(" : ")[0]; + + String[] parts = attr.split("\\."); + if (parts.length >= 3) { + String tableName = parts[1] + "." + parts[2]; + tableNames.add(tableName); + } + } + tables = tableNames.toArray(new String[0]); + } for ( String str : tables ) { String[] names = str.split( "\\." ); if ( names.length == 0 || names.length > 2 || (requiresSchema() && names.length == 1) ) { @@ -262,50 +283,75 @@ public AbstractNode fetchMetadataTree() { String dbName = settings.getOrDefault( "database", "monetdb" ); Node root = new Node( "relational", dbName ); + SchemaFilter filter = SchemaFilter.forAdapter( adapterName ); + PolyXid xid = PolyXid.generateLocalTransactionIdentifier( PUID.EMPTY_PUID, PUID.EMPTY_PUID ); try { - ConnectionHandler h = connectionFactory.getOrCreateConnectionHandler( xid ); - DatabaseMetaData md = h.getStatement().getConnection().getMetaData(); + ConnectionHandler handler = connectionFactory.getOrCreateConnectionHandler( xid ); + java.sql.Statement stmt = handler.getStatement(); + Connection conn = stmt.getConnection(); + DatabaseMetaData meta = conn.getMetaData(); + + try ( ResultSet schemas = requiresSchema() + ? meta.getSchemas( null, "%" ) + : meta.getCatalogs() ) { + while ( schemas.next() ) { - try ( ResultSet schemas = md.getSchemas( null, "%" ) ) { + String schemaName = requiresSchema() + ? 
schemas.getString( "TABLE_SCHEM" ) + : schemas.getString( "TABLE_CAT" ); - while ( schemas.next() ) { - String schemaName = schemas.getString( "TABLE_SCHEM" ); + if ( filter.ignoredSchemas.contains( schemaName.toLowerCase() ) ) { + continue; + } AbstractNode schemaNode = new Node( "schema", schemaName ); - try ( ResultSet tables = md.getTables( + try ( ResultSet tables = meta.getTables( null, - schemaName, + requiresSchema() ? schemaName : null, "%", - new String[]{ "TABLE" } ) ) { - + new String[]{ "TABLE" } + ) ) { while ( tables.next() ) { + String tableName = tables.getString( "TABLE_NAME" ); + + String fqName = (requiresSchema() ? "\"" + schemaName + "\"." : "") + "\"" + tableName + "\""; + previewByTable.computeIfAbsent( + schemaName + "." + tableName, + k -> { + try { + return fetchPreview( conn, fqName, 10 ); + } catch ( Exception e ) { + log.warn( "Preview failed for {}", fqName, e ); + return List.of(); + } + } ); + AbstractNode tableNode = new Node( "table", tableName ); Set pkCols = new HashSet<>(); - try ( ResultSet pk = md.getPrimaryKeys( + try ( ResultSet pk = meta.getPrimaryKeys( null, - schemaName, + requiresSchema() ? schemaName : null, tableName ) ) { while ( pk.next() ) { pkCols.add( pk.getString( "COLUMN_NAME" ) ); } } - try ( ResultSet cols = md.getColumns( + try ( ResultSet cols = meta.getColumns( null, - schemaName, + requiresSchema() ? schemaName : null, tableName, "%" ) ) { - while ( cols.next() ) { + String colName = cols.getString( "COLUMN_NAME" ); String typeName = cols.getString( "TYPE_NAME" ); - boolean nullable = - cols.getInt( "NULLABLE" ) == DatabaseMetaData.columnNullable; + boolean nullable = cols.getInt( "NULLABLE" ) == DatabaseMetaData.columnNullable; boolean primary = pkCols.contains( colName ); AbstractNode colNode = new AttributeNode( "column", colName ); @@ -325,59 +371,46 @@ public AbstractNode fetchMetadataTree() { tableNode.addChild( colNode ); } } + schemaNode.addChild( tableNode ); } } + root.addChild( schemaNode ); } } + } catch ( SQLException | ConnectionHandlerException ex ) { throw new GenericRuntimeException( "Error while fetching metadata tree", ex ); } this.metadataRoot = root; + log.error( "Neue Preview ist geladen als: " + previewByTable.toString() ); return this.metadataRoot; } - @Override - public Object fetchPreview( int limit ) { - Map>> preview = new LinkedHashMap<>(); - PolyXid xid = PolyXid.generateLocalTransactionIdentifier( PUID.EMPTY_PUID, PUID.EMPTY_PUID ); - try { - ConnectionHandler ch = connectionFactory.getOrCreateConnectionHandler( xid ); - java.sql.Connection conn = ch.getStatement().getConnection(); - - String[] tables = settings.get( "tables" ).split( "," ); - for ( String str : tables ) { - String[] parts = str.split( "\\." ); - String schema = parts.length == 2 ? parts[0] : null; - String table = parts.length == 2 ? parts[1] : parts[0]; - - String fqName = (schema != null ? schema + "." 
: "") + table; - List> rows = new ArrayList<>(); - - try ( var stmt = conn.createStatement(); - var rs = stmt.executeQuery( "SELECT * FROM " + fqName + " LIMIT " + limit ) ) { - - var meta = rs.getMetaData(); - while ( rs.next() ) { - Map row = new HashMap<>(); - for ( int i = 1; i <= meta.getColumnCount(); i++ ) { - row.put( meta.getColumnName( i ), rs.getObject( i ) ); - } - rows.add( row ); - } + @Override + public List> fetchPreview( Connection conn, String fqName, int limit ) { + List> rows = new ArrayList<>(); + try ( Statement stmt = conn.createStatement(); + ResultSet rs = stmt.executeQuery( + "SELECT * FROM " + fqName + " LIMIT " + limit ) ) { + + ResultSetMetaData meta = rs.getMetaData(); + while ( rs.next() ) { + Map row = new LinkedHashMap<>(); + for ( int i = 1; i <= meta.getColumnCount(); i++ ) { + row.put( meta.getColumnName( i ), rs.getObject( i ) ); } - - preview.put( fqName, rows ); + rows.add( row ); } - } catch ( Exception e ) { - throw new GenericRuntimeException( "Error fetching preview data", e ); + } catch ( SQLException e ) { + log.warn( "Preview failed for {}", fqName, e ); + return List.of(); } - - return preview; + return rows; } @@ -453,4 +486,10 @@ public void setRoot( AbstractNode root ) { this.metadataRoot = root; } + + @Override + public Object getPreview() { + return this.previewByTable; + } + } diff --git a/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java b/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java index 964760bf41..e53d368fa9 100644 --- a/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java +++ b/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java @@ -20,10 +20,11 @@ import java.sql.Connection; import java.sql.DatabaseMetaData; import java.sql.ResultSet; +import java.sql.ResultSetMetaData; import java.sql.SQLException; +import java.sql.Statement; import java.util.ArrayList; import java.util.Arrays; -import java.util.HashMap; import java.util.HashSet; import java.util.LinkedHashMap; import java.util.List; @@ -31,13 +32,13 @@ import java.util.Optional; import java.util.Set; import java.util.stream.Collectors; -import lombok.Setter; import lombok.extern.slf4j.Slf4j; import org.polypheny.db.adapter.DeployMode; import org.polypheny.db.adapter.annotations.AdapterProperties; import org.polypheny.db.adapter.annotations.AdapterSettingInteger; import org.polypheny.db.adapter.annotations.AdapterSettingList; import org.polypheny.db.adapter.annotations.AdapterSettingString; +import org.polypheny.db.adapter.java.SchemaFilter; import org.polypheny.db.adapter.jdbc.connection.ConnectionHandler; import org.polypheny.db.adapter.jdbc.connection.ConnectionHandlerException; import org.polypheny.db.adapter.jdbc.sources.AbstractJdbcSource; @@ -76,11 +77,10 @@ description = "Maximum number of concurrent JDBC connections.") @AdapterSettingList(name = "transactionIsolation", options = { "SERIALIZABLE", "READ_UNCOMMITTED", "READ_COMMITTED", "REPEATABLE_READ" }, defaultValue = "SERIALIZABLE", description = "Which level of transaction isolation should be used.") -@AdapterSettingString(name = "tables", defaultValue = "foo,bar", - description = "List of tables which should be imported. 
The names must to be separated by a comma.") public class PostgresqlSource extends AbstractJdbcSource implements MetadataProvider { public AbstractNode metadataRoot; + private Map>> previewByTable = new LinkedHashMap<>(); @Override @@ -95,6 +95,8 @@ public AbstractNode fetchMetadataTree() { String dbName = settings.get( "database" ); Node root = new Node( "relational", dbName ); + SchemaFilter filter = SchemaFilter.forAdapter( adapterName ); + PolyXid xid = PolyXid.generateLocalTransactionIdentifier( PUID.EMPTY_PUID, PUID.EMPTY_PUID ); try { @@ -112,6 +114,10 @@ public AbstractNode fetchMetadataTree() { ? schemas.getString( "TABLE_SCHEM" ) : schemas.getString( "TABLE_CAT" ); + if ( filter.ignoredSchemas.contains( schemaName.toLowerCase() ) ) { + continue; + } + AbstractNode schemaNode = new Node( "schema", schemaName ); try ( ResultSet tables = meta.getTables( @@ -123,6 +129,19 @@ public AbstractNode fetchMetadataTree() { while ( tables.next() ) { String tableName = tables.getString( "TABLE_NAME" ); + + String fqName = (requiresSchema() ? "\"" + schemaName + "\"." : "") + "\"" + tableName + "\""; + previewByTable.computeIfAbsent( + schemaName + "." + tableName, + k -> { + try { + return fetchPreview( conn, fqName, 10 ); + } catch ( Exception e ) { + log.warn( "Preview failed for {}", fqName, e ); + return List.of(); + } + } ); + AbstractNode tableNode = new Node( "table", tableName ); Set pkCols = new HashSet<>(); @@ -178,48 +197,36 @@ public AbstractNode fetchMetadataTree() { } this.metadataRoot = root; + log.error( "Neue Preview ist geladen als: " + previewByTable.toString() ); return this.metadataRoot; } @Override - public Object fetchPreview( int limit ) { - Map>> preview = new LinkedHashMap<>(); - - PolyXid xid = PolyXid.generateLocalTransactionIdentifier( PUID.EMPTY_PUID, PUID.EMPTY_PUID ); - try { - ConnectionHandler ch = connectionFactory.getOrCreateConnectionHandler( xid ); - java.sql.Connection conn = ch.getStatement().getConnection(); - - String[] tables = settings.get( "tables" ).split( "," ); - for ( String str : tables ) { - String[] parts = str.split( "\\." ); - String schema = parts.length == 2 ? parts[0] : null; - String table = parts.length == 2 ? parts[1] : parts[0]; - - String fqName = (schema != null ? schema + "." 
: "") + table; - List> rows = new ArrayList<>(); - - try ( var stmt = conn.createStatement(); - var rs = stmt.executeQuery( "SELECT * FROM " + fqName + " LIMIT " + limit ) ) { - - var meta = rs.getMetaData(); - while ( rs.next() ) { - Map row = new HashMap<>(); - for ( int i = 1; i <= meta.getColumnCount(); i++ ) { - row.put( meta.getColumnName( i ), rs.getObject( i ) ); - } - rows.add( row ); - } + public List> fetchPreview( Connection conn, String fqName, int limit ) { + List> rows = new ArrayList<>(); + try ( Statement stmt = conn.createStatement(); + ResultSet rs = stmt.executeQuery( + "SELECT * FROM " + fqName + " LIMIT " + limit ) ) { + + ResultSetMetaData meta = rs.getMetaData(); + while ( rs.next() ) { + Map row = new LinkedHashMap<>(); + for ( int i = 1; i <= meta.getColumnCount(); i++ ) { + row.put( meta.getColumnName( i ), rs.getObject( i ) ); } - - preview.put( fqName, rows ); + rows.add( row ); } - } catch ( Exception e ) { - throw new GenericRuntimeException( "Error fetching preview data", e ); + } catch ( SQLException e ) { + log.warn( "Preview failed for {}", fqName, e ); + return List.of(); } + return rows; + } + - return preview; + public Object getPreview() { + return this.previewByTable; } From 10b6b46a728f745eb157747b464ae832e4b86a55 Mon Sep 17 00:00:00 2001 From: romanost03 Date: Fri, 9 May 2025 17:27:01 +0200 Subject: [PATCH 22/68] Added SchemaFilter.java enum, so all unnecessary schemas while getting metadata information are filtered. --- .../db/adapter/java/SchemaFilter.java | 52 +++++++++++++++++++ 1 file changed, 52 insertions(+) create mode 100644 core/src/main/java/org/polypheny/db/adapter/java/SchemaFilter.java diff --git a/core/src/main/java/org/polypheny/db/adapter/java/SchemaFilter.java b/core/src/main/java/org/polypheny/db/adapter/java/SchemaFilter.java new file mode 100644 index 0000000000..9ee0d197e2 --- /dev/null +++ b/core/src/main/java/org/polypheny/db/adapter/java/SchemaFilter.java @@ -0,0 +1,52 @@ +/* + * Copyright 2019-2025 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.adapter.java; + +import java.util.Arrays; +import java.util.Set; + +public enum SchemaFilter { + + PostgreSQL( + Set.of( "pg_catalog", "information_schema" ) + ), + + MySQL( + Set.of( "information_schema", "mysql", "performance_schema" ) + ), + + MonetDB( + Set.of( "information_schema", "json", "logging", "profiler", "temp" ) + ), + + GENERIC( Set.of() ); + + public final Set ignoredSchemas; + + + SchemaFilter( Set ignoredSchemas ) { + this.ignoredSchemas = ignoredSchemas; + } + + + public static SchemaFilter forAdapter( String adapterName ) { + return Arrays.stream( values() ) + .filter( f -> f.name().equalsIgnoreCase( adapterName ) ) + .findFirst() + .orElse( GENERIC ); + } +} From 26c30804c1e6f1ed383a1eabcca54d76250eeda8 Mon Sep 17 00:00:00 2001 From: romanost03 Date: Sun, 18 May 2025 20:55:45 +0200 Subject: [PATCH 23/68] Changes in excel-adapter so deploying works and functionality as metadata provider is working. 
--- plugins/excel-adapter/build.gradle | 2 + .../db/adapter/excel/ExcelEnumerator.java | 20 +- .../db/adapter/excel/ExcelNamespace.java | 11 +- .../db/adapter/excel/ExcelSource.java | 276 +++++++++++++++++- 4 files changed, 297 insertions(+), 12 deletions(-) diff --git a/plugins/excel-adapter/build.gradle b/plugins/excel-adapter/build.gradle index 9c18326f76..e8370c2905 100644 --- a/plugins/excel-adapter/build.gradle +++ b/plugins/excel-adapter/build.gradle @@ -6,6 +6,8 @@ dependencies { compileOnly project(":plugins:sql-language") implementation group: "org.apache.poi", name: "poi", version: poi_version + implementation "org.apache.poi:poi-ooxml:5.2.5" + implementation "org.apache.poi:poi-ooxml-full:5.2.5" // --- Test Compile --- testImplementation project(path: ":core", configuration: "tests") diff --git a/plugins/excel-adapter/src/main/java/org/polypheny/db/adapter/excel/ExcelEnumerator.java b/plugins/excel-adapter/src/main/java/org/polypheny/db/adapter/excel/ExcelEnumerator.java index bfbcac32f6..6b4eca3d9c 100644 --- a/plugins/excel-adapter/src/main/java/org/polypheny/db/adapter/excel/ExcelEnumerator.java +++ b/plugins/excel-adapter/src/main/java/org/polypheny/db/adapter/excel/ExcelEnumerator.java @@ -457,24 +457,26 @@ public PolyValue[] convertRow( Row row ) { } - public PolyValue[] convertNormalRow( Row row ) { - Iterator cells = row.cellIterator(); + public PolyValue[] convertNormalRow(Row row) { final PolyValue[] objects = new PolyValue[fields.length]; - while ( cells.hasNext() ) { - Cell cell = cells.next(); - int field = fields[cell.getColumnIndex()] - 1; - objects[field] = convert( fieldTypes[field], cell ); + for (int i = 0; i < fields.length; i++) { + Cell cell = row.getCell(i); + objects[i] = convert(fieldTypes[i], cell); } return objects; } - - public PolyValue[] convertStreamRow( Row row ) { + public PolyValue[] convertStreamRow(Row row) { final PolyValue[] objects = new PolyValue[fields.length + 1]; - objects[0] = PolyLong.of( System.currentTimeMillis() ); + objects[0] = PolyLong.of(System.currentTimeMillis()); + for (int i = 0; i < fields.length; i++) { + Cell cell = row.getCell(i); + objects[i + 1] = convert(fieldTypes[i], cell); + } return objects; } + } diff --git a/plugins/excel-adapter/src/main/java/org/polypheny/db/adapter/excel/ExcelNamespace.java b/plugins/excel-adapter/src/main/java/org/polypheny/db/adapter/excel/ExcelNamespace.java index e22ef083ca..f67584158e 100644 --- a/plugins/excel-adapter/src/main/java/org/polypheny/db/adapter/excel/ExcelNamespace.java +++ b/plugins/excel-adapter/src/main/java/org/polypheny/db/adapter/excel/ExcelNamespace.java @@ -45,7 +45,7 @@ public class ExcelNamespace extends Namespace { private final URL directoryUrl; private final ExcelTable.Flavor flavor; private final Map tableMap = new HashMap<>(); - private final String sheet; + private String sheet; /** @@ -78,7 +78,14 @@ public ExcelTable createExcelTable( PhysicalTable table, ExcelSource excelSource fieldIds.add( column.position ); } - String excelFileName = excelSource.sheetName; + // String excelFileName = excelSource.sheetName; + String[] parts = table.name.split("_", 2); + String filePart = parts[0]; + String sheetPart = parts.length > 1 ? 
parts[1] : ""; + + String excelFileName = filePart + ".xlsx"; + this.sheet = sheetPart; + Source source; try { diff --git a/plugins/excel-adapter/src/main/java/org/polypheny/db/adapter/excel/ExcelSource.java b/plugins/excel-adapter/src/main/java/org/polypheny/db/adapter/excel/ExcelSource.java index 9b64d91dc9..f63e3b8c96 100644 --- a/plugins/excel-adapter/src/main/java/org/polypheny/db/adapter/excel/ExcelSource.java +++ b/plugins/excel-adapter/src/main/java/org/polypheny/db/adapter/excel/ExcelSource.java @@ -21,19 +21,26 @@ import java.io.IOException; import java.net.MalformedURLException; import java.net.URL; +import java.nio.file.Paths; +import java.sql.Connection; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; +import java.util.Optional; import java.util.Set; import java.util.stream.Collectors; +import com.fasterxml.jackson.core.type.TypeReference; +import com.fasterxml.jackson.databind.ObjectMapper; import lombok.Getter; import lombok.experimental.Delegate; import lombok.extern.slf4j.Slf4j; import org.apache.poi.ss.usermodel.Cell; +import org.apache.poi.ss.usermodel.DateUtil; import org.apache.poi.ss.usermodel.Row; import org.apache.poi.ss.usermodel.Sheet; import org.apache.poi.ss.usermodel.Workbook; @@ -59,6 +66,10 @@ import org.polypheny.db.information.InformationGroup; import org.polypheny.db.information.InformationTable; import org.polypheny.db.prepare.Context; +import org.polypheny.db.schemaDiscovery.AbstractNode; +import org.polypheny.db.schemaDiscovery.AttributeNode; +import org.polypheny.db.schemaDiscovery.MetadataProvider; +import org.polypheny.db.schemaDiscovery.Node; import org.polypheny.db.transaction.PolyXid; import org.polypheny.db.type.PolyType; import org.polypheny.db.util.Source; @@ -76,7 +87,10 @@ @AdapterSettingString(name = "sheetName", description = "default to read the first sheet", defaultValue = "", required = false) @AdapterSettingInteger(name = "maxStringLength", defaultValue = 255, position = 2, description = "Which length (number of characters including whitespace) should be used for the varchar columns. Make sure this is equal or larger than the longest string in any of the columns.") -public class ExcelSource extends DataSource { +public class ExcelSource extends DataSource implements MetadataProvider { + + public AbstractNode metadataRoot; + private Map>> previewByTable = new LinkedHashMap<>(); @Delegate(excludes = Excludes.class) private final RelationalScanDelegate delegate; @@ -91,6 +105,7 @@ public class ExcelSource extends DataSource { public ExcelSource( final long storeId, final String uniqueName, final Map settings, final DeployMode mode ) { super( storeId, uniqueName, settings, mode, true, new RelAdapterCatalog( storeId ) ); + log.error( settings.get( "directory" ) ); this.connectionMethod = settings.containsKey( "method" ) ? ConnectionMethod.from( settings.get( "method" ) ) : ConnectionMethod.UPLOAD; // Validate maxStringLength setting @@ -111,8 +126,27 @@ public ExcelSource( final long storeId, final String uniqueName, final Map settings ) { String dir = settings.get( "directory" ); + log.error( "Directory kommt an als: " + settings.get( "directory" ) ); + + if ( dir != null && dir.trim().startsWith( "[" ) ) { + try { + List list = new ObjectMapper() + .readValue( dir, new TypeReference>() { + } ); + dir = list.isEmpty() ? 
null : list.get( 0 ); + log.error( "Directory nach Parsing: " + dir ); + } catch ( IOException e ) { + throw new GenericRuntimeException( "Cannot parse directory JSON", e ); + } + } + if ( connectionMethod == ConnectionMethod.LINK ) { dir = settings.get( "directoryName" ); + log.error( "DirectoryName kommt an als: " + settings.get( "directoryName" ) ); + } + + if ( dir == null ) { + throw new GenericRuntimeException( "Directory must not be null" ); } if ( dir.startsWith( "classpath://" ) ) { @@ -368,6 +402,246 @@ private void addInformationExportedColumns() { } + @Override + public AbstractNode fetchMetadataTree() { + + String filePath = "C:/Users/roman/Desktop/Mappe1.xlsx"; + String mappeName = "Workbook"; + + AbstractNode root = new Node( "excel", mappeName ); + try ( Workbook wb = WorkbookFactory.create( new File( filePath ) ) ) { + + for ( Sheet sheet : wb ) { + + String sheetName = sheet.getSheetName(); + AbstractNode sheetNode = new Node( "sheet", sheetName ); + + Row header = sheet.getRow( sheet.getFirstRowNum() ); + if ( header == null ) { + continue; + } + for ( int c = 0; c < header.getLastCellNum(); c++ ) { + Cell cell = header.getCell( c ); + String colName = getCellValueAsString( cell, "COL_" + (c + 1) ); + + AbstractNode colNode = new AttributeNode( "column", colName ); + colNode.addProperty( "type", inferType( sheet, c, header.getRowNum() + 1, 20 ) ); + colNode.addProperty( "nullable", true ); + + sheetNode.addChild( colNode ); + } + + String fqName = mappeName + "." + sheetName; + List> rows = fetchPreview( null, fqName, 10 ); + this.previewByTable.put( fqName, rows ); + + root.addChild( sheetNode ); + } + + } catch ( IOException e ) { + throw new RuntimeException( "Failed to read Excel metadata: " + filePath, e ); + } + + this.metadataRoot = root; + return metadataRoot; + } + + + private String inferType( Sheet sheet, int colIndex, int startRow, int maxRows ) { + int checked = 0; + for ( int r = startRow; r <= sheet.getLastRowNum() && checked < maxRows; r++ ) { + Row row = sheet.getRow( r ); + if ( row == null ) { + continue; + } + Cell cell = row.getCell( colIndex ); + if ( cell == null ) { + continue; + } + + switch ( cell.getCellType() ) { + case NUMERIC: + if ( DateUtil.isCellDateFormatted( cell ) ) { + return "DATE"; + } + return "DOUBLE"; + case STRING: + return "STRING"; + case BOOLEAN: + return "BOOLEAN"; + default: + continue; + } + } + return "STRING"; + } + + + private String getCellValueAsString( Cell cell, String fallback ) { + if ( cell == null ) { + return fallback; + } + try { + return switch ( cell.getCellType() ) { + case STRING -> cell.getStringCellValue(); + case NUMERIC -> String.valueOf( cell.getNumericCellValue() ); + case BOOLEAN -> String.valueOf( cell.getBooleanCellValue() ); + case FORMULA -> cell.getCellFormula(); + default -> fallback; + }; + } catch ( Exception e ) { + return fallback; + } + } + + + @Override + public List> fetchPreview( Connection conn, String fqName, int limit ) { + + String[] parts = fqName.split( "\\.", 2 ); + String sheetName = parts.length == 2 ? 
parts[1] : parts[0]; + String filePath = "C:/Users/roman/Desktop/Mappe1.xlsx"; + + List> rows = new ArrayList<>(); + + try ( Workbook wb = WorkbookFactory.create( new File( filePath ) ) ) { + + Sheet sheet = wb.getSheet( sheetName ); + if ( sheet == null ) { + log.warn( "Sheet {} not found in {}", sheetName, filePath ); + return List.of(); + } + + Row header = sheet.getRow( sheet.getFirstRowNum() ); + if ( header == null ) { + return List.of(); + } + + List colNames = new ArrayList<>(); + for ( int c = 0; c < header.getLastCellNum(); c++ ) { + colNames.add( getCellValueAsString( header.getCell( c ), "COL_" + (c + 1) ) ); + } + + int first = header.getRowNum() + 1; + int last = Math.min( sheet.getLastRowNum(), first + limit - 1 ); + + for ( int r = first; r <= last; r++ ) { + Row dataRow = sheet.getRow( r ); + if ( dataRow == null ) { + continue; + } + + Map map = new LinkedHashMap<>(); + for ( int c = 0; c < colNames.size(); c++ ) { + map.put( colNames.get( c ), extractCellValue( dataRow.getCell( c ) ) ); + } + rows.add( map ); + } + + } catch ( IOException e ) { + throw new RuntimeException( "Failed to read Excel preview: " + filePath, e ); + } + + return rows; + } + + + private Object extractCellValue( Cell cell ) { + if ( cell == null ) { + return null; + } + return switch ( cell.getCellType() ) { + case STRING -> cell.getStringCellValue(); + case NUMERIC -> DateUtil.isCellDateFormatted( cell ) + ? cell.getDateCellValue() + : cell.getNumericCellValue(); + case BOOLEAN -> cell.getBooleanCellValue(); + case FORMULA -> cell.getCellFormula(); + case BLANK -> null; + default -> cell.toString(); + }; + } + + + @Override + public void markSelectedAttributes( List selectedPaths ) { + List> attributePaths = new ArrayList<>(); + + for ( String path : selectedPaths ) { + String cleanPath = path.replaceFirst( " ?:.*$", "" ).trim(); + + List segments = Arrays.asList( cleanPath.split( "\\." 
) );
+ if ( !segments.isEmpty() && segments.get( 0 ).equals( metadataRoot.getName() ) ) {
+ segments = segments.subList( 1, segments.size() );
+ }
+
+ attributePaths.add( segments );
+ }
+
+ for ( List<String> pathSegments : attributePaths ) {
+ AbstractNode current = metadataRoot;
+
+ for ( int i = 0; i < pathSegments.size(); i++ ) {
+ String segment = pathSegments.get( i );
+
+ if ( i == pathSegments.size() - 1 ) {
+ Optional<AbstractNode> attrNodeOpt = current.getChildren().stream()
+ .filter( c -> c instanceof AttributeNode && segment.equals( c.getName() ) )
+ .findFirst();
+
+ if ( attrNodeOpt.isPresent() ) {
+ ((AttributeNode) attrNodeOpt.get()).setSelected( true );
+ log.info( "βœ… Attribute selected: " + String.join( ".", pathSegments ) );
+ } else {
+ log.warn( "❌ Attribute not found: " + String.join( ".", pathSegments ) );
+ }
+
+ } else {
+ Optional<AbstractNode> childOpt = current.getChildren().stream()
+ .filter( c -> segment.equals( c.getName() ) )
+ .findFirst();
+
+ if ( childOpt.isPresent() ) {
+ current = childOpt.get();
+ } else {
+ log.warn( "❌ Segment not found: " + segment + " in path " + String.join( ".", pathSegments ) );
+ break;
+ }
+ }
+ }
+ }
+
+ }
+
+
+ @Override
+ public void printTree( AbstractNode node, int depth ) {
+ if ( node == null ) {
+ node = this.metadataRoot;
+ }
+ System.out.println( " ".repeat( depth ) + node.getType() + ": " + node.getName() );
+ for ( Map.Entry<String, Object> entry : node.getProperties().entrySet() ) {
+ System.out.println( " ".repeat( depth + 1 ) + "- " + entry.getKey() + ": " + entry.getValue() );
+ }
+ for ( AbstractNode child : node.getChildren() ) {
+ printTree( child, depth + 1 );
+ }
+
+ }
+
+
+ @Override
+ public void setRoot( AbstractNode root ) {
+ this.metadataRoot = root;
+ }
+
+
+ @Override
+ public Object getPreview() {
+ return this.previewByTable;
+ }
+
+
 @SuppressWarnings("unused")
 private interface Excludes {
From 8dd96d0c5021f797735c7103f58bdc67f531dfd0 Mon Sep 17 00:00:00 2001
From: romanost03
Date: Sun, 18 May 2025 20:56:59 +0200
Subject: [PATCH 24/68] The MySQL and Oracle adapters now calculate the right
 preview for a preview request.

---
 .../db/adapter/jdbc/MysqlSourcePlugin.java | 244 +++++++++++++++++-
 .../source/OracleSource.java | 129 +++++----
 2 files changed, 317 insertions(+), 56 deletions(-)

diff --git a/plugins/mysql-adapter/src/main/java/org/polypheny/db/adapter/jdbc/MysqlSourcePlugin.java b/plugins/mysql-adapter/src/main/java/org/polypheny/db/adapter/jdbc/MysqlSourcePlugin.java
index f2d48a06d5..0d6cb3f2c0 100644
--- a/plugins/mysql-adapter/src/main/java/org/polypheny/db/adapter/jdbc/MysqlSourcePlugin.java
+++ b/plugins/mysql-adapter/src/main/java/org/polypheny/db/adapter/jdbc/MysqlSourcePlugin.java
@@ -17,9 +17,19 @@
 package org.polypheny.db.adapter.jdbc;
+import java.sql.Connection;
+import java.sql.DatabaseMetaData;
+import java.sql.ResultSet;
 import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.Optional;
+import java.util.Set;
 import java.util.stream.Collectors;
 import lombok.extern.slf4j.Slf4j;
 import org.polypheny.db.adapter.AdapterManager;
@@ -28,15 +38,24 @@
 import org.polypheny.db.adapter.annotations.AdapterSettingInteger;
 import org.polypheny.db.adapter.annotations.AdapterSettingList;
 import org.polypheny.db.adapter.annotations.AdapterSettingString;
+import org.polypheny.db.adapter.jdbc.connection.ConnectionHandler;
+import 
org.polypheny.db.adapter.jdbc.connection.ConnectionHandlerException; import org.polypheny.db.adapter.jdbc.sources.AbstractJdbcSource; import org.polypheny.db.catalog.entity.allocation.AllocationTableWrapper; import org.polypheny.db.catalog.entity.logical.LogicalTableWrapper; import org.polypheny.db.catalog.entity.physical.PhysicalEntity; import org.polypheny.db.catalog.entity.physical.PhysicalTable; +import org.polypheny.db.catalog.exceptions.GenericRuntimeException; import org.polypheny.db.plugins.PluginContext; import org.polypheny.db.plugins.PolyPlugin; import org.polypheny.db.prepare.Context; +import org.polypheny.db.schemaDiscovery.AbstractNode; +import org.polypheny.db.schemaDiscovery.AttributeNode; +import org.polypheny.db.schemaDiscovery.MetadataProvider; +import org.polypheny.db.schemaDiscovery.Node; import org.polypheny.db.sql.language.dialect.MysqlSqlDialect; +import org.polypheny.db.transaction.PUID; +import org.polypheny.db.transaction.PolyXid; @SuppressWarnings("unused") public class MysqlSourcePlugin extends PolyPlugin { @@ -77,19 +96,23 @@ public void stop() { description = "Hostname or IP address of the remote MariaDB / MySQL instance.") @AdapterSettingInteger(name = "port", defaultValue = 3306, position = 2, description = "JDBC port number on the remote MariaDB / MySQL instance.") - @AdapterSettingString(name = "database", defaultValue = "polypheny", position = 3, + @AdapterSettingString(name = "database", defaultValue = "test", position = 3, description = "Name of the database to connect to.") - @AdapterSettingString(name = "username", defaultValue = "polypheny", position = 4, + @AdapterSettingString(name = "username", defaultValue = "root", position = 4, description = "Username to be used for authenticating at the remote instance.") - @AdapterSettingString(name = "password", defaultValue = "polypheny", position = 5, + @AdapterSettingString(name = "password", defaultValue = "roman123", position = 5, description = "Password to be used for authenticating at the remote instance.") @AdapterSettingInteger(name = "maxConnections", defaultValue = 25, description = "Maximum number of concurrent JDBC connections.") @AdapterSettingList(name = "transactionIsolation", options = { "SERIALIZABLE", "READ_UNCOMMITTED", "READ_COMMITTED", "REPEATABLE_READ" }, defaultValue = "SERIALIZABLE", description = "Which level of transaction isolation should be used.") - @AdapterSettingString(name = "tables", defaultValue = "foo,bar", + @AdapterSettingString(name = "tables", defaultValue = "test.testtable", description = "List of tables which should be imported. 
The names must to be separated by a comma.") - public static class MysqlSource extends AbstractJdbcSource { + public static class MysqlSource extends AbstractJdbcSource implements MetadataProvider { + + public AbstractNode metadataRoot; + private Map>> previewByTable = new LinkedHashMap<>(); + public MysqlSource( final long storeId, final String uniqueName, final Map settings, final DeployMode mode ) { super( storeId, uniqueName, settings, mode, "org.mariadb.jdbc.Driver", MysqlSqlDialect.DEFAULT, false ); @@ -99,7 +122,8 @@ public MysqlSource( final long storeId, final String uniqueName, final Map createTable( Context context, LogicalTableWrapper logical, AllocationTableWrapper allocation ) { PhysicalTable table = adapterCatalog.createTable( - logical.table.getNamespaceName(), + // logical.table.getNamespaceName(), + "test", logical.table.name, logical.columns.stream().collect( Collectors.toMap( c -> c.id, c -> c.name ) ), logical.table, @@ -142,6 +166,214 @@ protected boolean requiresSchema() { return false; } + + @Override + public AbstractNode fetchMetadataTree() { + String dbName = settings.get( "database" ); + Node root = new Node( "relational", dbName ); + + PolyXid xid = PolyXid.generateLocalTransactionIdentifier( PUID.EMPTY_PUID, PUID.EMPTY_PUID ); + + try { + ConnectionHandler handler = connectionFactory.getOrCreateConnectionHandler( xid ); + java.sql.Statement stmt = handler.getStatement(); + Connection conn = stmt.getConnection(); + DatabaseMetaData meta = conn.getMetaData(); + + try ( ResultSet schemas = meta.getCatalogs() ) { + while ( schemas.next() ) { + + String schemaName = requiresSchema() + ? schemas.getString( "TABLE_SCHEM" ) + : schemas.getString( "TABLE_CAT" ); + + AbstractNode schemaNode = new Node( "schema", schemaName ); + + try ( ResultSet tables = meta.getTables( + schemaName, + null, + "%", + new String[]{ "TABLE" } + ) ) { + while ( tables.next() ) { + + String tableName = tables.getString( "TABLE_NAME" ); + AbstractNode tableNode = new Node( "table", tableName ); + + Set pkCols = new HashSet<>(); + try ( ResultSet pk = meta.getPrimaryKeys( + schemaName, + null, + tableName ) ) { + while ( pk.next() ) { + pkCols.add( pk.getString( "COLUMN_NAME" ) ); + } + } + + try ( ResultSet cols = meta.getColumns( + schemaName, + null, + tableName, + "%" ) ) { + while ( cols.next() ) { + + String colName = cols.getString( "COLUMN_NAME" ); + String typeName = cols.getString( "TYPE_NAME" ); + boolean nullable = cols.getInt( "NULLABLE" ) == DatabaseMetaData.columnNullable; + boolean primary = pkCols.contains( colName ); + + AbstractNode colNode = new AttributeNode( "column", colName ); + colNode.addProperty( "type", typeName ); + colNode.addProperty( "nullable", nullable ); + colNode.addProperty( "primaryKey", primary ); + + Integer len = (Integer) cols.getInt( "COLUMN_SIZE" ); + Integer scale = (Integer) cols.getInt( "DECIMAL_DIGITS" ); + if ( len != null ) { + colNode.addProperty( "length", len ); + } + if ( scale != null ) { + colNode.addProperty( "scale", scale ); + } + + tableNode.addChild( colNode ); + } + } + + schemaNode.addChild( tableNode ); + } + } + + root.addChild( schemaNode ); + } + } + + } catch ( SQLException | ConnectionHandlerException ex ) { + throw new GenericRuntimeException( "Error while fetching metadata tree", ex ); + } + + this.metadataRoot = root; + return this.metadataRoot; + } + + + @Override + public List> fetchPreview( Connection conn, String fqName, int limit ) { + return List.of(); + } + + + @Override + public void markSelectedAttributes( List 
selectedPaths ) {
+ List<List<String>> attributePaths = new ArrayList<>();
+
+ for ( String path : selectedPaths ) {
+ String cleanPath = path.replaceFirst( " ?:.*$", "" ).trim();
+
+ List<String> segments = Arrays.asList( cleanPath.split( "\\." ) );
+ if ( !segments.isEmpty() && segments.get( 0 ).equals( metadataRoot.getName() ) ) {
+ segments = segments.subList( 1, segments.size() );
+ }
+
+ attributePaths.add( segments );
+ }
+
+ for ( List<String> pathSegments : attributePaths ) {
+ AbstractNode current = metadataRoot;
+
+ for ( int i = 0; i < pathSegments.size(); i++ ) {
+ String segment = pathSegments.get( i );
+
+ if ( i == pathSegments.size() - 1 ) {
+ Optional<AbstractNode> attrNodeOpt = current.getChildren().stream()
+ .filter( c -> c instanceof AttributeNode && segment.equals( c.getName() ) )
+ .findFirst();
+
+ if ( attrNodeOpt.isPresent() ) {
+ ((AttributeNode) attrNodeOpt.get()).setSelected( true );
+ log.info( "βœ… Attribute selected: " + String.join( ".", pathSegments ) );
+ } else {
+ log.warn( "❌ Attribute not found: " + String.join( ".", pathSegments ) );
+ }
+
+ } else {
+ Optional<AbstractNode> childOpt = current.getChildren().stream()
+ .filter( c -> segment.equals( c.getName() ) )
+ .findFirst();
+
+ if ( childOpt.isPresent() ) {
+ current = childOpt.get();
+ } else {
+ log.warn( "❌ Segment not found: " + segment + " in path " + String.join( ".", pathSegments ) );
+ break;
+ }
+ }
+ }
+ }
+ }
+
+
+ @Override
+ public void printTree( AbstractNode node, int depth ) {
+ if ( node == null ) {
+ node = this.metadataRoot;
+ }
+ System.out.println( " ".repeat( depth ) + node.getType() + ": " + node.getName() );
+ for ( Map.Entry<String, Object> entry : node.getProperties().entrySet() ) {
+ System.out.println( " ".repeat( depth + 1 ) + "- " + entry.getKey() + ": " + entry.getValue() );
+ }
+ for ( AbstractNode child : node.getChildren() ) {
+ printTree( child, depth + 1 );
+ }
+ }
+
+
+ @Override
+ public void setRoot( AbstractNode root ) {
+ this.metadataRoot = root;
+ }
+
+
+ @Override
+ public Object getPreview() {
+ Map<String, List<Map<String, Object>>> preview = new LinkedHashMap<>();
+
+ PolyXid xid = PolyXid.generateLocalTransactionIdentifier( PUID.EMPTY_PUID, PUID.EMPTY_PUID );
+ try {
+ ConnectionHandler ch = connectionFactory.getOrCreateConnectionHandler( xid );
+ java.sql.Connection conn = ch.getStatement().getConnection();
+
+ String[] tables = { "testtable" };
+ for ( String str : tables ) {
+ String[] parts = str.split( "\\." );
+ String schema = parts.length == 2 ? parts[0] : null;
+ String table = parts.length == 2 ? parts[1] : parts[0];
+
+ String fqName = (schema != null ? schema + "."
: "") + table; + List> rows = new ArrayList<>(); + + try ( var stmt = conn.createStatement(); + var rs = stmt.executeQuery( "SELECT * FROM TESTTABLE " + " LIMIT " + 10 ) ) { + + var meta = rs.getMetaData(); + while ( rs.next() ) { + Map row = new HashMap<>(); + for ( int i = 1; i <= meta.getColumnCount(); i++ ) { + row.put( meta.getColumnName( i ), rs.getObject( i ) ); + } + rows.add( row ); + } + } + + preview.put( fqName, rows ); + } + } catch ( Exception e ) { + throw new GenericRuntimeException( "Error fetching preview data", e ); + } + + return preview; + } + } } diff --git a/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/source/OracleSource.java b/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/source/OracleSource.java index e4f8bbaffa..3dab1cc566 100644 --- a/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/source/OracleSource.java +++ b/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/source/OracleSource.java @@ -107,7 +107,9 @@ protected String getConnectionUrl( final String dbHostname, final int dbPort, fi @Override - protected boolean requiresSchema() { return true; } + protected boolean requiresSchema() { + return true; + } @Override @@ -154,7 +156,33 @@ public Map> getExportedColumns() { Connection connection = statement.getConnection(); DatabaseMetaData dbmd = connection.getMetaData(); - String[] tables = settings.get( "tables" ).split( "," ); + String[] tables; + for ( Map.Entry entry : settings.entrySet() ) { + log.error( "Entry: {} = {}", entry.getKey(), entry.getValue() ); + } + + if ( !settings.containsKey( "selectedAttributes" ) || settings.get( "selectedAttributes" ).equals( "" ) || settings.get( "selectedAttributes" ).isEmpty() || settings.get( "selectedAttributes" ) == null ) { + tables = settings.get( "tables" ).split( "," ); + } else { + String[] names2 = settings.get( "selectedAttributes" ).split( "," ); + Set tableNames = new HashSet<>(); + + for ( String s : names2 ) { + String attr = s.split( " : " )[0]; + + String[] parts = attr.split( "\\." ); + if ( parts.length >= 3 ) { + String tableName = parts[1] + "." + parts[2]; + + if ( !requiresSchema() ) { + tableNames.add( parts[2] ); + } else { + tableNames.add( tableName ); + } + } + } + tables = tableNames.toArray( new String[0] ); + } for ( String str : tables ) { String[] names = str.split( "\\." 
); @@ -246,7 +274,7 @@ public AbstractNode fetchMetadataTree() { ConnectionHandler h = connectionFactory.getOrCreateConnectionHandler( xid ); DatabaseMetaData m = h.getStatement().getConnection().getMetaData(); - String currentUser = m.getUserName(); // aktueller Owner (=β€―Schema) + String currentUser = m.getUserName(); try ( ResultSet schemas = m.getSchemas() ) { while ( schemas.next() ) { @@ -257,21 +285,19 @@ public AbstractNode fetchMetadataTree() { m.getTables( null, schemaName, "%", new String[]{ "TABLE" } ) ) { while ( tables.next() ) { - String owner = tables.getString( "TABLE_SCHEM" ); // gleichβ€―schemaName + String owner = tables.getString( "TABLE_SCHEM" ); String tableName = tables.getString( "TABLE_NAME" ); - /* (a) nur Objekte des eingeloggten Users */ if ( !owner.equalsIgnoreCase( currentUser ) ) { continue; } - /* (b) interne Oracle‑Tabellen ausblenden */ + if ( tableName.contains( "$" ) || ORACLE_INTERNAL.matcher( tableName ).find() ) { continue; } Node tableNode = new Node( "table", tableName ); - /* PrimΓ€rschlΓΌsselspalten zwischenspeichern ---------------------------------- */ Set pkCols = new HashSet<>(); try ( ResultSet pk = m.getPrimaryKeys( null, schemaName, tableName ) ) { while ( pk.next() ) { @@ -279,7 +305,6 @@ public AbstractNode fetchMetadataTree() { } } - /* Spalten lesen ------------------------------------------------------------- */ try ( ResultSet cols = m.getColumns( null, schemaName, tableName, "%" ) ) { @@ -307,13 +332,11 @@ public AbstractNode fetchMetadataTree() { tableNode.addChild( colNode ); } } - /* nur Tabellen mit mindestens einer sichtbaren Spalte ΓΌbernehmen */ if ( !tableNode.getChildren().isEmpty() ) { schemaNode.addChild( tableNode ); } } } - /* Schema nur anhΓ€ngen, wenn mindestens eine Tabelle behalten wurde */ if ( !schemaNode.getChildren().isEmpty() ) { root.addChild( schemaNode ); } @@ -329,46 +352,8 @@ public AbstractNode fetchMetadataTree() { @Override - public Object fetchPreview( int limit ) { - Map>> preview = new LinkedHashMap<>(); - - PolyXid xid = PolyXid.generateLocalTransactionIdentifier( PUID.EMPTY_PUID, PUID.EMPTY_PUID ); - try { - ConnectionHandler ch = connectionFactory.getOrCreateConnectionHandler( xid ); - java.sql.Connection conn = ch.getStatement().getConnection(); - - String[] tables = settings.get( "tables" ).split( "," ); - for ( String str : tables ) { - String[] parts = str.split( "\\." ); - String schema = parts.length == 2 ? parts[0] : null; - String table = parts.length == 2 ? parts[1] : parts[0]; - - schema = schema.toUpperCase(); - table = table.toUpperCase(); - - String fqName = (schema != null ? schema + "." 
: "") + table; - List<Map<String, Object>> rows = new ArrayList<>(); - - try ( var stmt = conn.createStatement(); - var rs = stmt.executeQuery( "SELECT * FROM " + fqName + " FETCH FIRST " + limit + " ROWS ONLY" ) ) { - - var meta = rs.getMetaData(); - while ( rs.next() ) { - Map<String, Object> row = new HashMap<>(); - for ( int i = 1; i <= meta.getColumnCount(); i++ ) { - row.put( meta.getColumnName( i ), rs.getObject( i ) ); - } - rows.add( row ); - } - } - - preview.put( fqName, rows ); - } - } catch ( Exception e ) { - throw new GenericRuntimeException( "Error fetching preview data", e ); - } - - return preview; + public List<Map<String, Object>> fetchPreview( Connection conn, String fqName, int limit ) { + return List.of(); } @@ -443,4 +428,48 @@ public void setRoot( AbstractNode root ) { this.metadataRoot = root; } + + @Override + public Object getPreview() { + Map<String, List<Map<String, Object>>> preview = new LinkedHashMap<>(); + + PolyXid xid = PolyXid.generateLocalTransactionIdentifier( PUID.EMPTY_PUID, PUID.EMPTY_PUID ); + try { + ConnectionHandler ch = connectionFactory.getOrCreateConnectionHandler( xid ); + java.sql.Connection conn = ch.getStatement().getConnection(); + + String[] tables = {"system.test"}; + for ( String str : tables ) { + String[] parts = str.split( "\\." ); + String schema = parts.length == 2 ? parts[0] : null; + String table = parts.length == 2 ? parts[1] : parts[0]; + + schema = schema.toUpperCase(); + table = table.toUpperCase(); + + String fqName = (schema != null ? schema + "." : "") + table; + List<Map<String, Object>> rows = new ArrayList<>(); + + try ( var stmt = conn.createStatement(); + var rs = stmt.executeQuery( "SELECT * FROM " + fqName + " FETCH FIRST " + 10 + " ROWS ONLY" ) ) { + + var meta = rs.getMetaData(); + while ( rs.next() ) { + Map<String, Object> row = new HashMap<>(); + for ( int i = 1; i <= meta.getColumnCount(); i++ ) { + row.put( meta.getColumnName( i ), rs.getObject( i ) ); + } + rows.add( row ); + } + } + + preview.put( fqName, rows ); + } + } catch ( Exception e ) { + throw new GenericRuntimeException( "Error fetching preview data", e ); + } + + return preview; + } + } From 5e644613dd502c340d102e074217e8e95bcaad5d Mon Sep 17 00:00:00 2001 From: romanost03 Date: Sun, 18 May 2025 20:59:07 +0200 Subject: [PATCH 25/68] Modified code for a preview request, so Excel files are handled properly (prototype for now) --- .../db/adapter/java/AdapterTemplate.java | 5 +- .../java/org/polypheny/db/webui/Crud.java | 108 +++++++++++++++--- .../org/polypheny/db/webui/HttpServer.java | 9 +- .../webui/models/requests/PreviewRequest.java | 19 ++- 4 files changed, 109 insertions(+), 32 deletions(-) diff --git a/core/src/main/java/org/polypheny/db/adapter/java/AdapterTemplate.java b/core/src/main/java/org/polypheny/db/adapter/java/AdapterTemplate.java index 5201625675..b4c2b2b59f 100644 --- a/core/src/main/java/org/polypheny/db/adapter/java/AdapterTemplate.java +++ b/core/src/main/java/org/polypheny/db/adapter/java/AdapterTemplate.java @@ -131,9 +131,10 @@ public PreviewResult preview( Map<String, String> settings, int limit ) { log.info( "🎯 Adapter supports MetadataProvider. Fetching metadata and preview..." ); AbstractNode meta = mp.fetchMetadataTree(); String json = NodeSerializer.serializeNode( meta ).toString(); - Object rows = mp.fetchPreview( limit ); + // Object rows = mp.fetchPreview( limit ); + Object rows = mp.getPreview(); log.error( json ); - log.error( rows.toString() ); + // log.error( rows.toString() ); return new PreviewResult( json, rows ); } throw new GenericRuntimeException( "The adapter does not implement MetadataProvider."
); diff --git a/webui/src/main/java/org/polypheny/db/webui/Crud.java b/webui/src/main/java/org/polypheny/db/webui/Crud.java index 15925bb62d..79d911ea90 100644 --- a/webui/src/main/java/org/polypheny/db/webui/Crud.java +++ b/webui/src/main/java/org/polypheny/db/webui/Crud.java @@ -17,6 +17,7 @@ package org.polypheny.db.webui; +import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableList; @@ -880,14 +881,69 @@ void deleteTuple( final Context ctx ) { void sendConfirmation( final Context ctx ) { - log.info( "Sending confirmation" ); - // String result = "Angular confirmation message"; - //ctx.result( result ); try { - List dbs = PostgreSqlConnection.getDatabasesSchemasAndTables(); - ctx.json( dbs ); - } catch ( SQLException e ) { - System.err.println( "Fehler bei der Schema-Erkennung: " + e.getMessage() ); + initMultipart( ctx ); + if ( !ctx.isMultipartFormData() ) { + ctx.status( HttpCode.BAD_REQUEST ).result( "Preview requires multipart/form-data" ); + return; + } + + String body = IOUtils.toString( ctx.req.getPart( "body" ).getInputStream(), StandardCharsets.UTF_8 ); + PreviewRequest a = HttpServer.mapper.readValue( body, PreviewRequest.class ); + + Map<String, InputStream> inputStreams = new HashMap<>(); + for ( Part part : ctx.req.getParts() ) { + if ( !part.getName().equals( "body" ) ) { + inputStreams.put( part.getName(), part.getInputStream() ); + } + } + + log.info( "🔧 AdapterModel received:" ); + log.info( " 🔹 Name : {}", a.adapterName ); + log.info( " 🔹 Adapter : {}", a.adapterType ); + log.info( " 🔹 Limit : {}", a.limit ); + + log.info( "📦 Settings:" ); + for ( Map.Entry<String, String> entry : a.settings.entrySet() ) { + log.info( " - {}: {}", entry.getKey(), entry.getValue() ); + } + + if ( inputStreams.isEmpty() ) { + log.info( "📁 No files received."
); + } else { + log.info( "📁 Received files:" ); + for ( String file : inputStreams.keySet() ) { + log.info( " - File: {}", file ); + } + } + + AdapterTemplate template = AdapterManager.getAdapterTemplate( a.adapterName, a.adapterType ); + Map<String, AbstractAdapterSetting> allSettings = template.settings + .stream() + .collect( Collectors.toMap( e -> e.name, e -> e ) ); + + if ( a.settings.containsKey( "directory" ) ) { + List<String> fileNames; + try { + fileNames = HttpServer.mapper.readValue( a.settings.get( "directory" ), new TypeReference<List<String>>() { + } ); + } catch ( JsonProcessingException ex ) { + String raw = a.settings.get( "directory" ); + String cleaned = raw.replaceAll( "[\\[\\]\"]", "" ).trim(); + fileNames = Arrays.stream( cleaned.split( "," ) ).map( String::trim ).filter( s -> !s.isEmpty() ).toList(); + } + + String path = handleUploadFiles( inputStreams, fileNames, (AbstractAdapterSettingDirectory) allSettings.get( "directory" ), a ); + a.settings.put( "directory", path ); + } + + PreviewResult result = template.preview( a.settings, 10 ); + ctx.json( result ); + + + } catch ( Exception e ) { + log.error( "Error while processing the preview request", e ); + ctx.status( HttpCode.INTERNAL_SERVER_ERROR ).result( "Error during preview" ); } } @@ -2093,6 +2149,15 @@ public void getMetadataAndPreview( final Context ctx ) { AdapterTemplate template = AdapterTemplate.fromString( req.adapterName, req.adapterType ); log.error( "Row limit: {}", req.limit ); + Map<String, AbstractAdapterSetting> allSettings = template.settings.stream().collect( Collectors.toMap( e -> e.name, e -> e ) ); + + for ( Map.Entry<String, AbstractAdapterSetting> entry : allSettings.entrySet() ) { + log.error( "Key: {} Value: {}", entry.getKey(), entry.getValue() ); + if ( entry.getValue() instanceof AbstractAdapterSettingDirectory ) { + log.error( "Is a directory setting." ); + } + } + PreviewResult result = template.preview( req.settings, req.limit ); ctx.json( result ); @@ -2103,17 +2168,6 @@ public void getMetadataAndPreview( final Context ctx ) { } - public void processAttributes( final Context ctx ) { - try { - String req = ctx.body(); - log.error( "Attribut Daten" + req ); - } catch ( Exception e ) { - log.error( "Attribute Daten sind nicht vorhanden."
); - } - - } - - /** * Deploy a new adapter */ @@ -2262,6 +2316,24 @@ private static String handleUploadFiles( Map<String, InputStream> inputStreams, } + private static String handleUploadFiles( Map<String, InputStream> inputStreams, List<String> fileNames, AbstractAdapterSettingDirectory setting, PreviewRequest a ) { + for ( String fileName : fileNames ) { + setting.inputStreams.put( fileName, inputStreams.get( fileName ) ); + } + File path = PolyphenyHomeDirManager.getInstance().registerNewFolder( "data/csv/" + a.adapterName ); + for ( Entry<String, InputStream> is : setting.inputStreams.entrySet() ) { + try { + File file = new File( path, is.getKey() ); + log.info( "📁 Writing file: {}", file.getAbsolutePath() ); + FileUtils.copyInputStreamToFile( is.getValue(), file ); + } catch ( IOException e ) { + throw new GenericRuntimeException( e ); + } + } + return path.getAbsolutePath(); + } + + /** * Remove an existing storeId or source */ diff --git a/webui/src/main/java/org/polypheny/db/webui/HttpServer.java b/webui/src/main/java/org/polypheny/db/webui/HttpServer.java index 26b155544d..f889bcf95b 100644 --- a/webui/src/main/java/org/polypheny/db/webui/HttpServer.java +++ b/webui/src/main/java/org/polypheny/db/webui/HttpServer.java @@ -353,14 +353,7 @@ private void attachRoutes( Javalin webuiServer, Crud crud ) { webuiServer.get( "/product", ctx -> ctx.result( "Polypheny-DB" ) ); - /* - * Confirmation message sending to schema discovery UI - */ - webuiServer.post( "/confirm", crud::sendConfirmation ); - - webuiServer.post( "/previewTable", crud::getMetadataAndPreview ); - - webuiServer.post( "/sendSelectedMetadata", crud::processAttributes ); + webuiServer.post( "/previewTable", crud::sendConfirmation ); webuiServer.get( "/getPolyAlgRegistry", crud::getPolyAlgRegistry ); diff --git a/webui/src/main/java/org/polypheny/db/webui/models/requests/PreviewRequest.java b/webui/src/main/java/org/polypheny/db/webui/models/requests/PreviewRequest.java index 716966aa67..9029fcc426 100644 --- a/webui/src/main/java/org/polypheny/db/webui/models/requests/PreviewRequest.java +++ b/webui/src/main/java/org/polypheny/db/webui/models/requests/PreviewRequest.java @@ -16,23 +16,34 @@ package org.polypheny.db.webui.models.requests; +import com.fasterxml.jackson.annotation.JsonProperty; +import lombok.Data; +import lombok.NoArgsConstructor; import org.polypheny.db.catalog.entity.LogicalAdapter.AdapterType; import java.util.Map; +@Data public class PreviewRequest { + @JsonProperty public String adapterName; + + @JsonProperty public AdapterType adapterType; + + @JsonProperty public Map<String, String> settings; + + @JsonProperty public int limit; public PreviewRequest() { } public PreviewRequest( - String adapterName, - AdapterType adapterType, - Map<String, String> settings, - int rowLimit ) { + @JsonProperty("adapterName") String adapterName, + @JsonProperty("adapterType") AdapterType adapterType, + @JsonProperty("settings") Map<String, String> settings, + @JsonProperty("limit") int rowLimit ) { this.adapterName = adapterName; this.adapterType = adapterType; this.settings = settings; this.limit = rowLimit; } } From 60154178674ec021d4389276f870323c0ccb229f Mon Sep 17 00:00:00 2001 From: romanost03 Date: Sun, 18 May 2025 21:01:05 +0200 Subject: [PATCH 26/68] Code change in "getExportedColumns" in AbstractJdbcSource.java takes the table setting input if the user hasn't made a selection in the preview request (as long as the table setting is active). 
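In rough terms, the resolution introduced by this commit behaves as sketched below. This is an illustrative summary only; resolveTables() is not a method in this patch, and the "db.schema.table.column : TYPE" entry format is taken from the existing selectedAttributes handling:

    // Hypothetical helper mirroring the new getExportedColumns() logic
    // (assumes java.util.* imports).
    static String[] resolveTables( Map<String, String> settings, boolean requiresSchema ) {
        String selected = settings.get( "selectedAttributes" );
        if ( selected == null || selected.isEmpty() ) {
            // No selection made in the preview request: fall back to the "tables" setting.
            return settings.get( "tables" ).split( "," );
        }
        Set<String> tableNames = new HashSet<>();
        for ( String s : selected.split( "," ) ) {
            String attr = s.split( " : " )[0];      // strip the " : TYPE" suffix
            String[] parts = attr.split( "\\." );   // db.schema.table.column
            if ( parts.length >= 3 ) {
                tableNames.add( requiresSchema ? parts[1] + "." + parts[2] : parts[2] );
            }
        }
        return tableNames.toArray( new String[0] );
    }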
--- .../jdbc/sources/AbstractJdbcSource.java | 26 +++++++++++++------ 1 file changed, 18 insertions(+), 8 deletions(-) diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/sources/AbstractJdbcSource.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/sources/AbstractJdbcSource.java index e0ad2e0d79..7b679cb1f7 100644 --- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/sources/AbstractJdbcSource.java +++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/sources/AbstractJdbcSource.java @@ -217,22 +217,32 @@ public Map<String, List<ExportedColumn>> getExportedColumns() { DatabaseMetaData dbmd = connection.getMetaData(); String[] tables; - if (settings.get("selectedAttributes").equals("")){ + + for ( Map.Entry<String, String> entry : settings.entrySet() ) { + log.error( "Entry: {} = {}", entry.getKey(), entry.getValue() ); + } + + if ( settings.get( "selectedAttributes" ) == null || settings.get( "selectedAttributes" ).isEmpty() ) { tables = settings.get( "tables" ).split( "," ); } else { - String[] names2 = settings.get("selectedAttributes").split(","); + String[] names2 = settings.get( "selectedAttributes" ).split( "," ); Set<String> tableNames = new HashSet<>(); - for (String s : names2){ - String attr = s.split(" : ")[0]; + for ( String s : names2 ) { + String attr = s.split( " : " )[0]; - String[] parts = attr.split("\\."); - if (parts.length >= 3) { + String[] parts = attr.split( "\\." ); + if ( parts.length >= 3 ) { String tableName = parts[1] + "." + parts[2]; - tableNames.add(tableName); + + if ( !requiresSchema() ) { + tableNames.add( parts[2] ); + } else { + tableNames.add( tableName ); + } } } - tables = tableNames.toArray(new String[0]); + tables = tableNames.toArray( new String[0] ); } for ( String str : tables ) { String[] names = str.split( "\\." ); From 364d6b7dcaf30ecdee3fae235b8048fcabd284cd Mon Sep 17 00:00:00 2001 From: romanost03 Date: Fri, 23 May 2025 19:20:41 +0200 Subject: [PATCH 27/68] Added observer functionality to watch metadata changes for deployed adapters. Metadata changes are currently detected but not yet applied. 
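The detection cycle can be summarized as follows (condensed from AbstractPublisher.runCheck() in the diff below; the 30-second polling interval is hard-coded there):

    // Periodic check: serialize the freshly fetched metadata tree, hash it,
    // and compare against the hash cached at deployment time.
    void runCheck() {
        AbstractNode node = provider.fetchMetadataTree();
        String json = NodeSerializer.serializeNode( node ).toString();
        String hash = new MetadataHasher().hash( json );  // SHA-256 hex digest
        String last = HashCache.getInstance().getHash( provider.getUniqueName() );
        if ( last != null && !last.equals( hash ) ) {
            // Change detected; the listener is notified but does not yet apply it.
            listener.onMetadataChange( provider, node, hash );
        }
    }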
--- .../MetadataObserver/AbstractListener.java | 63 +++++++++++++ .../MetadataObserver/AbstractPublisher.java | 93 +++++++++++++++++++ .../adapter/MetadataObserver/HashCache.java | 48 ++++++++++ .../MetadataObserver/MetadataHasher.java | 51 ++++++++++ .../MetadataObserver/MetadataListener.java | 29 ++++++ .../MetadataObserver/MetadataPublisher.java | 27 ++++++ .../MetadataObserver/PublisherManager.java | 58 ++++++++++++ .../db/adapter/java/AdapterTemplate.java | 13 ++- .../org/polypheny/db/ddl/DdlManagerImpl.java | 15 +++ .../postgres/source/PostgresqlSource.java | 21 ++++- 10 files changed, 410 insertions(+), 8 deletions(-) create mode 100644 core/src/main/java/org/polypheny/db/adapter/MetadataObserver/AbstractListener.java create mode 100644 core/src/main/java/org/polypheny/db/adapter/MetadataObserver/AbstractPublisher.java create mode 100644 core/src/main/java/org/polypheny/db/adapter/MetadataObserver/HashCache.java create mode 100644 core/src/main/java/org/polypheny/db/adapter/MetadataObserver/MetadataHasher.java create mode 100644 core/src/main/java/org/polypheny/db/adapter/MetadataObserver/MetadataListener.java create mode 100644 core/src/main/java/org/polypheny/db/adapter/MetadataObserver/MetadataPublisher.java create mode 100644 core/src/main/java/org/polypheny/db/adapter/MetadataObserver/PublisherManager.java diff --git a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/AbstractListener.java b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/AbstractListener.java new file mode 100644 index 0000000000..62c842e3be --- /dev/null +++ b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/AbstractListener.java @@ -0,0 +1,63 @@ +/* + * Copyright 2019-2025 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.adapter.MetadataObserver; + +import lombok.Getter; +import lombok.extern.slf4j.Slf4j; +import org.polypheny.db.adapter.Adapter; +import org.polypheny.db.schemaDiscovery.AbstractNode; +import org.polypheny.db.schemaDiscovery.MetadataProvider; + +@Slf4j +public class AbstractListener
<P extends Adapter & MetadataProvider> implements MetadataListener<P>
{ + + private boolean available; + private AbstractNode currentNode; + private P adapter; + + + public AbstractListener() { + available = true; + currentNode = null; + this.adapter = null; + } + + + @Override + public void onMetadataChange( P adapter, AbstractNode node, String hash ) { + available ^= true; + node = this.currentNode; + this.adapter = adapter; + log.info( "Listener saved credentials of adapter and sends now Request to UI and applies changes on adapter metadata and metadata the listener is holding." ); + applyChange(); + } + + + @Override + public void applyChange() { + available ^= true; + log.info( "Changes are going to be applied" ); + } + + + @Override + public boolean isAvailable() { + return this.available; + } + +} + diff --git a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/AbstractPublisher.java b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/AbstractPublisher.java new file mode 100644 index 0000000000..6630ee8774 --- /dev/null +++ b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/AbstractPublisher.java @@ -0,0 +1,93 @@ +/* + * Copyright 2019-2025 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.adapter.MetadataObserver; + +import lombok.extern.slf4j.Slf4j; +import org.polypheny.db.adapter.Adapter; +import org.polypheny.db.schemaDiscovery.AbstractNode; +import org.polypheny.db.schemaDiscovery.MetadataProvider; +import org.polypheny.db.schemaDiscovery.NodeSerializer; +import java.util.List; +import java.util.Map; +import java.util.concurrent.CopyOnWriteArrayList; +import java.util.concurrent.Executors; +import java.util.concurrent.ScheduledExecutorService; + +@Slf4j +public class AbstractPublisher
<P extends Adapter & MetadataProvider>
implements MetadataPublisher { + + protected final P provider; + private final long intervalSeconds = 30; + private final ScheduledExecutorService scheduler = Executors.newSingleThreadScheduledExecutor(); + private MetadataListener listener; + private final MetadataHasher hasher = new MetadataHasher(); + private final HashCache cache = HashCache.getInstance(); + + + protected AbstractPublisher( P provider, MetadataListener listener ) { + this.provider = provider; + this.listener = listener; + } + + + @Override + public String getAdapterUniqueName() { + return provider.getUniqueName(); + } + + + @Override + public void start() { + scheduler.scheduleAtFixedRate( this::runCheck, 0, intervalSeconds, java.util.concurrent.TimeUnit.SECONDS ); + } + + + @Override + public void stop() { + scheduler.shutdown(); + } + + + @Override + public void runCheck() { + if ( !listener.isAvailable() ) return; + try { + AbstractNode node = provider.fetchMetadataTree(); + String fresh = NodeSerializer.serializeNode( node ).toString(); + + String hash = hasher.hash( fresh ); + String lastHash = cache.getHash( provider.getUniqueName() ); + + log.info("Fresh JSON: {}", fresh); + log.info( "Metadata hash at Observer-Check (Current adapter hash) : {}", lastHash ); + log.info( "Metadata hash at Observer-Check (Newest hash) : {}", hash ); + log.info("Key used during observer-check: {}", provider.getUniqueName()); + + + + if ( lastHash != null && !lastHash.equals( hash ) ) { + log.info( "Metadata of adapter {} changed. Sending new snapshot to UI.", provider.getUniqueName() ); + listener.onMetadataChange( provider, node, hash ); + } else { + log.info( "Metadata of adapter {} did not change.", provider.getUniqueName() ); + } + } catch ( Exception e ) { + throw new RuntimeException( "Error while checking current snapshot.", e ); + } + } + +} diff --git a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/HashCache.java b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/HashCache.java new file mode 100644 index 0000000000..c9903f95b5 --- /dev/null +++ b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/HashCache.java @@ -0,0 +1,48 @@ +/* + * Copyright 2019-2025 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.db.adapter.MetadataObserver; + +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; + +public class HashCache { + + private static final HashCache INSTANCE = new HashCache(); + + private final Map cache = new ConcurrentHashMap<>(); + + + private HashCache() { + } + + + public static HashCache getInstance() { + return INSTANCE; + } + + + public void put( String uniqueName, String hash ) { + this.cache.put( uniqueName, hash ); + } + + + public String getHash( String uniqueName ) { + return this.cache.get( uniqueName ); + } + + +} diff --git a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/MetadataHasher.java b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/MetadataHasher.java new file mode 100644 index 0000000000..931dee462f --- /dev/null +++ b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/MetadataHasher.java @@ -0,0 +1,51 @@ +/* + * Copyright 2019-2025 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.adapter.MetadataObserver; + +import com.fasterxml.jackson.databind.node.ObjectNode; +import java.nio.charset.StandardCharsets; +import java.security.MessageDigest; +import java.security.NoSuchAlgorithmException; + +public class MetadataHasher { + + private final MessageDigest digest; + + + public MetadataHasher() { + try { + this.digest = MessageDigest.getInstance( "SHA-256" ); + } catch ( NoSuchAlgorithmException e ) { + throw new RuntimeException( e ); + } + } + + + public String hash( String text ) { + byte[] bytes = text.getBytes( StandardCharsets.UTF_8 ); + byte[] hash = digest.digest(bytes); + + StringBuilder sb = new StringBuilder(); + for ( byte b : hash ) { + sb.append( String.format( "%02x", b ) ); + } + return sb.toString(); + + } + + +} diff --git a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/MetadataListener.java b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/MetadataListener.java new file mode 100644 index 0000000000..77c3be6402 --- /dev/null +++ b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/MetadataListener.java @@ -0,0 +1,29 @@ +/* + * Copyright 2019-2025 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.adapter.MetadataObserver; + +import org.polypheny.db.adapter.Adapter; +import org.polypheny.db.schemaDiscovery.AbstractNode; +import org.polypheny.db.schemaDiscovery.MetadataProvider; + +public interface MetadataListener
<P extends Adapter & MetadataProvider>
{ + + void onMetadataChange( P adapter, AbstractNode node, String hash ); + boolean isAvailable(); + void applyChange(); + +} diff --git a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/MetadataPublisher.java b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/MetadataPublisher.java new file mode 100644 index 0000000000..9af9f5cec1 --- /dev/null +++ b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/MetadataPublisher.java @@ -0,0 +1,27 @@ +/* + * Copyright 2019-2025 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.adapter.MetadataObserver; + +public interface MetadataPublisher { + + String getAdapterUniqueName(); + void start(); + void stop(); + void runCheck(); + + +} diff --git a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/PublisherManager.java b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/PublisherManager.java new file mode 100644 index 0000000000..26058a2477 --- /dev/null +++ b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/PublisherManager.java @@ -0,0 +1,58 @@ +/* + * Copyright 2019-2025 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.adapter.MetadataObserver; + +import lombok.extern.slf4j.Slf4j; +import org.polypheny.db.adapter.Adapter; +import org.polypheny.db.schemaDiscovery.MetadataProvider; +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; + +@Slf4j +public class PublisherManager { + + private final Map publishers = new ConcurrentHashMap<>(); + + private static final PublisherManager INSTANCE = new PublisherManager(); + + public static PublisherManager getInstance() { + return INSTANCE; + } + + private PublisherManager() { + } + + + public
<P extends Adapter & MetadataProvider>
void onAdapterDeploy( P adapter ) { + log.info( "Adapter {} is going to be registered for metadata publish.", adapter.getUniqueName() ); + if ( publishers.containsKey( adapter.getUniqueName() ) ) return; + MetadataListener listener = new AbstractListener(); + MetadataPublisher publisher = new AbstractPublisher<>( adapter, listener ); + publishers.put( adapter.getUniqueName(), publisher ); + publisher.start(); + } + + + public void onAdapterUndeploy( String uniqueName ) { + if ( publishers.containsKey( uniqueName ) ) { + publishers.get( uniqueName ).stop(); + publishers.remove( uniqueName ); + } + } + + +} diff --git a/core/src/main/java/org/polypheny/db/adapter/java/AdapterTemplate.java b/core/src/main/java/org/polypheny/db/adapter/java/AdapterTemplate.java index b4c2b2b59f..d8f451909d 100644 --- a/core/src/main/java/org/polypheny/db/adapter/java/AdapterTemplate.java +++ b/core/src/main/java/org/polypheny/db/adapter/java/AdapterTemplate.java @@ -33,13 +33,13 @@ import org.polypheny.db.adapter.DataStore; import org.polypheny.db.adapter.DeployMode; import org.polypheny.db.adapter.DeployMode.DeploySetting; +import org.polypheny.db.adapter.MetadataObserver.MetadataHasher; import org.polypheny.db.adapter.annotations.AdapterProperties; import org.polypheny.db.catalog.entity.LogicalAdapter.AdapterType; import org.polypheny.db.catalog.exceptions.GenericRuntimeException; import org.polypheny.db.docker.DockerManager; import org.polypheny.db.schemaDiscovery.AbstractNode; import org.polypheny.db.schemaDiscovery.MetadataProvider; -import org.polypheny.db.schemaDiscovery.Node; import org.polypheny.db.schemaDiscovery.NodeSerializer; @Slf4j @@ -114,7 +114,7 @@ public DataSource createEphemeral( Map settings ) { log.info( "Creating ephemeral adapter {} with name {}", clazz.getName(), previewName ); Adapter adapter = deployer.get( -1L, previewName, settings, DeployMode.REMOTE ); - if ( !(adapter instanceof DataSource ds ) ) { + if ( !(adapter instanceof DataSource ds) ) { throw new GenericRuntimeException( "The adapter does not implement DataSource." ); } @@ -124,13 +124,16 @@ public DataSource createEphemeral( Map settings ) { public PreviewResult preview( Map settings, int limit ) { DataSource tmp = createEphemeral( settings ); - log.info("Adapter class: {}", tmp.getClass().getName()); - log.info("Implements MetadataProvider: {}", tmp instanceof MetadataProvider); + log.info( "Adapter class: {}", tmp.getClass().getName() ); + log.info( "Implements MetadataProvider: {}", tmp instanceof MetadataProvider ); try { if ( tmp instanceof MetadataProvider mp ) { log.info( "🎯 Adapter supports MetadataProvider. Fetching metadata and preview..." 
); AbstractNode meta = mp.fetchMetadataTree(); String json = NodeSerializer.serializeNode( meta ).toString(); + MetadataHasher hasher = new MetadataHasher(); + String hash = hasher.hash( json ); + log.info( "Metadata hash at preview: {}", hash ); // Object rows = mp.fetchPreview( limit ); Object rows = mp.getPreview(); log.error( json ); @@ -147,10 +150,12 @@ public PreviewResult preview( Map settings, int limit ) { @Value public static class PreviewResult { + @JsonProperty String metadata; @JsonProperty Object preview; + } } diff --git a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java index 8929df4fd1..7d0f0c74ca 100644 --- a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java +++ b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java @@ -43,6 +43,9 @@ import org.polypheny.db.adapter.DataStore; import org.polypheny.db.adapter.DataStore.IndexMethodModel; import org.polypheny.db.adapter.DeployMode; +import org.polypheny.db.adapter.MetadataObserver.HashCache; +import org.polypheny.db.adapter.MetadataObserver.MetadataHasher; +import org.polypheny.db.adapter.MetadataObserver.PublisherManager; import org.polypheny.db.adapter.index.IndexManager; import org.polypheny.db.algebra.AlgCollation; import org.polypheny.db.algebra.AlgNode; @@ -112,6 +115,7 @@ import org.polypheny.db.routing.RoutingManager; import org.polypheny.db.schemaDiscovery.AbstractNode; import org.polypheny.db.schemaDiscovery.MetadataProvider; +import org.polypheny.db.schemaDiscovery.NodeSerializer; import org.polypheny.db.transaction.Statement; import org.polypheny.db.transaction.Transaction; import org.polypheny.db.transaction.TransactionException; @@ -230,7 +234,18 @@ public void createSource( Transaction transaction, String uniqueName, String ada .collect( Collectors.toSet() ); log.error( "Das sind die Attribute die gefiltert werden mΓΌssen: " + selectedAttributeNames ); if ( adapter instanceof MetadataProvider mp ) { + + PublisherManager pm = PublisherManager.getInstance(); + MetadataHasher hasher = new MetadataHasher(); + AbstractNode node = mp.fetchMetadataTree(); + String hash = hasher.hash( NodeSerializer.serializeNode( node ).toString() ); + log.info( "Metadata hash at deployment: {}", hash ); + + HashCache.getInstance().put( uniqueName, hash ); + log.info( "Key used during deployment: {} ", uniqueName ); + pm.onAdapterDeploy( (Adapter & MetadataProvider) mp ); + mp.setRoot( node ); mp.markSelectedAttributes( selectedAttributes ); log.error( "SelectedAttributes ist gesetzt aus dem DdlManager und der Tree ist das hier: " ); diff --git a/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java b/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java index e53d368fa9..a82609dc91 100644 --- a/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java +++ b/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java @@ -54,6 +54,7 @@ import org.polypheny.db.schemaDiscovery.MetadataProvider; import org.polypheny.db.schemaDiscovery.Node; import org.polypheny.db.transaction.PUID; +import org.polypheny.db.transaction.PUID.Type; import org.polypheny.db.transaction.PolyXid; @@ -97,12 +98,16 @@ public AbstractNode fetchMetadataTree() { SchemaFilter filter = SchemaFilter.forAdapter( adapterName ); - PolyXid xid = PolyXid.generateLocalTransactionIdentifier( PUID.EMPTY_PUID, 
PUID.EMPTY_PUID ); + PolyXid xid = PolyXid.generateLocalTransactionIdentifier( PUID.randomPUID( Type.RANDOM ), PUID.randomPUID( Type.RANDOM ) ); + + java.sql.Statement stmt = null; + Connection conn = null; + + try { ConnectionHandler handler = connectionFactory.getOrCreateConnectionHandler( xid ); - java.sql.Statement stmt = handler.getStatement(); - Connection conn = stmt.getConnection(); + stmt = handler.getStatement(); + conn = stmt.getConnection(); DatabaseMetaData meta = conn.getMetaData(); try ( ResultSet schemas = requiresSchema() @@ -131,11 +136,12 @@ public AbstractNode fetchMetadataTree() { String tableName = tables.getString( "TABLE_NAME" ); String fqName = (requiresSchema() ? "\"" + schemaName + "\"." : "") + "\"" + tableName + "\""; + Connection finalConn = conn; previewByTable.computeIfAbsent( schemaName + "." + tableName, k -> { try { - return fetchPreview( conn, fqName, 10 ); + return fetchPreview( finalConn, fqName, 10 ); } catch ( Exception e ) { log.warn( "Preview failed for {}", fqName, e ); return List.of(); @@ -194,6 +200,13 @@ public AbstractNode fetchMetadataTree() { } catch ( SQLException | ConnectionHandlerException ex ) { throw new GenericRuntimeException( "Error while fetching metadata tree", ex ); + } finally { + try { + if ( stmt != null ) stmt.close(); + if ( conn != null ) conn.close(); + } catch ( SQLException e ) { + throw new RuntimeException( e ); + } } this.metadataRoot = root; From b11accc39c349b92eed5e29c2c738013f58f9bd4 Mon Sep 17 00:00:00 2001 From: romanost03 Date: Mon, 2 Jun 2025 10:44:50 +0200 Subject: [PATCH 28/68] CSV and Excel adapters are now deployed as MetadataProvider. AbstractListener removes the adapter hash when dropping an adapter. --- .../MetadataObserver/AbstractListener.java | 2 +- .../MetadataObserver/PublisherManager.java | 1 + .../org/polypheny/db/ddl/DdlManagerImpl.java | 10 +- .../polypheny/db/adapter/csv/CsvSource.java | 180 +++++++++++++++++- .../db/adapter/excel/ExcelSource.java | 10 +- .../java/org/polypheny/db/webui/Crud.java | 8 +- .../webui/models/requests/PreviewRequest.java | 7 +- 7 files changed, 208 insertions(+), 10 deletions(-) diff --git a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/AbstractListener.java b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/AbstractListener.java index 62c842e3be..1916f820eb 100644 --- a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/AbstractListener.java +++ b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/AbstractListener.java @@ -40,7 +40,7 @@ public AbstractListener() { @Override public void onMetadataChange( P adapter, AbstractNode node, String hash ) { available ^= true; - node = this.currentNode; + this.currentNode = node; this.adapter = adapter; log.info( "Listener saved credentials of adapter and sends now Request to UI and applies changes on adapter metadata and metadata the listener is holding." 
); applyChange(); diff --git a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/PublisherManager.java b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/PublisherManager.java index 26058a2477..bb2315beec 100644 --- a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/PublisherManager.java +++ b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/PublisherManager.java @@ -51,6 +51,7 @@ public void onAdapterUndeploy( String uniqueName ) { if ( publishers.containsKey( uniqueName ) ) { publishers.get( uniqueName ).stop(); publishers.remove( uniqueName ); + log.error( "Adapter {} is going to be unregistered for metadata publish.", uniqueName ); } } diff --git a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java index 7d0f0c74ca..ad2ec6ca50 100644 --- a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java +++ b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java @@ -223,15 +223,14 @@ public void createSource( Transaction transaction, String uniqueName, String ada DataSource adapter = (DataSource) AdapterManager.getInstance().addAdapter( adapterName, uniqueName, adapterType, mode, config ); String attributes = config.get( "selectedAttributes" ); - Set selectedAttributeNames = new HashSet<>(); + List selectedAttributeNames = new ArrayList<>(); log.error( "Das ist das Attributes String: " + attributes ); if ( attributes != null ) { List selectedAttributes = new Gson().fromJson( attributes, new TypeToken>() { }.getType() ); selectedAttributeNames = selectedAttributes.stream() - .map( s -> s.replaceFirst( " : .*", "" ) ) .map( s -> s.substring( s.lastIndexOf( '.' ) + 1 ) ) - .collect( Collectors.toSet() ); + .collect( Collectors.toList() ); log.error( "Das sind die Attribute die gefiltert werden mΓΌssen: " + selectedAttributeNames ); if ( adapter instanceof MetadataProvider mp ) { @@ -282,6 +281,10 @@ public void createSource( Transaction transaction, String uniqueName, String ada List aColumns = new ArrayList<>(); int colPos = 1; + selectedAttributeNames = selectedAttributeNames.stream() + .map( attr -> attr.split( ":" )[0].toLowerCase() ) + .collect( Collectors.toList() ); + for ( ExportedColumn exportedColumn : entry.getValue() ) { if ( adapter instanceof MetadataProvider mp && (attributes != null) ) { @@ -410,6 +413,7 @@ public void dropAdapter( String name, Statement statement ) { } } AdapterManager.getInstance().removeAdapter( adapter.id ); + PublisherManager.getInstance().onAdapterUndeploy( adapter.uniqueName ); } diff --git a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSource.java b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSource.java index 89a95d3509..3b0d0c96a7 100644 --- a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSource.java +++ b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSource.java @@ -18,15 +18,19 @@ import java.io.BufferedReader; import java.io.File; +import java.io.FileReader; import java.io.IOException; import java.net.MalformedURLException; import java.net.URL; +import java.sql.Connection; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.HashSet; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; +import java.util.Optional; import java.util.Set; import java.util.stream.Collectors; import lombok.Getter; @@ -54,6 +58,10 @@ import org.polypheny.db.information.InformationGroup; import 
org.polypheny.db.information.InformationTable; import org.polypheny.db.prepare.Context; +import org.polypheny.db.schemaDiscovery.AbstractNode; +import org.polypheny.db.schemaDiscovery.AttributeNode; +import org.polypheny.db.schemaDiscovery.MetadataProvider; +import org.polypheny.db.schemaDiscovery.Node; import org.polypheny.db.transaction.PolyXid; import org.polypheny.db.type.PolyType; import org.polypheny.db.util.Source; @@ -72,7 +80,7 @@ @AdapterSettingString(subOf = "method_link", defaultValue = "classpath://hr", name = "directoryName", description = "You can select a path to a folder or specific .csv or .csv.gz files.", position = 2) @AdapterSettingInteger(name = "maxStringLength", defaultValue = 255, position = 3, description = "Which length (number of characters including whitespace) should be used for the varchar columns. Make sure this is equal or larger than the longest string in any of the columns.") -public class CsvSource extends DataSource { +public class CsvSource extends DataSource implements MetadataProvider { private static final Logger log = LoggerFactory.getLogger( CsvSource.class ); @Delegate(excludes = Excludes.class) @@ -85,6 +93,9 @@ public class CsvSource extends DataSource { private final int maxStringLength; private Map> exportedColumnCache; + private AbstractNode metadataRoot; + private Map>> previewByTable = new LinkedHashMap<>(); + public CsvSource( final long storeId, final String uniqueName, final Map settings, final DeployMode mode ) { super( storeId, uniqueName, settings, mode, true, new RelAdapterCatalog( storeId ) ); @@ -372,6 +383,173 @@ public void renameLogicalColumn( long id, String newColumnName ) { } + @Override + public AbstractNode fetchMetadataTree() { + File csvFile = new File( "C:/Users/roman/Desktop/Dateieins.csv" ); + String tableName = csvFile.getName(); + AbstractNode rootNode = new Node( "csv", tableName ); + + try ( BufferedReader reader = new BufferedReader( new FileReader( csvFile ) ) ) { + String headerLine = reader.readLine(); + if ( headerLine == null ) { + throw new RuntimeException( "No header line found" ); + } + + String[] rawColumns = headerLine.split( "," ); + for ( String colRaw : rawColumns ) { + String[] split = colRaw.split( ":" ); + String name = split[0].trim().replaceAll( "[^a-zA-Z0-9_]", "" ); + String type = split.length > 1 ? 
split[1].trim() : "string"; + + AbstractNode columnNode = new AttributeNode( "column", name ); + columnNode.addProperty( "type", mapCsvType( type ) ); + columnNode.addProperty( "nullable", true ); + + rootNode.addChild( columnNode ); + } + String fqName = csvFile.getName(); + List> preview = fetchPreview( null, fqName, 10 ); + this.previewByTable.put( fqName, preview ); + } catch ( IOException e ) { + throw new RuntimeException( "Failed to parse metadata of CSV source: " + e ); + } + this.metadataRoot = rootNode; + return this.metadataRoot; + + } + + + private String mapCsvType( String rawType ) { + switch ( rawType ) { + case "int": + case "integer": + return "INTEGER"; + case "bool": + case "boolean": + return "BOOLEAN"; + case "long": + return "BIGINT"; + case "float": + return "REAL"; + case "double": + return "DOUBLE"; + case "date": + return "DATE"; + case "time": + return "TIME"; + case "timestamp": + return "TIMESTAMP"; + case "string": + default: + return "VARCHAR"; + } + } + + + @Override + public List> fetchPreview( Connection conn, String fqName, int limit ) { + File csvFile = new File( "C:/Users/roman/Desktop/Dateieins.csv" ); + List> rows = new ArrayList<>(); + + try ( BufferedReader reader = new BufferedReader( new FileReader( csvFile ) ) ) { + String headerLine = reader.readLine(); + if ( headerLine == null ) { + return List.of(); + } + + String[] headerParts = headerLine.split( "," ); + List colNames = new ArrayList<>(); + + for ( String raw : headerParts ) { + String[] split = raw.split( ":" ); + String colName = split[0].trim(); + colNames.add( colName ); + } + + String line; + int count = 0; + while ( (line = reader.readLine()) != null && count < limit ) { + String[] values = line.split( ",", -1 ); + Map row = new LinkedHashMap<>(); + + for ( int i = 0; i < colNames.size(); i++ ) { + String value = i < values.length ? 
values[i].trim() : null; + row.put( colNames.get( i ), value ); + } + + rows.add( row ); + count++; + } + + } catch ( IOException e ) { + throw new RuntimeException( "Failed to read CSV preview: " + fqName, e ); + } + + return rows; + } + + + @Override + public void markSelectedAttributes( List<String> selectedPaths ) { + if ( this.metadataRoot == null ) { + log.warn( "⚠️ No metadata tree present - cannot mark attributes." ); + return; + } + + for ( String path : selectedPaths ) { + int lastDot = path.lastIndexOf( '.' ); + if ( lastDot == -1 || lastDot == path.length() - 1 ) { + log.warn( "⚠️ Not a valid attribute path: " + path ); + continue; + } + + String columnName = path.substring( lastDot + 1 ); + String normalizedColumnName = columnName.replaceAll( "[^a-zA-Z0-9_]", "" ); + + Optional<AbstractNode> attrOpt = metadataRoot.getChildren().stream() + .filter( child -> child instanceof AttributeNode + && child.getName().equals( normalizedColumnName ) ) + .findFirst(); + + if ( attrOpt.isPresent() ) { + ((AttributeNode) attrOpt.get()).setSelected( true ); + log.info( "✅ Attribute marked: " + path ); + } else { + log.warn( "❌ Attribute not found: " + normalizedColumnName + " in path: " + path ); + } + } + } + + + + @Override + public void printTree( AbstractNode node, int depth ) { + if ( node == null ) { + node = this.metadataRoot; + } + System.out.println( " ".repeat( depth ) + node.getType() + ": " + node.getName() ); + for ( Map.Entry<String, Object> entry : node.getProperties().entrySet() ) { + System.out.println( " ".repeat( depth + 1 ) + "- " + entry.getKey() + ": " + entry.getValue() ); + } + for ( AbstractNode child : node.getChildren() ) { + printTree( child, depth + 1 ); + } + + } + + + @Override + public void setRoot( AbstractNode root ) { + this.metadataRoot = root; + } + + + @Override + public Object getPreview() { + return this.previewByTable; + } + + @SuppressWarnings("unused") private interface Excludes { diff --git a/plugins/excel-adapter/src/main/java/org/polypheny/db/adapter/excel/ExcelSource.java b/plugins/excel-adapter/src/main/java/org/polypheny/db/adapter/excel/ExcelSource.java index f63e3b8c96..c2f636d056 100644 --- a/plugins/excel-adapter/src/main/java/org/polypheny/db/adapter/excel/ExcelSource.java +++ b/plugins/excel-adapter/src/main/java/org/polypheny/db/adapter/excel/ExcelSource.java @@ -409,7 +409,7 @@ public AbstractNode fetchMetadataTree() { String mappeName = "Workbook"; AbstractNode root = new Node( "excel", mappeName ); - try ( Workbook wb = WorkbookFactory.create( new File( filePath ) ) ) { + try ( FileInputStream fis = new FileInputStream( filePath ); Workbook wb = WorkbookFactory.create( fis ) ) { for ( Sheet sheet : wb ) { @@ -504,7 +504,7 @@ public List<Map<String, Object>> fetchPreview( Connection conn, String fqName, i List<Map<String, Object>> rows = new ArrayList<>(); - try ( Workbook wb = WorkbookFactory.create( new File( filePath ) ) ) { + try ( FileInputStream fis = new FileInputStream( filePath ); Workbook wb = WorkbookFactory.create( fis ) ) { Sheet sheet = wb.getSheet( sheetName ); if ( sheet == null ) { @@ -565,10 +565,14 @@ private Object extractCellValue( Cell cell ) { @Override public void markSelectedAttributes( List<String> selectedPaths ) { + List<String> shortNames = selectedPaths.stream() + .map( p -> p.substring( p.lastIndexOf( '.' ) + 1 ).toLowerCase() ) + .collect( Collectors.toList() ); + List<List<String>> attributePaths = new ArrayList<>(); for ( String path : selectedPaths ) { - String cleanPath = path.replaceFirst( " ?:.*$", "" ).trim(); + String cleanPath = path.trim(); List<String> segments = Arrays.asList( cleanPath.split( "\\." 
) ); if ( !segments.isEmpty() && segments.get( 0 ).equals( metadataRoot.getName() ) ) { diff --git a/webui/src/main/java/org/polypheny/db/webui/Crud.java b/webui/src/main/java/org/polypheny/db/webui/Crud.java index 79d911ea90..90782cfede 100644 --- a/webui/src/main/java/org/polypheny/db/webui/Crud.java +++ b/webui/src/main/java/org/polypheny/db/webui/Crud.java @@ -286,7 +286,6 @@ RelationalResult getTable( final UIRequest request ) { } String fullTableName = getFullEntityName( request.entityId ); - // String fullTableName = "\"TEST\""; query.append( "SELECT * FROM " ) .append( fullTableName ) .append( where ) @@ -902,6 +901,7 @@ void sendConfirmation( final Context ctx ) { log.info( " πŸ”Ή Name : {}", a.adapterName ); log.info( " πŸ”Ή Adapter : {}", a.adapterType ); log.info( " πŸ”Ή Type : {}", a.limit ); + log.info( " πŸ”Ή UniqueName : {}", a.uniqueName ); log.info( "πŸ“¦ Settings:" ); for ( Map.Entry entry : a.settings.entrySet() ) { @@ -935,6 +935,7 @@ void sendConfirmation( final Context ctx ) { String path = handleUploadFiles( inputStreams, fileNames, (AbstractAdapterSettingDirectory) allSettings.get( "directory" ), a ); a.settings.put( "directory", path ); + log.error( "Full path: {}", path ); } PreviewResult result = template.preview( a.settings, 10 ); @@ -2167,6 +2168,11 @@ public void getMetadataAndPreview( final Context ctx ) { } } + /** + * Deploy a new adapter without collecting files out of InputStream + * Files from previews are cached in UploadDepot + */ + /** * Deploy a new adapter diff --git a/webui/src/main/java/org/polypheny/db/webui/models/requests/PreviewRequest.java b/webui/src/main/java/org/polypheny/db/webui/models/requests/PreviewRequest.java index 9029fcc426..dd1b3ccfe5 100644 --- a/webui/src/main/java/org/polypheny/db/webui/models/requests/PreviewRequest.java +++ b/webui/src/main/java/org/polypheny/db/webui/models/requests/PreviewRequest.java @@ -37,17 +37,22 @@ public class PreviewRequest { @JsonProperty public int limit; + @JsonProperty + public String uniqueName; + public PreviewRequest() { } public PreviewRequest( @JsonProperty("adapterName") String adapterName, @JsonProperty("adapterType") AdapterType adapterType, @JsonProperty("settings") Map settings, - @JsonProperty("limit") int rowLimit ) { + @JsonProperty("limit") int rowLimit, + @JsonProperty("uniqueName") String uniqueName ) { this.adapterName = adapterName; this.adapterType = adapterType; this.settings = settings; this.limit = rowLimit; + this.uniqueName = uniqueName; } } From ff3c47cfe5d48d6dd316210ca3d626eb1345bd71 Mon Sep 17 00:00:00 2001 From: romanost03 Date: Fri, 6 Jun 2025 18:55:06 +0200 Subject: [PATCH 29/68] Changes in metadata observer and new buffer. UI pulls changes from ChangeBuffer.java. 
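The intended flow, roughly: the listener computes a diff between the tree it holds and the freshly fetched one, wraps it in a JSON event, and parks it where the UI can poll for it. A minimal sketch follows; it is condensed from the listener changes below, and the final buffer call is an assumption, since ChangeBuffer's exact interface is only partially visible in this patch:

    // Build the event the UI will pull; findAddedNodes() is the diff utility
    // introduced in this commit.
    List<SimpleDiff> diffs = SimpleDiffUtils.findAddedNodes( oldRoot, freshRoot );
    JsonObject event = new JsonObject();
    event.addProperty( "type", "adapterMetadataChanged" );
    event.addProperty( "uniqueName", adapter.getUniqueName() );
    event.add( "diff", new Gson().toJsonTree( diffs ) );
    // Hypothetical: hand the event to the buffer the UI polls.
    // ChangeBuffer.getInstance().put( adapter.getUniqueName(), event );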
--- .../MetadataObserver/AbstractListener.java | 32 +++- .../MetadataObserver/AbstractPublisher.java | 10 +- .../MetadataObserver/ChangeBuffer.java | 76 +++++++++ .../MetadataObserver/PublisherManager.java | 22 ++- .../MetadataObserver/Utils/SimpleDiff.java | 72 ++++++++ .../Utils/SimpleDiffUtils.java | 154 ++++++++++++++++++ 6 files changed, 359 insertions(+), 7 deletions(-) create mode 100644 core/src/main/java/org/polypheny/db/adapter/MetadataObserver/ChangeBuffer.java create mode 100644 core/src/main/java/org/polypheny/db/adapter/MetadataObserver/Utils/SimpleDiff.java create mode 100644 core/src/main/java/org/polypheny/db/adapter/MetadataObserver/Utils/SimpleDiffUtils.java diff --git a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/AbstractListener.java b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/AbstractListener.java index 1916f820eb..e2d0dcbc4e 100644 --- a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/AbstractListener.java +++ b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/AbstractListener.java @@ -16,11 +16,17 @@ package org.polypheny.db.adapter.MetadataObserver; +import com.google.gson.Gson; +import com.google.gson.JsonObject; import lombok.Getter; import lombok.extern.slf4j.Slf4j; import org.polypheny.db.adapter.Adapter; +import org.polypheny.db.adapter.MetadataObserver.Utils.SimpleDiff; +import org.polypheny.db.adapter.MetadataObserver.Utils.SimpleDiffUtils; import org.polypheny.db.schemaDiscovery.AbstractNode; import org.polypheny.db.schemaDiscovery.MetadataProvider; +import java.util.ArrayList; +import java.util.List; @Slf4j public class AbstractListener
<P extends Adapter & MetadataProvider>
implements MetadataListener<P>
{ @@ -29,6 +35,8 @@ public class AbstractListener
<P extends Adapter & MetadataProvider>
implements M private AbstractNode currentNode; private P adapter; + private static final Gson GSON = new Gson(); + public AbstractListener() { available = true; @@ -37,13 +45,35 @@ public AbstractListener() { } + public void sendMetadataChangeEvent( Adapter adapter, List diffs ) { + JsonObject root = new JsonObject(); + root.addProperty( "type", "adapterMetadataChanged" ); + + JsonObject ad = new JsonObject(); + ad.addProperty( "uniqueName", adapter.getUniqueName() ); + ad.addProperty( "adapterName", adapter.getAdapterName() ); + ad.add( "settings", GSON.toJsonTree( adapter.getSettings() ) ); + ad.addProperty( "mode", adapter.getDeployMode().name() ); + root.add( "adapter", ad ); + + root.add( "diff", GSON.toJsonTree( diffs ) ); + + } + + @Override public void onMetadataChange( P adapter, AbstractNode node, String hash ) { available ^= true; this.currentNode = node; this.adapter = adapter; log.info( "Listener saved credentials of adapter and sends now Request to UI and applies changes on adapter metadata and metadata the listener is holding." ); - applyChange(); + + List diffs; + diffs = SimpleDiffUtils.findAddedNodes( this.adapter.getRoot(), node ); + + sendMetadataChangeEvent( adapter, diffs ); + + } diff --git a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/AbstractPublisher.java b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/AbstractPublisher.java index 6630ee8774..f7e130a0f1 100644 --- a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/AbstractPublisher.java +++ b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/AbstractPublisher.java @@ -64,7 +64,9 @@ public void stop() { @Override public void runCheck() { - if ( !listener.isAvailable() ) return; + if ( !listener.isAvailable() ) { + return; + } try { AbstractNode node = provider.fetchMetadataTree(); String fresh = NodeSerializer.serializeNode( node ).toString(); @@ -72,12 +74,10 @@ public void runCheck() { String hash = hasher.hash( fresh ); String lastHash = cache.getHash( provider.getUniqueName() ); - log.info("Fresh JSON: {}", fresh); + log.info( "Fresh JSON: {}", fresh ); log.info( "Metadata hash at Observer-Check (Current adapter hash) : {}", lastHash ); log.info( "Metadata hash at Observer-Check (Newest hash) : {}", hash ); - log.info("Key used during observer-check: {}", provider.getUniqueName()); - - + log.info( "Key used during observer-check: {}", provider.getUniqueName() ); if ( lastHash != null && !lastHash.equals( hash ) ) { log.info( "Metadata of adapter {} changed. Sending new snapshot to UI.", provider.getUniqueName() ); diff --git a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/ChangeBuffer.java b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/ChangeBuffer.java new file mode 100644 index 0000000000..72bd62f430 --- /dev/null +++ b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/ChangeBuffer.java @@ -0,0 +1,76 @@ +/* + * Copyright 2019-2025 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.db.adapter.MetadataObserver; + +import org.polypheny.db.adapter.Adapter; +import org.polypheny.db.adapter.MetadataObserver.Utils.SimpleDiff; +import org.polypheny.db.adapter.MetadataObserver.Utils.SimpleDiffUtils; +import org.polypheny.db.schemaDiscovery.AbstractNode; +import org.polypheny.db.schemaDiscovery.MetadataProvider; +import java.util.List; +import java.util.Optional; + +public class ChangeBuffer
<P extends Adapter & MetadataProvider>
{ + + private final P adapter; + private final String adapterHash; + + private volatile boolean hasChanges = false; + private List diffs = List.of(); + + + public ChangeBuffer( final P adapter, final String adapterHash ) { + this.adapter = adapter; + this.adapterHash = adapterHash; + } + + + public synchronized void push( AbstractNode node ) { + this.diffs = SimpleDiffUtils.findAddedNodes( adapter.getRoot(), node ); + this.hasChanges = !diffs.isEmpty(); + } + + + public synchronized Optional consume() { + if ( !hasChanges ) { + return Optional.empty(); + } + + ChangeDTO dto = new ChangeDTO( + adapterHash, + List.of( adapter.getUniqueName() ), + diffs + ); + hasChanges = false; + diffs = List.of(); + return Optional.of( dto ); + } + + + public record ChangeDTO( + String adapterHash, + List credentials, + List diffs + ) { + + } + + +} + + + diff --git a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/PublisherManager.java b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/PublisherManager.java index bb2315beec..8a7a5fc2d1 100644 --- a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/PublisherManager.java +++ b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/PublisherManager.java @@ -18,28 +18,38 @@ import lombok.extern.slf4j.Slf4j; import org.polypheny.db.adapter.Adapter; +import org.polypheny.db.adapter.MetadataObserver.ChangeBuffer.ChangeDTO; +import org.polypheny.db.schemaDiscovery.AbstractNode; import org.polypheny.db.schemaDiscovery.MetadataProvider; import java.util.Map; +import java.util.Optional; import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ConcurrentMap; @Slf4j public class PublisherManager { private final Map publishers = new ConcurrentHashMap<>(); + private final ConcurrentMap buffers = new ConcurrentHashMap<>(); + private static final PublisherManager INSTANCE = new PublisherManager(); + public static PublisherManager getInstance() { return INSTANCE; } + private PublisherManager() { } public
<P extends Adapter & MetadataProvider>
void onAdapterDeploy( P adapter ) { log.info( "Adapter {} is going to be registered for metadata publish.", adapter.getUniqueName() ); - if ( publishers.containsKey( adapter.getUniqueName() ) ) return; + if ( publishers.containsKey( adapter.getUniqueName() ) ) { + return; + } MetadataListener listener = new AbstractListener(); MetadataPublisher publisher = new AbstractPublisher<>( adapter, listener ); publishers.put( adapter.getUniqueName(), publisher ); @@ -56,4 +66,14 @@ public void onAdapterUndeploy( String uniqueName ) { } + public
<P extends Adapter & MetadataProvider>
void onMetadataChange( P adapter, AbstractNode node, String hash ) { + buffers.computeIfAbsent( hash, h -> new ChangeBuffer( adapter, h ) ) + .push( node ); + } + + public Optional consumeChanges(String adapterHash) { + ChangeBuffer buffer = buffers.get(adapterHash); + return buffer == null ? Optional.empty() : buffer.consume(); + } + } diff --git a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/Utils/SimpleDiff.java b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/Utils/SimpleDiff.java new file mode 100644 index 0000000000..9bc16340fc --- /dev/null +++ b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/Utils/SimpleDiff.java @@ -0,0 +1,72 @@ +/* + * Copyright 2019-2025 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.adapter.MetadataObserver.Utils; + +public class SimpleDiff { + + public enum Type { + NODE_ADDED, + NONE + } + + + private final Type type; + private final String path; + private final String nodeType; + private final String nodeName; + + + public SimpleDiff( Type type, String path, String nodeType, String nodeName ) { + this.type = type; + this.path = path; + this.nodeType = nodeType; + this.nodeName = nodeName; + } + + + public Type getType() { + return type; + } + + + public String getPath() { + return path; + } + + + public String getNodeType() { + return nodeType; + } + + + public String getNodeName() { + return nodeName; + } + + + @Override + public String toString() { + return "SimpleDiff{" + + "type=" + type + + ", path='" + path + '\'' + + ", nodeType='" + nodeType + '\'' + + ", nodeName='" + nodeName + '\'' + + '}'; + } + + +} diff --git a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/Utils/SimpleDiffUtils.java b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/Utils/SimpleDiffUtils.java new file mode 100644 index 0000000000..5db8b29e1c --- /dev/null +++ b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/Utils/SimpleDiffUtils.java @@ -0,0 +1,154 @@ +/* + * Copyright 2019-2025 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.db.adapter.MetadataObserver.Utils; + +import org.polypheny.db.schemaDiscovery.AbstractNode; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +public class SimpleDiffUtils { + + public static List findAddedNodes( AbstractNode oldNode, AbstractNode newNode ) { + List diffs = new ArrayList<>(); + findAddedRecursive( oldNode, newNode, "/" + newNode.getName(), diffs ); + return diffs; + } + + + private static void findAddedRecursive( + AbstractNode oldNode, + AbstractNode newNode, + String path, + List diffs ) { + Map oldIndex = indexChildren( oldNode.getChildren() ); + Map newIndex = indexChildren( newNode.getChildren() ); + + for ( Map.Entry entry : newIndex.entrySet() ) { + String id = entry.getKey(); + if ( !oldIndex.containsKey( id ) ) { + String[] parts = id.split( ":", 2 ); + String childType = parts[0]; + String childName = parts[1]; + String childPath = path + "/" + childType + ":" + childName; + + diffs.add( new SimpleDiff( + SimpleDiff.Type.NODE_ADDED, + childPath, + childType, + childName + ) ); + findAddedRecursive( + new AbstractNodeStub(), + entry.getValue(), + childPath, + diffs + ); + } else { + AbstractNode oldChild = oldIndex.get( id ); + AbstractNode newChild = entry.getValue(); + String childPath = path + "/" + id; + findAddedRecursive( oldChild, newChild, childPath, diffs ); + } + } + } + + + /** + * Baut aus einer Liste von AbstractNode eine Map<"type:name", node>. + */ + private static Map indexChildren( List children ) { + Map map = new HashMap<>(); + if ( children != null ) { + for ( AbstractNode c : children ) { + String key = c.getType() + ":" + c.getName(); + map.put( key, c ); + } + } + return map; + } + + + /** + * Leere Platzhalter-Klasse, um leere Subtrees darzustellen + */ + private static class AbstractNodeStub implements AbstractNode { + + @Override + public String getType() { + return ""; + } + + + @Override + public String getName() { + return ""; + } + + + @Override + public List getChildren() { + return Collections.emptyList(); + } + + + @Override + public Map getProperties() { + return Collections.emptyMap(); + } + + + @Override + public void addChild( AbstractNode node ) { + throw new UnsupportedOperationException(); + } + + + @Override + public void addProperty( String key, Object value ) { + throw new UnsupportedOperationException(); + } + + + @Override + public void setType( String type ) { + throw new UnsupportedOperationException(); + } + + + @Override + public void setName( String name ) { + throw new UnsupportedOperationException(); + } + + + @Override + public void setChildren( List children ) { + throw new UnsupportedOperationException(); + } + + + @Override + public void setProperties( Map properties ) { + throw new UnsupportedOperationException(); + } + + } + +} From b92394b065593ee4a98b3a6fdc71bbd7e3651c41 Mon Sep 17 00:00:00 2001 From: romanost03 Date: Fri, 6 Jun 2025 18:56:02 +0200 Subject: [PATCH 30/68] MetadataProvider.java supports getter for metadata now --- .../db/schemaDiscovery/MetadataProvider.java | 2 ++ .../polypheny/db/adapter/csv/CsvSource.java | 6 ++++++ .../db/adapter/excel/ExcelSource.java | 18 +++++++++++++++--- .../adapter/monetdb/sources/MonetdbSource.java | 6 ++++++ .../db/adapter/jdbc/MysqlSourcePlugin.java | 6 ++++++ .../source/OracleSource.java | 6 ++++++ .../postgres/source/PostgresqlSource.java | 10 +++++++--- 7 files changed, 48 insertions(+), 6 deletions(-) diff --git 
a/core/src/main/java/org/polypheny/db/schemaDiscovery/MetadataProvider.java b/core/src/main/java/org/polypheny/db/schemaDiscovery/MetadataProvider.java index adc4bb49bb..5549662c21 100644 --- a/core/src/main/java/org/polypheny/db/schemaDiscovery/MetadataProvider.java +++ b/core/src/main/java/org/polypheny/db/schemaDiscovery/MetadataProvider.java @@ -34,5 +34,7 @@ public interface MetadataProvider { Object getPreview(); + AbstractNode getRoot(); + } diff --git a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSource.java b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSource.java index 3b0d0c96a7..e9a089a45e 100644 --- a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSource.java +++ b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSource.java @@ -550,6 +550,12 @@ public Object getPreview() { } + @Override + public AbstractNode getRoot() { + return this.metadataRoot; + } + + @SuppressWarnings("unused") private interface Excludes { diff --git a/plugins/excel-adapter/src/main/java/org/polypheny/db/adapter/excel/ExcelSource.java b/plugins/excel-adapter/src/main/java/org/polypheny/db/adapter/excel/ExcelSource.java index c2f636d056..7590496bb0 100644 --- a/plugins/excel-adapter/src/main/java/org/polypheny/db/adapter/excel/ExcelSource.java +++ b/plugins/excel-adapter/src/main/java/org/polypheny/db/adapter/excel/ExcelSource.java @@ -59,6 +59,7 @@ import org.polypheny.db.catalog.catalogs.RelAdapterCatalog; import org.polypheny.db.catalog.entity.allocation.AllocationEntity; import org.polypheny.db.catalog.entity.allocation.AllocationTableWrapper; +import org.polypheny.db.catalog.entity.logical.LogicalColumn; import org.polypheny.db.catalog.entity.logical.LogicalTableWrapper; import org.polypheny.db.catalog.entity.physical.PhysicalEntity; import org.polypheny.db.catalog.entity.physical.PhysicalTable; @@ -177,7 +178,12 @@ public List createTable( Context context, LogicalTableWrapper lo logical.columns.stream().collect( Collectors.toMap( t -> t.id, t -> t ) ), logical.pkIds, allocation ); - ExcelTable physical = currentNamespace.createExcelTable( table, this ); + List physicalIds = new ArrayList<>(); + for ( LogicalColumn column : logical.columns ) { + physicalIds.add( column.position ); + } + + ExcelTable physical = currentNamespace.createExcelTable( table, this, physicalIds ); adapterCatalog.replacePhysical( physical ); @@ -409,7 +415,7 @@ public AbstractNode fetchMetadataTree() { String mappeName = "Workbook"; AbstractNode root = new Node( "excel", mappeName ); - try ( FileInputStream fis = new FileInputStream( filePath ); Workbook wb = WorkbookFactory.create( fis) ) { + try ( FileInputStream fis = new FileInputStream( filePath ); Workbook wb = WorkbookFactory.create( fis ) ) { for ( Sheet sheet : wb ) { @@ -504,7 +510,7 @@ public List> fetchPreview( Connection conn, String fqName, i List> rows = new ArrayList<>(); - try ( FileInputStream fis = new FileInputStream( filePath ); Workbook wb = WorkbookFactory.create( fis) ) { + try ( FileInputStream fis = new FileInputStream( filePath ); Workbook wb = WorkbookFactory.create( fis ) ) { Sheet sheet = wb.getSheet( sheetName ); if ( sheet == null ) { @@ -646,6 +652,12 @@ public Object getPreview() { } + @Override + public AbstractNode getRoot() { + return this.metadataRoot; + } + + @SuppressWarnings("unused") private interface Excludes { diff --git a/plugins/monetdb-adapter/src/main/java/org/polypheny/db/adapter/monetdb/sources/MonetdbSource.java 
b/plugins/monetdb-adapter/src/main/java/org/polypheny/db/adapter/monetdb/sources/MonetdbSource.java index 4c33616859..3de052fd4e 100644 --- a/plugins/monetdb-adapter/src/main/java/org/polypheny/db/adapter/monetdb/sources/MonetdbSource.java +++ b/plugins/monetdb-adapter/src/main/java/org/polypheny/db/adapter/monetdb/sources/MonetdbSource.java @@ -492,4 +492,10 @@ public Object getPreview() { return this.previewByTable; } + + @Override + public AbstractNode getRoot() { + return this.metadataRoot; + } + } diff --git a/plugins/mysql-adapter/src/main/java/org/polypheny/db/adapter/jdbc/MysqlSourcePlugin.java b/plugins/mysql-adapter/src/main/java/org/polypheny/db/adapter/jdbc/MysqlSourcePlugin.java index 0d6cb3f2c0..50988ca0f8 100644 --- a/plugins/mysql-adapter/src/main/java/org/polypheny/db/adapter/jdbc/MysqlSourcePlugin.java +++ b/plugins/mysql-adapter/src/main/java/org/polypheny/db/adapter/jdbc/MysqlSourcePlugin.java @@ -374,6 +374,12 @@ public Object getPreview() { return preview; } + + @Override + public AbstractNode getRoot() { + return this.metadataRoot; + } + } } diff --git a/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/source/OracleSource.java b/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/source/OracleSource.java index 3dab1cc566..d87e83d218 100644 --- a/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/source/OracleSource.java +++ b/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/source/OracleSource.java @@ -472,4 +472,10 @@ public Object getPreview() { return preview; } + + @Override + public AbstractNode getRoot() { + return this.metadataRoot; + } + } diff --git a/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java b/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java index a82609dc91..c024ae3655 100644 --- a/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java +++ b/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java @@ -208,10 +208,8 @@ public AbstractNode fetchMetadataTree() { throw new RuntimeException( e ); } } - - this.metadataRoot = root; log.error( "Neue Preview ist geladen als: " + previewByTable.toString() ); - return this.metadataRoot; + return root; } @@ -243,6 +241,12 @@ public Object getPreview() { } + @Override + public AbstractNode getRoot() { + return this.metadataRoot; + } + + @Override public void markSelectedAttributes( List selectedPaths ) { From 31326c1f844de78819e6312750b3255e843e0898 Mon Sep 17 00:00:00 2001 From: romanost03 Date: Fri, 6 Jun 2025 18:57:00 +0200 Subject: [PATCH 31/68] Excel-Source column indexing bugfix. 
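The off-by-one at the heart of this fix, in isolation (a minimal sketch, assuming 1-based physical column positions as the catalog stores them and Apache POI's 0-based Row.getCell; variable names are illustrative):

    // fields holds the physical, 1-based positions of the selected columns,
    // e.g. the first and third column of the sheet:
    int[] fields = { 1, 3 };
    for ( int i = 0; i < fields.length; i++ ) {
        Cell cell = row.getCell( fields[i] - 1 ); // POI indexes cells from 0, hence the -1
        objects[i] = convert( fieldTypes[i], cell );
    }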
--- .../org/polypheny/db/adapter/java/AdapterTemplate.java | 1 + .../main/java/org/polypheny/db/ddl/DdlManagerImpl.java | 8 +++++--- .../org/polypheny/db/adapter/excel/ExcelEnumerator.java | 4 ++-- .../org/polypheny/db/adapter/excel/ExcelNamespace.java | 7 +++---- 4 files changed, 11 insertions(+), 9 deletions(-) diff --git a/core/src/main/java/org/polypheny/db/adapter/java/AdapterTemplate.java b/core/src/main/java/org/polypheny/db/adapter/java/AdapterTemplate.java index d8f451909d..a369aed543 100644 --- a/core/src/main/java/org/polypheny/db/adapter/java/AdapterTemplate.java +++ b/core/src/main/java/org/polypheny/db/adapter/java/AdapterTemplate.java @@ -130,6 +130,7 @@ public PreviewResult preview( Map settings, int limit ) { if ( tmp instanceof MetadataProvider mp ) { log.info( "🎯 Adapter supports MetadataProvider. Fetching metadata and preview..." ); AbstractNode meta = mp.fetchMetadataTree(); + mp.setRoot( meta ); String json = NodeSerializer.serializeNode( meta ).toString(); MetadataHasher hasher = new MetadataHasher(); String hash = hasher.hash( json ); diff --git a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java index ad2ec6ca50..c8b1291d0a 100644 --- a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java +++ b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java @@ -232,12 +232,14 @@ public void createSource( Transaction transaction, String uniqueName, String ada .map( s -> s.substring( s.lastIndexOf( '.' ) + 1 ) ) .collect( Collectors.toList() ); log.error( "Das sind die Attribute die gefiltert werden müssen: " + selectedAttributeNames ); + if ( adapter instanceof MetadataProvider mp ) { PublisherManager pm = PublisherManager.getInstance(); MetadataHasher hasher = new MetadataHasher(); AbstractNode node = mp.fetchMetadataTree(); + mp.setRoot( node ); String hash = hasher.hash( NodeSerializer.serializeNode( node ).toString() ); log.info( "Metadata hash at deployment: {}", hash ); @@ -245,7 +247,7 @@ public void createSource( Transaction transaction, String uniqueName, String ada log.info( "Key used during deployment: {} ", uniqueName ); pm.onAdapterDeploy( (Adapter & MetadataProvider) mp ); - mp.setRoot( node ); + mp.markSelectedAttributes( selectedAttributes ); log.error( "SelectedAttributes ist gesetzt aus dem DdlManager und der Tree ist das hier: " ); mp.printTree( null, 0 ); @@ -294,7 +296,7 @@ public void createSource( Transaction transaction, String uniqueName, String ada LogicalColumn column = catalog.getLogicalRel( namespace ).addColumn( exportedColumn.name, logical.id, - colPos++, + exportedColumn.physicalPosition, exportedColumn.type, exportedColumn.collectionsType, exportedColumn.length, @@ -319,7 +321,7 @@ public void createSource( Transaction transaction, String uniqueName, String ada LogicalColumn column = catalog.getLogicalRel( namespace ).addColumn( exportedColumn.name, logical.id, - colPos++, + exportedColumn.physicalPosition, exportedColumn.type, exportedColumn.collectionsType, exportedColumn.length, diff --git a/plugins/excel-adapter/src/main/java/org/polypheny/db/adapter/excel/ExcelEnumerator.java b/plugins/excel-adapter/src/main/java/org/polypheny/db/adapter/excel/ExcelEnumerator.java index 6b4eca3d9c..99b3eabeb7 100644 --- a/plugins/excel-adapter/src/main/java/org/polypheny/db/adapter/excel/ExcelEnumerator.java +++ b/plugins/excel-adapter/src/main/java/org/polypheny/db/adapter/excel/ExcelEnumerator.java @@ -460,7 +460,7 @@ public PolyValue[] convertRow( Row row ) { public 
PolyValue[] convertNormalRow(Row row) { final PolyValue[] objects = new PolyValue[fields.length]; for (int i = 0; i < fields.length; i++) { - Cell cell = row.getCell(i); + Cell cell = row.getCell(fields[i] - 1); objects[i] = convert(fieldTypes[i], cell); } return objects; @@ -470,7 +470,7 @@ public PolyValue[] convertStreamRow(Row row) { final PolyValue[] objects = new PolyValue[fields.length + 1]; objects[0] = PolyLong.of(System.currentTimeMillis()); for (int i = 0; i < fields.length; i++) { - Cell cell = row.getCell(i); + Cell cell = row.getCell(fields[i]); objects[i + 1] = convert(fieldTypes[i], cell); } return objects; diff --git a/plugins/excel-adapter/src/main/java/org/polypheny/db/adapter/excel/ExcelNamespace.java b/plugins/excel-adapter/src/main/java/org/polypheny/db/adapter/excel/ExcelNamespace.java index f67584158e..00de8150fe 100644 --- a/plugins/excel-adapter/src/main/java/org/polypheny/db/adapter/excel/ExcelNamespace.java +++ b/plugins/excel-adapter/src/main/java/org/polypheny/db/adapter/excel/ExcelNamespace.java @@ -66,18 +66,17 @@ public ExcelNamespace( long id, long adapterId, URL directoryUrl, ExcelTable.Fla } - public ExcelTable createExcelTable( PhysicalTable table, ExcelSource excelSource ) { + public ExcelTable createExcelTable( PhysicalTable table, ExcelSource excelSource, List physicalIds ) { final AlgDataTypeFactory typeFactory = new PolyTypeFactoryImpl( AlgDataTypeSystem.DEFAULT ); final AlgDataTypeFactory.Builder fieldInfo = typeFactory.builder(); List fieldTypes = new LinkedList<>(); - List fieldIds = new ArrayList<>( table.columns.size() ); for ( PhysicalColumn column : table.columns ) { AlgDataType sqlType = sqlType( typeFactory, column.type, column.length, column.scale, null ); fieldInfo.add( column.id, column.name, column.name, sqlType ).nullable( column.nullable ); fieldTypes.add( ExcelFieldType.getExcelFieldType( column.type ) ); - fieldIds.add( column.position ); } + // String excelFileName = excelSource.sheetName; String[] parts = table.name.split("_", 2); String filePart = parts[0]; @@ -93,7 +92,7 @@ public ExcelTable createExcelTable( PhysicalTable table, ExcelSource excelSource } catch ( MalformedURLException e ) { throw new GenericRuntimeException( e ); } - int[] fields = fieldIds.stream().mapToInt( i -> i ).toArray(); + int[] fields = physicalIds.stream().mapToInt( i -> i ).toArray(); ExcelTable physical = createTable( table, source, AlgDataTypeImpl.proto( fieldInfo.build() ), fieldTypes, fields, excelSource ); tableMap.put( physical.name + "_" + physical.allocationId, physical ); return physical; From 24d26dc0d39370c0b83367c45cf3df4274050ce9 Mon Sep 17 00:00:00 2001 From: romanost03 Date: Thu, 12 Jun 2025 12:32:43 +0200 Subject: [PATCH 32/68] Include new crud-route for checking metadata changes --- webui/src/main/java/org/polypheny/db/webui/Crud.java | 10 ++++++++++ .../main/java/org/polypheny/db/webui/HttpServer.java | 2 ++ 2 files changed, 12 insertions(+) diff --git a/webui/src/main/java/org/polypheny/db/webui/Crud.java b/webui/src/main/java/org/polypheny/db/webui/Crud.java index 90782cfede..b985cfc7a9 100644 --- a/webui/src/main/java/org/polypheny/db/webui/Crud.java +++ b/webui/src/main/java/org/polypheny/db/webui/Crud.java @@ -87,6 +87,7 @@ import org.polypheny.db.adapter.DataSource.ExportedColumn; import org.polypheny.db.adapter.DataStore; import org.polypheny.db.adapter.DataStore.FunctionalIndexInfo; +import org.polypheny.db.adapter.MetadataObserver.PublisherManager; import org.polypheny.db.adapter.index.IndexManager; import 
org.polypheny.db.adapter.java.AdapterTemplate; import org.polypheny.db.adapter.java.AdapterTemplate.PreviewResult; @@ -949,6 +950,15 @@ void sendConfirmation( final Context ctx ) { } + /** + * If any metadataChange is requested, they are sent here with the help of publisher manager. + */ + void metadataStatus( final Context ctx ) { + String uniqueName = ctx.pathParam( "uniqueName" ); + ctx.json( Map.of( "changed", true ) ); + } + + /** * Update a row from a table. The row is determined by the value of every PK column in that row (conjunction). */ diff --git a/webui/src/main/java/org/polypheny/db/webui/HttpServer.java b/webui/src/main/java/org/polypheny/db/webui/HttpServer.java index f889bcf95b..1af38ea1b5 100644 --- a/webui/src/main/java/org/polypheny/db/webui/HttpServer.java +++ b/webui/src/main/java/org/polypheny/db/webui/HttpServer.java @@ -355,6 +355,8 @@ private void attachRoutes( Javalin webuiServer, Crud crud ) { webuiServer.post( "/previewTable", crud::sendConfirmation ); + webuiServer.get( "/metadataStatus/{uniqueName}", crud::metadataStatus ); + webuiServer.get( "/getPolyAlgRegistry", crud::getPolyAlgRegistry ); webuiServer.post( "/buildPolyPlan", crud::buildPlanFromPolyAlg ); From 24cbb0c2159016773a03f523d545dddc4b7921bf Mon Sep 17 00:00:00 2001 From: romanost03 Date: Thu, 12 Jun 2025 12:34:19 +0200 Subject: [PATCH 33/68] Add TableFilter.java for filtering in "fetchMetadataTree" --- .../db/adapter/java/TableFilter.java | 62 +++++++++++++++++++ 1 file changed, 62 insertions(+) create mode 100644 core/src/main/java/org/polypheny/db/adapter/java/TableFilter.java diff --git a/core/src/main/java/org/polypheny/db/adapter/java/TableFilter.java b/core/src/main/java/org/polypheny/db/adapter/java/TableFilter.java new file mode 100644 index 0000000000..bd83895c88 --- /dev/null +++ b/core/src/main/java/org/polypheny/db/adapter/java/TableFilter.java @@ -0,0 +1,62 @@ +/* + * Copyright 2019-2025 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.db.adapter.java; + +import java.util.Arrays; +import java.util.Set; +import java.util.stream.Collectors; + +public enum TableFilter { + + Oracle( + Set.of( "AQ$_", "LOG", "MVIEW$_", "OL$", "REDO_", "REPL_", "ROLLING$", "SCHEDULER", "SQLPLUS", "HELP" ) + ), + + MySQL( + Set.of( "sys_config" ) + ), + + GENERIC( Set.of() ); + + public final Set ignoredTables; + + + TableFilter( final Set ignoredTables ) { + this.ignoredTables = ignoredTables.stream() + .map( String::trim ) + .collect( Collectors.toSet() ); + } + + + public static TableFilter forAdapter( String adapterName ) { + return Arrays.stream( values() ) + .filter( f -> f.name().equalsIgnoreCase( adapterName ) ) + .findFirst() + .orElse( GENERIC ); + } + + + public boolean shouldIgnore(String tableName) { + String upper = tableName.toUpperCase(); + return ignoredTables.stream() + .map(String::toUpperCase) + .anyMatch(upper::startsWith); + } + + + +} From 9ba6f7d01af0076599456cd04c6442936b9c75b1 Mon Sep 17 00:00:00 2001 From: romanost03 Date: Thu, 12 Jun 2025 12:34:58 +0200 Subject: [PATCH 34/68] Remove ChangeBuffer in PublisherManager.java --- .../MetadataObserver/ChangeBuffer.java | 76 ------------------- .../MetadataObserver/PublisherManager.java | 14 ---- 2 files changed, 90 deletions(-) delete mode 100644 core/src/main/java/org/polypheny/db/adapter/MetadataObserver/ChangeBuffer.java diff --git a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/ChangeBuffer.java b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/ChangeBuffer.java deleted file mode 100644 index 72bd62f430..0000000000 --- a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/ChangeBuffer.java +++ /dev/null @@ -1,76 +0,0 @@ -/* - * Copyright 2019-2025 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.adapter.MetadataObserver; - -import org.polypheny.db.adapter.Adapter; -import org.polypheny.db.adapter.MetadataObserver.Utils.SimpleDiff; -import org.polypheny.db.adapter.MetadataObserver.Utils.SimpleDiffUtils; -import org.polypheny.db.schemaDiscovery.AbstractNode; -import org.polypheny.db.schemaDiscovery.MetadataProvider; -import java.util.List; -import java.util.Optional; - -public class ChangeBuffer
<P extends Adapter & MetadataProvider>
{ - - private final P adapter; - private final String adapterHash; - - private volatile boolean hasChanges = false; - private List diffs = List.of(); - - - public ChangeBuffer( final P adapter, final String adapterHash ) { - this.adapter = adapter; - this.adapterHash = adapterHash; - } - - - public synchronized void push( AbstractNode node ) { - this.diffs = SimpleDiffUtils.findAddedNodes( adapter.getRoot(), node ); - this.hasChanges = !diffs.isEmpty(); - } - - - public synchronized Optional consume() { - if ( !hasChanges ) { - return Optional.empty(); - } - - ChangeDTO dto = new ChangeDTO( - adapterHash, - List.of( adapter.getUniqueName() ), - diffs - ); - hasChanges = false; - diffs = List.of(); - return Optional.of( dto ); - } - - - public record ChangeDTO( - String adapterHash, - List credentials, - List diffs - ) { - - } - - -} - - - diff --git a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/PublisherManager.java b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/PublisherManager.java index 8a7a5fc2d1..36a616b7b3 100644 --- a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/PublisherManager.java +++ b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/PublisherManager.java @@ -18,7 +18,6 @@ import lombok.extern.slf4j.Slf4j; import org.polypheny.db.adapter.Adapter; -import org.polypheny.db.adapter.MetadataObserver.ChangeBuffer.ChangeDTO; import org.polypheny.db.schemaDiscovery.AbstractNode; import org.polypheny.db.schemaDiscovery.MetadataProvider; import java.util.Map; @@ -30,8 +29,6 @@ public class PublisherManager { private final Map publishers = new ConcurrentHashMap<>(); - private final ConcurrentMap buffers = new ConcurrentHashMap<>(); - private static final PublisherManager INSTANCE = new PublisherManager(); @@ -65,15 +62,4 @@ public void onAdapterUndeploy( String uniqueName ) { } } - - public
<P extends Adapter & MetadataProvider>
void onMetadataChange( P adapter, AbstractNode node, String hash ) { - buffers.computeIfAbsent( hash, h -> new ChangeBuffer( adapter, h ) ) - .push( node ); - } - - public Optional consumeChanges(String adapterHash) { - ChangeBuffer buffer = buffers.get(adapterHash); - return buffer == null ? Optional.empty() : buffer.consume(); - } - } From 045de25816578c65a3e980b2cecbd13f8342cf7c Mon Sep 17 00:00:00 2001 From: romanost03 Date: Thu, 12 Jun 2025 12:36:04 +0200 Subject: [PATCH 35/68] Table fix: Querying data for sources now relies on physical table names. --- .../org/polypheny/db/adapter/Scannable.java | 2 +- .../entity/logical/LogicalTableWrapper.java | 4 +++ .../org/polypheny/db/ddl/DdlManagerImpl.java | 31 +++++++++++-------- .../db/partition/FrequencyMapImpl.java | 2 +- .../monetdb/sources/MonetdbSource.java | 10 ++++-- .../db/adapter/jdbc/MysqlSourcePlugin.java | 25 +++++++++++++-- .../source/OracleSource.java | 22 +++++++------ .../postgres/source/PostgresqlSource.java | 10 ++++-- 8 files changed, 75 insertions(+), 31 deletions(-) diff --git a/core/src/main/java/org/polypheny/db/adapter/Scannable.java b/core/src/main/java/org/polypheny/db/adapter/Scannable.java index 695ada4a09..0ce24cc644 100644 --- a/core/src/main/java/org/polypheny/db/adapter/Scannable.java +++ b/core/src/main/java/org/polypheny/db/adapter/Scannable.java @@ -73,7 +73,7 @@ static PhysicalEntity createSubstitutionEntity( Scannable scannable, Context con allocColumns.add( alloc ); } // we use the provided first x columns from amountPk as pks (still requires them to be ordered and first first) - scannable.createTable( context, LogicalTableWrapper.of( table, columns, columns.subList( 0, amountPk ).stream().map( c -> c.id ).toList() ), AllocationTableWrapper.of( allocSubTable, allocColumns ) ); + scannable.createTable( context, LogicalTableWrapper.of( table, columns, columns.subList( 0, amountPk ).stream().map( c -> c.id ).toList(), null, null ), AllocationTableWrapper.of( allocSubTable, allocColumns ) ); return scannable.getCatalog().getPhysicalsFromAllocs( allocSubTable.id ).get( 0 ); } diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalTableWrapper.java b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalTableWrapper.java index d6d6d6c120..fbb2a7390e 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalTableWrapper.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalTableWrapper.java @@ -28,4 +28,8 @@ public class LogicalTableWrapper { public List pkIds; + public String physicalSchemaFinal; + + public String physicalTable; + } diff --git a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java index c8b1291d0a..3ae4f19501 100644 --- a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java +++ b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java @@ -245,8 +245,7 @@ public void createSource( Transaction transaction, String uniqueName, String ada HashCache.getInstance().put( uniqueName, hash ); log.info( "Key used during deployment: {} ", uniqueName ); - pm.onAdapterDeploy( (Adapter & MetadataProvider) mp ); - + // pm.onAdapterDeploy( (Adapter & MetadataProvider) mp ); mp.markSelectedAttributes( selectedAttributes ); log.error( "SelectedAttributes has been set from the DdlManager and this is the tree: " ); mp.printTree( null, 0 ); @@ -265,13 +264,18 @@ public void createSource( Transaction transaction, String uniqueName, String ada // Create table, columns etc.
for ( Map.Entry> entry : exportedColumns.entrySet() ) { // Make sure the table name is uniqueString tableName = entry.getKey(); - String tableName = entry.getKey(); - if ( catalog.getSnapshot().rel().getTable( namespace, tableName ).isPresent() ) { - int i = 0; - while ( catalog.getSnapshot().rel().getTable( namespace, tableName + i ).isPresent() ) { - i++; - } - tableName += i; + String physicalSchema = entry.getValue().isEmpty() + ? Catalog.DEFAULT_NAMESPACE_NAME + : entry.getValue().get( 0 ).physicalSchemaName; + + String baseName = entry.getKey(); + String tableName = baseName; + + String physicalTable = baseName; + + int suffix = 0; + while ( catalog.getSnapshot().rel().getTable( namespace, tableName ).isPresent() ) { + tableName = baseName + suffix++; } LogicalTable logical = catalog.getLogicalRel( namespace ).addTable( tableName, EntityType.SOURCE, !(adapter).isDataReadOnly() ); List columns = new ArrayList<>(); @@ -290,7 +294,8 @@ public void createSource( Transaction transaction, String uniqueName, String ada for ( ExportedColumn exportedColumn : entry.getValue() ) { if ( adapter instanceof MetadataProvider mp && (attributes != null) ) { - if ( !selectedAttributeNames.contains( exportedColumn.name ) ) { + if ( selectedAttributeNames.stream().noneMatch( + name -> name.equalsIgnoreCase( exportedColumn.name ) ) ) { continue; } LogicalColumn column = catalog.getLogicalRel( namespace ).addColumn( @@ -349,7 +354,7 @@ public void createSource( Transaction transaction, String uniqueName, String ada transaction.attachCommitAction( () -> // we can execute with initial logical and allocation data as this is a source and this will not change - adapter.createTable( null, LogicalTableWrapper.of( logical, columns, List.of() ), AllocationTableWrapper.of( allocation.unwrapOrThrow( AllocationTable.class ), aColumns ) ) ); + adapter.createTable( null, LogicalTableWrapper.of( logical, columns, List.of(), physicalSchema, physicalTable ), AllocationTableWrapper.of( allocation.unwrapOrThrow( AllocationTable.class ), aColumns ) ) ); catalog.updateSnapshot(); } @@ -415,7 +420,7 @@ public void dropAdapter( String name, Statement statement ) { } } AdapterManager.getInstance().removeAdapter( adapter.id ); - PublisherManager.getInstance().onAdapterUndeploy( adapter.uniqueName ); + // PublisherManager.getInstance().onAdapterUndeploy( adapter.uniqueName ); } @@ -2238,7 +2243,7 @@ private AllocationTable addAllocationTable( long namespaceId, Statement statemen List refreshedPks = catalog.getSnapshot().rel().getKey( refreshedLogical.primaryKey ).orElseThrow().fieldIds; AllocationTable refreshedAlloc = catalog.getSnapshot().alloc().getAlloc( alloc.placementId, alloc.partitionId ).flatMap( e -> e.unwrap( AllocationTable.class ) ).orElseThrow(); - adapter.createTable( statement.getPrepareContext(), LogicalTableWrapper.of( refreshedLogical, sortByPosition( refreshedLColumns ), refreshedPks ), AllocationTableWrapper.of( refreshedAlloc, refreshedAColumns ) ); + adapter.createTable( statement.getPrepareContext(), LogicalTableWrapper.of( refreshedLogical, sortByPosition( refreshedLColumns ), refreshedPks, null, null ), AllocationTableWrapper.of( refreshedAlloc, refreshedAColumns ) ); }; if ( postpone ) { diff --git a/dbms/src/main/java/org/polypheny/db/partition/FrequencyMapImpl.java b/dbms/src/main/java/org/polypheny/db/partition/FrequencyMapImpl.java index 99f3f112bd..10e0ac5c70 100644 --- a/dbms/src/main/java/org/polypheny/db/partition/FrequencyMapImpl.java +++ 
b/dbms/src/main/java/org/polypheny/db/partition/FrequencyMapImpl.java @@ -331,7 +331,7 @@ private void createHotTables( LogicalTable table, List partitionsFromColdT if ( !hotPartitionsToCreate.isEmpty() ) { catalog.getSnapshot().alloc().getPartitionsOnDataPlacement( store.getAdapterId(), table.id ); - store.createTable( statement.getPrepareContext(), LogicalTableWrapper.of( null, null, null ), AllocationTableWrapper.of( null, null ) ); + store.createTable( statement.getPrepareContext(), LogicalTableWrapper.of( null, null, null, null, null ), AllocationTableWrapper.of( null, null ) ); List logicalColumns = new ArrayList<>(); catalog.getSnapshot().alloc().getColumnPlacementsOnAdapterPerEntity( store.getAdapterId(), table.id ).forEach( cp -> logicalColumns.add( catalog.getSnapshot().rel().getColumn( cp.columnId ).orElseThrow() ) ); diff --git a/plugins/monetdb-adapter/src/main/java/org/polypheny/db/adapter/monetdb/sources/MonetdbSource.java b/plugins/monetdb-adapter/src/main/java/org/polypheny/db/adapter/monetdb/sources/MonetdbSource.java index 3de052fd4e..fe65a90170 100644 --- a/plugins/monetdb-adapter/src/main/java/org/polypheny/db/adapter/monetdb/sources/MonetdbSource.java +++ b/plugins/monetdb-adapter/src/main/java/org/polypheny/db/adapter/monetdb/sources/MonetdbSource.java @@ -139,9 +139,15 @@ protected boolean requiresSchema() { @Override public List createTable( Context context, LogicalTableWrapper logical, AllocationTableWrapper allocation ) { + String physicalSchema; + if ( logical.physicalSchemaFinal == null ) { + physicalSchema = logical.table.getNamespaceName(); + } else { + physicalSchema = logical.physicalSchemaFinal; + } PhysicalTable table = adapterCatalog.createTable( - "sys", - logical.table.name, + physicalSchema, + logical.physicalTable, logical.columns.stream().collect( Collectors.toMap( c -> c.id, c -> c.name ) ), logical.table, logical.columns.stream().collect( Collectors.toMap( t -> t.id, t -> t ) ), diff --git a/plugins/mysql-adapter/src/main/java/org/polypheny/db/adapter/jdbc/MysqlSourcePlugin.java b/plugins/mysql-adapter/src/main/java/org/polypheny/db/adapter/jdbc/MysqlSourcePlugin.java index 50988ca0f8..1596ebd985 100644 --- a/plugins/mysql-adapter/src/main/java/org/polypheny/db/adapter/jdbc/MysqlSourcePlugin.java +++ b/plugins/mysql-adapter/src/main/java/org/polypheny/db/adapter/jdbc/MysqlSourcePlugin.java @@ -38,6 +38,8 @@ import org.polypheny.db.adapter.annotations.AdapterSettingInteger; import org.polypheny.db.adapter.annotations.AdapterSettingList; import org.polypheny.db.adapter.annotations.AdapterSettingString; +import org.polypheny.db.adapter.java.SchemaFilter; +import org.polypheny.db.adapter.java.TableFilter; import org.polypheny.db.adapter.jdbc.connection.ConnectionHandler; import org.polypheny.db.adapter.jdbc.connection.ConnectionHandlerException; import org.polypheny.db.adapter.jdbc.sources.AbstractJdbcSource; @@ -121,10 +123,15 @@ public MysqlSource( final long storeId, final String uniqueName, final Map createTable( Context context, LogicalTableWrapper logical, AllocationTableWrapper allocation ) { + String physicalSchema; + if ( logical.physicalSchemaFinal == null ) { + physicalSchema = logical.table.getNamespaceName(); + } else { + physicalSchema = logical.physicalSchemaFinal; + } PhysicalTable table = adapterCatalog.createTable( - // logical.table.getNamespaceName(), - "test", - logical.table.name, + physicalSchema, + logical.physicalTable.toLowerCase(), logical.columns.stream().collect( Collectors.toMap( c -> c.id, c -> c.name ) ), logical.table, 
logical.columns.stream().collect( Collectors.toMap( t -> t.id, t -> t ) ), @@ -172,6 +179,9 @@ public AbstractNode fetchMetadataTree() { String dbName = settings.get( "database" ); Node root = new Node( "relational", dbName ); + SchemaFilter filter = SchemaFilter.forAdapter( adapterName ); + TableFilter tableFilter = TableFilter.forAdapter( adapterName ); + PolyXid xid = PolyXid.generateLocalTransactionIdentifier( PUID.EMPTY_PUID, PUID.EMPTY_PUID ); try { @@ -187,6 +197,10 @@ public AbstractNode fetchMetadataTree() { ? schemas.getString( "TABLE_SCHEM" ) : schemas.getString( "TABLE_CAT" ); + if ( filter.ignoredSchemas.contains( schemaName ) ) { + continue; + } + AbstractNode schemaNode = new Node( "schema", schemaName ); try ( ResultSet tables = meta.getTables( @@ -198,6 +212,11 @@ public AbstractNode fetchMetadataTree() { while ( tables.next() ) { String tableName = tables.getString( "TABLE_NAME" ); + + if ( tableFilter.shouldIgnore( tableName ) ) { + continue; + } + AbstractNode tableNode = new Node( "table", tableName ); Set pkCols = new HashSet<>(); diff --git a/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/source/OracleSource.java b/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/source/OracleSource.java index d87e83d218..608878bf5c 100644 --- a/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/source/OracleSource.java +++ b/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/source/OracleSource.java @@ -23,6 +23,7 @@ import org.polypheny.db.adapter.annotations.AdapterSettingInteger; import org.polypheny.db.adapter.annotations.AdapterSettingList; import org.polypheny.db.adapter.annotations.AdapterSettingString; +import org.polypheny.db.adapter.java.TableFilter; import org.polypheny.db.adapter.jdbc.connection.ConnectionHandler; import org.polypheny.db.adapter.jdbc.connection.ConnectionHandlerException; import org.polypheny.db.adapter.jdbc.sources.AbstractJdbcSource; @@ -83,11 +84,6 @@ public class OracleSource extends AbstractJdbcSource implements MetadataProvider public AbstractNode metadataRoot; - private static final Pattern ORACLE_INTERNAL = - Pattern.compile( "^(AQ\\$|AQS\\$|SYS_|WRI\\$|MDSYS_|XDB_|CTXSYS_|OLAP\\$|LOG\\$|DBMS_|ORDDATA|ORDSYS)", - Pattern.CASE_INSENSITIVE ); - - public OracleSource( final long storeId, final String uniqueName, final Map settings, final DeployMode mode ) { super( storeId, @@ -131,10 +127,15 @@ protected void reloadSettings( List updatedSettings ) { @Override public List createTable( Context context, LogicalTableWrapper logical, AllocationTableWrapper allocation ) { + String physicalSchema; + if ( logical.physicalSchemaFinal == null ) { + physicalSchema = logical.table.getNamespaceName(); + } else { + physicalSchema = logical.physicalSchemaFinal; + } PhysicalTable table = adapterCatalog.createTable( - // logical.table.getNamespaceName(), - "SYSTEM", - logical.table.name, + physicalSchema.toUpperCase(), + logical.physicalTable.toUpperCase(), logical.columns.stream().collect( Collectors.toMap( c -> c.id, c -> c.name ) ), logical.table, logical.columns.stream().collect( Collectors.toMap( t -> t.id, t -> t ) ), @@ -268,6 +269,8 @@ public Map> getExportedColumns() { public AbstractNode fetchMetadataTree() { Node root = new Node( "relational", settings.get( "database" ) ); + TableFilter filter = TableFilter.forAdapter( adapterName ); + PolyXid xid = PolyXid.generateLocalTransactionIdentifier( PUID.EMPTY_PUID, PUID.EMPTY_PUID ); try { @@ -292,10 +295,11 @@ public AbstractNode 
fetchMetadataTree() { continue; } - if ( tableName.contains( "$" ) || ORACLE_INTERNAL.matcher( tableName ).find() ) { + if ( filter.shouldIgnore( tableName ) ) { continue; } + Node tableNode = new Node( "table", tableName ); Set pkCols = new HashSet<>(); diff --git a/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java b/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java index c024ae3655..85acc44319 100644 --- a/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java +++ b/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java @@ -358,9 +358,15 @@ protected boolean requiresSchema() { @Override public List createTable( Context context, LogicalTableWrapper logical, AllocationTableWrapper allocation ) { + String physicalSchema; + if ( logical.physicalSchemaFinal == null ) { + physicalSchema = logical.table.getNamespaceName(); + } else { + physicalSchema = logical.physicalSchemaFinal; + } PhysicalTable table = adapterCatalog.createTable( - logical.table.getNamespaceName(), - logical.table.name, + physicalSchema, + logical.physicalTable, logical.columns.stream().collect( Collectors.toMap( c -> c.id, c -> c.name ) ), logical.table, logical.columns.stream().collect( Collectors.toMap( t -> t.id, t -> t ) ), From 026b3402cb541be34c822e2b34373809204c557e Mon Sep 17 00:00:00 2001 From: romanost03 Date: Fri, 13 Jun 2025 18:22:47 +0200 Subject: [PATCH 36/68] Recreating metadata-change observer. Newly added/removed metadata is recognized and send to UI. --- .../MetadataObserver/AbstractListener.java | 39 ++--- .../MetadataObserver/AbstractPublisher.java | 2 +- .../MetadataObserver/PublisherManager.java | 22 +++ .../MetadataObserver/Utils/DiffType.java | 24 +++ .../MetadataObserver/Utils/MetaAnnotator.java | 135 +++++++++++++++ .../MetadataObserver/Utils/MetaDiffUtil.java | 120 ++++++++++++++ .../MetadataObserver/Utils/NodeCloner.java | 54 ++++++ .../MetadataObserver/Utils/SimpleDiff.java | 72 -------- .../Utils/SimpleDiffUtils.java | 154 ------------------ .../org/polypheny/db/ddl/DdlManagerImpl.java | 2 +- .../db/adapter/excel/ExcelSource.java | 33 +++- .../java/org/polypheny/db/webui/Crud.java | 52 +++++- .../org/polypheny/db/webui/HttpServer.java | 4 + 13 files changed, 456 insertions(+), 257 deletions(-) create mode 100644 core/src/main/java/org/polypheny/db/adapter/MetadataObserver/Utils/DiffType.java create mode 100644 core/src/main/java/org/polypheny/db/adapter/MetadataObserver/Utils/MetaAnnotator.java create mode 100644 core/src/main/java/org/polypheny/db/adapter/MetadataObserver/Utils/MetaDiffUtil.java create mode 100644 core/src/main/java/org/polypheny/db/adapter/MetadataObserver/Utils/NodeCloner.java delete mode 100644 core/src/main/java/org/polypheny/db/adapter/MetadataObserver/Utils/SimpleDiff.java delete mode 100644 core/src/main/java/org/polypheny/db/adapter/MetadataObserver/Utils/SimpleDiffUtils.java diff --git a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/AbstractListener.java b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/AbstractListener.java index e2d0dcbc4e..779e0f12da 100644 --- a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/AbstractListener.java +++ b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/AbstractListener.java @@ -21,10 +21,13 @@ import lombok.Getter; import lombok.extern.slf4j.Slf4j; import 
org.polypheny.db.adapter.Adapter; -import org.polypheny.db.adapter.MetadataObserver.Utils.SimpleDiff; -import org.polypheny.db.adapter.MetadataObserver.Utils.SimpleDiffUtils; +import org.polypheny.db.adapter.MetadataObserver.Utils.MetaAnnotator; +import org.polypheny.db.adapter.MetadataObserver.Utils.MetaDiffUtil; +import org.polypheny.db.adapter.MetadataObserver.Utils.MetaDiffUtil.DiffResult; +import org.polypheny.db.adapter.java.AdapterTemplate.PreviewResult; import org.polypheny.db.schemaDiscovery.AbstractNode; import org.polypheny.db.schemaDiscovery.MetadataProvider; +import org.polypheny.db.schemaDiscovery.NodeSerializer; import java.util.ArrayList; import java.util.List; @@ -34,6 +37,7 @@ public class AbstractListener
<P extends Adapter & MetadataProvider>
implements M private boolean available; private AbstractNode currentNode; private P adapter; + private String hash; private static final Gson GSON = new Gson(); @@ -42,22 +46,7 @@ public AbstractListener() { available = true; currentNode = null; this.adapter = null; - } - - - public void sendMetadataChangeEvent( Adapter adapter, List diffs ) { - JsonObject root = new JsonObject(); - root.addProperty( "type", "adapterMetadataChanged" ); - - JsonObject ad = new JsonObject(); - ad.addProperty( "uniqueName", adapter.getUniqueName() ); - ad.addProperty( "adapterName", adapter.getAdapterName() ); - ad.add( "settings", GSON.toJsonTree( adapter.getSettings() ) ); - ad.addProperty( "mode", adapter.getDeployMode().name() ); - root.add( "adapter", ad ); - - root.add( "diff", GSON.toJsonTree( diffs ) ); - + this.hash = null; } @@ -66,12 +55,20 @@ public void onMetadataChange( P adapter, AbstractNode node, String hash ) { available ^= true; this.currentNode = node; this.adapter = adapter; + this.hash = hash; + + Object preview = adapter.getPreview(); + log.info( "Listener saved credentials of adapter and sends now Request to UI and applies changes on adapter metadata and metadata the listener is holding." ); - List diffs; - diffs = SimpleDiffUtils.findAddedNodes( this.adapter.getRoot(), node ); + DiffResult result = MetaDiffUtil.diff( adapter.getRoot(), node ); + log.info( "Diffresult: {}", result ); + + AbstractNode annotatedCopy = MetaAnnotator.annotateTree( adapter.getRoot(), node, result ); + String json = NodeSerializer.serializeNode( annotatedCopy ).toString(); + log.info( "JSON: {}", json ); - sendMetadataChangeEvent( adapter, diffs ); + PublisherManager.getInstance().onMetadataChange( adapter.getUniqueName(), new PreviewResult( json, preview ) ); } diff --git a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/AbstractPublisher.java b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/AbstractPublisher.java index f7e130a0f1..b6bd8118c6 100644 --- a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/AbstractPublisher.java +++ b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/AbstractPublisher.java @@ -70,8 +70,8 @@ public void runCheck() { try { AbstractNode node = provider.fetchMetadataTree(); String fresh = NodeSerializer.serializeNode( node ).toString(); - String hash = hasher.hash( fresh ); + String lastHash = cache.getHash( provider.getUniqueName() ); log.info( "Fresh JSON: {}", fresh ); diff --git a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/PublisherManager.java b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/PublisherManager.java index 36a616b7b3..365b58679e 100644 --- a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/PublisherManager.java +++ b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/PublisherManager.java @@ -18,6 +18,7 @@ import lombok.extern.slf4j.Slf4j; import org.polypheny.db.adapter.Adapter; +import org.polypheny.db.adapter.java.AdapterTemplate.PreviewResult; import org.polypheny.db.schemaDiscovery.AbstractNode; import org.polypheny.db.schemaDiscovery.MetadataProvider; import java.util.Map; @@ -29,6 +30,7 @@ public class PublisherManager { private final Map publishers = new ConcurrentHashMap<>(); + private final Map changeCache = new ConcurrentHashMap<>(); private static final PublisherManager INSTANCE = new PublisherManager(); @@ -62,4 +64,24 @@ public void onAdapterUndeploy( String uniqueName ) { } } + + public boolean hasChange( String uniqueName ) { + return 
changeCache.containsKey( uniqueName ); + } + + + public void onMetadataChange( String uniqueName, PreviewResult data ) { + changeCache.put( uniqueName, data ); + } + + + public Optional fetchChange( String uniqueName ) { + return Optional.ofNullable( changeCache.get( uniqueName ) ); + } + + + void ack( String uniqueName ) { + changeCache.remove( uniqueName ); + } + } diff --git a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/Utils/DiffType.java b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/Utils/DiffType.java new file mode 100644 index 0000000000..4c5985dc93 --- /dev/null +++ b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/Utils/DiffType.java @@ -0,0 +1,24 @@ +/* + * Copyright 2019-2025 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.adapter.MetadataObserver.Utils; + +public enum DiffType { + ADDED, + REMOVED, + CHANGED, + NONE +} diff --git a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/Utils/MetaAnnotator.java b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/Utils/MetaAnnotator.java new file mode 100644 index 0000000000..a81e4bc6eb --- /dev/null +++ b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/Utils/MetaAnnotator.java @@ -0,0 +1,135 @@ +/* + * Copyright 2019-2025 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.db.adapter.MetadataObserver.Utils; + +import org.polypheny.db.adapter.MetadataObserver.Utils.MetaDiffUtil.DiffResult; +import org.polypheny.db.schemaDiscovery.AbstractNode; +import org.polypheny.db.schemaDiscovery.AttributeNode; +import org.polypheny.db.schemaDiscovery.Node; +import java.util.HashMap; +import java.util.Map; +import java.util.Optional; + +public final class MetaAnnotator { + + private static class PathHelper { + + static Map collect( AbstractNode node ) { + Map map = new HashMap<>(); + traverse( node, node.getName(), map ); + return map; + } + + + private static void traverse( + AbstractNode n, String path, + Map sink ) { + sink.put( path, n ); + for ( AbstractNode c : n.getChildren() ) { + traverse( c, path + "/" + c.getName(), sink ); + } + } + + + static Optional getNode( AbstractNode root, String path ) { + String[] seg = path.split( "/" ); + AbstractNode cur = root; + for ( int i = 1; i < seg.length; i++ ) { + String s = seg[i]; + cur = cur.getChildren().stream() + .filter( n -> n.getName().equals( s ) ) + .findFirst().orElse( null ); + if ( cur == null ) { + return Optional.empty(); + } + } + return Optional.of( cur ); + } + + } + + + public static AbstractNode annotateTree( AbstractNode oldRoot, AbstractNode newRoot, DiffResult diff ) { + AbstractNode copyOld = NodeCloner.deepCopy( oldRoot ); + AbstractNode copyNew = NodeCloner.deepCopy( newRoot ); + + Map newMap = PathHelper.collect( copyNew ); + Map oldMap = PathHelper.collect( copyOld ); + + for ( Map.Entry e : oldMap.entrySet() ) { + if ( e.getValue() instanceof AttributeNode a && a.isSelected() ) { + AbstractNode match = newMap.get( e.getKey() ); + if ( match instanceof AttributeNode aNew ) { + aNew.setSelected( true ); + } + } + } + + diff.getAdded().forEach( p -> PathHelper + .getNode( copyNew, p ) + .ifPresent( n -> n.addProperty( "diff", DiffType.ADDED ) ) ); + + /*diff.getChanged().forEach( p -> PathHelper + .getNode( copyNew, p ) + .ifPresent( n -> n.addProperty( "diff", DiffType.CHANGED ) ) );*/ + + for ( String p : diff.getRemoved() ) { + if ( newMap.containsKey( p ) ) { + continue; + } + createGhostNode( copyNew, p ); + } + + return copyNew; + + } + + + private static void createGhostNode( AbstractNode root, String fullPath ) { + String[] parts = fullPath.split( "/" ); + AbstractNode current = root; + StringBuilder curPath = new StringBuilder( root.getName() ); + + for ( int i = 1; i < parts.length; i++ ) { + String segment = parts[i]; + curPath.append( "/" ).append( segment ); + + Optional opt = + current.getChildren().stream() + .filter( n -> n.getName().equals( segment ) ) + .findFirst(); + + if ( opt.isPresent() ) { + current = opt.get(); + } else { + Node stub = new Node( "ghost", segment ); + if ( i == parts.length - 1 ) { + stub.addProperty( "diff", DiffType.REMOVED ); + } + current.addChild( stub ); + current = stub; + } + + } + } + +} + + + + + diff --git a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/Utils/MetaDiffUtil.java b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/Utils/MetaDiffUtil.java new file mode 100644 index 0000000000..ab86b9aae4 --- /dev/null +++ b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/Utils/MetaDiffUtil.java @@ -0,0 +1,120 @@ +/* + * Copyright 2019-2025 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.adapter.MetadataObserver.Utils; + +import lombok.AllArgsConstructor; +import lombok.EqualsAndHashCode; +import lombok.Getter; +import lombok.RequiredArgsConstructor; +import org.polypheny.db.schemaDiscovery.AbstractNode; +import org.polypheny.db.schemaDiscovery.AttributeNode; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Set; + +public class MetaDiffUtil { + + @Getter + @AllArgsConstructor + public static class DiffResult { + + private final Set added; + private final Set removed; + private final Set changed; + + + @Override + public String toString() { + return "DiffResult{" + + "added=" + added + + ", removed=" + removed + + ", changed=" + changed + + '}'; + } + + } + + + @EqualsAndHashCode + @RequiredArgsConstructor(staticName = "of") + public static class Fingerprint { + + private final String type; + private final boolean selected; + private final int propertiesHash; + + + static Fingerprint of( AbstractNode n ) { + boolean sel = (n instanceof AttributeNode) && ((AttributeNode) n).isSelected(); + return Fingerprint.of( n.getType(), sel, Objects.hashCode( n.getProperties() ) ); + } + + } + + + private MetaDiffUtil() { + } + + + public static DiffResult diff( AbstractNode oldRoot, AbstractNode newRoot ) { + Map oldMap = new HashMap<>(); + Map newMap = new HashMap<>(); + + collect( oldRoot, "", oldMap ); + collect( newRoot, "", newMap ); + + Set added = new HashSet<>( newMap.keySet() ); + added.removeAll( oldMap.keySet() ); + + Set removed = new HashSet<>( oldMap.keySet() ); + removed.removeAll( newMap.keySet() ); + + Set changed = new HashSet<>(); + for ( String key : oldMap.keySet() ) { + if ( newMap.containsKey( key ) && + !oldMap.get( key ).equals( newMap.get( key ) ) ) { + changed.add( key ); + } + } + + return new DiffResult( added, removed, changed ); + + } + + + private static void collect( + AbstractNode node, + String parentPath, + Map sink ) { + + String path = parentPath.isEmpty() ? + node.getName() : + parentPath + "/" + node.getName(); + + sink.put( path, Fingerprint.of( node ) ); + + for ( AbstractNode child : node.getChildren() ) { + collect( child, path, sink ); + } + } + + +}
diff --git a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/Utils/NodeCloner.java b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/Utils/NodeCloner.java new file mode 100644 index 0000000000..60bac92f8c --- /dev/null +++ b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/Utils/NodeCloner.java @@ -0,0 +1,54 @@ +/* + * Copyright 2019-2025 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.adapter.MetadataObserver.Utils; + +import org.polypheny.db.schemaDiscovery.AbstractNode; +import org.polypheny.db.schemaDiscovery.AttributeNode; +import org.polypheny.db.schemaDiscovery.Node; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; + +public class NodeCloner { + private NodeCloner() {} + + public static AbstractNode deepCopy(AbstractNode node) { + return copyNode(node); + } + + private static AbstractNode copyNode(AbstractNode n) { + AbstractNode clone; + + if (n instanceof AttributeNode a) { + AttributeNode c = new AttributeNode(a.getType(), a.getName()); + c.setSelected(a.isSelected()); + clone = c; + } else { + clone = new Node(n.getType(), n.getName()); + } + + clone.setProperties(new HashMap<>(n.getProperties())); + + List clonedChildren = new ArrayList<>(); + for (AbstractNode child : n.getChildren()) { + clonedChildren.add(copyNode(child)); + } + clone.setChildren(clonedChildren); + + return clone; + } +} diff --git a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/Utils/SimpleDiff.java b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/Utils/SimpleDiff.java deleted file mode 100644 index 9bc16340fc..0000000000 --- a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/Utils/SimpleDiff.java +++ /dev/null @@ -1,72 +0,0 @@ -/* - * Copyright 2019-2025 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.polypheny.db.adapter.MetadataObserver.Utils; - -public class SimpleDiff { - - public enum Type { - NODE_ADDED, - NONE - } - - - private final Type type; - private final String path; - private final String nodeType; - private final String nodeName; - - - public SimpleDiff( Type type, String path, String nodeType, String nodeName ) { - this.type = type; - this.path = path; - this.nodeType = nodeType; - this.nodeName = nodeName; - } - - - public Type getType() { - return type; - } - - - public String getPath() { - return path; - } - - - public String getNodeType() { - return nodeType; - } - - - public String getNodeName() { - return nodeName; - } - - - @Override - public String toString() { - return "SimpleDiff{" + - "type=" + type + - ", path='" + path + '\'' + - ", nodeType='" + nodeType + '\'' + - ", nodeName='" + nodeName + '\'' + - '}'; - } - - -} diff --git a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/Utils/SimpleDiffUtils.java b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/Utils/SimpleDiffUtils.java deleted file mode 100644 index 5db8b29e1c..0000000000 --- a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/Utils/SimpleDiffUtils.java +++ /dev/null @@ -1,154 +0,0 @@ -/* - * Copyright 2019-2025 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.adapter.MetadataObserver.Utils; - -import org.polypheny.db.schemaDiscovery.AbstractNode; -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -public class SimpleDiffUtils { - - public static List findAddedNodes( AbstractNode oldNode, AbstractNode newNode ) { - List diffs = new ArrayList<>(); - findAddedRecursive( oldNode, newNode, "/" + newNode.getName(), diffs ); - return diffs; - } - - - private static void findAddedRecursive( - AbstractNode oldNode, - AbstractNode newNode, - String path, - List diffs ) { - Map oldIndex = indexChildren( oldNode.getChildren() ); - Map newIndex = indexChildren( newNode.getChildren() ); - - for ( Map.Entry entry : newIndex.entrySet() ) { - String id = entry.getKey(); - if ( !oldIndex.containsKey( id ) ) { - String[] parts = id.split( ":", 2 ); - String childType = parts[0]; - String childName = parts[1]; - String childPath = path + "/" + childType + ":" + childName; - - diffs.add( new SimpleDiff( - SimpleDiff.Type.NODE_ADDED, - childPath, - childType, - childName - ) ); - findAddedRecursive( - new AbstractNodeStub(), - entry.getValue(), - childPath, - diffs - ); - } else { - AbstractNode oldChild = oldIndex.get( id ); - AbstractNode newChild = entry.getValue(); - String childPath = path + "/" + id; - findAddedRecursive( oldChild, newChild, childPath, diffs ); - } - } - } - - - /** - * Baut aus einer Liste von AbstractNode eine Map<"type:name", node>. 
- */ - private static Map indexChildren( List children ) { - Map map = new HashMap<>(); - if ( children != null ) { - for ( AbstractNode c : children ) { - String key = c.getType() + ":" + c.getName(); - map.put( key, c ); - } - } - return map; - } - - - /** - * Leere Platzhalter-Klasse, um leere Subtrees darzustellen - */ - private static class AbstractNodeStub implements AbstractNode { - - @Override - public String getType() { - return ""; - } - - - @Override - public String getName() { - return ""; - } - - - @Override - public List getChildren() { - return Collections.emptyList(); - } - - - @Override - public Map getProperties() { - return Collections.emptyMap(); - } - - - @Override - public void addChild( AbstractNode node ) { - throw new UnsupportedOperationException(); - } - - - @Override - public void addProperty( String key, Object value ) { - throw new UnsupportedOperationException(); - } - - - @Override - public void setType( String type ) { - throw new UnsupportedOperationException(); - } - - - @Override - public void setName( String name ) { - throw new UnsupportedOperationException(); - } - - - @Override - public void setChildren( List children ) { - throw new UnsupportedOperationException(); - } - - - @Override - public void setProperties( Map properties ) { - throw new UnsupportedOperationException(); - } - - } - -} diff --git a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java index 3ae4f19501..6bddecdb33 100644 --- a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java +++ b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java @@ -245,7 +245,7 @@ public void createSource( Transaction transaction, String uniqueName, String ada HashCache.getInstance().put( uniqueName, hash ); log.info( "Key used during deployment: {} ", uniqueName ); - // pm.onAdapterDeploy( (Adapter & MetadataProvider) mp ); + pm.onAdapterDeploy( (Adapter & MetadataProvider) mp ); mp.markSelectedAttributes( selectedAttributes ); log.error( "SelectedAttributes ist gesetzt aus dem DdlManager und der Tree ist das hier: " ); diff --git a/plugins/excel-adapter/src/main/java/org/polypheny/db/adapter/excel/ExcelSource.java b/plugins/excel-adapter/src/main/java/org/polypheny/db/adapter/excel/ExcelSource.java index 7590496bb0..9c7a402e8b 100644 --- a/plugins/excel-adapter/src/main/java/org/polypheny/db/adapter/excel/ExcelSource.java +++ b/plugins/excel-adapter/src/main/java/org/polypheny/db/adapter/excel/ExcelSource.java @@ -280,7 +280,7 @@ public Map> getExportedColumns() { int position = 1; try { Source source = Sources.of( new URL( excelDir, fileName ) ); - File file = new File( source.path() ); //creating a new file instance + File file = new File( source.path() ); // creating a new file instance FileInputStream fs = new FileInputStream( file ); Workbook workbook = WorkbookFactory.create( fs ); @@ -385,6 +385,25 @@ public Map> getExportedColumns() { } + private Set resolveFileNames() { + Set names = new HashSet<>(); + + if ( Sources.of( excelDir ).file().isFile() ) { + names.add( Sources.of( excelDir ).file().getName() ); + return names; + } + + File[] files = Sources.of( excelDir ).file() + .listFiles( f -> f.getName().matches( ".*\\.(xlsx?|xlsx\\.gz|xls\\.gz)$" ) ); + if ( files != null ) { + Arrays.stream( files ) + .map( File::getName ) + .forEach( names::add ); + } + return names; + } + + private void addInformationExportedColumns() { for ( Map.Entry> entry : getExportedColumns().entrySet() ) { InformationGroup group = new 
InformationGroup( informationPage, entry.getValue().get( 0 ).physicalSchemaName ); @@ -411,11 +430,19 @@ private void addInformationExportedColumns() { @Override public AbstractNode fetchMetadataTree() { - String filePath = "C:/Users/roman/Desktop/Mappe1.xlsx"; + Source filePath; + // String filePath = "C:/Users/roman/Desktop/Mappe1.xlsx"; + String firstFile = resolveFileNames().stream().findFirst() + .orElseThrow(() -> new GenericRuntimeException("No file found")); + try { + filePath = Sources.of(new URL(excelDir, firstFile)); + } catch ( MalformedURLException e ) { + throw new RuntimeException( e ); + } String mappeName = "Workbook"; AbstractNode root = new Node( "excel", mappeName ); - try ( FileInputStream fis = new FileInputStream( filePath ); Workbook wb = WorkbookFactory.create( fis ) ) { + try ( FileInputStream fis = new FileInputStream( filePath.path() ); Workbook wb = WorkbookFactory.create( fis ) ) { for ( Sheet sheet : wb ) {
diff --git a/webui/src/main/java/org/polypheny/db/webui/Crud.java b/webui/src/main/java/org/polypheny/db/webui/Crud.java index b985cfc7a9..b812e3bc71 100644 --- a/webui/src/main/java/org/polypheny/db/webui/Crud.java +++ b/webui/src/main/java/org/polypheny/db/webui/Crud.java @@ -34,6 +34,7 @@ import java.beans.PropertyChangeEvent; import java.beans.PropertyChangeListener; import java.io.BufferedReader; +import java.io.ByteArrayInputStream; import java.io.File; import java.io.FileInputStream; import java.io.IOException; @@ -934,7 +935,16 @@ void sendConfirmation( final Context ctx ) { fileNames = Arrays.stream( cleaned.split( "," ) ).map( String::trim ).filter( s -> !s.isEmpty() ).toList(); } - String path = handleUploadFiles( inputStreams, fileNames, (AbstractAdapterSettingDirectory) allSettings.get( "directory" ), a ); + Map fileBytes = new HashMap<>(); + + for ( Map.Entry e : inputStreams.entrySet() ) { + try ( InputStream in = e.getValue() ) { + byte[] data = IOUtils.toByteArray( in ); + fileBytes.put( e.getKey(), data ); + } + } + + String path = handleUploadFiles( fileBytes, fileNames, (AbstractAdapterSettingDirectory) allSettings.get( "directory" ), a ); a.settings.put( "directory", path ); log.error( "Full path: {}", path ); } @@ -955,7 +965,20 @@ void sendConfirmation( final Context ctx ) { */ void metadataStatus( final Context ctx ) { String uniqueName = ctx.pathParam( "uniqueName" ); - ctx.json( Map.of( "changed", true ) ); + + boolean changed = PublisherManager.getInstance().hasChange( uniqueName ); + ctx.json( Map.of( "changed", changed ) ); + } + + void metadataChange( final Context ctx ) { + String uniqueName = ctx.pathParam( "uniqueName" ); + Optional data = PublisherManager.getInstance().fetchChange( uniqueName ); + ctx.json( data ); + + } + + void metadataAck( final Context ctx ) { + } @@ -2332,16 +2355,35 @@ private static String handleUploadFiles( Map inputStreams, } - private static String handleUploadFiles( Map inputStreams, List fileNames, AbstractAdapterSettingDirectory setting, PreviewRequest a ) { + /* private static String handleUploadFiles( Map inputStreams, List fileNames, AbstractAdapterSettingDirectory setting, PreviewRequest a ) { for ( String fileName : fileNames ) { setting.inputStreams.put( fileName, inputStreams.get( fileName ) ); } File path = PolyphenyHomeDirManager.getInstance().registerNewFolder( "data/csv/" + a.adapterName ); for ( Entry is : setting.inputStreams.entrySet() ) { - try { + try ( InputStream in = is.getValue() ) { File file = new File( path, is.getKey() ); log.info( "📁 Writing file: {}", file.getAbsolutePath() ); - FileUtils.copyInputStreamToFile( is.getValue(), file ); + FileUtils.copyInputStreamToFile( in, file ); + } catch ( IOException e ) { + throw new GenericRuntimeException( e ); + } + } + return path.getAbsolutePath(); + }*/ + + + // Takes a map of file bytes instead of a map of input streams + private static String handleUploadFiles( Map files, List fileNames, AbstractAdapterSettingDirectory setting, PreviewRequest previewRequest ) { + File path = PolyphenyHomeDirManager.getInstance() + .registerNewFolder("data/csv/" + previewRequest.adapterName); + for ( String name : fileNames ) { + byte[] data = files.get( name ); + if ( data == null ) continue; + try ( InputStream in = new ByteArrayInputStream( data ) ) { + File target = new File( path, name ); + log.info( "📂 Writing file: {}", target.getAbsolutePath() ); + FileUtils.copyInputStreamToFile( in, target ); } catch ( IOException e ) { throw new GenericRuntimeException( e ); }
diff --git a/webui/src/main/java/org/polypheny/db/webui/HttpServer.java b/webui/src/main/java/org/polypheny/db/webui/HttpServer.java index 1af38ea1b5..4a2d5a1b2e 100644 --- a/webui/src/main/java/org/polypheny/db/webui/HttpServer.java +++ b/webui/src/main/java/org/polypheny/db/webui/HttpServer.java @@ -357,6 +357,10 @@ private void attachRoutes( Javalin webuiServer, Crud crud ) { webuiServer.get( "/metadataStatus/{uniqueName}", crud::metadataStatus ); + webuiServer.get( "/metadataChange/{uniqueName}", crud::metadataChange ); + + webuiServer.post( "/metadataAck/{uniqueName}", crud::metadataAck ); + webuiServer.get( "/getPolyAlgRegistry", crud::getPolyAlgRegistry ); webuiServer.post( "/buildPolyPlan", crud::buildPlanFromPolyAlg );
From 0562438feaeab1cd29473ace6c8f29b8f85ded7d Mon Sep 17 00:00:00 2001 From: romanost03 Date: Sat, 14 Jun 2025 15:43:52 +0200 Subject: [PATCH 37/68] Removed Optional<> datatype from fetching metadata changes.
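With this change fetchChange() hands back the cached PreviewResult directly and returns null when nothing is pending, so callers have to guard against the missing value themselves. A minimal sketch of such a guard (illustrative only, not part of this patch; the 404 handling is an assumption):

    // Hypothetical caller: fetchChange may now return null instead of Optional.empty().
    PreviewResult data = PublisherManager.getInstance().fetchChange( uniqueName );
    if ( data == null ) {
        // Nothing cached for this adapter; report it instead of serializing null.
        ctx.status( 404 ).result( "No pending metadata change for " + uniqueName );
    } else {
        ctx.json( data );
    }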
--- .../db/adapter/MetadataObserver/PublisherManager.java | 4 ++-- webui/src/main/java/org/polypheny/db/webui/Crud.java | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/PublisherManager.java b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/PublisherManager.java index 365b58679e..4a948f61f6 100644 --- a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/PublisherManager.java +++ b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/PublisherManager.java @@ -75,8 +75,8 @@ public void onMetadataChange( String uniqueName, PreviewResult data ) { } - public Optional fetchChange( String uniqueName ) { - return Optional.ofNullable( changeCache.get( uniqueName ) ); + public PreviewResult fetchChange( String uniqueName ) { + return changeCache.get( uniqueName ); } diff --git a/webui/src/main/java/org/polypheny/db/webui/Crud.java b/webui/src/main/java/org/polypheny/db/webui/Crud.java index b812e3bc71..2642c08616 100644 --- a/webui/src/main/java/org/polypheny/db/webui/Crud.java +++ b/webui/src/main/java/org/polypheny/db/webui/Crud.java @@ -972,7 +972,7 @@ void metadataStatus( final Context ctx ) { void metadataChange( final Context ctx ) { String uniqueName = ctx.pathParam( "uniqueName" ); - Optional data = PublisherManager.getInstance().fetchChange( uniqueName ); + PreviewResult data = PublisherManager.getInstance().fetchChange( uniqueName ); ctx.json( data ); } From c4b47a9218e5afc85d09c1b0a019e319f9743199 Mon Sep 17 00:00:00 2001 From: romanost03 Date: Mon, 16 Jun 2025 21:37:56 +0200 Subject: [PATCH 38/68] Code for applying changes on new metadata. Adding new configuration possibility for sources --- .../polypheny/db/adapter/AdapterManager.java | 9 ++++ .../MetadataObserver/AbstractListener.java | 15 ++++++- .../MetadataObserver/AbstractPublisher.java | 6 +++ .../MetadataObserver/MetadataListener.java | 2 +- .../MetadataObserver/MetadataPublisher.java | 1 + .../MetadataObserver/PublisherManager.java | 4 +- .../org/polypheny/db/ddl/DdlManagerImpl.java | 2 +- .../java/org/polypheny/db/webui/Crud.java | 42 ++++++++++++++++--- .../org/polypheny/db/webui/HttpServer.java | 2 + 9 files changed, 72 insertions(+), 11 deletions(-) diff --git a/core/src/main/java/org/polypheny/db/adapter/AdapterManager.java b/core/src/main/java/org/polypheny/db/adapter/AdapterManager.java index 4d1aa30957..9f94976b86 100644 --- a/core/src/main/java/org/polypheny/db/adapter/AdapterManager.java +++ b/core/src/main/java/org/polypheny/db/adapter/AdapterManager.java @@ -37,6 +37,7 @@ import org.polypheny.db.catalog.entity.LogicalAdapter.AdapterType; import org.polypheny.db.catalog.entity.allocation.AllocationEntity; import org.polypheny.db.catalog.exceptions.GenericRuntimeException; +import org.polypheny.db.schemaDiscovery.MetadataProvider; public class AdapterManager { @@ -238,6 +239,14 @@ public void restoreAdapters( List adapters ) { } + public Optional getMetadataProvider(String uniqueName) { + return getSource(uniqueName) + .filter(mp -> mp instanceof MetadataProvider) + .map(mp -> (MetadataProvider) mp); + } + + + public record AdapterInformation( String name, String description, AdapterType type, List settings, List modes ) { public static JsonSerializer getSerializer() { diff --git a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/AbstractListener.java b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/AbstractListener.java index 779e0f12da..7c1b8305b5 100644 --- 
a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/AbstractListener.java +++ b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/AbstractListener.java @@ -29,6 +29,7 @@ import org.polypheny.db.schemaDiscovery.MetadataProvider; import org.polypheny.db.schemaDiscovery.NodeSerializer; import java.util.ArrayList; +import java.util.Arrays; import java.util.List; @Slf4j @@ -75,9 +76,19 @@ public void onMetadataChange( P adapter, AbstractNode node, String hash ) { @Override - public void applyChange() { - available ^= true; + public void applyChange( String[] metadata ) { log.info( "Changes are going to be applied" ); + + this.adapter.setRoot( this.currentNode ); + this.adapter.markSelectedAttributes( Arrays.stream( metadata ).toList() ); + HashCache.getInstance().put( this.adapter.getUniqueName(), this.hash ); + + this.currentNode = null; + this.adapter = null; + this.hash = null; + + available ^= true; + } diff --git a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/AbstractPublisher.java b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/AbstractPublisher.java index b6bd8118c6..8b89d273ef 100644 --- a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/AbstractPublisher.java +++ b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/AbstractPublisher.java @@ -90,4 +90,10 @@ public void runCheck() { } } + + @Override + public MetadataListener getListener() { + return this.listener; + } + } diff --git a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/MetadataListener.java b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/MetadataListener.java index 77c3be6402..a3324f510b 100644 --- a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/MetadataListener.java +++ b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/MetadataListener.java @@ -24,6 +24,6 @@ public interface MetadataListener

{ void onMetadataChange( P adapter, AbstractNode node, String hash ); boolean isAvailable(); - void applyChange(); + void applyChange( String[] metadata ); } diff --git a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/MetadataPublisher.java b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/MetadataPublisher.java index 9af9f5cec1..3517308dd5 100644 --- a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/MetadataPublisher.java +++ b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/MetadataPublisher.java @@ -22,6 +22,7 @@ public interface MetadataPublisher { void start(); void stop(); void runCheck(); + MetadataListener getListener(); } diff --git a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/PublisherManager.java b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/PublisherManager.java index 4a948f61f6..c3eec9b1bf 100644 --- a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/PublisherManager.java +++ b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/PublisherManager.java @@ -80,7 +80,9 @@ public PreviewResult fetchChange( String uniqueName ) { } - void ack( String uniqueName ) { + public void ack( String uniqueName, String[] metadata ) { + MetadataPublisher publisher = publishers.get( uniqueName ); + publisher.getListener().applyChange( metadata ); changeCache.remove( uniqueName ); } diff --git a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java index 6bddecdb33..831423bcae 100644 --- a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java +++ b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java @@ -420,7 +420,7 @@ public void dropAdapter( String name, Statement statement ) { } } AdapterManager.getInstance().removeAdapter( adapter.id ); - // PublisherManager.getInstance().onAdapterUndeploy( adapter.uniqueName ); + PublisherManager.getInstance().onAdapterUndeploy( adapter.uniqueName ); } diff --git a/webui/src/main/java/org/polypheny/db/webui/Crud.java b/webui/src/main/java/org/polypheny/db/webui/Crud.java index 2642c08616..64bcde61ca 100644 --- a/webui/src/main/java/org/polypheny/db/webui/Crud.java +++ b/webui/src/main/java/org/polypheny/db/webui/Crud.java @@ -17,6 +17,7 @@ package org.polypheny.db.webui; +import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.ObjectMapper; @@ -158,6 +159,7 @@ import org.polypheny.db.processing.ImplementationContext.ExecutedContext; import org.polypheny.db.processing.QueryContext; import org.polypheny.db.schemaDiscovery.MetadataProvider; +import org.polypheny.db.schemaDiscovery.NodeSerializer; import org.polypheny.db.security.SecurityManager; import org.polypheny.db.transaction.Statement; import org.polypheny.db.transaction.Transaction; @@ -937,10 +939,10 @@ void sendConfirmation( final Context ctx ) { Map fileBytes = new HashMap<>(); - for (Map.Entry e : inputStreams.entrySet()) { - try (InputStream in = e.getValue()) { - byte[] data = IOUtils.toByteArray(in); - fileBytes.put(e.getKey(), data); + for ( Map.Entry e : inputStreams.entrySet() ) { + try ( InputStream in = e.getValue() ) { + byte[] data = IOUtils.toByteArray( in ); + fileBytes.put( e.getKey(), data ); } } @@ -970,6 +972,7 @@ void metadataStatus( final Context ctx ) { ctx.json( Map.of( "changed", changed ) ); } + void metadataChange( final Context ctx ) { String 
uniqueName = ctx.pathParam( "uniqueName" ); PreviewResult data = PublisherManager.getInstance().fetchChange( uniqueName ); @@ -977,7 +980,28 @@ void metadataChange( final Context ctx ) { } + void metadataAck( final Context ctx ) { + AckPayload payload = ctx.bodyAsClass( AckPayload.class ); + log.info( "Acknowledgement incoming: " + payload.toString() ); + PublisherManager.getInstance().ack( payload.uniqueName, payload.selectedPaths ); + } + + + void getMetaConfiguration( final Context ctx ) { + String uniqueName = ctx.pathParam( "uniqueName" ); + MetadataProvider provider = AdapterManager.getInstance() + .getMetadataProvider( uniqueName ) + .orElseThrow( () -> new IllegalStateException( + "Adapter %s does not support the metadata provider interface!".formatted( uniqueName ) ) ); + + PreviewResult data = new PreviewResult( NodeSerializer.serializeNode( provider.getRoot() ).toString(), provider.getPreview() ); + ctx.json( data ); + } + + + void setMetaConfiguration( final Context ctx ) { + AckPayload config = ctx.bodyAsClass( AckPayload.class ); } @@ -2376,10 +2400,12 @@ private static String handleUploadFiles( Map inputStreams, // Takes a map of file bytes instead of a map of input streams private static String handleUploadFiles( Map files, List fileNames, AbstractAdapterSettingDirectory setting, PreviewRequest previewRequest ) { File path = PolyphenyHomeDirManager.getInstance() - .registerNewFolder("data/csv/" + previewRequest.adapterName); + .registerNewFolder( "data/csv/" + previewRequest.adapterName ); for ( String name : fileNames ) { byte[] data = files.get( name ); - if ( data == null ) continue; + if ( data == null ) { + continue; + } try ( InputStream in = new ByteArrayInputStream( data ) ) { File target = new File( path, name ); log.info( "📂 Writing file: {}", target.getAbsolutePath() ); @@ -3206,4 +3232,8 @@ public void propertyChange( PropertyChangeEvent evt ) { } + public record AckPayload( @JsonProperty String uniqueName, @JsonProperty String[] selectedPaths ) { + + } + }
diff --git a/webui/src/main/java/org/polypheny/db/webui/HttpServer.java b/webui/src/main/java/org/polypheny/db/webui/HttpServer.java index 4a2d5a1b2e..c83fcb269d 100644 --- a/webui/src/main/java/org/polypheny/db/webui/HttpServer.java +++ b/webui/src/main/java/org/polypheny/db/webui/HttpServer.java @@ -361,6 +361,8 @@ private void attachRoutes( Javalin webuiServer, Crud crud ) { webuiServer.post( "/metadataAck/{uniqueName}", crud::metadataAck ); + webuiServer.get( "/metadataConfiguration/{uniqueName}", crud::getMetaConfiguration ); + webuiServer.get( "/getPolyAlgRegistry", crud::getPolyAlgRegistry ); webuiServer.post( "/buildPolyPlan", crud::buildPlanFromPolyAlg );
From 53c5079b6759eebd2d740e0857522ec1b27d263a Mon Sep 17 00:00:00 2001 From: romanost03 Date: Mon, 16 Jun 2025 22:26:09 +0200 Subject: [PATCH 39/68] Aliases for columns are now sent along when deploying a new source.
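The AdapterModel sent by the UI now carries a columnAliases map alongside the other deployment settings. Assuming the map goes from physical column name to the alias chosen in the UI, a consumer might apply it roughly like this (a sketch; applyAlias is a hypothetical callback, since this patch only transports and logs the map):

    // Sketch under the assumption columnAliases maps physical name -> alias.
    Map<String, String> aliases = model.columnAliases;
    if ( aliases != null ) {
        aliases.forEach( ( physicalName, alias ) -> applyAlias( physicalName, alias ) );
    }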
--- webui/src/main/java/org/polypheny/db/webui/Crud.java | 1 + .../polypheny/db/webui/models/catalog/AdapterModel.java | 9 +++++++-- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/webui/src/main/java/org/polypheny/db/webui/Crud.java b/webui/src/main/java/org/polypheny/db/webui/Crud.java index 64bcde61ca..f375cfe2fb 100644 --- a/webui/src/main/java/org/polypheny/db/webui/Crud.java +++ b/webui/src/main/java/org/polypheny/db/webui/Crud.java @@ -2262,6 +2262,7 @@ void createAdapter( final Context ctx ) throws ServletException, IOException { log.info( "Adapter: " + a.adapterName ); log.info( "Type: " + a.type ); log.info( "Mode: " + a.mode ); + log.info( "Aliases: " + a.columnAliases.toString() ); log.info( "Settings:" ); for ( Map.Entry entry : a.settings.entrySet() ) { diff --git a/webui/src/main/java/org/polypheny/db/webui/models/catalog/AdapterModel.java b/webui/src/main/java/org/polypheny/db/webui/models/catalog/AdapterModel.java index 02b2d2103f..1681755418 100644 --- a/webui/src/main/java/org/polypheny/db/webui/models/catalog/AdapterModel.java +++ b/webui/src/main/java/org/polypheny/db/webui/models/catalog/AdapterModel.java @@ -57,6 +57,9 @@ public class AdapterModel extends IdEntity { @JsonProperty public List metadata; + @JsonProperty + public Map columnAliases; + public AdapterModel( @JsonProperty("id") @Nullable Long id, @@ -66,7 +69,8 @@ public AdapterModel( @JsonProperty("settings") Map settings, @JsonProperty("mode") DeployMode mode, @JsonProperty("indexMethods") List indexMethods, - @JsonProperty("metadata") List metadata ) { + @JsonProperty("metadata") List metadata, + @JsonProperty("columnAliases") Map columnAliases) { super( id, name ); this.adapterName = adapterName; this.type = type; @@ -74,6 +78,7 @@ public AdapterModel( this.mode = mode; this.indexMethods = indexMethods; this.metadata = metadata; + this.columnAliases = columnAliases; } @@ -90,7 +95,7 @@ public static AdapterModel from( LogicalAdapter adapter ) { settings, adapter.mode, adapter.type == AdapterType.STORE ? ((DataStore) dataStore).getAvailableIndexMethods() : List.of(), - null) ).orElse( null ); + null, null) ).orElse( null ); } From 124f804695d6f7154f34a2f0402fc2f1631d2d0c Mon Sep 17 00:00:00 2001 From: romanost03 Date: Fri, 20 Jun 2025 18:32:50 +0200 Subject: [PATCH 40/68] Add new metadata to catalog when something was added. 
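The paths acknowledged by the UI are expected to follow the convention schema.table.column, optionally carrying a type tag after a colon; splitPath (introduced below) strips that suffix and falls back to the default namespace when no schema segment is present. A small worked example of the assumed convention (the sample path is made up):

    // "public.emps.salary:INTEGER" -> schema "public", table "emps", column "salary"
    ParsedPath p = splitPath( "public.emps.salary:INTEGER" );
    // "emps.salary" -> default namespace, table "emps", column "salary"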
--- .../java/org/polypheny/db/ddl/DdlManager.java | 1 + .../org/polypheny/db/ddl/DdlManagerImpl.java | 141 ++++++++++++++++++ .../jdbc/sources/AbstractJdbcSource.java | 3 +- .../java/org/polypheny/db/webui/Crud.java | 13 ++ 4 files changed, 157 insertions(+), 1 deletion(-) diff --git a/core/src/main/java/org/polypheny/db/ddl/DdlManager.java b/core/src/main/java/org/polypheny/db/ddl/DdlManager.java index 25e16feeb9..70cb36a461 100644 --- a/core/src/main/java/org/polypheny/db/ddl/DdlManager.java +++ b/core/src/main/java/org/polypheny/db/ddl/DdlManager.java @@ -557,6 +557,7 @@ public static DdlManager getInstance() { public abstract void dropGraphPlacement( long graphId, DataStore dataStores, Statement statement ); + public abstract void addSelectedMetadata( Transaction tsx, String uniqueName, long namespace, List selectedPaths ); public abstract void dropCollection( LogicalCollection catalogCollection, Statement statement ); diff --git a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java index 831423bcae..78bc27190e 100644 --- a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java +++ b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java @@ -24,6 +24,7 @@ import java.util.HashMap; import java.util.HashSet; import java.util.LinkedHashMap; +import java.util.LinkedHashSet; import java.util.LinkedList; import java.util.List; import java.util.Map; @@ -363,6 +364,111 @@ public void createSource( Transaction transaction, String uniqueName, String ada } + public void addSelectedMetadata( Transaction tsx, String uniqueName, long namespace, List selectedPaths ) { + Optional> adapter = AdapterManager.getInstance().getSource( uniqueName ); + extendMetaSettings( adapter, selectedPaths ); + if ( adapter == null ) { + throw new GenericRuntimeException( "No adapter found for unique name " + uniqueName ); + } + Map> exported = adapter.get().getExportedColumns(); + Map> additions = + selectedPaths.stream() + .map( DdlManagerImpl::splitPath ) + .collect( Collectors.groupingBy( + ParsedPath::table, + Collectors.mapping( ParsedPath::column, + Collectors.toList() ) ) ); + + for ( Map.Entry> entry : additions.entrySet() ) { + String tableName = entry.getKey(); + String physicalTable = entry.getKey(); + List newCols = entry.getValue(); + + Optional logicalOpt = catalog.getSnapshot().rel().getTable( namespace, physicalTable ); + LogicalTable logical; + AllocationPlacement placement; + List logicalCols = new ArrayList<>(); + List allocCols = new ArrayList<>(); + AllocationEntity allocation; + + if ( logicalOpt.isEmpty() ) { + logical = catalog.getLogicalRel( namespace ) + .addTable( tableName, EntityType.SOURCE, !adapter.get().isDataReadOnly() ); + Pair part = + createSinglePartition( logical.namespaceId, logical ); + placement = catalog.getAllocRel( namespace ) + .addPlacement( logical.id, namespace, adapter.get().getAdapterId() ); + allocation = catalog.getAllocRel( namespace ) + .addAllocation( adapter.get().getAdapterId(), + placement.id, + part.left.id, + logical.id ); + catalog.getAllocRel( namespace ) + .addAllocation( adapter.get().getAdapterId(), placement.id, part.left.id, logical.id ); + } else { + allocation = null; + logical = logicalOpt.get(); + Optional placementOpt = + catalog.getSnapshot().alloc() + .getPlacement( adapter.get().getAdapterId(), logical.id ); + if ( placementOpt.isPresent() ) { + placement = placementOpt.get(); + } else { + placement = catalog.getAllocRel( namespace ) + .addPlacement( logical.id, 
namespace, adapter.get().getAdapterId() ); + + catalog.getAllocRel( namespace ) + .addAllocation( adapter.get().getAdapterId(), + placement.id, + createSinglePartition( logical.namespaceId, logical ).left.id, + logical.id ); + } + + } + List exportedCols = exported.getOrDefault( physicalTable, List.of() ); + + String physicalSchema = + exportedCols.isEmpty() + ? Catalog.DEFAULT_NAMESPACE_NAME + : exportedCols.get( 0 ).physicalSchemaName; + for ( ExportedColumn ec : exportedCols ) { + if ( !newCols.contains( ec.name ) ) { + continue; + } + boolean exists = catalog.getSnapshot().rel() + .getColumn( logical.id, ec.name ).isPresent(); + if ( exists ) { + continue; + } + + LogicalColumn lc = catalog.getLogicalRel( namespace ).addColumn( + ec.name, logical.id, ec.physicalPosition, ec.type, + ec.collectionsType, ec.length, ec.scale, ec.dimension, + ec.cardinality, ec.nullable, Collation.getDefaultCollation() ); + AllocationColumn ac = catalog.getAllocRel( namespace ).addColumn( + placement.id, logical.id, lc.id, + adapter.get().getAdapterId(), PlacementType.STATIC, ec.physicalPosition ); + + logicalCols.add( lc ); + allocCols.add( ac ); + } + if ( !logicalCols.isEmpty() ) { + buildNamespace( Catalog.defaultNamespaceId, logical, adapter.get() ); + if ( logicalOpt.isEmpty() ) { + tsx.attachCommitAction( () -> + adapter.get().createTable( null, + LogicalTableWrapper.of( logical, logicalCols, List.of(), + physicalSchema, tableName ), + AllocationTableWrapper.of( allocation.unwrapOrThrow( AllocationTable.class ), + allocCols ) ) ); + } + catalog.updateSnapshot(); + } + } + + } + + @Override public void dropAdapter( String name, Statement statement ) { name = name.replace( "'", "" ); @@ -3117,4 +3223,39 @@ public void dropType() { throw new GenericRuntimeException( "Not supported yet" ); } + + public static ParsedPath splitPath( String fullPath ) { + String noType = fullPath.split( ":" )[0]; + int lastDot = noType.lastIndexOf( '.' ); + String column = noType.substring( lastDot + 1 ); + String tableKey = noType.substring( 0, lastDot ); + + String[] parts = tableKey.split( "\\." ); + String table = parts[parts.length - 1]; + String schema = parts.length >= 2 + ? 
parts[parts.length - 2] + : Catalog.DEFAULT_NAMESPACE_NAME; + + return new ParsedPath( schema, table, column ); + } + + + private void extendMetaSettings( Optional> adapter, List newData ) { + Map settings = adapter.get().getSettings(); + String json = settings.getOrDefault( "selectedAttributes", "[]" ); + List current = new Gson().fromJson( json, new TypeToken>() { + }.getType() ); + Set merged = new LinkedHashSet<>( current ); + merged.addAll( newData ); + + settings.put( "selectedAttributes", new Gson().toJson( merged ) ); + + + } + + + record ParsedPath( String schema, String table, String column ) { + + } + } diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/sources/AbstractJdbcSource.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/sources/AbstractJdbcSource.java index 7b679cb1f7..a752de2540 100644 --- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/sources/AbstractJdbcSource.java +++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/sources/AbstractJdbcSource.java @@ -50,6 +50,7 @@ import org.polypheny.db.schema.Namespace; import org.polypheny.db.sql.language.SqlDialect; import org.polypheny.db.transaction.PUID; +import org.polypheny.db.transaction.PUID.Type; import org.polypheny.db.transaction.PolyXid; import org.polypheny.db.type.PolyType; @@ -209,7 +210,7 @@ public void rollback( PolyXid xid ) { @Override public Map> getExportedColumns() { Map> map = new HashMap<>(); - PolyXid xid = PolyXid.generateLocalTransactionIdentifier( PUID.EMPTY_PUID, PUID.EMPTY_PUID ); + PolyXid xid = PolyXid.generateLocalTransactionIdentifier( PUID.randomPUID( Type.RANDOM ), PUID.randomPUID( Type.RANDOM ) ); try { ConnectionHandler connectionHandler = connectionFactory.getOrCreateConnectionHandler( xid ); java.sql.Statement statement = connectionHandler.getStatement(); diff --git a/webui/src/main/java/org/polypheny/db/webui/Crud.java b/webui/src/main/java/org/polypheny/db/webui/Crud.java index f375cfe2fb..e16b3df343 100644 --- a/webui/src/main/java/org/polypheny/db/webui/Crud.java +++ b/webui/src/main/java/org/polypheny/db/webui/Crud.java @@ -122,6 +122,7 @@ import org.polypheny.db.catalog.snapshot.LogicalRelSnapshot; import org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.config.RuntimeConfig; +import org.polypheny.db.ddl.DdlManager; import org.polypheny.db.docker.AutoDocker; import org.polypheny.db.docker.DockerInstance; import org.polypheny.db.docker.DockerManager; @@ -985,6 +986,18 @@ void metadataAck( final Context ctx ) { AckPayload payload = ctx.bodyAsClass( AckPayload.class ); log.info( "Acknowledgement incoming: " + payload.toString() ); PublisherManager.getInstance().ack( payload.uniqueName, payload.selectedPaths ); + + Optional> adapter = AdapterManager.getInstance().getSource( payload.uniqueName ); + Transaction transaction = transactionManager.startTransaction( Catalog.defaultUserId, false, "metadata-ack-" + payload.uniqueName ); + try { + DdlManager.getInstance().addSelectedMetadata( transaction, payload.uniqueName, Catalog.defaultNamespaceId, List.of( payload.selectedPaths ) ); + transaction.commit(); + ctx.status( 200 ).result( "ACK processed" ); + } catch ( Exception e ) { + log.error( "metadataAck failed", e ); + ctx.status(200).json(Map.of("message", "ACK was processed")); + + } } From 9adaad708ce31da34bde27f0c303af50fa6ae677 Mon Sep 17 00:00:00 2001 From: romanost03 Date: Sat, 21 Jun 2025 19:05:36 +0200 Subject: [PATCH 41/68] 
Adding/Removing metadata changes works now. --- .../MetadataObserver/AbstractListener.java | 3 +- .../java/org/polypheny/db/ddl/DdlManager.java | 2 + .../org/polypheny/db/ddl/DdlManagerImpl.java | 184 ++++++++++++++++++ .../java/org/polypheny/db/webui/Crud.java | 15 +- 4 files changed, 200 insertions(+), 4 deletions(-)
diff --git a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/AbstractListener.java b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/AbstractListener.java index 7c1b8305b5..2777b898f4 100644 --- a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/AbstractListener.java +++ b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/AbstractListener.java @@ -80,7 +80,8 @@ public void applyChange( String[] metadata ) { log.info( "Changes are going to be applied" ); this.adapter.setRoot( this.currentNode ); - this.adapter.markSelectedAttributes( Arrays.stream( metadata ).toList() ); + if ( metadata != null && metadata.length > 0 ) + this.adapter.markSelectedAttributes( Arrays.stream( metadata ).toList() ); HashCache.getInstance().put( this.adapter.getUniqueName(), this.hash ); this.currentNode = null;
diff --git a/core/src/main/java/org/polypheny/db/ddl/DdlManager.java b/core/src/main/java/org/polypheny/db/ddl/DdlManager.java index 70cb36a461..d9dc813bbb 100644 --- a/core/src/main/java/org/polypheny/db/ddl/DdlManager.java +++ b/core/src/main/java/org/polypheny/db/ddl/DdlManager.java @@ -559,6 +559,8 @@ public static DdlManager getInstance() { public abstract void addSelectedMetadata( Transaction tsx, String uniqueName, long namespace, List selectedPaths ); + public abstract void dropSourceEntities( List paths, Statement statement ); + public abstract void dropCollection( LogicalCollection catalogCollection, Statement statement ); public abstract void dropCollectionPlacement( long namespaceId, LogicalCollection collection, List> dataStores, Statement statement );
diff --git a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java index 78bc27190e..2b410a83e7 100644 --- a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java +++ b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java @@ -19,6 +19,7 @@ import com.google.common.collect.ImmutableList; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collection; import java.util.Comparator; import java.util.HashMap; @@ -530,6 +531,189 @@ public void dropAdapter( String name, Statement statement ) { } + @Override + public void dropSourceEntities( List paths, Statement statement ) { + + /* + * 1) Preprocessing: per table, collect what is actually supposed to be dropped. + * If the set contains a star ("*") the whole table goes away, + * otherwise only the listed columns. + */ + Map> worklist = new HashMap<>(); + + for ( String rawPath : paths ) { + String path = rawPath.replace( "'", "" ).trim(); + if ( path.isBlank() ) { + continue; + } + + String[] seg = path.replace( '/', '.' ) + .replace( '\\', '.' ) + .split( "\\." ); + if ( seg.length < 2 ) { + throw new GenericRuntimeException( "Invalid path: " + path ); + } + + String normalized = path.replace( '/', '.' ).replace( "\\", "." ); + + String columnName = (seg.length >= 4) ? seg[seg.length - 1] : null; + String tableName = seg[seg.length - (columnName == null ? 1 : 2)]; + String schemaName = seg[seg.length - (columnName == null ? 2 : 3)]; + + /* --- Resolve namespace & table --------------------------------- */ + LogicalNamespace ns = catalog.getSnapshot() + .getNamespace( schemaName ) + .orElseThrow( () -> new GenericRuntimeException( + "Schema path not found: " + schemaName ) ); + + LogicalTable table = catalog.getSnapshot() + .rel() + .getTable( ns.id, tableName ) + .orElseThrow( () -> new GenericRuntimeException( + "Table not found: " + tableName + " in schema " + schemaName ) ); + + /* Only SOURCE tables may be touched here */ + if ( table.entityType != EntityType.SOURCE ) { + throw new GenericRuntimeException( "Table " + path + " is not a SOURCE object." ); + } + + /* Populate the work set */ + worklist.computeIfAbsent( table, t -> new HashSet<>() ); + worklist.get( table ).add( columnName == null ? "*" : columnName ); + } + + /* + * 2) Now actually delete. + * - if "*" is in the set => drop the whole table + * - otherwise drop every listed column + */ + for ( Map.Entry> entry : worklist.entrySet() ) { + LogicalTable table = entry.getKey(); + Set cols = entry.getValue(); + + for ( String col : cols ) { + dropSourceColumn( table, col, statement ); + } + + boolean tableHasColumnsLeft = + catalog.getLogicalRel( table.namespaceId ) + .getColumns() // Map + .values().stream() // -> Stream + .anyMatch( c -> c.tableId == table.id ); + + if ( cols.contains( "*" ) || !tableHasColumnsLeft ) + dropWholeSourceTable( table, statement ); + } + + + /* 4) Check whether any columns are left after deletion */ + + + + /* A single cache reset is sufficient. */ + statement.getQueryProcessor().resetCaches(); + } + + /* -------------------------------------------------------------------------- */ + /* Helper methods */ + /* -------------------------------------------------------------------------- */ + + + private void dropWholeSourceTable( LogicalTable table, Statement statement ) { + + /* ------------------------------------------------------------------ + * 0) Placement & partition (READ) - needed for deleteAllocation + * ------------------------------------------------------------------ */ + List placements = + catalog.getSnapshot().alloc().getPlacementsFromLogical( table.id ); + + if ( placements.size() != 1 ) { + throw new GenericRuntimeException( + "SOURCE table " + table.name + " has more than one placement." ); + } + AllocationPlacement placement = placements.get( 0 ); + + /* ------------------------------------------------------------------ + * 1) Find the columns & foreign keys to delete (READ) + * ------------------------------------------------------------------ */ + List columns = + catalog.getSnapshot().rel().getColumns( table.id ); + List fks = + catalog.getSnapshot().rel().getForeignKeys( table.id ); + + /* ------------------------------------------------------------------ + * 2) Delete foreign keys (WRITE) + * ------------------------------------------------------------------ */ + for ( LogicalForeignKey fk : fks ) { + catalog.getLogicalRel( table.namespaceId ) + .deleteForeignKey( fk.id ); + } + + /* ------------------------------------------------------------------ + * 3) Delete column placements + columns (WRITE) + * ------------------------------------------------------------------ */ + for ( LogicalColumn col : columns ) { + + // remove all placements of the column + for ( AllocationPlacement p : + catalog.getSnapshot().alloc().getPlacementsOfColumn( col.id ) ) { + catalog.getAllocRel( table.namespaceId ) + .deleteColumn( p.id, col.id ); + } + + // remove the column from the logical model + catalog.getLogicalRel( table.namespaceId ) + .deleteColumn( col.id ); + } + + /* ------------------------------------------------------------------ + * 4) Delete the allocation of the whole table (WRITE) + * ------------------------------------------------------------------ */ + catalog.getAllocRel( table.namespaceId ) + .deleteAllocation( placement.id ); + + /* ------------------------------------------------------------------ + * 5) Delete primary key + table (WRITE) + * ------------------------------------------------------------------ */ + catalog.getLogicalRel( table.namespaceId ) + .deletePrimaryKey( table.id ); + + catalog.getLogicalRel( table.namespaceId ) + .deleteTable( table.id ); + } + + + + private void dropSourceColumn( LogicalTable table, String columnName, Statement statement ) { + + LogicalColumn column = catalog.getSnapshot() + .rel() + .getColumn( table.id, columnName ) + .orElseThrow( () -> new GenericRuntimeException( + "Column " + columnName + " does not exist in table " + table.name ) ); + + /* 1) Remove FKs that use this column */ + for ( LogicalForeignKey fk : catalog.getSnapshot().rel().getForeignKeys( table.id ) ) { + boolean usesColumn = fk.getFieldIds().contains( column.id ) + || fk.getReferencedKeyFieldIds() + .contains( column.id ); + + if ( usesColumn ) { + catalog.getLogicalRel( table.namespaceId ) + .deleteForeignKey( fk.id ); + } + } + + /* 2) Remove all placements of the column */ + for ( AllocationPlacement placement : catalog.getSnapshot().alloc().getPlacementsOfColumn( column.id ) ) { + catalog.getAllocRel( table.namespaceId ).deleteColumn( placement.id, column.id ); + } + + /* 3) Delete the column from the logical model */ + catalog.getLogicalRel( table.namespaceId ).deleteColumn( column.id ); + } + + @Override public void renameNamespace( String newName, String currentName ) { newName = newName.toLowerCase();
diff --git a/webui/src/main/java/org/polypheny/db/webui/Crud.java b/webui/src/main/java/org/polypheny/db/webui/Crud.java index e16b3df343..e41fb6484e 100644 --- a/webui/src/main/java/org/polypheny/db/webui/Crud.java +++ b/webui/src/main/java/org/polypheny/db/webui/Crud.java @@ -985,18 +985,27 @@ void metadataChange( final Context ctx ) { void metadataAck( final Context ctx ) { AckPayload payload = ctx.bodyAsClass(
AckPayload.class ); log.info( "Acknowledgement incoming: " + payload.toString() ); - PublisherManager.getInstance().ack( payload.uniqueName, payload.selectedPaths ); + PublisherManager.getInstance().ack( payload.uniqueName, payload.addedPaths ); Optional> adapter = AdapterManager.getInstance().getSource( payload.uniqueName ); Transaction transaction = transactionManager.startTransaction( Catalog.defaultUserId, false, "metadata-ack-" + payload.uniqueName ); try { - DdlManager.getInstance().addSelectedMetadata( transaction, payload.uniqueName, Catalog.defaultNamespaceId, List.of( payload.selectedPaths ) ); + if ( payload.addedPaths != null ) + DdlManager.getInstance().addSelectedMetadata( transaction, payload.uniqueName, Catalog.defaultNamespaceId, List.of( payload.addedPaths ) ); + + if ( payload.removedPaths != null ) { + Statement stmt = transaction.createStatement(); + DdlManager.getInstance().dropSourceEntities( List.of(payload.removedPaths), stmt ); + stmt = null; + } transaction.commit(); ctx.status( 200 ).result( "ACK processed" ); } catch ( Exception e ) { log.error( "metadataAck failed", e ); ctx.status(200).json(Map.of("message", "ACK was processed")); + } finally { + transaction = null; } } @@ -3246,7 +3255,7 @@ public void propertyChange( PropertyChangeEvent evt ) { } - public record AckPayload( @JsonProperty String uniqueName, @JsonProperty String[] selectedPaths ) { + public record AckPayload( @JsonProperty String uniqueName, @JsonProperty String[] addedPaths, @JsonProperty String[] removedPaths ) { }
From 756f91551fbdaefe0b3961f57b08606ad76e1afe Mon Sep 17 00:00:00 2001 From: romanost03 Date: Sat, 21 Jun 2025 21:47:41 +0200 Subject: [PATCH 42/68] Add more metadata from nodes which are sent to the UI. --- .../db/schemaDiscovery/NodeSerializer.java | 3 +++ .../db/schemaDiscovery/NodeUtil.java | 21 +++++++++++++++++++ .../java/org/polypheny/db/webui/Crud.java | 8 +++++-- .../org/polypheny/db/webui/HttpServer.java | 2 ++ 4 files changed, 32 insertions(+), 2 deletions(-) create mode 100644 core/src/main/java/org/polypheny/db/schemaDiscovery/NodeUtil.java
diff --git a/core/src/main/java/org/polypheny/db/schemaDiscovery/NodeSerializer.java b/core/src/main/java/org/polypheny/db/schemaDiscovery/NodeSerializer.java index 15a1ab6fe6..666b87c787 100644 --- a/core/src/main/java/org/polypheny/db/schemaDiscovery/NodeSerializer.java +++ b/core/src/main/java/org/polypheny/db/schemaDiscovery/NodeSerializer.java @@ -29,6 +29,9 @@ public static ObjectNode serializeNode(AbstractNode node) { json.put( "type", node.getType() ); json.put( "name", node.getName() ); + if ( node instanceof AttributeNode attr ) + json.put( "isSelected", attr.isSelected() ); + ObjectNode props = objectMapper.createObjectNode(); node.getProperties().forEach((key, value) -> { props.putPOJO(key, value);
diff --git a/core/src/main/java/org/polypheny/db/schemaDiscovery/NodeUtil.java b/core/src/main/java/org/polypheny/db/schemaDiscovery/NodeUtil.java new file mode 100644 index 0000000000..744f318630 --- /dev/null +++ b/core/src/main/java/org/polypheny/db/schemaDiscovery/NodeUtil.java @@ -0,0 +1,21 @@ +/* + * Copyright 2019-2025 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.schemaDiscovery; + +public class NodeUtil { + +} diff --git a/webui/src/main/java/org/polypheny/db/webui/Crud.java b/webui/src/main/java/org/polypheny/db/webui/Crud.java index e41fb6484e..90e9e7f0a8 100644 --- a/webui/src/main/java/org/polypheny/db/webui/Crud.java +++ b/webui/src/main/java/org/polypheny/db/webui/Crud.java @@ -1023,8 +1023,8 @@ void getMetaConfiguration( final Context ctx ) { void setMetaConfiguration( final Context ctx ) { - AckPayload config = ctx.bodyAsClass( AckPayload.class ); - + ConfigPayload config = ctx.bodyAsClass( ConfigPayload.class ); + log.error( config.toString() ); } @@ -3259,4 +3259,8 @@ public record AckPayload( @JsonProperty String uniqueName, @JsonProperty String[ } + public record ConfigPayload ( @JsonProperty String uniqueName, @JsonProperty String[] selected ) { + + } + } diff --git a/webui/src/main/java/org/polypheny/db/webui/HttpServer.java b/webui/src/main/java/org/polypheny/db/webui/HttpServer.java index c83fcb269d..64eda214d4 100644 --- a/webui/src/main/java/org/polypheny/db/webui/HttpServer.java +++ b/webui/src/main/java/org/polypheny/db/webui/HttpServer.java @@ -363,6 +363,8 @@ private void attachRoutes( Javalin webuiServer, Crud crud ) { webuiServer.get( "/metadataConfiguration/{uniqueName}", crud::getMetaConfiguration ); + webuiServer.post( "/setMetaConfig", crud::setMetaConfiguration ); + webuiServer.get( "/getPolyAlgRegistry", crud::getPolyAlgRegistry ); webuiServer.post( "/buildPolyPlan", crud::buildPlanFromPolyAlg ); From 485cb3152ba0a85804b5da5f5a2ead101960a489 Mon Sep 17 00:00:00 2001 From: romanost03 Date: Mon, 23 Jun 2025 12:08:31 +0200 Subject: [PATCH 43/68] Bugfix in removing tables: Tables without any columns are deleted automatically --- .../java/org/polypheny/db/ddl/DdlManager.java | 2 +- .../db/schemaDiscovery/NodeUtil.java | 95 ++- .../org/polypheny/db/ddl/DdlManagerImpl.java | 565 ++++++++---------- .../java/org/polypheny/db/webui/Crud.java | 101 +++- 4 files changed, 419 insertions(+), 344 deletions(-) diff --git a/core/src/main/java/org/polypheny/db/ddl/DdlManager.java b/core/src/main/java/org/polypheny/db/ddl/DdlManager.java index d9dc813bbb..3407b98c0d 100644 --- a/core/src/main/java/org/polypheny/db/ddl/DdlManager.java +++ b/core/src/main/java/org/polypheny/db/ddl/DdlManager.java @@ -557,7 +557,7 @@ public static DdlManager getInstance() { public abstract void dropGraphPlacement( long graphId, DataStore dataStores, Statement statement ); - public abstract void addSelectedMetadata( Transaction tsx, String uniqueName, long namespace, List selectedPaths ); + public abstract void addSelectedMetadata( Transaction tsx, Statement statement, String uniqueName, long namespace, List selectedPaths ); public abstract void dropSourceEntities( List paths, Statement statement ); diff --git a/core/src/main/java/org/polypheny/db/schemaDiscovery/NodeUtil.java b/core/src/main/java/org/polypheny/db/schemaDiscovery/NodeUtil.java index 744f318630..aa8f3b4995 100644 --- a/core/src/main/java/org/polypheny/db/schemaDiscovery/NodeUtil.java +++ 
b/core/src/main/java/org/polypheny/db/schemaDiscovery/NodeUtil.java @@ -16,6 +16,99 @@ package org.polypheny.db.schemaDiscovery; -public class NodeUtil { +import lombok.extern.slf4j.Slf4j; +import java.util.ArrayDeque; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Deque; +import java.util.HashSet; +import java.util.List; +import java.util.Optional; +import java.util.Set; + +@Slf4j +public final class NodeUtil { + + private NodeUtil() { + } + + + public static Set collectSelecedAttributePaths( AbstractNode root ) { + Set selected = new HashSet<>(); + if ( root == null ) { + return selected; + } + Deque path = new ArrayDeque<>(); + traverse( root, path, selected ); + return selected; + } + + + private static void traverse( AbstractNode node, Deque path, Set acc ) { + path.addLast( node.getName() ); + if ( node instanceof AttributeNode attr && attr.isSelected() ) { + log.debug( ">> visiting {}", String.join( "/", path ) + " selected=" + attr.isSelected() ); + acc.add( String.join( ".", path ) ); + } + + for ( AbstractNode child : node.getChildren() ) { + traverse( child, path, acc ); + } + + path.removeLast(); + } + + + public static void unmarkSelectedAttributes( AbstractNode metadataRoot, List pathsToUnmark ) { + + List> attributePaths = new ArrayList<>(); + + for ( String path : pathsToUnmark ) { + String cleanPath = path.replaceFirst( "^.*/", "" ).trim(); + + List segments = Arrays.asList( cleanPath.split( "\\." ) ); + + if ( !segments.isEmpty() && segments.get( 0 ).equals( metadataRoot.getName() ) ) { + segments = segments.subList( 1, segments.size() ); + } + + attributePaths.add( segments ); + } + + for ( List pathSegments : attributePaths ) { + AbstractNode current = metadataRoot; + + for ( int i = 0; i < pathSegments.size(); i++ ) { + String segment = pathSegments.get( i ); + + if ( i == pathSegments.size() - 1 ) { + Optional attrNodeOpt = current.getChildren().stream() + .filter( c -> c instanceof AttributeNode && segment.equals( c.getName() ) ) + .findFirst(); + + if ( attrNodeOpt.isPresent() ) { + ((AttributeNode) attrNodeOpt.get()).setSelected( false ); + log.info( "βœ”οΈ Attribut demarkiert: {}", String.join( ".", pathSegments ) ); + } else { + log.warn( "✘ Attribut nicht gefunden: {}", String.join( ".", pathSegments ) ); + } + } else { + Optional childOpt = current.getChildren().stream() + .filter( c -> segment.equals( c.getName() ) ) + .findFirst(); + + if ( childOpt.isPresent() ) { + current = childOpt.get(); + } else { + log.warn( "✘ Segment nicht gefunden: {} in Pfad {}", + segment, String.join( ".", pathSegments ) ); + break; + } + } + } + } + } + } diff --git a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java index 2b410a83e7..bdd5b2dde2 100644 --- a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java +++ b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java @@ -34,6 +34,7 @@ import java.util.function.Supplier; import java.util.stream.Collectors; import javax.annotation.Nullable; +import com.google.common.collect.Streams; import com.google.gson.Gson; import com.google.gson.reflect.TypeToken; import lombok.extern.slf4j.Slf4j; @@ -61,6 +62,7 @@ import org.polypheny.db.algebra.type.AlgDataTypeField; import org.polypheny.db.algebra.type.DocumentType; import org.polypheny.db.catalog.Catalog; +import org.polypheny.db.catalog.catalogs.RelAdapterCatalog; import org.polypheny.db.catalog.entity.LogicalAdapter; import 
org.polypheny.db.catalog.entity.LogicalAdapter.AdapterType; import org.polypheny.db.catalog.entity.LogicalConstraint; @@ -87,6 +89,7 @@ import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.entity.logical.LogicalTableWrapper; import org.polypheny.db.catalog.entity.logical.LogicalView; +import org.polypheny.db.catalog.entity.physical.PhysicalColumn; import org.polypheny.db.catalog.exceptions.GenericRuntimeException; import org.polypheny.db.catalog.logistic.Collation; import org.polypheny.db.catalog.logistic.ConstraintType; @@ -219,6 +222,241 @@ public void createStore( String uniqueName, String adapterName, AdapterType adap } + @Override + public void addSelectedMetadata( Transaction tsx, Statement statement, String uniqueName, long namespace, List selectedPaths ) { + List selectedAttributeNames = new ArrayList<>(); + selectedAttributeNames = selectedPaths.stream().map( s -> s.substring( s.lastIndexOf( '.' ) + 1 ) ).collect( Collectors.toList() ); + + Optional> adapter = AdapterManager.getInstance().getSource( uniqueName ); + RelAdapterCatalog rel1 = (RelAdapterCatalog) adapter.get().getCatalog(); + + Map> exportedColumns; + try { + exportedColumns = adapter.get().getExportedColumns(); + } catch ( Exception e ) { + throw new GenericRuntimeException( "Somethign went wrong while getting data source", e ); + } + + for ( Map.Entry> entry : exportedColumns.entrySet() ) { + String tableName = entry.getKey(); + LogicalTable logical = null; + + boolean isNewTable = !catalog.getSnapshot().rel().getTable( namespace, tableName ).isPresent(); + if ( isNewTable ) { + logical = catalog.getLogicalRel( namespace ).addTable( tableName, EntityType.SOURCE, !adapter.get().isDataReadOnly() ); + } else { + logical = catalog.getSnapshot().rel().getTable( namespace, tableName ).orElseThrow(); + } + + List columns = new ArrayList<>(); + + Pair partitionProperty = createSinglePartition( logical.namespaceId, logical ); + + Optional existingPlacements = catalog.getSnapshot().alloc().getPlacement( adapter.get().adapterId, logical.id ); + AllocationPlacement placement = existingPlacements.isEmpty() ? catalog.getAllocRel( namespace ).addPlacement( logical.id, namespace, adapter.get().adapterId ) : existingPlacements.get(); + + Optional existingAlloc = catalog.getSnapshot().alloc().getFromLogical( logical.id ).stream().filter( a -> a.adapterId == adapter.get().adapterId ).findFirst(); + long tempId = logical.id; + AllocationEntity allocation = existingAlloc.orElseGet( () -> catalog.getAllocRel( namespace ).addAllocation( adapter.get().getAdapterId(), placement.id, partitionProperty.left.id, tempId ) ); + + List aColumns = new ArrayList<>(); + + String physicalSchema = entry.getValue().isEmpty() ? 
Catalog.DEFAULT_NAMESPACE_NAME : entry.getValue().get( 0 ).physicalSchemaName; + + for ( ExportedColumn exportedColumn : entry.getValue() ) { + if ( selectedAttributeNames.stream().noneMatch( name -> name.equalsIgnoreCase( exportedColumn.name ) ) ) { + continue; + } + if ( !catalog.getSnapshot().rel().getColumn( logical.id, exportedColumn.name ).isEmpty() ) { + continue; + } + LogicalColumn column = catalog.getLogicalRel( namespace ).addColumn( + exportedColumn.name, + logical.id, + exportedColumn.physicalPosition, + exportedColumn.type, + exportedColumn.collectionsType, + exportedColumn.length, + exportedColumn.scale, + exportedColumn.dimension, + exportedColumn.cardinality, + exportedColumn.nullable, + Collation.getDefaultCollation() ); + + AllocationColumn allocationColumn = catalog.getAllocRel( namespace ).addColumn( + placement.id, + logical.id, + column.id, + adapter.get().adapterId, + PlacementType.STATIC, + exportedColumn.physicalPosition ); + + columns.add( column ); + aColumns.add( allocationColumn ); + + } + + if ( isNewTable ) { + buildNamespace( Catalog.defaultNamespaceId, logical, adapter.get() ); + } + + if ( isNewTable || !columns.isEmpty() ) { + LogicalTable t = logical; + List lCols = columns; + List aCols = aColumns; + + tsx.attachCommitAction( () -> + adapter.get().createTable( + null, + LogicalTableWrapper.of( t, + lCols, + List.of(), + physicalSchema, + t.name ), + AllocationTableWrapper.of( + allocation.unwrapOrThrow( AllocationTable.class ), + aCols ) ) ); + } + + catalog.updateSnapshot(); + + } + catalog.updateSnapshot(); + tsx.commit(); + statement.close(); + + } + + + @Override + public void dropSourceEntities( List paths, Statement statement ) { + Map> worklist = new HashMap<>(); + + for ( String raw : paths ) { + String path = raw.replace( "'", "" ).trim(); + if ( path.isBlank() ) { + continue; + } + + String[] seg = path.split( "\\." ); + if ( seg.length < 2 ) { + throw new GenericRuntimeException( "UngΓΌltiger Pfad: " + path ); + } + + String columnName = (seg.length >= 3) ? seg[seg.length - 1] : "*"; + String tableName = seg[seg.length - 2]; + + String schemaPath = String.join( ".", + Arrays.copyOf( seg, seg.length - (columnName.equals( "*" ) ? 1 : 2) ) ); + + LogicalNamespace ns = catalog.getSnapshot() + .getNamespace( Catalog.DEFAULT_NAMESPACE_NAME ) + .orElseThrow( () -> new GenericRuntimeException( + "Logisches Namespace 'public' nicht gefunden." ) ); + + LogicalTable table = catalog.getSnapshot().rel() + .getTable( ns.id, tableName ) + .orElseThrow( () -> new GenericRuntimeException( + "Tabelle nicht gefunden: " + schemaPath + "." + tableName ) ); + + if ( table.entityType != EntityType.SOURCE ) { + throw new GenericRuntimeException( "Tabelle " + table.name + + " ist kein SOURCE-Objekt." 
); + } + + worklist.computeIfAbsent( table, t -> new HashSet<>() ) + .add( columnName ); + } + + for ( Map.Entry> entry : worklist.entrySet() ) { + LogicalTable table = entry.getKey(); + Set toDrop = entry.getValue(); + + if ( toDrop.contains( "*" ) ) { + dropWholeSourceTable( table, statement ); + continue; + } + + for ( String col : toDrop ) { + dropSourceColumn( table, col, statement ); + catalog.updateSnapshot(); + } + + if ( catalog.getSnapshot().rel().getColumns( table.id ).isEmpty() ) { + dropWholeSourceTable( table, statement ); + catalog.updateSnapshot(); + } + } + + statement.getQueryProcessor().resetCaches(); + } + + + private void dropWholeSourceTable( LogicalTable table, Statement statement ) { + + List allocs = catalog.getSnapshot().alloc().getFromLogical( table.id ); + + if ( allocs.size() != 1 ) { + throw new GenericRuntimeException( "SOURCE-Tabelle " + table.name + + " hat mehr als ein Placement." ); + } + + AllocationTable placement = allocs.get( 0 ) + .unwrapOrThrow( AllocationTable.class ); + + for ( LogicalForeignKey fk : catalog.getSnapshot().rel().getForeignKeys( table.id ) ) { + catalog.getLogicalRel( table.namespaceId ).deleteForeignKey( fk.id ); + } + + for ( AllocationColumn c : placement.getColumns() ) { + catalog.getAllocRel( table.namespaceId ).deleteColumn( placement.id, c.columnId ); + } + + catalog.getAllocRel( table.namespaceId ).deleteAllocation( placement.id ); + + catalog.getLogicalRel( table.namespaceId ).deletePrimaryKey( table.id ); + for ( LogicalColumn c : catalog.getSnapshot().rel().getColumns( table.id ) ) { + catalog.getLogicalRel( table.namespaceId ).deleteColumn( c.id ); + } + catalog.getLogicalRel( table.namespaceId ).deleteTable( table.id ); + } + + + private void dropSourceColumn( LogicalTable table, String columnName, Statement statement ) { + + LogicalColumn column = catalog.getSnapshot() + .rel() + .getColumn( table.id, columnName ) + .orElse( null ); + if ( column == null ) { + log.info( "Spalte {}.{} bereits weg β†’ nichts zu tun.", table.name, columnName ); + return; + } + + // 1) FKs weg + for ( LogicalForeignKey fk : catalog.getSnapshot().rel().getForeignKeys( table.id ) ) { + if ( fk.getFieldIds().contains( column.id ) ) { + catalog.getLogicalRel( table.namespaceId ).deleteForeignKey( fk.id ); + } + } + + for ( AllocationEntity alloc : catalog.getSnapshot().alloc().getFromLogical( table.id ) ) { + + AllocationTable at = alloc.unwrapOrThrow( AllocationTable.class ); + + for ( AllocationColumn p : at.getColumns() ) { + if ( p.columnId == column.id ) { + catalog.getAllocRel( table.namespaceId ).deleteColumn( alloc.id, column.id ); + } + + catalog.getLogicalRel( table.namespaceId ).deleteColumn( column.id ); + + statement.getQueryProcessor().resetCaches(); + } + } + } + + @Override public void createSource( Transaction transaction, String uniqueName, String adapterName, long namespace, AdapterType adapterType, Map config, DeployMode mode ) { uniqueName = uniqueName.toLowerCase(); @@ -247,7 +485,7 @@ public void createSource( Transaction transaction, String uniqueName, String ada HashCache.getInstance().put( uniqueName, hash ); log.info( "Key used during deployment: {} ", uniqueName ); - pm.onAdapterDeploy( (Adapter & MetadataProvider) mp ); + // pm.onAdapterDeploy( (Adapter & MetadataProvider) mp ); mp.markSelectedAttributes( selectedAttributes ); log.error( "SelectedAttributes ist gesetzt aus dem DdlManager und der Tree ist das hier: " ); @@ -365,111 +603,6 @@ public void createSource( Transaction transaction, String uniqueName, 
String ada } - public void addSelectedMetadata( Transaction tsx, String uniqueName, long namespace, List selectedPaths ) { - Optional> adapter = AdapterManager.getInstance().getSource( uniqueName ); - extendMetaSettings( adapter, selectedPaths ); - if ( adapter == null ) { - throw new GenericRuntimeException( "No adapter found for unique name " + uniqueName ); - } - Map> exported = adapter.get().getExportedColumns(); - Map> additions = - selectedPaths.stream() - .map( DdlManagerImpl::splitPath ) - .collect( Collectors.groupingBy( - ParsedPath::table, - Collectors.mapping( ParsedPath::column, - Collectors.toList() ) ) ); - - for ( Map.Entry> entry : additions.entrySet() ) { - String tableName = entry.getKey(); - String physicalTable = entry.getKey(); - List newCols = entry.getValue(); - - Optional logicalOpt = catalog.getSnapshot().rel().getTable( namespace, physicalTable ); - LogicalTable logical; - AllocationPlacement placement; - List logicalCols = new ArrayList<>(); - List allocCols = new ArrayList<>(); - AllocationEntity allocation; - - if ( logicalOpt.isEmpty() ) { - logical = catalog.getLogicalRel( namespace ) - .addTable( tableName, EntityType.SOURCE, !adapter.get().isDataReadOnly() ); - Pair part = - createSinglePartition( logical.namespaceId, logical ); - placement = catalog.getAllocRel( namespace ) - .addPlacement( logical.id, namespace, adapter.get().getAdapterId() ); - allocation = catalog.getAllocRel( namespace ) - .addAllocation( adapter.get().getAdapterId(), - placement.id, - part.left.id, - logical.id ); - catalog.getAllocRel( namespace ) - .addAllocation( adapter.get().getAdapterId(), placement.id, part.left.id, logical.id ); - } else { - allocation = null; - logical = logicalOpt.get(); - Optional placementOpt = - catalog.getSnapshot().alloc() - .getPlacement( adapter.get().getAdapterId(), logical.id ); - if ( placementOpt.isPresent() ) { - placement = placementOpt.get(); - } else { - placement = catalog.getAllocRel( namespace ) - .addPlacement( logical.id, namespace, adapter.get().getAdapterId() ); - - catalog.getAllocRel( namespace ) - .addAllocation( adapter.get().getAdapterId(), - placement.id, - createSinglePartition( logical.namespaceId, logical ).left.id, - logical.id ); - } - - } - List exportedCols = exported.getOrDefault( physicalTable, List.of() ); - - String physicalSchema = - exportedCols.isEmpty() - ? 
Catalog.DEFAULT_NAMESPACE_NAME - : exportedCols.get( 0 ).physicalSchemaName; - for ( ExportedColumn ec : exportedCols ) { - if ( !newCols.contains( ec.name ) ) { - continue; - } - boolean exists = catalog.getSnapshot().rel() - .getColumn( logical.id, ec.name ).isPresent(); - if ( exists ) { - continue; - } - - LogicalColumn lc = catalog.getLogicalRel( namespace ).addColumn( - ec.name, logical.id, ec.physicalPosition, ec.type, - ec.collectionsType, ec.length, ec.scale, ec.dimension, - ec.cardinality, ec.nullable, Collation.getDefaultCollation() ); - AllocationColumn ac = catalog.getAllocRel( namespace ).addColumn( - placement.id, logical.id, lc.id, - adapter.get().getAdapterId(), PlacementType.STATIC, ec.physicalPosition ); - - logicalCols.add( lc ); - allocCols.add( ac ); - } - if ( !logicalCols.isEmpty() ) { - buildNamespace( Catalog.defaultNamespaceId, logical, adapter.get() ); - if ( logicalOpt.isEmpty() ) { - tsx.attachCommitAction( () -> - adapter.get().createTable( null, - LogicalTableWrapper.of( logical, logicalCols, List.of(), - physicalSchema, tableName ), - AllocationTableWrapper.of( allocation.unwrapOrThrow( AllocationTable.class ), - allocCols ) ) ); - } - catalog.updateSnapshot(); - } - } - - } - - @Override public void dropAdapter( String name, Statement statement ) { name = name.replace( "'", "" ); @@ -527,190 +660,7 @@ public void dropAdapter( String name, Statement statement ) { } } AdapterManager.getInstance().removeAdapter( adapter.id ); - PublisherManager.getInstance().onAdapterUndeploy( adapter.uniqueName ); - } - - - @Override - public void dropSourceEntities( List paths, Statement statement ) { - - /* - * 1) Vorverarbeiten: pro Tabelle sammeln wir, was wirklich gekillt werden soll. - * EnthΓ€lt das Set einen Stern (β€ž*β€œ) bedeutet das: ganze Tabelle weg, - * andernfalls nur die aufgezΓ€hlten Spalten. - */ - Map> worklist = new HashMap<>(); - - for ( String rawPath : paths ) { - String path = rawPath.replace( "'", "" ).trim(); - if ( path.isBlank() ) { - continue; - } - - String[] seg = path.replace( '/', '.' ) - .replace( '\\', '.' ) - .split( "\\." ); - if ( seg.length < 2 ) { - throw new GenericRuntimeException( "UngΓΌltiger Pfad: " + path ); - } - - String normalized = path.replace( '/', '.' ).replace( "\\", "." ); - - String columnName = (seg.length >= 4) ? seg[seg.length - 1] : null; - String tableName = seg[seg.length - (columnName == null ? 1 : 2)]; - String schemaName = seg[seg.length - (columnName == null ? 2 : 3)]; - - /* --- Namespace & Tabelle ermitteln --------------------------------- */ - LogicalNamespace ns = catalog.getSnapshot() - .getNamespace( schemaName ) - .orElseThrow( () -> new GenericRuntimeException( - "Schema-Pfad nicht gefunden: " + schemaName ) ); - - LogicalTable table = catalog.getSnapshot() - .rel() - .getTable( ns.id, tableName ) - .orElseThrow( () -> new GenericRuntimeException( - "Tabelle nicht gefunden: " + tableName + " im Schema " + schemaName ) ); - - /* Nur SOURCE-Tabellen dΓΌrfen wir hier anfassen */ - if ( table.entityType != EntityType.SOURCE ) { - throw new GenericRuntimeException( "Tabelle " + path + " ist kein SOURCE-Objekt." ); - } - - /* Arbeits-Set befΓΌllen */ - worklist.computeIfAbsent( table, t -> new HashSet<>() ); - worklist.get( table ).add( columnName == null ? "*" : columnName ); - } - - /* - * 2) Jetzt wirklich lΓΆschen. 
- * – Ist β€ž*β€œ im Set β‡’ komplette Tabelle droppen - * – sonst jede angegebene Spalte droppen - */ - for ( Map.Entry> entry : worklist.entrySet() ) { - LogicalTable table = entry.getKey(); - Set cols = entry.getValue(); - - for ( String col : cols ) { - dropSourceColumn( table, col, statement ); - } - - boolean tableHasColumnsLeft = - catalog.getLogicalRel( table.namespaceId ) - .getColumns() // Map - .values().stream() // β†’ Stream - .anyMatch( c -> c.tableId == table.id ); - - if ( cols.contains( "*" ) || !tableHasColumnsLeft ) - dropWholeSourceTable( table, statement ); - } - - - /* 4) PrΓΌfen, ob nach dem LΓΆschen noch Spalten ΓΌbrig sind */ - - - - /* Einmaliges Cache-Reset reicht. */ - statement.getQueryProcessor().resetCaches(); - } - - /* -------------------------------------------------------------------------- */ - /* Hilfsmethoden */ - /* -------------------------------------------------------------------------- */ - - - private void dropWholeSourceTable( LogicalTable table, Statement statement ) { - - /* ------------------------------------------------------------------ - * 0) Placement & Partition (READ) – brauchen wir fΓΌr deleteAllocation - * ------------------------------------------------------------------ */ - List placements = - catalog.getSnapshot().alloc().getPlacementsFromLogical( table.id ); - - if ( placements.size() != 1 ) { - throw new GenericRuntimeException( - "SOURCE-Tabelle " + table.name + " hat mehr als ein Placement." ); - } - AllocationPlacement placement = placements.get( 0 ); - - /* ------------------------------------------------------------------ - * 1) Spalten & FremdschlΓΌssel zum LΓΆschen heraussuchen (READ) - * ------------------------------------------------------------------ */ - List columns = - catalog.getSnapshot().rel().getColumns( table.id ); - List fks = - catalog.getSnapshot().rel().getForeignKeys( table.id ); - - /* ------------------------------------------------------------------ - * 2) FremdschlΓΌssel lΓΆschen (WRITE) - * ------------------------------------------------------------------ */ - for ( LogicalForeignKey fk : fks ) { - catalog.getLogicalRel( table.namespaceId ) - .deleteForeignKey( fk.id ); - } - - /* ------------------------------------------------------------------ - * 3) Column-Placements + Spalten lΓΆschen (WRITE) - * ------------------------------------------------------------------ */ - for ( LogicalColumn col : columns ) { - - // alle Placements der Spalte entfernen - for ( AllocationPlacement p : - catalog.getSnapshot().alloc().getPlacementsOfColumn( col.id ) ) { - catalog.getAllocRel( table.namespaceId ) - .deleteColumn( p.id, col.id ); - } - - // Spalte im logischen Modell entfernen - catalog.getLogicalRel( table.namespaceId ) - .deleteColumn( col.id ); - } - - /* ------------------------------------------------------------------ - * 4) Allocation des gesamten Tables lΓΆschen (WRITE) - * ------------------------------------------------------------------ */ - catalog.getAllocRel( table.namespaceId ) - .deleteAllocation( placement.id ); - - /* ------------------------------------------------------------------ - * 5) PrimΓ€rschlΓΌssel + Tabelle lΓΆschen (WRITE) - * ------------------------------------------------------------------ */ - catalog.getLogicalRel( table.namespaceId ) - .deletePrimaryKey( table.id ); - - catalog.getLogicalRel( table.namespaceId ) - .deleteTable( table.id ); - } - - - - private void dropSourceColumn( LogicalTable table, String columnName, Statement statement ) { - - LogicalColumn column 
= catalog.getSnapshot() - .rel() - .getColumn( table.id, columnName ) - .orElseThrow( () -> new GenericRuntimeException( - "Spalte " + columnName + " existiert nicht in Tabelle " + table.name ) ); - - /* 1) FKs, die diese Spalte benutzen, entfernen */ - for ( LogicalForeignKey fk : catalog.getSnapshot().rel().getForeignKeys( table.id ) ) { - boolean usesColumn = fk.getFieldIds().contains( column.id ) - || fk.getReferencedKeyFieldIds() - .contains( column.id ); - - if ( usesColumn ) { - catalog.getLogicalRel( table.namespaceId ) - .deleteForeignKey( fk.id ); - } - } - - /* 2) Alle Placements der Spalte entfernen */ - for ( AllocationPlacement placement : catalog.getSnapshot().alloc().getPlacementsOfColumn( column.id ) ) { - catalog.getAllocRel( table.namespaceId ).deleteColumn( placement.id, column.id ); - } - - /* 3) Spalte im Logischen Modell lΓΆschen */ - catalog.getLogicalRel( table.namespaceId ).deleteColumn( column.id ); + // PublisherManager.getInstance().onAdapterUndeploy( adapter.uniqueName ); } @@ -3407,39 +3357,4 @@ public void dropType() { throw new GenericRuntimeException( "Not supported yet" ); } - - public static ParsedPath splitPath( String fullPath ) { - String noType = fullPath.split( ":" )[0]; - int lastDot = noType.lastIndexOf( '.' ); - String column = noType.substring( lastDot + 1 ); - String tableKey = noType.substring( 0, lastDot ); - - String[] parts = tableKey.split( "\\." ); - String table = parts[parts.length - 1]; - String schema = parts.length >= 2 - ? parts[parts.length - 2] - : Catalog.DEFAULT_NAMESPACE_NAME; - - return new ParsedPath( schema, table, column ); - } - - - private void extendMetaSettings( Optional> adapter, List newData ) { - Map settings = adapter.get().getSettings(); - String json = settings.getOrDefault( "selectedAttributes", "[]" ); - List current = new Gson().fromJson( json, new TypeToken>() { - }.getType() ); - Set merged = new LinkedHashSet<>( current ); - merged.addAll( newData ); - - settings.put( "selectedAttributes", new Gson().toJson( merged ) ); - - - } - - - record ParsedPath( String schema, String table, String column ) { - - } - } diff --git a/webui/src/main/java/org/polypheny/db/webui/Crud.java b/webui/src/main/java/org/polypheny/db/webui/Crud.java index 90e9e7f0a8..b621ce662c 100644 --- a/webui/src/main/java/org/polypheny/db/webui/Crud.java +++ b/webui/src/main/java/org/polypheny/db/webui/Crud.java @@ -54,6 +54,7 @@ import java.util.Arrays; import java.util.Date; import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Map.Entry; @@ -159,8 +160,10 @@ import org.polypheny.db.processing.ImplementationContext; import org.polypheny.db.processing.ImplementationContext.ExecutedContext; import org.polypheny.db.processing.QueryContext; +import org.polypheny.db.schemaDiscovery.AbstractNode; import org.polypheny.db.schemaDiscovery.MetadataProvider; import org.polypheny.db.schemaDiscovery.NodeSerializer; +import org.polypheny.db.schemaDiscovery.NodeUtil; import org.polypheny.db.security.SecurityManager; import org.polypheny.db.transaction.Statement; import org.polypheny.db.transaction.Transaction; @@ -193,6 +196,7 @@ import org.polypheny.db.webui.models.PartitionFunctionModel.PartitionFunctionColumn; import org.polypheny.db.webui.models.PathAccessRequest; import org.polypheny.db.webui.models.PlacementFieldsModel; +import org.polypheny.db.webui.models.PlacementFieldsModel.Method; import org.polypheny.db.webui.models.PlacementModel; import 
org.polypheny.db.webui.models.PlacementModel.RelationalStore; import org.polypheny.db.webui.models.QueryInterfaceModel; @@ -909,7 +913,7 @@ void sendConfirmation( final Context ctx ) { log.info( " πŸ”Ή UniqueName : {}", a.uniqueName ); log.info( "πŸ“¦ Settings:" ); - for ( Map.Entry entry : a.settings.entrySet() ) { + for ( Entry entry : a.settings.entrySet() ) { log.info( " - {}: {}", entry.getKey(), entry.getValue() ); } @@ -940,7 +944,7 @@ void sendConfirmation( final Context ctx ) { Map fileBytes = new HashMap<>(); - for ( Map.Entry e : inputStreams.entrySet() ) { + for ( Entry e : inputStreams.entrySet() ) { try ( InputStream in = e.getValue() ) { byte[] data = IOUtils.toByteArray( in ); fileBytes.put( e.getKey(), data ); @@ -989,23 +993,27 @@ void metadataAck( final Context ctx ) { Optional> adapter = AdapterManager.getInstance().getSource( payload.uniqueName ); Transaction transaction = transactionManager.startTransaction( Catalog.defaultUserId, false, "metadata-ack-" + payload.uniqueName ); + Statement stmt = null; try { - if ( payload.addedPaths != null ) - DdlManager.getInstance().addSelectedMetadata( transaction, payload.uniqueName, Catalog.defaultNamespaceId, List.of( payload.addedPaths ) ); + if ( payload.addedPaths != null ) { + DdlManager.getInstance().addSelectedMetadata( transaction, null, payload.uniqueName, Catalog.defaultNamespaceId, List.of( payload.addedPaths ) ); + } if ( payload.removedPaths != null ) { - Statement stmt = transaction.createStatement(); - DdlManager.getInstance().dropSourceEntities( List.of(payload.removedPaths), stmt ); + stmt = transaction.createStatement(); + DdlManager.getInstance().dropSourceEntities( List.of( payload.removedPaths ), stmt ); stmt = null; } transaction.commit(); ctx.status( 200 ).result( "ACK processed" ); } catch ( Exception e ) { log.error( "metadataAck failed", e ); - ctx.status(200).json(Map.of("message", "ACK was processed")); + ctx.status( 200 ).json( Map.of( "message", "ACK was processed" ) ); } finally { - transaction = null; + if ( stmt != null ) { + stmt.close(); + } } } @@ -1024,7 +1032,65 @@ void getMetaConfiguration( final Context ctx ) { void setMetaConfiguration( final Context ctx ) { ConfigPayload config = ctx.bodyAsClass( ConfigPayload.class ); + Set userSelection = Set.of( config.selected ); + Set markedPaths; log.error( config.toString() ); + Optional> adapter = AdapterManager.getInstance().getSource( config.uniqueName ); + + if ( adapter.get() instanceof MetadataProvider mp ) { + AbstractNode root = mp.getRoot(); + markedPaths = NodeUtil.collectSelecedAttributePaths( root ); + for ( String p : markedPaths ) { + log.info( "Selected path: " + p ); + } + } else { + ctx.status( 500 ).json( Map.of( "message", "Configuration can not be applied." 
) ); + return; + } + + Set toUnselect = new HashSet<>( markedPaths ); + toUnselect.removeAll( userSelection ); + + Set toAdd = new HashSet<>( userSelection ); + toAdd.removeAll( markedPaths ); + + Transaction tx = transactionManager.startTransaction( Catalog.defaultUserId, false, "setMetaConfiguration" + config.uniqueName ); + Statement stmt = null; + try { + if ( !toAdd.isEmpty() ) { + stmt = tx.createStatement(); + DdlManager.getInstance().addSelectedMetadata( tx, stmt, config.uniqueName, Catalog.defaultNamespaceId, List.copyOf( toAdd ) ); + ((MetadataProvider) adapter.get()).markSelectedAttributes( List.copyOf( toAdd ) ); + } + + if ( !toUnselect.isEmpty() ) { + try { + stmt = tx.createStatement(); + DdlManager.getInstance().dropSourceEntities( List.copyOf( toUnselect ), stmt ); + NodeUtil.unmarkSelectedAttributes( ((MetadataProvider) adapter.get()).getRoot(), List.copyOf( toUnselect ) ); + tx.commit(); + stmt.close(); + ctx.json( Map.of( "message", "Configuration applied." ) ); + } catch ( Exception ex ) { + tx.rollback( "Error while dropping source entities" + ex.getMessage() ); + ctx.status( 500 ).json( Map.of( "message", ex.getMessage() ) ); + } finally { + if ( stmt != null ) { + stmt.close(); + } + } + } + + } catch ( Exception ex ) { + tx.rollback( "Changing adapter configuration was not successful !" ); + ctx.status( 500 ).json( Map.of( "message", ex.getMessage() ) ); + } finally { + if ( stmt != null ) { + stmt.close(); + } + } + + } @@ -1578,7 +1644,7 @@ void getConstraints( final Context ctx ) { } temp.get( "" ).add( columnName ); } - for ( Map.Entry> entry : temp.entrySet() ) { + for ( Entry> entry : temp.entrySet() ) { resultList.add( new TableConstraint( entry.getKey(), "PRIMARY KEY", entry.getValue() ) ); } } @@ -1591,7 +1657,7 @@ void getConstraints( final Context ctx ) { temp.put( logicalConstraint.name, new ArrayList<>( logicalConstraint.key.getFieldNames() ) ); } } - for ( Map.Entry> entry : temp.entrySet() ) { + for ( Entry> entry : temp.entrySet() ) { resultList.add( new TableConstraint( entry.getKey(), "UNIQUE", entry.getValue() ) ); } @@ -1888,7 +1954,7 @@ void addDropPlacement( final Context ctx ) { } StringJoiner columnJoiner = new StringJoiner( ",", "(", ")" ); int counter = 0; - if ( placementFields.method() != PlacementFieldsModel.Method.DROP ) { + if ( placementFields.method() != Method.DROP ) { for ( String name : placementFields.fieldNames() ) { columnJoiner.add( "\"" + name + "\"" ); counter++; @@ -2231,7 +2297,7 @@ public void getMetadataAndPreview( final Context ctx ) { log.error( "Row limit: {}", req.limit ); Map allSettings = template.settings.stream().collect( Collectors.toMap( e -> e.name, e -> e ) ); - for ( Map.Entry entry : allSettings.entrySet() ) { + for ( Entry entry : allSettings.entrySet() ) { log.error( "Key: {} Value: {}", entry.getKey(), entry.getValue() ); if ( entry instanceof AbstractAdapterSettingDirectory ) { log.error( "Ist ein directory setting." 
); @@ -2287,7 +2353,7 @@ void createAdapter( final Context ctx ) throws ServletException, IOException { log.info( "Aliases: " + a.columnAliases.toString() ); log.info( "Settings:" ); - for ( Map.Entry entry : a.settings.entrySet() ) { + for ( Entry entry : a.settings.entrySet() ) { log.info( entry.getKey() + " = " + entry.getValue() ); } @@ -2308,7 +2374,7 @@ void createAdapter( final Context ctx ) throws ServletException, IOException { } AdapterTemplate adapter = AdapterManager.getAdapterTemplate( a.adapterName, a.type ); Map allSettings = adapter.settings.stream().collect( Collectors.toMap( e -> e.name, e -> e ) ); - for ( Map.Entry entry : a.settings.entrySet() ) { + for ( Entry entry : a.settings.entrySet() ) { AbstractAdapterSetting set = allSettings.get( entry.getKey() ); if ( set == null ) { continue; @@ -2927,7 +2993,7 @@ public int getPageSize() { private String filterTable( final Map filter ) { StringJoiner joiner = new StringJoiner( " AND ", " WHERE ", "" ); int counter = 0; - for ( Map.Entry entry : filter.entrySet() ) { + for ( Entry entry : filter.entrySet() ) { //special treatment for arrays if ( entry.getValue().startsWith( "[" ) ) { joiner.add( "\"" + entry.getKey() + "\"" + " = ARRAY" + entry.getValue() ); @@ -2953,7 +3019,7 @@ else if ( !entry.getValue().isEmpty() ) { private String sortTable( final Map sorting ) { StringJoiner joiner = new StringJoiner( ",", " ORDER BY ", "" ); int counter = 0; - for ( Map.Entry entry : sorting.entrySet() ) { + for ( Entry entry : sorting.entrySet() ) { if ( entry.getValue().sorting ) { joiner.add( "\"" + entry.getKey() + "\" " + entry.getValue().direction ); counter++; @@ -3259,7 +3325,8 @@ public record AckPayload( @JsonProperty String uniqueName, @JsonProperty String[ } - public record ConfigPayload ( @JsonProperty String uniqueName, @JsonProperty String[] selected ) { + + public record ConfigPayload( @JsonProperty String uniqueName, @JsonProperty String[] selected ) { } From 4439ec92fa2be45f845a2ae9e82e5475ecf36196 Mon Sep 17 00:00:00 2001 From: romanost03 Date: Mon, 23 Jun 2025 20:42:23 +0200 Subject: [PATCH 44/68] Differ between warning of metadata change and critical change. 
--- .../MetadataObserver/AbstractListener.java | 8 +++-- .../MetadataObserver/PublisherManager.java | 21 +++++++++--- .../db/schemaDiscovery/NodeUtil.java | 33 +++++++++++++++++++ .../java/org/polypheny/db/webui/Crud.java | 3 +- 4 files changed, 57 insertions(+), 8 deletions(-) diff --git a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/AbstractListener.java b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/AbstractListener.java index 2777b898f4..f6b7c87291 100644 --- a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/AbstractListener.java +++ b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/AbstractListener.java @@ -21,6 +21,7 @@ import lombok.Getter; import lombok.extern.slf4j.Slf4j; import org.polypheny.db.adapter.Adapter; +import org.polypheny.db.adapter.MetadataObserver.PublisherManager.ChangeStatus; import org.polypheny.db.adapter.MetadataObserver.Utils.MetaAnnotator; import org.polypheny.db.adapter.MetadataObserver.Utils.MetaDiffUtil; import org.polypheny.db.adapter.MetadataObserver.Utils.MetaDiffUtil.DiffResult; @@ -28,6 +29,7 @@ import org.polypheny.db.schemaDiscovery.AbstractNode; import org.polypheny.db.schemaDiscovery.MetadataProvider; import org.polypheny.db.schemaDiscovery.NodeSerializer; +import org.polypheny.db.schemaDiscovery.NodeUtil; import java.util.ArrayList; import java.util.Arrays; import java.util.List; @@ -65,13 +67,13 @@ public void onMetadataChange( P adapter, AbstractNode node, String hash ) { DiffResult result = MetaDiffUtil.diff( adapter.getRoot(), node ); log.info( "Diffresult: {}", result ); + ChangeStatus status = NodeUtil.evaluateStatus( result, adapter.getRoot() ); + AbstractNode annotatedCopy = MetaAnnotator.annotateTree( adapter.getRoot(), node, result ); String json = NodeSerializer.serializeNode( annotatedCopy ).toString(); log.info( "JSON: {}", json ); - PublisherManager.getInstance().onMetadataChange( adapter.getUniqueName(), new PreviewResult( json, preview ) ); - - + PublisherManager.getInstance().onMetadataChange( adapter.getUniqueName(), new PreviewResult( json, preview ), status ); } diff --git a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/PublisherManager.java b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/PublisherManager.java index c3eec9b1bf..952f2abb73 100644 --- a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/PublisherManager.java +++ b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/PublisherManager.java @@ -31,6 +31,7 @@ public class PublisherManager { private final Map publishers = new ConcurrentHashMap<>(); private final Map changeCache = new ConcurrentHashMap<>(); + private final Map statusCache = new ConcurrentHashMap<>(); private static final PublisherManager INSTANCE = new PublisherManager(); @@ -65,18 +66,23 @@ public void onAdapterUndeploy( String uniqueName ) { } - public boolean hasChange( String uniqueName ) { - return changeCache.containsKey( uniqueName ); + public ChangeStatus hasChange( String uniqueName ) { + if ( changeCache.containsKey( uniqueName ) ) { + return statusCache.get( uniqueName ); + } else { + return null; + } } - public void onMetadataChange( String uniqueName, PreviewResult data ) { + public void onMetadataChange( String uniqueName, PreviewResult data, ChangeStatus status ) { changeCache.put( uniqueName, data ); + statusCache.put( uniqueName, status ); } public PreviewResult fetchChange( String uniqueName ) { - return changeCache.get( uniqueName ); + return changeCache.get( uniqueName ) ; } @@ 
-84,6 +90,13 @@ public void ack( String uniqueName, String[] metadata ) { MetadataPublisher publisher = publishers.get( uniqueName ); publisher.getListener().applyChange( metadata ); changeCache.remove( uniqueName ); + statusCache.remove( uniqueName ); + } + + public enum ChangeStatus { + CRITICAL, + WARNING, + OK } } diff --git a/core/src/main/java/org/polypheny/db/schemaDiscovery/NodeUtil.java b/core/src/main/java/org/polypheny/db/schemaDiscovery/NodeUtil.java index aa8f3b4995..2ca912dae8 100644 --- a/core/src/main/java/org/polypheny/db/schemaDiscovery/NodeUtil.java +++ b/core/src/main/java/org/polypheny/db/schemaDiscovery/NodeUtil.java @@ -17,6 +17,8 @@ package org.polypheny.db.schemaDiscovery; import lombok.extern.slf4j.Slf4j; +import org.polypheny.db.adapter.MetadataObserver.PublisherManager.ChangeStatus; +import org.polypheny.db.adapter.MetadataObserver.Utils.MetaDiffUtil.DiffResult; import java.util.ArrayDeque; import java.util.ArrayList; import java.util.Arrays; @@ -30,6 +32,14 @@ @Slf4j public final class NodeUtil { + private static final String NORMALIZED_SEPARATOR = "."; + + private static String normalizePath(String rawPath) { + return rawPath.replace("/", NORMALIZED_SEPARATOR) + .replace("\\", NORMALIZED_SEPARATOR); + } + + private NodeUtil() { } @@ -111,4 +121,27 @@ public static void unmarkSelectedAttributes( AbstractNode metadataRoot, List selected = collectSelecedAttributePaths( oldRoot ); + for ( String path : selected ) log.info( path ); + for (String removedRaw : diff.getRemoved()) { + String removed = normalizePath(removedRaw); + for (String selectedRaw : selected) { + String selectedNorm = normalizePath(selectedRaw); + if (removed.equals(selectedNorm) || + selectedNorm.startsWith(removed + NORMALIZED_SEPARATOR) || + removed.startsWith(selectedNorm + NORMALIZED_SEPARATOR)) { + return ChangeStatus.CRITICAL; + } + } + } + return ChangeStatus.WARNING; + + } + + } diff --git a/webui/src/main/java/org/polypheny/db/webui/Crud.java b/webui/src/main/java/org/polypheny/db/webui/Crud.java index b621ce662c..935338b25d 100644 --- a/webui/src/main/java/org/polypheny/db/webui/Crud.java +++ b/webui/src/main/java/org/polypheny/db/webui/Crud.java @@ -91,6 +91,7 @@ import org.polypheny.db.adapter.DataStore; import org.polypheny.db.adapter.DataStore.FunctionalIndexInfo; import org.polypheny.db.adapter.MetadataObserver.PublisherManager; +import org.polypheny.db.adapter.MetadataObserver.PublisherManager.ChangeStatus; import org.polypheny.db.adapter.index.IndexManager; import org.polypheny.db.adapter.java.AdapterTemplate; import org.polypheny.db.adapter.java.AdapterTemplate.PreviewResult; @@ -973,7 +974,7 @@ void sendConfirmation( final Context ctx ) { void metadataStatus( final Context ctx ) { String uniqueName = ctx.pathParam( "uniqueName" ); - boolean changed = PublisherManager.getInstance().hasChange( uniqueName ); + ChangeStatus changed = PublisherManager.getInstance().hasChange( uniqueName ); ctx.json( Map.of( "changed", changed ) ); } From 835031f2bb0c9645ad34221a8d602685d39da591 Mon Sep 17 00:00:00 2001 From: romanost03 Date: Tue, 24 Jun 2025 14:31:10 +0200 Subject: [PATCH 45/68] Bugfix while adding new configuration for data source. 
--- .../org/polypheny/db/ddl/DdlManagerImpl.java | 134 ++++++++---------- 1 file changed, 59 insertions(+), 75 deletions(-) diff --git a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java index bdd5b2dde2..93278ec47f 100644 --- a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java +++ b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java @@ -21,6 +21,7 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; +import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.HashSet; @@ -224,106 +225,87 @@ public void createStore( String uniqueName, String adapterName, AdapterType adap @Override public void addSelectedMetadata( Transaction tsx, Statement statement, String uniqueName, long namespace, List selectedPaths ) { - List selectedAttributeNames = new ArrayList<>(); - selectedAttributeNames = selectedPaths.stream().map( s -> s.substring( s.lastIndexOf( '.' ) + 1 ) ).collect( Collectors.toList() ); + record PathParts( String physicalNs, String physicalSchema, String table, String column, String original ) { + + } + List parsedPaths = selectedPaths.stream() + .map( p -> { + String[] parts = p.split( "\\." ); + if ( parts.length != 4 ) { + throw new IllegalArgumentException( "Pfad muss 4 Teile haben: ... -> " + p ); + } + return new PathParts( parts[0], parts[1], parts[2], parts[3], p ); + } ).toList(); Optional> adapter = AdapterManager.getInstance().getSource( uniqueName ); - RelAdapterCatalog rel1 = (RelAdapterCatalog) adapter.get().getCatalog(); + + Map settings = new HashMap<>( adapter.get().getSettings() ); + String merged = parsedPaths.stream().map( PathParts::original ).collect( Collectors.joining( "," ) ); + settings.merge( "selectedAttributes", merged, ( oldVal, newVal ) -> oldVal.isBlank() ? 
newVal : oldVal + "," + newVal ); + adapter.get().updateSettings( settings ); + + Map> wishedColsPerTable = parsedPaths.stream() + .collect( Collectors.groupingBy( PathParts::table, Collectors.mapping( PathParts::column, Collectors.toSet() ) ) ); Map> exportedColumns; try { exportedColumns = adapter.get().getExportedColumns(); } catch ( Exception e ) { - throw new GenericRuntimeException( "Somethign went wrong while getting data source", e ); + throw new GenericRuntimeException( "Something went wrong when trying to get exported columns", e ); } - for ( Map.Entry> entry : exportedColumns.entrySet() ) { + for ( Map.Entry> entry : wishedColsPerTable.entrySet() ) { String tableName = entry.getKey(); - LogicalTable logical = null; + Set wishedColumnNames = entry.getValue(); + List exportedColumnList = exportedColumns.getOrDefault( tableName, List.of() ); + + LogicalTable logical; + AllocationPlacement placement; + AllocationEntity allocation; - boolean isNewTable = !catalog.getSnapshot().rel().getTable( namespace, tableName ).isPresent(); - if ( isNewTable ) { + if ( catalog.getSnapshot().rel().getTable( namespace, tableName ).isEmpty() ) { logical = catalog.getLogicalRel( namespace ).addTable( tableName, EntityType.SOURCE, !adapter.get().isDataReadOnly() ); + Pair pp = createSinglePartition( namespace, logical ); + placement = catalog.getAllocRel( namespace ).addPlacement( logical.id, namespace, adapter.get().getAdapterId() ); + allocation = catalog.getAllocRel( namespace ).addAllocation( adapter.get().getAdapterId(), placement.id, pp.left.id, logical.id ); + catalog.updateSnapshot(); + } else { logical = catalog.getSnapshot().rel().getTable( namespace, tableName ).orElseThrow(); + placement = catalog.getSnapshot().alloc().getPlacement( adapter.get().getAdapterId(), logical.id ).orElseThrow(); + allocation = catalog.getSnapshot().alloc().getFromLogical( logical.id ).stream().filter( a -> a.adapterId == adapter.get().getAdapterId() ).findFirst().orElseThrow(); } - List columns = new ArrayList<>(); - - Pair partitionProperty = createSinglePartition( logical.namespaceId, logical ); - - Optional existingPlacements = catalog.getSnapshot().alloc().getPlacement( adapter.get().adapterId, logical.id ); - AllocationPlacement placement = existingPlacements.isEmpty() ? catalog.getAllocRel( namespace ).addPlacement( logical.id, namespace, adapter.get().adapterId ) : existingPlacements.get(); - - Optional existingAlloc = catalog.getSnapshot().alloc().getFromLogical( logical.id ).stream().filter( a -> a.adapterId == adapter.get().adapterId ).findFirst(); - long tempId = logical.id; - AllocationEntity allocation = existingAlloc.orElseGet( () -> catalog.getAllocRel( namespace ).addAllocation( adapter.get().getAdapterId(), placement.id, partitionProperty.left.id, tempId ) ); - - List aColumns = new ArrayList<>(); - - String physicalSchema = entry.getValue().isEmpty() ? 
Catalog.DEFAULT_NAMESPACE_NAME : entry.getValue().get( 0 ).physicalSchemaName; - - for ( ExportedColumn exportedColumn : entry.getValue() ) { - if ( selectedAttributeNames.stream().noneMatch( name -> name.equalsIgnoreCase( exportedColumn.name ) ) ) { + for ( ExportedColumn exportedColumn : exportedColumnList ) { + if ( !wishedColumnNames.contains( exportedColumn.name ) || catalog.getSnapshot().rel().getColumn( logical.id, exportedColumn.name ).isPresent() ) { continue; } - if ( !catalog.getSnapshot().rel().getColumn( logical.id, exportedColumn.name ).isEmpty() ) { - continue; - } - LogicalColumn column = catalog.getLogicalRel( namespace ).addColumn( - exportedColumn.name, - logical.id, - exportedColumn.physicalPosition, - exportedColumn.type, - exportedColumn.collectionsType, - exportedColumn.length, - exportedColumn.scale, - exportedColumn.dimension, - exportedColumn.cardinality, - exportedColumn.nullable, - Collation.getDefaultCollation() ); - - AllocationColumn allocationColumn = catalog.getAllocRel( namespace ).addColumn( - placement.id, - logical.id, - column.id, - adapter.get().adapterId, - PlacementType.STATIC, - exportedColumn.physicalPosition ); - - columns.add( column ); - aColumns.add( allocationColumn ); - + addColumnToSourceTable( logical, exportedColumn.physicalColumnName, exportedColumn.name, null, null, null, statement ); } - if ( isNewTable ) { - buildNamespace( Catalog.defaultNamespaceId, logical, adapter.get() ); - } + List columns = catalog.getSnapshot().rel().getColumns( logical.id ); + List aCols = catalog.getSnapshot().alloc().getColumns( allocation.id ).stream().toList(); - if ( isNewTable || !columns.isEmpty() ) { - LogicalTable t = logical; - List lCols = columns; - List aCols = aColumns; - - tsx.attachCommitAction( () -> - adapter.get().createTable( - null, - LogicalTableWrapper.of( t, - lCols, - List.of(), - physicalSchema, - t.name ), - AllocationTableWrapper.of( - allocation.unwrapOrThrow( AllocationTable.class ), - aCols ) ) ); - } + adapter.get().createTable( + /* context */ null, + /* logical */ LogicalTableWrapper.of( + logical, + columns, + List.of(), // Keys / Constraints + "public", + tableName ), + /* allocation*/ AllocationTableWrapper.of( + allocation.unwrapOrThrow( AllocationTable.class ), + aCols ) ); catalog.updateSnapshot(); + } catalog.updateSnapshot(); + statement.getQueryProcessor().resetCaches(); tsx.commit(); - statement.close(); + } @@ -679,7 +661,7 @@ public void renameNamespace( String newName, String currentName ) { @Override public void addColumnToSourceTable( LogicalTable table, String columnPhysicalName, String columnLogicalName, String beforeColumnName, String afterColumnName, PolyValue defaultValue, Statement statement ) { - if ( catalog.getSnapshot().rel().getColumn( table.id, columnLogicalName ).isEmpty() ) { + if ( !catalog.getSnapshot().rel().getColumn( table.id, columnLogicalName ).isEmpty() ) { throw new GenericRuntimeException( "There exist already a column with name %s on table %s", columnLogicalName, table.name ); } @@ -737,6 +719,7 @@ public void addColumnToSourceTable( LogicalTable table, String columnPhysicalNam // Add default value addDefaultValue( table.namespaceId, defaultValue, addedColumn ); + int physPos = exportedColumn.physicalPosition; // Add column placement catalog.getAllocRel( table.namespaceId ).addColumn( allocation.partitionId, @@ -744,12 +727,13 @@ public void addColumnToSourceTable( LogicalTable table, String columnPhysicalNam addedColumn.id, dataSource.adapterId, PlacementType.STATIC, - 
catalog.getSnapshot().alloc().getColumns( allocation.id ).size() );//Not a valid partitionID --> placeholder + physPos );//Not a valid partitionID --> placeholder // Set column position // catalog.getAllocRel( catalogTable.namespaceId ).updateColumnPlacementPhysicalPosition( adapterId, addedColumn.id, exportedColumn.physicalPosition ); // Reset plan cache implementation cache & routing cache + catalog.updateSnapshot(); statement.getQueryProcessor().resetCaches(); } From ea6526dcda8068b3d6d3457f22dd7e04d613a33a Mon Sep 17 00:00:00 2001 From: romanost03 Date: Tue, 24 Jun 2025 21:43:47 +0200 Subject: [PATCH 46/68] Bugfix: remove unnecessary metadata when reconfigure. --- .../java/org/polypheny/db/ddl/DdlManager.java | 2 +- .../java/org/polypheny/db/ddl/DdlManagerImpl.java | 15 ++++++++++++++- .../main/java/org/polypheny/db/webui/Crud.java | 4 ++-- 3 files changed, 17 insertions(+), 4 deletions(-) diff --git a/core/src/main/java/org/polypheny/db/ddl/DdlManager.java b/core/src/main/java/org/polypheny/db/ddl/DdlManager.java index 3407b98c0d..40ae08c680 100644 --- a/core/src/main/java/org/polypheny/db/ddl/DdlManager.java +++ b/core/src/main/java/org/polypheny/db/ddl/DdlManager.java @@ -559,7 +559,7 @@ public static DdlManager getInstance() { public abstract void addSelectedMetadata( Transaction tsx, Statement statement, String uniqueName, long namespace, List selectedPaths ); - public abstract void dropSourceEntities( List paths, Statement statement ); + public abstract void dropSourceEntities( List paths, Statement statement, String uniqueName ); public abstract void dropCollection( LogicalCollection catalogCollection, Statement statement ); diff --git a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java index 93278ec47f..a93e3abaad 100644 --- a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java +++ b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java @@ -311,7 +311,20 @@ record PathParts( String physicalNs, String physicalSchema, String table, String @Override - public void dropSourceEntities( List paths, Statement statement ) { + public void dropSourceEntities( List paths, Statement statement, String uniqueName ) { + + DataSource adapter = AdapterManager.getInstance().getSource( uniqueName ).orElseThrow(); + Map settings = adapter.getSettings(); + + String selectedAttributes = settings.get( "selectedAttributes" ); + selectedAttributes = selectedAttributes.replace("[", "").replace("]", ""); + List currentPaths = new ArrayList<>( List.of( selectedAttributes.split( "," ) ) ); + currentPaths.removeIf( path -> paths.contains( path.trim() ) ); + + String newPaths = String.join( ",", currentPaths ); + settings.put( "selectedAttributes", newPaths ); + adapter.updateSettings( settings ); + Map> worklist = new HashMap<>(); for ( String raw : paths ) { diff --git a/webui/src/main/java/org/polypheny/db/webui/Crud.java b/webui/src/main/java/org/polypheny/db/webui/Crud.java index 935338b25d..f899c792d9 100644 --- a/webui/src/main/java/org/polypheny/db/webui/Crud.java +++ b/webui/src/main/java/org/polypheny/db/webui/Crud.java @@ -1002,7 +1002,7 @@ void metadataAck( final Context ctx ) { if ( payload.removedPaths != null ) { stmt = transaction.createStatement(); - DdlManager.getInstance().dropSourceEntities( List.of( payload.removedPaths ), stmt ); + DdlManager.getInstance().dropSourceEntities( List.of( payload.removedPaths ), stmt, payload.uniqueName ); stmt = null; } transaction.commit(); @@ -1067,7 +1067,7 @@ void 
setMetaConfiguration( final Context ctx ) { if ( !toUnselect.isEmpty() ) { try { stmt = tx.createStatement(); - DdlManager.getInstance().dropSourceEntities( List.copyOf( toUnselect ), stmt ); + DdlManager.getInstance().dropSourceEntities( List.copyOf( toUnselect ), stmt, config.uniqueName ); NodeUtil.unmarkSelectedAttributes( ((MetadataProvider) adapter.get()).getRoot(), List.copyOf( toUnselect ) ); tx.commit(); stmt.close(); From c8aa141f420bde8cbc6bde22709eaa4bfe240f11 Mon Sep 17 00:00:00 2001 From: romanost03 Date: Tue, 24 Jun 2025 21:44:18 +0200 Subject: [PATCH 47/68] Add method for removing connection handler when not used anymore. --- .../jdbc/connection/ConnectionFactory.java | 2 ++ .../TransactionalConnectionFactory.java | 20 +++++++++++++++++++ .../jdbc/connection/XaConnectionFactory.java | 6 ++++++ .../jdbc/sources/AbstractJdbcSource.java | 8 ++++++++ 4 files changed, 36 insertions(+) diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/connection/ConnectionFactory.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/connection/ConnectionFactory.java index 4b5947b4c1..ccea490d3c 100644 --- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/connection/ConnectionFactory.java +++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/connection/ConnectionFactory.java @@ -37,4 +37,6 @@ public interface ConnectionFactory { int getNumIdle(); + void releaseConnectionHandler( Xid xid, boolean commit ) throws ConnectionHandlerException; + } diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/connection/TransactionalConnectionFactory.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/connection/TransactionalConnectionFactory.java index 22ad262ade..a7b5062e3e 100644 --- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/connection/TransactionalConnectionFactory.java +++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/connection/TransactionalConnectionFactory.java @@ -131,6 +131,26 @@ public int getNumIdle() { } + @Override + public void releaseConnectionHandler( Xid xid, boolean commit ) throws ConnectionHandlerException { + TransactionalConnectionHandler handler = activeInstances.remove( xid ); + if ( handler == null ) { + log.warn( "No active handler for XID {} to release", xid ); + return; + } + try { + if ( commit ) { + handler.commit(); + } else { + handler.rollback(); + } + } finally { + handler.xid = null; + freeInstances.offer( handler ); + } + } + + public class TransactionalConnectionHandler extends ConnectionHandler { private Xid xid; diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/connection/XaConnectionFactory.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/connection/XaConnectionFactory.java index d64e35b841..a1ec8dea3f 100644 --- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/connection/XaConnectionFactory.java +++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/connection/XaConnectionFactory.java @@ -134,6 +134,12 @@ public int getNumIdle() { } + @Override + public void releaseConnectionHandler( Xid xid, boolean commit ) throws ConnectionHandlerException { + log.warn( "Not implemented!" 
);
+    }
+
+
     public class XaConnectionHandler extends ConnectionHandler {
 
         private final XAResource xaResource;
 
diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/sources/AbstractJdbcSource.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/sources/AbstractJdbcSource.java
index a752de2540..e573b2971e 100644
--- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/sources/AbstractJdbcSource.java
+++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/sources/AbstractJdbcSource.java
@@ -332,9 +332,17 @@ public Map<String, List<ExportedColumn>> getExportedColumns() {
                     map.put( tableName, list );
                 }
             }
+            connectionFactory.releaseConnectionHandler( xid, true );
         } catch ( SQLException | ConnectionHandlerException e ) {
+            try {
+                connectionFactory.releaseConnectionHandler( xid, false );
+            } catch ( ConnectionHandlerException ex ) {
+                throw new RuntimeException( ex );
+            }
             throw new GenericRuntimeException( "Exception while collecting schema information!" + e );
+
         }
+
         return map;
     }
 

From 1a2c1952ad09eebc8d26752d3f8c28278a8e6b84 Mon Sep 17 00:00:00 2001
From: romanost03
Date: Thu, 26 Jun 2025 15:53:41 +0200
Subject: [PATCH 48/68] Bugfixes for the Oracle and MySQL adapters. Add
 metadata provider for the JSON source (prototype for now). Bugfix for
 adding meta-configurations.

---
 .../db/adapter/java/AdapterTemplate.java      |   1 +
 .../java/org/polypheny/db/ddl/DdlManager.java |   2 +-
 .../db/schemaDiscovery/AbstractNode.java      |   5 +
 .../db/schemaDiscovery/AttributeNode.java     |   2 +
 .../db/schemaDiscovery/DocumentArrayNode.java |  37 +++
 .../schemaDiscovery/DocumentObjectNode.java   |  41 ++++
 .../db/schemaDiscovery/DocumentValueNode.java |  42 ++++
 .../db/schemaDiscovery/MetadataProvider.java  |   2 +-
 .../polypheny/db/schemaDiscovery/Node.java    |  37 +--
 .../db/schemaDiscovery/NodeSerializer.java    |  11 +
 .../org/polypheny/db/ddl/DdlManagerImpl.java  | 115 +++++----
 .../jdbc/sources/AbstractJdbcSource.java      |   4 +-
 .../polypheny/db/adapter/json/JsonSource.java | 169 +++++++++++++-
 .../db/adapter/jdbc/MysqlSourcePlugin.java    | 218 ++++++++++++++----
 .../source/OracleSource.java                  |  90 ++++----
 .../SqlAlterSourceTableAddColumn.java         |   2 +-
 .../java/org/polypheny/db/webui/Crud.java     |  11 +-
 17 files changed, 636 insertions(+), 153 deletions(-)
 create mode 100644 core/src/main/java/org/polypheny/db/schemaDiscovery/DocumentArrayNode.java
 create mode 100644 core/src/main/java/org/polypheny/db/schemaDiscovery/DocumentObjectNode.java
 create mode 100644 core/src/main/java/org/polypheny/db/schemaDiscovery/DocumentValueNode.java

diff --git a/core/src/main/java/org/polypheny/db/adapter/java/AdapterTemplate.java b/core/src/main/java/org/polypheny/db/adapter/java/AdapterTemplate.java
index a369aed543..8a74d6c8b4 100644
--- a/core/src/main/java/org/polypheny/db/adapter/java/AdapterTemplate.java
+++ b/core/src/main/java/org/polypheny/db/adapter/java/AdapterTemplate.java
@@ -131,6 +131,7 @@ public PreviewResult preview( Map<String, String> settings, int limit ) {
             log.info( "🎯 Adapter supports MetadataProvider. Fetching metadata and preview..."
); AbstractNode meta = mp.fetchMetadataTree(); mp.setRoot( meta ); + mp.printTree( meta, 0 ); String json = NodeSerializer.serializeNode( meta ).toString(); MetadataHasher hasher = new MetadataHasher(); String hash = hasher.hash( json ); diff --git a/core/src/main/java/org/polypheny/db/ddl/DdlManager.java b/core/src/main/java/org/polypheny/db/ddl/DdlManager.java index 40ae08c680..6f1e1757d9 100644 --- a/core/src/main/java/org/polypheny/db/ddl/DdlManager.java +++ b/core/src/main/java/org/polypheny/db/ddl/DdlManager.java @@ -154,7 +154,7 @@ public static DdlManager getInstance() { * @param afterColumnName the name of the column after the column, which is inserted; can be null * @param defaultValue the default value of the inserted column */ - public abstract void addColumnToSourceTable( LogicalTable table, String columnPhysicalName, String columnLogicalName, String beforeColumnName, String afterColumnName, PolyValue defaultValue, Statement statement ); + public abstract void addColumnToSourceTable( LogicalTable table, String columnPhysicalName, String columnLogicalName, String beforeColumnName, String afterColumnName, PolyValue defaultValue, Statement statement, String tablePhysicalName ); /** * Add a column to an existing table diff --git a/core/src/main/java/org/polypheny/db/schemaDiscovery/AbstractNode.java b/core/src/main/java/org/polypheny/db/schemaDiscovery/AbstractNode.java index eb5cf6e4f7..e247ddebd0 100644 --- a/core/src/main/java/org/polypheny/db/schemaDiscovery/AbstractNode.java +++ b/core/src/main/java/org/polypheny/db/schemaDiscovery/AbstractNode.java @@ -16,14 +16,19 @@ package org.polypheny.db.schemaDiscovery; +import com.fasterxml.jackson.annotation.JsonProperty; import java.util.List; import java.util.Map; public interface AbstractNode { + @JsonProperty String type = ""; + @JsonProperty String name = ""; + @JsonProperty List children = null; + @JsonProperty Map properties = null; void addChild(AbstractNode node); diff --git a/core/src/main/java/org/polypheny/db/schemaDiscovery/AttributeNode.java b/core/src/main/java/org/polypheny/db/schemaDiscovery/AttributeNode.java index a5906e6f9d..095b3e9442 100644 --- a/core/src/main/java/org/polypheny/db/schemaDiscovery/AttributeNode.java +++ b/core/src/main/java/org/polypheny/db/schemaDiscovery/AttributeNode.java @@ -16,6 +16,7 @@ package org.polypheny.db.schemaDiscovery; +import com.fasterxml.jackson.annotation.JsonProperty; import lombok.Getter; import lombok.Setter; @@ -23,6 +24,7 @@ public class AttributeNode extends Node implements AbstractNode { @Getter @Setter + @JsonProperty private boolean isSelected; public AttributeNode( String type, String name ) { diff --git a/core/src/main/java/org/polypheny/db/schemaDiscovery/DocumentArrayNode.java b/core/src/main/java/org/polypheny/db/schemaDiscovery/DocumentArrayNode.java new file mode 100644 index 0000000000..5b5cbff1d8 --- /dev/null +++ b/core/src/main/java/org/polypheny/db/schemaDiscovery/DocumentArrayNode.java @@ -0,0 +1,37 @@ +/* + * Copyright 2019-2025 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.schemaDiscovery; + + +import com.fasterxml.jackson.annotation.JsonProperty; +import lombok.Getter; +import lombok.Setter; + +public class DocumentArrayNode extends Node implements AbstractNode { + + @Getter + @Setter + @JsonProperty + private String jsonPath; + + + public DocumentArrayNode( String name, String jsonPath ) { + super( "array", name ); + this.jsonPath = jsonPath; + } + +} diff --git a/core/src/main/java/org/polypheny/db/schemaDiscovery/DocumentObjectNode.java b/core/src/main/java/org/polypheny/db/schemaDiscovery/DocumentObjectNode.java new file mode 100644 index 0000000000..b823ec1e77 --- /dev/null +++ b/core/src/main/java/org/polypheny/db/schemaDiscovery/DocumentObjectNode.java @@ -0,0 +1,41 @@ +/* + * Copyright 2019-2025 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.schemaDiscovery; + +import com.fasterxml.jackson.annotation.JsonProperty; +import lombok.Getter; +import lombok.Setter; + +public class DocumentObjectNode extends Node implements AbstractNode { + + @Getter + @Setter + @JsonProperty + private String jsonPath; + @Getter + @Setter + @JsonProperty + private boolean cardCandidate; + + + public DocumentObjectNode( String name, String jsonPath, boolean cardCandidate ) { + super( "object", name ); + this.jsonPath = jsonPath; + this.cardCandidate = cardCandidate; + } + +} diff --git a/core/src/main/java/org/polypheny/db/schemaDiscovery/DocumentValueNode.java b/core/src/main/java/org/polypheny/db/schemaDiscovery/DocumentValueNode.java new file mode 100644 index 0000000000..23d08ac441 --- /dev/null +++ b/core/src/main/java/org/polypheny/db/schemaDiscovery/DocumentValueNode.java @@ -0,0 +1,42 @@ +/* + * Copyright 2019-2025 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.db.schemaDiscovery; + +import com.fasterxml.jackson.annotation.JsonProperty; +import lombok.Getter; +import lombok.Setter; + +public class DocumentValueNode extends Node implements AbstractNode { + + @Getter + @Setter + @JsonProperty + private String jsonPath; + @Getter + @Setter + @JsonProperty + private String valueType; + + + public DocumentValueNode( String name, String jsonPath, String valueType, Object sample ) { + super( "value", name ); + this.jsonPath = jsonPath; + this.valueType = valueType; + addProperty( "sample", sample ); + } + +} diff --git a/core/src/main/java/org/polypheny/db/schemaDiscovery/MetadataProvider.java b/core/src/main/java/org/polypheny/db/schemaDiscovery/MetadataProvider.java index 5549662c21..3111dd670a 100644 --- a/core/src/main/java/org/polypheny/db/schemaDiscovery/MetadataProvider.java +++ b/core/src/main/java/org/polypheny/db/schemaDiscovery/MetadataProvider.java @@ -24,7 +24,7 @@ public interface MetadataProvider { AbstractNode fetchMetadataTree(); - List> fetchPreview( Connection conn, String fqName, int limit ); + Object fetchPreview( Connection conn, String fqName, int limit ); void markSelectedAttributes( List selectedPaths ); diff --git a/core/src/main/java/org/polypheny/db/schemaDiscovery/Node.java b/core/src/main/java/org/polypheny/db/schemaDiscovery/Node.java index 879b334d95..bddb7b1567 100644 --- a/core/src/main/java/org/polypheny/db/schemaDiscovery/Node.java +++ b/core/src/main/java/org/polypheny/db/schemaDiscovery/Node.java @@ -16,6 +16,7 @@ package org.polypheny.db.schemaDiscovery; +import com.fasterxml.jackson.annotation.JsonProperty; import lombok.Getter; import lombok.Setter; import java.util.ArrayList; @@ -23,37 +24,43 @@ import java.util.List; import java.util.Map; +@Setter +@Getter public class Node implements AbstractNode { - @Setter - @Getter + @JsonProperty protected String type; - @Setter - @Getter + @JsonProperty protected String name; - @Setter - @Getter + @JsonProperty protected List children; - @Setter - @Getter + @JsonProperty protected Map properties; - public Node(String type, String name) { + + public Node( String type, String name ) { this.type = type; this.name = name; this.children = new ArrayList<>(); this.properties = new HashMap<>(); } + @JsonProperty + public void addChild( AbstractNode node ) { + children.add( node ); + } - public void addChild(AbstractNode node) { - children.add(node); + @JsonProperty + public void addProperty( String key, Object value ) { + properties.put( key, value ); } +} + + + + + - public void addProperty(String key, Object value) { - properties.put(key, value); - } -} diff --git a/core/src/main/java/org/polypheny/db/schemaDiscovery/NodeSerializer.java b/core/src/main/java/org/polypheny/db/schemaDiscovery/NodeSerializer.java index 666b87c787..66aa28018c 100644 --- a/core/src/main/java/org/polypheny/db/schemaDiscovery/NodeSerializer.java +++ b/core/src/main/java/org/polypheny/db/schemaDiscovery/NodeSerializer.java @@ -31,6 +31,17 @@ public static ObjectNode serializeNode(AbstractNode node) { if ( node instanceof AttributeNode attr ) json.put( "isSelected", attr.isSelected() ); + if (node instanceof DocumentObjectNode obj) { + json.put("jsonPath", obj.getJsonPath()); + json.put("cardCandidate", obj.isCardCandidate()); + } + else if (node instanceof DocumentArrayNode arr) { + json.put("jsonPath", arr.getJsonPath()); + } + else if (node instanceof DocumentValueNode val) { + json.put("jsonPath", val.getJsonPath()); + json.put("valueType", val.getValueType()); + } 
ObjectNode props = objectMapper.createObjectNode(); node.getProperties().forEach((key, value) -> { diff --git a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java index 083a6c154c..6cde8ff075 100644 --- a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java +++ b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java @@ -65,6 +65,7 @@ import org.polypheny.db.algebra.type.AlgDataTypeField; import org.polypheny.db.algebra.type.DocumentType; import org.polypheny.db.catalog.Catalog; +import org.polypheny.db.catalog.catalogs.AdapterCatalog; import org.polypheny.db.catalog.catalogs.RelAdapterCatalog; import org.polypheny.db.catalog.entity.LogicalAdapter; import org.polypheny.db.catalog.entity.LogicalAdapter.AdapterType; @@ -93,6 +94,8 @@ import org.polypheny.db.catalog.entity.logical.LogicalTableWrapper; import org.polypheny.db.catalog.entity.logical.LogicalView; import org.polypheny.db.catalog.entity.physical.PhysicalColumn; +import org.polypheny.db.catalog.entity.physical.PhysicalEntity; +import org.polypheny.db.catalog.entity.physical.PhysicalTable; import org.polypheny.db.catalog.exceptions.GenericRuntimeException; import org.polypheny.db.catalog.logistic.Collation; import org.polypheny.db.catalog.logistic.ConstraintType; @@ -251,7 +254,7 @@ record PathParts( String physicalNs, String physicalSchema, String table, String Map> exportedColumns; try { - exportedColumns = adapter.get().getExportedColumns(); + exportedColumns = adapter.get().asRelationalDataSource().getExportedColumns(); } catch ( Exception e ) { throw new GenericRuntimeException( "Something went wrong when trying to get exported columns", e ); } @@ -261,11 +264,15 @@ record PathParts( String physicalNs, String physicalSchema, String table, String Set wishedColumnNames = entry.getValue(); List exportedColumnList = exportedColumns.getOrDefault( tableName, List.of() ); - LogicalTable logical; + String physicalSchema = exportedColumnList.isEmpty() + ? 
Catalog.DEFAULT_NAMESPACE_NAME + : exportedColumnList.get( 0 ).physicalSchemaName(); + + LogicalTable logical = findLogicalTableByPhysical( namespace, adapter.get(), physicalSchema, tableName ); AllocationPlacement placement; AllocationEntity allocation; - if ( catalog.getSnapshot().rel().getTable( namespace, tableName ).isEmpty() ) { + if ( logical == null ) { logical = catalog.getLogicalRel( namespace ).addTable( tableName, EntityType.SOURCE, !adapter.get().isDataReadOnly() ); Pair pp = createSinglePartition( namespace, logical ); placement = catalog.getAllocRel( namespace ).addPlacement( logical.id, namespace, adapter.get().getAdapterId() ); @@ -273,30 +280,29 @@ record PathParts( String physicalNs, String physicalSchema, String table, String catalog.updateSnapshot(); } else { - logical = catalog.getSnapshot().rel().getTable( namespace, tableName ).orElseThrow(); placement = catalog.getSnapshot().alloc().getPlacement( adapter.get().getAdapterId(), logical.id ).orElseThrow(); allocation = catalog.getSnapshot().alloc().getFromLogical( logical.id ).stream().filter( a -> a.adapterId == adapter.get().getAdapterId() ).findFirst().orElseThrow(); } for ( ExportedColumn exportedColumn : exportedColumnList ) { - if ( !wishedColumnNames.contains( exportedColumn.name ) || catalog.getSnapshot().rel().getColumn( logical.id, exportedColumn.name ).isPresent() ) { + if ( !wishedColumnNames.contains( exportedColumn.name() ) || catalog.getSnapshot().rel().getColumn( logical.id, exportedColumn.name() ).isPresent() ) { continue; } - addColumnToSourceTable( logical, exportedColumn.physicalColumnName, exportedColumn.name, null, null, null, statement ); + addColumnToSourceTable( logical, exportedColumn.physicalColumnName(), exportedColumn.name(), null, null, null, statement, exportedColumn.physicalTableName() ); } List columns = catalog.getSnapshot().rel().getColumns( logical.id ); List aCols = catalog.getSnapshot().alloc().getColumns( allocation.id ).stream().toList(); adapter.get().createTable( - /* context */ null, - /* logical */ LogicalTableWrapper.of( + null, + LogicalTableWrapper.of( logical, columns, - List.of(), // Keys / Constraints - "public", + List.of(), + physicalSchema, tableName ), - /* allocation*/ AllocationTableWrapper.of( + AllocationTableWrapper.of( allocation.unwrapOrThrow( AllocationTable.class ), aCols ) ); @@ -312,6 +318,26 @@ record PathParts( String physicalNs, String physicalSchema, String table, String } + private LogicalTable findLogicalTableByPhysical( long namespace, DataSource adapter, String physicalSchema, String physicalTable ) { + RelAdapterCatalog ac = (RelAdapterCatalog) adapter.getCatalog(); + for ( PhysicalEntity pe : ac.getPhysicals().values() ) { + if ( !( pe instanceof PhysicalTable pt ) ) { + continue; + } + if ( physicalSchema.equals( pt.getNamespaceName() ) && physicalTable.equals( pt.getName() ) ) { + + long logicalId = pt.getLogicalId(); + return catalog.getSnapshot() + .rel() + .getTable( logicalId ) + .orElse( null ); + } + } + + return null; + } + + @Override public void dropSourceEntities( List paths, Statement statement, String uniqueName ) { @@ -319,7 +345,7 @@ public void dropSourceEntities( List paths, Statement statement, String Map settings = adapter.getSettings(); String selectedAttributes = settings.get( "selectedAttributes" ); - selectedAttributes = selectedAttributes.replace("[", "").replace("]", ""); + selectedAttributes = selectedAttributes.replace( "[", "" ).replace( "]", "" ); List currentPaths = new ArrayList<>( List.of( 
selectedAttributes.split( "," ) ) ); currentPaths.removeIf( path -> paths.contains( path.trim() ) ); @@ -385,7 +411,9 @@ public void dropSourceEntities( List paths, Statement statement, String } } + catalog.updateSnapshot(); statement.getQueryProcessor().resetCaches(); + statement.getTransaction().commit(); } @@ -416,6 +444,7 @@ private void dropWholeSourceTable( LogicalTable table, Statement statement ) { catalog.getLogicalRel( table.namespaceId ).deleteColumn( c.id ); } catalog.getLogicalRel( table.namespaceId ).deleteTable( table.id ); + catalog.updateSnapshot(); } @@ -451,6 +480,7 @@ private void dropSourceColumn( LogicalTable table, String columnName, Statement statement.getQueryProcessor().resetCaches(); } } + catalog.updateSnapshot(); } @@ -541,11 +571,10 @@ private void createRelationalSource( Transaction transaction, DataSource adap // Make sure the table name is uniqueString tableName = entry.getKey(); String physicalSchema = entry.getValue().isEmpty() ? Catalog.DEFAULT_NAMESPACE_NAME - : entry.getValue().get( 0 ).physicalSchemaName; + : entry.getValue().get( 0 ).physicalSchemaName(); String baseName = entry.getKey(); String tableName = baseName; - String physicalTable = baseName; int suffix = 0; @@ -570,29 +599,33 @@ private void createRelationalSource( Transaction transaction, DataSource adap .collect( Collectors.toList() ); for ( ExportedColumn exportedColumn : entry.getValue() ) { - LogicalColumn column = catalog.getLogicalRel( namespace ).addColumn( - exportedColumn.name(), - logical.id, - colPos++, - exportedColumn.type(), - exportedColumn.collectionsType(), - exportedColumn.length(), - exportedColumn.scale(), - exportedColumn.dimension(), - exportedColumn.cardinality(), - exportedColumn.nullable(), - Collation.getDefaultCollation() ); - - AllocationColumn allocationColumn = catalog.getAllocRel( namespace ).addColumn( - placement.id, - logical.id, - column.id, - adapter.adapterId, - PlacementType.STATIC, - exportedColumn.physicalPosition() ); // Not a valid partitionGroupID --> placeholder - - columns.add( column ); - aColumns.add( allocationColumn ); + if ( adapter instanceof MetadataProvider mp && (attributes != null) && selectedAttributeNames.stream().noneMatch( name -> name.equalsIgnoreCase( exportedColumn.name() ) ) ) { + continue; + } else { + LogicalColumn column = catalog.getLogicalRel( namespace ).addColumn( + exportedColumn.name(), + logical.id, + colPos++, + exportedColumn.type(), + exportedColumn.collectionsType(), + exportedColumn.length(), + exportedColumn.scale(), + exportedColumn.dimension(), + exportedColumn.cardinality(), + exportedColumn.nullable(), + Collation.getDefaultCollation() ); + + AllocationColumn allocationColumn = catalog.getAllocRel( namespace ).addColumn( + placement.id, + logical.id, + column.id, + adapter.adapterId, + PlacementType.STATIC, + exportedColumn.physicalPosition() ); + + columns.add( column ); + aColumns.add( allocationColumn ); + } } buildRelationalNamespace( namespace, logical, adapter ); @@ -692,7 +725,7 @@ public void renameNamespace( String newName, String currentName ) { @Override - public void addColumnToSourceTable( LogicalTable table, String columnPhysicalName, String columnLogicalName, String beforeColumnName, String afterColumnName, PolyValue defaultValue, Statement statement ) { + public void addColumnToSourceTable( LogicalTable table, String columnPhysicalName, String columnLogicalName, String beforeColumnName, String afterColumnName, PolyValue defaultValue, Statement statement, String tablePhysicalName ) { if ( 
!catalog.getSnapshot().rel().getColumn( table.id, columnLogicalName ).isEmpty() ) { throw new GenericRuntimeException( "There exist already a column with name %s on table %s", columnLogicalName, table.name ); @@ -719,7 +752,7 @@ public void addColumnToSourceTable( LogicalTable table, String columnPhysicalNam long adapterId = allocation.adapterId; DataSource dataSource = AdapterManager.getInstance().getSource( adapterId ).orElseThrow(); //String physicalTableName = catalog.getSnapshot().alloc().getPhysicalTable( catalogTable.id, adapterId ).name; - List exportedColumns = dataSource.asRelationalDataSource().getExportedColumns().get( table.name ); + List exportedColumns = dataSource.asRelationalDataSource().getExportedColumns().get( tablePhysicalName != null ? tablePhysicalName : table.name ); // Check if physicalColumnName is valid ExportedColumn exportedColumn = exportedColumns.stream() @@ -746,7 +779,7 @@ public void addColumnToSourceTable( LogicalTable table, String columnPhysicalNam // Add default value addDefaultValue( table.namespaceId, defaultValue, addedColumn ); - int physPos = exportedColumn.physicalPosition; + int physPos = exportedColumn.physicalPosition(); // Add column placement catalog.getAllocRel( table.namespaceId ).addColumn( allocation.partitionId, @@ -2458,7 +2491,7 @@ private List addAllocationsForPlacement( long namespaceId, Stat columns.add( catalog.getAllocRel( namespaceId ).addColumn( placementId, logical.id, column.id, adapter.adapterId, PlacementType.AUTOMATIC, i++ ) ); } - buildNamespace( namespaceId, logical, adapter ); + buildRelationalNamespace( namespaceId, logical, adapter ); List tables = new ArrayList<>(); for ( Long partitionId : partitionIds ) { tables.add( addAllocationTable( namespaceId, statement, logical, placementId, partitionId, adapter, true ) ); diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/sources/AbstractJdbcSource.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/sources/AbstractJdbcSource.java index 179511f971..fd0e8b084d 100644 --- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/sources/AbstractJdbcSource.java +++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/sources/AbstractJdbcSource.java @@ -261,12 +261,12 @@ public Map> getExportedColumns() { tableName = names[0]; } List primaryKeyColumns = new ArrayList<>(); - try ( ResultSet row = dbmd.getPrimaryKeys( settings.get( "database" ), schemaPattern, tableName ) ) { + try ( ResultSet row = dbmd.getPrimaryKeys( schemaPattern, null, tableName ) ) { while ( row.next() ) { primaryKeyColumns.add( row.getString( "COLUMN_NAME" ) ); } } - try ( ResultSet row = dbmd.getColumns( settings.get( "database" ), schemaPattern, tableName, "%" ) ) { + try ( ResultSet row = dbmd.getColumns( schemaPattern, null, tableName, "%" ) ) { List list = new ArrayList<>(); while ( row.next() ) { PolyType type = PolyType.getNameForJdbcType( row.getInt( "DATA_TYPE" ) ); diff --git a/plugins/json-adapter/src/main/java/org/polypheny/db/adapter/json/JsonSource.java b/plugins/json-adapter/src/main/java/org/polypheny/db/adapter/json/JsonSource.java index c0cbed72e2..a922391797 100644 --- a/plugins/json-adapter/src/main/java/org/polypheny/db/adapter/json/JsonSource.java +++ b/plugins/json-adapter/src/main/java/org/polypheny/db/adapter/json/JsonSource.java @@ -21,6 +21,8 @@ import java.net.MalformedURLException; import java.net.URL; import java.nio.file.NoSuchFileException; +import 
java.sql.Connection; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Set; @@ -51,6 +53,16 @@ import org.polypheny.db.catalog.logistic.DataModel; import org.polypheny.db.prepare.Context; import org.polypheny.db.schema.Namespace; +import org.polypheny.db.schemaDiscovery.AbstractNode; +import org.polypheny.db.schemaDiscovery.AttributeNode; +import org.polypheny.db.schemaDiscovery.DocumentArrayNode; +import org.polypheny.db.schemaDiscovery.DocumentObjectNode; +import org.polypheny.db.schemaDiscovery.DocumentValueNode; +import org.polypheny.db.schemaDiscovery.Node; +import org.polypheny.db.schemaDiscovery.MetadataProvider; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import org.polypheny.db.schemaDiscovery.NodeSerializer; import org.polypheny.db.transaction.PolyXid; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -66,7 +78,7 @@ @AdapterSettingString(subOf = "method_link", defaultValue = "classpath://articles.json", name = "directoryName", description = "Path to the JSON file(s) to be integrated as this source.", position = 2) @AdapterSettingString(subOf = "method_url", defaultValue = "http://localhost/articles.json", name = "url", description = "URL to the JSON file(s) to be integrated as this source.", position = 2) -public class JsonSource extends DataSource implements DocumentDataSource, Scannable { +public class JsonSource extends DataSource implements DocumentDataSource, Scannable, MetadataProvider { private static final Logger log = LoggerFactory.getLogger( JsonSource.class ); @Delegate(excludes = Excludes.class) @@ -74,6 +86,11 @@ public class JsonSource extends DataSource implements Documen private JsonNamespace namespace; private final ConnectionMethod connectionMethod; private URL jsonFiles; + public AbstractNode metadataRoot; + private Map>> preview = new LinkedHashMap<>(); + + private static final String TEST_JSON_PATH = "C:\\Users\\roman\\Desktop\\data.json"; + private final ObjectMapper mapper = new ObjectMapper(); public JsonSource( final long storeId, final String uniqueName, final Map settings, DeployMode mode ) { @@ -194,6 +211,7 @@ public List createCollection( Context context, LogicalCollection } } + @Override public void truncate( Context context, long allocId ) { log.debug( "NOT SUPPORTED: JSON source does not support method truncate()." ); @@ -206,6 +224,7 @@ public boolean prepare( PolyXid xid ) { return true; } + @Override public void commit( PolyXid xid ) { log.debug( "NOT SUPPORTED: JSON source does not support method commit()." 
);
+    }
+
 
@@ -262,6 +281,154 @@ public DocumentDataSource asDocumentDataSource() {
     }
 
 
+    @Override
+    public AbstractNode fetchMetadataTree() {
+        AbstractNode root = new Node( "document", new File( TEST_JSON_PATH ).getName() );
+
+        try {
+            JsonNode jsonRoot = mapper.readTree( new File( TEST_JSON_PATH ) );
+            buildTreeRecursively( jsonRoot, root, "$", "root" );
+        } catch ( Exception ex ) {
+            throw new RuntimeException( "Failed to build metadata tree for JSON", ex );
+        }
+
+        preview.put( "jsonPreview", List.of( Map.of( "metadata", "rootNode", "preview", root ) ) );
+
+        return root;
+    }
+
+
+    @Override
+    public List<Map<String, Object>> fetchPreview( Connection ignored, String fqName, int limit ) {
+        return null;
+    }
+
+
+    private void buildTreeRecursively( JsonNode current, AbstractNode parent, String jsonPath, String nodeName ) {
+        if ( current.isObject() ) {
+            boolean isCard = parent != null && "array".equals( parent.getType() );
+
+            AbstractNode obj = new DocumentObjectNode(
+                    nodeName,   // display name
+                    jsonPath,   // full path
+                    isCard      // cardCandidate flag
+            );
+            parent.addChild( obj );
+
+            current.fields().forEachRemaining( e ->
+                    buildTreeRecursively(
+                            e.getValue(),                  // child JsonNode
+                            obj,                           // new parent
+                            jsonPath + "." + e.getKey(),   // extend the path
+                            e.getKey()                     // child name
+                    )
+            );
+            return;
+        }
+
+        /* ───────────── JSON ARRAY ────────────── */
+        if ( current.isArray() ) {
+            AbstractNode arr = new DocumentArrayNode( nodeName, jsonPath );
+            parent.addChild( arr );
+
+            int idx = 0;
+            for ( JsonNode element : current ) {
+
+                /* descriptive name for the array element */
+                String childName = "idx" + idx;
+                if ( element.isObject() ) {
+                    if ( element.has( "id" ) ) {
+                        childName = "id=" + element.get( "id" ).asText();
+                    } else if ( element.has( "title" ) ) {
+                        childName = "\"" + element.get( "title" ).asText() + "\"";
+                    } else if ( element.has( "name" ) ) {
+                        childName = element.get( "name" ).asText();
+                    }
+                }
+
+                buildTreeRecursively(
+                        element,
+                        arr,
+                        jsonPath + "[" + idx + "]",
+                        childName
+                );
+                idx++;
+            }
+            return;
+        }
+
+        /* ───────────── PRIMITIVE VALUE ───────── */
+        String valueType = detectType( current );   // string | number | …
+        Object sample = current.isNull() ?
null : current.asText(); + + AbstractNode val = new DocumentValueNode( + nodeName, + jsonPath, + valueType, + sample + ); + parent.addChild( val ); + + } + + + private static String detectType( JsonNode n ) { + if ( n.isTextual() ) { + return "string"; + } + if ( n.isNumber() ) { + return "number"; + } + if ( n.isBoolean() ) { + return "boolean"; + } + if ( n.isNull() ) { + return "null"; + } + return "unknown"; + } + + + @Override + public void markSelectedAttributes( List selectedPaths ) { + + } + + + @Override + public void printTree( AbstractNode node, int depth ) { + if ( node == null ) { + node = this.metadataRoot; + } + System.out.println("Node type:" + node.toString()); + System.out.println( " ".repeat( depth ) + node.getType() + ": " + node.getName() ); + for ( Map.Entry entry : node.getProperties().entrySet() ) { + System.out.println( " ".repeat( depth + 1 ) + "- " + entry.getKey() + ": " + entry.getValue() ); + } + for ( AbstractNode child : node.getChildren() ) { + printTree( child, depth + 1 ); + } + } + + + @Override + public void setRoot( AbstractNode root ) { + this.metadataRoot = root; + } + + + @Override + public Object getPreview() { + return NodeSerializer.serializeNode( this.metadataRoot ); + } + + + @Override + public AbstractNode getRoot() { + return this.metadataRoot; + } + + private interface Excludes { void refreshCollection( long allocId ); diff --git a/plugins/mysql-adapter/src/main/java/org/polypheny/db/adapter/jdbc/MysqlSourcePlugin.java b/plugins/mysql-adapter/src/main/java/org/polypheny/db/adapter/jdbc/MysqlSourcePlugin.java index 9007999384..a3913dcf87 100644 --- a/plugins/mysql-adapter/src/main/java/org/polypheny/db/adapter/jdbc/MysqlSourcePlugin.java +++ b/plugins/mysql-adapter/src/main/java/org/polypheny/db/adapter/jdbc/MysqlSourcePlugin.java @@ -20,7 +20,9 @@ import java.sql.Connection; import java.sql.DatabaseMetaData; import java.sql.ResultSet; +import java.sql.ResultSetMetaData; import java.sql.SQLException; +import java.sql.Statement; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; @@ -58,7 +60,9 @@ import org.polypheny.db.schemaDiscovery.Node; import org.polypheny.db.sql.language.dialect.MysqlSqlDialect; import org.polypheny.db.transaction.PUID; +import org.polypheny.db.transaction.PUID.Type; import org.polypheny.db.transaction.PolyXid; +import org.polypheny.db.type.PolyType; @SuppressWarnings("unused") public class MysqlSourcePlugin extends PolyPlugin { @@ -171,7 +175,7 @@ protected String getConnectionUrl( final String dbHostname, final int dbPort, fi @Override protected boolean requiresSchema() { - return false; + return true; } @@ -194,9 +198,7 @@ public AbstractNode fetchMetadataTree() { try ( ResultSet schemas = meta.getCatalogs() ) { while ( schemas.next() ) { - String schemaName = requiresSchema() - ? schemas.getString( "TABLE_SCHEM" ) - : schemas.getString( "TABLE_CAT" ); + String schemaName = schemas.getString( "TABLE_CAT" ); if ( filter.ignoredSchemas.contains( schemaName ) ) { continue; @@ -218,6 +220,19 @@ public AbstractNode fetchMetadataTree() { continue; } + String fqName = "`" + schemaName + "`.`" + tableName + "`"; + + List> preview = previewByTable.computeIfAbsent( + schemaName + "." 
+ tableName, + k -> { + try { + return fetchPreview(conn, fqName, 10); + } catch (Exception e) { + log.warn("Preview failed for {}", fqName, e); + return List.of(); + } + }); + AbstractNode tableNode = new Node( "table", tableName ); Set pkCols = new HashSet<>(); @@ -279,7 +294,163 @@ public AbstractNode fetchMetadataTree() { @Override public List> fetchPreview( Connection conn, String fqName, int limit ) { - return List.of(); + List> rows = new ArrayList<>(); + try ( Statement stmt = conn.createStatement(); + ResultSet rs = stmt.executeQuery( "SELECT * FROM " + fqName + " LIMIT " + limit ) ) { + + ResultSetMetaData meta = rs.getMetaData(); + while ( rs.next() ) { + Map row = new LinkedHashMap<>(); + for ( int i = 1; i <= meta.getColumnCount(); i++ ) { + row.put( meta.getColumnName( i ), rs.getObject( i ) ); + } + rows.add( row ); + } + } catch ( SQLException e ) { + log.warn( "Preview failed for {}", fqName, e ); + return List.of(); + } + return rows; + } + + + @Override + public Map> getExportedColumns() { + Map> map = new HashMap<>(); + PolyXid xid = PolyXid.generateLocalTransactionIdentifier( PUID.randomPUID( Type.RANDOM ), PUID.randomPUID( Type.RANDOM ) ); + try { + ConnectionHandler connectionHandler = connectionFactory.getOrCreateConnectionHandler( xid ); + java.sql.Statement statement = connectionHandler.getStatement(); + Connection connection = statement.getConnection(); + DatabaseMetaData dbmd = connection.getMetaData(); + + String[] tables; + + for ( Map.Entry entry : settings.entrySet() ) { + log.error( "Entry: {} = {}", entry.getKey(), entry.getValue() ); + } + + if ( !settings.containsKey( "selectedAttributes" ) || settings.get( "selectedAttributes" ).equals( "" ) || settings.get( "selectedAttributes" ).isEmpty() || settings.get( "selectedAttributes" ) == null ) { + tables = settings.get( "tables" ).split( "," ); + } else { + String[] names2 = settings.get( "selectedAttributes" ).split( "," ); + Set tableNames = new HashSet<>(); + + for ( String s : names2 ) { + String attr = s.split( " : " )[0]; + + String[] parts = attr.split( "\\." ); + if ( parts.length >= 3 ) { + String tableName = parts[1] + "." + parts[2]; + + if ( !requiresSchema() ) { + tableNames.add( parts[2] ); + } else { + tableNames.add( tableName ); + } + } + } + tables = tableNames.toArray( new String[0] ); + } + for ( String str : tables ) { + String[] names = str.split( "\\." 
); + if ( names.length == 0 || names.length > 2 || (requiresSchema() && names.length == 1) ) { + throw new GenericRuntimeException( "Invalid table name: " + str ); + } + String tableName; + String schemaPattern; + if ( requiresSchema() ) { + schemaPattern = names[0]; + tableName = names[1]; + } else { + schemaPattern = null; + tableName = names[0]; + } + List primaryKeyColumns = new ArrayList<>(); + try ( ResultSet row = dbmd.getPrimaryKeys( schemaPattern, null, tableName ) ) { + while ( row.next() ) { + primaryKeyColumns.add( row.getString( "COLUMN_NAME" ) ); + } + } + try ( ResultSet row = dbmd.getColumns( schemaPattern, null, tableName, "%" ) ) { + List list = new ArrayList<>(); + while ( row.next() ) { + PolyType type = PolyType.getNameForJdbcType( row.getInt( "DATA_TYPE" ) ); + Integer length = null; + Integer scale = null; + Integer dimension = null; + Integer cardinality = null; + switch ( type ) { + case BOOLEAN: + case TINYINT: + case SMALLINT: + case INTEGER: + case BIGINT: + case FLOAT: + case REAL: + case DOUBLE: + case DATE: + break; + case DECIMAL: + length = row.getInt( "COLUMN_SIZE" ); + scale = row.getInt( "DECIMAL_DIGITS" ); + break; + case TIME: + length = row.getInt( "DECIMAL_DIGITS" ); + if ( length > 3 ) { + throw new GenericRuntimeException( "Unsupported precision for data type time: " + length ); + } + break; + case TIMESTAMP: + length = row.getInt( "DECIMAL_DIGITS" ); + if ( length > 3 ) { + throw new GenericRuntimeException( "Unsupported precision for data type timestamp: " + length ); + } + break; + case CHAR: + case VARCHAR: + type = PolyType.VARCHAR; + length = row.getInt( "COLUMN_SIZE" ); + break; + case BINARY: + case VARBINARY: + type = PolyType.VARBINARY; + length = row.getInt( "COLUMN_SIZE" ); + break; + default: + throw new GenericRuntimeException( "Unsupported data type: " + type.getName() ); + } + list.add( new ExportedColumn( + row.getString( "COLUMN_NAME" ).toLowerCase(), + type, + null, + length, + scale, + dimension, + cardinality, + row.getString( "IS_NULLABLE" ).equalsIgnoreCase( "YES" ), + row.getString( "TABLE_CAT" ), + row.getString( "TABLE_NAME" ), + row.getString( "COLUMN_NAME" ), + row.getInt( "ORDINAL_POSITION" ), + primaryKeyColumns.contains( row.getString( "COLUMN_NAME" ) ) + ) ); + } + map.put( tableName, list ); + } + } + connectionFactory.releaseConnectionHandler( xid, true ); + } catch ( SQLException | ConnectionHandlerException e ) { + try { + connectionFactory.releaseConnectionHandler( xid, false ); + } catch ( ConnectionHandlerException ex ) { + throw new RuntimeException( ex ); + } + throw new GenericRuntimeException( "Exception while collecting schema information!" + e ); + + } + + return map; } @@ -356,42 +527,7 @@ public void setRoot( AbstractNode root ) { @Override public Object getPreview() { - Map>> preview = new LinkedHashMap<>(); - - PolyXid xid = PolyXid.generateLocalTransactionIdentifier( PUID.EMPTY_PUID, PUID.EMPTY_PUID ); - try { - ConnectionHandler ch = connectionFactory.getOrCreateConnectionHandler( xid ); - java.sql.Connection conn = ch.getStatement().getConnection(); - - String[] tables = {"testtable"}; - for ( String str : tables ) { - String[] parts = str.split( "\\." ); - String schema = parts.length == 2 ? parts[0] : null; - String table = parts.length == 2 ? parts[1] : parts[0]; - - String fqName = (schema != null ? schema + "." 
: "") + table; - List> rows = new ArrayList<>(); - - try ( var stmt = conn.createStatement(); - var rs = stmt.executeQuery( "SELECT * FROM TESTTABLE " + " LIMIT " + 10 ) ) { - - var meta = rs.getMetaData(); - while ( rs.next() ) { - Map row = new HashMap<>(); - for ( int i = 1; i <= meta.getColumnCount(); i++ ) { - row.put( meta.getColumnName( i ), rs.getObject( i ) ); - } - rows.add( row ); - } - } - - preview.put( fqName, rows ); - } - } catch ( Exception e ) { - throw new GenericRuntimeException( "Error fetching preview data", e ); - } - - return preview; + return this.previewByTable; } diff --git a/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/source/OracleSource.java b/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/source/OracleSource.java index 608878bf5c..5b9104ee11 100644 --- a/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/source/OracleSource.java +++ b/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/source/OracleSource.java @@ -19,6 +19,7 @@ import lombok.extern.slf4j.Slf4j; import org.polypheny.db.adapter.DeployMode; +import org.polypheny.db.adapter.RelationalDataSource; import org.polypheny.db.adapter.annotations.AdapterProperties; import org.polypheny.db.adapter.annotations.AdapterSettingInteger; import org.polypheny.db.adapter.annotations.AdapterSettingList; @@ -44,7 +45,9 @@ import java.sql.Connection; import java.sql.DatabaseMetaData; import java.sql.ResultSet; +import java.sql.ResultSetMetaData; import java.sql.SQLException; +import java.sql.Statement; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; @@ -83,6 +86,8 @@ public class OracleSource extends AbstractJdbcSource implements MetadataProvider { public AbstractNode metadataRoot; + private Map>> previewByTable = new LinkedHashMap<>(); + public OracleSource( final long storeId, final String uniqueName, final Map settings, final DeployMode mode ) { super( @@ -267,7 +272,7 @@ public Map> getExportedColumns() { @Override public AbstractNode fetchMetadataTree() { - Node root = new Node( "relational", settings.get( "database" ) ); + AbstractNode root = new Node( "relational", settings.get( "database" ) ); TableFilter filter = TableFilter.forAdapter( adapterName ); @@ -282,10 +287,9 @@ public AbstractNode fetchMetadataTree() { try ( ResultSet schemas = m.getSchemas() ) { while ( schemas.next() ) { String schemaName = schemas.getString( "TABLE_SCHEM" ); - Node schemaNode = new Node( "schema", schemaName ); + AbstractNode schemaNode = new Node( "schema", schemaName ); - try ( ResultSet tables = - m.getTables( null, schemaName, "%", new String[]{ "TABLE" } ) ) { + try ( ResultSet tables = m.getTables( null, schemaName, "%", new String[]{ "TABLE" } ) ) { while ( tables.next() ) { String owner = tables.getString( "TABLE_SCHEM" ); @@ -299,8 +303,19 @@ public AbstractNode fetchMetadataTree() { continue; } + String fqName = "\"" + owner + "\".\"" + tableName + "\""; + List> preview = previewByTable.computeIfAbsent( + owner + "." 
+ tableName, + k -> { + try { + return fetchPreview( h.getStatement().getConnection(), fqName, 10 ); + } catch ( Exception e ) { + log.warn( "Preview failed for {}", fqName, e ); + return List.of(); + } + } ); - Node tableNode = new Node( "table", tableName ); + AbstractNode tableNode = new Node( "table", tableName ); Set pkCols = new HashSet<>(); try ( ResultSet pk = m.getPrimaryKeys( null, schemaName, tableName ) ) { @@ -357,7 +372,24 @@ public AbstractNode fetchMetadataTree() { @Override public List> fetchPreview( Connection conn, String fqName, int limit ) { - return List.of(); + List> rows = new ArrayList<>(); + try ( Statement stmt = conn.createStatement(); + ResultSet rs = stmt.executeQuery( + "SELECT * FROM " + fqName + " FETCH FIRST " + limit + " ROWS ONLY" ) ) { + + ResultSetMetaData meta = rs.getMetaData(); + while ( rs.next() ) { + Map row = new LinkedHashMap<>(); + for ( int i = 1; i <= meta.getColumnCount(); i++ ) { + row.put( meta.getColumnName( i ), rs.getObject( i ) ); + } + rows.add( row ); + } + } catch ( SQLException e ) { + log.warn( "Preview failed for {}", fqName, e ); + return List.of(); + } + return rows; } @@ -435,45 +467,7 @@ public void setRoot( AbstractNode root ) { @Override public Object getPreview() { - Map>> preview = new LinkedHashMap<>(); - - PolyXid xid = PolyXid.generateLocalTransactionIdentifier( PUID.EMPTY_PUID, PUID.EMPTY_PUID ); - try { - ConnectionHandler ch = connectionFactory.getOrCreateConnectionHandler( xid ); - java.sql.Connection conn = ch.getStatement().getConnection(); - - String[] tables = {"system.test"}; - for ( String str : tables ) { - String[] parts = str.split( "\\." ); - String schema = parts.length == 2 ? parts[0] : null; - String table = parts.length == 2 ? parts[1] : parts[0]; - - schema = schema.toUpperCase(); - table = table.toUpperCase(); - - String fqName = (schema != null ? schema + "." : "") + table; - List> rows = new ArrayList<>(); - - try ( var stmt = conn.createStatement(); - var rs = stmt.executeQuery( "SELECT * FROM " + fqName + " FETCH FIRST " + 10 + " ROWS ONLY" ) ) { - - var meta = rs.getMetaData(); - while ( rs.next() ) { - Map row = new HashMap<>(); - for ( int i = 1; i <= meta.getColumnCount(); i++ ) { - row.put( meta.getColumnName( i ), rs.getObject( i ) ); - } - rows.add( row ); - } - } - - preview.put( fqName, rows ); - } - } catch ( Exception e ) { - throw new GenericRuntimeException( "Error fetching preview data", e ); - } - - return preview; + return this.previewByTable; } @@ -482,4 +476,10 @@ public AbstractNode getRoot() { return this.metadataRoot; } + + @Override + public RelationalDataSource asRelationalDataSource() { + return this; + } + } diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterSourceTableAddColumn.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterSourceTableAddColumn.java index 91e061442e..2e30c2b78c 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterSourceTableAddColumn.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterSourceTableAddColumn.java @@ -132,7 +132,7 @@ public void execute( Context context, Statement statement, ParsedQueryContext pa beforeColumnName == null ? null : beforeColumnName.getSimple(), afterColumnName == null ? 
null : afterColumnName.getSimple(), SqlLiteral.toPoly( defaultValue ), - statement ); + statement, null ); } diff --git a/webui/src/main/java/org/polypheny/db/webui/Crud.java b/webui/src/main/java/org/polypheny/db/webui/Crud.java index 2f2eb11794..8ee372eef4 100644 --- a/webui/src/main/java/org/polypheny/db/webui/Crud.java +++ b/webui/src/main/java/org/polypheny/db/webui/Crud.java @@ -87,9 +87,11 @@ import org.polypheny.db.adapter.AdapterManager.AdapterInformation; import org.polypheny.db.adapter.ConnectionMethod; import org.polypheny.db.adapter.DataSource; -import org.polypheny.db.adapter.DataSource.ExportedColumn; import org.polypheny.db.adapter.DataStore; import org.polypheny.db.adapter.DataStore.FunctionalIndexInfo; +import org.polypheny.db.adapter.MetadataObserver.PublisherManager; +import org.polypheny.db.adapter.MetadataObserver.PublisherManager.ChangeStatus; +import org.polypheny.db.adapter.RelationalDataSource.ExportedColumn; import org.polypheny.db.adapter.index.IndexManager; import org.polypheny.db.adapter.java.AdapterTemplate; import org.polypheny.db.adapter.java.AdapterTemplate.PreviewResult; @@ -1053,17 +1055,15 @@ void setMetaConfiguration( final Context ctx ) { toAdd.removeAll( markedPaths ); Transaction tx = transactionManager.startTransaction( Catalog.defaultUserId, false, "setMetaConfiguration" + config.uniqueName ); - Statement stmt = null; + Statement stmt = tx.createStatement(); try { if ( !toAdd.isEmpty() ) { - stmt = tx.createStatement(); DdlManager.getInstance().addSelectedMetadata( tx, stmt, config.uniqueName, Catalog.defaultNamespaceId, List.copyOf( toAdd ) ); ((MetadataProvider) adapter.get()).markSelectedAttributes( List.copyOf( toAdd ) ); } if ( !toUnselect.isEmpty() ) { try { - stmt = tx.createStatement(); DdlManager.getInstance().dropSourceEntities( List.copyOf( toUnselect ), stmt, config.uniqueName ); NodeUtil.unmarkSelectedAttributes( ((MetadataProvider) adapter.get()).getRoot(), List.copyOf( toUnselect ) ); tx.commit(); @@ -1085,6 +1085,7 @@ void setMetaConfiguration( final Context ctx ) { } finally { if ( stmt != null ) { stmt.close(); + transactionManager.removeTransaction( tx.getXid() ); } } @@ -2427,7 +2428,7 @@ private Exception handleLinkFiles( Context ctx, AdapterModel a, AbstractAdapterS private Exception handleLinkFiles( AbstractAdapterSettingString setting ) { - Path path = Path.of( setting.getValue() ); + Path path = Path.of( "C:\\Users\\roman\\Desktop\\data.json" ); SecurityManager.getInstance().requestPathAccess( "webui", "webui", path ); if ( !SecurityManager.getInstance().checkPathAccess( path ) ) { return new GenericRuntimeException( "Security check for access was not successful; not enough permissions." ); From e1fbf26fcee1163d746336fe4a342e61251a78f4 Mon Sep 17 00:00:00 2001 From: romanost03 Date: Thu, 26 Jun 2025 19:19:22 +0200 Subject: [PATCH 49/68] Bugfix in excel-source: Can now deploy subset of columns. 
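
The gist of the fix: the Excel adapter now records each physical column's
actual 0-based sheet position instead of a running index, so a deployment
that selects only some columns still reads the right cells. A minimal
sketch of the resulting read path (simplified; `row`, `fields`, and
`fieldTypes` stand in for the enumerator's actual state, with `fields`
assumed to already hold 0-based sheet positions):

    // fields = 0-based sheet positions of the selected columns, e.g. {0, 2, 5}
    PolyValue[] objects = new PolyValue[fields.length];
    for ( int i = 0; i < fields.length; i++ ) {
        Cell cell = row.getCell( fields[i] );   // no extra -1 shift anymore
        objects[i] = convert( fieldTypes[i], cell );
    }
    return objects;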
---
 .../catalog/catalogs/RelAdapterCatalog.java   | 13 +++-
 .../org/polypheny/db/ddl/DdlManagerImpl.java  |  2 +-
 .../db/adapter/excel/ExcelEnumerator.java     | 19 +++--
 .../db/adapter/excel/ExcelNamespace.java      | 69 ++++++++++++-----
 4 files changed, 75 insertions(+), 28 deletions(-)

diff --git a/core/src/main/java/org/polypheny/db/catalog/catalogs/RelAdapterCatalog.java b/core/src/main/java/org/polypheny/db/catalog/catalogs/RelAdapterCatalog.java
index 84a3f92cc7..697d3dff69 100644
--- a/core/src/main/java/org/polypheny/db/catalog/catalogs/RelAdapterCatalog.java
+++ b/core/src/main/java/org/polypheny/db/catalog/catalogs/RelAdapterCatalog.java
@@ -100,7 +100,18 @@ public PhysicalColumn getColumn( long id, long allocId ) {
 
     public PhysicalTable createTable( String namespaceName, String tableName, Map<Long, String> columnNames, LogicalTable logical, Map<Long, LogicalColumn> lColumns, List<Long> pkIds, AllocationTableWrapper wrapper ) {
         AllocationTable allocation = wrapper.table;
         List<AllocationColumn> columns = wrapper.columns;
-        List<PhysicalColumn> pColumns = Streams.mapWithIndex( columns.stream(), ( c, i ) -> new PhysicalColumn( columnNames.get( c.columnId ), logical.id, allocation.id, allocation.adapterId, (int) i, lColumns.get( c.columnId ) ) ).toList();
+        List<PhysicalColumn> pColumns = columns.stream().map(ac -> {LogicalColumn lc = lColumns.get(ac.columnId);
+            int sheetPos0 = lc.position - 1; // 0-based XLSX position
+            return new PhysicalColumn(
+                    columnNames.get(ac.columnId),
+                    logical.id,
+                    allocation.id,
+                    allocation.adapterId,
+                    sheetPos0, // ← no i++ here
+                    lc);
+        })
+                .toList();
+
         PhysicalTable table = new PhysicalTable( IdBuilder.getInstance().getNewPhysicalId(), allocation.id, allocation.logicalId, tableName, pColumns, logical.namespaceId, namespaceName, pkIds, allocation.adapterId );
         pColumns.forEach( this::addColumn );
         addPhysical( allocation, table );
diff --git a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java
index 6cde8ff075..e8a28f3d62 100644
--- a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java
+++ b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java
@@ -605,7 +605,7 @@ private void createRelationalSource( Transaction transaction, DataSource adap
                 LogicalColumn column = catalog.getLogicalRel( namespace ).addColumn(
                         exportedColumn.name(),
                         logical.id,
-                        colPos++,
+                        exportedColumn.physicalPosition(),
                         exportedColumn.type(),
                         exportedColumn.collectionsType(),
                         exportedColumn.length(),
diff --git a/plugins/excel-adapter/src/main/java/org/polypheny/db/adapter/excel/ExcelEnumerator.java b/plugins/excel-adapter/src/main/java/org/polypheny/db/adapter/excel/ExcelEnumerator.java
index 99b3eabeb7..101e020108 100644
--- a/plugins/excel-adapter/src/main/java/org/polypheny/db/adapter/excel/ExcelEnumerator.java
+++ b/plugins/excel-adapter/src/main/java/org/polypheny/db/adapter/excel/ExcelEnumerator.java
@@ -457,21 +457,22 @@ public PolyValue[] convertRow( Row row ) {
     }
 
-    public PolyValue[] convertNormalRow(Row row) {
+    public PolyValue[] convertNormalRow( Row row ) {
         final PolyValue[] objects = new PolyValue[fields.length];
-        for (int i = 0; i < fields.length; i++) {
-            Cell cell = row.getCell(fields[i] - 1);
-            objects[i] = convert(fieldTypes[i], cell);
+        for ( int i = 0; i < fields.length; i++ ) {
+            Cell cell = row.getCell( fields[i] );
+            objects[i] = convert( fieldTypes[i], cell );
         }
         return objects;
     }
 
-    public PolyValue[] convertStreamRow(Row row) {
+
+    public PolyValue[] convertStreamRow( Row row ) {
         final PolyValue[] objects = new PolyValue[fields.length + 1];
         objects[0] = PolyLong.of( System.currentTimeMillis() );
+        for ( int i = 0; i < fields.length; i++ ) {
+            Cell cell = row.getCell( fields[i] );
+            objects[i + 1] = convert( fieldTypes[i], cell );
         }
         return objects;
     }
diff --git a/plugins/excel-adapter/src/main/java/org/polypheny/db/adapter/excel/ExcelNamespace.java b/plugins/excel-adapter/src/main/java/org/polypheny/db/adapter/excel/ExcelNamespace.java
index 00de8150fe..09651290e6 100644
--- a/plugins/excel-adapter/src/main/java/org/polypheny/db/adapter/excel/ExcelNamespace.java
+++ b/plugins/excel-adapter/src/main/java/org/polypheny/db/adapter/excel/ExcelNamespace.java
@@ -20,9 +20,11 @@ import java.net.URL;
 import java.util.ArrayList;
 import java.util.HashMap;
+import java.util.HashSet;
 import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
+import java.util.Set;
 import org.jetbrains.annotations.Nullable;
 import org.polypheny.db.algebra.type.AlgDataType;
 import org.polypheny.db.algebra.type.AlgDataTypeFactory;
@@ -66,37 +68,70 @@ public ExcelNamespace( long id, long adapterId, URL directoryUrl, ExcelTable.Fla
     }
 
-    public ExcelTable createExcelTable( PhysicalTable table, ExcelSource excelSource, List<Long> physicalIds ) {
+    public ExcelTable createExcelTable( PhysicalTable table,
+            ExcelSource excelSource,
+            List<Long> physicalIds ) {
+
+        /* -------- Basics -------- */
         final AlgDataTypeFactory typeFactory = new PolyTypeFactoryImpl( AlgDataTypeSystem.DEFAULT );
-        final AlgDataTypeFactory.Builder fieldInfo = typeFactory.builder();
-        List<ExcelFieldType> fieldTypes = new LinkedList<>();
-        for ( PhysicalColumn column : table.columns ) {
-            AlgDataType sqlType = sqlType( typeFactory, column.type, column.length, column.scale, null );
-            fieldInfo.add( column.id, column.name, column.name, sqlType ).nullable( column.nullable );
-            fieldTypes.add( ExcelFieldType.getExcelFieldType( column.type ) );
+
+        // physicalIds exactly as they arrive (0-based Excel positions)
+        int[] fields = physicalIds.stream()
+                .mapToInt( i -> i - 1 )
+                .toArray();
+
+        /* -------- Column lookup (global) -------- */
+        Map<Integer, PhysicalColumn> byPosition = new HashMap<>();
+        for ( PhysicalColumn c : table.columns ) {
+            byPosition.put( c.position, c ); // 0-based sheet position
         }
 
+        /* -------- Build the metadata of the selected columns -------- */
+        final AlgDataTypeFactory.Builder fieldInfo = typeFactory.builder();
+        List<ExcelFieldType> fieldTypes = new ArrayList<>();
 
-        // String excelFileName = excelSource.sheetName;
-        String[] parts = table.name.split("_", 2);
-        String filePart = parts[0];
-        String sheetPart = parts.length > 1 ? parts[1] : "";
+        for ( int pos : fields ) {
+            PhysicalColumn column = byPosition.get( pos );
+            if ( column == null ) {
+                throw new GenericRuntimeException( "No column for position " + pos );
+            }
 
-        String excelFileName = filePart + ".xlsx";
-        this.sheet = sheetPart;
+            AlgDataType sqlType = sqlType( typeFactory,
+                    column.type,
+                    column.length,
+                    column.scale,
+                    null );
+            fieldInfo.add( column.id, column.name, column.name, sqlType )
+                    .nullable( column.nullable );
+
+            fieldTypes.add( ExcelFieldType.getExcelFieldType( column.type ) );
+        }
+
+        /* -------- Derive file and sheet names -------- */
+        String[] parts = table.name.split( "_", 2 );
+        String filePart = parts[0];
+        String sheetPart = parts.length > 1 ? parts[1] : "";
+        String excelName = filePart + ".xlsx";
+        this.sheet = sheetPart;
 
         Source source;
         try {
-            source = Sources.of( new URL( directoryUrl, excelFileName ) );
+            source = Sources.of( new URL( directoryUrl, excelName ) );
         } catch ( MalformedURLException e ) {
             throw new GenericRuntimeException( e );
         }
-        int[] fields = physicalIds.stream().mapToInt( i -> i ).toArray();
-        ExcelTable physical = createTable( table, source, AlgDataTypeImpl.proto( fieldInfo.build() ), fieldTypes, fields, excelSource );
+
+        /* -------- Register the physical table -------- */
+        ExcelTable physical = createTable( table,
+                source,
+                AlgDataTypeImpl.proto( fieldInfo.build() ),
+                fieldTypes,
+                fields,
+                excelSource );
+
         tableMap.put( physical.name + "_" + physical.allocationId, physical );
 
         return physical;
-
     }
 

From d8a5468f4c143cba0ced51dd12090c13226ed2fb Mon Sep 17 00:00:00 2001
From: romanost03
Date: Thu, 26 Jun 2025 21:19:24 +0200
Subject: [PATCH 50/68] Bugfix in csv-source: Can now deploy a subset of
 columns.

---
 .../org/polypheny/db/adapter/csv/CsvEnumerator.java | 12 ++++++------
 1 file changed, 6 insertions(+), 6 deletions(-)

diff --git a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvEnumerator.java b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvEnumerator.java
index cbcee9cf14..20b5b88998 100644
--- a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvEnumerator.java
+++ b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvEnumerator.java
@@ -228,16 +228,16 @@ public boolean moveNext() {
                         reader.close();
                         return false;
                     }
-                    if ( filterValues != null ) {
-                        for ( int i = 0; i < strings.length; i++ ) {
+                    if (filterValues != null) {
+                        int limit = Math.min(strings.length, filterValues.length);
+                        for (int i = 0; i < limit; i++) {
                             String filterValue = filterValues[i];
-                            if ( filterValue != null ) {
-                                if ( !filterValue.equals( strings[i] ) ) {
-                                    continue outer;
-                                }
+                            if (filterValue != null && !filterValue.equals(strings[i])) {
+                                continue outer;
                             }
                         }
                     }
+
                     current = rowConverter.convertRow( strings );
                     return true;
                 }

From d7491299b892ec9363f65fb9b3f86bf54b7ddee3 Mon Sep 17 00:00:00 2001
From: romanost03
Date: Sat, 28 Jun 2025 13:43:58 +0200
Subject: [PATCH 51/68] Bugfix: Deleting tables when configuring metadata now
 works properly.
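
The underlying change: dropSourceEntities no longer resolves a source
table by its logical name, which broke once logical names were suffixed
to stay unique; it now goes through the adapter catalog's physical
entities. A condensed sketch of the lookup (simplified from
findLogicalTableByPhysical introduced earlier; casts and error handling
omitted):

    for ( PhysicalEntity pe : adapterCatalog.getPhysicals().values() ) {
        if ( pe instanceof PhysicalTable pt
                && physicalSchema.equals( pt.getNamespaceName() )
                && physicalTable.equals( pt.getName() ) ) {
            // map the physical table back to its logical counterpart
            return catalog.getSnapshot().rel().getTable( pt.getLogicalId() ).orElse( null );
        }
    }
    return null;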
--- .../org/polypheny/db/ddl/DdlManagerImpl.java | 19 +++++++------------ .../postgres/source/PostgresqlSource.java | 2 ++ 2 files changed, 9 insertions(+), 12 deletions(-) diff --git a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java index e8a28f3d62..de0e455504 100644 --- a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java +++ b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java @@ -273,7 +273,9 @@ record PathParts( String physicalNs, String physicalSchema, String table, String AllocationEntity allocation; if ( logical == null ) { - logical = catalog.getLogicalRel( namespace ).addTable( tableName, EntityType.SOURCE, !adapter.get().isDataReadOnly() ); + String logicalTable = getUniqueEntityName( namespace, entry.getKey(), ( ns, en ) -> catalog.getSnapshot().rel().getTable( ns, en ) ); + + logical = catalog.getLogicalRel( namespace ).addTable( logicalTable, EntityType.SOURCE, !adapter.get().isDataReadOnly() ); Pair pp = createSinglePartition( namespace, logical ); placement = catalog.getAllocRel( namespace ).addPlacement( logical.id, namespace, adapter.get().getAdapterId() ); allocation = catalog.getAllocRel( namespace ).addAllocation( adapter.get().getAdapterId(), placement.id, pp.left.id, logical.id ); @@ -321,7 +323,7 @@ record PathParts( String physicalNs, String physicalSchema, String table, String private LogicalTable findLogicalTableByPhysical( long namespace, DataSource adapter, String physicalSchema, String physicalTable ) { RelAdapterCatalog ac = (RelAdapterCatalog) adapter.getCatalog(); for ( PhysicalEntity pe : ac.getPhysicals().values() ) { - if ( !( pe instanceof PhysicalTable pt ) ) { + if ( !(pe instanceof PhysicalTable pt) ) { continue; } if ( physicalSchema.equals( pt.getNamespaceName() ) && physicalTable.equals( pt.getName() ) ) { @@ -368,6 +370,7 @@ public void dropSourceEntities( List paths, Statement statement, String String columnName = (seg.length >= 3) ? seg[seg.length - 1] : "*"; String tableName = seg[seg.length - 2]; + String schemaName = seg[seg.length - 3]; String schemaPath = String.join( ".", Arrays.copyOf( seg, seg.length - (columnName.equals( "*" ) ? 1 : 2) ) ); @@ -377,10 +380,7 @@ public void dropSourceEntities( List paths, Statement statement, String .orElseThrow( () -> new GenericRuntimeException( "Logisches Namespace 'public' nicht gefunden." ) ); - LogicalTable table = catalog.getSnapshot().rel() - .getTable( ns.id, tableName ) - .orElseThrow( () -> new GenericRuntimeException( - "Tabelle nicht gefunden: " + schemaPath + "." 
+ tableName ) ); + LogicalTable table = findLogicalTableByPhysical( Catalog.defaultNamespaceId, adapter, schemaName, tableName ); if ( table.entityType != EntityType.SOURCE ) { throw new GenericRuntimeException( "Tabelle " + table.name + @@ -574,15 +574,10 @@ private void createRelationalSource( Transaction transaction, DataSource adap : entry.getValue().get( 0 ).physicalSchemaName(); String baseName = entry.getKey(); - String tableName = baseName; String physicalTable = baseName; - int suffix = 0; - while ( catalog.getSnapshot().rel().getTable( namespace, tableName ).isPresent() ) { - tableName = baseName + suffix++; - } // Make sure the table name is unique - tableName = getUniqueEntityName( namespace, entry.getKey(), ( ns, en ) -> catalog.getSnapshot().rel().getTable( ns, en ) ); + String tableName = getUniqueEntityName( namespace, entry.getKey(), ( ns, en ) -> catalog.getSnapshot().rel().getTable( ns, en ) ); LogicalTable logical = catalog.getLogicalRel( namespace ).addTable( tableName, EntityType.SOURCE, !(adapter).isDataReadOnly() ); List columns = new ArrayList<>(); diff --git a/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java b/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java index 89dd52a188..04f95d2707 100644 --- a/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java +++ b/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java @@ -79,6 +79,8 @@ description = "Maximum number of concurrent JDBC connections.") @AdapterSettingList(name = "transactionIsolation", options = { "SERIALIZABLE", "READ_UNCOMMITTED", "READ_COMMITTED", "REPEATABLE_READ" }, defaultValue = "SERIALIZABLE", description = "Which level of transaction isolation should be used.") +@AdapterSettingString(name = "tables", defaultValue = "foo,bar", + description = "List of tables which should be imported. The names must be separated by a comma.") public class PostgresqlSource extends AbstractJdbcSource implements MetadataProvider { public AbstractNode metadataRoot; From edb49d26b0f84e497958d1fa08b70134ee5a106f Mon Sep 17 00:00:00 2001 From: romanost03 Date: Sat, 28 Jun 2025 16:28:11 +0200 Subject: [PATCH 52/68] Bugfix in excelsource: getExportedColumns retrieves all sheets now. Add fallback datatype for ExcelEnumerator --- .../polypheny/db/adapter/csv/CsvSource.java | 2 +- .../db/adapter/excel/ExcelEnumerator.java | 3 + .../db/adapter/excel/ExcelSource.java | 150 +++++++----------- 3 files changed, 60 insertions(+), 95 deletions(-) diff --git a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSource.java b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSource.java index c25f8edbb4..5d39d1a207 100644 --- a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSource.java +++ b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSource.java @@ -389,7 +389,7 @@ public void renameLogicalColumn( long id, String newColumnName ) { public AbstractNode fetchMetadataTree() { File csvFile = new File( "C:/Users/roman/Desktop/Dateieins.csv" ); String tableName = csvFile.getName(); - AbstractNode rootNode = new Node( "csv", tableName ); + AbstractNode rootNode = new Node( "csv", tableName.split( "\\." 
)[0] ); try ( BufferedReader reader = new BufferedReader( new FileReader( csvFile ) ) ) { String headerLine = reader.readLine(); diff --git a/plugins/excel-adapter/src/main/java/org/polypheny/db/adapter/excel/ExcelEnumerator.java b/plugins/excel-adapter/src/main/java/org/polypheny/db/adapter/excel/ExcelEnumerator.java index 101e020108..42b465a3c7 100644 --- a/plugins/excel-adapter/src/main/java/org/polypheny/db/adapter/excel/ExcelEnumerator.java +++ b/plugins/excel-adapter/src/main/java/org/polypheny/db/adapter/excel/ExcelEnumerator.java @@ -412,6 +412,9 @@ protected PolyValue convert( ExcelFieldType fieldType, Cell cell ) { } case STRING: default: + if ( cell.getCellType() == CellType.NUMERIC ) { + return PolyString.of( cell.toString() ); + } return PolyString.of( cell.getStringCellValue() ); } } catch ( Exception e ) { diff --git a/plugins/excel-adapter/src/main/java/org/polypheny/db/adapter/excel/ExcelSource.java b/plugins/excel-adapter/src/main/java/org/polypheny/db/adapter/excel/ExcelSource.java index 24144d235d..4a42bdbbfd 100644 --- a/plugins/excel-adapter/src/main/java/org/polypheny/db/adapter/excel/ExcelSource.java +++ b/plugins/excel-adapter/src/main/java/org/polypheny/db/adapter/excel/ExcelSource.java @@ -234,8 +234,6 @@ protected void reloadSettings( List updatedSettings ) { @Override public Map> getExportedColumns() { - String currentSheetName; - if ( connectionMethod == ConnectionMethod.UPLOAD && exportedColumnCache != null ) { // If we upload, file will not be changed, and we can cache the columns information, if "link" is used this is not advised return exportedColumnCache; @@ -278,109 +276,73 @@ public Map> getExportedColumns() { .replaceAll( "[^a-z0-9_]+", "" ); } - List list = new ArrayList<>(); - int position = 1; try { Source source = Sources.of( new URL( excelDir, fileName ) ); - File file = new File( source.path() ); // creating a new file instance - FileInputStream fs = new FileInputStream( file ); + Workbook workbook = WorkbookFactory.create( source.file() ); - Workbook workbook = WorkbookFactory.create( fs ); - Sheet sheet; + for ( int s = 0; s < workbook.getNumberOfSheets(); s++ ) { - if ( this.sheetName.equals( "" ) ) { - sheet = workbook.getSheetAt( 0 ); - currentSheetName = workbook.getSheetName( 0 ); + Sheet sheet = workbook.getSheetAt( s ); + String currentSheetName = workbook.getSheetName( s ); - } else { - sheet = workbook.getSheet( this.sheetName ); - currentSheetName = this.sheetName; - } + List list = new ArrayList<>(); + int position = 1; + + Row header = sheet.getRow( 0 ); + if ( header == null ) { + continue; + } - // Read first row to extract column attribute name and datatype - for ( Row row : sheet ) { - // For each row, iterate through all the columns - Iterator cellIterator = row.cellIterator(); - - while ( cellIterator.hasNext() ) { - Cell cell = cellIterator.next(); - try { - String[] colSplit = cell.getStringCellValue().split( ":" ); - String name = colSplit[0] - .toLowerCase() - .trim() - .replaceAll( "[^a-z0-9_]+", "" ); - String typeStr = "string"; - if ( colSplit.length > 1 ) { - typeStr = colSplit[1].toLowerCase().trim(); + for ( Cell cell : header ) { + String[] colSplit = cell.getStringCellValue().split( ":" ); + String name = colSplit[0].toLowerCase() + .trim() + .replaceAll( "[^a-z0-9_]+", "" ); + String typeStr = (colSplit.length > 1 ? 
colSplit[1] : "string") + .toLowerCase().trim(); + + PolyType type; + Integer length = null, scale = null; + switch ( typeStr ) { + case "int" -> type = PolyType.INTEGER; + case "boolean" -> type = PolyType.BOOLEAN; + case "long" -> type = PolyType.BIGINT; + case "float" -> type = PolyType.REAL; + case "double" -> type = PolyType.DOUBLE; + case "date" -> type = PolyType.DATE; + case "time" -> { + type = PolyType.TIME; + length = 0; } - PolyType collectionsType = null; - PolyType type; - Integer length = null; - Integer scale = null; - Integer dimension = null; - Integer cardinality = null; - switch ( typeStr.toLowerCase() ) { - case "int": - type = PolyType.INTEGER; - break; - case "string": - type = PolyType.VARCHAR; - length = maxStringLength; - break; - case "boolean": - type = PolyType.BOOLEAN; - break; - case "long": - type = PolyType.BIGINT; - break; - case "float": - type = PolyType.REAL; - break; - case "double": - type = PolyType.DOUBLE; - break; - case "date": - type = PolyType.DATE; - break; - case "time": - type = PolyType.TIME; - length = 0; - break; - case "timestamp": - type = PolyType.TIMESTAMP; - length = 0; - break; - default: - throw new GenericRuntimeException( "Unknown type: " + typeStr.toLowerCase() ); + case "timestamp" -> { + type = PolyType.TIMESTAMP; + length = 0; + } + default -> { + type = PolyType.VARCHAR; + length = maxStringLength; } - - list.add( new ExportedColumn( - name, - type, - collectionsType, - length, - scale, - dimension, - cardinality, - false, - fileName, - physicalTableName, - name, - position, - position == 1 ) ); // TODO - - position++; - } catch ( Exception e ) { - throw new GenericRuntimeException( e ); } + + list.add( new ExportedColumn( + name, type, + null, length, scale, + null, null, + false, + fileName, + physicalTableName, + name, + position, + position == 1 ) ); + position++; } - break; + + exportedColumnCache.put( physicalTableName + "_" + currentSheetName, list ); } } catch ( IOException e ) { throw new GenericRuntimeException( e ); } - exportedColumnCache.put( physicalTableName + "_" + currentSheetName, list ); + } this.exportedColumnCache = exportedColumnCache; return exportedColumnCache; @@ -435,13 +397,13 @@ public AbstractNode fetchMetadataTree() { Source filePath; // String filePath = "C:/Users/roman/Desktop/Mappe1.xlsx"; String firstFile = resolveFileNames().stream().findFirst() - .orElseThrow(() -> new GenericRuntimeException("No file found")); + .orElseThrow( () -> new GenericRuntimeException( "No file found" ) ); try { - filePath = Sources.of(new URL(excelDir, firstFile)); + filePath = Sources.of( new URL( excelDir, firstFile ) ); } catch ( MalformedURLException e ) { throw new RuntimeException( e ); } - String mappeName = "Workbook"; + String mappeName = firstFile.split( "\\." )[0]; AbstractNode root = new Node( "excel", mappeName ); try ( FileInputStream fis = new FileInputStream( filePath.path() ); Workbook wb = WorkbookFactory.create( fis ) ) { From 09c2861b0580bff2b96ce54fc7a822fdb6715bfa Mon Sep 17 00:00:00 2001 From: romanost03 Date: Sun, 29 Jun 2025 19:37:37 +0200 Subject: [PATCH 53/68] Bugfix for jdbc-adapter. Parallel connections to database are possible now. 
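
Previously all sources built their PolyXid from the shared PUID.EMPTY_PUID
pair, so concurrent metadata fetches competed for the same connection
handler. Every fetch now runs under its own transaction identifier and
closes its resources when done. The recurring pattern across the touched
sources (sketch only; exception handling shortened):

    // Each call gets a fresh XID, so concurrent fetches use separate handlers.
    PolyXid xid = PolyXid.generateLocalTransactionIdentifier( PUID.randomPUID( Type.RANDOM ), PUID.randomPUID( Type.RANDOM ) );
    ConnectionHandler handler = connectionFactory.getOrCreateConnectionHandler( xid );
    java.sql.Statement stmt = handler.getStatement();
    Connection conn = stmt.getConnection();
    try {
        DatabaseMetaData meta = conn.getMetaData();
        // ... collect schema information ...
    } finally {
        stmt.close(); // release the per-call resources instead of reusing a shared handler
        conn.close();
    }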
--- .../org/polypheny/db/ddl/DdlManagerImpl.java | 146 +++++++++++++++++- .../polypheny/db/adapter/csv/CsvSource.java | 3 +- .../db/adapter/excel/ExcelSource.java | 5 +- .../jdbc/sources/AbstractJdbcSource.java | 15 +- .../monetdb/sources/MonetdbSource.java | 52 ++++--- .../db/adapter/jdbc/MysqlSourcePlugin.java | 45 +++--- .../source/OracleSource.java | 30 +++- .../postgres/source/PostgresqlSource.java | 2 - .../java/org/polypheny/db/webui/Crud.java | 36 ++++- 9 files changed, 257 insertions(+), 77 deletions(-) diff --git a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java index de0e455504..d108ab5c01 100644 --- a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java +++ b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java @@ -33,6 +33,7 @@ import java.util.Optional; import java.util.Set; import java.util.function.BiFunction; +import java.util.function.Function; import java.util.function.Supplier; import java.util.stream.Collectors; import javax.annotation.Nullable; @@ -236,11 +237,37 @@ record PathParts( String physicalNs, String physicalSchema, String table, String List parsedPaths = selectedPaths.stream() .map( p -> { String[] parts = p.split( "\\." ); - if ( parts.length != 4 ) { - throw new IllegalArgumentException( "Pfad muss 4 Teile haben: ..
. -> " + p ); + + String physNs; + String physSchema; + String table; + String column; + + if ( parts.length == 4 ) { + physNs = parts[0]; + physSchema = parts[1]; + table = parts[2]; + column = parts[3]; + + } else if ( parts.length == 3 ) { + physNs = parts[0]; + physSchema = "Mappe1.xlsx"; + table = parts[1]; + column = parts[2].split( "\\:" )[0].toLowerCase(); + + } else if ( parts.length == 2 ) { + physNs = parts[0]; + physSchema = "Dateieins.csv"; + table = parts[0].toLowerCase(); + column = parts[1].toLowerCase(); + + } else { + throw new IllegalArgumentException( "Unknown path format for adapter with unique name: " + uniqueName ); } - return new PathParts( parts[0], parts[1], parts[2], parts[3], p ); - } ).toList(); + + return new PathParts( physNs, physSchema, table, column, p ); + } ) + .toList(); Optional> adapter = AdapterManager.getInstance().getSource( uniqueName ); @@ -269,6 +296,9 @@ record PathParts( String physicalNs, String physicalSchema, String table, String : exportedColumnList.get( 0 ).physicalSchemaName(); LogicalTable logical = findLogicalTableByPhysical( namespace, adapter.get(), physicalSchema, tableName ); + if ( logical == null ) { + logical = catalog.getSnapshot().rel().getTable( namespace, tableName ).orElse( null ); + } AllocationPlacement placement; AllocationEntity allocation; @@ -290,7 +320,7 @@ record PathParts( String physicalNs, String physicalSchema, String table, String if ( !wishedColumnNames.contains( exportedColumn.name() ) || catalog.getSnapshot().rel().getColumn( logical.id, exportedColumn.name() ).isPresent() ) { continue; } - addColumnToSourceTable( logical, exportedColumn.physicalColumnName(), exportedColumn.name(), null, null, null, statement, exportedColumn.physicalTableName() ); + addColumnToSourceTable( logical, exportedColumn.physicalColumnName(), exportedColumn.name(), null, null, null, statement, tableName ); } List columns = catalog.getSnapshot().rel().getColumns( logical.id ); @@ -340,7 +370,7 @@ private LogicalTable findLogicalTableByPhysical( long namespace, DataSource a } - @Override + /* @Override public void dropSourceEntities( List paths, Statement statement, String uniqueName ) { DataSource adapter = AdapterManager.getInstance().getSource( uniqueName ).orElseThrow(); @@ -414,6 +444,106 @@ public void dropSourceEntities( List paths, Statement statement, String catalog.updateSnapshot(); statement.getQueryProcessor().resetCaches(); statement.getTransaction().commit(); + }*/ + + + @Override + public void dropSourceEntities( List paths, Statement stmt, String uniqueName ) { + + DataSource adapter = AdapterManager.getInstance() + .getSource( uniqueName ) + .orElseThrow(); + Map settings = adapter.getSettings(); + + String selectedAttributes = settings.getOrDefault( "selectedAttributes", "" ) + .replace( "[", "" ) + .replace( "]", "" ); + List currentPaths = new ArrayList<>( List.of( selectedAttributes.split( "," ) ) ); + currentPaths.removeIf( p -> Arrays.asList( paths ).contains( p.trim() ) ); + settings.put( "selectedAttributes", String.join( ",", currentPaths ) ); + adapter.updateSettings( settings ); + + record PathParts( String physNs, String physSchema, String table, String column, String original ) { + + } + + Function parse = ( raw ) -> { + String p = raw.replace( "'", "" ).trim(); + String[] parts = p.split( "\\." 
);

+            String physNs, physSchema, table, column;
+
+            if ( parts.length == 4 ) {
+                physNs = parts[0];
+                physSchema = parts[1];
+                table = parts[2];
+                column = parts[3];
+
+            } else if ( parts.length == 3 ) {
+                physNs = parts[0];
+                physSchema = "Mappe1.xlsx";
+                table = parts[1];
+                column = parts[2].split( "\\:" )[0].toLowerCase();
+
+            } else if ( parts.length == 2 ) {
+                physNs = parts[0];
+                physSchema = "Dateieins.csv";
+                table = parts[0].toLowerCase();
+                column = parts[1].toLowerCase();
+
+            } else {
+                throw new GenericRuntimeException( "Invalid path '" + p + "' for adapter " + adapter.getAdapterName() );
+            }
+            return new PathParts( physNs, physSchema, table, column, p );
+        };
+
+        Map<LogicalTable, Set<String>> work = new HashMap<>();
+
+        for ( String raw : paths ) {
+            if ( raw == null || raw.isBlank() ) {
+                continue;
+            }
+
+            PathParts pp = parse.apply( raw );
+            String physSchema = pp.physSchema();
+            String tableName = pp.table();
+            String columnName = pp.column();
+
+            LogicalTable table = findLogicalTableByPhysical( Catalog.defaultNamespaceId, adapter, physSchema, tableName );
+            if ( table == null ) {
+                table = catalog.getSnapshot().rel().getTable( Catalog.defaultNamespaceId, tableName ).orElse( null );
+            }
+            if ( table == null ) {
+                // Guard against dereferencing an unresolved table below.
+                throw new GenericRuntimeException( "Table '" + tableName + "' could not be resolved." );
+            }
+
+            if ( table.entityType != EntityType.SOURCE ) {
+                throw new GenericRuntimeException( "Table " + table.name + " is not a SOURCE entity." );
+            }
+
+            work.computeIfAbsent( table, t -> new HashSet<>() ).add( columnName );
+        }
+
+        for ( Map.Entry<LogicalTable, Set<String>> e : work.entrySet() ) {
+            LogicalTable table = e.getKey();
+            Set<String> cols = e.getValue();
+
+            if ( cols.contains( "*" ) ) {
+                dropWholeSourceTable( table, stmt );
+                continue;
+            }
+
+            for ( String col : cols ) {
+                dropSourceColumn( table, col, stmt );
+                catalog.updateSnapshot();
+            }
+
+            if ( catalog.getSnapshot().rel().getColumns( table.id ).isEmpty() ) {
+                dropWholeSourceTable( table, stmt );
+                catalog.updateSnapshot();
+            }
+        }
+
+        catalog.updateSnapshot();
+        stmt.getQueryProcessor().resetCaches();
+        stmt.getTransaction().commit();
     }

@@ -551,7 +681,7 @@ private void createRelationalSource( Transaction transaction, DataSource adap
         HashCache.getInstance().put( uniqueName, hash );
         log.info( "Key used during deployment: {} ", uniqueName );

-        // pm.onAdapterDeploy( (Adapter & MetadataProvider) mp );
+        pm.onAdapterDeploy( (Adapter & MetadataProvider) mp );

         mp.markSelectedAttributes( selectedAttributes );
         log.error( "SelectedAttributes ist gesetzt aus dem DdlManager und der Tree ist das hier: " );
@@ -703,7 +833,7 @@ public void dropAdapter( String name, Statement statement ) {
             }
         }
         AdapterManager.getInstance().removeAdapter( adapter.id );
-        // PublisherManager.getInstance().onAdapterUndeploy( adapter.uniqueName );
+        PublisherManager.getInstance().onAdapterUndeploy( adapter.uniqueName );
     }

diff --git a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSource.java b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSource.java
index 5d39d1a207..f601c722b7 100644
--- a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSource.java
+++ b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSource.java
@@ -415,8 +415,7 @@ public AbstractNode fetchMetadataTree() {
         } catch ( IOException e ) {
             throw new RuntimeException( "Failed to parse metadata of CSV source: " + e );
         }
-        this.metadataRoot = rootNode;
-        return this.metadataRoot;
+        return rootNode;
     }

diff --git a/plugins/excel-adapter/src/main/java/org/polypheny/db/adapter/excel/ExcelSource.java 
b/plugins/excel-adapter/src/main/java/org/polypheny/db/adapter/excel/ExcelSource.java index 4a42bdbbfd..0b25c0ee01 100644 --- a/plugins/excel-adapter/src/main/java/org/polypheny/db/adapter/excel/ExcelSource.java +++ b/plugins/excel-adapter/src/main/java/org/polypheny/db/adapter/excel/ExcelSource.java @@ -411,7 +411,7 @@ public AbstractNode fetchMetadataTree() { for ( Sheet sheet : wb ) { String sheetName = sheet.getSheetName(); - AbstractNode sheetNode = new Node( "sheet", sheetName ); + AbstractNode sheetNode = new Node( "sheet", mappeName.toLowerCase() + "_" + sheetName ); Row header = sheet.getRow( sheet.getFirstRowNum() ); if ( header == null ) { @@ -439,8 +439,7 @@ public AbstractNode fetchMetadataTree() { throw new RuntimeException( "Failed to read Excel metadata: " + filePath, e ); } - this.metadataRoot = root; - return metadataRoot; + return root; } diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/sources/AbstractJdbcSource.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/sources/AbstractJdbcSource.java index fd0e8b084d..2d2b543f14 100644 --- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/sources/AbstractJdbcSource.java +++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/sources/AbstractJdbcSource.java @@ -211,11 +211,15 @@ public void rollback( PolyXid xid ) { @Override public Map> getExportedColumns() { Map> map = new HashMap<>(); + + java.sql.Statement statement = null; + Connection connection = null; + PolyXid xid = PolyXid.generateLocalTransactionIdentifier( PUID.randomPUID( Type.RANDOM ), PUID.randomPUID( Type.RANDOM ) ); try { ConnectionHandler connectionHandler = connectionFactory.getOrCreateConnectionHandler( xid ); - java.sql.Statement statement = connectionHandler.getStatement(); - Connection connection = statement.getConnection(); + statement = connectionHandler.getStatement(); + connection = statement.getConnection(); DatabaseMetaData dbmd = connection.getMetaData(); String[] tables; @@ -333,15 +337,8 @@ public Map> getExportedColumns() { map.put( tableName, list ); } } - connectionFactory.releaseConnectionHandler( xid, true ); } catch ( SQLException | ConnectionHandlerException e ) { - try { - connectionFactory.releaseConnectionHandler( xid, false ); - } catch ( ConnectionHandlerException ex ) { - throw new RuntimeException( ex ); - } throw new GenericRuntimeException( "Exception while collecting schema information!" 
+ e ); - } return map; diff --git a/plugins/monetdb-adapter/src/main/java/org/polypheny/db/adapter/monetdb/sources/MonetdbSource.java b/plugins/monetdb-adapter/src/main/java/org/polypheny/db/adapter/monetdb/sources/MonetdbSource.java index 6b4782c3fd..6dae66ba85 100644 --- a/plugins/monetdb-adapter/src/main/java/org/polypheny/db/adapter/monetdb/sources/MonetdbSource.java +++ b/plugins/monetdb-adapter/src/main/java/org/polypheny/db/adapter/monetdb/sources/MonetdbSource.java @@ -60,6 +60,7 @@ import org.polypheny.db.schemaDiscovery.Node; import org.polypheny.db.sql.language.SqlDialect; import org.polypheny.db.transaction.PUID; +import org.polypheny.db.transaction.PUID.Type; import org.polypheny.db.transaction.PolyXid; import org.polypheny.db.type.PolyType; @@ -165,30 +166,34 @@ public List createTable( Context context, LogicalTableWrapper lo @Override public Map> getExportedColumns() { Map> map = new HashMap<>(); - PolyXid xid = PolyXid.generateLocalTransactionIdentifier( PUID.EMPTY_PUID, PUID.EMPTY_PUID ); + + java.sql.Statement statement = null; + Connection connection = null; + + PolyXid xid = PolyXid.generateLocalTransactionIdentifier( PUID.randomPUID( Type.RANDOM ), PUID.randomPUID( Type.RANDOM ) ); try { ConnectionHandler connectionHandler = connectionFactory.getOrCreateConnectionHandler( xid ); - java.sql.Statement statement = connectionHandler.getStatement(); - Connection connection = statement.getConnection(); + statement = connectionHandler.getStatement(); + connection = statement.getConnection(); DatabaseMetaData dbmd = connection.getMetaData(); String[] tables; - if (settings.get("selectedAttributes").equals("")){ + if ( settings.get( "selectedAttributes" ).equals( "" ) ) { tables = settings.get( "tables" ).split( "," ); } else { - String[] names2 = settings.get("selectedAttributes").split(","); + String[] names2 = settings.get( "selectedAttributes" ).split( "," ); Set tableNames = new HashSet<>(); - for (String s : names2){ - String attr = s.split(" : ")[0]; + for ( String s : names2 ) { + String attr = s.split( " : " )[0]; - String[] parts = attr.split("\\."); - if (parts.length >= 3) { + String[] parts = attr.split( "\\." ); + if ( parts.length >= 3 ) { String tableName = parts[1] + "." + parts[2]; - tableNames.add(tableName); + tableNames.add( tableName ); } } - tables = tableNames.toArray(new String[0]); + tables = tableNames.toArray( new String[0] ); } for ( String str : tables ) { String[] names = str.split( "\\." ); @@ -291,12 +296,15 @@ public AbstractNode fetchMetadataTree() { SchemaFilter filter = SchemaFilter.forAdapter( adapterName ); - PolyXid xid = PolyXid.generateLocalTransactionIdentifier( PUID.EMPTY_PUID, PUID.EMPTY_PUID ); + java.sql.Statement stmt = null; + Connection conn = null; + + PolyXid xid = PolyXid.generateLocalTransactionIdentifier( PUID.randomPUID( Type.RANDOM ), PUID.randomPUID( Type.RANDOM ) ); try { ConnectionHandler handler = connectionFactory.getOrCreateConnectionHandler( xid ); - java.sql.Statement stmt = handler.getStatement(); - Connection conn = stmt.getConnection(); + stmt = handler.getStatement(); + conn = stmt.getConnection(); DatabaseMetaData meta = conn.getMetaData(); try ( ResultSet schemas = requiresSchema() @@ -325,11 +333,12 @@ public AbstractNode fetchMetadataTree() { String tableName = tables.getString( "TABLE_NAME" ); String fqName = (requiresSchema() ? "\"" + schemaName + "\"." : "") + "\"" + tableName + "\""; + Connection finalConn = conn; previewByTable.computeIfAbsent( schemaName + "." 
+ tableName, k -> { try { - return fetchPreview( conn, fqName, 10 ); + return fetchPreview( finalConn, fqName, 10 ); } catch ( Exception e ) { log.warn( "Preview failed for {}", fqName, e ); return List.of(); @@ -388,15 +397,18 @@ public AbstractNode fetchMetadataTree() { } catch ( SQLException | ConnectionHandlerException ex ) { throw new GenericRuntimeException( "Error while fetching metadata tree", ex ); + } finally { + try { + stmt.close(); + conn.close(); + } catch ( SQLException e ) { + throw new RuntimeException( e ); + } } - - this.metadataRoot = root; - log.error( "Neue Preview ist geladen als: " + previewByTable.toString() ); - return this.metadataRoot; + return root; } - @Override public List> fetchPreview( Connection conn, String fqName, int limit ) { List> rows = new ArrayList<>(); diff --git a/plugins/mysql-adapter/src/main/java/org/polypheny/db/adapter/jdbc/MysqlSourcePlugin.java b/plugins/mysql-adapter/src/main/java/org/polypheny/db/adapter/jdbc/MysqlSourcePlugin.java index a3913dcf87..8b4d3a9f64 100644 --- a/plugins/mysql-adapter/src/main/java/org/polypheny/db/adapter/jdbc/MysqlSourcePlugin.java +++ b/plugins/mysql-adapter/src/main/java/org/polypheny/db/adapter/jdbc/MysqlSourcePlugin.java @@ -169,7 +169,7 @@ protected void reloadSettings( List updatedSettings ) { @Override protected String getConnectionUrl( final String dbHostname, final int dbPort, final String dbName ) { - return String.format( "jdbc:mysql://%s:%d/%s", dbHostname, dbPort, dbName ); + return String.format( "jdbc:mysql://%s:%d/%s?allowPublicKeyRetrieval=true&useSSL=false", dbHostname, dbPort, dbName ); } @@ -187,12 +187,15 @@ public AbstractNode fetchMetadataTree() { SchemaFilter filter = SchemaFilter.forAdapter( adapterName ); TableFilter tableFilter = TableFilter.forAdapter( adapterName ); - PolyXid xid = PolyXid.generateLocalTransactionIdentifier( PUID.EMPTY_PUID, PUID.EMPTY_PUID ); + PolyXid xid = PolyXid.generateLocalTransactionIdentifier( PUID.randomPUID( Type.RANDOM ), PUID.randomPUID( Type.RANDOM ) ); + + java.sql.Statement stmt = null; + Connection conn = null; try { ConnectionHandler handler = connectionFactory.getOrCreateConnectionHandler( xid ); - java.sql.Statement stmt = handler.getStatement(); - Connection conn = stmt.getConnection(); + stmt = handler.getStatement(); + conn = stmt.getConnection(); DatabaseMetaData meta = conn.getMetaData(); try ( ResultSet schemas = meta.getCatalogs() ) { @@ -222,16 +225,17 @@ public AbstractNode fetchMetadataTree() { String fqName = "`" + schemaName + "`.`" + tableName + "`"; + Connection finalConn = conn; List> preview = previewByTable.computeIfAbsent( schemaName + "." 
+ tableName, k -> { try { - return fetchPreview(conn, fqName, 10); - } catch (Exception e) { - log.warn("Preview failed for {}", fqName, e); + return fetchPreview( finalConn, fqName, 10 ); + } catch ( Exception e ) { + log.warn( "Preview failed for {}", fqName, e ); return List.of(); } - }); + } ); AbstractNode tableNode = new Node( "table", tableName ); @@ -285,10 +289,16 @@ public AbstractNode fetchMetadataTree() { } catch ( SQLException | ConnectionHandlerException ex ) { throw new GenericRuntimeException( "Error while fetching metadata tree", ex ); + } finally { + try { + stmt.close(); + conn.close(); + } catch ( SQLException e ) { + throw new RuntimeException( e ); + } } - this.metadataRoot = root; - return this.metadataRoot; + return root; } @@ -317,11 +327,16 @@ public List> fetchPreview( Connection conn, String fqName, i @Override public Map> getExportedColumns() { Map> map = new HashMap<>(); + java.sql.Statement statement = null; + Connection connection = null; + PolyXid xid = PolyXid.generateLocalTransactionIdentifier( PUID.randomPUID( Type.RANDOM ), PUID.randomPUID( Type.RANDOM ) ); + + try { ConnectionHandler connectionHandler = connectionFactory.getOrCreateConnectionHandler( xid ); - java.sql.Statement statement = connectionHandler.getStatement(); - Connection connection = statement.getConnection(); + statement = connectionHandler.getStatement(); + connection = statement.getConnection(); DatabaseMetaData dbmd = connection.getMetaData(); String[] tables; @@ -439,13 +454,7 @@ public Map> getExportedColumns() { map.put( tableName, list ); } } - connectionFactory.releaseConnectionHandler( xid, true ); } catch ( SQLException | ConnectionHandlerException e ) { - try { - connectionFactory.releaseConnectionHandler( xid, false ); - } catch ( ConnectionHandlerException ex ) { - throw new RuntimeException( ex ); - } throw new GenericRuntimeException( "Exception while collecting schema information!" 
+ e ); } diff --git a/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/source/OracleSource.java b/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/source/OracleSource.java index 5b9104ee11..101d43b70e 100644 --- a/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/source/OracleSource.java +++ b/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/source/OracleSource.java @@ -40,6 +40,7 @@ import org.polypheny.db.schemaDiscovery.MetadataProvider; import org.polypheny.db.schemaDiscovery.Node; import org.polypheny.db.transaction.PUID; +import org.polypheny.db.transaction.PUID.Type; import org.polypheny.db.transaction.PolyXid; import org.polypheny.db.type.PolyType; import java.sql.Connection; @@ -155,11 +156,14 @@ public List createTable( Context context, LogicalTableWrapper lo public Map> getExportedColumns() { Map> map = new HashMap<>(); - PolyXid xid = PolyXid.generateLocalTransactionIdentifier( PUID.EMPTY_PUID, PUID.EMPTY_PUID ); + java.sql.Statement statement = null; + Connection connection = null; + + PolyXid xid = PolyXid.generateLocalTransactionIdentifier( PUID.randomPUID( Type.RANDOM ), PUID.randomPUID( Type.RANDOM ) ); try { ConnectionHandler connectionHandler = connectionFactory.getOrCreateConnectionHandler( xid ); - java.sql.Statement statement = connectionHandler.getStatement(); - Connection connection = statement.getConnection(); + statement = connectionHandler.getStatement(); + connection = statement.getConnection(); DatabaseMetaData dbmd = connection.getMetaData(); String[] tables; @@ -276,11 +280,16 @@ public AbstractNode fetchMetadataTree() { TableFilter filter = TableFilter.forAdapter( adapterName ); - PolyXid xid = PolyXid.generateLocalTransactionIdentifier( PUID.EMPTY_PUID, PUID.EMPTY_PUID ); + PolyXid xid = PolyXid.generateLocalTransactionIdentifier( PUID.randomPUID( Type.RANDOM ), PUID.randomPUID( Type.RANDOM ) ); + + java.sql.Statement stmt = null; + Connection conn = null; try { ConnectionHandler h = connectionFactory.getOrCreateConnectionHandler( xid ); - DatabaseMetaData m = h.getStatement().getConnection().getMetaData(); + stmt = h.getStatement(); + conn = stmt.getConnection(); + DatabaseMetaData m = conn.getMetaData(); String currentUser = m.getUserName(); @@ -304,11 +313,12 @@ public AbstractNode fetchMetadataTree() { } String fqName = "\"" + owner + "\".\"" + tableName + "\""; + ConnectionHandler finalH = h; List> preview = previewByTable.computeIfAbsent( owner + "." 
+ tableName, k -> {
                         try {
-                            return fetchPreview( h.getStatement().getConnection(), fqName, 10 );
+                            return fetchPreview( finalH.getStatement().getConnection(), fqName, 10 );
                         } catch ( Exception e ) {
                             log.warn( "Preview failed for {}", fqName, e );
                             return List.of();
@@ -363,9 +373,15 @@ public AbstractNode fetchMetadataTree() {
             }
         } catch ( SQLException | ConnectionHandlerException e ) {
             throw new GenericRuntimeException( "Error while fetching Oracle metadata", e );
+        } finally {
+            try {
+                stmt.close();
+                conn.close();
+            } catch ( SQLException e ) {
+                throw new RuntimeException( e );
+            }
         }

-        this.metadataRoot = root;
         return root;
     }

diff --git a/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java b/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java
index 04f95d2707..93885e646b 100644
--- a/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java
+++ b/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java
@@ -106,7 +106,6 @@ public AbstractNode fetchMetadataTree() {

         java.sql.Statement stmt = null;
         Connection conn = null;
-
         try {
             ConnectionHandler handler = connectionFactory.getOrCreateConnectionHandler( xid );
             stmt = handler.getStatement();
             conn = stmt.getConnection();
@@ -211,7 +210,6 @@ public AbstractNode fetchMetadataTree() {
                 throw new RuntimeException( e );
             }
         }
-        log.error( "Neue Preview ist geladen als: " + previewByTable.toString() );
         return root;
     }

diff --git a/webui/src/main/java/org/polypheny/db/webui/Crud.java b/webui/src/main/java/org/polypheny/db/webui/Crud.java
index 8ee372eef4..07880f21e4 100644
--- a/webui/src/main/java/org/polypheny/db/webui/Crud.java
+++ b/webui/src/main/java/org/polypheny/db/webui/Crud.java
@@ -49,6 +49,8 @@
 import java.text.DateFormat;
 import java.sql.SQLException;
 import java.text.SimpleDateFormat;
+import java.util.Collection;
+import java.util.Comparator;
 import java.util.HashSet;
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -993,16 +995,15 @@ void metadataAck( final Context ctx ) {
         Optional<DataSource<?>> adapter = AdapterManager.getInstance().getSource( payload.uniqueName );

         Transaction transaction = transactionManager.startTransaction( Catalog.defaultUserId, false, "metadata-ack-" + payload.uniqueName );
-        Statement stmt = null;
+        Statement stmt = transaction.createStatement();
         try {
-            if ( payload.addedPaths != null ) {
-                DdlManager.getInstance().addSelectedMetadata( transaction, null, payload.uniqueName, Catalog.defaultNamespaceId, List.of( payload.addedPaths ) );
+            if ( payload.addedPaths != null && payload.addedPaths.length > 0 ) {
+                DdlManager.getInstance().addSelectedMetadata( transaction, stmt, payload.uniqueName, Catalog.defaultNamespaceId, List.of( payload.addedPaths ) );
             }
-            if ( payload.removedPaths != null ) {
-                stmt = transaction.createStatement();
-                DdlManager.getInstance().dropSourceEntities( List.of( payload.removedPaths ), stmt, payload.uniqueName );
-                stmt = null;
+            if ( payload.removedPaths != null && payload.removedPaths.length > 0 ) {
+                String[] filtered = filterPrefixes( payload.removedPaths );
+                DdlManager.getInstance().dropSourceEntities( List.of( filtered ), stmt, payload.uniqueName );
             }
             transaction.commit();
             ctx.status( 200 ).result( "ACK processed" );
@@ -1013,6 +1014,7 @@ void metadataAck( final Context ctx ) {
         } finally {
             if ( stmt != null ) {
                 stmt.close();
+                transactionManager.removeTransaction( transaction.getXid() );
             }
         }
     }
@@ -1080,7 +1082,7 @@ void setMetaConfiguration( final Context ctx ) {
             }
         } catch ( Exception ex ) {
-            tx.rollback( "Changing adapter configuration was not successful !" );
+            tx.rollback( "Changing adapter configuration was not successful! " + ex );
             ctx.status( 500 ).json( Map.of( "message", ex.getMessage() ) );
         } finally {
             if ( stmt != null ) {
@@ -3291,6 +3293,24 @@ private static void zipDirectory( String basePath, File dir, ZipOutputStream zip
     }

+    private String[] filterPrefixes( String[] paths ) {
+        String[] sorted = Arrays.copyOf( paths, paths.length );
+        Arrays.sort( sorted, Comparator.comparingInt( String::length ) );
+        List<String> keep = new ArrayList<>();
+        outer:
+        for ( int i = 0; i < sorted.length; i++ ) {
+            String p = sorted[i];
+            for ( int j = i + 1; j < sorted.length; j++ ) {
+                if ( sorted[j].startsWith( p + "." ) ) {
+                    continue outer;
+                }
+            }
+            keep.add( p );
+        }
+        return keep.toArray( new String[0] );
+    }
+
+
     public void getAvailablePlugins( Context ctx ) {
         ctx.json( PolyPluginManager
                 .getPLUGINS()

From 02993032cad7af88eced29244108a7d86bac9cb0 Mon Sep 17 00:00:00 2001
From: romanost03
Date: Sun, 29 Jun 2025 19:37:37 +0200
Subject: [PATCH 54/68] Bugfix: New metadata tree is now marked with previously
 marked and existing metadata.
---
 .../db/adapter/MetadataObserver/AbstractListener.java | 9 +++++++--
 1 file changed, 7 insertions(+), 2 deletions(-)

diff --git a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/AbstractListener.java b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/AbstractListener.java
index f6b7c87291..f7a541e247 100644
--- a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/AbstractListener.java
+++ b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/AbstractListener.java
@@ -33,6 +33,7 @@
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
+import java.util.Set;

 @Slf4j
 public class AbstractListener<P extends Adapter & MetadataProvider> implements MetadataListener<P>
{ @@ -81,9 +82,13 @@ public void onMetadataChange( P adapter, AbstractNode node, String hash ) { public void applyChange( String[] metadata ) { log.info( "Changes are going to be applied" ); + Set prevSelected = NodeUtil.collectSelecedAttributePaths( this.adapter.getRoot() ); + this.adapter.setRoot( this.currentNode ); - if ( metadata != null && metadata.length > 0 ) - this.adapter.markSelectedAttributes( Arrays.stream( metadata ).toList() ); + if ( metadata != null && metadata.length > 0 ) { + prevSelected.addAll( Arrays.asList( metadata ) ); + } + this.adapter.markSelectedAttributes( List.copyOf( prevSelected ) ); HashCache.getInstance().put( this.adapter.getUniqueName(), this.hash ); this.currentNode = null; From ea6f3e03088f15b2ca237a888d134970c18cc43b Mon Sep 17 00:00:00 2001 From: romanost03 Date: Mon, 30 Jun 2025 16:51:47 +0200 Subject: [PATCH 55/68] Add logs for every source to PublisherManager.java --- .../MetadataObserver/AbstractListener.java | 8 ++- .../MetadataObserver/ChangeLogEntry.java | 62 +++++++++++++++++++ .../MetadataObserver/PublisherManager.java | 36 ++++++++++- .../db/adapter/java/AdapterTemplate.java | 5 +- .../org/polypheny/db/ddl/DdlManagerImpl.java | 6 +- .../java/org/polypheny/db/webui/Crud.java | 5 +- 6 files changed, 116 insertions(+), 6 deletions(-) create mode 100644 core/src/main/java/org/polypheny/db/adapter/MetadataObserver/ChangeLogEntry.java diff --git a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/AbstractListener.java b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/AbstractListener.java index f7a541e247..d6085ea4d0 100644 --- a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/AbstractListener.java +++ b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/AbstractListener.java @@ -21,6 +21,8 @@ import lombok.Getter; import lombok.extern.slf4j.Slf4j; import org.polypheny.db.adapter.Adapter; +import org.polypheny.db.adapter.MetadataObserver.ChangeLogEntry; +import org.polypheny.db.adapter.MetadataObserver.ChangeLogEntry.DiffMessageUtil; import org.polypheny.db.adapter.MetadataObserver.PublisherManager.ChangeStatus; import org.polypheny.db.adapter.MetadataObserver.Utils.MetaAnnotator; import org.polypheny.db.adapter.MetadataObserver.Utils.MetaDiffUtil; @@ -30,6 +32,7 @@ import org.polypheny.db.schemaDiscovery.MetadataProvider; import org.polypheny.db.schemaDiscovery.NodeSerializer; import org.polypheny.db.schemaDiscovery.NodeUtil; +import java.time.Instant; import java.util.ArrayList; import java.util.Arrays; import java.util.List; @@ -70,11 +73,14 @@ public void onMetadataChange( P adapter, AbstractNode node, String hash ) { ChangeStatus status = NodeUtil.evaluateStatus( result, adapter.getRoot() ); + ChangeLogEntry entry = new ChangeLogEntry( adapter.getUniqueName(), Instant.now().toString(), DiffMessageUtil.toMessages( result ), status ); + PublisherManager.getInstance().addChange( entry ); + AbstractNode annotatedCopy = MetaAnnotator.annotateTree( adapter.getRoot(), node, result ); String json = NodeSerializer.serializeNode( annotatedCopy ).toString(); log.info( "JSON: {}", json ); - PublisherManager.getInstance().onMetadataChange( adapter.getUniqueName(), new PreviewResult( json, preview ), status ); + PublisherManager.getInstance().onMetadataChange( adapter.getUniqueName(), new PreviewResult( json, preview, List.of( entry ) ), status ); } diff --git a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/ChangeLogEntry.java 
b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/ChangeLogEntry.java
new file mode 100644
index 0000000000..4a27d2f829
--- /dev/null
+++ b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/ChangeLogEntry.java
@@ -0,0 +1,62 @@
+/*
+ * Copyright 2019-2025 The Polypheny Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.polypheny.db.adapter.MetadataObserver;
+
+import com.fasterxml.jackson.annotation.JsonProperty;
+import lombok.AllArgsConstructor;
+import lombok.Value;
+import org.polypheny.db.adapter.MetadataObserver.PublisherManager.ChangeStatus;
+import org.polypheny.db.adapter.MetadataObserver.Utils.MetaDiffUtil.DiffResult;
+import java.time.Instant;
+import java.util.ArrayList;
+import java.util.List;
+
+@Value
+@AllArgsConstructor
+public class ChangeLogEntry {
+
+    @JsonProperty
+    String adapterName;
+    @JsonProperty
+    String timestamp;
+    @JsonProperty
+    List<String> messages;
+    @JsonProperty
+    ChangeStatus severity;
+
+    public static class DiffMessageUtil {
+
+        private DiffMessageUtil() {}
+
+        public static List<String> toMessages( DiffResult diff ) {
+            List<String> msgs = new ArrayList<>();
+
+            diff.getAdded()
+                    .forEach( p -> msgs.add( "Added metadata " + p ) );
+
+            diff.getRemoved()
+                    .forEach( p -> msgs.add( "Removed metadata " + p ) );
+
+            diff.getChanged()
+                    .forEach( p -> msgs.add( "Changed metadata " + p ) );
+
+            return msgs;
+        }
+    }
+
+
+}
diff --git a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/PublisherManager.java b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/PublisherManager.java
index 952f2abb73..9ff09bc627 100644
--- a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/PublisherManager.java
+++ b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/PublisherManager.java
@@ -16,22 +16,33 @@

 package org.polypheny.db.adapter.MetadataObserver;

+import lombok.AllArgsConstructor;
+import lombok.Value;
 import lombok.extern.slf4j.Slf4j;
 import org.polypheny.db.adapter.Adapter;
+import org.polypheny.db.adapter.MetadataObserver.Utils.MetaDiffUtil.DiffResult;
 import org.polypheny.db.adapter.java.AdapterTemplate.PreviewResult;
 import org.polypheny.db.schemaDiscovery.AbstractNode;
 import org.polypheny.db.schemaDiscovery.MetadataProvider;
+import java.time.Instant;
+import java.util.Deque;
+import java.util.List;
 import java.util.Map;
 import java.util.Optional;
 import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ConcurrentLinkedDeque;
 import java.util.concurrent.ConcurrentMap;

 @Slf4j
 public class PublisherManager {

+    private static final int MAX_ENTRIES_PER_ADAPTER = 100;
+
     private final Map publishers = new ConcurrentHashMap<>();
     private final Map<String, PreviewResult> changeCache = new ConcurrentHashMap<>();
-    private final Map statusCache = new ConcurrentHashMap<>();
+    private final Map<String, ChangeStatus> statusCache = new ConcurrentHashMap<>();
+
+    private final ConcurrentHashMap<String, Deque<ChangeLogEntry>> changeCatalog = new ConcurrentHashMap<>();

     private static final PublisherManager INSTANCE = new PublisherManager();

@@ -82,7 +93,7 @@ public void 
onMetadataChange( String uniqueName, PreviewResult data, ChangeStatu public PreviewResult fetchChange( String uniqueName ) { - return changeCache.get( uniqueName ) ; + return changeCache.get( uniqueName ); } @@ -93,10 +104,31 @@ public void ack( String uniqueName, String[] metadata ) { statusCache.remove( uniqueName ); } + public enum ChangeStatus { CRITICAL, WARNING, OK } + + public void addChange( ChangeLogEntry entry ) { + changeCatalog.computeIfAbsent( entry.getAdapterName(), k -> new ConcurrentLinkedDeque<>() ).addFirst( entry ); + } + + + public List getHistory( String adapterName ) { + return changeCatalog.getOrDefault( adapterName, new ConcurrentLinkedDeque<>() ) + .stream() + .toList(); + } + + + private void prune( String adapterName ) { + Deque deque = changeCatalog.get( adapterName ); + while ( deque != null && deque.size() > MAX_ENTRIES_PER_ADAPTER ) { + deque.removeLast(); + } + } + } diff --git a/core/src/main/java/org/polypheny/db/adapter/java/AdapterTemplate.java b/core/src/main/java/org/polypheny/db/adapter/java/AdapterTemplate.java index 8a74d6c8b4..d3e69592e6 100644 --- a/core/src/main/java/org/polypheny/db/adapter/java/AdapterTemplate.java +++ b/core/src/main/java/org/polypheny/db/adapter/java/AdapterTemplate.java @@ -33,6 +33,7 @@ import org.polypheny.db.adapter.DataStore; import org.polypheny.db.adapter.DeployMode; import org.polypheny.db.adapter.DeployMode.DeploySetting; +import org.polypheny.db.adapter.MetadataObserver.ChangeLogEntry; import org.polypheny.db.adapter.MetadataObserver.MetadataHasher; import org.polypheny.db.adapter.annotations.AdapterProperties; import org.polypheny.db.catalog.entity.LogicalAdapter.AdapterType; @@ -140,7 +141,7 @@ public PreviewResult preview( Map settings, int limit ) { Object rows = mp.getPreview(); log.error( json ); // log.error( rows.toString() ); - return new PreviewResult( json, rows ); + return new PreviewResult( json, rows, null ); } throw new GenericRuntimeException( "The adapter does not implement MetadataProvider." 
);
         } finally {
@@ -157,6 +158,8 @@ public static class PreviewResult {
         String metadata;
         @JsonProperty
         Object preview;
+        @JsonProperty
+        List<ChangeLogEntry> history;
     }

diff --git a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java
index d108ab5c01..5330859ce8 100644
--- a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java
+++ b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java
@@ -681,7 +681,11 @@ private void createRelationalSource( Transaction transaction, DataSource adap
         HashCache.getInstance().put( uniqueName, hash );
         log.info( "Key used during deployment: {} ", uniqueName );

-        pm.onAdapterDeploy( (Adapter & MetadataProvider) mp );
+
+        if ( !( adapter.getAdapterName().equals( "Excel" ) || adapter.getAdapterName().equals( "CSV" ) ) ) {
+            pm.onAdapterDeploy( (Adapter & MetadataProvider) mp );
+        }
+
         mp.markSelectedAttributes( selectedAttributes );
         log.error( "SelectedAttributes ist gesetzt aus dem DdlManager und der Tree ist das hier: " );
diff --git a/webui/src/main/java/org/polypheny/db/webui/Crud.java b/webui/src/main/java/org/polypheny/db/webui/Crud.java
index 07880f21e4..8154441426 100644
--- a/webui/src/main/java/org/polypheny/db/webui/Crud.java
+++ b/webui/src/main/java/org/polypheny/db/webui/Crud.java
@@ -91,6 +91,7 @@
 import org.polypheny.db.adapter.DataSource;
 import org.polypheny.db.adapter.DataStore;
 import org.polypheny.db.adapter.DataStore.FunctionalIndexInfo;
+import org.polypheny.db.adapter.MetadataObserver.ChangeLogEntry;
 import org.polypheny.db.adapter.MetadataObserver.PublisherManager;
 import org.polypheny.db.adapter.MetadataObserver.PublisherManager.ChangeStatus;
 import org.polypheny.db.adapter.RelationalDataSource.ExportedColumn;
@@ -1027,7 +1028,9 @@ void getMetaConfiguration( final Context ctx ) {
                 .orElseThrow( () -> new IllegalStateException( "Adapter %s doesn't support inteface metadata provider !".formatted( uniqueName ) ) );

-        PreviewResult data = new PreviewResult( NodeSerializer.serializeNode( provider.getRoot() ).toString(), provider.getPreview() );
+        List<ChangeLogEntry> history = PublisherManager.getInstance().getHistory( uniqueName );
+
+        PreviewResult data = new PreviewResult( NodeSerializer.serializeNode( provider.getRoot() ).toString(), provider.getPreview(), history );
         ctx.json( data );
     }

From b9c0944e12382b34c49ebdbf03c84e013428a51f Mon Sep 17 00:00:00 2001
From: romanost03
Date: Mon, 30 Jun 2025 18:53:19 +0200
Subject: [PATCH 56/68] Bugfix for getExportedColumns: Duplicate columns are
 filtered correctly now.
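
DatabaseMetaData.getColumns( catalog, schemaPattern, table, column ) was
called with a null schema pattern, so equally named tables in other schemas
contributed their columns a second time. Passing the schema for both the
catalog and the schemaPattern argument (the MySQL driver treats the catalog
as the database) narrows the result to a single schema. Sketch of the
corrected call, as applied below:

    // Restrict both catalog and schema so each physical column appears once.
    try ( ResultSet row = dbmd.getColumns( schemaPattern, schemaPattern, tableName, "%" ) ) {
        while ( row.next() ) {
            // one ExportedColumn per column of this table only
        }
    }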
--- .../polypheny/db/adapter/jdbc/sources/AbstractJdbcSource.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/sources/AbstractJdbcSource.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/sources/AbstractJdbcSource.java index 2d2b543f14..a188ae92d5 100644 --- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/sources/AbstractJdbcSource.java +++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/sources/AbstractJdbcSource.java @@ -270,7 +270,7 @@ public Map> getExportedColumns() { primaryKeyColumns.add( row.getString( "COLUMN_NAME" ) ); } } - try ( ResultSet row = dbmd.getColumns( schemaPattern, null, tableName, "%" ) ) { + try ( ResultSet row = dbmd.getColumns( schemaPattern, schemaPattern, tableName, "%" ) ) { List list = new ArrayList<>(); while ( row.next() ) { PolyType type = PolyType.getNameForJdbcType( row.getInt( "DATA_TYPE" ) ); From 212f6c9a8d3ea645b80d1b1a9453ce69978ab604 Mon Sep 17 00:00:00 2001 From: romanost03 Date: Tue, 1 Jul 2025 19:57:58 +0200 Subject: [PATCH 57/68] Bugfix for fetching metadata and exported columns: connectionHandler is created and freed by every run. --- .../jdbc/sources/AbstractJdbcSource.java | 11 ++++++++- .../monetdb/sources/MonetdbSource.java | 21 ++++++++++++---- .../db/adapter/jdbc/MysqlSourcePlugin.java | 24 +++++++++++++------ .../source/OracleSource.java | 21 ++++++++++++---- .../postgres/source/PostgresqlSource.java | 12 ++++++---- 5 files changed, 67 insertions(+), 22 deletions(-) diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/sources/AbstractJdbcSource.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/sources/AbstractJdbcSource.java index a188ae92d5..ef9d2679dd 100644 --- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/sources/AbstractJdbcSource.java +++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/sources/AbstractJdbcSource.java @@ -214,10 +214,11 @@ public Map> getExportedColumns() { java.sql.Statement statement = null; Connection connection = null; + ConnectionHandler connectionHandler = null; PolyXid xid = PolyXid.generateLocalTransactionIdentifier( PUID.randomPUID( Type.RANDOM ), PUID.randomPUID( Type.RANDOM ) ); try { - ConnectionHandler connectionHandler = connectionFactory.getOrCreateConnectionHandler( xid ); + connectionHandler = connectionFactory.getOrCreateConnectionHandler( xid ); statement = connectionHandler.getStatement(); connection = statement.getConnection(); DatabaseMetaData dbmd = connection.getMetaData(); @@ -339,6 +340,14 @@ public Map> getExportedColumns() { } } catch ( SQLException | ConnectionHandlerException e ) { throw new GenericRuntimeException( "Exception while collecting schema information!" 
+ e ); + } finally { + try { + // stmt.close(); + // conn.close(); + connectionHandler.commit(); + } catch ( ConnectionHandlerException e ) { + throw new RuntimeException( e ); + } } return map; diff --git a/plugins/monetdb-adapter/src/main/java/org/polypheny/db/adapter/monetdb/sources/MonetdbSource.java b/plugins/monetdb-adapter/src/main/java/org/polypheny/db/adapter/monetdb/sources/MonetdbSource.java index 6dae66ba85..709736d1bd 100644 --- a/plugins/monetdb-adapter/src/main/java/org/polypheny/db/adapter/monetdb/sources/MonetdbSource.java +++ b/plugins/monetdb-adapter/src/main/java/org/polypheny/db/adapter/monetdb/sources/MonetdbSource.java @@ -169,10 +169,11 @@ public Map> getExportedColumns() { java.sql.Statement statement = null; Connection connection = null; + ConnectionHandler connectionHandler = null; PolyXid xid = PolyXid.generateLocalTransactionIdentifier( PUID.randomPUID( Type.RANDOM ), PUID.randomPUID( Type.RANDOM ) ); try { - ConnectionHandler connectionHandler = connectionFactory.getOrCreateConnectionHandler( xid ); + connectionHandler = connectionFactory.getOrCreateConnectionHandler( xid ); statement = connectionHandler.getStatement(); connection = statement.getConnection(); DatabaseMetaData dbmd = connection.getMetaData(); @@ -284,6 +285,14 @@ public Map> getExportedColumns() { } } catch ( SQLException | ConnectionHandlerException e ) { throw new GenericRuntimeException( "Exception while collecting schema information!" + e ); + } finally { + try { + // stmt.close(); + // conn.close(); + connectionHandler.commit(); + } catch ( ConnectionHandlerException e ) { + throw new RuntimeException( e ); + } } return map; } @@ -298,11 +307,12 @@ public AbstractNode fetchMetadataTree() { java.sql.Statement stmt = null; Connection conn = null; + ConnectionHandler handler = null; PolyXid xid = PolyXid.generateLocalTransactionIdentifier( PUID.randomPUID( Type.RANDOM ), PUID.randomPUID( Type.RANDOM ) ); try { - ConnectionHandler handler = connectionFactory.getOrCreateConnectionHandler( xid ); + handler = connectionFactory.getOrCreateConnectionHandler( xid ); stmt = handler.getStatement(); conn = stmt.getConnection(); DatabaseMetaData meta = conn.getMetaData(); @@ -399,9 +409,10 @@ public AbstractNode fetchMetadataTree() { throw new GenericRuntimeException( "Error while fetching metadata tree", ex ); } finally { try { - stmt.close(); - conn.close(); - } catch ( SQLException e ) { + // stmt.close(); + // conn.close(); + handler.commit(); + } catch ( ConnectionHandlerException e ) { throw new RuntimeException( e ); } } diff --git a/plugins/mysql-adapter/src/main/java/org/polypheny/db/adapter/jdbc/MysqlSourcePlugin.java b/plugins/mysql-adapter/src/main/java/org/polypheny/db/adapter/jdbc/MysqlSourcePlugin.java index 8b4d3a9f64..d52528d84b 100644 --- a/plugins/mysql-adapter/src/main/java/org/polypheny/db/adapter/jdbc/MysqlSourcePlugin.java +++ b/plugins/mysql-adapter/src/main/java/org/polypheny/db/adapter/jdbc/MysqlSourcePlugin.java @@ -191,9 +191,10 @@ public AbstractNode fetchMetadataTree() { java.sql.Statement stmt = null; Connection conn = null; + ConnectionHandler handler = null; try { - ConnectionHandler handler = connectionFactory.getOrCreateConnectionHandler( xid ); + handler = connectionFactory.getOrCreateConnectionHandler( xid ); stmt = handler.getStatement(); conn = stmt.getConnection(); DatabaseMetaData meta = conn.getMetaData(); @@ -291,9 +292,10 @@ public AbstractNode fetchMetadataTree() { throw new GenericRuntimeException( "Error while fetching metadata tree", ex ); } finally { try 
{ - stmt.close(); - conn.close(); - } catch ( SQLException e ) { + //stmt.close(); + //conn.close(); + handler.commit(); + } catch ( ConnectionHandlerException e ) { throw new RuntimeException( e ); } } @@ -331,10 +333,10 @@ public Map> getExportedColumns() { Connection connection = null; PolyXid xid = PolyXid.generateLocalTransactionIdentifier( PUID.randomPUID( Type.RANDOM ), PUID.randomPUID( Type.RANDOM ) ); - + ConnectionHandler connectionHandler = null; try { - ConnectionHandler connectionHandler = connectionFactory.getOrCreateConnectionHandler( xid ); + connectionHandler = connectionFactory.getOrCreateConnectionHandler( xid ); statement = connectionHandler.getStatement(); connection = statement.getConnection(); DatabaseMetaData dbmd = connection.getMetaData(); @@ -387,7 +389,7 @@ public Map> getExportedColumns() { primaryKeyColumns.add( row.getString( "COLUMN_NAME" ) ); } } - try ( ResultSet row = dbmd.getColumns( schemaPattern, null, tableName, "%" ) ) { + try ( ResultSet row = dbmd.getColumns( schemaPattern, schemaPattern, tableName, "%" ) ) { List list = new ArrayList<>(); while ( row.next() ) { PolyType type = PolyType.getNameForJdbcType( row.getInt( "DATA_TYPE" ) ); @@ -457,6 +459,14 @@ public Map> getExportedColumns() { } catch ( SQLException | ConnectionHandlerException e ) { throw new GenericRuntimeException( "Exception while collecting schema information!" + e ); + } finally { + try { + // stmt.close(); + // conn.close(); + connectionHandler.commit(); + } catch ( ConnectionHandlerException e ) { + throw new RuntimeException( e ); + } } return map; diff --git a/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/source/OracleSource.java b/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/source/OracleSource.java index 101d43b70e..eb8d01a52b 100644 --- a/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/source/OracleSource.java +++ b/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/source/OracleSource.java @@ -158,10 +158,11 @@ public Map> getExportedColumns() { java.sql.Statement statement = null; Connection connection = null; + ConnectionHandler connectionHandler = null; PolyXid xid = PolyXid.generateLocalTransactionIdentifier( PUID.randomPUID( Type.RANDOM ), PUID.randomPUID( Type.RANDOM ) ); try { - ConnectionHandler connectionHandler = connectionFactory.getOrCreateConnectionHandler( xid ); + connectionHandler = connectionFactory.getOrCreateConnectionHandler( xid ); statement = connectionHandler.getStatement(); connection = statement.getConnection(); DatabaseMetaData dbmd = connection.getMetaData(); @@ -268,6 +269,14 @@ public Map> getExportedColumns() { } } catch ( SQLException | ConnectionHandlerException e ) { throw new GenericRuntimeException( "Exception while collecting Oracle schema info", e ); + } finally { + try { + // stmt.close(); + // conn.close(); + connectionHandler.commit(); + } catch ( ConnectionHandlerException e ) { + throw new RuntimeException( e ); + } } return map; @@ -284,9 +293,10 @@ public AbstractNode fetchMetadataTree() { java.sql.Statement stmt = null; Connection conn = null; + ConnectionHandler h = null; try { - ConnectionHandler h = connectionFactory.getOrCreateConnectionHandler( xid ); + h = connectionFactory.getOrCreateConnectionHandler( xid ); stmt = h.getStatement(); conn = stmt.getConnection(); DatabaseMetaData m = conn.getMetaData(); @@ -375,9 +385,10 @@ public AbstractNode fetchMetadataTree() { throw new GenericRuntimeException( "Error while fetching Oracle 
metadata", e ); } finally { try { - stmt.close(); - conn.close(); - } catch ( SQLException e ) { + // stmt.close(); + // conn.close(); + h.commit(); + } catch ( ConnectionHandlerException e ) { throw new RuntimeException( e ); } } diff --git a/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java b/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java index 93885e646b..b095146e24 100644 --- a/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java +++ b/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java @@ -105,9 +105,10 @@ public AbstractNode fetchMetadataTree() { java.sql.Statement stmt = null; Connection conn = null; + ConnectionHandler handler = null; try { - ConnectionHandler handler = connectionFactory.getOrCreateConnectionHandler( xid ); + handler = connectionFactory.getOrCreateConnectionHandler( xid ); stmt = handler.getStatement(); conn = stmt.getConnection(); DatabaseMetaData meta = conn.getMetaData(); @@ -204,9 +205,12 @@ public AbstractNode fetchMetadataTree() { throw new GenericRuntimeException( "Error while fetching metadata tree", ex ); } finally { try { - stmt.close(); - conn.close(); - } catch ( SQLException e ) { + // stmt.close(); + // conn.close(); + handler.commit(); + } /*catch ( SQLException e ) { + throw new RuntimeException( e ); + }*/ catch ( ConnectionHandlerException e ) { throw new RuntimeException( e ); } } From e050b06c4c9cbae787973263c506ee2c36aaf382 Mon Sep 17 00:00:00 2001 From: romanost03 Date: Tue, 1 Jul 2025 19:58:31 +0200 Subject: [PATCH 58/68] Bugfix when adding metadata and deploy. --- .../org/polypheny/db/ddl/DdlManagerImpl.java | 28 ++++++++++++++++--- 1 file changed, 24 insertions(+), 4 deletions(-) diff --git a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java index 5330859ce8..17214d4147 100644 --- a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java +++ b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java @@ -296,9 +296,9 @@ record PathParts( String physicalNs, String physicalSchema, String table, String : exportedColumnList.get( 0 ).physicalSchemaName(); LogicalTable logical = findLogicalTableByPhysical( namespace, adapter.get(), physicalSchema, tableName ); - if ( logical == null ) { + /*if ( logical == null ) { logical = catalog.getSnapshot().rel().getTable( namespace, tableName ).orElse( null ); - } + }*/ AllocationPlacement placement; AllocationEntity allocation; @@ -710,6 +710,24 @@ private void createRelationalSource( Transaction transaction, DataSource adap String baseName = entry.getKey(); String physicalTable = baseName; + + Map> filter = new HashMap<>(); + + if (attributes != null && !attributes.isBlank()) { + List paths = new Gson().fromJson( + attributes, new com.google.gson.reflect.TypeToken>() {}.getType()); + + for (String p : paths) { + String[] tok = p.split("\\."); + if (tok.length < 3) continue; + + String table = tok[tok.length - 2].toLowerCase(); + String column = tok[tok.length - 1].split(":")[0].toLowerCase(); + + filter.computeIfAbsent(table, k -> new HashSet<>()).add(column); + } + } + // Make sure the table name is unique String tableName = getUniqueEntityName( namespace, entry.getKey(), ( ns, en ) -> catalog.getSnapshot().rel().getTable( ns, en ) ); @@ -727,8 +745,11 @@ private void createRelationalSource( Transaction transaction, 
DataSource adap .map( attr -> attr.split( ":" )[0].toLowerCase() ) .collect( Collectors.toList() ); + String currentTable = physicalTable.toLowerCase(); + Set allowed = filter.getOrDefault(currentTable, Collections.emptySet()); + for ( ExportedColumn exportedColumn : entry.getValue() ) { - if ( adapter instanceof MetadataProvider mp && (attributes != null) && selectedAttributeNames.stream().noneMatch( name -> name.equalsIgnoreCase( exportedColumn.name() ) ) ) { + if ( adapter instanceof MetadataProvider mp && (attributes != null) && !allowed.isEmpty() && !allowed.contains(exportedColumn.name().toLowerCase())) { continue; } else { LogicalColumn column = catalog.getLogicalRel( namespace ).addColumn( @@ -840,7 +861,6 @@ public void dropAdapter( String name, Statement statement ) { PublisherManager.getInstance().onAdapterUndeploy( adapter.uniqueName ); } - @Override public void renameNamespace( String newName, String currentName ) { newName = newName.toLowerCase(); From 9297ec7fd0dcaea93ce9e95e80325bbb90aab511 Mon Sep 17 00:00:00 2001 From: romanost03 Date: Wed, 16 Jul 2025 20:30:52 +0200 Subject: [PATCH 59/68] Minor improvements/delete unnecessary code. --- .../org/polypheny/db/schemaDiscovery/MetadataProvider.java | 2 ++ dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java | 4 ++-- webui/src/main/java/org/polypheny/db/webui/Crud.java | 2 +- 3 files changed, 5 insertions(+), 3 deletions(-) diff --git a/core/src/main/java/org/polypheny/db/schemaDiscovery/MetadataProvider.java b/core/src/main/java/org/polypheny/db/schemaDiscovery/MetadataProvider.java index 3111dd670a..257ef58406 100644 --- a/core/src/main/java/org/polypheny/db/schemaDiscovery/MetadataProvider.java +++ b/core/src/main/java/org/polypheny/db/schemaDiscovery/MetadataProvider.java @@ -37,4 +37,6 @@ public interface MetadataProvider { AbstractNode getRoot(); + + } diff --git a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java index 17214d4147..8f668e9335 100644 --- a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java +++ b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java @@ -296,9 +296,9 @@ record PathParts( String physicalNs, String physicalSchema, String table, String : exportedColumnList.get( 0 ).physicalSchemaName(); LogicalTable logical = findLogicalTableByPhysical( namespace, adapter.get(), physicalSchema, tableName ); - /*if ( logical == null ) { + if ( logical == null ) { logical = catalog.getSnapshot().rel().getTable( namespace, tableName ).orElse( null ); - }*/ + } AllocationPlacement placement; AllocationEntity allocation; diff --git a/webui/src/main/java/org/polypheny/db/webui/Crud.java b/webui/src/main/java/org/polypheny/db/webui/Crud.java index 8154441426..dbed4fdf42 100644 --- a/webui/src/main/java/org/polypheny/db/webui/Crud.java +++ b/webui/src/main/java/org/polypheny/db/webui/Crud.java @@ -994,7 +994,7 @@ void metadataAck( final Context ctx ) { log.info( "Acknowledgement incoming: " + payload.toString() ); PublisherManager.getInstance().ack( payload.uniqueName, payload.addedPaths ); - Optional> adapter = AdapterManager.getInstance().getSource( payload.uniqueName ); + // Optional> adapter = AdapterManager.getInstance().getSource( payload.uniqueName ); Transaction transaction = transactionManager.startTransaction( Catalog.defaultUserId, false, "metadata-ack-" + payload.uniqueName ); Statement stmt = transaction.createStatement(); try { From 6f9f347a34d45c349dfbfe7af0b2cb8572f9d358 Mon Sep 17 00:00:00 2001 From: 
romanost03 Date: Tue, 5 Aug 2025 16:29:45 +0200 Subject: [PATCH 60/68] Bugfix: Delete logs when removing adapter. Minor code improvements. --- .../MetadataObserver/PublisherManager.java | 1 + .../org/polypheny/db/ddl/DdlManagerImpl.java | 53 ++++++++++--------- .../postgres/source/PostgresqlSource.java | 2 - .../java/org/polypheny/db/webui/Crud.java | 43 ++++++++++++++- .../org/polypheny/db/webui/HttpServer.java | 4 +- 5 files changed, 74 insertions(+), 29 deletions(-) diff --git a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/PublisherManager.java b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/PublisherManager.java index 9ff09bc627..51834b2ff6 100644 --- a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/PublisherManager.java +++ b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/PublisherManager.java @@ -72,6 +72,7 @@ public void onAdapterUndeploy( String uniqueName ) { if ( publishers.containsKey( uniqueName ) ) { publishers.get( uniqueName ).stop(); publishers.remove( uniqueName ); + this.changeCatalog.remove( uniqueName ); log.error( "Adapter {} is going to be unregistered for metadata publish.", uniqueName ); } } diff --git a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java index 8f668e9335..8d7303b253 100644 --- a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java +++ b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java @@ -682,11 +682,10 @@ private void createRelationalSource( Transaction transaction, DataSource adap HashCache.getInstance().put( uniqueName, hash ); log.info( "Key used during deployment: {} ", uniqueName ); - if ( !( adapter.getAdapterName().equals( "Excel" ) || adapter.getAdapterName().equals( "CSV" ) ) ) { - pm.onAdapterDeploy( (Adapter & MetadataProvider) mp ); + if ( !(adapter.getAdapterName().equals( "Excel" ) || adapter.getAdapterName().equals( "CSV" )) ) { + // pm.onAdapterDeploy( (Adapter & MetadataProvider) mp ); } - mp.markSelectedAttributes( selectedAttributes ); log.error( "Selected attributes are set from the DdlManager, and this is the tree: " ); mp.printTree( null, 0 ); @@ -702,7 +701,6 @@ private void createRelationalSource( Transaction transaction, DataSource adap } // Create table, columns etc. for ( Map.Entry> entry : exportedColumns.entrySet() ) { - // Make sure the table name is unique String tableName = entry.getKey(); String physicalSchema = entry.getValue().isEmpty() ? Catalog.DEFAULT_NAMESPACE_NAME : entry.getValue().get( 0 ).physicalSchemaName(); @@ -710,25 +708,36 @@ private void createRelationalSource( Transaction transaction, DataSource adap String baseName = entry.getKey(); String physicalTable = baseName; - Map> filter = new HashMap<>(); - if (attributes != null && !attributes.isBlank()) { - List paths = new Gson().fromJson( - attributes, new com.google.gson.reflect.TypeToken>() {}.getType()); + if ( attributes != null && !attributes.isBlank() ) { + List paths = new Gson().fromJson( attributes, new com.google.gson.reflect.TypeToken>() { + }.getType() ); - for (String p : paths) { - String[] tok = p.split("\\."); - if (tok.length < 3) continue; + for ( String p : paths ) { + String[] tok = p.split( "\\." 
); + if ( tok.length < 3 ) { + continue; + } - String table = tok[tok.length - 2].toLowerCase(); - String column = tok[tok.length - 1].split(":")[0].toLowerCase(); + String table = tok[tok.length - 2].toLowerCase(); + String column = tok[tok.length - 1].split( ":" )[0].toLowerCase(); - filter.computeIfAbsent(table, k -> new HashSet<>()).add(column); + filter.computeIfAbsent( table, k -> new HashSet<>() ).add( column ); } } - // Make sure the table name is unique + selectedAttributeNames = selectedAttributeNames.stream() + .map( attr -> attr.split( ":" )[0].toLowerCase() ) + .collect( Collectors.toList() ); + + String currentTable = physicalTable.toLowerCase(); + Set allowed = filter.getOrDefault( currentTable, Collections.emptySet() ); + + if ( attributes != null && allowed.isEmpty() ) { + continue; + } + String tableName = getUniqueEntityName( namespace, entry.getKey(), ( ns, en ) -> catalog.getSnapshot().rel().getTable( ns, en ) ); LogicalTable logical = catalog.getLogicalRel( namespace ).addTable( tableName, EntityType.SOURCE, !(adapter).isDataReadOnly() ); @@ -741,15 +750,8 @@ private void createRelationalSource( Transaction transaction, DataSource adap List aColumns = new ArrayList<>(); int colPos = 1; - selectedAttributeNames = selectedAttributeNames.stream() - .map( attr -> attr.split( ":" )[0].toLowerCase() ) - .collect( Collectors.toList() ); - - String currentTable = physicalTable.toLowerCase(); - Set allowed = filter.getOrDefault(currentTable, Collections.emptySet()); - for ( ExportedColumn exportedColumn : entry.getValue() ) { - if ( adapter instanceof MetadataProvider mp && (attributes != null) && !allowed.isEmpty() && !allowed.contains(exportedColumn.name().toLowerCase())) { + if ( adapter instanceof MetadataProvider mp && attributes != null && (allowed.isEmpty() || !allowed.contains( exportedColumn.name().toLowerCase() )) ) { continue; } else { LogicalColumn column = catalog.getLogicalRel( namespace ).addColumn( @@ -781,10 +783,10 @@ private void createRelationalSource( Transaction transaction, DataSource adap buildRelationalNamespace( namespace, logical, adapter ); transaction.attachCommitAction( () -> - // we can execute with initial logical and allocation data as this is a source and this will not change adapter.createTable( null, LogicalTableWrapper.of( logical, columns, List.of(), physicalSchema, physicalTable ), AllocationTableWrapper.of( allocation.unwrapOrThrow( AllocationTable.class ), aColumns ) ) ); catalog.updateSnapshot(); } + } @@ -858,9 +860,10 @@ public void dropAdapter( String name, Statement statement ) { } } AdapterManager.getInstance().removeAdapter( adapter.id ); - PublisherManager.getInstance().onAdapterUndeploy( adapter.uniqueName ); + // PublisherManager.getInstance().onAdapterUndeploy( adapter.uniqueName ); } + @Override public void renameNamespace( String newName, String currentName ) { newName = newName.toLowerCase(); diff --git a/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java b/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java index b095146e24..ba852579c1 100644 --- a/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java +++ b/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java @@ -79,8 +79,6 @@ description = "Maximum number of concurrent JDBC connections.") @AdapterSettingList(name = "transactionIsolation", options = { "SERIALIZABLE", 
"READ_UNCOMMITTED", "READ_COMMITTED", "REPEATABLE_READ" }, defaultValue = "SERIALIZABLE", description = "Which level of transaction isolation should be used.") -@AdapterSettingString(name = "tables", defaultValue = "foo,bar", - description = "List of tables which should be imported. The names must be separated by a comma.") public class PostgresqlSource extends AbstractJdbcSource implements MetadataProvider { public AbstractNode metadataRoot; diff --git a/webui/src/main/java/org/polypheny/db/webui/Crud.java b/webui/src/main/java/org/polypheny/db/webui/Crud.java index dbed4fdf42..6e9cd06d64 100644 --- a/webui/src/main/java/org/polypheny/db/webui/Crud.java +++ b/webui/src/main/java/org/polypheny/db/webui/Crud.java @@ -1028,7 +1028,7 @@ void getMetaConfiguration( final Context ctx ) { .orElseThrow( () -> new IllegalStateException( "Adapter %s doesn't support inteface metadata provider !".formatted( uniqueName ) ) ); - List history = PublisherManager.getInstance().getHistory(uniqueName); + List history = PublisherManager.getInstance().getHistory( uniqueName ); PreviewResult data = new PreviewResult( NodeSerializer.serializeNode( provider.getRoot() ).toString(), provider.getPreview(), history ); ctx.json( data ); @@ -2217,6 +2217,7 @@ void getAvailableStoresForIndexes( final Context ctx ) { */ void updateAdapterSettings( final Context ctx ) { //see https://stackoverflow.com/questions/16872492/gson-and-abstract-superclasses-deserialization-issue + log.error( "β–Ά Payload:\n" + ctx.body() ); JsonDeserializer> storeDeserializer = ( json, typeOfT, context ) -> { JsonObject jsonObject = json.getAsJsonObject(); String type = jsonObject.get( "type" ).getAsString(); @@ -2261,6 +2262,40 @@ void updateAdapterSettings( final Context ctx ) { } + void updateSettings( final Context ctx ) { + log.error( ctx.body() ); + AdapterModel upd = ctx.bodyAsClass( AdapterModel.class ); + + if ( upd.getName() == null || upd.getSettings() == null ) { + ctx.status( HttpCode.BAD_REQUEST ).result( "uniqueName und settings required!" ); + return; + } + + Optional> store = AdapterManager.getInstance().getStore( upd.getName() ); + Optional> opt = store.isPresent() ? store : AdapterManager.getInstance().getSource( upd.getName() ); + + if ( opt.isEmpty() ) { + ctx.json( RelationalResult.builder().error( "Adapter not found!" 
).build() ); + return; + } + + Adapter adapter = opt.get(); + + try { + adapter.updateSettings( upd.getSettings() ); + Catalog.getInstance().commit(); + + Transaction tx = getTransaction(); + tx.createStatement().getQueryProcessor().resetCaches(); + tx.commit(); + + ctx.json( RelationalResult.builder().affectedTuples( 1 ).build() ); + } catch ( Throwable t ) { + ctx.json( RelationalResult.builder().error( "Update canceled: " + t.getMessage() ).build() ); + } + } + + /** * Get available adapters */ @@ -3339,4 +3374,10 @@ public record ConfigPayload( @JsonProperty String uniqueName, @JsonProperty Stri } + + public record AdapterSettingsUpdate( String uniqueName, Map settings ) { + + } + + } diff --git a/webui/src/main/java/org/polypheny/db/webui/HttpServer.java b/webui/src/main/java/org/polypheny/db/webui/HttpServer.java index 64eda214d4..d541120b7d 100644 --- a/webui/src/main/java/org/polypheny/db/webui/HttpServer.java +++ b/webui/src/main/java/org/polypheny/db/webui/HttpServer.java @@ -325,7 +325,9 @@ private void attachRoutes( Javalin webuiServer, Crud crud ) { webuiServer.post( "/removeAdapter", crud::removeAdapter ); - webuiServer.post( "/updateAdapterSettings", crud::updateAdapterSettings ); + webuiServer.post( "/updateAdapterSettings", crud::updateSettings ); + + webuiServer.post( "/updateAdapterSettingsForm", crud::updateSettings ); webuiServer.get( "/getAvailableStores", crud::getAvailableStores ); From d721b75609e3c597f84a0546b2d9b97d34f5369e Mon Sep 17 00:00:00 2001 From: romanost03 Date: Wed, 6 Aug 2025 17:28:51 +0200 Subject: [PATCH 61/68] Create ephemeral adapter for comparison of new and old metadata. --- .../java/org/polypheny/db/webui/Crud.java | 82 ++++++++++++++++++- .../org/polypheny/db/webui/HttpServer.java | 2 +- 2 files changed, 82 insertions(+), 2 deletions(-) diff --git a/webui/src/main/java/org/polypheny/db/webui/Crud.java b/webui/src/main/java/org/polypheny/db/webui/Crud.java index 6e9cd06d64..4332d3d270 100644 --- a/webui/src/main/java/org/polypheny/db/webui/Crud.java +++ b/webui/src/main/java/org/polypheny/db/webui/Crud.java @@ -2296,6 +2296,86 @@ void updateSettings( final Context ctx ) { } + void updateSettingsForm( final Context ctx ) throws IOException, ServletException { + + initMultipart( ctx ); + if ( !ctx.isMultipartFormData() ) { + ctx.status( HttpCode.BAD_REQUEST ) + .result( "Multipart-FormData required" ); + return; + } + + String bodyJson = IOUtils.toString( + ctx.req.getPart( "body" ).getInputStream(), + StandardCharsets.UTF_8 ); + PreviewRequest am = HttpServer.mapper.readValue( bodyJson, PreviewRequest.class ); + + // … PreviewRequest am = … + + List fileNames; + String rawDir = am.getSettings().get("directory"); + + try { + /* Case 1: a proper JSON array */ + fileNames = HttpServer.mapper.readValue( + rawDir, + new com.fasterxml.jackson.core.type.TypeReference>() {}); + } catch (com.fasterxml.jackson.core.JsonProcessingException ex) { + /* Case 2: a single string or a comma-separated list */ + String cleaned = rawDir + .replaceAll("[\\[\\]\"]", "") // strip brackets and quotes + .trim(); + fileNames = Arrays.stream(cleaned.split(",")) + .map(String::trim) + .filter(s -> !s.isEmpty()) + .toList(); + } + + + Map fileBytes = new HashMap<>(); + for ( Part p : ctx.req.getParts() ) { + if ( !"body".equals( p.getName() ) ) { + try ( InputStream in = p.getInputStream() ) { + fileBytes.put( p.getName(), IOUtils.toByteArray( in ) ); + } + } + } + + /* AbstractAdapterSettingDirectory dirSetting = + (AbstractAdapterSettingDirectory) AdapterManager + 
.getAdapterTemplate( am.getAdapterName(), am.getAdapterType() ) + .settings.stream() + .filter( s -> s instanceof AbstractAdapterSettingDirectory ) + .findFirst().orElseThrow();*/ + + String fullPath = handleUploadFiles( fileBytes, fileNames, null, am ); + createFormDiffs( am, fullPath ); + log.error( fullPath ); + ctx.result( "File(s) stored at: " + fullPath ); + } + + + private void createFormDiffs( PreviewRequest previewRequest, String path ) { + DataSource currentSource = AdapterManager.getInstance().getSource( previewRequest.uniqueName ).orElseThrow( ); + + MetadataProvider currentProvider = (MetadataProvider) currentSource; + AbstractNode currentNode = currentProvider.getRoot(); + + previewRequest.settings.put( "directory", path ); + + DataSource tempSource = AdapterManager.getAdapterTemplate( previewRequest.adapterName, AdapterType.SOURCE ).createEphemeral( previewRequest.settings ); + + MetadataProvider tempProvider = (MetadataProvider) currentSource; + AbstractNode tempNode = currentProvider.getRoot(); + + currentProvider.printTree( currentNode, 0 ); + tempProvider.printTree( tempNode, 0 ); + + try { tempSource.shutdown(); } catch (Exception ignore) {} + + } + + /** * Get available adapters */ @@ -2518,7 +2598,7 @@ private static String handleUploadFiles( Map inputStreams, // Map instead of Map private static String handleUploadFiles( Map files, List fileNames, AbstractAdapterSettingDirectory setting, PreviewRequest previewRequest ) { File path = PolyphenyHomeDirManager.getInstance() .registerNewFolder( "data/csv/" + previewRequest.adapterName ); for ( String name : fileNames ) { byte[] data = files.get( name ); if ( data == null ) { diff --git a/webui/src/main/java/org/polypheny/db/webui/HttpServer.java b/webui/src/main/java/org/polypheny/db/webui/HttpServer.java index d541120b7d..d3403a383f 100644 --- a/webui/src/main/java/org/polypheny/db/webui/HttpServer.java +++ b/webui/src/main/java/org/polypheny/db/webui/HttpServer.java @@ -327,7 +327,7 @@ private void attachRoutes( Javalin webuiServer, Crud crud ) { webuiServer.post( "/updateAdapterSettings", crud::updateSettings ); - webuiServer.post( "/updateAdapterSettingsForm", crud::updateSettings ); + webuiServer.post( "/updateAdapterSettingsForm", crud::updateSettingsForm ); webuiServer.get( "/getAvailableStores", crud::getAvailableStores ); From 8721918da00540ad5ea4037b327d8d0bafc1da7a Mon Sep 17 00:00:00 2001 From: romanost03 Date: Thu, 7 Aug 2025 16:57:03 +0200 Subject: [PATCH 62/68] Bugfix: Multiple InputStreams for Excel are ignored. 
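The streams were ignored because stale entries in setting.inputStreams survived a re-upload, and files missing from the current request were inserted as null streams. A minimal sketch of the corrected loop (mirroring the handleUploadFiles hunk in Crud.java below; setting, fileNames, and inputStreams are the local names used by that method):

    // Drop stale streams from a previous upload before re-populating the map.
    setting.inputStreams.clear();
    for ( String fileName : fileNames ) {
        InputStream in = inputStreams.get( fileName );
        if ( in != null ) { // skip files that were not part of this upload
            setting.inputStreams.put( fileName, in );
        }
    }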
--- .../MetadataObserver/AbstractListener.java | 40 +++++++++++++++++++ .../db/adapter/excel/ExcelSource.java | 15 ++++--- .../java/org/polypheny/db/webui/Crud.java | 15 ++++++- 3 files changed, 62 insertions(+), 8 deletions(-) diff --git a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/AbstractListener.java b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/AbstractListener.java index d6085ea4d0..301fa66023 100644 --- a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/AbstractListener.java +++ b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/AbstractListener.java @@ -21,6 +21,8 @@ import lombok.Getter; import lombok.extern.slf4j.Slf4j; import org.polypheny.db.adapter.Adapter; +import org.polypheny.db.adapter.AdapterManager; +import org.polypheny.db.adapter.DataSource; import org.polypheny.db.adapter.MetadataObserver.ChangeLogEntry; import org.polypheny.db.adapter.MetadataObserver.ChangeLogEntry.DiffMessageUtil; import org.polypheny.db.adapter.MetadataObserver.PublisherManager.ChangeStatus; @@ -84,6 +86,44 @@ public void onMetadataChange( P adapter, AbstractNode node, String hash ) { } + public static PreviewResult buildFormChange( String uniqueName, AbstractNode oldRoot, AbstractNode newRoot, Object preview ) { + DiffResult diff = MetaDiffUtil.diff( oldRoot, newRoot ); + ChangeStatus status = NodeUtil.evaluateStatus( diff, oldRoot ); + + ChangeLogEntry entry = new ChangeLogEntry( uniqueName, Instant.now().toString(), DiffMessageUtil.toMessages( diff ), status ); + + AbstractNode annotated = MetaAnnotator.annotateTree( oldRoot, newRoot, diff ); + String json = NodeSerializer.serializeNode( annotated ).toString(); + + PublisherManager pm = PublisherManager.getInstance(); + pm.addChange( entry ); + PreviewResult result = new PreviewResult( json, preview, List.of( entry ) ); + pm.onMetadataChange( uniqueName, result, status ); + + return result; + + } + + + public static void applyAnnotatedTree( Adapter adapter, AbstractNode newRoot, String newHash, String[] additionallySelectedMetadata ) { + + if ( !( adapter instanceof DataSource ) ) { + throw new IllegalArgumentException( "Adapter must be of type DataSource" ); + } + + MetadataProvider metadataProvider = ( MetadataProvider ) adapter; + + Set selected = NodeUtil.collectSelecedAttributePaths( metadataProvider.getRoot() ); + if ( additionallySelectedMetadata != null ) { + selected.addAll( Arrays.asList( additionallySelectedMetadata ) ); + } + + metadataProvider.setRoot( newRoot ); + metadataProvider.markSelectedAttributes( List.copyOf( selected ) ); + HashCache.getInstance().put( adapter.getUniqueName(), newHash ); + } + + @Override public void applyChange( String[] metadata ) { log.info( "Changes are going to be applied" ); diff --git a/plugins/excel-adapter/src/main/java/org/polypheny/db/adapter/excel/ExcelSource.java b/plugins/excel-adapter/src/main/java/org/polypheny/db/adapter/excel/ExcelSource.java index 0b25c0ee01..fba4eed7be 100644 --- a/plugins/excel-adapter/src/main/java/org/polypheny/db/adapter/excel/ExcelSource.java +++ b/plugins/excel-adapter/src/main/java/org/polypheny/db/adapter/excel/ExcelSource.java @@ -145,7 +145,6 @@ private void setExcelDir( Map settings ) { if ( connectionMethod == ConnectionMethod.LINK ) { dir = settings.get( "directoryName" ); - log.error( "DirectoryName arrives as: " + settings.get( "directoryName" ) ); } if ( dir == null ) { @@ -395,9 +394,7 @@ private void addInformationExportedColumns() { public AbstractNode fetchMetadataTree() { Source filePath; 
- // String filePath = "C:/Users/roman/Desktop/Mappe1.xlsx"; - String firstFile = resolveFileNames().stream().findFirst() - .orElseThrow( () -> new GenericRuntimeException( "No file found" ) ); + String firstFile = resolveFileNames().stream().findFirst().orElseThrow( () -> new GenericRuntimeException( "No file found" ) ); try { filePath = Sources.of( new URL( excelDir, firstFile ) ); } catch ( MalformedURLException e ) { @@ -496,11 +493,17 @@ public List> fetchPreview( Connection conn, String fqName, i String[] parts = fqName.split( "\\.", 2 ); String sheetName = parts.length == 2 ? parts[1] : parts[0]; - String filePath = "C:/Users/roman/Desktop/Mappe1.xlsx"; + Source filePath; + String firstFile = resolveFileNames().stream().findFirst().orElseThrow( () -> new GenericRuntimeException( "No file found" ) ); + try { + filePath = Sources.of( new URL( excelDir, firstFile ) ); + } catch ( MalformedURLException e ) { + throw new RuntimeException( e ); + } List> rows = new ArrayList<>(); - try ( FileInputStream fis = new FileInputStream( filePath ); Workbook wb = WorkbookFactory.create( fis ) ) { + try ( FileInputStream fis = new FileInputStream( filePath.path() ); Workbook wb = WorkbookFactory.create( fis ) ) { Sheet sheet = wb.getSheet( sheetName ); if ( sheet == null ) { diff --git a/webui/src/main/java/org/polypheny/db/webui/Crud.java b/webui/src/main/java/org/polypheny/db/webui/Crud.java index 4332d3d270..01b87c37e5 100644 --- a/webui/src/main/java/org/polypheny/db/webui/Crud.java +++ b/webui/src/main/java/org/polypheny/db/webui/Crud.java @@ -91,6 +91,7 @@ import org.polypheny.db.adapter.DataSource; import org.polypheny.db.adapter.DataStore; import org.polypheny.db.adapter.DataStore.FunctionalIndexInfo; +import org.polypheny.db.adapter.MetadataObserver.AbstractListener; import org.polypheny.db.adapter.MetadataObserver.ChangeLogEntry; import org.polypheny.db.adapter.MetadataObserver.PublisherManager; import org.polypheny.db.adapter.MetadataObserver.PublisherManager.ChangeStatus; @@ -2367,6 +2368,9 @@ private void createFormDiffs( PreviewRequest previewRequest, String path ) { MetadataProvider tempProvider = (MetadataProvider) currentSource; AbstractNode tempNode = currentProvider.getRoot(); + Object newPreview = tempProvider.getPreview(); + + PreviewResult result = AbstractListener.buildFormChange( previewRequest.uniqueName, currentNode, tempNode, newPreview ); currentProvider.printTree( currentNode, 0 ); tempProvider.printTree( tempNode, 0 ); @@ -2561,11 +2565,17 @@ private static String handleUploadFiles( Map inputStreams, if ( fileNames.isEmpty() ) { throw new GenericRuntimeException( "No file or directory specified for upload!" 
); } + + setting.inputStreams.clear(); for ( String fileName : fileNames ) { - setting.inputStreams.put( fileName, inputStreams.get( fileName ) ); + InputStream in = inputStreams.get( fileName ); + if ( in != null ) { + setting.inputStreams.put( fileName, in ); + } } + File path = PolyphenyHomeDirManager.getInstance().registerNewFolder( "data/csv/" + a.name ); - for ( Entry is : setting.inputStreams.entrySet() ) { + for ( Map.Entry is : setting.inputStreams.entrySet() ) { try { File file = new File( path, is.getKey() ); FileUtils.copyInputStreamToFile( is.getValue(), file ); @@ -2577,6 +2587,7 @@ private static String handleUploadFiles( Map inputStreams, } + /* private static String handleUploadFiles( Map inputStreams, List fileNames, AbstractAdapterSettingDirectory setting, PreviewRequest a ) { for ( String fileName : fileNames ) { setting.inputStreams.put( fileName, inputStreams.get( fileName ) ); From b9fbe01e305a26313f1db2b8c4b465a2cea59093 Mon Sep 17 00:00:00 2001 From: romanost03 Date: Fri, 8 Aug 2025 17:39:28 +0200 Subject: [PATCH 63/68] Functionality for re-uploading Excel or CSV files (early stage). --- .../MetadataObserver/AbstractListener.java | 81 +++++++++- .../MetadataObserver/PublisherManager.java | 36 ++++- .../org/polypheny/db/ddl/DdlManagerImpl.java | 6 +- .../polypheny/db/adapter/csv/CsvSource.java | 148 +++++++++++------- .../db/adapter/excel/ExcelSource.java | 2 + .../java/org/polypheny/db/webui/Crud.java | 28 ++-- 6 files changed, 217 insertions(+), 84 deletions(-) diff --git a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/AbstractListener.java b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/AbstractListener.java index 301fa66023..07a151fe8b 100644 --- a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/AbstractListener.java +++ b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/AbstractListener.java @@ -34,6 +34,10 @@ import org.polypheny.db.schemaDiscovery.MetadataProvider; import org.polypheny.db.schemaDiscovery.NodeSerializer; import org.polypheny.db.schemaDiscovery.NodeUtil; +import java.io.File; +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.StandardCopyOption; import java.time.Instant; import java.util.ArrayList; import java.util.Arrays; @@ -48,6 +52,8 @@ public class AbstractListener

implements M private P adapter; private String hash; + private static AbstractNode formRootNode = null; + private static final Gson GSON = new Gson(); @@ -86,7 +92,7 @@ public void onMetadataChange( P adapter, AbstractNode node, String hash ) { } - public static PreviewResult buildFormChange( String uniqueName, AbstractNode oldRoot, AbstractNode newRoot, Object preview ) { + public static PreviewResult buildFormChange( String uniqueName, AbstractNode oldRoot, AbstractNode newRoot, Object preview, String path ) { DiffResult diff = MetaDiffUtil.diff( oldRoot, newRoot ); ChangeStatus status = NodeUtil.evaluateStatus( diff, oldRoot ); @@ -99,19 +105,21 @@ public static PreviewResult buildFormChange( String uniqueName, AbstractNode old pm.addChange( entry ); PreviewResult result = new PreviewResult( json, preview, List.of( entry ) ); pm.onMetadataChange( uniqueName, result, status ); + pm.saveTempPath( uniqueName, path ); - return result; + formRootNode = newRoot; + return result; } public static void applyAnnotatedTree( Adapter adapter, AbstractNode newRoot, String newHash, String[] additionallySelectedMetadata ) { - if ( !( adapter instanceof DataSource ) ) { + if ( !(adapter instanceof DataSource) ) { throw new IllegalArgumentException( "Adapter must be of type DataSource" ); } - MetadataProvider metadataProvider = ( MetadataProvider ) adapter; + MetadataProvider metadataProvider = (MetadataProvider) adapter; Set selected = NodeUtil.collectSelecedAttributePaths( metadataProvider.getRoot() ); if ( additionallySelectedMetadata != null ) { @@ -146,10 +154,73 @@ public void applyChange( String[] metadata ) { } + public static void applyFormChange( String[] metadata, String uniqueName, String newPath ) { + log.info( "Form changes are going to be applied." ); + AbstractNode newRoot = formRootNode; + + DataSource adapter = AdapterManager.getInstance().getSource( uniqueName ).orElseThrow(); + MetadataProvider metadataprovider = (MetadataProvider) adapter; + + deleteTempPath( newPath, adapter.getSettings().get( "directory" ) ); + + newRoot = metadataprovider.fetchMetadataTree(); + + AbstractNode oldRoot = metadataprovider.getRoot(); + metadataprovider.setRoot( newRoot ); + + Set prevSelected = NodeUtil.collectSelecedAttributePaths( oldRoot ); + // metadataprovider.setRoot( newRoot ); + if ( metadata != null && metadata.length > 0 ) { + prevSelected.addAll( Arrays.asList( metadata ) ); + } + + metadataprovider.markSelectedAttributes( List.copyOf( prevSelected ) ); + + formRootNode = null; + PublisherManager.getInstance().deleteTempPath( uniqueName ); + + } + + + private static void deleteTempPath(String tmpPath, String directory) { + File tmpDir = new File(tmpPath); + File targetDir = new File(directory); + + if (!tmpDir.exists() || !tmpDir.isDirectory()) { + throw new IllegalArgumentException("tmpPath is not a valid directory: " + tmpPath); + } + if (!targetDir.exists() || !targetDir.isDirectory()) { + throw new IllegalArgumentException("directory is not a valid directory: " + directory); + } + + for (File file : targetDir.listFiles()) { + if (!file.delete()) { + throw new RuntimeException("Failed to delete file: " + file.getAbsolutePath()); + } + } + + for (File file : tmpDir.listFiles()) { + try { + Files.copy(file.toPath(), new File(targetDir, file.getName()).toPath(), + StandardCopyOption.REPLACE_EXISTING); + } catch ( IOException e) { + throw new RuntimeException("Failed to copy file: " + file.getAbsolutePath(), e); + } + } + + for ( File file : tmpDir.listFiles()) { + file.delete(); + } + if 
(!tmpDir.delete()) { + throw new RuntimeException("Failed to delete tmpPath directory: " + tmpDir.getAbsolutePath()); + } + } + + + @Override public boolean isAvailable() { return this.available; } } - diff --git a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/PublisherManager.java b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/PublisherManager.java index 51834b2ff6..ba7b7db05e 100644 --- a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/PublisherManager.java +++ b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/PublisherManager.java @@ -20,6 +20,8 @@ import lombok.Value; import lombok.extern.slf4j.Slf4j; import org.polypheny.db.adapter.Adapter; +import org.polypheny.db.adapter.AdapterManager; +import org.polypheny.db.adapter.DataSource; import org.polypheny.db.adapter.MetadataObserver.Utils.MetaDiffUtil.DiffResult; import org.polypheny.db.adapter.java.AdapterTemplate.PreviewResult; import org.polypheny.db.schemaDiscovery.AbstractNode; @@ -42,6 +44,10 @@ public class PublisherManager { private final Map changeCache = new ConcurrentHashMap<>(); private final Map statusCache = new ConcurrentHashMap<>(); + + // Cache for file metadata changes. Re-uploaded Excel or CSV file paths are stored temporarily. + private final Map tempFileCache = new ConcurrentHashMap<>(); + private final ConcurrentHashMap> changeCatalog = new ConcurrentHashMap<>(); private static final PublisherManager INSTANCE = new PublisherManager(); @@ -72,9 +78,13 @@ public void onAdapterUndeploy( String uniqueName ) { if ( publishers.containsKey( uniqueName ) ) { publishers.get( uniqueName ).stop(); publishers.remove( uniqueName ); - this.changeCatalog.remove( uniqueName ); - log.error( "Adapter {} is going to be unregistered for metadata publish.", uniqueName ); } + this.changeCatalog.remove( uniqueName ); + this.tempFileCache.remove( uniqueName ); + this.changeCache.remove( uniqueName ); + this.statusCache.remove( uniqueName ); + + log.error( "Adapter {} is going to be unregistered for metadata publish.", uniqueName ); } @@ -100,7 +110,13 @@ public PreviewResult fetchChange( String uniqueName ) { public void ack( String uniqueName, String[] metadata ) { MetadataPublisher publisher = publishers.get( uniqueName ); - publisher.getListener().applyChange( metadata ); + + if ( publishers.isEmpty() ) { + AbstractListener.applyFormChange( metadata, uniqueName, tempFileCache.get( uniqueName ) ); + } else { + publisher.getListener().applyChange( metadata ); + } + changeCache.remove( uniqueName ); statusCache.remove( uniqueName ); } @@ -132,4 +148,18 @@ private void prune( String adapterName ) { } } + + public void saveTempPath( String uniqueName, String path ) { + tempFileCache.put( uniqueName, path ); + } + + + public String getTempPath( String uniqueName ) { + return tempFileCache.get( uniqueName ); + } + + + public void deleteTempPath( String uniqueName ) { + tempFileCache.remove( uniqueName ); + } } diff --git a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java index 8d7303b253..e151f1580d 100644 --- a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java +++ b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java @@ -683,7 +683,7 @@ private void createRelationalSource( Transaction transaction, DataSource adap log.info( "Key used during deployment: {} ", uniqueName ); if ( !(adapter.getAdapterName().equals( "Excel" ) || adapter.getAdapterName().equals( "CSV" )) ) { - // pm.onAdapterDeploy( 
(Adapter & MetadataProvider) mp ); + pm.onAdapterDeploy( (Adapter & MetadataProvider) mp ); } mp.markSelectedAttributes( selectedAttributes ); @@ -716,7 +716,7 @@ private void createRelationalSource( Transaction transaction, DataSource adap for ( String p : paths ) { String[] tok = p.split( "\\." ); - if ( tok.length < 3 ) { + if ( tok.length < 2 ) { continue; } @@ -860,7 +860,7 @@ public void dropAdapter( String name, Statement statement ) { } } AdapterManager.getInstance().removeAdapter( adapter.id ); - // PublisherManager.getInstance().onAdapterUndeploy( adapter.uniqueName ); + PublisherManager.getInstance().onAdapterUndeploy( adapter.uniqueName ); } diff --git a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSource.java b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSource.java index f601c722b7..511e23dfd0 100644 --- a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSource.java +++ b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSource.java @@ -70,6 +70,7 @@ import org.polypheny.db.util.Sources; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import javax.annotation.Nullable; @Extension @AdapterProperties( @@ -387,39 +388,45 @@ public void renameLogicalColumn( long id, String newColumnName ) { @Override public AbstractNode fetchMetadataTree() { - File csvFile = new File( "C:/Users/roman/Desktop/Dateieins.csv" ); - String tableName = csvFile.getName(); - AbstractNode rootNode = new Node( "csv", tableName.split( "\\." )[0] ); - - try ( BufferedReader reader = new BufferedReader( new FileReader( csvFile ) ) ) { - String headerLine = reader.readLine(); - if ( headerLine == null ) { - throw new RuntimeException( "No header line found" ); + this.previewByTable = new HashMap<>(); + + try { + Source src = openCsvSource(null); + String fileName = src.file().getName(); + String baseName = fileName.replaceFirst("\\.csv(\\.gz)?$", ""); + AbstractNode rootNode = new Node("csv", baseName); + + try (BufferedReader reader = new BufferedReader(src.reader())) { + String headerLine = reader.readLine(); + if (headerLine == null) { + throw new RuntimeException("No header line found in " + fileName); + } + + String[] rawColumns = headerLine.split(","); + for (String colRaw : rawColumns) { + String[] split = colRaw.split(":"); + String name = split[0].trim().replaceAll("[^a-zA-Z0-9_]", ""); + String type = split.length > 1 ? split[1].trim() : "string"; + + AbstractNode columnNode = new AttributeNode("column", name); + columnNode.addProperty("type", mapCsvType(type)); + columnNode.addProperty("nullable", true); + rootNode.addChild(columnNode); + } } - String[] rawColumns = headerLine.split( "," ); - for ( String colRaw : rawColumns ) { - String[] split = colRaw.split( ":" ); - String name = split[0].trim().replaceAll( "[^a-zA-Z0-9_]", "" ); - String type = split.length > 1 ? 
split[1].trim() : "string"; + List> preview = fetchPreview(null, fileName, 10); + this.previewByTable.put(fileName, preview); - AbstractNode columnNode = new AttributeNode( "column", name ); - columnNode.addProperty( "type", mapCsvType( type ) ); - columnNode.addProperty( "nullable", true ); + return rootNode; - rootNode.addChild( columnNode ); - } - String fqName = csvFile.getName(); - List> preview = fetchPreview( null, fqName, 10 ); - this.previewByTable.put( fqName, preview ); - } catch ( IOException e ) { - throw new RuntimeException( "Failed to parse metadata of CSV source: " + e ); + } catch (IOException e) { + throw new RuntimeException("Failed to parse CSV metadata", e); } - return rootNode; - } + private String mapCsvType( String rawType ) { switch ( rawType ) { case "int": @@ -448,48 +455,81 @@ private String mapCsvType( String rawType ) { @Override - public List> fetchPreview( Connection conn, String fqName, int limit ) { - File csvFile = new File( "C:/Users/roman/Desktop/Dateieins.csv" ); - List> rows = new ArrayList<>(); - - try ( BufferedReader reader = new BufferedReader( new FileReader( csvFile ) ) ) { - String headerLine = reader.readLine(); - if ( headerLine == null ) { - return List.of(); - } + public List> fetchPreview(Connection conn, String fqName, int limit) { + try { + Source src = openCsvSource(fqName); + List> rows = new ArrayList<>(); + + try (BufferedReader reader = new BufferedReader(src.reader())) { + String headerLine = reader.readLine(); + if (headerLine == null) { + return List.of(); + } - String[] headerParts = headerLine.split( "," ); - List colNames = new ArrayList<>(); + String[] headerParts = headerLine.split(","); + List colNames = new ArrayList<>(headerParts.length); + for (String raw : headerParts) { + String[] split = raw.split(":"); + colNames.add(split[0].trim()); + } - for ( String raw : headerParts ) { - String[] split = raw.split( ":" ); - String colName = split[0].trim(); - colNames.add( colName ); + String line; + int count = 0; + while ((line = reader.readLine()) != null && count < limit) { + String[] values = line.split(",", -1); + Map row = new LinkedHashMap<>(); + for (int i = 0; i < colNames.size(); i++) { + String value = i < values.length ? values[i].trim() : null; + row.put(colNames.get(i), value); + } + rows.add(row); + count++; + } } - String line; - int count = 0; - while ( (line = reader.readLine()) != null && count < limit ) { - String[] values = line.split( ",", -1 ); - Map row = new LinkedHashMap<>(); + return rows; - for ( int i = 0; i < colNames.size(); i++ ) { - String value = i < values.length ? 
values[i].trim() : null; + row.put(colNames.get(i), value); + } + rows.add(row); + count++; + } } - String line; - int count = 0; - while ( (line = reader.readLine()) != null && count < limit ) { - String[] values = line.split( ",", -1 ); - Map row = new LinkedHashMap<>(); + return rows; - for ( int i = 0; i < colNames.size(); i++ ) { - String value = i < values.length ? values[i].trim() : null; - row.put( colNames.get( i ), value ); - } + } catch (IOException e) { + throw new RuntimeException("Failed to read CSV preview: " + fqName, e); + } + } - rows.add( row ); - count++; + + private Source openCsvSource(@Nullable String fqName) throws IOException { + if (csvDir.getProtocol().equals("jar")) { + if (fqName == null || fqName.isBlank()) { + throw new GenericRuntimeException("fqName required when using jar protocol for CSV."); } + return Sources.of(new URL(csvDir, fqName)); + } + + if (Sources.of(csvDir).file().isFile()) { + return Sources.of(csvDir); + } + + File[] files = Sources.of(csvDir) + .file() + .listFiles((d, name) -> name.endsWith(".csv") || name.endsWith(".csv.gz")); + if (files == null || files.length == 0) { + throw new GenericRuntimeException("No .csv files were found in: " + Sources.of(csvDir).file()); + } - } catch ( IOException e ) { - throw new RuntimeException( "Failed to read CSV preview: " + fqName, e ); + File chosen; + if (fqName != null && !fqName.isBlank()) { + chosen = Arrays.stream(files) + .filter(f -> f.getName().equals(fqName)) + .findFirst() + .orElseThrow(() -> new GenericRuntimeException("Requested CSV not found: " + fqName)); + } else { + chosen = files[0]; } - return rows; + return Sources.of(new URL(csvDir, chosen.getName())); } + + @Override public void markSelectedAttributes(List selectedPaths) { if (this.metadataRoot == null) { diff --git a/plugins/excel-adapter/src/main/java/org/polypheny/db/adapter/excel/ExcelSource.java b/plugins/excel-adapter/src/main/java/org/polypheny/db/adapter/excel/ExcelSource.java index fba4eed7be..12fad7193a 100644 --- a/plugins/excel-adapter/src/main/java/org/polypheny/db/adapter/excel/ExcelSource.java +++ b/plugins/excel-adapter/src/main/java/org/polypheny/db/adapter/excel/ExcelSource.java @@ -393,6 +393,8 @@ private void addInformationExportedColumns() { @Override public AbstractNode fetchMetadataTree() { + this.previewByTable = new LinkedHashMap<>(); + Source filePath; String firstFile = resolveFileNames().stream().findFirst().orElseThrow( () -> new GenericRuntimeException( "No file found" ) ); try { diff --git a/webui/src/main/java/org/polypheny/db/webui/Crud.java b/webui/src/main/java/org/polypheny/db/webui/Crud.java index 01b87c37e5..90a6169c5d 100644 --- a/webui/src/main/java/org/polypheny/db/webui/Crud.java +++ b/webui/src/main/java/org/polypheny/db/webui/Crud.java @@ -2306,25 +2306,19 @@ void updateSettingsForm( final Context ctx ) throws IOException, ServletExceptio return; } - String bodyJson = IOUtils.toString( - ctx.req.getPart( "body" ).getInputStream(), - StandardCharsets.UTF_8 ); + String bodyJson = IOUtils.toString( ctx.req.getPart( "body" ).getInputStream(), StandardCharsets.UTF_8 ); PreviewRequest am = HttpServer.mapper.readValue( bodyJson, PreviewRequest.class ); - - // … PreviewRequest am = … - List fileNames; String rawDir = am.getSettings().get("directory"); try { - /* Case 1: a proper JSON array */ fileNames = HttpServer.mapper.readValue( rawDir, new com.fasterxml.jackson.core.type.TypeReference>() {}); } catch (com.fasterxml.jackson.core.JsonProcessingException ex) { - /* Case 2: a single string or a comma-separated list */ String cleaned = rawDir - .replaceAll("[\\[\\]\"]", "") // strip brackets and quotes + .replaceAll("[\\[\\]\"]", "") .trim(); fileNames = Arrays.stream(cleaned.split(",")) .map(String::trim) .filter(s -> !s.isEmpty()) .toList(); } - - Map fileBytes = new HashMap<>(); for ( Part p : ctx.req.getParts() ) { if ( !"body".equals( p.getName() ) ) { try ( InputStream in = p.getInputStream() ) { fileBytes.put( p.getName(), IOUtils.toByteArray( in ) ); } } } - /* AbstractAdapterSettingDirectory 
dirSetting = - (AbstractAdapterSettingDirectory) AdapterManager - .getAdapterTemplate( am.getAdapterName(), am.getAdapterType() ) - .settings.stream() - .filter( s -> s instanceof AbstractAdapterSettingDirectory ) - .findFirst().orElseThrow();*/ - + String uniqueName = am.getUniqueName(); + String tmpName = "tmp_" + System.nanoTime(); + am.uniqueName = tmpName; String fullPath = handleUploadFiles( fileBytes, fileNames, null, am ); + am.uniqueName = uniqueName; createFormDiffs( am, fullPath ); log.error( fullPath ); ctx.result( "File(s) stored at: " + fullPath ); @@ -2366,17 +2357,16 @@ private void createFormDiffs( PreviewRequest previewRequest, String path ) { DataSource tempSource = AdapterManager.getAdapterTemplate( previewRequest.adapterName, AdapterType.SOURCE ).createEphemeral( previewRequest.settings ); - MetadataProvider tempProvider = (MetadataProvider) currentSource; - AbstractNode tempNode = currentProvider.getRoot(); + MetadataProvider tempProvider = (MetadataProvider) tempSource; + AbstractNode tempNode = tempProvider.fetchMetadataTree(); Object newPreview = tempProvider.getPreview(); - PreviewResult result = AbstractListener.buildFormChange( previewRequest.uniqueName, currentNode, tempNode, newPreview ); + PreviewResult result = AbstractListener.buildFormChange( previewRequest.uniqueName, currentNode, tempNode, newPreview, path ); currentProvider.printTree( currentNode, 0 ); tempProvider.printTree( tempNode, 0 ); try { tempSource.shutdown(); } catch (Exception ignore) {} - } From 3559cd6bc3f6ff542819a5b460aa2d47c2504721 Mon Sep 17 00:00:00 2001 From: romanost03 Date: Mon, 11 Aug 2025 15:20:38 +0200 Subject: [PATCH 64/68] Bugfix during config deletion. --- .../org/polypheny/db/ddl/DdlManagerImpl.java | 122 +++++++++--------- .../monetdb/sources/MonetdbSource.java | 1 + .../db/adapter/jdbc/MysqlSourcePlugin.java | 1 + .../source/OracleSource.java | 1 + .../postgres/source/PostgresqlSource.java | 2 + 5 files changed, 63 insertions(+), 64 deletions(-) diff --git a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java index e151f1580d..a639c3a965 100644 --- a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java +++ b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java @@ -30,6 +30,7 @@ import java.util.LinkedList; import java.util.List; import java.util.Map; +import java.util.Objects; import java.util.Optional; import java.util.Set; import java.util.function.BiFunction; @@ -251,13 +252,13 @@ record PathParts( String physicalNs, String physicalSchema, String table, String } else if ( parts.length == 3 ) { physNs = parts[0]; - physSchema = "Mappe1.xlsx"; + physSchema = parts[0] + ".xlsx"; table = parts[1]; column = parts[2].split( "\\:" )[0].toLowerCase(); } else if ( parts.length == 2 ) { physNs = parts[0]; - physSchema = "Dateieins.csv"; + physSchema = parts[0] + ".csv"; table = parts[0].toLowerCase(); column = parts[1].toLowerCase(); @@ -449,99 +450,55 @@ public void dropSourceEntities( List paths, Statement statement, String @Override public void dropSourceEntities( List paths, Statement stmt, String uniqueName ) { - DataSource adapter = AdapterManager.getInstance() .getSource( uniqueName ) .orElseThrow(); - Map settings = adapter.getSettings(); - String selectedAttributes = settings.getOrDefault( "selectedAttributes", "" ) - .replace( "[", "" ) - .replace( "]", "" ); - List currentPaths = new ArrayList<>( List.of( selectedAttributes.split( "," ) ) ); - currentPaths.removeIf( p -> Arrays.asList( 
paths ).contains( p.trim() ) ); - settings.put( "selectedAttributes", String.join( ",", currentPaths ) ); + List current = new ArrayList<>( List.of( adapter.getSettings().getOrDefault( "selectedAttributes", "" ) + .replace( "[", "" ).replace( "]", "" ).split( "," ) ) ); + current.removeIf( s -> s == null || s.isBlank() || paths.contains( s.trim() ) ); + Map settings = new HashMap<>( adapter.getSettings() ); + settings.put( "selectedAttributes", String.join( ",", current ) ); adapter.updateSettings( settings ); - record PathParts( String physNs, String physSchema, String table, String column, String original ) { - - } - - Function parse = ( raw ) -> { - String p = raw.replace( "'", "" ).trim(); - String[] parts = p.split( "\\." ); - - String physNs, physSchema, table, column; - - if ( parts.length == 4 ) { - physNs = parts[0]; - physSchema = parts[1]; - table = parts[2]; - column = parts[3]; - - } else if ( parts.length == 3 ) { - physNs = parts[0]; - physSchema = "Mappe1.xslx"; - table = parts[1]; - column = parts[2].split( "\\:" )[0].toLowerCase(); - - } else if ( parts.length == 2 ) { - physNs = parts[0]; - physSchema = "Dateieins.csv"; - table = parts[0].toLowerCase(); - column = parts[1].toLowerCase(); - - } else { - throw new GenericRuntimeException( "Invalid path '" + p + "' for adapter " + adapter.getAdapterName() ); - } - return new PathParts( physNs, physSchema, table, column, p ); - }; - - Map> work = new HashMap<>(); - + Map> work = new HashMap<>(); for ( String raw : paths ) { if ( raw == null || raw.isBlank() ) { continue; } + PathParts pp = parsePathUniversal( raw ); + work.computeIfAbsent( new TableKey( pp.physSchema(), pp.table() ), k -> new HashSet<>() ).add( pp.column() ); + } - PathParts pp = parse.apply( raw ); - String physSchema = pp.physSchema(); - String tableName = pp.table(); - String columnName = pp.column(); + for ( Map.Entry> e : work.entrySet() ) { + TableKey k = e.getKey(); + Set cols = e.getValue(); - LogicalTable table = findLogicalTableByPhysical( Catalog.defaultNamespaceId, adapter, physSchema, tableName ); + LogicalTable table = findLogicalTableByPhysical( Catalog.defaultNamespaceId, adapter, k.physSchema(), k.table() ); if ( table == null ) { - table = catalog.getSnapshot().rel().getTable( Catalog.defaultNamespaceId, tableName ).orElse( null ); + table = catalog.getSnapshot().rel().getTable( Catalog.defaultNamespaceId, k.table() ).orElse( null ); + } + if ( table == null ) { + throw new GenericRuntimeException( "Table " + k + " not found." ); } - if ( table.entityType != EntityType.SOURCE ) { throw new GenericRuntimeException( "Table " + table.name + " is not a SOURCE object." 
);
             }
 
-            work.computeIfAbsent( table, t -> new HashSet<>() ).add( columnName );
-        }
-
-        for ( Map.Entry<LogicalTable, Set<String>> e : work.entrySet() ) {
-            LogicalTable table = e.getKey();
-            Set<String> cols = e.getValue();
-
             if ( cols.contains( "*" ) ) {
                 dropWholeSourceTable( table, stmt );
                 continue;
             }
-
             for ( String col : cols ) {
                 dropSourceColumn( table, col, stmt );
-                catalog.updateSnapshot();
             }
 
             if ( catalog.getSnapshot().rel().getColumns( table.id ).isEmpty() ) {
                 dropWholeSourceTable( table, stmt );
-                catalog.updateSnapshot();
             }
+            catalog.updateSnapshot();
         }
 
-        catalog.updateSnapshot();
         stmt.getQueryProcessor().resetCaches();
         stmt.getTransaction().commit();
     }
@@ -3533,6 +3490,43 @@ private void prepareMonitoring( Statement statement, Kind kind, LogicalTable cat
     }
 
 
+    record PathParts( String physSchema, String table, String column, String original ) {
+
+    }
+
+
+    record TableKey( String physSchema, String table ) {
+
+    }
+
+
+    static String norm( String s ) {
+        return s == null ? "" : s.replace( "'", "" ).trim();
+    }
+
+
+    static PathParts parsePathUniversal( String raw ) {
+        String p = norm( raw );
+        if ( p.isBlank() ) {
+            throw new IllegalArgumentException( "Empty path!" );
+        }
+
+        String[] parts = p.split( "\\." );
+        switch ( parts.length ) {
+            case 4 -> {
+                return new PathParts( parts[1], parts[2], parts[3], p );
+            }
+            case 3 -> {
+                return new PathParts( parts[0], parts[1], parts[2], p );
+            }
+            case 2 -> {
+                return new PathParts( parts[0], parts[0], parts[1], p );
+            }
+            default -> throw new IllegalArgumentException( "Unknown path format: " + p );
+        }
+    }
+
+
     @Override
     public void dropFunction() {
         throw new GenericRuntimeException( "Not supported yet" );
diff --git a/plugins/monetdb-adapter/src/main/java/org/polypheny/db/adapter/monetdb/sources/MonetdbSource.java b/plugins/monetdb-adapter/src/main/java/org/polypheny/db/adapter/monetdb/sources/MonetdbSource.java
index 709736d1bd..949468710e 100644
--- a/plugins/monetdb-adapter/src/main/java/org/polypheny/db/adapter/monetdb/sources/MonetdbSource.java
+++ b/plugins/monetdb-adapter/src/main/java/org/polypheny/db/adapter/monetdb/sources/MonetdbSource.java
@@ -300,6 +300,7 @@ public Map<String, List<ExportedColumn>> getExportedColumns() {
 
     @Override
     public AbstractNode fetchMetadataTree() {
+        this.previewByTable = new LinkedHashMap<>();
         String dbName = settings.getOrDefault( "database", "monetdb" );
 
         Node root = new Node( "relational", dbName );
diff --git a/plugins/mysql-adapter/src/main/java/org/polypheny/db/adapter/jdbc/MysqlSourcePlugin.java b/plugins/mysql-adapter/src/main/java/org/polypheny/db/adapter/jdbc/MysqlSourcePlugin.java
index d52528d84b..b283bb912c 100644
--- a/plugins/mysql-adapter/src/main/java/org/polypheny/db/adapter/jdbc/MysqlSourcePlugin.java
+++ b/plugins/mysql-adapter/src/main/java/org/polypheny/db/adapter/jdbc/MysqlSourcePlugin.java
@@ -181,6 +181,7 @@ protected boolean requiresSchema() {
 
     @Override
     public AbstractNode fetchMetadataTree() {
+        this.previewByTable = new LinkedHashMap<>();
         String dbName = settings.get( "database" );
 
         Node root = new Node( "relational", dbName );
diff --git a/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/source/OracleSource.java b/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/source/OracleSource.java
index eb8d01a52b..f1073def68 100644
--- a/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/source/OracleSource.java
+++ b/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/source/OracleSource.java
@@ -285,6 +285,7 @@ public Map<String, List<ExportedColumn>> getExportedColumns() {
 
     @Override
     public
AbstractNode fetchMetadataTree() { + this.previewByTable = new LinkedHashMap<>(); AbstractNode root = new Node( "relational", settings.get( "database" ) ); TableFilter filter = TableFilter.forAdapter( adapterName ); diff --git a/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java b/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java index ba852579c1..16c7e80342 100644 --- a/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java +++ b/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java @@ -94,6 +94,8 @@ public void setRoot( AbstractNode root ) { @Override public AbstractNode fetchMetadataTree() { + this.previewByTable = new LinkedHashMap<>(); + String dbName = settings.get( "database" ); Node root = new Node( "relational", dbName ); From e2e328056b4c4d432a4195ce21f81a3b73d238ec Mon Sep 17 00:00:00 2001 From: romanost03 Date: Mon, 11 Aug 2025 15:45:21 +0200 Subject: [PATCH 65/68] Code cleaning and minor improvements. --- .../MetadataObserver/AbstractListener.java | 48 +++---- .../MetadataObserver/ChangeLogEntry.java | 16 ++- .../MetadataObserver/ChangeLogView.java | 88 ++++++++++++ .../MetadataObserver/MetadataHasher.java | 2 +- .../MetadataObserver/MetadataListener.java | 2 + .../MetadataObserver/MetadataPublisher.java | 4 + .../MetadataObserver/PublisherManager.java | 14 +- .../MetadataObserver/Utils/MetaAnnotator.java | 8 +- .../MetadataObserver/Utils/MetaDiffUtil.java | 8 +- .../MetadataObserver/Utils/NodeCloner.java | 29 ++-- .../db/adapter/java/AdapterTemplate.java | 14 ++ .../db/adapter/java/TableFilter.java | 7 +- .../java/org/polypheny/db/ddl/DdlManager.java | 2 +- .../org/polypheny/db/ddl/DdlManagerImpl.java | 79 +--------- .../polypheny/db/adapter/csv/CsvSource.java | 135 +++++++++--------- .../db/adapter/excel/ExcelEnumerator.java | 2 - .../db/adapter/excel/ExcelNamespace.java | 25 ++-- .../db/adapter/excel/ExcelSource.java | 4 +- .../jdbc/sources/AbstractJdbcSource.java | 2 + .../polypheny/db/adapter/json/JsonSource.java | 44 ++---- .../db/adapter/jdbc/MysqlSourcePlugin.java | 6 - .../OraclePlugin.java | 4 +- .../OracleSqlDialect.java | 5 +- .../source/OracleSource.java | 16 +-- .../postgres/source/PostgresqlSource.java | 10 +- .../java/org/polypheny/db/webui/Crud.java | 57 +++++--- .../org/polypheny/db/webui/HttpServer.java | 2 +- .../DataHandling/AttributeInfo.java | 32 ----- .../DataHandling/DatabaseInfo.java | 60 -------- .../DataHandling/SchemaInfo.java | 36 ----- .../DataHandling/TableInfo.java | 34 ----- .../db/webui/schemaDiscovery/JsonExport.java | 35 ----- .../schemaDiscovery/OracleConnection.java | 71 --------- .../schemaDiscovery/PostgreSqlConnection.java | 128 ----------------- 34 files changed, 323 insertions(+), 706 deletions(-) create mode 100644 core/src/main/java/org/polypheny/db/adapter/MetadataObserver/ChangeLogView.java delete mode 100644 webui/src/main/java/org/polypheny/db/webui/schemaDiscovery/DataHandling/AttributeInfo.java delete mode 100644 webui/src/main/java/org/polypheny/db/webui/schemaDiscovery/DataHandling/DatabaseInfo.java delete mode 100644 webui/src/main/java/org/polypheny/db/webui/schemaDiscovery/DataHandling/SchemaInfo.java delete mode 100644 webui/src/main/java/org/polypheny/db/webui/schemaDiscovery/DataHandling/TableInfo.java delete mode 100644 webui/src/main/java/org/polypheny/db/webui/schemaDiscovery/JsonExport.java delete mode 100644 
webui/src/main/java/org/polypheny/db/webui/schemaDiscovery/OracleConnection.java delete mode 100644 webui/src/main/java/org/polypheny/db/webui/schemaDiscovery/PostgreSqlConnection.java diff --git a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/AbstractListener.java b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/AbstractListener.java index 07a151fe8b..fe8a3c0179 100644 --- a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/AbstractListener.java +++ b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/AbstractListener.java @@ -30,6 +30,7 @@ import org.polypheny.db.adapter.MetadataObserver.Utils.MetaDiffUtil; import org.polypheny.db.adapter.MetadataObserver.Utils.MetaDiffUtil.DiffResult; import org.polypheny.db.adapter.java.AdapterTemplate.PreviewResult; +import org.polypheny.db.adapter.java.AdapterTemplate.PreviewResultEntry; import org.polypheny.db.schemaDiscovery.AbstractNode; import org.polypheny.db.schemaDiscovery.MetadataProvider; import org.polypheny.db.schemaDiscovery.NodeSerializer; @@ -81,29 +82,29 @@ public void onMetadataChange( P adapter, AbstractNode node, String hash ) { ChangeStatus status = NodeUtil.evaluateStatus( result, adapter.getRoot() ); - ChangeLogEntry entry = new ChangeLogEntry( adapter.getUniqueName(), Instant.now().toString(), DiffMessageUtil.toMessages( result ), status ); + ChangeLogEntry entry = new ChangeLogEntry( adapter.getUniqueName(), Instant.now(), DiffMessageUtil.toMessages( result ), status ); PublisherManager.getInstance().addChange( entry ); AbstractNode annotatedCopy = MetaAnnotator.annotateTree( adapter.getRoot(), node, result ); String json = NodeSerializer.serializeNode( annotatedCopy ).toString(); log.info( "JSON: {}", json ); - PublisherManager.getInstance().onMetadataChange( adapter.getUniqueName(), new PreviewResult( json, preview, List.of( entry ) ), status ); + PublisherManager.getInstance().onMetadataChange( adapter.getUniqueName(), new PreviewResultEntry( json, preview, List.of( entry ) ), status ); } - public static PreviewResult buildFormChange( String uniqueName, AbstractNode oldRoot, AbstractNode newRoot, Object preview, String path ) { + public static PreviewResultEntry buildFormChange( String uniqueName, AbstractNode oldRoot, AbstractNode newRoot, Object preview, String path ) { DiffResult diff = MetaDiffUtil.diff( oldRoot, newRoot ); ChangeStatus status = NodeUtil.evaluateStatus( diff, oldRoot ); - ChangeLogEntry entry = new ChangeLogEntry( uniqueName, Instant.now().toString(), DiffMessageUtil.toMessages( diff ), status ); + ChangeLogEntry entry = new ChangeLogEntry( uniqueName, Instant.now(), DiffMessageUtil.toMessages( diff ), status ); AbstractNode annotated = MetaAnnotator.annotateTree( oldRoot, newRoot, diff ); String json = NodeSerializer.serializeNode( annotated ).toString(); PublisherManager pm = PublisherManager.getInstance(); pm.addChange( entry ); - PreviewResult result = new PreviewResult( json, preview, List.of( entry ) ); + PreviewResultEntry result = new PreviewResultEntry( json, preview, List.of( entry ) ); pm.onMetadataChange( uniqueName, result, status ); pm.saveTempPath( uniqueName, path ); @@ -182,42 +183,41 @@ public static void applyFormChange( String[] metadata, String uniqueName, String } - private static void deleteTempPath(String tmpPath, String directory) { - File tmpDir = new File(tmpPath); - File targetDir = new File(directory); + private static void deleteTempPath( String tmpPath, String directory ) { + File tmpDir = new File( tmpPath ); + File 
targetDir = new File( directory ); - if (!tmpDir.exists() || !tmpDir.isDirectory()) { - throw new IllegalArgumentException("tmpPath is not a valid directory: " + tmpPath); + if ( !tmpDir.exists() || !tmpDir.isDirectory() ) { + throw new IllegalArgumentException( "tmpPath is not a valid directory: " + tmpPath ); } - if (!targetDir.exists() || !targetDir.isDirectory()) { - throw new IllegalArgumentException("directory is not a valid directory: " + directory); + if ( !targetDir.exists() || !targetDir.isDirectory() ) { + throw new IllegalArgumentException( "directory is not a valid directory: " + directory ); } - for (File file : targetDir.listFiles()) { - if (!file.delete()) { - throw new RuntimeException("Failed to delete file: " + file.getAbsolutePath()); + for ( File file : targetDir.listFiles() ) { + if ( !file.delete() ) { + throw new RuntimeException( "Failed to delete file: " + file.getAbsolutePath() ); } } - for (File file : tmpDir.listFiles()) { + for ( File file : tmpDir.listFiles() ) { try { - Files.copy(file.toPath(), new File(targetDir, file.getName()).toPath(), - StandardCopyOption.REPLACE_EXISTING); - } catch ( IOException e) { - throw new RuntimeException("Failed to copy file: " + file.getAbsolutePath(), e); + Files.copy( file.toPath(), new File( targetDir, file.getName() ).toPath(), + StandardCopyOption.REPLACE_EXISTING ); + } catch ( IOException e ) { + throw new RuntimeException( "Failed to copy file: " + file.getAbsolutePath(), e ); } } - for ( File file : tmpDir.listFiles()) { + for ( File file : tmpDir.listFiles() ) { file.delete(); } - if (!tmpDir.delete()) { - throw new RuntimeException("Failed to delete tmpPath directory: " + tmpDir.getAbsolutePath()); + if ( !tmpDir.delete() ) { + throw new RuntimeException( "Failed to delete tmpPath directory: " + tmpDir.getAbsolutePath() ); } } - @Override public boolean isAvailable() { return this.available; diff --git a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/ChangeLogEntry.java b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/ChangeLogEntry.java index 4a27d2f829..fd8d145e3f 100644 --- a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/ChangeLogEntry.java +++ b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/ChangeLogEntry.java @@ -32,30 +32,34 @@ public class ChangeLogEntry { @JsonProperty String adapterName; @JsonProperty - String timestamp; + Instant timestamp; @JsonProperty List messages; @JsonProperty ChangeStatus severity; + public class DiffMessageUtil { - private DiffMessageUtil() {} + private DiffMessageUtil() { + } + - public static List toMessages(DiffResult diff) { + public static List toMessages( DiffResult diff ) { List msgs = new ArrayList<>(); diff.getAdded() - .forEach(p -> msgs.add("Added metadata " + p)); + .forEach( p -> msgs.add( "Added metadata " + p ) ); diff.getRemoved() - .forEach(p -> msgs.add("Removed metadata " + p)); + .forEach( p -> msgs.add( "Removed metadata " + p ) ); diff.getChanged() - .forEach(p -> msgs.add("Changed metadata " + p)); + .forEach( p -> msgs.add( "Changed metadata " + p ) ); return msgs; } + } diff --git a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/ChangeLogView.java b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/ChangeLogView.java new file mode 100644 index 0000000000..ce3488e83b --- /dev/null +++ b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/ChangeLogView.java @@ -0,0 +1,88 @@ +/* + * Copyright 2019-2025 The Polypheny Project + * + * Licensed under the Apache License, 
Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.adapter.MetadataObserver; + +import com.fasterxml.jackson.annotation.JsonProperty; +import lombok.AllArgsConstructor; +import lombok.Value; +import org.polypheny.db.adapter.MetadataObserver.PublisherManager.ChangeStatus; + +import java.time.Duration; +import java.time.Instant; +import java.util.List; + +@Value +@AllArgsConstructor +public class ChangeLogView { + + @JsonProperty + String adapterName; + + @JsonProperty + String timestamp; + + @JsonProperty + List messages; + + @JsonProperty + ChangeStatus severity; + + + public static ChangeLogView from( ChangeLogEntry e ) { + return new ChangeLogView( + e.getAdapterName(), + rel( e.getTimestamp() ), + e.getMessages(), + e.getSeverity() + ); + } + + + private static String rel( Instant then ) { + long s = Duration.between( then, Instant.now() ).getSeconds(); + if ( s < 0 ) { + s = 0; + } + + if ( s < 60 ) { + return s == 1 ? "1 second ago" : s + " seconds ago"; + } + long m = s / 60; + if ( m < 60 ) { + return m == 1 ? "1 minute ago" : m + " minutes ago"; + } + long h = m / 60; + if ( h < 24 ) { + return h == 1 ? "1 hour ago" : h + " hours ago"; + } + long d = h / 24; + if ( d < 7 ) { + return d == 1 ? "1 day ago" : d + " days ago"; + } + long w = d / 7; + if ( w < 5 ) { + return w == 1 ? "1 week ago" : w + " weeks ago"; + } + long mo = d / 30; + if ( mo < 12 ) { + return mo == 1 ? "1 month ago" : mo + " months ago"; + } + long y = d / 365; + return y == 1 ? "1 year ago" : y + " years ago"; + } + +} diff --git a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/MetadataHasher.java b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/MetadataHasher.java index 931dee462f..563938540c 100644 --- a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/MetadataHasher.java +++ b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/MetadataHasher.java @@ -37,7 +37,7 @@ public MetadataHasher() { public String hash( String text ) { byte[] bytes = text.getBytes( StandardCharsets.UTF_8 ); - byte[] hash = digest.digest(bytes); + byte[] hash = digest.digest( bytes ); StringBuilder sb = new StringBuilder(); for ( byte b : hash ) { diff --git a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/MetadataListener.java b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/MetadataListener.java index a3324f510b..ad2a92ad90 100644 --- a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/MetadataListener.java +++ b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/MetadataListener.java @@ -23,7 +23,9 @@ public interface MetadataListener

{
 
     void onMetadataChange( P adapter, AbstractNode node, String hash );
 
+
     boolean isAvailable();
 
+
     void applyChange( String[] metadata );
 
 }
diff --git a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/MetadataPublisher.java b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/MetadataPublisher.java
index 3517308dd5..3f27c01bc9 100644
--- a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/MetadataPublisher.java
+++ b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/MetadataPublisher.java
@@ -19,9 +19,13 @@ public interface MetadataPublisher {
 
     String getAdapterUniqueName();
 
+
     void start();
 
+
     void stop();
 
+
     void runCheck();
 
+
     MetadataListener getListener();
 
 
diff --git a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/PublisherManager.java b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/PublisherManager.java
index ba7b7db05e..5b1a31f62a 100644
--- a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/PublisherManager.java
+++ b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/PublisherManager.java
@@ -24,6 +24,7 @@
 import org.polypheny.db.adapter.DataSource;
 import org.polypheny.db.adapter.MetadataObserver.Utils.MetaDiffUtil.DiffResult;
 import org.polypheny.db.adapter.java.AdapterTemplate.PreviewResult;
+import org.polypheny.db.adapter.java.AdapterTemplate.PreviewResultEntry;
 import org.polypheny.db.schemaDiscovery.AbstractNode;
 import org.polypheny.db.schemaDiscovery.MetadataProvider;
 import java.time.Instant;
@@ -41,13 +42,18 @@ public class PublisherManager {
     private static final int MAX_ENTRIES_PER_ADAPTER = 100;
 
     private final Map<String, MetadataPublisher> publishers = new ConcurrentHashMap<>();
-    private final Map<String, PreviewResult> changeCache = new ConcurrentHashMap<>();
+
+    // Temporarily saves the changes computed by a listener.
+    private final Map<String, PreviewResultEntry> changeCache = new ConcurrentHashMap<>();
+
+    // Delivered together with the change from the listener. Stores either status WARNING or CRITICAL.
     private final Map<String, ChangeStatus> statusCache = new ConcurrentHashMap<>();
 
     // Cache for file metadata changes. Re-uploaded Excel or CSV file paths are temporarily saved.
     private final Map<String, String> tempFileCache = new ConcurrentHashMap<>();
 
+    // History of all changes that occurred for each adapter during deploy time.
     private final ConcurrentHashMap<String, Deque<ChangeLogEntry>> changeCatalog = new ConcurrentHashMap<>();
 
     private static final PublisherManager INSTANCE = new PublisherManager();
@@ -97,13 +103,13 @@ public ChangeStatus hasChange( String uniqueName ) {
     }
 
 
-    public void onMetadataChange( String uniqueName, PreviewResult data, ChangeStatus status ) {
+    public void onMetadataChange( String uniqueName, PreviewResultEntry data, ChangeStatus status ) {
         changeCache.put( uniqueName, data );
         statusCache.put( uniqueName, status );
     }
 
 
-    public PreviewResult fetchChange( String uniqueName ) {
+    public PreviewResultEntry fetchChange( String uniqueName ) {
         return changeCache.get( uniqueName );
     }
 
@@ -141,6 +147,7 @@ public List<ChangeLogEntry> getHistory( String adapterName ) {
     }
 
 
+    // Not used yet, but it serves to keep the number of log entries per adapter from growing excessively.
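+    // A minimal sketch of how a change could be recorded and then bounded via prune()
+    // (hypothetical call site, not part of this patch):
+    //   changeCatalog.computeIfAbsent( entry.getAdapterName(), k -> new ConcurrentLinkedDeque<>() ).addFirst( entry );
+    //   prune( entry.getAdapterName() );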
    private void prune( String adapterName ) {
        Deque<ChangeLogEntry> deque = changeCatalog.get( adapterName );
        while ( deque != null && deque.size() > MAX_ENTRIES_PER_ADAPTER ) {
@@ -162,4 +169,5 @@ public String getTempPath( String uniqueName ) {
     public void deleteTempPath( String uniqueName ) {
         tempFileCache.remove( uniqueName );
     }
+
 }
diff --git a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/Utils/MetaAnnotator.java b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/Utils/MetaAnnotator.java
index a81e4bc6eb..b9eb71b059 100644
--- a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/Utils/MetaAnnotator.java
+++ b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/Utils/MetaAnnotator.java
@@ -35,9 +35,7 @@ static Map<String, AbstractNode> collect( AbstractNode node ) {
     }
 
 
-    private static void traverse(
-            AbstractNode n, String path,
-            Map<String, AbstractNode> sink ) {
+    private static void traverse( AbstractNode n, String path, Map<String, AbstractNode> sink ) {
         sink.put( path, n );
         for ( AbstractNode c : n.getChildren() ) {
             traverse( c, path + "/" + c.getName(), sink );
@@ -83,6 +81,9 @@ public static AbstractNode annotateTree( AbstractNode oldRoot, AbstractNode newR
                 .getNode( copyNew, p )
                 .ifPresent( n -> n.addProperty( "diff", DiffType.ADDED ) ) );
 
+        // TODO So far, it is not recognized when some metadata merely changed its name.
+        // TODO A rename shows up as a removal plus an addition. In the future,
+        // TODO that information could of course be used.
         /*diff.getChanged().forEach( p -> PathHelper
                 .getNode( copyNew, p )
                 .ifPresent( n -> n.addProperty( "diff", DiffType.CHANGED ) ) );*/
@@ -99,6 +100,7 @@ public static AbstractNode annotateTree( AbstractNode oldRoot, AbstractNode newR
     }
 
 
+    // Removed metadata nodes are marked with type "ghost", so the UI can recognize them.
     private static void createGhostNode( AbstractNode root, String fullPath ) {
         String[] parts = fullPath.split( "/" );
         AbstractNode current = root;
diff --git a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/Utils/MetaDiffUtil.java b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/Utils/MetaDiffUtil.java
index ab86b9aae4..4bc71e9778 100644
--- a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/Utils/MetaDiffUtil.java
+++ b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/Utils/MetaDiffUtil.java
@@ -55,6 +55,8 @@ public String toString() {
 
     @EqualsAndHashCode
     @RequiredArgsConstructor(staticName = "of")
+    // Instead of comparing every variable of a node, a hash (fingerprint) is created for every node.
+    // It is used for the comparison.
     public static class Fingerprint {
 
         private final String type;
@@ -100,11 +102,7 @@ public static DiffResult diff( AbstractNode oldRoot, AbstractNode newRoot ) {
     }
 
 
-    private static void collect(
-            AbstractNode node,
-            String parentPath,
-            Map<String, Fingerprint> sink ) {
-
+    private static void collect( AbstractNode node, String parentPath, Map<String, Fingerprint> sink ) {
        String path = parentPath.isEmpty() ?
node.getName() : parentPath + "/" + node.getName(); diff --git a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/Utils/NodeCloner.java b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/Utils/NodeCloner.java index 60bac92f8c..2aa4250386 100644 --- a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/Utils/NodeCloner.java +++ b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/Utils/NodeCloner.java @@ -24,31 +24,36 @@ import java.util.List; public class NodeCloner { - private NodeCloner() {} - public static AbstractNode deepCopy(AbstractNode node) { - return copyNode(node); + private NodeCloner() { } - private static AbstractNode copyNode(AbstractNode n) { + + public static AbstractNode deepCopy( AbstractNode node ) { + return copyNode( node ); + } + + + private static AbstractNode copyNode( AbstractNode n ) { AbstractNode clone; - if (n instanceof AttributeNode a) { - AttributeNode c = new AttributeNode(a.getType(), a.getName()); - c.setSelected(a.isSelected()); + if ( n instanceof AttributeNode a ) { + AttributeNode c = new AttributeNode( a.getType(), a.getName() ); + c.setSelected( a.isSelected() ); clone = c; } else { - clone = new Node(n.getType(), n.getName()); + clone = new Node( n.getType(), n.getName() ); } - clone.setProperties(new HashMap<>(n.getProperties())); + clone.setProperties( new HashMap<>( n.getProperties() ) ); List clonedChildren = new ArrayList<>(); - for (AbstractNode child : n.getChildren()) { - clonedChildren.add(copyNode(child)); + for ( AbstractNode child : n.getChildren() ) { + clonedChildren.add( copyNode( child ) ); } - clone.setChildren(clonedChildren); + clone.setChildren( clonedChildren ); return clone; } + } diff --git a/core/src/main/java/org/polypheny/db/adapter/java/AdapterTemplate.java b/core/src/main/java/org/polypheny/db/adapter/java/AdapterTemplate.java index d3e69592e6..bbca38dc76 100644 --- a/core/src/main/java/org/polypheny/db/adapter/java/AdapterTemplate.java +++ b/core/src/main/java/org/polypheny/db/adapter/java/AdapterTemplate.java @@ -34,6 +34,7 @@ import org.polypheny.db.adapter.DeployMode; import org.polypheny.db.adapter.DeployMode.DeploySetting; import org.polypheny.db.adapter.MetadataObserver.ChangeLogEntry; +import org.polypheny.db.adapter.MetadataObserver.ChangeLogView; import org.polypheny.db.adapter.MetadataObserver.MetadataHasher; import org.polypheny.db.adapter.annotations.AdapterProperties; import org.polypheny.db.catalog.entity.LogicalAdapter.AdapterType; @@ -154,6 +155,19 @@ public PreviewResult preview( Map settings, int limit ) { @Value public static class PreviewResult { + @JsonProperty + String metadata; + @JsonProperty + Object preview; + @JsonProperty + List history; + + } + + + @Value + public static class PreviewResultEntry { + @JsonProperty String metadata; @JsonProperty diff --git a/core/src/main/java/org/polypheny/db/adapter/java/TableFilter.java b/core/src/main/java/org/polypheny/db/adapter/java/TableFilter.java index bd83895c88..356df33ee9 100644 --- a/core/src/main/java/org/polypheny/db/adapter/java/TableFilter.java +++ b/core/src/main/java/org/polypheny/db/adapter/java/TableFilter.java @@ -50,13 +50,12 @@ public static TableFilter forAdapter( String adapterName ) { } - public boolean shouldIgnore(String tableName) { + public boolean shouldIgnore( String tableName ) { String upper = tableName.toUpperCase(); return ignoredTables.stream() - .map(String::toUpperCase) - .anyMatch(upper::startsWith); + .map( String::toUpperCase ) + .anyMatch( upper::startsWith ); } - } diff 
--git a/core/src/main/java/org/polypheny/db/ddl/DdlManager.java b/core/src/main/java/org/polypheny/db/ddl/DdlManager.java index 6f1e1757d9..34dc1a02a9 100644 --- a/core/src/main/java/org/polypheny/db/ddl/DdlManager.java +++ b/core/src/main/java/org/polypheny/db/ddl/DdlManager.java @@ -559,7 +559,7 @@ public static DdlManager getInstance() { public abstract void addSelectedMetadata( Transaction tsx, Statement statement, String uniqueName, long namespace, List selectedPaths ); - public abstract void dropSourceEntities( List paths, Statement statement, String uniqueName ); + public abstract void removeSelectedMetadata( List paths, Statement statement, String uniqueName ); public abstract void dropCollection( LogicalCollection catalogCollection, Statement statement ); diff --git a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java index a639c3a965..410782db8f 100644 --- a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java +++ b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java @@ -371,85 +371,8 @@ private LogicalTable findLogicalTableByPhysical( long namespace, DataSource a } - /* @Override - public void dropSourceEntities( List paths, Statement statement, String uniqueName ) { - - DataSource adapter = AdapterManager.getInstance().getSource( uniqueName ).orElseThrow(); - Map settings = adapter.getSettings(); - - String selectedAttributes = settings.get( "selectedAttributes" ); - selectedAttributes = selectedAttributes.replace( "[", "" ).replace( "]", "" ); - List currentPaths = new ArrayList<>( List.of( selectedAttributes.split( "," ) ) ); - currentPaths.removeIf( path -> paths.contains( path.trim() ) ); - - String newPaths = String.join( ",", currentPaths ); - settings.put( "selectedAttributes", newPaths ); - adapter.updateSettings( settings ); - - Map> worklist = new HashMap<>(); - - for ( String raw : paths ) { - String path = raw.replace( "'", "" ).trim(); - if ( path.isBlank() ) { - continue; - } - - String[] seg = path.split( "\\." ); - if ( seg.length < 2 ) { - throw new GenericRuntimeException( "UngΓΌltiger Pfad: " + path ); - } - - String columnName = (seg.length >= 3) ? seg[seg.length - 1] : "*"; - String tableName = seg[seg.length - 2]; - String schemaName = seg[seg.length - 3]; - - String schemaPath = String.join( ".", - Arrays.copyOf( seg, seg.length - (columnName.equals( "*" ) ? 1 : 2) ) ); - - LogicalNamespace ns = catalog.getSnapshot() - .getNamespace( Catalog.DEFAULT_NAMESPACE_NAME ) - .orElseThrow( () -> new GenericRuntimeException( - "Logisches Namespace 'public' nicht gefunden." ) ); - - LogicalTable table = findLogicalTableByPhysical( Catalog.defaultNamespaceId, adapter, schemaName, tableName ); - - if ( table.entityType != EntityType.SOURCE ) { - throw new GenericRuntimeException( "Tabelle " + table.name + - " ist kein SOURCE-Objekt." 
); - } - - worklist.computeIfAbsent( table, t -> new HashSet<>() ) - .add( columnName ); - } - - for ( Map.Entry> entry : worklist.entrySet() ) { - LogicalTable table = entry.getKey(); - Set toDrop = entry.getValue(); - - if ( toDrop.contains( "*" ) ) { - dropWholeSourceTable( table, statement ); - continue; - } - - for ( String col : toDrop ) { - dropSourceColumn( table, col, statement ); - catalog.updateSnapshot(); - } - - if ( catalog.getSnapshot().rel().getColumns( table.id ).isEmpty() ) { - dropWholeSourceTable( table, statement ); - catalog.updateSnapshot(); - } - } - - catalog.updateSnapshot(); - statement.getQueryProcessor().resetCaches(); - statement.getTransaction().commit(); - }*/ - - @Override - public void dropSourceEntities( List paths, Statement stmt, String uniqueName ) { + public void removeSelectedMetadata( List paths, Statement stmt, String uniqueName ) { DataSource adapter = AdapterManager.getInstance() .getSource( uniqueName ) .orElseThrow(); diff --git a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSource.java b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSource.java index 511e23dfd0..687b56e3c1 100644 --- a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSource.java +++ b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSource.java @@ -389,44 +389,42 @@ public void renameLogicalColumn( long id, String newColumnName ) { @Override public AbstractNode fetchMetadataTree() { this.previewByTable = new HashMap<>(); - try { - Source src = openCsvSource(null); + Source src = openCsvSource( null ); String fileName = src.file().getName(); - String baseName = fileName.replaceFirst("\\.csv(\\.gz)?$", ""); - AbstractNode rootNode = new Node("csv", baseName); + String baseName = fileName.replaceFirst( "\\.csv(\\.gz)?$", "" ); + AbstractNode rootNode = new Node( "csv", baseName ); - try (BufferedReader reader = new BufferedReader(src.reader())) { + try ( BufferedReader reader = new BufferedReader( src.reader() ) ) { String headerLine = reader.readLine(); - if (headerLine == null) { - throw new RuntimeException("No header line found in " + fileName); + if ( headerLine == null ) { + throw new RuntimeException( "No header line found in " + fileName ); } - String[] rawColumns = headerLine.split(","); - for (String colRaw : rawColumns) { - String[] split = colRaw.split(":"); - String name = split[0].trim().replaceAll("[^a-zA-Z0-9_]", ""); + String[] rawColumns = headerLine.split( "," ); + for ( String colRaw : rawColumns ) { + String[] split = colRaw.split( ":" ); + String name = split[0].trim().replaceAll( "[^a-zA-Z0-9_]", "" ); String type = split.length > 1 ? 
split[1].trim() : "string"; - AbstractNode columnNode = new AttributeNode("column", name); - columnNode.addProperty("type", mapCsvType(type)); - columnNode.addProperty("nullable", true); - rootNode.addChild(columnNode); + AbstractNode columnNode = new AttributeNode( "column", name ); + columnNode.addProperty( "type", mapCsvType( type ) ); + columnNode.addProperty( "nullable", true ); + rootNode.addChild( columnNode ); } } - List> preview = fetchPreview(null, fileName, 10); - this.previewByTable.put(fileName, preview); + List> preview = fetchPreview( null, fileName, 10 ); + this.previewByTable.put( fileName, preview ); return rootNode; - } catch (IOException e) { - throw new RuntimeException("Failed to parse CSV metadata", e); + } catch ( IOException e ) { + throw new RuntimeException( "Failed to parse CSV metadata", e ); } } - private String mapCsvType( String rawType ) { switch ( rawType ) { case "int": @@ -455,114 +453,111 @@ private String mapCsvType( String rawType ) { @Override - public List> fetchPreview(Connection conn, String fqName, int limit) { + public List> fetchPreview( Connection conn, String fqName, int limit ) { try { - Source src = openCsvSource(fqName); + Source src = openCsvSource( fqName ); List> rows = new ArrayList<>(); - try (BufferedReader reader = new BufferedReader(src.reader())) { + try ( BufferedReader reader = new BufferedReader( src.reader() ) ) { String headerLine = reader.readLine(); - if (headerLine == null) { + if ( headerLine == null ) { return List.of(); } - String[] headerParts = headerLine.split(","); - List colNames = new ArrayList<>(headerParts.length); - for (String raw : headerParts) { - String[] split = raw.split(":"); - colNames.add(split[0].trim()); + String[] headerParts = headerLine.split( "," ); + List colNames = new ArrayList<>( headerParts.length ); + for ( String raw : headerParts ) { + String[] split = raw.split( ":" ); + colNames.add( split[0].trim() ); } String line; int count = 0; - while ((line = reader.readLine()) != null && count < limit) { - String[] values = line.split(",", -1); + while ( (line = reader.readLine()) != null && count < limit ) { + String[] values = line.split( ",", -1 ); Map row = new LinkedHashMap<>(); - for (int i = 0; i < colNames.size(); i++) { + for ( int i = 0; i < colNames.size(); i++ ) { String value = i < values.length ? values[i].trim() : null; - row.put(colNames.get(i), value); + row.put( colNames.get( i ), value ); } - rows.add(row); + rows.add( row ); count++; } } return rows; - } catch (IOException e) { - throw new RuntimeException("Failed to read CSV preview: " + fqName, e); + } catch ( IOException e ) { + throw new RuntimeException( "Failed to read CSV preview: " + fqName, e ); } } - private Source openCsvSource(@Nullable String fqName) throws IOException { - if (csvDir.getProtocol().equals("jar")) { - if (fqName == null || fqName.isBlank()) { - throw new GenericRuntimeException("fqName required when using jar protocol for CSV."); + private Source openCsvSource( @Nullable String fqName ) throws IOException { + if ( csvDir.getProtocol().equals( "jar" ) ) { + if ( fqName == null || fqName.isBlank() ) { + throw new GenericRuntimeException( "fqName required when using jar protocol for CSV." 
);
            }
-            return Sources.of(new URL(csvDir, fqName));
+            return Sources.of( new URL( csvDir, fqName ) );
        }
 
-        if (Sources.of(csvDir).file().isFile()) {
-            return Sources.of(csvDir);
+        if ( Sources.of( csvDir ).file().isFile() ) {
+            return Sources.of( csvDir );
        }
 
-        File[] files = Sources.of(csvDir)
+        File[] files = Sources.of( csvDir )
                .file()
-                .listFiles((d, name) -> name.endsWith(".csv") || name.endsWith(".csv.gz"));
-        if (files == null || files.length == 0) {
-            throw new GenericRuntimeException("No .csv files were found in: " + Sources.of(csvDir).file());
+                .listFiles( ( d, name ) -> name.endsWith( ".csv" ) || name.endsWith( ".csv.gz" ) );
+        if ( files == null || files.length == 0 ) {
+            throw new GenericRuntimeException( "No .csv files were found in: " + Sources.of( csvDir ).file() );
        }
 
        File chosen;
-        if (fqName != null && !fqName.isBlank()) {
-            chosen = Arrays.stream(files)
-                    .filter(f -> f.getName().equals(fqName))
+        if ( fqName != null && !fqName.isBlank() ) {
+            chosen = Arrays.stream( files )
+                    .filter( f -> f.getName().equals( fqName ) )
                    .findFirst()
-                    .orElseThrow(() -> new GenericRuntimeException("Requested CSV not found: " + fqName));
+                    .orElseThrow( () -> new GenericRuntimeException( "Requested CSV not found: " + fqName ) );
        } else {
            chosen = files[0];
        }
 
        return Sources.of( new URL( csvDir, chosen.getName() ) );
    }
 
-
-
    @Override
-    public void markSelectedAttributes(List selectedPaths) {
-        if (this.metadataRoot == null) {
-            log.warn("⚠️ Kein Metadatenbaum vorhanden – kann Attribute nicht markieren.");
+    public void markSelectedAttributes( List<String> selectedPaths ) {
+        if ( this.metadataRoot == null ) {
+            log.warn( "⚠️ No metadata tree available – cannot mark attributes." );
            return;
        }
 
-        for (String path : selectedPaths) {
-            int lastDot = path.lastIndexOf('.');
-            if (lastDot == -1 || lastDot == path.length() - 1) {
-                log.warn("⚠️ Kein gültiger Attribut-Pfad: " + path);
+        for ( String path : selectedPaths ) {
+            int lastDot = path.lastIndexOf( '.' );
+            if ( lastDot == -1 || lastDot == path.length() - 1 ) {
+                log.warn( "⚠️ No valid attribute path: " + path );
                continue;
            }
 
-            String columnName = path.substring(lastDot + 1);
-            String normalizedColumnName = columnName.replaceAll("[^a-zA-Z0-9_]", "");
+            String columnName = path.substring( lastDot + 1 );
+            String normalizedColumnName = columnName.replaceAll( "[^a-zA-Z0-9_]", "" );
 
            Optional<AbstractNode> attrOpt = metadataRoot.getChildren().stream()
-                    .filter(child -> child instanceof AttributeNode
-                            && child.getName().equals(normalizedColumnName))
+                    .filter( child -> child instanceof AttributeNode
+                            && child.getName().equals( normalizedColumnName ) )
                    .findFirst();
 
-            if (attrOpt.isPresent()) {
-                ((AttributeNode) attrOpt.get()).setSelected(true);
-                log.info("✅ Attribut gesetzt: " + path);
+            if ( attrOpt.isPresent() ) {
+                ((AttributeNode) attrOpt.get()).setSelected( true );
+                log.info( "✅ Attribute set: " + path );
            } else {
-                log.warn("❌ Attribut nicht gefunden: " + normalizedColumnName + " im Pfad: " + path);
+                log.warn( "❌ Attribute not found: " + normalizedColumnName + " in path: " + path );
            }
        }
    }
 
-
    @Override
    public void printTree( AbstractNode node, int depth ) {
        if ( node == null ) {
diff --git a/plugins/excel-adapter/src/main/java/org/polypheny/db/adapter/excel/ExcelEnumerator.java b/plugins/excel-adapter/src/main/java/org/polypheny/db/adapter/excel/ExcelEnumerator.java
index 42b465a3c7..ba63486143 100644
--- a/plugins/excel-adapter/src/main/java/org/polypheny/db/adapter/excel/ExcelEnumerator.java
+++ b/plugins/excel-adapter/src/main/java/org/polypheny/db/adapter/excel/ExcelEnumerator.java
@@ -354,7 +354,6 @@ protected PolyValue convert( ExcelFieldType fieldType, Cell cell ) {
             return PolyNull.NULL;
         }
         try {
-
             switch ( fieldType ) {
                 case BOOLEAN:
                     return PolyBoolean.of( cell.getBooleanCellValue() );
@@ -392,7 +391,6 @@ protected PolyValue convert( ExcelFieldType fieldType, Cell cell ) {
                     throw new GenericRuntimeException( "Could not read the date field from the document." );
                 }
             case TIME:
-
                 try {
                     Date date = TIME_FORMAT_TIME.parse( cell
                             .getStringCellValue() );
diff --git a/plugins/excel-adapter/src/main/java/org/polypheny/db/adapter/excel/ExcelNamespace.java b/plugins/excel-adapter/src/main/java/org/polypheny/db/adapter/excel/ExcelNamespace.java
index 09651290e6..b0cfffee3b 100644
--- a/plugins/excel-adapter/src/main/java/org/polypheny/db/adapter/excel/ExcelNamespace.java
+++ b/plugins/excel-adapter/src/main/java/org/polypheny/db/adapter/excel/ExcelNamespace.java
@@ -68,25 +68,20 @@ public ExcelNamespace( long id, long adapterId, URL directoryUrl, ExcelTable.Fla
     }
 
 
-    public ExcelTable createExcelTable( PhysicalTable table,
-            ExcelSource excelSource,
-            List<Integer> physicalIds ) {
+    public ExcelTable createExcelTable( PhysicalTable table, ExcelSource excelSource, List<Integer> physicalIds ) {
 
-        /* -------- Basis -------- */
         final AlgDataTypeFactory typeFactory = new PolyTypeFactoryImpl( AlgDataTypeSystem.DEFAULT );
 
-        // physicalIds SO wie sie ankommen (0-basierte Excel-Positionen)
+        // The physical ids are off by one, so each has to be mapped back by one index.
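+        // For example (hypothetical values, not taken from this patch): physicalIds
+        // [1, 2, 4] select the 0-based sheet positions [0, 1, 3].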
        int[] fields = physicalIds.stream()
                .mapToInt( i -> i - 1 )
                .toArray();
 
-        /* -------- Column-Lookup (global) -------- */
        Map<Integer, PhysicalColumn> byPosition = new HashMap<>();
        for ( PhysicalColumn c : table.columns ) {
-            byPosition.put( c.position, c ); // 0-basierte Sheet-Position
+            byPosition.put( c.position, c );
        }
 
-        /* -------- Metadaten der ausgewählten Spalten aufbauen -------- */
        final AlgDataTypeFactory.Builder fieldInfo = typeFactory.builder();
        List<ExcelFieldType> fieldTypes = new ArrayList<>();
@@ -102,17 +97,14 @@ public ExcelTable createExcelTable( PhysicalTable table,
                 column.scale,
                 null );
 
-        fieldInfo.add( column.id, column.name, column.name, sqlType )
-                .nullable( column.nullable );
-
+        fieldInfo.add( column.id, column.name, column.name, sqlType ).nullable( column.nullable );
         fieldTypes.add( ExcelFieldType.getExcelFieldType( column.type ) );
     }
 
-        /* -------- Datei- und Sheet-Namen ableiten -------- */
-        String[] parts     = table.name.split( "_", 2 );
-        String   filePart  = parts[0];
-        String   sheetPart = parts.length > 1 ? parts[1] : "";
-        String   excelName = filePart + ".xlsx";
+        String[] parts = table.name.split( "_", 2 );
+        String filePart = parts[0];
+        String sheetPart = parts.length > 1 ? parts[1] : "";
+        String excelName = filePart + ".xlsx";
        this.sheet = sheetPart;
 
        Source source;
@@ -122,7 +114,6 @@ public ExcelTable createExcelTable( PhysicalTable table,
             throw new GenericRuntimeException( e );
         }
 
-        /* -------- Physische Tabelle registrieren -------- */
         ExcelTable physical = createTable(
                 table,
                 source,
                 AlgDataTypeImpl.proto( fieldInfo.build() ),
diff --git a/plugins/excel-adapter/src/main/java/org/polypheny/db/adapter/excel/ExcelSource.java b/plugins/excel-adapter/src/main/java/org/polypheny/db/adapter/excel/ExcelSource.java
index 12fad7193a..c742bb24f2 100644
--- a/plugins/excel-adapter/src/main/java/org/polypheny/db/adapter/excel/ExcelSource.java
+++ b/plugins/excel-adapter/src/main/java/org/polypheny/db/adapter/excel/ExcelSource.java
@@ -395,6 +395,7 @@ public AbstractNode fetchMetadataTree() {
 
         this.previewByTable = new LinkedHashMap<>();
 
+        // Assuming that only one file was uploaded.
         Source filePath;
         String firstFile = resolveFileNames().stream().findFirst().orElseThrow( () -> new GenericRuntimeException( "No file found" ) );
         try {
@@ -408,7 +409,6 @@ public AbstractNode fetchMetadataTree() {
         try ( FileInputStream fis = new FileInputStream( filePath.path() ); Workbook wb = WorkbookFactory.create( fis ) ) {
             for ( Sheet sheet : wb ) {
-
                 String sheetName = sheet.getSheetName();
                 AbstractNode sheetNode = new Node( "sheet", mappeName.toLowerCase() + "_" + sheetName );
@@ -490,6 +490,7 @@ private String getCellValueAsString( Cell cell, String fallback ) {
     }
 
 
+
     @Override
     public List<Map<String, Object>> fetchPreview( Connection conn, String fqName, int limit ) {
 
@@ -518,6 +519,7 @@ public List<Map<String, Object>> fetchPreview( Connection conn, String fqName, i
                 return List.of();
             }
 
+            // All columns in the Excel sheet have to start at the leftmost position. Blank header cells are named COL_i.
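+            // For example (hypothetical): a header row [ "name", <blank>, "age" ] yields
+            // the column names [ "name", "COL_2", "age" ] via the fallback below.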
            List<String> colNames = new ArrayList<>();
            for ( int c = 0; c < header.getLastCellNum(); c++ ) {
                colNames.add( getCellValueAsString( header.getCell( c ), "COL_" + (c + 1) ) );
diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/sources/AbstractJdbcSource.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/sources/AbstractJdbcSource.java
index ef9d2679dd..066bb6df93 100644
--- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/sources/AbstractJdbcSource.java
+++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/sources/AbstractJdbcSource.java
@@ -216,6 +216,7 @@ public Map<String, List<ExportedColumn>> getExportedColumns() {
         Connection connection = null;
         ConnectionHandler connectionHandler = null;
 
+        // Use random PUID to prevent usage of an expired snapshot of the transaction identifier.
         PolyXid xid = PolyXid.generateLocalTransactionIdentifier( PUID.randomPUID( Type.RANDOM ), PUID.randomPUID( Type.RANDOM ) );
         try {
             connectionHandler = connectionFactory.getOrCreateConnectionHandler( xid );
@@ -229,6 +230,7 @@ public Map<String, List<ExportedColumn>> getExportedColumns() {
             log.error( "Entry: {} = {}", entry.getKey(), entry.getValue() );
         }
 
+        // TODO: If-else to possibly allow using the old "tables" setting instead of selected metadata. Not implemented yet.
         if ( !settings.containsKey( "selectedAttributes" ) || settings.get( "selectedAttributes" ).equals( "" ) || settings.get( "selectedAttributes" ).isEmpty() || settings.get( "selectedAttributes" ) == null ) {
             tables = settings.get( "tables" ).split( "," );
         } else {
diff --git a/plugins/json-adapter/src/main/java/org/polypheny/db/adapter/json/JsonSource.java b/plugins/json-adapter/src/main/java/org/polypheny/db/adapter/json/JsonSource.java
index a922391797..c23be1df6d 100644
--- a/plugins/json-adapter/src/main/java/org/polypheny/db/adapter/json/JsonSource.java
+++ b/plugins/json-adapter/src/main/java/org/polypheny/db/adapter/json/JsonSource.java
@@ -291,13 +291,13 @@ public AbstractNode fetchMetadataTree() {
         } catch ( Exception ex ) {
             throw new RuntimeException( "Failed to build metadata tree for JSON", ex );
         }
-
         preview.put( "jsonPreview", List.of( Map.of( "metadata", "rootNode", "preview", root ) ) );
 
         return root;
     }
 
 
+    // The preview itself is built within the metadata tree, so a separate preview is not necessary.
     @Override
     public List<Map<String, Object>> fetchPreview( Connection ignored, String fqName, int limit ) {
         return null;
@@ -305,36 +305,26 @@ public List<Map<String, Object>> fetchPreview( Connection ignored, String fqName
 
 
     private void buildTreeRecursively( JsonNode current, AbstractNode parent, String jsonPath, String nodeName ) {
+
+        /* ───────────── Json-Object ────────────── */
         if ( current.isObject() ) {
             boolean isCard = parent != null && "array".equals( parent.getType() );
 
-            AbstractNode obj = new DocumentObjectNode(
-                    nodeName, // Anzeigename
-                    jsonPath, // vollständiger Pfad
-                    isCard // cardCandidate-Flag
-            );
+            AbstractNode obj = new DocumentObjectNode( nodeName, jsonPath, isCard );
             parent.addChild( obj );
 
             current.fields().forEachRemaining( e ->
-                    buildTreeRecursively(
-                            e.getValue(), // Kind-JsonNode
-                            obj, // neues Parent
-                            jsonPath + "." + e.getKey(), // Pfad erweitern
-                            e.getKey() // Kind-Name
-                    )
-            );
+                    buildTreeRecursively( e.getValue(), obj, jsonPath + "." + e.getKey(), e.getKey() ) );
            return;
        }
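+        // Illustration (hypothetical input, not from this patch): {"user": {"ids": [7, 9]}}
+        // becomes object "user" → array "ids" → value nodes "idx0" and "idx1".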
 
        /* ───────────── Json-Array ────────────── */
        if ( current.isArray() ) {
            AbstractNode arr = new DocumentArrayNode( nodeName, jsonPath );
            parent.addChild( arr );
 
            int idx = 0;
            for ( JsonNode element : current ) {
-
-                /* sprechender Name für Array-Element */
                String childName = "idx" + idx;
                if ( element.isObject() ) {
                    if ( element.has( "id" ) ) {
@@ -346,29 +336,18 @@ private void buildTreeRecursively( JsonNode current, AbstractNode parent, String
                     }
                 }
 
-                buildTreeRecursively(
-                        element,
-                        arr,
-                        jsonPath + "[" + idx + "]",
-                        childName
-                );
+                buildTreeRecursively( element, arr, jsonPath + "[" + idx + "]", childName );
                 idx++;
             }
             return;
         }
 
-        /* ───────────── PRIMITIVER WERT ───────── */
-        String valueType = detectType( current ); // string | number | …
+        /* ───────────── Primitive value ───────── */
+        String valueType = detectType( current );
         Object sample = current.isNull() ? null : current.asText();
 
-        AbstractNode val = new DocumentValueNode(
-                nodeName,
-                jsonPath,
-                valueType,
-                sample
-        );
+        AbstractNode val = new DocumentValueNode( nodeName, jsonPath, valueType, sample );
         parent.addChild( val );
-
     }
@@ -389,6 +368,7 @@ private static String detectType( JsonNode n ) {
     }
 
 
+    // TODO Implement once preview and deploy work.
     @Override
     public void markSelectedAttributes( List<String> selectedPaths ) {
 
@@ -400,7 +380,7 @@ public void printTree( AbstractNode node, int depth ) {
         if ( node == null ) {
             node = this.metadataRoot;
         }
-        System.out.println("Node type:" + node.toString());
+        System.out.println( "Node type: " + node );
         System.out.println( " ".repeat( depth ) + node.getType() + ": " + node.getName() );
         for ( Map.Entry<String, Object> entry : node.getProperties().entrySet() ) {
             System.out.println( " ".repeat( depth + 1 ) + "- " + entry.getKey() + ": " + entry.getValue() );
diff --git a/plugins/mysql-adapter/src/main/java/org/polypheny/db/adapter/jdbc/MysqlSourcePlugin.java b/plugins/mysql-adapter/src/main/java/org/polypheny/db/adapter/jdbc/MysqlSourcePlugin.java
index b283bb912c..07da5873c1 100644
--- a/plugins/mysql-adapter/src/main/java/org/polypheny/db/adapter/jdbc/MysqlSourcePlugin.java
+++ b/plugins/mysql-adapter/src/main/java/org/polypheny/db/adapter/jdbc/MysqlSourcePlugin.java
@@ -113,8 +113,6 @@ public void stop() {
         description = "Maximum number of concurrent JDBC connections.")
 @AdapterSettingList(name = "transactionIsolation", options = { "SERIALIZABLE", "READ_UNCOMMITTED", "READ_COMMITTED", "REPEATABLE_READ" }, defaultValue = "SERIALIZABLE",
         description = "Which level of transaction isolation should be used.")
-@AdapterSettingString(name = "tables", defaultValue = "test.testtable",
-        description = "List of tables which should be imported.
The names must to be separated by a comma.") public static class MysqlSource extends AbstractJdbcSource implements MetadataProvider { public AbstractNode metadataRoot; @@ -202,7 +200,6 @@ public AbstractNode fetchMetadataTree() { try ( ResultSet schemas = meta.getCatalogs() ) { while ( schemas.next() ) { - String schemaName = schemas.getString( "TABLE_CAT" ); if ( filter.ignoredSchemas.contains( schemaName ) ) { @@ -218,7 +215,6 @@ public AbstractNode fetchMetadataTree() { new String[]{ "TABLE" } ) ) { while ( tables.next() ) { - String tableName = tables.getString( "TABLE_NAME" ); if ( tableFilter.shouldIgnore( tableName ) ) { @@ -257,7 +253,6 @@ public AbstractNode fetchMetadataTree() { tableName, "%" ) ) { while ( cols.next() ) { - String colName = cols.getString( "COLUMN_NAME" ); String typeName = cols.getString( "TYPE_NAME" ); boolean nullable = cols.getInt( "NULLABLE" ) == DatabaseMetaData.columnNullable; @@ -310,7 +305,6 @@ public List> fetchPreview( Connection conn, String fqName, i List> rows = new ArrayList<>(); try ( Statement stmt = conn.createStatement(); ResultSet rs = stmt.executeQuery( "SELECT * FROM " + fqName + " LIMIT " + limit ) ) { - ResultSetMetaData meta = rs.getMetaData(); while ( rs.next() ) { Map row = new LinkedHashMap<>(); diff --git a/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/OraclePlugin.java b/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/OraclePlugin.java index eab1015fc5..60896a926b 100644 --- a/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/OraclePlugin.java +++ b/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/OraclePlugin.java @@ -37,7 +37,7 @@ public class OraclePlugin extends PolyPlugin { @Override public void afterCatalogInit() { - // SqlDialectRegistry.registerDialect( "Oracle", OracleSqlDialect.DEFAULT ); // TODO: Dialect might not be necessary. + // SqlDialectRegistry.registerDialect( "Oracle", OracleSqlDialect.DEFAULT ); this.sourceId = AdapterManager.addAdapterTemplate( OracleSource.class, ADAPTER_NAME, OracleSource::new ); // this.storeId = AdapterManager.addAdapterTemplate( OracleStore.class, ADAPTER_NAME, OracleStore::new ); } @@ -45,7 +45,7 @@ public void afterCatalogInit() { @Override public void stop() { - SqlDialectRegistry.unregisterDialect( "Oracle" ); // TODO: if dialect is not necessary, unregistering dialect is redundant. + SqlDialectRegistry.unregisterDialect( "Oracle" ); AdapterManager.removeAdapterTemplate( this.sourceId ); // AdapterManager.removeAdapterTemplate( this.storeId ); } diff --git a/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/OracleSqlDialect.java b/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/OracleSqlDialect.java index a9da404e6d..c962237e68 100644 --- a/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/OracleSqlDialect.java +++ b/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/OracleSqlDialect.java @@ -34,10 +34,7 @@ public class OracleSqlDialect extends SqlDialect { new AlgDataTypeSystemImpl() { @Override public int getMaxPrecision( PolyType typeName ) { - if ( Objects.requireNonNull( typeName ) == PolyType.VARCHAR ) {// From htup_details.h in postgresql: - // MaxAttrSize is a somewhat arbitrary upper limit on the declared size of data fields of char(n) and similar types. It need not have anything - // directly to do with the *actual* upper limit of varlena values, which is currently 1Gb (see TOAST structures in postgres.h). 
I've set it
-                    // at 10Mb which seems like a reasonable number --- tgl 8/6/00.
+                if ( Objects.requireNonNull( typeName ) == PolyType.VARCHAR ) {
                     return 10 * 1024 * 1024;
                 }
                 return super.getMaxPrecision( typeName );
diff --git a/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/source/OracleSource.java b/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/source/OracleSource.java
index f1073def68..6b34e39e92 100644
--- a/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/source/OracleSource.java
+++ b/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/source/OracleSource.java
@@ -55,11 +55,9 @@
 import java.util.HashSet;
 import java.util.LinkedHashMap;
 import java.util.List;
-import java.util.Locale;
 import java.util.Map;
 import java.util.Optional;
 import java.util.Set;
-import java.util.regex.Pattern;
 import java.util.stream.Collectors;
 
 @Slf4j
@@ -82,8 +80,6 @@
         description = "Maximum number of concurrent connections.")
 @AdapterSettingList(name = "transactionIsolation", options = { "SERIALIZABLE", "READ_UNCOMMITTED", "READ_COMMITTED", "REPEATABLE_READ" }, defaultValue = "SERIALIZABLE",
         description = "Which level of transaction isolation should be used.")
-@AdapterSettingString(name = "tables", defaultValue = "foo,bar",
-        description = "List of tables which should be imported. The names must be separated by a comma.")
 public class OracleSource extends AbstractJdbcSource implements MetadataProvider {
 
     public AbstractNode metadataRoot;
@@ -160,6 +156,7 @@ public Map<String, List<ExportedColumn>> getExportedColumns() {
         Connection connection = null;
         ConnectionHandler connectionHandler = null;
 
+        // Use random PUID to prevent usage of an expired snapshot of the transaction identifier.
         PolyXid xid = PolyXid.generateLocalTransactionIdentifier( PUID.randomPUID( Type.RANDOM ), PUID.randomPUID( Type.RANDOM ) );
         try {
             connectionHandler = connectionFactory.getOrCreateConnectionHandler( xid );
@@ -172,6 +169,7 @@ public Map<String, List<ExportedColumn>> getExportedColumns() {
             log.error( "Entry: {} = {}", entry.getKey(), entry.getValue() );
         }
 
+        // TODO: If-else to possibly allow using the old "tables" setting instead of selected metadata. Not implemented yet.
         if ( !settings.containsKey( "selectedAttributes" ) || settings.get( "selectedAttributes" ).equals( "" ) || settings.get( "selectedAttributes" ).isEmpty() || settings.get( "selectedAttributes" ) == null ) {
             tables = settings.get( "tables" ).split( "," );
         } else {
@@ -345,14 +343,11 @@ public AbstractNode fetchMetadataTree() {
                 }
             }
 
-            try ( ResultSet cols =
-                    m.getColumns( null, schemaName, tableName, "%" ) ) {
-
+            try ( ResultSet cols = m.getColumns( null, schemaName, tableName, "%" ) ) {
                 while ( cols.next() ) {
                     String colName = cols.getString( "COLUMN_NAME" );
                     String typeName = cols.getString( "TYPE_NAME" );
-                    boolean nullable =
-                            cols.getInt( "NULLABLE" ) == DatabaseMetaData.columnNullable;
+                    boolean nullable = cols.getInt( "NULLABLE" ) == DatabaseMetaData.columnNullable;
                     boolean primary = pkCols.contains( colName );
 
                     AbstractNode colNode = new AttributeNode( "column", colName );
@@ -388,6 +383,8 @@ public AbstractNode fetchMetadataTree() {
         try {
             // stmt.close();
             // conn.close();
+
+            // Manually commit to prevent an overflow of open transactions.
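+            // Presumably nothing else commits the transaction opened above with a random
+            // PUID, so each metadata fetch would otherwise leave one more open transaction behind.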
h.commit(); } catch ( ConnectionHandlerException e ) { throw new RuntimeException( e ); @@ -404,7 +401,6 @@ public List> fetchPreview( Connection conn, String fqName, i try ( Statement stmt = conn.createStatement(); ResultSet rs = stmt.executeQuery( "SELECT * FROM " + fqName + " FETCH FIRST " + limit + " ROWS ONLY" ) ) { - ResultSetMetaData meta = rs.getMetaData(); while ( rs.next() ) { Map row = new LinkedHashMap<>(); diff --git a/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java b/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java index 16c7e80342..ebd8bd820c 100644 --- a/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java +++ b/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java @@ -101,6 +101,7 @@ public AbstractNode fetchMetadataTree() { SchemaFilter filter = SchemaFilter.forAdapter( adapterName ); + // Use random PUID to prevent usage of an expired snapshot of the transaction identifier. PolyXid xid = PolyXid.generateLocalTransactionIdentifier( PUID.randomPUID( Type.RANDOM ), PUID.randomPUID( Type.RANDOM ) ); java.sql.Statement stmt = null; @@ -117,7 +118,6 @@ public AbstractNode fetchMetadataTree() { ? meta.getSchemas( dbName, "%" ) : meta.getCatalogs() ) { while ( schemas.next() ) { - String schemaName = requiresSchema() ? schemas.getString( "TABLE_SCHEM" ) : schemas.getString( "TABLE_CAT" ); @@ -135,9 +135,7 @@ public AbstractNode fetchMetadataTree() { new String[]{ "TABLE" } ) ) { while ( tables.next() ) { - String tableName = tables.getString( "TABLE_NAME" ); - String fqName = (requiresSchema() ? "\"" + schemaName + "\"." : "") + "\"" + tableName + "\""; Connection finalConn = conn; previewByTable.computeIfAbsent( @@ -152,7 +150,6 @@ public AbstractNode fetchMetadataTree() { } ); AbstractNode tableNode = new Node( "table", tableName ); - Set pkCols = new HashSet<>(); try ( ResultSet pk = meta.getPrimaryKeys( dbName, @@ -169,7 +166,6 @@ public AbstractNode fetchMetadataTree() { tableName, "%" ) ) { while ( cols.next() ) { - String colName = cols.getString( "COLUMN_NAME" ); String typeName = cols.getString( "TYPE_NAME" ); boolean nullable = cols.getInt( "NULLABLE" ) == DatabaseMetaData.columnNullable; @@ -208,9 +204,7 @@ public AbstractNode fetchMetadataTree() { // stmt.close(); // conn.close(); handler.commit(); - } /*catch ( SQLException e ) { - throw new RuntimeException( e ); - }*/ catch ( ConnectionHandlerException e ) { + } catch ( ConnectionHandlerException e ) { throw new RuntimeException( e ); } } diff --git a/webui/src/main/java/org/polypheny/db/webui/Crud.java b/webui/src/main/java/org/polypheny/db/webui/Crud.java index 90a6169c5d..cbff2bc136 100644 --- a/webui/src/main/java/org/polypheny/db/webui/Crud.java +++ b/webui/src/main/java/org/polypheny/db/webui/Crud.java @@ -93,12 +93,14 @@ import org.polypheny.db.adapter.DataStore.FunctionalIndexInfo; import org.polypheny.db.adapter.MetadataObserver.AbstractListener; import org.polypheny.db.adapter.MetadataObserver.ChangeLogEntry; +import org.polypheny.db.adapter.MetadataObserver.ChangeLogView; import org.polypheny.db.adapter.MetadataObserver.PublisherManager; import org.polypheny.db.adapter.MetadataObserver.PublisherManager.ChangeStatus; import org.polypheny.db.adapter.RelationalDataSource.ExportedColumn; import org.polypheny.db.adapter.index.IndexManager; import org.polypheny.db.adapter.java.AdapterTemplate; import 
org.polypheny.db.adapter.java.AdapterTemplate.PreviewResult; +import org.polypheny.db.adapter.java.AdapterTemplate.PreviewResultEntry; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.polyalg.PolyAlgRegistry; import org.polypheny.db.catalog.Catalog; @@ -892,7 +894,7 @@ void deleteTuple( final Context ctx ) { } - void sendConfirmation( final Context ctx ) { + void buildPreview( final Context ctx ) { try { initMultipart( ctx ); if ( !ctx.isMultipartFormData() ) { @@ -976,7 +978,6 @@ void sendConfirmation( final Context ctx ) { */ void metadataStatus( final Context ctx ) { String uniqueName = ctx.pathParam( "uniqueName" ); - ChangeStatus changed = PublisherManager.getInstance().hasChange( uniqueName ); ctx.json( Map.of( "changed", changed ) ); } @@ -984,9 +985,19 @@ void metadataStatus( final Context ctx ) { void metadataChange( final Context ctx ) { String uniqueName = ctx.pathParam( "uniqueName" ); - PreviewResult data = PublisherManager.getInstance().fetchChange( uniqueName ); - ctx.json( data ); + PreviewResultEntry data = PublisherManager.getInstance().fetchChange( uniqueName ); + + List convertedHistory = data.getHistory().stream() + .map( ChangeLogView::from ) + .toList(); + PreviewResult result = new PreviewResult( + data.getMetadata(), + data.getPreview(), + convertedHistory + ); + + ctx.json( result ); } @@ -995,7 +1006,6 @@ void metadataAck( final Context ctx ) { log.info( "Acknowledgement incoming: " + payload.toString() ); PublisherManager.getInstance().ack( payload.uniqueName, payload.addedPaths ); - // Optional> adapter = AdapterManager.getInstance().getSource( payload.uniqueName ); Transaction transaction = transactionManager.startTransaction( Catalog.defaultUserId, false, "metadata-ack-" + payload.uniqueName ); Statement stmt = transaction.createStatement(); try { @@ -1005,7 +1015,7 @@ void metadataAck( final Context ctx ) { if ( payload.removedPaths != null || payload.removedPaths.length > 0 ) { String[] filtered = filterPrefixes( payload.removedPaths ); - DdlManager.getInstance().dropSourceEntities( List.of( filtered ), stmt, payload.uniqueName ); + DdlManager.getInstance().removeSelectedMetadata( List.of( filtered ), stmt, payload.uniqueName ); } transaction.commit(); ctx.status( 200 ).result( "ACK processed" ); @@ -1029,9 +1039,13 @@ void getMetaConfiguration( final Context ctx ) { .orElseThrow( () -> new IllegalStateException( "Adapter %s doesn't support inteface metadata provider !".formatted( uniqueName ) ) ); - List history = PublisherManager.getInstance().getHistory( uniqueName ); + List historyBeforeParsing = PublisherManager.getInstance().getHistory( uniqueName ); - PreviewResult data = new PreviewResult( NodeSerializer.serializeNode( provider.getRoot() ).toString(), provider.getPreview(), history ); + List response = historyBeforeParsing.stream() + .map( ChangeLogView::from ) + .toList(); + + PreviewResult data = new PreviewResult( NodeSerializer.serializeNode( provider.getRoot() ).toString(), provider.getPreview(), response ); ctx.json( data ); } @@ -1070,7 +1084,7 @@ void setMetaConfiguration( final Context ctx ) { if ( !toUnselect.isEmpty() ) { try { - DdlManager.getInstance().dropSourceEntities( List.copyOf( toUnselect ), stmt, config.uniqueName ); + DdlManager.getInstance().removeSelectedMetadata( List.copyOf( toUnselect ), stmt, config.uniqueName ); NodeUtil.unmarkSelectedAttributes( ((MetadataProvider) adapter.get()).getRoot(), List.copyOf( toUnselect ) ); tx.commit(); stmt.close(); @@ -2310,23 +2324,23 @@ void updateSettingsForm( 
final Context ctx ) throws IOException, ServletExceptio PreviewRequest am = HttpServer.mapper.readValue( bodyJson, PreviewRequest.class ); List fileNames; - String rawDir = am.getSettings().get("directory"); + String rawDir = am.getSettings().get( "directory" ); try { fileNames = HttpServer.mapper.readValue( rawDir, - new com.fasterxml.jackson.core.type.TypeReference>() {}); - } catch (com.fasterxml.jackson.core.JsonProcessingException ex) { + new com.fasterxml.jackson.core.type.TypeReference>() { + } ); + } catch ( com.fasterxml.jackson.core.JsonProcessingException ex ) { String cleaned = rawDir - .replaceAll("[\\[\\]\"]", "") + .replaceAll( "[\\[\\]\"]", "" ) .trim(); - fileNames = Arrays.stream(cleaned.split(",")) - .map(String::trim) - .filter(s -> !s.isEmpty()) + fileNames = Arrays.stream( cleaned.split( "," ) ) + .map( String::trim ) + .filter( s -> !s.isEmpty() ) .toList(); } - Map fileBytes = new HashMap<>(); for ( Part p : ctx.req.getParts() ) { if ( !"body".equals( p.getName() ) ) { @@ -2348,7 +2362,7 @@ void updateSettingsForm( final Context ctx ) throws IOException, ServletExceptio private void createFormDiffs( PreviewRequest previewRequest, String path ) { - DataSource currentSource = AdapterManager.getInstance().getSource( previewRequest.uniqueName ).orElseThrow( ); + DataSource currentSource = AdapterManager.getInstance().getSource( previewRequest.uniqueName ).orElseThrow(); MetadataProvider currentProvider = (MetadataProvider) currentSource; AbstractNode currentNode = currentProvider.getRoot(); @@ -2361,12 +2375,15 @@ private void createFormDiffs( PreviewRequest previewRequest, String path ) { AbstractNode tempNode = tempProvider.fetchMetadataTree(); Object newPreview = tempProvider.getPreview(); - PreviewResult result = AbstractListener.buildFormChange( previewRequest.uniqueName, currentNode, tempNode, newPreview, path ); + PreviewResultEntry result = AbstractListener.buildFormChange( previewRequest.uniqueName, currentNode, tempNode, newPreview, path ); currentProvider.printTree( currentNode, 0 ); tempProvider.printTree( tempNode, 0 ); - try { tempSource.shutdown(); } catch (Exception ignore) {} + try { + tempSource.shutdown(); + } catch ( Exception ignore ) { + } } diff --git a/webui/src/main/java/org/polypheny/db/webui/HttpServer.java b/webui/src/main/java/org/polypheny/db/webui/HttpServer.java index d3403a383f..c09a6e241d 100644 --- a/webui/src/main/java/org/polypheny/db/webui/HttpServer.java +++ b/webui/src/main/java/org/polypheny/db/webui/HttpServer.java @@ -355,7 +355,7 @@ private void attachRoutes( Javalin webuiServer, Crud crud ) { webuiServer.get( "/product", ctx -> ctx.result( "Polypheny-DB" ) ); - webuiServer.post( "/previewTable", crud::sendConfirmation ); + webuiServer.post( "/previewTable", crud::buildPreview ); webuiServer.get( "/metadataStatus/{uniqueName}", crud::metadataStatus ); diff --git a/webui/src/main/java/org/polypheny/db/webui/schemaDiscovery/DataHandling/AttributeInfo.java b/webui/src/main/java/org/polypheny/db/webui/schemaDiscovery/DataHandling/AttributeInfo.java deleted file mode 100644 index 9dcd7faaad..0000000000 --- a/webui/src/main/java/org/polypheny/db/webui/schemaDiscovery/DataHandling/AttributeInfo.java +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Copyright 2019-2025 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.webui.schemaDiscovery.DataHandling; - -import java.util.ArrayList; -import java.util.List; - -public class AttributeInfo { - public String name; - public String type; - public List sampleValues; - - public AttributeInfo( String name, String type ) { - this.name = name; - this.type = type; - this.sampleValues = new ArrayList<>(); - } -} diff --git a/webui/src/main/java/org/polypheny/db/webui/schemaDiscovery/DataHandling/DatabaseInfo.java b/webui/src/main/java/org/polypheny/db/webui/schemaDiscovery/DataHandling/DatabaseInfo.java deleted file mode 100644 index 664a931c64..0000000000 --- a/webui/src/main/java/org/polypheny/db/webui/schemaDiscovery/DataHandling/DatabaseInfo.java +++ /dev/null @@ -1,60 +0,0 @@ -/* - * Copyright 2019-2025 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.webui.schemaDiscovery.DataHandling; - -/* -* Data structure for saving all schema information for every database collected because -* saving all database and schema information in two lists is more overhead - */ - -import java.util.ArrayList; -import java.util.List; - -public class DatabaseInfo { - public String name; - public List schemas; - - public DatabaseInfo(String name) { - this.name = name; - this.schemas = new ArrayList<>(); - } - - @Override - public String toString() { - StringBuilder sb = new StringBuilder(); - sb.append("πŸ“¦ Datenbank: ").append(name).append("\n"); - - for (SchemaInfo schema : schemas) { - sb.append(" πŸ“ Schema: ").append(schema.name).append("\n"); - - for (TableInfo table : schema.tables) { - sb.append(" πŸ“„ Tabelle: ").append(table.name).append("\n"); - - for (AttributeInfo attr : table.attributes) { - sb.append(" πŸ”Ή Attribut: ") - .append(attr.name) - .append(" : ") - .append(attr.type) - .append("\n"); - } - } - } - - return sb.toString(); - } -} - diff --git a/webui/src/main/java/org/polypheny/db/webui/schemaDiscovery/DataHandling/SchemaInfo.java b/webui/src/main/java/org/polypheny/db/webui/schemaDiscovery/DataHandling/SchemaInfo.java deleted file mode 100644 index 48256ebee8..0000000000 --- a/webui/src/main/java/org/polypheny/db/webui/schemaDiscovery/DataHandling/SchemaInfo.java +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Copyright 2019-2025 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.webui.schemaDiscovery.DataHandling; - -import java.util.ArrayList; -import java.util.List; - -public class SchemaInfo { - public String name; - public List tables; - - public SchemaInfo( String name ) { - this.name = name; - this.tables = new ArrayList<>(); - } - - - public void addTable( TableInfo table ) { - tables.add( table ); - } - -} diff --git a/webui/src/main/java/org/polypheny/db/webui/schemaDiscovery/DataHandling/TableInfo.java b/webui/src/main/java/org/polypheny/db/webui/schemaDiscovery/DataHandling/TableInfo.java deleted file mode 100644 index 985fb04420..0000000000 --- a/webui/src/main/java/org/polypheny/db/webui/schemaDiscovery/DataHandling/TableInfo.java +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Copyright 2019-2025 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.webui.schemaDiscovery.DataHandling; -import java.util.ArrayList; -import java.util.List; - -public class TableInfo { - public String name; - public List attributes; - - public TableInfo( String name ) { - this.name = name; - this.attributes = new ArrayList<>(); - } - - - public void addAttribute( AttributeInfo attr ) { - attributes.add( attr ); - } -} diff --git a/webui/src/main/java/org/polypheny/db/webui/schemaDiscovery/JsonExport.java b/webui/src/main/java/org/polypheny/db/webui/schemaDiscovery/JsonExport.java deleted file mode 100644 index 22c66c1f51..0000000000 --- a/webui/src/main/java/org/polypheny/db/webui/schemaDiscovery/JsonExport.java +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Copyright 2019-2025 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.polypheny.db.webui.schemaDiscovery; - -import com.fasterxml.jackson.databind.ObjectMapper; -import org.polypheny.db.webui.schemaDiscovery.DataHandling.DatabaseInfo; -import java.util.List; - -public class JsonExport { - - public static void printAsJson( List database ) { - try { - ObjectMapper mapper = new ObjectMapper(); - String json = mapper.writerWithDefaultPrettyPrinter().writeValueAsString( database ); - System.out.println( json ); - } catch ( Exception e ) { - System.err.println( "Fail to convert DatabaseInfo Object to JSON-format: " + e.getMessage() ); - } - } - -} diff --git a/webui/src/main/java/org/polypheny/db/webui/schemaDiscovery/OracleConnection.java b/webui/src/main/java/org/polypheny/db/webui/schemaDiscovery/OracleConnection.java deleted file mode 100644 index 9303060d1b..0000000000 --- a/webui/src/main/java/org/polypheny/db/webui/schemaDiscovery/OracleConnection.java +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Copyright 2019-2025 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.webui.schemaDiscovery; - -import java.sql.Connection; -import java.sql.DriverManager; -import java.sql.ResultSet; -import java.sql.SQLException; -import java.sql.Statement; - -import java.sql.*; - -public class OracleConnection { - - public static void main(String[] args) { - // Verbindungseinstellungen - String url = "jdbc:oracle:thin:@localhost:1521/XE"; - String username = "system"; - String password = "roman123"; - - // SQL-Abfrage - String query = "SELECT * FROM test"; - - try (Connection conn = DriverManager.getConnection(url, username, password); - Statement stmt = conn.createStatement(); - ResultSet rs = stmt.executeQuery(query)) { - - // Metadaten holen (z.β€―B. 
Spaltenanzahl und -namen) - ResultSetMetaData metaData = rs.getMetaData(); - int columnCount = metaData.getColumnCount(); - - // Alle Zeilen durchlaufen - while (rs.next()) { - for (int i = 1; i <= columnCount; i++) { - System.out.print(rs.getString(i)); - if (i < columnCount) System.out.print(" | "); - } - System.out.println(); - } - - } catch (SQLException e) { - System.out.println("Fehler bei der Verbindung oder Abfrage:"); - e.printStackTrace(); - } - } -} - -/* -// ÜberprΓΌfen, ob das ResultSet Daten enthΓ€lt - if (rs.next()) { - // Angenommen, die Tabelle hat eine Spalte "spalte" (beispielhaft) - int spalte = rs.getInt("spalte"); - System.out.println("Wert aus Spalte: " + spalte); - } else { - System.out.println("Keine Daten gefunden."); - } - */ diff --git a/webui/src/main/java/org/polypheny/db/webui/schemaDiscovery/PostgreSqlConnection.java b/webui/src/main/java/org/polypheny/db/webui/schemaDiscovery/PostgreSqlConnection.java deleted file mode 100644 index 5596709347..0000000000 --- a/webui/src/main/java/org/polypheny/db/webui/schemaDiscovery/PostgreSqlConnection.java +++ /dev/null @@ -1,128 +0,0 @@ -/* - * Copyright 2019-2025 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.webui.schemaDiscovery; - -import org.polypheny.db.webui.schemaDiscovery.DataHandling.AttributeInfo; -import org.polypheny.db.webui.schemaDiscovery.DataHandling.DatabaseInfo; -import org.polypheny.db.webui.schemaDiscovery.DataHandling.SchemaInfo; -import org.polypheny.db.webui.schemaDiscovery.DataHandling.TableInfo; -import java.sql.Connection; -import java.sql.DriverManager; -import java.sql.ResultSet; -import java.sql.SQLException; -import java.sql.Statement; -import java.util.ArrayList; -import java.util.List; - -import java.sql.*; -import java.util.ArrayList; -import java.util.List; - -public class PostgreSqlConnection { - - static String host = "localhost"; - static String port = "5432"; - static String user = "postgres"; - static String password = "password"; - - - public static List getDatabasesSchemasAndTables() throws SQLException { - List dbs = new ArrayList<>(); - - String metaUrl = "jdbc:postgresql://" + host + ":" + port + "/postgres"; - try ( - Connection metaConn = DriverManager.getConnection(metaUrl, user, password); - Statement stmt = metaConn.createStatement(); - ResultSet rs = stmt.executeQuery("SELECT datname FROM pg_database WHERE datistemplate = false") - ) { - while (rs.next()) { - String dbName = rs.getString("datname"); - DatabaseInfo dbInfo = new DatabaseInfo(dbName); - - String dbUrl = "jdbc:postgresql://" + host + ":" + port + "/" + dbName; - try (Connection dbConn = DriverManager.getConnection(dbUrl, user, password)) { - DatabaseMetaData meta = dbConn.getMetaData(); - - ResultSet schemas = meta.getSchemas(); - while (schemas.next()) { - String schemaName = schemas.getString("TABLE_SCHEM"); - SchemaInfo schema = new SchemaInfo(schemaName); - - ResultSet tables = meta.getTables(null, schemaName, "%", new String[]{"TABLE"}); 
- while (tables.next()) { - String tableName = tables.getString("TABLE_NAME"); - TableInfo table = new TableInfo(tableName); - - ResultSet columns = meta.getColumns(null, schemaName, tableName, "%"); - while (columns.next()) { - String columnName = columns.getString("COLUMN_NAME"); - String columnType = columns.getString("TYPE_NAME"); - - AttributeInfo attribute = new AttributeInfo(columnName, columnType); - - String sampleQuery = "SELECT \"" + columnName + "\" FROM \"" + schemaName + "\".\"" + tableName + "\" LIMIT 20"; - try ( - Statement sampleStmt = dbConn.createStatement(); - ResultSet sampleRs = sampleStmt.executeQuery(sampleQuery) - ) { - while (sampleRs.next()) { - Object value = sampleRs.getObject(columnName); - attribute.sampleValues.add(value != null ? value.toString() : "NULL"); - } - } catch (SQLException e) { - System.err.println("Fehler beim Abrufen von Beispieldaten fΓΌr Spalte " + columnName + ": " + e.getMessage()); - } - - table.attributes.add(attribute); - } - - schema.tables.add(table); - } - - dbInfo.schemas.add(schema); - } - - } catch (SQLException e) { - System.err.println("Fehler beim Abrufen von Schemas fΓΌr DB " + dbName + ": " + e.getMessage()); - } - - dbs.add(dbInfo); - } - } - - return dbs; - } - - - - - - public static void main(String[] args) { - try { - List dbs = getDatabasesSchemasAndTables(); - for (DatabaseInfo db : dbs) { - System.out.print(db.toString()); - } - JsonExport.printAsJson( dbs ); - - } catch (SQLException e) { - System.err.println("Fehler bei der Schema-Erkennung: " + e.getMessage()); - e.printStackTrace(); - } - } - -} From 7a40a40c72b3f746c281cd3332b8ba980825a948 Mon Sep 17 00:00:00 2001 From: romanost03 Date: Tue, 12 Aug 2025 13:22:05 +0200 Subject: [PATCH 66/68] Bugfix: Meta Acknowledgements correctly display success or error. --- .../org/polypheny/db/ddl/DdlManagerImpl.java | 4 ++-- .../java/org/polypheny/db/webui/Crud.java | 23 +++++++++++++------ 2 files changed, 18 insertions(+), 9 deletions(-) diff --git a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java index 410782db8f..d9f7169ea7 100644 --- a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java +++ b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java @@ -402,10 +402,10 @@ public void removeSelectedMetadata( List paths, Statement stmt, String u table = catalog.getSnapshot().rel().getTable( Catalog.defaultNamespaceId, k.table() ).orElse( null ); } if ( table == null ) { - throw new GenericRuntimeException( "Tabelle " + k + " nicht gefunden." ); + log.info( "Table: " + k.physSchema() + "." + k.table() + " not found" ); } if ( table.entityType != EntityType.SOURCE ) { - throw new GenericRuntimeException( "Tabelle " + table.name + " ist kein SOURCE-Objekt." ); + throw new GenericRuntimeException( "Table " + table.name + " is not a source object!" ); } if ( cols.contains( "*" ) ) { diff --git a/webui/src/main/java/org/polypheny/db/webui/Crud.java b/webui/src/main/java/org/polypheny/db/webui/Crud.java index cbff2bc136..0289a7c2e8 100644 --- a/webui/src/main/java/org/polypheny/db/webui/Crud.java +++ b/webui/src/main/java/org/polypheny/db/webui/Crud.java @@ -983,6 +983,9 @@ void metadataStatus( final Context ctx ) { } + /** + * Exact changes are sent here. The publisher manager gets the changes from the listener.
+ */ void metadataChange( final Context ctx ) { String uniqueName = ctx.pathParam( "uniqueName" ); PreviewResultEntry data = PublisherManager.getInstance().fetchChange( uniqueName ); @@ -1001,6 +1004,9 @@ void metadataChange( final Context ctx ) { } + /** + * Add or delete metadata and update the adapter. + */ void metadataAck( final Context ctx ) { AckPayload payload = ctx.bodyAsClass( AckPayload.class ); log.info( "Acknowledgement incoming: " + payload.toString() ); @@ -1009,20 +1015,24 @@ void metadataAck( final Context ctx ) { Transaction transaction = transactionManager.startTransaction( Catalog.defaultUserId, false, "metadata-ack-" + payload.uniqueName ); Statement stmt = transaction.createStatement(); try { - if ( payload.addedPaths != null || payload.addedPaths.length > 0 ) { + if ( payload.addedPaths != null && payload.addedPaths.length > 0 ) { DdlManager.getInstance().addSelectedMetadata( transaction, stmt, payload.uniqueName, Catalog.defaultNamespaceId, List.of( payload.addedPaths ) ); } - if ( payload.removedPaths != null || payload.removedPaths.length > 0 ) { + if ( payload.removedPaths != null && payload.removedPaths.length > 0 ) { String[] filtered = filterPrefixes( payload.removedPaths ); DdlManager.getInstance().removeSelectedMetadata( List.of( filtered ), stmt, payload.uniqueName ); } transaction.commit(); - ctx.status( 200 ).result( "ACK processed" ); + ctx.status( 200 ).json( Map.of( "message", "ACK processed" ) ); } catch ( Exception e ) { log.error( "metadataAck failed", e ); - ctx.status( 200 ).json( Map.of( "message", "ACK was processed" ) ); - + try { + transaction.rollback( "Error occurred during metadata acknowledgement!" ); + } catch ( Exception ignore ) { + } + ctx.status( 500 ).json( Map.of( "message", "ACK failed", "error", e.getClass().getSimpleName() + ) ); } finally { if ( stmt != null ) { stmt.close(); @@ -1036,8 +1046,7 @@ void getMetaConfiguration( final Context ctx ) { String uniqueName = ctx.pathParam( "uniqueName" ); MetadataProvider provider = AdapterManager.getInstance() .getMetadataProvider( uniqueName ) - .orElseThrow( () -> new IllegalStateException( - "Adapter %s doesn't support inteface metadata provider !".formatted( uniqueName ) ) ); + .orElseThrow( () -> new IllegalStateException( "Adapter %s doesn't support the metadata provider interface!".formatted( uniqueName ) ) ); List historyBeforeParsing = PublisherManager.getInstance().getHistory( uniqueName ); From 330d7df488c51d237df260c9c901773390fee775 Mon Sep 17 00:00:00 2001 From: romanost03 Date: Tue, 12 Aug 2025 16:38:25 +0200 Subject: [PATCH 67/68] Code cleaning and removing logs.
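The guard change above, from || to &&, is the heart of the previous bugfix: with ||, a null addedPaths array short-circuits straight into payload.addedPaths.length and throws a NullPointerException, which the old handler then masked by answering 200. A minimal standalone reproduction follows; the AckPayload stub is illustrative, the real class lives in Crud.

public class AckGuardSketch {

    // Stub of the request payload, mirroring the fields used by metadataAck.
    static class AckPayload {
        String[] addedPaths; // null when the UI acknowledges without additions
    }

    public static void main( String[] args ) {
        AckPayload payload = new AckPayload();

        // Pre-fix form: "!= null || length > 0" dereferences null and throws.
        try {
            if ( payload.addedPaths != null || payload.addedPaths.length > 0 ) {
                System.out.println( "would add metadata" );
            }
        } catch ( NullPointerException e ) {
            System.out.println( "old guard: NPE" );
        }

        // Fixed form: && short-circuits, so the length check never runs on null.
        if ( payload.addedPaths != null && payload.addedPaths.length > 0 ) {
            System.out.println( "would add metadata" );
        } else {
            System.out.println( "new guard: nothing to add" );
        }
    }
}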
--- .../MetadataObserver/AbstractListener.java | 203 +--------------- .../MetadataObserver/AbstractPublisher.java | 79 +------ .../MetadataObserver/ListenerImpl.java | 218 ++++++++++++++++++ .../MetadataObserver/MetadataListener.java | 31 --- .../MetadataObserver/MetadataPublisher.java | 32 --- .../MetadataObserver/PublisherImpl.java | 89 +++++++ .../MetadataObserver/PublisherManager.java | 22 +- .../db/adapter/java/AdapterTemplate.java | 10 - .../db/schemaDiscovery/MetadataProvider.java | 4 - .../db/schemaDiscovery/NodeUtil.java | 28 +-- .../org/polypheny/db/ddl/DdlManagerImpl.java | 26 +-- .../polypheny/db/adapter/csv/CsvSource.java | 23 +- .../db/adapter/excel/ExcelSource.java | 27 --- .../jdbc/sources/AbstractJdbcSource.java | 4 - .../polypheny/db/adapter/json/JsonSource.java | 16 -- .../monetdb/sources/MonetdbSource.java | 20 -- .../db/adapter/jdbc/MysqlSourcePlugin.java | 24 -- .../source/OracleSource.java | 26 +-- .../postgres/source/PostgresqlSource.java | 20 -- .../java/org/polypheny/db/webui/Crud.java | 76 ++---- .../org/polypheny/db/webui/WebSocket.java | 1 - 21 files changed, 359 insertions(+), 620 deletions(-) create mode 100644 core/src/main/java/org/polypheny/db/adapter/MetadataObserver/ListenerImpl.java delete mode 100644 core/src/main/java/org/polypheny/db/adapter/MetadataObserver/MetadataListener.java delete mode 100644 core/src/main/java/org/polypheny/db/adapter/MetadataObserver/MetadataPublisher.java create mode 100644 core/src/main/java/org/polypheny/db/adapter/MetadataObserver/PublisherImpl.java diff --git a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/AbstractListener.java b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/AbstractListener.java index fe8a3c0179..2a02fbaafe 100644 --- a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/AbstractListener.java +++ b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/AbstractListener.java @@ -16,211 +16,16 @@ package org.polypheny.db.adapter.MetadataObserver; -import com.google.gson.Gson; -import com.google.gson.JsonObject; -import lombok.Getter; -import lombok.extern.slf4j.Slf4j; import org.polypheny.db.adapter.Adapter; -import org.polypheny.db.adapter.AdapterManager; -import org.polypheny.db.adapter.DataSource; -import org.polypheny.db.adapter.MetadataObserver.ChangeLogEntry; -import org.polypheny.db.adapter.MetadataObserver.ChangeLogEntry.DiffMessageUtil; -import org.polypheny.db.adapter.MetadataObserver.PublisherManager.ChangeStatus; -import org.polypheny.db.adapter.MetadataObserver.Utils.MetaAnnotator; -import org.polypheny.db.adapter.MetadataObserver.Utils.MetaDiffUtil; -import org.polypheny.db.adapter.MetadataObserver.Utils.MetaDiffUtil.DiffResult; -import org.polypheny.db.adapter.java.AdapterTemplate.PreviewResult; -import org.polypheny.db.adapter.java.AdapterTemplate.PreviewResultEntry; import org.polypheny.db.schemaDiscovery.AbstractNode; import org.polypheny.db.schemaDiscovery.MetadataProvider; -import org.polypheny.db.schemaDiscovery.NodeSerializer; -import org.polypheny.db.schemaDiscovery.NodeUtil; -import java.io.File; -import java.io.IOException; -import java.nio.file.Files; -import java.nio.file.StandardCopyOption; -import java.time.Instant; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.List; -import java.util.Set; -@Slf4j -public class AbstractListener
<P extends Adapter & MetadataProvider> implements MetadataListener<P> {
+public interface AbstractListener<P extends Adapter & MetadataProvider>
{ - private boolean available; - private AbstractNode currentNode; - private P adapter; - private String hash; + void onMetadataChange( P adapter, AbstractNode node, String hash ); - private static AbstractNode formRootNode = null; + boolean isAvailable(); - private static final Gson GSON = new Gson(); - - - public AbstractListener() { - available = true; - currentNode = null; - this.adapter = null; - this.hash = null; - } - - - @Override - public void onMetadataChange( P adapter, AbstractNode node, String hash ) { - available ^= true; - this.currentNode = node; - this.adapter = adapter; - this.hash = hash; - - Object preview = adapter.getPreview(); - - log.info( "Listener saved credentials of adapter and sends now Request to UI and applies changes on adapter metadata and metadata the listener is holding." ); - - DiffResult result = MetaDiffUtil.diff( adapter.getRoot(), node ); - log.info( "Diffresult: {}", result ); - - ChangeStatus status = NodeUtil.evaluateStatus( result, adapter.getRoot() ); - - ChangeLogEntry entry = new ChangeLogEntry( adapter.getUniqueName(), Instant.now(), DiffMessageUtil.toMessages( result ), status ); - PublisherManager.getInstance().addChange( entry ); - - AbstractNode annotatedCopy = MetaAnnotator.annotateTree( adapter.getRoot(), node, result ); - String json = NodeSerializer.serializeNode( annotatedCopy ).toString(); - log.info( "JSON: {}", json ); - - PublisherManager.getInstance().onMetadataChange( adapter.getUniqueName(), new PreviewResultEntry( json, preview, List.of( entry ) ), status ); - } - - - public static PreviewResultEntry buildFormChange( String uniqueName, AbstractNode oldRoot, AbstractNode newRoot, Object preview, String path ) { - DiffResult diff = MetaDiffUtil.diff( oldRoot, newRoot ); - ChangeStatus status = NodeUtil.evaluateStatus( diff, oldRoot ); - - ChangeLogEntry entry = new ChangeLogEntry( uniqueName, Instant.now(), DiffMessageUtil.toMessages( diff ), status ); - - AbstractNode annotated = MetaAnnotator.annotateTree( oldRoot, newRoot, diff ); - String json = NodeSerializer.serializeNode( annotated ).toString(); - - PublisherManager pm = PublisherManager.getInstance(); - pm.addChange( entry ); - PreviewResultEntry result = new PreviewResultEntry( json, preview, List.of( entry ) ); - pm.onMetadataChange( uniqueName, result, status ); - pm.saveTempPath( uniqueName, path ); - - formRootNode = newRoot; - - return result; - } - - - public static void applyAnnotatedTree( Adapter adapter, AbstractNode newRoot, String newHash, String[] additionallySelectedMetadata ) { - - if ( !(adapter instanceof DataSource) ) { - throw new IllegalArgumentException( "Adapter must be of type DataSource" ); - } - - MetadataProvider metadataProvider = (MetadataProvider) adapter; - - Set selected = NodeUtil.collectSelecedAttributePaths( metadataProvider.getRoot() ); - if ( additionallySelectedMetadata != null ) { - selected.addAll( Arrays.asList( additionallySelectedMetadata ) ); - } - - metadataProvider.setRoot( newRoot ); - metadataProvider.markSelectedAttributes( List.copyOf( selected ) ); - HashCache.getInstance().put( adapter.getUniqueName(), newHash ); - } - - - @Override - public void applyChange( String[] metadata ) { - log.info( "Changes are going to be applied" ); - - Set prevSelected = NodeUtil.collectSelecedAttributePaths( this.adapter.getRoot() ); - - this.adapter.setRoot( this.currentNode ); - if ( metadata != null && metadata.length > 0 ) { - prevSelected.addAll( Arrays.asList( metadata ) ); - } - this.adapter.markSelectedAttributes( List.copyOf( 
prevSelected ) ); - HashCache.getInstance().put( this.adapter.getUniqueName(), this.hash ); - - this.currentNode = null; - this.adapter = null; - this.hash = null; - - available ^= true; - - } - - - public static void applyFormChange( String[] metadata, String uniqueName, String newPath ) { - log.info( "Form changes are going to be applied." ); - AbstractNode newRoot = formRootNode; - - DataSource adapter = AdapterManager.getInstance().getSource( uniqueName ).orElseThrow(); - MetadataProvider metadataprovider = (MetadataProvider) adapter; - - deleteTempPath( newPath, adapter.getSettings().get( "directory" ) ); - - newRoot = metadataprovider.fetchMetadataTree(); - - AbstractNode oldRoot = metadataprovider.getRoot(); - metadataprovider.setRoot( newRoot ); - - Set prevSelected = NodeUtil.collectSelecedAttributePaths( oldRoot ); - // metadataprovider.setRoot( newRoot ); - if ( metadata != null && metadata.length > 0 ) { - prevSelected.addAll( Arrays.asList( metadata ) ); - } - - metadataprovider.markSelectedAttributes( List.copyOf( prevSelected ) ); - - formRootNode = null; - PublisherManager.getInstance().deleteTempPath( uniqueName ); - - } - - - private static void deleteTempPath( String tmpPath, String directory ) { - File tmpDir = new File( tmpPath ); - File targetDir = new File( directory ); - - if ( !tmpDir.exists() || !tmpDir.isDirectory() ) { - throw new IllegalArgumentException( "tmpPath is not a valid directory: " + tmpPath ); - } - if ( !targetDir.exists() || !targetDir.isDirectory() ) { - throw new IllegalArgumentException( "directory is not a valid directory: " + directory ); - } - - for ( File file : targetDir.listFiles() ) { - if ( !file.delete() ) { - throw new RuntimeException( "Failed to delete file: " + file.getAbsolutePath() ); - } - } - - for ( File file : tmpDir.listFiles() ) { - try { - Files.copy( file.toPath(), new File( targetDir, file.getName() ).toPath(), - StandardCopyOption.REPLACE_EXISTING ); - } catch ( IOException e ) { - throw new RuntimeException( "Failed to copy file: " + file.getAbsolutePath(), e ); - } - } - - for ( File file : tmpDir.listFiles() ) { - file.delete(); - } - if ( !tmpDir.delete() ) { - throw new RuntimeException( "Failed to delete tmpPath directory: " + tmpDir.getAbsolutePath() ); - } - } - - - @Override - public boolean isAvailable() { - return this.available; - } + void applyChange( String[] metadata ); } diff --git a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/AbstractPublisher.java b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/AbstractPublisher.java index 8b89d273ef..20524d1673 100644 --- a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/AbstractPublisher.java +++ b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/AbstractPublisher.java @@ -16,84 +16,17 @@ package org.polypheny.db.adapter.MetadataObserver; -import lombok.extern.slf4j.Slf4j; -import org.polypheny.db.adapter.Adapter; -import org.polypheny.db.schemaDiscovery.AbstractNode; -import org.polypheny.db.schemaDiscovery.MetadataProvider; -import org.polypheny.db.schemaDiscovery.NodeSerializer; -import java.util.List; -import java.util.Map; -import java.util.concurrent.CopyOnWriteArrayList; -import java.util.concurrent.Executors; -import java.util.concurrent.ScheduledExecutorService; +public interface AbstractPublisher { -@Slf4j -public class AbstractPublisher
<P extends Adapter & MetadataProvider>
implements MetadataPublisher { + String getAdapterUniqueName(); - protected final P provider; - private final long intervalSeconds = 30; - private final ScheduledExecutorService scheduler = Executors.newSingleThreadScheduledExecutor(); - private MetadataListener listener; - private final MetadataHasher hasher = new MetadataHasher(); - private final HashCache cache = HashCache.getInstance(); + void start(); + void stop(); - protected AbstractPublisher( P provider, MetadataListener listener ) { - this.provider = provider; - this.listener = listener; - } + void runCheck(); + AbstractListener getListener(); - @Override - public String getAdapterUniqueName() { - return provider.getUniqueName(); - } - - - @Override - public void start() { - scheduler.scheduleAtFixedRate( this::runCheck, 0, intervalSeconds, java.util.concurrent.TimeUnit.SECONDS ); - } - - - @Override - public void stop() { - scheduler.shutdown(); - } - - - @Override - public void runCheck() { - if ( !listener.isAvailable() ) { - return; - } - try { - AbstractNode node = provider.fetchMetadataTree(); - String fresh = NodeSerializer.serializeNode( node ).toString(); - String hash = hasher.hash( fresh ); - - String lastHash = cache.getHash( provider.getUniqueName() ); - - log.info( "Fresh JSON: {}", fresh ); - log.info( "Metadata hash at Observer-Check (Current adapter hash) : {}", lastHash ); - log.info( "Metadata hash at Observer-Check (Newest hash) : {}", hash ); - log.info( "Key used during observer-check: {}", provider.getUniqueName() ); - - if ( lastHash != null && !lastHash.equals( hash ) ) { - log.info( "Metadata of adapter {} changed. Sending new snapshot to UI.", provider.getUniqueName() ); - listener.onMetadataChange( provider, node, hash ); - } else { - log.info( "Metadata of adapter {} did not change.", provider.getUniqueName() ); - } - } catch ( Exception e ) { - throw new RuntimeException( "Error while checking current snapshot.", e ); - } - } - - - @Override - public MetadataListener getListener() { - return this.listener; - } } diff --git a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/ListenerImpl.java b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/ListenerImpl.java new file mode 100644 index 0000000000..dd7458b1ce --- /dev/null +++ b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/ListenerImpl.java @@ -0,0 +1,218 @@ +/* + * Copyright 2019-2025 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.db.adapter.MetadataObserver; + +import com.google.gson.Gson; +import lombok.extern.slf4j.Slf4j; +import org.polypheny.db.adapter.Adapter; +import org.polypheny.db.adapter.AdapterManager; +import org.polypheny.db.adapter.DataSource; +import org.polypheny.db.adapter.MetadataObserver.ChangeLogEntry.DiffMessageUtil; +import org.polypheny.db.adapter.MetadataObserver.PublisherManager.ChangeStatus; +import org.polypheny.db.adapter.MetadataObserver.Utils.MetaAnnotator; +import org.polypheny.db.adapter.MetadataObserver.Utils.MetaDiffUtil; +import org.polypheny.db.adapter.MetadataObserver.Utils.MetaDiffUtil.DiffResult; +import org.polypheny.db.adapter.java.AdapterTemplate.PreviewResultEntry; +import org.polypheny.db.schemaDiscovery.AbstractNode; +import org.polypheny.db.schemaDiscovery.MetadataProvider; +import org.polypheny.db.schemaDiscovery.NodeSerializer; +import org.polypheny.db.schemaDiscovery.NodeUtil; + +import java.io.File; +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.StandardCopyOption; +import java.time.Instant; +import java.util.Arrays; +import java.util.List; +import java.util.Set; + +@Slf4j +public class ListenerImpl
<P extends Adapter & MetadataProvider> implements AbstractListener<P>
{ + + private boolean available; + private AbstractNode currentNode; + private P adapter; + private String hash; + + private static AbstractNode formRootNode = null; + + private static final Gson GSON = new Gson(); + + + public ListenerImpl() { + available = true; + currentNode = null; + this.adapter = null; + this.hash = null; + } + + + @Override + public void onMetadataChange(P adapter, AbstractNode node, String hash) { + available ^= true; + this.currentNode = node; + this.adapter = adapter; + this.hash = hash; + + Object preview = adapter.getPreview(); + + + DiffResult result = MetaDiffUtil.diff(adapter.getRoot(), node); + + ChangeStatus status = NodeUtil.evaluateStatus(result, adapter.getRoot()); + + ChangeLogEntry entry = new ChangeLogEntry(adapter.getUniqueName(), Instant.now(), DiffMessageUtil.toMessages(result), status); + PublisherManager.getInstance().addChange(entry); + + AbstractNode annotatedCopy = MetaAnnotator.annotateTree(adapter.getRoot(), node, result); + String json = NodeSerializer.serializeNode(annotatedCopy).toString(); + + PublisherManager.getInstance().onMetadataChange(adapter.getUniqueName(), new PreviewResultEntry(json, preview, List.of(entry)), status); + } + + + public static PreviewResultEntry buildFormChange(String uniqueName, AbstractNode oldRoot, AbstractNode newRoot, Object preview, String path) { + DiffResult diff = MetaDiffUtil.diff(oldRoot, newRoot); + ChangeStatus status = NodeUtil.evaluateStatus(diff, oldRoot); + + ChangeLogEntry entry = new ChangeLogEntry(uniqueName, Instant.now(), DiffMessageUtil.toMessages(diff), status); + + AbstractNode annotated = MetaAnnotator.annotateTree(oldRoot, newRoot, diff); + String json = NodeSerializer.serializeNode(annotated).toString(); + + PublisherManager pm = PublisherManager.getInstance(); + pm.addChange(entry); + PreviewResultEntry result = new PreviewResultEntry(json, preview, List.of(entry)); + pm.onMetadataChange(uniqueName, result, status); + pm.saveTempPath(uniqueName, path); + + formRootNode = newRoot; + + return result; + } + + + public static void applyAnnotatedTree(Adapter adapter, AbstractNode newRoot, String newHash, String[] additionallySelectedMetadata) { + + if (!(adapter instanceof DataSource)) { + throw new IllegalArgumentException("Adapter must be of type DataSource"); + } + + MetadataProvider metadataProvider = (MetadataProvider) adapter; + + Set selected = NodeUtil.collectSelecedAttributePaths(metadataProvider.getRoot()); + if (additionallySelectedMetadata != null) { + selected.addAll(Arrays.asList(additionallySelectedMetadata)); + } + + metadataProvider.setRoot(newRoot); + metadataProvider.markSelectedAttributes(List.copyOf(selected)); + HashCache.getInstance().put(adapter.getUniqueName(), newHash); + } + + + @Override + public void applyChange(String[] metadata) { + Set prevSelected = NodeUtil.collectSelecedAttributePaths(this.adapter.getRoot()); + + this.adapter.setRoot(this.currentNode); + if (metadata != null && metadata.length > 0) { + prevSelected.addAll(Arrays.asList(metadata)); + } + this.adapter.markSelectedAttributes(List.copyOf(prevSelected)); + HashCache.getInstance().put(this.adapter.getUniqueName(), this.hash); + + this.currentNode = null; + this.adapter = null; + this.hash = null; + + available ^= true; + + } + + + // CSV and Excel does not support observer deployment. Therefore, a manual approach with a reupload is necessary to update data. 
+ public static void applyFormChange(String[] metadata, String uniqueName, String newPath) { + AbstractNode newRoot = formRootNode; + + DataSource adapter = AdapterManager.getInstance().getSource(uniqueName).orElseThrow(); + MetadataProvider metadataprovider = (MetadataProvider) adapter; + + deleteTempPath(newPath, adapter.getSettings().get("directory")); + + newRoot = metadataprovider.fetchMetadataTree(); + + AbstractNode oldRoot = metadataprovider.getRoot(); + metadataprovider.setRoot(newRoot); + + Set prevSelected = NodeUtil.collectSelecedAttributePaths(oldRoot); + // metadataprovider.setRoot( newRoot ); + if (metadata != null && metadata.length > 0) { + prevSelected.addAll(Arrays.asList(metadata)); + } + + metadataprovider.markSelectedAttributes(List.copyOf(prevSelected)); + + formRootNode = null; + PublisherManager.getInstance().deleteTempPath(uniqueName); + + } + + + // Changing the old file on the directory with the new/temporary file. + private static void deleteTempPath(String tmpPath, String directory) { + File tmpDir = new File(tmpPath); + File targetDir = new File(directory); + + if (!tmpDir.exists() || !tmpDir.isDirectory()) { + throw new IllegalArgumentException("tmpPath is not a valid directory: " + tmpPath); + } + if (!targetDir.exists() || !targetDir.isDirectory()) { + throw new IllegalArgumentException("directory is not a valid directory: " + directory); + } + + for (File file : targetDir.listFiles()) { + if (!file.delete()) { + throw new RuntimeException("Failed to delete file: " + file.getAbsolutePath()); + } + } + + for (File file : tmpDir.listFiles()) { + try { + Files.copy(file.toPath(), new File(targetDir, file.getName()).toPath(), + StandardCopyOption.REPLACE_EXISTING); + } catch (IOException e) { + throw new RuntimeException("Failed to copy file: " + file.getAbsolutePath(), e); + } + } + + for (File file : tmpDir.listFiles()) { + file.delete(); + } + if (!tmpDir.delete()) { + throw new RuntimeException("Failed to delete tmpPath directory: " + tmpDir.getAbsolutePath()); + } + } + + + @Override + public boolean isAvailable() { + return this.available; + } + +} diff --git a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/MetadataListener.java b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/MetadataListener.java deleted file mode 100644 index ad2a92ad90..0000000000 --- a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/MetadataListener.java +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Copyright 2019-2025 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.adapter.MetadataObserver; - -import org.polypheny.db.adapter.Adapter; -import org.polypheny.db.schemaDiscovery.AbstractNode; -import org.polypheny.db.schemaDiscovery.MetadataProvider; - -public interface MetadataListener
<P extends Adapter & MetadataProvider>
{ - - void onMetadataChange( P adapter, AbstractNode node, String hash ); - - boolean isAvailable(); - - void applyChange( String[] metadata ); - -} diff --git a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/MetadataPublisher.java b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/MetadataPublisher.java deleted file mode 100644 index 3f27c01bc9..0000000000 --- a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/MetadataPublisher.java +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Copyright 2019-2025 The Polypheny Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.polypheny.db.adapter.MetadataObserver; - -public interface MetadataPublisher { - - String getAdapterUniqueName(); - - void start(); - - void stop(); - - void runCheck(); - - MetadataListener getListener(); - - -} diff --git a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/PublisherImpl.java b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/PublisherImpl.java new file mode 100644 index 0000000000..f49d890220 --- /dev/null +++ b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/PublisherImpl.java @@ -0,0 +1,89 @@ +/* + * Copyright 2019-2025 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.adapter.MetadataObserver; + +import lombok.extern.slf4j.Slf4j; +import org.polypheny.db.adapter.Adapter; +import org.polypheny.db.schemaDiscovery.AbstractNode; +import org.polypheny.db.schemaDiscovery.MetadataProvider; +import org.polypheny.db.schemaDiscovery.NodeSerializer; + +import java.util.concurrent.Executors; +import java.util.concurrent.ScheduledExecutorService; + +@Slf4j +public class PublisherImpl
<P extends Adapter & MetadataProvider>
implements AbstractPublisher { + + protected final P provider; + private final long intervalSeconds = 30; + private final ScheduledExecutorService scheduler = Executors.newSingleThreadScheduledExecutor(); + private AbstractListener listener; + private final MetadataHasher hasher = new MetadataHasher(); + private final HashCache cache = HashCache.getInstance(); + + + protected PublisherImpl(P provider, AbstractListener listener) { + this.provider = provider; + this.listener = listener; + } + + + @Override + public String getAdapterUniqueName() { + return provider.getUniqueName(); + } + + + @Override + public void start() { + scheduler.scheduleAtFixedRate(this::runCheck, 0, intervalSeconds, java.util.concurrent.TimeUnit.SECONDS); + } + + + @Override + public void stop() { + scheduler.shutdown(); + } + + + @Override + public void runCheck() { + if (!listener.isAvailable()) { + return; + } + try { + AbstractNode node = provider.fetchMetadataTree(); + String fresh = NodeSerializer.serializeNode(node).toString(); + String hash = hasher.hash(fresh); + + String lastHash = cache.getHash(provider.getUniqueName()); + + if (lastHash != null && !lastHash.equals(hash)) { + listener.onMetadataChange(provider, node, hash); + } + } catch (Exception e) { + throw new RuntimeException("Error while checking current snapshot.", e); + } + } + + + @Override + public AbstractListener getListener() { + return this.listener; + } + +} diff --git a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/PublisherManager.java b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/PublisherManager.java index 5b1a31f62a..0b4119cd7b 100644 --- a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/PublisherManager.java +++ b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/PublisherManager.java @@ -16,32 +16,22 @@ package org.polypheny.db.adapter.MetadataObserver; -import lombok.AllArgsConstructor; -import lombok.Value; import lombok.extern.slf4j.Slf4j; import org.polypheny.db.adapter.Adapter; -import org.polypheny.db.adapter.AdapterManager; -import org.polypheny.db.adapter.DataSource; -import org.polypheny.db.adapter.MetadataObserver.Utils.MetaDiffUtil.DiffResult; -import org.polypheny.db.adapter.java.AdapterTemplate.PreviewResult; import org.polypheny.db.adapter.java.AdapterTemplate.PreviewResultEntry; -import org.polypheny.db.schemaDiscovery.AbstractNode; import org.polypheny.db.schemaDiscovery.MetadataProvider; -import java.time.Instant; import java.util.Deque; import java.util.List; import java.util.Map; -import java.util.Optional; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentLinkedDeque; -import java.util.concurrent.ConcurrentMap; @Slf4j public class PublisherManager { private static final int MAX_ENTRIES_PER_ADAPTER = 100; - private final Map publishers = new ConcurrentHashMap<>(); + private final Map publishers = new ConcurrentHashMap<>(); // Temporarily save the changes computed by a listener. private final Map changeCache = new ConcurrentHashMap<>(); @@ -69,12 +59,11 @@ private PublisherManager() { public
<P extends Adapter & MetadataProvider>
void onAdapterDeploy( P adapter ) { - log.info( "Adapter {} is going to be registered for metadata publish.", adapter.getUniqueName() ); if ( publishers.containsKey( adapter.getUniqueName() ) ) { return; } - MetadataListener listener = new AbstractListener(); - MetadataPublisher publisher = new AbstractPublisher<>( adapter, listener ); + AbstractListener listener = new ListenerImpl(); + AbstractPublisher publisher = new PublisherImpl<>( adapter, listener ); publishers.put( adapter.getUniqueName(), publisher ); publisher.start(); } @@ -90,7 +79,6 @@ public void onAdapterUndeploy( String uniqueName ) { this.changeCache.remove( uniqueName ); this.statusCache.remove( uniqueName ); - log.error( "Adapter {} is going to be unregistered for metadata publish.", uniqueName ); } @@ -115,10 +103,10 @@ public PreviewResultEntry fetchChange( String uniqueName ) { public void ack( String uniqueName, String[] metadata ) { - MetadataPublisher publisher = publishers.get( uniqueName ); + AbstractPublisher publisher = publishers.get( uniqueName ); if ( publishers.isEmpty() ) { - AbstractListener.applyFormChange( metadata, uniqueName, tempFileCache.get( uniqueName ) ); + ListenerImpl.applyFormChange( metadata, uniqueName, tempFileCache.get( uniqueName ) ); } else { publisher.getListener().applyChange( metadata ); } diff --git a/core/src/main/java/org/polypheny/db/adapter/java/AdapterTemplate.java b/core/src/main/java/org/polypheny/db/adapter/java/AdapterTemplate.java index bbca38dc76..fafb272dac 100644 --- a/core/src/main/java/org/polypheny/db/adapter/java/AdapterTemplate.java +++ b/core/src/main/java/org/polypheny/db/adapter/java/AdapterTemplate.java @@ -113,7 +113,6 @@ public DeployMode getDefaultMode() { public DataSource createEphemeral( Map settings ) { String previewName = "_preview" + System.nanoTime(); - log.info( "Creating ephemeral adapter {} with name {}", clazz.getName(), previewName ); Adapter adapter = deployer.get( -1L, previewName, settings, DeployMode.REMOTE ); if ( !(adapter instanceof DataSource ds) ) { @@ -126,27 +125,18 @@ public DataSource createEphemeral( Map settings ) { public PreviewResult preview( Map settings, int limit ) { DataSource tmp = createEphemeral( settings ); - log.info( "Adapter class: {}", tmp.getClass().getName() ); - log.info( "Implements MetadataProvider: {}", tmp instanceof MetadataProvider ); try { if ( tmp instanceof MetadataProvider mp ) { - log.info( "🎯 Adapter supports MetadataProvider. Fetching metadata and preview..." ); AbstractNode meta = mp.fetchMetadataTree(); mp.setRoot( meta ); - mp.printTree( meta, 0 ); String json = NodeSerializer.serializeNode( meta ).toString(); MetadataHasher hasher = new MetadataHasher(); String hash = hasher.hash( json ); - log.info( "Metadata hash at preview: {}", hash ); - // Object rows = mp.fetchPreview( limit ); Object rows = mp.getPreview(); - log.error( json ); - // log.error( rows.toString() ); return new PreviewResult( json, rows, null ); } throw new GenericRuntimeException( "The adapter does not implement MetadataProvider." ); } finally { - log.info( "πŸ”» Shutting down preview adapter." 
); tmp.shutdown(); } } diff --git a/core/src/main/java/org/polypheny/db/schemaDiscovery/MetadataProvider.java b/core/src/main/java/org/polypheny/db/schemaDiscovery/MetadataProvider.java index 257ef58406..ddb9726a3d 100644 --- a/core/src/main/java/org/polypheny/db/schemaDiscovery/MetadataProvider.java +++ b/core/src/main/java/org/polypheny/db/schemaDiscovery/MetadataProvider.java @@ -28,8 +28,6 @@ public interface MetadataProvider { void markSelectedAttributes( List selectedPaths ); - void printTree( AbstractNode node, int depth ); - void setRoot( AbstractNode root ); Object getPreview(); @@ -37,6 +35,4 @@ public interface MetadataProvider { AbstractNode getRoot(); - - } diff --git a/core/src/main/java/org/polypheny/db/schemaDiscovery/NodeUtil.java b/core/src/main/java/org/polypheny/db/schemaDiscovery/NodeUtil.java index 2ca912dae8..96490a4140 100644 --- a/core/src/main/java/org/polypheny/db/schemaDiscovery/NodeUtil.java +++ b/core/src/main/java/org/polypheny/db/schemaDiscovery/NodeUtil.java @@ -34,9 +34,10 @@ public final class NodeUtil { private static final String NORMALIZED_SEPARATOR = "."; - private static String normalizePath(String rawPath) { - return rawPath.replace("/", NORMALIZED_SEPARATOR) - .replace("\\", NORMALIZED_SEPARATOR); + + private static String normalizePath( String rawPath ) { + return rawPath.replace( "/", NORMALIZED_SEPARATOR ) + .replace( "\\", NORMALIZED_SEPARATOR ); } @@ -58,7 +59,6 @@ public static Set collectSelecedAttributePaths( AbstractNode root ) { private static void traverse( AbstractNode node, Deque path, Set acc ) { path.addLast( node.getName() ); if ( node instanceof AttributeNode attr && attr.isSelected() ) { - log.debug( ">> visiting {}", String.join( "/", path ) + " selected=" + attr.isSelected() ); acc.add( String.join( ".", path ) ); } @@ -99,9 +99,6 @@ public static void unmarkSelectedAttributes( AbstractNode metadataRoot, List childOpt = current.getChildren().stream() @@ -111,8 +108,6 @@ public static void unmarkSelectedAttributes( AbstractNode metadataRoot, List selected = collectSelecedAttributePaths( oldRoot ); - for ( String path : selected ) log.info( path ); - for (String removedRaw : diff.getRemoved()) { - String removed = normalizePath(removedRaw); - for (String selectedRaw : selected) { - String selectedNorm = normalizePath(selectedRaw); - if (removed.equals(selectedNorm) || - selectedNorm.startsWith(removed + NORMALIZED_SEPARATOR) || - removed.startsWith(selectedNorm + NORMALIZED_SEPARATOR)) { + for ( String removedRaw : diff.getRemoved() ) { + String removed = normalizePath( removedRaw ); + for ( String selectedRaw : selected ) { + String selectedNorm = normalizePath( selectedRaw ); + if ( removed.equals( selectedNorm ) || + selectedNorm.startsWith( removed + NORMALIZED_SEPARATOR ) || + removed.startsWith( selectedNorm + NORMALIZED_SEPARATOR ) ) { return ChangeStatus.CRITICAL; } } diff --git a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java index d9f7169ea7..a7dbeb4fc1 100644 --- a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java +++ b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java @@ -238,7 +238,6 @@ record PathParts( String physicalNs, String physicalSchema, String table, String List parsedPaths = selectedPaths.stream() .map( p -> { String[] parts = p.split( "\\." 
-
                    String physNs;
                    String physSchema;
                    String table;
@@ -277,7 +276,8 @@ record PathParts( String physicalNs, String physicalSchema, String table, String
         settings.merge( "selectedAttributes", merged, ( oldVal, newVal ) -> oldVal.isBlank() ? newVal : oldVal + "," + newVal );
         adapter.get().updateSettings( settings );
-        Map> wishedColsPerTable = parsedPaths.stream()
+        Map> wishedColsPerTable = parsedPaths
+                .stream()
                 .collect( Collectors.groupingBy( PathParts::table, Collectors.mapping( PathParts::column, Collectors.toSet() ) ) );
         Map> exportedColumns;
@@ -390,7 +390,13 @@ public void removeSelectedMetadata( List paths, Statement stmt, String u
                 continue;
             }
             PathParts pp = parsePathUniversal( raw );
-            work.computeIfAbsent( new TableKey( pp.physSchema(), pp.table() ), k -> new HashSet<>() ).add( pp.column() );
+
+            String colName = pp.column();
+            if ( colName.contains( ":" ) ) {
+                colName = colName.split( ":" )[0];
+            }
+
+            work.computeIfAbsent( new TableKey( pp.physSchema(), pp.table() ), k -> new HashSet<>() ).add( colName );
         }
         for ( Map.Entry> e : work.entrySet() ) {
@@ -432,12 +438,10 @@ private void dropWholeSourceTable( LogicalTable table, Statement statement ) {
         List allocs = catalog.getSnapshot().alloc().getFromLogical( table.id );
         if ( allocs.size() != 1 ) {
-            throw new GenericRuntimeException( "SOURCE-Tabelle " + table.name +
-                    " hat mehr als ein Placement." );
+            throw new GenericRuntimeException( "Source table " + table.name + " has more than one placement." );
         }
-        AllocationTable placement = allocs.get( 0 )
-                .unwrapOrThrow( AllocationTable.class );
+        AllocationTable placement = allocs.get( 0 ).unwrapOrThrow( AllocationTable.class );
         for ( LogicalForeignKey fk : catalog.getSnapshot().rel().getForeignKeys( table.id ) ) {
             catalog.getLogicalRel( table.namespaceId ).deleteForeignKey( fk.id );
@@ -465,11 +469,9 @@ private void dropSourceColumn( LogicalTable table, String columnName, Statement
                 .getColumn( table.id, columnName )
                 .orElse( null );
         if ( column == null ) {
-            log.info( "Spalte {}.{} bereits weg → nichts zu tun.", table.name, columnName );
             return;
         }
-        // 1) FKs weg
         for ( LogicalForeignKey fk : catalog.getSnapshot().rel().getForeignKeys( table.id ) ) {
             if ( fk.getFieldIds().contains( column.id ) ) {
                 catalog.getLogicalRel( table.namespaceId ).deleteForeignKey( fk.id );
@@ -540,14 +542,12 @@ private void createRelationalSource( Transaction transaction, DataSource adap
         String attributes = adapter.getSettings().get( "selectedAttributes" );
         String uniqueName = adapter.getUniqueName();
         List selectedAttributeNames = new ArrayList<>();
-        log.error( "Das ist das Attributes String: " + attributes );
         if ( attributes != null ) {
             List selectedAttributes = new Gson().fromJson( attributes, new TypeToken>() {
             }.getType() );
             selectedAttributeNames = selectedAttributes.stream()
                     .map( s -> s.substring( s.lastIndexOf( '.' ) + 1 ) )
                    .collect( Collectors.toList() );
-            log.error( "Das sind die Attribute die gefiltert werden müssen: " + selectedAttributeNames );
             if ( adapter instanceof MetadataProvider mp ) {
@@ -557,18 +557,14 @@ private void createRelationalSource( Transaction transaction, DataSource adap
                 AbstractNode node = mp.fetchMetadataTree();
                 mp.setRoot( node );
                 String hash = hasher.hash( NodeSerializer.serializeNode( node ).toString() );
-                log.info( "Metadata hash at deployment: {}", hash );
                 HashCache.getInstance().put( uniqueName, hash );
-                log.info( "Key used during deployment: {} ", uniqueName );
                 if ( !(adapter.getAdapterName().equals( "Excel" ) || adapter.getAdapterName().equals( "CSV" )) ) {
                     pm.onAdapterDeploy( (Adapter & MetadataProvider) mp );
                 }
                 mp.markSelectedAttributes( selectedAttributes );
-                log.error( "SelectedAttributes ist gesetzt aus dem DdlManager und der Tree ist das hier: " );
-                mp.printTree( null, 0 );
             }
         }
diff --git a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSource.java b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSource.java
index 687b56e3c1..85208d46d5 100644
--- a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSource.java
+++ b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSource.java
@@ -529,14 +529,12 @@ private Source openCsvSource( @Nullable String fqName ) throws IOException {
     @Override
     public void markSelectedAttributes( List selectedPaths ) {
         if ( this.metadataRoot == null ) {
-            log.warn( "⚠️ Kein Metadatenbaum vorhanden – kann Attribute nicht markieren." );
-            return;
+            throw new GenericRuntimeException( "No metadata tree available." );
         }
         for ( String path : selectedPaths ) {
             int lastDot = path.lastIndexOf( '.' );
             if ( lastDot == -1 || lastDot == path.length() - 1 ) {
-                log.warn( "⚠️ Kein gültiger Attribut-Pfad: " + path );
                 continue;
             }
@@ -550,30 +548,11 @@ public void markSelectedAttributes( List selectedPaths ) {
             if ( attrOpt.isPresent() ) {
                 ((AttributeNode) attrOpt.get()).setSelected( true );
-                log.info( "✅ Attribut gesetzt: " + path );
-            } else {
-                log.warn( "❌ Attribut nicht gefunden: " + normalizedColumnName + " im Pfad: " + path );
             }
         }
     }
-    @Override
-    public void printTree( AbstractNode node, int depth ) {
-        if ( node == null ) {
-            node = this.metadataRoot;
-        }
-        System.out.println( " ".repeat( depth ) + node.getType() + ": " + node.getName() );
-        for ( Map.Entry entry : node.getProperties().entrySet() ) {
-            System.out.println( " ".repeat( depth + 1 ) + "- " + entry.getKey() + ": " + entry.getValue() );
-        }
-        for ( AbstractNode child : node.getChildren() ) {
-            printTree( child, depth + 1 );
-        }
-
-    }
-
-
     @Override
     public void setRoot( AbstractNode root ) {
         this.metadataRoot = root;
diff --git a/plugins/excel-adapter/src/main/java/org/polypheny/db/adapter/excel/ExcelSource.java b/plugins/excel-adapter/src/main/java/org/polypheny/db/adapter/excel/ExcelSource.java
index c742bb24f2..05238a7799 100644
--- a/plugins/excel-adapter/src/main/java/org/polypheny/db/adapter/excel/ExcelSource.java
+++ b/plugins/excel-adapter/src/main/java/org/polypheny/db/adapter/excel/ExcelSource.java
@@ -108,7 +108,6 @@ public class ExcelSource extends DataSource implements Relati
     public ExcelSource( final long storeId, final String uniqueName, final Map settings, final DeployMode mode ) {
         super( storeId, uniqueName, settings, mode, true, new RelAdapterCatalog( storeId ), Set.of( DataModel.RELATIONAL ) );
-        log.error( settings.get( "directory" ) );
         this.connectionMethod = settings.containsKey( "method" )
                ? ConnectionMethod.from( settings.get( "method" ) ) : ConnectionMethod.UPLOAD;
         // Validate maxStringLength setting
@@ -129,15 +128,12 @@ public ExcelSource( final long storeId, final String uniqueName, final Map settings ) {
         String dir = settings.get( "directory" );
-        log.error( "Directory kommt an als: " + settings.get( "directory" ) );
-
         if ( dir != null && dir.trim().startsWith( "[" ) ) {
             try {
                 List list = new ObjectMapper()
                         .readValue( dir, new TypeReference>() {
                         } );
                 dir = list.isEmpty() ? null : list.get( 0 );
-                log.error( "Directory nach Parsing: " + dir );
             } catch ( IOException e ) {
                 throw new GenericRuntimeException( "Cannot parse directory JSON", e );
             }
@@ -490,7 +486,6 @@ private String getCellValueAsString( Cell cell, String fallback ) {
     }
-
     @Override
     public List> fetchPreview( Connection conn, String fqName, int limit ) {
@@ -510,7 +505,6 @@ public List> fetchPreview( Connection conn, String fqName, i
             Sheet sheet = wb.getSheet( sheetName );
             if ( sheet == null ) {
-                log.warn( "Sheet {} not found in {}", sheetName, filePath );
                 return List.of();
             }
@@ -598,11 +592,7 @@ public void markSelectedAttributes( List selectedPaths ) {
                 if ( attrNodeOpt.isPresent() ) {
                     ((AttributeNode) attrNodeOpt.get()).setSelected( true );
-                    log.info( "✅ Attribut gesetzt: " + String.join( ".", pathSegments ) );
-                } else {
-                    log.warn( "❌ Attribut nicht gefunden: " + String.join( ".", pathSegments ) );
                 }
-
             } else {
                 Optional childOpt = current.getChildren().stream()
                         .filter( c -> segment.equals( c.getName() ) )
@@ -611,7 +601,6 @@ public void markSelectedAttributes( List selectedPaths ) {
                 if ( childOpt.isPresent() ) {
                     current = childOpt.get();
                 } else {
-                    log.warn( "❌ Segment nicht gefunden: " + segment + " im Pfad " + String.join( ".", pathSegments ) );
                     break;
                 }
             }
@@ -621,22 +610,6 @@ public void markSelectedAttributes( List selectedPaths ) {
     }
-    @Override
-    public void printTree( AbstractNode node, int depth ) {
-        if ( node == null ) {
-            node = this.metadataRoot;
-        }
-        System.out.println( " ".repeat( depth ) + node.getType() + ": " + node.getName() );
-        for ( Map.Entry entry : node.getProperties().entrySet() ) {
-            System.out.println( " ".repeat( depth + 1 ) + "- " + entry.getKey() + ": " + entry.getValue() );
-        }
-        for ( AbstractNode child : node.getChildren() ) {
-            printTree( child, depth + 1 );
-        }
-
-    }
-
-
     @Override
     public void setRoot( AbstractNode root ) {
         this.metadataRoot = root;
diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/sources/AbstractJdbcSource.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/sources/AbstractJdbcSource.java
index 066bb6df93..ad509b3c18 100644
--- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/sources/AbstractJdbcSource.java
+++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/sources/AbstractJdbcSource.java
@@ -226,10 +226,6 @@ public Map> getExportedColumns() {
         String[] tables;
-        for ( Map.Entry entry : settings.entrySet() ) {
-            log.error( "Entry: {} = {}", entry.getKey(), entry.getValue() );
-        }
-
         // TODO If-else usage for possibly allow the usage of the old table-setting or selecting metadata. Not implemented yet.
        if ( !settings.containsKey( "selectedAttributes" ) || settings.get( "selectedAttributes" ).equals( "" ) || settings.get( "selectedAttributes" ).isEmpty() || settings.get( "selectedAttributes" ) == null ) {
             tables = settings.get( "tables" ).split( "," );
diff --git a/plugins/json-adapter/src/main/java/org/polypheny/db/adapter/json/JsonSource.java b/plugins/json-adapter/src/main/java/org/polypheny/db/adapter/json/JsonSource.java
index c23be1df6d..738f4fe1c3 100644
--- a/plugins/json-adapter/src/main/java/org/polypheny/db/adapter/json/JsonSource.java
+++ b/plugins/json-adapter/src/main/java/org/polypheny/db/adapter/json/JsonSource.java
@@ -375,22 +375,6 @@ public void markSelectedAttributes( List selectedPaths ) {
     }
-    @Override
-    public void printTree( AbstractNode node, int depth ) {
-        if ( node == null ) {
-            node = this.metadataRoot;
-        }
-        System.out.println( "Node type:" + node.toString() );
-        System.out.println( " ".repeat( depth ) + node.getType() + ": " + node.getName() );
-        for ( Map.Entry entry : node.getProperties().entrySet() ) {
-            System.out.println( " ".repeat( depth + 1 ) + "- " + entry.getKey() + ": " + entry.getValue() );
-        }
-        for ( AbstractNode child : node.getChildren() ) {
-            printTree( child, depth + 1 );
-        }
-    }
-
-
     @Override
     public void setRoot( AbstractNode root ) {
         this.metadataRoot = root;
diff --git a/plugins/monetdb-adapter/src/main/java/org/polypheny/db/adapter/monetdb/sources/MonetdbSource.java b/plugins/monetdb-adapter/src/main/java/org/polypheny/db/adapter/monetdb/sources/MonetdbSource.java
index 949468710e..87d8303153 100644
--- a/plugins/monetdb-adapter/src/main/java/org/polypheny/db/adapter/monetdb/sources/MonetdbSource.java
+++ b/plugins/monetdb-adapter/src/main/java/org/polypheny/db/adapter/monetdb/sources/MonetdbSource.java
@@ -473,11 +473,7 @@ public void markSelectedAttributes( List selectedPaths ) {
                 if ( attrNodeOpt.isPresent() ) {
                     ((AttributeNode) attrNodeOpt.get()).setSelected( true );
-                    log.info( "✅ Attribut gesetzt: " + String.join( ".", pathSegments ) );
-                } else {
-                    log.warn( "❌ Attribut nicht gefunden: " + String.join( ".", pathSegments ) );
                 }
-
             } else {
                 Optional childOpt = current.getChildren().stream()
                         .filter( c -> segment.equals( c.getName() ) )
@@ -486,7 +482,6 @@ public void markSelectedAttributes( List selectedPaths ) {
                 if ( childOpt.isPresent() ) {
                     current = childOpt.get();
                 } else {
-                    log.warn( "❌ Segment nicht gefunden: " + segment + " im Pfad " + String.join( ".", pathSegments ) );
                     break;
                 }
             }
@@ -496,21 +491,6 @@ public void markSelectedAttributes( List selectedPaths ) {
     }
-    @Override
-    public void printTree( AbstractNode node, int depth ) {
-        if ( node == null ) {
-            node = this.metadataRoot;
-        }
-        System.out.println( " ".repeat( depth ) + node.getType() + ": " + node.getName() );
-        for ( Map.Entry entry : node.getProperties().entrySet() ) {
-            System.out.println( " ".repeat( depth + 1 ) + "- " + entry.getKey() + ": " + entry.getValue() );
-        }
-        for ( AbstractNode child : node.getChildren() ) {
-            printTree( child, depth + 1 );
-        }
-    }
-
-
     @Override
     public void setRoot( AbstractNode root ) {
         this.metadataRoot = root;
diff --git a/plugins/mysql-adapter/src/main/java/org/polypheny/db/adapter/jdbc/MysqlSourcePlugin.java b/plugins/mysql-adapter/src/main/java/org/polypheny/db/adapter/jdbc/MysqlSourcePlugin.java
index 07da5873c1..99aa461e06 100644
--- a/plugins/mysql-adapter/src/main/java/org/polypheny/db/adapter/jdbc/MysqlSourcePlugin.java
+++ b/plugins/mysql-adapter/src/main/java/org/polypheny/db/adapter/jdbc/MysqlSourcePlugin.java
@@ -338,10 +338,6 @@ public Map> getExportedColumns() {
         String[] tables;
-        for ( Map.Entry entry : settings.entrySet() ) {
-            log.error( "Entry: {} = {}", entry.getKey(), entry.getValue() );
-        }
-
         if ( !settings.containsKey( "selectedAttributes" ) || settings.get( "selectedAttributes" ).equals( "" ) || settings.get( "selectedAttributes" ).isEmpty() || settings.get( "selectedAttributes" ) == null ) {
             tables = settings.get( "tables" ).split( "," );
         } else {
@@ -496,11 +492,7 @@ public void markSelectedAttributes( List selectedPaths ) {
                 if ( attrNodeOpt.isPresent() ) {
                     ((AttributeNode) attrNodeOpt.get()).setSelected( true );
-                    log.info( "✅ Attribut gesetzt: " + String.join( ".", pathSegments ) );
-                } else {
-                    log.warn( "❌ Attribut nicht gefunden: " + String.join( ".", pathSegments ) );
                 }
-
             } else {
                 Optional childOpt = current.getChildren().stream()
                         .filter( c -> segment.equals( c.getName() ) )
@@ -509,7 +501,6 @@ public void markSelectedAttributes( List selectedPaths ) {
                 if ( childOpt.isPresent() ) {
                     current = childOpt.get();
                 } else {
-                    log.warn( "❌ Segment nicht gefunden: " + segment + " im Pfad " + String.join( ".", pathSegments ) );
                     break;
                 }
             }
@@ -518,21 +509,6 @@ public void markSelectedAttributes( List selectedPaths ) {
     }
-    @Override
-    public void printTree( AbstractNode node, int depth ) {
-        if ( node == null ) {
-            node = this.metadataRoot;
-        }
-        System.out.println( " ".repeat( depth ) + node.getType() + ": " + node.getName() );
-        for ( Map.Entry entry : node.getProperties().entrySet() ) {
-            System.out.println( " ".repeat( depth + 1 ) + "- " + entry.getKey() + ": " + entry.getValue() );
-        }
-        for ( AbstractNode child : node.getChildren() ) {
-            printTree( child, depth + 1 );
-        }
-    }
-
-
     @Override
     public void setRoot( AbstractNode root ) {
         this.metadataRoot = root;
diff --git a/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/source/OracleSource.java b/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/source/OracleSource.java
index 6b34e39e92..6ea8cca5c5 100644
--- a/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/source/OracleSource.java
+++ b/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/source/OracleSource.java
@@ -165,9 +165,6 @@ public Map> getExportedColumns() {
         DatabaseMetaData dbmd = connection.getMetaData();
         String[] tables;
-        for ( Map.Entry entry : settings.entrySet() ) {
-            log.error( "Entry: {} = {}", entry.getKey(), entry.getValue() );
-        }
         // TODO If-else usage for possibly allow the usage of the old table-setting or selecting metadata. Not implemented yet.
        if ( !settings.containsKey( "selectedAttributes" ) || settings.get( "selectedAttributes" ).equals( "" ) || settings.get( "selectedAttributes" ).isEmpty() || settings.get( "selectedAttributes" ) == null ) {
@@ -399,8 +396,7 @@ public AbstractNode fetchMetadataTree() {
     public List> fetchPreview( Connection conn, String fqName, int limit ) {
         List> rows = new ArrayList<>();
         try ( Statement stmt = conn.createStatement();
-                ResultSet rs = stmt.executeQuery(
-                        "SELECT * FROM " + fqName + " FETCH FIRST " + limit + " ROWS ONLY" ) ) {
+                ResultSet rs = stmt.executeQuery( "SELECT * FROM " + fqName + " FETCH FIRST " + limit + " ROWS ONLY" ) ) {
             ResultSetMetaData meta = rs.getMetaData();
             while ( rs.next() ) {
                 Map row = new LinkedHashMap<>();
@@ -445,9 +441,6 @@ public void markSelectedAttributes( List selectedPaths ) {
                 if ( attrNodeOpt.isPresent() ) {
                     ((AttributeNode) attrNodeOpt.get()).setSelected( true );
-                    log.info( "✅ Attribut gesetzt: " + String.join( ".", pathSegments ) );
-                } else {
-                    log.warn( "❌ Attribut nicht gefunden: " + String.join( ".", pathSegments ) );
                 }
             } else {
@@ -458,7 +451,6 @@ public void markSelectedAttributes( List selectedPaths ) {
                 if ( childOpt.isPresent() ) {
                     current = childOpt.get();
                 } else {
-                    log.warn( "❌ Segment nicht gefunden: " + segment + " im Pfad " + String.join( ".", pathSegments ) );
                     break;
                 }
             }
@@ -467,22 +459,6 @@ public void markSelectedAttributes( List selectedPaths ) {
     }
-    @Override
-    public void printTree( AbstractNode node, int depth ) {
-        if ( node == null ) {
-            node = this.metadataRoot;
-        }
-        System.out.println( " ".repeat( depth ) + node.getType() + ": " + node.getName() );
-        for ( Map.Entry entry : node.getProperties().entrySet() ) {
-            System.out.println( " ".repeat( depth + 1 ) + "- " + entry.getKey() + ": " + entry.getValue() );
-        }
-        for ( AbstractNode child : node.getChildren() ) {
-            printTree( child, depth + 1 );
-        }
-
-    }
-
-
     @Override
     public void setRoot( AbstractNode root ) {
         this.metadataRoot = root;
diff --git a/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java b/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java
index ebd8bd820c..9c69ef781d 100644
--- a/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java
+++ b/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java
@@ -275,9 +275,6 @@ public void markSelectedAttributes( List selectedPaths ) {
                 if ( attrNodeOpt.isPresent() ) {
                     ((AttributeNode) attrNodeOpt.get()).setSelected( true );
-                    log.info( "✅ Attribut gesetzt: " + String.join( ".", pathSegments ) );
-                } else {
-                    log.warn( "❌ Attribut nicht gefunden: " + String.join( ".", pathSegments ) );
                 }
             } else {
@@ -288,7 +285,6 @@ public void markSelectedAttributes( List selectedPaths ) {
                 if ( childOpt.isPresent() ) {
                     current = childOpt.get();
                 } else {
-                    log.warn( "❌ Segment nicht gefunden: " + segment + " im Pfad " + String.join( ".", pathSegments ) );
                     break;
                 }
             }
@@ -298,21 +294,6 @@ public void markSelectedAttributes( List selectedPaths ) {
     }
-    @Override
-    public void printTree( AbstractNode node, int depth ) {
-        if ( node == null ) {
-            node = this.metadataRoot;
-        }
-        System.out.println( " ".repeat( depth ) + node.getType() + ": " + node.getName() );
-        for ( Map.Entry entry : node.getProperties().entrySet() ) {
-            System.out.println( " ".repeat( depth + 1 ) + "- " + entry.getKey() + ": " + entry.getValue() );
-        }
-        for ( AbstractNode child : node.getChildren() ) {
-            printTree( child, depth + 1 );
-        }
-    }
-
-
     public PostgresqlSource( final long storeId, final String uniqueName, final Map settings, final DeployMode mode ) {
         super( storeId,
@@ -372,7 +353,6 @@ public List createTable( Context context, LogicalTableWrapper lo
                 logical.pkIds, allocation );
         adapterCatalog.replacePhysical( currentJdbcSchema.createJdbcTable( table ) );
-        log.error( "Postgres Adapter ID ist: " + this.adapterId );
         return List.of( table );
     }
diff --git a/webui/src/main/java/org/polypheny/db/webui/Crud.java b/webui/src/main/java/org/polypheny/db/webui/Crud.java
index 0289a7c2e8..6664278274 100644
--- a/webui/src/main/java/org/polypheny/db/webui/Crud.java
+++ b/webui/src/main/java/org/polypheny/db/webui/Crud.java
@@ -47,9 +47,7 @@
 import java.nio.file.Files;
 import java.nio.file.Path;
 import java.text.DateFormat;
-import java.sql.SQLException;
 import java.text.SimpleDateFormat;
-import java.util.Collection;
 import java.util.Comparator;
 import java.util.HashSet;
 import java.util.ArrayList;
@@ -80,7 +78,6 @@
 import org.apache.commons.io.FileUtils;
 import org.apache.commons.io.IOUtils;
 import org.eclipse.jetty.websocket.api.Session;
-import org.jetbrains.annotations.NotNull;
 import org.polypheny.db.adapter.AbstractAdapterSettingString;
 import org.polypheny.db.adapter.AbstractAdapterSetting;
 import org.polypheny.db.adapter.AbstractAdapterSettingDirectory;
@@ -91,7 +88,7 @@
 import org.polypheny.db.adapter.DataSource;
 import org.polypheny.db.adapter.DataStore;
 import org.polypheny.db.adapter.DataStore.FunctionalIndexInfo;
-import org.polypheny.db.adapter.MetadataObserver.AbstractListener;
+import org.polypheny.db.adapter.MetadataObserver.ListenerImpl;
 import org.polypheny.db.adapter.MetadataObserver.ChangeLogEntry;
 import org.polypheny.db.adapter.MetadataObserver.ChangeLogView;
 import org.polypheny.db.adapter.MetadataObserver.PublisherManager;
@@ -203,7 +200,6 @@
 import org.polypheny.db.webui.models.PartitionFunctionModel.PartitionFunctionColumn;
 import org.polypheny.db.webui.models.PathAccessRequest;
 import org.polypheny.db.webui.models.PlacementFieldsModel;
-import org.polypheny.db.webui.models.PlacementFieldsModel.Method;
 import org.polypheny.db.webui.models.PlacementModel;
 import org.polypheny.db.webui.models.PlacementModel.RelationalStore;
 import org.polypheny.db.webui.models.QueryInterfaceModel;
@@ -912,26 +908,6 @@ void buildPreview( final Context ctx ) {
             }
         }
-        log.info( "🔧 AdapterModel empfangen:" );
-        log.info( " 🔹 Name : {}", a.adapterName );
-        log.info( " 🔹 Adapter : {}", a.adapterType );
-        log.info( " 🔹 Type : {}", a.limit );
-        log.info( " 🔹 UniqueName : {}", a.uniqueName );
-
-        log.info( "📦 Settings:" );
-        for ( Entry entry : a.settings.entrySet() ) {
-            log.info( " - {}: {}", entry.getKey(), entry.getValue() );
-        }
-
-        if ( inputStreams.isEmpty() ) {
-            log.info( "📁 Keine Dateien empfangen." );
-        } else {
-            log.info( "📁 Empfangene Dateien:" );
-            for ( String file : inputStreams.keySet() ) {
-                log.info( " - Datei: {}", file );
-            }
-        }
-
         AdapterTemplate template = AdapterManager.getAdapterTemplate( a.adapterName, a.adapterType );
         Map allSettings = template.settings
                 .stream()
@@ -959,7 +935,6 @@ void buildPreview( final Context ctx ) {
                 String path = handleUploadFiles( fileBytes, fileNames, (AbstractAdapterSettingDirectory) allSettings.get( "directory" ), a );
                 a.settings.put( "directory", path );
-                log.error( "Full path: {}", path );
             }
             PreviewResult result = template.preview( a.settings, 10 );
@@ -967,8 +942,8 @@
         } catch ( Exception e ) {
-            log.error( "Fehler beim Verarbeiten des Preview-Requests", e );
-            ctx.status( HttpCode.INTERNAL_SERVER_ERROR ).result( "Fehler beim Preview" );
+            log.error( "Error during preview request.", e );
+            ctx.status( HttpCode.INTERNAL_SERVER_ERROR ).result( "Error while building preview." );
         }
     }
@@ -1009,7 +984,6 @@ void metadataChange( final Context ctx ) {
      */
     void metadataAck( final Context ctx ) {
         AckPayload payload = ctx.bodyAsClass( AckPayload.class );
-        log.info( "Acknowledgement incoming: " + payload.toString() );
         PublisherManager.getInstance().ack( payload.uniqueName, payload.addedPaths );
         Transaction transaction = transactionManager.startTransaction( Catalog.defaultUserId, false, "metadata-ack-" + payload.uniqueName );
@@ -1036,7 +1010,10 @@ void metadataAck( final Context ctx ) {
         } finally {
             if ( stmt != null ) {
                 stmt.close();
-                transactionManager.removeTransaction( transaction.getXid() );
+                if ( transaction.isActive() ) {
+                    transactionManager.removeTransaction( transaction.getXid() );
+                }
+
             }
         }
     }
@@ -1063,15 +1040,11 @@ void setMetaConfiguration( final Context ctx ) {
         ConfigPayload config = ctx.bodyAsClass( ConfigPayload.class );
         Set userSelection = Set.of( config.selected );
         Set markedPaths;
-        log.error( config.toString() );
         Optional> adapter = AdapterManager.getInstance().getSource( config.uniqueName );
         if ( adapter.get() instanceof MetadataProvider mp ) {
             AbstractNode root = mp.getRoot();
             markedPaths = NodeUtil.collectSelecedAttributePaths( root );
-            for ( String p : markedPaths ) {
-                log.info( "Selected path: " + p );
-            }
         } else {
             ctx.status( 500 ).json( Map.of( "message", "Configuration can not be applied." ) );
            return;
@@ -1114,7 +1087,10 @@ void setMetaConfiguration( final Context ctx ) {
         } finally {
             if ( stmt != null ) {
                 stmt.close();
-                transactionManager.removeTransaction( tx.getXid() );
+                if ( tx.isActive() ) {
+                    transactionManager.removeTransaction( tx.getXid() );
+                }
+
             }
         }
@@ -2365,7 +2341,6 @@ void updateSettingsForm( final Context ctx ) throws IOException, ServletExceptio
         String fullPath = handleUploadFiles( fileBytes, fileNames, null, am );
         am.uniqueName = uniqueName;
         createFormDiffs( am, fullPath );
-        log.error( fullPath );
         ctx.result( "File(s) stored at: " + fullPath );
     }
@@ -2384,10 +2359,7 @@ private void createFormDiffs( PreviewRequest previewRequest, String path ) {
         AbstractNode tempNode = tempProvider.fetchMetadataTree();
         Object newPreview = tempProvider.getPreview();
-        PreviewResultEntry result = AbstractListener.buildFormChange( previewRequest.uniqueName, currentNode, tempNode, newPreview, path );
-
-        currentProvider.printTree( currentNode, 0 );
-        tempProvider.printTree( tempNode, 0 );
+        PreviewResultEntry result = ListenerImpl.buildFormChange( previewRequest.uniqueName, currentNode, tempNode, newPreview, path );
         try {
             tempSource.shutdown();
@@ -2603,29 +2575,8 @@ private static String handleUploadFiles( Map inputStreams,
     }
-
-
-    /* private static String handleUploadFiles( Map inputStreams, List fileNames, AbstractAdapterSettingDirectory setting, PreviewRequest a ) {
-        for ( String fileName : fileNames ) {
-            setting.inputStreams.put( fileName, inputStreams.get( fileName ) );
-        }
-        File path = PolyphenyHomeDirManager.getInstance().registerNewFolder( "data/csv/" + a.adapterName );
-        for ( Entry is : setting.inputStreams.entrySet() ) {
-            try ( InputStream in = is.getValue() ) {
-                File file = new File( path, is.getKey() );
-                log.info( "📁 Datei wird geschrieben: {}", file.getAbsolutePath() );
-                FileUtils.copyInputStreamToFile( in, file );
-            } catch ( IOException e ) {
-                throw new GenericRuntimeException( e );
-            }
-        }
-        return path.getAbsolutePath();
-    }*/
-
-
-    // Map statt Map
     private static String handleUploadFiles( Map files, List fileNames, AbstractAdapterSettingDirectory setting, PreviewRequest previewRequest ) {
-        File path = PolyphenyHomeDirManager.getInstance()
-                .registerNewFolder( "data/csv/" + previewRequest.uniqueName );
+        File path = PolyphenyHomeDirManager.getInstance().registerNewFolder( "data/csv/" + previewRequest.uniqueName );
         for ( String name : fileNames ) {
             byte[] data = files.get( name );
             if ( data == null ) {
@@ -2633,7 +2584,6 @@ private static String handleUploadFiles( Map files, List
             }
             try ( InputStream in = new ByteArrayInputStream( data ) ) {
                 File target = new File( path, name );
-                log.info( "📂 Datei wird geschrieben: {}", target.getAbsolutePath() );
                 FileUtils.copyInputStreamToFile( in, target );
             } catch ( IOException e ) {
                 throw new GenericRuntimeException( e );
diff --git a/webui/src/main/java/org/polypheny/db/webui/WebSocket.java b/webui/src/main/java/org/polypheny/db/webui/WebSocket.java
index c4cf31bacc..1121bcf2cc 100644
--- a/webui/src/main/java/org/polypheny/db/webui/WebSocket.java
+++ b/webui/src/main/java/org/polypheny/db/webui/WebSocket.java
@@ -112,7 +112,6 @@ public void onMessage( final WsMessageContext ctx ) {
         if ( ctx.message().equals( "\"keepalive\"" ) ) {
             return;
         }
-        log.error( "UI message received: " + ctx.message() );
         //close analyzers of a previous query that was sent over the same socket.
        Crud.cleanupOldSession( queryAnalyzers, ctx.getSessionId() );

From 0c7e0d473b32298d6b4fe3258372c991eb16b8ef Mon Sep 17 00:00:00 2001
From: romanost03
Date: Tue, 12 Aug 2025 20:03:54 +0200
Subject: [PATCH 68/68] Delete adapter directory when dropping source.

---
 .../polypheny/db/adapter/AdapterManager.java | 40 ++++++++++++++++---
 1 file changed, 34 insertions(+), 6 deletions(-)

diff --git a/core/src/main/java/org/polypheny/db/adapter/AdapterManager.java b/core/src/main/java/org/polypheny/db/adapter/AdapterManager.java
index 9f94976b86..b5ea5a5361 100644
--- a/core/src/main/java/org/polypheny/db/adapter/AdapterManager.java
+++ b/core/src/main/java/org/polypheny/db/adapter/AdapterManager.java
@@ -19,13 +19,18 @@
 import com.google.common.collect.ImmutableMap;
 import com.google.gson.JsonObject;
 import com.google.gson.JsonSerializer;
+import java.io.IOException;
 import java.lang.reflect.Modifier;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Map.Entry;
 import java.util.Optional;
+import lombok.extern.slf4j.Slf4j;
 import org.apache.calcite.linq4j.tree.Expression;
 import org.apache.calcite.linq4j.tree.Expressions;
 import org.jetbrains.annotations.NotNull;
@@ -40,6 +45,7 @@
 import org.polypheny.db.schemaDiscovery.MetadataProvider;
+@Slf4j
 public class AdapterManager {
     public static Expression ADAPTER_MANAGER_EXPRESSION = Expressions.call( AdapterManager.class, "getInstance" );
@@ -178,7 +184,6 @@ public Adapter addAdapter( String adapterName, String uniqueName, AdapterType
         AdapterTemplate adapterTemplate = AdapterTemplate.fromString( adapterName, adapterType );
-
         for ( AbstractAdapterSetting setting : adapterTemplate.settings ) {
             if ( setting.appliesTo.stream().noneMatch( s -> s.appliesTo( mode ) ) ) {
                 settings.remove( setting.name );
@@ -218,6 +223,30 @@ public void removeAdapter( long adapterId ) {
         // Shutdown store
         adapterInstance.shutdownAndRemoveListeners();
+        // Delete the adapter's directory if one exists
+        try {
+            var s = adapterInstance.getCurrentSettings();
+            if ( s != null && s.containsKey( "directory" ) ) {
+                String dirStr = s.get( "directory" );
+                if ( dirStr != null && !dirStr.isBlank() ) {
+                    Path dir = Paths.get( dirStr ).normalize();
+                    if ( Files.isDirectory( dir ) ) {
+                        // Walk depth-first in reverse order so files are deleted before their parent directories.
+                        try ( var walk = Files.walk( dir ) ) {
+                            walk.sorted( java.util.Comparator.reverseOrder() )
+                                    .forEach( p -> {
+                                        try {
+                                            Files.deleteIfExists( p );
+                                        } catch ( IOException ignored ) {
+                                            // Best-effort cleanup; skip entries that cannot be deleted.
+                                        }
+                                    } );
+                        }
+                        log.info( "Deleted adapter directory: {}", dir.toAbsolutePath() );
+                    }
+                }
+            }
+        } catch ( Exception e ) {
+            log.warn( "Could not delete adapter directory: {}", e.toString() );
+        }
+
         // Remove store from maps
         adapterById.remove( adapterInstance.getAdapterId() );
         adapterByName.remove( adapterInstance.getUniqueName() );
@@ -239,14 +268,13 @@ public void restoreAdapters( List adapters ) {
     }
-    public Optional getMetadataProvider(String uniqueName) {
-        return getSource(uniqueName)
-                .filter(mp -> mp instanceof MetadataProvider)
-                .map(mp -> (MetadataProvider) mp);
+    public Optional getMetadataProvider( String uniqueName ) {
+        return getSource( uniqueName )
+                .filter( mp -> mp instanceof MetadataProvider )
+                .map( mp -> (MetadataProvider) mp );
     }
-
     public record AdapterInformation( String name, String description, AdapterType type, List settings, List modes ) {
     public static JsonSerializer getSerializer() {
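
The directory cleanup above depends on Files.walk yielding parents before children and on reverse sorting inverting that: deeper paths compare greater, so children are deleted before their parent directories. The following is a minimal, self-contained sketch of the same idiom; the class name and directory layout are illustrative only and not part of the patch:

    import java.io.IOException;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.util.Comparator;
    import java.util.stream.Stream;

    public class RecursiveDeleteSketch {

        public static void main( String[] args ) throws IOException {
            // Build a small nested directory, similar to an adapter's upload folder.
            Path dir = Files.createTempDirectory( "adapter-dir" );
            Files.createDirectories( dir.resolve( "sub" ) );
            Files.writeString( dir.resolve( "sub" ).resolve( "data.csv" ), "a,b,c" );

            // Reverse lexicographic order puts "dir/sub/data.csv" before "dir/sub"
            // before "dir", so every entry is removed before its parent.
            // The stream returned by Files.walk is closed via try-with-resources.
            try ( Stream<Path> walk = Files.walk( dir ) ) {
                walk.sorted( Comparator.reverseOrder() )
                        .forEach( p -> {
                            try {
                                Files.deleteIfExists( p );
                            } catch ( IOException ignored ) {
                                // Best effort, mirroring the cleanup in removeAdapter.
                            }
                        } );
            }

            System.out.println( "Directory removed: " + !Files.exists( dir ) ); // prints true
        }
    }

Closing the walk stream matters because Files.walk holds open directory handles while iterating; relying on garbage collection to release them can fail on platforms with strict file locking.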