diff --git a/build.gradle b/build.gradle index 13ec183c2a..993d181418 100644 --- a/build.gradle +++ b/build.gradle @@ -144,6 +144,8 @@ allprojects { implementation group: "org.slf4j", name: "slf4j-api", version: slf4j_api_version // MIT implementation group: "org.apache.logging.log4j", name: "log4j-slf4j2-impl", version: log4j_slf4j_impl_version // Apache 2.0 + implementation 'org.postgresql:postgresql:42.7.2' + implementation group: 'com.oracle.database.jdbc', name: 'ojdbc8', version: '19.8.0.0' testCompileOnly(group: 'org.pf4j', name: 'pf4j', version: pf4j_version) { exclude group: "org.slf4j" diff --git a/core/src/main/java/org/polypheny/db/adapter/AdapterManager.java b/core/src/main/java/org/polypheny/db/adapter/AdapterManager.java index 4d1aa30957..b5ea5a5361 100644 --- a/core/src/main/java/org/polypheny/db/adapter/AdapterManager.java +++ b/core/src/main/java/org/polypheny/db/adapter/AdapterManager.java @@ -19,13 +19,18 @@ import com.google.common.collect.ImmutableMap; import com.google.gson.JsonObject; import com.google.gson.JsonSerializer; +import java.io.IOException; import java.lang.reflect.Modifier; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Optional; +import lombok.extern.slf4j.Slf4j; import org.apache.calcite.linq4j.tree.Expression; import org.apache.calcite.linq4j.tree.Expressions; import org.jetbrains.annotations.NotNull; @@ -37,8 +42,10 @@ import org.polypheny.db.catalog.entity.LogicalAdapter.AdapterType; import org.polypheny.db.catalog.entity.allocation.AllocationEntity; import org.polypheny.db.catalog.exceptions.GenericRuntimeException; +import org.polypheny.db.schemaDiscovery.MetadataProvider; +@Slf4j public class AdapterManager { public static Expression ADAPTER_MANAGER_EXPRESSION = Expressions.call( AdapterManager.class, "getInstance" ); @@ -177,7 +184,6 @@ public Adapter addAdapter( String adapterName, String uniqueName, AdapterType AdapterTemplate adapterTemplate = AdapterTemplate.fromString( adapterName, adapterType ); - for ( AbstractAdapterSetting setting : adapterTemplate.settings ) { if ( setting.appliesTo.stream().noneMatch( s -> s.appliesTo( mode ) ) ) { settings.remove( setting.name ); @@ -217,6 +223,30 @@ public void removeAdapter( long adapterId ) { // Shutdown store adapterInstance.shutdownAndRemoveListeners(); + // Delete directory if exist + try { + var s = adapterInstance.getCurrentSettings(); + if ( s != null && s.containsKey( "directory" ) ) { + String dirStr = s.get( "directory" ); + if ( dirStr != null && !dirStr.isBlank() ) { + Path dir = Paths.get( dirStr ).normalize(); + if ( Files.isDirectory( dir ) ) { + Files.walk( dir ) + .sorted( java.util.Comparator.reverseOrder() ) + .forEach( p -> { + try { + Files.deleteIfExists( p ); + } catch ( IOException ignored ) { + } + } ); + log.info( "Deleted adapter directory: {}", dir.toAbsolutePath().toString() ); + } + } + } + } catch ( Exception e ) { + log.warn( "Could not delete adapter directory: {}", e.toString() ); + } + // Remove store from maps adapterById.remove( adapterInstance.getAdapterId() ); adapterByName.remove( adapterInstance.getUniqueName() ); @@ -238,6 +268,13 @@ public void restoreAdapters( List adapters ) { } + public Optional getMetadataProvider( String uniqueName ) { + return getSource( uniqueName ) + .filter( mp -> mp instanceof MetadataProvider ) + .map( mp -> (MetadataProvider) mp ); + } + + public 
record AdapterInformation( String name, String description, AdapterType type, List settings, List modes ) { public static JsonSerializer getSerializer() { diff --git a/core/src/main/java/org/polypheny/db/adapter/DataSource.java b/core/src/main/java/org/polypheny/db/adapter/DataSource.java index 9933b8c192..3c9a4e0b32 100644 --- a/core/src/main/java/org/polypheny/db/adapter/DataSource.java +++ b/core/src/main/java/org/polypheny/db/adapter/DataSource.java @@ -25,6 +25,8 @@ import org.polypheny.db.catalog.catalogs.AdapterCatalog; import org.polypheny.db.catalog.entity.LogicalAdapter.AdapterType; import org.polypheny.db.catalog.logistic.DataModel; +import org.polypheny.db.schemaDiscovery.MetadataProvider; +import org.polypheny.db.type.PolyType; @Getter public abstract class DataSource extends Adapter implements ExtensionPoint { diff --git a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/AbstractListener.java b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/AbstractListener.java new file mode 100644 index 0000000000..2a02fbaafe --- /dev/null +++ b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/AbstractListener.java @@ -0,0 +1,31 @@ +/* + * Copyright 2019-2025 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.adapter.MetadataObserver; + +import org.polypheny.db.adapter.Adapter; +import org.polypheny.db.schemaDiscovery.AbstractNode; +import org.polypheny.db.schemaDiscovery.MetadataProvider; + +public interface AbstractListener
<P extends Adapter & MetadataProvider>
{ + + void onMetadataChange( P adapter, AbstractNode node, String hash ); + + boolean isAvailable(); + + void applyChange( String[] metadata ); + +} diff --git a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/AbstractPublisher.java b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/AbstractPublisher.java new file mode 100644 index 0000000000..20524d1673 --- /dev/null +++ b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/AbstractPublisher.java @@ -0,0 +1,32 @@ +/* + * Copyright 2019-2025 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.adapter.MetadataObserver; + +public interface AbstractPublisher { + + String getAdapterUniqueName(); + + void start(); + + void stop(); + + void runCheck(); + + AbstractListener getListener(); + + +} diff --git a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/ChangeLogEntry.java b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/ChangeLogEntry.java new file mode 100644 index 0000000000..fd8d145e3f --- /dev/null +++ b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/ChangeLogEntry.java @@ -0,0 +1,66 @@ +/* + * Copyright 2019-2025 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.db.adapter.MetadataObserver; + +import com.fasterxml.jackson.annotation.JsonProperty; +import lombok.AllArgsConstructor; +import lombok.Value; +import org.polypheny.db.adapter.MetadataObserver.PublisherManager.ChangeStatus; +import org.polypheny.db.adapter.MetadataObserver.Utils.MetaDiffUtil.DiffResult; +import java.time.Instant; +import java.util.ArrayList; +import java.util.List; + +@Value +@AllArgsConstructor +public class ChangeLogEntry { + + @JsonProperty + String adapterName; + @JsonProperty + Instant timestamp; + @JsonProperty + List messages; + @JsonProperty + ChangeStatus severity; + + + public class DiffMessageUtil { + + private DiffMessageUtil() { + } + + + public static List toMessages( DiffResult diff ) { + List msgs = new ArrayList<>(); + + diff.getAdded() + .forEach( p -> msgs.add( "Added metadata " + p ) ); + + diff.getRemoved() + .forEach( p -> msgs.add( "Removed metadata " + p ) ); + + diff.getChanged() + .forEach( p -> msgs.add( "Changed metadata " + p ) ); + + return msgs; + } + + } + + +} diff --git a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/ChangeLogView.java b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/ChangeLogView.java new file mode 100644 index 0000000000..ce3488e83b --- /dev/null +++ b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/ChangeLogView.java @@ -0,0 +1,88 @@ +/* + * Copyright 2019-2025 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.adapter.MetadataObserver; + +import com.fasterxml.jackson.annotation.JsonProperty; +import lombok.AllArgsConstructor; +import lombok.Value; +import org.polypheny.db.adapter.MetadataObserver.PublisherManager.ChangeStatus; + +import java.time.Duration; +import java.time.Instant; +import java.util.List; + +@Value +@AllArgsConstructor +public class ChangeLogView { + + @JsonProperty + String adapterName; + + @JsonProperty + String timestamp; + + @JsonProperty + List messages; + + @JsonProperty + ChangeStatus severity; + + + public static ChangeLogView from( ChangeLogEntry e ) { + return new ChangeLogView( + e.getAdapterName(), + rel( e.getTimestamp() ), + e.getMessages(), + e.getSeverity() + ); + } + + + private static String rel( Instant then ) { + long s = Duration.between( then, Instant.now() ).getSeconds(); + if ( s < 0 ) { + s = 0; + } + + if ( s < 60 ) { + return s == 1 ? "1 second ago" : s + " seconds ago"; + } + long m = s / 60; + if ( m < 60 ) { + return m == 1 ? "1 minute ago" : m + " minutes ago"; + } + long h = m / 60; + if ( h < 24 ) { + return h == 1 ? "1 hour ago" : h + " hours ago"; + } + long d = h / 24; + if ( d < 7 ) { + return d == 1 ? "1 day ago" : d + " days ago"; + } + long w = d / 7; + if ( w < 5 ) { + return w == 1 ? "1 week ago" : w + " weeks ago"; + } + long mo = d / 30; + if ( mo < 12 ) { + return mo == 1 ? "1 month ago" : mo + " months ago"; + } + long y = d / 365; + return y == 1 ? 
"1 year ago" : y + " years ago"; + } + +} diff --git a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/HashCache.java b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/HashCache.java new file mode 100644 index 0000000000..c9903f95b5 --- /dev/null +++ b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/HashCache.java @@ -0,0 +1,48 @@ +/* + * Copyright 2019-2025 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.adapter.MetadataObserver; + +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; + +public class HashCache { + + private static final HashCache INSTANCE = new HashCache(); + + private final Map cache = new ConcurrentHashMap<>(); + + + private HashCache() { + } + + + public static HashCache getInstance() { + return INSTANCE; + } + + + public void put( String uniqueName, String hash ) { + this.cache.put( uniqueName, hash ); + } + + + public String getHash( String uniqueName ) { + return this.cache.get( uniqueName ); + } + + +} diff --git a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/ListenerImpl.java b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/ListenerImpl.java new file mode 100644 index 0000000000..dd7458b1ce --- /dev/null +++ b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/ListenerImpl.java @@ -0,0 +1,218 @@ +/* + * Copyright 2019-2025 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.db.adapter.MetadataObserver; + +import com.google.gson.Gson; +import lombok.extern.slf4j.Slf4j; +import org.polypheny.db.adapter.Adapter; +import org.polypheny.db.adapter.AdapterManager; +import org.polypheny.db.adapter.DataSource; +import org.polypheny.db.adapter.MetadataObserver.ChangeLogEntry.DiffMessageUtil; +import org.polypheny.db.adapter.MetadataObserver.PublisherManager.ChangeStatus; +import org.polypheny.db.adapter.MetadataObserver.Utils.MetaAnnotator; +import org.polypheny.db.adapter.MetadataObserver.Utils.MetaDiffUtil; +import org.polypheny.db.adapter.MetadataObserver.Utils.MetaDiffUtil.DiffResult; +import org.polypheny.db.adapter.java.AdapterTemplate.PreviewResultEntry; +import org.polypheny.db.schemaDiscovery.AbstractNode; +import org.polypheny.db.schemaDiscovery.MetadataProvider; +import org.polypheny.db.schemaDiscovery.NodeSerializer; +import org.polypheny.db.schemaDiscovery.NodeUtil; + +import java.io.File; +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.StandardCopyOption; +import java.time.Instant; +import java.util.Arrays; +import java.util.List; +import java.util.Set; + +@Slf4j +public class ListenerImpl
<P extends Adapter & MetadataProvider>
implements AbstractListener
<P>
{ + + private boolean available; + private AbstractNode currentNode; + private P adapter; + private String hash; + + private static AbstractNode formRootNode = null; + + private static final Gson GSON = new Gson(); + + + public ListenerImpl() { + available = true; + currentNode = null; + this.adapter = null; + this.hash = null; + } + + + @Override + public void onMetadataChange(P adapter, AbstractNode node, String hash) { + available ^= true; + this.currentNode = node; + this.adapter = adapter; + this.hash = hash; + + Object preview = adapter.getPreview(); + + + DiffResult result = MetaDiffUtil.diff(adapter.getRoot(), node); + + ChangeStatus status = NodeUtil.evaluateStatus(result, adapter.getRoot()); + + ChangeLogEntry entry = new ChangeLogEntry(adapter.getUniqueName(), Instant.now(), DiffMessageUtil.toMessages(result), status); + PublisherManager.getInstance().addChange(entry); + + AbstractNode annotatedCopy = MetaAnnotator.annotateTree(adapter.getRoot(), node, result); + String json = NodeSerializer.serializeNode(annotatedCopy).toString(); + + PublisherManager.getInstance().onMetadataChange(adapter.getUniqueName(), new PreviewResultEntry(json, preview, List.of(entry)), status); + } + + + public static PreviewResultEntry buildFormChange(String uniqueName, AbstractNode oldRoot, AbstractNode newRoot, Object preview, String path) { + DiffResult diff = MetaDiffUtil.diff(oldRoot, newRoot); + ChangeStatus status = NodeUtil.evaluateStatus(diff, oldRoot); + + ChangeLogEntry entry = new ChangeLogEntry(uniqueName, Instant.now(), DiffMessageUtil.toMessages(diff), status); + + AbstractNode annotated = MetaAnnotator.annotateTree(oldRoot, newRoot, diff); + String json = NodeSerializer.serializeNode(annotated).toString(); + + PublisherManager pm = PublisherManager.getInstance(); + pm.addChange(entry); + PreviewResultEntry result = new PreviewResultEntry(json, preview, List.of(entry)); + pm.onMetadataChange(uniqueName, result, status); + pm.saveTempPath(uniqueName, path); + + formRootNode = newRoot; + + return result; + } + + + public static void applyAnnotatedTree(Adapter adapter, AbstractNode newRoot, String newHash, String[] additionallySelectedMetadata) { + + if (!(adapter instanceof DataSource)) { + throw new IllegalArgumentException("Adapter must be of type DataSource"); + } + + MetadataProvider metadataProvider = (MetadataProvider) adapter; + + Set selected = NodeUtil.collectSelecedAttributePaths(metadataProvider.getRoot()); + if (additionallySelectedMetadata != null) { + selected.addAll(Arrays.asList(additionallySelectedMetadata)); + } + + metadataProvider.setRoot(newRoot); + metadataProvider.markSelectedAttributes(List.copyOf(selected)); + HashCache.getInstance().put(adapter.getUniqueName(), newHash); + } + + + @Override + public void applyChange(String[] metadata) { + Set prevSelected = NodeUtil.collectSelecedAttributePaths(this.adapter.getRoot()); + + this.adapter.setRoot(this.currentNode); + if (metadata != null && metadata.length > 0) { + prevSelected.addAll(Arrays.asList(metadata)); + } + this.adapter.markSelectedAttributes(List.copyOf(prevSelected)); + HashCache.getInstance().put(this.adapter.getUniqueName(), this.hash); + + this.currentNode = null; + this.adapter = null; + this.hash = null; + + available ^= true; + + } + + + // CSV and Excel does not support observer deployment. Therefore, a manual approach with a reupload is necessary to update data. 
+ public static void applyFormChange(String[] metadata, String uniqueName, String newPath) { + AbstractNode newRoot = formRootNode; + + DataSource adapter = AdapterManager.getInstance().getSource(uniqueName).orElseThrow(); + MetadataProvider metadataprovider = (MetadataProvider) adapter; + + deleteTempPath(newPath, adapter.getSettings().get("directory")); + + newRoot = metadataprovider.fetchMetadataTree(); + + AbstractNode oldRoot = metadataprovider.getRoot(); + metadataprovider.setRoot(newRoot); + + Set prevSelected = NodeUtil.collectSelecedAttributePaths(oldRoot); + // metadataprovider.setRoot( newRoot ); + if (metadata != null && metadata.length > 0) { + prevSelected.addAll(Arrays.asList(metadata)); + } + + metadataprovider.markSelectedAttributes(List.copyOf(prevSelected)); + + formRootNode = null; + PublisherManager.getInstance().deleteTempPath(uniqueName); + + } + + + // Changing the old file on the directory with the new/temporary file. + private static void deleteTempPath(String tmpPath, String directory) { + File tmpDir = new File(tmpPath); + File targetDir = new File(directory); + + if (!tmpDir.exists() || !tmpDir.isDirectory()) { + throw new IllegalArgumentException("tmpPath is not a valid directory: " + tmpPath); + } + if (!targetDir.exists() || !targetDir.isDirectory()) { + throw new IllegalArgumentException("directory is not a valid directory: " + directory); + } + + for (File file : targetDir.listFiles()) { + if (!file.delete()) { + throw new RuntimeException("Failed to delete file: " + file.getAbsolutePath()); + } + } + + for (File file : tmpDir.listFiles()) { + try { + Files.copy(file.toPath(), new File(targetDir, file.getName()).toPath(), + StandardCopyOption.REPLACE_EXISTING); + } catch (IOException e) { + throw new RuntimeException("Failed to copy file: " + file.getAbsolutePath(), e); + } + } + + for (File file : tmpDir.listFiles()) { + file.delete(); + } + if (!tmpDir.delete()) { + throw new RuntimeException("Failed to delete tmpPath directory: " + tmpDir.getAbsolutePath()); + } + } + + + @Override + public boolean isAvailable() { + return this.available; + } + +} diff --git a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/MetadataHasher.java b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/MetadataHasher.java new file mode 100644 index 0000000000..563938540c --- /dev/null +++ b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/MetadataHasher.java @@ -0,0 +1,51 @@ +/* + * Copyright 2019-2025 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.db.adapter.MetadataObserver; + +import com.fasterxml.jackson.databind.node.ObjectNode; +import java.nio.charset.StandardCharsets; +import java.security.MessageDigest; +import java.security.NoSuchAlgorithmException; + +public class MetadataHasher { + + private final MessageDigest digest; + + + public MetadataHasher() { + try { + this.digest = MessageDigest.getInstance( "SHA-256" ); + } catch ( NoSuchAlgorithmException e ) { + throw new RuntimeException( e ); + } + } + + + public String hash( String text ) { + byte[] bytes = text.getBytes( StandardCharsets.UTF_8 ); + byte[] hash = digest.digest( bytes ); + + StringBuilder sb = new StringBuilder(); + for ( byte b : hash ) { + sb.append( String.format( "%02x", b ) ); + } + return sb.toString(); + + } + + +} diff --git a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/PublisherImpl.java b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/PublisherImpl.java new file mode 100644 index 0000000000..f49d890220 --- /dev/null +++ b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/PublisherImpl.java @@ -0,0 +1,89 @@ +/* + * Copyright 2019-2025 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.adapter.MetadataObserver; + +import lombok.extern.slf4j.Slf4j; +import org.polypheny.db.adapter.Adapter; +import org.polypheny.db.schemaDiscovery.AbstractNode; +import org.polypheny.db.schemaDiscovery.MetadataProvider; +import org.polypheny.db.schemaDiscovery.NodeSerializer; + +import java.util.concurrent.Executors; +import java.util.concurrent.ScheduledExecutorService; + +@Slf4j +public class PublisherImpl
<P extends Adapter & MetadataProvider>
implements AbstractPublisher { + + protected final P provider; + private final long intervalSeconds = 30; + private final ScheduledExecutorService scheduler = Executors.newSingleThreadScheduledExecutor(); + private AbstractListener listener; + private final MetadataHasher hasher = new MetadataHasher(); + private final HashCache cache = HashCache.getInstance(); + + + protected PublisherImpl(P provider, AbstractListener listener) { + this.provider = provider; + this.listener = listener; + } + + + @Override + public String getAdapterUniqueName() { + return provider.getUniqueName(); + } + + + @Override + public void start() { + scheduler.scheduleAtFixedRate(this::runCheck, 0, intervalSeconds, java.util.concurrent.TimeUnit.SECONDS); + } + + + @Override + public void stop() { + scheduler.shutdown(); + } + + + @Override + public void runCheck() { + if (!listener.isAvailable()) { + return; + } + try { + AbstractNode node = provider.fetchMetadataTree(); + String fresh = NodeSerializer.serializeNode(node).toString(); + String hash = hasher.hash(fresh); + + String lastHash = cache.getHash(provider.getUniqueName()); + + if (lastHash != null && !lastHash.equals(hash)) { + listener.onMetadataChange(provider, node, hash); + } + } catch (Exception e) { + throw new RuntimeException("Error while checking current snapshot.", e); + } + } + + + @Override + public AbstractListener getListener() { + return this.listener; + } + +} diff --git a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/PublisherManager.java b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/PublisherManager.java new file mode 100644 index 0000000000..0b4119cd7b --- /dev/null +++ b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/PublisherManager.java @@ -0,0 +1,161 @@ +/* + * Copyright 2019-2025 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.adapter.MetadataObserver; + +import lombok.extern.slf4j.Slf4j; +import org.polypheny.db.adapter.Adapter; +import org.polypheny.db.adapter.java.AdapterTemplate.PreviewResultEntry; +import org.polypheny.db.schemaDiscovery.MetadataProvider; +import java.util.Deque; +import java.util.List; +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ConcurrentLinkedDeque; + +@Slf4j +public class PublisherManager { + + private static final int MAX_ENTRIES_PER_ADAPTER = 100; + + private final Map publishers = new ConcurrentHashMap<>(); + + // Temporarily save the changes computed by a listener. + private final Map changeCache = new ConcurrentHashMap<>(); + + // Deliverd with the change from the listener. Saves either a status WARNING or CRITICAL. + private final Map statusCache = new ConcurrentHashMap<>(); + + + // Cache for file metadata changes. Reuploaded Excel- or CSV file paths are temporarily saved. + private final Map tempFileCache = new ConcurrentHashMap<>(); + + // History of all changes occurred for every adapter during deploy-time. 
+ private final ConcurrentHashMap<String, Deque<ChangeLogEntry>> changeCatalog = new ConcurrentHashMap<>(); + + private static final PublisherManager INSTANCE = new PublisherManager(); + + + public static PublisherManager getInstance() { + return INSTANCE; + } + + + private PublisherManager() { + } + + + public
<P extends Adapter & MetadataProvider>
void onAdapterDeploy( P adapter ) { + if ( publishers.containsKey( adapter.getUniqueName() ) ) { + return; + } + AbstractListener listener = new ListenerImpl(); + AbstractPublisher publisher = new PublisherImpl<>( adapter, listener ); + publishers.put( adapter.getUniqueName(), publisher ); + publisher.start(); + } + + + public void onAdapterUndeploy( String uniqueName ) { + if ( publishers.containsKey( uniqueName ) ) { + publishers.get( uniqueName ).stop(); + publishers.remove( uniqueName ); + } + this.changeCatalog.remove( uniqueName ); + this.tempFileCache.remove( uniqueName ); + this.changeCache.remove( uniqueName ); + this.statusCache.remove( uniqueName ); + + } + + + public ChangeStatus hasChange( String uniqueName ) { + if ( changeCache.containsKey( uniqueName ) ) { + return statusCache.get( uniqueName ); + } else { + return null; + } + } + + + public void onMetadataChange( String uniqueName, PreviewResultEntry data, ChangeStatus status ) { + changeCache.put( uniqueName, data ); + statusCache.put( uniqueName, status ); + } + + + public PreviewResultEntry fetchChange( String uniqueName ) { + return changeCache.get( uniqueName ); + } + + + public void ack( String uniqueName, String[] metadata ) { + AbstractPublisher publisher = publishers.get( uniqueName ); + + if ( publishers.isEmpty() ) { + ListenerImpl.applyFormChange( metadata, uniqueName, tempFileCache.get( uniqueName ) ); + } else { + publisher.getListener().applyChange( metadata ); + } + + changeCache.remove( uniqueName ); + statusCache.remove( uniqueName ); + } + + + public enum ChangeStatus { + CRITICAL, + WARNING, + OK + } + + + public void addChange( ChangeLogEntry entry ) { + changeCatalog.computeIfAbsent( entry.getAdapterName(), k -> new ConcurrentLinkedDeque<>() ).addFirst( entry ); + } + + + public List getHistory( String adapterName ) { + return changeCatalog.getOrDefault( adapterName, new ConcurrentLinkedDeque<>() ) + .stream() + .toList(); + } + + + // Not used but serves the purpose that the number of logs does not become excessively high. + private void prune( String adapterName ) { + Deque deque = changeCatalog.get( adapterName ); + while ( deque != null && deque.size() > MAX_ENTRIES_PER_ADAPTER ) { + deque.removeLast(); + } + } + + + public void saveTempPath( String uniqueName, String path ) { + tempFileCache.put( uniqueName, path ); + } + + + public String getTempPath( String uniqueName ) { + return tempFileCache.get( uniqueName ); + } + + + public void deleteTempPath( String uniqueName ) { + tempFileCache.remove( uniqueName ); + } + +} diff --git a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/Utils/DiffType.java b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/Utils/DiffType.java new file mode 100644 index 0000000000..4c5985dc93 --- /dev/null +++ b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/Utils/DiffType.java @@ -0,0 +1,24 @@ +/* + * Copyright 2019-2025 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.db.adapter.MetadataObserver.Utils; + +public enum DiffType { + ADDED, + REMOVED, + CHANGED, + NONE +} diff --git a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/Utils/MetaAnnotator.java b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/Utils/MetaAnnotator.java new file mode 100644 index 0000000000..b9eb71b059 --- /dev/null +++ b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/Utils/MetaAnnotator.java @@ -0,0 +1,137 @@ +/* + * Copyright 2019-2025 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.adapter.MetadataObserver.Utils; + +import org.polypheny.db.adapter.MetadataObserver.Utils.MetaDiffUtil.DiffResult; +import org.polypheny.db.schemaDiscovery.AbstractNode; +import org.polypheny.db.schemaDiscovery.AttributeNode; +import org.polypheny.db.schemaDiscovery.Node; +import java.util.HashMap; +import java.util.Map; +import java.util.Optional; + +public final class MetaAnnotator { + + private static class PathHelper { + + static Map collect( AbstractNode node ) { + Map map = new HashMap<>(); + traverse( node, node.getName(), map ); + return map; + } + + + private static void traverse( AbstractNode n, String path, Map sink ) { + sink.put( path, n ); + for ( AbstractNode c : n.getChildren() ) { + traverse( c, path + "/" + c.getName(), sink ); + } + } + + + static Optional getNode( AbstractNode root, String path ) { + String[] seg = path.split( "/" ); + AbstractNode cur = root; + for ( int i = 1; i < seg.length; i++ ) { + String s = seg[i]; + cur = cur.getChildren().stream() + .filter( n -> n.getName().equals( s ) ) + .findFirst().orElse( null ); + if ( cur == null ) { + return Optional.empty(); + } + } + return Optional.of( cur ); + } + + } + + + public static AbstractNode annotateTree( AbstractNode oldRoot, AbstractNode newRoot, DiffResult diff ) { + AbstractNode copyOld = NodeCloner.deepCopy( oldRoot ); + AbstractNode copyNew = NodeCloner.deepCopy( newRoot ); + + Map newMap = PathHelper.collect( copyNew ); + Map oldMap = PathHelper.collect( copyOld ); + + for ( Map.Entry e : oldMap.entrySet() ) { + if ( e.getValue() instanceof AttributeNode a && a.isSelected() ) { + AbstractNode match = newMap.get( e.getKey() ); + if ( match instanceof AttributeNode aNew ) { + aNew.setSelected( true ); + } + } + } + + diff.getAdded().forEach( p -> PathHelper + .getNode( copyNew, p ) + .ifPresent( n -> n.addProperty( "diff", DiffType.ADDED ) ) ); + + // TODO By now, it is not recognized if some metadata has just changed in their name or not. + // TODO A change is recognized as something was removed and something new was added. In future, + // TODO that can be used of course. 
+ /*diff.getChanged().forEach( p -> PathHelper + .getNode( copyNew, p ) + .ifPresent( n -> n.addProperty( "diff", DiffType.CHANGED ) ) );*/ + + for ( String p : diff.getRemoved() ) { + if ( newMap.containsKey( p ) ) { + continue; + } + createGhostNode( copyNew, p ); + } + + return copyNew; + + } + + + // Removed metadata / nodes are marked as type "ghost". That can be recognized at the UI. + private static void createGhostNode( AbstractNode root, String fullPath ) { + String[] parts = fullPath.split( "/" ); + AbstractNode current = root; + StringBuilder curPath = new StringBuilder( root.getName() ); + + for ( int i = 1; i < parts.length; i++ ) { + String segment = parts[i]; + curPath.append( "/" ).append( segment ); + + Optional opt = + current.getChildren().stream() + .filter( n -> n.getName().equals( segment ) ) + .findFirst(); + + if ( opt.isPresent() ) { + current = opt.get(); + } else { + Node stub = new Node( "ghost", segment ); + if ( i == parts.length - 1 ) { + stub.addProperty( "diff", DiffType.REMOVED ); + } + current.addChild( stub ); + current = stub; + } + + } + } + +} + + + + + diff --git a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/Utils/MetaDiffUtil.java b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/Utils/MetaDiffUtil.java new file mode 100644 index 0000000000..4bc71e9778 --- /dev/null +++ b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/Utils/MetaDiffUtil.java @@ -0,0 +1,118 @@ +/* + * Copyright 2019-2025 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.adapter.MetadataObserver.Utils; + +import lombok.AllArgsConstructor; +import lombok.EqualsAndHashCode; +import lombok.Getter; +import lombok.RequiredArgsConstructor; +import org.polypheny.db.schemaDiscovery.AbstractNode; +import org.polypheny.db.schemaDiscovery.AttributeNode; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Set; + +public class MetaDiffUtil { + + @Getter + @AllArgsConstructor + public static class DiffResult { + + private final Set added; + private final Set removed; + private final Set changed; + + + @Override + public String toString() { + return "DiffResult{" + + "added=" + added + + ", removed=" + removed + + ", changed=" + changed + + '}'; + } + + } + + + @EqualsAndHashCode + @RequiredArgsConstructor(staticName = "of") + // Instead of comparing every variable in a node, a hash (fingerprint) is created for every node. + // Used for comparison. 
+ public static class Fingerprint { + + private final String type; + private final boolean selected; + private final int propertiesHash; + + + static Fingerprint of( AbstractNode n ) { + boolean sel = (n instanceof AttributeNode) && ((AttributeNode) n).isSelected(); + return Fingerprint.of( n.getType(), sel, Objects.hashCode( n.getProperties() ) ); + } + + } + + + private MetaDiffUtil() { + } + + + public static DiffResult diff( AbstractNode oldRoot, AbstractNode newRoot ) { + Map oldMap = new HashMap<>(); + Map newMap = new HashMap<>(); + + collect( oldRoot, "", oldMap ); + collect( newRoot, "", newMap ); + + Set added = new HashSet<>( newMap.keySet() ); + added.removeAll( oldMap.keySet() ); + + Set removed = new HashSet<>( oldMap.keySet() ); + removed.removeAll( newMap.keySet() ); + + Set changed = new HashSet<>(); + for ( String key : oldMap.keySet() ) { + if ( newMap.containsKey( key ) && + !oldMap.get( key ).equals( newMap.get( key ) ) ) { + changed.add( key ); + } + } + + return new DiffResult( added, removed, Collections.emptySet() ); + + } + + + private static void collect( AbstractNode node, String parentPath, Map sink ) { + String path = parentPath.isEmpty() ? + node.getName() : + parentPath + "/" + node.getName(); + + sink.put( path, Fingerprint.of( node ) ); + + for ( AbstractNode child : node.getChildren() ) { + collect( child, path, sink ); + } + } + + +} diff --git a/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/Utils/NodeCloner.java b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/Utils/NodeCloner.java new file mode 100644 index 0000000000..2aa4250386 --- /dev/null +++ b/core/src/main/java/org/polypheny/db/adapter/MetadataObserver/Utils/NodeCloner.java @@ -0,0 +1,59 @@ +/* + * Copyright 2019-2025 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.db.adapter.MetadataObserver.Utils; + +import org.polypheny.db.schemaDiscovery.AbstractNode; +import org.polypheny.db.schemaDiscovery.AttributeNode; +import org.polypheny.db.schemaDiscovery.Node; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; + +public class NodeCloner { + + private NodeCloner() { + } + + + public static AbstractNode deepCopy( AbstractNode node ) { + return copyNode( node ); + } + + + private static AbstractNode copyNode( AbstractNode n ) { + AbstractNode clone; + + if ( n instanceof AttributeNode a ) { + AttributeNode c = new AttributeNode( a.getType(), a.getName() ); + c.setSelected( a.isSelected() ); + clone = c; + } else { + clone = new Node( n.getType(), n.getName() ); + } + + clone.setProperties( new HashMap<>( n.getProperties() ) ); + + List clonedChildren = new ArrayList<>(); + for ( AbstractNode child : n.getChildren() ) { + clonedChildren.add( copyNode( child ) ); + } + clone.setChildren( clonedChildren ); + + return clone; + } + +} diff --git a/core/src/main/java/org/polypheny/db/adapter/Scannable.java b/core/src/main/java/org/polypheny/db/adapter/Scannable.java index 695ada4a09..0ce24cc644 100644 --- a/core/src/main/java/org/polypheny/db/adapter/Scannable.java +++ b/core/src/main/java/org/polypheny/db/adapter/Scannable.java @@ -73,7 +73,7 @@ static PhysicalEntity createSubstitutionEntity( Scannable scannable, Context con allocColumns.add( alloc ); } // we use the provided first x columns from amountPk as pks (still requires them to be ordered and first first) - scannable.createTable( context, LogicalTableWrapper.of( table, columns, columns.subList( 0, amountPk ).stream().map( c -> c.id ).toList() ), AllocationTableWrapper.of( allocSubTable, allocColumns ) ); + scannable.createTable( context, LogicalTableWrapper.of( table, columns, columns.subList( 0, amountPk ).stream().map( c -> c.id ).toList(), null, null ), AllocationTableWrapper.of( allocSubTable, allocColumns ) ); return scannable.getCatalog().getPhysicalsFromAllocs( allocSubTable.id ).get( 0 ); } diff --git a/core/src/main/java/org/polypheny/db/adapter/java/AdapterTemplate.java b/core/src/main/java/org/polypheny/db/adapter/java/AdapterTemplate.java index f9f7cae86d..fafb272dac 100644 --- a/core/src/main/java/org/polypheny/db/adapter/java/AdapterTemplate.java +++ b/core/src/main/java/org/polypheny/db/adapter/java/AdapterTemplate.java @@ -21,20 +21,30 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import com.fasterxml.jackson.annotation.JsonProperty; import lombok.Value; +import lombok.extern.slf4j.Slf4j; import org.polypheny.db.adapter.AbstractAdapterSetting; import org.polypheny.db.adapter.AbstractAdapterSettingList; import org.polypheny.db.adapter.Adapter; import org.polypheny.db.adapter.AdapterManager; import org.polypheny.db.adapter.AdapterManager.Function5; +import org.polypheny.db.adapter.DataSource; import org.polypheny.db.adapter.DataStore; import org.polypheny.db.adapter.DeployMode; import org.polypheny.db.adapter.DeployMode.DeploySetting; +import org.polypheny.db.adapter.MetadataObserver.ChangeLogEntry; +import org.polypheny.db.adapter.MetadataObserver.ChangeLogView; +import org.polypheny.db.adapter.MetadataObserver.MetadataHasher; import org.polypheny.db.adapter.annotations.AdapterProperties; import org.polypheny.db.catalog.entity.LogicalAdapter.AdapterType; import org.polypheny.db.catalog.exceptions.GenericRuntimeException; import org.polypheny.db.docker.DockerManager; +import 
org.polypheny.db.schemaDiscovery.AbstractNode; +import org.polypheny.db.schemaDiscovery.MetadataProvider; +import org.polypheny.db.schemaDiscovery.NodeSerializer; +@Slf4j @Value public class AdapterTemplate { @@ -100,4 +110,61 @@ public DeployMode getDefaultMode() { return clazz.getAnnotation( AdapterProperties.class ).defaultMode(); } + + public DataSource createEphemeral( Map settings ) { + String previewName = "_preview" + System.nanoTime(); + Adapter adapter = deployer.get( -1L, previewName, settings, DeployMode.REMOTE ); + + if ( !(adapter instanceof DataSource ds) ) { + throw new GenericRuntimeException( "The adapter does not implement DataSource." ); + } + + return ds; + } + + + public PreviewResult preview( Map settings, int limit ) { + DataSource tmp = createEphemeral( settings ); + try { + if ( tmp instanceof MetadataProvider mp ) { + AbstractNode meta = mp.fetchMetadataTree(); + mp.setRoot( meta ); + String json = NodeSerializer.serializeNode( meta ).toString(); + MetadataHasher hasher = new MetadataHasher(); + String hash = hasher.hash( json ); + Object rows = mp.getPreview(); + return new PreviewResult( json, rows, null ); + } + throw new GenericRuntimeException( "The adapter does not implement MetadataProvider." ); + } finally { + tmp.shutdown(); + } + } + + + @Value + public static class PreviewResult { + + @JsonProperty + String metadata; + @JsonProperty + Object preview; + @JsonProperty + List history; + + } + + + @Value + public static class PreviewResultEntry { + + @JsonProperty + String metadata; + @JsonProperty + Object preview; + @JsonProperty + List history; + + } + } diff --git a/core/src/main/java/org/polypheny/db/adapter/java/SchemaFilter.java b/core/src/main/java/org/polypheny/db/adapter/java/SchemaFilter.java new file mode 100644 index 0000000000..9ee0d197e2 --- /dev/null +++ b/core/src/main/java/org/polypheny/db/adapter/java/SchemaFilter.java @@ -0,0 +1,52 @@ +/* + * Copyright 2019-2025 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.db.adapter.java; + +import java.util.Arrays; +import java.util.Set; + +public enum SchemaFilter { + + PostgreSQL( + Set.of( "pg_catalog", "information_schema" ) + ), + + MySQL( + Set.of( "information_schema", "mysql", "performance_schema" ) + ), + + MonetDB( + Set.of( "information_schema", "json", "logging", "profiler", "temp" ) + ), + + GENERIC( Set.of() ); + + public final Set ignoredSchemas; + + + SchemaFilter( Set ignoredSchemas ) { + this.ignoredSchemas = ignoredSchemas; + } + + + public static SchemaFilter forAdapter( String adapterName ) { + return Arrays.stream( values() ) + .filter( f -> f.name().equalsIgnoreCase( adapterName ) ) + .findFirst() + .orElse( GENERIC ); + } +} diff --git a/core/src/main/java/org/polypheny/db/adapter/java/TableFilter.java b/core/src/main/java/org/polypheny/db/adapter/java/TableFilter.java new file mode 100644 index 0000000000..356df33ee9 --- /dev/null +++ b/core/src/main/java/org/polypheny/db/adapter/java/TableFilter.java @@ -0,0 +1,61 @@ +/* + * Copyright 2019-2025 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.adapter.java; + +import java.util.Arrays; +import java.util.Set; +import java.util.stream.Collectors; + +public enum TableFilter { + + Oracle( + Set.of( "AQ$_", "LOG", "MVIEW$_", "OL$", "REDO_", "REPL_", "ROLLING$", "SCHEDULER", "SQLPLUS", "HELP" ) + ), + + MySQL( + Set.of( "sys_config" ) + ), + + GENERIC( Set.of() ); + + public final Set ignoredTables; + + + TableFilter( final Set ignoredTables ) { + this.ignoredTables = ignoredTables.stream() + .map( String::trim ) + .collect( Collectors.toSet() ); + } + + + public static TableFilter forAdapter( String adapterName ) { + return Arrays.stream( values() ) + .filter( f -> f.name().equalsIgnoreCase( adapterName ) ) + .findFirst() + .orElse( GENERIC ); + } + + + public boolean shouldIgnore( String tableName ) { + String upper = tableName.toUpperCase(); + return ignoredTables.stream() + .map( String::toUpperCase ) + .anyMatch( upper::startsWith ); + } + + +} diff --git a/core/src/main/java/org/polypheny/db/catalog/catalogs/RelAdapterCatalog.java b/core/src/main/java/org/polypheny/db/catalog/catalogs/RelAdapterCatalog.java index 84a3f92cc7..697d3dff69 100644 --- a/core/src/main/java/org/polypheny/db/catalog/catalogs/RelAdapterCatalog.java +++ b/core/src/main/java/org/polypheny/db/catalog/catalogs/RelAdapterCatalog.java @@ -100,7 +100,18 @@ public PhysicalColumn getColumn( long id, long allocId ) { public PhysicalTable createTable( String namespaceName, String tableName, Map columnNames, LogicalTable logical, Map lColumns, List pkIds, AllocationTableWrapper wrapper ) { AllocationTable allocation = wrapper.table; List columns = wrapper.columns; - List pColumns = Streams.mapWithIndex( columns.stream(), ( c, i ) -> new PhysicalColumn( columnNames.get( c.columnId ), logical.id, allocation.id, allocation.adapterId, (int) i, lColumns.get( c.columnId ) ) ).toList(); + List pColumns = columns.stream().map(ac -> 
{LogicalColumn lc = lColumns.get(ac.columnId); + int sheetPos0 = lc.position - 1; // 0-basierte XLSX-Pos + return new PhysicalColumn( + columnNames.get(ac.columnId), + logical.id, + allocation.id, + allocation.adapterId, + sheetPos0, // ← **kein** i++ + lc); + }) + .toList(); + PhysicalTable table = new PhysicalTable( IdBuilder.getInstance().getNewPhysicalId(), allocation.id, allocation.logicalId, tableName, pColumns, logical.namespaceId, namespaceName, pkIds, allocation.adapterId ); pColumns.forEach( this::addColumn ); addPhysical( allocation, table ); diff --git a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalTableWrapper.java b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalTableWrapper.java index d6d6d6c120..fbb2a7390e 100644 --- a/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalTableWrapper.java +++ b/core/src/main/java/org/polypheny/db/catalog/entity/logical/LogicalTableWrapper.java @@ -28,4 +28,8 @@ public class LogicalTableWrapper { public List pkIds; + public String physicalSchemaFinal; + + public String physicalTable; + } diff --git a/core/src/main/java/org/polypheny/db/ddl/DdlManager.java b/core/src/main/java/org/polypheny/db/ddl/DdlManager.java index 25e16feeb9..34dc1a02a9 100644 --- a/core/src/main/java/org/polypheny/db/ddl/DdlManager.java +++ b/core/src/main/java/org/polypheny/db/ddl/DdlManager.java @@ -154,7 +154,7 @@ public static DdlManager getInstance() { * @param afterColumnName the name of the column after the column, which is inserted; can be null * @param defaultValue the default value of the inserted column */ - public abstract void addColumnToSourceTable( LogicalTable table, String columnPhysicalName, String columnLogicalName, String beforeColumnName, String afterColumnName, PolyValue defaultValue, Statement statement ); + public abstract void addColumnToSourceTable( LogicalTable table, String columnPhysicalName, String columnLogicalName, String beforeColumnName, String afterColumnName, PolyValue defaultValue, Statement statement, String tablePhysicalName ); /** * Add a column to an existing table @@ -557,6 +557,9 @@ public static DdlManager getInstance() { public abstract void dropGraphPlacement( long graphId, DataStore dataStores, Statement statement ); + public abstract void addSelectedMetadata( Transaction tsx, Statement statement, String uniqueName, long namespace, List selectedPaths ); + + public abstract void removeSelectedMetadata( List paths, Statement statement, String uniqueName ); public abstract void dropCollection( LogicalCollection catalogCollection, Statement statement ); diff --git a/core/src/main/java/org/polypheny/db/schemaDiscovery/AbstractNode.java b/core/src/main/java/org/polypheny/db/schemaDiscovery/AbstractNode.java new file mode 100644 index 0000000000..e247ddebd0 --- /dev/null +++ b/core/src/main/java/org/polypheny/db/schemaDiscovery/AbstractNode.java @@ -0,0 +1,48 @@ +/* + * Copyright 2019-2025 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.db.schemaDiscovery; + +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.List; +import java.util.Map; + +public interface AbstractNode { + + @JsonProperty + String type = ""; + @JsonProperty + String name = ""; + @JsonProperty + List children = null; + @JsonProperty + Map properties = null; + + void addChild(AbstractNode node); + void addProperty(String key, Object value); + + String getType(); + String getName(); + List getChildren(); + Map getProperties(); + + void setType(String type); + void setName(String name); + void setChildren(List children); + void setProperties(Map properties); + + +} diff --git a/core/src/main/java/org/polypheny/db/schemaDiscovery/AttributeNode.java b/core/src/main/java/org/polypheny/db/schemaDiscovery/AttributeNode.java new file mode 100644 index 0000000000..095b3e9442 --- /dev/null +++ b/core/src/main/java/org/polypheny/db/schemaDiscovery/AttributeNode.java @@ -0,0 +1,35 @@ +/* + * Copyright 2019-2025 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.schemaDiscovery; + +import com.fasterxml.jackson.annotation.JsonProperty; +import lombok.Getter; +import lombok.Setter; + +public class AttributeNode extends Node implements AbstractNode { + + @Getter + @Setter + @JsonProperty + private boolean isSelected; + + public AttributeNode( String type, String name ) { + super( type, name ); + this.isSelected = false; + } + +} diff --git a/core/src/main/java/org/polypheny/db/schemaDiscovery/DocumentArrayNode.java b/core/src/main/java/org/polypheny/db/schemaDiscovery/DocumentArrayNode.java new file mode 100644 index 0000000000..5b5cbff1d8 --- /dev/null +++ b/core/src/main/java/org/polypheny/db/schemaDiscovery/DocumentArrayNode.java @@ -0,0 +1,37 @@ +/* + * Copyright 2019-2025 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.db.schemaDiscovery; + + +import com.fasterxml.jackson.annotation.JsonProperty; +import lombok.Getter; +import lombok.Setter; + +public class DocumentArrayNode extends Node implements AbstractNode { + + @Getter + @Setter + @JsonProperty + private String jsonPath; + + + public DocumentArrayNode( String name, String jsonPath ) { + super( "array", name ); + this.jsonPath = jsonPath; + } + +} diff --git a/core/src/main/java/org/polypheny/db/schemaDiscovery/DocumentObjectNode.java b/core/src/main/java/org/polypheny/db/schemaDiscovery/DocumentObjectNode.java new file mode 100644 index 0000000000..b823ec1e77 --- /dev/null +++ b/core/src/main/java/org/polypheny/db/schemaDiscovery/DocumentObjectNode.java @@ -0,0 +1,41 @@ +/* + * Copyright 2019-2025 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.schemaDiscovery; + +import com.fasterxml.jackson.annotation.JsonProperty; +import lombok.Getter; +import lombok.Setter; + +public class DocumentObjectNode extends Node implements AbstractNode { + + @Getter + @Setter + @JsonProperty + private String jsonPath; + @Getter + @Setter + @JsonProperty + private boolean cardCandidate; + + + public DocumentObjectNode( String name, String jsonPath, boolean cardCandidate ) { + super( "object", name ); + this.jsonPath = jsonPath; + this.cardCandidate = cardCandidate; + } + +} diff --git a/core/src/main/java/org/polypheny/db/schemaDiscovery/DocumentValueNode.java b/core/src/main/java/org/polypheny/db/schemaDiscovery/DocumentValueNode.java new file mode 100644 index 0000000000..23d08ac441 --- /dev/null +++ b/core/src/main/java/org/polypheny/db/schemaDiscovery/DocumentValueNode.java @@ -0,0 +1,42 @@ +/* + * Copyright 2019-2025 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.db.schemaDiscovery; + +import com.fasterxml.jackson.annotation.JsonProperty; +import lombok.Getter; +import lombok.Setter; + +public class DocumentValueNode extends Node implements AbstractNode { + + @Getter + @Setter + @JsonProperty + private String jsonPath; + @Getter + @Setter + @JsonProperty + private String valueType; + + + public DocumentValueNode( String name, String jsonPath, String valueType, Object sample ) { + super( "value", name ); + this.jsonPath = jsonPath; + this.valueType = valueType; + addProperty( "sample", sample ); + } + +} diff --git a/core/src/main/java/org/polypheny/db/schemaDiscovery/MetadataProvider.java b/core/src/main/java/org/polypheny/db/schemaDiscovery/MetadataProvider.java new file mode 100644 index 0000000000..ddb9726a3d --- /dev/null +++ b/core/src/main/java/org/polypheny/db/schemaDiscovery/MetadataProvider.java @@ -0,0 +1,38 @@ +/* + * Copyright 2019-2025 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.schemaDiscovery; + +import java.sql.Connection; +import java.util.List; +import java.util.Map; + +public interface MetadataProvider { + + AbstractNode fetchMetadataTree(); + + Object fetchPreview( Connection conn, String fqName, int limit ); + + void markSelectedAttributes( List selectedPaths ); + + void setRoot( AbstractNode root ); + + Object getPreview(); + + AbstractNode getRoot(); + + +} diff --git a/core/src/main/java/org/polypheny/db/schemaDiscovery/Node.java b/core/src/main/java/org/polypheny/db/schemaDiscovery/Node.java new file mode 100644 index 0000000000..bddb7b1567 --- /dev/null +++ b/core/src/main/java/org/polypheny/db/schemaDiscovery/Node.java @@ -0,0 +1,66 @@ +/* + * Copyright 2019-2025 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.db.schemaDiscovery; + +import com.fasterxml.jackson.annotation.JsonProperty; +import lombok.Getter; +import lombok.Setter; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +@Setter +@Getter +public class Node implements AbstractNode { + + @JsonProperty + protected String type; + @JsonProperty + protected String name; + @JsonProperty + protected List children; + @JsonProperty + protected Map properties; + + + public Node( String type, String name ) { + this.type = type; + this.name = name; + this.children = new ArrayList<>(); + this.properties = new HashMap<>(); + } + + @JsonProperty + public void addChild( AbstractNode node ) { + children.add( node ); + } + + @JsonProperty + public void addProperty( String key, Object value ) { + properties.put( key, value ); + } + +} + + + + + + + + diff --git a/core/src/main/java/org/polypheny/db/schemaDiscovery/NodeSerializer.java b/core/src/main/java/org/polypheny/db/schemaDiscovery/NodeSerializer.java new file mode 100644 index 0000000000..66aa28018c --- /dev/null +++ b/core/src/main/java/org/polypheny/db/schemaDiscovery/NodeSerializer.java @@ -0,0 +1,62 @@ +/* + * Copyright 2019-2025 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+package org.polypheny.db.schemaDiscovery;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.ArrayNode;
+import com.fasterxml.jackson.databind.node.ObjectNode;
+
+public class NodeSerializer {
+
+    private static final ObjectMapper objectMapper = new ObjectMapper();
+
+    public static ObjectNode serializeNode(AbstractNode node) {
+        ObjectNode json = objectMapper.createObjectNode();
+        json.put( "type", node.getType() );
+        json.put( "name", node.getName() );
+
+        if ( node instanceof AttributeNode attr )
+            json.put( "isSelected", attr.isSelected() );
+        if (node instanceof DocumentObjectNode obj) {
+            json.put("jsonPath", obj.getJsonPath());
+            json.put("cardCandidate", obj.isCardCandidate());
+        }
+        else if (node instanceof DocumentArrayNode arr) {
+            json.put("jsonPath", arr.getJsonPath());
+        }
+        else if (node instanceof DocumentValueNode val) {
+            json.put("jsonPath", val.getJsonPath());
+            json.put("valueType", val.getValueType());
+        }
+
+        ObjectNode props = objectMapper.createObjectNode();
+        node.getProperties().forEach((key, value) -> {
+            props.putPOJO(key, value);
+        });
+        json.set("properties", props);
+
+        // Children
+        ArrayNode children = objectMapper.createArrayNode();
+        for (AbstractNode child : node.getChildren()) {
+            children.add(serializeNode(child));
+        }
+        json.set("children", children);
+
+        return json;
+    }
+
+}
diff --git a/core/src/main/java/org/polypheny/db/schemaDiscovery/NodeUtil.java b/core/src/main/java/org/polypheny/db/schemaDiscovery/NodeUtil.java
new file mode 100644
index 0000000000..96490a4140
--- /dev/null
+++ b/core/src/main/java/org/polypheny/db/schemaDiscovery/NodeUtil.java
@@ -0,0 +1,141 @@
+/*
+ * Copyright 2019-2025 The Polypheny Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ + +package org.polypheny.db.schemaDiscovery; + +import lombok.extern.slf4j.Slf4j; +import org.polypheny.db.adapter.MetadataObserver.PublisherManager.ChangeStatus; +import org.polypheny.db.adapter.MetadataObserver.Utils.MetaDiffUtil.DiffResult; +import java.util.ArrayDeque; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Deque; +import java.util.HashSet; +import java.util.List; +import java.util.Optional; +import java.util.Set; + +@Slf4j +public final class NodeUtil { + + private static final String NORMALIZED_SEPARATOR = "."; + + + private static String normalizePath( String rawPath ) { + return rawPath.replace( "/", NORMALIZED_SEPARATOR ) + .replace( "\\", NORMALIZED_SEPARATOR ); + } + + + private NodeUtil() { + } + + + public static Set collectSelecedAttributePaths( AbstractNode root ) { + Set selected = new HashSet<>(); + if ( root == null ) { + return selected; + } + Deque path = new ArrayDeque<>(); + traverse( root, path, selected ); + return selected; + } + + + private static void traverse( AbstractNode node, Deque path, Set acc ) { + path.addLast( node.getName() ); + if ( node instanceof AttributeNode attr && attr.isSelected() ) { + acc.add( String.join( ".", path ) ); + } + + for ( AbstractNode child : node.getChildren() ) { + traverse( child, path, acc ); + } + + path.removeLast(); + } + + + public static void unmarkSelectedAttributes( AbstractNode metadataRoot, List pathsToUnmark ) { + + List> attributePaths = new ArrayList<>(); + + for ( String path : pathsToUnmark ) { + String cleanPath = path.replaceFirst( "^.*/", "" ).trim(); + + List segments = Arrays.asList( cleanPath.split( "\\." ) ); + + if ( !segments.isEmpty() && segments.get( 0 ).equals( metadataRoot.getName() ) ) { + segments = segments.subList( 1, segments.size() ); + } + + attributePaths.add( segments ); + } + + for ( List pathSegments : attributePaths ) { + AbstractNode current = metadataRoot; + + for ( int i = 0; i < pathSegments.size(); i++ ) { + String segment = pathSegments.get( i ); + + if ( i == pathSegments.size() - 1 ) { + Optional attrNodeOpt = current.getChildren().stream() + .filter( c -> c instanceof AttributeNode && segment.equals( c.getName() ) ) + .findFirst(); + + if ( attrNodeOpt.isPresent() ) { + ((AttributeNode) attrNodeOpt.get()).setSelected( false ); + } + } else { + Optional childOpt = current.getChildren().stream() + .filter( c -> segment.equals( c.getName() ) ) + .findFirst(); + + if ( childOpt.isPresent() ) { + current = childOpt.get(); + } else { + break; + } + } + } + } + } + + + public static ChangeStatus evaluateStatus( DiffResult diff, AbstractNode oldRoot ) { + if ( (diff.getAdded().isEmpty()) && (diff.getRemoved().isEmpty()) ) { + return ChangeStatus.OK; + } + + Set selected = collectSelecedAttributePaths( oldRoot ); + for ( String removedRaw : diff.getRemoved() ) { + String removed = normalizePath( removedRaw ); + for ( String selectedRaw : selected ) { + String selectedNorm = normalizePath( selectedRaw ); + if ( removed.equals( selectedNorm ) || + selectedNorm.startsWith( removed + NORMALIZED_SEPARATOR ) || + removed.startsWith( selectedNorm + NORMALIZED_SEPARATOR ) ) { + return ChangeStatus.CRITICAL; + } + } + } + return ChangeStatus.WARNING; + + } + + +} diff --git a/core/src/main/java/org/polypheny/db/type/PolyType.java b/core/src/main/java/org/polypheny/db/type/PolyType.java index 0e7d2e09a0..64cb9edb9b 100644 --- a/core/src/main/java/org/polypheny/db/type/PolyType.java +++ 
b/core/src/main/java/org/polypheny/db/type/PolyType.java @@ -401,6 +401,9 @@ public enum PolyType { .put( ExtraPolyTypes.NCHAR, CHAR ) .put( ExtraPolyTypes.NVARCHAR, VARCHAR ) + .put( Types.LONGVARCHAR, VARCHAR ) + .put( Types.LONGNVARCHAR, VARCHAR ) + .put( Types.BINARY, BINARY ) .put( Types.VARBINARY, VARBINARY ) diff --git a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java index fc626d5f13..a7dbeb4fc1 100644 --- a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java +++ b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java @@ -19,20 +19,28 @@ import com.google.common.collect.ImmutableList; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collection; +import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedHashMap; +import java.util.LinkedHashSet; import java.util.LinkedList; import java.util.List; import java.util.Map; +import java.util.Objects; import java.util.Optional; import java.util.Set; import java.util.function.BiFunction; +import java.util.function.Function; import java.util.function.Supplier; import java.util.stream.Collectors; import javax.annotation.Nullable; +import com.google.common.collect.Streams; +import com.google.gson.Gson; +import com.google.gson.reflect.TypeToken; import lombok.extern.slf4j.Slf4j; import org.jetbrains.annotations.NotNull; import org.polypheny.db.adapter.Adapter; @@ -43,6 +51,9 @@ import org.polypheny.db.adapter.DeployMode; import org.polypheny.db.adapter.DocumentDataSource.ExportedDocument; import org.polypheny.db.adapter.RelationalDataSource.ExportedColumn; +import org.polypheny.db.adapter.MetadataObserver.HashCache; +import org.polypheny.db.adapter.MetadataObserver.MetadataHasher; +import org.polypheny.db.adapter.MetadataObserver.PublisherManager; import org.polypheny.db.adapter.index.IndexManager; import org.polypheny.db.algebra.AlgCollation; import org.polypheny.db.algebra.AlgNode; @@ -56,6 +67,8 @@ import org.polypheny.db.algebra.type.AlgDataTypeField; import org.polypheny.db.algebra.type.DocumentType; import org.polypheny.db.catalog.Catalog; +import org.polypheny.db.catalog.catalogs.AdapterCatalog; +import org.polypheny.db.catalog.catalogs.RelAdapterCatalog; import org.polypheny.db.catalog.entity.LogicalAdapter; import org.polypheny.db.catalog.entity.LogicalAdapter.AdapterType; import org.polypheny.db.catalog.entity.LogicalConstraint; @@ -82,6 +95,9 @@ import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.entity.logical.LogicalTableWrapper; import org.polypheny.db.catalog.entity.logical.LogicalView; +import org.polypheny.db.catalog.entity.physical.PhysicalColumn; +import org.polypheny.db.catalog.entity.physical.PhysicalEntity; +import org.polypheny.db.catalog.entity.physical.PhysicalTable; import org.polypheny.db.catalog.exceptions.GenericRuntimeException; import org.polypheny.db.catalog.logistic.Collation; import org.polypheny.db.catalog.logistic.ConstraintType; @@ -110,6 +126,9 @@ import org.polypheny.db.partition.raw.RawTemperaturePartitionInformation; import org.polypheny.db.processing.DataMigrator; import org.polypheny.db.routing.RoutingManager; +import org.polypheny.db.schemaDiscovery.AbstractNode; +import org.polypheny.db.schemaDiscovery.MetadataProvider; +import org.polypheny.db.schemaDiscovery.NodeSerializer; import org.polypheny.db.transaction.Statement; import org.polypheny.db.transaction.Transaction; import 
org.polypheny.db.transaction.TransactionException; @@ -211,6 +230,272 @@ public void createStore( String uniqueName, String adapterName, AdapterType adap } + @Override + public void addSelectedMetadata( Transaction tsx, Statement statement, String uniqueName, long namespace, List selectedPaths ) { + record PathParts( String physicalNs, String physicalSchema, String table, String column, String original ) { + + } + List parsedPaths = selectedPaths.stream() + .map( p -> { + String[] parts = p.split( "\\." ); + String physNs; + String physSchema; + String table; + String column; + + if ( parts.length == 4 ) { + physNs = parts[0]; + physSchema = parts[1]; + table = parts[2]; + column = parts[3]; + + } else if ( parts.length == 3 ) { + physNs = parts[0]; + physSchema = parts[0] + ".xlsx"; + table = parts[1]; + column = parts[2].split( "\\:" )[0].toLowerCase(); + + } else if ( parts.length == 2 ) { + physNs = parts[0]; + physSchema = parts[0] + ".csv"; + table = parts[0].toLowerCase(); + column = parts[1].toLowerCase(); + + } else { + throw new IllegalArgumentException( "Unknown path format for adapter with unique name: " + uniqueName ); + } + + return new PathParts( physNs, physSchema, table, column, p ); + } ) + .toList(); + + Optional> adapter = AdapterManager.getInstance().getSource( uniqueName ); + + Map settings = new HashMap<>( adapter.get().getSettings() ); + String merged = parsedPaths.stream().map( PathParts::original ).collect( Collectors.joining( "," ) ); + settings.merge( "selectedAttributes", merged, ( oldVal, newVal ) -> oldVal.isBlank() ? newVal : oldVal + "," + newVal ); + adapter.get().updateSettings( settings ); + + Map> wishedColsPerTable = parsedPaths + .stream() + .collect( Collectors.groupingBy( PathParts::table, Collectors.mapping( PathParts::column, Collectors.toSet() ) ) ); + + Map> exportedColumns; + try { + exportedColumns = adapter.get().asRelationalDataSource().getExportedColumns(); + } catch ( Exception e ) { + throw new GenericRuntimeException( "Something went wrong when trying to get exported columns", e ); + } + + for ( Map.Entry> entry : wishedColsPerTable.entrySet() ) { + String tableName = entry.getKey(); + Set wishedColumnNames = entry.getValue(); + List exportedColumnList = exportedColumns.getOrDefault( tableName, List.of() ); + + String physicalSchema = exportedColumnList.isEmpty() + ? 
Catalog.DEFAULT_NAMESPACE_NAME + : exportedColumnList.get( 0 ).physicalSchemaName(); + + LogicalTable logical = findLogicalTableByPhysical( namespace, adapter.get(), physicalSchema, tableName ); + if ( logical == null ) { + logical = catalog.getSnapshot().rel().getTable( namespace, tableName ).orElse( null ); + } + AllocationPlacement placement; + AllocationEntity allocation; + + if ( logical == null ) { + String logicalTable = getUniqueEntityName( namespace, entry.getKey(), ( ns, en ) -> catalog.getSnapshot().rel().getTable( ns, en ) ); + + logical = catalog.getLogicalRel( namespace ).addTable( logicalTable, EntityType.SOURCE, !adapter.get().isDataReadOnly() ); + Pair pp = createSinglePartition( namespace, logical ); + placement = catalog.getAllocRel( namespace ).addPlacement( logical.id, namespace, adapter.get().getAdapterId() ); + allocation = catalog.getAllocRel( namespace ).addAllocation( adapter.get().getAdapterId(), placement.id, pp.left.id, logical.id ); + catalog.updateSnapshot(); + + } else { + placement = catalog.getSnapshot().alloc().getPlacement( adapter.get().getAdapterId(), logical.id ).orElseThrow(); + allocation = catalog.getSnapshot().alloc().getFromLogical( logical.id ).stream().filter( a -> a.adapterId == adapter.get().getAdapterId() ).findFirst().orElseThrow(); + } + + for ( ExportedColumn exportedColumn : exportedColumnList ) { + if ( !wishedColumnNames.contains( exportedColumn.name() ) || catalog.getSnapshot().rel().getColumn( logical.id, exportedColumn.name() ).isPresent() ) { + continue; + } + addColumnToSourceTable( logical, exportedColumn.physicalColumnName(), exportedColumn.name(), null, null, null, statement, tableName ); + } + + List columns = catalog.getSnapshot().rel().getColumns( logical.id ); + List aCols = catalog.getSnapshot().alloc().getColumns( allocation.id ).stream().toList(); + + adapter.get().createTable( + null, + LogicalTableWrapper.of( + logical, + columns, + List.of(), + physicalSchema, + tableName ), + AllocationTableWrapper.of( + allocation.unwrapOrThrow( AllocationTable.class ), + aCols ) ); + + catalog.updateSnapshot(); + + + } + catalog.updateSnapshot(); + statement.getQueryProcessor().resetCaches(); + tsx.commit(); + + + } + + + private LogicalTable findLogicalTableByPhysical( long namespace, DataSource adapter, String physicalSchema, String physicalTable ) { + RelAdapterCatalog ac = (RelAdapterCatalog) adapter.getCatalog(); + for ( PhysicalEntity pe : ac.getPhysicals().values() ) { + if ( !(pe instanceof PhysicalTable pt) ) { + continue; + } + if ( physicalSchema.equals( pt.getNamespaceName() ) && physicalTable.equals( pt.getName() ) ) { + + long logicalId = pt.getLogicalId(); + return catalog.getSnapshot() + .rel() + .getTable( logicalId ) + .orElse( null ); + } + } + + return null; + } + + + @Override + public void removeSelectedMetadata( List paths, Statement stmt, String uniqueName ) { + DataSource adapter = AdapterManager.getInstance() + .getSource( uniqueName ) + .orElseThrow(); + + List current = new ArrayList<>( List.of( adapter.getSettings().getOrDefault( "selectedAttributes", "" ) + .replace( "[", "" ).replace( "]", "" ).split( "," ) ) ); + current.removeIf( s -> s == null || s.isBlank() || paths.contains( s.trim() ) ); + Map settings = new HashMap<>( adapter.getSettings() ); + settings.put( "selectedAttributes", String.join( ",", current ) ); + adapter.updateSettings( settings ); + + Map> work = new HashMap<>(); + for ( String raw : paths ) { + if ( raw == null || raw.isBlank() ) { + continue; + } + PathParts pp = 
parsePathUniversal( raw );
+
+            String colName = pp.column();
+            if ( colName.contains( ":" ) ) {
+                colName = colName.split( ":" )[0];
+            }
+
+            work.computeIfAbsent( new TableKey( pp.physSchema(), pp.table() ), k -> new HashSet<>() ).add( colName );
+        }
+
+        for ( Map.Entry<TableKey, Set<String>> e : work.entrySet() ) {
+            TableKey k = e.getKey();
+            Set<String> cols = e.getValue();
+
+            LogicalTable table = findLogicalTableByPhysical( Catalog.defaultNamespaceId, adapter, k.physSchema(), k.table() );
+            if ( table == null ) {
+                table = catalog.getSnapshot().rel().getTable( Catalog.defaultNamespaceId, k.table() ).orElse( null );
+            }
+            if ( table == null ) {
+                log.info( "Table: " + k.physSchema() + "." + k.table() + " not found" );
+                continue;
+            }
+            if ( table.entityType != EntityType.SOURCE ) {
+                throw new GenericRuntimeException( "Table " + table.name + " is not a source object!" );
+            }
+
+            if ( cols.contains( "*" ) ) {
+                dropWholeSourceTable( table, stmt );
+                continue;
+            }
+            for ( String col : cols ) {
+                dropSourceColumn( table, col, stmt );
+            }
+
+            if ( catalog.getSnapshot().rel().getColumns( table.id ).isEmpty() ) {
+                dropWholeSourceTable( table, stmt );
+            }
+            catalog.updateSnapshot();
+        }
+
+        stmt.getQueryProcessor().resetCaches();
+        stmt.getTransaction().commit();
+    }
+
+
+    private void dropWholeSourceTable( LogicalTable table, Statement statement ) {
+
+        List<AllocationEntity> allocs = catalog.getSnapshot().alloc().getFromLogical( table.id );
+
+        if ( allocs.size() != 1 ) {
+            throw new GenericRuntimeException( "Source-Table " + table.name + " has more than one placement." );
+        }
+
+        AllocationTable placement = allocs.get( 0 ).unwrapOrThrow( AllocationTable.class );
+
+        for ( LogicalForeignKey fk : catalog.getSnapshot().rel().getForeignKeys( table.id ) ) {
+            catalog.getLogicalRel( table.namespaceId ).deleteForeignKey( fk.id );
+        }
+
+        for ( AllocationColumn c : placement.getColumns() ) {
+            catalog.getAllocRel( table.namespaceId ).deleteColumn( placement.id, c.columnId );
+        }
+
+        catalog.getAllocRel( table.namespaceId ).deleteAllocation( placement.id );
+
+        catalog.getLogicalRel( table.namespaceId ).deletePrimaryKey( table.id );
+        for ( LogicalColumn c : catalog.getSnapshot().rel().getColumns( table.id ) ) {
+            catalog.getLogicalRel( table.namespaceId ).deleteColumn( c.id );
+        }
+        catalog.getLogicalRel( table.namespaceId ).deleteTable( table.id );
+        catalog.updateSnapshot();
+    }
+
+
+    private void dropSourceColumn( LogicalTable table, String columnName, Statement statement ) {
+
+        LogicalColumn column = catalog.getSnapshot()
+                .rel()
+                .getColumn( table.id, columnName )
+                .orElse( null );
+        if ( column == null ) {
+            return;
+        }
+
+        for ( LogicalForeignKey fk : catalog.getSnapshot().rel().getForeignKeys( table.id ) ) {
+            if ( fk.getFieldIds().contains( column.id ) ) {
+                catalog.getLogicalRel( table.namespaceId ).deleteForeignKey( fk.id );
+            }
+        }
+
+        // Remove the column placement from every allocation before touching the logical column
+        for ( AllocationEntity alloc : catalog.getSnapshot().alloc().getFromLogical( table.id ) ) {
+
+            AllocationTable at = alloc.unwrapOrThrow( AllocationTable.class );
+
+            for ( AllocationColumn p : at.getColumns() ) {
+                if ( p.columnId == column.id ) {
+                    catalog.getAllocRel( table.namespaceId ).deleteColumn( alloc.id, column.id );
+                }
+            }
+        }
+
+        // Delete the logical column once, after all placements are gone, and reset the caches a single time
+        catalog.getLogicalRel( table.namespaceId ).deleteColumn( column.id );
+        statement.getQueryProcessor().resetCaches();
+        catalog.updateSnapshot();
+    }
+
+
     @Override
     public void createSource( Transaction transaction, String uniqueName, String adapterName, long namespace, AdapterType adapterType, Map<String, String> config, DeployMode mode ) {
         uniqueName = uniqueName.toLowerCase();
@@
-254,6 +539,35 @@ private void createDocumentSource( DataSource adapter, long namespace ) { private void createRelationalSource( Transaction transaction, DataSource adapter, long namespace ) { + String attributes = adapter.getSettings().get( "selectedAttributes" ); + String uniqueName = adapter.getUniqueName(); + List selectedAttributeNames = new ArrayList<>(); + if ( attributes != null ) { + List selectedAttributes = new Gson().fromJson( attributes, new TypeToken>() { + }.getType() ); + selectedAttributeNames = selectedAttributes.stream() + .map( s -> s.substring( s.lastIndexOf( '.' ) + 1 ) ) + .collect( Collectors.toList() ); + + if ( adapter instanceof MetadataProvider mp ) { + + PublisherManager pm = PublisherManager.getInstance(); + MetadataHasher hasher = new MetadataHasher(); + + AbstractNode node = mp.fetchMetadataTree(); + mp.setRoot( node ); + String hash = hasher.hash( NodeSerializer.serializeNode( node ).toString() ); + + HashCache.getInstance().put( uniqueName, hash ); + + if ( !(adapter.getAdapterName().equals( "Excel" ) || adapter.getAdapterName().equals( "CSV" )) ) { + pm.onAdapterDeploy( (Adapter & MetadataProvider) mp ); + } + + mp.markSelectedAttributes( selectedAttributes ); + } + + } Map> exportedColumns; try { exportedColumns = adapter.asRelationalDataSource().getExportedColumns(); @@ -263,7 +577,43 @@ private void createRelationalSource( Transaction transaction, DataSource adap } // Create table, columns etc. for ( Map.Entry> entry : exportedColumns.entrySet() ) { - // Make sure the table name is unique + String physicalSchema = entry.getValue().isEmpty() + ? Catalog.DEFAULT_NAMESPACE_NAME + : entry.getValue().get( 0 ).physicalSchemaName(); + + String baseName = entry.getKey(); + String physicalTable = baseName; + + Map> filter = new HashMap<>(); + + if ( attributes != null && !attributes.isBlank() ) { + List paths = new Gson().fromJson( attributes, new com.google.gson.reflect.TypeToken>() { + }.getType() ); + + for ( String p : paths ) { + String[] tok = p.split( "\\." 
); + if ( tok.length < 2 ) { + continue; + } + + String table = tok[tok.length - 2].toLowerCase(); + String column = tok[tok.length - 1].split( ":" )[0].toLowerCase(); + + filter.computeIfAbsent( table, k -> new HashSet<>() ).add( column ); + } + } + + selectedAttributeNames = selectedAttributeNames.stream() + .map( attr -> attr.split( ":" )[0].toLowerCase() ) + .collect( Collectors.toList() ); + + String currentTable = physicalTable.toLowerCase(); + Set allowed = filter.getOrDefault( currentTable, Collections.emptySet() ); + + if ( attributes != null && allowed.isEmpty() ) { + continue; + } + String tableName = getUniqueEntityName( namespace, entry.getKey(), ( ns, en ) -> catalog.getSnapshot().rel().getTable( ns, en ) ); LogicalTable logical = catalog.getLogicalRel( namespace ).addTable( tableName, EntityType.SOURCE, !(adapter).isDataReadOnly() ); @@ -277,38 +627,42 @@ private void createRelationalSource( Transaction transaction, DataSource adap int colPos = 1; for ( ExportedColumn exportedColumn : entry.getValue() ) { - LogicalColumn column = catalog.getLogicalRel( namespace ).addColumn( - exportedColumn.name(), - logical.id, - colPos++, - exportedColumn.type(), - exportedColumn.collectionsType(), - exportedColumn.length(), - exportedColumn.scale(), - exportedColumn.dimension(), - exportedColumn.cardinality(), - exportedColumn.nullable(), - Collation.getDefaultCollation() ); - - AllocationColumn allocationColumn = catalog.getAllocRel( namespace ).addColumn( - placement.id, - logical.id, - column.id, - adapter.adapterId, - PlacementType.STATIC, - exportedColumn.physicalPosition() ); // Not a valid partitionGroupID --> placeholder - - columns.add( column ); - aColumns.add( allocationColumn ); + if ( adapter instanceof MetadataProvider mp && attributes != null && (allowed.isEmpty() || !allowed.contains( exportedColumn.name().toLowerCase() )) ) { + continue; + } else { + LogicalColumn column = catalog.getLogicalRel( namespace ).addColumn( + exportedColumn.name(), + logical.id, + exportedColumn.physicalPosition(), + exportedColumn.type(), + exportedColumn.collectionsType(), + exportedColumn.length(), + exportedColumn.scale(), + exportedColumn.dimension(), + exportedColumn.cardinality(), + exportedColumn.nullable(), + Collation.getDefaultCollation() ); + + AllocationColumn allocationColumn = catalog.getAllocRel( namespace ).addColumn( + placement.id, + logical.id, + column.id, + adapter.adapterId, + PlacementType.STATIC, + exportedColumn.physicalPosition() ); + + columns.add( column ); + aColumns.add( allocationColumn ); + } } buildRelationalNamespace( namespace, logical, adapter ); transaction.attachCommitAction( () -> - // we can execute with initial logical and allocation data as this is a source and this will not change - adapter.createTable( null, LogicalTableWrapper.of( logical, columns, List.of() ), AllocationTableWrapper.of( allocation.unwrapOrThrow( AllocationTable.class ), aColumns ) ) ); + adapter.createTable( null, LogicalTableWrapper.of( logical, columns, List.of(), physicalSchema, physicalTable ), AllocationTableWrapper.of( allocation.unwrapOrThrow( AllocationTable.class ), aColumns ) ) ); catalog.updateSnapshot(); } + } @@ -382,6 +736,7 @@ public void dropAdapter( String name, Statement statement ) { } } AdapterManager.getInstance().removeAdapter( adapter.id ); + PublisherManager.getInstance().onAdapterUndeploy( adapter.uniqueName ); } @@ -398,9 +753,9 @@ public void renameNamespace( String newName, String currentName ) { @Override - public void addColumnToSourceTable( 
LogicalTable table, String columnPhysicalName, String columnLogicalName, String beforeColumnName, String afterColumnName, PolyValue defaultValue, Statement statement ) { + public void addColumnToSourceTable( LogicalTable table, String columnPhysicalName, String columnLogicalName, String beforeColumnName, String afterColumnName, PolyValue defaultValue, Statement statement, String tablePhysicalName ) { - if ( catalog.getSnapshot().rel().getColumn( table.id, columnLogicalName ).isEmpty() ) { + if ( !catalog.getSnapshot().rel().getColumn( table.id, columnLogicalName ).isEmpty() ) { throw new GenericRuntimeException( "There exist already a column with name %s on table %s", columnLogicalName, table.name ); } @@ -425,7 +780,7 @@ public void addColumnToSourceTable( LogicalTable table, String columnPhysicalNam long adapterId = allocation.adapterId; DataSource dataSource = AdapterManager.getInstance().getSource( adapterId ).orElseThrow(); //String physicalTableName = catalog.getSnapshot().alloc().getPhysicalTable( catalogTable.id, adapterId ).name; - List exportedColumns = dataSource.asRelationalDataSource().getExportedColumns().get( table.name ); + List exportedColumns = dataSource.asRelationalDataSource().getExportedColumns().get( tablePhysicalName != null ? tablePhysicalName : table.name ); // Check if physicalColumnName is valid ExportedColumn exportedColumn = exportedColumns.stream() @@ -452,6 +807,7 @@ public void addColumnToSourceTable( LogicalTable table, String columnPhysicalNam // Add default value addDefaultValue( table.namespaceId, defaultValue, addedColumn ); + int physPos = exportedColumn.physicalPosition(); // Add column placement catalog.getAllocRel( table.namespaceId ).addColumn( allocation.partitionId, @@ -459,12 +815,13 @@ public void addColumnToSourceTable( LogicalTable table, String columnPhysicalNam addedColumn.id, dataSource.adapterId, PlacementType.STATIC, - catalog.getSnapshot().alloc().getColumns( allocation.id ).size() );//Not a valid partitionID --> placeholder + physPos );//Not a valid partitionID --> placeholder // Set column position // catalog.getAllocRel( catalogTable.namespaceId ).updateColumnPlacementPhysicalPosition( adapterId, addedColumn.id, exportedColumn.physicalPosition ); // Reset plan cache implementation cache & routing cache + catalog.updateSnapshot(); statement.getQueryProcessor().resetCaches(); } @@ -2198,7 +2555,7 @@ private AllocationTable addAllocationTable( long namespaceId, Statement statemen List refreshedPks = catalog.getSnapshot().rel().getKey( refreshedLogical.primaryKey ).orElseThrow().fieldIds; AllocationTable refreshedAlloc = catalog.getSnapshot().alloc().getAlloc( alloc.placementId, alloc.partitionId ).flatMap( e -> e.unwrap( AllocationTable.class ) ).orElseThrow(); - adapter.createTable( statement.getPrepareContext(), LogicalTableWrapper.of( refreshedLogical, sortByPosition( refreshedLColumns ), refreshedPks ), AllocationTableWrapper.of( refreshedAlloc, refreshedAColumns ) ); + adapter.createTable( statement.getPrepareContext(), LogicalTableWrapper.of( refreshedLogical, sortByPosition( refreshedLColumns ), refreshedPks, null, null ), AllocationTableWrapper.of( refreshedAlloc, refreshedAColumns ) ); }; if ( postpone ) { @@ -3052,6 +3409,43 @@ private void prepareMonitoring( Statement statement, Kind kind, LogicalTable cat } + record PathParts( String physSchema, String table, String column, String original ) { + + } + + + record TableKey( String physSchema, String table ) { + + } + + + static String norm( String s ) { + return s == null ? 
"" : s.replace( "'", "" ).trim(); + } + + + static PathParts parsePathUniversal( String raw ) { + String p = norm( raw ); + if ( p.isBlank() ) { + throw new IllegalArgumentException( "Empty path !" ); + } + + String[] parts = p.split( "\\." ); + switch ( parts.length ) { + case 4 -> { + return new PathParts( parts[1], parts[2], parts[3], p ); + } + case 3 -> { + return new PathParts( parts[0], parts[1], parts[2], p ); + } + case 2 -> { + return new PathParts( parts[0], parts[0], parts[1], p ); + } + default -> throw new IllegalArgumentException( "Unknown path format:" + p ); + } + } + + @Override public void dropFunction() { throw new GenericRuntimeException( "Not supported yet" ); diff --git a/dbms/src/main/java/org/polypheny/db/partition/FrequencyMapImpl.java b/dbms/src/main/java/org/polypheny/db/partition/FrequencyMapImpl.java index 99f3f112bd..10e0ac5c70 100644 --- a/dbms/src/main/java/org/polypheny/db/partition/FrequencyMapImpl.java +++ b/dbms/src/main/java/org/polypheny/db/partition/FrequencyMapImpl.java @@ -331,7 +331,7 @@ private void createHotTables( LogicalTable table, List partitionsFromColdT if ( !hotPartitionsToCreate.isEmpty() ) { catalog.getSnapshot().alloc().getPartitionsOnDataPlacement( store.getAdapterId(), table.id ); - store.createTable( statement.getPrepareContext(), LogicalTableWrapper.of( null, null, null ), AllocationTableWrapper.of( null, null ) ); + store.createTable( statement.getPrepareContext(), LogicalTableWrapper.of( null, null, null, null, null ), AllocationTableWrapper.of( null, null ) ); List logicalColumns = new ArrayList<>(); catalog.getSnapshot().alloc().getColumnPlacementsOnAdapterPerEntity( store.getAdapterId(), table.id ).forEach( cp -> logicalColumns.add( catalog.getSnapshot().rel().getColumn( cp.columnId ).orElseThrow() ) ); diff --git a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvEnumerator.java b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvEnumerator.java index cbcee9cf14..20b5b88998 100644 --- a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvEnumerator.java +++ b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvEnumerator.java @@ -228,16 +228,16 @@ public boolean moveNext() { reader.close(); return false; } - if ( filterValues != null ) { - for ( int i = 0; i < strings.length; i++ ) { + if (filterValues != null) { + int limit = Math.min(strings.length, filterValues.length); + for (int i = 0; i < limit; i++) { String filterValue = filterValues[i]; - if ( filterValue != null ) { - if ( !filterValue.equals( strings[i] ) ) { - continue outer; - } + if (filterValue != null && !filterValue.equals(strings[i])) { + continue outer; } } } + current = rowConverter.convertRow( strings ); return true; } diff --git a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSource.java b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSource.java index bee19c1662..85208d46d5 100644 --- a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSource.java +++ b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvSource.java @@ -18,15 +18,19 @@ import java.io.BufferedReader; import java.io.File; +import java.io.FileReader; import java.io.IOException; import java.net.MalformedURLException; import java.net.URL; +import java.sql.Connection; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.HashSet; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; +import 
java.util.Optional; import java.util.Set; import java.util.stream.Collectors; import lombok.Getter; @@ -56,12 +60,17 @@ import org.polypheny.db.information.InformationGroup; import org.polypheny.db.information.InformationTable; import org.polypheny.db.prepare.Context; +import org.polypheny.db.schemaDiscovery.AbstractNode; +import org.polypheny.db.schemaDiscovery.AttributeNode; +import org.polypheny.db.schemaDiscovery.MetadataProvider; +import org.polypheny.db.schemaDiscovery.Node; import org.polypheny.db.transaction.PolyXid; import org.polypheny.db.type.PolyType; import org.polypheny.db.util.Source; import org.polypheny.db.util.Sources; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import javax.annotation.Nullable; @Extension @AdapterProperties( @@ -74,7 +83,7 @@ @AdapterSettingString(subOf = "method_link", defaultValue = "classpath://hr", name = "directoryName", description = "You can select a path to a folder or specific .csv or .csv.gz files.", position = 2) @AdapterSettingInteger(name = "maxStringLength", defaultValue = 255, position = 3, description = "Which length (number of characters including whitespace) should be used for the varchar columns. Make sure this is equal or larger than the longest string in any of the columns.") -public class CsvSource extends DataSource implements RelationalDataSource { +public class CsvSource extends DataSource implements RelationalDataSource, MetadataProvider { private static final Logger log = LoggerFactory.getLogger( CsvSource.class ); @Delegate(excludes = Excludes.class) @@ -87,6 +96,9 @@ public class CsvSource extends DataSource implements Relation private final int maxStringLength; private Map> exportedColumnCache; + private AbstractNode metadataRoot; + private Map>> previewByTable = new LinkedHashMap<>(); + public CsvSource( final long storeId, final String uniqueName, final Map settings, final DeployMode mode ) { super( storeId, uniqueName, settings, mode, true, new RelAdapterCatalog( storeId ), Set.of( DataModel.RELATIONAL ) ); @@ -374,6 +386,191 @@ public void renameLogicalColumn( long id, String newColumnName ) { } + @Override + public AbstractNode fetchMetadataTree() { + this.previewByTable = new HashMap<>(); + try { + Source src = openCsvSource( null ); + String fileName = src.file().getName(); + String baseName = fileName.replaceFirst( "\\.csv(\\.gz)?$", "" ); + AbstractNode rootNode = new Node( "csv", baseName ); + + try ( BufferedReader reader = new BufferedReader( src.reader() ) ) { + String headerLine = reader.readLine(); + if ( headerLine == null ) { + throw new RuntimeException( "No header line found in " + fileName ); + } + + String[] rawColumns = headerLine.split( "," ); + for ( String colRaw : rawColumns ) { + String[] split = colRaw.split( ":" ); + String name = split[0].trim().replaceAll( "[^a-zA-Z0-9_]", "" ); + String type = split.length > 1 ? 
split[1].trim() : "string"; + + AbstractNode columnNode = new AttributeNode( "column", name ); + columnNode.addProperty( "type", mapCsvType( type ) ); + columnNode.addProperty( "nullable", true ); + rootNode.addChild( columnNode ); + } + } + + List> preview = fetchPreview( null, fileName, 10 ); + this.previewByTable.put( fileName, preview ); + + return rootNode; + + } catch ( IOException e ) { + throw new RuntimeException( "Failed to parse CSV metadata", e ); + } + } + + + private String mapCsvType( String rawType ) { + switch ( rawType ) { + case "int": + case "integer": + return "INTEGER"; + case "bool": + case "boolean": + return "BOOLEAN"; + case "long": + return "BIGINT"; + case "float": + return "REAL"; + case "double": + return "DOUBLE"; + case "date": + return "DATE"; + case "time": + return "TIME"; + case "timestamp": + return "TIMESTAMP"; + case "string": + default: + return "VARCHAR"; + } + } + + + @Override + public List> fetchPreview( Connection conn, String fqName, int limit ) { + try { + Source src = openCsvSource( fqName ); + List> rows = new ArrayList<>(); + + try ( BufferedReader reader = new BufferedReader( src.reader() ) ) { + String headerLine = reader.readLine(); + if ( headerLine == null ) { + return List.of(); + } + + String[] headerParts = headerLine.split( "," ); + List colNames = new ArrayList<>( headerParts.length ); + for ( String raw : headerParts ) { + String[] split = raw.split( ":" ); + colNames.add( split[0].trim() ); + } + + String line; + int count = 0; + while ( (line = reader.readLine()) != null && count < limit ) { + String[] values = line.split( ",", -1 ); + Map row = new LinkedHashMap<>(); + for ( int i = 0; i < colNames.size(); i++ ) { + String value = i < values.length ? values[i].trim() : null; + row.put( colNames.get( i ), value ); + } + rows.add( row ); + count++; + } + } + + return rows; + + } catch ( IOException e ) { + throw new RuntimeException( "Failed to read CSV preview: " + fqName, e ); + } + } + + + private Source openCsvSource( @Nullable String fqName ) throws IOException { + if ( csvDir.getProtocol().equals( "jar" ) ) { + if ( fqName == null || fqName.isBlank() ) { + throw new GenericRuntimeException( "fqName required when using jar protocol for CSV." ); + } + return Sources.of( new URL( csvDir, fqName ) ); + } + + if ( Sources.of( csvDir ).file().isFile() ) { + return Sources.of( csvDir ); + } + + File[] files = Sources.of( csvDir ) + .file() + .listFiles( ( d, name ) -> name.endsWith( ".csv" ) || name.endsWith( ".csv.gz" ) ); + if ( files == null || files.length == 0 ) { + throw new GenericRuntimeException( "No .csv files were found in: " + Sources.of( csvDir ).file() ); + } + + File chosen; + if ( fqName != null && !fqName.isBlank() ) { + chosen = Arrays.stream( files ) + .filter( f -> f.getName().equals( fqName ) ) + .findFirst() + .orElseThrow( () -> new GenericRuntimeException( "Requested CSV not found: " + fqName ) ); + } else { + chosen = files[0]; + } + + return Sources.of( new URL( csvDir, chosen.getName() ) ); + } + + + @Override + public void markSelectedAttributes( List selectedPaths ) { + if ( this.metadataRoot == null ) { + throw new GenericRuntimeException( "No metadata tree available." ); + } + + for ( String path : selectedPaths ) { + int lastDot = path.lastIndexOf( '.' 
); + if ( lastDot == -1 || lastDot == path.length() - 1 ) { + continue; + } + + String columnName = path.substring( lastDot + 1 ); + String normalizedColumnName = columnName.replaceAll( "[^a-zA-Z0-9_]", "" ); + + Optional attrOpt = metadataRoot.getChildren().stream() + .filter( child -> child instanceof AttributeNode + && child.getName().equals( normalizedColumnName ) ) + .findFirst(); + + if ( attrOpt.isPresent() ) { + ((AttributeNode) attrOpt.get()).setSelected( true ); + } + } + } + + + @Override + public void setRoot( AbstractNode root ) { + this.metadataRoot = root; + } + + + @Override + public Object getPreview() { + return this.previewByTable; + } + + + @Override + public AbstractNode getRoot() { + return this.metadataRoot; + } + + @Override public RelationalDataSource asRelationalDataSource() { return this; diff --git a/plugins/excel-adapter/build.gradle b/plugins/excel-adapter/build.gradle index 9c18326f76..e8370c2905 100644 --- a/plugins/excel-adapter/build.gradle +++ b/plugins/excel-adapter/build.gradle @@ -6,6 +6,8 @@ dependencies { compileOnly project(":plugins:sql-language") implementation group: "org.apache.poi", name: "poi", version: poi_version + implementation "org.apache.poi:poi-ooxml:5.2.5" + implementation "org.apache.poi:poi-ooxml-full:5.2.5" // --- Test Compile --- testImplementation project(path: ":core", configuration: "tests") diff --git a/plugins/excel-adapter/src/main/java/org/polypheny/db/adapter/excel/ExcelEnumerator.java b/plugins/excel-adapter/src/main/java/org/polypheny/db/adapter/excel/ExcelEnumerator.java index bfbcac32f6..ba63486143 100644 --- a/plugins/excel-adapter/src/main/java/org/polypheny/db/adapter/excel/ExcelEnumerator.java +++ b/plugins/excel-adapter/src/main/java/org/polypheny/db/adapter/excel/ExcelEnumerator.java @@ -354,7 +354,6 @@ protected PolyValue convert( ExcelFieldType fieldType, Cell cell ) { return PolyNull.NULL; } try { - switch ( fieldType ) { case BOOLEAN: return PolyBoolean.of( cell.getBooleanCellValue() ); @@ -392,7 +391,6 @@ protected PolyValue convert( ExcelFieldType fieldType, Cell cell ) { throw new GenericRuntimeException( "Could not read the date field from the document." 
); } case TIME: - try { Date date = TIME_FORMAT_TIME.parse( cell .getStringCellValue() ); @@ -412,6 +410,9 @@ protected PolyValue convert( ExcelFieldType fieldType, Cell cell ) { } case STRING: default: + if ( cell.getCellType() == CellType.NUMERIC ) { + return PolyString.of( cell.toString() ); + } return PolyString.of( cell.getStringCellValue() ); } } catch ( Exception e ) { @@ -458,12 +459,10 @@ public PolyValue[] convertRow( Row row ) { public PolyValue[] convertNormalRow( Row row ) { - Iterator cells = row.cellIterator(); final PolyValue[] objects = new PolyValue[fields.length]; - while ( cells.hasNext() ) { - Cell cell = cells.next(); - int field = fields[cell.getColumnIndex()] - 1; - objects[field] = convert( fieldTypes[field], cell ); + for ( int i = 0; i < fields.length; i++ ) { + Cell cell = row.getCell( fields[i]); + objects[i] = convert( fieldTypes[i], cell ); } return objects; } @@ -472,9 +471,14 @@ public PolyValue[] convertNormalRow( Row row ) { public PolyValue[] convertStreamRow( Row row ) { final PolyValue[] objects = new PolyValue[fields.length + 1]; objects[0] = PolyLong.of( System.currentTimeMillis() ); + for ( int i = 0; i < fields.length; i++ ) { + Cell cell = row.getCell( fields[i] ); + objects[i + 1] = convert( fieldTypes[i], cell ); + } return objects; } + } diff --git a/plugins/excel-adapter/src/main/java/org/polypheny/db/adapter/excel/ExcelNamespace.java b/plugins/excel-adapter/src/main/java/org/polypheny/db/adapter/excel/ExcelNamespace.java index e22ef083ca..b0cfffee3b 100644 --- a/plugins/excel-adapter/src/main/java/org/polypheny/db/adapter/excel/ExcelNamespace.java +++ b/plugins/excel-adapter/src/main/java/org/polypheny/db/adapter/excel/ExcelNamespace.java @@ -20,9 +20,11 @@ import java.net.URL; import java.util.ArrayList; import java.util.HashMap; +import java.util.HashSet; import java.util.LinkedList; import java.util.List; import java.util.Map; +import java.util.Set; import org.jetbrains.annotations.Nullable; import org.polypheny.db.algebra.type.AlgDataType; import org.polypheny.db.algebra.type.AlgDataTypeFactory; @@ -45,7 +47,7 @@ public class ExcelNamespace extends Namespace { private final URL directoryUrl; private final ExcelTable.Flavor flavor; private final Map tableMap = new HashMap<>(); - private final String sheet; + private String sheet; /** @@ -66,31 +68,61 @@ public ExcelNamespace( long id, long adapterId, URL directoryUrl, ExcelTable.Fla } - public ExcelTable createExcelTable( PhysicalTable table, ExcelSource excelSource ) { + public ExcelTable createExcelTable( PhysicalTable table, ExcelSource excelSource, List physicalIds ) { + final AlgDataTypeFactory typeFactory = new PolyTypeFactoryImpl( AlgDataTypeSystem.DEFAULT ); + + // The physical id's are one too far, so it has to be mapped one index back. 
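+        // Catalog column positions are handed over 1-based (getExportedColumns starts counting at 1),
+        // while POI cell indices used by row.getCell() are 0-based, hence the shift by one below.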
+ int[] fields = physicalIds.stream() + .mapToInt( i -> i - 1 ) + .toArray(); + + Map byPosition = new HashMap<>(); + for ( PhysicalColumn c : table.columns ) { + byPosition.put( c.position, c ); + } + final AlgDataTypeFactory.Builder fieldInfo = typeFactory.builder(); - List fieldTypes = new LinkedList<>(); - List fieldIds = new ArrayList<>( table.columns.size() ); - for ( PhysicalColumn column : table.columns ) { - AlgDataType sqlType = sqlType( typeFactory, column.type, column.length, column.scale, null ); + List fieldTypes = new ArrayList<>(); + + for ( int pos : fields ) { + PhysicalColumn column = byPosition.get( pos ); + if ( column == null ) { + throw new GenericRuntimeException( "No column for position " + pos ); + } + + AlgDataType sqlType = sqlType( typeFactory, + column.type, + column.length, + column.scale, + null ); + fieldInfo.add( column.id, column.name, column.name, sqlType ).nullable( column.nullable ); fieldTypes.add( ExcelFieldType.getExcelFieldType( column.type ) ); - fieldIds.add( column.position ); } - String excelFileName = excelSource.sheetName; + String[] parts = table.name.split( "_", 2 ); + String filePart = parts[0]; + String sheetPart = parts.length > 1 ? parts[1] : ""; + String excelName = filePart + ".xlsx"; + this.sheet = sheetPart; Source source; try { - source = Sources.of( new URL( directoryUrl, excelFileName ) ); + source = Sources.of( new URL( directoryUrl, excelName ) ); } catch ( MalformedURLException e ) { throw new GenericRuntimeException( e ); } - int[] fields = fieldIds.stream().mapToInt( i -> i ).toArray(); - ExcelTable physical = createTable( table, source, AlgDataTypeImpl.proto( fieldInfo.build() ), fieldTypes, fields, excelSource ); + + ExcelTable physical = createTable( table, + source, + AlgDataTypeImpl.proto( fieldInfo.build() ), + fieldTypes, + fields, + excelSource ); + tableMap.put( physical.name + "_" + physical.allocationId, physical ); return physical; - } diff --git a/plugins/excel-adapter/src/main/java/org/polypheny/db/adapter/excel/ExcelSource.java b/plugins/excel-adapter/src/main/java/org/polypheny/db/adapter/excel/ExcelSource.java index 0163bfd9ca..05238a7799 100644 --- a/plugins/excel-adapter/src/main/java/org/polypheny/db/adapter/excel/ExcelSource.java +++ b/plugins/excel-adapter/src/main/java/org/polypheny/db/adapter/excel/ExcelSource.java @@ -21,19 +21,26 @@ import java.io.IOException; import java.net.MalformedURLException; import java.net.URL; +import java.nio.file.Paths; +import java.sql.Connection; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; +import java.util.Optional; import java.util.Set; import java.util.stream.Collectors; +import com.fasterxml.jackson.core.type.TypeReference; +import com.fasterxml.jackson.databind.ObjectMapper; import lombok.Getter; import lombok.experimental.Delegate; import lombok.extern.slf4j.Slf4j; import org.apache.poi.ss.usermodel.Cell; +import org.apache.poi.ss.usermodel.DateUtil; import org.apache.poi.ss.usermodel.Row; import org.apache.poi.ss.usermodel.Sheet; import org.apache.poi.ss.usermodel.Workbook; @@ -53,6 +60,7 @@ import org.polypheny.db.catalog.catalogs.RelAdapterCatalog; import org.polypheny.db.catalog.entity.allocation.AllocationEntity; import org.polypheny.db.catalog.entity.allocation.AllocationTableWrapper; +import org.polypheny.db.catalog.entity.logical.LogicalColumn; import 
org.polypheny.db.catalog.entity.logical.LogicalTableWrapper; import org.polypheny.db.catalog.entity.physical.PhysicalEntity; import org.polypheny.db.catalog.entity.physical.PhysicalTable; @@ -61,6 +69,10 @@ import org.polypheny.db.information.InformationGroup; import org.polypheny.db.information.InformationTable; import org.polypheny.db.prepare.Context; +import org.polypheny.db.schemaDiscovery.AbstractNode; +import org.polypheny.db.schemaDiscovery.AttributeNode; +import org.polypheny.db.schemaDiscovery.MetadataProvider; +import org.polypheny.db.schemaDiscovery.Node; import org.polypheny.db.transaction.PolyXid; import org.polypheny.db.type.PolyType; import org.polypheny.db.util.Source; @@ -78,7 +90,10 @@ @AdapterSettingString(name = "sheetName", description = "default to read the first sheet", defaultValue = "", required = false) @AdapterSettingInteger(name = "maxStringLength", defaultValue = 255, position = 2, description = "Which length (number of characters including whitespace) should be used for the varchar columns. Make sure this is equal or larger than the longest string in any of the columns.") -public class ExcelSource extends DataSource implements RelationalDataSource { +public class ExcelSource extends DataSource implements RelationalDataSource, MetadataProvider { + + public AbstractNode metadataRoot; + private Map>> previewByTable = new LinkedHashMap<>(); @Delegate(excludes = Excludes.class) private final RelationalScanDelegate delegate; @@ -113,10 +128,25 @@ public ExcelSource( final long storeId, final String uniqueName, final Map settings ) { String dir = settings.get( "directory" ); + if ( dir != null && dir.trim().startsWith( "[" ) ) { + try { + List list = new ObjectMapper() + .readValue( dir, new TypeReference>() { + } ); + dir = list.isEmpty() ? 
null : list.get( 0 ); + } catch ( IOException e ) { + throw new GenericRuntimeException( "Cannot parse directory JSON", e ); + } + } + if ( connectionMethod == ConnectionMethod.LINK ) { dir = settings.get( "directoryName" ); } + if ( dir == null ) { + throw new GenericRuntimeException( "Directory must not be null" ); + } + if ( dir.startsWith( "classpath://" ) ) { excelDir = this.getClass().getClassLoader().getResource( dir.replace( "classpath://", "" ) + "/" ); } else { @@ -145,7 +175,12 @@ public List createTable( Context context, LogicalTableWrapper lo logical.columns.stream().collect( Collectors.toMap( t -> t.id, t -> t ) ), logical.pkIds, allocation ); - ExcelTable physical = currentNamespace.createExcelTable( table, this ); + List physicalIds = new ArrayList<>(); + for ( LogicalColumn column : logical.columns ) { + physicalIds.add( column.position ); + } + + ExcelTable physical = currentNamespace.createExcelTable( table, this, physicalIds ); adapterCatalog.replacePhysical( physical ); @@ -194,8 +229,6 @@ protected void reloadSettings( List updatedSettings ) { @Override public Map> getExportedColumns() { - String currentSheetName; - if ( connectionMethod == ConnectionMethod.UPLOAD && exportedColumnCache != null ) { // If we upload, file will not be changed, and we can cache the columns information, if "link" is used this is not advised return exportedColumnCache; @@ -238,115 +271,98 @@ public Map> getExportedColumns() { .replaceAll( "[^a-z0-9_]+", "" ); } - List list = new ArrayList<>(); - int position = 1; try { Source source = Sources.of( new URL( excelDir, fileName ) ); - File file = new File( source.path() ); //creating a new file instance - FileInputStream fs = new FileInputStream( file ); + Workbook workbook = WorkbookFactory.create( source.file() ); - Workbook workbook = WorkbookFactory.create( fs ); - Sheet sheet; + for ( int s = 0; s < workbook.getNumberOfSheets(); s++ ) { - if ( this.sheetName.equals( "" ) ) { - sheet = workbook.getSheetAt( 0 ); - currentSheetName = workbook.getSheetName( 0 ); + Sheet sheet = workbook.getSheetAt( s ); + String currentSheetName = workbook.getSheetName( s ); - } else { - sheet = workbook.getSheet( this.sheetName ); - currentSheetName = this.sheetName; - } + List list = new ArrayList<>(); + int position = 1; + + Row header = sheet.getRow( 0 ); + if ( header == null ) { + continue; + } - // Read first row to extract column attribute name and datatype - for ( Row row : sheet ) { - // For each row, iterate through all the columns - Iterator cellIterator = row.cellIterator(); - - while ( cellIterator.hasNext() ) { - Cell cell = cellIterator.next(); - try { - String[] colSplit = cell.getStringCellValue().split( ":" ); - String name = colSplit[0] - .toLowerCase() - .trim() - .replaceAll( "[^a-z0-9_]+", "" ); - String typeStr = "string"; - if ( colSplit.length > 1 ) { - typeStr = colSplit[1].toLowerCase().trim(); + for ( Cell cell : header ) { + String[] colSplit = cell.getStringCellValue().split( ":" ); + String name = colSplit[0].toLowerCase() + .trim() + .replaceAll( "[^a-z0-9_]+", "" ); + String typeStr = (colSplit.length > 1 ? 
colSplit[1] : "string") + .toLowerCase().trim(); + + PolyType type; + Integer length = null, scale = null; + switch ( typeStr ) { + case "int" -> type = PolyType.INTEGER; + case "boolean" -> type = PolyType.BOOLEAN; + case "long" -> type = PolyType.BIGINT; + case "float" -> type = PolyType.REAL; + case "double" -> type = PolyType.DOUBLE; + case "date" -> type = PolyType.DATE; + case "time" -> { + type = PolyType.TIME; + length = 0; } - PolyType collectionsType = null; - PolyType type; - Integer length = null; - Integer scale = null; - Integer dimension = null; - Integer cardinality = null; - switch ( typeStr.toLowerCase() ) { - case "int": - type = PolyType.INTEGER; - break; - case "string": - type = PolyType.VARCHAR; - length = maxStringLength; - break; - case "boolean": - type = PolyType.BOOLEAN; - break; - case "long": - type = PolyType.BIGINT; - break; - case "float": - type = PolyType.REAL; - break; - case "double": - type = PolyType.DOUBLE; - break; - case "date": - type = PolyType.DATE; - break; - case "time": - type = PolyType.TIME; - length = 0; - break; - case "timestamp": - type = PolyType.TIMESTAMP; - length = 0; - break; - default: - throw new GenericRuntimeException( "Unknown type: " + typeStr.toLowerCase() ); + case "timestamp" -> { + type = PolyType.TIMESTAMP; + length = 0; + } + default -> { + type = PolyType.VARCHAR; + length = maxStringLength; } - - list.add( new ExportedColumn( - name, - type, - collectionsType, - length, - scale, - dimension, - cardinality, - false, - fileName, - physicalTableName, - name, - position, - position == 1 ) ); // TODO - - position++; - } catch ( Exception e ) { - throw new GenericRuntimeException( e ); } + + list.add( new ExportedColumn( + name, type, + null, length, scale, + null, null, + false, + fileName, + physicalTableName, + name, + position, + position == 1 ) ); + position++; } - break; + + exportedColumnCache.put( physicalTableName + "_" + currentSheetName, list ); } } catch ( IOException e ) { throw new GenericRuntimeException( e ); } - exportedColumnCache.put( physicalTableName + "_" + currentSheetName, list ); + } this.exportedColumnCache = exportedColumnCache; return exportedColumnCache; } + private Set resolveFileNames() { + Set names = new HashSet<>(); + + if ( Sources.of( excelDir ).file().isFile() ) { + names.add( Sources.of( excelDir ).file().getName() ); + return names; + } + + File[] files = Sources.of( excelDir ).file() + .listFiles( f -> f.getName().matches( ".*\\.(xlsx?|xlsx\\.gz|xls\\.gz)$" ) ); + if ( files != null ) { + Arrays.stream( files ) + .map( File::getName ) + .forEach( names::add ); + } + return names; + } + + private void addInformationExportedColumns() { for ( Map.Entry> entry : getExportedColumns().entrySet() ) { InformationGroup group = new InformationGroup( informationPage, entry.getValue().get( 0 ).physicalSchemaName() ); @@ -370,6 +386,248 @@ private void addInformationExportedColumns() { } + @Override + public AbstractNode fetchMetadataTree() { + + this.previewByTable = new LinkedHashMap<>(); + + // Assuming that one file was uploaded only. + Source filePath; + String firstFile = resolveFileNames().stream().findFirst().orElseThrow( () -> new GenericRuntimeException( "No file found" ) ); + try { + filePath = Sources.of( new URL( excelDir, firstFile ) ); + } catch ( MalformedURLException e ) { + throw new RuntimeException( e ); + } + String mappeName = firstFile.split( "\\." 
)[0]; + + AbstractNode root = new Node( "excel", mappeName ); + try ( FileInputStream fis = new FileInputStream( filePath.path() ); Workbook wb = WorkbookFactory.create( fis ) ) { + + for ( Sheet sheet : wb ) { + String sheetName = sheet.getSheetName(); + AbstractNode sheetNode = new Node( "sheet", mappeName.toLowerCase() + "_" + sheetName ); + + Row header = sheet.getRow( sheet.getFirstRowNum() ); + if ( header == null ) { + continue; + } + for ( int c = 0; c < header.getLastCellNum(); c++ ) { + Cell cell = header.getCell( c ); + String colName = getCellValueAsString( cell, "COL_" + (c + 1) ); + + AbstractNode colNode = new AttributeNode( "column", colName ); + colNode.addProperty( "type", inferType( sheet, c, header.getRowNum() + 1, 20 ) ); + colNode.addProperty( "nullable", true ); + + sheetNode.addChild( colNode ); + } + + String fqName = mappeName + "." + sheetName; + List> rows = fetchPreview( null, fqName, 10 ); + this.previewByTable.put( fqName, rows ); + + root.addChild( sheetNode ); + } + + } catch ( IOException e ) { + throw new RuntimeException( "Failed to read Excel metadata: " + filePath, e ); + } + + return root; + } + + + private String inferType( Sheet sheet, int colIndex, int startRow, int maxRows ) { + int checked = 0; + for ( int r = startRow; r <= sheet.getLastRowNum() && checked < maxRows; r++ ) { + Row row = sheet.getRow( r ); + if ( row == null ) { + continue; + } + Cell cell = row.getCell( colIndex ); + if ( cell == null ) { + continue; + } + + switch ( cell.getCellType() ) { + case NUMERIC: + if ( DateUtil.isCellDateFormatted( cell ) ) { + return "DATE"; + } + return "DOUBLE"; + case STRING: + return "STRING"; + case BOOLEAN: + return "BOOLEAN"; + default: + continue; + } + } + return "STRING"; + } + + + private String getCellValueAsString( Cell cell, String fallback ) { + if ( cell == null ) { + return fallback; + } + try { + return switch ( cell.getCellType() ) { + case STRING -> cell.getStringCellValue(); + case NUMERIC -> String.valueOf( cell.getNumericCellValue() ); + case BOOLEAN -> String.valueOf( cell.getBooleanCellValue() ); + case FORMULA -> cell.getCellFormula(); + default -> fallback; + }; + } catch ( Exception e ) { + return fallback; + } + } + + + @Override + public List> fetchPreview( Connection conn, String fqName, int limit ) { + + String[] parts = fqName.split( "\\.", 2 ); + String sheetName = parts.length == 2 ? parts[1] : parts[0]; + Source filePath; + String firstFile = resolveFileNames().stream().findFirst().orElseThrow( () -> new GenericRuntimeException( "No file found" ) ); + try { + filePath = Sources.of( new URL( excelDir, firstFile ) ); + } catch ( MalformedURLException e ) { + throw new RuntimeException( e ); + } + + List> rows = new ArrayList<>(); + + try ( FileInputStream fis = new FileInputStream( filePath.path() ); Workbook wb = WorkbookFactory.create( fis ) ) { + + Sheet sheet = wb.getSheet( sheetName ); + if ( sheet == null ) { + return List.of(); + } + + Row header = sheet.getRow( sheet.getFirstRowNum() ); + if ( header == null ) { + return List.of(); + } + + // All columns in the Excel have to start at the most left. Blank columns are marked as COL_i . 
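// Illustrative note (annotation, not part of this patch): a header row such as
// [ "id", <blank>, "name" ] is exposed with the column names [ "id", "COL_2", "name" ],
// because blank or missing header cells fall back to the positional "COL_" + (c + 1)
// name used below. fetchMetadataTree() applies the same fallback, so the preview keys
// line up with the discovered column nodes.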
+ List colNames = new ArrayList<>(); + for ( int c = 0; c < header.getLastCellNum(); c++ ) { + colNames.add( getCellValueAsString( header.getCell( c ), "COL_" + (c + 1) ) ); + } + + int first = header.getRowNum() + 1; + int last = Math.min( sheet.getLastRowNum(), first + limit - 1 ); + + for ( int r = first; r <= last; r++ ) { + Row dataRow = sheet.getRow( r ); + if ( dataRow == null ) { + continue; + } + + Map map = new LinkedHashMap<>(); + for ( int c = 0; c < colNames.size(); c++ ) { + map.put( colNames.get( c ), extractCellValue( dataRow.getCell( c ) ) ); + } + rows.add( map ); + } + + } catch ( IOException e ) { + throw new RuntimeException( "Failed to read Excel preview: " + filePath, e ); + } + + return rows; + } + + + private Object extractCellValue( Cell cell ) { + if ( cell == null ) { + return null; + } + return switch ( cell.getCellType() ) { + case STRING -> cell.getStringCellValue(); + case NUMERIC -> DateUtil.isCellDateFormatted( cell ) + ? cell.getDateCellValue() + : cell.getNumericCellValue(); + case BOOLEAN -> cell.getBooleanCellValue(); + case FORMULA -> cell.getCellFormula(); + case BLANK -> null; + default -> cell.toString(); + }; + } + + + @Override + public void markSelectedAttributes( List selectedPaths ) { + List shortNames = selectedPaths.stream() + .map( p -> p.substring( p.lastIndexOf( '.' ) + 1 ).toLowerCase() ) + .collect( Collectors.toList() ); + + List> attributePaths = new ArrayList<>(); + + for ( String path : selectedPaths ) { + String cleanPath = path.trim(); + + List segments = Arrays.asList( cleanPath.split( "\\." ) ); + if ( !segments.isEmpty() && segments.get( 0 ).equals( metadataRoot.getName() ) ) { + segments = segments.subList( 1, segments.size() ); + } + + attributePaths.add( segments ); + } + + for ( List pathSegments : attributePaths ) { + AbstractNode current = metadataRoot; + + for ( int i = 0; i < pathSegments.size(); i++ ) { + String segment = pathSegments.get( i ); + + if ( i == pathSegments.size() - 1 ) { + Optional attrNodeOpt = current.getChildren().stream() + .filter( c -> c instanceof AttributeNode && segment.equals( c.getName() ) ) + .findFirst(); + + if ( attrNodeOpt.isPresent() ) { + ((AttributeNode) attrNodeOpt.get()).setSelected( true ); + } + } else { + Optional childOpt = current.getChildren().stream() + .filter( c -> segment.equals( c.getName() ) ) + .findFirst(); + + if ( childOpt.isPresent() ) { + current = childOpt.get(); + } else { + break; + } + } + } + } + + } + + + @Override + public void setRoot( AbstractNode root ) { + this.metadataRoot = root; + } + + + @Override + public Object getPreview() { + return this.previewByTable; + } + + + @Override + public AbstractNode getRoot() { + return this.metadataRoot; + } + + @Override public RelationalDataSource asRelationalDataSource() { return this; diff --git a/plugins/jdbc-adapter-framework/build.gradle b/plugins/jdbc-adapter-framework/build.gradle index 720413682b..b5bc32df89 100644 --- a/plugins/jdbc-adapter-framework/build.gradle +++ b/plugins/jdbc-adapter-framework/build.gradle @@ -5,6 +5,7 @@ dependencies { compileOnly project(":core") compileOnly project(":plugins:sql-language") api group: 'org.apache.commons', name: 'commons-dbcp2', version: commons_dbcp2_version + implementation 'com.fasterxml.jackson.core:jackson-databind:2.16.1' // --- Test Compile --- testImplementation project(path: ":core", configuration: "tests") diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/connection/ConnectionFactory.java 
b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/connection/ConnectionFactory.java index 4b5947b4c1..ccea490d3c 100644 --- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/connection/ConnectionFactory.java +++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/connection/ConnectionFactory.java @@ -37,4 +37,6 @@ public interface ConnectionFactory { int getNumIdle(); + void releaseConnectionHandler( Xid xid, boolean commit ) throws ConnectionHandlerException; + } diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/connection/TransactionalConnectionFactory.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/connection/TransactionalConnectionFactory.java index 2a1c88bacf..b628156f83 100644 --- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/connection/TransactionalConnectionFactory.java +++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/connection/TransactionalConnectionFactory.java @@ -132,6 +132,26 @@ public int getNumIdle() { } + @Override + public void releaseConnectionHandler( Xid xid, boolean commit ) throws ConnectionHandlerException { + TransactionalConnectionHandler handler = activeInstances.remove( xid ); + if ( handler == null ) { + log.warn( "No active handler for XID {} to release", xid ); + return; + } + try { + if ( commit ) { + handler.commit(); + } else { + handler.rollback(); + } + } finally { + handler.xid = null; + freeInstances.offer( handler ); + } + } + + public class TransactionalConnectionHandler extends ConnectionHandler { private Xid xid; diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/connection/XaConnectionFactory.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/connection/XaConnectionFactory.java index d64e35b841..a1ec8dea3f 100644 --- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/connection/XaConnectionFactory.java +++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/connection/XaConnectionFactory.java @@ -134,6 +134,12 @@ public int getNumIdle() { } + @Override + public void releaseConnectionHandler( Xid xid, boolean commit ) throws ConnectionHandlerException { + log.warn( "Not implemented!" 
); + } + + public class XaConnectionHandler extends ConnectionHandler { private final XAResource xaResource; diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/sources/AbstractJdbcSource.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/sources/AbstractJdbcSource.java index 12ce90e4ba..ad509b3c18 100644 --- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/sources/AbstractJdbcSource.java +++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/sources/AbstractJdbcSource.java @@ -23,6 +23,7 @@ import java.sql.SQLException; import java.util.ArrayList; import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; @@ -51,6 +52,7 @@ import org.polypheny.db.schema.Namespace; import org.polypheny.db.sql.language.SqlDialect; import org.polypheny.db.transaction.PUID; +import org.polypheny.db.transaction.PUID.Type; import org.polypheny.db.transaction.PolyXid; import org.polypheny.db.type.PolyType; @@ -59,7 +61,7 @@ public abstract class AbstractJdbcSource extends DataSource implements RelationalDataSource, ExtensionPoint { @Delegate(excludes = Exclude.class) - private final RelationalScanDelegate delegate; + protected final RelationalScanDelegate delegate; protected SqlDialect dialect; protected JdbcSchema currentJdbcSchema; @@ -209,14 +211,44 @@ public void rollback( PolyXid xid ) { @Override public Map> getExportedColumns() { Map> map = new HashMap<>(); - PolyXid xid = PolyXid.generateLocalTransactionIdentifier( PUID.EMPTY_PUID, PUID.EMPTY_PUID ); + + java.sql.Statement statement = null; + Connection connection = null; + ConnectionHandler connectionHandler = null; + + // Use random PUID to prevent usage of an expired snapshot of the transaction identifier. + PolyXid xid = PolyXid.generateLocalTransactionIdentifier( PUID.randomPUID( Type.RANDOM ), PUID.randomPUID( Type.RANDOM ) ); try { - ConnectionHandler connectionHandler = connectionFactory.getOrCreateConnectionHandler( xid ); - java.sql.Statement statement = connectionHandler.getStatement(); - Connection connection = statement.getConnection(); + connectionHandler = connectionFactory.getOrCreateConnectionHandler( xid ); + statement = connectionHandler.getStatement(); + connection = statement.getConnection(); DatabaseMetaData dbmd = connection.getMetaData(); - String[] tables = settings.get( "tables" ).split( "," ); + String[] tables; + + // TODO If-else usage for possibly allow the usage of the old table-setting or selecting metadata. Not implemented yet. + if ( !settings.containsKey( "selectedAttributes" ) || settings.get( "selectedAttributes" ).equals( "" ) || settings.get( "selectedAttributes" ).isEmpty() || settings.get( "selectedAttributes" ) == null ) { + tables = settings.get( "tables" ).split( "," ); + } else { + String[] names2 = settings.get( "selectedAttributes" ).split( "," ); + Set tableNames = new HashSet<>(); + + for ( String s : names2 ) { + String attr = s.split( " : " )[0]; + + String[] parts = attr.split( "\\." ); + if ( parts.length >= 3 ) { + String tableName = parts[1] + "." + parts[2]; + + if ( !requiresSchema() ) { + tableNames.add( parts[2] ); + } else { + tableNames.add( tableName ); + } + } + } + tables = tableNames.toArray( new String[0] ); + } for ( String str : tables ) { String[] names = str.split( "\\." 
); if ( names.length == 0 || names.length > 2 || (requiresSchema() && names.length == 1) ) { @@ -232,12 +264,12 @@ public Map> getExportedColumns() { tableName = names[0]; } List primaryKeyColumns = new ArrayList<>(); - try ( ResultSet row = dbmd.getPrimaryKeys( settings.get( "database" ), schemaPattern, tableName ) ) { + try ( ResultSet row = dbmd.getPrimaryKeys( schemaPattern, null, tableName ) ) { while ( row.next() ) { primaryKeyColumns.add( row.getString( "COLUMN_NAME" ) ); } } - try ( ResultSet row = dbmd.getColumns( settings.get( "database" ), schemaPattern, tableName, "%" ) ) { + try ( ResultSet row = dbmd.getColumns( schemaPattern, schemaPattern, tableName, "%" ) ) { List list = new ArrayList<>(); while ( row.next() ) { PolyType type = PolyType.getNameForJdbcType( row.getInt( "DATA_TYPE" ) ); @@ -306,7 +338,16 @@ public Map> getExportedColumns() { } } catch ( SQLException | ConnectionHandlerException e ) { throw new GenericRuntimeException( "Exception while collecting schema information!" + e ); + } finally { + try { + // stmt.close(); + // conn.close(); + connectionHandler.commit(); + } catch ( ConnectionHandlerException e ) { + throw new RuntimeException( e ); + } } + return map; } diff --git a/plugins/json-adapter/src/main/java/org/polypheny/db/adapter/json/JsonSource.java b/plugins/json-adapter/src/main/java/org/polypheny/db/adapter/json/JsonSource.java index c0cbed72e2..738f4fe1c3 100644 --- a/plugins/json-adapter/src/main/java/org/polypheny/db/adapter/json/JsonSource.java +++ b/plugins/json-adapter/src/main/java/org/polypheny/db/adapter/json/JsonSource.java @@ -21,6 +21,8 @@ import java.net.MalformedURLException; import java.net.URL; import java.nio.file.NoSuchFileException; +import java.sql.Connection; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Set; @@ -51,6 +53,16 @@ import org.polypheny.db.catalog.logistic.DataModel; import org.polypheny.db.prepare.Context; import org.polypheny.db.schema.Namespace; +import org.polypheny.db.schemaDiscovery.AbstractNode; +import org.polypheny.db.schemaDiscovery.AttributeNode; +import org.polypheny.db.schemaDiscovery.DocumentArrayNode; +import org.polypheny.db.schemaDiscovery.DocumentObjectNode; +import org.polypheny.db.schemaDiscovery.DocumentValueNode; +import org.polypheny.db.schemaDiscovery.Node; +import org.polypheny.db.schemaDiscovery.MetadataProvider; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import org.polypheny.db.schemaDiscovery.NodeSerializer; import org.polypheny.db.transaction.PolyXid; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -66,7 +78,7 @@ @AdapterSettingString(subOf = "method_link", defaultValue = "classpath://articles.json", name = "directoryName", description = "Path to the JSON file(s) to be integrated as this source.", position = 2) @AdapterSettingString(subOf = "method_url", defaultValue = "http://localhost/articles.json", name = "url", description = "URL to the JSON file(s) to be integrated as this source.", position = 2) -public class JsonSource extends DataSource implements DocumentDataSource, Scannable { +public class JsonSource extends DataSource implements DocumentDataSource, Scannable, MetadataProvider { private static final Logger log = LoggerFactory.getLogger( JsonSource.class ); @Delegate(excludes = Excludes.class) @@ -74,6 +86,11 @@ public class JsonSource extends DataSource implements Documen private JsonNamespace namespace; private final ConnectionMethod connectionMethod; private 
URL jsonFiles; + public AbstractNode metadataRoot; + private Map>> preview = new LinkedHashMap<>(); + + private static final String TEST_JSON_PATH = "C:\\Users\\roman\\Desktop\\data.json"; + private final ObjectMapper mapper = new ObjectMapper(); public JsonSource( final long storeId, final String uniqueName, final Map settings, DeployMode mode ) { @@ -194,6 +211,7 @@ public List createCollection( Context context, LogicalCollection } } + @Override public void truncate( Context context, long allocId ) { log.debug( "NOT SUPPORTED: JSON source does not support method truncate()." ); @@ -206,6 +224,7 @@ public boolean prepare( PolyXid xid ) { return true; } + @Override public void commit( PolyXid xid ) { log.debug( "NOT SUPPORTED: JSON source does not support method commit()." ); @@ -262,6 +281,118 @@ public DocumentDataSource asDocumentDataSource() { } + @Override + public AbstractNode fetchMetadataTree() { + AbstractNode root = new Node( "document", new File( TEST_JSON_PATH ).getName() ); + + try { + JsonNode jsonRoot = mapper.readTree( new File( TEST_JSON_PATH ) ); + buildTreeRecursively( jsonRoot, root, "$", "root" ); + } catch ( Exception ex ) { + throw new RuntimeException( "Failed to build metadata tree for JSON", ex ); + } + preview.put( "jsonPreview", List.of( Map.of( "metadata", "rootNode", "preview", root ) ) ); + + return root; + } + + + // Preview itself is build in the metadata tree. Preview not necessary. + @Override + public List> fetchPreview( Connection ignored, String fqName, int limit ) { + return null; + } + + + private void buildTreeRecursively( JsonNode current, AbstractNode parent, String jsonPath, String nodeName ) { + + /* ───────────── Json-Object ────────────── */ + if ( current.isObject() ) { + boolean isCard = parent != null && "array".equals( parent.getType() ); + + AbstractNode obj = new DocumentObjectNode( nodeName, jsonPath, isCard ); + parent.addChild( obj ); + + current.fields().forEachRemaining( e -> + buildTreeRecursively( e.getValue(), obj, jsonPath + "." + e.getKey(), e.getKey() ) ); + return; + } + + /* ───────────── Json-Array ────────────── */ + if ( current.isArray() ) { + AbstractNode arr = new DocumentArrayNode( nodeName, jsonPath ); + parent.addChild( arr ); + + int idx = 0; + for ( JsonNode element : current ) { + String childName = "idx" + idx; + if ( element.isObject() ) { + if ( element.has( "id" ) ) { + childName = "id=" + element.get( "id" ).asText(); + } else if ( element.has( "title" ) ) { + childName = "\"" + element.get( "title" ).asText() + "\""; + } else if ( element.has( "name" ) ) { + childName = element.get( "name" ).asText(); + } + } + + buildTreeRecursively( element, arr, jsonPath + "[" + idx + "]", childName ); + idx++; + } + return; + } + + /* ───────────── Primitive-Value ───────── */ + String valueType = detectType( current ); + Object sample = current.isNull() ? null : current.asText(); + + AbstractNode val = new DocumentValueNode( nodeName, jsonPath, valueType, sample ); + parent.addChild( val ); + } + + + private static String detectType( JsonNode n ) { + if ( n.isTextual() ) { + return "string"; + } + if ( n.isNumber() ) { + return "number"; + } + if ( n.isBoolean() ) { + return "boolean"; + } + if ( n.isNull() ) { + return "null"; + } + return "unknown"; + } + + + // TODO Implement when preview and deploy works. 
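// A possible shape for this once preview and deploy work (a sketch only, mirroring the
// relational sources; the traversal is an assumption, not part of this change): split
// each selected path on '.', drop the first segment if it equals metadataRoot.getName(),
// descend through getChildren() segment by segment, and on the last segment flag the
// matching node the way the relational sources call setSelected( true ) on an
// AttributeNode.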
+ @Override + public void markSelectedAttributes( List selectedPaths ) { + + } + + + @Override + public void setRoot( AbstractNode root ) { + this.metadataRoot = root; + } + + + @Override + public Object getPreview() { + return NodeSerializer.serializeNode( this.metadataRoot ); + } + + + @Override + public AbstractNode getRoot() { + return this.metadataRoot; + } + + private interface Excludes { void refreshCollection( long allocId ); diff --git a/plugins/monetdb-adapter/src/main/java/org/polypheny/db/adapter/monetdb/sources/MonetdbSource.java b/plugins/monetdb-adapter/src/main/java/org/polypheny/db/adapter/monetdb/sources/MonetdbSource.java index ddabad552e..87d8303153 100644 --- a/plugins/monetdb-adapter/src/main/java/org/polypheny/db/adapter/monetdb/sources/MonetdbSource.java +++ b/plugins/monetdb-adapter/src/main/java/org/polypheny/db/adapter/monetdb/sources/MonetdbSource.java @@ -17,9 +17,21 @@ package org.polypheny.db.adapter.monetdb.sources; +import java.sql.Connection; +import java.sql.DatabaseMetaData; +import java.sql.ResultSet; +import java.sql.ResultSetMetaData; import java.sql.SQLException; +import java.sql.Statement; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; +import java.util.HashSet; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; +import java.util.Optional; +import java.util.Set; import java.util.stream.Collectors; import lombok.extern.slf4j.Slf4j; import org.apache.commons.dbcp2.BasicDataSource; @@ -28,8 +40,11 @@ import org.polypheny.db.adapter.annotations.AdapterProperties; import org.polypheny.db.adapter.annotations.AdapterSettingInteger; import org.polypheny.db.adapter.annotations.AdapterSettingString; +import org.polypheny.db.adapter.java.SchemaFilter; import org.polypheny.db.adapter.jdbc.JdbcTable; import org.polypheny.db.adapter.jdbc.connection.ConnectionFactory; +import org.polypheny.db.adapter.jdbc.connection.ConnectionHandler; +import org.polypheny.db.adapter.jdbc.connection.ConnectionHandlerException; import org.polypheny.db.adapter.jdbc.connection.TransactionalConnectionFactory; import org.polypheny.db.adapter.jdbc.sources.AbstractJdbcSource; import org.polypheny.db.adapter.monetdb.MonetdbSqlDialect; @@ -37,8 +52,17 @@ import org.polypheny.db.catalog.entity.logical.LogicalTableWrapper; import org.polypheny.db.catalog.entity.physical.PhysicalEntity; import org.polypheny.db.catalog.entity.physical.PhysicalTable; +import org.polypheny.db.catalog.exceptions.GenericRuntimeException; import org.polypheny.db.prepare.Context; +import org.polypheny.db.schemaDiscovery.AbstractNode; +import org.polypheny.db.schemaDiscovery.AttributeNode; +import org.polypheny.db.schemaDiscovery.MetadataProvider; +import org.polypheny.db.schemaDiscovery.Node; import org.polypheny.db.sql.language.SqlDialect; +import org.polypheny.db.transaction.PUID; +import org.polypheny.db.transaction.PUID.Type; +import org.polypheny.db.transaction.PolyXid; +import org.polypheny.db.type.PolyType; @Slf4j @@ -49,12 +73,16 @@ defaultMode = DeployMode.REMOTE) @AdapterSettingString(name = "host", defaultValue = "localhost", description = "Hostname or IP address of the remote MonetDB instance.", position = 1) @AdapterSettingInteger(name = "port", defaultValue = 50000, description = "JDBC port number on the remote MonetDB instance.", position = 2) -@AdapterSettingString(name = "database", defaultValue = "polypheny", description = "JDBC port number on the remote MonetDB instance.", position = 3) -@AdapterSettingString(name = "username", 
defaultValue = "polypheny", description = "Name of the database to connect to.", position = 4) -@AdapterSettingString(name = "password", defaultValue = "polypheny", description = "Username to be used for authenticating at the remote instance.", position = 5) +@AdapterSettingString(name = "database", defaultValue = "demo", description = "JDBC port number on the remote MonetDB instance.", position = 3) +@AdapterSettingString(name = "username", defaultValue = "monetdb", description = "Name of the database to connect to.", position = 4) +@AdapterSettingString(name = "password", defaultValue = "monetdb", description = "Username to be used for authenticating at the remote instance.", position = 5) @AdapterSettingInteger(name = "maxConnections", defaultValue = 25, description = "Password to be used for authenticating at the remote instance.") -@AdapterSettingString(name = "table", defaultValue = "public.foo,public.bar", description = "Maximum number of concurrent JDBC connections.") -public class MonetdbSource extends AbstractJdbcSource { +@AdapterSettingString(name = "tables", defaultValue = "sys.testtable", description = "Maximum number of concurrent JDBC connections.") +public class MonetdbSource extends AbstractJdbcSource implements MetadataProvider { + + private AbstractNode metadataRoot; + private Map>> previewByTable = new LinkedHashMap<>(); + public MonetdbSource( final long storeId, final String uniqueName, final Map settings, final DeployMode mode ) { super( storeId, uniqueName, settings, mode, "nl.cwi.monetdb.jdbc.MonetDriver", MonetdbSqlDialect.DEFAULT, false ); @@ -112,9 +140,15 @@ protected boolean requiresSchema() { @Override public List createTable( Context context, LogicalTableWrapper logical, AllocationTableWrapper allocation ) { + String physicalSchema; + if ( logical.physicalSchemaFinal == null ) { + physicalSchema = logical.table.getNamespaceName(); + } else { + physicalSchema = logical.physicalSchemaFinal; + } PhysicalTable table = adapterCatalog.createTable( - logical.table.getNamespaceName(), - logical.table.name, + physicalSchema, + logical.physicalTable, logical.columns.stream().collect( Collectors.toMap( c -> c.id, c -> c.name ) ), logical.table, logical.columns.stream().collect( Collectors.toMap( t -> t.id, t -> t ) ), @@ -129,6 +163,352 @@ public List createTable( Context context, LogicalTableWrapper lo } + @Override + public Map> getExportedColumns() { + Map> map = new HashMap<>(); + + java.sql.Statement statement = null; + Connection connection = null; + ConnectionHandler connectionHandler = null; + + PolyXid xid = PolyXid.generateLocalTransactionIdentifier( PUID.randomPUID( Type.RANDOM ), PUID.randomPUID( Type.RANDOM ) ); + try { + connectionHandler = connectionFactory.getOrCreateConnectionHandler( xid ); + statement = connectionHandler.getStatement(); + connection = statement.getConnection(); + DatabaseMetaData dbmd = connection.getMetaData(); + + String[] tables; + if ( settings.get( "selectedAttributes" ).equals( "" ) ) { + tables = settings.get( "tables" ).split( "," ); + } else { + String[] names2 = settings.get( "selectedAttributes" ).split( "," ); + Set tableNames = new HashSet<>(); + + for ( String s : names2 ) { + String attr = s.split( " : " )[0]; + + String[] parts = attr.split( "\\." ); + if ( parts.length >= 3 ) { + String tableName = parts[1] + "." + parts[2]; + tableNames.add( tableName ); + } + } + tables = tableNames.toArray( new String[0] ); + } + for ( String str : tables ) { + String[] names = str.split( "\\." 
); + if ( names.length == 0 || names.length > 2 || (requiresSchema() && names.length == 1) ) { + throw new GenericRuntimeException( "Invalid table name: " + str ); + } + String tableName; + String schemaPattern; + if ( requiresSchema() ) { + schemaPattern = names[0]; + tableName = names[1]; + } else { + schemaPattern = null; + tableName = names[0]; + } + List primaryKeyColumns = new ArrayList<>(); + try ( ResultSet row = dbmd.getPrimaryKeys( null, schemaPattern, tableName ) ) { + while ( row.next() ) { + primaryKeyColumns.add( row.getString( "COLUMN_NAME" ) ); + } + } + try ( ResultSet row = dbmd.getColumns( null, schemaPattern, tableName, "%" ) ) { + List list = new ArrayList<>(); + while ( row.next() ) { + PolyType type = PolyType.getNameForJdbcType( row.getInt( "DATA_TYPE" ) ); + Integer length = null; + Integer scale = null; + Integer dimension = null; + Integer cardinality = null; + switch ( type ) { + case BOOLEAN: + case TINYINT: + case SMALLINT: + case INTEGER: + case BIGINT: + case FLOAT: + case REAL: + case DOUBLE: + case DATE: + break; + case DECIMAL: + length = row.getInt( "COLUMN_SIZE" ); + scale = row.getInt( "DECIMAL_DIGITS" ); + break; + case TIME: + length = row.getInt( "DECIMAL_DIGITS" ); + if ( length > 3 ) { + throw new GenericRuntimeException( "Unsupported precision for data type time: " + length ); + } + break; + case TIMESTAMP: + length = row.getInt( "DECIMAL_DIGITS" ); + if ( length > 3 ) { + throw new GenericRuntimeException( "Unsupported precision for data type timestamp: " + length ); + } + break; + case CHAR: + case VARCHAR: + type = PolyType.VARCHAR; + length = row.getInt( "COLUMN_SIZE" ); + break; + case BINARY: + case VARBINARY: + type = PolyType.VARBINARY; + length = row.getInt( "COLUMN_SIZE" ); + break; + default: + throw new GenericRuntimeException( "Unsupported data type: " + type.getName() ); + } + list.add( new ExportedColumn( + row.getString( "COLUMN_NAME" ).toLowerCase(), + type, + null, + length, + scale, + dimension, + cardinality, + row.getString( "IS_NULLABLE" ).equalsIgnoreCase( "YES" ), + requiresSchema() ? row.getString( "TABLE_SCHEM" ) : row.getString( "TABLE_CAT" ), + row.getString( "TABLE_NAME" ), + row.getString( "COLUMN_NAME" ), + row.getInt( "ORDINAL_POSITION" ), + primaryKeyColumns.contains( row.getString( "COLUMN_NAME" ) ) + ) ); + } + map.put( tableName, list ); + } + } + } catch ( SQLException | ConnectionHandlerException e ) { + throw new GenericRuntimeException( "Exception while collecting schema information!" + e ); + } finally { + try { + // stmt.close(); + // conn.close(); + connectionHandler.commit(); + } catch ( ConnectionHandlerException e ) { + throw new RuntimeException( e ); + } + } + return map; + } + + + @Override + public AbstractNode fetchMetadataTree() { + this.previewByTable = new LinkedHashMap<>(); + String dbName = settings.getOrDefault( "database", "monetdb" ); + Node root = new Node( "relational", dbName ); + + SchemaFilter filter = SchemaFilter.forAdapter( adapterName ); + + java.sql.Statement stmt = null; + Connection conn = null; + ConnectionHandler handler = null; + + PolyXid xid = PolyXid.generateLocalTransactionIdentifier( PUID.randomPUID( Type.RANDOM ), PUID.randomPUID( Type.RANDOM ) ); + + try { + handler = connectionFactory.getOrCreateConnectionHandler( xid ); + stmt = handler.getStatement(); + conn = stmt.getConnection(); + DatabaseMetaData meta = conn.getMetaData(); + + try ( ResultSet schemas = requiresSchema() + ? 
meta.getSchemas( null, "%" ) + : meta.getCatalogs() ) { + while ( schemas.next() ) { + + String schemaName = requiresSchema() + ? schemas.getString( "TABLE_SCHEM" ) + : schemas.getString( "TABLE_CAT" ); + + if ( filter.ignoredSchemas.contains( schemaName.toLowerCase() ) ) { + continue; + } + + AbstractNode schemaNode = new Node( "schema", schemaName ); + + try ( ResultSet tables = meta.getTables( + null, + requiresSchema() ? schemaName : null, + "%", + new String[]{ "TABLE" } + ) ) { + while ( tables.next() ) { + + String tableName = tables.getString( "TABLE_NAME" ); + + String fqName = (requiresSchema() ? "\"" + schemaName + "\"." : "") + "\"" + tableName + "\""; + Connection finalConn = conn; + previewByTable.computeIfAbsent( + schemaName + "." + tableName, + k -> { + try { + return fetchPreview( finalConn, fqName, 10 ); + } catch ( Exception e ) { + log.warn( "Preview failed for {}", fqName, e ); + return List.of(); + } + } ); + + AbstractNode tableNode = new Node( "table", tableName ); + + Set pkCols = new HashSet<>(); + try ( ResultSet pk = meta.getPrimaryKeys( + null, + requiresSchema() ? schemaName : null, + tableName ) ) { + while ( pk.next() ) { + pkCols.add( pk.getString( "COLUMN_NAME" ) ); + } + } + + try ( ResultSet cols = meta.getColumns( + null, + requiresSchema() ? schemaName : null, + tableName, + "%" ) ) { + while ( cols.next() ) { + + String colName = cols.getString( "COLUMN_NAME" ); + String typeName = cols.getString( "TYPE_NAME" ); + boolean nullable = cols.getInt( "NULLABLE" ) == DatabaseMetaData.columnNullable; + boolean primary = pkCols.contains( colName ); + + AbstractNode colNode = new AttributeNode( "column", colName ); + colNode.addProperty( "type", typeName ); + colNode.addProperty( "nullable", nullable ); + colNode.addProperty( "primaryKey", primary ); + + Integer len = (Integer) cols.getObject( "COLUMN_SIZE" ); + Integer scale = (Integer) cols.getObject( "DECIMAL_DIGITS" ); + if ( len != null ) { + colNode.addProperty( "length", len ); + } + if ( scale != null ) { + colNode.addProperty( "scale", scale ); + } + + tableNode.addChild( colNode ); + } + } + + schemaNode.addChild( tableNode ); + } + } + + root.addChild( schemaNode ); + } + } + + } catch ( SQLException | ConnectionHandlerException ex ) { + throw new GenericRuntimeException( "Error while fetching metadata tree", ex ); + } finally { + try { + // stmt.close(); + // conn.close(); + handler.commit(); + } catch ( ConnectionHandlerException e ) { + throw new RuntimeException( e ); + } + } + return root; + } + + + @Override + public List> fetchPreview( Connection conn, String fqName, int limit ) { + List> rows = new ArrayList<>(); + try ( Statement stmt = conn.createStatement(); + ResultSet rs = stmt.executeQuery( + "SELECT * FROM " + fqName + " LIMIT " + limit ) ) { + + ResultSetMetaData meta = rs.getMetaData(); + while ( rs.next() ) { + Map row = new LinkedHashMap<>(); + for ( int i = 1; i <= meta.getColumnCount(); i++ ) { + row.put( meta.getColumnName( i ), rs.getObject( i ) ); + } + rows.add( row ); + } + } catch ( SQLException e ) { + log.warn( "Preview failed for {}", fqName, e ); + return List.of(); + } + return rows; + } + + + @Override + public void markSelectedAttributes( List selectedPaths ) { + + List> attributePaths = new ArrayList<>(); + + for ( String path : selectedPaths ) { + String cleanPath = path.replaceFirst( " ?:.*$", "" ).trim(); + + List segments = Arrays.asList( cleanPath.split( "\\." 
) ); + if ( !segments.isEmpty() && segments.get( 0 ).equals( metadataRoot.getName() ) ) { + segments = segments.subList( 1, segments.size() ); + } + + attributePaths.add( segments ); + } + + for ( List pathSegments : attributePaths ) { + AbstractNode current = metadataRoot; + + for ( int i = 0; i < pathSegments.size(); i++ ) { + String segment = pathSegments.get( i ); + + if ( i == pathSegments.size() - 1 ) { + Optional attrNodeOpt = current.getChildren().stream() + .filter( c -> c instanceof AttributeNode && segment.equals( c.getName() ) ) + .findFirst(); + + if ( attrNodeOpt.isPresent() ) { + ((AttributeNode) attrNodeOpt.get()).setSelected( true ); + } + } else { + Optional childOpt = current.getChildren().stream() + .filter( c -> segment.equals( c.getName() ) ) + .findFirst(); + + if ( childOpt.isPresent() ) { + current = childOpt.get(); + } else { + break; + } + } + } + } + + } + + + @Override + public void setRoot( AbstractNode root ) { + this.metadataRoot = root; + } + + + @Override + public Object getPreview() { + return this.previewByTable; + } + + + @Override + public AbstractNode getRoot() { + return this.metadataRoot; + } + + @Override public RelationalDataSource asRelationalDataSource() { return this; diff --git a/plugins/mysql-adapter/src/main/java/org/polypheny/db/adapter/jdbc/MysqlSourcePlugin.java b/plugins/mysql-adapter/src/main/java/org/polypheny/db/adapter/jdbc/MysqlSourcePlugin.java index ddf811541d..99aa461e06 100644 --- a/plugins/mysql-adapter/src/main/java/org/polypheny/db/adapter/jdbc/MysqlSourcePlugin.java +++ b/plugins/mysql-adapter/src/main/java/org/polypheny/db/adapter/jdbc/MysqlSourcePlugin.java @@ -17,9 +17,21 @@ package org.polypheny.db.adapter.jdbc; +import java.sql.Connection; +import java.sql.DatabaseMetaData; +import java.sql.ResultSet; +import java.sql.ResultSetMetaData; import java.sql.SQLException; +import java.sql.Statement; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; +import java.util.HashSet; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; +import java.util.Optional; +import java.util.Set; import java.util.stream.Collectors; import lombok.extern.slf4j.Slf4j; import org.polypheny.db.adapter.AdapterManager; @@ -29,15 +41,28 @@ import org.polypheny.db.adapter.annotations.AdapterSettingInteger; import org.polypheny.db.adapter.annotations.AdapterSettingList; import org.polypheny.db.adapter.annotations.AdapterSettingString; +import org.polypheny.db.adapter.java.SchemaFilter; +import org.polypheny.db.adapter.java.TableFilter; +import org.polypheny.db.adapter.jdbc.connection.ConnectionHandler; +import org.polypheny.db.adapter.jdbc.connection.ConnectionHandlerException; import org.polypheny.db.adapter.jdbc.sources.AbstractJdbcSource; import org.polypheny.db.catalog.entity.allocation.AllocationTableWrapper; import org.polypheny.db.catalog.entity.logical.LogicalTableWrapper; import org.polypheny.db.catalog.entity.physical.PhysicalEntity; import org.polypheny.db.catalog.entity.physical.PhysicalTable; +import org.polypheny.db.catalog.exceptions.GenericRuntimeException; import org.polypheny.db.plugins.PluginContext; import org.polypheny.db.plugins.PolyPlugin; import org.polypheny.db.prepare.Context; +import org.polypheny.db.schemaDiscovery.AbstractNode; +import org.polypheny.db.schemaDiscovery.AttributeNode; +import org.polypheny.db.schemaDiscovery.MetadataProvider; +import org.polypheny.db.schemaDiscovery.Node; import org.polypheny.db.sql.language.dialect.MysqlSqlDialect; +import 
org.polypheny.db.transaction.PUID; +import org.polypheny.db.transaction.PUID.Type; +import org.polypheny.db.transaction.PolyXid; +import org.polypheny.db.type.PolyType; @SuppressWarnings("unused") public class MysqlSourcePlugin extends PolyPlugin { @@ -78,19 +103,21 @@ public void stop() { description = "Hostname or IP address of the remote MariaDB / MySQL instance.") @AdapterSettingInteger(name = "port", defaultValue = 3306, position = 2, description = "JDBC port number on the remote MariaDB / MySQL instance.") - @AdapterSettingString(name = "database", defaultValue = "polypheny", position = 3, + @AdapterSettingString(name = "database", defaultValue = "test", position = 3, description = "Name of the database to connect to.") - @AdapterSettingString(name = "username", defaultValue = "polypheny", position = 4, + @AdapterSettingString(name = "username", defaultValue = "root", position = 4, description = "Username to be used for authenticating at the remote instance.") - @AdapterSettingString(name = "password", defaultValue = "polypheny", position = 5, + @AdapterSettingString(name = "password", defaultValue = "roman123", position = 5, description = "Password to be used for authenticating at the remote instance.") @AdapterSettingInteger(name = "maxConnections", defaultValue = 25, description = "Maximum number of concurrent JDBC connections.") @AdapterSettingList(name = "transactionIsolation", options = { "SERIALIZABLE", "READ_UNCOMMITTED", "READ_COMMITTED", "REPEATABLE_READ" }, defaultValue = "SERIALIZABLE", description = "Which level of transaction isolation should be used.") - @AdapterSettingString(name = "tables", defaultValue = "foo,bar", - description = "List of tables which should be imported. The names must to be separated by a comma.") - public static class MysqlSource extends AbstractJdbcSource { + public static class MysqlSource extends AbstractJdbcSource implements MetadataProvider { + + public AbstractNode metadataRoot; + private Map>> previewByTable = new LinkedHashMap<>(); + public MysqlSource( final long storeId, final String uniqueName, final Map settings, final DeployMode mode ) { super( storeId, uniqueName, settings, mode, "org.mariadb.jdbc.Driver", MysqlSqlDialect.DEFAULT, false ); @@ -99,9 +126,15 @@ public MysqlSource( final long storeId, final String uniqueName, final Map createTable( Context context, LogicalTableWrapper logical, AllocationTableWrapper allocation ) { + String physicalSchema; + if ( logical.physicalSchemaFinal == null ) { + physicalSchema = logical.table.getNamespaceName(); + } else { + physicalSchema = logical.physicalSchemaFinal; + } PhysicalTable table = adapterCatalog.createTable( - logical.table.getNamespaceName(), - logical.table.name, + physicalSchema, + logical.physicalTable.toLowerCase(), logical.columns.stream().collect( Collectors.toMap( c -> c.id, c -> c.name ) ), logical.table, logical.columns.stream().collect( Collectors.toMap( t -> t.id, t -> t ) ), @@ -134,13 +167,363 @@ protected void reloadSettings( List updatedSettings ) { @Override protected String getConnectionUrl( final String dbHostname, final int dbPort, final String dbName ) { - return String.format( "jdbc:mysql://%s:%d/%s", dbHostname, dbPort, dbName ); + return String.format( "jdbc:mysql://%s:%d/%s?allowPublicKeyRetrieval=true&useSSL=false", dbHostname, dbPort, dbName ); } @Override protected boolean requiresSchema() { - return false; + return true; + } + + + @Override + public AbstractNode fetchMetadataTree() { + this.previewByTable = new LinkedHashMap<>(); + String dbName = 
settings.get( "database" ); + Node root = new Node( "relational", dbName ); + + SchemaFilter filter = SchemaFilter.forAdapter( adapterName ); + TableFilter tableFilter = TableFilter.forAdapter( adapterName ); + + PolyXid xid = PolyXid.generateLocalTransactionIdentifier( PUID.randomPUID( Type.RANDOM ), PUID.randomPUID( Type.RANDOM ) ); + + java.sql.Statement stmt = null; + Connection conn = null; + ConnectionHandler handler = null; + + try { + handler = connectionFactory.getOrCreateConnectionHandler( xid ); + stmt = handler.getStatement(); + conn = stmt.getConnection(); + DatabaseMetaData meta = conn.getMetaData(); + + try ( ResultSet schemas = meta.getCatalogs() ) { + while ( schemas.next() ) { + String schemaName = schemas.getString( "TABLE_CAT" ); + + if ( filter.ignoredSchemas.contains( schemaName ) ) { + continue; + } + + AbstractNode schemaNode = new Node( "schema", schemaName ); + + try ( ResultSet tables = meta.getTables( + schemaName, + null, + "%", + new String[]{ "TABLE" } + ) ) { + while ( tables.next() ) { + String tableName = tables.getString( "TABLE_NAME" ); + + if ( tableFilter.shouldIgnore( tableName ) ) { + continue; + } + + String fqName = "`" + schemaName + "`.`" + tableName + "`"; + + Connection finalConn = conn; + List> preview = previewByTable.computeIfAbsent( + schemaName + "." + tableName, + k -> { + try { + return fetchPreview( finalConn, fqName, 10 ); + } catch ( Exception e ) { + log.warn( "Preview failed for {}", fqName, e ); + return List.of(); + } + } ); + + AbstractNode tableNode = new Node( "table", tableName ); + + Set pkCols = new HashSet<>(); + try ( ResultSet pk = meta.getPrimaryKeys( + schemaName, + null, + tableName ) ) { + while ( pk.next() ) { + pkCols.add( pk.getString( "COLUMN_NAME" ) ); + } + } + + try ( ResultSet cols = meta.getColumns( + schemaName, + null, + tableName, + "%" ) ) { + while ( cols.next() ) { + String colName = cols.getString( "COLUMN_NAME" ); + String typeName = cols.getString( "TYPE_NAME" ); + boolean nullable = cols.getInt( "NULLABLE" ) == DatabaseMetaData.columnNullable; + boolean primary = pkCols.contains( colName ); + + AbstractNode colNode = new AttributeNode( "column", colName ); + colNode.addProperty( "type", typeName ); + colNode.addProperty( "nullable", nullable ); + colNode.addProperty( "primaryKey", primary ); + + Integer len = (Integer) cols.getInt( "COLUMN_SIZE" ); + Integer scale = (Integer) cols.getInt( "DECIMAL_DIGITS" ); + if ( len != null ) { + colNode.addProperty( "length", len ); + } + if ( scale != null ) { + colNode.addProperty( "scale", scale ); + } + + tableNode.addChild( colNode ); + } + } + + schemaNode.addChild( tableNode ); + } + } + + root.addChild( schemaNode ); + } + } + + } catch ( SQLException | ConnectionHandlerException ex ) { + throw new GenericRuntimeException( "Error while fetching metadata tree", ex ); + } finally { + try { + //stmt.close(); + //conn.close(); + handler.commit(); + } catch ( ConnectionHandlerException e ) { + throw new RuntimeException( e ); + } + } + + return root; + } + + + @Override + public List> fetchPreview( Connection conn, String fqName, int limit ) { + List> rows = new ArrayList<>(); + try ( Statement stmt = conn.createStatement(); + ResultSet rs = stmt.executeQuery( "SELECT * FROM " + fqName + " LIMIT " + limit ) ) { + ResultSetMetaData meta = rs.getMetaData(); + while ( rs.next() ) { + Map row = new LinkedHashMap<>(); + for ( int i = 1; i <= meta.getColumnCount(); i++ ) { + row.put( meta.getColumnName( i ), rs.getObject( i ) ); + } + rows.add( row ); + } + } catch 
( SQLException e ) { + log.warn( "Preview failed for {}", fqName, e ); + return List.of(); + } + return rows; + } + + + @Override + public Map> getExportedColumns() { + Map> map = new HashMap<>(); + java.sql.Statement statement = null; + Connection connection = null; + + PolyXid xid = PolyXid.generateLocalTransactionIdentifier( PUID.randomPUID( Type.RANDOM ), PUID.randomPUID( Type.RANDOM ) ); + ConnectionHandler connectionHandler = null; + + try { + connectionHandler = connectionFactory.getOrCreateConnectionHandler( xid ); + statement = connectionHandler.getStatement(); + connection = statement.getConnection(); + DatabaseMetaData dbmd = connection.getMetaData(); + + String[] tables; + + if ( !settings.containsKey( "selectedAttributes" ) || settings.get( "selectedAttributes" ).equals( "" ) || settings.get( "selectedAttributes" ).isEmpty() || settings.get( "selectedAttributes" ) == null ) { + tables = settings.get( "tables" ).split( "," ); + } else { + String[] names2 = settings.get( "selectedAttributes" ).split( "," ); + Set tableNames = new HashSet<>(); + + for ( String s : names2 ) { + String attr = s.split( " : " )[0]; + + String[] parts = attr.split( "\\." ); + if ( parts.length >= 3 ) { + String tableName = parts[1] + "." + parts[2]; + + if ( !requiresSchema() ) { + tableNames.add( parts[2] ); + } else { + tableNames.add( tableName ); + } + } + } + tables = tableNames.toArray( new String[0] ); + } + for ( String str : tables ) { + String[] names = str.split( "\\." ); + if ( names.length == 0 || names.length > 2 || (requiresSchema() && names.length == 1) ) { + throw new GenericRuntimeException( "Invalid table name: " + str ); + } + String tableName; + String schemaPattern; + if ( requiresSchema() ) { + schemaPattern = names[0]; + tableName = names[1]; + } else { + schemaPattern = null; + tableName = names[0]; + } + List primaryKeyColumns = new ArrayList<>(); + try ( ResultSet row = dbmd.getPrimaryKeys( schemaPattern, null, tableName ) ) { + while ( row.next() ) { + primaryKeyColumns.add( row.getString( "COLUMN_NAME" ) ); + } + } + try ( ResultSet row = dbmd.getColumns( schemaPattern, schemaPattern, tableName, "%" ) ) { + List list = new ArrayList<>(); + while ( row.next() ) { + PolyType type = PolyType.getNameForJdbcType( row.getInt( "DATA_TYPE" ) ); + Integer length = null; + Integer scale = null; + Integer dimension = null; + Integer cardinality = null; + switch ( type ) { + case BOOLEAN: + case TINYINT: + case SMALLINT: + case INTEGER: + case BIGINT: + case FLOAT: + case REAL: + case DOUBLE: + case DATE: + break; + case DECIMAL: + length = row.getInt( "COLUMN_SIZE" ); + scale = row.getInt( "DECIMAL_DIGITS" ); + break; + case TIME: + length = row.getInt( "DECIMAL_DIGITS" ); + if ( length > 3 ) { + throw new GenericRuntimeException( "Unsupported precision for data type time: " + length ); + } + break; + case TIMESTAMP: + length = row.getInt( "DECIMAL_DIGITS" ); + if ( length > 3 ) { + throw new GenericRuntimeException( "Unsupported precision for data type timestamp: " + length ); + } + break; + case CHAR: + case VARCHAR: + type = PolyType.VARCHAR; + length = row.getInt( "COLUMN_SIZE" ); + break; + case BINARY: + case VARBINARY: + type = PolyType.VARBINARY; + length = row.getInt( "COLUMN_SIZE" ); + break; + default: + throw new GenericRuntimeException( "Unsupported data type: " + type.getName() ); + } + list.add( new ExportedColumn( + row.getString( "COLUMN_NAME" ).toLowerCase(), + type, + null, + length, + scale, + dimension, + cardinality, + row.getString( "IS_NULLABLE" 
).equalsIgnoreCase( "YES" ), + row.getString( "TABLE_CAT" ), + row.getString( "TABLE_NAME" ), + row.getString( "COLUMN_NAME" ), + row.getInt( "ORDINAL_POSITION" ), + primaryKeyColumns.contains( row.getString( "COLUMN_NAME" ) ) + ) ); + } + map.put( tableName, list ); + } + } + } catch ( SQLException | ConnectionHandlerException e ) { + throw new GenericRuntimeException( "Exception while collecting schema information!" + e ); + + } finally { + try { + // stmt.close(); + // conn.close(); + connectionHandler.commit(); + } catch ( ConnectionHandlerException e ) { + throw new RuntimeException( e ); + } + } + + return map; + } + + + @Override + public void markSelectedAttributes( List selectedPaths ) { + List> attributePaths = new ArrayList<>(); + + for ( String path : selectedPaths ) { + String cleanPath = path.replaceFirst( " ?:.*$", "" ).trim(); + + List segments = Arrays.asList( cleanPath.split( "\\." ) ); + if ( !segments.isEmpty() && segments.get( 0 ).equals( metadataRoot.getName() ) ) { + segments = segments.subList( 1, segments.size() ); + } + + attributePaths.add( segments ); + } + + for ( List pathSegments : attributePaths ) { + AbstractNode current = metadataRoot; + + for ( int i = 0; i < pathSegments.size(); i++ ) { + String segment = pathSegments.get( i ); + + if ( i == pathSegments.size() - 1 ) { + Optional attrNodeOpt = current.getChildren().stream() + .filter( c -> c instanceof AttributeNode && segment.equals( c.getName() ) ) + .findFirst(); + + if ( attrNodeOpt.isPresent() ) { + ((AttributeNode) attrNodeOpt.get()).setSelected( true ); + } + } else { + Optional childOpt = current.getChildren().stream() + .filter( c -> segment.equals( c.getName() ) ) + .findFirst(); + + if ( childOpt.isPresent() ) { + current = childOpt.get(); + } else { + break; + } + } + } + } + } + + + @Override + public void setRoot( AbstractNode root ) { + this.metadataRoot = root; + } + + + @Override + public Object getPreview() { + return this.previewByTable; + } + + + @Override + public AbstractNode getRoot() { + return this.metadataRoot; } diff --git a/plugins/oracle-adapter/build.gradle b/plugins/oracle-adapter/build.gradle new file mode 100644 index 0000000000..a1ab5e1af6 --- /dev/null +++ b/plugins/oracle-adapter/build.gradle @@ -0,0 +1,67 @@ +group "org.polypheny" + +dependencies { + compileOnly project(":core") + compileOnly project(":plugins:sql-language") + compileOnly project(":plugins:jdbc-adapter-framework") + testImplementation project(path: ":core", configuration: "tests") +} + + +compileJava { + dependsOn(":core:processResources") + dependsOn(":plugins:sql-language:processResources") + dependsOn(":plugins:jdbc-adapter-framework:processResources") + mustRunAfter(":plugins:jdbc-adapter-framework:processResources") +} + +sourceSets { + main { + java { + srcDirs = ["src/main/java"] + } + resources { + srcDirs = ["src/main/resources"] + } + output.resourcesDir = file(project.buildDir.absolutePath + "/classes") + } + test { + java { + srcDirs = ["src/test/java"] + destinationDirectory.set(file(project.buildDir.absolutePath + "/test-classes")) + } + resources { + srcDirs = ["src/test/resources"] + } + output.resourcesDir = file(project.buildDir.absolutePath + "test-classes") + } +} + + +delombok { + dependsOn(":plugins:sql-language:processResources") + dependsOn(":plugins:jdbc-adapter-framework:processResources") +} + + +jar { + manifest { + attributes "Manifest-Version": "1.0" + attributes "Copyright": "The Polypheny Project (polypheny.org)" + attributes "Version": "$project.version" + } +} + + 
+java { + withJavadocJar() + withSourcesJar() +} + + +licensee { + allow('MIT') + allow('Apache-2.0') + // allow('https://www.oracle.com/downloads/licenses/oracle-free-license.html') + allowUrl('https://www.oracle.com/downloads/licenses/oracle-free-license.html') +} diff --git a/plugins/oracle-adapter/gradle.properties b/plugins/oracle-adapter/gradle.properties new file mode 100644 index 0000000000..6af6dcaec6 --- /dev/null +++ b/plugins/oracle-adapter/gradle.properties @@ -0,0 +1,27 @@ +# +# Copyright 2019-2025 The Polypheny Project +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +pluginVersion = 0.0.1 + +pluginId = oracle-adapter +pluginClass = org.polypheny.db.adapter.oracle.OraclePlugin +pluginProvider = The Polypheny Project +pluginDependencies = jdbc-adapter-framework, sql-language +pluginUrlPath = +pluginCategories = source +pluginPolyDependencies = +pluginIsSystemComponent = false +pluginIsUiVisible = true diff --git a/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/OraclePlugin.java b/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/OraclePlugin.java new file mode 100644 index 0000000000..60896a926b --- /dev/null +++ b/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/OraclePlugin.java @@ -0,0 +1,52 @@ +/* + * Copyright 2019-2025 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.db.adapter.oracle; + + +import org.polypheny.db.adapter.AdapterManager; +import org.polypheny.db.adapter.oracle.source.OracleSource; +import org.polypheny.db.adapter.oracle.store.OracleStore; +import org.polypheny.db.plugins.PluginContext; +import org.polypheny.db.plugins.PolyPlugin; +import org.polypheny.db.sql.language.SqlDialectRegistry; + +public class OraclePlugin extends PolyPlugin { + + + public static final String ADAPTER_NAME = "Oracle"; + private long sourceId; + private long storeId; + + + public OraclePlugin( PluginContext context ) { super( context ); } + + + @Override + public void afterCatalogInit() { + // SqlDialectRegistry.registerDialect( "Oracle", OracleSqlDialect.DEFAULT ); + this.sourceId = AdapterManager.addAdapterTemplate( OracleSource.class, ADAPTER_NAME, OracleSource::new ); + // this.storeId = AdapterManager.addAdapterTemplate( OracleStore.class, ADAPTER_NAME, OracleStore::new ); + } + + + @Override + public void stop() { + SqlDialectRegistry.unregisterDialect( "Oracle" ); + AdapterManager.removeAdapterTemplate( this.sourceId ); + // AdapterManager.removeAdapterTemplate( this.storeId ); + } +} diff --git a/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/OracleSqlDialect.java b/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/OracleSqlDialect.java new file mode 100644 index 0000000000..c962237e68 --- /dev/null +++ b/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/OracleSqlDialect.java @@ -0,0 +1,55 @@ +/* + * Copyright 2019-2025 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.adapter.oracle; + + +import org.polypheny.db.algebra.constant.NullCollation; +import org.polypheny.db.algebra.type.AlgDataTypeSystem; +import org.polypheny.db.algebra.type.AlgDataTypeSystemImpl; +import org.polypheny.db.sql.language.SqlDialect; +import org.polypheny.db.type.PolyType; +import java.util.Objects; + +public class OracleSqlDialect extends SqlDialect { + + + /* + TODO: Find out if this SqlDialect is really necessary (PostgreSql does have it, MySql doesn't). 
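   (For reference: the type system below only overrides getMaxPrecision for VARCHAR,
   raising it to 10 * 1024 * 1024 as the PostgreSQL dialect does; all other types defer
   to AlgDataTypeSystemImpl, so dropping the dialect would mainly affect VARCHAR sizing
   plus the identifier quoting and null collation configured in DEFAULT.)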
+ */ + private static final AlgDataTypeSystem ORACLE_TYPE_SYSTEM = + new AlgDataTypeSystemImpl() { + @Override + public int getMaxPrecision( PolyType typeName ) { + if ( Objects.requireNonNull( typeName ) == PolyType.VARCHAR ) { + return 10 * 1024 * 1024; + } + return super.getMaxPrecision( typeName ); + } + }; + + + public static final SqlDialect DEFAULT = + new OracleSqlDialect( EMPTY_CONTEXT + .withNullCollation( NullCollation.HIGH ) + .withIdentifierQuoteString( "\"" ) + .withDataTypeSystem( ORACLE_TYPE_SYSTEM ) ); + + + + public OracleSqlDialect( Context context ) { super( context ); } + +} diff --git a/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/source/OracleSource.java b/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/source/OracleSource.java new file mode 100644 index 0000000000..6ea8cca5c5 --- /dev/null +++ b/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/source/OracleSource.java @@ -0,0 +1,485 @@ +/* + * Copyright 2019-2025 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.adapter.oracle.source; + + +import lombok.extern.slf4j.Slf4j; +import org.polypheny.db.adapter.DeployMode; +import org.polypheny.db.adapter.RelationalDataSource; +import org.polypheny.db.adapter.annotations.AdapterProperties; +import org.polypheny.db.adapter.annotations.AdapterSettingInteger; +import org.polypheny.db.adapter.annotations.AdapterSettingList; +import org.polypheny.db.adapter.annotations.AdapterSettingString; +import org.polypheny.db.adapter.java.TableFilter; +import org.polypheny.db.adapter.jdbc.connection.ConnectionHandler; +import org.polypheny.db.adapter.jdbc.connection.ConnectionHandlerException; +import org.polypheny.db.adapter.jdbc.sources.AbstractJdbcSource; +import org.polypheny.db.adapter.oracle.OracleSqlDialect; +import org.polypheny.db.catalog.entity.allocation.AllocationTableWrapper; +import org.polypheny.db.catalog.entity.logical.LogicalTableWrapper; +import org.polypheny.db.catalog.entity.physical.PhysicalEntity; +import org.polypheny.db.catalog.entity.physical.PhysicalTable; +import org.polypheny.db.catalog.exceptions.GenericRuntimeException; +import org.polypheny.db.prepare.Context; +import org.polypheny.db.schemaDiscovery.AbstractNode; +import org.polypheny.db.schemaDiscovery.AttributeNode; +import org.polypheny.db.schemaDiscovery.MetadataProvider; +import org.polypheny.db.schemaDiscovery.Node; +import org.polypheny.db.transaction.PUID; +import org.polypheny.db.transaction.PUID.Type; +import org.polypheny.db.transaction.PolyXid; +import org.polypheny.db.type.PolyType; +import java.sql.Connection; +import java.sql.DatabaseMetaData; +import java.sql.ResultSet; +import java.sql.ResultSetMetaData; +import java.sql.SQLException; +import java.sql.Statement; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; +import java.util.HashSet; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import 
java.util.Optional; +import java.util.Set; +import java.util.stream.Collectors; + +@Slf4j +@AdapterProperties( + name = "Oracle", + description = "Data source explicit for relational oracle database systems.", + usedModes = DeployMode.REMOTE, + defaultMode = DeployMode.REMOTE) +@AdapterSettingString(name = "host", defaultValue = "localhost", position = 1, + description = "Hostname or IP address of the remote oracle instance.") +@AdapterSettingInteger(name = "port", defaultValue = 1521, position = 2, + description = "Port number of the remote oracle instance.") +@AdapterSettingString(name = "database", defaultValue = "XE", position = 3, + description = "Name of the database to connect to.") +@AdapterSettingString(name = "username", defaultValue = "system", position = 4, + description = "Username used for authentication at the remote instance.") +@AdapterSettingString(name = "password", defaultValue = "roman123", position = 5, + description = "Password used for authentication at the remote instance.") +@AdapterSettingInteger(name = "maxConnections", defaultValue = 25, + description = "Maximum number of concurrent connections.") +@AdapterSettingList(name = "transactionIsolation", options = { "SERIALIZABLE", "READ_UNCOMMITTED", "READ_COMMITTED", "REPEATABLE_READ" }, defaultValue = "SERIALIZABLE", + description = "Which level of transaction isolation should be used.") +public class OracleSource extends AbstractJdbcSource implements MetadataProvider { + + public AbstractNode metadataRoot; + private Map>> previewByTable = new LinkedHashMap<>(); + + + public OracleSource( final long storeId, final String uniqueName, final Map settings, final DeployMode mode ) { + super( + storeId, + uniqueName, + settings, + mode, + "oracle.jdbc.OracleDriver", + OracleSqlDialect.DEFAULT, + false ); + } + + + @Override + protected String getConnectionUrl( final String dbHostname, final int dbPort, final String dbName ) { + return String.format( "jdbc:oracle:thin:@%s:%d/%s", dbHostname, dbPort, dbName ); + } + + + @Override + protected boolean requiresSchema() { + return true; + } + + + @Override + public void shutdown() { + try { + removeInformationPage(); + connectionFactory.close(); + } catch ( SQLException e ) { + log.warn( "Exception while closing oracle connection {}", getUniqueName(), e ); + } + } + + + @Override + protected void reloadSettings( List updatedSettings ) { + //TODO: Implement disconnect and reconnect to Oracle instance. + } + + + @Override + public List createTable( Context context, LogicalTableWrapper logical, AllocationTableWrapper allocation ) { + String physicalSchema; + if ( logical.physicalSchemaFinal == null ) { + physicalSchema = logical.table.getNamespaceName(); + } else { + physicalSchema = logical.physicalSchemaFinal; + } + PhysicalTable table = adapterCatalog.createTable( + physicalSchema.toUpperCase(), + logical.physicalTable.toUpperCase(), + logical.columns.stream().collect( Collectors.toMap( c -> c.id, c -> c.name ) ), + logical.table, + logical.columns.stream().collect( Collectors.toMap( t -> t.id, t -> t ) ), + logical.pkIds, allocation ); + + adapterCatalog.replacePhysical( currentJdbcSchema.createJdbcTable( table ) ); + return List.of( table ); + } + + + @Override + public Map> getExportedColumns() { + Map> map = new HashMap<>(); + + java.sql.Statement statement = null; + Connection connection = null; + ConnectionHandler connectionHandler = null; + + // Use random PUID to prevent usage of an expired snapshot of the transaction identifier. 
+ PolyXid xid = PolyXid.generateLocalTransactionIdentifier( PUID.randomPUID( Type.RANDOM ), PUID.randomPUID( Type.RANDOM ) ); + try { + connectionHandler = connectionFactory.getOrCreateConnectionHandler( xid ); + statement = connectionHandler.getStatement(); + connection = statement.getConnection(); + DatabaseMetaData dbmd = connection.getMetaData(); + + String[] tables; + + // TODO If-else usage for possibly allow the usage of the old table-setting or selecting metadata. Not implemented yet. + if ( !settings.containsKey( "selectedAttributes" ) || settings.get( "selectedAttributes" ).equals( "" ) || settings.get( "selectedAttributes" ).isEmpty() || settings.get( "selectedAttributes" ) == null ) { + tables = settings.get( "tables" ).split( "," ); + } else { + String[] names2 = settings.get( "selectedAttributes" ).split( "," ); + Set tableNames = new HashSet<>(); + + for ( String s : names2 ) { + String attr = s.split( " : " )[0]; + + String[] parts = attr.split( "\\." ); + if ( parts.length >= 3 ) { + String tableName = parts[1] + "." + parts[2]; + + if ( !requiresSchema() ) { + tableNames.add( parts[2] ); + } else { + tableNames.add( tableName ); + } + } + } + tables = tableNames.toArray( new String[0] ); + } + for ( String str : tables ) { + String[] names = str.split( "\\." ); + + if ( names.length == 0 || names.length > 2 || (requiresSchema() && names.length == 1) ) { + throw new GenericRuntimeException( "Invalid table name: " + tables ); + } + String schema; + String tableName; + + if ( requiresSchema() ) { + schema = names[0].toUpperCase(); + tableName = names[1].toUpperCase(); + } else { + schema = null; + tableName = names[0].toUpperCase(); + } + + List primaryKeyColumns = new ArrayList<>(); + try ( ResultSet pk = dbmd.getPrimaryKeys( null, schema, tableName ) ) { + while ( pk.next() ) { + primaryKeyColumns.add( pk.getString( "COLUMN_NAME" ).toUpperCase() ); + } + } + try ( ResultSet columns = dbmd.getColumns( null, schema, tableName, "%" ) ) { + List exportedColumns = new ArrayList<>(); + while ( columns.next() ) { + PolyType type = PolyType.getNameForJdbcType( columns.getInt( "DATA_TYPE" ) ); + Integer length = null; + Integer scale = null; + + switch ( type ) { + case DECIMAL: + length = columns.getInt( "COLUMN_SIZE" ); + scale = columns.getInt( "DECIMAL_DIGITS" ); + break; + case CHAR: + case VARCHAR: + type = PolyType.VARCHAR; + length = columns.getInt( "COLUMN_SIZE" ); + break; + case VARBINARY: + case BINARY: + type = PolyType.VARBINARY; + length = columns.getInt( "COLUMN_SIZE" ); + break; + case TIME: + case TIMESTAMP: + length = columns.getInt( "DECIMAL_DIGITS" ); + break; + default: + break; + } + + exportedColumns.add( new ExportedColumn( + columns.getString( "COLUMN_NAME" ).toUpperCase(), + type, + null, + length, + scale, + null, + null, + "YES".equalsIgnoreCase( columns.getString( "IS_NULLABLE" ) ), + schema, + tableName, + columns.getString( "COLUMN_NAME" ).toUpperCase(), + columns.getInt( "ORDINAL_POSITION" ), + primaryKeyColumns.contains( columns.getString( "COLUMN_NAME" ).toUpperCase() ) + ) ); + } + + map.put( tableName, exportedColumns ); + } + } + } catch ( SQLException | ConnectionHandlerException e ) { + throw new GenericRuntimeException( "Exception while collecting Oracle schema info", e ); + } finally { + try { + // stmt.close(); + // conn.close(); + connectionHandler.commit(); + } catch ( ConnectionHandlerException e ) { + throw new RuntimeException( e ); + } + } + + return map; + } + + + @Override + public AbstractNode fetchMetadataTree() { + 
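+        // Builds the metadata tree for this Oracle source: the database root node gets one child per schema, each schema one node per table owned by the connected user (after applying the TableFilter), and each table one AttributeNode per column with type, nullability, primary-key, length and scale properties; a preview of up to ten rows per table is cached in previewByTable for the UI.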
this.previewByTable = new LinkedHashMap<>(); + AbstractNode root = new Node( "relational", settings.get( "database" ) ); + + TableFilter filter = TableFilter.forAdapter( adapterName ); + + PolyXid xid = PolyXid.generateLocalTransactionIdentifier( PUID.randomPUID( Type.RANDOM ), PUID.randomPUID( Type.RANDOM ) ); + + java.sql.Statement stmt = null; + Connection conn = null; + ConnectionHandler h = null; + + try { + h = connectionFactory.getOrCreateConnectionHandler( xid ); + stmt = h.getStatement(); + conn = stmt.getConnection(); + DatabaseMetaData m = conn.getMetaData(); + + String currentUser = m.getUserName(); + + try ( ResultSet schemas = m.getSchemas() ) { + while ( schemas.next() ) { + String schemaName = schemas.getString( "TABLE_SCHEM" ); + AbstractNode schemaNode = new Node( "schema", schemaName ); + + try ( ResultSet tables = m.getTables( null, schemaName, "%", new String[]{ "TABLE" } ) ) { + + while ( tables.next() ) { + String owner = tables.getString( "TABLE_SCHEM" ); + String tableName = tables.getString( "TABLE_NAME" ); + + if ( !owner.equalsIgnoreCase( currentUser ) ) { + continue; + } + + if ( filter.shouldIgnore( tableName ) ) { + continue; + } + + String fqName = "\"" + owner + "\".\"" + tableName + "\""; + ConnectionHandler finalH = h; + List> preview = previewByTable.computeIfAbsent( + owner + "." + tableName, + k -> { + try { + return fetchPreview( finalH.getStatement().getConnection(), fqName, 10 ); + } catch ( Exception e ) { + log.warn( "Preview failed for {}", fqName, e ); + return List.of(); + } + } ); + + AbstractNode tableNode = new Node( "table", tableName ); + + Set pkCols = new HashSet<>(); + try ( ResultSet pk = m.getPrimaryKeys( null, schemaName, tableName ) ) { + while ( pk.next() ) { + pkCols.add( pk.getString( "COLUMN_NAME" ) ); + } + } + + try ( ResultSet cols = m.getColumns( null, schemaName, tableName, "%" ) ) { + while ( cols.next() ) { + String colName = cols.getString( "COLUMN_NAME" ); + String typeName = cols.getString( "TYPE_NAME" ); + boolean nullable = cols.getInt( "NULLABLE" ) == DatabaseMetaData.columnNullable; + boolean primary = pkCols.contains( colName ); + + AbstractNode colNode = new AttributeNode( "column", colName ); + colNode.addProperty( "type", typeName ); + colNode.addProperty( "nullable", nullable ); + colNode.addProperty( "primaryKey", primary ); + + Integer len = (Integer) cols.getInt( "COLUMN_SIZE" ); + Integer scale = (Integer) cols.getInt( "DECIMAL_DIGITS" ); + if ( len != null ) { + colNode.addProperty( "length", len ); + } + if ( scale != null ) { + colNode.addProperty( "scale", scale ); + } + + tableNode.addChild( colNode ); + } + } + if ( !tableNode.getChildren().isEmpty() ) { + schemaNode.addChild( tableNode ); + } + } + } + if ( !schemaNode.getChildren().isEmpty() ) { + root.addChild( schemaNode ); + } + } + } + } catch ( SQLException | ConnectionHandlerException e ) { + throw new GenericRuntimeException( "Error while fetching Oracle metadata", e ); + } finally { + try { + // stmt.close(); + // conn.close(); + + // Manually commit to prevent an overflow of open transactions. 
+ h.commit(); + } catch ( ConnectionHandlerException e ) { + throw new RuntimeException( e ); + } + } + + return root; + } + + + @Override + public List> fetchPreview( Connection conn, String fqName, int limit ) { + List> rows = new ArrayList<>(); + try ( Statement stmt = conn.createStatement(); + ResultSet rs = stmt.executeQuery( "SELECT * FROM " + fqName + " FETCH FIRST " + limit + " ROWS ONLY" ) ) { + ResultSetMetaData meta = rs.getMetaData(); + while ( rs.next() ) { + Map row = new LinkedHashMap<>(); + for ( int i = 1; i <= meta.getColumnCount(); i++ ) { + row.put( meta.getColumnName( i ), rs.getObject( i ) ); + } + rows.add( row ); + } + } catch ( SQLException e ) { + log.warn( "Preview failed for {}", fqName, e ); + return List.of(); + } + return rows; + } + + + @Override + public void markSelectedAttributes( List selectedPaths ) { + List> attributePaths = new ArrayList<>(); + + for ( String path : selectedPaths ) { + String cleanPath = path.replaceFirst( " ?:.*$", "" ).trim(); + + List segments = Arrays.asList( cleanPath.split( "\\." ) ); + if ( !segments.isEmpty() && segments.get( 0 ).equals( metadataRoot.getName() ) ) { + segments = segments.subList( 1, segments.size() ); + } + + attributePaths.add( segments ); + } + + for ( List pathSegments : attributePaths ) { + AbstractNode current = metadataRoot; + + for ( int i = 0; i < pathSegments.size(); i++ ) { + String segment = pathSegments.get( i ); + + if ( i == pathSegments.size() - 1 ) { + Optional attrNodeOpt = current.getChildren().stream() + .filter( c -> c instanceof AttributeNode && segment.equals( c.getName() ) ) + .findFirst(); + + if ( attrNodeOpt.isPresent() ) { + ((AttributeNode) attrNodeOpt.get()).setSelected( true ); + } + + } else { + Optional childOpt = current.getChildren().stream() + .filter( c -> segment.equals( c.getName() ) ) + .findFirst(); + + if ( childOpt.isPresent() ) { + current = childOpt.get(); + } else { + break; + } + } + } + } + } + + + @Override + public void setRoot( AbstractNode root ) { + this.metadataRoot = root; + } + + + @Override + public Object getPreview() { + return this.previewByTable; + } + + + @Override + public AbstractNode getRoot() { + return this.metadataRoot; + } + + + @Override + public RelationalDataSource asRelationalDataSource() { + return this; + } + +} diff --git a/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/store/OracleStore.java b/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/store/OracleStore.java new file mode 100644 index 0000000000..8d8fe78608 --- /dev/null +++ b/plugins/oracle-adapter/src/main/java/org.polypheny.db.adapter.oracle/store/OracleStore.java @@ -0,0 +1,105 @@ +/* + * Copyright 2019-2025 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.db.adapter.oracle.store; + + +import lombok.extern.slf4j.Slf4j; +import org.polypheny.db.adapter.DeployMode; +import org.polypheny.db.adapter.DeployMode.DeploySetting; +import org.polypheny.db.adapter.annotations.AdapterProperties; +import org.polypheny.db.adapter.annotations.AdapterSettingInteger; +import org.polypheny.db.adapter.annotations.AdapterSettingString; +import org.polypheny.db.adapter.jdbc.sources.AbstractJdbcSource; +import org.polypheny.db.adapter.oracle.OracleSqlDialect; +import org.polypheny.db.catalog.entity.allocation.AllocationTableWrapper; +import org.polypheny.db.catalog.entity.logical.LogicalTableWrapper; +import org.polypheny.db.catalog.entity.physical.PhysicalEntity; +import org.polypheny.db.docker.DockerContainer; +import org.polypheny.db.prepare.Context; +import java.util.List; +import java.util.Map; + +@Slf4j +@AdapterProperties( + name = "Oracle", + description = "Relational data store adapter for Oracle database systems.", + usedModes = { DeployMode.REMOTE, DeployMode.DOCKER }, + defaultMode = DeployMode.DOCKER) +@AdapterSettingString(name = "host", defaultValue = "localhost", position = 1, + description = "Hostname or IP address of the remote Oracle instance.", appliesTo = DeploySetting.REMOTE) +@AdapterSettingInteger(name = "port", defaultValue = 1521, position = 2, + description = "JDBC port number on the remote Oracle instance.", appliesTo = DeploySetting.REMOTE) +@AdapterSettingString(name = "database", defaultValue = "polypheny", position = 3, + description = "Name of the database to connect to.", appliesTo = DeploySetting.REMOTE) +@AdapterSettingString(name = "username", defaultValue = "polypheny", position = 4, + description = "Username to be used for authenticating at the remote instance.", appliesTo = DeploySetting.REMOTE) +@AdapterSettingString(name = "password", defaultValue = "polypheny", position = 5, + description = "Password to be used for authenticating at the remote instance.", appliesTo = DeploySetting.REMOTE) +@AdapterSettingInteger(name = "maxConnections", defaultValue = 25, position = 6, + description = "Maximum number of concurrent JDBC connections.") + + + +public class OracleStore extends AbstractJdbcSource { + private String host; + private int port; + private String database; + private String username; + private DockerContainer container; + + + public OracleStore( final long storeId, final String uniqueName, final Map settings, final DeployMode mode ) { + super( storeId, + uniqueName, + settings, + mode, + "oracle.jdbc.OracleDriver", + OracleSqlDialect.DEFAULT, + false ); + } + + + @Override + protected String getConnectionUrl( String dbHostname, int dbPort, String dbName ) { + return ""; + } + + + @Override + protected boolean requiresSchema() { + return false; + } + + + @Override + public void shutdown() { + + } + + + @Override + protected void reloadSettings( List updatedSettings ) { + + } + + + @Override + public List createTable( Context context, LogicalTableWrapper logical, AllocationTableWrapper allocation ) { + return List.of(); + } + +} diff --git a/plugins/postgres-adapter/build.gradle b/plugins/postgres-adapter/build.gradle index e87572c823..f42951bc53 100644 --- a/plugins/postgres-adapter/build.gradle +++ b/plugins/postgres-adapter/build.gradle @@ -6,6 +6,7 @@ dependencies { compileOnly project(":plugins:sql-language") compileOnly project(":plugins:jdbc-adapter-framework") + implementation(project(":core")) implementation group: "net.postgis", name: "postgis-jdbc", version:
postgis_version implementation group: "org.postgresql", name: "postgresql", version: postgresql_version // BSD 2-clause diff --git a/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java b/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java index 6834741f05..9c69ef781d 100644 --- a/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java +++ b/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java @@ -17,9 +17,20 @@ package org.polypheny.db.adapter.postgres.source; +import java.sql.Connection; +import java.sql.DatabaseMetaData; +import java.sql.ResultSet; +import java.sql.ResultSetMetaData; import java.sql.SQLException; +import java.sql.Statement; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashSet; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; +import java.util.Optional; +import java.util.Set; import java.util.stream.Collectors; import lombok.extern.slf4j.Slf4j; import org.polypheny.db.adapter.DeployMode; @@ -28,13 +39,24 @@ import org.polypheny.db.adapter.annotations.AdapterSettingInteger; import org.polypheny.db.adapter.annotations.AdapterSettingList; import org.polypheny.db.adapter.annotations.AdapterSettingString; +import org.polypheny.db.adapter.java.SchemaFilter; +import org.polypheny.db.adapter.jdbc.connection.ConnectionHandler; +import org.polypheny.db.adapter.jdbc.connection.ConnectionHandlerException; import org.polypheny.db.adapter.jdbc.sources.AbstractJdbcSource; import org.polypheny.db.adapter.postgres.PostgresqlSqlDialect; import org.polypheny.db.catalog.entity.allocation.AllocationTableWrapper; import org.polypheny.db.catalog.entity.logical.LogicalTableWrapper; import org.polypheny.db.catalog.entity.physical.PhysicalEntity; import org.polypheny.db.catalog.entity.physical.PhysicalTable; +import org.polypheny.db.catalog.exceptions.GenericRuntimeException; import org.polypheny.db.prepare.Context; +import org.polypheny.db.schemaDiscovery.AbstractNode; +import org.polypheny.db.schemaDiscovery.AttributeNode; +import org.polypheny.db.schemaDiscovery.MetadataProvider; +import org.polypheny.db.schemaDiscovery.Node; +import org.polypheny.db.transaction.PUID; +import org.polypheny.db.transaction.PUID.Type; +import org.polypheny.db.transaction.PolyXid; @Slf4j @@ -47,19 +69,230 @@ description = "Hostname or IP address of the remote PostgreSQL instance.") @AdapterSettingInteger(name = "port", defaultValue = 5432, position = 2, description = "JDBC port number on the remote PostgreSQL instance.") -@AdapterSettingString(name = "database", defaultValue = "polypheny", position = 3, +@AdapterSettingString(name = "database", defaultValue = "postgres", position = 3, description = "Name of the database to connect to.") -@AdapterSettingString(name = "username", defaultValue = "polypheny", position = 4, +@AdapterSettingString(name = "username", defaultValue = "postgres", position = 4, description = "Username to be used for authenticating at the remote instance.") -@AdapterSettingString(name = "password", defaultValue = "polypheny", position = 5, +@AdapterSettingString(name = "password", defaultValue = "password", position = 5, description = "Password to be used for authenticating at the remote instance.") @AdapterSettingInteger(name = "maxConnections", defaultValue = 25, description = "Maximum number of concurrent JDBC connections.") 
@AdapterSettingList(name = "transactionIsolation", options = { "SERIALIZABLE", "READ_UNCOMMITTED", "READ_COMMITTED", "REPEATABLE_READ" }, defaultValue = "SERIALIZABLE", description = "Which level of transaction isolation should be used.") -@AdapterSettingString(name = "tables", defaultValue = "foo,bar", - description = "List of tables which should be imported. The names must to be separated by a comma.") -public class PostgresqlSource extends AbstractJdbcSource { +public class PostgresqlSource extends AbstractJdbcSource implements MetadataProvider { + + public AbstractNode metadataRoot; + private Map>> previewByTable = new LinkedHashMap<>(); + + + @Override + public void setRoot( AbstractNode root ) { + this.metadataRoot = root; + } + + + @Override + public AbstractNode fetchMetadataTree() { + + this.previewByTable = new LinkedHashMap<>(); + + String dbName = settings.get( "database" ); + Node root = new Node( "relational", dbName ); + + SchemaFilter filter = SchemaFilter.forAdapter( adapterName ); + + // Use random PUID to prevent usage of an expired snapshot of the transaction identifier. + PolyXid xid = PolyXid.generateLocalTransactionIdentifier( PUID.randomPUID( Type.RANDOM ), PUID.randomPUID( Type.RANDOM ) ); + + java.sql.Statement stmt = null; + Connection conn = null; + ConnectionHandler handler = null; + + try { + handler = connectionFactory.getOrCreateConnectionHandler( xid ); + stmt = handler.getStatement(); + conn = stmt.getConnection(); + DatabaseMetaData meta = conn.getMetaData(); + + try ( ResultSet schemas = requiresSchema() + ? meta.getSchemas( dbName, "%" ) + : meta.getCatalogs() ) { + while ( schemas.next() ) { + String schemaName = requiresSchema() + ? schemas.getString( "TABLE_SCHEM" ) + : schemas.getString( "TABLE_CAT" ); + + if ( filter.ignoredSchemas.contains( schemaName.toLowerCase() ) ) { + continue; + } + + AbstractNode schemaNode = new Node( "schema", schemaName ); + + try ( ResultSet tables = meta.getTables( + dbName, + requiresSchema() ? schemaName : null, + "%", + new String[]{ "TABLE" } + ) ) { + while ( tables.next() ) { + String tableName = tables.getString( "TABLE_NAME" ); + String fqName = (requiresSchema() ? "\"" + schemaName + "\"." : "") + "\"" + tableName + "\""; + Connection finalConn = conn; + previewByTable.computeIfAbsent( + schemaName + "." + tableName, + k -> { + try { + return fetchPreview( finalConn, fqName, 10 ); + } catch ( Exception e ) { + log.warn( "Preview failed for {}", fqName, e ); + return List.of(); + } + } ); + + AbstractNode tableNode = new Node( "table", tableName ); + Set pkCols = new HashSet<>(); + try ( ResultSet pk = meta.getPrimaryKeys( + dbName, + requiresSchema() ? schemaName : null, + tableName ) ) { + while ( pk.next() ) { + pkCols.add( pk.getString( "COLUMN_NAME" ) ); + } + } + + try ( ResultSet cols = meta.getColumns( + dbName, + requiresSchema() ? 
schemaName : null, + tableName, + "%" ) ) { + while ( cols.next() ) { + String colName = cols.getString( "COLUMN_NAME" ); + String typeName = cols.getString( "TYPE_NAME" ); + boolean nullable = cols.getInt( "NULLABLE" ) == DatabaseMetaData.columnNullable; + boolean primary = pkCols.contains( colName ); + + AbstractNode colNode = new AttributeNode( "column", colName ); + colNode.addProperty( "type", typeName ); + colNode.addProperty( "nullable", nullable ); + colNode.addProperty( "primaryKey", primary ); + + Integer len = (Integer) cols.getObject( "COLUMN_SIZE" ); + Integer scale = (Integer) cols.getObject( "DECIMAL_DIGITS" ); + if ( len != null ) { + colNode.addProperty( "length", len ); + } + if ( scale != null ) { + colNode.addProperty( "scale", scale ); + } + + tableNode.addChild( colNode ); + } + } + + schemaNode.addChild( tableNode ); + } + } + + root.addChild( schemaNode ); + } + } + + } catch ( SQLException | ConnectionHandlerException ex ) { + throw new GenericRuntimeException( "Error while fetching metadata tree", ex ); + } finally { + try { + // stmt.close(); + // conn.close(); + handler.commit(); + } catch ( ConnectionHandlerException e ) { + throw new RuntimeException( e ); + } + } + return root; + } + + + @Override + public List> fetchPreview( Connection conn, String fqName, int limit ) { + List> rows = new ArrayList<>(); + try ( Statement stmt = conn.createStatement(); + ResultSet rs = stmt.executeQuery( + "SELECT * FROM " + fqName + " LIMIT " + limit ) ) { + + ResultSetMetaData meta = rs.getMetaData(); + while ( rs.next() ) { + Map row = new LinkedHashMap<>(); + for ( int i = 1; i <= meta.getColumnCount(); i++ ) { + row.put( meta.getColumnName( i ), rs.getObject( i ) ); + } + rows.add( row ); + } + } catch ( SQLException e ) { + log.warn( "Preview failed for {}", fqName, e ); + return List.of(); + } + return rows; + } + + + public Object getPreview() { + return this.previewByTable; + } + + + @Override + public AbstractNode getRoot() { + return this.metadataRoot; + } + + + @Override + public void markSelectedAttributes( List selectedPaths ) { + + List> attributePaths = new ArrayList<>(); + + for ( String path : selectedPaths ) { + String cleanPath = path.replaceFirst( " ?:.*$", "" ).trim(); + + List segments = Arrays.asList( cleanPath.split( "\\." 
) ); + if ( !segments.isEmpty() && segments.get( 0 ).equals( metadataRoot.getName() ) ) { + segments = segments.subList( 1, segments.size() ); + } + + attributePaths.add( segments ); + } + + for ( List pathSegments : attributePaths ) { + AbstractNode current = metadataRoot; + + for ( int i = 0; i < pathSegments.size(); i++ ) { + String segment = pathSegments.get( i ); + + if ( i == pathSegments.size() - 1 ) { + Optional attrNodeOpt = current.getChildren().stream() + .filter( c -> c instanceof AttributeNode && segment.equals( c.getName() ) ) + .findFirst(); + + if ( attrNodeOpt.isPresent() ) { + ((AttributeNode) attrNodeOpt.get()).setSelected( true ); + } + + } else { + Optional childOpt = current.getChildren().stream() + .filter( c -> segment.equals( c.getName() ) ) + .findFirst(); + + if ( childOpt.isPresent() ) { + current = childOpt.get(); + } else { + break; + } + } + } + } + + } + public PostgresqlSource( final long storeId, final String uniqueName, final Map settings, final DeployMode mode ) { super( @@ -70,6 +303,7 @@ public PostgresqlSource( final long storeId, final String uniqueName, final Map< "org.postgresql.Driver", PostgresqlSqlDialect.DEFAULT, false ); + this.metadataRoot = null; } @@ -104,9 +338,15 @@ protected boolean requiresSchema() { @Override public List createTable( Context context, LogicalTableWrapper logical, AllocationTableWrapper allocation ) { + String physicalSchema; + if ( logical.physicalSchemaFinal == null ) { + physicalSchema = logical.table.getNamespaceName(); + } else { + physicalSchema = logical.physicalSchemaFinal; + } PhysicalTable table = adapterCatalog.createTable( - logical.table.getNamespaceName(), - logical.table.name, + physicalSchema, + logical.physicalTable, logical.columns.stream().collect( Collectors.toMap( c -> c.id, c -> c.name ) ), logical.table, logical.columns.stream().collect( Collectors.toMap( t -> t.id, t -> t ) ), diff --git a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterSourceTableAddColumn.java b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterSourceTableAddColumn.java index 91e061442e..2e30c2b78c 100644 --- a/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterSourceTableAddColumn.java +++ b/plugins/sql-language/src/main/java/org/polypheny/db/sql/language/ddl/altertable/SqlAlterSourceTableAddColumn.java @@ -132,7 +132,7 @@ public void execute( Context context, Statement statement, ParsedQueryContext pa beforeColumnName == null ? null : beforeColumnName.getSimple(), afterColumnName == null ? 
null : afterColumnName.getSimple(), SqlLiteral.toPoly( defaultValue ), - statement ); + statement, null ); } diff --git a/settings.gradle b/settings.gradle index f759a733cf..938c22bff1 100644 --- a/settings.gradle +++ b/settings.gradle @@ -57,6 +57,7 @@ include 'plugins:mongodb-adapter' include 'plugins:file-adapter' include 'plugins:google-sheet-adapter' include 'plugins:excel-adapter' +include 'plugins:oracle-adapter' // other plugins include 'plugins:notebooks' diff --git a/webui/src/main/java/org/polypheny/db/webui/Crud.java b/webui/src/main/java/org/polypheny/db/webui/Crud.java index d30bd9f131..6664278274 100644 --- a/webui/src/main/java/org/polypheny/db/webui/Crud.java +++ b/webui/src/main/java/org/polypheny/db/webui/Crud.java @@ -16,7 +16,8 @@ package org.polypheny.db.webui; - +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableList; @@ -34,6 +35,7 @@ import java.beans.PropertyChangeListener; import java.io.BufferedReader; import java.io.File; +import java.io.ByteArrayInputStream; import java.io.FileInputStream; import java.io.IOException; import java.io.InputStream; @@ -46,6 +48,8 @@ import java.nio.file.Path; import java.text.DateFormat; import java.text.SimpleDateFormat; +import java.util.Comparator; +import java.util.HashSet; import java.util.ArrayList; import java.util.Arrays; import java.util.Date; @@ -74,9 +78,9 @@ import org.apache.commons.io.FileUtils; import org.apache.commons.io.IOUtils; import org.eclipse.jetty.websocket.api.Session; +import org.polypheny.db.adapter.AbstractAdapterSettingString; import org.polypheny.db.adapter.AbstractAdapterSetting; import org.polypheny.db.adapter.AbstractAdapterSettingDirectory; -import org.polypheny.db.adapter.AbstractAdapterSettingString; import org.polypheny.db.adapter.Adapter; import org.polypheny.db.adapter.AdapterManager; import org.polypheny.db.adapter.AdapterManager.AdapterInformation; @@ -84,9 +88,16 @@ import org.polypheny.db.adapter.DataSource; import org.polypheny.db.adapter.DataStore; import org.polypheny.db.adapter.DataStore.FunctionalIndexInfo; +import org.polypheny.db.adapter.MetadataObserver.ListenerImpl; +import org.polypheny.db.adapter.MetadataObserver.ChangeLogEntry; +import org.polypheny.db.adapter.MetadataObserver.ChangeLogView; +import org.polypheny.db.adapter.MetadataObserver.PublisherManager; +import org.polypheny.db.adapter.MetadataObserver.PublisherManager.ChangeStatus; import org.polypheny.db.adapter.RelationalDataSource.ExportedColumn; import org.polypheny.db.adapter.index.IndexManager; import org.polypheny.db.adapter.java.AdapterTemplate; +import org.polypheny.db.adapter.java.AdapterTemplate.PreviewResult; +import org.polypheny.db.adapter.java.AdapterTemplate.PreviewResultEntry; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.polyalg.PolyAlgRegistry; import org.polypheny.db.catalog.Catalog; @@ -116,6 +127,7 @@ import org.polypheny.db.catalog.snapshot.LogicalRelSnapshot; import org.polypheny.db.catalog.snapshot.Snapshot; import org.polypheny.db.config.RuntimeConfig; +import org.polypheny.db.ddl.DdlManager; import org.polypheny.db.docker.AutoDocker; import org.polypheny.db.docker.DockerInstance; import org.polypheny.db.docker.DockerManager; @@ -154,6 +166,10 @@ import org.polypheny.db.processing.QueryContext; import org.polypheny.db.security.SecurityManager; import 
org.polypheny.db.transaction.Statement; +import org.polypheny.db.schemaDiscovery.AbstractNode; +import org.polypheny.db.schemaDiscovery.MetadataProvider; +import org.polypheny.db.schemaDiscovery.NodeSerializer; +import org.polypheny.db.schemaDiscovery.NodeUtil; import org.polypheny.db.transaction.Transaction; import org.polypheny.db.transaction.Transaction.MultimediaFlavor; import org.polypheny.db.transaction.TransactionException; @@ -204,6 +220,7 @@ import org.polypheny.db.webui.models.requests.PartitioningRequest; import org.polypheny.db.webui.models.requests.PartitioningRequest.ModifyPartitionRequest; import org.polypheny.db.webui.models.requests.PolyAlgRequest; +import org.polypheny.db.webui.models.requests.PreviewRequest; import org.polypheny.db.webui.models.requests.UIRequest; import org.polypheny.db.webui.models.results.RelationalResult; import org.polypheny.db.webui.models.results.RelationalResult.RelationalResultBuilder; @@ -220,6 +237,8 @@ public class Crud implements InformationObserver, PropertyChangeListener { public static final String ORIGIN = "Polypheny-UI"; private final TransactionManager transactionManager; + private AdapterTemplate temp; + public final LanguageCrud languageCrud; public final StatisticCrud statisticCrud; @@ -238,6 +257,7 @@ public class Crud implements InformationObserver, PropertyChangeListener { this.statisticCrud = new StatisticCrud( this ); this.catalogCrud = new CatalogCrud( this ); this.authCrud = new AuthCrud( this ); + this.temp = null; Catalog.afterInit( () -> Catalog.getInstance().addObserver( this ) ); } @@ -870,6 +890,214 @@ void deleteTuple( final Context ctx ) { } + void buildPreview( final Context ctx ) { + try { + initMultipart( ctx ); + if ( !ctx.isMultipartFormData() ) { + ctx.status( HttpCode.BAD_REQUEST ).result( "Preview requires multipart/form-data" ); + return; + } + + String body = IOUtils.toString( ctx.req.getPart( "body" ).getInputStream(), StandardCharsets.UTF_8 ); + PreviewRequest a = HttpServer.mapper.readValue( body, PreviewRequest.class ); + + Map inputStreams = new HashMap<>(); + for ( Part part : ctx.req.getParts() ) { + if ( !part.getName().equals( "body" ) ) { + inputStreams.put( part.getName(), part.getInputStream() ); + } + } + + AdapterTemplate template = AdapterManager.getAdapterTemplate( a.adapterName, a.adapterType ); + Map allSettings = template.settings + .stream() + .collect( Collectors.toMap( e -> e.name, e -> e ) ); + + if ( a.settings.containsKey( "directory" ) ) { + List fileNames; + try { + fileNames = HttpServer.mapper.readValue( a.settings.get( "directory" ), new TypeReference>() { + } ); + } catch ( JsonProcessingException ex ) { + String raw = a.settings.get( "directory" ); + String cleaned = raw.replaceAll( "[\\[\\]\"]", "" ).trim(); + fileNames = Arrays.stream( cleaned.split( "," ) ).map( String::trim ).filter( s -> !s.isEmpty() ).toList(); + } + + Map fileBytes = new HashMap<>(); + + for ( Entry e : inputStreams.entrySet() ) { + try ( InputStream in = e.getValue() ) { + byte[] data = IOUtils.toByteArray( in ); + fileBytes.put( e.getKey(), data ); + } + } + + String path = handleUploadFiles( fileBytes, fileNames, (AbstractAdapterSettingDirectory) allSettings.get( "directory" ), a ); + a.settings.put( "directory", path ); + } + + PreviewResult result = template.preview( a.settings, 10 ); + ctx.json( result ); + + + } catch ( Exception e ) { + log.error( "Error during the preview-request.", e ); + ctx.status( HttpCode.INTERNAL_SERVER_ERROR ).result( "Error while building preview !" 
); + } + } + + + /** + * If any metadataChange is requested, they are sent here with the help of publisher manager. + */ + void metadataStatus( final Context ctx ) { + String uniqueName = ctx.pathParam( "uniqueName" ); + ChangeStatus changed = PublisherManager.getInstance().hasChange( uniqueName ); + ctx.json( Map.of( "changed", changed ) ); + } + + + /** + * Exact changes are send here. The publisher manager gets the changes from the listener. + */ + void metadataChange( final Context ctx ) { + String uniqueName = ctx.pathParam( "uniqueName" ); + PreviewResultEntry data = PublisherManager.getInstance().fetchChange( uniqueName ); + + List convertedHistory = data.getHistory().stream() + .map( ChangeLogView::from ) + .toList(); + + PreviewResult result = new PreviewResult( + data.getMetadata(), + data.getPreview(), + convertedHistory + ); + + ctx.json( result ); + } + + + /** + * Add / Delete new metadata and update adapter. + */ + void metadataAck( final Context ctx ) { + AckPayload payload = ctx.bodyAsClass( AckPayload.class ); + PublisherManager.getInstance().ack( payload.uniqueName, payload.addedPaths ); + + Transaction transaction = transactionManager.startTransaction( Catalog.defaultUserId, false, "metadata-ack-" + payload.uniqueName ); + Statement stmt = transaction.createStatement(); + try { + if ( payload.addedPaths != null && payload.addedPaths.length > 0 ) { + DdlManager.getInstance().addSelectedMetadata( transaction, stmt, payload.uniqueName, Catalog.defaultNamespaceId, List.of( payload.addedPaths ) ); + } + + if ( payload.removedPaths != null && payload.removedPaths.length > 0 ) { + String[] filtered = filterPrefixes( payload.removedPaths ); + DdlManager.getInstance().removeSelectedMetadata( List.of( filtered ), stmt, payload.uniqueName ); + } + transaction.commit(); + ctx.status( 200 ).json( Map.of( "message", "ACK processed" ) ); + } catch ( Exception e ) { + log.error( "metadataAck failed", e ); + try { + transaction.rollback( "Error occurred during metadata acknowledgement!" ); + } catch ( Exception ignore ) { + } + ctx.status( 500 ).json( Map.of( "message", "ACK failed", "error", e.getClass().getSimpleName() + ) ); + } finally { + if ( stmt != null ) { + stmt.close(); + if ( transaction.isActive() ) { + transactionManager.removeTransaction( transaction.getXid() ); + } + + } + } + } + + + void getMetaConfiguration( final Context ctx ) { + String uniqueName = ctx.pathParam( "uniqueName" ); + MetadataProvider provider = AdapterManager.getInstance() + .getMetadataProvider( uniqueName ) + .orElseThrow( () -> new IllegalStateException( "Adapter %s doesn't support inteface metadata provider !".formatted( uniqueName ) ) ); + + List historyBeforeParsing = PublisherManager.getInstance().getHistory( uniqueName ); + + List response = historyBeforeParsing.stream() + .map( ChangeLogView::from ) + .toList(); + + PreviewResult data = new PreviewResult( NodeSerializer.serializeNode( provider.getRoot() ).toString(), provider.getPreview(), response ); + ctx.json( data ); + } + + + void setMetaConfiguration( final Context ctx ) { + ConfigPayload config = ctx.bodyAsClass( ConfigPayload.class ); + Set userSelection = Set.of( config.selected ); + Set markedPaths; + Optional> adapter = AdapterManager.getInstance().getSource( config.uniqueName ); + + if ( adapter.get() instanceof MetadataProvider mp ) { + AbstractNode root = mp.getRoot(); + markedPaths = NodeUtil.collectSelecedAttributePaths( root ); + } else { + ctx.status( 500 ).json( Map.of( "message", "Configuration can not be applied." 
) ); + return; + } + + Set toUnselect = new HashSet<>( markedPaths ); + toUnselect.removeAll( userSelection ); + + Set toAdd = new HashSet<>( userSelection ); + toAdd.removeAll( markedPaths ); + + Transaction tx = transactionManager.startTransaction( Catalog.defaultUserId, false, "setMetaConfiguration" + config.uniqueName ); + Statement stmt = tx.createStatement(); + try { + if ( !toAdd.isEmpty() ) { + DdlManager.getInstance().addSelectedMetadata( tx, stmt, config.uniqueName, Catalog.defaultNamespaceId, List.copyOf( toAdd ) ); + ((MetadataProvider) adapter.get()).markSelectedAttributes( List.copyOf( toAdd ) ); + } + + if ( !toUnselect.isEmpty() ) { + try { + DdlManager.getInstance().removeSelectedMetadata( List.copyOf( toUnselect ), stmt, config.uniqueName ); + NodeUtil.unmarkSelectedAttributes( ((MetadataProvider) adapter.get()).getRoot(), List.copyOf( toUnselect ) ); + tx.commit(); + stmt.close(); + ctx.json( Map.of( "message", "Configuration applied." ) ); + } catch ( Exception ex ) { + tx.rollback( "Error while dropping source entities" + ex.getMessage() ); + ctx.status( 500 ).json( Map.of( "message", ex.getMessage() ) ); + } finally { + if ( stmt != null ) { + stmt.close(); + } + } + } + + } catch ( Exception ex ) { + tx.rollback( "Changing adapter configuration was not successful !" + ex ); + ctx.status( 500 ).json( Map.of( "message", ex.getMessage() ) ); + } finally { + if ( stmt != null ) { + stmt.close(); + if ( tx.isActive() ) { + transactionManager.removeTransaction( tx.getXid() ); + } + + } + } + + + } + + /** * Update a row from a table. The row is determined by the value of every PK column in that row (conjunction). */ @@ -1989,6 +2217,7 @@ void getAvailableStoresForIndexes( final Context ctx ) { */ void updateAdapterSettings( final Context ctx ) { //see https://stackoverflow.com/questions/16872492/gson-and-abstract-superclasses-deserialization-issue + log.error( "▶ Payload:\n" + ctx.body() ); JsonDeserializer> storeDeserializer = ( json, typeOfT, context ) -> { JsonObject jsonObject = json.getAsJsonObject(); String type = jsonObject.get( "type" ).getAsString(); @@ -2033,6 +2262,112 @@ void updateAdapterSettings( final Context ctx ) { } + void updateSettings( final Context ctx ) { + log.error( ctx.body() ); + AdapterModel upd = ctx.bodyAsClass( AdapterModel.class ); + + if ( upd.getName() == null || upd.getSettings() == null ) { + ctx.status( HttpCode.BAD_REQUEST ).result( "uniqueName und settings required!" ); + return; + } + + Optional> store = AdapterManager.getInstance().getStore( upd.getName() ); + Optional> opt = store.isPresent() ? store : AdapterManager.getInstance().getSource( upd.getName() ); + + if ( opt.isEmpty() ) { + ctx.json( RelationalResult.builder().error( "Adapter not found!" 
).build() ); + return; + } + + Adapter adapter = opt.get(); + + try { + adapter.updateSettings( upd.getSettings() ); + Catalog.getInstance().commit(); + + Transaction tx = getTransaction(); + tx.createStatement().getQueryProcessor().resetCaches(); + tx.commit(); + + ctx.json( RelationalResult.builder().affectedTuples( 1 ).build() ); + } catch ( Throwable t ) { + ctx.json( RelationalResult.builder().error( "Update canceled: " + t.getMessage() ).build() ); + } + } + + + void updateSettingsForm( final Context ctx ) throws IOException, ServletException { + + initMultipart( ctx ); + if ( !ctx.isMultipartFormData() ) { + ctx.status( HttpCode.BAD_REQUEST ) + .result( "Multipart-FormData required" ); + return; + } + + String bodyJson = IOUtils.toString( ctx.req.getPart( "body" ).getInputStream(), StandardCharsets.UTF_8 ); + PreviewRequest am = HttpServer.mapper.readValue( bodyJson, PreviewRequest.class ); + + List fileNames; + String rawDir = am.getSettings().get( "directory" ); + + try { + fileNames = HttpServer.mapper.readValue( + rawDir, + new com.fasterxml.jackson.core.type.TypeReference>() { + } ); + } catch ( com.fasterxml.jackson.core.JsonProcessingException ex ) { + String cleaned = rawDir + .replaceAll( "[\\[\\]\"]", "" ) + .trim(); + fileNames = Arrays.stream( cleaned.split( "," ) ) + .map( String::trim ) + .filter( s -> !s.isEmpty() ) + .toList(); + } + + Map fileBytes = new HashMap<>(); + for ( Part p : ctx.req.getParts() ) { + if ( !"body".equals( p.getName() ) ) { + try ( InputStream in = p.getInputStream() ) { + fileBytes.put( p.getName(), IOUtils.toByteArray( in ) ); + } + } + } + + String uniqueName = am.getUniqueName(); + String tmpName = "tmp_" + System.nanoTime(); + am.uniqueName = tmpName; + String fullPath = handleUploadFiles( fileBytes, fileNames, null, am ); + am.uniqueName = uniqueName; + createFormDiffs( am, fullPath ); + ctx.result( "File(s) stored at: " + fullPath ); + } + + + private void createFormDiffs( PreviewRequest previewRequest, String path ) { + DataSource currentSource = AdapterManager.getInstance().getSource( previewRequest.uniqueName ).orElseThrow(); + + MetadataProvider currentProvider = (MetadataProvider) currentSource; + AbstractNode currentNode = currentProvider.getRoot(); + + previewRequest.settings.put( "directory", path ); + + DataSource tempSource = AdapterManager.getAdapterTemplate( previewRequest.adapterName, AdapterType.SOURCE ).createEphemeral( previewRequest.settings ); + + MetadataProvider tempProvider = (MetadataProvider) tempSource; + AbstractNode tempNode = tempProvider.fetchMetadataTree(); + Object newPreview = tempProvider.getPreview(); + + PreviewResultEntry result = ListenerImpl.buildFormChange( previewRequest.uniqueName, currentNode, tempNode, newPreview, path ); + + try { + tempSource.shutdown(); + } catch ( Exception ignore ) { + } + } + + /** * Get available adapters */ @@ -2061,6 +2396,40 @@ void getSources( final Context ctx ) { } + /** + * Get Metadata and preview of data before fully creating the adapter. 
+ */ + public void getMetadataAndPreview( final Context ctx ) { + try { + PreviewRequest req = ctx.bodyAsClass( PreviewRequest.class ); + log.info( "Parsed preview request: {}", req ); + + AdapterTemplate template = AdapterTemplate.fromString( req.adapterName, req.adapterType ); + log.debug( "Row limit: {}", req.limit ); + Map allSettings = template.settings.stream().collect( Collectors.toMap( e -> e.name, e -> e ) ); + + for ( Entry entry : allSettings.entrySet() ) { + log.debug( "Key: {} Value: {}", entry.getKey(), entry.getValue() ); + if ( entry.getValue() instanceof AbstractAdapterSettingDirectory ) { + log.debug( "Found a directory setting." ); + } + } + + PreviewResult result = template.preview( req.settings, req.limit ); + + ctx.json( result ); + } catch ( Exception e ) { + log.error( "Error while handling preview request", e ); + ctx.status( 500 ).json( Map.of( "error", "Internal error: " + e.getMessage() ) ); + } + } + + /** + * Deploy a new adapter without collecting files from the InputStream. + * Files from previews are cached in the UploadDepot. + */ + + /** * Deploy a new adapter */ @@ -2123,11 +2492,16 @@ void createAdapter( final Context ctx ) throws ServletException, IOException { ctx.json( RelationalResult.builder().exception( e ).build() ); return; } + } else { + adapterSettings.put( set.name, entry.getValue() ); } } adapterSettings.put( "mode", adapterModel.mode.toString() ); + if ( adapterModel.metadata != null && !adapterModel.metadata.isEmpty() ) { + adapterSettings.put( "selectedAttributes", Crud.gson.toJson( adapterModel.metadata ) ); + } String query = String.format( "ALTER ADAPTERS ADD \"%s\" USING '%s' AS '%s' WITH '%s'", adapterModel.name, adapterModel.adapterName, adapterModel.type, Crud.gson.toJson( adapterSettings ) ); QueryLanguage language = QueryLanguage.from( "sql" ); Result res = LanguageCrud.anyQueryResult( @@ -2152,8 +2526,21 @@ public void startAccessRequest( Context ctx ) { } + private Exception handleLinkFiles( Context ctx, AdapterModel a, AbstractAdapterSettingDirectory setting, Map settings ) { + if ( !settings.containsKey( "directoryName" ) ) { + return new GenericRuntimeException( "Security check for access was not performed; id missing." ); + } + Path path = Path.of( settings.get( "directoryName" ).defaultValue ); + if ( !SecurityManager.getInstance().checkPathAccess( path ) ) { + return new GenericRuntimeException( "Security check for access was not successful; not enough permissions." ); + } + + return null; + } + + + private Exception handleLinkFiles( AbstractAdapterSettingString setting ) { Path path = Path.of( setting.getValue() ); SecurityManager.getInstance().requestPathAccess( "webui", "webui", path ); if ( !SecurityManager.getInstance().checkPathAccess( path ) ) { return new GenericRuntimeException( "Security check for access was not successful; not enough permissions." ); @@ -2166,11 +2553,17 @@ private static String handleUploadFiles( Map inputStreams, if ( fileNames.isEmpty() ) { throw new GenericRuntimeException( "No file or directory specified for upload!"
); } + + setting.inputStreams.clear(); for ( String fileName : fileNames ) { - setting.inputStreams.put( fileName, inputStreams.get( fileName ) ); + InputStream in = inputStreams.get( fileName ); + if ( in != null ) { + setting.inputStreams.put( fileName, in ); + } } + File path = PolyphenyHomeDirManager.getInstance().registerNewFolder( "data/csv/" + a.name ); - for ( Entry is : setting.inputStreams.entrySet() ) { + for ( Map.Entry is : setting.inputStreams.entrySet() ) { try { File file = new File( path, is.getKey() ); FileUtils.copyInputStreamToFile( is.getValue(), file ); @@ -2182,6 +2575,24 @@ private static String handleUploadFiles( Map inputStreams, } + private static String handleUploadFiles( Map files, List fileNames, AbstractAdapterSettingDirectory setting, PreviewRequest previewRequest ) { + File path = PolyphenyHomeDirManager.getInstance().registerNewFolder( "data/csv/" + previewRequest.uniqueName ); + for ( String name : fileNames ) { + byte[] data = files.get( name ); + if ( data == null ) { + continue; + } + try ( InputStream in = new ByteArrayInputStream( data ) ) { + File target = new File( path, name ); + FileUtils.copyInputStreamToFile( in, target ); + } catch ( IOException e ) { + throw new GenericRuntimeException( e ); + } + } + return path.getAbsolutePath(); + } + + /** * Remove an existing storeId or source */ @@ -2977,6 +3388,24 @@ private static void zipDirectory( String basePath, File dir, ZipOutputStream zip } + private String[] filterPrefixes( String[] paths ) { + String[] sorted = Arrays.copyOf( paths, paths.length ); + Arrays.sort( sorted, Comparator.comparingInt( String::length ) ); + List keep = new ArrayList<>(); + outer: + for ( int i = 0; i < sorted.length; i++ ) { + String p = sorted[i]; + for ( int j = i + 1; j < sorted.length; j++ ) { + if ( sorted[j].startsWith( p + "." 
) ) { + continue outer; + } + } + keep.add( p ); + } + return keep.toArray( new String[0] ); + } + + public void getAvailablePlugins( Context ctx ) { ctx.json( PolyPluginManager .getPLUGINS() @@ -2993,4 +3422,19 @@ public void propertyChange( PropertyChangeEvent evt ) { } + public record AckPayload( @JsonProperty String uniqueName, @JsonProperty String[] addedPaths, @JsonProperty String[] removedPaths ) { + + } + + + public record ConfigPayload( @JsonProperty String uniqueName, @JsonProperty String[] selected ) { + + } + + + public record AdapterSettingsUpdate( String uniqueName, Map settings ) { + + } + + } diff --git a/webui/src/main/java/org/polypheny/db/webui/HttpServer.java b/webui/src/main/java/org/polypheny/db/webui/HttpServer.java index aa6373a9d6..c09a6e241d 100644 --- a/webui/src/main/java/org/polypheny/db/webui/HttpServer.java +++ b/webui/src/main/java/org/polypheny/db/webui/HttpServer.java @@ -325,7 +325,9 @@ private void attachRoutes( Javalin webuiServer, Crud crud ) { webuiServer.post( "/removeAdapter", crud::removeAdapter ); - webuiServer.post( "/updateAdapterSettings", crud::updateAdapterSettings ); + webuiServer.post( "/updateAdapterSettings", crud::updateSettings ); + + webuiServer.post( "/updateAdapterSettingsForm", crud::updateSettingsForm ); webuiServer.get( "/getAvailableStores", crud::getAvailableStores ); @@ -353,6 +355,18 @@ private void attachRoutes( Javalin webuiServer, Crud crud ) { webuiServer.get( "/product", ctx -> ctx.result( "Polypheny-DB" ) ); + webuiServer.post( "/previewTable", crud::buildPreview ); + + webuiServer.get( "/metadataStatus/{uniqueName}", crud::metadataStatus ); + + webuiServer.get( "/metadataChange/{uniqueName}", crud::metadataChange ); + + webuiServer.post( "/metadataAck/{uniqueName}", crud::metadataAck ); + + webuiServer.get( "/metadataConfiguration/{uniqueName}", crud::getMetaConfiguration ); + + webuiServer.post( "/setMetaConfig", crud::setMetaConfiguration ); + webuiServer.get( "/getPolyAlgRegistry", crud::getPolyAlgRegistry ); webuiServer.post( "/buildPolyPlan", crud::buildPlanFromPolyAlg ); diff --git a/webui/src/main/java/org/polypheny/db/webui/WebSocket.java b/webui/src/main/java/org/polypheny/db/webui/WebSocket.java index 5445b9021a..1121bcf2cc 100644 --- a/webui/src/main/java/org/polypheny/db/webui/WebSocket.java +++ b/webui/src/main/java/org/polypheny/db/webui/WebSocket.java @@ -246,6 +246,7 @@ public void onMessage( final WsMessageContext ctx ) { if ( result.xid != null ) { xIds.add( result.xid ); } + System.out.println( result ); ctx.send( result ); break; default: diff --git a/webui/src/main/java/org/polypheny/db/webui/models/catalog/AdapterModel.java b/webui/src/main/java/org/polypheny/db/webui/models/catalog/AdapterModel.java index 7a628ecc83..1681755418 100644 --- a/webui/src/main/java/org/polypheny/db/webui/models/catalog/AdapterModel.java +++ b/webui/src/main/java/org/polypheny/db/webui/models/catalog/AdapterModel.java @@ -54,6 +54,12 @@ public class AdapterModel extends IdEntity { @JsonProperty public List indexMethods; + @JsonProperty + public List metadata; + + @JsonProperty + public Map columnAliases; + public AdapterModel( @JsonProperty("id") @Nullable Long id, @@ -62,13 +68,17 @@ public AdapterModel( @JsonProperty("type") AdapterType type, @JsonProperty("settings") Map settings, @JsonProperty("mode") DeployMode mode, - @JsonProperty("indexMethods") List indexMethods ) { + @JsonProperty("indexMethods") List indexMethods, + @JsonProperty("metadata") List metadata, + @JsonProperty("columnAliases") Map columnAliases) { 
super( id, name ); this.adapterName = adapterName; this.type = type; this.settings = settings; this.mode = mode; this.indexMethods = indexMethods; + this.metadata = metadata; + this.columnAliases = columnAliases; } @@ -84,7 +94,8 @@ public static AdapterModel from( LogicalAdapter adapter ) { adapter.type, settings, adapter.mode, - adapter.type == AdapterType.STORE ? ((DataStore) dataStore).getAvailableIndexMethods() : List.of() ) ).orElse( null ); + adapter.type == AdapterType.STORE ? ((DataStore) dataStore).getAvailableIndexMethods() : List.of(), + null, null) ).orElse( null ); } diff --git a/webui/src/main/java/org/polypheny/db/webui/models/requests/PreviewRequest.java b/webui/src/main/java/org/polypheny/db/webui/models/requests/PreviewRequest.java new file mode 100644 index 0000000000..dd1b3ccfe5 --- /dev/null +++ b/webui/src/main/java/org/polypheny/db/webui/models/requests/PreviewRequest.java @@ -0,0 +1,58 @@ +/* + * Copyright 2019-2025 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.webui.models.requests; + +import com.fasterxml.jackson.annotation.JsonProperty; +import lombok.Data; +import lombok.NoArgsConstructor; +import org.polypheny.db.catalog.entity.LogicalAdapter.AdapterType; +import java.util.Map; + +@Data +public class PreviewRequest { + + @JsonProperty + public String adapterName; + + @JsonProperty + public AdapterType adapterType; + + @JsonProperty + public Map settings; + + @JsonProperty + public int limit; + + @JsonProperty + public String uniqueName; + + public PreviewRequest() { } + + public PreviewRequest( + @JsonProperty("adapterName") String adapterName, + @JsonProperty("adapterType") AdapterType adapterType, + @JsonProperty("settings") Map settings, + @JsonProperty("limit") int rowLimit, + @JsonProperty("uniqueName") String uniqueName ) { + this.adapterName = adapterName; + this.adapterType = adapterType; + this.settings = settings; + this.limit = rowLimit; + this.uniqueName = uniqueName; + } + +}
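Note: the following is a hypothetical client-side sketch, not part of the patch. It shows how the newly registered routes /metadataStatus/{uniqueName} and /metadataAck/{uniqueName} could be driven; the route names and the AckPayload fields (uniqueName, addedPaths, removedPaths) are taken from the changes above, while the base URL, the adapter name and the attribute-path format are illustrative assumptions.

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class MetadataAckExample {

    public static void main( String[] args ) throws Exception {
        String base = "http://localhost:8080"; // assumed address of the Polypheny HTTP server
        String uniqueName = "oracle1";         // assumed unique name of a deployed source adapter

        HttpClient client = HttpClient.newHttpClient();

        // Ask whether the metadata observer has detected changes for this adapter.
        HttpRequest status = HttpRequest.newBuilder()
                .uri( URI.create( base + "/metadataStatus/" + uniqueName ) )
                .GET()
                .build();
        System.out.println( client.send( status, HttpResponse.BodyHandlers.ofString() ).body() );

        // Acknowledge a change: add one newly discovered column, remove nothing.
        // The path format "SCHEMA.TABLE.COLUMN" is an assumption for illustration.
        String payload = "{"
                + "\"uniqueName\": \"" + uniqueName + "\","
                + "\"addedPaths\": [\"PUBLIC.EMPLOYEES.SALARY\"],"
                + "\"removedPaths\": []"
                + "}";
        HttpRequest ack = HttpRequest.newBuilder()
                .uri( URI.create( base + "/metadataAck/" + uniqueName ) )
                .header( "Content-Type", "application/json" )
                .POST( HttpRequest.BodyPublishers.ofString( payload ) )
                .build();
        System.out.println( client.send( ack, HttpResponse.BodyHandlers.ofString() ).body() );
    }

}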