From 16d7b0cc2fd2e08c42c095c435d499fc02b5da03 Mon Sep 17 00:00:00 2001 From: datomo Date: Thu, 21 Sep 2023 00:42:24 +0200 Subject: [PATCH] further adjusting mongo --- .../org/polypheny/db/adapter/Adapter.java | 7 - .../polypheny/db/adapter/AdapterManager.java | 2 +- .../db/adapter/java/AdapterTemplate.java | 12 + .../db/catalog/catalogs/RelStoreCatalog.java | 5 +- .../db/catalog/catalogs/StoreCatalog.java | 1 + .../org/polypheny/db/ddl/DdlManagerImpl.java | 5 +- .../polypheny/db/adapter/csv/CsvPlugin.java | 2 +- .../db/hsqldb/stores/HsqldbPlugin.java | 2 +- .../jdbc/stores/AbstractJdbcStore.java | 2 +- .../db/adapter/monetdb/MonetdbPlugin.java | 4 +- .../db/adapter/mongodb/MongoAlg.java | 8 +- .../db/adapter/mongodb/MongoConvention.java | 1 + .../db/adapter/mongodb/MongoEntity.java | 1 + .../db/adapter/mongodb/MongoPlugin.java | 26 +- .../db/adapter/mongodb/MongoRowType.java | 4 +- .../db/adapter/mongodb/MongoStoreCatalog.java | 4 +- .../mongodb/{ => rules}/MongoAggregate.java | 27 +- .../mongodb/rules/MongoDocumentAggregate.java | 136 +++++ .../mongodb/rules/MongoDocumentModify.java | 54 ++ .../mongodb/rules/MongoDocumentProject.java | 66 ++ .../mongodb/rules/MongoEntityModify.java | 507 ++++++++++++++++ .../mongodb/{ => rules}/MongoFilter.java | 21 +- .../mongodb/{ => rules}/MongoProject.java | 36 +- .../mongodb/{ => rules}/MongoRules.java | 564 +----------------- .../mongodb/{ => rules}/MongoScan.java | 27 +- .../mongodb/{ => rules}/MongoSort.java | 39 +- .../MongoToEnumerableConverter.java | 27 +- .../MongoToEnumerableConverterRule.java | 22 +- .../db/adapter/mongodb/rules/MongoValues.java | 39 ++ .../adapter/mongodb/util/MongoTypeUtil.java | 6 +- .../db/adapter/postgres/PostgresqlPlugin.java | 4 +- .../postgres/source/PostgresqlSource.java | 2 +- .../postgres/store/PostgresqlStore.java | 3 +- .../java/org/polypheny/db/webui/Crud.java | 9 +- 34 files changed, 928 insertions(+), 747 deletions(-) rename 
plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/{ => rules}/MongoAggregate.java (89%) create mode 100644 plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/rules/MongoDocumentAggregate.java create mode 100644 plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/rules/MongoDocumentModify.java create mode 100644 plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/rules/MongoDocumentProject.java create mode 100644 plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/rules/MongoEntityModify.java rename plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/{ => rules}/MongoFilter.java (98%) rename plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/{ => rules}/MongoProject.java (87%) rename plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/{ => rules}/MongoRules.java (59%) rename plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/{ => rules}/MongoScan.java (70%) rename plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/{ => rules}/MongoSort.java (75%) rename plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/{ => rules}/MongoToEnumerableConverter.java (88%) rename plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/{ => rules}/MongoToEnumerableConverterRule.java (65%) create mode 100644 plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/rules/MongoValues.java diff --git a/core/src/main/java/org/polypheny/db/adapter/Adapter.java b/core/src/main/java/org/polypheny/db/adapter/Adapter.java index 66453c62da..4bd4c620d1 100644 --- a/core/src/main/java/org/polypheny/db/adapter/Adapter.java +++ b/core/src/main/java/org/polypheny/db/adapter/Adapter.java @@ -150,13 +150,6 @@ public List getAvailableSettings( Class clazz ) { } - public static Map getDefaultSettings( Class> clazz ) { - return AbstractAdapterSetting.fromAnnotations( 
clazz.getAnnotations(), clazz.getAnnotation( AdapterProperties.class ) ) - .stream() - .collect( Collectors.toMap( e -> e.name, e -> e.defaultValue ) ); - } - - public void shutdownAndRemoveListeners() { shutdown(); if ( deployMode == DeployMode.DOCKER ) { diff --git a/core/src/main/java/org/polypheny/db/adapter/AdapterManager.java b/core/src/main/java/org/polypheny/db/adapter/AdapterManager.java index f6c31c96d1..b656750a8a 100644 --- a/core/src/main/java/org/polypheny/db/adapter/AdapterManager.java +++ b/core/src/main/java/org/polypheny/db/adapter/AdapterManager.java @@ -65,7 +65,7 @@ private AdapterManager() { } - public static void addAdapterDeploy( Class clazz, String adapterName, Map defaultSettings, Function4, Adapter> deployer ) { + public static void addAdapterTemplate( Class clazz, String adapterName, Map defaultSettings, Function4, Adapter> deployer ) { REGISTER.put( Pair.of( adapterName.toLowerCase(), AdapterTemplate.getAdapterType( clazz ) ), new AdapterTemplate( clazz, adapterName, defaultSettings, deployer ) ); } diff --git a/core/src/main/java/org/polypheny/db/adapter/java/AdapterTemplate.java b/core/src/main/java/org/polypheny/db/adapter/java/AdapterTemplate.java index d05685dddf..b88fd9e1ff 100644 --- a/core/src/main/java/org/polypheny/db/adapter/java/AdapterTemplate.java +++ b/core/src/main/java/org/polypheny/db/adapter/java/AdapterTemplate.java @@ -16,14 +16,18 @@ package org.polypheny.db.adapter.java; +import java.util.List; import java.util.Map; import lombok.Getter; import lombok.Value; +import org.polypheny.db.adapter.AbstractAdapterSetting; import org.polypheny.db.adapter.Adapter; import org.polypheny.db.adapter.AdapterManager; import org.polypheny.db.adapter.AdapterManager.Function4; import org.polypheny.db.adapter.DataStore; +import org.polypheny.db.adapter.annotations.AdapterProperties; import org.polypheny.db.catalog.entity.CatalogAdapter.AdapterType; +import org.polypheny.db.catalog.exceptions.GenericRuntimeException; @Value public 
class AdapterTemplate { @@ -56,4 +60,12 @@ public static AdapterTemplate fromString( String adapterName, AdapterType adapte } + public List getAllSettings() { + AdapterProperties properties = clazz.getAnnotation( AdapterProperties.class ); + if ( clazz.getAnnotation( AdapterProperties.class ) == null ) { + throw new GenericRuntimeException( "The used adapter does not annotate its properties correctly." ); + } + return AbstractAdapterSetting.fromAnnotations( clazz.getAnnotations(), properties ); + } + } diff --git a/core/src/main/java/org/polypheny/db/catalog/catalogs/RelStoreCatalog.java b/core/src/main/java/org/polypheny/db/catalog/catalogs/RelStoreCatalog.java index 010edfbb54..2f644b8192 100644 --- a/core/src/main/java/org/polypheny/db/catalog/catalogs/RelStoreCatalog.java +++ b/core/src/main/java/org/polypheny/db/catalog/catalogs/RelStoreCatalog.java @@ -28,6 +28,7 @@ import java.util.stream.Collectors; import lombok.EqualsAndHashCode; import lombok.Value; +import lombok.experimental.NonFinal; import lombok.extern.slf4j.Slf4j; import org.polypheny.db.catalog.IdBuilder; import org.polypheny.db.catalog.entity.allocation.AllocationColumn; @@ -44,6 +45,7 @@ @EqualsAndHashCode(callSuper = true) @Value @Slf4j +@NonFinal public class RelStoreCatalog extends StoreCatalog { @@ -122,8 +124,7 @@ public PhysicalTable fromAllocation( long id ) { } - - public void dropColum( long allocId, long columnId ) { + public void dropColumn( long allocId, long columnId ) { PhysicalColumn column = columns.get( Pair.of( allocId, columnId ) ); PhysicalTable table = fromAllocation( allocId ); List pColumns = new ArrayList<>( table.columns ); diff --git a/core/src/main/java/org/polypheny/db/catalog/catalogs/StoreCatalog.java b/core/src/main/java/org/polypheny/db/catalog/catalogs/StoreCatalog.java index 28dc4689ce..f2993635f8 100644 --- a/core/src/main/java/org/polypheny/db/catalog/catalogs/StoreCatalog.java +++ b/core/src/main/java/org/polypheny/db/catalog/catalogs/StoreCatalog.java @@ 
-124,6 +124,7 @@ public void addPhysical( AllocationEntity allocation, PhysicalEntity... physical } allocToPhysicals.put( allocation.id, physicals ); + allocations.put( allocation.id, allocation ); List.of( physicalEntities ).forEach( p -> this.physicals.put( p.id, p ) ); } diff --git a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java index deddcca3c2..987a45b3cb 100644 --- a/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java +++ b/dbms/src/main/java/org/polypheny/db/ddl/DdlManagerImpl.java @@ -260,13 +260,10 @@ private void handleSource( DataSource adapter ) { aColumns.add( allocationColumn ); } - catalog.updateSnapshot(); - logical = catalog.getSnapshot().rel().getTable( logical.id ).orElseThrow(); - allocation = catalog.getSnapshot().alloc().getEntity( allocation.id ).orElseThrow(); buildNamespace( Catalog.defaultNamespaceId, logical, adapter ); adapter.createTable( null, LogicalTableWrapper.of( logical, columns ), AllocationTableWrapper.of( allocation.unwrap( AllocationTable.class ), aColumns ) ); - + catalog.updateSnapshot(); } catalog.updateSnapshot(); diff --git a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvPlugin.java b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvPlugin.java index f034d7587c..3b48b10913 100644 --- a/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvPlugin.java +++ b/plugins/csv-adapter/src/main/java/org/polypheny/db/adapter/csv/CsvPlugin.java @@ -43,7 +43,7 @@ public void start() { "maxStringLength", "255" ); - AdapterManager.addAdapterDeploy( CsvSource.class, "CSV", defaults, CsvSource::new ); + AdapterManager.addAdapterTemplate( CsvSource.class, "CSV", defaults, CsvSource::new ); } diff --git a/plugins/hsqldb-adapter/src/main/java/org/polypheny/db/hsqldb/stores/HsqldbPlugin.java b/plugins/hsqldb-adapter/src/main/java/org/polypheny/db/hsqldb/stores/HsqldbPlugin.java index 186d9cc07f..48cbd8d990 
100644 --- a/plugins/hsqldb-adapter/src/main/java/org/polypheny/db/hsqldb/stores/HsqldbPlugin.java +++ b/plugins/hsqldb-adapter/src/main/java/org/polypheny/db/hsqldb/stores/HsqldbPlugin.java @@ -48,7 +48,7 @@ public void start() { "trxIsolationLevel", "read_committed" ) ); - AdapterManager.addAdapterDeploy( HsqldbStore.class, ADAPTER_NAME, defaults, HsqldbStore::new ); + AdapterManager.addAdapterTemplate( HsqldbStore.class, ADAPTER_NAME, defaults, HsqldbStore::new ); } diff --git a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/stores/AbstractJdbcStore.java b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/stores/AbstractJdbcStore.java index 7b728ee7ae..d6bba2f522 100644 --- a/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/stores/AbstractJdbcStore.java +++ b/plugins/jdbc-adapter-framework/src/main/java/org/polypheny/db/adapter/jdbc/stores/AbstractJdbcStore.java @@ -368,7 +368,7 @@ public void dropColumn( Context context, long allocId, long columnId ) { .append( dialect.quoteIdentifier( table.name ) ); builder.append( " DROP " ).append( dialect.quoteIdentifier( column.name ) ); executeUpdate( builder, context ); - storeCatalog.dropColum( allocId, columnId ); + storeCatalog.dropColumn( allocId, columnId ); //} } diff --git a/plugins/monetdb-adapter/src/main/java/org/polypheny/db/adapter/monetdb/MonetdbPlugin.java b/plugins/monetdb-adapter/src/main/java/org/polypheny/db/adapter/monetdb/MonetdbPlugin.java index 598ed40e86..b3074b1c52 100644 --- a/plugins/monetdb-adapter/src/main/java/org/polypheny/db/adapter/monetdb/MonetdbPlugin.java +++ b/plugins/monetdb-adapter/src/main/java/org/polypheny/db/adapter/monetdb/MonetdbPlugin.java @@ -48,8 +48,8 @@ public void start() { "maxConnections", "25" ); - AdapterManager.addAdapterDeploy( MonetdbStore.class, ADAPTER_NAME, settings, MonetdbStore::new ); - AdapterManager.addAdapterDeploy( MonetdbSource.class, ADAPTER_NAME, settings, 
MonetdbSource::new ); + AdapterManager.addAdapterTemplate( MonetdbStore.class, ADAPTER_NAME, settings, MonetdbStore::new ); + AdapterManager.addAdapterTemplate( MonetdbSource.class, ADAPTER_NAME, settings, MonetdbSource::new ); } diff --git a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoAlg.java b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoAlg.java index 6503a2323b..b0d1e4ddb7 100644 --- a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoAlg.java +++ b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoAlg.java @@ -79,7 +79,7 @@ public interface MongoAlg extends AlgNode { */ class Implementor extends AlgShuttleImpl implements Serializable { - final List> list = new ArrayList<>(); + public final List> list = new ArrayList<>(); public List operations = new ArrayList<>(); public BsonArray filter = new BsonArray(); @Getter @@ -93,7 +93,7 @@ class Implementor extends AlgShuttleImpl implements Serializable { public boolean onlyOne = false; public boolean isDocumentUpdate = false; - CatalogEntity table; + public CatalogEntity entity; @Setter @Getter public boolean hasProject = false; @@ -211,8 +211,8 @@ public List getNecessaryPhysicalFields() { public List reorderPhysical() { // this is only needed if there is a basic scan without project or group, // where we cannot be sure if the fields are all ordered as intended - assert table.getRowType().getFieldCount() == physicalMapper.size(); - return table.getRowType().getFieldNames(); + assert entity.getRowType().getFieldCount() == physicalMapper.size(); + return entity.getRowType().getFieldNames(); } } diff --git a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoConvention.java b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoConvention.java index 7f53caf783..23300a1dae 100644 --- 
a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoConvention.java +++ b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoConvention.java @@ -16,6 +16,7 @@ package org.polypheny.db.adapter.mongodb; +import org.polypheny.db.adapter.mongodb.rules.MongoRules; import org.polypheny.db.plan.AlgOptPlanner; import org.polypheny.db.plan.AlgOptRule; import org.polypheny.db.plan.Convention; diff --git a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoEntity.java b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoEntity.java index 2b8aa6c590..e2fa5dd9bb 100644 --- a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoEntity.java +++ b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoEntity.java @@ -71,6 +71,7 @@ import org.polypheny.db.adapter.AdapterManager; import org.polypheny.db.adapter.DataContext; import org.polypheny.db.adapter.mongodb.MongoPlugin.MongoStore; +import org.polypheny.db.adapter.mongodb.rules.MongoScan; import org.polypheny.db.adapter.mongodb.util.MongoDynamic; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.core.common.Modify; diff --git a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoPlugin.java b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoPlugin.java index c1d5ae49a9..6eb5999ce9 100644 --- a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoPlugin.java +++ b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoPlugin.java @@ -112,7 +112,7 @@ public void start() { "trxLifetimeLimit", "1209600" ); - AdapterManager.addAdapterDeploy( MongoStore.class, ADAPTER_NAME, settings, MongoStore::new ); + AdapterManager.addAdapterTemplate( MongoStore.class, ADAPTER_NAME, settings, MongoStore::new ); } @@ -369,9 +369,7 @@ public void dropCollection( Context context, 
AllocationCollection allocation ) { PhysicalCollection collection = storeCatalog.fromAllocation( allocation.id ).unwrap( PhysicalCollection.class ); this.currentNamespace.database.getCollection( collection.name ).drop(); - catalog.dropCollection( allocation.id ); - catalog.getAllocRelations().remove( allocation.id ); - + storeCatalog.removePhysical( allocation.id ); } @@ -382,7 +380,7 @@ public void dropTable( Context context, long allocId ) { PhysicalTable physical = storeCatalog.fromAllocation( allocId ).unwrap( PhysicalTable.class ); this.currentNamespace.database.getCollection( physical.name ).drop(); - storeCatalog.dropTable( allocId ); + storeCatalog.removePhysical( allocId ); } @@ -391,8 +389,9 @@ public void addColumn( Context context, long allocId, LogicalColumn logicalColum commitAll(); context.getStatement().getTransaction().registerInvolvedAdapter( this ); PhysicalTable physical = storeCatalog.fromAllocation( allocId ).unwrap( PhysicalTable.class ); + String physicalName = getPhysicalColumnName( logicalColumn ); // updates all columns with this field if a default value is provided - PhysicalColumn column = storeCatalog.addColumn( physicalColumnName, allocId, adapterId, table.columns.size() - 1, logicalColumn ); + PhysicalColumn column = storeCatalog.addColumn( physicalName, allocId, adapterId, physical.columns.size() - 1, logicalColumn ); Document field; if ( logicalColumn.defaultValue != null ) { @@ -514,16 +513,6 @@ private void addCompositeIndex( LogicalIndex index, List columns, Physic } - @Override - public void dropIndex( Context context, LogicalIndex logicalIndex, long allocId ) { - commitAll(); - context.getStatement().getTransaction().registerInvolvedAdapter( this ); - PhysicalTable physical = storeCatalog.fromAllocation( allocId ).unwrap( PhysicalTable.class ); - - this.currentNamespace.database.getCollection( physical.name ).dropIndex( catalogIndex.physicalName + "_" + partitionPlacement.partitionId ); - } - - @Override public void 
updateColumnType( Context context, long allocId, LogicalColumn newCol ) { PhysicalColumn column = storeCatalog.updateColumnType( allocId, newCol ); @@ -552,7 +541,6 @@ public IndexMethodModel getDefaultIndexMethod() { } - @Override public List getFunctionalIndexes( LogicalTable catalogTable ) { return ImmutableList.of(); @@ -644,14 +632,14 @@ public void createTable( Context context, LogicalTableWrapper logical, Allocatio @Override public void refreshTable( long allocId ) { PhysicalTable physical = storeCatalog.fromAllocation( allocId ).unwrap( PhysicalTable.class ); - storeCatalog.addTable( currentNamespace.createEntity( storeCatalog, physical ) ); + storeCatalog.replacePhysical( currentNamespace.createEntity( storeCatalog, physical ) ); } @Override public void refreshCollection( long allocId ) { PhysicalCollection physical = storeCatalog.fromAllocation( allocId ).unwrap( PhysicalCollection.class ); - storeCatalog.addTable( this.currentNamespace.createEntity( storeCatalog, physical ) ); + storeCatalog.replacePhysical( this.currentNamespace.createEntity( storeCatalog, physical ) ); } diff --git a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoRowType.java b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoRowType.java index e015069c6b..b3596079ab 100644 --- a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoRowType.java +++ b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoRowType.java @@ -23,7 +23,7 @@ import org.polypheny.db.algebra.type.AlgDataTypeField; import org.polypheny.db.algebra.type.AlgRecordType; import org.polypheny.db.algebra.type.StructKind; -import org.polypheny.db.catalog.Catalog; +import org.polypheny.db.catalog.entity.physical.PhysicalTable; public class MongoRowType extends AlgRecordType { @@ -33,7 +33,7 @@ public class MongoRowType extends AlgRecordType { public MongoRowType( StructKind kind, List fields, MongoEntity mongoEntity ) { 
super( kind, fields ); - Catalog.getInstance().getColumns( mongoEntity.getId() ).forEach( column -> { + mongoEntity.physical.unwrap( PhysicalTable.class ).columns.forEach( column -> { idToName.put( column.id, column.name ); nameToId.put( column.name, column.id ); } ); diff --git a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoStoreCatalog.java b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoStoreCatalog.java index 87ca13412b..f3566c91f7 100644 --- a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoStoreCatalog.java +++ b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoStoreCatalog.java @@ -16,9 +16,9 @@ package org.polypheny.db.adapter.mongodb; -import org.polypheny.db.catalog.catalogs.StoreCatalog; +import org.polypheny.db.catalog.catalogs.RelStoreCatalog; -public class MongoStoreCatalog extends StoreCatalog { +public class MongoStoreCatalog extends RelStoreCatalog { public MongoStoreCatalog( long adapterId ) { super( adapterId ); diff --git a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoAggregate.java b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/rules/MongoAggregate.java similarity index 89% rename from plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoAggregate.java rename to plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/rules/MongoAggregate.java index 2154be193f..a8c719dc51 100644 --- a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoAggregate.java +++ b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/rules/MongoAggregate.java @@ -12,28 +12,15 @@ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
- * - * This file incorporates code covered by the following terms: - * - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to you under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. */ -package org.polypheny.db.adapter.mongodb; +package org.polypheny.db.adapter.mongodb.rules; +import java.util.AbstractList; +import java.util.ArrayList; +import java.util.List; +import org.polypheny.db.adapter.mongodb.MongoAlg; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.InvalidAlgException; import org.polypheny.db.algebra.core.Aggregate; @@ -49,10 +36,6 @@ import org.polypheny.db.util.ImmutableBitSet; import org.polypheny.db.util.Util; -import java.util.AbstractList; -import java.util.ArrayList; -import java.util.List; - /** * Implementation of {@link Aggregate} relational expression in MongoDB. 
diff --git a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/rules/MongoDocumentAggregate.java b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/rules/MongoDocumentAggregate.java new file mode 100644 index 0000000000..4fcefca296 --- /dev/null +++ b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/rules/MongoDocumentAggregate.java @@ -0,0 +1,136 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.db.adapter.mongodb.rules; + +import java.util.AbstractList; +import java.util.ArrayList; +import java.util.List; +import org.jetbrains.annotations.NotNull; +import org.polypheny.db.adapter.mongodb.MongoAlg; +import org.polypheny.db.algebra.AlgNode; +import org.polypheny.db.algebra.core.AggregateCall; +import org.polypheny.db.algebra.core.document.DocumentAggregate; +import org.polypheny.db.algebra.fun.AggFunction; +import org.polypheny.db.algebra.operators.OperatorName; +import org.polypheny.db.languages.OperatorRegistry; +import org.polypheny.db.plan.AlgOptCluster; +import org.polypheny.db.plan.AlgTraitSet; +import org.polypheny.db.schema.trait.ModelTrait; +import org.polypheny.db.sql.language.fun.SqlSingleValueAggFunction; +import org.polypheny.db.sql.language.fun.SqlSumAggFunction; +import org.polypheny.db.sql.language.fun.SqlSumEmptyIsZeroAggFunction; +import org.polypheny.db.util.Util; + +public class MongoDocumentAggregate extends DocumentAggregate implements MongoAlg { + + /** + * Creates a {@link DocumentAggregate}. + * {@link ModelTrait#DOCUMENT} native node of an aggregate. 
+ * + * @param cluster + * @param traits + * @param child + * @param indicator + * @param groupSet + * @param groupSets + * @param aggCalls + * @param names + */ + protected MongoDocumentAggregate( AlgOptCluster cluster, AlgTraitSet traits, AlgNode child, boolean indicator, @NotNull List groupSet, List> groupSets, List aggCalls, List names ) { + super( cluster, traits, child, indicator, groupSet, groupSets, aggCalls, names ); + } + + + @Override + public void implement( Implementor implementor ) { + implementor.visitChild( 0, getInput() ); + List list = new ArrayList<>(); + final List inNames = MongoRules.mongoFieldNames( getInput().getRowType() ); + final List outNames = MongoRules.mongoFieldNames( getRowType() ); + int i = 0; + + final String inName = groupSet.get( 0 ); + list.add( "_id: " + MongoRules.maybeQuote( "$" + inName ) ); + implementor.physicalMapper.add( inName ); + ++i; + + for ( AggregateCall aggCall : aggCalls ) { + list.add( MongoRules.maybeQuote( outNames.get( i++ ) ) + ": " + toMongo( aggCall.getAggregation(), inNames, aggCall.getArgList(), implementor ) ); + } + implementor.add( null, "{$group: " + Util.toString( list, "{", ", ", "}" ) + "}" ); + final List fixups; + + fixups = new AbstractList<>() { + @Override + public String get( int index ) { + final String outName = outNames.get( index ); + return MongoRules.maybeQuote( outName ) + ": " + MongoRules.maybeQuote( "$" + (index == 0 ? 
"_id" : outName) ); + } + + + @Override + public int size() { + return outNames.size(); + } + }; + if ( !groupSet.isEmpty() ) { + implementor.add( null, "{$project: " + Util.toString( fixups, "{", ", ", "}" ) + "}" ); + } + } + + + private String toMongo( AggFunction aggregation, List inNames, List args, Implementor implementor ) { + if ( aggregation.getOperatorName() == OperatorName.COUNT ) { + if ( args.size() == 0 ) { + return "{$sum: 1}"; + } else { + assert args.size() == 1; + final String inName = inNames.get( args.get( 0 ) ); + implementor.physicalMapper.add( inName ); + return "{$sum: {$cond: [ {$eq: [" + MongoRules.quote( inName ) + ", null]}, 0, 1]}}"; + } + } else if ( aggregation instanceof SqlSumAggFunction || aggregation instanceof SqlSumEmptyIsZeroAggFunction ) { + assert args.size() == 1; + final String inName = inNames.get( args.get( 0 ) ); + implementor.physicalMapper.add( inName ); + return "{$sum: " + MongoRules.maybeQuote( "$" + inName ) + "}"; + } else if ( aggregation.getOperatorName() == OperatorName.MIN ) { + assert args.size() == 1; + final String inName = inNames.get( args.get( 0 ) ); + implementor.physicalMapper.add( inName ); + return "{$min: " + MongoRules.maybeQuote( "$" + inName ) + "}"; + } else if ( aggregation.equals( OperatorRegistry.getAgg( OperatorName.MAX ) ) ) { + assert args.size() == 1; + final String inName = inNames.get( args.get( 0 ) ); + implementor.physicalMapper.add( inName ); + return "{$max: " + MongoRules.maybeQuote( "$" + inName ) + "}"; + } else if ( aggregation.getOperatorName() == OperatorName.AVG || aggregation.getKind() == OperatorRegistry.getAgg( OperatorName.AVG ).getKind() ) { + assert args.size() == 1; + final String inName = inNames.get( args.get( 0 ) ); + implementor.physicalMapper.add( inName ); + return "{$avg: " + MongoRules.maybeQuote( "$" + inName ) + "}"; + } else if ( aggregation instanceof SqlSingleValueAggFunction ) { + assert args.size() == 1; + final String inName = inNames.get( args.get( 0 ) 
); + implementor.physicalMapper.add( inName ); + return "{$sum:" + MongoRules.maybeQuote( "$" + inName ) + "}"; + } else { + throw new AssertionError( "unknown aggregate " + aggregation ); + } + } + +} diff --git a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/rules/MongoDocumentModify.java b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/rules/MongoDocumentModify.java new file mode 100644 index 0000000000..7b46ef89a1 --- /dev/null +++ b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/rules/MongoDocumentModify.java @@ -0,0 +1,54 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.db.adapter.mongodb.rules; + +import java.util.List; +import java.util.Map; +import lombok.NonNull; +import org.apache.commons.lang3.NotImplementedException; +import org.polypheny.db.adapter.mongodb.MongoAlg; +import org.polypheny.db.adapter.mongodb.MongoEntity; +import org.polypheny.db.algebra.AlgNode; +import org.polypheny.db.algebra.core.document.DocumentModify; +import org.polypheny.db.plan.AlgTraitSet; +import org.polypheny.db.rex.RexNode; + +public class MongoDocumentModify extends DocumentModify implements MongoAlg { + + + protected MongoDocumentModify( + AlgTraitSet traits, + MongoEntity collection, + AlgNode input, + @NonNull Operation operation, + Map updates, + List removes, + Map renames ) { + super( traits, collection, input, operation, updates, removes, renames ); + } + + + @Override + public void implement( Implementor implementor ) { + Implementor condImplementor = new Implementor( true ); + condImplementor.setStaticRowType( implementor.getStaticRowType() ); + ((MongoAlg) input).implement( condImplementor ); + + throw new NotImplementedException(); + } + +} diff --git a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/rules/MongoDocumentProject.java b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/rules/MongoDocumentProject.java new file mode 100644 index 0000000000..4efeec617f --- /dev/null +++ b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/rules/MongoDocumentProject.java @@ -0,0 +1,66 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.adapter.mongodb.rules; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; +import org.jetbrains.annotations.NotNull; +import org.polypheny.db.adapter.java.JavaTypeFactory; +import org.polypheny.db.adapter.mongodb.MongoAlg; +import org.polypheny.db.algebra.AlgNode; +import org.polypheny.db.algebra.core.document.DocumentProject; +import org.polypheny.db.plan.AlgOptCluster; +import org.polypheny.db.plan.AlgTraitSet; +import org.polypheny.db.rex.RexNode; +import org.polypheny.db.schema.trait.ModelTrait; +import org.polypheny.db.util.Pair; + +public class MongoDocumentProject extends DocumentProject implements MongoAlg { + + /** + * Creates a {@link DocumentProject}. + * {@link ModelTrait#DOCUMENT} native node of a project. 
+ * + * @param cluster + * @param traits + * @param input + * @param includes + * @param excludes + */ + protected MongoDocumentProject( AlgOptCluster cluster, AlgTraitSet traits, AlgNode input, @NotNull Map includes, @NotNull List excludes ) { + super( cluster, traits, input, includes, excludes ); + } + + + @Override + public void implement( Implementor implementor ) { + implementor.visitChild( 0, getInput() ); + List> projects = new ArrayList<>(); + + final MongoRules.RexToMongoTranslator translator = new MongoRules.RexToMongoTranslator( (JavaTypeFactory) getCluster().getTypeFactory(), MongoRules.mongoFieldNames( getInput().getRowType() ), implementor ); + includes.forEach( ( n, p ) -> projects.add( Pair.of( n, p.accept( translator ) ) ) ); + excludes.forEach( n -> projects.add( Pair.of( n, "0" ) ) ); + + String merged = projects.stream().map( p -> "\"" + p.left + "\":" + p.right ).collect( Collectors.joining( "," ) ); + + implementor.add( merged, "{$project: {" + merged + "}}" ); + + } + +} diff --git a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/rules/MongoEntityModify.java b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/rules/MongoEntityModify.java new file mode 100644 index 0000000000..13756a7d43 --- /dev/null +++ b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/rules/MongoEntityModify.java @@ -0,0 +1,507 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.polypheny.db.adapter.mongodb.rules; + +import com.google.common.collect.ImmutableList; +import com.mongodb.client.gridfs.GridFSBucket; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; +import org.bson.BsonArray; +import org.bson.BsonDocument; +import org.bson.BsonString; +import org.bson.BsonValue; +import org.polypheny.db.adapter.mongodb.MongoAlg; +import org.polypheny.db.adapter.mongodb.MongoEntity; +import org.polypheny.db.adapter.mongodb.MongoPlugin.MongoStore; +import org.polypheny.db.adapter.mongodb.MongoRowType; +import org.polypheny.db.adapter.mongodb.bson.BsonDynamic; +import org.polypheny.db.adapter.mongodb.rules.MongoRules.MongoDocuments; +import org.polypheny.db.algebra.AbstractAlgNode; +import org.polypheny.db.algebra.AlgNode; +import org.polypheny.db.algebra.constant.Kind; +import org.polypheny.db.algebra.core.relational.RelModify; +import org.polypheny.db.algebra.metadata.AlgMetadataQuery; +import org.polypheny.db.algebra.type.AlgDataType; +import org.polypheny.db.algebra.type.AlgDataTypeField; +import org.polypheny.db.algebra.type.AlgRecordType; +import org.polypheny.db.catalog.entity.logical.LogicalTable; +import org.polypheny.db.catalog.entity.physical.PhysicalCollection; +import org.polypheny.db.plan.AlgOptCluster; +import org.polypheny.db.plan.AlgOptCost; +import org.polypheny.db.plan.AlgOptPlanner; +import org.polypheny.db.plan.AlgTraitSet; +import org.polypheny.db.rex.RexCall; +import org.polypheny.db.rex.RexDynamicParam; +import org.polypheny.db.rex.RexFieldAccess; +import org.polypheny.db.rex.RexIndexRef; +import org.polypheny.db.rex.RexLiteral; +import org.polypheny.db.rex.RexNode; +import org.polypheny.db.type.PolyType; +import org.polypheny.db.type.entity.PolyValue; 
+import org.polypheny.db.util.BsonUtil; +import org.polypheny.db.util.Pair; + +class MongoEntityModify extends RelModify implements MongoAlg { + + + private final GridFSBucket bucket; + private Implementor implementor; + + + protected MongoEntityModify( + AlgOptCluster cluster, + AlgTraitSet traitSet, + MongoEntity entity, + AlgNode input, + Operation operation, + List updateColumnList, + List sourceExpressionList, + boolean flattened ) { + super( cluster, traitSet, entity, input, operation, updateColumnList, sourceExpressionList, flattened ); + this.bucket = entity.unwrap( MongoEntity.class ).getMongoNamespace().getBucket(); + } + + + @Override + public AlgOptCost computeSelfCost( AlgOptPlanner planner, AlgMetadataQuery mq ) { + return super.computeSelfCost( planner, mq ).multiplyBy( .1 ); + } + + + @Override + public AlgNode copy( AlgTraitSet traitSet, List inputs ) { + return new MongoEntityModify( + getCluster(), + traitSet, + getEntity(), + AbstractAlgNode.sole( inputs ), + getOperation(), + getUpdateColumnList(), + getSourceExpressionList(), + isFlattened() ); + } + + + @Override + public void implement( Implementor implementor ) { + implementor.setDML( true ); + this.implementor = implementor; + + implementor.entity = entity; + implementor.setOperation( this.getOperation() ); + + switch ( this.getOperation() ) { + case INSERT: + if ( input instanceof MongoValues ) { + handleDirectInsert( implementor, ((MongoValues) input) ); + } else if ( input instanceof MongoDocuments ) { + handleDocumentInsert( implementor, ((MongoDocuments) input) ); + } else if ( input instanceof MongoProject ) { + handlePreparedInsert( implementor, ((MongoProject) input) ); + } else { + return; + } + break; + case UPDATE: + Implementor condImplementor = new Implementor( true ); + condImplementor.setStaticRowType( implementor.getStaticRowType() ); + ((MongoAlg) input).implement( condImplementor ); + implementor.filter = condImplementor.filter; + assert condImplementor.getStaticRowType() 
instanceof MongoRowType; + MongoRowType rowType = (MongoRowType) condImplementor.getStaticRowType(); + int pos = 0; + BsonDocument doc = new BsonDocument(); + List docDocs = new ArrayList<>(); + GridFSBucket bucket = implementor.getBucket(); + for ( RexNode el : getSourceExpressionList() ) { + if ( el.isA( Kind.LITERAL ) ) { + doc.append( + rowType.getPhysicalName( getUpdateColumnList().get( pos ), implementor ), + BsonUtil.getAsBson( (RexLiteral) el, bucket ) ); + } else if ( el instanceof RexCall ) { + RexCall call = ((RexCall) el); + if ( Arrays.asList( Kind.PLUS, Kind.MINUS, Kind.TIMES, Kind.DIVIDE ).contains( call.op.getKind() ) ) { + doc.append( + rowType.getPhysicalName( getUpdateColumnList().get( pos ), implementor ), + visitCall( implementor, (RexCall) el, call.op.getKind(), el.getType().getPolyType() ) ); + } else if ( call.op.getKind().belongsTo( Kind.MQL_KIND ) ) { + docDocs.add( handleDocumentUpdate( (RexCall) el, bucket, rowType ) ); + } else { + doc.append( + rowType.getPhysicalName( getUpdateColumnList().get( pos ), implementor ), + BsonUtil.getBsonArray( call, bucket ) ); + } + } else if ( el.isA( Kind.DYNAMIC_PARAM ) ) { + doc.append( + rowType.getPhysicalName( getUpdateColumnList().get( pos ), implementor ), + new BsonDynamic( (RexDynamicParam) el ) ); + } else if ( el.isA( Kind.FIELD_ACCESS ) ) { + doc.append( + rowType.getPhysicalName( getUpdateColumnList().get( pos ), implementor ), + new BsonString( + "$" + rowType.getPhysicalName( + ((RexFieldAccess) el).getField().getName(), implementor ) ) ); + } + pos++; + } + if ( !doc.isEmpty() ) { + BsonDocument update = new BsonDocument().append( "$set", doc ); + + implementor.operations = Collections.singletonList( update ); + } else { + implementor.operations = docDocs; + } + + if ( Pair.right( condImplementor.list ).contains( "{$limit: 1}" ) ) { + implementor.onlyOne = true; + } + + break; + case MERGE: + break; + case DELETE: + Implementor filterCollector = new Implementor( true ); + 
filterCollector.setStaticRowType( implementor.getStaticRowType() ); + ((MongoAlg) input).implement( filterCollector ); + implementor.filter = filterCollector.filter; + if ( Pair.right( filterCollector.list ).contains( "{$limit: 1}" ) ) { + implementor.onlyOne = true; + } + + break; + } + } + + + private BsonDocument handleDocumentUpdate( RexCall el, GridFSBucket bucket, MongoRowType rowType ) { + if ( el.op.getKind() == Kind.MQL_JSONIFY ) { + assert el.getOperands().size() == 1; + return handleDocumentUpdate( (RexCall) el.getOperands().get( 0 ), bucket, rowType ); + } + + BsonDocument doc = new BsonDocument(); + assert el.getOperands().size() >= 2; + assert el.getOperands().get( 0 ) instanceof RexIndexRef; + + String key = getDocParentKey( (RexIndexRef) el.operands.get( 0 ), rowType ); + attachUpdateStep( doc, el, rowType, key ); + + return doc; + } + + + private void attachUpdateStep( BsonDocument doc, RexCall el, MongoRowType rowType, String key ) { + List keys = getDocUpdateKey( (RexIndexRef) el.operands.get( 0 ), (RexCall) el.operands.get( 1 ), rowType ); + switch ( el.op.getKind() ) { + case MQL_UPDATE_REPLACE: + assert el.getOperands().size() == 3; + assert el.getOperands().get( 2 ) instanceof RexCall; + + doc.putAll( getReplaceUpdate( keys, (RexCall) el.operands.get( 2 ), implementor, bucket ) ); + break; + case MQL_ADD_FIELDS: + assert el.getOperands().size() == 3; + assert el.getOperands().get( 2 ) instanceof RexCall; + + doc.putAll( getAddUpdate( keys, (RexCall) el.operands.get( 2 ) ) ); + break; + case MQL_UPDATE_REMOVE: + assert el.getOperands().size() == 2; + + doc.putAll( getRemoveUpdate( keys, (RexCall) el.operands.get( 1 ) ) ); + break; + case MQL_UPDATE_RENAME: + assert el.getOperands().size() == 3; + assert el.getOperands().get( 2 ) instanceof RexCall; + + doc.putAll( getRenameUpdate( keys, key, (RexCall) el.operands.get( 2 ) ) ); + break; + default: + throw new RuntimeException( "The used update operation is not supported by the MongoDB adapter." 
); + } + } + + + private String getDocParentKey( RexIndexRef rexInputRef, MongoRowType rowType ) { + return rowType.getFieldNames().get( rexInputRef.getIndex() ); + } + + + private BsonDocument getRenameUpdate( List keys, String parentKey, RexCall call ) { + BsonDocument doc = new BsonDocument(); + assert keys.size() == call.operands.size(); + int pos = 0; + for ( String key : keys ) { + doc.put( key, new BsonString( parentKey + "." + ((RexLiteral) call.operands.get( pos )).getValueAs( String.class ) ) ); + pos++; + } + + return new BsonDocument( "$rename", doc ); + } + + + private BsonDocument getRemoveUpdate( List keys, RexCall call ) { + BsonDocument doc = new BsonDocument(); + for ( String key : keys ) { + doc.put( key, new BsonString( "" ) ); + } + + return new BsonDocument( "$unset", doc ); + } + + + private BsonDocument getAddUpdate( List keys, RexCall call ) { + BsonDocument doc = new BsonDocument(); + assert keys.size() == call.operands.size(); + int pos = 0; + for ( String key : keys ) { + doc.put( key, BsonUtil.getAsBson( (RexLiteral) call.operands.get( pos ), this.bucket ) ); + pos++; + } + + return new BsonDocument( "$set", doc ); + } + + + public static BsonDocument getReplaceUpdate( List keys, RexCall call, Implementor implementor, GridFSBucket bucket ) { + BsonDocument doc = new BsonDocument(); + assert keys.size() == call.operands.size(); + + int pos = 0; + for ( RexNode operand : call.operands ) { + if ( !(operand instanceof RexCall) ) { + doc.append( "$set", new BsonDocument( keys.get( pos ), BsonUtil.getAsBson( (RexLiteral) operand, bucket ) ) ); + } else { + RexCall op = (RexCall) operand; + implementor.isDocumentUpdate = true; + switch ( op.getKind() ) { + case PLUS: + doc.append( "$inc", new BsonDocument( keys.get( pos ), BsonUtil.getAsBson( (RexLiteral) op.operands.get( 1 ), bucket ) ) ); + break; + case TIMES: + doc.append( "$mul", new BsonDocument( keys.get( pos ), BsonUtil.getAsBson( (RexLiteral) op.operands.get( 1 ), bucket ) ) ); + 
break; + case MIN: + doc.append( "$min", new BsonDocument( keys.get( pos ), BsonUtil.getAsBson( (RexLiteral) op.operands.get( 1 ), bucket ) ) ); + break; + case MAX: + doc.append( "$max", new BsonDocument( keys.get( pos ), BsonUtil.getAsBson( (RexLiteral) op.operands.get( 1 ), bucket ) ) ); + break; + } + } + pos++; + } + + return doc; + } + + + private List getDocUpdateKey( RexIndexRef row, RexCall subfield, MongoRowType rowType ) { + String name = rowType.getFieldNames().get( row.getIndex() ); + return subfield + .operands + .stream() + .map( n -> ((RexLiteral) n).getValueAs( String.class ) ) + .map( n -> name + "." + n ) + .collect( Collectors.toList() ); + } + + + private void handleDocumentInsert( Implementor implementor, MongoDocuments documents ) { + implementor.operations = documents.documents + .stream() + .filter( PolyValue::isDocument ) + .map( d -> BsonDocument.parse( d.toJson() ) ) + .collect( Collectors.toList() ); + } + + + private BsonValue visitCall( Implementor implementor, RexCall call, Kind op, PolyType type ) { + BsonDocument doc = new BsonDocument(); + + BsonArray array = new BsonArray(); + for ( RexNode operand : call.operands ) { + if ( operand.getKind() == Kind.FIELD_ACCESS ) { + String physicalName = "$" + implementor.getPhysicalName( ((RexFieldAccess) operand).getField().getName() ); + array.add( new BsonString( physicalName ) ); + } else if ( operand instanceof RexCall ) { + array.add( visitCall( implementor, (RexCall) operand, ((RexCall) operand).op.getKind(), type ) ); + } else if ( operand.getKind() == Kind.LITERAL ) { + array.add( BsonUtil.getAsBson( ((RexLiteral) operand).getValueAs( BsonUtil.getClassFromType( type ) ), type, bucket ) ); + } else if ( operand.getKind() == Kind.DYNAMIC_PARAM ) { + array.add( new BsonDynamic( (RexDynamicParam) operand ) ); + } else { + throw new RuntimeException( "Not implemented yet" ); + } + } + switch ( op ) { + case PLUS: + doc.append( "$add", array ); + break; + case MINUS: + doc.append( 
"$subtract", array ); + break; + case TIMES: + doc.append( "$multiply", array ); + break; + case DIVIDE: + doc.append( "$divide", array ); + break; + default: + throw new RuntimeException( "Not implemented yet" ); + } + + return doc; + } + + + private void handlePreparedInsert( Implementor implementor, MongoProject input ) { + if ( !(input.getInput() instanceof MongoValues || input.getInput() instanceof MongoDocuments) && input.getInput().getRowType().getFieldList().size() == 1 ) { + return; + } + + BsonDocument doc = new BsonDocument(); + LogicalTable catalogTable = implementor.entity.unwrap( LogicalTable.class ); + GridFSBucket bucket = implementor.getBucket(); + //noinspection AssertWithSideEffects + assert input.getRowType().getFieldCount() == this.getEntity().getRowType().getFieldCount(); + Map physicalMapping; + if ( input.getInput() instanceof MongoValues ) { + physicalMapping = getPhysicalMap( input.getRowType().getFieldList(), catalogTable ); + } else if ( input.getInput() instanceof MongoDocuments ) { + physicalMapping = getPhysicalMap( input.getRowType().getFieldList(), implementor.entity.unwrap( PhysicalCollection.class ) ); + } else { + throw new RuntimeException( "Mapping for physical mongo fields not found" ); + } + + implementor.setStaticRowType( (AlgRecordType) input.getRowType() ); + + int pos = 0; + for ( RexNode rexNode : input.getChildExps() ) { + if ( rexNode instanceof RexDynamicParam ) { + // preparedInsert + doc.append( physicalMapping.get( pos ), new BsonDynamic( (RexDynamicParam) rexNode ) ); + } else if ( rexNode instanceof RexLiteral ) { + doc.append( getPhysicalName( input, catalogTable, pos ), BsonUtil.getAsBson( (RexLiteral) rexNode, bucket ) ); + } else if ( rexNode instanceof RexCall ) { + PolyType type = entity + .getRowType( getCluster().getTypeFactory() ) + .getFieldList() + .get( pos ) + .getType() + .getComponentType() + .getPolyType(); + + doc.append( physicalMapping.get( pos ), getBsonArray( (RexCall) rexNode, type, bucket ) 
); + + } else if ( rexNode.getKind() == Kind.INPUT_REF && input.getInput() instanceof MongoValues ) { + handleDirectInsert( implementor, (MongoValues) input.getInput() ); + return; + } else { + throw new RuntimeException( "This rexType was not considered" ); + } + + pos++; + } + implementor.operations = Collections.singletonList( doc ); + } + + + private Map getPhysicalMap( List fieldList, PhysicalCollection catalogCollection ) { + Map map = new HashMap<>(); + map.put( 0, "d" ); + return map; + } + + + private Map getPhysicalMap( List fieldList, LogicalTable table ) { + Map map = new HashMap<>(); + List names = table.getColumnNames(); + List ids = table.getColumnIds(); + int pos = 0; + for ( String name : Pair.left( fieldList ) ) { + map.put( pos, MongoStore.getPhysicalColumnName( name, ids.get( names.indexOf( name ) ) ) ); + pos++; + } + return map; + } + + + private String getPhysicalName( MongoProject input, LogicalTable table, int pos ) { + String logicalName = input.getRowType().getFieldNames().get( pos ); + int index = table.getColumnNames().indexOf( logicalName ); + return MongoStore.getPhysicalColumnName( logicalName, table.getColumnIds().get( index ) ); + } + + + private BsonValue getBsonArray( RexCall el, PolyType type, GridFSBucket bucket ) { + if ( el.op.getKind() == Kind.ARRAY_VALUE_CONSTRUCTOR ) { + BsonArray array = new BsonArray(); + array.addAll( el.operands.stream().map( operand -> { + if ( operand instanceof RexLiteral ) { + return BsonUtil.getAsBson( BsonUtil.getMongoComparable( type, (RexLiteral) operand ), type, bucket ); + } else if ( operand instanceof RexCall ) { + return getBsonArray( (RexCall) operand, type, bucket ); + } + throw new RuntimeException( "The given RexCall could not be transformed correctly." ); + } ).collect( Collectors.toList() ) ); + return array; + } + throw new RuntimeException( "The given RexCall could not be transformed correctly." 
); + } + + + private void handleDirectInsert( Implementor implementor, MongoValues values ) { + List docs = new ArrayList<>(); + LogicalTable catalogTable = implementor.entity.unwrap( LogicalTable.class ); + GridFSBucket bucket = implementor.bucket; + + AlgDataType valRowType = rowType; + + if ( valRowType == null ) { + valRowType = values.getRowType(); + } + + List columnNames = catalogTable.getColumnNames(); + List columnIds = catalogTable.getColumnIds(); + for ( ImmutableList literals : values.tuples ) { + BsonDocument doc = new BsonDocument(); + int pos = 0; + for ( RexLiteral literal : literals ) { + String name = valRowType.getFieldNames().get( pos ); + if ( columnNames.contains( name ) ) { + doc.append( + MongoStore.getPhysicalColumnName( name, columnIds.get( columnNames.indexOf( name ) ) ), + BsonUtil.getAsBson( literal, bucket ) ); + } else { + doc.append( + valRowType.getFieldNames().get( pos ), + BsonUtil.getAsBson( literal, bucket ) ); + } + pos++; + } + docs.add( doc ); + } + implementor.operations = docs; + } + +} diff --git a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoFilter.java b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/rules/MongoFilter.java similarity index 98% rename from plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoFilter.java rename to plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/rules/MongoFilter.java index 389a44ed49..2b022ae362 100644 --- a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoFilter.java +++ b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/rules/MongoFilter.java @@ -12,26 +12,9 @@ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
- * - * This file incorporates code covered by the following terms: - * - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to you under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. */ -package org.polypheny.db.adapter.mongodb; +package org.polypheny.db.adapter.mongodb.rules; import com.google.common.collect.ImmutableList; import com.mongodb.client.gridfs.GridFSBucket; @@ -55,6 +38,8 @@ import org.bson.BsonValue; import org.bson.json.JsonMode; import org.bson.json.JsonWriterSettings; +import org.polypheny.db.adapter.mongodb.MongoAlg; +import org.polypheny.db.adapter.mongodb.MongoRowType; import org.polypheny.db.adapter.mongodb.bson.BsonDynamic; import org.polypheny.db.adapter.mongodb.bson.BsonFunctionHelper; import org.polypheny.db.algebra.AlgNode; diff --git a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoProject.java b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/rules/MongoProject.java similarity index 87% rename from plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoProject.java rename to plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/rules/MongoProject.java index 751ddb32ad..4ad43b96bd 100644 --- 
a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoProject.java +++ b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/rules/MongoProject.java @@ -12,34 +12,23 @@ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. - * - * This file incorporates code covered by the following terms: - * - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to you under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
*/ -package org.polypheny.db.adapter.mongodb; +package org.polypheny.db.adapter.mongodb.rules; import com.google.common.collect.Streams; +import java.util.ArrayList; +import java.util.List; +import java.util.stream.Collectors; +import java.util.stream.Stream; import lombok.Getter; import org.bson.BsonDocument; import org.bson.json.JsonMode; import org.bson.json.JsonWriterSettings; import org.polypheny.db.adapter.java.JavaTypeFactory; +import org.polypheny.db.adapter.mongodb.MongoAlg; +import org.polypheny.db.adapter.mongodb.MongoRowType; import org.polypheny.db.adapter.mongodb.bson.BsonFunctionHelper; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.constant.Kind; @@ -56,11 +45,6 @@ import org.polypheny.db.util.Pair; import org.polypheny.db.util.Util; -import java.util.ArrayList; -import java.util.List; -import java.util.stream.Collectors; -import java.util.stream.Stream; - /** * Implementation of {@link Project} relational expression in MongoDB. @@ -154,7 +138,7 @@ public void implement( Implementor implementor ) { } List mergedItems; - if ( documents.size() != 0 ) { + if ( !documents.isEmpty() ) { String functions = documents.toJson( JsonWriterSettings.builder().outputMode( JsonMode.RELAXED ).build() ); mergedItems = Streams.concat( items.stream(), Stream.of( functions.substring( 1, functions.length() - 1 ) ) ) .collect( Collectors.toList() ); @@ -171,11 +155,11 @@ public void implement( Implementor implementor ) { implementor.hasProject = true; if ( !implementor.isDML() && items.size() + documents.size() != 0 ) { implementor.add( op.left, op.right ); - if ( unwinds.size() != 0 ) { + if ( !unwinds.isEmpty() ) { implementor.add( Util.toString( unwinds, "{", ",", "}" ), Util.toString( unwinds, "{$unwind:", ",", "}" ) ); } } - if ( excludes.size() != 0 ) { + if ( !excludes.isEmpty() ) { String excludeString = Util.toString( excludes, "{", ", ", "}" ); diff --git 
a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoRules.java b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/rules/MongoRules.java similarity index 59% rename from plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoRules.java rename to plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/rules/MongoRules.java index caae60d5ea..b700899917 100644 --- a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoRules.java +++ b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/rules/MongoRules.java @@ -12,33 +12,12 @@ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. - * - * This file incorporates code covered by the following terms: - * - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to you under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
*/ -package org.polypheny.db.adapter.mongodb; +package org.polypheny.db.adapter.mongodb.rules; -import com.google.common.collect.ImmutableList; -import com.mongodb.client.gridfs.GridFSBucket; import java.util.AbstractList; import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -46,14 +25,14 @@ import java.util.stream.Collectors; import lombok.Getter; import org.bson.BsonArray; -import org.bson.BsonDocument; import org.bson.BsonString; import org.bson.BsonValue; import org.polypheny.db.adapter.java.JavaTypeFactory; +import org.polypheny.db.adapter.mongodb.MongoAlg; import org.polypheny.db.adapter.mongodb.MongoAlg.Implementor; -import org.polypheny.db.adapter.mongodb.MongoPlugin.MongoStore; +import org.polypheny.db.adapter.mongodb.MongoConvention; +import org.polypheny.db.adapter.mongodb.MongoEntity; import org.polypheny.db.adapter.mongodb.bson.BsonDynamic; -import org.polypheny.db.algebra.AbstractAlgNode; import org.polypheny.db.algebra.AlgCollations; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.AlgShuttleImpl; @@ -65,7 +44,6 @@ import org.polypheny.db.algebra.core.AlgFactories; import org.polypheny.db.algebra.core.Sort; import org.polypheny.db.algebra.core.Values; -import org.polypheny.db.algebra.core.common.Modify; import org.polypheny.db.algebra.core.document.DocumentModify; import org.polypheny.db.algebra.core.document.DocumentSort; import org.polypheny.db.algebra.core.document.DocumentValues; @@ -79,19 +57,11 @@ import org.polypheny.db.algebra.logical.relational.LogicalAggregate; import org.polypheny.db.algebra.logical.relational.LogicalFilter; import org.polypheny.db.algebra.logical.relational.LogicalProject; -import org.polypheny.db.algebra.metadata.AlgMetadataQuery; import org.polypheny.db.algebra.operators.OperatorName; import org.polypheny.db.algebra.type.AlgDataType; -import org.polypheny.db.algebra.type.AlgDataTypeField; 
-import org.polypheny.db.algebra.type.AlgRecordType; -import org.polypheny.db.catalog.entity.logical.LogicalCollection; -import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.languages.OperatorRegistry; import org.polypheny.db.nodes.Operator; import org.polypheny.db.plan.AlgOptCluster; -import org.polypheny.db.plan.AlgOptCost; -import org.polypheny.db.plan.AlgOptEntity; -import org.polypheny.db.plan.AlgOptPlanner; import org.polypheny.db.plan.AlgOptRule; import org.polypheny.db.plan.AlgTrait; import org.polypheny.db.plan.AlgTraitSet; @@ -99,19 +69,16 @@ import org.polypheny.db.plan.volcano.AlgSubset; import org.polypheny.db.rex.RexCall; import org.polypheny.db.rex.RexDynamicParam; -import org.polypheny.db.rex.RexFieldAccess; import org.polypheny.db.rex.RexIndexRef; import org.polypheny.db.rex.RexLiteral; import org.polypheny.db.rex.RexNode; import org.polypheny.db.rex.RexVisitorImpl; -import org.polypheny.db.schema.Entity; import org.polypheny.db.schema.document.DocumentRules; import org.polypheny.db.schema.types.ModifiableEntity; import org.polypheny.db.sql.language.fun.SqlDatetimePlusOperator; import org.polypheny.db.sql.language.fun.SqlDatetimeSubtractionOperator; import org.polypheny.db.type.PolyType; import org.polypheny.db.type.entity.document.PolyDocument; -import org.polypheny.db.util.BsonUtil; import org.polypheny.db.util.Pair; import org.polypheny.db.util.UnsupportedRexCallVisitor; import org.polypheny.db.util.Util; @@ -578,6 +545,7 @@ public AlgNode convert( AlgNode alg ) { traitSet, convert( sort.getInput(), traitSet.replace( AlgCollations.EMPTY ) ), sort.getCollation(), + sort.getChildExps(), sort.offset, sort.fetch ); } @@ -605,6 +573,7 @@ public AlgNode convert( AlgNode alg ) { traitSet, convert( sort.getInput(), out ), sort.collation, + sort.fieldExps, sort.offset, sort.fetch ); } @@ -727,12 +696,12 @@ private MongoDocumentProjectRule() { public AlgNode convert( AlgNode alg ) { final LogicalDocumentProject project = 
(LogicalDocumentProject) alg; final AlgTraitSet traitSet = project.getTraitSet().replace( out ); - return new MongoProject( + return new MongoDocumentProject( project.getCluster(), traitSet, convert( project.getInput(), out ), - project.projects, - project.getRowType() ); + project.includes, + project.excludes ); } } @@ -756,9 +725,9 @@ private static boolean containsIncompatible( SingleAlg alg ) { * which for one have similar performance to a JavaScript implementation but don't need * maintenance */ + @Getter public static class MongoExcludeVisitor extends RexVisitorImpl { - @Getter private boolean containsIncompatible = false; @@ -811,21 +780,6 @@ public AlgNode convert( AlgNode alg ) { } - public static class MongoValues extends Values implements MongoAlg { - - MongoValues( AlgOptCluster cluster, AlgDataType rowType, ImmutableList> tuples, AlgTraitSet traitSet ) { - super( cluster, rowType, tuples, traitSet ); - } - - - @Override - public void implement( Implementor implementor ) { - - } - - } - - public static class MongoDocumentsRule extends MongoConverterRule { private static final MongoDocumentsRule INSTANCE = new MongoDocumentsRule(); @@ -877,7 +831,7 @@ private static class MongoTableModificationRule extends MongoConverterRule { } - private static boolean mongoSupported( RelModify modify ) { + private static boolean mongoSupported( RelModify modify ) { if ( !modify.isInsert() ) { return true; } @@ -888,9 +842,9 @@ private static boolean mongoSupported( RelModify modify ) { } + @Getter private static class ScanChecker extends AlgShuttleImpl { - @Getter private boolean supported = true; @@ -950,480 +904,24 @@ private static class MongoDocumentModificationRule extends MongoConverterRule { @Override public AlgNode convert( AlgNode alg ) { - final DocumentModify modify = (DocumentModify) alg; - final ModifiableEntity modifiableCollection = modify.getCollection().unwrap( ModifiableEntity.class ); + final DocumentModify modify = (DocumentModify) alg; + final 
ModifiableEntity modifiableCollection = modify.entity.unwrap( ModifiableEntity.class ); if ( modifiableCollection == null ) { return null; } - if ( modify.getCollection().unwrap( MongoEntity.class ) == null ) { + if ( modify.entity.unwrap( MongoEntity.class ) == null ) { return null; } final AlgTraitSet traitSet = modify.getTraitSet().replace( out ); - return new MongoEntityModify( - modify.getCluster(), + return new MongoDocumentModify( traitSet, - modify.getCollection(), - modify.getCatalogReader(), + modify.entity, AlgOptRule.convert( modify.getInput(), traitSet ), modify.operation, - modify.getKeys(), - modify.getUpdates(), - true ); - } - - } - - - private static class MongoEntityModify extends Modify implements MongoAlg { - - - private final GridFSBucket bucket; - private Implementor implementor; - - - protected MongoEntityModify( - AlgOptCluster cluster, - AlgTraitSet traitSet, - AlgOptEntity table, - AlgNode input, - Operation operation, - List updateColumnList, - List sourceExpressionList, - boolean flattened ) { - super( cluster, traitSet, table, input, operation, updateColumnList, sourceExpressionList, flattened ); - this.bucket = table.unwrap( MongoEntity.class ).getMongoNamespace().getBucket(); - } - - - @Override - public AlgOptCost computeSelfCost( AlgOptPlanner planner, AlgMetadataQuery mq ) { - return super.computeSelfCost( planner, mq ).multiplyBy( .1 ); - } - - - @Override - public AlgNode copy( AlgTraitSet traitSet, List inputs ) { - return new MongoEntityModify( - getCluster(), - traitSet, - getEntity(), - getCatalogReader(), - AbstractAlgNode.sole( inputs ), - getOperation(), - getUpdateColumnList(), - getSourceExpressionList(), - isFlattened() ); - } - - - @Override - public void implement( Implementor implementor ) { - implementor.setDML( true ); - Entity preEntity = entity.getEntity(); - this.implementor = implementor; - - if ( !(preEntity instanceof MongoEntity) ) { - throw new RuntimeException( "There seems to be a problem with the 
correct costs for one of stores." ); - } - implementor.mongoEntity = (MongoEntity) preEntity; - implementor.table = table; - implementor.setOperation( this.getOperation() ); - - switch ( this.getOperation() ) { - case INSERT: - if ( input instanceof MongoValues ) { - handleDirectInsert( implementor, ((MongoValues) input) ); - } else if ( input instanceof MongoDocuments ) { - handleDocumentInsert( implementor, ((MongoDocuments) input) ); - } else if ( input instanceof MongoProject ) { - handlePreparedInsert( implementor, ((MongoProject) input) ); - } else { - return; - } - break; - case UPDATE: - MongoAlg.Implementor condImplementor = new Implementor( true ); - condImplementor.setStaticRowType( implementor.getStaticRowType() ); - ((MongoAlg) input).implement( condImplementor ); - implementor.filter = condImplementor.filter; - assert condImplementor.getStaticRowType() instanceof MongoRowType; - MongoRowType rowType = (MongoRowType) condImplementor.getStaticRowType(); - int pos = 0; - BsonDocument doc = new BsonDocument(); - List docDocs = new ArrayList<>(); - GridFSBucket bucket = implementor.mongoEntity.getMongoNamespace().getBucket(); - for ( RexNode el : getSourceExpressionList() ) { - if ( el.isA( Kind.LITERAL ) ) { - doc.append( - rowType.getPhysicalName( getUpdateColumnList().get( pos ), implementor ), - BsonUtil.getAsBson( (RexLiteral) el, bucket ) ); - } else if ( el instanceof RexCall ) { - RexCall call = ((RexCall) el); - if ( Arrays.asList( Kind.PLUS, Kind.PLUS, Kind.TIMES, Kind.DIVIDE ).contains( call.op.getKind() ) ) { - doc.append( - rowType.getPhysicalName( getUpdateColumnList().get( pos ), implementor ), - visitCall( implementor, (RexCall) el, call.op.getKind(), el.getType().getPolyType() ) ); - } else if ( call.op.getKind().belongsTo( Kind.MQL_KIND ) ) { - docDocs.add( handleDocumentUpdate( (RexCall) el, bucket, rowType ) ); - } else { - doc.append( - rowType.getPhysicalName( getUpdateColumnList().get( pos ), implementor ), - BsonUtil.getBsonArray( 
call, bucket ) ); - } - } else if ( el.isA( Kind.DYNAMIC_PARAM ) ) { - doc.append( - rowType.getPhysicalName( getUpdateColumnList().get( pos ), implementor ), - new BsonDynamic( (RexDynamicParam) el ) ); - } else if ( el.isA( Kind.FIELD_ACCESS ) ) { - doc.append( - rowType.getPhysicalName( getUpdateColumnList().get( pos ), implementor ), - new BsonString( - "$" + rowType.getPhysicalName( - ((RexFieldAccess) el).getField().getName(), implementor ) ) ); - } - pos++; - } - if ( doc.size() > 0 ) { - BsonDocument update = new BsonDocument().append( "$set", doc ); - - implementor.operations = Collections.singletonList( update ); - } else { - implementor.operations = docDocs; - } - - if ( Pair.right( condImplementor.list ).contains( "{$limit: 1}" ) ) { - implementor.onlyOne = true; - } - - break; - case MERGE: - break; - case DELETE: - MongoAlg.Implementor filterCollector = new Implementor( true ); - filterCollector.setStaticRowType( implementor.getStaticRowType() ); - ((MongoAlg) input).implement( filterCollector ); - implementor.filter = filterCollector.filter; - if ( Pair.right( filterCollector.list ).contains( "{$limit: 1}" ) ) { - implementor.onlyOne = true; - } - - break; - } - } - - - private BsonDocument handleDocumentUpdate( RexCall el, GridFSBucket bucket, MongoRowType rowType ) { - if ( el.op.getKind() == Kind.MQL_JSONIFY ) { - assert el.getOperands().size() == 1; - return handleDocumentUpdate( (RexCall) el.getOperands().get( 0 ), bucket, rowType ); - } - - BsonDocument doc = new BsonDocument(); - assert el.getOperands().size() >= 2; - assert el.getOperands().get( 0 ) instanceof RexInputRef; - - String key = getDocParentKey( (RexInputRef) el.operands.get( 0 ), rowType ); - attachUpdateStep( doc, el, rowType, key ); - - return doc; - } - - - private void attachUpdateStep( BsonDocument doc, RexCall el, MongoRowType rowType, String key ) { - List keys = getDocUpdateKey( (RexInputRef) el.operands.get( 0 ), (RexCall) el.operands.get( 1 ), rowType ); - switch ( 
el.op.getKind() ) { - case MQL_UPDATE_REPLACE: - assert el.getOperands().size() == 3; - assert el.getOperands().get( 2 ) instanceof RexCall; - - doc.putAll( getReplaceUpdate( keys, (RexCall) el.operands.get( 2 ) ) ); - break; - case MQL_ADD_FIELDS: - assert el.getOperands().size() == 3; - assert el.getOperands().get( 2 ) instanceof RexCall; - - doc.putAll( getAddUpdate( keys, (RexCall) el.operands.get( 2 ) ) ); - break; - case MQL_UPDATE_REMOVE: - assert el.getOperands().size() == 2; - - doc.putAll( getRemoveUpdate( keys, (RexCall) el.operands.get( 1 ) ) ); - break; - case MQL_UPDATE_RENAME: - assert el.getOperands().size() == 3; - assert el.getOperands().get( 2 ) instanceof RexCall; - - doc.putAll( getRenameUpdate( keys, key, (RexCall) el.operands.get( 2 ) ) ); - break; - default: - throw new RuntimeException( "The used update operation is not supported by the MongoDB adapter." ); - } - } - - - private String getDocParentKey( RexInputRef rexInputRef, MongoRowType rowType ) { - return rowType.getFieldNames().get( rexInputRef.getIndex() ); - } - - - private BsonDocument getRenameUpdate( List keys, String parentKey, RexCall call ) { - BsonDocument doc = new BsonDocument(); - assert keys.size() == call.operands.size(); - int pos = 0; - for ( String key : keys ) { - doc.put( key, new BsonString( parentKey + "." 
+ ((RexLiteral) call.operands.get( pos )).getValueAs( String.class ) ) ); - pos++; - } - - return new BsonDocument( "$rename", doc ); - } - - - private BsonDocument getRemoveUpdate( List keys, RexCall call ) { - BsonDocument doc = new BsonDocument(); - for ( String key : keys ) { - doc.put( key, new BsonString( "" ) ); - } - - return new BsonDocument( "$unset", doc ); - } - - - private BsonDocument getAddUpdate( List keys, RexCall call ) { - BsonDocument doc = new BsonDocument(); - assert keys.size() == call.operands.size(); - int pos = 0; - for ( String key : keys ) { - doc.put( key, BsonUtil.getAsBson( (RexLiteral) call.operands.get( pos ), this.bucket ) ); - pos++; - } - - return new BsonDocument( "$set", doc ); - } - - - private BsonDocument getReplaceUpdate( List keys, RexCall call ) { - BsonDocument doc = new BsonDocument(); - assert keys.size() == call.operands.size(); - - int pos = 0; - for ( RexNode operand : call.operands ) { - if ( !(operand instanceof RexCall) ) { - doc.append( "$set", new BsonDocument( keys.get( pos ), BsonUtil.getAsBson( (RexLiteral) operand, this.bucket ) ) ); - } else { - RexCall op = (RexCall) operand; - this.implementor.isDocumentUpdate = true; - switch ( op.getKind() ) { - case PLUS: - doc.append( "$inc", new BsonDocument( keys.get( pos ), BsonUtil.getAsBson( (RexLiteral) op.operands.get( 1 ), this.bucket ) ) ); - break; - case TIMES: - doc.append( "$mul", new BsonDocument( keys.get( pos ), BsonUtil.getAsBson( (RexLiteral) op.operands.get( 1 ), this.bucket ) ) ); - break; - case MIN: - doc.append( "$min", new BsonDocument( keys.get( pos ), BsonUtil.getAsBson( (RexLiteral) op.operands.get( 1 ), this.bucket ) ) ); - break; - case MAX: - doc.append( "$max", new BsonDocument( keys.get( pos ), BsonUtil.getAsBson( (RexLiteral) op.operands.get( 1 ), this.bucket ) ) ); - break; - } - } - pos++; - } - - return doc; - } - - - private List getDocUpdateKey( RexInputRef row, RexCall subfield, MongoRowType rowType ) { - String name = 
rowType.getFieldNames().get( row.getIndex() ); - return subfield - .operands - .stream() - .map( n -> ((RexLiteral) n).getValueAs( String.class ) ) - .map( n -> name + "." + n ) - .collect( Collectors.toList() ); - } - - - private void handleDocumentInsert( Implementor implementor, MongoDocuments documents ) { - implementor.operations = documents.documentTuples - .stream() - .filter( BsonValue::isDocument ) - .map( d -> new BsonDocument( "d", d.asDocument() ) ) - .collect( Collectors.toList() ); - } - - - private BsonValue visitCall( Implementor implementor, RexCall call, Kind op, PolyType type ) { - BsonDocument doc = new BsonDocument(); - - BsonArray array = new BsonArray(); - for ( RexNode operand : call.operands ) { - if ( operand.getKind() == Kind.FIELD_ACCESS ) { - String physicalName = "$" + implementor.getPhysicalName( ((RexFieldAccess) operand).getField().getName() ); - array.add( new BsonString( physicalName ) ); - } else if ( operand instanceof RexCall ) { - array.add( visitCall( implementor, (RexCall) operand, ((RexCall) operand).op.getKind(), type ) ); - } else if ( operand.getKind() == Kind.LITERAL ) { - array.add( BsonUtil.getAsBson( ((RexLiteral) operand).getValueAs( BsonUtil.getClassFromType( type ) ), type, implementor.mongoEntity.getMongoNamespace().getBucket() ) ); - } else if ( operand.getKind() == Kind.DYNAMIC_PARAM ) { - array.add( new BsonDynamic( (RexDynamicParam) operand ) ); - } else { - throw new RuntimeException( "Not implemented yet" ); - } - } - switch ( op ) { - case PLUS: - doc.append( "$add", array ); - break; - case MINUS: - doc.append( "$subtract", array ); - break; - case TIMES: - doc.append( "$multiply", array ); - break; - case DIVIDE: - doc.append( "$divide", array ); - break; - default: - throw new RuntimeException( "Not implemented yet" ); - } - - return doc; - } - - - private void handlePreparedInsert( Implementor implementor, MongoProject input ) { - if ( !(input.getInput() instanceof MongoValues || input.getInput() 
instanceof MongoDocuments) && input.getInput().getRowType().getFieldList().size() == 1 ) { - return; - } - - BsonDocument doc = new BsonDocument(); - LogicalTable catalogTable = implementor.mongoEntity.getCatalogEntity().unwrap( LogicalTable.class ); - GridFSBucket bucket = implementor.mongoEntity.getMongoNamespace().getBucket(); - //noinspection AssertWithSideEffects - assert input.getRowType().getFieldCount() == this.getEntity().getRowType().getFieldCount(); - Map physicalMapping; - if ( input.getInput() instanceof MongoValues ) { - physicalMapping = getPhysicalMap( input.getRowType().getFieldList(), catalogTable ); - } else if ( input.getInput() instanceof MongoDocuments ) { - physicalMapping = getPhysicalMap( input.getRowType().getFieldList(), implementor.mongoEntity.getCatalogCollection() ); - } else { - throw new RuntimeException( "Mapping for physical mongo fields not found" ); - } - - implementor.setStaticRowType( (AlgRecordType) input.getRowType() ); - - int pos = 0; - for ( RexNode rexNode : input.getChildExps() ) { - if ( rexNode instanceof RexDynamicParam ) { - // preparedInsert - doc.append( physicalMapping.get( pos ), new BsonDynamic( (RexDynamicParam) rexNode ) ); - } else if ( rexNode instanceof RexLiteral ) { - doc.append( getPhysicalName( input, catalogTable, pos ), BsonUtil.getAsBson( (RexLiteral) rexNode, bucket ) ); - } else if ( rexNode instanceof RexCall ) { - PolyType type = table - .getEntity() - .getRowType( getCluster().getTypeFactory() ) - .getFieldList() - .get( pos ) - .getType() - .getComponentType() - .getPolyType(); - - doc.append( physicalMapping.get( pos ), getBsonArray( (RexCall) rexNode, type, bucket ) ); - - } else if ( rexNode.getKind() == Kind.INPUT_REF && input.getInput() instanceof MongoValues ) { - handleDirectInsert( implementor, (MongoValues) input.getInput() ); - return; - } else { - throw new RuntimeException( "This rexType was not considered" ); - } - - pos++; - } - implementor.operations = Collections.singletonList( 
doc ); - } - - - private Map getPhysicalMap( List fieldList, LogicalCollection catalogCollection ) { - Map map = new HashMap<>(); - map.put( 0, "d" ); - return map; - } - - - private Map getPhysicalMap( List fieldList, LogicalTable catalogTable ) { - Map map = new HashMap<>(); - List names = catalogTable.getColumnNames(); - List ids = catalogTable.fieldIds; - int pos = 0; - for ( String name : Pair.left( fieldList ) ) { - map.put( pos, MongoStore.getPhysicalColumnName( name, ids.get( names.indexOf( name ) ) ) ); - pos++; - } - return map; - } - - - private String getPhysicalName( MongoProject input, LogicalTable catalogTable, int pos ) { - String logicalName = input.getRowType().getFieldNames().get( pos ); - int index = catalogTable.getColumnNames().indexOf( logicalName ); - return MongoStore.getPhysicalColumnName( logicalName, catalogTable.fieldIds.get( index ) ); - } - - - private BsonValue getBsonArray( RexCall el, PolyType type, GridFSBucket bucket ) { - if ( el.op.getKind() == Kind.ARRAY_VALUE_CONSTRUCTOR ) { - BsonArray array = new BsonArray(); - array.addAll( el.operands.stream().map( operand -> { - if ( operand instanceof RexLiteral ) { - return BsonUtil.getAsBson( BsonUtil.getMongoComparable( type, (RexLiteral) operand ), type, bucket ); - } else if ( operand instanceof RexCall ) { - return getBsonArray( (RexCall) operand, type, bucket ); - } - throw new RuntimeException( "The given RexCall could not be transformed correctly." ); - } ).collect( Collectors.toList() ) ); - return array; - } - throw new RuntimeException( "The given RexCall could not be transformed correctly." 
); - } - - - private void handleDirectInsert( Implementor implementor, MongoValues values ) { - List docs = new ArrayList<>(); - LogicalTable catalogTable = implementor.mongoEntity.getCatalogEntity().unwrap( LogicalTable.class ); - GridFSBucket bucket = implementor.mongoEntity.getMongoNamespace().getBucket(); - - AlgDataType valRowType = rowType; - - if ( valRowType == null ) { - valRowType = values.getRowType(); - } - - List columnNames = catalogTable.getColumnNames(); - List columnIds = catalogTable.fieldIds; - for ( ImmutableList literals : values.tuples ) { - BsonDocument doc = new BsonDocument(); - int pos = 0; - for ( RexLiteral literal : literals ) { - String name = valRowType.getFieldNames().get( pos ); - if ( columnNames.contains( name ) ) { - doc.append( - MongoStore.getPhysicalColumnName( name, columnIds.get( columnNames.indexOf( name ) ) ), - BsonUtil.getAsBson( literal, bucket ) ); - } else { - doc.append( - rowType.getFieldNames().get( pos ), - BsonUtil.getAsBson( literal, bucket ) ); - } - pos++; - } - docs.add( doc ); - } - implementor.operations = docs; + modify.updates, + modify.removes, + modify.renames ); } } @@ -1488,19 +986,15 @@ public AlgNode convert( AlgNode alg ) { final LogicalDocumentAggregate agg = (LogicalDocumentAggregate) alg; final AlgTraitSet traitSet = agg.getTraitSet().replace( out ); - try { - return new MongoAggregate( - alg.getCluster(), - traitSet, - convert( agg.getInput(), traitSet.simplify() ), - agg.indicator, - agg.groupSet, - agg.groupSets, - agg.aggCalls ); - } catch ( InvalidAlgException e ) { - LOGGER.warn( e.toString() ); - return null; - } + return new MongoDocumentAggregate( + alg.getCluster(), + traitSet, + convert( agg.getInput(), traitSet.simplify() ), + agg.indicator, + agg.groupSet, + agg.groupSets, + agg.aggCalls, + agg.names ); } } diff --git a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoScan.java 
b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/rules/MongoScan.java similarity index 70% rename from plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoScan.java rename to plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/rules/MongoScan.java index dfff8a2be8..b76f8d69fb 100644 --- a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoScan.java +++ b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/rules/MongoScan.java @@ -12,29 +12,14 @@ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. - * - * This file incorporates code covered by the following terms: - * - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to you under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
*/ -package org.polypheny.db.adapter.mongodb; +package org.polypheny.db.adapter.mongodb.rules; import java.util.List; +import org.polypheny.db.adapter.mongodb.MongoAlg; +import org.polypheny.db.adapter.mongodb.MongoEntity; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.core.relational.RelScan; import org.polypheny.db.algebra.metadata.AlgMetadataQuery; @@ -42,7 +27,6 @@ import org.polypheny.db.algebra.type.AlgRecordType; import org.polypheny.db.plan.AlgOptCluster; import org.polypheny.db.plan.AlgOptCost; -import org.polypheny.db.plan.AlgOptEntity; import org.polypheny.db.plan.AlgOptPlanner; import org.polypheny.db.plan.AlgOptRule; import org.polypheny.db.plan.AlgTraitSet; @@ -66,7 +50,7 @@ public class MongoScan extends RelScan implements MongoAlg { * @param table Table * @param projectRowType Fields and types to project; null to project raw row */ - protected MongoScan( AlgOptCluster cluster, AlgTraitSet traitSet, MongoEntity table, AlgDataType projectRowType ) { + public MongoScan( AlgOptCluster cluster, AlgTraitSet traitSet, MongoEntity table, AlgDataType projectRowType ) { super( cluster, traitSet, table ); this.projectRowType = projectRowType; @@ -105,8 +89,7 @@ public void register( AlgOptPlanner planner ) { @Override public void implement( Implementor implementor ) { - implementor.mongoEntity = entity; - implementor.table = entity; + implementor.entity = entity; implementor.setStaticRowType( (AlgRecordType) rowType ); implementor.physicalMapper.addAll( rowType.getFieldNames() ); } diff --git a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoSort.java b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/rules/MongoSort.java similarity index 75% rename from plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoSort.java rename to plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/rules/MongoSort.java index ff786268b8..01baa761cf 100644 --- 
a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoSort.java +++ b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/rules/MongoSort.java @@ -12,28 +12,16 @@ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. - * - * This file incorporates code covered by the following terms: - * - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to you under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. */ -package org.polypheny.db.adapter.mongodb; +package org.polypheny.db.adapter.mongodb.rules; +import com.google.common.collect.ImmutableList; +import java.util.ArrayList; +import java.util.List; +import org.jetbrains.annotations.Nullable; +import org.polypheny.db.adapter.mongodb.MongoAlg; import org.polypheny.db.algebra.AlgCollation; import org.polypheny.db.algebra.AlgFieldCollation; import org.polypheny.db.algebra.AlgNode; @@ -48,31 +36,28 @@ import org.polypheny.db.rex.RexNode; import org.polypheny.db.util.Util; -import java.util.ArrayList; -import java.util.List; - /** * Implementation of {@link Sort} relational expression in MongoDB. 
*/ public class MongoSort extends Sort implements MongoAlg { - public MongoSort( AlgOptCluster cluster, AlgTraitSet traitSet, AlgNode child, AlgCollation collation, RexNode offset, RexNode fetch ) { - super( cluster, traitSet, child, collation, offset, fetch ); + public MongoSort( AlgOptCluster cluster, AlgTraitSet traitSet, AlgNode child, AlgCollation collation, @Nullable List fieldExpr, RexNode offset, RexNode fetch ) { + super( cluster, traitSet, child, collation, fieldExpr, offset, fetch ); assert getConvention() == CONVENTION; assert getConvention() == child.getConvention(); } @Override - public AlgOptCost computeSelfCost( AlgOptPlanner planner, AlgMetadataQuery mq ) { - return super.computeSelfCost( planner, mq ).multiplyBy( 0.05 ); + public Sort copy( AlgTraitSet traitSet, AlgNode newInput, AlgCollation newCollation, ImmutableList fieldExps, RexNode offset, RexNode fetch ) { + return new MongoSort( getCluster(), traitSet, input, collation, fieldExps, offset, fetch ); } @Override - public Sort copy( AlgTraitSet traitSet, AlgNode input, AlgCollation newCollation, RexNode offset, RexNode fetch ) { - return new MongoSort( getCluster(), traitSet, input, collation, offset, fetch ); + public AlgOptCost computeSelfCost( AlgOptPlanner planner, AlgMetadataQuery mq ) { + return super.computeSelfCost( planner, mq ).multiplyBy( 0.05 ); } diff --git a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoToEnumerableConverter.java b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/rules/MongoToEnumerableConverter.java similarity index 88% rename from plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoToEnumerableConverter.java rename to plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/rules/MongoToEnumerableConverter.java index 214b58e0a8..87b6da8722 100644 --- a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoToEnumerableConverter.java +++ 
b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/rules/MongoToEnumerableConverter.java @@ -12,26 +12,9 @@ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. - * - * This file incorporates code covered by the following terms: - * - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to you under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
*/ -package org.polypheny.db.adapter.mongodb; +package org.polypheny.db.adapter.mongodb.rules; import com.google.common.collect.Lists; @@ -42,6 +25,8 @@ import org.apache.calcite.linq4j.tree.Expression; import org.apache.calcite.linq4j.tree.Expressions; import org.apache.calcite.linq4j.tree.MethodCallExpression; +import org.polypheny.db.adapter.mongodb.MongoAlg.Implementor; +import org.polypheny.db.adapter.mongodb.MongoMethod; import org.polypheny.db.algebra.AbstractAlgNode; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.convert.ConverterImpl; @@ -92,13 +77,13 @@ public AlgOptCost computeSelfCost( AlgOptPlanner planner, AlgMetadataQuery mq ) @Override public Result implement( EnumerableAlgImplementor implementor, Prefer pref ) { final BlockBuilder list = new BlockBuilder(); - final MongoAlg.Implementor mongoImplementor = new MongoAlg.Implementor(); + final Implementor mongoImplementor = new Implementor(); mongoImplementor.visitChild( 0, getInput() ); final AlgDataType rowType = getRowType(); final PhysType physType = PhysTypeImpl.of( implementor.getTypeFactory(), rowType, pref.prefer( JavaRowFormat.ARRAY ) ); - if ( mongoImplementor.table == null ) { + if ( mongoImplementor.entity == null ) { return implementor.result( physType, new BlockBuilder().toBlock() ); } @@ -152,7 +137,7 @@ public int size() { } ), Pair.class ) ); - final Expression table = list.append( "table", mongoImplementor.table.asExpression()/*.getExpression( MongoEntity.MongoQueryable.class )*/ ); + final Expression table = list.append( "table", mongoImplementor.entity.asExpression()/*.getExpression( MongoEntity.MongoQueryable.class )*/ ); List opList = Pair.right( mongoImplementor.list ); diff --git a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoToEnumerableConverterRule.java b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/rules/MongoToEnumerableConverterRule.java similarity index 65% rename from 
plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoToEnumerableConverterRule.java rename to plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/rules/MongoToEnumerableConverterRule.java index d44b4b60a4..ddde3fd0a5 100644 --- a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/MongoToEnumerableConverterRule.java +++ b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/rules/MongoToEnumerableConverterRule.java @@ -12,32 +12,16 @@ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. - * - * This file incorporates code covered by the following terms: - * - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to you under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
*/ -package org.polypheny.db.adapter.mongodb; +package org.polypheny.db.adapter.mongodb.rules; -import org.polypheny.db.adapter.enumerable.EnumerableConvention; +import org.polypheny.db.adapter.mongodb.MongoAlg; import org.polypheny.db.algebra.AlgNode; import org.polypheny.db.algebra.convert.ConverterRule; import org.polypheny.db.algebra.core.AlgFactories; +import org.polypheny.db.algebra.enumerable.EnumerableConvention; import org.polypheny.db.plan.AlgTraitSet; import org.polypheny.db.tools.AlgBuilderFactory; diff --git a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/rules/MongoValues.java b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/rules/MongoValues.java new file mode 100644 index 0000000000..82dc6caf98 --- /dev/null +++ b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/rules/MongoValues.java @@ -0,0 +1,39 @@ +/* + * Copyright 2019-2023 The Polypheny Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.polypheny.db.adapter.mongodb.rules; + +import com.google.common.collect.ImmutableList; +import org.polypheny.db.adapter.mongodb.MongoAlg; +import org.polypheny.db.algebra.core.Values; +import org.polypheny.db.algebra.type.AlgDataType; +import org.polypheny.db.plan.AlgOptCluster; +import org.polypheny.db.plan.AlgTraitSet; +import org.polypheny.db.rex.RexLiteral; + +public class MongoValues extends Values implements MongoAlg { + + MongoValues( AlgOptCluster cluster, AlgDataType rowType, ImmutableList> tuples, AlgTraitSet traitSet ) { + super( cluster, rowType, tuples, traitSet ); + } + + + @Override + public void implement( Implementor implementor ) { + + } + +} diff --git a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/util/MongoTypeUtil.java b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/util/MongoTypeUtil.java index 8c03e9208f..a9726996ac 100644 --- a/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/util/MongoTypeUtil.java +++ b/plugins/mongodb-adapter/src/main/java/org/polypheny/db/adapter/mongodb/util/MongoTypeUtil.java @@ -30,13 +30,13 @@ public class MongoTypeUtil { - public static BsonDocument getPhysicalProjections( List logicalCols, LogicalTable catalogTable ) { + public static BsonDocument getPhysicalProjections( List logicalCols, LogicalTable table ) { BsonDocument projections = new BsonDocument(); - List names = catalogTable.getColumnNames(); + List names = table.getColumnNames(); for ( String logicalCol : logicalCols ) { int index = names.indexOf( logicalCol ); if ( index != -1 ) { - projections.append( logicalCol, new BsonString( "$" + MongoStore.getPhysicalColumnName( logicalCol, catalogTable.fieldIds.get( index ) ) ) ); + projections.append( logicalCol, new BsonString( "$" + MongoStore.getPhysicalColumnName( logicalCol, table.getColumnIds().get( index ) ) ) ); } else { projections.append( logicalCol, new BsonInt32( 1 ) ); } diff --git 
a/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/PostgresqlPlugin.java b/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/PostgresqlPlugin.java index 39659ddbeb..28360f023a 100644 --- a/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/PostgresqlPlugin.java +++ b/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/PostgresqlPlugin.java @@ -47,8 +47,8 @@ public void start() { "maxConnections", "25" ); - AdapterManager.addAdapterDeploy( PostgresqlStore.class, ADAPTER_NAME, settings, PostgresqlStore::new ); - AdapterManager.addAdapterDeploy( PostgresqlSource.class, ADAPTER_NAME, settings, PostgresqlSource::new ); + AdapterManager.addAdapterTemplate( PostgresqlStore.class, ADAPTER_NAME, settings, PostgresqlStore::new ); + AdapterManager.addAdapterTemplate( PostgresqlSource.class, ADAPTER_NAME, settings, PostgresqlSource::new ); } diff --git a/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java b/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java index 2dbd322443..15e7336555 100644 --- a/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java +++ b/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/source/PostgresqlSource.java @@ -117,7 +117,7 @@ public void refreshTable( long allocId ) { log.warn( "todo" ); return; } - storeCatalog.addTable( currentJdbcSchema.createJdbcTable( storeCatalog, table ) ); + storeCatalog.replacePhysical( currentJdbcSchema.createJdbcTable( storeCatalog, table ) ); } diff --git a/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/store/PostgresqlStore.java b/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/store/PostgresqlStore.java index bbdd6784c1..0dd605b480 100644 --- 
a/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/store/PostgresqlStore.java +++ b/plugins/postgres-adapter/src/main/java/org/polypheny/db/adapter/postgres/store/PostgresqlStore.java @@ -43,6 +43,7 @@ import org.polypheny.db.catalog.entity.logical.LogicalTable; import org.polypheny.db.catalog.entity.physical.PhysicalColumn; import org.polypheny.db.catalog.entity.physical.PhysicalTable; +import org.polypheny.db.catalog.exceptions.GenericRuntimeException; import org.polypheny.db.docker.DockerContainer; import org.polypheny.db.docker.DockerContainer.HostAndPort; import org.polypheny.db.docker.DockerInstance; @@ -144,7 +145,7 @@ protected ConnectionFactory deployRemote() { database = settings.get( "database" ); username = settings.get( "username" ); if ( !testConnection() ) { - throw new RuntimeException( "Unable to connect" ); + throw new GenericRuntimeException( "Unable to connect" ); } return createConnectionFactory(); } diff --git a/webui/src/main/java/org/polypheny/db/webui/Crud.java b/webui/src/main/java/org/polypheny/db/webui/Crud.java index 9713fd26c5..6a912bb076 100644 --- a/webui/src/main/java/org/polypheny/db/webui/Crud.java +++ b/webui/src/main/java/org/polypheny/db/webui/Crud.java @@ -86,6 +86,7 @@ import org.polypheny.db.adapter.DataStore; import org.polypheny.db.adapter.DataStore.FunctionalIndexInfo; import org.polypheny.db.adapter.index.IndexManager; +import org.polypheny.db.adapter.java.AdapterTemplate; import org.polypheny.db.algebra.AlgCollation; import org.polypheny.db.algebra.AlgCollations; import org.polypheny.db.algebra.AlgNode; @@ -2238,8 +2239,8 @@ void addAdapter( final Context ctx ) { if ( a.settings.containsKey( "method" ) ) { method = ConnectionMethod.valueOf( a.settings.get( "method" ).getValue().toUpperCase() ); } - Adapter adapter = AdapterManager.getInstance().getAdapter( a.adapterName ); - Map allSettings = adapter.getAvailableSettings( adapter.getClass() ).stream().collect( Collectors.toMap( e -> e.name, e -> 
e ) ); + AdapterTemplate adapter = AdapterManager.getAdapterType( a.adapterName, a.type ); + Map allSettings = adapter.getAllSettings().stream().collect( Collectors.toMap( e -> e.name, e -> e ) ); for ( AdapterSettingValueModel entry : a.settings.values() ) { AbstractAdapterSetting set = allSettings.get( entry.getName() ); @@ -2295,11 +2296,11 @@ public void startAccessRequest( Context ctx ) { private Exception handleLinkFiles( Context ctx, AdapterModel a, AbstractAdapterSettingDirectory setting, Map settings ) { if ( !settings.containsKey( "directoryName" ) ) { - return new RuntimeException( "Security check for access was not performed; id missing." ); + return new GenericRuntimeException( "Security check for access was not performed; id missing." ); } Path path = Path.of( settings.get( "directoryName" ).defaultValue ); if ( !SecurityManager.getInstance().checkPathAccess( path ) ) { - return new RuntimeException( "Security check for access was not successful; not enough permissions." ); + return new GenericRuntimeException( "Security check for access was not successful; not enough permissions." ); } return null;