diff --git a/bin/startup.bat b/bin/startup.bat index 6644d99a1132..b3fd2f4851a1 100755 --- a/bin/startup.bat +++ b/bin/startup.bat @@ -65,8 +65,24 @@ set JAVA_OPTS=%JAVA_OPTS% -XX:+UseG1GC set JAVA_OPTS=%JAVA_OPTS% -Djava.endorsed.dirs=%DOTCMS_HOME%/WEB-INF/endorsed_libs +for /f tokens^=2-5^ delims^=.-_^" %%j in ('java -fullversion 2^>^&1') do set "JAVA_VERSION=%%j%%k" +echo JAVA_VERSION = %JAVA_VERSION% +set BYTE_BUDDY_VERSION=1.9.0 + +if %JAVA_VERSION% LSS 110 ( + set BYTE_BUDDY_VERSION=1.6.12 +) + +echo Using Byte-Buddy Version: %BYTE_BUDDY_VERSION% + +echo BYTE_BUDDY folder: %CATALINA_HOME%\webapps\ROOT\WEB-INF\lib\byte-buddy-* + +del %CATALINA_HOME%\webapps\ROOT\WEB-INF\lib\byte-buddy-* +copy %CATALINA_HOME%\bin\byte-buddy\%BYTE_BUDDY_VERSION%\* %CATALINA_BASE%\webapps\ROOT\WEB-INF\lib\ + + rem Set agent opts -set JAVA_OPTS=%JAVA_OPTS% -javaagent:%DOTCMS_HOME%/WEB-INF/lib/byte-buddy-agent-1.9.0.jar +set JAVA_OPTS=%JAVA_OPTS% -javaagent:%DOTCMS_HOME%/WEB-INF/lib/byte-buddy-agent-%BYTE_BUDDY_VERSION%.jar rem Uncomment the next line if you want to enable JMX rem set JAVA_OPTS=%JAVA_OPTS% -Dcom.sun.management.jmxremote.port=7788 -Dcom.sun.management.jmxremote.ssl=false -Dcom.sun.management.jmxremote.authenticate=false -Djava.endorsed.dirs=$DOTCMS_HOME/WEB-INF/endorsed_libs diff --git a/bin/startup.sh b/bin/startup.sh index 964dcb21ca19..49e89a4dfd13 100755 --- a/bin/startup.sh +++ b/bin/startup.sh @@ -67,8 +67,23 @@ JAVA_OPTS="$JAVA_OPTS -XX:MaxMetaspaceSize=512m -Xmx1G" # Set GC opts JAVA_OPTS="$JAVA_OPTS -XX:+UseG1GC" +JAVA_VERSION="$(java -version 2>&1 | grep -i version | cut -d'"' -f2 | cut -d'.' -f1-2)" + +echo "JAVA_VERSION: $JAVA_VERSION" +BYTE_BUDDY_VERSION="1.9.0" + +# Lexicographic comparison +if [[ (($JAVA_VERSION < 11.0)) ]]; then + BYTE_BUDDY_VERSION="1.6.12"; +fi + +echo "Using BYTE_BUDDY_VERSION: $BYTE_BUDDY_VERSION" + +rm $DOTCMS_HOME/WEB-INF/lib/byte-buddy-* +cp $TOMCAT_HOME/bin/byte-buddy/$BYTE_BUDDY_VERSION/* $DOTCMS_HOME/WEB-INF/lib/ + # Set agent opts -JAVA_OPTS="$JAVA_OPTS -javaagent:$DOTCMS_HOME/WEB-INF/lib/byte-buddy-agent-1.9.0.jar" +JAVA_OPTS="$JAVA_OPTS -javaagent:$DOTCMS_HOME/WEB-INF/lib/byte-buddy-agent-$BYTE_BUDDY_VERSION.jar" # Set encoding JAVA_OPTS="$JAVA_OPTS -Dsun.jnu.encoding=UTF-8" diff --git a/dotCMS/build.gradle b/dotCMS/build.gradle index 22ed28c8b154..6b564f43bf69 100644 --- a/dotCMS/build.gradle +++ b/dotCMS/build.gradle @@ -183,7 +183,7 @@ dependencies { starter group: 'com.dotcms', name: 'starter', version: 'empty_20210429', ext: 'zip' //Uncomment this line if you want to download the starter that comes with data - //starter group: 'com.dotcms', name: 'starter', version: '20210429', ext: 'zip' + //starter group: 'com.dotcms', name: 'starter', version: '20210512', ext: 'zip' testsStarter group: 'com.dotcms', name: 'starter', version: 'empty_20210429', ext: 'zip' profiler group: 'glowroot-custom', name: 'glowroot-agent', version: '0.13.1' diff --git a/dotCMS/dependencies.gradle b/dotCMS/dependencies.gradle index 6fba40832567..aea82ec2b1ce 100644 --- a/dotCMS/dependencies.gradle +++ b/dotCMS/dependencies.gradle @@ -501,7 +501,7 @@ dependencies { transitive = false } //https://mvnrepository.com/artifact/mysql/mysql-connector-java - providedCompile (group: 'mysql', name: 'mysql-connector-java', version: '8.0.18') { + providedCompile (group: 'mysql', name: 'mysql-connector-java', version: '8.0.19') { transitive = false } //https://mvnrepository.com/artifact/com.microsoft.sqlserver/mssql-jdbc diff --git a/dotCMS/src/curl-test/Content 
Resource.postman_collection.json b/dotCMS/src/curl-test/Content Resource.postman_collection.json index 3934ef14dcfc..a34310c26a78 100644 --- a/dotCMS/src/curl-test/Content Resource.postman_collection.json +++ b/dotCMS/src/curl-test/Content Resource.postman_collection.json @@ -1,6 +1,6 @@ { "info": { - "_postman_id": "0773d5de-70a0-445f-862c-928bcdfa2dda", + "_postman_id": "766b73d8-a8cb-4c5b-ac3f-d971f6712b6d", "name": "Content Resource", "description": "Content Resource test", "schema": "https://schema.getpostman.com/json/collection/v2.1.0/collection.json" @@ -456,14 +456,137 @@ } }, "url": { - "raw": "{{serverURL}}/api/content/", + "raw": "{{serverURL}}/api/content/save/1", "host": [ "{{serverURL}}" ], "path": [ "api", "content", - "" + "save", + "1" + ] + } + }, + "response": [] + }, + { + "name": "XML is not allowed on Save", + "event": [ + { + "listen": "test", + "script": { + "exec": [ + "pm.test(\"Status code should be 400\", function () {", + " pm.response.to.have.status(400);", + "});", + "", + "", + "", + "" + ], + "type": "text/javascript" + } + } + ], + "request": { + "auth": { + "type": "basic", + "basic": [ + { + "key": "password", + "value": "admin", + "type": "string" + }, + { + "key": "username", + "value": "admin@dotcms.com", + "type": "string" + } + ] + }, + "method": "POST", + "header": [], + "body": { + "mode": "raw", + "raw": "\n webPageContent\n 1\n test content\n demo.dotcms.com\n test body\n", + "options": { + "raw": { + "language": "xml" + } + } + }, + "url": { + "raw": "{{serverURL}}/api/content/save/1", + "host": [ + "{{serverURL}}" + ], + "path": [ + "api", + "content", + "save", + "1" + ] + } + }, + "response": [] + }, + { + "name": "XML is not allowed on Publish", + "event": [ + { + "listen": "test", + "script": { + "exec": [ + "pm.test(\"Status code should be 400\", function () {", + " pm.response.to.have.status(400);", + "});", + "", + "", + "", + "" + ], + "type": "text/javascript" + } + } + ], + "request": { + "auth": { + "type": "basic", + "basic": [ + { + "key": "password", + "value": "admin", + "type": "string" + }, + { + "key": "username", + "value": "admin@dotcms.com", + "type": "string" + } + ] + }, + "method": "PUT", + "header": [], + "body": { + "mode": "raw", + "raw": "\n \n \n \n 2\n \n databaseMetaData\n \n \n \n 3\n \n \n \n 1008\n true\n 1000\n 0\n 2\n 0\n 0\n 0\n true\n 1004\n false\n jdbc/dotCMSPool\n \n \n \n \n \n \n -1\n -1\n -1\n -1\n -1\n -1\n -1\n -1\n -1\n -1\n \n \n foo\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n", + "options": { + "raw": { + "language": "xml" + } + } + }, + "url": { + "raw": "{{serverURL}}/api/content/publish/1", + "host": [ + "{{serverURL}}" + ], + "path": [ + "api", + "content", + "publish", + "1" ] } }, diff --git a/dotCMS/src/integration-test/java/com/dotcms/MainSuite.java b/dotCMS/src/integration-test/java/com/dotcms/MainSuite.java index 5f42880d535e..8c34642ea85e 100644 --- a/dotCMS/src/integration-test/java/com/dotcms/MainSuite.java +++ b/dotCMS/src/integration-test/java/com/dotcms/MainSuite.java @@ -96,6 +96,7 @@ import com.dotmarketing.startup.runonce.Task210319CreateStorageTableTest; import com.dotmarketing.startup.runonce.Task210321RemoveOldMetadataFilesTest; import com.dotmarketing.startup.runonce.Task210506UpdateStorageTableTest; +import com.dotmarketing.startup.runonce.Task210510UpdateStorageTableDropMetadataColumnTest; import com.dotmarketing.util.ConfigTest; import com.dotmarketing.util.HashBuilderTest; import com.dotmarketing.util.TestConfig; @@ -413,6 +414,7 @@ FileAssetTemplateUtilTest.class, 
SiteSearchJobImplTest.class, Task210506UpdateStorageTableTest.class, + Task210510UpdateStorageTableDropMetadataColumnTest.class, StaticPushPublishBundleGeneratorTest.class }) public class MainSuite { diff --git a/dotCMS/src/integration-test/java/com/dotcms/content/elasticsearch/business/ESMappingAPITest.java b/dotCMS/src/integration-test/java/com/dotcms/content/elasticsearch/business/ESMappingAPITest.java index d3a6f66b71f4..800a2425620d 100644 --- a/dotCMS/src/integration-test/java/com/dotcms/content/elasticsearch/business/ESMappingAPITest.java +++ b/dotCMS/src/integration-test/java/com/dotcms/content/elasticsearch/business/ESMappingAPITest.java @@ -2,9 +2,11 @@ import static com.dotcms.content.elasticsearch.business.ESMappingAPIImpl.INCLUDE_DOTRAW_METADATA_FIELDS; import static com.dotcms.content.elasticsearch.business.ESMappingAPIImpl.INDEX_DOTRAW_METADATA_FIELDS; +import static com.dotcms.content.elasticsearch.business.ESMappingAPIImpl.NO_METADATA; import static com.dotcms.content.elasticsearch.business.ESMappingAPIImpl.TEXT; import static com.dotcms.content.elasticsearch.business.ESMappingAPIImpl.WRITE_METADATA_ON_REINDEX; import static com.dotcms.datagen.TestDataUtils.getCommentsLikeContentType; +import static com.dotcms.datagen.TestDataUtils.getFileAssetContent; import static com.dotcms.datagen.TestDataUtils.getMultipleImageBinariesContent; import static com.dotcms.datagen.TestDataUtils.getNewsLikeContentType; import static com.dotcms.datagen.TestDataUtils.relateContentTypes; @@ -40,6 +42,7 @@ import com.dotcms.datagen.FileAssetDataGen; import com.dotcms.datagen.SiteDataGen; import com.dotcms.datagen.TestDataUtils; +import com.dotcms.datagen.TestDataUtils.TestFile; import com.dotcms.util.CollectionsUtils; import com.dotcms.util.IntegrationTestInitService; import com.dotmarketing.beans.Host; @@ -450,7 +453,7 @@ public void test_toMap_binary_field_shouldSuccess() throws Exception { assertEquals(320, contentletMap.get("metadata.width")); assertEquals(235, contentletMap.get("metadata.height")); assertEquals(true, contentletMap.get("metadata.isimage")); - assertTrue( contentletMap.get("metadata.content").toString().trim().isEmpty()); + assertTrue( contentletMap.get("metadata.content").toString().trim().equals(NO_METADATA)); } @@ -495,29 +498,24 @@ public void Test_toMap_Metadata_dotRaw() { //Test that with the dotRaw fields generated are part of the list of inclusions Assert.assertTrue(includedDotRawFields.containsAll(dotRawMetaList)); - //Now lets set an empty list to force skipping the defaults - Config.setProperty(INCLUDE_DOTRAW_METADATA_FIELDS, ""); - final Map contentletMapIncludingNone = esMappingAPI - .toMap(multipleBinariesContent); + final Contentlet fileAssetContent = getFileAssetContent(true, 1L, TestFile.PDF); + final Map contentletMapCustomInclude = esMappingAPI + .toMap(fileAssetContent); - //Now lets get the list of metadata keys - final List dotRawMetaListForceNoneExclusion = contentletMapIncludingNone.keySet() - .stream() - .filter(s -> s.startsWith("metadata") && s.endsWith("dotraw")) - .collect(Collectors.toList()); + assertTrue(contentletMapCustomInclude.containsKey("metadata.name")); + assertTrue(contentletMapCustomInclude.containsKey("metadata.name_dotraw")); - Assert.assertTrue(dotRawMetaListForceNoneExclusion.isEmpty()); + assertTrue(contentletMapCustomInclude.containsKey("metadata.path")); + assertTrue(contentletMapCustomInclude.containsKey("metadata.path_dotraw")); - //Now lets set a list with entries to exclude - 
Config.setProperty(INCLUDE_DOTRAW_METADATA_FIELDS, "isImage,content"); - final Map contentletMapCustomInclude = esMappingAPI - .toMap(multipleBinariesContent); + assertTrue(contentletMapCustomInclude.containsKey("metadata.title")); + assertTrue(contentletMapCustomInclude.containsKey("metadata.title_dotraw")); - assertTrue(contentletMapCustomInclude.containsKey("metadata.isimage")); - assertTrue(contentletMapCustomInclude.containsKey("metadata.isimage_dotraw")); + assertTrue(contentletMapCustomInclude.containsKey("metadata.moddate")); + assertTrue(contentletMapCustomInclude.containsKey("metadata.moddate_dotraw")); - assertTrue(contentletMapCustomInclude.containsKey("metadata.content")); - assertTrue(contentletMapCustomInclude.containsKey("metadata.content_dotraw")); + assertTrue(contentletMapCustomInclude.containsKey("metadata.filesize")); + assertTrue(contentletMapCustomInclude.containsKey("metadata.filesize_dotraw")); //Test disconnecting the dot raw fields generation Config.setProperty(INDEX_DOTRAW_METADATA_FIELDS, false); diff --git a/dotCMS/src/integration-test/java/com/dotcms/publishing/PublisherAPIImplTest.java b/dotCMS/src/integration-test/java/com/dotcms/publishing/PublisherAPIImplTest.java index 432e03919342..e300480dcfd5 100644 --- a/dotCMS/src/integration-test/java/com/dotcms/publishing/PublisherAPIImplTest.java +++ b/dotCMS/src/integration-test/java/com/dotcms/publishing/PublisherAPIImplTest.java @@ -1,5 +1,7 @@ package com.dotcms.publishing; +import com.dotcms.contenttype.model.field.Field; +import com.dotcms.contenttype.model.field.TextField; import com.dotcms.contenttype.model.type.ContentType; import com.dotcms.contenttype.transform.contenttype.StructureTransformer; import com.dotcms.datagen.*; @@ -49,6 +51,7 @@ import org.apache.commons.compress.archivers.tar.TarArchiveEntry; import org.apache.commons.compress.archivers.tar.TarArchiveInputStream; import org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream; +import org.elasticsearch.index.fielddata.FieldData; import org.jetbrains.annotations.Nullable; import org.junit.AfterClass; import org.junit.BeforeClass; @@ -131,7 +134,8 @@ public static Object[] publishers() throws Exception { getLinkWithDependencies(), getWorkflowWithDependencies(), getLanguageWithDependencies(), - getRuleWithDependencies() + getRuleWithDependencies(), + getContentWithSeveralVersions() ); final List> publishers = list( GenerateBundlePublisher.class, @@ -143,13 +147,42 @@ public static Object[] publishers() throws Exception { for (final Class publisher : publishers) { for (TestAsset asset : assets) { cases.add(new TestCase(publisher, asset)); - cases.add(new TestCase(publisher, asset)); } } return cases.toArray(); } + private static TestAsset getContentWithSeveralVersions() throws DotDataException, DotSecurityException { + final Host host = new SiteDataGen().nextPersisted(); + + final Field textField = new FieldDataGen().type(TextField.class).next(); + final ContentType contentType = new ContentTypeDataGen() + .field(textField) + .host(host) + .nextPersisted(); + + final Contentlet liveVersion = new ContentletDataGen(contentType) + .setProperty(textField.variable(), "Live versions") + .host(host) + .nextPersisted(); + + ContentletDataGen.publish(liveVersion); + + final Contentlet workingVersion = ContentletDataGen.checkout(liveVersion); + workingVersion.setStringProperty(textField.variable(), "Working versions"); + ContentletDataGen.checkin(workingVersion); + + final WorkflowScheme systemWorkflowScheme = APILocator.getWorkflowAPI() + 
.findSystemWorkflowScheme(); + + final Language defaultLanguage = APILocator.getLanguageAPI().getDefaultLanguage(); + + return new TestAsset(workingVersion, + set(host, systemWorkflowScheme, contentType, liveVersion, defaultLanguage), + "/bundlers-test/contentlet/contentlet/contentlet.content.xml"); + } + private static TestAsset getRuleWithDependencies() { final Host host = new SiteDataGen().nextPersisted(); diff --git a/dotCMS/src/integration-test/java/com/dotcms/storage/StoragePersistenceAPITest.java b/dotCMS/src/integration-test/java/com/dotcms/storage/StoragePersistenceAPITest.java index 7b1430090b1e..12249547bc33 100644 --- a/dotCMS/src/integration-test/java/com/dotcms/storage/StoragePersistenceAPITest.java +++ b/dotCMS/src/integration-test/java/com/dotcms/storage/StoragePersistenceAPITest.java @@ -345,7 +345,6 @@ public static Object[] getRandomTestCases() throws Exception{ * @throws IOException * @throws NoSuchAlgorithmException */ - @Ignore //Ignored since it's failing on mySQL with Packet for query is too large (4,194,425 > 4,194,304). You can change this value on the server by setting the 'max_allowed_packet' variable. @Test @UseDataProvider("getLargeFileTestCases") public void Test_Push_Large_File(final TestCase testCase) diff --git a/dotCMS/src/integration-test/java/com/dotmarketing/common/db/DBTimeZoneCheckTest.java b/dotCMS/src/integration-test/java/com/dotmarketing/common/db/DBTimeZoneCheckTest.java index b87a7b65ac1e..d30fc4ada4c9 100644 --- a/dotCMS/src/integration-test/java/com/dotmarketing/common/db/DBTimeZoneCheckTest.java +++ b/dotCMS/src/integration-test/java/com/dotmarketing/common/db/DBTimeZoneCheckTest.java @@ -1,5 +1,6 @@ package com.dotmarketing.common.db; +import com.dotmarketing.db.DbConnectionFactory; import org.junit.BeforeClass; import org.junit.Test; import com.dotcms.util.IntegrationTestInitService; @@ -17,10 +18,13 @@ public static void prepare() throws Exception { @Test public void test_timezones_work() throws Exception { - assertTrue(DBTimeZoneCheck.isTimeZoneValid("CST6CDT")); - assertFalse(DBTimeZoneCheck.isTimeZoneValid("CST")); - assertTrue(DBTimeZoneCheck.isTimeZoneValid("EST")); - assertFalse(DBTimeZoneCheck.isTimeZoneValid("asdf435ergre")); + if (DbConnectionFactory.isPostgres()) { + assertTrue(DBTimeZoneCheck.isTimeZoneValid("CST6CDT")); + assertFalse(DBTimeZoneCheck.isTimeZoneValid("CST")); + } else { + assertTrue(DBTimeZoneCheck.isTimeZoneValid("CST6CDT")); + assertTrue(DBTimeZoneCheck.isTimeZoneValid("CST")); + } } } diff --git a/dotCMS/src/integration-test/java/com/dotmarketing/startup/runonce/Task210321RemoveOldMetadataFilesTest.java b/dotCMS/src/integration-test/java/com/dotmarketing/startup/runonce/Task210321RemoveOldMetadataFilesTest.java index f5c79b9c04a3..ae60b36e98a6 100644 --- a/dotCMS/src/integration-test/java/com/dotmarketing/startup/runonce/Task210321RemoveOldMetadataFilesTest.java +++ b/dotCMS/src/integration-test/java/com/dotmarketing/startup/runonce/Task210321RemoveOldMetadataFilesTest.java @@ -1,19 +1,24 @@ package com.dotmarketing.startup.runonce; +import static com.dotcms.datagen.TestDataUtils.getFileAssetContent; import static com.dotcms.datagen.TestDataUtils.getMultipleBinariesContent; import static com.dotcms.storage.StoragePersistenceProvider.DEFAULT_STORAGE_TYPE; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import com.dotcms.contenttype.model.field.BinaryField; +import com.dotcms.datagen.TestDataUtils.TestFile; import com.dotcms.storage.StoragePersistenceProvider; import 
com.dotcms.storage.StorageType; +import com.dotcms.tika.TikaUtils; import com.dotcms.util.IntegrationTestInitService; import com.dotmarketing.business.APILocator; import com.dotmarketing.exception.DotDataException; +import com.dotmarketing.exception.DotSecurityException; import com.dotmarketing.portlets.contentlet.model.Contentlet; import com.dotmarketing.util.Config; import io.vavr.Tuple2; +import java.io.File; import java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; @@ -30,9 +35,19 @@ public static void prepare() throws Exception { IntegrationTestInitService.getInstance().init(); } + /** + * Given scenario: Recreate the asset + metadata structure under a temp ASSET_REAL_PATH + * Expected scenario: since we're passing a file asset we expect just one occurrence of the fileasset-metadata.json + * and one occurrence of /metaData/content + * @throws DotDataException + * @throws IOException + * @throws ExecutionException + * @throws InterruptedException + * @throws DotSecurityException + */ @Test public void Test_Upgrade_Task() - throws DotDataException, IOException, ExecutionException, InterruptedException { + throws DotDataException, IOException, ExecutionException, InterruptedException, DotSecurityException { final String assetRealPath = Config.getStringProperty("ASSET_REAL_PATH", null); final String stringProperty = Config.getStringProperty(DEFAULT_STORAGE_TYPE); StoragePersistenceProvider.INSTANCE.get().forceInitialize(); @@ -42,17 +57,20 @@ public void Test_Upgrade_Task() Config.setProperty(DEFAULT_STORAGE_TYPE, StorageType.FILE_SYSTEM.name()); final long langId = APILocator.getLanguageAPI().getDefaultLanguage().getId(); - final Contentlet contentlet = getMultipleBinariesContent(true, langId, null); - + //Recreate the metadata structure + final Contentlet contentlet = getFileAssetContent(true, langId, TestFile.PDF); + //And the metaData/content structure too APILocator.getFileMetadataAPI().generateContentletMetadata(contentlet); + new TikaUtils().generateMetaData(contentlet, true); final Task210321RemoveOldMetadataFiles task = new Task210321RemoveOldMetadataFiles(); assertTrue(task.forceRun()); task.executeUpgrade(); final Tuple2 tuple = task.getFuture().get(); - final long count = contentlet.getContentType().fields(BinaryField.class).stream().filter(field -> null != contentlet.get(field.variable())).count(); - assertTrue(tuple._1 >= count); - assertEquals(0, tuple._2.intValue()); + //1 fileasset-metadata.json + assertEquals(1, tuple._1.intValue()); + //1 metaData/content + assertEquals(1, tuple._2.intValue()); } finally { Config.setProperty(DEFAULT_STORAGE_TYPE, stringProperty); @@ -60,6 +78,7 @@ public void Test_Upgrade_Task() StoragePersistenceProvider.INSTANCE.get().forceInitialize(); } + } diff --git a/dotCMS/src/integration-test/java/com/dotmarketing/startup/runonce/Task210510UpdateStorageTableDropMetadataColumnTest.java b/dotCMS/src/integration-test/java/com/dotmarketing/startup/runonce/Task210510UpdateStorageTableDropMetadataColumnTest.java new file mode 100644 index 000000000000..2fcb1c1eb684 --- /dev/null +++ b/dotCMS/src/integration-test/java/com/dotmarketing/startup/runonce/Task210510UpdateStorageTableDropMetadataColumnTest.java @@ -0,0 +1,88 @@ +package com.dotmarketing.startup.runonce; + +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; + +import com.dotcms.util.IntegrationTestInitService; +import com.dotmarketing.common.db.DotConnect; +import com.dotmarketing.common.db.DotDatabaseMetaData; +import 
com.dotmarketing.db.DbConnectionFactory; +import com.dotmarketing.exception.DotDataException; +import com.dotmarketing.exception.DotRuntimeException; +import java.sql.Connection; +import java.sql.SQLException; +import org.junit.BeforeClass; +import org.junit.Test; + +public class Task210510UpdateStorageTableDropMetadataColumnTest { + @BeforeClass + public static void prepare() throws Exception { + // Setting web app environment + IntegrationTestInitService.getInstance().init(); + } + + + private String getPostgresScript() { + return "ALTER TABLE storage ADD COLUMN metadata text;"; + } + + private String getMyScript() { + return "ALTER TABLE storage ADD COLUMN metadata text;"; + } + + private String getMSSQLScript() { + return "ALTER TABLE storage ADD metadata text;"; + } + + private String getOracleScript() { + return "ALTER TABLE storage ADD metadata NCLOB "; + } + + /** + * Returns the SQL Script depending on the db + * @return + */ + private String getScript() { + if(DbConnectionFactory.isPostgres()){ + return getPostgresScript(); + } + if(DbConnectionFactory.isMySql()){ + return getMyScript(); + } + if(DbConnectionFactory.isMsSql()){ + return getMSSQLScript(); + } + if(DbConnectionFactory.isOracle()){ + return getOracleScript(); + } + throw new DotRuntimeException("Oh snap! dunno what database I'm running on."); + } + + /** + * Given scenario: We intend to test the upgrade task that drops the column if it exists + * Expected result: If the column does not exist it gets added. Once added we test it can be removed and the task won't run again. + * @throws DotDataException + * @throws SQLException + */ + @Test + public void testExecuteUpgrade() throws DotDataException, SQLException { + //First Recreate the old column situation + //add column if it does not exist + final Connection connection = DbConnectionFactory.getConnection(); + final boolean autoCommit = connection.getAutoCommit(); + connection.setAutoCommit(true); + try{ + final DotDatabaseMetaData dotDatabaseMetaData = new DotDatabaseMetaData(); + if (!dotDatabaseMetaData.hasColumn("storage", "metadata")) { + new DotConnect().executeStatement(getScript(), connection); + } + + final Task210510UpdateStorageTableDropMetadataColumn upgradeTask = new Task210510UpdateStorageTableDropMetadataColumn(); + assertTrue(upgradeTask.forceRun()); + upgradeTask.executeUpgrade(); + assertFalse(upgradeTask.forceRun()); + }finally { + connection.setAutoCommit(autoCommit); + } + } +} diff --git a/dotCMS/src/main/java/com/dotcms/concurrent/DotConcurrentFactory.java b/dotCMS/src/main/java/com/dotcms/concurrent/DotConcurrentFactory.java index 30bc226f65d9..523efdf33e79 100644 --- a/dotCMS/src/main/java/com/dotcms/concurrent/DotConcurrentFactory.java +++ b/dotCMS/src/main/java/com/dotcms/concurrent/DotConcurrentFactory.java @@ -738,6 +738,13 @@ public void waitForAll(long timeout, TimeUnit unit) throws ExecutionException { } } + @Override + public void waitForAll() throws ExecutionException { + while(!executorService.isTerminated()) { + waitForAll(10, TimeUnit.MINUTES); + } + } + @Override public long getTaskCount() { throw new UnsupportedOperationException("Submit Delay not supported on single submitter, name: " + this.name); } @@ -986,6 +993,13 @@ public void waitForAll(final long timeout, final TimeUnit unit) { } } + @Override + public void waitForAll(){ + while(!threadPoolExecutor.isTerminated()) { + waitForAll(10, TimeUnit.MINUTES); + } + } + public long getTaskCount() { return threadPoolExecutor.getTaskCount(); } diff --git 
a/dotCMS/src/main/java/com/dotcms/concurrent/DotSubmitter.java b/dotCMS/src/main/java/com/dotcms/concurrent/DotSubmitter.java index d6eaccae6e0a..f08084d63598 100644 --- a/dotCMS/src/main/java/com/dotcms/concurrent/DotSubmitter.java +++ b/dotCMS/src/main/java/com/dotcms/concurrent/DotSubmitter.java @@ -106,6 +106,8 @@ public interface DotSubmitter extends Executor, Serializable { */ void waitForAll(final long timeout, final TimeUnit unit) throws ExecutionException; + void waitForAll() throws ExecutionException; + default void waitForAll(final Collection> futures) throws ExecutionException { for(final Future future : futures) { try { diff --git a/dotCMS/src/main/java/com/dotcms/content/elasticsearch/business/ESMappingAPIImpl.java b/dotCMS/src/main/java/com/dotcms/content/elasticsearch/business/ESMappingAPIImpl.java index 5471f8d56ec8..3e3712613f79 100644 --- a/dotCMS/src/main/java/com/dotcms/content/elasticsearch/business/ESMappingAPIImpl.java +++ b/dotCMS/src/main/java/com/dotcms/content/elasticsearch/business/ESMappingAPIImpl.java @@ -9,6 +9,7 @@ import static com.dotmarketing.business.PermissionAPI.PERMISSION_PUBLISH; import static com.dotmarketing.business.PermissionAPI.PERMISSION_READ; import static com.dotmarketing.business.PermissionAPI.PERMISSION_WRITE; +import static com.dotmarketing.util.UtilMethods.isNotSet; import static com.liferay.util.StringPool.BLANK; import static com.liferay.util.StringPool.PERIOD; @@ -72,6 +73,7 @@ import java.text.DecimalFormat; import java.text.DecimalFormatSymbols; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collections; import java.util.Date; import java.util.HashMap; @@ -436,10 +438,11 @@ private void writeMetadata(final Contentlet contentlet, final StringWriter strin fullMetadataMap.forEach((field, metadataValues) -> { if (null != metadataValues) { - final Set dotRawInclude = Sets.newHashSet( - Config.getStringArrayProperty( + final Set dotRawInclude = + Arrays.stream(Config.getStringArrayProperty( INCLUDE_DOTRAW_METADATA_FIELDS, - defaultIncludedDotRawMetadataFields)); + defaultIncludedDotRawMetadataFields)).map(String::toLowerCase) + .collect(Collectors.toSet()); metadataValues.getFieldsMeta().forEach((metadataKey, metadataValue) -> { @@ -452,7 +455,7 @@ private void writeMetadata(final Contentlet contentlet, final StringWriter strin mapLowered.put(compositeKey, value); if (Config.getBooleanProperty(INDEX_DOTRAW_METADATA_FIELDS, true) - && dotRawInclude.contains(metadataKey)) { + && dotRawInclude.contains(metadataKey.toLowerCase())) { mapLowered.put(compositeKey + DOTRAW, value); } @@ -475,7 +478,7 @@ private void writeMetadata(final Contentlet contentlet, final StringWriter strin */ private Object preProcessMetadataValue(final String compositeKey, final Object value) { if ("metadata.content".equals(compositeKey)) { - if (null == value) { + if (null == value || (value instanceof String && isNotSet((String)value))) { //This "NO_METADATA" constant is getting relocated from tika utils return NO_METADATA; } diff --git a/dotCMS/src/main/java/com/dotcms/publisher/business/PublisherQueueJob.java b/dotCMS/src/main/java/com/dotcms/publisher/business/PublisherQueueJob.java index db8e21f3b342..e7c333921feb 100644 --- a/dotCMS/src/main/java/com/dotcms/publisher/business/PublisherQueueJob.java +++ b/dotCMS/src/main/java/com/dotcms/publisher/business/PublisherQueueJob.java @@ -91,7 +91,6 @@ public class PublisherQueueJob implements StatefulJob { public static final Integer MAX_NUM_TRIES = 
Config.getIntProperty("PUBLISHER_QUEUE_MAX_TRIES", 3); - private Client restClient; private PublishAuditAPI pubAuditAPI = PublishAuditAPI.getInstance(); private PublishingEndPointAPI endpointAPI = APILocator.getPublisherEndPointAPI(); private PublisherAPI pubAPI = PublisherAPI.getInstance(); @@ -101,7 +100,7 @@ public class PublisherQueueJob implements StatefulJob { /** * Reads from the publishing queue table and depending of the publish date * will send a bundle to publish (see - * {@link com.dotcms.publishing.PublisherAPI#publish(PublisherConfig, BundleOutput)}). + * {@link com.dotcms.publishing.PublisherAPI#publish(PublisherConfig)}}). * * @param jobExecutionContext * - Context Containing the current job context information (the @@ -262,6 +261,9 @@ private Map> collectEndpointInfoFromRemote(f final Map> endpointTrackingMap = new HashMap<>(); final PublishAuditHistory localHistory = bundleAudit.getStatusPojo(); final Map> endpointsMap = localHistory.getEndpointsMap(); + + final Client client = getRestClient(); + // For each group (environment) for (final String groupID : endpointsMap.keySet() ) { final Map endpointsGroup = endpointsMap.get(groupID); @@ -276,7 +278,8 @@ private Map> collectEndpointInfoFromRemote(f try { // Try to get the status of the remote end-points to // update the local history - final PublishAuditHistory remoteHistory = getRemoteHistoryFromEndpoint(bundleAudit, targetEndpoint); + final PublishAuditHistory remoteHistory = getRemoteHistoryFromEndpoint( + bundleAudit, targetEndpoint, client); if (remoteHistory != null) { updateLocalPublishDatesFromRemote(localHistory, remoteHistory); endpointTrackingMap.putAll(remoteHistory.getEndpointsMap()); @@ -560,8 +563,9 @@ private void updateLocalPublishDatesFromRemote(final PublishAuditHistory localHi * @return The {@link PublishAuditHistory} of the bundle in the specified end-point. */ private PublishAuditHistory getRemoteHistoryFromEndpoint(final PublishAuditStatus bundleAudit, - final PublishingEndPoint targetEndpoint) { - final WebTarget webTarget = getRestClient().target(targetEndpoint.toURL() + "/api/auditPublishing"); + final PublishingEndPoint targetEndpoint, + final Client client) { + final WebTarget webTarget = client.target(targetEndpoint.toURL() + "/api/auditPublishing"); return PublishAuditHistory.getObjectFromString( webTarget .path("get") @@ -698,10 +702,7 @@ public int getCountGroupSaved() { * @return The REST {@link Client}. 
*/ private Client getRestClient() { - if (null == this.restClient) { - this.restClient = RestClientBuilder.newClient(); - } - return this.restClient; + return RestClientBuilder.newClient(); } } diff --git a/dotCMS/src/main/java/com/dotcms/publisher/pusher/PushPublisher.java b/dotCMS/src/main/java/com/dotcms/publisher/pusher/PushPublisher.java index a421286b9305..c72eec4124e4 100644 --- a/dotCMS/src/main/java/com/dotcms/publisher/pusher/PushPublisher.java +++ b/dotCMS/src/main/java/com/dotcms/publisher/pusher/PushPublisher.java @@ -100,7 +100,6 @@ public class PushPublisher extends Publisher { private PublishAuditAPI pubAuditAPI = PublishAuditAPI.getInstance(); private PublishingEndPointAPI publishingEndPointAPI = APILocator.getPublisherEndPointAPI(); private LocalSystemEventsAPI localSystemEventsAPI = APILocator.getLocalSystemEventsAPI(); - private Client restClient; public static final String PROTOCOL_HTTP = "http"; public static final String PROTOCOL_HTTPS = "https"; @@ -141,6 +140,9 @@ public PublisherConfig process ( final PublishStatus status ) throws DotPublishi throw new RuntimeException("An Enterprise Pro License is required to run this publisher."); } PublishAuditHistory currentStatusHistory = null; + + Client client = getRestClient(); + try { //Compressing bundle File bundleRoot = BundlerUtil.getBundleRoot(this.config.getName(), false); @@ -159,7 +161,6 @@ public PublisherConfig process ( final PublishStatus status ) throws DotPublishi List environments = APILocator.getEnvironmentAPI().findEnvironmentsByBundleId(this.config.getId()); - Client client = getRestClient(); client.property(ClientProperties.REQUEST_ENTITY_PROCESSING, "CHUNKED"); client.property(ClientProperties.CHUNKED_ENCODING_SIZE, 1024); @@ -339,6 +340,8 @@ public PublisherConfig process ( final PublishStatus status ) throws DotPublishi } Logger.error(this.getClass(), e.getMessage(), e); throw new DotPublishingException(e.getMessage(),e); + } finally { + client.close(); } } @@ -532,10 +535,7 @@ public Set getProtocols(){ * @return The REST {@link Client}. 
*/ private Client getRestClient() { - if (null == this.restClient) { - this.restClient = RestClientBuilder.newClient(); - } - return this.restClient; + return RestClientBuilder.newClient(); } /** diff --git a/dotCMS/src/main/java/com/dotcms/publishing/output/BundleOutput.java b/dotCMS/src/main/java/com/dotcms/publishing/output/BundleOutput.java index 8676b33d6504..1a04c981dfa3 100644 --- a/dotCMS/src/main/java/com/dotcms/publishing/output/BundleOutput.java +++ b/dotCMS/src/main/java/com/dotcms/publishing/output/BundleOutput.java @@ -1,9 +1,10 @@ package com.dotcms.publishing.output; +import static com.liferay.util.FileUtil.validateEmptyFile; + import com.dotcms.publishing.PublisherConfig; import com.dotmarketing.util.Config; -import com.dotmarketing.util.Logger; import com.liferay.util.FileUtil; import java.io.*; @@ -27,7 +28,7 @@ public BundleOutput(final PublisherConfig publisherConfig){ * @return * @throws IOException */ - public abstract OutputStream addFile(String filePath) throws IOException; + public abstract OutputStream addFile(String filePath) throws FileCreationException; /** * Add a new file into the output @@ -36,7 +37,7 @@ public BundleOutput(final PublisherConfig publisherConfig){ * @return * @throws IOException */ - public OutputStream addFile(File file) throws IOException { + public OutputStream addFile(File file) throws FileCreationException { return addFile(file.getPath()); } @@ -52,18 +53,30 @@ public void copyFile(File source, String destinationPath) throws IOException { Config.getBooleanProperty("CONTENT_VERSION_HARD_LINK", true) && this.useHardLinkByDefault(); + validateEmptyFile(source); + if (userHardLink) { FileUtil.copyFile(source, getFile(destinationPath), true); } else { - try(final OutputStream outputStream = addFile(destinationPath)) { - FileUtil.copyFile(source, outputStream); - } catch(IOException e) { - Logger.error(FileUtil.class, e); - throw e; - } + innerCopyFile(source, destinationPath); } } + /** + * + * Copy {@code source } to {@code destinationPath}. This method is used by + * {@link BundleOutput#copyFile(File, String)} when {@link BundleOutput#useHardLinkByDefault()} + * returns false; the default implementation uses the {@link FileUtil#copyFile(File, File, boolean)} method to + * copy the file, but it can be overridden by subclasses to provide a custom implementation. + * + * @param source file to be copied + * @param destinationPath destination path to copy to + * @throws IOException if anything goes wrong during the copy + */ + protected void innerCopyFile(final File source, final String destinationPath) throws IOException { + FileUtil.copyFile(source, getFile(destinationPath), useHardLinkByDefault()); + } + /** * return true if by deafult should use hardlink to this output * @return diff --git a/dotCMS/src/main/java/com/dotcms/publishing/output/DirectoryBundleOutput.java b/dotCMS/src/main/java/com/dotcms/publishing/output/DirectoryBundleOutput.java index f347b26265e6..a356d3d55dcc 100644 --- a/dotCMS/src/main/java/com/dotcms/publishing/output/DirectoryBundleOutput.java +++ b/dotCMS/src/main/java/com/dotcms/publishing/output/DirectoryBundleOutput.java @@ -2,13 +2,8 @@ import com.dotcms.publishing.BundlerUtil; import com.dotcms.publishing.PublisherConfig; -import com.dotmarketing.beans.Host; -import com.dotmarketing.util.WebKeys; import com.google.common.annotations.VisibleForTesting; import com.liferay.util.FileUtil; -import io.vavr.Lazy; -import io.vavr.control.Try; -import org.jetbrains.annotations.NotNull; import java.io.File; import java.io.FileFilter; @@ -58,15 +53,19 @@ public void setLastModified(final String filePath, final long timeInMillis) { } @Override - public OutputStream addFile(final String filePath) throws IOException { + public OutputStream addFile(final String filePath) throws FileCreationException { final File fileAbsolute = getRealFile(filePath); fileAbsolute.getParentFile().mkdirs(); - if (!fileAbsolute.exists()) { - fileAbsolute.createNewFile(); - } + try { + if (!fileAbsolute.exists()) { + fileAbsolute.createNewFile(); + } - return Files.newOutputStream( fileAbsolute.toPath()); + return Files.newOutputStream(fileAbsolute.toPath()); + } catch (IOException e) { + throw new FileCreationException(e, filePath); + } } private File getRealFile(final String path) { diff --git a/dotCMS/src/main/java/com/dotcms/publishing/output/FileCreationException.java b/dotCMS/src/main/java/com/dotcms/publishing/output/FileCreationException.java new file mode 100644 index 000000000000..f7a7d39c30d5 --- /dev/null +++ b/dotCMS/src/main/java/com/dotcms/publishing/output/FileCreationException.java @@ -0,0 +1,25 @@ +package com.dotcms.publishing.output; + + +import java.io.IOException; + +public class FileCreationException extends IOException { + + private String filePath; + + public FileCreationException(final Throwable cause, final String filePath) { + super(cause); + this.filePath = filePath; + } + + @Override + public String getMessage(){ + final Throwable cause = this.getCause(); + + final String message = cause.getMessage().contains("Not a directory") ? 
+ "At least one subfolder already exists as a file" : cause.getMessage(); + + return String.format("It is not possible create the File: %s because: %s", filePath, + message); + } +} diff --git a/dotCMS/src/main/java/com/dotcms/publishing/output/TarGzipBundleOutput.java b/dotCMS/src/main/java/com/dotcms/publishing/output/TarGzipBundleOutput.java index eb7ed1d7157c..68ca46bd9764 100644 --- a/dotCMS/src/main/java/com/dotcms/publishing/output/TarGzipBundleOutput.java +++ b/dotCMS/src/main/java/com/dotcms/publishing/output/TarGzipBundleOutput.java @@ -79,14 +79,31 @@ public void setLastModified(String myFile, long timeInMillis){ } + @Override + public void innerCopyFile(final File source, final String destinationPath) throws IOException { + synchronized (tarArchiveOutputStream) { + try { + final TarArchiveEntry tarArchiveEntry = new TarArchiveEntry(destinationPath); + tarArchiveEntry.setSize(source.length()); + + tarArchiveOutputStream.putArchiveEntry(tarArchiveEntry); + IOUtils.copy(new FileInputStream(source), tarArchiveOutputStream); + } finally { + tarArchiveOutputStream.closeArchiveEntry(); + } + } + } + public void mkdirs(final String path) { final TarArchiveEntry tarArchiveEntry = new TarArchiveEntry(path); - try { - tarArchiveOutputStream.putArchiveEntry(tarArchiveEntry); - tarArchiveOutputStream.closeArchiveEntry(); - } catch (IOException e) { - throw new DotRuntimeException(e); + synchronized (tarArchiveOutputStream) { + try { + tarArchiveOutputStream.putArchiveEntry(tarArchiveEntry); + tarArchiveOutputStream.closeArchiveEntry(); + } catch (IOException e) { + throw new DotRuntimeException(e); + } } } @@ -116,9 +133,12 @@ public void close() throws IOException { private void putEntry(byte[] bytes, TarArchiveEntry tarArchiveEntry) throws IOException { synchronized (tarArchiveOutputStream) { - tarArchiveOutputStream.putArchiveEntry(tarArchiveEntry); - IOUtils.copy(new ByteArrayInputStream(bytes), tarArchiveOutputStream); - tarArchiveOutputStream.closeArchiveEntry(); + try { + tarArchiveOutputStream.putArchiveEntry(tarArchiveEntry); + IOUtils.copy(new ByteArrayInputStream(bytes), tarArchiveOutputStream); + } finally { + tarArchiveOutputStream.closeArchiveEntry(); + } } } diff --git a/dotCMS/src/main/java/com/dotcms/rest/ContentResource.java b/dotCMS/src/main/java/com/dotcms/rest/ContentResource.java index eae6a37524ec..121d2590e513 100644 --- a/dotCMS/src/main/java/com/dotcms/rest/ContentResource.java +++ b/dotCMS/src/main/java/com/dotcms/rest/ContentResource.java @@ -101,6 +101,9 @@ @Path("/content") public class ContentResource { + // set this only from an environmental variable so it cannot be overrriden in our Config class + private final boolean USE_XSTREAM_FOR_DESERIALIZATION = System.getenv("USE_XSTREAM_FOR_DESERIALIZATION")!=null && "true".equals(System.getenv("USE_XSTREAM_FOR_DESERIALIZATION")); + public static final String[] ignoreFields = {"disabledWYSIWYG", "lowIndexPriority"}; private static final String RELATIONSHIP_KEY = "__##relationships##__"; @@ -1490,7 +1493,10 @@ private Response multipartPUTandPOST(final HttpServletRequest request,final Http final FormDataMultiPart multipart, final String params, final String method) throws URISyntaxException, DotDataException { - final InitDataObject init = webResource.init(params, request, response, false, null); + final InitDataObject init = new WebResource.InitBuilder(request, response) + .requiredAnonAccess(AnonymousAccess.WRITE) + .params(params) + .init(); final Contentlet contentlet = new Contentlet(); 
setRequestMetadata(contentlet, request); @@ -1541,6 +1547,7 @@ private Response multipartPUTandPOST(final HttpServletRequest request,final Http } } else if (mediaType.equals(MediaType.APPLICATION_XML_TYPE) || name.equals("xml")) { try { + processXML(contentlet, part.getEntityAs(InputStream.class)); } catch (Exception e) { if (e instanceof DotSecurityException) { @@ -1685,7 +1692,10 @@ public Response singlePOST(@Context HttpServletRequest request, private Response singlePUTandPOST(HttpServletRequest request, HttpServletResponse response, String params, String method) throws URISyntaxException { - InitDataObject init = webResource.init(params, request, response,false, null); + final InitDataObject init = new WebResource.InitBuilder(request, response) + .requiredAnonAccess(AnonymousAccess.WRITE) + .params(params) + .init(); Contentlet contentlet = new Contentlet(); setRequestMetadata(contentlet, request); @@ -1695,6 +1705,7 @@ private Response singlePUTandPOST(HttpServletRequest request, HttpServletRespons processJSON(contentlet, request.getInputStream()); } else if (request.getContentType().startsWith(MediaType.APPLICATION_XML)) { try { + processXML(contentlet, request.getInputStream()); } catch (DotSecurityException se) { SecurityLogger.logInfo(this.getClass(), @@ -1968,6 +1979,12 @@ private String getLongActionId (final String shortyId) { protected void processXML(Contentlet contentlet, InputStream inputStream) throws IOException, DotSecurityException, DotDataException { + // github issue #20364 + if(!USE_XSTREAM_FOR_DESERIALIZATION) { + SecurityLogger.logInfo(ContentResource.class, "Insecure XML PUT or Post Detected - possible vulnerability probing"); + throw new DotStateException("Unable to deserialize XML"); + } + String input = IOUtils.toString(inputStream, "UTF-8"); // deal with XXE or SSRF security vunerabilities in XML docs // besides, we do not expect a fully formed xml doc - only an xml doc that can be transformed into a java.util.Map diff --git a/dotCMS/src/main/java/com/dotcms/rest/api/v1/authentication/ApiTokenResource.java b/dotCMS/src/main/java/com/dotcms/rest/api/v1/authentication/ApiTokenResource.java index de331904858c..bb3208579556 100644 --- a/dotCMS/src/main/java/com/dotcms/rest/api/v1/authentication/ApiTokenResource.java +++ b/dotCMS/src/main/java/com/dotcms/rest/api/v1/authentication/ApiTokenResource.java @@ -55,7 +55,6 @@ public class ApiTokenResource implements Serializable { private final ApiTokenAPI tokenApi; private final WebResource webResource; - private Client restClient; /** * Default constructor. 
@@ -266,18 +265,19 @@ public final Response getRemoteToken(@Context final HttpServletRequest httpReque } final String protocol = formData.protocol(); + final String remoteURL = String.format("%s://%s:%d/api/v1/apitoken", protocol, formData.host(), formData.port()); final Client client = getRestClient(); - final String remoteURL = String.format("%s://%s:%d/api/v1/apitoken", protocol, formData.host(), formData.port()); - final WebTarget webTarget = client.target(remoteURL); + try { - String password = ""; + final WebTarget webTarget = client.target(remoteURL); - if (UtilMethods.isSet(formData.password())) { - password = Base64.decodeAsString(formData.password()); - } + String password = ""; + + if (UtilMethods.isSet(formData.password())) { + password = Base64.decodeAsString(formData.password()); + } - try { final Response response = webTarget.request(MediaType.APPLICATION_JSON) .header("Authorization", "Basic " + Base64.encodeAsString(formData.login() + ":" + password)) .post(Entity.entity(formData.getTokenInfo(), MediaType.APPLICATION_JSON)); @@ -305,14 +305,13 @@ public final Response getRemoteToken(@Context final HttpServletRequest httpReque } else { throw e; } + } finally { + client.close(); } } private Client getRestClient() { - if (null == this.restClient) { - this.restClient = RestClientBuilder.newClient(); - } - return this.restClient; + return RestClientBuilder.newClient(); } /** diff --git a/dotCMS/src/main/java/com/dotcms/storage/DataBaseStoragePersistenceAPIImpl.java b/dotCMS/src/main/java/com/dotcms/storage/DataBaseStoragePersistenceAPIImpl.java index 43c8df785e25..f02721935510 100644 --- a/dotCMS/src/main/java/com/dotcms/storage/DataBaseStoragePersistenceAPIImpl.java +++ b/dotCMS/src/main/java/com/dotcms/storage/DataBaseStoragePersistenceAPIImpl.java @@ -25,6 +25,7 @@ import com.liferay.util.Encryptor; import com.liferay.util.Encryptor.Hashing; import com.liferay.util.HashBuilder; +import com.liferay.util.PropertiesUtil; import com.liferay.util.StringPool; import io.vavr.Tuple2; import io.vavr.control.Try; @@ -40,12 +41,14 @@ import java.sql.ResultSet; import java.sql.SQLException; import java.util.Collections; +import java.util.Date; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.Set; import java.util.concurrent.Future; +import java.util.function.Supplier; import java.util.stream.Collectors; import org.apache.commons.lang3.BooleanUtils; import org.apache.commons.lang3.mutable.MutableBoolean; @@ -61,6 +64,7 @@ public class DataBaseStoragePersistenceAPIImpl implements StoragePersistenceAPI { private static final String DATABASE_STORAGE_JDBC_POOL_NAME = "DATABASE_STORAGE_JDBC_POOL_NAME"; + private static final String DB_STORAGE_CHUNK_SIZE = "DB_STORAGE_CHUNK_SIZE"; /** * custom external connection provider method in case we want to store stuff outside our db @@ -405,9 +409,16 @@ private int deleteObjects(final Set storageHashSet, final DotConnect dot count += dotConnect.executeUpdate(connection, "DELETE FROM storage_x_data WHERE storage_hash = ?", storageHash); - for (final String hashId : dataIdHashSet) { - count += dotConnect.executeUpdate(connection, - "DELETE FROM storage_data WHERE hash_id = ?", hashId); + for (final String dataHash : dataIdHashSet) { + + //Check it is safe to remove the data chunk if none else's still referencing it from another entry. 
+ if (dotConnect + .setSQL("SELECT data_hash FROM storage_x_data WHERE data_hash = ?") + .addParam(dataHash).loadObjectResults().isEmpty()) { + count += dotConnect.executeUpdate(connection, + "DELETE FROM storage_data WHERE hash_id = ?", dataHash); + } + } } @@ -499,9 +510,7 @@ private Object pushFileReference(final String groupName, final String path, fina final String pathLC = path.toLowerCase(); Logger.debug(DataBaseStoragePersistenceAPIImpl.class, String.format("Pushing new reference for group [%s] path [%s] hash [%s]", groupNameLC, pathLC, hashRef)); try { - new DotConnect().executeUpdate(connection, - "INSERT INTO storage(hash, path, group_name, hash_ref) VALUES (?, ?, ?, ?)", - fileHash, pathLC, groupNameLC, hashRef); + UpsertDelegate.newInstance().pushObjectReference(connection, fileHash, pathLC, groupNameLC, hashRef); return true; } catch (DotDataException e) { Logger.error(DataBaseStoragePersistenceAPIImpl.class, e.getMessage(), e); @@ -513,7 +522,9 @@ private Object pushNewFile(final String groupName, final String path, final String fileHash, final String hashRef, final File file, final Connection connection ) { final String groupNameLC = groupName.toLowerCase(); final String pathLC = path.toLowerCase(); - try (final FileByteSplitter fileSplitter = new FileByteSplitter(file)) { + final UpsertDelegate upsertDelegate = UpsertDelegate.newInstance(); + try (final FileByteSplitter fileSplitter = new FileByteSplitter(file, + Config.getIntProperty(DB_STORAGE_CHUNK_SIZE, 2048))) { final HashBuilder objectHashBuilder = Encryptor.Hashing.sha256(); final List chunkHashes = new LinkedList<>(); @@ -524,31 +535,17 @@ private Object pushNewFile(final String groupName, final String path, final String chunkHash = Encryptor.Hashing.sha256().append (bytesRead._1(), bytesRead._2()).buildUnixHash(); chunkHashes.add(chunkHash); - - new DotConnect().executeUpdate(connection, - "INSERT INTO storage_data(hash_id, data) VALUES (?, ?)", - chunkHash, - bytesRead._1().length == bytesRead._2() ? - bytesRead._1() : chunkBytes(bytesRead._2(), bytesRead._1())); - + upsertDelegate.pushDataChunk(connection, chunkHash, bytesRead._1().length == bytesRead._2() ? + bytesRead._1() : chunkBytes(bytesRead._2(), bytesRead._1())); } final String objectHash = objectHashBuilder.buildUnixHash(); assert objectHash.equals(fileHash) : "File hash and objectHash must match." ; - int order = 1; - for (final String chunkHash : chunkHashes) { - - new DotConnect().executeUpdate(connection, - "INSERT INTO storage_x_data(storage_hash, data_hash, data_order) VALUES (?, ?, ?)", - objectHash, chunkHash, order++); - } - - new DotConnect().executeUpdate(connection, - "INSERT INTO storage(hash, path, group_name, hash_ref) VALUES (?, ?, ?, ?)", - objectHash, pathLC, groupNameLC, hashRef); + upsertDelegate.pushHashReference(connection, objectHash, chunkHashes); + upsertDelegate.pushObjectReference(connection, objectHash, pathLC, groupNameLC, hashRef); return true; } catch (DotDataException | NoSuchAlgorithmException | IOException e) { @@ -727,7 +724,7 @@ private File createJoinFile(final String hashId, final Connection connection) th if (!hashId.equals(fileHashBuilder.buildUnixHash())) { throw new DotCorruptedDataException(String.format( - "The file hash `%s` isn't valid. it doesn't match the records in `storage_data/storage_data` or they don't exist. ", + "The file hash `%s` isn't valid. it doesn't match the records in `storage_data/storage_x_data` or they don't exist. 
", hashId)); } } catch (Exception e) { @@ -772,4 +769,277 @@ public Future pullObjectAsync(final String groupName, final String path, () -> this.pullObject(groupName, path, readerDelegate) ); } -} + + /** + * SQL Upsert abstract Base Implementation + */ + static abstract class UpsertDelegate { + + final DotConnect dotConnect; + + final Supplier storageInsertSQL; + + final Supplier dataInsertSQL; + + final Supplier hashReferenceInsertSQL; + + UpsertDelegate(final DotConnect dotConnect, + final Supplier storageInsertSQL, + final Supplier dataInsertSQL, + final Supplier hashReferenceInsertSQL) { + this.dotConnect = dotConnect; + this.storageInsertSQL = storageInsertSQL; + this.dataInsertSQL = dataInsertSQL; + this.hashReferenceInsertSQL = hashReferenceInsertSQL; + } + + /** + * inserts reference into storage + * @param connection + * @param objectHash + * @param path + * @param groupName + * @param hashRef + * @throws DotDataException + */ + void pushObjectReference(final Connection connection, final String objectHash, final String path, final String groupName, final String hashRef) + throws DotDataException { + final int rows = dotConnect.executeUpdate(connection, + storageInsertSQL.get(), + objectHash, path, groupName, hashRef); + Logger.debug(DataBaseStoragePersistenceAPIImpl.class,"pushObjectReference inserted rows "+rows); + } + + /** + * inserts data chunks into storage_data and have them associated with a chunk hash + * @param connection + * @param chunkHash + * @param data + * @throws DotDataException + */ + void pushDataChunk(final Connection connection, final String chunkHash, final byte [] data) + throws DotDataException { + final int rows = dotConnect.executeUpdate(connection, + dataInsertSQL.get(), chunkHash, data); + Logger.debug(DataBaseStoragePersistenceAPIImpl.class,"pushDataChunk inserted rows "+rows); + } + + /** + * inserts references into storage_x_data + * @param connection + * @param objectHash + * @param chunkHashes + * @throws DotDataException + */ + void pushHashReference(final Connection connection, final String objectHash, final List chunkHashes) + throws DotDataException { + final String sql = hashReferenceInsertSQL.get(); + int order = 1; + for (final String chunkHash : chunkHashes) { + final int rows = dotConnect.executeUpdate(connection, sql, objectHash, chunkHash, order++); + Logger.debug(DataBaseStoragePersistenceAPIImpl.class,"pushHashReference inserted rows "+rows); + } + } + + /** + * Factory method + * @return + */ + static UpsertDelegate newInstance() { + if(DbConnectionFactory.isPostgres()){ + return new PostgresUpsertDelegate(new DotConnect()); + } + if(DbConnectionFactory.isMySql()){ + return new MySQLUpsertDelegate(new DotConnect()); + } + if(DbConnectionFactory.isMsSql()){ + return new MSSQLUpsertDelegate(new DotConnect()); + } + if(DbConnectionFactory.isOracle()){ + return new OracleUpsertDelegate(new DotConnect()); + } + throw new DotRuntimeException("Oh snap! dunno what database I'm running on."); + } + + } + + /** + * Postgres SQL Upsert + */ + static class PostgresUpsertDelegate extends UpsertDelegate { + + static final String STORAGE_INSERT = "INSERT INTO storage(hash, path, group_name, hash_ref) VALUES (?, ?, ?, ?) ON CONFLICT (path, group_name) DO NOTHING"; + + static final String STORAGE_DATA_INSERT ="INSERT INTO storage_data(hash_id, data) VALUES (?, ?) ON CONFLICT (hash_id) DO NOTHING"; + + static final String HASH_REFERENCE_INSERT = "INSERT INTO storage_x_data(storage_hash, data_hash, data_order) VALUES (?, ?, ?) 
ON CONFLICT (storage_hash,data_hash) DO NOTHING"; + + /** + * Constructor + * @param dotConnect + */ + PostgresUpsertDelegate(final DotConnect dotConnect) { + super(dotConnect, ()->STORAGE_INSERT, ()->STORAGE_DATA_INSERT, ()->HASH_REFERENCE_INSERT); + } + + } + + /** + * My SQL Upsert + */ + static class MySQLUpsertDelegate extends UpsertDelegate { + + static final String STORAGE_INSERT = "INSERT INTO storage(hash, path, group_name, hash_ref) VALUES (?, ?, ?, ?) ON DUPLICATE KEY UPDATE path = path, group_name = group_name "; + + static final String STORAGE_DATA_INSERT ="INSERT INTO storage_data(hash_id, data) VALUES (?, ?) ON DUPLICATE KEY UPDATE hash_id = hash_id, mod_date = CURRENT_TIMESTAMP "; + + static final String HASH_REFERENCE_INSERT = "INSERT INTO storage_x_data(storage_hash, data_hash, data_order) VALUES (?, ?, ?) ON DUPLICATE KEY UPDATE storage_hash = storage_hash, data_hash = data_hash"; + + /** + * Constructor + * @param dotConnect + */ + MySQLUpsertDelegate(final DotConnect dotConnect) { + super(dotConnect, ()->STORAGE_INSERT, ()->STORAGE_DATA_INSERT, ()->HASH_REFERENCE_INSERT); + } + + } + + /** + * MS-SQL Upsert specific + */ + static class MSSQLUpsertDelegate extends UpsertDelegate { + + static final String STORAGE_INSERT = + "MERGE storage WITH (HOLDLOCK) AS [Target] \n" + + "USING ( VALUES (?, ?, ?, ?) ) AS [Source] (hash, path, group_name, hash_ref) ON ([Target].path = [Source].path AND [Target].group_name = [Source].group_name) \n" + + "WHEN MATCHED THEN \n" + + " UPDATE SET [Target].path = [Source].path, [Target].group_name = [Source].group_name \n" + + "WHEN NOT MATCHED THEN \n" + + " INSERT (hash, path, group_name, hash_ref) VALUES ([Source].hash, [Source].path, [Source].group_name, [Source].hash_ref);"; + + static final String STORAGE_DATA_INSERT = + "MERGE storage_data WITH (HOLDLOCK) AS [Target] \n" + + "USING (VALUES (?, ?) ) AS [Source] (hash_id, data) ON ([Target].hash_id = [Source].hash_id) \n" + + "WHEN MATCHED THEN \n" + + " UPDATE SET [Target].hash_id = [Source].hash_id, [Target].data = [Source].data, [Target].mod_date = GETDATE() \n" + + "WHEN NOT MATCHED THEN \n" + + " INSERT (hash_id, data) VALUES ([Source].hash_id, [Source].data);"; + + static final String HASH_REFERENCE_INSERT = + "MERGE storage_x_data WITH (HOLDLOCK) AS [Target] \n" + + "USING ( VALUES(?, ?, ?) ) AS [Source] (storage_hash, data_hash, data_order) ON ([Target].storage_hash = [Source].storage_hash AND [Target].data_hash = [Source].data_hash ) \n" + + "WHEN MATCHED THEN \n" + + " UPDATE SET [Target].storage_hash = [Source].storage_hash, [Target].data_hash = [Source].data_hash, [Target].data_order = [Source].data_order \n" + + "WHEN NOT MATCHED THEN \n" + + " INSERT (storage_hash, data_hash, data_order) VALUES ([Source].storage_hash,[Source].data_hash,[Source].data_order);\n"; + + /** + * Constructor + * @param dotConnect + */ + MSSQLUpsertDelegate(final DotConnect dotConnect) { + super(dotConnect, ()->STORAGE_INSERT, ()->STORAGE_DATA_INSERT, ()->HASH_REFERENCE_INSERT); + } + + } + + /** + * Oracle Upsert SQL Specific + */ + static class OracleUpsertDelegate extends UpsertDelegate { + + static final String STORAGE_INSERT = + "MERGE INTO storage Target \n" + + "USING (SELECT ? PATH, ? GROUP_NAME, ? 
HASH FROM DUAL) Source \n" + + "ON (Target.PATH = Source.PATH AND Target.GROUP_NAME = Source.GROUP_NAME) WHEN MATCHED THEN \n" + + " UPDATE SET Target.hash = Source.hash \n" + + "WHEN NOT MATCHED THEN \n" + + " INSERT (hash, path, group_name, hash_ref) \n" + + " VALUES (?, ?, ?, ?)\n"; + + static final String STORAGE_DATA_INSERT = + + "MERGE INTO storage_data Target \n" + + "USING (SELECT ? HASH_ID, ? DATA FROM DUAL) Source \n" + + "ON (Target.HASH_ID = Source.HASH_ID ) WHEN MATCHED THEN \n" + + " UPDATE SET Target.DATA = Source.DATA, Target.MOD_DATE = CURRENT_TIMESTAMP \n" + + "WHEN NOT MATCHED THEN \n" + + " INSERT (hash_id, data) VALUES (?, ?)\n"; + + static final String HASH_REFERENCE_INSERT = + "MERGE INTO storage_x_data Target \n" + + "USING (SELECT ? STORAGE_HASH, ? DATA_HASH, ? DATA_ORDER FROM DUAL) Source \n" + + "ON (Target.STORAGE_HASH = Source.STORAGE_HASH AND Target.DATA_HASH = Source.DATA_HASH ) WHEN MATCHED THEN \n" + + " UPDATE SET Target.DATA_ORDER = Source.DATA_ORDER, Target.MOD_DATE = CURRENT_TIMESTAMP \n" + + "WHEN NOT MATCHED THEN \n" + + " INSERT (storage_hash, data_hash, data_order) VALUES (?, ?, ?)\n"; + + /** + * Constructor + * @param dotConnect + */ + OracleUpsertDelegate(final DotConnect dotConnect) { + super(dotConnect, ()->STORAGE_INSERT, ()->STORAGE_DATA_INSERT, ()->HASH_REFERENCE_INSERT); + } + + /** + * As the SQL is a bit more complex and requires more params to be replaced we need to adjust + * @param connection + * @param objectHash + * @param path + * @param groupName + * @param hashRef + * @throws DotDataException + */ + @Override + void pushObjectReference(final Connection connection, final String objectHash, final String path, final String groupName, final String hashRef) + throws DotDataException { + final int rows = dotConnect.executeUpdate(connection, + storageInsertSQL.get(), + path, groupName, objectHash, + objectHash, path, groupName, hashRef); + Logger.debug(DataBaseStoragePersistenceAPIImpl.class,"pushObjectReference inserted rows "+rows); + } + + /** + * As the SQL is a bit more complex and requires more params to be replaced we need to adjust + * @param connection + * @param chunkHash + * @param data + * @throws DotDataException + */ + @Override + void pushDataChunk(final Connection connection, final String chunkHash, final byte [] data) + throws DotDataException { + final int rows = dotConnect.executeUpdate(connection, + dataInsertSQL.get(), + chunkHash, data, + chunkHash, data); + Logger.debug(DataBaseStoragePersistenceAPIImpl.class,"pushDataChunk inserted rows "+rows); + } + + /** + * As the SQL is a bit more complex and requires more params to be replaced we need to adjust + * @param connection + * @param objectHash + * @param chunkHashes + * @throws DotDataException + */ + @Override + void pushHashReference(final Connection connection, final String objectHash, final List chunkHashes) + throws DotDataException { + final String sql = hashReferenceInsertSQL.get(); + int order = 1; + for (final String chunkHash : chunkHashes) { + final int rows = dotConnect.executeUpdate(connection, sql, + objectHash, chunkHash, order, + objectHash, chunkHash, order++); + Logger.debug(DataBaseStoragePersistenceAPIImpl.class,"pushHashReference inserted rows "+rows); + } + } + + } + + +} \ No newline at end of file diff --git a/dotCMS/src/main/java/com/dotcms/storage/FileMetadataAPIImpl.java b/dotCMS/src/main/java/com/dotcms/storage/FileMetadataAPIImpl.java index a3e160be93a5..78091d609b65 100644 --- 
a/dotCMS/src/main/java/com/dotcms/storage/FileMetadataAPIImpl.java +++ b/dotCMS/src/main/java/com/dotcms/storage/FileMetadataAPIImpl.java @@ -2,6 +2,7 @@ import static com.dotmarketing.util.UtilMethods.isSet; +import com.dotcms.business.CloseDBIfOpened; import com.dotcms.contenttype.model.field.BinaryField; import com.dotcms.contenttype.model.field.Field; import com.dotcms.contenttype.model.field.FieldVariable; @@ -243,6 +244,7 @@ private Map generateFullMetadata(final Contentlet contentlet, * @return */ @VisibleForTesting + @CloseDBIfOpened Set getMetadataFields (final String fieldIdentifier) { final Optional customIndexMetaDataFieldsOpt = diff --git a/dotCMS/src/main/java/com/dotmarketing/common/db/DBTimeZoneCheck.java b/dotCMS/src/main/java/com/dotmarketing/common/db/DBTimeZoneCheck.java index 95a0956fd62b..b3b4f05e6c79 100644 --- a/dotCMS/src/main/java/com/dotmarketing/common/db/DBTimeZoneCheck.java +++ b/dotCMS/src/main/java/com/dotmarketing/common/db/DBTimeZoneCheck.java @@ -55,8 +55,12 @@ public static boolean isTimeZoneValid(final String timezone) throws InvalidTimeZ } TimeZone.setDefault(testingTimeZone); - connection = DriverManager.getConnection(hikari.getJdbcUrl(), hikari.getUsername(), hikari.getPassword()); + if (!DbConnectionFactory.isPostgres()) { + Logger.info(DBTimeZoneCheck.class, "Database is not postgres, so ignoring timezone check"); + return true; + } + connection = DriverManager.getConnection(hikari.getJdbcUrl(), hikari.getUsername(), hikari.getPassword()); statement = connection.prepareStatement("SELECT * FROM inode WHERE idate > ?"); statement.setTimestamp(1, new Timestamp(System.currentTimeMillis())); diff --git a/dotCMS/src/main/java/com/dotmarketing/db/HibernateUtil.java b/dotCMS/src/main/java/com/dotmarketing/db/HibernateUtil.java index b98dc22b75d3..68a60bc349a4 100644 --- a/dotCMS/src/main/java/com/dotmarketing/db/HibernateUtil.java +++ b/dotCMS/src/main/java/com/dotmarketing/db/HibernateUtil.java @@ -718,16 +718,21 @@ public boolean accept(File dir, String name) { } + public static Optional getSessionIfOpened() { + if (sessionFactory == null) { + buildSessionFactory(); + } + return Optional.ofNullable(sessionHolder.get()); + } + /** * Attempts to find a session associated with the Thread. If there isn't a * session, it will create one. */ public static Session getSession() { try{ - if (sessionFactory == null) { - buildSessionFactory(); - } - Session session = sessionHolder.get(); + final Optional sessionOptional = getSessionIfOpened(); + Session session = sessionOptional.isPresent() ? 
sessionOptional.get() : null; if (session == null) { session = sessionFactory.openSession(DbConnectionFactory.getConnection()); @@ -1287,9 +1292,11 @@ public void setDate(java.util.Date g) { } public static void evict(Object obj) throws DotHibernateException{ - Session session = getSession(); - try { - session.evict(obj); + final Optional sessionOptional = getSessionIfOpened(); + try { + if (sessionOptional.isPresent()) { + sessionOptional.get().evict(obj); + } } catch (HibernateException e) { throw new DotHibernateException("Unable to evict from Hibernate Session ", e); } diff --git a/dotCMS/src/main/java/com/dotmarketing/startup/runonce/Task201014UpdateColumnsValuesInIdentifierTable.java b/dotCMS/src/main/java/com/dotmarketing/startup/runonce/Task201014UpdateColumnsValuesInIdentifierTable.java index 760a134922e5..06d0c2da586a 100644 --- a/dotCMS/src/main/java/com/dotmarketing/startup/runonce/Task201014UpdateColumnsValuesInIdentifierTable.java +++ b/dotCMS/src/main/java/com/dotmarketing/startup/runonce/Task201014UpdateColumnsValuesInIdentifierTable.java @@ -18,6 +18,8 @@ public String getPostgresScript() { final StringBuilder query = new StringBuilder(); + query.append("ALTER TABLE identifier DISABLE TRIGGER ALL;\n"); + //update templates query.append(getQueryToUpdateNonContentletsPostgres("template")); @@ -42,12 +44,16 @@ public String getPostgresScript() { .append(" WHERE id=myID;\n"); + query.append("ALTER TABLE identifier ENABLE TRIGGER ALL;\n"); return query.toString(); } @Override public String getMySQLScript() { final StringBuilder query = new StringBuilder(); + query.append("DROP TRIGGER IF EXISTS check_parent_path_when_insert;\n"); + query.append("DROP TRIGGER IF EXISTS check_parent_path_when_update;\n"); + query.append("DROP TRIGGER IF EXISTS check_child_assets;\n"); //update templates query.append(getQueryToUpdateNonContentletsMySQL("template")); @@ -74,14 +80,71 @@ public String getMySQLScript() { .append("ident.asset_subtype=my_query.velocity_var_name\n") .append("WHERE ident.id=my_query.myID;"); + addTriggersBack(query); + return query.toString(); } + private void addTriggersBack(final StringBuilder query) { + query.append("CREATE TRIGGER check_parent_path_when_insert BEFORE INSERT\n") + .append("on identifier\n") + .append("FOR EACH ROW\n") + .append("BEGIN\n") + .append("DECLARE idCount INT;\n") + .append("DECLARE canInsert boolean default false;\n") + .append(" select count(id)into idCount from identifier where asset_type='folder' and CONCAT(parent_path,asset_name,'/')= NEW.parent_path and host_inode = NEW.host_inode and id <> NEW.id;\n") + .append(" IF(idCount > 0 OR NEW.parent_path = '/' OR NEW.parent_path = '/System folder') THEN\n") + .append(" SET canInsert := TRUE;\n") + .append(" END IF;\n") + .append(" IF(canInsert = FALSE) THEN\n") + .append(" delete from Cannot_insert_for_this_path_does_not_exist_for_the_given_host;\n") + .append(" END IF;\n") + .append("END\n") + .append("#\n"); + + query.append("CREATE TRIGGER check_parent_path_when_update BEFORE UPDATE\n") + .append("on identifier\n") + .append("FOR EACH ROW\n") + .append("BEGIN\n") + .append("DECLARE idCount INT;\n") + .append("DECLARE canUpdate boolean default false;\n") + .append(" IF @disable_trigger IS NULL THEN\n") + .append(" select count(id)into idCount from identifier where asset_type='folder' and CONCAT(parent_path,asset_name,'/')= NEW.parent_path and host_inode = NEW.host_inode and id <> NEW.id;\n") + .append(" IF(idCount > 0 OR NEW.parent_path = '/' OR NEW.parent_path = '/System folder') THEN\n") 
+ .append(" SET canUpdate := TRUE;\n") + .append(" END IF;\n") + .append(" IF(canUpdate = FALSE) THEN\n") + .append(" delete from Cannot_update_for_this_path_does_not_exist_for_the_given_host;\n") + .append(" END IF;\n") + .append(" END IF;\n") + .append("END\n") + .append("#\n"); + + query.append("CREATE TRIGGER check_child_assets BEFORE DELETE\n") + .append("ON identifier\n") + .append("FOR EACH ROW\n") + .append("BEGIN\n") + .append(" DECLARE pathCount INT;\n") + .append(" IF(OLD.asset_type ='folder') THEN\n") + .append(" select count(*) into pathCount from identifier where parent_path = CONCAT(OLD.parent_path,OLD.asset_name,'/') and host_inode = OLD.host_inode;\n") + .append(" END IF;\n") + .append(" IF(OLD.asset_type ='contentlet') THEN\n") + .append("\t select count(*) into pathCount from identifier where host_inode = OLD.id;\n") + .append(" END IF;\n") + .append(" IF(pathCount > 0) THEN\n") + .append(" delete from Cannot_delete_as_this_path_has_children;\n") + .append(" END IF;\n") + .append("END\n") + .append("#"); + } + @Override public String getOracleScript() { final StringBuilder query = new StringBuilder(); + query.append("ALTER TABLE identifier DISABLE ALL TRIGGERS;\n"); + //update templates query.append(getQueryToUpdateNonContentletsOracle("template")); @@ -116,7 +179,9 @@ public String getOracleScript() { .append("WHERE temp.identifier=custom_select.identifier\n") .append("AND inode.inode=temp.inode and inode.idate=custom_select.idate \n") .append("AND temp.structure_inode = struc.inode) WHERE r = 1)\n") - .append(" WHERE myID = tt.id)"); + .append(" WHERE myID = tt.id);\n"); + + query.append("ALTER TABLE identifier ENABLE ALL TRIGGERS\n"); return query.toString(); } diff --git a/dotCMS/src/main/java/com/dotmarketing/startup/runonce/Task210218MigrateUserProxyTable.java b/dotCMS/src/main/java/com/dotmarketing/startup/runonce/Task210218MigrateUserProxyTable.java index 741d2d9c627f..4d75eeb4c967 100644 --- a/dotCMS/src/main/java/com/dotmarketing/startup/runonce/Task210218MigrateUserProxyTable.java +++ b/dotCMS/src/main/java/com/dotmarketing/startup/runonce/Task210218MigrateUserProxyTable.java @@ -27,7 +27,7 @@ public class Task210218MigrateUserProxyTable implements StartupTask { private static final String MYSQL_SCRIPT = "alter table user_ add additional_info text NULL;"; - private static final String ORACLE_SCRIPT = "alter table user_ add additional_info NCLOB NULL;"; + private static final String ORACLE_SCRIPT = "alter table user_ add additional_info NCLOB NULL"; private static final String MSSQL_SCRIPT = "alter table user_ add additional_info NVARCHAR(MAX) NULL;"; diff --git a/dotCMS/src/main/java/com/dotmarketing/startup/runonce/Task210321RemoveOldMetadataFiles.java b/dotCMS/src/main/java/com/dotmarketing/startup/runonce/Task210321RemoveOldMetadataFiles.java index 8060617876d7..0237742c6640 100644 --- a/dotCMS/src/main/java/com/dotmarketing/startup/runonce/Task210321RemoveOldMetadataFiles.java +++ b/dotCMS/src/main/java/com/dotmarketing/startup/runonce/Task210321RemoveOldMetadataFiles.java @@ -10,11 +10,13 @@ import com.dotmarketing.util.UUIDUtil; import io.vavr.Tuple; import io.vavr.Tuple2; +import java.io.File; import java.io.IOException; import java.nio.file.DirectoryStream; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; +import java.util.Comparator; import java.util.HashSet; import java.util.Optional; import java.util.Set; @@ -58,6 +60,12 @@ private Tuple2 deleteMetadataFiles(Path serverDir) { +- 5 (First level 1 char length ) + +- 
a568fd-dff45fg- (identifier like folder name) + + + +- fileasset1-metadata.json + + + +- metaData + + + +- content */ final Set firstLevelPaths = listDirectories(serverDir).stream() @@ -88,14 +96,22 @@ private Tuple2 deleteMetadataFiles(Path serverDir) { final Optional metaDataDir = Files.list(current) .filter(path -> Files.isDirectory(path)) - .filter(path -> path.toString().equals("metaData")) + .filter(path -> path.getFileName().toString().equals("metaData")) .findFirst(); - metaDataDir.ifPresent(path -> { - if(path.toFile().delete()){ - dirsCount.increment(); - Logger.debug(Task210321RemoveOldMetadataFiles.class,"Removed metaData dir: " + path); - } - }); + if(metaDataDir.isPresent()){ + + Files.walk(metaDataDir.get()).sorted(Comparator.reverseOrder()) + .map(Path::toFile) + .forEach(file -> { + if (file.delete()) { + if (file.getName().equals("metaData")) { + dirsCount.increment(); + Logger.debug(Task210321RemoveOldMetadataFiles.class, + "Removed metaData dir: " + file); + } + } + }); + } } } } diff --git a/dotCMS/src/main/java/com/dotmarketing/startup/runonce/Task210510UpdateStorageTableDropMetadataColumn.java b/dotCMS/src/main/java/com/dotmarketing/startup/runonce/Task210510UpdateStorageTableDropMetadataColumn.java new file mode 100644 index 000000000000..7a91c4260bd5 --- /dev/null +++ b/dotCMS/src/main/java/com/dotmarketing/startup/runonce/Task210510UpdateStorageTableDropMetadataColumn.java @@ -0,0 +1,54 @@ +package com.dotmarketing.startup.runonce; + +import com.dotmarketing.common.db.DotDatabaseMetaData; +import com.dotmarketing.startup.AbstractJDBCStartupTask; +import com.dotmarketing.util.Logger; +import java.sql.SQLException; +import java.util.List; + +/** + * Apparently the table storage went out on the last releases with an old column named metadata which is no longer used + */ +public class Task210510UpdateStorageTableDropMetadataColumn extends AbstractJDBCStartupTask { + + @Override + public boolean forceRun() { + try { + return new DotDatabaseMetaData().hasColumn("storage", "metadata") ; + } catch (SQLException e) { + Logger.error(this, e.getMessage(),e); + return false; + } + } + + @Override + public String getPostgresScript() { + return "ALTER TABLE storage DROP COLUMN metadata;"; + } + + @Override + public String getMySQLScript() { + return "ALTER TABLE storage DROP COLUMN metadata;"; + } + + @Override + public String getOracleScript() { + return "ALTER TABLE storage DROP COLUMN metadata;"; + } + + @Override + public String getMSSQLScript() { + return "ALTER TABLE storage DROP COLUMN metadata;"; + } + + @Override + public String getH2Script() { + return null; + } + + @Override + protected List getTablesToDropConstraints() { + return null; + } + +} diff --git a/dotCMS/src/main/java/com/dotmarketing/util/TaskLocatorUtil.java b/dotCMS/src/main/java/com/dotmarketing/util/TaskLocatorUtil.java index 146711d8078a..122b61167326 100644 --- a/dotCMS/src/main/java/com/dotmarketing/util/TaskLocatorUtil.java +++ b/dotCMS/src/main/java/com/dotmarketing/util/TaskLocatorUtil.java @@ -297,6 +297,7 @@ public static List> getStartupRunOnceTaskClasses() { .add(Task210319CreateStorageTable.class) .add(Task210321RemoveOldMetadataFiles.class) .add(Task210506UpdateStorageTable.class) + .add(Task210510UpdateStorageTableDropMetadataColumn.class) .build(); return ret.stream().sorted(classNameComparator).collect(Collectors.toList()); diff --git a/dotCMS/src/main/java/com/liferay/util/FileUtil.java b/dotCMS/src/main/java/com/liferay/util/FileUtil.java index 9e8ea22406b8..0a857a3c983b 100644 --- 
a/dotCMS/src/main/java/com/liferay/util/FileUtil.java +++ b/dotCMS/src/main/java/com/liferay/util/FileUtil.java @@ -150,7 +150,7 @@ public static void copyFile(File source, File destination) throws IOException { copyFile(source, destination, Config.getBooleanProperty("CONTENT_VERSION_HARD_LINK", true)); } - private static void validateEmptyFile(File source) throws IOException{ + public static void validateEmptyFile(File source) throws IOException{ final String metaDataPath = "metaData" + File.separator + "content"; final String languagePropertyPath = "messages" + File.separator + "cms_language"; diff --git a/dotCMS/src/main/resources/apps/dotsaml-config.yml b/dotCMS/src/main/resources/apps/dotsaml-config.yml index e98f33247c6c..b432cacbbbfa 100644 --- a/dotCMS/src/main/resources/apps/dotsaml-config.yml +++ b/dotCMS/src/main/resources/apps/dotsaml-config.yml @@ -40,10 +40,13 @@ params: hint: "Depends on your IDP Configuration." required: true value: + - + label: "None" + value: "none" + selected: true - label: "Only Response" value: "response" - selected: true - label: "Only Assertion" value: "assertion"
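
Reviewer note: the sketch below is a minimal, standalone illustration of the vendor-specific upsert strategy that the new UpsertDelegate hierarchy in DataBaseStoragePersistenceAPIImpl introduces above. It is not part of the patch: it uses plain JDBC instead of DotConnect, takes the database vendor as a string argument instead of consulting DbConnectionFactory, covers only the storage-table insert, and omits the SQL Server MERGE variant for brevity. The class and method names here (StorageUpsertSketch, storageInsertSql) are illustrative only.

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;

/**
 * Standalone sketch of the per-database "insert if absent" strategy used by the
 * UpsertDelegate classes in this patch. Illustration only; table and column names
 * (storage, hash, path, group_name, hash_ref) come from the patch itself.
 */
public final class StorageUpsertSketch {

    /** Returns the vendor-specific upsert statement, mirroring the SQL constants in the patch. */
    static String storageInsertSql(final String vendor) {
        switch (vendor) {
            case "postgres":
                return "INSERT INTO storage(hash, path, group_name, hash_ref) VALUES (?, ?, ?, ?) "
                        + "ON CONFLICT (path, group_name) DO NOTHING";
            case "mysql":
                return "INSERT INTO storage(hash, path, group_name, hash_ref) VALUES (?, ?, ?, ?) "
                        + "ON DUPLICATE KEY UPDATE path = path, group_name = group_name";
            case "oracle":
                // Oracle has no ON CONFLICT / ON DUPLICATE KEY clause, so MERGE is used and the
                // key values must be bound twice: once for the USING source, once for the INSERT branch.
                return "MERGE INTO storage t "
                        + "USING (SELECT ? path, ? group_name, ? hash FROM dual) s "
                        + "ON (t.path = s.path AND t.group_name = s.group_name) "
                        + "WHEN MATCHED THEN UPDATE SET t.hash = s.hash "
                        + "WHEN NOT MATCHED THEN INSERT (hash, path, group_name, hash_ref) VALUES (?, ?, ?, ?)";
            default:
                throw new IllegalArgumentException("Unsupported database vendor: " + vendor);
        }
    }

    /** Binds parameters in the order each vendor's statement expects and runs the upsert. */
    static int pushObjectReference(final Connection conn, final String vendor, final String hash,
                                   final String path, final String groupName, final String hashRef)
            throws SQLException {
        try (PreparedStatement ps = conn.prepareStatement(storageInsertSql(vendor))) {
            if ("oracle".equals(vendor)) {
                // USING clause binds first (path, group_name, hash), then the INSERT branch binds.
                ps.setString(1, path);
                ps.setString(2, groupName);
                ps.setString(3, hash);
                ps.setString(4, hash);
                ps.setString(5, path);
                ps.setString(6, groupName);
                ps.setString(7, hashRef);
            } else {
                ps.setString(1, hash);
                ps.setString(2, path);
                ps.setString(3, groupName);
                ps.setString(4, hashRef);
            }
            return ps.executeUpdate();
        }
    }
}

The duplicated binds in the Oracle branch are the reason OracleUpsertDelegate overrides pushObjectReference, pushDataChunk and pushHashReference in the patch: Oracle and SQL Server rely on MERGE rather than a native conflict clause, so each value is supplied once for the source row and once for the insert branch, while the Postgres and MySQL delegates can reuse the base implementation with a single set of parameters.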