diff --git a/src/main/java/com/salesforce/dataloader/action/visitor/AbstractQueryVisitor.java b/src/main/java/com/salesforce/dataloader/action/visitor/AbstractQueryVisitor.java
index d3a56299..2f2fce98 100644
--- a/src/main/java/com/salesforce/dataloader/action/visitor/AbstractQueryVisitor.java
+++ b/src/main/java/com/salesforce/dataloader/action/visitor/AbstractQueryVisitor.java
@@ -29,7 +29,6 @@
 import com.salesforce.dataloader.action.AbstractExtractAction;
 import com.salesforce.dataloader.action.progress.ILoaderProgress;
 import com.salesforce.dataloader.client.HttpClientTransport;
-import com.salesforce.dataloader.client.HttpTransportInterface;
 import com.salesforce.dataloader.config.AppConfig;
 import com.salesforce.dataloader.config.Messages;
 import com.salesforce.dataloader.controller.Controller;
@@ -46,7 +45,6 @@
 import com.sforce.ws.ConnectionException;
 
 import java.io.InputStream;
-import java.net.HttpURLConnection;
 import java.net.URI;
 import java.nio.charset.StandardCharsets;
 import java.util.Base64;
diff --git a/src/main/java/com/salesforce/dataloader/config/ConfigPropertyMetadata.java b/src/main/java/com/salesforce/dataloader/config/ConfigPropertyMetadata.java
index c37a8139..57cc7528 100644
--- a/src/main/java/com/salesforce/dataloader/config/ConfigPropertyMetadata.java
+++ b/src/main/java/com/salesforce/dataloader/config/ConfigPropertyMetadata.java
@@ -38,7 +38,8 @@
 import com.salesforce.dataloader.dao.csv.CSVFileWriter;
 import com.salesforce.dataloader.exception.DataAccessObjectException;
 import com.salesforce.dataloader.exception.DataAccessObjectInitializationException;
-import com.salesforce.dataloader.model.Row;
+import com.salesforce.dataloader.model.TableHeader;
+import com.salesforce.dataloader.model.TableRow;
 import com.salesforce.dataloader.ui.Labels;
 import com.salesforce.dataloader.util.AppUtil;
 
@@ -237,11 +238,22 @@ public static void generateCSV(AppConfig appConfig) {
             return;
         }
         try {
+            ArrayList<String> headerLabelList = new ArrayList<String>();
+            headerLabelList.add(COL_PROPERTY_NAME);
+            headerLabelList.add(COL_UI_LABEL);
+            headerLabelList.add(COL_DESCRIPTION);
+            headerLabelList.add(COL_DEFAULT_VAL);
+            headerLabelList.add(COL_IS_READ_ONLY);
+            headerLabelList.add(COL_IS_COMMAND_LINE_OPTION);
+            headerLabelList.add(COL_IS_ENCRYPTED);
+            ArrayList<TableRow> rowList = new ArrayList<TableRow>(propertiesMap.size());
+            TableHeader header = new TableHeader(headerLabelList);
+
             for (ConfigPropertyMetadata propMD : propertiesMap.values()) {
                 if (propMD.isInternal()) {
                     continue;
                 }
-                Row row = new Row();
+                TableRow row = new TableRow(header);
                 row.put(COL_PROPERTY_NAME, propMD.getName());
                 row.put(COL_UI_LABEL, propMD.getUiLabelTemplate());
                 String description = propMD.getDescription();
@@ -253,13 +265,12 @@ public static void generateCSV(AppConfig appConfig) {
                 row.put(COL_IS_READ_ONLY, propMD.isReadOnly());
                 row.put(COL_IS_COMMAND_LINE_OPTION, propMD.isCommandLineOption());
                 row.put(COL_IS_ENCRYPTED, propMD.isEncrypted());
-                try {
-                    csvWriter.writeRow(row);
-                } catch (DataAccessObjectException e) {
-                    logger.warn(Messages.getFormattedString("ConfigPropertyMetadata.errorOutputPropInfo", propMD.getName()));
-                    logger.warn(e.getStackTrace());
-                    continue;
-                }
+                rowList.add(row);
+            }
+            try {
+                csvWriter.writeTableRowList(rowList);
+            } catch (DataAccessObjectException e) {
+                logger.warn(e.getStackTrace());
             }
         } finally {
             logger.debug(Messages.getFormattedString("ConfigPropertyMetadata.infoGeneratedCSVLocation", getFullPathToPropsFile(appConfig)));
diff --git a/src/main/java/com/salesforce/dataloader/dao/csv/CSVFileWriter.java b/src/main/java/com/salesforce/dataloader/dao/csv/CSVFileWriter.java
index 4b1d4bdd..e52b60f5 100644
--- a/src/main/java/com/salesforce/dataloader/dao/csv/CSVFileWriter.java
+++ b/src/main/java/com/salesforce/dataloader/dao/csv/CSVFileWriter.java
@@ -194,10 +194,36 @@ public boolean writeRow(Row row) throws DataAccessObjectException {
         }
     }
 
+    /*
+     * (non-Javadoc)
+     * @see com.salesforce.dataloader.dao.csv.Writer#writeRow(java.util.Map)
+     */
+    public boolean writeTableRow(TableRow row) throws DataAccessObjectException {
+        if (this.columnNames == null || this.columnNames.isEmpty()) {
+            List<String> colNames = getColumnNamesFromTableRow(row);
+            this.setColumnNames(colNames);
+        }
+        CSVColumnVisitor visitor = new CSVColumnVisitor(fileOut, false, this.columnDelimiter);
+        try {
+            visitTableRowColumns(columnNames, row, visitor);
+            fileOut.newLine();
+            visitor.newRow();
+            currentRowNumber++;
+            return true; // success unless there's an exception
+        } catch (IOException e) {
+            logger.error(Messages.getString("CSVWriter.errorWriting"), e); //$NON-NLS-1$
+            throw new DataAccessObjectException(Messages.getString("CSVWriter.errorWriting"), e); //$NON-NLS-1$
+        }
+    }
+
     public List<String> getColumnNamesFromRow(Row row) throws DataAccessObjectInitializationException {
         Set<String> fieldNameSet = row.keySet();
         return new ArrayList<String>(fieldNameSet);
     }
+
+    public List<String> getColumnNamesFromTableRow(TableRow row) {
+        return row.getHeader().getColumns();
+    }
 
     /*
      * (non-Javadoc)
@@ -222,7 +248,7 @@ public boolean writeTableRowList(List<TableRow> rows) throws DataAccessObjectExc
         boolean success = true;
         // return the last result, should be same as others
         for (TableRow trow : rows) {
-            success = writeRow(trow.convertToRow());
+            success = writeTableRow(trow);
         }
         return success;
     }
@@ -256,6 +282,19 @@ static private void visitColumns(List<String> columnNames, Row row, CSVColumnVis
         }
     }
 
+
+    static private void visitTableRowColumns(List<String> columnNames, TableRow row, CSVColumnVisitor visitor) throws IOException {
+        for (String colName : columnNames) {
+            Object colVal = row.get(colName);
+            if (colVal == null && colName.contains("(")) {
+                int lparenIdx = colName.indexOf('(');
+                int rparenIdx = colName.indexOf(')');
+                colName = colName.substring(lparenIdx + 1, rparenIdx);
+                colVal = row.get(colName);
+            }
+            visitor.visit(colVal != null ? colVal.toString() : "");
+        }
+    }
     @Override
     public List<String> getColumnNames() {
         return columnNames;
diff --git a/src/main/java/com/salesforce/dataloader/dao/database/DatabaseWriter.java b/src/main/java/com/salesforce/dataloader/dao/database/DatabaseWriter.java
index f90d6ab7..1deee275 100644
--- a/src/main/java/com/salesforce/dataloader/dao/database/DatabaseWriter.java
+++ b/src/main/java/com/salesforce/dataloader/dao/database/DatabaseWriter.java
@@ -240,6 +240,13 @@ public boolean writeRow(Row inputRow) throws DataAccessObjectException {
         return writeRowList(inputRowList);
     }
 
+    public boolean writeTableRow(TableRow inputRow) throws DataAccessObjectException {
+        // FIXME: Think about refactoring this for the caller to writeRow() and here take care of batching internally
+        List<TableRow> inputRowList = new ArrayList<TableRow>();
+        inputRowList.add(inputRow);
+        return writeTableRowList(inputRowList);
+    }
+
     /**
      * @param sqe
      */
diff --git a/src/main/java/com/salesforce/dataloader/model/Row.java b/src/main/java/com/salesforce/dataloader/model/Row.java
index 68dd7c3d..5f40b0e9 100644
--- a/src/main/java/com/salesforce/dataloader/model/Row.java
+++ b/src/main/java/com/salesforce/dataloader/model/Row.java
@@ -42,31 +42,21 @@
  * class and not be spread in multiple class.
  */
 public class Row implements Map<String, Object> {
-
-    private static final int DEFAULT_COLUMN_COUNT = 16; // same as HashMap
     private final Map<String, Object> internalMap;
     private final Map<String, String> keyMap = new HashMap<String, String>();
 
     public Row() {
-        this(DEFAULT_COLUMN_COUNT);
-    }
-
-    public Row(int columnCount) {
-        internalMap = new TreeMap<>(String.CASE_INSENSITIVE_ORDER);
+        this.internalMap = new TreeMap<>(String.CASE_INSENSITIVE_ORDER);
     }
 
     public Row(Map<String, Object> internalMap) {
-        this(internalMap.size());
+        this.internalMap = new TreeMap<>(String.CASE_INSENSITIVE_ORDER);
         this.internalMap.putAll(internalMap);
         for (String key : internalMap.keySet()) {
             this.keyMap.put(key.toLowerCase(), key);
         }
     }
 
-    public static Row emptyRow() {
-        return new Row(Collections.emptyMap());
-    }
-
     public static Row singleEntryImmutableRow(String key, Object value) {
         return new Row(Collections.singletonMap(key, value));
     }
diff --git a/src/main/java/com/salesforce/dataloader/model/TableHeader.java b/src/main/java/com/salesforce/dataloader/model/TableHeader.java
index b64cfcfd..bcb4ff21 100644
--- a/src/main/java/com/salesforce/dataloader/model/TableHeader.java
+++ b/src/main/java/com/salesforce/dataloader/model/TableHeader.java
@@ -25,6 +25,7 @@
  */
 package com.salesforce.dataloader.model;
 
+import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -34,7 +35,7 @@ public class TableHeader {
     private int lastColPosition = 0;
     private List<String> columns;
     public TableHeader(List<String> cols) {
-        this.columns = cols;
+        this.columns = new ArrayList<String>(cols);
         for (String colName : cols) {
             if (colName == null) {
                 continue;
@@ -49,6 +50,6 @@ public Integer getColumnPosition(String columnName) {
     }
 
     public List<String> getColumns() {
-        return this.columns;
+        return new ArrayList<String>(columns);
     }
 }
\ No newline at end of file
diff --git a/src/test/java/com/salesforce/dataloader/dao/CsvTest.java b/src/test/java/com/salesforce/dataloader/dao/CsvTest.java
index 6c32f0ac..fd4d9fc6 100644
--- a/src/test/java/com/salesforce/dataloader/dao/CsvTest.java
+++ b/src/test/java/com/salesforce/dataloader/dao/CsvTest.java
@@ -36,7 +36,7 @@
 import com.salesforce.dataloader.config.AppConfig;
 import com.salesforce.dataloader.dao.csv.CSVFileReader;
 import com.salesforce.dataloader.dao.csv.CSVFileWriter;
-import com.salesforce.dataloader.model.Row;
+import com.salesforce.dataloader.model.TableHeader;
 import com.salesforce.dataloader.model.TableRow;
 import com.salesforce.dataloader.util.AppUtil;
 
@@ -49,8 +49,8 @@ public class CsvTest extends ConfigTestBase {
     private static final String COLUMN_2_NAME = "column2";
     private static final String COLUMN_3_NAME = "column3";
     private List<String> writeHeader;
-    private Row row1;
-    private Row row2;
+    private TableRow row1;
+    private TableRow row2;
 
     @Before
     public void createTestData() {
@@ -59,12 +59,17 @@ public void createTestData() {
         writeHeader.add("COL2");
         writeHeader.add("COL3");
 
-        row1 = new Row();
+        ArrayList<String> headerLabelList = new ArrayList<String>();
+        headerLabelList.add("COL1");
+        headerLabelList.add("COL2");
+        headerLabelList.add("COL3");
+        TableHeader header = new TableHeader(headerLabelList);
+        row1 = new TableRow(header);
         row1.put("COL1", "row1col1");
         row1.put("COL2", "row1col2");
         row1.put("COL3", "row1col3");
 
-        row2 = new Row();
+        row2 = new TableRow(header);
         row2.put("COL1", "row2col1");
         row2.put("COL2", "row2col2");
         row2.put("COL3", "row2col3");
@@ -143,7 +148,7 @@ private void doTestCSVWriteBasic(String delimiter) throws Exception {
         File f = new File(getTestDataDir(), "csvtestTemp.csv");
         String path = f.getAbsolutePath();
         CSVFileWriter writer = new CSVFileWriter(path, getController().getAppConfig(), delimiter);
-        List<Row> rowList = new ArrayList<Row>();
+        List<TableRow> rowList = new ArrayList<TableRow>();
         rowList.add(row1);
         rowList.add(row2);
 
@@ -151,7 +156,7 @@ private void doTestCSVWriteBasic(String delimiter) throws Exception {
 
         writer.open();
         writer.setColumnNames(writeHeader);
-        writer.writeRowList(rowList);
+        writer.writeTableRowList(rowList);
         writer.close();
         compareWriterFile(path, delimiter, false, false); // 3rd param false and 4th param false => CSV for a upload
diff --git a/src/test/java/com/salesforce/dataloader/dao/database/DatabaseTest.java b/src/test/java/com/salesforce/dataloader/dao/database/DatabaseTest.java
index cd3601bb..fe550c91 100644
--- a/src/test/java/com/salesforce/dataloader/dao/database/DatabaseTest.java
+++ b/src/test/java/com/salesforce/dataloader/dao/database/DatabaseTest.java
@@ -28,7 +28,6 @@
 import com.salesforce.dataloader.ConfigTestBase;
 import com.salesforce.dataloader.controller.Controller;
 import com.salesforce.dataloader.exception.DataAccessObjectException;
-import com.salesforce.dataloader.model.Row;
 import com.salesforce.dataloader.model.TableRow;
 import com.salesforce.dataloader.util.AccountRowComparator;
 import org.apache.logging.log4j.Logger;
@@ -36,6 +35,7 @@
 
 import org.junit.After;
 import org.junit.Before;
+import org.junit.Ignore;
 import org.junit.Test;
 
 import java.sql.Timestamp;
@@ -153,10 +153,10 @@ private static void verifyDbInsertOrUpdate(Controller theController, boolean isI
             TableRow readRow = readRowList.get(i);
             assertNotNull("Error reading data row #" + i + ": the row shouldn't be null", readRow);
             assertTrue("Error reading data row #" + i + ": the row shouldn't be empty", readRow.getNonEmptyCellsCount() > 0);
-            Row expectedRow = DatabaseTestUtil.getInsertOrUpdateAccountRow(isInsert, rowsProcessed, DatabaseTestUtil.DateType.VALIDATION);
+            TableRow expectedRow = DatabaseTestUtil.getInsertOrUpdateAccountRow(isInsert, rowsProcessed, DatabaseTestUtil.DateType.VALIDATION);
             // verify all expected data
             for (String colName : VALIDATE_COLS) {
-                verifyCol(colName, readRow.convertToRow(), expectedRow);
+                verifyCol(colName, readRow, expectedRow);
             }
 
             rowsProcessed++;
@@ -169,7 +169,7 @@ private static void verifyDbInsertOrUpdate(Controller theController, boolean isI
         }
     }
 
-    private static void verifyCol(String colName, Row row, Row expectedRow) {
+    private static void verifyCol(String colName, TableRow row, TableRow expectedRow) {
         Object actualValue = row.get(colName);
         Object expectedValue = expectedRow.get(colName);
         assertNotNull("actual value is null", actualValue);
diff --git a/src/test/java/com/salesforce/dataloader/dao/database/DatabaseTestUtil.java b/src/test/java/com/salesforce/dataloader/dao/database/DatabaseTestUtil.java
index 53586d9f..63e97367 100644
--- a/src/test/java/com/salesforce/dataloader/dao/database/DatabaseTestUtil.java
+++ b/src/test/java/com/salesforce/dataloader/dao/database/DatabaseTestUtil.java
@@ -29,7 +29,9 @@
 import com.salesforce.dataloader.controller.Controller;
 import com.salesforce.dataloader.exception.DataAccessObjectException;
 import com.salesforce.dataloader.exception.DataAccessObjectInitializationException;
-import com.salesforce.dataloader.model.Row;
+import com.salesforce.dataloader.model.TableHeader;
+import com.salesforce.dataloader.model.TableRow;
+
 import junit.framework.TestCase;
 import org.apache.commons.dbcp2.BasicDataSource;
 import org.apache.logging.log4j.Logger;
@@ -99,16 +101,16 @@ public static void insertOrUpdateAccountsDb(Controller theController, boolean is
             sqlConfig.getSqlParams().put(LAST_UPDATED_COL, dateClass.getName());
             writer = new DatabaseWriter(theController.getAppConfig(), dbConfigName, dataSource, sqlConfig);
             writer.open();
-            List<Row> accountRowList = new ArrayList<Row>();
+            List<TableRow> accountRowList = new ArrayList<TableRow>();
             int rowsProcessed = 0;
             for(int i=0; i < numAccounts; i++) {
-                Row accountRow = getInsertOrUpdateAccountRow(isInsert, i, dateType, insertNulls);
+                TableRow accountRow = getInsertOrUpdateAccountRow(isInsert, i, dateType, insertNulls);
                 accountRowList.add(accountRow);
                 if(accountRowList.size() >= 1000 || i == (numAccounts-1)) {
                     rowsProcessed += accountRowList.size();
-                    writer.writeRowList(accountRowList);
+                    writer.writeTableRowList(accountRowList);
                     logger.info("Written " + rowsProcessed + " of " + numAccounts + " total accounts using database config: " + dbConfigName);
-                    accountRowList = new ArrayList<Row>();
+                    accountRowList = new ArrayList<TableRow>();
                 }
             }
         } catch (DataAccessObjectInitializationException e) {
@@ -121,7 +123,7 @@ public static void insertOrUpdateAccountsDb(Controller theController, boolean is
         }
     }
 
-    public static Row getInsertOrUpdateAccountRow(boolean isInsert, int seqNum, DateType dateType) {
+    public static TableRow getInsertOrUpdateAccountRow(boolean isInsert, int seqNum, DateType dateType) {
         return getInsertOrUpdateAccountRow(isInsert, seqNum, dateType, false);
     }
 
@@ -142,8 +144,18 @@ public static DatabaseConfig getDatabaseConfig(Controller controller, String dbC
      * @param dateType Type for the date field values
      * @return Row containing account data based on seqNum
      */
-    public static Row getInsertOrUpdateAccountRow(boolean isInsert, int seqNum, DateType dateType, boolean insertNulls) {
-        Row row = new Row();
+    public static TableRow getInsertOrUpdateAccountRow(boolean isInsert, int seqNum, DateType dateType, boolean insertNulls) {
+        ArrayList<String> headerLabelList = new ArrayList<String>();
+        headerLabelList.add(EXT_ID_COL);
+        headerLabelList.add(NAME_COL);
+        headerLabelList.add(SFDC_ID_COL);
+        headerLabelList.add(ACCOUNT_NUMBER_COL);
+        headerLabelList.add(PHONE_COL);
+        headerLabelList.add(REVENUE_COL);
+        headerLabelList.add(LAST_UPDATED_COL);
+
+        TableHeader header = new TableHeader(headerLabelList);
+        TableRow row = new TableRow(header);
         String operation;
         int seqInt;
         // external id is the key, use normal sequencing for update so the same set of records gets updated as inserted
diff --git a/src/test/java/com/salesforce/dataloader/process/BulkV1CsvProcessTest.java b/src/test/java/com/salesforce/dataloader/process/BulkV1CsvProcessTest.java
index ce6f0f00..003bdd12 100644
--- a/src/test/java/com/salesforce/dataloader/process/BulkV1CsvProcessTest.java
+++ b/src/test/java/com/salesforce/dataloader/process/BulkV1CsvProcessTest.java
@@ -39,7 +39,8 @@
 import com.salesforce.dataloader.config.AppConfig;
 import com.salesforce.dataloader.controller.Controller;
 import com.salesforce.dataloader.dao.csv.CSVFileWriter;
-import com.salesforce.dataloader.model.Row;
+import com.salesforce.dataloader.model.TableHeader;
+import com.salesforce.dataloader.model.TableRow;
 import com.salesforce.dataloader.util.AppUtil;
 
 import static org.junit.Assert.assertEquals;
@@ -51,17 +52,21 @@ public class BulkV1CsvProcessTest extends ProcessTestBase {
     private static final String TARGET_DIR = getProperty("target.dir").trim();
     private static final String CSV_DIR_PATH = TARGET_DIR + File.separator + "BatchTests";
     private static final String CSV_FILE_PATH = CSV_DIR_PATH + File.separator + "BatchTests.csv";
-    private static Row validRow;
-    private static Row invalidRow;
+    private static TableRow validRow;
+    private static TableRow invalidRow;
     private Map<String, String> argMap;
 
     @BeforeClass
     public static void setUpData() {
-        validRow = new Row();
+        ArrayList<String> headerNames = new ArrayList<String>();
+        headerNames.add("Subject");
+        headerNames.add("ReminderDateTime");
+        TableHeader header = new TableHeader(headerNames);
+        validRow = new TableRow(header);
         validRow.put("Subject", TASK_SUBJECT);
         validRow.put("ReminderDateTime", "");
 
-        invalidRow = new Row();
+        invalidRow = new TableRow(header);
         invalidRow.put("Subject", TASK_SUBJECT);
         invalidRow.put("ReminderDateTime", "NULL"); // this makes date conversion fail
     }
@@ -102,7 +107,7 @@ private ILoaderProgress runProcess(Map<String, String> argMap, int numInserts, i
         return monitor;
     }
 
-    private void writeCsv(Row... rows) throws Exception {
+    private void writeCsv(TableRow... rows) throws Exception {
         File csvDir = new File(CSV_DIR_PATH);
         if (!csvDir.exists()) {
             boolean deleteCsvDirOk = csvDir.mkdirs();
@@ -118,8 +123,8 @@ private void writeCsv(Row... rows) throws Exception {
         try {
             writer = new CSVFileWriter(CSV_FILE_PATH, getController().getAppConfig(), AppUtil.COMMA);
             writer.open();
-            writer.setColumnNames(new ArrayList<String>(rows[0].keySet()));
-            writer.writeRowList(Arrays.asList(rows));
+            writer.setColumnNames(rows[0].getHeader().getColumns());
+            writer.writeTableRowList(Arrays.asList(rows));
         } finally {
             if (writer != null) {
                 writer.close();
diff --git a/src/test/java/com/salesforce/dataloader/process/NAProcessTest.java b/src/test/java/com/salesforce/dataloader/process/NAProcessTest.java
index 9bcb44cd..75ae39e1 100644
--- a/src/test/java/com/salesforce/dataloader/process/NAProcessTest.java
+++ b/src/test/java/com/salesforce/dataloader/process/NAProcessTest.java
@@ -34,6 +34,7 @@
 
 import org.junit.Assert;
 import org.junit.Before;
+import org.junit.Ignore;
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
@@ -46,7 +47,8 @@
 import com.salesforce.dataloader.dao.csv.CSVFileReader;
 import com.salesforce.dataloader.dao.csv.CSVFileWriter;
 import com.salesforce.dataloader.model.NATextValue;
-import com.salesforce.dataloader.model.Row;
+import com.salesforce.dataloader.model.TableHeader;
+import com.salesforce.dataloader.model.TableRow;
 import com.salesforce.dataloader.util.AppUtil;
 import com.sforce.soap.partner.QueryResult;
 import com.sforce.soap.partner.SaveResult;
@@ -87,9 +89,10 @@ public void populateUserId() throws Exception {
     @Parameterized.Parameters(name = "{0}")
     public static Collection<Object[]> getConfigGeneratorParams() {
         return Arrays.asList(
+                TestVariant.forSettings(TestSetting.BULK_API_DISABLED),
                 TestVariant.forSettings(TestSetting.BULK_API_ENABLED),
-                TestVariant.forSettings(TestSetting.BULK_API_ENABLED, TestSetting.BULK_API_CACHE_DAO_UPLOAD_ENABLED),
-                TestVariant.forSettings(TestSetting.BULK_API_DISABLED));
+                TestVariant.forSettings(TestSetting.BULK_API_ENABLED, TestSetting.BULK_API_CACHE_DAO_UPLOAD_ENABLED)
+                );
     }
 
     @Test
@@ -250,7 +253,13 @@ private void generateCsv(String nullFieldName, Object nullFieldValue, String id)
             assertTrue("Could not delete existing CSV file: " + CSV_FILE_PATH, deleteCsvFileOk);
         }
 
-        Row row = new Row();
+        ArrayList<String> headerLabelList = new ArrayList<String>();
+        headerLabelList.add("OwnerId");
+        headerLabelList.add("Subject");
+        if (id != null) headerLabelList.add("Id");
+        headerLabelList.add(nullFieldName);
+        TableHeader header = new TableHeader(headerLabelList);
+        TableRow row = new TableRow(header);
         row.put("OwnerId", userId);
         row.put("Subject", TASK_SUBJECT);
         row.put(nullFieldName, nullFieldValue);
@@ -260,8 +269,8 @@ private void generateCsv(String nullFieldName, Object nullFieldValue, String id)
         try {
             writer = new CSVFileWriter(CSV_FILE_PATH, getController().getAppConfig(), AppUtil.COMMA);
             writer.open();
-            writer.setColumnNames(new ArrayList<String>(row.keySet()));
-            writer.writeRow(row);
+            writer.setColumnNames(new ArrayList<String>(header.getColumns()));
+            writer.writeTableRow(row);
         } finally {
             if (writer != null) writer.close();
         }
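
Usage sketch (not part of the patch): the snippet below illustrates the TableHeader/TableRow write path this diff introduces, mirroring the calls exercised in the tests above. The example class, the method name, the output path, and the AppConfig parameter are placeholders of my own; the TableHeader, TableRow, and CSVFileWriter calls are the ones shown in the diff.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import com.salesforce.dataloader.config.AppConfig;
import com.salesforce.dataloader.dao.csv.CSVFileWriter;
import com.salesforce.dataloader.model.TableHeader;
import com.salesforce.dataloader.model.TableRow;
import com.salesforce.dataloader.util.AppUtil;

public class TableRowCsvExample {
    // appConfig would normally come from the running Controller; the path is a placeholder.
    static void writeSampleCsv(AppConfig appConfig) throws Exception {
        // Column order is fixed once by the header and shared by every row.
        TableHeader header = new TableHeader(new ArrayList<String>(Arrays.asList("COL1", "COL2")));

        TableRow row = new TableRow(header);
        row.put("COL1", "value1");
        row.put("COL2", "value2");

        List<TableRow> rows = new ArrayList<TableRow>();
        rows.add(row);

        CSVFileWriter writer = new CSVFileWriter("/tmp/example.csv", appConfig, AppUtil.COMMA);
        try {
            writer.open();
            writer.setColumnNames(header.getColumns());
            writer.writeTableRowList(rows); // one CSV line per TableRow, columns ordered by the header
        } finally {
            writer.close();
        }
    }
}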