diff --git a/src/main/java/com/salesforce/dataloader/action/AbstractLoadAction.java b/src/main/java/com/salesforce/dataloader/action/AbstractLoadAction.java
index bce97bb4..cf13a298 100644
--- a/src/main/java/com/salesforce/dataloader/action/AbstractLoadAction.java
+++ b/src/main/java/com/salesforce/dataloader/action/AbstractLoadAction.java
@@ -28,7 +28,6 @@
 
 import com.salesforce.dataloader.action.progress.ILoaderProgress;
 import com.salesforce.dataloader.action.visitor.DAOLoadVisitor;
-import com.salesforce.dataloader.client.PartnerClient;
 import com.salesforce.dataloader.config.AppConfig;
 import com.salesforce.dataloader.controller.Controller;
 import com.salesforce.dataloader.dao.DataAccessObject;
@@ -40,17 +39,11 @@
 import com.salesforce.dataloader.exception.OperationException;
 import com.salesforce.dataloader.exception.ParameterLoadException;
 import com.salesforce.dataloader.mapping.LoadMapper;
-import com.salesforce.dataloader.model.Row;
 import com.salesforce.dataloader.model.TableRow;
 import com.salesforce.dataloader.util.DAORowUtil;
-import com.sforce.soap.partner.DescribeGlobalSObjectResult;
-import com.sforce.soap.partner.DescribeSObjectResult;
-import com.sforce.soap.partner.Field;
-import com.sforce.soap.partner.FieldType;
 import com.sforce.ws.ConnectionException;
 
 import java.util.List;
-import java.util.Map;
 
 /**
  * @author Lexi Viripaeff
diff --git a/src/main/java/com/salesforce/dataloader/action/visitor/AbstractQueryVisitor.java b/src/main/java/com/salesforce/dataloader/action/visitor/AbstractQueryVisitor.java
index 2f2fce98..021e4925 100644
--- a/src/main/java/com/salesforce/dataloader/action/visitor/AbstractQueryVisitor.java
+++ b/src/main/java/com/salesforce/dataloader/action/visitor/AbstractQueryVisitor.java
@@ -40,6 +40,7 @@
 import com.salesforce.dataloader.exception.ParameterLoadException;
 import com.salesforce.dataloader.mapping.SOQLMapper;
 import com.salesforce.dataloader.model.Row;
+import com.salesforce.dataloader.model.TableHeader;
 import com.sforce.async.AsyncApiException;
 import com.sforce.soap.partner.fault.ApiFault;
 import com.sforce.ws.ConnectionException;
@@ -247,14 +248,35 @@ private void writeBatch() throws DataAccessObjectException {
 
     private void writeSuccesses() throws DataAccessObjectException {
         final String msg = Messages.getMessage(getClass(), "statusItemQueried");
         final Iterator ids = this.batchIds.iterator();
+        if (this.batchRows == null || this.batchRows.isEmpty()) {
+            return;
+        }
+        ArrayList headerColumnList = new ArrayList();
+        headerColumnList.add(AppConfig.ID_COLUMN_NAME);
+        Row firstRow = this.batchRows.get(0);
+        for (String fieldName : firstRow.keySet()) {
+            headerColumnList.add(fieldName);
+        }
+        headerColumnList.add(AppConfig.STATUS_COLUMN_NAME);
+        TableHeader header = new TableHeader(headerColumnList);
         for (final Row row : this.batchRows) {
-            writeSuccess(row, ids.next(), msg);
+            writeSuccess(row.convertToTableRow(header), ids.next(), msg);
         }
     }
 
     private void writeErrors(String errorMessage) throws DataAccessObjectException {
+        if (this.batchRows == null || this.batchRows.isEmpty()) {
+            return;
+        }
+        ArrayList headerColumnList = new ArrayList();
+        Row firstRow = this.batchRows.get(0);
+        for (String fieldName : firstRow.keySet()) {
+            headerColumnList.add(fieldName);
+        }
+        headerColumnList.add(AppConfig.ERROR_COLUMN_NAME);
+        TableHeader header = new TableHeader(headerColumnList);
         for (final Row row : this.batchRows) {
-            writeError(row, errorMessage);
+            writeError(row.convertToTableRow(header), errorMessage);
         }
     }
diff --git a/src/main/java/com/salesforce/dataloader/action/visitor/AbstractVisitor.java b/src/main/java/com/salesforce/dataloader/action/visitor/AbstractVisitor.java
index fcdeec97..16e3532c 100644
--- a/src/main/java/com/salesforce/dataloader/action/visitor/AbstractVisitor.java
+++ b/src/main/java/com/salesforce/dataloader/action/visitor/AbstractVisitor.java
@@ -32,7 +32,7 @@
 import com.salesforce.dataloader.dao.DataWriter;
 import com.salesforce.dataloader.exception.DataAccessObjectException;
 import com.salesforce.dataloader.mapping.Mapper;
-import com.salesforce.dataloader.model.Row;
+import com.salesforce.dataloader.model.TableRow;
 import com.salesforce.dataloader.util.LoadRateCalculator;
 
 import org.apache.logging.log4j.Logger;
@@ -131,7 +131,7 @@ protected DataWriter getSuccessWriter() {
         return this.successWriter;
     }
 
-    protected void writeSuccess(Row row, String id, String message) throws DataAccessObjectException {
+    protected void writeSuccess(TableRow row, String id, String message) throws DataAccessObjectException {
         if (writeStatus()) {
             if (id != null && id.length() > 0) {
                 row.put(AppConfig.ID_COLUMN_NAME, id);
@@ -139,19 +139,19 @@ protected void writeSuccess(Row row, String id, String message) throws DataAcces
             if (message != null && message.length() > 0) {
                 row.put(AppConfig.STATUS_COLUMN_NAME, message);
             }
-            this.successWriter.writeRow(row);
+            this.successWriter.writeTableRow(row);
         }
         addSuccess();
     }
 
-    protected void writeError(Row row, String errorMessage) throws DataAccessObjectException {
+    protected void writeError(TableRow row, String errorMessage) throws DataAccessObjectException {
         if (writeStatus()) {
             if (row == null) {
-                row = Row.singleEntryImmutableRow(AppConfig.ERROR_COLUMN_NAME, errorMessage);
+                row = TableRow.singleEntryImmutableRow(AppConfig.ERROR_COLUMN_NAME, errorMessage);
             } else {
                 row.put(AppConfig.ERROR_COLUMN_NAME, errorMessage);
             }
-            this.errorWriter.writeRow(row);
+            this.errorWriter.writeTableRow(row);
         }
         addErrors();
     }
diff --git a/src/main/java/com/salesforce/dataloader/action/visitor/DAOLoadVisitor.java b/src/main/java/com/salesforce/dataloader/action/visitor/DAOLoadVisitor.java
index 39ca23f4..a3a7ab95 100644
--- a/src/main/java/com/salesforce/dataloader/action/visitor/DAOLoadVisitor.java
+++ b/src/main/java/com/salesforce/dataloader/action/visitor/DAOLoadVisitor.java
@@ -153,8 +153,7 @@ public boolean visit(TableRow row) throws OperationException, DataAccessObjectEx
             this.daoRowList.add(row);
         }
         // the result are sforce fields mapped to data
-        Row sforceDataRow = getMapper().mapData(row);
-
+        TableRow sforceDataRow = getMapper().mapData(row, processedDAORowCounter == 0);
         if (this.getConfig().getBoolean(AppConfig.PROP_TRUNCATE_FIELDS)
                 && this.getConfig().isRESTAPIEnabled()
                 && "update".equalsIgnoreCase(this.getConfig().getString(AppConfig.PROP_OPERATION))) {
@@ -163,13 +162,13 @@ public boolean visit(TableRow row) throws OperationException, DataAccessObjectEx
                 cachedFieldAttributesForOperation = partnerClient.getSObjectFieldAttributesForRow(
                         this.getConfig().getString(AppConfig.PROP_ENTITY), sforceDataRow);
             }
-            for (Map.Entry field : sforceDataRow.entrySet()) {
+            for (String fieldName : sforceDataRow.getHeader().getColumns()) {
                 for (Field fieldDescribe : cachedFieldAttributesForOperation) {
                     // Field truncation is applicable to certain field types only.
                     // See https://developer.salesforce.com/docs/atlas.en-us.api_tooling.meta/api_tooling/sforce_api_header_allowfieldtruncation.htm
                     // for the list of field types that field truncation is applicable to.
                     FieldType type = fieldDescribe.getType();
-                    if (fieldDescribe.getName().equalsIgnoreCase(field.getKey())
+                    if (fieldDescribe.getName().equalsIgnoreCase(fieldName)
                         && (type == FieldType.email
                             || type == FieldType.string
                             || type == FieldType.picklist
@@ -178,17 +177,17 @@ public boolean visit(TableRow row) throws OperationException, DataAccessObjectEx
                             || type == FieldType.multipicklist
                         ) {
                         int fieldLength = fieldDescribe.getLength();
-                        if (field.getValue().toString().length() > fieldLength) {
+                        if (row.get(fieldName).toString().length() > fieldLength) {
                             if (type == FieldType.email) {
-                                String[] emailParts = field.getValue().toString().split("@");
+                                String[] emailParts = row.get(fieldName).toString().split("@");
                                 if (emailParts.length == 2) {
                                     String firstPart = emailParts[0].substring(0, fieldLength - emailParts[1].length() - 1);
-                                    field.setValue(firstPart + "@" + emailParts[1]);
+                                    row.put(fieldName, firstPart + "@" + emailParts[1]);
                                     continue;
                                 }
                             }
-                            field.setValue(field.getValue().toString().substring(0, fieldLength));
+                            row.put(fieldName, row.get(fieldName).toString().substring(0, fieldLength));
                         }
                     }
                 }
@@ -206,7 +205,7 @@ public boolean visit(TableRow row) throws OperationException, DataAccessObjectEx
         }
         try {
             convertBulkAPINulls(sforceDataRow);
-            DynaBean dynaBean = SforceDynaBean.convertToDynaBean(dynaClass, sforceDataRow);
+            DynaBean dynaBean = SforceDynaBean.convertToDynaBean(dynaClass, sforceDataRow.convertToRow());
             Map fieldMap = BeanUtils.describe(dynaBean);
             for (String fName : fieldMap.keySet()) {
                 if (fieldMap.get(fName) != null) {
@@ -222,7 +221,7 @@ public boolean visit(TableRow row) throws OperationException, DataAccessObjectEx
             String errMsg = Messages.getMessage("Visitor", "conversionErrorMsg", conve.getMessage());
             getLogger().error(errMsg, conve);
-            conversionFailed(row.convertToRow(), errMsg);
+            conversionFailed(row, errMsg);
             // this row cannot be added since conversion has failed
             return false;
         } catch (InvocationTargetException e) {
@@ -252,12 +251,12 @@ protected boolean maxBatchBytesReached(List dynaArray) {
      * @throws DataAccessObjectException
      * @throws OperationException
      */
-    protected void conversionFailed(Row row, String errMsg) throws DataAccessObjectException,
+    protected void conversionFailed(TableRow row, String errMsg) throws DataAccessObjectException,
             OperationException {
         writeError(row, errMsg);
     }
 
-    protected void convertBulkAPINulls(Row row) {}
+    protected void convertBulkAPINulls(TableRow row) {}
 
     public void flushRemaining() throws OperationException, DataAccessObjectException {
         // check if there are any entities left
@@ -441,7 +440,7 @@ private Object getPhoneFieldValue(String fieldName, Object fieldValue) {
 
     }
 
-    protected void processResult(Row dataRow, boolean isSuccess, String id, Error[] errors)
+    protected void processResult(TableRow dataRow, boolean isSuccess, String id, Error[] errors)
             throws DataAccessObjectException {
         // process success vs. error
         // extract error message from error result
diff --git a/src/main/java/com/salesforce/dataloader/action/visitor/bulk/BulkLoadVisitor.java b/src/main/java/com/salesforce/dataloader/action/visitor/bulk/BulkLoadVisitor.java
index fac912ba..e1116605 100644
--- a/src/main/java/com/salesforce/dataloader/action/visitor/bulk/BulkLoadVisitor.java
+++ b/src/main/java/com/salesforce/dataloader/action/visitor/bulk/BulkLoadVisitor.java
@@ -504,7 +504,7 @@ private void processResults(final DataReader dataReader, final BatchInfo batch,
             }
         } else {
             for (final TableRow row : rows) {
-                writeError(row.convertToRow(), errorMessage);
+                writeError(row, errorMessage);
             }
         }
         // update to process the next batch
@@ -540,16 +540,16 @@ private void processBatchResults(final BatchInfo batch, final String errorMessag
             if (state == BatchStateEnum.Failed || errorMessage != null) {
                 getLogger().warn(
                         Messages.getMessage(getClass(), "logBatchInfoWithMessage", batch.getId(), state, errorMessage));
-                writeError(row.convertToRow(), errorMessage);
+                writeError(row, errorMessage);
             } else if (res == null || res.isEmpty()) {
                 String msg = Messages.getMessage(getClass(), "noResultForRow", row.toString(), batch.getId());
-                writeError(row.convertToRow(), msg);
+                writeError(row, msg);
                 getLogger().warn(msg);
             } else {
                 // convert the row into a RowResults so its easy to inspect
                 final RowResult rowResult = new RowResult(Boolean.valueOf(res.get(successIdx)),
                         isDelete ? false : Boolean.valueOf(res.get(createdIdx)), res.get(idIdx), res.get(errIdx));
-                writeRowResult(row.convertToRow(), rowResult);
+                writeRowResult(row, rowResult);
             }
         }
     }
@@ -573,7 +573,7 @@ private DataReader resetDAO() throws DataAccessObjectInitializationException, Lo
         return dataReader;
     }
 
-    private void writeRowResult(Row row, RowResult resultRow) throws DataAccessObjectException {
+    private void writeRowResult(TableRow row, RowResult resultRow) throws DataAccessObjectException {
         if (resultRow.success) {
             String successMessage;
             switch (getConfig().getOperationInfo()) {
@@ -632,16 +632,16 @@ private String parseAsyncApiError(final String errString) {
     }
 
     @Override
-    protected void convertBulkAPINulls(Row row) {
-        for (final Map.Entry entry : row.entrySet()) {
-            if (NATextValue.isNA(entry.getValue())) {
-                entry.setValue(NATextValue.getInstance());
+    protected void convertBulkAPINulls(TableRow row) {
+        for (String columnName : row.getHeader().getColumns()) {
+            if (NATextValue.isNA(row.get(columnName))) {
+                row.put(columnName, NATextValue.getInstance());
             }
         }
     }
 
     @Override
-    protected void conversionFailed(Row row, String errMsg) throws DataAccessObjectException,
+    protected void conversionFailed(TableRow row, String errMsg) throws DataAccessObjectException,
             OperationException {
         super.conversionFailed(row, errMsg);
         getLogger().warn("Skipping results for row " + row + " which failed before upload to Saleforce.com");
diff --git a/src/main/java/com/salesforce/dataloader/action/visitor/partner/PartnerLoadVisitor.java b/src/main/java/com/salesforce/dataloader/action/visitor/partner/PartnerLoadVisitor.java
index 9a62a17f..1e68f5a3 100644
--- a/src/main/java/com/salesforce/dataloader/action/visitor/partner/PartnerLoadVisitor.java
+++ b/src/main/java/com/salesforce/dataloader/action/visitor/partner/PartnerLoadVisitor.java
@@ -28,7 +28,8 @@
 
 import java.util.List;
 
-import com.salesforce.dataloader.model.Row;
+import com.salesforce.dataloader.model.TableRow;
+
 import org.apache.commons.beanutils.DynaBean;
 
 import com.salesforce.dataloader.action.OperationInfo;
@@ -115,7 +116,7 @@ private void writeOutputToWriter(Object[] results)
         // are a) not the same class yet b) not subclassed
         int batchRowCounter = 0;
         for (int i = 0; i < this.daoRowList.size(); i++) {
-            Row daoRow = this.daoRowList.get(i).convertToRow();
+            TableRow daoRow = this.daoRowList.get(i);
             if (!isRowConversionSuccessful()) {
                 continue;
             }
diff --git a/src/main/java/com/salesforce/dataloader/action/visitor/rest/RESTLoadVisitor.java b/src/main/java/com/salesforce/dataloader/action/visitor/rest/RESTLoadVisitor.java
index 7e3acbd8..fbceac86 100644
--- a/src/main/java/com/salesforce/dataloader/action/visitor/rest/RESTLoadVisitor.java
+++ b/src/main/java/com/salesforce/dataloader/action/visitor/rest/RESTLoadVisitor.java
@@ -42,6 +42,7 @@
 import com.salesforce.dataloader.exception.LoadException;
 import com.salesforce.dataloader.exception.OperationException;
 import com.salesforce.dataloader.model.Row;
+import com.salesforce.dataloader.model.TableRow;
 import com.sforce.soap.partner.SaveResult;
 import com.sforce.soap.partner.fault.ApiFault;
 import com.sforce.ws.ConnectionException;
@@ -82,7 +83,7 @@ private void writeOutputToWriter(Object[] results)
         // are a) not the same class yet b) not subclassed
         int batchRowCounter = 0;
         for (int i = 0; i < this.daoRowList.size(); i++) {
-            Row daoRow = this.daoRowList.get(i).convertToRow();
+            TableRow daoRow = this.daoRowList.get(i);
             if (!isRowConversionSuccessful()) {
                 continue;
             }
diff --git a/src/main/java/com/salesforce/dataloader/client/PartnerClient.java b/src/main/java/com/salesforce/dataloader/client/PartnerClient.java
index cb45548c..e389020f 100644
--- a/src/main/java/com/salesforce/dataloader/client/PartnerClient.java
+++ b/src/main/java/com/salesforce/dataloader/client/PartnerClient.java
@@ -44,7 +44,7 @@
 import com.salesforce.dataloader.exception.PasswordExpiredException;
 import com.salesforce.dataloader.exception.RelationshipFormatException;
 import com.salesforce.dataloader.mapping.LoadMapper;
-import com.salesforce.dataloader.model.Row;
+import com.salesforce.dataloader.model.TableRow;
 import com.salesforce.dataloader.util.AppUtil;
 import com.sforce.soap.partner.Connector;
 import com.sforce.soap.partner.DeleteResult;
@@ -942,13 +942,13 @@ public DescribeSObjectResult describeSObject(String entity) throws ConnectionExc
         return result;
     }
-    public Field[] getSObjectFieldAttributesForRow(String sObjectName, Row dataRow) throws ConnectionException {
+    public Field[] getSObjectFieldAttributesForRow(String sObjectName, TableRow dataRow) throws ConnectionException {
         ArrayList attributesForRow = new ArrayList();
         DescribeSObjectResult entityDescribe = describeSObject(sObjectName);
-        for (Map.Entry dataRowField : dataRow.entrySet()) {
+        for (String headerColumnName : dataRow.getHeader().getColumns()) {
             Field[] fieldAttributesArray = entityDescribe.getFields();
             for (Field fieldAttributes : fieldAttributesArray) {
-                if (fieldAttributes.getName().equalsIgnoreCase(dataRowField.getKey())) {
+                if (fieldAttributes.getName().equalsIgnoreCase(headerColumnName)) {
                     attributesForRow.add(fieldAttributes);
                 }
             }
         }
diff --git a/src/main/java/com/salesforce/dataloader/dao/AbstractDataReaderImpl.java b/src/main/java/com/salesforce/dataloader/dao/AbstractDataReaderImpl.java
index 534272d2..b91cede1 100644
--- a/src/main/java/com/salesforce/dataloader/dao/AbstractDataReaderImpl.java
+++ b/src/main/java/com/salesforce/dataloader/dao/AbstractDataReaderImpl.java
@@ -26,11 +26,13 @@
 package com.salesforce.dataloader.dao;
 
+import java.util.ArrayList;
 import java.util.List;
 
 import com.salesforce.dataloader.config.AppConfig;
 import com.salesforce.dataloader.exception.DataAccessObjectException;
 import com.salesforce.dataloader.exception.DataAccessObjectInitializationException;
+import com.salesforce.dataloader.model.TableHeader;
 import com.salesforce.dataloader.model.TableRow;
 import com.salesforce.dataloader.util.DAORowUtil;
 
@@ -39,6 +41,8 @@ public abstract class AbstractDataReaderImpl implements DataReader {
     private DAORowCache rowCache = new DAORowCache();
     private int currentRowNumber;
     private int totalRows = 0;
+    private TableHeader tableHeader = null;
+    private List daoColsList = new ArrayList();
 
     public AbstractDataReaderImpl(AppConfig appConfig) {
         this.appConfig = appConfig;
@@ -62,12 +66,32 @@ public void open() throws DataAccessObjectInitializationException{
         if (!appConfig.getBoolean(AppConfig.PROP_PROCESS_BULK_CACHE_DATA_FROM_DAO)
                 || rowCache.size() == 0) {
             openDAO();
+            this.daoColsList = initializeDaoColumnsList();
+            initializeTableHeader();
         }
         currentRowNumber = 0;
         rowCache.resetCurrentRowIndex();
         setOpenFlag(true);
     }
 
+    private void initializeTableHeader() {
+        if (this.daoColsList == null) {
+            this.daoColsList = new ArrayList();
+        }
+        ArrayList tableHeaderCols = new ArrayList<>(this.daoColsList);
+        if (tableHeaderCols.get(0) == null
+                || !tableHeaderCols.get(0).equalsIgnoreCase(AppConfig.ID_COLUMN_NAME)) {
+            tableHeaderCols.add(0, AppConfig.ID_COLUMN_NAME);
+        }
+        if (!tableHeaderCols.contains(AppConfig.STATUS_COLUMN_NAME)) {
+            tableHeaderCols.add(AppConfig.STATUS_COLUMN_NAME);
+        }
+        if (!tableHeaderCols.contains(AppConfig.ERROR_COLUMN_NAME)) {
+            tableHeaderCols.add(AppConfig.ERROR_COLUMN_NAME);
+        }
+        this.tableHeader = new TableHeader(tableHeaderCols);
+    }
+
     public TableRow readTableRow() throws DataAccessObjectException {
         if (!isOpenFlag()) {
             open();
@@ -110,9 +134,19 @@ protected AppConfig getAppConfig() {
         return this.appConfig;
     }
 
+    protected TableHeader getTableHeader() {
+        return this.tableHeader;
+    }
+
+    @Override
+    public List getColumnNames() {
+        return new ArrayList<>(this.daoColsList);
+    }
+
     abstract protected void setOpenFlag(boolean open);
     abstract protected boolean isOpenFlag();
     abstract protected void openDAO() throws DataAccessObjectInitializationException;
     abstract protected List readTableRowListFromDAO(int maxRows) throws DataAccessObjectException;
     abstract protected TableRow readTableRowFromDAO() throws DataAccessObjectException;
+    abstract protected List initializeDaoColumnsList() throws DataAccessObjectInitializationException;
 }
\ No newline at end of file
diff --git a/src/main/java/com/salesforce/dataloader/dao/DataWriter.java b/src/main/java/com/salesforce/dataloader/dao/DataWriter.java
index f39a1173..66cb9772 100644
--- a/src/main/java/com/salesforce/dataloader/dao/DataWriter.java
+++ b/src/main/java/com/salesforce/dataloader/dao/DataWriter.java
@@ -55,6 +55,7 @@ public interface DataWriter extends DataAccessObject {
      * @throws DataAccessObjectException
      */
     boolean writeRow(Row inputRow) throws DataAccessObjectException;
+    boolean writeTableRow(TableRow inputRow) throws DataAccessObjectException;
 
     /**
      * @param inputRowList
diff --git a/src/main/java/com/salesforce/dataloader/dao/csv/CSVFileReader.java b/src/main/java/com/salesforce/dataloader/dao/csv/CSVFileReader.java
index 6e56b389..8ce6db44 100644
--- a/src/main/java/com/salesforce/dataloader/dao/csv/CSVFileReader.java
+++ b/src/main/java/com/salesforce/dataloader/dao/csv/CSVFileReader.java
@@ -47,7 +47,6 @@
 import com.salesforce.dataloader.exception.DataAccessObjectException;
 import com.salesforce.dataloader.exception.DataAccessObjectInitializationException;
 import com.salesforce.dataloader.exception.DataAccessRowException;
-import com.salesforce.dataloader.model.TableHeader;
 import com.salesforce.dataloader.model.TableRow;
 import com.salesforce.dataloader.util.AppUtil;
 import com.salesforce.dataloader.util.DAORowUtil;
@@ -65,8 +64,6 @@ public class CSVFileReader extends AbstractDataReaderImpl {
     private File file;
     private FileInputStream input;
     private CSVReader csvReader;
-    private List headerRow;
-    private TableHeader tableHeader;
     private boolean isOpen;
     private char[] csvDelimiters;
     private boolean endOfFileReached = false;
@@ -119,7 +116,6 @@ public void checkConnection() throws DataAccessObjectInitializationException {
     @Override
     protected void openDAO() throws DataAccessObjectInitializationException {
         initalizeInput(csvDelimiters);
-        readHeaderRow();
     }
 
     /**
@@ -183,57 +179,51 @@ record = csvReader.nextRecord();
             return null;
         }
 
-        if (record.size() > headerRow.size()) {
+        if (record.size() > getColumnNames().size()) {
             String errMsg = Messages.getFormattedString("CSVFileDAO.errorRowTooLarge", new String[]{
-                    String.valueOf(getCurrentRowNumber()), String.valueOf(record.size()), String.valueOf(headerRow.size())});
+                    String.valueOf(getCurrentRowNumber()), String.valueOf(record.size()), String.valueOf(getColumnNames().size())});
             throw new DataAccessRowException(errMsg);
-        } else if (record.size() < headerRow.size()) {
+        } else if (record.size() < getColumnNames().size()) {
             String errMsg = Messages.getFormattedString("CSVFileDAO.errorRowTooSmall", new String[]{
-                    String.valueOf(getCurrentRowNumber()), String.valueOf(record.size()), String.valueOf(headerRow.size())});
+                    String.valueOf(getCurrentRowNumber()), String.valueOf(record.size()), String.valueOf(getColumnNames().size())});
             throw new DataAccessRowException(errMsg);
         }
 
-        TableRow trow = new TableRow(this.tableHeader);
+        TableRow trow = new TableRow(getTableHeader());
 
-        for (int i = 0; i < headerRow.size(); i++) {
+        for (int i = 0; i < getColumnNames().size(); i++) {
             String value = record.get(i);
             if (value == null) {
                 value = "";
             }
-            trow.put(headerRow.get(i), value);
+            trow.put(getColumnNames().get(i), value);
         }
         return trow;
     }
 
-    /**
-     * @return Names of output columns being read during each readRow call
-     */
-    @Override
-    public List getColumnNames() {
-        return headerRow;
-    }
-
-    private void readHeaderRow() throws DataAccessObjectInitializationException {
+    protected List initializeDaoColumnsList() throws DataAccessObjectInitializationException {
+        List daoColsList = null;
         try {
             synchronized (lock) {
-                headerRow = csvReader.nextRecord();
+                daoColsList = csvReader.nextRecord();
             }
-            if (headerRow == null) {
+            if (daoColsList == null) {
                 LOGGER.error(Messages.getString("CSVFileDAO.errorHeaderRow"));
                 throw new DataAccessObjectInitializationException(Messages.getString("CSVFileDAO.errorHeaderRow"));
             }
-            tableHeader = new TableHeader(headerRow);
+
             LOGGER.debug(Messages.getFormattedString(
-                    "CSVFileDAO.debugMessageHeaderRowSize", headerRow.size()));
+                    "CSVFileDAO.debugMessageHeaderRowSize", daoColsList.size()));
 
-            LOGGER.info("Columns in CSV header = " + headerRow.size());
+            LOGGER.info("Columns in CSV header = " + daoColsList.size());
+            return daoColsList;
         } catch (IOException e) {
             String errMsg = Messages.getString("CSVFileDAO.errorHeaderRow");
             LOGGER.error(errMsg, e);
             throw new DataAccessObjectInitializationException(errMsg, e);
         } finally {
             // if there's a problem getting header row, the stream needs to be closed
-            if (headerRow == null) {
+            if (daoColsList == null) {
                 IOUtils.closeQuietly(input);
             }
         }
diff --git a/src/main/java/com/salesforce/dataloader/dao/database/DatabaseReader.java b/src/main/java/com/salesforce/dataloader/dao/database/DatabaseReader.java
index 5efa42db..0a0e7e15 100644
--- a/src/main/java/com/salesforce/dataloader/dao/database/DatabaseReader.java
+++ b/src/main/java/com/salesforce/dataloader/dao/database/DatabaseReader.java
@@ -29,7 +29,6 @@
 import java.sql.*;
 import java.util.*;
 
-import com.salesforce.dataloader.model.TableHeader;
 import com.salesforce.dataloader.model.TableRow;
 
 import org.apache.commons.dbcp2.BasicDataSource;
@@ -55,8 +54,6 @@ public class DatabaseReader extends AbstractDataReaderImpl {
 
     private static Logger logger = DLLogManager.getLogger(DatabaseReader.class);
     private final BasicDataSource dataSource;
-    private List headerRow = new ArrayList();
-    private TableHeader tableHeader;
     private final SqlConfig sqlConfig;
     private final DatabaseContext dbContext;
     private boolean endOfTableReached = false;
@@ -85,11 +82,6 @@ public DatabaseReader(AppConfig appConfig, String dbConfigName) throws DataAcces
         this.dataSource = dbConfig.getDataSource();
         this.sqlConfig = dbConfig.getSqlConfig();
         this.dbContext = new DatabaseContext(dbConfigName);
-        this.headerRow = sqlConfig.getColumnNames();
-        if(headerRow == null) {
-            headerRow = new ArrayList();
-        }
-        tableHeader = new TableHeader(headerRow);
     }
 
     /*
@@ -189,9 +181,9 @@ protected TableRow readTableRowFromDAO() throws DataAccessObjectException {
         TableRow trow = null;
         ResultSet rs = dbContext.getDataResultSet();
         if (rs != null && rs.next()) {
-            trow = new TableRow(tableHeader);
+            trow = new TableRow(getTableHeader());
 
-            for (String columnName : headerRow) {
+            for (String columnName : getColumnNames()) {
                 currentColumnName = columnName;
                 Object value = rs.getObject(columnName);
                 trow.put(columnName, value);
@@ -217,11 +209,6 @@ protected TableRow readTableRowFromDAO() throws DataAccessObjectException {
         }
     }
 
-    @Override
-    public List getColumnNames() {
-        return headerRow;
-    }
-
     /*
      * (non-Javadoc)
      * @see com.salesforce.dataloader.dao.DataAccessObject#checkConnection()
@@ -246,4 +233,13 @@ public int getTotalRows() throws DataAccessObjectException {
         }
         return super.getTotalRows();
     }
+
+    @Override
+    protected List initializeDaoColumnsList() {
+        List daoColsList = sqlConfig.getColumnNames();
+        if(daoColsList == null) {
+            daoColsList = new ArrayList();
+        }
+        return daoColsList;
+    }
 }
diff --git a/src/main/java/com/salesforce/dataloader/mapping/LoadMapper.java b/src/main/java/com/salesforce/dataloader/mapping/LoadMapper.java
index f76aab36..68f77f7d 100644
--- a/src/main/java/com/salesforce/dataloader/mapping/LoadMapper.java
+++ b/src/main/java/com/salesforce/dataloader/mapping/LoadMapper.java
@@ -28,7 +28,7 @@
 import com.salesforce.dataloader.client.PartnerClient;
 import com.salesforce.dataloader.exception.MappingInitializationException;
-import com.salesforce.dataloader.model.Row;
+import com.salesforce.dataloader.model.TableHeader;
 import com.salesforce.dataloader.model.TableRow;
 import com.salesforce.dataloader.util.AppUtil;
 
 import com.sforce.soap.partner.Field;
@@ -56,7 +56,9 @@ public class LoadMapper extends Mapper {
 
     private static final Logger logger = DLLogManager.getLogger(Mapper.class);
-
+    private TableHeader localCompositeRowHeader = null;
+    private TableHeader sfdcRowHeader = null;
+
     public LoadMapper(PartnerClient client, Collection columnNames, Field[] fields, String mappingFileName) throws MappingInitializationException {
         super(client, columnNames, fields, mappingFileName);
@@ -92,11 +94,36 @@ public Map getMappingWithUnmappedColumns(boolean includeUnmapped
                 result.put(currentMapEntry.getKey(), currentMapEntry.getValue());
             }
         }
-
         return result;
     }
 
-    public Row mapData(TableRow localRow) {
+    private void initializeHeaders() {
+        Map currentMappings = getMappingWithUnmappedColumns(false);
+        this.localCompositeRowHeader = new TableHeader(new ArrayList(currentMappings.keySet()));
+        ArrayList sfdcFieldList = new ArrayList();
+        for (String localCompositeCol : currentMappings.keySet()) {
+            String sfdcNameList = getMapping(localCompositeCol, true, true);
+            if (StringUtils.hasText(sfdcNameList)) {
+                String sfdcNameArray[] = sfdcNameList.split(AppUtil.COMMA);
+                for (String sfdcName : sfdcNameArray) {
+                    sfdcFieldList.add(sfdcName.trim());
+                }
+            }
+        }
+        Map constantsMap = getConstantsMap();
+        for (String sfdcNameList : constantsMap.keySet()) {
+            String sfdcNameArray[] = sfdcNameList.split(AppUtil.COMMA);
+            for (String sfdcName : sfdcNameArray) {
+                sfdcFieldList.add(sfdcName.trim());
+            }
+        }
+        this.sfdcRowHeader = new TableHeader(sfdcFieldList);
+    }
+
+    public TableRow mapData(TableRow localRow, boolean firstRow) {
+        if (firstRow) {
+            initializeHeaders();
+        }
         Set compositeDAOCols = this.getCompositeDAOColumns();
         HashMap compositeColValueMap = new HashMap();
         HashMap compositeColSizeMap = this.getCompositeColSizeMap();
@@ -135,7 +162,7 @@ public Row mapData(TableRow localRow) {
             }
         }
 
-        Row localCompositeRow = new Row();
+        TableRow localCompositeRow = new TableRow(this.localCompositeRowHeader);
         for (String compositeCol : compositeDAOCols) {
             Object[] compositeColValueArray = compositeColValueMap.get(compositeCol);
             Object compositeColValue = compositeColValueArray[0];
@@ -144,16 +171,16 @@ public Row mapData(TableRow localRow) {
             }
             localCompositeRow.put(compositeCol, compositeColValue);
         }
-        Row mappedData = new Row();
-        for (Map.Entry entry : localCompositeRow.entrySet()) {
-            String sfdcNameList = getMapping(entry.getKey(), true, true);
+        TableRow mappedData = new TableRow(this.sfdcRowHeader);
+        for (String localCompositeRowElement : localCompositeRowHeader.getColumns()) {
+            String sfdcNameList = getMapping(localCompositeRowElement, true, true);
         if (StringUtils.hasText(sfdcNameList)) {
             String sfdcNameArray[] = sfdcNameList.split(AppUtil.COMMA);
             for (String sfdcName : sfdcNameArray) {
-                    mappedData.put(sfdcName.trim(), entry.getValue());
+                    mappedData.put(sfdcName.trim(), localCompositeRow.get(localCompositeRowElement));
                 }
             } else {
-                logger.info("Mapping for field " + entry.getKey() + " will be ignored since destination column is empty");
+                logger.info("Mapping for field " + localCompositeRowElement + " will be ignored since destination column is empty");
             }
         }
         mapConstants(mappedData);
diff --git a/src/main/java/com/salesforce/dataloader/mapping/Mapper.java b/src/main/java/com/salesforce/dataloader/mapping/Mapper.java
index bc7caf89..872cd5c9 100644
--- a/src/main/java/com/salesforce/dataloader/mapping/Mapper.java
+++ b/src/main/java/com/salesforce/dataloader/mapping/Mapper.java
@@ -55,6 +55,7 @@
 import com.salesforce.dataloader.dyna.ParentSObjectFormatter;
 import com.salesforce.dataloader.exception.MappingInitializationException;
 import com.salesforce.dataloader.model.Row;
+import com.salesforce.dataloader.model.TableRow;
 import com.salesforce.dataloader.util.AppUtil;
 import com.salesforce.dataloader.util.OrderedProperties;
 
@@ -249,8 +250,10 @@ private static String extractConstant(String constantVal) {
         return constantVal.substring(1, constantVal.length() - 1);
     }
 
-    protected void mapConstants(Row rowMap) {
-        rowMap.putAll(constants);
+    protected void mapConstants(TableRow row) {
+        for (String constKey : constants.keySet()) {
+            row.put(constKey, constants.get(constKey));
+        }
     }
 
     private Properties loadProperties(String fileName) throws MappingInitializationException {
diff --git a/src/main/java/com/salesforce/dataloader/mapping/SOQLMapper.java b/src/main/java/com/salesforce/dataloader/mapping/SOQLMapper.java
index 045ff689..b7ed2626 100644
--- a/src/main/java/com/salesforce/dataloader/mapping/SOQLMapper.java
+++ b/src/main/java/com/salesforce/dataloader/mapping/SOQLMapper.java
@@ -31,6 +31,7 @@
 import com.salesforce.dataloader.mapping.SOQLInfo.SOQLFieldInfo;
 import com.salesforce.dataloader.mapping.SOQLInfo.SOQLParserException;
 import com.salesforce.dataloader.model.Row;
+import com.salesforce.dataloader.model.TableRow;
 import com.sforce.soap.partner.DescribeSObjectResult;
 import com.sforce.soap.partner.Field;
 import com.sforce.soap.partner.FieldType;
@@ -193,6 +194,13 @@ public Row mapCsvRowSfdcToLocal(List headers, List values, Strin
         return resultRow;
     }
 
+
+    protected void mapConstants(Row row) {
+        for (String constKey : getConstantsMap().keySet()) {
+            row.put(constKey, getConstantsMap().get(constKey));
+        }
+    }
+
     public boolean parseSoql(String soql) throws InvalidMappingException {
         try {
             new SOQLInfo(soql);
diff --git a/src/main/java/com/salesforce/dataloader/model/Row.java b/src/main/java/com/salesforce/dataloader/model/Row.java
index 5f40b0e9..7f85efaf 100644
--- a/src/main/java/com/salesforce/dataloader/model/Row.java
+++ b/src/main/java/com/salesforce/dataloader/model/Row.java
@@ -25,6 +25,7 @@
  */
 package com.salesforce.dataloader.model;
 
+import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.HashMap;
@@ -142,4 +143,12 @@ public String toString() {
                 " columns=" + internalMap + '}';
     }
+
+    public TableRow convertToTableRow(TableHeader header) {
+        TableRow trow = new TableRow(header);
+        for (String headerColName : header.getColumns()) {
+            trow.put(headerColName, this.get(headerColName));
+        }
+        return trow;
+    }
 }
diff --git a/src/test/java/com/salesforce/dataloader/mapping/LoadMapperTest.java b/src/test/java/com/salesforce/dataloader/mapping/LoadMapperTest.java
index 94741b6c..032d2825 100644
--- a/src/test/java/com/salesforce/dataloader/mapping/LoadMapperTest.java
+++ b/src/test/java/com/salesforce/dataloader/mapping/LoadMapperTest.java
@@ -154,7 +154,7 @@ public void testDuplicateConstants() throws MappingInitializationException {
         mappings.setProperty("", "Value6");
         LoadMapper mapper = new LoadMapper(null, null, null, null);
         mapper.putPropertyFileMappings(mappings);
-        Row result = mapper.mapData(TableRow.emptyRow());
+        TableRow result = mapper.mapData(TableRow.emptyRow(), true);
         assertEquals(constantValue, result.get("Name"));
         assertEquals(constantValue, result.get("field1__c"));
         assertEquals(constantValue, result.get("field2__c"));
@@ -200,7 +200,7 @@ public void testColumnValueDoesNotOverrideConstant() throws MappingInitializatio
         //(src, dest).
         mapper.putMapping(csvFieldName, sfdcField);
-        Map result = mapper.mapData(input);
+        TableRow result = mapper.mapData(input, true);
 
         //verify that the old value holds
         assertEquals(constantValue, result.get(sfdcField));
@@ -235,7 +235,7 @@ public void testConstValueOverridesColumnValue() throws Exception {
 
         TableRow input = TableRow.singleEntryImmutableRow(constantValue, sfdcField);
 
-        Map result = mapper.mapData(input);
+        TableRow result = mapper.mapData(input, true);
 
         //verify that the old value holds
         assertEquals(constantValue, result.get(sfdcField));
@@ -248,9 +248,8 @@ public void testMapDataEmptyEntriesIgnored() throws Exception {
 
         TableRow inputData = TableRow.singleEntryImmutableRow("SOURCE_COL", "123");
 
-        Map result = loadMapper.mapData(inputData);
-
-        assertTrue("Empty destination column should have not been mapped", result.isEmpty());
+        TableRow result = loadMapper.mapData(inputData, true);
+        assertTrue("Empty destination column should have not been mapped", result.getNonEmptyCellsCount() == 0);
@@ -276,7 +275,7 @@ public void testVerifyMappingsAreValidUnknownColumn() throws Exception {
      * Helper method to verify that the LoadMapper has mapped the specified columns to their correct respective field, along with any constants in the mapping file.
      */
     private void verifyMapping(LoadMapper mapper, String... destNames) {
-        Row destValueMap = mapper.mapData(this.sourceRow);
+        TableRow destValueMap = mapper.mapData(this.sourceRow, true);
         for (int i = 0; i < destNames.length; i++) {
             assertNotNull("Destination# " + i + "(" + destNames[i] + ") should have a mapped value",