Skip to content

Commit

Permalink
Changes for incremental
Browse files Browse the repository at this point in the history
In incremental mode, a new schema is not generated; the previously uploaded one is reused
Added validation for new date fields
  • Loading branch information
datasetutil committed Apr 15, 2015
1 parent 6d5c0fb commit e01ce88
Show file tree
Hide file tree
Showing 6 changed files with 141 additions and 33 deletions.
134 changes: 112 additions & 22 deletions src/main/java/com/sforce/dataset/loader/DatasetLoader.java
Original file line number Diff line number Diff line change
Expand Up @@ -89,6 +89,7 @@
import com.sforce.soap.partner.sobject.SObject;
import com.sforce.ws.ConnectionException;
import com.sforce.ws.ConnectorConfig;
import com.sforce.ws.util.Base64;

public class DatasetLoader {

Expand Down Expand Up @@ -217,35 +218,53 @@ public static boolean uploadDataset(String inputFileString,
throw new DatasetLoaderException("Operation terminated on user request");
}


schemaFile = ExternalFileSchema.getSchemaFile(inputFile, logger);
ExternalFileSchema schema = null;
logger.println("\n*******************************************************************************");
if(FilenameUtils.getExtension(inputFile.getName()).equalsIgnoreCase("csv"))
{
if(schemaFile != null && schemaFile.exists() && schemaFile.length()>0)
session.setStatus("LOADING SCHEMA");
else
session.setStatus("DETECTING SCHEMA");

schema = ExternalFileSchema.init(inputFile, inputFileCharset, logger);
if(schema==null)

if(schemaFile != null && schemaFile.exists() && schemaFile.length()>0)
{
//If this is incremental, fetch last uploaded json instead of generating a new one
if(Operation.equalsIgnoreCase("Append") || (Operation.equalsIgnoreCase("Upsert")) || (Operation.equalsIgnoreCase("Delete")))
{
logger.println("Failed to parse schema file {"+ ExternalFileSchema.getSchemaFile(inputFile, logger) +"}");
throw new DatasetLoaderException("Failed to parse schema file {"+ ExternalFileSchema.getSchemaFile(inputFile, logger) +"}");
schema = getLastUploadedJson(partnerConnection, datasetAlias, logger);
}
}else
}

if(session.isDone())
{
throw new DatasetLoaderException("Operation terminated on user request");
}

if(schema==null)
{
if(schemaFile != null && schemaFile.exists() && schemaFile.length()>0)
session.setStatus("LOADING SCHEMA");
schema = ExternalFileSchema.load(inputFile, inputFileCharset, logger);
if(schema==null)
logger.println("\n*******************************************************************************");
if(FilenameUtils.getExtension(inputFile.getName()).equalsIgnoreCase("csv"))
{
if(schemaFile != null && schemaFile.exists() && schemaFile.length()>0)
session.setStatus("LOADING SCHEMA");
else
session.setStatus("DETECTING SCHEMA");

schema = ExternalFileSchema.init(inputFile, inputFileCharset, logger);
if(schema==null)
{
logger.println("Failed to parse schema file {"+ ExternalFileSchema.getSchemaFile(inputFile, logger) +"}");
throw new DatasetLoaderException("Failed to parse schema file {"+ ExternalFileSchema.getSchemaFile(inputFile, logger) +"}");
}
}else
{
logger.println("Failed to load schema file {"+ ExternalFileSchema.getSchemaFile(inputFile, logger) +"}");
throw new DatasetLoaderException("Failed to load schema file {"+ ExternalFileSchema.getSchemaFile(inputFile, logger) +"}");
if(schemaFile != null && schemaFile.exists() && schemaFile.length()>0)
session.setStatus("LOADING SCHEMA");
schema = ExternalFileSchema.load(inputFile, inputFileCharset, logger);
if(schema==null)
{
logger.println("Failed to load schema file {"+ ExternalFileSchema.getSchemaFile(inputFile, logger) +"}");
throw new DatasetLoaderException("Failed to load schema file {"+ ExternalFileSchema.getSchemaFile(inputFile, logger) +"}");
}
}
logger.println("*******************************************************************************\n");
}
logger.println("*******************************************************************************\n");



if(schema != null)
Expand Down Expand Up @@ -1765,7 +1784,78 @@ public static String getUploadedFileStatus(PartnerConnection partnerConnection,
}// End While
}
return status;
}
}


/**
 * Fetches the metadata JSON of the most recent successfully completed upload for the
 * given dataset alias and parses it into an {@link ExternalFileSchema}.
 *
 * Used by incremental operations (Append/Upsert/Delete) so the previously uploaded
 * schema is reused instead of generating a new one from the input file.
 *
 * @param partnerConnection active Partner API connection used to run the SOQL query
 * @param datasetAlias      the dataset (EdgemartAlias) whose last schema is wanted
 * @param logger            destination for diagnostic messages
 * @return the parsed schema, or {@code null} when no completed upload exists or the
 *         row has no usable MetadataJson
 * @throws Exception if the query fails or the JSON cannot be parsed
 */
private static ExternalFileSchema getLastUploadedJson(PartnerConnection partnerConnection, String datasetAlias, PrintStream logger) throws Exception
{
	String json = null;
	ExternalFileSchema schema = null;
	// ORDER BY LastModifiedDate DESC LIMIT 1: we only ever want the newest completed row.
	String soqlQuery = String.format("SELECT Id,Status,MetadataJson FROM InsightsExternalData WHERE Status = 'Completed' AND EdgemartAlias = '%s' ORDER BY LastModifiedDate DESC LIMIT 1",datasetAlias);
	partnerConnection.setQueryOptions(2000);
	QueryResult qr = partnerConnection.query(soqlQuery);
	int rowsSoFar = 0;
	boolean done = false;
	if (qr.getSize() > 0)
	{
		// Standard Partner API pagination loop; with LIMIT 1 it normally runs once,
		// but queryMore() is honored defensively in case the server pages the result.
		while (!done)
		{
			SObject[] records = qr.getRecords();
			for (int i = 0; i < records.length; ++i)
			{
				if(rowsSoFar==0) //only use the first row; later rows are counted but ignored
				{
					Object value = SfdcUtils.getFieldValueFromQueryResult("Id", records[i]);
					Object status = SfdcUtils.getFieldValueFromQueryResult("Status", records[i]);
					// Double-check the Status even though the query already filters on it.
					if (value != null && status != null && status.toString().equalsIgnoreCase("Completed"))
					{
						Object temp = SfdcUtils.getFieldValueFromQueryResult("MetadataJson", records[i]);
						if(temp!=null)
						{
							json = decodeMetadataJson(temp);
						}
					}
				}
				rowsSoFar++;
			}
			if (qr.isDone()) {
				done = true;
			} else {
				qr = partnerConnection.queryMore(qr.getQueryLocator());
			}
		}// End While
	}
	if(rowsSoFar>1)
	{
		// Fixed message: previously logged the wrong method name ("getLastIncompleteFileHdr()").
		logger.println("getLastUploadedJson() returned more than one row");
	}

	if(json != null)
	{
		schema = ExternalFileSchema.load(IOUtils.toInputStream(json), DatasetUtils.utf8Charset, logger);
	}
	return schema;
}

/**
 * Converts a MetadataJson field value to a UTF-8 JSON string. The Partner API may
 * surface the blob as a byte array, an InputStream, or a (possibly Base64-encoded)
 * String depending on transport; all three shapes are handled here.
 *
 * @param temp non-null raw field value from the query result
 * @return the decoded JSON text
 * @throws Exception if the bytes/stream cannot be read as UTF-8
 */
private static String decodeMetadataJson(Object temp) throws Exception
{
	if(temp instanceof byte[])
	{
		return IOUtils.toString((byte[])temp, "UTF-8");
	}
	if(temp instanceof InputStream)
	{
		return IOUtils.toString((InputStream)temp, "UTF-8");
	}
	String str = temp.toString();
	if(Base64.isBase64(str))
	{
		return IOUtils.toString(Base64.decode(str.getBytes()),"UTF-8");
	}
	return str;
}



}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -404,7 +404,7 @@ private static void validateSchema(ExternalFileSchema schema, PrintStream logger
message.append("Duplicate field name {"+user_field.getName()+"}\n");
}

if(user_field.getName().endsWith("_sec_epoch") || user_field.getName().endsWith("_day_epoch") || user_field.getName().endsWith("_Day") || user_field.getName().endsWith("_Month") || user_field.getName().endsWith("_Year") || user_field.getName().endsWith("_Quarter") || user_field.getName().endsWith("_Week"))
if(user_field.getName().endsWith("_sec_epoch") || user_field.getName().endsWith("_day_epoch") || user_field.getName().endsWith("_Day") || user_field.getName().endsWith("_Month") || user_field.getName().endsWith("_Year") || user_field.getName().endsWith("_Quarter") || user_field.getName().endsWith("_Week") || user_field.getName().endsWith("_Hour")|| user_field.getName().endsWith("_Minute")|| user_field.getName().endsWith("_Second")|| user_field.getName().endsWith("_Month_Fiscal")|| user_field.getName().endsWith("_Year_Fiscal")|| user_field.getName().endsWith("_Quarter_Fiscal") || user_field.getName().endsWith("_Week_Fiscal"))
{
for(FieldType user_field_2:user_fields)
{
Expand Down
2 changes: 1 addition & 1 deletion src/main/java/com/sforce/dataset/server/ListServlet.java
Original file line number Diff line number Diff line change
Expand Up @@ -86,7 +86,7 @@ protected void doGet(HttpServletRequest request, HttpServletResponse response) t
mapper.writeValue(response.getOutputStream(), folders);
}else if(value.equalsIgnoreCase("dataset"))
{
List<DatasetType> datasets = DatasetUtils.listDatasets(conn);
List<DatasetType> datasets = DatasetUtils.listDatasets(conn, false);
DatasetType def = new DatasetType();
def.name = "";
def._alias = "";
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -82,7 +82,7 @@ public DatasetAugmenter(String username,String password, String token, String en
@SuppressWarnings({ "rawtypes", "unchecked" })
public static void augmentEM(PartnerConnection partnerConnection) throws Exception
{
Map<String, Map> map = DatasetUtils.listPublicDataset(partnerConnection);
Map<String, Map> map = DatasetUtils.listPublicDataset(partnerConnection, true);
System.out.println("\n");
if(map==null || map.size()==0)
{
Expand Down
19 changes: 18 additions & 1 deletion src/main/java/com/sforce/dataset/util/DatasetType.java
Original file line number Diff line number Diff line change
Expand Up @@ -40,6 +40,23 @@ public class DatasetType {
/**
 * Access rights reported for a dataset. All permissions default to {@code true};
 * callers overwrite them from the API response where applicable.
 */
class PermissionType {
	public boolean modify=true;
	public boolean manage=true;
	// Fixed: the field was declared twice (diff artifact), which does not compile.
	public boolean view=true;
	@Override
	public String toString() {
		return "PermissionType [modify=" + modify + ", manage=" + manage
				+ ", view=" + view + "]";
	}
}

@Override
public String toString() {
	// Debug-friendly dump of all dataset attributes; field order matters for log diffs.
	StringBuilder sb = new StringBuilder("DatasetType [");
	sb.append("assetIcon=").append(assetIcon);
	sb.append(", _uid=").append(_uid);
	sb.append(", _type=").append(_type);
	sb.append(", _createdDateTime=").append(_createdDateTime);
	sb.append(", _lastAccessed=").append(_lastAccessed);
	sb.append(", name=").append(name);
	sb.append(", _alias=").append(_alias);
	sb.append(", _url=").append(_url);
	sb.append(", assetIconUrl=").append(assetIconUrl);
	sb.append(", assetSharingUrl=").append(assetSharingUrl);
	sb.append(", _permissions=").append(_permissions);
	sb.append("]");
	return sb.toString();
}


}
15 changes: 8 additions & 7 deletions src/main/java/com/sforce/dataset/util/DatasetUtils.java
Original file line number Diff line number Diff line change
Expand Up @@ -83,16 +83,16 @@ public class DatasetUtils {
public static final int DEFAULT_BUFFER_SIZE = 1024 * 4;
private static final SimpleDateFormat sfdcDateTimeFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'");

private static final Charset utf8Charset = Charset.forName("UTF-8");
public static final Charset utf8Charset = Charset.forName("UTF-8");


private static boolean hasLoggedIn = false;

@SuppressWarnings("rawtypes")
public static Map<String,Map> listPublicDataset(PartnerConnection connection) throws Exception {
public static Map<String,Map> listPublicDataset(PartnerConnection connection, boolean isCurrent) throws Exception {
GetUserInfoResult userInfo = connection.getUserInfo();
String userID = userInfo.getUserId();
Map<String, Map> dataSetMap = listDataset(connection);
Map<String, Map> dataSetMap = listDataset(connection, isCurrent);
if(dataSetMap==null || dataSetMap.size()==0)
{
return dataSetMap;
Expand All @@ -115,7 +115,7 @@ public static Map<String,Map> listPublicDataset(PartnerConnection connection) th
}

@SuppressWarnings({ "rawtypes", "unchecked" })
public static Map<String,Map> listDataset(PartnerConnection partnerConnection) throws Exception {
public static Map<String,Map> listDataset(PartnerConnection partnerConnection, boolean isCurrent) throws Exception {
LinkedHashMap<String,Map> dataSetMap = new LinkedHashMap<String,Map>();
partnerConnection.getServerTimestamp();
ConnectorConfig config = partnerConnection.getConfig();
Expand All @@ -134,7 +134,7 @@ public static Map<String,Map> listDataset(PartnerConnection partnerConnection) t

URI u = new URI(serviceEndPoint);

URI listEMURI = new URI(u.getScheme(),u.getUserInfo(), u.getHost(), u.getPort(), "/insights/internal_api/v1.0/esObject/edgemart", "current=true",null);
URI listEMURI = new URI(u.getScheme(),u.getUserInfo(), u.getHost(), u.getPort(), "/insights/internal_api/v1.0/esObject/edgemart", "current="+isCurrent+"&sortOrder=Mru",null);
HttpGet listEMPost = new HttpGet(listEMURI);

listEMPost.setConfig(requestConfig);
Expand All @@ -143,6 +143,7 @@ public static Map<String,Map> listDataset(PartnerConnection partnerConnection) t
HttpEntity emresponseEntity = emresponse.getEntity();
InputStream emis = emresponseEntity.getContent();
String emList = IOUtils.toString(emis, "UTF-8");
System.out.println("Response Size:"+emList.length());
emis.close();
httpClient.close();

Expand Down Expand Up @@ -179,10 +180,10 @@ public static Map<String,Map> listDataset(PartnerConnection partnerConnection) t


@SuppressWarnings("rawtypes")
public static List<DatasetType> listDatasets(PartnerConnection connection) throws Exception
public static List<DatasetType> listDatasets(PartnerConnection connection, boolean isCurrent) throws Exception
{
List<DatasetType> datasetList = new LinkedList<DatasetType>();
Map<String, Map> dataSetMap = listDataset(connection);
Map<String, Map> dataSetMap = listDataset(connection, isCurrent);
if(dataSetMap != null && !dataSetMap.isEmpty())
{
for(String alias:dataSetMap.keySet())
Expand Down

0 comments on commit e01ce88

Please sign in to comment.