Commit

Merge branch 'develop' into fix_identifier_validation_no_identifier_scheme
lbownik authored Nov 15, 2024
2 parents 9f34cf3 + 21e42d3 commit 031c4ba
Showing 18 changed files with 777 additions and 206 deletions.
@@ -101,6 +101,11 @@ public Long getId() {
public DatasetFieldType getDatasetFieldType() {
return datasetFieldType;
}

public boolean isOfType(final DatasetFieldType type) {

return getDatasetFieldType().equals(type);
}

public DatasetVersion getDatasetVersion() {
return datasetVersion;
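The new isOfType helper replaces the getDatasetFieldType().equals(...) chain at call sites. A minimal call-site sketch, not taken from this commit (the surrounding method and parameter names are illustrative only):

    // Sketch only: any DatasetFieldType already in scope can be passed as 'type'.
    boolean versionHasFieldOfType(DatasetVersion version, DatasetFieldType type) {
        for (DatasetField field : version.getDatasetFields()) {
            if (field.isOfType(type)) {   // was: field.getDatasetFieldType().equals(type)
                return true;
            }
        }
        return false;
    }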
@@ -110,6 +110,8 @@ public class DatasetFieldType implements Serializable, Comparable<DatasetFieldType>
private int displayOrder;

private String displayFormat;

private boolean exportToFile = false;

/** Determines whether an instance of this field type may have multiple values. */
private boolean allowMultiples;
@@ -184,6 +186,12 @@ public boolean isRequiredInDataverse() {
public String getDisplayFormat() {
return displayFormat;
}


public boolean isExportToFile() {

return this.exportToFile;
}

public int getDisplayOrder() {
return this.displayOrder;
@@ -510,6 +518,11 @@ public void setRequiredInDataverse(boolean requiredInDataverse) {
public void setDisplayFormat(String displayFormat) {
this.displayFormat = displayFormat;
}

public void setExportToFile(final boolean exportToFile) {

this.exportToFile = exportToFile;
}

public void setDisplayOrder(int displayOrder) {
this.displayOrder = displayOrder;
@@ -205,6 +205,18 @@ public List<DatasetField> getDatasetFields() {
public List<DatasetField> getDatasetFieldsOptional() {
return datasetFieldsOptional;
}

public List<DatasetField> getDatasetFieldsAll() {

if (getDatasetFieldsOptional().isEmpty()) {
return getDatasetFields();
} else {
final List<DatasetField> result = new ArrayList<DatasetField>(
getDatasetFields());
result.addAll(getDatasetFieldsOptional());
return result;
}
}

public Date getArchiveTime() {
return archiveTime;
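getDatasetFieldsAll() returns the required fields alone when no optional fields are present, and the concatenation of both lists otherwise. Combined with the new DatasetFieldType.exportToFile flag, a downstream exporter could filter a version's fields in one pass. A hedged sketch under that assumption (the owning entity is taken to be DatasetVersion; the helper below is not part of this commit):

    // Illustrative only; assumes java.util.List and java.util.ArrayList are imported.
    List<DatasetField> exportableFields(DatasetVersion version) {
        List<DatasetField> result = new ArrayList<>();
        for (DatasetField field : version.getDatasetFieldsAll()) {
            if (field.getDatasetFieldType().isExportToFile()) {
                result.add(field);
            }
        }
        return result;
    }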
8 changes: 8 additions & 0 deletions dataverse-persistence/src/main/resources/Bundle_en.properties
@@ -574,6 +574,13 @@ dashboard.card.maximumembargo.save.success=Maximum embargo length settings have
dashboard.card.maximumembargo.save.failure=Maximum embargo length settings have not been saved.
dashboard.card.maximumembargo.notSet.header=Not Set
dashboard.card.maximumembargo.notSet.text=No Maximum
dashboard.card.exportsearchresults.header=Export search results
dashboard.card.exportsearchresults.button=Manage
dashboard.card.exportsearchresults.manage=Manage exported metadata
dashboard.card.exportsearchresults.name=Name
dashboard.card.exportsearchresults.desc=Description
dashboard.card.exportsearchresults.exported=Exported to file


#dasboard-licenses.xhtml
dashboard.license.header=Licenses
@@ -1053,6 +1060,7 @@ dataverse.results.header=Search results
dataverse.results.btn.addData=Add Data
dataverse.results.btn.addData.newDataverse=New Dataverse
dataverse.results.btn.addData.newDataset=New Dataset
dataverse.results.btn.saveToFile=Save to file
dataverse.results.dialog.addDataGuest.header=Add Data
dataverse.results.dialog.addDataGuest.msg=You need to <a href="/loginpage.xhtml{0}" title="Log into your Dataverse Account">Log In</a> to create a dataverse or add a dataset.
dataverse.results.dialog.addDataGuest.msg.signup=You need to <a href="/loginpage.xhtml{0}" title="Log into or sign up for your Dataverse Account">Log In/Sign Up</a> to create a dataverse or add a dataset.
7 changes: 7 additions & 0 deletions dataverse-persistence/src/main/resources/Bundle_pl.properties
@@ -552,6 +552,12 @@ dashboard.card.maximumembargo.save.success=Ustawienia maksymalnej d\u0142ugo\u01
dashboard.card.maximumembargo.save.failure=Ustawienia maksymalnej d\u0142ugo\u015Bci embarga nie zosta\u0142y zapisane.
dashboard.card.maximumembargo.notSet.header=Nie ustawiono
dashboard.card.maximumembargo.notSet.text=Brak maksimum
dashboard.card.exportsearchresults.header=Eksport wynik\u00f3w wyszukiwania
dashboard.card.exportsearchresults.button=Zarz\u0105dzaj
dashboard.card.exportsearchresults.manage=Zarz\u0105dzaj eksportowanymi metadanymi
dashboard.card.exportsearchresults.name=Nazwa
dashboard.card.exportsearchresults.desc=Opis
dashboard.card.exportsearchresults.exported=Eksportuj do pliku

#dasboard-licenses.xhtml
dashboard.license.header=Licencje
@@ -1031,6 +1037,7 @@ dataverse.results.header=Wyniki wyszukiwania
dataverse.results.btn.addData=Dodaj dane
dataverse.results.btn.addData.newDataverse=Nowa kolekcja
dataverse.results.btn.addData.newDataset=Nowy zbi\u00F3r danych
dataverse.results.btn.saveToFile=Zapisz do pliku
dataverse.results.dialog.addDataGuest.header=Dodaj dane
dataverse.results.dialog.addDataGuest.msg=By utworzy\u0107 kolekcj\u0119 lub doda\u0107 zbi\u00F3r danych musisz si\u0119 <a href="/loginpage.xhtml{0}" title="Zaloguj si\u0119 na swoje konto w repozytorium">zalogowa\u0107</a> .
dataverse.results.dialog.addDataGuest.msg.signup=By utworzy\u0107 kolekcj\u0119 lub doda\u0107 zbi\u00F3r danych, musisz si\u0119 <a href="/loginpage.xhtml{0}" title="Zaloguj si\u0119 lub za\u0142\u00F3\u017C konto w repozytorium">zalogowa\u0107/zarejestrowa\u0107</a>.
@@ -0,0 +1 @@
alter table datasetfieldtype add column exportToFile boolean not null default false;
@@ -0,0 +1,123 @@
package edu.harvard.iq.dataverse.dashboard;

import static java.lang.Boolean.FALSE;
import static java.util.Comparator.comparing;
import static java.util.stream.Collectors.toList;
import static org.apache.commons.lang.StringUtils.EMPTY;

import java.io.Serializable;
import java.util.List;

import javax.inject.Inject;
import javax.inject.Named;

import org.omnifaces.cdi.ViewScoped;

import edu.harvard.iq.dataverse.DataverseDao;
import edu.harvard.iq.dataverse.DataverseSession;
import edu.harvard.iq.dataverse.PermissionsWrapper;
import edu.harvard.iq.dataverse.persistence.dataset.DatasetFieldType;
import edu.harvard.iq.dataverse.persistence.dataset.DatasetFieldTypeRepository;
import edu.harvard.iq.dataverse.util.SystemConfig;

@ViewScoped
@Named("ExportSearchResultsPage")
public class DashboardExportSearchResultsPage implements Serializable {

private final DataverseSession session;
private final PermissionsWrapper permissionsWrapper;
private final DataverseDao dataverseDao;
private final SystemConfig systemConfig;
private final DatasetFieldTypeRepository datasetFiledTypeRepo;

private List<Metadata> metadataTypes;

@Inject
public DashboardExportSearchResultsPage(final DataverseSession session,
final PermissionsWrapper permissionsWrapper,
final DataverseDao dataverseDao, final SystemConfig systemConfig,
final DatasetFieldTypeRepository datasetFiledTypeRepo) {
this.session = session;
this.permissionsWrapper = permissionsWrapper;
this.dataverseDao = dataverseDao;
this.systemConfig = systemConfig;
this.datasetFiledTypeRepo = datasetFiledTypeRepo;
}

public List<Metadata> getMetadataTypes() {
return this.metadataTypes;
}

public String init() {
if (canEdit()) {
initMetadataTypes();
return EMPTY;
} else {
return this.permissionsWrapper.notAuthorized();
}
}

private void initMetadataTypes() {
this.metadataTypes = this.datasetFiledTypeRepo.findAll().stream()
.map(Metadata::new).sorted(comparing(Metadata::getTitle))
.collect(toList());
}

private boolean canEdit() {
return !this.systemConfig.isReadonlyMode()
&& this.session.getUser().isSuperuser();
}

public String save() {
for (final DatasetFieldType fieldType : this.datasetFiledTypeRepo.findAll()) {
fieldType.setExportToFile(isExportedToFile(fieldType.getId()));
this.datasetFiledTypeRepo.save(fieldType);
}
return EMPTY;
}

private boolean isExportedToFile(final Long id) {
return this.metadataTypes.stream().filter(mt -> mt.getId().equals(id))
.findFirst().map(Metadata::isExportable).orElse(FALSE);
}

public String cancel() {
return "/dashboard.xhtml?faces-redirect=true&dataverseId="
+ this.dataverseDao.findRootDataverse().getId();
}

public static class Metadata {

private final Long id;
private final String title;
private final String description;
private boolean exportable;

private Metadata(final DatasetFieldType fieldType) {
this.id = fieldType.getId();
this.title = fieldType.getTitle();
this.description = fieldType.getDescription();
this.exportable = fieldType.isExportToFile();
}

public boolean isExportable() {
return this.exportable;
}

public void setExportable(final boolean exportable) {
this.exportable = exportable;
}

public Long getId() {
return this.id;
}

public String getTitle() {
return this.title;
}

public String getDescription() {
return this.description;
}
}
}
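The page is @ViewScoped and exposed to the view layer as ExportSearchResultsPage; the expected flow is init() on page load, one checkbox per Metadata row bound to exportable, then save() or cancel(). A rough sketch of that flow in plain Java (in the application it is driven by the dashboard XHTML view, which is not part of this diff; the "Title" field-type title below is a hypothetical example):

    // Illustrative driver only: 'page' would be CDI-injected in the real application,
    // and init() loads field types only for a superuser session in non-readonly mode.
    void enableTitleExport(DashboardExportSearchResultsPage page) {
        page.init();                                         // loads field types sorted by title
        for (DashboardExportSearchResultsPage.Metadata m : page.getMetadataTypes()) {
            if ("Title".equals(m.getTitle())) {              // hypothetical field-type title
                m.setExportable(true);                       // the "Exported to file" checkbox
            }
        }
        page.save();                                         // writes exportToFile back for every field type
    }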
@@ -20,9 +20,10 @@
package edu.harvard.iq.dataverse.ingest;


import org.apache.commons.io.IOUtils;
import org.apache.commons.lang.builder.ToStringBuilder;
import org.apache.commons.lang.builder.ToStringStyle;
import static java.lang.System.err;
import static java.lang.System.out;
import static java.nio.channels.FileChannel.MapMode.READ_ONLY;
import static org.apache.commons.lang.builder.ToStringStyle.MULTI_LINE_STYLE;

import java.io.ByteArrayInputStream;
import java.io.File;
@@ -32,6 +33,7 @@
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.nio.BufferUnderflowException;
import java.nio.ByteBuffer;
import java.nio.MappedByteBuffer;
import java.nio.channels.FileChannel;
import java.nio.charset.StandardCharsets;
@@ -44,8 +46,7 @@
import java.util.regex.Pattern;
import java.util.zip.GZIPInputStream;

import static java.lang.System.err;
import static java.lang.System.out;
import org.apache.commons.lang.builder.ToStringBuilder;

/**
* This is a virtually unchanged DVN v2-3 implementation by
@@ -134,7 +135,7 @@ public IngestableDataChecker() {
/**
* test this byte buffer against SPSS-SAV spec
*/
public String testSAVformat(MappedByteBuffer buff) {
public String testSAVformat(ByteBuffer buff) {
String result = null;
buff.rewind();
boolean DEBUG = false;
@@ -176,7 +177,7 @@ public String testSAVformat(MappedByteBuffer buff) {
/**
* test this byte buffer against STATA DTA spec
*/
public String testDTAformat(MappedByteBuffer buff) {
public String testDTAformat(ByteBuffer buff) {
String result = null;
buff.rewind();
boolean DEBUG = false;
@@ -294,7 +295,7 @@ public String testDTAformat(MappedByteBuffer buff) {
/**
* test this byte buffer against SAS Transport(XPT) spec
*/
public String testXPTformat(MappedByteBuffer buff) {
public String testXPTformat(ByteBuffer buff) {
String result = null;
buff.rewind();
boolean DEBUG = false;
@@ -341,7 +342,7 @@ public String testXPTformat(MappedByteBuffer buff) {
/**
* test this byte buffer against SPSS Portable (POR) spec
*/
public String testPORformat(MappedByteBuffer buff) {
public String testPORformat(ByteBuffer buff) {
String result = null;
buff.rewind();
boolean DEBUG = false;
@@ -506,7 +507,7 @@ public String testPORformat(MappedByteBuffer buff) {
/**
* test this byte buffer against R data file
*/
public String testRDAformat(MappedByteBuffer buff) {
public String testRDAformat(ByteBuffer buff) {
String result = null;
buff.rewind();

@@ -590,18 +591,18 @@ public String testRDAformat(MappedByteBuffer buff) {
public String detectTabularDataFormat(File fh) {
boolean DEBUG = false;
String readableFormatType = null;
FileChannel srcChannel = null;
FileInputStream inp = null;
try {


try (final FileInputStream inp = new FileInputStream(fh)) {

int buffer_size = this.getBufferSize(fh);
dbgLog.fine("buffer_size: " + buffer_size);

// set-up a FileChannel instance for a given file object
inp = new FileInputStream(fh);
srcChannel = inp.getChannel();
final FileChannel srcChannel = inp.getChannel();

// create a read-only MappedByteBuffer
MappedByteBuffer buff = srcChannel.map(FileChannel.MapMode.READ_ONLY, 0, buffer_size);
MappedByteBuffer buff = srcChannel.map(READ_ONLY, 0, buffer_size);

//this.printHexDump(buff, "hex dump of the byte-buffer");

@@ -661,10 +662,7 @@ public String detectTabularDataFormat(File fh) {
} catch (IOException ie) {
dbgLog.fine("other io exception detected");
ie.printStackTrace();
} finally {
IOUtils.closeQuietly(srcChannel);
IOUtils.closeQuietly(inp);
}
}
return readableFormatType;
}

@@ -709,7 +707,7 @@ private int getBufferSize(File fh) {
return BUFFER_SIZE;
}

private int getGzipBufferSize(MappedByteBuffer buff) {
private int getGzipBufferSize(ByteBuffer buff) {
int GZIP_BUFFER_SIZE = 120;
/*
note:
@@ -729,7 +727,7 @@ private int getGzipBufferSize(MappedByteBuffer buff) {
/**
* dump the data buffer in HEX
*/
public void printHexDump(MappedByteBuffer buff, String hdr) {
public void printHexDump(ByteBuffer buff, String hdr) {
int counter = 0;
if (hdr != null) {
out.println(hdr);
@@ -751,7 +749,7 @@ public void printHexDump(MappedByteBuffer buff, String hdr) {

@Override
public String toString() {
return ToStringBuilder.reflectionToString(this,
ToStringStyle.MULTI_LINE_STYLE);
return ToStringBuilder.reflectionToString(this, MULTI_LINE_STYLE);
}
}
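Because the test*format methods now accept ByteBuffer rather than MappedByteBuffer, a format check no longer requires memory-mapping a file and can run against an in-memory buffer. A hedged sketch, assuming "$FL2" is the SPSS SAV magic sequence that testSAVformat looks for, and padding the buffer so short reads do not underflow; the sketch class is assumed to live in the same package as IngestableDataChecker:

    import java.nio.ByteBuffer;
    import java.nio.charset.StandardCharsets;

    // Illustrative only: exercises testSAVformat with a wrapped byte array instead of a mapped file.
    public class IngestableDataCheckerSketch {
        public static void main(String[] args) {
            byte[] bytes = new byte[64];                               // zero padding around the magic
            byte[] magic = "$FL2".getBytes(StandardCharsets.US_ASCII); // assumed SAV magic bytes
            System.arraycopy(magic, 0, bytes, 0, magic.length);

            String format = new IngestableDataChecker().testSAVformat(ByteBuffer.wrap(bytes));
            System.out.println(format);   // a SAV format identifier, or null if not recognized
        }
    }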