/*
* Copyright 2019 Global Crop Diversity Trust
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.genesys.server.service.impl;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.stream.Collectors;
import javax.servlet.ServletOutputStream;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.collections4.MapUtils;
import org.genesys.amphibian.client.api.DatasetsApi;
import org.genesys.amphibian.client.api.InfoApi;
import org.genesys.amphibian.client.api.PreviewApi;
import org.genesys.amphibian.client.invoker.ApiClient;
import org.genesys.amphibian.client.model.DatasetTable;
import org.genesys.amphibian.client.model.HeatMap;
import org.genesys.amphibian.client.model.IngestColumnRequest;
import org.genesys.amphibian.client.model.LongToWide;
import org.genesys.amphibian.client.model.ObservationChart;
import org.genesys.amphibian.client.model.ObservationHistogram;
import org.genesys.amphibian.client.model.Preview;
import org.genesys.amphibian.client.model.PreviewDataFilter;
import org.genesys.amphibian.client.model.RowMetadata;
import org.genesys.amphibian.client.model.StatisticsData;
import org.genesys.amphibian.client.model.TraitDataFilter;
import org.genesys.amphibian.client.model.ValidateCategoricalRequest;
import org.genesys.amphibian.client.model.ValidateNumericRequest;
import org.genesys.amphibian.client.model.ValidateTextRequest;
import org.genesys.amphibian.client.model.ValueMapping;
import org.genesys.blocks.security.SecurityContextUtil;
import org.genesys.filerepository.InvalidRepositoryFileDataException;
import org.genesys.filerepository.InvalidRepositoryPathException;
import org.genesys.filerepository.NoSuchRepositoryFileException;
import org.genesys.filerepository.model.RepositoryFile;
import org.genesys.filerepository.model.RepositoryFolder;
import org.genesys.filerepository.service.RepositoryService;
import org.genesys.server.component.security.AsAdminInvoker;
import org.genesys.server.exception.InvalidApiUsageException;
import org.genesys.server.exception.NotFoundElement;
import org.genesys.server.model.dataset.Dataset;
import org.genesys.server.model.genesys.Accession;
import org.genesys.server.model.traits.Descriptor;
import org.genesys.server.service.AccessionService;
import org.genesys.server.service.AmphibianService;
import org.genesys.server.service.DatasetService;
import org.genesys.server.service.DownloadService;
import org.genesys.server.service.filter.AccessionFilter;
import org.genesys.server.service.worker.AccessionRefMatcher;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Lazy;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Sort;
import org.springframework.security.access.prepost.PreAuthorize;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.web.client.HttpClientErrorException;
import org.springframework.web.multipart.MultipartFile;
import com.google.common.collect.Lists;
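/**
 * Bridge between Genesys and the external Amphibian service: uploads dataset
 * files to the file repository, ingests them as Amphibian previews, validates
 * and maps preview columns to descriptors, and queries observation data,
 * charts, heat maps and histograms for published datasets.
 */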
@Service
public class AmphibianServiceImpl implements AmphibianService, InitializingBean {
public static final Logger LOG = LoggerFactory.getLogger(AmphibianServiceImpl.class);
@Autowired(required = false)
ApiClient amphibianClient;
@Autowired
private RepositoryService repositoryService;
@Autowired
private AsAdminInvoker asAdminInvoker;
/** The Amphibian file repository folder path. */
@Value("${file.repository.amphibian.folder}")
public String amphibianRepositoryPath;
@Autowired
private AccessionRefMatcher accessionRefMatcher;
@Autowired
@Lazy
private DatasetService datasetService;
@Autowired
@Lazy
private AccessionService accessionService;
@Autowired
private DownloadService downloadService;
private InfoApi infoApi;
private PreviewApi previewApi;
private DatasetsApi datasetsApi;
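/**
 * Initialize the Amphibian API clients when an {@link ApiClient} is configured
 * and log the remote service version, or note that Amphibian is not in use.
 */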
@Override
public void afterPropertiesSet() throws Exception {
if (amphibianClient != null) {
infoApi = new InfoApi(amphibianClient);
previewApi = new PreviewApi(amphibianClient);
datasetsApi = new DatasetsApi(amphibianClient);
try {
LOG.info("Amphibian {} at {}", infoApi.getVersion(), amphibianClient.getBasePath());
} catch (Throwable e) {
LOG.warn("Amphibian at {} not reachable: {}", amphibianClient.getBasePath(), e.getMessage());
}
} else {
LOG.info("Not using Amphibian.");
}
}
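/**
 * Copy the repository file to a temporary local file and send it to the
 * Amphibian preview API for ingestion under the given UUID.
 */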
@Override
public Preview makePreview(UUID uuid, RepositoryFile repositoryFile) throws IOException, AmphibianException {
if (amphibianClient == null) {
throw new AmphibianNotAvailableException();
}
if (repositoryFile == null) {
throw new AmphibianException("File contents not available");
}
File localFile = Files.createTempFile("amph", "tmp").toFile();
try (OutputStream fos = new BufferedOutputStream(new FileOutputStream(localFile, false), 8 * 1000)) {
repositoryService.streamFileBytes(repositoryFile, fos);
}
try {
return previewApi.ingest(uuid, null, null, repositoryFile.getContentType(), localFile);
} catch (Throwable e) {
throw new AmphibianException("Error ingesting file", e);
} finally {
localFile.delete();
}
}
@Override
public Preview loadPreview(UUID uuid) {
if (amphibianClient == null) {
throw new AmphibianNotAvailableException();
}
return previewApi.getPreview(uuid);
}
@Override
public void deletePreview(Preview preview) {
if (amphibianClient == null) {
throw new AmphibianNotAvailableException();
}
previewApi.deletePreview(preview.getReferenceUuid());
}
@Override
public List<Object> getPreviewData(UUID uuid, int sheet, long startRow, int count, List<String> selectedColumns, PreviewDataFilter previewFilter) throws AmphibianException {
if (amphibianClient == null) {
throw new AmphibianNotAvailableException();
}
if (previewFilter == null) {
LOG.debug("Previewfilter empty!");
} else {
LOG.info("Filtering: {}", previewFilter.getFields());
}
try {
return previewApi.getData(uuid, sheet, startRow, count, selectedColumns, previewFilter);
} catch (Throwable e) {
throw new AmphibianException("Error getting preview data", e);
}
}
@Override
public List<StatisticsData> getStatisticsData(UUID uuid, int sheet, long startRow, int maxDistinct, List<String> selectedColumns) throws AmphibianException {
if (amphibianClient == null) {
throw new AmphibianNotAvailableException();
}
try {
return previewApi.getStatisticsData(uuid, sheet, selectedColumns, startRow, maxDistinct); // return at most maxDistinct distinct values
} catch (Throwable e) {
throw new AmphibianException("Error getting statistics data", e);
}
}
@Override
@PreAuthorize("hasAnyRole('ADMINISTRATOR', 'VETTEDUSER')")
public List<RepositoryFile> myFiles() throws InvalidRepositoryPathException {
Path path = Paths.get(amphibianRepositoryPath, SecurityContextUtil.getMe().getSid());
if (!repositoryService.hasPath(path)) {
// return an empty list instead of throwing if the user has not uploaded anything yet
return List.of();
}
return repositoryService.getFiles(path, Sort.by(Sort.Direction.DESC, "createdDate"));
}
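/**
 * Upload a dataset file to the current user's Amphibian folder and create a
 * preview for it. If a file with the same name already exists, its content is
 * updated instead.
 */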
@Override
@Transactional
@PreAuthorize("hasAnyRole('ADMINISTRATOR', 'VETTEDUSER')")
public Preview uploadFile(MultipartFile file) throws IOException, InvalidRepositoryPathException, InvalidRepositoryFileDataException, AmphibianException {
// ensure folder exists
var repositoryFolder = ensureAmphibianFolder();
try {
var existingFile = repositoryService.getFile(repositoryFolder.getFolderPath(), file.getOriginalFilename());
LOG.warn("File with '{}' name already exists in path {}. Content of the file will be updated.", file.getOriginalFilename(), repositoryFolder.getFolderPath());
return updateFileContent(existingFile.getUuid(), file);
} catch (NoSuchRepositoryFileException e) {
// the file does not exist yet; proceed with a fresh upload
}
LOG.info("Upload file {} to path {}", file.getOriginalFilename(), repositoryFolder.getFolderPath());
RepositoryFile uploadedFile = repositoryService.addFile(repositoryFolder.getFolderPath(), file.getOriginalFilename(), file.getContentType(), file.getInputStream(), null);
// file was uploaded, make a preview for it
return makePreview(uploadedFile.getUuid(), uploadedFile);
}
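/**
 * Replace the content of an existing uploaded file (located by UUID within the
 * current user's Amphibian folder) and regenerate its preview.
 */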
@Override
@Transactional
@PreAuthorize("hasAnyRole('ADMINISTRATOR', 'VETTEDUSER')")
public Preview updateFileContent(UUID uuid, MultipartFile file) throws IOException, NoSuchRepositoryFileException, AmphibianException {
RepositoryFile storedFile = null;
try {
Path path = Paths.get(amphibianRepositoryPath, SecurityContextUtil.getMe().getSid());
storedFile = repositoryService.streamFiles(path, Sort.by("id")).filter(f -> f.getUuid().equals(uuid)).findFirst().orElseThrow(NoSuchRepositoryFileException::new);
} catch (InvalidRepositoryPathException e) {
// the user has not uploaded any dataset files at all
throw new NoSuchRepositoryFileException();
}
// Update the original file
RepositoryFile toUpdate = new RepositoryFile();
toUpdate.setUuid(storedFile.getUuid());
toUpdate.setId(storedFile.getId());
toUpdate.setVersion(storedFile.getVersion());
toUpdate = repositoryService.updateBytes(toUpdate, file.getContentType(), file.getInputStream());
return makePreview(toUpdate.getUuid(), toUpdate);
}
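/**
 * Remove an uploaded file from the current user's Amphibian folder and delete
 * its preview from the Amphibian service.
 */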
@Override
@Transactional
@PreAuthorize("hasAnyRole('ADMINISTRATOR', 'VETTEDUSER')")
public void deleteFile(UUID uuid) throws IOException, NoSuchRepositoryFileException, AmphibianException {
RepositoryFile storedFile = null;
try {
Path path = Paths.get(amphibianRepositoryPath, SecurityContextUtil.getMe().getSid());
storedFile = repositoryService.streamFiles(path, Sort.by("id")).filter(f -> f.getUuid().equals(uuid)).findFirst().orElseThrow(NoSuchRepositoryFileException::new);
} catch (InvalidRepositoryPathException e) {
// the user has not uploaded any dataset files at all
throw new NoSuchRepositoryFileException();
}
repositoryService.removeFile(storedFile);
try {
previewApi.deletePreview(uuid);
} catch (Throwable e) {
throw new AmphibianException("Error removing preview", e);
}
}
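/**
 * Ensure the per-user Amphibian upload folder exists, creating it with
 * administrator privileges if necessary.
 */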
private RepositoryFolder ensureAmphibianFolder() {
final String sid = SecurityContextUtil.getMe().getSid();
try {
return asAdminInvoker.invoke(() -> {
// Ensure target folder exists for the Amphibian
return repositoryService.ensureFolder(Paths.get(amphibianRepositoryPath, sid));
});
} catch (Exception e) {
LOG.warn("Could not create folder: {}", e.getMessage());
throw new InvalidApiUsageException("Could not create folder", e);
}
}
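/**
 * Match the provided accession references against the Genesys database and
 * attach the results as row metadata to the Amphibian preview.
 */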
@Override
@Transactional
public long addAccessionRefs(UUID uuid, int sheet, List<PreviewAccessionRef> accessionRefs) {
if (amphibianClient == null) {
throw new AmphibianNotAvailableException();
}
LOG.warn("Matching {} refs for Preview {}", accessionRefs.size(), uuid);
var matchedRefs = accessionRefMatcher.rematchAccessionRefs(accessionRefs);
LOG.warn("Matched {} refs to {} accessions for Preview {}", accessionRefs.size(), matchedRefs.size(), uuid);
var previewRowMetas = matchedRefs.stream().map(ref -> {
var rowMeta = new RowMetadata();
rowMeta.setS(sheet);
rowMeta.setR(ref.row);
if (ref.getAccession() != null) {
var meta = new HashMap<String, Object>();
meta.put("accession", ref.getAccession().getUuid());
meta.put("doi", ref.getAccession().getDoi());
meta.put("instituteCode", ref.getInstCode());
meta.put("accessionNumber", ref.getAcceNumb());
meta.put("genus", ref.getGenus());
rowMeta.setMeta(meta);
}
return rowMeta;
}).collect(Collectors.toList());
LOG.warn("Adding {} refs to Preview {}", previewRowMetas.size(), uuid);
var response = previewApi.addMetadata(uuid, previewRowMetas);
LOG.info("Done adding refs");
return response;
}
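/**
 * Validate a preview column against the descriptor's constraints without
 * ingesting it: INSTCODE and DOI columns get dedicated pattern checks, other
 * columns are validated by descriptor data type (text, numeric, coded, scale);
 * date and boolean descriptors are not supported yet.
 */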
@Override
public Set<?> ingestDryRun(UUID uuid, int sheet, long startAt, String field, String arraySeparator, List<ValueMapping> mapping, Descriptor descriptor) {
var dataType = descriptor.getDataType();
LOG.info("Dry run {}/{} startAt={} of field {} for \"{}\" as descriptor {} {}", uuid, sheet, startAt, field, descriptor.getTitle(), dataType);
if (Objects.equals("INSTCODE", descriptor.getColumnName())) {
LOG.info("Validating as INSTCODE");
return previewApi.validateText(uuid, sheet, startAt, new ValidateTextRequest()
.field(field)
.arraySeparator(arraySeparator)
.maxLength(7)
.regExp("^\\w{3}\\d{3,4}$")
.valueMapping(mapping));
} else if (Objects.equals("DOI", descriptor.getColumnName())) {
LOG.info("Validating as DOI");
return previewApi.validateText(uuid, sheet, startAt, new ValidateTextRequest()
.field(field)
.arraySeparator(arraySeparator)
.regExp("^10\\.\\d+/.+$")
.valueMapping(mapping));
}
switch (dataType) {
case TEXT: {
Integer maxLength = null;
String regExp = null;
return previewApi.validateText(uuid, sheet, startAt, new ValidateTextRequest()
.field(field)
.arraySeparator(arraySeparator)
.maxLength(maxLength)
.regExp(regExp)
.valueMapping(mapping));
}
case NUMERIC: {
var integerOnly = descriptor.getIntegerOnly();
var min = descriptor.getMinValue();
var max = descriptor.getMaxValue();
return previewApi.validateNumeric(uuid, sheet, startAt, new ValidateNumericRequest()
.field(field)
.arraySeparator(arraySeparator)
.integerOnly(integerOnly == null ? false : integerOnly)
.min(min)
.max(max)
.valueMapping(mapping));
}
case CODED: {
var terms = descriptor.getTerms();
return previewApi.validateCategorical(uuid, sheet, startAt, new ValidateCategoricalRequest()
.field(field)
.arraySeparator(arraySeparator)
.acceptedCategories(terms.stream().map(t -> t.getCode()).collect(Collectors.toList()))
.valueMapping(mapping));
}
case SCALE: {
var integerOnly = descriptor.getIntegerOnly();
var min = descriptor.getMinValue();
var max = descriptor.getMaxValue();
return previewApi.validateNumeric(uuid, sheet, startAt, new ValidateNumericRequest()
.field(field)
.arraySeparator(arraySeparator)
.integerOnly(integerOnly == null ? true : integerOnly)
.min(min)
.max(max)
.valueMapping(mapping));
}
case DATE: {
// date validation is not implemented yet; the unhandled-type exception below is thrown instead
// return previewApi.validateDate(uuid, sheet, startAt, mapping);
break;
}
case BOOLEAN: {
// boolean validation is not implemented yet; the unhandled-type exception below is thrown instead
// return previewApi.validateBoolean(uuid, sheet, startAt, mapping);
break;
}
}
throw new InvalidApiUsageException("Unhandled type " + dataType);
}
@Override
public Preview longToWide(UUID uuid, LongToWide longToWide) {
return previewApi.longToWide(uuid, longToWide);
}
@Override
public DatasetTable getAmphibianDataset(Dataset dataset) {
var tableKey = dataset.getUuid().toString();
var amphDataset = datasetsApi.getDataset(tableKey);
if (amphDataset == null) {
throw new NotFoundElement("No Amphibian dataset for " + dataset.getUuid());
} else {
LOG.info("Found Amphibian dataset {}", amphDataset.getKey());
return amphDataset;
}
}
@Override
public List<DatasetTable> getAmphibianDatasetsList(List<UUID> datasetUuids) {
var tableKeys = datasetUuids.stream().map(UUID::toString).collect(Collectors.toList());
return datasetsApi.getDatasets(tableKeys);
}
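/**
 * Ingest a preview column into the Amphibian dataset table backing the given
 * Genesys dataset, creating the table if it does not exist yet. The target
 * column type is derived from the descriptor's data type.
 */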
@Override
public Set<?> ingestFromPreview(Dataset dataset, Descriptor descriptor, Preview preview, int sheet, long startAt, String field, String arraySeparator, List<ValueMapping> mapping) {
LOG.info("Ingest to {} from {}/{} startAt={} of field {} as descriptor {}", dataset.getUuid(), preview.getReferenceUuid(), sheet, startAt, field, descriptor.getTitle());
var tableKey = dataset.getUuid().toString();
var amphDataset = datasetsApi.getDataset(tableKey);
if (amphDataset == null) {
LOG.warn("Creating Amphibian dataset for {}", dataset.getUuid());
amphDataset = datasetsApi.createTable(new DatasetTable()
.key(tableKey)
.title(dataset.getTitle())
);
} else {
LOG.info("Using existing Amphibian dataset {}", amphDataset.getKey());
}
var targetType = IngestColumnRequest.TargetTypeEnum.STRING;
switch (descriptor.getDataType()) {
case NUMERIC:
case SCALE:
targetType = Boolean.TRUE.equals(descriptor.getIntegerOnly()) ? IngestColumnRequest.TargetTypeEnum.INTEGER : IngestColumnRequest.TargetTypeEnum.DECIMAL;
break;
case CODED:
case TEXT:
targetType = IngestColumnRequest.TargetTypeEnum.STRING;
break;
default:
LOG.warn("Don't know how to target " + descriptor.getDataType());
break;
}
try {
return datasetsApi.ingestFromPreview(amphDataset.getKey(), descriptor.getUuid(), preview.getReferenceUuid(), sheet, startAt, new IngestColumnRequest()
.field(field)
.arraySeparator(arraySeparator)
.targetType(targetType)
.valueMapping(mapping));
} catch (HttpClientErrorException e) {
if (e.getRawStatusCode() == 400) {
LOG.warn("Bad request: {}: {}", e.getMessage(), e.getResponseBodyAsString());
throw new InvalidApiUsageException(e.getMessage(), e.getMostSpecificCause());
}
throw e;
}
}
@Override
public DatasetTable removeObservations(Dataset dataset, Descriptor descriptor) {
LOG.warn("Removing observations for descriptor {} from {}", descriptor.getUuid(), dataset.getUuid());
return datasetsApi.removeDescriptorData(dataset.getUuid().toString(), descriptor.getUuid());
}
@Override
public Page<?> getObservations(Dataset dataset, Pageable page, List<UUID> fields, TraitFilters filters) {
try {
return datasetsApi.getObservations(dataset.getUuid().toString(), fields, page.getPageNumber(), page.getPageSize(), toAmphibianFilter(List.of(dataset), filters));
} catch (NoAccessionsForFilterException e) {
return Page.empty();
}
}
/**
 * Convert trait and accession filters into an Amphibian {@link TraitDataFilter},
 * resolving any accession filter to the set of accession UUIDs within the given datasets.
 *
 * @param datasets the datasets to resolve the accession filter against
 * @param filters the trait and accession filters to apply (may be null)
 * @return the Amphibian trait data filter
 * @throws NoAccessionsForFilterException thrown when accession filters are specified, but no accessions match
 */
private TraitDataFilter toAmphibianFilter(List<Dataset> datasets, TraitFilters filters) throws NoAccessionsForFilterException {
var amphibianFilter = new TraitDataFilter();
if (filters != null && MapUtils.isNotEmpty(filters.observations)) {
amphibianFilter.setObservations(filters.observations);
LOG.trace("Using observation filters: {}", amphibianFilter.getObservations());
}
if (filters != null && filters.accession != null) {
// Check if filter is "blank" -- and ignore it
var accessionsInDataset = AccessionFilter.normalize(filters.accession);
if (!accessionsInDataset.isEmpty()) {
accessionsInDataset.datasets = new HashSet<>(Lists.transform(datasets, dataset -> dataset.getUuid())); // Filter for these Datasets
LOG.info("Generating accession UUID set for filter: {}", accessionsInDataset);
amphibianFilter.accession(new ArrayList<>(accessionService.getAccessionUuids(accessionsInDataset)));
accessionsInDataset.datasets = null; // Clear it
if (CollectionUtils.isNotEmpty(amphibianFilter.getAccession())) {
LOG.debug("Will filter for {} accessions: {}", amphibianFilter.getAccession().size(), amphibianFilter.getAccession());
} else {
LOG.info("No accessions match passport filters!");
// Throw an exception!
throw new NoAccessionsForFilterException();
}
} else {
LOG.debug("Accessions filter is empty: {}", accessionsInDataset);
}
}
return amphibianFilter;
}
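/**
 * Collect observation data for a single accession from the published datasets
 * it appears in, split into datasets owned by the accession's holding
 * institute (first party) and all other datasets (third party).
 */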
@Override
@Transactional(readOnly = true)
public AccessionObservations getAccessionObservations(UUID accessionUuid) {
assert (accessionUuid != null);
Accession accession = accessionService.getByUuid(accessionUuid);
if (accession == null) {
throw new NotFoundElement();
}
var datasets = datasetService.listByAccession(accession); // Published datasets
var firstPartyDatasets = datasets.stream().filter(dataset -> dataset.getOwner().equals(accession.getInstitute().getOwner())).collect(Collectors.toList());
var thirdPartyDatasets = datasets.stream().filter(dataset -> !firstPartyDatasets.contains(dataset)).collect(Collectors.toList());
if (CollectionUtils.isEmpty(datasets)) {
return new AccessionObservations();
}
LOG.debug("Accession has datasets: {}", datasets);
TraitDataFilter traitDataFilter = new TraitDataFilter();
traitDataFilter.accession(List.of(accession.getUuid()));
AccessionObservations observations = new AccessionObservations();
// Use cross-dataset search (first party data)
observations.firstPartyData = datasetsApi.getAccessionDatasetObservations(
Lists.transform(firstPartyDatasets, dataset -> dataset.getUuid().toString()), // Dataset UUIDs
null, // All fields
0, 1, // Page 0, size of 1
traitDataFilter // Filters
).getContent();
// Use cross-dataset search (third party data)
observations.thirdPartyData = datasetsApi.getAccessionDatasetObservations(
Lists.transform(thirdPartyDatasets, dataset -> dataset.getUuid().toString()), // Dataset UUIDs
null, // All fields
0, 1, // Page 0, size of 1
traitDataFilter // Filters
).getContent();
return observations;
}
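/**
 * Query observation data across the given datasets and, when MCPD passport
 * fields are requested via {@code request.select}, merge them into each
 * observation row keyed by accession UUID.
 */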
@Override
public Page<?> getAccessionDatasetObservations(List<Dataset> datasets, List<UUID> descriptors, AccessionsDatasetsDataRequest request, Pageable page) throws Exception {
if (CollectionUtils.isEmpty(datasets)) {
throw new IllegalArgumentException("Datasets must be provided");
}
// if (CollectionUtils.isEmpty(filter.getAccession())) {
// throw new IllegalArgumentException("Accession UUIDs must be provided in the filter");
// }
var datasetUuids = Lists.transform(datasets, dataset -> dataset.getUuid().toString());
try {
var observations = datasetsApi.getAccessionDatasetObservations(datasetUuids, descriptors, page.getPageNumber(), page.getPageSize(), toAmphibianFilter(datasets, request.filters));
if (observations.isEmpty() || request.select == null || request.select.isEmpty()) {
return observations;
}
// Map of (accession.uuid, observations data)
List<Map<String, Object>> content = observations.getContent();
var acceUuidContentMap = content.stream()
.collect(Collectors.toMap(c -> UUID.fromString(String.valueOf(c.get("accession"))), c -> c));
// Add uuid to request.select if it is missing
AtomicBoolean removeSelectUuid = new AtomicBoolean(false);
if (request.select.stream().noneMatch(s -> s.equals("uuid"))) {
request.select.add("uuid");
removeSelectUuid.set(true);
}
accessionService.query((AccessionFilter) new AccessionFilter().uuid(acceUuidContentMap.keySet()), request.select, Pageable.unpaged(), false,
mcpdFieldMap -> {
var observationsContentMap = acceUuidContentMap.get(mcpdFieldMap.get("uuid"));
// exclude "uuid if it is not in initial request.select
if (removeSelectUuid.get()) {
mcpdFieldMap.remove("uuid");
}
observationsContentMap.put("mcpd", mcpdFieldMap);
}
);
return observations;
} catch (NoAccessionsForFilterException e) {
return Page.empty(page);
}
}
@Override
public List<ObservationChart> getDatasetsCharts(List<Dataset> datasets, List<UUID> descriptors, TraitFilters filter) {
if (CollectionUtils.isEmpty(datasets)) {
throw new IllegalArgumentException("Datasets must be provided");
}
var datasetUuids = Lists.transform(datasets, dataset -> dataset.getUuid().toString());
try {
return datasetsApi.getAccessionCharts(datasetUuids, descriptors, toAmphibianFilter(datasets, filter));
} catch (NoAccessionsForFilterException e) {
return Lists.newArrayList();
}
}
@Override
public HeatMap getDatasetsHeatMap(List<Dataset> datasets, UUID xCategoryField, UUID yCategoryField, TraitFilters filter) {
if (CollectionUtils.isEmpty(datasets)) {
throw new IllegalArgumentException("Datasets must be provided");
}
var datasetUuids = Lists.transform(datasets, dataset -> dataset.getUuid().toString());
try {
return datasetsApi.getAccessionsHeatMapData(datasetUuids, xCategoryField, yCategoryField, toAmphibianFilter(datasets, filter));
} catch (NoAccessionsForFilterException e) {
return null;
}
}
@Override
public List<ObservationHistogram> getDatasetsHistograms(List<Dataset> datasets, List<UUID> fields, TraitFilters filter, int binsNumber) {
if (CollectionUtils.isEmpty(datasets)) {
throw new IllegalArgumentException("Datasets must be provided");
}
var datasetUuids = Lists.transform(datasets, dataset -> dataset.getUuid().toString());
try {
return datasetsApi.getObservationsHistograms(datasetUuids, fields, binsNumber, toAmphibianFilter(datasets, filter));
} catch (NoAccessionsForFilterException e) {
return Lists.newArrayList();
}
}
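/**
 * Stream the dataset's observation data as an XLSX workbook to the provided
 * servlet output stream, applying the given trait and accession filters.
 */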
@Override
@Transactional(readOnly = true)
public void downloadXlsxDatasetObservations(Dataset dataset, TraitFilters filter, ServletOutputStream outputStream) throws IOException, NoAccessionsForFilterException {
dataset = datasetService.loadDataset(dataset.getUuid());
var descriptors = datasetService.getDatasetDescriptors(dataset);
downloadService.writeXlsxDatasetObservations(dataset, toAmphibianFilter(List.of(dataset), filter), descriptors, outputStream);
}
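/**
 * Drop the Amphibian data table that backs the given dataset, if one exists.
 */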
@Override
public void removeDataset(Dataset dataset) {
var tableKey = dataset.getUuid().toString();
var amphDataset = datasetsApi.getDataset(tableKey);
if (amphDataset == null) {
LOG.warn("No datatable for dataset {}", tableKey);
return;
}
LOG.warn("Removing datatable {}: {}", amphDataset.getKey(), amphDataset.getTitle());
datasetsApi.dropTable(amphDataset);
}
}