// DatasetServiceImpl.java

/*
 * Copyright 2018 Global Crop Diversity Trust
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.genesys.server.service.impl;

import static org.genesys.server.model.dataset.QDataset.dataset;
import static org.genesys.server.model.dataset.QDatasetCreator.datasetCreator;

import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.time.Instant;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.TreeSet;
import java.util.UUID;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.Collectors;

import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;

import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import org.genesys.blocks.security.model.AclSid;
import org.genesys.blocks.security.serialization.Permissions;
import org.genesys.blocks.security.service.CustomAclService;
import org.genesys.filerepository.FolderNotEmptyException;
import org.genesys.filerepository.InvalidRepositoryFileDataException;
import org.genesys.filerepository.InvalidRepositoryPathException;
import org.genesys.filerepository.NoSuchRepositoryFileException;
import org.genesys.filerepository.model.RepositoryFile;
import org.genesys.filerepository.model.RepositoryFolder;
import org.genesys.filerepository.service.RepositoryService;
import org.genesys.server.component.aspect.NotifyForReview;
import org.genesys.server.component.aspect.NotifyOnPublished;
import org.genesys.server.component.security.AsAdminInvoker;
import org.genesys.server.component.security.SecurityUtils;
import org.genesys.server.exception.InvalidApiUsageException;
import org.genesys.server.exception.NotFoundElement;
import org.genesys.server.exception.SearchException;
import org.genesys.server.model.Partner;
import org.genesys.server.model.PublishState;
import org.genesys.server.model.UserRole;
import org.genesys.server.model.dataset.Dataset;
import org.genesys.server.model.dataset.DatasetAccessionRef;
import org.genesys.server.model.dataset.DatasetCreator;
import org.genesys.server.model.dataset.DatasetLang;
import org.genesys.server.model.dataset.DatasetLocation;
import org.genesys.server.model.dataset.DatasetVersions;
import org.genesys.server.model.dataset.QDataset;
import org.genesys.server.model.dataset.QDatasetAccessionRef;
import org.genesys.server.model.filters.DatasetFilter;
import org.genesys.server.model.genesys.Accession;
import org.genesys.server.model.genesys.QAccession;
import org.genesys.server.model.impl.FaoInstitute;
import org.genesys.server.model.traits.Descriptor;
import org.genesys.server.model.traits.QDescriptor;
import org.genesys.server.persistence.DatasetLangRepository;
import org.genesys.server.persistence.dataset.DatasetAccessionRefRepository;
import org.genesys.server.persistence.dataset.DatasetCreatorRepository;
import org.genesys.server.persistence.dataset.DatasetLocationRepository;
import org.genesys.server.persistence.dataset.DatasetRepository;
import org.genesys.server.persistence.dataset.DatasetVersionsRepository;
import org.genesys.server.persistence.traits.DescriptorRepository;
import org.genesys.server.service.AmphibianService;
import org.genesys.server.service.DatasetService;
import org.genesys.server.service.DatasetTranslationService;
import org.genesys.server.service.DescriptorService;
import org.genesys.server.service.DescriptorTranslationService;
import org.genesys.server.service.DownloadService;
import org.genesys.server.service.ElasticsearchService;
import org.genesys.server.service.TranslatorService;
import org.genesys.server.service.TranslatorService.FormattedText;
import org.genesys.server.service.TranslatorService.TextFormat;
import org.genesys.server.service.VersionManager;
import org.genesys.server.service.filter.AccessionFilter;
import org.genesys.server.service.worker.AccessionRefMatcher;
import org.genesys.util.JPAUtils;
import org.genesys.util.LoggerHelper;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.cache.annotation.CacheEvict;
import org.springframework.cache.annotation.Cacheable;
import org.springframework.context.annotation.Lazy;
import org.springframework.core.task.TaskExecutor;
import org.springframework.dao.ConcurrencyFailureException;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageImpl;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Sort;
import org.springframework.data.jpa.repository.support.Querydsl;
import org.springframework.security.access.prepost.PostAuthorize;
import org.springframework.security.access.prepost.PreAuthorize;
import org.springframework.security.acls.domain.BasePermission;
import org.springframework.stereotype.Component;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Isolation;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.validation.annotation.Validated;
import org.springframework.web.multipart.MultipartFile;

import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import com.querydsl.core.BooleanBuilder;
import com.querydsl.core.types.Predicate;
import com.querydsl.core.types.dsl.PathBuilder;
import com.querydsl.core.types.dsl.PathBuilderFactory;
import com.querydsl.jpa.JPAExpressions;
import com.querydsl.jpa.JPQLQuery;
import com.querydsl.jpa.impl.JPAQueryFactory;
import com.vdurmont.semver4j.SemverException;

import lombok.extern.slf4j.Slf4j;

/**
 * The Class DatasetServiceImpl.
 */
@Service
@Transactional(readOnly = true)
@Validated
@Slf4j
public class DatasetServiceImpl  extends FilteredTranslatedCRUDServiceImpl<
	Dataset, DatasetLang, DatasetTranslationService.TranslatedDataset, DatasetFilter, DatasetRepository>
	implements DatasetService {

	/** The dataset versions repository. */
	@Autowired
	private DatasetVersionsRepository datasetVersionsRepository;

	/** The dataset creator repository. */
	@Autowired
	private DatasetCreatorRepository datasetCreatorRepository;

	/** The location repository. */
	@Autowired
	private DatasetLocationRepository locationRepository;

	@Autowired
	private DatasetAccessionRefRepository accessionRefRepository;

	/** The file repo service. */
	@Autowired
	private RepositoryService repositoryService;

	@Autowired
	private CustomAclService aclService;

	/** The securityUtils. */
	@Autowired
	private SecurityUtils securityUtils;

	/** Execute code as admin */
	@Autowired
	protected AsAdminInvoker asAdminInvoker;

	/** The download service. */
	@Autowired
	private DownloadService downloadService;

	/** The descriptor service. */
	@Autowired
	private DescriptorService descriptorService;

	/** The descriptor repository. */
	@Autowired
	private DescriptorRepository descriptorRepository;

	/** The file repository path. */
	@Value("${file.repository.datasets.folder}")
	public String datasetRepositoryPath;

	@Autowired
	private TaskExecutor taskExecutor;

	@Autowired
	private AccessionRefMatcher accessionRefMatcher;

	@PersistenceContext
	private EntityManager entityManager;

	@Autowired
	private VersionManager versionManager;

	@Autowired(required = false)
	private ElasticsearchService elasticsearchService;

	@Autowired
	private JPAQueryFactory jpaQueryFactory;

	@Autowired
	@Lazy
	private AmphibianService amphibianService;

	private Comparator<DatasetAccessionRef> distinctAcceRefsComparator;

	@Autowired
	private DescriptorTranslationService descriptorTranslationService;

	@Autowired(required = false)
	private TranslatorService translatorService;

	@Override
	public void afterPropertiesSet() throws Exception {
		// Order refs by owning dataset id, then institute code, genus and accession
		// number; refs equal under this ordering are treated as duplicates.
		final Comparator<DatasetAccessionRef> byDatasetId = Comparator.comparing(ref -> ref.getDataset().getId());
		distinctAcceRefsComparator = byDatasetId
			.thenComparing(DatasetAccessionRef::getInstCode)
			.thenComparing(DatasetAccessionRef::getGenus)
			.thenComparing(DatasetAccessionRef::getAcceNumb);
	}

	/**
	 * Translation support for {@link Dataset} records. All persistence is
	 * delegated to {@link BaseTranslationSupport}; this class only adds ACL
	 * checks: modifying a translation requires the ADMINISTRATOR role or 'write'
	 * permission on the translated dataset entity.
	 */
	@Component(value = "DatasetTranslationSupport")
	protected static class DatasetTranslationSupport
		extends BaseTranslationSupport<
		Dataset, DatasetLang, DatasetTranslationService.TranslatedDataset, DatasetFilter, DatasetLangRepository>
		implements DatasetTranslationService {

		/** Creates a translation; caller needs write access to the dataset. */
		@Override
		@PreAuthorize("hasRole('ADMINISTRATOR') or hasPermission(#source.entity, 'write')")
		public DatasetLang create(DatasetLang source) {
			return super.create(source);
		}

		/** Updates a translation; caller needs write access to the dataset. */
		@Override
		@PreAuthorize("hasRole('ADMINISTRATOR') or hasPermission(#target.entity, 'write')")
		public DatasetLang update(DatasetLang updated, DatasetLang target) {
			return super.update(updated, target);
		}

		/** Removes a translation; caller needs write access to the dataset. */
		@Override
		@PreAuthorize("hasRole('ADMINISTRATOR') or hasPermission(#source.entity, 'write')")
		public DatasetLang remove(DatasetLang source) {
			return super.remove(source);
		}
	}

	/**
	 * Creates a dataset; currently a plain delegate to {@link #create(Dataset)}.
	 *
	 * @param source the dataset to persist
	 * @return the persisted dataset
	 */
	@Override
	@Transactional
	@PreAuthorize("hasRole('ADMINISTRATOR') or hasPermission(#source.owner, 'write')")
	public Dataset createFast(Dataset source) {
		return create(source);
	}

	/**
	 * {@inheritDoc}
	 *
	 * <p>Persists a new DRAFT dataset for the owning partner: creates its
	 * {@link DatasetVersions} container, copies the incoming values and
	 * descriptors, makes the dataset non-public, creates its repository folder
	 * and grants full permissions to the owner's authority SID.</p>
	 */
	@Override
	@Transactional
	@PreAuthorize("hasRole('ADMINISTRATOR') or hasPermission(#source.owner, 'write')")
	public Dataset create(final Dataset source) {
		log.info("Create Dataset. Input data {}", source);
		// Every dataset belongs to a versions container; a brand new dataset
		// starts without a current version.
		final DatasetVersions datasetVersions = new DatasetVersions();
		datasetVersions.setCurrentVersion(null);
		datasetVersionsRepository.save(datasetVersions);

		Dataset dataset = new Dataset();
		dataset.setUuid(source.getUuid());
		dataset.setVersions(datasetVersions);
		dataset.setCurrent(null);
		copyValues(dataset, source);
		// New datasets always start as DRAFT regardless of the input state
		dataset.setState(PublishState.DRAFT);
		dataset.setOwner(source.getOwner());
		dataset.setVersionTag(source.getVersionTag());
		// Start with empty associations; descriptors are copied below
		dataset.setDescriptors(new ArrayList<>());
		dataset.setRepositoryFiles(new ArrayList<>());
		dataset.setCreators(new ArrayList<>());
		dataset.setLocations(new ArrayList<>());
		dataset.setCrops(source.getCrops());
		dataset.setOriginalLanguageTag(source.getOriginalLanguageTag());

		copyDescriptors(dataset, source.getDescriptors());

		dataset = repository.save(dataset);

		// Make dataset publicly not-readable
		aclService.makePubliclyReadable(dataset, false);

		// Make dataset folder
		ensureDatasetFolder(dataset);

		// Grant all permissions to the Partner's SID
		final AclSid sid = aclService.ensureAuthoritySid(dataset.getOwner().getAuthorityName());
		aclService.setPermissions(dataset, sid, new Permissions().grantAll());

		return dataset;
	}

	/**
	 * Applies {@code updated} values onto the loaded {@code target} dataset and
	 * resets it to DRAFT state.
	 */
	@Override
	@Transactional
	@PreAuthorize("hasRole('ADMINISTRATOR') or hasPermission(#target, 'write')")
	public Dataset update(Dataset updated, Dataset target) {
		copyValues(target, updated);
		// Any modification reverts the dataset to an unpublished draft
		target.setState(PublishState.DRAFT);
		final Dataset saved = repository.save(target);
		return lazyLoad(saved);
	}

	/**
	 * Administrator-only update that bypasses the published-state check but
	 * still refuses to change the dataset owner.
	 */
	@Override
	@Transactional
	@PreAuthorize("hasRole('ADMINISTRATOR')")
	public Dataset forceUpdate(Dataset updated) {
		final Dataset target = get(updated.getId());
		log.info("Force update Dataset. Input data {}", updated);

		// The owner is immutable once the dataset exists
		final Partner newOwner = updated.getOwner();
		if (newOwner != null && !target.getOwner().getUuid().equals(newOwner.getUuid())) {
			throw new InvalidApiUsageException("Dataset owner can't be changed");
		}
		copyValues(target, updated);

		return repository.save(target);
	}

	/**
	 * Updates an unpublished dataset in place; rejects owner changes and resets
	 * the dataset to DRAFT state.
	 */
	@Override
	@Transactional
	@PreAuthorize("hasRole('ADMINISTRATOR') or hasPermission(#target, 'write')")
	public Dataset updateFast(Dataset updated, Dataset target) {
		target = getUnpublishedDataset(target);
		log.info("Update Dataset. Input data {}", updated);

		// The owner is immutable once the dataset exists
		final Partner newOwner = updated.getOwner();
		if (newOwner != null && !target.getOwner().equals(newOwner)) {
			throw new InvalidApiUsageException("Dataset owner can't be changed");
		}
		copyValues(target, updated);
		target.setState(PublishState.DRAFT);

		return repository.save(target);
	}

	/**
	 * {@inheritDoc}
	 *
	 * <p>Loads the matching unpublished dataset (version-checked), rejects owner
	 * changes and delegates to {@link #update(Dataset, Dataset)}.</p>
	 */
	@Override
	@Transactional
	@PreAuthorize("hasRole('ADMINISTRATOR') or hasPermission(#input, 'write')")
	public Dataset update(final Dataset input) {
		final Dataset dataset = getUnpublishedDataset(input);
		log.info("Update Dataset. Input data {}", input);

		// The owner is immutable once the dataset exists
		final Partner newOwner = input.getOwner();
		if (newOwner != null && !dataset.getOwner().equals(newOwner)) {
			throw new InvalidApiUsageException("Dataset owner can't be changed");
		}

		return update(input, dataset);
	}

	/**
	 * {@inheritDoc}
	 *
	 * <p>Replaces the full accession reference list of an unpublished dataset:
	 * existing refs are bulk-deleted, then the provided refs are added. Evicts
	 * the accession/dataset cache.</p>
	 */
	@Override
	@Transactional
	@PreAuthorize("hasRole('ADMINISTRATOR') or hasPermission(#dataset, 'write')")
	@CacheEvict(value = "accessionAmphibianDatasets", allEntries = true)
	public Dataset setAccessionRefs(Dataset dataset, final Collection<DatasetAccessionRef> accessionRefs) {
		dataset = getUnpublishedDataset(dataset);
		log.info("Set accessions to Dataset {}. Input accessions {}", dataset.getUuid(), accessionRefs.size());

		// Replace semantics: drop everything, then add the incoming refs
		deleteAccessionRefs(dataset);
		return addAccessionRefs(dataset, accessionRefs);
	}

	/**
	 * Bulk-deletes all accession refs of the dataset, clears the in-memory
	 * collection and resets the denormalized accession counter to zero.
	 */
	private void deleteAccessionRefs(Dataset dataset) {
		var removed = accessionRefRepository.deleteByList(dataset);
		log.warn("Removed {} accessionRefs from Dataset {}", removed, dataset.getUuid());
		dataset.getAccessionRefs().clear();
		log.warn("Cleared Dataset list");
		dataset.setAccessionCount(0);
	}

	/**
	 * {@inheritDoc}
	 *
	 * <p>Adds the given descriptors to an unpublished dataset, skipping any
	 * descriptor whose UUID is already attached.</p>
	 *
	 * @param dataset the dataset to modify (must not be published)
	 * @param descriptors descriptors to attach
	 * @return the saved, fully initialized dataset
	 */
	@Override
	@Transactional
	@PreAuthorize("hasRole('ADMINISTRATOR') or hasPermission(#dataset, 'write')")
	public Dataset addDescriptors(Dataset dataset, final Descriptor... descriptors) {
		dataset = getUnpublishedDataset(dataset);

		log.info("Add descriptor to Dataset {}. Input descriptors {}", dataset, descriptors);
		// UUIDs already attached to the dataset; used to skip duplicates
		final Set<UUID> descriptorUuids = dataset.getDescriptors().stream().map(Descriptor::getUuid).collect(Collectors.toSet());

		for (final Descriptor inputDescriptor : descriptors) {
			if (descriptorUuids.contains(inputDescriptor.getUuid())) {
				log.warn("Current Dataset already contains descriptor, ignoring");
			} else {
				dataset.getDescriptors().add(inputDescriptor);
				descriptorUuids.add(inputDescriptor.getUuid());
			}
		}
		dataset = repository.save(dataset);
		return lazyLoad(dataset);
	}

	/**
	 * {@inheritDoc}
	 *
	 * <p>Detaches the given descriptors from an unpublished dataset and removes
	 * the corresponding Amphibian observations.</p>
	 */
	@Override
	@Transactional
	@PreAuthorize("hasRole('ADMINISTRATOR') or hasPermission(#dataset, 'write')")
	public Dataset removeDescriptors(Dataset dataset, final Descriptor... descriptorsToRemove) {
		dataset = getUnpublishedDataset(dataset);
		log.info("Remove descriptors {} of dataset {}.", descriptorsToRemove, dataset);

		// UUIDs selected for removal
		final Set<UUID> uuidsToRemove = Arrays.stream(descriptorsToRemove).map(Descriptor::getUuid).collect(Collectors.toSet());

		// Retain only descriptors whose UUID was not selected for removal
		final List<Descriptor> remaining = dataset.getDescriptors().stream()
			.filter(descriptor -> !uuidsToRemove.contains(descriptor.getUuid()))
			.collect(Collectors.toList());
		dataset.setDescriptors(remaining);

		removeAmphibianObservations(dataset, List.of(descriptorsToRemove));

		return lazyLoad(repository.save(dataset));
	}

	/**
	 * Best-effort removal of Amphibian observations for the given descriptors.
	 * Failures are logged and swallowed so dataset maintenance can proceed.
	 */
	private void removeAmphibianObservations(Dataset dataset, final Collection<Descriptor> descriptors) {
		if (CollectionUtils.isEmpty(descriptors) || amphibianService == null) {
			return;
		}
		for (final Descriptor descriptor : descriptors) {
			try {
				amphibianService.removeObservations(dataset, descriptor);
			} catch (Throwable e) {
				// Deliberately broad catch: observation cleanup must never abort the caller
				log.warn("Could not remove observations for {} from Amphibian dataset {}: {}", descriptor, dataset, e.getMessage());
			}
		}
	}

	/**
	 * {@inheritDoc}
	 *
	 * <p>Replaces the descriptor list of an unpublished dataset; observations of
	 * descriptors that are no longer referenced are removed from Amphibian.</p>
	 */
	@Override
	@Transactional
	@PreAuthorize("hasRole('ADMINISTRATOR') or hasPermission(#dataset, 'write')")
	public Dataset updateDescriptors(Dataset dataset, final List<Descriptor> descriptors) {
		dataset = getUnpublishedDataset(dataset);
		log.info("Update descriptors for Dataset {}. Input descriptors {}", dataset, descriptors);

		// Descriptors that disappear from the new list lose their observations
		final List<Descriptor> removedDescriptors = dataset.getDescriptors().stream()
			.filter(existing -> !descriptors.contains(existing))
			.collect(Collectors.toList());
		log.warn("Removed descriptors: {}", removedDescriptors);
		removeAmphibianObservations(dataset, removedDescriptors);

		dataset.getDescriptors().clear();
		copyDescriptors(dataset, descriptors);

		return lazyLoad(repository.save(dataset));
	}

	/**
	 * Looks up a dataset by UUID. Access is enforced after loading: the dataset
	 * must be published, or the caller needs the ADMINISTRATOR role or 'read'
	 * permission.
	 *
	 * @param uuid the dataset UUID
	 * @return the dataset, or {@code null} if none matches
	 */
	@Override
	@PostAuthorize("returnObject==null || returnObject.published || hasRole('ADMINISTRATOR') || hasPermission(returnObject, 'read')")
	public Dataset getDataset(UUID uuid) {
		return repository.findByUuid(uuid);
	}

	/**
	 * Looks up a dataset by UUID and verifies the caller-provided
	 * optimistic-lock version still matches.
	 *
	 * @param uuid the dataset UUID
	 * @param version expected record version; {@code null} skips the check
	 * @throws NotFoundElement if no dataset matches the UUID
	 * @throws ConcurrencyFailureException if the stored version differs
	 */
	@Override
	@PostAuthorize("returnObject==null || returnObject.published || hasRole('ADMINISTRATOR') || hasPermission(returnObject, 'read')")
	public Dataset getDataset(UUID uuid, Integer version) {
		final Dataset dataset = repository.findByUuid(uuid);
		if (dataset == null) {
			throw new NotFoundElement("Record not found by uuid=" + uuid);
		}

		final boolean versionMismatch = version != null && !dataset.getVersion().equals(version);
		if (versionMismatch) {
			log.warn("Dataset versions don't match anymore");
			throw new ConcurrencyFailureException("Object version changed to " + dataset.getVersion() + ", you provided " + version);
		}
		return dataset;
	}

	/**
	 * Streams an MCPD-format XLSX export of the dataset's accessions.
	 *
	 * <p>Builds a Querydsl query selecting the accession ids referenced by the
	 * dataset (ordered by id) and hands it to the download service.</p>
	 *
	 * @param dataset the dataset whose accessions are exported
	 * @param outputStream target stream; not closed here
	 * @throws IOException on write failure
	 */
	@Override
	public void writeXlsxMCPD(Dataset dataset, OutputStream outputStream) throws IOException {
		PathBuilder<Accession> builder = new PathBuilderFactory().create(Accession.class);
		Querydsl querydsl = new Querydsl(entityManager, builder);
		JPQLQuery<Long> queryAccessionId = querydsl.createQuery(QDatasetAccessionRef.datasetAccessionRef)
			// select id only
			.select(QDatasetAccessionRef.datasetAccessionRef.accession().id)
			// order by id
			.orderBy(QDatasetAccessionRef.datasetAccessionRef.accession().id.asc());

		// Apply where
		queryAccessionId.where(QDatasetAccessionRef.datasetAccessionRef.list().eq(dataset));

		downloadService.writeXlsxMCPD(queryAccessionId, outputStream, "", "/datasets/" + dataset.getUuid());
	}

	/**
	 * {@inheritDoc}
	 *
	 * <p>Creates a new DRAFT version of a published dataset inside the same
	 * {@link DatasetVersions} container: bumps the semantic version tag (or
	 * appends ".1" when the tag is not valid semver), deep-copies descriptors,
	 * accession refs, creators and locations, makes the copy non-public and
	 * ensures its repository folder. Evicts the accession/dataset cache.</p>
	 */
	@Override
	@Transactional
	@PreAuthorize("(hasRole('ADMINISTRATOR') || hasPermission(#source, 'write')) && #source.published")
	@CacheEvict(value = "accessionAmphibianDatasets", allEntries = true)
	public Dataset createNewVersion(Dataset source) {
		source = getDataset(source);

		final Dataset dataset = new Dataset();
		copyValues(dataset, source);
		try {
			// Next major version, e.g. 1.2 -> 2.0
			dataset.setVersionTag(versionManager.nextMajor(source.getVersionTag()));
		} catch (SemverException e) {
			// Tag is not valid semver; fall back to appending ".1"
			dataset.setVersionTag(source.getVersionTag() + ".1");
		}
		dataset.setState(PublishState.DRAFT);
		dataset.setCurrent(null);
		// New UUID will be assigned; the copy shares the versions container
		dataset.setUuid(null);
		dataset.setVersions(source.getVersions());

		// Copy descriptors
		dataset.setDescriptors(new ArrayList<>());
		copyDescriptors(dataset, source.getDescriptors());

		Dataset saved = repository.save(dataset);

		// copy accessionRefs
		saved = copyAccessionRefs(saved, source.getAccessionRefs());

		// Copy creators
		copyCreators(saved, source.getCreators());

		// Copy locations
		copyLocations(saved, source.getLocations());

		saved.setCurrentVersion(source.getUuid());

		// Make dataset publicly not-readable
		aclService.makePubliclyReadable(saved, false);

		// Make dataset folder
		ensureDatasetFolder(saved);

		return saved;
	}

	/**
	 * Copy and save dataset accessionRefs.
	 *
	 * <p>Source refs are de-duplicated, cloned onto the target dataset, saved,
	 * and the target's denormalized accession counter is refreshed from the
	 * database.</p>
	 *
	 * @param target the target dataset (new version)
	 * @param accessionRefs the source dataset accessionRefs
	 * @return the saved target dataset with an updated accession count
	 */
	private Dataset copyAccessionRefs(final Dataset target, final List<DatasetAccessionRef> accessionRefs) {
		// Null-safe emptiness check instead of the manual null + size() test
		if (CollectionUtils.isEmpty(accessionRefs)) {
			return target;
		}

		final Dataset loadedDataset = getDataset(target);
		final List<DatasetAccessionRef> copiedAccessionRefs = Lists.newArrayList();
		getDistinctAccessionRefs(accessionRefs).forEach(sourceRef -> {
			final DatasetAccessionRef copy = new DatasetAccessionRef();
			copyAccessionRef(copy, sourceRef);
			copy.setDataset(loadedDataset);
			copiedAccessionRefs.add(copy);
		});

		accessionRefRepository.saveAll(copiedAccessionRefs);

		// Refresh the denormalized counter from the database, not from memory
		loadedDataset.setAccessionCount((int) accessionRefRepository.countByList(loadedDataset));
		log.info("Done saving {} accession refs, have {} in dataset", accessionRefs.size(), loadedDataset.getAccessionCount());
		return repository.save(loadedDataset);
	}

	/**
	 * Copy and save dataset locations.
	 *
	 * @param target the target dataset (new version)
	 * @param locations the source dataset locations; no-op when null or empty
	 */
	private void copyLocations(final Dataset target, final List<DatasetLocation> locations) {
		// Null-safe emptiness check instead of the manual null + size() test
		if (CollectionUtils.isEmpty(locations)) {
			return;
		}

		// Clone each location onto the target dataset
		final List<DatasetLocation> copiedLocations = locations.stream().map(location -> {
			final DatasetLocation copy = new DatasetLocation();
			copyLocation(copy, location);
			copy.setDataset(target);
			return copy;
		}).collect(Collectors.toList());
		target.setLocations(locationRepository.saveAll(copiedLocations));
	}

	/**
	 * Copy and save dataset creators.
	 *
	 * @param target the target dataset (new version)
	 * @param creators the source dataset creators; no-op when null or empty
	 */
	private void copyCreators(final Dataset target, final List<DatasetCreator> creators) {
		// Null-safe emptiness check instead of the manual null + size() test
		if (CollectionUtils.isEmpty(creators)) {
			return;
		}

		// Clone each creator onto the target dataset
		final List<DatasetCreator> copiedCreators = creators.stream().map(creator -> {
			final DatasetCreator copy = new DatasetCreator();
			copyCreator(copy, creator);
			copy.setDataset(target);
			return copy;
		}).collect(Collectors.toList());
		target.setCreators(datasetCreatorRepository.saveAll(copiedCreators));
	}

	/**
	 * Ensures the dataset's repository folder exists under the configured
	 * datasets base path, creating it with administrator privileges so folder
	 * ownership is consistent.
	 *
	 * @param dataset the dataset (must have a UUID)
	 * @return the existing or newly created folder
	 * @throws InvalidApiUsageException if the folder cannot be created
	 */
	private RepositoryFolder ensureDatasetFolder(final Dataset dataset) {
		try {
			final Path datasetPath = Paths.get(datasetRepositoryPath, dataset.getUuid().toString());

			// Ensure folder ownership
			return asAdminInvoker.invoke(() -> {
				// Ensure target folder exists for the Dataset
				return repositoryService.ensureFolder(datasetPath, dataset);
			});

		} catch (Exception e) {
			log.warn("Could not create folder: {}", e.getMessage());
			throw new InvalidApiUsageException("Could not create folder", e);
		}
	}

	/**
	 * {@inheritDoc}
	 *
	 * <p>Loads the dataset matching {@code input} (id and version checked) with
	 * all lazy associations initialized; access is checked after loading.</p>
	 */
	@Override
	@PostAuthorize("hasRole('ADMINISTRATOR') || returnObject==null || returnObject.isPublished() || hasPermission(returnObject, 'read')")
	public Dataset loadDataset(final Dataset input) {
		final Dataset dataset = getDataset(input);
		return lazyLoad(dataset);
	}

	/**
	 * Loads the persistent dataset matching {@code input} and verifies the
	 * optimistic-lock version.
	 *
	 * @throws NotFoundElement if no dataset exists with the input's id
	 * @throws ConcurrencyFailureException if the stored version differs
	 */
	private Dataset getDataset(final Dataset input) {
		log.debug("Load Dataset. Input data {}", input);
		final Dataset dataset = repository.findById(input.getId())
			.orElseThrow(() -> new NotFoundElement("Record not found by id=" + input.getId()));

		if (!dataset.getVersion().equals(input.getVersion())) {
			log.warn("Dataset versions don't match anymore");
			throw new ConcurrencyFailureException("Object version changed to " + dataset.getVersion() + ", you provided " + input.getVersion());
		}
		return dataset;
	}

	/**
	 * Loads the dataset (with version check) and rejects the operation if it has
	 * already been published.
	 *
	 * @throws InvalidApiUsageException if the dataset is published
	 */
	private Dataset getUnpublishedDataset(final Dataset input) {
		Dataset loadedDataset = getDataset(input);
		if (loadedDataset.isPublished()) {
			throw new InvalidApiUsageException("Cannot modify a published Dataset.");
		}
		return loadedDataset;
	}

	/**
	 * {@inheritDoc}
	 *
	 * <p>Lists published, current dataset versions. Full-text queries are served
	 * by Elasticsearch; plain filters go straight to the database.</p>
	 *
	 * @throws InvalidApiUsageException for a full-text query when Elasticsearch
	 *         is not configured (the bean is optional)
	 */
	@Override
	public Page<Dataset> list(final DatasetFilter filter, final Pageable page) throws SearchException {
		// Only published, current versions are publicly listed
		final BooleanBuilder published = new BooleanBuilder();
		published.and(QDataset.dataset.state.eq(PublishState.PUBLISHED).and(QDataset.dataset.current.isTrue()));
		Page<Dataset> res;
		final Pageable markdownSortPageRequest = JPAUtils.toMarkdownSort(page, "title");

		if (filter.isFulltextQuery()) {
			// elasticsearchService is @Autowired(required = false); fail with a
			// clear message instead of an NPE when it is absent
			if (elasticsearchService == null) {
				throw new InvalidApiUsageException("Full-text search is not available");
			}
			res = elasticsearchService.findAll(Dataset.class, filter, published, markdownSortPageRequest);
		} else {
			res = repository.findAll(published.and(filter.buildPredicate()), markdownSortPageRequest);
		}
		return new PageImpl<>(res.getContent(), page, res.getTotalElements());
	}

	/**
	 * Lists published datasets and returns them with translations applied.
	 */
	@Override
	@Transactional(readOnly = true)
	public Page<DatasetTranslationService.TranslatedDataset> listTranslated(final DatasetFilter filter, final Pageable page) throws SearchException {
		final var datasetPage = list(filter, page);
		final var translatedDatasets = translationSupport.getTranslated(datasetPage.getContent());
		return new PageImpl<>(translatedDatasets, page, datasetPage.getTotalElements());
	}

	/**
	 * {@inheritDoc}
	 *
	 * <p>Computes term suggestions (currently only for "crops") over published,
	 * current datasets. Returns an empty map when Elasticsearch is not
	 * configured.</p>
	 */
	@Override
	public Map<String, ElasticsearchService.TermResult> getSuggestions(DatasetFilter filter) throws SearchException, IOException {
		assert (filter != null);

		Set<String> suggestions = Sets.newHashSet("crops");
		Map<String, ElasticsearchService.TermResult> suggestionRes = new HashMap<>(suggestions.size());

		if (elasticsearchService == null) {
			return suggestionRes; // If we don't have ES, then suggest nothing
		}

		for (String suggestionKey : suggestions) {
			DatasetFilter suggestionFilter = filter.copy(DatasetFilter.class);
			suggestionFilter.state(PublishState.PUBLISHED);
			try {
				// The suggested dimension itself must not be constrained by the filter
				suggestionFilter.clearFilter(suggestionKey);
			} catch (NoSuchFieldException | IllegalAccessException e) {
				// Fixed: the original message had no {} placeholder, so the passed
				// e.getMessage() was silently dropped; log the key and the stack trace.
				log.error("Error while clearing filter {}", suggestionKey, e);
			}
			suggestionFilter.current(true);
			ElasticsearchService.TermResult suggestion = elasticsearchService.termStatisticsAuto(Dataset.class, suggestionFilter, 100, suggestionKey);
			suggestionRes.put(suggestionKey, suggestion);
		}
		return suggestionRes;
	}

	/**
	 * {@inheritDoc}
	 *
	 * <p>Counts datasets matching the filter; full-text counts require
	 * Elasticsearch.</p>
	 *
	 * @throws InvalidApiUsageException for a full-text query when Elasticsearch
	 *         is not configured (the bean is optional)
	 */
	@Override
	public long countDatasets(DatasetFilter filter) throws SearchException {
		if (filter.isFulltextQuery()) {
			// elasticsearchService is @Autowired(required = false); fail with a
			// clear message instead of an NPE when it is absent
			if (elasticsearchService == null) {
				throw new InvalidApiUsageException("Full-text search is not available");
			}
			return elasticsearchService.count(Dataset.class, filter);
		}
		return repository.count(filter.buildPredicate());
	}

	/**
	 * {@inheritDoc}
	 *
	 * <p>Lists datasets the current user manages. Administrators see everything;
	 * other users are restricted to datasets owned by partners they hold WRITE
	 * permission on. Full-text queries go through Elasticsearch.</p>
	 */
	@Override
	public Page<Dataset> listDatasetsForCurrentUser(final DatasetFilter filter, final Pageable page) throws SearchException {
		final Pageable markdownSortPageRequest = JPAUtils.toMarkdownSort(page, "title");
		Page<Dataset> res;
		if (securityUtils.hasRole(UserRole.ADMINISTRATOR)) {
			// NOTE(review): elasticsearchService is optional (required = false); a
			// full-text query would NPE here if ES is not configured -- confirm.
			if (filter.isFulltextQuery()) {
				res = elasticsearchService.findAll(Dataset.class, filter, markdownSortPageRequest);
			} else {
				res = repository.findAll(filter.buildPredicate(), markdownSortPageRequest);
			}
		} else {
			// Partner ids the current user may WRITE to; restricts visible datasets
			final HashSet<Long> partners = new HashSet<>(securityUtils.listObjectIdentityIdsForCurrentUser(Partner.class, BasePermission.WRITE));
			if (filter.isFulltextQuery()) {
				res = elasticsearchService.findAll(Dataset.class, filter, dataset.owner().id.in(partners), markdownSortPageRequest);
			} else {
				res = repository.findAll(dataset.owner().id.in(partners).and(filter.buildPredicate()), markdownSortPageRequest);
			}
		}
		return new PageImpl<>(res.getContent(), page, res.getTotalElements());
	}

	/**
	 * {@inheritDoc}
	 *
	 * <p>Pages over the current version of every dataset; filtering is delegated
	 * to the repository query.</p>
	 */
	@Override
	public Page<Dataset> loadCurrentVersionList(final Pageable page) {
		return repository.listAllCurrentVersion(page);
	}

	/**
	 * {@inheritDoc}
	 *
	 * <p>Loads a dataset by UUID with all lazy associations initialized; access
	 * is checked after loading.</p>
	 */
	@Override
	@PostAuthorize("hasRole('ADMINISTRATOR') || returnObject==null || returnObject.isPublished() || hasPermission(returnObject, 'read')")
	public Dataset loadDataset(final UUID uuid) {
		return lazyLoad(repository.findByUuid(uuid));
	}

	/**
	 * {@inheritDoc}
	 *
	 * <p>Loads a dataset by UUID and wraps it with translations for the dataset
	 * itself and for its descriptors.</p>
	 *
	 * @throws NotFoundElement if no dataset matches the UUID
	 */
	@Override
	@PostAuthorize("hasRole('ADMINISTRATOR') || returnObject==null || returnObject.entity.isPublished() || hasPermission(returnObject.entity, 'read')")
	public DatasetTranslationService.TranslatedDataset loadTranslatedDataset(final UUID uuid) {
		final Dataset dataset = repository.findByUuid(uuid);
		if (dataset == null) {
			throw new NotFoundElement("No such dataset");
		}
		final var translatedDataset = translationSupport.getTranslated(dataset);
		// Descriptor translations are resolved separately from the dataset's own
		translatedDataset.setDescriptors(descriptorTranslationService.getTranslated(translatedDataset.getEntity().getDescriptors()));
		return translatedDataset;
	}

	/**
	 * {@inheritDoc}
	 *
	 * <p>Loads one specific stored version of a dataset by UUID, fully
	 * initialized; access is checked after loading.</p>
	 */
	@Override
	@PostAuthorize("hasRole('ADMINISTRATOR') || returnObject==null || returnObject.isPublished() || hasPermission(returnObject, 'read')")
	public Dataset loadDataset(final UUID uuid, final int version) {
		return lazyLoad(repository.findByUuidAndVersion(uuid, version));
	}

	/**
	 * {@inheritDoc}
	 *
	 * <p>Pages the accession references of a dataset; the dataset is re-read by
	 * UUID so the page reflects the persistent state.</p>
	 */
	@Override
	@PreAuthorize("hasRole('ADMINISTRATOR') || #dataset.published || hasPermission(#dataset, 'read')")
	public Page<DatasetAccessionRef> listAccessions(Dataset dataset, final Pageable page) {
		final Dataset persistent = repository.findByUuid(dataset.getUuid());
		return accessionRefRepository.findByList(persistent, page);
	}

	/**
	 * {@inheritDoc}
	 *
	 * <p>Finds all published, current datasets that reference the given
	 * accession.</p>
	 */
	@Override
	public List<Dataset> listByAccession(final Accession accession) {
		final Predicate onlyPublishedCurrent = dataset.accessionRefs.any().accession().eq(accession)
			.and(dataset.state.in(PublishState.PUBLISHED).and(dataset.current.isTrue()));
		return Lists.newArrayList(repository.findAll(onlyPublishedCurrent));
	}

	/**
	 * {@inheritDoc}
	 *
	 * <p>Stores an uploaded file in the dataset's repository folder and attaches
	 * it to the (unpublished) dataset.</p>
	 *
	 * @throws InvalidApiUsageException if the dataset is already published
	 */
	@Transactional
	@Override
	@PreAuthorize("hasRole('ADMINISTRATOR') or hasPermission(#dataset, 'write')")
	public Dataset addDatasetFile(Dataset dataset, final MultipartFile file) throws NotFoundElement, IOException, InvalidRepositoryPathException,
			InvalidRepositoryFileDataException {
		dataset = getUnpublishedDataset(dataset);

		final RepositoryFile repositoryFile = repositoryService.addFile(getDatasetRepositoryFolder(dataset), file.getOriginalFilename(), file.getContentType(), file.getInputStream(), null);

		dataset.getRepositoryFiles().add(repositoryFile);
		return lazyLoad(repository.save(dataset));
	}

	/**
	 * Resolves the absolute repository folder path for a dataset:
	 * {@code <datasetRepositoryPath>/<dataset uuid>}.
	 */
	@Override
	public Path getDatasetRepositoryFolder(Dataset dataset) {
		assert (dataset != null);
		assert (dataset.getUuid() != null);

		final Path folder = Paths.get(datasetRepositoryPath, dataset.getUuid().toString());
		return folder.toAbsolutePath();
	}

	/**
	 * Updates the stored metadata of one of the dataset's attached files.
	 *
	 * <p>The file is matched by UUID and version among the dataset's files; the
	 * updated metadata then replaces it in the in-memory list.
	 * NOTE(review): the dataset itself is not explicitly saved here --
	 * presumably the managed entity is flushed by the surrounding transaction;
	 * confirm.</p>
	 *
	 * @throws NotFoundElement if the file is not attached to this dataset
	 * @throws NoSuchRepositoryFileException if the repository no longer has it
	 */
	@Override
	@Transactional
	@PreAuthorize("hasRole('ADMINISTRATOR') or hasPermission(#dataset, 'write')")
	public Dataset updateDatasetFile(Dataset dataset, final RepositoryFile metadata) throws NoSuchRepositoryFileException {
		dataset = getUnpublishedDataset(dataset);

		final RepositoryFile datasetFile = dataset.getRepositoryFiles().stream().filter(df -> df.getUuid().equals(metadata.getUuid()) && df.getVersion().equals(metadata
			.getVersion())).findFirst().orElse(null);
		if (datasetFile == null) {
			throw new NotFoundElement("No such dataset file");
		}
		final RepositoryFile updated = repositoryService.updateMetadata(metadata);
		// replace it
		dataset.getRepositoryFiles().replaceAll(df -> df.getUuid().equals(datasetFile.getUuid()) ? updated : df);
		return lazyLoad(dataset);
	}

	/**
	 * {@inheritDoc}
	 *
	 * <p>Detaches the file from the (unpublished) dataset and deletes it from
	 * the file repository.</p>
	 */
	@Transactional
	@Override
	@PreAuthorize("hasRole('ADMINISTRATOR') or hasPermission(#dataset, 'write')")
	public Dataset removeDatasetFile(Dataset dataset, final UUID fileUuid) throws NotFoundElement, NoSuchRepositoryFileException, IOException {
		dataset = getUnpublishedDataset(dataset);

		final RepositoryFile repositoryFile = repositoryService.getFile(fileUuid);
		// Detach from the dataset first, then remove the physical file
		dataset.setRepositoryFiles(dataset.getRepositoryFiles().stream().filter(file -> !file.getUuid().equals(fileUuid)).collect(Collectors.toList()));
		repositoryService.removeFile(repositoryFile);
		return lazyLoad(repository.save(dataset));
	}

	/**
	 * {@inheritDoc}
	 *
	 * <p>Returns the dataset's attached repository files, forcing
	 * initialization of the lazy collection before returning.</p>
	 */
	@Override
	@PreAuthorize("#dataset.isPublished() || hasRole('ADMINISTRATOR') || hasPermission(#dataset, 'read')")
	public List<RepositoryFile> listDatasetFiles(final Dataset dataset) throws NotFoundElement {
		final var loaded = reload(dataset);
		final List<RepositoryFile> files = loaded.getRepositoryFiles();
		files.size(); // touch to initialize the lazy collection
		return files;
	}

	/**
	 * Deep-load dataset data by touching each lazy collection so it is
	 * initialized while the persistence session is still open.
	 *
	 * @param dataset dataset to deep-load
	 * @return the same dataset instance, fully initialized
	 * @throws NotFoundElement if {@code dataset} is {@code null}
	 */
	private Dataset lazyLoad(final Dataset dataset) {
		if (dataset == null) {
			throw new NotFoundElement("No such dataset");
		}

		initializeLazy(dataset.getDescriptors());
		if (dataset.getVersions() != null) {
			initializeLazy(dataset.getVersions().getAllVersions());
		}
		initializeLazy(dataset.getRepositoryFiles());
		initializeLazy(dataset.getCreators());
		initializeLazy(dataset.getLocations());
		initializeLazy(dataset.getCrops());
		return dataset;
	}

	/** Initialize a lazy collection (if present) by invoking {@code size()} on it. */
	private void initializeLazy(final Collection<?> collection) {
		if (collection != null) {
			collection.size();
		}
	}

	/**
	 * {@inheritDoc}
	 *
	 * Deletes an unpublished dataset along with its accession references, its
	 * repository files and folder, and (when configured) its Amphibian data.
	 * File and folder removal is best-effort: failures are logged, not rethrown.
	 */
	@Transactional
	@Override
	@PreAuthorize("hasRole('ADMINISTRATOR') or hasPermission(#dataset, 'delete')")
	@CacheEvict(value = "accessionAmphibianDatasets", allEntries = true)
	public Dataset remove(final Dataset dataset) {
		final Dataset loadedDataset = getUnpublishedDataset(dataset);

		deleteAccessionRefs(loadedDataset);

		// Snapshot the file list, then clear the association and flush so the
		// database rows are gone before the physical files are removed.
		var files = List.copyOf(loadedDataset.getRepositoryFiles());
		loadedDataset.getRepositoryFiles().clear();
		entityManager.flush();

		for (RepositoryFile repositoryFile : files) {
			try {
				repositoryService.removeFile(repositoryFile);
			} catch (NoSuchRepositoryFileException | IOException e) {
				// Best-effort: an undeletable file must not block dataset removal
				log.warn("Could not delete file {} from dataset {}", repositoryFile.getUuid(), dataset.getUuid());
			}
		}
		entityManager.flush();

		try {
			repositoryService.deleteFolder(getDatasetRepositoryFolder(dataset));
		} catch (InvalidRepositoryPathException | FolderNotEmptyException e) {
			// Best-effort: leave the folder in place when it cannot be removed
			log.warn("Could not delete folder {} from dataset {}: {}", getDatasetRepositoryFolder(dataset), dataset.getUuid(), e.getMessage());
		}

		repository.delete(loadedDataset);

		if (amphibianService != null) {
			try {
				amphibianService.removeDataset(loadedDataset);
			} catch (Throwable e) {
				// Amphibian cleanup is optional; log and continue
				log.warn("Could not remove Amphibian dataset {}: {}", loadedDataset, e.getMessage(), e);
			}
		}
		return loadedDataset;
	}

	/**
	 * {@inheritDoc}
	 *
	 * Adds accession references to an unpublished dataset in batches of 10,000
	 * and refreshes the dataset's persisted accession count. Matching of refs to
	 * actual Accession records is handled separately by AccessionRefAspect.
	 */
	@Transactional
	@Override
	@PreAuthorize("hasRole('ADMINISTRATOR') or hasPermission(#dataset, 'write')")
	@CacheEvict(value = "accessionAmphibianDatasets", allEntries = true)
	public Dataset addAccessionRefs(final Dataset dataset, final Collection<DatasetAccessionRef> accessionRefs) throws NotFoundElement {
		final Dataset loadedDataset = getUnpublishedDataset(dataset.getUuid());

		final var existingRefCount = accessionRefRepository.countByList(loadedDataset);
		log.warn("Adding {} accession references to Dataset that now has {} refs.", accessionRefs.size(), existingRefCount);

		accessionRefs.forEach(ref -> ref.setDataset(loadedDataset)); // So that #equals works
		// De-duplicate first, then process in batches of 10,000
		Lists.partition(new ArrayList<>(getDistinctAccessionRefs(accessionRefs)), 10000).stream().forEach(batch -> {
			List<DatasetAccessionRef> updatedRefs;
			if (existingRefCount == 0) {
				// Empty list: everything is new, save the whole batch
				updatedRefs = accessionRefRepository.saveAllAndFlush(batch);
				log.warn("Added new {} accession references to Dataset.", updatedRefs.size());
			} else {
				// NOTE(review): only refs found by findExisting() are re-saved here;
				// refs in the batch that are not yet in the list appear to be skipped.
				// Confirm whether findExisting() is expected to include/merge new refs.
				updatedRefs = accessionRefRepository.findExisting(loadedDataset, batch);
				updatedRefs = accessionRefRepository.saveAllAndFlush(updatedRefs);
				log.info("Stored {} accession references to Dataset.", batch.size());
			}
			// Rematching is done in AccessionRefAspect!
		});

		// Refresh the persisted count from the database, not from the input size
		loadedDataset.setAccessionCount(accessionRefRepository.countByList(loadedDataset));
		final var updatedDataset = repository.save(loadedDataset);
		log.info("Done saving {} accession refs to dataset {} has count={}", accessionRefs.size(), updatedDataset.getUuid(), updatedDataset.getAccessionCount());
		return lazyLoad(updatedDataset);
	}

	/**
	 * Load a dataset by UUID and ensure it may still be modified.
	 *
	 * @param uuid dataset UUID
	 * @return the managed, unpublished dataset
	 * @throws NotFoundElement when no dataset exists for the UUID
	 * @throws InvalidApiUsageException when the dataset is already published
	 */
	private Dataset getUnpublishedDataset(UUID uuid) {
		final Dataset found = repository.findByUuid(uuid);
		if (found == null) {
			throw new NotFoundElement("Record not found by UUID=" + uuid);
		}
		if (found.isPublished()) {
			throw new InvalidApiUsageException("Cannot modify a published Dataset.");
		}
		return found;
	}

	/**
	 * {@inheritDoc}
	 *
	 * Publishes a reviewed dataset: publishes all its descriptors, regenerates
	 * the Metadata.xlsx file, relaxes ACLs on the dataset and its files, and
	 * marks this version as the current one in its version chain.
	 */
	@Override
	@Transactional
	@PreAuthorize("hasRole('ADMINISTRATOR')")
	@NotifyOnPublished
	@CacheEvict(value = "accessionAmphibianDatasets", allEntries = true)
	public Dataset approveDataset(final Dataset dataset) {
		Dataset loaded = getUnpublishedDataset(dataset);

		// Publication is only allowed from the REVIEWING state
		if (loaded.getState() == PublishState.DRAFT) {
			throw new InvalidApiUsageException("Dataset should be sent for review before publication");
		}

		// All descriptors must be published together with the dataset
		for (final Descriptor descriptor : loaded.getDescriptors()) {
			if (descriptor.getState() != PublishState.PUBLISHED) {
				log.info("Publishing descriptor {}", descriptor);
				try {
					descriptorService.approveDescriptor(descriptor);
				} catch (InvalidApiUsageException e) {
					// NOTE(review): catch-and-rethrow is a no-op; kept as-is
					throw e;
				}
			}
		}

		loaded.setState(PublishState.PUBLISHED);

		// generate file with metadata and add it to the dataset
		try {
			File tempFile = File.createTempFile("dataset-", ".xlsx");
			try (final OutputStream bos = new BufferedOutputStream(new FileOutputStream(tempFile, false), 8 * 1000)) {
				downloadService.writeXlsxDataset(loaded, bos);

				final String fileName = "Metadata.xlsx";
				final String contentType = "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet";
				try {
					// Update the existing Metadata.xlsx when the dataset already has one
					final RepositoryFile datasetFile = repositoryService.getFile(Paths.get(datasetRepositoryPath, loaded.getUuid().toString()), fileName);
					repositoryService.updateBytes(datasetFile, contentType, tempFile);
				} catch (final NoSuchRepositoryFileException | InvalidRepositoryPathException e) {
					// No existing metadata file: create a new one and attach it
					try {
						RepositoryFile metadata = new RepositoryFile();
						metadata.setTitle("Dataset metadata");
						metadata.setDescription("Auto-generated by Genesys");
						final RepositoryFile addedFile = repositoryService.addFile(getDatasetRepositoryFolder(loaded), fileName, contentType, tempFile, metadata);
						loaded.getRepositoryFiles().add(addedFile);
						loaded = lazyLoad(repository.save(loaded));
					} catch (final InvalidRepositoryPathException | InvalidRepositoryFileDataException e1) {
						// Publication proceeds even when the metadata file cannot be added
						log.warn("Error while adding metadata file.", e1);
					}
				}
			} finally {
				// Always clean up the temporary file
				if (tempFile.exists()) tempFile.delete();
			}
		} catch (final IOException e) {
			// Publication proceeds even when metadata generation fails
			log.warn("Error while generating metadata file.", e);
		}

		{
			// Relax permissions on dataset files: allow USERS and ANONYMOUS to read the
			// files
			for (final RepositoryFile datasetFile : loaded.getRepositoryFiles()) {
				aclService.makePubliclyReadable(datasetFile, true);
			}
		}

		// Make dataset publicly readable
		aclService.makePubliclyReadable(loaded, true);

		// Demote the previously-current version (if any) and make this one current
		final DatasetVersions datasetVersions = loaded.getVersions();
		final Dataset oldCurrentDataset = datasetVersions.getAllVersions().stream().filter(s -> Objects.equals(s.getCurrent(), Boolean.TRUE)).findFirst().orElse(null);
		if (oldCurrentDataset != null) {
			oldCurrentDataset.setCurrent(null);
			repository.save(oldCurrentDataset);
		}
		loaded.setCurrent(Boolean.TRUE);
		loaded.setCurrentVersion(null);
		datasetVersions.setCurrentVersion(loaded);
		datasetVersionsRepository.save(datasetVersions);

		return lazyLoad(repository.save(loaded));
	}

	/**
	 * {@inheritDoc}
	 *
	 * Moves an unpublished dataset (and any of its draft descriptors) into the
	 * REVIEWING state.
	 */
	@Override
	@Transactional
	@PreAuthorize("hasRole('ADMINISTRATOR') or hasPermission(#dataset, 'write')")
	@NotifyForReview
	@CacheEvict(value = "accessionAmphibianDatasets", allEntries = true)
	public Dataset reviewDataset(final Dataset dataset) {
		final Dataset loaded = getUnpublishedDataset(dataset);

		if (loaded.getState() == PublishState.REVIEWING) {
			throw new InvalidApiUsageException("The dataset is already under approval");
		}

		// Draft descriptors go through review together with the dataset.
		// (The previous try/catch that merely rethrew InvalidApiUsageException
		// was a no-op and has been removed.)
		for (final Descriptor descriptor : loaded.getDescriptors()) {
			if (descriptor.getState() == PublishState.DRAFT) {
				log.info("Submitting descriptor for review {}", descriptor);
				descriptorService.reviewDescriptor(descriptor);
			}
		}

		loaded.setState(PublishState.REVIEWING);

		return lazyLoad(repository.save(loaded));
	}

	/**
	 * {@inheritDoc}
	 *
	 * Un-publishes a dataset, returning it to DRAFT: selects a replacement
	 * current version when needed and tightens ACLs on the dataset and files.
	 */
	@Override
	@Transactional
	@PreAuthorize("hasRole('ADMINISTRATOR') or hasPermission(#dataset, 'administration')")
	@CacheEvict(value = "accessionAmphibianDatasets", allEntries = true)
	public Dataset rejectDataset(final Dataset dataset) {
		Dataset loaded = getDataset(dataset);
		if (loaded == null) {
			throw new NotFoundElement("No dataset with specified uuid and version");
		}

		// Non-administrators may only un-publish within 24h of the last modification
		if (loaded.isPublished() && !securityUtils.hasRole(UserRole.ADMINISTRATOR)) {
			long oneDay = 24 * 60 * 60 * 1000;
			if (loaded.getLastModifiedDate() != null && loaded.getLastModifiedDate().toEpochMilli() <= (System.currentTimeMillis() - oneDay)) {
				throw new InvalidApiUsageException("Cannot be un-published. More than 24 hours have passed since the publication.");
			}
		}

		if (loaded.isPublished() && Objects.equals(loaded.getCurrent(), true)) {
			// This is the current published version: promote the most recently
			// created other published version (if any) to be the new current one.
			final DatasetVersions datasetVersions = loaded.getVersions();
			List<Dataset> notCurrentPublishedVersions = datasetVersions.getAllVersions().stream()
					.filter(d -> d.getCurrent() == null && d.isPublished()).collect(Collectors.toList());

			if (!notCurrentPublishedVersions.isEmpty()) {
				UUID youngestDatasetUUID = notCurrentPublishedVersions.stream()
						.max(Comparator.comparing(Dataset::getCreatedDate)).get().getUuid();

				loaded.setCurrent(null);
				repository.save(loaded);

				Dataset youngestDataset = repository.findByUuid(youngestDatasetUUID);
				youngestDataset.setCurrent(true);
				repository.save(youngestDataset);
				datasetVersions.setCurrentVersion(youngestDataset);
				loaded.setCurrentVersion(youngestDatasetUUID);
			} else {
				// No other published version remains; the chain has no current version
				loaded.setCurrent(null);
				datasetVersions.setCurrentVersion(null);
			}
			datasetVersionsRepository.save(datasetVersions);
		} else if (loaded.isPublished() && Objects.isNull(loaded.getCurrent())) {
			throw new InvalidApiUsageException("Cannot be un-published. The dataset is not the latest version.");
		}

		loaded.setState(PublishState.DRAFT);

		{
			// Tighten permissions on dataset files
			for (final RepositoryFile datasetFile : loaded.getRepositoryFiles()) {
				aclService.makePubliclyReadable(datasetFile, false);
			}
		}

		// Make dataset publicly not-readable
		aclService.makePubliclyReadable(loaded, false);

		return lazyLoad(repository.save(loaded));
	}

	/**
	 * {@inheritDoc}
	 *
	 * @return rows produced by the repository's lastPublished query
	 */
	@Override
	public List<Object[]> lastPublished() {
		// Straight delegation to the repository query
		return repository.lastPublished();
	}

	/**
	 * {@inheritDoc}
	 *
	 * Attaches a new creator to an unpublished dataset.
	 */
	@Transactional
	@Override
	@PreAuthorize("hasRole('ADMINISTRATOR') or hasPermission(#dataset, 'write')")
	public DatasetCreator createDatasetCreator(Dataset dataset, final DatasetCreator input) throws NotFoundElement {
		// Creators may only be added while the dataset is unpublished
		final Dataset owner = getUnpublishedDataset(dataset);
		input.setDataset(owner);
		return datasetCreatorRepository.save(input);
	}

	/**
	 * {@inheritDoc}
	 *
	 * Removes a creator from an unpublished dataset after verifying ownership.
	 */
	@Transactional
	@Override
	@PreAuthorize("hasRole('ADMINISTRATOR') or hasPermission(#dataset, 'write')")
	public DatasetCreator removeDatasetCreator(Dataset dataset, final DatasetCreator input) throws NotFoundElement {
		final Dataset owner = getUnpublishedDataset(dataset);

		// The creator must exist (with matching version) and belong to this dataset
		final DatasetCreator toRemove = loadDatasetCreator(input);
		if (!toRemove.getDataset().getUuid().equals(owner.getUuid())) {
			throw new InvalidApiUsageException("Creator does not belong to dataset");
		}
		datasetCreatorRepository.delete(toRemove);

		return toRemove;
	}

	/**
	 * {@inheritDoc}
	 *
	 * @param datasetUuid UUID of the dataset whose creators to list
	 * @param page paging and sorting information
	 * @return one page of the dataset's creators
	 */
	@Override
	public Page<DatasetCreator> listDatasetCreators(final UUID datasetUuid, final Pageable page) {
		return datasetCreatorRepository.listByUUidOfDataset(page, datasetUuid);
	}

	/**
	 * {@inheritDoc}
	 *
	 * @throws NotFoundElement when no creator exists for the input's UUID
	 * @throws ConcurrencyFailureException when the stored version differs from the input's
	 */
	@Override
	public DatasetCreator loadDatasetCreator(final DatasetCreator input) throws NotFoundElement {
		log.info("Load DatasetCreator {}", input);
		final DatasetCreator datasetCreator = datasetCreatorRepository.findByUuid(input.getUuid());

		if (datasetCreator == null) {
			log.error("DatasetCreator {} not found", input);
			// Fixed typo in exception message: "no found" -> "not found"
			throw new NotFoundElement("DatasetCreator by " + input.getUuid().toString() + " not found");
		}
		// Optimistic-lock style check: the caller must hold the current version
		if (!datasetCreator.getVersion().equals(input.getVersion())) {
			log.error("Don't match the version");
			throw new ConcurrencyFailureException("Object version changed to " + datasetCreator.getVersion() + ", you provided " + input.getVersion());
		}
		return datasetCreator;
	}

	/**
	 * Load a dataset creator by UUID.
	 *
	 * @param datasetCreatorUuid the creator UUID
	 * @return the creator
	 * @throws NotFoundElement when no creator exists for the UUID
	 */
	@Override
	public DatasetCreator loadDatasetCreator(final UUID datasetCreatorUuid) throws NotFoundElement {
		log.info("Load DatasetCreator {}", datasetCreatorUuid);
		final DatasetCreator datasetCreator = datasetCreatorRepository.findByUuid(datasetCreatorUuid);
		if (datasetCreator == null) {
			log.error("DatasetCreator {} not found", datasetCreatorUuid);
			// Fixed typo in exception message: "no found" -> "not found"
			throw new NotFoundElement("DatasetCreator by " + datasetCreatorUuid.toString() + " not found");
		}
		return datasetCreator;
	}

	/**
	 * {@inheritDoc}
	 *
	 * @return the creators of the reloaded dataset
	 */
	@Override
	public List<DatasetCreator> loadDatasetCreators(Dataset dataset) throws NotFoundElement {
		// Reload to get a managed instance with its creators association
		final Dataset loaded = getDataset(dataset);
		return loaded.getCreators();
	}

	/**
	 * {@inheritDoc}
	 *
	 * Updates a creator of an unpublished dataset after verifying ownership.
	 */
	@Override
	@Transactional
	@PreAuthorize("hasRole('ADMINISTRATOR') or hasPermission(#dataset, 'write')")
	public DatasetCreator updateDatasetCreator(Dataset dataset, final DatasetCreator input) throws NotFoundElement {
		final Dataset owner = getUnpublishedDataset(dataset);

		// The creator must exist (with matching version) and belong to this dataset
		final DatasetCreator target = loadDatasetCreator(input);
		if (!target.getDataset().getUuid().equals(owner.getUuid())) {
			throw new InvalidApiUsageException("Creator does not belong to dataset");
		}
		copyCreator(target, input);
		return datasetCreatorRepository.save(target);
	}

	/**
	 * {@inheritDoc}
	 *
	 * Suggests up to 20 creators (by name or affiliation prefix) from datasets
	 * the current user is allowed to write to.
	 */
	@Override
	public List<DatasetCreator> autocompleteCreators(final String text) {
		// Restrict matches to datasets the current user can write to
		final HashSet<Long> writableDatasetIds = new HashSet<>(securityUtils.listObjectIdentityIdsForCurrentUser(Dataset.class, BasePermission.WRITE));
		// Match on creator full name or institutional affiliation, case-insensitively
		final Predicate predicate = datasetCreator.dataset().id.in(writableDatasetIds)
			.and(datasetCreator.fullName.startsWithIgnoreCase(text)
				.or(datasetCreator.institutionalAffiliation.startsWithIgnoreCase(text)));
		return datasetCreatorRepository.findAll(predicate, PageRequest.of(0, 20, Sort.by("fullName"))).getContent();
	}

	/**
	 * Copy editable creator values from source onto target.
	 *
	 * @param target the entity being updated
	 * @param source the entity carrying the new values
	 */
	private void copyCreator(final DatasetCreator target, final DatasetCreator source) {
		// Identity and role
		target.setFullName(source.getFullName());
		target.setRole(source.getRole());
		// Contact details
		target.setEmail(source.getEmail());
		target.setPhoneNumber(source.getPhoneNumber());
		target.setFax(source.getFax());
		// Institution
		target.setInstitutionalAffiliation(source.getInstitutionalAffiliation());
		target.setInstituteAddress(source.getInstituteAddress());
	}

	/**
	 * Copy location values from source onto target.
	 *
	 * @param target the entity being updated
	 * @param source the entity carrying the new values
	 */
	private void copyLocation(final DatasetLocation target, final DatasetLocation source) {
		// Country
		target.setCountryCode(source.getCountryCode());
		target.setUserCountry(source.getUserCountry());
		target.setMapCountry(source.getMapCountry());
		// Locality
		target.setStateProvince(source.getStateProvince());
		target.setVerbatimLocality(source.getVerbatimLocality());
		// Coordinates
		target.setDecimalLatitude(source.getDecimalLatitude());
		target.setDecimalLongitude(source.getDecimalLongitude());
		// Period and description
		target.setStartDate(source.getStartDate());
		target.setEndDate(source.getEndDate());
		target.setDescription(source.getDescription());
	}

	/**
	 * Copy dataset accessionRef values from source onto target.
	 *
	 * @param target the entity being updated
	 * @param source the entity carrying the new values
	 */
	private void copyAccessionRef(final DatasetAccessionRef target, final DatasetAccessionRef source) {
		// Identifiers
		target.setDoi(source.getDoi());
		target.setInstCode(source.getInstCode());
		target.setAcceNumb(source.getAcceNumb());
		// Taxonomy
		target.setGenus(source.getGenus());
		target.setSpecies(source.getSpecies());
		// Linked accession record
		target.setAccession(source.getAccession());
	}

	/**
	 * Copy dataset values from source onto target. Crops are replaced wholesale,
	 * but only when the source provides a crops collection.
	 *
	 * @param target the entity being updated
	 * @param source the entity carrying the new values
	 */
	private void copyValues(final Dataset target, final Dataset source) {
		// Descriptive fields
		target.setTitle(source.getTitle());
		target.setDescription(source.getDescription());
		target.setSubject(source.getSubject());
		target.setFormat(source.getFormat());
		target.setOriginalLanguageTag(source.getOriginalLanguageTag());
		// Provenance and licensing
		target.setRights(source.getRights());
		target.setSource(source.getSource());
		target.setOwner(source.getOwner());
		// Versioning and state
		target.setState(source.getState());
		target.setVersionTag(source.getVersionTag());
		// Temporal coverage
		target.setCreated(source.getCreated());
		target.setStartDate(source.getStartDate());
		target.setEndDate(source.getEndDate());

		if (source.getCrops() == null) {
			return;
		}
		if (target.getCrops() == null) {
			target.setCrops(new HashSet<>());
		}
		target.getCrops().clear();
		target.getCrops().addAll(source.getCrops());
	}

	/**
	 * Replace the target dataset's descriptors with a de-duplicated copy of the
	 * given list; a null or empty list leaves the target unchanged.
	 *
	 * @param target the dataset to update
	 * @param descriptors the replacement descriptors
	 */
	private void copyDescriptors(final Dataset target, final List<Descriptor> descriptors) {
		// Idiomatic emptiness check (isEmpty() instead of size() == 0)
		if (descriptors == null || descriptors.isEmpty()) {
			return;
		}

		target.getDescriptors().clear();
		target.getDescriptors().addAll(descriptors.stream().distinct().collect(Collectors.toList()));
	}

	/**
	 * Re-run accession matching for every dataset in the system.
	 */
	@Override
	@Transactional(readOnly = false)
	@PreAuthorize("hasRole('ADMINISTRATOR')")
	@CacheEvict(value = "accessionAmphibianDatasets", allEntries = true)
	public void rematchDatasetAccessions() {
		// Delegate per-dataset handling via method reference
		repository.findAll().forEach(this::rematchDatasetAccessions);
	}

	/**
	 * Refresh the accession count of one dataset and schedule re-matching of its
	 * accession references.
	 *
	 * @param dataset the dataset to rematch
	 * @return the fully loaded dataset, or {@code null} when no dataset exists
	 *         for the given UUID (kept for backward compatibility)
	 */
	@Override
	@Transactional(readOnly = false)
	@PreAuthorize("hasRole('ADMINISTRATOR') or hasPermission(#dataset, 'write')")
	@CacheEvict(value = "accessionAmphibianDatasets", allEntries = true)
	public Dataset rematchDatasetAccessions(Dataset dataset) {
		// Work with the managed entity; an unknown UUID yields null
		final Dataset managed = repository.findByUuid(dataset.getUuid());
		if (managed == null) {
			return null;
		}
		final List<DatasetAccessionRef> accessionRefs = managed.getAccessionRefs();
		managed.setAccessionCount(accessionRefs.size());
		final Dataset saved = repository.save(managed);

		batchRematchAccessionRefs(accessionRefs);

		log.info("Done scheduling of relinking {} accession refs.", accessionRefs.size());
		return lazyLoad(saved);
	}

	/**
	 * Schedule re-matching of AccessionRefs on the task executor in batches of
	 * 10,000. Failures in individual batches are logged and do not abort the
	 * other batches.
	 *
	 * @param accessionRefs references to re-match
	 */
	@Override
	@CacheEvict(value = "accessionAmphibianDatasets", allEntries = true)
	public void batchRematchAccessionRefs(List<DatasetAccessionRef> accessionRefs) {
		Lists.partition(accessionRefs, 10000).parallelStream().forEach((batch) -> {
			taskExecutor.execute(() -> {
				try {
					log.info("Rematching {} dataset refs", batch.size());
					accessionRefMatcher.rematchAccessionRefs(batch, accessionRefRepository);
				} catch (Throwable e) {
					// A failed batch is a real problem: log at WARN (was INFO) with stack trace
					log.warn("Rematch failed with {}", e.getMessage(), e);
				}
			});
		});
	}

	/**
	 * Detach the given accessions from all dataset accession references that
	 * point at them.
	 *
	 * @param accessions accessions being unlinked (e.g. prior to deletion)
	 * @return number of references that were unlinked
	 */
	@Override
	@Transactional(readOnly = false, propagation = Propagation.REQUIRED, isolation = Isolation.READ_UNCOMMITTED)
	@CacheEvict(value = "accessionAmphibianDatasets", allEntries = true)
	public int clearAccessionRefs(Collection<Accession> accessions) {
		if (accessions == null || accessions.isEmpty()) {
			return 0;
		}

		final Iterable<DatasetAccessionRef> matching = accessionRefRepository.findAll(QDatasetAccessionRef.datasetAccessionRef.accession().in(accessions));
		// Null the accession link on each matched reference
		int cleared = 0;
		for (final DatasetAccessionRef ref : matching) {
			ref.setAccession(null);
			cleared++;
		}
		accessionRefRepository.saveAll(matching);

		return cleared;
	}

	/**
	 * {@inheritDoc}
	 *
	 * Attaches a new location to an unpublished dataset and bumps the dataset's
	 * modification timestamp.
	 */
	@Override
	@Transactional
	@PreAuthorize("hasRole('ADMINISTRATOR') or hasPermission(#dataset, 'write')")
	public DatasetLocation createLocation(Dataset dataset, final DatasetLocation input) throws NotFoundElement {
		final Dataset owner = getUnpublishedDataset(dataset);

		log.info("Create DatasetLocation {} for dataset {}", input, owner.getUuid());
		input.setDataset(owner);
		final DatasetLocation saved = locationRepository.save(input);

		// Keep the in-memory association in sync and record the modification
		owner.getLocations().add(saved);
		owner.setLastModifiedDate(Instant.now());
		repository.save(owner);

		return saved;
	}

	/**
	 * {@inheritDoc}
	 *
	 * Removes a location from an unpublished dataset after verifying ownership.
	 */
	@Override
	@Transactional
	@PreAuthorize("hasRole('ADMINISTRATOR') or hasPermission(#dataset, 'write')")
	public DatasetLocation removeLocation(Dataset dataset, final DatasetLocation input) throws NotFoundElement {
		final Dataset owner = getUnpublishedDataset(dataset);

		log.info("Remove DatasetLocation {} of dataset {}", input, owner.getUuid());
		// The location must exist (with matching version) and belong to this dataset
		final DatasetLocation toRemove = loadLocation(input);
		if (!toRemove.getDataset().getUuid().equals(owner.getUuid())) {
			throw new InvalidApiUsageException("Location does not belong to dataset");
		}
		locationRepository.delete(toRemove);

		/// Force update of Dataset#startDate, Dataset#endDate
		owner.getLocations().remove(toRemove);
		owner.setLastModifiedDate(Instant.now());
		repository.save(owner);

		return toRemove;
	}

	/**
	 * {@inheritDoc}
	 *
	 * @return the dataset's locations, initialized within the session
	 */
	@Override
	public List<DatasetLocation> listLocation(Dataset dataset) throws NotFoundElement {
		log.info("Load list DatasetLocation for dataset {}", dataset.getUuid());
		final Dataset loaded = getDataset(dataset);
		final List<DatasetLocation> locations = loaded.getLocations();
		// Touch the lazy collection so it is initialized before leaving the session
		locations.size();
		return locations;
	}

	/**
	 * {@inheritDoc}
	 *
	 * @param datasetUuid UUID of the dataset whose locations to list
	 * @param page paging and sorting information
	 * @return one page of the dataset's locations
	 */
	@Override
	public Page<DatasetLocation> listLocation(final UUID datasetUuid, final Pageable page) {
		log.info("Load list DatasetLocation for dataset {}", datasetUuid);
		return locationRepository.listByUUidOfDataset(page, datasetUuid);
	}

	/**
	 * {@inheritDoc}
	 *
	 * @throws NotFoundElement when no location exists for the input's UUID
	 * @throws ConcurrencyFailureException when the stored version differs from the input's
	 */
	@Override
	public DatasetLocation loadLocation(final DatasetLocation input) throws NotFoundElement {
		log.info("Load DatasetLocation {}", input);

		final DatasetLocation datasetLocation = locationRepository.findByUuid(input.getUuid());

		if (datasetLocation == null) {
			log.error("DatasetLocation by UUID {} not found", input);
			// Fixed typo in exception message: "no found" -> "not found"
			throw new NotFoundElement("DatasetLocation " + input.getUuid().toString() + " not found");
		}
		// Optimistic-lock style check: the caller must hold the current version
		if (!datasetLocation.getVersion().equals(input.getVersion())) {
			log.error("Don't match the version");
			throw new ConcurrencyFailureException("Object version changed to " + datasetLocation.getVersion() + ", you provided " + input.getVersion());
		}
		return datasetLocation;
	}

	/**
	 * {@inheritDoc}
	 *
	 * @param locationUuid the location UUID
	 * @return the location
	 * @throws NotFoundElement when no location exists for the UUID
	 */
	@Override
	public DatasetLocation loadLocation(final UUID locationUuid) throws NotFoundElement {
		log.info("Load list DatasetLocation by uuid {}", locationUuid);

		final DatasetLocation datasetLocation = locationRepository.findByUuid(locationUuid);

		if (datasetLocation == null) {
			log.error("DatasetLocation by UUID {} not found", locationUuid);
			// Fixed exception message: it wrongly said "DatasetCreator" and "no found"
			throw new NotFoundElement("DatasetLocation by " + locationUuid.toString() + " not found");
		}
		return datasetLocation;
	}

	/**
	 * Update a location of an unpublished dataset and propagate the location's
	 * start/end dates to the dataset.
	 *
	 * @param dataset the owning dataset (must be unpublished)
	 * @param input the location with updated values, identified by UUID and version
	 * @return the saved location
	 * @throws NotFoundElement when the location does not exist
	 */
	@Override
	@Transactional
	@PreAuthorize("hasRole('ADMINISTRATOR') or hasPermission(#dataset, 'write')")
	public DatasetLocation updateLocation(Dataset dataset, final DatasetLocation input) throws NotFoundElement {
		final Dataset owner = getUnpublishedDataset(dataset);

		// The location must exist (with matching version) and belong to this dataset
		final DatasetLocation target = loadLocation(input);
		if (!target.getDataset().getUuid().equals(owner.getUuid())) {
			throw new InvalidApiUsageException("Location does not belong to dataset");
		}
		copyValue(target, input);

		final DatasetLocation saved = locationRepository.save(target);
		// Refresh the in-memory association and carry the dates over to the dataset
		owner.getLocations().remove(input);
		owner.getLocations().add(saved);
		owner.setStartDate(saved.getStartDate());
		owner.setEndDate(saved.getEndDate());
		update(owner);

		return saved;
	}

	/**
	 * Copy editable location values from source onto target.
	 *
	 * @param target the entity being updated
	 * @param source the entity carrying the new values
	 */
	protected void copyValue(final DatasetLocation target, final DatasetLocation source) {
		target.setDescription(source.getDescription());

		// Country
		target.setCountryCode(source.getCountryCode());
		target.setUserCountry(source.getUserCountry());
		target.setMapCountry(source.getMapCountry());

		// Locality
		target.setVerbatimLocality(source.getVerbatimLocality());
		target.setStateProvince(source.getStateProvince());

		// Coordinates
		target.setDecimalLatitude(source.getDecimalLatitude());
		target.setDecimalLongitude(source.getDecimalLongitude());

		// Period
		target.setStartDate(source.getStartDate());
		target.setEndDate(source.getEndDate());
	}

	/**
	 * Migrate dataset accession references from one institute code to another.
	 *
	 * @param currentInstitute institute being replaced
	 * @param newInstitute institute taking over
	 * @return number of accession references updated
	 */
	@Override
	@Transactional(propagation = Propagation.MANDATORY) // Need to be part of an existing transaction!
	@PreAuthorize("hasRole('ADMINISTRATOR')")
	public long changeInstitute(FaoInstitute currentInstitute, FaoInstitute newInstitute) {
		// Note: Dataset links to a Partner, not FaoInstitute!
		log.warn("Migrating datasets from {} to {}", currentInstitute.getCode(), newInstitute.getCode());

		// Bulk-update instCode on all accession refs pointing at the old institute
		final var refs = QDatasetAccessionRef.datasetAccessionRef;
		return jpaQueryFactory.update(refs)
			.set(refs.instCode, newInstitute.getCode())
			.where(refs.instCode.eq(currentInstitute.getCode()))
			.execute();
	}

	/**
	 * Load the descriptors of a dataset, fully initialized.
	 *
	 * @param dataset dataset whose descriptors to load
	 * @return the dataset's descriptors
	 * @throws NotFoundElement when no dataset exists for the given UUID
	 */
	@Override
	@PreAuthorize("#dataset==null || #dataset.published || hasRole('ADMINISTRATOR') || hasPermission(#dataset, 'read')")
	public List<Descriptor> getDatasetDescriptors(Dataset dataset) {
		// Guard against a stale/unknown UUID: findByUuid may return null, which
		// previously caused a NullPointerException here.
		final Dataset loaded = repository.findByUuid(dataset.getUuid());
		if (loaded == null) {
			throw new NotFoundElement("No dataset with UUID=" + dataset.getUuid());
		}
		var datasetDescriptors = loaded.getDescriptors();
		// Initialize lazy associations of each descriptor before returning
		datasetDescriptors.forEach(Descriptor::lazyLoad);
		return datasetDescriptors;
	}

	/**
	 * Synchronize the dataset's descriptor list with the descriptor columns
	 * registered in Amphibian: descriptors unknown to Amphibian are dropped,
	 * descriptors present only in Amphibian are added.
	 *
	 * @param dataset dataset to synchronize
	 * @return the synchronized descriptor list
	 */
	@Override
	@Transactional
	@PreAuthorize("hasRole('ADMINISTRATOR') or hasPermission(#dataset, 'write')")
	public List<Descriptor> synchronizeDescriptors(Dataset dataset) {
		// Current descriptors in Genesys and the descriptor column keys in Amphibian
		final List<Descriptor> descriptors = new ArrayList<>(repository.findByUuid(dataset.getUuid()).getDescriptors());
		final var amphibianColumns = amphibianService.getAmphibianDataset(dataset).getColumns().keySet();

		// Drop descriptors Amphibian no longer knows about
		descriptors.removeIf(d -> !amphibianColumns.contains(String.valueOf(d.getUuid())));

		// Add descriptors present in Amphibian but missing here
		final Set<UUID> knownUuids = descriptors.stream().map(Descriptor::getUuid).collect(Collectors.toSet());
		final Set<UUID> missingUuids = amphibianColumns.stream()
			.map(UUID::fromString)
			.filter(uuid -> !knownUuids.contains(uuid))
			.collect(Collectors.toSet());
		descriptorRepository.findAll(QDescriptor.descriptor.uuid.in(missingUuids)).forEach(descriptors::add);

		dataset.setDescriptors(descriptors);
		return repository.save(dataset).getDescriptors();
	}

	/**
	 * Find UUIDs of current, published datasets that have Amphibian trait data
	 * and reference at least one accession matching the filter.
	 *
	 * @param accessionFilter non-empty accession filter; full-text filters are rejected
	 * @return unmodifiable set of dataset UUIDs known to Amphibian
	 * @throws InvalidApiUsageException on empty/full-text filters or query failure
	 */
	@Override
	@Cacheable(value = "accessionAmphibianDatasets", keyGenerator = "shortFilterKeyGenerator")
	public Set<UUID> findAccessionsAmphibianDatasets(AccessionFilter accessionFilter) {
		QDataset qDataset = QDataset.dataset;
		var query = jpaQueryFactory.from(qDataset).select(qDataset.uuid);
		// Only current, published dataset versions are considered
		var predicate = qDataset.state.in(Set.of(PublishState.PUBLISHED));
		predicate = predicate.and(qDataset.current.eq(true));
		if (! accessionFilter.isEmpty()) {
			if (accessionFilter.isFulltextQuery()) {
				throw new InvalidApiUsageException("Full-text search not supported");

//				log.warn("Using ES to find accessions");
//				var accessionIds = new LinkedList<Long>();
//				elasticsearchService.processById(Accession.class, accessionFilter, accessionIds::addAll, null);
//				var qDar = QDatasetAccessionRef.datasetAccessionRef;
//				var refQuery = jpaQueryFactory.from(qDar).select(qDar.list().id).distinct().where(qDar.accession().id.in(accessionIds));
//
//				predicate = predicate.and(qDataset.id.in(refQuery.fetch()));

			} else {
				// Datasets whose accession refs point at accessions matching the filter
				var qA = QAccession.accession;
				var subQuery = JPAExpressions.selectFrom(qA);
				accessionFilter.buildJpaQuery(subQuery, qA);
				var qDar = QDatasetAccessionRef.datasetAccessionRef;
				var refQuery = jpaQueryFactory.from(qDar).select(qDar.list().id).distinct().where(qDar.accession().in(subQuery));
	
				predicate = predicate.and(qDataset.id.in(refQuery.fetch()));

			}
		} else {
			throw new InvalidApiUsageException("Accession filter must not be empty");
		}
		query.where(predicate);

		try {
			// Intersect the matched dataset UUIDs with those registered in Amphibian
			List<UUID> datasetUuids = LoggerHelper.withSqlLogging(() -> query.fetch());
			var amphibianDatasets = amphibianService.getAmphibianDatasetsList(datasetUuids);
			return Collections.unmodifiableSet(amphibianDatasets.stream().map(dt -> UUID.fromString(dt.getKey())).collect(Collectors.toSet()));
		} catch (Exception e) {
			throw new InvalidApiUsageException("Could not query for traits", e);
		}
	}

	/**
	 * De-duplicate accession references using the dedicated comparator.
	 *
	 * @param accessionRefs references to de-duplicate
	 * @return distinct references, ordered by {@code distinctAcceRefsComparator}
	 */
	private Set<DatasetAccessionRef> getDistinctAccessionRefs(Collection<DatasetAccessionRef> accessionRefs) {
		final TreeSet<DatasetAccessionRef> unique = new TreeSet<>(distinctAcceRefsComparator);
		unique.addAll(accessionRefs);
		return unique;
	}

	/**
	 * Load the dataset's descriptors with their translations.
	 *
	 * @param dataset dataset whose descriptors to translate
	 * @return translated descriptors
	 */
	@Override
	@PreAuthorize("hasRole('ADMINISTRATOR') || #dataset.isPublished() || hasPermission(#dataset, 'read')")
	public List<DescriptorTranslationService.TranslatedDescriptor> loadTranslatedDescriptors(Dataset dataset) {
		// Reload so descriptors can be fetched lazily within this session
		final Dataset reloaded = repository.findById(dataset.getId()).orElseThrow();
		return descriptorTranslationService.getTranslated(reloaded.getDescriptors());
	}

	/**
	 * Machine-translate a dataset's title and description into the target language.
	 *
	 * Translations into non-English languages are produced from the English text
	 * (original or previously translated); translations into English use the
	 * original-language texts.
	 *
	 * @param original the dataset to translate
	 * @param targetLanguage language tag to translate into
	 * @return a transient, machine-translated {@link DatasetLang}; returned empty
	 *         when no translator service is configured
	 * @throws TranslatorService.TranslatorException when the translator fails
	 * @throws InvalidApiUsageException when source and target language match, or
	 *         when an English source text is required but unavailable
	 */
	@Override
	@PreAuthorize("hasRole('ADMINISTRATOR') or hasPermission(#original, 'write')")
	public DatasetLang machineTranslate(Dataset original, String targetLanguage) throws TranslatorService.TranslatorException {
		if (Objects.equals(original.getOriginalLanguageTag(), targetLanguage)) {
			throw new InvalidApiUsageException("Source and target language are the same");
		}

		var mt = new DatasetLang();
		mt.setMachineTranslated(true);
		mt.setLanguageTag(targetLanguage);
		mt.setEntity(original);

		// Without a configured translator we return the empty translation shell
		if (translatorService == null) return mt;

		var builder = TranslatorService.TranslationStructuredRequest.builder()
			.targetLang(targetLanguage);

		if (! Objects.equals(Locale.ENGLISH.getLanguage(), targetLanguage) && ! Objects.equals(Locale.ENGLISH.getLanguage(), original.getOriginalLanguageTag())) {
			// Translations to other languages use the English version (either original or translated)
			var enTranslation = translationSupport.getLang(original, Locale.ENGLISH.getLanguage());
			if (enTranslation == null) {
				throw new InvalidApiUsageException("English text is not available.");
			}

			builder
				.sourceLang(enTranslation.getLanguageTag())
				.texts(Map.of(
					"title", new FormattedText(TextFormat.markdown, enTranslation.getTitle()),
					"description", new FormattedText(TextFormat.markdown, enTranslation.getDescription())
				));

		} else {
			// Translations to English use the original texts
			builder
				.sourceLang(original.getOriginalLanguageTag())
				.texts(Map.of(
					"title", new FormattedText(TextFormat.markdown, original.getTitle()),
					"description", new FormattedText(TextFormat.markdown, original.getDescription())
				));
		}

		var translations = translatorService.translate(builder.build());

		// Only set target fields for which there was source text to translate
		if (StringUtils.isNotBlank(original.getTitle())) {
			mt.setTitle(translations.getTexts().get("title"));
		}
		if (StringUtils.isNotBlank(original.getDescription())) {
			mt.setDescription(translations.getTexts().get("description"));
		}
		return mt;
	}
}