AccessionAuditLogDownload.java

/*
 * Copyright 2019 Global Crop Diversity Trust
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.genesys.server.service.worker;

import static org.genesys.util.ExcelUtils.updateCell;

import java.io.IOException;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Calendar;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.Collectors;

import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
import javax.persistence.criteria.CriteriaBuilder;
import javax.persistence.criteria.CriteriaQuery;
import javax.persistence.criteria.Path;
import javax.persistence.criteria.Root;

import org.apache.commons.lang3.StringUtils;
import org.apache.poi.ss.usermodel.CellStyle;
import org.apache.poi.ss.usermodel.HorizontalAlignment;
import org.apache.poi.ss.usermodel.Row;
import org.apache.poi.ss.usermodel.Workbook;
import org.apache.poi.xssf.streaming.DeferredSXSSFSheet;
import org.apache.poi.xssf.streaming.DeferredSXSSFWorkbook;
import org.apache.poi.xssf.streaming.SXSSFSheet;
import org.genesys.blocks.auditlog.annotations.Audited;
import org.genesys.blocks.auditlog.model.AuditAction;
import org.genesys.blocks.auditlog.model.AuditLog;
import org.genesys.blocks.auditlog.model.QAuditLog;
import org.genesys.blocks.auditlog.service.ClassPKService;
import org.genesys.blocks.model.ClassPK;
import org.genesys.filerepository.model.RepositoryFolder;
import org.genesys.server.model.genesys.Accession;
import org.genesys.server.model.genesys.AccessionCollect;
import org.genesys.server.model.genesys.AccessionId;
import org.genesys.server.model.genesys.QAccession;
import org.genesys.server.model.genesys.Taxonomy2;
import org.genesys.server.model.impl.FaoInstitute;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.security.access.prepost.PreAuthorize;
import org.springframework.stereotype.Component;
import org.springframework.transaction.annotation.Transactional;

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import com.google.common.collect.Lists;
import com.querydsl.core.Tuple;
import com.querydsl.jpa.impl.JPAQuery;
import com.querydsl.jpa.impl.JPAQueryFactory;

/**
 * Writes audit logs for accessions and related entities to a streaming Excel workbook.
 * 
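 * <p>
 * A minimal usage sketch; the field name and the way the {@link FaoInstitute} is obtained are
 * assumptions for illustration only. The caller must hold the ADMINISTRATOR role or have write
 * permission on the institute.
 * </p>
 *
 * <pre>{@code
 * // accessionAuditLogDownload is injected by Spring (field name is illustrative)
 * void exportAuditLog(FaoInstitute faoInstitute) throws IOException {
 *     try (OutputStream out = Files.newOutputStream(Paths.get("audit-" + faoInstitute.getCode() + ".xlsx"))) {
 *         // null start and end dates export the complete audit history
 *         accessionAuditLogDownload.writePassportAuditLog(faoInstitute, null, null, out);
 *     }
 * }
 * }</pre>
 *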
 * @author Matija Obreza
 */
@Component
@Transactional(readOnly = true)
public class AccessionAuditLogDownload implements InitializingBean {
	private static final Logger LOG = LoggerFactory.getLogger(AccessionAuditLogDownload.class);

	private static final Object TO_BE_RESOLVED = new Object();

	@Autowired
	private JPAQueryFactory jpaQueryFactory;

	@PersistenceContext
	private EntityManager entityManager;

	@Autowired
	private ClassPKService classPkService;

	private ClassPK cpkAccession;
	private ClassPK cpkAccessionId;
	private ClassPK cpkAccessionColl;

	private ObjectMapper objectMapper = new ObjectMapper();

	private static final Set<String> referencedTables = Collections.unmodifiableSet(new HashSet<>(Lists.newArrayList("taxonomy2", "country", "repositoryfolder", "faoinstitute")));

	@Override
	public void afterPropertiesSet() throws Exception {
		this.cpkAccession = classPkService.getClassPk(Accession.class);
		this.cpkAccessionId = classPkService.getClassPk(AccessionId.class);
		this.cpkAccessionColl = classPkService.getClassPk(AccessionCollect.class);
	}

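	/**
	 * Reusable cell styles for the generated workbook (currently a right-aligned date-time format).
	 */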
	private static class WorkbookStyles {
		CellStyle dateTimeStyle;

		public static WorkbookStyles create(Workbook workbook) {
			WorkbookStyles wbStyles = new WorkbookStyles();

			wbStyles.dateTimeStyle = workbook.createCellStyle();
			wbStyles.dateTimeStyle.setDataFormat(workbook.createDataFormat().getFormat("dd-mmm-yyyy HH:MM:ss"));
			wbStyles.dateTimeStyle.setAlignment(HorizontalAlignment.RIGHT);

			return wbStyles;
		}
	}

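	/**
	 * Generates an Excel workbook for the institute with three sheets: the current accessions,
	 * the audit log of existing accessions and related records, and the audit log of deleted
	 * accessions.
	 *
	 * @param faoInstitute institute whose audit logs are exported
	 * @param startDate optional lower bound on the log date, may be {@code null}
	 * @param endDate optional upper bound on the log date, may be {@code null}
	 * @param outputStream target stream for the generated workbook
	 * @throws IOException if writing the workbook fails
	 */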
	@PreAuthorize("hasRole('ADMINISTRATOR') or hasPermission(#faoInstitute, 'write')")
	public void writePassportAuditLog(FaoInstitute faoInstitute, Calendar startDate, Calendar endDate, OutputStream outputStream) throws IOException {
		final DeferredSXSSFWorkbook workbook = new DeferredSXSSFWorkbook();
		WorkbookStyles wbStyles = WorkbookStyles.create(workbook);

		workbook.createSheet("Accessions " + faoInstitute.getCode());
		final DeferredSXSSFSheet accessionsSheet = workbook.getStreamingSheet("Accessions " + faoInstitute.getCode());
		accessionsSheet.setRowGenerator((sheet) -> {
			final Row headerRow = sheet.createRow(0);
			updateCell(headerRow, 0, "entityId");
			updateCell(headerRow, 1, "INSTCODE");
			updateCell(headerRow, 2, "ACCENUMB");
			updateCell(headerRow, 3, "GENUS");
			updateCell(headerRow, 4, "SPECIES");
			updateCell(headerRow, 5, "DOI");
			updateCell(headerRow, 6, "accessiongeo.id");
			updateCell(headerRow, 7, "accessioncollect.id");
			writeAccessionsSheet(faoInstitute, startDate, endDate, sheet, wbStyles);
		});

		workbook.createSheet("Changes " + faoInstitute.getCode());
		final DeferredSXSSFSheet changesSheet = workbook.getStreamingSheet("Changes " + faoInstitute.getCode());
		changesSheet.setRowGenerator((sheet) -> {
			final Row headerRow = sheet.createRow(0);
			updateCell(headerRow, 0, "entity");
			updateCell(headerRow, 1, "entityId");
			updateCell(headerRow, 2, "ACTION");
			updateCell(headerRow, 3, "LOGDATE");
			updateCell(headerRow, 4, "PROPERTY");
			updateCell(headerRow, 5, "OLD");
			updateCell(headerRow, 6, "NEW");

			writeAuditLogSheet(faoInstitute, startDate, endDate, sheet, wbStyles);
		});

		workbook.createSheet("Deletes " + faoInstitute.getCode());
		final DeferredSXSSFSheet deletesSheet = workbook.getStreamingSheet("Deletes " + faoInstitute.getCode());
		deletesSheet.setRowGenerator((sheet) -> {
			final Row headerRow = sheet.createRow(0);
			updateCell(headerRow, 0, "entity");
			updateCell(headerRow, 1, "entityId");
			updateCell(headerRow, 2, "ACTION");
			updateCell(headerRow, 3, "LOGDATE");
			updateCell(headerRow, 4, "PROPERTY");
			updateCell(headerRow, 5, "OLD");
			updateCell(headerRow, 6, "NEW");
			writeAuditDeletesSheet(faoInstitute, startDate, endDate, sheet, wbStyles);
		});

		workbook.write(outputStream);
		workbook.close();
		workbook.dispose();
		outputStream.flush();
	}

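	/**
	 * Writes the audit trail of deleted accessions to the "Deletes" sheet. Deleted accessions are
	 * identified by DELETE audit records on the {@code instituteCode} property whose previous value
	 * matches the institute code; the audit log of each such accession is then written in batches.
	 */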
	private void writeAuditDeletesSheet(FaoInstitute faoInstitute, Calendar startDate, Calendar endDate, SXSSFSheet sheet, WorkbookStyles wbStyles) {
		AtomicInteger rowIndex = new AtomicInteger(0);

		JPAQuery<Long> deletedAccessionIds = jpaQueryFactory.select(QAuditLog.auditLog.entityId).from(QAuditLog.auditLog).where(QAuditLog.auditLog.action.eq(AuditAction.DELETE),
			QAuditLog.auditLog.classPk.in(cpkAccession), QAuditLog.auditLog.propertyName.eq("instituteCode"), QAuditLog.auditLog.previousState.eq(faoInstitute.getCode())).orderBy(
				QAuditLog.auditLog.entityId.asc());

		if (startDate != null) {
			deletedAccessionIds.where(QAuditLog.auditLog.logDate.goe(startDate.toInstant()));
		}
		if (endDate != null) {
			deletedAccessionIds.where(QAuditLog.auditLog.logDate.loe(endDate.toInstant()));
		}

		long deletedCount = deletedAccessionIds.fetchCount();
		LOG.info("There are {} DELETED accessions for {}", deletedCount, faoInstitute.getCode());
		long batchSize = 100;
		long offset = -batchSize;
		deletedAccessionIds.limit(batchSize);

		List<Long> accessionIds;

		do {
			offset += batchSize;
			deletedAccessionIds.offset(offset);

			accessionIds = deletedAccessionIds.fetch();
			LOG.info("Processing {}/{} DELETEs for {}", offset, deletedCount, faoInstitute.getCode());

			JPAQuery<AuditLog> deleteLogs = jpaQueryFactory.selectFrom(QAuditLog.auditLog).where(QAuditLog.auditLog.classPk.in(cpkAccession, cpkAccessionId),
				QAuditLog.auditLog.entityId.in(accessionIds)).orderBy(QAuditLog.auditLog.entityId.asc(), QAuditLog.auditLog.logDate.desc());

			deleteLogs.fetch().forEach(deleteLog -> {
				final Row nextRow = sheet.createRow(rowIndex.incrementAndGet());
				int col = 0;
				updateCell(nextRow, col++, deleteLog.getClassPk().getShortName());
				updateCell(nextRow, col++, deleteLog.getEntityId());
				updateCell(nextRow, col++, deleteLog.getAction().toString());
				updateCell(nextRow, col++, deleteLog.getLogDate(), wbStyles.dateTimeStyle);
				updateCell(nextRow, col++, deleteLog.getPropertyName());
				updateCell(nextRow, col++, deleteLog.getPreviousState());
				updateCell(nextRow, col++, deleteLog.getNewState());
			});

			if (rowIndex.get() > 1000000) {
				LOG.warn("Breaking bad!");
				break;
			}

		} while (accessionIds.size() == batchSize); // while we get data
	}

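	/**
	 * Writes the institute's current accessions to the "Accessions" sheet in id-ordered batches.
	 */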
	private void writeAccessionsSheet(FaoInstitute faoInstitute, Calendar startDate, Calendar endDate, SXSSFSheet sheet, WorkbookStyles wbStyles) {
		AtomicInteger rowIndex = new AtomicInteger(0);

		JPAQuery<Tuple> instituteAccessions = jpaQueryFactory.select(QAccession.accession.id, QAccession.accession.instituteCode, QAccession.accession.accessionNumber,
			QAccession.accession.taxonomy().genus, QAccession.accession.taxonomy().species, QAccession.accession.doi,
			QAccession.accession.accessionId().coll().id).from(QAccession.accession).where(QAccession.accession.institute().eq(faoInstitute)).orderBy(QAccession.accession.id.asc());

		long accessionCount = instituteAccessions.fetchCount();
		LOG.warn("There are {} accessions for {}", accessionCount, faoInstitute.getCode());
		long batchSize = 1000; // bigger batch size here
		long offset = -batchSize;
		instituteAccessions.limit(batchSize);

		List<Tuple> accessions;
		do {
			offset += batchSize;
			instituteAccessions.offset(offset);

			accessions = instituteAccessions.fetch();
			LOG.info("Processing {}/{} accessions for {}", offset, accessionCount, faoInstitute.getCode());

			accessions.forEach(tuple -> {
				final Row nextRow = sheet.createRow(rowIndex.incrementAndGet());
				Long accessionId = tuple.get(0, Long.class);
				updateCell(nextRow, 0, accessionId);
				updateCell(nextRow, 1, tuple.get(1, String.class));
				updateCell(nextRow, 2, tuple.get(2, String.class));
				updateCell(nextRow, 3, tuple.get(3, String.class));
				updateCell(nextRow, 4, tuple.get(4, String.class));
				updateCell(nextRow, 5, tuple.get(5, String.class));
				updateCell(nextRow, 6, tuple.get(6, Long.class));
			});

			if (rowIndex.get() > 1000000) {
				LOG.warn("Breaking bad!");
				break;
			}

		} while (accessions.size() == batchSize); // while we get data
	}

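	/**
	 * Writes the audit log of the institute's accessions (and their {@code AccessionId} records) to
	 * the "Changes" sheet in batches. Log entries that reference another entity are printed with the
	 * reference resolved for the types listed in {@code referencedTables}; for other types the
	 * referenced ids are collected and their own audit logs are appended at the end of the sheet.
	 */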
	private void writeAuditLogSheet(FaoInstitute faoInstitute, Calendar startDate, Calendar endDate, SXSSFSheet sheet, WorkbookStyles wbStyles) {
		AtomicInteger rowIndex = new AtomicInteger(0);

		JPAQuery<Tuple> instituteAccessions = jpaQueryFactory.select(QAccession.accession.id, QAccession.accession.accessionId().coll().id)
			.from(QAccession.accession).where(QAccession.accession.institute().eq(faoInstitute)).orderBy(QAccession.accession.id.asc());

		long accessionCount = instituteAccessions.fetchCount();
		LOG.warn("There are {} accessions for {}", accessionCount, faoInstitute.getCode());
		int batchSize = 100;
		long offset = -batchSize;
		instituteAccessions.limit(batchSize);

		LoadingCache<ClassPK, LoadingCache<Long, Object>> entityCache = entityCache();

		// The main deferred list of logs to print
		Map<ClassPK, Set<Long>> deferredEntities = new HashMap<>();
		deferredEntities.put(cpkAccessionColl, new HashSet<>());

		List<Tuple> accessions;
		do {
			offset += batchSize;
			instituteAccessions.offset(offset);

			accessions = instituteAccessions.fetch();
			LOG.info("Processing {}/{} accessions for {}", offset, accessionCount, faoInstitute.getCode());

			List<Long> accessionIds = new ArrayList<>(accessions.size());
			accessions.forEach(acce -> {
				Long accessionId = acce.get(0, Long.class);
				accessionIds.add(accessionId);
				Long collId = acce.get(1, Long.class);
				if (collId != null)
					deferredEntities.get(cpkAccessionColl).add(collId);
			});

			JPAQuery<AuditLog> auditLogs = jpaQueryFactory.selectFrom(QAuditLog.auditLog).where(QAuditLog.auditLog.classPk.in(cpkAccession, cpkAccessionId),
				QAuditLog.auditLog.entityId.in(accessionIds)).orderBy(QAuditLog.auditLog.entityId.asc(), QAuditLog.auditLog.logDate.desc());

			if (startDate != null) {
				auditLogs.where(QAuditLog.auditLog.logDate.goe(startDate.toInstant()));
			}
			if (endDate != null) {
				auditLogs.where(QAuditLog.auditLog.logDate.loe(endDate.toInstant()));
			}

			List<AuditLog> logs = auditLogs.fetch();
			logs.stream().filter(l -> l.getReferencedEntity() == null).forEach(auditLog -> {
				final Row nextRow = sheet.createRow(rowIndex.incrementAndGet());
				int col = 0;
				updateCell(nextRow, col++, auditLog.getClassPk().getShortName());
				updateCell(nextRow, col++, auditLog.getEntityId());
				updateCell(nextRow, col++, auditLog.getAction().toString());
				updateCell(nextRow, col++, auditLog.getLogDate(), wbStyles.dateTimeStyle);
				updateCell(nextRow, col++, auditLog.getPropertyName());
				updateCell(nextRow, col++, auditLog.getPreviousState());
				updateCell(nextRow, col++, auditLog.getNewState());
			});

			List<AuditLog> refs = logs.stream().filter(l -> l.getReferencedEntity() != null).collect(Collectors.toList());
			Map<ClassPK, Map<Long, Object>> referencedEntities = new HashMap<>();
			refs.forEach(ref -> {
				ClassPK classPk = ref.getReferencedEntity();

				// Decide between printing or deferring
				if (referencedTables.contains(classPk.getShortName())) {
					// Print
					referencedEntities.putIfAbsent(classPk, new HashMap<>());

					Map<Long, Object> cpkMap = referencedEntities.get(classPk);
					Set<Long> ids = new HashSet<>();
					ids.addAll(parseToLong(ref.getPreviousState()));
					ids.addAll(parseToLong(ref.getNewState()));
					ids.forEach(id -> cpkMap.putIfAbsent(id, TO_BE_RESOLVED));
				} else {
					// Include audit log
					if (!deferredEntities.containsKey(classPk)) {
						deferredEntities.put(classPk, new HashSet<>());
					}
					deferredEntities.get(classPk).addAll(parseToLong(ref.getPreviousState()));
					deferredEntities.get(classPk).addAll(parseToLong(ref.getNewState()));

				}
			});

			referencedEntities.keySet().forEach(key -> {
				// LOG.warn("Need to resolve {} ids for {}", referencedEntities.get(key).size(),
				// key.getShortName());
				try {
					populateCache(key, referencedEntities.get(key).keySet(), entityCache.get(key));
				} catch (Exception e) {
					LOG.warn("Error populating cache: {}", e.getMessage(), e);
				}
			});

			refs.forEach(auditLog -> {
				final Row nextRow = sheet.createRow(rowIndex.incrementAndGet());
				int col = 0;
				updateCell(nextRow, col++, auditLog.getClassPk().getShortName());
				updateCell(nextRow, col++, auditLog.getEntityId());
				updateCell(nextRow, col++, auditLog.getAction().toString());
				updateCell(nextRow, col++, auditLog.getLogDate(), wbStyles.dateTimeStyle);
				updateCell(nextRow, col++, auditLog.getPropertyName());
				ClassPK referencedClassPk = auditLog.getReferencedEntity();
				if (referencedTables.contains(referencedClassPk.getShortName())) {
					try {
						LoadingCache<Long, Object> referencedEntityCache = entityCache.get(referencedClassPk);
						updateCell(nextRow, col++, printReferences(referencedEntityCache, auditLog.getPreviousState()));
						updateCell(nextRow, col++, printReferences(referencedEntityCache, auditLog.getNewState()));
					} catch (ExecutionException e1) {
						updateCell(nextRow, col++, auditLog.getPreviousState());
						updateCell(nextRow, col++, auditLog.getNewState());
					}
				} else {
					updateCell(nextRow, col++, auditLog.getPreviousState());
					updateCell(nextRow, col++, auditLog.getNewState());
				}

				if (referencedClassPk != null) {
					updateCell(nextRow, col++, referencedClassPk.getShortName());
				}
			});

			if (rowIndex.get() > 1000000) {
				LOG.warn("Breaking bad!");
				break;
			}

		} while (accessions.size() == batchSize); // while we get data

		deferredEntities.keySet().forEach(classPk -> {
			List<Long> entityIds = new ArrayList<>(deferredEntities.get(classPk));
			LOG.warn("There are {} deferred entries for {}", entityIds.size(), classPk.getClassname());
			for (int startAt = 0; startAt < entityIds.size(); startAt += batchSize) {
				LOG.info("Processing deferred {}/{} for {}", startAt, entityIds.size(), classPk.getClassname());
				addDereferencedAuditLogs(sheet, rowIndex, wbStyles, classPk, entityIds.subList(startAt, Math.min(entityIds.size(), startAt + batchSize)), startDate, endDate);
			}
		});

	}

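	/**
	 * Appends the audit log entries of the given entity type and ids to the sheet, applying the
	 * optional date range.
	 */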
	private void addDereferencedAuditLogs(SXSSFSheet sheet, AtomicInteger rowIndex, WorkbookStyles wbStyles, ClassPK classPk, Collection<Long> entityIds, Calendar startDate,
			Calendar endDate) {
		JPAQuery<AuditLog> auditLogs = jpaQueryFactory.selectFrom(QAuditLog.auditLog).where(QAuditLog.auditLog.classPk.in(classPk), QAuditLog.auditLog.entityId.in(entityIds))
			.orderBy(QAuditLog.auditLog.entityId.asc(), QAuditLog.auditLog.logDate.desc());

		if (startDate != null) {
			auditLogs.where(QAuditLog.auditLog.logDate.goe(startDate.toInstant()));
		}
		if (endDate != null) {
			auditLogs.where(QAuditLog.auditLog.logDate.loe(endDate.toInstant()));
		}

		List<AuditLog> logs = auditLogs.fetch();
		for (AuditLog auditLog : logs) {
			if (rowIndex.get() > 1000000) {
				// Stop writing once the row limit is reached; a return inside forEach would only skip one element
				LOG.warn("Breaking bad!");
				break;
			}
			final Row nextRow = sheet.createRow(rowIndex.incrementAndGet());
			int col = 0;
			updateCell(nextRow, col++, auditLog.getClassPk().getShortName());
			updateCell(nextRow, col++, auditLog.getEntityId());
			updateCell(nextRow, col++, auditLog.getAction().toString());
			updateCell(nextRow, col++, auditLog.getLogDate(), wbStyles.dateTimeStyle);
			updateCell(nextRow, col++, auditLog.getPropertyName());
			updateCell(nextRow, col++, auditLog.getPreviousState());
			updateCell(nextRow, col++, auditLog.getNewState());
			if (auditLog.getReferencedEntity() != null) {
				updateCell(nextRow, col++, auditLog.getReferencedEntity().getShortName());
			}
		}
	}

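	/**
	 * Renders the referenced entities behind a serialized id list, one per line, resolving each id
	 * through the given cache.
	 */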
	private String printReferences(LoadingCache<Long, Object> referencedEntityCache, String stringIds) {
		Collection<Long> ids = parseToLong(stringIds);
		if (ids.isEmpty()) {
			return "";
		}
		StringBuilder sb = new StringBuilder();
		ids.forEach(id -> {
			try {
				if (sb.length() > 0)
					sb.append("\n");
				sb.append(printEntity(referencedEntityCache.get(id)));
			} catch (ExecutionException e) {
				LOG.error("Cache error: {}", e.getMessage());
				sb.append(e.getMessage());
			}
		});
		return sb.toString();
	}

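	/**
	 * Renders a resolved entity as a short human-readable string: the taxon name for
	 * {@link Taxonomy2}, the folder path for {@link RepositoryFolder}, the JSON text for entities
	 * reconstructed from audit records, and {@code toString()} otherwise.
	 */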
	private String printEntity(Object object) {
		if (object == null) {
			return "NULL";
		}
		if (object instanceof Taxonomy2) {
			return ((Taxonomy2) object).getTaxonName();
		}
		if (object instanceof JsonNode) {
			return ((JsonNode) object).toString();
		}
		if (object instanceof RepositoryFolder) {
			return ((RepositoryFolder) object).getPath();
		}
		return object.toString();
	}

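	/**
	 * Parses a single id or a serialized id list such as {@code [1, 2, 3]} into longs; blank input
	 * and {@code []} yield an empty collection.
	 */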
	private static final Collection<Long> parseToLong(String arrayString) {
		if (StringUtils.isBlank(arrayString) || arrayString.equals("[]")) {
			return Collections.emptyList();
		}
		Set<Long> longs = new HashSet<>();
		if (arrayString.startsWith("[")) {
			String trim = arrayString.substring(1, arrayString.length() - 1);
			try {
				Arrays.stream(trim.split(", ")).forEach(s -> longs.add(Long.parseLong(s)));
			} catch (NumberFormatException e) {
				LOG.error("Weird input: {}", arrayString);
				throw e;
			}
		} else {
			longs.add(Long.parseLong(arrayString));
		}
		return longs;
	}

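	/**
	 * Bulk-loads the entities with the given ids into the per-type cache, querying the database only
	 * for ids that are not already cached. A single missing entity is resolved through its DELETE
	 * audit records; bulk misses are currently left unresolved.
	 */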
	private void populateCache(ClassPK classPk, Set<Long> set, LoadingCache<Long, Object> loadingCache) throws Exception {
		List<Long> notThere = new ArrayList<>(set.size());
		set.forEach(id -> {
			if (loadingCache.getIfPresent(id) == null) {
				notThere.add(id);
			}
		});

		if (notThere.size() > 0) {
			final Class<?> target = Class.forName(classPk.getClassname());

			if (notThere.size() == 1) {
				Long oneId = notThere.get(0);
				loadingCache.put(oneId, entityOrDeleted(target, oneId, entityManager.find(target, oneId)));
			} else {
				CriteriaBuilder cb = entityManager.getCriteriaBuilder();
				CriteriaQuery<javax.persistence.Tuple> criteriaQuery = cb.createTupleQuery();
				Root<?> root = criteriaQuery.from(target);
				Path<Long> rootId = root.<Long>get("id");
				criteriaQuery.multiselect(rootId, root);
				criteriaQuery.where(rootId.in(notThere));

				HashMap<Long, Object> map = new HashMap<>(); // database hits
				Set<Long> missing = new HashSet<>(notThere); // database misses
				entityManager.createQuery(criteriaQuery).getResultList().forEach(obj -> {
					Long entityId = (Long) obj.get(0);
					map.put(entityId, entityOrDeleted(target, entityId, obj.get(1)));
					missing.remove(entityId);
				});
				loadingCache.putAll(map);

				// bulk fetch misses from AuditLog?
			}
		}
	}

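	/**
	 * Returns the loaded entity or, when the entity no longer exists and its type is {@link Audited},
	 * a JSON object assembled from the previous values recorded in its DELETE audit log entries;
	 * otherwise returns the (possibly {@code null}) argument.
	 */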
	private Object entityOrDeleted(Class<?> target, Long id, Object object) {
		if (object == null) {
			if (target.isAnnotationPresent(Audited.class)) {
				JPAQuery<AuditLog> deleteLogs = jpaQueryFactory.selectFrom(QAuditLog.auditLog).where(QAuditLog.auditLog.classPk.classname.eq(target.getName()),
					QAuditLog.auditLog.entityId.in(id), QAuditLog.auditLog.action.eq(AuditAction.DELETE)).orderBy(QAuditLog.auditLog.entityId.asc());

				List<AuditLog> deletes = deleteLogs.fetch();
				if (deletes.size() > 0) {
					LOG.warn("Cache: Loaded Audited DELETE {} with id={}", target, id);
					ObjectNode n = objectMapper.createObjectNode();
					deletes.forEach(deletedProp -> {
						n.put(deletedProp.getPropertyName(), deletedProp.getPreviousState());
					});
					return n;
				}
			}
		}
		return object;
	}

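	/**
	 * Builds a two-level cache: for each entity type, a small, short-lived cache of entities by id
	 * that loads missing entries from the database, falling back to DELETE audit records for removed
	 * entities.
	 */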
	private LoadingCache<ClassPK, LoadingCache<Long, Object>> entityCache() {
		CacheLoader<ClassPK, LoadingCache<Long, Object>> classPkCacheBuilder = new CacheLoader<ClassPK, LoadingCache<Long, Object>>() {
			@Override
			public LoadingCache<Long, Object> load(final ClassPK classPk) throws Exception {
				final Class<?> target = Class.forName(classPk.getClassname());
				LOG.warn("Creating new cache for entity {}", target);
				// LOG.debug("Loading");
				return CacheBuilder.newBuilder().maximumSize(1000).expireAfterAccess(60, TimeUnit.SECONDS).build(new CacheLoader<Long, Object>() {
					@Override
					public Object load(Long key) throws Exception {
						LOG.warn("Cache: Loading {} with id={}", target, key);
						Object entity = entityManager.find(target, key);
						return entityOrDeleted(target, key, entity);
					}
				});
			}
		};

		LoadingCache<ClassPK, LoadingCache<Long, Object>> entityCache = CacheBuilder.newBuilder().maximumSize(20).expireAfterAccess(60, TimeUnit.SECONDS).build(
			classPkCacheBuilder);

		return entityCache;
	}
}