Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
@@ -0,0 +1,31 @@
package org.labkey.api.dataiterator;

import org.jetbrains.annotations.NotNull;

import java.util.Map;

/**
 * Extension point for DataClass-specific transformations in the pre-trigger DataIterator pipeline.
 * Registered per DataClass name via {@link org.labkey.api.exp.api.ExperimentService#registerDataClassDataIteratorTransformer}.
 * A fresh instance is created for each import operation since implementations may be stateful
 * between {@link #prepareTranslator} and {@link #wrapDataIterator}.
 */
public interface DataClassDataIteratorTransformer
{
    /**
     * Called during SimpleTranslator setup to inspect input columns and add placeholder columns
     * (e.g., via {@link SimpleTranslator#addNullColumn}) that will be populated by the wrapping DataIterator.
     *
     * @param step0 the translator being assembled; implementations may add columns to it
     * @param inputColumnNameMap map of input column name to its index in the source iterator
     *        (NOTE(review): assumed 0-based source index — confirm against SimpleTranslator usage)
     * @param context import context; implementations may report problems via its error collection
     * @return true if the transformer is active and {@link #wrapDataIterator} should be called
     */
    boolean prepareTranslator(@NotNull SimpleTranslator step0,
                              @NotNull Map<String, Integer> inputColumnNameMap,
                              @NotNull DataIteratorContext context);

    /**
     * Wraps the DataIterator to apply DataClass-specific transformations.
     * Called after the pre-trigger pipeline is assembled, only if {@link #prepareTranslator} returned true.
     *
     * @param input the assembled pre-trigger iterator to wrap; never null
     * @param context import context for error reporting and configuration
     * @return the wrapping iterator (may be {@code input} itself if no wrapping is needed); never null
     */
    @NotNull
    DataIterator wrapDataIterator(@NotNull DataIterator input, @NotNull DataIteratorContext context);
}
19 changes: 19 additions & 0 deletions api/src/org/labkey/api/exp/api/ExperimentService.java
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,7 @@
import org.labkey.api.data.RemapCache;
import org.labkey.api.data.SQLFragment;
import org.labkey.api.data.TableInfo;
import org.labkey.api.dataiterator.DataClassDataIteratorTransformer;
import org.labkey.api.exp.ExperimentDataHandler;
import org.labkey.api.exp.ExperimentException;
import org.labkey.api.exp.ExperimentProtocolHandler;
Expand Down Expand Up @@ -74,6 +75,7 @@
import org.labkey.api.pipeline.RecordedActionSet;
import org.labkey.api.query.BatchValidationException;
import org.labkey.api.query.FilteredTable;
import org.labkey.api.query.QueryUpdateService;
import org.labkey.api.query.QueryKey;
import org.labkey.api.query.QueryViewProvider;
import org.labkey.api.query.UserSchema;
Expand Down Expand Up @@ -101,6 +103,8 @@
import java.util.Set;
import java.util.concurrent.locks.Lock;
import java.util.function.Predicate;
import java.util.function.Supplier;
import java.util.function.UnaryOperator;

import static org.labkey.api.exp.api.ExpDataClass.NEW_DATA_CLASS_ALIAS_VALUE;
import static org.labkey.api.exp.api.SampleTypeService.NEW_SAMPLE_TYPE_ALIAS_VALUE;
Expand Down Expand Up @@ -657,6 +661,21 @@ static void validateParentAlias(Map<String, String> aliasMap, Set<String> reserv

ExpDataClassDataTable createDataClassDataTable(String name, UserSchema schema, ContainerFilter cf, @NotNull ExpDataClass dataClass);

/**
* Registers a factory that creates a {@link DataClassDataIteratorTransformer}
* for the specified DataClass name. The transformer is applied in the pre-trigger DataIterator pipeline,
* allowing modules to add computed columns (e.g., transforming flat columns into JSON) that work
* uniformly for file imports, API imports, folder imports, and background pipeline jobs.
* A fresh instance is created per import via the factory since transformers may be stateful.
*/
void registerDataClassDataIteratorTransformer(String dataClassName, @NotNull Supplier<DataClassDataIteratorTransformer> factory);

/**
* Returns a fresh {@link DataClassDataIteratorTransformer} for the given
* DataClass name, or {@code null} if none is registered.
*/
@Nullable DataClassDataIteratorTransformer getDataClassDataIteratorTransformer(String dataClassName);

ExpProtocolTable createProtocolTable(String name, UserSchema schema, ContainerFilter cf);

ExpExperimentTable createExperimentTable(String name, UserSchema schema, ContainerFilter cf);
Expand Down
5 changes: 5 additions & 0 deletions api/src/org/labkey/api/query/AbstractQueryUpdateService.java
Original file line number Diff line number Diff line change
Expand Up @@ -399,6 +399,11 @@ protected int _importRowsUsingDIB(User user, Container container, DataIteratorBu
if (extraScriptContext != null)
{
context.setDataSource((String) extraScriptContext.get(DataIteratorUtil.DATA_SOURCE));
if (extraScriptContext.containsKey(AbstractQueryImportAction.Params.useTransactionAuditCache.name()))
{
boolean useTransactionAuditCache = Boolean.TRUE.equals(extraScriptContext.get(AbstractQueryImportAction.Params.useTransactionAuditCache.name()));
context.setUseTransactionAuditCache(useTransactionAuditCache);
}
}

preImportDIBValidation(in, null);
Expand Down
10 changes: 5 additions & 5 deletions audit/src/org/labkey/audit/AuditController.java
Original file line number Diff line number Diff line change
Expand Up @@ -69,7 +69,6 @@
import java.util.Collections;
import java.util.Date;
import java.util.GregorianCalendar;
import java.util.List;
import java.util.Map;

import static org.labkey.api.data.ContainerManager.REQUIRE_USER_COMMENTS_PROPERTY_NAME;
Expand Down Expand Up @@ -382,17 +381,18 @@ public void validateForm(AuditTransactionForm form, Errors errors)
@Override
public Object execute(AuditTransactionForm form, BindException errors)
{
List<Long> rowIds;
AuditLogImpl.TransactionRowIds results;
User elevatedUser = ElevatedUser.ensureCanSeeAuditLogRole(getContainer(), getUser());
ContainerFilter cf = ContainerFilter.getContainerFilterByName(form.getContainerFilter(), getContainer(), elevatedUser);
if (form.isSampleType())
rowIds = AuditLogImpl.get().getTransactionSampleIds(form.getTransactionAuditId(), elevatedUser, getContainer(), cf);
results = AuditLogImpl.get().getTransactionSampleIds(form.getTransactionAuditId(), elevatedUser, getContainer(), cf);
else
rowIds = AuditLogImpl.get().getTransactionSourceIds(form.getTransactionAuditId(), elevatedUser, getContainer(), cf);
results = AuditLogImpl.get().getTransactionSourceIds(form.getTransactionAuditId(), elevatedUser, getContainer(), cf);

ApiSimpleResponse response = new ApiSimpleResponse();
response.put("success", true);
response.put("rowIds", rowIds);
response.put("rowIds", results.rowIds());
response.put("dataTypeRowCounts", results.dataTypeRowCounts());

return response;
}
Expand Down
91 changes: 46 additions & 45 deletions audit/src/org/labkey/audit/AuditLogImpl.java
Original file line number Diff line number Diff line change
Expand Up @@ -55,6 +55,7 @@

import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
Expand Down Expand Up @@ -247,70 +248,70 @@ public ActionURL getAuditUrl()
return new ActionURL(AuditController.ShowAuditLogAction.class, ContainerManager.getRoot());
}

public List<Long> getTransactionSampleIds(long transactionAuditId, User user, Container container, @Nullable ContainerFilter containerFilter)
public record TransactionRowIds(List<Long> rowIds, Map<Long, Long> dataTypeRowCounts) {}

public TransactionRowIds getTransactionSampleIds(long transactionAuditId, User user, Container container, @Nullable ContainerFilter containerFilter)
{
List<AuditTypeEvent> transactionEvents = TRANSACTION_EVENT_CACHE.get(transactionAuditId).second;
if (!transactionEvents.isEmpty())
List<SampleTimelineAuditEvent> events;
if (transactionEvents.isEmpty())
{
List<Long> ids = new ArrayList<>();
transactionEvents.forEach(event -> {
if (event instanceof SampleTimelineAuditEvent stEvent)
ids.add(stEvent.getSampleId());
});
return ids;
SimpleFilter filter = new SimpleFilter();
filter.addCondition(FieldKey.fromParts("TransactionID"), transactionAuditId);
events = AuditLogService.get().getAuditEvents(container, user, SampleTimelineAuditEvent.EVENT_TYPE, filter, null, containerFilter);
}

SimpleFilter filter = new SimpleFilter();
filter.addCondition(FieldKey.fromParts("TransactionID"), transactionAuditId);

List<SampleTimelineAuditEvent> events = AuditLogService.get().getAuditEvents(container, user, SampleTimelineAuditEvent.EVENT_TYPE, filter, null, containerFilter);
return events.stream().map(SampleTimelineAuditEvent::getSampleId).collect(Collectors.toList());
else
{
events = transactionEvents.stream()
.filter(SampleTimelineAuditEvent.class::isInstance)
.map(SampleTimelineAuditEvent.class::cast)
.toList();
}
Map<Long, Long> dataTypeRowCounts = new HashMap<>();
List<Long> sampleIds = new ArrayList<>();
events.forEach(event -> {
dataTypeRowCounts.merge(event.getSampleTypeId(), 1L, Long::sum);
sampleIds.add(event.getSampleId());
});
return new TransactionRowIds(sampleIds, dataTypeRowCounts);
}

public List<Long> getTransactionSourceIds(long transactionAuditId, User user, Container container, @Nullable ContainerFilter containerFilter)
public TransactionRowIds getTransactionSourceIds(long transactionAuditId, User user, Container container, @Nullable ContainerFilter containerFilter)
{
List<String> lsids = new ArrayList<>();
List<Long> sourceIds = new ArrayList<>();
Map<Long, Long> dataTypeRowCounts = new HashMap<>();
List<AuditTypeEvent> transactionEvents = TRANSACTION_EVENT_CACHE.get(transactionAuditId).second;
if (!transactionEvents.isEmpty())
{
transactionEvents.forEach(event -> {
if (event instanceof DetailedAuditTypeEvent detailedEvent)
{
if (detailedEvent.getNewRecordMap() != null)
{
Map<String, String> newRecord = new CaseInsensitiveHashMap<>(AbstractAuditTypeProvider.decodeFromDataMap(detailedEvent.getNewRecordMap()));
if (newRecord.containsKey("RowId") && !StringUtils.isEmpty(newRecord.get("RowId")))
sourceIds.add(Long.valueOf(newRecord.get("RowId")));
else if (newRecord.containsKey("LSID") && !StringUtils.isEmpty(newRecord.get("LSID")))
lsids.add(newRecord.get("LSID"));
}
}
});
}
else
{
List<DetailedAuditTypeEvent> events = QueryService.get().getQueryUpdateAuditRecords(user, container, transactionAuditId, containerFilter);
List<DetailedAuditTypeEvent> detailedEvents = transactionEvents.isEmpty()
? QueryService.get().getQueryUpdateAuditRecords(user, container, transactionAuditId, containerFilter)
: transactionEvents.stream()
.filter(DetailedAuditTypeEvent.class::isInstance)
.map(DetailedAuditTypeEvent.class::cast)
.toList();

detailedEvents.forEach(event -> {
if (event.getNewRecordMap() != null)
{
Map<String, String> newRecord = new CaseInsensitiveHashMap<>(AbstractAuditTypeProvider.decodeFromDataMap(event.getNewRecordMap()));
if (newRecord.containsKey("RowId") && !StringUtils.isEmpty(newRecord.get("RowId")))
sourceIds.add(Long.valueOf(newRecord.get("RowId")));
else if (newRecord.containsKey("LSID") && !StringUtils.isEmpty(newRecord.get("LSID")))
lsids.add(newRecord.get("LSID"));

events.forEach((event) -> {
if (event.getNewRecordMap() != null)
if (newRecord.containsKey("ClassId") && !StringUtils.isEmpty(newRecord.get("ClassId")))
{
Map<String, String> newRecord = new CaseInsensitiveHashMap<>(AbstractAuditTypeProvider.decodeFromDataMap(event.getNewRecordMap()));
if (newRecord.containsKey("RowId") && !StringUtils.isEmpty(newRecord.get("RowId")))
sourceIds.add(Long.valueOf(newRecord.get("RowId")));
else if (newRecord.containsKey("LSID") && !StringUtils.isEmpty(newRecord.get("LSID")))
lsids.add(newRecord.get("LSID"));

Long classId = Long.valueOf(newRecord.get("ClassId"));
dataTypeRowCounts.merge(classId, 1L, Long::sum);
}
});
}
}
});
if (!lsids.isEmpty())
{
SimpleFilter filter = SimpleFilter.createContainerFilter(container);
filter.addCondition(FieldKey.fromParts("LSID"), lsids, CompareType.IN);
TableSelector selector = new TableSelector(ExperimentService.get().getTinfoData(), Collections.singleton("RowId"), filter, null);
sourceIds.addAll(selector.getArrayList(Long.class));
}
return sourceIds;
return new TransactionRowIds(sourceIds, dataTypeRowCounts);
}
}
6 changes: 5 additions & 1 deletion experiment/src/org/labkey/experiment/ExpDataIterators.java
Original file line number Diff line number Diff line change
Expand Up @@ -96,6 +96,7 @@
import org.labkey.api.exp.query.SamplesSchema;
import org.labkey.api.qc.DataState;
import org.labkey.api.qc.SampleStatusService;
import org.labkey.api.query.AbstractQueryImportAction;
import org.labkey.api.query.BatchValidationException;
import org.labkey.api.query.FieldKey;
import org.labkey.api.query.FileColumnValueMapper;
Expand Down Expand Up @@ -2379,7 +2380,10 @@ public DataIterator getDataIterator(DataIteratorContext context)

// useTransactionAuditCache already set for import and merge in AbstractQueryImportAction.createDataIteratorContext
if (context.getInsertOption() == QueryUpdateService.InsertOption.INSERT)
context.setUseTransactionAuditCache(true);
{
if (Boolean.FALSE != context.getConfigParameter(AbstractQueryImportAction.Params.useTransactionAuditCache))
context.setUseTransactionAuditCache(true);
}

// add FileLink DataIterator if any input columns are of type FILE_LINK
if (null != _fileLinkDirectory)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -52,6 +52,7 @@
import org.labkey.api.data.dialect.SqlDialect;
import org.labkey.api.dataiterator.AttachmentDataIterator;
import org.labkey.api.dataiterator.CachingDataIterator;
import org.labkey.api.dataiterator.DataClassDataIteratorTransformer;
import org.labkey.api.dataiterator.CoerceDataIterator;
import org.labkey.api.dataiterator.DataClassUpdateAddColumnsDataIterator;
import org.labkey.api.dataiterator.DataIterator;
Expand Down Expand Up @@ -143,6 +144,7 @@
import java.util.Map;
import java.util.Set;
import java.util.function.Supplier;
import java.util.function.UnaryOperator;
import java.util.stream.Collectors;

import static org.labkey.api.dataiterator.DataIteratorUtil.DUPLICATE_COLUMN_IN_DATA_ERROR;
Expand Down Expand Up @@ -1119,9 +1121,20 @@ else if (Column.ClassId.name().equalsIgnoreCase(name))
final int batchSize = _context.getInsertOption().batch ? BATCH_SIZE : 1;
step0.addSequenceColumn(genIdCol, _dataClass.getContainer(), ExpDataClassImpl.SEQUENCE_PREFIX, _dataClass.getRowId(), batchSize, _dataClass.getMinGenId());

// Apply registered DataClass-specific DataIterator transformer (e.g., Molecule Component-N/X → components JSON)
DataIteratorBuilder step1 = step0;
DataClassDataIteratorTransformer transformer = ExperimentService.get().getDataClassDataIteratorTransformer(_dataClass.getName());
if (transformer != null)
{
if (transformer.prepareTranslator(step0, columnNameMap, context))
step1 = LoggingDataIterator.wrap(transformer.wrapDataIterator(step0, context));
if (context.getErrors().hasErrors())
return null;
}

// Table Counters
ExpDataClassDataTableImpl queryTable = ExpDataClassDataTableImpl.this;
var counterDIB = ExpDataIterators.CounterDataIteratorBuilder.create(step0, _dataClass.getContainer(), queryTable, ExpDataClassImpl.SEQUENCE_PREFIX, _dataClass.getRowId());
var counterDIB = ExpDataIterators.CounterDataIteratorBuilder.create(step1, _dataClass.getContainer(), queryTable, ExpDataClassImpl.SEQUENCE_PREFIX, _dataClass.getRowId());
DataIterator di;

// Generate names
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -94,6 +94,7 @@
import org.labkey.api.data.TableSelector;
import org.labkey.api.data.TempTableTracker;
import org.labkey.api.data.dialect.SqlDialect;
import org.labkey.api.dataiterator.DataClassDataIteratorTransformer;
import org.labkey.api.dataiterator.DataIteratorBuilder;
import org.labkey.api.defaults.DefaultValueService;
import org.labkey.api.exp.AbstractParameter;
Expand Down Expand Up @@ -284,11 +285,13 @@
import java.util.Set;
import java.util.SortedSet;
import java.util.TreeSet;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
import java.util.function.Predicate;
import java.util.function.Supplier;
import java.util.function.UnaryOperator;
import java.util.stream.Collectors;
import java.util.stream.Stream;

Expand Down Expand Up @@ -353,6 +356,7 @@ public class ExperimentServiceImpl implements ExperimentService, ObjectReference
private final Map<String, DataType> _dataTypes = new HashMap<>();
private final Map<String, ProtocolImplementation> _protocolImplementations = new HashMap<>();
private final Map<String, ExpProtocolInputCriteria.Factory> _protocolInputCriteriaFactories = new HashMap<>();
private final Map<String, Supplier<DataClassDataIteratorTransformer>> _dataClassDataIteratorTransformers = new ConcurrentHashMap<>();
private final Set<ExperimentProtocolHandler> _protocolHandlers = new HashSet<>();
private final List<ObjectReferencer> _objectReferencers = new ArrayList<>();
private final List<ColumnExporter> _columnExporters = new ArrayList<>();
Expand Down Expand Up @@ -1590,6 +1594,19 @@ public ExpDataClassDataTable createDataClassDataTable(String name, UserSchema sc
return new ExpDataClassDataTableImpl(name, schema, cf, (ExpDataClassImpl) dataClass);
}

@Override
public void registerDataClassDataIteratorTransformer(String dataClassName, @NotNull Supplier<DataClassDataIteratorTransformer> factory)
{
    // Keys are stored lower-cased so lookups are case-insensitive with respect to the DataClass name.
    // NOTE(review): toLowerCase() is default-locale-sensitive (Turkish dotless-i); Locale.ROOT would be
    // safer, but must be changed in lock-step with getDataClassDataIteratorTransformer — confirm.
    String key = dataClassName.toLowerCase();
    _dataClassDataIteratorTransformers.put(key, factory);
}

@Override
public @Nullable DataClassDataIteratorTransformer getDataClassDataIteratorTransformer(String dataClassName)
{
    // Registration stores lower-cased keys, so normalize the lookup the same way.
    Supplier<DataClassDataIteratorTransformer> factory = _dataClassDataIteratorTransformers.get(dataClassName.toLowerCase());
    if (factory == null)
        return null;
    // The factory hands back a fresh instance per call — transformers may carry per-import state.
    return factory.get();
}

@Override
public ExpMaterialInputTable createMaterialInputTable(String name, ExpSchema schema, ContainerFilter cf)
{
Expand Down