From ee6f7c3ee53009e1d1d81ff356def795cdaf2eca Mon Sep 17 00:00:00 2001
From: Lum
Date: Thu, 4 Dec 2025 17:09:17 -0800
Subject: [PATCH] Don't save bulk import data or offer a download link

---
 study/src/org/labkey/study/StudyModule.java   |  6 +-
 .../study/assay/StudyPublishManager.java      | 88 ++++---------------
 .../study/controllers/StudyController.java    | 39 ++------
 .../dataset/DatasetSnapshotProvider.java      |  6 +-
 .../labkey/study/model/DatasetDefinition.java | 37 ++++++--
 .../org/labkey/study/model/StudyManager.java  |  6 +-
 .../src/org/labkey/study/model/UploadLog.java | 11 ---
 7 files changed, 58 insertions(+), 135 deletions(-)

diff --git a/study/src/org/labkey/study/StudyModule.java b/study/src/org/labkey/study/StudyModule.java
index 8df47d6aa96..69570d36931 100644
--- a/study/src/org/labkey/study/StudyModule.java
+++ b/study/src/org/labkey/study/StudyModule.java
@@ -42,8 +42,6 @@
 import org.labkey.api.exp.PropertyType;
 import org.labkey.api.exp.api.ExperimentService;
 import org.labkey.api.exp.property.PropertyService;
-import org.labkey.api.files.FileContentService;
-import org.labkey.api.files.TableUpdaterFileListener;
 import org.labkey.api.message.digest.ReportAndDatasetChangeDigestProvider;
 import org.labkey.api.migration.DatabaseMigrationService;
 import org.labkey.api.migration.DefaultMigrationSchemaHandler;
@@ -228,7 +226,7 @@ public String getName()
     @Override
     public Double getSchemaVersion()
     {
-        return 25.003;
+        return 25.004;
     }
 
     @Override
@@ -391,8 +389,6 @@ protected void startupAfterSpringConfig(ModuleContext moduleContext)
             folderRegistry.addFactories(new StudyWriterFactory(), new StudyImporterFactory());
         }
 
-        FileContentService.get().addFileListener(new TableUpdaterFileListener(StudySchema.getInstance().getTableInfoUploadLog(), "FilePath", TableUpdaterFileListener.Type.filePath, "RowId"));
-
         DatasetDefinition.cleanupOrphanedDatasetDomains();
 
         OptionalFeatureService.get().addExperimentalFeatureFlag(StudyQuerySchema.EXPERIMENTAL_STUDY_SUBSCHEMAS, "Use sub-schemas in Study",
diff --git a/study/src/org/labkey/study/assay/StudyPublishManager.java b/study/src/org/labkey/study/assay/StudyPublishManager.java
index d4a2cc2c894..7fb7c09303c 100644
--- a/study/src/org/labkey/study/assay/StudyPublishManager.java
+++ b/study/src/org/labkey/study/assay/StudyPublishManager.java
@@ -18,13 +18,11 @@
 import org.apache.commons.beanutils.ConvertUtils;
 import org.apache.commons.collections4.MapUtils;
-import org.apache.commons.io.IOUtils;
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
 import org.jetbrains.annotations.NotNull;
 import org.jetbrains.annotations.Nullable;
 import org.labkey.api.assay.AbstractAssayProvider;
-import org.labkey.api.assay.AssayFileWriter;
 import org.labkey.api.assay.AssayProtocolSchema;
 import org.labkey.api.assay.AssayProvider;
 import org.labkey.api.assay.AssayService;
@@ -113,9 +111,7 @@
 import org.labkey.api.study.publish.StudyDatasetLinkedColumn;
 import org.labkey.api.study.publish.StudyPublishService;
 import org.labkey.api.study.query.PublishResultsQueryView;
-import org.labkey.api.util.DateUtil;
 import org.labkey.api.util.FileStream;
-import org.labkey.api.util.FileUtil;
 import org.labkey.api.util.PageFlowUtil;
 import org.labkey.api.util.Pair;
 import org.labkey.api.util.StringExpressionFactory;
@@ -140,10 +136,7 @@
 import java.io.Closeable;
 import java.io.IOException;
-import java.io.OutputStream;
 import java.math.BigDecimal;
-import java.nio.file.Files;
-import java.nio.file.Path;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
@@ -932,60 +925,10 @@ private static String createUniqueDatasetName(Study study, String assayName)
         return name;
     }
 
-    public UploadLog saveUploadData(User user, Dataset dsd, FileStream tsv, String filename) throws IOException
-    {
-        PipeRoot pipelineRoot = PipelineService.get().findPipelineRoot(dsd.getContainer());
-        if (null == pipelineRoot || !pipelineRoot.isValid())
-            throw new IOException("Please have your administrator set up a pipeline root for this folder.");
-
-        Path dir = pipelineRoot.resolveToNioPath(AssayFileWriter.DIR_NAME);
-        if (null == dir)
-            throw new IOException("Cannot create directory uploaded data: " + AssayFileWriter.DIR_NAME);
-
-        if (!Files.exists(dir))
-        {
-            FileUtil.createDirectory(dir);
-        }
-
-        //File name is studyname_datasetname_date_hhmm.ss
-        Date dateCreated = new Date();
-        String dateString = DateUtil.formatDateTime(dateCreated, "yyy-MM-dd-HHmm");
-        int id = 0;
-        Path file;
-        do
-        {
-            String extension = Objects.toString(filename == null ? "tsv" : FileUtil.getExtension(filename), "tsv");
-            String extra = id++ == 0 ? "" : String.valueOf(id);
-            String fileName = dsd.getStudy().getLabel() + "-" + dsd.getLabel() + "-" + dateString + extra + "." + extension;
-            fileName = fileName.replace('\\', '_').replace('/', '_').replace(':', '_');
-            file = FileUtil.appendName(dir, fileName);
-        }
-        while (Files.exists(file));
-
-        try (OutputStream out = Files.newOutputStream(file))
-        {
-            IOUtils.copy(tsv.openInputStream(), out);
-            tsv.closeInputStream();
-        }
-
-        UploadLog ul = new UploadLog();
-        ul.setContainer(dsd.getContainer());
-        ul.setDatasetId(dsd.getDatasetId());
-        ul.setCreated(dateCreated);
-        ul.setUserId(user.getUserId());
-        ul.setStatus("Initializing");
-        String filePath = FileUtil.hasCloudScheme(file) ? FileUtil.pathToString(file) : file.toFile().getPath();
-        ul.setFilePath(filePath);
-
-        return Table.insert(user, getTinfoUpdateLog(), ul);
-    }
-
     /**
-     * Return an array of LSIDs from the newly created dataset entries,
-     * along with the upload log.
+     * Return an array of LSIDs from the newly created dataset entries.
      */
-    public Pair<List<String>, UploadLog> importDatasetTSV(User user, StudyImpl study, DatasetDefinition dsd, DataLoader dl, LookupResolutionType lookupResolutionType, FileStream fileIn, String originalFileName, Map<String, String> columnMap, BatchValidationException errors, QueryUpdateService.InsertOption insertOption, @Nullable AuditBehaviorType auditBehaviorType)
+    public List<String> importDatasetTSV(User user, StudyImpl study, DatasetDefinition dsd, DataLoader dl, LookupResolutionType lookupResolutionType, FileStream fileIn, String originalFileName, Map<String, String> columnMap, BatchValidationException errors, QueryUpdateService.InsertOption insertOption, @Nullable AuditBehaviorType auditBehaviorType)
     {
         DbScope scope = StudySchema.getInstance().getScope();
 
@@ -995,7 +938,13 @@ public Pair<List<String>, UploadLog> importDatasetTSV(User user, StudyImpl study
         try
         {
             if (null != fileIn)
-                ul = saveUploadData(user, dsd, fileIn, originalFileName);
+            {
+                ul = new UploadLog();
+                ul.setContainer(dsd.getContainer());
+                ul.setDatasetId(dsd.getDatasetId());
+                ul.setUserId(user.getUserId());
+                ul.setStatus("Initializing");
+            }
 
             try (DbScope.Transaction transaction = scope.ensureTransaction())
             {
@@ -1025,8 +974,9 @@ public Pair<List<String>, UploadLog> importDatasetTSV(User user, StudyImpl study
                 ul.setStatus("ERROR");
                 String description = ul.getDescription();
                 ul.setDescription(description == null ? "" : description + "\n" + new Date() + ":" + x.getMessage());
"" : description + "\n" + new Date() + ":" + x.getMessage()); - ul = Table.update(user, StudySchema.getInstance().getTableInfoUploadLog(), ul, ul.getRowId()); - return Pair.of(lsids, ul); + Table.insert(user, getTinfoUpdateLog(), ul); + + return lsids; } } @@ -1035,7 +985,7 @@ public Pair, UploadLog> importDatasetTSV(User user, StudyImpl study //Update the status assert ul != null : "Upload log should always exist if no errors have occurred."; ul.setStatus("SUCCESS"); - ul = Table.update(user, getTinfoUpdateLog(), ul, ul.getRowId()); + Table.insert(user, getTinfoUpdateLog(), ul); } else if (ul != null) { @@ -1048,17 +998,9 @@ else if (ul != null) sep = "\n"; } ul.setDescription(sb.toString()); - ul = Table.update(user, getTinfoUpdateLog(), ul, ul.getRowId()); + Table.insert(user, getTinfoUpdateLog(), ul); } - return Pair.of(lsids, ul); - } - - public UploadLog getUploadLog(Container c, int id) - { - SimpleFilter filter = SimpleFilter.createContainerFilter(c); - filter.addCondition(FieldKey.fromParts("rowId"), id); - - return new TableSelector(getTinfoUpdateLog(), filter, null).getObject(UploadLog.class); + return lsids; } @Override diff --git a/study/src/org/labkey/study/controllers/StudyController.java b/study/src/org/labkey/study/controllers/StudyController.java index 4cd599b94ee..bb845ca7f24 100644 --- a/study/src/org/labkey/study/controllers/StudyController.java +++ b/study/src/org/labkey/study/controllers/StudyController.java @@ -90,7 +90,6 @@ import org.labkey.api.data.RuntimeSQLException; import org.labkey.api.data.SQLFragment; import org.labkey.api.data.ShowRows; -import org.labkey.api.data.SimpleDisplayColumn; import org.labkey.api.data.SimpleFilter; import org.labkey.api.data.Sort; import org.labkey.api.data.SqlExecutor; @@ -227,7 +226,6 @@ import org.labkey.api.view.template.EmptyView; import org.labkey.api.view.template.PageConfig; import org.labkey.api.writer.FileSystemFile; -import org.labkey.api.writer.HtmlWriter; import org.labkey.api.writer.VirtualFile; import org.labkey.data.xml.TablesDocument; import org.labkey.study.CohortFilterFactory; @@ -2712,19 +2710,19 @@ protected int importData(DataLoader dl, FileStream file, String originalName, Ba columnMap.put(_form.getSequenceNum(), column); } - Pair, UploadLog> result = StudyPublishManager.getInstance().importDatasetTSV(getUser(), _study, _def, dl, getLookupResolutionType(), file, originalName, columnMap, errors, _form.getInsertOption(), auditBehaviorType); + List lsids = StudyPublishManager.getInstance().importDatasetTSV(getUser(), _study, _def, dl, getLookupResolutionType(), file, originalName, columnMap, errors, _form.getInsertOption(), auditBehaviorType); - if (!result.getKey().isEmpty()) + if (!lsids.isEmpty()) { // Log the import when SUMMARY is configured, if DETAILED is configured the DetailedAuditLogDataIterator will handle each row change. // It would be nice in the future to replace the DetailedAuditLogDataIterator with a general purpose AuditLogDataIterator // that can delegate the audit behavior type to the AuditDataHandler, so this code can go away // - String comment = "Dataset data imported. " + result.getKey().size() + " rows imported"; - new DatasetDefinition.DatasetAuditHandler(_def).addAuditEvent(getUser(), getContainer(), AuditBehaviorType.SUMMARY, comment, result.getValue()); + String comment = "Dataset data imported. 
" + lsids.size() + " rows imported"; + new DatasetDefinition.DatasetAuditHandler(_def).addAuditEvent(getUser(), getContainer(), AuditBehaviorType.SUMMARY, comment); } - return result.getKey().size(); + return lsids.size(); } @Override @@ -2855,15 +2853,6 @@ public ModelAndView getView(IdForm form, BindException errors) DataRegion dr = new DataRegion(); dr.addColumns(tInfo, "RowId,Created,CreatedBy,Status,Description"); GridView gv = new GridView(dr, errors); - DisplayColumn dc = new SimpleDisplayColumn(null) { - @Override - public void renderGridCellContents(RenderContext ctx, HtmlWriter out) - { - ActionURL url = new ActionURL(DownloadTsvAction.class, ctx.getContainer()).addParameter("id", String.valueOf(ctx.get("RowId"))); - out.write(LinkBuilder.labkeyLink("Download Data File", url)); - } - }; - dr.addDisplayColumn(dc); SimpleFilter filter = SimpleFilter.createContainerFilter(getContainer()); if (form.getId() != 0) @@ -2885,24 +2874,6 @@ public void addNavTrail(NavTree root) } } - @RequiresPermission(UpdatePermission.class) - public static class DownloadTsvAction extends SimpleViewAction - { - @Override - public ModelAndView getView(IdForm form, BindException errors) throws Exception - { - UploadLog ul = StudyPublishManager.getInstance().getUploadLog(getContainer(), form.getId()); - PageFlowUtil.streamFile(getViewContext().getResponse(), new File(ul.getFilePath()).toPath(), true); - - return null; - } - - @Override - public void addNavTrail(NavTree root) - { - } - } - @RequiresPermission(ReadPermission.class) public static class DatasetItemDetailsAction extends SimpleViewAction { diff --git a/study/src/org/labkey/study/dataset/DatasetSnapshotProvider.java b/study/src/org/labkey/study/dataset/DatasetSnapshotProvider.java index 435c41ae701..a75d69d22a0 100644 --- a/study/src/org/labkey/study/dataset/DatasetSnapshotProvider.java +++ b/study/src/org/labkey/study/dataset/DatasetSnapshotProvider.java @@ -487,7 +487,7 @@ public synchronized ActionURL updateSnapshot(QuerySnapshotForm form, BindExcepti ViewContext context = form.getViewContext(); new DatasetDefinition.DatasetAuditHandler(dsDef).addAuditEvent(context.getUser(), context.getContainer(), AuditBehaviorType.DETAILED, - "Dataset snapshot was updated. " + numRowsDeleted + " rows were removed and replaced with " + newRows.size() + " rows.", null); + "Dataset snapshot was updated. " + numRowsDeleted + " rows were removed and replaced with " + newRows.size() + " rows."); def.setLastUpdated(new Date()); def.save(form.getViewContext().getUser()); @@ -504,7 +504,7 @@ public synchronized ActionURL updateSnapshot(QuerySnapshotForm form, BindExcepti { ViewContext context = form.getViewContext(); new DatasetDefinition.DatasetAuditHandler(dsDef).addAuditEvent(context.getUser(), context.getContainer(), AuditBehaviorType.DETAILED, - "Dataset snapshot was not updated. Cause of failure: " + e.getMessage(), null); + "Dataset snapshot was not updated. Cause of failure: " + e.getMessage()); } } } @@ -791,7 +791,7 @@ public void run() DatasetDefinition dsDef = StudyManager.getInstance().getDatasetDefinitionByName(study, _def.getName()); if (dsDef != null) new DatasetDefinition.DatasetAuditHandler(dsDef).addAuditEvent(context.getUser(), context.getContainer(), AuditBehaviorType.DETAILED, - "Dataset snapshot was not updated. Cause of failure: " + errors.getMessage(), null); + "Dataset snapshot was not updated. 
                 }
             }
         }
diff --git a/study/src/org/labkey/study/model/DatasetDefinition.java b/study/src/org/labkey/study/model/DatasetDefinition.java
index 387169c205c..6dc3e2bae93 100644
--- a/study/src/org/labkey/study/model/DatasetDefinition.java
+++ b/study/src/org/labkey/study/model/DatasetDefinition.java
@@ -36,8 +36,37 @@
 import org.labkey.api.collections.CaseInsensitiveHashMap;
 import org.labkey.api.collections.CaseInsensitiveHashSet;
 import org.labkey.api.collections.Sets;
-import org.labkey.api.data.*;
+import org.labkey.api.data.AuditConfigurable;
+import org.labkey.api.data.BaseColumnInfo;
+import org.labkey.api.data.BeanObjectFactory;
+import org.labkey.api.data.ColumnInfo;
+import org.labkey.api.data.ConnectionWrapper;
+import org.labkey.api.data.Container;
+import org.labkey.api.data.ContainerManager;
+import org.labkey.api.data.DataColumn;
+import org.labkey.api.data.DatabaseCache;
+import org.labkey.api.data.DatabaseTableType;
+import org.labkey.api.data.DbSchema;
+import org.labkey.api.data.DbScope;
 import org.labkey.api.data.DbScope.Transaction;
+import org.labkey.api.data.DisplayColumn;
+import org.labkey.api.data.DisplayColumnFactory;
+import org.labkey.api.data.ExceptionFramework;
+import org.labkey.api.data.JdbcType;
+import org.labkey.api.data.NullColumnInfo;
+import org.labkey.api.data.ObjectFactory;
+import org.labkey.api.data.PropertyManager;
+import org.labkey.api.data.RuntimeSQLException;
+import org.labkey.api.data.SQLFragment;
+import org.labkey.api.data.SchemaTableInfo;
+import org.labkey.api.data.SimpleFilter;
+import org.labkey.api.data.SqlExecutor;
+import org.labkey.api.data.SqlSelector;
+import org.labkey.api.data.Table;
+import org.labkey.api.data.TableInfo;
+import org.labkey.api.data.TableSelector;
+import org.labkey.api.data.Transient;
+import org.labkey.api.data.UpdateableTableInfo;
 import org.labkey.api.data.dialect.SqlDialect;
 import org.labkey.api.dataiterator.DataIterator;
 import org.labkey.api.dataiterator.DataIteratorBuilder;
@@ -1843,7 +1872,7 @@ else if (existingRecord != null && !existingRecord.isEmpty())
          * @param requiredAuditType The expected audit behavior type. If this does not match the type set on the
          *                          dataset, then the event will not be logged.
          */
-        public void addAuditEvent(User user, Container c, AuditBehaviorType requiredAuditType, String comment, @Nullable UploadLog ul)
+        public void addAuditEvent(User user, Container c, AuditBehaviorType requiredAuditType, String comment)
         {
             TableInfo table = _dataset.getTableInfo(user);
 
@@ -1851,10 +1880,6 @@ public void addAuditEvent(User user, Container c, AuditBehaviorType requiredAudi
                 return;
 
             DatasetAuditProvider.DatasetAuditEvent event = new DatasetAuditProvider.DatasetAuditEvent(c, comment, _dataset.getDatasetId());
-            if (ul != null)
-            {
-                event.setLsid(ul.getFilePath());
-            }
             AuditLogService.get().addEvent(user, event);
         }
     }
diff --git a/study/src/org/labkey/study/model/StudyManager.java b/study/src/org/labkey/study/model/StudyManager.java
index 207daa48924..2cdcd6926f4 100644
--- a/study/src/org/labkey/study/model/StudyManager.java
+++ b/study/src/org/labkey/study/model/StudyManager.java
@@ -16,9 +16,9 @@
 
 package org.labkey.study.model;
 
-import org.apache.commons.collections4.MapUtils;
 import org.apache.commons.collections4.CollectionUtils;
 import org.apache.commons.collections4.ListUtils;
+import org.apache.commons.collections4.MapUtils;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.commons.lang3.Strings;
 import org.apache.logging.log4j.Level;
@@ -226,12 +226,12 @@
 import java.util.stream.Collectors;
 
 import static org.labkey.api.action.SpringActionController.ERROR_MSG;
-import static org.labkey.api.util.IntegerUtils.asInteger;
 import static org.labkey.api.studydesign.query.StudyDesignQuerySchema.PERSONNEL_TABLE_NAME;
 import static org.labkey.api.studydesign.query.StudyDesignQuerySchema.PRODUCT_ANTIGEN_TABLE_NAME;
 import static org.labkey.api.studydesign.query.StudyDesignQuerySchema.PRODUCT_TABLE_NAME;
 import static org.labkey.api.studydesign.query.StudyDesignQuerySchema.TREATMENT_PRODUCT_MAP_TABLE_NAME;
 import static org.labkey.api.studydesign.query.StudyDesignQuerySchema.TREATMENT_TABLE_NAME;
+import static org.labkey.api.util.IntegerUtils.asInteger;
 
 public class StudyManager
 {
@@ -2566,7 +2566,7 @@ public void deleteDataset(StudyImpl study, User user, DatasetDefinition ds, bool
 
             SchemaKey schemaPath = SchemaKey.fromParts(SCHEMA.getSchemaName());
             QueryService.get().fireQueryDeleted(user, study.getContainer(), null, schemaPath, Collections.singleton(ds.getName()));
 
-            new DatasetDefinition.DatasetAuditHandler(ds).addAuditEvent(user, study.getContainer(), AuditBehaviorType.DETAILED, "Dataset deleted: " + ds.getName(), null);
+            new DatasetDefinition.DatasetAuditHandler(ds).addAuditEvent(user, study.getContainer(), AuditBehaviorType.DETAILED, "Dataset deleted: " + ds.getName());
 
             transaction.addCommitTask(() -> unindexDataset(ds),
diff --git a/study/src/org/labkey/study/model/UploadLog.java b/study/src/org/labkey/study/model/UploadLog.java
index ff7e40e56fb..9b640f57034 100644
--- a/study/src/org/labkey/study/model/UploadLog.java
+++ b/study/src/org/labkey/study/model/UploadLog.java
@@ -33,7 +33,6 @@ public class UploadLog
     private Date created;
     private int userId;
     private String description;
-    private String filePath;
    private int datasetId;
     private String status;
 
@@ -87,16 +86,6 @@ public void setDescription(String description)
         this.description = description;
     }
 
-    public String getFilePath()
-    {
-        return filePath;
-    }
-
-    public void setFilePath(String filePath)
-    {
-        this.filePath = filePath;
-    }
-
     public int getDatasetId()
     {
         return datasetId;