diff --git a/api/src/org/labkey/api/action/PermissionCheckableAction.java b/api/src/org/labkey/api/action/PermissionCheckableAction.java
index 5795a2e2693..984aa69a8a4 100644
--- a/api/src/org/labkey/api/action/PermissionCheckableAction.java
+++ b/api/src/org/labkey/api/action/PermissionCheckableAction.java
@@ -16,6 +16,7 @@
package org.labkey.api.action;
import jakarta.servlet.http.HttpServletResponse;
+import org.apache.logging.log4j.Logger;
import org.jetbrains.annotations.Nullable;
import org.labkey.api.data.Container;
import org.labkey.api.module.IgnoresForbiddenProjectCheck;
@@ -39,6 +40,7 @@
import org.labkey.api.security.roles.RoleManager;
import org.labkey.api.util.ConfigurationException;
import org.labkey.api.util.HttpUtil;
+import org.labkey.api.util.logging.LogHelper;
import org.labkey.api.view.BadRequestException;
import org.labkey.api.view.NotFoundException;
import org.labkey.api.view.RedirectException;
@@ -55,6 +57,7 @@
public abstract class PermissionCheckableAction implements Controller, PermissionCheckable, HasViewContext
{
+ private static final Logger LOG = LogHelper.getLogger(PermissionCheckableAction.class, "Permission checks for actions");
private static final HttpUtil.Method[] arrayGetPost = new HttpUtil.Method[] {Method.GET, Method.POST};
private ViewContext _context = null;
UnauthorizedException.Type _unauthorizedType = UnauthorizedException.Type.redirectToLogin;
@@ -148,6 +151,8 @@ private void _checkActionPermissions(Set contextualRoles) throws Unauthori
Container c = context.getContainer();
User user = context.getUser();
Class<? extends Controller> actionClass = getClass();
+ if (LOG.isDebugEnabled())
+ LOG.debug(actionClass.getName() + ": checking permissions for user " + (user == null ? "" : user.getName() + " (impersonated=" + user.isImpersonated() + ")"));
if (!actionClass.isAnnotationPresent(IgnoresForbiddenProjectCheck.class))
c.throwIfForbiddenProject(user);
@@ -159,18 +164,22 @@ private void _checkActionPermissions(Set contextualRoles) throws Unauthori
methodsAllowed = methodsAllowedAnnotation.value();
if (Arrays.stream(methodsAllowed).noneMatch(s -> s.equals(method)))
{
- throw new BadRequestException("Method Not Allowed: " + method, null, HttpServletResponse.SC_METHOD_NOT_ALLOWED);
+ String msg = "Method Not Allowed: " + method;
+ LOG.debug(msg);
+ throw new BadRequestException(msg, null, HttpServletResponse.SC_METHOD_NOT_ALLOWED);
}
boolean requiresSiteAdmin = actionClass.isAnnotationPresent(RequiresSiteAdmin.class);
if (requiresSiteAdmin && !user.hasSiteAdminPermission())
{
+ LOG.debug(actionClass.getName() + ": action requires site admin permissions");
throw new UnauthorizedException();
}
boolean requiresLogin = actionClass.isAnnotationPresent(RequiresLogin.class);
if (requiresLogin && user.isGuest())
{
+ LOG.debug(actionClass.getName() + ": action requires login (non-guest)");
throw new UnauthorizedException();
}
@@ -214,7 +223,10 @@ private void _checkActionPermissions(Set contextualRoles) throws Unauthori
// Must have all permissions in permissionsRequired
if (!SecurityManager.hasAllPermissions(this.getClass().getName()+"_checkActionPermissions",
c, user, permissionsRequired, contextualRoles))
+ {
+ LOG.debug(actionClass.getName() + ": action requires all permissions: " + permissionsRequired);
throw new UnauthorizedException();
+ }
CSRF.Method csrfCheck = actionClass.isAnnotationPresent(CSRF.class) ? actionClass.getAnnotation(CSRF.class).value() : CSRF.Method.POST;
csrfCheck.validate(context);
@@ -228,7 +240,10 @@ private void _checkActionPermissions(Set contextualRoles) throws Unauthori
Collections.addAll(permissionsAnyOf, requiresAnyOf.value());
if (!SecurityManager.hasAnyPermissions(this.getClass().getName() + "_checkActionPermissions",
c, user, permissionsAnyOf, contextualRoles))
+ {
+ LOG.debug(actionClass.getName() + ": action requires any permissions: " + permissionsAnyOf);
throw new UnauthorizedException();
+ }
}
boolean requiresNoPermission = actionClass.isAnnotationPresent(RequiresNoPermission.class);
diff --git a/api/src/org/labkey/api/assay/AbstractAssayTsvDataHandler.java b/api/src/org/labkey/api/assay/AbstractAssayTsvDataHandler.java
index 825f84d91f4..f2432753ab3 100644
--- a/api/src/org/labkey/api/assay/AbstractAssayTsvDataHandler.java
+++ b/api/src/org/labkey/api/assay/AbstractAssayTsvDataHandler.java
@@ -706,7 +706,8 @@ private DataIterator checkData(
DomainProperty wellLocationPropFinder = null;
DomainProperty wellLsidPropFinder = null;
- RemapCache cache = new RemapCache();
+ RemapCache cacheWithoutPkLookup = new RemapCache();
+ RemapCache cacheWithPkLookup = new RemapCache();
Map remappableLookup = new HashMap<>();
Map materialCache = new LongHashMap<>();
Map> plateWellCache = new LongHashMap<>();
@@ -879,7 +880,12 @@ else if (entry.getKey().equalsIgnoreCase(ProvenanceService.PROVENANCE_INPUT_PROP
{
String s = o instanceof String ? (String) o : o.toString();
TableInfo lookupTable = remappableLookup.get(pd);
- Object remapped = cache.remap(lookupTable, s, true);
+
+ // GitHub Issue #443: similar to LookupResolutionType.alternateThenPrimaryKey, we want to check if the string value remaps using alternate keys (titleColumn) first
+ Object remapped = cacheWithoutPkLookup.remap(lookupTable, s, false);
+ if (remapped == null)
+ remapped = cacheWithPkLookup.remap(lookupTable, s, true);
+
if (remapped == null)
{
if (SAMPLE_CONCEPT_URI.equals(pd.getConceptURI()))
@@ -1026,7 +1032,7 @@ else if (o instanceof MvFieldWrapper mvWrapper)
try
{
if (material == null)
- material = exp.findExpMaterial(lookupContainer, user, materialName, byNameSS, cache, materialCache);
+ material = exp.findExpMaterial(lookupContainer, user, materialName, byNameSS, cacheWithoutPkLookup, materialCache);
}
catch (ValidationException ve)
{
diff --git a/api/src/org/labkey/api/attachments/LookAndFeelResourceType.java b/api/src/org/labkey/api/attachments/LookAndFeelResourceType.java
index 0cf38f92686..4495a645f8d 100644
--- a/api/src/org/labkey/api/attachments/LookAndFeelResourceType.java
+++ b/api/src/org/labkey/api/attachments/LookAndFeelResourceType.java
@@ -41,6 +41,7 @@ private LookAndFeelResourceType()
@Override
public void addWhereSql(SQLFragment sql, String parentColumn, String documentNameColumn)
{
+ // Keep in sync with CoreMigrationSchemaHandler.copyAttachments()
sql.append(parentColumn).append(" IN (SELECT EntityId FROM ").append(CoreSchema.getInstance().getTableInfoContainers(), "c").append(") AND (");
sql.append(documentNameColumn).append(" IN (?, ?) OR ");
sql.add(AttachmentCache.FAVICON_FILE_NAME);
diff --git a/api/src/org/labkey/api/data/Container.java b/api/src/org/labkey/api/data/Container.java
index b2ca3dd25f2..0d5186156a7 100644
--- a/api/src/org/labkey/api/data/Container.java
+++ b/api/src/org/labkey/api/data/Container.java
@@ -41,6 +41,7 @@
import org.labkey.api.query.QueryService;
import org.labkey.api.security.HasPermission;
import org.labkey.api.security.SecurableResource;
+import org.labkey.api.security.SecurityLogger;
import org.labkey.api.security.SecurityManager;
import org.labkey.api.security.SecurityPolicy;
import org.labkey.api.security.SecurityPolicyManager;
@@ -550,7 +551,11 @@ private boolean handleForbiddenProject(User user, Set contextualRoles, boo
if (null != impersonationProject && !impersonationProject.equals(currentProject))
{
if (shouldThrow)
- throw new ForbiddenProjectException("You are not allowed to access this folder while impersonating within a different project.");
+ {
+ String msg = "You are not allowed to access this folder while impersonating within a different project.";
+ SecurityLogger.log(msg, user, null, null);
+ throw new ForbiddenProjectException(msg);
+ }
return true;
}
@@ -563,7 +568,11 @@ private boolean handleForbiddenProject(User user, Set contextualRoles, boo
if (lockState.isLocked() && ContainerManager.LOCKED_PROJECT_HANDLER.isForbidden(currentProject, user, contextualRoles, lockState))
{
if (shouldThrow)
- throw new ForbiddenProjectException("You are not allowed to access this folder; it is " + lockState.getDescription() + ".");
+ {
+ String msg = "You are not allowed to access this folder; it is " + lockState.getDescription() + ".";
+ SecurityLogger.log(msg, user, null, null);
+ throw new ForbiddenProjectException(msg);
+ }
return true;
}
diff --git a/api/src/org/labkey/api/data/SQLParameterException.java b/api/src/org/labkey/api/data/SQLParameterException.java
new file mode 100644
index 00000000000..0d13252128a
--- /dev/null
+++ b/api/src/org/labkey/api/data/SQLParameterException.java
@@ -0,0 +1,29 @@
+/*
+ * Copyright (c) 2025 LabKey Corporation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.labkey.api.data;
+
+import org.labkey.api.util.SkipMothershipLogging;
+
+/**
+ * Signals there was a problem with a SQL parameter, such as a conversion problem.
+ */
+public class SQLParameterException extends SQLGenerationException implements SkipMothershipLogging
+{
+ public SQLParameterException(String message)
+ {
+ super(message);
+ }
+}
diff --git a/api/src/org/labkey/api/data/SimpleFilter.java b/api/src/org/labkey/api/data/SimpleFilter.java
index 8dff53f20ff..5393bbe41aa 100644
--- a/api/src/org/labkey/api/data/SimpleFilter.java
+++ b/api/src/org/labkey/api/data/SimpleFilter.java
@@ -1865,6 +1865,41 @@ private int howManyLessThan(List userIdsDesc, int max)
}
return howMany;
}
+
+ /**
+ * This used to be a PostgreSQL-specific test, but it should run and pass on SQL Server as well. It's largely
+ * redundant with testLargeInClause() above, but causes no harm.
+ */
+ @Test
+ public void testTempTableInClause()
+ {
+ DbSchema core = CoreSchema.getInstance().getSchema();
+ SqlDialect d = core.getSqlDialect();
+
+ Collection<Integer> allUserIds = new TableSelector(CoreSchema.getInstance().getTableInfoUsersData(), Collections.singleton("UserId")).getCollection(Integer.class);
+ SQLFragment shortSql = new SQLFragment("SELECT * FROM core.UsersData WHERE UserId");
+ d.appendInClauseSql(shortSql, allUserIds);
+ assertEquals(allUserIds.size(), new SqlSelector(core, shortSql).getRowCount());
+
+ ArrayList
-
+
- | Schema |
- Query |
- View Name |
- Flags |
- Owner |
- Created |
- Created By |
- Modified |
- Modified By |
+ Schema |
+ Query |
+ View Name |
+ Flags |
+ Owner |
+ Created |
+ Created By |
+ Modified |
+ Modified By |
+ |
<% if (getViewContext().hasPermission(UpdatePermission.class))
{
+ int count = 1;
for (CstmView view : views)
{
+ count++;
List flags = new ArrayList<>();
if (view.getCustomViewId() == 0)
flags.add("session");
@@ -133,7 +138,7 @@
if (mgr.isSnapshot(view.getFlags()))
flags.add("shapshot");
%>
-
+
| <%=h(view.getSchema())%>
|
<%=h(view.getQueryName())%>
diff --git a/search/src/org/labkey/search/SearchModule.java b/search/src/org/labkey/search/SearchModule.java
index d4cd37bca6e..f12c4ea4294 100644
--- a/search/src/org/labkey/search/SearchModule.java
+++ b/search/src/org/labkey/search/SearchModule.java
@@ -29,6 +29,7 @@
import org.labkey.api.data.UpgradeCode;
import org.labkey.api.mbean.LabKeyManagement;
import org.labkey.api.mbean.SearchMXBean;
+import org.labkey.api.migration.DatabaseMigrationConfiguration;
import org.labkey.api.migration.DatabaseMigrationService;
import org.labkey.api.migration.DefaultMigrationSchemaHandler;
import org.labkey.api.module.DefaultModule;
@@ -195,6 +196,13 @@ public List getTablesToCopy()
{
return List.of(); // Leave empty -- target server will re-index all documents
}
+
+ @Override
+ public void afterMigration(DatabaseMigrationConfiguration configuration)
+ {
+ // Clear index and all last indexed tracking
+ SearchService.get().deleteIndex("Database was just migrated");
+ }
});
}
diff --git a/specimen/src/org/labkey/specimen/importer/SpecimenImporter.java b/specimen/src/org/labkey/specimen/importer/SpecimenImporter.java
index 13e5ed7ba1b..86e5d0465a8 100644
--- a/specimen/src/org/labkey/specimen/importer/SpecimenImporter.java
+++ b/specimen/src/org/labkey/specimen/importer/SpecimenImporter.java
@@ -57,6 +57,7 @@
import org.labkey.api.dataiterator.DataIteratorBuilder;
import org.labkey.api.dataiterator.DataIteratorContext;
import org.labkey.api.dataiterator.DataIteratorUtil;
+import org.labkey.api.dataiterator.ImportProgress;
import org.labkey.api.dataiterator.ListofMapsDataIterator;
import org.labkey.api.dataiterator.LoggingDataIterator;
import org.labkey.api.dataiterator.MapDataIterator;
@@ -69,7 +70,6 @@
import org.labkey.api.exp.api.ExpSampleType;
import org.labkey.api.exp.api.ExperimentService;
import org.labkey.api.exp.api.SampleTypeService;
-import org.labkey.api.exp.list.ListImportProgress;
import org.labkey.api.iterator.MarkableIterator;
import org.labkey.api.pipeline.PipelineJob;
import org.labkey.api.query.DefaultSchema;
@@ -1993,7 +1993,7 @@ public Object getValue(Map row)
DataIteratorBuilder standardEtl = StandardDataIteratorBuilder.forInsert(target, specimenWrapped, getContainer(), getUser(), dix);
DataIteratorBuilder persist = ((UpdateableTableInfo)target).persistRows(standardEtl, dix);
Pump pump = new Pump(persist, dix);
- pump.setProgress(new ListImportProgress()
+ pump.setProgress(new ImportProgress()
{
long heartBeat = HeartBeat.currentTimeMillis();
diff --git a/study/resources/schemas/dbscripts/postgresql/study-25.004-25.005.sql b/study/resources/schemas/dbscripts/postgresql/study-25.004-25.005.sql
new file mode 100644
index 00000000000..dc5448ecf70
--- /dev/null
+++ b/study/resources/schemas/dbscripts/postgresql/study-25.004-25.005.sql
@@ -0,0 +1 @@
+ALTER TABLE study.uploadlog DROP CONSTRAINT UQ_UploadLog_FilePath;
\ No newline at end of file
diff --git a/study/resources/schemas/dbscripts/sqlserver/study-25.004-25.005.sql b/study/resources/schemas/dbscripts/sqlserver/study-25.004-25.005.sql
new file mode 100644
index 00000000000..dc5448ecf70
--- /dev/null
+++ b/study/resources/schemas/dbscripts/sqlserver/study-25.004-25.005.sql
@@ -0,0 +1 @@
+ALTER TABLE study.uploadlog DROP CONSTRAINT UQ_UploadLog_FilePath;
\ No newline at end of file
diff --git a/study/src/org/labkey/study/StudyModule.java b/study/src/org/labkey/study/StudyModule.java
index 8df47d6aa96..fbb6ab9dc93 100644
--- a/study/src/org/labkey/study/StudyModule.java
+++ b/study/src/org/labkey/study/StudyModule.java
@@ -42,8 +42,6 @@
import org.labkey.api.exp.PropertyType;
import org.labkey.api.exp.api.ExperimentService;
import org.labkey.api.exp.property.PropertyService;
-import org.labkey.api.files.FileContentService;
-import org.labkey.api.files.TableUpdaterFileListener;
import org.labkey.api.message.digest.ReportAndDatasetChangeDigestProvider;
import org.labkey.api.migration.DatabaseMigrationService;
import org.labkey.api.migration.DefaultMigrationSchemaHandler;
@@ -228,7 +226,7 @@ public String getName()
@Override
public Double getSchemaVersion()
{
- return 25.003;
+ return 25.005;
}
@Override
@@ -391,8 +389,6 @@ protected void startupAfterSpringConfig(ModuleContext moduleContext)
folderRegistry.addFactories(new StudyWriterFactory(), new StudyImporterFactory());
}
- FileContentService.get().addFileListener(new TableUpdaterFileListener(StudySchema.getInstance().getTableInfoUploadLog(), "FilePath", TableUpdaterFileListener.Type.filePath, "RowId"));
-
DatasetDefinition.cleanupOrphanedDatasetDomains();
OptionalFeatureService.get().addExperimentalFeatureFlag(StudyQuerySchema.EXPERIMENTAL_STUDY_SUBSCHEMAS, "Use sub-schemas in Study",
diff --git a/study/src/org/labkey/study/assay/StudyPublishManager.java b/study/src/org/labkey/study/assay/StudyPublishManager.java
index d4a2cc2c894..7fb7c09303c 100644
--- a/study/src/org/labkey/study/assay/StudyPublishManager.java
+++ b/study/src/org/labkey/study/assay/StudyPublishManager.java
@@ -18,13 +18,11 @@
import org.apache.commons.beanutils.ConvertUtils;
import org.apache.commons.collections4.MapUtils;
-import org.apache.commons.io.IOUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.labkey.api.assay.AbstractAssayProvider;
-import org.labkey.api.assay.AssayFileWriter;
import org.labkey.api.assay.AssayProtocolSchema;
import org.labkey.api.assay.AssayProvider;
import org.labkey.api.assay.AssayService;
@@ -113,9 +111,7 @@
import org.labkey.api.study.publish.StudyDatasetLinkedColumn;
import org.labkey.api.study.publish.StudyPublishService;
import org.labkey.api.study.query.PublishResultsQueryView;
-import org.labkey.api.util.DateUtil;
import org.labkey.api.util.FileStream;
-import org.labkey.api.util.FileUtil;
import org.labkey.api.util.PageFlowUtil;
import org.labkey.api.util.Pair;
import org.labkey.api.util.StringExpressionFactory;
@@ -140,10 +136,7 @@
import java.io.Closeable;
import java.io.IOException;
-import java.io.OutputStream;
import java.math.BigDecimal;
-import java.nio.file.Files;
-import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
@@ -932,60 +925,10 @@ private static String createUniqueDatasetName(Study study, String assayName)
return name;
}
- public UploadLog saveUploadData(User user, Dataset dsd, FileStream tsv, String filename) throws IOException
- {
- PipeRoot pipelineRoot = PipelineService.get().findPipelineRoot(dsd.getContainer());
- if (null == pipelineRoot || !pipelineRoot.isValid())
- throw new IOException("Please have your administrator set up a pipeline root for this folder.");
-
- Path dir = pipelineRoot.resolveToNioPath(AssayFileWriter.DIR_NAME);
- if (null == dir)
- throw new IOException("Cannot create directory uploaded data: " + AssayFileWriter.DIR_NAME);
-
- if (!Files.exists(dir))
- {
- FileUtil.createDirectory(dir);
- }
-
- //File name is studyname_datasetname_date_hhmm.ss
- Date dateCreated = new Date();
- String dateString = DateUtil.formatDateTime(dateCreated, "yyy-MM-dd-HHmm");
- int id = 0;
- Path file;
- do
- {
- String extension = Objects.toString(filename == null ? "tsv" : FileUtil.getExtension(filename), "tsv");
- String extra = id++ == 0 ? "" : String.valueOf(id);
- String fileName = dsd.getStudy().getLabel() + "-" + dsd.getLabel() + "-" + dateString + extra + "." + extension;
- fileName = fileName.replace('\\', '_').replace('/', '_').replace(':', '_');
- file = FileUtil.appendName(dir, fileName);
- }
- while (Files.exists(file));
-
- try (OutputStream out = Files.newOutputStream(file))
- {
- IOUtils.copy(tsv.openInputStream(), out);
- tsv.closeInputStream();
- }
-
- UploadLog ul = new UploadLog();
- ul.setContainer(dsd.getContainer());
- ul.setDatasetId(dsd.getDatasetId());
- ul.setCreated(dateCreated);
- ul.setUserId(user.getUserId());
- ul.setStatus("Initializing");
- String filePath = FileUtil.hasCloudScheme(file) ? FileUtil.pathToString(file) : file.toFile().getPath();
- ul.setFilePath(filePath);
-
- return Table.insert(user, getTinfoUpdateLog(), ul);
- }
-
-
/**
- * Return an array of LSIDs from the newly created dataset entries,
- * along with the upload log.
+ * Return a list of LSIDs from the newly created dataset entries.
*/
- public Pair, UploadLog> importDatasetTSV(User user, StudyImpl study, DatasetDefinition dsd, DataLoader dl, LookupResolutionType lookupResolutionType, FileStream fileIn, String originalFileName, Map columnMap, BatchValidationException errors, QueryUpdateService.InsertOption insertOption, @Nullable AuditBehaviorType auditBehaviorType)
+ public List<String> importDatasetTSV(User user, StudyImpl study, DatasetDefinition dsd, DataLoader dl, LookupResolutionType lookupResolutionType, FileStream fileIn, String originalFileName, Map columnMap, BatchValidationException errors, QueryUpdateService.InsertOption insertOption, @Nullable AuditBehaviorType auditBehaviorType)
{
DbScope scope = StudySchema.getInstance().getScope();
@@ -995,7 +938,13 @@ public Pair, UploadLog> importDatasetTSV(User user, StudyImpl study
try
{
if (null != fileIn)
- ul = saveUploadData(user, dsd, fileIn, originalFileName);
+ {
+ ul = new UploadLog();
+ ul.setContainer(dsd.getContainer());
+ ul.setDatasetId(dsd.getDatasetId());
+ ul.setUserId(user.getUserId());
+ ul.setStatus("Initializing");
+ }
try (DbScope.Transaction transaction = scope.ensureTransaction())
{
@@ -1025,8 +974,9 @@ public Pair, UploadLog> importDatasetTSV(User user, StudyImpl study
ul.setStatus("ERROR");
String description = ul.getDescription();
ul.setDescription(description == null ? "" : description + "\n" + new Date() + ":" + x.getMessage());
- ul = Table.update(user, StudySchema.getInstance().getTableInfoUploadLog(), ul, ul.getRowId());
- return Pair.of(lsids, ul);
+ Table.insert(user, getTinfoUpdateLog(), ul);
+
+ return lsids;
}
}
@@ -1035,7 +985,7 @@ public Pair, UploadLog> importDatasetTSV(User user, StudyImpl study
//Update the status
assert ul != null : "Upload log should always exist if no errors have occurred.";
ul.setStatus("SUCCESS");
- ul = Table.update(user, getTinfoUpdateLog(), ul, ul.getRowId());
+ Table.insert(user, getTinfoUpdateLog(), ul);
}
else if (ul != null)
{
@@ -1048,17 +998,9 @@ else if (ul != null)
sep = "\n";
}
ul.setDescription(sb.toString());
- ul = Table.update(user, getTinfoUpdateLog(), ul, ul.getRowId());
+ Table.insert(user, getTinfoUpdateLog(), ul);
}
- return Pair.of(lsids, ul);
- }
-
- public UploadLog getUploadLog(Container c, int id)
- {
- SimpleFilter filter = SimpleFilter.createContainerFilter(c);
- filter.addCondition(FieldKey.fromParts("rowId"), id);
-
- return new TableSelector(getTinfoUpdateLog(), filter, null).getObject(UploadLog.class);
+ return lsids;
}
@Override
diff --git a/study/src/org/labkey/study/controllers/StudyController.java b/study/src/org/labkey/study/controllers/StudyController.java
index b0dd79f0383..ee8f5c75b14 100644
--- a/study/src/org/labkey/study/controllers/StudyController.java
+++ b/study/src/org/labkey/study/controllers/StudyController.java
@@ -90,7 +90,6 @@
import org.labkey.api.data.RuntimeSQLException;
import org.labkey.api.data.SQLFragment;
import org.labkey.api.data.ShowRows;
-import org.labkey.api.data.SimpleDisplayColumn;
import org.labkey.api.data.SimpleFilter;
import org.labkey.api.data.Sort;
import org.labkey.api.data.SqlExecutor;
@@ -227,7 +226,6 @@
import org.labkey.api.view.template.EmptyView;
import org.labkey.api.view.template.PageConfig;
import org.labkey.api.writer.FileSystemFile;
-import org.labkey.api.writer.HtmlWriter;
import org.labkey.api.writer.VirtualFile;
import org.labkey.data.xml.TablesDocument;
import org.labkey.study.CohortFilterFactory;
@@ -2713,19 +2711,19 @@ protected int importData(DataLoader dl, FileStream file, String originalName, Ba
columnMap.put(_form.getSequenceNum(), column);
}
- Pair, UploadLog> result = StudyPublishManager.getInstance().importDatasetTSV(getUser(), _study, _def, dl, getLookupResolutionType(), file, originalName, columnMap, errors, _form.getInsertOption(), auditBehaviorType);
+ List<String> lsids = StudyPublishManager.getInstance().importDatasetTSV(getUser(), _study, _def, dl, getLookupResolutionType(), file, originalName, columnMap, errors, _form.getInsertOption(), auditBehaviorType);
- if (!result.getKey().isEmpty())
+ if (!lsids.isEmpty())
{
// Log the import when SUMMARY is configured, if DETAILED is configured the DetailedAuditLogDataIterator will handle each row change.
// It would be nice in the future to replace the DetailedAuditLogDataIterator with a general purpose AuditLogDataIterator
// that can delegate the audit behavior type to the AuditDataHandler, so this code can go away
//
- String comment = "Dataset data imported. " + result.getKey().size() + " rows imported";
- new DatasetDefinition.DatasetAuditHandler(_def).addAuditEvent(getUser(), getContainer(), AuditBehaviorType.SUMMARY, comment, result.getValue());
+ String comment = "Dataset data imported. " + lsids.size() + " rows imported";
+ new DatasetDefinition.DatasetAuditHandler(_def).addAuditEvent(getUser(), getContainer(), AuditBehaviorType.SUMMARY, comment);
}
- return result.getKey().size();
+ return lsids.size();
}
@Override
@@ -2856,15 +2854,6 @@ public ModelAndView getView(IdForm form, BindException errors)
DataRegion dr = new DataRegion();
dr.addColumns(tInfo, "RowId,Created,CreatedBy,Status,Description");
GridView gv = new GridView(dr, errors);
- DisplayColumn dc = new SimpleDisplayColumn(null) {
- @Override
- public void renderGridCellContents(RenderContext ctx, HtmlWriter out)
- {
- ActionURL url = new ActionURL(DownloadTsvAction.class, ctx.getContainer()).addParameter("id", String.valueOf(ctx.get("RowId")));
- out.write(LinkBuilder.labkeyLink("Download Data File", url));
- }
- };
- dr.addDisplayColumn(dc);
SimpleFilter filter = SimpleFilter.createContainerFilter(getContainer());
if (form.getId() != 0)
@@ -2886,24 +2875,6 @@ public void addNavTrail(NavTree root)
}
}
- @RequiresPermission(UpdatePermission.class)
- public static class DownloadTsvAction extends SimpleViewAction
- {
- @Override
- public ModelAndView getView(IdForm form, BindException errors) throws Exception
- {
- UploadLog ul = StudyPublishManager.getInstance().getUploadLog(getContainer(), form.getId());
- PageFlowUtil.streamFile(getViewContext().getResponse(), new File(ul.getFilePath()).toPath(), true);
-
- return null;
- }
-
- @Override
- public void addNavTrail(NavTree root)
- {
- }
- }
-
@RequiresPermission(ReadPermission.class)
public static class DatasetItemDetailsAction extends SimpleViewAction
{
diff --git a/study/src/org/labkey/study/dataset/DatasetSnapshotProvider.java b/study/src/org/labkey/study/dataset/DatasetSnapshotProvider.java
index 435c41ae701..a75d69d22a0 100644
--- a/study/src/org/labkey/study/dataset/DatasetSnapshotProvider.java
+++ b/study/src/org/labkey/study/dataset/DatasetSnapshotProvider.java
@@ -487,7 +487,7 @@ public synchronized ActionURL updateSnapshot(QuerySnapshotForm form, BindExcepti
ViewContext context = form.getViewContext();
new DatasetDefinition.DatasetAuditHandler(dsDef).addAuditEvent(context.getUser(), context.getContainer(), AuditBehaviorType.DETAILED,
- "Dataset snapshot was updated. " + numRowsDeleted + " rows were removed and replaced with " + newRows.size() + " rows.", null);
+ "Dataset snapshot was updated. " + numRowsDeleted + " rows were removed and replaced with " + newRows.size() + " rows.");
def.setLastUpdated(new Date());
def.save(form.getViewContext().getUser());
@@ -504,7 +504,7 @@ public synchronized ActionURL updateSnapshot(QuerySnapshotForm form, BindExcepti
{
ViewContext context = form.getViewContext();
new DatasetDefinition.DatasetAuditHandler(dsDef).addAuditEvent(context.getUser(), context.getContainer(), AuditBehaviorType.DETAILED,
- "Dataset snapshot was not updated. Cause of failure: " + e.getMessage(), null);
+ "Dataset snapshot was not updated. Cause of failure: " + e.getMessage());
}
}
}
@@ -791,7 +791,7 @@ public void run()
DatasetDefinition dsDef = StudyManager.getInstance().getDatasetDefinitionByName(study, _def.getName());
if (dsDef != null)
new DatasetDefinition.DatasetAuditHandler(dsDef).addAuditEvent(context.getUser(), context.getContainer(), AuditBehaviorType.DETAILED,
- "Dataset snapshot was not updated. Cause of failure: " + errors.getMessage(), null);
+ "Dataset snapshot was not updated. Cause of failure: " + errors.getMessage());
}
}
}
diff --git a/study/src/org/labkey/study/model/DatasetDefinition.java b/study/src/org/labkey/study/model/DatasetDefinition.java
index 387169c205c..6dc3e2bae93 100644
--- a/study/src/org/labkey/study/model/DatasetDefinition.java
+++ b/study/src/org/labkey/study/model/DatasetDefinition.java
@@ -36,8 +36,37 @@
import org.labkey.api.collections.CaseInsensitiveHashMap;
import org.labkey.api.collections.CaseInsensitiveHashSet;
import org.labkey.api.collections.Sets;
-import org.labkey.api.data.*;
+import org.labkey.api.data.AuditConfigurable;
+import org.labkey.api.data.BaseColumnInfo;
+import org.labkey.api.data.BeanObjectFactory;
+import org.labkey.api.data.ColumnInfo;
+import org.labkey.api.data.ConnectionWrapper;
+import org.labkey.api.data.Container;
+import org.labkey.api.data.ContainerManager;
+import org.labkey.api.data.DataColumn;
+import org.labkey.api.data.DatabaseCache;
+import org.labkey.api.data.DatabaseTableType;
+import org.labkey.api.data.DbSchema;
+import org.labkey.api.data.DbScope;
import org.labkey.api.data.DbScope.Transaction;
+import org.labkey.api.data.DisplayColumn;
+import org.labkey.api.data.DisplayColumnFactory;
+import org.labkey.api.data.ExceptionFramework;
+import org.labkey.api.data.JdbcType;
+import org.labkey.api.data.NullColumnInfo;
+import org.labkey.api.data.ObjectFactory;
+import org.labkey.api.data.PropertyManager;
+import org.labkey.api.data.RuntimeSQLException;
+import org.labkey.api.data.SQLFragment;
+import org.labkey.api.data.SchemaTableInfo;
+import org.labkey.api.data.SimpleFilter;
+import org.labkey.api.data.SqlExecutor;
+import org.labkey.api.data.SqlSelector;
+import org.labkey.api.data.Table;
+import org.labkey.api.data.TableInfo;
+import org.labkey.api.data.TableSelector;
+import org.labkey.api.data.Transient;
+import org.labkey.api.data.UpdateableTableInfo;
import org.labkey.api.data.dialect.SqlDialect;
import org.labkey.api.dataiterator.DataIterator;
import org.labkey.api.dataiterator.DataIteratorBuilder;
@@ -1843,7 +1872,7 @@ else if (existingRecord != null && !existingRecord.isEmpty())
* @param requiredAuditType The expected audit behavior type. If this does not match the type set on the
* dataset, then the event will not be logged.
*/
- public void addAuditEvent(User user, Container c, AuditBehaviorType requiredAuditType, String comment, @Nullable UploadLog ul)
+ public void addAuditEvent(User user, Container c, AuditBehaviorType requiredAuditType, String comment)
{
TableInfo table = _dataset.getTableInfo(user);
@@ -1851,10 +1880,6 @@ public void addAuditEvent(User user, Container c, AuditBehaviorType requiredAudi
return;
DatasetAuditProvider.DatasetAuditEvent event = new DatasetAuditProvider.DatasetAuditEvent(c, comment, _dataset.getDatasetId());
- if (ul != null)
- {
- event.setLsid(ul.getFilePath());
- }
AuditLogService.get().addEvent(user, event);
}
}
diff --git a/study/src/org/labkey/study/model/StudyManager.java b/study/src/org/labkey/study/model/StudyManager.java
index 409da6b5536..fb1174eb8a6 100644
--- a/study/src/org/labkey/study/model/StudyManager.java
+++ b/study/src/org/labkey/study/model/StudyManager.java
@@ -16,9 +16,9 @@
package org.labkey.study.model;
-import org.apache.commons.collections4.MapUtils;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.collections4.ListUtils;
+import org.apache.commons.collections4.MapUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.Strings;
import org.apache.logging.log4j.Level;
@@ -226,12 +226,12 @@
import java.util.stream.Collectors;
import static org.labkey.api.action.SpringActionController.ERROR_MSG;
-import static org.labkey.api.util.IntegerUtils.asInteger;
import static org.labkey.api.studydesign.query.StudyDesignQuerySchema.PERSONNEL_TABLE_NAME;
import static org.labkey.api.studydesign.query.StudyDesignQuerySchema.PRODUCT_ANTIGEN_TABLE_NAME;
import static org.labkey.api.studydesign.query.StudyDesignQuerySchema.PRODUCT_TABLE_NAME;
import static org.labkey.api.studydesign.query.StudyDesignQuerySchema.TREATMENT_PRODUCT_MAP_TABLE_NAME;
import static org.labkey.api.studydesign.query.StudyDesignQuerySchema.TREATMENT_TABLE_NAME;
+import static org.labkey.api.util.IntegerUtils.asInteger;
public class StudyManager
{
@@ -2566,7 +2566,7 @@ public void deleteDataset(StudyImpl study, User user, DatasetDefinition ds, bool
SchemaKey schemaPath = SchemaKey.fromParts(SCHEMA.getSchemaName());
QueryService.get().fireQueryDeleted(user, study.getContainer(), null, schemaPath, Collections.singleton(ds.getName()));
- new DatasetDefinition.DatasetAuditHandler(ds).addAuditEvent(user, study.getContainer(), AuditBehaviorType.DETAILED, "Dataset deleted: " + ds.getName(), null);
+ new DatasetDefinition.DatasetAuditHandler(ds).addAuditEvent(user, study.getContainer(), AuditBehaviorType.DETAILED, "Dataset deleted: " + ds.getName());
transaction.addCommitTask(() ->
unindexDataset(ds),
diff --git a/study/src/org/labkey/study/model/UploadLog.java b/study/src/org/labkey/study/model/UploadLog.java
index ff7e40e56fb..9b640f57034 100644
--- a/study/src/org/labkey/study/model/UploadLog.java
+++ b/study/src/org/labkey/study/model/UploadLog.java
@@ -33,7 +33,6 @@ public class UploadLog
private Date created;
private int userId;
private String description;
- private String filePath;
private int datasetId;
private String status;
@@ -87,16 +86,6 @@ public void setDescription(String description)
this.description = description;
}
- public String getFilePath()
- {
- return filePath;
- }
-
- public void setFilePath(String filePath)
- {
- this.filePath = filePath;
- }
-
public int getDatasetId()
{
return datasetId;
diff --git a/study/test/src/org/labkey/test/tests/study/AssayTest.java b/study/test/src/org/labkey/test/tests/study/AssayTest.java
index 5812f79bedf..b455a46c0a7 100644
--- a/study/test/src/org/labkey/test/tests/study/AssayTest.java
+++ b/study/test/src/org/labkey/test/tests/study/AssayTest.java
@@ -1036,7 +1036,10 @@ public void testAssayLookupValidatorConversion()
_listHelper.bulkImportData(TestDataUtils.tsvStringFromRowMaps(List.of(
Map.of(valueField.getName(), "One"),
Map.of(valueField.getName(), "Two"),
- Map.of(valueField.getName(), "123")
+ Map.of(valueField.getName(), "123"),
+ // GitHub Issue #443: value is the primary key for another row
+ Map.of(valueField.getName(), "5"), // pk = 4
+ Map.of(valueField.getName(), "6") // pk = 5
), List.of(valueField.getName()), true));
log("Create an assay with a results lookup field to the list, with lookup validator set");
@@ -1060,6 +1063,24 @@ public void testAssayLookupValidatorConversion()
.setLookupValidatorEnabled(false);
designerPage.clickFinish();
verifyAssayImportForLookupValidator(ISSUE_53625_ASSAY, lookupField, "RunWithoutLookupValidator", false);
+
+ log("GitHub Issue #443: Verify that importing a value that is also a primary key maps to the titleColumn value");
+ verifyAssayImportForPKValueThatIsTitleColumn(ISSUE_53625_ASSAY, lookupField, "RunWithPKandTitleColumn");
+ }
+
+ private void verifyAssayImportForPKValueThatIsTitleColumn(String assayName, FieldInfo lookupField, String runName)
+ {
+ String runDataStr = TestDataUtils.tsvStringFromRowMaps(List.of(
+ Map.of(lookupField.getName(), "4"), // pk 4, value 5
+ Map.of(lookupField.getName(), "5"), // pk 4, value 5
+ Map.of(lookupField.getName(), "6")), // pk 5, value 6
+ List.of(lookupField.getName()), true
+ );
+ importAssayData(assayName, runName, runDataStr);
+ clickAndWait(Locator.linkWithText(runName));
+ DataRegionTable dataTable = new DataRegionTable("Data", getDriver());
+ checker().verifyEquals("Incorrect number of results shown.", 3, dataTable.getDataRowCount());
+ checker().fatal().verifyEquals("Lookup values not as expected.", List.of("5", "5", "6"), dataTable.getColumnDataAsText(lookupField.getLabel()));
}
private void verifyAssayImportForLookupValidator(String assayName, FieldInfo lookupField, String runName, boolean validatorOn)
|