diff --git a/announcements/src/org/labkey/announcements/AnnouncementModule.java b/announcements/src/org/labkey/announcements/AnnouncementModule.java index 4967d394f2c..9c0d618d1d6 100644 --- a/announcements/src/org/labkey/announcements/AnnouncementModule.java +++ b/announcements/src/org/labkey/announcements/AnnouncementModule.java @@ -31,13 +31,19 @@ import org.labkey.api.announcements.CommSchema; import org.labkey.api.announcements.api.AnnouncementService; import org.labkey.api.attachments.AttachmentService; +import org.labkey.api.attachments.AttachmentType; import org.labkey.api.audit.AuditLogService; import org.labkey.api.audit.provider.MessageAuditProvider; import org.labkey.api.data.Container; import org.labkey.api.data.ContainerManager; +import org.labkey.api.data.DbSchema; import org.labkey.api.data.SqlExecutor; +import org.labkey.api.data.TableInfo; import org.labkey.api.message.digest.DailyMessageDigest; import org.labkey.api.message.settings.MessageConfigService; +import org.labkey.api.migration.DatabaseMigrationConfiguration; +import org.labkey.api.migration.DatabaseMigrationService; +import org.labkey.api.migration.DefaultMigrationSchemaHandler; import org.labkey.api.module.DefaultModule; import org.labkey.api.module.ModuleContext; import org.labkey.api.rss.RSSService; @@ -53,6 +59,7 @@ import org.labkey.api.view.ViewContext; import org.labkey.api.view.WebPartFactory; import org.labkey.api.view.WebPartView; +import org.labkey.api.wiki.WikiService; import java.util.ArrayList; import java.util.Collection; @@ -165,6 +172,42 @@ public void doStartup(ModuleContext moduleContext) { fsr.addFactories(new NotificationSettingsWriterFactory(), new NotificationSettingsImporterFactory()); } + + // AnnouncementModule owns the schema, so it registers the schema handler... even though it's mostly about wiki + DatabaseMigrationService.get().registerSchemaHandler(new DefaultMigrationSchemaHandler(CommSchema.getInstance().getSchema()) + { + @Override + public void beforeSchema() + { + new SqlExecutor(getSchema()).execute("ALTER TABLE comm.Pages DROP CONSTRAINT FK_Pages_PageVersions"); + new SqlExecutor(getSchema()).execute("ALTER TABLE comm.Pages DROP CONSTRAINT FK_Pages_Parent"); + } + + @Override + public List getTablesToCopy() + { + List tablesToCopy = super.getTablesToCopy(); + tablesToCopy.add(CommSchema.getInstance().getTableInfoPages()); + tablesToCopy.add(CommSchema.getInstance().getTableInfoPageVersions()); + + return tablesToCopy; + } + + @Override + public void afterSchema(DatabaseMigrationConfiguration configuration, DbSchema sourceSchema, DbSchema targetSchema) + { + new SqlExecutor(getSchema()).execute("ALTER TABLE comm.Pages ADD CONSTRAINT FK_Pages_PageVersions FOREIGN KEY (PageVersionId) REFERENCES comm.PageVersions (RowId)"); + new SqlExecutor(getSchema()).execute("ALTER TABLE comm.Pages ADD CONSTRAINT FK_Pages_Parent FOREIGN KEY (Parent) REFERENCES comm.Pages (RowId)"); + } + + @Override + public @NotNull Collection getAttachmentTypes() + { + // It's theoretically possible to deploy Announcement without Wiki, so conditionalize + WikiService ws = WikiService.get(); + return ws != null ? 
List.of(AnnouncementType.get(), ws.getAttachmentType()) : List.of(AnnouncementType.get()); + } + }); } diff --git a/announcements/src/org/labkey/announcements/model/AnnouncementType.java b/announcements/src/org/labkey/announcements/model/AnnouncementType.java index 5590b98fdfe..85d44b1f495 100644 --- a/announcements/src/org/labkey/announcements/model/AnnouncementType.java +++ b/announcements/src/org/labkey/announcements/model/AnnouncementType.java @@ -16,6 +16,7 @@ package org.labkey.announcements.model; import org.jetbrains.annotations.NotNull; +import org.jetbrains.annotations.Nullable; import org.labkey.api.announcements.CommSchema; import org.labkey.api.attachments.AttachmentType; import org.labkey.api.data.SQLFragment; @@ -40,8 +41,8 @@ public static AttachmentType get() } @Override - public void addWhereSql(SQLFragment sql, String parentColumn, String documentNameColumn) + public @Nullable SQLFragment getSelectParentEntityIdsSql() { - sql.append(parentColumn).append(" IN (SELECT EntityId FROM ").append(CommSchema.getInstance().getTableInfoAnnouncements(), "ann").append(")"); + return new SQLFragment("SELECT EntityId FROM ").append(CommSchema.getInstance().getTableInfoAnnouncements(), "ann"); } } diff --git a/api/src/org/labkey/api/ApiModule.java b/api/src/org/labkey/api/ApiModule.java index a14f28c3fef..afd068e7a0e 100644 --- a/api/src/org/labkey/api/ApiModule.java +++ b/api/src/org/labkey/api/ApiModule.java @@ -31,7 +31,6 @@ import org.labkey.api.attachments.AttachmentService; import org.labkey.api.attachments.ImageServlet; import org.labkey.api.attachments.LookAndFeelResourceType; -import org.labkey.api.attachments.SecureDocumentType; import org.labkey.api.audit.query.AbstractAuditDomainKind; import org.labkey.api.cache.BlockingCache; import org.labkey.api.collections.ArrayListMap; @@ -222,7 +221,6 @@ protected void init() AttachmentService.get().registerAttachmentType(LookAndFeelResourceType.get()); AttachmentService.get().registerAttachmentType(AuthenticationLogoType.get()); AttachmentService.get().registerAttachmentType(AvatarType.get()); - AttachmentService.get().registerAttachmentType(SecureDocumentType.get()); PropertyManager.registerEncryptionMigrationHandler(); AuthenticationManager.registerEncryptionMigrationHandler(); diff --git a/api/src/org/labkey/api/attachments/AttachmentService.java b/api/src/org/labkey/api/attachments/AttachmentService.java index 538d25674f9..01cd199f9de 100644 --- a/api/src/org/labkey/api/attachments/AttachmentService.java +++ b/api/src/org/labkey/api/attachments/AttachmentService.java @@ -133,6 +133,11 @@ static AttachmentService get() void registerAttachmentType(AttachmentType type); + /** + * Returns a collection of all registered AttachmentTypes + **/ + Collection getAttachmentTypes(); + HttpView getAdminView(ActionURL currentUrl); HttpView getFindAttachmentParentsView(); diff --git a/api/src/org/labkey/api/attachments/AttachmentType.java b/api/src/org/labkey/api/attachments/AttachmentType.java index 51175668c0d..d9f6a2b67de 100644 --- a/api/src/org/labkey/api/attachments/AttachmentType.java +++ b/api/src/org/labkey/api/attachments/AttachmentType.java @@ -16,14 +16,17 @@ package org.labkey.api.attachments; import org.jetbrains.annotations.NotNull; +import org.jetbrains.annotations.Nullable; import org.labkey.api.data.SQLFragment; /** * Tags {@link Attachment} objects based on their intended use and what they're attached to. Does not - * necessarily indicate that they are a file of a particular type/format. 
+ * indicate that they are a file of a particular type/format. */ public interface AttachmentType { + SQLFragment NO_ENTITY_IDS = new SQLFragment("SELECT NULL AS EntityId WHERE 1 = 0"); + AttachmentType UNKNOWN = new AttachmentType() { @NotNull @@ -43,10 +46,29 @@ public void addWhereSql(SQLFragment sql, String parentColumn, String documentNam @NotNull String getUniqueName(); /** - * Append to the where clause of a query that wants to select attachments of the implementing type + * Append to the where clause of a query that wants to select attachments of the implementing type from the + * core.Documents table * @param sql Implementers MUST append a valid where clause to this SQLFragment * @param parentColumn Column identifier for use in where clause. Usually represents 'core.Documents.Parent' * @param documentNameColumn Column identifier for use in where clause. Usually represents 'core.Documents.DocumentName' */ - void addWhereSql(SQLFragment sql, String parentColumn, String documentNameColumn); + default void addWhereSql(SQLFragment sql, String parentColumn, String documentNameColumn) + { + SQLFragment selectSql = getSelectParentEntityIdsSql(); + if (selectSql == null) + throw new IllegalStateException("Must override either addWhereSql() or getSelectParentEntityIdsSql()"); + sql.append(parentColumn).append(" IN (").append(selectSql).append(")"); + } + + /** + * Return a SQLFragment that selects all the EntityIds that might be attachment parents from the table(s) that + * provide attachments of this type, without involving the core.Documents table. For example, + * {@code SELECT EntityId FROM comm.Announcements}. Return null if this is not-yet-implemented or inappropriate. + * For example, some attachments' parents are container IDs. If the method determines that no parents exist, then + * return a valid query that selects no rows, for example, {@code NO_ENTITY_IDS}. + */ + default @Nullable SQLFragment getSelectParentEntityIdsSql() + { + return null; + } } diff --git a/api/src/org/labkey/api/attachments/SecureDocumentType.java b/api/src/org/labkey/api/attachments/SecureDocumentType.java deleted file mode 100644 index 5146449c416..00000000000 --- a/api/src/org/labkey/api/attachments/SecureDocumentType.java +++ /dev/null @@ -1,45 +0,0 @@ -/* - * Copyright (c) 2017 LabKey Corporation - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -package org.labkey.api.attachments; - -import org.jetbrains.annotations.NotNull; -import org.labkey.api.data.SQLFragment; - -public class SecureDocumentType implements AttachmentType -{ - private static final SecureDocumentType INSTANCE = new SecureDocumentType(); - - public static SecureDocumentType get() - { - return INSTANCE; - } - - private SecureDocumentType() - { - } - - @Override - public @NotNull String getUniqueName() - { - return getClass().getName(); - } - - @Override - public void addWhereSql(SQLFragment sql, String parentColumn, String documentNameColumn) - { - sql.append("1 = 0"); // No secure documents in current deployments - } -} \ No newline at end of file diff --git a/api/src/org/labkey/api/collections/ArrayListValuedTreeMap.java b/api/src/org/labkey/api/collections/ArrayListValuedTreeMap.java new file mode 100644 index 00000000000..6db1e645242 --- /dev/null +++ b/api/src/org/labkey/api/collections/ArrayListValuedTreeMap.java @@ -0,0 +1,22 @@ +package org.labkey.api.collections; + +import org.apache.commons.collections4.multimap.AbstractListValuedMap; + +import java.util.ArrayList; +import java.util.Comparator; +import java.util.List; +import java.util.TreeMap; + +public class ArrayListValuedTreeMap extends AbstractListValuedMap +{ + public ArrayListValuedTreeMap(Comparator comparator) + { + super(new TreeMap<>(comparator)); + } + + @Override + protected List createCollection() + { + return new ArrayList<>(); + } +} diff --git a/api/src/org/labkey/api/data/DatabaseMigrationConfiguration.java b/api/src/org/labkey/api/data/DatabaseMigrationConfiguration.java deleted file mode 100644 index d6222ff90ac..00000000000 --- a/api/src/org/labkey/api/data/DatabaseMigrationConfiguration.java +++ /dev/null @@ -1,59 +0,0 @@ -package org.labkey.api.data; - -import org.jetbrains.annotations.NotNull; -import org.jetbrains.annotations.Nullable; -import org.labkey.api.data.DatabaseMigrationService.MigrationSchemaHandler; -import org.labkey.api.data.DatabaseMigrationService.MigrationTableHandler; - -import java.util.Set; -import java.util.function.Predicate; - -public interface DatabaseMigrationConfiguration -{ - boolean shouldInsertData(); - default void beforeMigration(){}; - DbScope getSourceScope(); - DbScope getTargetScope(); - @NotNull Set getSkipSchemas(); - Predicate getColumnNameFilter(); - @Nullable TableSelector getTableSelector(DbSchemaType schemaType, TableInfo sourceTable, TableInfo targetTable, Set selectColumnNames, MigrationSchemaHandler schemaHandler, @Nullable MigrationTableHandler tableHandler); - - class DefaultDatabaseMigrationConfiguration implements DatabaseMigrationConfiguration - { - @Override - public boolean shouldInsertData() - { - return true; - } - - @Override - public @Nullable DbScope getSourceScope() - { - return null; - } - - @Override - public DbScope getTargetScope() - { - return null; - } - - @Override - public @NotNull Set getSkipSchemas() - { - return Set.of(); - } - - @Override - public Predicate getColumnNameFilter() - { - return null; - } - - @Override - public TableSelector getTableSelector(DbSchemaType schemaType, TableInfo sourceTable, TableInfo targetTable, Set selectColumnNames, MigrationSchemaHandler schemaHandler, @Nullable MigrationTableHandler tableHandler) - { - return null; - } - } -} diff --git a/api/src/org/labkey/api/data/DatabaseMigrationService.java b/api/src/org/labkey/api/data/DatabaseMigrationService.java deleted file mode 100644 index b32dd40f0c9..00000000000 --- a/api/src/org/labkey/api/data/DatabaseMigrationService.java 
+++ /dev/null @@ -1,374 +0,0 @@ -package org.labkey.api.data; - -import org.apache.logging.log4j.Logger; -import org.jetbrains.annotations.NotNull; -import org.jetbrains.annotations.Nullable; -import org.labkey.api.data.DatabaseMigrationConfiguration.DefaultDatabaseMigrationConfiguration; -import org.labkey.api.data.SimpleFilter.AndClause; -import org.labkey.api.data.SimpleFilter.FilterClause; -import org.labkey.api.data.SimpleFilter.InClause; -import org.labkey.api.data.SimpleFilter.OrClause; -import org.labkey.api.data.SimpleFilter.SQLClause; -import org.labkey.api.query.FieldKey; -import org.labkey.api.query.SchemaKey; -import org.labkey.api.query.TableSorter; -import org.labkey.api.services.ServiceRegistry; -import org.labkey.api.util.ConfigurationException; -import org.labkey.api.util.GUID; -import org.labkey.api.util.StringUtilsLabKey; -import org.labkey.api.util.logging.LogHelper; -import org.labkey.vfs.FileLike; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.HashSet; -import java.util.LinkedHashSet; -import java.util.List; -import java.util.Set; -import java.util.stream.Collectors; - -public interface DatabaseMigrationService -{ - Logger LOG = LogHelper.getLogger(DatabaseMigrationService.class, "Information about database migration"); - - record DataFilter(Set containers, String column, FilterClause condition) {} - - static @NotNull DatabaseMigrationService get() - { - DatabaseMigrationService ret = ServiceRegistry.get().getService(DatabaseMigrationService.class); - return ret != null ? ret : new DatabaseMigrationService() {}; - } - - static void setInstance(DatabaseMigrationService impl) - { - ServiceRegistry.get().registerService(DatabaseMigrationService.class, impl); - } - - default DatabaseMigrationConfiguration getDatabaseMigrationConfiguration(FileLike labkeyRoot, @Nullable String migration) - { - return new DefaultDatabaseMigrationConfiguration(); - } - - // By default, no-op implementation that simply logs - default void migrate(DatabaseMigrationConfiguration configuration) - { - LOG.warn("Database migration service is not present; database migration is a premium feature."); - } - - // By default, no-op implementations - default void registerSchemaHandler(MigrationSchemaHandler schemaHandler) {} - default void registerTableHandler(MigrationTableHandler tableHandler) {} - default void registerMigrationFilter(MigrationFilter filter) {} - - default @Nullable MigrationFilter getMigrationFilter(String propertyName) - { - return null; - } - - default void copySourceTableToTargetTable(DatabaseMigrationConfiguration configuration, TableInfo sourceTable, TableInfo targetTable, DbSchemaType schemaType, MigrationSchemaHandler schemaHandler) {}; - - interface MigrationSchemaHandler - { - // Marker for tables to declare themselves as site-wide (no container filtering) - FieldKey SITE_WIDE_TABLE = FieldKey.fromParts("site-wide"); - - DbSchema getSchema(); - - void beforeVerification(); - - void beforeSchema(); - - List getTablesToCopy(); - - // Create a filter clause that selects from all specified containers and (in some overrides) applies table-specific filters - FilterClause getTableFilterClause(TableInfo sourceTable, Set containers); - - // Create a filter clause that selects from all specified containers - FilterClause getContainerClause(TableInfo sourceTable, Set containers); - - // Return the FieldKey that can be used to filter this table by container. 
Special values SITE_WIDE_TABLE and - // DUMMY_FIELD_KEY can be returned for special behaviors. DUMMY_FIELD_KEY ensures that the handler's custom - // getContainerClause() is always called. SITE_WIDE_TABLE is used to select all rows. - @Nullable FieldKey getContainerFieldKey(TableInfo sourceTable); - - // Create a filter clause that selects all rows from unfiltered containers plus filtered rows from the filtered containers - FilterClause getDomainDataFilterClause(Set copyContainers, Set filteredContainers, List domainFilters, TableInfo sourceTable, Set selectColumnNames); - - void addDomainDataFilterClause(OrClause orClause, DataFilter filter, TableInfo sourceTable, Set selectColumnNames); - - // Do any necessary clean up after the target table has been populated. notCopiedFilter selects all rows in the - // source table that were NOT copied to the target table. (For example, rows in a global table not copied due to - // container filtering or rows in a provisioned table not copied due to domain data filtering.) - void afterTable(TableInfo sourceTable, TableInfo targetTable, SimpleFilter notCopiedFilter); - - void afterSchema(DatabaseMigrationConfiguration configuration, DbSchema sourceSchema, DbSchema targetSchema); - } - - class DefaultMigrationSchemaHandler implements MigrationSchemaHandler - { - private final DbSchema _schema; - - public DefaultMigrationSchemaHandler(DbSchema schema) - { - _schema = schema; - } - - @Override - public DbSchema getSchema() - { - return _schema; - } - - @Override - public void beforeVerification() - { - } - - @Override - public void beforeSchema() - { - } - - @Override - public List getTablesToCopy() - { - Set sortedTables = new LinkedHashSet<>(TableSorter.sort(getSchema(), true)); - - Set allTables = getSchema().getTableNames().stream() - .map(getSchema()::getTable) - .collect(Collectors.toCollection(HashSet::new)); - allTables.removeAll(sortedTables); - - if (!allTables.isEmpty()) - { - LOG.info("These tables were removed by TableSorter: {}", allTables); - } - - return sortedTables.stream() - // Skip all views and virtual tables (e.g., test.Containers2, which is a table on SS but a view on PG) - .filter(table -> table.getTableType() == DatabaseTableType.TABLE) - .collect(Collectors.toCollection(ArrayList::new)); // Ensure mutable - } - - @Override - public FilterClause getTableFilterClause(TableInfo sourceTable, Set containers) - { - return getContainerClause(sourceTable, containers); - } - - @Override - public FilterClause getContainerClause(TableInfo sourceTable, Set containers) - { - FieldKey containerFieldKey = getContainerFieldKey(sourceTable); - - if (containerFieldKey == SITE_WIDE_TABLE) - return new SQLClause(new SQLFragment("TRUE")); - - return new InClause(containerFieldKey, containers); - } - - @Override - public @Nullable FieldKey getContainerFieldKey(TableInfo table) - { - FieldKey fKey = table.getContainerFieldKey(); - - if (fKey != null) - return fKey; - - for (ColumnInfo col : table.getColumns()) - { - ForeignKey fk = TableSorter.getForeignKey(table, col, true); - if (fk != null) - { - // Use the table's schema (or a migration schema retrieved from the table's scope), since we want a Migration schema with XML metadata applied - DbSchema tableSchema = table.getSchema(); - DbSchema lookupSchema = fk.getLookupSchemaKey().equals(new SchemaKey(null, tableSchema.getName())) ? 
- tableSchema : - tableSchema.getScope().getSchema(fk.getLookupSchemaName(), DbSchemaType.Migration); - TableInfo lookupTableInfo = lookupSchema.getTable(fk.getLookupTableName()); - if (lookupTableInfo != null) - { - fKey = lookupTableInfo.getContainerFieldKey(); - - if (null == fKey) - { - // Ignore self joins - if (!lookupTableInfo.getName().equalsIgnoreCase(table.getName())) - { - fKey = getContainerFieldKey(lookupTableInfo); - } - } - - if (fKey != null) - return FieldKey.fromParts(col.getFieldKey(), fKey); - } - } - } - - return null; - } - - @Override - public final FilterClause getDomainDataFilterClause(Set copyContainers, Set filteredContainers, List domainFilters, TableInfo sourceTable, Set selectColumnNames) - { - // Filtered case: remove the filtered containers from the unconditional container set - Set otherContainers = new HashSet<>(copyContainers); - otherContainers.removeAll(filteredContainers); - FilterClause ret = getContainerClause(sourceTable, otherContainers); - - OrClause orClause = new OrClause(); - - // Delegate to the MigrationSchemaHandler to add domain-filtered containers back with their special filter applied - domainFilters.forEach(filter -> addDomainDataFilterClause(orClause, filter, sourceTable, selectColumnNames)); - - if (!orClause.getClauses().isEmpty()) - { - orClause.addClause(ret); - ret = orClause; - } - - return ret; - } - - @Override - public void addDomainDataFilterClause(OrClause orClause, DataFilter filter, TableInfo sourceTable, Set selectColumnNames) - { - addDataFilterClause(orClause, filter, sourceTable, selectColumnNames); - } - - // Add a filter and return true if the column exists directly on the table - protected boolean addDataFilterClause(OrClause orClause, DataFilter filter, TableInfo sourceTable, Set selectColumnNames) - { - boolean columnExists = selectColumnNames.contains(filter.column()); - - if (columnExists) - { - // Select all rows in this domain-filtered container that meet its criteria - orClause.addClause( - new AndClause( - getContainerClause(sourceTable, filter.containers()), - filter.condition() - ) - ); - } - - return columnExists; - } - - // Add a clause that selects all rows where the object property with equals the filter value. This - // is only for provisioned tables that lack an ObjectId, MaterialId, or DataId column. - protected void addObjectPropertyClause(OrClause orClause, DataFilter filter, TableInfo sourceTable, int propertyId) - { - SQLFragment flagWhere = new SQLFragment("lsid IN (SELECT ObjectURI FROM exp.Object o INNER JOIN exp.ObjectProperty op ON o.ObjectId = op.ObjectId WHERE StringValue = ? 
AND PropertyId = ?)", filter.condition().getParamVals()[0], propertyId); - - orClause.addClause( - new AndClause( - getContainerClause(sourceTable, filter.containers()), - new SQLClause(flagWhere) - ) - ); - } - - private Integer _commentPropertyId = null; - - protected synchronized int getCommentPropertyId(DbScope scope) - { - if (_commentPropertyId == null) - { - // Get the exp.PropertyDescriptor table from the source scope - TableInfo propertyDescriptor = scope.getSchema("exp", DbSchemaType.Migration).getTable("PropertyDescriptor"); - // Select the PropertyId associated with built-in Flag fields ("urn:exp.labkey.org/#Comment") - Integer propertyId = new TableSelector(propertyDescriptor, Collections.singleton("PropertyId"), new SimpleFilter(FieldKey.fromParts("PropertyURI"), "urn:exp.labkey.org/#Comment"), null).getObject(Integer.class); - if (propertyId == null) - throw new RuntimeException("PropertyDescriptor for built-in Flag field not found"); - else - _commentPropertyId = propertyId; - } - - return _commentPropertyId; - } - - protected String rowsNotCopied(int count) - { - return " " + StringUtilsLabKey.pluralize(count, "row") + " not copied"; - } - - @Override - public void afterTable(TableInfo sourceTable, TableInfo targetTable, SimpleFilter notCopiedFilter) - { - } - - @Override - public void afterSchema(DatabaseMigrationConfiguration configuration, DbSchema sourceSchema, DbSchema targetSchema) - { - } - } - - /** - * Rarely needed, this interface allows a module to provide a clause that filters the rows of another module's - * table. The specific use case: Core manages core.Documents and LabBook implements its global attachment manager - * on top of core.Documents. When copying data from core.Documents, we want LabBook to filter out the rows that - * are not referenced by notebooks in the subset of containers being copied. - */ - interface MigrationTableHandler - { - TableInfo getTableInfo(); - FilterClause getAdditionalFilterClause(Set containers); - } - - /** - * A MigrationFilter adds support for the named filter property in the migration configuration file. If present, - * saveFilter() is called with the container guid and property value. Modules can register these to present - * module-specific filters. 
- */ - interface MigrationFilter - { - String getName(); - // Implementations should validate guid nullity - void saveFilter(@Nullable GUID guid, String value); - } - - interface ExperimentDeleteService - { - static @NotNull ExperimentDeleteService get() - { - ExperimentDeleteService ret = ServiceRegistry.get().getService(ExperimentDeleteService.class); - if (ret == null) - throw new IllegalStateException("ExperimentDeleteService not found"); - return ret; - } - - static void setInstance(ExperimentDeleteService impl) - { - ServiceRegistry.get().registerService(ExperimentDeleteService.class, impl); - } - - /** - * Deletes all rows from exp.Data, exp.Object, and related tables associated with the provided ObjectIds - */ - void deleteDataRows(Collection objectIds); - } - - // Helper method that parses a data filter then adds it and its container to the provided collections, coalescing - // cases where multiple containers specify the same filter - static void addDataFilter(String filterName, List dataFilters, Set filteredContainers, GUID guid, String filter) - { - String[] filterParts = filter.split("="); - if (filterParts.length != 2) - throw new ConfigurationException("Bad " + filterName + " value; expected =: " + filter); - - if (!filteredContainers.add(guid)) - throw new ConfigurationException("Duplicate " + filterName + " entry for container " + guid); - - String column = filterParts[0]; - String value = filterParts[1]; - FilterClause clause = CompareType.EQUAL.createFilterClause(new FieldKey(null, column), value); - // If another container is already using this filter clause, then simply add this guid to that domain filter. - // Otherwise, add a new domain filter to the list. - dataFilters.stream() - .filter(df -> df.column().equals(column) && df.condition().equals(clause)) - .findFirst() - .ifPresentOrElse(df -> df.containers().add(guid), () -> dataFilters.add(new DataFilter(new HashSet<>(Set.of(guid)), filterParts[0], clause))); - } -} diff --git a/api/src/org/labkey/api/data/DbSchemaType.java b/api/src/org/labkey/api/data/DbSchemaType.java index 208dc2a61b9..cd54c930c5d 100644 --- a/api/src/org/labkey/api/data/DbSchemaType.java +++ b/api/src/org/labkey/api/data/DbSchemaType.java @@ -19,6 +19,7 @@ import org.jetbrains.annotations.Nullable; import org.labkey.api.cache.CacheManager; import org.labkey.api.exp.api.ProvisionedDbSchema; +import org.labkey.api.migration.MigrationDbSchema; import org.labkey.api.module.Module; import org.labkey.api.module.ModuleLoader; diff --git a/api/src/org/labkey/api/data/InClauseGenerator.java b/api/src/org/labkey/api/data/InClauseGenerator.java index ec5a60acdb4..0ee8fdb56a6 100644 --- a/api/src/org/labkey/api/data/InClauseGenerator.java +++ b/api/src/org/labkey/api/data/InClauseGenerator.java @@ -22,8 +22,6 @@ /** * Implementors generate and append SQL that acts as an "is one of" filter. 
This can be an actual IN clause or a * database-specific implementation that scales or performs better (e.g., arrays or in-line parameter expansion) - * User: adam - * Date: 8/3/12 */ public interface InClauseGenerator { diff --git a/api/src/org/labkey/api/data/TempTableInClauseGenerator.java b/api/src/org/labkey/api/data/TempTableInClauseGenerator.java index 7876a83fae6..14276fb551d 100644 --- a/api/src/org/labkey/api/data/TempTableInClauseGenerator.java +++ b/api/src/org/labkey/api/data/TempTableInClauseGenerator.java @@ -33,10 +33,9 @@ import java.util.List; import java.util.Set; import java.util.TreeSet; +import java.util.function.Supplier; /** - * Created by davebradlee on 6/5/15. - * * Generator for very long in-clauses */ public class TempTableInClauseGenerator implements InClauseGenerator @@ -44,6 +43,22 @@ public class TempTableInClauseGenerator implements InClauseGenerator private static final Cache _tempTableCache = CacheManager.getStringKeyCache(200, CacheManager.MINUTE * 5, "IN clause temp tables"); + // Need to set a supplier instead of setting the default temp schema directly because this class is constructed at + // dialect init time, before schemas can be referenced. + private final Supplier _tempSchemaSupplier; + + // By default, use the primary database temp schema + public TempTableInClauseGenerator() + { + this(DbSchema::getTemp); + } + + // Use in cases where the default temp schema won't do, e.g., you need to apply a large IN clause in an external data source + public TempTableInClauseGenerator(Supplier tempSchemaSupplier) + { + _tempSchemaSupplier = tempSchemaSupplier; + } + /** * @param sql fragment to append to * @param params list of values @@ -87,19 +102,20 @@ else if (jdbcType == JdbcType.VARCHAR) TempTableInfo tempTableInfo = _tempTableCache.get(cacheKey); if (tempTableInfo == null) { - tempTableInfo = new TempTableInfo("InClause", Collections.singletonList(new BaseColumnInfo("Id", jdbcType, 0, false)), null); + DbSchema tempSchema = _tempSchemaSupplier.get(); + tempTableInfo = new TempTableInfo(tempSchema, "InClause", Collections.singletonList(new BaseColumnInfo("Id", jdbcType, 0, false)), null); SQLFragment sqlCreate = new SQLFragment("CREATE TABLE "); sqlCreate.append(tempTableInfo) - .append("\n(Id ") - .append(DbSchema.getTemp().getSqlDialect().getSqlTypeName(jdbcType)) - .append(jdbcType == JdbcType.VARCHAR ? "(450)" : "") - .append(")"); + .append("\n(Id ") + .append(tempSchema.getSqlDialect().getSqlTypeName(jdbcType)) + .append(jdbcType == JdbcType.VARCHAR ? "(450)" : "") + .append(")"); // When the in clause receives more parameters than it is set to handle, a temporary table is created to handle the overflow. // While the associated mutating operations are necessary, they are not a viable CSRF attack vector. 
try (var ignored = SpringActionController.ignoreSqlUpdates()) { - new SqlExecutor(DbSchema.getTemp()).execute(sqlCreate); + new SqlExecutor(tempSchema).execute(sqlCreate); } tempTableInfo.track(); String tableName = tempTableInfo.getSelectName(); @@ -110,11 +126,11 @@ else if (jdbcType == JdbcType.VARCHAR) try (var ignored = SpringActionController.ignoreSqlUpdates()) { if (jdbcType == JdbcType.VARCHAR) - Table.batchExecute1String(DbSchema.getTemp(), sql1, (ArrayList) sortedParameters); + Table.batchExecute1String(tempSchema, sql1, (ArrayList) sortedParameters); else if (jdbcType == JdbcType.INTEGER) - Table.batchExecute1Integer(DbSchema.getTemp(), sql1, sql100, (ArrayList) sortedParameters); + Table.batchExecute1Integer(tempSchema, sql1, sql100, (ArrayList) sortedParameters); else - Table.batchExecute1Long(DbSchema.getTemp(), sql1, sql100, (ArrayList) sortedParameters); + Table.batchExecute1Long(tempSchema, sql1, sql100, (ArrayList) sortedParameters); } } catch (SQLException e) @@ -125,14 +141,14 @@ else if (jdbcType == JdbcType.INTEGER) String indexSql = "CREATE INDEX IX_Id" + new GUID().toStringNoDashes() + " ON " + tableName + "(Id)"; try (var ignored = SpringActionController.ignoreSqlUpdates()) { - new SqlExecutor(DbSchema.getTemp()).execute(indexSql); + new SqlExecutor(tempSchema).execute(indexSql); } TempTableInfo cacheEntry = tempTableInfo; // Don't bother caching if we're in a transaction // a) The table won't be visible to other connections until we commit // b) It is more likely that this temptable is only used once anyway (e.g. used by a data iterator) - if (!DbSchema.getTemp().getScope().isTransactionActive()) + if (!tempSchema.getScope().isTransactionActive()) _tempTableCache.put(cacheKey, cacheEntry); } diff --git a/api/src/org/labkey/api/data/TempTableInfo.java b/api/src/org/labkey/api/data/TempTableInfo.java index 21b56de3341..dbc6c63cd50 100644 --- a/api/src/org/labkey/api/data/TempTableInfo.java +++ b/api/src/org/labkey/api/data/TempTableInfo.java @@ -19,15 +19,8 @@ import java.util.List; -/** -* User: matt -* Date: Oct 23, 2010 -* Time: 3:08:13 PM -*/ public class TempTableInfo extends SchemaTableInfo { - private final String _tempTableName; - private TempTableTracker _ttt; public TempTableInfo(String name, List cols, List pk) @@ -35,7 +28,7 @@ public TempTableInfo(String name, List cols, List pk) this(DbSchema.getTemp(), name, cols, pk); } - private TempTableInfo(DbSchema schema, String name, List cols, List pk) + public TempTableInfo(DbSchema schema, String name, List cols, List pk) { super(schema, DatabaseTableType.TABLE, name, name, new SQLFragment().appendIdentifier(schema.getName()).append(".").appendIdentifier(name + "$" + new GUID().toStringNoDashes())); @@ -43,9 +36,6 @@ private TempTableInfo(DbSchema schema, String name, List cols, List< // make sure TempTableTracker is initialized _before_ caller executes CREATE TABLE TempTableTracker.init(); - // TODO: Do away with _tempTableName? getSelectName() is synonymous. 
- _tempTableName = getSelectName(); - for (var col : cols) { ((BaseColumnInfo)col).setParentTable(this); @@ -58,15 +48,14 @@ private TempTableInfo(DbSchema schema, String name, List cols, List< public String getTempTableName() { - return _tempTableName; + return getSelectName(); } - /** Call this method when table is physically created */ public void track() { // Remove the schema name and dot - String tableName = _tempTableName.substring(getSchema().getName().length() + 1); + String tableName = getTempTableName().substring(getSchema().getName().length() + 1); _ttt = TempTableTracker.track(tableName, this); } diff --git a/api/src/org/labkey/api/data/dialect/BasePostgreSqlDialect.java b/api/src/org/labkey/api/data/dialect/BasePostgreSqlDialect.java index 3df6920cacb..4a971cca55f 100644 --- a/api/src/org/labkey/api/data/dialect/BasePostgreSqlDialect.java +++ b/api/src/org/labkey/api/data/dialect/BasePostgreSqlDialect.java @@ -291,10 +291,16 @@ public String addReselect(SQLFragment sql, ColumnInfo column, @Nullable String p @Override public SQLFragment appendInClauseSql(SQLFragment sql, @NotNull Collection params) + { + return appendInClauseSql(sql, params, _tempTableInClauseGenerator); + } + + @Override + public SQLFragment appendInClauseSql(SQLFragment sql, @NotNull Collection params, InClauseGenerator tempTableGenerator) { if (params.size() >= TEMPTABLE_GENERATOR_MINSIZE) { - SQLFragment ret = _tempTableInClauseGenerator.appendInClauseSql(sql, params); + SQLFragment ret = tempTableGenerator.appendInClauseSql(sql, params); if (null != ret) return ret; } diff --git a/api/src/org/labkey/api/data/dialect/SqlDialect.java b/api/src/org/labkey/api/data/dialect/SqlDialect.java index 0edb407cf39..5c19524f10c 100644 --- a/api/src/org/labkey/api/data/dialect/SqlDialect.java +++ b/api/src/org/labkey/api/data/dialect/SqlDialect.java @@ -516,7 +516,7 @@ protected Set getJdbcKeywords(SqlExecutor executor) throws SQLException, * @param sql And INSERT or UPDATE statement that needs re-selecting * @param column Column from which to reselect * @param proposedVariable Null to return a result set via code; Not null to select the value into a SQL variable - * @return If proposedVariable is not null then actual variable used in the SQL. Otherwise null. Callers using + * @return If proposedVariable is not null then actual variable used in the SQL. Otherwise, null. Callers using * proposedVariable must use the returned variable name in subsequent code, since it may differ from what was * proposed. 
*/ @@ -527,7 +527,14 @@ protected Set getJdbcKeywords(SqlExecutor executor) throws SQLException, private static final InClauseGenerator DEFAULT_GENERATOR = new ParameterMarkerInClauseGenerator(); + // Most callers should use this method public SQLFragment appendInClauseSql(SQLFragment sql, @NotNull Collection params) + { + return appendInClauseSql(sql, params, null); + } + + // Use in cases where the default temp schema won't do, e.g., you need to apply a large IN clause in an external data source + public SQLFragment appendInClauseSql(SQLFragment sql, @NotNull Collection params, InClauseGenerator tempTableGenerator) { return DEFAULT_GENERATOR.appendInClauseSql(sql, params); } @@ -539,10 +546,10 @@ public SQLFragment appendCaseInsensitiveLikeClause(SQLFragment sql, @NotNull Str String prefixLike = prefix + CompareType.escapeLikePattern(matchStr, escapeChar) + suffix; String escapeToken = " ESCAPE '" + escapeChar + "'"; sql.append(" ") - .append(getCaseInsensitiveLikeOperator()) - .append(" ") - .appendValue(prefixLike) - .append(escapeToken); + .append(getCaseInsensitiveLikeOperator()) + .append(" ") + .appendValue(prefixLike) + .append(escapeToken); return sql; } diff --git a/api/src/org/labkey/api/exp/AbstractFileXarSource.java b/api/src/org/labkey/api/exp/AbstractFileXarSource.java index b52b70d40c3..83211296351 100644 --- a/api/src/org/labkey/api/exp/AbstractFileXarSource.java +++ b/api/src/org/labkey/api/exp/AbstractFileXarSource.java @@ -110,6 +110,7 @@ public boolean shouldIgnoreDataFiles() @Override public String canonicalizeDataFileURL(String dataFileURL) { + dataFileURL = dataFileURL.replace("\\", "/"); Path xarDirectory = getRootPath(); URI uri = FileUtil.createUri(dataFileURL); if (!uri.isAbsolute()) diff --git a/api/src/org/labkey/api/exp/api/ExpProtocolAttachmentParent.java b/api/src/org/labkey/api/exp/api/ExpProtocolAttachmentParent.java index d1340315f73..fef7e961471 100644 --- a/api/src/org/labkey/api/exp/api/ExpProtocolAttachmentParent.java +++ b/api/src/org/labkey/api/exp/api/ExpProtocolAttachmentParent.java @@ -43,6 +43,6 @@ public String getContainerId() @Override public @NotNull AttachmentType getAttachmentType() { - return ExpRunAttachmentType.get(); + return ExpProtocolAttachmentType.get(); } } diff --git a/api/src/org/labkey/api/exp/api/ExpProtocolAttachmentType.java b/api/src/org/labkey/api/exp/api/ExpProtocolAttachmentType.java index 25ee35b78f3..65cd70f0969 100644 --- a/api/src/org/labkey/api/exp/api/ExpProtocolAttachmentType.java +++ b/api/src/org/labkey/api/exp/api/ExpProtocolAttachmentType.java @@ -16,6 +16,7 @@ package org.labkey.api.exp.api; import org.jetbrains.annotations.NotNull; +import org.jetbrains.annotations.Nullable; import org.labkey.api.attachments.AttachmentType; import org.labkey.api.data.SQLFragment; @@ -39,8 +40,8 @@ private ExpProtocolAttachmentType() } @Override - public void addWhereSql(SQLFragment sql, String parentColumn, String documentNameColumn) + public @Nullable SQLFragment getSelectParentEntityIdsSql() { - sql.append(parentColumn).append(" IN (SELECT EntityId FROM ").append(ExperimentService.get().getTinfoProtocol(), "ep").append(")"); + return new SQLFragment("SELECT EntityId FROM ").append(ExperimentService.get().getTinfoProtocol(), "ep"); } } \ No newline at end of file diff --git a/api/src/org/labkey/api/exp/api/ExpRunAttachmentType.java b/api/src/org/labkey/api/exp/api/ExpRunAttachmentType.java index 9be08401bfc..3e9fb3b298c 100644 --- a/api/src/org/labkey/api/exp/api/ExpRunAttachmentType.java +++ 
b/api/src/org/labkey/api/exp/api/ExpRunAttachmentType.java @@ -16,6 +16,7 @@ package org.labkey.api.exp.api; import org.jetbrains.annotations.NotNull; +import org.jetbrains.annotations.Nullable; import org.labkey.api.attachments.AttachmentType; import org.labkey.api.data.SQLFragment; @@ -39,8 +40,8 @@ private ExpRunAttachmentType() } @Override - public void addWhereSql(SQLFragment sql, String parentColumn, String documentNameColumn) + public @Nullable SQLFragment getSelectParentEntityIdsSql() { - sql.append(parentColumn).append(" IN (SELECT EntityId FROM ").append(ExperimentService.get().getTinfoExperimentRun(), "er").append(")"); + return new SQLFragment("SELECT EntityId FROM ").append(ExperimentService.get().getTinfoExperimentRun(), "er"); } } \ No newline at end of file diff --git a/filecontent/src/org/labkey/filecontent/FileSystemAttachmentType.java b/api/src/org/labkey/api/files/FileSystemAttachmentType.java similarity index 79% rename from filecontent/src/org/labkey/filecontent/FileSystemAttachmentType.java rename to api/src/org/labkey/api/files/FileSystemAttachmentType.java index ec7ac0a22e5..b39e09dba99 100644 --- a/filecontent/src/org/labkey/filecontent/FileSystemAttachmentType.java +++ b/api/src/org/labkey/api/files/FileSystemAttachmentType.java @@ -1,47 +1,48 @@ -/* - * Copyright (c) 2017 LabKey Corporation - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.labkey.filecontent; - -import org.jetbrains.annotations.NotNull; -import org.labkey.api.attachments.AttachmentType; -import org.labkey.api.data.CoreSchema; -import org.labkey.api.data.SQLFragment; - -public class FileSystemAttachmentType implements AttachmentType -{ - private static final FileSystemAttachmentType INSTANCE = new FileSystemAttachmentType(); - - public static FileSystemAttachmentType get() - { - return INSTANCE; - } - - private FileSystemAttachmentType() - { - } - - @Override - public @NotNull String getUniqueName() - { - return getClass().getName(); - } - - @Override - public void addWhereSql(SQLFragment sql, String parentColumn, String documentNameColumn) - { - sql.append(parentColumn).append(" IN (SELECT EntityId FROM ").append(CoreSchema.getInstance().getMappedDirectories(), "md").append(")"); - } -} +/* + * Copyright (c) 2017 LabKey Corporation + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.labkey.api.files; + +import org.jetbrains.annotations.NotNull; +import org.jetbrains.annotations.Nullable; +import org.labkey.api.attachments.AttachmentType; +import org.labkey.api.data.CoreSchema; +import org.labkey.api.data.SQLFragment; + +public class FileSystemAttachmentType implements AttachmentType +{ + private static final FileSystemAttachmentType INSTANCE = new FileSystemAttachmentType(); + + public static FileSystemAttachmentType get() + { + return INSTANCE; + } + + private FileSystemAttachmentType() + { + } + + @Override + public @NotNull String getUniqueName() + { + return getClass().getName(); + } + + @Override + public @Nullable SQLFragment getSelectParentEntityIdsSql() + { + return new SQLFragment("SELECT EntityId FROM ").append(CoreSchema.getInstance().getMappedDirectories(), "md"); + } +} diff --git a/api/src/org/labkey/api/migration/AssaySkipContainers.java b/api/src/org/labkey/api/migration/AssaySkipContainers.java new file mode 100644 index 00000000000..8e38eb20a75 --- /dev/null +++ b/api/src/org/labkey/api/migration/AssaySkipContainers.java @@ -0,0 +1,34 @@ +package org.labkey.api.migration; + +import org.labkey.api.util.GUID; + +import java.util.HashSet; +import java.util.Set; +import java.util.concurrent.CopyOnWriteArraySet; + +// Need to make the assay-skip containers available to both experiment and assay +public class AssaySkipContainers +{ + private static final Set SKIP_CONTAINERS = new CopyOnWriteArraySet<>(); + + private AssaySkipContainers() + { + } + + public static void addContainers(Set containers) + { + SKIP_CONTAINERS.addAll(containers); + } + + public static Set getContainers() + { + return SKIP_CONTAINERS; + } + + public static Set getFilteredContainers(Set containers) + { + Set filteredContainers = new HashSet<>(containers); + filteredContainers.removeAll(AssaySkipContainers.getContainers()); + return filteredContainers; + } +} diff --git a/api/src/org/labkey/api/migration/DatabaseMigrationConfiguration.java b/api/src/org/labkey/api/migration/DatabaseMigrationConfiguration.java new file mode 100644 index 00000000000..9d0a549cb12 --- /dev/null +++ b/api/src/org/labkey/api/migration/DatabaseMigrationConfiguration.java @@ -0,0 +1,25 @@ +package org.labkey.api.migration; + +import org.jetbrains.annotations.NotNull; +import org.jetbrains.annotations.Nullable; +import org.labkey.api.data.DbSchema; +import org.labkey.api.data.DbSchemaType; +import org.labkey.api.data.DbScope; +import org.labkey.api.data.TableInfo; +import org.labkey.api.data.TableSelector; + +import java.util.Set; +import java.util.function.Predicate; + +public interface DatabaseMigrationConfiguration +{ + boolean shouldInsertData(); + default void beforeMigration(){} + DbScope getSourceScope(); + DbScope getTargetScope(); + @NotNull Set getSkipSchemas(); + Predicate getColumnNameFilter(); + @Nullable TableSelector getTableSelector(DbSchemaType schemaType, TableInfo sourceTable, TableInfo targetTable, Set selectColumnNames, MigrationSchemaHandler schemaHandler); + default void copyAttachments(DbSchema sourceSchema, DbSchema targetSchema, MigrationSchemaHandler schemaHandler){} + default void afterMigration(){} +} diff --git a/api/src/org/labkey/api/migration/DatabaseMigrationService.java b/api/src/org/labkey/api/migration/DatabaseMigrationService.java new file mode 100644 index 00000000000..ec2c0760a7c --- /dev/null +++ b/api/src/org/labkey/api/migration/DatabaseMigrationService.java @@ -0,0 +1,82 @@ +package org.labkey.api.migration; + +import 
org.apache.logging.log4j.Logger;
+import org.jetbrains.annotations.NotNull;
+import org.jetbrains.annotations.Nullable;
+import org.labkey.api.data.CompareType;
+import org.labkey.api.data.DbSchemaType;
+import org.labkey.api.data.SimpleFilter.FilterClause;
+import org.labkey.api.data.TableInfo;
+import org.labkey.api.query.FieldKey;
+import org.labkey.api.services.ServiceRegistry;
+import org.labkey.api.util.ConfigurationException;
+import org.labkey.api.util.GUID;
+import org.labkey.api.util.logging.LogHelper;
+import org.labkey.vfs.FileLike;
+
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+
+public interface DatabaseMigrationService
+{
+    Logger LOG = LogHelper.getLogger(DatabaseMigrationService.class, "Information about database migration");
+
+    record DataFilter(Set<GUID> containers, String column, FilterClause condition) {}
+
+    static @NotNull DatabaseMigrationService get()
+    {
+        DatabaseMigrationService ret = ServiceRegistry.get().getService(DatabaseMigrationService.class);
+        return ret != null ? ret : new DatabaseMigrationService() {};
+    }
+
+    static void setInstance(DatabaseMigrationService impl)
+    {
+        ServiceRegistry.get().registerService(DatabaseMigrationService.class, impl);
+    }
+
+    default DatabaseMigrationConfiguration getDatabaseMigrationConfiguration(FileLike labkeyRoot, @Nullable String migration)
+    {
+        return new DefaultDatabaseMigrationConfiguration();
+    }
+
+    // By default, no-op implementation that simply logs
+    default void migrate(DatabaseMigrationConfiguration configuration)
+    {
+        LOG.warn("Database migration service is not present; database migration is a premium feature.");
+    }
+
+    // By default, no-op implementations
+    default void registerSchemaHandler(MigrationSchemaHandler schemaHandler) {}
+    default void registerMigrationFilter(MigrationFilter filter) {}
+
+    default @Nullable MigrationFilter getMigrationFilter(String propertyName)
+    {
+        return null;
+    }
+
+    default void copySourceTableToTargetTable(DatabaseMigrationConfiguration configuration, TableInfo sourceTable, TableInfo targetTable, DbSchemaType schemaType, boolean updateSequences, String additionalLogMessage, MigrationSchemaHandler schemaHandler) {}
+    default void updateSequences(TableInfo sourceTable, TableInfo targetTable) {}
+
+    // Helper method that parses a data filter then adds it and its container to the provided collections, coalescing
+    // cases where multiple containers specify the same filter
+    static void addDataFilter(String filterName, List<DataFilter> dataFilters, Set<GUID> filteredContainers, @NotNull GUID guid, String filter)
+    {
+        String[] filterParts = filter.split("=");
+        if (filterParts.length != 2)
+            throw new ConfigurationException("Bad " + filterName + " value; expected <column>=<value>: " + filter);
+
+        if (!filteredContainers.add(guid))
+            throw new ConfigurationException("Duplicate " + filterName + " entry for container " + guid);
+
+        String column = filterParts[0];
+        String value = filterParts[1];
+        FilterClause clause = CompareType.EQUAL.createFilterClause(new FieldKey(null, column), value);
+        // If another container is already using this filter clause, then simply add this guid to that filter.
+        // Otherwise, add a new domain filter to the list.
+        dataFilters.stream()
+            .filter(df -> df.column().equals(column) && df.condition().equals(clause))
+            .findFirst()
+            .ifPresentOrElse(df -> df.containers().add(guid), () -> dataFilters.add(new DataFilter(new HashSet<>(Set.of(guid)), filterParts[0], clause)));
+    }
+}
diff --git a/api/src/org/labkey/api/migration/DefaultDatabaseMigrationConfiguration.java b/api/src/org/labkey/api/migration/DefaultDatabaseMigrationConfiguration.java
new file mode 100644
index 00000000000..7dea199479d
--- /dev/null
+++ b/api/src/org/labkey/api/migration/DefaultDatabaseMigrationConfiguration.java
@@ -0,0 +1,50 @@
+package org.labkey.api.migration;
+
+import org.jetbrains.annotations.NotNull;
+import org.jetbrains.annotations.Nullable;
+import org.labkey.api.data.DbSchemaType;
+import org.labkey.api.data.DbScope;
+import org.labkey.api.data.TableInfo;
+import org.labkey.api.data.TableSelector;
+
+import java.util.Set;
+import java.util.function.Predicate;
+
+public class DefaultDatabaseMigrationConfiguration implements DatabaseMigrationConfiguration
+{
+    @Override
+    public boolean shouldInsertData()
+    {
+        return true;
+    }
+
+    @Override
+    public @Nullable DbScope getSourceScope()
+    {
+        return null;
+    }
+
+    @Override
+    public DbScope getTargetScope()
+    {
+        return null;
+    }
+
+    @Override
+    public @NotNull Set getSkipSchemas()
+    {
+        return Set.of();
+    }
+
+    @Override
+    public Predicate getColumnNameFilter()
+    {
+        return null;
+    }
+
+    @Override
+    public TableSelector getTableSelector(DbSchemaType schemaType, TableInfo sourceTable, TableInfo targetTable, Set selectColumnNames, MigrationSchemaHandler schemaHandler)
+    {
+        return null;
+    }
+}
diff --git a/api/src/org/labkey/api/migration/DefaultMigrationSchemaHandler.java b/api/src/org/labkey/api/migration/DefaultMigrationSchemaHandler.java
new file mode 100644
index 00000000000..019d5f6e30d
--- /dev/null
+++ b/api/src/org/labkey/api/migration/DefaultMigrationSchemaHandler.java
@@ -0,0 +1,314 @@
+package org.labkey.api.migration;
+
+import org.jetbrains.annotations.NotNull;
+import org.jetbrains.annotations.Nullable;
+import org.labkey.api.attachments.AttachmentService;
+import org.labkey.api.attachments.AttachmentType;
+import org.labkey.api.data.ColumnInfo;
+import org.labkey.api.data.CoreSchema;
+import org.labkey.api.data.DatabaseTableType;
+import org.labkey.api.data.DbSchema;
+import org.labkey.api.data.DbSchemaType;
+import org.labkey.api.data.DbScope;
+import org.labkey.api.data.ForeignKey;
+import org.labkey.api.data.InClauseGenerator;
+import org.labkey.api.data.SQLFragment;
+import org.labkey.api.data.SimpleFilter;
+import org.labkey.api.data.SimpleFilter.AndClause;
+import org.labkey.api.data.SimpleFilter.FilterClause;
+import org.labkey.api.data.SimpleFilter.InClause;
+import org.labkey.api.data.SimpleFilter.OrClause;
+import org.labkey.api.data.SimpleFilter.SQLClause;
+import org.labkey.api.data.SqlSelector;
+import org.labkey.api.data.TableInfo;
+import org.labkey.api.data.TableSelector;
+import org.labkey.api.data.TempTableInClauseGenerator;
+import org.labkey.api.migration.DatabaseMigrationService.DataFilter;
+import org.labkey.api.query.FieldKey;
+import org.labkey.api.query.SchemaKey;
+import org.labkey.api.query.TableSorter;
+import org.labkey.api.util.GUID;
+import org.labkey.api.util.StringUtilsLabKey;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.LinkedHashSet;
+import java.util.List;
+import java.util.Set;
+import java.util.stream.Collectors;
+
+public class DefaultMigrationSchemaHandler implements MigrationSchemaHandler
+{
+    private final DbSchema _schema;
+
+    public DefaultMigrationSchemaHandler(DbSchema schema)
+    {
+        _schema = schema;
+    }
+
+    @Override
+    public DbSchema getSchema()
+    {
+        return _schema;
+    }
+
+    @Override
+    public void beforeVerification()
+    {
+    }
+
+    @Override
+    public void beforeSchema()
+    {
+    }
+
+    @Override
+    public List<TableInfo> getTablesToCopy()
+    {
+        Set<TableInfo> sortedTables = new LinkedHashSet<>(TableSorter.sort(getSchema(), true));
+
+        Set<TableInfo> allTables = getSchema().getTableNames().stream()
+            .map(getSchema()::getTable)
+            .collect(Collectors.toCollection(HashSet::new));
+        allTables.removeAll(sortedTables);
+
+        if (!allTables.isEmpty())
+        {
+            DatabaseMigrationService.LOG.info("These tables were removed by TableSorter: {}", allTables);
+        }
+
+        return sortedTables.stream()
+            // Skip all views and virtual tables (e.g., test.Containers2, which is a table on SS but a view on PG)
+            .filter(table -> table.getTableType() == DatabaseTableType.TABLE)
+            .collect(Collectors.toCollection(ArrayList::new)); // Ensure mutable
+    }
+
+    @Override
+    public FilterClause getTableFilterClause(TableInfo sourceTable, Set<GUID> containers)
+    {
+        return getContainerClause(sourceTable, containers);
+    }
+
+    @Override
+    public FilterClause getContainerClause(TableInfo sourceTable, Set<GUID> containers)
+    {
+        FieldKey containerFieldKey = getContainerFieldKey(sourceTable);
+
+        if (containerFieldKey == SITE_WIDE_TABLE)
+            return new SQLClause(new SQLFragment("TRUE"));
+
+        return new InClause(containerFieldKey, containers);
+    }
+
+    @Override
+    public @Nullable FieldKey getContainerFieldKey(TableInfo table)
+    {
+        FieldKey fKey = table.getContainerFieldKey();
+
+        if (fKey != null)
+            return fKey;
+
+        for (ColumnInfo col : table.getColumns())
+        {
+            ForeignKey fk = TableSorter.getForeignKey(table, col, true);
+            if (fk != null)
+            {
+                // Use the table's schema (or a migration schema retrieved from the table's scope), since we want a Migration schema with XML metadata applied
+                DbSchema tableSchema = table.getSchema();
+                DbSchema lookupSchema = fk.getLookupSchemaKey().equals(new SchemaKey(null, tableSchema.getName())) ?
+ tableSchema : + tableSchema.getScope().getSchema(fk.getLookupSchemaName(), DbSchemaType.Migration); + TableInfo lookupTableInfo = lookupSchema.getTable(fk.getLookupTableName()); + if (lookupTableInfo != null) + { + fKey = lookupTableInfo.getContainerFieldKey(); + + if (null == fKey) + { + // Ignore self joins + if (!lookupTableInfo.getName().equalsIgnoreCase(table.getName())) + { + fKey = getContainerFieldKey(lookupTableInfo); + } + } + + if (fKey != null) + return FieldKey.fromParts(col.getFieldKey(), fKey); + } + } + } + + return null; + } + + @Override + public final FilterClause getDomainDataFilterClause(Set copyContainers, Set filteredContainers, List domainFilters, TableInfo sourceTable, Set selectColumnNames) + { + // Filtered case: remove the filtered containers from the unconditional container set + Set otherContainers = new HashSet<>(copyContainers); + otherContainers.removeAll(filteredContainers); + FilterClause ret = getContainerClause(sourceTable, otherContainers); + + OrClause orClause = new OrClause(); + + // Delegate to the MigrationSchemaHandler to add domain-filtered containers back with their special filter applied + domainFilters.forEach(filter -> addDomainDataFilterClause(orClause, filter, sourceTable, selectColumnNames)); + + if (!orClause.getClauses().isEmpty()) + { + orClause.addClause(ret); + ret = orClause; + } + + return ret; + } + + @Override + public void addDomainDataFilterClause(OrClause orClause, DataFilter filter, TableInfo sourceTable, Set selectColumnNames) + { + addDataFilterClause(orClause, filter, sourceTable, selectColumnNames); + } + + // Add a filter and return true if the column exists directly on the table + protected boolean addDataFilterClause(OrClause orClause, DataFilter filter, TableInfo sourceTable, Set selectColumnNames) + { + boolean columnExists = selectColumnNames.contains(filter.column()); + + if (columnExists) + { + // Select all rows in this domain-filtered container that meet its criteria + orClause.addClause( + new AndClause( + getContainerClause(sourceTable, filter.containers()), + filter.condition() + ) + ); + } + + return columnExists; + } + + // Add a clause that selects all rows where the object property with equals the filter value. This + // is only for provisioned tables that lack an ObjectId, MaterialId, or DataId column. + protected void addObjectPropertyClause(OrClause orClause, DataFilter filter, TableInfo sourceTable, int propertyId) + { + SQLFragment flagWhere = new SQLFragment("lsid IN (SELECT ObjectURI FROM exp.Object o INNER JOIN exp.ObjectProperty op ON o.ObjectId = op.ObjectId WHERE StringValue = ? 
AND PropertyId = ?)", filter.condition().getParamVals()[0], propertyId); + + orClause.addClause( + new AndClause( + getContainerClause(sourceTable, filter.containers()), + new SQLClause(flagWhere) + ) + ); + } + + private Integer _commentPropertyId = null; + + protected synchronized int getCommentPropertyId(DbScope scope) + { + if (_commentPropertyId == null) + { + // Get the exp.PropertyDescriptor table from the source scope + TableInfo propertyDescriptor = scope.getSchema("exp", DbSchemaType.Migration).getTable("PropertyDescriptor"); + // Select the PropertyId associated with built-in Flag fields ("urn:exp.labkey.org/#Comment") + Integer propertyId = new TableSelector(propertyDescriptor, Collections.singleton("PropertyId"), new SimpleFilter(FieldKey.fromParts("PropertyURI"), "urn:exp.labkey.org/#Comment"), null).getObject(Integer.class); + if (propertyId == null) + throw new RuntimeException("PropertyDescriptor for built-in Flag field not found"); + else + _commentPropertyId = propertyId; + } + + return _commentPropertyId; + } + + protected String rowsNotCopied(int count) + { + return " " + StringUtilsLabKey.pluralize(count, "row") + " not copied"; + } + + @Override + public void afterTable(TableInfo sourceTable, TableInfo targetTable, SimpleFilter notCopiedFilter) + { + } + + @Override + public void copyAttachments(DatabaseMigrationConfiguration configuration, DbSchema sourceSchema, DbSchema targetSchema, Set copyContainers) + { + // Now that the target tables in this schema have been populated, copy all associated attachments. By + // default, use this handler's attachment types to select from the target tables all EntityIds that might be + // attachment parents (this avoids re-running potentially expensive queries on the source tables). Use the + // set of EntityIds to copy those attachments from the core.Documents table in the source database. Override + // if special behavior is required, for example, AttachmentTypes that use documentNameColumn since that + // requires querying and re-filtering the source tables instead. + getAttachmentTypes().forEach(type -> { + SQLFragment sql = type.getSelectParentEntityIdsSql(); + if (sql != null) + { + Collection entityIds = new SqlSelector(targetSchema, sql).getCollection(String.class); + SQLFragment selectParents = new SQLFragment("Parent"); + // This query against the source database is likely to contain a large IN clause, so use an alternative InClauseGenerator + sourceSchema.getSqlDialect().appendInClauseSql(selectParents, entityIds, getTempTableInClauseGenerator(sourceSchema.getScope())); + copyAttachments(configuration, sourceSchema, new SQLClause(selectParents), type); + } + + // TODO: fail if type.getSelectParentEntityIdsSql() returns null? + // TODO: throw if some registered AttachmentType is not seen + }); + } + + // Creates a TempTableInClauseGenerator that targets the *source* temp schema instead of the default + // DbSchema.getTemp(). Required for large IN clauses used against the source database. + protected InClauseGenerator getTempTableInClauseGenerator(DbScope sourceScope) + { + return new TempTableInClauseGenerator(() -> sourceScope.getSchema("temp", DbSchemaType.Bare)); + } + + private static final Set SEEN = new HashSet<>(); + + // Copy all core.Documents rows that match the provided filter clause + protected void copyAttachments(DatabaseMigrationConfiguration configuration, DbSchema sourceSchema, FilterClause filterClause, AttachmentType... 
type) + { + SEEN.addAll(Arrays.asList(type)); + String additionalMessage = " associated with " + Arrays.stream(type).map(t -> t.getClass().getSimpleName()).collect(Collectors.joining(", ")); + TableInfo sourceDocumentsTable = sourceSchema.getScope().getSchema("core", DbSchemaType.Migration).getTable("Documents"); + TableInfo targetDocumentsTable = CoreSchema.getInstance().getTableInfoDocuments(); + DatabaseMigrationService.get().copySourceTableToTargetTable(configuration, sourceDocumentsTable, targetDocumentsTable, DbSchemaType.Module, false, additionalMessage, new DefaultMigrationSchemaHandler(CoreSchema.getInstance().getSchema()) + { + @Override + public FilterClause getTableFilterClause(TableInfo sourceTable, Set containers) + { + return filterClause; + } + }); + } + + public static void logUnseenAttachmentTypes() + { + Set unseen = new HashSet<>(AttachmentService.get().getAttachmentTypes()); + unseen.removeAll(SEEN); + + if (unseen.isEmpty()) + DatabaseMigrationService.LOG.info("All AttachmentTypes have been seen"); + else + DatabaseMigrationService.LOG.info("These AttachmentTypes have not been seen: {}", unseen.stream().map(type -> type.getClass().getSimpleName()).collect(Collectors.joining(", "))); + } + + @Override + public @NotNull Collection getAttachmentTypes() + { + return List.of(); + } + + @Override + public void afterSchema(DatabaseMigrationConfiguration configuration, DbSchema sourceSchema, DbSchema targetSchema) + { + } + + @Override + public void afterMigration(DatabaseMigrationConfiguration configuration) + { + } +} diff --git a/api/src/org/labkey/api/migration/ExperimentDeleteService.java b/api/src/org/labkey/api/migration/ExperimentDeleteService.java new file mode 100644 index 00000000000..033eea366fa --- /dev/null +++ b/api/src/org/labkey/api/migration/ExperimentDeleteService.java @@ -0,0 +1,27 @@ +package org.labkey.api.migration; + +import org.jetbrains.annotations.NotNull; +import org.labkey.api.services.ServiceRegistry; + +import java.util.Collection; + +public interface ExperimentDeleteService +{ + static @NotNull ExperimentDeleteService get() + { + ExperimentDeleteService ret = ServiceRegistry.get().getService(ExperimentDeleteService.class); + if (ret == null) + throw new IllegalStateException("ExperimentDeleteService not found"); + return ret; + } + + static void setInstance(ExperimentDeleteService impl) + { + ServiceRegistry.get().registerService(ExperimentDeleteService.class, impl); + } + + /** + * Deletes all rows from exp.Data, exp.Object, and related tables associated with the provided ObjectIds + */ + void deleteDataRows(Collection objectIds); +} diff --git a/api/src/org/labkey/api/data/MigrationDbSchema.java b/api/src/org/labkey/api/migration/MigrationDbSchema.java similarity index 76% rename from api/src/org/labkey/api/data/MigrationDbSchema.java rename to api/src/org/labkey/api/migration/MigrationDbSchema.java index cd94dc17c01..4cba722d0c3 100644 --- a/api/src/org/labkey/api/data/MigrationDbSchema.java +++ b/api/src/org/labkey/api/migration/MigrationDbSchema.java @@ -1,5 +1,9 @@ -package org.labkey.api.data; +package org.labkey.api.migration; +import org.labkey.api.data.DbSchema; +import org.labkey.api.data.DbSchemaType; +import org.labkey.api.data.DbScope; +import org.labkey.api.data.SchemaTableInfoFactory; import org.labkey.api.module.Module; import java.util.Map; diff --git a/api/src/org/labkey/api/migration/MigrationFilter.java b/api/src/org/labkey/api/migration/MigrationFilter.java new file mode 100644 index 00000000000..305fd747e9c --- /dev/null
+++ b/api/src/org/labkey/api/migration/MigrationFilter.java @@ -0,0 +1,17 @@ +package org.labkey.api.migration; + +import org.jetbrains.annotations.Nullable; +import org.labkey.api.util.GUID; + +/** + * A MigrationFilter adds support for the named filter property in the migration configuration file. If present, + * saveFilter() is called with the container guid and property value. Modules can register these to present + * module-specific filters. + */ +public interface MigrationFilter +{ + String getName(); + + // Implementations should validate guid nullity + void saveFilter(@Nullable GUID guid, String value); +} diff --git a/api/src/org/labkey/api/migration/MigrationSchemaHandler.java b/api/src/org/labkey/api/migration/MigrationSchemaHandler.java new file mode 100644 index 00000000000..daa6aaa02e3 --- /dev/null +++ b/api/src/org/labkey/api/migration/MigrationSchemaHandler.java @@ -0,0 +1,60 @@ +package org.labkey.api.migration; + +import org.jetbrains.annotations.NotNull; +import org.jetbrains.annotations.Nullable; +import org.labkey.api.attachments.AttachmentType; +import org.labkey.api.data.DbSchema; +import org.labkey.api.data.SimpleFilter; +import org.labkey.api.data.SimpleFilter.FilterClause; +import org.labkey.api.data.SimpleFilter.OrClause; +import org.labkey.api.data.TableInfo; +import org.labkey.api.migration.DatabaseMigrationService.DataFilter; +import org.labkey.api.query.FieldKey; +import org.labkey.api.util.GUID; + +import java.util.Collection; +import java.util.List; +import java.util.Set; + +public interface MigrationSchemaHandler +{ + // Marker for tables to declare themselves as site-wide (no container filtering) + FieldKey SITE_WIDE_TABLE = FieldKey.fromParts("site-wide"); + + DbSchema getSchema(); + + void beforeVerification(); + + void beforeSchema(); + + List getTablesToCopy(); + + // Create a filter clause that selects from all specified containers and (in some overrides) applies table-specific filters + FilterClause getTableFilterClause(TableInfo sourceTable, Set containers); + + // Create a filter clause that selects from all specified containers + FilterClause getContainerClause(TableInfo sourceTable, Set containers); + + // Return the FieldKey that can be used to filter this table by container. Special values SITE_WIDE_TABLE and + // DUMMY_FIELD_KEY can be returned for special behaviors. DUMMY_FIELD_KEY ensures that the handler's custom + // getContainerClause() is always called. SITE_WIDE_TABLE is used to select all rows. + @Nullable FieldKey getContainerFieldKey(TableInfo sourceTable); + + // Create a filter clause that selects all rows from unfiltered containers plus filtered rows from the filtered containers + FilterClause getDomainDataFilterClause(Set copyContainers, Set filteredContainers, List domainFilters, TableInfo sourceTable, Set selectColumnNames); + + void addDomainDataFilterClause(OrClause orClause, DataFilter filter, TableInfo sourceTable, Set selectColumnNames); + + // Do any necessary clean up after the target table has been populated. notCopiedFilter selects all rows in the + // source table that were NOT copied to the target table. (For example, rows in a global table not copied due to + // container filtering or rows in a provisioned table not copied due to domain data filtering.) 
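// (Illustrative sketch only, not part of this change: an implementation of afterTable() might use notCopiedFilter
//  simply to report the rows that were skipped, along these lines:
//
//      @Override
//      public void afterTable(TableInfo sourceTable, TableInfo targetTable, SimpleFilter notCopiedFilter)
//      {
//          long skipped = new TableSelector(sourceTable, notCopiedFilter, null).getRowCount();
//          DatabaseMigrationService.LOG.info("{} rows in {} were not copied", skipped, sourceTable.getName());
//      }
//
//  AssayResultMigrationSchemaHandler and DataClassMigrationSchemaHandler later in this change show real overrides
//  that also delete the exp.Data/exp.Object rows associated with the skipped rows.)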
+ void afterTable(TableInfo sourceTable, TableInfo targetTable, SimpleFilter notCopiedFilter); + + void afterSchema(DatabaseMigrationConfiguration configuration, DbSchema sourceSchema, DbSchema targetSchema); + + void copyAttachments(DatabaseMigrationConfiguration configuration, DbSchema sourceSchema, DbSchema targetSchema, Set copyContainers); + + @NotNull Collection getAttachmentTypes(); + + void afterMigration(DatabaseMigrationConfiguration configuration); +} diff --git a/api/src/org/labkey/api/module/ModuleLoader.java b/api/src/org/labkey/api/module/ModuleLoader.java index 1dfd255e7c0..49d5743d602 100644 --- a/api/src/org/labkey/api/module/ModuleLoader.java +++ b/api/src/org/labkey/api/module/ModuleLoader.java @@ -39,8 +39,6 @@ import org.labkey.api.data.Container; import org.labkey.api.data.ConvertHelper; import org.labkey.api.data.CoreSchema; -import org.labkey.api.data.DatabaseMigrationConfiguration; -import org.labkey.api.data.DatabaseMigrationService; import org.labkey.api.data.DatabaseTableType; import org.labkey.api.data.DbSchema; import org.labkey.api.data.DbSchemaType; @@ -61,6 +59,8 @@ import org.labkey.api.data.TableSelector; import org.labkey.api.data.dialect.DatabaseNotSupportedException; import org.labkey.api.data.dialect.SqlDialect; +import org.labkey.api.migration.DatabaseMigrationConfiguration; +import org.labkey.api.migration.DatabaseMigrationService; import org.labkey.api.module.ModuleUpgrader.Execution; import org.labkey.api.resource.Resource; import org.labkey.api.security.SecurityManager; diff --git a/api/src/org/labkey/api/reports/report/ReportType.java b/api/src/org/labkey/api/reports/report/ReportType.java index a48245a776d..2954f9e5035 100644 --- a/api/src/org/labkey/api/reports/report/ReportType.java +++ b/api/src/org/labkey/api/reports/report/ReportType.java @@ -40,8 +40,8 @@ private ReportType() } @Override - public void addWhereSql(SQLFragment sql, String parentColumn, String documentNameColumn) + public @NotNull SQLFragment getSelectParentEntityIdsSql() { - sql.append(parentColumn).append(" IN (SELECT EntityId FROM ").append(CoreSchema.getInstance().getTableInfoReport(), "reports").append(")"); + return new SQLFragment("SELECT EntityId FROM ").append(CoreSchema.getInstance().getTableInfoReport(), "reports"); } } diff --git a/api/src/org/labkey/api/search/SearchService.java b/api/src/org/labkey/api/search/SearchService.java index 5b09af23b83..4d64b9ce747 100644 --- a/api/src/org/labkey/api/search/SearchService.java +++ b/api/src/org/labkey/api/search/SearchService.java @@ -394,7 +394,7 @@ public String normalizeHref(Path contextPath, Container c) DbSchema getSchema(); - WebPartView getSearchView(boolean includeSubfolders, int textBoxWidth, boolean includeHelpLink, boolean isWebpart); + WebPartView getSearchView(boolean includeSubfolders, int textBoxWidth, boolean includeHelpLink, boolean isWebpart); SearchResult search(SearchOptions options) throws IOException; @@ -462,7 +462,7 @@ public String normalizeHref(Path contextPath, Container c) void addResourceResolver(@NotNull String prefix, @NotNull ResourceResolver resolver); WebdavResource resolveResource(@NotNull String resourceIdentifier); - HttpView getCustomSearchResult(User user, @NotNull String resourceIdentifier); + HttpView getCustomSearchResult(User user, @NotNull String resourceIdentifier); Map getCustomSearchJson(User user, @NotNull String resourceIdentifier); Map> getCustomSearchJsonMap(User user, @NotNull Collection resourceIdentifiers); diff --git 
a/api/src/org/labkey/api/security/AuthenticationLogoType.java b/api/src/org/labkey/api/security/AuthenticationLogoType.java index d469bce41d4..b9e5dcba0dc 100644 --- a/api/src/org/labkey/api/security/AuthenticationLogoType.java +++ b/api/src/org/labkey/api/security/AuthenticationLogoType.java @@ -16,6 +16,7 @@ package org.labkey.api.security; import org.jetbrains.annotations.NotNull; +import org.jetbrains.annotations.Nullable; import org.labkey.api.attachments.AttachmentType; import org.labkey.api.data.CoreSchema; import org.labkey.api.data.SQLFragment; @@ -40,8 +41,8 @@ private AuthenticationLogoType() } @Override - public void addWhereSql(SQLFragment sql, String parentColumn, String documentNameColumn) + public @Nullable SQLFragment getSelectParentEntityIdsSql() { - sql.append(parentColumn).append(" IN (SELECT EntityId FROM ").append(CoreSchema.getInstance().getTableInfoAuthenticationConfigurations(), "acs").append(")"); + return new SQLFragment("SELECT EntityId FROM ").append(CoreSchema.getInstance().getTableInfoAuthenticationConfigurations(), "acs"); } } diff --git a/api/src/org/labkey/api/security/AvatarType.java b/api/src/org/labkey/api/security/AvatarType.java index 04b6c46366c..3446afc1d85 100644 --- a/api/src/org/labkey/api/security/AvatarType.java +++ b/api/src/org/labkey/api/security/AvatarType.java @@ -16,12 +16,13 @@ package org.labkey.api.security; import org.jetbrains.annotations.NotNull; +import org.jetbrains.annotations.Nullable; import org.labkey.api.attachments.AttachmentType; import org.labkey.api.data.CoreSchema; import org.labkey.api.data.SQLFragment; /** - * Identifies avatar files (user-account associated image/icon + * Identifies avatar (user-account associated image/icon) attachments */ public class AvatarType implements AttachmentType { @@ -43,8 +44,8 @@ private AvatarType() } @Override - public void addWhereSql(SQLFragment sql, String parentColumn, String documentNameColumn) + public @Nullable SQLFragment getSelectParentEntityIdsSql() { - sql.append(parentColumn).append(" IN (SELECT EntityId FROM ").append(CoreSchema.getInstance().getTableInfoUsers(), "users").append(")"); + return new SQLFragment("SELECT EntityId FROM ").append(CoreSchema.getInstance().getTableInfoUsers(), "users"); } } diff --git a/api/src/org/labkey/api/study/SpecimenService.java b/api/src/org/labkey/api/study/SpecimenService.java index f7b197d30af..e56b3f55f09 100644 --- a/api/src/org/labkey/api/study/SpecimenService.java +++ b/api/src/org/labkey/api/study/SpecimenService.java @@ -20,6 +20,7 @@ import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import org.labkey.api.annotations.Migrate; +import org.labkey.api.attachments.AttachmentType; import org.labkey.api.data.Container; import org.labkey.api.data.TableInfo; import org.labkey.api.exp.Lsid; @@ -93,6 +94,8 @@ static SpecimenService get() void registerRequestCustomizer(SpecimenRequestCustomizer customizer); + AttachmentType getSpecimenRequestEventType(); + /** Hooks to allow other modules to control a few items about how specimens are treated */ interface SpecimenRequestCustomizer { diff --git a/api/src/org/labkey/api/wiki/WikiService.java b/api/src/org/labkey/api/wiki/WikiService.java index dfae120d75b..1c0f2afc7a6 100644 --- a/api/src/org/labkey/api/wiki/WikiService.java +++ b/api/src/org/labkey/api/wiki/WikiService.java @@ -19,6 +19,7 @@ import org.jetbrains.annotations.Nullable; import org.labkey.api.attachments.AttachmentFile; import org.labkey.api.attachments.AttachmentParent; +import 
org.labkey.api.attachments.AttachmentType; import org.labkey.api.data.Container; import org.labkey.api.data.TableInfo; import org.labkey.api.security.User; @@ -94,4 +95,6 @@ static void setInstance(WikiService impl) */ @Nullable String updateAttachments(Container c, User user, String wikiName, @Nullable List attachmentFiles, @Nullable List deleteAttachmentNames); + + AttachmentType getAttachmentType(); } diff --git a/assay/src/org/labkey/assay/AssayIntegrationTestCase.jsp b/assay/src/org/labkey/assay/AssayIntegrationTestCase.jsp index bf601732f18..ef861a19ca8 100644 --- a/assay/src/org/labkey/assay/AssayIntegrationTestCase.jsp +++ b/assay/src/org/labkey/assay/AssayIntegrationTestCase.jsp @@ -13,9 +13,11 @@ * See the License for the specific language governing permissions and * limitations under the License. */ +<%@ page import="org.apache.commons.collections.MapUtils" %> <%@ page import="org.apache.logging.log4j.LogManager" %> <%@ page import="org.apache.logging.log4j.Logger" %> <%@ page import="org.hamcrest.MatcherAssert" %> +<%@ page import="org.jetbrains.annotations.Nullable" %> <%@ page import="org.junit.After" %> <%@ page import="org.junit.Before" %> <%@ page import="org.junit.Test" %> @@ -47,19 +49,24 @@ <%@ page import="org.labkey.api.exp.api.ExpMaterial" %> <%@ page import="org.labkey.api.exp.api.ExpProtocol" %> <%@ page import="org.labkey.api.exp.api.ExpRun" %> +<%@ page import="org.labkey.api.exp.api.ExpSampleType" %> <%@ page import="org.labkey.api.exp.api.ExperimentService" %> +<%@ page import="org.labkey.api.exp.api.SampleTypeService" %> <%@ page import="org.labkey.api.exp.property.Domain" %> <%@ page import="org.labkey.api.exp.property.DomainProperty" %> <%@ page import="org.labkey.api.exp.property.PropertyService" %> <%@ page import="org.labkey.api.exp.query.ExpSchema" %> +<%@ page import="org.labkey.api.exp.query.SamplesSchema" %> <%@ page import="org.labkey.api.files.FileContentService" %> <%@ page import="org.labkey.api.files.FilesAdminOptions" %> <%@ page import="org.labkey.api.gwt.client.assay.model.GWTProtocol" %> <%@ page import="org.labkey.api.gwt.client.model.GWTDomain" %> <%@ page import="org.labkey.api.gwt.client.model.GWTPropertyDescriptor" %> +<%@ page import="org.labkey.api.pipeline.PipeRoot" %> <%@ page import="org.labkey.api.pipeline.PipelineService" %> <%@ page import="org.labkey.api.query.BatchValidationException" %> <%@ page import="org.labkey.api.query.FieldKey" %> +<%@ page import="org.labkey.api.query.QueryService" %> <%@ page import="org.labkey.api.query.QueryUpdateService" %> <%@ page import="org.labkey.api.query.ValidationException" %> <%@ page import="org.labkey.api.security.User" %> @@ -71,31 +78,24 @@ <%@ page import="org.labkey.api.view.ViewContext" %> <%@ page import="org.labkey.assay.AssayDomainServiceImpl" %> <%@ page import="org.labkey.assay.TsvAssayProvider" %> +<%@ page import="org.labkey.vfs.FileSystemLike" %> <%@ page import="org.springframework.mock.web.MockMultipartHttpServletRequest" %> <%@ page import="java.io.File" %> +<%@ page import="static org.junit.Assert.*" %> +<%@ page import="static org.labkey.api.files.FileContentService.UPLOADED_FILE" %> +<%@ page import="static org.hamcrest.CoreMatchers.hasItem" %> +<%@ page import="static org.hamcrest.CoreMatchers.not" %> +<%@ page import="java.io.IOException" %> <%@ page import="java.nio.charset.StandardCharsets" %> +<%@ page import="static java.util.Collections.emptyList" %> <%@ page import="java.nio.file.Files" %> <%@ page import="java.util.ArrayList" %> +<%@ page import="static 
org.labkey.api.exp.query.SamplesSchema.SCHEMA_SAMPLES" %> <%@ page import="java.util.Collections" %> <%@ page import="java.util.HashSet" %> <%@ page import="java.util.List" %> <%@ page import="java.util.Map" %> <%@ page import="java.util.Set" %> -<%@ page import="static org.junit.Assert.*" %> -<%@ page import="static org.labkey.api.files.FileContentService.UPLOADED_FILE" %> -<%@ page import="static org.hamcrest.CoreMatchers.hasItem" %> -<%@ page import="static org.hamcrest.CoreMatchers.not" %> -<%@ page import="org.labkey.api.exp.api.ExpSampleType" %> -<%@ page import="org.labkey.api.exp.api.SampleTypeService" %> -<%@ page import="static java.util.Collections.emptyList" %> -<%@ page import="org.labkey.api.exp.query.SamplesSchema" %> -<%@ page import="org.labkey.api.query.QueryService" %> -<%@ page import="static org.labkey.api.exp.query.SamplesSchema.SCHEMA_SAMPLES" %> -<%@ page import="org.labkey.api.pipeline.PipeRoot" %> -<%@ page import="org.jetbrains.annotations.Nullable" %> -<%@ page import="java.io.IOException" %> -<%@ page import="org.apache.commons.collections.MapUtils" %> -<%@ page import="org.labkey.vfs.FileSystemLike" %> <%@ page import="static org.junit.Assert.assertEquals" %> <%@ page import="static org.junit.Assert.assertNotEquals" %> @@ -580,6 +580,7 @@ updated.put("ResultProp", 200); updated.put("RowId", resultRowId); errors = new BatchValidationException(); + Thread.sleep(5); // SQL Server timestamps aren't granular enough to guarantee different modified time resultsQUS.updateRows(user, c, Collections.singletonList(updated), null, errors, null, null); // verify result created matches run's created in query table, but result modified now differs from run's created diff --git a/assay/src/org/labkey/assay/AssayModule.java b/assay/src/org/labkey/assay/AssayModule.java index 3f34529b3be..fc095cdbbc3 100644 --- a/assay/src/org/labkey/assay/AssayModule.java +++ b/assay/src/org/labkey/assay/AssayModule.java @@ -39,8 +39,9 @@ import org.labkey.api.data.Container; import org.labkey.api.data.ContainerManager; import org.labkey.api.data.ContainerType; -import org.labkey.api.data.DatabaseMigrationService; -import org.labkey.api.data.DatabaseMigrationService.DefaultMigrationSchemaHandler; +import org.labkey.api.data.SQLFragment; +import org.labkey.api.data.SimpleFilter.FilterClause; +import org.labkey.api.data.SimpleFilter.SQLClause; import org.labkey.api.data.TableInfo; import org.labkey.api.data.UpgradeCode; import org.labkey.api.data.generator.DataGeneratorRegistry; @@ -48,6 +49,9 @@ import org.labkey.api.exp.api.ExpProtocol; import org.labkey.api.exp.api.ExperimentService; import org.labkey.api.exp.property.PropertyService; +import org.labkey.api.migration.AssaySkipContainers; +import org.labkey.api.migration.DatabaseMigrationService; +import org.labkey.api.migration.DefaultMigrationSchemaHandler; import org.labkey.api.module.AdminLinkManager; import org.labkey.api.module.FolderTypeManager; import org.labkey.api.module.Module; @@ -66,6 +70,7 @@ import org.labkey.api.security.roles.RoleManager; import org.labkey.api.usageMetrics.UsageMetricsService; import org.labkey.api.util.ContextListener; +import org.labkey.api.util.GUID; import org.labkey.api.util.JspTestCase; import org.labkey.api.util.PageFlowUtil; import org.labkey.api.util.StartupListener; @@ -296,15 +301,26 @@ public void moduleStartupComplete(ServletContext servletContext) { return PlateTypeTable.NAME.equals(sourceTable.getName()) ? 
SITE_WIDE_TABLE : super.getContainerFieldKey(sourceTable); } + + @Override + // Override to filter the container set + public FilterClause getContainerClause(TableInfo sourceTable, Set containers) + { + return super.getContainerClause(sourceTable, AssaySkipContainers.getFilteredContainers(containers)); + } }); - // Tables in the "assaywell" provisioned schema are all single-container, so no filtering is needed + // Tables in the "assaywell" provisioned schema join to assay.Well to find their container DatabaseMigrationService.get().registerSchemaHandler(new DefaultMigrationSchemaHandler(PlateMetadataDomainKind.getSchema()) { @Override - public @Nullable FieldKey getContainerFieldKey(TableInfo sourceTable) + public FilterClause getContainerClause(TableInfo sourceTable, Set containers) { - return SITE_WIDE_TABLE; + return new SQLClause( + new SQLFragment("LSID IN (SELECT LSID FROM assay.Well WHERE Container") + .appendInClause(AssaySkipContainers.getFilteredContainers(containers), sourceTable.getSqlDialect()) + .append(")") + ); } }); diff --git a/assay/src/org/labkey/assay/AssayResultMigrationSchemaHandler.java b/assay/src/org/labkey/assay/AssayResultMigrationSchemaHandler.java index 218fc955ac6..16e0a57ad82 100644 --- a/assay/src/org/labkey/assay/AssayResultMigrationSchemaHandler.java +++ b/assay/src/org/labkey/assay/AssayResultMigrationSchemaHandler.java @@ -1,10 +1,8 @@ package org.labkey.assay; +import org.apache.commons.lang3.Strings; import org.apache.logging.log4j.Logger; import org.labkey.api.assay.AbstractTsvAssayProvider; -import org.labkey.api.data.DatabaseMigrationService.DataFilter; -import org.labkey.api.data.DatabaseMigrationService.DefaultMigrationSchemaHandler; -import org.labkey.api.data.DatabaseMigrationService.ExperimentDeleteService; import org.labkey.api.data.DbSchema; import org.labkey.api.data.DbSchemaType; import org.labkey.api.data.SQLFragment; @@ -14,10 +12,18 @@ import org.labkey.api.data.SimpleFilter.SQLClause; import org.labkey.api.data.SqlSelector; import org.labkey.api.data.TableInfo; +import org.labkey.api.data.TableSelector; +import org.labkey.api.migration.AssaySkipContainers; +import org.labkey.api.migration.DatabaseMigrationService.DataFilter; +import org.labkey.api.migration.DefaultMigrationSchemaHandler; +import org.labkey.api.migration.ExperimentDeleteService; import org.labkey.api.util.GUID; +import org.labkey.api.util.StringUtilsLabKey; import org.labkey.api.util.logging.LogHelper; +import org.labkey.assay.plate.PlateReplicateStatsDomainKind; import java.util.Collection; +import java.util.Collections; import java.util.Set; class AssayResultMigrationSchemaHandler extends DefaultMigrationSchemaHandler @@ -29,52 +35,66 @@ public AssayResultMigrationSchemaHandler() super(DbSchema.get(AbstractTsvAssayProvider.ASSAY_SCHEMA_NAME, DbSchemaType.Provisioned)); } - // Provisioned assay result tables occasionally have no DataId column; hopefully they have an LSID column. - private boolean hasDataIdColumn(TableInfo sourceTable) + private boolean skipTable(TableInfo sourceTable) { - return sourceTable.getColumn("DataId") != null; + // For now, we're ignoring this table since it's empty in our first migration client's database + return Strings.CI.endsWith(sourceTable.getName(), PlateReplicateStatsDomainKind.ASSAY_PLATE_REPLICATE); } @Override public FilterClause getContainerClause(TableInfo sourceTable, Set containers) { - return new SQLClause( - new SQLFragment(hasDataIdColumn(sourceTable) ? 
"DataId IN (SELECT RowId" : "LSID IN (SELECT LSID") - .append(" FROM exp.Data WHERE Container") - .appendInClause(containers, sourceTable.getSqlDialect()) - .append(")") - ); + final SQLFragment sql; + + if (skipTable(sourceTable)) + { + sql = new SQLFragment("1 = 0"); + } + else + { + sql = new SQLFragment("DataId IN (SELECT RowId FROM exp.Data WHERE Container") + .appendInClause(AssaySkipContainers.getFilteredContainers(containers), sourceTable.getSqlDialect()) + .append(")"); + } + + return new SQLClause(sql); } @Override public void addDomainDataFilterClause(OrClause orClause, DataFilter filter, TableInfo sourceTable, Set selectColumnNames) { - // We want no rows from containers with a domain data filter, so don't add any clauses + // No filtering on assay results for now; just add the passed in containers. Note that these will be filtered + // if AssaySkipContainers is configured. + orClause.addClause(getContainerClause(sourceTable, filter.containers())); } @Override public void afterTable(TableInfo sourceTable, TableInfo targetTable, SimpleFilter notCopiedFilter) { - SQLFragment objectIdSql = new SQLFragment("SELECT ObjectId FROM exp.Data WHERE ") - .append(hasDataIdColumn(sourceTable) ? "RowId IN (SELECT DataId" : "LSID IN (SELECT LSID") - .append(" FROM ") - .appendIdentifier(sourceTable.getSelectName()) - .append(" ") - .append(notCopiedFilter.getSQLFragment(sourceTable.getSqlDialect())) - .append(")"); + if (!skipTable(sourceTable)) + { + SQLFragment objectIdSql = new SQLFragment("SELECT ObjectId FROM exp.Data WHERE RowId IN (SELECT DataId FROM ") + .appendIdentifier(sourceTable.getSelectName()) + .append(" ") + .append(notCopiedFilter.getSQLFragment(sourceTable.getSqlDialect())) + .append(")"); - Collection notCopiedObjectIds = new SqlSelector(sourceTable.getSchema(), objectIdSql).getCollection(Long.class); + Collection notCopiedObjectIds = new SqlSelector(sourceTable.getSchema(), objectIdSql).getCollection(Long.class); - if (notCopiedObjectIds.isEmpty()) - { - LOG.info(rowsNotCopied(0)); - } - else - { - LOG.info("{} -- deleting associated rows from exp.Data, exp.Object, etc.", rowsNotCopied(notCopiedObjectIds.size())); + if (notCopiedObjectIds.isEmpty()) + { + LOG.info(rowsNotCopied(0)); + } + else + { + LOG.info("{} -- deleting associated rows from exp.Data, exp.Object, etc.", rowsNotCopied(notCopiedObjectIds.size())); + + // Delete exp.Data, exp.Object, etc. rows associated with the rows that weren't copied + ExperimentDeleteService.get().deleteDataRows(notCopiedObjectIds); + } - // Delete exp.Data, exp.Object, etc. rows associated with the rows that weren't copied - ExperimentDeleteService.get().deleteDataRows(notCopiedObjectIds); + // TODO: Temp! 
+ LOG.info(" " + StringUtilsLabKey.pluralize(new TableSelector(sourceTable, Collections.singleton("DataId")).stream(Integer.class).distinct().count(), "distinct DataId")); } } } diff --git a/assay/src/org/labkey/assay/actions/ImportRunApiAction.java b/assay/src/org/labkey/assay/actions/ImportRunApiAction.java index 3f7ce48a844..1e53cc32c22 100644 --- a/assay/src/org/labkey/assay/actions/ImportRunApiAction.java +++ b/assay/src/org/labkey/assay/actions/ImportRunApiAction.java @@ -36,17 +36,13 @@ import org.labkey.api.assay.AssayRunUploadContext; import org.labkey.api.assay.AssayUrls; import org.labkey.api.assay.DefaultAssayRunCreator; -import org.labkey.api.audit.AuditLogService; import org.labkey.api.audit.TransactionAuditProvider; -import org.labkey.api.audit.provider.FileSystemAuditProvider; import org.labkey.api.collections.CaseInsensitiveHashMap; import org.labkey.api.data.DbScope; import org.labkey.api.data.TSVMapWriter; import org.labkey.api.dataiterator.MapDataIterator; import org.labkey.api.exp.ExperimentException; import org.labkey.api.exp.api.AssayJSONConverter; -import org.labkey.api.exp.api.DataType; -import org.labkey.api.exp.api.ExpData; import org.labkey.api.exp.api.ExpExperiment; import org.labkey.api.exp.api.ExpProtocol; import org.labkey.api.exp.api.ExpRun; @@ -66,7 +62,6 @@ import org.labkey.api.security.RequiresPermission; import org.labkey.api.security.permissions.InsertPermission; import org.labkey.api.security.permissions.ReadPermission; -import org.labkey.api.util.FileUtil; import org.labkey.api.util.JsonUtil; import org.labkey.api.util.NetworkDrive; import org.labkey.api.util.PageFlowUtil; @@ -74,7 +69,6 @@ import org.labkey.api.view.ActionURL; import org.labkey.api.view.NotFoundException; import org.labkey.api.view.UnauthorizedException; -import org.labkey.assay.FileBasedModuleDataHandler; import org.labkey.vfs.FileLike; import org.labkey.vfs.FileSystemLike; import org.springframework.beans.MutablePropertyValues; @@ -84,19 +78,16 @@ import org.springframework.web.multipart.MultipartFile; import java.io.File; -import java.io.IOException; +import java.nio.file.InvalidPathException; import java.util.Collections; import java.util.HashMap; -import java.util.HashSet; import java.util.List; import java.util.Map; -import java.util.Set; import java.util.stream.Collectors; import static java.util.Collections.emptyMap; import static org.labkey.api.assay.AssayDataCollector.PRIMARY_FILE; import static org.labkey.api.assay.AssayFileWriter.createFile; -import static org.labkey.api.util.FileUtil.toFileForWrite; @ActionNames("importRun") @RequiresPermission(InsertPermission.class) @@ -236,23 +227,31 @@ public ApiResponse execute(ImportRunApiForm form, BindException errors) throws E } else { - // Resolve file under pipeline root - PipeRoot root = PipelineService.get().findPipelineRoot(getContainer()); - if (root == null) - throw new NotFoundException("Pipeline root not configured"); + try + { + // Resolve file under pipeline root + PipeRoot root = PipelineService.get().findPipelineRoot(getContainer()); + if (root == null) + throw new NotFoundException("Pipeline root not configured"); - if (!root.hasPermission(getContainer(), getUser(), ReadPermission.class)) - throw new UnauthorizedException(); + if (!root.hasPermission(getContainer(), getUser(), ReadPermission.class)) + throw new UnauthorizedException(); - // Attempt absolute path first, then relative path from pipeline root - File f = new File(runFilePath); - if (!root.isUnderRoot(f)) - f = root.resolvePath(runFilePath); + // 
Attempt absolute path first, then relative path from pipeline root + File f = new File(runFilePath); + if (!root.isUnderRoot(f)) + f = root.resolvePath(runFilePath); - if (!NetworkDrive.exists(f) || !root.isUnderRoot(f)) - throw new NotFoundException("File not found: " + runFilePath); + if (!NetworkDrive.exists(f) || !root.isUnderRoot(f)) + throw new NotFoundException("File not found: " + runFilePath); - file = f; + file = f; + } + catch (InvalidPathException e) + { + LOG.info("Invalid path: " + runFilePath, e); + throw new NotFoundException("File not found: " + runFilePath); + } } } diff --git a/core/src/org/labkey/core/CoreMigrationSchemaHandler.java b/core/src/org/labkey/core/CoreMigrationSchemaHandler.java index bc33e9c6b62..2e42998c397 100644 --- a/core/src/org/labkey/core/CoreMigrationSchemaHandler.java +++ b/core/src/org/labkey/core/CoreMigrationSchemaHandler.java @@ -1,11 +1,13 @@ package org.labkey.core; +import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; +import org.labkey.api.attachments.AttachmentCache; +import org.labkey.api.attachments.AttachmentType; +import org.labkey.api.attachments.LookAndFeelResourceType; import org.labkey.api.data.CompareType; import org.labkey.api.data.CompareType.CompareClause; import org.labkey.api.data.CoreSchema; -import org.labkey.api.data.DatabaseMigrationConfiguration; -import org.labkey.api.data.DatabaseMigrationService; import org.labkey.api.data.DbSchema; import org.labkey.api.data.DbSchemaType; import org.labkey.api.data.DbScope; @@ -19,15 +21,24 @@ import org.labkey.api.data.Table; import org.labkey.api.data.TableInfo; import org.labkey.api.data.TestSchema; +import org.labkey.api.files.FileSystemAttachmentType; +import org.labkey.api.migration.DatabaseMigrationConfiguration; +import org.labkey.api.migration.DatabaseMigrationService; +import org.labkey.api.migration.DefaultMigrationSchemaHandler; +import org.labkey.api.migration.MigrationFilter; import org.labkey.api.module.ModuleLoader; import org.labkey.api.query.FieldKey; +import org.labkey.api.reports.report.ReportType; +import org.labkey.api.security.AuthenticationLogoType; +import org.labkey.api.security.AvatarType; import org.labkey.api.util.ConfigurationException; import org.labkey.api.util.GUID; +import java.util.Collection; import java.util.List; import java.util.Set; -class CoreMigrationSchemaHandler extends DatabaseMigrationService.DefaultMigrationSchemaHandler implements DatabaseMigrationService.MigrationFilter +class CoreMigrationSchemaHandler extends DefaultMigrationSchemaHandler implements MigrationFilter { static void register() { @@ -35,7 +46,7 @@ static void register() DatabaseMigrationService.get().registerSchemaHandler(schemaHandler); DatabaseMigrationService.get().registerMigrationFilter(schemaHandler); - DatabaseMigrationService.get().registerSchemaHandler(new DatabaseMigrationService.DefaultMigrationSchemaHandler(PropertySchema.getInstance().getSchema()){ + DatabaseMigrationService.get().registerSchemaHandler(new DefaultMigrationSchemaHandler(PropertySchema.getInstance().getSchema()){ @Override public @Nullable FieldKey getContainerFieldKey(TableInfo sourceTable) { @@ -43,7 +54,7 @@ static void register() } }); - DatabaseMigrationService.get().registerSchemaHandler(new DatabaseMigrationService.DefaultMigrationSchemaHandler(TestSchema.getInstance().getSchema()){ + DatabaseMigrationService.get().registerSchemaHandler(new DefaultMigrationSchemaHandler(TestSchema.getInstance().getSchema()){ @Override public List getTablesToCopy() { @@ 
-53,7 +64,7 @@ public List getTablesToCopy() if (ModuleLoader.getInstance().getModule(DbScope.getLabKeyScope(), "vehicle") != null) { - DatabaseMigrationService.get().registerSchemaHandler(new DatabaseMigrationService.DefaultMigrationSchemaHandler(DbSchema.get("vehicle", DbSchemaType.Module)) + DatabaseMigrationService.get().registerSchemaHandler(new DefaultMigrationSchemaHandler(DbSchema.get("vehicle", DbSchemaType.Module)) { @Override public List getTablesToCopy() @@ -95,6 +106,7 @@ public List getTablesToCopy() tablesToCopy.remove(CoreSchema.getInstance().getTableInfoModules()); tablesToCopy.remove(CoreSchema.getInstance().getTableInfoSqlScripts()); tablesToCopy.remove(CoreSchema.getInstance().getTableInfoUpgradeSteps()); + tablesToCopy.remove(CoreSchema.getInstance().getTableInfoDocuments()); return tablesToCopy; } @@ -180,6 +192,41 @@ public void afterSchema(DatabaseMigrationConfiguration configuration, DbSchema s new SqlExecutor(getSchema()).execute("ALTER TABLE core.ViewCategory ADD CONSTRAINT FK_ViewCategory_Parent FOREIGN KEY (Parent) REFERENCES core.ViewCategory(RowId)"); } + @Override + public void copyAttachments(DatabaseMigrationConfiguration configuration, DbSchema sourceSchema, DbSchema targetSchema, Set copyContainers) + { + // Default handling for core's standard attachment types + super.copyAttachments(configuration, sourceSchema, targetSchema, copyContainers); + + // Special handling for LookAndFeelResourceType, which must select from the source database + SQLFragment sql = new SQLFragment() + .append("Parent").appendInClause(copyContainers, sourceSchema.getSqlDialect()) + .append("AND (DocumentName IN (?, ?) OR ") + .add(AttachmentCache.FAVICON_FILE_NAME) + .add(AttachmentCache.STYLESHEET_FILE_NAME) + .append("DocumentName LIKE '" + AttachmentCache.LOGO_FILE_NAME_PREFIX + "%' OR ") + .append("DocumentName LIKE '" + AttachmentCache.MOBILE_LOGO_FILE_NAME_PREFIX + "%')"); + copyAttachments(configuration, sourceSchema, new SQLClause(sql), LookAndFeelResourceType.get()); + } + + @Override + public @NotNull Collection getAttachmentTypes() + { + return List.of( + AuthenticationLogoType.get(), + AvatarType.get(), + FileSystemAttachmentType.get(), + ReportType.get() + ); + } + + @Override + public void afterMigration(DatabaseMigrationConfiguration configuration) + { + // Now that all schemas have copied their attachments into core.Documents, update that table's sequence + DatabaseMigrationService.get().updateSequences(configuration.getSourceScope().getSchema("core", DbSchemaType.Migration).getTable("Documents"), CoreSchema.getInstance().getTableInfoDocuments()); + } + // MigrationFilter implementation below private SQLFragment _groupFilterCondition = null; diff --git a/core/src/org/labkey/core/attachment/AttachmentServiceImpl.java b/core/src/org/labkey/core/attachment/AttachmentServiceImpl.java index 9755d824801..1aa5400126b 100644 --- a/core/src/org/labkey/core/attachment/AttachmentServiceImpl.java +++ b/core/src/org/labkey/core/attachment/AttachmentServiceImpl.java @@ -747,6 +747,12 @@ public void registerAttachmentType(AttachmentType type) ATTACHMENT_TYPE_MAP.put(type.getUniqueName(), type); } + @Override + public Collection getAttachmentTypes() + { + return ATTACHMENT_TYPE_MAP.values(); + } + @Override public HttpView getAdminView(ActionURL currentUrl) { @@ -761,7 +767,7 @@ public HttpView getAdminView(ActionURL currentUrl) // core.Documents for each type is needed to associate the Type values with the associated rows. 
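// Note: the loops below now iterate getAttachmentTypes() (backed by ATTACHMENT_TYPE_MAP), so this admin view and callers of the new accessor see the same set of registered types.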
List selectStatements = new LinkedList<>(); - for (AttachmentType type : ATTACHMENT_TYPE_MAP.values()) + for (AttachmentType type : getAttachmentTypes()) { SQLFragment selectStatement = new SQLFragment(); @@ -785,7 +791,7 @@ public HttpView getAdminView(ActionURL currentUrl) SQLFragment whereSql = new SQLFragment(); String sep = ""; - for (AttachmentType type : ATTACHMENT_TYPE_MAP.values()) + for (AttachmentType type : getAttachmentTypes()) { whereSql.append(sep); sep = " OR"; diff --git a/core/src/org/labkey/core/dialect/PostgreSql92Dialect.java b/core/src/org/labkey/core/dialect/PostgreSql92Dialect.java index 4ec4bf45c5c..8e171ab8689 100644 --- a/core/src/org/labkey/core/dialect/PostgreSql92Dialect.java +++ b/core/src/org/labkey/core/dialect/PostgreSql92Dialect.java @@ -244,7 +244,7 @@ SELECT SchemaName, TableName, ColumnName, LastValue FROM ( s.relkind = 'S' -- Sequence AND t.relkind IN ('r', 'P') -- Table (regular table or partitioned table) AND d.deptype IN ('a', 'i') -- Automatic dependency for DEFAULT or index-related for PK - ) + ) AS x WHERE SchemaName ILIKE ? AND TableName ILIKE ? """, table.getSchema().getName(), diff --git a/devtools/src/org/labkey/devtools/ToolsController.java b/devtools/src/org/labkey/devtools/ToolsController.java index f6a077a417a..b6971385135 100644 --- a/devtools/src/org/labkey/devtools/ToolsController.java +++ b/devtools/src/org/labkey/devtools/ToolsController.java @@ -10,11 +10,16 @@ import org.labkey.api.action.SimpleErrorView; import org.labkey.api.action.SimpleViewAction; import org.labkey.api.action.SpringActionController; +import org.labkey.api.collections.ArrayListValuedTreeMap; +import org.labkey.api.collections.LabKeyCollectors; +import org.labkey.api.data.BaseColumnInfo; import org.labkey.api.data.ColumnInfo; import org.labkey.api.data.DbSchema; import org.labkey.api.data.DbSchemaType; import org.labkey.api.data.DbScope; import org.labkey.api.data.FileSqlScriptProvider; +import org.labkey.api.data.SchemaTableInfo; +import org.labkey.api.data.TableInfo; import org.labkey.api.data.TableInfo.IndexDefinition; import org.labkey.api.data.TableInfo.IndexType; import org.labkey.api.data.dialect.SqlDialect; @@ -60,8 +65,10 @@ import java.nio.file.Path; import java.nio.file.SimpleFileVisitor; import java.nio.file.attribute.BasicFileAttributes; +import java.sql.SQLException; import java.util.Arrays; import java.util.Collection; +import java.util.Comparator; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedList; @@ -76,6 +83,9 @@ import java.util.stream.Stream; import static org.labkey.api.util.DOM.Attribute.style; +import static org.labkey.api.util.DOM.BR; +import static org.labkey.api.util.DOM.DIV; +import static org.labkey.api.util.DOM.at; import static org.labkey.api.util.PageFlowUtil.filter; public class ToolsController extends SpringActionController @@ -732,12 +742,12 @@ public ModelAndView getView(Object o, boolean reshow, BindException errors) new HtmlView(DOM.createHtmlFragment( Arrays.stream(OverlapType.values()).flatMap(type -> Stream.of( - type != OverlapType.UniqueOverlappingNonUnique ? DOM.BR() : null, - DOM.STRONG(StringUtilsLabKey.pluralize(multiMap.get(type).size(), "index has ", "indices have ") + type.getDescription() + ":", DOM.BR()), + type != OverlapType.UniqueOverlappingNonUnique ? 
BR() : null, + DOM.STRONG(StringUtilsLabKey.pluralize(multiMap.get(type).size(), "index has ", "indices have ") + type.getDescription() + ":", BR()), DOM.TABLE( multiMap.get(type).stream() .map(overlap -> DOM.TR( - DOM.TD(DOM.at(style, "width:120px;"), overlap.schemaName()), + DOM.TD(at(style, "width:120px;"), overlap.schemaName()), DOM.TD(type.getMessage(overlap)), "\n" )) @@ -746,7 +756,7 @@ public ModelAndView getView(Object o, boolean reshow, BindException errors) ) )), new HtmlView(DOM.createHtmlFragment( - DOM.BR(), + BR(), new ButtonBuilder("Create SQL Scripts That Drop Overlapping Indices").href(OverlappingIndicesAction.class, getContainer()).usePost()) ) ); @@ -1092,4 +1102,70 @@ protected void dropIndex(Writer writer, String schemaName, String tableName, Str writer.write("DROP INDEX " + dropIndex + " ON " + schemaName + "." + tableName + ";\n"); } } + + @RequiresPermission(AdminPermission.class) + public class ForeignKeysAction extends SimpleViewAction + { + @Override + public ModelAndView getView(Object o, BindException errors) + { + DbScope scope = DbScope.getLabKeyScope(); + MultiValuedMap map = scope.getSchemaNames().stream() + .map(name -> scope.getSchema(name, DbSchemaType.Bare)) + .flatMap(schema -> schema.getTableNames().stream().map(schema::getTable)) + .flatMap(table -> { + try + { + // We're querying the metadata directly (not using cached FK information) because we want to + // capture every FK in the database (not just those owned by the currently deployed modules) and + // we want to ignore "virtual" FKs. + return BaseColumnInfo.createFromDatabaseMetaData(table.getSchema().getName(), (SchemaTableInfo) table, null).stream(); + } + catch (SQLException e) + { + throw new RuntimeException(e); + } + }) + .filter(col -> col.getFk() != null) + .collect(LabKeyCollectors.toMultiValuedMap( + BaseColumnInfo::getFkTableInfo, + col -> col, + () -> new ArrayListValuedTreeMap<>(Comparator.comparing(TableInfo::getSelectName)) + )); + + HtmlString delim = HtmlStringBuilder.of(HtmlString.BR).append("\n").append(HtmlString.NBSP).append(HtmlString.NBSP).getHtmlString(); + HtmlStringBuilder builder = HtmlStringBuilder.of(); + map.asMap().forEach((targetTable, columns) -> builder.append(targetTable.getSchema().getName() + "." + targetTable.getName() + "\n") + .append(delim) + .append(columns.stream().map(column -> { + TableInfo sourceTable = column.getParentTable(); + return HtmlString.of(sourceTable.getSchema().getName() + "." + sourceTable.getName() + "." + column.getName() + "\n"); + }).collect(LabKeyCollectors.joining(delim))) + .append(HtmlString.BR) + .append(HtmlString.BR) + ); + + + return new VBox( + new HtmlView(DOM.createHtmlFragment( + DIV(at(style, "width: 1200px;"), """ + A simple report that shows the incoming foreign keys that target each table in the database. This report is most useful + when attempting to optimize the performance of deletes from a particular target table (and potentially updates to its + PK, though that's not a common operation). Note that all tables and foreign keys in the database are shown here since + they all can affect performance, regardless of whether their owning modules are deployed currently. This report will + be improved in the future by adding index information. 
+ """), + BR() + )), + new HtmlView(builder) + ); + } + + @Override + public void addNavTrail(NavTree root) + { + addBeginNavTrail(root); + root.addChild("Foreign Keys"); + } + } } diff --git a/experiment/src/client/test/integration/AssayImportRunAction.ispec.ts b/experiment/src/client/test/integration/AssayImportRunAction.ispec.ts index 93f2fc05ec5..6cb47738cd0 100644 --- a/experiment/src/client/test/integration/AssayImportRunAction.ispec.ts +++ b/experiment/src/client/test/integration/AssayImportRunAction.ispec.ts @@ -221,9 +221,9 @@ describe('assay-importRun.api', () => { const run = await getRunQueryRow(server, ASSAY_A_NAME, runId, topFolderOptions); const expectedUrl = `/${encodeURIComponent(PROJECT_NAME)}/core-downloadFileLink.view?propertyId=`; const runBatchField = `Batch/${BATCH_FILE_FIELD_NAME}`; - expect(run[runBatchField].value).toEqual(`assaydata/${batchFileName}`); + expect(run[runBatchField].value.replaceAll('\\', '/')).toEqual(`assaydata/${batchFileName}`); expect(run[runBatchField].url).toContain(expectedUrl); - expect(run[RUN_FILE_FIELD_NAME].value).toEqual(`assaydata/${runFileName}`); + expect(run[RUN_FILE_FIELD_NAME].value.replaceAll('\\', '/')).toEqual(`assaydata/${runFileName}`); expect(run[RUN_FILE_FIELD_NAME].url).toContain(expectedUrl); // Verify audit log @@ -504,9 +504,9 @@ describe('assay-importRun.api', () => { const run = await getRunQueryRow(server, ASSAY_A_NAME, runId, topFolderOptions); const expectedUrl = `/${encodeURIComponent(PROJECT_NAME)}/core-downloadFileLink.view?propertyId=`; const runBatchField = `Batch/${BATCH_FILE_FIELD_TWO_NAME}`; - expect(run[runBatchField].value).toEqual(`assaydata/${batchFileName}`); + expect(run[runBatchField].value.replaceAll('\\', '/')).toEqual(`assaydata/${batchFileName}`); expect(run[runBatchField].url).toContain(expectedUrl); - expect(run[RUN_FILE_FIELD_NAME].value).toEqual(`assaydata/${runFileName}`); + expect(run[RUN_FILE_FIELD_NAME].value.replaceAll('\\', '/')).toEqual(`assaydata/${runFileName}`); expect(run[RUN_FILE_FIELD_NAME].url).toContain(expectedUrl); // Verify audit log diff --git a/experiment/src/client/test/integration/utils.ts b/experiment/src/client/test/integration/utils.ts index 29afefd71d8..d6fc55d3081 100644 --- a/experiment/src/client/test/integration/utils.ts +++ b/experiment/src/client/test/integration/utils.ts @@ -463,7 +463,7 @@ async function verifyDomainCreateFailure(server: IntegrationTestServer, domainTy }, {...folderOptions, ...userOptions}); expect(badDomainNameResp['body']['success']).toBeFalsy(); - expect(badDomainNameResp['body']['exception']).toBe(error.replace('REPLACE', badDomainName)); + expect(badDomainNameResp['body']['exception']).toBe(error.replace('REPLACE', () => badDomainName)); } async function verifyDomainUpdateFailure(server: IntegrationTestServer, domainId: number, domainURI: string, dataTypeRowId/*needed for updating dataclass*/: number, badDomainName: string, error: string, folderOptions: RequestOptions, userOptions: RequestOptions, domainFields?: any[]) { @@ -488,7 +488,7 @@ async function verifyDomainUpdateFailure(server: IntegrationTestServer, domainId const badDomainNameResp = await server.post('property', 'saveDomain', updatedDomainPayload, {...folderOptions, ...userOptions}); expect(badDomainNameResp['body']['success']).toBeFalsy(); - expect(badDomainNameResp['body']['exception']).toBe(error.replace('REPLACE', badDomainName)); + expect(badDomainNameResp['body']['exception']).toBe(error.replace('REPLACE', () => badDomainName)); } async function 
verifyDomainCreateSuccess(server: IntegrationTestServer, domainType: string, domainName: string, folderOptions: RequestOptions, userOptions: RequestOptions) { @@ -502,6 +502,8 @@ async function verifyDomainCreateSuccess(server: IntegrationTestServer, domainTy } }, {...folderOptions, ...userOptions}).expect((result) => { const domain = JSON.parse(result.text); + expect(domain).toHaveProperty('domainId'); + expect(domain).toHaveProperty('domainURI'); domainId = domain.domainId; domainURI = domain.domainURI; return true; @@ -546,7 +548,7 @@ export async function checkDomainName(server: IntegrationTestServer, domainType: // spaces should be trimmed before validation await verifyDomainCreateSuccess(server, domainType, ' startWithSpace', folderOptions, userOptions); - const domainName = selectRandomN(alphaNumeric, 2).join('') + selectRandomN(LEGAL_CHARSET, 5).join(''); + const domainName = selectRandomN(alphaNumeric, 2).join('') + selectRandomN(LEGAL_CHARSET, 5).join('').replaceAll(' -', ' _-'); // name may not contain space followed by dash const { domainId, domainURI } = await verifyDomainCreateSuccess(server, domainType, domainName, folderOptions, userOptions); let dataTypeRowId = 0; @@ -626,6 +628,8 @@ export async function verifyRequiredLineageInsertUpdate(server: IntegrationTestS } }, {...topFolderOptions, ...designerReaderOptions}).expect((result) => { const domain = JSON.parse(result.text); + expect(domain).toHaveProperty('domainId'); + expect(domain).toHaveProperty('domainURI'); childDomainId = domain.domainId; childDomainURI = domain.domainURI; return true; diff --git a/experiment/src/org/labkey/experiment/DataClassMigrationSchemaHandler.java b/experiment/src/org/labkey/experiment/DataClassMigrationSchemaHandler.java index eb152a86611..45232046c93 100644 --- a/experiment/src/org/labkey/experiment/DataClassMigrationSchemaHandler.java +++ b/experiment/src/org/labkey/experiment/DataClassMigrationSchemaHandler.java @@ -1,12 +1,9 @@ package org.labkey.experiment; import org.apache.logging.log4j.Logger; +import org.jetbrains.annotations.NotNull; +import org.labkey.api.attachments.AttachmentType; import org.labkey.api.collections.Sets; -import org.labkey.api.data.DatabaseMigrationConfiguration; -import org.labkey.api.data.DatabaseMigrationService; -import org.labkey.api.data.DatabaseMigrationService.DataFilter; -import org.labkey.api.data.DatabaseMigrationService.DefaultMigrationSchemaHandler; -import org.labkey.api.data.DatabaseMigrationService.ExperimentDeleteService; import org.labkey.api.data.DbSchema; import org.labkey.api.data.DbSchemaType; import org.labkey.api.data.DbScope; @@ -22,15 +19,22 @@ import org.labkey.api.data.TableSelector; import org.labkey.api.data.dialect.SqlDialect; import org.labkey.api.exp.api.ExperimentService; +import org.labkey.api.migration.DatabaseMigrationConfiguration; +import org.labkey.api.migration.DatabaseMigrationService; +import org.labkey.api.migration.DatabaseMigrationService.DataFilter; +import org.labkey.api.migration.DefaultMigrationSchemaHandler; +import org.labkey.api.migration.ExperimentDeleteService; import org.labkey.api.query.FieldKey; import org.labkey.api.util.GUID; import org.labkey.api.util.StringUtilsLabKey; import org.labkey.api.util.logging.LogHelper; import org.labkey.experiment.api.DataClassDomainKind; +import org.labkey.experiment.api.ExpDataClassType; import java.util.Collection; import java.util.Collections; import java.util.HashSet; +import java.util.List; import java.util.Set; class DataClassMigrationSchemaHandler extends 
DefaultMigrationSchemaHandler implements ExperimentDeleteService @@ -68,7 +72,7 @@ public FilterClause getContainerClause(TableInfo sourceTable, Set containe @Override public void addDomainDataFilterClause(OrClause orClause, DataFilter filter, TableInfo sourceTable, Set selectColumnNames) { - // Data classes have a built-in Flag field + // Data classes have an implicit Flag field if (filter.column().equalsIgnoreCase("Flag")) { addObjectPropertyClause(orClause, filter, sourceTable, getCommentPropertyId(sourceTable.getSchema().getScope())); @@ -89,7 +93,7 @@ public void afterTable(TableInfo sourceTable, TableInfo targetTable, SimpleFilte // Select all ObjectIds associated with the not-copied rows from the source database. Our notCopiedFilter // works on the data class provisioned table, so we need to use a sub-select (as opposed to a join) to avoid // ambiguous column references. - SQLFragment objectIdSql = new SQLFragment("SELECT ObjectId FROM exp.Data WHERE LSID IN (SELECT LSID FROM ") + SQLFragment objectIdSql = new SQLFragment("SELECT ObjectId FROM exp.Object WHERE ObjectURI IN (SELECT LSID FROM ") .appendIdentifier(sourceTable.getSelectName()) .append(" ") .append(notCopiedFilter.getSQLFragment(sourceTable.getSqlDialect())) @@ -177,7 +181,7 @@ public void afterSchema(DatabaseMigrationConfiguration configuration, DbSchema s TableInfo sourceTable = biologicsSourceSchema.getTable("SequenceIdentity"); TableInfo targetTable = biologicsTargetSchema.getTable("SequenceIdentity"); - DatabaseMigrationService.get().copySourceTableToTargetTable(configuration, sourceTable, targetTable, DbSchemaType.Module, new DefaultMigrationSchemaHandler(biologicsTargetSchema) + DatabaseMigrationService.get().copySourceTableToTargetTable(configuration, sourceTable, targetTable, DbSchemaType.Module, true, null, new DefaultMigrationSchemaHandler(biologicsTargetSchema) { @Override public FilterClause getTableFilterClause(TableInfo sourceTable, Set containers) @@ -188,4 +192,10 @@ public FilterClause getTableFilterClause(TableInfo sourceTable, Set contai }); } } + + @Override + public @NotNull Collection getAttachmentTypes() + { + return List.of(ExpDataClassType.get()); + } } diff --git a/experiment/src/org/labkey/experiment/ExperimentMigrationSchemaHandler.java b/experiment/src/org/labkey/experiment/ExperimentMigrationSchemaHandler.java index 69399581f3f..db5b16f572d 100644 --- a/experiment/src/org/labkey/experiment/ExperimentMigrationSchemaHandler.java +++ b/experiment/src/org/labkey/experiment/ExperimentMigrationSchemaHandler.java @@ -1,11 +1,11 @@ package org.labkey.experiment; import org.apache.logging.log4j.Logger; +import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; +import org.labkey.api.attachments.AttachmentType; import org.labkey.api.data.CompareType; import org.labkey.api.data.CompareType.CompareClause; -import org.labkey.api.data.DatabaseMigrationConfiguration; -import org.labkey.api.data.DatabaseMigrationService.DefaultMigrationSchemaHandler; import org.labkey.api.data.DbSchema; import org.labkey.api.data.SQLFragment; import org.labkey.api.data.SimpleFilter.AndClause; @@ -16,11 +16,16 @@ import org.labkey.api.data.SqlExecutor; import org.labkey.api.data.TableInfo; import org.labkey.api.exp.OntologyManager; +import org.labkey.api.exp.api.ExpProtocolAttachmentType; +import org.labkey.api.exp.api.ExpRunAttachmentType; +import org.labkey.api.migration.DatabaseMigrationConfiguration; +import org.labkey.api.migration.DefaultMigrationSchemaHandler; import 
org.labkey.api.query.FieldKey; import org.labkey.api.util.GUID; import org.labkey.api.util.logging.LogHelper; import org.labkey.experiment.api.ExperimentServiceImpl; +import java.util.Collection; import java.util.List; import java.util.Set; @@ -70,8 +75,17 @@ public List getTablesToCopy() @Override public FilterClause getContainerClause(TableInfo sourceTable, Set containers) { +// Set assayFilteredContainers = assayFilteredContainers(containers); return switch (sourceTable.getName()) { +// case "ExperimentRun", "ProtocolApplication" -> super.getContainerClause(sourceTable, assayFilteredContainers); +// case "Data" -> new AndClause( +// new InClause(FieldKey.fromParts("Container"), containers), +// new OrClause( +// new CompareClause(FieldKey.fromParts("RunId"), CompareType.ISBLANK, null), +// new InClause(FieldKey.fromParts("RunId", "Container"), assayFilteredContainers) +// ) +// ); case "DataInput" -> new AndClause( new InClause(FieldKey.fromParts("DataId", "Container"), containers), new InClause(FieldKey.fromParts("TargetApplicationId", "RunId", "Container"), containers) @@ -154,4 +168,13 @@ public static void deleteObjectIds(SQLFragment objectIdClause) .append(objectIdClause) ); } + + @Override + public @NotNull Collection getAttachmentTypes() + { + return List.of( + ExpProtocolAttachmentType.get(), + ExpRunAttachmentType.get() + ); + } } diff --git a/experiment/src/org/labkey/experiment/ExperimentModule.java b/experiment/src/org/labkey/experiment/ExperimentModule.java index c64f5ea669b..67ab8161805 100644 --- a/experiment/src/org/labkey/experiment/ExperimentModule.java +++ b/experiment/src/org/labkey/experiment/ExperimentModule.java @@ -30,7 +30,6 @@ import org.labkey.api.data.ContainerFilter; import org.labkey.api.data.ContainerManager; import org.labkey.api.data.CoreSchema; -import org.labkey.api.data.DatabaseMigrationService; import org.labkey.api.data.DbSchema; import org.labkey.api.data.JdbcType; import org.labkey.api.data.NameGenerator; @@ -73,6 +72,8 @@ import org.labkey.api.exp.xar.LsidUtils; import org.labkey.api.files.FileContentService; import org.labkey.api.files.TableUpdaterFileListener; +import org.labkey.api.migration.DatabaseMigrationService; +import org.labkey.api.migration.ExperimentDeleteService; import org.labkey.api.module.ModuleContext; import org.labkey.api.module.ModuleLoader; import org.labkey.api.module.SpringModule; @@ -877,7 +878,7 @@ SELECT COUNT(DISTINCT DD.DomainURI) FROM DatabaseMigrationService.get().registerSchemaHandler(new SampleTypeMigrationSchemaHandler()); DataClassMigrationSchemaHandler dcHandler = new DataClassMigrationSchemaHandler(); DatabaseMigrationService.get().registerSchemaHandler(dcHandler); - DatabaseMigrationService.ExperimentDeleteService.setInstance(dcHandler); + ExperimentDeleteService.setInstance(dcHandler); } @Override diff --git a/experiment/src/org/labkey/experiment/SampleTypeMigrationSchemaHandler.java b/experiment/src/org/labkey/experiment/SampleTypeMigrationSchemaHandler.java index c963d1d57e0..4c2d2f89d42 100644 --- a/experiment/src/org/labkey/experiment/SampleTypeMigrationSchemaHandler.java +++ b/experiment/src/org/labkey/experiment/SampleTypeMigrationSchemaHandler.java @@ -1,8 +1,6 @@ package org.labkey.experiment; import org.apache.logging.log4j.Logger; -import org.labkey.api.data.DatabaseMigrationService.DataFilter; -import org.labkey.api.data.DatabaseMigrationService.DefaultMigrationSchemaHandler; import org.labkey.api.data.SQLFragment; import org.labkey.api.data.SimpleFilter; import 
org.labkey.api.data.SimpleFilter.FilterClause; @@ -14,6 +12,8 @@ import org.labkey.api.data.dialect.SqlDialect; import org.labkey.api.exp.OntologyManager; import org.labkey.api.exp.api.SampleTypeDomainKind; +import org.labkey.api.migration.DatabaseMigrationService.DataFilter; +import org.labkey.api.migration.DefaultMigrationSchemaHandler; import org.labkey.api.util.GUID; import org.labkey.api.util.logging.LogHelper; @@ -56,7 +56,7 @@ public void addDomainDataFilterClause(OrClause orClause, DataFilter filter, Tabl { String joinColumnName = getJoinColumnName(sourceTable); - // Select all rows where the built-in flag column equals the filter value + // Select all rows where the implicit flag column equals the filter value orClause.addClause( new SQLClause(new SQLFragment() .appendIdentifier(joinColumnName) diff --git a/experiment/src/org/labkey/experiment/api/ExpDataClassType.java b/experiment/src/org/labkey/experiment/api/ExpDataClassType.java index 5bf36aeea71..be783a6b625 100644 --- a/experiment/src/org/labkey/experiment/api/ExpDataClassType.java +++ b/experiment/src/org/labkey/experiment/api/ExpDataClassType.java @@ -17,6 +17,7 @@ import org.apache.commons.lang3.StringUtils; import org.jetbrains.annotations.NotNull; +import org.jetbrains.annotations.Nullable; import org.labkey.api.attachments.AttachmentType; import org.labkey.api.data.Container; import org.labkey.api.data.ContainerManager; @@ -54,7 +55,7 @@ public static AttachmentType get() } @Override - public void addWhereSql(SQLFragment sql, String parentColumn, String documentNameColumn) + public @Nullable SQLFragment getSelectParentEntityIdsSql() { TableInfo tableInfo = ExperimentService.get().getTinfoDataClass(); @@ -79,10 +80,9 @@ public void addWhereSql(SQLFragment sql, String parentColumn, String documentNam selectStatements.add("\n SELECT " + expressionToExtractObjectId + " AS ID FROM expdataclass." + domain.getStorageTableName() + " WHERE " + where); }); - if (selectStatements.isEmpty()) - sql.append("1 = 0"); // No ExpDataClasses with attachment columns - else - sql.append(parentColumn).append(" IN (").append(StringUtils.join(selectStatements, "\n UNION")).append(")"); + return selectStatements.isEmpty() ? 
+ NO_ENTITY_IDS : // No ExpDataClasses with attachment columns + new SQLFragment(StringUtils.join(selectStatements, "\n UNION")); } } diff --git a/filecontent/src/org/labkey/filecontent/FileContentModule.java b/filecontent/src/org/labkey/filecontent/FileContentModule.java index 5b79bcfdd03..a4dfb2e733a 100644 --- a/filecontent/src/org/labkey/filecontent/FileContentModule.java +++ b/filecontent/src/org/labkey/filecontent/FileContentModule.java @@ -28,6 +28,7 @@ import org.labkey.api.data.TableInfo; import org.labkey.api.exp.property.PropertyService; import org.labkey.api.files.FileContentService; +import org.labkey.api.files.FileSystemAttachmentType; import org.labkey.api.files.view.FilesWebPart; import org.labkey.api.message.digest.DailyMessageDigest; import org.labkey.api.message.settings.MessageConfigService; diff --git a/filecontent/src/org/labkey/filecontent/FileSystemAttachmentParent.java b/filecontent/src/org/labkey/filecontent/FileSystemAttachmentParent.java index e425e51da14..0d7859a9767 100644 --- a/filecontent/src/org/labkey/filecontent/FileSystemAttachmentParent.java +++ b/filecontent/src/org/labkey/filecontent/FileSystemAttachmentParent.java @@ -26,6 +26,7 @@ import org.labkey.api.attachments.AttachmentType; import org.labkey.api.data.Container; import org.labkey.api.files.FileContentService; +import org.labkey.api.files.FileSystemAttachmentType; import org.labkey.api.security.User; import org.labkey.api.util.FileUtil; @@ -247,7 +248,6 @@ public void deleteAttachment(User user, @Nullable String name) LOG.warn(e.getMessage()); } } - }); } } diff --git a/issues/src/org/labkey/issue/IssueMigrationSchemaHandler.java b/issues/src/org/labkey/issue/IssueMigrationSchemaHandler.java index 90e3c4e19eb..cc71ce1bca8 100644 --- a/issues/src/org/labkey/issue/IssueMigrationSchemaHandler.java +++ b/issues/src/org/labkey/issue/IssueMigrationSchemaHandler.java @@ -1,32 +1,36 @@ package org.labkey.issue; import org.apache.logging.log4j.Logger; +import org.jetbrains.annotations.NotNull; +import org.labkey.api.attachments.AttachmentType; import org.labkey.api.collections.CsvSet; -import org.labkey.api.data.DatabaseMigrationConfiguration; -import org.labkey.api.data.DatabaseMigrationService.DefaultMigrationSchemaHandler; import org.labkey.api.data.DbSchema; import org.labkey.api.data.DbSchemaType; import org.labkey.api.data.SQLFragment; import org.labkey.api.data.SimpleFilter; import org.labkey.api.data.SimpleFilter.InClause; -import org.labkey.api.data.SimpleFilter.NotClause; import org.labkey.api.data.SimpleFilter.SQLClause; import org.labkey.api.data.Table; import org.labkey.api.data.TableInfo; import org.labkey.api.data.TableSelector; import org.labkey.api.issues.IssuesSchema; +import org.labkey.api.migration.DatabaseMigrationConfiguration; +import org.labkey.api.migration.DefaultMigrationSchemaHandler; import org.labkey.api.query.FieldKey; import org.labkey.api.util.StringUtilsLabKey; import org.labkey.api.util.logging.LogHelper; +import org.labkey.issue.model.IssueCommentType; +import java.util.Collection; import java.util.HashSet; +import java.util.List; import java.util.Set; public class IssueMigrationSchemaHandler extends DefaultMigrationSchemaHandler { private static final Logger LOG = LogHelper.getLogger(IssueMigrationSchemaHandler.class, "Issue migration status"); - private final Set<Integer> ISSUE_IDS = new HashSet<>(); + private final Set<Integer> COPIED_ISSUE_IDS = new HashSet<>(); public IssueMigrationSchemaHandler() { @@ -38,7 +42,7 @@ public void afterTable(TableInfo sourceTable, TableInfo
targetTable, SimpleFilte { // Collect the issue IDs that were copied into the target table. We're assuming this set is much smaller than // the set of issue IDs that *weren't* copied. - int startSize = ISSUE_IDS.size(); + int startSize = COPIED_ISSUE_IDS.size(); // Join the provisioned table to the issues table to get the IssueIds associated with the rows that were copied SQLClause joinOnEntityId = new SQLClause( @@ -48,32 +52,35 @@ public void afterTable(TableInfo sourceTable, TableInfo targetTable, SimpleFilte ); new TableSelector(IssuesSchema.getInstance().getTableInfoIssues(), new CsvSet("IssueId, EntityId"), new SimpleFilter(joinOnEntityId), null).stream(Integer.class) - .forEach(ISSUE_IDS::add); - LOG.info(" {} added to the IssueId set", StringUtilsLabKey.pluralize(ISSUE_IDS.size() - startSize, "IssueId was", "IssueIds were")); + .forEach(COPIED_ISSUE_IDS::add); + LOG.info(" {} added to the IssueId set", StringUtilsLabKey.pluralize(COPIED_ISSUE_IDS.size() - startSize, "IssueId was", "IssueIds were")); } @Override public void afterSchema(DatabaseMigrationConfiguration configuration, DbSchema sourceSchema, DbSchema targetSchema) { - LOG.info(" Deleting related issues, comments, and issues rows associated with {}", StringUtilsLabKey.pluralize(ISSUE_IDS.size(), "issue")); + LOG.info("{} were copied. Now deleting related issues, comments, and issues rows associated with all issues that were not copied.", StringUtilsLabKey.pluralize(COPIED_ISSUE_IDS.size(), "issue")); - if (!ISSUE_IDS.isEmpty()) - { - // Delete all issues, comments, and related issues that were NOT copied - SimpleFilter deleteRelatedFilter = new SimpleFilter( - new NotClause( - new InClause(FieldKey.fromParts("RelatedIssueId"), ISSUE_IDS) - ) - ); - Table.delete(IssuesSchema.getInstance().getTableInfoRelatedIssues(), deleteRelatedFilter); - SimpleFilter deleteFilter = new SimpleFilter( - new NotClause( - new InClause(FieldKey.fromParts("IssueId"), ISSUE_IDS) - ) - ); - Table.delete(IssuesSchema.getInstance().getTableInfoRelatedIssues(), deleteFilter); - Table.delete(IssuesSchema.getInstance().getTableInfoComments(), deleteFilter); - Table.delete(IssuesSchema.getInstance().getTableInfoIssues(), deleteFilter); - } + // Delete all issues, comments, and related issues that were NOT copied + SimpleFilter deleteRelatedFilter = new SimpleFilter( + new InClause(FieldKey.fromParts("RelatedIssueId"), COPIED_ISSUE_IDS, false, true) // Negated + ); + int deletedRowCount = Table.delete(IssuesSchema.getInstance().getTableInfoRelatedIssues(), deleteRelatedFilter); + LOG.info(" Deleted {} from RelatedIssues (RelatedIssueId)", StringUtilsLabKey.pluralize(deletedRowCount, "row")); + SimpleFilter deleteFilter = new SimpleFilter( + new InClause(FieldKey.fromParts("IssueId"), COPIED_ISSUE_IDS, false, true) // Negated + ); + deletedRowCount = Table.delete(IssuesSchema.getInstance().getTableInfoRelatedIssues(), deleteFilter); + LOG.info(" Deleted {} from RelatedIssues (IssueId)", StringUtilsLabKey.pluralize(deletedRowCount, "row")); + deletedRowCount = Table.delete(IssuesSchema.getInstance().getTableInfoComments(), deleteFilter); + LOG.info(" Deleted {} from Comments", StringUtilsLabKey.pluralize(deletedRowCount, "row")); + deletedRowCount = Table.delete(IssuesSchema.getInstance().getTableInfoIssues(), deleteFilter); + LOG.info(" Deleted {} from Issues", StringUtilsLabKey.pluralize(deletedRowCount, "row")); + } + + @Override + public @NotNull Collection<AttachmentType> getAttachmentTypes() + { + return List.of(IssueCommentType.get()); } } diff --git
a/issues/src/org/labkey/issue/IssuesModule.java b/issues/src/org/labkey/issue/IssuesModule.java index 64591419cf5..51ad08c0d1a 100644 --- a/issues/src/org/labkey/issue/IssuesModule.java +++ b/issues/src/org/labkey/issue/IssuesModule.java @@ -22,13 +22,13 @@ import org.labkey.api.data.Container; import org.labkey.api.data.ContainerManager; import org.labkey.api.data.DataRegion; -import org.labkey.api.data.DatabaseMigrationService; import org.labkey.api.data.SqlExecutor; import org.labkey.api.data.SqlSelector; import org.labkey.api.exp.property.PropertyService; import org.labkey.api.issues.IssueService; import org.labkey.api.issues.IssuesListDefService; import org.labkey.api.issues.IssuesSchema; +import org.labkey.api.migration.DatabaseMigrationService; import org.labkey.api.module.DefaultModule; import org.labkey.api.module.ModuleContext; import org.labkey.api.query.QueryService; diff --git a/issues/src/org/labkey/issue/model/IssueCommentType.java b/issues/src/org/labkey/issue/model/IssueCommentType.java index d2074b563f0..88eeda3106c 100644 --- a/issues/src/org/labkey/issue/model/IssueCommentType.java +++ b/issues/src/org/labkey/issue/model/IssueCommentType.java @@ -40,8 +40,8 @@ private IssueCommentType() } @Override - public void addWhereSql(SQLFragment sql, String parentColumn, String documentNameColumn) + public @NotNull SQLFragment getSelectParentEntityIdsSql() { - sql.append(parentColumn).append(" IN (SELECT EntityId FROM ").append(IssuesSchema.getInstance().getTableInfoComments(), "comments").append(")"); + return new SQLFragment("SELECT EntityId FROM ").append(IssuesSchema.getInstance().getTableInfoComments(), "comments"); } } diff --git a/list/src/org/labkey/list/ListModule.java b/list/src/org/labkey/list/ListModule.java index c98987eafd6..97e96f1220b 100644 --- a/list/src/org/labkey/list/ListModule.java +++ b/list/src/org/labkey/list/ListModule.java @@ -19,6 +19,7 @@ import org.jetbrains.annotations.NotNull; import org.labkey.api.admin.FolderSerializationRegistry; import org.labkey.api.attachments.AttachmentService; +import org.labkey.api.attachments.AttachmentType; import org.labkey.api.audit.AuditLogService; import org.labkey.api.data.Container; import org.labkey.api.data.DbSchema; @@ -30,6 +31,8 @@ import org.labkey.api.exp.property.PropertyService; import org.labkey.api.lists.permissions.DesignListPermission; import org.labkey.api.lists.permissions.ManagePicklistsPermission; +import org.labkey.api.migration.DatabaseMigrationService; +import org.labkey.api.migration.DefaultMigrationSchemaHandler; import org.labkey.api.module.AdminLinkManager; import org.labkey.api.module.ModuleContext; import org.labkey.api.module.SpringModule; @@ -160,6 +163,14 @@ public void startupAfterSpringConfig(ModuleContext moduleContext) return metric; }); } + + DatabaseMigrationService.get().registerSchemaHandler(new DefaultMigrationSchemaHandler(ListSchema.getInstance().getSchema()){ + @Override + public @NotNull Collection<AttachmentType> getAttachmentTypes() + { + return Set.of(ListItemType.get()); + } + }); } @NotNull diff --git a/list/src/org/labkey/list/model/ListImporter.java b/list/src/org/labkey/list/model/ListImporter.java index 12f9c6c1786..32a625c893a 100644 --- a/list/src/org/labkey/list/model/ListImporter.java +++ b/list/src/org/labkey/list/model/ListImporter.java @@ -354,14 +354,14 @@ public void processMany(VirtualFile listsDir, Container c, User user, List - Map<String, String> fileTypeMap = new HashMap<>(); + Map<String, String> fileNameMap = new HashMap<>(); - //get corresponding data file name and extension + //get corresponding list
and data file names for (String f : listsDir.list()) { if (f.endsWith(".tsv") || f.endsWith(".xlsx") || f.endsWith(".xls")) { - fileTypeMap.put(FileUtil.makeLegalName(FileUtil.getBaseName(f)), FileUtil.getExtension(f)); + fileNameMap.put(FileUtil.getBaseName(FileUtil.makeLegalName(f)), f); } } @@ -371,17 +371,15 @@ public void processMany(VirtualFile listsDir, Container c, User user, List } @Override - public WebPartView getSearchView(boolean includeSubfolders, int textBoxWidth, boolean includeHelpLink, boolean isWebpart) + public SearchWebPart getSearchView(boolean includeSubfolders, int textBoxWidth, boolean includeHelpLink, boolean isWebpart) { return new SearchWebPart(includeSubfolders, textBoxWidth, includeHelpLink, isWebpart); } diff --git a/specimen/src/org/labkey/specimen/SpecimenServiceImpl.java b/specimen/src/org/labkey/specimen/SpecimenServiceImpl.java index 3ed5279f565..5be823cc36d 100644 --- a/specimen/src/org/labkey/specimen/SpecimenServiceImpl.java +++ b/specimen/src/org/labkey/specimen/SpecimenServiceImpl.java @@ -19,6 +19,7 @@ import org.apache.logging.log4j.Logger; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; +import org.labkey.api.attachments.AttachmentType; import org.labkey.api.data.Container; import org.labkey.api.data.DbSchema; import org.labkey.api.data.PropertyManager; @@ -52,6 +53,7 @@ import org.labkey.api.view.ActionURL; import org.labkey.api.view.ViewBackgroundInfo; import org.labkey.specimen.importer.SpecimenColumn; +import org.labkey.specimen.model.SpecimenRequestEventType; import org.labkey.specimen.pipeline.SpecimenReloadJob; import org.labkey.specimen.requirements.SpecimenRequestRequirementProvider; @@ -366,6 +368,12 @@ public void registerRequestCustomizer(SpecimenRequestCustomizer customizer) _specimenRequestCustomizer = customizer; } + @Override + public AttachmentType getSpecimenRequestEventType() + { + return SpecimenRequestEventType.get(); + } + @Override public void fireSpecimensChanged(Container c, User user, Logger logger) { diff --git a/specimen/src/org/labkey/specimen/model/SpecimenRequestEventType.java b/specimen/src/org/labkey/specimen/model/SpecimenRequestEventType.java index a7bcd30cf03..b14f4d1d179 100644 --- a/specimen/src/org/labkey/specimen/model/SpecimenRequestEventType.java +++ b/specimen/src/org/labkey/specimen/model/SpecimenRequestEventType.java @@ -16,6 +16,7 @@ package org.labkey.specimen.model; import org.jetbrains.annotations.NotNull; +import org.jetbrains.annotations.Nullable; import org.labkey.api.attachments.AttachmentType; import org.labkey.api.data.SQLFragment; import org.labkey.api.specimen.SpecimenSchema; @@ -40,8 +41,8 @@ private SpecimenRequestEventType() } @Override - public void addWhereSql(SQLFragment sql, String parentColumn, String documentNameColumn) + public @Nullable SQLFragment getSelectParentEntityIdsSql() { - sql.append(parentColumn).append(" IN (SELECT EntityId FROM ").append(SpecimenSchema.get().getTableInfoSampleRequestEvent(), "sre").append(")"); + return new SQLFragment("SELECT EntityId FROM ").append(SpecimenSchema.get().getTableInfoSampleRequestEvent(), "sre"); } } \ No newline at end of file diff --git a/study/src/org/labkey/study/StudyModule.java b/study/src/org/labkey/study/StudyModule.java index 63b2e9f25b8..8df47d6aa96 100644 --- a/study/src/org/labkey/study/StudyModule.java +++ b/study/src/org/labkey/study/StudyModule.java @@ -25,11 +25,12 @@ import org.labkey.api.admin.FolderSerializationRegistry; import 
org.labkey.api.admin.notification.NotificationService; import org.labkey.api.attachments.AttachmentService; +import org.labkey.api.attachments.AttachmentType; import org.labkey.api.audit.AuditLogService; import org.labkey.api.data.Container; import org.labkey.api.data.ContainerManager; -import org.labkey.api.data.DatabaseMigrationService; -import org.labkey.api.data.DatabaseMigrationService.DefaultMigrationSchemaHandler; +import org.labkey.api.data.DbSchema; +import org.labkey.api.data.DbSchemaType; import org.labkey.api.data.PropertySchema; import org.labkey.api.data.SqlExecutor; import org.labkey.api.data.SqlSelector; @@ -44,6 +45,8 @@ import org.labkey.api.files.FileContentService; import org.labkey.api.files.TableUpdaterFileListener; import org.labkey.api.message.digest.ReportAndDatasetChangeDigestProvider; +import org.labkey.api.migration.DatabaseMigrationService; +import org.labkey.api.migration.DefaultMigrationSchemaHandler; import org.labkey.api.module.AdminLinkManager; import org.labkey.api.module.DefaultFolderType; import org.labkey.api.module.FolderTypeManager; @@ -81,6 +84,7 @@ import org.labkey.api.specimen.model.PrimaryTypeDomainKind; import org.labkey.api.specimen.model.SpecimenDomainKind; import org.labkey.api.specimen.model.SpecimenEventDomainKind; +import org.labkey.api.specimen.model.SpecimenTablesProvider; import org.labkey.api.specimen.model.VialDomainKind; import org.labkey.api.study.ParticipantCategory; import org.labkey.api.study.SpecimenService; @@ -537,6 +541,19 @@ SELECT COUNT(DISTINCT DD.DomainURI) FROM { return "StudySnapshot".equals(sourceTable.getName()) ? FieldKey.fromParts("Source") : super.getContainerFieldKey(sourceTable); } + + @Override + public @NotNull Collection getAttachmentTypes() + { + SpecimenService ss = SpecimenService.get(); + + return ss != null ? + List.of( + ProtocolDocumentType.get(), + ss.getSpecimenRequestEventType() + ) : + List.of(ProtocolDocumentType.get()); + } }); DatabaseMigrationService.get().registerSchemaHandler(new DefaultMigrationSchemaHandler(StudySchema.getInstance().getDatasetSchema()) @@ -548,6 +565,16 @@ SELECT COUNT(DISTINCT DD.DomainURI) FROM return SITE_WIDE_TABLE; } }); + + DatabaseMigrationService.get().registerSchemaHandler(new DefaultMigrationSchemaHandler(DbSchema.get(SpecimenTablesProvider.SCHEMA_NAME, DbSchemaType.Provisioned)) + { + @Override + public @Nullable FieldKey getContainerFieldKey(TableInfo sourceTable) + { + // The "_specimen" tables lack both a container column and an FK to a table that does, but they're single-container tables + return sourceTable.getName().endsWith("_specimen") ? 
SITE_WIDE_TABLE : super.getContainerFieldKey(sourceTable); + } + }); } @Override diff --git a/study/src/org/labkey/study/model/ProtocolDocumentType.java b/study/src/org/labkey/study/model/ProtocolDocumentType.java index 88f08432613..25b25aa616b 100644 --- a/study/src/org/labkey/study/model/ProtocolDocumentType.java +++ b/study/src/org/labkey/study/model/ProtocolDocumentType.java @@ -16,6 +16,7 @@ package org.labkey.study.model; import org.jetbrains.annotations.NotNull; +import org.jetbrains.annotations.Nullable; import org.labkey.api.attachments.AttachmentType; import org.labkey.api.data.SQLFragment; import org.labkey.study.StudySchema; @@ -40,8 +41,8 @@ private ProtocolDocumentType() } @Override - public void addWhereSql(SQLFragment sql, String parentColumn, String documentNameColumn) + public @Nullable SQLFragment getSelectParentEntityIdsSql() { - sql.append(parentColumn).append(" IN (SELECT ProtocolDocumentEntityId FROM ").append(StudySchema.getInstance().getTableInfoStudy(), "s").append(")"); + return new SQLFragment("SELECT ProtocolDocumentEntityId FROM ").append(StudySchema.getInstance().getTableInfoStudy(), "s"); } } diff --git a/wiki/src/org/labkey/wiki/WikiManager.java b/wiki/src/org/labkey/wiki/WikiManager.java index 4cc65405268..fafd3cd108f 100644 --- a/wiki/src/org/labkey/wiki/WikiManager.java +++ b/wiki/src/org/labkey/wiki/WikiManager.java @@ -29,6 +29,7 @@ import org.labkey.api.attachments.AttachmentParent; import org.labkey.api.attachments.AttachmentService; import org.labkey.api.attachments.AttachmentService.DuplicateFilenameException; +import org.labkey.api.attachments.AttachmentType; import org.labkey.api.data.Container; import org.labkey.api.data.ContainerService; import org.labkey.api.data.CoreSchema; @@ -70,6 +71,7 @@ import org.labkey.api.wiki.WikiRenderingService.SubstitutionMode; import org.labkey.api.wiki.WikiService; import org.labkey.wiki.model.Wiki; +import org.labkey.wiki.model.WikiType; import org.labkey.wiki.model.WikiVersion; import org.labkey.wiki.model.WikiVersionsGrid; import org.labkey.wiki.model.WikiView; @@ -1056,6 +1058,12 @@ public void deleteWiki(Container c, User user, String wikiName, boolean deleteSu return null; } + @Override + public AttachmentType getAttachmentType() + { + return WikiType.get(); + } + public static class TestCase extends Assert { WikiManager _m = null; @@ -1074,7 +1082,6 @@ public void testSchema() assertNotNull(_m.comm.getTableInfoPages().getColumn("EntityId")); assertNotNull(_m.comm.getTableInfoPages().getColumn("Name")); - assertNotNull("couldn't find table PageVersions", _m.comm.getTableInfoPageVersions()); assertNotNull(_m.comm.getTableInfoPageVersions().getColumn("PageEntityId")); assertNotNull(_m.comm.getTableInfoPageVersions().getColumn("Title")); diff --git a/wiki/src/org/labkey/wiki/WikiModule.java b/wiki/src/org/labkey/wiki/WikiModule.java index 2287744d497..6e6f4c4e18e 100644 --- a/wiki/src/org/labkey/wiki/WikiModule.java +++ b/wiki/src/org/labkey/wiki/WikiModule.java @@ -25,12 +25,7 @@ import org.labkey.api.attachments.AttachmentService; import org.labkey.api.data.Container; import org.labkey.api.data.ContainerManager; -import org.labkey.api.data.DatabaseMigrationConfiguration; -import org.labkey.api.data.DatabaseMigrationService; -import org.labkey.api.data.DatabaseMigrationService.DefaultMigrationSchemaHandler; -import org.labkey.api.data.DbSchema; import org.labkey.api.data.SqlExecutor; -import org.labkey.api.data.TableInfo; import org.labkey.api.module.CodeOnlyModule; import 
org.labkey.api.module.ModuleContext; import org.labkey.api.module.ModuleLoader; @@ -122,32 +117,6 @@ public void doStartup(ModuleContext moduleContext) WikiSchema.register(this); WikiController.registerAdminConsoleLinks(); - DatabaseMigrationService.get().registerSchemaHandler(new DefaultMigrationSchemaHandler(CommSchema.getInstance().getSchema()) - { - @Override - public void beforeSchema() - { - new SqlExecutor(getSchema()).execute("ALTER TABLE comm.Pages DROP CONSTRAINT FK_Pages_PageVersions"); - new SqlExecutor(getSchema()).execute("ALTER TABLE comm.Pages DROP CONSTRAINT FK_Pages_Parent"); - } - - @Override - public List getTablesToCopy() - { - List tablesToCopy = super.getTablesToCopy(); - tablesToCopy.add(CommSchema.getInstance().getTableInfoPages()); - tablesToCopy.add(CommSchema.getInstance().getTableInfoPageVersions()); - - return tablesToCopy; - } - - @Override - public void afterSchema(DatabaseMigrationConfiguration configuration, DbSchema sourceSchema, DbSchema targetSchema) - { - new SqlExecutor(getSchema()).execute("ALTER TABLE comm.Pages ADD CONSTRAINT FK_Pages_PageVersions FOREIGN KEY (PageVersionId) REFERENCES comm.PageVersions (RowId)"); - new SqlExecutor(getSchema()).execute("ALTER TABLE comm.Pages ADD CONSTRAINT FK_Pages_Parent FOREIGN KEY (Parent) REFERENCES comm.Pages (RowId)"); - } - }); } private void bootstrap(ModuleContext moduleContext) diff --git a/wiki/src/org/labkey/wiki/model/WikiType.java b/wiki/src/org/labkey/wiki/model/WikiType.java index 6ca8ec4b5e7..d45905f108f 100644 --- a/wiki/src/org/labkey/wiki/model/WikiType.java +++ b/wiki/src/org/labkey/wiki/model/WikiType.java @@ -16,6 +16,7 @@ package org.labkey.wiki.model; import org.jetbrains.annotations.NotNull; +import org.jetbrains.annotations.Nullable; import org.labkey.api.announcements.CommSchema; import org.labkey.api.attachments.AttachmentType; import org.labkey.api.data.SQLFragment; @@ -40,8 +41,8 @@ public static AttachmentType get() } @Override - public void addWhereSql(SQLFragment sql, String parentColumn, String documentNameColumn) + public @Nullable SQLFragment getSelectParentEntityIdsSql() { - sql.append(parentColumn).append(" IN (SELECT EntityId FROM ").append(CommSchema.getInstance().getTableInfoPages(), "pages").append(")"); + return new SQLFragment("SELECT EntityId FROM ").append(CommSchema.getInstance().getTableInfoPages(), "pages"); } }
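
Editor's note (not part of the patch): the addWhereSql() -> getSelectParentEntityIdsSql() refactoring repeated across AnnouncementType, IssueCommentType, SpecimenRequestEventType, ProtocolDocumentType, WikiType, and ExpDataClassType means each AttachmentType now returns only the inner SELECT of parent EntityIds, and the caller assembles the WHERE predicate once. The consuming code is not included in this diff; the sketch below is an illustrative guess at how a caller such as AttachmentServiceImpl might rebuild the old predicate from the new contract. The method name buildTypeWhereClause is hypothetical, NO_ENTITY_IDS is assumed to be a shared sentinel SQLFragment declared on AttachmentType (it appears unqualified in the ExpDataClassType change), and the null-means-unfiltered behavior is an assumption.

    // Illustrative sketch only -- not part of this patch.
    private SQLFragment buildTypeWhereClause(AttachmentType type, String parentColumn)
    {
        SQLFragment selectParentEntityIds = type.getSelectParentEntityIdsSql();

        if (selectParentEntityIds == null)
            return new SQLFragment("1 = 1");  // assumption: null means the type does not restrict candidate parents

        if (selectParentEntityIds == AttachmentType.NO_ENTITY_IDS)
            return new SQLFragment("1 = 0");  // mirrors the old ExpDataClassType fallback when no attachment columns exist

        // Equivalent of the old addWhereSql() pattern: parentColumn IN (SELECT EntityId FROM ...)
        return new SQLFragment(parentColumn)
                .append(" IN (")
                .append(selectParentEntityIds)
                .append(")");
    }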