opendj-server-legacy/src/main/java/org/opends/server/api/Backend.java
@@ -975,22 +975,6 @@
   }
 
-  /**
-   * Returns if <tt>exclude</tt> contains at least a DN or <tt>include</tt> contains anything else than one of
-   * <tt>baseDNs</tt>.
-   *
-   * @param baseDNs the baseDNs of a backend
-   * @param includeBranches a set of include DNs
-   * @param excludeBranches a set of exclude DNs
-   * @return true if <tt>exclude</tt> contains at least a DN or <tt>include</tt> contains anything else than one of
-   *         <tt>baseDNs</tt>
-   */
-  public static boolean importIncludesOrExcludesBranches(Collection<DN> baseDNs, Collection<DN> includeBranches,
-      Collection<DN> excludeBranches)
-  {
-    return !excludeBranches.isEmpty() || includeBranches.size() != 1 || !baseDNs.containsAll(includeBranches);
-  }
-
   /**
    * Indicates whether a backend should be used to handle operations
    * for the provided entry given the set of base DNs and exclude DNs.
    *
opendj-server-legacy/src/main/java/org/opends/server/backends/pluggable/BackendImpl.java
@@ -55,6 +55,7 @@
 import org.opends.server.api.MonitorProvider;
 import org.opends.server.backends.RebuildConfig;
 import org.opends.server.backends.VerifyConfig;
+import org.opends.server.backends.pluggable.ImportSuffixCommand.SuffixImportStrategy;
 import org.opends.server.backends.pluggable.spi.Storage;
 import org.opends.server.backends.pluggable.spi.Storage.AccessMode;
 import org.opends.server.backends.pluggable.spi.StorageInUseException;
@@ -664,12 +665,15 @@
     {
       throw new DirectoryException(getServerErrorResultCode(), ERR_IMPORT_BACKEND_ONLINE.get());
     }
-    if (importIncludesOrExcludesBranches(cfg.getBaseDN(), importConfig.getIncludeBranches(),
-        importConfig.getExcludeBranches()))
+    for (DN dn : cfg.getBaseDN())
     {
+      ImportSuffixCommand openMethod = new ImportSuffixCommand(dn, importConfig);
+      if (openMethod.getSuffixImportStrategy() == SuffixImportStrategy.MERGE_DB_WITH_LDIF)
+      {
       // fail-fast to avoid ending up in an unrecoverable state for the server
       throw new DirectoryException(ResultCode.UNWILLING_TO_PERFORM, ERR_IMPORT_UNSUPPORTED_WITH_BRANCH.get());
+      }
     }
 
     try
     {
opendj-server-legacy/src/main/java/org/opends/server/backends/pluggable/ImportSuffixCommand.java
New file
@@ -0,0 +1,185 @@
+/*
+ * CDDL HEADER START
+ *
+ * The contents of this file are subject to the terms of the
+ * Common Development and Distribution License, Version 1.0 only
+ * (the "License"). You may not use this file except in compliance
+ * with the License.
+ *
+ * You can obtain a copy of the license at legal-notices/CDDLv1_0.txt
+ * or http://forgerock.org/license/CDDLv1.0.html.
+ * See the License for the specific language governing permissions
+ * and limitations under the License.
+ *
+ * When distributing Covered Code, include this CDDL HEADER in each
+ * file and include the License file at legal-notices/CDDLv1_0.txt.
+ * If applicable, add the following below this CDDL HEADER, with the
+ * fields enclosed by brackets "[]" replaced with your own identifying
+ * information:
+ *      Portions Copyright [yyyy] [name of copyright owner]
+ *
+ * CDDL HEADER END
+ *
+ *
+ *      Copyright 2015 ForgeRock AS
+ */
+package org.opends.server.backends.pluggable;
+
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Set;
+
+import org.opends.server.types.DN;
+import org.opends.server.types.DirectoryException;
+import org.opends.server.types.LDIFImportConfig;
+
+/**
+ * Command that describes how a suffix should be imported. Gives the strategy to use and the data to drive the
+ * import operation of a single suffix.
+ */
+public class ImportSuffixCommand
+{
+  /** Strategy for importing a suffix. */
+  public static enum SuffixImportStrategy
+  {
+    /**
+     * Create a {@link Suffix} specifying just the {@link EntryContainer} for the baseDN, no include or exclude
+     * branches are needed, normally used for append or clear backend modes.
+     */
+    APPEND_OR_REPLACE,
+    /**
+     * Do not create a {@link Suffix}.
+     */
+    SKIP_SUFFIX,
+    /**
+     * Before creating a {@link Suffix}, clear the {@link EntryContainer} of the baseDN.
+     */
+    CLEAR_SUFFIX,
+    /**
+     * Create a temporary {@link EntryContainer} to merge LDIF with original data.
+     */
+    MERGE_DB_WITH_LDIF,
+    /**
+     * Create a {@link Suffix} specifying include and exclude branches and optionally a source {@link EntryContainer}.
+     */
+    INCLUDE_EXCLUDE_BRANCHES
+  };
+
+  private List<DN> includeBranches;
+  private List<DN> excludeBranches;
+  private SuffixImportStrategy strategy = SuffixImportStrategy.APPEND_OR_REPLACE;
+
+  List<DN> getIncludeBranches()
+  {
+    return includeBranches;
+  }
+
+  List<DN> getExcludeBranches()
+  {
+    return excludeBranches;
+  }
+
+  SuffixImportStrategy getSuffixImportStrategy()
+  {
+    return strategy;
+  }
+
+  ImportSuffixCommand(DN baseDN, LDIFImportConfig importCfg) throws DirectoryException
+  {
+    strategy = decideSuffixStrategy(baseDN, importCfg);
+  }
+
+  private SuffixImportStrategy decideSuffixStrategy(DN baseDN, LDIFImportConfig importCfg) throws DirectoryException
+  {
+    if (importCfg.appendToExistingData() || importCfg.clearBackend())
+    {
+      return SuffixImportStrategy.APPEND_OR_REPLACE;
+    }
+    if (importCfg.getExcludeBranches().contains(baseDN))
+    {
+      // This entire base DN was explicitly excluded. Skip.
+      return SuffixImportStrategy.SKIP_SUFFIX;
+    }
+
+    excludeBranches = getDescendants(baseDN, importCfg.getExcludeBranches());
+    if (!importCfg.getIncludeBranches().isEmpty())
+    {
+      includeBranches = getDescendants(baseDN, importCfg.getIncludeBranches());
+      if (includeBranches.isEmpty())
+      {
+        // There are no branches in the explicitly defined include list under this base DN.
+        // Skip this base DN altogether.
+        return SuffixImportStrategy.SKIP_SUFFIX;
+      }
+
+      // Remove any overlapping include branches.
+      Iterator<DN> includeBranchIterator = includeBranches.iterator();
+      while (includeBranchIterator.hasNext())
+      {
+        DN includeDN = includeBranchIterator.next();
+        if (!isAnyNotEqualAndAncestorOf(includeBranches, includeDN))
+        {
+          includeBranchIterator.remove();
+        }
+      }
+
+      // Remove any exclude branches that are not under an include branch
+      // since they will be migrated as part of the existing entries
+      // outside of the include branches anyway.
+      Iterator<DN> excludeBranchIterator = excludeBranches.iterator();
+      while (excludeBranchIterator.hasNext())
+      {
+        DN excludeDN = excludeBranchIterator.next();
+        if (!isAnyAncestorOf(includeBranches, excludeDN))
+        {
+          excludeBranchIterator.remove();
+        }
+      }
+
+      if (excludeBranches.isEmpty() && includeBranches.size() == 1 && includeBranches.get(0).equals(baseDN))
+      {
+        // This entire base DN is explicitly included in the import with
+        // no exclude branches that we need to migrate.
+        // Just clear the entry container.
+        return SuffixImportStrategy.CLEAR_SUFFIX;
+      }
+      return SuffixImportStrategy.MERGE_DB_WITH_LDIF;
+    }
+    return SuffixImportStrategy.INCLUDE_EXCLUDE_BRANCHES;
+  }
+
+  private List<DN> getDescendants(DN baseDN, Set<DN> dns)
+  {
+    final List<DN> results = new ArrayList<>();
+    for (DN dn : dns)
+    {
+      if (baseDN.isAncestorOf(dn))
+      {
+        results.add(dn);
+      }
+    }
+    return results;
+  }
+
+  private boolean isAnyAncestorOf(List<DN> dns, DN childDN)
+  {
+    for (DN dn : dns)
+    {
+      if (dn.isAncestorOf(childDN))
+      {
+        return true;
+      }
+    }
+    return false;
+  }
+
+  private boolean isAnyNotEqualAndAncestorOf(List<DN> dns, DN childDN)
+  {
+    for (DN dn : dns)
+    {
+      if (!dn.equals(childDN) && dn.isAncestorOf(childDN))
+      {
+        return false;
+      }
+    }
+    return true;
+  }
+}
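Not part of the patch, for illustration only: a minimal sketch of how the new command is consumed. It assumes a hypothetical helper class placed in the same org.opends.server.backends.pluggable package (the ImportSuffixCommand constructor and its getters above are package-private) and uses only the API introduced by this change plus standard OpenDJ types.

package org.opends.server.backends.pluggable;

import java.util.Set;

import org.opends.server.types.DN;
import org.opends.server.types.DirectoryException;
import org.opends.server.types.LDIFImportConfig;

/** Illustration only: report the import strategy chosen for each configured base DN. */
final class ImportStrategyPreview
{
  /** Mirrors the fail-fast loop added to BackendImpl above, but only prints each decision. */
  static void printStrategies(Set<DN> baseDNs, LDIFImportConfig importCfg) throws DirectoryException
  {
    for (DN baseDN : baseDNs)
    {
      ImportSuffixCommand command = new ImportSuffixCommand(baseDN, importCfg);
      System.out.println(baseDN + " -> " + command.getSuffixImportStrategy());
    }
  }
}

For an import configuration whose include/exclude branches cannot be handled in place, this prints MERGE_DB_WITH_LDIF for the affected base DN, which is exactly the case that BackendImpl now rejects with ERR_IMPORT_UNSUPPORTED_WITH_BRANCH before the import starts.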
opendj-server-legacy/src/main/java/org/opends/server/backends/pluggable/OnDiskMergeBufferImporter.java
@@ -56,13 +56,11 @@
 import java.util.Collection;
 import java.util.Collections;
 import java.util.HashMap;
-import java.util.Iterator;
 import java.util.LinkedHashMap;
 import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
 import java.util.NavigableSet;
-import java.util.Set;
 import java.util.SortedSet;
 import java.util.Timer;
 import java.util.TimerTask;
@@ -97,7 +95,6 @@
 import org.opends.server.admin.std.server.BackendIndexCfg;
 import org.opends.server.admin.std.server.PDBBackendCfg;
 import org.opends.server.admin.std.server.PluggableBackendCfg;
-import org.opends.server.api.Backend;
 import org.opends.server.backends.RebuildConfig;
 import org.opends.server.backends.RebuildConfig.RebuildMode;
 import org.opends.server.backends.pdb.PDBStorage;
@@ -634,67 +631,21 @@
   private Suffix getSuffix(WriteableTransaction txn, EntryContainer entryContainer) throws ConfigException,
       DirectoryException
   {
-    if (importCfg.appendToExistingData() || importCfg.clearBackend())
-    {
-      return new Suffix(entryContainer);
-    }
-
-    final DN baseDN = entryContainer.getBaseDN();
-    if (importCfg.getExcludeBranches().contains(baseDN))
-    {
-      // This entire base DN was explicitly excluded. Skip.
-      return null;
-    }
-
+    DN baseDN = entryContainer.getBaseDN();
+    ImportSuffixCommand openMethod = new ImportSuffixCommand(baseDN, importCfg);
     EntryContainer sourceEntryContainer = null;
-    List<DN> excludeBranches = getDescendants(baseDN, importCfg.getExcludeBranches());
-    List<DN> includeBranches = null;
-    if (!importCfg.getIncludeBranches().isEmpty())
+    switch(openMethod.getSuffixImportStrategy())
     {
-      includeBranches = getDescendants(baseDN, importCfg.getIncludeBranches());
-      if (includeBranches.isEmpty())
-      {
-        // There are no branches in the explicitly defined include list under this base DN.
-        // Skip this base DN altogether.
+    case APPEND_OR_REPLACE:
+      return new Suffix(entryContainer);
+    case SKIP_SUFFIX:
       return null;
-      }
-
-      // Remove any overlapping include branches.
-      Iterator<DN> includeBranchIterator = includeBranches.iterator();
-      while (includeBranchIterator.hasNext())
-      {
-        DN includeDN = includeBranchIterator.next();
-        if (!isAnyNotEqualAndAncestorOf(includeBranches, includeDN))
-        {
-          includeBranchIterator.remove();
-        }
-      }
-
-      // Remove any exclude branches that are not are not under a include branch
-      // since they will be migrated as part of the existing entries
-      // outside of the include branches anyways.
-      Iterator<DN> excludeBranchIterator = excludeBranches.iterator();
-      while (excludeBranchIterator.hasNext())
-      {
-        DN excludeDN = excludeBranchIterator.next();
-        if (!isAnyAncestorOf(includeBranches, excludeDN))
-        {
-          excludeBranchIterator.remove();
-        }
-      }
-
-      if (!Backend.importIncludesOrExcludesBranches(Collections.singleton(baseDN), includeBranches, excludeBranches))
-      {
-        // This entire base DN is explicitly included in the import with
-        // no exclude branches that we need to migrate.
-        // Just clear the entry container.
+    case CLEAR_SUFFIX:
       clearSuffix(entryContainer);
-      }
-      else
-      {
+      break;
+    case MERGE_DB_WITH_LDIF:
       sourceEntryContainer = entryContainer;
       // Create a temp entry container
       DN tempDN = DN.valueOf(baseDN.rdn() + "_importTmp");
       if (baseDN.size() > 1)
@@ -702,22 +653,16 @@
         tempDN = baseDN.parent().child(tempDN);
       }
       entryContainer = rootContainer.openEntryContainer(tempDN, txn);
-      }
-    }
-    return new Suffix(entryContainer, sourceEntryContainer, includeBranches, excludeBranches);
+      break;
+    case INCLUDE_EXCLUDE_BRANCHES:
+      break;
+    default:
+      throw new DirectoryException(getServerErrorResultCode(), ERR_IMPORT_UNKNOWN_SUFFIX_COMMAND_STRATEGY.get(baseDN));
     }
-
-  private List<DN> getDescendants(DN baseDN, Set<DN> dns)
-  {
-    final List<DN> results = new ArrayList<>();
-    for (DN dn : dns)
-    {
-      if (baseDN.isAncestorOf(dn))
-      {
-        results.add(dn);
-      }
-    }
-    return results;
+    return new Suffix(entryContainer, sourceEntryContainer, openMethod.getIncludeBranches(),
+        openMethod.getExcludeBranches());
   }
 
   private static void clearSuffix(EntryContainer entryContainer)
@@ -727,30 +672,6 @@
     entryContainer.unlock();
   }
-
-  private static boolean isAnyNotEqualAndAncestorOf(List<DN> dns, DN childDN)
-  {
-    for (DN dn : dns)
-    {
-      if (!dn.equals(childDN) && dn.isAncestorOf(childDN))
-      {
-        return false;
-      }
-    }
-    return true;
-  }
-
-  private static boolean isAnyAncestorOf(List<DN> dns, DN childDN)
-  {
-    for (DN dn : dns)
-    {
-      if (dn.isAncestorOf(childDN))
-      {
-        return true;
-      }
-    }
-    return false;
-  }
 
   /**
    * Rebuild the indexes using the specified root container.
    *
opendj-server-legacy/src/main/java/org/opends/server/backends/pluggable/OnDiskMergeStorageImporter.java
@@ -48,7 +48,6 @@
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
-import java.util.Collections;
 import java.util.Comparator;
 import java.util.HashMap;
 import java.util.HashSet;
@@ -85,7 +84,6 @@
 import org.opends.server.admin.std.meta.BackendIndexCfgDefn.IndexType;
 import org.opends.server.admin.std.server.BackendIndexCfg;
 import org.opends.server.admin.std.server.PluggableBackendCfg;
-import org.opends.server.api.Backend;
 import org.opends.server.backends.pluggable.AttributeIndex.MatchingRuleIndex;
 import org.opends.server.backends.pluggable.ImportLDIFReader.EntryInformation;
 import org.opends.server.backends.pluggable.OnDiskMergeBufferImporter.DNCache;
@@ -1409,66 +1407,21 @@
   private Suffix getSuffix(WriteableTransaction txn, EntryContainer entryContainer) throws ConfigException,
       DirectoryException
   {
-    if (importCfg.appendToExistingData() || importCfg.clearBackend())
-    {
-      return new Suffix(entryContainer);
-    }
-
-    final DN baseDN = entryContainer.getBaseDN();
-    if (importCfg.getExcludeBranches().contains(baseDN))
-    {
-      // This entire base DN was explicitly excluded. Skip.
-      return null;
-    }
-
+    DN baseDN = entryContainer.getBaseDN();
+    ImportSuffixCommand openMethod = new ImportSuffixCommand(baseDN, importCfg);
     EntryContainer sourceEntryContainer = null;
-    List<DN> excludeBranches = getDescendants(baseDN, importCfg.getExcludeBranches());
-    List<DN> includeBranches = null;
-    if (!importCfg.getIncludeBranches().isEmpty())
+    switch(openMethod.getSuffixImportStrategy())
     {
-      includeBranches = getDescendants(baseDN, importCfg.getIncludeBranches());
-      if (includeBranches.isEmpty())
-      {
-        // There are no branches in the explicitly defined include list under this base DN.
-        // Skip this base DN altogether.
+    case APPEND_OR_REPLACE:
+      return new Suffix(entryContainer);
+    case SKIP_SUFFIX:
      return null;
-      }
-
-      // Remove any overlapping include branches.
-      Iterator<DN> includeBranchIterator = includeBranches.iterator();
-      while (includeBranchIterator.hasNext())
-      {
-        DN includeDN = includeBranchIterator.next();
-        if (!isAnyNotEqualAndAncestorOf(includeBranches, includeDN))
-        {
-          includeBranchIterator.remove();
-        }
-      }
-
-      // Remove any exclude branches that are not are not under a include branch
-      // since they will be migrated as part of the existing entries
-      // outside of the include branches anyways.
-      Iterator<DN> excludeBranchIterator = excludeBranches.iterator();
-      while (excludeBranchIterator.hasNext())
-      {
-        DN excludeDN = excludeBranchIterator.next();
-        if (!isAnyAncestorOf(includeBranches, excludeDN))
-        {
-          excludeBranchIterator.remove();
-        }
-      }
-
-      if (!Backend.importIncludesOrExcludesBranches(Collections.singleton(baseDN), includeBranches, excludeBranches))
-      {
-        // This entire base DN is explicitly included in the import with
-        // no exclude branches that we need to migrate.
-        // Just clear the entry container.
+    case CLEAR_SUFFIX:
       clearSuffix(entryContainer);
-      }
-      else
-      {
+      break;
+    case MERGE_DB_WITH_LDIF:
       sourceEntryContainer = entryContainer;
       // Create a temp entry container
       DN tempDN = DN.valueOf(baseDN.rdn() + "_importTmp");
       if (baseDN.size() > 1)
@@ -1476,22 +1429,16 @@
         tempDN = baseDN.parent().child(tempDN);
       }
       entryContainer = rootContainer.openEntryContainer(tempDN, txn);
-      }
-    }
-    return new Suffix(entryContainer, sourceEntryContainer, includeBranches, excludeBranches);
+      break;
+    case INCLUDE_EXCLUDE_BRANCHES:
+      break;
+    default:
+      throw new DirectoryException(getServerErrorResultCode(), ERR_IMPORT_UNKNOWN_SUFFIX_COMMAND_STRATEGY.get(baseDN));
     }
-
-  private List<DN> getDescendants(DN baseDN, Set<DN> dns)
-  {
-    final List<DN> results = new ArrayList<>();
-    for (DN dn : dns)
-    {
-      if (baseDN.isAncestorOf(dn))
-      {
-        results.add(dn);
-      }
-    }
-    return results;
+    return new Suffix(entryContainer, sourceEntryContainer, openMethod.getIncludeBranches(),
+        openMethod.getExcludeBranches());
   }
 
   private static void clearSuffix(EntryContainer entryContainer)
@@ -1501,30 +1448,6 @@
     entryContainer.unlock();
   }
-
-  private static boolean isAnyNotEqualAndAncestorOf(List<DN> dns, DN childDN)
-  {
-    for (DN dn : dns)
-    {
-      if (!dn.equals(childDN) && dn.isAncestorOf(childDN))
-      {
-        return false;
-      }
-    }
-    return true;
-  }
-
-  private static boolean isAnyAncestorOf(List<DN> dns, DN childDN)
-  {
-    for (DN dn : dns)
-    {
-      if (dn.isAncestorOf(childDN))
-      {
-        return true;
-      }
-    }
-    return false;
-  }
 
   private LDIFImportResult processImport() throws Exception
   {
     try
     {
opendj-server-legacy/src/messages/org/opends/messages/backend.properties
@@ -1338,3 +1338,4 @@
  verification process can start
 ERR_IMPORT_UNSUPPORTED_WITH_BRANCH_580=Import operation is not supported \
  when exclude or include sub-branches have been specified
+ERR_IMPORT_UNKNOWN_SUFFIX_COMMAND_STRATEGY_581=Unknown suffix strategy while importing suffix "%s"
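For context, not part of the patch: OpenDJ generates message descriptors from this properties file; the trailing ordinal (_581) becomes the message ID while the rest of the key becomes a constant on the generated org.opends.messages.BackendMessages class, which is why the importers above can call ERR_IMPORT_UNKNOWN_SUFFIX_COMMAND_STRATEGY.get(baseDN). A minimal sketch of building the new message, assuming the generated descriptor takes the suffix DN as its single %s argument (the class name below is hypothetical):

import static org.opends.messages.BackendMessages.ERR_IMPORT_UNKNOWN_SUFFIX_COMMAND_STRATEGY;

import org.forgerock.i18n.LocalizableMessage;
import org.opends.server.types.DN;

final class UnknownStrategyMessageSketch
{
  /** Builds the localized text raised by the new default: branch in getSuffix(). */
  static LocalizableMessage format(DN baseDN)
  {
    return ERR_IMPORT_UNKNOWN_SUFFIX_COMMAND_STRATEGY.get(baseDN);
  }
}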