From a47912185abb46815cc3104f2187bf63c22bcf03 Mon Sep 17 00:00:00 2001
From: Jean-Noel Rouvignac <jean-noel.rouvignac@forgerock.com>
Date: Tue, 25 Nov 2014 10:56:13 +0000
Subject: [PATCH] OPENDJ-1545 (CR-5399) Remove Workflow, NetworkGroups and related attempts at building a proxy
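
Replace the workflow/network-group indirection with direct use of
LocalBackendWorkflowElement. Each *OperationBasis class now calls a
statically imported execute(this, entryDN) instead of looking up a
Workflow via NetworkGroup.getWorkflowCandidate(), and exposes
updateOperationErrMsgAndResCode() as a public override so the shared
code path can set the "no such entry" result when no backend handles
the request DN. DirectoryServer registers and removes
LocalBackendWorkflowElement instances per base DN directly, and
RootDSEBackend derives namingContexts from
DirectoryServer.getPublicNamingContexts() rather than from the default
network group.

The LocalBackendWorkflowElement change itself is only summarized in the
stat below, so the new helper is not visible in this excerpt; the call
sites suggest roughly the following shape (a sketch under that
assumption: findLocalBackend stands in for whatever per-base-DN lookup
the real code uses, and updateOperationErrMsgAndResCode is assumed to
be declared on the common operation supertype touched in
AbstractOperation.java):

    // Sketch inferred from the call sites in this patch, not the actual
    // LocalBackendWorkflowElement implementation.
    public static boolean execute(Operation operation, DN entryDN)
        throws CanceledOperationException
    {
      // Hypothetical per-base-DN lookup replacing
      // NetworkGroup.getWorkflowCandidate(entryDN).
      LocalBackendWorkflowElement workflow = findLocalBackend(entryDN);
      if (workflow == null)
      {
        // No backend is registered for this DN: let the operation set its
        // own "no such entry" result code and error message.
        operation.updateOperationErrMsgAndResCode();
        return false;
      }
      workflow.execute(operation);
      return true;
    }

The returned boolean lets each operation decide whether to invoke the
post-response plugins provided by the workflow elements or the ones
registered with the operation itself.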
---
opendj3-server-dev/src/server/org/opends/server/workflowelement/localbackend/LocalBackendWorkflowElement.java | 400 ++++++++++++--
opendj3-server-dev/src/server/org/opends/server/core/WorkflowResultCode.java | 10
opendj3-server-dev/src/server/org/opends/server/types/AbstractOperation.java | 37
opendj3-server-dev/src/server/org/opends/server/core/AddOperationBasis.java | 41 -
opendj3-server-dev/src/server/org/opends/server/core/ModifyDNOperationBasis.java | 31
opendj3-server-dev/src/server/org/opends/server/core/DirectoryServer.java | 19
/dev/null | 430 ---------------
opendj3-server-dev/src/server/org/opends/server/core/BindOperationBasis.java | 24
opendj3-server-dev/src/server/org/opends/server/core/SearchOperationBasis.java | 29
opendj3-server-dev/tests/unit-tests-testng/src/server/org/opends/server/workflowelement/localbackend/LocalBackendWorkflowElementTest.java | 281 ++++++++++
opendj3-server-dev/src/server/org/opends/server/core/ModifyOperationBasis.java | 38 -
opendj3-server-dev/src/server/org/opends/server/core/DeleteOperationBasis.java | 38 -
opendj3-server-dev/src/server/org/opends/server/backends/RootDSEBackend.java | 71 --
opendj3-server-dev/src/server/org/opends/server/core/CompareOperationBasis.java | 142 +----
14 files changed, 729 insertions(+), 862 deletions(-)
diff --git a/opendj3-server-dev/src/server/org/opends/server/backends/RootDSEBackend.java b/opendj3-server-dev/src/server/org/opends/server/backends/RootDSEBackend.java
index 662659e..8f8a20d 100644
--- a/opendj3-server-dev/src/server/org/opends/server/backends/RootDSEBackend.java
+++ b/opendj3-server-dev/src/server/org/opends/server/backends/RootDSEBackend.java
@@ -31,7 +31,6 @@
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
-import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
@@ -59,8 +58,6 @@
import org.opends.server.core.ModifyDNOperation;
import org.opends.server.core.ModifyOperation;
import org.opends.server.core.SearchOperation;
-import org.opends.server.core.WorkflowTopologyNode;
-import org.opends.server.core.networkgroups.NetworkGroup;
import org.opends.server.types.*;
import org.opends.server.util.BuildVersion;
import org.opends.server.util.LDIFWriter;
@@ -459,29 +456,13 @@
{
HashMap<AttributeType,List<Attribute>> dseUserAttrs =
new HashMap<AttributeType,List<Attribute>>();
-
HashMap<AttributeType,List<Attribute>> dseOperationalAttrs =
new HashMap<AttributeType,List<Attribute>>();
- // Add the "namingContexts" attribute.
- final Collection<DN> namingContexts;
- if (connection == null)
- {
- namingContexts = DirectoryServer.getPublicNamingContexts().keySet();
- }
- else
- {
- namingContexts = new LinkedList<DN>();
- for (WorkflowTopologyNode node : NetworkGroup.getDefaultNetworkGroup()
- .getNamingContexts().getPublicNamingContexts())
- {
- namingContexts.add(node.getBaseDN());
- }
- }
-
- Attribute publicNamingContextAttr = createDNAttribute(ATTR_NAMING_CONTEXTS,
- ATTR_NAMING_CONTEXTS_LC, namingContexts);
+ Attribute publicNamingContextAttr = createDNAttribute(
+ ATTR_NAMING_CONTEXTS, ATTR_NAMING_CONTEXTS_LC,
+ DirectoryServer.getPublicNamingContexts().keySet());
addAttribute(publicNamingContextAttr, dseUserAttrs, dseOperationalAttrs);
@@ -680,45 +661,6 @@
}
/**
- * Determines the workflow nodes which handle subordinate naming contexts.
- * A workflow node is handling a subordinate naming context if the workflow
- * base DN is in the list of the RootDSE subordinate naming contexts.
- *
- * @param nodes
- * The list from which we search the workflow nodes which
- * are handling subordinate naming contexts
- *
- * @return The list of workflow nodes that are handling subordinate
- * naming contexts
- */
- public Iterable<WorkflowTopologyNode> getSubordinateNamingContexts(
- Iterable<WorkflowTopologyNode> nodes)
- {
- // If the list of subordinate naming contexts is null
- // then return the whole list of workflow nodes.
- if (subordinateBaseDNs == null)
- {
- return nodes;
- }
-
- // The returned list of subordinate naming contexts
- List<WorkflowTopologyNode> subNC = new ArrayList<WorkflowTopologyNode>();
-
- // Determine which workflow node is handling a subordinate naming context.
- for (WorkflowTopologyNode node : nodes)
- {
- DN dn = node.getBaseDN();
- if (subordinateBaseDNs.containsKey(dn))
- {
- subNC.add(node);
- }
- }
-
- return subNC;
- }
-
-
- /**
* Creates an attribute for the root DSE meant to hold a set of DNs.
*
* @param name The name for the attribute.
@@ -945,8 +887,13 @@
}
}
+ /**
+ * Returns the subordinate base DNs of the root DSE.
+ *
+ * @return the subordinate base DNs of the root DSE
+ */
@SuppressWarnings({ "unchecked", "rawtypes" })
- private Map<DN, Backend<?>> getSubordinateBaseDNs()
+ public Map<DN, Backend<?>> getSubordinateBaseDNs()
{
if (subordinateBaseDNs != null)
{
diff --git a/opendj3-server-dev/src/server/org/opends/server/core/AddOperationBasis.java b/opendj3-server-dev/src/server/org/opends/server/core/AddOperationBasis.java
index f67ea0b..dbf5acc 100644
--- a/opendj3-server-dev/src/server/org/opends/server/core/AddOperationBasis.java
+++ b/opendj3-server-dev/src/server/org/opends/server/core/AddOperationBasis.java
@@ -26,11 +26,6 @@
*/
package org.opends.server.core;
-import static org.opends.messages.CoreMessages.*;
-import static org.opends.server.config.ConfigConstants.*;
-import static org.opends.server.loggers.AccessLogger.*;
-import static org.opends.server.util.StaticUtils.*;
-
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
@@ -41,7 +36,6 @@
import org.forgerock.opendj.ldap.ResultCode;
import org.opends.server.api.ClientConnection;
import org.opends.server.api.plugin.PluginResult;
-import org.opends.server.core.networkgroups.NetworkGroup;
import org.opends.server.protocols.ldap.LDAPAttribute;
import org.opends.server.protocols.ldap.LDAPResultCode;
import org.opends.server.types.*;
@@ -49,6 +43,12 @@
import org.opends.server.types.operation.PreParseAddOperation;
import org.opends.server.workflowelement.localbackend.LocalBackendAddOperation;
+import static org.opends.messages.CoreMessages.*;
+import static org.opends.server.config.ConfigConstants.*;
+import static org.opends.server.loggers.AccessLogger.*;
+import static org.opends.server.util.StaticUtils.*;
+import static org.opends.server.workflowelement.localbackend.LocalBackendWorkflowElement.*;
+
/**
* This class defines an operation that may be used to add a new entry to the
* Directory Server.
@@ -570,19 +570,7 @@
return;
}
- // Retrieve the network group attached to the client connection
- // and get a workflow to process the operation.
- Workflow workflow = NetworkGroup.getWorkflowCandidate(entryDN);
- if (workflow == null)
- {
- // We have found no workflow for the requested base DN, just return
- // a no such entry result code and stop the processing.
- updateOperationErrMsgAndResCode();
- return;
- }
- workflow.execute(this);
- workflowExecuted = true;
-
+ workflowExecuted = execute(this, entryDN);
}
catch(CanceledOperationException coe)
{
@@ -633,8 +621,7 @@
* elements of the workflow, otherwise invoke the post response plugins
* that have been registered with the current operation.
*
- * @param workflowExecuted <code>true</code> if a workflow has been
- * executed
+ * @param workflowExecuted <code>true</code> if a workflow has been executed
*/
@SuppressWarnings({ "unchecked", "rawtypes" })
private void invokePostResponsePlugins(boolean workflowExecuted)
@@ -667,15 +654,9 @@
}
}
-
-
- /**
- * Updates the error message and the result code of the operation.
- *
- * This method is called because no workflows were found to process
- * the operation.
- */
- private void updateOperationErrMsgAndResCode()
+ /** {@inheritDoc} */
+ @Override
+ public void updateOperationErrMsgAndResCode()
{
DN entryDN = getEntryDN();
DN parentDN = entryDN.getParentDNInSuffix();
diff --git a/opendj3-server-dev/src/server/org/opends/server/core/BindOperationBasis.java b/opendj3-server-dev/src/server/org/opends/server/core/BindOperationBasis.java
index 4ec7e5a..6f33b29 100644
--- a/opendj3-server-dev/src/server/org/opends/server/core/BindOperationBasis.java
+++ b/opendj3-server-dev/src/server/org/opends/server/core/BindOperationBasis.java
@@ -35,7 +35,6 @@
import org.forgerock.opendj.ldap.ResultCode;
import org.opends.server.api.ClientConnection;
import org.opends.server.api.plugin.PluginResult;
-import org.opends.server.core.networkgroups.NetworkGroup;
import org.opends.server.types.*;
import org.opends.server.types.operation.PreParseBindOperation;
import org.opends.server.workflowelement.localbackend.LocalBackendBindOperation;
@@ -44,6 +43,7 @@
import static org.opends.messages.CoreMessages.*;
import static org.opends.server.core.DirectoryServer.*;
import static org.opends.server.loggers.AccessLogger.*;
+import static org.opends.server.workflowelement.localbackend.LocalBackendWorkflowElement.*;
/**
* This class defines an operation that may be used to authenticate a user to
@@ -557,16 +557,7 @@
}
}
- Workflow workflow = NetworkGroup.getWorkflowCandidate(bindDN);
- if (workflow == null)
- {
- // We have found no workflow for the requested base DN, just return
- // a no such entry result code and stop the processing.
- updateOperationErrMsgAndResCode();
- return;
- }
- workflow.execute(this);
- workflowExecuted = true;
+ workflowExecuted = execute(this, bindDN);
}
catch(CanceledOperationException coe)
{
@@ -636,14 +627,9 @@
}
}
-
- /**
- * Updates the error message and the result code of the operation.
- *
- * This method is called because no workflows were found to process
- * the operation.
- */
- private void updateOperationErrMsgAndResCode()
+ /** {@inheritDoc} */
+ @Override
+ public void updateOperationErrMsgAndResCode()
{
LocalizableMessage message = ERR_BIND_OPERATION_UNKNOWN_USER.get();
setResultCode(ResultCode.INVALID_CREDENTIALS);
diff --git a/opendj3-server-dev/src/server/org/opends/server/core/CompareOperationBasis.java b/opendj3-server-dev/src/server/org/opends/server/core/CompareOperationBasis.java
index 037e9ce..87fc848 100644
--- a/opendj3-server-dev/src/server/org/opends/server/core/CompareOperationBasis.java
+++ b/opendj3-server-dev/src/server/org/opends/server/core/CompareOperationBasis.java
@@ -26,10 +26,6 @@
*/
package org.opends.server.core;
-import static org.opends.messages.CoreMessages.*;
-import static org.opends.server.loggers.AccessLogger.*;
-import static org.opends.server.util.StaticUtils.*;
-
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
@@ -40,12 +36,16 @@
import org.forgerock.opendj.ldap.ResultCode;
import org.opends.server.api.ClientConnection;
import org.opends.server.api.plugin.PluginResult;
-import org.opends.server.core.networkgroups.NetworkGroup;
import org.opends.server.types.*;
import org.opends.server.types.operation.PostResponseCompareOperation;
import org.opends.server.types.operation.PreParseCompareOperation;
import org.opends.server.workflowelement.localbackend.LocalBackendCompareOperation;
+import static org.opends.messages.CoreMessages.*;
+import static org.opends.server.loggers.AccessLogger.*;
+import static org.opends.server.util.StaticUtils.*;
+import static org.opends.server.workflowelement.localbackend.LocalBackendWorkflowElement.*;
+
/**
* This class defines an operation that may be used to determine whether a
* specified entry in the Directory Server contains a given attribute-value
@@ -156,22 +156,14 @@
attributeOptions = new HashSet<String>();
}
-
-
- /**
- * {@inheritDoc}
- */
+ /** {@inheritDoc} */
@Override
public final ByteString getRawEntryDN()
{
return rawEntryDN;
}
-
-
- /**
- * {@inheritDoc}
- */
+ /** {@inheritDoc} */
@Override
public final void setRawEntryDN(ByteString rawEntryDN)
{
@@ -180,11 +172,7 @@
entryDN = null;
}
-
-
- /**
- * {@inheritDoc}
- */
+ /** {@inheritDoc} */
@Override
public final DN getEntryDN()
{
@@ -204,22 +192,14 @@
return entryDN;
}
-
-
- /**
- * {@inheritDoc}
- */
+ /** {@inheritDoc} */
@Override
public final String getRawAttributeType()
{
return rawAttributeType;
}
-
-
- /**
- * {@inheritDoc}
- */
+ /** {@inheritDoc} */
@Override
public final void setRawAttributeType(String rawAttributeType)
{
@@ -229,8 +209,6 @@
attributeOptions = null;
}
-
-
private void getAttributeTypeAndOptions() {
String baseName;
int semicolonPos = rawAttributeType.indexOf(';');
@@ -257,9 +235,7 @@
attributeType = DirectoryServer.getAttributeType(baseName, true);
}
- /**
- * {@inheritDoc}
- */
+ /** {@inheritDoc} */
@Override
public final AttributeType getAttributeType()
{
@@ -269,22 +245,14 @@
return attributeType;
}
-
-
- /**
- * {@inheritDoc}
- */
+ /** {@inheritDoc} */
@Override
public void setAttributeType(AttributeType attributeType)
{
this.attributeType = attributeType;
}
-
-
- /**
- * {@inheritDoc}
- */
+ /** {@inheritDoc} */
@Override
public Set<String> getAttributeOptions()
{
@@ -294,43 +262,29 @@
return attributeOptions;
}
-
-
- /**
- * {@inheritDoc}
- */
+ /** {@inheritDoc} */
@Override
public void setAttributeOptions(Set<String> attributeOptions)
{
this.attributeOptions = attributeOptions;
}
- /**
- * {@inheritDoc}
- */
+ /** {@inheritDoc} */
@Override
public final ByteString getAssertionValue()
{
return assertionValue;
}
-
-
- /**
- * {@inheritDoc}
- */
+ /** {@inheritDoc} */
@Override
public final void setAssertionValue(ByteString assertionValue)
{
this.assertionValue = assertionValue;
}
-
-
- /**
- * {@inheritDoc}
- */
- @Override()
+ /** {@inheritDoc} */
+ @Override
public final OperationType getOperationType()
{
// Note that no debugging will be done in this method because it is a likely
@@ -354,45 +308,29 @@
return proxiedAuthorizationDN;
}
-
-
- /**
- * {@inheritDoc}
- */
+ /** {@inheritDoc} */
@Override
public void setProxiedAuthorizationDN(DN proxiedAuthorizationDN)
{
this.proxiedAuthorizationDN = proxiedAuthorizationDN;
}
-
-
- /**
- * {@inheritDoc}
- */
- @Override()
+ /** {@inheritDoc} */
+ @Override
public final List<Control> getResponseControls()
{
return responseControls;
}
-
-
- /**
- * {@inheritDoc}
- */
- @Override()
+ /** {@inheritDoc} */
+ @Override
public final void addResponseControl(Control control)
{
responseControls.add(control);
}
-
-
- /**
- * {@inheritDoc}
- */
- @Override()
+ /** {@inheritDoc} */
+ @Override
public final void removeResponseControl(Control control)
{
responseControls.remove(control);
@@ -466,19 +404,7 @@
return;
}
- // Retrieve the network group registered with the client connection
- // and get a workflow to process the operation.
- Workflow workflow = NetworkGroup.getWorkflowCandidate(entryDN);
- if (workflow == null)
- {
- // We have found no workflow for the requested base DN, just return
- // a no such entry result code and stop the processing.
- updateOperationErrMsgAndResCode();
- return;
- }
- workflow.execute(this);
- workflowExecuted = true;
-
+ workflowExecuted = execute(this, entryDN);
}
catch(CanceledOperationException coe)
{
@@ -520,11 +446,10 @@
/**
* Invokes the post response plugins. If a workflow has been executed
* then invoke the post response plugins provided by the workflow
- * elements of the worklfow, otherwise invoke the post reponse plugins
+ * elements of the workflow, otherwise invoke the post response plugins
* that have been registered with the current operation.
*
- * @param workflowExecuted <code>true</code> if a workflow has been
- * executed
+ * @param workflowExecuted <code>true</code> if a workflow has been executed
*/
private void invokePostResponsePlugins(boolean workflowExecuted)
{
@@ -563,18 +488,15 @@
* This method is called because no workflow was found to process
* the operation.
*/
- private void updateOperationErrMsgAndResCode()
+ @Override
+ public void updateOperationErrMsgAndResCode()
{
setResultCode(ResultCode.NO_SUCH_OBJECT);
appendErrorMessage(ERR_COMPARE_NO_SUCH_ENTRY.get(getEntryDN()));
}
-
-
- /**
- * {@inheritDoc}
- */
- @Override()
+ /** {@inheritDoc} */
+ @Override
public final void toString(StringBuilder buffer)
{
buffer.append("CompareOperation(connID=");
diff --git a/opendj3-server-dev/src/server/org/opends/server/core/DeleteOperationBasis.java b/opendj3-server-dev/src/server/org/opends/server/core/DeleteOperationBasis.java
index 2e7af23..56080ae 100644
--- a/opendj3-server-dev/src/server/org/opends/server/core/DeleteOperationBasis.java
+++ b/opendj3-server-dev/src/server/org/opends/server/core/DeleteOperationBasis.java
@@ -26,8 +26,6 @@
*/
package org.opends.server.core;
-import static org.opends.messages.CoreMessages.*;
-import static org.opends.server.loggers.AccessLogger.*;
import java.util.ArrayList;
import java.util.List;
@@ -36,12 +34,15 @@
import org.forgerock.opendj.ldap.ResultCode;
import org.opends.server.api.ClientConnection;
import org.opends.server.api.plugin.PluginResult;
-import org.opends.server.core.networkgroups.NetworkGroup;
import org.opends.server.types.*;
import org.opends.server.types.operation.PostResponseDeleteOperation;
import org.opends.server.types.operation.PreParseDeleteOperation;
import org.opends.server.workflowelement.localbackend.LocalBackendDeleteOperation;
+import static org.opends.messages.CoreMessages.*;
+import static org.opends.server.loggers.AccessLogger.*;
+import static org.opends.server.workflowelement.localbackend.LocalBackendWorkflowElement.*;
+
/**
* This class defines an operation that may be used to remove an entry from the
* Directory Server.
@@ -263,19 +264,7 @@
return;
}
- // Retrieve the network group attached to the client connection
- // and get a workflow to process the operation.
- Workflow workflow = NetworkGroup.getWorkflowCandidate(entryDN);
- if (workflow == null)
- {
- // We have found no workflow for the requested base DN, just return
- // a no such entry result code and stop the processing.
- updateOperationErrMsgAndResCode();
- return;
- }
- workflow.execute(this);
- workflowExecuted = true;
-
+ workflowExecuted = execute(this, entryDN);
}
catch(CanceledOperationException coe)
{
@@ -323,11 +312,10 @@
/**
* Invokes the post response plugins. If a workflow has been executed
* then invoke the post response plugins provided by the workflow
- * elements of the worklfow, otherwise invoke the post reponse plugins
+ * elements of the workflow, otherwise invoke the post response plugins
* that have been registered with the current operation.
*
- * @param workflowExecuted <code>true</code> if a workflow has been
- * executed
+ * @param workflowExecuted <code>true</code> if a workflow has been executed
*/
private void invokePostResponsePlugins(boolean workflowExecuted)
{
@@ -359,14 +347,9 @@
}
}
-
- /**
- * Updates the error message and the result code of the operation.
- *
- * This method is called because no workflows were found to process
- * the operation.
- */
- private void updateOperationErrMsgAndResCode()
+ /** {@inheritDoc} */
+ @Override
+ public void updateOperationErrMsgAndResCode()
{
setResultCode(ResultCode.NO_SUCH_OBJECT);
appendErrorMessage(ERR_DELETE_NO_SUCH_ENTRY.get(getEntryDN()));
@@ -384,4 +367,3 @@
}
}
-
diff --git a/opendj3-server-dev/src/server/org/opends/server/core/DirectoryServer.java b/opendj3-server-dev/src/server/org/opends/server/core/DirectoryServer.java
index 2b9cbe7..24fda7a 100644
--- a/opendj3-server-dev/src/server/org/opends/server/core/DirectoryServer.java
+++ b/opendj3-server-dev/src/server/org/opends/server/core/DirectoryServer.java
@@ -125,7 +125,6 @@
import org.opends.server.config.JMXMBean;
import org.opends.server.controls.PasswordPolicyErrorType;
import org.opends.server.controls.PasswordPolicyResponseControl;
-import org.opends.server.core.networkgroups.NetworkGroup;
import org.opends.server.crypto.CryptoManagerImpl;
import org.opends.server.crypto.CryptoManagerSync;
import org.opends.server.extensions.ConfigFileHandler;
@@ -229,7 +228,6 @@
import static org.opends.server.util.DynamicConstants.*;
import static org.opends.server.util.ServerConstants.*;
import static org.opends.server.util.StaticUtils.*;
-import static org.opends.server.workflowelement.localbackend.LocalBackendWorkflowElement.*;
/**
* This class defines the core of the Directory Server. It manages the startup
@@ -2162,12 +2160,7 @@
*/
private static void createWorkflow(DN baseDN, Backend<?> backend) throws DirectoryException
{
- // Create a root workflow element to encapsulate the backend
- final String backendID = backend.getBackendID();
- LocalBackendWorkflowElement rootWE = createAndRegister(backendID, backend);
-
- // Create the workflow for the base DN and register the workflow with the server
- NetworkGroup.getDefaultNetworkGroup().registerWorkflow(backendID, baseDN, rootWE);
+ LocalBackendWorkflowElement.createAndRegister(baseDN, backend);
}
/**
@@ -5190,7 +5183,10 @@
directoryServer.backends = newBackends;
// Don't need anymore the local backend workflow element so we can remove it
- LocalBackendWorkflowElement.remove(backend.getBackendID());
+ for (DN baseDN : backend.getBaseDNs())
+ {
+ LocalBackendWorkflowElement.remove(baseDN);
+ }
BackendMonitor monitor = backend.getBackendMonitor();
@@ -5391,7 +5387,7 @@
// Now we need to deregister the workflow that was associated with the base DN
if (!baseDN.equals(DN.valueOf("cn=config")))
{
- NetworkGroup.getDefaultNetworkGroup().deregisterWorkflow(baseDN);
+ LocalBackendWorkflowElement.remove(baseDN);
}
}
}
@@ -7147,9 +7143,6 @@
logger.traceException(e);
}
- // Deregister all workflows and network group configuration.
- NetworkGroup.deregisterAllOnShutdown();
-
// Force a new InternalClientConnection to be created on restart.
InternalConnectionHandler.clearRootClientConnectionAtShutdown();
diff --git a/opendj3-server-dev/src/server/org/opends/server/core/ModifyDNOperationBasis.java b/opendj3-server-dev/src/server/org/opends/server/core/ModifyDNOperationBasis.java
index 5e59b3c..05cbbab 100644
--- a/opendj3-server-dev/src/server/org/opends/server/core/ModifyDNOperationBasis.java
+++ b/opendj3-server-dev/src/server/org/opends/server/core/ModifyDNOperationBasis.java
@@ -34,7 +34,6 @@
import org.forgerock.opendj.ldap.ResultCode;
import org.opends.server.api.ClientConnection;
import org.opends.server.api.plugin.PluginResult;
-import org.opends.server.core.networkgroups.NetworkGroup;
import org.opends.server.types.*;
import org.opends.server.types.operation.PostResponseModifyDNOperation;
import org.opends.server.types.operation.PreParseModifyDNOperation;
@@ -42,6 +41,7 @@
import static org.opends.messages.CoreMessages.*;
import static org.opends.server.loggers.AccessLogger.*;
+import static org.opends.server.workflowelement.localbackend.LocalBackendWorkflowElement.*;
/**
* This class defines an operation that may be used to alter the DN of an entry
@@ -449,18 +449,7 @@
return;
}
- // Retrieve the network group attached to the client connection
- // and get a workflow to process the operation.
- Workflow workflow = NetworkGroup.getWorkflowCandidate(entryDN);
- if (workflow == null)
- {
- // We have found no workflow for the requested base DN, just return
- // a no such entry result code and stop the processing.
- updateOperationErrMsgAndResCode();
- return;
- }
- workflow.execute(this);
- workflowExecuted = true;
+ workflowExecuted = execute(this, entryDN);
}
catch(CanceledOperationException coe)
{
@@ -507,11 +496,10 @@
/**
* Invokes the post response plugins. If a workflow has been executed
* then invoke the post response plugins provided by the workflow
- * elements of the worklfow, otherwise invoke the post reponse plugins
+ * elements of the workflow, otherwise invoke the post response plugins
* that have been registered with the current operation.
*
- * @param workflowExecuted <code>true</code> if a workflow has been
- * executed
+ * @param workflowExecuted <code>true</code> if a workflow has been executed
*/
private void invokePostResponsePlugins(boolean workflowExecuted)
{
@@ -545,14 +533,9 @@
}
}
-
- /**
- * Updates the error message and the result code of the operation.
- *
- * This method is called because no workflows were found to process
- * the operation.
- */
- private void updateOperationErrMsgAndResCode()
+ /** {@inheritDoc} */
+ @Override
+ public void updateOperationErrMsgAndResCode()
{
setResultCode(ResultCode.NO_SUCH_OBJECT);
appendErrorMessage(ERR_MODDN_NO_BACKEND_FOR_CURRENT_ENTRY.get(entryDN));
diff --git a/opendj3-server-dev/src/server/org/opends/server/core/ModifyOperationBasis.java b/opendj3-server-dev/src/server/org/opends/server/core/ModifyOperationBasis.java
index 664c906..2584508 100644
--- a/opendj3-server-dev/src/server/org/opends/server/core/ModifyOperationBasis.java
+++ b/opendj3-server-dev/src/server/org/opends/server/core/ModifyOperationBasis.java
@@ -34,7 +34,6 @@
import org.forgerock.opendj.ldap.ResultCode;
import org.opends.server.api.ClientConnection;
import org.opends.server.api.plugin.PluginResult;
-import org.opends.server.core.networkgroups.NetworkGroup;
import org.opends.server.protocols.ldap.LDAPAttribute;
import org.opends.server.protocols.ldap.LDAPModification;
import org.opends.server.protocols.ldap.LDAPResultCode;
@@ -45,6 +44,7 @@
import static org.opends.messages.CoreMessages.*;
import static org.opends.server.loggers.AccessLogger.*;
+import static org.opends.server.workflowelement.localbackend.LocalBackendWorkflowElement.*;
/**
* This class defines an operation that may be used to modify an entry in the
@@ -69,10 +69,7 @@
/** The set of response controls for this modify operation. */
private List<Control> responseControls;
- /**
- * The raw, unprocessed set of modifications as included in the client
- * request.
- */
+ /** The raw, unprocessed set of modifications as included in the client request. */
private List<RawModification> rawModifications;
/** The set of modifications for this modify operation. */
@@ -368,19 +365,7 @@
return;
}
- // Retrieve the network group attached to the client connection
- // and get a workflow to process the operation.
- Workflow workflow = NetworkGroup.getWorkflowCandidate(entryDN);
- if (workflow == null)
- {
- // We have found no workflow for the requested base DN, just return
- // a no such entry result code and stop the processing.
- updateOperationErrMsgAndResCode();
- return;
- }
- workflow.execute(this);
- workflowExecuted = true;
-
+ workflowExecuted = execute(this, entryDN);
}
catch(CanceledOperationException coe)
{
@@ -427,11 +412,10 @@
/**
* Invokes the post response plugins. If a workflow has been executed
* then invoke the post response plugins provided by the workflow
- * elements of the worklfow, otherwise invoke the post reponse plugins
+ * elements of the workflow, otherwise invoke the post response plugins
* that have been registered with the current operation.
*
- * @param workflowExecuted <code>true</code> if a workflow has been
- * executed
+ * @param workflowExecuted <code>true</code> if a workflow has been executed
*/
private void invokePostResponsePlugins(boolean workflowExecuted)
{
@@ -464,15 +448,9 @@
}
}
-
-
- /**
- * Updates the error message and the result code of the operation.
- *
- * This method is called because no workflows were found to process
- * the operation.
- */
- private void updateOperationErrMsgAndResCode()
+ /** {@inheritDoc} */
+ @Override
+ public void updateOperationErrMsgAndResCode()
{
setResultCode(ResultCode.NO_SUCH_OBJECT);
appendErrorMessage(ERR_MODIFY_NO_SUCH_ENTRY.get(getEntryDN()));
diff --git a/opendj3-server-dev/src/server/org/opends/server/core/RootDseWorkflowTopology.java b/opendj3-server-dev/src/server/org/opends/server/core/RootDseWorkflowTopology.java
deleted file mode 100644
index 35aaca5..0000000
--- a/opendj3-server-dev/src/server/org/opends/server/core/RootDseWorkflowTopology.java
+++ /dev/null
@@ -1,189 +0,0 @@
-/*
- * CDDL HEADER START
- *
- * The contents of this file are subject to the terms of the
- * Common Development and Distribution License, Version 1.0 only
- * (the "License"). You may not use this file except in compliance
- * with the License.
- *
- * You can obtain a copy of the license at legal-notices/CDDLv1_0.txt
- * or http://forgerock.org/license/CDDLv1.0.html.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- *
- * When distributing Covered Code, include this CDDL HEADER in each
- * file and include the License file at legal-notices/CDDLv1_0.txt.
- * If applicable, add the following below this CDDL HEADER, with the
- * fields enclosed by brackets "[]" replaced with your own identifying
- * information:
- * Portions Copyright [yyyy] [name of copyright owner]
- *
- * CDDL HEADER END
- *
- *
- * Copyright 2008-2010 Sun Microsystems, Inc.
- * Portions Copyright 2014 ForgeRock AS
- */
-package org.opends.server.core;
-
-import org.forgerock.i18n.LocalizableMessage;
-import org.forgerock.i18n.LocalizableMessageBuilder;
-import org.forgerock.opendj.ldap.ResultCode;
-import org.forgerock.opendj.ldap.SearchScope;
-import org.opends.server.core.networkgroups.NetworkGroupNamingContexts;
-import org.opends.server.types.CanceledOperationException;
-import org.opends.server.types.DN;
-import org.opends.server.types.Operation;
-import org.opends.server.types.OperationType;
-import org.opends.server.workflowelement.localbackend.LocalBackendWorkflowElement;
-
-/**
- * This class implements the workflow node that handles the root DSE entry.
- * As opposed to the WorkflowTopologyNode class, the root DSE node has no
- * parent node nor subordinate nodes. Instead, the root DSE node has a set
- * of naming contexts, each of which is a WorkflowTopologyNode object with
- * no parent.
- */
-public class RootDseWorkflowTopology extends WorkflowTopology
-{
-
- /**
- * The naming contexts known by the root DSE. These naming contexts
- * are defined in the scope of a network group.
- */
- private final NetworkGroupNamingContexts namingContexts;
-
- /**
- * Creates a workflow node to handle the root DSE entry.
- *
- * @param backendId
- * the backendId
- * @param baseDN
- * identifies the data handled by the workflow
- * @param rootWorkflowElement
- * the root node of the task tree
- * @param namingContexts
- * the list of naming contexts being registered with the network
- * group the root DSE belongs to
- */
- public RootDseWorkflowTopology(String backendId, DN baseDN, LocalBackendWorkflowElement rootWorkflowElement,
- NetworkGroupNamingContexts namingContexts)
- {
- super(backendId, baseDN, rootWorkflowElement);
- this.namingContexts = namingContexts;
- }
-
-
- /**
- * Executes an operation on the root DSE entry.
- *
- * @param operation the operation to execute
- *
- * @throws CanceledOperationException if this operation should
- * be cancelled.
- */
- @Override
- public void execute(Operation operation) throws CanceledOperationException
- {
- OperationType operationType = operation.getOperationType();
- if (operationType == OperationType.SEARCH)
- {
- executeSearch((SearchOperation) operation);
- }
- else
- {
- super.execute(operation);
- }
- }
-
-
- /**
- * Executes a search operation on the the root DSE entry.
- *
- * @param searchOp the operation to execute
- *
- * @throws CanceledOperationException if this operation should
- * be cancelled.
- */
- private void executeSearch(SearchOperation searchOp)
- throws CanceledOperationException {
- // Keep a the original search scope because we will alter it in the
- // operation.
- SearchScope originalScope = searchOp.getScope();
-
- // Search base?
- // The root DSE entry itself is never returned unless the operation
- // is a search base on the null suffix.
- if (originalScope == SearchScope.BASE_OBJECT)
- {
- super.execute(searchOp);
- return;
- }
-
- // Create a workflow result code in case we need to perform search in
- // subordinate workflows.
- WorkflowResultCode workflowResultCode = new WorkflowResultCode(
- searchOp.getResultCode(), searchOp.getErrorMessage());
-
- // The search scope is not 'base', so let's do a search on all the public
- // naming contexts with appropriate new search scope and new base DN.
- SearchScope newScope = elaborateScopeForSearchInSubordinates(originalScope);
- searchOp.setScope(newScope);
- DN originalBaseDN = searchOp.getBaseDN();
-
- Iterable<WorkflowTopologyNode> ncToSearch =
- DirectoryServer.getRootDSEBackend().getSubordinateNamingContexts(
- namingContexts.getPublicNamingContexts());
-
- for (WorkflowTopologyNode namingContext: ncToSearch)
- {
- // We have to change the operation request base DN to match the
- // subordinate workflow base DN. Otherwise the workflow will
- // return a no such entry result code as the operation request
- // base DN is a superior of the workflow base DN!
- DN ncDN = namingContext.getBaseDN();
-
- // Set the new request base DN then do execute the operation
- // in the naming context workflow.
- searchOp.setBaseDN(ncDN);
- namingContext.execute(searchOp);
- boolean sendReferenceEntry =
- workflowResultCode.elaborateGlobalResultCode(
- searchOp.getResultCode(), searchOp.getErrorMessage());
- if (sendReferenceEntry)
- {
- // TODO jdemendi - turn a referral result code into a reference entry
- // and send the reference entry to the client application
- }
- }
-
- // Now restore the original request base DN and original search scope
- searchOp.setBaseDN(originalBaseDN);
- searchOp.setScope(originalScope);
-
- // If the result code is still uninitialized (ie no naming context),
- // we should return NO_SUCH_OBJECT
- workflowResultCode.elaborateGlobalResultCode(
- ResultCode.NO_SUCH_OBJECT, new LocalizableMessageBuilder(LocalizableMessage.EMPTY));
-
- // Set the operation result code and error message
- searchOp.setResultCode(workflowResultCode.resultCode());
- searchOp.setErrorMessage(workflowResultCode.errorMessage());
- }
-
-
- /**
- * Dumps info from the current workflow for debug purpose.
- *
- * @param leftMargin white spaces used to indent the traces
- * @return a string buffer that contains trace information
- */
- public StringBuilder toString(String leftMargin)
- {
- StringBuilder sb = new StringBuilder();
- sb.append(leftMargin).append("Workflow ID = ").append(getWorkflowId()).append("\n");
- sb.append(leftMargin).append(" baseDN:[ \"\" ]\n");
- return sb;
- }
-
-}
diff --git a/opendj3-server-dev/src/server/org/opends/server/core/SearchOperationBasis.java b/opendj3-server-dev/src/server/org/opends/server/core/SearchOperationBasis.java
index 815393f..affc470 100644
--- a/opendj3-server-dev/src/server/org/opends/server/core/SearchOperationBasis.java
+++ b/opendj3-server-dev/src/server/org/opends/server/core/SearchOperationBasis.java
@@ -46,7 +46,6 @@
import org.opends.server.api.plugin.PluginResult;
import org.opends.server.controls.AccountUsableResponseControl;
import org.opends.server.controls.MatchedValuesControl;
-import org.opends.server.core.networkgroups.NetworkGroup;
import org.opends.server.protocols.ldap.LDAPFilter;
import org.opends.server.types.AbstractOperation;
import org.opends.server.types.Attribute;
@@ -74,6 +73,7 @@
import static org.opends.server.loggers.AccessLogger.*;
import static org.opends.server.util.ServerConstants.*;
import static org.opends.server.util.StaticUtils.*;
+import static org.opends.server.workflowelement.localbackend.LocalBackendWorkflowElement.*;
/**
* This class defines an operation that may be used to locate entries in the
@@ -889,9 +889,9 @@
buffer.append(", baseDN=");
buffer.append(rawBaseDN);
buffer.append(", scope=");
- buffer.append(scope.toString());
+ buffer.append(scope);
buffer.append(", filter=");
- buffer.append(rawFilter.toString());
+ buffer.append(rawFilter);
buffer.append(")");
}
@@ -1089,17 +1089,7 @@
return;
}
- // Retrieve the network group attached to the client connection
- // and get a workflow to process the operation.
- Workflow workflow = NetworkGroup.getWorkflowCandidate(baseDN);
- if (workflow == null)
- {
- // We have found no workflow for the requested base DN, just return
- // a no such entry result code and stop the processing.
- updateOperationErrMsgAndResCode();
- return;
- }
- workflow.execute(this);
+ execute(this, baseDN);
}
catch(CanceledOperationException coe)
{
@@ -1162,14 +1152,9 @@
pluginConfigManager.invokePostResponseSearchPlugins(this);
}
-
- /**
- * Updates the error message and the result code of the operation.
- *
- * This method is called because no workflows were found to process
- * the operation.
- */
- private void updateOperationErrMsgAndResCode()
+ /** {@inheritDoc} */
+ @Override
+ public void updateOperationErrMsgAndResCode()
{
setResultCode(ResultCode.NO_SUCH_OBJECT);
appendErrorMessage(ERR_SEARCH_BASE_DOESNT_EXIST.get(getBaseDN()));
diff --git a/opendj3-server-dev/src/server/org/opends/server/core/Workflow.java b/opendj3-server-dev/src/server/org/opends/server/core/Workflow.java
deleted file mode 100644
index ecab32b..0000000
--- a/opendj3-server-dev/src/server/org/opends/server/core/Workflow.java
+++ /dev/null
@@ -1,67 +0,0 @@
-/*
- * CDDL HEADER START
- *
- * The contents of this file are subject to the terms of the
- * Common Development and Distribution License, Version 1.0 only
- * (the "License"). You may not use this file except in compliance
- * with the License.
- *
- * You can obtain a copy of the license at legal-notices/CDDLv1_0.txt
- * or http://forgerock.org/license/CDDLv1.0.html.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- *
- * When distributing Covered Code, include this CDDL HEADER in each
- * file and include the License file at legal-notices/CDDLv1_0.txt.
- * If applicable, add the following below this CDDL HEADER, with the
- * fields enclosed by brackets "[]" replaced with your own identifying
- * information:
- * Portions Copyright [yyyy] [name of copyright owner]
- *
- * CDDL HEADER END
- *
- *
- * Copyright 2008 Sun Microsystems, Inc.
- * Portions Copyright 2014 ForgeRock AS
- */
-package org.opends.server.core;
-
-import org.opends.server.types.DN;
-import org.opends.server.types.Operation;
-import org.opends.server.types.CanceledOperationException;
-
-/**
- * This class defines the workflow interface.
- *
- * Each workflow is a node in a workflow tree (aka workflow topology).
- * Each node in the tree is linked to a workflow object of the first implementation
- * and the base DN of the node is the base DN of the attached workflow object.
- * The relationship of the nodes in the tree is based on the base DNs of the nodes.
- * A workflow node is a subordinate of another workflow node when the base DN
- * of the former is a superior of the base DN of the latter.
- * Workflow topology are useful, for example, in subtree searches:
- * search is performed on a node as well as on all the subordinate nodes.
- */
-public interface Workflow
-{
- /**
- * Gets the base DN which identifies the set of data upon which the
- * workflow is to be executed.
- *
- * @return the base DN of the workflow
- */
- public DN getBaseDN();
-
-
- /**
- * Executes all the tasks defined by the workflow task tree for a given
- * operation.
- *
- * @param operation the operation to execute
- *
- * @throws CanceledOperationException if this operation should
- * be cancelled.
- */
- public void execute(Operation operation)
- throws CanceledOperationException;
-}
diff --git a/opendj3-server-dev/src/server/org/opends/server/core/WorkflowResultCode.java b/opendj3-server-dev/src/server/org/opends/server/core/WorkflowResultCode.java
index 636309f..f20e656 100644
--- a/opendj3-server-dev/src/server/org/opends/server/core/WorkflowResultCode.java
+++ b/opendj3-server-dev/src/server/org/opends/server/core/WorkflowResultCode.java
@@ -34,7 +34,7 @@
* This class implements the workflow result code. The workflow result code
* contains an LDAP result code along with an LDAP error message.
*/
-class WorkflowResultCode
+public class WorkflowResultCode
{
/** The global result code. */
private ResultCode resultCode = ResultCode.UNDEFINED;
@@ -58,7 +58,7 @@
* @param resultCode the initial value for the result code
* @param errorMessage the initial value for the error message
*/
- WorkflowResultCode(ResultCode resultCode, LocalizableMessageBuilder errorMessage)
+ public WorkflowResultCode(ResultCode resultCode, LocalizableMessageBuilder errorMessage)
{
this.resultCode = resultCode;
this.errorMessage = errorMessage;
@@ -106,7 +106,7 @@
* @param newErrorMessage the new error message associated to the new error code
* @return <code>true</code> if a referral result code must be turned into a reference entry
*/
- boolean elaborateGlobalResultCode(ResultCode newResultCode, LocalizableMessageBuilder newErrorMessage)
+ public boolean elaborateGlobalResultCode(ResultCode newResultCode, LocalizableMessageBuilder newErrorMessage)
{
// if global result code has not been set yet then just take the new
// result code as is
@@ -181,7 +181,7 @@
*
* @return the global result code.
*/
- ResultCode resultCode()
+ public ResultCode resultCode()
{
return resultCode;
}
@@ -191,7 +191,7 @@
*
* @return the global error message.
*/
- LocalizableMessageBuilder errorMessage()
+ public LocalizableMessageBuilder errorMessage()
{
return errorMessage;
}
diff --git a/opendj3-server-dev/src/server/org/opends/server/core/WorkflowTopology.java b/opendj3-server-dev/src/server/org/opends/server/core/WorkflowTopology.java
deleted file mode 100644
index 62c06a2..0000000
--- a/opendj3-server-dev/src/server/org/opends/server/core/WorkflowTopology.java
+++ /dev/null
@@ -1,161 +0,0 @@
-/*
- * CDDL HEADER START
- *
- * The contents of this file are subject to the terms of the
- * Common Development and Distribution License, Version 1.0 only
- * (the "License"). You may not use this file except in compliance
- * with the License.
- *
- * You can obtain a copy of the license at legal-notices/CDDLv1_0.txt
- * or http://forgerock.org/license/CDDLv1.0.html.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- *
- * When distributing Covered Code, include this CDDL HEADER in each
- * file and include the License file at legal-notices/CDDLv1_0.txt.
- * If applicable, add the following below this CDDL HEADER, with the
- * fields enclosed by brackets "[]" replaced with your own identifying
- * information:
- * Portions Copyright [yyyy] [name of copyright owner]
- *
- * CDDL HEADER END
- *
- *
- * Copyright 2008 Sun Microsystems, Inc.
- * Portions Copyright 2014 ForgeRock AS
- */
-package org.opends.server.core;
-
-import org.forgerock.opendj.ldap.SearchScope;
-import org.forgerock.util.Reject;
-import org.opends.server.types.CanceledOperationException;
-import org.opends.server.types.DN;
-import org.opends.server.types.Operation;
-import org.opends.server.workflowelement.localbackend.LocalBackendWorkflowElement;
-
-/**
- * This class is the base class used to build the workflow topology.
- * A workflow topology is a tree of workflows. Each node in the tree
- * is attached to a WorkflowImpl which contains the task tree (ie. the
- * processing).
- *
- * There are two types of workflow nodes. The first one is used to build
- * nodes in the workflow topology (WorkflowTopologyNode) and the second
- * one is used to implement the root DSE node (RootDseWorkflowTopology).
- */
-abstract class WorkflowTopology implements Workflow
-{
- /** The workflow identifier used by the configuration. */
- private final String workflowId;
-
- /** The root of the workflow task tree. */
- private final LocalBackendWorkflowElement rootWorkflowElement;
-
- /** The base DN of the data handled by the workflow. */
- private final DN baseDN;
-
- /**
- * Create a new instance of the workflow topology base class.
- *
- * @param backendId
- * the backendId
- * @param baseDN
- * identifies the data handled by the workflow
- * @param rootWorkflowElement
- * the root node of the task tree
- */
- protected WorkflowTopology(String backendId, DN baseDN, LocalBackendWorkflowElement rootWorkflowElement)
- {
- Reject.ifNull(rootWorkflowElement);
- // The workflow ID is "backendID + baseDN".
- // We cannot use backendID as workflow identifier because a backend
- // may handle several base DNs. We cannot use baseDN either because
- // we might want to configure several workflows handling the same
- // baseDN through different network groups.
- // So a mix of both backendID and baseDN should be ok.
- this.workflowId = backendId + "#" + baseDN;
- this.baseDN = baseDN;
- this.rootWorkflowElement = rootWorkflowElement;
- }
-
- /**
- * Gets the base DN of the workflow node. The base DN of the workflow
- * node is the base DN of the attached workflow implementation containing
- * the processing.
- *
- * @return the base DN of the workflow containing the processing.
- */
- @Override
- public DN getBaseDN()
- {
- return this.baseDN;
- }
-
- /**
- * Gets the root workflow element for test purpose only.
- *
- * @return the root workflow element.
- */
- LocalBackendWorkflowElement getRootWorkflowElement()
- {
- return rootWorkflowElement;
- }
-
- /**
- * Gets the workflow internal identifier.
- *
- * @return the workflow internal identifier
- */
- public String getWorkflowId()
- {
- return workflowId;
- }
-
- /**
- * Executes all the tasks defined by the workflow task tree for a given
- * operation.
- *
- * @param operation
- * the operation to execute
- * @throws CanceledOperationException
- * if this operation should be canceled.
- */
- @Override
- public void execute(Operation operation) throws CanceledOperationException
- {
- rootWorkflowElement.execute(operation);
- }
-
- /**
- * Elaborates a new search scope according to the current search scope.
- * The new scope is intended to be used for searches on subordinate
- * workflows.
- *
- * @param currentScope the current search scope
- * @return the new scope to use for searches on subordinate workflows,
- * <code>null</code> when current scope is 'base'
- */
- protected SearchScope elaborateScopeForSearchInSubordinates(SearchScope currentScope)
- {
- switch (currentScope.asEnum())
- {
- case BASE_OBJECT:
- return null;
- case SINGLE_LEVEL:
- return SearchScope.BASE_OBJECT;
- case SUBORDINATES:
- case WHOLE_SUBTREE:
- return SearchScope.WHOLE_SUBTREE;
- default:
- return currentScope;
- }
- }
-
- /** {@inheritDoc} */
- @Override
- public String toString()
- {
- return getClass().getSimpleName() + " workflow=" + workflowId;
- }
-
-}
diff --git a/opendj3-server-dev/src/server/org/opends/server/core/WorkflowTopologyNode.java b/opendj3-server-dev/src/server/org/opends/server/core/WorkflowTopologyNode.java
deleted file mode 100644
index 056d730..0000000
--- a/opendj3-server-dev/src/server/org/opends/server/core/WorkflowTopologyNode.java
+++ /dev/null
@@ -1,470 +0,0 @@
-/*
- * CDDL HEADER START
- *
- * The contents of this file are subject to the terms of the
- * Common Development and Distribution License, Version 1.0 only
- * (the "License"). You may not use this file except in compliance
- * with the License.
- *
- * You can obtain a copy of the license at legal-notices/CDDLv1_0.txt
- * or http://forgerock.org/license/CDDLv1.0.html.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- *
- * When distributing Covered Code, include this CDDL HEADER in each
- * file and include the License file at legal-notices/CDDLv1_0.txt.
- * If applicable, add the following below this CDDL HEADER, with the
- * fields enclosed by brackets "[]" replaced with your own identifying
- * information:
- * Portions Copyright [yyyy] [name of copyright owner]
- *
- * CDDL HEADER END
- *
- *
- * Copyright 2007-2008 Sun Microsystems, Inc.
- * Portions Copyright 2014 ForgeRock AS
- */
-package org.opends.server.core;
-
-import java.util.ArrayList;
-
-import org.forgerock.opendj.ldap.SearchScope;
-import org.opends.server.types.CanceledOperationException;
-import org.opends.server.types.DN;
-import org.opends.server.types.Operation;
-import org.opends.server.types.OperationType;
-import org.opends.server.workflowelement.localbackend.LocalBackendWorkflowElement;
-
-/**
- * This class implements a workflow node. A workflow node is used
- * to build a tree of workflows (aka workflow topology). Each node
- * may have a parent node and/or subordinate nodes. A node with no
- * parent is a naming context.
- *
- * Each node in the workflow topology is linked to a WorkflowImpl
- * which contains the real processing. The base DN of the workflow
- * node is the base DN of the related WorkflowImpl.
- *
- * How the workflow topology is built?
- * A workflow node is a subordinate of another workflow node when
- * the base DN of the former workflow is an ancestor of the base DN
- * of the latter workflow.
- *
- * A subtree search on a workflow node is performed on the node itself as
- * well as on all the subordinate nodes.
- */
-public class WorkflowTopologyNode extends WorkflowTopology
-{
- /** Parent node of the current workflow node. */
- private WorkflowTopologyNode parent;
-
- /** The list of subordinate nodes of the current workflow node. */
- private final ArrayList<WorkflowTopologyNode> subordinates = new ArrayList<WorkflowTopologyNode>();
-
- /**
- * Creates a new node for a workflow topology. The new node is initialized
- * with a WorkflowImpl which contains the real processing. Optionally, the
- * node may have tasks to be executed before and/or after the real processing.
- * In the current implementation, such pre and post workflow elements are not
- * used.
- *
- * @param backendId
- * the backendId
- * @param baseDN
- * identifies the data handled by the workflow
- * @param rootWorkflowElement
- * the root node of the task tree
- */
- public WorkflowTopologyNode(String backendId, DN baseDN, LocalBackendWorkflowElement rootWorkflowElement)
- {
- super(backendId, baseDN, rootWorkflowElement);
- }
-
- /**
- * Executes an operation on a set of data being identified by the
- * workflow node base DN.
- *
- * @param operation the operation to execute
- *
- * @throws CanceledOperationException if this operation should
- * be canceled.
- */
- @Override
- public void execute(Operation operation) throws CanceledOperationException
- {
- super.execute(operation);
-
- // For subtree search operation we need to go through the subordinate nodes.
- if (operation.getOperationType() == OperationType.SEARCH)
- {
- executeSearchOnSubordinates((SearchOperation) operation);
- }
- }
-
-
- /**
- * Executes a search operation on the subordinate workflows.
- *
- * @param searchOp the search operation to execute
- *
- * @throws CanceledOperationException if this operation should
- * be canceled.
- */
- private void executeSearchOnSubordinates(SearchOperation searchOp)
- throws CanceledOperationException {
- // If the scope of the search is 'base' then it's useless to search
- // in the subordinate workflows.
- SearchScope originalScope = searchOp.getScope();
- if (originalScope == SearchScope.BASE_OBJECT)
- {
- return;
- }
-
- // Elaborate the new search scope before executing the search operation
- // in the subordinate workflows.
- SearchScope newScope = elaborateScopeForSearchInSubordinates(originalScope);
- searchOp.setScope(newScope);
-
- // Let's search in the subordinate workflows.
- WorkflowResultCode workflowResultCode = new WorkflowResultCode(
- searchOp.getResultCode(), searchOp.getErrorMessage());
- DN originalBaseDN = searchOp.getBaseDN();
- for (WorkflowTopologyNode subordinate: getSubordinates())
- {
- // We have to change the operation request base DN to match the
- // subordinate workflow base DN. Otherwise the workflow will
- // return a no such entry result code as the operation request
- // base DN is a superior of the subordinate workflow base DN.
- DN subordinateDN = subordinate.getBaseDN();
-
- // If the new search scope is 'base' and the search base DN does not
- // map the subordinate workflow then skip the subordinate workflow.
- if (newScope == SearchScope.BASE_OBJECT
- && !subordinateDN.parent().equals(originalBaseDN))
- {
- continue;
- }
-
- // If the request base DN is not a subordinate of the subordinate
- // workflow base DN then do not search in the subordinate workflow.
- if (! originalBaseDN.isAncestorOf(subordinateDN))
- {
- continue;
- }
-
- // Set the new request base DN and do execute the
- // operation in the subordinate workflow.
- searchOp.setBaseDN(subordinateDN);
- subordinate.execute(searchOp);
- boolean sendReferenceEntry =
- workflowResultCode.elaborateGlobalResultCode(
- searchOp.getResultCode(), searchOp.getErrorMessage());
- if (sendReferenceEntry)
- {
- // TODO jdemendi - turn a referral result code into a reference entry
- // and send the reference entry to the client application
- }
- }
-
- // Now we are done with the operation, let's restore the original
- // base DN and search scope in the operation.
- searchOp.setBaseDN(originalBaseDN);
- searchOp.setScope(originalScope);
-
- // Update the operation result code and error message
- searchOp.setResultCode(workflowResultCode.resultCode());
- searchOp.setErrorMessage(workflowResultCode.errorMessage());
- }
-
-
- /**
- * Sets the parent workflow.
- *
- * @param parent the parent workflow of the current workflow
- */
- public void setParent(WorkflowTopologyNode parent)
- {
- this.parent = parent;
- }
-
-
- /**
- * Gets the parent workflow.
- *
- * @return the parent workflow.
- */
- public WorkflowTopologyNode getParent()
- {
- return parent;
- }
-
-
- /**
- * Indicates whether the root workflow element is encapsulating a private
- * local backend or not.
- *
- * @return <code>true</code> if the root workflow element encapsulates
- * a private local backend
- */
- public boolean isPrivate()
- {
- LocalBackendWorkflowElement rwe = getRootWorkflowElement();
- return rwe != null && rwe.isPrivate();
- }
-
-
- /**
- * Gets the base DN of the workflow that handles a given dn. The elected
- * workflow may be the current workflow or one of its subordinate workflows.
- *
- * @param dn the DN for which we are looking a parent DN
- * @return the base DN which is the parent of the <code>dn</code>,
- * <code>null</code> if no parent DN was found
- */
- DN getParentBaseDN(DN dn)
- {
- if (dn == null)
- {
- return null;
- }
-
- // Is the dn a subordinate of the current base DN?
- DN curBaseDN = getBaseDN();
- if (curBaseDN != null && dn.isDescendantOf(curBaseDN))
- {
- // The dn may be handled by the current workflow.
- // Now we have to check whether the dn is handled by a subordinate.
- for (WorkflowTopologyNode subordinate: getSubordinates())
- {
- final DN parentBaseDN = subordinate.getParentBaseDN(dn);
- if (parentBaseDN != null)
- {
- // the dn is handled by a subordinate
- return parentBaseDN;
- }
- }
-
- // no subordinate handle the DN, then it is handled by the current workflow
- return curBaseDN;
- }
- return null;
- }
-
- /**
- * Adds a workflow to the list of workflow subordinates without
- * additional control.
- *
- * @param newWorkflow the workflow to add to the subordinate list
- * @param parentWorkflow the parent workflow of the new workflow
- */
- private void addSubordinateNoCheck(WorkflowTopologyNode newWorkflow, WorkflowTopologyNode parentWorkflow)
- {
- subordinates.add(newWorkflow);
- newWorkflow.setParent(parentWorkflow);
- }
-
-
- /**
- * Adds a workflow to the subordinate list of the current workflow.
- * Before we can add the new workflow, we have to check whether
- * the new workflow is a parent workflow of any of the current
- * subordinates (if so, then we have to add the subordinate in the
- * subordinate list of the new workflow).
- *
- * @param newWorkflow the workflow to add in the subordinate list
- */
- private void addSubordinate(WorkflowTopologyNode newWorkflow)
- {
- if (newWorkflow == this)
- {
- // Do not try to add the workflow to itself.
- return;
- }
-
- // Check whether subordinates of current workflow should move to the
- // new workflow subordinate list.
- ArrayList<WorkflowTopologyNode> curSubordinateList =
- new ArrayList<WorkflowTopologyNode>(getSubordinates());
-
- for (WorkflowTopologyNode curSubordinate: curSubordinateList)
- {
- DN newDN = newWorkflow.getBaseDN();
- DN subordinateDN = curSubordinate.getBaseDN();
-
- if (newDN.equals(subordinateDN)) {
- // Do not try to add workflow when baseDNs are the same on both workflows.
- return;
- }
-
- if (subordinateDN.isDescendantOf(newDN))
- {
- removeSubordinate(curSubordinate);
- newWorkflow.addSubordinate(curSubordinate);
- }
- }
-
- // add the new workflow in the current workflow subordinate list
- addSubordinateNoCheck(newWorkflow, this);
- }
-
-
- /**
- * Remove a workflow from the subordinate list.
- *
- * @param subordinate the subordinate to remove from the subordinate list
- */
- private void removeSubordinate(WorkflowTopologyNode subordinate)
- {
- subordinates.remove(subordinate);
- }
-
-
- /**
- * Tries to insert a new workflow in the subordinate list of one of the
- * current workflow's subordinates, or in the current workflow's own subordinate list.
- *
- * @param newWorkflow the new workflow to insert
- *
- * @return <code>true</code> if the new workflow has been inserted
- * in any subordinate list
- */
- public boolean insertSubordinate(WorkflowTopologyNode newWorkflow)
- {
- DN parentBaseDN = getBaseDN();
- DN newBaseDN = newWorkflow.getBaseDN();
- if (newWorkflow == this
- || parentBaseDN.equals(newBaseDN)
- || !newBaseDN.isDescendantOf(parentBaseDN))
- {
- return false;
- }
-
- // the new workflow is a subordinate for this parent DN
- for (WorkflowTopologyNode subordinate : getSubordinates())
- {
- if (subordinate.insertSubordinate(newWorkflow))
- {
- // the newBaseDN is handled by a subordinate
- return true;
- }
- }
-
- // no subordinate handles the newBaseDN, so it is handled by the current workflow
- addSubordinate(newWorkflow);
- return true;
- }
-
-
- /**
- * Removes the current workflow from the parent subordinate list
- * and attaches the workflow subordinates to the parent workflow.
- *
- * Example: the workflow to remove is w2
- *
- * w1 w1
- * | / \
- * w2 ==> w3 w4
- * / \
- * w3 w4
- *
- * - Subordinate list of w1 is updated with w3 and w4.
- * - Parent workflow of w3 and w4 is now w1.
- */
- public void remove()
- {
- // First of all, remove the workflow from the parent subordinate list
- WorkflowTopologyNode parent = getParent();
- if (parent != null)
- {
- parent.removeSubordinate(this);
- }
-
- // Then set the parent of each subordinate and attach the subordinate to
- // the parent.
- for (WorkflowTopologyNode subordinate: getSubordinates())
- {
- subordinate.setParent(parent);
- if (parent != null)
- {
- parent.addSubordinateNoCheck(subordinate, parent);
- }
- }
- }
-
-
- /**
- * Gets the list of workflow subordinates.
- *
- * @return the list of workflow subordinates
- */
- public ArrayList<WorkflowTopologyNode> getSubordinates()
- {
- return subordinates;
- }
-
-
- /**
- * Gets the highest workflow in the topology that can handle the requestDN.
- * The highest workflow is either the current workflow or one of its
- * subordinates.
- *
- * @param requestDN The DN for which we search for a workflow
- * @return the highest workflow that can handle the requestDN
- * <code>null</code> if none was found
- */
- public WorkflowTopologyNode getWorkflowCandidate(DN requestDN)
- {
- DN baseDN = getParentBaseDN(requestDN);
- if (baseDN == null)
- {
- // the current workflow does not handle the requestDN
- return null;
- }
-
- // is there any subordinate that can handle the requestDN?
- for (WorkflowTopologyNode subordinate : getSubordinates())
- {
- WorkflowTopologyNode candidate = subordinate.getWorkflowCandidate(requestDN);
- if (candidate != null)
- {
- return candidate;
- }
- }
-
- // none of the subordinates can handle the requestDN, so the current
- // workflow is the best root workflow candidate
- return this;
- }
-
-
- /**
- * Dumps info from the current workflow for debugging purposes.
- *
- * @param leftMargin white spaces used to indent the traces
- * @return a string buffer that contains trace information
- */
- public StringBuilder toString(String leftMargin)
- {
- final StringBuilder sb = new StringBuilder();
- sb.append(leftMargin).append("Workflow ID = ").append(getWorkflowId()).append("\n");
- sb.append(leftMargin).append(" baseDN:[").append(" \"").append(getBaseDN()).append("\" ]\n");
- sb.append(leftMargin).append(" Root Workflow Element: ").append(getRootWorkflowElement()).append("\n");
- sb.append(leftMargin).append(" Parent: ").append(getParent()).append("\n");
-
- sb.append(leftMargin).append(" List of subordinates:\n");
- ArrayList<WorkflowTopologyNode> subordinates = getSubordinates();
- if (!subordinates.isEmpty())
- {
- for (WorkflowTopologyNode subordinate: getSubordinates())
- {
- sb.append(subordinate.toString(leftMargin + " "));
- }
- }
- else
- {
- sb.append(leftMargin).append(" NONE\n");
- }
- return sb;
- }
-
-}
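The class deleted above routed a request DN by walking the workflow topology: a node is a candidate only when the request DN is a descendant of its base DN, and the deepest matching subordinate wins (see getParentBaseDN() and getWorkflowCandidate() above). The following self-contained sketch restates that selection rule outside the server, using hypothetical class names and naive string DNs in place of org.opends.server.types.DN:

import java.util.ArrayList;
import java.util.List;

final class TopologyRoutingSketch
{
  static final class Node
  {
    final String baseDN; // "" models the null suffix
    final List<Node> subordinates = new ArrayList<Node>();

    Node(String baseDN)
    {
      this.baseDN = baseDN;
    }

    /** Returns the deepest node whose base DN is an ancestor of requestDN, or null. */
    Node candidate(String requestDN)
    {
      if (!isDescendantOf(requestDN, baseDN))
      {
        return null; // this subtree cannot handle the request
      }
      for (Node subordinate : subordinates)
      {
        Node deeper = subordinate.candidate(requestDN);
        if (deeper != null)
        {
          return deeper; // a subordinate handles the request more precisely
        }
      }
      return this; // no subordinate matches, so this node is the candidate
    }

    /** Naive ancestor test on string DNs (the real code uses DN.isDescendantOf()). */
    private static boolean isDescendantOf(String dn, String ancestor)
    {
      return ancestor.isEmpty() || dn.equals(ancestor) || dn.endsWith("," + ancestor);
    }
  }

  public static void main(String[] args)
  {
    Node suffix = new Node("dc=example,dc=com");
    Node people = new Node("ou=people,dc=example,dc=com");
    suffix.subordinates.add(people);
    // Prints "ou=people,dc=example,dc=com": the deepest matching node wins.
    System.out.println(suffix.candidate("uid=user.1,ou=people,dc=example,dc=com").baseDN);
  }
}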
diff --git a/opendj3-server-dev/src/server/org/opends/server/core/networkgroups/NetworkGroup.java b/opendj3-server-dev/src/server/org/opends/server/core/networkgroups/NetworkGroup.java
deleted file mode 100644
index 6318d85..0000000
--- a/opendj3-server-dev/src/server/org/opends/server/core/networkgroups/NetworkGroup.java
+++ /dev/null
@@ -1,403 +0,0 @@
-/*
- * CDDL HEADER START
- *
- * The contents of this file are subject to the terms of the
- * Common Development and Distribution License, Version 1.0 only
- * (the "License"). You may not use this file except in compliance
- * with the License.
- *
- * You can obtain a copy of the license at legal-notices/CDDLv1_0.txt
- * or http://forgerock.org/license/CDDLv1.0.html.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- *
- * When distributing Covered Code, include this CDDL HEADER in each
- * file and include the License file at legal-notices/CDDLv1_0.txt.
- * If applicable, add the following below this CDDL HEADER, with the
- * fields enclosed by brackets "[]" replaced with your own identifying
- * information:
- * Portions Copyright [yyyy] [name of copyright owner]
- *
- * CDDL HEADER END
- *
- *
- * Copyright 2007-2010 Sun Microsystems, Inc.
- * Portions Copyright 2014 ForgeRock AS
- */
-package org.opends.server.core.networkgroups;
-
-import java.util.TreeMap;
-
-import org.forgerock.i18n.LocalizableMessage;
-import org.forgerock.opendj.ldap.ResultCode;
-import org.opends.server.core.RootDseWorkflowTopology;
-import org.opends.server.core.Workflow;
-import org.opends.server.core.WorkflowTopologyNode;
-import org.opends.server.types.DN;
-import org.opends.server.types.DirectoryException;
-import org.opends.server.workflowelement.localbackend.LocalBackendWorkflowElement;
-
-import static org.forgerock.util.Reject.*;
-import static org.opends.messages.CoreMessages.*;
-
-/**
- * This class defines the network group. A network group is used to
- * categorize client connections. A network group is defined by a set of
- * criteria, a set of policies and a set of workflow nodes. A client
- * connection belongs to a network group whenever it satisfies all the
- * network group criteria. As soon as a client connection belongs to a
- * network group, it has to comply with all the network group policies.
- * Any cleared client operation can be routed to one of the network group
- * workflow nodes.
- */
-public class NetworkGroup
-{
-
- /**
- * The default network group has no criterion, no policy, and gives
- * access to all the workflows. The purpose of the default network
- * group is to allow new clients to perform a first operation before
- * they can be attached to a specific network group.
- */
- private static final String DEFAULT_NETWORK_GROUP_NAME = "default";
- private static NetworkGroup defaultNetworkGroup = new NetworkGroup(DEFAULT_NETWORK_GROUP_NAME);
-
- /**
- * Deregisters all network groups that have been registered. This
- * should be called when the server is shutting down.
- */
- public static void deregisterAllOnShutdown()
- {
- // Invalidate all NetworkGroups so they cannot accidentally be
- // used after a restart.
- defaultNetworkGroup.invalidate();
- defaultNetworkGroup = new NetworkGroup(DEFAULT_NETWORK_GROUP_NAME);
- }
-
- /**
- * Returns the default network group. The default network group is
- * always defined and has no criterion, no policy and provides full
- * access to all the registered workflows.
- *
- * @return the default network group
- */
- public static NetworkGroup getDefaultNetworkGroup()
- {
- return defaultNetworkGroup;
- }
-
- /** List of naming contexts handled by the network group. */
- private NetworkGroupNamingContexts namingContexts = new NetworkGroupNamingContexts();
-
- /** The network group internal identifier. */
- private final String networkGroupID;
-
- /**
- * Workflow nodes registered with the current network group.
- * Keys are workflowIDs.
- */
- private TreeMap<String, WorkflowTopologyNode> registeredWorkflowNodes =
- new TreeMap<String, WorkflowTopologyNode>();
-
- /** A lock to protect concurrent access to the registered Workflow nodes. */
- private final Object registeredWorkflowNodesLock = new Object();
-
- /**
- * The workflow node for the rootDSE entry. The RootDSE workflow node
- * is not stored in the list of registered workflow nodes.
- */
- private RootDseWorkflowTopology rootDSEWorkflowNode;
-
- /**
- * Creates a new system network group using the provided ID.
- *
- * @param networkGroupID
- * The network group internal identifier.
- */
- NetworkGroup(String networkGroupID)
- {
- this.networkGroupID = networkGroupID;
- }
-
- /**
- * Deregisters a workflow with the network group. The workflow to
- * deregister is identified by its baseDN.
- *
- * @param baseDN
- * the baseDN of the workflow to deregister, may be null
- */
- public void deregisterWorkflow(DN baseDN)
- {
- if (baseDN == null)
- {
- return;
- }
-
- if (baseDN.isRootDN())
- {
- deregisterWorkflow(rootDSEWorkflowNode);
- }
- else
- {
- WorkflowTopologyNode node = findWorkflowNode(baseDN);
- if (node != null)
- {
- // Call deregisterWorkflow() instead of deregisterWorkflowNode()
- // because we want the naming context list to be updated as well.
- deregisterWorkflow(node);
- }
- }
- }
-
- private WorkflowTopologyNode findWorkflowNode(DN baseDN)
- {
- synchronized (registeredWorkflowNodesLock)
- {
- for (WorkflowTopologyNode node : registeredWorkflowNodes.values())
- {
- if (node.getBaseDN().equals(baseDN))
- {
- return node;
- }
- }
- return null;
- }
- }
-
- /**
- * Returns the list of naming contexts handled by the network group.
- *
- * @return the list of naming contexts
- */
- public NetworkGroupNamingContexts getNamingContexts()
- {
- return namingContexts;
- }
-
- /**
- * Gets the highest workflow in the topology that can handle the
- * baseDN.
- *
- * @param baseDN
- * the base DN of the request
- * @return the highest workflow in the topology that can handle the
- * base DN, <code>null</code> if none was found
- */
- public static Workflow getWorkflowCandidate(DN baseDN)
- {
- return getDefaultNetworkGroup().getWorkflowCandidatePriv(baseDN);
- }
-
- private Workflow getWorkflowCandidatePriv(DN baseDN)
- {
- if (baseDN.isRootDN())
- {
- return rootDSEWorkflowNode;
- }
- // Search the highest workflow in the topology that can handle the baseDN.
- // First search the private workflows
- // The order is important to ensure that the admin network group
- // is not broken and can always find cn=config
- for (WorkflowTopologyNode curWorkflow : namingContexts.getPrivateNamingContexts())
- {
- WorkflowTopologyNode workflowCandidate = curWorkflow.getWorkflowCandidate(baseDN);
- if (workflowCandidate != null)
- {
- return workflowCandidate;
- }
- }
-
- // Not found, search the public
- for (WorkflowTopologyNode curWorkflow : namingContexts.getPublicNamingContexts())
- {
- WorkflowTopologyNode workflowCandidate = curWorkflow.getWorkflowCandidate(baseDN);
- if (workflowCandidate != null)
- {
- return workflowCandidate;
- }
- }
- return null;
- }
-
- /**
- * Checks whether the base DN of a new workflow to register is present
- * in a workflow already registered with the network group.
- *
- * @param workflowNode
- * the workflow to check
- * @throws DirectoryException
- * If the base DN of the workflow is already present in the
- * network group
- */
- private void checkNotRegistered(WorkflowTopologyNode workflowNode)
- throws DirectoryException
- {
- String workflowID = workflowNode.getWorkflowId();
- ifNull(workflowID);
-
- // The workflow base DN should not be already present in the
- // network group. Bypass the check for the private workflows...
- WorkflowTopologyNode node = findWorkflowNode(workflowNode.getBaseDN());
- if (node != null)
- {
- LocalizableMessage message = ERR_REGISTER_WORKFLOW_BASE_DN_ALREADY_EXISTS.get(
- workflowID, networkGroupID, node.getWorkflowId(), workflowNode.getBaseDN());
- throw new DirectoryException(ResultCode.UNWILLING_TO_PERFORM, message);
- }
- }
-
- /**
- * Deregisters a workflow node with the network group.
- *
- * @param workflow
- * the workflow node to deregister
- */
- private void deregisterWorkflow(Workflow workflow)
- {
- if (rootDSEWorkflowNode == workflow)
- {
- rootDSEWorkflowNode = null;
- return;
- }
-
- final WorkflowTopologyNode workflowNode = (WorkflowTopologyNode) workflow;
- deregisterWorkflowNode(workflowNode);
- // Remove it from the workflow topology.
- workflowNode.remove();
-
- rebuildNamingContextList();
- }
-
-
-
- /**
- * Removes the provided workflow node from the list of registered workflow nodes.
- *
- * @param workflowNode
- * the workflow node to deregister
- */
- private void deregisterWorkflowNode(WorkflowTopologyNode workflowNode)
- {
- synchronized (registeredWorkflowNodesLock)
- {
- TreeMap<String, WorkflowTopologyNode> newWorkflowNodes =
- new TreeMap<String, WorkflowTopologyNode>(registeredWorkflowNodes);
- newWorkflowNodes.remove(workflowNode.getWorkflowId());
- registeredWorkflowNodes = newWorkflowNodes;
- }
- }
-
-
-
- /**
- * We've seen parts of the server hold references to a NetworkGroup
- * during an in-core server restart. To help detect when this happens,
- * we null out the member variables, so we will fail fast with an NPE
- * if an invalidated NetworkGroup is used.
- */
- private void invalidate()
- {
- namingContexts = null;
- rootDSEWorkflowNode = null;
- registeredWorkflowNodes = null;
- }
-
- /**
- * Rebuilds the list of naming contexts handled by the network group.
- * This operation should be performed whenever a workflow topology has
- * been updated (workflow registration or de-registration).
- */
- private void rebuildNamingContextList()
- {
- namingContexts.resetLists();
-
- for (WorkflowTopologyNode workflowNode : registeredWorkflowNodes.values())
- {
- WorkflowTopologyNode parent = workflowNode.getParent();
- if (parent == null)
- {
- // a registered workflow with no parent is a naming context
- namingContexts.addNamingContext(workflowNode);
- }
- }
- }
-
-
-
- /**
- * Registers a workflow with the network group.
- *
- * @param backendId
- * the workflow backendId
- * @param baseDN
- * identifies the data handled by the workflow
- * @param rootWorkflowElement
- * the root node of the task tree
- * @throws DirectoryException
- * If the workflow ID for the provided workflow conflicts
- * with the workflow ID of an existing workflow or if the
- * base DN of the workflow is the same as the base DN of
- * another workflow already registered
- */
- public void registerWorkflow(String backendId, DN baseDN, LocalBackendWorkflowElement rootWorkflowElement)
- throws DirectoryException
- {
- if (baseDN.isRootDN())
- {
- // NOTE - The rootDSE workflow is not stored in the list of registered workflow nodes.
- rootDSEWorkflowNode = new RootDseWorkflowTopology(backendId, baseDN, rootWorkflowElement, namingContexts);
- return;
- }
-
- // Try to insert it in the workflow topology.
- WorkflowTopologyNode workflowNode = new WorkflowTopologyNode(backendId, baseDN, rootWorkflowElement);
- registerWorkflowNode(workflowNode);
-
- // Now add the workflow in the workflow topology...
- for (WorkflowTopologyNode curNode : registeredWorkflowNodes.values())
- {
- // Try to insert the new workflow under an existing workflow...
- if (!curNode.insertSubordinate(workflowNode))
- {
- // ... or try to insert the existing workflow below the new workflow
- workflowNode.insertSubordinate(curNode);
- }
- }
-
- rebuildNamingContextList();
- }
-
- /**
- * Registers a workflow node with the network group.
- *
- * @param workflowNode
- * the workflow node to register
- * @throws DirectoryException
- * If the workflow node ID for the provided workflow node
- * conflicts with the workflow node ID of an existing
- * workflow node.
- */
- private void registerWorkflowNode(WorkflowTopologyNode workflowNode)
- throws DirectoryException
- {
- String workflowID = workflowNode.getWorkflowId();
- ifNull(workflowID);
-
- synchronized (registeredWorkflowNodesLock)
- {
- if (registeredWorkflowNodes.containsKey(workflowID))
- {
- LocalizableMessage message = ERR_REGISTER_WORKFLOW_NODE_ALREADY_EXISTS.get(workflowID, networkGroupID);
- throw new DirectoryException(ResultCode.UNWILLING_TO_PERFORM, message);
- }
-
- checkNotRegistered(workflowNode);
-
- // All is fine, let's register the workflow
- TreeMap<String, WorkflowTopologyNode> newRegisteredWorkflowNodes =
- new TreeMap<String, WorkflowTopologyNode>(registeredWorkflowNodes);
- newRegisteredWorkflowNodes.put(workflowID, workflowNode);
- registeredWorkflowNodes = newRegisteredWorkflowNodes;
- }
- }
-}
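Both the deleted NetworkGroup above and the surviving LocalBackendWorkflowElement register and deregister entries with the same copy-on-write idiom: writers take a lock, copy the TreeMap, mutate the copy and swap the reference, so readers always see a complete snapshot. A minimal standalone sketch of the idiom follows, with hypothetical names and a volatile field added here to publish the swap (the original code leaves that visibility to the surrounding server machinery):

import java.util.TreeMap;

final class CopyOnWriteRegistrySketch
{
  /** Readers only ever see complete snapshots of this map. */
  private volatile TreeMap<String, Object> registered = new TreeMap<String, Object>();
  private final Object lock = new Object();

  void register(String id, Object element)
  {
    synchronized (lock)
    {
      TreeMap<String, Object> copy = new TreeMap<String, Object>(registered);
      copy.put(id, element);
      registered = copy; // publish the new snapshot
    }
  }

  void deregister(String id)
  {
    synchronized (lock)
    {
      TreeMap<String, Object> copy = new TreeMap<String, Object>(registered);
      copy.remove(id);
      registered = copy;
    }
  }

  Object get(String id)
  {
    return registered.get(id); // lock-free read of the current snapshot
  }
}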
diff --git a/opendj3-server-dev/src/server/org/opends/server/core/networkgroups/NetworkGroupNamingContexts.java b/opendj3-server-dev/src/server/org/opends/server/core/networkgroups/NetworkGroupNamingContexts.java
deleted file mode 100644
index d9418b7..0000000
--- a/opendj3-server-dev/src/server/org/opends/server/core/networkgroups/NetworkGroupNamingContexts.java
+++ /dev/null
@@ -1,162 +0,0 @@
-/*
- * CDDL HEADER START
- *
- * The contents of this file are subject to the terms of the
- * Common Development and Distribution License, Version 1.0 only
- * (the "License"). You may not use this file except in compliance
- * with the License.
- *
- * You can obtain a copy of the license at legal-notices/CDDLv1_0.txt
- * or http://forgerock.org/license/CDDLv1.0.html.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- *
- * When distributing Covered Code, include this CDDL HEADER in each
- * file and include the License file at legal-notices/CDDLv1_0.txt.
- * If applicable, add the following below this CDDL HEADER, with the
- * fields enclosed by brackets "[]" replaced with your own identifying
- * information:
- * Portions Copyright [yyyy] [name of copyright owner]
- *
- * CDDL HEADER END
- *
- *
- * Copyright 2008 Sun Microsystems, Inc.
- * Portions Copyright 2014 ForgeRock AS
- */
-package org.opends.server.core.networkgroups;
-
-import java.util.Collections;
-import java.util.List;
-import java.util.concurrent.CopyOnWriteArrayList;
-
-import org.opends.server.core.WorkflowTopologyNode;
-
-/**
- * This class defines a list of naming contexts for a network group.
- */
-public class NetworkGroupNamingContexts
-{
- /** List of naming contexts. */
- private final List<WorkflowTopologyNode> namingContexts = new CopyOnWriteArrayList<WorkflowTopologyNode>();
- /** If list of naming contexts is returned, ensure it is immutable. */
- private final List<WorkflowTopologyNode> _namingContexts = Collections.unmodifiableList(namingContexts);
-
- /** List of public naming contexts. */
- private final List<WorkflowTopologyNode> publicNamingContexts = new CopyOnWriteArrayList<WorkflowTopologyNode>();
- /** If list of public naming contexts is returned, ensure it is immutable. */
- private final List<WorkflowTopologyNode> _publicNamingContexts = Collections.unmodifiableList(publicNamingContexts);
-
- /** List of private naming contexts. */
- private final List<WorkflowTopologyNode> privateNamingContexts = new CopyOnWriteArrayList<WorkflowTopologyNode>();
- /** If list of private naming contexts is returned, ensure it is immutable. */
- private final List<WorkflowTopologyNode> _privateNamingContexts = Collections.unmodifiableList(privateNamingContexts);
-
- /**
- * Reset the list of naming contexts.
- */
- void resetLists()
- {
- namingContexts.clear();
- privateNamingContexts.clear();
- publicNamingContexts.clear();
- }
-
-
- /**
- * Adds a workflow to the list of naming contexts.
- *
- * @param workflow the workflow to add to the list of naming contexts
- */
- void addNamingContext(WorkflowTopologyNode workflow)
- {
- // add the workflow to the list of naming contexts
- namingContexts.add (workflow);
-
- // add the workflow to the private/public list of naming contexts
- if (workflow.isPrivate())
- {
- privateNamingContexts.add (workflow);
- }
- else
- {
- publicNamingContexts.add (workflow);
- }
- }
-
-
- /**
- * Get the list of naming contexts.
- *
- * <br>Note: the returned iterable instance is immutable and attempts to
- * remove elements will throw an UnsupportedOperationException.
- *
- * @return the list of all the naming contexts
- */
- public Iterable<WorkflowTopologyNode> getNamingContexts()
- {
- return _namingContexts;
- }
-
-
- /**
- * Get the list of private naming contexts.
- *
- * <br>Note: the returned iterable instance is immutable and attempts to
- * remove elements will throw an UnsupportedOperationException.
- *
- * @return the list of private naming contexts
- */
- public Iterable<WorkflowTopologyNode> getPrivateNamingContexts()
- {
- return _privateNamingContexts;
- }
-
-
- /**
- * Get the list of public naming contexts.
- *
- * <br>Note: the returned iterable instance is immutable and attempts to
- * remove elements will throw an UnsupportedOperationException.
- *
- * @return the list of public naming contexts
- */
- public Iterable<WorkflowTopologyNode> getPublicNamingContexts()
- {
- return _publicNamingContexts;
- }
-
-
- /**
- * Dumps info from the current network group for debugging purposes.
- *
- * @param leftMargin white spaces used to indent traces
- * @return a string buffer that contains trace information
- */
- public StringBuilder toString (String leftMargin)
- {
- StringBuilder sb = new StringBuilder();
- String newMargin = leftMargin + " ";
-
- sb.append(leftMargin).append("List of naming contexts:\n");
- for (WorkflowTopologyNode w: namingContexts)
- {
- sb.append (w.toString (newMargin));
- }
-
- sb.append(leftMargin).append("List of PRIVATE naming contexts:\n");
- for (WorkflowTopologyNode w: privateNamingContexts)
- {
- sb.append (w.toString (newMargin));
- }
-
- sb.append(leftMargin).append("List of PUBLIC naming contexts:\n");
- for (WorkflowTopologyNode w: publicNamingContexts)
- {
- sb.append (w.toString (newMargin));
- }
-
- return sb;
- }
-
-}
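The deleted class above pairs each CopyOnWriteArrayList with a permanently wrapped unmodifiable view, so internal code can mutate the lists while callers can only iterate them. A short standalone sketch of that pairing, using illustrative names only:

import java.util.Collections;
import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList;

final class ReadOnlyViewSketch
{
  /** Internal writers mutate this list; iteration stays safe during concurrent writes. */
  private final List<String> namingContexts = new CopyOnWriteArrayList<String>();
  /** Callers only ever receive this view; remove() on it throws UnsupportedOperationException. */
  private final List<String> namingContextsView = Collections.unmodifiableList(namingContexts);

  void addNamingContext(String baseDN)
  {
    namingContexts.add(baseDN);
  }

  Iterable<String> getNamingContexts()
  {
    return namingContextsView;
  }
}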
diff --git a/opendj3-server-dev/src/server/org/opends/server/core/networkgroups/package-info.java b/opendj3-server-dev/src/server/org/opends/server/core/networkgroups/package-info.java
deleted file mode 100644
index aa58579..0000000
--- a/opendj3-server-dev/src/server/org/opends/server/core/networkgroups/package-info.java
+++ /dev/null
@@ -1,48 +0,0 @@
-/*
- * CDDL HEADER START
- *
- * The contents of this file are subject to the terms of the
- * Common Development and Distribution License, Version 1.0 only
- * (the "License"). You may not use this file except in compliance
- * with the License.
- *
- * You can obtain a copy of the license at legal-notices/CDDLv1_0.txt
- * or http://forgerock.org/license/CDDLv1.0.html.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- *
- * When distributing Covered Code, include this CDDL HEADER in each
- * file and include the License file at legal-notices/CDDLv1_0.txt.
- * If applicable, add the following below this CDDL HEADER, with the
- * fields enclosed by brackets "[]" replaced with your own identifying
- * information:
- * Portions Copyright [yyyy] [name of copyright owner]
- *
- * CDDL HEADER END
- *
- *
- * Copyright 2008 Sun Microsystems, Inc.
- */
-
-
-
-/**
- * Contains various classes that comprise the network group criteria
- * and resource limits of the Directory Server codebase. This includes:
- * <BR>
- * <UL>
- * <LI>
- * The code that is invoked to initialize the network group criteria
- * and check if a connection matches a criteria.
- * </LI>
- * <LI>
- * The code that is invoked to initialize the network group resource limits
- * and check that a new connection or a new operation is within
- * the resource limits.
- * </LI>
- * </UL>
- */
-@org.opends.server.types.PublicAPI(
- stability=org.opends.server.types.StabilityLevel.PRIVATE)
-package org.opends.server.core.networkgroups;
-
diff --git a/opendj3-server-dev/src/server/org/opends/server/types/AbstractOperation.java b/opendj3-server-dev/src/server/org/opends/server/types/AbstractOperation.java
index 041a9c3..c4ae513 100644
--- a/opendj3-server-dev/src/server/org/opends/server/types/AbstractOperation.java
+++ b/opendj3-server-dev/src/server/org/opends/server/types/AbstractOperation.java
@@ -72,35 +72,22 @@
protected static final List<Control> NO_RESPONSE_CONTROLS =
new ArrayList<Control>(0);
- /**
- * The client connection with which this operation is associated.
- */
+ /** The client connection with which this operation is associated. */
protected final ClientConnection clientConnection;
- /**
- * The message ID for this operation.
- */
+ /** The message ID for this operation. */
protected final int messageID;
- /**
- * The operation ID for this operation.
- */
+ /** The operation ID for this operation. */
protected final long operationID;
- /**
- * Whether nanotime was used for this operation.
- */
+ /** Whether nanotime was used for this operation. */
protected final boolean useNanoTime;
- /**
- * The cancel request for this operation.
- */
+ /** The cancel request for this operation. */
protected CancelRequest cancelRequest;
-
- /**
- * The cancel result for this operation.
- */
+ /** The cancel result for this operation. */
protected CancelResult cancelResult;
/**
@@ -762,7 +749,6 @@
{
return true;
}
-
if (obj instanceof Operation)
{
Operation other = (Operation) obj;
@@ -771,7 +757,6 @@
return other.getOperationID() == operationID;
}
}
-
return false;
}
@@ -799,5 +784,13 @@
}
}
}
-}
+ /**
+ * Updates the error message and the result code of the operation. This method
+ * is called when no workflow was found to process the operation.
+ */
+ public void updateOperationErrMsgAndResCode()
+ {
+ // do nothing by default
+ }
+}
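Concrete operation classes can override this new no-op hook to report a meaningful error when no backend matches the request. The sketch below is purely illustrative, with simplified hypothetical types; the result codes and messages actually used are defined by the *OperationBasis classes changed elsewhere in this patch.

abstract class OperationSketch
{
  protected String resultCode;
  protected String errorMessage;

  /** Called when no backend was found to process the operation; no-op by default. */
  public void updateOperationErrMsgAndResCode()
  {
    // do nothing by default
  }
}

final class AddOperationSketch extends OperationSketch
{
  private final String entryDN;

  AddOperationSketch(String entryDN)
  {
    this.entryDN = entryDN;
  }

  @Override
  public void updateOperationErrMsgAndResCode()
  {
    // Illustrative values only: report that no backend handles the target entry.
    resultCode = "NO_SUCH_OBJECT";
    errorMessage = "No backend is registered to handle entry " + entryDN;
  }
}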
diff --git a/opendj3-server-dev/src/server/org/opends/server/workflowelement/localbackend/LocalBackendWorkflowElement.java b/opendj3-server-dev/src/server/org/opends/server/workflowelement/localbackend/LocalBackendWorkflowElement.java
index abed6ed..49fbdc5 100644
--- a/opendj3-server-dev/src/server/org/opends/server/workflowelement/localbackend/LocalBackendWorkflowElement.java
+++ b/opendj3-server-dev/src/server/org/opends/server/workflowelement/localbackend/LocalBackendWorkflowElement.java
@@ -27,16 +27,20 @@
package org.opends.server.workflowelement.localbackend;
import java.util.ArrayList;
+import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.TreeMap;
import org.forgerock.i18n.LocalizableMessage;
+import org.forgerock.i18n.LocalizableMessageBuilder;
import org.forgerock.i18n.LocalizableMessageDescriptor;
import org.forgerock.i18n.slf4j.LocalizedLogger;
import org.forgerock.opendj.ldap.ResultCode;
+import org.forgerock.opendj.ldap.SearchScope;
import org.opends.server.api.AccessControlHandler;
import org.opends.server.api.Backend;
+import org.opends.server.backends.RootDSEBackend;
import org.opends.server.controls.LDAPPostReadRequestControl;
import org.opends.server.controls.LDAPPostReadResponseControl;
import org.opends.server.controls.LDAPPreReadRequestControl;
@@ -47,17 +51,6 @@
import static org.opends.messages.CoreMessages.*;
/**
- * This class defines a workflow element, i.e. a task in a workflow.
- *
- * [outdated]
- * A workflow element can wrap a physical
- * repository such as a local backend, a remote LDAP server or a local LDIF
- * file. A workflow element can also be used to route operations.
- * This is the case for load balancing and distribution.
- * And workflow element can be used in a virtual environment to transform data
- * (DN and attribute renaming, attribute value renaming...).
- * [/outdated]
- *
 * This class defines a local backend workflow element, i.e. an entity that
 * handles the processing of an operation against a local backend.
*/
@@ -65,42 +58,30 @@
{
private static final LocalizedLogger logger = LocalizedLogger.getLoggerForThisClass();
- /**
- * An information indicating the type of the current workflow element. This
- * information is for debug and tooling purpose only.
- */
- private String workflowElementTypeInfo = "not defined";
-
- /** The workflow element identifier. */
- private String workflowElementID;
+ /** The backend's baseDN mapped by this object. */
+ private DN baseDN;
/** the backend associated with the local workflow element. */
private Backend<?> backend;
/** the set of local backend workflow elements registered with the server. */
- private static TreeMap<String, LocalBackendWorkflowElement> registeredLocalBackends =
- new TreeMap<String, LocalBackendWorkflowElement>();
+ private static TreeMap<DN, LocalBackendWorkflowElement> registeredLocalBackends =
+ new TreeMap<DN, LocalBackendWorkflowElement>();
/** A lock to guarantee safe concurrent access to the registeredLocalBackends variable. */
private static final Object registeredLocalBackendsLock = new Object();
- /** A string indicating the type of the workflow element. */
- private static final String BACKEND_WORKFLOW_ELEMENT = "Backend";
-
/**
* Initializes a new instance of the local backend workflow element.
- * This method is intended to be called by DirectoryServer when
- * workflow configuration mode is auto as opposed to
- * initializeWorkflowElement which is invoked when workflow
- * configuration mode is manual.
*
- * @param workflowElementID the workflow element identifier
- * @param backend the backend associated to that workflow element
+ * @param baseDN
+ * the backend's baseDN mapped by this object
+ * @param backend
+ * the backend associated to that workflow element
*/
- private void initialize(String workflowElementID, Backend<?> backend)
+ private void initialize(DN baseDN, Backend<?> backend)
{
- this.workflowElementID = workflowElementID;
- this.workflowElementTypeInfo = BACKEND_WORKFLOW_ELEMENT;
+ this.baseDN = baseDN;
this.backend = backend;
}
@@ -122,30 +103,27 @@
*/
public void finalizeWorkflowElement()
{
- this.workflowElementID = null;
- this.workflowElementTypeInfo = null;
+ this.baseDN = null;
this.backend = null;
}
/**
* Creates and registers a local backend with the server.
*
- * @param workflowElementID the identifier of the workflow element to create
- * @param backend the backend to associate with the local backend
- * workflow element
- *
- * @return the existing local backend workflow element if it was
- * already created or a newly created local backend workflow
- * element.
+ * @param baseDN
+ * the backend's baseDN mapped by this object
+ * @param backend
+ * the backend to associate with the local backend workflow element
+ * @return the existing local backend workflow element if it was already
+ * created or a newly created local backend workflow element.
*/
- public static LocalBackendWorkflowElement createAndRegister(
- String workflowElementID, Backend<?> backend)
+ public static LocalBackendWorkflowElement createAndRegister(DN baseDN, Backend<?> backend)
{
- LocalBackendWorkflowElement localBackend = registeredLocalBackends.get(workflowElementID);
+ LocalBackendWorkflowElement localBackend = registeredLocalBackends.get(baseDN);
if (localBackend == null)
{
localBackend = new LocalBackendWorkflowElement();
- localBackend.initialize(workflowElementID, backend);
+ localBackend.initialize(baseDN, backend);
registerLocalBackend(localBackend);
}
@@ -157,11 +135,12 @@
/**
* Removes a local backend that was registered with the server.
*
- * @param workflowElementID the identifier of the workflow element to remove
+ * @param baseDN
+ * the identifier of the workflow to remove
*/
- public static void remove(String workflowElementID)
+ public static void remove(DN baseDN)
{
- deregisterLocalBackend(workflowElementID);
+ deregisterLocalBackend(baseDN);
}
@@ -176,7 +155,7 @@
{
for (LocalBackendWorkflowElement localBackend : registeredLocalBackends.values())
{
- deregisterLocalBackend(localBackend.getWorkflowElementID());
+ deregisterLocalBackend(localBackend.getBaseDN());
}
}
}
@@ -457,13 +436,13 @@
{
synchronized (registeredLocalBackendsLock)
{
- String localBackendID = localBackend.getWorkflowElementID();
- LocalBackendWorkflowElement existingLocalBackend = registeredLocalBackends.get(localBackendID);
+ DN baseDN = localBackend.getBaseDN();
+ LocalBackendWorkflowElement existingLocalBackend = registeredLocalBackends.get(baseDN);
if (existingLocalBackend == null)
{
- TreeMap<String, LocalBackendWorkflowElement> newLocalBackends =
- new TreeMap<String, LocalBackendWorkflowElement>(registeredLocalBackends);
- newLocalBackends.put(localBackendID, localBackend);
+ TreeMap<DN, LocalBackendWorkflowElement> newLocalBackends =
+ new TreeMap<DN, LocalBackendWorkflowElement>(registeredLocalBackends);
+ newLocalBackends.put(baseDN, localBackend);
registeredLocalBackends = newLocalBackends;
}
}
@@ -474,25 +453,26 @@
/**
* Deregisters a local backend with the server.
*
- * @param workflowElementID the identifier of the workflow element to remove
+ * @param baseDN
+ * the identifier of the local backend to remove
*/
- private static void deregisterLocalBackend(String workflowElementID)
+ private static void deregisterLocalBackend(DN baseDN)
{
synchronized (registeredLocalBackendsLock)
{
- LocalBackendWorkflowElement existingLocalBackend = registeredLocalBackends.get(workflowElementID);
+ LocalBackendWorkflowElement existingLocalBackend = registeredLocalBackends.get(baseDN);
if (existingLocalBackend != null)
{
- TreeMap<String, LocalBackendWorkflowElement> newLocalBackends =
- new TreeMap<String, LocalBackendWorkflowElement>(registeredLocalBackends);
- newLocalBackends.remove(workflowElementID);
+ TreeMap<DN, LocalBackendWorkflowElement> newLocalBackends =
+ new TreeMap<DN, LocalBackendWorkflowElement>(registeredLocalBackends);
+ newLocalBackends.remove(baseDN);
registeredLocalBackends = newLocalBackends;
}
}
}
/**
- * Executes the workflow element for an operation.
+ * Executes the workflow for an operation.
*
* @param operation
* the operation to execute
@@ -576,9 +556,9 @@
*
* @return the workflow element identifier
*/
- public String getWorkflowElementID()
+ public DN getBaseDN()
{
- return workflowElementID;
+ return baseDN;
}
/**
@@ -641,13 +621,299 @@
}
}
+ /**
+ * Executes the supplied operation.
+ *
+ * @param operation
+ * the operation to execute
+ * @param entryDN
+ * the entry DN whose backend will be used
+ * @return true if the operation successfully executed, false otherwise
+ * @throws CanceledOperationException
+ * if this operation should be cancelled.
+ */
+ public static boolean execute(Operation operation, DN entryDN) throws CanceledOperationException
+ {
+ LocalBackendWorkflowElement workflow = getLocalBackendWorkflowElement(entryDN);
+ if (workflow == null)
+ {
+ // We have found no backend for the requested base DN,
+ // just return a no such entry result code and stop the processing.
+ if (operation instanceof AbstractOperation)
+ {
+ ((AbstractOperation) operation).updateOperationErrMsgAndResCode();
+ }
+ return false;
+ }
+
+ if (workflow.getBaseDN().isRootDN())
+ {
+ executeOnRootDSE(operation, workflow);
+ }
+ else
+ {
+ executeOnNonRootDSE(operation, workflow);
+ }
+ return true;
+ }
+
+ private static LocalBackendWorkflowElement getLocalBackendWorkflowElement(DN entryDN)
+ {
+ while (entryDN != null)
+ {
+ final LocalBackendWorkflowElement workflow = registeredLocalBackends.get(entryDN);
+ if (workflow != null)
+ {
+ return workflow;
+ }
+ entryDN = entryDN.parent();
+ }
+ return null;
+ }
+
+ /**
+ * Executes an operation on the root DSE entry.
+ *
+ * @param operation
+ * the operation to execute
+ * @param workflow
+ * the workflow in which to execute the operation
+ * @throws CanceledOperationException
+ * if this operation should be cancelled.
+ */
+ private static void executeOnRootDSE(Operation operation, LocalBackendWorkflowElement workflow)
+ throws CanceledOperationException
+ {
+ OperationType operationType = operation.getOperationType();
+ if (operationType == OperationType.SEARCH)
+ {
+ executeSearch((SearchOperation) operation, workflow);
+ }
+ else
+ {
+ workflow.execute(operation);
+ }
+ }
+
+ /**
+ * Executes a search operation on the root DSE entry.
+ *
+ * @param searchOp
+ * the operation to execute
+ * @param workflow
+ * the workflow in which to execute the operation
+ * @throws CanceledOperationException
+ * if this operation should be cancelled.
+ */
+ private static void executeSearch(SearchOperation searchOp, LocalBackendWorkflowElement workflow)
+ throws CanceledOperationException
+ {
+ // Keep the original search scope because we will alter it in the operation
+ SearchScope originalScope = searchOp.getScope();
+
+ // Search base?
+ // The root DSE entry itself is never returned unless the operation
+ // is a search base on the null suffix.
+ // is a base search on the null suffix.
+ {
+ workflow.execute(searchOp);
+ return;
+ }
+
+ // Create a workflow result code in case we need to perform search in
+ // subordinate workflows.
+ WorkflowResultCode workflowResultCode =
+ new WorkflowResultCode(searchOp.getResultCode(), searchOp.getErrorMessage());
+
+ // The search scope is not 'base', so let's do a search on all the public
+ // naming contexts with appropriate new search scope and new base DN.
+ SearchScope newScope = elaborateScopeForSearchInSubordinates(originalScope);
+ searchOp.setScope(newScope);
+ DN originalBaseDN = searchOp.getBaseDN();
+
+ for (LocalBackendWorkflowElement subordinate : getRootDSESubordinates())
+ {
+ // We have to change the operation request base DN to match the
+ // subordinate workflow base DN. Otherwise the workflow will
+ // return a no such entry result code as the operation request
+ // base DN is a superior of the workflow base DN!
+ DN ncDN = subordinate.getBaseDN();
+
+ // Set the new request base DN, then execute the operation
+ // in the naming context workflow.
+ searchOp.setBaseDN(ncDN);
+ execute(searchOp, ncDN);
+ boolean sendReferenceEntry = workflowResultCode.elaborateGlobalResultCode(
+ searchOp.getResultCode(), searchOp.getErrorMessage());
+ if (sendReferenceEntry)
+ {
+ // TODO jdemendi - turn a referral result code into a reference entry
+ // and send the reference entry to the client application
+ }
+ }
+
+ // Now restore the original request base DN and original search scope
+ searchOp.setBaseDN(originalBaseDN);
+ searchOp.setScope(originalScope);
+
+ // If the result code is still uninitialized (i.e. no naming context),
+ // we should return NO_SUCH_OBJECT
+ workflowResultCode.elaborateGlobalResultCode(
+ ResultCode.NO_SUCH_OBJECT, new LocalizableMessageBuilder(LocalizableMessage.EMPTY));
+
+ // Set the operation result code and error message
+ searchOp.setResultCode(workflowResultCode.resultCode());
+ searchOp.setErrorMessage(workflowResultCode.errorMessage());
+ }
+
+ private static Collection<LocalBackendWorkflowElement> getRootDSESubordinates()
+ {
+ final RootDSEBackend rootDSEBackend = DirectoryServer.getRootDSEBackend();
+
+ final List<LocalBackendWorkflowElement> results = new ArrayList<LocalBackendWorkflowElement>();
+ for (DN subordinateBaseDN : rootDSEBackend.getSubordinateBaseDNs().keySet())
+ {
+ results.add(registeredLocalBackends.get(subordinateBaseDN));
+ }
+ return results;
+ }
+
+ private static void executeOnNonRootDSE(Operation operation, LocalBackendWorkflowElement workflow)
+ throws CanceledOperationException
+ {
+ workflow.execute(operation);
+
+ // For a subtree search operation, we need to go through the subordinate nodes.
+ if (operation.getOperationType() == OperationType.SEARCH)
+ {
+ executeSearchOnSubordinates((SearchOperation) operation, workflow);
+ }
+ }
+
+ /**
+ * Executes a search operation on the subordinate workflows.
+ *
+ * @param searchOp
+ * the search operation to execute
+ * @param workflow
+ * the workflow element
+ * @throws CanceledOperationException
+ * if this operation should be canceled.
+ */
+ private static void executeSearchOnSubordinates(SearchOperation searchOp, LocalBackendWorkflowElement workflow)
+ throws CanceledOperationException {
+ // If the scope of the search is 'base' then it's useless to search
+ // in the subordinate workflows.
+ SearchScope originalScope = searchOp.getScope();
+ if (originalScope == SearchScope.BASE_OBJECT)
+ {
+ return;
+ }
+
+ // Elaborate the new search scope before executing the search operation
+ // in the subordinate workflows.
+ SearchScope newScope = elaborateScopeForSearchInSubordinates(originalScope);
+ searchOp.setScope(newScope);
+
+ // Let's search in the subordinate workflows.
+ WorkflowResultCode workflowResultCode = new WorkflowResultCode(
+ searchOp.getResultCode(), searchOp.getErrorMessage());
+ DN originalBaseDN = searchOp.getBaseDN();
+ for (LocalBackendWorkflowElement subordinate : getSubordinates(workflow))
+ {
+ // We have to change the operation request base DN to match the
+ // subordinate workflow base DN. Otherwise the workflow will
+ // return a no such entry result code as the operation request
+ // base DN is a superior of the subordinate workflow base DN.
+ DN subordinateDN = subordinate.getBaseDN();
+
+ // If the new search scope is 'base' and the search base DN is not the
+ // direct superior of the subordinate base DN, then skip the subordinate workflow.
+ if (newScope == SearchScope.BASE_OBJECT && !subordinateDN.parent().equals(originalBaseDN))
+ {
+ continue;
+ }
+
+ // If the request base DN is not a superior of the subordinate
+ // workflow base DN, then do not search in the subordinate workflow.
+ if (!originalBaseDN.isAncestorOf(subordinateDN))
+ {
+ continue;
+ }
+
+ // Set the new request base DN and execute the
+ // operation in the subordinate workflow.
+ searchOp.setBaseDN(subordinateDN);
+ execute(searchOp, subordinateDN);
+ boolean sendReferenceEntry =
+ workflowResultCode.elaborateGlobalResultCode(searchOp.getResultCode(), searchOp.getErrorMessage());
+ if (sendReferenceEntry)
+ {
+ // TODO jdemendi - turn a referral result code into a reference entry
+ // and send the reference entry to the client application
+ }
+ }
+
+ // Now we are done with the operation, let's restore the original
+ // base DN and search scope in the operation.
+ searchOp.setBaseDN(originalBaseDN);
+ searchOp.setScope(originalScope);
+
+ // Update the operation result code and error message
+ searchOp.setResultCode(workflowResultCode.resultCode());
+ searchOp.setErrorMessage(workflowResultCode.errorMessage());
+ }
+
+ private static Collection<LocalBackendWorkflowElement> getSubordinates(LocalBackendWorkflowElement workflow)
+ {
+ final DN baseDN = workflow.getBaseDN();
+ final Backend<?> backend = workflow.getBackend();
+
+ final ArrayList<LocalBackendWorkflowElement> results = new ArrayList<LocalBackendWorkflowElement>();
+ for (Backend<?> subordinate : backend.getSubordinateBackends())
+ {
+ for (DN subordinateDN : subordinate.getBaseDNs())
+ {
+ if (subordinateDN.isDescendantOf(baseDN))
+ {
+ results.add(registeredLocalBackends.get(subordinateDN));
+ }
+ }
+ }
+ return results;
+ }
+
+ /**
+ * Elaborates a new search scope according to the current search scope. The
+ * new scope is intended to be used for searches on subordinate workflows.
+ *
+ * @param currentScope
+ * the current search scope
+ * @return the new scope to use for searches on subordinate workflows,
+ * <code>null</code> when current scope is 'base'
+ */
+ private static SearchScope elaborateScopeForSearchInSubordinates(SearchScope currentScope)
+ {
+ switch (currentScope.asEnum())
+ {
+ case BASE_OBJECT:
+ return null;
+ case SINGLE_LEVEL:
+ return SearchScope.BASE_OBJECT;
+ case SUBORDINATES:
+ case WHOLE_SUBTREE:
+ return SearchScope.WHOLE_SUBTREE;
+ default:
+ return currentScope;
+ }
+ }
+
/** {@inheritDoc} */
@Override
public String toString()
{
return getClass().getSimpleName()
- + " backend=" + backend
- + " workflowElementID=" + this.workflowElementID
- + " workflowElementTypeInfo=" + this.workflowElementTypeInfo;
+ + " backend=" + this.backend
+ + " baseDN=" + this.baseDN;
}
}
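With the workflow topology removed, routing reduces to the lookup added above: starting from the entry DN, walk up the parent chain until a base DN registered in the TreeMap of local backend workflow elements is found. The standalone sketch below models that walk with string DNs and a naive parent() helper (it ignores escaped commas and the null suffix); the real code uses DN.parent() and DN keys.

import java.util.TreeMap;

final class DnRoutingSketch
{
  private final TreeMap<String, String> backendsByBaseDN = new TreeMap<String, String>();

  void register(String baseDN, String backendId)
  {
    backendsByBaseDN.put(baseDN, backendId);
  }

  /** Returns the backend registered for the closest ancestor of entryDN, or null. */
  String route(String entryDN)
  {
    for (String dn = entryDN; dn != null; dn = parent(dn))
    {
      String backendId = backendsByBaseDN.get(dn);
      if (backendId != null)
      {
        return backendId;
      }
    }
    return null;
  }

  /** Drops the leftmost RDN: "uid=x,dc=example,dc=com" becomes "dc=example,dc=com". */
  private static String parent(String dn)
  {
    int comma = dn.indexOf(',');
    return comma < 0 ? null : dn.substring(comma + 1);
  }

  public static void main(String[] args)
  {
    DnRoutingSketch routing = new DnRoutingSketch();
    routing.register("dc=example,dc=com", "userRoot");
    // Prints "userRoot": the closest registered ancestor of the entry DN wins.
    System.out.println(routing.route("uid=user.1,ou=people,dc=example,dc=com"));
  }
}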
diff --git a/opendj3-server-dev/tests/unit-tests-testng/src/server/org/opends/server/core/WorkflowTopologyTest.java b/opendj3-server-dev/tests/unit-tests-testng/src/server/org/opends/server/core/WorkflowTopologyTest.java
deleted file mode 100644
index 1d0511f..0000000
--- a/opendj3-server-dev/tests/unit-tests-testng/src/server/org/opends/server/core/WorkflowTopologyTest.java
+++ /dev/null
@@ -1,845 +0,0 @@
-/*
- * CDDL HEADER START
- *
- * The contents of this file are subject to the terms of the
- * Common Development and Distribution License, Version 1.0 only
- * (the "License"). You may not use this file except in compliance
- * with the License.
- *
- * You can obtain a copy of the license at legal-notices/CDDLv1_0.txt
- * or http://forgerock.org/license/CDDLv1.0.html.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- *
- * When distributing Covered Code, include this CDDL HEADER in each
- * file and include the License file at legal-notices/CDDLv1_0.txt.
- * If applicable, add the following below this CDDL HEADER, with the
- * fields enclosed by brackets "[]" replaced with your own identifying
- * information:
- * Portions Copyright [yyyy] [name of copyright owner]
- *
- * CDDL HEADER END
- *
- *
- * Copyright 2006-2008 Sun Microsystems, Inc.
- * Portions Copyright 2014 ForgeRock AS
- */
-package org.opends.server.core;
-
-import java.util.ArrayList;
-
-import org.forgerock.i18n.LocalizableMessageBuilder;
-import org.forgerock.opendj.ldap.ResultCode;
-import org.opends.server.TestCaseUtils;
-import org.opends.server.types.DN;
-import org.opends.server.util.UtilTestCase;
-import org.testng.annotations.BeforeClass;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-import static org.opends.server.workflowelement.localbackend.LocalBackendWorkflowElement.*;
-import static org.testng.Assert.*;
-
-/**
- * This set of tests checks that workflow topology is properly created.
- * The topology is based on DN hierarchical relationships. Once the topology
- * is created, we check that the route operation returns the best workflow
- * candidate for a given request base DN.
- */
-@SuppressWarnings("javadoc")
-public class WorkflowTopologyTest extends UtilTestCase
-{
- //===========================================================================
- //
- // B E F O R E C L A S S
- //
- //===========================================================================
-
- /**
- * Set up the environment for performing the tests in this suite.
- *
- * @throws Exception if the environment could not be set up.
- */
- @BeforeClass
- public void setUp()
- throws Exception
- {
- // This test suite depends on having the schema available,
- // so we'll start the server.
- TestCaseUtils.startServer();
- }
-
-
- //===========================================================================
- //
- // D A T A P R O V I D E R
- //
- //===========================================================================
-
- /**
- * Provide a set of DNs to create a single workflow. Each set of DNs contains
- * one baseDN for the new workflow to be created, one subordinateDN and one
- * unrelatedDN which has no hierarchical relationship with the baseDN.
- *
- * baseDN + subordinateDN + unrelatedDN
- *
- * Sample scenario for a test using this set of DNs:
- * 1) creating a workflow with the baseDN
- * 2) trying to fetch the workflow using the subordinateDN
- * 3) checking that the workflow cannot be candidate to route a request
- * with the unrelatedDN
- *
- * @return set of DNs
- * @throws Exception when DN.decode fails
- */
- @DataProvider(name = "DNSet_1")
- public Object[][] initDNSet_1()
- throws Exception
- {
- DN dnNull = DN.valueOf ("");
- DN baseDN1 = DN.valueOf ("o=test");
- DN subordinateDN1 = DN.valueOf ("ou=subtest,o=test");
- DN unrelatedDN = DN.valueOf ("o=dummy");
-
- // Sets of DNs
- Object[][] myData =
- {
- // SET 1
- // baseDN is null suffix. There is no unrelatedDN because any DN
- // is descendant of the null suffix.
- {
- dnNull,
- subordinateDN1,
- null
- },
-
- // SET 2
- // One baseDN, one subordinateDN and one unrelatedDN
- {
- baseDN1,
- subordinateDN1,
- unrelatedDN
- },
- };
-
- return myData;
- }
-
-
- /**
- * Provide a set of DNs to create a topology of 3 workflows, and the 3
- * workflows are in the same hierarchy of DNs: baseDN1 is the superior
- * of baseDN2 which is the superior of baseDN3:
- *
- * baseDN1 + subordinateDN1
- * |
- * baseDN2 + subordinateDN2 + unrelatedDN
- * |
- * baseDN3 + subordinateDN3
- *
- * Each baseDN has a subordinateDN: the workflow with the baseDN should be
- * the candidate for a request when request base DN is the subordinateDN.
- *
- * There is an unrelatedDN which has no hierarchical relationship with any
- * of the baseDNs. The unrelatedDN is used to check that none of the
- * workflows can be a candidate for the route when a request is using the
- * unrelatedDN.
- *
- * @return set of DNs
- * @throws Exception when DN.decode fails
- */
- @DataProvider(name = "DNSet_2")
- public Object[][] initDNSet_2()
- throws Exception
- {
- DN unrelatedDN = null;
- int nbElem = 3;
- DN[] baseDNs = new DN[nbElem];
- DN[] subordinateDNs = new DN[nbElem];
- DN rootDSE = null;
-
- // Create the topology of DNs:
- //
- // o=dummy ou=test1 (==> W1)
- // |
- // |
- // +--------------+
- // | |
- // | |
- // ou=subordinate1 ou=test2 (==> W2)
- // |
- // |
- // +--------------------+
- // | |
- // | |
- // ou=test3 (==> W3) ou=subordinate2
- // |
- // |
- // +--------------+
- // | |
- // | |
- // ou=subordinate3
- {
- String suffix = "ou=test1";
- String baseDN1 = suffix;
- String baseDN2 = "ou=test2," + baseDN1;
- String baseDN3 = "ou=test3," + baseDN2;
- String subordinateDN1 = "ou=subordinate1," + baseDN1;
- String subordinateDN2 = "ou=subordinate2," + baseDN2;
- String subordinateDN3 = "ou=subordinate3," + baseDN3;
-
- int i = 0;
- baseDNs[i] = DN.valueOf (baseDN1);
- subordinateDNs[i] = DN.valueOf (subordinateDN1);
- i++;
- baseDNs[i] = DN.valueOf (baseDN2);
- subordinateDNs[i] = DN.valueOf (subordinateDN2);
- i++;
- baseDNs[i] = DN.valueOf (baseDN3);
- subordinateDNs[i] = DN.valueOf (subordinateDN3);
-
- unrelatedDN = DN.valueOf ("o=dummy");
- rootDSE = DN.valueOf ("");
- }
-
- // Sets of DNs
- Object[][] myData =
- {
- // SET 1
- {
- baseDNs[0], baseDNs[1], baseDNs[2],
- subordinateDNs[0], subordinateDNs[1], subordinateDNs[2],
- unrelatedDN
- },
-
- // SET 2
- // Same as SET 1, but the first baseDN is the null suffix DN.
- // Hence there is no unrelatedDN as any DN is a subordinate of
- // the null suffix.
- {
- rootDSE, baseDNs[1], baseDNs[2],
- subordinateDNs[0], subordinateDNs[1], subordinateDNs[2],
- null
- }
- };
-
- return myData;
- }
-
-
- /**
- * Provide a set of DNs to create the following topology:
- *
- * [W1]
- * baseDN1
- * |
- * +---------+--------+
- * | |
- * | |
- * subordinateDN1 +------+------+
- * | |
- * [W2] [W3]
- * baseDN2 baseDN3
- * | |
- * | |
- * subordinateDN2 subordinateDN3
- *
- *
- * @return set of DNs
- * @throws Exception when DN.decode fails
- */
- @DataProvider(name = "DNSet_3")
- public Object[][] initDNSet_3()
- throws Exception
- {
- DN unrelatedDN = null;
- int nbElem = 3;
- DN[] baseDNs = new DN[nbElem];
- DN[] subordinateDNs = new DN[nbElem];
- DN rootDSE = null;
-
- // Create the topology of DNs:
- //
- // o=dummy dc=example,dc=com
- // |
- // |
- // +--------------+-----------------+
- // | | |
- // ou=subordinate1 ou=group ou=people
- // | |
- // | |
- // ou=subordinate2 ou=subordinate3
- {
- String suffix = "dc=example,dc=com";
- String baseDN1 = suffix;
- String baseDN2 = "ou=group," + baseDN1;
- String baseDN3 = "ou=people," + baseDN1;
- String subordinateDN1 = "ou=subordinate1," + baseDN1;
- String subordinateDN2 = "ou=subordinate2," + baseDN2;
- String subordinateDN3 = "ou=subordinate3," + baseDN3;
-
- int i = 0;
- baseDNs[i] = DN.valueOf (baseDN1);
- subordinateDNs[i] = DN.valueOf (subordinateDN1);
- i++;
- baseDNs[i] = DN.valueOf (baseDN2);
- subordinateDNs[i] = DN.valueOf (subordinateDN2);
- i++;
- baseDNs[i] = DN.valueOf (baseDN3);
- subordinateDNs[i] = DN.valueOf (subordinateDN3);
-
- unrelatedDN = DN.valueOf ("o=dummy");
- rootDSE = DN.valueOf ("");
- }
-
- // Sets of DNs
- Object[][] myData =
- {
- // SET 1
- //
- // o=dummy dc=example,dc=com
- // |
- // |
- // +--------------+-----------------+
- // | | |
- // ou=subordinate1 ou=group ou=people
- // | |
- // | |
- // ou=subordinate2 ou=subordinate3
- {
- baseDNs[0],
- baseDNs[1],
- baseDNs[2],
- subordinateDNs[0],
- subordinateDNs[1],
- subordinateDNs[2],
- unrelatedDN
- },
-
- // SET 2
- //
- // The top baseDN is the null suffix. Hence there is no unrelatedDN
- // as any DN is a subordinate of the null suffix.
- //
- // "" (rootDSE)
- // |
- // |
- // +--------------+-----------------+
- // | | |
- // ou=subordinate1 ou=group ou=people
- // | |
- // | |
- // ou=subordinate2 ou=subordinate3
- {
- rootDSE,
- baseDNs[1],
- baseDNs[2],
- subordinateDNs[0],
- subordinateDNs[1],
- subordinateDNs[2],
- null
- }
- };
-
- return myData;
- }
-
-
- /**
- * Provide a set of result codes to test the elaboration of the global
- * result code.
- *
- * @return set of result codes to test
- */
- @DataProvider(name = "ResultCodes_1")
- public Object[][] initResultCodes_1()
- {
- // Short names...
- ResultCode rcSuccess = ResultCode.SUCCESS;
- ResultCode rcNoSuchObject = ResultCode.NO_SUCH_OBJECT;
- ResultCode rcReferral = ResultCode.REFERRAL;
- ResultCode rcOther = ResultCode.ALIAS_PROBLEM;
- ResultCode rcOther2 = ResultCode.AUTHORIZATION_DENIED;
-
- // Sets of DNs
- Object[][] myData =
- {
- // received current expected
- // result code result code result code
- { rcSuccess, rcNoSuchObject, rcSuccess },
- { rcReferral, rcSuccess, rcSuccess },
- { rcSuccess, rcOther, rcOther },
- { rcNoSuchObject, rcSuccess, rcSuccess },
- { rcNoSuchObject, rcReferral, rcReferral },
- { rcNoSuchObject, rcOther, rcOther },
- { rcReferral, rcSuccess, rcSuccess },
- { rcReferral, rcReferral, rcSuccess },
- { rcReferral, rcNoSuchObject, rcReferral },
- { rcReferral, rcOther, rcOther },
- { rcOther, rcSuccess, rcOther },
- { rcOther, rcReferral, rcOther },
- { rcOther, rcNoSuchObject, rcOther },
- { rcOther, rcOther2, rcOther2 }
- };
-
- return myData;
- }
-
-
- //===========================================================================
- //
- // T E S T C A S E S
- //
- //===========================================================================
-
- /**
- * Create a single workflow using a baseDN. There is no workflow element
- * in the workflow nor in the DIT attached to the workflow. Once the
- * workflow has been created, we are trying to fetch it using the baseDN
- * and/or the subordinateDN and/or the unrelatedDN.
- *
- * @param baseDN baseDN of the workflow to create
- * @param subordinateDN a subordinate DN of baseDN
- * @param dummyDN a DN not registered in any workflow
- */
- @Test (dataProvider = "DNSet_1", groups = "virtual")
- public void createWorkflow_basic(
- DN baseDN,
- DN subordinateDN,
- DN dummyDN
- )
- {
- WorkflowTopologyNode workflowNode = newWorkflowTopologyNode(baseDN);
-
- // The base DN in the workflow should match baseDN parameter
- DN workflowBaseDN = workflowNode.getBaseDN();
- assertEquals (workflowBaseDN, baseDN);
-
- // There should be no parent workflow.
- WorkflowTopologyNode parent = workflowNode.getParent();
- assertEquals (parent, null);
-
- // The workflow should handle the baseDN and subordinateDN.
- DN readBaseDN = null;
- readBaseDN = workflowNode.getParentBaseDN (baseDN);
- assertEquals (readBaseDN, baseDN);
- readBaseDN = workflowNode.getParentBaseDN (subordinateDN);
- assertEquals (readBaseDN, baseDN);
-
- // The workflow should not handle the dummyDN.
- if (dummyDN != null)
- {
- readBaseDN = workflowNode.getParentBaseDN (dummyDN);
- assertNull (readBaseDN);
- }
- }
-
- /**
- * Create a topology with 2 workflows. The test case contains creation
- * of clean topologies as well as bad topologies (same baseDN for the parent
- * and subordinate, subordinate above parent...).
- * <pre>
- * W1 (baseDN)
- * |
- * |
- * W2 (subordinateDN)
- * </pre>
- *
- * There is no workflow element attached to the DITs.
- *
- * @param baseDN base DN for the parent workflow (W1)
- * @param subordinateDN base DN for the subordinate workflow (W2)
- * @param unrelatedDN base DN with no hierarchical relationship with any
- * of the two baseDNs; parameter may be null
- */
- @Test (dataProvider = "DNSet_1", groups = "virtual")
- public void createWorkflow_simpleTopology1(
- DN baseDN,
- DN subordinateDN,
- DN unrelatedDN
- )
- {
- WorkflowTopologyNode w1 = newWorkflowTopologyNode(baseDN);
- WorkflowTopologyNode w1bis = newWorkflowTopologyNode(baseDN);
- WorkflowTopologyNode w2 = newWorkflowTopologyNode(subordinateDN);
- WorkflowTopologyNode w3 = newWorkflowTopologyNode(unrelatedDN);
-
- // Try to create a topology with unrelated workflows:
- //
- // w1 (baseDN)
- // |
- // w3 (dnDummy)
- //
- // Insert should be rejected
- if (w3 != null)
- {
- assertFalse(w1.insertSubordinate(w3));
- }
-
- // Try to create a topology with the very same workflow:
- //
- // w1 (baseDN)
- // |
- // w1 (baseDN)
- //
- // Insert should be rejected
- assertFalse(w1.insertSubordinate(w1));
-
- // Try to create a topology with a workflow whose baseDN is the same as
- // the parent baseDN:
- //
- // w1 (baseDN)
- // |
- // w1bis (baseDN)
- //
- // Insert should be rejected
- assertFalse(w1.insertSubordinate(w1bis));
-
- // Try to create a topology where subordinate is above the parent:
- //
- // w2 (subordinateDN)
- // |
- // w1 (baseDN)
- //
- // Insert should be rejected
- assertFalse(w2.insertSubordinate(w1));
-
- // Try to create a clean topology:
- //
- // w1 (baseDN)
- // |
- // w2 (subordinateDN)
- //
- // Expected results:
- //
- // - insert should be working
- assertTrue(w1.insertSubordinate(w2));
-
- // - w1 should be the parent of w2
- WorkflowTopologyNode parent1 = w2.getParent();
- assertEquals (parent1, w1);
-
- // - w2 should be in the w1 subordinate list
- ArrayList<WorkflowTopologyNode> subordinates1 = w1.getSubordinates();
- assertEquals (subordinates1.size(), 1);
- assertEquals (subordinates1.get(0), w2);
-
- // - w2 should have no subordinate
- ArrayList<WorkflowTopologyNode> subordinates2 = w2.getSubordinates();
- assertEquals (subordinates2.size(), 0);
- }
-
- private WorkflowTopologyNode newWorkflowTopologyNode(DN baseDN)
- {
- if (baseDN != null)
- {
- final String workflowId = baseDN.toString();
- return new WorkflowTopologyNode(workflowId, baseDN, createAndRegister(workflowId, null));
- }
- return null;
- }
-
-
- /**
- * Create a topology with 3 workflows and check that we are getting the
- * right workflow for a given DN. Then remove a workflow in the chain and
- * check that the topology is properly updated in terms of parent/subordinate
- * links.
- * <pre>
- * W1 (baseDN1)
- * |
- * +----> subordinateDN1
- * |
- * W2 (baseDN2)
- * |
- * +----> subordinateDN2
- * |
- * W3 (baseDN3)
- * |
- * +----> subordinateDN3
- * |
- * </pre>
- *
- * There is no workflow element attached to the DITs.
- *
- * @param baseDN1 base DN for the top workflow (W1)
- * @param baseDN2 base DN for the first subordinate workflow (W2)
- * @param baseDN3 base DN for the second subordinate workflow (W3)
- * @param subordinateDN1 subordinate DN of baseDN1
- * @param subordinateDN2 subordinate DN of baseDN2
- * @param subordinateDN3 subordinate DN of baseDN3
- * @param unrelatedDN a DN not registered in any workflow
- */
- @Test (dataProvider = "DNSet_2", groups = "virtual")
- public void createWorkflow_simpleTopology2(
- DN baseDN1,
- DN baseDN2,
- DN baseDN3,
- DN subordinateDN1,
- DN subordinateDN2,
- DN subordinateDN3,
- DN unrelatedDN
- )
- {
- WorkflowTopologyNode w1 = newWorkflowTopologyNode(baseDN1);
- WorkflowTopologyNode w2 = newWorkflowTopologyNode(baseDN2);
- WorkflowTopologyNode w3 = newWorkflowTopologyNode(baseDN3);
-
- // insert status
- boolean insert;
-
- // Create a first topology with:
- //
- // w1 (baseDN1)
- // |
- // w3 (baseDN3)
- //
- insert = w1.insertSubordinate (w3);
- assertEquals (insert, true);
-
- // Now insert w2 between w1 and w3
- //
- // w1 (baseDN1)
- // |
- // w2 (baseDN2)
- // |
- // w3 (baseDN3)
- //
- insert = w1.insertSubordinate (w2);
- assertEquals (insert, true);
-
- // Check the topology:
- // - w1 has no parent and has only w2 as subordinate
- WorkflowTopologyNode parent = w1.getParent();
- assertNull (parent);
- ArrayList<WorkflowTopologyNode> subordinates = w1.getSubordinates();
- assertEquals (subordinates.size(), 1);
- assertEquals (subordinates.get(0), w2);
-
- // - w2 has w1 as parent and w3 as subordinate
- parent = w2.getParent();
- assertEquals (parent, w1);
- subordinates = w2.getSubordinates();
- assertEquals (subordinates.size(), 1);
- assertEquals (subordinates.get(0), w3);
-
- // - w3 has w2 as parent and no subordinate
- parent = w3.getParent();
- assertEquals (parent, w2);
- subordinates = w3.getSubordinates();
- assertEquals (subordinates.size(), 0);
-
- // ======================================================
- // Topology is clean, now let's check the route algorithm.
- // ======================================================
-
- DN readDN1 = null;
- DN readDN2 = null;
- DN readDN3 = null;
-
- // subordinate1 should be handled by w1 only
- readDN1 = w1.getParentBaseDN (subordinateDN1);
- readDN2 = w1.getParentBaseDN (subordinateDN2);
- readDN3 = w1.getParentBaseDN (subordinateDN3);
- assertEquals (readDN1, baseDN1);
- assertEquals (readDN2, baseDN2);
- assertEquals (readDN3, baseDN3);
-
- // subordinate2 should be handled by w2 only
- readDN1 = w2.getParentBaseDN (subordinateDN1);
- readDN2 = w2.getParentBaseDN (subordinateDN2);
- readDN3 = w2.getParentBaseDN (subordinateDN3);
- assertEquals (readDN1, null);
- assertEquals (readDN2, baseDN2);
- assertEquals (readDN3, baseDN3);
-
- // subordinate3 should be handled by w3 only
- readDN1 = w3.getParentBaseDN (subordinateDN1);
- readDN2 = w3.getParentBaseDN (subordinateDN2);
- readDN3 = w3.getParentBaseDN (subordinateDN3);
- assertEquals (readDN1, null);
- assertEquals (readDN2, null);
- assertEquals (readDN3, baseDN3);
-
- // unrelatedDN should be handled by none of the workflows
- readDN1 = w1.getParentBaseDN (unrelatedDN);
- readDN2 = w2.getParentBaseDN (unrelatedDN);
- readDN3 = w3.getParentBaseDN (unrelatedDN);
- assertEquals (readDN1, null);
- assertEquals (readDN2, null);
- assertEquals (readDN3, null);
-
- // ======================================================
- // Remove a workflow in the chain and check that
- // the route algorithm is still working
- // ======================================================
-
- // Remove w2...
- //
- // w1 (baseDN1) w1
- // | |
- // w2 (baseDN2) ==> |
- // | |
- // w3 (baseDN3) w3
- //
- w2.remove();
-
- // subordinate1 and subordinate2 should now be handled by w1 only
- readDN1 = w1.getParentBaseDN (subordinateDN1);
- readDN2 = w1.getParentBaseDN (subordinateDN2);
- readDN3 = w1.getParentBaseDN (subordinateDN3);
- assertEquals (readDN1, baseDN1);
- assertEquals (readDN2, baseDN1); // was baseDN2 before the removal...
- assertEquals (readDN3, baseDN3);
-
- // sanity check1
- // subordinate3 should be handled by w3 only
- readDN1 = w3.getParentBaseDN (subordinateDN1);
- readDN2 = w3.getParentBaseDN (subordinateDN2);
- readDN3 = w3.getParentBaseDN (subordinateDN3);
- assertEquals (readDN1, null);
- assertEquals (readDN2, null);
- assertEquals (readDN3, baseDN3);
-
- // sanity check2
- // unrelatedDN should be handled by none of the workflows
- readDN1 = w1.getParentBaseDN (unrelatedDN);
- readDN2 = w2.getParentBaseDN (unrelatedDN);
- readDN3 = w3.getParentBaseDN (unrelatedDN);
- assertEquals (readDN1, null);
- assertEquals (readDN2, null);
- assertEquals (readDN3, null);
-
- } // createWorkflow_simpleTopology2
-
-
- /**
- * Create a topology of workflows.
- * <pre>
- * W1
- * baseDN1
- * /\
- * / \
- * / \
- * W2 W3
- * baseDN2 baseDN3
- * </pre>
- *
- * There is no workflow element attached to the DITs.
- *
- * @param baseDN1 base DN for the top workflow (W1)
- * @param baseDN2 base DN for the first subordinate workflow (W2)
- * @param baseDN3 base DN for the second subordinate workflow (W3)
- * @param subordinateDN1 subordinate DN of baseDN1
- * @param subordinateDN2 subordinate DN of baseDN2
- * @param subordinateDN3 subordinate DN of baseDN3
- * @param unrelatedDN a DN not registered in any workflow
- */
- @Test (dataProvider = "DNSet_3", groups = "virtual")
- public void createWorkflow_complexTopology1(
- DN baseDN1,
- DN baseDN2,
- DN baseDN3,
- DN subordinateDN1,
- DN subordinateDN2,
- DN subordinateDN3,
- DN unrelatedDN
- )
- {
- WorkflowTopologyNode w1 = newWorkflowTopologyNode(baseDN1);
- WorkflowTopologyNode w2 = newWorkflowTopologyNode(baseDN2);
- WorkflowTopologyNode w3 = newWorkflowTopologyNode(baseDN3);
-
- // Put all the workflows in a pool
- WorkflowTopologyNode[] workflowPool = {w1, w2, w3};
-
- // Create the workflow topology: to do so, try to insert each workflow
- // in the other workflows. This is basically how workflow topology is
- // built by the network group.
- for (WorkflowTopologyNode parent: workflowPool)
- {
- for (WorkflowTopologyNode subordinate: workflowPool)
- {
- if (parent == subordinate)
- {
- // makes no sense to try to insert a workflow in itself!
- // let's do it anyway... but it should fail ;-)
- assertFalse(parent.insertSubordinate(parent));
- }
- else
- {
- parent.insertSubordinate(subordinate); // the return value is not checked here
- }
- }
- }
-
- // Check the topology
- // ------------------
-
- // W1 should have 2 subordinates: W2 and W3
- ArrayList<WorkflowTopologyNode> subordinates1 = w1.getSubordinates();
- assertEquals (subordinates1.size(), 2);
-
- // W2 and W3 should have no subordinate
- ArrayList<WorkflowTopologyNode> subordinates2 = w2.getSubordinates();
- assertEquals (subordinates2.size(), 0);
- ArrayList<WorkflowTopologyNode> subordinates3 = w3.getSubordinates();
- assertEquals (subordinates3.size(), 0);
-
- // W1 should be the parent of W2 and W3
- WorkflowTopologyNode parent2 = w2.getParent();
- assertEquals (parent2, w1);
- WorkflowTopologyNode parent3 = w3.getParent();
- assertEquals (parent3, w1);
-
- // Check the route algorithm
- // -------------------------
-
- // candidate for baseDN1 and subordinateBaseDN1 should be W1
- WorkflowTopologyNode candidate1 = w1.getWorkflowCandidate (baseDN1);
- assertEquals (candidate1, w1);
- candidate1 = w1.getWorkflowCandidate (subordinateDN1);
- assertEquals (candidate1, w1);
-
- // candidate for baseDN2/3 and subordinateBaseDN2/3 should be W2/3
- WorkflowTopologyNode candidate2 = w1.getWorkflowCandidate (baseDN2);
- assertEquals (candidate2, w2);
- candidate2 = w1.getWorkflowCandidate (subordinateDN2);
- assertEquals (candidate2, w2);
-
- WorkflowTopologyNode candidate3 = w1.getWorkflowCandidate (baseDN3);
- assertEquals (candidate3, w3);
- candidate3 = w1.getWorkflowCandidate (subordinateDN3);
- assertEquals (candidate3, w3);
-
- // there should be no candidate for dummyDN
- if (unrelatedDN != null)
- {
- WorkflowTopologyNode candidateDummy = w1.getWorkflowCandidate (unrelatedDN);
- assertEquals (candidateDummy, null);
- }
-
- // dump the topology
- StringBuilder sb = w1.toString ("");
- System.out.println (sb);
- }
-
- /**
- * Test the elaboration of the global result code by the workflow.
- */
- @Test (dataProvider = "ResultCodes_1", groups = "virtual")
- public void testGlobalResultCode(
- ResultCode receivedResultCode,
- ResultCode initialResultCode,
- ResultCode expectedGlobalResultCode
- )
- throws Exception
- {
- // Check the function that elaborates the global result code
- WorkflowResultCode globalResultCode = new WorkflowResultCode (
- initialResultCode, new LocalizableMessageBuilder(""));
- globalResultCode.elaborateGlobalResultCode (
- receivedResultCode, new LocalizableMessageBuilder(""));
- assertEquals (globalResultCode.resultCode(), expectedGlobalResultCode);
- }
-}
diff --git a/opendj3-server-dev/tests/unit-tests-testng/src/server/org/opends/server/core/networkgroups/NetworkGroupTest.java b/opendj3-server-dev/tests/unit-tests-testng/src/server/org/opends/server/core/networkgroups/NetworkGroupTest.java
deleted file mode 100644
index a8a46b7..0000000
--- a/opendj3-server-dev/tests/unit-tests-testng/src/server/org/opends/server/core/networkgroups/NetworkGroupTest.java
+++ /dev/null
@@ -1,430 +0,0 @@
-/*
- * CDDL HEADER START
- *
- * The contents of this file are subject to the terms of the
- * Common Development and Distribution License, Version 1.0 only
- * (the "License"). You may not use this file except in compliance
- * with the License.
- *
- * You can obtain a copy of the license at legal-notices/CDDLv1_0.txt
- * or http://forgerock.org/license/CDDLv1.0.html.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- *
- * When distributing Covered Code, include this CDDL HEADER in each
- * file and include the License file at legal-notices/CDDLv1_0.txt.
- * If applicable, add the following below this CDDL HEADER, with the
- * fields enclosed by brackets "[]" replaced with your own identifying
- * information:
- * Portions Copyright [yyyy] [name of copyright owner]
- *
- * CDDL HEADER END
- *
- *
- * Copyright 2006-2010 Sun Microsystems, Inc.
- * Portions Copyright 2011-2014 ForgeRock AS.
- */
-package org.opends.server.core.networkgroups;
-
-import java.util.ArrayList;
-
-import org.forgerock.opendj.ldap.ModificationType;
-import org.forgerock.opendj.ldap.ResultCode;
-import org.forgerock.opendj.ldap.SearchScope;
-import org.opends.server.DirectoryServerTestCase;
-import org.opends.server.TestCaseUtils;
-import org.opends.server.core.ModifyOperation;
-import org.opends.server.core.SearchOperation;
-import org.opends.server.core.Workflow;
-import org.opends.server.protocols.internal.InternalClientConnection;
-import org.opends.server.protocols.internal.SearchRequest;
-import org.opends.server.types.Attribute;
-import org.opends.server.types.Attributes;
-import org.opends.server.types.DN;
-import org.opends.server.types.DirectoryException;
-import org.opends.server.types.Modification;
-import org.opends.server.util.StaticUtils;
-import org.testng.annotations.BeforeClass;
-import org.testng.annotations.DataProvider;
-import org.testng.annotations.Test;
-
-import static org.opends.messages.CoreMessages.*;
-import static org.opends.server.config.ConfigConstants.*;
-import static org.opends.server.protocols.internal.InternalClientConnection.*;
-import static org.opends.server.protocols.internal.Requests.*;
-import static org.opends.server.workflowelement.localbackend.LocalBackendWorkflowElement.*;
-import static org.testng.Assert.*;
-
-/**
- * This set of tests tests the network groups.
- */
-@SuppressWarnings("javadoc")
-public class NetworkGroupTest extends DirectoryServerTestCase {
-
- @BeforeClass
- public void setUp() throws Exception
- {
- // This test suite depends on having the schema available,
- // so we'll start the server.
- TestCaseUtils.startServer();
- }
-
- /**
- * Provides information to create a network group with one workflow inside.
- *
- * Each set of DNs contains:
- * - one network group identifier
- * - one base DN for the workflow to register with the network group
- *
- */
- @DataProvider (name = "DNSet_0")
- public Object[][] initDNSet_0() throws Exception
- {
- // Network group ID
- String networkGroupID1 = "networkGroup1";
- String networkGroupID2 = "networkGroup2";
-
- // Workflow base DNs
- DN dn1 = DN.valueOf("o=test1");
- DN dn2 = DN.valueOf("o=test2");
-
- // Network group info
- return new Object[][] {
- // Test1: create a network group with the identifier networkGroupID1
- { networkGroupID1, dn1 },
-
- // Test2: create the same network group to check that previous
- // network group was properly cleaned.
- { networkGroupID1, dn1 },
-
- // Test3: create another network group
- { networkGroupID2, dn2 },
- };
- }
-
-
- /**
- * Provides sets of DNs used to look up a workflow in a network group.
- *
- * Each set of DNs is composed of:
- * - one baseDN
- * - one subordinateDN
- * - a boolean telling whether we expect to find a workflow for the baseDN
- *   in the network group
- *
- * @return set of DNs
- * @throws Exception when DN.valueOf fails
- */
- @DataProvider(name = "DNSet_1")
- public Object[][] initDNSet_1() throws Exception
- {
- DN dnRootDSE = DN.valueOf("");
- DN dnConfig = DN.valueOf("cn=config");
- DN dnMonitor = DN.valueOf("cn=monitor");
- DN dnSchema = DN.valueOf("cn=schema");
- DN dnTasks = DN.valueOf("cn=tasks");
- DN dnBackups = DN.valueOf("cn=backups");
- DN dnDummy = DN.valueOf("o=dummy_suffix");
-
- DN dnSubordinateConfig = DN.valueOf("cn=Work Queue,cn=config");
- DN dnSubordinateMonitor = DN.valueOf("cn=schema Backend,cn=monitor");
- DN dnSubordinateTasks = DN.valueOf("cn=Scheduled Tasks,cn=tasks");
- // No DN subordinate for schema because the schema backend is
- // currently empty.
- // No DN subordinate for cn=backups because by default there is no
- // child entry under cn=backups.
-
- // Sets of DNs
- return new Object[][] {
- { dnRootDSE, null, true, },
- { dnConfig, dnSubordinateConfig, true, },
- { dnMonitor, dnSubordinateMonitor, true, },
- { dnTasks, dnSubordinateTasks, true, },
- { dnSchema, null, true, },
- { dnBackups, null, true, },
- { dnDummy, null, false, },
- };
- }
-
- /**
- * Tests the network group registration.
- *
- * @param networkGroupID the ID of the network group to register
- * @param workflowBaseDN the base DN of the workflow node to register
- *        in the network group
- */
- @Test (dataProvider = "DNSet_0", groups = "virtual")
- public void testNetworkGroupRegistration(String networkGroupID, DN workflowBaseDN) throws Exception
- {
- NetworkGroup networkGroup = new NetworkGroup(networkGroupID);
- registerWorkflow(networkGroup, workflowBaseDN);
-
- try
- {
- registerWorkflow(networkGroup, workflowBaseDN);
- fail("DirectoryException should have been thrown on double registration");
- }
- catch (DirectoryException de)
- {
- assertTrue(StaticUtils.hasDescriptor(de.getMessageObject(),
- ERR_REGISTER_WORKFLOW_NODE_ALREADY_EXISTS));
- }
- }
-
- /**
- * This test checks that network groups are updated as appropriate when
- * backend base DNs are added or removed. When a new backend base DN is
- * added, the new suffix should be accessible for the route process, i.e.
- * a workflow should be created and become a potential candidate for the
- * route process. Similarly, when a backend base DN is removed, its associated
- * workflow should be removed; subsequently, any request targeting the
- * removed suffix should be rejected and a no-such-object result code
- * returned.
- */
- @Test
- public void testBackendBaseDNModification() throws Exception
- {
- String suffix = "dc=example,dc=com";
- String suffix2 = "o=networkgroup suffix";
- String backendBaseDNName = "ds-cfg-base-dn";
-
- // Initialize a backend with a base entry.
- TestCaseUtils.clearJEBackend(true, "userRoot", suffix);
-
- // Check that suffix is accessible while suffix2 is not.
- searchEntry(suffix, ResultCode.SUCCESS);
- searchEntry(suffix2, ResultCode.NO_SUCH_OBJECT);
-
- // Add a new suffix in the backend and create a base entry for the
- // new suffix.
- String backendConfigDN = "ds-cfg-backend-id=userRoot," + DN_BACKEND_BASE;
- modifyAttribute(backendConfigDN, ModificationType.ADD, backendBaseDNName, suffix2);
- addBaseEntry(suffix2, "networkgroup suffix");
-
- // Both old and new suffix should be accessible.
- searchEntry(suffix, ResultCode.SUCCESS);
- searchEntry(suffix2, ResultCode.SUCCESS);
-
- // Remove the new suffix...
- modifyAttribute(backendConfigDN, ModificationType.DELETE, backendBaseDNName, suffix2);
-
- // ...and check that the removed suffix is no longer accessible.
- searchEntry(suffix, ResultCode.SUCCESS);
- searchEntry(suffix2, ResultCode.NO_SUCH_OBJECT);
-
- // Replace the suffix with suffix2 in the backend
- modifyAttribute(backendConfigDN, ModificationType.REPLACE, backendBaseDNName, suffix2);
-
- // Now none of the suffixes are accessible: this means the entries
- // under the old suffix are not moved to the new suffix.
- searchEntry(suffix, ResultCode.NO_SUCH_OBJECT);
- searchEntry(suffix2, ResultCode.NO_SUCH_OBJECT);
-
- // Add a base entry for the new suffix
- addBaseEntry(suffix2, "networkgroup suffix");
-
- // The new suffix is accessible while the old one is not.
- searchEntry(suffix, ResultCode.NO_SUCH_OBJECT);
- searchEntry(suffix2, ResultCode.SUCCESS);
-
- // Reset the configuration with previous suffix
- modifyAttribute(backendConfigDN, ModificationType.REPLACE, backendBaseDNName, suffix);
- }
-
- /**
- * Tests the mechanism that assigns a network group to a client connection
- * by comparing priorities.
- * Creates 2 network groups with different priorities.
- */
- @Test(groups = "virtual")
- public void testNetworkGroupPriority() throws Exception
- {
- String ng1 = "group1";
- String ng2 = "group2";
- DN dn1 = DN.valueOf("o=test1");
- DN dn2 = DN.valueOf("o=test2");
-
- // Create and register the network group with the server.
- NetworkGroup networkGroup1 = new NetworkGroup(ng1);
- NetworkGroup networkGroup2 = new NetworkGroup(ng2);
-
- // Register the workflow with the network group.
- registerWorkflow(networkGroup1, dn1);
- registerWorkflow(networkGroup2, dn2);
- }
-
- private void registerWorkflow(NetworkGroup networkGroup, DN dn) throws DirectoryException
- {
- String workflowId = dn.toString();
- networkGroup.registerWorkflow(workflowId, dn, createAndRegister(workflowId, null));
- }
-
- /**
- * This test checks that the network group takes into account the
- * subordinate naming context defined in the RootDSEBackend.
- */
- @Test
- public void testRootDseSubordinateNamingContext() throws Exception
- {
- // Backends for the test
- String backend1 = "o=test-rootDSE-subordinate-naming-context-1";
- String backend2 = "o=test-rootDSE-subordinate-naming-context-2";
- String backendID1 = "test-rootDSE-subordinate-naming-context-1";
- String backendID2 = "test-rootDSE-subordinate-naming-context-2";
-
- TestCaseUtils.clearDataBackends();
-
- // At this point, the list of subordinate naming contexts is not defined
- // yet (null): any public backend should be visible. Create a backend
- // with a base entry and check that the test naming context is visible.
- TestCaseUtils.initializeMemoryBackend(backendID1, backend1, true);
- searchPublicNamingContexts(ResultCode.SUCCESS, 1);
-
- // Create another test backend and check that the new backend is visible
- TestCaseUtils.initializeMemoryBackend(backendID2, backend2, true);
- searchPublicNamingContexts(ResultCode.SUCCESS, 2);
-
- // Now add the backend1 naming context to the list of subordinate naming contexts.
- // This white list prevents backend2 from being visible.
- TestCaseUtils.dsconfig(
- "set-root-dse-backend-prop",
- "--set", "subordinate-base-dn:" + backend1);
- searchPublicNamingContexts(ResultCode.SUCCESS, 1);
-
- // === Cleaning
-
- // Reset the subordinate naming context list.
- // Both naming contexts should be visible again.
- TestCaseUtils.dsconfig(
- "set-root-dse-backend-prop",
- "--reset", "subordinate-base-dn");
- searchPublicNamingContexts(ResultCode.SUCCESS, 2);
-
- // Clean the test backends. No naming contexts remain.
- TestCaseUtils.clearMemoryBackend(backendID1);
- TestCaseUtils.clearMemoryBackend(backendID2);
- searchPublicNamingContexts(ResultCode.NO_SUCH_OBJECT, 0);
- }
-
-
- /**
- * Searches the list of naming contexts.
- *
- * @param expectedRC the expected result code
- * @param expectedNamingContexts the number of expected naming contexts
- */
- private void searchPublicNamingContexts(ResultCode expectedRC, int expectedNamingContexts) throws Exception
- {
- InternalClientConnection conn = InternalClientConnection.getRootConnection();
- SearchRequest request = newSearchRequest(DN.rootDN(), SearchScope.SINGLE_LEVEL);
- SearchOperation search = conn.processSearch(request);
-
- // Check the number of naming contexts found
- assertEquals(search.getResultCode(), expectedRC);
- if (expectedRC == ResultCode.SUCCESS)
- {
- assertEquals(search.getEntriesSent(), expectedNamingContexts);
- }
- }
-
-
- /**
- * Searches an entry on a given connection.
- *
- * @param baseDN the request base DN string
- * @param expectedRC the expected result code
- */
- private void searchEntry(String baseDN, ResultCode expectedRC) throws Exception
- {
- SearchRequest request = newSearchRequest(DN.valueOf(baseDN), SearchScope.BASE_OBJECT);
- SearchOperation search = getRootConnection().processSearch(request);
- assertEquals(search.getResultCode(), expectedRC);
- }
-
-
- /**
- * Creates a base entry for the given suffix.
- *
- * @param suffix the suffix for which the base entry is to be created
- */
- private void addBaseEntry(String suffix, String namingAttribute) throws Exception
- {
- TestCaseUtils.addEntry(
- "dn: " + suffix,
- "objectClass: top",
- "objectClass: organization",
- "o: " + namingAttribute);
- }
-
-
- /**
- * Adds/Deletes/Replaces an attribute in a given entry.
- *
- * @param baseDN the request base DN string
- * @param modType the modification type (add/delete/replace)
- * @param attributeName the name of the attribute to add/delete/replace
- * @param attributeValue the value of the attribute to add/delete/replace
- */
- private void modifyAttribute(
- String baseDN,
- ModificationType modType,
- String attributeName,
- String attributeValue
- ) throws Exception
- {
- ArrayList<Modification> mods = new ArrayList<Modification>();
- Attribute attributeToModify = Attributes.create(attributeName, attributeValue);
- mods.add(new Modification(modType, attributeToModify));
- ModifyOperation modifyOperation = getRootConnection().processModify(DN.valueOf(baseDN), mods);
- assertEquals(modifyOperation.getResultCode(), ResultCode.SUCCESS);
- }
-
-
- /**
- * Checks the DN routing through a network group.
- *
- * @param dnToSearch the DN of a workflow in the network group; may
- * be null
- * @param dnSubordinate a subordinate of dnToSearch
- * @param shouldExist true if we are supposed to find a workflow for
- * dnToSearch
- */
- @Test (dataProvider = "DNSet_1", groups = "virtual")
- public void doCheckNetworkGroup(
- DN dnToSearch,
- DN dnSubordinate,
- boolean shouldExist
- )
- {
- if (dnToSearch == null)
- {
- return;
- }
-
- // Let's retrieve the workflow that maps best the dnToSearch
- Workflow workflow = NetworkGroup.getWorkflowCandidate(dnToSearch);
- if (shouldExist)
- {
- assertNotNull(workflow);
- }
- else
- {
- assertNull(workflow);
- }
-
- // let's retrieve the workflow that handles the DN subordinate:
- // it should be the same as the one for dnToSearch
- if (dnSubordinate != null)
- {
- Workflow workflow2 = NetworkGroup.getWorkflowCandidate(dnSubordinate);
- assertEquals(workflow2, workflow);
- }
- }
-
-}
diff --git a/opendj3-server-dev/tests/unit-tests-testng/src/server/org/opends/server/workflowelement/localbackend/LocalBackendWorkflowElementTest.java b/opendj3-server-dev/tests/unit-tests-testng/src/server/org/opends/server/workflowelement/localbackend/LocalBackendWorkflowElementTest.java
new file mode 100644
index 0000000..494c30a
--- /dev/null
+++ b/opendj3-server-dev/tests/unit-tests-testng/src/server/org/opends/server/workflowelement/localbackend/LocalBackendWorkflowElementTest.java
@@ -0,0 +1,281 @@
+/*
+ * CDDL HEADER START
+ *
+ * The contents of this file are subject to the terms of the
+ * Common Development and Distribution License, Version 1.0 only
+ * (the "License"). You may not use this file except in compliance
+ * with the License.
+ *
+ * You can obtain a copy of the license at legal-notices/CDDLv1_0.txt
+ * or http://forgerock.org/license/CDDLv1.0.html.
+ * See the License for the specific language governing permissions
+ * and limitations under the License.
+ *
+ * When distributing Covered Code, include this CDDL HEADER in each
+ * file and include the License file at legal-notices/CDDLv1_0.txt.
+ * If applicable, add the following below this CDDL HEADER, with the
+ * fields enclosed by brackets "[]" replaced with your own identifying
+ * information:
+ * Portions Copyright [yyyy] [name of copyright owner]
+ *
+ * CDDL HEADER END
+ *
+ *
+ * Copyright 2006-2010 Sun Microsystems, Inc.
+ * Portions Copyright 2011-2014 ForgeRock AS.
+ */
+package org.opends.server.workflowelement.localbackend;
+
+import java.util.ArrayList;
+
+import org.forgerock.opendj.ldap.ModificationType;
+import org.forgerock.opendj.ldap.ResultCode;
+import org.forgerock.opendj.ldap.SearchScope;
+import org.opends.server.DirectoryServerTestCase;
+import org.opends.server.TestCaseUtils;
+import org.opends.server.core.ModifyOperation;
+import org.opends.server.core.SearchOperation;
+import org.opends.server.protocols.internal.SearchRequest;
+import org.opends.server.types.Attribute;
+import org.opends.server.types.Attributes;
+import org.opends.server.types.DN;
+import org.opends.server.types.DirectoryException;
+import org.opends.server.types.Modification;
+import org.testng.annotations.BeforeClass;
+import org.testng.annotations.Test;
+
+import static org.opends.server.config.ConfigConstants.*;
+import static org.opends.server.protocols.internal.InternalClientConnection.*;
+import static org.opends.server.protocols.internal.Requests.*;
+import static org.testng.Assert.*;
+
+/**
+ * This set of tests tests the LocalBackendWorkflowElement.
+ */
+@SuppressWarnings("javadoc")
+public class LocalBackendWorkflowElementTest extends DirectoryServerTestCase
+{
+
+ @BeforeClass
+ public void setUp() throws Exception
+ {
+ TestCaseUtils.startServer();
+ }
+
+ /**
+ * This test checks that workflows are updated as appropriate when backend
+ * base DNs are added or removed.
+ * <p>
+ * When a new backend base DN is added, the new suffix should be accessible
+ * for the route process, i.e. a workflow should be created and become a
+ * potential candidate for the route process.
+ * <p>
+ * Similarly, when a backend base DN is removed, its associated workflow should
+ * be removed; subsequently, any request targeting the removed suffix should
+ * be rejected and a no-such-object result code returned.
+ */
+ @Test
+ public void testBackendBaseDNModification() throws Exception
+ {
+ String suffix = "dc=example,dc=com";
+ String suffix2 = "o=workflow suffix";
+ String backendBaseDNName = "ds-cfg-base-dn";
+
+ // Initialize a backend with a base entry.
+ TestCaseUtils.clearJEBackend(true, "userRoot", suffix);
+
+ // Check that suffix is accessible while suffix2 is not.
+ searchEntry(suffix, ResultCode.SUCCESS);
+ searchEntry(suffix2, ResultCode.NO_SUCH_OBJECT);
+
+ // Add a new suffix in the backend and create a base entry for the new suffix
+ String backendConfigDN = "ds-cfg-backend-id=userRoot," + DN_BACKEND_BASE;
+ modifyAttribute(backendConfigDN, ModificationType.ADD, backendBaseDNName, suffix2);
+ addBaseEntry(suffix2, "workflow suffix");
+
+ // Both old and new suffix should be accessible.
+ searchEntry(suffix, ResultCode.SUCCESS);
+ searchEntry(suffix2, ResultCode.SUCCESS);
+
+ // Remove the new suffix...
+ modifyAttribute(backendConfigDN, ModificationType.DELETE, backendBaseDNName, suffix2);
+
+ // ...and check that the removed suffix is no longer accessible.
+ searchEntry(suffix, ResultCode.SUCCESS);
+ searchEntry(suffix2, ResultCode.NO_SUCH_OBJECT);
+
+ // Replace the suffix with suffix2 in the backend
+ modifyAttribute(backendConfigDN, ModificationType.REPLACE, backendBaseDNName, suffix2);
+
+ // Now none of the suffixes are accessible: this means the entries
+ // under the old suffix are not moved to the new suffix.
+ searchEntry(suffix, ResultCode.NO_SUCH_OBJECT);
+ searchEntry(suffix2, ResultCode.NO_SUCH_OBJECT);
+
+ // Add a base entry for the new suffix
+ addBaseEntry(suffix2, "workflow suffix");
+
+ // The new suffix is accessible while the old one is not.
+ searchEntry(suffix, ResultCode.NO_SUCH_OBJECT);
+ searchEntry(suffix2, ResultCode.SUCCESS);
+
+ // Reset the configuration with previous suffix
+ modifyAttribute(backendConfigDN, ModificationType.REPLACE, backendBaseDNName, suffix);
+ }
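
The modifications above write ds-cfg-base-dn directly on the backend configuration entry. For comparison, a hedged sketch of the same change driven through the TestCaseUtils.dsconfig helper used later in this class; the set-backend-prop subcommand, --backend-name flag and base-dn property names are assumptions, not something this patch verifies:

    // Hypothetical sketch: add an extra base DN to the userRoot backend via
    // dsconfig instead of a raw modify on cn=config. Names are assumed.
    TestCaseUtils.dsconfig(
        "set-backend-prop",
        "--backend-name", "userRoot",
        "--add", "base-dn:o=workflow suffix");
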
+
+ /**
+ * This test checks that suffixes located below another naming context
+ * (rather than directly below the root DSE) remain visible to searches
+ * on their parent DN.
+ */
+ @Test
+ public void testNonRootDseSubordinateNamingContext() throws Exception
+ {
+ // Backends for the test
+ String backendID1 = "test-dc-example-dc-com-subordinate1,dc=example,dc=com";
+ String backendID2 = "test-dc-example-dc-com-subordinate2,dc=example,dc=com";
+ String backend1 = "o=" + backendID1;
+ String backend2 = "o=" + backendID2;
+
+ try
+ {
+ TestCaseUtils.clearDataBackends();
+
+ // At this point, the list of subordinate naming contexts is not defined
+ // yet (null): any public backend should be visible. Create a backend
+ // with a base entry and check that the test naming context is visible.
+ TestCaseUtils.initializeMemoryBackend(backendID1, backend1, true);
+ searchEntries("dc=example,dc=com", ResultCode.SUCCESS, 1);
+
+ // Create another test backend and check that the new backend is visible
+ TestCaseUtils.initializeMemoryBackend(backendID2, backend2, true);
+ searchEntries("dc=example,dc=com", ResultCode.SUCCESS, 2);
+ }
+ finally
+ {
+ // Clean the test backends. No naming contexts remain.
+ TestCaseUtils.clearMemoryBackend(backendID1);
+ TestCaseUtils.clearMemoryBackend(backendID2);
+ searchEntries("dc=example,dc=com", ResultCode.NO_SUCH_OBJECT, 0);
+ }
+ }
+
+ /**
+ * This test checks that the workflow takes into account the subordinate
+ * naming context defined in the RootDSEBackend.
+ */
+ @Test
+ public void testRootDseSubordinateNamingContext() throws Exception
+ {
+ // Backends for the test
+ String backend1 = "o=test-rootDSE-subordinate-naming-context-1";
+ String backend2 = "o=test-rootDSE-subordinate-naming-context-2";
+ String backendID1 = "test-rootDSE-subordinate-naming-context-1";
+ String backendID2 = "test-rootDSE-subordinate-naming-context-2";
+
+ try
+ {
+ TestCaseUtils.clearDataBackends();
+
+ // At this point, the list of subordinate naming contexts is not defined
+ // yet (null): any public backend should be visible. Create a backend
+ // with a base entry and check that the test naming context is visible.
+ TestCaseUtils.initializeMemoryBackend(backendID1, backend1, true);
+ searchPublicNamingContexts(ResultCode.SUCCESS, 1);
+
+ // Create another test backend and check that the new backend is visible
+ TestCaseUtils.initializeMemoryBackend(backendID2, backend2, true);
+ searchPublicNamingContexts(ResultCode.SUCCESS, 2);
+
+ // Now add the backend1 naming context to the list of subordinate naming contexts.
+ // This white list prevents backend2 from being visible.
+ TestCaseUtils.dsconfig(
+ "set-root-dse-backend-prop",
+ "--set", "subordinate-base-dn:" + backend1);
+ searchPublicNamingContexts(ResultCode.SUCCESS, 1);
+
+ // === Cleaning
+
+ // Reset the subordinate naming context list.
+ // Both naming contexts should be visible again.
+ TestCaseUtils.dsconfig(
+ "set-root-dse-backend-prop",
+ "--reset", "subordinate-base-dn");
+ searchPublicNamingContexts(ResultCode.SUCCESS, 2);
+ }
+ finally
+ {
+ // Clean the test backends. No naming contexts remain.
+ TestCaseUtils.clearMemoryBackend(backendID1);
+ TestCaseUtils.clearMemoryBackend(backendID2);
+ searchPublicNamingContexts(ResultCode.NO_SUCH_OBJECT, 0);
+ }
+ }
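
Since subordinate-base-dn acts as a white list, more than one naming context can presumably be advertised at the same time. A hedged sketch, assuming the dsconfig --add flag appends values to this multi-valued property:

    // Hypothetical sketch: white-list both test naming contexts so that both
    // stay visible in the root DSE.
    TestCaseUtils.dsconfig(
        "set-root-dse-backend-prop",
        "--add", "subordinate-base-dn:" + backend1,
        "--add", "subordinate-base-dn:" + backend2);
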
+
+ /**
+ * Searches the list of naming contexts.
+ *
+ * @param expectedRC the expected result code
+ * @param expectedNamingContexts the number of expected naming contexts
+ */
+ private void searchPublicNamingContexts(ResultCode expectedRC, int expectedNamingContexts) throws Exception
+ {
+ searchEntries("", expectedRC, expectedNamingContexts);
+ }
+
+ private void searchEntries(String baseDN, ResultCode expectedRC, int expectedNbEntries) throws DirectoryException
+ {
+ SearchRequest request = newSearchRequest(DN.valueOf(baseDN), SearchScope.SINGLE_LEVEL);
+ SearchOperation search = getRootConnection().processSearch(request);
+
+ assertEquals(search.getResultCode(), expectedRC);
+ if (expectedRC == ResultCode.SUCCESS)
+ {
+ assertEquals(search.getEntriesSent(), expectedNbEntries);
+ }
+ }
+
+ /**
+ * Searches an entry using the root connection.
+ *
+ * @param baseDN the request base DN string
+ * @param expectedRC the expected result code
+ */
+ private void searchEntry(String baseDN, ResultCode expectedRC) throws Exception
+ {
+ SearchRequest request = newSearchRequest(DN.valueOf(baseDN), SearchScope.BASE_OBJECT);
+ SearchOperation search = getRootConnection().processSearch(request);
+ assertEquals(search.getResultCode(), expectedRC);
+ }
+
+ /**
+ * Creates a base entry for the given suffix.
+ *
+ * @param suffix the suffix for which the base entry is to be created
+ * @param namingAttribute the value of the "o" attribute for the base entry
+ */
+ private void addBaseEntry(String suffix, String namingAttribute) throws Exception
+ {
+ TestCaseUtils.addEntry(
+ "dn: " + suffix,
+ "objectClass: top",
+ "objectClass: organization",
+ "o: " + namingAttribute);
+ }
+
+ /**
+ * Adds/Deletes/Replaces an attribute in a given entry.
+ *
+ * @param baseDN the DN string of the entry to modify
+ * @param modType the modification type (add/delete/replace)
+ * @param attributeName the name of the attribute to add/delete/replace
+ * @param attributeValue the value of the attribute to add/delete/replace
+ */
+ private void modifyAttribute(String baseDN, ModificationType modType, String attributeName, String attributeValue)
+ throws Exception
+ {
+ ArrayList<Modification> mods = new ArrayList<Modification>();
+ Attribute attributeToModify = Attributes.create(attributeName, attributeValue);
+ mods.add(new Modification(modType, attributeToModify));
+ ModifyOperation modifyOperation = getRootConnection().processModify(DN.valueOf(baseDN), mods);
+ assertEquals(modifyOperation.getResultCode(), ResultCode.SUCCESS);
+ }
+}
\ No newline at end of file
--
Gitblit v1.10.0