From dc1ee0335a6bace8b0ffc4facc2327af7b129b62 Mon Sep 17 00:00:00 2001
From: gbellato <gbellato@localhost>
Date: Mon, 05 Nov 2007 09:24:47 +0000
Subject: [PATCH] Fix for 2263: Provide a way to initialize the schema between two servers.
---
opendj-sdk/opends/src/server/org/opends/server/replication/plugin/ReplicationBroker.java | 2
opendj-sdk/opends/src/server/org/opends/server/types/AttributeType.java | 21 ++
opendj-sdk/opends/src/server/org/opends/server/replication/plugin/ReplLDIFInputStream.java | 2
opendj-sdk/opends/src/server/org/opends/server/types/ObjectClass.java | 23 ++
opendj-sdk/opends/src/messages/messages/schema.properties | 1
opendj-sdk/opends/src/server/org/opends/server/backends/SchemaBackend.java | 465 ++++++++++++++++++++++++++++++++++++++++++++++++---
opendj-sdk/opends/tests/unit-tests-testng/src/server/org/opends/server/backends/SchemaBackendTestCase.java | 22 +
7 files changed, 498 insertions(+), 38 deletions(-)
diff --git a/opendj-sdk/opends/src/messages/messages/schema.properties b/opendj-sdk/opends/src/messages/messages/schema.properties
index faec7b3..171ec7f 100644
--- a/opendj-sdk/opends/src/messages/messages/schema.properties
+++ b/opendj-sdk/opends/src/messages/messages/schema.properties
@@ -910,3 +910,4 @@
SEVERE_WARN_ATTR_SYNTAX_GENERALIZED_TIME_ILLEGAL_TIME_278=The provided value \
%s is not a valid generalized time value because it represents an invalid \
time (e.g., a date that does not exist): %s
+NOTICE_SCHEMA_IMPORT_FAILED_279=A schema element could not be imported: %s, %s
diff --git a/opendj-sdk/opends/src/server/org/opends/server/backends/SchemaBackend.java b/opendj-sdk/opends/src/server/org/opends/server/backends/SchemaBackend.java
index 0d33093..b108606 100644
--- a/opendj-sdk/opends/src/server/org/opends/server/backends/SchemaBackend.java
+++ b/opendj-sdk/opends/src/server/org/opends/server/backends/SchemaBackend.java
@@ -47,6 +47,7 @@
import java.util.List;
import java.util.Set;
import java.util.TreeSet;
+import java.util.concurrent.ConcurrentHashMap;
import java.util.zip.Deflater;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;
@@ -80,18 +81,20 @@
import org.opends.server.schema.MatchingRuleUseSyntax;
import org.opends.server.schema.NameFormSyntax;
import org.opends.server.schema.ObjectClassSyntax;
-import org.opends.server.types.CryptoManagerException;
import org.opends.server.types.*;
import org.opends.server.util.DynamicConstants;
import org.opends.server.util.LDIFException;
+import org.opends.server.util.LDIFReader;
import org.opends.server.util.LDIFWriter;
import org.opends.server.util.Validator;
import static org.opends.messages.BackendMessages.*;
import static org.opends.messages.ConfigMessages.*;
+import static org.opends.messages.SchemaMessages.*;
import static org.opends.server.config.ConfigConstants.*;
import static org.opends.server.loggers.debug.DebugLogger.*;
import static org.opends.server.loggers.ErrorLogger.*;
+import static org.opends.server.schema.SchemaConstants.*;
import static org.opends.server.util.ServerConstants.*;
import static org.opends.server.util.StaticUtils.*;
@@ -118,6 +121,9 @@
"org.opends.server.backends.SchemaBackend";
+ private static final String CONFIG_SCHEMA_ELEMENTS_FILE = "02-config.ldif";
+
+
// The set of user-defined attributes that will be included in the schema
// entry.
@@ -644,7 +650,7 @@
{
if (entryDN.equals(baseDN))
{
- return getSchemaEntry(entryDN);
+ return getSchemaEntry(entryDN, false);
}
}
@@ -658,11 +664,14 @@
/**
* Generates and returns a schema entry for the Directory Server.
*
- * @param entryDN The DN to use for the generated entry.
+ * @param entryDN The DN to use for the generated entry.
+ * @param includeSchemaFile A boolean indicating if the X-SCHEMA-FILE
+ * extension should be used when generating
+ * the entry.
*
* @return The schema entry that was generated.
*/
- public Entry getSchemaEntry(DN entryDN)
+ public Entry getSchemaEntry(DN entryDN, boolean includeSchemaFile)
{
LinkedHashMap<AttributeType,List<Attribute>> userAttrs =
new LinkedHashMap<AttributeType,List<Attribute>>();
@@ -699,15 +708,45 @@
}
}
+ Schema schema = DirectoryServer.getSchema();
// Add the "attributeTypes" attribute.
LinkedHashSet<AttributeValue> valueSet =
DirectoryServer.getAttributeTypeSet();
+    // Add the X-SCHEMA-FILE extension to each attribute type definition if
+    // this was requested by the caller.
+ if (includeSchemaFile)
+ {
+ LinkedHashSet<AttributeValue> newValueSet =
+ new LinkedHashSet<AttributeValue>(valueSet.size());
+
+ for (AttributeValue value : valueSet)
+ {
+ try
+ {
+          // Decode the attribute type from this value, look it up in the
+          // server schema, and rebuild the value including the name of the
+          // schema file that defines it.
+ AttributeType attrType = AttributeTypeSyntax.decodeAttributeType(
+ value.getValue(), schema, false);
+ attrType = DirectoryServer.getAttributeType(attrType.getOID());
+
+ newValueSet.add(new AttributeValue(
+ attributeTypesType, attrType.getDefinitionWithFileName()));
+ }
+ catch (DirectoryException e)
+ {
+ newValueSet.add(value);
+ }
+ }
+ valueSet = newValueSet;
+ }
+
Attribute attr;
if(AttributeTypeSyntax.isStripSyntaxMinimumUpperBound())
attr = stripMinUpperBoundValues(valueSet);
- else
+ else
attr = new Attribute(attributeTypesType, ATTR_ATTRIBUTE_TYPES,
valueSet);
ArrayList<Attribute> attrList = new ArrayList<Attribute>(1);
@@ -721,9 +760,37 @@
userAttrs.put(attributeTypesType, attrList);
}
-
// Add the "objectClasses" attribute.
valueSet = DirectoryServer.getObjectClassSet();
+
+    // Add the X-SCHEMA-FILE extension to each objectclass definition if this
+    // was requested by the caller.
+ if (includeSchemaFile)
+ {
+ LinkedHashSet<AttributeValue> newValueSet =
+ new LinkedHashSet<AttributeValue>(valueSet.size());
+
+ for (AttributeValue value : valueSet)
+ {
+ try
+ {
+          // Decode the objectclass from this value, look it up in the
+          // server schema, and rebuild the value including the name of the
+          // schema file that defines it.
+ ObjectClass oc = ObjectClassSyntax.decodeObjectClass(
+ value.getValue(), schema, false);
+ oc = DirectoryServer.getObjectClass(oc.getOID());
+ newValueSet.add(new AttributeValue(
+ objectClassesType, oc.getDefinitionWithFileName()));
+ }
+ catch (DirectoryException e)
+ {
+ newValueSet.add(value);
+ }
+ }
+ valueSet = newValueSet;
+ }
+
attr = new Attribute(objectClassesType, ATTR_OBJECTCLASSES, valueSet);
attrList = new ArrayList<Attribute>(1);
attrList.add(attr);
@@ -1438,7 +1505,42 @@
}
- // If we've gotten here, then everything looks OK. We'll re-write all
+    // If we've gotten here, then everything looks OK.  Re-write all the
+    // modified schema files.
+ updateSchemaFiles(newSchema, modifiedSchemaFiles);
+
+ // Finally set DirectoryServer to use the new Schema.
+ DirectoryServer.setSchema(newSchema);
+
+
+ DN authzDN = modifyOperation.getAuthorizationDN();
+ if (authzDN == null)
+ {
+ authzDN = DN.nullDN();
+ }
+
+ modifiersName = new AttributeValue(modifiersNameType, authzDN.toString());
+ modifyTimestamp = GeneralizedTimeSyntax.createGeneralizedTimeValue(
+ System.currentTimeMillis());
+ }
+
+
+
+ /**
+ * Re-write all schema files using the provided new Schema and list of
+ * modified files.
+ *
+ * @param newSchema The new schema that should be used.
+ *
+ * @param modifiedSchemaFiles The list of files that should be modified.
+ *
+ * @throws DirectoryException When the new file cannot be written.
+ */
+ private void updateSchemaFiles(
+ Schema newSchema, TreeSet<String> modifiedSchemaFiles)
+ throws DirectoryException
+ {
+ // We'll re-write all
// impacted schema files by first creating them in a temporary location
// and then replacing the existing schema files with the new versions.
// If all that goes successfully, then activate the new schema.
@@ -1452,7 +1554,6 @@
}
installSchemaFiles(tempSchemaFiles);
- DirectoryServer.setSchema(newSchema);
}
catch (DirectoryException de)
{
@@ -1485,17 +1586,6 @@
// that we can use on startup to detect whether the schema files have been
// edited with the server offline.
Schema.writeConcatenatedSchema();
-
-
- DN authzDN = modifyOperation.getAuthorizationDN();
- if (authzDN == null)
- {
- authzDN = DN.nullDN();
- }
-
- modifiersName = new AttributeValue(modifiersNameType, authzDN.toString());
- modifyTimestamp = GeneralizedTimeSyntax.createGeneralizedTimeValue(
- System.currentTimeMillis());
}
@@ -3914,7 +4004,7 @@
// Get the schema entry and see if it matches the filter. If so, then send
// it to the client.
- Entry schemaEntry = getSchemaEntry(baseDN);
+ Entry schemaEntry = getSchemaEntry(baseDN, false);
SearchFilter filter = searchOperation.getFilter();
if (filter.matchesEntry(schemaEntry))
{
@@ -3989,7 +4079,7 @@
// writer when we're done.
try
{
- ldifWriter.writeEntry(getSchemaEntry(baseDNs[0]));
+ ldifWriter.writeEntry(getSchemaEntry(baseDNs[0], true));
}
catch (Exception e)
{
@@ -4027,9 +4117,7 @@
@Override()
public boolean supportsLDIFImport()
{
- // This backend does not support LDIF imports.
- // FIXME -- Should we support them?
- return false;
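+    // LDIF import is now supported so that the schema can be initialized
+    // from the schema of another server.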
+ return true;
}
@@ -4041,9 +4129,330 @@
public LDIFImportResult importLDIF(LDIFImportConfig importConfig)
throws DirectoryException
{
- // This backend does not support LDIF imports.
- Message message = ERR_SCHEMA_IMPORT_NOT_SUPPORTED.get();
- throw new DirectoryException(ResultCode.UNWILLING_TO_PERFORM, message);
+ LDIFReader reader;
+ try
+ {
+ reader = new LDIFReader(importConfig);
+ }
+ catch (Exception e)
+ {
+ Message message =
+ ERR_MEMORYBACKEND_CANNOT_CREATE_LDIF_READER.get(String.valueOf(e));
+ throw new DirectoryException(DirectoryServer.getServerErrorResultCode(),
+ message, e);
+ }
+
+
+ try
+ {
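+      // Read entries from the provided LDIF until the end of the input is
+      // reached, importing each of them into the schema.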
+ while (true)
+ {
+ Entry e = null;
+ try
+ {
+ e = reader.readEntry();
+ if (e == null)
+ {
+ break;
+ }
+ }
+ catch (LDIFException le)
+ {
+ if (! le.canContinueReading())
+ {
+ Message message =
+            ERR_MEMORYBACKEND_ERROR_READING_LDIF.get(String.valueOf(le));
+ throw new DirectoryException(
+ DirectoryServer.getServerErrorResultCode(),
+ message, le);
+ }
+ else
+ {
+ continue;
+ }
+ }
+
+ importEntry(e);
+ }
+
+ return new LDIFImportResult(reader.getEntriesRead(),
+ reader.getEntriesRejected(),
+ reader.getEntriesIgnored());
+ }
+ catch (DirectoryException de)
+ {
+ throw de;
+ }
+ catch (Exception e)
+ {
+ Message message =
+ ERR_MEMORYBACKEND_ERROR_DURING_IMPORT.get(String.valueOf(e));
+ throw new DirectoryException(DirectoryServer.getServerErrorResultCode(),
+ message, e);
+ }
+ finally
+ {
+ reader.close();
+ }
+ }
+
+
+ /**
+   * Import an entry into the schema by:
+   *   - duplicating the current schema
+   *   - iterating over each element of the newSchemaEntry and comparing
+   *     it with the existing schema
+   *   - registering any element that is missing or whose definition differs
+   *   - deregistering any element that is absent from the newSchemaEntry.
+   *
+   * FIXME: attributeTypes and objectClasses are the only elements
+   * currently taken into account.
+   *
+   * @param newSchemaEntry The entry to be imported.
+   *
+   * @throws DirectoryException When an element cannot be decoded or the
+   *                            updated schema files cannot be written.
+   */
+ private void importEntry(Entry newSchemaEntry)
+ throws DirectoryException
+ {
+ Schema schema = DirectoryServer.getSchema();
+ Schema newSchema = DirectoryServer.getSchema().duplicate();
+ TreeSet<String> modifiedSchemaFiles = new TreeSet<String>();
+
+ // Get the attributeTypes attribute from the entry.
+ AttributeTypeSyntax attrTypeSyntax;
+ try
+ {
+ attrTypeSyntax = (AttributeTypeSyntax)
+ schema.getSyntax(SYNTAX_ATTRIBUTE_TYPE_OID);
+ if (attrTypeSyntax == null)
+ {
+ attrTypeSyntax = new AttributeTypeSyntax();
+ attrTypeSyntax.initializeSyntax(null);
+ }
+ }
+ catch (Exception e)
+ {
+ if (debugEnabled())
+ {
+ TRACER.debugCaught(DebugLogLevel.ERROR, e);
+ }
+
+ attrTypeSyntax = new AttributeTypeSyntax();
+ }
+
+ AttributeType attributeAttrType =
+ schema.getAttributeType(ATTR_ATTRIBUTE_TYPES_LC);
+ if (attributeAttrType == null)
+ {
+ attributeAttrType =
+ DirectoryServer.getDefaultAttributeType(ATTR_ATTRIBUTE_TYPES,
+ attrTypeSyntax);
+ }
+
+ // loop on the attribute types in the entry just received
+ // and add them in the existing schema.
+ List<Attribute> attrList = newSchemaEntry.getAttribute(attributeAttrType);
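+    // Keep track of the OIDs of the schema elements defined in the imported
+    // entry so that elements missing from it can be removed afterwards.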
+ Set<String> oidList = new HashSet<String>(1000);
+ if ((attrList != null) && (! attrList.isEmpty()))
+ {
+ for (Attribute a : attrList)
+ {
+ // Look for attributetypes that could have been added to the schema
+ // or modified in the schema
+ for (AttributeValue v : a.getValues())
+ {
+ // Parse the attribute type.
+ AttributeType attrType = AttributeTypeSyntax.decodeAttributeType(
+ v.getValue(), schema, false);
+ String schemaFile = attrType.getSchemaFile();
+        if (CONFIG_SCHEMA_ELEMENTS_FILE.equals(schemaFile))
+ {
+ // Don't import the file containing the definitions of the
+ // Schema elements used for configuration because these
+ // definitions may vary between versions of OpenDS.
+ continue;
+ }
+
+ oidList.add(attrType.getOID());
+ try
+ {
+ // Register this attribute type in the new schema
+ // unless it is already defined with the same syntax.
+ AttributeType oldAttrType =
+ schema.getAttributeType(attrType.getOID());
+ if ((oldAttrType == null) ||
+ (!oldAttrType.toString().equals(attrType.toString())))
+ {
+ newSchema.registerAttributeType(attrType, true);
+
+ if (schemaFile != null)
+ {
+ modifiedSchemaFiles.add(schemaFile);
+ }
+ }
+ }
+ catch (DirectoryException de)
+ {
+ Message message =
+ NOTE_SCHEMA_IMPORT_FAILED.get(
+ attrType.toString(), de.getMessage());
+ logError(message);
+ }
+ catch (Exception e)
+ {
+ Message message =
+ NOTE_SCHEMA_IMPORT_FAILED.get(
+ attrType.toString(), e.getMessage());
+ logError(message);
+ }
+ }
+ }
+ }
+
+ // loop on all the attribute types in the current schema and delete
+ // them from the new schema if they are not in the imported schema entry.
+ ConcurrentHashMap<String, AttributeType> currentAttrTypes =
+ newSchema.getAttributeTypes();
+
+ for (AttributeType removeType : currentAttrTypes.values())
+ {
+ String schemaFile = removeType.getSchemaFile();
+      if (CONFIG_SCHEMA_ELEMENTS_FILE.equals(schemaFile))
+      {
+        // Don't remove the definitions of the Schema elements used for
+        // configuration because these definitions may vary between
+        // versions of OpenDS.
+ continue;
+ }
+ if (!oidList.contains(removeType.getOID()))
+ {
+ newSchema.deregisterAttributeType(removeType);
+
+ if (schemaFile != null)
+ {
+ modifiedSchemaFiles.add(schemaFile);
+ }
+ }
+ }
+
+ // loop on the objectClasses from the entry, search if they are
+ // already in the current schema, add them if not.
+ ObjectClassSyntax ocSyntax;
+ try
+ {
+ ocSyntax = (ObjectClassSyntax) schema.getSyntax(SYNTAX_OBJECTCLASS_OID);
+ if (ocSyntax == null)
+ {
+ ocSyntax = new ObjectClassSyntax();
+ ocSyntax.initializeSyntax(null);
+ }
+ }
+ catch (Exception e)
+ {
+ if (debugEnabled())
+ {
+ TRACER.debugCaught(DebugLogLevel.ERROR, e);
+ }
+
+ ocSyntax = new ObjectClassSyntax();
+ }
+
+ AttributeType objectclassAttrType =
+ schema.getAttributeType(ATTR_OBJECTCLASSES_LC);
+ if (objectclassAttrType == null)
+ {
+ objectclassAttrType =
+ DirectoryServer.getDefaultAttributeType(ATTR_OBJECTCLASSES,
+ ocSyntax);
+ }
+
+ oidList.clear();
+ List<Attribute> ocList = newSchemaEntry.getAttribute(objectclassAttrType);
+ if ((ocList != null) && (! ocList.isEmpty()))
+ {
+ for (Attribute a : ocList)
+ {
+ for (AttributeValue v : a.getValues())
+ {
+ ObjectClass newObjectClass = ObjectClassSyntax.decodeObjectClass(
+ v.getValue(), schema, false);
+ String schemaFile = newObjectClass.getSchemaFile();
+        if (CONFIG_SCHEMA_ELEMENTS_FILE.equals(schemaFile))
+ {
+ // Don't import the file containing the definitions of the
+ // Schema elements used for configuration because these
+ // definitions may vary between versions of OpenDS.
+ continue;
+ }
+
+ oidList.add(newObjectClass.getOID());
+ try
+ {
+ // Register this ObjectClass in the new schema
+ // unless it is already defined with the same syntax.
+ ObjectClass oldObjectClass =
+ schema.getObjectClass(newObjectClass.getOID());
+ if ((oldObjectClass == null) ||
+ (!oldObjectClass.toString().equals(newObjectClass.toString())))
+ {
+ newSchema.registerObjectClass(newObjectClass, true);
+
+ if (schemaFile != null)
+ {
+ modifiedSchemaFiles.add(schemaFile);
+ }
+ }
+ }
+ catch (DirectoryException de)
+ {
+ Message message =
+ NOTE_SCHEMA_IMPORT_FAILED.get(
+ newObjectClass.toString(), de.getMessage());
+ logError(message);
+ }
+ catch (Exception e)
+ {
+ Message message =
+ NOTE_SCHEMA_IMPORT_FAILED.get(
+ newObjectClass.toString(), e.getMessage());
+ logError(message);
+ }
+ }
+ }
+ }
+
+    // loop on all the objectclasses in the current schema and delete
+    // them from the new schema if they are not in the imported schema entry.
+ ConcurrentHashMap<String, ObjectClass> currentObjectClasses =
+ newSchema.getObjectClasses();
+
+ for (ObjectClass removeClass : currentObjectClasses.values())
+ {
+ String schemaFile = removeClass.getSchemaFile();
+      if (CONFIG_SCHEMA_ELEMENTS_FILE.equals(schemaFile))
+      {
+        // Don't remove the definitions of the Schema elements used for
+        // configuration because these definitions may vary between
+        // versions of OpenDS.
+ continue;
+ }
+ if (!oidList.contains(removeClass.getOID()))
+ {
+ newSchema.deregisterObjectClass(removeClass);
+
+ if (schemaFile != null)
+ {
+ modifiedSchemaFiles.add(schemaFile);
+ }
+ }
+ }
+
+ // Finally, if there were some modifications, save the new schema
+ // in the Schema Files and update DirectoryServer.
+ if (!modifiedSchemaFiles.isEmpty())
+ {
+ updateSchemaFiles(newSchema, modifiedSchemaFiles);
+ DirectoryServer.setSchema(newSchema);
+ }
}
@@ -4948,7 +5357,7 @@
Set<DN> newBaseDNs;
try
{
- newBaseDNs = backendCfg.getSchemaEntryDN();
+ newBaseDNs = new HashSet<DN>(backendCfg.getSchemaEntryDN());
if (newBaseDNs.isEmpty())
{
newBaseDNs.add(DN.decode(DN_DEFAULT_SCHEMA_ROOT));
diff --git a/opendj-sdk/opends/src/server/org/opends/server/replication/plugin/ReplLDIFInputStream.java b/opendj-sdk/opends/src/server/org/opends/server/replication/plugin/ReplLDIFInputStream.java
index 532b815..5c0aaef 100644
--- a/opendj-sdk/opends/src/server/org/opends/server/replication/plugin/ReplLDIFInputStream.java
+++ b/opendj-sdk/opends/src/server/org/opends/server/replication/plugin/ReplLDIFInputStream.java
@@ -130,7 +130,7 @@
}
index += copiedLength;
- if (copiedLength <= len)
+ if (receivedLength <= len)
bytes = null;
return copiedLength;
diff --git a/opendj-sdk/opends/src/server/org/opends/server/replication/plugin/ReplicationBroker.java b/opendj-sdk/opends/src/server/org/opends/server/replication/plugin/ReplicationBroker.java
index 871d567..94fddf0 100644
--- a/opendj-sdk/opends/src/server/org/opends/server/replication/plugin/ReplicationBroker.java
+++ b/opendj-sdk/opends/src/server/org/opends/server/replication/plugin/ReplicationBroker.java
@@ -773,6 +773,8 @@
replicationServer = "stopped";
shutdown = true;
connected = false;
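+    // Stop the heartbeat monitor, if any, now that the broker is stopping.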
+    if (heartbeatMonitor != null)
+ heartbeatMonitor.shutdown();
try
{
if (debugEnabled())
diff --git a/opendj-sdk/opends/src/server/org/opends/server/types/AttributeType.java b/opendj-sdk/opends/src/server/org/opends/server/types/AttributeType.java
index fb02b4b..88a6aec 100644
--- a/opendj-sdk/opends/src/server/org/opends/server/types/AttributeType.java
+++ b/opendj-sdk/opends/src/server/org/opends/server/types/AttributeType.java
@@ -411,7 +411,26 @@
return definition;
}
-
+ /**
+ * Retrieves the definition string used to create this attribute
+   * type, including the X-SCHEMA-FILE extension.
+ *
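+   * For example (illustrative values), a definition such as
+   * "( 2.5.4.3 NAME 'cn' SUP name )" read from the schema file 00-core.ldif
+   * would be returned as
+   * "( 2.5.4.3 NAME 'cn' SUP name X-SCHEMA-FILE '00-core.ldif' )".
+   *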
+ * @return The definition string used to create this attribute
+ * type including the X-SCHEMA-FILE extension.
+ */
+ public String getDefinitionWithFileName()
+ {
+ if (getSchemaFile() != null)
+ {
+ int pos = definition.lastIndexOf(')');
+ String defStr = definition.substring(0, pos).trim() + " " +
+ SCHEMA_PROPERTY_FILENAME + " '" +
+ getSchemaFile() + "' )";
+ return defStr;
+ }
+ else
+ return definition;
+  }
+
+
/**
* Creates a new instance of this attribute type based on the
diff --git a/opendj-sdk/opends/src/server/org/opends/server/types/ObjectClass.java b/opendj-sdk/opends/src/server/org/opends/server/types/ObjectClass.java
index b77b57a..ce49cd3 100644
--- a/opendj-sdk/opends/src/server/org/opends/server/types/ObjectClass.java
+++ b/opendj-sdk/opends/src/server/org/opends/server/types/ObjectClass.java
@@ -275,6 +275,29 @@
/**
+   * Retrieves the definition string used to create this objectclass,
+ * including the X-SCHEMA-FILE extension.
+ *
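+   * For example (illustrative values), a definition such as
+   * "( 2.5.6.0 NAME 'top' ABSTRACT MUST objectClass )" read from the schema
+   * file 00-core.ldif would be returned as
+   * "( 2.5.6.0 NAME 'top' ABSTRACT MUST objectClass X-SCHEMA-FILE '00-core.ldif' )".
+   *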
+ * @return The definition string used to create this objectclass
+ * including the X-SCHEMA-FILE extension.
+ */
+ public String getDefinitionWithFileName()
+ {
+ if (getSchemaFile() != null)
+ {
+ int pos = definition.lastIndexOf(')');
+ String defStr = definition.substring(0, pos).trim() + " " +
+ SCHEMA_PROPERTY_FILENAME + " '" +
+ getSchemaFile() + "' )";
+ return defStr;
+ }
+ else
+ return definition;
+ }
+
+
+
+ /**
* Creates a new instance of this objectclass based on the
* definition string. It will also preserve other state information
* associated with this objectclass that is not included in the
diff --git a/opendj-sdk/opends/tests/unit-tests-testng/src/server/org/opends/server/backends/SchemaBackendTestCase.java b/opendj-sdk/opends/tests/unit-tests-testng/src/server/org/opends/server/backends/SchemaBackendTestCase.java
index 9c7db9c..c433753 100644
--- a/opendj-sdk/opends/tests/unit-tests-testng/src/server/org/opends/server/backends/SchemaBackendTestCase.java
+++ b/opendj-sdk/opends/tests/unit-tests-testng/src/server/org/opends/server/backends/SchemaBackendTestCase.java
@@ -185,7 +185,7 @@
throws Exception
{
DN schemaDN = DN.decode("cn=schema");
- Entry schemaEntry = schemaBackend.getSchemaEntry(schemaDN);
+ Entry schemaEntry = schemaBackend.getSchemaEntry(schemaDN, false);
assertNotNull(schemaEntry);
assertEquals(schemaEntry.getDN(), schemaDN);
@@ -203,7 +203,7 @@
schemaDN = DN.decode("cn=subschema");
- schemaEntry = schemaBackend.getSchemaEntry(schemaDN);
+ schemaEntry = schemaBackend.getSchemaEntry(schemaDN, false);
assertNotNull(schemaEntry);
assertEquals(schemaEntry.getDN(), schemaDN);
@@ -316,7 +316,7 @@
true, null);
schemaBackend.renameEntry(currentSchemaDN,
- schemaBackend.getSchemaEntry(newSchemaDN),
+ schemaBackend.getSchemaEntry(newSchemaDN, false),
modifyDNOperation);
}
@@ -462,7 +462,7 @@
AttributeType s = DirectoryServer.getAttributeType("ldapsyntaxes");
assertFalse(schemaBackend.showAllAttributes());
- Entry schemaEntry = schemaBackend.getSchemaEntry(schemaDN);
+ Entry schemaEntry = schemaBackend.getSchemaEntry(schemaDN, false);
assertTrue(schemaEntry.hasOperationalAttribute(a));
assertTrue(schemaEntry.hasOperationalAttribute(o));
assertTrue(schemaEntry.hasOperationalAttribute(m));
@@ -470,7 +470,7 @@
schemaBackend.setShowAllAttributes(true);
assertTrue(schemaBackend.showAllAttributes());
- schemaEntry = schemaBackend.getSchemaEntry(schemaDN);
+ schemaEntry = schemaBackend.getSchemaEntry(schemaDN, false);
assertFalse(schemaEntry.hasOperationalAttribute(a));
assertFalse(schemaEntry.hasOperationalAttribute(o));
assertFalse(schemaEntry.hasOperationalAttribute(m));
@@ -478,7 +478,7 @@
schemaBackend.setShowAllAttributes(false);
assertFalse(schemaBackend.showAllAttributes());
- schemaEntry = schemaBackend.getSchemaEntry(schemaDN);
+ schemaEntry = schemaBackend.getSchemaEntry(schemaDN, false);
assertTrue(schemaEntry.hasOperationalAttribute(a));
assertTrue(schemaEntry.hasOperationalAttribute(o));
assertTrue(schemaEntry.hasOperationalAttribute(m));
@@ -5399,16 +5399,22 @@
*
* @throws Exception If an unexpected problem occurs.
*/
- @Test(expectedExceptions = { DirectoryException.class })
+ @Test()
public void testImportLDIF()
throws Exception
{
File tempFile = File.createTempFile("schema", "testImportLDIF");
tempFile.deleteOnExit();
+
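+    // Export the current schema entry to the temporary file, then import it
+    // back through the backend to exercise the new schema import support.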
+ LDIFExportConfig exportConfig =
+ new LDIFExportConfig(tempFile.getAbsolutePath(),
+ ExistingFileBehavior.OVERWRITE);
+
+ schemaBackend.exportLDIF(exportConfig);
LDIFImportConfig importConfig =
new LDIFImportConfig(tempFile.getAbsolutePath());
-
+
schemaBackend.importLDIF(importConfig);
}
--
Gitblit v1.10.0