AutoRefactor'ed Use Diamond Operator
| | |
| | | AbstractManagedObjectDefinition<? super C, ? super S> parent) { |
| | | this.name = name; |
| | | this.parent = parent; |
| | | this.constraints = new LinkedList<Constraint>(); |
| | | this.propertyDefinitions = new HashMap<String, PropertyDefinition<?>>(); |
| | | this.relationDefinitions = new HashMap<String, RelationDefinition<?,?>>(); |
| | | this.reverseRelationDefinitions = new HashSet<RelationDefinition<C,S>>(); |
| | | this.allPropertyDefinitions = new HashMap<String, PropertyDefinition<?>>(); |
| | | this.allRelationDefinitions = |
| | | new HashMap<String, RelationDefinition<?, ?>>(); |
| | | this.aggregationPropertyDefinitions = |
| | | new HashMap<String, AggregationPropertyDefinition<?,?>>(); |
| | | this.reverseAggregationPropertyDefinitions = |
| | | new Vector<AggregationPropertyDefinition<?,?>>(); |
| | | this.allAggregationPropertyDefinitions = |
| | | new HashMap<String, AggregationPropertyDefinition<?, ?>>(); |
| | | this.allTags = new HashSet<Tag>(); |
| | | this.constraints = new LinkedList<>(); |
| | | this.propertyDefinitions = new HashMap<>(); |
| | | this.relationDefinitions = new HashMap<>(); |
| | | this.reverseRelationDefinitions = new HashSet<>(); |
| | | this.allPropertyDefinitions = new HashMap<>(); |
| | | this.allRelationDefinitions = new HashMap<>(); |
| | | this.aggregationPropertyDefinitions = new HashMap<>(); |
| | | this.reverseAggregationPropertyDefinitions = new Vector<>(); |
| | | this.allAggregationPropertyDefinitions = new HashMap<>(); |
| | | this.allTags = new HashSet<>(); |
| | | this.options = EnumSet.noneOf(ManagedObjectOption.class); |
| | | |
| | | this.children = new HashMap<String, |
| | | AbstractManagedObjectDefinition<? extends C, ? extends S>>(); |
| | | this.children = new HashMap<>(); |
| | | |
| | | // If we have a parent definition then inherit its features. |
| | | if (parent != null) { |
| | |
| | | public final Collection<AbstractManagedObjectDefinition |
| | | <? extends C, ? extends S>> getAllChildren() { |
| | | List<AbstractManagedObjectDefinition<? extends C, ? extends S>> list = |
| | | new ArrayList<AbstractManagedObjectDefinition<? extends C, ? extends S>>( |
| | | children.values()); |
| | | new ArrayList<>(children.values()); |
| | | |
| | | for (AbstractManagedObjectDefinition<? extends C, ? extends S> child : |
| | | children.values()) { |
| | | for (AbstractManagedObjectDefinition<? extends C, ? extends S> child : children.values()) { |
| | | list.addAll(child.getAllChildren()); |
| | | } |
| | | |
| | |
| | | */ |
| | | public final Collection<Constraint> getAllConstraints() { |
| | | // This method does not use a cached set of constraints because |
| | | // constraints may be updated after child definitions have been |
| | | // defined. |
| | | List<Constraint> allConstraints = new LinkedList<Constraint>(); |
| | | // constraints may be updated after child definitions have been defined. |
| | | List<Constraint> allConstraints = new LinkedList<>(); |
| | | |
| | | if (parent != null) { |
| | | allConstraints.addAll(parent.getAllConstraints()); |
| | |
| | | public final Collection<RelationDefinition<? super C, ? super S>> |
| | | getAllReverseRelationDefinitions() { |
| | | // This method does not use a cached set of relations because |
| | | // relations may be updated after child definitions have been |
| | | // defined. |
| | | List<RelationDefinition<? super C, ? super S>> rdlist = |
| | | new LinkedList<RelationDefinition<? super C, ? super S>>(); |
| | | // relations may be updated after child definitions have been defined. |
| | | List<RelationDefinition<? super C, ? super S>> rdlist = new LinkedList<>(); |
| | | |
| | | if (parent != null) { |
| | | rdlist.addAll(parent.getAllReverseRelationDefinitions()); |
| | |
| | | // This method does not use a cached set of aggregation properties because |
| | | // aggregation properties may be updated after child definitions have been |
| | | // defined. |
| | | List<AggregationPropertyDefinition<?, ?>> apdlist = |
| | | new LinkedList<AggregationPropertyDefinition<?, ?>>(); |
| | | List<AggregationPropertyDefinition<?, ?>> apdlist = new LinkedList<>(); |
| | | |
| | | if (parent != null) { |
| | | apdlist.addAll(parent.getAllReverseAggregationPropertyDefinitions()); |
| | |
| | | private static final boolean ADMIN_ALLOW_LDAP_V2 = false; |
| | | private static final boolean ADMIN_ALLOW_START_TLS = false; |
| | | |
| | | private static final SortedSet<AddressMask> ADMIN_ALLOWED_CLIENT = new TreeSet<AddressMask>(); |
| | | private static final SortedSet<AddressMask> ADMIN_DENIED_CLIENT = new TreeSet<AddressMask>(); |
| | | private static final SortedSet<AddressMask> ADMIN_ALLOWED_CLIENT = new TreeSet<>(); |
| | | private static final SortedSet<AddressMask> ADMIN_DENIED_CLIENT = new TreeSet<>(); |
| | | |
| | | private static final boolean ADMIN_ENABLED = true; |
| | | private static final boolean ADMIN_KEEP_STATS = true; |
| | |
| | | * |
| | | * |
| | | * Copyright 2006-2008 Sun Microsystems, Inc. |
| | | * Portions Copyright 2012-2014 ForgeRock AS |
| | | * Portions Copyright 2012-2015 ForgeRock AS |
| | | */ |
| | | package org.opends.server.admin; |
| | | |
| | |
| | | return; |
| | | } |
| | | |
| | | LinkedList<Modification> mods = new LinkedList<Modification>(); |
| | | LinkedList<Modification> mods = new LinkedList<>(); |
| | | // adminport |
| | | String attName = "adminport"; |
| | | AttributeType attrType = DirectoryServer.getAttributeType(attName |
| | |
| | | throw new IllegalStateException("Relation definition undefined"); |
| | | } |
| | | |
| | | return new AggregationPropertyDefinition<C, S>(d, propertyName, options, |
| | | return new AggregationPropertyDefinition<>(d, propertyName, options, |
| | | adminAction, defaultBehavior, parentPathString, rdName, |
| | | targetNeedsEnablingCondition, targetIsEnabledCondition); |
| | | } |
| | | |
| | | } |
| | | |
| | | |
| | |
| | | ServerManagedObject<?> parent = context.getManagedObject(parentPath); |
| | | |
| | | // Create entries in the listener tables. |
| | | List<ReferentialIntegrityDeleteListener> dlist = |
| | | new LinkedList<ReferentialIntegrityDeleteListener>(); |
| | | List<ReferentialIntegrityDeleteListener> dlist = new LinkedList<>(); |
| | | deleteListeners.put(managedObject.getDN(), dlist); |
| | | |
| | | List<ReferentialIntegrityChangeListener> clist = |
| | | new LinkedList<ReferentialIntegrityChangeListener>(); |
| | | List<ReferentialIntegrityChangeListener> clist = new LinkedList<>(); |
| | | changeListeners.put(managedObject.getDN(), clist); |
| | | |
| | | for (String name : managedObject |
| | |
| | | List<ManagedObject<? extends CC>> findInstances( |
| | | ManagementContext context, AbstractManagedObjectDefinition<CC, ?> mod) |
| | | throws AuthorizationException, CommunicationException { |
| | | List<ManagedObject<? extends CC>> instances = |
| | | new LinkedList<ManagedObject<? extends CC>>(); |
| | | List<ManagedObject<? extends CC>> instances = new LinkedList<>(); |
| | | |
| | | if (mod == RootCfgDefn.getInstance()) { |
| | | instances.add((ManagedObject<? extends CC>) context |
| | |
| | | public static <C extends ConfigurationClient, S extends Configuration> |
| | | Builder<C, S> createBuilder( |
| | | AbstractManagedObjectDefinition<?, ?> d, String propertyName) { |
| | | return new Builder<C, S>(d, propertyName); |
| | | return new Builder<>(d, propertyName); |
| | | } |
| | | |
| | | /** |
| | | * The active server-side referential integrity change listeners |
| | | * associated with this property. |
| | | */ |
| | | private final Map<DN, List<ReferentialIntegrityChangeListener>> |
| | | changeListeners = new HashMap<DN, |
| | | List<ReferentialIntegrityChangeListener>>(); |
| | | private final Map<DN, List<ReferentialIntegrityChangeListener>> changeListeners = new HashMap<>(); |
| | | |
| | | /** |
| | | * The active server-side referential integrity delete listeners |
| | | * associated with this property. |
| | | */ |
| | | private final Map<DN, List<ReferentialIntegrityDeleteListener>> |
| | | deleteListeners = new HashMap<DN, |
| | | List<ReferentialIntegrityDeleteListener>>(); |
| | | private final Map<DN, List<ReferentialIntegrityDeleteListener>> deleteListeners = new HashMap<>(); |
| | | |
| | | /** |
| | | * The name of the managed object which is the parent of the |
| | |
| | | * the standard boolean string parser and supports common true/false synonyms |
| | | * used in configuration. |
| | | */ |
| | | private static final Map<String, Boolean> VALUE_MAP; |
| | | private static final Map<String, Boolean> VALUE_MAP = new HashMap<>(); |
| | | static { |
| | | VALUE_MAP = new HashMap<String, Boolean>(); |
| | | |
| | | // We could have more possibilities but decided against in issue 1960. |
| | | VALUE_MAP.put("false", Boolean.FALSE); |
| | | VALUE_MAP.put("true", Boolean.TRUE); |
| | |
| | | } |
| | | |
| | | /** Set of registered Jar files. */ |
| | | private Set<File> jarFiles = new HashSet<File>(); |
| | | private Set<File> jarFiles = new HashSet<>(); |
| | | |
| | | /** |
| | | * Underlying class loader used to load classes and resources (null |
| | |
| | | File libPath = new File(DirectoryServer.getInstanceRoot(), LIB_DIR); |
| | | File extensionsPath = new File(libPath, EXTENSIONS_DIR); |
| | | |
| | | ArrayList<File> files = new ArrayList<File>(extensions.length); |
| | | ArrayList<File> files = new ArrayList<>(extensions.length); |
| | | for (String extension : extensions) { |
| | | File file = new File(extensionsPath, extension); |
| | | |
| | |
| | | "Class loader provider already disabled."); |
| | | } |
| | | loader = null; |
| | | jarFiles = new HashSet<File>(); |
| | | jarFiles = new HashSet<>(); |
| | | } |
| | | |
| | | |
| | |
| | | private synchronized void addExtension(File... extensions) |
| | | throws InitializationException { |
| | | // First add the Jar files to the class loader. |
| | | List<JarFile> jars = new LinkedList<JarFile>(); |
| | | List<JarFile> jars = new LinkedList<>(); |
| | | for (File extension : extensions) { |
| | | if (jarFiles.contains(extension)) { |
| | | // Skip this file as it is already loaded. |
| | |
| | | */ |
| | | private void loadDefinitionClasses(InputStream is) |
| | | throws InitializationException { |
| | | BufferedReader reader = new BufferedReader(new InputStreamReader( |
| | | is)); |
| | | List<AbstractManagedObjectDefinition<?, ?>> definitions = |
| | | new LinkedList<AbstractManagedObjectDefinition<?,?>>(); |
| | | BufferedReader reader = new BufferedReader(new InputStreamReader(is)); |
| | | List<AbstractManagedObjectDefinition<?, ?>> definitions = new LinkedList<>(); |
| | | while (true) { |
| | | String className; |
| | | try { |
| | |
| | | |
| | | |
| | | |
| | | import static org.forgerock.util.Reject.ifNull; |
| | | import static org.forgerock.util.Reject.*; |
| | | import static org.opends.server.admin.PropertyException.*; |
| | | |
| | | import java.util.Collections; |
| | |
| | | AbstractBuilder<String, ClassPropertyDefinition> { |
| | | |
| | | /** List of interfaces which property values must implement. */ |
| | | private List<String> instanceOfInterfaces; |
| | | |
| | | |
| | | private List<String> instanceOfInterfaces = new LinkedList<>(); |
| | | |
| | | /** Private constructor. */ |
| | | private Builder( |
| | | AbstractManagedObjectDefinition<?, ?> d, String propertyName) { |
| | | private Builder(AbstractManagedObjectDefinition<?, ?> d, String propertyName) { |
| | | super(d, propertyName); |
| | | |
| | | this.instanceOfInterfaces = new LinkedList<String>(); |
| | | } |
| | | |
| | | |
| | | |
| | | /** |
| | | * Add an class name which property values must implement. |
| | | * |
| | |
| | | /** List of interfaces which property values must implement. */ |
| | | private final List<String> instanceOfInterfaces; |
| | | |
| | | |
| | | |
| | | /** Private constructor. */ |
| | | private ClassPropertyDefinition( |
| | | AbstractManagedObjectDefinition<?, ?> d, String propertyName, |
| | |
| | | private final ManagedObjectDefinition<C, S> definition; |
| | | |
| | | /** The string encoded default managed object's properties. */ |
| | | private final Map<String, List<String>> propertyStringValues = |
| | | new HashMap<String, List<String>>(); |
| | | |
| | | |
| | | private final Map<String, List<String>> propertyStringValues = new HashMap<>(); |
| | | |
| | | /** |
| | | * Creates a new default managed object builder. |
| | |
| | | * @return Returns the new default managed object. |
| | | */ |
| | | public DefaultManagedObject<C, S> getInstance() { |
| | | return new DefaultManagedObject<C, S>(definition, propertyStringValues); |
| | | return new DefaultManagedObject<>(definition, propertyStringValues); |
| | | } |
| | | |
| | | |
| | |
| | | definition.getPropertyDefinition(pd.getName()); |
| | | |
| | | // Do a defensive copy. |
| | | SortedSet<T> values = new TreeSet<T>(pd); |
| | | SortedSet<T> values = new TreeSet<>(pd); |
| | | List<String> stringValues = propertyStringValues.get(pd.getName()); |
| | | if (stringValues != null) { |
| | | for (String stringValue : stringValues) { |
| | |
| | | this.values = Arrays.asList(values); |
| | | } |
| | | |
| | | |
| | | |
| | | /** {@inheritDoc} */ |
| | | public <R, P> R accept(DefaultBehaviorProviderVisitor<T, R, P> v, P p) { |
| | | return v.visitDefined(this, p); |
| | | } |
| | | |
| | | |
| | | |
| | | /** |
| | | * Get a copy of the default values. |
| | | * |
| | |
| | | * default values. |
| | | */ |
| | | public Collection<String> getDefaultValues() { |
| | | return new ArrayList<String>(values); |
| | | return new ArrayList<>(values); |
| | | } |
| | | |
| | | } |
| | |
| | | throw new IllegalStateException("Enumeration class undefined"); |
| | | } |
| | | |
| | | return new EnumPropertyDefinition<E>(d, propertyName, options, |
| | | return new EnumPropertyDefinition<>(d, propertyName, options, |
| | | adminAction, defaultBehavior, enumClass); |
| | | } |
| | | } |
| | |
| | | */ |
| | | public static <E extends Enum<E>> Builder<E> createBuilder( |
| | | AbstractManagedObjectDefinition<?, ?> d, String propertyName) { |
| | | return new Builder<E>(d, propertyName); |
| | | return new Builder<>(d, propertyName); |
| | | } |
| | | |
| | | /** The enumeration class. */ |
| | |
| | | this.enumClass = enumClass; |
| | | |
| | | // Initialize the decoding map. |
| | | this.decodeMap = new HashMap<String, E>(); |
| | | this.decodeMap = new HashMap<>(); |
| | | for (E value : EnumSet.<E> allOf(enumClass)) { |
| | | String s = value.toString().trim().toLowerCase(); |
| | | this.decodeMap.put(s, value); |
| | |
| | | * The optional default managed objects associated with this |
| | | * instantiable relation definition. |
| | | */ |
| | | private final Map<String, DefaultManagedObject<? extends C, ? extends S>> |
| | | defaultManagedObjects = new HashMap<String, |
| | | DefaultManagedObject<? extends C, ? extends S>>(); |
| | | private final Map<String, DefaultManagedObject<? extends C, ? extends S>> defaultManagedObjects = new HashMap<>(); |
| | | |
| | | |
| | | /** |
| | |
| | | @Override |
| | | protected InstantiableRelationDefinition<C, S> buildInstance( |
| | | Common<C, S> common) { |
| | | return new InstantiableRelationDefinition<C, S>(common, pluralName, |
| | | return new InstantiableRelationDefinition<>(common, pluralName, |
| | | namingPropertyDefinition, defaultManagedObjects); |
| | | } |
| | | |
| | |
| | | } |
| | | |
| | | /** The list of profile wrappers. */ |
| | | private final LinkedList<Wrapper> profiles = new LinkedList<Wrapper>();; |
| | | private final LinkedList<Wrapper> profiles = new LinkedList<>();; |
| | | |
| | | /** The LDAP profile property table. */ |
| | | private final ManagedObjectDefinitionResource resource = |
| | |
| | | */ |
| | | public List<String> getObjectClasses(AbstractManagedObjectDefinition<?, ?> d) |
| | | throws MissingResourceException { |
| | | LinkedList<String> objectClasses = new LinkedList<String>(); |
| | | Set<String> s = new HashSet<String>(); |
| | | LinkedList<String> objectClasses = new LinkedList<>(); |
| | | Set<String> s = new HashSet<>(); |
| | | |
| | | // Add the object classes from the parent hierarchy. |
| | | while (d != null) { |
| | |
| | | |
| | | |
| | | /** |
| | | * Get an LDAP RDN sequence associatied with a relation. |
| | | * Get an LDAP RDN sequence associated with a relation. |
| | | * |
| | | * @param r |
| | | * The relation. |
| | | * @return Returns the LDAP RDN sequence associatied with a |
| | | * @return Returns the LDAP RDN sequence associated with a |
| | | * relation. |
| | | * @throws MissingResourceException |
| | | * If the LDAP profile properties file associated with the |
| | |
| | | public final class ManagedObjectDefinitionI18NResource { |
| | | |
| | | /** Application-wide set of instances. */ |
| | | private static final Map<String, ManagedObjectDefinitionI18NResource> |
| | | INSTANCES = new HashMap<String, ManagedObjectDefinitionI18NResource>(); |
| | | |
| | | |
| | | private static final Map<String, ManagedObjectDefinitionI18NResource> INSTANCES = new HashMap<>(); |
| | | |
| | | /** |
| | | * Gets the internationalized resource instance which can be used to |
| | |
| | | |
| | | /** Private constructor. */ |
| | | private ManagedObjectDefinitionI18NResource(String prefix) { |
| | | this.resources = new HashMap<AbstractManagedObjectDefinition<?, ?>, |
| | | Map<Locale, ResourceBundle>>(); |
| | | this.resources = new HashMap<>(); |
| | | this.prefix = prefix; |
| | | } |
| | | |
| | |
| | | // necessary. |
| | | Map<Locale, ResourceBundle> map = resources.get(d); |
| | | if (map == null) { |
| | | map = new HashMap<Locale, ResourceBundle>(); |
| | | map = new HashMap<>(); |
| | | resources.put(d, map); |
| | | } |
| | | |
| | |
| | | // necessary. |
| | | Map<Locale, ResourceBundle> map = resources.get(d); |
| | | if (map == null) { |
| | | map = new HashMap<Locale, ResourceBundle>(); |
| | | map = new HashMap<>(); |
| | | resources.put(d, map); |
| | | } |
| | | |
| | |
| | | public final class ManagedObjectDefinitionResource { |
| | | |
| | | /** Mapping from definition to property tables. */ |
| | | private final Map<AbstractManagedObjectDefinition<?, ?>, |
| | | Properties> properties; |
| | | |
| | | private final Map<AbstractManagedObjectDefinition<?, ?>, Properties> properties = new HashMap<>(); |
| | | /** The resource name prefix. */ |
| | | private final String prefix; |
| | | |
| | | |
| | | |
| | | /** |
| | | * Creates a new resource instance for the named profile. |
| | | * |
| | |
| | | * The name of the profile. |
| | | * @return Returns the resource instance for the named profile. |
| | | */ |
| | | public static ManagedObjectDefinitionResource createForProfile( |
| | | String profile) { |
| | | return new ManagedObjectDefinitionResource("admin.profiles." |
| | | + profile); |
| | | public static ManagedObjectDefinitionResource createForProfile(String profile) { |
| | | return new ManagedObjectDefinitionResource("admin.profiles." + profile); |
| | | } |
| | | |
| | | |
| | | |
| | | /** Private constructor. */ |
| | | private ManagedObjectDefinitionResource(String prefix) { |
| | | this.properties = |
| | | new HashMap<AbstractManagedObjectDefinition<?, ?>, Properties>(); |
| | | this.prefix = prefix; |
| | | } |
| | | |
| | | |
| | | |
| | | /** |
| | | * Get the resource value associated with the specified key. |
| | | * |
| | |
| | | InstantiableElement<C, S> create( |
| | | InstantiableRelationDefinition<? super C, ? super S> r, |
| | | AbstractManagedObjectDefinition<C, S> d, String name) { |
| | | return new InstantiableElement<C, S>(r, d, name); |
| | | return new InstantiableElement<>(r, d, name); |
| | | } |
| | | |
| | | /** The name of the managed object. */ |
| | |
| | | S extends Configuration> OptionalElement<C, S> create( |
| | | OptionalRelationDefinition<? super C, ? super S> r, |
| | | AbstractManagedObjectDefinition<C, S> d) { |
| | | return new OptionalElement<C, S>(r, d); |
| | | return new OptionalElement<>(r, d); |
| | | } |
| | | |
| | | /** The optional relation. */ |
| | |
| | | SetElement<C, S> create( |
| | | SetRelationDefinition<? super C, ? super S> r, |
| | | AbstractManagedObjectDefinition<C, S> d) { |
| | | return new SetElement<C, S>(r, d); |
| | | return new SetElement<>(r, d); |
| | | } |
| | | |
| | | /** The set relation. */ |
| | |
| | | S extends Configuration> SingletonElement<C, S> create( |
| | | SingletonRelationDefinition<? super C, ? super S> r, |
| | | AbstractManagedObjectDefinition<C, S> d) { |
| | | return new SingletonElement<C, S>(r, d); |
| | | return new SingletonElement<>(r, d); |
| | | } |
| | | |
| | | /** The singleton relation. */ |
| | |
| | | |
| | | /** Single instance of a root path. */ |
| | | private static final ManagedObjectPath<RootCfgClient, RootCfg> EMPTY_PATH = |
| | | new ManagedObjectPath<RootCfgClient, RootCfg>( |
| | | new LinkedList<Element<?, ?>>(), null, RootCfgDefn.getInstance()); |
| | | new ManagedObjectPath<>(new LinkedList<Element<?, ?>>(), null, RootCfgDefn.getInstance()); |
| | | |
| | | /** A regular expression used to parse path elements. */ |
| | | private static final Pattern PE_REGEXP = Pattern |
| | |
| | | } |
| | | |
| | | // Parse the elements. |
| | | LinkedList<Element<?, ?>> elements = new LinkedList<Element<?, ?>>(); |
| | | LinkedList<Element<?, ?>> elements = new LinkedList<>(); |
| | | Element<?, ?> lastElement = null; |
| | | AbstractManagedObjectDefinition<?, ?> definition = RootCfgDefn |
| | | .getInstance(); |
| | | AbstractManagedObjectDefinition<?, ?> definition = RootCfgDefn.getInstance(); |
| | | |
| | | if (!ns.startsWith("/")) { |
| | | throw new IllegalArgumentException("Invalid path \"" + ns |
| | |
| | | private static <C extends ConfigurationClient, S extends Configuration> |
| | | ManagedObjectPath<C, S> create( |
| | | LinkedList<Element<?, ?>> elements, Element<C, S> lastElement) { |
| | | return new ManagedObjectPath<C, S>(elements, lastElement |
| | | .getRelationDefinition(), lastElement.getManagedObjectDefinition()); |
| | | return new ManagedObjectPath<>( |
| | | elements, lastElement.getRelationDefinition(), lastElement.getManagedObjectDefinition()); |
| | | } |
| | | |
| | | |
| | |
| | | throw new IllegalArgumentException( |
| | | "Empty or blank managed object names are not allowed"); |
| | | } |
| | | LinkedList<Element<?, ?>> celements = new LinkedList<Element<?, ?>>( |
| | | elements); |
| | | LinkedList<Element<?, ?>> celements = new LinkedList<>(elements); |
| | | celements.add(new InstantiableElement<M, N>(r, d, name)); |
| | | return new ManagedObjectPath<M, N>(celements, r, d); |
| | | return new ManagedObjectPath<>(celements, r, d); |
| | | } |
| | | |
| | | |
| | |
| | | ManagedObjectPath<M, N> child( |
| | | OptionalRelationDefinition<? super M, ? super N> r, |
| | | AbstractManagedObjectDefinition<M, N> d) { |
| | | LinkedList<Element<?, ?>> celements = new LinkedList<Element<?, ?>>( |
| | | elements); |
| | | LinkedList<Element<?, ?>> celements = new LinkedList<>(elements); |
| | | celements.add(new OptionalElement<M, N>(r, d)); |
| | | return new ManagedObjectPath<M, N>(celements, r, d); |
| | | return new ManagedObjectPath<>(celements, r, d); |
| | | } |
| | | |
| | | |
| | |
| | | ManagedObjectPath<M, N> child( |
| | | SingletonRelationDefinition<? super M, ? super N> r, |
| | | AbstractManagedObjectDefinition<M, N> d) { |
| | | LinkedList<Element<?, ?>> celements = new LinkedList<Element<?, ?>>( |
| | | elements); |
| | | LinkedList<Element<?, ?>> celements = new LinkedList<>(elements); |
| | | celements.add(new SingletonElement<M, N>(r, d)); |
| | | return new ManagedObjectPath<M, N>(celements, r, d); |
| | | return new ManagedObjectPath<>(celements, r, d); |
| | | } |
| | | |
| | | |
| | |
| | | SetRelationDefinition<? super M, ? super N> r, |
| | | AbstractManagedObjectDefinition<M, N> d) |
| | | throws IllegalArgumentException { |
| | | LinkedList<Element<?, ?>> celements = new LinkedList<Element<?, ?>>( |
| | | elements); |
| | | LinkedList<Element<?, ?>> celements = new LinkedList<>(elements); |
| | | celements.add(new SetElement<M, N>(r, d)); |
| | | return new ManagedObjectPath<M, N>(celements, r, d); |
| | | return new ManagedObjectPath<>(celements, r, d); |
| | | } |
| | | |
| | | |
| | |
| | | return emptyPath(); |
| | | } |
| | | |
| | | LinkedList<Element<?, ?>> celements = new LinkedList<Element<?, ?>>( |
| | | LinkedList<Element<?, ?>> celements = new LinkedList<>( |
| | | elements.subList(0, elements.size() - offset)); |
| | | return create(celements, celements.getLast()); |
| | | } |
| | |
| | | @Override |
| | | protected OptionalRelationDefinition<C, S> buildInstance( |
| | | Common<C, S> common) { |
| | | return new OptionalRelationDefinition<C, S>(common, defaultManagedObject); |
| | | return new OptionalRelationDefinition<>(common, defaultManagedObject); |
| | | } |
| | | |
| | | } |
| | | |
| | | |
| | | |
| | | /** |
| | | * The optional default managed object associated with this |
| | | * optional relation. |
| | | */ |
| | | private final DefaultManagedObject<? extends C, ? extends S> |
| | | defaultManagedObject; |
| | | /** The optional default managed object associated with this optional relation. */ |
| | | private final DefaultManagedObject<? extends C, ? extends S> defaultManagedObject; |
| | | |
| | | |
| | | |
| | |
| | | * @param propertyName |
| | | * The property name. |
| | | */ |
| | | protected AbstractBuilder(AbstractManagedObjectDefinition<?, ?> d, |
| | | String propertyName) { |
| | | protected AbstractBuilder(AbstractManagedObjectDefinition<?, ?> d, String propertyName) { |
| | | this.definition = d; |
| | | this.propertyName = propertyName; |
| | | this.options = EnumSet.noneOf(PropertyOption.class); |
| | | this.adminAction = new AdministratorAction(AdministratorAction.Type.NONE, |
| | | d, propertyName); |
| | | this.defaultBehavior = new UndefinedDefaultBehaviorProvider<T>(); |
| | | this.adminAction = new AdministratorAction(AdministratorAction.Type.NONE, d, propertyName); |
| | | this.defaultBehavior = new UndefinedDefaultBehaviorProvider<>(); |
| | | } |
| | | |
| | | |
| | |
| | | return LocalizableMessage.raw(name.substring(i + 1).toUpperCase()); |
| | | } |
| | | } else { |
| | | Set<String> values = new TreeSet<String>(); |
| | | Set<String> values = new TreeSet<>(); |
| | | for (Object value : EnumSet.allOf(d.getEnumClass())) { |
| | | values.add(value.toString().trim().toLowerCase()); |
| | | } |
| | |
| | | + s + "\""); |
| | | } |
| | | |
| | | return new Reference<C, S>(p, rd, name); |
| | | return new Reference<>(p, rd, name); |
| | | } |
| | | |
| | | |
| | |
| | | throw new IllegalArgumentException("Empty names are not allowed"); |
| | | } |
| | | |
| | | return new Reference<C, S>(p, rd, s); |
| | | return new Reference<>(p, rd, s); |
| | | } |
| | | |
| | | /** The name of the referenced managed object. */ |
| | |
| | | */ |
| | | protected AbstractBuilder(AbstractManagedObjectDefinition<?, ?> pd, |
| | | String name, AbstractManagedObjectDefinition<C, S> cd) { |
| | | this.common = new Common<C, S>(pd, name, cd); |
| | | this.common = new Common<>(pd, name, cd); |
| | | } |
| | | |
| | | |
| | |
| | | * The optional default managed objects associated with this |
| | | * set relation definition. |
| | | */ |
| | | private final Map<String, |
| | | DefaultManagedObject<? extends C, ? extends S>> |
| | | defaultManagedObjects = |
| | | new HashMap<String, DefaultManagedObject<? extends C, ? extends S>>(); |
| | | private final Map<String, DefaultManagedObject<? extends C, ? extends S>> |
| | | defaultManagedObjects = new HashMap<>(); |
| | | |
| | | |
| | | |
| | |
| | | |
| | | /** {@inheritDoc} */ |
| | | @Override |
| | | protected SetRelationDefinition<C, S> buildInstance( |
| | | Common<C, S> common) |
| | | protected SetRelationDefinition<C, S> buildInstance(Common<C, S> common) |
| | | { |
| | | return new SetRelationDefinition<C, S>(common, pluralName, |
| | | defaultManagedObjects); |
| | | return new SetRelationDefinition<>(common, pluralName, defaultManagedObjects); |
| | | } |
| | | |
| | | } |
| | | |
| | | |
| | | |
| | | /** The plural name of the relation. */ |
| | | private final String pluralName; |
| | | |
| | | /** |
| | | * The optional default managed objects associated with this |
| | | * set relation definition. |
| | | */ |
| | | private final Map<String, |
| | | DefaultManagedObject<? extends C, ? extends S>> |
| | | defaultManagedObjects; |
| | | |
| | | |
| | | /** The optional default managed objects associated with this set relation definition. */ |
| | | private final Map<String, DefaultManagedObject<? extends C, ? extends S>> defaultManagedObjects; |
| | | |
| | | /** Private constructor. */ |
| | | private SetRelationDefinition( |
| | |
| | | @Override |
| | | protected SingletonRelationDefinition<C, S> buildInstance( |
| | | Common<C, S> common) { |
| | | return new SingletonRelationDefinition<C, S>(common, |
| | | defaultManagedObject); |
| | | return new SingletonRelationDefinition<>(common, defaultManagedObject); |
| | | } |
| | | |
| | | } |
| | | |
| | | |
| | |
| | | * The optional default managed object associated with this |
| | | * singleton relation. |
| | | */ |
| | | private final DefaultManagedObject<? extends C, ? extends S> |
| | | defaultManagedObject; |
| | | private final DefaultManagedObject<? extends C, ? extends S> defaultManagedObject; |
| | | |
| | | |
| | | |
| | |
| | | public final class Tag implements Comparable<Tag> { |
| | | |
| | | /** All the tags. */ |
| | | private static final Map<String, Tag> tags = new HashMap<String, Tag>(); |
| | | private static final Map<String, Tag> tags = new HashMap<>(); |
| | | |
| | | |
| | | |
| | |
| | | Collection<PropertyException> causes, boolean isCreate) { |
| | | super(createMessage(causes)); |
| | | |
| | | this.causes = new ArrayList<PropertyException>(causes); |
| | | this.causes = new ArrayList<>(causes); |
| | | this.ufn = ufn; |
| | | this.isCreate = isCreate; |
| | | } |
| | |
| | | Collection<LocalizableMessage> messages) { |
| | | super(getDefaultMessage(messages)); |
| | | |
| | | this.messages = new ArrayList<LocalizableMessage>(messages); |
| | | this.messages = new ArrayList<>(messages); |
| | | this.type = type; |
| | | this.ufn = ufn; |
| | | } |
| | |
| | | public static JNDIDirContextAdaptor simpleBind(String host, int port, |
| | | String name, String password) throws CommunicationException, |
| | | AuthenticationNotSupportedException, AuthenticationException { |
| | | Hashtable<String, Object> env = new Hashtable<String, Object>(); |
| | | Hashtable<String, Object> env = new Hashtable<>(); |
| | | env.put(Context.INITIAL_CONTEXT_FACTORY, "com.sun.jndi.ldap.LdapCtxFactory"); |
| | | String hostname = getHostNameForLdapUrl(host); |
| | | env.put(Context.PROVIDER_URL, "ldap://" + hostname + ":" + port); |
| | |
| | | public static JNDIDirContextAdaptor simpleSSLBind(String host, int port, |
| | | String name, String password) throws CommunicationException, |
| | | AuthenticationNotSupportedException, AuthenticationException { |
| | | Hashtable<String, Object> env = new Hashtable<String, Object>(); |
| | | Hashtable<String, Object> env = new Hashtable<>(); |
| | | env.put(Context.INITIAL_CONTEXT_FACTORY, "com.sun.jndi.ldap.LdapCtxFactory"); |
| | | String hostname = getHostNameForLdapUrl(host); |
| | | env.put(Context.PROVIDER_URL, "ldaps://" + hostname + ":" + port); |
| | |
| | | SearchControls controls = new SearchControls(); |
| | | controls.setSearchScope(SearchControls.ONELEVEL_SCOPE); |
| | | |
| | | List<LdapName> children = new LinkedList<LdapName>(); |
| | | NamingEnumeration<SearchResult> results = dirContext.search(dn, filter, |
| | | controls); |
| | | List<LdapName> children = new LinkedList<>(); |
| | | NamingEnumeration<SearchResult> results = dirContext.search(dn, filter, controls); |
| | | try |
| | | { |
| | | while (results.hasMore()) { |
| | |
| | | ManagedObjectDefinition<? extends C, ? extends S> mod = |
| | | getEntryDefinition(d, dn); |
| | | |
| | | ArrayList<String> attrIds = new ArrayList<String>(); |
| | | ArrayList<String> attrIds = new ArrayList<>(); |
| | | for (PropertyDefinition<?> pd : mod.getAllPropertyDefinitions()) { |
| | | String attrId = profile.getAttributeName(mod, pd); |
| | | attrIds.add(attrId); |
| | |
| | | Attributes attributes = connection.readEntry(dn, attrIds); |
| | | |
| | | // Build the managed object's properties. |
| | | List<PropertyException> exceptions = new LinkedList<PropertyException>(); |
| | | List<PropertyException> exceptions = new LinkedList<>(); |
| | | PropertySet newProperties = new PropertySet(); |
| | | for (PropertyDefinition<?> pd : mod.getAllPropertyDefinitions()) { |
| | | String attrID = profile.getAttributeName(mod, pd); |
| | |
| | | Attribute attribute = attributes.get(attrID); |
| | | |
| | | // Decode the values. |
| | | SortedSet<PD> values = new TreeSet<PD>(pd); |
| | | SortedSet<PD> values = new TreeSet<>(pd); |
| | | if (attribute != null) { |
| | | NamingEnumeration<?> ldapValues = attribute.getAll(); |
| | | while (ldapValues.hasMore()) { |
| | |
| | | /** {@inheritDoc} */ |
| | | @Override |
| | | public ManagedObject<RootCfgClient> getRootConfigurationManagedObject() { |
| | | return new LDAPManagedObject<RootCfgClient>(this, |
| | | return new LDAPManagedObject<>(this, |
| | | RootCfgDefn.getInstance(), ManagedObjectPath.emptyPath(), |
| | | new PropertySet(), true, null); |
| | | } |
| | |
| | | builder.append(')'); |
| | | String filter = builder.toString(); |
| | | |
| | | List<String> children = new ArrayList<String>(); |
| | | List<String> children = new ArrayList<>(); |
| | | try { |
| | | for (LdapName child : connection.listEntries(dn, filter)) { |
| | | children.add(child.getRdn(child.size() - 1).getValue().toString()); |
| | |
| | | builder.append(')'); |
| | | String filter = builder.toString(); |
| | | |
| | | List<String> children = new ArrayList<String>(); |
| | | List<String> children = new ArrayList<>(); |
| | | try { |
| | | for (LdapName child : connection.listEntries(dn, filter)) { |
| | | children.add(child.getRdn(child.size() - 1).getValue().toString()); |
| | |
| | | (InstantiableRelationDefinition<?, ?>) rd; |
| | | pd = ird.getNamingPropertyDefinition(); |
| | | } |
| | | return new LDAPManagedObject<M>(this, d, p.asSubType(d), properties, true, pd); |
| | | return new LDAPManagedObject<>(this, d, p.asSubType(d), properties, true, pd); |
| | | } |
| | | |
| | | |
| | |
| | | PropertyException exception = null; |
| | | |
| | | // Get the property's active values. |
| | | SortedSet<PD> activeValues = new TreeSet<PD>(pd); |
| | | SortedSet<PD> activeValues = new TreeSet<>(pd); |
| | | if (attribute != null) { |
| | | NamingEnumeration<?> ldapValues = attribute.getAll(); |
| | | while (ldapValues.hasMore()) { |
| | |
| | | throw new DefinitionDecodingException(d, Reason.NO_TYPE_INFORMATION); |
| | | } |
| | | |
| | | final Set<String> objectClasses = new HashSet<String>(); |
| | | final Set<String> objectClasses = new HashSet<>(); |
| | | NamingEnumeration<?> values = oc.getAll(); |
| | | while (values.hasMore()) { |
| | | Object value = values.next(); |
| | |
| | | ManagedObjectDefinition<M, ?> d, ManagedObjectPath<M, ?> path, |
| | | PropertySet properties, boolean existsOnServer, |
| | | PropertyDefinition<?> namingPropertyDefinition) { |
| | | return new LDAPManagedObject<M>(driver, d, path, properties, |
| | | return new LDAPManagedObject<>(driver, d, path, properties, |
| | | existsOnServer, namingPropertyDefinition); |
| | | } |
| | | |
| | |
| | | } |
| | | |
| | | /** The list of RDNs in big-endian order. */ |
| | | private final LinkedList<Rdn> rdns; |
| | | |
| | | private final LinkedList<Rdn> rdns = new LinkedList<>(); |
| | | /** The LDAP profile. */ |
| | | private final LDAPProfile profile; |
| | | |
| | | |
| | | |
| | | /** |
| | | * Create a new JNDI LDAP name builder. |
| | | * |
| | |
| | | * names. |
| | | */ |
| | | public LDAPNameBuilder(LDAPProfile profile) { |
| | | this.rdns = new LinkedList<Rdn>(); |
| | | this.profile = profile; |
| | | } |
| | | |
| | | |
| | | |
| | | /** {@inheritDoc} */ |
| | | public <C extends ConfigurationClient, S extends Configuration> |
| | | void appendManagedObjectPathElement( |
| | |
| | | OperationRejectedException, AuthorizationException, |
| | | CommunicationException { |
| | | // First make sure all mandatory properties are defined. |
| | | List<PropertyException> exceptions = new LinkedList<PropertyException>(); |
| | | List<PropertyException> exceptions = new LinkedList<>(); |
| | | |
| | | for (PropertyDefinition<?> pd : definition.getAllPropertyDefinitions()) { |
| | | Property<?> p = getProperty(pd); |
| | |
| | | } |
| | | |
| | | // Now enforce any constraints. |
| | | List<LocalizableMessage> messages = new LinkedList<LocalizableMessage>(); |
| | | List<LocalizableMessage> messages = new LinkedList<>(); |
| | | boolean isAcceptable = true; |
| | | ManagementContext context = getDriver().getManagementContext(); |
| | | |
| | |
| | | /** {@inheritDoc} */ |
| | | public final <PD> SortedSet<PD> getPropertyDefaultValues( |
| | | PropertyDefinition<PD> pd) throws IllegalArgumentException { |
| | | return new TreeSet<PD>(getProperty(pd).getDefaultValues()); |
| | | return new TreeSet<>(getProperty(pd).getDefaultValues()); |
| | | } |
| | | |
| | | |
| | |
| | | public final <PD> PD getPropertyValue(PropertyDefinition<PD> pd) |
| | | throws IllegalArgumentException { |
| | | Set<PD> values = getProperty(pd).getEffectiveValues(); |
| | | if (values.isEmpty()) { |
| | | return null; |
| | | } else { |
| | | if (!values.isEmpty()) { |
| | | return values.iterator().next(); |
| | | } |
| | | return null; |
| | | } |
| | | |
| | | |
| | |
| | | /** {@inheritDoc} */ |
| | | public final <PD> SortedSet<PD> getPropertyValues(PropertyDefinition<PD> pd) |
| | | throws IllegalArgumentException { |
| | | return new TreeSet<PD>(getProperty(pd).getEffectiveValues()); |
| | | return new TreeSet<>(getProperty(pd).getEffectiveValues()); |
| | | } |
| | | |
| | | |
| | |
| | | Collection<T> activeValues) { |
| | | this.d = pd; |
| | | |
| | | SortedSet<T> sortedDefaultValues = new TreeSet<T>(pd); |
| | | SortedSet<T> sortedDefaultValues = new TreeSet<>(pd); |
| | | sortedDefaultValues.addAll(defaultValues); |
| | | this.defaultValues = Collections |
| | | .unmodifiableSortedSet(sortedDefaultValues); |
| | | |
| | | this.activeValues = new TreeSet<T>(pd); |
| | | this.activeValues = new TreeSet<>(pd); |
| | | this.activeValues.addAll(activeValues); |
| | | |
| | | // Initially the pending values is the same as the active |
| | | // values. |
| | | this.pendingValues = new TreeSet<T>(this.activeValues); |
| | | // Initially the pending values is the same as the active values. |
| | | this.pendingValues = new TreeSet<>(this.activeValues); |
| | | } |
| | | |
| | | |
| | | |
| | | /** |
| | | * Makes the pending values active. |
| | | */ |
| | | /** Makes the pending values active. */ |
| | | public void commit() { |
| | | activeValues.clear(); |
| | | activeValues.addAll(pendingValues); |
| | | } |
| | | |
| | | |
| | | |
| | | /** {@inheritDoc} */ |
| | | public SortedSet<T> getActiveValues() { |
| | | return Collections.unmodifiableSortedSet(activeValues); |
| | | } |
| | | |
| | | |
| | | |
| | | /** {@inheritDoc} */ |
| | | public SortedSet<T> getDefaultValues() { |
| | | return defaultValues; |
| | | } |
| | | |
| | | |
| | | |
| | | /** {@inheritDoc} */ |
| | | public SortedSet<T> getEffectiveValues() { |
| | | SortedSet<T> values = getPendingValues(); |
| | |
| | | } |
| | | |
| | | /** The properties. */ |
| | | private final Map<PropertyDefinition<?>, MyProperty<?>> properties; |
| | | private final Map<PropertyDefinition<?>, MyProperty<?>> properties = new HashMap<>(); |
| | | |
| | | |
| | | |
| | | /** |
| | | * Creates a new empty property set. |
| | | */ |
| | | /** Creates a new empty property set. */ |
| | | public PropertySet() { |
| | | this.properties = new HashMap<PropertyDefinition<?>, MyProperty<?>>(); |
| | | } |
| | | |
| | | |
| | | |
| | | /** |
| | | * Creates a property with the provided sets of pre-validated |
| | | * default and active values. |
| | |
| | | */ |
| | | public <T> void addProperty(PropertyDefinition<T> pd, |
| | | Collection<T> defaultValues, Collection<T> activeValues) { |
| | | MyProperty<T> p = new MyProperty<T>(pd, defaultValues, activeValues); |
| | | MyProperty<T> p = new MyProperty<>(pd, defaultValues, activeValues); |
| | | properties.put(pd, p); |
| | | } |
| | | |
| | |
| | | private <T> void buildImpl(PropertyDefinition<T> pd) |
| | | throws PropertyException { |
| | | T value = pd.decodeValue(propertyStringValue); |
| | | this.impl = new Impl<T>(pd, value); |
| | | this.impl = new Impl<>(pd, value); |
| | | } |
| | | |
| | | /** |
| | |
| | | TreeMap<String, RelationDefinition> catMap = |
| | | catTopRelList.get(tag.getName()); |
| | | if (catMap == null) { |
| | | catMap = new TreeMap<String, RelationDefinition>(); |
| | | catMap = new TreeMap<>(); |
| | | catTopRelList.put(tag.getName(), catMap); |
| | | } |
| | | catMap.put(mo.getName(), rel); |
| | |
| | | TreeMap<String, AbstractManagedObjectDefinition> catMap = |
| | | catTopMoList.get(tag.getName()); |
| | | if (catMap == null) { |
| | | catMap = new TreeMap<String, AbstractManagedObjectDefinition>(); |
| | | catMap = new TreeMap<>(); |
| | | catTopMoList.put(tag.getName(), catMap); |
| | | } |
| | | catMap.put(topObject.getName(), topObject); |
| | |
| | | paragraph("A description of each property follows."); |
| | | newline(); |
| | | |
| | | TreeMap<String, PropertyDefinition> basicProps = |
| | | new TreeMap<String, PropertyDefinition>(); |
| | | TreeMap<String, PropertyDefinition> advancedProps = |
| | | new TreeMap<String, PropertyDefinition>(); |
| | | TreeMap<String, PropertyDefinition> basicProps = new TreeMap<>(); |
| | | TreeMap<String, PropertyDefinition> advancedProps = new TreeMap<>(); |
| | | // Properties actually defined in this managed object |
| | | @SuppressWarnings("unchecked") |
| | | Collection<PropertyDefinition> props = mo.getAllPropertyDefinitions(); |
| | |
| | | paragraph( |
| | | "The following components have a direct AGGREGATION relation FROM " + |
| | | mo.getUserFriendlyPluralName() + " :"); |
| | | TreeMap<String, AbstractManagedObjectDefinition> componentList = |
| | | new TreeMap<String, AbstractManagedObjectDefinition>(); |
| | | TreeMap<String, AbstractManagedObjectDefinition> componentList = new TreeMap<>(); |
| | | for ( AggregationPropertyDefinition agg : aggregProps) { |
| | | RelationDefinition rel = agg.getRelationDefinition(); |
| | | AbstractManagedObjectDefinition childRel = rel.getChildDefinition(); |
| | |
| | | } |
| | | for (AbstractManagedObjectDefinition component : componentList.values()) { |
| | | beginList(); |
| | | link(component.getUserFriendlyName().toString(), component.getName() + |
| | | ".html"); |
| | | link(component.getUserFriendlyName().toString(), component.getName() + ".html"); |
| | | endList(); |
| | | } |
| | | } |
| | |
| | | paragraph( |
| | | "The following components have a direct AGGREGATION relation TO " + |
| | | mo.getUserFriendlyPluralName() + " :"); |
| | | TreeMap<String, AbstractManagedObjectDefinition> componentList = |
| | | new TreeMap<String, AbstractManagedObjectDefinition>(); |
| | | TreeMap<String, AbstractManagedObjectDefinition> componentList = new TreeMap<>(); |
| | | for ( AggregationPropertyDefinition agg : reverseAggregProps) { |
| | | AbstractManagedObjectDefinition fromMo = |
| | | agg.getManagedObjectDefinition(); |
| | |
| | | private void genPropertiesIndex() { |
| | | |
| | | // Build a sorted list of (property name + its managed object name) |
| | | TreeSet<String> propMoList = new TreeSet<String>(); |
| | | TreeSet<String> propMoList = new TreeSet<>(); |
| | | for (AbstractManagedObjectDefinition<?, ?> mo : moList.values()) { |
| | | for (PropertyDefinition<?> prop : mo.getPropertyDefinitions()) { |
| | | propMoList.add( |
| | |
| | | if (coll == null) { |
| | | return null; |
| | | } |
| | | TreeMap<String, AbstractManagedObjectDefinition> map = |
| | | new TreeMap<String, AbstractManagedObjectDefinition>(); |
| | | TreeMap<String, AbstractManagedObjectDefinition> map = new TreeMap<>(); |
| | | for (AbstractManagedObjectDefinition mo : coll) { |
| | | if (mo.hasOption(ManagedObjectOption.HIDDEN)) |
| | | { |
| | |
| | | if (coll == null) { |
| | | return null; |
| | | } |
| | | TreeMap<String, RelationDefinition> map = |
| | | new TreeMap<String, RelationDefinition>(); |
| | | TreeMap<String, RelationDefinition> map = new TreeMap<>(); |
| | | for (RelationDefinition rel : coll) { |
| | | map.put(rel.getChildDefinition().getName(), rel); |
| | | } |
| | |
| | | if (coll == null) { |
| | | return null; |
| | | } |
| | | TreeMap<String, PropertyDefinition> map = |
| | | new TreeMap<String, PropertyDefinition>(); |
| | | TreeMap<String, PropertyDefinition> map = new TreeMap<>(); |
| | | for (PropertyDefinition prop : coll) { |
| | | map.put(prop.getName(), prop); |
| | | } |
| | |
| | | } |
| | | |
| | | /** Relation List from RootConfiguration. */ |
| | | private final TreeMap<String, RelationDefinition> topRelList = |
| | | new TreeMap<String, RelationDefinition>(); |
| | | private final TreeMap<String, RelationDefinition> relList = |
| | | new TreeMap<String, RelationDefinition>(); |
| | | private final TreeMap<String, TreeMap<String, RelationDefinition>> |
| | | catTopRelList = new TreeMap<String, TreeMap<String, RelationDefinition>>(); |
| | | private final TreeMap<String, RelationDefinition> topRelList = new TreeMap<>(); |
| | | private final TreeMap<String, RelationDefinition> relList = new TreeMap<>(); |
| | | private final TreeMap<String, TreeMap<String, RelationDefinition>> catTopRelList = new TreeMap<>(); |
| | | /** Managed object list. */ |
| | | private final TreeMap<String, AbstractManagedObjectDefinition> moList = |
| | | new TreeMap<String, AbstractManagedObjectDefinition>(); |
| | | private final TreeMap<String, AbstractManagedObjectDefinition> topMoList = |
| | | new TreeMap<String, AbstractManagedObjectDefinition>(); |
| | | private final TreeMap<String, |
| | | TreeMap<String, AbstractManagedObjectDefinition>> |
| | | catTopMoList = |
| | | new TreeMap<String, TreeMap<String, AbstractManagedObjectDefinition>>(); |
| | | private final TreeMap<String, AbstractManagedObjectDefinition> moList = new TreeMap<>(); |
| | | private final TreeMap<String, AbstractManagedObjectDefinition> topMoList = new TreeMap<>(); |
| | | private final TreeMap<String, TreeMap<String, AbstractManagedObjectDefinition>> |
| | | catTopMoList = new TreeMap<>(); |
| | | private final int ind = 0; |
| | | private StringBuffer htmlBuff = new StringBuffer(); |
| | | private static String generationDir; |
| | |
| | | } |
| | | |
| | | // Let the add listener decide. |
| | | List<LocalizableMessage> reasons = new LinkedList<LocalizableMessage>(); |
| | | List<LocalizableMessage> reasons = new LinkedList<>(); |
| | | if (listener.isConfigurationAddAcceptable(cachedManagedObject, reasons)) { |
| | | return true; |
| | | } else { |
| | |
| | | */ |
| | | public static <T> void find(ManagedObjectPath<?, ?> path, |
| | | PropertyDefinition<T> pd, Collection<DN> dependencies) { |
| | | Visitor<T> v = new Visitor<T>(dependencies); |
| | | Visitor<T> v = new Visitor<>(dependencies); |
| | | DefaultBehaviorProvider<T> db = pd.getDefaultBehaviorProvider(); |
| | | db.accept(v, path); |
| | | } |
| | |
| | | // This change listener should be notified when dependent entries |
| | | // are modified. Determine the dependencies and register change |
| | | // listeners against them. |
| | | this.dependencies = new HashSet<DN>(); |
| | | this.dependencies = new HashSet<>(); |
| | | this.dependencyListener = new ConfigChangeListener() { |
| | | |
| | | public ConfigChangeResult applyConfigurationChange( |
| | |
| | | } |
| | | |
| | | // Let the change listener decide. |
| | | List<LocalizableMessage> reasons = new LinkedList<LocalizableMessage>(); |
| | | List<LocalizableMessage> reasons = new LinkedList<>(); |
| | | if (listener.isConfigurationChangeAcceptable(cachedManagedObject,reasons)) { |
| | | return true; |
| | | } else { |
| | |
| | | return false; |
| | | } |
| | | |
| | | List<LocalizableMessage> reasons = new LinkedList<LocalizableMessage>(); |
| | | List<LocalizableMessage> reasons = new LinkedList<>(); |
| | | |
| | | // Enforce any constraints. |
| | | boolean isDeleteAllowed = true; |
| | |
| | | super(getDefaultMessage(messages)); |
| | | |
| | | this.managedObject = managedObject; |
| | | this.messages = new ArrayList<LocalizableMessage>(messages); |
| | | this.messages = new ArrayList<>(messages); |
| | | } |
| | | |
| | | |
| | |
| | | throws IllegalArgumentException, ConfigException { |
| | | validateRelationDefinition(d); |
| | | DN baseDN = DNBuilder.create(path, d); |
| | | ConfigAddListener adaptor = new ConfigAddListenerAdaptor<M>(path, d, |
| | | listener); |
| | | ConfigAddListener adaptor = new ConfigAddListenerAdaptor<>(path, d, listener); |
| | | registerAddListener(baseDN, adaptor); |
| | | } |
| | | |
| | |
| | | public <M extends Configuration> void registerAddListener( |
| | | OptionalRelationDefinition<?, M> d, ConfigurationAddListener<M> listener) |
| | | throws IllegalArgumentException, ConfigException { |
| | | registerAddListener(d, new ServerManagedObjectAddListenerAdaptor<M>( |
| | | listener)); |
| | | registerAddListener(d, new ServerManagedObjectAddListenerAdaptor<M>(listener)); |
| | | } |
| | | |
| | | |
| | |
| | | throws IllegalArgumentException, ConfigException { |
| | | validateRelationDefinition(d); |
| | | DN baseDN = DNBuilder.create(path, d).parent(); |
| | | ConfigAddListener adaptor = new ConfigAddListenerAdaptor<M>(path, d, |
| | | listener); |
| | | ConfigAddListener adaptor = new ConfigAddListenerAdaptor<>(path, d, listener); |
| | | registerAddListener(baseDN, adaptor); |
| | | } |
| | | |
| | |
| | | throws IllegalArgumentException, ConfigException { |
| | | validateRelationDefinition(d); |
| | | DN baseDN = DNBuilder.create(path, d); |
| | | ConfigAddListener adaptor = new ConfigAddListenerAdaptor<M>(path, d, |
| | | listener); |
| | | ConfigAddListener adaptor = new ConfigAddListenerAdaptor<>(path, d, listener); |
| | | registerAddListener(baseDN, adaptor); |
| | | } |
| | | |
| | |
| | | */ |
| | | public void registerChangeListener( |
| | | ServerManagedObjectChangeListener<? super S> listener) { |
| | | ConfigChangeListener adaptor = new ConfigChangeListenerAdaptor<S>(path, |
| | | listener); |
| | | ConfigChangeListener adaptor = new ConfigChangeListenerAdaptor<>(path, listener); |
| | | configEntry.registerChangeListener(adaptor); |
| | | |
| | | // Change listener registration usually signifies that a managed |
| | |
| | | throws IllegalArgumentException, ConfigException { |
| | | validateRelationDefinition(d); |
| | | DN baseDN = DNBuilder.create(path, d); |
| | | ConfigDeleteListener adaptor = new ConfigDeleteListenerAdaptor<M>(path, d, |
| | | listener); |
| | | ConfigDeleteListener adaptor = new ConfigDeleteListenerAdaptor<>(path, d, listener); |
| | | registerDeleteListener(baseDN, adaptor); |
| | | } |
| | | |
| | |
| | | throws IllegalArgumentException, ConfigException { |
| | | validateRelationDefinition(d); |
| | | DN baseDN = DNBuilder.create(path, d).parent(); |
| | | ConfigDeleteListener adaptor = new ConfigDeleteListenerAdaptor<M>(path, d, |
| | | listener); |
| | | ConfigDeleteListener adaptor = new ConfigDeleteListenerAdaptor<>(path, d, listener); |
| | | registerDeleteListener(baseDN, adaptor); |
| | | } |
| | | |
| | |
| | | throws IllegalArgumentException, ConfigException { |
| | | validateRelationDefinition(d); |
| | | DN baseDN = DNBuilder.create(path, d); |
| | | ConfigDeleteListener adaptor = new ConfigDeleteListenerAdaptor<M>(path, d, |
| | | listener); |
| | | ConfigDeleteListener adaptor = new ConfigDeleteListenerAdaptor<>(path, d, listener); |
| | | registerDeleteListener(baseDN, adaptor); |
| | | } |
| | | |
| | |
| | | void ensureIsUsable() throws ConstraintViolationException { |
| | | // Enforce any constraints. |
| | | boolean isUsable = true; |
| | | List<LocalizableMessage> reasons = new LinkedList<LocalizableMessage>(); |
| | | List<LocalizableMessage> reasons = new LinkedList<>(); |
| | | for (Constraint constraint : definition.getAllConstraints()) { |
| | | for (ServerConstraintHandler handler : constraint |
| | | .getServerConstraintHandlers()) { |
| | |
| | | public Collection<T> visitDefined(DefinedDefaultBehaviorProvider<T> d, |
| | | Void p) { |
| | | Collection<String> stringValues = d.getDefaultValues(); |
| | | List<T> values = new ArrayList<T>(stringValues.size()); |
| | | List<T> values = new ArrayList<>(stringValues.size()); |
| | | |
| | | for (String stringValue : stringValues) { |
| | | try { |
| | |
| | | if (values.isEmpty()) { |
| | | // Recursively retrieve this property's default values. |
| | | Collection<T> tmp = find(target, pd2); |
| | | Collection<T> pvalues = new ArrayList<T>(tmp.size()); |
| | | Collection<T> pvalues = new ArrayList<>(tmp.size()); |
| | | for (T value : tmp) { |
| | | pd1.validateValue(value); |
| | | pvalues.add(value); |
| | | } |
| | | return pvalues; |
| | | } else { |
| | | Collection<T> pvalues = new ArrayList<T>(values.size()); |
| | | Collection<T> pvalues = new ArrayList<>(values.size()); |
| | | for (ByteString value : values) { |
| | | pvalues.add(ValueDecoder.decode(pd1, value)); |
| | | } |
| | |
| | | |
| | | |
| | | /** Singleton instance. */ |
| | | private static final ServerManagementContext INSTANCE = |
| | | new ServerManagementContext(); |
| | | private static final ServerManagementContext INSTANCE = new ServerManagementContext(); |
| | | |
| | | /** |
| | | * The root server managed object. |
| | | */ |
| | | private static final ServerManagedObject<RootCfg> ROOT = |
| | | new ServerManagedObject<RootCfg>( |
| | | /** The root server managed object. */ |
| | | private static final ServerManagedObject<RootCfg> ROOT = new ServerManagedObject<>( |
| | | ManagedObjectPath.emptyPath(), RootCfgDefn.getInstance(), Collections |
| | | .<PropertyDefinition<?>, SortedSet<?>> emptyMap(), null); |
| | | private static final LocalizedLogger logger = LocalizedLogger.getLoggerForThisClass(); |
| | |
| | | |
| | | // Retrieve the children. |
| | | Set<DN> children = configEntry.getChildren().keySet(); |
| | | ArrayList<String> names = new ArrayList<String>(children.size()); |
| | | ArrayList<String> names = new ArrayList<>(children.size()); |
| | | for (DN child : children) { |
| | | // Assume that RDNs are single-valued and can be trimmed. |
| | | ByteString av = child.rdn().getAttributeValue(0); |
| | |
| | | |
| | | // Retrieve the children. |
| | | Set<DN> children = configEntry.getChildren().keySet(); |
| | | ArrayList<String> names = new ArrayList<String>(children.size()); |
| | | ArrayList<String> names = new ArrayList<>(children.size()); |
| | | for (DN child : children) { |
| | | // Assume that RDNs are single-valued and can be trimmed. |
| | | ByteString av = child.rdn().getAttributeValue(0); |
| | |
| | | .resolveManagedObjectDefinition(resolver); |
| | | |
| | | // Build the managed object's properties. |
| | | List<PropertyException> exceptions = new LinkedList<PropertyException>(); |
| | | Map<PropertyDefinition<?>, SortedSet<?>> properties = |
| | | new HashMap<PropertyDefinition<?>, SortedSet<?>>(); |
| | | List<PropertyException> exceptions = new LinkedList<>(); |
| | | Map<PropertyDefinition<?>, SortedSet<?>> properties = new HashMap<>(); |
| | | for (PropertyDefinition<?> pd : mod.getAllPropertyDefinitions()) { |
| | | List<ByteString> values = getAttribute(mod, pd, configEntry); |
| | | try { |
| | |
| | | |
| | | // If there were no decoding problems then return the managed |
| | | // object, otherwise throw an operations exception. |
| | | ServerManagedObject<? extends S> mo = decodeAux(path, mod, properties, |
| | | configEntry); |
| | | if (exceptions.isEmpty()) { |
| | | return mo; |
| | | } else { |
| | | ServerManagedObject<? extends S> mo = decodeAux(path, mod, properties, configEntry); |
| | | if (!exceptions.isEmpty()) { |
| | | throw new ServerManagedObjectDecodingException(mo, exceptions); |
| | | } |
| | | return mo; |
| | | } |
| | | |
| | | |
| | |
| | | Map<PropertyDefinition<?>, SortedSet<?>> properties, |
| | | ConfigEntry configEntry) { |
| | | ManagedObjectPath<C, S> newPath = path.asSubType(d); |
| | | return new ServerManagedObject<S>(newPath, d, properties, configEntry); |
| | | return new ServerManagedObject<>(newPath, d, properties, configEntry); |
| | | } |
| | | |
| | | |
| | |
| | | PropertyDefinition<T> pd, List<ByteString> values, |
| | | ConfigEntry newConfigEntry) throws PropertyException { |
| | | PropertyException exception = null; |
| | | SortedSet<T> pvalues = new TreeSet<T>(pd); |
| | | SortedSet<T> pvalues = new TreeSet<>(pd); |
| | | |
| | | if (!values.isEmpty()) { |
| | | // The property has values defined for it. |
| | |
| | | AttributeType type = DirectoryServer.getAttributeType(attrID, true); |
| | | List<Attribute> attributes = configEntry.getEntry().getAttribute(type, true); |
| | | |
| | | List<ByteString> results = new LinkedList<ByteString>(); |
| | | List<ByteString> results = new LinkedList<>(); |
| | | if (attributes != null) |
| | | { |
| | | for (Attribute a : attributes) |
| | |
| | | private <T> Collection<T> getDefaultValues(ManagedObjectPath<?, ?> p, |
| | | PropertyDefinition<T> pd, ConfigEntry newConfigEntry) |
| | | throws PropertyException { |
| | | DefaultValueFinder<T> v = new DefaultValueFinder<T>(newConfigEntry); |
| | | DefaultValueFinder<T> v = new DefaultValueFinder<>(newConfigEntry); |
| | | return v.find(p, pd); |
| | | } |
| | | |
| | |
| | | |
| | | import java.util.ArrayList; |
| | | import java.util.Collection; |
| | | import java.util.Collections; |
| | | import java.util.LinkedHashSet; |
| | | import java.util.List; |
| | | import java.util.Queue; |
| | |
| | | private WritabilityMode writabilityMode = WritabilityMode.ENABLED; |
| | | |
| | | /** The set of persistent searches registered with this backend. */ |
| | | private final ConcurrentLinkedQueue<PersistentSearch> persistentSearches = |
| | | new ConcurrentLinkedQueue<PersistentSearch>(); |
| | | private final ConcurrentLinkedQueue<PersistentSearch> persistentSearches = new ConcurrentLinkedQueue<>(); |
| | | |
| | | /** |
| | | * Configure this backend based on the information in the provided configuration. |
| | |
| | | */ |
| | | public final synchronized void addSubordinateBackend(Backend<?> subordinateBackend) |
| | | { |
| | | LinkedHashSet<Backend<?>> backendSet = new LinkedHashSet<Backend<?>>(); |
| | | |
| | | for (Backend<?> b : subordinateBackends) |
| | | { |
| | | backendSet.add(b); |
| | | } |
| | | LinkedHashSet<Backend<?>> backendSet = new LinkedHashSet<>(); |
| | | Collections.addAll(backendSet, subordinateBackends); |
| | | |
| | | if (backendSet.add(subordinateBackend)) |
| | | { |
| | | Backend<?>[] newSubordinateBackends = new Backend[backendSet.size()]; |
| | | backendSet.toArray(newSubordinateBackends); |
| | | subordinateBackends = newSubordinateBackends; |
| | | subordinateBackends = backendSet.toArray(new Backend[backendSet.size()]); |
| | | } |
| | | } |
| | | |
| | |
| | | */ |
| | | public final synchronized void removeSubordinateBackend(Backend<?> subordinateBackend) |
| | | { |
| | | ArrayList<Backend<?>> backendList = new ArrayList<Backend<?>>(subordinateBackends.length); |
| | | ArrayList<Backend<?>> backendList = new ArrayList<>(subordinateBackends.length); |
| | | |
| | | boolean found = false; |
| | | for (Backend<?> b : subordinateBackends) |
| | |
| | | |
| | | if (found) |
| | | { |
| | | Backend<?>[] newSubordinateBackends = new Backend[backendList.size()]; |
| | | backendList.toArray(newSubordinateBackends); |
| | | subordinateBackends = newSubordinateBackends; |
| | | subordinateBackends = backendList.toArray(new Backend[backendList.size()]); |
| | | } |
| | | } |
| | | |
| | |
| | | private boolean finalized; |
| | | |
| | | /** The set of privileges assigned to this client connection. */ |
| | | private HashSet<Privilege> privileges; |
| | | private HashSet<Privilege> privileges = new HashSet<>(); |
| | | |
| | | /** The size limit for use with this client connection. */ |
| | | private int sizeLimit; |
| | | |
| | | /** The time limit for use with this client connection. */ |
| | | private int timeLimit; |
| | | |
| | | /** The lookthrough limit for use with this client connection. */ |
| | | private int lookthroughLimit; |
| | | |
| | | /** The time that this client connection was established. */ |
| | | private final long connectTime; |
| | | |
| | | /** The idle time limit for this client connection. */ |
| | | private long idleTimeLimit; |
| | | |
| | |
| | | */ |
| | | private Object saslAuthState; |
| | | |
| | | /** |
| | | * A string representation of the time that this client connection was |
| | | * established. |
| | | */ |
| | | /** A string representation of the time that this client connection was established. */ |
| | | private final String connectTimeString; |
| | | |
| | | /** A set of persistent searches registered for this client. */ |
| | | private final CopyOnWriteArrayList<PersistentSearch> |
| | | persistentSearches; |
| | | private final CopyOnWriteArrayList<PersistentSearch> persistentSearches = new CopyOnWriteArrayList<>(); |
| | | |
| | | /** |
| | | * Performs the appropriate initialization generic to all client |
| | | * connections. |
| | | */ |
| | | /** Performs the appropriate initialization generic to all client connections. */ |
| | | protected ClientConnection() |
| | | { |
| | | connectTime = TimeThread.getTime(); |
| | |
| | | saslAuthState = null; |
| | | saslBindInProgress = new AtomicBoolean(false); |
| | | bindOrStartTLSInProgress = new AtomicBoolean(false); |
| | | persistentSearches = new CopyOnWriteArrayList<PersistentSearch>(); |
| | | sizeLimit = DirectoryServer.getSizeLimit(); |
| | | timeLimit = DirectoryServer.getTimeLimit(); |
| | | idleTimeLimit = DirectoryServer.getIdleTimeLimit(); |
| | | lookthroughLimit = DirectoryServer.getLookthroughLimit(); |
| | | finalized = false; |
| | | privileges = new HashSet<Privilege>(); |
| | | } |
| | | |
| | | |
| | |
| | | { |
| | | if (entry == null) |
| | | { |
| | | return new HashSet<Privilege>(0); |
| | | return new HashSet<>(0); |
| | | } |
| | | |
| | | HashSet<Privilege> newPrivileges = new HashSet<>(); |
| | |
| | | return Collections.<Group<?>>emptySet(); |
| | | } |
| | | |
| | | HashSet<Group<?>> groupSet = new HashSet<Group<?>>(); |
| | | for (Group<?> g : |
| | | DirectoryServer.getGroupManager().getGroupInstances()) |
| | | HashSet<Group<?>> groupSet = new HashSet<>(); |
| | | for (Group<?> g : DirectoryServer.getGroupManager().getGroupInstances()) |
| | | { |
| | | if (g.isMember(userEntry)) |
| | | { |
| | | groupSet.add(g); |
| | | } |
| | | } |
| | | |
| | | return groupSet; |
| | | } |
| | | |
| | |
| | | public class CompressedSchema |
| | | { |
| | | /** Maps attribute description to ID. */ |
| | | private final List<Entry<AttributeType, Set<String>>> adDecodeMap; |
| | | |
| | | private final List<Entry<AttributeType, Set<String>>> adDecodeMap = new CopyOnWriteArrayList<>(); |
| | | /** Maps ID to attribute description. */ |
| | | private final Map<Entry<AttributeType, Set<String>>, Integer> adEncodeMap; |
| | | |
| | | private final Map<Entry<AttributeType, Set<String>>, Integer> adEncodeMap = new ConcurrentHashMap<>(); |
| | | /** The map between encoded representations and object class sets. */ |
| | | private final List<Map<ObjectClass, String>> ocDecodeMap; |
| | | |
| | | private final List<Map<ObjectClass, String>> ocDecodeMap = new CopyOnWriteArrayList<>(); |
| | | /** The map between object class sets and encoded representations. */ |
| | | private final Map<Map<ObjectClass, String>, Integer> ocEncodeMap; |
| | | |
| | | |
| | | |
/** Creates a new empty instance of this compressed schema. */
public CompressedSchema()
{
  // Diamond operator: key/value types are inferred from the field declarations,
  // which also removes the awkward line-wrapped ConcurrentHashMap type arguments.
  adDecodeMap = new CopyOnWriteArrayList<>();
  ocDecodeMap = new CopyOnWriteArrayList<>();
  adEncodeMap = new ConcurrentHashMap<>();
  ocEncodeMap = new ConcurrentHashMap<>();
}
| | | |
| | | |
| | | private final Map<Map<ObjectClass, String>, Integer> ocEncodeMap = new ConcurrentHashMap<>(); |
| | | |
| | | /** |
| | | * Decodes the contents of the provided array as an attribute at the current |
| | |
| | | // Re-use or allocate a new ID. |
| | | final AttributeType type = attribute.getAttributeType(); |
| | | final Set<String> options = attribute.getOptions(); |
| | | final Entry<AttributeType, Set<String>> ad = |
| | | new SimpleImmutableEntry<AttributeType, Set<String>>(type, options); |
| | | final Entry<AttributeType, Set<String>> ad = new SimpleImmutableEntry<>(type, options); |
| | | |
| | | // Use double checked locking to avoid lazy registration races. |
| | | Integer id = adEncodeMap.get(ad); |
| | |
| | | { |
| | | private int id = 0; |
| | | |
| | | |
| | | |
/** Returns {@code true} while more encoded object class sets remain in {@code ocDecodeMap}. */
@Override
public boolean hasNext()
{
  return id < ocDecodeMap.size();
}
| | | |
| | | |
| | | |
| | | @Override |
| | | public Entry<byte[], Collection<String>> next() |
| | | { |
| | | final byte[] encodedObjectClasses = encodeId(id); |
| | | final Map<ObjectClass, String> ocMap = ocDecodeMap.get(id++); |
| | | return new SimpleImmutableEntry<byte[], Collection<String>>( |
| | | encodedObjectClasses, ocMap.values()); |
| | | return new SimpleImmutableEntry<>(encodedObjectClasses, ocMap.values()); |
| | | } |
| | | |
| | | |
| | | |
| | | @Override |
| | | public void remove() |
| | | { |
| | |
| | | }; |
| | | } |
| | | |
| | | |
| | | |
| | | /** |
| | | * Loads an encoded attribute into this compressed schema. This method may |
| | | * called by implementations during initialization when loading content from |
| | |
| | | options = Collections.singleton(attributeOptions.iterator().next()); |
| | | break; |
| | | default: |
| | | options = new LinkedHashSet<String>(attributeOptions); |
| | | options = new LinkedHashSet<>(attributeOptions); |
| | | break; |
| | | } |
| | | final Entry<AttributeType, Set<String>> ad = |
| | | new SimpleImmutableEntry<AttributeType, Set<String>>(type, options); |
| | | final Entry<AttributeType, Set<String>> ad = new SimpleImmutableEntry<>(type, options); |
| | | final int id = decodeId(encodedAttribute); |
| | | synchronized (adEncodeMap) |
| | | { |
| | |
| | | final byte[] encodedObjectClasses, |
| | | final Collection<String> objectClassNames) |
| | | { |
| | | final LinkedHashMap<ObjectClass, String> ocMap = |
| | | new LinkedHashMap<ObjectClass, String>(objectClassNames.size()); |
| | | final LinkedHashMap<ObjectClass, String> ocMap = new LinkedHashMap<>(objectClassNames.size()); |
| | | for (final String name : objectClassNames) |
| | | { |
| | | final String lowerName = toLowerCase(name); |
| | |
| | | * |
| | | * |
| | | * Copyright 2010 Sun Microsystems, Inc. |
| | | * Portions Copyright 2011-2014 ForgeRock AS |
| | | * Portions Copyright 2011-2015 ForgeRock AS |
| | | */ |
| | | package org.opends.server.api; |
| | | |
| | |
| | | private int size; |
| | | |
| | | /** Backing Map implementation. */ |
| | | private Map<DN,Node<T>> ditCacheMap; |
| | | private final Map<DN,Node<T>> ditCacheMap = new HashMap<>(); |
| | | |
/** Default constructor. */
public DITCacheMap()
{
  // Diamond operator: DN/Node<T> types are inferred from the field declaration.
  ditCacheMap = new HashMap<>();
}
| | | |
| | | /** |
| | |
| | | */ |
public DITCacheMap(Map<? extends DN, ? extends T> m)
{
  // Initialize the backing map, then copy every provided mapping into it.
  ditCacheMap = new HashMap<>();
  this.putAll(m);
}
| | | |
| | |
| | | return returnValue; |
| | | } |
| | | |
| | | Node<T> node = new Node<T>(); |
| | | Node<T> node = new Node<>(); |
| | | node.dn = key; |
| | | node.element = value; |
| | | node.parent = null; |
| | |
| | | if (parentNode == null) |
| | | { |
| | | // Add glue node. |
| | | final Node<T> newParentNode = new Node<T>(); |
| | | final Node<T> newParentNode = new Node<>(); |
| | | newParentNode.dn = parentDN; |
| | | newParentNode.element = null; |
| | | newParentNode.parent = null; |
| | |
| | | |
| | | /** The current logical thread's state. */ |
| | | private volatile AtomicReference<ThreadState> threadState = |
| | | new AtomicReference<ThreadState>(ThreadState.IDLE); |
| | | new AtomicReference<>(ThreadState.IDLE); |
| | | |
| | | /** |
| | | * A thread group for all directory threads. This implements a |
| | |
| | | private static class DirectoryThreadGroup extends ThreadGroup |
| | | implements AlertGenerator |
| | | { |
| | | private final LinkedHashMap<String,String> alerts; |
| | | private final LinkedHashMap<String,String> alerts = new LinkedHashMap<>(); |
| | | |
| | | /** |
| | | * Private constructor for DirectoryThreadGroup. |
| | | */ |
| | | /** Private constructor for DirectoryThreadGroup. */ |
/** Private constructor for DirectoryThreadGroup. */
private DirectoryThreadGroup()
{
  super("Directory Server Thread Group");
  // Diamond operator: String/String types are inferred from the field declaration.
  alerts = new LinkedHashMap<>();
  alerts.put(ALERT_TYPE_UNCAUGHT_EXCEPTION,
      ALERT_DESCRIPTION_UNCAUGHT_EXCEPTION);
}
| | |
| | | */ |
| | | public Map<String, String> getDebugProperties() |
| | | { |
| | | Map<String, String> properties = new LinkedHashMap<String, String>(); |
| | | Map<String, String> properties = new LinkedHashMap<>(); |
| | | |
| | | properties.put("parentThread", parentThread.getName() + |
| | | "(" + parentThread.getId() + ")"); |
| | |
| | | * A hash map that relates the plugin type names to the |
| | | * corresponding plugin type. |
| | | */ |
| | | private static final Map<String, PluginType> PLUGIN_TYPE_MAP; |
| | | private static final Map<String, PluginType> PLUGIN_TYPE_MAP = new HashMap<>(PluginType.values().length); |
| | | static |
| | | { |
| | | PLUGIN_TYPE_MAP = |
| | | new HashMap<String, PluginType>(PluginType.values().length); |
| | | for (PluginType type : PluginType.values()) |
| | | { |
| | | PLUGIN_TYPE_MAP.put(type.name, type); |
| | | } |
| | | } |
| | | |
| | | |
| | | |
| | | /** The name for this plugin type. */ |
| | | private String name; |
| | | |
| | | |
| | | |
| | | /** |
| | | * Creates a new plugin type instance with the specified name. |
| | | * |
| | |
| | | this.name = name; |
| | | } |
| | | |
| | | |
| | | |
| | | /** |
| | | * Retrieves the name for this plugin type. |
| | | * |
| | |
| | | */ |
| | | public static Set<String> decodeOID(String expr, LocalizableMessage msg) |
| | | throws AciException { |
| | | Set<String> OIDs = new HashSet<String>(); |
| | | Set<String> OIDs = new HashSet<>(); |
| | | //Quick check to see if the expression is valid. |
| | | if (Pattern.matches(oidListRegex, expr)) { |
| | | // Remove the spaces in the oid string and |
| | |
| | | throws AciException { |
| | | String version=null, name=null; |
| | | int startPos=0; |
| | | List<PermBindRulePair> permBindRulePairs= |
| | | new ArrayList<PermBindRulePair>(); |
| | | List<PermBindRulePair> permBindRulePairs = new ArrayList<>(); |
| | | Pattern bodyPattern = Pattern.compile(header); |
| | | Matcher bodyMatcher = bodyPattern.matcher(input); |
| | | if(bodyMatcher.find()) { |
| | |
| | | * Table of ACIs that have targattrfilter keywords that matched. Used |
| | | * in geteffectiverights attributeLevel write evaluation. |
| | | */ |
| | | private final HashMap<Aci,Aci> targAttrFilterAcis=new HashMap<Aci, Aci>(); |
| | | private final HashMap<Aci,Aci> targAttrFilterAcis = new HashMap<>(); |
| | | |
| | | /** |
| | | * The name of a ACI that decided an evaluation and contained a |
| | |
| | | // Check if the attributes aclRights and aclRightsInfo were requested and |
| | | // add attributes less those two attributes to a new list of attribute |
| | | // types. |
| | | List<AttributeType> nonRightsAttrs = new LinkedList<AttributeType>(); |
| | | List<AttributeType> nonRightsAttrs = new LinkedList<>(); |
| | | int attrMask = ACI_NULL; |
| | | for (String a : searchAttributes) |
| | | { |
| | |
| | | private void createApplicableList(List<Aci> candidates, |
| | | AciTargetMatchContext targetMatchCtx) |
| | | { |
| | | List<Aci> denys = new LinkedList<Aci>(); |
| | | List<Aci> allows = new LinkedList<Aci>(); |
| | | List<Aci> denys = new LinkedList<>(); |
| | | List<Aci> allows = new LinkedList<>(); |
| | | for (Aci aci : candidates) |
| | | { |
| | | if (Aci.isApplicable(aci, targetMatchCtx)) |
| | |
| | | */ |
| | | private List<AttributeType> getAllAttrs(Entry e) |
| | | { |
| | | List<AttributeType> typeList = new LinkedList<AttributeType>(); |
| | | List<AttributeType> typeList = new LinkedList<>(); |
| | | /* |
| | | * When a search is not all attributes returned, the "objectclass" |
| | | * attribute type is missing from the entry. |
| | |
| | | */ |
| | | private void processConfigAcis() throws InitializationException |
| | | { |
| | | LinkedList<LocalizableMessage> failedACIMsgs = new LinkedList<LocalizableMessage>(); |
| | | LinkedList<LocalizableMessage> failedACIMsgs = new LinkedList<>(); |
| | | InternalClientConnection conn = getRootConnection(); |
| | | |
| | | ConfigHandler<?> configBackend = DirectoryServer.getConfigHandler(); |
| | |
| | | */ |
| | | private final AciChangeListenerPlugin plugin; |
| | | |
| | | /** |
| | | * The aci attribute type is operational so we need to specify it to |
| | | * be returned. |
| | | */ |
| | | private static LinkedHashSet<String> attrs = |
| | | new LinkedHashSet<String>(); |
| | | |
| | | /** The aci attribute type is operational so we need to specify it to be returned. */ |
| | | private static LinkedHashSet<String> attrs = new LinkedHashSet<>(); |
| | | static |
| | | { |
| | | /* |
| | | * Set up the filter used to search private and public contexts. |
| | | */ |
| | | // Set up the filter used to search private and public contexts. |
| | | try |
| | | { |
| | | aciFilter = SearchFilter.createFilterFromString("(aci=*)"); |
| | |
| | | logger.warn(WARN_ACI_ATTRIBUTE_NOT_INDEXED, backend.getBackendID(), "aci"); |
| | | } |
| | | |
| | | LinkedList<LocalizableMessage> failedACIMsgs = new LinkedList<LocalizableMessage>(); |
| | | LinkedList<LocalizableMessage> failedACIMsgs = new LinkedList<>(); |
| | | |
| | | InternalClientConnection conn = getRootConnection(); |
| | | // Add manageDsaIT control so any ACIs in referral entries will be |
| | |
| | | @Override |
| | | public LinkedHashMap<String, String> getAlerts() |
| | | { |
| | | LinkedHashMap<String, String> alerts = |
| | | new LinkedHashMap<String, String>(); |
| | | LinkedHashMap<String, String> alerts = new LinkedHashMap<>(); |
| | | alerts.put(ALERT_TYPE_ACCESS_CONTROL_PARSE_FAILED, |
| | | ALERT_DESCRIPTION_ACCESS_CONTROL_PARSE_FAILED); |
| | | return alerts; |
| | | |
| | | } |
| | | |
| | | |
| | | |
| | | /** |
| | | * Log the exception messages from the failed ACI decode and then put |
| | | * the server in lockdown mode -- if needed. |
| | |
| | | */ |
| | | public void logMsgsSetLockDownMode(LinkedList<LocalizableMessage> failedACIMsgs) |
| | | { |
| | | |
| | | for (LocalizableMessage msg : failedACIMsgs) |
| | | { |
| | | logger.warn(WARN_ACI_SERVER_DECODE_FAILED, msg); |
| | |
| | | * |
| | | * |
| | | * Copyright 2008 Sun Microsystems, Inc. |
| | | * Portions Copyright 2013-2014 ForgeRock AS |
| | | * Portions Copyright 2013-2015 ForgeRock AS |
| | | */ |
| | | package org.opends.server.authorization.dseecompat; |
| | | |
| | |
| | | public class BindRule { |
| | | |
| | | /** This hash table holds the keyword bind rule mapping. */ |
| | | private final HashMap<String, KeywordBindRule> keywordRuleMap = new HashMap<String, KeywordBindRule>(); |
| | | private final HashMap<String, KeywordBindRule> keywordRuleMap = new HashMap<>(); |
| | | |
| | | /** True is a boolean "not" was seen. */ |
| | | private boolean negate; |
| | |
| | | LocalizableMessage message = WARN_ACI_SYNTAX_INVALID_DNS_EXPRESSION.get(expr); |
| | | throw new AciException(message); |
| | | } |
| | | List<String> dns = new LinkedList<String>(); |
| | | List<String> dns = new LinkedList<>(); |
| | | int valuePos = 1; |
| | | Pattern valuePattern = Pattern.compile(valueRegex); |
| | | Matcher valueMatcher = valuePattern.matcher(expr); |
| | |
| | | public static KeywordBindRule decode(String expr, EnumBindRuleType type) |
| | | throws AciException |
| | | { |
| | | List<EnumDayOfWeek> days = new LinkedList<EnumDayOfWeek>(); |
| | | List<EnumDayOfWeek> days = new LinkedList<>(); |
| | | String[] dayArray=expr.split(",", -1); |
| | | for (String element : dayArray) |
| | | { |
| | |
| | | * |
| | | * |
| | | * Copyright 2008 Sun Microsystems, Inc. |
| | | * Portions Copyright 2013 ForgeRock AS |
| | | * Portions Copyright 2013-2015 ForgeRock AS |
| | | */ |
| | | package org.opends.server.authorization.dseecompat; |
| | | |
| | |
| | | /** |
| | | * HashSet of OID strings parsed from the decode. |
| | | */ |
| | | private Set<String> extOpOIDs = new HashSet<String>(); |
| | | private Set<String> extOpOIDs = new HashSet<>(); |
| | | |
| | | /** |
| | | * Enumeration representing the extop operator. |
| | |
| | | WARN_ACI_SYNTAX_INVALID_GROUPDN_EXPRESSION.get(expr); |
| | | throw new AciException(message); |
| | | } |
| | | List<DN> groupDNs = new LinkedList<DN>(); |
| | | List<DN> groupDNs = new LinkedList<>(); |
| | | int ldapURLPos = 1; |
| | | Pattern ldapURLPattern = Pattern.compile(LDAP_URL); |
| | | Matcher ldapURLMatcher = ldapURLPattern.matcher(expr); |
| | |
| | | throws AciException { |
| | | //Split on the ','. |
| | | String[] ipStrs=expr.split("\\,", -1); |
| | | List<PatternIP> patternIPList= new LinkedList<PatternIP>(); |
| | | List<PatternIP> patternIPList= new LinkedList<>(); |
| | | for (String ipStr : ipStrs) { |
| | | if (!Pattern.matches(ipRegEx, ipStr)) { |
| | | LocalizableMessage message = |
| | |
| | | public static PatternDN decode(String dnString) |
| | | throws DirectoryException |
| | | { |
| | | ArrayList<PatternRDN> rdnComponents = new ArrayList<PatternRDN>(); |
| | | ArrayList<Integer> doubleWildPos = new ArrayList<Integer>(); |
| | | ArrayList<PatternRDN> rdnComponents = new ArrayList<>(); |
| | | ArrayList<Integer> doubleWildPos = new ArrayList<>(); |
| | | |
| | | // A null or empty DN is acceptable. |
| | | if (dnString == null) |
| | |
| | | // RDN component and return the DN. |
| | | if (pos >= length) |
| | | { |
| | | ArrayList<ByteString> arrayList = new ArrayList<ByteString>(1); |
| | | ArrayList<ByteString> arrayList = new ArrayList<>(1); |
| | | arrayList.add(ByteString.empty()); |
| | | rdnComponents.add(new PatternRDN(name, arrayList, dnString)); |
| | | break; |
| | |
| | | |
| | | |
| | | // Parse the value for this RDN component. |
| | | ArrayList<ByteString> parsedValue = new ArrayList<ByteString>(); |
| | | ArrayList<ByteString> parsedValue = new ArrayList<>(); |
| | | pos = parseValuePattern(dnString, pos, parsedValue); |
| | | |
| | | |
| | |
| | | // the RDN component and return the DN. |
| | | if (pos >= length) |
| | | { |
| | | ArrayList<ByteString> arrayList = new ArrayList<ByteString>(1); |
| | | ArrayList<ByteString> arrayList = new ArrayList<>(1); |
| | | arrayList.add(ByteString.empty()); |
| | | rdn.addValue(name, arrayList, dnString); |
| | | rdnComponents.add(rdn); |
| | |
| | | |
| | | |
| | | // Parse the value for this RDN component. |
| | | parsedValue = new ArrayList<ByteString>(); |
| | | parsedValue = new ArrayList<>(); |
| | | pos = parseValuePattern(dnString, pos, parsedValue); |
| | | |
| | | |
| | |
| | | { |
| | | PatternRDN[] subInitial = null; |
| | | PatternRDN[] subFinal = null; |
| | | List<PatternRDN[]> subAnyElements = new ArrayList<PatternRDN[]>(); |
| | | List<PatternRDN[]> subAnyElements = new ArrayList<>(); |
| | | |
| | | int i = 0; |
| | | int numComponents = rdnComponents.size(); |
| | |
| | | */ |
| | | private BitSet wildCardBitSet; |
| | | |
| | | /** |
| | | Hash map of valid netmask strings. Used in parsing netmask values. |
| | | */ |
| | | private static HashMap<String,String> validNetMasks = |
| | | new HashMap<String, String>(); |
| | | /** Hash map of valid netmask strings. Used in parsing netmask values. */ |
| | | private static HashMap<String,String> validNetMasks = new HashMap<>(); |
| | | |
| | | /** Initialize valid netmask hash map. */ |
| | | static { |
| | |
| | | |
| | | numValues = 1; |
| | | typePatterns = new String[] { type }; |
| | | valuePatterns = new ArrayList<ArrayList<ByteString>>(1); |
| | | valuePatterns = new ArrayList<>(1); |
| | | valuePatterns.add(valuePattern); |
| | | } |
| | | |
| | |
| | | } |
| | | |
| | | // Sort the attribute-value pairs by attribute type. |
| | | TreeMap<String,ArrayList<ByteString>> patternMap = |
| | | new TreeMap<String, ArrayList<ByteString>>(); |
| | | TreeMap<String, ByteString> rdnMap = new TreeMap<String, ByteString>(); |
| | | TreeMap<String,ArrayList<ByteString>> patternMap = new TreeMap<>(); |
| | | TreeMap<String, ByteString> rdnMap = new TreeMap<>(); |
| | | |
| | | for (int i = 0; i < rdn.getNumValues(); i++) |
| | | { |
| | |
| | | */ |
| | | public static TargAttrFilterList decode(int mask, String expression) |
| | | throws AciException { |
| | | LinkedHashMap<AttributeType, SearchFilter> attrFilterList = |
| | | new LinkedHashMap<AttributeType, SearchFilter>(); |
| | | LinkedHashMap<AttributeType, SearchFilter> attrFilterList = new LinkedHashMap<>(); |
| | | String[] subExpressions=expression.split(filterListSeperator, -1); |
| | | //Iterate over each sub-expression, parse and add them to the list |
| | | //if there are no errors. |
| | |
| | | /** Flags that is set if all operational attributes pattern seen "+". */ |
| | | private boolean allOpAttributes; |
| | | /** Set of the attribute types parsed by the constructor. */ |
| | | private HashSet<AttributeType> attributes = new HashSet<AttributeType>(); |
| | | private HashSet<AttributeType> attributes = new HashSet<>(); |
| | | /** Set of the operational attribute types parsed by the constructor. */ |
| | | private HashSet<AttributeType> opAttributes = new HashSet<AttributeType>(); |
| | | private HashSet<AttributeType> opAttributes = new HashSet<>(); |
| | | |
| | | /** |
| | | * Regular expression that matches one or more ATTR_NAME's separated by |
| | |
| | | * |
| | | * |
| | | * Copyright 2008 Sun Microsystems, Inc. |
| | | * Portions Copyright 2013 ForgeRock AS |
| | | * Portions Copyright 2013-2015 ForgeRock AS |
| | | */ |
| | | package org.opends.server.authorization.dseecompat; |
| | | |
| | |
| | | */ |
| | | public class TargetControl { |
| | | |
| | | /** |
| | | * HashSet of OID strings parsed from the decode. |
| | | */ |
| | | private Set<String> controlOIDS = new HashSet<String>(); |
| | | |
| | | /** |
| | | * Enumeration representing the targetcontrol operator. |
| | | */ |
| | | /** HashSet of OID strings parsed from the decode. */ |
| | | private Set<String> controlOIDS = new HashSet<>(); |
| | | /** Enumeration representing the targetcontrol operator. */ |
| | | private EnumTargetOperator op = EnumTargetOperator.EQUALITY; |
| | | |
| | | /** |
| | |
| | | EnumBindRuleType type) throws AciException { |
| | | |
| | | String[] vals=expression.split("[|][|]"); |
| | | List<UserDNTypeURL> urlList = new LinkedList<UserDNTypeURL>(); |
| | | List<UserDNTypeURL> urlList = new LinkedList<>(); |
| | | for (String val : vals) |
| | | { |
| | | StringBuilder value = new StringBuilder(val.trim()); |
| | |
| | | |
| | | // Determine the set of backup directories that we will use by default. |
| | | Set<String> values = currentConfig.getBackupDirectory(); |
| | | backupDirectories = new LinkedHashMap<File,CachedBackupDirectory>(values.size()); |
| | | backupDirectories = new LinkedHashMap<>(values.size()); |
| | | for (String s : values) |
| | | { |
| | | File dir = getFileForPath(s); |
| | |
| | | |
| | | |
| | | // Construct the backup base entry. |
| | | LinkedHashMap<ObjectClass,String> objectClasses = |
| | | new LinkedHashMap<ObjectClass,String>(2); |
| | | LinkedHashMap<ObjectClass,String> objectClasses = new LinkedHashMap<>(2); |
| | | objectClasses.put(DirectoryServer.getTopObjectClass(), OC_TOP); |
| | | |
| | | ObjectClass untypedOC = |
| | | DirectoryServer.getObjectClass(OC_UNTYPED_OBJECT_LC, true); |
| | | objectClasses.put(untypedOC, OC_UNTYPED_OBJECT); |
| | | |
| | | LinkedHashMap<AttributeType,List<Attribute>> opAttrs = |
| | | new LinkedHashMap<AttributeType,List<Attribute>>(0); |
| | | LinkedHashMap<AttributeType,List<Attribute>> userAttrs = |
| | | new LinkedHashMap<AttributeType,List<Attribute>>(1); |
| | | LinkedHashMap<AttributeType,List<Attribute>> opAttrs = new LinkedHashMap<>(0); |
| | | LinkedHashMap<AttributeType,List<Attribute>> userAttrs = new LinkedHashMap<>(1); |
| | | |
| | | RDN rdn = backupBaseDN.rdn(); |
| | | int numAVAs = rdn.getNumValues(); |
| | | for (int i=0; i < numAVAs; i++) |
| | | { |
| | | AttributeType attrType = rdn.getAttributeType(i); |
| | | ArrayList<Attribute> attrList = new ArrayList<Attribute>(1); |
| | | attrList.add(Attributes.create(attrType, rdn |
| | | .getAttributeValue(i))); |
| | | |
| | | ArrayList<Attribute> attrList = new ArrayList<>(1); |
| | | attrList.add(Attributes.create(attrType, rdn.getAttributeValue(i))); |
| | | userAttrs.put(attrType, attrList); |
| | | } |
| | | |
| | | backupBaseEntry = new Entry(backupBaseDN, objectClasses, userAttrs, |
| | | opAttrs); |
| | | backupBaseEntry = new Entry(backupBaseDN, objectClasses, userAttrs, opAttrs); |
| | | |
| | | currentConfig.addBackupChangeListener(this); |
| | | |
| | |
| | | |
| | | |
| | | // Construct the backup directory entry to return. |
| | | LinkedHashMap<ObjectClass,String> ocMap = |
| | | new LinkedHashMap<ObjectClass,String>(2); |
| | | LinkedHashMap<ObjectClass,String> ocMap = new LinkedHashMap<>(2); |
| | | ocMap.put(DirectoryServer.getTopObjectClass(), OC_TOP); |
| | | |
| | | ObjectClass backupDirOC = |
| | | DirectoryServer.getObjectClass(OC_BACKUP_DIRECTORY, true); |
| | | ocMap.put(backupDirOC, OC_BACKUP_DIRECTORY); |
| | | |
| | | LinkedHashMap<AttributeType,List<Attribute>> opAttrs = |
| | | new LinkedHashMap<AttributeType,List<Attribute>>(0); |
| | | LinkedHashMap<AttributeType,List<Attribute>> userAttrs = |
| | | new LinkedHashMap<AttributeType,List<Attribute>>(3); |
| | | |
| | | LinkedHashMap<AttributeType,List<Attribute>> opAttrs = new LinkedHashMap<>(0); |
| | | LinkedHashMap<AttributeType,List<Attribute>> userAttrs = new LinkedHashMap<>(3); |
| | | userAttrs.put(t, asList(t, v)); |
| | | |
| | | t = DirectoryServer.getAttributeType(ATTR_BACKUP_BACKEND_DN, true); |
| | |
| | | } |
| | | |
| | | // Construct the backup entry to return. |
| | | LinkedHashMap<ObjectClass, String> ocMap = |
| | | new LinkedHashMap<ObjectClass, String>(3); |
| | | LinkedHashMap<ObjectClass, String> ocMap = new LinkedHashMap<>(3); |
| | | ocMap.put(DirectoryServer.getTopObjectClass(), OC_TOP); |
| | | |
| | | ObjectClass oc = DirectoryServer.getObjectClass(OC_BACKUP_INFO, true); |
| | |
| | | oc = DirectoryServer.getObjectClass(OC_EXTENSIBLE_OBJECT_LC, true); |
| | | ocMap.put(oc, OC_EXTENSIBLE_OBJECT); |
| | | |
| | | LinkedHashMap<AttributeType, List<Attribute>> opAttrs = |
| | | new LinkedHashMap<AttributeType, List<Attribute>>(0); |
| | | LinkedHashMap<AttributeType, List<Attribute>> userAttrs = |
| | | new LinkedHashMap<AttributeType, List<Attribute>>(); |
| | | |
| | | LinkedHashMap<AttributeType, List<Attribute>> opAttrs = new LinkedHashMap<>(0); |
| | | LinkedHashMap<AttributeType, List<Attribute>> userAttrs = new LinkedHashMap<>(); |
| | | userAttrs.put(idType, asList(idType, idValue)); |
| | | |
| | | backupInfo.getBackupDirectory(); |
| | |
| | | for (String s : dependencies) { |
| | | builder.add(s); |
| | | } |
| | | ArrayList<Attribute> attrList = new ArrayList<Attribute>(1); |
| | | ArrayList<Attribute> attrList = new ArrayList<>(1); |
| | | attrList.add(builder.toAttribute()); |
| | | userAttrs.put(t, attrList); |
| | | } |
| | |
| | | |
| | | private ArrayList<Attribute> asList(AttributeType attrType, ByteString value) |
| | | { |
| | | final ArrayList<Attribute> attrList = new ArrayList<Attribute>(1); |
| | | final ArrayList<Attribute> attrList = new ArrayList<>(1); |
| | | attrList.add(Attributes.create(attrType, value)); |
| | | return attrList; |
| | | } |
| | |
| | | final ConfigChangeResult ccr = new ConfigChangeResult(); |
| | | |
| | | Set<String> values = cfg.getBackupDirectory(); |
| | | backupDirectories = new LinkedHashMap<File,CachedBackupDirectory>(values.size()); |
| | | backupDirectories = new LinkedHashMap<>(values.size()); |
| | | for (String s : values) |
| | | { |
| | | File dir = getFileForPath(s); |
| | |
| | | |
| | | /** The set of objectclasses that will be used in root entry. */ |
| | | private static final Map<ObjectClass, String> |
| | | CHANGELOG_ROOT_OBJECT_CLASSES = new LinkedHashMap<ObjectClass, String>(2); |
| | | |
| | | CHANGELOG_ROOT_OBJECT_CLASSES = new LinkedHashMap<>(2); |
| | | static |
| | | { |
| | | CHANGELOG_ROOT_OBJECT_CLASSES.put(DirectoryServer.getObjectClass(OC_TOP, true), OC_TOP); |
| | |
| | | |
| | | /** The set of objectclasses that will be used in ECL entries. */ |
| | | private static final Map<ObjectClass, String> |
| | | CHANGELOG_ENTRY_OBJECT_CLASSES = new LinkedHashMap<ObjectClass, String>(2); |
| | | |
| | | CHANGELOG_ENTRY_OBJECT_CLASSES = new LinkedHashMap<>(2); |
| | | static |
| | | { |
| | | CHANGELOG_ENTRY_OBJECT_CLASSES.put(DirectoryServer.getObjectClass(OC_TOP, true), OC_TOP); |
| | |
| | | private final ECLEnabledDomainPredicate domainPredicate; |
| | | |
| | | /** The set of cookie-based persistent searches registered with this backend. */ |
| | | private final ConcurrentLinkedQueue<PersistentSearch> cookieBasedPersistentSearches = |
| | | new ConcurrentLinkedQueue<PersistentSearch>(); |
| | | /** |
| | | * The set of change number-based persistent searches registered with this |
| | | * backend. |
| | | */ |
| | | private final ConcurrentLinkedQueue<PersistentSearch> cookieBasedPersistentSearches = new ConcurrentLinkedQueue<>(); |
| | | /** The set of change number-based persistent searches registered with this backend. */ |
| | | private final ConcurrentLinkedQueue<PersistentSearch> changeNumberBasedPersistentSearches = |
| | | new ConcurrentLinkedQueue<PersistentSearch>(); |
| | | new ConcurrentLinkedQueue<>(); |
| | | |
| | | /** |
| | | * Creates a new backend with the provided replication server. |
| | |
| | | } |
| | | |
| | | DBCursor<ChangeNumberIndexRecord> cnIndexDBCursor = null; |
| | | final AtomicReference<MultiDomainDBCursor> replicaUpdatesCursor = new AtomicReference<MultiDomainDBCursor>(); |
| | | final AtomicReference<MultiDomainDBCursor> replicaUpdatesCursor = new AtomicReference<>(); |
| | | try |
| | | { |
| | | cnIndexDBCursor = getCNIndexDBCursor(entrySender.lowestChangeNumber); |
| | |
| | | dnString = "replicationCSN=" + csn + "," + baseDN + "," + DN_EXTERNAL_CHANGELOG_ROOT; |
| | | } |
| | | |
| | | final Map<AttributeType, List<Attribute>> userAttrs = new LinkedHashMap<AttributeType, List<Attribute>>(); |
| | | final Map<AttributeType, List<Attribute>> opAttrs = new LinkedHashMap<AttributeType, List<Attribute>>(); |
| | | final Map<AttributeType, List<Attribute>> userAttrs = new LinkedHashMap<>(); |
| | | final Map<AttributeType, List<Attribute>> opAttrs = new LinkedHashMap<>(); |
| | | |
| | | // Operational standard attributes |
| | | addAttributeByType(ATTR_SUBSCHEMA_SUBENTRY_LC, ATTR_SUBSCHEMA_SUBENTRY_LC, |
| | |
| | | { |
| | | final String hasSubordinatesStr = Boolean.toString(baseChangelogHasSubordinates()); |
| | | |
| | | final Map<AttributeType, List<Attribute>> userAttrs = new LinkedHashMap<AttributeType, List<Attribute>>(); |
| | | final Map<AttributeType, List<Attribute>> operationalAttrs = new LinkedHashMap<AttributeType, List<Attribute>>(); |
| | | final Map<AttributeType, List<Attribute>> userAttrs = new LinkedHashMap<>(); |
| | | final Map<AttributeType, List<Attribute>> operationalAttrs = new LinkedHashMap<>(); |
| | | |
| | | // We never return the numSubordinates attribute for the base changelog entry |
| | | // and there is a very good reason for that: |
| | |
| | | */ |
| | | private static class SendEntryData<K extends Comparable<K>> |
| | | { |
| | | private final AtomicReference<SearchPhase> searchPhase = new AtomicReference<SearchPhase>(SearchPhase.INITIAL); |
| | | private final AtomicReference<SearchPhase> searchPhase = new AtomicReference<>(SearchPhase.INITIAL); |
| | | private final Object transitioningLock = new Object(); |
| | | private volatile K lastKeySentByInitialSearch; |
| | | |
| | |
private ChangeNumberEntrySender(SearchOperation searchOp, SearchPhase startPhase, ChangeNumberRange range)
{
  this.searchOp = searchOp;
  // Merge artifact removed: sendEntryData is assigned once, with the diamond operator.
  this.sendEntryData = new SendEntryData<>(startPhase);
  this.lowestChangeNumber = range.lowerBound;
  this.highestChangeNumber = range.upperBound;
}
| | |
| | | private final Set<DN> excludedBaseDNs; |
| | | private final MultiDomainServerState cookie; |
| | | private final ConcurrentSkipListMap<ReplicaId, SendEntryData<CSN>> replicaIdToSendEntryData = |
| | | new ConcurrentSkipListMap<ReplicaId, SendEntryData<CSN>>(); |
| | | new ConcurrentSkipListMap<>(); |
| | | |
| | | private CookieEntrySender(SearchOperation searchOp, SearchPhase startPhase, MultiDomainServerState cookie, |
| | | Set<DN> excludedBaseDNs) |
| | |
| | | SendEntryData<CSN> data = replicaIdToSendEntryData.get(replicaId); |
| | | if (data == null) |
| | | { |
| | | final SendEntryData<CSN> newData = new SendEntryData<CSN>(startPhase); |
| | | final SendEntryData<CSN> newData = new SendEntryData<>(startPhase); |
| | | data = replicaIdToSendEntryData.putIfAbsent(replicaId, newData); |
| | | return data == null ? newData : data; |
| | | } |
| | |
| | | private DN[] baseDNs; |
| | | |
| | | /** The mapping between parent DNs and their immediate children. */ |
| | | private final Map<DN, Set<DN>> childDNs; |
| | | private final Map<DN, Set<DN>> childDNs = new HashMap<>(); |
| | | |
| | | /** The base DNs for this backend, in a hash set. */ |
| | | private Set<DN> baseDNSet; |
| | |
| | | private LDIFBackendCfg currentConfig; |
| | | |
| | | /** The mapping between entry DNs and the corresponding entries. */ |
| | | private final Map<DN, Entry> entryMap; |
| | | private final Map<DN, Entry> entryMap = new LinkedHashMap<>(); |
| | | |
| | | /** A read-write lock used to protect access to this backend. */ |
| | | private final ReentrantReadWriteLock backendLock; |
| | | private final ReentrantReadWriteLock backendLock = new ReentrantReadWriteLock(); |
| | | |
| | | /** The path to the LDIF file containing the data for this backend. */ |
| | | private String ldifFilePath; |
| | | |
| | | |
| | | |
| | | /** |
| | | * Creates a new backend with the provided information. All backend |
| | | * implementations must implement a default constructor that use |
| | |
| | | */ |
public LDIFBackend()
{
  // Diamond operator: key/value types are inferred from the field declarations.
  entryMap = new LinkedHashMap<>();
  childDNs = new HashMap<>();
  backendLock = new ReentrantReadWriteLock();
}
| | | |
| | | /** {@inheritDoc} */ |
| | |
| | | Set<DN> childDNSet = childDNs.get(parentDN); |
| | | if (childDNSet == null) |
| | | { |
| | | childDNSet = new HashSet<DN>(); |
| | | childDNSet = new HashSet<>(); |
| | | childDNs.put(parentDN, childDNSet); |
| | | } |
| | | childDNSet.add(entryDN); |
| | |
| | | parentChildDNs = childDNs.get(newParentDN); |
| | | if (parentChildDNs == null) |
| | | { |
| | | parentChildDNs = new HashSet<DN>(); |
| | | parentChildDNs = new HashSet<>(); |
| | | childDNs.put(newParentDN, parentChildDNs); |
| | | } |
| | | parentChildDNs.add(newDN); |
| | |
| | | Set<DN> parentChildren = childDNs.get(newParentDN); |
| | | if (parentChildren == null) |
| | | { |
| | | parentChildren = new HashSet<DN>(); |
| | | parentChildren = new HashSet<>(); |
| | | childDNs.put(newParentDN, parentChildren); |
| | | } |
| | | parentChildren.add(newEntryDN); |
| | |
| | | Set<DN> childDNSet = childDNs.get(parentDN); |
| | | if (childDNSet == null) |
| | | { |
| | | childDNSet = new HashSet<DN>(); |
| | | childDNSet = new HashSet<>(); |
| | | childDNs.put(parentDN, childDNSet); |
| | | } |
| | | |
| | |
| | | throw new ConfigException(ERR_LDIF_BACKEND_MULTIPLE_BASE_DNS.get(currentConfig.dn())); |
| | | } |
| | | |
| | | baseDNSet = new HashSet<DN>(); |
| | | baseDNSet = new HashSet<>(); |
| | | Collections.addAll(baseDNSet, baseDNs); |
| | | |
| | | ldifFilePath = currentConfig.getLDIFFile(); |
| | |
| | | @Override |
| | | public Map<String,String> getAlerts() |
| | | { |
| | | Map<String,String> alerts = new LinkedHashMap<String,String>(); |
| | | |
| | | Map<String,String> alerts = new LinkedHashMap<>(); |
| | | alerts.put(ALERT_TYPE_LDIF_BACKEND_CANNOT_WRITE_UPDATE, |
| | | ALERT_DESCRIPTION_LDIF_BACKEND_CANNOT_WRITE_UPDATE); |
| | | |
| | | return alerts; |
| | | } |
| | | } |
| | |
| | | throw new ConfigException(message); |
| | | } |
| | | |
| | | baseDNSet = new HashSet<DN>(); |
| | | baseDNSet = new HashSet<>(); |
| | | for (DN dn : baseDNs) |
| | | { |
| | | baseDNSet.add(dn); |
| | | } |
| | | |
| | | entryMap = new LinkedHashMap<DN,Entry>(); |
| | | childDNs = new HashMap<DN,HashSet<DN>>(); |
| | | entryMap = new LinkedHashMap<>(); |
| | | childDNs = new HashMap<>(); |
| | | |
| | | for (DN dn : baseDNs) |
| | | { |
| | |
| | | HashSet<DN> children = childDNs.get(parentDN); |
| | | if (children == null) |
| | | { |
| | | children = new HashSet<DN>(); |
| | | children = new HashSet<>(); |
| | | childDNs.put(parentDN, children); |
| | | } |
| | | |
| | |
| | | { |
| | | if (children != null) |
| | | { |
| | | HashSet<DN> childrenCopy = new HashSet<DN>(children); |
| | | HashSet<DN> childrenCopy = new HashSet<>(children); |
| | | for (DN childDN : childrenCopy) |
| | | { |
| | | try |
| | |
| | | private ArrayList<Attribute> userDefinedAttributes; |
| | | |
| | | /** The set of objectclasses that will be used in monitor entries. */ |
| | | private final HashMap<ObjectClass, String> monitorObjectClasses = new LinkedHashMap<ObjectClass, String>(2); |
| | | private final HashMap<ObjectClass, String> monitorObjectClasses = new LinkedHashMap<>(2); |
| | | |
| | | /** The DN of the configuration entry for this backend. */ |
| | | private DN configEntryDN; |
| | |
| | | final ConfigChangeResult ccr = new ConfigChangeResult(); |
| | | |
| | | // Check to see if there is a new set of user-defined attributes. |
| | | final ArrayList<Attribute> userAttrs = new ArrayList<Attribute>(); |
| | | final ArrayList<Attribute> userAttrs = new ArrayList<>(); |
| | | try |
| | | { |
| | | final ConfigEntry configEntry = DirectoryServer |
| | |
| | | // Get the set of user-defined attributes for the configuration entry. Any |
| | | // attributes that we don't recognize will be included directly in the base |
| | | // monitor entry. |
| | | userDefinedAttributes = new ArrayList<Attribute>(); |
| | | userDefinedAttributes = new ArrayList<>(); |
| | | addAll(userDefinedAttributes, configEntry.getEntry().getUserAttributes().values()); |
| | | addAll(userDefinedAttributes, configEntry.getEntry().getOperationalAttributes().values()); |
| | | |
| | |
| | | final ObjectClass extensibleObjectOC = DirectoryServer.getObjectClass(OC_EXTENSIBLE_OBJECT_LC, true); |
| | | final HashMap<ObjectClass, String> monitorClasses = newObjectClasses(extensibleObjectOC, OC_EXTENSIBLE_OBJECT); |
| | | |
| | | final HashMap<AttributeType, List<Attribute>> monitorUserAttrs = |
| | | new LinkedHashMap<AttributeType, List<Attribute>>(); |
| | | final HashMap<AttributeType, List<Attribute>> monitorOperationalAttrs = |
| | | new LinkedHashMap<AttributeType, List<Attribute>>(); |
| | | final HashMap<AttributeType, List<Attribute>> monitorUserAttrs = new LinkedHashMap<>(); |
| | | final HashMap<AttributeType, List<Attribute>> monitorOperationalAttrs = new LinkedHashMap<>(); |
| | | |
| | | put(monitorUserAttrs, Attributes.create(ATTR_COMMON_NAME, "monitor")); |
| | | put(monitorUserAttrs, Attributes.create(ATTR_PRODUCT_NAME, DynamicConstants.PRODUCT_NAME)); |
| | |
| | | List<Attribute> attrs = attrsMap.get(type); |
| | | if (attrs == null) |
| | | { |
| | | attrs = new ArrayList<Attribute>(); |
| | | attrs = new ArrayList<>(); |
| | | attrsMap.put(type, attrs); |
| | | } |
| | | attrs.add(a); |
| | |
| | | |
| | | private ArrayList<Attribute> toList(final Attribute attr) |
| | | { |
| | | final ArrayList<Attribute> results = new ArrayList<Attribute>(1); |
| | | final ArrayList<Attribute> results = new ArrayList<>(1); |
| | | results.add(attr); |
| | | return results; |
| | | } |
| | |
| | | final ObjectClass monitorOC = DirectoryServer.getObjectClass(OC_MONITOR_BRANCH, true); |
| | | final HashMap<ObjectClass, String> monitorClasses = newObjectClasses(monitorOC, OC_MONITOR_BRANCH); |
| | | |
| | | final HashMap<AttributeType, List<Attribute>> monitorUserAttrs = |
| | | new LinkedHashMap<AttributeType, List<Attribute>>(); |
| | | final HashMap<AttributeType, List<Attribute>> monitorUserAttrs = new LinkedHashMap<>(); |
| | | |
| | | final RDN rdn = dn.rdn(); |
| | | if (rdn != null) |
| | |
| | | */ |
| | | private NavigableMap<DN, MonitorProvider<?>> getDIT() |
| | | { |
| | | final NavigableMap<DN, MonitorProvider<?>> dit = |
| | | new TreeMap<DN, MonitorProvider<?>>(); |
| | | for (final MonitorProvider<?> monitorProvider : DirectoryServer |
| | | .getMonitorProviders().values()) |
| | | final NavigableMap<DN, MonitorProvider<?>> dit = new TreeMap<>(); |
| | | for (final MonitorProvider<?> monitorProvider : DirectoryServer.getMonitorProviders().values()) |
| | | { |
| | | DN dn = DirectoryServer.getMonitorProviderDN(monitorProvider); |
| | | dit.put(dn, monitorProvider); |
| | |
| | | { |
| | | break; |
| | | } |
| | | else |
| | | { |
| | | dit.put(dn, null); |
| | | } |
| | | dit.put(dn, null); |
| | | } |
| | | } |
| | | return dit; |
| | |
| | | final HashMap<ObjectClass, String> monitorClasses = newObjectClasses(monitorOC, monitorOC.getPrimaryName()); |
| | | |
| | | final List<Attribute> monitorAttrs = monitorProvider.getMonitorData(); |
| | | final HashMap<AttributeType, List<Attribute>> attrMap = |
| | | new LinkedHashMap<AttributeType, List<Attribute>>( |
| | | monitorAttrs.size() + 1); |
| | | final HashMap<AttributeType, List<Attribute>> attrMap = new LinkedHashMap<>(monitorAttrs.size() + 1); |
| | | |
| | | // Make sure to include the RDN attribute. |
| | | final RDN entryRDN = entryDN.rdn(); |
| | |
| | | List<Attribute> attrs = attrMap.get(type); |
| | | if (attrs == null) |
| | | { |
| | | attrs = new ArrayList<Attribute>(); |
| | | attrs = new ArrayList<>(); |
| | | attrMap.put(type, attrs); |
| | | } |
| | | attrs.add(a); |
| | |
| | | |
| | | private HashMap<ObjectClass, String> newObjectClasses(ObjectClass objectClass, String objectClassName) |
| | | { |
| | | final HashMap<ObjectClass, String> monitorClasses = |
| | | new LinkedHashMap<ObjectClass, String>(monitorObjectClasses.size() + 1); |
| | | final HashMap<ObjectClass, String> monitorClasses = new LinkedHashMap<>(monitorObjectClasses.size() + 1); |
| | | monitorClasses.putAll(monitorObjectClasses); |
| | | monitorClasses.put(objectClass, objectClassName); |
| | | return monitorClasses; |
| | |
| | | private HashSet<DN> baseDNSet; |
| | | |
| | | /** The set of supported controls for this backend. */ |
| | | private final Set<String> supportedControls = new HashSet<String>(Arrays.asList( |
| | | private final Set<String> supportedControls = new HashSet<>(Arrays.asList( |
| | | OID_SUBTREE_DELETE_CONTROL, |
| | | OID_PAGED_RESULTS_CONTROL, |
| | | OID_MANAGE_DSAIT_CONTROL, |
| | |
| | | @Override |
| | | public synchronized void openBackend() throws ConfigException, InitializationException |
| | | { |
| | | baseDNSet = new HashSet<DN>(); |
| | | baseDNSet = new HashSet<>(); |
| | | for (DN dn : baseDNs) |
| | | { |
| | | baseDNSet.add(dn); |
| | |
| | | } |
| | | |
| | | // Initialize null entry object classes. |
| | | objectClasses = new HashMap<ObjectClass,String>(); |
| | | objectClasses = new HashMap<>(); |
| | | |
| | | String topOCName = "top"; |
| | | ObjectClass topOC = DirectoryServer.getObjectClass(topOCName); |
| | |
| | | package org.opends.server.backends; |
| | | |
| | | import java.util.ArrayList; |
| | | import java.util.List; |
| | | |
| | | import org.opends.server.types.DN; |
| | | |
| | |
| | | private DN baseDN; |
| | | private RebuildMode rebuildMode = RebuildMode.USER_DEFINED; |
| | | /** The names of indexes to rebuild. */ |
| | | private ArrayList<String> rebuildList; |
| | | private final List<String> rebuildList = new ArrayList<>(); |
| | | private String tmpDirectory; |
| | | private boolean isClearDegradedState; |
| | | |
| | | /** Create a new rebuild configuration. */ |
| | | public RebuildConfig() |
| | | { |
| | | rebuildList = new ArrayList<String>(); |
| | | } |
| | | |
| | | /** |
| | | * Get the base DN to rebuild. |
| | | * |
| | |
| | | * |
| | | * @return The list of indexes to rebuild. |
| | | */ |
| | | public ArrayList<String> getRebuildList() |
| | | public List<String> getRebuildList() |
| | | { |
| | | return rebuildList; |
| | | } |
| | |
| | | throw new ConfigException(message); |
| | | } |
| | | |
| | | userDefinedAttributes = new ArrayList<Attribute>(); |
| | | userDefinedAttributes = new ArrayList<>(); |
| | | addAllUserDefinedAttrs(userDefinedAttributes, configEntry.getEntry()); |
| | | |
| | | |
| | |
| | | } |
| | | else |
| | | { |
| | | subordinateBaseDNs = new ConcurrentHashMap<DN, Backend<?>>(); |
| | | subordinateBaseDNs = new ConcurrentHashMap<>(); |
| | | for (DN baseDN : subDNs) |
| | | { |
| | | Backend<?> backend = DirectoryServer.getBackend(baseDN); |
| | |
| | | |
| | | // Construct the set of "static" attributes that will always be present in |
| | | // the root DSE. |
| | | staticDSEAttributes = new ArrayList<Attribute>(); |
| | | |
| | | staticDSEAttributes.add(Attributes.create(ATTR_VENDOR_NAME, |
| | | SERVER_VENDOR_NAME)); |
| | | |
| | | staticDSEAttributes = new ArrayList<>(); |
| | | staticDSEAttributes.add(Attributes.create(ATTR_VENDOR_NAME, SERVER_VENDOR_NAME)); |
| | | staticDSEAttributes.add(Attributes.create(ATTR_VENDOR_VERSION, |
| | | DirectoryServer.getVersionString())); |
| | | |
| | | staticDSEAttributes.add(Attributes.create("fullVendorVersion", |
| | | BuildVersion.binaryVersion().toString())); |
| | | |
| | | // Construct the set of objectclasses to include in the root DSE entry. |
| | | dseObjectClasses = new HashMap<ObjectClass,String>(2); |
| | | dseObjectClasses = new HashMap<>(2); |
| | | ObjectClass topOC = DirectoryServer.getObjectClass(OC_TOP); |
| | | if (topOC == null) |
| | | { |
| | |
| | | } |
| | | dseObjectClasses.put(topOC, OC_TOP); |
| | | |
| | | ObjectClass rootDSEOC = |
| | | DirectoryServer.getObjectClass(OC_ROOT_DSE); |
| | | ObjectClass rootDSEOC = DirectoryServer.getObjectClass(OC_ROOT_DSE); |
| | | if (rootDSEOC == null) |
| | | { |
| | | rootDSEOC = DirectoryServer.getDefaultObjectClass(OC_ROOT_DSE); |
| | |
| | | */ |
| | | private Entry getRootDSE(ClientConnection connection) |
| | | { |
| | | HashMap<AttributeType,List<Attribute>> dseUserAttrs = |
| | | new HashMap<AttributeType,List<Attribute>>(); |
| | | HashMap<AttributeType,List<Attribute>> dseOperationalAttrs = |
| | | new HashMap<AttributeType,List<Attribute>>(); |
| | | HashMap<AttributeType,List<Attribute>> dseUserAttrs = new HashMap<>(); |
| | | HashMap<AttributeType,List<Attribute>> dseOperationalAttrs = new HashMap<>(); |
| | | |
| | | |
| | | Attribute publicNamingContextAttr = createDNAttribute( |
| | |
| | | DirectoryServer.getPublicNamingContexts().keySet()); |
| | | addAttribute(publicNamingContextAttr, dseUserAttrs, dseOperationalAttrs); |
| | | |
| | | |
| | | // Add the "ds-private-naming-contexts" attribute. |
| | | Attribute privateNamingContextAttr = createDNAttribute( |
| | | ATTR_PRIVATE_NAMING_CONTEXTS, ATTR_PRIVATE_NAMING_CONTEXTS, |
| | |
| | | ATTR_SUPPORTED_FEATURE_LC, DirectoryServer.getSupportedFeatures()); |
| | | addAttribute(supportedFeatureAttr, dseUserAttrs, dseOperationalAttrs); |
| | | |
| | | |
| | | // Add the "supportedSASLMechanisms" attribute. |
| | | Attribute supportedSASLMechAttr = createAttribute( |
| | | ATTR_SUPPORTED_SASL_MECHANISMS, ATTR_SUPPORTED_SASL_MECHANISMS_LC, |
| | | DirectoryServer.getSupportedSASLMechanisms().keySet()); |
| | | addAttribute(supportedSASLMechAttr, dseUserAttrs, dseOperationalAttrs); |
| | | |
| | | |
| | | // Add the "supportedLDAPVersions" attribute. |
| | | TreeSet<String> versionStrings = new TreeSet<String>(); |
| | | TreeSet<String> versionStrings = new TreeSet<>(); |
| | | for (Integer ldapVersion : DirectoryServer.getSupportedLDAPVersions()) |
| | | { |
| | | versionStrings.add(ldapVersion.toString()); |
| | | } |
| | | Attribute supportedLDAPVersionAttr = |
| | | createAttribute(ATTR_SUPPORTED_LDAP_VERSION, |
| | | ATTR_SUPPORTED_LDAP_VERSION_LC, |
| | | versionStrings); |
| | | Attribute supportedLDAPVersionAttr = createAttribute( |
| | | ATTR_SUPPORTED_LDAP_VERSION, ATTR_SUPPORTED_LDAP_VERSION_LC, versionStrings); |
| | | addAttribute(supportedLDAPVersionAttr, dseUserAttrs, dseOperationalAttrs); |
| | | |
| | | |
| | | // Add the "supportedAuthPasswordSchemes" attribute. |
| | | Set<String> authPWSchemes = |
| | | DirectoryServer.getAuthPasswordStorageSchemes().keySet(); |
| | |
| | | Attribute supportedAuthPWSchemesAttr = |
| | | createAttribute(ATTR_SUPPORTED_AUTH_PW_SCHEMES, |
| | | ATTR_SUPPORTED_AUTH_PW_SCHEMES_LC, authPWSchemes); |
| | | ArrayList<Attribute> supportedAuthPWSchemesAttrs = |
| | | new ArrayList<Attribute>(1); |
| | | ArrayList<Attribute> supportedAuthPWSchemesAttrs = new ArrayList<>(1); |
| | | supportedAuthPWSchemesAttrs.add(supportedAuthPWSchemesAttr); |
| | | if (showAllAttributes |
| | | || !supportedSASLMechAttr.getAttributeType().isOperational()) |
| | |
| | | List<Attribute> attrs = attrsMap.get(type); |
| | | if (attrs == null) |
| | | { |
| | | attrs = new ArrayList<Attribute>(); |
| | | attrs = new ArrayList<>(); |
| | | attrsMap.put(type, attrs); |
| | | } |
| | | attrs.add(a); |
| | |
| | | { |
| | | if (!publicNamingContextAttr.isEmpty()) |
| | | { |
| | | List<Attribute> privateNamingContextAttrs = new ArrayList<Attribute>(1); |
| | | List<Attribute> privateNamingContextAttrs = new ArrayList<>(1); |
| | | privateNamingContextAttrs.add(publicNamingContextAttr); |
| | | final AttributeType attrType = publicNamingContextAttr.getAttributeType(); |
| | | if (showAllAttributes || !attrType.isOperational()) |
| | |
| | | } |
| | | else |
| | | { |
| | | subBases = new ConcurrentHashMap<DN, Backend<?>>(); |
| | | subBases = new ConcurrentHashMap<>(); |
| | | for (DN baseDN : subDNs) |
| | | { |
| | | Backend<?> backend = DirectoryServer.getBackend(baseDN); |
| | |
| | | |
| | | |
| | | // Check to see if there is a new set of user-defined attributes. |
| | | ArrayList<Attribute> userAttrs = new ArrayList<Attribute>(); |
| | | ArrayList<Attribute> userAttrs = new ArrayList<>(); |
| | | try |
| | | { |
| | | ConfigEntry configEntry = DirectoryServer.getConfigEntry(configEntryDN); |
| | |
| | | DirectoryServer.getAttributeType(OP_ATTR_MODIFY_TIMESTAMP_LC, true); |
| | | |
| | | // Construct the set of objectclasses to include in the schema entry. |
| | | schemaObjectClasses = new LinkedHashMap<ObjectClass,String>(3); |
| | | schemaObjectClasses = new LinkedHashMap<>(3); |
| | | schemaObjectClasses.put(DirectoryServer.getTopObjectClass(), OC_TOP); |
| | | |
| | | ObjectClass subentryOC = DirectoryServer.getObjectClass(OC_LDAP_SUBENTRY_LC, |
| | | true); |
| | | ObjectClass subentryOC = DirectoryServer.getObjectClass(OC_LDAP_SUBENTRY_LC, true); |
| | | schemaObjectClasses.put(subentryOC, OC_LDAP_SUBENTRY); |
| | | |
| | | ObjectClass subschemaOC = DirectoryServer.getObjectClass(OC_SUBSCHEMA, |
| | | true); |
| | | ObjectClass subschemaOC = DirectoryServer.getObjectClass(OC_SUBSCHEMA, true); |
| | | schemaObjectClasses.put(subschemaOC, OC_SUBSCHEMA); |
| | | |
| | | |
| | |
| | | // Get the set of user-defined attributes for the configuration entry. Any |
| | | // attributes that we don't recognize will be included directly in the |
| | | // schema entry. |
| | | userDefinedAttributes = new ArrayList<Attribute>(); |
| | | userDefinedAttributes = new ArrayList<>(); |
| | | addAll(configEntry.getEntry().getUserAttributes().values()); |
| | | addAll(configEntry.getEntry().getOperationalAttributes().values()); |
| | | |
| | |
| | | try |
| | | { |
| | | // First, generate lists of elements from the current schema. |
| | | Set<String> newATs = new LinkedHashSet<String>(); |
| | | Set<String> newOCs = new LinkedHashSet<String>(); |
| | | Set<String> newNFs = new LinkedHashSet<String>(); |
| | | Set<String> newDCRs = new LinkedHashSet<String>(); |
| | | Set<String> newDSRs = new LinkedHashSet<String>(); |
| | | Set<String> newMRUs = new LinkedHashSet<String>(); |
| | | Set<String> newLSDs = new LinkedHashSet<String>(); |
| | | Schema.genConcatenatedSchema(newATs, newOCs, newNFs, newDCRs, newDSRs, |
| | | newMRUs,newLSDs); |
| | | Set<String> newATs = new LinkedHashSet<>(); |
| | | Set<String> newOCs = new LinkedHashSet<>(); |
| | | Set<String> newNFs = new LinkedHashSet<>(); |
| | | Set<String> newDCRs = new LinkedHashSet<>(); |
| | | Set<String> newDSRs = new LinkedHashSet<>(); |
| | | Set<String> newMRUs = new LinkedHashSet<>(); |
| | | Set<String> newLSDs = new LinkedHashSet<>(); |
| | | Schema.genConcatenatedSchema(newATs, newOCs, newNFs, newDCRs, newDSRs, newMRUs,newLSDs); |
| | | |
| | | // Next, generate lists of elements from the previous concatenated schema. |
| | | // If there isn't a previous concatenated schema, then use the base |
| | |
| | | File configFile = new File(DirectoryServer.getConfigFile()); |
| | | File configDirectory = configFile.getParentFile(); |
| | | File upgradeDirectory = new File(configDirectory, "upgrade"); |
| | | File concatFile = new File(upgradeDirectory, |
| | | SCHEMA_CONCAT_FILE_NAME); |
| | | File concatFile = new File(upgradeDirectory, SCHEMA_CONCAT_FILE_NAME); |
| | | if (concatFile.exists()) |
| | | { |
| | | concatFilePath = concatFile.getAbsolutePath(); |
| | |
| | | } |
| | | } |
| | | |
| | | Set<String> oldATs = new LinkedHashSet<String>(); |
| | | Set<String> oldOCs = new LinkedHashSet<String>(); |
| | | Set<String> oldNFs = new LinkedHashSet<String>(); |
| | | Set<String> oldDCRs = new LinkedHashSet<String>(); |
| | | Set<String> oldDSRs = new LinkedHashSet<String>(); |
| | | Set<String> oldMRUs = new LinkedHashSet<String>(); |
| | | Set<String> oldLSDs = new LinkedHashSet<String>(); |
| | | Set<String> oldATs = new LinkedHashSet<>(); |
| | | Set<String> oldOCs = new LinkedHashSet<>(); |
| | | Set<String> oldNFs = new LinkedHashSet<>(); |
| | | Set<String> oldDCRs = new LinkedHashSet<>(); |
| | | Set<String> oldDSRs = new LinkedHashSet<>(); |
| | | Set<String> oldMRUs = new LinkedHashSet<>(); |
| | | Set<String> oldLSDs = new LinkedHashSet<>(); |
| | | Schema.readConcatenatedSchema(concatFilePath, oldATs, oldOCs, oldNFs, |
| | | oldDCRs, oldDSRs, oldMRUs,oldLSDs); |
| | | |
| | | // Create a list of modifications and add any differences between the old |
| | | // and new schema into them. |
| | | List<Modification> mods = new LinkedList<Modification>(); |
| | | List<Modification> mods = new LinkedList<>(); |
| | | Schema.compareConcatenatedSchema(oldATs, newATs, attributeTypesType, mods); |
| | | Schema.compareConcatenatedSchema(oldOCs, newOCs, objectClassesType, mods); |
| | | Schema.compareConcatenatedSchema(oldNFs, newNFs, nameFormsType, mods); |
| | |
| | | private Entry getSchemaEntry(DN entryDN, boolean includeSchemaFile, |
| | | boolean ignoreShowAllOption) |
| | | { |
| | | Map<AttributeType, List<Attribute>> userAttrs = |
| | | new LinkedHashMap<AttributeType, List<Attribute>>(); |
| | | |
| | | Map<AttributeType, List<Attribute>> operationalAttrs = |
| | | new LinkedHashMap<AttributeType, List<Attribute>>(); |
| | | Map<AttributeType, List<Attribute>> userAttrs = new LinkedHashMap<>(); |
| | | Map<AttributeType, List<Attribute>> operationalAttrs = new LinkedHashMap<>(); |
| | | |
| | | // Add the RDN attribute(s) for the provided entry. |
| | | RDN rdn = entryDN.rdn(); |
| | |
| | | List<Attribute> attrs = attrsMap.get(type); |
| | | if (attrs == null) |
| | | { |
| | | attrs = new ArrayList<Attribute>(1); |
| | | attrs = new ArrayList<>(1); |
| | | attrsMap.put(type, attrs); |
| | | } |
| | | attrs.add(attribute); |
| | |
| | | |
| | | private ArrayList<Attribute> newArrayList(Attribute a) |
| | | { |
| | | ArrayList<Attribute> attrList = new ArrayList<Attribute>(1); |
| | | ArrayList<Attribute> attrList = new ArrayList<>(1); |
| | | attrList.add(a); |
| | | return attrList; |
| | | } |
| | |
| | | return true; |
| | | } |
| | | } |
| | | |
| | | return false; |
| | | } |
| | | |
| | |
| | | } |
| | | |
| | | |
| | | ArrayList<Modification> mods = |
| | | new ArrayList<Modification>(modifyOperation.getModifications()); |
| | | ArrayList<Modification> mods = new ArrayList<>(modifyOperation.getModifications()); |
| | | if (mods.isEmpty()) |
| | | { |
| | | // There aren't any modifications, so we don't need to do anything. |
| | |
| | | } |
| | | |
| | | Schema newSchema = DirectoryServer.getSchema().duplicate(); |
| | | TreeSet<String> modifiedSchemaFiles = new TreeSet<String>(); |
| | | TreeSet<String> modifiedSchemaFiles = new TreeSet<>(); |
| | | |
| | | int pos = -1; |
| | | for (Modification m : mods) |
| | |
| | | // impacted schema files by first creating them in a temporary location |
| | | // and then replacing the existing schema files with the new versions. |
| | | // If all that goes successfully, then activate the new schema. |
| | | HashMap<String,File> tempSchemaFiles = new HashMap<String,File>(); |
| | | HashMap<String, File> tempSchemaFiles = new HashMap<>(); |
| | | try |
| | | { |
| | | for (String schemaFile : modifiedSchemaFiles) |
| | |
| | | */ |
| | | private Entry createEmptySchemaEntry() |
| | | { |
| | | Map<ObjectClass,String> objectClasses = |
| | | new LinkedHashMap<ObjectClass,String>(); |
| | | Map<ObjectClass,String> objectClasses = new LinkedHashMap<>(); |
| | | objectClasses.put(DirectoryServer.getTopObjectClass(), OC_TOP); |
| | | objectClasses.put(DirectoryServer.getObjectClass(OC_LDAP_SUBENTRY_LC, true), |
| | | OC_LDAP_SUBENTRY); |
| | | objectClasses.put(DirectoryServer.getObjectClass(OC_SUBSCHEMA, true), |
| | | OC_SUBSCHEMA); |
| | | objectClasses.put(DirectoryServer.getObjectClass(OC_LDAP_SUBENTRY_LC, true), OC_LDAP_SUBENTRY); |
| | | objectClasses.put(DirectoryServer.getObjectClass(OC_SUBSCHEMA, true), OC_SUBSCHEMA); |
| | | |
| | | Map<AttributeType,List<Attribute>> userAttributes = |
| | | new LinkedHashMap<AttributeType,List<Attribute>>(); |
| | | |
| | | Map<AttributeType,List<Attribute>> operationalAttributes = |
| | | new LinkedHashMap<AttributeType,List<Attribute>>(); |
| | | Map<AttributeType,List<Attribute>> userAttributes = new LinkedHashMap<>(); |
| | | Map<AttributeType,List<Attribute>> operationalAttributes = new LinkedHashMap<>(); |
| | | |
| | | DN dn = DirectoryServer.getSchemaDN(); |
| | | RDN rdn = dn.rdn(); |
| | | for (int i=0; i < rdn.getNumValues(); i++) |
| | | { |
| | | AttributeType type = rdn.getAttributeType(i); |
| | | List<Attribute> attrList = new LinkedList<Attribute>(); |
| | | List<Attribute> attrList = new LinkedList<>(); |
| | | attrList.add(Attributes.create(type, rdn.getAttributeValue(i))); |
| | | if (type.isOperational()) |
| | | { |
| | |
| | | * this only for the real part of the ldapsyntaxes attribute. The real part |
| | | * is read and write to/from the schema files. |
| | | */ |
| | | Set<ByteString> values = new LinkedHashSet<ByteString>(); |
| | | Set<ByteString> values = new LinkedHashSet<>(); |
| | | for (LDAPSyntaxDescription ldapSyntax : |
| | | schema.getLdapSyntaxDescriptions().values()) |
| | | { |
| | |
| | | // Add all of the appropriate attribute types to the schema entry. We need |
| | | // to be careful of the ordering to ensure that any superior types in the |
| | | // same file are written before the subordinate types. |
| | | Set<AttributeType> addedTypes = new HashSet<AttributeType>(); |
| | | values = new LinkedHashSet<ByteString>(); |
| | | Set<AttributeType> addedTypes = new HashSet<>(); |
| | | values = new LinkedHashSet<>(); |
| | | for (AttributeType at : schema.getAttributeTypes().values()) |
| | | { |
| | | if (schemaFile.equals(getSchemaFile(at))) |
| | |
| | | // Add all of the appropriate objectclasses to the schema entry. We need |
| | | // to be careful of the ordering to ensure that any superior classes in the |
| | | // same file are written before the subordinate classes. |
| | | Set<ObjectClass> addedClasses = new HashSet<ObjectClass>(); |
| | | values = new LinkedHashSet<ByteString>(); |
| | | Set<ObjectClass> addedClasses = new HashSet<>(); |
| | | values = new LinkedHashSet<>(); |
| | | for (ObjectClass oc : schema.getObjectClasses().values()) |
| | | { |
| | | if (schemaFile.equals(getSchemaFile(oc))) |
| | |
| | | // Add all of the appropriate name forms to the schema entry. Since there |
| | | // is no hierarchical relationship between name forms, we don't need to |
| | | // worry about ordering. |
| | | values = new LinkedHashSet<ByteString>(); |
| | | values = new LinkedHashSet<>(); |
| | | for (List<NameForm> forms : schema.getNameFormsByObjectClass().values()) |
| | | { |
| | | for(NameForm nf : forms) |
| | |
| | | // Add all of the appropriate DIT content rules to the schema entry. Since |
| | | // there is no hierarchical relationship between DIT content rules, we don't |
| | | // need to worry about ordering. |
| | | values = new LinkedHashSet<ByteString>(); |
| | | values = new LinkedHashSet<>(); |
| | | for (DITContentRule dcr : schema.getDITContentRules().values()) |
| | | { |
| | | if (schemaFile.equals(getSchemaFile(dcr))) |
| | |
| | | // Add all of the appropriate DIT structure rules to the schema entry. We |
| | | // need to be careful of the ordering to ensure that any superior rules in |
| | | // the same file are written before the subordinate rules. |
| | | Set<DITStructureRule> addedDSRs = new HashSet<DITStructureRule>(); |
| | | values = new LinkedHashSet<ByteString>(); |
| | | Set<DITStructureRule> addedDSRs = new HashSet<>(); |
| | | values = new LinkedHashSet<>(); |
| | | for (DITStructureRule dsr : schema.getDITStructureRulesByID().values()) |
| | | { |
| | | if (schemaFile.equals(getSchemaFile(dsr))) |
| | |
| | | // Add all of the appropriate matching rule uses to the schema entry. Since |
| | | // there is no hierarchical relationship between matching rule uses, we |
| | | // don't need to worry about ordering. |
| | | values = new LinkedHashSet<ByteString>(); |
| | | values = new LinkedHashSet<>(); |
| | | for (MatchingRuleUse mru : schema.getMatchingRuleUses().values()) |
| | | { |
| | | if (schemaFile.equals(getSchemaFile(mru))) |
| | |
| | | // Create lists that will hold the three types of files we'll be dealing |
| | | // with (the temporary files that will be installed, the installed schema |
| | | // files, and the previously-installed schema files). |
| | | ArrayList<File> installedFileList = new ArrayList<File>(); |
| | | ArrayList<File> tempFileList = new ArrayList<File>(); |
| | | ArrayList<File> origFileList = new ArrayList<File>(); |
| | | ArrayList<File> installedFileList = new ArrayList<>(); |
| | | ArrayList<File> tempFileList = new ArrayList<>(); |
| | | ArrayList<File> origFileList = new ArrayList<>(); |
| | | |
| | | File schemaInstanceDir = |
| | | new File(SchemaConfigManager.getSchemaDirectoryPath()); |
| | |
| | | { |
| | | Schema schema = DirectoryServer.getSchema(); |
| | | Schema newSchema = DirectoryServer.getSchema().duplicate(); |
| | | TreeSet<String> modifiedSchemaFiles = new TreeSet<String>(); |
| | | TreeSet<String> modifiedSchemaFiles = new TreeSet<>(); |
| | | |
| | | // Get the attributeTypes attribute from the entry. |
| | | Syntax attrTypeSyntax = schema.getSyntax(SYNTAX_ATTRIBUTE_TYPE_OID); |
| | |
| | | // loop on the attribute types in the entry just received |
| | | // and add them in the existing schema. |
| | | List<Attribute> attrList = newSchemaEntry.getAttribute(attributeAttrType); |
| | | Set<String> oidList = new HashSet<String>(1000); |
| | | Set<String> oidList = new HashSet<>(1000); |
| | | if (attrList != null && !attrList.isEmpty()) |
| | | { |
| | | for (Attribute a : attrList) |
| | | { |
| | | // Look for attributetypes that could have been added to the schema |
| | | // Look for attribute types that could have been added to the schema |
| | | // or modified in the schema |
| | | for (ByteString v : a) |
| | | { |
| | |
| | | Set<DN> newBaseDNs; |
| | | try |
| | | { |
| | | newBaseDNs = new HashSet<DN>(backendCfg.getSchemaEntryDN()); |
| | | newBaseDNs = new HashSet<>(backendCfg.getSchemaEntryDN()); |
| | | if (newBaseDNs.isEmpty()) |
| | | { |
| | | newBaseDNs.add(DN.valueOf(DN_DEFAULT_SCHEMA_ROOT)); |
| | |
| | | |
| | | |
| | | // Check to see if there is a new set of user-defined attributes. |
| | | ArrayList<Attribute> newUserAttrs = new ArrayList<Attribute>(); |
| | | ArrayList<Attribute> newUserAttrs = new ArrayList<>(); |
| | | try |
| | | { |
| | | ConfigEntry configEntry = DirectoryServer.getConfigEntry(configEntryDN); |
| | |
| | | // deleteBaseDNs will contain the set of DNs that should no longer be used |
| | | // and should be deregistered from the server, and the newBaseDNs set will |
| | | // just contain the set of DNs to add. |
| | | Set<DN> deleteBaseDNs = new HashSet<DN>(baseDNs.length); |
| | | Set<DN> deleteBaseDNs = new HashSet<>(baseDNs.length); |
| | | for (DN baseDN : baseDNs) |
| | | { |
| | | if (! newBaseDNs.remove(baseDN)) |
| | |
| | | @Override |
| | | public Map<String, String> getAlerts() |
| | | { |
| | | Map<String, String> alerts = new LinkedHashMap<String, String>(); |
| | | Map<String, String> alerts = new LinkedHashMap<>(); |
| | | |
| | | alerts.put(ALERT_TYPE_CANNOT_COPY_SCHEMA_FILES, |
| | | ALERT_DESCRIPTION_CANNOT_COPY_SCHEMA_FILES); |
| | |
| | | generateInstanceCertificateIfAbsent(); |
| | | |
| | | // Construct the trust store base entry. |
| | | LinkedHashMap<ObjectClass,String> objectClasses = |
| | | new LinkedHashMap<ObjectClass,String>(2); |
| | | LinkedHashMap<ObjectClass,String> objectClasses = new LinkedHashMap<>(2); |
| | | objectClasses.put(DirectoryServer.getTopObjectClass(), OC_TOP); |
| | | |
| | | ObjectClass branchOC = |
| | | DirectoryServer.getObjectClass("ds-cfg-branch", true); |
| | | objectClasses.put(branchOC, "ds-cfg-branch"); |
| | | |
| | | LinkedHashMap<AttributeType,List<Attribute>> opAttrs = |
| | | new LinkedHashMap<AttributeType,List<Attribute>>(0); |
| | | LinkedHashMap<AttributeType,List<Attribute>> userAttrs = |
| | | new LinkedHashMap<AttributeType,List<Attribute>>(1); |
| | | LinkedHashMap<AttributeType,List<Attribute>> opAttrs = new LinkedHashMap<>(0); |
| | | LinkedHashMap<AttributeType,List<Attribute>> userAttrs = new LinkedHashMap<>(1); |
| | | |
| | | RDN rdn = baseDN.rdn(); |
| | | int numAVAs = rdn.getNumValues(); |
| | | for (int i=0; i < numAVAs; i++) |
| | | { |
| | | AttributeType attrType = rdn.getAttributeType(i); |
| | | ArrayList<Attribute> attrList = new ArrayList<Attribute>(1); |
| | | ArrayList<Attribute> attrList = new ArrayList<>(1); |
| | | attrList.add(Attributes.create(attrType, rdn.getAttributeValue(i))); |
| | | userAttrs.put(attrType, attrList); |
| | | } |
| | | |
| | | baseEntry = new Entry(baseDN, objectClasses, userAttrs, |
| | | opAttrs); |
| | | |
| | | baseEntry = new Entry(baseDN, objectClasses, userAttrs, opAttrs); |
| | | |
| | | // Register this as a change listener. |
| | | configuration.addTrustStoreChangeListener(this); |
| | |
| | | } |
| | | |
| | | // Construct the certificate entry to return. |
| | | LinkedHashMap<ObjectClass,String> ocMap = |
| | | new LinkedHashMap<ObjectClass,String>(2); |
| | | LinkedHashMap<ObjectClass,String> ocMap = new LinkedHashMap<>(2); |
| | | ocMap.put(DirectoryServer.getTopObjectClass(), OC_TOP); |
| | | |
| | | ObjectClass objectClass = |
| | | DirectoryServer.getObjectClass(OC_CRYPTO_INSTANCE_KEY, true); |
| | | ocMap.put(objectClass, OC_CRYPTO_INSTANCE_KEY); |
| | | |
| | | LinkedHashMap<AttributeType,List<Attribute>> opAttrs = |
| | | new LinkedHashMap<AttributeType,List<Attribute>>(0); |
| | | LinkedHashMap<AttributeType,List<Attribute>> userAttrs = |
| | | new LinkedHashMap<AttributeType,List<Attribute>>(3); |
| | | LinkedHashMap<AttributeType,List<Attribute>> opAttrs = new LinkedHashMap<>(0); |
| | | LinkedHashMap<AttributeType,List<Attribute>> userAttrs = new LinkedHashMap<>(3); |
| | | |
| | | userAttrs.put(t, asList(Attributes.create(t, v))); |
| | | |
| | | |
| | | ArrayList<Attribute> attrList = new ArrayList<Attribute>(1); |
| | | attrList.add(Attributes.create(t, v)); |
| | | userAttrs.put(t, attrList); |
| | | |
| | | |
| | | t = DirectoryServer.getAttributeType(ATTR_CRYPTO_PUBLIC_KEY_CERTIFICATE, |
| | | true); |
| | | t = DirectoryServer.getAttributeType(ATTR_CRYPTO_PUBLIC_KEY_CERTIFICATE, true); |
| | | AttributeBuilder builder = new AttributeBuilder(t); |
| | | builder.setOption("binary"); |
| | | builder.add(certValue); |
| | | attrList = new ArrayList<Attribute>(1); |
| | | attrList.add(builder.toAttribute()); |
| | | userAttrs.put(t, attrList); |
| | | userAttrs.put(t, asList(builder.toAttribute())); |
| | | |
| | | |
| | | Entry e = new Entry(entryDN, ocMap, userAttrs, opAttrs); |
| | |
| | | return e; |
| | | } |
| | | |
| | | private ArrayList<Attribute> asList(Attribute create) |
| | | { |
| | | ArrayList<Attribute> attrList = new ArrayList<>(1); |
| | | attrList.add(create); |
| | | return attrList; |
| | | } |
| | | |
| | | /** {@inheritDoc} */ |
| | | @Override |
| | | public void addEntry(Entry entry, AddOperation addOperation) |
| | |
| | | /** The base DN to be verified. */ |
| | | private DN baseDN; |
| | | /** The names of indexes to be verified for completeness. */ |
| | | private ArrayList<String> completeList = new ArrayList<String>(); |
| | | private ArrayList<String> completeList = new ArrayList<>(); |
| | | /** The names of indexes to be verified for cleanliness. */ |
| | | private ArrayList<String> cleanList = new ArrayList<String>(); |
| | | private ArrayList<String> cleanList = new ArrayList<>(); |
| | | |
| | | /** |
| | | * Get the base DN to be verified. |
| | |
| | | // Sorting the keys will ensure database record locks are acquired |
| | | // in a consistent order and help prevent transaction deadlocks between |
| | | // concurrent writers. |
| | | Set<ByteString> set = new HashSet<ByteString>(); |
| | | Set<ByteString> set = new HashSet<>(); |
| | | |
| | | int substrLength = indexConfig.getSubstringLength(); |
| | | |
| | |
| | | /** |
| | | * The controls supported by this backend. |
| | | */ |
| | | private static final Set<String> supportedControls = new HashSet<String>(Arrays.asList( |
| | | private static final Set<String> supportedControls = new HashSet<>(Arrays.asList( |
| | | OID_SUBTREE_DELETE_CONTROL, |
| | | OID_PAGED_RESULTS_CONTROL, |
| | | OID_MANAGE_DSAIT_CONTROL, |
| | |
| | | try |
| | | { |
| | | List<Path> allFiles = BackupManager.getFiles(rootDirectory, new JELogFileFilter(), backendID); |
| | | List<Path> compare = new ArrayList<Path>(files); |
| | | List<Path> compare = new ArrayList<>(files); |
| | | compare.removeAll(allFiles); |
| | | if (!compare.isEmpty()) |
| | | { |
| | |
| | | @Override |
| | | public Map<String, String> getAlerts() |
| | | { |
| | | Map<String, String> alerts = new LinkedHashMap<String, String>(); |
| | | Map<String, String> alerts = new LinkedHashMap<>(); |
| | | |
| | | alerts.put(ALERT_TYPE_BACKEND_ENVIRONMENT_UNUSABLE, |
| | | ALERT_DESCRIPTION_BACKEND_ENVIRONMENT_UNUSABLE); |
| | |
| | | ConfigConstants.NAME_PREFIX_CFG + "je-property"; |
| | | |
| | | |
| | | /** |
| | | * A map of JE property names to the corresponding configuration attribute. |
| | | */ |
| | | private static HashMap<String, String> attrMap = |
| | | new HashMap<String, String>(); |
| | | |
| | | /** A map of JE property names to the corresponding configuration attribute. */ |
| | | private static HashMap<String, String> attrMap = new HashMap<>(); |
| | | /** |
| | | * A map of configuration attribute names to the corresponding configuration |
| | | * object getter method. |
| | | */ |
| | | private static HashMap<String,Method> methodMap = |
| | | new HashMap<String, Method>(); |
| | | |
| | | private static HashMap<String, Method> methodMap = new HashMap<>(); |
| | | /** |
| | | * A map of configuration attribute names to the corresponding configuration |
| | | * PropertyDefinition. |
| | | */ |
| | | private static HashMap<String,PropertyDefinition> defnMap = |
| | | new HashMap<String, PropertyDefinition>(); |
| | | private static HashMap<String, PropertyDefinition> defnMap = new HashMap<>(); |
| | | |
| | | |
| | | /** Pulled from resource/admin/ABBREVIATIONS.xsl. db is mose common. */ |
| | |
| | | } |
| | | |
| | | // Set to catch duplicate properties. |
| | | HashSet<String> uniqueJEProperties = new HashSet<String>(); |
| | | HashSet<String> uniqueJEProperties = new HashSet<>(); |
| | | |
| | | // Iterate through the config values associated with a JE property. |
| | | for (String jeEntry : jeProperties) |
| | |
| | | |
| | | private int appendDatabaseContainerRows(TableBuilder builder, EntryContainer ec, int count) |
| | | { |
| | | ArrayList<DatabaseContainer> databaseContainers = new ArrayList<DatabaseContainer>(); |
| | | ArrayList<DatabaseContainer> databaseContainers = new ArrayList<>(); |
| | | ec.listDatabases(databaseContainers); |
| | | String toReplace = ec.getDatabasePrefix() + "_"; |
| | | for(DatabaseContainer dc : databaseContainers) |
| | |
| | | return 1; |
| | | } |
| | | |
| | | ArrayList<DatabaseContainer> databaseContainers = |
| | | new ArrayList<DatabaseContainer>(); |
| | | Map<Index, StringBuilder> undefinedKeys = |
| | | new HashMap<Index, StringBuilder>(); |
| | | ArrayList<DatabaseContainer> databaseContainers = new ArrayList<>(); |
| | | Map<Index, StringBuilder> undefinedKeys = new HashMap<>(); |
| | | ec.listDatabases(databaseContainers); |
| | | String toReplace = ec.getDatabasePrefix() + "_"; |
| | | for(DatabaseContainer dc : databaseContainers) |
| | |
| | | } |
| | | |
| | | DatabaseContainer databaseContainer = null; |
| | | ArrayList<DatabaseContainer> databaseContainers = |
| | | new ArrayList<DatabaseContainer>(); |
| | | ArrayList<DatabaseContainer> databaseContainers = new ArrayList<>(); |
| | | ec.listDatabases(databaseContainers); |
| | | String toReplace = ec.getDatabasePrefix() + "_"; |
| | | for(DatabaseContainer dc : databaseContainers) |
| | |
| | | private void throwReferralException(DN targetDN, DN referralDN, Set<String> labeledURIs, SearchScope searchScope) |
| | | throws DirectoryException |
| | | { |
| | | ArrayList<String> URIList = new ArrayList<String>(labeledURIs.size()); |
| | | ArrayList<String> URIList = new ArrayList<>(labeledURIs.size()); |
| | | for (String labeledURI : labeledURIs) |
| | | { |
| | | // Remove the label part of the labeled URI if there is a label. |
| | |
| | | if (status == OperationStatus.SUCCESS) |
| | | { |
| | | // Construct a set of all the labeled URIs in the referral. |
| | | Set<String> labeledURIs = new LinkedHashSet<String>(cursor.count()); |
| | | Set<String> labeledURIs = new LinkedHashSet<>(cursor.count()); |
| | | do |
| | | { |
| | | final Pair<String, DN> uriAndDN = decodeURIAndDN(data.getData()); |
| | |
| | | } |
| | | |
| | | // Construct a list of all the URIs in the referral. |
| | | ArrayList<String> URIList = new ArrayList<String>(cursor.count()); |
| | | ArrayList<String> URIList = new ArrayList<>(cursor.count()); |
| | | do |
| | | { |
| | | // Remove the label part of the labeled URI if there is a label. |
| | |
| | | } |
| | | } |
| | | |
| | | /** |
| | | * The name of this monitor instance. |
| | | */ |
| | | /** The name of this monitor instance. */ |
| | | private String name; |
| | | |
| | | /** |
| | | * The root container to be monitored. |
| | | */ |
| | | /** The root container to be monitored. */ |
| | | private RootContainer rootContainer; |
| | | |
| | | private int maxEntries = 1024; |
| | | private boolean filterUseEnabled; |
| | | private String startTimeStamp; |
| | | private final HashMap<SearchFilter, FilterStats> filterToStats = |
| | | new HashMap<SearchFilter, FilterStats>(); |
| | | private final HashMap<SearchFilter, FilterStats> filterToStats = new HashMap<>(); |
| | | private final AtomicInteger indexedSearchCount = new AtomicInteger(); |
| | | private final AtomicInteger unindexedSearchCount = new AtomicInteger(); |
| | | |
| | |
| | | this.rootContainer = rootContainer; |
| | | } |
| | | |
| | | |
| | | |
| | | /** {@inheritDoc} */ |
| | | @Override |
| | | public void initializeMonitorProvider(MonitorProviderCfg configuration) |
| | |
| | | return Collections.emptyList(); |
| | | } |
| | | |
| | | ArrayList<Attribute> monitorAttrs = new ArrayList<Attribute>(); |
| | | ArrayList<Attribute> monitorAttrs = new ArrayList<>(); |
| | | String jeVersion = JEVersion.CURRENT_VERSION.getVersionString(); |
| | | AttributeType versionType = |
| | | DirectoryServer.getDefaultAttributeType("JEVersion"); |
| | |
| | | AttributeBuilder needReindex = new AttributeBuilder("need-reindex"); |
| | | for(EntryContainer ec : rootContainer.getEntryContainers()) |
| | | { |
| | | List<DatabaseContainer> databases = new ArrayList<DatabaseContainer>(); |
| | | List<DatabaseContainer> databases = new ArrayList<>(); |
| | | ec.listDatabases(databases); |
| | | for(DatabaseContainer dc : databases) |
| | | { |
| | |
| | | private State state; |
| | | |
| | | /** The set of attribute indexes. */ |
| | | private final HashMap<AttributeType, AttributeIndex> attrIndexMap = new HashMap<AttributeType, AttributeIndex>(); |
| | | |
| | | private final HashMap<AttributeType, AttributeIndex> attrIndexMap = new HashMap<>(); |
| | | /** The set of VLV (Virtual List View) indexes. */ |
| | | private final HashMap<String, VLVIndex> vlvIndexMap = new HashMap<String, VLVIndex>(); |
| | | private final HashMap<String, VLVIndex> vlvIndexMap = new HashMap<>(); |
| | | |
| | | /** |
| | | * Prevents name clashes for common indexes (like id2entry) across multiple suffixes. |
| | |
| | | */ |
| | | void delete() throws DatabaseException |
| | | { |
| | | List<DatabaseContainer> databases = new ArrayList<DatabaseContainer>(); |
| | | List<DatabaseContainer> databases = new ArrayList<>(); |
| | | listDatabases(databases); |
| | | |
| | | if(env.getConfig().getTransactional()) |
| | |
| | | throws DatabaseException, JebException |
| | | |
| | | { |
| | | List<DatabaseContainer> databases = new ArrayList<DatabaseContainer>(); |
| | | List<DatabaseContainer> databases = new ArrayList<>(); |
| | | listDatabases(databases); |
| | | |
| | | newDatabasePrefix = preparePrefix(newDatabasePrefix); |
| | |
| | | */ |
| | | public void clear() throws DatabaseException |
| | | { |
| | | List<DatabaseContainer> databases = new ArrayList<DatabaseContainer>(); |
| | | List<DatabaseContainer> databases = new ArrayList<>(); |
| | | listDatabases(databases); |
| | | |
| | | for(DatabaseContainer db : databases) |
| | |
| | | SearchScope scope = searchOperation.getScope(); |
| | | SearchFilter filter = searchOperation.getFilter(); |
| | | |
| | | TreeMap<SortValues,EntryID> sortMap = new TreeMap<SortValues,EntryID>(); |
| | | TreeMap<SortValues,EntryID> sortMap = new TreeMap<>(); |
| | | for (EntryID id : entryIDSet) |
| | | { |
| | | try |
| | |
| | | int includedBeforeCount = 0; |
| | | int includedAfterCount = 0; |
| | | int listSize = 0; |
| | | LinkedList<EntryID> idList = new LinkedList<EntryID>(); |
| | | LinkedList<EntryID> idList = new LinkedList<>(); |
| | | for (Map.Entry<SortValues, EntryID> entry : sortMap.entrySet()) |
| | | { |
| | | SortValues sortValues = entry.getKey(); |
| | |
| | | throws IOException, LDIFException, DatabaseException, JebException |
| | | { |
| | | List<DN> includeBranches = exportConfig.getIncludeBranches(); |
| | | DN baseDN; |
| | | ArrayList<EntryContainer> exportContainers = |
| | | new ArrayList<EntryContainer>(); |
| | | ArrayList<EntryContainer> exportContainers = new ArrayList<>(); |
| | | |
| | | for (EntryContainer entryContainer : rootContainer.getEntryContainers()) |
| | | { |
| | | // Skip containers that are not covered by the include branches. |
| | | baseDN = entryContainer.getBaseDN(); |
| | | DN baseDN = entryContainer.getBaseDN(); |
| | | |
| | | if (includeBranches == null || includeBranches.isEmpty()) |
| | | { |
| | |
| | | return null; |
| | | } |
| | | lastEntryBodyLines = lines; |
| | | lastEntryHeaderLines = new LinkedList<StringBuilder>(); |
| | | lastEntryHeaderLines = new LinkedList<>(); |
| | | |
| | | // Read the DN of the entry and see if it is one that should be included |
| | | // in the import. |
| | |
| | | private Entry createEntry(List<StringBuilder> lines, DN entryDN, boolean checkSchema, Suffix suffix) |
| | | { |
| | | // Read the set of attributes from the entry. |
| | | Map<ObjectClass, String> objectClasses = new HashMap<ObjectClass, String>(); |
| | | Map<AttributeType, List<AttributeBuilder>> userAttrBuilders = |
| | | new HashMap<AttributeType, List<AttributeBuilder>>(); |
| | | Map<AttributeType, List<AttributeBuilder>> operationalAttrBuilders = |
| | | new HashMap<AttributeType, List<AttributeBuilder>>(); |
| | | Map<ObjectClass, String> objectClasses = new HashMap<>(); |
| | | Map<AttributeType, List<AttributeBuilder>> userAttrBuilders = new HashMap<>(); |
| | | Map<AttributeType, List<AttributeBuilder>> operationalAttrBuilders = new HashMap<>(); |
| | | try |
| | | { |
| | | for (StringBuilder line : lines) |
| | |
| | | private ExecutorService scratchFileWriterService; |
| | | |
| | | /** Queue of free index buffers -- used to re-cycle index buffers. */ |
| | | private final BlockingQueue<IndexOutputBuffer> freeBufferQueue = |
| | | new LinkedBlockingQueue<IndexOutputBuffer>(); |
| | | private final BlockingQueue<IndexOutputBuffer> freeBufferQueue = new LinkedBlockingQueue<>(); |
| | | |
| | | /** |
| | | * Map of index keys to index buffers. Used to allocate sorted index buffers |
| | | * to a index writer thread. |
| | | */ |
| | | private final Map<IndexKey, BlockingQueue<IndexOutputBuffer>> indexKeyQueueMap = |
| | | new ConcurrentHashMap<IndexKey, BlockingQueue<IndexOutputBuffer>>(); |
| | | private final Map<IndexKey, BlockingQueue<IndexOutputBuffer>> indexKeyQueueMap = new ConcurrentHashMap<>(); |
| | | |
| | | /** Map of DB containers to index managers. Used to start phase 2. */ |
| | | private final List<IndexManager> indexMgrList = new LinkedList<IndexManager>(); |
| | | private final List<IndexManager> indexMgrList = new LinkedList<>(); |
| | | /** Map of DB containers to DN-based index managers. Used to start phase 2. */ |
| | | private final List<IndexManager> DNIndexMgrList = new LinkedList<IndexManager>(); |
| | | private final List<IndexManager> DNIndexMgrList = new LinkedList<>(); |
| | | |
| | | /** |
| | | * Futures used to indicate when the index file writers are done flushing |
| | |
| | | private final List<ScratchFileWriterTask> scratchFileWriterList; |
| | | |
| | | /** Map of DNs to Suffix objects. */ |
| | | private final Map<DN, Suffix> dnSuffixMap = new LinkedHashMap<DN, Suffix>(); |
| | | private final Map<DN, Suffix> dnSuffixMap = new LinkedHashMap<>(); |
| | | /** Map of container ids to database containers. */ |
| | | private final ConcurrentHashMap<Integer, Index> idContainerMap = new ConcurrentHashMap<Integer, Index>(); |
| | | private final ConcurrentHashMap<Integer, Index> idContainerMap = new ConcurrentHashMap<>(); |
| | | /** Map of container ids to entry containers. */ |
| | | private final ConcurrentHashMap<Integer, EntryContainer> idECMap = |
| | | new ConcurrentHashMap<Integer, EntryContainer>(); |
| | | private final ConcurrentHashMap<Integer, EntryContainer> idECMap = new ConcurrentHashMap<>(); |
| | | |
| | | /** Used to synchronize when a scratch file index writer is first setup. */ |
| | | private final Object synObj = new Object(); |
| | |
| | | this.rebuildManager = new RebuildIndexManager(rebuildConfig, cfg); |
| | | this.indexCount = rebuildManager.getIndexCount(); |
| | | this.clearedBackend = false; |
| | | this.scratchFileWriterList = |
| | | new ArrayList<ScratchFileWriterTask>(indexCount); |
| | | this.scratchFileWriterFutures = new CopyOnWriteArrayList<Future<Void>>(); |
| | | this.scratchFileWriterList = new ArrayList<>(indexCount); |
| | | this.scratchFileWriterFutures = new CopyOnWriteArrayList<>(); |
| | | |
| | | this.tempDir = getTempDir(cfg, rebuildConfig.getTmpDirectory()); |
| | | recursiveDelete(tempDir); |
| | |
| | | this.indexCount = getTotalIndexCount(localDBBackendCfg); |
| | | |
| | | this.clearedBackend = mustClearBackend(importConfiguration, localDBBackendCfg); |
| | | this.scratchFileWriterList = |
| | | new ArrayList<ScratchFileWriterTask>(indexCount); |
| | | this.scratchFileWriterFutures = new CopyOnWriteArrayList<Future<Void>>(); |
| | | this.scratchFileWriterList = new ArrayList<>(indexCount); |
| | | this.scratchFileWriterFutures = new CopyOnWriteArrayList<>(); |
| | | |
| | | this.tempDir = getTempDir(localDBBackendCfg, importConfiguration.getTmpDirectory()); |
| | | recursiveDelete(tempDir); |
| | |
| | | { |
| | | DN baseDN = entryContainer.getBaseDN(); |
| | | EntryContainer sourceEntryContainer = null; |
| | | List<DN> includeBranches = new ArrayList<DN>(); |
| | | List<DN> excludeBranches = new ArrayList<DN>(); |
| | | List<DN> includeBranches = new ArrayList<>(); |
| | | List<DN> excludeBranches = new ArrayList<>(); |
| | | |
| | | if (!importConfiguration.appendToExistingData() |
| | | && !importConfiguration.clearBackend()) |
| | |
| | | bufferSortService = Executors.newFixedThreadPool(threadCount); |
| | | final ExecutorService execService = Executors.newFixedThreadPool(threadCount); |
| | | |
| | | final List<Callable<Void>> tasks = new ArrayList<Callable<Void>>(threadCount); |
| | | final List<Callable<Void>> tasks = new ArrayList<>(threadCount); |
| | | tasks.add(new MigrateExistingTask()); |
| | | getAll(execService.invokeAll(tasks)); |
| | | tasks.clear(); |
| | |
| | | int buffers; |
| | | while (true) |
| | | { |
| | | final List<IndexManager> allIndexMgrs = new ArrayList<IndexManager>(DNIndexMgrList); |
| | | final List<IndexManager> allIndexMgrs = new ArrayList<>(DNIndexMgrList); |
| | | allIndexMgrs.addAll(indexMgrList); |
| | | Collections.sort(allIndexMgrs, Collections.reverseOrder()); |
| | | |
| | |
| | | logger.info(NOTE_IMPORT_LDIF_PHASE_TWO_MEM_REPORT, availableMemory, readAheadSize, buffers); |
| | | |
| | | // Start indexing tasks. |
| | | List<Future<Void>> futures = new LinkedList<Future<Void>>(); |
| | | List<Future<Void>> futures = new LinkedList<>(); |
| | | ExecutorService dbService = Executors.newFixedThreadPool(dbThreads); |
| | | Semaphore permits = new Semaphore(buffers); |
| | | |
| | |
| | | |
| | | private List<byte[]> includeBranchesAsBytes(Suffix suffix) |
| | | { |
| | | List<byte[]> includeBranches = new ArrayList<byte[]>(suffix.getIncludeBranches().size()); |
| | | List<byte[]> includeBranches = new ArrayList<>(suffix.getIncludeBranches().size()); |
| | | for (DN includeBranch : suffix.getIncludeBranches()) |
| | | { |
| | | if (includeBranch.isDescendantOf(suffix.getBaseDN())) |
| | |
| | | */ |
| | | private class AppendReplaceTask extends ImportTask |
| | | { |
| | | private final Set<ByteString> insertKeySet = new HashSet<ByteString>(); |
| | | private final Set<ByteString> deleteKeySet = new HashSet<ByteString>(); |
| | | private final Set<ByteString> insertKeySet = new HashSet<>(); |
| | | private final Set<ByteString> deleteKeySet = new HashSet<>(); |
| | | private final EntryInformation entryInfo = new EntryInformation(); |
| | | private Entry oldEntry; |
| | | private EntryID entryID; |
| | |
| | | */ |
| | | private class ImportTask implements Callable<Void> |
| | | { |
| | | private final Map<IndexKey, IndexOutputBuffer> indexBufferMap = new HashMap<IndexKey, IndexOutputBuffer>(); |
| | | private final Set<ByteString> insertKeySet = new HashSet<ByteString>(); |
| | | private final Map<IndexKey, IndexOutputBuffer> indexBufferMap = new HashMap<>(); |
| | | private final Set<ByteString> insertKeySet = new HashSet<>(); |
| | | private final EntryInformation entryInfo = new EntryInformation(); |
| | | private final IndexKey dnIndexKey = new IndexKey(dnType, ImportIndexType.DN.toString(), 1); |
| | | private DatabaseEntry keyEntry = new DatabaseEntry(); |
| | |
| | | |
| | | void flushIndexBuffers() throws InterruptedException, ExecutionException |
| | | { |
| | | final ArrayList<Future<Void>> futures = new ArrayList<Future<Void>>(); |
| | | final ArrayList<Future<Void>> futures = new ArrayList<>(); |
| | | Iterator<Map.Entry<IndexKey, IndexOutputBuffer>> it = indexBufferMap.entrySet().iterator(); |
| | | while (it.hasNext()) |
| | | { |
| | |
| | | private final IndexManager indexMgr; |
| | | private final DatabaseEntry dbKey, dbValue; |
| | | private final int cacheSize; |
| | | private final Map<Integer, DNState> dnStateMap = new HashMap<Integer, DNState>(); |
| | | private final Map<Integer, DNState> dnStateMap = new HashMap<>(); |
| | | private final Semaphore permits; |
| | | private final int maxPermits; |
| | | private final AtomicLong bytesRead = new AtomicLong(); |
| | |
| | | batchNumber.incrementAndGet(); |
| | | |
| | | // Create all the index buffers for the next batch. |
| | | final NavigableSet<IndexInputBuffer> buffers = new TreeSet<IndexInputBuffer>(); |
| | | final NavigableSet<IndexInputBuffer> buffers = new TreeSet<>(); |
| | | for (int i = 0; i < permitRequest; i++) |
| | | { |
| | | final long bufferBegin = bufferIndexFile.readLong(); |
| | |
| | | private final DataOutputStream bufferStream; |
| | | private final DataOutputStream bufferIndexStream; |
| | | private final byte[] tmpArray = new byte[8]; |
| | | private final TreeSet<IndexOutputBuffer> indexSortedSet = new TreeSet<IndexOutputBuffer>(); |
| | | private final TreeSet<IndexOutputBuffer> indexSortedSet = new TreeSet<>(); |
| | | private int insertKeyCount, deleteKeyCount; |
| | | private int bufferCount; |
| | | private boolean poisonSeen; |
| | |
| | | public Void call() throws IOException, InterruptedException |
| | | { |
| | | long offset = 0; |
| | | List<IndexOutputBuffer> l = new LinkedList<IndexOutputBuffer>(); |
| | | List<IndexOutputBuffer> l = new LinkedList<>(); |
| | | try |
| | | { |
| | | while (true) |
| | |
| | | { |
| | | indexMgrList.add(indexMgr); |
| | | } |
| | | BlockingQueue<IndexOutputBuffer> newQueue = |
| | | new ArrayBlockingQueue<IndexOutputBuffer>(phaseOneBufferCount); |
| | | BlockingQueue<IndexOutputBuffer> newQueue = new ArrayBlockingQueue<>(phaseOneBufferCount); |
| | | ScratchFileWriterTask indexWriter = new ScratchFileWriterTask(newQueue, indexMgr); |
| | | scratchFileWriterList.add(indexWriter); |
| | | scratchFileWriterFutures.add(scratchFileWriterService.submit(indexWriter)); |
| | |
| | | |
| | | /** Rebuild index configuration. */ |
| | | private final RebuildConfig rebuildConfig; |
| | | |
| | | /** Local DB backend configuration. */ |
| | | private final LocalDBBackendCfg cfg; |
| | | |
| | | /** Map of index keys to indexes. */ |
| | | private final Map<IndexKey, Index> indexMap = |
| | | new LinkedHashMap<IndexKey, Index>(); |
| | | |
| | | private final Map<IndexKey, Index> indexMap = new LinkedHashMap<>(); |
| | | /** Map of index keys to extensible indexes. */ |
| | | private final Map<IndexKey, Collection<Index>> extensibleIndexMap = |
| | | new LinkedHashMap<IndexKey, Collection<Index>>(); |
| | | |
| | | private final Map<IndexKey, Collection<Index>> extensibleIndexMap = new LinkedHashMap<>(); |
| | | /** List of VLV indexes. */ |
| | | private final List<VLVIndex> vlvIndexes = new LinkedList<VLVIndex>(); |
| | | private final List<VLVIndex> vlvIndexes = new LinkedList<>(); |
| | | |
| | | /** The DN2ID index. */ |
| | | private DN2ID dn2id; |
| | | |
| | | /** The DN2URI index. */ |
| | | private DN2URI dn2uri; |
| | | |
| | |
| | | scratchFileWriterService = Executors.newFixedThreadPool(2 * indexCount); |
| | | bufferSortService = Executors.newFixedThreadPool(threadCount); |
| | | ExecutorService rebuildIndexService = Executors.newFixedThreadPool(threadCount); |
| | | List<Callable<Void>> tasks = new ArrayList<Callable<Void>>(threadCount); |
| | | List<Callable<Void>> tasks = new ArrayList<>(threadCount); |
| | | for (int i = 0; i < threadCount; i++) |
| | | { |
| | | tasks.add(this); |
| | |
| | | DatabaseEntry data = new DatabaseEntry(); |
| | | DatabaseEntry key; |
| | | |
| | | ArrayList<EntryIDSet> lists = new ArrayList<EntryIDSet>(); |
| | | ArrayList<EntryIDSet> lists = new ArrayList<>(); |
| | | |
| | | Cursor cursor = openCursor(null, CursorConfig.READ_COMMITTED); |
| | | try |
| | |
| | | List<Modification> mods) |
| | | throws DatabaseException |
| | | { |
| | | final Map<ByteString, Boolean> modifiedKeys = new TreeMap<ByteString, Boolean>(indexer.getBSComparator()); |
| | | final Map<ByteString, Boolean> modifiedKeys = new TreeMap<>(indexer.getBSComparator()); |
| | | indexer.modifyEntry(oldEntry, newEntry, mods, modifiedKeys); |
| | | |
| | | for (Map.Entry<ByteString, Boolean> modifiedKey : modifiedKeys.entrySet()) |
| | |
| | | * |
| | | * |
| | | * Copyright 2006-2008 Sun Microsystems, Inc. |
| | | * Portions Copyright 2014 ForgeRock AS |
| | | * Portions Copyright 2014-2015 ForgeRock AS |
| | | */ |
| | | package org.opends.server.backends.jeb; |
| | | |
| | |
| | | * The buffered records stored as a map from the record key to the |
| | | * buffered value for that key for each index. |
| | | */ |
| | | private final LinkedHashMap<Index, TreeMap<ByteString, BufferedIndexValues>> bufferedIndexes = |
| | | new LinkedHashMap<Index, TreeMap<ByteString, BufferedIndexValues>>(); |
| | | |
| | | private final LinkedHashMap<Index, TreeMap<ByteString, BufferedIndexValues>> bufferedIndexes = new LinkedHashMap<>(); |
| | | /** The buffered records stored as a set of buffered VLV values for each index. */ |
| | | private final LinkedHashMap<VLVIndex, BufferedVLVValues> bufferedVLVIndexes = |
| | | new LinkedHashMap<VLVIndex, BufferedVLVValues>(); |
| | | private final LinkedHashMap<VLVIndex, BufferedVLVValues> bufferedVLVIndexes = new LinkedHashMap<>(); |
| | | |
| | | /** A simple class representing a pair of added and deleted indexed IDs. */ |
| | | static class BufferedIndexValues |
| | |
| | | { |
| | | if (this.addedValues == null) |
| | | { |
| | | this.addedValues = new TreeSet<SortValues>(); |
| | | this.addedValues = new TreeSet<>(); |
| | | } |
| | | this.addedValues.add(sortValues); |
| | | } |
| | |
| | | { |
| | | if (this.deletedValues == null) |
| | | { |
| | | this.deletedValues = new TreeSet<SortValues>(); |
| | | this.deletedValues = new TreeSet<>(); |
| | | } |
| | | this.deletedValues.add(sortValues); |
| | | } |
| | |
| | | TreeMap<ByteString, BufferedIndexValues> bufferedOperations = bufferedIndexes.get(index); |
| | | if (bufferedOperations == null) |
| | | { |
| | | bufferedOperations = new TreeMap<ByteString, BufferedIndexValues>(bsComparator); |
| | | bufferedOperations = new TreeMap<>(bsComparator); |
| | | bufferedIndexes.put(index, bufferedOperations); |
| | | } |
| | | else |
| | |
| | | // into a hash map, the faster components (equality, presence, approx) |
| | | // into one list and the remainder into another list. |
| | | |
| | | ArrayList<SearchFilter> fastComps = new ArrayList<SearchFilter>(); |
| | | ArrayList<SearchFilter> otherComps = new ArrayList<SearchFilter>(); |
| | | HashMap<AttributeType, ArrayList<SearchFilter>> rangeComps = |
| | | new HashMap<AttributeType, ArrayList<SearchFilter>>(); |
| | | ArrayList<SearchFilter> fastComps = new ArrayList<>(); |
| | | ArrayList<SearchFilter> otherComps = new ArrayList<>(); |
| | | HashMap<AttributeType, ArrayList<SearchFilter>> rangeComps = new HashMap<>(); |
| | | |
| | | for (SearchFilter filter : andFilter.getFilterComponents()) |
| | | { |
| | |
| | | rangeList = rangeComps.get(filter.getAttributeType()); |
| | | if (rangeList == null) |
| | | { |
| | | rangeList = new ArrayList<SearchFilter>(); |
| | | rangeList = new ArrayList<>(); |
| | | rangeComps.put(filter.getAttributeType(), rangeList); |
| | | } |
| | | rangeList.add(filter); |
| | |
| | | } |
| | | |
| | | // Next, process range component pairs like (cn>=A)(cn<=B). |
| | | ArrayList<SearchFilter> remainComps = new ArrayList<SearchFilter>(); |
| | | ArrayList<SearchFilter> remainComps = new ArrayList<>(); |
| | | for (Map.Entry<AttributeType, ArrayList<SearchFilter>> rangeEntry : rangeComps.entrySet()) |
| | | { |
| | | ArrayList<SearchFilter> rangeList = rangeEntry.getValue(); |
| | |
| | | */ |
| | | private EntryIDSet evaluateLogicalOrFilter(SearchFilter orFilter) |
| | | { |
| | | ArrayList<EntryIDSet> candidateSets = new ArrayList<EntryIDSet>( |
| | | orFilter.getFilterComponents().size()); |
| | | ArrayList<EntryIDSet> candidateSets = new ArrayList<>(orFilter.getFilterComponents().size()); |
| | | |
| | | for (SearchFilter filter : orFilter.getFilterComponents()) |
| | | { |
| | |
| | | final byte[] encodedObjectClasses = keyEntry.getData(); |
| | | final ASN1Reader reader = ASN1.getReader(valueEntry.getData()); |
| | | reader.readStartSequence(); |
| | | final List<String> objectClassNames = new LinkedList<String>(); |
| | | final List<String> objectClassNames = new LinkedList<>(); |
| | | while (reader.hasNextElement()) |
| | | { |
| | | objectClassNames.add(reader.readOctetStringAsString()); |
| | |
| | | final ASN1Reader reader = ASN1.getReader(valueEntry.getData()); |
| | | reader.readStartSequence(); |
| | | final String attributeName = reader.readOctetStringAsString(); |
| | | final List<String> attributeOptions = new LinkedList<String>(); |
| | | final List<String> attributeOptions = new LinkedList<>(); |
| | | while (reader.hasNextElement()) |
| | | { |
| | | attributeOptions.add(reader.readOctetStringAsString()); |
| | |
| | | private DatabaseEnvironmentMonitor monitor; |
| | | |
| | | /** The base DNs contained in this root container. */ |
| | | private final ConcurrentHashMap<DN, EntryContainer> entryContainers = new ConcurrentHashMap<DN, EntryContainer>(); |
| | | private final ConcurrentHashMap<DN, EntryContainer> entryContainers = new ConcurrentHashMap<>(); |
| | | |
| | | /** The cached value of the next entry identifier to be assigned. */ |
| | | private AtomicLong nextid = new AtomicLong(1); |
| | |
| | | if (timeLimit > 0) |
| | | { |
| | | // Get a list of all the databases used by the backend. |
| | | ArrayList<DatabaseContainer> dbList = new ArrayList<DatabaseContainer>(); |
| | | ArrayList<DatabaseContainer> dbList = new ArrayList<>(); |
| | | for (EntryContainer ec : entryContainers.values()) |
| | | { |
| | | ec.sharedLock.lock(); |
| | |
| | | private final EntryContainer entryContainer; |
| | | private final Object synchObject = new Object(); |
| | | private static final int PARENT_ID_SET_SIZE = 16 * 1024; |
| | | private final ConcurrentHashMap<DN, CountDownLatch> pendingMap = |
| | | new ConcurrentHashMap<DN, CountDownLatch>(); |
| | | private final Set<DN> parentSet = new HashSet<DN>(PARENT_ID_SET_SIZE); |
| | | private final ConcurrentHashMap<DN, CountDownLatch> pendingMap = new ConcurrentHashMap<>(); |
| | | private final Set<DN> parentSet = new HashSet<>(PARENT_ID_SET_SIZE); |
| | | |
| | | /** |
| | | * Creates a suffix instance using the specified parameters. |
| | |
| | | } |
| | | else |
| | | { |
| | | this.includeBranches = new ArrayList<DN>(0); |
| | | this.includeBranches = new ArrayList<>(0); |
| | | } |
| | | if (excludeBranches != null) |
| | | { |
| | |
| | | } |
| | | else |
| | | { |
| | | this.excludeBranches = new ArrayList<DN>(0); |
| | | this.excludeBranches = new ArrayList<>(0); |
| | | } |
| | | } |
| | | |
| | |
| | | int targetOffset = 0; |
| | | int includedBeforeCount = 0; |
| | | int includedAfterCount = 0; |
| | | LinkedList<EntryID> idList = new LinkedList<EntryID>(); |
| | | LinkedList<EntryID> idList = new LinkedList<>(); |
| | | DatabaseEntry key = new DatabaseEntry(); |
| | | DatabaseEntry data = new DatabaseEntry(); |
| | | |
| | |
| | | } |
| | | else |
| | | { |
| | | LinkedList<long[]> idSets = new LinkedList<long[]>(); |
| | | LinkedList<long[]> idSets = new LinkedList<>(); |
| | | int currentCount = 0; |
| | | DatabaseEntry key = new DatabaseEntry(); |
| | | DatabaseEntry data = new DatabaseEntry(); |
| | |
| | | * This map is used to gather some statistics about values that have |
| | | * exceeded the entry limit. |
| | | */ |
| | | private IdentityHashMap<Index, HashMap<ByteString, Long>> entryLimitMap = |
| | | new IdentityHashMap<Index, HashMap<ByteString, Long>>(); |
| | | private IdentityHashMap<Index, HashMap<ByteString, Long>> entryLimitMap = new IdentityHashMap<>(); |
| | | |
| | | /** Indicates whether the DN database is to be verified. */ |
| | | private boolean verifyDN2ID; |
| | |
| | | private Index id2s; |
| | | |
| | | /** A list of the attribute indexes to be verified. */ |
| | | private final ArrayList<AttributeIndex> attrIndexList = new ArrayList<AttributeIndex>(); |
| | | private final ArrayList<AttributeIndex> attrIndexList = new ArrayList<>(); |
| | | /** A list of the VLV indexes to be verified. */ |
| | | private final ArrayList<VLVIndex> vlvIndexList = new ArrayList<VLVIndex>(); |
| | | private final ArrayList<VLVIndex> vlvIndexList = new ArrayList<>(); |
| | | |
| | | /** |
| | | * Construct a VerifyJob. |
| | |
| | | } |
| | | } |
| | | |
| | | entryLimitMap = new IdentityHashMap<Index, HashMap<ByteString, Long>>(attrIndexList.size()); |
| | | entryLimitMap = new IdentityHashMap<>(attrIndexList.size()); |
| | | |
| | | // We will be updating these files independently of the indexes |
| | | // so we need direct access to them rather than going through |
| | |
| | | HashMap<ByteString,Long> hashMap = entryLimitMap.get(index); |
| | | if (hashMap == null) |
| | | { |
| | | hashMap = new HashMap<ByteString, Long>(); |
| | | hashMap = new HashMap<>(); |
| | | entryLimitMap.put(index, hashMap); |
| | | } |
| | | ByteString octetString = ByteString.wrap(key); |
| | |
| | | @Override |
| | | public ArrayList<Attribute> getMonitorData() |
| | | { |
| | | ArrayList<Attribute> monitorAttrs = new ArrayList<Attribute>(); |
| | | ArrayList<Attribute> monitorAttrs = new ArrayList<>(); |
| | | |
| | | AttributeBuilder needReindex = new AttributeBuilder("need-reindex"); |
| | | for(EntryContainer ec : rootContainer.getEntryContainers()) |
| | |
| | | Function<KI, KO, ? extends Exception> keyTransformer, |
| | | ValueTransformer<KI, VI, VO, ? extends Exception> valueTransformer) |
| | | { |
| | | return new CursorTransformer<KI, VI, KO, VO>(input, keyTransformer, valueTransformer); |
| | | return new CursorTransformer<>(input, keyTransformer, valueTransformer); |
| | | } |
| | | |
| | | @SuppressWarnings("unchecked") |
| | |
| | | throws IOException, LDIFException, StorageRuntimeException |
| | | { |
| | | List<DN> includeBranches = exportConfig.getIncludeBranches(); |
| | | final ArrayList<EntryContainer> exportContainers = new ArrayList<EntryContainer>(); |
| | | final ArrayList<EntryContainer> exportContainers = new ArrayList<>(); |
| | | |
| | | for (EntryContainer entryContainer : rootContainer.getEntryContainers()) |
| | | { |
| | |
| | | // into a hash map, the faster components (equality, presence, approx) |
| | | // into one list and the remainder into another list. |
| | | |
| | | ArrayList<SearchFilter> fastComps = new ArrayList<SearchFilter>(); |
| | | ArrayList<SearchFilter> otherComps = new ArrayList<SearchFilter>(); |
| | | HashMap<AttributeType, ArrayList<SearchFilter>> rangeComps = |
| | | new HashMap<AttributeType, ArrayList<SearchFilter>>(); |
| | | ArrayList<SearchFilter> fastComps = new ArrayList<>(); |
| | | ArrayList<SearchFilter> otherComps = new ArrayList<>(); |
| | | HashMap<AttributeType, ArrayList<SearchFilter>> rangeComps = new HashMap<>(); |
| | | |
| | | for (SearchFilter filter : andFilter.getFilterComponents()) |
| | | { |
| | |
| | | rangeList = rangeComps.get(filter.getAttributeType()); |
| | | if (rangeList == null) |
| | | { |
| | | rangeList = new ArrayList<SearchFilter>(); |
| | | rangeList = new ArrayList<>(); |
| | | rangeComps.put(filter.getAttributeType(), rangeList); |
| | | } |
| | | rangeList.add(filter); |
| | |
| | | } |
| | | |
| | | // Next, process range component pairs like (cn>=A)(cn<=B). |
| | | ArrayList<SearchFilter> remainComps = new ArrayList<SearchFilter>(); |
| | | ArrayList<SearchFilter> remainComps = new ArrayList<>(); |
| | | for (Map.Entry<AttributeType, ArrayList<SearchFilter>> rangeEntry : rangeComps.entrySet()) |
| | | { |
| | | ArrayList<SearchFilter> rangeList = rangeEntry.getValue(); |
| | |
| | | */ |
| | | private EntryIDSet evaluateLogicalOrFilter(SearchFilter orFilter) |
| | | { |
| | | ArrayList<EntryIDSet> candidateSets = new ArrayList<EntryIDSet>( |
| | | orFilter.getFilterComponents().size()); |
| | | ArrayList<EntryIDSet> candidateSets = new ArrayList<>(orFilter.getFilterComponents().size()); |
| | | |
| | | for (SearchFilter filter : orFilter.getFilterComponents()) |
| | | { |
| | |
| | | { |
| | | // Total number of IDs found so far. |
| | | int totalIDCount = 0; |
| | | ArrayList<EntryIDSet> sets = new ArrayList<EntryIDSet>(); |
| | | ArrayList<EntryIDSet> sets = new ArrayList<>(); |
| | | Cursor<ByteString, EntryIDSet> cursor = index.openCursor(txn); |
| | | try |
| | | { |
| | |
| | | lastPos = bufferPos; |
| | | } |
| | | } |
| | | Cursor<ByteString, ByteString> composite = new CompositeCursor<ByteString, ByteString>(cursors); |
| | | return new ProgressCursor<ByteString, ByteString>(composite, this, cursors); |
| | | Cursor<ByteString, ByteString> composite = new CompositeCursor<>(cursors); |
| | | return new ProgressCursor<>(composite, this, cursors); |
| | | } |
| | | |
| | | private void readBufferPositions() throws IOException |
| | |
| | | public Void run(ReadableTransaction txn) throws Exception |
| | | { |
| | | try (Cursor<ByteString, ByteString> cursor0 = txn.openCursor(treeName); |
| | | SequentialCursor<ByteString, ByteString> cursor = |
| | | new MergingCursor<ByteString, ByteString>(cursor0, getMerger(treeName))) |
| | | SequentialCursor<ByteString, ByteString> cursor = new MergingCursor<>(cursor0, getMerger(treeName))) |
| | | { |
| | | progressTask.addCursor(cursor0); |
| | | while (cursor.next()) |
| | |
| | | final byte[] encodedObjectClasses = ocCursor.getKey().toByteArray(); |
| | | final ASN1Reader reader = ASN1.getReader(ocCursor.getValue()); |
| | | reader.readStartSequence(); |
| | | final List<String> objectClassNames = new LinkedList<String>(); |
| | | final List<String> objectClassNames = new LinkedList<>(); |
| | | while (reader.hasNextElement()) |
| | | { |
| | | objectClassNames.add(reader.readOctetStringAsString()); |
| | |
| | | final ASN1Reader reader = ASN1.getReader(adCursor.getValue()); |
| | | reader.readStartSequence(); |
| | | final String attributeName = reader.readOctetStringAsString(); |
| | | final List<String> attributeOptions = new LinkedList<String>(); |
| | | final List<String> attributeOptions = new LinkedList<>(); |
| | | while (reader.hasNextElement()) |
| | | { |
| | | attributeOptions.add(reader.readOctetStringAsString()); |
| | |
| | | private LinkedList<String> getAttributeValues(String attributeName) |
| | | throws InitializationException |
| | | { |
| | | LinkedList<String> valueStrings = new LinkedList<String>(); |
| | | LinkedList<String> valueStrings = new LinkedList<>(); |
| | | |
| | | List<Attribute> attrList = |
| | | taskEntry.getAttribute(attributeName.toLowerCase()); |
| | | if ((attrList == null) || attrList.isEmpty()) |
| | | List<Attribute> attrList = taskEntry.getAttribute(attributeName.toLowerCase()); |
| | | if (attrList == null || attrList.isEmpty()) |
| | | { |
| | | return valueStrings; |
| | | } |
| | | |
| | | if (attrList.size() > 1) |
| | | { |
| | | throw new InitializationException(ERR_TASK_MULTIPLE_ATTRS_FOR_TYPE.get(attributeName, taskEntry.getName())); |
| | |
| | | try |
| | | { |
| | | this.taskState = taskState; |
| | | Attribute attr = Attributes.create(ATTR_TASK_STATE, |
| | | taskState.toString()); |
| | | ArrayList<Attribute> attrList = new ArrayList<Attribute>(1); |
| | | Attribute attr = Attributes.create(ATTR_TASK_STATE, taskState.toString()); |
| | | ArrayList<Attribute> attrList = new ArrayList<>(1); |
| | | attrList.add(attr); |
| | | taskEntry.putAttribute(attr.getAttributeType(), attrList); |
| | | } |
| | |
| | | |
| | | /** |
| | | * Sets a state for this task that is the result of a call to |
| | | * {@link #interruptTask(TaskState, org.opends.messages.LocalizableMessage)}. |
| | | * {@link #interruptTask(TaskState, LocalizableMessage)}. |
| | | * It may take this task some time to actually cancel to that |
| | | * actual state may differ until quiescence. |
| | | * |
| | |
| | | |
| | | /** |
| | | * Gets the interrupt state for this task that was set as a |
| | | * result of a call to {@link #interruptTask(TaskState, |
| | | * org.opends.messages.LocalizableMessage)}. |
| | | * result of a call to {@link #interruptTask(TaskState, LocalizableMessage)}. |
| | | * |
| | | * @return interrupt state for this task |
| | | */ |
| | |
| | | /** |
| | | * Returns a state for this task after processing has completed. |
| | | * If the task was interrupted with a call to |
| | | * {@link #interruptTask(TaskState, org.opends.messages.LocalizableMessage)} |
| | | * then that method's interruptState is returned here. Otherwse |
| | | * {@link #interruptTask(TaskState, LocalizableMessage)} |
| | | * then that method's interruptState is returned here. Otherwise |
| | | * this method returns TaskState.COMPLETED_SUCCESSFULLY. It is |
| | | * assumed that if there were errors during task processing that |
| | | * task state will have been derived in some other way. |
| | |
| | | { |
| | | Entry taskEntry = getTaskEntry(); |
| | | |
| | | ArrayList<Modification> modifications = new ArrayList<Modification>(); |
| | | ArrayList<Modification> modifications = new ArrayList<>(); |
| | | modifications.add(new Modification(ModificationType.REPLACE, |
| | | Attributes.create(name, value))); |
| | | |
| | |
| | | String startTimeStr = StaticUtils.formatDateTimeString(d); |
| | | Attribute attr = Attributes.create(ATTR_TASK_ACTUAL_START_TIME, |
| | | startTimeStr); |
| | | ArrayList<Attribute> attrList = new ArrayList<Attribute>(1); |
| | | ArrayList<Attribute> attrList = new ArrayList<>(1); |
| | | attrList.add(attr); |
| | | taskEntry.putAttribute(attr.getAttributeType(), attrList); |
| | | } |
| | |
| | | Date d = new Date(completionTime); |
| | | Attribute attr = Attributes.create(ATTR_TASK_COMPLETION_TIME, |
| | | dateFormat.format(d)); |
| | | ArrayList<Attribute> attrList = new ArrayList<Attribute>(1); |
| | | ArrayList<Attribute> attrList = new ArrayList<>(1); |
| | | attrList.add(attr); |
| | | taskEntry.putAttribute(attr.getAttributeType(), attrList); |
| | | } |
| | |
| | | */ |
| | | public final List<LocalizableMessage> getLogMessages() |
| | | { |
| | | List<LocalizableMessage> msgList = new ArrayList<LocalizableMessage>(); |
| | | List<LocalizableMessage> msgList = new ArrayList<>(); |
| | | for(String logString : logMessages) { |
| | | // TODO: a better job or recreating the message |
| | | msgList.add(LocalizableMessage.raw(logString)); |
| | |
| | | ByteString value = ByteString.valueOf(messageString); |
| | | if (attrList == null) |
| | | { |
| | | attrList = new ArrayList<Attribute>(); |
| | | attrList = new ArrayList<>(); |
| | | attrList.add(Attributes.create(type, value)); |
| | | taskEntry.putAttribute(type, attrList); |
| | | } |
| | |
| | | { |
| | | if (DirectoryServer.mailServerConfigured()) |
| | | { |
| | | LinkedHashSet<String> recipients = new LinkedHashSet<String>(notifyOnCompletion); |
| | | LinkedHashSet<String> recipients = new LinkedHashSet<>(notifyOnCompletion); |
| | | if (! TaskState.isSuccessful(taskState)) |
| | | { |
| | | recipients.addAll(notifyOnError); |
| | |
| | | private static long MAX_SLEEP_TIME = 5000; |
| | | |
| | | |
| | | |
| | | /** Indicates whether the scheduler is currently running. */ |
| | | private boolean isRunning; |
| | | |
| | | /** Indicates whether a request has been received to stop the scheduler. */ |
| | | private boolean stopRequested; |
| | | |
| | | /** The entry that serves as the immediate parent for recurring tasks. */ |
| | | private Entry recurringTaskParentEntry; |
| | | |
| | | /** The entry that serves as the immediate parent for scheduled tasks. */ |
| | | private Entry scheduledTaskParentEntry; |
| | | |
| | | /** The top-level entry at the root of the task tree. */ |
| | | private Entry taskRootEntry; |
| | | |
| | | /** The set of recurring tasks defined in the server. */ |
| | | private HashMap<String,RecurringTask> recurringTasks; |
| | | |
| | | private final HashMap<String,RecurringTask> recurringTasks = new HashMap<>(); |
| | | /** The set of tasks associated with this scheduler. */ |
| | | private HashMap<String,Task> tasks; |
| | | |
| | | private final HashMap<String,Task> tasks = new HashMap<>(); |
| | | /** The set of worker threads that are actively busy processing tasks. */ |
| | | private HashMap<String,TaskThread> activeThreads; |
| | | private final HashMap<String,TaskThread> activeThreads = new HashMap<>(); |
| | | |
| | | /** The thread ID for the next task thread to be created;. */ |
| | | private int nextThreadID; |
| | | private int nextThreadID = 1; |
| | | |
| | | /** The set of worker threads that may be used to process tasks. */ |
| | | private LinkedList<TaskThread> idleThreads; |
| | | private final LinkedList<TaskThread> idleThreads = new LinkedList<>(); |
| | | |
| | | /** The lock used to provide threadsafe access to the scheduler. */ |
| | | private final ReentrantLock schedulerLock; |
| | | private final ReentrantLock schedulerLock = new ReentrantLock(); |
| | | |
| | | /** The task backend with which this scheduler is associated. */ |
| | | private TaskBackend taskBackend; |
| | |
| | | private Thread schedulerThread; |
| | | |
| | | /** The set of recently-completed tasks that need to be retained. */ |
| | | private TreeSet<Task> completedTasks; |
| | | |
| | | private final TreeSet<Task> completedTasks = new TreeSet<>(); |
| | | /** The set of tasks that have been scheduled but not yet arrived. */ |
| | | private TreeSet<Task> pendingTasks; |
| | | |
| | | private final TreeSet<Task> pendingTasks = new TreeSet<>(); |
| | | /** The set of tasks that are currently running. */ |
| | | private TreeSet<Task> runningTasks; |
| | | private final TreeSet<Task> runningTasks = new TreeSet<>(); |
| | | |
| | | private ServerContext serverContext; |
| | | |
| | |
| | | super("Task Scheduler Thread"); |
| | | |
| | | this.serverContext = serverContext; |
| | | |
| | | this.taskBackend = taskBackend; |
| | | |
| | | schedulerLock = new ReentrantLock(); |
| | | isRunning = false; |
| | | stopRequested = false; |
| | | schedulerThread = null; |
| | | nextThreadID = 1; |
| | | recurringTasks = new HashMap<String,RecurringTask>(); |
| | | tasks = new HashMap<String,Task>(); |
| | | activeThreads = new HashMap<String,TaskThread>(); |
| | | idleThreads = new LinkedList<TaskThread>(); |
| | | completedTasks = new TreeSet<Task>(); |
| | | pendingTasks = new TreeSet<Task>(); |
| | | runningTasks = new TreeSet<Task>(); |
| | | taskRootEntry = null; |
| | | recurringTaskParentEntry = null; |
| | | scheduledTaskParentEntry = null; |
| | | |
| | | DirectoryServer.registerAlertGenerator(this); |
| | | |
| | | initializeTasksFromBackingFile(); |
| | |
| | | throw new DirectoryException(ResultCode.ENTRY_ALREADY_EXISTS, message); |
| | | } |
| | | |
| | | Attribute attr = Attributes.create(ATTR_TASK_STATE, |
| | | TaskState.RECURRING.toString()); |
| | | ArrayList<Attribute> attrList = new ArrayList<Attribute>(1); |
| | | Attribute attr = Attributes.create(ATTR_TASK_STATE, TaskState.RECURRING.toString()); |
| | | ArrayList<Attribute> attrList = new ArrayList<>(1); |
| | | attrList.add(attr); |
| | | Entry recurringTaskEntry = recurringTask.getRecurringTaskEntry(); |
| | | recurringTaskEntry.putAttribute(attr.getAttributeType(), attrList); |
| | |
| | | try |
| | | { |
| | | RecurringTask recurringTask = recurringTasks.remove(recurringTaskID); |
| | | HashMap<String,Task> iterationsMap = new HashMap<String,Task>(); |
| | | HashMap<String,Task> iterationsMap = new HashMap<>(); |
| | | |
| | | for (Task t : tasks.values()) |
| | | { |
| | |
| | | { |
| | | // Grab a copy of the running threads so that we can operate on them without |
| | | // holding the lock. |
| | | LinkedList<TaskThread> threadList = new LinkedList<TaskThread>(); |
| | | LinkedList<TaskThread> threadList = new LinkedList<>(); |
| | | |
| | | schedulerLock.lock(); |
| | | |
| | | try |
| | | { |
| | | threadList.addAll(activeThreads.values()); |
| | |
| | | @Override |
| | | public LinkedHashMap<String,String> getAlerts() |
| | | { |
| | | LinkedHashMap<String,String> alerts = new LinkedHashMap<String,String>(); |
| | | LinkedHashMap<String, String> alerts = new LinkedHashMap<>(); |
| | | |
| | | alerts.put(ALERT_TYPE_CANNOT_SCHEDULE_RECURRING_ITERATION, |
| | | ALERT_DESCRIPTION_CANNOT_SCHEDULE_RECURRING_ITERATION); |
| | |
| | | return alerts; |
| | | } |
| | | } |
| | | |
| | |
| | | */ |
| | | private static LinkedHashSet<ByteString> getValueSet(boolean booleanValue) |
| | | { |
| | | LinkedHashSet<ByteString> valueSet = new LinkedHashSet<ByteString>(1); |
| | | LinkedHashSet<ByteString> valueSet = new LinkedHashSet<>(1); |
| | | valueSet.add(ByteString.valueOf( |
| | | booleanValue ? CONFIG_VALUE_TRUE : CONFIG_VALUE_FALSE)); |
| | | return valueSet; |
| | |
| | | */ |
| | | public List<String> activeValuesToStrings() |
| | | { |
| | | ArrayList<String> valueStrings = new ArrayList<String>(1); |
| | | valueStrings.add(String.valueOf(activeValue)); |
| | | |
| | | return valueStrings; |
| | | return asList(String.valueOf(activeValue)); |
| | | } |
| | | |
| | | |
| | | |
| | | /** |
| | | * Converts the set of pending values for this configuration attribute into a |
| | | * set of strings that may be stored in the configuration or represented over |
| | | * protocol. The string representation used by this method should be |
| | | * compatible with the decoding used by the <CODE>stringsToValues</CODE> |
| | | * compatible with the decoding used by the {@link #stringsToValues(List, boolean)} |
| | | * method. |
| | | * |
| | | * @return The string representations of the set of pending values for this |
| | | * configuration attribute, or <CODE>null</CODE> if there are no |
| | | * configuration attribute, or {@code null} if there are no |
| | | * pending values. |
| | | */ |
| | | public List<String> pendingValuesToStrings() |
| | | { |
| | | if (hasPendingValues()) |
| | | { |
| | | ArrayList<String> valueStrings = new ArrayList<String>(1); |
| | | valueStrings.add(String.valueOf(pendingValue)); |
| | | |
| | | return valueStrings; |
| | | return asList(String.valueOf(pendingValue)); |
| | | } |
| | | else |
| | | { |
| | | return null; |
| | | } |
| | | return null; |
| | | } |
| | | |
| | | |
| | | private List<String> asList(String s) |
| | | { |
| | | ArrayList<String> result = new ArrayList<>(1); |
| | | result.add(s); |
| | | return result; |
| | | } |
| | | |
| | | /** |
| | | * Retrieves a new configuration attribute of this type that will contain the |
| | |
| | | this.requiresAdminAction = requiresAdminAction; |
| | | |
| | | hasPendingValues = false; |
| | | activeValues = new LinkedHashSet<ByteString>(); |
| | | activeValues = new LinkedHashSet<>(); |
| | | pendingValues = activeValues; |
| | | } |
| | | |
| | |
| | | this.requiresAdminAction = requiresAdminAction; |
| | | this.hasPendingValues = false; |
| | | |
| | | if (activeValues == null) |
| | | { |
| | | this.activeValues = new LinkedHashSet<ByteString>(); |
| | | } |
| | | else |
| | | { |
| | | this.activeValues = activeValues; |
| | | } |
| | | |
| | | this.activeValues = notNull(activeValues); |
| | | this.pendingValues = this.activeValues; |
| | | } |
| | | |
| | |
| | | this.requiresAdminAction = requiresAdminAction; |
| | | this.hasPendingValues = hasPendingValues; |
| | | |
| | | if (activeValues == null) |
| | | { |
| | | this.activeValues = new LinkedHashSet<ByteString>(); |
| | | } |
| | | else |
| | | { |
| | | this.activeValues = activeValues; |
| | | } |
| | | this.activeValues = notNull(activeValues); |
| | | |
| | | if (! hasPendingValues) |
| | | if (!hasPendingValues) |
| | | { |
| | | this.pendingValues = this.activeValues; |
| | | } |
| | | else |
| | | { |
| | | if (pendingValues == null) |
| | | { |
| | | this.pendingValues = new LinkedHashSet<ByteString>(); |
| | | } |
| | | else |
| | | { |
| | | this.pendingValues = pendingValues; |
| | | } |
| | | this.pendingValues = notNull(pendingValues); |
| | | } |
| | | } |
| | | |
| | |
| | | { |
| | | if (requiresAdminAction) |
| | | { |
| | | if (values == null) |
| | | { |
| | | pendingValues = new LinkedHashSet<ByteString>(); |
| | | } |
| | | else |
| | | { |
| | | pendingValues = values; |
| | | } |
| | | pendingValues = notNull(values); |
| | | |
| | | hasPendingValues = true; |
| | | } |
| | | else |
| | | { |
| | | if (values == null) |
| | | { |
| | | activeValues = new LinkedHashSet<ByteString>(); |
| | | } |
| | | else |
| | | { |
| | | activeValues = values; |
| | | } |
| | | activeValues = notNull(values); |
| | | |
| | | pendingValues = activeValues; |
| | | hasPendingValues = false; |
| | |
| | | } |
| | | } |
| | | |
| | | |
| | | private LinkedHashSet<ByteString> notNull(LinkedHashSet<ByteString> values) |
| | | { |
| | | return values != null ? values : new LinkedHashSet<ByteString>(); |
| | | } |
| | | |
| | | /** |
| | | * Specifies the set of active values for this configuration attribute. No |
| | |
| | | // Create a temporary set of values that we will use for this change. It |
| | | // may not actually be applied if an error occurs for some reason. |
| | | final LinkedHashSet<ByteString> vals = getValues(); |
| | | LinkedHashSet<ByteString> tempValues = new LinkedHashSet<ByteString>(vals.size() + numValues); |
| | | LinkedHashSet<ByteString> tempValues = new LinkedHashSet<>(vals.size() + numValues); |
| | | |
| | | // Iterate through all of the provided values. Make sure that each is |
| | | // acceptable for use and that it is not already contained in the value set. |
| | |
| | | { |
| | | // Create a temporary set of values that we will use for this change. It |
| | | // may not actually be applied if an error occurs for some reason. |
| | | LinkedHashSet<ByteString> tempValues = new LinkedHashSet<ByteString>(getValues()); |
| | | LinkedHashSet<ByteString> tempValues = new LinkedHashSet<>(getValues()); |
| | | |
| | | // Iterate through all the provided values and make sure that they are |
| | | // contained in the list. If not, then throw an exception. If so, then |
| | |
| | | { |
| | | if (pendingValues == null) |
| | | { |
| | | pendingValues = new LinkedHashSet<ByteString>(); |
| | | pendingValues = new LinkedHashSet<>(); |
| | | } |
| | | else |
| | | { |
| | |
| | | { |
| | | if (values == null) |
| | | { |
| | | values = new LinkedHashSet<ByteString>(); |
| | | values = new LinkedHashSet<>(); |
| | | } |
| | | |
| | | activeValues = values; |
| | |
| | | * |
| | | * |
| | | * Copyright 2006-2008 Sun Microsystems, Inc. |
| | | * Portions Copyright 2014 ForgeRock AS |
| | | * Portions Copyright 2014-2015 ForgeRock AS |
| | | */ |
| | | package org.opends.server.config; |
| | | |
| | |
| | | this.entry = entry; |
| | | this.parent = parent; |
| | | |
| | | children = new ConcurrentHashMap<DN,ConfigEntry>(); |
| | | addListeners = new CopyOnWriteArrayList<ConfigAddListener>(); |
| | | changeListeners = new CopyOnWriteArrayList<ConfigChangeListener>(); |
| | | deleteListeners = new CopyOnWriteArrayList<ConfigDeleteListener>(); |
| | | children = new ConcurrentHashMap<>(); |
| | | addListeners = new CopyOnWriteArrayList<>(); |
| | | changeListeners = new CopyOnWriteArrayList<>(); |
| | | deleteListeners = new CopyOnWriteArrayList<>(); |
| | | entryLock = new Object(); |
| | | } |
| | | |
| | |
| | | DirectoryServer.getDefaultAttributeType(name, attribute.getSyntax()); |
| | | } |
| | | |
| | | List<Attribute> attrs = new ArrayList<Attribute>(2); |
| | | List<Attribute> attrs = new ArrayList<>(2); |
| | | AttributeBuilder builder = new AttributeBuilder(attrType, name); |
| | | builder.addAll(attribute.getActiveValues()); |
| | | attrs.add(builder.toAttribute()); |
| | |
| | | super(name, description, isRequired, isMultiValued, requiresAdminAction); |
| | | |
| | | |
| | | activeValues = new ArrayList<DN>(); |
| | | activeValues = new ArrayList<>(); |
| | | pendingValues = activeValues; |
| | | } |
| | | |
| | |
| | | |
| | | if (value == null) |
| | | { |
| | | activeValues = new ArrayList<DN>(); |
| | | activeValues = new ArrayList<>(); |
| | | } |
| | | else |
| | | { |
| | | activeValues = new ArrayList<DN>(1); |
| | | activeValues = new ArrayList<>(1); |
| | | activeValues.add(value); |
| | | } |
| | | |
| | |
| | | |
| | | if (activeValues == null) |
| | | { |
| | | this.activeValues = new ArrayList<DN>(); |
| | | this.activeValues = new ArrayList<>(); |
| | | } |
| | | else |
| | | { |
| | |
| | | |
| | | if (requiresAdminAction()) |
| | | { |
| | | pendingValues = new ArrayList<DN>(1); |
| | | pendingValues = new ArrayList<>(1); |
| | | pendingValues.add(value); |
| | | setPendingValues(getValueSet(value)); |
| | | } |
| | |
| | | if (requiresAdminAction()) |
| | | { |
| | | setPendingValues(new LinkedHashSet<ByteString>(0)); |
| | | pendingValues = new ArrayList<DN>(); |
| | | pendingValues = new ArrayList<>(); |
| | | } |
| | | else |
| | | { |
| | |
| | | |
| | | // Iterate through all the provided values, make sure that they are |
| | | // acceptable, and build the value set. |
| | | LinkedHashSet<ByteString> valueSet = new LinkedHashSet<ByteString>(numValues); |
| | | LinkedHashSet<ByteString> valueSet = new LinkedHashSet<>(numValues); |
| | | for (DN value : values) |
| | | { |
| | | if (value == null) |
| | |
| | | LinkedHashSet<ByteString> valueSet; |
| | | if (value == null) |
| | | { |
| | | valueSet = new LinkedHashSet<ByteString>(0); |
| | | valueSet = new LinkedHashSet<>(0); |
| | | } |
| | | else |
| | | { |
| | | valueSet = new LinkedHashSet<ByteString>(1); |
| | | valueSet = new LinkedHashSet<>(1); |
| | | valueSet.add(ByteString.valueOf(value.toString())); |
| | | } |
| | | return valueSet; |
| | |
| | | return null; |
| | | } |
| | | |
| | | LinkedHashSet<ByteString> valueSet = new LinkedHashSet<ByteString>(values.size()); |
| | | LinkedHashSet<ByteString> valueSet = new LinkedHashSet<>(values.size()); |
| | | for (DN value : values) |
| | | { |
| | | valueSet.add(ByteString.valueOf(value.toString())); |
| | |
| | | } |
| | | else |
| | | { |
| | | return new LinkedHashSet<ByteString>(); |
| | | return new LinkedHashSet<>(); |
| | | } |
| | | } |
| | | |
| | |
| | | } |
| | | |
| | | |
| | | LinkedHashSet<ByteString> valueSet = new LinkedHashSet<ByteString>(numValues); |
| | | LinkedHashSet<ByteString> valueSet = new LinkedHashSet<>(numValues); |
| | | for (String valueString : valueStrings) |
| | | { |
| | | if (valueString == null) |
| | |
| | | */ |
| | | public List<String> activeValuesToStrings() |
| | | { |
| | | ArrayList<String> valueStrings = new ArrayList<String>(activeValues.size()); |
| | | ArrayList<String> valueStrings = new ArrayList<>(activeValues.size()); |
| | | for (DN dn : activeValues) |
| | | { |
| | | valueStrings.add(dn.toString()); |
| | |
| | | { |
| | | if (hasPendingValues()) |
| | | { |
| | | ArrayList<String> valueStrings = |
| | | new ArrayList<String>(pendingValues.size()); |
| | | ArrayList<String> valueStrings = new ArrayList<>(pendingValues.size()); |
| | | for (DN dn : pendingValues) |
| | | { |
| | | valueStrings.add(dn.toString()); |
| | | } |
| | | |
| | | return valueStrings; |
| | | } |
| | | else |
| | |
| | | else |
| | | { |
| | | // This is fine. The pending value set can be empty. |
| | | pendingValues = new ArrayList<DN>(0); |
| | | pendingValues = new ArrayList<>(0); |
| | | } |
| | | } |
| | | else |
| | |
| | | throw new ConfigException(message); |
| | | } |
| | | |
| | | pendingValues = new ArrayList<DN>(numValues); |
| | | pendingValues = new ArrayList<>(numValues); |
| | | for (ByteString v : a) |
| | | { |
| | | DN dn; |
| | |
| | | else |
| | | { |
| | | // This is fine. The active value set can be empty. |
| | | activeValues = new ArrayList<DN>(0); |
| | | activeValues = new ArrayList<>(0); |
| | | } |
| | | } |
| | | else |
| | |
| | | throw new ConfigException(message); |
| | | } |
| | | |
| | | activeValues = new ArrayList<DN>(numValues); |
| | | activeValues = new ArrayList<>(numValues); |
| | | for (ByteString v : a) |
| | | { |
| | | DN dn; |
| | |
| | | |
| | | if (componentType.equals(DN.class.getName())) |
| | | { |
| | | ArrayList<DN> dnList = new ArrayList<DN>(length); |
| | | ArrayList<DN> dnList = new ArrayList<>(length); |
| | | for (int i=0; i < length; i++) |
| | | { |
| | | dnList.add((DN) Array.get(value, i)); |
| | |
| | | { |
| | | try |
| | | { |
| | | ArrayList<DN> values = new ArrayList<DN>(length); |
| | | ArrayList<DN> values = new ArrayList<>(length); |
| | | for (int i=0; i < length; i++) |
| | | { |
| | | String valueStr = (String) Array.get(value, i); |
| | |
| | | this.hasUpperBound = hasUpperBound; |
| | | this.upperBound = upperBound; |
| | | |
| | | activeValues = new ArrayList<Long>(); |
| | | activeValues = new ArrayList<>(); |
| | | pendingValues = activeValues; |
| | | } |
| | | |
| | |
| | | this.hasUpperBound = hasUpperBound; |
| | | this.upperBound = upperBound; |
| | | |
| | | activeValues = new ArrayList<Long>(1); |
| | | activeValues = new ArrayList<>(1); |
| | | activeValues.add(value); |
| | | |
| | | pendingValues = activeValues; |
| | |
| | | |
| | | if (activeValues == null) |
| | | { |
| | | this.activeValues = new ArrayList<Long>(); |
| | | this.activeValues = new ArrayList<>(); |
| | | } |
| | | else |
| | | { |
| | |
| | | |
| | | if (requiresAdminAction()) |
| | | { |
| | | pendingValues = new ArrayList<Long>(1); |
| | | pendingValues = new ArrayList<>(1); |
| | | pendingValues.add(value); |
| | | setPendingValues(getValueSet(value)); |
| | | } |
| | |
| | | if (requiresAdminAction()) |
| | | { |
| | | setPendingValues(new LinkedHashSet<ByteString>(0)); |
| | | pendingValues = new ArrayList<Long>(); |
| | | pendingValues = new ArrayList<>(); |
| | | } |
| | | else |
| | | { |
| | |
| | | |
| | | // Iterate through all the provided values, make sure that they are |
| | | // acceptable, and build the value set. |
| | | LinkedHashSet<ByteString> valueSet = new LinkedHashSet<ByteString>(numValues); |
| | | LinkedHashSet<ByteString> valueSet = new LinkedHashSet<>(numValues); |
| | | for (long value : values) |
| | | { |
| | | if (hasLowerBound && (value < lowerBound)) |
| | |
| | | */ |
| | | private static LinkedHashSet<ByteString> getValueSet(long value) |
| | | { |
| | | LinkedHashSet<ByteString> valueSet = new LinkedHashSet<ByteString>(1); |
| | | LinkedHashSet<ByteString> valueSet = new LinkedHashSet<>(1); |
| | | valueSet.add(ByteString.valueOf(String.valueOf(value))); |
| | | return valueSet; |
| | | } |
| | |
| | | return null; |
| | | } |
| | | |
| | | LinkedHashSet<ByteString> valueSet = new LinkedHashSet<ByteString>(values.size()); |
| | | LinkedHashSet<ByteString> valueSet = new LinkedHashSet<>(values.size()); |
| | | for (long value : values) |
| | | { |
| | | valueSet.add(ByteString.valueOf(String.valueOf(value))); |
| | |
| | | } |
| | | else |
| | | { |
| | | return new LinkedHashSet<ByteString>(); |
| | | return new LinkedHashSet<>(); |
| | | } |
| | | } |
| | | |
| | |
| | | } |
| | | |
| | | |
| | | LinkedHashSet<ByteString> valueSet = new LinkedHashSet<ByteString>(numValues); |
| | | LinkedHashSet<ByteString> valueSet = new LinkedHashSet<>(numValues); |
| | | for (String valueString : valueStrings) |
| | | { |
| | | long longValue; |
| | |
| | | */ |
| | | public List<String> activeValuesToStrings() |
| | | { |
| | | ArrayList<String> valueStrings = |
| | | new ArrayList<String>(activeValues.size()); |
| | | for (long l : activeValues) |
| | | { |
| | | valueStrings.add(String.valueOf(l)); |
| | | } |
| | | |
| | | return valueStrings; |
| | | return toListOfString(activeValues); |
| | | } |
| | | |
| | | |
| | |
| | | { |
| | | if (hasPendingValues()) |
| | | { |
| | | ArrayList<String> valueStrings = |
| | | new ArrayList<String>(pendingValues.size()); |
| | | for (long l : pendingValues) |
| | | { |
| | | valueStrings.add(String.valueOf(l)); |
| | | } |
| | | |
| | | return valueStrings; |
| | | return toListOfString(pendingValues); |
| | | } |
| | | else |
| | | { |
| | |
| | | |
| | | |
| | | /** |
| | | * @param pendingValues2 |
| | | * @return |
| | | */ |
| | | private List<String> toListOfString(List<Long> pendingValues2) |
| | | { |
| | | ArrayList<String> valueStrings = new ArrayList<>(pendingValues2.size()); |
| | | for (long l : pendingValues2) |
| | | { |
| | | valueStrings.add(String.valueOf(l)); |
| | | } |
| | | return valueStrings; |
| | | } |
| | | |
| | | |
| | | |
| | | /** |
| | | * Retrieves a new configuration attribute of this type that will contain the |
| | | * values from the provided attribute. |
| | | * |
| | |
| | | else |
| | | { |
| | | // This is fine. The pending value set can be empty. |
| | | pendingValues = new ArrayList<Long>(0); |
| | | pendingValues = new ArrayList<>(0); |
| | | } |
| | | } |
| | | else |
| | |
| | | throw new ConfigException(message); |
| | | } |
| | | |
| | | pendingValues = new ArrayList<Long>(numValues); |
| | | pendingValues = new ArrayList<>(numValues); |
| | | for (ByteString v : a) |
| | | { |
| | | long longValue; |
| | |
| | | else |
| | | { |
| | | // This is fine. The active value set can be empty. |
| | | activeValues = new ArrayList<Long>(0); |
| | | activeValues = new ArrayList<>(0); |
| | | } |
| | | } |
| | | else |
| | |
| | | throw new ConfigException(message); |
| | | } |
| | | |
| | | activeValues = new ArrayList<Long>(numValues); |
| | | activeValues = new ArrayList<>(numValues); |
| | | for (ByteString v : a) |
| | | { |
| | | long longValue; |
| | |
| | | { |
| | | if (componentType.equals(Long.class.getName())) |
| | | { |
| | | ArrayList<Long> values = new ArrayList<Long>(); |
| | | ArrayList<Long> values = new ArrayList<>(); |
| | | |
| | | for (int i=0; i < length; i++) |
| | | { |
| | |
| | | } |
| | | else if (componentType.equals(Integer.class.getName())) |
| | | { |
| | | ArrayList<Long> values = new ArrayList<Long>(); |
| | | ArrayList<Long> values = new ArrayList<>(); |
| | | |
| | | for (int i=0; i < length; i++) |
| | | { |
| | |
| | | } |
| | | else if (componentType.equals(String.class.getName())) |
| | | { |
| | | ArrayList<Long> values = new ArrayList<Long>(); |
| | | ArrayList<Long> values = new ArrayList<>(); |
| | | |
| | | for (int i=0; i < length; i++) |
| | | { |
| | |
| | | return null; |
| | | } |
| | | |
| | | LinkedHashSet<ByteString> valueSet = new LinkedHashSet<ByteString>(1); |
| | | LinkedHashSet<ByteString> valueSet = new LinkedHashSet<>(1); |
| | | valueSet.add(ByteString.valueOf(intValue + " " + unit)); |
| | | return valueSet; |
| | | } |
| | |
| | | } |
| | | else |
| | | { |
| | | return new LinkedHashSet<ByteString>(); |
| | | return new LinkedHashSet<>(); |
| | | } |
| | | } |
| | | |
| | |
| | | } |
| | | |
| | | |
| | | LinkedHashSet<ByteString> valueSet = new LinkedHashSet<ByteString>(numValues); |
| | | LinkedHashSet<ByteString> valueSet = new LinkedHashSet<>(numValues); |
| | | for (String valueString : valueStrings) |
| | | { |
| | | if ((valueString == null) || (valueString.length() == 0)) |
| | |
| | | */ |
| | | public List<String> activeValuesToStrings() |
| | | { |
| | | ArrayList<String> valueStrings = new ArrayList<String>(1); |
| | | ArrayList<String> valueStrings = new ArrayList<>(1); |
| | | valueStrings.add(activeIntValue + " " + activeUnit); |
| | | |
| | | return valueStrings; |
| | |
| | | { |
| | | if (hasPendingValues()) |
| | | { |
| | | ArrayList<String> valueStrings = new ArrayList<String>(1); |
| | | ArrayList<String> valueStrings = new ArrayList<>(1); |
| | | valueStrings.add(pendingIntValue + " " + pendingUnit); |
| | | |
| | | return valueStrings; |
| | | } |
| | | else |
| | |
| | | { |
| | | this.configEntryDN = configEntryDN; |
| | | |
| | | alertGenerators = new CopyOnWriteArrayList<AlertGenerator>(); |
| | | invokableComponents = new CopyOnWriteArrayList<InvokableComponent>(); |
| | | monitorProviders = |
| | | new CopyOnWriteArrayList<MonitorProvider< |
| | | ? extends MonitorProviderCfg>>(); |
| | | alertGenerators = new CopyOnWriteArrayList<>(); |
| | | invokableComponents = new CopyOnWriteArrayList<>(); |
| | | monitorProviders = new CopyOnWriteArrayList<>(); |
| | | |
| | | MBeanServer mBeanServer = DirectoryServer.getJMXMBeanServer(); |
| | | if (mBeanServer != null) |
| | |
| | | |
| | | if (iterator.hasNext()) |
| | | { |
| | | List<String> stringValues = new ArrayList<String>(); |
| | | List<String> stringValues = new ArrayList<>(); |
| | | stringValues.add(value.toString()); |
| | | |
| | | while (iterator.hasNext()) |
| | |
| | | |
| | | if (iterator.hasNext()) |
| | | { |
| | | List<String> stringValues = new ArrayList<String>(); |
| | | List<String> stringValues = new ArrayList<>(); |
| | | stringValues.add(value.toString()); |
| | | |
| | | while (iterator.hasNext()) |
| | |
| | | return new MBeanInfo(CLASS_NAME, null, null, null, null, null); |
| | | } |
| | | |
| | | List<MBeanAttributeInfo> attrs = new ArrayList<MBeanAttributeInfo>(); |
| | | for (MonitorProvider<? extends MonitorProviderCfg> monitor : |
| | | monitorProviders) |
| | | List<MBeanAttributeInfo> attrs = new ArrayList<>(); |
| | | for (MonitorProvider<? extends MonitorProviderCfg> monitor : monitorProviders) |
| | | { |
| | | for (org.opends.server.types.Attribute a : monitor.getMonitorData()) |
| | | { |
| | |
| | | } |
| | | } |
| | | |
| | | MBeanAttributeInfo[] mBeanAttributes = new MBeanAttributeInfo[attrs.size()]; |
| | | attrs.toArray(mBeanAttributes); |
| | | MBeanAttributeInfo[] mBeanAttributes = attrs.toArray(new MBeanAttributeInfo[attrs.size()]); |
| | | |
| | | |
| | | List<MBeanNotificationInfo> notifications = |
| | | new ArrayList<MBeanNotificationInfo>(); |
| | | List<MBeanNotificationInfo> notifications = new ArrayList<>(); |
| | | for (AlertGenerator generator : alertGenerators) |
| | | { |
| | | String className = generator.getClassName(); |
| | |
| | | { |
| | | String[] types = { type }; |
| | | String description = alerts.get(type); |
| | | notifications.add(new MBeanNotificationInfo(types, className, |
| | | description)); |
| | | notifications.add(new MBeanNotificationInfo(types, className, description)); |
| | | } |
| | | } |
| | | |
| | | |
| | | MBeanNotificationInfo[] mBeanNotifications = |
| | | new MBeanNotificationInfo[notifications.size()]; |
| | | MBeanNotificationInfo[] mBeanNotifications = new MBeanNotificationInfo[notifications.size()]; |
| | | notifications.toArray(mBeanNotifications); |
| | | |
| | | |
| | | List<MBeanOperationInfo> ops = new ArrayList<MBeanOperationInfo>(); |
| | | List<MBeanOperationInfo> ops = new ArrayList<>(); |
| | | for (InvokableComponent component : invokableComponents) |
| | | { |
| | | for (InvokableMethod method : component.getOperationSignatures()) |
| | |
| | | |
| | | this.allowedValues = allowedValues; |
| | | |
| | | activeValues = new ArrayList<String>(); |
| | | activeValues = new ArrayList<>(); |
| | | pendingValues = activeValues; |
| | | } |
| | | |
| | |
| | | |
| | | if (value == null) |
| | | { |
| | | activeValues = new ArrayList<String>(); |
| | | activeValues = new ArrayList<>(); |
| | | } |
| | | else |
| | | { |
| | | activeValues = new ArrayList<String>(1); |
| | | activeValues = new ArrayList<>(1); |
| | | activeValues.add(value); |
| | | } |
| | | |
| | |
| | | |
| | | if (activeValues == null) |
| | | { |
| | | this.activeValues = new ArrayList<String>(); |
| | | this.activeValues = new ArrayList<>(); |
| | | } |
| | | else |
| | | { |
| | |
| | | |
| | | if (requiresAdminAction()) |
| | | { |
| | | pendingValues = new ArrayList<String>(1); |
| | | pendingValues = new ArrayList<>(1); |
| | | pendingValues.add(value); |
| | | setPendingValues(getValueSet(value)); |
| | | } |
| | |
| | | if (requiresAdminAction()) |
| | | { |
| | | setPendingValues(new LinkedHashSet<ByteString>(0)); |
| | | pendingValues = new ArrayList<String>(); |
| | | pendingValues = new ArrayList<>(); |
| | | } |
| | | else |
| | | { |
| | |
| | | |
| | | // Iterate through all the provided values, make sure that they are |
| | | // acceptable, and build the value set. |
| | | LinkedHashSet<ByteString> valueSet = new LinkedHashSet<ByteString>(numValues); |
| | | LinkedHashSet<ByteString> valueSet = new LinkedHashSet<>(numValues); |
| | | for (String value : values) |
| | | { |
| | | if ((value == null) || (value.length() == 0)) |
| | |
| | | */ |
| | | private static LinkedHashSet<ByteString> getValueSet(String value) |
| | | { |
| | | LinkedHashSet<ByteString> valueSet = new LinkedHashSet<ByteString>(1); |
| | | LinkedHashSet<ByteString> valueSet = new LinkedHashSet<>(1); |
| | | valueSet.add(ByteString.valueOf(value)); |
| | | return valueSet; |
| | | } |
| | |
| | | return null; |
| | | } |
| | | |
| | | LinkedHashSet<ByteString> valueSet = new LinkedHashSet<ByteString>(values.size()); |
| | | LinkedHashSet<ByteString> valueSet = new LinkedHashSet<>(values.size()); |
| | | for (String value : values) |
| | | { |
| | | valueSet.add(ByteString.valueOf(value)); |
| | |
| | | LocalizableMessage message = ERR_CONFIG_ATTR_IS_REQUIRED.get(getName()); |
| | | throw new ConfigException(message); |
| | | } |
| | | return new LinkedHashSet<ByteString>(); |
| | | return new LinkedHashSet<>(); |
| | | } |
| | | |
| | | |
| | |
| | | } |
| | | |
| | | |
| | | LinkedHashSet<ByteString> valueSet = new LinkedHashSet<ByteString>(numValues); |
| | | LinkedHashSet<ByteString> valueSet = new LinkedHashSet<>(numValues); |
| | | for (String valueString : valueStrings) |
| | | { |
| | | if ((valueString == null) || (valueString.length() == 0)) |
| | |
| | | throw new ConfigException(ERR_CONFIG_ATTR_IS_REQUIRED.get(a.getName())); |
| | | } |
| | | // This is fine. The pending value set can be empty. |
| | | pendingValues = new ArrayList<String>(0); |
| | | pendingValues = new ArrayList<>(0); |
| | | } |
| | | else |
| | | { |
| | |
| | | throw new ConfigException(message); |
| | | } |
| | | |
| | | pendingValues = new ArrayList<String>(numValues); |
| | | pendingValues = new ArrayList<>(numValues); |
| | | for (ByteString v : a) |
| | | { |
| | | String lowerValue = v.toString().toLowerCase(); |
| | |
| | | throw new ConfigException(message); |
| | | } |
| | | // This is fine. The active value set can be empty. |
| | | activeValues = new ArrayList<String>(0); |
| | | activeValues = new ArrayList<>(0); |
| | | } |
| | | else |
| | | { |
| | |
| | | throw new ConfigException(message); |
| | | } |
| | | |
| | | activeValues = new ArrayList<String>(numValues); |
| | | activeValues = new ArrayList<>(numValues); |
| | | for (ByteString v : a) |
| | | { |
| | | String lowerValue = v.toString().toLowerCase(); |
| | |
| | | { |
| | | try |
| | | { |
| | | ArrayList<String> values = new ArrayList<String>(length); |
| | | ArrayList<String> values = new ArrayList<>(length); |
| | | |
| | | for (int i=0; i < length; i++) |
| | | { |
| | |
| | | super(name, description, false, isMultiValued, false); |
| | | |
| | | |
| | | values = new ArrayList<String>(); |
| | | values = new ArrayList<>(); |
| | | } |
| | | |
| | | |
| | |
| | | |
| | | if (value == null) |
| | | { |
| | | values = new ArrayList<String>(0); |
| | | values = new ArrayList<>(0); |
| | | } |
| | | else |
| | | { |
| | | values = new ArrayList<String>(1); |
| | | values = new ArrayList<>(1); |
| | | values.add(value); |
| | | } |
| | | } |
| | |
| | | |
| | | if (values == null) |
| | | { |
| | | this.values = new ArrayList<String>(); |
| | | this.values = new ArrayList<>(); |
| | | } |
| | | else |
| | | { |
| | |
| | | */ |
| | | private static LinkedHashSet<ByteString> getValueSet(String value) |
| | | { |
| | | LinkedHashSet<ByteString> valueSet = new LinkedHashSet<ByteString>(1); |
| | | LinkedHashSet<ByteString> valueSet = new LinkedHashSet<>(1); |
| | | valueSet.add(ByteString.valueOf(value)); |
| | | return valueSet; |
| | | } |
| | |
| | | { |
| | | if ((valueStrings == null) || valueStrings.isEmpty()) |
| | | { |
| | | return new LinkedHashSet<ByteString>(); |
| | | return new LinkedHashSet<>(); |
| | | } |
| | | return toByteStrings(valueStrings); |
| | | } |
| | | |
| | | private static LinkedHashSet<ByteString> toByteStrings(List<String> strings) |
| | | { |
| | | LinkedHashSet<ByteString> valueSet = new LinkedHashSet<ByteString>(strings.size()); |
| | | LinkedHashSet<ByteString> valueSet = new LinkedHashSet<>(strings.size()); |
| | | for (String valueString : strings) |
| | | { |
| | | valueSet.add(ByteString.valueOf(valueString)); |
| | |
| | | super(name, description, isRequired, isMultiValued, requiresAdminAction); |
| | | |
| | | |
| | | activeValues = new ArrayList<String>(); |
| | | activeValues = new ArrayList<>(); |
| | | pendingValues = activeValues; |
| | | } |
| | | |
| | |
| | | |
| | | if (value == null) |
| | | { |
| | | activeValues = new ArrayList<String>(); |
| | | activeValues = new ArrayList<>(); |
| | | } |
| | | else |
| | | { |
| | | activeValues = new ArrayList<String>(1); |
| | | activeValues = new ArrayList<>(1); |
| | | activeValues.add(value); |
| | | } |
| | | |
| | |
| | | |
| | | if (activeValues == null) |
| | | { |
| | | this.activeValues = new ArrayList<String>(); |
| | | this.activeValues = new ArrayList<>(); |
| | | } |
| | | else |
| | | { |
| | |
| | | |
| | | if (requiresAdminAction()) |
| | | { |
| | | pendingValues = new ArrayList<String>(1); |
| | | pendingValues = new ArrayList<>(1); |
| | | pendingValues.add(value); |
| | | setPendingValues(getValueSet(value)); |
| | | } |
| | |
| | | if (requiresAdminAction()) |
| | | { |
| | | setPendingValues(new LinkedHashSet<ByteString>(0)); |
| | | pendingValues = new ArrayList<String>(); |
| | | pendingValues = new ArrayList<>(); |
| | | } |
| | | else |
| | | { |
| | |
| | | |
| | | // Iterate through all the provided values, make sure that they are |
| | | // acceptable, and build the value set. |
| | | LinkedHashSet<ByteString> valueSet = new LinkedHashSet<ByteString>(numValues); |
| | | LinkedHashSet<ByteString> valueSet = new LinkedHashSet<>(numValues); |
| | | for (String value : values) |
| | | { |
| | | if ((value == null) || (value.length() == 0)) |
| | |
| | | */ |
| | | private static LinkedHashSet<ByteString> getValueSet(String value) |
| | | { |
| | | LinkedHashSet<ByteString> valueSet = new LinkedHashSet<ByteString>(1); |
| | | LinkedHashSet<ByteString> valueSet = new LinkedHashSet<>(1); |
| | | valueSet.add(ByteString.valueOf(value)); |
| | | return valueSet; |
| | | } |
| | |
| | | return null; |
| | | } |
| | | |
| | | LinkedHashSet<ByteString> valueSet = new LinkedHashSet<ByteString>(values.size()); |
| | | LinkedHashSet<ByteString> valueSet = new LinkedHashSet<>(values.size()); |
| | | for (String value : values) |
| | | { |
| | | valueSet.add(ByteString.valueOf(value)); |
| | |
| | | } |
| | | else |
| | | { |
| | | return new LinkedHashSet<ByteString>(); |
| | | return new LinkedHashSet<>(); |
| | | } |
| | | } |
| | | |
| | |
| | | } |
| | | |
| | | |
| | | LinkedHashSet<ByteString> valueSet = new LinkedHashSet<ByteString>(numValues); |
| | | LinkedHashSet<ByteString> valueSet = new LinkedHashSet<>(numValues); |
| | | for (String valueString : valueStrings) |
| | | { |
| | | if ((valueString == null) || (valueString.length() == 0)) |
| | |
| | | else |
| | | { |
| | | // This is fine. The pending value set can be empty. |
| | | pendingValues = new ArrayList<String>(0); |
| | | pendingValues = new ArrayList<>(0); |
| | | } |
| | | } |
| | | else |
| | |
| | | throw new ConfigException(message); |
| | | } |
| | | |
| | | pendingValues = new ArrayList<String>(numValues); |
| | | pendingValues = new ArrayList<>(numValues); |
| | | for (ByteString v : a) |
| | | { |
| | | pendingValues.add(v.toString()); |
| | |
| | | else |
| | | { |
| | | // This is fine. The active value set can be empty. |
| | | activeValues = new ArrayList<String>(0); |
| | | activeValues = new ArrayList<>(0); |
| | | } |
| | | } |
| | | else |
| | |
| | | throw new ConfigException(message); |
| | | } |
| | | |
| | | activeValues = new ArrayList<String>(numValues); |
| | | activeValues = new ArrayList<>(numValues); |
| | | for (ByteString v : a) |
| | | { |
| | | activeValues.add(v.toString()); |
| | |
| | | { |
| | | try |
| | | { |
| | | ArrayList<String> values = new ArrayList<String>(length); |
| | | ArrayList<String> values = new ArrayList<>(length); |
| | | |
| | | for (int i=0; i < length; i++) |
| | | { |
| | |
| | | } |
| | | //There is an sequence containing an attribute list, try to decode it. |
| | | if(reader.hasNextElement()) { |
| | | attrs = new LinkedList<AttributeType>(); |
| | | attrs = new LinkedList<>(); |
| | | reader.readStartSequence(); |
| | | while(reader.hasNextElement()) { |
| | | //Decode as an octet string. |
| | |
| | | } |
| | | |
| | | ASN1Reader reader = ASN1.getReader(value); |
| | | LinkedHashSet<String> rawAttributes = new LinkedHashSet<String>(); |
| | | LinkedHashSet<String> rawAttributes = new LinkedHashSet<>(); |
| | | try |
| | | { |
| | | reader.readStartSequence(); |
| | |
| | | super(OID_LDAP_READENTRY_POSTREAD, isCritical); |
| | | if (rawAttributes == null) |
| | | { |
| | | this.rawAttributes = new LinkedHashSet<String>(0); |
| | | this.rawAttributes = new LinkedHashSet<>(0); |
| | | } |
| | | else |
| | | { |
| | |
| | | super(oid, isCritical); |
| | | if (rawAttributes == null) |
| | | { |
| | | this.rawAttributes = new LinkedHashSet<String>(0); |
| | | this.rawAttributes = new LinkedHashSet<>(0); |
| | | } |
| | | else |
| | | { |
| | |
| | | } |
| | | |
| | | ASN1Reader reader = ASN1.getReader(value); |
| | | LinkedHashSet<String> rawAttributes = new LinkedHashSet<String>(); |
| | | LinkedHashSet<String> rawAttributes = new LinkedHashSet<>(); |
| | | try |
| | | { |
| | | reader.readStartSequence(); |
| | |
| | | super(OID_LDAP_READENTRY_PREREAD, isCritical); |
| | | if (rawAttributes == null) |
| | | { |
| | | this.rawAttributes = new LinkedHashSet<String>(0); |
| | | this.rawAttributes = new LinkedHashSet<>(0); |
| | | } |
| | | else |
| | | { |
| | |
| | | throw new DirectoryException(ResultCode.PROTOCOL_ERROR, message); |
| | | } |
| | | |
| | | filters = new ArrayList<MatchedValuesFilter>(); |
| | | filters = new ArrayList<>(); |
| | | while(reader.hasNextElement()) |
| | | { |
| | | filters.add(MatchedValuesFilter.decode(reader)); |
| | |
| | | { |
| | | if(subAny == null) |
| | | { |
| | | subAny = new ArrayList<ByteString>(); |
| | | subAny = new ArrayList<>(); |
| | | } |
| | | subAny.add(reader.readOctetString()); |
| | | } |
| | |
| | | |
| | | |
| | | /** A lookup table for resolving an error type from its integer value. */ |
| | | private static final Map<Integer, PasswordPolicyErrorType> TABLE; |
| | | private static final Map<Integer, PasswordPolicyErrorType> TABLE = new HashMap<>(); |
| | | static |
| | | { |
| | | TABLE = new HashMap<Integer, PasswordPolicyErrorType>(); |
| | | |
| | | for (PasswordPolicyErrorType type : PasswordPolicyErrorType |
| | | .values()) |
| | | for (PasswordPolicyErrorType type : PasswordPolicyErrorType.values()) |
| | | { |
| | | TABLE.put(type.value, type); |
| | | TABLE.put(type.value, type); |
| | |
| | | |
| | | |
| | | /** A lookup table for resolving a warning type from its BER type. */ |
| | | private static final Map<Byte, PasswordPolicyWarningType> TABLE; |
| | | private static final Map<Byte, PasswordPolicyWarningType> TABLE = new HashMap<>(); |
| | | static |
| | | { |
| | | TABLE = new HashMap<Byte, PasswordPolicyWarningType>(); |
| | | |
| | | for (PasswordPolicyWarningType value : PasswordPolicyWarningType |
| | | .values()) |
| | | for (PasswordPolicyWarningType value : PasswordPolicyWarningType.values()) |
| | | { |
| | | TABLE.put(value.type, value); |
| | | TABLE.put(value.type, value); |
| | |
| | | public static Set<PersistentSearchChangeType> intToTypes(int intValue) |
| | | throws LDAPException |
| | | { |
| | | Set<PersistentSearchChangeType> changeTypes = |
| | | new HashSet<PersistentSearchChangeType>(4); |
| | | Set<PersistentSearchChangeType> changeTypes = new HashSet<>(4); |
| | | |
| | | switch (intValue) |
| | | { |
| | |
| | | throw new DirectoryException(ResultCode.PROTOCOL_ERROR, message); |
| | | } |
| | | |
| | | ArrayList<SortKey> sortKeys = new ArrayList<SortKey>(); |
| | | ArrayList<SortKey> sortKeys = new ArrayList<>(); |
| | | while(reader.hasNextElement()) |
| | | { |
| | | reader.readStartSequence(); |
| | |
| | | |
| | | StringTokenizer tokenizer = new StringTokenizer(sortOrderString, ","); |
| | | |
| | | decodedKeyList = new ArrayList<String[]>(); |
| | | decodedKeyList = new ArrayList<>(); |
| | | while (tokenizer.hasMoreTokens()) |
| | | { |
| | | String token = tokenizer.nextToken().trim(); |
| | |
| | | |
| | | private SortOrder decodeSortOrderFromString() throws DirectoryException |
| | | { |
| | | ArrayList<SortKey> sortKeys = new ArrayList<SortKey>(); |
| | | ArrayList<SortKey> sortKeys = new ArrayList<>(); |
| | | for(String[] decodedKey : decodedKeyList) |
| | | { |
| | | AttributeType attrType = |
| | |
| | | @Override |
| | | public LinkedHashMap<String,String> getAlerts() |
| | | { |
| | | LinkedHashMap<String,String> alerts = new LinkedHashMap<String,String>(); |
| | | LinkedHashMap<String,String> alerts = new LinkedHashMap<>(); |
| | | |
| | | alerts.put(ALERT_TYPE_ACCESS_CONTROL_DISABLED, |
| | | ALERT_DESCRIPTION_ACCESS_CONTROL_DISABLED); |
| | |
| | | } |
| | | else |
| | | { |
| | | List<LocalizableMessage> unacceptableReasons = new ArrayList<LocalizableMessage>(); |
| | | List<LocalizableMessage> unacceptableReasons = new ArrayList<>(); |
| | | if (!provider.isConfigurationAcceptable(configuration, unacceptableReasons)) |
| | | { |
| | | String reasons = Utils.joinAsString(". ", unacceptableReasons); |
| | |
| | | public AccountStatusNotificationHandlerConfigManager(ServerContext serverContext) |
| | | { |
| | | this.serverContext = serverContext; |
| | | notificationHandlers = new ConcurrentHashMap<DN,AccountStatusNotificationHandler>(); |
| | | notificationHandlers = new ConcurrentHashMap<>(); |
| | | } |
| | | |
| | | |
| | |
| | | } |
| | | else |
| | | { |
| | | List<LocalizableMessage> unacceptableReasons = new ArrayList<LocalizableMessage>(); |
| | | List<LocalizableMessage> unacceptableReasons = new ArrayList<>(); |
| | | if (!notificationHandler.isConfigurationAcceptable(configuration, |
| | | unacceptableReasons)) |
| | | { |
| | |
| | | * |
| | | * |
| | | * Copyright 2007-2010 Sun Microsystems, Inc. |
| | | * Portions Copyright 2013-2014 ForgeRock AS |
| | | * Portions Copyright 2013-2015 ForgeRock AS |
| | | */ |
| | | package org.opends.server.core; |
| | | |
| | |
| | | this.rawEntryDN = rawEntryDN; |
| | | this.rawAttributes = rawAttributes; |
| | | |
| | | responseControls = new ArrayList<Control>(); |
| | | responseControls = new ArrayList<>(); |
| | | cancelRequest = null; |
| | | entryDN = null; |
| | | userAttributes = null; |
| | |
| | | |
| | | rawEntryDN = ByteString.valueOf(entryDN.toString()); |
| | | |
| | | rawAttributes = new ArrayList<RawAttribute>(); |
| | | rawAttributes = new ArrayList<>(); |
| | | |
| | | ArrayList<ByteString> ocValues = new ArrayList<ByteString>(); |
| | | ArrayList<ByteString> ocValues = new ArrayList<>(); |
| | | for (String s : objectClasses.values()) |
| | | { |
| | | ocValues.add(ByteString.valueOf(s)); |
| | |
| | | } |
| | | } |
| | | |
| | | responseControls = new ArrayList<Control>(); |
| | | responseControls = new ArrayList<>(); |
| | | proxiedAuthorizationDN = null; |
| | | cancelRequest = null; |
| | | } |
| | |
| | | && (objectClasses == null || userAttributes == null |
| | | || operationalAttributes == null)) |
| | | { |
| | | objectClasses = new HashMap<ObjectClass,String>(); |
| | | userAttributes = new HashMap<AttributeType,List<Attribute>>(); |
| | | operationalAttributes = new HashMap<AttributeType,List<Attribute>>(); |
| | | objectClasses = new HashMap<>(); |
| | | userAttributes = new HashMap<>(); |
| | | operationalAttributes = new HashMap<>(); |
| | | |
| | | for (RawAttribute a : rawAttributes) |
| | | { |
| | |
| | | List<Attribute> attrs = operationalAttributes.get(attrType); |
| | | if (attrs == null) |
| | | { |
| | | attrs = new ArrayList<Attribute>(1); |
| | | attrs = new ArrayList<>(1); |
| | | attrs.add(attr); |
| | | operationalAttributes.put(attrType, attrs); |
| | | } |
| | |
| | | List<Attribute> attrs = userAttributes.get(attrType); |
| | | if (attrs == null) |
| | | { |
| | | attrs = new ArrayList<Attribute>(1); |
| | | attrs = new ArrayList<>(1); |
| | | attrs.add(attr); |
| | | userAttributes.put(attrType, attrs); |
| | | } |
| | |
| | | public AlertHandlerConfigManager(ServerContext serverContext) |
| | | { |
| | | this.serverContext = serverContext; |
| | | alertHandlers = new ConcurrentHashMap<DN, AlertHandler>(); |
| | | alertHandlers = new ConcurrentHashMap<>(); |
| | | } |
| | | |
| | | /** |
| | |
| | | } |
| | | else |
| | | { |
| | | List<LocalizableMessage> unacceptableReasons = new ArrayList<LocalizableMessage>(); |
| | | List<LocalizableMessage> unacceptableReasons = new ArrayList<>(); |
| | | if (!handler.isConfigurationAcceptable(configuration, unacceptableReasons)) |
| | | { |
| | | String reasons = Utils.joinAsString(". ", unacceptableReasons); |
| | |
| | | public AttributeSyntaxConfigManager(final ServerContext serverContext) |
| | | { |
| | | this.serverContext = serverContext; |
| | | syntaxes = new ConcurrentHashMap<DN,AttributeSyntax>(); |
| | | syntaxes = new ConcurrentHashMap<>(); |
| | | } |
| | | |
| | | |
| | |
| | | } |
| | | else |
| | | { |
| | | List<LocalizableMessage> unacceptableReasons = new ArrayList<LocalizableMessage>(); |
| | | List<LocalizableMessage> unacceptableReasons = new ArrayList<>(); |
| | | if (!syntax.isConfigurationAcceptable(configuration, unacceptableReasons)) |
| | | { |
| | | String reasons = Utils.joinAsString(". ", unacceptableReasons); |
| | |
| | | // can not be authenticated as a user that does not exist yet. |
| | | POST_RESPONSE_MODIFY, POST_RESPONSE_MODIFY_DN, POST_RESPONSE_DELETE), |
| | | true); |
| | | userMap = new DITCacheMap<CopyOnWriteArraySet<ClientConnection>>(); |
| | | userMap = new DITCacheMap<>(); |
| | | lock = new ReentrantReadWriteLock(); |
| | | |
| | | DirectoryServer.registerInternalPlugin(this); |
| | |
| | | CopyOnWriteArraySet<ClientConnection> connectionSet = userMap.get(userDN); |
| | | if (connectionSet == null) |
| | | { |
| | | connectionSet = new CopyOnWriteArraySet<ClientConnection>(); |
| | | connectionSet = new CopyOnWriteArraySet<>(); |
| | | connectionSet.add(clientConnection); |
| | | userMap.put(userDN, connectionSet); |
| | | } |
| | |
| | | final DN entryDN = op.getEntryDN(); |
| | | // Identify any client connections that may be authenticated |
| | | // or authorized as the user whose entry has been deleted and terminate them |
| | | Set<CopyOnWriteArraySet<ClientConnection>> arraySet = |
| | | new HashSet<CopyOnWriteArraySet<ClientConnection>>(); |
| | | Set<CopyOnWriteArraySet<ClientConnection>> arraySet = new HashSet<>(); |
| | | lock.writeLock().lock(); |
| | | try |
| | | { |
| | |
| | | lock.writeLock().lock(); |
| | | try |
| | | { |
| | | final Set<CopyOnWriteArraySet<ClientConnection>> arraySet = |
| | | new HashSet<CopyOnWriteArraySet<ClientConnection>>(); |
| | | final Set<CopyOnWriteArraySet<ClientConnection>> arraySet = new HashSet<>(); |
| | | userMap.removeSubtree(oldEntry.getName(), arraySet); |
| | | for (CopyOnWriteArraySet<ClientConnection> connectionSet : arraySet) |
| | | { |
| | |
| | | { |
| | | if (newAuthNSet == null) |
| | | { |
| | | newAuthNSet = new CopyOnWriteArraySet<ClientConnection>(); |
| | | newAuthNSet = new CopyOnWriteArraySet<>(); |
| | | } |
| | | conn.getAuthenticationInfo().setAuthenticationDN(newAuthNDN); |
| | | newAuthNSet.add(conn); |
| | |
| | | { |
| | | if (newAuthZSet == null) |
| | | { |
| | | newAuthZSet = new CopyOnWriteArraySet<ClientConnection>(); |
| | | newAuthZSet = new CopyOnWriteArraySet<>(); |
| | | } |
| | | conn.getAuthenticationInfo().setAuthorizationDN(newAuthZDN); |
| | | newAuthZSet.add(conn); |
| | |
| | | * |
| | | * |
| | | * Copyright 2007-2008 Sun Microsystems, Inc. |
| | | * Portions Copyright 2014 ForgeRock AS |
| | | * Portions Copyright 2014-2015 ForgeRock AS |
| | | */ |
| | | package org.opends.server.core; |
| | | |
| | |
| | | public class BaseDnRegistry { |
| | | |
| | | /** The set of base DNs registered with the server. */ |
| | | private final TreeMap<DN, Backend> baseDNs = new TreeMap<DN, Backend>(); |
| | | |
| | | private final TreeMap<DN, Backend> baseDNs = new TreeMap<>(); |
| | | /** The set of private naming contexts registered with the server. */ |
| | | private final TreeMap<DN, Backend> privateNamingContexts = new TreeMap<DN, Backend>(); |
| | | |
| | | private final TreeMap<DN, Backend> privateNamingContexts = new TreeMap<>(); |
| | | /** The set of public naming contexts registered with the server. */ |
| | | private final TreeMap<DN, Backend> publicNamingContexts = new TreeMap<DN, Backend>(); |
| | | private final TreeMap<DN, Backend> publicNamingContexts = new TreeMap<>(); |
| | | |
| | | /** |
| | | * Indicates whether or not this base DN registry is in test mode. |
| | |
| | | // Check to see if the backend is already registered with the server for |
| | | // any other base DN(s). The new base DN must not have any hierarchical |
| | | // relationship with any other base Dns for the same backend. |
| | | LinkedList<DN> otherBaseDNs = new LinkedList<DN>(); |
| | | LinkedList<DN> otherBaseDNs = new LinkedList<>(); |
| | | for (DN dn : baseDNs.keySet()) |
| | | { |
| | | Backend<?> b = baseDNs.get(dn); |
| | |
| | | |
| | | // Check to see if the new base DN should be the superior base DN for any |
| | | // other base DN(s) already defined. |
| | | LinkedList<Backend<?>> subordinateBackends = new LinkedList<Backend<?>>(); |
| | | LinkedList<DN> subordinateBaseDNs = new LinkedList<DN>(); |
| | | LinkedList<Backend<?>> subordinateBackends = new LinkedList<>(); |
| | | LinkedList<DN> subordinateBaseDNs = new LinkedList<>(); |
| | | for (DN dn : baseDNs.keySet()) |
| | | { |
| | | Backend<?> b = baseDNs.get(dn); |
| | |
| | | |
| | | // If we've gotten here, then the new base DN is acceptable. If we should |
| | | // actually apply the changes then do so now. |
| | | final List<LocalizableMessage> errors = new LinkedList<LocalizableMessage>(); |
| | | final List<LocalizableMessage> errors = new LinkedList<>(); |
| | | |
| | | // Check to see if any of the registered backends already contain an |
| | | // entry with the DN specified as the base DN. This could happen if |
| | |
| | | // Check to see if the backend has a parent backend, and whether it has |
| | | // any subordinates with base DNs that are below the base DN to remove. |
| | | Backend<?> superiorBackend = backend.getParentBackend(); |
| | | LinkedList<Backend<?>> subordinateBackends = new LinkedList<Backend<?>>(); |
| | | LinkedList<Backend<?>> subordinateBackends = new LinkedList<>(); |
| | | if (backend.getSubordinateBackends() != null) |
| | | { |
| | | for (Backend<?> b : backend.getSubordinateBackends()) |
| | |
| | | |
| | | |
| | | // See if there are any other base DNs registered within the same backend. |
| | | LinkedList<DN> otherBaseDNs = new LinkedList<DN>(); |
| | | LinkedList<DN> otherBaseDNs = new LinkedList<>(); |
| | | for (DN dn : baseDNs.keySet()) |
| | | { |
| | | if (dn.equals(baseDN)) |
| | |
| | | publicNamingContexts.remove(baseDN); |
| | | privateNamingContexts.remove(baseDN); |
| | | |
| | | final LinkedList<LocalizableMessage> errors = new LinkedList<LocalizableMessage>(); |
| | | final LinkedList<LocalizableMessage> errors = new LinkedList<>(); |
| | | if (superiorBackend == null) |
| | | { |
| | | // If there were any subordinate backends, then all of their base DNs |
| | |
| | | private Entry saslAuthUserEntry; |
| | | |
| | | /** The set of response controls for this bind operation. */ |
| | | private final List<Control> responseControls = new ArrayList<Control>(0); |
| | | private final List<Control> responseControls = new ArrayList<>(0); |
| | | |
| | | /** A message explaining the reason for the authentication failure. */ |
| | | private LocalizableMessage authFailureReason; |
| | |
| | | public CertificateMapperConfigManager(ServerContext serverContext) |
| | | { |
| | | this.serverContext = serverContext; |
| | | certificateMappers = new ConcurrentHashMap<DN, CertificateMapper>(); |
| | | certificateMappers = new ConcurrentHashMap<>(); |
| | | } |
| | | |
| | | /** |
| | |
| | | } |
| | | else |
| | | { |
| | | List<LocalizableMessage> unacceptableReasons = new ArrayList<LocalizableMessage>(); |
| | | List<LocalizableMessage> unacceptableReasons = new ArrayList<>(); |
| | | if (!mapper.isConfigurationAcceptable(configuration, unacceptableReasons)) |
| | | { |
| | | String reasons = Utils.joinAsString(". ", unacceptableReasons); |
| | |
| | | * |
| | | * |
| | | * Copyright 2007-2010 Sun Microsystems, Inc. |
| | | * Portions Copyright 2013-2014 ForgeRock AS |
| | | * Portions Copyright 2013-2015 ForgeRock AS |
| | | */ |
| | | package org.opends.server.core; |
| | | |
| | |
| | | this.rawAttributeType = rawAttributeType; |
| | | this.assertionValue = assertionValue; |
| | | |
| | | responseControls = new ArrayList<Control>(); |
| | | responseControls = new ArrayList<>(); |
| | | entryDN = null; |
| | | attributeType = null; |
| | | attributeOptions = null; |
| | |
| | | this.attributeType = attributeType; |
| | | this.assertionValue = assertionValue; |
| | | |
| | | responseControls = new ArrayList<Control>(); |
| | | responseControls = new ArrayList<>(); |
| | | rawEntryDN = ByteString.valueOf(entryDN.toString()); |
| | | rawAttributeType = attributeType.getNameOrOID(); |
| | | cancelRequest = null; |
| | | proxiedAuthorizationDN = null; |
| | | attributeOptions = new HashSet<String>(); |
| | | attributeOptions = new HashSet<>(); |
| | | } |
| | | |
| | | /** {@inheritDoc} */ |
| | |
| | | if (semicolonPos > 0) { |
| | | baseName = toLowerCase(rawAttributeType.substring(0, semicolonPos)); |
| | | |
| | | attributeOptions = new HashSet<String>(); |
| | | attributeOptions = new HashSet<>(); |
| | | int nextPos = rawAttributeType.indexOf(';', semicolonPos+1); |
| | | while (nextPos > 0) |
| | | { |
| | |
| | | * CDDL HEADER END |
| | | * |
| | | * |
| | | * Copyright 2014 ForgeRock AS |
| | | * Copyright 2014-2015 ForgeRock AS |
| | | */ |
| | | package org.opends.server.core; |
| | | |
| | |
| | | private Entry rootEntry; |
| | | |
| | | /** The add/delete/change listeners on configuration entries. */ |
| | | private final ConcurrentHashMap<DN, EntryListeners> listeners = new ConcurrentHashMap<DN, EntryListeners>(); |
| | | private final ConcurrentHashMap<DN, EntryListeners> listeners = new ConcurrentHashMap<>(); |
| | | |
| | | /** Schema with configuration-related elements. */ |
| | | private Schema configEnabledSchema; |
| | |
| | | private static class EntryListeners { |
| | | |
| | | /** The set of add listeners that have been registered with this entry. */ |
| | | private final CopyOnWriteArrayList<ConfigAddListener> addListeners = |
| | | new CopyOnWriteArrayList<ConfigAddListener>(); |
| | | |
| | | private final CopyOnWriteArrayList<ConfigAddListener> addListeners = new CopyOnWriteArrayList<>(); |
| | | /** The set of change listeners that have been registered with this entry. */ |
| | | private final CopyOnWriteArrayList<ConfigChangeListener> changeListeners = |
| | | new CopyOnWriteArrayList<ConfigChangeListener>(); |
| | | |
| | | private final CopyOnWriteArrayList<ConfigChangeListener> changeListeners = new CopyOnWriteArrayList<>(); |
| | | /** The set of delete listeners that have been registered with this entry. */ |
| | | private final CopyOnWriteArrayList<ConfigDeleteListener> deleteListeners = |
| | | new CopyOnWriteArrayList<ConfigDeleteListener>(); |
| | | private final CopyOnWriteArrayList<ConfigDeleteListener> deleteListeners = new CopyOnWriteArrayList<>(); |
| | | |
| | | CopyOnWriteArrayList<ConfigChangeListener> getChangeListeners() |
| | | { |
| | |
| | | /** Handler for search results. */ |
| | | private static final class ConfigSearchHandler implements SearchResultHandler |
| | | { |
| | | private final Set<Entry> entries = new HashSet<Entry>(); |
| | | private final Set<Entry> entries = new HashSet<>(); |
| | | |
| | | Set<Entry> getEntries() |
| | | { |
| | |
| | | |
| | | if (resultHandler.hasCompletedSuccessfully()) |
| | | { |
| | | final Set<DN> children = new HashSet<DN>(); |
| | | final Set<DN> children = new HashSet<>(); |
| | | for (final Entry entry : searchHandler.getEntries()) |
| | | { |
| | | children.add(entry.getName()); |
| | |
| | | |
| | | // Notify all the add listeners to apply the new configuration entry. |
| | | ResultCode resultCode = ResultCode.SUCCESS; |
| | | final List<LocalizableMessage> messages = new LinkedList<LocalizableMessage>(); |
| | | final List<LocalizableMessage> messages = new LinkedList<>(); |
| | | for (final ConfigAddListener listener : addListeners) |
| | | { |
| | | final ConfigChangeResult result = listener.applyConfigurationAdd(entry); |
| | |
| | | |
| | | // Notify all the delete listeners that the entry has been removed. |
| | | ResultCode resultCode = ResultCode.SUCCESS; |
| | | final List<LocalizableMessage> messages = new LinkedList<LocalizableMessage>(); |
| | | final List<LocalizableMessage> messages = new LinkedList<>(); |
| | | for (final ConfigDeleteListener listener : deleteListeners) |
| | | { |
| | | final ConfigChangeResult result = listener.applyConfigurationDelete(entry); |
| | |
| | | |
| | | // Notify all the change listeners of the update. |
| | | ResultCode resultCode = ResultCode.SUCCESS; |
| | | final List<LocalizableMessage> messages = new LinkedList<LocalizableMessage>(); |
| | | final List<LocalizableMessage> messages = new LinkedList<>(); |
| | | for (final ConfigChangeListener listener : changeListeners) |
| | | { |
| | | final ConfigChangeResult result = listener.applyConfigurationChange(newEntry); |
| | |
| | | */ |
| | | public ConnectionHandlerConfigManager(ServerContext serverContext) { |
| | | this.serverContext = serverContext; |
| | | connectionHandlers = new ConcurrentHashMap<DN, ConnectionHandler<?>>(); |
| | | connectionHandlers = new ConcurrentHashMap<>(); |
| | | } |
| | | |
| | | /** {@inheritDoc} */ |
| | |
| | | |
| | | private static List<Properties> getMailServerProperties(Set<String> smtpServers) |
| | | { |
| | | List<Properties> mailServerProperties = new ArrayList<Properties>(); |
| | | List<Properties> mailServerProperties = new ArrayList<>(); |
| | | if (smtpServers != null && !smtpServers.isEmpty()) |
| | | { |
| | | for (String smtpServer : smtpServers) |
| | |
| | | |
| | | private static HashSet<Privilege> convert(Set<DisabledPrivilege> configuredDisabledPrivs) |
| | | { |
| | | HashSet<Privilege> disabledPrivileges = new HashSet<Privilege>(); |
| | | HashSet<Privilege> disabledPrivileges = new HashSet<>(); |
| | | if (configuredDisabledPrivs != null) |
| | | { |
| | | for (DisabledPrivilege p : configuredDisabledPrivs) |
| | |
| | | reader.readStartSequence(); |
| | | final byte[] encodedObjectClasses = reader.readOctetString() |
| | | .toByteArray(); |
| | | final List<String> objectClassNames = new LinkedList<String>(); |
| | | final List<String> objectClassNames = new LinkedList<>(); |
| | | while (reader.hasNextElement()) |
| | | { |
| | | objectClassNames.add(reader.readOctetStringAsString()); |
| | |
| | | reader.readStartSequence(); |
| | | final byte[] encodedAttribute = reader.readOctetString().toByteArray(); |
| | | final String attributeName = reader.readOctetStringAsString(); |
| | | final List<String> attributeOptions = new LinkedList<String>(); |
| | | final List<String> attributeOptions = new LinkedList<>(); |
| | | while (reader.hasNextElement()) |
| | | { |
| | | attributeOptions.add(reader.readOctetStringAsString()); |
| | |
| | | this.rawEntryDN = rawEntryDN; |
| | | |
| | | entryDN = null; |
| | | responseControls = new ArrayList<Control>(); |
| | | responseControls = new ArrayList<>(); |
| | | cancelRequest = null; |
| | | } |
| | | |
| | |
| | | this.entryDN = entryDN; |
| | | |
| | | rawEntryDN = ByteString.valueOf(entryDN.toString()); |
| | | responseControls = new ArrayList<Control>(); |
| | | responseControls = new ArrayList<>(); |
| | | cancelRequest = null; |
| | | } |
| | | |
| | |
| | | |
| | | /** The entry cache order map sorted by the cache level. */ |
| | | @SuppressWarnings("rawtypes") |
| | | private SortedMap<Integer, EntryCache> cacheOrderMap = |
| | | new TreeMap<Integer, EntryCache>(); |
| | | private SortedMap<Integer, EntryCache> cacheOrderMap = new TreeMap<>(); |
| | | |
| | | /** The entry cache to level map. */ |
| | | private Map<DN,Integer> cacheNameToLevelMap = new HashMap<DN, Integer>(); |
| | | private Map<DN,Integer> cacheNameToLevelMap = new HashMap<>(); |
| | | |
| | | /** Global entry cache monitor provider name. */ |
| | | private static final String |
| | |
| | | // change listener registered to invoke and verify on its own. |
| | | else if (!configuration.isEnabled()) |
| | | { |
| | | List<LocalizableMessage> unacceptableReasons = new ArrayList<LocalizableMessage>(); |
| | | List<LocalizableMessage> unacceptableReasons = new ArrayList<>(); |
| | | if (!cache.isConfigurationAcceptable(configuration, unacceptableReasons)) |
| | | { |
| | | String buffer = Utils.joinAsString(". ", unacceptableReasons); |
| | |
| | | |
| | | responseOID = null; |
| | | responseValue = null; |
| | | responseControls = new ArrayList<Control>(); |
| | | responseControls = new ArrayList<>(); |
| | | cancelRequest = null; |
| | | |
| | | if (requestOID.equals(OID_CANCEL_REQUEST)) |
| | |
| | | public ExtendedOperationConfigManager(ServerContext serverContext) |
| | | { |
| | | this.serverContext = serverContext; |
| | | handlers = new ConcurrentHashMap<DN,ExtendedOperationHandler>(); |
| | | handlers = new ConcurrentHashMap<>(); |
| | | } |
| | | |
| | | /** |
| | |
| | | public IdentityMapperConfigManager(ServerContext serverContext) |
| | | { |
| | | this.serverContext = serverContext; |
| | | identityMappers = new ConcurrentHashMap<DN,IdentityMapper>(); |
| | | identityMappers = new ConcurrentHashMap<>(); |
| | | } |
| | | |
| | | |
| | |
| | | } |
| | | else |
| | | { |
| | | List<LocalizableMessage> unacceptableReasons = new ArrayList<LocalizableMessage>(); |
| | | List<LocalizableMessage> unacceptableReasons = new ArrayList<>(); |
| | | if (!mapper.isConfigurationAcceptable(configuration, unacceptableReasons)) |
| | | { |
| | | String reasons = Utils.joinAsString(". ", unacceptableReasons); |
| | |
| | | public KeyManagerProviderConfigManager(ServerContext serverContext) |
| | | { |
| | | this.serverContext = serverContext; |
| | | providers = new ConcurrentHashMap<DN,KeyManagerProvider>(); |
| | | providers = new ConcurrentHashMap<>(); |
| | | } |
| | | |
| | | /** |
| | |
| | | } |
| | | else |
| | | { |
| | | List<LocalizableMessage> unacceptableReasons = new ArrayList<LocalizableMessage>(); |
| | | List<LocalizableMessage> unacceptableReasons = new ArrayList<>(); |
| | | if (!provider.isConfigurationAcceptable(configuration, unacceptableReasons)) |
| | | { |
| | | String reasons = Utils.joinAsString(". ", unacceptableReasons); |
| | |
| | | * |
| | | * |
| | | * Copyright 2006-2008 Sun Microsystems, Inc. |
| | | * Portions Copyright 2013-2014 ForgeRock AS |
| | | * Portions Copyright 2013-2015 ForgeRock AS |
| | | */ |
| | | package org.opends.server.core; |
| | | |
| | |
| | | private static final LocalizedLogger logger = LocalizedLogger.getLoggerForThisClass(); |
| | | |
| | | /** A map between the filenames and the lock files for exclusive locks. */ |
| | | private static Map<String, FileLock> exclusiveLocks = |
| | | new HashMap<String,FileLock>(); |
| | | |
| | | private static Map<String, FileLock> exclusiveLocks = new HashMap<>(); |
| | | /** A map between the filenames and the lock files for shared locks. */ |
| | | private static Map<String, FileLock> sharedLocks = |
| | | new HashMap<String,FileLock>(); |
| | | |
| | | private static Map<String, FileLock> sharedLocks = new HashMap<>(); |
| | | /** A map between the filenames and reference counts for shared locks. */ |
| | | private static Map<String, Integer> sharedLockReferences = |
| | | new HashMap<String,Integer>(); |
| | | private static Map<String, Integer> sharedLockReferences = new HashMap<>(); |
| | | |
| | | /** The lock providing threadsafe access to the lock map data. */ |
| | | private static Object mapLock = new Object(); |
| | |
| | | root.addLogPublisherAddListener(this); |
| | | root.addLogPublisherDeleteListener(this); |
| | | |
| | | List<DebugLogPublisherCfg> debugPublisherCfgs = |
| | | new ArrayList<DebugLogPublisherCfg>(); |
| | | |
| | | List<AccessLogPublisherCfg> accessPublisherCfgs = |
| | | new ArrayList<AccessLogPublisherCfg>(); |
| | | |
| | | List<HTTPAccessLogPublisherCfg> httpAccessPublisherCfgs = |
| | | new ArrayList<HTTPAccessLogPublisherCfg>(); |
| | | |
| | | List<ErrorLogPublisherCfg> errorPublisherCfgs = |
| | | new ArrayList<ErrorLogPublisherCfg>(); |
| | | List<DebugLogPublisherCfg> debugPublisherCfgs = new ArrayList<>(); |
| | | List<AccessLogPublisherCfg> accessPublisherCfgs = new ArrayList<>(); |
| | | List<HTTPAccessLogPublisherCfg> httpAccessPublisherCfgs = new ArrayList<>(); |
| | | List<ErrorLogPublisherCfg> errorPublisherCfgs = new ArrayList<>(); |
| | | |
| | | for (String name : root.listLogPublishers()) |
| | | { |
| | |
| | | /** Creates a new instance of this matching rule config manager. */ |
| | | public MatchingRuleConfigManager() |
| | | { |
| | | matchingRuleFactories = new ConcurrentHashMap<DN,MatchingRuleFactory>(); |
| | | matchingRuleFactories = new ConcurrentHashMap<>(); |
| | | } |
| | | |
| | | |
| | |
| | | } |
| | | else |
| | | { |
| | | List<LocalizableMessage> unacceptableReasons = new ArrayList<LocalizableMessage>(); |
| | | List<LocalizableMessage> unacceptableReasons = new ArrayList<>(); |
| | | if (!factory.isConfigurationAcceptable(configuration, unacceptableReasons)) |
| | | { |
| | | String reasons = Utils.joinAsString(". ", unacceptableReasons); |
| | |
| | | * |
| | | * |
| | | * Copyright 2006-2010 Sun Microsystems, Inc. |
| | | * Portions Copyright 2011-2014 ForgeRock AS. |
| | | * Portions Copyright 2011-2015 ForgeRock AS. |
| | | */ |
| | | package org.opends.server.core; |
| | | |
| | |
| | | entryDN = null; |
| | | newRDN = null; |
| | | newSuperior = null; |
| | | responseControls = new ArrayList<Control>(); |
| | | responseControls = new ArrayList<>(); |
| | | cancelRequest = null; |
| | | modifications = null; |
| | | } |
| | |
| | | rawNewSuperior = ByteString.valueOf(newSuperior.toString()); |
| | | } |
| | | |
| | | responseControls = new ArrayList<Control>(); |
| | | responseControls = new ArrayList<>(); |
| | | cancelRequest = null; |
| | | modifications = null; |
| | | } |
| | |
| | | { |
| | | if (modifications == null) |
| | | { |
| | | modifications = new ArrayList<Modification>(); |
| | | modifications = new ArrayList<>(); |
| | | } |
| | | if (modification != null) |
| | | { |
| | |
| | | * |
| | | * |
| | | * Copyright 2007-2010 Sun Microsystems, Inc. |
| | | * Portions Copyright 2012-2014 ForgeRock AS. |
| | | * Portions Copyright 2012-2015 ForgeRock AS. |
| | | */ |
| | | package org.opends.server.core; |
| | | |
| | |
| | | |
| | | entryDN = null; |
| | | modifications = null; |
| | | responseControls = new ArrayList<Control>(); |
| | | responseControls = new ArrayList<>(); |
| | | cancelRequest = null; |
| | | } |
| | | |
| | |
| | | |
| | | rawEntryDN = ByteString.valueOf(entryDN.toString()); |
| | | |
| | | rawModifications = new ArrayList<RawModification>(modifications.size()); |
| | | rawModifications = new ArrayList<>(modifications.size()); |
| | | for (Modification m : modifications) |
| | | { |
| | | rawModifications.add(new LDAPModification(m.getModificationType(), |
| | | new LDAPAttribute(m.getAttribute()))); |
| | | } |
| | | |
| | | responseControls = new ArrayList<Control>(); |
| | | responseControls = new ArrayList<>(); |
| | | cancelRequest = null; |
| | | } |
| | | |
| | |
| | | { |
| | | if (modifications == null) |
| | | { |
| | | modifications = new ArrayList<Modification>(rawModifications.size()); |
| | | modifications = new ArrayList<>(rawModifications.size()); |
| | | try { |
| | | for (RawModification m : rawModifications) |
| | | { |
| | |
| | | public MonitorConfigManager(ServerContext serverContext) |
| | | { |
| | | this.serverContext = serverContext; |
| | | monitors = new ConcurrentHashMap<DN,MonitorProvider<?>>(); |
| | | monitors = new ConcurrentHashMap<>(); |
| | | } |
| | | |
| | | /** |
| | |
| | | public PasswordGeneratorConfigManager(ServerContext serverContext) |
| | | { |
| | | this.serverContext = serverContext; |
| | | passwordGenerators = new ConcurrentHashMap<DN,PasswordGenerator>(); |
| | | passwordGenerators = new ConcurrentHashMap<>(); |
| | | } |
| | | |
| | | /** |
| | |
| | | } |
| | | else |
| | | { |
| | | List<LocalizableMessage> unacceptableReasons = new ArrayList<LocalizableMessage>(); |
| | | List<LocalizableMessage> unacceptableReasons = new ArrayList<>(); |
| | | if (!generator.isConfigurationAcceptable(configuration, unacceptableReasons)) |
| | | { |
| | | String reasons = Utils.joinAsString(". ", unacceptableReasons); |
| | |
| | | |
| | | // Get the default storage schemes. They must all reference valid storage |
| | | // schemes that support the syntax for the specified password attribute. |
| | | List<PasswordStorageScheme<?>> defaultStorageSchemes = |
| | | new LinkedList<PasswordStorageScheme<?>>(); |
| | | List<PasswordStorageScheme<?>> defaultStorageSchemes = new LinkedList<>(); |
| | | for (DN schemeDN : configuration.getDefaultPasswordStorageSchemeDNs()) |
| | | { |
| | | PasswordStorageScheme<?> scheme = DirectoryServer |
| | |
| | | } |
| | | |
| | | // Get the names of the deprecated storage schemes. |
| | | Set<String> deprecatedStorageSchemes = new LinkedHashSet<String>(); |
| | | Set<String> deprecatedStorageSchemes = new LinkedHashSet<>(); |
| | | for (DN schemeDN : configuration.getDeprecatedPasswordStorageSchemeDNs()) |
| | | { |
| | | PasswordStorageScheme<?> scheme = DirectoryServer |
| | |
| | | } |
| | | |
| | | // Get the password validators. |
| | | Map<DN, PasswordValidator<?>> passwordValidators = |
| | | new HashMap<DN, PasswordValidator<?>>(); |
| | | Map<DN, PasswordValidator<?>> passwordValidators = new HashMap<>(); |
| | | for (DN validatorDN : configuration.getPasswordValidatorDNs()) |
| | | { |
| | | passwordValidators.put(validatorDN, |
| | |
| | | } |
| | | |
| | | // Get the status notification handlers. |
| | | Map<DN, AccountStatusNotificationHandler<?>> notificationHandlers = |
| | | new HashMap<DN, AccountStatusNotificationHandler<?>>(); |
| | | for (DN handlerDN : configuration |
| | | .getAccountStatusNotificationHandlerDNs()) |
| | | Map<DN, AccountStatusNotificationHandler<?>> notificationHandlers = new HashMap<>(); |
| | | for (DN handlerDN : configuration.getAccountStatusNotificationHandlerDNs()) |
| | | { |
| | | AccountStatusNotificationHandler<?> handler = DirectoryServer |
| | | .getAccountStatusNotificationHandler(handlerDN); |
| | |
| | | private long warnedTime = Long.MIN_VALUE; |
| | | |
| | | /** The set of modifications that should be applied to the user's entry. */ |
| | | private LinkedList<Modification> modifications = new LinkedList<Modification>(); |
| | | private LinkedList<Modification> modifications = new LinkedList<>(); |
| | | |
| | | |
| | | |
| | |
| | | private List<Long> getGeneralizedTimes(AttributeType attributeType) |
| | | throws DirectoryException |
| | | { |
| | | ArrayList<Long> timeValues = new ArrayList<Long>(); |
| | | ArrayList<Long> timeValues = new ArrayList<>(); |
| | | |
| | | List<Attribute> attrList = userEntry.getAttribute(attributeType); |
| | | if (attrList != null) |
| | |
| | | final Attribute attr = getFirstAttributeNotEmpty(passwordPolicy.getPasswordAttribute()); |
| | | if (attr != null) |
| | | { |
| | | Set<ByteString> values = new LinkedHashSet<ByteString>(attr.size()); |
| | | Set<ByteString> values = new LinkedHashSet<>(attr.size()); |
| | | for (ByteString value : attr) |
| | | { |
| | | values.add(value); |
| | |
| | | { |
| | | logger.traceException(e, "Error while processing auth failure times for user %s", userDNString); |
| | | |
| | | authFailureTimes = new ArrayList<Long>(); |
| | | authFailureTimes = new ArrayList<>(); |
| | | modifications.add(new Modification(ModificationType.REPLACE, Attributes.empty(type), true)); |
| | | return authFailureTimes; |
| | | } |
| | |
| | | |
| | | if (valuesToRemove == null) |
| | | { |
| | | valuesToRemove = new LinkedHashSet<ByteString>(); |
| | | valuesToRemove = new LinkedHashSet<>(); |
| | | } |
| | | |
| | | valuesToRemove.add(ByteString.valueOf(GeneralizedTimeSyntax.format(l))); |
| | |
| | | { |
| | | logger.traceException(e, "Error while processing grace login times for user %s", userDNString); |
| | | |
| | | graceLoginTimes = new ArrayList<Long>(); |
| | | graceLoginTimes = new ArrayList<>(); |
| | | |
| | | modifications.add(new Modification(ModificationType.REPLACE, Attributes.empty(type), true)); |
| | | } |
| | |
| | | */ |
| | | public List<ByteString> getClearPasswords() |
| | | { |
| | | LinkedList<ByteString> clearPasswords = new LinkedList<ByteString>(); |
| | | LinkedList<ByteString> clearPasswords = new LinkedList<>(); |
| | | |
| | | List<Attribute> attrList = userEntry.getAttribute(passwordPolicy.getPasswordAttribute()); |
| | | |
| | |
| | | throws DirectoryException |
| | | { |
| | | List<PasswordStorageScheme<?>> schemes = passwordPolicy.getDefaultPasswordStorageSchemes(); |
| | | List<ByteString> encodedPasswords = new ArrayList<ByteString>(schemes.size()); |
| | | List<ByteString> encodedPasswords = new ArrayList<>(schemes.size()); |
| | | |
| | | if (passwordPolicy.isAuthPasswordSyntax()) |
| | | { |
| | |
| | | } |
| | | |
| | | |
| | | HashSet<String> existingDefaultSchemes = new HashSet<String>(); |
| | | LinkedHashSet<ByteString> removedValues = new LinkedHashSet<ByteString>(); |
| | | LinkedHashSet<ByteString> updatedValues = new LinkedHashSet<ByteString>(); |
| | | HashSet<String> existingDefaultSchemes = new HashSet<>(); |
| | | LinkedHashSet<ByteString> removedValues = new LinkedHashSet<>(); |
| | | LinkedHashSet<ByteString> updatedValues = new LinkedHashSet<>(); |
| | | |
| | | boolean usesAuthPasswordSyntax = passwordPolicy.isAuthPasswordSyntax(); |
| | | |
| | |
| | | return; |
| | | } |
| | | |
| | | LinkedHashSet<ByteString> addedValues = new LinkedHashSet<ByteString>(); |
| | | LinkedHashSet<ByteString> addedValues = new LinkedHashSet<>(); |
| | | for (PasswordStorageScheme<?> s : passwordPolicy.getDefaultPasswordStorageSchemes()) |
| | | { |
| | | if (! existingDefaultSchemes.contains(toLowerCase(s.getStorageSchemeName()))) |
| | |
| | | */ |
| | | private TreeMap<Long,ByteString> getSortedHistoryValues(List<Attribute> removeAttrs) |
| | | { |
| | | TreeMap<Long, ByteString> historyMap = new TreeMap<Long, ByteString>(); |
| | | TreeMap<Long, ByteString> historyMap = new TreeMap<>(); |
| | | AttributeType historyType = DirectoryServer.getAttributeType(OP_ATTR_PWPOLICY_HISTORY_LC, true); |
| | | List<Attribute> attrList = userEntry.getAttribute(historyType); |
| | | if (attrList != null) |
| | |
| | | |
| | | |
| | | // Get a sorted list of the existing values to see if there are any that should be removed. |
| | | LinkedList<Attribute> removeAttrs = new LinkedList<Attribute>(); |
| | | LinkedList<Attribute> removeAttrs = new LinkedList<>(); |
| | | TreeMap<Long, ByteString> historyMap = getSortedHistoryValues(removeAttrs); |
| | | |
| | | |
| | |
| | | if (historyCount > 0 && historyMap.size() >= historyCount) |
| | | { |
| | | int numToDelete = historyMap.size() - historyCount + 1; |
| | | LinkedHashSet<ByteString> removeValues = new LinkedHashSet<ByteString>(numToDelete); |
| | | LinkedHashSet<ByteString> removeValues = new LinkedHashSet<>(numToDelete); |
| | | Iterator<ByteString> iterator = historyMap.values().iterator(); |
| | | while (iterator.hasNext() && numToDelete > 0) |
| | | { |
| | |
| | | { |
| | | long minAgeToKeep = currentTime - 1000L * historyDuration; |
| | | Iterator<Long> iterator = historyMap.keySet().iterator(); |
| | | LinkedHashSet<ByteString> removeValues = new LinkedHashSet<ByteString>(); |
| | | LinkedHashSet<ByteString> removeValues = new LinkedHashSet<>(); |
| | | while (iterator.hasNext()) |
| | | { |
| | | long timestamp = iterator.next(); |
| | |
| | | */ |
| | | public String[] getPasswordHistoryValues() |
| | | { |
| | | ArrayList<String> historyValues = new ArrayList<String>(); |
| | | ArrayList<String> historyValues = new ArrayList<>(); |
| | | AttributeType historyType = DirectoryServer.getAttributeType(OP_ATTR_PWPOLICY_HISTORY_LC, true); |
| | | List<Attribute> attrList = userEntry.getAttribute(historyType); |
| | | if (attrList != null) |
| | |
| | | } |
| | | |
| | | // Convert the set of modifications to a set of LDAP modifications. |
| | | ArrayList<RawModification> modList = new ArrayList<RawModification>(); |
| | | ArrayList<RawModification> modList = new ArrayList<>(); |
| | | for (Modification m : modifications) |
| | | { |
| | | modList.add(RawModification.create(m.getModificationType(), new LDAPAttribute(m.getAttribute()))); |
| | |
| | | public PasswordStorageSchemeConfigManager(ServerContext serverContext) |
| | | { |
| | | this.serverContext = serverContext; |
| | | storageSchemes = new ConcurrentHashMap<DN,PasswordStorageScheme>(); |
| | | storageSchemes = new ConcurrentHashMap<>(); |
| | | } |
| | | |
| | | |
| | |
| | | } |
| | | else |
| | | { |
| | | List<LocalizableMessage> unacceptableReasons = new ArrayList<LocalizableMessage>(); |
| | | List<LocalizableMessage> unacceptableReasons = new ArrayList<>(); |
| | | if (!passwordStorageScheme.isConfigurationAcceptable(configuration, unacceptableReasons)) |
| | | { |
| | | String reasons = Utils.joinAsString(". ", unacceptableReasons); |
| | |
| | | public PasswordValidatorConfigManager(ServerContext serverContext) |
| | | { |
| | | this.serverContext = serverContext; |
| | | passwordValidators = new ConcurrentHashMap<DN,PasswordValidator>(); |
| | | passwordValidators = new ConcurrentHashMap<>(); |
| | | } |
| | | |
| | | /** |
| | |
| | | } |
| | | else |
| | | { |
| | | List<LocalizableMessage> unacceptableReasons = new ArrayList<LocalizableMessage>(); |
| | | List<LocalizableMessage> unacceptableReasons = new ArrayList<>(); |
| | | if (!validator.isConfigurationAcceptable(configuration, unacceptableReasons)) |
| | | { |
| | | String reasons = Utils.joinAsString(". ", unacceptableReasons); |
| | |
| | | * |
| | | * |
| | | * Copyright 2006-2010 Sun Microsystems, Inc. |
| | | * Portions Copyright 2014 ForgeRock AS |
| | | * Portions Copyright 2014-2015 ForgeRock AS |
| | | */ |
| | | package org.opends.server.core; |
| | | |
| | |
| | | } |
| | | } |
| | | |
| | | /** |
| | | * Cancellation callbacks which should be run when this persistent search is |
| | | * cancelled. |
| | | */ |
| | | private final List<CancellationCallback> cancellationCallbacks = |
| | | new CopyOnWriteArrayList<CancellationCallback>(); |
| | | /** Cancellation callbacks which should be run when this persistent search is cancelled. */ |
| | | private final List<CancellationCallback> cancellationCallbacks = new CopyOnWriteArrayList<>(); |
| | | |
| | | /** The set of change types to send to the client. */ |
| | | private final Set<PersistentSearchChangeType> changeTypes; |
| | | |
| | | /** |
| | | * Indicates whether or not this persistent search has already been aborted. |
| | | */ |
| | | /** Indicates whether or not this persistent search has already been aborted. */ |
| | | private boolean isCancelled; |
| | | |
| | | /** |
| | |
| | | subordinateModifyDNPlugins = new DirectoryServerPlugin[0]; |
| | | subordinateDeletePlugins = new DirectoryServerPlugin[0]; |
| | | intermediateResponsePlugins = new DirectoryServerPlugin[0]; |
| | | registeredPlugins = |
| | | new ConcurrentHashMap<DN, |
| | | DirectoryServerPlugin<? extends PluginCfg>>(); |
| | | skippedPreOperationPlugins = |
| | | new ConcurrentHashMap<PluginOperation, |
| | | ArrayList<DirectoryServerPlugin>>(); |
| | | registeredPlugins = new ConcurrentHashMap<>(); |
| | | skippedPreOperationPlugins = new ConcurrentHashMap<>(); |
| | | } |
| | | |
| | | |
| | |
| | | } |
| | | |
| | | // Create a set of plugin types for the plugin. |
| | | HashSet<PluginType> initTypes = new HashSet<PluginType>(); |
| | | for (PluginCfgDefn.PluginType pluginType : |
| | | pluginConfiguration.getPluginType()) |
| | | HashSet<PluginType> initTypes = new HashSet<>(); |
| | | for (PluginCfgDefn.PluginType pluginType : pluginConfiguration.getPluginType()) |
| | | { |
| | | PluginType t = getPluginType(pluginType); |
| | | if ((pluginTypes == null) || pluginTypes.contains(t)) |
| | |
| | | } |
| | | else |
| | | { |
| | | List<LocalizableMessage> unacceptableReasons = new ArrayList<LocalizableMessage>(); |
| | | List<LocalizableMessage> unacceptableReasons = new ArrayList<>(); |
| | | if (!plugin.isConfigurationAcceptable(configuration, unacceptableReasons)) |
| | | { |
| | | String buffer = Utils.joinAsString(". ", unacceptableReasons); |
| | |
| | | { |
| | | // Parse the plugin order into initial and final plugin names. |
| | | boolean starFound = false; |
| | | LinkedHashSet<String> initialPluginNames = new LinkedHashSet<String>(); |
| | | LinkedHashSet<String> finalPluginNames = new LinkedHashSet<String>(); |
| | | LinkedHashSet<String> initialPluginNames = new LinkedHashSet<>(); |
| | | LinkedHashSet<String> finalPluginNames = new LinkedHashSet<>(); |
| | | |
| | | StringTokenizer tokenizer = new StringTokenizer(pluginOrder, ","); |
| | | while (tokenizer.hasMoreTokens()) |
| | |
| | | |
| | | |
| | | // Parse the array of already registered plugins to sort them accordingly. |
| | | HashMap<String,DirectoryServerPlugin> initialPlugins = |
| | | new HashMap<String,DirectoryServerPlugin>(initialPluginNames.size()); |
| | | HashMap<String,DirectoryServerPlugin> finalPlugins = |
| | | new HashMap<String,DirectoryServerPlugin>(finalPluginNames.size()); |
| | | ArrayList<DirectoryServerPlugin> otherPlugins = |
| | | new ArrayList<DirectoryServerPlugin>(); |
| | | HashMap<String,DirectoryServerPlugin> initialPlugins = new HashMap<>(initialPluginNames.size()); |
| | | HashMap<String,DirectoryServerPlugin> finalPlugins = new HashMap<>(finalPluginNames.size()); |
| | | ArrayList<DirectoryServerPlugin> otherPlugins = new ArrayList<>(); |
| | | for (DirectoryServerPlugin p : pluginArray) |
| | | { |
| | | DN dn = p.getPluginEntryDN(); |
| | |
| | | |
| | | // Compile a list of all the plugins in the correct order, convert it to |
| | | // an array, and return it. |
| | | ArrayList<DirectoryServerPlugin> newList = |
| | | new ArrayList<DirectoryServerPlugin>(pluginArray.length+1); |
| | | ArrayList<DirectoryServerPlugin> newList = new ArrayList<>(pluginArray.length + 1); |
| | | for (String name : initialPluginNames) |
| | | { |
| | | DirectoryServerPlugin p = initialPlugins.get(name); |
| | |
| | | { |
| | | if (configuration.isEnabled()) |
| | | { |
| | | // Create a set of plugin types for the plugin. |
| | | HashSet<PluginType> pluginTypes = new HashSet<PluginType>(); |
| | | for (PluginCfgDefn.PluginType pluginType : |
| | | configuration.getPluginType()) |
| | | { |
| | | pluginTypes.add(getPluginType(pluginType)); |
| | | } |
| | | HashSet<PluginType> pluginTypes = getPluginTypes(configuration); |
| | | |
| | | // Get the name of the class and make sure we can instantiate it as a |
| | | // plugin. |
| | | // Get the name of the class and make sure we can instantiate it as a plugin. |
| | | String className = configuration.getJavaClass(); |
| | | try |
| | | { |
| | |
| | | return ccr; |
| | | } |
| | | |
| | | // Create a set of plugin types for the plugin. |
| | | HashSet<PluginType> pluginTypes = new HashSet<PluginType>(); |
| | | for (PluginCfgDefn.PluginType pluginType : |
| | | configuration.getPluginType()) |
| | | { |
| | | pluginTypes.add(getPluginType(pluginType)); |
| | | } |
| | | HashSet<PluginType> pluginTypes = getPluginTypes(configuration); |
| | | |
| | | // Get the name of the class and make sure we can instantiate it as a |
| | | // plugin. |
| | | // Get the name of the class and make sure we can instantiate it as a plugin. |
| | | DirectoryServerPlugin<? extends PluginCfg> plugin = null; |
| | | String className = configuration.getJavaClass(); |
| | | try |
| | |
| | | { |
| | | if (configuration.isEnabled()) |
| | | { |
| | | // Create a set of plugin types for the plugin. |
| | | HashSet<PluginType> pluginTypes = new HashSet<PluginType>(); |
| | | for (PluginCfgDefn.PluginType pluginType : |
| | | configuration.getPluginType()) |
| | | { |
| | | pluginTypes.add(getPluginType(pluginType)); |
| | | } |
| | | HashSet<PluginType> pluginTypes = getPluginTypes(configuration); |
| | | |
| | | // Get the name of the class and make sure we can instantiate it as a |
| | | // plugin. |
| | | // Get the name of the class and make sure we can instantiate it as a plugin. |
| | | String className = configuration.getJavaClass(); |
| | | try |
| | | { |
| | |
| | | } |
| | | |
| | | // Create a set of plugin types for the plugin. |
| | | HashSet<PluginType> pluginTypes = new HashSet<PluginType>(); |
| | | for (PluginCfgDefn.PluginType pluginType : |
| | | configuration.getPluginType()) |
| | | { |
| | | pluginTypes.add(getPluginType(pluginType)); |
| | | } |
| | | HashSet<PluginType> pluginTypes = getPluginTypes(configuration); |
| | | |
| | | DirectoryServerPlugin<? extends PluginCfg> plugin = null; |
| | | try |
| | |
| | | return ccr; |
| | | } |
| | | |
| | | private HashSet<PluginType> getPluginTypes(PluginCfg configuration) |
| | | { |
| | | HashSet<PluginType> pluginTypes = new HashSet<>(); |
| | | for (PluginCfgDefn.PluginType pluginType : configuration.getPluginType()) |
| | | { |
| | | pluginTypes.add(getPluginType(pluginType)); |
| | | } |
| | | return pluginTypes; |
| | | } |
| | | |
| | | private void registerSkippedPreOperationPlugins(int i, |
| | | DirectoryServerPlugin[] plugins, |
| | | PluginOperation operation) |
| | | { |
| | | ArrayList<DirectoryServerPlugin> skippedPlugins = |
| | | new ArrayList<DirectoryServerPlugin>(plugins.length - i); |
| | | ArrayList<DirectoryServerPlugin> skippedPlugins = new ArrayList<>(plugins.length - i); |
| | | for(int j = i; j < plugins.length; j++) |
| | | { |
| | | skippedPlugins.add(plugins[j]); |
| | |
| | | skippedPreOperationPlugins.get(operation); |
| | | if(existingList == null) |
| | | { |
| | | existingList = new ArrayList<DirectoryServerPlugin>(); |
| | | existingList = new ArrayList<>(); |
| | | } |
| | | existingList.add(plugin); |
| | | skippedPreOperationPlugins.put(operation, existingList); |
| | |
| | | public RootDNConfigManager(ServerContext serverContext) |
| | | { |
| | | this.serverContext = serverContext; |
| | | alternateBindDNs = new ConcurrentHashMap<DN, HashSet<DN>>(); |
| | | alternateBindDNs = new ConcurrentHashMap<>(); |
| | | rootPrivilegeChangeListener = new RootPrivilegeChangeListener(); |
| | | } |
| | | |
| | |
| | | rootUserCfg.addChangeListener(this); |
| | | DirectoryServer.registerRootDN(rootUserCfg.dn()); |
| | | |
| | | HashSet<DN> altBindDNs = new HashSet<DN>(); |
| | | HashSet<DN> altBindDNs = new HashSet<>(); |
| | | for (DN alternateBindDN : rootUserCfg.getAlternateBindDN()) |
| | | { |
| | | try |
| | |
| | | |
| | | final ConfigChangeResult ccr = new ConfigChangeResult(); |
| | | |
| | | HashSet<DN> altBindDNs = new HashSet<DN>(); |
| | | HashSet<DN> altBindDNs = new HashSet<>(); |
| | | for (DN altBindDN : configuration.getAlternateBindDN()) |
| | | { |
| | | try |
| | |
| | | { |
| | | final ConfigChangeResult ccr = new ConfigChangeResult(); |
| | | |
| | | HashSet<DN> setDNs = new HashSet<DN>(); |
| | | HashSet<DN> addDNs = new HashSet<DN>(); |
| | | HashSet<DN> delDNs = |
| | | new HashSet<DN>(alternateBindDNs.get(configuration.dn())); |
| | | HashSet<DN> setDNs = new HashSet<>(); |
| | | HashSet<DN> addDNs = new HashSet<>(); |
| | | HashSet<DN> delDNs = new HashSet<>(alternateBindDNs.get(configuration.dn())); |
| | | |
| | | for (DN altBindDN : configuration.getAlternateBindDN()) |
| | | { |
| | |
| | | DirectoryServer.deregisterAlternateRootBindDN(dn); |
| | | } |
| | | |
| | | HashSet<DN> addedDNs = new HashSet<DN>(addDNs.size()); |
| | | HashSet<DN> addedDNs = new HashSet<>(addDNs.size()); |
| | | for (DN dn : addDNs) |
| | | { |
| | | try |
| | |
| | | Set<RootDNCfgDefn.DefaultRootPrivilegeName> configPrivSet = |
| | | configuration.getDefaultRootPrivilegeName(); |
| | | |
| | | HashSet<Privilege> privSet = new HashSet<Privilege>(configPrivSet.size()); |
| | | HashSet<Privilege> privSet = new HashSet<>(configPrivSet.size()); |
| | | for (RootDNCfgDefn.DefaultRootPrivilegeName p : configPrivSet) |
| | | { |
| | | privSet.add(Privilege.privilegeForName(p.toString())); |
| | |
| | | defaultRootPrivileges = privSet; |
| | | } |
| | | } |
| | | |
| | |
| | | public SASLConfigManager(ServerContext serverContext) |
| | | { |
| | | this.serverContext = serverContext; |
| | | handlers = new ConcurrentHashMap<DN,SASLMechanismHandler>(); |
| | | handlers = new ConcurrentHashMap<>(); |
| | | } |
| | | |
| | | |
| | | |
| | | /** |
| | | * Initializes all SASL mechanism hanlders currently defined in the Directory |
| | | * Initializes all SASL mechanism handlers currently defined in the Directory |
| | | * Server configuration. This should only be called at Directory Server |
| | | * startup. |
| | | * |
| | |
| | | } |
| | | else |
| | | { |
| | | List<LocalizableMessage> unacceptableReasons = new ArrayList<LocalizableMessage>(); |
| | | List<LocalizableMessage> unacceptableReasons = new ArrayList<>(); |
| | | if (!handler.isConfigurationAcceptable(configuration, unacceptableReasons)) |
| | | { |
| | | String reasons = Utils.joinAsString(". ", unacceptableReasons); |
| | |
| | | File[] schemaInstanceDirFiles = |
| | | schemaInstanceDir.listFiles(filter); |
| | | int fileNumber = schemaInstanceDirFiles.length ; |
| | | ArrayList<String> fileList = new ArrayList<String>(fileNumber); |
| | | ArrayList<String> fileList = new ArrayList<>(fileNumber); |
| | | |
| | | for (File f : schemaInstanceDirFiles) |
| | | { |
| | |
| | | { |
| | | // The file was empty -- skip it. |
| | | reader.close(); |
| | | return new LinkedList<Modification>(); |
| | | return new LinkedList<>(); |
| | | } |
| | | } |
| | | catch (Exception e) |
| | |
| | | } |
| | | |
| | | // Get the attributeTypes attribute from the entry. |
| | | List<Modification> mods = new LinkedList<Modification>(); |
| | | List<Modification> mods = new LinkedList<>(); |
| | | |
| | | //parse the syntaxes first because attributes rely on these. |
| | | List<Attribute> ldapSyntaxList = |
| | | getLdapSyntaxesAttributes(schema, entry, mods); |
| | | List<Attribute> ldapSyntaxList = getLdapSyntaxesAttributes(schema, entry, mods); |
| | | List<Attribute> attrList = getAttributeTypeAttributes(schema, entry, mods); |
| | | List<Attribute> ocList = getObjectClassesAttributes(schema, entry, mods); |
| | | List<Attribute> nfList = getNameFormsAttributes(schema, entry, mods); |
| | | List<Attribute> dcrList = getDITContentRulesAttributes(schema, entry, mods); |
| | | List<Attribute> dsrList = |
| | | getDITStructureRulesAttributes(schema, entry, mods); |
| | | List<Attribute> mruList = |
| | | getMatchingRuleUsesAttributes(schema, entry, mods); |
| | | List<Attribute> dsrList = getDITStructureRulesAttributes(schema, entry, mods); |
| | | List<Attribute> mruList = getMatchingRuleUsesAttributes(schema, entry, mods); |
| | | |
| | | // Loop on all the attribute of the schema entry to |
| | | // find the extra attribute that should be loaded in the Schema. |
| | |
| | | provider.initialize(config, schemaBuilder, schemaUpdater); |
| | | } |
| | | else { |
| | | final List<LocalizableMessage> unacceptableReasons = new ArrayList<LocalizableMessage>(); |
| | | final List<LocalizableMessage> unacceptableReasons = new ArrayList<>(); |
| | | final boolean isAcceptable = provider.isConfigurationAcceptable(config, unacceptableReasons); |
| | | if (!isAcceptable) |
| | | { |
| | |
| | | try |
| | | { |
| | | final File[] schemaFiles = schemaDirectory.listFiles(new SchemaFileFilter()); |
| | | final List<String> schemaFileNames = new ArrayList<String>(schemaFiles.length); |
| | | final List<String> schemaFileNames = new ArrayList<>(schemaFiles.length); |
| | | |
| | | for (final File f : schemaFiles) |
| | | { |
| | |
| | | private Set<String> attributes; |
| | | |
| | | /** The set of response controls for this search operation. */ |
| | | private final List<Control> responseControls = new ArrayList<Control>(); |
| | | private final List<Control> responseControls = new ArrayList<>(); |
| | | |
| | | /** The time that the search time limit has expired. */ |
| | | private long timeLimitExpiration; |
| | |
| | | { |
| | | if (controls == null) |
| | | { |
| | | controls = new ArrayList<Control>(1); |
| | | controls = new ArrayList<>(1); |
| | | } |
| | | |
| | | try |
| | |
| | | { |
| | | AttributeType t = e.getKey(); |
| | | List<Attribute> oldAttributes = e.getValue(); |
| | | List<Attribute> newAttributes = |
| | | new ArrayList<Attribute>(oldAttributes.size()); |
| | | List<Attribute> newAttributes = new ArrayList<>(oldAttributes.size()); |
| | | |
| | | for (Attribute a : oldAttributes) |
| | | { |
| | |
| | | { |
| | | AttributeType t = e.getKey(); |
| | | List<Attribute> oldAttributes = e.getValue(); |
| | | List<Attribute> newAttributes = |
| | | new ArrayList<Attribute>(oldAttributes.size()); |
| | | List<Attribute> newAttributes = new ArrayList<>(oldAttributes.size()); |
| | | |
| | | for (Attribute a : oldAttributes) |
| | | { |
| | |
| | | * |
| | | * |
| | | * Copyright 2006-2009 Sun Microsystems, Inc. |
| | | * Portions copyright 2012-2014 ForgeRock AS. |
| | | * Portions copyright 2012-2015 ForgeRock AS. |
| | | */ |
| | | package org.opends.server.core; |
| | | |
| | |
| | | // threads that are currently active. This can be an inexact science, so |
| | | // we'll make sure to allocate enough room for double the threads that we |
| | | // think are currently running. |
| | | threadList = new LinkedList<Thread>(); |
| | | threadList = new LinkedList<>(); |
| | | ThreadGroup threadGroup = DirectoryThread.DIRECTORY_THREAD_GROUP; |
| | | Thread[] threadArray = new Thread[threadGroup.activeCount() * 2]; |
| | | int numThreads = threadGroup.enumerate(threadArray, true); |
| | |
| | | /** Indicates if the password attribute uses auth password syntax. */ |
| | | private final Boolean pAuthPasswordSyntax; |
| | | /** The set of password validators if any. */ |
| | | private final Set<DN> pValidatorNames = new HashSet<DN>(); |
| | | private final Set<DN> pValidatorNames = new HashSet<>(); |
| | | /** Used when logging errors due to invalid validator reference. */ |
| | | private AtomicBoolean isAlreadyLogged = new AtomicBoolean(); |
| | | |
| | |
| | | { |
| | | if (!pValidatorNames.isEmpty()) |
| | | { |
| | | Collection<PasswordValidator<?>> values = |
| | | new HashSet<PasswordValidator<?>>(); |
| | | Collection<PasswordValidator<?>> values = new HashSet<>(); |
| | | for (DN validatorDN : pValidatorNames){ |
| | | PasswordValidator<?> validator = DirectoryServer |
| | | .getPasswordValidator(validatorDN); |
| | | PasswordValidator<?> validator = DirectoryServer.getPasswordValidator(validatorDN); |
| | | if (validator == null) { |
| | | PasswordValidator<?> errorValidator = new RejectPasswordValidator( |
| | | validatorDN.toString(), passwordPolicySubentryDN.toString()); |
| | |
| | | public SynchronizationProviderConfigManager(ServerContext serverContext) |
| | | { |
| | | this.serverContext = serverContext; |
| | | registeredProviders = new ConcurrentHashMap<DN,SynchronizationProvider<SynchronizationProviderCfg>>(); |
| | | registeredProviders = new ConcurrentHashMap<>(); |
| | | } |
| | | |
| | | /** |
| | |
| | | |
| | | // Register as an add and delete listener so that we can |
| | | // be notified when new synchronization providers are added or existing |
| | | // sycnhronization providers are removed. |
| | | // synchronization providers are removed. |
| | | root.addSynchronizationProviderAddListener(this); |
| | | root.addSynchronizationProviderDeleteListener(this); |
| | | |
| | |
| | | public TrustManagerProviderConfigManager(ServerContext serverContext) |
| | | { |
| | | this.serverContext = serverContext; |
| | | providers = new ConcurrentHashMap<DN,TrustManagerProvider>(); |
| | | providers = new ConcurrentHashMap<>(); |
| | | } |
| | | |
| | | /** |
| | |
| | | } |
| | | else |
| | | { |
| | | List<LocalizableMessage> unacceptableReasons = new ArrayList<LocalizableMessage>(); |
| | | List<LocalizableMessage> unacceptableReasons = new ArrayList<>(); |
| | | if (!provider.isConfigurationAcceptable(configuration, unacceptableReasons)) |
| | | { |
| | | String reasons = Utils.joinAsString(". ", unacceptableReasons); |
| | |
| | | * A mapping between the DNs of the config entries and the associated virtual |
| | | * attribute rules. |
| | | */ |
| | | private final ConcurrentMap<DN, VirtualAttributeRule> rules = |
| | | new ConcurrentHashMap<DN, VirtualAttributeRule>(); |
| | | private final ConcurrentMap<DN, VirtualAttributeRule> rules = new ConcurrentHashMap<>(); |
| | | |
| | | private final ServerContext serverContext; |
| | | |
| | |
| | | loadProvider(className, cfg, true); |
| | | |
| | | Map<LocalizableMessage, DirectoryException> reasons = |
| | | new LinkedHashMap<LocalizableMessage, DirectoryException>(); |
| | | new LinkedHashMap<>(); |
| | | Set<SearchFilter> filters = buildFilters(cfg, reasons); |
| | | if (!reasons.isEmpty()) |
| | | { |
| | |
| | | private Set<SearchFilter> buildFilters(VirtualAttributeCfg cfg, |
| | | Map<LocalizableMessage, DirectoryException> unacceptableReasons) |
| | | { |
| | | Set<SearchFilter> filters = new LinkedHashSet<SearchFilter>(); |
| | | Set<SearchFilter> filters = new LinkedHashSet<>(); |
| | | for (String filterString : cfg.getFilter()) |
| | | { |
| | | try |
| | |
| | | |
| | | // Make sure that we can parse all of the search filters. |
| | | Map<LocalizableMessage, DirectoryException> reasons = |
| | | new LinkedHashMap<LocalizableMessage, DirectoryException>(); |
| | | new LinkedHashMap<>(); |
| | | Set<SearchFilter> filters = buildFilters(configuration, reasons); |
| | | if (!reasons.isEmpty()) |
| | | { |
| | |
| | | List<LocalizableMessage> unacceptableReasons) |
| | | { |
| | | Map<LocalizableMessage, DirectoryException> reasons = |
| | | new LinkedHashMap<LocalizableMessage, DirectoryException>(); |
| | | new LinkedHashMap<>(); |
| | | buildFilters(cfg, reasons); |
| | | if (!reasons.isEmpty()) |
| | | { |
| | |
| | | |
| | | // Make sure that we can parse all of the search filters. |
| | | Map<LocalizableMessage, DirectoryException> reasons = |
| | | new LinkedHashMap<LocalizableMessage, DirectoryException>(); |
| | | new LinkedHashMap<>(); |
| | | Set<SearchFilter> filters = buildFilters(configuration, reasons); |
| | | if (!reasons.isEmpty()) |
| | | { |
| | |
| | | } |
| | | else |
| | | { |
| | | List<LocalizableMessage> unacceptableReasons = new ArrayList<LocalizableMessage>(); |
| | | List<LocalizableMessage> unacceptableReasons = new ArrayList<>(); |
| | | if (!provider.isConfigurationAcceptable(cfg, unacceptableReasons)) |
| | | { |
| | | String reasons = Utils.joinAsString(". ", unacceptableReasons); |
| | |
| | | @DataProvider |
| | | public Iterator<Object[]> createCSNPairsToCompare() |
| | | { |
| | | final List<Object> allCSNs = new ArrayList<Object>(); |
| | | final List<Object> allCSNs = new ArrayList<>(); |
| | | for (Object[] csnData : createCSNData()) |
| | | { |
| | | allCSNs.addAll(Arrays.asList(csnData)); |
| | | } |
| | | |
| | | final List<Object[]> results = new ArrayList<Object[]>(); |
| | | final List<Object[]> results = new ArrayList<>(); |
| | | for (Object csn1 : allCSNs) |
| | | { |
| | | for (Object csn2 : allCSNs) |
| | |
| | | // but use only 4 of them for update msg |
| | | // beforeCsn, middleCsn and afterCsn are not used |
| | | // in order to test cursor generation from a key not present in the log (before, in the middle, after) |
| | | final List<CSN> usedCsns = new ArrayList<CSN>(Arrays.asList(sevenCsns)); |
| | | final List<CSN> usedCsns = new ArrayList<>(Arrays.asList(sevenCsns)); |
| | | usedCsns.remove(beforeCsn); |
| | | usedCsns.remove(middleCsn); |
| | | usedCsns.remove(afterCsn); |
| | |
| | | * |
| | | * CDDL HEADER END |
| | | * |
| | | * Copyright 2014 ForgeRock AS |
| | | * Copyright 2014-2015 ForgeRock AS |
| | | */ |
| | | package org.opends.server.replication.server.changelog.je; |
| | | |
| | |
| | | final ReplicationDbEnv changelogStateDB = new TestableReplicationDbEnv(); |
| | | |
| | | // encode data |
| | | final Map<byte[], byte[]> wholeState = new LinkedHashMap<byte[], byte[]>(); |
| | | final Map<byte[], byte[]> wholeState = new LinkedHashMap<>(); |
| | | put(wholeState, toGenIdEntry(baseDN, generationId)); |
| | | for (Integer serverId : replicas) |
| | | { |
| | |
| | | throws IOException, InitializationException, MakeLDIFException |
| | | { |
| | | TemplateFile template = new TemplateFile(resourcePath, new Random(1)); |
| | | ArrayList<LocalizableMessage> warnings = new ArrayList<LocalizableMessage>(); |
| | | ArrayList<LocalizableMessage> warnings = new ArrayList<>(); |
| | | template.parse(templatePath, warnings); |
| | | makeLdif(ldifPath, template); |
| | | } |
| | |
| | | throws IOException, InitializationException, MakeLDIFException |
| | | { |
| | | TemplateFile template = new TemplateFile(resourcePath, new Random(1)); |
| | | ArrayList<LocalizableMessage> warnings = new ArrayList<LocalizableMessage>(); |
| | | ArrayList<LocalizableMessage> warnings = new ArrayList<>(); |
| | | template.parse(templateLines, warnings); |
| | | makeLdif(ldifPath, template); |
| | | } |
| | |
| | | |
| | | private String[] backupTask(String... additionalLdif) |
| | | { |
| | | final ArrayList<String> l = new ArrayList<String>(Arrays.asList( |
| | | final ArrayList<String> l = new ArrayList<>(Arrays.asList( |
| | | "dn: ds-task-id=" + UUID.randomUUID() + ",cn=Scheduled Tasks,cn=Tasks", |
| | | "objectclass: top", |
| | | "objectclass: ds-task", |
| | |
| | | |
| | | private String[] restoreTask(String... additionalLdif) |
| | | { |
| | | final ArrayList<String> l = new ArrayList<String>(Arrays.asList( |
| | | final ArrayList<String> l = new ArrayList<>(Arrays.asList( |
| | | "dn: ds-task-id=" + UUID.randomUUID() + ",cn=Scheduled Tasks,cn=Tasks", |
| | | "objectclass: top", |
| | | "objectclass: ds-task", |
| | |
| | | // Previous behaviour showed "missingVar" on line 5. |
| | | |
| | | TemplateFile templateFile = new TemplateFile(resourcePath); |
| | | List<LocalizableMessage> warns = new ArrayList<LocalizableMessage>(); |
| | | List<LocalizableMessage> warns = new ArrayList<>(); |
| | | |
| | | try |
| | | { |
| | |
| | | throws Exception |
| | | { |
| | | TemplateFile templateFile = new TemplateFile(resourcePath); |
| | | List<LocalizableMessage> warns = new ArrayList<LocalizableMessage>(); |
| | | List<LocalizableMessage> warns = new ArrayList<>(); |
| | | templateFile.parse(lines, warns); |
| | | assertTrue(warns.isEmpty(),"Warnings in parsing test template " + testName ); |
| | | } |
| | |
| | | * |
| | | * |
| | | * Copyright 2006-2010 Sun Microsystems, Inc. |
| | | * Portions Copyright 2011-2014 ForgeRock AS. |
| | | * Portions Copyright 2011-2015 ForgeRock AS. |
| | | */ |
| | | package org.opends.server.workflowelement.localbackend; |
| | | |
| | |
| | | private void modifyAttribute(String baseDN, ModificationType modType, String attributeName, String attributeValue) |
| | | throws Exception |
| | | { |
| | | ArrayList<Modification> mods = new ArrayList<Modification>(); |
| | | ArrayList<Modification> mods = new ArrayList<>(); |
| | | Attribute attributeToModify = Attributes.create(attributeName, attributeValue); |
| | | mods.add(new Modification(modType, attributeToModify)); |
| | | ModifyOperation modifyOperation = getRootConnection().processModify(DN.valueOf(baseDN), mods); |
| | | assertEquals(modifyOperation.getResultCode(), ResultCode.SUCCESS); |
| | | } |
| | | } |
| | | } |