/*
 * Copyright (c) 2016 Cisco and/or its affiliates.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at:
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
17 package io.fd.honeycomb.translate.impl.write.registry;
19 import static com.google.common.base.Preconditions.checkArgument;
20 import static com.google.common.base.Preconditions.checkNotNull;
22 import com.google.common.base.Optional;
23 import com.google.common.collect.ImmutableMap;
24 import com.google.common.collect.Lists;
25 import com.google.common.collect.Multimap;
26 import com.google.common.collect.Sets;
27 import io.fd.honeycomb.translate.TranslationException;
28 import io.fd.honeycomb.translate.util.RWUtils;
29 import io.fd.honeycomb.translate.write.DataObjectUpdate;
30 import io.fd.honeycomb.translate.write.WriteContext;
31 import io.fd.honeycomb.translate.write.Writer;
32 import io.fd.honeycomb.translate.write.registry.UpdateFailedException;
33 import io.fd.honeycomb.translate.write.registry.WriterRegistry;
34 import java.util.ArrayList;
35 import java.util.Collection;
36 import java.util.Collections;
37 import java.util.HashSet;
38 import java.util.LinkedList;
39 import java.util.List;
42 import java.util.stream.Collectors;
43 import javax.annotation.Nonnull;
44 import javax.annotation.Nullable;
45 import javax.annotation.concurrent.ThreadSafe;
46 import org.opendaylight.yangtools.yang.binding.DataObject;
47 import org.opendaylight.yangtools.yang.binding.InstanceIdentifier;
48 import org.slf4j.Logger;
49 import org.slf4j.LoggerFactory;
/**
 * Flat writer registry, delegating updates to writers in the order writers were submitted.
 */
55 final class FlatWriterRegistry implements WriterRegistry {
private static final Logger LOG = LoggerFactory.getLogger(FlatWriterRegistry.class);

// Writer iteration order used for deletes: reverse of submission order.
private final Set<InstanceIdentifier<?>> writersOrderReversed;
// Writer iteration order used for creates/updates: submission order.
private final Set<InstanceIdentifier<?>> writersOrder;
// Direct lookup of a writer by the instance identifier type it manages.
private final Map<InstanceIdentifier<?>, Writer<?>> writersById;
// Distinct set of all registered writers; used for subtree capability checks.
private final Set<? extends Writer<?>> writers;
/**
 * Create flat registry instance.
 *
 * @param writersById immutable, ordered map of writers to use to process updates. Order of the writers has to be
 *                    one in which create and update operations should be handled. Deletes will be handled in
 *                    reversed order. All deletes are handled before handling all the updates.
 */
FlatWriterRegistry(@Nonnull final ImmutableMap<InstanceIdentifier<?>, Writer<?>> writersById) {
    this.writersById = writersById;
    // Deletes must be processed in reverse submission order (children before parents)
    this.writersOrderReversed = Sets.newLinkedHashSet(Lists.reverse(Lists.newArrayList(writersById.keySet())));
    this.writersOrder = writersById.keySet();
    // Distinct writers, used to check whether any (subtree) writer can handle a given id
    this.writers = writersById.values().stream().collect(Collectors.toSet());
}
79 public void processModifications(@Nonnull final DataObjectUpdates updates,
80 @Nonnull final WriteContext ctx) throws TranslationException {
81 if (updates.isEmpty()) {
85 // ordered set of already processed nodes
86 final List<DataObjectUpdate> alreadyProcessed = new LinkedList<>();
88 // Optimization for single type updates, less consuming for pairing update with responsible writer,etc
89 if (updates.containsOnlySingleType()) {
90 // First process delete
91 singleUpdate(updates.getDeletes(), alreadyProcessed, ctx);
94 singleUpdate(updates.getUpdates(), alreadyProcessed, ctx);
96 // First process deletes
97 bulkUpdate(updates.getDeletes(), alreadyProcessed, ctx, writersOrderReversed);
100 bulkUpdate(updates.getUpdates(), alreadyProcessed, ctx, writersOrder);
103 LOG.debug("Update successful for types: {}", updates.getTypeIntersection());
104 LOG.trace("Update successful for: {}", updates);
108 public boolean writerSupportsUpdate(@Nonnull final InstanceIdentifier<?> type) {
109 Writer writer = getWriter(type);
111 if (writer == null) {
112 writer = getSubtreeWriterResponsible(type);
115 return checkNotNull(writer, "Unable to find writer for %s", type).supportsDirectUpdate();
118 private void singleUpdate(
119 @Nonnull final Multimap<InstanceIdentifier<?>, ? extends DataObjectUpdate> updates,
120 @Nonnull final List<DataObjectUpdate> alreadyProcessed,
121 @Nonnull final WriteContext ctx) throws UpdateFailedException {
122 if (updates.isEmpty()) {
126 DataObjectUpdate current = null;
127 final InstanceIdentifier<?> singleType = updates.keySet().iterator().next();
128 LOG.debug("Performing single type update for: {}", singleType);
129 Collection<? extends DataObjectUpdate> singleTypeUpdates = updates.get(singleType);
130 Writer<?> writer = getWriter(singleType);
132 if (writer == null) {
133 // This node must be handled by a subtree writer, find it and call it or else fail
134 writer = getSubtreeWriterResponsible(singleType);
135 checkArgument(writer != null, "Unable to process update. Missing writers for: %s",
137 singleTypeUpdates = getParentDataObjectUpdate(ctx, updates, writer);
141 LOG.trace("Performing single type update with writer: {}", writer);
143 for (DataObjectUpdate singleUpdate : singleTypeUpdates) {
144 current = singleUpdate;
145 writer.processModification(singleUpdate.getId(), singleUpdate.getDataBefore(),
146 singleUpdate.getDataAfter(),
148 alreadyProcessed.add(singleUpdate);
150 } catch (Exception e) {
151 throw new UpdateFailedException(e, alreadyProcessed, current);
156 private Writer<?> getSubtreeWriterResponsible(final InstanceIdentifier<?> singleType) {
157 return writersById.values().stream()
158 .filter(w -> w instanceof SubtreeWriter)
159 .filter(w -> w.canProcess(singleType))
164 private Collection<DataObjectUpdate> getParentDataObjectUpdate(final WriteContext ctx,
165 final Multimap<InstanceIdentifier<?>, ? extends DataObjectUpdate> updates,
166 final Writer<?> writer) {
167 // Now read data for subtree reader root, but first keyed ID is needed and that ID can be cut from updates
168 InstanceIdentifier<?> firstAffectedChildId = ((SubtreeWriter<?>) writer).getHandledChildTypes().stream()
169 .filter(updates::containsKey)
170 .map(unkeyedId -> updates.get(unkeyedId))
171 .flatMap(doUpdates -> doUpdates.stream())
172 .map(DataObjectUpdate::getId)
176 final InstanceIdentifier<?> parentKeyedId =
177 RWUtils.cutId(firstAffectedChildId, writer.getManagedDataObjectType());
179 final Optional<? extends DataObject> parentBefore = ctx.readBefore(parentKeyedId);
180 final Optional<? extends DataObject> parentAfter = ctx.readAfter(parentKeyedId);
181 return Collections.singleton(
182 DataObjectUpdate.create(parentKeyedId, parentBefore.orNull(), parentAfter.orNull()));
185 private void bulkUpdate(
186 @Nonnull final Multimap<InstanceIdentifier<?>, ? extends DataObjectUpdate> updates,
187 @Nonnull final List<DataObjectUpdate> alreadyProcessed,
188 @Nonnull final WriteContext ctx,
189 @Nonnull final Set<InstanceIdentifier<?>> writersOrder) throws UpdateFailedException {
190 if (updates.isEmpty()) {
194 // Check that all updates can be handled
195 checkAllTypesCanBeHandled(updates);
197 LOG.debug("Performing bulk update for: {}", updates.keySet());
198 DataObjectUpdate current = null;
200 // Iterate over all writers and call update if there are any related updates
201 for (InstanceIdentifier<?> writerType : writersOrder) {
202 Collection<? extends DataObjectUpdate> writersData = updates.get(writerType);
203 final Writer<?> writer = getWriter(writerType);
205 if (writersData.isEmpty()) {
206 // If there are no data for current writer, but it is a SubtreeWriter and there are updates to
207 // its children, still invoke it with its root data
208 if (writer instanceof SubtreeWriter<?> && isAffected(((SubtreeWriter<?>) writer), updates)) {
209 // Provide parent data for SubtreeWriter for further processing
210 writersData = getParentDataObjectUpdate(ctx, updates, writer);
212 // Skipping unaffected writer
213 // Alternative to this would be modification sort according to the order of writers
218 LOG.debug("Performing update for: {}", writerType);
219 LOG.trace("Performing update with writer: {}", writer);
221 for (DataObjectUpdate singleUpdate : writersData) {
222 current = singleUpdate;
224 writer.processModification(singleUpdate.getId(), singleUpdate.getDataBefore(),
225 singleUpdate.getDataAfter(), ctx);
226 } catch (Exception e) {
227 throw new UpdateFailedException(e, alreadyProcessed, current);
229 alreadyProcessed.add(singleUpdate);
230 LOG.trace("Update successful for type: {}", writerType);
231 LOG.debug("Update successful for: {}", singleUpdate);
236 private void checkAllTypesCanBeHandled(
237 @Nonnull final Multimap<InstanceIdentifier<?>, ? extends DataObjectUpdate> updates) {
239 List<InstanceIdentifier<?>> noWriterNodes = new ArrayList<>();
240 for (InstanceIdentifier<?> id : updates.keySet()) {
241 // either there is direct writer for the iid
242 if (writersById.containsKey(id)) {
246 if (writers.stream().anyMatch(o -> o.canProcess(id))) {
250 noWriterNodes.add(id);
253 if (!noWriterNodes.isEmpty()) {
254 throw new IllegalArgumentException("Unable to process update. Missing writers for: " + noWriterNodes);
259 * Check whether {@link SubtreeWriter} is affected by the updates.
261 * @return true if there are any updates to SubtreeWriter's child nodes (those marked by SubtreeWriter as being
264 private static boolean isAffected(final SubtreeWriter<?> writer,
265 final Multimap<InstanceIdentifier<?>, ? extends DataObjectUpdate> updates) {
266 return !Sets.intersection(writer.getHandledChildTypes(), updates.keySet()).isEmpty();
270 private Writer<?> getWriter(@Nonnull final InstanceIdentifier<?> singleType) {
271 return writersById.get(singleType);