honeycomb.git @ 146ddb9c5b77d4c69d9e7679e29f7104611fef0a
infra/translate-impl/src/main/java/io/fd/honeycomb/translate/impl/write/registry/FlatWriterRegistry.java
/*
 * Copyright (c) 2016 Cisco and/or its affiliates.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at:
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package io.fd.honeycomb.translate.impl.write.registry;

import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;

import com.google.common.base.Optional;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import com.google.common.collect.Multimap;
import com.google.common.collect.Sets;
import io.fd.honeycomb.translate.TranslationException;
import io.fd.honeycomb.translate.util.RWUtils;
import io.fd.honeycomb.translate.write.DataObjectUpdate;
import io.fd.honeycomb.translate.write.WriteContext;
import io.fd.honeycomb.translate.write.Writer;
import io.fd.honeycomb.translate.write.registry.UpdateFailedException;
import io.fd.honeycomb.translate.write.registry.WriterRegistry;
import java.util.Collection;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import javax.annotation.concurrent.ThreadSafe;
import org.opendaylight.yangtools.yang.binding.DataObject;
import org.opendaylight.yangtools.yang.binding.InstanceIdentifier;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Flat writer registry, delegating updates to writers in the order in which the writers were submitted.
 */
@ThreadSafe
final class FlatWriterRegistry implements WriterRegistry {

    private static final Logger LOG = LoggerFactory.getLogger(FlatWriterRegistry.class);

    // All types handled by writers, either directly or as children
    private final ImmutableSet<InstanceIdentifier<?>> handledTypes;

    private final Set<InstanceIdentifier<?>> writersOrderReversed;
    private final Set<InstanceIdentifier<?>> writersOrder;
    private final Map<InstanceIdentifier<?>, Writer<?>> writers;

    /**
     * Create flat registry instance.
     *
     * @param writers immutable, ordered map of writers used to process updates. The writers must be ordered as
     *                create and update operations should be handled; deletes are handled in the reverse order.
     *                All deletes are handled before any updates.
     */
    FlatWriterRegistry(@Nonnull final ImmutableMap<InstanceIdentifier<?>, Writer<?>> writers) {
        this.writers = writers;
        this.writersOrderReversed = Sets.newLinkedHashSet(Lists.reverse(Lists.newArrayList(writers.keySet())));
        this.writersOrder = writers.keySet();
        this.handledTypes = getAllHandledTypes(writers);
    }
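    /*
     * Illustrative sketch only, not part of the original code: the registry relies on the iteration order of the
     * supplied ImmutableMap, so callers are expected to register writers in create/update order. The writer
     * variables below (interfaceWriter, addressWriter) are hypothetical placeholders:
     *
     *   final ImmutableMap<InstanceIdentifier<?>, Writer<?>> ordered = ImmutableMap.of(
     *           interfaceWriter.getManagedDataObjectType(), interfaceWriter, // handled first on create/update
     *           addressWriter.getManagedDataObjectType(), addressWriter);    // handled second; first on delete
     *   final FlatWriterRegistry registry = new FlatWriterRegistry(ordered);
     */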

    private static ImmutableSet<InstanceIdentifier<?>> getAllHandledTypes(
            @Nonnull final ImmutableMap<InstanceIdentifier<?>, Writer<?>> writers) {
        final ImmutableSet.Builder<InstanceIdentifier<?>> handledTypesBuilder = ImmutableSet.builder();
        for (Map.Entry<InstanceIdentifier<?>, Writer<?>> writerEntry : writers.entrySet()) {
            final InstanceIdentifier<?> writerType = writerEntry.getKey();
            final Writer<?> writer = writerEntry.getValue();
            handledTypesBuilder.add(writerType);
            if (writer instanceof SubtreeWriter) {
                handledTypesBuilder.addAll(((SubtreeWriter<?>) writer).getHandledChildTypes());
            }
        }
        return handledTypesBuilder.build();
    }

    @Override
    public void processModifications(@Nonnull final DataObjectUpdates updates,
                                     @Nonnull final WriteContext ctx) throws TranslationException {
        if (updates.isEmpty()) {
            return;
        }

        // Ordered list of already processed updates
        final List<DataObjectUpdate> alreadyProcessed = new LinkedList<>();

        // Optimization for single-type updates: pairing the updates with the responsible writer is cheaper, etc.
        if (updates.containsOnlySingleType()) {
            // First process deletes
            singleUpdate(updates.getDeletes(), alreadyProcessed, ctx);

            // Then process updates
            singleUpdate(updates.getUpdates(), alreadyProcessed, ctx);
        } else {
            // First process deletes
            bulkUpdate(updates.getDeletes(), alreadyProcessed, ctx, writersOrderReversed);

            // Then process updates
            bulkUpdate(updates.getUpdates(), alreadyProcessed, ctx, writersOrder);
        }

        LOG.debug("Update successful for types: {}", updates.getTypeIntersection());
        LOG.trace("Update successful for: {}", updates);
    }
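    /*
     * Illustrative note, not part of the original code: given writers registered in the order [A, B, C],
     * processModifications() first applies all deletes using the reversed writer order (C, B, A) and only then
     * applies creates/updates in registration order (A, B, C). A hypothetical multi-type dispatch:
     *
     *   registry.processModifications(updates, ctx);
     *   // internally: bulkUpdate(updates.getDeletes(), ..., writersOrderReversed)  -> C, B, A
     *   //             bulkUpdate(updates.getUpdates(), ..., writersOrder)          -> A, B, C
     */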

    @Override
    public boolean writerSupportsUpdate(@Nonnull final InstanceIdentifier<?> type) {
        Writer<?> writer = getWriter(type);

        if (writer == null) {
            writer = getSubtreeWriterResponsible(type);
        }

        return checkNotNull(writer, "Unable to find writer for %s", type).supportsDirectUpdate();
    }

    private void singleUpdate(
            @Nonnull final Multimap<InstanceIdentifier<?>, ? extends DataObjectUpdate> updates,
            @Nonnull final List<DataObjectUpdate> alreadyProcessed,
            @Nonnull final WriteContext ctx) throws UpdateFailedException {
        if (updates.isEmpty()) {
            return;
        }

        DataObjectUpdate current = null;
        final InstanceIdentifier<?> singleType = updates.keySet().iterator().next();
        LOG.debug("Performing single type update for: {}", singleType);
        Collection<? extends DataObjectUpdate> singleTypeUpdates = updates.get(singleType);
        Writer<?> writer = getWriter(singleType);

        if (writer == null) {
            // This node must be handled by a subtree writer; find it and invoke it, or fail otherwise
            checkArgument(handledTypes.contains(singleType), "Unable to process update. Missing writers for: %s",
                    singleType);
            writer = getSubtreeWriterResponsible(singleType);
            singleTypeUpdates = getParentDataObjectUpdate(ctx, updates, writer);
        }

        try {
            LOG.trace("Performing single type update with writer: {}", writer);

            for (DataObjectUpdate singleUpdate : singleTypeUpdates) {
                current = singleUpdate;
                writer.processModification(singleUpdate.getId(), singleUpdate.getDataBefore(),
                        singleUpdate.getDataAfter(), ctx);
                alreadyProcessed.add(singleUpdate);
            }
        } catch (Exception e) {
            throw new UpdateFailedException(e, alreadyProcessed, current);
        }
    }

    @Nullable
    private Writer<?> getSubtreeWriterResponsible(final InstanceIdentifier<?> singleType) {
        return writers.values().stream()
                .filter(w -> w instanceof SubtreeWriter)
                .filter(w -> ((SubtreeWriter<?>) w).getHandledChildTypes().contains(singleType))
                .findFirst()
                .orElse(null);
    }

    private Collection<DataObjectUpdate> getParentDataObjectUpdate(final WriteContext ctx,
                                                                   final Multimap<InstanceIdentifier<?>, ? extends DataObjectUpdate> updates,
                                                                   final Writer<?> writer) {
        // Read data for the subtree writer root. A keyed ID is needed first; it can be cut from one of the child updates
        final InstanceIdentifier<?> firstAffectedChildId = ((SubtreeWriter<?>) writer).getHandledChildTypes().stream()
                .filter(updates::containsKey)
                .map(updates::get)
                .flatMap(Collection::stream)
                .map(DataObjectUpdate::getId)
                .findFirst()
                .get();

        final InstanceIdentifier<?> parentKeyedId =
                RWUtils.cutId(firstAffectedChildId, writer.getManagedDataObjectType());

        final Optional<? extends DataObject> parentBefore = ctx.readBefore(parentKeyedId);
        final Optional<? extends DataObject> parentAfter = ctx.readAfter(parentKeyedId);
        return Collections.singleton(
                DataObjectUpdate.create(parentKeyedId, parentBefore.orNull(), parentAfter.orNull()));
    }
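    /*
     * Illustrative sketch only, not part of the original code: RWUtils.cutId trims a child identifier down to the
     * subtree writer's managed root type while keeping the keys on the path. The YANG types below are hypothetical:
     *
     *   childId        = .../Interfaces/Interface[name=eth0]/Ipv4/Address
     *   managedRoot    = writer.getManagedDataObjectType()       // .../Interfaces/Interface (unkeyed)
     *   parentKeyedId  = RWUtils.cutId(childId, managedRoot)     // .../Interfaces/Interface[name=eth0]
     */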

    private void bulkUpdate(
            @Nonnull final Multimap<InstanceIdentifier<?>, ? extends DataObjectUpdate> updates,
            @Nonnull final List<DataObjectUpdate> alreadyProcessed,
            @Nonnull final WriteContext ctx,
            @Nonnull final Set<InstanceIdentifier<?>> writersOrder) throws UpdateFailedException {
        if (updates.isEmpty()) {
            return;
        }

        // Check that all updates can be handled
        checkAllTypesCanBeHandled(updates);

        LOG.debug("Performing bulk update for: {}", updates.keySet());
        DataObjectUpdate current = null;

        // Iterate over all writers and call update if there are any related updates
        for (InstanceIdentifier<?> writerType : writersOrder) {
            Collection<? extends DataObjectUpdate> writersData = updates.get(writerType);
            final Writer<?> writer = getWriter(writerType);

            if (writersData.isEmpty()) {
                // If there is no data for the current writer, but it is a SubtreeWriter and there are updates to
                // its children, still invoke it with its root data
                if (writer instanceof SubtreeWriter<?> && isAffected((SubtreeWriter<?>) writer, updates)) {
                    // Provide parent data for the SubtreeWriter for further processing
                    writersData = getParentDataObjectUpdate(ctx, updates, writer);
                } else {
                    // Skip the unaffected writer.
                    // An alternative would be to sort the modifications according to the order of the writers
                    continue;
                }
            }

            LOG.debug("Performing update for: {}", writerType);
            LOG.trace("Performing update with writer: {}", writer);

            for (DataObjectUpdate singleUpdate : writersData) {
                current = singleUpdate;
                try {
                    writer.processModification(singleUpdate.getId(), singleUpdate.getDataBefore(),
                            singleUpdate.getDataAfter(), ctx);
                } catch (Exception e) {
                    throw new UpdateFailedException(e, alreadyProcessed, current);
                }
                alreadyProcessed.add(singleUpdate);
                LOG.trace("Update successful for type: {}", writerType);
                LOG.debug("Update successful for: {}", singleUpdate);
            }
        }
    }
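    /*
     * Illustrative note, not part of the original code: even when a SubtreeWriter has no update keyed directly by
     * its root type, it is still invoked if any of its handled child types appear in the updates (see isAffected
     * below). In that case the writer receives the reconstructed parent data from getParentDataObjectUpdate
     * instead of the raw child updates.
     */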

    private void checkAllTypesCanBeHandled(
            @Nonnull final Multimap<InstanceIdentifier<?>, ? extends DataObjectUpdate> updates) {
        if (!handledTypes.containsAll(updates.keySet())) {
            final Sets.SetView<InstanceIdentifier<?>> missingWriters = Sets.difference(updates.keySet(), handledTypes);
            LOG.warn("Unable to process update. Missing writers for: {}", missingWriters);
            throw new IllegalArgumentException("Unable to process update. Missing writers for: " + missingWriters);
        }
    }

    /**
     * Check whether {@link SubtreeWriter} is affected by the updates.
     *
     * @return true if there are any updates to SubtreeWriter's child nodes (those marked by SubtreeWriter as being
     * taken care of)
     */
    private static boolean isAffected(final SubtreeWriter<?> writer,
                                      final Multimap<InstanceIdentifier<?>, ? extends DataObjectUpdate> updates) {
        return !Sets.intersection(writer.getHandledChildTypes(), updates.keySet()).isEmpty();
    }

    @Nullable
    private Writer<?> getWriter(@Nonnull final InstanceIdentifier<?> singleType) {
        return writers.get(singleType);
    }

}