HONEYCOMB-359 - Wildcarded writers
[honeycomb.git] / infra / translate-impl / src / main / java / io / fd / honeycomb / translate / impl / write / registry / FlatWriterRegistry.java
/*
 * Copyright (c) 2016 Cisco and/or its affiliates.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at:
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
16
17 package io.fd.honeycomb.translate.impl.write.registry;
18
19 import static com.google.common.base.Preconditions.checkArgument;
20 import static com.google.common.base.Preconditions.checkNotNull;
21
22 import com.google.common.base.Optional;
23 import com.google.common.collect.ImmutableMap;
24 import com.google.common.collect.Lists;
25 import com.google.common.collect.Multimap;
26 import com.google.common.collect.Sets;
27 import io.fd.honeycomb.translate.TranslationException;
28 import io.fd.honeycomb.translate.util.RWUtils;
29 import io.fd.honeycomb.translate.write.DataObjectUpdate;
30 import io.fd.honeycomb.translate.write.WriteContext;
31 import io.fd.honeycomb.translate.write.Writer;
32 import io.fd.honeycomb.translate.write.registry.UpdateFailedException;
33 import io.fd.honeycomb.translate.write.registry.WriterRegistry;
34 import java.util.ArrayList;
35 import java.util.Collection;
36 import java.util.Collections;
37 import java.util.HashSet;
38 import java.util.LinkedList;
39 import java.util.List;
40 import java.util.Map;
41 import java.util.Set;
42 import java.util.stream.Collectors;
43 import javax.annotation.Nonnull;
44 import javax.annotation.Nullable;
45 import javax.annotation.concurrent.ThreadSafe;
46 import org.opendaylight.yangtools.yang.binding.DataObject;
47 import org.opendaylight.yangtools.yang.binding.InstanceIdentifier;
48 import org.slf4j.Logger;
49 import org.slf4j.LoggerFactory;
50
51 /**
52  * Flat writer registry, delegating updates to writers in the order writers were submitted.
53  */
54 @ThreadSafe
55 final class FlatWriterRegistry implements WriterRegistry {
56
57     private static final Logger LOG = LoggerFactory.getLogger(FlatWriterRegistry.class);
58
59     private final Set<InstanceIdentifier<?>> writersOrderReversed;
60     private final Set<InstanceIdentifier<?>> writersOrder;
61     private final Map<InstanceIdentifier<?>, Writer<?>> writersById;
62     private final Set<? extends Writer<?>> writers;
63
64     /**
65      * Create flat registry instance.
66      *
67      * @param writersById immutable, ordered map of writers to use to process updates. Order of the writers has to be one in
68      *                which create and update operations should be handled. Deletes will be handled in reversed order.
69      *                All deletes are handled before handling all the updates.
70      */
71     FlatWriterRegistry(@Nonnull final ImmutableMap<InstanceIdentifier<?>, Writer<?>> writersById) {
72         this.writersById = writersById;
73         this.writersOrderReversed = Sets.newLinkedHashSet(Lists.reverse(Lists.newArrayList(writersById.keySet())));
74         this.writersOrder = writersById.keySet();
75         this.writers = writersById.entrySet().stream().map(Map.Entry::getValue).collect(Collectors.toSet());
76     }
77
78     @Override
79     public void processModifications(@Nonnull final DataObjectUpdates updates,
80                                      @Nonnull final WriteContext ctx) throws TranslationException {
81         if (updates.isEmpty()) {
82             return;
83         }
84
85         // ordered set of already processed nodes
86         final List<DataObjectUpdate> alreadyProcessed = new LinkedList<>();
87
88         // Optimization for single type updates, less consuming for pairing update with responsible writer,etc
89         if (updates.containsOnlySingleType()) {
90             // First process delete
91             singleUpdate(updates.getDeletes(), alreadyProcessed, ctx);
92
93             // Next is update
94             singleUpdate(updates.getUpdates(), alreadyProcessed, ctx);
95         } else {
96             // First process deletes
97             bulkUpdate(updates.getDeletes(), alreadyProcessed, ctx, writersOrderReversed);
98
99             // Next are updates
100             bulkUpdate(updates.getUpdates(), alreadyProcessed, ctx, writersOrder);
101         }
102
103         LOG.debug("Update successful for types: {}", updates.getTypeIntersection());
104         LOG.trace("Update successful for: {}", updates);
105     }
106
107     @Override
108     public boolean writerSupportsUpdate(@Nonnull final InstanceIdentifier<?> type) {
109         Writer writer = getWriter(type);
110
111         if (writer == null) {
112             writer = getSubtreeWriterResponsible(type);
113         }
114
115         return checkNotNull(writer, "Unable to find writer for %s", type).supportsDirectUpdate();
116     }
117
118     private void singleUpdate(
119             @Nonnull final Multimap<InstanceIdentifier<?>, ? extends DataObjectUpdate> updates,
120             @Nonnull final List<DataObjectUpdate> alreadyProcessed,
121             @Nonnull final WriteContext ctx) throws UpdateFailedException {
122         if (updates.isEmpty()) {
123             return;
124         }
125
126         DataObjectUpdate current = null;
127         final InstanceIdentifier<?> singleType = updates.keySet().iterator().next();
128         LOG.debug("Performing single type update for: {}", singleType);
129         Collection<? extends DataObjectUpdate> singleTypeUpdates = updates.get(singleType);
130         Writer<?> writer = getWriter(singleType);
131
132         if (writer == null) {
133             // This node must be handled by a subtree writer, find it and call it or else fail
134             writer = getSubtreeWriterResponsible(singleType);
135             checkArgument(writer != null, "Unable to process update. Missing writers for: %s",
136                     singleType);
137             singleTypeUpdates = getParentDataObjectUpdate(ctx, updates, writer);
138         }
139
140         try {
141             LOG.trace("Performing single type update with writer: {}", writer);
142
143             for (DataObjectUpdate singleUpdate : singleTypeUpdates) {
144                 current = singleUpdate;
145                 writer.processModification(singleUpdate.getId(), singleUpdate.getDataBefore(),
146                         singleUpdate.getDataAfter(),
147                         ctx);
148                 alreadyProcessed.add(singleUpdate);
149             }
150         } catch (Exception e) {
151             throw new UpdateFailedException(e, alreadyProcessed, current);
152         }
153     }
154
155     @Nullable
156     private Writer<?> getSubtreeWriterResponsible(final InstanceIdentifier<?> singleType) {
157         return writersById.values().stream()
158                 .filter(w -> w instanceof SubtreeWriter)
159                 .filter(w -> w.canProcess(singleType))
160                 .findFirst()
161                 .orElse(null);
162     }
163
164     private Collection<DataObjectUpdate> getParentDataObjectUpdate(final WriteContext ctx,
165                                                                    final Multimap<InstanceIdentifier<?>, ? extends DataObjectUpdate> updates,
166                                                                    final Writer<?> writer) {
167         // Now read data for subtree reader root, but first keyed ID is needed and that ID can be cut from updates
168         InstanceIdentifier<?> firstAffectedChildId = ((SubtreeWriter<?>) writer).getHandledChildTypes().stream()
169                 .filter(updates::containsKey)
170                 .map(unkeyedId -> updates.get(unkeyedId))
171                 .flatMap(doUpdates -> doUpdates.stream())
172                 .map(DataObjectUpdate::getId)
173                 .findFirst()
174                 .get();
175
176         final InstanceIdentifier<?> parentKeyedId =
177                 RWUtils.cutId(firstAffectedChildId, writer.getManagedDataObjectType());
178
179         final Optional<? extends DataObject> parentBefore = ctx.readBefore(parentKeyedId);
180         final Optional<? extends DataObject> parentAfter = ctx.readAfter(parentKeyedId);
181         return Collections.singleton(
182                 DataObjectUpdate.create(parentKeyedId, parentBefore.orNull(), parentAfter.orNull()));
183     }
184
185     private void bulkUpdate(
186             @Nonnull final Multimap<InstanceIdentifier<?>, ? extends DataObjectUpdate> updates,
187             @Nonnull final List<DataObjectUpdate> alreadyProcessed,
188             @Nonnull final WriteContext ctx,
189             @Nonnull final Set<InstanceIdentifier<?>> writersOrder) throws UpdateFailedException {
190         if (updates.isEmpty()) {
191             return;
192         }
193
194         // Check that all updates can be handled
195         checkAllTypesCanBeHandled(updates);
196
197         LOG.debug("Performing bulk update for: {}", updates.keySet());
198         DataObjectUpdate current = null;
199
200         // Iterate over all writers and call update if there are any related updates
201         for (InstanceIdentifier<?> writerType : writersOrder) {
202             Collection<? extends DataObjectUpdate> writersData = updates.get(writerType);
203             final Writer<?> writer = getWriter(writerType);
204
205             if (writersData.isEmpty()) {
206                 // If there are no data for current writer, but it is a SubtreeWriter and there are updates to
207                 // its children, still invoke it with its root data
208                 if (writer instanceof SubtreeWriter<?> && isAffected(((SubtreeWriter<?>) writer), updates)) {
209                     // Provide parent data for SubtreeWriter for further processing
210                     writersData = getParentDataObjectUpdate(ctx, updates, writer);
211                 } else {
212                     // Skipping unaffected writer
213                     // Alternative to this would be modification sort according to the order of writers
214                     continue;
215                 }
216             }
217
218             LOG.debug("Performing update for: {}", writerType);
219             LOG.trace("Performing update with writer: {}", writer);
220
221             for (DataObjectUpdate singleUpdate : writersData) {
222                 current = singleUpdate;
223                 try {
224                     writer.processModification(singleUpdate.getId(), singleUpdate.getDataBefore(),
225                             singleUpdate.getDataAfter(), ctx);
226                 } catch (Exception e) {
227                     throw new UpdateFailedException(e, alreadyProcessed, current);
228                 }
229                 alreadyProcessed.add(singleUpdate);
230                 LOG.trace("Update successful for type: {}", writerType);
231                 LOG.debug("Update successful for: {}", singleUpdate);
232             }
233         }
234     }
235
236     private void checkAllTypesCanBeHandled(
237             @Nonnull final Multimap<InstanceIdentifier<?>, ? extends DataObjectUpdate> updates) {
238
239         List<InstanceIdentifier<?>> noWriterNodes = new ArrayList<>();
240         for (InstanceIdentifier<?> id : updates.keySet()) {
241             // either there is direct writer for the iid
242             if (writersById.containsKey(id)) {
243                 continue;
244             } else {
245                 // or subtree one
246                 if (writers.stream().anyMatch(o -> o.canProcess(id))) {
247                     continue;
248                 }
249             }
250             noWriterNodes.add(id);
251         }
252
253         if (!noWriterNodes.isEmpty()) {
254             throw new IllegalArgumentException("Unable to process update. Missing writers for: " + noWriterNodes);
255         }
256     }
257
258     /**
259      * Check whether {@link SubtreeWriter} is affected by the updates.
260      *
261      * @return true if there are any updates to SubtreeWriter's child nodes (those marked by SubtreeWriter as being
262      * taken care of)
263      */
264     private static boolean isAffected(final SubtreeWriter<?> writer,
265                                       final Multimap<InstanceIdentifier<?>, ? extends DataObjectUpdate> updates) {
266         return !Sets.intersection(writer.getHandledChildTypes(), updates.keySet()).isEmpty();
267     }
268
269     @Nullable
270     private Writer<?> getWriter(@Nonnull final InstanceIdentifier<?> singleType) {
271         return writersById.get(singleType);
272     }
273
274 }