// TensorPadding.h
1 // This file is part of Eigen, a lightweight C++ template library
2 // for linear algebra.
3 //
4 // Copyright (C) 2014 Benoit Steiner <benoit.steiner.goog@gmail.com>
5 //
6 // This Source Code Form is subject to the terms of the Mozilla
7 // Public License v. 2.0. If a copy of the MPL was not distributed
8 // with this file, You can obtain one at http://mozilla.org/MPL/2.0/.
9 
10 #ifndef EIGEN_CXX11_TENSOR_TENSOR_PADDING_H
11 #define EIGEN_CXX11_TENSOR_TENSOR_PADDING_H
12 
13 namespace Eigen {
14 
22 namespace internal {
23 template<typename PaddingDimensions, typename XprType>
24 struct traits<TensorPaddingOp<PaddingDimensions, XprType> > : public traits<XprType>
25 {
26  typedef typename XprType::Scalar Scalar;
27  typedef traits<XprType> XprTraits;
28  typedef typename packet_traits<Scalar>::type Packet;
29  typedef typename XprTraits::StorageKind StorageKind;
30  typedef typename XprTraits::Index Index;
31  typedef typename XprType::Nested Nested;
32  typedef typename remove_reference<Nested>::type _Nested;
33  static const int NumDimensions = XprTraits::NumDimensions;
34  static const int Layout = XprTraits::Layout;
35 };
36 
37 template<typename PaddingDimensions, typename XprType>
38 struct eval<TensorPaddingOp<PaddingDimensions, XprType>, Eigen::Dense>
39 {
40  typedef const TensorPaddingOp<PaddingDimensions, XprType>& type;
41 };
42 
43 template<typename PaddingDimensions, typename XprType>
44 struct nested<TensorPaddingOp<PaddingDimensions, XprType>, 1, typename eval<TensorPaddingOp<PaddingDimensions, XprType> >::type>
45 {
46  typedef TensorPaddingOp<PaddingDimensions, XprType> type;
47 };
48 
49 } // end namespace internal
50 
51 
52 
53 template<typename PaddingDimensions, typename XprType>
54 class TensorPaddingOp : public TensorBase<TensorPaddingOp<PaddingDimensions, XprType>, ReadOnlyAccessors>
55 {
56  public:
57  typedef typename Eigen::internal::traits<TensorPaddingOp>::Scalar Scalar;
58  typedef typename Eigen::internal::traits<TensorPaddingOp>::Packet Packet;
59  typedef typename Eigen::NumTraits<Scalar>::Real RealScalar;
60  typedef typename XprType::CoeffReturnType CoeffReturnType;
61  typedef typename XprType::PacketReturnType PacketReturnType;
62  typedef typename Eigen::internal::nested<TensorPaddingOp>::type Nested;
63  typedef typename Eigen::internal::traits<TensorPaddingOp>::StorageKind StorageKind;
64  typedef typename Eigen::internal::traits<TensorPaddingOp>::Index Index;
65 
66  EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE TensorPaddingOp(const XprType& expr, const PaddingDimensions& padding_dims)
67  : m_xpr(expr), m_padding_dims(padding_dims) {}
68 
69  EIGEN_DEVICE_FUNC
70  const PaddingDimensions& padding() const { return m_padding_dims; }
71 
72  EIGEN_DEVICE_FUNC
73  const typename internal::remove_all<typename XprType::Nested>::type&
74  expression() const { return m_xpr; }
75 
76  protected:
77  typename XprType::Nested m_xpr;
78  const PaddingDimensions m_padding_dims;
79 };
80 
81 
82 // Eval as rvalue
// Evaluator for TensorPaddingOp. Coefficients whose coordinates fall inside
// a padded region evaluate to Scalar(0); all others are fetched from the
// underlying expression after shifting each coordinate by its leading pad.
template<typename PaddingDimensions, typename ArgType, typename Device>
struct TensorEvaluator<const TensorPaddingOp<PaddingDimensions, ArgType>, Device>
{
  typedef TensorPaddingOp<PaddingDimensions, ArgType> XprType;
  typedef typename XprType::Index Index;
  static const int NumDims = internal::array_size<PaddingDimensions>::value;
  typedef DSizes<Index, NumDims> Dimensions;

  enum {
    IsAligned = false,  // padding shifts indices, so alignment cannot be guaranteed
    PacketAccess = TensorEvaluator<ArgType, Device>::PacketAccess,
    Layout = TensorEvaluator<ArgType, Device>::Layout,
    CoordAccess = true,
  };

  EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE TensorEvaluator(const XprType& op, const Device& device)
      : m_impl(op.expression(), device), m_padding(op.padding())
  {
    // Compute output dimensions: each extent grows by leading + trailing pad.
    m_dimensions = m_impl.dimensions();
    for (int i = 0; i < NumDims; ++i) {
      m_dimensions[i] += m_padding[i].first + m_padding[i].second;
    }
    const typename TensorEvaluator<ArgType, Device>::Dimensions& input_dims = m_impl.dimensions();
    if (static_cast<int>(Layout) == static_cast<int>(ColMajor)) {
      // Column-major: dimension 0 is innermost. m_outputStrides carries one
      // extra entry (m_outputStrides[NumDims] == total output size) so the
      // packet paths can read the stride one past any dimension.
      m_inputStrides[0] = 1;
      m_outputStrides[0] = 1;
      for (int i = 1; i < NumDims; ++i) {
        m_inputStrides[i] = m_inputStrides[i-1] * input_dims[i-1];
        m_outputStrides[i] = m_outputStrides[i-1] * m_dimensions[i-1];
      }
      m_outputStrides[NumDims] = m_outputStrides[NumDims-1] * m_dimensions[NumDims-1];
    } else {
      // Row-major: dimension NumDims-1 is innermost. Output strides are
      // stored shifted by one (m_outputStrides[i+1] is the stride of
      // dimension i), so m_outputStrides[0] ends up as the total size.
      m_inputStrides[NumDims - 1] = 1;
      m_outputStrides[NumDims] = 1;
      for (int i = NumDims - 2; i >= 0; --i) {
        m_inputStrides[i] = m_inputStrides[i+1] * input_dims[i+1];
        m_outputStrides[i+1] = m_outputStrides[i+2] * m_dimensions[i+1];
      }
      m_outputStrides[0] = m_outputStrides[1] * m_dimensions[0];
    }
  }

  typedef typename XprType::Scalar Scalar;
  typedef typename XprType::CoeffReturnType CoeffReturnType;
  typedef typename XprType::PacketReturnType PacketReturnType;

  EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE const Dimensions& dimensions() const { return m_dimensions; }

  // Padding has no buffer of its own; just prepare the wrapped evaluator.
  EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE bool evalSubExprsIfNeeded(Scalar*) {
    m_impl.evalSubExprsIfNeeded(NULL);
    return true;
  }
  EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE void cleanup() {
    m_impl.cleanup();
  }

  // Scalar access by linear output index: peel off one coordinate at a time
  // (outermost dimension first). If any coordinate lies in a padded range,
  // the coefficient is zero; otherwise accumulate the shifted input index.
  EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE CoeffReturnType coeff(Index index) const
  {
    eigen_assert(index < dimensions().TotalSize());
    Index inputIndex = 0;
    if (static_cast<int>(Layout) == static_cast<int>(ColMajor)) {
      for (int i = NumDims - 1; i > 0; --i) {
        const Index idx = index / m_outputStrides[i];
        if (idx < m_padding[i].first || idx >= m_dimensions[i] - m_padding[i].second) {
          return Scalar(0);
        }
        inputIndex += (idx - m_padding[i].first) * m_inputStrides[i];
        index -= idx * m_outputStrides[i];
      }
      // Remaining index is the coordinate along dimension 0 (stride 1).
      if (index < m_padding[0].first || index >= m_dimensions[0] - m_padding[0].second) {
        return Scalar(0);
      }
      inputIndex += (index - m_padding[0].first);
    } else {
      for (int i = 0; i < NumDims - 1; ++i) {
        const Index idx = index / m_outputStrides[i+1];
        if (idx < m_padding[i].first || idx >= m_dimensions[i] - m_padding[i].second) {
          return Scalar(0);
        }
        inputIndex += (idx - m_padding[i].first) * m_inputStrides[i];
        index -= idx * m_outputStrides[i+1];
      }
      // Remaining index is the coordinate along the innermost dimension.
      if (index < m_padding[NumDims-1].first ||
          index >= m_dimensions[NumDims-1] - m_padding[NumDims-1].second) {
        return Scalar(0);
      }
      inputIndex += (index - m_padding[NumDims-1].first);
    }
    return m_impl.coeff(inputIndex);
  }

  // Dispatch packet loads to the layout-specific implementation.
  template<int LoadMode>
  EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE PacketReturnType packet(Index index) const
  {
    if (static_cast<int>(Layout) == static_cast<int>(ColMajor)) {
      return packetColMajor(index);
    }
    return packetRowMajor(index);
  }

  // Scalar access by explicit coordinates: same padding test per dimension,
  // but no divisions are needed since the coordinates are given directly.
  EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE CoeffReturnType coeff(const array<Index, NumDims>& coords) const
  {
    Index inputIndex;
    if (static_cast<int>(Layout) == static_cast<int>(ColMajor)) {
      {
        // Innermost dimension (stride 1) seeds inputIndex.
        const Index idx = coords[0];
        if (idx < m_padding[0].first || idx >= m_dimensions[0] - m_padding[0].second) {
          return Scalar(0);
        }
        inputIndex = idx - m_padding[0].first;
      }
      for (int i = 1; i < NumDims; ++i) {
        const Index idx = coords[i];
        if (idx < m_padding[i].first || idx >= m_dimensions[i] - m_padding[i].second) {
          return Scalar(0);
        }
        inputIndex += (idx - m_padding[i].first) * m_inputStrides[i];
      }
    } else {
      {
        // Innermost dimension in row-major is the last one.
        const Index idx = coords[NumDims-1];
        if (idx < m_padding[NumDims-1].first || idx >= m_dimensions[NumDims-1] - m_padding[NumDims-1].second) {
          return Scalar(0);
        }
        inputIndex = idx - m_padding[NumDims-1].first;
      }
      for (int i = NumDims - 2; i >= 0; --i) {
        const Index idx = coords[i];
        if (idx < m_padding[i].first || idx >= m_dimensions[i] - m_padding[i].second) {
          return Scalar(0);
        }
        inputIndex += (idx - m_padding[i].first) * m_inputStrides[i];
      }
    }
    return m_impl.coeff(inputIndex);
  }

  // No direct buffer: results are always computed on the fly.
  EIGEN_DEVICE_FUNC Scalar* data() const { return NULL; }

 protected:

  // Column-major packet load. For each dimension (outermost first) classify
  // the whole packet as: entirely in the left pad zone, entirely in the
  // right pad zone, or entirely between the two pad zones. Any packet that
  // straddles a zone boundary falls back to a scalar gather.
  EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE PacketReturnType packetColMajor(Index index) const
  {
    const int packetSize = internal::unpacket_traits<PacketReturnType>::size;
    EIGEN_STATIC_ASSERT(packetSize > 1, YOU_MADE_A_PROGRAMMING_MISTAKE)
    eigen_assert(index+packetSize-1 < dimensions().TotalSize());

    const Index initialIndex = index;
    Index inputIndex = 0;
    for (int i = NumDims - 1; i > 0; --i) {
      const Index first = index;
      const Index last = index + packetSize - 1;
      // Linear-index boundaries of dimension i's pad zones within its span:
      // [0, lastPaddedLeft) is left padding, [firstPaddedRight,
      // lastPaddedRight) is right padding, lastPaddedRight is the span end.
      const Index lastPaddedLeft = m_padding[i].first * m_outputStrides[i];
      const Index firstPaddedRight = (m_dimensions[i] - m_padding[i].second) * m_outputStrides[i];
      const Index lastPaddedRight = m_outputStrides[i+1];

      if (last < lastPaddedLeft) {
        // all the coefficient are in the padding zone.
        return internal::pset1<PacketReturnType>(Scalar(0));
      }
      else if (first >= firstPaddedRight && last < lastPaddedRight) {
        // all the coefficient are in the padding zone.
        return internal::pset1<PacketReturnType>(Scalar(0));
      }
      else if (first >= lastPaddedLeft && last < firstPaddedRight) {
        // all the coefficient are between the 2 padding zones.
        const Index idx = index / m_outputStrides[i];
        inputIndex += (idx - m_padding[i].first) * m_inputStrides[i];
        index -= idx * m_outputStrides[i];
      }
      else {
        // Every other case
        return packetWithPossibleZero(initialIndex);
      }
    }

    // Innermost dimension: if the packet fits strictly between the pads we
    // can issue one (unaligned) packet load from the input.
    const Index last = index + packetSize - 1;
    const Index first = index;
    const Index lastPaddedLeft = m_padding[0].first;
    const Index firstPaddedRight = (m_dimensions[0] - m_padding[0].second);
    const Index lastPaddedRight = m_outputStrides[1];

    if (last < lastPaddedLeft) {
      // all the coefficient are in the padding zone.
      return internal::pset1<PacketReturnType>(Scalar(0));
    }
    else if (first >= firstPaddedRight && last < lastPaddedRight) {
      // all the coefficient are in the padding zone.
      return internal::pset1<PacketReturnType>(Scalar(0));
    }
    else if (first >= lastPaddedLeft && last < firstPaddedRight) {
      // all the coefficient are between the 2 padding zones.
      inputIndex += (index - m_padding[0].first);
      return m_impl.template packet<Unaligned>(inputIndex);
    }
    // Every other case
    return packetWithPossibleZero(initialIndex);
  }

  // Row-major packet load: mirror of packetColMajor with the dimension
  // order reversed and output strides read through the shifted layout
  // (m_outputStrides[i+1] is the stride of dimension i).
  EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE PacketReturnType packetRowMajor(Index index) const
  {
    const int packetSize = internal::unpacket_traits<PacketReturnType>::size;
    EIGEN_STATIC_ASSERT(packetSize > 1, YOU_MADE_A_PROGRAMMING_MISTAKE)
    eigen_assert(index+packetSize-1 < dimensions().TotalSize());

    const Index initialIndex = index;
    Index inputIndex = 0;

    for (int i = 0; i < NumDims - 1; ++i) {
      const Index first = index;
      const Index last = index + packetSize - 1;
      const Index lastPaddedLeft = m_padding[i].first * m_outputStrides[i+1];
      const Index firstPaddedRight = (m_dimensions[i] - m_padding[i].second) * m_outputStrides[i+1];
      const Index lastPaddedRight = m_outputStrides[i];

      if (last < lastPaddedLeft) {
        // all the coefficient are in the padding zone.
        return internal::pset1<PacketReturnType>(Scalar(0));
      }
      else if (first >= firstPaddedRight && last < lastPaddedRight) {
        // all the coefficient are in the padding zone.
        return internal::pset1<PacketReturnType>(Scalar(0));
      }
      else if (first >= lastPaddedLeft && last < firstPaddedRight) {
        // all the coefficient are between the 2 padding zones.
        const Index idx = index / m_outputStrides[i+1];
        inputIndex += (idx - m_padding[i].first) * m_inputStrides[i];
        index -= idx * m_outputStrides[i+1];
      }
      else {
        // Every other case
        return packetWithPossibleZero(initialIndex);
      }
    }

    // Innermost (last) dimension: single packet load when strictly interior.
    const Index last = index + packetSize - 1;
    const Index first = index;
    const Index lastPaddedLeft = m_padding[NumDims-1].first;
    const Index firstPaddedRight = (m_dimensions[NumDims-1] - m_padding[NumDims-1].second);
    const Index lastPaddedRight = m_outputStrides[NumDims-1];

    if (last < lastPaddedLeft) {
      // all the coefficient are in the padding zone.
      return internal::pset1<PacketReturnType>(Scalar(0));
    }
    else if (first >= firstPaddedRight && last < lastPaddedRight) {
      // all the coefficient are in the padding zone.
      return internal::pset1<PacketReturnType>(Scalar(0));
    }
    else if (first >= lastPaddedLeft && last < firstPaddedRight) {
      // all the coefficient are between the 2 padding zones.
      inputIndex += (index - m_padding[NumDims-1].first);
      return m_impl.template packet<Unaligned>(inputIndex);
    }
    // Every other case
    return packetWithPossibleZero(initialIndex);
  }

  // Slow path: gather the packet one coefficient at a time through coeff(),
  // which yields zeros in padded regions, then load from a local buffer.
  EIGEN_DEVICE_FUNC EIGEN_STRONG_INLINE PacketReturnType packetWithPossibleZero(Index index) const
  {
    const int packetSize = internal::unpacket_traits<PacketReturnType>::size;
    EIGEN_ALIGN_MAX typename internal::remove_const<CoeffReturnType>::type values[packetSize];
    for (int i = 0; i < packetSize; ++i) {
      values[i] = coeff(index+i);
    }
    PacketReturnType rslt = internal::pload<PacketReturnType>(values);
    return rslt;
  }

  Dimensions m_dimensions;                 // output (padded) dimensions
  array<Index, NumDims+1> m_outputStrides; // output strides, one extra slot (see ctor)
  array<Index, NumDims> m_inputStrides;    // strides of the wrapped input
  TensorEvaluator<ArgType, Device> m_impl; // evaluator for the padded expression
  PaddingDimensions m_padding;             // per-dim (leading, trailing) pad amounts
};
359 
360 
361 
362 
363 } // end namespace Eigen
364 
365 #endif // EIGEN_CXX11_TENSOR_TENSOR_PADDING_H
// Namespace containing all symbols from the Eigen library.
// Definition: CXX11Meta.h:13