dune-common  2.3.1
parallel/collectivecommunication.hh
Go to the documentation of this file.
1 // -*- tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 2 -*-
2 // vi: set et ts=4 sw=2 sts=2:
3 #ifndef DUNE_COLLECTIVECOMMUNICATION_HH
4 #define DUNE_COLLECTIVECOMMUNICATION_HH
5 
12 #include <iostream>
13 #include <complex>
14 #include <algorithm>
15 
17 
37 namespace Dune
38 {
39 
40  /* define some type that definitely differs from MPI_Comm */
41  struct No_Comm {};
42 
43 
70  template<typename C>
72  {
73  public:
76  {}
78  {}
79 
81  int rank () const
82  {
83  return 0;
84  }
85 
87  int size () const
88  {
89  return 1;
90  }
91 
95  template<typename T>
96  T sum (T& in) const // MPI does not know about const :-(
97  {
98  return in;
99  }
100 
104  template<typename T>
105  int sum (T* inout, int len) const
106  {
107  return 0;
108  }
109 
113  template<typename T>
114  T prod (T& in) const // MPI does not know about const :-(
115  {
116  return in;
117  }
118 
123  template<typename T>
124  int prod (T* inout, int len) const
125  {
126  return 0;
127  }
128 
132  template<typename T>
133  T min (T& in) const // MPI does not know about const :-(
134  {
135  return in;
136  }
137 
142  template<typename T>
143  int min (T* inout, int len) const
144  {
145  return 0;
146  }
147 
151  template<typename T>
152  T max (T& in) const // MPI does not know about const :-(
153  {
154  return in;
155  }
156 
161  template<typename T>
162  int max (T* inout, int len) const
163  {
164  return 0;
165  }
166 
169  int barrier () const
170  {
171  return 0;
172  }
173 
176  template<typename T>
177  int broadcast (T* inout, int len, int root) const
178  {
179  return 0;
180  }
181 
193  template<typename T>
194  int gather (T* in, T* out, int len, int root) const // note out must have same size as in
195  {
196  for (int i=0; i<len; i++)
197  out[i] = in[i];
198  return 0;
199  }
200 
213  template<typename T>
214  int scatter (T* send, T* recv, int len, int root) const // note out must have same size as in
215  {
216  for (int i=0; i<len; i++)
217  recv[i] = send[i];
218  return 0;
219  }
220 
233  template<typename T>
234  int allgather(T* sbuf, int count, T* rbuf) const
235  {
236  for(T* end=sbuf+count; sbuf < end; ++sbuf, ++rbuf)
237  *rbuf=*sbuf;
238  return 0;
239  }
240 
252  template<typename BinaryFunction, typename Type>
253  int allreduce(Type* inout, int len) const
254  {
255  return 0;
256  }
257 
270  template<typename BinaryFunction, typename Type>
271  void allreduce(Type* in, Type* out, int len) const
272  {
273  std::copy(in, in+len, out);
274  return;
275  }
276 
277  };
278 }
279 
280 #endif
int max(T *inout, int len) const
Compute the maximum over all processes for each component of an array and return the result in every process.
Definition: parallel/collectivecommunication.hh:162
int sum(T *inout, int len) const
Compute the sum over all processes for each component of an array and return the result in every process.
Definition: parallel/collectivecommunication.hh:105
Dune namespace.
Definition: alignment.hh:13
A few common exception classes.
int barrier() const
Wait until all processes have arrived at this point in the program.
Definition: parallel/collectivecommunication.hh:169
int allgather(T *sbuf, int count, T *rbuf) const
Gathers data from all tasks and distribute it to all.
Definition: parallel/collectivecommunication.hh:234
CollectiveCommunication()
Construct default object.
Definition: parallel/collectivecommunication.hh:75
int prod(T *inout, int len) const
Compute the product over all processes for each component of an array and return the result in every process.
Definition: parallel/collectivecommunication.hh:124
int min(T *inout, int len) const
Compute the minimum over all processes for each component of an array and return the result in every process.
Definition: parallel/collectivecommunication.hh:143
T prod(T &in) const
Compute the product of the argument over all processes and return the result in every process.
Definition: parallel/collectivecommunication.hh:114
int rank() const
Return rank, is between 0 and size()-1.
Definition: parallel/collectivecommunication.hh:81
Definition: parallel/collectivecommunication.hh:41
T min(T &in) const
Compute the minimum of the argument over all processes and return the result in every process.
Definition: parallel/collectivecommunication.hh:133
T max(T &in) const
Compute the maximum of the argument over all processes and return the result in every process.
Definition: parallel/collectivecommunication.hh:152
int gather(T *in, T *out, int len, int root) const
Gather arrays on root task.
Definition: parallel/collectivecommunication.hh:194
int scatter(T *send, T *recv, int len, int root) const
Scatter an array from a root to all other tasks.
Definition: parallel/collectivecommunication.hh:214
T sum(T &in) const
Compute the sum of the argument over all processes and return the result in every process.
Definition: parallel/collectivecommunication.hh:96
int allreduce(Type *inout, int len) const
Compute something over all processes for each component of an array and return the result in every process.
Definition: parallel/collectivecommunication.hh:253
int size() const
Number of processes in set, is greater than 0.
Definition: parallel/collectivecommunication.hh:87
Collective communication interface and sequential default implementation.
Definition: parallel/collectivecommunication.hh:71
int broadcast(T *inout, int len, int root) const
Distribute an array from the process with rank root to all other processes.
Definition: parallel/collectivecommunication.hh:177
void allreduce(Type *in, Type *out, int len) const
Compute something over all processes for each component of an array and return the result in every process.
Definition: parallel/collectivecommunication.hh:271
CollectiveCommunication(const C &)
Definition: parallel/collectivecommunication.hh:77