Pstream.H
Go to the documentation of this file.
1 /*---------------------------------------------------------------------------*\
2  ========= |
3  \\ / F ield | OpenFOAM: The Open Source CFD Toolbox
4  \\ / O peration |
5  \\ / A nd | www.openfoam.com
6  \\/ M anipulation |
7 -------------------------------------------------------------------------------
8  Copyright (C) 2011-2016 OpenFOAM Foundation
9  Copyright (C) 2016-2023 OpenCFD Ltd.
10 -------------------------------------------------------------------------------
11 License
12  This file is part of OpenFOAM.
13 
14  OpenFOAM is free software: you can redistribute it and/or modify it
15  under the terms of the GNU General Public License as published by
16  the Free Software Foundation, either version 3 of the License, or
17  (at your option) any later version.
18 
19  OpenFOAM is distributed in the hope that it will be useful, but WITHOUT
20  ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
21  FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
22  for more details.
23 
24  You should have received a copy of the GNU General Public License
25  along with OpenFOAM. If not, see <http://www.gnu.org/licenses/>.
26 
27 Class
28  Foam::Pstream
29 
30 Description
31  Inter-processor communications stream.
32 
33 SourceFiles
34  Pstream.C
35  PstreamBroadcast.C
36  PstreamGather.C
37  PstreamCombineGather.C
38  PstreamGatherList.C
39  PstreamExchangeConsensus.C
40  PstreamExchange.C
41 
42 \*---------------------------------------------------------------------------*/
43 
44 #ifndef Foam_Pstream_H
45 #define Foam_Pstream_H
46 
47 #include "UPstream.H"
48 #include "DynamicList.H"
49 
50 // Legacy
51 // #define Foam_Pstream_scatter_nobroadcast
52 
53 // * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
54 
55 namespace Foam
56 {
57 
58 /*---------------------------------------------------------------------------*\
59  Class Pstream Declaration
60 \*---------------------------------------------------------------------------*/
61 
62 class Pstream
63 :
64  public UPstream
65 {
66 protected:
67 
68  // Protected Data
69 
70  //- Allocated transfer buffer (can be used for send or receive)
72 
73 
74 public:
75 
76  //- Declare name of the class and its debug switch
77  ClassName("Pstream");
78 
79 
80  // Constructors
81 
82  //- Construct for given communication type, with optional buffer size
83  explicit Pstream
84  (
86  const label bufSize = 0
87  )
88  :
90  {
91  if (bufSize > 0)
92  {
93  transferBuf_.setCapacity(bufSize + 2*sizeof(scalar) + 1);
94  }
95  }
96 
97 
98  // Static Functions
99 
100  // Broadcast
101 
102  //- Broadcast buffer content to all processes in communicator.
103  using UPstream::broadcast;
104 
105  //- Broadcast content (contiguous or non-contiguous) to all
106  //- communicator ranks. Does nothing in \b non-parallel.
107  template<class Type>
108  static void broadcast
109  (
110  Type& value,
111  const label comm = UPstream::worldComm
112  );
113 
114  //- Broadcast multiple items to all communicator ranks.
115  //- Does nothing in \b non-parallel.
116  template<class Type, class... Args>
117  static void broadcasts(const label comm, Type& arg1, Args&&... args);
118 
119  //- Broadcast list content (contiguous or non-contiguous) to all
120  //- communicator ranks. Does nothing in \b non-parallel.
121  // For contiguous list data, this avoids serialization overhead,
122  // but at the expense of an additional broadcast call.
123  template<class ListType>
124  static void broadcastList
125  (
126  ListType& list,
127  const label comm = UPstream::worldComm
128  );
129 
130 
131  // Gather
132 
133  //- Gather (reduce) data, appyling \c bop to combine \c value
134  //- from different processors. The basis for Foam::reduce().
135  // Uses the specified communication schedule.
136  template<class T, class BinaryOp>
137  static void gather
138  (
139  const List<commsStruct>& comms,
140  T& value,
141  const BinaryOp& bop,
142  const int tag,
143  const label comm
144  );
145 
146  //- Gather (reduce) data, applying \c bop to combine \c value
147  //- from different processors. The basis for Foam::reduce().
148  // Uses linear/tree communication.
149  template<class T, class BinaryOp>
150  static void gather
151  (
152  T& value,
153  const BinaryOp& bop,
154  const int tag = UPstream::msgType(),
155  const label comm = UPstream::worldComm
156  );
157 
158 
159  // Gather/combine data
160  // Inplace combine values from processors.
161  // (Uses construct from Istream instead of <<)
162 
163  //- Gather data, applying \c cop to inplace combine \c value
164  //- from different processors.
165  // Uses the specified communication schedule.
166  template<class T, class CombineOp>
167  static void combineGather
168  (
169  const List<commsStruct>& comms,
170  T& value,
171  const CombineOp& cop,
172  const int tag,
173  const label comm
174  );
175 
176  //- Gather data, applying \c cop to inplace combine \c value
177  //- from different processors.
178  // Uses linear/tree communication.
179  template<class T, class CombineOp>
180  static void combineGather
181  (
182  T& value,
183  const CombineOp& cop,
184  const int tag = UPstream::msgType(),
185  const label comm = UPstream::worldComm
186  );
187 
188  //- Reduce inplace (cf. MPI Allreduce)
189  //- applying \c cop to inplace combine \c value
190  //- from different processors.
191  //- After completion all processors have the same data.
192  // Uses the specified communication schedule.
193  // Wraps combineGather/broadcast (may change in the future).
194  template<class T, class CombineOp>
195  static void combineReduce
196  (
197  const List<commsStruct>& comms,
198  T& value,
199  const CombineOp& cop,
200  const int tag = UPstream::msgType(),
201  const label comm = UPstream::worldComm
202  );
203 
204  //- Reduce inplace (cf. MPI Allreduce)
205  //- applying \c cop to inplace combine \c value
206  //- from different processors.
207  //- After completion all processors have the same data.
208  // Uses linear/tree communication.
209  // Wraps combineGather/broadcast (may change in the future).
210  template<class T, class CombineOp>
211  static void combineReduce
212  (
213  T& value,
214  const CombineOp& cop,
215  const int tag = UPstream::msgType(),
216  const label comm = UPstream::worldComm
217  );
218 
219  //- Same as Pstream::combineReduce
220  template<class T, class CombineOp>
221  static void combineAllGather
222  (
223  T& value,
224  const CombineOp& cop,
225  const int tag = UPstream::msgType(),
226  const label comm = UPstream::worldComm
227  )
228  {
229  Pstream::combineReduce(value, cop, tag, comm);
230  }
231 
232 
233  // Combine variants working on whole List at a time.
234 
235  template<class T, class CombineOp>
236  static void listCombineGather
237  (
238  const List<commsStruct>& comms,
239  List<T>& values,
240  const CombineOp& cop,
241  const int tag,
242  const label comm
243  );
244 
245  //- Like above but switches between linear/tree communication
246  template<class T, class CombineOp>
247  static void listCombineGather
248  (
249  List<T>& values,
250  const CombineOp& cop,
251  const int tag = UPstream::msgType(),
252  const label comm = UPstream::worldComm
253  );
254 
255  //- After completion all processors have the same data.
256  template<class T, class CombineOp>
257  static void listCombineReduce
258  (
259  List<T>& values,
260  const CombineOp& cop,
261  const int tag = UPstream::msgType(),
262  const label comm = UPstream::worldComm
263  );
264 
265  //- Same as Pstream::listCombineReduce
266  template<class T, class CombineOp>
267  static void listCombineAllGather
268  (
269  List<T>& values,
270  const CombineOp& cop,
271  const int tag = UPstream::msgType(),
272  const label comm = UPstream::worldComm
273  )
274  {
275  Pstream::listCombineReduce(values, cop, tag, comm);
276  }
277 
278 
279  // Combine variants working on whole map at a time.
280  // Container needs iterators, find() and insert methods defined.
281 
282  template<class Container, class CombineOp>
283  static void mapCombineGather
284  (
285  const List<commsStruct>& comms,
286  Container& values,
287  const CombineOp& cop,
288  const int tag,
289  const label comm
290  );
291 
292  //- Like above but switches between linear/tree communication
293  template<class Container, class CombineOp>
294  static void mapCombineGather
295  (
296  Container& values,
297  const CombineOp& cop,
298  const int tag = UPstream::msgType(),
299  const label comm = UPstream::worldComm
300  );
301 
302  //- Reduce inplace (cf. MPI Allreduce)
303  //- applying \c cop to inplace combine map \c values
304  //- from different processors.
305  //- After completion all processors have the same data.
306  // Uses the specified communication schedule.
307  // Wraps mapCombineGather/broadcast (may change in the future).
308  //- After completion all processors have the same data.
309  template<class Container, class CombineOp>
310  static void mapCombineReduce
311  (
312  Container& values,
313  const CombineOp& cop,
314  const int tag = UPstream::msgType(),
315  const label comm = UPstream::worldComm
316  );
317 
318  //- Same as Pstream::mapCombineReduce
319  template<class Container, class CombineOp>
320  static void mapCombineAllGather
321  (
322  Container& values,
323  const CombineOp& cop,
324  const int tag = UPstream::msgType(),
325  const label comm = UPstream::worldComm
326  )
327  {
328  Pstream::mapCombineReduce(values, cop, tag, comm);
329  }
330 
331 
332  // Gather/scatter keeping the individual processor data separate.
333  // The values is a List of size UPstream::nProcs() where
334  // values[UPstream::myProcNo()] is the data for the current processor.
335 
336  //- Gather data, but keep individual values separate.
337  //- Uses the specified communication schedule.
338  template<class T>
339  static void gatherList
340  (
341  const List<commsStruct>& comms,
342  List<T>& values,
343  const int tag,
344  const label comm
345  );
346 
347  //- Gather data, but keep individual values separate.
348  //- Uses linear/tree communication.
349  template<class T>
350  static void gatherList
351  (
352  List<T>& values,
353  const int tag = UPstream::msgType(),
354  const label comm = UPstream::worldComm
355  );
356 
357  //- Gather data, but keep individual values separate.
358  //- Uses linear/tree communication.
359  // After completion all processors have the same data.
360  // Wraps gatherList/scatterList (may change in the future).
361  template<class T>
362  static void allGatherList
363  (
365  const int tag = UPstream::msgType(),
366  const label comm = UPstream::worldComm
367  );
368 
369 
370  // Scatter
371 
372  //- Broadcast data: Distribute without modification.
373  // \note comms and tag parameters only used when
374  // Foam_Pstream_scatter_nobroadcast is defined
375  template<class T>
376  static void scatter
377  (
378  const List<commsStruct>& comms,
379  T& value,
380  const int tag,
381  const label comm
382  );
383 
384  //- Broadcast data: Distribute without modification.
385  // \note tag parameter only used when
386  // Foam_Pstream_scatter_nobroadcast is defined
387  template<class T>
388  static void scatter
389  (
390  T& value,
391  const int tag = UPstream::msgType(),
392  const label comm = UPstream::worldComm
393  );
394 
395  //- Broadcast data: Distribute without modification.
396  // \note tag parameter only used when
397  // Foam_Pstream_scatter_nobroadcast is defined
398  template<class T>
399  static void combineScatter
400  (
401  const List<commsStruct>& comms,
402  T& value,
403  const int tag,
404  const label comm
405  );
406 
407  //- Broadcast data: Distribute without modification.
408  // \note tag parameter only used when
409  // Foam_Pstream_scatter_nobroadcast is defined
410  template<class T>
411  static void combineScatter
412  (
413  T& value,
414  const int tag = UPstream::msgType(),
415  const label comm = UPstream::worldComm
416  );
417 
418  //- Broadcast data: Distribute without modification.
419  // \note comms and tag parameters only used when
420  // Foam_Pstream_scatter_nobroadcast is defined
421  template<class T>
422  static void listCombineScatter
423  (
424  const List<commsStruct>& comms,
425  List<T>& value,
426  const int tag,
427  const label comm
428  );
429 
430  //- Broadcast data: Distribute without modification.
431  // \note comms and tag parameters only used when
432  // Foam_Pstream_scatter_nobroadcast is defined
433  template<class T>
434  static void listCombineScatter
435  (
436  List<T>& value,
437  const int tag = UPstream::msgType(),
438  const label comm = UPstream::worldComm
439  );
440 
441  //- Broadcast data: Distribute without modification.
442  template<class Container>
443  static void mapCombineScatter
444  (
445  const List<commsStruct>& comms,
446  Container& values,
447  const int tag,
448  const label comm
449  );
450 
451  //- Like above but switches between linear/tree communication
452  template<class Container>
453  static void mapCombineScatter
454  (
455  Container& values,
456  const int tag = UPstream::msgType(),
457  const label comm = UPstream::worldComm
458  );
459 
460 
461  //- Scatter data. Reverse of gatherList
462  template<class T>
463  static void scatterList
464  (
465  const List<commsStruct>& comms,
466  List<T>& values,
467  const int tag,
468  const label comm
469  );
470 
471  //- Like above but switches between linear/tree communication
472  template<class T>
473  static void scatterList
474  (
475  List<T>& values,
476  const int tag = UPstream::msgType(),
477  const label comm = UPstream::worldComm
478  );
479 
480 
481  // Exchange
482 
483  //- Helper: exchange sizes of sendBufs for specified send/recv ranks
484  template<class Container>
485  static void exchangeSizes
486  (
487  const labelUList& sendProcs,
488  const labelUList& recvProcs,
489  const Container& sendBufs,
490  labelList& sizes,
491  const label tag = UPstream::msgType(),
492  const label comm = UPstream::worldComm
493  );
494 
495  //- Helper: exchange sizes of sendBufs for specified neighbour ranks
496  template<class Container>
497  static void exchangeSizes
498  (
499  const labelUList& neighProcs,
500  const Container& sendBufs,
501  labelList& sizes,
502  const label tag = UPstream::msgType(),
503  const label comm = UPstream::worldComm
504  );
505 
506  //- Helper: exchange sizes of sendBufs.
507  //- The sendBufs is the data per processor (in the communicator).
508  // Returns sizes of sendBufs on the sending processor.
509  // \n
510  // For \b non-parallel : copy sizes from sendBufs directly.
511  template<class Container>
512  static void exchangeSizes
513  (
514  const Container& sendBufs,
515  labelList& recvSizes,
516  const label comm = UPstream::worldComm
517  );
518 
519  //- Exchange the \b non-zero sizes of sendBufs entries (sparse map)
520  //- with other ranks in the communicator
521  //- using non-blocking consensus exchange.
522  //
523  // Since the recvData map always cleared before receipt and sizes
524  // of zero are never transmitted, a simple check
525  // of its keys is sufficient to determine connectivity.
526  //
527  // For \b non-parallel : copy size of rank (if it exists and non-empty)
528  // from sendBufs to recvSizes.
529  //
530  // \note The message tag is adjusted internally to improve uniqueness
531  template<class Container>
532  static void exchangeSizes
533  (
534  const Map<Container>& sendBufs,
535  Map<label>& recvSizes,
536  const label tag = UPstream::msgType(),
537  const label comm = UPstream::worldComm
538  );
539 
540  //- Helper: exchange \em contiguous data.
541  //- Sends sendBufs, receives into recvBufs using predetermined receive
542  //- sizing.
543  // If wait=true will wait for all transfers to finish.
544  template<class Container, class Type>
545  static void exchange
546  (
547  const UList<Container>& sendBufs,
548  const labelUList& recvSizes,
549  List<Container>& recvBufs,
550  const int tag = UPstream::msgType(),
551  const label comm = UPstream::worldComm,
552  const bool wait = true
553  );
554 
555  //- Exchange \em contiguous data.
556  //- Sends sendBufs, receives into recvBufs.
557  // Data provided and received as container.
558  //
559  // No internal guards or resizing.
560  template<class Container, class Type>
561  static void exchange
562  (
563  const Map<Container>& sendBufs,
564  const Map<label>& recvSizes,
565  Map<Container>& recvBufs,
566  const int tag = UPstream::msgType(),
567  const label comm = UPstream::worldComm,
568  const bool wait = true
569  );
570 
571  //- Exchange \em contiguous data.
572  //- Sends sendBufs, receives into recvBufs.
573  //- Determines sizes to receive.
574  // If wait=true will wait for all transfers to finish.
575  template<class Container, class Type>
576  static void exchange
577  (
578  const UList<Container>& sendBufs,
579  List<Container>& recvBufs,
580  const int tag = UPstream::msgType(),
581  const label comm = UPstream::worldComm,
582  const bool wait = true
583  );
584 
585  //- Exchange \em contiguous data.
586  //- Sends sendBufs, receives into recvBufs.
587  //- Determines sizes to receive.
588  // If wait=true will wait for all transfers to finish.
589  template<class Container, class Type>
590  static void exchange
591  (
592  const Map<Container>& sendBufs,
593  Map<Container>& recvBufs,
594  const int tag = UPstream::msgType(),
595  const label comm = UPstream::worldComm,
596  const bool wait = true
597  );
598 
599 
600  // Non-blocking exchange
601 
602  //- Exchange \em contiguous data using non-blocking consensus (NBX)
603  //- Sends sendData, receives into recvData.
604  //
605  // Each entry of the recvBufs list is cleared before receipt.
606  // For \b non-parallel : copy own rank from sendBufs to recvBufs.
607  //
608  // \note The message tag should be chosen to be a unique value
609  // since the implementation uses probing with ANY_SOURCE !!
610  template<class Container, class Type>
611  static void exchangeConsensus
612  (
613  const UList<Container>& sendBufs,
614  List<Container>& recvBufs,
615  const int tag,
616  const label comm,
617  const bool wait = true
618  );
619 
620  //- Exchange \em contiguous data using non-blocking consensus (NBX)
621  //- Sends sendData, receives into recvData.
622  //
623  // Each \em entry of the recvBufs map is cleared before receipt,
624  // but the map itself if not cleared. This allows the map to preserve
625  // allocated space (eg DynamicList entries) between calls.
626  //
627  // For \b non-parallel : copy own rank (if it exists and non-empty)
628  // from sendBufs to recvBufs.
629  //
630  // \note The message tag should be chosen to be a unique value
631  // since the implementation uses probing with ANY_SOURCE !!
632  template<class Container, class Type>
633  static void exchangeConsensus
634  (
635  const Map<Container>& sendBufs,
636  Map<Container>& recvBufs,
637  const int tag,
638  const label comm,
639  const bool wait = true
640  );
641 
642  //- Exchange \em contiguous data using non-blocking consensus (NBX)
643  //- Sends sendData returns receive information.
644  //
645  // For \b non-parallel : copy own rank (if it exists and non-empty)
646  //
647  // \note The message tag should be chosen to be a unique value
648  // since the implementation uses probing with ANY_SOURCE !!
649  template<class Container, class Type>
651  (
652  const Map<Container>& sendBufs,
653  const int tag,
654  const label comm,
655  const bool wait = true
656  );
657 };
658 
659 
660 // * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
661 
662 } // End namespace Foam
663 
664 // * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
665 
666 #ifdef NoRepository
667  #include "PstreamBroadcast.C"
668  #include "PstreamGather.C"
669  #include "PstreamCombineGather.C"
670  #include "PstreamGatherList.C"
671  #include "PstreamExchange.C"
672 #endif
673 
674 // * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
675 
676 #endif
677 
678 // ************************************************************************* //
static void mapCombineGather(const List< commsStruct > &comms, Container &values, const CombineOp &cop, const int tag, const label comm)
Gather data from all processors onto single processor according to some communication schedule (usual...
static void combineScatter(const List< commsStruct > &comms, T &value, const int tag, const label comm)
Broadcast data: Distribute without modification.
static void mapCombineScatter(const List< commsStruct > &comms, Container &values, const int tag, const label comm)
Broadcast data: Distribute without modification.
static void mapCombineAllGather(Container &values, const CombineOp &cop, const int tag=UPstream::msgType(), const label comm=UPstream::worldComm)
Same as Pstream::mapCombineReduce.
Definition: Pstream.H:364
commsTypes
Communications types.
Definition: UPstream.H:72
Pstream(const UPstream::commsTypes commsType, const label bufSize=0)
Construct for given communication type, with optional buffer size.
Definition: Pstream.H:85
static void broadcastList(ListType &list, const label comm=UPstream::worldComm)
Broadcast list content (contiguous or non-contiguous) to all communicator ranks. Does nothing in non-...
A 1D array of objects of type <T>, where the size of the vector is known and used for subscript bound...
Definition: BitOps.H:56
void setCapacity(const label len)
Alter the size of the underlying storage.
Definition: DynamicListI.H:303
static int & msgType() noexcept
Message tag of standard messages.
Definition: UPstream.H:1229
Gather data from all processors onto single processor according to some communication schedule (usual...
static label worldComm
Communicator for all ranks. May differ from commGlobal() if local worlds are in use.
Definition: UPstream.H:409
static void broadcast(Type &value, const label comm=UPstream::worldComm)
Broadcast content (contiguous or non-contiguous) to all communicator ranks. Does nothing in non-paral...
static void gather(const List< commsStruct > &comms, T &value, const BinaryOp &bop, const int tag, const label comm)
Gather (reduce) data, applying bop to combine value from different processors. The basis for Foam::re...
Definition: PstreamGather.C:37
static void allGatherList(List< T > &values, const int tag=UPstream::msgType(), const label comm=UPstream::worldComm)
Gather data, but keep individual values separate. Uses linear/tree communication. ...
List< T > values(const HashTable< T, Key, Hash > &tbl, const bool doSort=false)
List of values from HashTable, optionally sorted.
Definition: HashOps.H:164
static void gatherList(const List< commsStruct > &comms, List< T > &values, const int tag, const label comm)
Gather data, but keep individual values separate. Uses the specified communication schedule...
static void combineGather(const List< commsStruct > &comms, T &value, const CombineOp &cop, const int tag, const label comm)
Gather data, applying cop to inplace combine value from different processors.
static void listCombineScatter(const List< commsStruct > &comms, List< T > &value, const int tag, const label comm)
Broadcast data: Distribute without modification.
Inter-processor communications stream.
Definition: Pstream.H:57
Variant of gather, scatter. Normal gather uses:
static void listCombineAllGather(List< T > &values, const CombineOp &cop, const int tag=UPstream::msgType(), const label comm=UPstream::worldComm)
Same as Pstream::listCombineReduce.
Definition: Pstream.H:304
static void combineReduce(const List< commsStruct > &comms, T &value, const CombineOp &cop, const int tag=UPstream::msgType(), const label comm=UPstream::worldComm)
Reduce inplace (cf. MPI Allreduce) applying cop to inplace combine value from different processors...
commsTypes commsType() const noexcept
Get the communications type of the stream.
Definition: UPstream.H:1261
static void exchange(const UList< Container > &sendBufs, const labelUList &recvSizes, List< Container > &recvBufs, const int tag=UPstream::msgType(), const label comm=UPstream::worldComm, const bool wait=true)
Helper: exchange contiguous data. Sends sendBufs, receives into recvBufs using predetermined receive ...
void T(FieldField< Field, Type > &f1, const FieldField< Field, Type > &f2)
ClassName("Pstream")
Declare name of the class and its debug switch.
static void broadcasts(const label comm, Type &arg1, Args &&... args)
Broadcast multiple items to all communicator ranks. Does nothing in non-parallel. ...
static bool broadcast(char *buf, const std::streamsize bufSize, const label communicator, const int rootProcNo=masterNo())
Broadcast buffer contents to all processes in given communicator. The sizes must match on all process...
static void exchangeSizes(const labelUList &sendProcs, const labelUList &recvProcs, const Container &sendBufs, labelList &sizes, const label tag=UPstream::msgType(), const label comm=UPstream::worldComm)
Helper: exchange sizes of sendBufs for specified send/recv ranks.
static void combineAllGather(T &value, const CombineOp &cop, const int tag=UPstream::msgType(), const label comm=UPstream::worldComm)
Same as Pstream::combineReduce.
Definition: Pstream.H:252
static void exchangeConsensus(const UList< Container > &sendBufs, List< Container > &recvBufs, const int tag, const label comm, const bool wait=true)
Exchange contiguous data using non-blocking consensus (NBX) Sends sendData, receives into recvData...
static void listCombineGather(const List< commsStruct > &comms, List< T > &values, const CombineOp &cop, const int tag, const label comm)
DynamicList< char > transferBuf_
Allocated transfer buffer (can be used for send or receive)
Definition: Pstream.H:68
static void mapCombineReduce(Container &values, const CombineOp &cop, const int tag=UPstream::msgType(), const label comm=UPstream::worldComm)
Reduce inplace (cf. MPI Allreduce) applying cop to inplace combine map values from different processo...
Foam::argList args(argc, argv)
Inter-processor communications stream.
Definition: UPstream.H:60
Namespace for OpenFOAM.
static void scatter(const List< commsStruct > &comms, T &value, const int tag, const label comm)
Broadcast data: Distribute without modification.
A HashTable to objects of type <T> with a label key.
static void listCombineReduce(List< T > &values, const CombineOp &cop, const int tag=UPstream::msgType(), const label comm=UPstream::worldComm)
After completion all processors have the same data.
static void scatterList(const List< commsStruct > &comms, List< T > &values, const int tag, const label comm)
Scatter data. Reverse of gatherList.