ParallelWellInfo.hpp
/*
  Copyright 2020 OPM-OP AS

  This file is part of the Open Porous Media project (OPM).

  OPM is free software: you can redistribute it and/or modify
  it under the terms of the GNU General Public License as published by
  the Free Software Foundation, either version 3 of the License, or
  (at your option) any later version.

  OPM is distributed in the hope that it will be useful,
  but WITHOUT ANY WARRANTY; without even the implied warranty of
  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  GNU General Public License for more details.

  You should have received a copy of the GNU General Public License
  along with OPM. If not, see <http://www.gnu.org/licenses/>.
*/
#ifndef OPM_PARALLELWELLINFO_HEADER_INCLUDED
#define OPM_PARALLELWELLINFO_HEADER_INCLUDED

#include <dune/common/version.hh>
#include <dune/common/parallel/mpihelper.hh>
#include <dune/common/parallel/plocalindex.hh>
#include <dune/istl/owneroverlapcopy.hh>

#include <opm/simulators/utils/ParallelCommunication.hpp>

#include <opm/common/ErrorMacros.hpp>

#include <algorithm>
#include <cassert>
#include <iterator>
#include <memory>
#include <numeric>
#include <string>
#include <utility>
#include <vector>

namespace Opm
{

class Well;

/// \brief Class to facilitate getting values associated with the above/below perforation.
class CommunicateAboveBelow
{
public:
    enum Attribute {
        owner = 1,
        overlap = 2,
        // Older versions of DUNE have a bug that skips entries in
        // RemoteIndices whose local attributes match; therefore we
        // add one more attribute version for the above entries.
        ownerAbove = 3,
        overlapAbove = 4
    };
    using LocalIndex = Dune::ParallelLocalIndex<Attribute>;
    using IndexSet = Dune::ParallelIndexSet<int,LocalIndex,50>;
#if HAVE_MPI
    using RI = Dune::RemoteIndices<IndexSet>;
#endif

    explicit CommunicateAboveBelow(const Parallel::Communication& comm);

    /// \brief Adds information about the original index of the perforations in the ECL Schedule.
    void pushBackEclIndex(int above, int current, bool owner=true);

    /// \brief Clear all the parallel information.
    void clear();

    /// \brief Indicates that we will add the index information.
    void beginReset();

    /// \brief Indicates that the index information is complete.
    int endReset();
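
    // A minimal sketch of the reset lifecycle (the object name and the
    // index values are hypothetical; -1 mirrors
    // ParallelWellInfo::INVALID_ECL_INDEX for a perforation with nothing
    // above it):
    //
    //   CommunicateAboveBelow cab(comm);
    //   cab.beginReset();
    //   cab.pushBackEclIndex(-1, 0); // topmost perforation: nothing above
    //   cab.pushBackEclIndex(0, 1);  // perforation 1 lies below perforation 0
    //   int num_local_perfs = cab.endReset();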

    /// \brief Creates an array of values for the perforation above.
    std::vector<double> communicateAbove(double first_value,
                                         const double* current,
                                         std::size_t size);

    /// \brief Creates an array of values for the perforation below.
    std::vector<double> communicateBelow(double first_value,
                                         const double* current,
                                         std::size_t size);
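
    // Usage sketch (hypothetical names): above[i] receives the value at the
    // perforation above perforation i; the topmost one receives first_value:
    //
    //   std::vector<double> pressure = ...; // one entry per local perforation
    //   auto above = cab.communicateAbove(ref_value, pressure.data(),
    //                                     pressure.size());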

    /// \brief Do a (in place) partial sum on values attached to all perforations.
    ///
    /// The global index used in the index set current_indices_ is the index
    /// of the perforation in the ECL Schedule definition. This is assumed to
    /// give the topological order used for the partial sum.
    template<class RAIterator>
    void partialSumPerfValues(RAIterator begin, RAIterator end) const
    {
        if (this->comm_.size() < 2)
        {
            std::partial_sum(begin, end, begin);
        }
        else
        {
#if HAVE_MPI
            // allgather the ECL index of each owned perforation together
            // with its value.
            using Value = typename std::iterator_traits<RAIterator>::value_type;
            std::vector<int> sizes(comm_.size());
            std::vector<int> displ(comm_.size() + 1, 0);
            using GlobalIndex = typename IndexSet::IndexPair::GlobalIndex;
            using Pair = std::pair<GlobalIndex,Value>;
            std::vector<Pair> my_pairs;
            my_pairs.reserve(current_indices_.size());
            for (const auto& pair : current_indices_)
            {
                if (pair.local().attribute() == owner)
                {
                    my_pairs.emplace_back(pair.global(), begin[pair.local()]);
                }
            }
            int mySize = my_pairs.size();
            comm_.allgather(&mySize, 1, sizes.data());
            std::partial_sum(sizes.begin(), sizes.end(), displ.begin()+1);
            std::vector<Pair> global_pairs(displ.back());
            comm_.allgatherv(my_pairs.data(), my_pairs.size(), global_pairs.data(),
                             sizes.data(), displ.data());
            // sort the complete range by ECL index to get the correct ordering
            std::sort(global_pairs.begin(), global_pairs.end(),
                      [](const Pair& p1, const Pair& p2) { return p1.first < p2.first; });
            std::vector<Value> sums(global_pairs.size());
            std::transform(global_pairs.begin(), global_pairs.end(), sums.begin(),
                           [](const Pair& p) { return p.second; });
            std::partial_sum(sums.begin(), sums.end(), sums.begin());
            // assign the values (both ranges are sorted by the ECL index)
            auto global_pair = global_pairs.begin();
            for (const auto& pair : current_indices_)
            {
                global_pair = std::lower_bound(global_pair, global_pairs.end(),
                                               pair.global(),
                                               [](const Pair& val1, const GlobalIndex& val2)
                                               { return val1.first < val2; });
                assert(global_pair != global_pairs.end());
                assert(global_pair->first == pair.global());
                begin[pair.local()] = sums[global_pair - global_pairs.begin()];
            }
#else
            OPM_THROW(std::logic_error, "In a sequential run the size of the communicator should be 1!");
#endif
        }
    }
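
    // Semantics sketch (hypothetical values): with perforation values
    // {1, 2, 3} in ECL order distributed over the ranks, each entry is
    // replaced by the prefix sum up to and including it, giving {1, 3, 6}:
    //
    //   cab.partialSumPerfValues(values.begin(), values.end());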

    /// \brief Get the index set for the local perforations.
    const IndexSet& getIndexSet() const;

    /// \brief The number of local perforations.
    int numLocalPerfs() const;

private:
    Parallel::Communication comm_;
    /// \brief Index set for the current perforation values.
    IndexSet current_indices_;
#if HAVE_MPI
    /// \brief Index set for the values of the perforations above.
    IndexSet above_indices_;
    RI remote_indices_;
    Dune::Interface interface_;
    Dune::BufferedCommunicator communicator_;
#endif
    std::size_t num_local_perfs_{};
};

/// \brief A factory for creating a global data representation for distributed wells.
///
/// It gathers the distributed per-perforation values into one global
/// container (createGlobal) and scatters results back (copyGlobalToLocal).
class GlobalPerfContainerFactory
{
public:
    using IndexSet = CommunicateAboveBelow::IndexSet;
    using Attribute = CommunicateAboveBelow::Attribute;
    using GlobalIndex = typename IndexSet::IndexPair::GlobalIndex;

    /// \brief Constructor.
    GlobalPerfContainerFactory(const IndexSet& local_indices,
                               const Parallel::Communication comm,
                               int num_local_perfs);

    /// \brief Creates a container that holds values for all perforations.
    std::vector<double> createGlobal(const std::vector<double>& local_perf_container,
                                     std::size_t num_components) const;

    /// \brief Copies the values of the global perforation to the local representation.
    void copyGlobalToLocal(const std::vector<double>& global, std::vector<double>& local,
                           std::size_t num_components) const;

    /// \brief The number of global perforations.
    int numGlobalPerfs() const;

private:
    const IndexSet& local_indices_;
    Parallel::Communication comm_;
    int num_global_perfs_;
    /// \brief The number of owned perforations per rank (for the gather).
    std::vector<int> sizes_;
    /// \brief Displacements of each rank's values in the gathered container.
    std::vector<int> displ_;
    /// \brief Maps received values to their position in the global container.
    std::vector<int> map_received_;
    /// \brief The ECL index of each perforation in the global container.
    std::vector<int> perf_ecl_index_;
};
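
// Usage sketch (hypothetical names): gather the distributed per-perforation
// values into a global container, compute on it sequentially, and scatter
// the result back:
//
//   const auto& factory = pw_info.getGlobalPerfContainerFactory();
//   auto global = factory.createGlobal(local, /*num_components=*/1);
//   // ... sequential computation on global ...
//   factory.copyGlobalToLocal(global, local, /*num_components=*/1);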

/// \brief Class encapsulating some information about parallel wells.
class ParallelWellInfo
{
public:
    static constexpr int INVALID_ECL_INDEX = -1;

    /// \brief Constructs object using MPI_COMM_SELF.
    ParallelWellInfo(const std::string& name = {""},
                     bool hasLocalCells = true);

    /// \brief Constructs object with communication between all ranks sharing a well.
    ParallelWellInfo(const std::pair<std::string,bool>& well_info,
                     Parallel::Communication allComm);

    const Parallel::Communication& communication() const
    {
        return *comm_;
    }

    /// \brief Collectively decide which rank has the first perforation.
    void communicateFirstPerforation(bool hasFirst);

    /// \brief Broadcast a value from the rank with the first open perforation.
    ///
    /// If the well does not have any open connections, rankWithFirstPerf_
    /// is not initialized and the passed value is returned unchanged.
    template<class T>
    T broadcastFirstPerforationValue(const T& t) const
    {
        T res = t;
        if (rankWithFirstPerf_ >= 0) {
#ifndef NDEBUG
            assert(rankWithFirstPerf_ < comm_->size());
            // At least on some OpenMPI versions this broadcast might
            // interfere with other communication if there are bugs.
            comm_->barrier();
#endif
            comm_->broadcast(&res, 1, rankWithFirstPerf_);
#ifndef NDEBUG
            comm_->barrier();
#endif
        }
        return res;
    }
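
    // Usage sketch (hypothetical): make a value measured at the first open
    // perforation available on every rank sharing the well:
    //
    //   double ref_depth = pw_info.broadcastFirstPerforationValue(local_depth);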

    /// \brief Creates an array of values for the perforation above.
    std::vector<double> communicateAboveValues(double first_value,
                                               const double* current,
                                               std::size_t size) const;

    /// \brief Creates an array of values for the perforation above.
    std::vector<double> communicateAboveValues(double first_value,
                                               const std::vector<double>& current) const;

    /// \brief Creates an array of values for the perforation below.
    std::vector<double> communicateBelowValues(double last_value,
                                               const double* current,
                                               std::size_t size) const;

    /// \brief Creates an array of values for the perforation below.
    std::vector<double> communicateBelowValues(double last_value,
                                               const std::vector<double>& current) const;
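
    // Usage sketch (hypothetical names): shift per-perforation values one
    // position along the well, filling the boundary entry:
    //
    //   auto above = pw_info.communicateAboveValues(bhp, perf_pressure);
    //   auto below = pw_info.communicateBelowValues(0.0, perf_pressure);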

    /// \brief Adds information about the ECL indices of the perforations.
    void pushBackEclIndex(int above, int current);

    /// \brief Name of the well.
    const std::string& name() const
    {
        return name_;
    }

    /// \brief Whether any local cells are perforated by this well.
    bool hasLocalCells() const
    {
        return hasLocalCells_;
    }

    bool isOwner() const
    {
        return isOwner_;
    }

    /// \brief Indicates that we will reset the ECL index information.
    void beginReset();

    /// \brief Indicates completion of the reset of the ECL index information.
    void endReset();

    /// \brief Sum all the values of the perforations.
    template<typename It>
    typename std::iterator_traits<It>::value_type sumPerfValues(It begin, It end) const
    {
        using V = typename std::iterator_traits<It>::value_type;
        auto local = std::accumulate(begin, end, V());
        return communication().sum(local);
    }
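
    // Usage sketch (hypothetical name): total of a per-perforation quantity
    // over all ranks of the well:
    //
    //   double total_rate = pw_info.sumPerfValues(rates.begin(), rates.end());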

    /// \brief Do a (in place) partial sum on values attached to all perforations.
    template<class RAIterator>
    void partialSumPerfValues(RAIterator begin, RAIterator end) const
    {
        commAboveBelow_->partialSumPerfValues(begin, end);
    }

    /// \brief Free data of communication data structures.
    void clear();

    /// \brief Get a factory to create a global representation of perforation data.
    const GlobalPerfContainerFactory& getGlobalPerfContainerFactory() const;

private:
    /// \brief Custom deleter for the well communicator.
    struct DestroyComm
    {
        void operator()(Parallel::Communication* comm);
    };

    /// \brief Name of the well.
    std::string name_;
    /// \brief Whether any local cells are perforated by this well.
    bool hasLocalCells_;
    /// \brief Whether this rank owns the well (e.g. for reporting).
    bool isOwner_;
    /// \brief Rank holding the first perforation, or a negative value if unknown.
    int rankWithFirstPerf_;
    /// \brief Communicator for the ranks sharing this well.
    std::unique_ptr<Parallel::Communication, DestroyComm> comm_;

    /// \brief Used to communicate the values of the perforations above/below.
    std::unique_ptr<CommunicateAboveBelow> commAboveBelow_;

    std::unique_ptr<GlobalPerfContainerFactory> globalPerfCont_;
};

/// \brief Class checking that all connections are on active cells.
class CheckDistributedWellConnections
{
public:
    CheckDistributedWellConnections(const Well& well,
                                    const ParallelWellInfo& info);

    /// \brief Indicate that the i-th completion was found.
    void connectionFound(std::size_t index);

    bool checkAllConnectionsFound();

private:
    std::vector<std::size_t> foundConnections_;
    const Well& well_;
    const ParallelWellInfo& pwinfo_;
};
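
// Usage sketch (hypothetical loop): each rank marks the completions it found
// locally; the final call collectively verifies that none were lost during
// distribution:
//
//   CheckDistributedWellConnections checker(well, pw_info);
//   for (std::size_t i : found_completion_indices)
//       checker.connectionFound(i);
//   bool ok = checker.checkAllConnectionsFound();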

bool operator<(const ParallelWellInfo& well1, const ParallelWellInfo& well2);

bool operator==(const ParallelWellInfo& well1, const ParallelWellInfo& well2);

bool operator!=(const ParallelWellInfo& well1, const ParallelWellInfo& well2);

bool operator<(const std::pair<std::string, bool>& pair, const ParallelWellInfo& well);

bool operator<(const ParallelWellInfo& well, const std::pair<std::string, bool>& pair);

bool operator==(const std::pair<std::string, bool>& pair, const ParallelWellInfo& well);

bool operator==(const ParallelWellInfo& well, const std::pair<std::string, bool>& pair);

bool operator!=(const std::pair<std::string, bool>& pair, const ParallelWellInfo& well);

bool operator!=(const ParallelWellInfo& well, const std::pair<std::string, bool>& pair);

} // end namespace Opm

#endif // OPM_PARALLELWELLINFO_HEADER_INCLUDED