Thyra  Version of the Day
Thyra_EpetraThyraWrappers.cpp
1 // @HEADER
2 // ***********************************************************************
3 //
4 // Thyra: Interfaces and Support for Abstract Numerical Algorithms
5 // Copyright (2004) Sandia Corporation
6 //
7 // Under terms of Contract DE-AC04-94AL85000, there is a non-exclusive
8 // license for use of this work by or on behalf of the U.S. Government.
9 //
10 // Redistribution and use in source and binary forms, with or without
11 // modification, are permitted provided that the following conditions are
12 // met:
13 //
14 // 1. Redistributions of source code must retain the above copyright
15 // notice, this list of conditions and the following disclaimer.
16 //
17 // 2. Redistributions in binary form must reproduce the above copyright
18 // notice, this list of conditions and the following disclaimer in the
19 // documentation and/or other materials provided with the distribution.
20 //
21 // 3. Neither the name of the Corporation nor the names of the
22 // contributors may be used to endorse or promote products derived from
23 // this software without specific prior written permission.
24 //
25 // THIS SOFTWARE IS PROVIDED BY SANDIA CORPORATION "AS IS" AND ANY
26 // EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
27 // IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
28 // PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL SANDIA CORPORATION OR THE
29 // CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
30 // EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
31 // PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
32 // PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
33 // LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
34 // NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
35 // SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
36 //
37 // Questions? Contact Roscoe A. Bartlett (bartlettra@ornl.gov)
38 //
39 // ***********************************************************************
40 // @HEADER
41 
43 #include "Thyra_DefaultSpmdVectorSpace.hpp"
44 #include "Thyra_DefaultSpmdMultiVector.hpp"
45 #include "Thyra_DefaultSpmdVector.hpp"
46 #include "Thyra_DetachedVectorView.hpp"
47 #include "Thyra_DetachedMultiVectorView.hpp"
48 #include "Thyra_VectorSpaceFactoryBase.hpp"
49 #include "Thyra_ProductVectorSpaceBase.hpp"
50 #include "Teuchos_Assert.hpp"
51 #include "Teuchos_dyn_cast.hpp"
52 
53 #include "Teuchos_DefaultSerialComm.hpp"
54 #ifdef HAVE_MPI
55 #include "Teuchos_DefaultMpiComm.hpp"
56 #endif
57 
58 #include "Epetra_Map.h"
59 #include "Epetra_Comm.h"
60 #include "Epetra_SerialComm.h"
61 #ifdef HAVE_MPI
62 # include "Epetra_MpiComm.h"
63 #endif
64 #include "Epetra_MultiVector.h"
65 #include "Epetra_Vector.h"
66 
67 //
68 // Helpers
69 //
70 
71 
72 namespace {
73 
74 
76 unwrapSingleProductVectorSpace(
77  const Teuchos::RCP<const Thyra::VectorSpaceBase<double> > &vs_in
78  )
79 {
80  using Teuchos::RCP;
81  using Teuchos::rcp_dynamic_cast;
83  const RCP<const ProductVectorSpaceBase<double> > pvs =
84  rcp_dynamic_cast<const ProductVectorSpaceBase<double> >(vs_in);
85  if (nonnull(pvs)) {
86  TEUCHOS_ASSERT_EQUALITY( pvs->numBlocks(), 1 );
87  return pvs->getBlock(0);
88  }
89  return vs_in;
90 }
91 
92 
93 } // namespace
94 
95 
96 //
97 // Implementations of user function
98 //
99 
100 
103 {
104  using Teuchos::rcp;
105  using Teuchos::rcp_dynamic_cast;
106  using Teuchos::set_extra_data;
107 
109  serialEpetraComm = rcp_dynamic_cast<const Epetra_SerialComm>(epetraComm);
110  if( serialEpetraComm.get() ) {
112  serialComm = rcp(new Teuchos::SerialComm<Ordinal>());
113  set_extra_data( serialEpetraComm, "serialEpetraComm", Teuchos::inOutArg(serialComm) );
114  return serialComm;
115  }
116 
117 #ifdef HAVE_MPI
118 
120  mpiEpetraComm = rcp_dynamic_cast<const Epetra_MpiComm>(epetraComm);
121  if( mpiEpetraComm.get() ) {
123  rawMpiComm = Teuchos::opaqueWrapper(mpiEpetraComm->Comm());
124  set_extra_data( mpiEpetraComm, "mpiEpetraComm", Teuchos::inOutArg(rawMpiComm) );
126  mpiComm = rcp(new Teuchos::MpiComm<Ordinal>(rawMpiComm));
127  return mpiComm;
128  }
129 
130 #endif // HAVE_MPI
131 
132  // If you get here then the failed!
133  return Teuchos::null;
134 
135 }
136 
137 
140  const RCP<const Epetra_Map> &epetra_map
141  )
142 {
143  using Teuchos::as; using Teuchos::inoutArg;
144 
145 #ifdef TEUCHOS_DEBUG
147  !epetra_map.get(), std::invalid_argument,
148  "create_VectorSpace::initialize(...): Error!" );
149 #endif // TEUCHOS_DEBUG
150 
152  create_Comm(Teuchos::rcpFromRef(epetra_map->Comm())).assert_not_null();
153 
154  Teuchos::set_extra_data(epetra_map, "epetra_map", inoutArg(comm));
155  const Ordinal localSubDim = epetra_map->NumMyElements();
156 
158  defaultSpmdVectorSpace<double>(
159  comm, localSubDim, epetra_map->NumGlobalElements64(),
160  !epetra_map->DistributedGlobal());
161 
162  TEUCHOS_ASSERT_EQUALITY(vs->dim(), as<Ordinal>(epetra_map->NumGlobalElements64()));
163  // NOTE: the above assert just checks to make sure that the size of the
164  // Ordinal type can hold the size returned from NumGlobalElemenets64(). A
165  // 64 bit system will always have Ordinal=ptrdiff_t by default which will
166  // always be 64 bit so this should be fine. However, if Ordinal were
167  // defined to only be 32 bit and then this exception could trigger. Because
168  // this assert will only likely trigger in a non-debug build, we will leave
169  // the assert unguarded since it is very cheap to perform.
170  Teuchos::set_extra_data( epetra_map, "epetra_map", inoutArg(vs) );
171 
172  return vs;
173 }
174 
175 
178  const RCP<const VectorSpaceBase<double> > &parentSpace,
179  const int dim
180  )
181 {
182  using Teuchos::rcp_dynamic_cast;
183 #ifdef TEUCHOS_DEBUG
184  TEUCHOS_TEST_FOR_EXCEPT(parentSpace.get()==NULL);
185  Teuchos::dyn_cast<const SpmdVectorSpaceBase<double> >(*parentSpace);
186  TEUCHOS_TEST_FOR_EXCEPT(dim <= 0);
187 #endif
188  return parentSpace->smallVecSpcFcty()->createVecSpc(dim);
189 }
190 
191 
194  const RCP<Epetra_Vector> &epetra_v,
195  const RCP<const VectorSpaceBase<double> > &space_in
196  )
197 {
198 #ifdef TEUCHOS_DEBUG
199  TEUCHOS_TEST_FOR_EXCEPT(space_in.get()==NULL);
200 #endif
202  space = Teuchos::rcp_dynamic_cast<const SpmdVectorSpaceBase<double> >(
203  space_in,true);
204  // mfh 06 Dec 2017: This return should not trigger an issue like
205  // #1941, because if epetra_v is NULL on some process but not
206  // others, then that process should not be participating in
207  // collectives with the other processes anyway.
208  if(!epetra_v.get())
209  return Teuchos::null;
210  // New local view of raw data
211  double *localValues;
212  epetra_v->ExtractView( &localValues );
213  // Build the Vector
215  v = Teuchos::rcp(
217  space,
218  Teuchos::arcp(localValues,0,epetra_v->Map().NumMyElements(),false),
219  1
220  )
221  );
222  Teuchos::set_extra_data<RCP<Epetra_Vector> >( epetra_v, "Epetra_Vector",
223  Teuchos::inOutArg(v) );
224  return v;
225 }
226 
227 
230  const RCP<const Epetra_Vector> &epetra_v,
231  const RCP<const VectorSpaceBase<double> > &space_in
232  )
233 {
234 #ifdef TEUCHOS_DEBUG
235  TEUCHOS_TEST_FOR_EXCEPT(space_in.get()==NULL);
236 #endif
238  space = Teuchos::rcp_dynamic_cast<const SpmdVectorSpaceBase<double> >(
239  space_in,true);
240  // mfh 06 Dec 2017: This return should not trigger an issue like
241  // #1941, because if epetra_v is NULL on some process but not
242  // others, then that process should not be participating in
243  // collectives with the other processes anyway.
244  if(!epetra_v.get())
245  return Teuchos::null;
246  // New local view of raw data
247  double *localValues;
248  epetra_v->ExtractView( &localValues );
249  // Build the Vector
251  v = Teuchos::rcp(
253  space,
254  Teuchos::arcp(localValues,0,epetra_v->Map().NumMyElements(),false),
255  1
256  )
257  );
258  Teuchos::set_extra_data<RCP<const Epetra_Vector> >( epetra_v, "Epetra_Vector",
259  Teuchos::inOutArg(v) );
260  return v;
261 }
262 
263 
266  const RCP<Epetra_MultiVector> &epetra_mv,
267  const RCP<const VectorSpaceBase<double> > &range_in,
268  const RCP<const VectorSpaceBase<double> > &domain_in
269  )
270 {
271  using Teuchos::rcp_dynamic_cast;
272 #ifdef TEUCHOS_DEBUG
273  TEUCHOS_TEST_FOR_EXCEPT(range_in.get()==NULL);
274 #endif
276  Teuchos::rcp_dynamic_cast<const SpmdVectorSpaceBase<double> >(
277  unwrapSingleProductVectorSpace(range_in),
278  true
279  );
281  Teuchos::rcp_dynamic_cast<const ScalarProdVectorSpaceBase<double> >(
282  unwrapSingleProductVectorSpace(domain_in),
283  true
284  );
285  // mfh 06 Dec 2017: This return should not trigger an issue like
286  // #1941, because if epetra_mv is NULL on some process but not
287  // others, then that process should not be participating in
288  // collectives with the other processes anyway.
289  if (!epetra_mv.get() )
290  return Teuchos::null;
291  if ( is_null(domain) ) {
292  domain = rcp_dynamic_cast<const ScalarProdVectorSpaceBase<double> >(
294  );
295  }
296  // New local view of raw data
297  double *localValues; int leadingDim;
298  if( epetra_mv->ConstantStride() ) {
299  epetra_mv->ExtractView( &localValues, &leadingDim );
300  }
301  else {
302  TEUCHOS_TEST_FOR_EXCEPT(true); // ToDo: Implement views of non-contiguous mult-vectors!
303  }
304  // Build the MultiVector
306  mv = Teuchos::rcp(
308  range,
309  domain,
310  Teuchos::arcp(localValues,0,leadingDim*epetra_mv->NumVectors(),false),
311  leadingDim
312  )
313  );
314  Teuchos::set_extra_data<RCP<Epetra_MultiVector> >(
315  epetra_mv, "Epetra_MultiVector", Teuchos::inOutArg(mv) );
316  return mv;
317 }
318 
319 
322  const RCP<const Epetra_MultiVector> &epetra_mv,
323  const RCP<const VectorSpaceBase<double> > &range_in,
324  const RCP<const VectorSpaceBase<double> > &domain_in
325  )
326 {
327  using Teuchos::rcp_dynamic_cast;
328 #ifdef TEUCHOS_DEBUG
329  TEUCHOS_TEST_FOR_EXCEPT(range_in.get()==NULL);
330 #endif
332  Teuchos::rcp_dynamic_cast<const SpmdVectorSpaceBase<double> >(
333  unwrapSingleProductVectorSpace(range_in),
334  true
335  );
337  Teuchos::rcp_dynamic_cast<const ScalarProdVectorSpaceBase<double> >(
338  unwrapSingleProductVectorSpace(domain_in),
339  true
340  );
341  // mfh 06 Dec 2017: This return should not trigger an issue like
342  // #1941, because if epetra_mv is NULL on some process but not
343  // others, then that process should not be participating in
344  // collectives with the other processes anyway.
345  if (!epetra_mv.get())
346  return Teuchos::null;
347  if ( is_null(domain) ) {
348  domain = rcp_dynamic_cast<const ScalarProdVectorSpaceBase<double> >(
350  );
351  }
352  // New local view of raw data
353  double *localValues; int leadingDim;
354  if( epetra_mv->ConstantStride() ) {
355  epetra_mv->ExtractView( &localValues, &leadingDim );
356  }
357  else {
358  TEUCHOS_TEST_FOR_EXCEPT(true); // ToDo: Implement views of non-contiguous mult-vectors!
359  }
360  // Build the MultiVector
362  mv = Teuchos::rcp(
364  range,
365  domain,
366  Teuchos::arcp(localValues,0,leadingDim*epetra_mv->NumVectors(),false),
367  leadingDim
368  )
369  );
370  Teuchos::set_extra_data<RCP<const Epetra_MultiVector> >(
371  epetra_mv, "Epetra_MultiVector", Teuchos::inOutArg(mv) );
372  return mv;
373 }
374 
375 
378 {
379 
380  using Teuchos::rcp;
381  using Teuchos::ptrFromRef;
382  using Teuchos::ptr_dynamic_cast;
383  using Teuchos::SerialComm;
384 #ifdef HAVE_MPI
385  using Teuchos::MpiComm;
386 #endif
387 
388  const Ptr<const Teuchos::Comm<Ordinal> > comm = Teuchos::ptrFromRef(comm_in);
389 
390  const Ptr<const SerialComm<Ordinal> > serialComm =
391  ptr_dynamic_cast<const SerialComm<Ordinal> >(comm);
392 
393  RCP<const Epetra_Comm> epetraComm;
394 
395 #ifdef HAVE_MPI
396 
397  const Ptr<const MpiComm<Ordinal> > mpiComm =
398  ptr_dynamic_cast<const MpiComm<Ordinal> >(comm);
399 
400  TEUCHOS_TEST_FOR_EXCEPTION(is_null(mpiComm) && is_null(serialComm),
401  std::runtime_error,
402  "SPMD std::vector space has a communicator that is "
403  "neither a serial comm nor an MPI comm");
404 
405  if (nonnull(mpiComm)) {
406  epetraComm = rcp(new Epetra_MpiComm(*mpiComm->getRawMpiComm()()));
407  }
408  else {
409  epetraComm = rcp(new Epetra_SerialComm());
410  }
411 
412 #else
413 
414  TEUCHOS_TEST_FOR_EXCEPTION(is_null(serialComm), std::runtime_error,
415  "SPMD std::vector space has a communicator that is "
416  "neither a serial comm nor an MPI comm");
417 
418  epetraComm = rcp(new Epetra_SerialComm());
419 
420 #endif
421 
422  TEUCHOS_TEST_FOR_EXCEPTION(is_null(epetraComm), std::runtime_error,
423  "null communicator created");
424 
425  return epetraComm;
426 
427 }
428 
429 
432  const VectorSpaceBase<double>& vs_in,
433  const RCP<const Epetra_Comm>& comm)
434 {
435 
436  using Teuchos::rcpFromRef;
437  using Teuchos::rcpFromPtr;
438  using Teuchos::rcp_dynamic_cast;
439  using Teuchos::ptrFromRef;
440  using Teuchos::ptr_dynamic_cast;
441 
442  const Ptr<const VectorSpaceBase<double> > vs_ptr = ptrFromRef(vs_in);
443 
444  const Ptr<const SpmdVectorSpaceBase<double> > spmd_vs =
445  ptr_dynamic_cast<const SpmdVectorSpaceBase<double> >(vs_ptr);
446 
448  ptr_dynamic_cast<const ProductVectorSpaceBase<double> >(vs_ptr);
449 
450  TEUCHOS_TEST_FOR_EXCEPTION( is_null(spmd_vs) && is_null(prod_vs), std::logic_error,
451  "Error, the concrete VectorSpaceBase object of type "
452  +Teuchos::demangleName(typeid(vs_in).name())+" does not support the"
453  " SpmdVectorSpaceBase or the ProductVectorSpaceBase interfaces!" );
454 
455  const int numBlocks = (nonnull(prod_vs) ? prod_vs->numBlocks() : 1);
456 
457  // Get an array of SpmdVectorBase objects for the blocks
458 
460  if (nonnull(prod_vs)) {
461  for (int block_i = 0; block_i < numBlocks; ++block_i) {
462  const RCP<const SpmdVectorSpaceBase<double> > spmd_vs_i =
463  rcp_dynamic_cast<const SpmdVectorSpaceBase<double> >(
464  prod_vs->getBlock(block_i), true);
465  spmd_vs_blocks.push_back(spmd_vs_i);
466  }
467  }
468  else {
469  spmd_vs_blocks.push_back(rcpFromPtr(spmd_vs));
470  }
471 
472  // Find the number of local elements, summed over all blocks
473 
474  int myLocalElements = 0;
475  for (int block_i = 0; block_i < numBlocks; ++block_i) {
476  myLocalElements += spmd_vs_blocks[block_i]->localSubDim();
477  }
478 
479  // Find the GIDs owned by this processor, taken from all blocks
480 
481  int count=0;
482  int blockOffset = 0;
483  Array<int> myGIDs(myLocalElements);
484  for (int block_i = 0; block_i < numBlocks; ++block_i) {
485  const RCP<const SpmdVectorSpaceBase<double> > spmd_vs_i = spmd_vs_blocks[block_i];
486  const int lowGIDInBlock = spmd_vs_i->localOffset();
487  const int numLocalElementsInBlock = spmd_vs_i->localSubDim();
488  for (int i=0; i < numLocalElementsInBlock; ++i, ++count) {
489  myGIDs[count] = blockOffset + lowGIDInBlock + i;
490  }
491  blockOffset += spmd_vs_i->dim();
492  }
493 
494  const int globalDim = vs_in.dim();
495 
496  return Teuchos::rcp(
497  new Epetra_Map(globalDim, myLocalElements, myGIDs.getRawPtr(), 0, *comm));
498 
499 }
500 
501 // Almost like the above one, but working on an RCP vs as input, we can check for the
502 // presence of RCP<const Epetra_Map> in the RCP extra data, to save us time.
505  const RCP<const VectorSpaceBase<double>>& vs,
506  const RCP<const Epetra_Comm>& comm)
507 {
508  //
509  // First, try to grab the Epetra_Map straight out of the
510  // RCP since this is the fastest way.
511  //
512  const Ptr<const RCP<const Epetra_Map> >
513  epetra_map_ptr = Teuchos::get_optional_extra_data<RCP<const Epetra_Map> >(
514  vs,"epetra_map");
515  // mfh 06 Dec 2017: This should be consistent over all processes
516  // that participate in v's communicator.
517  if(!is_null(epetra_map_ptr)) {
518  return *epetra_map_ptr;
519  }
520 
521  // No luck. We need to call get_Epetra_Map(*vs,comm).
522  TEUCHOS_TEST_FOR_EXCEPTION(comm.is_null(), std::runtime_error,
523  "Error! No RCP Epetra_Map attached to the input vector space RCP, "
524  "and the input comm RCP is null.\n");
525 
526  return get_Epetra_Map(*vs,comm);
527 }
528 
531  const Epetra_Map &map,
532  const RCP<VectorBase<double> > &v
533  )
534 {
535  using Teuchos::get_optional_extra_data;
536 #ifdef TEUCHOS_DEBUG
538  epetra_vs = create_VectorSpace(Teuchos::rcp(&map,false));
540  "Thyra::get_Epetra_Vector(map,v)", *epetra_vs, *v->space() );
541 #endif
542  //
543  // First, try to grab the Epetra_Vector straight out of the
544  // RCP since this is the fastest way.
545  //
547  epetra_v_ptr = get_optional_extra_data<RCP<Epetra_Vector> >(
548  v,"Epetra_Vector");
549  // mfh 06 Dec 2017: This should be consistent over all processes
550  // that participate in v's communicator.
551  if(!is_null(epetra_v_ptr)) {
552  return *epetra_v_ptr;
553  }
554  //
555  // The assumption that we (rightly) make here is that if the vector spaces
556  // are compatible, that either the multi-vectors are both in-core or the
557  // vector spaces are both derived from SpmdVectorSpaceBase and have
558  // compatible maps.
559  //
560  const VectorSpaceBase<double> &vs = *v->range();
561  const SpmdVectorSpaceBase<double> *mpi_vs
562  = dynamic_cast<const SpmdVectorSpaceBase<double>*>(&vs);
563  const Ordinal localOffset = ( mpi_vs ? mpi_vs->localOffset() : 0 );
564  const Ordinal localSubDim = ( mpi_vs ? mpi_vs->localSubDim() : vs.dim() );
565  //
566  // Here we will extract a view of the local elements in the underlying
567  // VectorBase object. In most cases, no data will be allocated or copied
568  // and only some small objects will be created and a few virtual functions
569  // will be called so the overhead should be low and fixed.
570  //
571  // Create a *mutable* view of the local elements, this view will be set on
572  // the RCP that is returned. As a result, this view will be relased
573  // when the returned Epetra_Vector is released.
574  //
575  // Note that the input vector 'v' will be remembered through this detached
576  // view!
577  //
579  emvv = Teuchos::rcp(
581  v
582  ,Range1D(localOffset,localOffset+localSubDim-1)
583  ,true // forceContiguous
584  )
585  );
586  // Create a temporary Epetra_Vector object and give it
587  // the above local view
589  epetra_v = Teuchos::rcp(
590  new Epetra_Vector(
591  ::View // CV
592  ,map // Map
593  ,const_cast<double*>(emvv->values()) // V
594  )
595  );
596  // Give the explict view object to the above Epetra_Vector smart pointer
597  // object. In this way, when the client is finished with the Epetra_Vector
598  // view the destructor from the object in emvv will automatically commit the
599  // changes to the elements in the input v VectorBase object (reguardless of
600  // its implementation). This is truly an elegant result!
601  Teuchos::set_extra_data( emvv, "emvv", Teuchos::inOutArg(epetra_v),
602  Teuchos::PRE_DESTROY );
603  // We are done!
604  return epetra_v;
605 }
606 
607 // Same as above, except allows to not pass the map (in case the RCP of v
608 // already has an attached RCP<Epetra_Vector>)
611  const RCP<VectorBase<double> > &v,
612  const RCP<const Epetra_Map>& map
613  )
614 {
615  //
616  // First, try to grab the Epetra_Vector straight out of the
617  // RCP since this is the fastest way.
618  //
619  const Ptr<const RCP<Epetra_Vector> >
620  epetra_v_ptr = Teuchos::get_optional_extra_data<RCP<Epetra_Vector> >(
621  v,"Epetra_Vector");
622  // mfh 06 Dec 2017: This should be consistent over all processes
623  // that participate in v's communicator.
624  if(!is_null(epetra_v_ptr)) {
625  return *epetra_v_ptr;
626  }
627 
628  // No luck. We need to call get_Epetra_Vector(*map,v).
629  TEUCHOS_TEST_FOR_EXCEPTION(map.is_null(), std::runtime_error,
630  "Error! No RCP Epetra_Vector attached to the input vector RCP, "
631  "and the input map RCP is null.\n");
632 
633  return get_Epetra_Vector(*map,v);
634 }
635 
638  const Epetra_Map &map,
639  const RCP<const VectorBase<double> > &v
640  )
641 {
642  using Teuchos::get_optional_extra_data;
643 #ifdef TEUCHOS_DEBUG
645  epetra_vs = create_VectorSpace(Teuchos::rcp(&map,false));
647  "Thyra::get_Epetra_Vector(map,v)", *epetra_vs, *v->space() );
648 #endif
649  //
650  // First, try to grab the Epetra_Vector straight out of the
651  // RCP since this is the fastest way.
652  //
654  epetra_v_ptr = get_optional_extra_data<RCP<const Epetra_Vector> >(
655  v,"Epetra_Vector");
656  // mfh 06 Dec 2017: This should be consistent over all processes
657  // that participate in v's communicator.
658  if(!is_null(epetra_v_ptr))
659  return *epetra_v_ptr;
661  epetra_nonconst_v_ptr = get_optional_extra_data<RCP<Epetra_Vector> >(
662  v,"Epetra_Vector");
663  // mfh 06 Dec 2017: This should be consistent over all processes
664  // that participate in v's communicator.
665  if(!is_null(epetra_nonconst_v_ptr))
666  return *epetra_nonconst_v_ptr;
667  //
668  // Same as above function except as stated below
669  //
670  const VectorSpaceBase<double> &vs = *v->range();
671  const SpmdVectorSpaceBase<double> *mpi_vs
672  = dynamic_cast<const SpmdVectorSpaceBase<double>*>(&vs);
673  const Ordinal localOffset = ( mpi_vs ? mpi_vs->localOffset() : 0 );
674  const Ordinal localSubDim = ( mpi_vs ? mpi_vs->localSubDim() : vs.dim() );
675  // Get an explicit *non-mutable* view of all of the elements in the multi
676  // vector. Note that 'v' will be remembered by this view!
678  evv = Teuchos::rcp(
680  v
681  ,Range1D(localOffset,localOffset+localSubDim-1)
682  ,true // forceContiguous
683  )
684  );
685  // Create a temporary Epetra_Vector object and give it
686  // the above view
688  epetra_v = Teuchos::rcp(
689  new Epetra_Vector(
690  ::View // CV
691  ,map // Map
692  ,const_cast<double*>(evv->values()) // V
693  )
694  );
695  // This next statement will cause the destructor to free the view if
696  // needed (see above function). Since this view is non-mutable,
697  // only a releaseDetachedView(...) and not a commit will be called.
698  // This is the whole reason there is a seperate implementation for
699  // the const and non-const cases.
700  Teuchos::set_extra_data( evv, "evv", Teuchos::inOutArg(epetra_v),
701  Teuchos::PRE_DESTROY );
702  // We are done!
703  return epetra_v;
704 }
705 
706 // Same as above, except allows to not pass the map (in case the RCP of v
707 // already has an attached RCP<Epetra_Vector>)
710  const RCP<const VectorBase<double> > &v,
711  const RCP<const Epetra_Map>& map
712  )
713 {
714  //
715  // First, try to grab the Epetra_Vector straight out of the
716  // RCP since this is the fastest way.
717  //
718  const Ptr<const RCP<const Epetra_Vector> >
719  epetra_v_ptr = Teuchos::get_optional_extra_data<RCP<const Epetra_Vector> >(
720  v,"Epetra_Vector");
721  // mfh 06 Dec 2017: This should be consistent over all processes
722  // that participate in v's communicator.
723  if(!is_null(epetra_v_ptr)) {
724  return *epetra_v_ptr;
725  }
726 
727  // No luck. We need to call get_Epetra_Vector(*map,v).
728  TEUCHOS_TEST_FOR_EXCEPTION(map.is_null(), std::runtime_error,
729  "Error! No RCP to Epetra_Vector attached to the input vector RCP, "
730  "and the input map RCP is null.\n");
731 
732  return get_Epetra_Vector(*map,v);
733 }
734 
737  const Epetra_Map &map,
738  const RCP<MultiVectorBase<double> > &mv
739  )
740 {
741  using Teuchos::get_optional_extra_data;
742 #ifdef TEUCHOS_DEBUG
744  epetra_vs = create_VectorSpace(Teuchos::rcp(&map,false));
746  "Thyra::get_Epetra_MultiVector(map,mv)", *epetra_vs, *mv->range() );
747 #endif
748  //
749  // First, try to grab the Epetra_MultiVector straight out of the
750  // RCP since this is the fastest way.
751  //
753  epetra_mv_ptr = get_optional_extra_data<RCP<Epetra_MultiVector> >(
754  mv,"Epetra_MultiVector");
755  // mfh 06 Dec 2017: This should be consistent over all processes
756  // that participate in v's communicator.
757  if(!is_null(epetra_mv_ptr)) {
758  return *epetra_mv_ptr;
759  }
760  //
761  // The assumption that we (rightly) make here is that if the vector spaces
762  // are compatible, that either the multi-vectors are both in-core or the
763  // vector spaces are both derived from SpmdVectorSpaceBase and have
764  // compatible maps.
765  //
766  const VectorSpaceBase<double> &vs = *mv->range();
767  const SpmdVectorSpaceBase<double> *mpi_vs
768  = dynamic_cast<const SpmdVectorSpaceBase<double>*>(&vs);
769  const Ordinal localOffset = ( mpi_vs ? mpi_vs->localOffset() : 0 );
770  const Ordinal localSubDim = ( mpi_vs ? mpi_vs->localSubDim() : vs.dim() );
771  //
772  // Here we will extract a view of the local elements in the underlying
773  // MultiVectorBase object. In most cases, no data will be allocated or
774  // copied and only some small objects will be created and a few virtual
775  // functions will be called so the overhead should be low and fixed.
776  //
777  // Create a *mutable* view of the local elements, this view will be set on
778  // the RCP that is returned. As a result, this view will be relased
779  // when the returned Epetra_MultiVector is released.
780  //
782  emmvv = Teuchos::rcp(
784  *mv
785  ,Range1D(localOffset,localOffset+localSubDim-1)
786  )
787  );
788  // Create a temporary Epetra_MultiVector object and give it
789  // the above local view
791  epetra_mv = Teuchos::rcp(
792  new Epetra_MultiVector(
793  ::View // CV
794  ,map // Map
795  ,const_cast<double*>(emmvv->values()) // A
796  ,emmvv->leadingDim() // MyLDA
797  ,emmvv->numSubCols() // NumVectors
798  )
799  );
800  // Give the explict view object to the above Epetra_MultiVector
801  // smart pointer object. In this way, when the client is finished
802  // with the Epetra_MultiVector view the destructor from the object
803  // in emmvv will automatically commit the changes to the elements in
804  // the input mv MultiVectorBase object (reguardless of its
805  // implementation). This is truly an elegant result!
806  Teuchos::set_extra_data( emmvv, "emmvv", Teuchos::inOutArg(epetra_mv),
807  Teuchos::PRE_DESTROY );
808  // Also set the mv itself as extra data just to be safe
809  Teuchos::set_extra_data( mv, "mv", Teuchos::inOutArg(epetra_mv) );
810  // We are done!
811  return epetra_mv;
812 }
813 
814 // Same as above, except allows to not pass the map (in case the RCP of v
815 // already has an attached RCP<const Epetra_MultiVector>)
818  const RCP<MultiVectorBase<double> > &mv,
819  const RCP<const Epetra_Map>& map
820  )
821 {
822  //
823  // First, try to grab the Epetra_MultiVector straight out of the
824  // RCP since this is the fastest way.
825  //
826  const Ptr<const RCP<Epetra_MultiVector> >
827  epetra_mv_ptr = Teuchos::get_optional_extra_data<RCP<Epetra_MultiVector> >(
828  mv,"Epetra_MultiVector");
829  // mfh 06 Dec 2017: This should be consistent over all processes
830  // that participate in v's communicator.
831  if(!is_null(epetra_mv_ptr)) {
832  return *epetra_mv_ptr;
833  }
834 
835  // No luck. We need to call get_Epetra_MultiVector(*map,mv).
836  TEUCHOS_TEST_FOR_EXCEPTION(map.is_null(), std::runtime_error,
837  "Error! No RCP to Epetra_MultiVector attached to the input vector RCP, "
838  "and the input map RCP is null.\n");
839 
840  return get_Epetra_MultiVector(*map,mv);
841 }
842 
845  const Epetra_Map &map,
846  const RCP<const MultiVectorBase<double> > &mv
847  )
848 {
849  using Teuchos::get_optional_extra_data;
850 
851 #ifdef TEUCHOS_DEBUG
853  epetra_vs = create_VectorSpace(Teuchos::rcp(&map,false));
854 
856  "Thyra::get_Epetra_MultiVector(map,mv)",
857  *epetra_vs, *mv->range() );
858 #endif
859 
860  //
861  // First, try to grab the Epetra_MultiVector straight out of the
862  // RCP since this is the fastest way.
863  //
865  epetra_mv_ptr
866  = get_optional_extra_data<RCP<const Epetra_MultiVector> >(
867  mv,"Epetra_MultiVector" );
868  // mfh 06 Dec 2017: This should be consistent over all processes
869  // that participate in v's communicator.
870  if(!is_null(epetra_mv_ptr)) {
871  return *epetra_mv_ptr;
872  }
873 
874  //
875  // Same as above function except as stated below
876  //
877  const VectorSpaceBase<double> &vs = *mv->range();
878  const SpmdVectorSpaceBase<double> *mpi_vs
879  = dynamic_cast<const SpmdVectorSpaceBase<double>*>(&vs);
880  const Ordinal localOffset = ( mpi_vs ? mpi_vs->localOffset() : 0 );
881  const Ordinal localSubDim = ( mpi_vs ? mpi_vs->localSubDim() : vs.dim() );
882  // Get an explicit *non-mutable* view of all of the elements in
883  // the multi vector.
885  emvv = Teuchos::rcp(
887  *mv
888  ,Range1D(localOffset,localOffset+localSubDim-1)
889  )
890  );
891 
892  // Create a temporary Epetra_MultiVector object and give it
893  // the above view
895  epetra_mv = Teuchos::rcp(
896  new Epetra_MultiVector(
897  ::View // CV
898  ,map // Map
899  ,const_cast<double*>(emvv->values()) // A
900  ,emvv->leadingDim() // MyLDA
901  ,emvv->numSubCols() // NumVectors
902  )
903  );
904 
905  // This next statement will cause the destructor to free the view if
906  // needed (see above function). Since this view is non-mutable,
907  // only a releaseDetachedView(...) and not a commit will be called.
908  // This is the whole reason there is a seperate implementation for
909  // the const and non-const cases.
910  Teuchos::set_extra_data( emvv, "emvv", Teuchos::inOutArg(epetra_mv),
911  Teuchos::PRE_DESTROY );
912  // Also set the mv itself as extra data just to be safe
913  Teuchos::set_extra_data( mv, "mv", Teuchos::inOutArg(epetra_mv) );
914 
915  // We are done!
916  return epetra_mv;
917 }
918 
919 // Same as above, except allows to not pass the map (in case the RCP of v
920 // already has an attached RCP<const Epetra_MultiVector>)
923  const RCP<const MultiVectorBase<double> > &mv,
924  const RCP<const Epetra_Map>& map
925  )
926 {
927  //
928  // First, try to grab the Epetra_MultiVector straight out of the
929  // RCP since this is the fastest way.
930  //
931  const Ptr<const RCP<const Epetra_MultiVector> >
932  epetra_mv_ptr = Teuchos::get_optional_extra_data<RCP<const Epetra_MultiVector> >(
933  mv,"Epetra_MultiVector");
934  // mfh 06 Dec 2017: This should be consistent over all processes
935  // that participate in v's communicator.
936  if(!is_null(epetra_mv_ptr)) {
937  return *epetra_mv_ptr;
938  }
939 
940  // No luck. We need to call get_Epetra_MultiVector(*map,mv).
941  TEUCHOS_TEST_FOR_EXCEPTION(map.is_null(), std::runtime_error,
942  "Error! No RCP to Epetra_MultiVector attached to the input vector RCP, "
943  "and the input map RCP is null.\n");
944 
945  return get_Epetra_MultiVector(*map,mv);
946 }
947 
950  const Epetra_Map &map,
952  )
953 {
954  using Teuchos::rcpWithEmbeddedObj;
955  using Teuchos::ptrFromRef;
956  using Teuchos::ptr_dynamic_cast;
957  using Teuchos::outArg;
958 
960  ptr_dynamic_cast<SpmdMultiVectorBase<double> >(ptrFromRef(mv));
961 
962  ArrayRCP<double> mvData;
963  Ordinal mvLeadingDim = 0;
964  if (nonnull(mvSpmdMv)) {
965  mvSpmdMv->getNonconstLocalData(outArg(mvData), outArg(mvLeadingDim));
966  }
967 
968  return rcpWithEmbeddedObj(
969  new Epetra_MultiVector(
970  ::View, map, mvData.getRawPtr(), mvLeadingDim, mv.domain()->dim()
971  ),
972  mvData
973  );
974 }
975 
976 
979  const Epetra_Map &map,
980  const MultiVectorBase<double> &mv
981  )
982 {
983  using Teuchos::rcpWithEmbeddedObj;
984  using Teuchos::ptrFromRef;
985  using Teuchos::ptr_dynamic_cast;
986  using Teuchos::outArg;
987 
989  ptr_dynamic_cast<const SpmdMultiVectorBase<double> >(ptrFromRef(mv));
990 
991  ArrayRCP<const double> mvData;
992  Ordinal mvLeadingDim = 0;
993  if (nonnull(mvSpmdMv)) {
994  mvSpmdMv->getLocalData(outArg(mvData), outArg(mvLeadingDim));
995  }
996 
997  return rcpWithEmbeddedObj(
998  new Epetra_MultiVector(
999  ::View, map, const_cast<double*>(mvData.getRawPtr()), mvLeadingDim, mv.domain()->dim()
1000  ),
1001  mvData
1002  );
1003 }
is_null
bool is_null(const boost::shared_ptr< T > &p)
as
TypeTo as(const TypeFrom &t)
Thyra::VectorSpaceBase::dim
virtual Ordinal dim() const =0
Return the dimension of the vector space.
Epetra_BlockMap::Comm
const Epetra_Comm & Comm() const
Epetra_BlockMap::NumMyElements
int NumMyElements() const
TEUCHOS_TEST_FOR_EXCEPT
#define TEUCHOS_TEST_FOR_EXCEPT(throw_exception_test)
Thyra::VectorSpaceBase
Abstract interface for objects that represent a space for vectors.
Definition: Thyra_OperatorVectorTypes.hpp:367
Thyra::DefaultSpmdVector
Efficient concrete implementation subclass for SPMD vectors.
Definition: Thyra_DefaultSpmdVector_decl.hpp:69
Teuchos::SerialComm
Epetra_MultiVector::ExtractView
int ExtractView(double **A, int *MyLDA) const
Epetra_DistObject::Map
const Epetra_BlockMap & Map() const
Thyra::SpmdVectorSpaceBase
Base abstract VectorSpaceBase class for all SPMD-based vector spaces.
Definition: Thyra_SpmdMultiVectorBase.hpp:52
Thyra::LinearOpBase::domain
virtual RCP< const VectorSpaceBase< Scalar > > domain() const =0
Return a smart pointer for the domain space for this operator.
Thyra::create_Vector
RCP< VectorBase< double > > create_Vector(const RCP< Epetra_Vector > &epetra_v, const RCP< const VectorSpaceBase< double > > &space=Teuchos::null)
Create a non-const VectorBase object from a non-const Epetra_Vector object.
Definition: Thyra_EpetraThyraWrappers.cpp:193
Teuchos::Array::push_back
void push_back(const value_type &x)
Teuchos::RCP::assert_not_null
const RCP< T > & assert_not_null() const
rcp
TEUCHOS_DEPRECATED RCP< T > rcp(T *p, Dealloc_T dealloc, bool owns_mem)
Thyra::get_Epetra_Vector
RCP< Epetra_Vector > get_Epetra_Vector(const Epetra_Map &map, const RCP< VectorBase< double > > &v)
Get a non-const Epetra_Vector view from a non-const VectorBase object if possible.
Definition: Thyra_EpetraThyraWrappers.cpp:530
Epetra_MultiVector::ConstantStride
bool ConstantStride() const
Teuchos::RCP
Teuchos::Ptr
Thyra::Ordinal
Teuchos::Ordinal Ordinal
Type for the dimension of a vector space.
Definition: Thyra_OperatorVectorTypes.hpp:127
TEUCHOS_ASSERT_EQUALITY
#define TEUCHOS_ASSERT_EQUALITY(val1, val2)
Teuchos::Array
Epetra_MpiComm::Comm
MPI_Comm Comm() const
Thyra::ProductVectorSpaceBase
Definition: Thyra_BlockedLinearOpBase.hpp:53
Thyra::DefaultSpmdMultiVector
Efficient concrete implementation subclass for SPMD multi-vectors.
Definition: Thyra_DefaultSpmdMultiVector_decl.hpp:68
Teuchos::ArrayRCP
Thyra::ConstDetachedVectorView
Create an explicit non-mutable (const) view of a VectorBase object.
Definition: Thyra_DetachedVectorView.hpp:72
Thyra::create_VectorSpace
RCP< const VectorSpaceBase< double > > create_VectorSpace(const RCP< const Epetra_Map > &epetra_map)
Create a VectorSpaceBase object given an Epetra_Map object.
Definition: Thyra_EpetraThyraWrappers.cpp:139
Thyra::DetachedVectorView
Create an explicit mutable (non-const) view of a VectorBase object.
Definition: Thyra_DetachedVectorView.hpp:254
demangleName
TEUCHOSCORE_LIB_DLL_EXPORT std::string demangleName(const std::string &mangledName)
Thyra::MultiVectorBase
Interface for a collection of column vectors called a multi-vector.
Definition: Thyra_MultiVectorBase_decl.hpp:493
Epetra_MpiComm
Epetra_SerialComm
Thyra::DetachedMultiVectorView
Create an explicit mutable (non-const) view of a MultiVectorBase object.
Definition: Thyra_DetachedMultiVectorView.hpp:108
THYRA_ASSERT_VEC_SPACES
#define THYRA_ASSERT_VEC_SPACES(FUNC_NAME, VS1, VS2)
This is a very useful macro that should be used to validate that two vector spaces are compatible.
Definition: Thyra_AssertOp.hpp:188
Thyra::ConstDetachedMultiVectorView
Create an explicit non-mutable (const) view of a MultiVectorBase object.
Definition: Thyra_DetachedMultiVectorView.hpp:57
Thyra::create_Comm
RCP< const Teuchos::Comm< Ordinal > > create_Comm(const RCP< const Epetra_Comm > &epetraComm)
Given an Epetra_Comm object, return an equivalent Teuchos::Comm object.
Definition: Thyra_EpetraThyraWrappers.cpp:102
Epetra_Vector
Thyra::ScalarProdVectorSpaceBase
Forward decl.
Definition: Thyra_MultiVectorAdapterBase_decl.hpp:52
Thyra::create_LocallyReplicatedVectorSpace
RCP< const VectorSpaceBase< double > > create_LocallyReplicatedVectorSpace(const RCP< const VectorSpaceBase< double > > &parentSpace, const int dim)
Create a VectorSpaceBase object that creates locally replicated vector objects.
Definition: Thyra_EpetraThyraWrappers.cpp:177
Epetra_BlockMap::DistributedGlobal
bool DistributedGlobal() const
Epetra_Vector::ExtractView
int ExtractView(double **V) const
Array< int >::getRawPtr
int * getRawPtr()
Thyra::SpmdMultiVectorBase
Base interface class for SPMD multi-vectors.
Definition: Thyra_SpmdMultiVectorBase.hpp:67
Epetra_MultiVector
Thyra::create_MultiVector
RCP< MultiVectorBase< double > > create_MultiVector(const RCP< Epetra_MultiVector > &epetra_mv, const RCP< const VectorSpaceBase< double > > &range=Teuchos::null, const RCP< const VectorSpaceBase< double > > &domain=Teuchos::null)
Create a non-const MultiVectorBase object from a non-const Epetra_MultiVector object.
Definition: Thyra_EpetraThyraWrappers.cpp:265
nonnull
bool nonnull(const boost::shared_ptr< T > &p)
Thyra::get_Epetra_MultiVector
RCP< Epetra_MultiVector > get_Epetra_MultiVector(const Epetra_Map &map, const RCP< MultiVectorBase< double > > &mv)
Get a non-const Epetra_MultiVector view from a non-const MultiVectorBase object if possible.
Definition: Thyra_EpetraThyraWrappers.cpp:736
Thyra::VectorBase
Abstract interface for finite-dimensional dense vectors.
Definition: Thyra_OperatorVectorTypes.hpp:370
Teuchos::RCP::get
T * get() const
Teuchos::Comm
Definition: Thyra_DefaultSpmdVectorSpaceFactory_decl.hpp:47
dyn_cast
T_To & dyn_cast(T_From &from)
Thyra::get_Epetra_Map
RCP< const Epetra_Map > get_Epetra_Map(const VectorSpaceBase< double > &vs, const RCP< const Epetra_Comm > &comm)
Get (or create) an Epetra_Map object given a VectorSpaceBase object and optionally an extra Epetra_Co...
Definition: Thyra_EpetraThyraWrappers.cpp:431
Thyra::SpmdVectorSpaceBase::localOffset
virtual Ordinal localOffset() const =0
Returns the offset for the local sub-vector stored on this process.
Thyra::get_Epetra_Comm
RCP< const Epetra_Comm > get_Epetra_Comm(const Teuchos::Comm< Ordinal > &comm)
Get (or create) an Epetra_Comm given a Teuchos::Comm object.
Definition: Thyra_EpetraThyraWrappers.cpp:377
Thyra::SpmdVectorSpaceBase::localSubDim
virtual Ordinal localSubDim() const =0
Returns the number of local elements stored on this process.
Epetra_Map
Thyra::Range1D
Teuchos::Range1D Range1D
Definition: Thyra_OperatorVectorTypes.hpp:97
TEUCHOS_TEST_FOR_EXCEPTION
#define TEUCHOS_TEST_FOR_EXCEPTION(throw_exception_test, Exception, msg)
Thyra_EpetraThyraWrappers.hpp
View
View
Epetra_MultiVector::NumVectors
int NumVectors() const