DUNE-FEM (unstable)

petscoperator.hh
#ifndef DUNE_FEM_PETSCLINEAROPERATOR_HH
#define DUNE_FEM_PETSCLINEAROPERATOR_HH

#include <cassert>
#include <cstddef>

#include <iostream>
#include <limits>
#include <memory>
#include <string>
#include <type_traits>
#include <vector>

#include <dune/common/dynmatrix.hh>

#include <dune/fem/function/petscdiscretefunction/petscdiscretefunction.hh>
#include <dune/fem/misc/functor.hh>
#include <dune/fem/misc/fmatrixconverter.hh>
#include <dune/fem/misc/petsc/petsccommon.hh>
#include <dune/fem/misc/threads/threadsafevalue.hh>
#include <dune/fem/operator/common/localcontribution.hh>
#include <dune/fem/operator/common/localmatrix.hh>
#include <dune/fem/operator/common/localmatrixwrapper.hh>
#include <dune/fem/operator/common/temporarylocalmatrix.hh>
#include <dune/fem/operator/common/operator.hh>
#include <dune/fem/operator/common/stencil.hh>
#include <dune/fem/operator/matrix/columnobject.hh>
#include <dune/fem/operator/matrix/functor.hh>
#include <dune/fem/space/mapper/nonblockmapper.hh>
#include <dune/fem/space/mapper/petsc.hh>
#include <dune/fem/storage/objectstack.hh>

#include <dune/fem/solver/parameter.hh>

#if HAVE_PETSC

#include "petscmat.h"

namespace Dune
{

  namespace Fem
  {

    struct PetscSolverParameter : public LocalParameter< SolverParameter, PetscSolverParameter >
    {
      typedef LocalParameter< SolverParameter, PetscSolverParameter > BaseType;

    public:
      using BaseType :: parameter ;
      using BaseType :: keyPrefix ;

      PetscSolverParameter( const ParameterReader &parameter = Parameter::container() )
        : BaseType( parameter )
      {}

      PetscSolverParameter( const SolverParameter& sp )
        : PetscSolverParameter( sp.keyPrefix(), sp.parameter() )
      {}

      PetscSolverParameter( const std::string &keyPrefix, const ParameterReader &parameter = Parameter::container() )
        : BaseType( keyPrefix, parameter )
      {}

      bool isPetscSolverParameter() const { return true; }

      static const int boomeramg = 0;
      static const int parasails = 1;
      static const int pilut     = 2;

      int hypreMethod() const
      {
        const std::string hyprePCNames[] = { "boomer-amg", "parasails", "pilu-t" };
        int hypreType = 0;
        if (parameter().exists("petsc.preconditioning.method"))
        {
          hypreType = parameter().getEnum( "petsc.preconditioning.hypre.method", hyprePCNames, 0 );
          std::cout << "WARNING: using deprecated parameter 'petsc.preconditioning.hypre.method' use "
                    << keyPrefix() << "preconditioning.hypre.method instead\n";
        }
        else
          hypreType = parameter().getEnum( keyPrefix()+"hypre.method", hyprePCNames, 0 );
        return hypreType;
      }

      int superluMethod() const
      {
        const std::string factorizationNames[] = { "petsc", "superlu", "mumps" };
        int factorType = 0;
        if (parameter().exists("petsc.preconditioning.lu.method"))
        {
          factorType = parameter().getEnum( "petsc.preconditioning.lu.method", factorizationNames, 0 );
          std::cout << "WARNING: using deprecated parameter 'petsc.preconditioning.lu.method' use "
                    << keyPrefix() << "preconditioning.lu.method instead\n";
        }
        else
          factorType = parameter().getEnum( keyPrefix()+"preconditioning.lu.method", factorizationNames, 0 );
        return factorType;
      }

      bool viennaCL () const {
        return parameter().getValue< bool > ( keyPrefix() + "petsc.viennacl", false );
      }
      bool blockedMode () const {
        return parameter().getValue< bool > ( keyPrefix() + "petsc.blockedmode", true );
      }
    };
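
    // Illustrative sketch (not part of the original file): the methods above
    // read their settings from the Dune::Fem parameter container. Assuming
    // the key prefix is "fem.solver." (the actual prefix depends on how the
    // parameter set was constructed), a parameter file could contain:
    //
    //   fem.solver.hypre.method: boomer-amg
    //   fem.solver.preconditioning.lu.method: mumps
    //   fem.solver.petsc.blockedmode: true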


    /* ========================================
     * class PetscLinearOperator
     */
    template< typename DomainFunction, typename RangeFunction >
    class PetscLinearOperator
      : public Fem::AssembledOperator< DomainFunction, RangeFunction >
    {
      typedef PetscLinearOperator< DomainFunction, RangeFunction > ThisType;
    public:
      typedef Mat MatrixType;
      typedef DomainFunction DomainFunctionType;
      typedef RangeFunction RangeFunctionType;
      typedef typename DomainFunctionType::RangeFieldType DomainFieldType;
      typedef typename RangeFunctionType::RangeFieldType RangeFieldType;
      typedef typename DomainFunctionType::DiscreteFunctionSpaceType DomainSpaceType;
      typedef typename RangeFunctionType::DiscreteFunctionSpaceType RangeSpaceType;

      typedef PetscDiscreteFunction< DomainSpaceType > PetscDomainFunctionType;
      typedef PetscDiscreteFunction< RangeSpaceType > PetscRangeFunctionType;

      typedef typename DomainSpaceType::GridPartType::template Codim< 0 >::EntityType DomainEntityType;
      typedef typename RangeSpaceType::GridPartType::template Codim< 0 >::EntityType RangeEntityType;

      static const unsigned int domainLocalBlockSize = DomainSpaceType::localBlockSize;
      static const unsigned int rangeLocalBlockSize  = RangeSpaceType::localBlockSize;

      static constexpr bool squareBlocked = domainLocalBlockSize == rangeLocalBlockSize ;
      static constexpr bool blockedMatrix = domainLocalBlockSize > 1 && squareBlocked;

      // Is this right? It should be rangeBS x domainBS: the system is
      // A d = r, so the domain determines the columns and the range the rows.
      typedef FlatFieldMatrix< RangeFieldType, domainLocalBlockSize, rangeLocalBlockSize > MatrixBlockType;
      typedef MatrixBlockType block_type;

    private:
      enum Status { statAssembled = 0, statAdd = 1, statInsert = 2, statGet = 3, statNothing = 4 };

      typedef PetscMappers< DomainSpaceType > DomainMappersType;
      typedef PetscMappers< RangeSpaceType > RangeMappersType;

      typedef AuxiliaryDofs< typename DomainSpaceType::GridPartType, typename DomainMappersType::GhostMapperType > DomainAuxiliaryDofsType;
      typedef AuxiliaryDofs< typename RangeSpaceType::GridPartType, typename RangeMappersType::GhostMapperType > RangeAuxiliaryDofsType;

    public:
      // the local matrix
      class LocalMatrix;

      struct LocalMatrixTraits
      {
        typedef typename DomainFunctionType::DiscreteFunctionSpaceType DomainSpaceType;
        typedef typename RangeFunctionType::DiscreteFunctionSpaceType RangeSpaceType;
        typedef LocalMatrix LocalMatrixType;
        typedef typename RangeSpaceType::RangeFieldType RangeFieldType;

        // copied this typedef from spmatrix.hh
        typedef RangeFieldType LittleBlockType;
      };

      typedef LocalMatrix ObjectType;
      typedef ThisType LocalMatrixFactoryType;
      typedef ObjectStack< LocalMatrixFactoryType > LocalMatrixStackType;

      typedef LocalMatrixWrapper< LocalMatrixStackType > LocalMatrixType;
      typedef ColumnObject< ThisType > LocalColumnObjectType;

      PetscLinearOperator ( const DomainSpaceType &domainSpace, const RangeSpaceType &rangeSpace,
                            const PetscSolverParameter& param = PetscSolverParameter() )
        : domainMappers_( domainSpace ),
          rangeMappers_( rangeSpace ),
          localMatrixStack_( *this ),
          status_( statNothing ),
          viennaCL_( param.viennaCL() ),
          blockedMode_( blockedMatrix && (!viennaCL_) && param.blockedMode() )
      {}

      PetscLinearOperator ( const std::string &, const DomainSpaceType &domainSpace, const RangeSpaceType &rangeSpace,
                            const PetscSolverParameter& param = PetscSolverParameter() )
        : PetscLinearOperator( domainSpace, rangeSpace, param )
      {}

      ~PetscLinearOperator ()
      {
        destroy();
      }

      void flushAssembly()
      {
        ::Dune::Petsc::MatAssemblyBegin( petscMatrix_, MAT_FLUSH_ASSEMBLY );
        ::Dune::Petsc::MatAssemblyEnd  ( petscMatrix_, MAT_FLUSH_ASSEMBLY );
        // set variable directly since setStatus might be disabled
        status_ = statAssembled;
      }

      void finalize ()
      {
        if( ! finalizedAlready() )
        {
          ::Dune::Petsc::MatAssemblyBegin( petscMatrix_, MAT_FINAL_ASSEMBLY );
          ::Dune::Petsc::MatAssemblyEnd  ( petscMatrix_, MAT_FINAL_ASSEMBLY );

          setUnitRowsImpl( unitRows_, auxRows_ );
          // remove all cached unit rows
          unitRows_.clear();
          auxRows_.clear();
        }
      }

    protected:
      bool finalizedAlready() const
      {
        PetscBool assembled = PETSC_FALSE ;
        ::Dune::Petsc::MatAssembled( petscMatrix_, &assembled );
        return assembled == PETSC_TRUE;
      }

      void finalizeAssembly () const
      {
        const_cast< ThisType& > (*this).finalize();
      }

    public:
      const DomainSpaceType &domainSpace () const { return domainMappers_.space(); }
      const RangeSpaceType &rangeSpace () const { return rangeMappers_.space(); }
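
      // apply for arbitrary discrete functions: the argument is copied into
      // a cached PetscDiscreteFunction, PETSc carries out the matrix-vector
      // product, and the result is copied back into dest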
      template <class DF, class RF>
      void apply ( const DF &arg, RF &dest ) const
      {
        if( ! petscArg_ )
          petscArg_.reset( new PetscDomainFunctionType( "PetscOp-arg", domainSpace() ) );
        if( ! petscDest_ )
          petscDest_.reset( new PetscRangeFunctionType( "PetscOp-dest", rangeSpace() ) );

        petscArg_->assign( arg );
        apply( *petscArg_, *petscDest_ );
        dest.assign( *petscDest_ );
      }

      void apply ( const PetscDomainFunctionType &arg, PetscRangeFunctionType &dest ) const
      {
        // make sure matrix is in correct state
        finalizeAssembly();
        ::Dune::Petsc::MatMult( petscMatrix_, *arg.petscVec(), *dest.petscVec() );
      }

      void operator() ( const DomainFunctionType &arg, RangeFunctionType &dest ) const
      {
        apply( arg, dest );
      }

      void reserve ()
      {
        reserve( SimpleStencil< DomainSpaceType, RangeSpaceType >(0), true );
      }

      template <class Set>
      void reserve (const std::vector< Set >& sparsityPattern )
      {
        reserve( StencilWrapper< DomainSpaceType, RangeSpaceType, Set >( sparsityPattern ), true );
      }

      template <class StencilType>
      void reserve (const StencilType &stencil, const bool isSimpleStencil = false )
      {
        domainMappers_.update();
        rangeMappers_.update();

        if( sequence_ != domainSpace().sequence() )
        {
          // clear Petsc Mat
          destroy();

          // reset temporary Petsc discrete functions
          petscArg_.reset();
          petscDest_.reset();

          // create matrix
          ::Dune::Petsc::MatCreate( domainSpace().gridPart().comm(), &petscMatrix_ );

          // set sizes of the matrix (columns == domain and rows == range)
          const PetscInt localCols = domainMappers_.ghostMapper().interiorSize() * domainLocalBlockSize;
          const PetscInt localRows = rangeMappers_.ghostMapper().interiorSize() * rangeLocalBlockSize;

          const PetscInt globalCols = domainMappers_.parallelMapper().size() * domainLocalBlockSize;
          const PetscInt globalRows = rangeMappers_.parallelMapper().size() * rangeLocalBlockSize;

          ::Dune::Petsc::MatSetSizes( petscMatrix_, localRows, localCols, globalRows, globalCols );

          PetscInt bs = 1;
          if( viennaCL_ )
          {
            ::Dune::Petsc::MatSetType( petscMatrix_, MATAIJVIENNACL );
          }
          else if( blockedMode_ )
          {
            bs = domainLocalBlockSize ;
            ::Dune::Petsc::MatSetType( petscMatrix_, MATBAIJ );
            // set block size
            ::Dune::Petsc::MatSetBlockSize( petscMatrix_, bs );
          }
          else
          {
            ::Dune::Petsc::MatSetType( petscMatrix_, MATAIJ );
          }
          /*
          std::cout << "Matrix dimension with bs=" << bs << " "
                    << localRows << "x" << localCols << " "
                    << globalRows << "x" << globalCols << " "
                    << rangeLocalBlockSize/bs << "x" << domainLocalBlockSize/bs << " "
                    << std::endl;
          */

          // There is an issue with the stencil and the nonzero pattern in the
          // case of domainSpace != rangeSpace. In this case additional
          // mallocs are required during matrix assembly, which heavily
          // impacts the performance. As a quick fix we use a global value
          // for the number of nonzeros per row. That leads to a
          // significant increase in memory usage, but not too much
          // computational overhead in assembly. The issue with the stencil
          // is a bug and needs fixing...
          if( isSimpleStencil || std::is_same< StencilType, SimpleStencil< DomainSpaceType, RangeSpaceType > >::value )
          {
            ::Dune::Petsc::MatSetUp( petscMatrix_, bs, domainLocalBlockSize * stencil.maxNonZerosEstimate() );
          }
          else
          {
            DomainAuxiliaryDofsType domainAuxiliaryDofs( domainMappers_.ghostMapper() );
            RangeAuxiliaryDofsType rangeAuxiliaryDofs( rangeMappers_.ghostMapper() );

            std::vector< PetscInt > d_nnz( localRows / bs, 0 );
            std::vector< PetscInt > o_nnz( localRows / bs, 0 );
            for( const auto& entry : stencil.globalStencil() )
            {
              const int petscIndex = rangeMappers_.ghostIndex( entry.first );
              if( rangeAuxiliaryDofs.contains( petscIndex ) )
                continue;

              for (unsigned int rb = 0; rb<rangeLocalBlockSize/bs; ++rb)
              {
                const size_t row = petscIndex*rangeLocalBlockSize/bs + rb;
                // Remark: for DG, ghost entities should not be inserted into the
                // stencil to get optimal results, but they are needed for ISTL...
                assert( row < size_t(localRows/bs) );
                d_nnz[ row ] = o_nnz[ row ] = 0;
                for( const auto local : entry.second )
                {
                  if( !domainAuxiliaryDofs.contains( domainMappers_.ghostIndex( local ) ) )
                    d_nnz[ row ] += domainLocalBlockSize/bs;
                  else
                    o_nnz[ row ] += domainLocalBlockSize/bs;
                }
              }
            }
            ::Dune::Petsc::MatSetUp( petscMatrix_, bs, d_nnz.data(), o_nnz.data() );
          }
          sequence_ = domainSpace().sequence();
        }

        flushAssembly();
        status_ = statAssembled ;
      }
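
      // Illustrative sketch (not part of the original file): a typical call
      // passes one of the stencils from dune/fem/operator/common/stencil.hh,
      // e.g. for a method coupling neighboring elements:
      //
      //   Dune::Fem::DiagonalAndNeighborStencil< DomainSpaceType, RangeSpaceType >
      //     stencil( domainSpace, rangeSpace );
      //   linearOperator.reserve( stencil );
      //
      // The no-argument reserve() above uses a SimpleStencil, i.e. a single
      // global non-zeros estimate per row.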

      virtual void clear ()
      {
        flushAssembly();
        ::Dune::Petsc::MatZeroEntries( petscMatrix_ );
        flushAssembly();

        unitRows_.clear();
        auxRows_.clear();
      }
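
      // zero the collected rows: unit rows get 1.0 on the diagonal,
      // auxiliary (ghost) rows get 0.0 (both via MatZeroRows)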
      void setUnitRowsImpl( const std::vector< PetscInt >& r,
                            const std::vector< PetscInt >& a )
      {
        ::Dune::Petsc::MatZeroRows( petscMatrix_, r.size(), r.data(), 1.0 );
        ::Dune::Petsc::MatZeroRows( petscMatrix_, a.size(), a.data(), 0.0 );
      }

      template <class Container> // could be std::set or std::vector
      void setUnitRows( const Container& unitRows, const Container& auxRows )
      {
        std::vector< PetscInt > r, a;
        r.reserve( unitRows.size() );
        a.reserve( auxRows.size() );

        auto setupRows = [this] (const Container& rows, std::vector< PetscInt >& r )
        {
          for( const auto& row : rows )
          {
            const PetscInt block = this->rangeMappers_.parallelIndex( row / rangeLocalBlockSize );
            r.push_back( block * rangeLocalBlockSize + (row % rangeLocalBlockSize) );
          }
        };

        setupRows( unitRows, r );
        setupRows( auxRows, a );

        if( finalizedAlready() )
        {
          setUnitRowsImpl( r, a );
        }
        else
        {
          unitRows_.reserve( unitRows_.size() + r.size() );
          for( const auto& row : r )
            unitRows_.push_back( row );

          auxRows_.reserve( auxRows_.size() + a.size() );
          for( const auto& row : a )
            auxRows_.push_back( row );
        }
      }

      ObjectType* newObject() const
      {
        return new ObjectType( *this, domainSpace(), rangeSpace() );
      }

      [[deprecated("Use TemporaryLocalMatrix,LocalContribution and {get,add,set}LocalMatrix")]]
      LocalMatrixType localMatrix ( const DomainEntityType &domainEntity, const RangeEntityType &rangeEntity ) const
      {
        return LocalMatrixType( localMatrixStack_, domainEntity, rangeEntity );
      }

      LocalColumnObjectType localColumn( const DomainEntityType &colEntity ) const
      {
        return LocalColumnObjectType ( *this, colEntity );
      }

    public:
      void unitRow( const PetscInt localRow, const PetscScalar diag = 1.0 )
      {
        // rows are indexed by the range space, hence rangeLocalBlockSize
        std::array< PetscInt, rangeLocalBlockSize > rows;
        const PetscInt row = rangeMappers_.parallelIndex( localRow );
        for( unsigned int i=0, r = row * rangeLocalBlockSize; i<rangeLocalBlockSize; ++i, ++r )
        {
          rows[ i ] = r;
        }

        if( finalizedAlready() )
        {
          // set given row to a zero row with diagonal entry equal to diag
          ::Dune::Petsc::MatZeroRows( petscMatrix_, rangeLocalBlockSize, rows.data(), diag );
        }
        else
        {
          // this only works for diag = 1
          assert( std::abs( diag - 1. ) < 1e-12 );
          unitRows_.reserve( unitRows_.size() + rangeLocalBlockSize );
          for( const auto& r : rows )
          {
            unitRows_.push_back( r );
          }
        }
      }
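
      // Illustrative sketch (not part of the original file): unitRow is the
      // typical way to enforce Dirichlet constraints row-wise, e.g.
      //
      //   for( const auto& row : dirichletBlocks )  // hypothetical container of block rows
      //     linearOperator.unitRow( row );
      //
      // Rows passed before the matrix is finalized are cached in unitRows_
      // and applied in finalize().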

      bool blockedMode() const { return blockedMode_; }

    protected:
      template< class PetscOp >
      void applyToBlock ( const PetscInt localRow, const PetscInt localCol, const MatrixBlockType& block, PetscOp op )
      {
#ifndef NDEBUG
        const PetscInt localCols = domainMappers_.ghostMapper().interiorSize() * domainLocalBlockSize;
        const PetscInt localRows = rangeMappers_.ghostMapper().interiorSize() * rangeLocalBlockSize;
        assert( localRow < localRows );
        assert( localCol < localCols );
#endif

        if( blockedMode_ )
        {
          // convert process local indices to global indices
          // (rows are mapped by the range space, columns by the domain space)
          const PetscInt row = rangeMappers_.parallelIndex( localRow );
          const PetscInt col = domainMappers_.parallelIndex( localCol );
          ::Dune::Petsc::MatSetValuesBlocked( petscMatrix_, 1, &row, 1, &col, block.data(), op );
        }
        else
        {
          // convert process local indices to global indices
          // (rows are mapped by the range space, columns by the domain space)
          const PetscInt row = rangeMappers_.parallelIndex( localRow );
          const PetscInt col = domainMappers_.parallelIndex( localCol );
          std::array< PetscInt, domainLocalBlockSize > rows;
          std::array< PetscInt, domainLocalBlockSize > cols;
          for( unsigned int i=0, r = row * domainLocalBlockSize, c = col * domainLocalBlockSize; i<domainLocalBlockSize; ++i, ++r, ++c )
          {
            rows[ i ] = r;
            cols[ i ] = c;
          }

          // insert/add the block values at the computed global positions
          ::Dune::Petsc::MatSetValues( petscMatrix_, domainLocalBlockSize, rows.data(), domainLocalBlockSize, cols.data(), block.data(), op );
        }
        setStatus( statAssembled );
      }

      template< class LocalBlock, class PetscOp >
      void applyToBlock ( const size_t row, const size_t col, const LocalBlock& block, PetscOp op )
      {
        assert( block.rows() == rangeLocalBlockSize );
        assert( block.cols() == domainLocalBlockSize );

        // copy to MatrixBlockType data structure suited to be inserted into Mat
        MatrixBlockType matBlock( block );
        applyToBlock( row, col, matBlock, op );
      }

      template< class LocalBlock >
      void setBlock ( const size_t row, const size_t col, const LocalBlock& block )
      {
#ifndef USE_SMP_PARALLEL
        assert( status_==statAssembled || status_==statInsert );
#endif
        assert( row < std::numeric_limits< int > :: max() );
        assert( col < std::numeric_limits< int > :: max() );

        setStatus( statInsert );
        applyToBlock( static_cast< PetscInt > (row), static_cast< PetscInt > (col), block, INSERT_VALUES );
      }

      template< class LocalBlock >
      void addBlock ( const size_t row, const size_t col, const LocalBlock& block )
      {
#ifndef USE_SMP_PARALLEL
        assert( status_==statAssembled || status_==statAdd );
#endif
        assert( row < std::numeric_limits< int > :: max() );
        assert( col < std::numeric_limits< int > :: max() );

        setStatus( statAdd );
        applyToBlock( static_cast< PetscInt > (row), static_cast< PetscInt > (col), block, ADD_VALUES );
      }

    protected:
      typedef TemporaryLocalMatrix< DomainSpaceType, RangeSpaceType > TemporaryLocalMatrixType;

      // specialization for temporary local matrix; no copy of the values is needed
      template <class Operation>
      const PetscScalar* getValues( const unsigned int rSize, const unsigned int cSize,
                                    const TemporaryLocalMatrixType &localMat, const Operation&,
                                    const std::integral_constant< bool, false > nonscaled )
      {
        return localMat.data();
      }

      // specialization for the local matrix getter; no copy of the values is needed
      template <class LM, class Operation>
      const PetscScalar* getValues( const unsigned int rSize, const unsigned int cSize,
                                    const Assembly::Impl::LocalMatrixGetter< LM >& localMat, const Operation&,
                                    const std::integral_constant< bool, false > nonscaled )
      {
        return localMat.localMatrix().data();
      }

      // retrieve values for arbitrary local matrix
      template <class LocalMatrix, class Operation, bool T>
      const PetscScalar* getValues( const unsigned int rSize, const unsigned int cSize,
                                    const LocalMatrix &localMat, const Operation& operation,
                                    const std::integral_constant< bool, T > scaled )
      {
        std::vector< PetscScalar >& v = *(v_);
        v.resize( rSize * cSize );
        for( unsigned int i = 0, ic = 0 ; i < rSize; ++i )
        {
          for( unsigned int j = 0; j < cSize; ++j, ++ic )
          {
            v[ ic ] = operation( localMat.get( i, j ) );
          }
        }
        return v.data();
      }

      template< class LocalMatrix, class Operation, class PetscOp, bool T >
      void applyLocalMatrix ( const DomainEntityType &domainEntity, const RangeEntityType &rangeEntity,
                              const LocalMatrix &localMat, const Operation& operation,
                              PetscOp petscOp,
                              const std::integral_constant< bool, T > scaled )
      {
        auto& rcTemp = *(rcTemp_);
        std::vector< PetscInt >& r = rcTemp.first;
        std::vector< PetscInt >& c = rcTemp.second;

        if( blockedMode_ )
        {
          setupIndicesBlocked( rangeMappers_, rangeEntity, r );
          setupIndicesBlocked( domainMappers_, domainEntity, c );

          // domainLocalBlockSize == rangeLocalBlockSize
          const unsigned int rSize = r.size() * domainLocalBlockSize ;
          const unsigned int cSize = c.size() * domainLocalBlockSize ;

          const PetscScalar* values = getValues( rSize, cSize, localMat, operation, scaled );
          ::Dune::Petsc::MatSetValuesBlocked( petscMatrix_, r.size(), r.data(), c.size(), c.data(), values, petscOp );
        }
        else
        {
          setupIndices( rangeMappers_, rangeEntity, r );
          setupIndices( domainMappers_, domainEntity, c );

          const unsigned int rSize = r.size();
          const unsigned int cSize = c.size();

          const PetscScalar* values = getValues( rSize, cSize, localMat, operation, scaled );
          ::Dune::Petsc::MatSetValues( petscMatrix_, rSize, r.data(), cSize, c.data(), values, petscOp );
        }
        setStatus( statAssembled );
      }

    public:
      template< class LocalMatrix >
      void addLocalMatrix ( const DomainEntityType &domainEntity, const RangeEntityType &rangeEntity, const LocalMatrix &localMat )
      {
#ifndef USE_SMP_PARALLEL
        assert( status_==statAssembled || status_==statAdd );
#endif
        setStatus( statAdd );

        auto operation = [] ( const PetscScalar& value ) -> PetscScalar { return value; };

        applyLocalMatrix( domainEntity, rangeEntity, localMat, operation, ADD_VALUES, std::integral_constant< bool, false >() );
      }

      template< class LocalMatrix, class Scalar >
      void addScaledLocalMatrix ( const DomainEntityType &domainEntity, const RangeEntityType &rangeEntity, const LocalMatrix &localMat, const Scalar &s )
      {
#ifndef USE_SMP_PARALLEL
        assert( status_==statAssembled || status_==statAdd );
#endif
        setStatus( statAdd );

        auto operation = [ &s ] ( const PetscScalar& value ) -> PetscScalar { return s * value; };

        applyLocalMatrix( domainEntity, rangeEntity, localMat, operation, ADD_VALUES, std::integral_constant< bool, true >() );
      }

      template< class LocalMatrix >
      void setLocalMatrix ( const DomainEntityType &domainEntity, const RangeEntityType &rangeEntity, const LocalMatrix &localMat )
      {
#ifndef USE_SMP_PARALLEL
        assert( status_==statAssembled || status_==statInsert );
#endif
        setStatus( statInsert );

        auto operation = [] ( const PetscScalar& value ) -> PetscScalar { return value; };

        applyLocalMatrix( domainEntity, rangeEntity, localMat, operation, INSERT_VALUES, std::integral_constant< bool, false >() );
      }

      template< class LocalMatrix >
      void getLocalMatrix ( const DomainEntityType &domainEntity, const RangeEntityType &rangeEntity, LocalMatrix &localMat ) const
      {
        // make sure matrix is in correct state before using
        finalizeAssembly();

#ifndef USE_SMP_PARALLEL
        assert( status_==statAssembled || status_==statGet );
#endif
        setStatus( statGet );

        auto& rcTemp = *(rcTemp_);
        std::vector< PetscInt >& r = rcTemp.first;
        std::vector< PetscInt >& c = rcTemp.second;
        std::vector< PetscScalar >& v = *(v_);

        setupIndices( rangeMappers_, rangeEntity, r );
        setupIndices( domainMappers_, domainEntity, c );

        v.resize( r.size() * c.size() );
        ::Dune::Petsc::MatGetValues( petscMatrix_, r.size(), r.data(), c.size(), c.data(), v.data() );

        for( std::size_t i = 0 ; i < r.size(); ++i )
          for( std::size_t j = 0; j < c.size(); ++j )
            localMat.set( i, j, v[ i * c.size() + j ] );

        setStatus( statAssembled );
      }

      void scaleLocalMatrix ( const DomainEntityType &domainEntity, const RangeEntityType &rangeEntity, const RangeFieldType &s )
      {
        // make sure matrix is in correct state before using
        finalizeAssembly();

#ifndef USE_SMP_PARALLEL
        assert( status_==statAssembled || status_==statGet );
#endif
        setStatus( statGet );

        auto& rcTemp = *(rcTemp_);
        std::vector< PetscInt >& r = rcTemp.first;
        std::vector< PetscInt >& c = rcTemp.second;
        std::vector< PetscScalar >& v = *(v_);

        setupIndices( rangeMappers_, rangeEntity, r );
        setupIndices( domainMappers_, domainEntity, c );

        const unsigned int rSize = r.size();
        const unsigned int cSize = c.size();
        const std::size_t size = rSize * cSize;

        v.resize( size );
        // get values
        ::Dune::Petsc::MatGetValues( petscMatrix_, r.size(), r.data(), c.size(), c.data(), v.data() );

        // scale values
        for( std::size_t i = 0; i < size; ++i )
          v[ i ] *= s;

        // set values again
        ::Dune::Petsc::MatSetValues( petscMatrix_, rSize, r.data(), cSize, c.data(), v.data(), INSERT_VALUES );
        setStatus( statAssembled );
      }

      // for debugging: print matrix to standard out
      void view () const
      {
        ::Dune::Petsc::MatView( petscMatrix_, PETSC_VIEWER_STDOUT_WORLD );
      }

      // print matrix (for debugging only; only std::cout and std::cerr are supported)
      void print( std::ostream& s ) const
      {
        if( &s == &std::cout || &s == &std::cerr )
        {
          view();
        }
      }

      // return reference to PETSc matrix object
      Mat& exportMatrix () const
      {
        // make sure matrix is in correct state
        finalizeAssembly();
        return petscMatrix_;
      }

    private:
      PetscLinearOperator ();

      void destroy ()
      {
        if( status_ != statNothing )
        {
          ::Dune::Petsc::MatDestroy( &petscMatrix_ );
          status_ = statNothing ;
        }
        sequence_ = -1;
        unitRows_.clear();
        auxRows_.clear();
      }

      void setStatus (const Status &newstatus) const
      {
        // in case OpenMP is used simply avoid status check
#ifndef USE_SMP_PARALLEL
        status_ = newstatus;
#endif
      }

      template< class DFS, class Entity >
      static void setupIndices ( const PetscMappers< DFS > &mappers, const Entity &entity, std::vector< PetscInt > &indices )
      {
        NonBlockMapper< const typename PetscMappers< DFS >::ParallelMapperType, DFS::localBlockSize > nonBlockMapper( mappers.parallelMapper() );
        nonBlockMapper.map( entity, indices );
      }

      template< class DFS, class Entity >
      static void setupIndicesBlocked ( const PetscMappers< DFS > &mappers, const Entity &entity, std::vector< PetscInt > &indices )
      {
        mappers.parallelMapper().map( entity, indices );
      }

      /*
       * data fields
       */
      DomainMappersType domainMappers_;
      RangeMappersType rangeMappers_;

      int sequence_ = -1;
      mutable Mat petscMatrix_;

      mutable LocalMatrixStackType localMatrixStack_;
      mutable Status status_;

      const bool viennaCL_;
      const bool blockedMode_;

      mutable std::unique_ptr< PetscDomainFunctionType > petscArg_;
      mutable std::unique_ptr< PetscRangeFunctionType > petscDest_;

      mutable ThreadSafeValue< std::vector< PetscScalar > > v_;
      mutable ThreadSafeValue< std::pair< std::vector< PetscInt >, std::vector< PetscInt > > > rcTemp_;

      mutable std::vector< PetscInt > unitRows_;
      mutable std::vector< PetscInt > auxRows_;
    };
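
    // Illustrative usage sketch (not part of the original file; `space`,
    // `localMat`, `u` and `w` are assumptions for the example):
    //
    //   typedef Dune::Fem::AdaptiveDiscreteFunction< SpaceType > DiscreteFunctionType;
    //   Dune::Fem::PetscLinearOperator< DiscreteFunctionType, DiscreteFunctionType >
    //     linOp( space, space );
    //   linOp.reserve( Dune::Fem::DiagonalAndNeighborStencil< SpaceType, SpaceType >( space, space ) );
    //   linOp.clear();
    //   for( const auto& entity : space )                    // assemble element matrices
    //     linOp.addLocalMatrix( entity, entity, localMat );
    //   linOp.finalize();
    //   linOp( u, w );                                       // w = A u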


    /* ========================================
     * class PetscLinearOperator::LocalMatrix
     */
    template< typename DomainFunction, typename RangeFunction >
    class PetscLinearOperator< DomainFunction, RangeFunction >::LocalMatrix
      : public LocalMatrixDefault< typename PetscLinearOperator< DomainFunction, RangeFunction >::LocalMatrixTraits >
    {
      typedef LocalMatrix ThisType;
      typedef LocalMatrixDefault< typename PetscLinearOperator< DomainFunction, RangeFunction >::LocalMatrixTraits > BaseType;

      typedef PetscLinearOperator< DomainFunction, RangeFunction > PetscLinearOperatorType;

    public:
      typedef PetscInt DofIndexType;
      typedef std::vector< DofIndexType > IndexVectorType;
      typedef typename DomainFunction::DiscreteFunctionSpaceType DomainSpaceType;
      typedef typename RangeFunction::DiscreteFunctionSpaceType RangeSpaceType;
      typedef typename DomainSpaceType::BasisFunctionSetType DomainBasisFunctionSetType;
      typedef typename RangeSpaceType::BasisFunctionSetType RangeBasisFunctionSetType;

      enum { rangeBlockSize = RangeSpaceType::localBlockSize };
      enum { domainBlockSize = DomainSpaceType::localBlockSize };

    private:

      // needed for .mapEach below
      template< typename PetscMapping >
      struct PetscAssignFunctor
      {
        explicit PetscAssignFunctor ( const PetscMapping &petscMapping, IndexVectorType &indices )
          : petscMapping_( petscMapping ),
            indices_( indices )
        {}

        template< typename T >
        void operator() ( const std::size_t localIndex, T globalIndex ) { indices_[ localIndex ] = petscMapping_.globalMapping( globalIndex ); }

      private:
        const PetscMapping &petscMapping_;
        IndexVectorType &indices_;
      };

    public:
      [[deprecated("Use TemporaryLocalMatrix and {add,set,get}LocalMatrix")]]
      LocalMatrix ( const PetscLinearOperatorType &petscLinOp,
                    const DomainSpaceType &domainSpace,
                    const RangeSpaceType &rangeSpace )
        : BaseType( domainSpace, rangeSpace ),
          petscLinearOperator_( petscLinOp )
      {}

      void finalize()
      {
      }

      void init ( const DomainEntityType &domainEntity, const RangeEntityType &rangeEntity )
      {
        // call initialize on base class
        BaseType :: init( domainEntity, rangeEntity );

        //*************************************************
        //  The row indices are determined by the range
        //  space, the column indices by the domain space.
        //*************************************************

        setupIndices( petscLinearOperator_.rangeMappers_, rangeEntity, rowIndices_ );
        setupIndices( petscLinearOperator_.domainMappers_, domainEntity, colIndices_ );

        status_ = statAssembled;
        petscLinearOperator_.setStatus( status_ );
      }

      inline void add ( const int localRow, const int localCol, const RangeFieldType &value )
      {
        assert( status_==statAssembled || status_==statAdd );
        status_ = statAdd;
        petscLinearOperator_.setStatus( status_ );
        ::Dune::Petsc::MatSetValue( petscMatrix(), globalRowIndex( localRow ), globalColIndex( localCol ), value, ADD_VALUES );
      }

      inline void set ( const int localRow, const int localCol, const RangeFieldType &value )
      {
        assert( status_==statAssembled || status_==statInsert );
        status_ = statInsert;
        petscLinearOperator_.setStatus( status_ );
        ::Dune::Petsc::MatSetValue( petscMatrix(), globalRowIndex( localRow ), globalColIndex( localCol ), value, INSERT_VALUES );
      }

      void clearRow ( const int localRow )
      {
        assert( status_==statAssembled || status_==statInsert );
        status_ = statInsert;
        petscLinearOperator_.setStatus( status_ );
        const int col = this->columns();
        const int globalRowIdx = globalRowIndex( localRow );
        for( int localCol = 0; localCol < col; ++localCol )
        {
          ::Dune::Petsc::MatSetValue( petscMatrix(), globalRowIdx, globalColIndex( localCol ), 0.0, INSERT_VALUES );
        }
      }

      inline const RangeFieldType get ( const int localRow, const int localCol ) const
      {
        assert( status_==statAssembled || status_==statGet );
        status_ = statGet;
        petscLinearOperator_.setStatus( status_ );
        RangeFieldType v[ 1 ];
        const int r[] = { globalRowIndex( localRow ) };
        const int c[] = { globalColIndex( localCol ) };
        ::Dune::Petsc::MatGetValues( petscMatrix(), 1, r, 1, c, v );
        return v[ 0 ];
      }

      inline void scale ( const RangeFieldType &factor )
      {
        const_cast< PetscLinearOperatorType& > (petscLinearOperator_).scaleLocalMatrix( this->domainEntity(), this->rangeEntity(), factor );
      }

    private:
      LocalMatrix ();

      Mat& petscMatrix () { return petscLinearOperator_.petscMatrix_; }
      const Mat& petscMatrix () const { return petscLinearOperator_.petscMatrix_; }

    public:
      int rows () const { return rowIndices_.size(); }
      int columns () const { return colIndices_.size(); }

    private:
      DofIndexType globalRowIndex ( const int localRow ) const
      {
        assert( localRow < static_cast< int >( rowIndices_.size() ) );
        return rowIndices_[ localRow ];
      }

      DofIndexType globalColIndex ( const int localCol ) const
      {
        assert( localCol < static_cast< int >( colIndices_.size() ) );
        return colIndices_[ localCol ];
      }

      /*
       * data fields
       */
      const PetscLinearOperatorType &petscLinearOperator_;
      IndexVectorType rowIndices_;
      IndexVectorType colIndices_;
      mutable Status status_;
    };


  } // namespace Fem

} // namespace Dune

#endif // #if HAVE_PETSC

#endif // #ifndef DUNE_FEM_PETSCLINEAROPERATOR_HH