Merge pull request #2339 from atgeirr/fix-binary-op

Remove std::pointer_to_binary_function and silence a warning.
This commit is contained in:
Arne Morten Kvarving 2020-02-14 10:26:03 +01:00 committed by GitHub
commit 301aa7a7cd
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
3 changed files with 29 additions and 12 deletions

View File

@@ -580,12 +580,17 @@ private:
///
/// To be used with ParallelISTLInformation::computeReduction.
template<class T>
auto makeGlobalMaxFunctor()
{
    // Local functor replacing the C++17-removed std::pointer_to_binary_function:
    // taking the address of std::max<T> was never guaranteed to be valid anyway,
    // so we wrap the call in a named function object instead.
    struct MaxOp
    {
        using result_type = T;
        const T& operator()(const T& t1, const T& t2)
        {
            return std::max(t1, t2);
        }
    };
    // Note: the *max* functor wraps MaskToMinOperator by design (the mask
    // inverts the reduction direction); see the matching min functor below.
    return MaskToMinOperator(MaxOp());
}
namespace detail
@@ -630,12 +635,18 @@ private:
///
/// To be used with ParallelISTLInformation::computeReduction.
template<class T>
auto
makeGlobalMinFunctor()
{
    // Local functor replacing the C++17-removed std::pointer_to_binary_function;
    // calling std::min through a function object avoids taking the address of a
    // standard-library function, which is unsupported.
    struct MinOp
    {
        using result_type = T;
        const T& operator()(const T& t1, const T& t2)
        {
            return std::min(t1, t2);
        }
    };
    // Note: the *min* functor wraps MaskToMaxOperator by design (the mask
    // inverts the reduction direction); mirrors makeGlobalMaxFunctor.
    return MaskToMaxOperator(MinOp());
}
template<class T>
InnerProductFunctor<T>

View File

@@ -35,13 +35,19 @@ typedef Dune::InverseOperatorResult InverseOperatorResult;
namespace Opm
{
#if HAVE_CUDA
/// Construct the bridge and, if requested, the CUDA solver backend.
/// \param linear_solver_verbosity  verbosity forwarded to cusparseSolverBackend
/// \param maxit                    maximum linear solver iterations
/// \param tolerance                linear solver convergence tolerance
BdaBridge::BdaBridge(bool use_gpu_, int linear_solver_verbosity, int maxit, double tolerance)
    : use_gpu(use_gpu_)
{
    if (use_gpu) {
        backend.reset(new cusparseSolverBackend(linear_solver_verbosity, maxit, tolerance));
    }
}
#else
/// CUDA-less build: the bridge is a no-op stub; all parameters are unused.
/// Keeping a separate definition (instead of #ifdefs inside one body) avoids
/// unused-parameter warnings via OPM_UNUSED.
BdaBridge::BdaBridge(bool use_gpu_ OPM_UNUSED, int linear_solver_verbosity OPM_UNUSED, int maxit OPM_UNUSED, double tolerance OPM_UNUSED)
{
}
#endif

View File

@@ -42,8 +42,8 @@ class BdaBridge
private:
#if HAVE_CUDA
    std::unique_ptr<cusparseSolverBackend> backend;
    bool use_gpu;
#endif

public:
    /// Construct a BdaBridge