// --- detail::mcmpiadapter_base<Base, ScheduleChecker> (fragment) ----------
// MPI-parallel wrapper around a Monte Carlo simulation class `Base`.
// NOTE(review): this is a doc-extraction fragment; the embedded numbers
// ("9", "25", "45", ...) are the original header's line numbers, and gaps
// between them mean code has been elided. Comments only — no tokens changed.
9 #include <boost/function.hpp> 10 #include <boost/utility/enable_if.hpp> 12 #include <alps/config.hpp> 14 #if defined(ALPS_HAVE_MPI) 25 template<
typename Base,
typename ScheduleChecker>
class mcmpiadapter_base :
public Base {
// Re-export the wrapped simulation's parameter container type.
28 typedef typename Base::parameters_type parameters_type;
// Constructor fragment: takes the parameter set and a schedule checker.
// The communicator and the rng_seed_step/rng_seed_base arguments appear
// only in the elided part — they are visible in the base initializer below.
45 parameters_type
const & parameters
47 , ScheduleChecker
const & check
// Seed each MPI rank differently: rank * step + base.
51 : Base(parameters, comm.rank()*rng_seed_step + rng_seed_base)
53 , schedule_checker(check)
// run() fragment: main loop. When the schedule checker says a check is
// pending (or we were already asked to stop), poll the stop callback and
// all-reduce the per-rank completed fraction; done once the sum reaches 1.
62 bool run(boost::function<
bool ()>
const & stop_callback) {
63 bool done =
false, stopped =
false;
67 if (stopped || schedule_checker.pending()) {
68 stopped = stop_callback();
// Global progress = sum over ranks of local_fraction (MPI all-reduce);
// the checker is given the new value so it can adapt its next interval.
70 schedule_checker.update(fraction =
alps::mpi::all_reduce(communicator, local_fraction, std::plus<double>()));
71 done = fraction >= 1.;
// collect_results() fragment: merge the named observables across ranks.
// An observable must have been measured on every rank or on none; a
// partial measurement across ranks is rejected with an exception.
81 typename Base::results_type
collect_results(
typename Base::result_names_type
const & names)
const {
82 typename Base::results_type partial_results;
83 for(
typename Base::result_names_type::const_iterator it = names.begin(); it != names.end(); ++it) {
// 1 if this rank has at least one measurement of *it, else 0.
84 size_t has_count=(this->measurements[*it].count() > 0);
// sum_counts = number of ranks that measured *it; the reduction of
// has_count is in the elided line(s) after the original line 85.
85 const size_t sum_counts =
89 if (static_cast<int>(sum_counts) == communicator.size()) {
// Every rank measured it: collectively merge onto rank 0 and store.
90 typename Base::observable_collection_type::value_type merged = this->measurements[*it];
91 merged.collective_merge(communicator, 0);
92 partial_results.insert(*it, merged.result());
93 }
else if (sum_counts > 0 && static_cast<int>(sum_counts) < communicator.size()) {
// Some but not all ranks measured it — inconsistent, refuse to merge.
94 throw std::runtime_error(*it +
" was measured on only some of the MPI processes.");
97 return partial_results;
// Policy object that decides when to poll the callback / communicate.
104 ScheduleChecker schedule_checker;
// --- mcmpiadapter<Base, ScheduleChecker> (primary template, fragment) -----
// Public front-end; defaults the checker policy to alps::check_schedule and
// forwards construction to detail::mcmpiadapter_base.
112 template<
typename Base,
typename ScheduleChecker = alps::check_schedule>
class mcmpiadapter :
public detail::mcmpiadapter_base<Base,ScheduleChecker> {
114 typedef detail::mcmpiadapter_base<Base,ScheduleChecker> base_type_;
117 typedef typename base_type_::parameters_type parameters_type;
// Constructor fragment: parameters, a communicator (`comm`, declared in an
// elided line), an explicit schedule checker, and optional per-rank RNG
// seed step/base (rank's seed = rank * step + base in the base class).
140 parameters_type
const & parameters
142 , ScheduleChecker
const & check
143 ,
int rng_seed_step = 1
144 ,
int rng_seed_base = 0
146 : base_type_(parameters, comm, check, rng_seed_step, rng_seed_base)
// --- mcmpiadapter<Base, alps::check_schedule> (specialization, fragment) --
// Specialization for the default wall-clock schedule checker. Adds a
// convenience constructor that builds the checker from the "Tmin"/"Tmax"
// parameters and registers those parameters in define_parameters().
152 template<
typename Base>
class mcmpiadapter<Base,
alps::
check_schedule> :
public detail::mcmpiadapter_base<Base,alps::check_schedule> {
// NOTE(review): `ScheduleChecker` is not a template parameter of this
// specialization; presumably a `typedef alps::check_schedule ScheduleChecker;`
// lives in the elided lines — verify against the full header.
155 typedef detail::mcmpiadapter_base<Base,ScheduleChecker> base_type_;
158 typedef typename base_type_::parameters_type parameters_type;
// Constructor with an explicit checker (mirrors the primary template).
181 parameters_type
const & parameters
183 , ScheduleChecker
const & check
184 ,
int rng_seed_step = 1
185 ,
int rng_seed_base = 0
187 : base_type_(parameters, comm, check, rng_seed_step, rng_seed_base)
// Convenience constructor: no checker argument — one is constructed from
// the "Tmin" and "Tmax" entries of the parameter set.
210 parameters_type
const & parameters
212 ,
int rng_seed_step = 1
213 ,
int rng_seed_base = 0
215 : base_type_(parameters, comm, ScheduleChecker(parameters[
"Tmin"], parameters[
"Tmax"]), rng_seed_step, rng_seed_base)
// Register this class's parameters on top of Base's; skipped when the
// parameter object was restored from a checkpoint (already defined there).
219 static parameters_type& define_parameters(parameters_type & parameters) {
220 base_type_::define_parameters(parameters);
221 if (parameters.is_restored())
return parameters;
// Lower/upper bounds for the progress-check interval; defaults 1 and 600
// (units not shown here — see the check_schedule documentation).
222 parameters.template define<std::size_t>(
"Tmin", 1,
"minimum time to check if simulation has finished");
223 parameters.template define<std::size_t>(
"Tmax", 600,
"maximum time to check if simulation has finished");
void all_reduce(const alps::mpi::communicator &comm, const T *val, int n, T *out_val, const OP &)
Performs an MPI_Allreduce over an array of a primitive type, T[n].
Encapsulation of an MPI communicator and some communicator-related operations.
result_names_type< S >::type result_names(S const &s)
double fraction_completed(S const &s)
results_type< S >::type collect_results(S const &s)
detail::generic_check_schedule< detail::posix_wall_clock > check_schedule