src/smpi/internals/smpi_actor.cpp
/* Copyright (c) 2009-2022. The SimGrid Team. All rights reserved.          */

/* This program is free software; you can redistribute it and/or modify it
 * under the terms of the license (GNU LGPL) which comes with this package. */

#include "src/smpi/include/smpi_actor.hpp"
#include "mc/mc.h"
#include "simgrid/s4u/Engine.hpp"
#include "simgrid/s4u/Mutex.hpp"
#include "smpi_comm.hpp"
#include "smpi_info.hpp"
#include "src/mc/mc_replay.hpp"
#include "xbt/str.h"

#if HAVE_PAPI
#include "papi.h"
#endif

XBT_LOG_NEW_DEFAULT_SUBCATEGORY(smpi_process, smpi, "Logging specific to SMPI (kernel)");

namespace simgrid {
namespace smpi {
simgrid::xbt::Extension<simgrid::s4u::Actor, ActorExt> ActorExt::EXTENSION_ID;

ActorExt::ActorExt(s4u::Actor* actor) : actor_(actor)
{
  if (not simgrid::smpi::ActorExt::EXTENSION_ID.valid())
    simgrid::smpi::ActorExt::EXTENSION_ID = simgrid::s4u::Actor::extension_create<simgrid::smpi::ActorExt>();

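  /* Each MPI process owns two private mailboxes named after its actor pid:
   * "SMPI-<pid>" for regular payloads and "small-<pid>" for messages small
   * enough to be sent eagerly (presumably those below the
   * smpi/async-small-thresh cutoff). A peer can rebuild both names from the
   * destination's pid alone, so no rendezvous is needed to find them. */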
  mailbox_         = s4u::Mailbox::by_name("SMPI-" + std::to_string(actor_->get_pid()));
  mailbox_small_   = s4u::Mailbox::by_name("small-" + std::to_string(actor_->get_pid()));
  mailboxes_mutex_ = s4u::Mutex::create();
  timer_           = xbt_os_timer_new();
  state_           = SmpiProcessState::UNINITIALIZED;
  info_env_        = MPI_INFO_NULL;
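  /* The timer's heap contents vary from run to run; hiding them from the model
   * checker (below) keeps it from spuriously distinguishing otherwise-equal
   * states. (Rationale inferred from what MC_ignore_heap does: it excludes a
   * heap zone from state comparison.) */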
  if (MC_is_active())
    MC_ignore_heap(timer_, xbt_os_timer_size());

#if HAVE_PAPI
  if (not smpi_cfg_papi_events_file().empty()) {
    // TODO: Implement host/process/thread based counters. This implementation
    // just always takes the values passed via "default", like this:
    // "default:COUNTER1:COUNTER2:COUNTER3;".
    auto it = units2papi_setup.find("default");
    if (it != units2papi_setup.end()) {
      papi_event_set_    = it->second.event_set;
      papi_counter_data_ = it->second.counter_data;
      XBT_DEBUG("Setting PAPI set for process %li", actor->get_pid());
    } else {
      papi_event_set_ = PAPI_NULL;
      XBT_DEBUG("No PAPI set for process %li", actor->get_pid());
    }
  }
#endif
}

ActorExt::~ActorExt()
{
  xbt_os_timer_free(timer_);
}

/** @brief Prepares the current process for termination. */
void ActorExt::finalize()
{
  state_ = SmpiProcessState::FINALIZED;
  XBT_DEBUG("<%ld> Process left the game", actor_->get_pid());
  if (info_env_ != MPI_INFO_NULL)
    simgrid::smpi::Info::unref(info_env_);
  if (comm_self_ != MPI_COMM_NULL)
    simgrid::smpi::Comm::destroy(comm_self_);
  if (comm_intra_ != MPI_COMM_NULL)
    simgrid::smpi::Comm::destroy(comm_intra_);
  smpi_deployment_unregister_process(instance_id_);
}

/** @brief Check if a process is finalized */
int ActorExt::finalized() const
{
  return (state_ == SmpiProcessState::FINALIZED);
}

/** @brief Check if a process is partially initialized already */
int ActorExt::initializing() const
{
  return (state_ == SmpiProcessState::INITIALIZING);
}

/** @brief Check if a process is initialized */
int ActorExt::initialized() const
{
  // TODO cheinrich: Check if we still need this. This should be a global condition, not for a
  // single process ... ?
  return (state_ == SmpiProcessState::INITIALIZED);
}

/** @brief Mark a process as initialized (=MPI_Init called) */
void ActorExt::mark_as_initialized()
{
  if (state_ != SmpiProcessState::FINALIZED)
    state_ = SmpiProcessState::INITIALIZED;
}

/** @brief Mark a process as finalizing (=MPI_Finalize called) */
void ActorExt::mark_as_finalizing()
{
  if (state_ != SmpiProcessState::FINALIZED)
    state_ = SmpiProcessState::FINALIZING;
}

/** @brief Check if a process is finalizing */
int ActorExt::finalizing() const
{
  return (state_ == SmpiProcessState::FINALIZING);
}

void ActorExt::set_replaying(bool value)
{
  if (state_ != SmpiProcessState::FINALIZED)
    replaying_ = value;
}

bool ActorExt::replaying() const
{
  return replaying_;
}

s4u::ActorPtr ActorExt::get_actor()
{
  return actor_;
}

/**
 * @brief Returns a structure that stores the location (filename + line number) of the last call to an MPI_* function.
 *
 * @see smpi_trace_set_call_location
 */
smpi_trace_call_location_t* ActorExt::call_location()
{
  return &trace_call_loc_;
}

void ActorExt::set_privatized_region(smpi_privatization_region_t region)
{
  privatized_region_ = region;
}

smpi_privatization_region_t ActorExt::privatized_region() const
{
  return privatized_region_;
}

MPI_Comm ActorExt::comm_world() const
{
  return comm_world_ == nullptr ? MPI_COMM_NULL : *comm_world_;
}

s4u::MutexPtr ActorExt::mailboxes_mutex() const
{
  return mailboxes_mutex_;
}

#if HAVE_PAPI
int ActorExt::papi_event_set() const
{
  return papi_event_set_;
}

papi_counter_t& ActorExt::papi_counters()
{
  return papi_counter_data_;
}
#endif

xbt_os_timer_t ActorExt::timer()
{
  return timer_;
}

void ActorExt::simulated_start()
{
  simulated_ = s4u::Engine::get_clock();
}

double ActorExt::simulated_elapsed() const
{
  return s4u::Engine::get_clock() - simulated_;
}
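
/* A minimal usage sketch for the pair above (an assumption about how callers
 * bracket a simulated region; `ext` is a hypothetical ActorExt*):
 *
 *   ext->simulated_start();
 *   // ... let the simulation advance (computations, communications) ...
 *   double elapsed = ext->simulated_elapsed(); // simulated seconds, not wall-clock time
 */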

MPI_Comm ActorExt::comm_self()
{
  if (comm_self_ == MPI_COMM_NULL) {
    auto* group = new Group(1);
    comm_self_  = new Comm(group, nullptr);
    comm_self_->set_name("MPI_COMM_SELF");
    group->set_mapping(actor_->get_pid(), 0);
  }
  return comm_self_;
}
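
/* Since the lazily built group above maps only this actor (pid -> rank 0), a
 * rank query on this communicator yields 0, as MPI mandates for MPI_COMM_SELF:
 *
 *   int rank;
 *   MPI_Comm_rank(MPI_COMM_SELF, &rank); // rank == 0 in every process
 */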

MPI_Info ActorExt::info_env()
{
  if (info_env_ == MPI_INFO_NULL)
    info_env_ = new Info();
  return info_env_;
}

MPI_Comm ActorExt::comm_intra()
{
  return comm_intra_;
}

void ActorExt::set_comm_intra(MPI_Comm comm)
{
  comm_intra_ = comm;
}

void ActorExt::set_sampling(int s)
{
  sampling_ = s;
}

int ActorExt::sampling() const
{
  return sampling_;
}

void ActorExt::init()
{
  xbt_assert(smpi_get_universe_size() != 0, "SimGrid was not initialized properly before entering MPI_Init. "
                                            "Aborting, please check compilation process and use smpirun.");

  ActorExt* ext = smpi_process();
  // Nothing to do if MPI_Init was already called and argc handling is done.
  if (ext->initialized())
    return;

  const simgrid::s4u::Actor* self = simgrid::s4u::Actor::self();
  ext->instance_id_ = self->get_property("instance_id");
  const int rank = static_cast<int>(xbt_str_parse_int(self->get_property("rank"), "Cannot parse rank"));

  ext->state_ = SmpiProcessState::INITIALIZING;
  smpi_deployment_register_process(ext->instance_id_, rank, self);

  ext->comm_world_ = smpi_deployment_comm_world(ext->instance_id_);

  // Set this actor as the receiver attached to its small-payload mailbox.
  ext->mailbox_small_->set_receiver(ext->actor_);
  XBT_DEBUG("<%ld> SMPI process has been initialized: %p", ext->actor_->get_pid(), ext->actor_);
}
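
/* The "instance_id" and "rank" properties read in init() are expected to be
 * attached to each actor by the deployment, typically the one smpirun
 * generates (the xbt_assert message above points users there); a missing or
 * malformed rank surfaces as the "Cannot parse rank" error. */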

int ActorExt::get_optind() const
{
  return optind_;
}

void ActorExt::set_optind(int new_optind)
{
  optind_ = new_optind;
}

void ActorExt::bsend_buffer(void** buf, int* size)
{
  *buf  = bsend_buffer_;
  *size = bsend_buffer_size_;
}

int ActorExt::set_bsend_buffer(void* buf, int size)
{
  if (buf != nullptr && bsend_buffer_ != nullptr)
    return MPI_ERR_BUFFER;
  bsend_buffer_      = buf;
  bsend_buffer_size_ = size;
  return MPI_SUCCESS;
}
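
/* Calling-pattern sketch for the buffered-send accessors above (assumption:
 * MPI_Buffer_attach / MPI_Buffer_detach are implemented on top of them):
 *
 *   char buf[4096 + MPI_BSEND_OVERHEAD];
 *   MPI_Buffer_attach(buf, sizeof(buf)); // would reach set_bsend_buffer();
 *                                        // a second attach yields MPI_ERR_BUFFER
 *   void* old; int size;
 *   MPI_Buffer_detach(&old, &size);      // would read back via bsend_buffer()
 */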

} // namespace smpi
} // namespace simgrid