#include "context_server.hpp"
#include "buffer_in.hpp"
#include "type.hpp"
#include "context.hpp"
#include "object_template.hpp"
#include "group_template.hpp"
#include "attribute_template.hpp"
#include "domain.hpp"
#include "field.hpp"
#include "file.hpp"
#include "grid.hpp"
#include "mpi.hpp"
#include "tracer.hpp"
#include "timer.hpp"
#include "cxios.hpp"
#include "event_scheduler.hpp"
#include "server.hpp"
#include <boost/functional/hash.hpp>
#include <boost/lexical_cast.hpp> // boost::lexical_cast is used in the constructor

#ifdef _usingEP
using namespace ep_lib;
#endif
namespace xios
{

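  // Server-side endpoint of a client/server context. The constructor records
  // the communicator layout and computes the hash id under which this
  // context's events are registered with the event scheduler; on a level-1
  // server, clientPrimServer.size() is folded into the hashed string to keep
  // the id unique.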
  CContextServer::CContextServer(CContext* parent, MPI_Comm intraComm_, MPI_Comm interComm_)
  {
    context = parent;
    intraComm = intraComm_;
    MPI_Comm_size(intraComm, &intraCommSize);
    MPI_Comm_rank(intraComm, &intraCommRank);

    // interComm is an inter-communicator in client/server mode and a plain
    // intra-communicator in attached mode; commSize is the number of clients.
    interComm = interComm_;
    int flag;
    MPI_Comm_test_inter(interComm, &flag);
    if (flag) MPI_Comm_remote_size(interComm, &commSize);
    else MPI_Comm_size(interComm, &commSize);

    currentTimeLine = 0;
    scheduled = false;
    finished = false;
    boost::hash<string> hashString;
    if (CServer::serverLevel == 1)
      hashId = hashString(context->getId() + boost::lexical_cast<string>(context->clientPrimServer.size()));
    else
      hashId = hashString(context->getId());
  }

  void CContextServer::setPendingEvent(void)
  {
    pendingEvent = true;
  }

  bool CContextServer::hasPendingEvent(void)
  {
    return pendingEvent;
  }

  bool CContextServer::hasFinished(void)
  {
    return finished;
  }

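  // One pass of the server loop: probe for incoming messages, test pending
  // receives and, if requested, process complete events. Returns true once
  // the context finalize event has been handled.
  //
  // Illustrative usage sketch (hypothetical caller, not part of this file):
  //   CContextServer server(context, intraComm, interComm);
  //   while (!server.eventLoop()) { /* keep polling until finalized */ }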
  bool CContextServer::eventLoop(bool enableEventsProcessing /*= true*/)
  {
    listen();
    checkPendingRequest();
    if (enableEventsProcessing)
      processEvents();
    return finished;
  }

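  // Probe the inter-communicator for a new client message (tag 20) and try to
  // post the matching receive; if that client's buffer is full, scan the other
  // ranks so a single saturated buffer does not block everyone.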
  void CContextServer::listen(void)
  {
    int rank;
    int flag;
    MPI_Status status;
    bool okLoop;

    traceOff();
    // Probe any source on tag 20 (-2 is ep_lib's stand-in for MPI_ANY_SOURCE).
    MPI_Iprobe(-2, 20, interComm, &flag, &status);
    traceOn();

    if (flag)
    {
#ifdef _usingEP
      rank = status.ep_src;
#else
      rank = status.MPI_SOURCE;
#endif
      okLoop = true;
      if (pendingRequest.find(rank) == pendingRequest.end())
        okLoop = !listenPendingRequest(status);
      if (okLoop)
      {
        // The probed message could not be received (its buffer is full), so
        // probe the remaining ranks: one saturated client must not stall the rest.
        for (rank = 0; rank < commSize; rank++)
        {
          if (pendingRequest.find(rank) == pendingRequest.end())
          {
            traceOff();
            MPI_Iprobe(rank, 20, interComm, &flag, &status);
            traceOn();
            if (flag) listenPendingRequest(status);
          }
        }
      }
    }
  }

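  // Handle one probed message. The first message of a client carries the size
  // of the buffer to allocate for it; subsequent messages are received into
  // that buffer. Returns false when the buffer cannot hold the message, which
  // leaves it pending on the MPI layer until space is freed.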
  bool CContextServer::listenPendingRequest(MPI_Status& status)
  {
    int count;
    char* addr;
    map<int,CServerBuffer*>::iterator it;

#ifdef _usingEP
    int rank = status.ep_src;
#else
    int rank = status.MPI_SOURCE;
#endif

    it = buffers.find(rank);
    if (it == buffers.end()) // Receive the buffer size and allocate the buffer
    {
      StdSize buffSize = 0;
      MPI_Request request;

      MPI_Irecv(&buffSize, 1, MPI_LONG, rank, 20, interComm, &request);
      MPI_Wait(&request, &status);
      mapBufferSize_.insert(std::make_pair(rank, buffSize));
      it = (buffers.insert(pair<int,CServerBuffer*>(rank, new CServerBuffer(buffSize)))).first;
      return true;
    }
    else
    {
      MPI_Get_count(&status, MPI_CHAR, &count);
      if (it->second->isBufferFree(count))
      {
        addr = (char*)it->second->getBuffer(count);
        MPI_Irecv(addr, count, MPI_CHAR, rank, 20, interComm, &pendingRequest[rank]);
        bufferRequest[rank] = addr;
        return true;
      }
      else // No room left in the buffer: leave the message pending in MPI
        return false;
    }
  }


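  // Test the receives posted by listenPendingRequest() and hand each completed
  // one over to processRequest() before dropping it from the pending maps.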
  void CContextServer::checkPendingRequest(void)
  {
    map<int,MPI_Request>::iterator it;
    list<int> recvRequest;
    list<int>::iterator itRecv;
    int rank;
    int flag;
    int count;
    MPI_Status status;

    for (it = pendingRequest.begin(); it != pendingRequest.end(); it++)
    {
      rank = it->first;
      traceOff();
      MPI_Test(&it->second, &flag, &status);
      traceOn();
      if (flag)
      {
        recvRequest.push_back(rank);
        MPI_Get_count(&status, MPI_CHAR, &count);
        processRequest(rank, bufferRequest[rank], count);
      }
    }

    for (itRecv = recvRequest.begin(); itRecv != recvRequest.end(); itRecv++)
    {
      pendingRequest.erase(*itRecv);
      bufferRequest.erase(*itRecv);
    }
  }

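  // Split a completed receive into individual messages. Each message starts
  // with its size and timeline id and is appended to the event gathering all
  // sub-events of that timeline.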
  void CContextServer::processRequest(int rank, char* buff, int count)
  {
    CBufferIn buffer(buff, count);
    int size;
    size_t timeLine;
    map<size_t,CEventServer*>::iterator it;

    CTimer::get("Process request").resume();
    while (count > 0)
    {
      char* startBuffer = (char*)buffer.ptr();
      CBufferIn newBuffer(startBuffer, buffer.remain());
      newBuffer >> size >> timeLine;

      it = events.find(timeLine);
      if (it == events.end()) it = events.insert(pair<size_t,CEventServer*>(timeLine, new CEventServer)).first;
      it->second->push(rank, buffers[rank], startBuffer, size);

      buffer.advance(size);
      count = buffer.remain();
    }
    CTimer::get("Process request").suspend();
  }

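  // Process the event of the current timeline once all its sub-events have
  // arrived, synchronizing through the event scheduler so that all servers
  // treat events in the same order.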
  void CContextServer::processEvents(void)
  {
    map<size_t,CEventServer*>::iterator it;
    CEventServer* event;

    it = events.find(currentTimeLine);
    if (it != events.end())
    {
      event = it->second;

      if (event->isFull())
      {
        if (!scheduled && CServer::eventScheduler) // Skip event scheduling for attached mode and reception on client side
        {
          CServer::eventScheduler->registerEvent(currentTimeLine, hashId);
          scheduled = true;
        }
        else if (!CServer::eventScheduler || CServer::eventScheduler->queryEvent(currentTimeLine, hashId))
        {
          // In attached mode, synchronize the processes so that different events
          // cannot be scheduled by different processes. The proper fix would be
          // to use the event scheduler in attached mode as well; for now an MPI
          // barrier does the job.
          if (!CServer::eventScheduler && CXios::isServer) MPI_Barrier(intraComm);

          CTimer::get("Process events").resume();
          dispatchEvent(*event);
          CTimer::get("Process events").suspend();
          pendingEvent = false;
          delete event;
          events.erase(it);
          currentTimeLine++;
          scheduled = false;
        }
      }
    }
  }

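  // Release the per-client receive buffers.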
  CContextServer::~CContextServer()
  {
    map<int,CServerBuffer*>::iterator it;
    for (it = buffers.begin(); it != buffers.end(); ++it) delete it->second;
  }

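  // Route a complete event to the target class's dispatcher. The context
  // finalize event is handled here directly: it marks the server as finished,
  // finalizes the context and reports the memory used by the client buffers.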
  void CContextServer::dispatchEvent(CEventServer& event)
  {
    int rank;
    StdString ctxId = context->getId();
    CContext::setCurrent(ctxId);
    StdSize totalBuf = 0;

    if (event.classId == CContext::GetType() && event.type == CContext::EVENT_ID_CONTEXT_FINALIZE)
    {
      finished = true;
#pragma omp critical (_output)
      {
        info(20)<<" CContextServer: Receive context <"<<context->getId()<<"> finalize."<<endl;
      }
      context->finalize();
      std::map<int, StdSize>::const_iterator itbMap = mapBufferSize_.begin(),
                                             iteMap = mapBufferSize_.end(), itMap;
      for (itMap = itbMap; itMap != iteMap; ++itMap)
      {
        rank = itMap->first;
#pragma omp critical (_output)
        {
          report(10)<<" Memory report : Context <"<<ctxId<<"> : server side : memory used for buffer of each connection to client"<<endl
                    <<"  +) With client of rank "<<rank<<" : "<<itMap->second<<" bytes "<<endl;
        }
        totalBuf += itMap->second;
      }
#pragma omp critical (_output)
      {
        report(0)<<" Memory report : Context <"<<ctxId<<"> : server side : total memory used for buffer "<<totalBuf<<" bytes"<<endl;
      }
    }
    else if (event.classId==CContext::GetType()) CContext::dispatchEvent(event);
    else if (event.classId==CContextGroup::GetType()) CContextGroup::dispatchEvent(event);
    else if (event.classId==CCalendarWrapper::GetType()) CCalendarWrapper::dispatchEvent(event);
    else if (event.classId==CDomain::GetType()) CDomain::dispatchEvent(event);
    else if (event.classId==CDomainGroup::GetType()) CDomainGroup::dispatchEvent(event);
    else if (event.classId==CAxis::GetType()) CAxis::dispatchEvent(event);
    else if (event.classId==CAxisGroup::GetType()) CAxisGroup::dispatchEvent(event);
    else if (event.classId==CScalar::GetType()) CScalar::dispatchEvent(event);
    else if (event.classId==CScalarGroup::GetType()) CScalarGroup::dispatchEvent(event);
    else if (event.classId==CGrid::GetType()) CGrid::dispatchEvent(event);
    else if (event.classId==CGridGroup::GetType()) CGridGroup::dispatchEvent(event);
    else if (event.classId==CField::GetType()) CField::dispatchEvent(event);
    else if (event.classId==CFieldGroup::GetType()) CFieldGroup::dispatchEvent(event);
    else if (event.classId==CFile::GetType()) CFile::dispatchEvent(event);
    else if (event.classId==CFileGroup::GetType()) CFileGroup::dispatchEvent(event);
    else if (event.classId==CVariable::GetType()) CVariable::dispatchEvent(event);
    else
    {
      ERROR("void CContextServer::dispatchEvent(CEventServer& event)",<<" Bad event class Id"<<endl);
    }
  }
}