Timestamp: 12/12/19 18:15:14
Author: ymipsl
Message:
  • Preparing coupling functionalities.
  • Some code cleanup.

YM

Location: XIOS/dev/dev_ym/XIOS_COUPLING/src/transformation
Files: 10 edited
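
All ten files follow the same refactoring: the MPI intra-communicator and the local rank/size are now read directly from the CContext object (intraComm_, intraCommRank_, intraCommSize_) instead of going through the CContextClient, which removes the transformation code's dependency on the client object. A minimal before/after sketch of the recurring pattern, using only the member names visible in the diffs below; the MPI_Allreduce call and its localValue/globalValue variables are placeholders for illustration:

    // Before (r1622/r1639): communicator information fetched through the context client
    CContext* context = CContext::getCurrent();
    CContextClient* client = context->client;
    int clientRank = client->clientRank;
    int clientSize = client->clientSize;
    MPI_Allreduce(&localValue, &globalValue, 1, MPI_INT, MPI_SUM, client->intraComm);

    // After (r1784): the context exposes the intra-communicator and its rank/size directly
    CContext* context = CContext::getCurrent();
    int clientRank = context->intraCommRank_;
    int clientSize = context->intraCommSize_;
    MPI_Allreduce(&localValue, &globalValue, 1, MPI_INT, MPI_SUM, context->intraComm_);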

  • XIOS/dev/dev_ym/XIOS_COUPLING/src/transformation/axis_algorithm_interpolate.cpp

    r1639 → r1784

       CTimer::get("CAxisAlgorithmInterpolate::computeIndexSourceMapping_").resume() ;
       CContext* context = CContext::getCurrent();
    -  CContextClient* client=context->client;
    -  int nbClient = client->clientSize;
    +  int nbClient = context->intraCommSize_;
       CArray<bool,1>& axisMask = axisSrc_->mask;
       int srcSize  = axisSrc_->n_glo.getValue();
    …
     {
       CContext* context = CContext::getCurrent();
    -  CContextClient* client=context->client;
    -  int nbClient = client->clientSize;
    +  int nbClient = context->intraCommSize_;

       int srcSize  = axisSrc_->n_glo.getValue();
    …
         int* recvCount=new int[nbClient];
    -    MPI_Allgather(&numValue,1,MPI_INT,recvCount,1,MPI_INT,client->intraComm);
    +    MPI_Allgather(&numValue,1,MPI_INT,recvCount,1,MPI_INT,context->intraComm_);

         int* displ=new int[nbClient];
    …
         // Each client have enough global info of axis
    -    MPI_Allgatherv(sendIndexBuff,numValue,MPI_INT,recvIndexBuff,recvCount,displ,MPI_INT,client->intraComm);
    -    MPI_Allgatherv(sendValueBuff,numValue,MPI_DOUBLE,&(recvBuff[0]),recvCount,displ,MPI_DOUBLE,client->intraComm);
    +    MPI_Allgatherv(sendIndexBuff,numValue,MPI_INT,recvIndexBuff,recvCount,displ,MPI_INT,context->intraComm_);
    +    MPI_Allgatherv(sendValueBuff,numValue,MPI_DOUBLE,&(recvBuff[0]),recvCount,displ,MPI_DOUBLE,context->intraComm_);

         for (int idx = 0; idx < srcSize; ++idx)
  • XIOS/dev/dev_ym/XIOS_COUPLING/src/transformation/axis_algorithm_inverse.cpp

    r1639 → r1784

     {
       CContext* context = CContext::getCurrent();
    -  CContextClient* client=context->client;
    -  int clientRank = client->clientRank;
    -  int nbClient = client->clientSize;
    +  int clientRank = context->intraCommRank_;
    +  int nbClient = context->intraCommSize_;

       int niSrc     = axisSrc_->n.getValue();
    …
       }

    -  CClientClientDHTInt dhtIndexProcRank(globalIndex2ProcRank, client->intraComm);
    +  CClientClientDHTInt dhtIndexProcRank(globalIndex2ProcRank, context->intraComm_);
       dhtIndexProcRank.computeIndexInfoMapping(globalSrcIndex);
       CClientClientDHTInt::Index2VectorInfoTypeMap& computedGlobalIndexOnProc = dhtIndexProcRank.getInfoIndexMap();
    …
         sendRankSizeMap[itIndex->first] = sendSize;
       }
    -  MPI_Allgather(&connectedClient,1,MPI_INT,recvCount,1,MPI_INT,client->intraComm);
    +  MPI_Allgather(&connectedClient,1,MPI_INT,recvCount,1,MPI_INT,context->intraComm_);

       displ[0]=0 ;
    …
       int* recvRankBuff=new int[recvSize];
       int* recvSizeBuff=new int[recvSize];
    -  MPI_Allgatherv(sendRankBuff,connectedClient,MPI_INT,recvRankBuff,recvCount,displ,MPI_INT,client->intraComm);
    -  MPI_Allgatherv(sendSizeBuff,connectedClient,MPI_INT,recvSizeBuff,recvCount,displ,MPI_INT,client->intraComm);
    +  MPI_Allgatherv(sendRankBuff,connectedClient,MPI_INT,recvRankBuff,recvCount,displ,MPI_INT,context->intraComm_);
    +  MPI_Allgatherv(sendSizeBuff,connectedClient,MPI_INT,recvSizeBuff,recvCount,displ,MPI_INT,context->intraComm_);
       for (int i = 0; i < nbClient; ++i)
       {
    …
         requests.push_back(MPI_Request());
    -    MPI_Irecv(recvGlobalIndexSrc[recvRank], recvSize, MPI_UNSIGNED_LONG, recvRank, 46, client->intraComm, &requests.back());
    +    MPI_Irecv(recvGlobalIndexSrc[recvRank], recvSize, MPI_UNSIGNED_LONG, recvRank, 46, context->intraComm_, &requests.back());
       }
    …
         // Send global index source and mask
         requests.push_back(MPI_Request());
    -    MPI_Isend(sendGlobalIndexSrc[sendRank], sendSize, MPI_UNSIGNED_LONG, sendRank, 46, client->intraComm, &requests.back());
    +    MPI_Isend(sendGlobalIndexSrc[sendRank], sendSize, MPI_UNSIGNED_LONG, sendRank, 46, context->intraComm_, &requests.back());
       }
    …
         requests.push_back(MPI_Request());
    -    MPI_Irecv(recvValueFromSrc[recvRank], recvSize, MPI_DOUBLE, recvRank, 48, client->intraComm, &requests.back());
    +    MPI_Irecv(recvValueFromSrc[recvRank], recvSize, MPI_DOUBLE, recvRank, 48, context->intraComm_, &requests.back());
       }
    …
         // Okie, now inform the destination which source index are masked
         requests.push_back(MPI_Request());
    -    MPI_Isend(sendValueToDest[recvRank], recvSize, MPI_DOUBLE, recvRank, 48, client->intraComm, &requests.back());
    +    MPI_Isend(sendValueToDest[recvRank], recvSize, MPI_DOUBLE, recvRank, 48, context->intraComm_, &requests.back());
       }
       status.resize(requests.size());
  • XIOS/dev/dev_ym/XIOS_COUPLING/src/transformation/axis_algorithm_transformation.cpp

    r1622 → r1784

     {
       CContext* context = CContext::getCurrent();
    -  CContextClient* client=context->client;
    -  int clientRank = client->clientRank;
    -  int clientSize = client->clientSize;
    +  int clientRank = context->intraCommRank_;
    +  int clientSize = context->intraCommSize_;

       size_t globalIndex;
    …
       }

    -  CClientClientDHTInt dhtIndexProcRank(globalIndex2ProcRank, client->intraComm);
    +  CClientClientDHTInt dhtIndexProcRank(globalIndex2ProcRank, context->intraComm_);
       dhtIndexProcRank.computeIndexInfoMapping(globalAxisIndex);
  • XIOS/dev/dev_ym/XIOS_COUPLING/src/transformation/domain_algorithm_expand.cpp

    r1622 → r1784

     {
       CContext* context = CContext::getCurrent();
    -  CContextClient* client=context->client;

       int type = 1; // For edge
       CMesh mesh;
    …
       switch (domainSource->type) {
        case CDomain::type_attr::unstructured:
    -      mesh.getGlobalNghbFaces(type, client->intraComm, domainSource->i_index, bounds_lon_src, bounds_lat_src, neighborsSrc);
    +      mesh.getGlobalNghbFaces(type, context->intraComm_, domainSource->i_index, bounds_lon_src, bounds_lat_src, neighborsSrc);
           updateUnstructuredDomainAttributes(domainDestination, domainSource, neighborsSrc);
           break;
    …
     {
       CContext* context = CContext::getCurrent();
    -  CContextClient* client=context->client;

       int type = 1; // For edge
    …
       switch (domainSource->type) {
        case CDomain::type_attr::unstructured:
    -      mesh.getGlobalNghbFaces(type, client->intraComm, domainSource->i_index, bounds_lon_src, bounds_lat_src, neighborsSrc);
    +      mesh.getGlobalNghbFaces(type, context->intraComm_, domainSource->i_index, bounds_lon_src, bounds_lat_src, neighborsSrc);
           updateUnstructuredDomainAttributes(domainDestination, domainSource, neighborsSrc);
           break;
    …
       int iindexSrc, jindexSrc, globIndexSrc;
       CContext* context = CContext::getCurrent();
    -  CContextClient* client=context->client;

       // First of all, "copy" all attributes of domain source to domain destination
    …
       }

    -  CClientClientDHTDouble dhtData(localData,client->intraComm);
    +  CClientClientDHTDouble dhtData(localData,context->intraComm_);
       dhtData.computeIndexInfoMapping(globalIndexSrcOnDstDomain);
       CClientClientDHTDouble::Index2VectorInfoTypeMap& neighborData = dhtData.getInfoIndexMap();
    …
       CContext* context = CContext::getCurrent();
    -  CContextClient* client=context->client;

       // First of all, "copy" all attributes of domain source to domain destination
    …
       }

    -  CClientClientDHTDouble dhtData(localData,client->intraComm);
    +  CClientClientDHTDouble dhtData(localData, context->intraComm_);
       CArray<size_t,1> neighborInd(nbNeighbor);
       for (int idx = 0; idx < nbNeighbor; ++idx)
  • XIOS/dev/dev_ym/XIOS_COUPLING/src/transformation/domain_algorithm_generate_rectilinear.cpp

    r1639 → r1784

     {
       CContext* context = CContext::getCurrent();
    -  CContextClient* client = context->client;
    +  int clientSize = context->intraCommSize_ ;
    +  int clientRank = context->intraCommRank_ ;

       std::vector<CDomain*> domListSrcP = gridSrc->getDomains();
       std::vector<CAxis*> axisListSrcP = gridSrc->getAxis();
    …
       {
         // First, find (roundly) distribution of associated axis (if any)
    -    if (axisListSrcP.empty()) nbDomainDistributedPart_ = client->clientSize;
    +    if (axisListSrcP.empty()) nbDomainDistributedPart_ = clientSize;
         else
         {
    …
             HashXIOS<int> hashFunc;
             StdSize hashValue = hashFunc.hashVec(globalAxisIndex);
    -        std::vector<StdSize> recvBuff(client->clientSize);
    +        std::vector<StdSize> recvBuff(clientSize);
             MPI_Gather(&hashValue, 1, MPI_UNSIGNED_LONG,
                        &recvBuff[0], 1, MPI_UNSIGNED_LONG,
                        0,
    -                   client->intraComm);
    -        if (0 == client->clientRank)
    +                   context->intraComm_);
    +        if (0 == clientRank)
             {
               std::set<StdSize> setTmp;
    …
             MPI_Bcast(&nbLocalAxis[0], nbAxis, MPI_INT,
    -                  0, client->intraComm);
    +                  0, context->intraComm_);
           }

           int nbAxisDistributedPart = 1;
           for (int j = 0; j < nbAxis; ++j) nbAxisDistributedPart *= nbLocalAxis[j];
    -      nbDomainDistributedPart_ = client->clientSize/nbAxisDistributedPart;
    +      nbDomainDistributedPart_ = clientSize/nbAxisDistributedPart;
         }
       }
    …
       CContext* context = CContext::getCurrent();
    -  CContextClient* client = context->client;
    -  int modPart = (client->clientSize) % nbPartition;
    +  int modPart = (context->intraCommSize_) % nbPartition;
       if (0 != modPart)
         ERROR("CDomainAlgorithmGenerateRectilinear::computeDistributionGridDestination(CGrid* gridDest)",
            << "The grid " <<gridDest->getId() << " is not well-distributed. There is an incompatibility between distribution of axis and domain.");
    -  nbDomainDistributedPart_ = client->clientSize/nbPartition;
    +  nbDomainDistributedPart_ = context->intraCommSize_/nbPartition;

     }
  • XIOS/dev/dev_ym/XIOS_COUPLING/src/transformation/domain_algorithm_interpolate.cpp

    r1639 → r1784

       CContext* context = CContext::getCurrent();
    -  CContextClient* client=context->client;
    -  int clientRank = client->clientRank;
    +  int clientRank = context->intraCommRank_;
       int i, j, k, idx;
       std::vector<double> srcPole(3,0), dstPole(3,0);
    …
       // Calculate weight index
    -  Mapper mapper(client->intraComm);
    +  Mapper mapper(context->intraComm_);
       mapper.setVerbosity(PROGRESS) ;
    …
     {
       CContext* context = CContext::getCurrent();
    -  CContextClient* client=context->client;

       MPI_Comm poleComme(MPI_COMM_NULL);
    -  MPI_Comm_split(client->intraComm, interMapValuePole.empty() ? MPI_UNDEFINED : 1, 0, &poleComme);
    +  MPI_Comm_split(context->intraComm_, interMapValuePole.empty() ? MPI_UNDEFINED : 1, 0, &poleComme);
       if (MPI_COMM_NULL != poleComme)
       {
    …
     {
       CContext* context = CContext::getCurrent();
    -  CContextClient* client=context->client;
    -  int clientRank = client->clientRank;
    +  int clientRank = context->intraCommRank_;

       this->transformationMapping_.resize(1);
    …
     {
       CContext* context = CContext::getCurrent();
    -  CContextClient* client=context->client;

       size_t n_src = domainSrc_->ni_glo * domainSrc_->nj_glo;
       size_t n_dst = domainDest_->ni_glo * domainDest_->nj_glo;
    …
       }

    -  MPI_Allreduce(&localNbWeight, &globalNbWeight, 1, MPI_LONG, MPI_SUM, client->intraComm);
    -  MPI_Scan(&localNbWeight, &startIndex, 1, MPI_LONG, MPI_SUM, client->intraComm);
    +  MPI_Allreduce(&localNbWeight, &globalNbWeight, 1, MPI_LONG, MPI_SUM, context->intraComm_);
    +  MPI_Scan(&localNbWeight, &startIndex, 1, MPI_LONG, MPI_SUM, context->intraComm_);

       if (0 == globalNbWeight)
    …
       std::vector<StdSize> count(1, localNbWeight);

    -  WriteNetCdf netCdfWriter(filename, client->intraComm);
    +  WriteNetCdf netCdfWriter(filename, context->intraComm_);

       // Define some dimensions
    …
       CContext* context = CContext::getCurrent();
    -  CContextClient* client=context->client;
    -  int clientRank = client->clientRank;
    -  int clientSize = client->clientSize;
    +  int clientRank = context->intraCommRank_;
    +  int clientSize = context->intraCommSize_;
  • XIOS/dev/dev_ym/XIOS_COUPLING/src/transformation/domain_algorithm_transformation.cpp

    r1622 → r1784

     {
       CContext* context = CContext::getCurrent();
    -  CContextClient* client=context->client;
    -  int clientRank = client->clientRank;
    -  int clientSize = client->clientSize;
    +  int clientRank = context->intraCommRank_;
    +  int clientSize = context->intraCommSize_;

       int niGlob = domainSrc_->ni_glo.getValue();
    …
       }

    -  CClientClientDHTInt dhtIndexProcRank(globalIndex2ProcRank, client->intraComm);
    +  CClientClientDHTInt dhtIndexProcRank(globalIndex2ProcRank, context->intraComm_);
       dhtIndexProcRank.computeIndexInfoMapping(globalDomainIndex);
       globalDomainIndexOnProc = dhtIndexProcRank.getInfoIndexMap();
  • XIOS/dev/dev_ym/XIOS_COUPLING/src/transformation/generic_algorithm_transformation.cpp

    r1639 → r1784

         {
           CContext* context = CContext::getCurrent();
    -      CContextClient* client = context->client;

           computePositionElements(gridSrc, gridDst);
           std::vector<CScalar*> scalarListSrcP  = gridSrc->getScalars();
    …
           {
             distributed=domainListSrcP[elementPositionInGridSrc2DomainPosition_[elementPositionInGrid]]->isDistributed() ;
    -        MPI_Allreduce(&distributed,&distributed_glo, 1, MPI_INT, MPI_LOR, client->intraComm) ;
    +        MPI_Allreduce(&distributed,&distributed_glo, 1, MPI_INT, MPI_LOR, context->intraComm_) ;

           }
    …
           {
             distributed=axisListSrcP[elementPositionInGridSrc2AxisPosition_[elementPositionInGrid]]->isDistributed() ;
    -        MPI_Allreduce(&distributed,&distributed_glo, 1, MPI_INT, MPI_LOR, client->intraComm) ;
    +        MPI_Allreduce(&distributed,&distributed_glo, 1, MPI_INT, MPI_LOR, context->intraComm_) ;
           }
           else //it's a scalar
    …
     {
       CContext* context = CContext::getCurrent();
    -  CContextClient* client = context->client;
    -  int nbClient = client->clientSize;
    +  int nbClient = context->intraCommSize_;

       typedef std::unordered_map<int, std::vector<std::pair<int,double> > > SrcToDstMap;
    …
       int sendValue = (computeGlobalIndexOnProc) ? 1 : 0;
       int recvValue = 0;
    -  MPI_Allreduce(&sendValue, &recvValue, 1, MPI_INT, MPI_SUM, client->intraComm);
    +  MPI_Allreduce(&sendValue, &recvValue, 1, MPI_INT, MPI_SUM, context->intraComm_);
       computeGlobalIndexOnProc = (0 < recvValue);
    …
       CContext* context = CContext::getCurrent();
    -  CContextClient* client=context->client;
    -  int clientRank = client->clientRank;
    +  int clientRank = context->intraCommRank_;

       std::vector<CDomain*> domainListSrcP = gridSrc->getDomains();
    …
     {
       CContext* context = CContext::getCurrent();
    -  CContextClient* client=context->client;
    -  int clientRank = client->clientRank;
    -  int clientSize = client->clientSize;
    +  int clientRank = context->intraCommRank_;
    +  int clientSize = context->intraCommSize_;

       globalScalarIndexOnProc.rehash(std::ceil(clientSize/globalScalarIndexOnProc.max_load_factor()));
    …
     {
       CContext* context = CContext::getCurrent();
    -  CContextClient* client=context->client;
    -  int clientRank = client->clientRank;
    -  int clientSize = client->clientSize;
    +  int clientRank = context->intraCommRank_;
    +  int clientSize = context->intraCommSize_;

       size_t globalIndex;
    …
       }

    -  CClientClientDHTInt dhtIndexProcRank(globalIndex2ProcRank, client->intraComm);
    +  CClientClientDHTInt dhtIndexProcRank(globalIndex2ProcRank, context->intraComm_);
       CArray<size_t,1> globalAxisIndex(axisDst->index.numElements());
       for (int idx = 0; idx < globalAxisIndex.numElements(); ++idx)
    …
     {
       CContext* context = CContext::getCurrent();
    -  CContextClient* client=context->client;
    -  int clientRank = client->clientRank;
    -  int clientSize = client->clientSize;
    +  int clientRank = context->intraCommRank_;
    +  int clientSize = context->intraCommSize_;

       int niGlobSrc = domainSrc->ni_glo.getValue();
    …
       }

    -  CClientClientDHTInt dhtIndexProcRank(globalIndex2ProcRank, client->intraComm);
    +  CClientClientDHTInt dhtIndexProcRank(globalIndex2ProcRank, context->intraComm_);
       dhtIndexProcRank.computeIndexInfoMapping(globalDomainIndex);
    …
       CContext* context = CContext::getCurrent();
    -  CContextClient* client = context->client;
    -  int nbClient = client->clientSize;
    +  int nbClient = context->intraCommSize_;

       computePositionElements(gridDst, gridSrc);
  • XIOS/dev/dev_ym/XIOS_COUPLING/src/transformation/grid_transformation.cpp

    r1639 → r1784

       CContext* context = CContext::getCurrent();
    -  CContextClient* client = context->client;

       ListAlgoType::const_iterator itb = listAlgos_.begin(),
    …
             CTimer::get("computeTransformationMappingConvert").resume();
             nbLocalIndexOnGridDest_.push_back(nbLocalIndexOnGridDest) ;
    -        int clientRank=client->clientRank ;
    +        int clientRank=context->intraCommRank_ ;
             {
               SendingIndexGridSourceMap tmp;
    …
     {
       CContext* context = CContext::getCurrent();
    -  CContextClient* client = context->client;
    -  int nbClient = client->clientSize;
    -  int clientRank = client->clientRank;
    +  int nbClient = context->intraCommSize_;
    +  int clientRank = context->intraCommRank_;

       // Recalculate the distribution of grid destination
    -  CDistributionClient distributionClientDest(client->clientRank, tmpGridDestination_);
    +  CDistributionClient distributionClientDest(clientRank, tmpGridDestination_);
       CDistributionClient::GlobalLocalDataMap& globalLocalIndexGridDestSendToServer = distributionClientDest.getGlobalLocalDataSendToServer();
    …
         sendRankSizeMap[itIndex->first] = sendSize;
       }
    -  MPI_Allgather(&connectedClient,1,MPI_INT,recvCount,1,MPI_INT,client->intraComm);
    +  MPI_Allgather(&connectedClient,1,MPI_INT,recvCount,1,MPI_INT, context->intraComm_);

       displ[0]=0 ;
    …
       int* recvRankBuff=new int[recvSize];
       int* recvSizeBuff=new int[recvSize];
    -  MPI_Allgatherv(sendRankBuff,connectedClient,MPI_INT,recvRankBuff,recvCount,displ,MPI_INT,client->intraComm);
    -  MPI_Allgatherv(sendSizeBuff,connectedClient,MPI_INT,recvSizeBuff,recvCount,displ,MPI_INT,client->intraComm);
    +  MPI_Allgatherv(sendRankBuff,connectedClient,MPI_INT,recvRankBuff,recvCount,displ,MPI_INT,context->intraComm_);
    +  MPI_Allgatherv(sendSizeBuff,connectedClient,MPI_INT,recvSizeBuff,recvCount,displ,MPI_INT,context->intraComm_);
       for (int i = 0; i < nbClient; ++i)
       {
    …
         requests.push_back(MPI_Request());
    -    MPI_Irecv(recvGlobalIndexSrc[recvRank], recvSize, MPI_UNSIGNED_LONG, recvRank, 46, client->intraComm, &requests.back());
    +    MPI_Irecv(recvGlobalIndexSrc[recvRank], recvSize, MPI_UNSIGNED_LONG, recvRank, 46, context->intraComm_, &requests.back());
         requests.push_back(MPI_Request());
    -    MPI_Irecv(recvMaskDst[recvRank], recvSize, MPI_UNSIGNED_CHAR, recvRank, 47, client->intraComm, &requests.back());
    +    MPI_Irecv(recvMaskDst[recvRank], recvSize, MPI_UNSIGNED_CHAR, recvRank, 47, context->intraComm_, &requests.back());
       }
    …
         // Send global index source and mask
         requests.push_back(MPI_Request());
    -    MPI_Isend(sendGlobalIndexSrc[sendRank], sendSize, MPI_UNSIGNED_LONG, sendRank, 46, client->intraComm, &requests.back());
    +    MPI_Isend(sendGlobalIndexSrc[sendRank], sendSize, MPI_UNSIGNED_LONG, sendRank, 46, context->intraComm_, &requests.back());
         requests.push_back(MPI_Request());
    -    MPI_Isend(sendMaskDst[sendRank], sendSize, MPI_UNSIGNED_CHAR, sendRank, 47, client->intraComm, &requests.back());
    +    MPI_Isend(sendMaskDst[sendRank], sendSize, MPI_UNSIGNED_CHAR, sendRank, 47, context->intraComm_, &requests.back());
       }
    …
         requests.push_back(MPI_Request());
    -    MPI_Irecv(sendMaskDst[recvRank], recvSize, MPI_UNSIGNED_CHAR, recvRank, 48, client->intraComm, &requests.back());
    +    MPI_Irecv(sendMaskDst[recvRank], recvSize, MPI_UNSIGNED_CHAR, recvRank, 48, context->intraComm_, &requests.back());
       }

       // Ok, now we fill in local index of grid source (we even count for masked index)
    -  CDistributionClient distributionClientSrc(client->clientRank, gridSource_);
    +  CDistributionClient distributionClientSrc(clientRank, gridSource_);
       CDistributionClient::GlobalLocalDataMap& globalLocalIndexGridSrcSendToServer = distributionClientSrc.getGlobalLocalDataSendToServer();
       localIndexToSendFromGridSource_.push_back(SendingIndexGridSourceMap());
    …
         // Okie, now inform the destination which source index are masked
         requests.push_back(MPI_Request());
    -    MPI_Isend(recvMaskDst[recvRank], recvSize, MPI_UNSIGNED_CHAR, recvRank, 48, client->intraComm, &requests.back());
    +    MPI_Isend(recvMaskDst[recvRank], recvSize, MPI_UNSIGNED_CHAR, recvRank, 48, context->intraComm_, &requests.back());
       }
       status.resize(requests.size());
  • XIOS/dev/dev_ym/XIOS_COUPLING/src/transformation/scalar_algorithm_transformation.cpp

    r1622 → r1784

     {
       CContext* context = CContext::getCurrent();
    -  CContextClient* client=context->client;
    -  int clientRank = client->clientRank;
    -  int clientSize = client->clientSize;
    +  int clientRank = context->intraCommRank_;
    +  int clientSize = context->intraCommSize_;

       if (2 == elementSourceType) // Source is a domain
    …
         }

    -    CClientClientDHTInt dhtIndexProcRank(globalIndex2ProcRank, client->intraComm);
    +    CClientClientDHTInt dhtIndexProcRank(globalIndex2ProcRank, context->intraComm_);
         dhtIndexProcRank.computeIndexInfoMapping(globalIndexElementSource);
         globalIndexElementSourceOnProc = dhtIndexProcRank.getInfoIndexMap();
    …
         }

    -    CClientClientDHTInt dhtIndexProcRank(globalIndex2ProcRank, client->intraComm);
    +    CClientClientDHTInt dhtIndexProcRank(globalIndex2ProcRank, context->intraComm_);
         dhtIndexProcRank.computeIndexInfoMapping(globalIndexElementSource);
         globalIndexElementSourceOnProc = dhtIndexProcRank.getInfoIndexMap();
    …
         globalIndex2ProcRank[globalIndex][0] = clientRank;

    -    CClientClientDHTInt dhtIndexProcRank(globalIndex2ProcRank, client->intraComm);
    +    CClientClientDHTInt dhtIndexProcRank(globalIndex2ProcRank, context->intraComm_);
         dhtIndexProcRank.computeIndexInfoMapping(globalIndexElementSource);
         globalIndexElementSourceOnProc = dhtIndexProcRank.getInfoIndexMap();