Changeset 2397


Timestamp:
08/31/22 17:23:56
Author:
ymipsl
Message:
  • Optimize remote connector computation in the read case (reverse way).
  • Don't compute clientFromServerConnector (and all its intermediate computations) anymore in the non-reading case.

YM

Location:
XIOS3/trunk/src
Files:
14 edited

  • XIOS3/trunk/src/distribution/grid_client_server_remote_connector.cpp

    r2304 → r2397

    @@ -38 +38 @@
          }
        }
    +
    + void CGridClientServerRemoteConnector::computeConnectorOut()
    + {
    +   set<int> workflowRankToRemove ;
    +   vector<bool> workflowIsSrcViewDistributed ;
    +   {
    +     auto workflowRemoteConnector=make_shared<CGridRemoteConnector>(srcWorkflowView_,dstView_,localComm_,remoteSize_) ;
    +     workflowRemoteConnector->computeViewDistribution() ;
    +     workflowRemoteConnector->computeConnectorMethods(false) ;
    +     workflowRemoteConnector->computeRedondantRanks(false) ;
    +     workflowRankToRemove = workflowRemoteConnector->getRankToRemove() ;
    +     workflowIsSrcViewDistributed = workflowRemoteConnector->getIsSrcViewDistributed() ;
    +   }
    +
    +   computeViewDistribution() ;
    +
    +   for(int i=0;i<srcView_.size();i++) isSrcViewDistributed_[i] = isSrcViewDistributed_[i] || workflowIsSrcViewDistributed[i] ;
    +   computeConnectorMethods(false) ;
    +   computeRedondantRanks(false) ;
     
    +   for(auto& rank : rankToRemove_)
    +     if (workflowRankToRemove.count(rank)!=0)
    +       for(auto& element : elements_) element.erase(rank) ;
    + }
    +
    + void CGridClientServerRemoteConnector::computeConnectorIn()
    + {
    +   set<int> workflowRankToRemove ;
    +   vector<bool> workflowIsSrcViewDistributed ;
    +   {
    +     auto workflowRemoteConnector=make_shared<CGridRemoteConnector>(srcWorkflowView_,dstView_,localComm_,remoteSize_) ;
    +     workflowRemoteConnector->computeViewDistribution() ;
    +     workflowRemoteConnector->computeConnectorMethods(true) ;
    +     workflowRemoteConnector->computeRedondantRanks(true) ;
    +     workflowRankToRemove = workflowRemoteConnector->getRankToRemove() ;
    +     workflowIsSrcViewDistributed = workflowRemoteConnector->getIsSrcViewDistributed() ;
    +   }
    +
    +   computeViewDistribution() ;
    +
    +   for(int i=0;i<srcView_.size();i++) isSrcViewDistributed_[i] = isSrcViewDistributed_[i] || workflowIsSrcViewDistributed[i] ;
    +   computeConnectorMethods(true) ;
    +   computeRedondantRanks(true) ;
    +
    +   for(auto& rank : rankToRemove_)
    +     if (workflowRankToRemove.count(rank)!=0)
    +       for(auto& element : elements_) element.erase(rank) ;
    + }
      }
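The two methods above differ only in the boolean handed to computeConnectorMethods() and computeRedondantRanks(): false for the client to server (write) direction, true for the server to client (read) direction. In both, the redundancy analysis runs twice, once on the workflow views and once on the full views, and a remote rank is dropped only when both passes flag it as removable. A minimal standalone sketch of that final intersection step, with hypothetical data standing in for the real views:

    // Illustrative sketch only: simplified stand-ins for XIOS's elements_
    // (per-element maps of remote rank -> global indices) and for the two
    // redundancy sets computed by the methods above.
    #include <cstdio>
    #include <map>
    #include <set>
    #include <vector>

    int main()
    {
        // elements[e][rank] = indices this process would exchange with remote "rank"
        std::vector<std::map<int, std::vector<long>>> elements = {
            {{0, {1, 2}}, {1, {3, 4}}, {2, {5}}},
            {{0, {10}},   {1, {11}},  {2, {12}}}
        };
        std::set<int> rankToRemove         = {1, 2}; // redundant per the full view
        std::set<int> workflowRankToRemove = {2};    // redundant per the workflow view

        // A rank is erased only when BOTH analyses flag it, mirroring the loop
        // that closes computeConnectorOut() and computeConnectorIn().
        for (int rank : rankToRemove)
            if (workflowRankToRemove.count(rank) != 0)
                for (auto& element : elements) element.erase(rank);

        for (std::size_t e = 0; e < elements.size(); ++e)
            for (auto& [rank, indices] : elements[e])
                std::printf("element %zu keeps rank %d (%zu indices)\n", e, rank, indices.size());
        // Only rank 2 disappears; rank 1 survives because the workflow view still needs it.
    }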
  • XIOS3/trunk/src/distribution/grid_client_server_remote_connector.hpp

    r2304 → r2397

    @@ -14 +14 @@
          CGridClientServerRemoteConnector(vector<shared_ptr<CLocalView>>& srcView, vector<shared_ptr<CLocalView>>& worflowSrcView, vector<shared_ptr<CDistributedView>>& dstView, MPI_Comm localComm, int remoteSize) ;
          void computeConnector(bool eliminateRedondant=true) ;
    +     void computeConnectorOut(void) ;
    +     void computeConnectorIn(void) ;
          vector<shared_ptr<CLocalView>> srcWorkflowView_ ;
      } ;
  • XIOS3/trunk/src/distribution/grid_remote_connector.cpp

    r2296 → r2397

    @@ -3 +3 @@
      #include "leader_process.hpp"
      #include "mpi.hpp"
    + #include "element.hpp"
     
     

    @@ -188 +189 @@
       *         redundantly to the remote servers
       */
    - void CGridRemoteConnector::computeConnectorMethods(void)
    + void CGridRemoteConnector::computeConnectorMethods(bool reverse)
      {
        vector<shared_ptr<CLocalView>> srcView ;

    @@ -201 +202 @@
        for(int i=0;i<dstView_.size();i++) dstViewsNonDistributed = dstViewsNonDistributed && !isDstViewDistributed_[i] ;
     
    -   if (srcViewsNonDistributed)
    + //*****************************************************
    +   if (srcViewsNonDistributed && dstViewsNonDistributed)
    +   {
    +     int commRank, commSize ;
    +     MPI_Comm_rank(localComm_,&commRank) ;
    +     MPI_Comm_size(localComm_,&commSize) ;
    +
    +     map<int,bool> ranks ;
    +     if (reverse)
    +     {
    +       int leaderRank=getLeaderRank(remoteSize_, commSize, commRank) ;
    +       ranks[leaderRank] = true ;
    +     }
    +     else
    +     {
    +       list<int> remoteRanks;
    +       list<int> notUsed ;
    +       computeLeaderProcess(commRank, commSize, remoteSize_, remoteRanks, notUsed) ;
    +       for(int rank : remoteRanks) ranks[rank]=true ;
    +     }
    +     for(int i=0; i<srcView_.size(); i++) computeSrcDstNonDistributed(i,ranks) ;
    +   }
    +
    + //*****************************************************
    +   else if (srcViewsNonDistributed)
        {
          int commRank, commSize ;

    @@ -208 +233 @@
          list<int> remoteRanks;
          list<int> notUsed ;
    -     map<int,bool> ranks ;
    -     computeLeaderProcess(commRank, commSize, remoteSize_, remoteRanks, notUsed) ;
    -     for(int rank : remoteRanks) ranks[rank]=true ;
    +     map<int,bool> ranks ;
     
    -     for(int i=0; i<srcView_.size(); i++)
    -     {
    -       if (isDstViewDistributed_[i]) computeSrcNonDistributed(i) ;
    -       else computeSrcDstNonDistributed(i, ranks) ;
    -     }
    +     if (reverse)
    +     {
    +       shared_ptr<CLocalElement> voidElement = make_shared<CLocalElement>(commRank, 0, CArray<size_t,1>()) ;
    +       shared_ptr<CLocalView> voidView = make_shared<CLocalView>(voidElement, CElementView::FULL, CArray<int,1>()) ;
    +
    +       for(int i=0;i<srcView_.size();i++)
    +         if (isDstViewDistributed_[i])
    +         {
    +           if (commRank==0) srcView.push_back(srcView_[i]) ;
    +           else srcView.push_back(make_shared<CLocalView>(make_shared<CLocalElement>(commRank, srcView_[i]->getGlobalSize(), CArray<size_t,1>()),
    +                                                          CElementView::FULL, CArray<int,1>())) ; // void view
    +           dstView.push_back(dstView_[i]) ;
    +           indElements.push_back(i) ;
    +         }
    +
    +       computeGenericMethod(srcView, dstView, indElements) ;
    +
    +       for(int i=0;i<srcView_.size();i++)
    +         if (isDstViewDistributed_[i])
    +         {
    +           size_t sizeElement ;
    +           int nRank ;
    +           if (commRank==0) nRank = elements_[i].size() ;
    +           MPI_Bcast(&nRank, 1, MPI_INT, 0, localComm_) ;
    +
    +           auto it=elements_[i].begin() ;
    +           for(int j=0;j<nRank;j++)
    +           {
    +             int rank ;
    +             size_t sizeElement ;
    +             if (commRank==0) { rank = it->first ; sizeElement=it->second.numElements(); }
    +             MPI_Bcast(&rank, 1, MPI_INT, 0, localComm_) ;
    +             MPI_Bcast(&sizeElement, 1, MPI_SIZE_T, 0, localComm_) ;
    +             if (commRank!=0) elements_[i][rank].resize(sizeElement) ;
    +             MPI_Bcast(elements_[i][rank].dataFirst(), sizeElement, MPI_SIZE_T, 0, localComm_) ;
    +             if (commRank==0) ++it ;
    +           }
    +         }
    +
    +       for(auto& it : elements_[indElements[0]])
    +       {
    +         if (it.second.numElements()==0) ranks[it.first] = false ;
    +         else  ranks[it.first] = true ;
    +       }
    +
    +       for(int i=0;i<srcView_.size();i++)
    +         if (!isDstViewDistributed_[i]) computeSrcDstNonDistributed(i, ranks) ;
    +
    +     }
    +     else
    +     {
    +       computeLeaderProcess(commRank, commSize, remoteSize_, remoteRanks, notUsed) ;
    +       for(int rank : remoteRanks) ranks[rank]=true ;
    +
    +       for(int i=0; i<srcView_.size(); i++)
    +       {
    +         if (isDstViewDistributed_[i]) computeSrcNonDistributed(i) ;
    +         else computeSrcDstNonDistributed(i, ranks) ;
    +       }
    +     }
    +
        }
    + //*****************************************************
        else if (dstViewsNonDistributed)
        {
    +     int commRank, commSize ;
    +     MPI_Comm_rank(localComm_,&commRank) ;
    +     MPI_Comm_size(localComm_,&commSize) ;
    +
          map<int,bool> ranks ;
    -     for(int i=0;i<remoteSize_;i++) ranks[i]=true ;
    +     if (reverse)
    +     {
    +       int leaderRank=getLeaderRank(remoteSize_, commSize, commRank) ;
    +       ranks[leaderRank] = true ;
    +     }
    +     else for(int i=0;i<remoteSize_;i++) ranks[i]=true ;
    +
          for(int i=0; i<srcView_.size(); i++)
          {

    @@ -228 +318 @@
          }
        }
    + //*****************************************************
        else
        {

    @@ -338 +429 @@
        }
      }
    +
    + /**
    +  * \brief Compute the connector for the element \b i when the source view is not distributed.
    +  *        After the call element_[i] is defined.
    +  * \param i Index of the element composing the source grid.
    +  */
    +
    + void CGridRemoteConnector::computeSrcNonDistributedReverse(int i)
    + {
    +   auto& element = elements_[i] ;
    +   map<int,CArray<size_t,1>> globalIndexView ;
    +   dstView_[i]->getGlobalIndexView(globalIndexView) ;
    +
    +   CClientClientDHTTemplate<int>::Index2InfoTypeMap dataInfo;
    +
    +   for(auto& it : globalIndexView)
    +   {
    +     auto& globalIndex=it.second ;
    +     for(size_t ind : globalIndex) dataInfo[ind]=it.first ;
    +   }
    +
    +   // First we feed the distributed hash map with keys (remote global indices)
    +   // associated with the remote rank as value
    +   CClientClientDHTTemplate<int> DHT(dataInfo, localComm_) ;
    +   // then we query the DHT with the local global indices of the source view
    +
    +   int commRank, commSize ;
    +   MPI_Comm_rank(localComm_,&commRank) ;
    +   MPI_Comm_size(localComm_,&commSize) ;
    +   CArray<size_t,1> srcIndex ;
    +   // since the source view is not distributed, only rank 0 needs to query the DHT
    +   if (commRank==0) srcView_[i]->getGlobalIndexView(srcIndex) ;
    +
    +   // compute the mapping
    +   DHT.computeIndexInfoMapping(srcIndex) ;
    +   auto& returnInfo = DHT.getInfoIndexMap() ;
    +
    +   // returnInfo now contains, for each global index, the list of remote ranks it must be sent to,
    +   // but only on rank 0, which is the rank that queried the DHT;
    +   // rank 0 therefore has to send the lists to each server leader, i.e. the local processes that each handle one or more
    +   // servers
    +
    +   // rankIndGlo : rankIndGlo[rank][indGlo] : list of indices to send to the remote server of rank "rank"
    +   vector<vector<size_t>> rankIndGlo(remoteSize_) ;
    +   if (commRank==0)
    +     for(auto& it1 : returnInfo)
    +       for(auto& it2 : it1.second) rankIndGlo[it2].push_back(it1.first) ;
    +
    +   // broadcast the same lists to every client
    +   for(int remoteRank=0 ; remoteRank<remoteSize_ ; remoteRank++)
    +   {
    +     int remoteDataSize ;
    +     if (commRank==0) remoteDataSize = rankIndGlo[remoteRank].size() ;
    +     MPI_Bcast(&remoteDataSize, 1, MPI_INT, 0, localComm_) ;
    +
    +     auto& element = elements_[i][remoteRank] ;
    +     element.resize(remoteDataSize) ;
    +     if (commRank==0) for(int j=0 ; j<remoteDataSize; j++) element(j)=rankIndGlo[remoteRank][j] ;
    +     MPI_Bcast(element.dataFirst(), remoteDataSize, MPI_SIZE_T, 0, localComm_) ;
    +   }
    + }
    +
    +
     
      /**

    @@ -442 +596 @@
          for(auto& it2 : it1.second) indGlo.push_back(it1.first) ;
     
    -   // now local rank 0 knows which indices to seed to remote rank 0, but all the servers
    +   // now local rank 0 knows which indices to send to remote rank 0, but all the servers
        // must receive the same information. So only the leader rank will send this.
        // So local rank 0 must broadcast the information to all leaders.

    @@ -642 +796 @@
       *        Afterwards we compare hashes between local ranks and remove the redundant data corresponding to the same hash.
       */
    - void CGridRemoteConnector::computeRedondantRanks(void)
    + void CGridRemoteConnector::computeRedondantRanks(bool reverse)
      {
        int commRank ;

    @@ -672 +826 @@
          }
        }
    -   // a hash is now computed for the data block I will send to the server.
    -
    -   CClientClientDHTTemplate<int>::Index2InfoTypeMap info ;
    -
    -   map<size_t,int> hashRank ;
    -   HashXIOS<int> hashGlobalIndexRank;
    -   for(auto& it : hashRanks)
    -   {
    -     it.second = hashGlobalIndexRank.hashCombine(it.first,it.second) ;
    -     info[it.second]=commRank ;
    -     hashRank[it.second]=it.first ;
    -   }
    -
    -   // we feed a DHT map with key : hash, value : myrank
    -   CClientClientDHTTemplate<int> dataHash(info, localComm_) ;
    -   CArray<size_t,1> hashList(hashRank.size()) ;
    -
    -   int i=0 ;
    -   for(auto& it : hashRank) { hashList(i)=it.first ; i++; }
    -
    -   // now find which ranks hold the same hash : query the DHT with my list of hashes
    -   dataHash.computeIndexInfoMapping(hashList) ;
    -   auto& hashRankList = dataHash.getInfoIndexMap() ;
    -
    -
    -   for(auto& it : hashRankList)
    -   {
    -     size_t hash = it.first ;
    -     auto& ranks = it.second ;
    +
    +   if (reverse)
    +   {
    +     set<size_t> hashs ;
    +     // easy because the comparison is local
    +     for(auto& hashRank : hashRanks)
    +     {
    +       if (hashs.count(hashRank.second)==0) hashs.insert(hashRank.second) ;
    +       else rankToRemove_.insert(hashRank.first) ;
    +     }
    +
    +   }
    +   else
    +   {
    +     // a hash is now computed for the data block I will send to the server.
    +
    +     CClientClientDHTTemplate<int>::Index2InfoTypeMap info ;
    +
    +     map<size_t,int> hashRank ;
    +     HashXIOS<int> hashGlobalIndexRank;
    +     for(auto& it : hashRanks)
    +     {
    +       it.second = hashGlobalIndexRank.hashCombine(it.first,it.second) ;
    +       info[it.second]=commRank ;
    +       hashRank[it.second]=it.first ;
    +     }
    +
    +     // we feed a DHT map with key : hash, value : myrank
    +     CClientClientDHTTemplate<int> dataHash(info, localComm_) ;
    +     CArray<size_t,1> hashList(hashRank.size()) ;
    +
    +     int i=0 ;
    +     for(auto& it : hashRank) { hashList(i)=it.first ; i++; }
    +
    +     // now find which ranks hold the same hash : query the DHT with my list of hashes
    +     dataHash.computeIndexInfoMapping(hashList) ;
    +     auto& hashRankList = dataHash.getInfoIndexMap() ;
    +
    +
    +     for(auto& it : hashRankList)
    +     {
    +       size_t hash = it.first ;
    +       auto& ranks = it.second ;
     
    -     bool first=true ;
    -     // only the process with the lowest rank is in charge of sending data to the remote server
    -     for(int rank : ranks) if (commRank>rank) first=false ;
    -     if (!first) rankToRemove_.insert(hashRank[hash]) ;
    +       bool first=true ;
    +       // only the process with the lowest rank is in charge of sending data to the remote server
    +       for(int rank : ranks) if (commRank>rank) first=false ;
    +       if (!first) rankToRemove_.insert(hashRank[hash]) ;
    +     }
        }
      }
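In computeSrcNonDistributedReverse() only rank 0 queries the DHT, so the per-server index lists it obtains must be replicated to every process of the local communicator: a size broadcast followed by a payload broadcast, one remote rank at a time. A self-contained sketch of that replication pattern, illustrative only, with plain MPI and std::vector in place of the XIOS types:

    // Sketch of the rank-0 replication pattern used above (not XIOS code).
    #include <mpi.h>
    #include <vector>

    int main(int argc, char** argv)
    {
        MPI_Init(&argc, &argv);
        int commRank;
        MPI_Comm_rank(MPI_COMM_WORLD, &commRank);

        const int remoteSize = 3;                  // example number of remote servers
        std::vector<std::vector<long>> rankIndGlo(remoteSize);
        if (commRank == 0)                         // only rank 0 knows the mapping
            rankIndGlo = {{1, 4, 7}, {2, 5}, {0, 3, 6, 8}};

        for (int remoteRank = 0; remoteRank < remoteSize; ++remoteRank)
        {
            int n = (commRank == 0) ? static_cast<int>(rankIndGlo[remoteRank].size()) : 0;
            MPI_Bcast(&n, 1, MPI_INT, 0, MPI_COMM_WORLD);          // size first
            rankIndGlo[remoteRank].resize(n);                      // other ranks allocate
            MPI_Bcast(rankIndGlo[remoteRank].data(), n, MPI_LONG,  // then the payload
                      0, MPI_COMM_WORLD);
        }
        // every process now holds an identical copy of rankIndGlo

        MPI_Finalize();
    }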
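A side effect of that replication shows up in the reverse branch of computeRedondantRanks(): since each process already knows the data block assigned to every remote rank, duplicate detection needs no DHT exchange and reduces to a local seen-hashes set ("easy because the comparison is local"). A minimal sketch with hypothetical hash values:

    // Sketch of the local duplicate detection in the reverse branch (not XIOS code).
    #include <cstddef>
    #include <cstdio>
    #include <map>
    #include <set>

    int main()
    {
        // remote rank -> hash of the data block destined for that rank
        std::map<int, std::size_t> hashRanks = {{0, 0xAB}, {1, 0xCD}, {2, 0xAB}, {3, 0xCD}};

        std::set<std::size_t> hashs;  // hashes already seen
        std::set<int> rankToRemove;   // ranks duplicating an earlier block

        for (auto& hashRank : hashRanks)
        {
            if (hashs.count(hashRank.second) == 0) hashs.insert(hashRank.second);
            else rankToRemove.insert(hashRank.first);
        }

        for (int rank : rankToRemove) std::printf("rank %d is redundant\n", rank); // 2 and 3
    }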
  • XIOS3/trunk/src/distribution/grid_remote_connector.hpp

    r2291 → r2397

    @@ -22 +22 @@
          void computeViewDistribution(void) ;
          void computeConnector(bool eliminateRedundant=true) ;
    -     void computeConnectorMethods(void) ;
    +     void computeConnectorMethods(bool reverse=false) ;
          void computeConnectorRedundant(void) ;
          void computeGenericMethod(vector<shared_ptr<CLocalView>>& srcView, vector<shared_ptr<CDistributedView>>& dstView, vector<int>& indElements) ;

    @@ -28 +28 @@
          void computeDstNonDistributed(int i, map<int,bool>& ranks) ;
          void computeSrcNonDistributed(int i) ;
    -     void computeRedondantRanks(void) ;
    +     void computeSrcNonDistributedReverse(int i) ;
    +     void computeRedondantRanks(bool reverse=false) ;
          std::map<int, CArray<size_t,1>>& getDistributedGlobalIndex(int pos) { return elements_[pos] ;}
          const vector<bool>& getIsSrcViewDistributed(void) { return isSrcViewDistributed_ ;}
  • XIOS3/trunk/src/node/axis.cpp

    r2389 → r2397

    @@ -982 +982 @@
      }
     
    - void CAxis::distributeToServer(CContextClient* client, std::map<int, CArray<size_t,1>>& globalIndexOut, std::map<int, CArray<size_t,1>>& globalIndexIn,
    + void CAxis::distributeToServer(CContextClient* client, bool inOut, std::map<int, CArray<size_t,1>>& globalIndexOut, std::map<int, CArray<size_t,1>>& globalIndexIn,
                                     shared_ptr<CScattererConnector> &scattererConnector, const string& axisId)
      {

    @@ -1042 +1042 @@
        // phase 3 : compute connector to receive from server
        ////////////
    +   if (inOut)
        {
          auto scatteredElement = make_shared<CDistributedElement>(n_glo, globalIndexIn) ;

    @@ -1128 +1129 @@
      //      gathererConnector_ -> computeConnector() ;
        }
    -   else if (phasis==3)
    +   else if (phasis==3) // only for server -> client
        {
          elementTo_ = make_shared<CDistributedElement>(event) ;

    @@ -1145 +1146 @@
        serverFromClientConnector_ = make_shared<CGathererConnector>(elementFrom_->getView(CElementView::FULL), localElement_->getView(CElementView::WORKFLOW)) ;
        serverFromClientConnector_->computeConnector() ;
    -
    -   serverToClientConnector_ = make_shared<CScattererConnector>(localElement_->getView(CElementView::WORKFLOW), elementTo_->getView(CElementView::FULL),
    +   elementFrom_.reset() ;
    +
    +   if (elementTo_)
    +   {
    +     serverToClientConnector_ = make_shared<CScattererConnector>(localElement_->getView(CElementView::WORKFLOW), elementTo_->getView(CElementView::FULL),
                                                                     context->getIntraComm(), client->getRemoteSize()) ;
    -   serverToClientConnector_->computeConnector() ;
    +     serverToClientConnector_->computeConnector() ;
    +     elementTo_.reset() ;
    +   }
      }
      CATCH_DUMP_ATTR
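The same guard recurs in domain.cpp and scalar.cpp below: serverToClientConnector_ is built only when elementTo_ exists, i.e. when phase 3 ran for a server to client (read) field, and each intermediate element is released with shared_ptr::reset() as soon as its connector is computed. A simplified sketch of the pattern, with hypothetical types in place of the XIOS ones:

    // Sketch only: optional connector construction plus early release of
    // intermediates, mirroring the new endpoint code in axis/domain/scalar.
    #include <cstdio>
    #include <memory>

    struct Element { int size; };

    struct Connector
    {
        explicit Connector(const Element& e) : size(e.size) {}
        void compute() const { std::printf("connector over %d points\n", size); }
        int size;
    };

    int main()
    {
        auto elementFrom = std::make_shared<Element>(Element{8});
        std::shared_ptr<Element> elementTo;   // stays null unless the read path ran

        Connector fromClient(*elementFrom);
        fromClient.compute();
        elementFrom.reset();                  // intermediate no longer needed

        if (elementTo)                        // skipped here: write-only field
        {
            Connector toClient(*elementTo);
            toClient.compute();
            elementTo.reset();
        }
    }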
  • XIOS3/trunk/src/node/axis.hpp

    r2389 → r2397

    @@ -253 +253 @@
         public:
           void computeRemoteElement(CContextClient* client, EDistributionType) ;
    -      void distributeToServer(CContextClient* client, std::map<int, CArray<size_t,1>>& globalIndexOut, std::map<int, CArray<size_t,1>>& globalIndexIn,
    +      void distributeToServer(CContextClient* client, bool inOut, std::map<int, CArray<size_t,1>>& globalIndexOut, std::map<int, CArray<size_t,1>>& globalIndexIn,
                                   shared_ptr<CScattererConnector>& scattererConnector, const string& axisId="") ;
  • XIOS3/trunk/src/node/context.cpp

    r2343 → r2397

    @@ -1012 +1012 @@
        for(auto file : this->enabledReadModeFiles) file->setContextClient(client) ;
     
    +
    +   // workflow startpoint => data from server on client side
    +   // important : sendFieldToInputFileServer must be done prior to sendFieldToFileServer because in the first case both the grid remoteConnectorIn
    +   //             and the grid remoteConnectorOut are computed, while in the second case only the remoteConnectorOut is.
    +   if (serviceType_==CServicesManager::CLIENT)
    +   {
    +     for(auto field : fileInField)
    +     {
    +       field->sendFieldToInputFileServer() ;
    +       field->connectToServerInput(garbageCollector) ; // connect the field to server filter
    +       fileInFields_.push_back(field) ;
    +     }
    +   }
    +
        // workflow endpoint => sent to IO/SERVER
        if (serviceType_==CServicesManager::CLIENT || serviceType_==CServicesManager::GATHERER)

    @@ -1076 +1090 @@
        {
          field->connectToCouplerOut(garbageCollector) ; // for now the same kind of filter as for file server
    -   }
    -
    -   // workflow startpoint => data from server on client side
    -   if (serviceType_==CServicesManager::CLIENT)
    -   {
    -     for(auto field : fileInField)
    -     {
    -       field->sendFieldToInputFileServer() ;
    -       field->connectToServerInput(garbageCollector) ; // connect the field to server filter
    -       fileInFields_.push_back(field) ;
    -     }
        }
     
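The relocation matters because sendGridToFileServer() is memoized per client (see sendGridToFileServer_done_ in grid.cpp below), so whichever call reaches a grid first decides whether the read connectors are ever built. A toy model of that interaction, with hypothetical simplified types:

    // Sketch only: why the read-path call must come first when sends are memoized.
    #include <cstdio>
    #include <set>

    struct Client {};

    struct Grid
    {
        std::set<Client*> done;

        void sendGridToFileServer(Client* client, bool inOut)
        {
            if (done.count(client) != 0) return;  // later calls are no-ops
            done.insert(client);
            std::printf("connectors built: out%s\n", inOut ? " + in" : " only");
        }
    };

    int main()
    {
        Grid grid;
        Client client;
        grid.sendGridToFileServer(&client, true);   // read path first: out + in
        grid.sendGridToFileServer(&client, false);  // no-op, already done
        // In the opposite order the "in" connectors would never be created.
    }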
  • XIOS3/trunk/src/node/domain.cpp

    r2395 → r2397

    @@ -2113 +2113 @@
     
     
    - void CDomain::distributeToServer(CContextClient* client, map<int, CArray<size_t,1>>& globalIndexOut, std::map<int, CArray<size_t,1>>& globalIndexIn,
    + void CDomain::distributeToServer(CContextClient* client, bool inOut, map<int, CArray<size_t,1>>& globalIndexOut, std::map<int, CArray<size_t,1>>& globalIndexIn,
                                       shared_ptr<CScattererConnector> &scattererConnector, const string& domainId)
      TRY

    @@ -2175 +2175 @@
        // phase 3 : compute connector to receive from server
        ////////////
    +   if (inOut)
        {
          auto scatteredElement = make_shared<CDistributedElement>(ni_glo*nj_glo, globalIndexIn) ;

    @@ -2278 +2279 @@
        serverFromClientConnector_ = make_shared<CGathererConnector>(elementFrom_->getView(CElementView::FULL), localElement_->getView(CElementView::WORKFLOW)) ;
        serverFromClientConnector_->computeConnector() ;
    +   elementFrom_.reset() ;
     
    -   serverToClientConnector_ = make_shared<CScattererConnector>(localElement_->getView(CElementView::WORKFLOW), elementTo_->getView(CElementView::FULL),
    +   if (elementTo_)
    +   {
    +     serverToClientConnector_ = make_shared<CScattererConnector>(localElement_->getView(CElementView::WORKFLOW), elementTo_->getView(CElementView::FULL),
                                                                     context->getIntraComm(), client->getRemoteSize()) ;
    -   serverToClientConnector_->computeConnector() ;
    +     serverToClientConnector_->computeConnector() ;
    +     elementTo_.reset() ;
    +   }
    +
      }
      CATCH_DUMP_ATTR
  • XIOS3/trunk/src/node/domain.hpp

    r2389 → r2397

    @@ -309 +309 @@
         public:
           void computeRemoteElement(CContextClient* client, EDistributionType) ;
    -      void distributeToServer(CContextClient* client, std::map<int, CArray<size_t,1>>& globalIndexOut, std::map<int, CArray<size_t,1>>& globalIndexIn,
    +      void distributeToServer(CContextClient* client, bool inOut, std::map<int, CArray<size_t,1>>& globalIndexOut, std::map<int, CArray<size_t,1>>& globalIndexIn,
                                   shared_ptr<CScattererConnector> &scattererConnector, const string& domainId="") ;
  • XIOS3/trunk/src/node/field.cpp

    r2326 → r2397

    @@ -1466 +1466 @@
        getRelFile()->sendFileToFileServer(client);
        sentGrid_ = grid_-> duplicateSentGrid() ;
    -   sentGrid_->sendGridToFileServer(client);
    +   sentGrid_->sendGridToFileServer(client, false);
        name = getFieldOutputName() ;
        this->sendAllAttributesToServer(client);

    @@ -1477 +1477 @@
        getRelFile()->sendFileToFileServer(client);
        sentGrid_ = grid_-> duplicateSentGrid() ;
    -   sentGrid_->sendGridToFileServer(client);
    +   sentGrid_->sendGridToFileServer(client, true);
        read_access=true ; // not the best solution, but on server side, the field must be a starting point of the workflow
                           // must be replaced by a better solution when implementing filters for reading and sending to the client
  • XIOS3/trunk/src/node/grid.cpp

    r2351 → r2397

    @@ -1448 +1448 @@
     
     
    - void CGrid::sendGridToFileServer(CContextClient* client)
    + void CGrid::sendGridToFileServer(CContextClient* client, bool inOut)
      {
        if (sendGridToFileServer_done_.count(client)!=0) return ;

    @@ -1457 +1457 @@
        gridPtr->sendCreateChild(this->getId(),client);
        this->sendAllAttributesToServer(client);
    -   distributeGridToServer(client) ;
    +   distributeGridToServer(client, inOut) ;
      }
     

    @@ -1466 +1466 @@
        else sendGridToCouplerOut_done_.insert(client) ;
        this->sendAllAttributesToServer(client, getCouplingAlias(fieldId));
    -   distributeGridToServer(client, fieldId) ;
    - }
    -
    -
    - void CGrid::distributeGridToServer(CContextClient* client, const string& fieldId)
    +   distributeGridToServer(client, false, fieldId) ;
    + }
    +
    +
    + void CGrid::distributeGridToServer(CContextClient* client, bool inOut, const string& fieldId)
      {
        CContext* context = CContext::getCurrent();

    @@ -1520 +1520 @@
        // CGridClientServerRemoteConnector : workflowView is added to avoid spurious optimisation with only the fullview
        auto gridRemoteConnector = make_shared<CGridClientServerRemoteConnector>(localViews, workflowView, remoteViews, context->getIntraComm(), client->getRemoteSize()) ;
    -   gridRemoteConnector->computeConnector(true) ;
    +   gridRemoteConnector->computeConnectorOut() ;
     
    -   auto gridRemoteConnectorIn = make_shared<CGridClientServerRemoteConnector>(localViews, workflowView, remoteViews, context->getIntraComm(), client->getRemoteSize()) ;
    -   gridRemoteConnectorIn->computeConnector(false) ;
    -
    -
    +   shared_ptr<CGridClientServerRemoteConnector> gridRemoteConnectorIn ;
    +   if (inOut)
    +   {
    +     gridRemoteConnectorIn = make_shared<CGridClientServerRemoteConnector>(localViews, workflowView, remoteViews, context->getIntraComm(), client->getRemoteSize()) ;
    +     gridRemoteConnectorIn->computeConnectorIn() ;
    +   }
    +   else gridRemoteConnectorIn = gridRemoteConnector ;
    +
    +
        vector<shared_ptr<CScattererConnector>> scattererConnectors ;
        shared_ptr<CScattererConnector> scattererConnector;

    @@ -1533 +1538 @@
          {
            CDomain* domain = (CDomain*) elements[i].ptr ;
    -       if (isCoupling) domain->distributeToServer(client, gridRemoteConnector->getDistributedGlobalIndex(i), gridRemoteConnectorIn->getDistributedGlobalIndex(i),
    +       if (isCoupling) domain->distributeToServer(client, inOut, gridRemoteConnector->getDistributedGlobalIndex(i), gridRemoteConnectorIn->getDistributedGlobalIndex(i),
                                                       scattererConnector,  domain->getCouplingAlias(fieldId,i)) ;
            else
            {
              sendAddDomain(domain->getId(),client) ;
    -         domain->distributeToServer(client, gridRemoteConnector->getDistributedGlobalIndex(i), gridRemoteConnectorIn->getDistributedGlobalIndex(i), scattererConnector) ;
    +         domain->distributeToServer(client, inOut, gridRemoteConnector->getDistributedGlobalIndex(i), gridRemoteConnectorIn->getDistributedGlobalIndex(i), scattererConnector) ;
            }
            scattererConnectors.push_back(scattererConnector) ;

    @@ -1545 +1550 @@
          {
            CAxis* axis = (CAxis*) elements[i].ptr ;
    -       if (isCoupling) axis->distributeToServer(client, gridRemoteConnector->getDistributedGlobalIndex(i), gridRemoteConnectorIn->getDistributedGlobalIndex(i),
    +       if (isCoupling) axis->distributeToServer(client, inOut, gridRemoteConnector->getDistributedGlobalIndex(i), gridRemoteConnectorIn->getDistributedGlobalIndex(i),
                                                     scattererConnector,  axis->getCouplingAlias(fieldId,i)) ;
            else
            {
              sendAddAxis(axis->getId(),client) ;
    -         axis->distributeToServer(client, gridRemoteConnector->getDistributedGlobalIndex(i), gridRemoteConnectorIn->getDistributedGlobalIndex(i), scattererConnector) ;
    +         axis->distributeToServer(client, inOut, gridRemoteConnector->getDistributedGlobalIndex(i), gridRemoteConnectorIn->getDistributedGlobalIndex(i), scattererConnector) ;
            }
            scattererConnectors.push_back(scattererConnector) ;

    @@ -1557 +1562 @@
          {
            CScalar* scalar = (CScalar*) elements[i].ptr ;
    -       if (isCoupling) scalar->distributeToServer(client, gridRemoteConnector->getDistributedGlobalIndex(i), gridRemoteConnectorIn->getDistributedGlobalIndex(i),
    +       if (isCoupling) scalar->distributeToServer(client, inOut, gridRemoteConnector->getDistributedGlobalIndex(i), gridRemoteConnectorIn->getDistributedGlobalIndex(i),
                                                       scattererConnector,  scalar->getCouplingAlias(fieldId,i)) ;
            else
            {
              sendAddScalar(scalar->getId(),client) ;
    -         scalar->distributeToServer(client, gridRemoteConnector->getDistributedGlobalIndex(i), gridRemoteConnectorIn->getDistributedGlobalIndex(i), scattererConnector) ;
    +         scalar->distributeToServer(client, inOut, gridRemoteConnector->getDistributedGlobalIndex(i), gridRemoteConnectorIn->getDistributedGlobalIndex(i), scattererConnector) ;
            }
            scattererConnectors.push_back(scattererConnector) ;

    @@ -1588 +1593 @@
          {
            clientToServerConnectors.push_back(element.domain->getClientToServerConnector(client)) ;
    -       clientFromServerConnectors.push_back(element.domain->getClientFromServerConnector(client)) ;
    +       if (inOut) clientFromServerConnectors.push_back(element.domain->getClientFromServerConnector(client)) ;
          }
          else if (element.type==TYPE_AXIS)
          {
            clientToServerConnectors.push_back(element.axis->getClientToServerConnector(client)) ;
    -       clientFromServerConnectors.push_back(element.axis->getClientFromServerConnector(client)) ;
    +       if (inOut) clientFromServerConnectors.push_back(element.axis->getClientFromServerConnector(client)) ;
     
          }

    @@ -1599 +1604 @@
          {
            clientToServerConnectors.push_back(element.scalar->getClientToServerConnector(client)) ;
    -       clientFromServerConnectors.push_back(element.scalar->getClientFromServerConnector(client)) ;
    +       if (inOut) clientFromServerConnectors.push_back(element.scalar->getClientFromServerConnector(client)) ;
          }
        }

    @@ -1605 +1610 @@
        // compute the grid clientToServerConnector to send flux from client to servers
        clientToServerConnector_[client] = make_shared<CGridScattererConnector>(clientToServerConnectors) ;
    -   clientFromServerConnector_[client] = make_shared<CGridGathererConnector>(clientFromServerConnectors) ;
    +   if (inOut) clientFromServerConnector_[client] = make_shared<CGridGathererConnector>(clientFromServerConnectors) ;
     
      }
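When inOut is false the read connector is never computed; gridRemoteConnectorIn simply aliases gridRemoteConnector, so the distributeToServer() calls can keep passing both index maps unchanged. A small sketch of that aliasing choice, with a hypothetical simplified type:

    // Sketch only: alias the "out" connector when no "in" connector is needed.
    #include <cstdio>
    #include <memory>

    struct RemoteConnector
    {
        void computeOut() { std::puts("out connector computed"); }
        void computeIn()  { std::puts("in connector computed");  }
    };

    int main()
    {
        bool inOut = false;  // write-only field: no server-to-client path required

        auto out = std::make_shared<RemoteConnector>();
        out->computeOut();

        std::shared_ptr<RemoteConnector> in;
        if (inOut) { in = std::make_shared<RemoteConnector>(); in->computeIn(); }
        else in = out;       // alias: both handles valid, nothing recomputed

        std::printf("same object: %s\n", in == out ? "yes" : "no");
    }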
  • XIOS3/trunk/src/node/grid.hpp

    r2326 → r2397

    @@ -172 +172 @@
     
          public:
    -        void sendGridToFileServer(CContextClient* client) ;
    +        void sendGridToFileServer(CContextClient* client, bool inOut) ;
          private:
             std::set<CContextClient*> sendGridToFileServer_done_ ;

    @@ -351 +351 @@
     
          public: //?
    -        void distributeGridToServer(CContextClient* client, const string& fieldId="");
    +        void distributeGridToServer(CContextClient* client, bool inOut, const string& fieldId="");
     
     
  • XIOS3/trunk/src/node/scalar.cpp

    r2343 → r2397

    @@ -420 +420 @@
      }
     
    - void CScalar::distributeToServer(CContextClient* client, std::map<int, CArray<size_t,1>>& globalIndexOut,  std::map<int, CArray<size_t,1>>& globalIndexIn,
    + void CScalar::distributeToServer(CContextClient* client, bool inOut, std::map<int, CArray<size_t,1>>& globalIndexOut,  std::map<int, CArray<size_t,1>>& globalIndexIn,
                                       shared_ptr<CScattererConnector> &scattererConnector, const string& scalarId)
      {

    @@ -480 +480 @@
        // phase 3 : compute connector to receive from server
        ////////////
    +   if (inOut)
        {
          auto scatteredElement = make_shared<CDistributedElement>(1, globalIndexIn) ;

    @@ -567 +568 @@
        serverFromClientConnector_ = make_shared<CGathererConnector>(elementFrom_->getView(CElementView::FULL), localElement_->getView(CElementView::WORKFLOW)) ;
        serverFromClientConnector_->computeConnector() ;
    +   elementFrom_.reset() ;
     
    -   serverToClientConnector_ = make_shared<CScattererConnector>(localElement_->getView(CElementView::WORKFLOW), elementTo_->getView(CElementView::FULL),
    +   if (elementTo_)
    +   {
    +     serverToClientConnector_ = make_shared<CScattererConnector>(localElement_->getView(CElementView::WORKFLOW), elementTo_->getView(CElementView::FULL),
                                                                     context->getIntraComm(), client->getRemoteSize()) ;
    -   serverToClientConnector_->computeConnector() ;
    +     serverToClientConnector_->computeConnector() ;
    +     elementTo_.reset() ;
    +   }
      }
      CATCH_DUMP_ATTR
  • XIOS3/trunk/src/node/scalar.hpp

    r2326 → r2397

    @@ -202 +202 @@
        public:
          void computeRemoteElement(CContextClient* client, EDistributionType) ;
    -     void distributeToServer(CContextClient* client, std::map<int, CArray<size_t,1>>& globalIndexOut, std::map<int, CArray<size_t,1>>& globalIndexIn,
    +     void distributeToServer(CContextClient* client, bool inOut, std::map<int, CArray<size_t,1>>& globalIndexOut, std::map<int, CArray<size_t,1>>& globalIndexIn,
                                  shared_ptr<CScattererConnector> &scattererConnector, const string& scalarId="") ;