Changeset 1134


Timestamp: 05/16/17 17:54:30
Author: yushan
Message: branch merged with trunk r1130
Location: XIOS/dev/branch_yushan_merged
Files: 51 added, 120 edited

  • XIOS/dev/branch_yushan_merged/arch/arch-GCC_LINUX.env

    r395 → r1134

    - export HDF5_INC_DIR=$HOME/hdf5/include
    - export HDF5_LIB_DIR=$HOME/hdf5/lib
    + export HDF5_INC_DIR=$HOME/lib/hdf5/include
    + export HDF5_LIB_DIR=$HOME/lib/hdf5/lib

    - export NETCDF_INC_DIR=$HOME/netcdf4/include
    - export NETCDF_LIB_DIR=$HOME/netcdf4/lib
    + export NETCDF_INC_DIR=$HOME/lib/netcdf/include
    + export NETCDF_LIB_DIR=$HOME/lib/netcdf/lib
  • XIOS/dev/branch_yushan_merged/arch/arch-GCC_LINUX.fcm

    r591 → r1134

      ################################################################################

    - %CCOMPILER      mpicc
    - %FCOMPILER      mpif90
    - %LINKER         mpif90
    + %CCOMPILER      mpicc -fopenmp -D_openmpi -D_usingEP
    + %FCOMPILER      mpif90 -fopenmp
    + %LINKER         mpif90 -fopenmp -D_openmpi -D_usingEP

      %BASE_CFLAGS    -ansi -w
  • XIOS/dev/branch_yushan_merged/arch/arch-GCC_LINUX.path

    r475 → r1134

    - NETCDF_INCDIR="-I $NETCDF_INC_DIR"
    - NETCDF_LIBDIR="-L $NETCDF_LIB_DIR"
    - NETCDF_LIB="-lnetcdff -lnetcdf"
    + NETCDF_INCDIR="-I $NETCDF_INC_DIR -I $HOME/lib/netcdf_f/include"
    + NETCDF_LIBDIR="-L $NETCDF_LIB_DIR -L $HOME/lib/netcdf_f/lib"
    + NETCDF_LIB=" -lnetcdf"

    - MPI_INCDIR=""
    - MPI_LIBDIR=""
    + MPI_INCDIR="-I /usr/local/include"
    + MPI_LIBDIR="-L /usr/local/lib"
      MPI_LIB=""

      HDF5_INCDIR="-I $HDF5_INC_DIR"
      HDF5_LIBDIR="-L $HDF5_LIB_DIR"
    - HDF5_LIB="-lhdf5_hl -lhdf5 -lhdf5 -lz"
    + HDF5_LIB="-lhdf5_hl -lhdf5 -lhdf5 -lz -ldl -lcurl"

      OASIS_INCDIR="-I$PWD/../../oasis3-mct/BLD/build/lib/psmile.MPI1"
  • XIOS/dev/branch_yushan_merged/arch/arch-X64_ADA.fcm

    r985 → r1134

      ################################################################################

    - %CCOMPILER      mpiicc
    - %FCOMPILER      mpiifort
    - %LINKER         mpiifort  -nofor-main
    + %CCOMPILER      mpiicc -qopenmp -D_usingEP -D_intelmpi
    + %FCOMPILER      mpiifort -qopenmp -D_usingEP -D_intelmpi
    + %LINKER         mpiifort  -nofor-main -qopenmp -D_usingEP -D_intelmpi

      %BASE_CFLAGS    -diag-disable 1125 -diag-disable 279
  • XIOS/dev/branch_yushan_merged/arch/arch-X64_CURIE.fcm

    r1002 → r1134

      ################################################################################

    - %CCOMPILER      mpicc
    - %FCOMPILER      mpif90
    + %CCOMPILER      mpicc -openmp -D_openmpi -D_usingEP
    + %FCOMPILER      mpif90 -openmp -D_openmpi -D_usingEP
      %LINKER         mpif90  -nofor-main
  • XIOS/dev/branch_yushan_merged/bld.cfg

    r1118 → r1134

      src::netcdf $PWD/extern/netcdf4
      src::remap $PWD/extern/remap/src
    + src::src_ep_dev $PWD/extern/src_ep_dev
      bld::lib xios
    - bld::target libxios.a
    + #bld::target libxios.a
      #bld::target generate_fortran_interface.exe
    - bld::target xios_server.exe
    + #bld::target xios_server.exe
      #bld::target test_remap.exe
      #bld::target test_regular.exe
      #bld::target test_expand_domain.exe
      #bld::target test_new_features.exe test_unstruct_complete.exe
    + bld::target test_omp.exe test_complete_omp.exe
      bld::target test_client.exe test_complete.exe test_xios2_cmip6.exe
      #bld::target test_connectivity_expand.exe

      bld::excl_dep        use::netcdf
      bld::excl_dep        inc::mpif.h
    + bld::excl_dep        use::omp_lib
  • XIOS/dev/branch_yushan_merged/extern/remap/src/mapper.cpp

    r1114 → r1134

      double srcArea = recvArea[rank][n1];
      double w = (*it)->area;
    -       if (quantity) w/=srcArea ;
    +                        if (quantity) w/=srcArea ;

      /* first order: src value times weight (weight = supermesh area), later divide by target area */
  • XIOS/dev/branch_yushan_merged/extern/remap/src/mapper.hpp

    r1114 → r1134

      #include "parallel_tree.hpp"
      #include "mpi.hpp"
    +
    + #ifdef _usingEP
    + #include "ep_declaration.hpp"
    + #endif

      namespace sphereRemap {

      {
      public:
    - Mapper(MPI_Comm comm=MPI_COMM_WORLD) : communicator(comm), verbose(SILENT), neighbourElements(NULL), sstree(comm) {}
    + Mapper(ep_lib::MPI_Comm comm=MPI_COMM_WORLD) : communicator(comm), verbose(SILENT), neighbourElements(NULL), sstree(comm) {}
      ~Mapper();
      void setVerbosity(verbosity v) {verbose=v ;}

      CParallelTree sstree;
    - MPI_Comm communicator ;
    + ep_lib::MPI_Comm communicator ;
      std::vector<Elt>  sourceElements ;
      std::vector<Node> sourceMesh ;
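
    For context, a minimal standalone sketch (not XIOS code) of how an interface such as Mapper can use ep_lib::MPI_Comm whether or not the endpoint (EP) library is compiled in. The fallback alias in the #else branch is an assumption made for this example; the real ep_declaration.hpp contents are not shown in this changeset.

        // Illustrative only: keep ep_lib::MPI_Comm usable in both builds.
        #include <mpi.h>

        #ifdef _usingEP
          #include "ep_declaration.hpp"   // assumed to declare the EP communicator type
        #else
        namespace ep_lib
        {
          typedef ::MPI_Comm MPI_Comm;    // hypothetical fallback: plain MPI communicator
        }
        #endif

        class MapperLike                  // hypothetical class mirroring the pattern above
        {
        public:
          explicit MapperLike(ep_lib::MPI_Comm comm = MPI_COMM_WORLD) : communicator(comm) {}
        private:
          ep_lib::MPI_Comm communicator;  // same member type as in the diff above
        };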
  • XIOS/dev/branch_yushan_merged/extern/remap/src/mpi_routing.cpp

    r694 → r1134

      #include "timerRemap.hpp"
      #include <iostream>
    + #ifdef _usingEP
    + #include "ep_declaration.hpp"
    + #endif

      namespace sphereRemap {

      CTimer::get("CMPIRouting::init(reduce_scatter)").print();

    - MPI_Alloc_mem(nbTarget *sizeof(int), MPI_INFO_NULL, &targetRank);
    - MPI_Alloc_mem(nbSource *sizeof(int), MPI_INFO_NULL, &sourceRank);
    + MPI_Info info_null;
    +
    + MPI_Alloc_mem(nbTarget *sizeof(int), info_null, &targetRank);
    + MPI_Alloc_mem(nbSource *sizeof(int), info_null, &sourceRank);

      targetRankToIndex = new int[mpiSize];

      for (int i = 0; i < nbSource; i++)
      {
    +   #ifdef _usingEP
    +   MPI_Irecv(&sourceRank[i], 1, MPI_INT, -1, 0, communicator, &request[indexRequest]);
    +   #else
        MPI_Irecv(&sourceRank[i], 1, MPI_INT, MPI_ANY_SOURCE, 0, communicator, &request[indexRequest]);
    +   #endif
        indexRequest++;
      }

      for (int i = 0; i < nbSource; i++)
      {
    +   #ifdef _usingEP
    +   MPI_Irecv(&sourceRank[i], 1, MPI_INT, -1, 0, communicator, &request[indexRequest]);
    +   #else
        MPI_Irecv(&sourceRank[i], 1, MPI_INT, MPI_ANY_SOURCE, 0, communicator, &request[indexRequest]);
    +   #endif
        indexRequest++;
      }
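
    The hunk above guards each MPI_Irecv so that the EP build passes -1 where standard MPI uses MPI_ANY_SOURCE. A small hedged sketch of factoring that choice into one constant, so the receive loop is written only once; ANY_SOURCE_RANK is a name invented for this example, and the -1 wildcard convention is taken from the diff.

        // Sketch only: centralise the wildcard-source value used above.
        #include <mpi.h>
        #include <vector>

        #ifdef _usingEP
        static const int ANY_SOURCE_RANK = -1;              // EP convention shown in the diff
        #else
        static const int ANY_SOURCE_RANK = MPI_ANY_SOURCE;  // standard MPI wildcard
        #endif

        void postSourceRankReceives(std::vector<int>& sourceRank, MPI_Comm communicator,
                                    std::vector<MPI_Request>& request)
        {
          for (std::size_t i = 0; i < sourceRank.size(); ++i)
          {
            MPI_Request req;
            MPI_Irecv(&sourceRank[i], 1, MPI_INT, ANY_SOURCE_RANK, 0, communicator, &req);
            request.push_back(req);
          }
        }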
  • XIOS/dev/branch_yushan_merged/extern/remap/src/parallel_tree.hpp

    r694 → r1134

      #include "mpi_cascade.hpp"
      #include "mpi.hpp"
    + #ifdef _usingEP
    + #include "ep_declaration.hpp"
    + #endif

      namespace sphereRemap {

      {
      public:
    - CParallelTree(MPI_Comm comm);
    + CParallelTree(ep_lib::MPI_Comm comm);
      ~CParallelTree();

      vector<CSampleTree> treeCascade; // first for sample tree, then for routing tree
      CMPICascade cascade;
    - MPI_Comm communicator ;
    + ep_lib::MPI_Comm communicator ;

      };
  • XIOS/dev/branch_yushan_merged/extern/remap/src/tree.cpp

    r1066 → r1134

      root->parent = 0;
      root->leafCount = 0;
    - // initialize root node on the sphere
    - root->centre.x=1 ; root->centre.y=0 ; root->centre.z=0 ;
    + // initialize root node on the sphere
    + root->centre.x=1 ;
    + root->centre.y=0 ;
    + root->centre.z=0 ;
      root->radius = 0.;
      root->reinserted = false;
  • XIOS/dev/branch_yushan_merged/inputs/COMPLETE/context_atmosphere.xml

    r787 → r1134

      </field_definition>

    - <file_definition type="multiple_file" par_access="collective" output_freq="6h" sync_freq="6h" output_level="10" enabled=".TRUE.">
    + <file_definition type="one_file" par_access="collective" output_freq="6h" sync_freq="6h" output_level="10" enabled=".TRUE.">
        <file id="output_atmosphere" name="output_atmosphere">
          <field field_ref="field_A_atm" />
  • XIOS/dev/branch_yushan_merged/inputs/COMPLETE/context_surface.xml

    r562 → r1134

      </field_definition>

    - <file_definition type="multiple_file" par_access="collective" output_level="10" enabled=".TRUE.">
    + <file_definition type="one_file" par_access="collective" output_level="10" enabled=".TRUE.">
        <file id="output_surface" name="output_surface_6h" output_freq="6h">
          <field field_ref="field_A_srf">
  • XIOS/dev/branch_yushan_merged/inputs/iodef.xml

    r787 → r1134

    - <file_definition type="multiple_file" par_access="collective" output_freq="6h" output_level="10" enabled=".TRUE.">
    + <file_definition type="one_file" par_access="collective" output_freq="6h" output_level="10" enabled=".TRUE.">
        <file id="output" name="output">
          <field field_ref="field_A_zoom" name="field_A" />
  • XIOS/dev/branch_yushan_merged/src/array_new.hpp

    r1111 → r1134   (whitespace-only change: trailing spaces removed from a blank line)

      TinyVector<int,N_rank> vect;
      size_t ne;

      ret =  buffer.get(numDim);
      ret &= buffer.get(vect.data(), N_rank);
  • XIOS/dev/branch_yushan_merged/src/attribute.cpp

    r1112 → r1134   (blank line added)

      ///--------------------------------------------------------------

    +
      CMessage& operator<<(CMessage& msg,CAttribute& type)
      {
  • XIOS/dev/branch_yushan_merged/src/attribute_enum.hpp

    r1112 r1134  
    1414namespace xios 
    1515{ 
    16       /// ////////////////////// Déclarations ////////////////////// /// 
    17         /*! 
    18         \class CAttributeEnum 
    19         This class implements the attribute representing enumeration 
     16    /// ////////////////////// Déclarations ////////////////////// /// 
     17    /*! 
     18      \class CAttributeEnum 
     19      This class implements the attribute representing enumeration 
    2020      */ 
    21       template <class T> 
    22          class CAttributeEnum : public CAttribute, public CEnum<T> 
    23       { 
     21    template <class T> 
     22        class CAttributeEnum : public CAttribute, public CEnum<T> 
     23    { 
    2424        typedef typename T::t_enum T_enum ; 
    2525        public : 
    2626 
    27             /// Constructeurs /// 
    28             explicit CAttributeEnum(const StdString & id); 
    29             CAttributeEnum(const StdString & id, 
    30                                xios_map<StdString, CAttribute*> & umap); 
    31             CAttributeEnum(const StdString & id, const T_enum & value); 
    32             CAttributeEnum(const StdString & id, const T_enum & value, 
    33                                xios_map<StdString, CAttribute*> & umap); 
     27        /// Constructeurs /// 
     28        explicit CAttributeEnum(const StdString & id); 
     29        CAttributeEnum(const StdString & id, 
     30                xios_map<StdString, CAttribute*> & umap); 
     31        CAttributeEnum(const StdString & id, const T_enum & value); 
     32        CAttributeEnum(const StdString & id, const T_enum & value, 
     33                xios_map<StdString, CAttribute*> & umap); 
    3434 
    35             /// Accesseur /// 
    36             T_enum getValue(void) const; 
    37             string getStringValue(void) const; 
     35        /// Accesseur /// 
     36        T_enum getValue(void) const; 
     37        string getStringValue(void) const; 
    3838 
    3939 
    40             /// Mutateurs /// 
    41             void setValue(const T_enum & value); 
    42              
    43             void set(const CAttribute& attr) ; 
    44             void set(const CAttributeEnum& attr) ; 
    45             void reset(void); 
    46              
    47             void setInheritedValue(const CAttributeEnum& attr ); 
    48             void setInheritedValue(const CAttribute& attr ); 
    49             T_enum getInheritedValue(void)  const; 
    50             string getInheritedStringValue(void) const; 
    51             bool hasInheritedValue(void) const;           
    52            
    53             bool isEqual(const CAttributeEnum& attr ); 
    54             bool isEqual(const CAttribute& attr ); 
     40        /// Mutateurs /// 
     41        void setValue(const T_enum & value); 
    5542 
    56             /// Destructeur /// 
    57             virtual ~CAttributeEnum(void) { } 
     43        void set(const CAttribute& attr) ; 
     44        void set(const CAttributeEnum& attr) ; 
     45        void reset(void); 
    5846 
    59             /// Operateur /// 
    60             CAttributeEnum& operator=(const T_enum & value); 
     47        void setInheritedValue(const CAttributeEnum& attr ); 
     48        void setInheritedValue(const CAttribute& attr ); 
     49        T_enum getInheritedValue(void)  const; 
     50        string getInheritedStringValue(void) const; 
     51        bool hasInheritedValue(void) const;           
    6152 
    62             /// Autre /// 
    63             virtual StdString toString(void) const { return _toString();} 
    64             virtual void fromString(const StdString & str) { if (str==resetInheritanceStr) { reset(); _canInherite=false ;}  else _fromString(str);} 
     53        bool isEqual(const CAttributeEnum& attr ); 
     54        bool isEqual(const CAttribute& attr ); 
    6555 
    66             virtual bool toBuffer  (CBufferOut& buffer) const { return _toBuffer(buffer);}  
    67             virtual bool fromBuffer(CBufferIn& buffer) { return _fromBuffer(buffer); }  
    68              
    69             virtual void generateCInterface(ostream& oss,const string& className) ; 
    70             virtual void generateFortran2003Interface(ostream& oss,const string& className) ; 
    71             virtual void generateFortranInterfaceDeclaration_(ostream& oss,const string& className) ; 
    72             virtual void generateFortranInterfaceBody_(ostream& oss,const string& className) ; 
    73             virtual void generateFortranInterfaceDeclaration(ostream& oss,const string& className) ; 
    74             virtual void generateFortranInterfaceGetDeclaration_(ostream& oss,const string& className) ; 
    75             virtual void generateFortranInterfaceGetBody_(ostream& oss,const string& className) ; 
    76             virtual void generateFortranInterfaceGetDeclaration(ostream& oss,const string& className) ;       
     56        /// Destructeur /// 
     57        virtual ~CAttributeEnum(void) { } 
    7758 
    78          private : 
    79           StdString _toString(void) const; 
    80           void _fromString(const StdString & str); 
    81           bool _toBuffer  (CBufferOut& buffer) const; 
    82           bool _fromBuffer(CBufferIn& buffer) ; 
    83           CEnum<T> inheritedValue ; 
    84       }; // class CAttributeEnum     
    85     
     59        /// Operateur /// 
     60        CAttributeEnum& operator=(const T_enum & value); 
     61 
     62        /// Autre /// 
     63        virtual StdString toString(void) const { return _toString();} 
     64        virtual void fromString(const StdString & str) { if (str==resetInheritanceStr) { reset(); _canInherite=false ;}  else _fromString(str);} 
     65 
     66        virtual bool toBuffer  (CBufferOut& buffer) const { return _toBuffer(buffer);}  
     67        virtual bool fromBuffer(CBufferIn& buffer) { return _fromBuffer(buffer); }  
     68 
     69        virtual void generateCInterface(ostream& oss,const string& className) ; 
     70        virtual void generateFortran2003Interface(ostream& oss,const string& className) ; 
     71        virtual void generateFortranInterfaceDeclaration_(ostream& oss,const string& className) ; 
     72        virtual void generateFortranInterfaceBody_(ostream& oss,const string& className) ; 
     73        virtual void generateFortranInterfaceDeclaration(ostream& oss,const string& className) ; 
     74        virtual void generateFortranInterfaceGetDeclaration_(ostream& oss,const string& className) ; 
     75        virtual void generateFortranInterfaceGetBody_(ostream& oss,const string& className) ; 
     76        virtual void generateFortranInterfaceGetDeclaration(ostream& oss,const string& className) ;       
     77 
     78        private : 
     79        StdString _toString(void) const; 
     80        void _fromString(const StdString & str); 
     81        bool _toBuffer  (CBufferOut& buffer) const; 
     82        bool _fromBuffer(CBufferIn& buffer) ; 
     83        CEnum<T> inheritedValue ; 
     84    }; // class CAttributeEnum     
     85 
    8686} // namespace xios 
    8787 
    8888#endif // __XIOS_ATTRIBUTE_ENUM__ 
     89 
  • XIOS/dev/branch_yushan_merged/src/attribute_enum_impl.hpp

    r1112 r1134  
    1010namespace xios 
    1111{ 
    12   /// ////////////////////// Définitions ////////////////////// /// 
     12  /// ////////////////////// Définitions ////////////////////// /// 
    1313  template <class T> 
    1414  CAttributeEnum<T>::CAttributeEnum(const StdString & id) 
     
    3030     umap.insert(umap.end(), std::make_pair(id, this)); 
    3131  } 
    32  
     32  
    3333  template <class T> 
    3434  CAttributeEnum<T>::CAttributeEnum 
     
    4040     umap.insert(umap.end(), std::make_pair(id, this)); 
    4141  } 
    42  
     42  
    4343  ///-------------------------------------------------------------- 
    4444  template <class T> 
     
    5454     return CEnum<T>::get(); 
    5555  } 
    56  
     56  
    5757  template <class T> 
    5858  string CAttributeEnum<T>::getStringValue(void) const 
    5959  { 
    60      return CEnum<T>::toString(); 
    61   } 
     60    return CEnum<T>::toString(); 
     61  } 
     62 
    6263 
    6364  template <class T> 
     
    7071  void CAttributeEnum<T>::set(const CAttribute& attr) 
    7172  { 
    72     this->set(dynamic_cast<const CAttributeEnum<T>& >(attr)); 
    73   } 
    74  
    75  template <class T> 
     73     this->set(dynamic_cast<const CAttributeEnum<T>& >(attr)); 
     74  } 
     75    
     76  template <class T> 
    7677  void CAttributeEnum<T>::set(const CAttributeEnum& attr) 
    7778  { 
    78     CEnum<T>::set(attr); 
    79   } 
    80  
     79     CEnum<T>::set(attr); 
     80  } 
     81  
    8182  template <class T> 
    8283  void CAttributeEnum<T>::setInheritedValue(const CAttribute& attr) 
    8384  { 
    84     this->setInheritedValue(dynamic_cast<const CAttributeEnum<T>& >(attr)); 
    85   } 
    86  
     85     this->setInheritedValue(dynamic_cast<const CAttributeEnum<T>& >(attr)); 
     86  } 
     87   
    8788  template <class T> 
    8889  void CAttributeEnum<T>::setInheritedValue(const CAttributeEnum& attr) 
    8990  { 
    90     if (this->isEmpty() && _canInherite && attr.hasInheritedValue()) inheritedValue.set(attr.getInheritedValue()); 
    91   } 
    92  
     91     if (this->isEmpty() && _canInherite && attr.hasInheritedValue()) inheritedValue.set(attr.getInheritedValue()); 
     92  } 
     93   
    9394  template <class T> 
    9495  typename T::t_enum CAttributeEnum<T>::getInheritedValue(void) const 
    9596  { 
    96     if (this->isEmpty()) return inheritedValue.get(); 
    97     else return getValue(); 
    98   } 
    99  
    100   template <class T> 
    101   string CAttributeEnum<T>::getInheritedStringValue(void) const 
    102   { 
    103      if (this->isEmpty()) return inheritedValue.toString(); 
    104      else return CEnum<T>::toString();; 
    105   } 
    106  
    107   template <class T> 
    108   bool CAttributeEnum<T>::hasInheritedValue(void) const 
    109   { 
    110     return !this->isEmpty() || !inheritedValue.isEmpty(); 
    111   } 
    112  
    113   template <class T> 
    114   bool CAttributeEnum<T>::isEqual(const CAttribute& attr) 
    115   { 
    116     return (this->isEqual(dynamic_cast<const CAttributeEnum<T>& >(attr))); 
    117   } 
    118  
    119   template <class T> 
    120   bool CAttributeEnum<T>::isEqual(const CAttributeEnum& attr) 
    121   { 
    122     return ((dynamic_cast<const CEnum<T>& >(*this)) == (dynamic_cast<const CEnum<T>& >(attr))); 
    123   } 
     97     if (this->isEmpty()) return inheritedValue.get(); 
     98     else return getValue(); 
     99  } 
     100 
     101  template <class T> 
     102      string CAttributeEnum<T>::getInheritedStringValue(void) const 
     103      { 
     104          if (this->isEmpty()) return inheritedValue.toString(); 
     105          else return CEnum<T>::toString();; 
     106      } 
     107 
     108  template <class T> 
     109      bool CAttributeEnum<T>::hasInheritedValue(void) const 
     110      { 
     111          return !this->isEmpty() || !inheritedValue.isEmpty(); 
     112      } 
     113 
     114  template <class T> 
     115      bool CAttributeEnum<T>::isEqual(const CAttribute& attr) 
     116      { 
     117          return (this->isEqual(dynamic_cast<const CAttributeEnum<T>& >(attr))); 
     118      } 
     119 
     120  template <class T> 
     121      bool CAttributeEnum<T>::isEqual(const CAttributeEnum& attr) 
     122      { 
     123          return ((dynamic_cast<const CEnum<T>& >(*this)) == (dynamic_cast<const CEnum<T>& >(attr))); 
     124      } 
    124125 
    125126  //--------------------------------------------------------------- 
    126127 
    127128  template <class T> 
    128   CAttributeEnum<T>& CAttributeEnum<T>::operator=(const T_enum & value) 
    129   { 
    130      this->setValue(value); 
    131      return *this; 
    132   } 
     129      CAttributeEnum<T>& CAttributeEnum<T>::operator=(const T_enum & value) 
     130      { 
     131          this->setValue(value); 
     132          return *this; 
     133      } 
    133134 
    134135  //--------------------------------------------------------------- 
    135136 
    136137  template <class T> 
    137   StdString CAttributeEnum<T>::_toString(void) const 
    138   { 
    139      StdOStringStream oss; 
    140      if (!CEnum<T>::isEmpty() && this->hasId()) 
    141         oss << this->getName() << "=\"" << CEnum<T>::toString() << "\""; 
    142      return (oss.str()); 
    143   } 
    144  
    145   template <class T> 
    146   void CAttributeEnum<T>::_fromString(const StdString & str) 
    147   { 
    148     CEnum<T>::fromString(str); 
    149   } 
    150  
    151   template <class T> 
    152   bool CAttributeEnum<T>::_toBuffer (CBufferOut& buffer) const 
    153   { 
    154      return CEnum<T>::toBuffer(buffer); 
    155   } 
    156  
    157   template <class T> 
    158   bool CAttributeEnum<T>::_fromBuffer(CBufferIn& buffer) 
    159   { 
    160     return CEnum<T>::fromBuffer(buffer); 
    161   } 
    162  
    163   template <typename T> 
    164   void CAttributeEnum<T>::generateCInterface(ostream& oss,const string& className) 
    165   { 
    166     CInterface::AttributeCInterface<CEnumBase>(oss, className, this->getName()); 
    167   } 
    168  
    169   template <typename T> 
    170   void CAttributeEnum<T>::generateFortran2003Interface(ostream& oss,const string& className) 
    171   { 
    172     CInterface::AttributeFortran2003Interface<string>(oss, className, this->getName()); 
    173   } 
    174  
    175   template <typename T> 
    176   void CAttributeEnum<T>::generateFortranInterfaceDeclaration_(ostream& oss,const string& className) 
    177   { 
    178     CInterface::AttributeFortranInterfaceDeclaration<string>(oss, className, this->getName()+"_"); 
    179   } 
    180  
    181   template <typename T> 
    182   void CAttributeEnum<T>::generateFortranInterfaceBody_(ostream& oss,const string& className) 
    183   { 
    184     CInterface::AttributeFortranInterfaceBody<string>(oss, className, this->getName()); 
    185   } 
    186  
    187   template <typename T> 
    188   void CAttributeEnum<T>::generateFortranInterfaceDeclaration(ostream& oss,const string& className) 
    189   { 
    190     CInterface::AttributeFortranInterfaceDeclaration<string>(oss, className, this->getName()); 
    191   } 
    192  
    193   template <typename T> 
    194   void CAttributeEnum<T>::generateFortranInterfaceGetDeclaration_(ostream& oss,const string& className) 
    195   { 
    196     CInterface::AttributeFortranInterfaceGetDeclaration<string>(oss, className, this->getName()+"_"); 
    197   } 
    198  
    199   template <typename T> 
    200   void CAttributeEnum<T>::generateFortranInterfaceGetBody_(ostream& oss,const string& className) 
    201   { 
    202     CInterface::AttributeFortranInterfaceGetBody<string>(oss, className, this->getName()); 
    203   } 
    204  
    205   template <typename T> 
    206   void CAttributeEnum<T>::generateFortranInterfaceGetDeclaration(ostream& oss,const string& className) 
    207   { 
    208     CInterface::AttributeFortranInterfaceGetDeclaration<string>(oss, className, this->getName()); 
    209   } 
     138      StdString CAttributeEnum<T>::_toString(void) const 
     139      { 
     140          StdOStringStream oss; 
     141          if (!CEnum<T>::isEmpty() && this->hasId()) 
     142              oss << this->getName() << "=\"" << CEnum<T>::toString() << "\""; 
     143          return (oss.str()); 
     144      } 
     145 
     146  template <class T> 
     147      void CAttributeEnum<T>::_fromString(const StdString & str) 
     148      { 
     149          CEnum<T>::fromString(str); 
     150      } 
     151 
     152  template <class T> 
     153      bool CAttributeEnum<T>::_toBuffer (CBufferOut& buffer) const 
     154      { 
     155          return CEnum<T>::toBuffer(buffer); 
     156      } 
     157 
     158  template <class T> 
     159      bool CAttributeEnum<T>::_fromBuffer(CBufferIn& buffer) 
     160      { 
     161          return CEnum<T>::fromBuffer(buffer); 
     162      } 
     163 
     164  template <typename T> 
     165      void CAttributeEnum<T>::generateCInterface(ostream& oss,const string& className) 
     166      { 
     167          CInterface::AttributeCInterface<CEnumBase>(oss, className, this->getName()); 
     168      } 
     169 
     170  template <typename T> 
     171      void CAttributeEnum<T>::generateFortran2003Interface(ostream& oss,const string& className) 
     172      { 
     173          CInterface::AttributeFortran2003Interface<string>(oss, className, this->getName()); 
     174      } 
     175 
     176  template <typename T> 
     177      void CAttributeEnum<T>::generateFortranInterfaceDeclaration_(ostream& oss,const string& className) 
     178      { 
     179          CInterface::AttributeFortranInterfaceDeclaration<string>(oss, className, this->getName()+"_"); 
     180      } 
     181 
     182  template <typename T> 
     183      void CAttributeEnum<T>::generateFortranInterfaceBody_(ostream& oss,const string& className) 
     184      { 
     185          CInterface::AttributeFortranInterfaceBody<string>(oss, className, this->getName()); 
     186      } 
     187 
     188  template <typename T> 
     189      void CAttributeEnum<T>::generateFortranInterfaceDeclaration(ostream& oss,const string& className) 
     190      { 
     191          CInterface::AttributeFortranInterfaceDeclaration<string>(oss, className, this->getName()); 
     192      } 
     193 
     194  template <typename T> 
     195      void CAttributeEnum<T>::generateFortranInterfaceGetDeclaration_(ostream& oss,const string& className) 
     196      { 
     197          CInterface::AttributeFortranInterfaceGetDeclaration<string>(oss, className, this->getName()+"_"); 
     198      } 
     199 
     200  template <typename T> 
     201      void CAttributeEnum<T>::generateFortranInterfaceGetBody_(ostream& oss,const string& className) 
     202      { 
     203          CInterface::AttributeFortranInterfaceGetBody<string>(oss, className, this->getName()); 
     204      } 
     205 
     206  template <typename T> 
     207      void CAttributeEnum<T>::generateFortranInterfaceGetDeclaration(ostream& oss,const string& className) 
     208      { 
     209          CInterface::AttributeFortranInterfaceGetDeclaration<string>(oss, className, this->getName()); 
     210      } 
    210211} // namespace xios 
    211212 
    212213#endif // __XIOS_ATTRIBUTE_ENUM_IMPL_HPP__ 
     214 
  • XIOS/dev/branch_yushan_merged/src/attribute_map.hpp

    r1117 → r1134

      /// Propriété statique ///
      static CAttributeMap * Current;
    + #pragma omp threadprivate (Current)

      };  // class CAttributeMap
  • XIOS/dev/branch_yushan_merged/src/buffer_client.cpp

    r917 → r1134

      buffer[1] = new char[bufferSize];
      retBuffer = new CBufferOut(buffer[current], bufferSize);
    + #pragma omp critical (_output)
      info(10) << "CClientBuffer: allocated 2 x " << bufferSize << " bytes for server " << serverRank << " with a maximum of " << maxBufferedEvents << " buffered events" << endl;
      }
  • XIOS/dev/branch_yushan_merged/src/buffer_client.hpp

    r917 → r1134

      #include "mpi.hpp"
      #include "cxios.hpp"
    + #ifdef _usingEP
    + #include "ep_declaration.hpp"
    + #endif

      namespace xios

      public:
        static size_t maxRequestSize;
    +   #pragma omp threadprivate(maxRequestSize)

        CClientBuffer(MPI_Comm intercomm, int serverRank, StdSize bufferSize, StdSize maxBufferedEvents);
  • XIOS/dev/branch_yushan_merged/src/buffer_server.hpp

    r717 → r1134

      #include "xios_spl.hpp"
      #include "buffer.hpp"
    - #include "mpi.hpp"
    + #include "mpi_std.hpp"
      #include "cxios.hpp"
  • XIOS/dev/branch_yushan_merged/src/calendar.cpp

    r561 → r1134

      const CDate& CCalendar::update(int step)
      {
    -   info(20) << "update step : " << step << " timestep " << this->timestep << std::endl;
    +   #pragma omp critical (_output)
    +   info(80)<< "update step : " << step << " timestep " << this->timestep << std::endl;
        return (this->currentDate = this->getInitDate() + step * this->timestep);
      }
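
    Several hunks in this changeset (buffer_client.cpp, calendar.cpp, client.cpp) wrap info()/report() logging in a critical section named _output. A minimal standalone sketch of that pattern, with a plain std::cout stream standing in for the XIOS logger: only one thread at a time may be inside any region with that name, so log lines from different OpenMP threads do not interleave.

        // Standalone sketch of "#pragma omp critical (_output)"; compile with -fopenmp.
        #include <iostream>
        #include <omp.h>

        int main()
        {
          #pragma omp parallel num_threads(4)
          {
            const int tid = omp_get_thread_num();

            // The critical directive applies to the single statement that follows,
            // exactly as it is placed before the info(...) call in the diff above.
            #pragma omp critical (_output)
            std::cout << "thread " << tid << " : update step done" << std::endl;
          }
          return 0;
        }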
  • XIOS/dev/branch_yushan_merged/src/client.cpp

    r1032 r1134  
    1111#include "timer.hpp" 
    1212#include "buffer_client.hpp" 
     13#include "log.hpp" 
     14 
    1315 
    1416namespace xios 
    1517{ 
     18    extern int test_omp_rank; 
     19    #pragma omp threadprivate(test_omp_rank) 
    1620 
    1721    MPI_Comm CClient::intraComm ; 
    1822    MPI_Comm CClient::interComm ; 
    19     std::list<MPI_Comm> CClient::contextInterComms; 
     23    std::list<MPI_Comm> *CClient::contextInterComms_ptr = 0; 
    2024    int CClient::serverLeader ; 
    2125    bool CClient::is_MPI_Initialized ; 
     
    2428    StdOFStream CClient::m_errorStream; 
    2529 
    26     void CClient::initialize(const string& codeId,MPI_Comm& localComm,MPI_Comm& returnComm) 
     30    StdOFStream CClient::array_infoStream[10]; 
     31 
     32    void CClient::initialize(const string& codeId, MPI_Comm& localComm, MPI_Comm& returnComm) 
    2733    { 
    2834      int initialized ; 
     
    3541      { 
    3642// localComm doesn't given 
     43 
    3744        if (localComm == MPI_COMM_NULL) 
    3845        { 
    3946          if (!is_MPI_Initialized) 
    4047          { 
    41             MPI_Init(NULL, NULL); 
     48            //MPI_Init(NULL, NULL); 
     49            int return_level; 
     50            MPI_Init_thread(NULL, NULL, 3, &return_level); 
     51            assert(return_level == 3); 
    4252          } 
    4353          CTimer::get("XIOS").resume() ; 
     
    5161          int myColor ; 
    5262          int i,c ; 
    53           MPI_Comm newComm ; 
    54  
    55           MPI_Comm_size(CXios::globalComm,&size) ; 
     63 
     64          MPI_Comm_size(CXios::globalComm,&size); 
    5665          MPI_Comm_rank(CXios::globalComm,&rank); 
     66        
    5767 
    5868          hashAll=new unsigned long[size] ; 
     
    96106            MPI_Comm_size(intraComm,&intraCommSize) ; 
    97107            MPI_Comm_rank(intraComm,&intraCommRank) ; 
    98             info(50)<<"intercommCreate::client "<<rank<<" intraCommSize : "<<intraCommSize 
    99                  <<" intraCommRank :"<<intraCommRank<<"  clientLeader "<< serverLeader<<endl ; 
     108             
     109            #pragma omp critical(_output) 
     110            { 
     111              info(10)<<"intercommCreate::client "<<test_omp_rank<< " "<< &test_omp_rank <<" intraCommSize : "<<intraCommSize 
     112                 <<" intraCommRank :"<<intraCommRank<<"  serverLeader "<< serverLeader 
     113                 <<" globalComm : "<< &(CXios::globalComm) << endl ;   
     114            } 
     115 
     116             
     117             
    100118            MPI_Intercomm_create(intraComm,0,CXios::globalComm,serverLeader,0,&interComm) ; 
     119 
    101120          } 
    102121          else 
     
    148167 
    149168      MPI_Comm_dup(intraComm,&returnComm) ; 
     169 
    150170    } 
    151171 
     
    154174    { 
    155175      CContext::setCurrent(id) ; 
    156       CContext* context=CContext::create(id); 
     176      CContext* context = CContext::create(id); 
     177 
     178      int tmp_rank; 
     179      MPI_Comm_rank(contextComm,&tmp_rank) ; 
     180       
    157181      StdString idServer(id); 
    158182      idServer += "_server"; 
     
    161185      { 
    162186        int size,rank,globalRank ; 
    163         size_t message_size ; 
    164         int leaderRank ; 
     187        //size_t message_size ; 
     188        //int leaderRank ; 
    165189        MPI_Comm contextInterComm ; 
    166190 
     
    173197        CMessage msg ; 
    174198        msg<<idServer<<size<<globalRank ; 
    175 //        msg<<id<<size<<globalRank ; 
     199 
    176200 
    177201        int messageSize=msg.size() ; 
     
    184208 
    185209        MPI_Intercomm_create(contextComm,0,CXios::globalComm,serverLeader,10+globalRank,&contextInterComm) ; 
    186         info(10)<<"Register new Context : "<<id<<endl ; 
     210         
     211        #pragma omp critical(_output) 
     212        info(10)<<" RANK "<< tmp_rank<<" Register new Context : "<<id<<endl ; 
     213 
    187214 
    188215        MPI_Comm inter ; 
     
    190217        MPI_Barrier(inter) ; 
    191218 
     219         
    192220        context->initClient(contextComm,contextInterComm) ; 
    193221 
    194         contextInterComms.push_back(contextInterComm); 
     222         
     223        if(contextInterComms_ptr == NULL) contextInterComms_ptr = new std::list<MPI_Comm>; 
     224        contextInterComms_ptr->push_back(contextInterComm); 
     225         
    195226        MPI_Comm_free(&inter); 
    196227      } 
     
    209240        // Finally, we should return current context to context client 
    210241        CContext::setCurrent(id); 
    211  
    212         contextInterComms.push_back(contextInterComm); 
     242         
     243        if(contextInterComms_ptr == NULL) contextInterComms_ptr = new std::list<MPI_Comm>; 
     244        contextInterComms_ptr->push_back(contextInterComm); 
     245 
    213246      } 
    214247    } 
     
    220253 
    221254      MPI_Comm_rank(intraComm,&rank) ; 
    222   
     255 
    223256      if (!CXios::isServer) 
    224257      { 
     
    230263      } 
    231264 
    232       for (std::list<MPI_Comm>::iterator it = contextInterComms.begin(); it != contextInterComms.end(); it++) 
     265      for (std::list<MPI_Comm>::iterator it = contextInterComms_ptr->begin(); it != contextInterComms_ptr->end(); ++it) 
    233266        MPI_Comm_free(&(*it)); 
     267       
    234268      MPI_Comm_free(&interComm); 
    235269      MPI_Comm_free(&intraComm); 
     
    241275      { 
    242276        if (CXios::usingOasis) oasis_finalize(); 
    243         else MPI_Finalize() ; 
    244       } 
    245        
    246       info(20) << "Client side context is finalized"<<endl ; 
    247       report(0) <<" Performance report : total time spent for XIOS : "<< CTimer::get("XIOS").getCumulatedTime()<<" s"<<endl ; 
    248       report(0)<< " Performance report : time spent for waiting free buffer : "<< CTimer::get("Blocking time").getCumulatedTime()<<" s"<<endl ; 
    249       report(0)<< " Performance report : Ratio : "<< CTimer::get("Blocking time").getCumulatedTime()/CTimer::get("XIOS").getCumulatedTime()*100.<<" %"<<endl ; 
    250       report(0)<< " Performance report : This ratio must be close to zero. Otherwise it may be usefull to increase buffer size or numbers of server"<<endl ; 
    251 //      report(0)<< " Memory report : Current buffer_size : "<<CXios::bufferSize<<endl ; 
    252       report(0)<< " Memory report : Minimum buffer size required : " << CClientBuffer::maxRequestSize << " bytes" << endl ; 
    253       report(0)<< " Memory report : increasing it by a factor will increase performance, depending of the volume of data wrote in file at each time step of the file"<<endl ; 
     277        else MPI_Finalize(); 
     278      } 
     279       
     280      #pragma omp critical (_output) 
     281      info(20) << "Client "<<rank<<" : Client side context is finalized "<< endl ; 
     282 
     283  /*    #pragma omp critical (_output) 
     284      { 
     285         report(0) <<"     Performance report : total time spent for XIOS : "<< CTimer::get("XIOS").getCumulatedTime()<<" s"<<endl ; 
     286         report(0)<< "     Performance report : time spent for waiting free buffer : "<< CTimer::get("Blocking time").getCumulatedTime()<<" s"<<endl ; 
     287         report(0)<< "     Performance report : Ratio : "<< CTimer::get("Blocking time").getCumulatedTime()/CTimer::get("XIOS").getCumulatedTime()*100.<<" %"<<endl ; 
     288         report(0)<< "     Performance report : This ratio must be close to zero. Otherwise it may be usefull to increase buffer size or numbers of server"<<endl ; 
     289         report(0)<< "     Memory report : Current buffer_size : "<<CXios::bufferSize<<endl ; 
     290         report(0)<< "     Memory report : Minimum buffer size required : " << CClientBuffer::maxRequestSize << " bytes" << endl ; 
     291         report(0)<< "     Memory report : increasing it by a factor will increase performance, depending of the volume of data wrote in file at each time step of the file"<<endl ; 
     292       }       
     293*/ 
    254294   } 
    255295 
     
    280320 
    281321      fileNameClient << fileName << "_" << std::setfill('0') << std::setw(numDigit) << getRank() << ext; 
     322       
    282323      fb->open(fileNameClient.str().c_str(), std::ios::out); 
    283324      if (!fb->is_open()) 
    284325        ERROR("void CClient::openStream(const StdString& fileName, const StdString& ext, std::filebuf* fb)", 
    285               << std::endl << "Can not open <" << fileNameClient << "> file to write the client log(s)."); 
     326            << std::endl << "Can not open <" << fileNameClient << "> file to write the client log(s)."); 
    286327    } 
    287328 
     
    294335    void CClient::openInfoStream(const StdString& fileName) 
    295336    { 
    296       std::filebuf* fb = m_infoStream.rdbuf(); 
    297       openStream(fileName, ".out", fb); 
    298  
    299       info.write2File(fb); 
    300       report.write2File(fb); 
     337      //std::filebuf* fb = m_infoStream.rdbuf(); 
     338 
     339      info_FB[omp_get_thread_num()] = array_infoStream[omp_get_thread_num()].rdbuf(); 
     340           
     341      openStream(fileName, ".out", info_FB[omp_get_thread_num()]); 
     342 
     343      info.write2File(info_FB[omp_get_thread_num()]); 
     344      report.write2File(info_FB[omp_get_thread_num()]); 
     345       
    301346    } 
    302347 
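
    The client.cpp hunk above replaces MPI_Init with MPI_Init_thread(NULL, NULL, 3, &return_level) and asserts return_level == 3, i.e. it requests MPI_THREAD_MULTIPLE (whose value is 3 in common MPI headers) so that OpenMP threads may call MPI concurrently. A small hedged sketch of the same request written with the symbolic constant and an explicit check; this is not XIOS code and the error handling is simplified.

        #include <mpi.h>
        #include <cstdio>

        int main(int argc, char** argv)
        {
          int provided = MPI_THREAD_SINGLE;
          MPI_Init_thread(&argc, &argv, MPI_THREAD_MULTIPLE, &provided);

          if (provided < MPI_THREAD_MULTIPLE)
          {
            std::fprintf(stderr, "MPI provides thread level %d < MPI_THREAD_MULTIPLE\n", provided);
            MPI_Abort(MPI_COMM_WORLD, 1);
          }

          // ... threads may now make concurrent MPI calls ...

          MPI_Finalize();
          return 0;
        }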
  • XIOS/dev/branch_yushan_merged/src/client.hpp

    r655 → r1134

      static void initialize(const string& codeId, MPI_Comm& localComm, MPI_Comm& returnComm);
      static void finalize(void);
    - static void registerContext(const string& id, MPI_Comm contextComm);
    + static void registerContext(const string& id, ep_lib::MPI_Comm contextComm);

      static MPI_Comm intraComm;
    + #pragma omp threadprivate(intraComm)
    +
      static MPI_Comm interComm;
    - static std::list<MPI_Comm> contextInterComms;
    + #pragma omp threadprivate(interComm)
    +
    + //static std::list<MPI_Comm> contextInterComms;
    +
    + static std::list<MPI_Comm> * contextInterComms_ptr;
    + #pragma omp threadprivate(contextInterComms_ptr)
    +
      static int serverLeader;
    + #pragma omp threadprivate(serverLeader)
    +
      static bool is_MPI_Initialized ;
    + #pragma omp threadprivate(is_MPI_Initialized)

      //! Get rank of the current process

      protected:
        static int rank;
    +   #pragma omp threadprivate(rank)
    +
        static StdOFStream m_infoStream;
    +   #pragma omp threadprivate(m_infoStream)
    +
        static StdOFStream m_errorStream;
    +   #pragma omp threadprivate(m_errorStream)
    +
    +   static StdOFStream array_infoStream[10];

        static void openStream(const StdString& fileName, const StdString& ext, std::filebuf* fb);
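
    The client.hpp hunk above marks the static members of CClient as OpenMP threadprivate, so that when several threads each act as an XIOS client every thread keeps its own intraComm, interComm, log streams, and so on. A minimal standalone sketch of threadprivate applied to a class static; the class and member names are invented for this example.

        // Each OpenMP thread gets its own copy of Counter::value. Compile with -fopenmp.
        #include <cstdio>
        #include <omp.h>

        struct Counter
        {
          static int value;
          #pragma omp threadprivate(value)   // one instance of 'value' per thread
        };

        int Counter::value = 0;

        int main()
        {
          #pragma omp parallel num_threads(3)
          {
            Counter::value = omp_get_thread_num();   // no race: storage is per thread

            #pragma omp critical (_output)
            std::printf("thread %d sees Counter::value = %d\n",
                        omp_get_thread_num(), Counter::value);
          }
          return 0;
        }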
  • XIOS/dev/branch_yushan_merged/src/client_client_dht_template.hpp

    r941 → r1134

      #include "xios_spl.hpp"
      #include "array_new.hpp"
    - #include "mpi.hpp"
    + #include "mpi_std.hpp"
      #include "policy.hpp"
      #include <boost/unordered_map.hpp>

      public:
        CClientClientDHTTemplate(const Index2InfoTypeMap& indexInfoInitMap,
    -                            const MPI_Comm& clientIntraComm);
    +                            const ep_lib::MPI_Comm& clientIntraComm);

        CClientClientDHTTemplate(const Index2VectorInfoTypeMap& indexInfoInitMap,
    -                            const MPI_Comm& clientIntraComm);
    +                            const ep_lib::MPI_Comm& clientIntraComm);

        void computeIndexInfoMapping(const CArray<size_t,1>& indices);

      protected:
    -   CClientClientDHTTemplate(const MPI_Comm& clientIntraComm);
    +   CClientClientDHTTemplate(const ep_lib::MPI_Comm& clientIntraComm);

      protected:

        // Redistribute index and info among clients
        void computeDistributedIndex(const Index2InfoTypeMap& indexInfoInitMap,
    -                                const MPI_Comm& intraCommLevel,
    +                                const ep_lib::MPI_Comm& intraCommLevel,
                                     int level);

        void computeDistributedIndex(const Index2VectorInfoTypeMap& indexInfoInitMap,
    -                                const MPI_Comm& intraCommLevel,
    +                                const ep_lib::MPI_Comm& intraCommLevel,
                                     int level);

        void computeIndexInfoMappingLevel(const CArray<size_t,1>& indices,
    -                                     const MPI_Comm& intraCommLevel,
    +                                     const ep_lib::MPI_Comm& intraCommLevel,
                                          int level);

        // Send information to clients
        void sendInfoToClients(int clientDestRank, unsigned char* info, int infoSize,
    -                          const MPI_Comm& clientIntraComm,
    -                          std::vector<MPI_Request>& requestSendInfo);
    +                          const ep_lib::MPI_Comm& clientIntraComm,
    +                          std::vector<ep_lib::MPI_Request>& requestSendInfo);

        void recvInfoFromClients(int clientSrcRank, unsigned char* info, int infoSize,
    -                            const MPI_Comm& clientIntraComm,
    -                            std::vector<MPI_Request>& requestRecvInfo);
    +                            const ep_lib::MPI_Comm& clientIntraComm,
    +                            std::vector<ep_lib::MPI_Request>& requestRecvInfo);

        // Send global index to clients
        void sendIndexToClients(int clientDestRank, size_t* indices, size_t indiceSize,
    -                           const MPI_Comm& clientIntraComm,
    -                           std::vector<MPI_Request>& requestSendIndexGlobal);
    +                           const ep_lib::MPI_Comm& clientIntraComm,
    +                           std::vector<ep_lib::MPI_Request>& requestSendIndexGlobal);

        void recvIndexFromClients(int clientSrcRank, size_t* indices, size_t indiceSize,
    -                             const MPI_Comm& clientIntraComm,
    -                             std::vector<MPI_Request>& requestRecvIndex);
    +                             const ep_lib::MPI_Comm& clientIntraComm,
    +                             std::vector<ep_lib::MPI_Request>& requestRecvIndex);

        void sendRecvOnReturn(const std::vector<int>& sendNbRank, std::vector<int>& sendNbElements,
  • XIOS/dev/branch_yushan_merged/src/client_client_dht_template_impl.hpp

    r892 r1134  
    1010#include "utils.hpp" 
    1111#include "mpi_tag.hpp" 
     12#ifdef _usingEP 
     13#include "ep_declaration.hpp" 
     14#endif 
     15 
    1216 
    1317namespace xios 
    1418{ 
    1519template<typename T, typename H> 
    16 CClientClientDHTTemplate<T,H>::CClientClientDHTTemplate(const MPI_Comm& clientIntraComm) 
     20CClientClientDHTTemplate<T,H>::CClientClientDHTTemplate(const ep_lib::MPI_Comm& clientIntraComm) 
    1721  : H(clientIntraComm), index2InfoMapping_(), indexToInfoMappingLevel_(), nbClient_(0) 
    1822{ 
     
    3438template<typename T, typename H> 
    3539CClientClientDHTTemplate<T,H>::CClientClientDHTTemplate(const Index2InfoTypeMap& indexInfoMap, 
    36                                                         const MPI_Comm& clientIntraComm) 
     40                                                        const ep_lib::MPI_Comm& clientIntraComm) 
    3741  : H(clientIntraComm), index2InfoMapping_(), indexToInfoMappingLevel_(), nbClient_(0) 
    3842{ 
     
    5963template<typename T, typename H> 
    6064CClientClientDHTTemplate<T,H>::CClientClientDHTTemplate(const Index2VectorInfoTypeMap& indexInfoMap, 
    61                                                         const MPI_Comm& clientIntraComm) 
     65                                                        const ep_lib::MPI_Comm& clientIntraComm) 
    6266  : H(clientIntraComm), index2InfoMapping_(), indexToInfoMappingLevel_(), nbClient_(0) 
    6367{ 
     
    9599template<typename T, typename H> 
    96100void CClientClientDHTTemplate<T,H>::computeIndexInfoMappingLevel(const CArray<size_t,1>& indices, 
    97                                                                  const MPI_Comm& commLevel, 
     101                                                                 const ep_lib::MPI_Comm& commLevel, 
    98102                                                                 int level) 
    99103{ 
     
    169173    recvIndexBuff = new unsigned long[recvNbIndexCount]; 
    170174 
    171   std::vector<MPI_Request> request; 
     175  std::vector<ep_lib::MPI_Request> request; 
    172176  std::vector<int>::iterator itbRecvIndex = recvRankClient.begin(), itRecvIndex, 
    173177                             iteRecvIndex = recvRankClient.end(), 
     
    179183  { 
    180184    if (0 != recvNbIndexClientCount[idx]) 
     185    { 
    181186      recvIndexFromClients(recvRankClient[idx], recvIndexBuff+currentIndex, recvNbIndexClientCount[idx], commLevel, request); 
     187    } 
    182188    currentIndex += recvNbIndexClientCount[idx]; 
    183189  } 
     
    188194    sendIndexToClients(itIndex->first, (itIndex->second), sendNbIndexBuff[itIndex->first-groupRankBegin], commLevel, request); 
    189195 
    190   std::vector<MPI_Status> status(request.size()); 
     196  std::vector<ep_lib::MPI_Status> status(request.size()); 
    191197  MPI_Waitall(request.size(), &request[0], &status[0]); 
    192198 
     
    242248  } 
    243249 
    244   std::vector<MPI_Request> requestOnReturn; 
     250  std::vector<ep_lib::MPI_Request> requestOnReturn; 
    245251  currentIndex = 0; 
    246252  for (int idx = 0; idx < recvRankOnReturn.size(); ++idx) 
     
    293299  } 
    294300 
    295   std::vector<MPI_Status> statusOnReturn(requestOnReturn.size()); 
     301  std::vector<ep_lib::MPI_Status> statusOnReturn(requestOnReturn.size()); 
    296302  MPI_Waitall(requestOnReturn.size(), &requestOnReturn[0], &statusOnReturn[0]); 
    297303 
     
    360366template<typename T, typename H> 
    361367void CClientClientDHTTemplate<T,H>::computeDistributedIndex(const Index2VectorInfoTypeMap& indexInfoMap, 
    362                                                             const MPI_Comm& commLevel, 
     368                                                            const ep_lib::MPI_Comm& commLevel, 
    363369                                                            int level) 
    364370{ 
     
    412418    { 
    413419      client2ClientIndex[indexClient + groupRankBegin][sendNbIndexBuff[indexClient]] = it->first;; 
    414   //          ProcessDHTElement<InfoType>::packElement(it->second, client2ClientInfo[indexClient + groupRankBegin], sendNbInfo[indexClient]); 
    415420      ProcessDHTElement<InfoType>::packElement(infoTmp[idx], client2ClientInfo[indexClient + groupRankBegin], sendNbInfo[indexClient]); 
    416421      ++sendNbIndexBuff[indexClient]; 
     
    439444  // it will send a message to the correct clients. 
    440445  // Contents of the message are index and its corresponding informatioin 
    441   std::vector<MPI_Request> request; 
     446  std::vector<ep_lib::MPI_Request> request; 
    442447  int currentIndex = 0; 
    443448  int nbRecvClient = recvRankClient.size(); 
     
    458463                                                iteIndex = client2ClientIndex.end(); 
    459464  for (itIndex = itbIndex; itIndex != iteIndex; ++itIndex) 
     465  { 
    460466    sendIndexToClients(itIndex->first, itIndex->second, sendNbIndexBuff[itIndex->first-groupRankBegin], commLevel, request); 
     467  } 
     468 
    461469  boost::unordered_map<int, unsigned char*>::iterator itbInfo = client2ClientInfo.begin(), itInfo, 
    462470                                                      iteInfo = client2ClientInfo.end(); 
    463471  for (itInfo = itbInfo; itInfo != iteInfo; ++itInfo) 
     472  { 
    464473    sendInfoToClients(itInfo->first, itInfo->second, sendNbInfo[itInfo->first-groupRankBegin], commLevel, request); 
    465474 
    466   std::vector<MPI_Status> status(request.size()); 
     475  } 
     476 
     477  std::vector<ep_lib::MPI_Status> status(request.size()); 
     478 
    467479  MPI_Waitall(request.size(), &request[0], &status[0]); 
    468480 
     
    518530template<typename T, typename H> 
    519531void CClientClientDHTTemplate<T,H>::sendIndexToClients(int clientDestRank, size_t* indices, size_t indiceSize, 
    520                                                        const MPI_Comm& clientIntraComm, 
    521                                                        std::vector<MPI_Request>& requestSendIndex) 
    522 { 
    523   MPI_Request request; 
     532                                                       const ep_lib::MPI_Comm& clientIntraComm, 
     533                                                       std::vector<ep_lib::MPI_Request>& requestSendIndex) 
     534{ 
     535  ep_lib::MPI_Request request; 
    524536  requestSendIndex.push_back(request); 
    525537  MPI_Isend(indices, indiceSize, MPI_UNSIGNED_LONG, 
     
    536548template<typename T, typename H> 
    537549void CClientClientDHTTemplate<T,H>::recvIndexFromClients(int clientSrcRank, size_t* indices, size_t indiceSize, 
    538                                                          const MPI_Comm& clientIntraComm, 
    539                                                          std::vector<MPI_Request>& requestRecvIndex) 
    540 { 
    541   MPI_Request request; 
     550                                                         const ep_lib::MPI_Comm& clientIntraComm, 
     551                                                         std::vector<ep_lib::MPI_Request>& requestRecvIndex) 
     552{ 
     553  ep_lib::MPI_Request request; 
    542554  requestRecvIndex.push_back(request); 
    543555  MPI_Irecv(indices, indiceSize, MPI_UNSIGNED_LONG, 
     
    555567template<typename T, typename H> 
    556568void CClientClientDHTTemplate<T,H>::sendInfoToClients(int clientDestRank, unsigned char* info, int infoSize, 
    557                                                       const MPI_Comm& clientIntraComm, 
    558                                                       std::vector<MPI_Request>& requestSendInfo) 
    559 { 
    560   MPI_Request request; 
     569                                                      const ep_lib::MPI_Comm& clientIntraComm, 
     570                                                      std::vector<ep_lib::MPI_Request>& requestSendInfo) 
     571{ 
     572  ep_lib::MPI_Request request; 
    561573  requestSendInfo.push_back(request); 
    562  
    563574  MPI_Isend(info, infoSize, MPI_CHAR, 
    564575            clientDestRank, MPI_DHT_INFO, clientIntraComm, &(requestSendInfo.back())); 
     
    575586template<typename T, typename H> 
    576587void CClientClientDHTTemplate<T,H>::recvInfoFromClients(int clientSrcRank, unsigned char* info, int infoSize, 
    577                                                         const MPI_Comm& clientIntraComm, 
    578                                                         std::vector<MPI_Request>& requestRecvInfo) 
    579 { 
    580   MPI_Request request; 
     588                                                        const ep_lib::MPI_Comm& clientIntraComm, 
     589                                                        std::vector<ep_lib::MPI_Request>& requestRecvInfo) 
     590{ 
     591  ep_lib::MPI_Request request; 
    581592  requestRecvInfo.push_back(request); 
    582593 
     
    651662{ 
    652663  recvNbElements.resize(recvNbRank.size()); 
    653   std::vector<MPI_Request> request(sendNbRank.size()+recvNbRank.size()); 
    654   std::vector<MPI_Status> requestStatus(sendNbRank.size()+recvNbRank.size()); 
     664  std::vector<ep_lib::MPI_Request> request(sendNbRank.size()+recvNbRank.size()); 
     665  std::vector<ep_lib::MPI_Status> requestStatus(sendNbRank.size()+recvNbRank.size()); 
    655666 
    656667  int nRequest = 0; 
     
    696707  std::vector<int> recvBuff(recvBuffSize*2,0); 
    697708 
    698   std::vector<MPI_Request> request(sendBuffSize+recvBuffSize); 
    699   std::vector<MPI_Status> requestStatus(sendBuffSize+recvBuffSize); 
     709  std::vector<ep_lib::MPI_Request> request(sendBuffSize+recvBuffSize); 
     710  std::vector<ep_lib::MPI_Status> requestStatus(sendBuffSize+recvBuffSize); 
    700711 
    701712  int nRequest = 0; 
     
    721732  } 
    722733 
     734  //MPI_Barrier(this->internalComm_); 
     735 
    723736  MPI_Waitall(sendBuffSize+recvBuffSize, &request[0], &requestStatus[0]); 
     737 
    724738  int nbRecvRank = 0, nbRecvElements = 0; 
    725739  recvNbRank.clear(); 
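
The hunks above all apply the same change: the request and status objects handed to the non-blocking DHT helpers are typed with the endpoint library's classes instead of the vendor MPI ones, so the same helpers compile against either backend. A minimal sketch of one such helper, assuming ep_lib provides drop-in MPI_Request/MPI_Irecv wrappers as the diff suggests (the tag value below is illustrative; the real code uses MPI_DHT_INDEX):

    #include "mpi.hpp"     // pulls in ep_lib when built with -D_usingEP
    #include <vector>
    #include <cstddef>

    void recvIndexFromClient(int srcRank, size_t* indices, int indiceSize,
                             const ep_lib::MPI_Comm& intraComm,
                             std::vector<ep_lib::MPI_Request>& requests)
    {
      ep_lib::MPI_Request request;
      requests.push_back(request);              // reserve a slot for this receive
      MPI_Irecv(indices, indiceSize, MPI_UNSIGNED_LONG,
                srcRank, 7 /* illustrative tag, MPI_DHT_INDEX in the real code */,
                intraComm, &requests.back());   // the handle is written into that slot
    }

The caller later waits on the whole request vector in one go, exactly as the surrounding code already does.
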
  • XIOS/dev/branch_yushan_merged/src/client_server_mapping.hpp

    r843 r1134  
    1414#include "mpi.hpp" 
    1515#include <boost/unordered_map.hpp> 
     16#ifdef _usingEP 
     17#include "ep_declaration.hpp" 
     18#endif 
     19 
    1620 
    1721namespace xios { 
     
    3741 
    3842    static std::map<int,int> computeConnectedClients(int nbServer, int nbClient, 
    39                                                      MPI_Comm& clientIntraComm, 
     43                                                     ep_lib::MPI_Comm& clientIntraComm, 
    4044                                                     const std::vector<int>& connectedServerRank); 
    4145 
  • XIOS/dev/branch_yushan_merged/src/client_server_mapping_distributed.hpp

    r835 r1134  
    3535    /** Default constructor */ 
    3636    CClientServerMappingDistributed(const boost::unordered_map<size_t,int>& globalIndexOfServer, 
    37                                     const MPI_Comm& clientIntraComm, 
     37                                    const ep_lib::MPI_Comm& clientIntraComm, 
    3838                                    bool isDataDistributed = true); 
    3939 
  • XIOS/dev/branch_yushan_merged/src/context_client.cpp

    r1033 r1134  
    2020    \cxtSer [in] cxtSer Pointer to context of server side. (It is only used on case of attached mode) 
    2121    */ 
    22     CContextClient::CContextClient(CContext* parent, MPI_Comm intraComm_, MPI_Comm interComm_, CContext* cxtSer) 
     22    CContextClient::CContextClient(CContext* parent, ep_lib::MPI_Comm intraComm_, ep_lib::MPI_Comm interComm_, CContext* cxtSer) 
    2323     : mapBufferSize_(), parentServer(cxtSer), maxBufferedEvents(4) 
    2424    { 
     
    291291       if (ratio < minBufferSizeEventSizeRatio) minBufferSizeEventSizeRatio = ratio; 
    292292     } 
     293     #ifdef _usingMPI 
    293294     MPI_Allreduce(MPI_IN_PLACE, &minBufferSizeEventSizeRatio, 1, MPI_DOUBLE, MPI_MIN, intraComm); 
    294  
     295     #elif _usingEP 
     296     MPI_Allreduce(&minBufferSizeEventSizeRatio, &minBufferSizeEventSizeRatio, 1, MPI_DOUBLE, MPI_MIN, intraComm); 
     297     #endif 
     298      
    295299     if (minBufferSizeEventSizeRatio < 1.0) 
    296300       ERROR("void CContextClient::setBufferSize(const std::map<int,StdSize>& mapSize, const std::map<int,StdSize>& maxEventSize)", 
     
    392396     for (itMap = itbMap; itMap != iteMap; ++itMap) 
    393397     { 
    394        report(10) << " Memory report : Context <" << context->getId() << "> : client side : memory used for buffer of each connection to server" << endl 
    395                   << "  +) To server with rank " << itMap->first << " : " << itMap->second << " bytes " << endl; 
     398       //report(10) << " Memory report : Context <" << context->getId() << "> : client side : memory used for buffer of each connection to server" << endl 
     399       //           << "  +) To server with rank " << itMap->first << " : " << itMap->second << " bytes " << endl; 
    396400       totalBuf += itMap->second; 
    397401     } 
    398      report(0) << " Memory report : Context <" << context->getId() << "> : client side : total memory used for buffer " << totalBuf << " bytes" << endl; 
     402     //report(0) << " Memory report : Context <" << context->getId() << "> : client side : total memory used for buffer " << totalBuf << " bytes" << endl; 
    399403 
    400404     releaseBuffers(); 
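
The Allreduce hunk above is split because MPI_IN_PLACE is apparently not handled by the endpoint layer: the EP branch passes the same variable as both send and receive buffer instead. A hedged sketch of that conditional (plain MPI forbids aliased buffers outside MPI_IN_PLACE, so the second branch relies on ep_lib's own reduction accepting them):

    #include "mpi.hpp"

    // Minimum of 'value' over all ranks of 'comm', following the pattern above.
    double globalMin(double value, MPI_Comm comm)
    {
    #ifdef _usingMPI
      MPI_Allreduce(MPI_IN_PLACE, &value, 1, MPI_DOUBLE, MPI_MIN, comm);
    #elif _usingEP
      MPI_Allreduce(&value, &value, 1, MPI_DOUBLE, MPI_MIN, comm);  // aliased buffers, as in the diff
    #endif
      return value;
    }
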
  • XIOS/dev/branch_yushan_merged/src/context_client.hpp

    r1033 r1134  
    2727    public: 
    2828      // Contructor 
    29       CContextClient(CContext* parent, MPI_Comm intraComm, MPI_Comm interComm, CContext* parentServer = 0); 
     29      CContextClient(CContext* parent, ep_lib::MPI_Comm intraComm, ep_lib::MPI_Comm interComm, CContext* parentServer = 0); 
    3030 
    3131      // Send event to server 
     
    6767      int serverSize; //!< Size of server group 
    6868 
    69       MPI_Comm interComm; //!< Communicator of server group 
     69      ep_lib::MPI_Comm interComm; //!< Communicator of server group 
    7070 
    71       MPI_Comm intraComm; //!< Communicator of client group 
     71      ep_lib::MPI_Comm intraComm; //!< Communicator of client group 
    7272 
    7373      map<int,CClientBuffer*> buffers; //!< Buffers for connection to servers 
  • XIOS/dev/branch_yushan_merged/src/context_server.cpp

    r1033 r1134  
    1010#include "file.hpp" 
    1111#include "grid.hpp" 
    12 #include "mpi.hpp" 
     12#include "mpi_std.hpp" 
    1313#include "tracer.hpp" 
    1414#include "timer.hpp" 
     
    2323{ 
    2424 
    25   CContextServer::CContextServer(CContext* parent,MPI_Comm intraComm_,MPI_Comm interComm_) 
     25  CContextServer::CContextServer(CContext* parent, ep_lib::MPI_Comm intraComm_, ep_lib::MPI_Comm interComm_) 
    2626  { 
    2727    context=parent; 
     
    7272    int count; 
    7373    char * addr; 
    74     MPI_Status status; 
     74    ep_lib::MPI_Status status; 
    7575    map<int,CServerBuffer*>::iterator it; 
    7676 
     
    8080      { 
    8181        traceOff(); 
    82         MPI_Iprobe(rank,20,interComm,&flag,&status); 
     82        ep_lib::MPI_Iprobe(rank,20,interComm,&flag,&status); 
    8383        traceOn(); 
    8484        if (flag==true) 
     
    8888          { 
    8989            StdSize buffSize = 0; 
    90             MPI_Recv(&buffSize, 1, MPI_LONG, rank, 20, interComm, &status); 
     90            ep_lib::MPI_Recv(&buffSize, 1, MPI_LONG, rank, 20, interComm, &status); 
    9191            mapBufferSize_.insert(std::make_pair(rank, buffSize)); 
    9292            it=(buffers.insert(pair<int,CServerBuffer*>(rank,new CServerBuffer(buffSize)))).first; 
     
    9494          else 
    9595          { 
    96             MPI_Get_count(&status,MPI_CHAR,&count); 
     96             
     97            ep_lib::MPI_Get_count(&status,MPI_CHAR,&count); 
    9798            if (it->second->isBufferFree(count)) 
    9899            { 
    99100              addr=(char*)it->second->getBuffer(count); 
    100               MPI_Irecv(addr,count,MPI_CHAR,rank,20,interComm,&pendingRequest[rank]); 
     101              ep_lib::MPI_Irecv(addr,count,MPI_CHAR,rank,20,interComm,&pendingRequest[rank]); 
    101102              bufferRequest[rank]=addr; 
    102103            } 
     
    109110  void CContextServer::checkPendingRequest(void) 
    110111  { 
    111     map<int,MPI_Request>::iterator it; 
     112    map<int,ep_lib::MPI_Request>::iterator it; 
    112113    list<int> recvRequest; 
    113114    list<int>::iterator itRecv; 
     
    115116    int flag; 
    116117    int count; 
    117     MPI_Status status; 
    118  
    119     for(it=pendingRequest.begin();it!=pendingRequest.end();it++) 
     118    ep_lib::MPI_Status status; 
     119 
     120    for(it=pendingRequest.begin();it!=pendingRequest.end();++it) 
    120121    { 
    121122      rank=it->first; 
    122123      traceOff(); 
    123       MPI_Test(& it->second, &flag, &status); 
     124      ep_lib::MPI_Test(& it->second, &flag, &status); 
    124125      traceOn(); 
    125126      if (flag==true) 
    126127      { 
    127128        recvRequest.push_back(rank); 
    128         MPI_Get_count(&status,MPI_CHAR,&count); 
     129        ep_lib::MPI_Get_count(&status,MPI_CHAR,&count); 
    129130        processRequest(rank,bufferRequest[rank],count); 
    130131      } 
     
    219220    { 
    220221      finished=true; 
     222      #pragma omp critical (_output) 
    221223      info(20)<<"Server Side context <"<<context->getId()<<"> finalized"<<endl; 
    222224      std::map<int, StdSize>::const_iterator itbMap = mapBufferSize_.begin(), 
     
    225227      for (itMap = itbMap; itMap != iteMap; ++itMap) 
    226228      { 
    227         report(10)<< " Memory report : Context <"<<context->getId()<<"> : server side : memory used for buffer of each connection to client" << endl 
    228                   << "  +) With client of rank " << itMap->first << " : " << itMap->second << " bytes " << endl; 
     229        //report(10)<< " Memory report : Context <"<<context->getId()<<"> : server side : memory used for buffer of each connection to client" << endl 
     230        //          << "  +) With client of rank " << itMap->first << " : " << itMap->second << " bytes " << endl; 
    229231        totalBuf += itMap->second; 
    230232      } 
    231233      context->finalize(); 
    232       report(0)<< " Memory report : Context <"<<context->getId()<<"> : server side : total memory used for buffer "<<totalBuf<<" bytes"<<endl; 
     234      //report(0)<< " Memory report : Context <"<<context->getId()<<"> : server side : total memory used for buffer "<<totalBuf<<" bytes"<<endl; 
    233235    } 
    234236    else if (event.classId==CContext::GetType()) CContext::dispatchEvent(event); 
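
The listen()/checkPendingRequest() hunks keep the same protocol and only swap the MPI types: probe for a tag-20 message, size it with MPI_Get_count, and post a non-blocking receive into the server buffer whose request is tested later. A condensed sketch against plain MPI, with the bookkeeping maps named as in the diff (the EP build would use the ep_lib:: spellings shown above):

    #include <mpi.h>
    #include <map>

    void listenOne(int rank, MPI_Comm interComm,
                   std::map<int, MPI_Request>& pendingRequest,
                   std::map<int, char*>& bufferRequest,
                   char* recvArea)                        // space obtained from the CServerBuffer
    {
      int flag = 0, count = 0;
      MPI_Status status;
      MPI_Iprobe(rank, 20, interComm, &flag, &status);    // anything pending from this client?
      if (flag)
      {
        MPI_Get_count(&status, MPI_CHAR, &count);         // size of the incoming event
        MPI_Irecv(recvArea, count, MPI_CHAR, rank, 20, interComm, &pendingRequest[rank]);
        bufferRequest[rank] = recvArea;                   // processed once MPI_Test succeeds
      }
    }
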
  • XIOS/dev/branch_yushan_merged/src/context_server.hpp

    r1033 r1134  
    1414    public: 
    1515 
    16     CContextServer(CContext* parent,MPI_Comm intraComm,MPI_Comm interComm) ; 
     16    CContextServer(CContext* parent, ep_lib::MPI_Comm intraComm, ep_lib::MPI_Comm interComm) ; 
    1717    bool eventLoop(bool enableEventsProcessing = true); 
    1818    void listen(void) ; 
     
    2525    bool hasFinished(void); 
    2626 
    27     MPI_Comm intraComm ; 
     27    ep_lib::MPI_Comm intraComm ; 
    2828    int intraCommSize ; 
    2929    int intraCommRank ; 
    3030 
    31     MPI_Comm interComm ; 
     31    ep_lib::MPI_Comm interComm ; 
    3232    int commSize ; 
    3333 
    3434    map<int,CServerBuffer*> buffers ; 
    35     map<int,MPI_Request> pendingRequest ; 
     35    map<int,ep_lib::MPI_Request> pendingRequest ; 
    3636    map<int,char*> bufferRequest ; 
    3737 
  • XIOS/dev/branch_yushan_merged/src/cxios.cpp

    r1029 r1134  
    1414namespace xios 
    1515{ 
    16   string CXios::rootFile="./iodef.xml" ; 
    17   string CXios::xiosCodeId="xios.x" ; 
    18   string CXios::clientFile="./xios_client"; 
    19   string CXios::serverFile="./xios_server"; 
     16 
     17  extern int test_omp_rank; 
     18  #pragma omp threadprivate(test_omp_rank) 
     19 
     20  const string CXios::rootFile="./iodef.xml" ; 
     21  const string CXios::xiosCodeId="xios.x" ; 
     22  const string CXios::clientFile="./xios_client"; 
     23  const string CXios::serverFile="./xios_server"; 
     24 
    2025 
    2126  bool CXios::isClient ; 
    2227  bool CXios::isServer ; 
     28 
     29 
    2330  MPI_Comm CXios::globalComm ; 
     31 
     32   
    2433  bool CXios::usingOasis ; 
    2534  bool CXios::usingServer = false; 
     35 
     36 
    2637  double CXios::bufferSizeFactor = 1.0; 
    2738  const double CXios::defaultBufferSizeFactor = 1.0; 
    2839  StdSize CXios::minBufferSize = 1024 * sizeof(double); 
     40 
     41 
    2942  bool CXios::printLogs2Files; 
    3043  bool CXios::isOptPerformance = true; 
     
    3649  { 
    3750    set_new_handler(noMemory); 
    38     parseFile(rootFile); 
     51     
     52     
     53    #pragma omp critical 
     54    { 
     55      parseFile(rootFile);   
     56    } 
     57    #pragma omp barrier 
    3958    parseXiosConfig(); 
    4059  } 
     
    6887      ERROR("CXios::parseXiosConfig()", "recv_field_timeout cannot be negative."); 
    6988 
    70     globalComm=MPI_COMM_WORLD ; 
     89  
     90    int num_ep; 
     91    if(isClient)   
     92    {  
     93      num_ep = omp_get_num_threads(); 
     94    } 
     95     
     96    if(isServer)  
     97    {  
     98      num_ep = omp_get_num_threads(); 
     99    } 
     100     
     101    MPI_Info info; 
     102    #pragma omp master 
     103    { 
     104      MPI_Comm *ep_comm; 
     105      MPI_Comm_create_endpoints(MPI_COMM_WORLD, num_ep, info, ep_comm);  // servers should reach here too. 
     106      passage = ep_comm;   
     107    } 
     108     
     109    #pragma omp barrier 
     110 
     111       
     112    CXios::globalComm = passage[omp_get_thread_num()]; 
     113 
     114    int tmp_rank; 
     115    MPI_Comm_rank(CXios::globalComm, &tmp_rank); 
     116 
     117     
     118    test_omp_rank = tmp_rank; 
     119     
    71120  } 
    72121 
     
    79128  void CXios::initClientSide(const string& codeId, MPI_Comm& localComm, MPI_Comm& returnComm) 
    80129  { 
     130    isClient = true; 
     131     
    81132    initialize() ; 
    82133 
    83     isClient = true; 
    84  
    85134    CClient::initialize(codeId,localComm,returnComm) ; 
     135 
    86136    if (CClient::getRank()==0) globalRegistry = new CRegistry(returnComm) ; 
    87137 
     
    92142    if (printLogs2Files) 
    93143    { 
     144      #pragma omp critical 
    94145      CClient::openInfoStream(clientFile); 
    95146      CClient::openErrorStream(clientFile); 
     
    107158     if (CClient::getRank()==0) 
    108159     { 
     160       #pragma omp critical (_output) 
    109161       info(80)<<"Write data base Registry"<<endl<<globalRegistry->toString()<<endl ; 
    110162       globalRegistry->toFile("xios_registry.bin") ; 
     
    123175  void CXios::initServer() 
    124176  { 
     177    int initialized; 
     178    MPI_Initialized(&initialized); 
     179    if (initialized) CServer::is_MPI_Initialized=true ; 
     180    else CServer::is_MPI_Initialized=false ; 
     181       
     182  
     183    if(!CServer::is_MPI_Initialized) 
     184    { 
     185      MPI_Init(NULL, NULL); 
     186    } 
     187       
    125188    set_new_handler(noMemory); 
    126189    std::set<StdString> parseList; 
     
    133196  void CXios::initServerSide(void) 
    134197  { 
    135     initServer(); 
     198     
    136199    isClient = false; 
    137200    isServer = true; 
    138  
     201     
     202    initServer(); 
     203     
     204     
    139205    // Initialize all aspects MPI 
    140206    CServer::initialize(); 
     
    162228       delete globalRegistry ; 
    163229     } 
     230 
    164231    CServer::finalize(); 
     232         
    165233    CServer::closeInfoStream(); 
    166234  } 
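
The new block in parseXiosConfig() is the heart of the endpoint setup: the master thread asks the bundled src_ep_dev library for one endpoint communicator per OpenMP thread, publishes the returned array through the shared pointer 'passage', and each thread then adopts its own entry as CXios::globalComm. A sketch of that sequence, assuming MPI_Comm_create_endpoints has the signature used in the hunk and that the function is called from inside the application's parallel region:

    #include <omp.h>
    #include "mpi.hpp"            // EP build: MPI_Comm and MPI_Info come from ep_lib

    static MPI_Comm* passage;     // shared by the threads of one MPI process

    void createPerThreadComms()
    {
      int num_ep = omp_get_num_threads();   // one endpoint per thread of this process
      MPI_Info info;                        // passed through unset, as in the diff
      #pragma omp master
      {
        MPI_Comm* ep_comm;
        MPI_Comm_create_endpoints(MPI_COMM_WORLD, num_ep, info, ep_comm);
        passage = ep_comm;                  // publish the array to the other threads
      }
      #pragma omp barrier                   // everyone waits for the master

      MPI_Comm myGlobalComm = passage[omp_get_thread_num()];
      int rank;
      MPI_Comm_rank(myGlobalComm, &rank);   // endpoint rank of this thread
    }
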
  • XIOS/dev/branch_yushan_merged/src/cxios.hpp

    r1029 r1134  
    55#include "mpi.hpp" 
    66#include "registry.hpp" 
     7#include "log.hpp" 
    78 
    89namespace xios 
     
    1415  { 
    1516    public: 
    16      static void initialize(void) ; 
    17      static void initClientSide(const string & codeId, MPI_Comm& localComm, MPI_Comm& returnComm) ; 
    18      static void initServerSide(void) ; 
    19      static void clientFinalize(void) ; 
    20      static void parseFile(const string& filename) ; 
     17      static void initialize(void) ; 
     18      static void initClientSide(const string & codeId, ep_lib::MPI_Comm& localComm, ep_lib::MPI_Comm& returnComm) ; 
     19      static void initServerSide(void) ; 
     20      static void clientFinalize(void) ; 
     21      static void parseFile(const string& filename) ; 
    2122 
    22      template <typename T> 
    23      static T getin(const string& id,const T& defaultValue) ; 
     23      template <typename T> 
     24      static T getin(const string& id,const T& defaultValue) ; 
    2425 
    25      template <typename T> 
    26      static T getin(const string& id) ; 
     26      template <typename T> 
     27      static T getin(const string& id) ; 
    2728 
    2829    public: 
    29      static string rootFile ; //!< Configuration filename 
    30      static string xiosCodeId ; //!< Identity for XIOS 
    31      static string clientFile; //!< Filename template for client 
    32      static string serverFile; //!< Filename template for server 
     30      static const string rootFile; //!< Configuration filename 
     31      static const string xiosCodeId ; //!< Identity for XIOS 
     32      static const string clientFile; //!< Filename template for client 
     33      static const string serverFile; //!< Filename template for server 
    3334 
    34      static bool isClient ; //!< Check if xios is client 
    35      static bool isServer ; //!< Check if xios is server 
     35      static bool isClient ; //!< Check if xios is client 
     36      static bool isServer ; //!< Check if xios is server 
     37      #pragma omp threadprivate(isClient, isServer) 
    3638 
    37      static MPI_Comm globalComm ; //!< Global communicator 
     39      static MPI_Comm globalComm ; //!< Global communicator 
     40      #pragma omp threadprivate(globalComm) 
    3841 
    39      static bool printLogs2Files; //!< Printing out logs into files 
    40      static bool usingOasis ; //!< Using Oasis 
    41      static bool usingServer ; //!< Using server (server mode) 
    42      static double bufferSizeFactor; //!< Factor used to tune the buffer size 
    43      static const double defaultBufferSizeFactor; //!< Default factor value 
    44      static StdSize minBufferSize; //!< Minimum buffer size 
    45      static bool isOptPerformance; //!< Check if buffer size is for performance (as large as possible) 
    46      static CRegistry* globalRegistry ; //!< global registry which is wrote by the root process of the servers 
    47      static double recvFieldTimeout; //!< Time to wait for data before issuing an error when receiving a field 
    48  
     42      static bool printLogs2Files; //!< Printing out logs into files 
     43      static bool usingOasis ; //!< Using Oasis 
     44      static bool usingServer ; //!< Using server (server mode) 
     45      static double bufferSizeFactor; //!< Factor used to tune the buffer size 
     46      static const double defaultBufferSizeFactor; //!< Default factor value 
     47      static StdSize minBufferSize; //!< Minimum buffer size 
     48      static bool isOptPerformance; //!< Check if buffer size is for performance (as large as possible) 
     49      #pragma omp threadprivate(printLogs2Files, usingOasis, usingServer, bufferSizeFactor, minBufferSize, isOptPerformance) 
     50       
     51      static CRegistry* globalRegistry ; //!< global registry which is wrote by the root process of the servers 
     52      static double recvFieldTimeout; 
     53      #pragma omp threadprivate(recvFieldTimeout) 
     54       
     55       
    4956    public: 
    5057     //! Setting xios to use server mode 
  • XIOS/dev/branch_yushan_merged/src/dht_auto_indexing.cpp

    r1002 r1134  
    2222 
    2323  CDHTAutoIndexing::CDHTAutoIndexing(const CArray<size_t,1>& hashValue, 
    24                                      const MPI_Comm& clientIntraComm) 
     24                                     const ep_lib::MPI_Comm& clientIntraComm) 
    2525    : CClientClientDHTTemplate<size_t>(clientIntraComm) 
    2626  { 
     
    5858  */ 
    5959  CDHTAutoIndexing::CDHTAutoIndexing(Index2VectorInfoTypeMap& hashInitMap, 
    60                                      const MPI_Comm& clientIntraComm) 
     60                                     const ep_lib::MPI_Comm& clientIntraComm) 
    6161    : CClientClientDHTTemplate<size_t>(clientIntraComm) 
    6262  { 
  • XIOS/dev/branch_yushan_merged/src/dht_auto_indexing.hpp

    r924 r1134  
    1212 
    1313#include "client_client_dht_template.hpp" 
     14#ifdef _usingEP 
     15#include "ep_declaration.hpp" 
     16#endif 
    1417 
    1518namespace xios 
     
    2528 
    2629    CDHTAutoIndexing(const CArray<size_t,1>& hashValue, 
    27                      const MPI_Comm& clientIntraComm); 
     30                     const ep_lib::MPI_Comm& clientIntraComm); 
    2831 
    2932    CDHTAutoIndexing(Index2VectorInfoTypeMap& hashInitMap, 
    30                      const MPI_Comm& clientIntraComm); 
     33                     const ep_lib::MPI_Comm& clientIntraComm); 
    3134 
    3235    size_t getNbIndexesGlobal() const; 
  • XIOS/dev/branch_yushan_merged/src/event_scheduler.cpp

    r591 r1134  
    132132    while(received) 
    133133    { 
     134      #ifdef _usingEP 
     135      MPI_Iprobe(-1,1,communicator,&received, &status) ; 
     136      #else 
    134137      MPI_Iprobe(MPI_ANY_SOURCE,1,communicator,&received, &status) ; 
     138      #endif 
    135139      if (received) 
    136140      { 
    137141        recvRequest=new SPendingRequest ; 
     142        #ifdef _usingEP 
     143        MPI_Irecv(recvRequest->buffer, 3, MPI_UNSIGNED_LONG, -1, 1, communicator, &(recvRequest->request)) ; 
     144        #else 
    138145        MPI_Irecv(recvRequest->buffer, 3, MPI_UNSIGNED_LONG, MPI_ANY_SOURCE, 1, communicator, &(recvRequest->request)) ; 
     146        #endif 
    139147        pendingRecvParentRequest.push(recvRequest) ; 
    140148      } 
     
    174182    while(received) 
    175183    { 
     184      #ifdef _usingEP 
     185      MPI_Iprobe(-1,0,communicator,&received, &status) ; 
     186      #else 
    176187      MPI_Iprobe(MPI_ANY_SOURCE,0,communicator,&received, &status) ; 
     188      #endif 
    177189      if (received) 
    178190      { 
    179191        recvRequest=new SPendingRequest ; 
     192        #ifdef _usingEP 
     193        MPI_Irecv(recvRequest->buffer, 3, MPI_UNSIGNED_LONG, -1, 0, communicator, &recvRequest->request) ; 
     194        #else 
    180195        MPI_Irecv(recvRequest->buffer, 3, MPI_UNSIGNED_LONG, MPI_ANY_SOURCE, 0, communicator, &recvRequest->request) ; 
     196        #endif 
    181197        pendingRecvChildRequest.push_back(recvRequest) ; 
    182198      } 
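
Both receive loops in the event scheduler gain the same guard: with -D_usingEP the wildcard source is spelled -1, because the endpoint layer apparently does not accept MPI_ANY_SOURCE, while the plain MPI build keeps the standard constant. A small sketch of that pattern (the helper name and buffer layout below are illustrative; the real loops live in checkParentRequest/checkChildRequest):

    #include "mpi.hpp"

    #ifdef _usingEP
      static const int WILDCARD_SOURCE = -1;              // ep_lib convention, per the hunks above
    #else
      static const int WILDCARD_SOURCE = MPI_ANY_SOURCE;
    #endif

    // Drain every pending 3-word message carrying 'tag' on 'comm'.
    void drainPending(MPI_Comm comm, int tag, unsigned long buf[3])
    {
      int received = 1;
      MPI_Status status;
      while (received)
      {
        MPI_Iprobe(WILDCARD_SOURCE, tag, comm, &received, &status);
        if (received)
          MPI_Recv(buf, 3, MPI_UNSIGNED_LONG, WILDCARD_SOURCE, tag, comm, &status);
      }
    }
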
  • XIOS/dev/branch_yushan_merged/src/event_scheduler.hpp

    r591 r1134  
    44#include "xios_spl.hpp" 
    55#include "mpi.hpp" 
     6#ifdef _usingEP 
     7#include "ep_declaration.hpp" 
     8#endif 
     9 
    610 
    711namespace xios 
  • XIOS/dev/branch_yushan_merged/src/filter/spatial_transform_filter.cpp

    r1076 r1134  
    6565  } 
    6666 
    67   std::map<CGridTransformation*, boost::shared_ptr<CSpatialTransformFilterEngine> > CSpatialTransformFilterEngine::engines; 
     67  std::map<CGridTransformation*, boost::shared_ptr<CSpatialTransformFilterEngine> > *CSpatialTransformFilterEngine::engines_ptr = 0; 
    6868 
    6969  CSpatialTransformFilterEngine* CSpatialTransformFilterEngine::get(CGridTransformation* gridTransformation) 
     
    7373            "Impossible to get the requested engine, the grid transformation is invalid."); 
    7474 
    75     std::map<CGridTransformation*, boost::shared_ptr<CSpatialTransformFilterEngine> >::iterator it = engines.find(gridTransformation); 
    76     if (it == engines.end()) 
     75    if(engines_ptr == NULL) engines_ptr = new std::map<CGridTransformation*, boost::shared_ptr<CSpatialTransformFilterEngine> >; 
     76 
     77    std::map<CGridTransformation*, boost::shared_ptr<CSpatialTransformFilterEngine> >::iterator it = engines_ptr->find(gridTransformation); 
     78    if (it == engines_ptr->end()) 
    7779    { 
    7880      boost::shared_ptr<CSpatialTransformFilterEngine> engine(new CSpatialTransformFilterEngine(gridTransformation)); 
    79       it = engines.insert(std::make_pair(gridTransformation, engine)).first; 
     81      it = engines_ptr->insert(std::make_pair(gridTransformation, engine)).first; 
    8082    } 
    8183 
     
    153155 
    154156      idxSendBuff = 0; 
    155       std::vector<MPI_Request> sendRecvRequest; 
     157      std::vector<ep_lib::MPI_Request> sendRecvRequest; 
    156158      for (itSend = itbSend; itSend != iteSend; ++itSend, ++idxSendBuff) 
    157159      { 
     
    163165          sendBuff[idxSendBuff][idx] = dataCurrentSrc(localIndex_p(idx)); 
    164166        } 
    165         sendRecvRequest.push_back(MPI_Request()); 
     167        sendRecvRequest.push_back(ep_lib::MPI_Request()); 
    166168        MPI_Isend(sendBuff[idxSendBuff], countSize, MPI_DOUBLE, destRank, 12, client->intraComm, &sendRecvRequest.back()); 
    167169      } 
     
    181183        int srcRank = itRecv->first; 
    182184        int countSize = itRecv->second.size(); 
    183         sendRecvRequest.push_back(MPI_Request()); 
     185        sendRecvRequest.push_back(ep_lib::MPI_Request()); 
    184186        MPI_Irecv(recvBuff + currentBuff, countSize, MPI_DOUBLE, srcRank, 12, client->intraComm, &sendRecvRequest.back()); 
    185187        currentBuff += countSize; 
    186188      } 
    187       std::vector<MPI_Status> status(sendRecvRequest.size()); 
     189      std::vector<ep_lib::MPI_Status> status(sendRecvRequest.size()); 
    188190      MPI_Waitall(sendRecvRequest.size(), &sendRecvRequest[0], &status[0]); 
    189191 
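
The transform engine's data exchange keeps its structure: one Isend per destination, one Irecv per source, every request collected in a single vector, then one Waitall over the batch; only the request and status types switch to ep_lib. A stripped-down sketch of the same exchange for a single peer pair, using tag 12 as in the diff:

    #include <mpi.h>
    #include <vector>

    void exchangeOnePair(double* sendBuf, int sendCount, int destRank,
                         double* recvBuf, int recvCount, int srcRank, MPI_Comm comm)
    {
      std::vector<MPI_Request> requests;      // ep_lib::MPI_Request under -D_usingEP
      requests.push_back(MPI_Request());
      MPI_Isend(sendBuf, sendCount, MPI_DOUBLE, destRank, 12, comm, &requests.back());
      requests.push_back(MPI_Request());
      MPI_Irecv(recvBuf, recvCount, MPI_DOUBLE, srcRank, 12, comm, &requests.back());

      std::vector<MPI_Status> status(requests.size());
      MPI_Waitall(static_cast<int>(requests.size()), &requests[0], &status[0]);
    }
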
  • XIOS/dev/branch_yushan_merged/src/filter/spatial_transform_filter.hpp

    r1018 r1134  
    104104 
    105105      //! The allocated engines 
    106       static std::map<CGridTransformation*, boost::shared_ptr<CSpatialTransformFilterEngine> > engines; 
     106      //static std::map<CGridTransformation*, boost::shared_ptr<CSpatialTransformFilterEngine> > engines; 
     107      static std::map<CGridTransformation*, boost::shared_ptr<CSpatialTransformFilterEngine> > *engines_ptr; 
     108      #pragma omp threadprivate(engines_ptr) 
     109       
    107110  }; // class CSpatialTransformFilterEngine 
    108111} // namespace xios 
  • XIOS/dev/branch_yushan_merged/src/group_factory.cpp

    r501 r1134  
    44{ 
    55   /// ////////////////////// Définitions ////////////////////// /// 
    6    StdString CGroupFactory::CurrContext(""); 
     6   StdString *CGroupFactory::CurrContext_ptr = new StdString; 
    77 
    88   void CGroupFactory::SetCurrentContextId(const StdString & context) 
    9    {  
    10       CGroupFactory::CurrContext = context; 
     9   {   
     10      if(CGroupFactory::CurrContext_ptr == NULL ) CGroupFactory::CurrContext_ptr = new StdString;  
     11      CGroupFactory::CurrContext_ptr->assign(context); 
    1112   } 
    1213 
    1314   StdString & CGroupFactory::GetCurrentContextId(void) 
    1415   {  
    15       return (CGroupFactory::CurrContext); 
     16      return (*CGroupFactory::CurrContext_ptr); 
    1617   } 
    1718 
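
This is the simplest instance of a pattern repeated throughout the changeset (CGroupFactory::CurrContext, CSpatialTransformFilterEngine::engines, the CAxis/CDomain transformation maps, CContext::root): a static object becomes a threadprivate static pointer that is allocated lazily, so each OpenMP thread owns an independent copy and no constructor runs before a thread needs it. A minimal stand-alone sketch of the idiom (class and member names are illustrative):

    #include <string>

    class CFactoryLike
    {
      public:
        static void SetCurrentContextId(const std::string& context)
        {
          if (CurrContext_ptr == NULL) CurrContext_ptr = new std::string;  // lazy, per thread
          CurrContext_ptr->assign(context);
        }
        static std::string& GetCurrentContextId()
        {
          return *CurrContext_ptr;            // caller must have set it on this thread first
        }
      private:
        static std::string* CurrContext_ptr;
        #pragma omp threadprivate(CurrContext_ptr)   // one pointer per OpenMP thread
    };

    std::string* CFactoryLike::CurrContext_ptr = new std::string;

The NULL check matters because only the thread that runs the dynamic initializer is guaranteed a ready-made object; the other threads allocate their copy on first use.
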
  • XIOS/dev/branch_yushan_merged/src/group_factory.hpp

    r591 r1134  
    6969 
    7070         /// Propriétés statiques /// 
    71          static StdString CurrContext; 
     71         static StdString *CurrContext_ptr; 
     72         #pragma omp threadprivate(CurrContext_ptr) 
    7273 
    7374   }; // class CGroupFactory 
  • XIOS/dev/branch_yushan_merged/src/indent.hpp

    r501 r1134  
    1010    public: 
    1111    static int defaultIncSize; 
     12    #pragma omp threadprivate(defaultIncSize) 
     13     
    1214    static int index ; 
     15    #pragma omp threadprivate(index) 
     16     
    1317    int incSize ; 
    1418    int offset ; 
  • XIOS/dev/branch_yushan_merged/src/indent_xml.cpp

    r501 r1134  
    1515   { 
    1616      static unsigned int LineNB = 1; 
     17      #pragma omp threadprivate(LineNB) 
     18       
    1719      if (CIndent::WithLine) out << LineNB++ << ". "; 
    1820      for(unsigned int i = 0; i < CIndent::Indent; out << CIndent::Increm , i++){} 
  • XIOS/dev/branch_yushan_merged/src/indent_xml.hpp

    r591 r1134  
    2222         /// Propriétés  statiques /// 
    2323         static unsigned int Indent; 
     24         #pragma omp threadprivate(Indent) 
     25 
    2426         static StdString    Increm; 
     27         #pragma omp threadprivate(Increm) 
     28 
    2529         static bool         WithLine; 
     30         #pragma omp threadprivate(WithLine) 
    2631 
    2732   }; // class CIndent 
  • XIOS/dev/branch_yushan_merged/src/interface/c/icdata.cpp

    r961 r1134  
    1111 
    1212#include "xios.hpp" 
    13 #include "oasis_cinterface.hpp" 
     13//#include "oasis_cinterface.hpp" 
    1414 
    1515#include "attribute_template.hpp" 
     
    2323#include "context.hpp" 
    2424#include "context_client.hpp" 
    25 #include "mpi.hpp" 
     25#include "mpi_std.hpp" 
    2626#include "timer.hpp" 
    2727#include "array_new.hpp" 
     
    5555   { 
    5656      std::string str; 
    57       MPI_Comm local_comm; 
    58       MPI_Comm return_comm; 
     57      ep_lib::MPI_Comm local_comm; 
     58      ep_lib::MPI_Comm return_comm; 
     59       
     60      ep_lib::fc_comm_map.clear(); 
    5961 
    6062      if (!cstr2string(client_id, len_client_id, str)) return; 
     
    6264      int initialized; 
    6365      MPI_Initialized(&initialized); 
     66 
     67      #ifdef _usingEP 
     68      if (initialized) local_comm = ep_lib::EP_Comm_f2c(static_cast< int >(*f_local_comm)); 
     69      else local_comm = MPI_COMM_NULL; 
     70      #else 
    6471      if (initialized) local_comm=MPI_Comm_f2c(*f_local_comm); 
    65       else local_comm=MPI_COMM_NULL; 
     72      else local_comm = MPI_COMM_NULL; 
     73      #endif 
     74       
     75      
     76 
    6677      CXios::initClientSide(str, local_comm, return_comm); 
    67       *f_return_comm=MPI_Comm_c2f(return_comm); 
     78 
     79      #ifdef _usingEP 
     80      *f_return_comm = ep_lib::EP_Comm_c2f(return_comm); 
     81      #else 
     82      *f_return_comm = MPI_Comm_c2f(return_comm); 
     83      #endif 
     84 
    6885      CTimer::get("XIOS init").suspend(); 
    6986      CTimer::get("XIOS").suspend(); 
     
    7390   { 
    7491     std::string str; 
    75      MPI_Comm comm; 
     92     ep_lib::MPI_Comm comm; 
    7693 
    7794     if (!cstr2string(context_id, len_context_id, str)) return; 
    7895     CTimer::get("XIOS").resume(); 
    7996     CTimer::get("XIOS init context").resume(); 
    80      comm=MPI_Comm_f2c(*f_comm); 
    81      CClient::registerContext(str, comm); 
     97     comm = ep_lib::EP_Comm_f2c(static_cast< int >(*f_comm)); 
     98 
     99     CClient::registerContext(str,comm); 
     100           
    82101     CTimer::get("XIOS init context").suspend(); 
    83102     CTimer::get("XIOS").suspend(); 
     
    100119     CTimer::get("XIOS close definition").resume(); 
    101120     CContext* context = CContext::getCurrent(); 
     121      
    102122     context->closeDefinition(); 
     123      
    103124     CTimer::get("XIOS close definition").suspend(); 
    104125     CTimer::get("XIOS").suspend(); 
     
    109130     CTimer::get("XIOS").resume(); 
    110131     CTimer::get("XIOS context finalize").resume(); 
     132      
     133      
     134      
    111135     CContext* context = CContext::getCurrent(); 
    112136     context->finalize(); 
     
    429453      CContext* context = CContext::getCurrent(); 
    430454      if (!context->hasServer && !context->client->isAttachedModeEnabled()) 
    431         context->checkBuffersAndListen(); 
     455      { 
     456        context->checkBuffersAndListen(); 
     457      }   
    432458 
    433459      CArray<double, 3>data(data_k8, shape(data_Xsize, data_Ysize, data_Zsize), neverDeleteData); 
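
The client initialisation now converts the Fortran communicator handle through the endpoint library when -D_usingEP is set, and through the standard MPI_Comm_f2c/MPI_Comm_c2f otherwise. A hedged sketch of that conversion, with the handle passed as a plain int since the EP header may not define MPI_Fint (the EP_Comm_f2c signature is taken from the hunk above):

    #include "mpi.hpp"

    // Translate a Fortran communicator handle into the C-side communicator type.
    MPI_Comm fortranHandleToComm(int f_comm, bool mpiInitialized)
    {
      if (!mpiInitialized) return MPI_COMM_NULL;
    #ifdef _usingEP
      return ep_lib::EP_Comm_f2c(f_comm);       // endpoint handle table, as in icdata.cpp
    #else
      return MPI_Comm_f2c(f_comm);              // standard Fortran-to-C conversion
    #endif
    }
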
  • XIOS/dev/branch_yushan_merged/src/interface/c/oasis_cinterface.cpp

    r501 r1134  
    11#include "oasis_cinterface.hpp" 
    22#include <string> 
    3 #include "mpi.hpp" 
     3//#include "mpi_std.hpp" 
    44 
    55namespace xios 
     
    2626     
    2727    fxios_oasis_get_localcomm(&f_comm) ; 
     28    #ifdef _usingEP 
     29    comm=EP_Comm_f2c(f_comm.mpi_fint) ; 
     30    #else 
    2831    comm=MPI_Comm_f2c(f_comm) ; 
     32    #endif 
    2933  } 
    3034  
     
    3438     
    3539    fxios_oasis_get_intracomm(&f_comm,server_id.data(),server_id.size()) ; 
     40    #ifdef _usingEP 
     41    comm_client_server=EP_Comm_f2c(f_comm.mpi_fint) ; 
     42    #else 
    3643    comm_client_server=MPI_Comm_f2c(f_comm) ; 
     44    #endif 
    3745  } 
    3846  
     
    4250     
    4351    fxios_oasis_get_intercomm(&f_comm,server_id.data(),server_id.size()) ; 
     52    #ifdef _usingEP 
     53    comm_client_server=EP_Comm_f2c(f_comm.mpi_fint) ; 
     54    #else 
    4455    comm_client_server=MPI_Comm_f2c(f_comm) ; 
     56    #endif 
    4557  } 
    4658} 
  • XIOS/dev/branch_yushan_merged/src/interface/fortran/idata.F90

    r965 r1134  
    465465   INTEGER :: f_return_comm 
    466466 
     467 
    467468      IF (PRESENT(local_comm)) THEN 
    468469        f_local_comm=local_comm 
  • XIOS/dev/branch_yushan_merged/src/io/inetcdf4.cpp

    r948 r1134  
    1818    } 
    1919    mpi = comm && !multifile; 
     20    MPI_Info m_info; 
    2021 
    2122    // The file format will be detected automatically by NetCDF, it is safe to always set NC_MPIIO 
    2223    // even if Parallel NetCDF ends up being used. 
    2324    if (mpi) 
    24       CNetCdfInterface::openPar(filename, NC_NOWRITE | NC_MPIIO, *comm, MPI_INFO_NULL, this->ncidp); 
     25      CNetCdfInterface::openPar(filename, NC_NOWRITE | NC_MPIIO, *comm, m_info, this->ncidp); 
    2526    else 
    2627      CNetCdfInterface::open(filename, NC_NOWRITE, this->ncidp); 
  • XIOS/dev/branch_yushan_merged/src/io/inetcdf4.hpp

    r802 r1134  
    77#include "array_new.hpp" 
    88 
    9 #include "mpi.hpp" 
     9#include "mpi_std.hpp" 
    1010#include "netcdf.hpp" 
    1111 
  • XIOS/dev/branch_yushan_merged/src/io/nc4_data_output.cpp

    r1108 r1134  
    2828      CNc4DataOutput::CNc4DataOutput 
    2929         (CFile* file, const StdString & filename, bool exist, bool useClassicFormat, bool useCFConvention, 
    30           MPI_Comm comm_file, bool multifile, bool isCollective, const StdString& timeCounterName) 
     30          ep_lib::MPI_Comm comm_file, bool multifile, bool isCollective, const StdString& timeCounterName) 
    3131            : SuperClass() 
    3232            , SuperClassWriter(filename, exist, useClassicFormat, useCFConvention, &comm_file, multifile, timeCounterName) 
     
    7878 
    7979 
    80         StdString dimXid, dimYid ; 
     80         StdString dimXid, dimYid ; 
    8181 
    8282        nc_type typePrec ; 
     
    463463      StdString domainName = domain->name; 
    464464      domain->assignMesh(domainName, domain->nvertex); 
    465       domain->mesh->createMeshEpsilon(server->intraComm, domain->lonvalue_srv, domain->latvalue_srv, domain->bounds_lon_srv, domain->bounds_lat_srv); 
     465      domain->mesh->createMeshEpsilon(static_cast<MPI_Comm>(server->intraComm.mpi_comm), domain->lonvalue_srv, domain->latvalue_srv, domain->bounds_lon_srv, domain->bounds_lat_srv); 
    466466 
    467467      StdString node_x = domainName + "_node_x"; 
  • XIOS/dev/branch_yushan_merged/src/io/nc4_data_output.hpp

    r1096 r1134  
    2727               (CFile* file, const StdString & filename, bool exist, bool useClassicFormat, 
    2828                bool useCFConvention, 
    29                 MPI_Comm comm_file, bool multifile, bool isCollective = true, 
     29                ep_lib::MPI_Comm comm_file, bool multifile, bool isCollective = true, 
    3030                const StdString& timeCounterName = "time_counter"); 
    3131 
     
    116116 
    117117            /// Propriétés privées /// 
    118             MPI_Comm comm_file; 
     118            ep_lib::MPI_Comm comm_file; 
    119119            const StdString filename; 
    120120            std::map<Time, StdSize> timeToRecordCache; 
  • XIOS/dev/branch_yushan_merged/src/io/netCdfInterface.hpp

    r811 r1134  
    1616#endif 
    1717 
    18 #include "mpi.hpp" 
     18#include "mpi_std.hpp" 
    1919#include "netcdf.hpp" 
    2020 
  • XIOS/dev/branch_yushan_merged/src/io/netcdf.hpp

    r685 r1134  
    11#ifndef __XIOS_NETCDF_HPP__ 
    22#define __XIOS_NETCDF_HPP__ 
    3 #include "mpi.hpp" 
     3#include "mpi_std.hpp" 
    44#define MPI_INCLUDED 
    55#include <netcdf.h> 
     
    1818extern "C" 
    1919{ 
    20 include <netcdf_par.h> 
     20  #include <netcdf_par.h> 
    2121} 
    2222#  endif 
     
    3030namespace xios 
    3131{ 
    32   inline int nc_create_par(const char *path, int cmode, MPI_Comm comm, MPI_Info info,int *ncidp) 
     32  inline int nc_create_par(const char *path, int cmode, ep_lib::MPI_Comm comm, MPI_Info info,int *ncidp) 
    3333  { 
    3434#if defined(USING_NETCDF_PAR) 
    35     return ::nc_create_par(path, cmode, comm, info, ncidp) ; 
     35    return ::nc_create_par(path, cmode, static_cast<MPI_Comm>(comm.mpi_comm), info, ncidp) ; 
    3636#else 
    3737    ERROR("int nc_create_par(const char *path, int cmode, MPI_Comm comm, MPI_Info info,int *ncidp)", 
     
    4141  } 
    4242 
    43   inline int nc_open_par(const char *path, int mode, MPI_Comm comm, MPI_Info info,int *ncidp) 
     43  inline int nc_open_par(const char *path, int mode, ep_lib::MPI_Comm comm, MPI_Info info,int *ncidp) 
    4444  { 
    4545#if defined(USING_NETCDF_PAR) 
    46     return ::nc_open_par(path, mode, comm, info, ncidp) ; 
     46    return ::nc_open_par(path, mode, static_cast<MPI_Comm>(comm.mpi_comm), info, ncidp) ; 
    4747#else 
    4848    ERROR("int nc_open_par(const char *path, int mode, MPI_Comm comm, MPI_Info info,int *ncidp)", 
  • XIOS/dev/branch_yushan_merged/src/io/onetcdf4.cpp

    r1097 r1134  
    33#include "onetcdf4.hpp" 
    44#include "group_template.hpp" 
    5 #include "mpi.hpp" 
    65#include "netcdf.hpp" 
    76#include "netCdfInterface.hpp" 
     
    1211      /// ////////////////////// Définitions ////////////////////// /// 
    1312 
    14       CONetCDF4::CONetCDF4(const StdString& filename, bool append, bool useClassicFormat, 
    15                                                         bool useCFConvention, 
    16                            const MPI_Comm* comm, bool multifile, const StdString& timeCounterName) 
     13      CONetCDF4::CONetCDF4(const StdString& filename, bool append, bool useClassicFormat, bool useCFConvention,  
     14                           const ep_lib::MPI_Comm* comm, bool multifile, const StdString& timeCounterName) 
    1715        : path() 
    1816        , wmpi(false) 
     
    3230 
    3331      void CONetCDF4::initialize(const StdString& filename, bool append, bool useClassicFormat, bool useCFConvention,  
    34                                  const MPI_Comm* comm, bool multifile, const StdString& timeCounterName) 
     32                                 const ep_lib::MPI_Comm* comm, bool multifile, const StdString& timeCounterName) 
    3533      { 
    3634         this->useClassicFormat = useClassicFormat; 
     
    5654         { 
    5755            if (wmpi) 
    58                CNetCdfInterface::createPar(filename, mode, *comm, MPI_INFO_NULL, this->ncidp); 
     56               CNetCdfInterface::createPar(filename, mode, static_cast<MPI_Comm>(comm->mpi_comm), MPI_INFO_NULL_STD, this->ncidp);            
    5957            else 
    6058               CNetCdfInterface::create(filename, mode, this->ncidp); 
     
    6664            mode |= NC_WRITE; 
    6765            if (wmpi) 
    68                CNetCdfInterface::openPar(filename, mode, *comm, MPI_INFO_NULL, this->ncidp); 
     66               CNetCdfInterface::openPar(filename, mode, static_cast<MPI_Comm>(comm->mpi_comm), MPI_INFO_NULL_STD, this->ncidp); 
    6967            else 
    7068               CNetCdfInterface::open(filename, mode, this->ncidp); 
     
    535533                                 const std::vector<StdSize>& scount, const int* data) 
    536534      { 
    537           CNetCdfInterface::putVaraType(grpid, varid, &sstart[0], &scount[0], data); 
    538       } 
     535         CNetCdfInterface::putVaraType(grpid, varid, &sstart[0], &scount[0], data); 
     536      } 
     537 
    539538      //--------------------------------------------------------------- 
    540539 
     
    544543                                 const std::vector<StdSize>& scount, const float* data) 
    545544      { 
    546           CNetCdfInterface::putVaraType(grpid, varid, &sstart[0], &scount[0], data); 
     545         CNetCdfInterface::putVaraType(grpid, varid, &sstart[0], &scount[0], data); 
    547546      } 
    548547 
  • XIOS/dev/branch_yushan_merged/src/io/onetcdf4.hpp

    r1097 r1134  
    77#include "data_output.hpp" 
    88#include "array_new.hpp" 
    9 #include "mpi.hpp" 
     9#include "mpi_std.hpp" 
    1010#include "netcdf.hpp" 
    1111 
     
    2828            CONetCDF4(const StdString& filename, bool append, bool useClassicFormat = false, 
    2929                          bool useCFConvention = true, 
    30                       const MPI_Comm* comm = NULL, bool multifile = true, 
     30                      const ep_lib::MPI_Comm* comm = NULL, bool multifile = true, 
    3131                      const StdString& timeCounterName = "time_counter"); 
    3232 
     
    3737            /// Initialisation /// 
    3838            void initialize(const StdString& filename, bool append, bool useClassicFormat, bool useCFConvention, 
    39                             const MPI_Comm* comm, bool multifile, const StdString& timeCounterName); 
     39                            const ep_lib::MPI_Comm* comm, bool multifile, const StdString& timeCounterName); 
    4040            void close(void); 
    4141            void sync(void); 
  • XIOS/dev/branch_yushan_merged/src/log.cpp

    r523 r1134  
    11#include "log.hpp" 
     2#include <string> 
     3#include <iostream> 
     4#include <string> 
    25 
    36namespace xios 
    47{ 
     8 
     9  std::filebuf* info_FB[10]; 
     10 
     11 
    512  CLog info("info") ; 
    613  CLog report("report") ; 
    714  CLog error("error", cerr.rdbuf()) ; 
     15 
     16   
     17  CLog& CLog::operator()(int l) 
     18    { 
     19      if (l<=level) 
     20      { 
     21        omp_set_lock( &mutex ); 
     22        //rdbuf(strBuf_);  
     23        rdbuf(strBuf_array[omp_get_thread_num()]);  
     24        *this<<"-> "<<name<<" : " ; 
     25        omp_unset_lock( &mutex ); 
     26      } 
     27      else rdbuf(NULL) ; 
     28      return *this; 
     29    } 
     30 
     31 
     32 
     33  int test_omp_rank; 
     34  #pragma omp threadprivate(test_omp_rank) 
     35 
     36   
     37 
    838} 
  • XIOS/dev/branch_yushan_merged/src/log.hpp

    r523 r1134  
    55#include <iostream> 
    66#include <string> 
     7#include <stdio.h> 
     8#include <omp.h> 
    79 
    810namespace xios 
     
    1416    public : 
    1517    CLog(const string& name_, std::streambuf* sBuff = cout.rdbuf()) 
    16       : ostream(sBuff), level(0), name(name_), strBuf_(sBuff) {} 
    17     CLog& operator()(int l) 
     18      : ostream(cout.rdbuf()), level(0), name(name_), strBuf_(sBuff)  
    1819    { 
    19       if (l<=level) 
    20       { 
    21         rdbuf(strBuf_); 
    22         *this<<"-> "<<name<<" : " ; 
    23       } 
    24       else rdbuf(NULL) ; 
    25       return *this; 
     20      omp_init_lock( &mutex ); 
     21      for(int i=0; i<10; i++) 
     22        strBuf_array[i] = sBuff; 
    2623    } 
     24 
     25    ~CLog() 
     26    { 
     27      omp_destroy_lock( &mutex ); 
     28    } 
     29 
     30 
     31    CLog& operator()(int l); 
    2732    void setLevel(int l) {level=l; } 
    28     int getLevel() {return level ;} 
     33    int  getLevel() {return level ;} 
    2934    bool isActive(void) { if (rdbuf()==NULL) return true ; else return false ;} 
    3035    bool isActive(int l) {if (l<=level) return true ; else return false ; } 
     
    4651     * \param [in] pointer to new streambuf 
    4752    */ 
    48     void changeStreamBuff(std::streambuf* sBuff) { strBuf_ = sBuff; rdbuf(sBuff); } 
     53    void changeStreamBuff(std::streambuf* sBuff)  
     54    {  
     55      strBuf_ = sBuff;  
     56      strBuf_array[omp_get_thread_num()] = sBuff; 
     57      rdbuf(sBuff); 
     58    } 
    4959 
    5060    int level ; 
    5161    string name ; 
    5262    std::streambuf* strBuf_; 
     63    std::streambuf* strBuf_array[10]; 
     64    omp_lock_t mutex; 
    5365  }; 
    5466 
     
    5668  extern CLog report; 
    5769  extern CLog error; 
     70 
     71 
     72  extern std::filebuf* info_FB[10]; 
     73 
     74 
    5875} 
    5976#endif 
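
Taken together, log.hpp and log.cpp turn CLog into a thread-aware stream: the constructor seeds one streambuf slot per thread and an omp_lock_t, operator()(int) swaps in the calling thread's slot and writes the "-> name :" prefix under the lock, and changeStreamBuff() updates only the caller's slot. A compact stand-alone illustration of that locking idea (TinyLog is a hypothetical stand-in, not the real class):

    #include <omp.h>
    #include <iostream>

    class TinyLog : public std::ostream
    {
      public:
        TinyLog() : std::ostream(std::cout.rdbuf()), level(0)
        {
          omp_init_lock(&mutex);
          for (int i = 0; i < 10; ++i) buf[i] = std::cout.rdbuf();   // one slot per thread
        }
        ~TinyLog() { omp_destroy_lock(&mutex); }

        TinyLog& operator()(int requested)
        {
          if (requested <= level)
          {
            omp_set_lock(&mutex);
            rdbuf(buf[omp_get_thread_num()]);   // activate this thread's sink
            *this << "-> log : ";
            omp_unset_lock(&mutex);
          }
          else rdbuf(NULL);                     // below the level: discard output
          return *this;
        }

      private:
        int level;
        std::streambuf* buf[10];
        omp_lock_t mutex;
    };
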
  • XIOS/dev/branch_yushan_merged/src/memtrack.cpp

    r501 r1134  
    6868        private:    // static member variables 
    6969            static BlockHeader *ourFirstNode; 
     70            #pragma omp threadprivate(ourFirstNode) 
    7071     
    7172        private:    // member variables 
  • XIOS/dev/branch_yushan_merged/src/mpi.hpp

    r501 r1134  
    1111#define OMPI_SKIP_MPICXX 
    1212 
    13 #include <mpi.h> 
     13#ifdef _usingEP 
     14  #include <omp.h> 
     15  #include "../extern/src_ep_dev/ep_lib.hpp" 
     16  using namespace ep_lib; 
     17#elif _usingMPI 
     18  #include <mpi.h> 
     19#endif 
     20 
    1421 
    1522#endif 
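
With mpi.hpp rewritten this way, the backend is chosen entirely at compile time: -D_usingEP pulls in omp.h plus the bundled ep_lib (and its using-directive), while -D_usingMPI keeps the vendor <mpi.h>; the rest of the code keeps writing unqualified MPI calls. A hypothetical translation unit showing what that buys, restricted to calls that appear elsewhere in this changeset (MPI_Comm_rank, MPI_Barrier):

    #include "mpi.hpp"   // MPI_Comm is the vendor type or ep_lib::MPI_Comm, per build flags

    void reportRankAndSync(MPI_Comm comm, int* rankOut)
    {
      MPI_Comm_rank(comm, rankOut);   // same spelling in both builds
      MPI_Barrier(comm);              // resolved to ep_lib or vendor MPI at compile time
    }
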
  • XIOS/dev/branch_yushan_merged/src/node/axis.cpp

    r1117 r1134  
    4444   { /* Ne rien faire de plus */ } 
    4545 
    46    std::map<StdString, ETranformationType> CAxis::transformationMapList_ = std::map<StdString, ETranformationType>(); 
    47    bool CAxis::dummyTransformationMapList_ = CAxis::initializeTransformationMap(CAxis::transformationMapList_); 
     46   std::map<StdString, ETranformationType> *CAxis::transformationMapList_ptr = 0; //new std::map<StdString, ETranformationType>();   
     47   //bool CAxis::dummyTransformationMapList_ = CAxis::initializeTransformationMap(CAxis::transformationMapList_ptr); 
     48 
    4849   bool CAxis::initializeTransformationMap(std::map<StdString, ETranformationType>& m) 
    4950   { 
     
    5455     m["extract_domain"] = TRANS_EXTRACT_DOMAIN_TO_AXIS; 
    5556   } 
     57 
     58 
     59   bool CAxis::initializeTransformationMap() 
     60   { 
     61     if(CAxis::transformationMapList_ptr == 0) CAxis::transformationMapList_ptr = new std::map<StdString, ETranformationType>(); 
     62     (*CAxis::transformationMapList_ptr)["zoom_axis"]        = TRANS_ZOOM_AXIS; 
     63     (*CAxis::transformationMapList_ptr)["interpolate_axis"] = TRANS_INTERPOLATE_AXIS; 
     64     (*CAxis::transformationMapList_ptr)["inverse_axis"]     = TRANS_INVERSE_AXIS; 
     65     (*CAxis::transformationMapList_ptr)["reduce_domain"]    = TRANS_REDUCE_DOMAIN_TO_AXIS; 
     66     (*CAxis::transformationMapList_ptr)["extract_domain"]   = TRANS_EXTRACT_DOMAIN_TO_AXIS; 
     67   } 
     68 
    5669 
    5770   ///--------------------------------------------------------------- 
     
    780793      CContextServer* server = CContext::getCurrent()->server; 
    781794      axis->numberWrittenIndexes_ = axis->indexesToWrite.size(); 
    782       MPI_Allreduce(&axis->numberWrittenIndexes_, &axis->totalNumberWrittenIndexes_, 1, MPI_INT, MPI_SUM, server->intraComm); 
    783       MPI_Scan(&axis->numberWrittenIndexes_, &axis->offsetWrittenIndexes_, 1, MPI_INT, MPI_SUM, server->intraComm); 
     795      ep_lib::MPI_Allreduce(&axis->numberWrittenIndexes_, &axis->totalNumberWrittenIndexes_, 1, MPI_INT, MPI_SUM, server->intraComm); 
     796      ep_lib::MPI_Scan(&axis->numberWrittenIndexes_, &axis->offsetWrittenIndexes_, 1, MPI_INT, MPI_SUM, server->intraComm); 
    784797      axis->offsetWrittenIndexes_ -= axis->numberWrittenIndexes_; 
    785798    } 
     
    10301043  } 
    10311044 
     1045 
     1046 
    10321047  void CAxis::duplicateTransformation(CAxis* src) 
    10331048  { 
     
    10731088 
    10741089        nodeElementName = node.getElementName(); 
    1075         std::map<StdString, ETranformationType>::const_iterator ite = transformationMapList_.end(), it; 
    1076         it = transformationMapList_.find(nodeElementName); 
     1090 
     1091        if(transformationMapList_ptr == 0) initializeTransformationMap(); 
     1092        //transformationMapList_ptr = new std::map<StdString, ETranformationType>(); 
     1093 
     1094        std::map<StdString, ETranformationType>::const_iterator ite = (*CAxis::transformationMapList_ptr).end(), it; 
     1095        it = (*CAxis::transformationMapList_ptr).find(nodeElementName); 
    10771096        if (ite != it) 
    10781097        { 
     
    10961115 
    10971116} // namespace xios 
     1117 
  • XIOS/dev/branch_yushan_merged/src/node/axis.hpp

    r1106 r1134  
    169169       private: 
    170170         static bool initializeTransformationMap(std::map<StdString, ETranformationType>& m); 
    171          static std::map<StdString, ETranformationType> transformationMapList_; 
    172          static bool dummyTransformationMapList_; 
     171         //static bool initializeTransformationMap(std::map<StdString, ETranformationType>* m); 
     172         static bool initializeTransformationMap(); 
     173 
     174         //static std::map<StdString, ETranformationType> transformationMapList_; 
     175          
     176         static std::map<StdString, ETranformationType> *transformationMapList_ptr; 
     177         #pragma omp threadprivate(transformationMapList_ptr) 
     178 
     179         //static bool dummyTransformationMapList_; 
     180         //#pragma omp threadprivate(dummyTransformationMapList_) 
    173181 
    174182         DECLARE_REF_FUNC(Axis,axis) 
     
    182190 
    183191#endif // __XIOS_CAxis__ 
     192 
  • XIOS/dev/branch_yushan_merged/src/node/compute_connectivity_domain.hpp

    r934 r1134  
    6060      static CTransformation<CDomain>* create(const StdString& id, xml::CXMLNode* node); 
    6161      static bool _dummyRegistered; 
     62      #pragma omp threadprivate(_dummyRegistered) 
     63       
    6264  }; // class CComputeConnectivityDomain 
    6365 
  • XIOS/dev/branch_yushan_merged/src/node/context.cpp

    r1091 r1134  
    1818namespace xios { 
    1919 
    20   shared_ptr<CContextGroup> CContext::root; 
     20  //shared_ptr<CContextGroup> CContext::root; 
     21  shared_ptr<CContextGroup> * CContext::root_ptr = 0; 
    2122 
    2223   /// ////////////////////// Définitions ////////////////////// /// 
     
    5455   CContextGroup* CContext::getRoot(void) 
    5556   { 
    56       if (root.get()==NULL) root=shared_ptr<CContextGroup>(new CContextGroup(xml::CXMLNode::GetRootName())); 
    57       return root.get(); 
     57      //if (root.get()==NULL) root=shared_ptr<CContextGroup>(new CContextGroup(xml::CXMLNode::GetRootName())); 
     58      //return root.get(); 
     59 
     60      //static shared_ptr<CContextGroup> *root_ptr; 
     61      if(root_ptr == 0) //root_ptr = new shared_ptr<CContextGroup>; 
     62      // if (root_ptr->get()==NULL)  
     63      root_ptr = new shared_ptr<CContextGroup>(new CContextGroup(xml::CXMLNode::GetRootName())); 
     64      return root_ptr->get(); 
    5865   } 
    5966 
     
    236243 
    237244   //! Initialize client side 
    238    void CContext::initClient(MPI_Comm intraComm, MPI_Comm interComm, CContext* cxtServer /*= 0*/) 
     245   void CContext::initClient(ep_lib::MPI_Comm intraComm, ep_lib::MPI_Comm interComm, CContext* cxtServer /*= 0*/) 
    239246   { 
    240247     hasClient=true; 
    241      client = new CContextClient(this,intraComm, interComm, cxtServer); 
     248     client = new CContextClient(this, intraComm, interComm, cxtServer); 
     249 
     250     int tmp_rank; 
     251     MPI_Comm_rank(intraComm, &tmp_rank); 
     252     MPI_Barrier(intraComm); 
     253      
     254 
    242255     registryIn=new CRegistry(intraComm); 
    243256     registryIn->setPath(getId()) ; 
     
    248261     registryOut->setPath(getId()) ; 
    249262 
    250      MPI_Comm intraCommServer, interCommServer; 
     263     ep_lib::MPI_Comm intraCommServer, interCommServer; 
    251264     if (cxtServer) // Attached mode 
    252265     { 
     
    311324 
    312325   //! Initialize server 
    313    void CContext::initServer(MPI_Comm intraComm,MPI_Comm interComm, CContext* cxtClient /*= 0*/) 
     326   void CContext::initServer(ep_lib::MPI_Comm intraComm, ep_lib::MPI_Comm interComm, CContext* cxtClient /*= 0*/) 
    314327   { 
    315328     hasServer=true; 
     
    323336     registryOut->setPath(getId()) ; 
    324337 
    325      MPI_Comm intraCommClient, interCommClient; 
     338     ep_lib::MPI_Comm intraCommClient, interCommClient; 
    326339     if (cxtClient) // Attached mode 
    327340     { 
     
    372385        } 
    373386 
    374         for (std::list<MPI_Comm>::iterator it = comms.begin(); it != comms.end(); ++it) 
     387        for (std::list<ep_lib::MPI_Comm>::iterator it = comms.begin(); it != comms.end(); ++it) 
    375388          MPI_Comm_free(&(*it)); 
    376389        comms.clear(); 
     
    812825   void CContext::postProcessing() 
    813826   { 
     827     int myRank; 
     828     MPI_Comm_rank(MPI_COMM_WORLD, &myRank); 
     829 
    814830     if (isPostProcessed) return; 
    815831 
     
    11911207   void CContext::updateCalendar(int step) 
    11921208   { 
    1193       info(50) << "updateCalendar : before : " << calendar->getCurrentDate() << endl; 
    11941209      calendar->update(step); 
    1195       info(50) << "updateCalendar : after : " << calendar->getCurrentDate() << endl; 
    11961210 
    11971211      if (hasClient) 
     
    12411255    CContext* context = CObjectFactory::CreateObject<CContext>(id).get(); 
    12421256    getRoot(); 
    1243     if (!hasctxt) CGroupFactory::AddChild(root, context->getShared()); 
     1257    //if (!hasctxt) CGroupFactory::AddChild(root, context->getShared()); 
     1258    if (!hasctxt) CGroupFactory::AddChild(*root_ptr, context->getShared()); 
    12441259 
    12451260#define DECLARE_NODE(Name_, name_) \ 
  • XIOS/dev/branch_yushan_merged/src/node/context.hpp

    r1033 r1134  
    8888      public : 
    8989         // Initialize server or client 
    90          void initServer(MPI_Comm intraComm, MPI_Comm interComm, CContext* cxtClient = 0); 
    91          void initClient(MPI_Comm intraComm, MPI_Comm interComm, CContext* cxtServer = 0); 
     90         void initServer(ep_lib::MPI_Comm intraComm, ep_lib::MPI_Comm interComm, CContext* cxtClient = 0); 
     91         void initClient(ep_lib::MPI_Comm intraComm, ep_lib::MPI_Comm interComm, CContext* cxtServer = 0); 
    9292         bool isInitialized(void); 
    9393 
     
    206206 
    207207         // Context root 
    208          static shared_ptr<CContextGroup> root; 
     208         //static shared_ptr<CContextGroup> root; 
     209 
     210         static shared_ptr<CContextGroup> *root_ptr; 
     211         #pragma omp threadprivate(root_ptr) 
    209212 
    210213         // Determine context on client or not 
     
    219222         // Concrete contex client 
    220223         CContextClient* client; 
     224 
     225 
    221226         CRegistry* registryIn ;  //!< input registry which is read from file 
    222227         CRegistry* registryOut ; //!< output registry which will be wrote on file at the finalize 
     228          
    223229 
    224230      private: 
     
    227233         StdString idServer_; 
    228234         CGarbageCollector garbageCollector; 
    229          std::list<MPI_Comm> comms; //!< Communicators allocated internally 
     235         std::list<ep_lib::MPI_Comm> comms; //!< Communicators allocated internally 
    230236 
    231237      public: // Some function maybe removed in the near future 
  • XIOS/dev/branch_yushan_merged/src/node/domain.cpp

    r1117 r1134  
    6565   } 
    6666 
    67    std::map<StdString, ETranformationType> CDomain::transformationMapList_ = std::map<StdString, ETranformationType>(); 
    68    bool CDomain::_dummyTransformationMapList = CDomain::initializeTransformationMap(CDomain::transformationMapList_); 
     67   //std::map<StdString, ETranformationType> CDomain::transformationMapList_ = std::map<StdString, ETranformationType>(); 
     68   //bool CDomain::_dummyTransformationMapList = CDomain::initializeTransformationMap(CDomain::transformationMapList_); 
     69 
     70   std::map<StdString, ETranformationType> *CDomain::transformationMapList_ptr = 0; 
    6971 
    7072   bool CDomain::initializeTransformationMap(std::map<StdString, ETranformationType>& m) 
     
    7678     m["expand_domain"] = TRANS_EXPAND_DOMAIN; 
    7779   } 
     80 
     81   bool CDomain::initializeTransformationMap() 
     82   { 
     83     CDomain::transformationMapList_ptr = new std::map<StdString, ETranformationType>(); 
     84     (*CDomain::transformationMapList_ptr)["zoom_domain"] = TRANS_ZOOM_DOMAIN; 
     85     (*CDomain::transformationMapList_ptr)["interpolate_domain"] = TRANS_INTERPOLATE_DOMAIN; 
     86     (*CDomain::transformationMapList_ptr)["generate_rectilinear_domain"] = TRANS_GENERATE_RECTILINEAR_DOMAIN; 
     87     (*CDomain::transformationMapList_ptr)["compute_connectivity_domain"] = TRANS_COMPUTE_CONNECTIVITY_DOMAIN; 
     88     (*CDomain::transformationMapList_ptr)["expand_domain"] = TRANS_EXPAND_DOMAIN; 
     89   } 
     90 
    7891 
    7992   const std::set<StdString> & CDomain::getRelFiles(void) const 
     
    623636   { 
    624637          CContext* context = CContext::getCurrent(); 
    625       CContextClient* client = context->client; 
     638          CContextClient* client = context->client; 
    626639          lon_g.resize(ni_glo) ; 
    627640          lat_g.resize(nj_glo) ; 
     
    17131726                                                                                client->intraComm); 
    17141727    clientServerMap->computeServerIndexMapping(globalIndexDomain); 
     1728     
    17151729    const CClientServerMapping::GlobalIndexMap& globalIndexDomainOnServer = clientServerMap->getGlobalIndexOnServer(); 
    17161730 
     
    23502364 
    23512365        nodeElementName = node.getElementName(); 
    2352         std::map<StdString, ETranformationType>::const_iterator ite = transformationMapList_.end(), it; 
    2353         it = transformationMapList_.find(nodeElementName); 
     2366        if(transformationMapList_ptr == 0) initializeTransformationMap(); 
     2367        std::map<StdString, ETranformationType>::const_iterator ite = (*transformationMapList_ptr).end(), it; 
     2368        it = (*transformationMapList_ptr).find(nodeElementName); 
    23542369        if (ite != it) 
    23552370        { 
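The domain.cpp hunks above replace the statically initialized transformationMapList_ with a pointer that is marked threadprivate and allocated on first use inside parse(). A minimal sketch of that pattern follows; TransRegistrySketch and its members are illustrative, not the actual XIOS classes, and the mapped values stand in for the ETranformationType enum.

    #include <map>
    #include <string>

    // Minimal sketch: a per-thread, lazily built lookup table.
    struct TransRegistrySketch
    {
      static std::map<std::string, int>* table_ptr;   // one copy per OpenMP thread
      #pragma omp threadprivate(table_ptr)

      static std::map<std::string, int>& table()
      {
        if (table_ptr == 0)                           // first use in this thread
        {
          table_ptr = new std::map<std::string, int>();
          (*table_ptr)["zoom_domain"] = 0;            // entries as in initializeTransformationMap()
          (*table_ptr)["interpolate_domain"] = 1;
        }
        return *table_ptr;
      }
    };

    std::map<std::string, int>* TransRegistrySketch::table_ptr = 0;

Compared with the original file-scope initialization, this avoids depending on static initialization order and gives each OpenMP thread its own copy of the map, which is what the endpoint (EP) branch relies on.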
  • XIOS/dev/branch_yushan_merged/src/node/domain.hpp

    r1106 r1134  
    216216       private: 
    217217         static bool initializeTransformationMap(std::map<StdString, ETranformationType>& m); 
    218          static std::map<StdString, ETranformationType> transformationMapList_; 
    219          static bool _dummyTransformationMapList; 
     218         static bool initializeTransformationMap(); 
     219         //static std::map<StdString, ETranformationType> transformationMapList_; 
     220  
     221         static std::map<StdString, ETranformationType> *transformationMapList_ptr; 
     222         #pragma omp threadprivate(transformationMapList_ptr) 
     223 
     224         //static bool _dummyTransformationMapList; 
     225         //#pragma omp threadprivate(_dummyTransformationMapList) 
    220226 
    221227         DECLARE_REF_FUNC(Domain,domain) 
  • XIOS/dev/branch_yushan_merged/src/node/expand_domain.hpp

    r935 r1134  
    6060      static CTransformation<CDomain>* create(const StdString& id, xml::CXMLNode* node); 
    6161      static bool _dummyRegistered; 
     62      #pragma omp threadprivate(_dummyRegistered) 
     63       
    6264  }; // class CExpandDomain 
    6365 
  • XIOS/dev/branch_yushan_merged/src/node/extract_axis_to_scalar.hpp

    r960 r1134  
    6060      static CTransformation<CScalar>* create(const StdString& id, xml::CXMLNode* node); 
    6161      static bool _dummyRegistered; 
     62      #pragma omp threadprivate(_dummyRegistered) 
    6263  }; // class CExtractAxisToScalar 
    6364 
  • XIOS/dev/branch_yushan_merged/src/node/extract_domain_to_axis.hpp

    r895 r1134  
    6060      static CTransformation<CAxis>* create(const StdString& id, xml::CXMLNode* node); 
    6161      static bool _dummyRegistered; 
     62      #pragma omp threadprivate(_dummyRegistered) 
    6263  }; // class CExtractDomainToAxis 
    6364 
  • XIOS/dev/branch_yushan_merged/src/node/field.cpp

    r1120 r1134  
    953953 
    954954       const bool ignoreMissingValue = (!detect_missing_value.isEmpty() && !default_value.isEmpty() && detect_missing_value == true); 
    955         
     955 
    956956       boost::shared_ptr<CTemporalFilter> temporalFilter(new CTemporalFilter(gc, operation, 
    957957                                                                             CContext::getCurrent()->getCalendar()->getInitDate(), 
  • XIOS/dev/branch_yushan_merged/src/node/file.cpp

    r1098 r1134  
    579579 
    580580      if (isOpen) data_out->closeFile(); 
    581       if (time_counter_name.isEmpty()) data_in = shared_ptr<CDataInput>(new CNc4DataInput(oss.str(), fileComm, multifile, isCollective)); 
    582       else data_in = shared_ptr<CDataInput>(new CNc4DataInput(oss.str(), fileComm, multifile, isCollective, time_counter_name)); 
     581      if (time_counter_name.isEmpty()) data_in = shared_ptr<CDataInput>(new CNc4DataInput(oss.str(), static_cast<MPI_Comm>(fileComm.mpi_comm), multifile, isCollective)); 
     582      else data_in = shared_ptr<CDataInput>(new CNc4DataInput(oss.str(), static_cast<MPI_Comm>(fileComm.mpi_comm), multifile, isCollective, time_counter_name)); 
    583583      isOpen = true; 
    584584    } 
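In the file.cpp hunk, fileComm becomes an ep_lib::MPI_Comm while CNc4DataInput (and ultimately NetCDF) still expects a plain MPI communicator, hence the static_cast<MPI_Comm>(fileComm.mpi_comm). The sketch below only illustrates that idea: ep_sketch::Comm is a made-up type, and the mpi_comm member name is taken from the cast in the hunk; the real ep_lib communicator carries more state than shown here.

    #include <mpi.h>

    namespace ep_sketch
    {
      struct Comm
      {
        MPI_Comm mpi_comm;   // raw communicator shared by the endpoints
        // ... endpoint bookkeeping (thread rank, local sizes, ...) would live here
      };
    }

    void open_with_netcdf(const ep_sketch::Comm& fileComm)
    {
      // Libraries that know nothing about endpoints get the raw handle.
      MPI_Comm raw = static_cast<MPI_Comm>(fileComm.mpi_comm);
      int rank = 0;
      MPI_Comm_rank(raw, &rank);   // stand-in for the nc_open_par(..., raw, ...) call path
      (void)rank;
    }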
  • XIOS/dev/branch_yushan_merged/src/node/file.hpp

    r1090 r1134  
    1212#include "attribute_enum_impl.hpp" 
    1313#include "mpi.hpp" 
     14#ifdef _usingEP 
     15#include "ep_declaration.hpp" 
     16#endif 
    1417 
    1518namespace xios { 
     
    156159         bool isOpen; 
    157160         bool allDomainEmpty; 
    158          MPI_Comm fileComm; 
     161         ep_lib::MPI_Comm fileComm; 
    159162 
    160163      private : 
  • XIOS/dev/branch_yushan_merged/src/node/generate_rectilinear_domain.hpp

    r836 r1134  
    6060      static CTransformation<CDomain>* create(const StdString& id, xml::CXMLNode* node); 
    6161      static bool _dummyRegistered; 
     62      #pragma omp threadprivate(_dummyRegistered) 
    6263  }; // class CGenerateRectilinearDomain 
    6364 
  • XIOS/dev/branch_yushan_merged/src/node/grid.cpp

    r1093 r1134  
    11141114          outLocalIndexToServer(idx) = itIndex->second; 
    11151115        } 
    1116          
     1116 
    11171117        const std::list<int>& ranks = client->getRanksServerLeader(); 
    11181118        for (std::list<int>::const_iterator itRank = ranks.begin(), itRankEnd = ranks.end(); itRank != itRankEnd; ++itRank) 
  • XIOS/dev/branch_yushan_merged/src/node/interpolate_axis.hpp

    r836 r1134  
    6262      static CTransformation<CAxis>* create(const StdString& id, xml::CXMLNode* node); 
    6363      static bool _dummyRegistered; 
     64      #pragma omp threadprivate(_dummyRegistered) 
     65       
    6466  }; // class CInterpolateAxis 
    6567 
  • XIOS/dev/branch_yushan_merged/src/node/interpolate_domain.hpp

    r1004 r1134  
    6060      static CTransformation<CDomain>* create(const StdString& id, xml::CXMLNode* node); 
    6161      static bool _dummyRegistered; 
     62      #pragma omp threadprivate(_dummyRegistered) 
    6263  }; // class CInterpolateDomain 
    6364 
  • XIOS/dev/branch_yushan_merged/src/node/inverse_axis.hpp

    r836 r1134  
    5959      static CTransformation<CAxis>* create(const StdString& id, xml::CXMLNode* node); 
    6060      static bool _dummyRegistered; 
     61      #pragma omp threadprivate(_dummyRegistered) 
    6162 
    6263  }; // class CInverseAxis 
  • XIOS/dev/branch_yushan_merged/src/node/mesh.cpp

    r1002 r1134  
    3434  std::map <StdString, vector<int> > CMesh::domainList = std::map <StdString, vector<int> >(); 
    3535 
     36  std::map <StdString, CMesh> *CMesh::meshList_ptr = 0; 
     37  std::map <StdString, vector<int> > *CMesh::domainList_ptr = 0; 
     38 
     39 
    3640///--------------------------------------------------------------- 
    3741/*! 
     
    4145 * \param [in] nvertex Number of verteces (1 for nodes, 2 for edges, 3 and up for faces). 
    4246 */ 
     47 
     48/* bkp 
    4349  CMesh* CMesh::getMesh (StdString meshName, int nvertex) 
    4450  { 
     
    6470      CMesh::meshList.insert( make_pair(meshName, newMesh) ); 
    6571      return &meshList[meshName]; 
     72    } 
     73  } 
     74*/ 
     75 
     76  CMesh* CMesh::getMesh (StdString meshName, int nvertex) 
     77  { 
     78    if(CMesh::domainList_ptr == NULL) CMesh::domainList_ptr = new std::map <StdString, vector<int> >(); 
     79    if(CMesh::meshList_ptr == NULL)   CMesh::meshList_ptr   = new std::map <StdString, CMesh>(); 
     80 
     81    (*CMesh::domainList_ptr)[meshName].push_back(nvertex); 
     82 
     83    if ( (*CMesh::meshList_ptr).begin() != (*CMesh::meshList_ptr).end() ) 
     84    { 
     85      for (std::map<StdString, CMesh>::iterator it=(*CMesh::meshList_ptr).begin(); it!=(*CMesh::meshList_ptr).end(); ++it) 
     86      { 
     87        if (it->first == meshName) 
     88          return &((*CMesh::meshList_ptr)[meshName]); 
     89        else 
     90        { 
     91          CMesh newMesh; 
     92          (*CMesh::meshList_ptr).insert( make_pair(meshName, newMesh) ); 
     93          return &((*CMesh::meshList_ptr)[meshName]); 
     94        } 
     95      } 
     96    } 
     97    else 
     98    { 
     99      CMesh newMesh; 
     100      (*CMesh::meshList_ptr).insert( make_pair(meshName, newMesh) ); 
     101      return &((*CMesh::meshList_ptr)[meshName]); 
    66102    } 
    67103  } 
     
    488524 * \param [in] bounds_lat Array of boundary latitudes. Its size depends on the element type. 
    489525 */ 
    490   void CMesh::createMeshEpsilon(const MPI_Comm& comm, 
     526  void CMesh::createMeshEpsilon(const ep_lib::MPI_Comm& comm, 
    491527                                const CArray<double, 1>& lonvalue, const CArray<double, 1>& latvalue, 
    492528                                const CArray<double, 2>& bounds_lon, const CArray<double, 2>& bounds_lat) 
     
    16881724   */ 
    16891725 
    1690   void CMesh::getGloNghbFacesNodeType(const MPI_Comm& comm, const CArray<int, 1>& face_idx, 
     1726  void CMesh::getGloNghbFacesNodeType(const ep_lib::MPI_Comm& comm, const CArray<int, 1>& face_idx, 
    16911727                               const CArray<double, 2>& bounds_lon, const CArray<double, 2>& bounds_lat, 
    16921728                               CArray<int, 2>& nghbFaces) 
     
    18441880   */ 
    18451881 
    1846   void CMesh::getGloNghbFacesEdgeType(const MPI_Comm& comm, const CArray<int, 1>& face_idx, 
     1882  void CMesh::getGloNghbFacesEdgeType(const ep_lib::MPI_Comm& comm, const CArray<int, 1>& face_idx, 
    18471883                               const CArray<double, 2>& bounds_lon, const CArray<double, 2>& bounds_lat, 
    18481884                               CArray<int, 2>& nghbFaces) 
     
    20252061   */ 
    20262062 
    2027   void CMesh::getGlobalNghbFaces(const int nghbType, const MPI_Comm& comm, 
     2063  void CMesh::getGlobalNghbFaces(const int nghbType, const ep_lib::MPI_Comm& comm, 
    20282064                                 const CArray<int, 1>& face_idx, 
    20292065                                 const CArray<double, 2>& bounds_lon, const CArray<double, 2>& bounds_lat, 
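The rewritten CMesh::getMesh above walks the per-thread mesh map and inserts inside the loop. The standard find-or-insert idiom covers the same need more compactly; the sketch below uses illustrative names (MeshLite is a stand-in for CMesh) and also sidesteps the early return taken after inspecting only the first key in the loop above.

    #include <map>
    #include <string>

    struct MeshLite { int nvertex_hint; };

    static std::map<std::string, MeshLite>* meshList_ptr = 0;
    #pragma omp threadprivate(meshList_ptr)

    MeshLite* getMeshSketch(const std::string& meshName)
    {
      if (meshList_ptr == 0) meshList_ptr = new std::map<std::string, MeshLite>();
      // operator[] default-constructs the entry the first time the key is seen
      // and returns the existing one afterwards - no explicit search loop needed.
      return &((*meshList_ptr)[meshName]);
    }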
  • XIOS/dev/branch_yushan_merged/src/node/mesh.hpp

    r931 r1134  
    6060                      const CArray<double, 2>&, const CArray<double, 2>& ); 
    6161                         
    62       void createMeshEpsilon(const MPI_Comm&, 
     62      void createMeshEpsilon(const ep_lib::MPI_Comm&, 
    6363                             const CArray<double, 1>&, const CArray<double, 1>&, 
    6464                             const CArray<double, 2>&, const CArray<double, 2>& ); 
    6565 
    66       void getGlobalNghbFaces(const int, const MPI_Comm&, const CArray<int, 1>&, 
     66      void getGlobalNghbFaces(const int, const ep_lib::MPI_Comm&, const CArray<int, 1>&, 
    6767                              const CArray<double, 2>&, const CArray<double, 2>&, 
    6868                              CArray<int, 2>&); 
     
    8282      static std::map <StdString, CMesh> meshList; 
    8383      static std::map <StdString, vector<int> > domainList; 
     84 
     85      static std::map <StdString, CMesh> *meshList_ptr; 
     86      static std::map <StdString, vector<int> > *domainList_ptr; 
     87      #pragma omp threadprivate(meshList_ptr, domainList_ptr) 
     88    
    8489      CClientClientDHTSizet* pNodeGlobalIndex;                    // pointer to a map <nodeHash, nodeIdxGlo> 
    8590      CClientClientDHTSizet* pEdgeGlobalIndex;                    // pointer to a map <edgeHash, edgeIdxGlo> 
    86       void getGloNghbFacesNodeType(const MPI_Comm&, const CArray<int, 1>&, const CArray<double, 2>&, const CArray<double, 2>&, CArray<int, 2>&); 
    87       void getGloNghbFacesEdgeType(const MPI_Comm&, const CArray<int, 1>&, const CArray<double, 2>&, const CArray<double, 2>&, CArray<int, 2>&); 
     91      void getGloNghbFacesNodeType(const ep_lib::MPI_Comm&, const CArray<int, 1>&, const CArray<double, 2>&, const CArray<double, 2>&, CArray<int, 2>&); 
     92      void getGloNghbFacesEdgeType(const ep_lib::MPI_Comm&, const CArray<int, 1>&, const CArray<double, 2>&, const CArray<double, 2>&, CArray<int, 2>&); 
    8893      void getLocNghbFacesNodeType(const CArray<int, 1>&, const CArray<double, 2>&, const CArray<double, 2>&, CArray<int, 2>&, CArray<int, 1>&); 
    8994      void getLocNghbFacesEdgeType(const CArray<int, 1>&, const CArray<double, 2>&, const CArray<double, 2>&, CArray<int, 2>&, CArray<int, 1>&); 
  • XIOS/dev/branch_yushan_merged/src/node/reduce_axis_to_scalar.hpp

    r888 r1134  
    5959      static CTransformation<CScalar>* create(const StdString& id, xml::CXMLNode* node); 
    6060      static bool _dummyRegistered; 
     61      #pragma omp threadprivate(_dummyRegistered) 
    6162  }; // class CReduceAxisToScalar 
    6263 
  • XIOS/dev/branch_yushan_merged/src/node/reduce_domain_to_axis.hpp

    r895 r1134  
    6060      static CTransformation<CAxis>* create(const StdString& id, xml::CXMLNode* node); 
    6161      static bool _dummyRegistered; 
     62      #pragma omp threadprivate(_dummyRegistered) 
    6263  }; // class CReduceDomainToAxis 
    6364 
  • XIOS/dev/branch_yushan_merged/src/node/reduce_domain_to_scalar.hpp

    r976 r1134  
    6060      static CTransformation<CScalar>* create(const StdString& id, xml::CXMLNode* node); 
    6161      static bool _dummyRegistered; 
     62      #pragma omp threadprivate(_dummyRegistered) 
    6263  }; // class CReduceDomainToScalar 
    6364 
  • XIOS/dev/branch_yushan_merged/src/node/scalar.cpp

    r1117 r1134  
    2727   { /* Ne rien faire de plus */ } 
    2828 
    29    std::map<StdString, ETranformationType> CScalar::transformationMapList_ = std::map<StdString, ETranformationType>(); 
    30    bool CScalar::dummyTransformationMapList_ = CScalar::initializeTransformationMap(CScalar::transformationMapList_); 
     29   //std::map<StdString, ETranformationType> CScalar::transformationMapList_ = std::map<StdString, ETranformationType>(); 
     30   //bool CScalar::dummyTransformationMapList_ = CScalar::initializeTransformationMap(CScalar::transformationMapList_); 
     31 
     32   std::map<StdString, ETranformationType> *CScalar::transformationMapList_ptr = 0; 
     33    
    3134   bool CScalar::initializeTransformationMap(std::map<StdString, ETranformationType>& m) 
    3235   { 
     
    3437     m["extract_axis"]  = TRANS_EXTRACT_AXIS_TO_SCALAR; 
    3538     m["reduce_domain"] = TRANS_REDUCE_DOMAIN_TO_SCALAR; 
     39   } 
     40 
     41   bool CScalar::initializeTransformationMap() 
     42   { 
     43     CScalar::transformationMapList_ptr = new std::map<StdString, ETranformationType>(); 
     44     (*CScalar::transformationMapList_ptr)["reduce_axis"]   = TRANS_REDUCE_AXIS_TO_SCALAR; 
     45     (*CScalar::transformationMapList_ptr)["extract_axis"]  = TRANS_EXTRACT_AXIS_TO_SCALAR; 
     46     (*CScalar::transformationMapList_ptr)["reduce_domain"] = TRANS_REDUCE_DOMAIN_TO_SCALAR; 
    3647   } 
    3748 
     
    164175 
    165176        nodeElementName = node.getElementName(); 
    166         std::map<StdString, ETranformationType>::const_iterator ite = transformationMapList_.end(), it; 
    167         it = transformationMapList_.find(nodeElementName); 
     177        if(CScalar::transformationMapList_ptr == 0) initializeTransformationMap(); 
     178        std::map<StdString, ETranformationType>::const_iterator ite = (*CScalar::transformationMapList_ptr).end(), it; 
     179        it = (*CScalar::transformationMapList_ptr).find(nodeElementName); 
    168180        if (ite != it) 
    169181        { 
  • XIOS/dev/branch_yushan_merged/src/node/scalar.hpp

    r1106 r1134  
    8787       private: 
    8888           static bool initializeTransformationMap(std::map<StdString, ETranformationType>& m); 
    89            static std::map<StdString, ETranformationType> transformationMapList_; 
    90            static bool dummyTransformationMapList_; 
     89           static bool initializeTransformationMap(); 
     90            
     91           //static bool dummyTransformationMapList_; 
     92 
     93           static std::map<StdString, ETranformationType> *transformationMapList_ptr; 
     94           #pragma omp threadprivate(transformationMapList_ptr) 
    9195 
    9296 
  • XIOS/dev/branch_yushan_merged/src/node/zoom_axis.hpp

    r836 r1134  
    6060      static CTransformation<CAxis>* create(const StdString& id, xml::CXMLNode* node); 
    6161      static bool _dummyRegistered; 
     62      #pragma omp threadprivate(_dummyRegistered) 
    6263  }; // class CZoomAxis 
    6364 
  • XIOS/dev/branch_yushan_merged/src/node/zoom_domain.hpp

    r836 r1134  
    6060      static CTransformation<CDomain>* create(const StdString& id, xml::CXMLNode* node); 
    6161      static bool _dummyRegistered; 
     62      #pragma omp threadprivate(_dummyRegistered) 
     63       
    6264  }; // class CZoomDomain 
    6365 
  • XIOS/dev/branch_yushan_merged/src/object_factory.cpp

    r501 r1134  
    55   /// ////////////////////// Définitions ////////////////////// /// 
    66 
    7    StdString CObjectFactory::CurrContext(""); 
     7   StdString *CObjectFactory::CurrContext_ptr = new StdString; 
    88 
    99   void CObjectFactory::SetCurrentContextId(const StdString & context) 
    10    { CObjectFactory::CurrContext = context; } 
     10   { 
     11     if(CObjectFactory::CurrContext_ptr == NULL ) CObjectFactory::CurrContext_ptr = new StdString; 
     12     CObjectFactory::CurrContext_ptr->assign(context);  
     13   } 
    1114 
    1215   StdString & CObjectFactory::GetCurrentContextId(void) 
    13    { return (CObjectFactory::CurrContext); } 
     16   {  
     17     return (*CObjectFactory::CurrContext_ptr);  
     18   } 
    1419 
    1520} // namespace xios 
  • XIOS/dev/branch_yushan_merged/src/object_factory.hpp

    r769 r1134  
    5959 
    6060         /// Propriétés statiques /// 
    61          static StdString CurrContext; 
     61         static StdString *CurrContext_ptr; 
     62         #pragma omp threadprivate(CurrContext_ptr) 
    6263 
    6364   }; // class CObjectFactory 
  • XIOS/dev/branch_yushan_merged/src/object_factory_impl.hpp

    r769 r1134  
    1010       int CObjectFactory::GetObjectNum(void) 
    1111   { 
    12       if (CurrContext.size() == 0) 
     12      if (CurrContext_ptr->size() == 0) 
    1313         ERROR("CObjectFactory::GetObjectNum(void)", 
    1414               << "please define current context id !"); 
    15       return (U::AllVectObj[CObjectFactory::CurrContext].size()); 
     15 
     16      if(U::AllVectObj == NULL) return 0; 
     17       
     18       
     19      return (*U::AllVectObj)[*CObjectFactory::CurrContext_ptr].size(); 
    1620   } 
    1721 
     
    1923      int CObjectFactory::GetObjectIdNum(void) 
    2024   { 
    21       if (CurrContext.size() == 0) 
     25      if (CurrContext_ptr->size() == 0) 
    2226         ERROR("CObjectFactory::GetObjectIdNum(void)", 
    2327               << "please define current context id !"); 
    24       return (U::AllMapObj[CObjectFactory::CurrContext].size()); 
     28      if(U::AllMapObj  == NULL) return 0; 
     29 
     30       
     31 
     32      return (* U::AllMapObj) [*CObjectFactory::CurrContext_ptr].size(); 
    2533   } 
    2634 
     
    2836      bool CObjectFactory::HasObject(const StdString & id) 
    2937   { 
    30       if (CurrContext.size() == 0) 
     38      if (CurrContext_ptr->size() == 0) 
    3139         ERROR("CObjectFactory::HasObject(const StdString & id)", 
    3240               << "[ id = " << id << " ] please define current context id !"); 
    33       return (U::AllMapObj[CObjectFactory::CurrContext].find(id) != 
    34               U::AllMapObj[CObjectFactory::CurrContext].end()); 
     41       
     42      if(U::AllMapObj  == NULL)  return false; 
     43 
     44       
     45 
     46      return ((*U::AllMapObj)[*CObjectFactory::CurrContext_ptr].find(id) != 
     47              (*U::AllMapObj)[*CObjectFactory::CurrContext_ptr].end()); 
    3548   } 
    3649 
     
    3851      bool CObjectFactory::HasObject(const StdString & context, const StdString & id) 
    3952   { 
    40       if (U::AllMapObj.find(context) == U::AllMapObj.end()) return false ; 
    41       else return (U::AllMapObj[context].find(id) !=  U::AllMapObj[context].end()); 
     53      if(U::AllMapObj  == NULL) return false; 
     54 
     55      if (U::AllMapObj->find(context) == U::AllMapObj->end()) return false ; 
     56 
     57      else 
     58      { 
     59         return ((*U::AllMapObj)[context].find(id) !=  (*U::AllMapObj)[context].end()); 
     60      }  
     61          
    4262   } 
    4363 
     
    4565      boost::shared_ptr<U> CObjectFactory::GetObject(const U * const object) 
    4666   { 
    47       if (CurrContext.size() == 0) 
     67      if(U::AllVectObj == NULL) return (boost::shared_ptr<U>()); 
     68    
     69      if (CurrContext_ptr->size() == 0) 
    4870         ERROR("CObjectFactory::GetObject(const U * const object)", 
    4971               << "please define current context id !"); 
    5072      std::vector<boost::shared_ptr<U> > & vect = 
    51                      U::AllVectObj[CObjectFactory::CurrContext]; 
     73                     (*U::AllVectObj)[*CObjectFactory::CurrContext_ptr]; 
    5274 
    5375      typename std::vector<boost::shared_ptr<U> >::const_iterator 
     
    7092      boost::shared_ptr<U> CObjectFactory::GetObject(const StdString & id) 
    7193   { 
    72       if (CurrContext.size() == 0) 
     94      if(U::AllMapObj  == NULL) return (boost::shared_ptr<U>()); 
     95 
     96      if (CurrContext_ptr->size() == 0) 
    7397         ERROR("CObjectFactory::GetObject(const StdString & id)", 
    7498               << "[ id = " << id << " ] please define current context id !"); 
     
    77101               << "[ id = " << id << ", U = " << U::GetName() << " ] " 
    78102               << "object was not found."); 
    79       return (U::AllMapObj[CObjectFactory::CurrContext][id]); 
     103      return (*U::AllMapObj)[*CObjectFactory::CurrContext_ptr][id]; 
    80104   } 
    81105 
     
    83107      boost::shared_ptr<U> CObjectFactory::GetObject(const StdString & context, const StdString & id) 
    84108   { 
     109      if(U::AllMapObj  == NULL) return (boost::shared_ptr<U>()); 
     110 
    85111      if (!CObjectFactory::HasObject<U>(context,id)) 
    86112         ERROR("CObjectFactory::GetObject(const StdString & id)", 
    87113               << "[ id = " << id << ", U = " << U::GetName() <<", context = "<<context<< " ] " 
    88114               << "object was not found."); 
    89       return (U::AllMapObj[context][id]); 
     115 
     116      return (*U::AllMapObj)[context][id]; 
    90117   } 
    91118 
     
    93120   boost::shared_ptr<U> CObjectFactory::CreateObject(const StdString& id) 
    94121   { 
    95       if (CurrContext.empty()) 
     122      if(U::AllVectObj == NULL) U::AllVectObj = new xios_map<StdString, std::vector<boost::shared_ptr<U> > >; 
     123      if(U::AllMapObj  == NULL) U::AllMapObj  = new xios_map<StdString, xios_map<StdString, boost::shared_ptr<U> > >; 
     124 
     125       
     126      if (CurrContext_ptr->empty()) 
    96127         ERROR("CObjectFactory::CreateObject(const StdString& id)", 
    97128               << "[ id = " << id << " ] please define current context id !"); 
     
    105136         boost::shared_ptr<U> value(new U(id.empty() ? CObjectFactory::GenUId<U>() : id)); 
    106137 
    107          U::AllVectObj[CObjectFactory::CurrContext].insert(U::AllVectObj[CObjectFactory::CurrContext].end(), value); 
    108          U::AllMapObj[CObjectFactory::CurrContext].insert(std::make_pair(value->getId(), value)); 
     138         (* U::AllVectObj)[*CObjectFactory::CurrContext_ptr].insert((*U::AllVectObj)[*CObjectFactory::CurrContext_ptr].end(), value); 
     139         (* U::AllMapObj) [*CObjectFactory::CurrContext_ptr].insert(std::make_pair(value->getId(), value)); 
    109140 
    110141         return value; 
     
    116147         CObjectFactory::GetObjectVector(const StdString & context) 
    117148   { 
    118       return (U::AllVectObj[context]); 
     149      if(U::AllVectObj != NULL)  
     150       
     151      return (*U::AllVectObj)[context]; 
    119152   } 
    120153 
     
    130163   { 
    131164      StdOStringStream oss; 
    132       oss << GetUIdBase<U>() << U::GenId[CObjectFactory::CurrContext]++; 
     165      if(U::GenId == NULL) U::GenId = new xios_map< StdString, long int >; 
     166      oss << GetUIdBase<U>() << (*U::GenId)[*CObjectFactory::CurrContext_ptr]++; 
    133167      return oss.str(); 
    134168   } 
  • XIOS/dev/branch_yushan_merged/src/object_template.hpp

    r1117 r1134  
    100100 
    101101         /// Propriétés statiques /// 
    102          static xios_map<StdString, 
    103                 xios_map<StdString, 
    104                 boost::shared_ptr<DerivedType> > > AllMapObj; 
    105          static xios_map<StdString, 
    106                 std::vector<boost::shared_ptr<DerivedType> > > AllVectObj; 
     102         // bkp 
     103         // static xios_map<StdString, 
     104         //        xios_map<StdString, 
     105         //        boost::shared_ptr<DerivedType> > > AllMapObj; 
     106         // static xios_map<StdString, 
     107         //        std::vector<boost::shared_ptr<DerivedType> > > AllVectObj; 
    107108 
    108          static xios_map< StdString, long int > GenId ; 
     109         // static xios_map< StdString, long int > GenId ; 
     110 
     111 
     112         static xios_map<StdString, xios_map<StdString, boost::shared_ptr<DerivedType> > > *AllMapObj; 
     113         static xios_map<StdString, std::vector<boost::shared_ptr<DerivedType> > > *AllVectObj; 
     114         static xios_map< StdString, long int > *GenId; 
     115         #pragma omp threadprivate(AllMapObj, AllVectObj, GenId) 
    109116 
    110117   }; // class CObjectTemplate 
  • XIOS/dev/branch_yushan_merged/src/object_template_impl.hpp

    r1117 r1134  
    2424      xios_map<StdString, 
    2525      xios_map<StdString, 
    26       boost::shared_ptr<T> > > CObjectTemplate<T>::AllMapObj; 
     26      boost::shared_ptr<T> > > *CObjectTemplate<T>::AllMapObj = 0; 
    2727 
    2828   template <class T> 
    2929      xios_map<StdString, 
    30       std::vector<boost::shared_ptr<T> > > CObjectTemplate<T>::AllVectObj; 
    31  
    32    template <class T> 
    33       xios_map<StdString,long int> CObjectTemplate<T>::GenId; 
     30      std::vector<boost::shared_ptr<T> > > *CObjectTemplate<T>::AllVectObj = 0; 
     31 
     32   template <class T> 
     33      xios_map<StdString,long int> *CObjectTemplate<T>::GenId = 0; 
    3434 
    3535   template <class T> 
     
    6666         CObjectTemplate<T>::GetAllVectobject(const StdString & contextId) 
    6767   { 
    68       return (CObjectTemplate<T>::AllVectObj[contextId]); 
     68      return (CObjectTemplate<T>::AllVectObj->at(contextId)); 
    6969   } 
    7070 
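AllMapObj, AllVectObj and GenId are now pointers initialized to 0 and declared threadprivate, so every OpenMP thread starts from its own null copy and must allocate its own container on first use; that is why the factory methods above test for NULL before dereferencing. A tiny stand-alone demonstration of that semantics (not XIOS code):

    #include <cstdio>
    #include <omp.h>

    // Each OpenMP thread sees its own copy of a threadprivate pointer,
    // initialized from the file-scope initializer (here: 0).
    static int* counter_ptr = 0;
    #pragma omp threadprivate(counter_ptr)

    int main()
    {
      #pragma omp parallel num_threads(4)
      {
        if (counter_ptr == 0) counter_ptr = new int(0);   // lazy, per-thread allocation
        ++(*counter_ptr);
        std::printf("thread %d counter %d at %p\n",
                    omp_get_thread_num(), *counter_ptr, (void*)counter_ptr);
      }
      return 0;
    }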
  • XIOS/dev/branch_yushan_merged/src/parse_expr/lex_parser.cpp

    r1038 r1134  
    347347extern char *yytext; 
    348348#define yytext_ptr yytext 
     349 
    349350static yyconst flex_int16_t yy_nxt[][128] = 
    350351    { 
  • XIOS/dev/branch_yushan_merged/src/parse_expr/yacc_parser.cpp

    r1038 r1134  
    8080} 
    8181 
    82   IFilterExprNode* parsed; 
    83   std::string globalInputText; 
    84   size_t globalReadOffset = 0; 
    85  
     82  static IFilterExprNode* parsed; 
     83  static std::string globalInputText; 
     84  static std::string *globalInputText_ptr = 0; 
     85  static size_t globalReadOffset = 0; 
     86  #pragma omp threadprivate(parsed, globalInputText_ptr, globalReadOffset) 
     87   
    8688  int readInputForLexer(char* buffer, size_t* numBytesRead, size_t maxBytesToRead) 
    8789  { 
     90    if(globalInputText_ptr == 0) globalInputText_ptr = new std::string; 
    8891    size_t numBytesToRead = maxBytesToRead; 
    89     size_t bytesRemaining = globalInputText.length()-globalReadOffset; 
     92    size_t bytesRemaining = (*globalInputText_ptr).length()-globalReadOffset; 
    9093    size_t i; 
    9194    if (numBytesToRead > bytesRemaining) numBytesToRead = bytesRemaining; 
    92     for (i = 0; i < numBytesToRead; i++) buffer[i] = globalInputText.c_str()[globalReadOffset + i]; 
     95    for (i = 0; i < numBytesToRead; i++) buffer[i] = (*globalInputText_ptr).c_str()[globalReadOffset + i]; 
    9396    *numBytesRead = numBytesToRead; 
    9497    globalReadOffset += numBytesToRead; 
     
    20022005  IFilterExprNode* parseExpr(const string& strExpr) 
    20032006  { 
    2004     globalInputText = strExpr; 
    2005     globalReadOffset = 0; 
    2006     yyparse(); 
     2007    #pragma omp critical (_parser) 
     2008    { 
     2009      if(globalInputText_ptr == 0) globalInputText_ptr = new std::string; 
     2010      (*globalInputText_ptr).assign (strExpr); 
     2011      globalReadOffset = 0; 
     2012      yyparse(); 
     2013    } 
    20072014    return parsed; 
    20082015  } 
     
    20102017 
    20112018 
     2019 
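The parser hunks make the lexer input buffer threadprivate and wrap the whole parse in #pragma omp critical, because the flex/bison-generated yyparse() keeps internal static state and is not reentrant. The shape of that protection, sketched independently of the generated code (parse_not_reentrant is a stand-in for yyparse):

    #include <string>

    static std::string* input_ptr = 0;            // per-thread copy of the expression text
    #pragma omp threadprivate(input_ptr)

    // Stand-in for the generated yyparse(): assumed to read input_ptr via the lexer callback.
    static void parse_not_reentrant() { /* generated parser would run here */ }

    void parse_expression(const std::string& expr)
    {
      #pragma omp critical (_parser)              // only one thread may drive the parser at a time
      {
        if (input_ptr == 0) input_ptr = new std::string;
        input_ptr->assign(expr);
        parse_not_reentrant();
      }
    }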
  • XIOS/dev/branch_yushan_merged/src/parse_expr/yacc_parser.yacc

    r1038 r1134  
    1515} 
    1616 
    17   IFilterExprNode* parsed; 
    18   std::string globalInputText; 
    19   size_t globalReadOffset = 0; 
    20  
     17  static IFilterExprNode* parsed; 
     18  static std::string globalInputText; 
     19  static std::string *globalInputText_ptr = 0; 
     20  static size_t globalReadOffset = 0; 
     21  #pragma omp threadprivate(parsed, globalInputText_ptr, globalReadOffset) 
     22   
    2123  int readInputForLexer(char* buffer, size_t* numBytesRead, size_t maxBytesToRead) 
    2224  { 
     25    if(globalInputText_ptr == 0) globalInputText_ptr = new std::string; 
    2326    size_t numBytesToRead = maxBytesToRead; 
    24     size_t bytesRemaining = globalInputText.length()-globalReadOffset; 
     27    size_t bytesRemaining = (*globalInputText_ptr).length()-globalReadOffset; 
    2528    size_t i; 
    2629    if (numBytesToRead > bytesRemaining) numBytesToRead = bytesRemaining; 
    27     for (i = 0; i < numBytesToRead; i++) buffer[i] = globalInputText.c_str()[globalReadOffset + i]; 
     30    for (i = 0; i < numBytesToRead; i++) buffer[i] = (*globalInputText_ptr).c_str()[globalReadOffset + i]; 
    2831    *numBytesRead = numBytesToRead; 
    2932    globalReadOffset += numBytesToRead; 
     
    145148  IFilterExprNode* parseExpr(const string& strExpr) 
    146149  { 
    147     globalInputText = strExpr; 
    148     globalReadOffset = 0; 
    149     yyparse(); 
     150    #pragma omp critical (_parser) 
     151    { 
     152      if(globalInputText_ptr == 0) globalInputText_ptr = new std::string; 
     153      (*globalInputText_ptr).assign (strExpr); 
     154      globalReadOffset = 0; 
     155      yyparse(); 
     156    } 
    150157    return parsed; 
    151158  } 
  • XIOS/dev/branch_yushan_merged/src/policy.hpp

    r855 r1134  
    3131{ 
    3232protected: 
    33   DivideAdaptiveComm(const MPI_Comm& mpiComm); 
     33  DivideAdaptiveComm(const ep_lib::MPI_Comm& mpiComm); 
    3434 
    3535  void computeMPICommLevel(); 
     
    4141 
    4242protected: 
    43   const MPI_Comm& internalComm_; 
     43  const ep_lib::MPI_Comm& internalComm_; 
    4444  std::vector<std::vector<int> > groupParentsBegin_; 
    4545  std::vector<std::vector<int> > nbInGroupParents_; 
  • XIOS/dev/branch_yushan_merged/src/registry.cpp

    r696 r1134  
    11#include "registry.hpp" 
    22#include "type.hpp" 
    3 #include <mpi.hpp> 
    43#include <fstream> 
    54#include <sstream> 
     
    258257  void CRegistry::hierarchicalGatherRegistry(void) 
    259258  { 
    260     hierarchicalGatherRegistry(communicator) ; 
     259   // hierarchicalGatherRegistry(communicator) ; 
     260    gatherRegistry(communicator) ; 
    261261  } 
    262262 
  • XIOS/dev/branch_yushan_merged/src/registry.hpp

    r700 r1134  
    66#include "mpi.hpp" 
    77#include "message.hpp" 
     8#ifdef _usingEP 
     9#include "ep_declaration.hpp" 
     10#endif 
     11 
    812 
    913// Those two headers can be replaced by the C++11 equivalent in the future 
     
    2327 
    2428/** Constructor, the communicator is used for bcast or gather operation between MPI processes */ 
    25       CRegistry(const MPI_Comm& comm=MPI_COMM_WORLD) : communicator(comm) {} 
     29      CRegistry(const ep_lib::MPI_Comm& comm=MPI_COMM_WORLD) : communicator(comm) {} 
    2630 
    2731/** Copy constructor */ 
     
    120124 
    121125/** MPI communicator used for broadcast and gather operation */ 
    122       MPI_Comm communicator ; 
     126      ep_lib::MPI_Comm communicator ; 
    123127  } ; 
    124128 
  • XIOS/dev/branch_yushan_merged/src/server.cpp

    r1032 r1134  
    99#include <boost/functional/hash.hpp> 
    1010#include <boost/algorithm/string.hpp> 
    11 #include "mpi.hpp" 
    1211#include "tracer.hpp" 
    1312#include "timer.hpp" 
     
    2625    bool CServer::finished=false ; 
    2726    bool CServer::is_MPI_Initialized ; 
     27 
     28     
    2829    CEventScheduler* CServer::eventScheduler = 0; 
    2930    
    3031    void CServer::initialize(void) 
    3132    { 
    32       int initialized ; 
    33       MPI_Initialized(&initialized) ; 
    34       if (initialized) is_MPI_Initialized=true ; 
    35       else is_MPI_Initialized=false ; 
    36  
    3733      // Not using OASIS 
    3834      if (!CXios::usingOasis) 
    3935      { 
    4036 
    41         if (!is_MPI_Initialized) 
    42         { 
    43           MPI_Init(NULL, NULL); 
    44         } 
    4537        CTimer::get("XIOS").resume() ; 
    4638 
     
    5042        unsigned long* hashAll ; 
    5143 
    52 //        int rank ; 
     44 
    5345        int size ; 
    5446        int myColor ; 
     
    7769 
    7870        myColor=colors[hashServer] ; 
    79         MPI_Comm_split(MPI_COMM_WORLD,myColor,rank,&intraComm) ; 
    80  
     71 
     72 
     73        MPI_Comm_split(CXios::globalComm,myColor,rank,&intraComm) ; 
     74 
     75         
    8176        int serverLeader=leaders[hashServer] ; 
    8277        int clientLeader; 
    8378 
    8479         serverLeader=leaders[hashServer] ; 
    85          for(it=leaders.begin();it!=leaders.end();it++) 
     80         for(it=leaders.begin();it!=leaders.end();++it) 
    8681         { 
    8782           if (it->first!=hashServer) 
     
    10499      else 
    105100      { 
    106 //        int rank ,size; 
    107101        int size; 
    108102        if (!is_MPI_Initialized) oasis_init(CXios::xiosCodeId); 
     
    135129      } 
    136130 
    137 //      int rank; 
    138131      MPI_Comm_rank(intraComm,&rank) ; 
    139132      if (rank==0) isRoot=true; 
     
    149142      delete eventScheduler ; 
    150143 
    151       for (std::list<MPI_Comm>::iterator it = contextInterComms.begin(); it != contextInterComms.end(); it++) 
     144      for (std::list<MPI_Comm>::iterator it = contextInterComms.begin(); it != contextInterComms.end(); ++it) 
    152145        MPI_Comm_free(&(*it)); 
    153       for (std::list<MPI_Comm>::iterator it = interComm.begin(); it != interComm.end(); it++) 
     146      for (std::list<MPI_Comm>::iterator it = interComm.begin(); it != interComm.end(); ++it) 
    154147        MPI_Comm_free(&(*it)); 
     148 
    155149      MPI_Comm_free(&intraComm); 
    156150 
     
    158152      { 
    159153        if (CXios::usingOasis) oasis_finalize(); 
    160         else MPI_Finalize() ; 
     154        //else  {MPI_Finalize() ;} 
    161155      } 
     156 
     157       
    162158      report(0)<<"Performance report : Time spent for XIOS : "<<CTimer::get("XIOS server").getCumulatedTime()<<endl  ; 
    163159      report(0)<<"Performance report : Time spent in processing events : "<<CTimer::get("Process events").getCumulatedTime()<<endl  ; 
     
    180176         { 
    181177           listenRootContext(); 
    182            if (!finished) listenRootFinalize() ; 
     178           if (!finished)  
     179           { 
     180             listenRootFinalize() ; 
     181           } 
    183182         } 
    184183 
    185184         contextEventLoop() ; 
    186185         if (finished && contextList.empty()) stop=true ; 
     186          
    187187         eventScheduler->checkEvent() ; 
    188188       } 
     189        
     190        
    189191       CTimer::get("XIOS server").suspend() ; 
    190192     } 
     
    196198        int flag ; 
    197199 
    198         for(it=interComm.begin();it!=interComm.end();it++) 
     200        for(it=interComm.begin();it!=interComm.end();++it) 
    199201        { 
    200202           MPI_Status status ; 
     
    206208              MPI_Recv(&msg,1,MPI_INT,0,0,*it,&status) ; 
    207209              info(20)<<" CServer : Receive client finalize"<<endl ; 
     210 
    208211              MPI_Comm_free(&(*it)); 
    209212              interComm.erase(it) ; 
     
    259262       { 
    260263         traceOff() ; 
     264         #ifdef _usingEP 
     265         MPI_Iprobe(-1,1,CXios::globalComm, &flag, &status) ; 
     266         #else 
    261267         MPI_Iprobe(MPI_ANY_SOURCE,1,CXios::globalComm, &flag, &status) ; 
     268         #endif 
    262269         traceOn() ; 
     270          
    263271         if (flag==true) 
    264272         { 
     273           #ifdef _usingMPI 
    265274           rank=status.MPI_SOURCE ; 
     275           #elif _usingEP 
     276           rank= status.ep_src ; 
     277           #endif 
    266278           MPI_Get_count(&status,MPI_CHAR,&count) ; 
    267279           buffer=new char[count] ; 
     
    277289         if (flag==true) 
    278290         { 
     291           #ifdef _usingMPI 
    279292           rank=status.MPI_SOURCE ; 
     293           #elif _usingEP 
     294           rank= status.ep_src ; 
     295           #endif 
    280296           MPI_Get_count(&status,MPI_CHAR,&count) ; 
    281297           recvContextMessage((void*)buffer,count) ; 
     
    399415       bool finished ; 
    400416       map<string,CContext*>::iterator it ; 
    401        for(it=contextList.begin();it!=contextList.end();it++) 
     417       for(it=contextList.begin();it!=contextList.end();++it) 
    402418       { 
    403419         finished=it->second->checkBuffersAndListen(); 
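The server listener now branches on _usingEP versus _usingMPI to obtain the message source: standard MPI exposes it as status.MPI_SOURCE, while the endpoint status uses ep_src. A minimal standard-MPI version of that probe-then-receive step is sketched below; the EP-specific part is only indicated in a comment, since ep_lib is not shown in this changeset excerpt.

    #include <mpi.h>
    #include <vector>

    // Probe for a message from any client on tag 1, then receive it from whoever sent it.
    void poll_one_message(MPI_Comm comm)
    {
      int flag = 0;
      MPI_Status status;
      MPI_Iprobe(MPI_ANY_SOURCE, 1, comm, &flag, &status);   // non-blocking probe
      if (flag)
      {
        int count = 0;
        MPI_Get_count(&status, MPI_CHAR, &count);
        int source = status.MPI_SOURCE;          // with ep_lib this would be status.ep_src
        std::vector<char> buffer(count);
        MPI_Recv(&buffer[0], count, MPI_CHAR, source, 1, comm, &status);
        // ... hand buffer to the context registration / event logic ...
      }
    }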
  • XIOS/dev/branch_yushan_merged/src/server.hpp

    r697 r1134  
    77#include "mpi.hpp" 
    88#include "event_scheduler.hpp" 
     9 
     10#ifdef _usingEP 
     11#include "ep_declaration.hpp" 
     12#endif 
    913 
    1014namespace xios 
  • XIOS/dev/branch_yushan_merged/src/test/test_client.f90

    r794 r1134  
    3535 
    3636  CALL MPI_INIT(ierr) 
     37  CALL init_wait 
    3738 
    38   CALL init_wait 
     39  CALL MPI_COMM_RANK(MPI_COMM_WORLD,rank,ierr) 
     40  if(rank < 2) then 
    3941 
    4042!!! XIOS Initialization (get the local communicator) 
     
    7274 
    7375  CALL xios_context_initialize("test",comm) 
     76 
    7477  CALL xios_get_handle("test",ctx_hdl) 
    7578  CALL xios_set_current_context(ctx_hdl) 
     
    125128  CALL xios_is_defined_field_attr("field_A",enabled=ok) 
    126129  PRINT *,"field_A : attribute enabled is defined ? ",ok 
     130   
    127131  CALL xios_close_context_definition() 
    128132 
    129133  PRINT*,"field field_A is active ? ",xios_field_is_active("field_A") 
     134 
     135  call MPI_Barrier(comm, ierr) 
     136 
    130137  DO ts=1,24*10 
    131138    CALL xios_update_calendar(ts) 
    132139    CALL xios_send_field("field_A",field_A) 
    133     CALL wait_us(5000) ; 
     140    CALL wait_us(5000) 
    134141  ENDDO 
    135142 
     
    141148 
    142149  CALL xios_finalize() 
     150  print *, "Client : xios_finalize " 
     151 
     152    else 
     153 
     154    CALL xios_init_server 
     155    print *, "Server : xios_finalize " 
     156   
     157    endif 
     158     
    143159 
    144160  CALL MPI_FINALIZE(ierr) 
  • XIOS/dev/branch_yushan_merged/src/test/test_complete.f90

    r787 r1134  
    55  IMPLICIT NONE 
    66  INCLUDE "mpif.h" 
    7   INTEGER :: rank 
     7  INTEGER :: rank, size 
    88  INTEGER :: size_loc 
    99  INTEGER :: ierr 
     
    2828  INTEGER, ALLOCATABLE :: kindex(:) 
    2929  INTEGER :: ni,ibegin,iend,nj,jbegin,jend 
    30   INTEGER :: i,j,l,ts,n, nb_pt 
     30  INTEGER :: i,j,l,ts,n, nb_pt, provided 
    3131 
    3232!!! MPI Initialization 
    3333 
    34   CALL MPI_INIT(ierr) 
     34  CALL MPI_INIT_THREAD(3, provided, ierr) 
     35    if(provided .NE. 3) then 
     36      print*, "provided thread level = ", provided 
     37      call MPI_Abort() 
     38    endif  
     39 
     40   
    3541 
    3642  CALL init_wait 
     43 
     44  CALL MPI_COMM_RANK(MPI_COMM_WORLD,rank,ierr) 
     45  CALL MPI_COMM_SIZE(MPI_COMM_WORLD,size,ierr) 
     46  if(rank < size-1) then 
    3747 
    3848!!! XIOS Initialization (get the local communicator) 
     
    220230!#################################################################################### 
    221231 
    222     DO ts=1,24*10 
     232    DO ts=1,24*2 
     233    !DO ts=1,24 
    223234 
    224235      CALL xios_get_handle("atmosphere",ctx_hdl) 
     
    255266!!! Fin des contextes 
    256267 
    257     CALL xios_context_finalize() 
    258     CALL xios_get_handle("atmosphere",ctx_hdl) 
     268 
     269    CALL xios_get_handle("surface",ctx_hdl)  
     270 
    259271    CALL xios_set_current_context(ctx_hdl) 
    260272    CALL xios_context_finalize() 
    261273 
     274    print *, "xios_context_finalize(surface)"  
     275 
     276     CALL xios_get_handle("atmosphere",ctx_hdl) 
     277 
     278     CALL xios_set_current_context(ctx_hdl) 
     279 
     280     CALL xios_context_finalize() 
     281 
     282     print *, "xios_context_finalize(atmosphere)" 
     283 
     284     
     285 
     286!!! Fin de XIOS 
     287 
     288     
     289 
     290    CALL xios_finalize() 
     291 
    262292    DEALLOCATE(lon, lat, field_A_atm, lonvalue) 
    263293    DEALLOCATE(kindex, field_A_srf) 
    264294 
    265 !!! Fin de XIOS 
     295     print *, "Client : xios_finalize " 
    266296 
    267297    CALL MPI_COMM_FREE(comm, ierr) 
    268298 
    269     CALL xios_finalize() 
     299  else 
     300 
     301    CALL xios_init_server 
     302    print *, "Server : xios_finalize " 
     303   
     304    endif 
     305 
    270306 
    271307    CALL MPI_FINALIZE(ierr) 
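Both test programs now request MPI_THREAD_MULTIPLE (test_complete.f90 via MPI_INIT_THREAD) and split the ranks of MPI_COMM_WORLD by role: test_client.f90 keeps ranks below 2 as clients, test_complete.f90 keeps all but the last rank as clients, and the remaining ranks call xios_init_server. The same role split, sketched in C++ with plain MPI (the xios_* calls are indicated only as comments, since they belong to the Fortran interface used in the tests):

    #include <mpi.h>
    #include <cstdio>

    int main(int argc, char** argv)
    {
      int provided = 0, rank = 0, size = 0;
      MPI_Init_thread(&argc, &argv, MPI_THREAD_MULTIPLE, &provided);
      if (provided != MPI_THREAD_MULTIPLE) MPI_Abort(MPI_COMM_WORLD, 1);

      MPI_Comm_rank(MPI_COMM_WORLD, &rank);
      MPI_Comm_size(MPI_COMM_WORLD, &size);

      if (rank < size - 1)
      {
        // client role: xios_initialize, context setup, time loop, xios_finalize
        std::printf("rank %d: client\n", rank);
      }
      else
      {
        // server role: xios_init_server (returns once the clients have finalized)
        std::printf("rank %d: server\n", rank);
      }

      MPI_Finalize();
      return 0;
    }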
  • XIOS/dev/branch_yushan_merged/src/timer.cpp

    r652 r1134  
    88{ 
    99  std::map<std::string,CTimer> CTimer::allTimer; 
     10  std::map<std::string,CTimer> *CTimer::allTimer_ptr = 0; 
    1011   
    1112  CTimer::CTimer(const std::string& name_) : name(name_)  
     
    5253  CTimer& CTimer::get(const std::string name) 
    5354  { 
    54     std::map<std::string,CTimer>::iterator it = allTimer.find(name); 
    55     if (it == allTimer.end()) 
    56       it = allTimer.insert(std::make_pair(name, CTimer(name))).first; 
     55    // bkp 
     56    // std::map<std::string,CTimer>::iterator it = allTimer.find(name); 
     57    // if (it == allTimer.end()) 
     58    //   it = allTimer.insert(std::make_pair(name, CTimer(name))).first; 
     59    // return it->second; 
     60 
     61    if(allTimer_ptr == NULL) allTimer_ptr = new std::map<std::string,CTimer>; 
     62 
     63    std::map<std::string,CTimer>::iterator it = (*allTimer_ptr).find(name); 
     64    if (it == (*allTimer_ptr).end()) 
     65      it = (*allTimer_ptr).insert(std::make_pair(name, CTimer(name))).first; 
    5766    return it->second; 
    5867  } 
  • XIOS/dev/branch_yushan_merged/src/timer.hpp

    r688 r1134  
    2121      double getCumulatedTime(void); 
    2222      static std::map<std::string,CTimer> allTimer; 
     23 
     24      static std::map<std::string,CTimer> *allTimer_ptr; 
     25      #pragma omp threadprivate(allTimer_ptr) 
     26       
    2327      static double getTime(void); 
    2428      static CTimer& get(std::string name); 
  • XIOS/dev/branch_yushan_merged/src/transformation/Functions/reduction.cpp

    r979 r1134  
    99 
    1010CReductionAlgorithm::CallBackMap* CReductionAlgorithm::reductionCreationCallBacks_ = 0; 
    11 std::map<StdString,EReductionType> CReductionAlgorithm::ReductionOperations = std::map<StdString,EReductionType>(); 
     11//std::map<StdString,EReductionType> CReductionAlgorithm::ReductionOperations = std::map<StdString,EReductionType>(); 
     12std::map<StdString,EReductionType> *CReductionAlgorithm::ReductionOperations_ptr = 0;  
     13 
    1214bool CReductionAlgorithm::initReductionOperation(std::map<StdString,EReductionType>& m) 
    1315{ 
     
    2931} 
    3032 
    31 bool CReductionAlgorithm::_dummyInit = CReductionAlgorithm::initReductionOperation(CReductionAlgorithm::ReductionOperations); 
     33 
     34bool CReductionAlgorithm::initReductionOperation() 
     35{ 
     36  if(CReductionAlgorithm::ReductionOperations_ptr == NULL) CReductionAlgorithm::ReductionOperations_ptr = new std::map<StdString,EReductionType>(); 
     37  // So so stupid way to intialize operation but it works ... 
     38  (*CReductionAlgorithm::ReductionOperations_ptr)["sum"] = TRANS_REDUCE_SUM; 
     39  CSumReductionAlgorithm::registerTrans(); 
     40 
     41  (*CReductionAlgorithm::ReductionOperations_ptr)["min"] = TRANS_REDUCE_MIN; 
     42  CMinReductionAlgorithm::registerTrans(); 
     43 
     44  (*CReductionAlgorithm::ReductionOperations_ptr)["max"] = TRANS_REDUCE_MAX; 
     45  CMaxReductionAlgorithm::registerTrans(); 
     46 
     47  (*CReductionAlgorithm::ReductionOperations_ptr)["extract"] = TRANS_REDUCE_EXTRACT; 
     48  CExtractReductionAlgorithm::registerTrans(); 
     49 
     50  (*CReductionAlgorithm::ReductionOperations_ptr)["average"] = TRANS_REDUCE_AVERAGE; 
     51  CAverageReductionAlgorithm::registerTrans(); 
     52} 
     53 
     54//bool CReductionAlgorithm::_dummyInit = CReductionAlgorithm::initReductionOperation(CReductionAlgorithm::ReductionOperations); 
     55bool CReductionAlgorithm::_dummyInit = CReductionAlgorithm::initReductionOperation(); 
    3256 
    3357CReductionAlgorithm* CReductionAlgorithm::createOperation(EReductionType reduceType) 
    3458{ 
    3559  int reduceTypeInt = reduceType; 
     60  //if (0 == reductionCreationCallBacks_) 
     61  //  reductionCreationCallBacks_ = new CallBackMap(); 
     62 
    3663  CallBackMap::const_iterator it = (*reductionCreationCallBacks_).find(reduceType); 
    3764  if ((*reductionCreationCallBacks_).end() == it) 
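initReductionOperation() now both fills the per-thread operation map and calls each algorithm's registerTrans(), which populates reductionCreationCallBacks_ with a factory function keyed by the reduction type. The registry-of-creators pattern behind that is sketched below with illustrative names (Op, SumOp, registerOps are not XIOS identifiers):

    #include <map>

    struct Op { virtual ~Op() {} };
    struct SumOp : Op { static Op* create() { return new SumOp(); } };
    struct MinOp : Op { static Op* create() { return new MinOp(); } };

    typedef Op* (*Creator)();
    static std::map<int, Creator>* callbacks_ptr = 0;   // per-thread callback registry
    #pragma omp threadprivate(callbacks_ptr)

    enum { REDUCE_SUM = 0, REDUCE_MIN = 1 };

    void registerOps()
    {
      if (callbacks_ptr == 0) callbacks_ptr = new std::map<int, Creator>();
      (*callbacks_ptr)[REDUCE_SUM] = &SumOp::create;
      (*callbacks_ptr)[REDUCE_MIN] = &MinOp::create;
    }

    Op* createOp(int type)
    {
      if (callbacks_ptr == 0) return 0;                  // nothing registered in this thread yet
      std::map<int, Creator>::const_iterator it = callbacks_ptr->find(type);
      return (it == callbacks_ptr->end()) ? 0 : it->second();
    }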
  • XIOS/dev/branch_yushan_merged/src/transformation/Functions/reduction.hpp

    r1076 r1134  
    2323{ 
    2424public: 
    25   static std::map<StdString,EReductionType> ReductionOperations; 
     25  //static std::map<StdString,EReductionType> ReductionOperations; 
     26  static std::map<StdString,EReductionType> *ReductionOperations_ptr; 
     27  #pragma omp threadprivate(ReductionOperations_ptr) 
    2628 
    2729public: 
     
    6062  typedef std::map<EReductionType, CreateOperationCallBack> CallBackMap; 
    6163  static CallBackMap* reductionCreationCallBacks_; 
     64  #pragma omp threadprivate(reductionCreationCallBacks_) 
    6265 
    6366  static bool registerOperation(EReductionType reduceType, CreateOperationCallBack createFn); 
     
    6669protected: 
    6770  static bool initReductionOperation(std::map<StdString,EReductionType>& m); 
     71  static bool initReductionOperation(); 
    6872  static bool _dummyInit; 
     73  #pragma omp threadprivate(_dummyInit) 
    6974}; 
    7075 
  • XIOS/dev/branch_yushan_merged/src/transformation/axis_algorithm_extract_domain.cpp

    r1076 r1134  
    6262 
    6363  pos_ = algo->position; 
    64   reduction_ = CReductionAlgorithm::createOperation(CReductionAlgorithm::ReductionOperations[op]); 
     64  reduction_ = CReductionAlgorithm::createOperation((*CReductionAlgorithm::ReductionOperations_ptr)[op]); 
    6565} 
    6666 
  • XIOS/dev/branch_yushan_merged/src/transformation/axis_algorithm_inverse.cpp

    r936 r1134  
    173173 
    174174  // Sending global index of grid source to corresponding process as well as the corresponding mask 
    175   std::vector<MPI_Request> requests; 
    176   std::vector<MPI_Status> status; 
     175  std::vector<ep_lib::MPI_Request> requests; 
     176  std::vector<ep_lib::MPI_Status> status; 
    177177  boost::unordered_map<int, unsigned long* > recvGlobalIndexSrc; 
    178178  boost::unordered_map<int, double* > sendValueToDest; 
     
    184184    sendValueToDest[recvRank] = new double [recvSize]; 
    185185 
    186     requests.push_back(MPI_Request()); 
     186    requests.push_back(ep_lib::MPI_Request()); 
    187187    MPI_Irecv(recvGlobalIndexSrc[recvRank], recvSize, MPI_UNSIGNED_LONG, recvRank, 46, client->intraComm, &requests.back()); 
    188188  } 
     
    206206 
    207207    // Send global index source and mask 
    208     requests.push_back(MPI_Request()); 
     208    requests.push_back(ep_lib::MPI_Request()); 
    209209    MPI_Isend(sendGlobalIndexSrc[sendRank], sendSize, MPI_UNSIGNED_LONG, sendRank, 46, client->intraComm, &requests.back()); 
    210210  } 
     
    213213  MPI_Waitall(requests.size(), &requests[0], &status[0]); 
    214214 
    215  
    216   std::vector<MPI_Request>().swap(requests); 
    217   std::vector<MPI_Status>().swap(status); 
     215  std::vector<ep_lib::MPI_Request>().swap(requests); 
     216  std::vector<ep_lib::MPI_Status>().swap(status); 
    218217 
    219218  // Okie, on destination side, we will wait for information of masked index of source 
     
    223222    int recvSize = itSend->second; 
    224223 
    225     requests.push_back(MPI_Request()); 
     224    requests.push_back(ep_lib::MPI_Request()); 
    226225    MPI_Irecv(recvValueFromSrc[recvRank], recvSize, MPI_DOUBLE, recvRank, 48, client->intraComm, &requests.back()); 
    227226  } 
     
    241240    } 
    242241    // Okie, now inform the destination which source index are masked 
    243     requests.push_back(MPI_Request()); 
     242    requests.push_back(ep_lib::MPI_Request()); 
    244243    MPI_Isend(sendValueToDest[recvRank], recvSize, MPI_DOUBLE, recvRank, 48, client->intraComm, &requests.back()); 
    245244  } 
    246245  status.resize(requests.size()); 
    247246  MPI_Waitall(requests.size(), &requests[0], &status[0]); 
    248  
    249247 
    250248  size_t nGloAxisDest = axisDest_->n_glo.getValue() - 1; 
  • XIOS/dev/branch_yushan_merged/src/transformation/axis_algorithm_inverse.hpp

    r933 r1134  
    1212#include "axis_algorithm_transformation.hpp" 
    1313#include "transformation.hpp" 
    14  
     14#ifdef _usingEP 
     15#include "ep_declaration.hpp" 
     16#endif 
     17    
    1518namespace xios { 
    1619 
  • XIOS/dev/branch_yushan_merged/src/transformation/axis_algorithm_reduce_domain.cpp

    r1076 r1134  
    7070 
    7171  dir_ = (CReduceDomainToAxis::direction_attr::iDir == algo->direction)  ? iDir : jDir; 
    72   reduction_ = CReductionAlgorithm::createOperation(CReductionAlgorithm::ReductionOperations[op]); 
     72  reduction_ = CReductionAlgorithm::createOperation((*CReductionAlgorithm::ReductionOperations_ptr)[op]); 
    7373} 
    7474 
  • XIOS/dev/branch_yushan_merged/src/transformation/domain_algorithm_interpolate.cpp

    r1114 r1134  
    405405  CContextClient* client=context->client; 
    406406 
    407   MPI_Comm poleComme(MPI_COMM_NULL); 
    408   MPI_Comm_split(client->intraComm, interMapValuePole.empty() ? MPI_UNDEFINED : 1, 0, &poleComme); 
     407  ep_lib::MPI_Comm poleComme(MPI_COMM_NULL); 
     408  ep_lib::MPI_Comm_split(client->intraComm, interMapValuePole.empty() ? MPI_UNDEFINED : 1, 0, &poleComme); 
    409409  if (MPI_COMM_NULL != poleComme) 
    410410  { 
    411411    int nbClientPole; 
    412     MPI_Comm_size(poleComme, &nbClientPole); 
     412    ep_lib::MPI_Comm_size(poleComme, &nbClientPole); 
    413413 
    414414    std::map<int,std::vector<std::pair<int,double> > >::iterator itePole = interMapValuePole.end(), itPole, 
     
    573573  double* sendWeightBuff = new double [sendBuffSize]; 
    574574 
    575   std::vector<MPI_Request> sendRequest; 
     575  std::vector<ep_lib::MPI_Request> sendRequest; 
    576576 
    577577  int sendOffSet = 0, l = 0; 
     
    594594    } 
    595595 
    596     sendRequest.push_back(MPI_Request()); 
     596    sendRequest.push_back(ep_lib::MPI_Request()); 
    597597    MPI_Isend(sendIndexDestBuff + sendOffSet, 
    598598             k, 
     
    602602             client->intraComm, 
    603603             &sendRequest.back()); 
    604     sendRequest.push_back(MPI_Request()); 
     604    sendRequest.push_back(ep_lib::MPI_Request()); 
    605605    MPI_Isend(sendIndexSrcBuff + sendOffSet, 
    606606             k, 
     
    610610             client->intraComm, 
    611611             &sendRequest.back()); 
    612     sendRequest.push_back(MPI_Request()); 
     612    sendRequest.push_back(ep_lib::MPI_Request()); 
    613613    MPI_Isend(sendWeightBuff + sendOffSet, 
    614614             k, 
     
    629629  while (receivedSize < recvBuffSize) 
    630630  { 
    631     MPI_Status recvStatus; 
     631    ep_lib::MPI_Status recvStatus; 
    632632    MPI_Recv((recvIndexDestBuff + receivedSize), 
    633633             recvBuffSize, 
     
    640640    int countBuff = 0; 
    641641    MPI_Get_count(&recvStatus, MPI_INT, &countBuff); 
     642    #ifdef _usingMPI 
    642643    clientSrcRank = recvStatus.MPI_SOURCE; 
    643  
     644    #elif _usingEP 
     645    clientSrcRank = recvStatus.ep_src; 
     646    #endif 
    644647    MPI_Recv((recvIndexSrcBuff + receivedSize), 
    645648             recvBuffSize, 
     
    666669  } 
    667670 
    668   std::vector<MPI_Status> requestStatus(sendRequest.size()); 
    669   MPI_Waitall(sendRequest.size(), &sendRequest[0], MPI_STATUS_IGNORE); 
     671  std::vector<ep_lib::MPI_Status> requestStatus(sendRequest.size()); 
     672  ep_lib::MPI_Status stat_ignore; 
     673  MPI_Waitall(sendRequest.size(), &sendRequest[0], &stat_ignore); 
    670674 
    671675  delete [] sendIndexDestBuff; 
     
    758762 
    759763  MPI_Allreduce(&localNbWeight, &globalNbWeight, 1, MPI_LONG, MPI_SUM, client->intraComm); 
    760   MPI_Scan(&localNbWeight, &startIndex, 1, MPI_LONG, MPI_SUM, client->intraComm); 
     764  ep_lib::MPI_Scan(&localNbWeight, &startIndex, 1, MPI_LONG, MPI_SUM, client->intraComm); 
    761765   
    762766  if (0 == globalNbWeight) 
     
    771775  std::vector<StdSize> start(1, startIndex - localNbWeight); 
    772776  std::vector<StdSize> count(1, localNbWeight); 
    773    
    774   WriteNetCdf netCdfWriter(filename, client->intraComm);   
     777 
     778  WriteNetCdf netCdfWriter(filename, static_cast<MPI_Comm>(client->intraComm.mpi_comm)); 
    775779 
    776780  // Define some dimensions 
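The pole handling above builds a sub-communicator with MPI_Comm_split, passing MPI_UNDEFINED as the color on ranks that hold no pole values; those ranks receive MPI_COMM_NULL and simply skip the pole exchange. A minimal stand-alone illustration of that behaviour (the even/odd test stands in for !interMapValuePole.empty()):

    #include <mpi.h>
    #include <cstdio>

    int main(int argc, char** argv)
    {
      MPI_Init(&argc, &argv);
      int rank = 0;
      MPI_Comm_rank(MPI_COMM_WORLD, &rank);

      int has_pole_data = (rank % 2 == 0);
      MPI_Comm poleComm = MPI_COMM_NULL;
      MPI_Comm_split(MPI_COMM_WORLD, has_pole_data ? 1 : MPI_UNDEFINED, 0, &poleComm);

      if (poleComm != MPI_COMM_NULL)    // only ranks with a real color end up in the group
      {
        int poleSize = 0;
        MPI_Comm_size(poleComm, &poleSize);
        std::printf("rank %d is in the pole communicator of size %d\n", rank, poleSize);
        MPI_Comm_free(&poleComm);
      }

      MPI_Finalize();
      return 0;
    }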
  • XIOS/dev/branch_yushan_merged/src/transformation/domain_algorithm_interpolate.hpp

    r1014 r1134  
    1313#include "transformation.hpp" 
    1414#include "nc4_data_output.hpp" 
     15#ifdef _usingEP 
     16#include "ep_declaration.hpp" 
     17#endif 
    1518 
    1619namespace xios { 
  • XIOS/dev/branch_yushan_merged/src/transformation/grid_transformation.cpp

    r1078 r1134  
    473473 
    474474  // Sending global index of grid source to corresponding process as well as the corresponding mask 
    475   std::vector<MPI_Request> requests; 
    476   std::vector<MPI_Status> status; 
     475  std::vector<ep_lib::MPI_Request> requests; 
     476  std::vector<ep_lib::MPI_Status> status; 
    477477  boost::unordered_map<int, unsigned char* > recvMaskDst; 
    478478  boost::unordered_map<int, unsigned long* > recvGlobalIndexSrc; 
     
    484484    recvGlobalIndexSrc[recvRank] = new unsigned long [recvSize]; 
    485485 
    486     requests.push_back(MPI_Request()); 
     486    requests.push_back(ep_lib::MPI_Request()); 
    487487    MPI_Irecv(recvGlobalIndexSrc[recvRank], recvSize, MPI_UNSIGNED_LONG, recvRank, 46, client->intraComm, &requests.back()); 
    488     requests.push_back(MPI_Request()); 
     488    requests.push_back(ep_lib::MPI_Request()); 
    489489    MPI_Irecv(recvMaskDst[recvRank], recvSize, MPI_UNSIGNED_CHAR, recvRank, 47, client->intraComm, &requests.back()); 
    490490  } 
     
    522522 
    523523    // Send global index source and mask 
    524     requests.push_back(MPI_Request()); 
     524    requests.push_back(ep_lib::MPI_Request()); 
    525525    MPI_Isend(sendGlobalIndexSrc[sendRank], sendSize, MPI_UNSIGNED_LONG, sendRank, 46, client->intraComm, &requests.back()); 
    526     requests.push_back(MPI_Request()); 
     526    requests.push_back(ep_lib::MPI_Request()); 
    527527    MPI_Isend(sendMaskDst[sendRank], sendSize, MPI_UNSIGNED_CHAR, sendRank, 47, client->intraComm, &requests.back()); 
    528528  } 
     
    532532 
    533533  // Okie, now use the mask to identify which index source we need to send, then also signal the destination which masked index we will return 
    534   std::vector<MPI_Request>().swap(requests); 
    535   std::vector<MPI_Status>().swap(status); 
     534  std::vector<ep_lib::MPI_Request>().swap(requests); 
     535  std::vector<ep_lib::MPI_Status>().swap(status); 
    536536  // Okie, on destination side, we will wait for information of masked index of source 
    537537  for (std::map<int,int>::const_iterator itSend = sendRankSizeMap.begin(); itSend != sendRankSizeMap.end(); ++itSend) 
     
    540540    int recvSize = itSend->second; 
    541541 
    542     requests.push_back(MPI_Request()); 
     542    requests.push_back(ep_lib::MPI_Request()); 
    543543    MPI_Irecv(sendMaskDst[recvRank], recvSize, MPI_UNSIGNED_CHAR, recvRank, 48, client->intraComm, &requests.back()); 
    544544  } 
     
    577577 
    578578    // Okie, now inform the destination which source index are masked 
    579     requests.push_back(MPI_Request()); 
     579    requests.push_back(ep_lib::MPI_Request()); 
    580580    MPI_Isend(recvMaskDst[recvRank], recvSize, MPI_UNSIGNED_CHAR, recvRank, 48, client->intraComm, &requests.back()); 
    581581  } 
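grid_transformation.cpp keeps the same non-blocking exchange logic and only re-types the request and status containers to ep_lib::. The sketch below shows the underlying pattern (push a default-constructed request, hand &requests.back() to the non-blocking call, then wait on the whole vector), written against plain MPI so it stands alone; every buffer, tag and function name is a placeholder, not XIOS code.

    // Minimal sketch of the request-collection pattern used above, against
    // plain MPI; the changeset merely swaps MPI_Request/MPI_Status for their
    // ep_lib:: counterparts.  Names and the tag value are placeholders.
    #include <mpi.h>
    #include <vector>

    void exchangeMasks(unsigned char* sendBuf, int sendSize, int dest,
                       unsigned char* recvBuf, int recvSize, int src,
                       MPI_Comm comm)
    {
      std::vector<MPI_Request> requests;

      // One default-constructed handle per call; MPI fills it in on return.
      requests.push_back(MPI_Request());
      MPI_Irecv(recvBuf, recvSize, MPI_UNSIGNED_CHAR, src, 47, comm, &requests.back());

      requests.push_back(MPI_Request());
      MPI_Isend(sendBuf, sendSize, MPI_UNSIGNED_CHAR, dest, 47, comm, &requests.back());

      // The handles are plain values, so waiting on the vector's storage is safe.
      std::vector<MPI_Status> status(requests.size());
      MPI_Waitall(requests.size(), &requests[0], &status[0]);
    }
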
  • XIOS/dev/branch_yushan_merged/src/transformation/grid_transformation_factory_impl.hpp

    r933 r1134  
    5757  typedef std::map<ETranformationType, CreateTransformationCallBack> CallBackMap; 
    5858  static CallBackMap* transformationCreationCallBacks_; 
     59  #pragma omp threadprivate(transformationCreationCallBacks_) 
     60   
    5961  static bool registerTransformation(ETranformationType transType, CreateTransformationCallBack createFn); 
    6062  static bool unregisterTransformation(ETranformationType transType); 
    6163  static bool initializeTransformation_; 
     64  #pragma omp threadprivate(initializeTransformation_) 
    6265}; 
    6366 
     
    7982                                                                               std::map<int, int>& elementPositionInGridDst2DomainPosition) 
    8083{ 
     84  if (0 == transformationCreationCallBacks_) 
     85    transformationCreationCallBacks_ = new CallBackMap(); 
    8186  typename CallBackMap::const_iterator it = (*transformationCreationCallBacks_).find(transType); 
    8287  if ((*transformationCreationCallBacks_).end() == it) 
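The factory header now marks its static members threadprivate and allocates the callback map lazily, so each OpenMP thread running through the EP layer gets its own registry. A small self-contained sketch of that idiom is given below; the class, map and function names are illustrative, only the threadprivate-plus-lazy-new pattern is taken from the hunk above.

    // Sketch of the thread-private registry idiom introduced above: the static
    // map pointer is one instance per OpenMP thread and is allocated on first
    // use.  CRegistry and its members are illustrative, not XIOS classes.
    #include <map>
    #include <string>

    class CRegistry
    {
      public:
        typedef void* (*CreateFn)();
        typedef std::map<std::string, CreateFn> CallBackMap;

        static bool registerCallback(const std::string& key, CreateFn fn)
        {
          if (0 == callBacks_) callBacks_ = new CallBackMap();   // lazy, per-thread
          return callBacks_->insert(std::make_pair(key, fn)).second;
        }

      private:
        static CallBackMap* callBacks_;
        #pragma omp threadprivate(callBacks_)
    };

    CRegistry::CallBackMap* CRegistry::callBacks_ = 0;           // each thread starts null
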
  • XIOS/dev/branch_yushan_merged/src/transformation/scalar_algorithm_extract_axis.cpp

    r1076 r1134  
    4949  StdString op = "extract"; 
    5050  pos_ = algo->position; 
    51   reduction_ = CReductionAlgorithm::createOperation(CReductionAlgorithm::ReductionOperations[op]); 
     51  reduction_ = CReductionAlgorithm::createOperation((*CReductionAlgorithm::ReductionOperations_ptr)[op]); 
    5252} 
    5353 
  • XIOS/dev/branch_yushan_merged/src/transformation/scalar_algorithm_reduce_axis.cpp

    r1082 r1134  
    7575  } 
    7676   
    77   if (CReductionAlgorithm::ReductionOperations.end() == CReductionAlgorithm::ReductionOperations.find(op)) 
     77  if ((*CReductionAlgorithm::ReductionOperations_ptr).end() == (*CReductionAlgorithm::ReductionOperations_ptr).find(op)) 
    7878    ERROR("CScalarAlgorithmReduceAxis::CScalarAlgorithmReduceAxis(CAxis* axisDestination, CAxis* axisSource, CReduceAxisToScalar* algo)", 
    7979       << "Operation '" << op << "' not found. Please make sure to use a supported one" 
     
    8181       << "Scalar destination " << scalarDestination->getId()); 
    8282 
    83   reduction_ = CReductionAlgorithm::createOperation(CReductionAlgorithm::ReductionOperations[op]); 
     83  reduction_ = CReductionAlgorithm::createOperation((*CReductionAlgorithm::ReductionOperations_ptr)[op]); 
    8484} 
    8585 
  • XIOS/dev/branch_yushan_merged/src/transformation/scalar_algorithm_reduce_domain.cpp

    r1076 r1134  
    6969  } 
    7070   
    71   if (CReductionAlgorithm::ReductionOperations.end() == CReductionAlgorithm::ReductionOperations.find(op)) 
     71  if ((*CReductionAlgorithm::ReductionOperations_ptr).end() == (*CReductionAlgorithm::ReductionOperations_ptr).find(op)) 
    7272    ERROR("CScalarAlgorithmReduceDomain::CScalarAlgorithmReduceDomain(CDomain* domainDestination, CDomain* domainSource, CReduceDomainToScalar* algo)", 
    7373       << "Operation '" << op << "' not found. Please make sure to use a supported one" 
     
    7575       << "Scalar destination " << scalarDestination->getId()); 
    7676 
    77   reduction_ = CReductionAlgorithm::createOperation(CReductionAlgorithm::ReductionOperations[op]); 
     77  reduction_ = CReductionAlgorithm::createOperation((*CReductionAlgorithm::ReductionOperations_ptr)[op]); 
    7878} 
    7979 
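The three scalar_algorithm hunks make the same substitution: the static ReductionOperations map is now reached through ReductionOperations_ptr and dereferenced at each lookup, presumably so the map can be held per thread in the OpenMP/EP mode. The sketch below reproduces that lookup shape with stand-in names; the threadprivate directive, map contents and error handling here are illustrative assumptions, not lifted from XIOS.

    // Sketch of the pointer-based lookup used in the hunks above.  The map
    // contents, the enum and the threadprivate directive are stand-ins; only
    // the "(*ptr).find / (*ptr)[op]" access shape mirrors the changeset.
    #include <map>
    #include <string>
    #include <stdexcept>

    enum EReductionType { SUM, MIN, MAX };
    typedef std::map<std::string, EReductionType> OperationMap;

    static OperationMap* operations_ptr = 0;
    #pragma omp threadprivate(operations_ptr)     // assumed: one map per thread

    EReductionType lookupOperation(const std::string& op)
    {
      if (0 == operations_ptr)                    // lazy per-thread construction
      {
        operations_ptr = new OperationMap();
        (*operations_ptr)["sum"] = SUM;
        (*operations_ptr)["min"] = MIN;
        (*operations_ptr)["max"] = MAX;
      }

      if ((*operations_ptr).end() == (*operations_ptr).find(op))
        throw std::runtime_error("Operation '" + op + "' not found");

      return (*operations_ptr)[op];
    }
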
  • XIOS/dev/branch_yushan_merged/src/type/type.hpp

    r1107 r1134  
    9494    const CType_ref& operator = (CType<T>& val) const ; 
    9595    const CType_ref& operator = (const CType_ref& val) const; 
    96     operator T&() const;     
     96    operator T&() const; 
    9797 
    9898    inline virtual CBaseType* clone(void) const   { return _clone(); } 
  • XIOS/dev/branch_yushan_merged/src/xios_server.f90

    r501 r1134  
    11PROGRAM server_main 
    22  USE xios 
     3  USE mod_wait 
    34  IMPLICIT NONE 
    45  INCLUDE "mpif.h" 
    5   INTEGER :: ierr 
    6    
     6  INTEGER :: ierr, th_level 
     7     
     8    CALL MPI_INIT(ierr) 
     9    !CALL MPI_INIT_thread(3, th_level, ierr) 
     10    CALL init_wait 
    711    CALL xios_init_server 
     12     
     13    CALL MPI_FINALIZE(ierr) 
    814 
    915  END PROGRAM server_main 