Changeset 1176


Timestamp: 06/21/17 09:09:59 (4 years ago)
Author: yushan
Message: save modif
Location: XIOS/dev/branch_yushan_merged
Files: 16 edited

  • XIOS/dev/branch_yushan_merged/bld.cfg

    r1141 → r1176

      #bld::target test_expand_domain.exe
      #bld::target test_new_features.exe test_unstruct_complete.exe
    - bld::target test_omp.exe test_complete_omp.exe test_remap_omp.exe
    + bld::target test_omp.exe test_complete_omp.exe test_remap_omp.exe test_unstruct_omp.exe
      #bld::target test_client.exe test_complete.exe #test_xios2_cmip6.exe
      #bld::target test_connectivity_expand.exe
  • XIOS/dev/branch_yushan_merged/extern/remap/src/libmapper.cpp

    r1155 → r1176

      #include "gridRemap.hpp"

    + #include <stdio.h>
    +
      using namespace sphereRemap ;

    - extern CRemapGrid srcGrid;
    - #pragma omp threadprivate(srcGrid)
    + //extern CRemapGrid srcGrid;
    + //#pragma omp threadprivate(srcGrid)

    - extern CRemapGrid tgtGrid;
    - #pragma omp threadprivate(tgtGrid)
    + //extern CRemapGrid tgtGrid;
    + //#pragma omp threadprivate(tgtGrid)

    …
                           int order, int* n_weights)
      {
    -         assert(src_bounds_lon);
    -         assert(src_bounds_lat);
    -         assert(n_vert_per_cell_src >= 3);
    -         assert(n_cell_src >= 4);
    -         assert(dst_bounds_lon);
    -         assert(dst_bounds_lat);
    -         assert(n_vert_per_cell_dst >= 3);
    -         assert(n_cell_dst >= 4);
    -         assert(1 <= order && order <= 2);
    +   printf("libmapper called : remap_get_num_weights\n");
    +   assert(src_bounds_lon);
    +   assert(src_bounds_lat);
    +   assert(n_vert_per_cell_src >= 3);
    +   assert(n_cell_src >= 4);
    +   assert(dst_bounds_lon);
    +   assert(dst_bounds_lat);
    +   assert(n_vert_per_cell_dst >= 3);
    +   assert(n_cell_dst >= 4);
    +   assert(1 <= order && order <= 2);

        mapper = new Mapper(MPI_COMM_WORLD);
    …
            double tic = cputime();
            mapper = new Mapper(MPI_COMM_WORLD);
    -   mapper->setVerbosity(PROGRESS) ;
    +       mapper->setVerbosity(PROGRESS) ;
            mapper->buildSSTree(src_msh, dst_msh);
            double tac = cputime();
    …
                           double* centre_lon, double* centre_lat, double* areas)
      {
    +   printf("libmapper called : remap_get_barycentres_and_areas\n");
            for (int i = 0; i < n_cell; i++)
            {
    …
      extern "C" void remap_get_weights(double* weights, int* src_indices, int* dst_indices)
      {
    +   printf("libmapper called : remap_get_weights\n");
            memcpy(weights, mapper->remapMatrix, mapper->nWeights*sizeof(double));
            memcpy(src_indices, mapper->srcAddress, mapper->nWeights*sizeof(int));
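    The first hunk disables the threadprivate copies of srcGrid and tgtGrid. For readers unfamiliar with the directive, here is a minimal sketch of what is being switched off; Grid is a hypothetical stand-in for CRemapGrid, and only the pragma usage mirrors the original code.

        #include <cstdio>
        #include <omp.h>

        struct Grid { int id; };

        Grid srcGrid;                        // file-scope global, normally shared
        #pragma omp threadprivate(srcGrid)   // now every thread owns a private copy

        int main()
        {
            #pragma omp parallel num_threads(4)
            {
                srcGrid.id = omp_get_thread_num();   // no data race: per-thread storage
                std::printf("thread %d: srcGrid.id = %d\n",
                            omp_get_thread_num(), srcGrid.id);
            }
            return 0;
        }

    With the pragma commented out, all threads share the single global again, which is presumably what the endpoint (EP) MPI layer in this branch expects.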
  • XIOS/dev/branch_yushan_merged/extern/remap/src/polyg.cpp

    r950 → r1176

      #include <cassert>
      #include <iostream>
    + #include <stdio.h>
      #include "elt.hpp"
      #include "errhandle.hpp"
    …
      {
            if (N < 3)
    -               return 0; /* polygons with less then three vertices have zero area */
    +               return 0; /* polygons with less than three vertices have zero area */
            Coord t[3];
            t[0] = barycentre(x, N);
    …
                    t[2] = x[ii];
                    double sc=scalarprod(crossprod(t[1] - t[0], t[2] - t[0]), t[0]) ;
    -               assert(sc >= -1e-10); // Error: tri a l'env (wrong orientation)
    +               //assert(sc >= -1e-10); // Error: tri a l'env (wrong orientation)
    +               if(sc < -1e-10)
    +               {
    +                 printf("N=%d, sc = %f, t[0]=(%f,%f,%f), t[1]=(%f,%f,%f), t[2]=(%f,%f,%f)\n", N, sc,
    +                        t[0].x, t[0].y, t[0].z,
    +                        t[1].x, t[1].y, t[1].z,
    +                        t[2].x, t[2].y, t[2].z);
    +                 assert(sc >= -1e-10);
    +               }
                    double area_gc = triarea(t[0], t[1], t[2]);
                    double area_sc_gc_moon = 0;
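    The new branch prints the offending triangle before the assertion fires. For context, a standalone sketch of the orientation test the assertion guards: the sign of the scalar triple product (t1-t0) x (t2-t0) . t0 tells whether the triangle is wound consistently as seen from outside the sphere. Vec and the helpers are simplified stand-ins for remap's Coord, crossprod and scalarprod.

        #include <cassert>
        #include <cstdio>

        struct Vec { double x, y, z; };

        Vec operator-(const Vec& a, const Vec& b) { return {a.x - b.x, a.y - b.y, a.z - b.z}; }
        Vec crossprod(const Vec& a, const Vec& b)
        { return {a.y*b.z - a.z*b.y, a.z*b.x - a.x*b.z, a.x*b.y - a.y*b.x}; }
        double scalarprod(const Vec& a, const Vec& b) { return a.x*b.x + a.y*b.y + a.z*b.z; }

        int main()
        {
            Vec t0{1, 0, 0}, t1{0, 1, 0}, t2{0, 0, 1};   // a well-oriented octant triangle
            double sc = scalarprod(crossprod(t1 - t0, t2 - t0), t0);
            std::printf("sc = %f\n", sc);   // positive here; below -1e-10 means
            assert(sc >= -1e-10);           // "tri a l'envers" (inverted triangle)
            return 0;
        }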
  • XIOS/dev/branch_yushan_merged/extern/remap/src/triple.cpp

    r1016 → r1176

      namespace sphereRemap {

    - extern const Coord ORIGIN(0.0, 0.0, 0.0);
    + const Coord ORIGIN(0.0, 0.0, 0.0);

      std::ostream& operator<<(std::ostream& os, const Coord& c) {
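    A sketch of the linkage rule this one-line change touches. Whether triple.hpp declares `extern const Coord ORIGIN;` is an assumption here; Coord is simplified.

        struct Coord
        {
            double x, y, z;
            Coord(double a, double b, double c) : x(a), y(b), z(c) {}
        };

        extern const Coord ORIGIN;          // header-style declaration: external linkage

        // Definition. Because an extern declaration is already visible, external
        // linkage is kept even without repeating `extern`. On its own, a
        // namespace-scope const object defaults to internal linkage, and the old
        // form (`extern` plus an initializer) was itself already a definition.
        const Coord ORIGIN(0.0, 0.0, 0.0);

        int main() { return ORIGIN.x == 0.0 ? 0 : 1; }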
  • XIOS/dev/branch_yushan_merged/extern/src_ep_dev/ep_message.cpp

    r1134 → r1176

            }
            #elif _intelmpi
    -       ::MPI_Improbe(MPI_ANY_SOURCE, MPI_ANY_TAG, mpi_comm, &flag, &message, &status);
    +       //#pragma omp critical (_mpi_call)
    +       //::MPI_Improbe(MPI_ANY_SOURCE, MPI_ANY_TAG, mpi_comm, &flag, &message, &status);
    +       #pragma omp critical (_mpi_call)
    +       {
    +         ::MPI_Iprobe(MPI_ANY_SOURCE, MPI_ANY_TAG, mpi_comm, &flag, &status);
    +         if(flag)
    +         {
    +           Debug("find message in mpi comm \n");
    +           mpi_source = status.MPI_SOURCE;
    +           int tag = status.MPI_TAG;
    +           ::MPI_Mprobe(mpi_source, tag, mpi_comm, &message, &status);
    +         }
    +       }
            #endif

            if(flag)
            {
    …
            }
            #elif _intelmpi
    -       ::MPI_Improbe(MPI_ANY_SOURCE, MPI_ANY_TAG, mpi_comm, &flag, &message, &status);
    +       #pragma omp critical (_mpi_call)
    +       {
    +         ::MPI_Iprobe(MPI_ANY_SOURCE, MPI_ANY_TAG, mpi_comm, &flag, &status);
    +         if(flag)
    +         {
    +           Debug("find message in mpi comm \n");
    +           mpi_source = status.MPI_SOURCE;
    +           int tag = status.MPI_TAG;
    +           ::MPI_Mprobe(mpi_source, tag, mpi_comm, &message, &status);
    +         }
    +       }
    +       //::MPI_Improbe(MPI_ANY_SOURCE, MPI_ANY_TAG, mpi_comm, &flag, &message, &status);
            #endif
    …
            }
            #elif _intelmpi
    -       ::MPI_Improbe(MPI_ANY_SOURCE, MPI_ANY_TAG, mpi_comm, &flag, &message, &status);
    +       #pragma omp critical (_mpi_call)
    +       {
    +         ::MPI_Iprobe(MPI_ANY_SOURCE, MPI_ANY_TAG, mpi_comm, &flag, &status);
    +         if(flag)
    +         {
    +           Debug("find message in mpi comm \n");
    +           mpi_source = status.MPI_SOURCE;
    +           int tag = status.MPI_TAG;
    +           ::MPI_Mprobe(mpi_source, tag, mpi_comm, &message, &status);
    +         }
    +       }
    +       //::MPI_Improbe(MPI_ANY_SOURCE, MPI_ANY_TAG, mpi_comm, &flag, &message, &status);
            #endif
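    All three hunks replace MPI_Improbe with the same pattern: serialize the probe across OpenMP threads in a named critical section, peek with MPI_Iprobe, then immediately claim the matched message with MPI_Mprobe so no other thread can receive it. A self-contained sketch of that pattern, assuming a toy self-send on MPI_COMM_SELF (tag 7 and the payload are arbitrary, not XIOS's ep_message logic):

        #include <mpi.h>
        #include <cstdio>

        int main(int argc, char** argv)
        {
            int provided;
            MPI_Init_thread(&argc, &argv, MPI_THREAD_MULTIPLE, &provided);

            int payload = 42;
            MPI_Request req;
            MPI_Isend(&payload, 1, MPI_INT, 0, 7, MPI_COMM_SELF, &req);

            int flag = 0;
            MPI_Message message;
            MPI_Status status;
            while (!flag)   // poll until the message becomes visible
            {
                #pragma omp critical (_mpi_call)   // one thread in MPI at a time
                {
                    MPI_Iprobe(MPI_ANY_SOURCE, MPI_ANY_TAG, MPI_COMM_SELF, &flag, &status);
                    if (flag)
                    {
                        // Mprobe dequeues the matched message and returns a handle,
                        // so only this thread can go on to receive it.
                        MPI_Mprobe(status.MPI_SOURCE, status.MPI_TAG, MPI_COMM_SELF,
                                   &message, &status);
                    }
                }
            }
            int received;
            MPI_Mrecv(&received, 1, MPI_INT, &message, &status);
            MPI_Wait(&req, MPI_STATUS_IGNORE);
            std::printf("probed and received %d\n", received);
            MPI_Finalize();
            return 0;
        }

    The Iprobe/Mprobe pair inside one critical section closes the race window that a bare Iprobe followed by a plain receive would leave between threads.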
  • XIOS/dev/branch_yushan_merged/extern/src_ep_dev/ep_wait.cpp

    r1164 → r1176

        if(request->type == 1)
        {
    -     ::MPI_Request *mpi_request = static_cast< ::MPI_Request* >(&(request->mpi_request));
    +     ::MPI_Request mpi_request = static_cast< ::MPI_Request >(request->mpi_request);
          ::MPI_Status mpi_status;
          ::MPI_Errhandler_set(MPI_COMM_WORLD_STD, MPI_ERRORS_RETURN);
    -     int error_code = ::MPI_Wait(mpi_request, &mpi_status);
    +     int error_code = ::MPI_Wait(&mpi_request, &mpi_status);
          if (error_code != MPI_SUCCESS) {
    …
        if(request->type == 3)
        {
    -     ::MPI_Request *mpi_request = static_cast< ::MPI_Request* >(&(request->mpi_request));
    +     ::MPI_Request mpi_request = static_cast< ::MPI_Request >(request->mpi_request);
          ::MPI_Status mpi_status;
          ::MPI_Errhandler_set(MPI_COMM_WORLD_STD, MPI_ERRORS_RETURN);
    -     int error_code = ::MPI_Wait(mpi_request, &mpi_status);
    +     int error_code = ::MPI_Wait(&mpi_request, &mpi_status);
          if (error_code != MPI_SUCCESS) {
    …
            if(array_of_requests[i].type != 2) // isend or imrecv
            {
    -         MPI_Wait(&array_of_requests[i], &array_of_statuses[i]);
    -         //int tested=false;
    -         //while(!tested) MPI_Test(&array_of_requests[i], &tested, &array_of_statuses[i]);
    +         //MPI_Wait(&array_of_requests[i], &array_of_statuses[i]);
    +         int tested;
    +         MPI_Test(&array_of_requests[i], &tested, &array_of_statuses[i]);
    +         if(!tested) MPI_Wait(&array_of_requests[i], &array_of_statuses[i]);
              finished++;
              finished_index[i] = true;
    
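    The last hunk adopts a test-then-wait shape: poll once with MPI_Test (which also drives MPI progress) and fall back to a blocking MPI_Wait only if the request has not already completed. A minimal sketch with a self message (names chosen to echo the hunk; the two-request setup is illustrative):

        #include <mpi.h>
        #include <cstdio>

        int main(int argc, char** argv)
        {
            MPI_Init(&argc, &argv);

            int out = 1, in = 0;
            MPI_Request array_of_requests[2];
            MPI_Status  array_of_statuses[2];
            MPI_Irecv(&in,  1, MPI_INT, 0, 3, MPI_COMM_SELF, &array_of_requests[0]);
            MPI_Isend(&out, 1, MPI_INT, 0, 3, MPI_COMM_SELF, &array_of_requests[1]);

            for (int i = 0; i < 2; ++i)
            {
                int tested;
                MPI_Test(&array_of_requests[i], &tested, &array_of_statuses[i]);
                // Block only when the request is still pending after one poll.
                if (!tested) MPI_Wait(&array_of_requests[i], &array_of_statuses[i]);
            }
            std::printf("in = %d\n", in);
            MPI_Finalize();
            return 0;
        }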
  • XIOS/dev/branch_yushan_merged/inputs/REMAP/iodef.xml

    r1172 → r1176

       <file_definition type="one_file" par_access="collective" output_freq="1ts" output_level="10" enabled=".TRUE.">

    -   <file_group id="read_then_write_files" enabled=".TRUE.">
    +   <file_group id="read_then_write_files" enabled=".FALSE.">
         <file id="output_regular_pole" name="output_dst_regular" >
    -       <field field_ref="tmp_field_0" name="field_regular_0" enabled=".TRUE."/>
    -       <field field_ref="dst_field_regular" name="field_regular" enabled=".TRUE."/>
    +       <field field_ref="tmp_field_0" name="field_regular_0" enabled=".FALSE."/>
    +       <field field_ref="dst_field_regular" name="field_regular" enabled=".FALSE."/>
            <field field_ref="dst_field_regular_pole_0" name="field_regular_pole_0" enabled=".FALSE." />
            <field field_ref="dst_field_regular_pole_1" name="field_regular_pole_1" enabled=".FALSE." />
         </file>
    -    <file id="output_dst_curvilinear" name="output_dst_curvilinear" enabled=".TRUE." >
    +    <file id="output_dst_curvilinear" name="output_dst_curvilinear" enabled=".FALSE." >
            <field field_ref="tmp_field_1" operation="instant"/>
         </file>
    -    <file id="output_dst_unstructured" name="output_dst_unstructured" enabled=".TRUE." >
    +    <file id="output_dst_unstructured" name="output_dst_unstructured" enabled=".FALSE." >
            <field field_ref="tmp_field_2" operation="instant"/>
         </file>
    …
        <file_group id="write_files" >
          <file id="output_2D" name="output_2D" enabled=".TRUE.">
    -       <field field_ref="src_field_2D" name="field_src" />
    -       <field field_ref="src_field_2D_clone" name="field_src_clone" default_value="100000" />
    -       <field field_ref="src_field_2D" name="field_dst_regular_0"  domain_ref="dst_domain_regular_pole" />
    -       <field field_ref="dst_field_2D" name="field_dst_regular_1" />
    -       <field field_ref="dst_field_2D_regular_pole" name="field_dst_regular_2" />
    -       <field field_ref="dst_field_2D_clone" name="field_dst_regular_3" detect_missing_value=".false." default_value="100000" />
    -       <field field_ref="dst_field_2D_extract" name="field_dst_regular_4" />
    +       <field field_ref="src_field_2D" name="field_src" enabled=".FALSE."/>
    +       <field field_ref="src_field_2D_clone" name="field_src_clone" default_value="100000" enabled=".FALSE."/>
    +       <field field_ref="src_field_2D" name="field_dst_regular_0"  domain_ref="dst_domain_regular_pole" enabled=".TRUE."/>
    +       <field field_ref="dst_field_2D" name="field_dst_regular_1" enabled=".FALSE." />
    +       <field field_ref="dst_field_2D_regular_pole" name="field_dst_regular_2" enabled=".FALSE."/>
    +       <field field_ref="dst_field_2D_clone" name="field_dst_regular_3" detect_missing_value=".false." default_value="100000" enabled=".FALSE."/>
    +       <field field_ref="dst_field_2D_extract" name="field_dst_regular_4" enabled=".FALSE."/>
          </file>
    -    <file id="output_3D" name="output_3D" enabled=".TRUE.">
    +    <file id="output_3D" name="output_3D" enabled=".FALSE.">
            <field field_ref="src_field_3D" name="field_src" />
            <field field_ref="src_field_3D_pression" name="field" />
    …
            <field field_ref="dst_field_3D_interp" name="field_dst_interp_domain_axis" domain_ref="dst_domain_regular_pole"/>
         </file>
    -    <file id="output_4D" name="output_4D" enabled=".TRUE.">
    -       <field field_ref="src_field_4D" name="field_4D" />
    +    <file id="output_4D" name="output_4D" enabled=".FALSE.">
    +       <field field_ref="src_field_4D" name="field_4D" enabled=".FALSE."/>
            <field field_ref="dst_field_4D_extract" name="field_4D_extract" />
          </file>
  • XIOS/dev/branch_yushan_merged/inputs/Unstruct/iodef.xml

    r944 → r1176

       <field_definition level="1" enabled=".TRUE." default_value="1000">
         <field id="field_A_srf"  operation="average" freq_op="3600s" grid_ref="grid_A"/>
    +    <field id="field_B_srf"  operation="average" freq_op="3600s" grid_ref="grid_A"/>
         <field id="field_A_expand"  operation="average" grid_ref="grid_dst" field_ref="field_A_srf"/>
       </field_definition>
    …
       <file_definition type="one_file" par_access="collective" output_freq="1h" output_level="10" enabled=".TRUE." >
         <file id="output" name="output">
    -       <field field_ref="field_A_expand" name="field"/>
    + <!--  <field field_ref="field_A_expand" name="field"/> -->
    +       <field field_ref="field_A_srf" name="field_A"/>
    +       <field field_ref="field_A_srf" name="field_rect" grid_ref="grid_rect"/>
    + <!--  <field field_ref="field_A_srf" name="field_curv" grid_ref="grid_curv"/> -->
         </file>
    +
    + <!-- <file id="output_src_curvilinear" name="output_src_curvilinear" mode="read" >
    +        <field id="src_field_curvilinear" name="field_A" grid_ref="src_grid_curvilinear_read" operation="instant"/>
    +      </file> -->
    +
       </file_definition>

       <axis_definition>
         <axis id="axis_srf" positive="up"/>
    +    <axis id="src_axis_curvilinear_read" positive="down" />
       </axis_definition>

       <domain_definition>
         <domain id="domain_srf" />

         <domain id="domain_dst" domain_ref="domain_srf" >
           <expand_domain/>
           <compute_connectivity_domain id="compute" type="node"/>
         </domain>
    +
    +    <domain id="dst_domain_regular_pole" ni_glo="90" nj_glo="45" type="rectilinear">
    +      <generate_rectilinear_domain id="domain_regular_pole"/>
    +      <interpolate_domain write_weight="false" order="1" renormalize="true"/>
    +    </domain>
    +
    +    <domain id="src_domain_curvilinear_read" type="curvilinear">
    +       <generate_rectilinear_domain />
    +    </domain>
    +
       </domain_definition>

    …
           <axis axis_ref="axis_srf" />
         </grid>
    +
         <grid id="grid_dst">
           <domain domain_ref="domain_dst" />
           <axis axis_ref="axis_srf" />
         </grid>
    +
    +    <grid id="grid_rect">
    +      <domain domain_ref="dst_domain_regular_pole" />
    +      <axis axis_ref="axis_srf" />
    +    </grid>
    +
    +    <grid id="grid_curv">
    +      <domain id="src_domain_curvilinear_read" />
    +      <axis axis_ref="axis_srf" />
    +    </grid>
    +
    +    <grid id="src_grid_curvilinear_read">
    +       <domain id="src_domain_curvilinear_read"/>
    +       <axis axis_ref="src_axis_curvilinear_read" />
    +    </grid>
    +
       </grid_definition>
      </context>
  • XIOS/dev/branch_yushan_merged/src/buffer_client.cpp

    r1134 → r1176

          {
            MPI_Issend(buffer[current], count, MPI_CHAR, serverRank, 20, interComm, &request);
    +       #pragma omp critical (_output)
            pending = true;
            if (current == 1) current = 0;
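    The added pragma is a named critical section applied to a single statement. A minimal sketch of named critical sections, assuming a toy flag and counter (the second section name, _other_lock, is illustrative):

        #include <cstdio>
        #include <omp.h>

        int main()
        {
            bool pending = false;
            int sent = 0;
            #pragma omp parallel num_threads(4)
            {
                #pragma omp critical (_output)
                pending = true;          // a single statement is a valid critical body

                #pragma omp critical (_other_lock)
                ++sent;                  // a differently named section uses its own lock,
            }                            // so it does not contend with (_output)
            std::printf("pending=%d sent=%d\n", (int)pending, sent);
            return 0;
        }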
  • XIOS/dev/branch_yushan_merged/src/client.cpp

    r1164 → r1176

                MPI_Comm_rank(intraComm,&intraCommRank) ;

    -           #pragma omp critical(_output)
    +           /*#pragma omp critical(_output)
                {
                  info(10)<<"intercommCreate::client "<<test_omp_rank<< " "<< &test_omp_rank <<" intraCommSize : "<<intraCommSize
                     <<" intraCommRank :"<<intraCommRank<<"  serverLeader "<< serverLeader
                     <<" globalComm : "<< &(CXios::globalComm) << endl ;
    -           }
    +           }*/

  • XIOS/dev/branch_yushan_merged/src/client_client_dht_template_impl.hpp

    r1172 → r1176

        int clientRank;
        MPI_Comm_rank(commLevel,&clientRank);
    +   ep_lib::MPI_Barrier(commLevel);
        int groupRankBegin = this->getGroupBegin()[level];
        int nbClient = this->getNbInGroup()[level];
    …
        int currentIndex = 0;
        int nbRecvClient = recvRankClient.size();
    -   for (int idx = 0; idx < nbRecvClient; ++idx)
    -   {
    -     if (0 != recvNbIndexClientCount[idx])
    -     {
    -       recvIndexFromClients(recvRankClient[idx], recvIndexBuff+currentIndex, recvNbIndexClientCount[idx], commLevel, request);
    -     }
    -     currentIndex += recvNbIndexClientCount[idx];
    -   }
    -
        boost::unordered_map<int, size_t* >::iterator itbIndex = client2ClientIndex.begin(), itIndex,
                                                      iteIndex = client2ClientIndex.end();
    …
          sendIndexToClients(itIndex->first, (itIndex->second), sendNbIndexBuff[itIndex->first-groupRankBegin], commLevel, request);

    +   for (int idx = 0; idx < nbRecvClient; ++idx)
    +   {
    +     if (0 != recvNbIndexClientCount[idx])
    +     {
    +       recvIndexFromClients(recvRankClient[idx], recvIndexBuff+currentIndex, recvNbIndexClientCount[idx], commLevel, request);
    +     }
    +     currentIndex += recvNbIndexClientCount[idx];
    +   }
    +
        std::vector<ep_lib::MPI_Status> status(request.size());
        MPI_Waitall(request.size(), &request[0], &status[0]);

        CArray<size_t,1>* tmpGlobalIndex;
    …
          --level;
          computeIndexInfoMappingLevel(*tmpGlobalIndex, this->internalComm_, level);
        }
        else // Now, we are in the last level where necessary mappings are.
    …
        MPI_Comm_rank(commLevel,&clientRank);
        computeSendRecvRank(level, clientRank);
    +   ep_lib::MPI_Barrier(commLevel);

        int groupRankBegin = this->getGroupBegin()[level];
    …
        int nRequest = 0;
    +
    +   for (int idx = 0; idx < sendNbRank.size(); ++idx)
    +   {
    +     MPI_Isend(&sendNbElements[0]+idx, 1, MPI_INT,
    +               sendNbRank[idx], MPI_DHT_INDEX_1, this->internalComm_, &request[nRequest]);
    +     ++nRequest;
    +   }
    +
        for (int idx = 0; idx < recvNbRank.size(); ++idx)
        {
          MPI_Irecv(&recvNbElements[0]+idx, 1, MPI_INT,
                    recvNbRank[idx], MPI_DHT_INDEX_1, this->internalComm_, &request[nRequest]);
    -     ++nRequest;
    -   }
    -
    -   for (int idx = 0; idx < sendNbRank.size(); ++idx)
    -   {
    -     MPI_Isend(&sendNbElements[0]+idx, 1, MPI_INT,
    -               sendNbRank[idx], MPI_DHT_INDEX_1, this->internalComm_, &request[nRequest]);
          ++nRequest;
        }
    …
        int nRequest = 0;
    -   for (int idx = 0; idx < recvBuffSize; ++idx)
    -   {
    -     MPI_Irecv(&recvBuff[0]+2*idx, 2, MPI_INT,
    -               recvRank[idx], MPI_DHT_INDEX_0, this->internalComm_, &request[nRequest]);
    -     ++nRequest;
    -   }

        for (int idx = 0; idx < sendBuffSize; ++idx)
    …
          MPI_Isend(&sendBuff[idx*2], 2, MPI_INT,
                    sendRank[idx], MPI_DHT_INDEX_0, this->internalComm_, &request[nRequest]);
    +     ++nRequest;
    +   }
    +
    +   for (int idx = 0; idx < recvBuffSize; ++idx)
    +   {
    +     MPI_Irecv(&recvBuff[0]+2*idx, 2, MPI_INT,
    +               recvRank[idx], MPI_DHT_INDEX_0, this->internalComm_, &request[nRequest]);
          ++nRequest;
        }
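    Every hunk in this file moves the nonblocking receives after the matching sends (or adds the send loop first), with one MPI_Waitall completing everything. A minimal ring-exchange sketch of that shape; the DHT code's computed rank lists and MPI_DHT_* tags are replaced here by a single neighbour and tag 0:

        #include <mpi.h>
        #include <cstdio>
        #include <vector>

        int main(int argc, char** argv)
        {
            MPI_Init(&argc, &argv);
            int rank, size;
            MPI_Comm_rank(MPI_COMM_WORLD, &rank);
            MPI_Comm_size(MPI_COMM_WORLD, &size);

            int right = (rank + 1) % size;
            int left  = (rank - 1 + size) % size;
            int sendNbElements = rank;      // payload stands in for the DHT element counts
            int recvNbElements = -1;

            std::vector<MPI_Request> request(2);
            int nRequest = 0;

            // Sends are posted first, mirroring the loop order after this changeset.
            MPI_Isend(&sendNbElements, 1, MPI_INT, right, 0, MPI_COMM_WORLD, &request[nRequest]);
            ++nRequest;
            MPI_Irecv(&recvNbElements, 1, MPI_INT, left, 0, MPI_COMM_WORLD, &request[nRequest]);
            ++nRequest;

            std::vector<MPI_Status> status(request.size());
            MPI_Waitall(nRequest, &request[0], &status[0]);

            std::printf("rank %d got %d from rank %d\n", rank, recvNbElements, left);
            MPI_Finalize();
            return 0;
        }

    With nonblocking calls on both sides and a single Waitall, the relative order of Isend and Irecv does not affect correctness, which is what makes this reordering safe.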
  • XIOS/dev/branch_yushan_merged/src/data_output.cpp

    r1096 → r1176

      #include "group_template.hpp"
      #include "context.hpp"
    -
    + //mpi.hpp
      namespace xios
      {
    -       /// ////////////////////// Définitions ////////////////////// ///
    +       /// ////////////////////// Dfinitions ////////////////////// ///

            CDataOutput::~CDataOutput(void)
  • XIOS/dev/branch_yushan_merged/src/io/nc4_data_input.cpp

    r1172 → r1176

        #ifdef _usingEP
        SuperClass::type = ONE_FILE;
    -   //printf("SuperClass::type = %d\n", SuperClass::type);
    +   printf("SuperClass::type = %d\n", SuperClass::type);
        #endif
        switch (SuperClass::type)
  • XIOS/dev/branch_yushan_merged/src/io/nc4_data_output.cpp

    r1172 → r1176

                SuperClassWriter::definition_end();
    -           printf("SuperClass::type = %d\n", SuperClass::type);
    +           printf("SuperClass::type = %d, typePrec = %d\n", SuperClass::type, typePrec);
                switch (SuperClass::type)
                {
  • XIOS/dev/branch_yushan_merged/src/test/test_complete_omp.f90

    r1134 → r1176

        jbegin=jbegin+nj
      ENDDO
    +
    + if((ni.LE.0) .OR. (nj.LE.0)) call MPI_Abort()

      iend=ibegin+ni-1 ; jend=jbegin+nj-1
  • XIOS/dev/branch_yushan_merged/src/test/test_remap_omp.f90

    r1153 → r1176

      if(rank < size-2) then

    - !$omp parallel default(firstprivate) firstprivate(dtime)
    + !$omp parallel default(private) firstprivate(dtime)

      !!! XIOS Initialization (get the local communicator)
    …
      ierr=NF90_INQ_VARID(ncid,"bounds_lon",varid)
    - ierr=NF90_INQUIRE_VARIABLE(ncid, varid,dimids=dimids)
    + ierr=NF90_INQUIRE_VARIABLE(ncid, varid, dimids=dimids)
      ierr=NF90_INQUIRE_DIMENSION(ncid, dimids(1), len=src_nvertex)
      ierr=NF90_INQUIRE_DIMENSION(ncid, dimids(2), len=src_ni_glo)
    …
        src_ibegin= remain * (div+1) + (rank-remain) * div ;
      ENDIF
    +
    + if(src_ni .LE. 0) CALL MPI_ABORT()
    +

      ALLOCATE(src_lon(src_ni), src_lon_tmp(src_ni))
    …
      ALLOCATE(lval1(interpolatedLlm))
      ALLOCATE(lval2(llm2))
    + lval2 = 0
    + lval=0
    + lval1=0

      ierr=NF90_INQ_VARID(ncid,"lon",varid)
    …
      ENDIF

    + if(dst_ni .LE. 0) CALL MPI_ABORT()
    +
      ALLOCATE(dst_lon(dst_ni))
      ALLOCATE(dst_lat(dst_ni))