/*!
   \file distribution_client.cpp
   \author Ha NGUYEN
   \since 13 Jan 2015
   \date 09 March 2015

   \brief Index distribution on client side.
*/
---|
9 | #include "distribution_client.hpp" |
---|
10 | |
---|
11 | namespace xios { |
---|
12 | |
---|
/*!
  Construct a client-side distribution from an already-computed global index.
  All owned pointers start null and all counters start at zero; nothing is
  computed here (contrast with the CGrid constructor below, which builds the
  whole distribution).
  \param [in] rank rank of this client
  \param [in] dims number of dimensions of the distribution
  \param [in] globalIndex precomputed global index array, handed to CDistribution
*/
CDistributionClient::CDistributionClient(int rank, int dims, CArray<size_t,1>* globalIndex)
  : CDistribution(rank, dims, globalIndex),
  globalDataSendToServer_(0), localDataIndex_(0), localDataIndexSendToServer_(0), localMaskIndex_(0),
  axisDomainOrder_(),
  nLocal_(), nGlob_(), nBeginLocal_(), nBeginGlobal_(),nZoomBegin_(), nZoomEnd_(),
  dataNIndex_(), dataDims_(), dataBegin_(), dataIndex_(), domainMasks_(), axisMasks_(),
  gridMask_(), localDomainIndex_(), localAxisIndex_(), indexMap_(), indexDomainData_(), indexAxisData_(),
  isDataDistributed_(true), axisNum_(0), domainNum_(0), nIndexDomain_(), nIndexAxis_()
{
}
---|
23 | |
---|
/*!
  Construct the full client-side distribution of a grid.
  Reads the grid's domains/axes/masks, then computes the local global index
  and the subset of indices that must be sent to the server(s).
  \param [in] rank rank of this client
  \param [in] grid grid whose distribution is computed (not owned)
*/
CDistributionClient::CDistributionClient(int rank, CGrid* grid)
  : CDistribution(rank, 0, 0),
  globalDataSendToServer_(0), localDataIndex_(0), localDataIndexSendToServer_(0), localMaskIndex_(0),
  axisDomainOrder_(),
  nLocal_(), nGlob_(), nBeginLocal_(), nBeginGlobal_(),nZoomBegin_(), nZoomEnd_(),
  dataNIndex_(), dataDims_(), dataBegin_(), dataIndex_(), domainMasks_(), axisMasks_(),
  gridMask_(), localDomainIndex_(), localAxisIndex_(), indexMap_(), indexDomainData_(), indexAxisData_(),
  isDataDistributed_(true), axisNum_(0), domainNum_(0), nIndexDomain_(), nIndexAxis_()
{
  // Pipeline: read grid metadata, build the local global index,
  // then derive the index set each server must receive.
  readDistributionInfo(grid);
  createGlobalIndex();
  createGlobalIndexSendToServer();
}
---|
37 | |
---|
38 | CDistributionClient::~CDistributionClient() |
---|
39 | { |
---|
40 | if (0 != globalDataSendToServer_) delete globalDataSendToServer_; |
---|
41 | if (0 != localDataIndex_) delete localDataIndex_; |
---|
42 | if (0 != localDataIndexSendToServer_) delete localDataIndexSendToServer_; |
---|
43 | if (0 != localMaskIndex_) delete localMaskIndex_; |
---|
44 | } |
---|
45 | |
---|
46 | /*! |
---|
47 | Read information of a grid to generate distribution. |
---|
48 | Every grid is composed of several axis or/and domain(s). Their information are processed |
---|
49 | stored and used to calculate index distribution between client and server |
---|
50 | \param [in] grid Grid to read |
---|
51 | */ |
---|
52 | void CDistributionClient::readDistributionInfo(CGrid* grid) |
---|
53 | { |
---|
54 | std::vector<CDomain*> domList = grid->getDomains(); |
---|
55 | std::vector<CAxis*> axisList = grid->getAxis(); |
---|
56 | CArray<bool,1> axisDomainOrder = grid->axis_domain_order; |
---|
57 | |
---|
58 | std::vector<CDomain*>::iterator itbDom, iteDom, itDom; |
---|
59 | std::vector<CAxis*>::iterator itbAxis, iteAxis, itAxis; |
---|
60 | |
---|
61 | itbDom = itDom = domList.begin(); iteDom = domList.end(); |
---|
62 | itbAxis = itAxis = axisList.begin(); iteAxis = axisList.end(); |
---|
63 | |
---|
64 | readDistributionInfo(domList, axisList, axisDomainOrder); |
---|
65 | |
---|
66 | // Then check mask of grid |
---|
67 | int gridDim = domList.size()*2 + axisList.size(); |
---|
68 | grid->checkMask(); |
---|
69 | switch (gridDim) { |
---|
70 | case 1: |
---|
71 | readGridMaskInfo(grid->mask1); |
---|
72 | break; |
---|
73 | case 2: |
---|
74 | readGridMaskInfo(grid->mask2); |
---|
75 | break; |
---|
76 | case 3: |
---|
77 | readGridMaskInfo(grid->mask3); |
---|
78 | break; |
---|
79 | default: |
---|
80 | break; |
---|
81 | } |
---|
82 | } |
---|
83 | |
---|
84 | void CDistributionClient::readDomainIndex(const std::vector<CDomain*>& domList) |
---|
85 | { |
---|
86 | int domainSize = domList.size(); |
---|
87 | nIndexDomain_.resize(domainSize); |
---|
88 | |
---|
89 | for (int k = 0; k < domainSize; ++k) |
---|
90 | { |
---|
91 | nIndexDomain_[k].resize(2); |
---|
92 | int ni = domList[k]->ni; |
---|
93 | int nj = domList[k]->nj; |
---|
94 | nIndexDomain_[k][0].resize(ni,nj); |
---|
95 | nIndexDomain_[k][1].resize(ni,nj); |
---|
96 | nIndexDomain_[k][0] = domList[k]->i_index; |
---|
97 | nIndexDomain_[k][1] = domList[k]->j_index; |
---|
98 | } |
---|
99 | } |
---|
100 | |
---|
101 | void CDistributionClient::readAxisIndex(const std::vector<CAxis*>& axisList) |
---|
102 | { |
---|
103 | int axisSize = axisList.size(); |
---|
104 | nIndexAxis_.resize(axisSize); |
---|
105 | |
---|
106 | for (int k = 0; k < axisSize; ++k) |
---|
107 | { |
---|
108 | int n = axisList[k]->ni; |
---|
109 | nIndexAxis_[k].resize(n); |
---|
110 | for (int i = 0; i < n; ++i) |
---|
111 | nIndexAxis_[k](i) = i; |
---|
112 | } |
---|
113 | } |
---|
114 | |
---|
/*!
  Read information from domain(s) and axis to generate distribution.
  All information related to domain, e.g ibegin, jbegin, ni, nj, ni_glo, nj_glo
  as well as related to axis, e.g dataNIndex, dataIndex will be stored to compute
  the distribution between clients and servers. Till now, every data structure of domain has been kept
  like before, e.g: data_n_index to make sure a compatibility, however, it should be changed?
  \param [in] domList List of domains of grid
  \param [in] axisList List of axis of grid
  \param [in] axisDomainOrder order of axis and domain inside a grid. True if domain, false if axis
*/
void CDistributionClient::readDistributionInfo(const std::vector<CDomain*>& domList,
                                               const std::vector<CAxis*>& axisList,
                                               const CArray<bool,1>& axisDomainOrder)
{
  domainNum_ = domList.size();
  axisNum_ = axisList.size();
  numElement_ = axisDomainOrder.numElements(); // Number of element, e.x: Axis, Domain

  axisDomainOrder_.resize(numElement_);
  axisDomainOrder_ = axisDomainOrder;

  // Each domain or axis has its mask, of course
  domainMasks_.resize(domainNum_);
  for (int i = 0; i < domainNum_;++i)
  {
    domainMasks_[i].resize(domList[i]->mask.extent(0), domList[i]->mask.extent(1));
    domainMasks_[i] = domList[i]->mask;
  }

  axisMasks_.resize(axisNum_);
  for (int i = 0; i < axisNum_; ++i)
  {
    axisMasks_[i].resize(axisList[i]->mask.numElements());
    axisMasks_[i] = axisList[i]->mask;
  }

  // Because domain and axis can be in any order (axis1, domain1, axis2, axis3, )
  // their position should be specified. In axisDomainOrder, domain == true, axis == false.
  // indexMap_[i] is the first slot of element i in the flattened dimension list:
  // a domain occupies two consecutive slots (i then j), an axis only one,
  // hence dims_ = numElement_ + (number of domains).
  int idx = 0;
  indexMap_.resize(numElement_);
  this->dims_ = numElement_;
  for (int i = 0; i < numElement_; ++i)
  {
    indexMap_[i] = idx;
    if (true == axisDomainOrder(i))
    {
      ++(this->dims_);
      idx += 2;
    }
    else ++idx;
  }

  // Size of each dimension (local and global)
  nLocal_.resize(this->dims_);
  nGlob_.resize(this->dims_);
  nBeginLocal_.resize(this->dims_,0);
  nBeginGlobal_.resize(this->dims_,0);
  nZoomBegin_.resize(this->dims_);
  nZoomEnd_.resize(this->dims_);

  // Data_n_index of domain or axis (For now, axis uses its size as data_n_index)
  dataNIndex_.resize(numElement_);
  dataDims_.resize(numElement_);
  dataBegin_.resize(this->dims_);

  // Data_*_index of each dimension
  dataIndex_.resize(this->dims_);

  // A trick to determine position of each domain in domainList
  int domIndex = 0, axisIndex = 0;
  idx = 0;

  isDataDistributed_ = false;
  // Update all the vectors above, walking the elements in grid order and
  // consuming the next unvisited domain or axis accordingly.
  while (idx < numElement_)
  {
    bool isDomain = axisDomainOrder(idx);

    // If this is a domain
    if (isDomain)
    {
      // On the j axis (second slot of the domain, indexMap_[idx]+1)
      nLocal_.at(indexMap_[idx]+1) = domList[domIndex]->nj.getValue();
      nGlob_.at(indexMap_[idx]+1) = domList[domIndex]->nj_glo.getValue();
      nBeginLocal_.at(indexMap_[idx]+1) = 0;
      nBeginGlobal_.at(indexMap_[idx]+1) = domList[domIndex]->jbegin;
      nZoomBegin_.at((indexMap_[idx]+1)) = domList[domIndex]->zoom_jbegin;
      nZoomEnd_.at((indexMap_[idx]+1)) = domList[domIndex]->zoom_jbegin + domList[domIndex]->zoom_nj-1;

      // 1-D data carries no j begin: use -1 as the "not applicable" marker
      dataBegin_.at(indexMap_[idx]+1) = (2 == domList[domIndex]->data_dim) ? domList[domIndex]->data_jbegin.getValue() : -1;
      dataIndex_.at(indexMap_[idx]+1).resize(domList[domIndex]->data_j_index.numElements());
      dataIndex_.at(indexMap_[idx]+1) = domList[domIndex]->data_j_index;

      // On the i axis (first slot of the domain, indexMap_[idx])
      nLocal_.at(indexMap_[idx]) = domList[domIndex]->ni.getValue();
      nGlob_.at(indexMap_[idx]) = domList[domIndex]->ni_glo.getValue();
      nBeginLocal_.at(indexMap_[idx]) = 0;
      nBeginGlobal_.at(indexMap_[idx]) = domList[domIndex]->ibegin;
      nZoomBegin_.at((indexMap_[idx])) = domList[domIndex]->zoom_ibegin;
      nZoomEnd_.at((indexMap_[idx])) = domList[domIndex]->zoom_ibegin + domList[domIndex]->zoom_ni-1;

      dataBegin_.at(indexMap_[idx]) = domList[domIndex]->data_ibegin.getValue();
      dataIndex_.at(indexMap_[idx]).resize(domList[domIndex]->data_i_index.numElements());
      dataIndex_.at(indexMap_[idx]) = domList[domIndex]->data_i_index;

      dataNIndex_.at(idx) = domList[domIndex]->data_n_index.getValue();
      dataDims_.at(idx) = domList[domIndex]->data_dim.getValue();

      // The whole grid is distributed as soon as any element is distributed
      isDataDistributed_ |= domList[domIndex]->isDistributed();

      ++domIndex;
    }
    else // So it's an axis
    {
      nLocal_.at(indexMap_[idx]) = axisList[axisIndex]->ni.getValue();
      nGlob_.at(indexMap_[idx]) = axisList[axisIndex]->size.getValue();
      nBeginLocal_.at(indexMap_[idx]) = 0;
      nBeginGlobal_.at(indexMap_[idx]) = axisList[axisIndex]->ibegin.getValue();
      nZoomBegin_.at((indexMap_[idx])) = axisList[axisIndex]->global_zoom_begin;
      nZoomEnd_.at((indexMap_[idx])) = axisList[axisIndex]->global_zoom_begin + axisList[axisIndex]->global_zoom_size-1;

      dataBegin_.at(indexMap_[idx]) = axisList[axisIndex]->data_begin.getValue();
      dataIndex_.at(indexMap_[idx]).resize(axisList[axisIndex]->data_index.numElements());
      dataIndex_.at(indexMap_[idx]) = axisList[axisIndex]->data_index;
      dataNIndex_.at(idx) = axisList[axisIndex]->data_index.numElements();
      dataDims_.at(idx) = 1;

      isDataDistributed_ |= axisList[axisIndex]->isDistributed();

      ++axisIndex;
    }
    ++idx;
  }
  readDomainIndex(domList);
  readAxisIndex(axisList);
}
---|
252 | |
---|
253 | /*! |
---|
254 | Create local index of domain(s). |
---|
255 | A domain can have data index which even contains the "ghost" points. Very often, these |
---|
256 | data surround the true data. In order to send correct data to server, |
---|
257 | a client need to know index of the true data. |
---|
258 | */ |
---|
259 | void CDistributionClient::createLocalDomainDataIndex() |
---|
260 | { |
---|
261 | int numDomain = 0; |
---|
262 | for (int i = 0; i < axisDomainOrder_.numElements(); ++i) |
---|
263 | if (axisDomainOrder_(i)) ++numDomain; |
---|
264 | |
---|
265 | localDomainIndex_.resize(numDomain*2); |
---|
266 | indexDomainData_.resize(numDomain); |
---|
267 | |
---|
268 | int idxDomain = 0; |
---|
269 | for (int i = 0; i < axisDomainOrder_.numElements(); ++i) |
---|
270 | { |
---|
271 | if (axisDomainOrder_(i)) |
---|
272 | { |
---|
273 | int iIdx, jIdx = 0, count = 0; |
---|
274 | indexDomainData_[idxDomain].resize(dataNIndex_[i], false); |
---|
275 | for (int j = 0; j < dataNIndex_[i]; ++j) |
---|
276 | { |
---|
277 | iIdx = getDomainIndex(dataIndex_[indexMap_[i]](j), dataIndex_[indexMap_[i]+1](j), |
---|
278 | dataBegin_[indexMap_[i]], dataBegin_[indexMap_[i]+1], |
---|
279 | dataDims_[i], nLocal_[indexMap_[i]], jIdx); |
---|
280 | |
---|
281 | if ((iIdx >= nBeginLocal_[indexMap_[i]]) && (iIdx < nLocal_[indexMap_[i]]) && |
---|
282 | (jIdx >= nBeginLocal_[indexMap_[i]+1]) && (jIdx < nLocal_[indexMap_[i]+1]) && |
---|
283 | (domainMasks_[idxDomain](iIdx, jIdx))) |
---|
284 | { |
---|
285 | (localDomainIndex_[idxDomain]).push_back(iIdx); |
---|
286 | (localDomainIndex_[idxDomain*2+1]).push_back(jIdx); |
---|
287 | indexDomainData_[idxDomain][j] = true; |
---|
288 | } |
---|
289 | } |
---|
290 | ++idxDomain; |
---|
291 | } |
---|
292 | } |
---|
293 | } |
---|
294 | |
---|
295 | /*! |
---|
296 | Create local index of axis. |
---|
297 | */ |
---|
298 | void CDistributionClient::createLocalAxisDataIndex() |
---|
299 | { |
---|
300 | int numAxis = 0; |
---|
301 | for (int i = 0; i < axisDomainOrder_.numElements(); ++i) |
---|
302 | if (!axisDomainOrder_(i)) ++numAxis; |
---|
303 | |
---|
304 | localAxisIndex_.resize(numAxis); |
---|
305 | indexAxisData_.resize(numAxis); |
---|
306 | |
---|
307 | int idxAxis = 0; |
---|
308 | for (int i = 0; i < axisDomainOrder_.numElements(); ++i) |
---|
309 | { |
---|
310 | if (!axisDomainOrder_(i)) |
---|
311 | { |
---|
312 | int iIdx = 0; |
---|
313 | indexAxisData_[idxAxis].resize(dataNIndex_[i], false); |
---|
314 | for (int j = 0; j < dataNIndex_[i]; ++j) |
---|
315 | { |
---|
316 | iIdx = getAxisIndex(dataIndex_[indexMap_[i]](j), dataBegin_[indexMap_[i]], nLocal_[indexMap_[i]]); |
---|
317 | if ((iIdx >= nBeginLocal_[indexMap_[i]]) && |
---|
318 | (iIdx < nLocal_[indexMap_[i]]) && (axisMasks_[idxAxis](iIdx))) |
---|
319 | { |
---|
320 | localAxisIndex_[idxAxis].push_back(iIdx); |
---|
321 | indexAxisData_[idxAxis][j] = true; |
---|
322 | } |
---|
323 | } |
---|
324 | ++idxAxis; |
---|
325 | } |
---|
326 | } |
---|
327 | } |
---|
328 | |
---|
329 | void CDistributionClient::createGlobalIndex() |
---|
330 | { |
---|
331 | size_t ssize = 1, idx = 0; |
---|
332 | for (int i = 0; i < this->dims_; ++i) |
---|
333 | ssize *= nLocal_[i]; |
---|
334 | |
---|
335 | this->globalIndex_ = new CArray<size_t,1>(ssize); |
---|
336 | std::vector<int> idxLoop(this->dims_,0); |
---|
337 | int innnerLoopSize = nLocal_[0]; |
---|
338 | while (idx < ssize) |
---|
339 | { |
---|
340 | for (int i = 0; i < this->dims_; ++i) |
---|
341 | { |
---|
342 | if (idxLoop[i] == nLocal_[i]) |
---|
343 | { |
---|
344 | idxLoop[i] = 0; |
---|
345 | ++idxLoop[i+1]; |
---|
346 | } |
---|
347 | } |
---|
348 | |
---|
349 | for (int i = 0; i < innnerLoopSize; ++i) |
---|
350 | { |
---|
351 | size_t globalIndex = idxLoop[0] + nBeginGlobal_[0]; |
---|
352 | size_t mulDim = 1; |
---|
353 | for (int k = 1; k < this->dims_; ++k) |
---|
354 | { |
---|
355 | mulDim *= nGlob_[k-1]; |
---|
356 | globalIndex += (idxLoop[k] + nBeginGlobal_[k])*mulDim; |
---|
357 | } |
---|
358 | (*this->globalIndex_)(idx) = globalIndex; |
---|
359 | ++idxLoop[0]; |
---|
360 | ++idx; |
---|
361 | } |
---|
362 | } |
---|
363 | } |
---|
364 | |
---|
365 | |
---|
366 | /*! |
---|
367 | Create global index on client |
---|
368 | In order to do the mapping between client-server, each client creates its own |
---|
369 | global index of sending data. This global index is then used to calculate to which server |
---|
370 | the client needs to send it data as well as which part of data belongs to the server. |
---|
371 | So as to make clients and server coherent in order of index, global index is calculated by |
---|
372 | take into account of C-convention, the rightmost dimension varies faster. |
---|
373 | */ |
---|
374 | void CDistributionClient::createGlobalIndexSendToServer() |
---|
375 | { |
---|
376 | createLocalDomainDataIndex(); |
---|
377 | createLocalAxisDataIndex(); |
---|
378 | |
---|
379 | int idxDomain = 0, idxAxis = 0; |
---|
380 | std::vector<int> eachElementSize(numElement_); |
---|
381 | |
---|
382 | // Precompute size of the loop |
---|
383 | for (int i = 0; i < numElement_; ++i) |
---|
384 | { |
---|
385 | if(axisDomainOrder_(i)) |
---|
386 | { |
---|
387 | eachElementSize[i] = localDomainIndex_[idxDomain].size(); |
---|
388 | idxDomain += 2; |
---|
389 | } |
---|
390 | else |
---|
391 | { |
---|
392 | eachElementSize[i] = localAxisIndex_[idxAxis].size(); |
---|
393 | ++idxAxis; |
---|
394 | } |
---|
395 | } |
---|
396 | |
---|
397 | // Compute size of the global index on client |
---|
398 | std::vector<StdSize> idxLoop(numElement_,0); |
---|
399 | std::vector<StdSize> currentIndex(this->dims_,0); |
---|
400 | int innerLoopSize = eachElementSize[0]; |
---|
401 | size_t idx = 0, indexLocalDataOnClientCount = 0, indexSend2ServerCount = 0; |
---|
402 | size_t ssize = 1; |
---|
403 | for (int i = 0; i < numElement_; ++i) ssize *= eachElementSize[i]; |
---|
404 | while (idx < ssize) |
---|
405 | { |
---|
406 | for (int i = 0; i < numElement_-1; ++i) |
---|
407 | { |
---|
408 | if (idxLoop[i] == eachElementSize[i]) |
---|
409 | { |
---|
410 | idxLoop[i] = 0; |
---|
411 | ++idxLoop[i+1]; |
---|
412 | } |
---|
413 | } |
---|
414 | |
---|
415 | // Find out outer index |
---|
416 | // Depending the inner-most element is axis or domain, |
---|
417 | // The outer loop index begins correspondingly at one (1) or zero (0) |
---|
418 | idxDomain = idxAxis = 0; |
---|
419 | if (axisDomainOrder_(0)) ++idxDomain; |
---|
420 | else ++idxAxis; |
---|
421 | for (int i = 1; i < numElement_; ++i) |
---|
422 | { |
---|
423 | if (axisDomainOrder_(i)) |
---|
424 | { |
---|
425 | currentIndex[indexMap_[i]] = localDomainIndex_[idxDomain][idxLoop[i]]; |
---|
426 | currentIndex[indexMap_[i]+1] = localDomainIndex_[idxDomain+1][idxLoop[i]]; |
---|
427 | idxDomain += 2; |
---|
428 | } |
---|
429 | else |
---|
430 | { |
---|
431 | currentIndex[indexMap_[i]] = localAxisIndex_[idxAxis][idxLoop[i]]; |
---|
432 | ++idxAxis; |
---|
433 | } |
---|
434 | } |
---|
435 | |
---|
436 | int maskIndex = currentIndex[0]; |
---|
437 | for (int j = 0; j < this->dims_; ++j) |
---|
438 | |
---|
439 | // Inner most index |
---|
440 | idxDomain = idxAxis = 0; |
---|
441 | for (int i = 0; i < innerLoopSize; ++i) |
---|
442 | { |
---|
443 | if (axisDomainOrder_(0)) |
---|
444 | { |
---|
445 | currentIndex[0] = localDomainIndex_[idxDomain][i]; |
---|
446 | currentIndex[1] = localDomainIndex_[idxDomain+1][i]; |
---|
447 | } |
---|
448 | else currentIndex[0] = localAxisIndex_[idxAxis][i]; |
---|
449 | |
---|
450 | StdSize gridMaskIndex = currentIndex[0]; |
---|
451 | int mulDimMask = 1; |
---|
452 | for (int k = 1; k < this->dims_; ++k) |
---|
453 | { |
---|
454 | mulDimMask *= nLocal_[k-1]; |
---|
455 | gridMaskIndex += (currentIndex[k])*mulDimMask; |
---|
456 | } |
---|
457 | |
---|
458 | if (gridMask_(gridMaskIndex)) //(gridMask_(currentIndex[0], currentIndex[1], currentIndex[2])) |
---|
459 | { |
---|
460 | ++indexLocalDataOnClientCount; |
---|
461 | bool isIndexOnServer = true; |
---|
462 | for (int j = 0; j < this->dims_; ++j) |
---|
463 | isIndexOnServer = isIndexOnServer && ((currentIndex[j]+nBeginGlobal_[j]) <= nZoomEnd_[j]) |
---|
464 | && (nZoomBegin_[j] <= (currentIndex[j]+nBeginGlobal_[j])); |
---|
465 | if (isIndexOnServer) ++indexSend2ServerCount; |
---|
466 | } |
---|
467 | |
---|
468 | } |
---|
469 | idxLoop[0] += innerLoopSize; |
---|
470 | idx += innerLoopSize; |
---|
471 | } |
---|
472 | |
---|
473 | |
---|
474 | // Now allocate these arrays |
---|
475 | this->globalDataSendToServer_ = new CArray<size_t,1>(indexSend2ServerCount); |
---|
476 | localDataIndex_ = new CArray<int,1>(indexLocalDataOnClientCount); |
---|
477 | localDataIndexSendToServer_ = new CArray<int,1>(indexSend2ServerCount); |
---|
478 | localMaskIndex_ = new CArray<int,1>(indexSend2ServerCount); |
---|
479 | |
---|
480 | // We need to loop with data index |
---|
481 | idxLoop.assign(numElement_,0); |
---|
482 | idx = indexLocalDataOnClientCount = indexSend2ServerCount = 0; |
---|
483 | ssize = 1; for (int i = 0; i < numElement_; ++i) ssize *= dataNIndex_[i]; |
---|
484 | innerLoopSize = dataNIndex_[0]; |
---|
485 | int countLocalData = 0; |
---|
486 | std::vector<int> correctOuterIndex(numElement_,0); |
---|
487 | bool isOuterIndexCorrect = true; |
---|
488 | while (idx < ssize) |
---|
489 | { |
---|
490 | for (int i = 0; i < numElement_-1; ++i) |
---|
491 | { |
---|
492 | if (idxLoop[i] == dataNIndex_[i]) |
---|
493 | { |
---|
494 | idxLoop[i] = 0; |
---|
495 | correctOuterIndex[i] = 0; |
---|
496 | ++idxLoop[i+1]; |
---|
497 | if (isOuterIndexCorrect) ++correctOuterIndex[i+1]; |
---|
498 | } |
---|
499 | } |
---|
500 | |
---|
501 | // Depending the inner-most element axis or domain, |
---|
502 | // The outer loop index begins correspondingly at one (1) or zero (0) |
---|
503 | idxDomain = idxAxis = 0; |
---|
504 | if (axisDomainOrder_(0)) ++idxDomain; |
---|
505 | else ++idxAxis; |
---|
506 | bool isIndexDomainDataCorrect = true; |
---|
507 | bool isIndexAxisDataCorrect = true; |
---|
508 | |
---|
509 | for (int i = 1; i < numElement_; ++i) |
---|
510 | { |
---|
511 | if (axisDomainOrder_(i)) |
---|
512 | { |
---|
513 | if (indexDomainData_[idxDomain][idxLoop[i]]) |
---|
514 | { |
---|
515 | currentIndex[indexMap_[i]] = localDomainIndex_[idxDomain][correctOuterIndex[i]]; |
---|
516 | currentIndex[indexMap_[i]+1] = localDomainIndex_[idxDomain*2+1][correctOuterIndex[i]]; |
---|
517 | isIndexDomainDataCorrect &= true; |
---|
518 | } |
---|
519 | else isIndexDomainDataCorrect = false; |
---|
520 | ++idxDomain; |
---|
521 | } |
---|
522 | else |
---|
523 | { |
---|
524 | if (indexAxisData_[idxAxis][idxLoop[i]]) |
---|
525 | { |
---|
526 | currentIndex[indexMap_[i]] = localAxisIndex_[idxAxis][correctOuterIndex[i]]; |
---|
527 | isIndexAxisDataCorrect &= true; |
---|
528 | } |
---|
529 | else isIndexAxisDataCorrect = false; |
---|
530 | ++idxAxis; |
---|
531 | } |
---|
532 | } |
---|
533 | |
---|
534 | isOuterIndexCorrect = (isIndexAxisDataCorrect) && (isIndexDomainDataCorrect); |
---|
535 | |
---|
536 | // Inner most index |
---|
537 | idxDomain = idxAxis = 0; |
---|
538 | int correctIndexDomain = 0, correctIndexAxis = 0; |
---|
539 | for (int i = 0; i < innerLoopSize; ++i) |
---|
540 | { |
---|
541 | bool isCurrentIndexDomainDataCorrect = isIndexDomainDataCorrect; |
---|
542 | bool isCurrentIndexAxisDataCorrect = isIndexAxisDataCorrect; |
---|
543 | |
---|
544 | if (axisDomainOrder_(0)) |
---|
545 | { |
---|
546 | if (indexDomainData_[idxDomain][i]) |
---|
547 | { |
---|
548 | currentIndex[0] = localDomainIndex_[idxDomain][correctIndexDomain]; |
---|
549 | currentIndex[1] = localDomainIndex_[idxDomain+1][correctIndexDomain]; |
---|
550 | isCurrentIndexDomainDataCorrect &= true; |
---|
551 | ++correctIndexDomain; |
---|
552 | } |
---|
553 | else isCurrentIndexDomainDataCorrect = false; |
---|
554 | } |
---|
555 | else |
---|
556 | { |
---|
557 | if (indexAxisData_[idxAxis][i]) |
---|
558 | { |
---|
559 | currentIndex[0] = localAxisIndex_[idxAxis][correctIndexAxis]; |
---|
560 | isCurrentIndexAxisDataCorrect &= true; |
---|
561 | ++correctIndexAxis; |
---|
562 | } |
---|
563 | else isCurrentIndexAxisDataCorrect = false; |
---|
564 | } |
---|
565 | |
---|
566 | int gridMaskIndex = currentIndex[0]; |
---|
567 | int mulDimMask = 1; |
---|
568 | for (int k = 1; k < this->dims_; ++k) |
---|
569 | { |
---|
570 | mulDimMask *= nLocal_[k-1]; |
---|
571 | gridMaskIndex += (currentIndex[k])*mulDimMask; |
---|
572 | } |
---|
573 | |
---|
574 | if (isCurrentIndexDomainDataCorrect && |
---|
575 | isCurrentIndexAxisDataCorrect && |
---|
576 | gridMask_(gridMaskIndex)) |
---|
577 | { |
---|
578 | (*localDataIndex_)(indexLocalDataOnClientCount) = countLocalData; |
---|
579 | |
---|
580 | bool isIndexOnServer = true; |
---|
581 | for (int j = 0; j < this->dims_; ++j) |
---|
582 | isIndexOnServer = isIndexOnServer && |
---|
583 | ((currentIndex[j]+nBeginGlobal_[j]) <= nZoomEnd_[j]) && |
---|
584 | (nZoomBegin_[j] <= (currentIndex[j]+nBeginGlobal_[j])); |
---|
585 | if (isIndexOnServer) |
---|
586 | { |
---|
587 | size_t globalIndex = currentIndex[0] + nBeginGlobal_[0]; |
---|
588 | size_t mulDim = 1; |
---|
589 | for (int k = 1; k < this->dims_; ++k) |
---|
590 | { |
---|
591 | mulDim *= nGlob_[k-1]; |
---|
592 | globalIndex += (currentIndex[k] + nBeginGlobal_[k])*mulDim; |
---|
593 | } |
---|
594 | (*this->globalDataSendToServer_)(indexSend2ServerCount) = globalIndex; |
---|
595 | (*localDataIndexSendToServer_)(indexSend2ServerCount) = indexLocalDataOnClientCount; |
---|
596 | (*localMaskIndex_)(indexSend2ServerCount) = gridMaskIndex; |
---|
597 | ++indexSend2ServerCount; |
---|
598 | } |
---|
599 | ++indexLocalDataOnClientCount; |
---|
600 | } |
---|
601 | ++countLocalData; |
---|
602 | } |
---|
603 | idxLoop[0] += innerLoopSize; |
---|
604 | idx += innerLoopSize; |
---|
605 | } |
---|
606 | } |
---|
607 | |
---|
608 | /*! |
---|
609 | Retrieve index i and index j of a domain from its data index |
---|
610 | Data contains not only true data, which are sent to servers, but also ghost data, which |
---|
611 | very often play a role of border of each local data, so does data index. Because data of a domain |
---|
612 | can be one dimension, or two dimensions, there is a need to convert data index to domain index |
---|
613 | \param [in] dataIIndex index of i data |
---|
614 | \param [in] dataJIndex index of j data |
---|
615 | \param [in] dataIBegin index begin of i data |
---|
616 | \param [in] dataJBegin index begin of j data |
---|
617 | \param [in] dataDim dimension of data (1 or 2) |
---|
618 | \param [in] ni local size ni of domain |
---|
619 | \param [out] j j index of domain |
---|
620 | \return i index of domain |
---|
621 | */ |
---|
622 | int CDistributionClient::getDomainIndex(const int& dataIIndex, const int& dataJIndex, |
---|
623 | const int& dataIBegin, const int& dataJBegin, |
---|
624 | const int& dataDim, const int& ni, int& j) |
---|
625 | { |
---|
626 | int tempI = dataIIndex + dataIBegin, |
---|
627 | tempJ = (1 == dataDim) ? -1 |
---|
628 | : (dataJIndex + dataJBegin); |
---|
629 | int i = (dataDim == 1) ? (tempI - 1) % ni |
---|
630 | : (tempI - 1) ; |
---|
631 | j = (dataDim == 1) ? (tempI - 1) / ni |
---|
632 | : (tempJ - 1) ; |
---|
633 | |
---|
634 | return i; |
---|
635 | } |
---|
636 | |
---|
637 | /*! |
---|
638 | Retrieve index of an axis from its data index |
---|
639 | \param [in] dataIndex index of data |
---|
640 | \param [in] dataBegin index begin of data |
---|
641 | \param [in] ni local size of axis |
---|
642 | \return index of domain |
---|
643 | */ |
---|
644 | int CDistributionClient::getAxisIndex(const int& dataIndex, const int& dataBegin, const int& ni) |
---|
645 | { |
---|
646 | int tempI = dataIndex + dataBegin; |
---|
647 | return ((tempI-1)%ni); |
---|
648 | } |
---|
649 | |
---|
650 | const CArray<size_t,1>& CDistributionClient::getGlobalDataIndexSendToServer() const |
---|
651 | { |
---|
652 | return (*globalDataSendToServer_); |
---|
653 | } |
---|
654 | |
---|
655 | /*! |
---|
656 | Return local data index of client |
---|
657 | */ |
---|
658 | const CArray<int,1>& CDistributionClient::getLocalDataIndexOnClient() const |
---|
659 | { |
---|
660 | return (*localDataIndex_); |
---|
661 | } |
---|
662 | |
---|
663 | /*! |
---|
664 | Return local mask index of client |
---|
665 | */ |
---|
666 | const CArray<int,1>& CDistributionClient::getLocalMaskIndexOnClient() const |
---|
667 | { |
---|
668 | return (*localMaskIndex_); |
---|
669 | } |
---|
670 | |
---|
671 | /*! |
---|
672 | Return local data index on client which are sent to servers |
---|
673 | */ |
---|
674 | const CArray<int,1>& CDistributionClient::getLocalDataIndexSendToServer() const |
---|
675 | { |
---|
676 | return (*localDataIndexSendToServer_); |
---|
677 | } |
---|
678 | |
---|
679 | } // namespace xios |
---|