Line |
Branch |
Exec |
Source |
1 |
|
|
// ______ ______ _ _ _____ ______ |
2 |
|
|
// | ____| ____| | (_)/ ____| | ____| |
3 |
|
|
// | |__ | |__ | | _| (___ ___| |__ |
4 |
|
|
// | __| | __| | | | |\___ \ / __| __| |
5 |
|
|
// | | | |____| |____| |____) | (__| |____ |
6 |
|
|
// |_| |______|______|_|_____/ \___|______| |
7 |
|
|
// Finite Elements for Life Sciences and Engineering |
8 |
|
|
// |
9 |
|
|
// License: LGPL2.1 License
10 |
|
|
// FELiScE default license: LICENSE in root folder |
11 |
|
|
// |
12 |
|
|
// Main authors: M. Aletti |
13 |
|
|
// |
14 |
|
|
|
15 |
|
|
#if FELISCE_WITH_CVGRAPH |
16 |
|
|
|
17 |
|
|
// System includes |
18 |
|
|
|
19 |
|
|
// External includes |
20 |
|
|
|
21 |
|
|
// Project includes |
22 |
|
|
#include "Model/cvgMainSlave.hpp" |
23 |
|
|
#include "Geometry/surfaceInterpolator.hpp" |
24 |
|
|
#include "Core/cvgraphUtil.hpp" |
25 |
|
|
#include "Core/mpiInfo.hpp" |
26 |
|
|
|
27 |
|
|
namespace felisce { |
28 |
|
|
// Initialization: |
29 |
|
|
// - read the cvgraph data file |
30 |
|
|
// - overwrite the time loop information in FelisceParam |
31 |
|
|
// - overwrite the boundary condition type in FelisceParam |
32 |
|
|
// - build the graph (m_cvg) |
33 |
|
|
// - initialize the msgAdministrator (m_msg) |
34 |
|
|
// - std::set other variables such as protocol, compartments ids, names, connections ids |
35 |
|
✗ |
cvgMainSlave::cvgMainSlave(int argc, char ** argv) {
  // Constructor: reads the cvgraph data file, overwrites time-loop and BC information
  // in FelisceParam, builds the graph (m_cvg), creates the message administrator (m_msg)
  // and fills the compartment/connection bookkeeping members.
  //
  // IMPORTANT WARNING:
  // Remember to initialize the slave AFTER the model.
  // The reason is that the model initializes MPI and it should be done as the first thing.

  // ========== Handling parameters =====================//
  // Reading CVGraph parameters from the file whose name is stored in the felisce data file.
  CVGraphParam::instance().getInputFile(argc, argv, FelisceParam::instance().dataFileCVG.c_str());

  // Overwriting felisce time-parameters parameters.
  // These information are used to initialize felisceTransient.
  // IMPORTANT WARNING: We have to be careful and construct the felisceTransient after cvgMainSlave.
  FelisceParam::instance().timeStep = CVGraphParam::instance().dt;
  FelisceParam::instance().timeIterationMax = CVGraphParam::instance().maxIter;
  // Graph initialization and creation.
  m_cvg.SetParam();

  // ========== IDs and MSG administrator ===============//
  // Name and id of the current compartment; "none" is the unset sentinel.
  m_cmptName = FelisceParam::instance().compartmentName;
  if ( m_cmptName == "none" )
    FEL_ERROR("You must specify the compartmentName in the data file, in the cvgraph section");
  m_cmptId = m_cvg.compartId(m_cmptName);
  // Neighbouring compartments and the connections that link them to this one.
  m_cvg.adjacentCompartmentsAndConnections(m_cmptName,m_IdsOfOtherCompartments,m_IdsOfConnections);
  m_numOfNeighbours=m_IdsOfOtherCompartments.size();
  // Inverse map: global connection id -> local neighbour index iConn.
  for ( std::size_t iConn(0); iConn<m_numOfNeighbours; ++iConn ) {
    m_iConnFromIds[m_IdsOfConnections[iConn]]=iConn;
  }
  // Getting the cvgraph verbosity.
  // The policy is that the verbosity of the cvgMainSlave class is controlled by the cvgVerbosity, while the
  // rest of felisce is controlled by the verbosity of FelisceParam.
  m_verbose = CVGraphParam::instance().verb[m_cmptId];
  if ( m_verbose > 1)
    m_cvg.print(m_verbose);
  // TODO this print and the one below
  // are done by all the processors, we should be able to get the rank
  // and let only the master do it.
  if ( m_verbose > 1 )
    CVGraphParam::instance().print(m_verbose);

  // Message administrator (slave) initialization.
  m_protocol = 1; // TODO: what is this protocol about? Check in cvgraph.
  // NOTE(review): raw `new` with no matching delete visible in this file — confirm who
  // owns/frees m_msg (destructor not shown here).
  m_msg = new MessageAdm(m_verbose,m_protocol,m_cvg.graph(),m_cmptId );

  // Determine read/send variables per connection and rewrite the cvgraph BC types.
  this->handleCouplingVariablesAndOverwriteBC();

  // TODO: In the case where there are several connections, the variable to exchange may be different. For now FelisceParam::instance().idVarToExchange is a single integer but it should be a std::vector of integers in the future.
  for ( std::size_t iConn(0); iConn<m_numOfNeighbours; ++iConn ) {
    m_IdsOfVarToExchange.push_back(FelisceParam::instance().idVarToExchange);
  }
  // Residual interpolation strategy, one entry per connection.
  m_residualInterpolationType.resize(m_numOfNeighbours);
  for ( std::size_t iConn(0); iConn<m_numOfNeighbours; ++iConn ) {
    int idconn = m_IdsOfConnections[iConn]; //id connection
    // 0: Internodes, 1: transposed interpolator
    m_residualInterpolationType[iConn] = CVGraphParam::instance().residualInterpType[idconn];
    // TODO: in principle we could use a different residual interpolation method for the forward and backward direction
    // for now we assume that the entire connection uses the same approach
  }

  if ( FelisceParam::verbose() > 1)
    std::cout<<"Slave "<<m_cmptName<<" initialization completed."<<std::endl;
}
97 |
|
|
|
98 |
|
✗ |
void cvgMainSlave::handleCouplingVariablesAndOverwriteBC() { |
99 |
|
|
|
100 |
|
|
/* |
101 |
|
|
For each connection we have one (or more) variables to send |
102 |
|
|
and one (or more) variable to read. |
103 |
|
|
|
104 |
|
|
- The number of variables to read for the connection iConn is m_numVarToRead[iConn] |
105 |
|
|
- The number of variables to send for the connection iConn is m_numVarToSend[iConn] |
106 |
|
|
|
107 |
|
|
- The list of boundary labels for the connection iConn is m_interfaceLabels[iConn] |
108 |
|
|
|
109 |
|
|
For now, std::vector variables such as the velocity, are NOT stored component-wise. |
110 |
|
|
We could pass separately u_x,u_y,u_z, but we just pass \vec u. |
111 |
|
|
|
112 |
|
|
Depending on the variable to read the code overwrite the boundary condition type |
113 |
|
|
with the appropriate type. For instance, if we read VELOCITY, we are going to replace |
114 |
|
|
the type of the boundary condition (CVGRAPH) with Dirichlet. |
115 |
|
|
|
116 |
|
|
Moreover if the number of variables to read is equal to two, we assume that the type |
117 |
|
|
of boundary conditions is Robin. |
118 |
|
|
|
119 |
|
|
Be careful when using Robin boundary condition: in the felisce datafile all physical robin boundary |
120 |
|
|
conditions should be listed BEFORE the cvgraph boundary condition. |
121 |
|
|
The alphaRobin parameter is specified in the cvgraph datafile. |
122 |
|
|
*/ |
123 |
|
|
|
124 |
|
✗ |
m_interfaceLabels.resize(numConnections()); |
125 |
|
✗ |
m_numVarToRead.resize(numConnections()); |
126 |
|
✗ |
m_numVarToSend.resize(numConnections()); |
127 |
|
|
|
128 |
|
✗ |
for ( std::size_t iConn(0); iConn<m_numOfNeighbours; ++iConn ) { |
129 |
|
✗ |
int idconn = m_IdsOfConnections[iConn]; //id connection |
130 |
|
|
|
131 |
|
✗ |
std::vector<std::string> readVar; |
132 |
|
✗ |
m_numVarToRead[iConn] = m_cvg.graph()[m_cvg.connectDescriptor( idconn ) ].numToBeReadVariables(m_cmptName); |
133 |
|
✗ |
for (std::size_t v(0); v<m_numVarToRead[iConn]; v++){ |
134 |
|
✗ |
readVar.push_back(this->variableToRead(idconn,v)); |
135 |
|
|
} |
136 |
|
✗ |
m_readVariables.push_back(readVar); |
137 |
|
|
|
138 |
|
✗ |
std::vector<std::string> sendVar; |
139 |
|
✗ |
m_numVarToSend[iConn] = m_cvg.graph()[m_cvg.connectDescriptor( idconn ) ].numToBeSentVariables(m_cmptName); |
140 |
|
✗ |
for (std::size_t v(0); v<m_numVarToSend[iConn]; v++){ |
141 |
|
✗ |
sendVar.push_back(this->variableToSend(idconn,v)); |
142 |
|
|
} |
143 |
|
✗ |
m_sendVariables.push_back(sendVar); |
144 |
|
|
} |
145 |
|
|
|
146 |
|
✗ |
std::size_t start(0); |
147 |
|
✗ |
for ( std::size_t iBC(0); iBC<FelisceParam::instance().type.size(); ++iBC ) { |
148 |
|
✗ |
std::size_t numLabel=FelisceParam::instance().numLabel[iBC]; |
149 |
|
✗ |
bool cvgraphRobin(false); |
150 |
|
✗ |
bool found(false); |
151 |
|
✗ |
for ( std::size_t iConn(0); iConn<m_numOfNeighbours && !found; ++iConn ) { |
152 |
|
✗ |
int idconn = m_IdsOfConnections[iConn]; //id connection |
153 |
|
✗ |
std::string otherCmpt = m_cvg.compartNameFromId(m_IdsOfOtherCompartments[iConn]); |
154 |
|
✗ |
std::stringstream tmpType; |
155 |
|
✗ |
tmpType<<"cvgraph"<<otherCmpt; |
156 |
|
✗ |
if ( FelisceParam::instance().type[iBC] == tmpType.str() ) { |
157 |
|
✗ |
found=true; |
158 |
|
|
//Extract the labels |
159 |
|
✗ |
for ( std::size_t iLab(0); iLab<numLabel; ++iLab ) { |
160 |
|
✗ |
m_interfaceLabels[iConn].push_back(FelisceParam::instance().label[start+iLab]); |
161 |
|
|
} |
162 |
|
|
// TODO: this it should be more flexible. |
163 |
|
|
// It is very likely that it causes problems. |
164 |
|
✗ |
if ( neumann(iConn) ) { |
165 |
|
✗ |
FelisceParam::instance().type[iBC]="Neumann"; |
166 |
|
✗ |
} else if ( dirichlet(iConn) ) { |
167 |
|
✗ |
FelisceParam::instance().type[iBC]="Dirichlet"; |
168 |
|
✗ |
} else if ( robin(iConn) ) { |
169 |
|
✗ |
cvgraphRobin=true; |
170 |
|
✗ |
FelisceParam::instance().type[iBC] = "Robin"; |
171 |
|
✗ |
FelisceParam::instance().betaRobin.push_back(1.); |
172 |
|
✗ |
if (m_cvg.isSource(m_cmptId, idconn)) { |
173 |
|
✗ |
FelisceParam::instance().alphaRobin.push_back(CVGraphParam::instance().robSource[idconn]); |
174 |
|
|
} else { |
175 |
|
✗ |
FelisceParam::instance().alphaRobin.push_back(CVGraphParam::instance().robTarget[idconn]); |
176 |
|
|
} |
177 |
|
|
} |
178 |
|
✗ |
} else if (cvgraphRobin && FelisceParam::instance().type[iBC]=="Robin") { |
179 |
|
✗ |
FEL_ERROR("When using Robin BC with both cvgraph and physical BCs, cvgraph BCs should be placed AFTER the physical robin BC in the felisce data file"); |
180 |
|
|
} |
181 |
|
|
} |
182 |
|
✗ |
start += numLabel; |
183 |
|
|
} |
184 |
|
|
#ifndef NDEBUG |
185 |
|
|
// In debug mode we count all the interface labels and we verify that we replaced all the BCs. |
186 |
|
✗ |
std::vector<int> allInterfaceLabels; |
187 |
|
✗ |
for ( std::size_t iConn(0); iConn<m_numOfNeighbours; ++iConn ) { |
188 |
|
✗ |
for ( std::size_t iDebug(0); iDebug<m_interfaceLabels[iConn].size(); ++iDebug ) |
189 |
|
✗ |
allInterfaceLabels.push_back(m_interfaceLabels[iConn][iDebug]); |
190 |
|
|
} |
191 |
|
✗ |
std::vector<int> tmp=FelisceParam::instance().interfaceLabels; |
192 |
|
✗ |
Tools::printVector(tmp,"tmp"); |
193 |
|
✗ |
Tools::printVector(allInterfaceLabels,"allInterfaceLabels"); |
194 |
|
✗ |
FEL_ASSERT(tmp.size()==allInterfaceLabels.size()); |
195 |
|
✗ |
sort(allInterfaceLabels.begin(),allInterfaceLabels.end()); |
196 |
|
✗ |
sort(tmp.begin(),tmp.end()); |
197 |
|
✗ |
FEL_ASSERT(tmp.size()==allInterfaceLabels.size()); |
198 |
|
✗ |
for ( std::size_t iDebug(0); iDebug<tmp.size(); ++iDebug ) { |
199 |
|
✗ |
FEL_ASSERT(tmp[iDebug]==allInterfaceLabels[iDebug]); |
200 |
|
|
} |
201 |
|
|
#endif |
202 |
|
|
} |
203 |
|
|
|
204 |
|
|
|
205 |
|
|
// The goals of this function are: |
206 |
|
|
// - send to the other compartments the information necessary to build the interpolators (points of the current mesh, ids). |
207 |
|
|
// - receive the same information from the other compartments. |
208 |
|
|
// - initialize properly the volume-boundary mappings in dofBoundary |
209 |
|
|
// - use the functions of the surfaceInterpolator class to build the interpolators. |
210 |
|
|
// Note that the surfaceInterpolator object is used to build this matrix and then destroyed |
211 |
|
✗ |
void cvgMainSlave::buildInterpolator(LinearProblem* pt) {
  // Build the interpolation matrices between this compartment's interface mesh and the
  // neighbours' meshes: write our interface points to file, read the neighbours' ones,
  // build the interpolators and record the exchange buffer sizes.
  //
  // Surface Interpolator is felisce class that is
  // able to construct an interpolator matrix between two surfaces.
  // This class needs informations in linear problem and in the dofBoundary class.
  SurfaceInterpolator surfInterpolator;
  m_interpolator.resize(m_numOfNeighbours);
  m_dimOfDataToReceive.resize(m_numOfNeighbours);
  m_dimOfDataToSend.resize(m_numOfNeighbours);
  // Resizing std::vector of dofBoundary in linearProblem.
  pt->dofBDVec().resize(m_numOfNeighbours);
  pt->massBDVec().resize(m_numOfNeighbours);
  pt->kspMassBDVec().resize(m_numOfNeighbours);
  std::vector<DofBoundary*> dofBDVec(m_numOfNeighbours);
  for ( std::size_t iCmpt(0); iCmpt<m_numOfNeighbours; ++iCmpt ) {
    dofBDVec[iCmpt]=&pt->dofBD(iCmpt);
  }
  surfInterpolator.initSurfaceInterpolator(pt,dofBDVec);

  // Initialization of mesh and dof boundary.
  // It is important to use the global mesh because of the algorithm
  // that projects a point on a surface.
  // Both dofBoundary class in linearProblem
  // and the geometricMeshRegion class holding the global mesh needs to be correctly initialized.
  m_lpb=pt;
  surfInterpolator.initializeSurfaceInterpolator();

  for ( std::size_t iCmpt(0); iCmpt<m_numOfNeighbours; ++iCmpt ) {
    m_lpb->dofBD(iCmpt).initialize(m_lpb);
    // Here the dofBoundary class is used to be the mapping that connects
    // indices on the surfaces to the indices of the volume
    // this is done with the help of the linearProblem class.
    // TODO for now we are considering the first unknown and its components to be coupled.
    // one should be able to define in the felisce data file which unknown and which components.
    // This has to be done also below where the interface file is written.
    std::vector<int> listComp;
    int iVar = m_IdsOfVarToExchange[iCmpt]; // iConn = iCmpt ??
    std::size_t numComp=m_lpb->listVariable()[iVar].numComponent();
    for ( std::size_t idim(0); idim < numComp; ++idim) {
      m_lpb->dofBD(iCmpt).buildListOfBoundaryPetscDofs(m_lpb, m_interfaceLabels[iCmpt], iVar, idim);
      listComp.push_back(idim);
    }
    m_lpb->dofBD(iCmpt).buildBoundaryVolumeMapping(iVar,listComp);

    // Only the master does the job here.
    if ( MpiInfo::rankProc() == 0 ) {
      // SENDING INFO TO THE OTHER COMPARTMENT
      surfInterpolator.writeInterfaceFile(iVar,numComp,fileName("Interface",iCmpt,"write"),CVGraphParam::instance().dirTmpFile, iCmpt);
      std::cout<<CVGraphParam::instance().dirTmpFile <<fileName("Interface",iCmpt,"write")<<" written."<<std::endl;
      // Sending a message to the other compartment saying that the interface file has been written and that they can start reading it.
      this->customFileCompleted("interface",iCmpt);
    }
  }
  for ( std::size_t iCmpt(0); iCmpt<m_numOfNeighbours; ++iCmpt ) {
    if ( MpiInfo::rankProc() == 0 ) {
      // RECEIVING INFO FROM THE OTHER COMPARTMENT
      // the list of points to be projected along with the surface label where to find them
      std::vector< std::pair< Point*, felInt > > pointsAndCorrespondingLabel;
      // the corresponding row ids. If more than one component is used the ids are stores as id(P1,icomp0),id(P1,icomp1),id(P1,icomp2), id(P2,icomp0),id(P2,icomp1),id(P2,icomp2)
      std::vector<int> rowNumbering;
      int iVar = m_IdsOfVarToExchange[iCmpt];
      // Before starting to read we check if the other compartment has finished to write the file.
      this->checkCustomFileCompleted("interface",iCmpt);

      // Reading the file through the functions of surfInterpolator.
      surfInterpolator.readInterfaceFile(fileName("Interface",iCmpt,"read"),pointsAndCorrespondingLabel,rowNumbering, CVGraphParam::instance().dirTmpFile);
      std::cout<<CVGraphParam::instance().dirTmpFile <<fileName("Interface",iCmpt,"read")<<" read."<<std::endl;

      // Build the interpolator matrix (m_interpolator).
      surfInterpolator.buildInterpolator(*m_interpolator[iCmpt],pointsAndCorrespondingLabel,rowNumbering,m_lpb->listVariable()[0],iVar,iCmpt,this->interfaceLabels(iCmpt),fileName("LogInterpolator",iCmpt,"write",CVGraphParam::instance().dirTmpFile));

      // Extract the information about the dimension of the data to be sent and to be received.
      m_dimOfDataToReceive[iCmpt] = surfInterpolator.ncols(iCmpt); //num of dof in this linearProblem (on the surface)
      m_dimOfDataToSend[iCmpt] = surfInterpolator.nrows(iCmpt); //num of dof in the other linearProblem (on the surface)
      if ( FelisceParam::verbose() > 1 ) {
        std::cout<<"Interpolator for "<<m_cvg.compartNameFromId(m_IdsOfOtherCompartments[iCmpt])<<"built, nrows: "<<m_dimOfDataToSend[iCmpt]<<", ncols: "<<m_dimOfDataToReceive[iCmpt]<<std::endl;
      }
    }
  }

  this->exchangeBackwardInterpolators();

  // Chrono used to time the receive phase (see receiveData()).
  // NOTE(review): a "receive_<compartment>" stringstream used to be assembled here but
  // was never passed anywhere; removed as dead code.
  m_receiveChronoPtr = felisce::make_shared<ChronoInstance>();
}
296 |
|
|
|
297 |
|
✗ |
void cvgMainSlave::exchangeBackwardInterpolators() {
  // Exchange interpolator matrices between neighbouring compartments through binary files,
  // so that Neumann-type residuals can later be sent back with the transposed interpolator
  // (strategy m_residualInterpolationType == 1). Only the master rank participates.
  if ( MpiInfo::rankProc() == 0 ) {
    // Phase 1: write our own interpolators to disk and signal completion.
    for ( std::size_t iCmpt(0); iCmpt<m_numOfNeighbours; ++iCmpt ) {
      if ( m_residualInterpolationType[iCmpt] == 1 ) {
        //writing interpolator matrix
        m_interpolator[iCmpt]->saveInBinaryFormat(MPI_COMM_SELF,fileName("interpolatorMatrix",iCmpt,"write").c_str(),CVGraphParam::instance().dirTmpFile);
        // Empty flag file tells the neighbour the matrix file is complete.
        this->customFileCompleted("interpolatorMatrix",iCmpt);
      }
    }
    // Phase 2: read back the neighbours' interpolators.
    m_interpolatorBackward.resize(m_numOfNeighbours);
    for ( std::size_t iCmpt(0); iCmpt<m_numOfNeighbours; ++iCmpt ) {
      if ( m_residualInterpolationType[iCmpt] == 1 ) {
        // Memory allocation
        // NOTE(review): assumes m_interpolatorBackward[iCmpt] is dereferenceable right
        // after the resize above — confirm the element type's default construction
        // yields a valid matrix object (declaration not visible in this file).
        m_interpolatorBackward[iCmpt]->createSeqAIJ(MPI_COMM_SELF,m_dimOfDataToReceive[iCmpt],m_dimOfDataToSend[iCmpt],0,nullptr);
        m_interpolatorBackward[iCmpt]->setOption(MAT_KEEP_NONZERO_PATTERN, PETSC_TRUE);
        m_interpolatorBackward[iCmpt]->setFromOptions();
        // Read from file
        // Block until the neighbour has finished writing its matrix file.
        this->checkCustomFileCompleted("interpolatorMatrix",iCmpt);
        m_interpolatorBackward[iCmpt]->loadFromBinaryFormat(MPI_COMM_SELF,fileName("interpolatorMatrix",iCmpt,"read").c_str(),CVGraphParam::instance().dirTmpFile);
      }
    }
  }
}
320 |
|
|
|
321 |
|
✗ |
std::string cvgMainSlave::fileName(std::string keyword, std::size_t iConn, std::string readOrWrite, std::string folder) const {
  // Build the exchange-file name "<folder>/<keyword>_<from>_<to>".
  // For "read" the neighbour wrote the file (neighbour first, us second);
  // for "write" we are the author (us first, neighbour second).
  std::stringstream out;
  out<<folder<<"/"<<keyword;
  const bool reading = ( readOrWrite == "read" );
  const bool writing = ( readOrWrite == "write" );
  if ( reading ) {
    out<<"_"<<m_cvg.compartNameFromId(m_IdsOfOtherCompartments[iConn])<<"_"<<m_cmptName;
  } else if ( writing ) {
    out<<"_"<<m_cmptName<<"_"<<m_cvg.compartNameFromId(m_IdsOfOtherCompartments[iConn]);
  }
  return out.str();
}
331 |
|
|
// It writes an empty file just to tell the other compartment that the interface file is completed. |
332 |
|
✗ |
void cvgMainSlave::customFileCompleted(std::string keyword, std::size_t iBD) const {
  // Drop an empty flag file "<dirTmp>/<keyword><us>_<neighbour>completed.ok" to tell
  // the neighbouring compartment that the corresponding data file is fully written.
  std::stringstream flagFile;
  flagFile<<CVGraphParam::instance().dirTmpFile<<"/"<<keyword<<m_cmptName<<"_"<<m_cvg.compartNameFromId(m_IdsOfOtherCompartments[iBD])<<"completed.ok";
  cvgraph::writeEmptyFile(flagFile.str().c_str(),CVGraphParam::instance().verbose);
}
337 |
|
|
|
338 |
|
|
// It check if the (empty) file is present, if yes it means that we can start reading the interface file |
339 |
|
|
// and we can remove the empty file since it is no longer needed. |
340 |
|
✗ |
void cvgMainSlave::checkCustomFileCompleted(std::string keyword,std::size_t iBD) const {
  // Counterpart of customFileCompleted(): block until the neighbour's flag file
  // "<dirTmp>/<keyword><neighbour>_<us>completed.ok" exists, then delete it.
  // TODO:
  // These communications should be handled also via MPI and pvm.
  // In general cvgMainSlave has been designed for file communications, but it should be easy to generalize it.
  std::stringstream flagFile;
  flagFile<<CVGraphParam::instance().dirTmpFile<<"/"<<keyword<<m_cvg.compartNameFromId(m_IdsOfOtherCompartments[iBD])<<"_"<<m_cmptName<<"completed.ok";
  cvgraph::wait_and_remove_file(flagFile.str().c_str(),CVGraphParam::instance().verbose);
}
348 |
|
|
|
349 |
|
|
// First contact between master and slave. |
350 |
|
|
// Main task of this function: |
351 |
|
|
// - initialize the m_data_recv(send) vectors with the correct sizes. |
352 |
|
|
// TODO i think an unnecessary copy of the data is done. |
353 |
|
|
// probably we should use a petscVector defined on the boundary to store the data |
354 |
|
|
// and give his underlying pointer to the data to the cvg data structures. |
355 |
|
|
// to avoid at maximum the duplications. |
356 |
|
✗ |
void cvgMainSlave::firstContact() {
  // First contact between master and slave:
  // - allocate the cvgraph send/receive buffers with the correct per-variable sizes,
  // - check that master and slave agree on protocol, time step and iteration count,
  // - send this compartment's physical unit / space dimension / physical time.
  // TODO i think an unnecessary copy of the data is done.
  // probably we should use a petscVector defined on the boundary to store the data
  // and give his underlying pointer to the data to the cvg data structures.
  //
  // initialization
  m_data_recv.resize(m_numOfNeighbours);
  m_data_send.resize(m_numOfNeighbours);
  m_dataSendPreInterpolation.resize(m_numOfNeighbours);
  m_dataSend.resize(m_numOfNeighbours);
  // Only the master rank owns the cvgraph buffers and talks to the msg administrator.
  if (MpiInfo::rankProc() == 0) {
    for (std::size_t iCmpt(0); iCmpt<m_numOfNeighbours; ++iCmpt) {
      int idconn = m_IdsOfConnections[iCmpt]; //id connection

      // Every exchanged variable occupies one block of the interface size
      // (sizes were computed in buildInterpolator()).
      std::vector<std::size_t> dim_recv(m_numVarToRead[iCmpt]), dim_sent(m_numVarToSend[iCmpt]);
      for(std::size_t iSend=0;iSend<m_numVarToSend[iCmpt];iSend++)
        dim_sent[iSend] = m_dimOfDataToSend[iCmpt];
      for(std::size_t iRead=0;iRead<m_numVarToRead[iCmpt];iRead++)
        dim_recv[iRead] = m_dimOfDataToReceive[iCmpt];

      // Now we can allocate the vectors of the CVGgraph; we keep non-owning pointers.
      m_data_recv[iCmpt] = &m_cvg.initDataReceived(dim_recv, m_cmptName, m_cvg.connectNameFromId(idconn));
      m_data_send[iCmpt] = &m_cvg.initDataSent(dim_sent, m_cmptName, m_cvg.connectNameFromId(idconn));

      // Work vectors shaped to match the interpolator matrix (input/output sides).
      m_interpolator[iCmpt]->getVecs(m_dataSendPreInterpolation[iCmpt],m_dataSend[iCmpt]);
    }
    //Now we exchange some quantities with the master.
    int rcvProtocol;  // filled by the receive call below
    m_msg->receiveInitFromMaster(rcvProtocol);
    // We also check that the master and the slave are on the same page.
    if(rcvProtocol != m_protocol){
      CVG_ERROR("Protocol mismatch !");
    }
    if(!Tools::equal(FelisceParam::instance().timeStep,CVGraphParam::instance().dt)){
      CVG_ERROR("Different time step Master-Slave!");
    }
    if(FelisceParam::instance().timeIterationMax != CVGraphParam::instance().maxIter){
      std::cout<<FelisceParam::instance().timeIterationMax<<" <-fp cvgp-> "<<CVGraphParam::instance().maxIter<<std::endl;
      CVG_ERROR("Different number of max Iterations Master-Slave!");
    }

    // This compartment's descriptive quantities, indexed by compartment id.
    int physUnit = CVGraphParam::instance().physUnit[m_cmptId];
    int spaceDim = CVGraphParam::instance().spaceDim[m_cmptId];
    double physTime = CVGraphParam::instance().physTime[m_cmptId];

    m_msg->sendInitToMaster(physUnit,spaceDim,physTime);
    std::cout<<"First contact phase completed."<<std::endl;

  }
}
402 |
|
✗ |
void cvgMainSlave::exchangeInitialCondition() {
  // Send the initial condition on every connection this compartment belongs to.
  // Only the master rank exchanges messages with the cvgraph master.
  if ( MpiInfo::rankProc() == 0 ) {
    m_msg->PrintCommonIterationBanner(m_msgInfo,/*iter*/0);
    cvgraph::ConnectionIterator ed, lastE;
    // Iterate over all edges (connections) of the graph and keep only those where this
    // compartment is an endpoint (source or target).
    for (std::tie(ed,lastE) = edges(m_cvg.graph()); ed != lastE; ed++) {
      if ( m_cvg.isSource(m_cmptId, m_cvg.connectId(*ed)) or m_cvg.isTarget(m_cmptId,m_cvg.connectId(*ed)) ) {
        m_msg->receiveTimeFromMaster(m_msgInfo,*ed);
        m_msg->PrintIteration(m_msgInfo, /*iter*/0, m_cvg.connectName(*ed));
        // iConn() maps the global connection id to the local neighbour index.
        m_lpb->sendInitialCondition(this->iConn(m_cvg.connectId(*ed)));
      }
    }
  }
}
415 |
|
|
// Given a sequential volume std::vector (such as the sequential solution of the problem for instance) |
416 |
|
|
// the data of the interface are extracted, interpolated on the other compartment mesh and sent to the master. |
417 |
|
|
// The unknown and the components have been used |
418 |
|
|
// to initialize the dofBoundary class in buildInterpolator, so the functions already knows which dofs have to be extracted). |
419 |
|
|
// It has to be a sequential std::vector for now since we do getValues with all the surface dofs |
420 |
|
|
// TODO: I think we could pass also a parallel std::vector, extract only the local values gather them in one boundary std::vector |
421 |
|
|
// and send them. All the utilities to do that are already in dofBoundary and linearProblem. |
422 |
|
✗ |
void cvgMainSlave::sendData(std::vector<PetscVector>& sequentialVolumeVectors, std::size_t iConn){
  // Extract the interface values of each variable to send from the sequential volume
  // vectors, interpolate them onto the neighbour's surface mesh, pack them variable by
  // variable into the cvgraph buffer and ship the buffer to the master.
  // Only the master rank holds the sequential data and the msg administrator.
  if ( MpiInfo::rankProc() == 0 ) {
    std::cout<<"sending data to master..."<<std::endl;
    for ( std::size_t cVar(0); cVar<m_numVarToSend[iConn]; ++cVar ) {
      double * dataSendPreInterpolationArray;
      double * dataSendArray;
      // Get the pointer to the array underlying the PetscVector m_dataSendPreInterpolation
      m_dataSendPreInterpolation[iConn].getArray(&dataSendPreInterpolationArray);
      // Getting the values of sequential volume std::vector located in glob2PetscVol locations saving them to the array pointer by dataSendPreInterpolationArray.
      // Which means that they are saved in the PetscVector m_dataSendPreInterpolation in the location from 0 to numGlobalDofInterface -1;
      sequentialVolumeVectors[cVar].getValues ( m_dimOfDataToReceive[iConn], m_lpb->dofBD(iConn).glob2PetscVolPtr(),dataSendPreInterpolationArray);
      // Restore array. We no longer need this pointer. We restore it. (see petsc documentation)
      m_dataSendPreInterpolation[iConn].restoreArray(&dataSendPreInterpolationArray);
      // Apply the interpolator matrix to m_dataSendPreInterpolation save the result in dataSend.
      // With the transposed-interpolator strategy (type 1), Neumann-type variables use the
      // transpose of the backward operator; everything else uses the forward interpolator.
      if ( m_residualInterpolationType[iConn] == 1 && CVGraph::neumannTypeVariable(sendVariable(iConn,cVar)) ) {
        multTranspose(*m_interpolatorBackward[iConn],m_dataSendPreInterpolation[iConn],m_dataSend[iConn]);
      } else {
        mult(*m_interpolator[iConn],m_dataSendPreInterpolation[iConn],m_dataSend[iConn]);
      }
      // Get the pointer to this array
      m_dataSend[iConn].getArray(&dataSendArray);
      // Copy the content of m_dataSend into m_data_send data structure.
      // Variables are packed contiguously: block cVar starts at cVar*m_dimOfDataToSend[iConn].
      double* cvgDataSend = m_data_send[iConn]->begin();
      for ( int k(0); k < m_dimOfDataToSend[iConn] ; ++k) {
        cvgDataSend[k+cVar*m_dimOfDataToSend[iConn]] = dataSendArray[k];
      }
      //Restoring
      m_dataSend[iConn].restoreArray(&dataSendArray);
    }
    m_msg->sendDataToMaster(m_msgInfo, *m_data_send[iConn], m_cvg.connectDescriptor(m_IdsOfConnections[iConn] ));
    std::cout<<"done."<<std::endl;
  }
}
455 |
|
|
// Data are simply received and stored in a sequentialVolumeVector, such as the sequential solution of the problem for instance) |
456 |
|
|
// only the master reads these data, but then they are broadcasted to everyone. |
457 |
|
|
// TODO this function could be generalized to work with a parallel std::vector. |
458 |
|
✗ |
void cvgMainSlave::receiveData(std::vector<PetscVector>& sequentialVolumeVectors, std::size_t iConn){
  // Receive the interface data for connection iConn from the master and scatter them
  // into the sequential volume vectors. Only the master rank communicates with the
  // cvgraph master; afterwards the vectors are broadcast to all ranks of this slave.
  if ( MpiInfo::rankProc() == 0 ) {
    std::cout<<"receiving data from master..."<<std::endl;
    m_receiveChronoPtr->start();
    m_msg->receiveDataFromMaster(m_msgInfo, *m_data_recv[iConn],m_cvg.connectDescriptor(m_IdsOfConnections[iConn] ));
    m_receiveChronoPtr->stop();
    for ( std::size_t cVar(0); cVar<m_numVarToRead[iConn]; ++cVar ) {
      // Block cVar of the received buffer goes to the interface dofs via the
      // boundary-to-volume mapping (glob2PetscVol).
      sequentialVolumeVectors[cVar].setValues ( m_dimOfDataToReceive[iConn], m_lpb->dofBD(iConn).glob2PetscVolPtr(), m_data_recv[iConn]->begin(cVar*m_dimOfDataToReceive[iConn]), INSERT_VALUES);
      sequentialVolumeVectors[cVar].assembly();
    }
    std::cout<<"done"<<std::endl;
  }
  m_receiveChronoPtr->toFileMax(); // It has to be outside of the if because it contains MPI communications
  // Propagate the freshly received values from the master rank to every rank.
  for ( std::size_t cVar(0); cVar<m_numVarToRead[iConn]; ++cVar ) {
    sequentialVolumeVectors[cVar].broadCastSequentialVector(MpiInfo::petscComm(),/*master id with respect to the communicator*/0);
  }
}
475 |
|
|
// If a new time iteration starts, print a banner and update the counter |
476 |
|
✗ |
void cvgMainSlave::printIteration(int& timeIteration) { |
477 |
|
✗ |
if ( MpiInfo::rankProc() == 0 ){ |
478 |
|
✗ |
if ( m_msgInfo.timeStatus() == CVG_TIME_STATUS_NEW_TIME_STEP ) { |
479 |
|
✗ |
timeIteration++; |
480 |
|
✗ |
m_msg->PrintCommonIterationBanner(m_msgInfo, timeIteration); |
481 |
|
|
} |
482 |
|
|
} |
483 |
|
✗ |
MPI_Bcast(&timeIteration,1,MPI_INT,/*master*/0,MpiInfo::petscComm()); |
484 |
|
|
} |
485 |
|
|
|
486 |
|
|
// General function: given a time status flag the master checks if this is current status |
487 |
|
|
// and then broadcast the answer. |
488 |
|
✗ |
bool cvgMainSlave::checkTimeStatus(const cvgraph::CVGStatus flag) const {
  // The master compares the current time status against the requested flag; the
  // answer travels as an int (MPI has no bool) and is broadcast so that every
  // rank of this slave returns the same value.
  int matches(0);
  if ( MpiInfo::rankProc() == 0 && m_msgInfo.timeStatus() == flag ) {
    matches = 1;
  }
  MPI_Bcast(&matches, 1, MPI_INT, /*master*/0, MpiInfo::petscComm());
  return matches != 0;
}
500 |
|
|
|
501 |
|
✗ |
bool cvgMainSlave::initialConditionNeeded( std::size_t iConn ) const { |
502 |
|
✗ |
return m_cvg.cmptHaveToSendInitialCondition( m_cmptId, this->IdConn(iConn) ); |
503 |
|
|
} |
504 |
|
✗ |
void cvgMainSlave::print() const {
  // Dump a short human-readable summary of this slave on stdout.
  static const char* separator = "--------------------------------------------------";
  std::cout << separator << std::endl;
  std::cout << " cvgMainSlave " << std::endl;
  std::cout << " Compartment-node name: "<<m_cmptName << std::endl;
  std::cout << " Number of neighbours: "<<m_numOfNeighbours << std::endl;
  // TODO: print more info.
  std::cout << separator << std::endl;
}
512 |
|
✗ |
bool cvgMainSlave::thereIsAtLeastOneConditionOfType(bool(cvgMainSlave::*test)(std::size_t) const) const {
  // Scan the connections with the given member predicate (e.g. &cvgMainSlave::robin)
  // and stop as soon as it holds for one of them.
  bool anyMatch(false);
  for ( std::size_t iConn(0); !anyMatch && iConn<m_numOfNeighbours; ++iConn ) {
    anyMatch = (this->*test)(iConn);
  }
  return anyMatch;
}
519 |
|
✗ |
std::string cvgMainSlave::neumannVariable(std::size_t iConn) const { |
520 |
|
✗ |
if ( neumann(iConn) ) { |
521 |
|
✗ |
return m_readVariables[iConn][0]; |
522 |
|
|
} |
523 |
|
✗ |
if ( robin(iConn) ) { |
524 |
|
✗ |
if ( CVGraph::neumannTypeVariable(m_readVariables[iConn][0]) ) { |
525 |
|
✗ |
return m_readVariables[iConn][0]; |
526 |
|
✗ |
} else if( CVGraph::neumannTypeVariable(m_readVariables[iConn][1] ) ) { |
527 |
|
✗ |
return m_readVariables[iConn][1]; |
528 |
|
|
} |
529 |
|
|
} |
530 |
|
✗ |
return "Error"; |
531 |
|
|
} |
532 |
|
|
} |
533 |
|
|
#endif |
534 |
|
|
|