Refactored cpu/gpu headers into common/ScaLBL.h to prevent divergence of conventions

This commit is contained in:
James E McClure 2015-06-30 11:33:27 -04:00
parent 2756b80e2d
commit 6414325ab2
13 changed files with 253 additions and 267 deletions

View File

@ -1,13 +1,73 @@
/* ScaLBL.h
* Header file for Scalable Lattice Boltzmann Library
* contains definitions for essential components of the LBM
* Separate implementations for GPU and CPU must both follow the conventions defined in this header
* This library contains the essential components of the LBM
* - streaming implementations
* - collision terms for different physics
* - communication framework for the LBM
* Separate implementations for GPU and CPU
* should follow the conventions in this header
* Refer to Domain.h for setup of parallel domains
*/
#include "Domain.h"
extern "C" void AllocateDeviceMemory(void** address, size_t size);
extern "C" void CopyToDevice(void* dest, void* source, size_t size);
extern "C" void CopyToHost(void* dest, void* source, size_t size);
extern "C" void DeviceBarrier();
extern "C" void PackDist(int q, int *list, int start, int count, double *sendbuf, double *dist, int N);
extern "C" void UnpackDist(int q, int Cqx, int Cqy, int Cqz, int *list, int start, int count,double *recvbuf, double *dist, int Nx, int Ny, int Nz);
extern "C" void InitD3Q19(char *ID, double *f_even, double *f_odd, int Nx, int Ny, int Nz);
extern "C" void SwapD3Q19(char *ID, double *disteven, double *distodd, int Nx, int Ny, int Nz);
extern "C" void MRT(char *ID, double *f_even, double *f_odd, double rlxA, double rlxB,
double Fx, double Fy, double Fz,int Nx, int Ny, int Nz);
extern "C" void ComputeVelocityD3Q19(char *ID, double *disteven, double *distodd, double *vel,
int Nx, int Ny, int Nz);
extern "C" void ComputePressureD3Q19(char *ID, double *disteven, double *distodd, double *Pressure,
int Nx, int Ny, int Nz);
extern "C" void PressureBC_inlet(double *disteven, double *distodd, double din,
int Nx, int Ny, int Nz);
extern "C" void PressureBC_outlet(double *disteven, double *distodd, double dout,
int Nx, int Ny, int Nz, int outlet);
extern "C" void InitDenColor(char *ID, double *Den, double *Phi, double das, double dbs, int Nx, int Ny, int Nz);
extern "C" void InitDenColorDistance(char *ID, double *Den, double *Phi, double *Distance,
double das, double dbs, double beta, double xp, int Nx, int Ny, int Nz);
extern "C" void ComputeColorGradient(char *ID, double *phi, double *ColorGrad, int Nx, int Ny, int Nz);
extern "C" void ColorCollide( char *ID, double *disteven, double *distodd, double *ColorGrad,
double *Velocity, int Nx, int Ny, int Nz,double rlx_setA, double rlx_setB,
double alpha, double beta, double Fx, double Fy, double Fz, bool pBC);
extern "C" void ColorCollideOpt( char *ID, double *disteven, double *distodd, double *phi, double *ColorGrad,
double *Velocity, int Nx, int Ny, int Nz,double rlx_setA, double rlx_setB,
double alpha, double beta, double Fx, double Fy, double Fz);
extern "C" void DensityStreamD3Q7(char *ID, double *Den, double *Copy, double *Phi, double *ColorGrad, double *Velocity,
double beta, int Nx, int Ny, int Nz, bool pBC);
extern "C" void ComputePhi(char *ID, double *Phi, double *Den, int N);
extern "C" void MassColorCollideD3Q7(char *ID, double *A_even, double *A_odd, double *B_even, double *B_odd,
double *Den, double *Phi, double *ColorGrad, double *Velocity, double beta, int N, bool pBC);
extern "C" void ColorBC_inlet(double *Phi, double *Den, double *A_even, double *A_odd,
double *B_even, double *B_odd, int Nx, int Ny, int Nz);
extern "C" void ColorBC_outlet(double *Phi, double *Den, double *A_even, double *A_odd,
double *B_even, double *B_odd, int Nx, int Ny, int Nz);
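// Illustrative sketch (not part of this header): one plausible single-phase MRT driver
// built from the kernels declared above. The 10/9 even/odd distribution split, the
// swap-then-collide ordering, and the parameter names are assumptions for illustration only.
inline void ExampleMRT(char *id_host, int Nx, int Ny, int Nz, int timesteps,
		double rlxA, double rlxB, double Fx, double Fy, double Fz){
	int N = Nx*Ny*Nz;
	char *ID; double *f_even, *f_odd, *vel;
	AllocateDeviceMemory((void **) &ID, N*sizeof(char));
	AllocateDeviceMemory((void **) &f_even, 10*N*sizeof(double));
	AllocateDeviceMemory((void **) &f_odd, 9*N*sizeof(double));
	AllocateDeviceMemory((void **) &vel, 3*N*sizeof(double));
	CopyToDevice(ID, id_host, N*sizeof(char));      // phase/solid labels supplied by the caller
	InitD3Q19(ID, f_even, f_odd, Nx, Ny, Nz);       // initialize the distributions
	for (int t=0; t<timesteps; t++){
		SwapD3Q19(ID, f_even, f_odd, Nx, Ny, Nz);   // streaming step (even/odd swap)
		MRT(ID, f_even, f_odd, rlxA, rlxB, Fx, Fy, Fz, Nx, Ny, Nz); // collision step
		DeviceBarrier();
	}
	ComputeVelocityD3Q19(ID, f_even, f_odd, vel, Nx, Ny, Nz); // velocity diagnostic
}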
class ScaLBL_Communicator{
ScaLBL_Communicator(){
@ -15,15 +75,25 @@ class ScaLBL_Communicator{
~ScaLBL_Communicator(){
}
private:
bool Lock; // use Lock to make sure only one call at a time to protect data in transit
// only one set of Send requests can be active at any time (per instance)
int i,j,k,n;
int sendtag,recvtag;
// Give the object its own MPI communicator
RankInfoStruct rank_info;
MPI_Group Group; // Group of processors associated with this domain
MPI_Comm MPI_COMM_SCALBL; // MPI Communicator for this domain
MPI_Request req1[18],req2[18];
MPI_Status stat1[18],stat2[18];
public:
//......................................................................................
// Set up for D3Q19 distributions
// - determines how much memory is allocated
// - buffers are reused to send D3Q7 distributions and halo exchange as needed
// MPI ranks for all 18 neighbors
//......................................................................................
// These variables are all private to prevent external things from modifying them!!
//......................................................................................
int rank_x,rank_y,rank_z,rank_X,rank_Y,rank_Z;
int rank_xy,rank_XY,rank_xY,rank_Xy;
int rank_xz,rank_XZ,rank_xZ,rank_Xz;
int rank_yz,rank_YZ,rank_yZ,rank_Yz;
//......................................................................................
int Nx,Ny,Nz,N;
//......................................................................................
@ -43,6 +113,14 @@ public:
int *dvcRecvList_x, *dvcRecvList_y, *dvcRecvList_z, *dvcRecvList_X, *dvcRecvList_Y, *dvcRecvList_Z;
int *dvcRecvList_xy, *dvcRecvList_yz, *dvcRecvList_xz, *dvcRecvList_Xy, *dvcRecvList_Yz, *dvcRecvList_xZ;
int *dvcRecvList_xY, *dvcRecvList_yZ, *dvcRecvList_Xz, *dvcRecvList_XY, *dvcRecvList_YZ, *dvcRecvList_XZ;
//......................................................................................
public:
//......................................................................................
// Set up for D3Q19 distributions
// - determines how much memory is allocated
// - buffers are reused to send D3Q7 distributions and halo exchange as needed
//......................................................................................
// Buffers to store data sent and received by this MPI process
double *sendbuf_x, *sendbuf_y, *sendbuf_z, *sendbuf_X, *sendbuf_Y, *sendbuf_Z;
double *sendbuf_xy, *sendbuf_yz, *sendbuf_xz, *sendbuf_Xy, *sendbuf_Yz, *sendbuf_xZ;
@ -52,7 +130,6 @@ public:
double *recvbuf_xY, *recvbuf_yZ, *recvbuf_Xz, *recvbuf_XY, *recvbuf_YZ, *recvbuf_XZ;
//......................................................................................
void Allocate(Domain &Dm);
void SendD3Q19(double *f_even, double *f_odd);
void RecvD3Q19(double *f_even, double *f_odd);
void BiSendD3Q7(double *A_even, double *A_odd, double *B_even, double *B_odd);
@ -61,7 +138,55 @@ public:
void RecvHalo(double *data);
};
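// Illustrative sketch (not part of this header): the intended pairing of the lock-protected
// Send/Recv calls, using the two-argument RecvD3Q19 declared above. The communicator is
// assumed to have been constructed once from a Domain; interior-only kernels may overlap
// the non-blocking exchange between the two calls.
inline void ExampleExchange(ScaLBL_Communicator &ScaLBL_Comm, double *f_even, double *f_odd){
	ScaLBL_Comm.SendD3Q19(f_even, f_odd);   // pack boundary sites, post Isend/Irecv, set Lock
	// ... interior-only kernels can run here while messages are in flight ...
	ScaLBL_Comm.RecvD3Q19(f_even, f_odd);   // wait for completion, unpack on the device, clear Lock
	// calling SendD3Q19 again before RecvD3Q19 would trip the ERROR check in SendD3Q19
}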
void ScaLBL_Communicator::Allocate(Domain &Dm){
ScaLBL_Communicator::ScaLBL_Communicator(Domain &Dm){
//......................................................................................
Lock=false; // unlock the communicator
//......................................................................................
// Create a separate copy of the communicator for the device
MPI_Comm_group(Dm.Comm,&Group);
MPI_Comm_create(Dm.Comm,Group,&MPI_COMM_SCALBL);
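// Working on a duplicate communicator keeps ScaLBL's sends/receives and tags isolated
// from any other traffic posted on Dm.Comm / MPI_COMM_WORLD, which is why the Isend/Irecv
// calls below use MPI_COMM_SCALBL rather than MPI_COMM_WORLD.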
//......................................................................................
// Copy the domain size and communication counts from Dm
Nx = Dm.Nx;
Ny = Dm.Ny;
Nz = Dm.Nz;
N = Nx*Ny*Nz;
sendCount_x=Dm.sendCount_x;
sendCount_y=Dm.sendCount_y;
sendCount_z=Dm.sendCount_z;
sendCount_X=Dm.sendCount_X;
sendCount_Y=Dm.sendCount_Y;
sendCount_Z=Dm.sendCount_Z;
sendCount_xy=Dm.sendCount_xy;
sendCount_yz=Dm.sendCount_yz;
sendCount_xz=Dm.sendCount_xz;
sendCount_Xy=Dm.sendCount_Xy;
sendCount_Yz=Dm.sendCount_Yz;
sendCount_xZ=Dm.sendCount_xZ;
sendCount_xY=Dm.sendCount_xY;
sendCount_yZ=Dm.sendCount_yZ;
sendCount_Xz=Dm.sendCount_Xz;
sendCount_XY=Dm.sendCount_XY;
sendCount_YZ=Dm.sendCount_YZ;
sendCount_XZ=Dm.sendCount_XZ;
recvCount_x=Dm.recvCount_x;
recvCount_y=Dm.recvCount_y;
recvCount_z=Dm.recvCount_z;
recvCount_X=Dm.recvCount_X;
recvCount_Y=Dm.recvCount_Y;
recvCount_Z=Dm.recvCount_Z;
recvCount_xy=Dm.recvCount_xy;
recvCount_yz=Dm.recvCount_yz;
recvCount_xz=Dm.recvCount_xz;
recvCount_Xy=Dm.recvCount_Xy;
recvCount_Yz=Dm.recvCount_Yz;
recvCount_xZ=Dm.recvCount_xZ;
recvCount_xY=Dm.recvCount_xY;
recvCount_yZ=Dm.recvCount_yZ;
recvCount_Xz=Dm.recvCount_Xz;
recvCount_XY=Dm.recvCount_XY;
recvCount_YZ=Dm.recvCount_YZ;
recvCount_XZ=Dm.recvCount_XZ;
//......................................................................................
AllocateDeviceMemory((void **) &sendbuf_x, 5*sendCount_x*sizeof(double)); // Allocate device memory
AllocateDeviceMemory((void **) &sendbuf_X, 5*sendCount_X*sizeof(double)); // Allocate device memory
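// Face buffers are sized for the largest exchange: five D3Q19 distributions cross each
// face per boundary site, hence 5*sendCount_* doubles. The same buffers are reused for
// the two-component D3Q7 exchange (2 values per site) and for scalar halo exchange
// (1 value per site), so no separate allocations are needed for those.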
@ -182,8 +307,15 @@ void ScaLBL_Communicator::Allocate(Domain &Dm){
void ScaLBL_Communicator::SendD3Q19(double *f_even, double *f_odd){
if (Lock==true){
ERROR("ScaLBL Error (SendD3Q19): ScaLBL_Communicator is locked -- did you forget to match Send/Recv calls?");
}
else{
Lock=true;
}
// assign tag of 19 to D3Q19 communication
sendtag = recvtag = 19;
// Pack the distributions
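// Each PackDist call gathers one distribution direction (selected by its first argument,
// an index into f_even or f_odd) for the boundary sites in the dvcSendList_* array and
// writes it into the send buffer at the given offset, so the five face distributions
// are stored contiguously before a single MPI message is posted.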
PackDist(1,dvcSendList_x,0,sendCount_x,sendbuf_x,f_even,N);
PackDist(4,dvcSendList_x,sendCount_x,sendCount_x,sendbuf_x,f_even,N);
PackDist(5,dvcSendList_x,2*sendCount_x,sendCount_x,sendbuf_x,f_even,N);
@ -248,50 +380,49 @@ void ScaLBL_Communicator::SendD3Q19(double *f_even, double *f_odd){
//...................................................................................
// Send all the distributions
MPI_Isend(sendbuf_x, 5*sendCount_x,MPI_DOUBLE,rank_x,sendtag,MPI_COMM_WORLD,&req1[0]);
MPI_Irecv(recvbuf_X, 5*recvCount_X,MPI_DOUBLE,rank_X,recvtag,MPI_COMM_WORLD,&req2[0]);
MPI_Isend(sendbuf_X, 5*sendCount_X,MPI_DOUBLE,rank_X,sendtag,MPI_COMM_WORLD,&req1[1]);
MPI_Irecv(recvbuf_x, 5*recvCount_x,MPI_DOUBLE,rank_x,recvtag,MPI_COMM_WORLD,&req2[1]);
MPI_Isend(sendbuf_y, 5*sendCount_y,MPI_DOUBLE,rank_y,sendtag,MPI_COMM_WORLD,&req1[2]);
MPI_Irecv(recvbuf_Y, 5*recvCount_Y,MPI_DOUBLE,rank_Y,recvtag,MPI_COMM_WORLD,&req2[2]);
MPI_Isend(sendbuf_Y, 5*sendCount_Y,MPI_DOUBLE,rank_Y,sendtag,MPI_COMM_WORLD,&req1[3]);
MPI_Irecv(recvbuf_y, 5*recvCount_y,MPI_DOUBLE,rank_y,recvtag,MPI_COMM_WORLD,&req2[3]);
MPI_Isend(sendbuf_z, 5*sendCount_z,MPI_DOUBLE,rank_z,sendtag,MPI_COMM_WORLD,&req1[4]);
MPI_Irecv(recvbuf_Z, 5*recvCount_Z,MPI_DOUBLE,rank_Z,recvtag,MPI_COMM_WORLD,&req2[4]);
MPI_Isend(sendbuf_Z, 5*sendCount_Z,MPI_DOUBLE,rank_Z,sendtag,MPI_COMM_WORLD,&req1[5]);
MPI_Irecv(recvbuf_z, 5*recvCount_z,MPI_DOUBLE,rank_z,recvtag,MPI_COMM_WORLD,&req2[5]);
MPI_Isend(sendbuf_xy, sendCount_xy,MPI_DOUBLE,rank_xy,sendtag,MPI_COMM_WORLD,&req1[6]);
MPI_Irecv(recvbuf_XY, recvCount_XY,MPI_DOUBLE,rank_XY,recvtag,MPI_COMM_WORLD,&req2[6]);
MPI_Isend(sendbuf_XY, sendCount_XY,MPI_DOUBLE,rank_XY,sendtag,MPI_COMM_WORLD,&req1[7]);
MPI_Irecv(recvbuf_xy, recvCount_xy,MPI_DOUBLE,rank_xy,recvtag,MPI_COMM_WORLD,&req2[7]);
MPI_Isend(sendbuf_Xy, sendCount_Xy,MPI_DOUBLE,rank_Xy,sendtag,MPI_COMM_WORLD,&req1[8]);
MPI_Irecv(recvbuf_xY, recvCount_xY,MPI_DOUBLE,rank_xY,recvtag,MPI_COMM_WORLD,&req2[8]);
MPI_Isend(sendbuf_xY, sendCount_xY,MPI_DOUBLE,rank_xY,sendtag,MPI_COMM_WORLD,&req1[9]);
MPI_Irecv(recvbuf_Xy, recvCount_Xy,MPI_DOUBLE,rank_Xy,recvtag,MPI_COMM_WORLD,&req2[9]);
MPI_Isend(sendbuf_xz, sendCount_xz,MPI_DOUBLE,rank_xz,sendtag,MPI_COMM_WORLD,&req1[10]);
MPI_Irecv(recvbuf_XZ, recvCount_XZ,MPI_DOUBLE,rank_XZ,recvtag,MPI_COMM_WORLD,&req2[10]);
MPI_Isend(sendbuf_XZ, sendCount_XZ,MPI_DOUBLE,rank_XZ,sendtag,MPI_COMM_WORLD,&req1[11]);
MPI_Irecv(recvbuf_xz, recvCount_xz,MPI_DOUBLE,rank_xz,recvtag,MPI_COMM_WORLD,&req2[11]);
MPI_Isend(sendbuf_Xz, sendCount_Xz,MPI_DOUBLE,rank_Xz,sendtag,MPI_COMM_WORLD,&req1[12]);
MPI_Irecv(recvbuf_xZ, recvCount_xZ,MPI_DOUBLE,rank_xZ,recvtag,MPI_COMM_WORLD,&req2[12]);
MPI_Isend(sendbuf_xZ, sendCount_xZ,MPI_DOUBLE,rank_xZ,sendtag,MPI_COMM_WORLD,&req1[13]);
MPI_Irecv(recvbuf_Xz, recvCount_Xz,MPI_DOUBLE,rank_Xz,recvtag,MPI_COMM_WORLD,&req2[13]);
MPI_Isend(sendbuf_yz, sendCount_yz,MPI_DOUBLE,rank_yz,sendtag,MPI_COMM_WORLD,&req1[14]);
MPI_Irecv(recvbuf_YZ, recvCount_YZ,MPI_DOUBLE,rank_YZ,recvtag,MPI_COMM_WORLD,&req2[14]);
MPI_Isend(sendbuf_YZ, sendCount_YZ,MPI_DOUBLE,rank_YZ,sendtag,MPI_COMM_WORLD,&req1[15]);
MPI_Irecv(recvbuf_yz, recvCount_yz,MPI_DOUBLE,rank_yz,recvtag,MPI_COMM_WORLD,&req2[15]);
MPI_Isend(sendbuf_Yz, sendCount_Yz,MPI_DOUBLE,rank_Yz,sendtag,MPI_COMM_WORLD,&req1[16]);
MPI_Irecv(recvbuf_yZ, recvCount_yZ,MPI_DOUBLE,rank_yZ,recvtag,MPI_COMM_WORLD,&req2[16]);
MPI_Isend(sendbuf_yZ, sendCount_yZ,MPI_DOUBLE,rank_yZ,sendtag,MPI_COMM_WORLD,&req1[17]);
MPI_Irecv(recvbuf_Yz, recvCount_Yz,MPI_DOUBLE,rank_Yz,recvtag,MPI_COMM_WORLD,&req2[17]);
MPI_Isend(sendbuf_x, 5*sendCount_x,MPI_DOUBLE,rank_x,sendtag,MPI_COMM_SCALBL,&req1[0]);
MPI_Irecv(recvbuf_X, 5*recvCount_X,MPI_DOUBLE,rank_X,recvtag,MPI_COMM_SCALBL,&req2[0]);
MPI_Isend(sendbuf_X, 5*sendCount_X,MPI_DOUBLE,rank_X,sendtag,MPI_COMM_SCALBL,&req1[1]);
MPI_Irecv(recvbuf_x, 5*recvCount_x,MPI_DOUBLE,rank_x,recvtag,MPI_COMM_SCALBL,&req2[1]);
MPI_Isend(sendbuf_y, 5*sendCount_y,MPI_DOUBLE,rank_y,sendtag,MPI_COMM_SCALBL,&req1[2]);
MPI_Irecv(recvbuf_Y, 5*recvCount_Y,MPI_DOUBLE,rank_Y,recvtag,MPI_COMM_SCALBL,&req2[2]);
MPI_Isend(sendbuf_Y, 5*sendCount_Y,MPI_DOUBLE,rank_Y,sendtag,MPI_COMM_SCALBL,&req1[3]);
MPI_Irecv(recvbuf_y, 5*recvCount_y,MPI_DOUBLE,rank_y,recvtag,MPI_COMM_SCALBL,&req2[3]);
MPI_Isend(sendbuf_z, 5*sendCount_z,MPI_DOUBLE,rank_z,sendtag,MPI_COMM_SCALBL,&req1[4]);
MPI_Irecv(recvbuf_Z, 5*recvCount_Z,MPI_DOUBLE,rank_Z,recvtag,MPI_COMM_SCALBL,&req2[4]);
MPI_Isend(sendbuf_Z, 5*sendCount_Z,MPI_DOUBLE,rank_Z,sendtag,MPI_COMM_SCALBL,&req1[5]);
MPI_Irecv(recvbuf_z, 5*recvCount_z,MPI_DOUBLE,rank_z,recvtag,MPI_COMM_SCALBL,&req2[5]);
MPI_Isend(sendbuf_xy, sendCount_xy,MPI_DOUBLE,rank_xy,sendtag,MPI_COMM_SCALBL,&req1[6]);
MPI_Irecv(recvbuf_XY, recvCount_XY,MPI_DOUBLE,rank_XY,recvtag,MPI_COMM_SCALBL,&req2[6]);
MPI_Isend(sendbuf_XY, sendCount_XY,MPI_DOUBLE,rank_XY,sendtag,MPI_COMM_SCALBL,&req1[7]);
MPI_Irecv(recvbuf_xy, recvCount_xy,MPI_DOUBLE,rank_xy,recvtag,MPI_COMM_SCALBL,&req2[7]);
MPI_Isend(sendbuf_Xy, sendCount_Xy,MPI_DOUBLE,rank_Xy,sendtag,MPI_COMM_SCALBL,&req1[8]);
MPI_Irecv(recvbuf_xY, recvCount_xY,MPI_DOUBLE,rank_xY,recvtag,MPI_COMM_SCALBL,&req2[8]);
MPI_Isend(sendbuf_xY, sendCount_xY,MPI_DOUBLE,rank_xY,sendtag,MPI_COMM_SCALBL,&req1[9]);
MPI_Irecv(recvbuf_Xy, recvCount_Xy,MPI_DOUBLE,rank_Xy,recvtag,MPI_COMM_SCALBL,&req2[9]);
MPI_Isend(sendbuf_xz, sendCount_xz,MPI_DOUBLE,rank_xz,sendtag,MPI_COMM_SCALBL,&req1[10]);
MPI_Irecv(recvbuf_XZ, recvCount_XZ,MPI_DOUBLE,rank_XZ,recvtag,MPI_COMM_SCALBL,&req2[10]);
MPI_Isend(sendbuf_XZ, sendCount_XZ,MPI_DOUBLE,rank_XZ,sendtag,MPI_COMM_SCALBL,&req1[11]);
MPI_Irecv(recvbuf_xz, recvCount_xz,MPI_DOUBLE,rank_xz,recvtag,MPI_COMM_SCALBL,&req2[11]);
MPI_Isend(sendbuf_Xz, sendCount_Xz,MPI_DOUBLE,rank_Xz,sendtag,MPI_COMM_SCALBL,&req1[12]);
MPI_Irecv(recvbuf_xZ, recvCount_xZ,MPI_DOUBLE,rank_xZ,recvtag,MPI_COMM_SCALBL,&req2[12]);
MPI_Isend(sendbuf_xZ, sendCount_xZ,MPI_DOUBLE,rank_xZ,sendtag,MPI_COMM_SCALBL,&req1[13]);
MPI_Irecv(recvbuf_Xz, recvCount_Xz,MPI_DOUBLE,rank_Xz,recvtag,MPI_COMM_SCALBL,&req2[13]);
MPI_Isend(sendbuf_yz, sendCount_yz,MPI_DOUBLE,rank_yz,sendtag,MPI_COMM_SCALBL,&req1[14]);
MPI_Irecv(recvbuf_YZ, recvCount_YZ,MPI_DOUBLE,rank_YZ,recvtag,MPI_COMM_SCALBL,&req2[14]);
MPI_Isend(sendbuf_YZ, sendCount_YZ,MPI_DOUBLE,rank_YZ,sendtag,MPI_COMM_SCALBL,&req1[15]);
MPI_Irecv(recvbuf_yz, recvCount_yz,MPI_DOUBLE,rank_yz,recvtag,MPI_COMM_SCALBL,&req2[15]);
MPI_Isend(sendbuf_Yz, sendCount_Yz,MPI_DOUBLE,rank_Yz,sendtag,MPI_COMM_SCALBL,&req1[16]);
MPI_Irecv(recvbuf_yZ, recvCount_yZ,MPI_DOUBLE,rank_yZ,recvtag,MPI_COMM_SCALBL,&req2[16]);
MPI_Isend(sendbuf_yZ, sendCount_yZ,MPI_DOUBLE,rank_yZ,sendtag,MPI_COMM_SCALBL,&req1[17]);
MPI_Irecv(recvbuf_Yz, recvCount_Yz,MPI_DOUBLE,rank_Yz,recvtag,MPI_COMM_SCALBL,&req2[17]);
}
void ScaLBL_Communicator::RecvD3Q19(double *f_even, double *f_odd){
void ScaLBL_Communicator::RecvD3Q19(double *f_even, double *f_odd, Domain &Dm){
//...................................................................................
// Wait for completion of D3Q19 communication
MPI_Waitall(18,req1,stat1);
MPI_Waitall(18,req2,stat2);
//...................................................................................
// Unpack the distributions on the device
//...................................................................................
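// Each UnpackDist call scatters one received distribution direction back into the device
// arrays; the (Cqx,Cqy,Cqz) arguments carry the lattice velocity so that the value is
// written to the neighboring site targeted by the streaming step.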
@ -361,10 +492,18 @@ void ScaLBL_Communicator::RecvD3Q19(double *f_even, double *f_odd){
//...Map receive list for the YZ edge <<<15)................................
UnpackDist(8,0,1,1,dvcRecvList_yz,0,recvCount_yz,recvbuf_yz,f_even,Nx,Ny,Nz);
//...................................................................................
Lock=false; // unlock the communicator after communications complete
//...................................................................................
}
void ScaLBL_Communicator::BiSendD3Q7(double *A_even, double *A_odd, double *B_even, double *B_odd){
void ScaLBL_Communicator::BiSendD3Q7(double *A_even, double *A_odd, double *B_even, double *B_odd, Domain &Dm){
if (Lock==true){
ERROR("ScaLBL Error (SendD3Q7): ScaLBL_Communicator is locked -- did you forget to match Send/Recv calls?");
}
else{
Lock=true;
}
//...................................................................................
sendtag = recvtag = 7;
//...................................................................................
@ -389,18 +528,18 @@ void ScaLBL_Communicator::BiSendD3Q7(double *A_even, double *A_odd, double *B_ev
//...................................................................................
// Send all the distributions
MPI_Isend(sendbuf_x, 2*sendCount_x,MPI_DOUBLE,rank_x,sendtag,MPI_COMM_WORLD,&req1[0]);
MPI_Irecv(recvbuf_X, 2*recvCount_X,MPI_DOUBLE,rank_X,recvtag,MPI_COMM_WORLD,&req2[0]);
MPI_Isend(sendbuf_X, 2*sendCount_X,MPI_DOUBLE,rank_X,sendtag,MPI_COMM_WORLD,&req1[1]);
MPI_Irecv(recvbuf_x, 2*recvCount_x,MPI_DOUBLE,rank_x,recvtag,MPI_COMM_WORLD,&req2[1]);
MPI_Isend(sendbuf_y, 2*sendCount_y,MPI_DOUBLE,rank_y,sendtag,MPI_COMM_WORLD,&req1[2]);
MPI_Irecv(recvbuf_Y, 2*recvCount_Y,MPI_DOUBLE,rank_Y,recvtag,MPI_COMM_WORLD,&req2[2]);
MPI_Isend(sendbuf_Y, 2*sendCount_Y,MPI_DOUBLE,rank_Y,sendtag,MPI_COMM_WORLD,&req1[3]);
MPI_Irecv(recvbuf_y, 2*recvCount_y,MPI_DOUBLE,rank_y,recvtag,MPI_COMM_WORLD,&req2[3]);
MPI_Isend(sendbuf_z, 2*sendCount_z,MPI_DOUBLE,rank_z,sendtag,MPI_COMM_WORLD,&req1[4]);
MPI_Irecv(recvbuf_Z, 2*recvCount_Z,MPI_DOUBLE,rank_Z,recvtag,MPI_COMM_WORLD,&req2[4]);
MPI_Isend(sendbuf_Z, 2*sendCount_Z,MPI_DOUBLE,rank_Z,sendtag,MPI_COMM_WORLD,&req1[5]);
MPI_Irecv(recvbuf_z, 2*recvCount_z,MPI_DOUBLE,rank_z,recvtag,MPI_COMM_WORLD,&req2[5]);
MPI_Isend(sendbuf_x, 2*sendCount_x,MPI_DOUBLE,rank_x,sendtag,MPI_COMM_SCALBL,&req1[0]);
MPI_Irecv(recvbuf_X, 2*recvCount_X,MPI_DOUBLE,rank_X,recvtag,MPI_COMM_SCALBL,&req2[0]);
MPI_Isend(sendbuf_X, 2*sendCount_X,MPI_DOUBLE,rank_X,sendtag,MPI_COMM_SCALBL,&req1[1]);
MPI_Irecv(recvbuf_x, 2*recvCount_x,MPI_DOUBLE,rank_x,recvtag,MPI_COMM_SCALBL,&req2[1]);
MPI_Isend(sendbuf_y, 2*sendCount_y,MPI_DOUBLE,rank_y,sendtag,MPI_COMM_SCALBL,&req1[2]);
MPI_Irecv(recvbuf_Y, 2*recvCount_Y,MPI_DOUBLE,rank_Y,recvtag,MPI_COMM_SCALBL,&req2[2]);
MPI_Isend(sendbuf_Y, 2*sendCount_Y,MPI_DOUBLE,rank_Y,sendtag,MPI_COMM_SCALBL,&req1[3]);
MPI_Irecv(recvbuf_y, 2*recvCount_y,MPI_DOUBLE,rank_y,recvtag,MPI_COMM_SCALBL,&req2[3]);
MPI_Isend(sendbuf_z, 2*sendCount_z,MPI_DOUBLE,rank_z,sendtag,MPI_COMM_SCALBL,&req1[4]);
MPI_Irecv(recvbuf_Z, 2*recvCount_Z,MPI_DOUBLE,rank_Z,recvtag,MPI_COMM_SCALBL,&req2[4]);
MPI_Isend(sendbuf_Z, 2*sendCount_Z,MPI_DOUBLE,rank_Z,sendtag,MPI_COMM_SCALBL,&req1[5]);
MPI_Irecv(recvbuf_z, 2*recvCount_z,MPI_DOUBLE,rank_z,recvtag,MPI_COMM_SCALBL,&req2[5]);
//...................................................................................
}
void ScaLBL_Communicator::BiRecvD3Q7(double *A_even, double *A_odd, double *B_even, double *B_odd){
@ -411,7 +550,7 @@ void ScaLBL_Communicator::BiRecvD3Q7(double *A_even, double *A_odd, double *B_ev
//...................................................................................
// Unpack the distributions on the device
//...................................................................................
//...Map receive list for the X face: q=2,8,10,12,13 .................................
//...Map receive list for the X face: q=2,8,10,12,13 ................................
UnpackDist(0,-1,0,0,dvcRecvList_X,0,recvCount_X,recvbuf_X,A_odd,Nx,Ny,Nz);
UnpackDist(0,-1,0,0,dvcRecvList_X,recvCount_X,recvCount_X,recvbuf_X,B_odd,Nx,Ny,Nz);
//...................................................................................
@ -419,23 +558,32 @@ void ScaLBL_Communicator::BiRecvD3Q7(double *A_even, double *A_odd, double *B_ev
UnpackDist(1,1,0,0,dvcRecvList_x,0,recvCount_x,recvbuf_x,A_even,Nx,Ny,Nz);
UnpackDist(1,1,0,0,dvcRecvList_x,recvCount_x,recvCount_x,recvbuf_x,B_even,Nx,Ny,Nz);
//...................................................................................
//...Map receive list for the y face: q=4,8,9,16,18 ...................................
//...Map receive list for the y face: q=4,8,9,16,18 .................................
UnpackDist(1,0,-1,0,dvcRecvList_Y,0,recvCount_Y,recvbuf_Y,A_odd,Nx,Ny,Nz);
UnpackDist(1,0,-1,0,dvcRecvList_Y,recvCount_Y,recvCount_Y,recvbuf_Y,B_odd,Nx,Ny,Nz);
//...................................................................................
//...Map receive list for the Y face: q=3,7,10,15,17 ..................................
//...Map receive list for the Y face: q=3,7,10,15,17 ................................
UnpackDist(2,0,1,0,dvcRecvList_y,0,recvCount_y,recvbuf_y,A_even,Nx,Ny,Nz);
UnpackDist(2,0,1,0,dvcRecvList_y,recvCount_y,recvCount_y,recvbuf_y,B_even,Nx,Ny,Nz);
//...................................................................................
//...Map receive list for the z face<<<6,12,13,16,17)..............................................
//...Map receive list for the z face<<<6,12,13,16,17)................................
UnpackDist(2,0,0,-1,dvcRecvList_Z,0,recvCount_Z,recvbuf_Z,A_odd,Nx,Ny,Nz);
UnpackDist(2,0,0,-1,dvcRecvList_Z,recvCount_Z,recvCount_Z,recvbuf_Z,B_odd,Nx,Ny,Nz);
//...Map receive list for the Z face<<<5,11,14,15,18)..............................................
//...Map receive list for the Z face<<<5,11,14,15,18)................................
UnpackDist(3,0,0,1,dvcRecvList_z,0,recvCount_z,recvbuf_z,A_even,Nx,Ny,Nz);
UnpackDist(3,0,0,1,dvcRecvList_z,recvCount_z,recvCount_z,recvbuf_z,B_even,Nx,Ny,Nz);
//..................................................................................
Lock=false; // unlock the communicator after communications complete
//...................................................................................
}
void ScaLBL_Communicator::SendHalo(double *data){
//...................................................................................
if (Lock==true){
ERROR("ScaLBL Error (SendHalo): ScaLBL_Communicator is locked -- did you forget to match Send/Recv calls?");
}
else{
Lock=true;
}
//...................................................................................
sendtag = recvtag = 1;
//...................................................................................
@ -460,45 +608,46 @@ void ScaLBL_Communicator::SendHalo(double *data){
//...................................................................................
// Send / Recv all the phase indicator field values
//...................................................................................
MPI_Isend(sendbuf_x, sendCount_x,MPI_DOUBLE,rank_x,sendtag,MPI_COMM_WORLD,&req1[0]);
MPI_Irecv(recvbuf_X, recvCount_X,MPI_DOUBLE,rank_X,recvtag,MPI_COMM_WORLD,&req2[0]);
MPI_Isend(sendbuf_X, sendCount_X,MPI_DOUBLE,rank_X,sendtag,MPI_COMM_WORLD,&req1[1]);
MPI_Irecv(recvbuf_x, recvCount_x,MPI_DOUBLE,rank_x,recvtag,MPI_COMM_WORLD,&req2[1]);
MPI_Isend(sendbuf_y, sendCount_y,MPI_DOUBLE,rank_y,sendtag,MPI_COMM_WORLD,&req1[2]);
MPI_Irecv(recvbuf_Y, recvCount_Y,MPI_DOUBLE,rank_Y,recvtag,MPI_COMM_WORLD,&req2[2]);
MPI_Isend(sendbuf_Y, sendCount_Y,MPI_DOUBLE,rank_Y,sendtag,MPI_COMM_WORLD,&req1[3]);
MPI_Irecv(recvbuf_y, recvCount_y,MPI_DOUBLE,rank_y,recvtag,MPI_COMM_WORLD,&req2[3]);
MPI_Isend(sendbuf_z, sendCount_z,MPI_DOUBLE,rank_z,sendtag,MPI_COMM_WORLD,&req1[4]);
MPI_Irecv(recvbuf_Z, recvCount_Z,MPI_DOUBLE,rank_Z,recvtag,MPI_COMM_WORLD,&req2[4]);
MPI_Isend(sendbuf_Z, sendCount_Z,MPI_DOUBLE,rank_Z,sendtag,MPI_COMM_WORLD,&req1[5]);
MPI_Irecv(recvbuf_z, recvCount_z,MPI_DOUBLE,rank_z,recvtag,MPI_COMM_WORLD,&req2[5]);
MPI_Isend(sendbuf_xy, sendCount_xy,MPI_DOUBLE,rank_xy,sendtag,MPI_COMM_WORLD,&req1[6]);
MPI_Irecv(recvbuf_XY, recvCount_XY,MPI_DOUBLE,rank_XY,recvtag,MPI_COMM_WORLD,&req2[6]);
MPI_Isend(sendbuf_XY, sendCount_XY,MPI_DOUBLE,rank_XY,sendtag,MPI_COMM_WORLD,&req1[7]);
MPI_Irecv(recvbuf_xy, recvCount_xy,MPI_DOUBLE,rank_xy,recvtag,MPI_COMM_WORLD,&req2[7]);
MPI_Isend(sendbuf_Xy, sendCount_Xy,MPI_DOUBLE,rank_Xy,sendtag,MPI_COMM_WORLD,&req1[8]);
MPI_Irecv(recvbuf_xY, recvCount_xY,MPI_DOUBLE,rank_xY,recvtag,MPI_COMM_WORLD,&req2[8]);
MPI_Isend(sendbuf_xY, sendCount_xY,MPI_DOUBLE,rank_xY,sendtag,MPI_COMM_WORLD,&req1[9]);
MPI_Irecv(recvbuf_Xy, recvCount_Xy,MPI_DOUBLE,rank_Xy,recvtag,MPI_COMM_WORLD,&req2[9]);
MPI_Isend(sendbuf_xz, sendCount_xz,MPI_DOUBLE,rank_xz,sendtag,MPI_COMM_WORLD,&req1[10]);
MPI_Irecv(recvbuf_XZ, recvCount_XZ,MPI_DOUBLE,rank_XZ,recvtag,MPI_COMM_WORLD,&req2[10]);
MPI_Isend(sendbuf_XZ, sendCount_XZ,MPI_DOUBLE,rank_XZ,sendtag,MPI_COMM_WORLD,&req1[11]);
MPI_Irecv(recvbuf_xz, recvCount_xz,MPI_DOUBLE,rank_xz,recvtag,MPI_COMM_WORLD,&req2[11]);
MPI_Isend(sendbuf_Xz, sendCount_Xz,MPI_DOUBLE,rank_Xz,sendtag,MPI_COMM_WORLD,&req1[12]);
MPI_Irecv(recvbuf_xZ, recvCount_xZ,MPI_DOUBLE,rank_xZ,recvtag,MPI_COMM_WORLD,&req2[12]);
MPI_Isend(sendbuf_xZ, sendCount_xZ,MPI_DOUBLE,rank_xZ,sendtag,MPI_COMM_WORLD,&req1[13]);
MPI_Irecv(recvbuf_Xz, recvCount_Xz,MPI_DOUBLE,rank_Xz,recvtag,MPI_COMM_WORLD,&req2[13]);
MPI_Isend(sendbuf_yz, sendCount_yz,MPI_DOUBLE,rank_yz,sendtag,MPI_COMM_WORLD,&req1[14]);
MPI_Irecv(recvbuf_YZ, recvCount_YZ,MPI_DOUBLE,rank_YZ,recvtag,MPI_COMM_WORLD,&req2[14]);
MPI_Isend(sendbuf_YZ, sendCount_YZ,MPI_DOUBLE,rank_YZ,sendtag,MPI_COMM_WORLD,&req1[15]);
MPI_Irecv(recvbuf_yz, recvCount_yz,MPI_DOUBLE,rank_yz,recvtag,MPI_COMM_WORLD,&req2[15]);
MPI_Isend(sendbuf_Yz, sendCount_Yz,MPI_DOUBLE,rank_Yz,sendtag,MPI_COMM_WORLD,&req1[16]);
MPI_Irecv(recvbuf_yZ, recvCount_yZ,MPI_DOUBLE,rank_yZ,recvtag,MPI_COMM_WORLD,&req2[16]);
MPI_Isend(sendbuf_yZ, sendCount_yZ,MPI_DOUBLE,rank_yZ,sendtag,MPI_COMM_WORLD,&req1[17]);
MPI_Irecv(recvbuf_Yz, recvCount_Yz,MPI_DOUBLE,rank_Yz,recvtag,MPI_COMM_WORLD,&req2[17]);
MPI_Isend(sendbuf_x, sendCount_x,MPI_DOUBLE,rank_x,sendtag,MPI_COMM_SCALBL,&req1[0]);
MPI_Irecv(recvbuf_X, recvCount_X,MPI_DOUBLE,rank_X,recvtag,MPI_COMM_SCALBL,&req2[0]);
MPI_Isend(sendbuf_X, sendCount_X,MPI_DOUBLE,rank_X,sendtag,MPI_COMM_SCALBL,&req1[1]);
MPI_Irecv(recvbuf_x, recvCount_x,MPI_DOUBLE,rank_x,recvtag,MPI_COMM_SCALBL,&req2[1]);
MPI_Isend(sendbuf_y, sendCount_y,MPI_DOUBLE,rank_y,sendtag,MPI_COMM_SCALBL,&req1[2]);
MPI_Irecv(recvbuf_Y, recvCount_Y,MPI_DOUBLE,rank_Y,recvtag,MPI_COMM_SCALBL,&req2[2]);
MPI_Isend(sendbuf_Y, sendCount_Y,MPI_DOUBLE,rank_Y,sendtag,MPI_COMM_SCALBL,&req1[3]);
MPI_Irecv(recvbuf_y, recvCount_y,MPI_DOUBLE,rank_y,recvtag,MPI_COMM_SCALBL,&req2[3]);
MPI_Isend(sendbuf_z, sendCount_z,MPI_DOUBLE,rank_z,sendtag,MPI_COMM_SCALBL,&req1[4]);
MPI_Irecv(recvbuf_Z, recvCount_Z,MPI_DOUBLE,rank_Z,recvtag,MPI_COMM_SCALBL,&req2[4]);
MPI_Isend(sendbuf_Z, sendCount_Z,MPI_DOUBLE,rank_Z,sendtag,MPI_COMM_SCALBL,&req1[5]);
MPI_Irecv(recvbuf_z, recvCount_z,MPI_DOUBLE,rank_z,recvtag,MPI_COMM_SCALBL,&req2[5]);
MPI_Isend(sendbuf_xy, sendCount_xy,MPI_DOUBLE,rank_xy,sendtag,MPI_COMM_SCALBL,&req1[6]);
MPI_Irecv(recvbuf_XY, recvCount_XY,MPI_DOUBLE,rank_XY,recvtag,MPI_COMM_SCALBL,&req2[6]);
MPI_Isend(sendbuf_XY, sendCount_XY,MPI_DOUBLE,rank_XY,sendtag,MPI_COMM_SCALBL,&req1[7]);
MPI_Irecv(recvbuf_xy, recvCount_xy,MPI_DOUBLE,rank_xy,recvtag,MPI_COMM_SCALBL,&req2[7]);
MPI_Isend(sendbuf_Xy, sendCount_Xy,MPI_DOUBLE,rank_Xy,sendtag,MPI_COMM_SCALBL,&req1[8]);
MPI_Irecv(recvbuf_xY, recvCount_xY,MPI_DOUBLE,rank_xY,recvtag,MPI_COMM_SCALBL,&req2[8]);
MPI_Isend(sendbuf_xY, sendCount_xY,MPI_DOUBLE,rank_xY,sendtag,MPI_COMM_SCALBL,&req1[9]);
MPI_Irecv(recvbuf_Xy, recvCount_Xy,MPI_DOUBLE,rank_Xy,recvtag,MPI_COMM_SCALBL,&req2[9]);
MPI_Isend(sendbuf_xz, sendCount_xz,MPI_DOUBLE,rank_xz,sendtag,MPI_COMM_SCALBL,&req1[10]);
MPI_Irecv(recvbuf_XZ, recvCount_XZ,MPI_DOUBLE,rank_XZ,recvtag,MPI_COMM_SCALBL,&req2[10]);
MPI_Isend(sendbuf_XZ, sendCount_XZ,MPI_DOUBLE,rank_XZ,sendtag,MPI_COMM_SCALBL,&req1[11]);
MPI_Irecv(recvbuf_xz, recvCount_xz,MPI_DOUBLE,rank_xz,recvtag,MPI_COMM_SCALBL,&req2[11]);
MPI_Isend(sendbuf_Xz, sendCount_Xz,MPI_DOUBLE,rank_Xz,sendtag,MPI_COMM_SCALBL,&req1[12]);
MPI_Irecv(recvbuf_xZ, recvCount_xZ,MPI_DOUBLE,rank_xZ,recvtag,MPI_COMM_SCALBL,&req2[12]);
MPI_Isend(sendbuf_xZ, sendCount_xZ,MPI_DOUBLE,rank_xZ,sendtag,MPI_COMM_SCALBL,&req1[13]);
MPI_Irecv(recvbuf_Xz, recvCount_Xz,MPI_DOUBLE,rank_Xz,recvtag,MPI_COMM_SCALBL,&req2[13]);
MPI_Isend(sendbuf_yz, sendCount_yz,MPI_DOUBLE,rank_yz,sendtag,MPI_COMM_SCALBL,&req1[14]);
MPI_Irecv(recvbuf_YZ, recvCount_YZ,MPI_DOUBLE,rank_YZ,recvtag,MPI_COMM_SCALBL,&req2[14]);
MPI_Isend(sendbuf_YZ, sendCount_YZ,MPI_DOUBLE,rank_YZ,sendtag,MPI_COMM_SCALBL,&req1[15]);
MPI_Irecv(recvbuf_yz, recvCount_yz,MPI_DOUBLE,rank_yz,recvtag,MPI_COMM_SCALBL,&req2[15]);
MPI_Isend(sendbuf_Yz, sendCount_Yz,MPI_DOUBLE,rank_Yz,sendtag,MPI_COMM_SCALBL,&req1[16]);
MPI_Irecv(recvbuf_yZ, recvCount_yZ,MPI_DOUBLE,rank_yZ,recvtag,MPI_COMM_SCALBL,&req2[16]);
MPI_Isend(sendbuf_yZ, sendCount_yZ,MPI_DOUBLE,rank_yZ,sendtag,MPI_COMM_SCALBL,&req1[17]);
MPI_Irecv(recvbuf_Yz, recvCount_Yz,MPI_DOUBLE,rank_Yz,recvtag,MPI_COMM_SCALBL,&req2[17]);
//...................................................................................
}
void ScaLBL_Communicator::RecvHalo(double *data){
//...................................................................................
MPI_Waitall(18,req1,stat1);
MPI_Waitall(18,req2,stat2);
@ -524,5 +673,7 @@ void ScaLBL_Communicator::RecvHalo(double *data){
UnpackValues(dvcRecvList_Yz, recvCount_Yz,recvbuf_Yz, data, N);
UnpackValues(dvcRecvList_YZ, recvCount_YZ,recvbuf_YZ, data, N);
//...................................................................................
Lock=false; // unlock the communicator after communications complete
//...................................................................................
}

View File

@ -1,21 +0,0 @@
extern "C" void InitDenColor(char *ID, double *Den, double *Phi, double das, double dbs, int Nx, int Ny, int Nz);
extern "C" void InitDenColorDistance(char *ID, double *Den, double *Phi, double *Distance,
double das, double dbs, double beta, double xp, int Nx, int Ny, int Nz);
extern "C" void ComputeColorGradient(char *ID, double *phi, double *ColorGrad, int Nx, int Ny, int Nz);
extern "C" void ColorCollide( char *ID, double *disteven, double *distodd, double *ColorGrad,
double *Velocity, int Nx, int Ny, int Nz,double rlx_setA, double rlx_setB,
double alpha, double beta, double Fx, double Fy, double Fz, bool pBC);
extern "C" void ColorCollideOpt( char *ID, double *disteven, double *distodd, double *phi, double *ColorGrad,
double *Velocity, int Nx, int Ny, int Nz,double rlx_setA, double rlx_setB,
double alpha, double beta, double Fx, double Fy, double Fz);
extern "C" void DensityStreamD3Q7(char *ID, double *Den, double *Copy, double *Phi, double *ColorGrad, double *Velocity,
double beta, int Nx, int Ny, int Nz, bool pBC);
//extern "C" void ComputePhi(char *ID, double *Phi, double *Copy, double *Den, int N);
// Added by J.E. McClure 2-12-2014
extern "C" void ComputePhi(char *ID, double *Phi, double *Den, int N);
extern "C" void MassColorCollideD3Q7(char *ID, double *A_even, double *A_odd, double *B_even, double *B_odd,
double *Den, double *Phi, double *ColorGrad, double *Velocity, double beta, int N, bool pBC);
extern "C" void ColorBC_inlet(double *Phi, double *Den, double *A_even, double *A_odd,
double *B_even, double *B_odd, int Nx, int Ny, int Nz);
extern "C" void ColorBC_outlet(double *Phi, double *Den, double *A_even, double *A_odd,
double *B_even, double *B_odd, int Nx, int Ny, int Nz);

View File

@ -1,6 +0,0 @@
extern "C" void PackDist(int q, int *list, int start, int count, double *sendbuf, double *dist, int N);
extern "C" void UnpackDist(int q, int Cqx, int Cqy, int Cqz, int *list, int start, int count,
double *recvbuf, double *dist, int Nx, int Ny, int Nz);
//*************************************************************************
extern "C" void InitD3Q19(char *ID, double *f_even, double *f_odd, int Nx, int Ny, int Nz);
extern "C" void SwapD3Q19(char *ID, double *disteven, double *distodd, int Nx, int Ny, int Nz);

View File

@ -1,16 +0,0 @@
// CPU Functions for D3Q7 Lattice Boltzmann Methods
extern "C" void PackValues(int *list, int count, double *sendbuf, double *Data, int N);
extern "C" void UnpackValues(int *list, int count, double *recvbuf, double *Data, int N);
extern "C" void PackDenD3Q7(int *list, int count, double *sendbuf, int number, double *Data, int N);
extern "C" void UnpackDenD3Q7(int *list, int count, double *recvbuf, int number, double *Data, int N);
extern "C" void InitD3Q7(char *ID, double *f_even, double *f_odd, double *Den, int Nx, int Ny, int Nz);
extern "C" void SwapD3Q7(char *ID, double *disteven, double *distodd, int Nx, int Ny, int Nz);
extern "C" void ComputeDensityD3Q7(char *ID, double *disteven, double *distodd, double *Den,
int Nx, int Ny, int Nz);

View File

@ -1,7 +0,0 @@
extern "C" void AllocateDeviceMemory(void** address, size_t size);
extern "C" void CopyToDevice(void* dest, void* source, size_t size);
extern "C" void CopyToHost(void* dest, void* source, size_t size);
extern "C" void DeviceBarrier();

View File

@ -1,22 +0,0 @@
extern "C" void PackDist(int q, int *list, int start, int count, double *sendbuf, double *dist, int N);
extern "C" void UnpackDist(int q, int Cqx, int Cqy, int Cqz, int *list, int start, int count,double *recvbuf, double *dist, int Nx, int Ny, int Nz);
extern "C" void InitD3Q19(char *ID, double *f_even, double *f_odd, int Nx, int Ny, int Nz);
extern "C" void SwapD3Q19(char *ID, double *disteven, double *distodd, int Nx, int Ny, int Nz);
extern "C" void MRT(char *ID, double *f_even, double *f_odd, double rlxA, double rlxB,
double Fx, double Fy, double Fz,int Nx, int Ny, int Nz);
extern "C" void ComputeVelocityD3Q19(char *ID, double *disteven, double *distodd, double *vel,
int Nx, int Ny, int Nz);
extern "C" void ComputePressureD3Q19(char *ID, double *disteven, double *distodd, double *Pressure,
int Nx, int Ny, int Nz);
extern "C" void PressureBC_inlet(double *disteven, double *distodd, double din,
int Nx, int Ny, int Nz);
extern "C" void PressureBC_outlet(double *disteven, double *distodd, double dout,
int Nx, int Ny, int Nz, int outlet);

View File

@ -1,2 +0,0 @@
// header for thermal lattice boltzmann methods
// copyright James McClure, 2014

View File

@ -1,28 +0,0 @@
extern "C" void InitDenColor(char *ID, double *Den, double *Phi, double das, double dbs, int Nx, int Ny, int Nz);
extern "C" void InitDenColorDistance(char *ID, double *Den, double *Phi, double *Distance,
double das, double dbs, double beta, double xp, int Nx, int Ny, int Nz);
extern "C" void Compute_VELOCITY(char *ID, double *disteven, double *distodd, double *vel, int Nx, int Ny, int Nz);
extern "C" void ComputePressureD3Q19(char *ID, double *disteven, double *distodd, double *Pressure,
int Nx, int Ny, int Nz);
extern "C" void PressureBC_inlet(double *disteven, double *distodd, double din,
int Nx, int Ny, int Nz);
extern "C" void PressureBC_outlet(double *disteven, double *distodd, double dout,
int Nx, int Ny, int Nz, int outlet);
extern "C" void ComputeColorGradient(char *ID, double *phi, double *ColorGrad, int Nx, int Ny, int Nz);
extern "C" void ColorCollide( char *ID, double *disteven, double *distodd, double *ColorGrad,
double *Velocity, int Nx, int Ny, int Nz,double rlx_setA, double rlx_setB,
double alpha, double beta, double Fx, double Fy, double Fz, bool pBC);
extern "C" void ColorCollideOpt( char *ID, double *disteven, double *distodd, double *phi, double *ColorGrad,
double *Velocity, int Nx, int Ny, int Nz,double rlx_setA, double rlx_setB,
double alpha, double beta, double Fx, double Fy, double Fz);
extern "C" void DensityStreamD3Q7(char *ID, double *Den, double *Copy, double *Phi, double *ColorGrad, double *Velocity,
double beta, int Nx, int Ny, int Nz, bool pBC);
//extern "C" void ComputePhi(char *ID, double *Phi, double *Copy, double *Den, int N);
// Added by J.E. McClure 2-12-2014
extern "C" void ComputePhi(char *ID, double *Phi, double *Den, int N);
extern "C" void MassColorCollideD3Q7(char *ID, double *A_even, double *A_odd, double *B_even, double *B_odd,
double *Den, double *Phi, double *ColorGrad, double *Velocity, double beta, int N, bool pBC);
extern "C" void ColorBC_inlet(double *Phi, double *Den, double *A_even, double *A_odd,
double *B_even, double *B_odd, int Nx, int Ny, int Nz);
extern "C" void ColorBC_outlet(double *Phi, double *Den, double *A_even, double *A_odd,
double *B_even, double *B_odd, int Nx, int Ny, int Nz);

View File

@ -1,12 +0,0 @@
//*************************************************************************
// A few basic cuda functions callable from C / C++ code
//************************************************************************
//*************************************************************************
extern "C" void dvc_AllocateDeviceMemory(void** address, size_t size);
//*************************************************************************
extern "C" void dvc_CopyToDevice(void* dest, void* source, size_t size);
//*************************************************************************
extern "C" void dvc_CopyToHost(void* dest, void* source, size_t size);
//*************************************************************************
extern "C" void dvc_Barrier();
//*************************************************************************

View File

@ -1,6 +0,0 @@
extern "C" void PackDist(int q, int *list, int start, int count, double *sendbuf, double *dist, int N);
extern "C" void UnpackDist(int q, int Cqx, int Cqy, int Cqz, int *list, int start, int count,
double *recvbuf, double *dist, int Nx, int Ny, int Nz);
//*************************************************************************
extern "C" void InitD3Q19(char *ID, double *f_even, double *f_odd, int Nx, int Ny, int Nz);
extern "C" void SwapD3Q19(char *ID, double *disteven, double *distodd, int Nx, int Ny, int Nz);

View File

@ -1,16 +0,0 @@
// CPU Functions for D3Q7 Lattice Boltzmann Methods
extern "C" void PackValues(int *list, int count, double *sendbuf, double *Data, int N);
extern "C" void UnpackValues(int *list, int count, double *recvbuf, double *Data, int N);
extern "C" void PackDenD3Q7(int *list, int count, double *sendbuf, int number, double *Data, int N);
extern "C" void UnpackDenD3Q7(int *list, int count, double *recvbuf, int number, double *Data, int N);
extern "C" void InitD3Q7(char *ID, double *f_even, double *f_odd, double *Den, int Nx, int Ny, int Nz);
extern "C" void SwapD3Q7(char *ID, double *disteven, double *distodd, int Nx, int Ny, int Nz);
extern "C" void ComputeDensityD3Q7(char *ID, double *disteven, double *distodd, double *Den,
int Nx, int Ny, int Nz);

View File

@ -1,7 +0,0 @@
extern "C" void AllocateDeviceMemory(void** address, size_t size);
extern "C" void CopyToDevice(void* dest, void* source, size_t size);
extern "C" void CopyToHost(void* dest, void* source, size_t size);
extern "C" void DeviceBarrier();

View File

@ -1,22 +0,0 @@
extern "C" void PackDist(int q, int *list, int start, int count, double *sendbuf, double *dist, int N);
extern "C" void UnpackDist(int q, int Cqx, int Cqy, int Cqz, int *list, int start, int count,double *recvbuf, double *dist, int Nx, int Ny, int Nz);
extern "C" void InitD3Q19(char *ID, double *f_even, double *f_odd, int Nx, int Ny, int Nz);
extern "C" void SwapD3Q19(char *ID, double *disteven, double *distodd, int Nx, int Ny, int Nz);
extern "C" void MRT(char *ID, double *f_even, double *f_odd, double rlxA, double rlxB,
double Fx, double Fy, double Fz,int Nx, int Ny, int Nz);
extern "C" void ComputeVelocityD3Q19(char *ID, double *disteven, double *distodd, double *vel,
int Nx, int Ny, int Nz);
extern "C" void ComputePressureD3Q19(char *ID, double *disteven, double *distodd, double *Pressure,
int Nx, int Ny, int Nz);
extern "C" void PressureBC_inlet(double *disteven, double *distodd, double din,
int Nx, int Ny, int Nz);
extern "C" void PressureBC_outlet(double *disteven, double *distodd, double dout,
int Nx, int Ny, int Nz, int outlet);