Fixing compile errors when compiling without MPI

Mark Berrill 2015-07-09 11:28:40 -04:00
parent 5fc08ce163
commit 54da497392
4 changed files with 52 additions and 7 deletions

View File

@@ -4,9 +4,9 @@ Checkout code and create a cmake script to compile plug-in.
Example commands for Rhea:
Load the proper modules
module load visit
module load cmake
module unload intel gcc
module load gcc/4.8.2
module load cmake
Configure cmake to build the plug-in
cmake \
@@ -16,12 +16,11 @@ Configure cmake to build the plug-in
-D USE_MPI=false \
-D USE_CUDA=false \
-D USE_VISIT=true \
-D VISIT_ROOT_DIR=/sw/rhea/visit \
-D VISIT_ROOT_DIR=/sw/rhea/visit/2.9.0/linux-x86_64 \
-D USE_TIMER=false \
../../LBPM-WIA
Build the visit plug-in
make visit

View File

@@ -162,11 +162,27 @@ int MPI_Waitall( int, MPI_Request[], MPI_Status[] )
{
return 0;
}
int MPI_Wait( MPI_Request*, MPI_Status* )
{
return 0;
}
int MPI_Bcast( void *buffer, int count, MPI_Datatype datatype, int root, MPI_Comm comm )
{
ERROR("Not implimented yet");
return 0;
}
int MPI_Send(const void *buf, int count, MPI_Datatype datatype, int dest, int tag,
MPI_Comm comm)
{
ERROR("Not implimented yet");
return 0;
}
int MPI_Recv(void *buf, int count, MPI_Datatype datatype, int source, int tag,
MPI_Comm comm, MPI_Status *status)
{
ERROR("Not implimented yet");
return 0;
}
int MPI_Isend(const void *buf, int count, MPI_Datatype datatype, int dest, int tag,
MPI_Comm comm, MPI_Request *request)
{
@@ -185,6 +201,13 @@ int MPI_Allreduce(const void *sendbuf, void *recvbuf, int count,
ERROR("Not implimented yet");
return 0;
}
int MPI_Allgather(const void *sendbuf, int sendcount, MPI_Datatype sendtype,
void *recvbuf, int recvcount, MPI_Datatype recvtype,
MPI_Comm comm)
{
ERROR("Not implimented yet");
return 0;
}
int MPI_Sendrecv(const void *sendbuf, int sendcount, MPI_Datatype sendtype,
int dest, int sendtag,
void *recvbuf, int recvcount, MPI_Datatype recvtype,
@@ -194,6 +217,16 @@ int MPI_Sendrecv(const void *sendbuf, int sendcount, MPI_Datatype sendtype,
ERROR("Not implimented yet");
return 0;
}
int MPI_Comm_group(MPI_Comm comm, MPI_Group *group)
{
ERROR("Not implimented yet");
return 0;
}
int MPI_Comm_create(MPI_Comm comm, MPI_Group group, MPI_Comm *newcomm)
{
ERROR("Not implimented yet");
return 0;
}
double MPI_Wtime( void )
{
return 0.0;

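For reference, the single-process semantics make most of these stubs trivial to implement should a serial build ever need them to work rather than abort. The sketch below shows one possible no-MPI fallback for MPI_Allgather as a local copy; it is an illustration only, not what this commit does (the commit deliberately leaves the stub as ERROR), and the fake_mpi_size helper with its size table is a hypothetical addition.

// Sketch only: a possible serial fallback for MPI_Allgather in the fake MPI
// layer. Uses the MPI_Datatype/MPI_Comm types declared in the fake header;
// fake_mpi_size() is a hypothetical helper, not part of this commit.
#include <cstring>   // std::memcpy

static std::size_t fake_mpi_size( MPI_Datatype datatype )
{
    switch ( datatype ) {
        case MPI_CHAR:   return sizeof( char );
        case MPI_INT:    return sizeof( int );
        case MPI_DOUBLE: return sizeof( double );
        default:         return 0;   // remaining datatypes would need entries here
    }
}

int MPI_Allgather( const void *sendbuf, int sendcount, MPI_Datatype sendtype,
    void *recvbuf, int recvcount, MPI_Datatype recvtype, MPI_Comm comm )
{
    // With exactly one rank, gathering from "all" ranks is a local copy.
    std::memcpy( recvbuf, sendbuf, sendcount * fake_mpi_size( sendtype ) );
    return 0;
}

MPI_Bcast would reduce to a pure no-op in the same setting, since the root already holds the data, and MPI_Sendrecv would again become a local copy.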
View File

@ -18,28 +18,40 @@
#define MPI_COMM_WORLD 0
#define MPI_COMM_SELF 0
#define MPI_STATUS_IGNORE NULL
enum MPI_Datatype { MPI_LOGICAL, MPI_CHAR, MPI_INT, MPI_DOUBLE };
enum MPI_Op { MPI_SUM };
enum MPI_Datatype { MPI_LOGICAL, MPI_CHAR, MPI_UNSIGNED_CHAR, MPI_INT,
MPI_UNSIGNED, MPI_LONG, MPI_UNSIGNED_LONG, MPI_LONG_LONG, MPI_FLOAT, MPI_DOUBLE };
enum MPI_Op { MPI_MIN, MPI_MAX, MPI_SUM };
enum MPI_Group { };
// Fake MPI functions
int MPI_Init(int*,char***);
int MPI_Finalize();
int MPI_Comm_size( MPI_Comm, int *size );
int MPI_Comm_rank( MPI_Comm, int *rank );
int MPI_Barrier(MPI_Comm);
int MPI_Wait(MPI_Request*,MPI_Status*);
int MPI_Waitall(int,MPI_Request[],MPI_Status[]);
int MPI_Bcast(void*,int,MPI_Datatype,int,MPI_Comm);
int MPI_Send(const void *buf, int count, MPI_Datatype datatype, int dest, int tag,
MPI_Comm comm);
int MPI_Recv(void *buf, int count, MPI_Datatype datatype, int source, int tag,
MPI_Comm comm, MPI_Status *status);
int MPI_Isend(const void *buf, int count, MPI_Datatype datatype, int dest, int tag,
MPI_Comm comm, MPI_Request *request);
int MPI_Irecv(void *buf, int count, MPI_Datatype datatype, int source,
int tag, MPI_Comm comm, MPI_Request *request);
int MPI_Allreduce(const void *sendbuf, void *recvbuf, int count,
MPI_Datatype datatype, MPI_Op op, MPI_Comm comm);
int MPI_Allgather(const void *sendbuf, int sendcount, MPI_Datatype sendtype,
void *recvbuf, int recvcount, MPI_Datatype recvtype,
MPI_Comm comm);
int MPI_Sendrecv(const void *sendbuf, int sendcount, MPI_Datatype sendtype,
int dest, int sendtag,
void *recvbuf, int recvcount, MPI_Datatype recvtype,
int source, int recvtag,
MPI_Comm comm, MPI_Status *status);
double MPI_Wtime( void );
int MPI_Comm_group(MPI_Comm comm, MPI_Group *group);
int MPI_Comm_create(MPI_Comm comm, MPI_Group group, MPI_Comm *newcomm);
#endif
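With these declarations in place, code that sticks to this subset of the MPI API compiles unchanged whether or not a real MPI library is available. A minimal, hypothetical caller is sketched below; the include name follows the wrapper header used in the change further down, and the printed text is illustrative only.

// Illustrative caller: builds against either real MPI or the stubs above.
#include <cstdio>
#include "MPI_Helpers.h"   // wrapper header, as used in the change below

int main( int argc, char **argv )
{
    MPI_Init( &argc, &argv );
    int rank = 0, size = 1;
    MPI_Comm_rank( MPI_COMM_WORLD, &rank );
    MPI_Comm_size( MPI_COMM_WORLD, &size );
    printf( "rank %d of %d\n", rank, size );
    MPI_Barrier( MPI_COMM_WORLD );
    MPI_Finalize();
    return 0;
}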

View File

@ -5,12 +5,13 @@
#include <exception>
#include <stdexcept>
#include <fstream>
#include <mpi.h>
#include "pmmc.h"
#include "Domain.h"
#include "Extras.h"
#include "Communication.h"
#include "MPI_Helpers.h" // This includes mpi.h
/*
* Pre-Processor to generate signed distance function from disc packing
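The include swap above works because the wrapper header can stand in for mpi.h in both build modes. A plausible layout for that selection, reconstructed from the fake declarations shown earlier, is sketched below; the USE_MPI guard name and the int typedefs are assumptions, and the real header may differ.

// Assumed structure of the wrapper header (sketch, not the actual file).
#ifdef USE_MPI
    #include <mpi.h>            // real MPI build: use the system MPI headers
#else
    typedef int MPI_Comm;       // placeholder types so the same signatures parse
    typedef int MPI_Request;
    typedef int MPI_Status;
    #define MPI_COMM_WORLD 0
    // ...followed by the enums and stub declarations shown in the previous file
#endif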