Commit

Merge branch 'master' into faster_shear_b
damonge authored Jul 6, 2020
2 parents 0b3e638 + 7497897 commit 421f0cc
Showing 4 changed files with 11 additions and 11 deletions.
2 changes: 1 addition & 1 deletion src/beaming.c
@@ -324,7 +324,7 @@ void get_beam_properties(ParamCoLoRe *par)
     int node_i_am_now=(NodeThis-i-1+NNodes)%NNodes;
 #ifdef _HAVE_MPI
 #ifdef _DEBUG
-    print_info("Communication %d, Node %d is now Node %d\n",i,NodeThis,node_i_am_now);
+    print_info("Communication %d, MPI task %d is now MPI task %d\n",i,NodeThis,node_i_am_now);
 #endif //_DEBUG
     mpi_sendrecv_wrap(par->grid_npot,buffer_sr,size_slice_npot,i);
     mpi_sendrecv_wrap(par->grid_dens,buffer_sr,size_slice_dens,i);
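
The context around this change uses a cyclic-shift index, node_i_am_now=(NodeThis-i-1+NNodes)%NNodes, to track whose grid slice each MPI task holds at communication step i. The following standalone sketch (assumed NNodes value, illustration only, not CoLoRe code) just prints the schedule that index produces.

#include <stdio.h>

int main(void)
{
  int NNodes=4; /* assumed number of MPI tasks, for illustration only */
  for(int NodeThis=0;NodeThis<NNodes;NodeThis++) {
    for(int i=0;i<NNodes-1;i++) {
      /* Same index arithmetic as in the hunk above: step i shifts slice
         ownership by i+1 positions around the task ring, so after
         NNodes-1 steps every other task's slice is visited exactly once. */
      int node_i_am_now=(NodeThis-i-1+NNodes)%NNodes;
      printf("step %d: MPI task %d holds the slice of task %d\n",
             i,NodeThis,node_i_am_now);
    }
  }
  return 0;
}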
10 changes: 5 additions & 5 deletions src/common.c
@@ -248,7 +248,7 @@ void mpi_init(int* p_argc,char*** p_argv)
 #ifdef _DEBUG
   if(NodeThis==0) {
     for(ii=0;ii<NNodes;ii++)
-      printf("Node %d has %d threads\n",ii,nthreads_all[ii]);
+      printf("MPI task %d has %d OMP threads\n",ii,nthreads_all[ii]);
   }
 #endif //_DEBUG
   IThread0=0;
@@ -268,8 +268,8 @@ void mpi_init(int* p_argc,char*** p_argv)
 #endif //_HAVE_MPI

 #ifdef _DEBUG
-  printf("Node %d, thread count starts at %d\n",NodeThis,IThread0);
-  print_info(" MPIThreads = %d\n",MPIThreadsOK);
+  printf("MPI task %d, OMP thread count starts at %d\n",NodeThis,IThread0);
+  print_info(" MPIThreadsOK = %d\n",MPIThreadsOK);
 #endif //_DEBUG
 }

@@ -297,11 +297,11 @@ void report_error(int level,char *fmt,...)
   va_end(args);

   if(level) {
-    fprintf(stderr,"Node %d, Fatal: %s",NodeThis,msg);
+    fprintf(stderr,"MPI task %d, Fatal: %s",NodeThis,msg);
     exit(level);
   }
   else {
-    fprintf(stderr,"Node %d, Warning: %s",NodeThis,msg);
+    fprintf(stderr,"MPI task %d, Warning: %s",NodeThis,msg);
   }
 }

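
The report_error hunk only touches the prefix strings, but it shows the shape of the helper: a variadic message is formatted once and then printed with the task id and severity. Here is a minimal sketch of that pattern, assuming a fixed-size buffer and a global NodeThis as in common.c (an illustration, not the file's exact code).

#include <stdarg.h>
#include <stdio.h>
#include <stdlib.h>

static int NodeThis=0; /* assumed global MPI task id, as used in common.c */

void report_error_sketch(int level,const char *fmt,...)
{
  char msg[256]; /* assumed buffer size, for illustration */
  va_list args;

  /* Format the caller's message once... */
  va_start(args,fmt);
  vsnprintf(msg,sizeof(msg),fmt,args);
  va_end(args);

  /* ...then emit it with the MPI task id and severity prefix. */
  if(level) {
    fprintf(stderr,"MPI task %d, Fatal: %s",NodeThis,msg);
    exit(level);
  }
  else
    fprintf(stderr,"MPI task %d, Warning: %s",NodeThis,msg);
}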
6 changes: 3 additions & 3 deletions src/density.c
@@ -226,7 +226,7 @@ static void share_particles(ParamCoLoRe *par,unsigned long long np_allocated,uns
       z_bright_right[inode]=(par->lpt_interp_type+1)*0.5*dx;
     }
 #ifdef _DEBUG
-    print_info("Node %d: [%lf,%lf] [%lf,%lf] [%lf %lf] [%lf %lf]\n",inode,
+    print_info("MPI task %d: [%lf,%lf] [%lf,%lf] [%lf %lf] [%lf %lf]\n",inode,
               z_left[inode],z_right[inode],
               z_true_left[inode],z_true_right[inode],
               z_bleft_left[inode],z_bleft_right[inode],
@@ -269,7 +269,7 @@ static void share_particles(ParamCoLoRe *par,unsigned long long np_allocated,uns
   }

 #ifdef _DEBUG
-  printf("Node %d: %llu %llu %llu %llu\n",NodeThis,np_allocated,n_inrange,nbuffer,n_inbuffer);
+  printf("MPI task %d: %llu %llu %llu %llu\n",NodeThis,np_allocated,n_inrange,nbuffer,n_inbuffer);
 #ifdef _HAVE_MPI
   MPI_Barrier(MPI_COMM_WORLD);
 #endif
@@ -335,7 +335,7 @@ static void share_particles(ParamCoLoRe *par,unsigned long long np_allocated,uns


 #ifdef _DEBUG
-    printf("%d. Node %d: send %d particles to node %d, receive %d particles from node %d\n",
+    printf("%d. MPI task %d: send %d particles to node %d, receive %d particles from MPI task %d\n",
           inode,NodeThis,nsend,node_to,nrecv,node_from);
 #ifdef _HAVE_MPI
   MPI_Barrier(MPI_COMM_WORLD);
4 changes: 2 additions & 2 deletions src/srcs.c
@@ -198,7 +198,7 @@ static void srcs_set_cartesian_single(ParamCoLoRe *par,int ipop)

   print_info(" There will be %ld objects in total \n",(long)nsources_total);
 #ifdef _DEBUG
-  fprintf(par->f_dbg,"Node %d has %ld particles\n",NodeThis,(long)(par->nsources_c_this[ipop]));
+  fprintf(par->f_dbg,"MPI task %d has %ld particles\n",NodeThis,(long)(par->nsources_c_this[ipop]));
 #endif //_DEBUG

   for(ii=nthr-1;ii>0;ii--) {
@@ -351,7 +351,7 @@ static void srcs_distribute_single(ParamCoLoRe *par,int ipop)
     int node_from=(NodeLeft -ii+NNodes)%NNodes;
     int npart_send=ns_transfer_matrix[NodeThis*NNodes+node_to];
     int npart_recv=ns_transfer_matrix[node_from*NNodes+NodeThis];
-    // print_info("Node %d: %d-th iteration. to->%d from->%d.",NodeThis,ii,node_to,node_from);
+    // print_info("MPI task %d: %d-th iteration. to->%d from->%d.",NodeThis,ii,node_to,node_from);
     // print_info(" Should get %07ld objects, and will send %07ld.\n",npart_recv,npart_send);
 #ifdef _HAVE_MPI
     MPI_Sendrecv(&(pos_ordered[NPOS_CC*i0_in_nodes[node_to]]),NPOS_CC*npart_send,MPI_FLOAT,node_to ,ii,
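
The srcs_distribute_single hunk sits inside a ring exchange: at step ii each task posts a single MPI_Sendrecv to its partner node_to while receiving from node_from, with counts taken from ns_transfer_matrix. Below is a compilable sketch of that exchange pattern with toy one-float payloads (the offsets and names are assumptions, not CoLoRe's actual bookkeeping).

#include <mpi.h>
#include <stdio.h>

int main(int argc,char **argv)
{
  int NodeThis,NNodes;
  MPI_Init(&argc,&argv);
  MPI_Comm_rank(MPI_COMM_WORLD,&NodeThis);
  MPI_Comm_size(MPI_COMM_WORLD,&NNodes);

  float send_buf=(float)NodeThis,recv_buf=-1;
  for(int ii=1;ii<NNodes;ii++) {
    /* Shift by ii around the ring so each pair of tasks is matched once. */
    int node_to  =(NodeThis+ii)%NNodes;
    int node_from=(NodeThis-ii+NNodes)%NNodes;
    /* A combined send/receive avoids the deadlock a fixed ordering of
       blocking MPI_Send/MPI_Recv calls could cause. */
    MPI_Sendrecv(&send_buf,1,MPI_FLOAT,node_to  ,ii,
                 &recv_buf,1,MPI_FLOAT,node_from,ii,
                 MPI_COMM_WORLD,MPI_STATUS_IGNORE);
    printf("MPI task %d: step %d sent to %d, received %.0f from %d\n",
           NodeThis,ii,node_to,recv_buf,node_from);
  }

  MPI_Finalize();
  return 0;
}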
