ERF
Energy Research and Forecasting: An Atmospheric Modeling Code
ERF_NCWpsFile.H File Reference
#include <sstream>
#include <string>
#include <atomic>
#include "AMReX_FArrayBox.H"
#include "AMReX_IArrayBox.H"
#include "ERF_EpochTime.H"
#include "ERF_NCInterface.H"
Include dependency graph for ERF_NCWpsFile.H:
This graph shows which files directly or indirectly include this file:

Go to the source code of this file.

Classes

struct  NDArray< DataType >
 

Typedefs

using PlaneVector = amrex::Vector< amrex::FArrayBox >
 

Enumerations

enum class  NC_Data_Dims_Type {
  Time_SL_SN_WE , Time_BT_SN_WE , Time_SN_WE , Time_BT ,
  Time_SL , Time , Time_BdyWidth_BT_SN , Time_BdyWidth_BT_WE ,
  Time_BdyWidth_SN , Time_BdyWidth_WE
}
 

Functions

int BuildFABsFromWRFBdyFile (const std::string &fname, amrex::Vector< amrex::Vector< amrex::FArrayBox >> &bdy_data_xlo, amrex::Vector< amrex::Vector< amrex::FArrayBox >> &bdy_data_xhi, amrex::Vector< amrex::Vector< amrex::FArrayBox >> &bdy_data_ylo, amrex::Vector< amrex::Vector< amrex::FArrayBox >> &bdy_data_yhi)
 
template<typename DType >
void ReadTimeSliceFromNetCDFFile (const std::string &fname, const int tidx, amrex::Vector< std::string > names, amrex::Vector< NDArray< DType > > &arrays, amrex::Vector< int > &success)
 
template<typename DType >
void ReadNetCDFFile (const std::string &fname, amrex::Vector< std::string > names, amrex::Vector< NDArray< DType > > &arrays, amrex::Vector< int > &success)
 
template<class FAB , typename DType >
void fill_fab_from_arrays (int iv, const amrex::Box &domain, amrex::Vector< NDArray< float >> &nc_arrays, const std::string &var_name, NC_Data_Dims_Type &NC_dim_type, FAB &temp)
 
template<class FAB , typename DType >
void BuildFABsFromNetCDFFile (const amrex::Box &domain, const std::string &fname, amrex::Vector< std::string > nc_var_names, amrex::Vector< enum NC_Data_Dims_Type > NC_dim_types, amrex::Vector< FAB * > fab_vars, amrex::Vector< int > &success)
 

Typedef Documentation

◆ PlaneVector

using PlaneVector = amrex::Vector<amrex::FArrayBox>

Enumeration Type Documentation

◆ NC_Data_Dims_Type

enum NC_Data_Dims_Type
strong
Enumerator
Time_SL_SN_WE 
Time_BT_SN_WE 
Time_SN_WE 
Time_BT 
Time_SL 
Time 
Time_BdyWidth_BT_SN 
Time_BdyWidth_BT_WE 
Time_BdyWidth_SN 
Time_BdyWidth_WE 
29  {
30  Time_SL_SN_WE, // Time, Soil Layers, South-North, West-East
31  Time_BT_SN_WE, // Time, Bottom-Top, South-North, West-East
32  Time_SN_WE,
33  Time_BT,
34  Time_SL, // Time, Soil layers
35  Time,
36  Time_BdyWidth_BT_SN,
37  Time_BdyWidth_BT_WE,
38  Time_BdyWidth_SN,
39  Time_BdyWidth_WE
40 };

Function Documentation

◆ BuildFABsFromNetCDFFile()

template<class FAB , typename DType >
void BuildFABsFromNetCDFFile ( const amrex::Box &  domain,
const std::string &  fname,
amrex::Vector< std::string >  nc_var_names,
amrex::Vector< enum NC_Data_Dims_Type >  NC_dim_types,
amrex::Vector< FAB * >  fab_vars,
amrex::Vector< int > &  success 
)

Function to read NetCDF variables and fill the corresponding Array4's

Parameters
fname  Name of the NetCDF file to be read
nc_var_names  Variable names in the NetCDF file
NC_dim_types  NetCDF data dimension types
fab_vars  Fab data we are to fill
373 {
374  int ioproc = amrex::ParallelDescriptor::IOProcessorNumber(); // I/O rank
375 
376  amrex::Vector<NDArray<float>> nc_arrays(nc_var_names.size());
377 
378  if (amrex::ParallelDescriptor::IOProcessor())
379  {
380  ReadNetCDFFile(fname, nc_var_names, nc_arrays, success);
381  }
382 
383  amrex::ParallelDescriptor::Bcast(success.dataPtr(), success.size(), ioproc);
384 
385  for (int iv = 0; iv < nc_var_names.size(); iv++)
386  {
387  if (success[iv] == 1) {
388  FAB tmp;
389  if (amrex::ParallelDescriptor::IOProcessor()) {
390  fill_fab_from_arrays<FAB,DType>(iv, domain, nc_arrays, nc_var_names[iv], NC_dim_types[iv], tmp);
391  }
392 
393  int ncomp = tmp.nComp();
394  amrex::Box box = tmp.box();
395 
396  amrex::ParallelDescriptor::Bcast(&box, 1, ioproc);
397  amrex::ParallelDescriptor::Bcast(&ncomp, 1, ioproc);
398 
399  if (!amrex::ParallelDescriptor::IOProcessor()) {
400 #ifdef AMREX_USE_GPU
401  tmp.resize(box,ncomp,amrex::The_Pinned_Arena());
402 #else
403  tmp.resize(box,ncomp);
404 #endif
405  }
406 
407  amrex::ParallelDescriptor::Bcast(tmp.dataPtr(), tmp.size(), ioproc);
408 
409  // Shift box by the domain lower corner
410  amrex::Box fab_bx = tmp.box();
411  amrex::Dim3 dom_lb = lbound(domain);
412  fab_bx += amrex::IntVect(dom_lb.x,dom_lb.y,dom_lb.z);
413  // fab_vars points to data on device
414  fab_vars[iv]->resize(fab_bx,1);
415 #ifdef AMREX_USE_GPU
416  amrex::Gpu::copy(amrex::Gpu::hostToDevice,
417  tmp.dataPtr(), tmp.dataPtr() + tmp.size(),
418  fab_vars[iv]->dataPtr());
419 #else
420  // Provided by BaseFab inheritance through FArrayBox
421  fab_vars[iv]->copy(tmp,tmp.box(),0,fab_bx,0,1);
422 #endif
423  } // success
424  } // iv
425 }
void ReadNetCDFFile(const std::string &fname, amrex::Vector< std::string > names, amrex::Vector< NDArray< DType > > &arrays, amrex::Vector< int > &success)
Definition: ERF_NCWpsFile.H:180
Here is the call graph for this function:

◆ BuildFABsFromWRFBdyFile()

int BuildFABsFromWRFBdyFile ( const std::string &  fname,
amrex::Vector< amrex::Vector< amrex::FArrayBox >> &  bdy_data_xlo,
amrex::Vector< amrex::Vector< amrex::FArrayBox >> &  bdy_data_xhi,
amrex::Vector< amrex::Vector< amrex::FArrayBox >> &  bdy_data_ylo,
amrex::Vector< amrex::Vector< amrex::FArrayBox >> &  bdy_data_yhi 
)

◆ fill_fab_from_arrays()

template<class FAB , typename DType >
void fill_fab_from_arrays ( int  iv,
const amrex::Box &  domain,
amrex::Vector< NDArray< float >> &  nc_arrays,
const std::string &  var_name,
NC_Data_Dims_Type &  NC_dim_type,
FAB &  temp 
)

Helper function for reading data from NetCDF file into a provided FAB.

Parameters
iv  Index for which variable we are going to fill
nc_arrays  Arrays of data from NetCDF file
var_name  Variable name
NC_dim_type  Dimension type for the variable as stored in the NetCDF file
temp  FAB where we store the variable data from the NetCDF Arrays
272 {
273  int ns1, ns2, ns3; // bottom_top, south_north, west_east (these can be staggered or unstaggered)
274  if (NC_dim_type == NC_Data_Dims_Type::Time_BT) {
275  ns1 = nc_arrays[iv].get_vshape()[1];
276  ns2 = 1;
277  ns3 = 1;
278  // amrex::Print() << "TYPE BT " << ns1 << std::endl;
279  } else if (NC_dim_type == NC_Data_Dims_Type::Time_SN_WE) {
280  ns1 = 1;
281  ns2 = nc_arrays[iv].get_vshape()[1];
282  ns3 = nc_arrays[iv].get_vshape()[2];
283  // amrex::Print() << "TYPE SN WE " << ns2 << " " << ns3 << std::endl;
284  } else if (NC_dim_type == NC_Data_Dims_Type::Time_BT_SN_WE) {
285  ns1 = nc_arrays[iv].get_vshape()[1];
286  ns2 = nc_arrays[iv].get_vshape()[2];
287  ns3 = nc_arrays[iv].get_vshape()[3];
288  // amrex::Print() << "TYPE BT SN WE " << ns1 << " " << ns2 << " " << ns3 << std::endl;
289  } else if (NC_dim_type == NC_Data_Dims_Type::Time_SL_SN_WE) {
290  ns1 = nc_arrays[iv].get_vshape()[1];
291  ns2 = nc_arrays[iv].get_vshape()[2];
292  ns3 = nc_arrays[iv].get_vshape()[3];
293  // amrex::Print() << "TYPE SL SN WE " << ns1 << " " << ns2 << " " << ns3 << std::endl;
294  } else if (NC_dim_type == NC_Data_Dims_Type::Time_SL) {
295  ns1 = nc_arrays[iv].get_vshape()[1];
296  ns2 = 1;
297  ns3 = 1;
298  // amrex::Print() << "TYPE SL " << ns1 << std::endl;
299  }
300  else {
301  amrex::Abort("Dont know this NC_Data_Dims_Type");
302  }
303 
304  // allow fewer z levels to be read in
305  // - ns1 may be the bottom_top or bottom_top_stag dim
306  // - domain.bigEnd(2) is the input number of cells in z
307  // - khi may be nz==bottom_top, nz < bottom_top, or 0 for 2D fields
308  int khi = std::min(domain.bigEnd(2), ns1-1);
309 
310  // TODO: The box will only start at (0,0,0) at level 0 -- we need to generalize this
311  amrex::Box in_box(amrex::IntVect(0,0,0), amrex::IntVect(ns3-1,ns2-1,ns1-1)); // ns1 may or may not be staggered
312  amrex::Box my_box(amrex::IntVect(0,0,0), amrex::IntVect(ns3-1,ns2-1,khi));
313 
314  if (var_name == "PH" || var_name == "PHB") {
315  my_box.setType(amrex::IndexType(amrex::IntVect(0,0,1)));
316  my_box.setBig(2, khi+1);
317  }
318  else if (var_name == "U" || var_name == "UU" || var_name == "MAPFAC_U") {
319  my_box.setType(amrex::IndexType(amrex::IntVect(1,0,0)));
320  }
321  else if (var_name == "V" || var_name == "VV" || var_name == "MAPFAC_V") {
322  my_box.setType(amrex::IndexType(amrex::IntVect(0,1,0)));
323  }
324  else if (var_name == "W" || var_name == "WW") {
325  my_box.setType(amrex::IndexType(amrex::IntVect(0,0,1)));
326  my_box.setBig(2, khi+1);
327  }
328  else if (var_name == "TSLB" || var_name == "SMOIS" || var_name == "SH2O" || var_name == "ZS" || var_name == "DZS") {
329  // soil variables are staggered in z
330  my_box.setType(amrex::IndexType(amrex::IntVect(0,0,1)));
331  }
332 
333  amrex::Arena* Arena_Used = amrex::The_Arena();
334 #ifdef AMREX_USE_GPU
335  // Make sure temp lives on CPU since nc_arrays lives on CPU only
336  Arena_Used = amrex::The_Pinned_Arena();
337 #endif
338  temp.resize(my_box,1, Arena_Used);
339  amrex::Array4<DType> fab_arr = temp.array();
340 
341  int ioff = temp.box().smallEnd()[0];
342  int joff = temp.box().smallEnd()[1];
343  int kmax = temp.box().bigEnd()[2]; // highest z level to be read in
344  //amrex::Print() << var_name << " domain khi, kmax, ns1 = " << khi << " " << kmax << " " << ns1 << std::endl;
345 
346  auto num_pts = in_box.numPts();
347 
348  for (int n(0); n < num_pts; ++n) {
349  int k = n / (ns2*ns3);
350  if (k > kmax) continue;
351  int j = (n - k*(ns2*ns3)) / ns3 + joff;
352  int i = n - k*(ns2*ns3) - (j-joff) * ns3 + ioff;
353  fab_arr(i,j,k,0) = static_cast<DType>(*(nc_arrays[iv].get_data()+n));
354  }
355 }

◆ ReadNetCDFFile()

template<typename DType >
void ReadNetCDFFile ( const std::string &  fname,
amrex::Vector< std::string >  names,
amrex::Vector< NDArray< DType > > &  arrays,
amrex::Vector< int > &  success 
)
182 {
183  AMREX_ASSERT(arrays.size() == names.size());
184 
185  if (amrex::ParallelDescriptor::IOProcessor())
186  {
187  auto ncf = ncutils::NCFile::open(fname, NC_CLOBBER | NC_NETCDF4);
188 
189  /*
190  // get the dimension information
191  int Time = static_cast<int>(ncf.dim("Time").len());
192  int DateStrLen = static_cast<int>(ncf.dim("DateStrLen").len());
193  int west_east = static_cast<int>(ncf.dim("west_east").len());
194  int south_north = static_cast<int>(ncf.dim("south_north").len());
195  int bottom_top = static_cast<int>(ncf.dim("bottom_top").len());
196  int bottom_top_stag = static_cast<int>(ncf.dim("bottom_top_stag").len());
197  int west_east_stag = static_cast<int>(ncf.dim("west_east_stag").len());
198  int south_north_stag = static_cast<int>(ncf.dim("south_north_stag").len());
199  int bdy_width = static_cast<int>(ncf.dim("bdy_width").len());
200  */
201 
202  // amrex::Print() << "Reading the dimensions from the netcdf file " << "\n";
203 
204  for (auto n=0; n<arrays.size(); ++n)
205  {
206  std::string vname_to_write = names[n];
207  std::string vname_to_read = names[n];
208 
209  if (vname_to_read.substr(0,2) == "R_") {
210  vname_to_read = names[n+4]; // This allows us to read "T" instead -- we will over-write this later
211  }
212 
213  success[n] = ncf.has_var(vname_to_read);
214  if (success[n] == 0) {
215  amrex::Print() << " Skipping " << vname_to_read << std::endl;
216  } else {
217  amrex::Print() << " Reading " << vname_to_read << std::endl;
218  }
219 
220  if (success[n] == 1) {
221 
222  std::vector<std::string> dimnames = ncf.var(vname_to_read).dimnames();
223  AMREX_ALWAYS_ASSERT(dimnames[0] == "Time" || dimnames[0] == "time");
224 
225  std::vector<size_t> shape = ncf.var(vname_to_read).shape();
226  arrays[n] = NDArray<DType>(vname_to_read,shape);
227  DType* dataPtr = arrays[n].get_data();
228 
229  std::vector<size_t> start(shape.size(), 0);
230 
231 #if 0
232  auto numPts = arrays[n].ndim();
233  amrex::Print() << "NetCDF Variable name = " << vname_to_read << std::endl;
234  amrex::Print() << "numPts read from NetCDF file/var = " << numPts << std::endl;
235  amrex::Print() << "Dims in var = " << ncf.var(vname_to_read).ndim() << std::endl;
236  amrex::Print() << "Dim names = (";
237  for (auto &dim:dimnames)
238  amrex::Print() << dim << ", " ;
239  amrex::Print() << ")" << std::endl;
240  amrex::Print() << "Dims of the variable = (";
241  for (auto &dim:shape)
242  amrex::Print() << dim << ", " ;
243  amrex::Print() << ")" << std::endl;
244 #endif
245 
246  ncf.var(vname_to_read).get(dataPtr, start, shape);
247 
248  } // has_var
249  }
250  ncf.close();
251  }
252 }
static NCFile open(const std::string &name, const int cmode=NC_NOWRITE)
Definition: ERF_NCInterface.cpp:707
Definition: ERF_NCWpsFile.H:52

Referenced by BuildFABsFromNetCDFFile().

Here is the call graph for this function:
Here is the caller graph for this function:

◆ ReadTimeSliceFromNetCDFFile()

template<typename DType >
void ReadTimeSliceFromNetCDFFile ( const std::string &  fname,
const int  tidx,
amrex::Vector< std::string >  names,
amrex::Vector< NDArray< DType > > &  arrays,
amrex::Vector< int > &  success 
)
138 {
139  amrex::Print() << "Reading time slice " << tidx << " from " << fname << std::endl;
140  AMREX_ASSERT(arrays.size() == names.size());
141 
142  if (amrex::ParallelDescriptor::IOProcessor())
143  {
144  auto ncf = ncutils::NCFile::open(fname, NC_CLOBBER | NC_NETCDF4);
145 
146  int ntimes = ncf.dim("Time").len();
147  AMREX_ALWAYS_ASSERT((tidx >= 0) && (tidx < ntimes));
148 
149  for (auto n=0; n<arrays.size(); ++n)
150  {
151  std::string vname_to_write = names[n];
152  std::string vname_to_read = names[n];
153  if (vname_to_read.substr(0,2) == "R_") {
154  vname_to_read = names[n+4]; // This allows us to read "T" instead -- we will over-write this later
155  }
156 
157  success[n] = ncf.has_var(vname_to_read);
158 
159  if (success[n] == 1)
160  {
161  std::vector<std::string> dimnames = ncf.var(vname_to_read).dimnames();
162  AMREX_ALWAYS_ASSERT(dimnames[0] == "Time");
163 
164  std::vector<size_t> count = ncf.var(vname_to_read).shape();
165  std::vector<size_t> start(count.size(), 0);
166  start[0] = tidx;
167  count[0] = 1;
168 
169  arrays[n] = NDArray<DType>(vname_to_read, count);
170  DType* dataPtr = arrays[n].get_data();
171 
172  ncf.var(vname_to_read).get(dataPtr, start, count);
173  } // has_var
174  }
175  ncf.close();
176  }
177 }
Here is the call graph for this function: