ERF
Energy Research and Forecasting: An Atmospheric Modeling Code
ERF_NCWpsFile.H File Reference
#include <sstream>
#include <string>
#include <atomic>
#include "AMReX_FArrayBox.H"
#include "AMReX_IArrayBox.H"
#include "ERF_EpochTime.H"
#include "ERF_NCInterface.H"
Include dependency graph for ERF_NCWpsFile.H:
This graph shows which files directly or indirectly include this file:

Go to the source code of this file.

Classes

struct  NDArray< DataType >
 

Typedefs

using PlaneVector = amrex::Vector< amrex::FArrayBox >
 

Enumerations

enum class  NC_Data_Dims_Type {
  Time_SL_SN_WE , Time_BT_SN_WE , Time_SN_WE , Time_BT ,
  Time_SL , Time , Time_BdyWidth_BT_SN , Time_BdyWidth_BT_WE ,
  Time_BdyWidth_SN , Time_BdyWidth_WE
}
 

Functions

int BuildFABsFromWRFBdyFile (const std::string &fname, amrex::Vector< amrex::Vector< amrex::FArrayBox >> &bdy_data_xlo, amrex::Vector< amrex::Vector< amrex::FArrayBox >> &bdy_data_xhi, amrex::Vector< amrex::Vector< amrex::FArrayBox >> &bdy_data_ylo, amrex::Vector< amrex::Vector< amrex::FArrayBox >> &bdy_data_yhi)
 
template<typename DType >
void ReadTimeSliceFromNetCDFFile (const std::string &fname, const int tidx, amrex::Vector< std::string > names, amrex::Vector< NDArray< DType > > &arrays, amrex::Vector< int > &success)
 
template<typename DType >
void ReadNetCDFFile (const std::string &fname, amrex::Vector< std::string > names, amrex::Vector< NDArray< DType > > &arrays, amrex::Vector< int > &success)
 
template<class FAB , typename DType >
void fill_fab_from_arrays (int iv, const amrex::Box &domain, amrex::Vector< NDArray< float >> &nc_arrays, const std::string &var_name, NC_Data_Dims_Type &NC_dim_type, FAB &temp)
 
template<class FAB , typename DType >
void BuildFABsFromNetCDFFile (const amrex::Box &domain, const std::string &fname, amrex::Vector< std::string > nc_var_names, amrex::Vector< enum NC_Data_Dims_Type > NC_dim_types, amrex::Vector< FAB * > fab_vars, amrex::Vector< int > &success)
 

Typedef Documentation

◆ PlaneVector

using PlaneVector = amrex::Vector<amrex::FArrayBox>

Enumeration Type Documentation

◆ NC_Data_Dims_Type

/**
 * Dimension orderings for variables as stored in WPS/WRF NetCDF files.
 *
 * Each enumerator names the dimensions of a variable in slowest-to-fastest
 * order (Time is always first).  BT = bottom_top, SL = soil layers,
 * SN = south_north, WE = west_east; BdyWidth is the lateral-boundary width.
 * Dimensions may be staggered or unstaggered variants of the named axis.
 */
enum class NC_Data_Dims_Type {
    Time_SL_SN_WE,        // Time, Soil Layers, South-North, West-East
    Time_BT_SN_WE,        // Time, Bottom-Top, South-North, West-East
    Time_SN_WE,           // Time, South-North, West-East (2D fields)
    Time_BT,              // Time, Bottom-Top (1D column)
    Time_SL,              // Time, Soil Layers
    Time,                 // Time only
    Time_BdyWidth_BT_SN,  // Time, Boundary Width, Bottom-Top, South-North
    Time_BdyWidth_BT_WE,  // Time, Boundary Width, Bottom-Top, West-East
    Time_BdyWidth_SN,     // Time, Boundary Width, South-North
    Time_BdyWidth_WE      // Time, Boundary Width, West-East
};

Function Documentation

◆ BuildFABsFromNetCDFFile()

template<class FAB , typename DType >
void BuildFABsFromNetCDFFile ( const amrex::Box &  domain,
const std::string &  fname,
amrex::Vector< std::string >  nc_var_names,
amrex::Vector< enum NC_Data_Dims_Type NC_dim_types,
amrex::Vector< FAB * >  fab_vars,
amrex::Vector< int > &  success 
)

Function to read NetCDF variables and fill the corresponding Array4's

Parameters
fnameName of the NetCDF file to be read
nc_var_namesVariable names in the NetCDF file
NC_dim_typesNetCDF data dimension types
fab_varsFab data we are to fill
345 {
346  int ioproc = amrex::ParallelDescriptor::IOProcessorNumber(); // I/O rank
347 
348  amrex::Vector<NDArray<float>> nc_arrays(nc_var_names.size());
349 
350  if (amrex::ParallelDescriptor::IOProcessor())
351  {
352  ReadNetCDFFile(fname, nc_var_names, nc_arrays, success);
353  }
354 
355  amrex::ParallelDescriptor::Bcast(success.dataPtr(), success.size(), ioproc);
356 
357  for (int iv = 0; iv < nc_var_names.size(); iv++)
358  {
359  if (success[iv] == 1) {
360  FAB tmp;
361  if (amrex::ParallelDescriptor::IOProcessor()) {
362  fill_fab_from_arrays<FAB,DType>(iv, domain, nc_arrays, nc_var_names[iv], NC_dim_types[iv], tmp);
363  }
364 
365  int ncomp = tmp.nComp();
366  amrex::Box box = tmp.box();
367 
368  amrex::ParallelDescriptor::Bcast(&box, 1, ioproc);
369  amrex::ParallelDescriptor::Bcast(&ncomp, 1, ioproc);
370 
371  if (!amrex::ParallelDescriptor::IOProcessor()) {
372 #ifdef AMREX_USE_GPU
373  tmp.resize(box,ncomp,amrex::The_Pinned_Arena());
374 #else
375  tmp.resize(box,ncomp);
376 #endif
377  }
378 
379  amrex::ParallelDescriptor::Bcast(tmp.dataPtr(), tmp.size(), ioproc);
380 
381  // Shift box by the domain lower corner
382  amrex::Box fab_bx = tmp.box();
383  amrex::Dim3 dom_lb = lbound(domain);
384  fab_bx += amrex::IntVect(dom_lb.x,dom_lb.y,dom_lb.z);
385  // fab_vars points to data on device
386  fab_vars[iv]->resize(fab_bx,1);
387 #ifdef AMREX_USE_GPU
388  amrex::Gpu::copy(amrex::Gpu::hostToDevice,
389  tmp.dataPtr(), tmp.dataPtr() + tmp.size(),
390  fab_vars[iv]->dataPtr());
391 #else
392  // Provided by BaseFab inheritance through FArrayBox
393  fab_vars[iv]->copy(tmp,tmp.box(),0,fab_bx,0,1);
394 #endif
395  } // success
396  } // iv
397 }
Cross-reference: void ReadNetCDFFile(const std::string &fname, amrex::Vector< std::string > names, amrex::Vector< NDArray< DType > > &arrays, amrex::Vector< int > &success) — Definition: ERF_NCWpsFile.H:152
Here is the call graph for this function:

◆ BuildFABsFromWRFBdyFile()

int BuildFABsFromWRFBdyFile ( const std::string &  fname,
amrex::Vector< amrex::Vector< amrex::FArrayBox >> &  bdy_data_xlo,
amrex::Vector< amrex::Vector< amrex::FArrayBox >> &  bdy_data_xhi,
amrex::Vector< amrex::Vector< amrex::FArrayBox >> &  bdy_data_ylo,
amrex::Vector< amrex::Vector< amrex::FArrayBox >> &  bdy_data_yhi 
)

◆ fill_fab_from_arrays()

template<class FAB , typename DType >
void fill_fab_from_arrays ( int  iv,
const amrex::Box &  domain,
amrex::Vector< NDArray< float >> &  nc_arrays,
const std::string &  var_name,
NC_Data_Dims_Type NC_dim_type,
FAB &  temp 
)

Helper function for reading data from NetCDF file into a provided FAB.

Parameters
ivIndex for which variable we are going to fill
nc_arraysArrays of data from NetCDF file
var_nameVariable name
NC_dim_typeDimension type for the variable as stored in the NetCDF file
tempFAB where we store the variable data from the NetCDF Arrays
244 {
245  int ns1, ns2, ns3; // bottom_top, south_north, west_east (these can be staggered or unstaggered)
246  if (NC_dim_type == NC_Data_Dims_Type::Time_BT) {
247  ns1 = nc_arrays[iv].get_vshape()[1];
248  ns2 = 1;
249  ns3 = 1;
250  // amrex::Print() << "TYPE BT " << ns1 << std::endl;
251  } else if (NC_dim_type == NC_Data_Dims_Type::Time_SN_WE) {
252  ns1 = 1;
253  ns2 = nc_arrays[iv].get_vshape()[1];
254  ns3 = nc_arrays[iv].get_vshape()[2];
255  // amrex::Print() << "TYPE SN WE " << ns2 << " " << ns3 << std::endl;
256  } else if (NC_dim_type == NC_Data_Dims_Type::Time_BT_SN_WE) {
257  ns1 = nc_arrays[iv].get_vshape()[1];
258  ns2 = nc_arrays[iv].get_vshape()[2];
259  ns3 = nc_arrays[iv].get_vshape()[3];
260  // amrex::Print() << "TYPE BT SN WE " << ns1 << " " << ns2 << " " << ns3 << std::endl;
261  } else if (NC_dim_type == NC_Data_Dims_Type::Time_SL_SN_WE) {
262  ns1 = nc_arrays[iv].get_vshape()[1];
263  ns2 = nc_arrays[iv].get_vshape()[2];
264  ns3 = nc_arrays[iv].get_vshape()[3];
265  // amrex::Print() << "TYPE SL SN WE " << ns1 << " " << ns2 << " " << ns3 << std::endl;
266  } else if (NC_dim_type == NC_Data_Dims_Type::Time_SL) {
267  ns1 = nc_arrays[iv].get_vshape()[1];
268  ns2 = 1;
269  ns3 = 1;
270  // amrex::Print() << "TYPE SL " << ns1 << std::endl;
271  }
272  else {
273  amrex::Abort("Dont know this NC_Data_Dims_Type");
274  }
275 
276  // allow fewer z levels to be read in
277  // - ns1 may be the bottom_top or bottom_top_stag dim
278  // - domain.bigEnd(2) is the input number of cells in z
279  // - khi may be nz==bottom_top, nz < bottom_top, or 0 for 2D fields
280  int khi = std::min(domain.bigEnd(2), ns1-1);
281 
282  // TODO: The box will only start at (0,0,0) at level 0 -- we need to generalize this
283  amrex::Box in_box(amrex::IntVect(0,0,0), amrex::IntVect(ns3-1,ns2-1,ns1-1)); // ns1 may or may not be staggered
284  amrex::Box my_box(amrex::IntVect(0,0,0), amrex::IntVect(ns3-1,ns2-1,khi));
285 
286  if (var_name == "PH" || var_name == "PHB") {
287  my_box.setType(amrex::IndexType(amrex::IntVect(0,0,1)));
288  my_box.setBig(2, khi+1);
289  }
290  else if (var_name == "U" || var_name == "UU" || var_name == "MAPFAC_U") {
291  my_box.setType(amrex::IndexType(amrex::IntVect(1,0,0)));
292  }
293  else if (var_name == "V" || var_name == "VV" || var_name == "MAPFAC_V") {
294  my_box.setType(amrex::IndexType(amrex::IntVect(0,1,0)));
295  }
296  else if (var_name == "W" || var_name == "WW") {
297  my_box.setType(amrex::IndexType(amrex::IntVect(0,0,1)));
298  my_box.setBig(2, khi+1);
299  }
300  else if (var_name == "TSLB" || var_name == "SMOIS" || var_name == "SH2O" || var_name == "ZS" || var_name == "DZS") {
301  // soil variables are staggered in z
302  my_box.setType(amrex::IndexType(amrex::IntVect(0,0,1)));
303  }
304 
305  amrex::Arena* Arena_Used = amrex::The_Arena();
306 #ifdef AMREX_USE_GPU
307  // Make sure temp lives on CPU since nc_arrays lives on CPU only
308  Arena_Used = amrex::The_Pinned_Arena();
309 #endif
310  temp.resize(my_box,1, Arena_Used);
311  amrex::Array4<DType> fab_arr = temp.array();
312 
313  int ioff = temp.box().smallEnd()[0];
314  int joff = temp.box().smallEnd()[1];
315  int kmax = temp.box().bigEnd()[2]; // highest z level to be read in
316  //amrex::Print() << var_name << " domain khi, kmax, ns1 = " << khi << " " << kmax << " " << ns1 << std::endl;
317 
318  auto num_pts = in_box.numPts();
319 
320  for (int n(0); n < num_pts; ++n) {
321  int k = n / (ns2*ns3);
322  if (k > kmax) continue;
323  int j = (n - k*(ns2*ns3)) / ns3 + joff;
324  int i = n - k*(ns2*ns3) - (j-joff) * ns3 + ioff;
325  fab_arr(i,j,k,0) = static_cast<DType>(*(nc_arrays[iv].get_data()+n));
326  }
327 }
const int khi
Definition: ERF_InitCustomPert_Bubble.H:21

◆ ReadNetCDFFile()

template<typename DType >
void ReadNetCDFFile ( const std::string &  fname,
amrex::Vector< std::string >  names,
amrex::Vector< NDArray< DType > > &  arrays,
amrex::Vector< int > &  success 
)
154 {
155  AMREX_ASSERT(arrays.size() == names.size());
156 
157  if (amrex::ParallelDescriptor::IOProcessor())
158  {
159  auto ncf = ncutils::NCFile::open(fname, NC_CLOBBER | NC_NETCDF4);
160 
161  /*
162  // get the dimension information
163  int Time = static_cast<int>(ncf.dim("Time").len());
164  int DateStrLen = static_cast<int>(ncf.dim("DateStrLen").len());
165  int west_east = static_cast<int>(ncf.dim("west_east").len());
166  int south_north = static_cast<int>(ncf.dim("south_north").len());
167  int bottom_top = static_cast<int>(ncf.dim("bottom_top").len());
168  int bottom_top_stag = static_cast<int>(ncf.dim("bottom_top_stag").len());
169  int west_east_stag = static_cast<int>(ncf.dim("west_east_stag").len());
170  int south_north_stag = static_cast<int>(ncf.dim("south_north_stag").len());
171  int bdy_width = static_cast<int>(ncf.dim("bdy_width").len());
172  */
173 
174  // amrex::Print() << "Reading the dimensions from the netcdf file " << "\n";
175 
176  for (auto n=0; n<arrays.size(); ++n)
177  {
178  std::string vname_to_write = names[n];
179  std::string vname_to_read = names[n];
180 
181  if (vname_to_read.substr(0,2) == "R_") {
182  vname_to_read = names[n+4]; // This allows us to read "T" instead -- we will over-write this later
183  }
184 
185  success[n] = ncf.has_var(vname_to_read);
186  if (success[n] == 0) {
187  amrex::Print() << " Skipping " << vname_to_read << std::endl;
188  } else {
189  amrex::Print() << " Reading " << vname_to_read << std::endl;
190  }
191 
192  if (success[n] == 1) {
193 
194  std::vector<std::string> dimnames = ncf.var(vname_to_read).dimnames();
195  AMREX_ALWAYS_ASSERT(dimnames[0] == "Time" || dimnames[0] == "time");
196 
197  std::vector<size_t> shape = ncf.var(vname_to_read).shape();
198  arrays[n] = NDArray<DType>(vname_to_read,shape);
199  DType* dataPtr = arrays[n].get_data();
200 
201  std::vector<size_t> start(shape.size(), 0);
202 
203 #if 0
204  auto numPts = arrays[n].ndim();
205  amrex::Print() << "NetCDF Variable name = " << vname_to_read << std::endl;
206  amrex::Print() << "numPts read from NetCDF file/var = " << numPts << std::endl;
207  amrex::Print() << "Dims in var = " << ncf.var(vname_to_read).ndim() << std::endl;
208  amrex::Print() << "Dim names = (";
209  for (auto &dim:dimnames)
210  amrex::Print() << dim << ", " ;
211  amrex::Print() << ")" << std::endl;
212  amrex::Print() << "Dims of the variable = (";
213  for (auto &dim:shape)
214  amrex::Print() << dim << ", " ;
215  amrex::Print() << ")" << std::endl;
216 #endif
217 
218  ncf.var(vname_to_read).get(dataPtr, start, shape);
219 
220  } // has_var
221  }
222  ncf.close();
223  }
224 }
Cross-references: AMREX_ALWAYS_ASSERT(bx.length()[2]==khi+1); static NCFile ncutils::NCFile::open(const std::string &name, const int cmode=NC_NOWRITE) — Definition: ERF_NCInterface.cpp:707; struct NDArray — Definition: ERF_NCWpsFile.H:52

Referenced by BuildFABsFromNetCDFFile().

Here is the call graph for this function:
Here is the caller graph for this function:

◆ ReadTimeSliceFromNetCDFFile()

template<typename DType >
void ReadTimeSliceFromNetCDFFile ( const std::string &  fname,
const int  tidx,
amrex::Vector< std::string >  names,
amrex::Vector< NDArray< DType > > &  arrays,
amrex::Vector< int > &  success 
)
110 {
111  amrex::Print() << "Reading time slice " << tidx << " from " << fname << std::endl;
112  AMREX_ASSERT(arrays.size() == names.size());
113 
114  if (amrex::ParallelDescriptor::IOProcessor())
115  {
116  auto ncf = ncutils::NCFile::open(fname, NC_CLOBBER | NC_NETCDF4);
117 
118  int ntimes = ncf.dim("Time").len();
119  AMREX_ALWAYS_ASSERT((tidx >= 0) && (tidx < ntimes));
120 
121  for (auto n=0; n<arrays.size(); ++n)
122  {
123  std::string vname_to_write = names[n];
124  std::string vname_to_read = names[n];
125  if (vname_to_read.substr(0,2) == "R_") {
126  vname_to_read = names[n+4]; // This allows us to read "T" instead -- we will over-write this later
127  }
128 
129  success[n] = ncf.has_var(vname_to_read);
130 
131  if (success[n] == 1)
132  {
133  std::vector<std::string> dimnames = ncf.var(vname_to_read).dimnames();
134  AMREX_ALWAYS_ASSERT(dimnames[0] == "Time");
135 
136  std::vector<size_t> count = ncf.var(vname_to_read).shape();
137  std::vector<size_t> start(count.size(), 0);
138  start[0] = tidx;
139  count[0] = 1;
140 
141  arrays[n] = NDArray<DType>(vname_to_read, count);
142  DType* dataPtr = arrays[n].get_data();
143 
144  ncf.var(vname_to_read).get(dataPtr, start, count);
145  } // has_var
146  }
147  ncf.close();
148  }
149 }
Here is the call graph for this function: