/* particle_massesFromMetaDataSingleCategory (fragment) */
void particle_massesFromMetaDataSingleCategory(
    Matrix& particle_masses,
    const ArrayOfArrayOfScatteringMetaData& scat_meta,
    const Verbosity&) {
  const Index np_total = TotalNumberOfElements(scat_meta);

  particle_masses.resize(np_total, 1);

  Index i_se_flat = 0;
  for (Index i_ss = 0; i_ss < scat_meta.nelem(); i_ss++) {
    for (Index i_se = 0; i_se < scat_meta[i_ss].nelem(); i_se++) {
      if (std::isnan(scat_meta[i_ss][i_se].mass) ||
          scat_meta[i_ss][i_se].mass <= 0 || scat_meta[i_ss][i_se].mass > 1.) {
        std::ostringstream os;
        os << "A presumably incorrect value found for "
           << "scat_meta[" << i_ss << "][" << i_se << "].mass.\n"
           << "The value is " << scat_meta[i_ss][i_se].mass;
        throw std::runtime_error(os.str());
      }

      if (scat_meta[i_ss][i_se].diameter_volume_equ <= 0 ||
          scat_meta[i_ss][i_se].diameter_volume_equ > 0.5) {
        std::ostringstream os;
        os << "A presumably incorrect value found for "
           << "scat_meta[" << i_ss << "][" << i_se << "].diameter_volume_equ.\n"
           << "The value is " << scat_meta[i_ss][i_se].diameter_volume_equ;
        throw std::runtime_error(os.str());
      }

      // All scattering elements are treated as a single category: the mass of
      // each element goes into the single column of *particle_masses*.
      particle_masses(i_se_flat, 0) = scat_meta[i_ss][i_se].mass;
      i_se_flat++;
    }
  }
}

/* particle_massesFromMetaData (fragment): one column per scattering species */
void particle_massesFromMetaData(
    Matrix& particle_masses,
    const ArrayOfArrayOfScatteringMetaData& scat_meta,
    const Verbosity&) {
  // ... (sizing of *particle_masses* elided in this listing)
  particle_masses = 0.;

  Index i_se_flat = 0;
  for (Index i_ss = 0; i_ss < scat_meta.nelem(); i_ss++) {
    for (Index i_se = 0; i_se < scat_meta[i_ss].nelem(); i_se++) {
      particle_masses(i_se_flat, i_ss) = scat_meta[i_ss][i_se].mass;
      i_se_flat++;
    }
  }
}
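// Illustration only (not part of the listing above): the two workspace methods
// differ only in how the per-element masses are arranged. The single-category
// variant stacks all scattering elements of all species into one column, while
// the general variant gives each scattering species its own column. A minimal
// sketch of that layout with plain STL containers (the real code uses the ARTS
// Matrix type); the helper names are made up for this illustration.
#include <vector>

std::vector<std::vector<double>> masses_single_category(
    const std::vector<std::vector<double>>& mass_per_species) {
  std::vector<std::vector<double>> out;  // one row per element, one column
  for (const auto& species : mass_per_species)
    for (double m : species) out.push_back({m});
  return out;
}

std::vector<std::vector<double>> masses_per_species(
    const std::vector<std::vector<double>>& mass_per_species) {
  const std::size_t nss = mass_per_species.size();
  std::vector<std::vector<double>> out;  // one row per element, nss columns
  std::size_t row = 0;
  for (std::size_t s = 0; s < nss; s++)
    for (double m : mass_per_species[s]) {
      out.push_back(std::vector<double>(nss, 0.0));
      out[row][s] = m;
      row++;
    }
  return out;
}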
/* pndFromPsdBasic (fragment) */
void pndFromPsdBasic(Matrix& pnd_data,
                     Tensor3& dpnd_data_dx,
                     const Vector& pnd_size_grid,
                     const Matrix& psd_data,
                     const Vector& psd_size_grid,
                     const Tensor3& dpsd_data_dx,
                     const Index& quad_order,
                     const Verbosity&) {
  // Some sizes
  const Index np = psd_data.nrows();
  const Index ng = psd_size_grid.nelem();
  Index ndx = 0;
  const bool do_dx = !dpsd_data_dx.empty();

  // Checks
  if (ng < 2)
    throw runtime_error(
        "The method requires that length of *psd_size_grid* is >= 2.");
  if (ng != pnd_size_grid.nelem())
    throw runtime_error(
        "So far, the method requires that *psd_size_grid* and "
        "*pnd_size_grid* have same length.");
  for (Index i = 0; i < ng; i++) {
    if (psd_size_grid[i] != pnd_size_grid[i])
      throw runtime_error(
          "So far, the method requires that *psd_size_grid* and "
          "*pnd_size_grid* are identical.");
  }
  if (psd_data.ncols() != ng)
    throw runtime_error(
        "Number of columns in *psd_data* and length of "
        "*psd_size_grid* must match.");

  pnd_data.resize(np, ng);
  if (do_dx) {
    if (dpsd_data_dx.ncols() != ng)
      throw runtime_error(
          "Number of columns in *dpsd_data_dx* and length of "
          "*psd_size_grid* must match.");
    ndx = dpsd_data_dx.npages();
    dpnd_data_dx.resize(ndx, np, ng);
  } else {
    dpnd_data_dx.resize(0, 0, 0);
  }

  // Sort the size grid and check that it holds no duplicates
  ArrayOfIndex intarr;
  Vector psd_size_grid_sorted(ng);
  get_sorted_indexes(intarr, psd_size_grid);
  for (Index i = 0; i < ng; i++)
    psd_size_grid_sorted[i] = psd_size_grid[intarr[i]];
  if (!is_increasing(psd_size_grid_sorted))
    throw std::runtime_error(
        "*psd_size_grid* is not allowed to contain "
        "duplicate values.");

  // Quadrature weights of the size bins
  Vector quadweights(ng);
  bin_quadweights(quadweights, psd_size_grid_sorted, quad_order);

  // pnd = quadrature weight * psd (and the same for the derivatives)
  for (Index i = 0; i < ng; i++) {
    for (Index ip = 0; ip < np; ip++) {
      pnd_data(ip, intarr[i]) = quadweights[i] * psd_data(ip, intarr[i]);
    }

    if (do_dx) {
      for (Index ip = 0; ip < np; ip++) {
        for (Index ix = 0; ix < ndx; ix++) {
          dpnd_data_dx(ix, ip, intarr[i]) =
              quadweights[i] * dpsd_data_dx(ix, ip, intarr[i]);
        }
      }
    }
  }
}
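// Illustration only (not part of the listing above): pndFromPsdBasic converts
// a PSD evaluated on a size grid into particle number densities by multiplying
// with per-bin quadrature weights, pnd[i] = w[i] * psd[i]. The weights below
// are a simple trapezoidal-style assumption made only for this sketch; the
// weights produced by the actual bin_quadweights() (and its quad_order
// argument) may differ in detail.
#include <vector>

std::vector<double> bin_weights(const std::vector<double>& x) {
  // x must be strictly increasing and hold at least two values,
  // mirroring the checks made by pndFromPsdBasic.
  const std::size_t n = x.size();
  std::vector<double> w(n, 0.0);
  w[0] = 0.5 * (x[1] - x[0]);
  w[n - 1] = 0.5 * (x[n - 1] - x[n - 2]);
  for (std::size_t i = 1; i + 1 < n; i++) w[i] = 0.5 * (x[i + 1] - x[i - 1]);
  return w;
}

std::vector<double> pnd_from_psd(const std::vector<double>& psd,
                                 const std::vector<double>& x) {
  const std::vector<double> w = bin_weights(x);
  std::vector<double> pnd(psd.size());
  for (std::size_t i = 0; i < psd.size(); i++) pnd[i] = w[i] * psd[i];
  return pnd;
}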
/* pndFromPsd (fragment) */
void pndFromPsd(Matrix& pnd_data,
                Tensor3& dpnd_data_dx,
                const Vector& pnd_size_grid,
                const Matrix& psd_data,
                const Vector& psd_size_grid,
                const Tensor3& dpsd_data_dx,
                const ArrayOfArrayOfSingleScatteringData& scat_data,
                const Vector& f_grid,
                const Index& scat_data_checked,
                const Index& quad_order,
                const Index& scat_index,
                const Numeric& threshold_rsec,
                const Numeric& threshold_bext,
                const Numeric& threshold_rpnd,
                const Verbosity&) {
  // Some sizes
  const Index np = psd_data.nrows();
  const Index ng = psd_size_grid.nelem();
  Index ndx = 0;
  const bool do_dx = !dpsd_data_dx.empty();

  // Checks
  if (ng < 3)
    throw runtime_error(
        "The method requires that length of *psd_size_grid* is >= 3.");
  if (ng != pnd_size_grid.nelem())
    throw runtime_error(
        "So far, the method requires that *psd_size_grid* and"
        " *pnd_size_grid* have the same length.");
  for (Index i = 0; i < ng; i++) {
    if (psd_size_grid[i] != pnd_size_grid[i])
      throw runtime_error(
          "So far, the method requires that *psd_size_grid* and"
          " *pnd_size_grid* are identical.");
  }
  if (psd_data.ncols() != ng)
    throw runtime_error(
        "Number of columns in *psd_data* and length of"
        " *psd_size_grid* must match.");

  pnd_data.resize(np, ng);
  if (do_dx) {
    if (dpsd_data_dx.ncols() != ng)
      throw runtime_error(
          "Number of columns in *dpsd_data_dx* and length of"
          " *psd_size_grid* must match.");
    ndx = dpsd_data_dx.npages();
    dpnd_data_dx.resize(ndx, np, ng);
  } else {
    dpnd_data_dx.resize(0, 0, 0);
  }

  if (!scat_data_checked)
    throw runtime_error(
        "*scat_data* must have passed a consistency check"
        " (scat_data_checked=1).\n"
        "Alternatively, use *pndFromPsdBasic*.");
  if (scat_index >= scat_data.nelem())
    throw runtime_error(
        "*scat_index* exceeds the number of available"
        " scattering species.");
  if (scat_data[scat_index].nelem() != ng)
    throw runtime_error(
        "Number of scattering elements in this scattering"
        " species (*scat_index*) inconsistent with length of"
        " *pnd_size_grid*.");

  // Sort the size grid and check that it holds no duplicates
  ArrayOfIndex intarr;
  Vector psd_size_grid_sorted(ng);
  get_sorted_indexes(intarr, psd_size_grid);
  for (Index i = 0; i < ng; i++)
    psd_size_grid_sorted[i] = psd_size_grid[intarr[i]];
  if (!is_increasing(psd_size_grid_sorted))
    throw std::runtime_error(
        "*psd_size_grid* is not allowed to contain "
        "duplicate values.");

  // Quadrature weights of the size bins
  Vector quadweights(ng);
  bin_quadweights(quadweights, psd_size_grid_sorted, quad_order);

  // pnd = quadrature weight * psd (and the same for the derivatives)
  for (Index i = 0; i < ng; i++) {
    for (Index ip = 0; ip < np; ip++)
      pnd_data(ip, intarr[i]) = quadweights[i] * psd_data(ip, intarr[i]);

    if (do_dx) {
      for (Index ip = 0; ip < np; ip++) {
        for (Index ix = 0; ix < ndx; ix++) {
          dpnd_data_dx(ix, ip, intarr[i]) =
              quadweights[i] * dpsd_data_dx(ix, ip, intarr[i]);
        }
      }
    }
  }
  // Consistency check: is the size grid wide and fine enough? The test is
  // based on the bulk extinction and the extinction of the edge bins.
  // ... (frequency-range setup (fstart, nf) and the local alias sds,
  //      presumably the single scattering data of the selected species,
  //      are elided in this listing)
  Matrix bulkext(np, nf, 0.);
  Vector ext(nf), ext_s0(nf), ext_s1(nf), ext_l0(nf), ext_l1(nf);

  // ...
  for (Index ise = 0; ise < ng; ise++) {
    // ...
    if (sds[intarr[ise]].ext_mat_data.nshelves() > 1)
      ext = sds[intarr[ise]].ext_mat_data(joker, 0, 0, 0, 0);
    else
      ext = sds[intarr[ise]].ext_mat_data(0, 0, 0, 0, 0);

    // Keep the extinction of the two smallest and two largest size bins
    if (ise == 0)
      ext_s0 = ext;
    else if (ise == 1)
      ext_s1 = ext;
    else if (ise == ng - 2)
      ext_l1 = ext;
    else if (ise == ng - 1)
      ext_l0 = ext;

    // Sum up the bulk extinction
    for (Index ip = 0; ip < np; ip++)
      if (abs(pnd_data(ip, joker).sum()) > 0.)
        for (Index f = fstart; f < (fstart + nf); f++)
          bulkext(ip, f) += pnd_data(ip, intarr[ise]) * ext[f];
  }

  Numeric max0 = 0, max1 = 0;
  for (Index ip = 0; ip < np; ip++) {
    max0 = max(abs(pnd_data(ip, intarr[0])), max0);
    max1 = max(abs(pnd_data(ip, intarr[ng - 1])), max1);
  }

  Numeric contrib;
  for (Index ip = 0; ip < np; ip++) {
    if (abs(pnd_data(ip, joker).sum()) > 0.) {
      for (Index f = fstart; f < (fstart + nf); f++) {
        // ...
        if (abs(bulkext(ip, f)) > 1e-2 * threshold_bext) {
          if (abs(psd_data(ip, intarr[0])) > 0. and
              ext_s0[f] * abs(psd_data(ip, intarr[0])) >=
                  ext_s1[f] * abs(psd_data(ip, intarr[1]))) {
            ostringstream os;
            os << " Bin-width normalized extinction (ext*psd) not decreasing"
               << " at small size edge\n"
               << " at atm level #" << ip << " and freq point #" << f << ".\n"
               << " ext_s0=" << ext_s0[f]
               << ", psd_s0=" << abs(psd_data(ip, intarr[0]))
               << ", ext_s0*psd_s0=" << ext_s0[f] * abs(psd_data(ip, intarr[0]))
               << "\n LARGER EQUAL\n"
               << " ext_s1=" << ext_s1[f]
               << ", psd_s1=" << abs(psd_data(ip, intarr[1]))
               << ", ext_s1*psd_s1=" << ext_s1[f] * abs(psd_data(ip, intarr[1]))
               << "\n"
               << " Total bulk ext = " << abs(bulkext(ip, f)) << "\n"
               << " Need to add smaller sized particles!\n";
            throw runtime_error(os.str());
          }

          if (abs(psd_data(ip, intarr[ng - 1])) > 0. and
              ext_l0[f] * abs(psd_data(ip, intarr[ng - 1])) >=
                  ext_l1[f] * abs(psd_data(ip, intarr[ng - 2]))) {
            ostringstream os;
            os << "Bin-width normalized extinction (ext*psd) not decreasing"
               << " at large size edge\n"
               << "at atm level #" << ip << " and freq point #" << f << ".\n"
               << " ext_l0=" << ext_l0[f]
               << ", psd_l0=" << abs(psd_data(ip, intarr[ng - 1]))
               << ", ext_l0*psd_l0="
               << ext_l0[f] * abs(psd_data(ip, intarr[ng - 1]))
               << "\n LARGER EQUAL\n"
               << " ext_l1=" << ext_l1[f]
               << ", psd_l1=" << abs(psd_data(ip, intarr[ng - 2]))
               << ", ext_l1*psd_l1="
               << ext_l1[f] * abs(psd_data(ip, intarr[ng - 2])) << "\n"
               << " Total bulk ext = " << abs(bulkext(ip, f)) << "\n"
               << " Need to add larger sized particles!\n";
            throw runtime_error(os.str());
          }
        }

        // Check the relative contribution of the edge bins
        if (abs(bulkext(ip, f)) > threshold_bext) {
          if (abs(pnd_data(ip, intarr[0])) > threshold_rpnd * max0) {
            contrib =
                abs(pnd_data(ip, intarr[0])) * ext_s0[f] / abs(bulkext(ip, f));
            // ...
            if (abs(pnd_data(ip, intarr[0])) * ext_s0[f] >
                threshold_rsec * abs(bulkext(ip, f))) {
              ostringstream os;
              os << "Contribution of edge bin to total extinction too high"
                 << " (" << contrib * 1e2 << "% of " << abs(bulkext(ip, f))
                 << ") at small size edge\n"
                 << "at atm level #" << ip << " and freq point #" << f << ".\n"
                 << " Need to add smaller sized particles or refine the size"
                 << " grid on the small size edge!\n";
              throw runtime_error(os.str());
            }
          }

          if (abs(pnd_data(ip, intarr[ng - 1])) > threshold_rpnd * max1) {
            contrib = abs(pnd_data(ip, intarr[ng - 1])) * ext_l0[f] /
                      abs(bulkext(ip, f));
            // ...
            if (abs(pnd_data(ip, intarr[ng - 1])) * ext_l0[f] >
                threshold_rsec * abs(bulkext(ip, f))) {
              ostringstream os;
              os << "Contribution of edge bin to total extinction too high"
                 << " (" << contrib * 1e2 << "% of " << abs(bulkext(ip, f))
                 << ") at large size edge\n"
                 << "at atm level #" << ip << " and freq point #" << f << ".\n"
                 << " Need to add larger sized particles or refine the size"
                 << " grid on the large size edge!\n";
              throw runtime_error(os.str());
            }
          }
        }
      }
    }
  }
}
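// Illustration only (not part of the listing above): the consistency test in
// pndFromPsd boils down to two conditions per size-grid edge. Written as
// standalone helpers with plain doubles instead of ARTS types (threshold_rsec
// corresponds to the input of the same name):
#include <cmath>

// (1) Bin-width normalized extinction must decrease towards the edge:
//     ext_edge * |psd_edge| < ext_next * |psd_next|.
bool edge_psd_decreasing(double ext_edge, double psd_edge,
                         double ext_next, double psd_next) {
  return ext_edge * std::abs(psd_edge) < ext_next * std::abs(psd_next);
}

// (2) The edge bin may only contribute a small fraction of the bulk
//     extinction: |pnd_edge| * ext_edge <= threshold_rsec * |bulkext|.
bool edge_contribution_ok(double pnd_edge, double ext_edge,
                          double bulkext, double threshold_rsec) {
  return std::abs(pnd_edge) * ext_edge <= threshold_rsec * std::abs(bulkext);
}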
/* pnd_fieldCalcFromParticleBulkProps (fragment) */
void pnd_fieldCalcFromParticleBulkProps(
    Workspace& ws,
    Tensor4& pnd_field,
    ArrayOfTensor4& dpnd_field_dx,
    const Index& atmosphere_dim,
    const Vector& p_grid,
    const Vector& lat_grid,
    const Vector& lon_grid,
    const Tensor3& t_field,
    const Index& cloudbox_on,
    const ArrayOfIndex& cloudbox_limits,
    const ArrayOfString& scat_species,
    const ArrayOfArrayOfSingleScatteringData& scat_data,
    const ArrayOfArrayOfScatteringMetaData& scat_meta,
    const Tensor4& particle_bulkprop_field,
    const ArrayOfString& particle_bulkprop_names,
    const ArrayOfAgenda& pnd_agenda_array,
    const ArrayOfArrayOfString& pnd_agenda_array_input_names,
    const Index& jacobian_do,
    const ArrayOfRetrievalQuantity& jacobian_quantities,
    const Verbosity&) {
  // Number of scattering species
  const Index nss = scat_data.nelem();

  // Checks (not complete)
  // ...
  if (particle_bulkprop_field.empty())
    throw runtime_error("*particle_bulkprop_field* is empty.");

  // ...
  chk_atm_field(
      "t_field", t_field, atmosphere_dim, p_grid, lat_grid, lon_grid);
  chk_atm_field("particle_bulkprop_field",
                particle_bulkprop_field,
                atmosphere_dim,
                particle_bulkprop_names.nelem(),
                p_grid,
                lat_grid,
                lon_grid);

  // Number of fields and of names must match
  if (particle_bulkprop_names.nelem() == 0 ||
      particle_bulkprop_names.nelem() != particle_bulkprop_field.nbooks()) {
    throw runtime_error(
        "Number of fields in *particle_bulkprop_field*"
        " inconsistent with number of names in"
        " *particle_bulkprop_names*.");
  }
  if (particle_bulkprop_field.nbooks() < nss) {
    throw runtime_error(
        "At least one field per scattering species required"
        " in *particle_bulkprop_field*.");
  }

  // Cloudbox limits
  if (cloudbox_limits.nelem() != 2 * atmosphere_dim)
    throw runtime_error(
        "Length of *cloudbox_limits* incorrect with respect "
        "to *atmosphere_dim*.");
  if (cloudbox_limits[1] <= cloudbox_limits[0] || cloudbox_limits[0] < 0 ||
      cloudbox_limits[1] >= p_grid.nelem())
    throw runtime_error("Invalid data in pressure part of *cloudbox_limits*.");
  if (atmosphere_dim > 1) {
    if (cloudbox_limits[3] <= cloudbox_limits[2] || cloudbox_limits[2] < 0 ||
        cloudbox_limits[3] >= lat_grid.nelem())
      throw runtime_error(
          "Invalid data in latitude part of *cloudbox_limits*.");
    if (atmosphere_dim > 2) {
      if (cloudbox_limits[5] <= cloudbox_limits[4] || cloudbox_limits[4] < 0 ||
          cloudbox_limits[5] >= lon_grid.nelem())
        throw runtime_error(
            "Invalid data in longitude part of *cloudbox_limits*.");
    }
  }

  // Consistency of the scattering species variables
  if (nss < 1) throw runtime_error("*scat_data* is empty!.");
  if (scat_species.nelem() != nss)
    throw runtime_error(
        "*scat_data* and *scat_species* are inconsistent in size.");
  if (scat_meta.nelem() != nss)
    throw runtime_error(
        "*scat_data* and *scat_meta* are inconsistent in size.");
  if (pnd_agenda_array.nelem() != nss)
    throw runtime_error(
        "*scat_data* and *pnd_agenda_array* are inconsistent "
        "in size.");
  if (pnd_agenda_array_input_names.nelem() != nss)
    throw runtime_error(
        "*scat_data* and *pnd_agenda_array_input_names* are "
        "inconsistent in size.");

  // Effective lengths of the cloudbox
  const Index np = cloudbox_limits[1] - cloudbox_limits[0] + 1;
  const Index ip_offset = cloudbox_limits[0];
  Index nlat = 1;
  Index ilat_offset = 0;
  if (atmosphere_dim > 1) {
    nlat = cloudbox_limits[3] - cloudbox_limits[2] + 1;
    ilat_offset = cloudbox_limits[2];
  }
  Index nlon = 1;
  Index ilon_offset = 0;
  if (atmosphere_dim > 2) {
    nlon = cloudbox_limits[5] - cloudbox_limits[4] + 1;
    ilon_offset = cloudbox_limits[4];
  }

  // *particle_bulkprop_field* must be zero outside the cloudbox
  const String estring =
      "*particle_bulkprop_field* allowed to contain"
      " non-zero values only inside the cloudbox.";
  Index dp_start, dp_end;
  if (cloudbox_limits[0] != 0) {
    if (max(particle_bulkprop_field(/* levels below the cloudbox (elided) */)) > 0 ||
        min(particle_bulkprop_field(/* levels below the cloudbox (elided) */)) < 0)
      throw runtime_error(estring);
    // ...
  }
  // ...
  if (cloudbox_limits[1] != p_grid.nelem() - 1) {
    const Index np_above = p_grid.nelem() + 1 - (np + ip_offset);
    if (max(particle_bulkprop_field(/* levels above the cloudbox (elided) */)) > 0 ||
        min(particle_bulkprop_field(/* levels above the cloudbox (elided) */)) < 0)
      throw runtime_error(estring);
    // ...
  }
  // ... (setting of dp_start/dp_end elided in this listing)

  // Cumulative number of scattering elements
  ArrayOfIndex ncumse(nss + 1);
  ncumse[0] = 0;
  for (Index i = 0; i < nss; i++) {
    if (scat_data[i].nelem() != scat_meta[i].nelem())
      throw runtime_error(
          "*scat_data* and *scat_meta* have inconsistent sizes.");
    ncumse[i + 1] = ncumse[i] + scat_data[i].nelem();
  }

  // Allocate output variables
  pnd_field.resize(ncumse[nss], np, nlat, nlon);

  // Help variables for the jacobian part
  ArrayOfArrayOfIndex scatspecies_to_jq;
  Index nq = 0;
  if (!jacobian_do) {
    dpnd_field_dx.resize(0);
  } else {
    nq = jacobian_quantities.nelem();
    dpnd_field_dx.resize(nq);
    scatspecies_to_jq.resize(nss);
    for (Index iq = 0; iq < nq; iq++) {
      if (jacobian_quantities[iq].MainTag() == SCATSPECIES_MAINTAG) {
        const Index ihit =
            find_first(scat_species, jacobian_quantities[iq].Subtag());
        if (ihit < 0) {
          ostringstream os;
          os << "Jacobian quantity with index " << iq << " refers to\n"
             << "  " << jacobian_quantities[iq].Subtag()
             << "\nbut this species could not be found in *scat_species*.";
          throw runtime_error(os.str());
        }
        scatspecies_to_jq[ihit].push_back(iq);
        dpnd_field_dx[iq].resize(ncumse[nss], np, nlat, nlon);
        dpnd_field_dx[iq] = 0.0;
      }
    }
  }

  // Loop the scattering species
  for (Index is = 0; is < nss; is++) {
    // Row range of this species in pnd_field
    Range se_range(ncumse[is], ncumse[is + 1] - ncumse[is]);

    // Map the agenda inputs to the fields of *particle_bulkprop_field*
    const Index nin = pnd_agenda_array_input_names[is].nelem();
    ArrayOfIndex i_pbulkprop(nin);
    for (Index i = 0; i < nin; i++) {
      i_pbulkprop[i] = find_first(particle_bulkprop_names,
                                  pnd_agenda_array_input_names[is][i]);
      if (i_pbulkprop[i] < 0) {
        ostringstream os;
        os << "Pnd-agenda with index " << is << " is set to require \""
           << pnd_agenda_array_input_names[is][i] << "\",\nbut this quantity "
           << "could not found in *particle_bulkprop_names*.\n"
           << "(Note that temperature must be written as \"Temperature\")";
        throw runtime_error(os.str());
      }
    }

    // Set *dpnd_data_dx_names*
    Index ndx = 0;
    ArrayOfString dpnd_data_dx_names(0);
    if (jacobian_do) {
      ndx = scatspecies_to_jq[is].nelem();
      dpnd_data_dx_names.resize(ndx);
      for (Index ix = 0; ix < ndx; ix++) {
        dpnd_data_dx_names[ix] =
            jacobian_quantities[scatspecies_to_jq[is][ix]].SubSubtag();
      }
    }

    // Loop the lat/lon positions inside the cloudbox
    for (Index ilon = 0; ilon < nlon; ilon++) {
      for (Index ilat = 0; ilat < nlat; ilat++) {
        // Nothing to do for lat/lon end points (2D/3D cases)
        if ((nlat > 1 && (ilat == 0 || ilat == nlat - 1)) ||
            (nlon > 1 && (ilon == 0 || ilon == nlon - 1))) {
          continue;
        }

        // Assemble the agenda input for this column
        Matrix pnd_agenda_input(np, nin);
        for (Index i = 0; i < nin; i++) {
          for (Index ip = 0; ip < np; ip++) {
            pnd_agenda_input(ip, i) =
                particle_bulkprop_field(i_pbulkprop[i],
                                        ip_offset + ip,
                                        ilat_offset + ilat,
                                        ilon_offset + ilon);
          }
        }

        Vector pnd_agenda_input_t(np);
        for (Index ip = 0; ip < np; ip++) {
          pnd_agenda_input_t[ip] =
              t_field(ip_offset + ip, ilat_offset + ilat, ilon_offset + ilon);
        }

        // Call the pnd-agenda of this scattering species
        Matrix pnd_data;
        Tensor3 dpnd_data_dx;
        pnd_agenda_arrayExecute(ws,
                                pnd_data,
                                dpnd_data_dx,
                                is,
                                pnd_agenda_input_t,
                                pnd_agenda_input,
                                pnd_agenda_array_input_names[is],
                                dpnd_data_dx_names,
                                pnd_agenda_array);

        // Copy to output variables
        for (Index ip = 0; ip < np; ip++) {
          pnd_field(se_range, ip, ilat, ilon) = pnd_data(ip, joker);
        }
        for (Index ix = 0; ix < ndx; ix++) {
          for (Index ip = dp_start; ip < dp_end; ip++) {
            dpnd_field_dx[scatspecies_to_jq[is][ix]](se_range, ip, ilat, ilon) =
                dpnd_data_dx(ix, ip, joker);
          }
        }
      }
    }
  }
}
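// Illustration only (not part of the listing above): pnd_field stacks the
// scattering elements of all species along its first dimension, and the
// cumulative counts (ncumse above) give each species a contiguous row range
// (se_range). A minimal sketch of that bookkeeping with plain STL types; the
// helper name is made up for this illustration.
#include <cstddef>
#include <utility>
#include <vector>

// Returns (offset, length) of the rows belonging to species `is`, given the
// number of scattering elements per species.
std::pair<std::size_t, std::size_t> species_row_range(
    const std::vector<std::size_t>& n_elem_per_species, std::size_t is) {
  std::size_t offset = 0;
  for (std::size_t s = 0; s < is; s++) offset += n_elem_per_species[s];
  return {offset, n_elem_per_species[is]};
}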
/* ScatSpeciesSizeMassInfo (fragment) */
void ScatSpeciesSizeMassInfo(Vector& scat_species_x,
                             Numeric& scat_species_a,
                             Numeric& scat_species_b,
                             const ArrayOfArrayOfScatteringMetaData& scat_meta,
                             const Index& species_index,
                             const String& x_unit,
                             const Numeric& x_fit_start,
                             const Numeric& x_fit_end,
                             const Index& do_only_x,
                             const Verbosity&) {
  // Checks
  const Index nss = scat_meta.nelem();
  if (nss == 0) throw runtime_error("*scat_meta* is empty!");
  if (nss < species_index + 1) {
    ostringstream os;
    os << "Selected scattering species index is " << species_index
       // ...
       << "is not allowed since *scat_meta* has only " << nss << " elements.";
    throw runtime_error(os.str());
  }

  const Index nse = scat_meta[species_index].nelem();
  if (nse < 2)
    throw runtime_error(
        "The scattering species must have at least two "
        "elements to use this method.");

  // Extract the particle masses
  Vector mass(nse);
  for (Index i = 0; i < nse; i++)
    mass[i] = scat_meta[species_index][i].mass;

  // Create the size grid according to *x_unit*
  scat_species_x.resize(nse);
  if (x_unit == "dveq") {
    for (Index i = 0; i < nse; i++)
      scat_species_x[i] = scat_meta[species_index][i].diameter_volume_equ;
    // ... (derivation of scat_species_a/b elided in this listing)
  }
  else if (x_unit == "dmax") {
    for (Index i = 0; i < nse; i++)
      scat_species_x[i] = scat_meta[species_index][i].diameter_max;
    // ...
  }
  else if (x_unit == "area") {
    for (Index i = 0; i < nse; i++)
      scat_species_x[i] =
          scat_meta[species_index][i].diameter_area_equ_aerodynamical;
    // ...
  }
  else if (x_unit == "mass") {
    scat_species_x = mass;
    // ...
  }
  else {
    ostringstream os;
    os << "You have selected the x_unit: " << x_unit
       << "while accepted choices are: \"dveq\", \"dmax\", \"mass\" and \"area\"";
    throw runtime_error(os.str());
  }
}
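// Illustration only (not part of the listing above): when do_only_x is false,
// the method also derives the parameters of a mass-size relation of the form
// m = a * x^b (via derive_scat_species_a_and_b). A common way to obtain such
// a and b is an ordinary least-squares fit in log-log space; the sketch below
// assumes that approach, uses plain STL types, and ignores the x_fit_start /
// x_fit_end restriction of the fit range used by the actual function.
#include <cmath>
#include <cstddef>
#include <vector>

void fit_mass_size_relation(double& a, double& b,
                            const std::vector<double>& x,
                            const std::vector<double>& mass) {
  // Fit log(m) = log(a) + b*log(x) by least squares.
  // Assumes at least two distinct, positive sizes and positive masses.
  const std::size_t n = x.size();
  double sx = 0, sy = 0, sxx = 0, sxy = 0;
  for (std::size_t i = 0; i < n; i++) {
    const double lx = std::log(x[i]);
    const double ly = std::log(mass[i]);
    sx += lx;
    sy += ly;
    sxx += lx * lx;
    sxy += lx * ly;
  }
  b = (n * sxy - sx * sy) / (n * sxx - sx * sx);
  a = std::exp((sy - b * sx) / n);
}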