dimensionSetIO.C

/*---------------------------------------------------------------------------*\
  =========                 |
  \\      /  F ield         | OpenFOAM: The Open Source CFD Toolbox
   \\    /   O peration     |
    \\  /    A nd           | www.openfoam.com
     \\/     M anipulation  |
-------------------------------------------------------------------------------
    Copyright (C) 2011-2015 OpenFOAM Foundation
    Copyright (C) 2019-2022 OpenCFD Ltd.
-------------------------------------------------------------------------------
License
    This file is part of OpenFOAM.

    OpenFOAM is free software: you can redistribute it and/or modify it
    under the terms of the GNU General Public License as published by
    the Free Software Foundation, either version 3 of the License, or
    (at your option) any later version.

    OpenFOAM is distributed in the hope that it will be useful, but WITHOUT
    ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
    FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
    for more details.

    You should have received a copy of the GNU General Public License
    along with OpenFOAM.  If not, see <http://www.gnu.org/licenses/>.

\*---------------------------------------------------------------------------*/

#include "dictionary.H"
#include "dimensionSet.H"
#include "dimensionedScalar.H"
#include "IOstreams.H"
#include <limits>

// * * * * * * * * * * * * * * * * Constructors * * * * * * * * * * * * * * //

Foam::dimensionSet::dimensionSet
(
    const word& entryName,
    const dictionary& dict,
    IOobjectOption::readOption readOpt
)
:
    exponents_(Zero)
{
    readEntry(entryName, dict, readOpt);
}


Foam::dimensionSet::dimensionSet(Istream& is)
{
    is >> *this;
}


Foam::dimensionSet::tokeniser::tokeniser(Istream& is)
:
    is_(is),
    tokens_(100),
    start_(0),
    size_(0)
{}


// * * * * * * * * * * * * * * * Member Functions * * * * * * * * * * * * * //

void Foam::dimensionSet::tokeniser::push(const token& t)
{
    const label end = (start_+size_)%tokens_.size();
    tokens_[end] = t;
    if (size_ == tokens_.size())
    {
        start_ = tokens_.fcIndex(start_);
    }
    else
    {
        ++size_;
    }
}


Foam::token Foam::dimensionSet::tokeniser::pop()
{
    token t = tokens_[start_];
    start_ = tokens_.fcIndex(start_);
    --size_;
    return t;
}


void Foam::dimensionSet::tokeniser::unpop(const token& t)
{
    ++size_;
    start_ = tokens_.rcIndex(start_);
    tokens_[start_] = t;
}

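// Editorial note (not part of the original source): push(), pop() and
// unpop() implement a small fixed-capacity circular buffer of tokens.
// push() stores at (start_ + size_) modulo the capacity and overwrites the
// oldest entry when full; pop() takes from the front; unpop() puts a token
// back onto the front, which is what putBack() below relies on.
// Illustrative sketch only, using the members defined above:
//
//     tokeniser tis(is);
//     tis.push(token(word("kg")));   // enqueue at the back
//     token t = tis.pop();           // dequeue from the front -> "kg"
//     tis.unpop(t);                  // restore it to the front
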
bool Foam::dimensionSet::tokeniser::hasToken() const
{
    return size_ || is_.good();
}


bool Foam::dimensionSet::tokeniser::valid(char c)
{
    return
    (
        !isspace(c)
     && c != '"'   // string quote
     && c != '\''  // string quote
     && c != '/'   // div
     && c != ';'   // end statement
     && c != '{'   // beg subdict
     && c != '}'   // end subdict
     && c != '('   // beg expr
     && c != ')'   // end expr
     && c != '['   // beg dim
     && c != ']'   // end dim
     && c != '^'   // power
     && c != '*'   // mult
    );
}


Foam::label Foam::dimensionSet::tokeniser::priority(const token& t)
{
    if (t.isPunctuation())
    {
        if
        (
            t.pToken() == token::MULTIPLY
         || t.pToken() == token::DIVIDE
        )
        {
            return 2;
        }
        else if (t.pToken() == '^')
        {
            return 3;
        }
    }

    // Default priority
    return 0;
}

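// Editorial note (not part of the original source): priority() provides the
// operator precedence used by parse() below: '^' (3) binds more tightly
// than '*' and '/' (2); any other token has priority 0. For example:
//
//     tokeniser::priority(token(token::MULTIPLY));               // -> 2
//     tokeniser::priority(token(token::punctuationToken('^')));  // -> 3
//
// so an expression like "kg m^-3" groups as kg*(m^-3).
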

void Foam::dimensionSet::tokeniser::splitWord(const word& w)
{
    size_t start = 0;
    for (size_t i=0; i<w.size(); ++i)
    {
        if (!valid(w[i]))
        {
            if (i > start)
            {
                const word subWord = w.substr(start, i-start);
                if (isdigit(subWord[0]) || subWord[0] == token::SUBTRACT)
                {
                    push(token(readScalar(subWord)));
                }
                else
                {
                    push(token(subWord));
                }
            }
            if (w[i] != token::SPACE)
            {
                if (isdigit(w[i]))
                {
                    // Single digit: as scalar value
                    const scalar val = (w[i] - '0');
                    push(token(val));
                }
                else
                {
                    push(token(token::punctuationToken(w[i])));
                }
            }
            start = i+1;
        }
    }
    if (start < w.size())
    {
        const word subWord = w.substr(start);
        if (isdigit(subWord[0]) || subWord[0] == token::SUBTRACT)
        {
            push(token(readScalar(subWord)));
        }
        else
        {
            push(token(subWord));
        }
    }
}

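// Editorial note (not part of the original source): splitWord() re-tokenises
// a word at every character rejected by valid(). For example:
//
//     "kg^2"  ->  word token 'kg', punctuation '^', scalar token 2
//     "m^-3"  ->  word token 'm',  punctuation '^', scalar token -3
//
// Fragments starting with a digit or '-' are pushed as scalar tokens via
// readScalar(); the separator characters themselves become punctuation
// tokens.
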

Foam::token Foam::dimensionSet::tokeniser::nextToken()
{
    if (size_ == 0)
    {
        token t(is_);
        if (t.isWord())
        {
            splitWord(t.wordToken());
            return pop();
        }
        else
        {
            return t;
        }
    }
    else
    {
        return pop();
    }
}


void Foam::dimensionSet::tokeniser::putBack(const token& t)
{
    if (size_ == 0)
    {
        push(t);
    }
    else
    {
        unpop(t);
    }
}


// * * * * * * * * * * * * * * * Member Functions * * * * * * * * * * * * * //

void Foam::dimensionSet::round(const scalar tol)
{
    scalar integralPart;
    for (scalar& val : exponents_)
    {
        const scalar fractionalPart = std::modf(val, &integralPart);

        if (mag(fractionalPart-1.0) <= tol)
        {
            val = 1.0+integralPart;
        }
        else if (mag(fractionalPart+1.0) <= tol)
        {
            val = -1.0+integralPart;
        }
        else if (mag(fractionalPart) <= tol)
        {
            val = integralPart;
        }
    }
}

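// Editorial note (not part of the original source): round() snaps every
// exponent lying within tol of an integer onto that integer, which tidies
// up the small floating-point drift introduced by pow() with non-integer
// exponents. Illustrative sketch (hypothetical values):
//
//     dimensionSet d(0, 1.9999999999, 0, 0, 0);
//     d.round(10*dimensionSet::smallExponent);   // length exponent -> 2
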

Foam::dimensionedScalar Foam::dimensionSet::parse
(
    const label lastPrior,
    tokeniser& tis,
    const HashTable<dimensionedScalar>& readSet
) const
{
    dimensionedScalar ds("", dimless, 1);

    // Get initial token
    token nextToken(tis.nextToken());

    // Store type of last token read. Used to detect two consecutive
    // symbols and assume multiplication
    bool haveReadSymbol = false;


    while (true)
    {
        if (nextToken.isWord())
        {
            const word& unitName = nextToken.wordToken();
            const dimensionedScalar& unitDim = readSet[unitName];
            ds.dimensions() *= unitDim.dimensions();
            ds.value() *= unitDim.value();
            haveReadSymbol = true;
        }
        else if (nextToken.isNumber())
        {
            // no dimensions, just value
            ds.value() *= nextToken.number();
            haveReadSymbol = true;
        }
        else if (nextToken.isPunctuation())
        {
            label nextPrior = tokeniser::priority(nextToken);

            if (nextToken.pToken() == token::BEGIN_SQR)
            {
                // No idea when this will happen
                tis.putBack(nextToken);
                return ds;
            }
            else if (nextToken.pToken() == token::END_SQR)
            {
                tis.putBack(nextToken);
                return ds;
            }
            else if (nextToken.pToken() == token::BEGIN_LIST)
            {
                dimensionedScalar sub(parse(nextPrior, tis, readSet));

                token t = tis.nextToken();
                if (!t.isPunctuation() || t.pToken() != token::END_LIST)
                {
                    FatalIOErrorInFunction(tis.stream())
                        << "Illegal token " << t << exit(FatalIOError);
                }

                ds.dimensions() *= sub.dimensions();
                ds.value() *= sub.value();

                haveReadSymbol = true;
            }
            else if (nextToken.pToken() == token::END_LIST)
            {
                tis.putBack(nextToken);
                return ds;
            }
            else if (nextToken.pToken() == token::MULTIPLY)
            {
                if (nextPrior > lastPrior)
                {
                    dimensionedScalar sub(parse(nextPrior, tis, readSet));

                    ds.dimensions() *= sub.dimensions();
                    ds.value() *= sub.value();
                }
                else
                {
                    // Restore token
                    tis.putBack(nextToken);
                    return ds;
                }
                haveReadSymbol = false;
            }
            else if (nextToken.pToken() == token::DIVIDE)
            {
                if (nextPrior > lastPrior)
                {
                    dimensionedScalar sub(parse(nextPrior, tis, readSet));

                    ds.dimensions() /= sub.dimensions();
                    ds.value() /= sub.value();
                }
                else
                {
                    tis.putBack(nextToken);
                    return ds;
                }
                haveReadSymbol = false;
            }
            else if (nextToken.pToken() == '^')
            {
                if (nextPrior > lastPrior)
                {
                    dimensionedScalar expon(parse(nextPrior, tis, readSet));

                    ds.dimensions().reset(pow(ds.dimensions(), expon.value()));
                    // Round to nearest integer if close to it
                    ds.dimensions().round(10*smallExponent);
                    ds.value() = Foam::pow(ds.value(), expon.value());
                }
                else
                {
                    tis.putBack(nextToken);
                    return ds;
                }
                haveReadSymbol = false;
            }
            else
            {
                FatalIOErrorInFunction(tis.stream())
                    << "Illegal token " << nextToken << exit(FatalIOError);
            }
        }
        else
        {
            FatalIOErrorInFunction(tis.stream())
                << "Illegal token " << nextToken << exit(FatalIOError);
        }


        if (!tis.hasToken())
        {
            break;
        }

        nextToken = tis.nextToken();
        if (nextToken.error())
        {
            break;
        }

        if (haveReadSymbol && (nextToken.isWord() || nextToken.isNumber()))
        {
            // Two consecutive symbols. Assume multiplication
            tis.putBack(nextToken);
            nextToken = token(token::MULTIPLY);
        }
    }

    return ds;
}

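// Editorial note (not part of the original source): parse() is a small
// precedence-climbing recursive-descent parser over unit symbols, driven by
// tokeniser::priority(). With the default unitSet(), a specification such as
//
//     [kg m^-1 s^-2]
//
// reduces to a dimensionedScalar holding the pressure dimensions, with the
// accumulated unit scaling in its value; two consecutive symbols with no
// operator between them are treated as multiplication.
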

bool Foam::dimensionSet::readEntry
(
    const word& entryName,
    const dictionary& dict,
    IOobjectOption::readOption readOpt
)
{
    if (readOpt == IOobjectOption::NO_READ)
    {
        return false;
    }

    const entry* eptr = dict.findEntry(entryName, keyType::LITERAL);

    if (eptr)
    {
        const entry& e = *eptr;
        ITstream& is = e.stream();

        is >> *this;

        e.checkITstream(is);

        return true;
    }
    else if (IOobjectOption::isReadRequired(readOpt))
    {
        FatalIOErrorInFunction(dict)
            << "Entry '" << entryName << "' not found in dictionary "
            << dict.relativeName() << nl
            << exit(FatalIOError);
    }

    return false;
}

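// Editorial note (not part of the original source): readEntry() is what the
// dictionary constructor at the top of this file delegates to. Illustrative
// sketch, assuming 'dict' contains an entry such as
//
//     dimensions      [0 2 -1 0 0 0 0];
//
//     dimensionSet dims(dimless);
//     dims.readEntry("dimensions", dict);  // MUST_READ: FatalIOError if absent
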

Foam::Istream& Foam::dimensionSet::read
(
    Istream& is,
    scalar& multiplier,
    const HashTable<dimensionedScalar>& readSet
)
{
    multiplier = 1.0;

    // Read beginning of dimensionSet
    token startToken(is);

    if (startToken != token::BEGIN_SQR)
    {
        FatalIOErrorInFunction(is)
            << "Expected a '" << token::BEGIN_SQR << "' in dimensionSet\n"
            << "in stream " << is.info() << nl
            << exit(FatalIOError);
    }

    // Read next token
    token nextToken(is);

    if (!nextToken.isNumber())
    {
        is.putBack(nextToken);

        tokeniser tis(is);

        dimensionedScalar ds(parse(0, tis, readSet));

        multiplier = ds.value();
        exponents_ = ds.dimensions().values();
    }
    else
    {
        // Read first five dimensions
        exponents_[dimensionSet::MASS] = nextToken.number();
        for (int d=1; d < dimensionSet::CURRENT; ++d)
        {
            is >> exponents_[d];
        }

        // Read next token
        token nextToken(is);

        // If next token is another number
        // read last two dimensions
        // and then read another token for the end of the dimensionSet
        if (nextToken.isNumber())
        {
            exponents_[dimensionSet::CURRENT] = nextToken.number();
            is >> nextToken;
            exponents_[dimensionSet::LUMINOUS_INTENSITY] = nextToken.number();
            is >> nextToken;
        }
        else
        {
            exponents_[dimensionSet::CURRENT] = 0;
            exponents_[dimensionSet::LUMINOUS_INTENSITY] = 0;
        }

        // Check end of dimensionSet
        if (nextToken != token::END_SQR)
        {
            FatalIOErrorInFunction(is)
                << "Expected a '" << token::END_SQR << "' in dimensionSet\n"
                << "in stream " << is.info() << nl
                << exit(FatalIOError);
        }
    }

    is.check(FUNCTION_NAME);
    return is;
}

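// Editorial note (not part of the original source): this reader accepts both
// the numeric exponent forms and the named units handled by parse(), e.g.
//
//     [0 2 -1 0 0]         // mass length time temperature moles
//     [0 2 -1 0 0 0 0]     // ... plus current and luminous intensity
//     [m^2 s^-1]           // symbolic units, resolved through readSet
//
// all yield the dimensions of kinematic viscosity; for symbolic units any
// unit scaling is returned in 'multiplier'.
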

Foam::Istream& Foam::dimensionSet::read
(
    Istream& is,
    scalar& multiplier
)
{
    return read(is, multiplier, unitSet());
}


Foam::Istream& Foam::dimensionSet::read
(
    Istream& is,
    scalar& multiplier,
    const dictionary& readSet
)
{
    multiplier = 1.0;

    // Read beginning of dimensionSet
    token startToken(is);

    if (startToken != token::BEGIN_SQR)
    {
        FatalIOErrorInFunction(is)
            << "Expected a '" << token::BEGIN_SQR << "' in dimensionSet\n"
            << "in stream " << is.info() << nl
            << exit(FatalIOError);
    }

    // Read next token
    token nextToken(is);

    if (nextToken.isWord())
    {
        bool continueParsing = true;
        do
        {
            word symbolPow = nextToken.wordToken();
            if (symbolPow.back() == token::END_SQR)
            {
                symbolPow.resize(symbolPow.size()-1);
                continueParsing = false;
            }


            // Parse unit
            dimensionSet symbolSet; // dimless

            const auto index = symbolPow.find('^');
            if (index != std::string::npos)
            {
                const word symbol = symbolPow.substr(0, index);
                const scalar exponent = readScalar(symbolPow.substr(index+1));

                dimensionedScalar s;
                s.read(readSet.lookup(symbol, keyType::LITERAL), readSet);

                symbolSet.reset(pow(s.dimensions(), exponent));

                // Round to nearest integer if close to it
                symbolSet.round(10*smallExponent);
                multiplier *= Foam::pow(s.value(), exponent);
            }
            else
            {
                dimensionedScalar s;
                s.read(readSet.lookup(symbolPow, keyType::LITERAL), readSet);

                symbolSet.reset(s.dimensions());
                multiplier *= s.value();
            }

            // Add dimensions without checking
            for (int i=0; i < dimensionSet::nDimensions; ++i)
            {
                exponents_[i] += symbolSet[i];
            }

            if (continueParsing)
            {
                nextToken = token(is);

                if (!nextToken.isWord() || nextToken == token::END_SQR)
                {
                    continueParsing = false;
                }
            }
        }
        while (continueParsing);
    }
    else
    {
        // Read first five dimensions
        exponents_[dimensionSet::MASS] = nextToken.number();
        for (int d=1; d < dimensionSet::CURRENT; ++d)
        {
            is >> exponents_[d];
        }

        // Read next token
        token nextToken(is);

        // If next token is another number
        // read last two dimensions
        // and then read another token for the end of the dimensionSet
        if (nextToken.isNumber())
        {
            exponents_[dimensionSet::CURRENT] = nextToken.number();
            is >> nextToken;
            exponents_[dimensionSet::LUMINOUS_INTENSITY] = nextToken.number();
            is >> nextToken;
        }
        else
        {
            exponents_[dimensionSet::CURRENT] = 0;
            exponents_[dimensionSet::LUMINOUS_INTENSITY] = 0;
        }

        // Check end of dimensionSet
        if (nextToken != token::END_SQR)
        {
            FatalIOErrorInFunction(is)
                << "Expected a '" << token::END_SQR << "' in dimensionSet\n"
                << "in stream " << is.info() << nl
                << exit(FatalIOError);
        }
    }

    is.check(FUNCTION_NAME);
    return is;
}

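// Editorial note (not part of the original source): this overload resolves
// each symbol through a dictionary of unit definitions rather than the
// global unitSet(). Illustrative sketch, assuming 'readSet' defines a 'cm'
// entry equal to 0.01 m:
//
//     IStringStream is("[cm^2 s^-1]");
//     scalar mult = 1;
//     dimensionSet dims(dimless);
//     dims.read(is, mult, readSet);   // dims: [0 2 -1 0 0], mult: 1e-4
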

Foam::Ostream& Foam::dimensionSet::write
(
    Ostream& os,
    scalar& multiplier,
    const dimensionSets& writeUnits
) const
{
    multiplier = 1.0;

    os << token::BEGIN_SQR;

    if (writeUnits.good() && os.format() == IOstreamOption::ASCII)
    {
        scalarField exponents(dimensionSet::nDimensions);
        for (int d=0; d < dimensionSet::nDimensions; ++d)
        {
            exponents[d] = exponents_[d];
        }
        writeUnits.coefficients(exponents);

        bool hasPrinted = false;

        // Set precision to lots
        std::streamsize oldPrecision = os.precision
        (
            std::numeric_limits<scalar>::digits10
        );

        forAll(exponents, i)
        {
            if (mag(exponents[i]) > smallExponent)
            {
                const dimensionedScalar& ds = writeUnits.units()[i];

                if (hasPrinted)
                {
                    os << token::SPACE;
                }
                hasPrinted = true;
                os << ds.name();
                if (mag(exponents[i]-1) > smallExponent)
                {
                    os << '^' << exponents[i];

                    multiplier *= Foam::pow(ds.value(), exponents[i]);
                }
                else
                {
                    multiplier *= ds.value();
                }
            }
        }

        // Reset precision
        os.precision(oldPrecision);
    }
    else
    {
        for (int d=0; d < dimensionSet::nDimensions; ++d)
        {
            if (d) os << token::SPACE;
            os << exponents_[d];
        }
    }

    os << token::END_SQR;

    os.check(FUNCTION_NAME);
    return os;
}

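// Editorial note (not part of the original source): when 'writeUnits' is
// valid and the stream is ASCII, the exponents are re-expressed as powers of
// the named units of that set (via coefficients()), so pressure dimensions
// may be written as e.g.  [kg m^-1 s^-2]  and the associated conversion
// factor is returned in 'multiplier'; otherwise the seven numeric exponents
// are written unchanged.
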

Foam::Ostream& Foam::dimensionSet::write
(
    Ostream& os,
    scalar& multiplier
) const
{
    return write(os, multiplier, writeUnitSet());
}


// * * * * * * * * * * * * * * * IOstream Operators * * * * * * * * * * * * //

Foam::Istream& Foam::operator>>(Istream& is, dimensionSet& ds)
{
    scalar mult(1.0);
    ds.read(is, mult);

    if (mag(mult-1.0) > dimensionSet::smallExponent)
    {
        FatalIOErrorInFunction(is)
            << "Cannot use scaled units in dimensionSet"
            << exit(FatalIOError);
    }

    is.check(FUNCTION_NAME);
    return is;
}


Foam::Ostream& Foam::operator<<(Ostream& os, const dimensionSet& ds)
{
    scalar mult(1.0);
    ds.write(os, mult);

    os.check(FUNCTION_NAME);
    return os;
}

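// Editorial note (not part of the original source): the stream operators are
// the interface most code uses. Illustrative sketch:
//
//     IStringStream is("[0 1 -2 0 0 0 0]");
//     dimensionSet accel(dimless);
//     is >> accel;             // acceleration dimensions
//     Info<< accel << endl;    // with default write units: [0 1 -2 0 0 0 0]
//
// operator>> refuses unit strings whose scaling differs from 1, because a
// bare dimensionSet has nowhere to store the multiplier.
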

// ************************************************************************* //