openModeller  Version 1.5.0
garp.cpp
Go to the documentation of this file.
1 
34 #include "garp.hh"
35 #include "rules_base.hh"
36 #include "rules_range.hh"
37 #include "rules_negrange.hh"
38 #include "rules_logit.hh"
39 #include "ruleset.hh"
40 #include "bioclim_histogram.hh"
41 #include "regression.hh"
43 #include <openmodeller/Random.hh>
46 
47 #include <string>
48 using std::string;
49 
50 #define NUM_PARAM 4
51 
52 /****************************************************************/
53 /*** Algorithm parameter metadata *******************************/
54 
56 {
57  // Metadata of the first parameter.
58  {
59  "MaxGenerations", // Id.
60  "Max generations", // Name.
61  Integer, // Type.
62 
63  // Overview.
64  "Maximum number of iterations run by the Genetic Algorithm.",
65 
66  // Description.
67  "Maximum number of iterations (generations) run by the Genetic\
68  Algorithm.",
69 
70  1, // Not zero if the parameter has lower limit.
71  1, // Parameter's lower limit.
72  0, // Not zero if the parameter has upper limit.
73  0, // Parameter's upper limit.
74  "400" // Parameter's typical (default) value.
75  },
76 
77  {
78  "ConvergenceLimit", // Id.
79  "Convergence limit", // Name.
80  Real, // Type.
81 
82  // Overview.
83  "Defines the convergence value that makes the algorithm stop\
84  (before reaching MaxGenerations).",
85 
86  // Description.
87  "",
88 
89  1, // Not zero if the parameter has lower limit.
90  0.0, // Parameter's lower limit.
91  1, // Not zero if the parameter has upper limit.
92  1.0, // Parameter's upper limit.
93  "0.01" // Parameter's typical (default) value.
94  },
95 
96  {
97  "PopulationSize", // Id.
98  "Population size", // Name.
99  Integer, // Type.
100 
101  "Maximum number of rules to be kept in solution.", // Overview.
102  "Maximum number of rules to be kept in solution.", // Description
103 
104  1, // Not zero if the parameter has lower limit.
105  1, // Parameter's lower limit.
106  1, // Not zero if the parameter has upper limit.
107  500, // Parameter's upper limit.
108  "50" // Parameter's typical (default) value.
109  },
110 
111  {
112  "Resamples", // Id.
113  "Resamples", // Name.
114  Integer, // Type.
115 
116  // Overview.
117  "Number of points sampled (with replacement) used to test rules.",
118 
119  // Description.
120  "Number of points sampled (with replacement) used to test rules.",
121 
122  1, // Not zero if the parameter has lower limit.
123  1, // Parameter's lower limit.
124  1, // Not zero if the parameter has upper limit.
125  100000, // Parameter's upper limit.
126  "2500" // Parameter's typical (default) value.
127  }
128 };
129 
130 
131 /*****************************************************************/
132 /*** Algorithm's general metadata ********************************/
133 
135 
136  "GARP", // Id.
137  "GARP (single run) - new openModeller implementation", // Name.
138  "3.3", // Version.
139 
140  // Overview.
141  "GARP is a genetic algorithm that creates ecological niche \
142 models for species. The models describe environmental conditions \
143 under which the species should be able to maintain populations. \
144 For input, GARP uses a set of point localities where the species \
145 is known to occur and a set of geographic layers representing \
146 the environmental parameters that might limit the species' \
147 capabilities to survive. Please refer to algorithm description for \
148 more information about the differences between this new GARP \
149 implementation and the Desktop GARP implementation.",
150 
151  // Description.
152  "GARP is a genetic algorithm that creates ecological niche \
153 models for species. The models describe environmental conditions \
154 under which the species should be able to maintain populations. \
155 For input, GARP uses a set of point localities where the species \
156 is known to occur and a set of geographic layers representing \
157 the environmental parameters that might limit the species' \
158 capabilities to survive. This implementation is a complete rewrite \
159 of the DesktopGarp code, and it also contains the following \
160 changes/improvements: (1) Gene values changed from integers (between 1 \
161 and 253) to floating point numbers (between -1.0 and 1.0). This avoids \
162 precision problems in environment values during projection (for example, \
163 if an environment variable has the value 2.56 in some raster cell and \
164 2.76 in another one, DesktopGarp rounds them off to 3). (2) Atomic rules \
165 were removed since they seem to have little significance compared to the \
166 other rules. (3) Heuristic operator parameters (percentage of mutation \
167 and crossover per iteration) are now static since they used to converge \
168 to fixed values during the very first iterations. This implementation \
169 simply keeps the converged values. (4) A bug was fixed in the procedure \
170 responsible for ordering the rules. When a rule was only replacing \
171 another, it was being included in the wrong position.",
172 
173  // Author
174  "Stockwell, D. R. B., modified by Ricardo Scachetti Pereira",
175 
176  // Bibliography.
177  "Stockwell, D. R. B. 1999. Genetic algorithms II. \
178 Pages 123-144 in A. H. Fielding, editor. \
179 Machine learning methods for ecological applications. \
180 Kluwer Academic Publishers, Boston.\
181 \n\
182 Stockwell, D. R. B., and D. P. Peters. 1999. \
183 The GARP modelling system: Problems and solutions to automated \
184 spatial prediction. International Journal of Geographic \
185 Information Systems 13:143-158.\
186 \n\
187 Stockwell, D. R. B., and I. R. Noble. 1992. \
188 Induction of sets of rules from animal distribution data: \
189 A robust and informative method of analysis. Mathematics and \
190 Computers in Simulation 33:385-390.",
191 
192  "Ricardo Scachetti Pereira", // Code author.
193  "ricardo [at] tdwg . org", // Code author's contact.
194 
195  0, // Does not accept categorical data.
196  1, // Does not need (pseudo)absence points.
197 
198  NUM_PARAM, // Algorithm's parameters.
199  parameters
200 };
201 
202 
203 
204 /****************************************************************/
205 /****************** Algorithm's factory function ****************/
206 #ifndef DONT_EXPORT_GARP_FACTORY
207 OM_ALG_DLL_EXPORT
208 AlgorithmImpl *
210 {
211  return new Garp();
212 }
213 
214 OM_ALG_DLL_EXPORT
215 AlgMetadata const *
217 {
218  return &metadata;
219 }
220 #endif
221 
222 
223 /****************************************************************/
224 /****************** Garp class **********************************/
225 
227 
228 /****************************************************************/
229 /****************** Garp constructor ****************************/
230 
// Garp::Garp() — NOTE(review): the constructor's signature line
// (original line 231) is missing from this doxygen-extracted listing.
// Registers the algorithm metadata with the AlgorithmImpl base class
// and gives every numeric member a deterministic starting value; the
// real parameter values are read later in initialize().
232  : AlgorithmImpl(& metadata)
233 {
234  // fill in default values for parameters
235  _popsize = 0;
236  _resamples = 0;
237  _max_gen = 0;
238  _conv_limit = 0.0;
239 
240  _mortality = 0.9;
241  _gapsize = 0.1;
242  _acc_limit = 0.0;
243 
244  _crossover_rate = 0.1;
245  _mutation_rate = 0.6;
246 
247  _significance = 2.70;
248 
249  _maxProgress = 0.0;
250 
251  // reset private attributes
252  _fittest = _offspring = NULL;
253 
254  _gen = 0;
255  _convergence = 1.0;
256  _improvements = 0;
257 
258  int i;
// zero both heuristic-operator counter arrays (5 slots each)
259  for (i = 0; i < 5; i++)
260  {
261  _curr_heur_count[i] = 0;
262  _prev_heur_count[i] = 0;
263  }
264 
// environment values are rescaled into [-1.0, +1.0]; this matches the
// floating-point gene range described in the algorithm metadata above
265  _normalizerPtr = new ScaleNormalizer( -1.0, +1.0, true );
266 }
267 
268 /****************************************************************/
269 /****************** Garp destructor *****************************/
// Garp::~Garp() — NOTE(review): the destructor's signature line
// (original line 270) is missing from this extracted listing.
// Releases the two rule sets owned by the algorithm.
271 {
272  // debug
273  if ( _fittest )
274  {
275  //Log::instance()->debug( "Resulting rules:\n");
276  //_fittest->log();
277  }
278 
// delete checks are redundant (delete NULL is a no-op) but harmless
279  if (_offspring)
280  delete _offspring;
281 
282  if (_fittest)
283  delete _fittest;
284 }
285 
286 
287 // ****************************************************************
288 // ************* initialize ***************************************
289 
// int Garp::initialize() — NOTE(review): the signature line (original
// line 290) is missing from this extracted listing.  Reads the four
// user-visible parameters and allocates the working rule sets.
// Returns 0 (and logs an error) when any parameter is absent,
// 1 on success.
291 {
292  if (!getParameter("MaxGenerations", &_max_gen)) {
293  Log::instance()->error("Parameter MaxGenerations not set properly.");
294  return 0;
295  }
296 
297  if (!getParameter("ConvergenceLimit", &_conv_limit)) {
298  Log::instance()->error("Parameter ConvergenceLimit not set properly.");
299  return 0;
300  }
301 
302  if (!getParameter("PopulationSize", &_popsize)) {
303  Log::instance()->error("Parameter PopulationSize not set properly.");
304  return 0;
305  }
306 
307  if (!getParameter("Resamples", &_resamples)) {
308  Log::instance()->error("Parameter Resamples not set properly.");
309  return 0;
310  }
311 
312  //Log::instance()->debug("MaxGenerations set to: %d\n", _max_gen);
313  //Log::instance()->debug("ConvergenceLimit set to: %.4f\n", _conv_limit);
314  //Log::instance()->debug("PopulationSize set to: %d\n", _popsize);
315  //Log::instance()->debug("Resamples set to: %d\n", _resamples);
316 
// rule sets are sized at twice the population so selection/insertion
// can overfill before the set is trimmed back
317  _offspring = new GarpRuleSet(2 * _popsize);
318  _fittest = new GarpRuleSet(2 * _popsize);
319 
// NOTE(review): original lines 320-325 are missing from this extracted
// listing — presumably the cacheSamples / histogram / regression
// initialization calls; confirm against the repository source.
323 
325 
326  return 1;
327 }
328 
329 /****************************************************************/
330 /****************************************************************/
// Draws <resamples> points (with replacement) from <sampler> and
// returns them through <cachedOccs> as a new reference-counted
// occurrences object.  The cached set is what rules are evaluated
// against, avoiding repeated sampling during the GA run.
331 void Garp::cacheSamples(const SamplerPtr& sampler,
332  OccurrencesPtr& cachedOccs,
333  int resamples)
334 {
// NOTE(review): original line 336 (the remaining OccurrencesImpl
// constructor arguments) is missing from this extracted listing.
335  OccurrencesImpl * occs = new OccurrencesImpl( "",
337  occs->reserve( resamples );
338  cachedOccs = ReferenceCountedPointer<OccurrencesImpl>( occs );
339 
340  for (int i = 0; i < resamples; ++i)
341  {
342  OccurrencePtr oc = sampler->getOneSample();
343  cachedOccs->insert(oc);
344  }
345 }
346 
347 
348 /****************************************************************/
349 /****************** iterate *************************************/
350 
// int Garp::iterate() — NOTE(review): the signature line (original
// line 351) is missing from this extracted listing.  Runs one GA
// generation and always returns 1; done() tells the caller when to
// stop iterating.
352 {
353  double perfBest, perfWorst, perfAvg;
354 
// nothing to do once converged or generation cap reached
355  if (done())
356  return 1;
357 
358  _gen++;
359 
// NOTE(review): original lines 360-362 are missing from this extracted
// listing — presumably the evaluate/keepFittest sequencing for the
// offspring set; confirm against the repository source.
363 
364  //_fittest->gatherRuleSetStats(_gen);
365  //_offspring->gatherRuleSetStats(-_gen);
366 
367 
// NOTE(review): original line 368 (the opening of a performanceSummary
// call whose trailing arguments appear below) is missing.
369  &perfBest, &perfWorst, &perfAvg);
370  /*
371  // log info about current iteration
372  Log::instance()->debug( "%4d] ", _gen );
373  Log::instance()->debug( "[%2d] conv=%+7.4f | perfs=%+8.3f, %+8.3f, %+8.3f\n", _fittest->numRules(),
374  _convergence, perfBest, perfWorst, perfAvg );
375  */
376 
377  if (done())
378  {
379  // finalize processing of model
380  // by filtering out rules that have low performance
// NOTE(review): original line 381 (the filter call) is missing.
382  return 1;
383  }
384 
385  // algorithm is not done yet
386  // select fittest individuals
// NOTE(review): original line 387 (keepFittest call) is missing.
388 
389  // create new offspring
// NOTE(review): original lines 390-393 (select / colonize / mutate /
// crossover calls) are missing from this extracted listing.
394 
395  return 1;
396 }
397 
398 /****************************************************************/
399 /****************** getProgress *********************************/
400 
401 float Garp::getProgress() const
402 {
403  if (done())
404  { return 1.0; }
405  else
406  {
407  float byIterations = ( _gen / (float) _max_gen );
408  float byConvergence = (float)( _conv_limit / _convergence );
409  float progress = (byIterations > byConvergence) ? byIterations : byConvergence;
410  if (progress > _maxProgress)
411  { _maxProgress = progress; }
412  return _maxProgress;
413  }
414 }
415 
416 /****************************************************************/
417 /****************** done ****************************************/
418 
419 int Garp::done() const
420 {
421  return ( (_gen >= _max_gen) || (_convergence < _conv_limit) );
422 }
423 
424 /****************************************************************/
425 /****************** getValue ************************************/
426 
// Model output for one environmental sample: delegates to the fittest
// rule set.  NOTE(review): assumes _fittest is non-NULL, i.e.
// initialize() or _setConfiguration() has run — confirm callers
// guarantee this.
427 Scalar Garp::getValue( const Sample& x ) const
428 {
429  return _fittest->getValue(x);
430 }
431 
432 /****************************************************************/
433 /****************** getConvergence ******************************/
434 
// Copies the current convergence measure into <val>.
// Always returns 0 (this implementation reports no error condition).
435 int Garp::getConvergence( Scalar * const val ) const
436 {
437  *val = _convergence;
438  return 0;
439 }
440 
441 /****************************************************************/
442 /****************** configuration *******************************/
443 void
// Garp::_getConfiguration(ConfigurationPtr&) const — NOTE(review):
// the parameter line (original line 444) is missing from this
// extracted listing.  Serializes the finished model into <config>:
// the GA summary values plus one "Rule" subsection per rule in the
// fittest set.  Does nothing until the model is done().
445 {
446  if ( !done() )
447  return;
448 
449  ConfigurationPtr model_config ( new ConfigurationImpl("Garp") );
450  config->addSubsection( model_config );
451 
452  model_config->addNameValue( "Generations", _gen );
453  model_config->addNameValue( "AccuracyLimit", _acc_limit );
454  model_config->addNameValue( "Mortality", _mortality );
455  model_config->addNameValue( "Significance", _significance );
456  model_config->addNameValue( "FinalCrossoverRate", _crossover_rate );
457  model_config->addNameValue( "FinalMutationRate", _mutation_rate );
458  model_config->addNameValue( "FinalGapSize", _gapsize );
459 
460  if ( _fittest ) {
461 
462  int nrules = _fittest->numRules();
463 
464  ConfigurationPtr rules_config( new ConfigurationImpl("FittestRules") );
465  model_config->addSubsection( rules_config );
466 
467  rules_config->addNameValue( "Count", nrules );
468 
469  for( int i=0; i<nrules; i++ ) {
470 
471  GarpRule *rule = _fittest->get(i);
// single rule-type character rendered as a C string; the 16-byte
// buffer is ample for "%c", though snprintf would be more defensive
472  char type[16];
473  sprintf(type, "%c", rule->type() );
474 
475  ConfigurationPtr rule_config( new ConfigurationImpl("Rule") );
476  rules_config->addSubsection( rule_config );
477 
478  rule_config->addNameValue( "Type", type );
479  rule_config->addNameValue( "Prediction", rule->getPrediction() );
480  rule_config->addNameValue( "Chromosome1", rule->getChrom1());
481  rule_config->addNameValue( "Chromosome2", rule->getChrom2());
// 10 performance values are serialized for each rule; the same count
// is assumed on deserialization in _setConfiguration
482  rule_config->addNameValue( "Performance", rule->getPerformanceArray(), 10 );
483  }
484  }
485 
486 }
487 
488 void
// Garp::_setConfiguration — NOTE(review): the signature line (original
// line 489, taking a ConstConfigurationPtr per the class declaration)
// is missing from this extracted listing.  Rebuilds a serialized model:
// restores the GA summary values, re-allocates the rule sets, and
// reconstructs every rule from the "FittestRules" subsection.
// Silently returns when the "Garp" subsection is absent.
490 {
491  ConstConfigurationPtr model_config = config->getSubsection( "Garp", false );
492 
493  if (!model_config) {
494 
495  return;
496  }
497 
498  _gen = model_config->getAttributeAsInt( "Generations", 0 );
499  _acc_limit = model_config->getAttributeAsDouble( "AccuracyLimit", 0.0 );
500  _mortality = model_config->getAttributeAsDouble( "Mortality", 0.0 );
501  _significance = model_config->getAttributeAsDouble( "Significance", 0.0 );
502  _crossover_rate = model_config->getAttributeAsDouble( "FinalCrossoverRate", 0.0 );
503  _mutation_rate = model_config->getAttributeAsDouble( "FinalMutationRate", 0.0 );
504  _gapsize = model_config->getAttributeAsDouble( "FinalGapSize", 0.0 );
505 
506  // Need to read at least this parameter
507  if ( ! getParameter("PopulationSize", &_popsize) ) {
508 
509  Log::instance()->error("Could not read parameter PopulationSize from serialized model.");
510  return;
511  }
512 
513  _offspring = new GarpRuleSet( 2 * _popsize );
514  _fittest = new GarpRuleSet( 2 * _popsize );
515 
516  /*
517  * This code is commented out for now. Need to figure out how
518  * to get the algorithm primed with its custom sampler after
519  * it's deserialized.
520  */
521  //_bioclimHistogram.initialize( _cachedOccs );
522 
523  ConstConfigurationPtr rules_config = model_config->getSubsection( "FittestRules" );
524 
525  //next line commented out since the var is not used after being declared
526  //int nrules = rules_config->getAttributeAsInt( "Count", 0 );
527 
528  Configuration::subsection_list::const_iterator ss;
529  for( ss = rules_config->getAllSubsections().begin();
530  ss != rules_config->getAllSubsections().end();
531  ++ss ) {
532 
533  const ConstConfigurationPtr& c(*ss);
534  GarpRule * rule = NULL;
535 
536  string type = c->getAttribute( "Type" );
537 
538  Scalar pred = c->getAttributeAsDouble( "Prediction", 0.0 );
539 
540  Sample p_chrom1 = c->getAttributeAsSample( "Chromosome1" );
541 
542  Sample p_chrom2 = c->getAttributeAsSample( "Chromosome2" );
543 
544  Scalar *p_perf;
545  int n_perf;
546  c->getAttributeAsDoubleArray( "Performance", &p_perf, &n_perf );
547 
// type characters map to concrete rule classes; presumably these tags
// come from GarpRule::type() — verify the 'r' -> LogitRule pairing
// against the rule class definitions
548  switch( type[0] ) {
549  case 'd':
550  rule = new RangeRule( pred, p_chrom1.size(), p_chrom1, p_chrom2, p_perf );
551  break;
552 
553  case 'r':
554  rule = new LogitRule( pred, p_chrom1.size(), p_chrom1, p_chrom2, p_perf );
555  break;
556 
557  case '!':
558  rule = new NegatedRangeRule( pred, p_chrom1.size(), p_chrom1, p_chrom2, p_perf );
559  break;
560 
561  }
562 
563  delete [] p_perf;
564 
// NOTE(review): if the type character matched no case above, rule is
// still NULL here and a NULL pointer is added to the rule set —
// confirm GarpRuleSet::add tolerates that.
565  _fittest->add(rule);
566 
567  }
568 
569 }
570 
571 /****************************************************************/
572 /***************** GARP Algorithm private methods ***************/
573 /****************************************************************/
574 
575 
576 /****************************************************************/
577 /***************** keepFittest **********************************/
578 
// Merges <source> into <target>, ranked by <perfIndex>: a rule similar
// to an existing target rule replaces it only when it performs better;
// a novel rule is simply inserted.  The count of similar rules seen
// feeds the running convergence estimate (_convergence), which drives
// the done() stopping test.
579 void Garp::keepFittest(GarpRuleSet * source, GarpRuleSet * target,
580  PerfIndex perfIndex)
581 {
582  int i, n, converged, similarIndex;
583  GarpRule * candidateRule, * similarRule;
584 
585  converged = 0;
586 
587  // step through source rule-set trying to insert rules into target
588  n = source->numRules();
589  for (i = 0; i < n; i++)
590  {
591  candidateRule = source->get(i);
592  similarIndex = target->findSimilar(candidateRule);
593  //if ((similarIndex < -1) || (similarIndex >= target->numRules()))
594  // Log::instance()->error("Index out of bounds (#8). Limits are (-1, %d), index is %d\n", target->numRules(), similarIndex);
595 
596  if (similarIndex >= 0)
597  {
598  converged++;
599 
600  // similar rule found replace it if better
601  similarRule = target->get(similarIndex);
602  if (candidateRule->getPerformance(perfIndex) >
603  similarRule->getPerformance(perfIndex))
604  {
605  // first create a clone of the rule, then replace the old one
// remove-then-insert (rather than replace in place) keeps the target
// sorted — this is the fix for the ordering bug mentioned in the
// algorithm description
606  candidateRule = candidateRule->clone();
607  //target->replace(similarIndex, candidateRule);
608  target->remove(similarIndex);
609  target->insert(perfIndex, candidateRule);
610  }
611  }
612  else
613  {
614  // no similar rule found: try to insert it into existing set
615  // first create a clone of the rule, then insert it into
616  // the target rs
617  candidateRule = candidateRule->clone();
618  target->insert(perfIndex, candidateRule);
619  }
620  }
621 
622  // update convergence value
// running average of the fraction of similar rules per generation;
// approaches 0 while novel rules keep appearing
623  _improvements += converged;
624  if (_improvements)
625  { _convergence = ( _convergence + ( (double) converged ) / _improvements ) / 2.0; }
626  else
627  { _convergence = 1.0; }
628 
629  /*
630  printf("Convergence: %+7.4f at generation %5d (%3d; %6d; %+7.4f) similar=%d\n", _convergence,
631  _gen, converged, _improvements, ( (double) converged ) / _improvements, similarIndex);
632  */
633 
634  // TODO: update heuristic rates based on who entered the target rule-set
635 }
636 
637 /****************************************************************/
638 /***************** evaluate *************************************/
639 
// void Garp::evaluate(GarpRuleSet * ruleset) — NOTE(review): the
// signature line (original line 640) is missing from this extracted
// listing.  Scores every rule in <ruleset> against the cached
// occurrence sample (_cachedOccs).
641 {
642  int i, n;
643 
644  n = ruleset->numRules();
645  for (i = 0; i < n; i++)
646  {
647  ruleset->get(i)->evaluate(_cachedOccs);
648  }
649 
650  return;
651 }
652 
653 /****************************************************************/
654 /***************** colonize *************************************/
655 
656 void Garp::colonize(GarpRuleSet * ruleset, int numRules)
657 {
658  int i, p, dim;
659  GarpRule * rule = 0;
660  Random rnd;
661 
662  dim = _samp->numIndependent();
663 
664  for (i = ruleset->numRules(); i < numRules; i++)
665  {
666  // pick the next rule to be generated
667  p = rnd(3);
668 
669  switch (p)
670  {
671  case 0:
672  rule = new RangeRule(dim);
673  rule->setPrediction(1.0);
674  ((RangeRule *) rule)->initialize(_bioclimHistogram);
675  break;
676 
677  case 1:
678  rule = new NegatedRangeRule(dim);
679  rule->setPrediction(0.0);
680  ((NegatedRangeRule *) rule)->initialize(_bioclimHistogram);
681  break;
682 
683  case 2:
684  rule = new LogitRule(dim);
685  Scalar pred = (rnd.get(0.0, 1.0) > 0.5) ? 1.0 : 0.0;
686  rule->setPrediction(pred);
687  ((LogitRule *) rule)->initialize(_regression);
688  break;
689  }
690 
691  //Log::instance()->debug("[%c] ", rule->type());
692  ruleset->add(rule);
693  }
694 }
695 
696 /****************************************************************/
697 /***************** select ***************************************/
698 
// Builds the next generation in <target> from <source> using
// fitness-proportionate selection via Baker's stochastic universal
// sampling.  <gapsize> is the fraction of _popsize actually copied
// into the new population; each copy is a fresh clone flagged for
// re-evaluation.  Throws AlgorithmException on internal index errors
// or when the target set overflows.
699 void Garp::select(GarpRuleSet * source, GarpRuleSet * target,
700  double gapsize)
701 {
702  Random rnd;
703  int * sample;
704  int i, j, k, n, temp;
705  double perfBest, perfWorst, perfAvg;
706  double sum, ptr, factor, expected, rulePerf, size;
707  perfBest = perfWorst = perfAvg = 0.0;
708  GarpRule * pRuleBeingInserted;
709 
710  source->performanceSummary(defaultPerfIndex, &perfBest, &perfWorst, &perfAvg);
711 
712  //Log::instance()->debug( "Performances: %f %f %f.\n", perfBest, perfWorst, perfAvg );
713 
714  // normalizer for proportional selection probabilities
// guard against a degenerate population where average == worst
715  if (perfAvg - perfWorst)
716  factor = 1.0 / (perfAvg - perfWorst);
717  else
718  factor = 1.0;
719 
720  // Stochastic universal sampling algorithm by James E. Baker
721  k = 0;
722  n = source->numRules();
723  sample = new int[_popsize + 1];
// pre-fill so every slot holds a valid rule index even if the
// sampling loop below fills fewer than _popsize slots
724  for (i = 0; i < _popsize; i++)
725  sample[i] = i % n;
726 
727  ptr = rnd.get(1.0);
728  sum = 0.0;
729  for (i = 0; i < n; i++) {
730  rulePerf = source->get(i)->getPerformance(defaultPerfIndex);
// expected offspring count is proportional to how far above the
// worst performer this rule scores
731  expected = (rulePerf - perfWorst) * factor;
732  for (sum += expected; (sum > ptr) && (k <= _popsize); ptr++) {
733  if ((k < 0) || (k > _popsize)) {
734  Log::instance()->error("Index out of bounds (#6). Limits are (0, %d), index is %d\n", _popsize, k);
735  throw AlgorithmException("Index out of bounds");
736  }
737  sample[k++] = i;
738  }
739  }
740 
741  /*
742  FILE * f = stdout;
743  fprintf(f, "Generation: %4d\n", _gen);
744  for (i = 0; i < _popsize; i++)
745  fprintf(f, "%+9.4f %3d\n", source->get(sample[i])->getPerformance(defaultPerfIndex), sample[i]);
746  */
747 
748  // randomly shuffle pointers to new structures
// Fisher-Yates style shuffle of the selected indices
749  for (i = 0; i < _popsize; i++)
750  {
751  j = rnd.get (i , _popsize - 1);
752  temp = sample[j];
753  sample[j] = sample[i];
754  sample[i] = temp;
755  }
756 
757  // finally, form the new population
758  // Gapsize giving the proportion contribution
759  // to the new population from the objBest archive set
760  target->clear();
761  size = ((double) _popsize) * gapsize;
762 
763  for (i = 0; i < size; i++)
764  {
765  pRuleBeingInserted = source->get(sample[i])->clone();
// clones must be re-scored before competing in the new population
766  pRuleBeingInserted->forceEvaluation();
767  if (!target->add(pRuleBeingInserted)) {
768  // target rs is full
769  std::string error = "Garp::reproduce(): Target rule set is full";
770  Log::instance()->error(error.c_str());
771  throw AlgorithmException(error.c_str());
772  }
773  }
774  delete[] sample;
775 }
776 
777 /****************************************************************/
778 /***************** mutate ***************************************/
779 
780 void Garp::mutate(GarpRuleSet * ruleset)
781 {
782  int i, n;
783 
784  double temperature = 2.0 / (double) _gen;
785  n = ruleset->numRules();
786  for (i = 0; i < n; i++)
787  ruleset->get(i)->mutate(temperature);
788 }
789 
790 /****************************************************************/
791 /***************** crossover ************************************/
792 
// void Garp::crossover(GarpRuleSet * ruleset) — NOTE(review): the
// signature line (original line 793) is missing from this extracted
// listing.  Performs in-place two-point crossover on randomly chosen
// pairs of rules; _crossover_rate fixes the fraction of the
// population that participates.
794 {
795  Random rnd;
796  int nrules, genes, xcount, last, mom, dad, xpt1, xpt2;
797 
798  genes = _samp->numIndependent();
799  nrules = ruleset->numRules();
800  last = (int) (_crossover_rate * (double) nrules);
801 
802  for (xcount = 0; xcount < last; xcount += 2)
803  {
804  mom = rnd.get(nrules);
805  dad = rnd.get(nrules);
// ensure the two parents are distinct rules
806  if (dad == mom)
807  dad = (dad + 1) % nrules;
808 
// two random crossover points within the chromosome
809  xpt1 = rnd.get(genes);
810  xpt2 = rnd.get(genes);
811 
812  ruleset->get(mom)->crossover(ruleset->get(dad), xpt1, xpt2);
813  }
814 }
815 
816 
817 // *****************
// void Garp::deleteTempDataMembers() — NOTE(review): the signature
// line (original line 818) is missing from this extracted listing.
// Frees the offspring rule set, which is only needed while the model
// is being built; _fittest (the model itself) is kept.
819 {
820  if (_offspring)
821  delete _offspring;
822  _offspring = NULL;
823 }
824 
825 /****************************************************************/
826 /**** This is a debug function that checks if a rule set is
827  **** correctly sorted. If not it dumps the performance values
828  **** for that rule set.
829  **** It was used to debug Garp::keepFittest() (replace call bug)
830  **** TODO: move this code to the test harness when we have one */
831 
832 void printPerfs(char * msg, int index, GarpRuleSet * ruleset)
833 {
834  for (int i = 1; i < ruleset->numRules(); i++)
835  {
836  if (ruleset->get(i - 1)->getPerformance((PerfIndex)8) <
837  ruleset->get(i)->getPerformance((PerfIndex)8))
838  {
839  printf("\nError: rule set out of sort order (Index: %d)\n", index);
840  for (int i = 0; i < ruleset->numRules(); i++)
841  {
842  printf("[%2d]=%6.3f ", i, ruleset->get(i)->getPerformance((PerfIndex)8) );
843  if ((i + 1) % 5 == 0)
844  printf("\n");
845  }
846  }
847  }
848 }
849 
virtual void crossover(GarpRule *rule, int xpt1, int xpt2)
Definition: rules_base.cpp:212
GarpRuleSet * _fittest
Definition: garp.hh:210
PerfIndex
Definition: rules_base.hh:42
double get(double min, double max)
Definition: Random.cpp:54
virtual void _setConfiguration(const ConstConfigurationPtr &)
Definition: garp.cpp:489
void evaluate(GarpRuleSet *ruleset)
Definition: garp.cpp:640
double _convergence
Definition: garp.hh:222
void crossover(GarpRuleSet *ruleset)
Definition: garp.cpp:793
#define NUM_PARAM
Definition: garp.cpp:50
virtual void _getConfiguration(ConfigurationPtr &) const
Definition: garp.cpp:444
int findSimilar(GarpRule *rule)
Definition: ruleset.cpp:267
OccurrencesPtr _cachedOccs
Definition: garp.hh:220
void keepFittest(GarpRuleSet *source, GarpRuleSet *target, PerfIndex perfIndex)
Definition: garp.cpp:579
double Scalar
Type of map values.
Definition: om_defs.hh:39
void cacheSamples(const SamplerPtr &, OccurrencesPtr &, int resamples)
Definition: garp.cpp:331
void reserve(int estimate)
Definition: Occurrences.hh:123
int _resamples
Definition: garp.hh:188
static Log * instance()
Returns the instance pointer, creating the object on the first call.
Definition: Log.cpp:45
OM_ALG_DLL_EXPORT AlgorithmImpl * algorithmFactory()
Definition: garp.cpp:209
Scalar getValue(const Sample &x) const
Definition: ruleset.cpp:285
double _conv_limit
Definition: garp.hh:194
int _popsize
Definition: garp.hh:185
void clear()
Definition: ruleset.cpp:91
float _maxProgress
Definition: garp.hh:230
Garp()
Definition: garp.cpp:231
void filter(PerfIndex perfIndex, double threshold)
Definition: ruleset.cpp:122
int iterate()
Definition: garp.cpp:351
GarpRuleSet * _offspring
Definition: garp.hh:213
void error(const char *format,...)
'Error' level.
Definition: Log.cpp:290
void performanceSummary(PerfIndex perfIndex, double *best, double *worst, double *average)
Definition: ruleset.cpp:304
BioclimHistogram _bioclimHistogram
Definition: garp.hh:216
void printPerfs(char *msg, int index, GarpRuleSet *ruleset)
Definition: garp.cpp:832
Definition: Random.hh:44
void forceEvaluation()
Definition: rules_base.hh:154
int getParameter(std::string const &name, std::string *value)
double evaluate(const OccurrencesPtr &occs)
Definition: rules_base.cpp:297
void setPrediction(double pred)
Definition: rules_base.hh:156
const Sample & getChrom1() const
Definition: rules_base.hh:159
int _curr_heur_count[5]
Definition: garp.hh:225
int _gen
Definition: garp.hh:228
int add(GarpRule *rule)
Definition: ruleset.cpp:244
static char const * getDefaultCS()
int _improvements
Definition: garp.hh:223
GarpRule * get(int index)
Definition: ruleset.cpp:201
Definition: garp.hh:57
double _acc_limit
Definition: garp.hh:191
double _significance
Definition: garp.hh:200
void calculateParameters(const OccurrencesPtr &occs)
Definition: regression.cpp:50
const PerfIndex defaultPerfIndex
Definition: garp.cpp:226
Scalar getValue(const Sample &x) const
Definition: garp.cpp:427
virtual GarpRule * clone() const
Definition: rules_base.cpp:134
double _gapsize
Definition: garp.hh:203
int initialize()
Definition: garp.cpp:290
std::size_t size() const
Definition: Sample.hh:70
float getProgress() const
Definition: garp.cpp:401
double _crossover_rate
Definition: garp.hh:201
virtual void mutate(double temperature)
Definition: rules_base.cpp:250
int insert(PerfIndex perfIndex, GarpRule *rule)
Definition: ruleset.cpp:165
Scalar getPrediction() const
Definition: rules_base.hh:158
int remove(int index)
Definition: ruleset.cpp:225
virtual char type() const
Definition: Rule.h:122
Definition: Rule.h:116
SamplerPtr _samp
Definition: Algorithm.hh:245
const double * getPerformanceArray() const
Definition: rules_base.hh:161
Regression _regression
Definition: garp.hh:218
AlgParamMetadata parameters[NUM_PARAM]
Definition: garp.cpp:55
double getPerformance(PerfIndex perfIndex) const
Definition: rules_base.cpp:177
int numRules()
Definition: ruleset.cpp:83
void deleteTempDataMembers()
Definition: garp.cpp:818
int getConvergence(Scalar *const val) const
Definition: garp.cpp:435
const Sample & getChrom2() const
Definition: rules_base.hh:160
virtual ~Garp()
Definition: garp.cpp:270
double _mutation_rate
Definition: garp.hh:202
void mutate(GarpRuleSet *ruleset)
Definition: garp.cpp:780
int _prev_heur_count[5]
Definition: garp.hh:226
void colonize(GarpRuleSet *ruleset, int numRules)
Definition: garp.cpp:656
int done() const
Definition: garp.cpp:419
OM_ALG_DLL_EXPORT AlgMetadata const * algorithmMetadata()
Definition: garp.cpp:216
void select(GarpRuleSet *source, GarpRuleSet *target, double gapsize)
Definition: garp.cpp:699
void initialize(const OccurrencesPtr &occs)
void trim(int rules)
Definition: ruleset.cpp:99
Normalizer * _normalizerPtr
Definition: Algorithm.hh:247
double _mortality
Definition: garp.hh:197
int _max_gen
Definition: garp.hh:182
AlgMetadata metadata
Definition: garp.cpp:134
Definition: Sample.hh:25
static char error[256]
Definition: FileParser.cpp:42