FFSM++ 1.1.0
French Forest Sector Model ++
Loading...
Searching...
No Matches
ModelCoreSpatial.cpp
Go to the documentation of this file.
1/***************************************************************************
2 * Copyright (C) 2015 by Laboratoire d'Economie Forestière *
3 * http://ffsm-project.org *
4 * *
5 * This program is free software; you can redistribute it and/or modify *
6 * it under the terms of the GNU General Public License as published by *
7 * the Free Software Foundation; either version 3 of the License, or *
8 * (at your option) any later version, given the compliance with the *
 9 * exceptions listed in the file COPYING that is distributed together *
10 * with this file. *
11 * *
12 * This program is distributed in the hope that it will be useful, *
13 * but WITHOUT ANY WARRANTY; without even the implied warranty of *
14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
15 * GNU General Public License for more details. *
16 * *
17 * You should have received a copy of the GNU General Public License *
18 * along with this program; if not, write to the *
19 * Free Software Foundation, Inc., *
20 * 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *
21 ***************************************************************************/
22#include <cmath>
23#include <algorithm>
24#include <tr1/array>
25
26
27#include "IpIpoptApplication.hpp"
28#include "IpSolveStatistics.hpp"
29
30#include "ModelCoreSpatial.h"
31#include "ModelData.h"
32#include "ThreadManager.h"
33#include "Opt.h"
34#include "Scheduler.h"
35#include "Gis.h"
36#include "Carbon.h"
37
38
// NOTE(review): the declaration line (doxygen line 39) is missing from this extract;
// from the member assignment this is the constructor body, which only caches the
// ThreadManager handle giving access to the other model components (MD, DO, SCD, OPT, GIS...).
40 MTHREAD = MTHREAD_h; // store the thread-manager pointer for later use by all module methods
41
42}
43
47
// NOTE(review): the function declaration line (doxygen line 49) is missing from this
// extract; from its content this is the one-off initial-period routine: it caches
// settings, initialises pixel volumes/areas, runs the market module for the base
// year (2005), advances the scheduler to the second year and prints initial outputs.
// The numeric prefixes below are doxygen line numbers fused into the text by the
// HTML extraction — they are not part of the code.
48void
50 cacheSettings(); ///< cache things like first year, second year, dClasses...
51 cacheDynamicSettings(); // cache settings that may change every year
52 initializePixelVolumes(); ///< compute px volumes vol for 2005 (including exogenous loaded volumes)
53 assignSpMultiplierPropToVols(); // assign the spatial multiplier (used in the time of return) based no more on a Normal distribution but on the volumes present in the pixel: more volume, more the pixel is fit for the ft
54 initMarketModule(); ///< inside it uses first year, second year
56 MTHREAD->DO->print(true);
57 MTHREAD->SCD->advanceYear(); ///< 2005->2006
58 int thisYear = MTHREAD->SCD->getYear(); // for debugging
59 msgOut(MSG_INFO, "### 2013 "+i2s(thisYear)+" year started..");
60 resetPixelValues(); ///< swap volumes->lagged_volumes and reset the other pixel vectors
61 cachePixelExogenousData(); ///< compute pixel tp, meta and mort
62 computeInventary(); ///< in=f(vol_t-1)
63 //printDebugInitRegionalValues();
64 computeCumulativeData(); ///< compute cumTp_exp, vHa_exp, vHa
65 initializePixelArea(); ///< compute px->area for each ft and dc (including exogenous loaded areas)
68 updateMapAreas(); ///< update the forArea_{ft} layer on each pixel as old value-hArea+regArea
69 updateOtherMapData(); ///< update (if the layer exists) other gis-based data, as volumes and expected returns, taking them from the data in the px object
70 sumRegionalForData(); ///< only for printing stats as forest data is never used at regional level
72 //MTHREAD->DO->printDebugPixelValues(); // uncomment to enable pixel-level debugging // moved to output->print
73
74 MTHREAD->DO->print();
75}
76
// NOTE(review): the function declaration line (doxygen line 78) is missing from this
// extract; from its content this is the per-year simulation step: it refreshes
// dynamic caches, rolls pixel state forward, runs the market optimisation, updates
// cumulative and GIS-derived data, computes the economic balances and prints outputs.
77void
79 int thisYear = MTHREAD->SCD->getYear(); // for debugging
80 cacheDynamicSettings(); // cache settings that may change every year
81 resetPixelValues(); // swap volumes->lagged_volumes and reset the other pixel vectors
82 cachePixelExogenousData(); // compute pixel tp, meta and mort
83 computeInventary(); // in=f(vol_t-1)
84 runMarketModule(); // RUN THE MARKET OPTIMISATION HERE
85 computeCumulativeData(); // compute cumTp_exp, vHa_exp
89 //MTHREAD->DO->printDebugPixelValues(); // uncomment to enable pixel-level debugging // moved to output->print
91 updateOtherMapData(); // update (if the layer exists) other gis-based data, as volumes and expected returns, taking them from the data in the px object
92 sumRegionalForData(); // only for printing stats as forest data is never used at regional level
94 computeEconomicBalances(); // policy costs and welfare analysis
95 MTHREAD->DO->print(false);
96}
97
// NOTE(review): the function declaration line (doxygen line 99) is missing from this
// extract; this is the market-module initialisation. For every level-2 region it
// derives the base-year (secondYear) market parameters — local prices of secondary
// products, local demand of primary products, total supply/demand, CES share
// parameters (q1/t1), composite demand/supply (dc/sc) and weighted prices — then
// zeroes the inter-regional trade flows and sets the within-region transport cost
// and distance. The GAMS equations being ported are kept as reference comments.
98void
100 msgOut(MSG_INFO, "Starting market module (init stage)..");
101
102 for(uint i=0;i<regIds2.size();i++){
103 int r2 = regIds2[i];
104 //RPAR('pl',i,p_tr,t-1) = sum(p_pr, a(p_pr,p_tr)*RPAR('pl',i,p_pr,t-1))+m(i,p_tr);
105 for(uint sp=0;sp<secProducts.size();sp++){
106 double value = 0;
107 for (uint pp=0;pp<priProducts.size();pp++){
108 value += gpd("pl",r2,priProducts[pp],secondYear)*
109 gpd("a",r2,priProducts[pp],secondYear,secProducts[sp]);
110 }
111 value += (gpd("m",r2,secProducts[sp],secondYear)* gpd("pol_trSub",r2,secProducts[sp],secondYear));
112 spd(value,"pl",r2,secProducts[sp],secondYear,true);
113 }
114 // RPAR('dl',i,p_pr,t-1) = sum(p_tr, a(p_pr,p_tr)*RPAR('sl',i,p_tr,t-1));
115 for (uint pp=0;pp<priProducts.size();pp++){
116 double value=0;
117 for(uint sp=0;sp<secProducts.size();sp++){
118 value += gpd("sl",r2,secProducts[sp],secondYear)*
119 gpd("a",r2,priProducts[pp],secondYear, secProducts[sp]);
120 }
121 spd(value,"dl",r2,priProducts[pp],secondYear,true);
122 double stvalue = gpd("sl",r2,priProducts[pp],secondYear)
123 + gpd("sa",r2,priProducts[pp],secondYear);
124 spd(stvalue,"st",r2,priProducts[pp],secondYear,true);
125 double supply_fromForestShare = gpd("supply_fromForestShare",r2,priProducts[pp],secondYear);
126 spd(stvalue*supply_fromForestShare,"st_fromHarv",r2,priProducts[pp],secondYear,true); // needed for the biological module in year starty+1
127 }
128 // RPAR('st',i,prd,t-1) = RPAR('sl',i,prd,t-1)+RPAR('sa',i,prd,t-1);
129 // RPAR('dt',i,prd,t-1) = RPAR('dl',i,prd,t-1)+RPAR('da',i,prd,t-1);
130 for (uint ap=0;ap<allProducts.size();ap++){
131 //double debug = gpd("dl",r2,allProducts[ap],secondYear);
132 double dtvalue = gpd("dl",r2,allProducts[ap],secondYear)
133 + gpd("da",r2,allProducts[ap],secondYear);
134
135 spd(dtvalue,"dt",r2,allProducts[ap],secondYear,true);
136 }
137
138 // q1(i,p_tr) = 1/(1+((RPAR('dl',i,p_tr,t-1)/RPAR('da',i,p_tr,t-1))**(1/psi(i,p_tr)))*(RPAR('pl',i,p_tr,t-1)/PT(p_tr,t-1)));
139 // p1(i,p_tr) = 1-q1(i,p_tr);
140 // RPAR('dc',i,p_tr,t-1) = (q1(i,p_tr)*RPAR('da',i,p_tr,t-1)**((psi(i,p_tr)-1)/psi(i,p_tr))+ p1(i,p_tr)*RPAR('dl',i,p_tr,t-1)**((psi(i,p_tr)-1)/psi(i,p_tr)))**(psi(i,p_tr)/(psi(i,p_tr)-1));
141 // RPAR('pc',i,p_tr,t-1) = (RPAR('da',i,p_tr,t-1)/RPAR('dc',i,p_tr,t-1))*PT(p_tr,t-1)+(RPAR('dl',i,p_tr,t-1)/RPAR('dc',i,p_tr,t-1))*RPAR('pl',i,p_tr,t-1);
142 // RPAR('pc',i,p_pr,t-1) = (RPAR('sa',i,p_pr,t-1)/RPAR('sc',i,p_pr,t-1))*PT(p_pr,t-1)+(RPAR('sl',i,p_pr,t-1)/RPAR('sc',i,p_pr,t-1))*RPAR('pl',i,p_pr,t-1);
143 // RPAR('pw',i,p_tr,t-1) = (RPAR('dl',i,p_tr,t-1)*RPAR('pl',i,p_tr,t-1)+RPAR('da',i,p_tr,t-1)*PT(p_tr,t-1))/RPAR('dt',i,p_tr,t-1) ; //changed 20120419
144 // K(i,p_tr,t-1) = k1(i,p_tr)*RPAR('sl',i,p_tr,t-1);
145 for(uint sp=0;sp<secProducts.size();sp++){
146 double psi = gpd("psi",r2,secProducts[sp],secondYear);
147 double dl = gpd("dl",r2,secProducts[sp],secondYear);
148 double da = gpd("da",r2,secProducts[sp],secondYear);
149 double pl = gpd("pl",r2,secProducts[sp],secondYear);
150 double sa = gpd("sa",r2,secProducts[sp],secondYear);
151 double sl = gpd("sl",r2,secProducts[sp],secondYear);
152 double k1 = gpd("k1",r2,secProducts[sp],secondYear);
153 double pWo = gpd("pl",WL2,secProducts[sp],secondYear); // World price (local price for region 99999)
154
155 double stvalue = sl+sa;
156 double q1 = 1/ ( 1+pow(dl/da,1/psi)*(pl/pWo) );
157 double p1 = 1-q1;
158 double dc = pow(
159 q1*pow(da,(psi-1)/psi) + p1*pow(dl,(psi-1)/psi)
160 ,
161 psi/(psi-1)
162 );
163 double pc = (da/dc)*pWo
164 +(dl/dc)*pl;
165 double pw = (dl*pl+da*pWo)/(dl+da);
166 double k = k1*sl;
167
168 spd(stvalue,"st",r2,secProducts[sp],secondYear,true);
169 spd(q1,"q1",r2,secProducts[sp],firstYear,true);
170 //spd(p1,"p1",r2,secProducts[sp],firstYear,true);
171 spd(dc,"dc",r2,secProducts[sp],secondYear,true);
172 spd(pc,"pc",r2,secProducts[sp],secondYear,true);
173 spd(pw,"pw",r2,secProducts[sp],secondYear,true);
174 spd(k,"k",r2,secProducts[sp],secondYear,true);
175 }
176
177 // t1(i,p_pr) = 1/(1+((RPAR('sl',i,p_pr,t-1)/RPAR('sa',i,p_pr,t-1))**(1/eta(i,p_pr)))*(RPAR('pl',i,p_pr,t-1)/PT(p_pr,t-1)));
178 // r1(i,p_pr) = 1-t1(i,p_pr);
179 // RPAR('sc',i,p_pr,t-1) = (t1(i,p_pr)*RPAR('sa',i,p_pr,t-1)**((eta(i,p_pr)-1)/eta(i,p_pr))+ r1(i,p_pr)*RPAR('sl',i,p_pr,t-1)**((eta(i,p_pr)-1)/eta(i,p_pr)))**(eta(i,p_pr)/(eta(i,p_pr)-1))
180 // RPAR('pc',i,p_pr,t-1) = (RPAR('sa',i,p_pr,t-1)/RPAR('sc',i,p_pr,t-1))*PT(p_pr,t-1)+(RPAR('sl',i,p_pr,t-1)/RPAR('sc',i,p_pr,t-1))*RPAR('pl',i,p_pr,t-1);
181 // RPAR('pw',i,p_pr,t-1) = (RPAR('sl',i,p_pr,t-1)*RPAR('pl',i,p_pr,t-1)+RPAR('sa',i,p_pr,t-1)*PT(p_pr,t-1))/RPAR('st',i,p_pr,t-1) ; //changed 20120419
182 for(uint pp=0;pp<priProducts.size();pp++){
183
184 double sl = gpd("sl",r2,priProducts[pp],secondYear);
185 double sa = gpd("sa",r2,priProducts[pp],secondYear);
186 double eta = gpd("eta",r2,priProducts[pp],secondYear);
187 double pl = gpd("pl",r2,priProducts[pp],secondYear);
188 double pWo = gpd("pl",WL2,priProducts[pp],secondYear); // World price (local price for region 99999)
189
190
191 double t1 = 1/ ( 1+(pow(sl/sa,1/eta))*(pl/pWo) );
192 double r1 = 1-t1;
193 double sc = pow(
194 t1*pow(sa,(eta-1)/eta) + r1*pow(sl,(eta-1)/eta)
195 ,
196 eta/(eta-1)
197 );
198 double pc = (sa/sc)*pWo+(sl/sc)*pl;
199 double pw = (sl*pl+sa*pWo)/(sl+sa);
200
201 spd(t1,"t1",r2,priProducts[pp],firstYear,true);
202 //spd(r1,"r1",r2,priProducts[pp],firstYear,true);
203 spd(sc,"sc",r2,priProducts[pp],secondYear,true);
204 spd(pc,"pc",r2,priProducts[pp],secondYear,true);
205 spd(pw,"pw",r2,priProducts[pp],secondYear,true);
206 }
207
208 // up to here tested with gams output on 20120628, that's fine !!
209 } // end for each region in level 2
210
211
212 // initializing the exports to zero quantities
213 // initializing of the transport cost for the same region to one and distance to zero
214 for(uint r1=0;r1<l2r.size();r1++){
215 for(uint r2=0;r2<l2r[r1].size();r2++){
216 for(uint p=0;p<allProducts.size();p++){
217 for(uint r2To=0;r2To<l2r[r1].size();r2To++){
218 spd(0,"rt",l2r[r1][r2],allProducts[p],secondYear,true,i2s(l2r[r1][r2To])); // regional trade, it was exp in gams
219 if(l2r[r1][r2] == l2r[r1][r2To]){
220 spd(1,"ct",l2r[r1][r2],allProducts[p],firstYear,true,i2s(l2r[r1][r2To])); // as long as this value is higher than zero, rt within the same region is not chosen by the solver, so the value doesn't really matter. If it is zero, the solver still works and results are the same, but reported rt within the region are crazy high (100000)
221 }
222 }
223 } // end each product
224
225 for(uint r2To=0;r2To<l2r[r1].size();r2To++){
226 if(l2r[r1][r2] == l2r[r1][r2To]){
227 spd(0,"dist",l2r[r1][r2],"",firstYear,true,i2s(l2r[r1][r2To])); // setting distance zero in code, so no need to put it in the data
228 }
229 }
230 } // end of r2 regions
231 } // end of r1 region
232}
233
// NOTE(review): the function declaration line (doxygen line 235) is missing from this
// extract; this is the yearly market routine. It (1) updates the per-region market
// parameters (demand shifters aa/gg, supply shifters bb/ff — the latter including a
// carbon-price term — and the policy-corrected CES weights); (2) configures and runs
// the IPOPT non-linear optimisation; (3) post-processes the equilibrium: totals,
// weighted prices, capping of the forest supply st_fromFor at the available
// inventory (tracking cumulative overharvesting) and the allocation of harvesting.
234void
236 msgOut(MSG_INFO, "Starting market module");
237 static double cumOverHarvesting = 0.0; // persists across years: cumulated harvest in excess of inventory
238 int thisYear = MTHREAD->SCD->getYear();
239 int previousYear = MTHREAD->SCD->getYear()-1;
240 bool useDeathTimber = MTHREAD->MD->getBoolSetting("useDeathTimber",DATA_NOW);
241
242 // Added for carbon project -------------------------------
243 double intRate = MTHREAD->MD->getDoubleSetting("ir");
244
245 // ----------------------------------------------------------------
246
247 // *** PRE-OPTIMISATION YEARLY OPERATIONS..
248 for(uint i=0;i<regIds2.size();i++){
249 int r2 = regIds2[i];
250 for(uint sp=0;sp<secProducts.size();sp++){
251 double g1 = gpd("g1",r2,secProducts[sp],previousYear);
252 double sigma = gpd("sigma",r2,secProducts[sp]);
253 double pc_1 = gpd("pc",r2,secProducts[sp],previousYear);
254 double dc_1 = gpd("dc",r2,secProducts[sp],previousYear);
255 double k_1 = gpd("k",r2,secProducts[sp],previousYear);
256 double pol_mktDirInt_d = gpd("pol_mktDirInt_d",r2,secProducts[sp]);
257 double pol_mktDirInt_d_1 = gpd("pol_mktDirInt_d",r2,secProducts[sp],previousYear);
258 double q1 = gpd("q1",r2,secProducts[sp]);
259 double pol_mktStr_d = gpd("pol_mktStr_d",r2,secProducts[sp]);
260
261 double q1_polCorr = min(1.0, (q1-0.5)*pol_mktStr_d+0.5); // The pol_mktStr_s coefficient makes t1 (the "b" weight in the CES function) converge toward 0.5 as pol_mktStr_s goes to 0. As pol_mktStr_s can be above 1, an upper cap of unity is imposed to t1
262
263
264
265 double k = (1+g1)*k_1;
266 double aa = (sigma/(sigma+1))*pc_1*pow(dc_1,-1/sigma) * pol_mktDirInt_d_1/pol_mktDirInt_d ;
267 double gg = dc_1*pow(pc_1*pol_mktDirInt_d_1,-sigma); //alpha
268
269 spd(k, "k" ,r2,secProducts[sp]);
270 spd(aa,"aa",r2,secProducts[sp],DATA_NOW,true);
271 spd(gg,"gg",r2,secProducts[sp],DATA_NOW,true);
272 spd(q1_polCorr,"q1_polCorr",r2,secProducts[sp],DATA_NOW,true);
273
274 }
275
276 // BB(i,p_pr) = (sigma(p_pr)/(sigma(p_pr)+1))*RPAR('pc',i,p_pr,t-1)*(RPAR('sc',i,p_pr,t-1)**(-1/sigma(p_pr)))*(In(i,p_pr,t-1)/In(i,p_pr,t))**(gamma(p_pr)/sigma(p_pr));
277 // FF(i,p_pr) = RPAR('sc',i,p_pr,t-1)*((RPAR('pc',i,p_pr,t-1))**(-sigma(p_pr)))*(In(i,p_pr,t)/In(i,p_pr,t-1))**(gamma(p_pr)); //chi
278 for(uint pp=0;pp<priProducts.size();pp++){
279 double gamma_incr = gpd("gamma_incr",r2,priProducts[pp]); // elast supply to increasing stocks
280 double gamma_decr = gpd("gamma_decr",r2,priProducts[pp]); // elast supply to decreasing stocks
281 double gamma_d_incr = gpd("gamma_d_incr",r2,priProducts[pp]); // elast supply to increasing stocks death timber
282 double gamma_d_decr = gpd("gamma_d_decr",r2,priProducts[pp]); // elast supply to decreasing stocks death timber
283
284 double sigma = gpd("sigma",r2,priProducts[pp]); // elast supply to price
285 double pc_1 = gpd("pc",r2,priProducts[pp],previousYear);
286 double sc_1 = gpd("sc",r2,priProducts[pp],previousYear);
287 double in = gpd("in",r2,priProducts[pp]);
288 double in_0 = gpd("in",r2,priProducts[pp],MTHREAD->MD->getIntSetting("initialYear"));
289 double in_1 = gpd("in",r2,priProducts[pp],previousYear);
290 double in_d =gpd("in_deathTimber",r2,priProducts[pp]);
291 double in_d_1 ;
292 if (thisYear == MTHREAD->MD->getIntSetting("initialOptYear")){
293 in_d_1 = gpd("in_deathTimber",r2,priProducts[pp],thisYear);
294 } else {
295 in_d_1 = gpd("in_deathTimber",r2,priProducts[pp],previousYear);
296 }
297 double min_in_d = max(0.00001,in_0 * MTHREAD->MD->getDoubleSetting("minShareDeathIn"));
298
299
300 double pol_mktDirInt_s = gpd("pol_mktDirInt_s",r2,priProducts[pp]);
301 double pol_mktDirInt_s_1 = gpd("pol_mktDirInt_s",r2,priProducts[pp],previousYear);
302 double t1 = gpd("t1",r2,priProducts[pp]);
303 double pol_mktStr_s = gpd("pol_mktStr_s",r2,priProducts[pp]);
304
305
306 // Added for carbon project ------------------------------------------
307 double carbonPrice_1 = gpd("carbonPrice",r2,"",previousYear); //using dummy region until Anto solves issue // Antonello: solved but also generalised
308 double omega = gpd("co2content_products", r2, priProducts[pp])/1000; //kg to tons
309 double Pct_1 = carbonPrice_1*intRate;
310
311 // -------------------------------------------------------------------
312
313
314 double t1_polCorr = min(1.0, (t1-0.5)*pol_mktStr_s+0.5); // The pol_mktStr_s coefficient makes t1 (the "b" weight in the CES function) converge toward 0.5 as pol_mktStr_s goes to 0. As pol_mktStr_s can be above 1, an upper cap of unity is imposed to t1
315
316
317 double gamma = in>in_1 ? gamma_incr: gamma_decr; // If inventories resources are decreasing we use a given elasticity, if they are increasing we use an other one
318 double gamma_d = in_d>in_d_1 ? gamma_d_incr: gamma_d_decr; // If inventories resources are decreasing we use a given elasticity, if they are increasing we use an other one
319
320
321 double in_ratio = in/in_1;
322 double in_d_ratio = useDeathTimber ? max(min_in_d,in_d) / max(min_in_d, in_d_1 ) : 1.0 ; // to avoid depending on the ratio of too small variations in mortality
323
324 //msgOut(MSG_INFO,"in_d: "+d2s(in_d));
325 //msgOut(MSG_INFO,"in_d_1: "+d2s(in_d_1));
326 //msgOut(MSG_INFO,"in: "+d2s(in));
327 //msgOut(MSG_INFO,"in_1: "+d2s(in_1));
328 //msgOut(MSG_INFO,"in_d_ratio: "+d2s(in_d_ratio));
329
330#ifdef QT_DEBUG
331if (in_d_ratio < 0.01){
332 msgOut(MSG_CRITICAL_ERROR,"Very low in_d_ratio ");
333}
334if (in_d_ratio > 100){
335 msgOut(MSG_CRITICAL_ERROR,"Very high in_d_ratio ");
336}
337#endif
338
339 double bb = (sigma/(sigma+1.0))*pc_1*pow(sc_1,-1.0/sigma)*pow(1/in_ratio,gamma/sigma)*pow(1.0,1.0/sigma) * pol_mktDirInt_s_1/pol_mktDirInt_s ;
340
341 /* Old FF (without carbon)
342 double ff = sc_1*pow(pc_1*pol_mktDirInt_s_1,-sigma)*pow(in_ratio,gamma); //chi
343 */
344
345 // Added for carbon project
346 double ff = sc_1*pow((pc_1 - omega * Pct_1)*pol_mktDirInt_s_1,-sigma)*pow(in_ratio,gamma)*pow(in_d_ratio,gamma_d);
347 // -----------------------------------------------
348
349 spd(bb,"bb",r2,priProducts[pp],DATA_NOW,true);
350 spd(ff,"ff",r2,priProducts[pp],DATA_NOW,true);
351 spd(sigma,"sigma",r2,priProducts[pp],DATA_NOW,true);
352 spd(t1_polCorr,"t1_polCorr",r2,priProducts[pp],DATA_NOW,true);
353
354 }
355
356 } // end for each region in level 2 (and updating variables)
357
358 //cout << "### "+i2s(thisYear-2)+"abb: " << d2s( MD->deathTimberInventory_get(iisskey (thisYear-2,11001,"Fut_Feu","30") ) ) << endl;
359
360 // *** OPTIMISATION....
361
362 // Create an instance of the IpoptApplication
363 //Opt *OPTa = new Opt(MTHREAD);
364 //SmartPtr<TNLP> OPTa = new Opt(MTHREAD);
365 SmartPtr<IpoptApplication> application = new IpoptApplication();
366 string linearSolver = MTHREAD->MD->getStringSetting("linearSolver",DATA_NOW);
367 application->Options()->SetStringValue("linear_solver", linearSolver); // default in ipopt is ma27
368 //application->Options()->SetStringValue("hessian_approximation", "limited-memory"); // quasi-newton approximation of the hessian
369 //application->Options()->SetIntegerValue("mumps_mem_percent", 100);
370 application->Options()->SetNumericValue("obj_scaling_factor", -1); // maximisation
371 application->Options()->SetNumericValue("max_cpu_time", 1800); // max 1/2 hour to find the optimus for one single year
372 application->Options()->SetStringValue("check_derivatives_for_naninf", "yes");
373 //application->Options()->SetStringValue("halt_on_ampl_error", "yes"); // not a valid option
374 //application->Options()->SetStringValue("print_options_documentation", "yes"); // print the ipopt options (hundreds of pages!)
375 // Relaxing tolerance options... worse effect :-(
376 application->Options()->SetNumericValue("tol", 1e-06);
377 //application->Options()->SetNumericValue("constr_viol_tol",0.001);
378 //application->Options()->SetNumericValue("compl_inf_tol",0.001);
379 application->Options()->SetNumericValue("acceptable_tol", 1e-05);
380 //application->Options()->SetNumericValue("acceptable_dual_inf_tol", 1e+9);
381 //application->Options()->SetNumericValue("acceptable_constr_viol_tol", 0.1);
382 //application->Options()->SetNumericValue("acceptable_compl_inf_tol", 0.1);
383
384
385 // Initialize the IpoptApplication and process the options
386 ApplicationReturnStatus status;
387 status = application->Initialize();
388 if (status != Solve_Succeeded) {
389 printf("\n\n*** Error during initialization!\n");
390 msgOut(MSG_INFO,"Error during initialization! Do you have the solver compiled for the specified linear solver?");
391 return;
392 }
393
394 msgOut(MSG_INFO,"Running optimisation problem for this year (it may take a few minutes for large models)..");
395 status = application->OptimizeTNLP(MTHREAD->OPT);
396
397 // cout << "### "+i2s(thisYear-2)+"abc: " << d2s( MD->deathTimberInventory_get(iisskey (thisYear-2,11001,"Fut_Feu","30") ) ) << endl;
398 // *** POST OPTIMISATION....
399
400 // post-equilibrium variables->parameters assignments..
401 // RPAR(type,i,prd,t) = RVAR.l(type,i,prd);
402 // EX(i,j,prd,t) = EXP.l(i,j,prd);
403 // ObjT(t) = Obj.l ;
404 // ==> in Opt::finalize_solution()
405
406 // Retrieve some statistics about the solve
407 if (status == Solve_Succeeded) {
408 Index iter_count = application->Statistics()->IterationCount();
409 Number final_obj = application->Statistics()->FinalObjective();
410 printf("\n*** The problem solved year %d in %d iterations!\n", thisYear, iter_count);
411 printf("\n*** The final value of the objective function is %e.\n", final_obj);
412 msgOut(MSG_INFO, "The problem solved successfully year "+i2s(thisYear)+" in "+i2s(iter_count)+" iterations.");
413 int icount = iter_count;
414 double obj = final_obj;
415 MTHREAD->DO->printOptLog(true, icount, obj);
416 } else {
417 //Number final_obj = application->Statistics()->FinalObjective();
418 int thisYear = MTHREAD->SCD->getYear();
419 cout << "***ERROR: MODEL DIDN'T SOLVE FOR THIS YEAR ("<<thisYear<<")"<< endl;
420 msgOut(MSG_CRITICAL_ERROR, "Model DIDN'T SOLVE for this year ("+i2s(thisYear)+")");
421 // IMPORTANT! Don't place the next two lines above the msgOut() function or it will crash in windows if the user press the stop button
422 //Index iter_count = application->Statistics()->IterationCount(); // syserror if model doesn't solve
423 //Number final_obj = application->Statistics()->FinalObjective();
424 int icount = 0;
425 double obj = 0;
426 MTHREAD->DO->printOptLog(false, icount, obj);
427 }
428
429 for(uint r2= 0; r2<regIds2.size();r2++){ // you can use r2<=regIds2.size() to try an out-of range memory error that is not detected other than by valgrind (with a message "Invalid read of size 4 in ModelCore::runSimulationYear() in src/ModelCore.cpp:351")
430 int regId = regIds2[r2];
431 ModelRegion* REG = MTHREAD->MD->getRegion(regId);
432
433 // *** st adjustments:***
434 // st_or[pp] -> original st from the mkt module
435 // st_fromFor[pp] -> cover the volumes from both alive trees and death ones. This should be bounded by inResByAnyCombination, that already includes death resources
436 // st_fromHarv[pp] -> from harvesting. This should be equal to hV and IGN data
437 // st
438 // st_or = st
439 // st_fromFor = st_or * a
440 // st_fromFor > in:
441 // (overharvesting)
442 // overHarv = st_fromFor - in
443 // st_fromFor = in
444 // st = st_fromFor/a
445 // st_fromHarv = distribute(st_fromFor)
446
447 // // total supply and total demand..
448 // RPAR('st',i,prd,t) = RPAR('sl',i,prd,t)+RPAR('sa',i,prd,t);
449 // RPAR('dt',i,prd,t) = RPAR('dl',i,prd,t)+RPAR('da',i,prd,t);
450 // // weighted prices.. //changed 20120419
451 // RPAR('pw',i,p_tr,t) = (RPAR('dl',i,p_tr,t)*RPAR('pl',i,p_tr,t)+RPAR('da',i,p_tr,t)*PT(p_tr,t))/RPAR('dt',i,p_tr,t) ; //changed 20120419
452 // RPAR('pw',i,p_pr,t) = (RPAR('sl',i,p_pr,t)*RPAR('pl',i,p_pr,t)+RPAR('sa',i,p_pr,t)*PT(p_pr,t))/RPAR('st',i,p_pr,t) ; //changed 20120419
453 for (uint p=0;p<allProducts.size();p++){
454 double st = gpd("sl",regId,allProducts[p])+gpd("sa",regId,allProducts[p]);
455 double dt = gpd("dl",regId,allProducts[p])+gpd("da",regId,allProducts[p]);
456 spd(st,"st",regId,allProducts[p]);
457 spd(st,"st_or",regId,allProducts[p],DATA_NOW,true); // original total supply, not corrected by resetting it to min(st, inv).
458 spd(dt,"dt",regId,allProducts[p]);
459 }
460 for (uint p=0;p<secProducts.size();p++){
461 double dl = gpd("dl",regId,secProducts[p]);
462 double pl = gpd("pl",regId,secProducts[p]);
463 double da = gpd("da",regId,secProducts[p]); // bug corrected 20120913
464 double pworld = gpd("pl", WL2,secProducts[p]);
465 double dt = gpd("dt",regId,secProducts[p]);
466 double pw = dt?(dl*pl+da*pworld)/dt:0.0;
467 spd(pw,"pw",regId,secProducts[p]);
468 }
469 for (uint p=0;p<priProducts.size();p++){
470 double sl = gpd("sl",regId,priProducts[p]);
471 double pl = gpd("pl",regId,priProducts[p]);
472 double sa = gpd("sa",regId,priProducts[p]); // bug corrected 20120913
473 double pworld = gpd("pl", WL2,priProducts[p]);
474 double st = gpd("st",regId,priProducts[p]);
475 double st_fromFor = st * gpd("supply_fromForestShare",regId,priProducts[p]);
476 double pw = st?(sl*pl+sa*pworld)/st:0.0;
477 spd(pw,"pw",regId,priProducts[p]);
478 spd(st_fromFor,"st_fromFor",regId,priProducts[p],DATA_NOW,true);
479 }
480
481 // Correcting st if this is over the in
482
483 // Create a vector with all possible combinations of primary products
484 vector<vector<int>> priPrCombs = MTHREAD->MD->createCombinationsVector(priProducts.size());
485 int nPriPrCombs = priPrCombs.size();
486
487 for (uint i=0;i<priPrCombs.size();i++){
488 double stMkMod = 0.0;
489 double sumIn = REG->inResByAnyCombination[i];
490 // double sumIn2 = 0.0;
491 for (uint p=0;p<priPrCombs[i].size();p++){
492 stMkMod += gpd("st_fromFor",regId,priProducts[priPrCombs[i][p]]);
493 //sumIn2 += gpd("in",regId,priProducts[priPrCombs[i][p]]);
494 }
495
496 //if(sumIn<=0.00001){
497 // for (uint p=0;p<priPrCombs[i].size();p++){
498 // spd(0.0,"st",regId,priProducts[priPrCombs[i][p]]);
499 // }
500 // } else {
501 if(stMkMod>sumIn){ // if we harvested more than available
502 string pProductsInvolved = "";
503 for (uint p=0;p<priPrCombs[i].size();p++){
504 pProductsInvolved += (priProducts[priPrCombs[i][p]]+"; ");
505 }
506 double inV_over_hV_ratio = stMkMod ? sumIn/stMkMod : 0.0;
507 cumOverHarvesting += (stMkMod-sumIn);
508 msgOut(MSG_DEBUG, "Overharvesting has happened. Year: "+i2s(thisYear)+ "Region: "+i2s(regId)+"Involved products: "+pProductsInvolved+". sumIn: "+d2s(sumIn)+" stMkMod:" +d2s(stMkMod) + " cumOverHarvesting: "+d2s(cumOverHarvesting));
509 for (uint p=0;p<priPrCombs[i].size();p++){
510 double st_fromFor_orig = gpd("st_fromFor",regId,priProducts[priPrCombs[i][p]]);
511 spd(st_fromFor_orig*inV_over_hV_ratio,"st_fromFor",regId,priProducts[priPrCombs[i][p]]);
512 }
513 }
514
515 //}
516
517
518 }
519
520 //cout << "### "+i2s(thisYear-2)+"abe: " << d2s( MD->deathTimberInventory_get(iisskey (thisYear-2,11001,"Fut_Feu","30") ) ) << endl;
521 // here we create st_fromHarv as st_fromFor - st_from_deathbiomass
522 vector <double> total_st(priProducts.size(),0.);
523 vector <double> in_deathTimber(priProducts.size(),0.);
524 vector <double> in_aliveForest (priProducts.size(),0.);
525 for (uint i=0;i<priProducts.size();i++){
526 total_st[i] = gpd("st_fromFor",regId,priProducts[i]);
527 in_deathTimber[i] = gpd("in_deathTimber",regId,priProducts[i]);
528 in_aliveForest[i] = gpd("in",regId,priProducts[i]);
529 }
530 // cout << "### "+i2s(thisYear-2)+"abf: " << d2s( MD->deathTimberInventory_get(iisskey (thisYear-2,11001,"Fut_Feu","30") ) ) << endl;
531 vector <double> st_fromHarv= allocateHarvesting(total_st, regId);
532 //cout << "### "+i2s(thisYear-2)+"abg: " << d2s( MD->deathTimberInventory_get(iisskey (thisYear-2,11001,"Fut_Feu","30") ) ) << endl;
533 for (uint i=0;i<priProducts.size();i++){
534 spd(st_fromHarv[i],"st_fromHarv",regId,priProducts[i],DATA_NOW,true);
535 }
536
537 } // end of each region
538
539 // cout << "### "+i2s(thisYear-2)+"abh: " << d2s( MD->deathTimberInventory_get(iisskey (thisYear-2,11001,"Fut_Feu","30") ) ) << endl;
540
541 if (cumOverHarvesting>0.0){
542 msgOut(MSG_DEBUG, "Overharvesting is present. Year: "+i2s(thisYear)+" cumOverHarvesting: "+d2s(cumOverHarvesting));
543 }
544}
545
546/**
547 * @brief ModelCoreSpatial::runBiologicalModule
548 *
549 * Changes in Area:
550 * dc area_l area diff
551 * 0 ---------> +regArea -areaFirstProdClass (areaMovingUp_00)
552 * 15 ---------> +areaFirstPrClass -hArea_15 -areaMovingUp_15
553 * 25 ---------> +areaMovingUp15 - hArea_25 - areaMovingUp_25
554 * 35 ---------> +areaMovingUp25 - hArea_35 - areaMovingUp_35
555 * ...
556 * 95 ---------> +areaMovingUp85 - hArea_95 - areaMovingUp_95
557 * 105 ---------> +areaMovingUp95 - hArea_105
558 *
559 * note: regArea is computed in the management module, not here. Further, regArea is already the net one of forest area changes
560 */
561void
563
564 msgOut(MSG_INFO, "Starting resource module..");
565 int thisYear = MTHREAD->SCD->getYear();
566 bool useDeathTimber = MD->getBoolSetting("useDeathTimber",DATA_NOW);
567
568 for(uint i=0;i<regIds2.size();i++){
569 int r2 = regIds2[i];
570 int regId = r2;
571 ModelRegion* REG = MTHREAD->MD->getRegion(r2);
572 //Gis* GIS = MTHREAD->GIS;
573 regPx = REG->getMyPixels();
574 double shareMortalityUsableTimber = 0.0;
575
576 for (uint p=0;p<regPx.size();p++){
577 Pixel* px = regPx[p];
578
579 double pxId = px->getID();
580 //if (pxId == 3550.0){
581 // cout << "got the pixel" << endl;
582 //}
583 //px->expectedReturns.clear();
584 for(uint j=0;j<fTypes.size();j++){
585 string ft = fTypes[j];
586 //double pxArea_debug = px->getDoubleValue("forArea_"+ft, true);
587 vector <double> hV_byDiam;
588 vector <double> productivity_byDc; // mc/ha/y
589 vector < vector <double> > hV_byDiamAndPrd;
590 vector <double> hArea_byDc;
591 vector <double> newVol_byDiam;
592 vector <double> vMort_byDc;
593 vector <double> vMortAdd_byDc ; // fire mortality
594 vector <double> areasMovingUp(dClasses.size(), 0.0);
595 double areaFirstProdClass;
596
597
598 // A - COMPUTING THE REGENERATION..
599 // if we are in a year where the time of passage has not yet been reached
600 // for the specific i,e,l then we use the exogenous Vregen, otherwise we
601 // calculate it
602 //if ( not scen("fxVreg") ,
603 // loop( (i,essence,lambda),
604 // if( ord(t)>=(tp_u1(i,essence,lambda)+2),
605 // Vregen(i,lambda,essence,t)=regArea(i,essence,lambda,t-tp_u1(i,essence,lambda))*volHa_u1(i,essence,lambda)/1000000 ;
606 // );
607 // );
608 //);
609 int tp_u0 = px->tp.at(j).at(0); // time of passage to reach the first production diameter class // bug 20140318, added ceil. 20140318 removed it.. model did go crazy with it
610 if(thisYear == secondYear){
611 px->initialDc0Area.push_back(px->area_l.at(j).at(0));
612 }
613 if(regType != "fixed" && (thisYear-secondYear) >= tp_u0 ) { // T.O.D.O to be checked -> 20121109 OK
614 double pastRegArea = px->getPastRegArea(j,thisYear-tp_u0);
615 double availableArea = px->area_l.at(j).at(0);
616 //double entryVolHa = gfd("entryVolHa",regId,ft,"");
617 double vHa = px->vHa.at(j).at(1);
618 //attenction that at times could take the wrong pastRegArea if tp change too suddenly as in some "strange" scenarios
620 areaFirstProdClass = pastRegArea;
621 } else {
622 areaFirstProdClass = min(availableArea, pastRegArea); // this is just a start and will need to include the last year area
623 }
624 px->vReg.push_back(areaFirstProdClass*vHa/1000000.0); // TO.DO: check the 1000000. Should be ok, as area in ha vol in Mm^3
625 //if (pxId == 3550.0 && j==3){
626 // cout << "got the pixel" << endl;
627 //}
628 #ifdef QT_DEBUG
629 if (areaFirstProdClass < 0.0){
630 //msgOut(MSG_CRITICAL_ERROR,"Negative regeneration volumes in endogenous regeneration");
631 }
632 if ( (availableArea-pastRegArea) < -0.00000001 ){
633 // in a very rare cases tp change first in a direction and then in the other, so that the wrong past regeneration area
634 // is picken up.
635 //msgOut(MSG_CRITICAL_ERROR,"Upgrading from dc0 more area than the available one in endogenous regeneration");
636 }
637 #endif
638 } else {
639 double regionArea = REG->getValue("forArea_"+ft,OP_SUM);
640 double pxArea = px->getDoubleValue("forArea_"+ft, true); // 20121109 bug solved (add get zero for not data)
641 double regRegVolumes = gfd("vReg",r2,ft,"");
642 double newVReg = regionArea ? regRegVolumes*pxArea/regionArea : 0.0;
643 px->vReg.push_back(newVReg); // 20121108 BUG !!! solved // as now we have the area we could also use here entryVolHa
644 // only a share of the exogenous area goes up, the regeneration one doesn't yet reach tp0:
645 // areaFirstProdClass = (1.0 / px->tp.at(j).at(0) ) * px->area_l.at(j).at(0);
646 areaFirstProdClass = (1.0 / ((double) tp_u0) ) * px->initialDc0Area.at(j);
647 // in the exogenous period we are exogenously upgrading u0->u1 some areas but, as we do not have the regeneration
648 // are corresponding to that we have also to manually add it to u0
649 //px->area_l.at(j).at(0) += areaFirstProdClass;
650 //areaFirstProdClass = entryVolHa ? newVReg*1000000 /entryVolHa:0.0;
651 //if (pxId == 3550.0 && j==3){
652 // cout << "got the pixel" << endl;
653 //}
654
655 #ifdef QT_DEBUG
656 if (areaFirstProdClass<0.0){
657 // msgOut(MSG_CRITICAL_ERROR,"Negative regeneration volumes in exogenous regeneration");
658 }
659 if (areaFirstProdClass > px->area_l.at(j).at(0)){
660 //msgOut(MSG_CRITICAL_ERROR,"Moving up area higher than available area in exogenous regeneration !");
661 }
662 #endif
663 // vReg and entryVolHa are NOT the same thing. vReg is the yearly regeneration volumes
664 // for the whole region. We can use them when we don't know the harvested area
665 // entryVolHa can lead to vReg calculation only when we know the regeneration area. So in the
666 // first years we use vReg and subsequently the endogenous one.
667 }
668
669 //double harvestedArea = 0;
670
671 if(useDeathTimber){
672 shareMortalityUsableTimber = gfd("shareMortalityUsableTimber",r2,ft,"");
673 } else {
674 shareMortalityUsableTimber = 0.0;
675 }
676
677 for (uint u=0; u<dClasses.size(); u++){
678 string dc = dClasses[u];
679 double hr = 0;
680 double age = getAvgAgeByDc(px, j, u);
681
682 //double pastYearVol_reg = u ? gfd("vol",r2,ft,dc,thisYear-1): 0;
683 double pastYearVol = px->vol_l.at(j).at(u);
684 vector <double> hV_byPrd;
685 vector <double> hr_byPrd;
686
687 // harvesting rate & volumes...
688 // hr is by region.. there is no reason the RATE of harvesting in one pixel would differ from that in another pixel
689 //hr(u,i,essence,lambda,t) = sum(p_pr, prov(u,essence,lambda,p_pr)*RPAR('st',i,p_pr,t)/In(i,p_pr,t));
690 //hV(u,i,essence,lambda,t) = hr(u,i,essence,lambda,t) * V(u,i,lambda,essence,t-1);
691 //hV_byPrd(u,i,essence,lambda,p_pr,t) = prov(u,essence,lambda,p_pr)*(RPAR('st',i,p_pr,t)/In(i,p_pr,t))*V(u,i,lambda,essence,t-1);
692 for(uint pp=0;pp<priProducts.size();pp++){
693 double st = gpd("st_fromHarv",r2,priProducts[pp]);
694 double in = gpd("in",r2,priProducts[pp]);
695 double hr_pr = in ? app(priProducts[pp],ft,dc)*st/in : 0.0;
696 hr_byPrd.push_back( hr_pr);
697 hr += hr_pr;
698 }
699
700 // adjusting for overharvesting..
701 // 20160204: inserted to account for the fact that we let supply be marginally higher than "in" in the market module, so that the solver can solve
702 double origHr = hr;
703 hr = min(1.0,hr);
704 for(uint pp=0;pp<priProducts.size();pp++){
705 double hr_pr = origHr ? hr_byPrd[pp] * min(1.0,1.0/origHr) : 0.0;
706 hV_byPrd.push_back( hr_pr*pastYearVol*px->avalCoef.at(j));
707 }
708
709 double hV = hr*pastYearVol*px->avalCoef.at(j);
710
711
712 hV_byDiam.push_back(hV);
713 hV_byDiamAndPrd.push_back(hV_byPrd);
714
715 // post harvesting remained volumes computation..
716 // loop(u$(ord(u)=1),
717 // first diameter class, no harvesting and fixed regeneration..
718 // V(u,i,lambda,essence,t)=(1-1/(tp(u,i,lambda,essence))-mort(u,i,lambda,essence) )*V(u,i,lambda,essence,t-1)
719 // +Vregen(i,lambda,essence,t);
720 // );
721 // loop(u$(ord(u)>1),
722 // generic case..
723 // V(u,i,lambda,essence,t)=((1-1/(tp(u,i,lambda,essence))
724 // -mort(u,i,lambda,essence) - hr(u,i,essence,lambda,t))*V(u,i,lambda,essence,t-1)
725 // +(1/(tp(u-1,i,lambda,essence)))*beta(u,i,lambda,essence)*V(u-1,i,lambda,essence,t-1));
726 double vol;
727 double tp = px->tp.at(j).at(u); //gfd("tp",regId,ft,dc);
728 double mort = px->mort.at(j).at(u); //gfd("mortCoef",regId,ft,dc);
729 double addMort = px->addMort.at(j).at(u); // fire mortality
730 double vReg = px->vReg.at(j); //gfd("vReg",regId,ft,""); // Taking it from the memory database as we could be in a fixed vReg scenario and not having calculated it from above!
731 double beta = px->beta.at(j).at(u); //gfd("betaCoef",regId,ft,dc);
732 //double hv2fa = gfd("hv2fa",regId,ft,dc);
733 double vHa = px->vHa.at(j).at(u); //gfd("vHa",regId,ft,dc);
734 double finalHarvestFlag = gfd("finalHarvestFlag",regId,ft,dc);
735
736 double vMort = mort*pastYearVol;
737 double vMortAdd = addMort*pastYearVol;
738
739 vMort_byDc.push_back(vMort);
740 vMortAdd_byDc.push_back(vMortAdd); // fire mortality
741
742 if(useDeathTimber){
743 iisskey key(thisYear,r2,ft,dc);
744
745 MD->deathTimberInventory_incrOrAdd(key,(vMort+vMortAdd)*shareMortalityUsableTimber);
746 }
747
748 if(u==0){
749 vol = 0.0;
750 }else if(u==1){
751 vol = max(0.0,(1-1/tp-mort-addMort))*pastYearVol+vReg; //Antonello, "bug" fixed 20160203: In case of very strong mortality this quantity (that doesn't include harvesting) could be negative!
752 //double debug = vol;
753 #ifdef QT_DEBUG
754 if ((1-1/tp-mort-addMort)<0.0){
755 msgOut(MSG_DEBUG,"The sum of leaving trres and mortality would have lead to nevative volume if we didn't put a max. 1/tp: "+d2s(1/tp)+", mort: "+d2s(mort)+", total coeff: "+d2s((1-1/tp-mort))+" ");
756 }
757 #endif
758 } else {
759 // time of passage and volume of smaller diameter class
760 double inc = (u==dClasses.size()-1)?0:1./tp; // we exclude the possibility for trees in the last diameter class to move to an upper class
761 double tp_1 = px->tp.at(j).at(u-1); //gfd("tp",regId,ft,dClasses[u-1]);
762 double pastYearVol_1 = px->vol_l.at(j).at(u-1); //gfd("vol",regId,ft,dClasses[u-1],thisYear-1);
763 //vol = max(0.0,(1-inc-mort-hr)*pastYearVol+(1/tp_1)*beta*pastYearVol_1);
764 vol = max(0.0,(1-inc-mort-addMort)*pastYearVol-hV+(1/tp_1)*beta*pastYearVol_1); // I can't use any more hr as it is the harvesting rate over the available volumes, not the whole ones
765 #ifdef QT_DEBUG
766 if ((1-inc-mort)*pastYearVol-hV+(1/tp_1)*beta*pastYearVol_1 < 0){
767 double realVolumes = (1-inc-mort-addMort)*pastYearVol-hV+(1/tp_1)*beta*pastYearVol_1;
768 msgOut(MSG_DEBUG,"Negative real volumes ("+d2s(realVolumes)+"), possibly because of little bit larger bounds in the market module to avoid zeros. Volumes in the resource module set back to zero, so it should be ok.");
769 }
770 #endif
771 }
772 if(u != 0){ // this if is required to avoid a 0/0 and na error that then propage also in vSum()
773 double inc = (u==dClasses.size()-1)?0:1.0/tp; // we exclude the possibility for trees in the last diameter class to move to an upper class
774 double volumesMovingUp = inc*pastYearVol;
775 double pastArea = px->area_l.at(j).at(u);
776
777 areasMovingUp.at(u) = inc*pastArea;
778
780 hArea_byDc.push_back(finalHarvestFlag*1000000*hV/vHa); // volumes are in Mm^3, area in ha, vHa in m^3/ha
781 } else {
782 double finalHarvestedVolumes = finalHarvestFlag* hV;
783 double finalHarvestedRate = pastYearVol?finalHarvestedVolumes/pastYearVol:0.0; // Here we want the harvested rate over the whole volumes, not just the available ones, so we don't need to multiply to px->avalCoef.at(j)
784 #ifdef QT_DEBUG
785 if (finalHarvestedRate > 1.0){
786 msgOut(MSG_CRITICAL_ERROR,"Negative final harvested rate.");
787 }
788 #endif
789 hArea_byDc.push_back(finalHarvestedRate*pastArea); // volumes are in Mm^3, area in ha, vHa in m^3/ha
790 }
791 px->area.at(j).at(u) = max(0.0, px->area_l.at(j).at(u) - areasMovingUp.at(u) + areasMovingUp.at(u-1) - hArea_byDc.at(u));
792 #ifdef QT_DEBUG
793 if ((px->area_l.at(j).at(u) - areasMovingUp.at(u) + areasMovingUp.at(u-1) - hArea_byDc.at(u))< 0.0){
794 msgOut(MSG_DEBUG,"If not for a max, we would have had a negative area ("+d2s(px->area_l.at(j).at(u) - areasMovingUp.at(u) + areasMovingUp.at(u-1) - hArea_byDc.at(u))+" ha).");
795 }
796 #endif
797 } else {
798 areasMovingUp.at(u) = areaFirstProdClass;
799 hArea_byDc.push_back(0.);
800 px->area.at(j).at(u) = px->area_l.at(j).at(u) - areasMovingUp.at(u) - hArea_byDc.at(u);
801 //if (pxId == 3550.0 && j==3){
802 // cout << "got the pixel" << endl;
803 //}
804 }
805 newVol_byDiam.push_back(vol);
806 #ifdef QT_DEBUG
807 if(px->area.at(j).at(u)< 0.0 || areasMovingUp.at(u) < 0.0 || hArea_byDc.at(u) < 0.0 ){
808 msgOut(MSG_CRITICAL_ERROR, "Negative values in runBiologicalModel");
809 }
810 #endif
811
812 //double debug = hv2fa*hr*pastYearVol*100;
813 //cout << "regId|ft|dc| debug | freeArea: " << r2 << "|"<<ft<<"|"<<dc<<"| "<< debug << " | " << freeArea_byU << endl;
814
815 //sfd(hr,"hr",regId,ft,dc);
816 //sfd(hV,"hV",regId,ft,dc);
817 //sfd(vol,"vol",regId,ft,dc);
818
819 //sfd(freeArea_byU,"harvestedArea",regId,ft,dc,DATA_NOW,true);
820 double productivity = hArea_byDc.at(u) ? (1000000.0 * hV_byDiam.at(u))/(hArea_byDc.at(u)*age) : 0.0;
821 productivity_byDc.push_back(productivity);
822 } // end foreach diameter classes
823 px->hVol.push_back(hV_byDiam);
824 px->hVol_byPrd.push_back(hV_byDiamAndPrd);
825 px->hArea.push_back(hArea_byDc);
826 px->vol.push_back(newVol_byDiam);
827 px->vMort.push_back(vMort_byDc);
828 px->vMortAdd.push_back(vMortAdd_byDc); // fire mortality
829 px->hProductivity.push_back(productivity_byDc);
830
831
832 #ifdef QT_DEBUG
833 for (uint u=1; u<dClasses.size(); u++){
834 double volMort = vMort_byDc[u];
835 double harvVol = hV_byDiam[u];
836 double vol_new = newVol_byDiam[u];
837 double vol_lagged = px->vol_l.at(j).at(u);
838 double gain = vol_new - (vol_lagged-harvVol-volMort);
839 if (volMort > vol_lagged){
840 msgOut(MSG_CRITICAL_ERROR,"mort vol > lagged volumes ?");
841 }
842 }
843 #endif
844 } // end of each forest type
845 } // end of each pixel
846
847 #ifdef QT_DEBUG
848 // checking that in a region the total hVol is equal to the st for each products. 20150122 Test passed with the new availCoef
849 double sumSt = 0.0;
850 double sumHv = 0.0;
851 for(uint pp=0;pp<priProducts.size();pp++){
852 sumSt += gpd("st_fromHarv",r2,priProducts[pp]);
853 }
854 for (uint p=0;p<regPx.size();p++){
855 for(uint j=0;j<fTypes.size();j++){
856 for (uint u=0; u<dClasses.size(); u++){
857 for(uint pp=0;pp<priProducts.size();pp++){
858 // by ft, dc, pp
859 sumHv += regPx[p]->hVol_byPrd[j][u][pp];
860 }
861 }
862 }
863 }
864 if(abs(sumSt-sumHv) > 0.000001){
865 msgOut(MSG_DEBUG, "St and harvested volumes diverge in region "+REG->getRegSName()+". St: "+d2s(sumSt)+" hV: "+d2s(sumHv));
866 }
867 #endif
868 } // end of each region
869
870}
871
872
873
874void
876 msgOut(MSG_INFO, "Starting management module..");
877 vector<string> allFTypes = MTHREAD->MD->getForTypeIds(true);
878 map<string,double> hAreaByFTypeGroup = vectorToMap(allFTypes,0.0);
879 int thisYear = MTHREAD->SCD->getYear();
880 double ir = MTHREAD->MD->getDoubleSetting("ir");
881
882 // Post optimisation management module..
883 for(uint i=0;i<regIds2.size();i++){
884 int r2 = regIds2[i];
885 int regId = r2;
886 ModelRegion* REG = MTHREAD->MD->getRegion(r2);
887 regPx = REG->getMyPixels();
888
889 string flagHartman = MTHREAD->MD->getStringSetting("flagHartman",DATA_NOW,r2);
890 double pickling = MTHREAD->MD->getDoubleSetting("pickling"); // pickling factor used to factor in sequestration ex-situ and substitution effects
891
892 //double exogenousReference = MTHREAD->MD->getBoolSetting("exogenousReference"); // new setting to distinguish between exogenous and endogenous carbon reference levels
893
894
895
896
897 // Caching futureCarbonPrices..
898 double carbonPrice_NOW = gpd("carbonPrice",r2,"",thisYear+1); // first carbon payments take place the year after replanting
899 vector<double> futureCarbonPrices;
900 if(flagHartman!="NONE"){
901 for (int y=0; y<1000; y++) {
902 futureCarbonPrices.push_back(gpd("carbonPrice",r2,"",thisYear + y));
903 }
904 }
905
906 vector<double> polBal_fiSub (fTypes.size(),0.0); // Expenses for forest investment subsides by region and destination ft €
907
908 // Dealing with area change..
909 double fArea_reg = REG->getArea();
910 double fArea_diff = 0.0;
911 double fArea_reldiff = 0.0;
912 if(forestAreaChangeMethod=="relative"){
913 fArea_reldiff = gfd("forestChangeAreaIncrementsRel",r2,"","",DATA_NOW);
914 fArea_diff = fArea_reg * fArea_reldiff;
915 } else if (forestAreaChangeMethod=="absolute"){
916 fArea_diff = gfd("forestChangeAreaIncrementsHa",r2,"","",DATA_NOW);
917 fArea_reldiff = fArea_diff / fArea_reg;
918 }
919
920 double regHArea = 0.0; // for the warning
921
922 // Management rate
923 // 20170123: if mr is defined at regional level in the forData table, good, otherwise we look at settings table.
924 // If it isn't there as well, we rise a critical error.
925 // 20170302: removed the (false positive) error message raised if we put mr in setting table
926 // 20180626: mr is now, like all the other settings, powered with a regional dimension, so this looking in the
927 // forest data is no longer needed, but we keep it as some old projects have mr in the forData sheet.
928 double mr;
929 int origErrorLevel = MD->getErrorLevel();
930 MTHREAD->MD->setTempBool(true); // this should not be needed as it is set positive in getForData() map soubroutine..
932 mr = MD->getForData("mr",regId,"","");
933 if(!MTHREAD->MD->getTempBool()) { // mr not found in the forest data
935 mr = MD->getDoubleSetting("mr",DATA_NOW,regId);
936 }
937 //try{
938 // mr = MD->getForData("mr",regId,"","");
939 //} catch (...){
940 // mr = MD->getDoubleSetting("mr");
941 //}
942 MD->setErrorLevel(origErrorLevel);
943
944 // Caching carbon reference levels by ftypes
945 vector<vector<double>> futureCarbonRef;
946 vector<vector<double>> futureExtraBiomass_ratio;
947
948 if(flagHartman!="NONE"){
949 for(uint j=0;j<fTypes.size();j++){
950 string ft = fTypes[j];
951 vector<double> futureCarbonRef_ft;
952 vector<double>futureExtraBiomass_ratio_ft;
953 for (int y=0; y<1000; y++) {
954 double carbonRef = gfd("carbonRef",r2,ft,"", thisYear + y);
955 futureCarbonRef_ft.push_back(carbonRef);
956 double extraBiomass_ratio = gfd("extraBiomass_ratio",regId,ft,"",DATA_NOW);
957 futureExtraBiomass_ratio_ft.push_back(extraBiomass_ratio);
958 }
959 futureCarbonRef.push_back(futureCarbonRef_ft);
960 futureExtraBiomass_ratio.push_back(futureExtraBiomass_ratio_ft);
961 }
962 }
963
964
965 for (uint p=0;p<regPx.size();p++){
966 Pixel* px = regPx[p];
967 px->expectedReturns.clear();
968 px->expectedReturnsNotCorrByRa.clear(); // BUG discovered 20160825
969 //px->expectedReturnsCarbon.clear(); TODO
970 px->optDc.clear();
971 px->optFtChosen.clear();
972 px->optDcChosen.clear();
973 px->expectedAnnualIncome_carbon.clear();
974 px->expectedAnnualIncome_timber.clear();
975 resetMapValues(hAreaByFTypeGroup,0.0);
976 double totalHarvestedAreaOrig = vSum(px->hArea); // still need to remove the forest decrease areas..
977 double totalHarvestedArea = 0.0; // account for (eventual) forest decreases area)
978 vector<double> thisYearRegAreas(fTypes.size(),0.0); // initialize a vector of fTypes.size() zeros.
979 vector<double> expectedReturns(fTypes.size(),0.0); // uncorrected expected returns (without considering transaction costs). These are in form of eai
980 vector<double> expectedReturnsCarbon(fTypes.size(),0.0); // expected return of the carbon only
981 vector<int> rotation_dc(fTypes.size(),0.0); //vector to store optimal dc for each ftype (to be reused when flagHartman == "BAU")
982 vector< vector<double> > deltaAreas(fTypes.size()+1, vector<double>(fTypes.size()+1,0)); // matrix of regeneration areas ft_from/ft_to
983
984
985
986 double fArea_px = vSum(px->area);
987 double fArea_diff_px = fArea_px * fArea_diff/ fArea_reg;
988
989 double fArea_incr = max(0.0,fArea_diff_px);
990 double fArea_incr_rel = totalHarvestedAreaOrig?fArea_incr/totalHarvestedAreaOrig:0.0;
991 double fArea_decr = - min(0.0,fArea_diff_px);
992 double fArea_decr_rel = totalHarvestedAreaOrig?min(1.0,fArea_decr/totalHarvestedAreaOrig):0.0; // we can "remove" at maximum what has been harvested
993 regHArea += totalHarvestedAreaOrig;
994 totalHarvestedArea = totalHarvestedAreaOrig *(1-fArea_decr_rel);
995
996
997 // A - Computing the harvestingArea by parent ft group (for the allocation according to the prob of presence):
998 for(uint j=0;j<fTypes.size();j++){
999 string ft = fTypes[j];
1000 string parentFt = MTHREAD->MD->getForTypeParentId(ft);
1001 double hAreaThisFt=vSum(px->hArea.at(j))*(1-fArea_decr_rel);
1002 incrMapValue(hAreaByFTypeGroup,parentFt,hAreaThisFt); // increment the parent ft of the harvested area, neeed for assigning the frequences (prob. of presence)
1003 }
1004
1005 // B - Computing the uncorrected expected returns (without considering transaction costs)
1006 // 20120910, Antonello: changed.. calculating the expected returns also for fixed and fromHrLevel regeneration (then not used but gives indication)
1007 // calculating the expected returns..
1008 // loop ( (u,i,essence,lambda,p_pr),
1009 // if (sum(u2, hV(u2,i,essence,lambda,t))= 0,
1010 // expRetPondCoef(u,i,essence,lambda,p_pr) = 0;
1011 // else
1012 // expRetPondCoef(u,i,essence,lambda,p_pr) = hV_byPrd(u,i,essence,lambda,p_pr,t)/ sum(u2, hV(u2,i,essence,lambda,t));
1013 // );
1014 // );
1015 // expReturns(i,essence,lambda) = sum( (u,p_pr),
1016 // RPAR("pl",i,p_pr,t)*hv2fa(i,essence,lambda,u)*(1/df_byFT(u,i,lambda,essence))* // df_byFT(u,i,lambda,essence)
1017 // expRetPondCoef(u,i,essence,lambda,p_pr)
1018 // );
1019
1020 for(uint j=0;j<fTypes.size();j++){
1021 string ft = fTypes[j];
1022
1023 // Added for carbon project
1024 double co2content_inventory = gfd("co2content_inventory",r2,ft,"",DATA_NOW)/1000; // kilos to tons
1025 // End of added
1026
1027
1028 double expReturns = std::numeric_limits<double>::lowest(); // these are reset at the beginning of each forest type, and need to be declared before the next loop
1029 double expReturns_carbon = std::numeric_limits<double>::lowest();
1030 double expReturns_timber = std::numeric_limits<double>::lowest();
1031 int optDc = 0; // "optimal diameter class", the one on which the expected returns are computed
1032 for (uint u=0; u<dClasses.size(); u++){
1033 string dc = dClasses[u];
1034 double vHa = px->vHa_exp.at(j).at(u);
1035 double finalHarvestFlag = gfd("finalHarvestFlag",regId,ft,dc);
1036 double cumTp_u = px->cumTp_exp.at(j).at(u);
1037 for (uint pp=0;pp<priProducts.size();pp++){
1038 double pol_mktDirInt_s = gpd("pol_mktDirInt_s",regId,priProducts[pp]); // This is valid also for the exported timber
1039 double pol_mktDirInt_s_fut = gpd("pol_mktDirInt_s",regId,priProducts[pp],thisYear+cumTp_u);
1040 double pl = gpd("pl",regId,priProducts[pp])*pol_mktDirInt_s; // note that this is the OBSERVED price. If we call it at current year+cumTp_u we would have the expected price. But we would first have to compute it, as pw is weigthed price world-local and we don't have local price for the future. DONE 20141202 ;-)
1041 double worldCurPrice = gpd("pl",WL2,priProducts[pp])*pol_mktDirInt_s*pol_mktDirInt_s;
1042 double worldFutPrice = gpd("pl",WL2,priProducts[pp],thisYear+cumTp_u)*pol_mktDirInt_s_fut;
1043 double sl = gpd("sl",regId,priProducts[pp]);
1044 double sa = gpd("sa",regId,priProducts[pp]);
1045 double pw_exp = computeExpectedPrice(pl, worldCurPrice, worldFutPrice, sl, sa, px->expTypePrices); //20141030: added the expected price!
1046 double raw_amount = finalHarvestFlag*pw_exp*vHa*app(priProducts[pp],ft,dc); // B.U.G. 20121126, it was missing app(pp,ft,dc) !!
1047 double anualised_amount_timber = MD->calculateAnnualisedEquivalent(raw_amount,cumTp_u, ir); // expected revenues from timber
1048 double anualised_amount = anualised_amount_timber; // total expected revenues (for now there is no carbon payment yet)
1049
1050 // If carbon payments are included and the reference is an exogenous fixed value (e.g. year of reference), we compute revenues from carbon after retrieving reference from input files
1051 // Added for carbon project-------------------------------------------------------------------------
1052 double raw_amount_carbon = 0; // declared and initialized before IF statement and FOR loop
1053 if (flagHartman =="EXO") {
1054 double carbonPrice_y = carbonPrice_NOW;
1055 for (int y=0; y<cumTp_u; y++) {
1056 //double carbonPrice_y = futureCarbonPrices[y];
1057 double carbonRef = futureCarbonRef[j][y];
1058 double expectedVHa_y = getVHaByYear(px, j, y, futureExtraBiomass_ratio[j][y], regId);
1059 double carbonPayment_y = carbonPrice_y*ir*(co2content_inventory*expectedVHa_y - carbonRef);
1060 raw_amount_carbon += carbonPayment_y*pow(1+ir,cumTp_u - y); // raw amount is calculated iteratively
1061 }
1062 //Now we add the final payment corresponding to carbon sequestered in long-lived products
1063 //double CarbonPrice_cumTp = futureCarbonPrices[floor(cumTp_u)]; INSTEAD WE USE CURRENT CARBON PRICE
1064 raw_amount_carbon += pickling * carbonPrice_y * finalHarvestFlag * vHa * app(priProducts[pp],ft,dc); // we add the final payment to raw amount for the final year, outside the FOR loop
1065 }
1066 // And we finally annualise everything
1067 double anualised_amount_carbon = MD->calculateAnnualisedEquivalent(raw_amount_carbon,cumTp_u, ir);
1068 anualised_amount += anualised_amount_carbon; // anualised_amount already had the timber revenues in it, now we add carbon
1069
1070 // End of added-----------------------------------------------------------------------------------
1071
1072
1073
1074 if (anualised_amount>expReturns) {
1075 expReturns=anualised_amount;
1076 optDc = u;// this is the index of the optimal dc, not the dc itself
1077 expReturns_carbon = anualised_amount_carbon;
1078 expReturns_timber = anualised_amount_timber;
1079 }
1080 } // end for each product
1081 } // end for each diameter class
1082
1083 //results are pushed back to pixel only if they are final
1084 if (flagHartman=="NONE" || flagHartman=="EXO") {
1085 px->expectedAnnualIncome_timber.push_back(expReturns_timber);
1086 px->expectedAnnualIncome_carbon.push_back(expReturns_carbon);
1087 px->expectedReturnsNotCorrByRa.push_back(expReturns);
1088 px->optDc.push_back(optDc);
1089 }
1090
1091 //calculation of expected returns with risk aversion
1092 if(MD->getBoolSetting("heterogeneousRiskAversion",DATA_NOW)){
1093 double ra = px->getDoubleValue("ra");
1094 double cumMort = 1-px->cumAlive_exp.at(j).at(optDc);
1095 //cout << px->getID() << "\t" << ft << "\t\t" << "optDc" << optDc << "\t" << cumMort << endl;
1096 double origExpReturns = expReturns;
1097 expReturns = origExpReturns * (1.0 - ra*cumMort);
1098 }
1099
1100 // results are pushed back to pixel only if they are final
1101 if (flagHartman=="NONE" || flagHartman=="EXO") {
1102 px->expectedReturns.push_back(expReturns); // if there is no risk version, then expectedReturns and expectedReturnsNotCorrByRa will have the same value
1103 }
1104
1105 expectedReturns.at(j) = expReturns;
1106 rotation_dc.at(j) = optDc; // here we store the index of optimal dc for the forest type considered
1107
1108 } // end foreach forest type
1109
1110
1111 // Looping again by forest types, here by ft in input (those harvested)
1112 // Added for second carbon project-------------------------------------------------------------------------------------------------------------------------------------
1114 // If carbon payments are included and the reference is BAU (i.e., endogenous), we need a new loop-------------------------------------------------------------------
1114 // In this case, the reference is what happens without policy, i.e. the result of the previous loop when (flagHartman =="REFERENCE") is FALSE
1115 // So we just redo another loop with carbon payments using the previous result as reference
1116
1117 if (flagHartman =="ENDO") { // if carbon payments take place and reference is endogenous (is BAU)
1118 // STEP 1 - retrieve optimal regeneration choice without carbon payments
1119 int ft_opt_index = getMaxPos(expectedReturns); // Faustmannian optimal forest type index
1120 string ft_opt = fTypes[ft_opt_index]; // Faustmannian optimal forest type name
1121 int dc_opt_index = rotation_dc[ft_opt_index]; // Faustmannian optimal diameter class index
1122 string dc_opt = dClasses[dc_opt_index]; // Faustmannian optimal diameter class name
1123 double cumTP_opt = px->cumTp_exp.at(ft_opt_index).at(dc_opt_index); // Faustmannian optimal rotation length
1124 double co2content_inventory_opt = gfd("co2content_inventory",r2,ft_opt,"",DATA_NOW)/1000; // kilos to tons
1125
1126 // STEP 2 - calculate expected carbon contents on the optimal choice without carbon payments (Faustmannian optimum), to use as reference
1127 vector <double> FutureCarbonContent_opt;
1128 for (int y=0; y<=cumTP_opt; y++) {
1129 double expectedVHa_opt_y = getVHaByYear(px, ft_opt_index, y, futureExtraBiomass_ratio[ft_opt_index][y], regId);
1130 FutureCarbonContent_opt.push_back(co2content_inventory_opt*expectedVHa_opt_y);
1131 }
1132 // the Hartmanian choice will usually be longer, so the Faustmannian rotation needs to be replicated several times (at least 2000 years, should be enough)
1133 while (FutureCarbonContent_opt.size()<=2000){
1134 FutureCarbonContent_opt.insert(std::end(FutureCarbonContent_opt), std::begin(FutureCarbonContent_opt), std::end(FutureCarbonContent_opt));
1135 }
1136 //int cumTP_opt_floor = FutureCarbonContent_opt.size(); // need to get an integer value
1137
1138 // STEP 3 - new loop to find optimal regeneration choice WITH carbon payments, using the choice WITHOUT carbon payments as a reference
1139
1140 for(uint j=0;j<fTypes.size();j++){
1141 string ft = fTypes[j];
1142 double co2content_inventory = gfd("co2content_inventory",r2,ft,"",DATA_NOW)/1000; // kilos to tons
1143
1144 double expReturns_hartman = -10000000000; // need to initialize because we use it for a comparison
1145 double expReturns_timber_hartman; // no need to give an initial value because they are not used before being given a value
1146 double expReturns_carbon_hartman;
1147 int optDc_hartman = -1; // "optimal diameter class", the one on which the expected returns are computed
1148 for (uint u=1; u<dClasses.size(); u++){
1149 string dc = dClasses[u];
1150 double cumTp_u = px->cumTp_exp.at(j).at(u);
1151
1152 // First we calculate expected revenues from carbon (they used to be inside the products loop, but they don't need to be since they don't depend on products to be sold)
1153 double raw_amount_carbon=0;
1154 double carbonPrice_y = carbonPrice_NOW; //carbon price is by default current price, ie it is assumed to be constant from owner's perspective
1155 for (int y=0; y<=cumTp_u; y++) { // compute yearly carbon payments
1156 double expectedVHa_y = getVHaByYear(px, j, y, futureExtraBiomass_ratio[j][y], regId);
1157 double carbonPayment_y;
1158 carbonPayment_y = carbonPrice_y*ir*(co2content_inventory*expectedVHa_y - FutureCarbonContent_opt[y]);
1159
1160 //r if (y==0) { // we use euclidian division, so an exception must be made when denominator is equal to 0
1161 //r carbonPayment_y = 0; // in the first year, no forest can reach a non-zero volume so payments are 0
1162 //r} else {
1163 //r int factor = y/cumTP_opt_floor; // euclidian division of the two lengths we compare
1164 //r if (cumTP_opt_floor%y >0) { // if the year is NOT a multiple of optimal rotation time, we need to take the year-th value of vector modulus how many rotations we have passed
1165 //r carbonPayment_y = carbonPrice_y*ir*(co2content_inventory*expectedVHa_y - FutureCarbonContent_opt[y - factor*cumTP_opt_floor]);
1166 //r } else { // if the year is EXACTLY a multiple of optimal rotation time, we need to take the last value of vector, not the first one
1167 //r carbonPayment_y = carbonPrice_y*ir*(co2content_inventory*expectedVHa_y - FutureCarbonContent_opt[cumTP_opt_floor]);
1168 //r }
1169 //r}
1170 raw_amount_carbon += carbonPayment_y*pow(1+ir,cumTp_u - y); // all payments compounded to end of rotation
1171 }
1172
1173 // And now we compute revenues from timber (we try several products, as in the Faustmannian rotation)
1174 double vHa = px->vHa_exp.at(j).at(u);
1175 double finalHarvestFlag = gfd("finalHarvestFlag",regId,ft,dc);
1176 for (uint pp=0;pp<priProducts.size();pp++){
1177 double pl = gpd("pl",regId,priProducts[pp]); // note that this is the OBSERVED price. If we call it at current year+cumTp_u we would have the expected price. But we would first have to compute it, as pw is weigthed price world-local and we don't have local price for the future. DONE 20141202 ;-)
1178 double worldCurPrice = gpd("pl",WL2,priProducts[pp]);
1179 double worldFutPrice = gpd("pl",WL2,priProducts[pp],thisYear+cumTp_u);
1180 double sl = gpd("sl",regId,priProducts[pp]);
1181 double sa = gpd("sa",regId,priProducts[pp]);
1182 double pw_exp = computeExpectedPrice(pl, worldCurPrice, worldFutPrice, sl, sa, px->expTypePrices); //20141030: added the expected price!
1183 double raw_amount = finalHarvestFlag*pw_exp*vHa*app(priProducts[pp],ft,dc); // B.U.G. 20121126, it was missing app(pp,ft,dc) !!
1184 double anualised_amount_timber = MD->calculateAnnualisedEquivalent(raw_amount,cumTp_u, ir);
1185
1186 // Now we add the final carbon payment corresponding to carbon sequestered in long-lived products
1187 raw_amount_carbon += pickling * carbonPrice_y * finalHarvestFlag * vHa * app(priProducts[pp],ft,dc);
1188 // And we annualise carbon revenues and add them to annualised timber revenues
1189 double anualised_amount_carbon = MD->calculateAnnualisedEquivalent(raw_amount_carbon,cumTp_u, ir);
1190 //px->expectedReturnsCarbon = anualised_amount_carbon; TODO
1191
1192 double anualised_amount = anualised_amount_timber + anualised_amount_carbon;
1193
1194 // If the new expected revenues is higher than previously, we store the maximum value and corresponding dc.
1195 if (anualised_amount>=expReturns_hartman) {
1196 expReturns_hartman=anualised_amount;
1197 expReturns_carbon_hartman = anualised_amount_carbon;
1198 expReturns_timber_hartman = anualised_amount_timber;
1199 optDc_hartman = u;
1200 }
1201 } // end foreach primary product
1202 } // end foreach DC
1203
1204 // Here we send results to pixel because they are final
1205 px->expectedAnnualIncome_timber.push_back(expReturns_timber_hartman);
1206 px->expectedAnnualIncome_carbon.push_back(expReturns_carbon_hartman);
1207 px->expectedReturnsNotCorrByRa.push_back(expReturns_hartman);
1208 px->optDc.push_back(optDc_hartman);
1209
1210 // And with risk aversion
1211 if(MD->getBoolSetting("heterogeneousRiskAversion",DATA_NOW)){
1212 double ra = px->getDoubleValue("ra");
1213 double cumMort = 1-px->cumAlive_exp.at(j).at(optDc_hartman);
1214 //cout << px->getID() << "\t" << ft << "\t\t" << "optDc" << optDc << "\t" << cumMort << endl;
1215 double origExpReturns_hartman = expReturns_hartman;
1216 expReturns_hartman = origExpReturns_hartman * (1.0 - ra*cumMort);
1217 }
1218
1219 px->expectedReturns.push_back(expReturns_hartman);
1220
1221 // Now we replace results from the previous loop with the new ones
1222
1223 expectedReturns.at(j) = expReturns_hartman;
1224 rotation_dc.at(j) = optDc_hartman;
1225
1226
1227 } // end foreach forest type
1228
1229 } // end IF Hartman reference is BAU (no policy)----------------------------------------------------------------
1230
1231 // End of added second carbon project---------------------------------------------------------------------------
1232
1233 for(uint j=0;j<fTypes.size();j++){
1234 string ft = fTypes[j];
1235 forType* thisFt = MTHREAD->MD->getForType(ft);
1236
1237 double harvestedAreaForThisFT = vSum(px->hArea.at(j))*(1-fArea_decr_rel); // gfd("harvestedArea",regId,ft,DIAM_ALL);
1238 deltaAreas[j][fTypes.size()] += vSum(px->hArea.at(j))*(fArea_decr_rel);
1239 vector<double> corrExpectedReturns(fTypes.size(),0.0); // corrected expected returns (considering transaction costs). These are in form of NPV
1240 vector<double> polBal_fiSub_unitvalue(fTypes.size(),0.0); // subside (tax) per ha per destination ft €/ha
1241
1242 // C - Computing the corrected expected returns including transaction costs
1243
1244 for(uint j2=0;j2<fTypes.size();j2++){
1245 string ft2 = fTypes[j2];
1246 double invTransCost = (gfd("invTransCost",regId,ft,ft2,DATA_NOW) * gfd("pol_fiSub",regId,ft,ft2,DATA_NOW) ) ;
1247 polBal_fiSub_unitvalue[j2] = gfd("invTransCost",regId,ft,ft2,DATA_NOW) * (gfd("pol_fiSub",regId,ft,ft2,DATA_NOW) - 1) ;
1248 corrExpectedReturns[j2] = (expectedReturns[j2]/ir)-invTransCost; // changed 20150718: npv = eai/ir + tr. cost // HUGE BUG 20151202: transaction costs should be REDUCED, not added to the npv...
1249 }
1250
1251 //int highestReturnFtIndex = getMaxPos(corrExpectedReturns);
1252
1253 // D - Assigning the Managed area
1254 // calculating freeArea at the end of the year and choosing the new regeneration area..
1255 //freeArea(i,essence,lambda) = sum(u, hv2fa(i,essence,lambda,u)*hr(u,i,essence,lambda,t)*V(u,i,lambda,essence,t-1)*100);
1256 //if(scen("endVreg") ,
1257 // regArea(i,essence,lambda,t) = freeArea(i,essence, lambda); // here we could introduce in/out area from other land usages
1258 //else
1259 // loop (i,
1260 // loop( (essence,lambda),
1261 // if ( expReturns(i,essence,lambda) = smax( (essence2,lambda2),expReturns(i,essence2,lambda2) ),
1262 // regArea (i,essence,lambda,t) = sum( (essence2, lambda2), freeArea(i,essence2, lambda2) ) * mr;
1263 // );
1264 // );
1265 // regArea(i,essence,lambda,t) = freeArea(i,essence, lambda)*(1-mr); // here we could introduce in/out area from other land usages
1266 // );
1267 //if (j==highestReturnFtIndex){
1268 // thisYearRegAreas[j] += totalHarvestedArea*mr;
1269 //}
1270 // If I Implement this I'll have a minimal diff in total area.. why ?????
1271
1272 int optFtChosen = getMaxPos(corrExpectedReturns);
1273 px->optFtChosen.push_back(optFtChosen);
1274 px->optDcChosen.push_back(px->optDc.at(optFtChosen));
1275
1276 thisYearRegAreas[getMaxPos(corrExpectedReturns)] += harvestedAreaForThisFT*mr;
1277 thisYearRegAreas[getMaxPos(expectedReturns)] += fArea_incr*mr/((double)fTypes.size()); // mr quota of new forest area assigned to highest expected returns ft (not considering transaction costs). Done for each forest types
1278
1279 deltaAreas[j][getMaxPos(corrExpectedReturns)] += harvestedAreaForThisFT*mr;
1280 deltaAreas[fTypes.size()][getMaxPos(expectedReturns)] += fArea_incr*mr/((double)fTypes.size());
1281
1282 polBal_fiSub[getMaxPos(corrExpectedReturns)] += harvestedAreaForThisFT*mr*polBal_fiSub_unitvalue[getMaxPos(corrExpectedReturns)];
1283
1284 // E - Assigning unmanaged area
1285 //for(uint j2=0;j2<fTypes.size();j2++){
1286 if(natRegAllocation=="pp"){ // according to prob presence
1287 //string ft2 = fTypes[j2];
1288 string parentFt = MTHREAD->MD->getForTypeParentId(ft);
1289 double freq = rescaleFrequencies ? gfd("freq_norm",regId,parentFt,""):gfd("freq",regId,parentFt,""); // "probability of presence" for unmanaged forest, added 20140318
1290 double hAreaThisFtGroup = findMap(hAreaByFTypeGroup,parentFt);
1291 double hRatio = 1.0;
1292 if(hAreaThisFtGroup>0){
1293 //double harvestedAreaForThisFT2 = vSum(px->hArea.at(j2));
1294 hRatio = harvestedAreaForThisFT/hAreaThisFtGroup;
1295 } else {
1296 int nFtChilds = MTHREAD->MD->getNForTypesChilds(parentFt);
1297 hRatio = 1.0/nFtChilds;
1298 }
1299 thisYearRegAreas[j] += totalHarvestedArea*(1-mr)*freq*hRatio;
1300 thisYearRegAreas[j] += fArea_incr*(1-mr)*freq*hRatio; // non-managed quota of new forest area assigning proportionally on pp at sp group level
1301 //thisYearRegAreas[j2] += harvestedAreaForThisFT*(1-mr)*freq*hRatio;
1302 // Here is the problem with building a transaction matrix ft_from/ft_to: we don't explicitly assign from/to for unmanaged areas !
1303 for(uint j2=0;j2<fTypes.size();j2++){
1304 deltaAreas[j2][j] += vSum(px->hArea.at(j2))*(1-fArea_decr_rel)*(1-mr)*freq*hRatio;
1305 }
1306 deltaAreas[fTypes.size()][j] += fArea_incr*(1-mr)*freq*hRatio;
1307
1308
1309
1310 } else { // prob presence not used..
1311
1312 // Accounting for mortality arising from pathogens. Assigning the area to siblings according to area..
1313 // TODO: chek that mortRatePath doesn't involve adding forest area to deltaAreas[j][""]
1314
1315 double mortRatePath = px->getPathMortality(ft, "0");
1316 if(mortRatePath > 0){
1317
1318 string parentFt = MTHREAD->MD->getForTypeParentId(ft);
1319 vector <string> siblings = MTHREAD->MD->getForTypeChilds(parentFt);
1320 vector <double> siblingAreas;
1321 for(uint j2=0;j2<siblings.size();j2++){
1322 if(siblings[j2]==ft){
1323 siblingAreas.push_back(0.0);
1324 } else {
1325 //string debug_sibling_ft = siblings[j2];
1326 //int debug_positin = getPos(debug_sibling_ft,fTypes);
1327 double thisSiblingArea = vSum(px->area.at(getPos(siblings[j2],fTypes)));
1328 siblingAreas.push_back(thisSiblingArea);
1329 }
1330 }
1331 double areaAllSiblings = vSum(siblingAreas);
1332 thisYearRegAreas[j] += harvestedAreaForThisFT*(1-mr)*(1-mortRatePath);
1333 deltaAreas[j][j] += harvestedAreaForThisFT*(1-mr)*(1-mortRatePath);
1334
1335 if(areaAllSiblings>0.0){ // area of siblings is >0: we attribute the area from the pathogen induced mortality to the siblings proportionally to area..
1336 for(uint j2=0;j2<siblings.size();j2++){
1337// int debug1 = getPos(siblings[j2],fTypes);
1338// double debug2= harvestedAreaForThisFT;
1339// double debug3 = 1.0-mr;
1340// double debug4 = mortRatePath;
1341// double debug5 = siblingAreas[j2];
1342// double debug6 = areaAllSiblings;
1343// double debug7 = harvestedAreaForThisFT*(1.0-mr)*(mortRatePath)*(siblingAreas[j2]/areaAllSiblings);
1344 thisYearRegAreas[getPos(siblings[j2],fTypes)] += harvestedAreaForThisFT*(1.0-mr)*(mortRatePath)*(siblingAreas[j2]/areaAllSiblings);
1345 deltaAreas[j][getPos(siblings[j2],fTypes)] += harvestedAreaForThisFT*(1.0-mr)*(mortRatePath)*(siblingAreas[j2]/areaAllSiblings);
1346 }
1347 } else if (siblings.size()>1) { // area of all siblings is 0, we just give them the mortality area in equal parts..
1348 for(uint j2=0;j2<siblings.size();j2++){
1349 if (siblings[j2] != ft){
1350 thisYearRegAreas[getPos(siblings[j2],fTypes)] += harvestedAreaForThisFT*(1.0-mr)*(mortRatePath)* 1.0 / (( (float) siblings.size())-1.0);
1351 deltaAreas[j][getPos(siblings[j2],fTypes)] += harvestedAreaForThisFT*(1.0-mr)*(mortRatePath)* 1.0 / (( (float) siblings.size())-1.0);
1352 }
1353 }
1354 }
1355 } else { // mortRatePath == 0
1356 thisYearRegAreas[j] += harvestedAreaForThisFT*(1.0-mr);
1357 deltaAreas[j][j] += harvestedAreaForThisFT*(1.0-mr);
1358 }
1359
1360 // Allocating non-managed quota of new forest area to ft proportionally to the current area share by ft
1361 double newAreaThisFt = vSum(px->area) ? fArea_incr*(1-mr)*vSum(px->area.at(j))/vSum(px->area): 0.0;
1362 thisYearRegAreas[j] += newAreaThisFt;
1363 deltaAreas[fTypes.size()][j] += newAreaThisFt;
1364
1365 if(! (thisYearRegAreas[j] >= 0.0) ){
1366 msgOut(MSG_ERROR,"thisYearRegAreas[j] is not non negative (j: "+i2s(j)+", thisYearRegAreas[j]: "+i2s( thisYearRegAreas[j])+").");
1367 }
1368 //thisYearRegAreas[j2] += harvestedAreaForThisFT*(1-mr);
1369 }
1370 //}
1371 } // end for each forest type
1372
1373 // adding regeneration area to the fist (00) diameter class
1374 for(uint j=0;j<fTypes.size();j++){
1375 px->area.at(j).at(0) += thisYearRegAreas.at(j);
1376 }
1377
1378 #ifdef QT_DEBUG
1379 double totalRegArea = vSum(thisYearRegAreas);
1380 if (! (totalRegArea==0.0 && totalHarvestedArea==0.0)){
1381 double ratio = totalRegArea / totalHarvestedArea ;
1382 if(rescaleFrequencies && (ratio < 0.99999999999 || ratio > 1.00000000001) && forestAreaChangeMethod == "fixed") {
1383 msgOut(MSG_CRITICAL_ERROR, "Sum of regeneration areas not equal to sum of harvested area in runManagementModel()!");
1384 }
1385 }
1386 #endif
1387
1388 px->regArea.insert(pair <int, vector<double> > (MTHREAD->SCD->getYear(), thisYearRegAreas));
1389 px->deltaArea = deltaAreas;
1390
1391 } // end of each pixel
1392 if (-fArea_diff > regHArea){
1393 msgOut(MSG_WARNING,"In region "+ i2s(regId) + " the exogenous area decrement ("+ d2s(-fArea_diff) +" ha) is higger than the harvesting ("+ d2s(regHArea) +" ha). Ratio forced to 1.");
1394 }
1395
1396 for(uint j=0;j<fTypes.size();j++){
1397 double debug = polBal_fiSub[j]/1000000;
1398 sfd((polBal_fiSub[j]/1000000.0),"polBal_fiSub",regId,fTypes[j],"",DATA_NOW,true); //M€
1399
1400 }
1401 } // end of each region
1402}
1403
// ModelCoreSpatial::cacheSettings() -- the signature line (orig. 1405) is missing from
// this extracted listing; identification follows the call in the file header:
// cacheSettings(); ///< cashe things like first year, second year, dClasses...
// Reads model-level settings once at model start and caches them into member
// variables: years, region ids, product lists and diameter classes.
// NOTE(review): original lines 1409, 1415 and 1420 are also missing from the listing
// (presumably secondYear = firstYear+1, the seeding of allProducts with priProducts,
// and one more statement) -- TODO confirm against the repository source.
1404 void
1406 msgOut(MSG_INFO, "Cashing initial model settings..");
1407 MD = MTHREAD->MD;
1408 firstYear = MD->getIntSetting("initialYear");
1410 thirdYear = firstYear+2;
1411 WL2 = MD->getIntSetting("worldCodeLev2");
1412 regIds2 = MD->getRegionIds(2); // level-2 region ids
1413 priProducts = MD->getStringVectorSetting("priProducts");
1414 secProducts = MD->getStringVectorSetting("secProducts");
1416 allProducts.insert( allProducts.end(), secProducts.begin(), secProducts.end() ); // append secondary products (allProducts presumably seeded with priProducts on the missing line 1415 -- TODO confirm)
1417 dClasses = MD->getStringVectorSetting("dClasses");
1418 pDClasses; // no-op statement -- production diameter classes: exclude the first diameter class below 15 cm
1419 pDClasses.insert(pDClasses.end(), dClasses.begin()+1, dClasses.end() ); // all diameter classes but the first
1421 l2r = MD->getRegionIds();
1422}
1423
// ModelCoreSpatial::cacheDynamicSettings() -- the signature line (orig. 1425) is
// missing from this extracted listing; identification follows the call in the file
// header: cacheDynamicSettings(); // cache settings that may change every year
// Re-reads (with an explicit DATA_NOW temporal dimension) the settings that can vary
// by simulation year and caches them into member variables.
1424 void
1426 // Settings need explicitly DATA_NOW to have a temporal dimension..
1427 msgOut(MSG_INFO, "Cashing model settings that may change every year..");
1428 regType = MTHREAD->MD->getStringSetting("regType",DATA_NOW); // how the regeneration should be computed (exogenous, from hr, from allocation choices)
1429 natRegAllocation = MTHREAD->MD->getStringSetting("natRegAllocation",DATA_NOW); // how to allocate natural regeneration
1430 rescaleFrequencies = MD->getBoolSetting("rescaleFrequencies",DATA_NOW);
1431 oldVol2AreaMethod = MD->getBoolSetting("oldVol2AreaMethod",DATA_NOW);
1432 //mr = MD->getDoubleSetting("mr");
1433 forestAreaChangeMethod = MTHREAD->MD->getStringSetting("forestAreaChangeMethod",DATA_NOW);
1434 ir = MD->getDoubleSetting("ir",DATA_NOW); // interest rate, used e.g. to turn annual expected returns into NPV (expectedReturns/ir)
1435}
1436
// ModelCoreSpatial::initializePixelVolumes() -- the signature line (orig. 1438) is
// missing from this extracted listing.
// Distributes the initial (firstYear) regional volumes by forest type and diameter
// class down to the pixels, proportionally to each pixel's share of the regional
// forest area: pxVol = regVol * pxArea/regForArea. Only runs when pixel data is used.
// NOTE(review): original line 1469, just before the closing brace, is also missing
// from the listing -- TODO confirm against the repository source.
1437 void
1439 msgOut(MSG_INFO, "Starting initializing pixel-level values");
1440
1441 // pxVol = regVol * pxArea/regForArea
1442 // this function can be done only at the beginning of the model, as it assumes that the distribution of volumes by diameter class in the pixels within a certain region is homogeneous, but as the model progresses along the time dimension this is no longer true.
1443 if(!MD->getBoolSetting("usePixelData")) return;
1444 for(uint i=0;i<regIds2.size();i++){
1445 ModelRegion* reg = MD->getRegion(regIds2[i]);
1446 vector <Pixel*> rpx = MTHREAD->GIS->getAllPlotsByRegion(regIds2[i]);
1447 for (uint j=0;j<rpx.size();j++){
1448 //int debugPx = rpx[j]->getID();
1449 //int debug2 = debugPx;
1450 rpx[j]->vol.clear(); // not actually necessary
1451 for(uint y=0;y<fTypes.size();y++){
1452 vector <double> vol_byu;
1453 double regForArea = reg->getValue("forArea_"+fTypes[y]);
1454 for (uint z=0;z<dClasses.size();z++){
1455 double regVol;
1456 regVol = z ? gfd("vol",regIds2[i],fTypes[y],dClasses[z],firstYear) : 0 ; // z==0 (first diameter class) -> 0; the other classes read the regional volume (the previous comment here stated the opposite of the code)
1457 double pxArea = rpx[j]->getDoubleValue("forArea_"+fTypes[y], true); // bug solved 20121109. get zero for no data
1458 if (pxArea<0.0){
1459 msgOut(MSG_CRITICAL_ERROR,"Error in initializePixelVolumes, negative pxArea!");
1460 }
1461 double pxVol = regForArea ? regVol * pxArea/regForArea: 0; // if we introduce new forest types without initial area we must avoid a 0/0 division
1462 //rpx[j]->changeValue(pxVol,"vol",fTypes[y],dClasses[z],firstYear);
1463 vol_byu.push_back(pxVol);
1464 }
1465 rpx[j]->vol.push_back(vol_byu);
1466 }
1467 }
1468 }
1470}
1471
1472/**
1473 * @brief ModelCoreSpatial::assignSpMultiplierPropToVols assigns the spatial multiplier (used in the time of return) based no longer on a Normal distribution but on the volumes present in the pixel: the more volume, the more the pixel is fit for the ft
1474 *
1475 * This function applies to the pixel a multiplier of the time of passage that is inversely proportional to the volume of that forest type present in the pixel.
1476 * The idea is that the spots where we observe more of a given forest type are probably the ones most suited to it.
1477 *
1478 * The overall multipliers **of time of passage** (that is, the one returned by Pixel::getMultiplier("tp_multiplier") ) will then be the product of this multiplier that account for spatial heterogeneity and of an eventual exogenous
1479 * multiplier that accounts for different scenarios among the spatio-temporal dimensions.
1480 *
1481 * Given that (forest type index omitted):
1482 * - \f$V_{p}\f$ = volume of a given ft in each pixel (p)
1483 * - \f$\bar{g}\f$ and \f$\sigma_{g}\f$ = regional average and standard deviation of the growth rate
1484 * - \f$m_{p}\f$ = multiplier of time of passage
1485 *
1486 * This multiplier is computed as:
1487 * - \f$ v_{p} = max(V) - V_{p}~~ \f$ A diff from the max volume is computed in each pixel
1488 * - \f$ vr_{p} = v_{p} * \bar{g}/\bar{v}~~ \f$ The volume diff is rescaled to match the regional growth rate
1489 * - \f$ vrd_{p} = vr_{p} - \bar{vr}~~ \f$ Deviation of the rescaled volumes are computed
1490 * - \f$ vrdr_{p} = vrd_{p} * \sigma_{g}/\sigma_{vr}~~ \f$ The deviations are then rescaled to match the standard deviations of the regional growth rate
1491 * - \f$ m_{p} = (vrdr_{p} + \bar{vr}) / \bar{g}~~ \f$ The multiplier is computed from the ratio of the average rescaled volumes plus rescaled deviation over the average growth rate.
1492 *
1493 * And it has the following properties:
1494 * - \f$\bar{m} = 1\f$
1495 * - \f$\sigma_{m} = cv_{g}\f$
1496 * - \f$m_{p} = V_{p}*\alpha+\beta\f$
1497 * - \f$m_{\bar{V}} = 1\f$
1498 *
1499 * For spreadsheet "proof" see the file computation_of_growth_multipliers_from_know_avg_sd_and_proportional_to_share_of_area_in_each_pixel.ods
1500 */
// ModelCoreSpatial::assignSpMultiplierPropToVols() -- the signature line (orig. 1502)
// is missing from this extracted listing; see the doxygen block above for the algebra.
// For each region and forest type, converts the distribution of pixel volumes into a
// per-pixel time-of-passage multiplier: less volume of a ft in a pixel -> larger
// multiplier (slower growth there). The multipliers are rescaled so that their
// regional average matches agr and their sd matches sStDev, then bounded to [0.4,1.6].
1501 void
1503
1504 if(!MTHREAD->MD->getBoolSetting("useSpatialVarPropToVol")){return;}
1505 for(uint r=0;r<regIds2.size();r++){
1506 int rId = regIds2[r];
1507 ModelRegion* reg = MD->getRegion(regIds2[r]); // NOTE(review): 'reg' is not used anywhere in this function
1508 vector <Pixel*> rpx = MTHREAD->GIS->getAllPlotsByRegion(regIds2[r]);
1509 for(uint f=0;f<fTypes.size();f++){
1510 string ft = fTypes[f];
1511 double agr = gfd("agr",regIds2[r],ft,""); // regional average growth rate
1512 double sStDev = gfd("sStDev",regIds2[r],ft,""); // regional sd of the growth rate
1513 vector<double> vols;
1514 vector<double> diffVols;
1515 vector<double> diffVols_rescaled;
1516 double diffVols_rescaled_deviation;
1517 double diffVols_rescaled_deviation_rescaled;
1518 double final_value;
1519 double multiplier;
1520 vector<double> multipliers; // for tests
1521
1522 double vol_max, rescale_ratio_avg, rescale_ratio_sd;
1523 double diffVols_avg, diffVols_rescaled_avg;
1524 double diffVols_rescaled_sd;
1525
1526 for (uint p=0;p<rpx.size();p++){
1527 Pixel* px = rpx[p];
1528 vols.push_back(vSum(px->vol[f])); // total pixel volume of this ft over all diameter classes
1529 } // end for each pixel
1530 vol_max=getMax(vols);
1531
1532 for(uint p=0;p<vols.size();p++){
1533 diffVols.push_back(vol_max-vols[p]); // v_p = max(V) - V_p
1534 }
1535
1536 diffVols_avg = getAvg(diffVols);
1537 rescale_ratio_avg = (diffVols_avg != 0.0) ? agr/diffVols_avg : 1.0; // guard against division by zero when all pixels hold the max volume
1538 for(uint p=0;p<diffVols.size();p++){
1539 diffVols_rescaled.push_back(diffVols[p]*rescale_ratio_avg); // vr_p: rescaled to match the regional growth rate
1540 }
1541 diffVols_rescaled_avg = getAvg(diffVols_rescaled);
1542 diffVols_rescaled_sd = getSd(diffVols_rescaled,false);
1543
1544 rescale_ratio_sd = (diffVols_rescaled_sd != 0.0) ? sStDev/diffVols_rescaled_sd : 1.0;
1545 for(uint p=0;p<diffVols_rescaled.size();p++){
1546 diffVols_rescaled_deviation = diffVols_rescaled[p] - diffVols_rescaled_avg;
1547 diffVols_rescaled_deviation_rescaled = diffVols_rescaled_deviation * rescale_ratio_sd; // deviations rescaled to match the regional sd
1548 final_value = diffVols_rescaled_avg + diffVols_rescaled_deviation_rescaled;
1549 multiplier = (agr != 0.0) ? min(1.6, max(0.4,final_value/agr)) : 1.0; //20151130: added bounds for extreme cases. Same bounds as in Gis::applySpatialStochasticValues()
1550 // multiplier = 1.0;
1551
1552 Pixel* px = rpx[p];
1553 px->setSpModifier(multiplier,f);
1554 multipliers.push_back(multiplier);
1555 }
1556
1557 #ifdef QT_DEBUG
1558 // Check relaxed as we introduced bounds that may change slightly the avg and sd...
1559 double avgMultipliers = getAvg(multipliers);
1560 double sdMultipliers = getSd(multipliers,false);
1561 if ( avgMultipliers < 0.9 || avgMultipliers > 1.1){
1562 msgOut(MSG_CRITICAL_ERROR, "The average of multipliers of ft "+ ft +" for the region " + i2s(rId) + " is not 1!");
1563 }
1564 if ( ( sdMultipliers - (sStDev/agr) ) < -0.5 || ( sdMultipliers - (sStDev/agr) ) > 0.5 ){
1565 double cv = sStDev/agr;
1566 msgOut(MSG_CRITICAL_ERROR, "The sd of multipliers of ft "+ ft +" for the region " + i2s(rId) + " is not equal to the spatial cv for the region!");
1567 }
1568 #endif
1569 } // end for each ft
1570 } // end for each region
1571}
1572
1573
1574
// Carbon-stocks initialisation -- the signature line (orig. 1576) is missing from this
// extracted listing; per the inline doc below it calls initialiseDeathBiomassStocks(),
// initialiseProductsStocks() and initialiseEmissionCounters().
// For each level-2 region it collects (a) the death biomass by forest type (natural +
// additional mortality) and (b) the product quantities: exports for primary products,
// domestic consumption for secondary ones.
// NOTE(review): original lines 1579, 1587 and 1599 are missing from the listing --
// presumably the initialiseEmissionCounters() call and the two per-region calls that
// consume deathBiomass and qProducts -- TODO confirm against the repository source.
1575 void
1577
1578 ///< call initialiseDeathBiomassStocks(), initialiseProductsStocks() and initialiseEmissionCounters()
1580
1581 for(uint i=0;i<regIds2.size();i++){
1582 vector<double> deathBiomass;
1583 for(uint j=0;j<fTypes.size();j++){
1584 double deathBiomass_ft = gfd("vMort",regIds2[i],fTypes[j],DIAM_ALL,DATA_NOW)+gfd("vMortAdd",regIds2[i],fTypes[j],DIAM_ALL,DATA_NOW);
1585 deathBiomass.push_back(deathBiomass_ft);
1586 }
1588 vector<double>qProducts;
1589 for(int p=0;p<priProducts.size();p++){
1590 // for the primary products we consider only the exports as the domestic consumption is entirely transformed in secondary products
1591 double int_exports = gpd("sa",regIds2[i],priProducts[p],DATA_NOW);
1592 qProducts.push_back(int_exports);
1593 }
1594 for(int p=0;p<secProducts.size();p++){
1595 // for the transformed products we skip those that are imported, hence derived from other forest systems
1596 double consumption = gpd("dl",regIds2[i],secProducts[p],DATA_NOW); // dl = sl + net regional imports
1597 qProducts.push_back(consumption);
1598 }
1600
1601 }
1602}
1603
// 30-year history initialisation -- the signature line (orig. 1605) is missing from
// this extracted listing, so the function name is not visible here.
// Iterates backwards over the 30 years up to and including the current one, and over
// every (region, forest type, diameter class) combination, building an iisskey for
// each tuple.
// NOTE(review): original line 1612 -- the statement that actually uses 'key' -- is
// missing from the listing, so only the loop skeleton is visible; the key presumably
// seeds some per-(year,region,ft,dc) store -- TODO confirm against the repository
// source.
1604 void
1606 int currentYear = MTHREAD->SCD->getYear();
1607 for(int y=currentYear;y>currentYear-30;y--){ // the 30 years up to and including currentYear
1608 for(uint i=0;i<regIds2.size();i++){
1609 for(uint j=0;j<fTypes.size();j++){
1610 for (uint u=0;u<dClasses.size();u++){
1611 iisskey key(y,regIds2[i],fTypes[j],dClasses[u]);
1613 }
1614 }
1615 }
1616 }
1617}
1618
1619/**
1620 * @brief ModelCoreSpatial::initializePixelArea
1621 *
1622 * This function computes the initial area by ft and dc. It requires vHa computed in computeCumulativeData, which is why it is
1623 * separated from the other initialisedPixelValues().
1624 * As the sum of area computed using vHa may differ from the one memorised in forArea_* layer, all values are scaled to match
1625 * it before being memorised.
1626 * Also assign area = area_l
1627 */
1628
// ModelCoreSpatial::initializePixelArea() -- the signature line (orig. 1630) is
// missing from this extracted listing; see the doxygen block above.
// Computes the initial pixel area by forest type and diameter class from the volumes
// and vHa densities, rescales the per-dc areas so their sum matches the forArea_*
// layer, and finally assigns area = area_l.
// NOTE(review): original line 1695 (just before the final \todo) is also missing from
// the listing -- TODO confirm against the repository source.
1629 void
1631 msgOut(MSG_INFO, "Starting initializing pixel-level area");
1632 if(!MD->getBoolSetting("usePixelData")) return;
1633 for(uint i=0;i<regIds2.size();i++){
1634 ModelRegion* reg = MD->getRegion(regIds2[i]);
1635 vector <Pixel*> rpx = MTHREAD->GIS->getAllPlotsByRegion(regIds2[i]);
1636 for (uint p=0;p<rpx.size();p++){
1637 Pixel* px = rpx[p];
1638 double pxid= px->getID();
1639 for(uint j=0;j<fTypes.size();j++){
1640 string ft = fTypes[j];
1641 vector <double> tempAreas; // raw (unscaled) areas by diameter class
1642 vector <double> areasByFt; // rescaled areas by diameter class
1643 double pxArea = px->getDoubleValue("forArea_"+ft,true)/10000.0; //ha
1644 for (uint u=0;u<dClasses.size();u++){
1645 if(u==0){
1646 double regionArea = reg->getValue("forArea_"+ft,OP_SUM)/10000.0; //ha
1647 double regRegVolumes = gfd("vReg",regIds2[i],ft,""); // regional regeneration volumes.. ugly name !!
1648 double newVReg = regionArea ? regRegVolumes*pxArea/regionArea : 0.0; // pixel share of regional regeneration; guard against 0/0
1649 double tp_u0 = px->tp.at(j).at(0); // time of passage to reach the first production diameter class
1650 double entryVolHa = gfd("entryVolHa",regIds2[i],ft,"");
1651 double tempArea = (newVReg*1000000.0/entryVolHa)*tp_u0;
1652 tempAreas.push_back(tempArea);
1653 } else {
1654 string dc = dClasses[u];
1655 double dcVol = px->vol_l.at(j).at(u)*1000000.0; // m^3
1656 double dcVHa = px->vHa.at(j).at(u); // m^3/ha
1657 #ifdef QT_DEBUG
1658 if(dcVol < 0.0 || dcVHa < 0.0){
1659 msgOut(MSG_CRITICAL_ERROR, "Negative volumes or density in initializePixelArea");
1660 }
1661 #endif
1662 double tempArea = dcVHa?dcVol/dcVHa:0; // area = volume / density (0 when no density data)
1663 tempAreas.push_back(tempArea);
1664 }
1665
1666 } // end dc
1667 double sumTempArea = vSum(tempAreas);
1668 // double sharedc0 = 5.0/90.0; // an arbitrary share of total area allocated to first diameter class
1669 //tempAreas.at(0) = sumTempArea * sharedc0;
1670 //sumTempArea = vSum(tempAreas);
1671 double normCoef = sumTempArea?pxArea/ sumTempArea:0; // scale factor so that the dc areas sum to the pixel forest area
1672 //cout << i << '\t' << pxid << '\t' << ft << '\t' << normCoef << endl;
1673 #ifdef QT_DEBUG
1674 if(normCoef < 0.0){
1675 msgOut(MSG_CRITICAL_ERROR, "Negative normCoef in initializePixelArea");
1676 }
1677 #endif
1678 for (uint u=0;u<dClasses.size();u++){
1679 areasByFt.push_back(tempAreas.at(u)*normCoef); // the original construction of the vector is missing (translated from Italian)
1680 }
1681 #ifdef QT_DEBUG
1682 if (pxArea != 0.0){
1683 double ratio = vSum(areasByFt)/ pxArea; // vSum(areasByFt) should be equal to pxArea
1684 if(ratio < 0.99999999999 || ratio > 1.00000000001) {
1685 msgOut(MSG_CRITICAL_ERROR, "pxArea is not equal to vSum(areasByFt) in initializePixelArea");
1686 }
1687 }
1688 #endif
1689 px->area_l.push_back(areasByFt);
1690 /// \todo here I have finally also area_ft_dc_px and I can implement the new one I am in 2006
1691 } // end ft
1692 px->area = px->area_l; //Assigning initial value of area to the area of the old year
1693 } // end px
1694 } // end region
1696 /// \todo: also update area_l
1697}
1698
// ModelCoreSpatial::computeCumulativeData() -- the signature line (orig. 1700) is
// missing from this extracted listing; the name follows the reference in
// initializePixelArea's doxygen ("It requires vHa computed in computeCumulativeData").
// For every pixel and forest type, builds by diameter class:
//  - cumTp:     cumulative time of passage to REACH each class (real values);
//  - cumTp_exp: its "expected" version, blended according to px->expType (see the
//               inline doc below: -1 = fixed first-year tp, 0 = adaptive, 1 = full
//               expectations, intermediate values = linear mix);
//  - vHa / vHa_exp: cumulatively generated volume per hectare;
//  - cumAlive / cumAlive_exp: cumulated survival rate to reach each class.
// The six vectors are pushed back per forest type on the pixel.
// NOTE(review): original line 1896, just before the closing brace, is also missing
// from the listing -- TODO confirm against the repository source.
1699 void
1701
1702 msgOut(MSG_INFO, "Starting computing some cumulative values..");
1703 int thisYear = MTHREAD->SCD->getYear();
1704
1705// double sumCumTP=0;
1706// double sumVHa = 0;
1707// double count = 0;
1708// double avg_sumCumTp;
1709// double avg_sumVHa;
1710
1711 for(uint r2= 0; r2<regIds2.size();r2++){
1712 int regId = regIds2[r2];
1713 regPx = MTHREAD->MD->getRegion(regId)->getMyPixels();
1714
1715 for (uint p=0;p<regPx.size();p++){
1716 Pixel* px = regPx[p];
1717 px->cumTp.clear();
1718 px->cumTp_exp.clear();
1719 px->vHa_exp.clear();
1720 px->vHa.clear();
1721 px->cumAlive.clear();
1722 px->cumAlive_exp.clear();
1723 double expType = px->expType;
1724
1725 for(uint j=0;j<fTypes.size();j++){
1726 string ft = fTypes[j];
1727
1728 double tp_multiplier_now = px->getMultiplier("tp_multiplier",ft,DATA_NOW);
1729 double tp_multiplier_t0 = px->getMultiplier("tp_multiplier",ft,firstYear);
1730 double mortCoef_multiplier_now = px->getMultiplier("mortCoef_multiplier",ft,DATA_NOW);
1731 double mortCoef_multiplier_t0 = px->getMultiplier("mortCoef_multiplier",ft,firstYear);
1732 double betaCoef_multiplier_now = px->getMultiplier("betaCoef_multiplier",ft,DATA_NOW);
1733 double betaCoef_multiplier_t0 = px->getMultiplier("betaCoef_multiplier",ft,firstYear);
1734 double pathMort_now, pathMort_t0;
1735
1736 // calculating the cumulative time of passage and the (cumulatively generated) vHa for each diameter class (depending on forest owners diam growth expectations)
1737 //loop(u$(ord(u)=1),
1738 // cumTp(u,i,lambda,essence) = tp_u1(i,essence,lambda);
1739 //);
1740 //loop(u$(ord(u)>1),
1741 // cumTp(u,i,lambda,essence) = cumTp(u-1,i,lambda,essence)+tp(u-1,i,lambda,essence);
1742 //);
1743 ////ceil(x) DNLP returns the smallest integer number greater than or equal to x
1744 //loop( (u,i,lambda,essence),
1745 // cumTp(u,i,lambda,essence) = ceil(cumTp(u,i,lambda,essence));
1746 //);
1747 vector <double> cumTp_temp; // cumulative time of passage to REACH a diameter class (tp is to LEAVE to the next one)
1748 vector <double> vHa_temp; // volume at hectare by each diameter class [m^3/ha]
1749 vector <double> cumAlive_temp; // cumulated alive rate to reach a given diameter class
1750 vector <double> cumTp_exp_temp; // expected version of cumTp_temp
1751 vector <double> vHa_exp_temp; // expected version of vHa_temp
1752 vector <double> cumAlive_exp_temp; // "expected" version of cumMort
1753
1754 MD->setErrorLevel(MSG_NO_MSG); // as otherwise on 2007 otherwise sfd() will complain that is filling multiple years (2006 and 2007)
1755 for (uint u=0; u<dClasses.size(); u++){
1756 string dc = dClasses[u];
1757 double cumTp_u, cumTp_u_exp, cumTp_u_noExp, cumTp_u_fullExp;
1758 double tp, tp_exp, tp_noExp, tp_fullExp;
1759 double vHa_u, vHa_u_exp, vHa_u_noExp, vHa_u_fullExp, beta, beta_exp, beta_noExp, beta_fullExp, mort, mort_exp, mort_noExp, mort_fullExp;
1760 double cumAlive_u, cumAlive_exp_u;
1761 pathMort_now = px->getPathMortality(ft,dc,DATA_NOW);
1762 pathMort_t0 = px->getPathMortality(ft,dc,firstYear);
1763 double additionalMortality_multiplier_reg_ft_dc = gfd("additionalMortality_multiplier",regId,ft,dc, DATA_NOW);
1764 double additionalMortality_multiplier_reg_ft_dc_t0 = gfd("additionalMortality_multiplier",regId,ft,dc, firstYear);
1765 double additionalMortality_now = px->getSTData("additionalMortality", ft, DATA_NOW, dc, 0.0)*additionalMortality_multiplier_reg_ft_dc;
1766 double additionalMortality_t0 = px->getSTData("additionalMortality", ft, firstYear, dc, 0.0)*additionalMortality_multiplier_reg_ft_dc_t0;
1767
1768 // only cumTp depends on the expectations, as it is what is used by the owner to calculate the return of investments.
1769 // the tp, beta and mort coefficients instead are the "real" ones as predicted by scientists for that specific time
1770
1771 if(u==0) {
1772 // first diameter class.. expected and real values are the same (0)
1773 cumTp_u = 0.;
1774 vHa_u = 0.;
1775 cumAlive_u = 1.;
1776 cumTp_temp.push_back(cumTp_u);
1777 vHa_temp.push_back(vHa_u);
1778 cumTp_exp_temp.push_back(cumTp_u);
1779 vHa_exp_temp.push_back(vHa_u);
1780 cumAlive_temp.push_back(cumAlive_u);
1781 cumAlive_exp_temp.push_back(cumAlive_u);
1782 } else {
1783 // other diameter classes.. first dealing with real values and then with expected ones..
1784 // real values..
1785 // real values..
1786 tp = gfd("tp",regId,ft,dClasses[u-1],thisYear)*tp_multiplier_now;
1787 cumTp_u = cumTp_temp[u-1] + tp;
1788 if (u==1){
1789 /**
1790 Note on the effect of mortality modifiers on the entryVolHa.
1791 Unfortunately for how it is defined the mortality multiplier (the ratio with the new mortality rate over the old one) we can't
1792 compute a entryVolHa based on it. It is NOT in fact just like: vHa_adjusted = vHa_orig / mort_multiplier.
1793 The effect of mortality on the vHa of the first diameter class is unknown, and so we can't compute the effect of a relative
1794 increase.
1795 */
1796 vHa_u = gfd("entryVolHa",regId,ft,"",thisYear);
1797 mort = 0.; // no info about mortality first diameter class ("00")
1798 } else {
1799 mort = 1-pow(1-min(gfd("mortCoef",regId,ft,dClasses[u-1],thisYear)*mortCoef_multiplier_now+pathMort_now+additionalMortality_now,1.0),tp); // mortality of the previous diameter class
1800 beta = gfd("betaCoef",regId,ft,dc, thisYear)*betaCoef_multiplier_now;
1801 vHa_u = vHa_temp[u-1]*beta*(1-mort);
1802 }
1803 cumAlive_u = max(0.,cumAlive_temp[u-1]*(1-mort));
1804 cumAlive_temp.push_back(cumAlive_u);
1805 cumTp_temp.push_back(cumTp_u);
1806 vHa_temp.push_back(vHa_u);
1807 // expected values..
1808 /**
1809 param expType Specify how the forest owners (those that make the investments) behave will be the time of passage in the future in order to calculate the cumulative time of passage in turn used to discount future revenues.
1810 Will forest owners behave adaptively believing the time of passage between diameter classes will be like the observed one at time they make decision (0) or they will have full expectations believing forecasts (1) or something in the middle ?
1811 For compatibility with the GAMS code, a -1 value means using initial simulation tp values (fixed cumTp)."
1812 */
1813 if (expType == -1){
1814 tp_exp = gfd("tp",regId,ft,dClasses[u-1],firstYear)*tp_multiplier_t0;
1815 //tp = px->tp.at(u); no. not possible, tp stored at pixel level is the current year one
1816 cumTp_u_exp = cumTp_exp_temp[u-1]+tp_exp;
1817 cumTp_exp_temp.push_back(cumTp_u_exp);
1818 if(u==1) {
1819 vHa_u_exp = gfd("entryVolHa",regId,ft,"",firstYear);
1820 mort_exp = 0.; // no info about mortality first diameter class ("00")
1821 } else {
1822 mort_exp = 1-pow(1-min(gfd("mortCoef",regId,ft,dClasses[u-1], firstYear)*mortCoef_multiplier_t0+pathMort_t0+additionalMortality_t0,1.0),tp_exp); // mortality rate of previous diameter class
1823 beta_exp = gfd("betaCoef",regId,ft,dc, firstYear)*betaCoef_multiplier_t0;
1824 vHa_u_exp = vHa_exp_temp[u-1]*beta_exp*(1-mort_exp);
1825 }
1826 } else {
1827 double tp_multiplier_dynamic = px->getMultiplier("tp_multiplier",ft,thisYear+ceil(cumTp_exp_temp[u-1]));
1828 tp_noExp = gfd("tp",regId,ft,dClasses[u-1])*tp_multiplier_now;
1829 cumTp_u_noExp = cumTp_exp_temp[u-1]+tp_noExp;
1830 tp_fullExp = gfd("tp",regId,ft,dClasses[u-1],thisYear+ceil(cumTp_exp_temp[u-1]))*tp_multiplier_dynamic ; // time of passage that there should be to reach this diameter class in the year where the previous diameter class will be reached
1831 cumTp_u_fullExp = cumTp_exp_temp[u-1]+tp_fullExp ; // it adds to the time of passage to reach the previous diameter class the time of passage that there should be to reach this diameter class in the year where the previous diameter class will be reached
1832 cumTp_u_exp = cumTp_u_fullExp*expType+cumTp_u_noExp*(1-expType); // 20121108: it's math the same as cumTp_exp_temp[u-1] + tp
1833 cumTp_exp_temp.push_back(cumTp_u_exp);
1834 if(u==1) {
1835 vHa_u_noExp = gfd("entryVolHa",regId,ft,"",DATA_NOW);
1836 vHa_u_fullExp = gfd("entryVolHa",regId,ft,"",thisYear+ceil(cumTp_u));
1837 vHa_u_exp = vHa_u_fullExp*expType+vHa_u_noExp*(1-expType);
1838 mort_exp = 0.; // no info about mortality first diameter class ("00")
1839 } else {
1840 mort_noExp = 1-pow(1-min(1.0,gfd("mortCoef",regId,ft,dClasses[u-1], DATA_NOW)*mortCoef_multiplier_now+pathMort_now+additionalMortality_now), tp_noExp); // mortCoef is a yearly value. Mort coeff between class is 1-(1-mortCoeff)^tp
1841 double mortCoef_multiplier_dynamic = px->getMultiplier("mortCoef_multiplier",ft,thisYear+ceil(cumTp_exp_temp[u-1]));
1842 double pathMort_dynamic = px->getPathMortality(ft,dc,thisYear+ceil(cumTp_exp_temp[u-1]));
1843 double additionalMortality_multiplier_reg_ft_dc_dynamic = gfd("additionalMortality_multiplier",regId,ft,dc, thisYear+ceil(cumTp_exp_temp[u-1]));
1844 double additionalMortality_dynamic = px->getSTData("additionalMortality", ft, thisYear+ceil(cumTp_exp_temp[u-1]), dc, 0.0)*additionalMortality_multiplier_reg_ft_dc_dynamic;
1845 mort_fullExp = 1-pow(1-min(1.0,gfd("mortCoef",regId,ft,dClasses[u-1],thisYear+ceil(cumTp_exp_temp[u-1]))*mortCoef_multiplier_dynamic+pathMort_dynamic+additionalMortality_dynamic),tp_fullExp); // mortality of the previous diameter class
1846 //double debug1 = gfd("mortCoef",regId,ft,dClasses[u-1],thisYear+ceil(cumTp_exp_temp[u-1]));
1847 //double debug2 = debug1*mortCoef_multiplier_dynamic+pathMort_dynamic;
1848 //double debug3 = min(1.0,debug2);
1849 //double debug4 = 1.0-debug3;
1850 //double debug5 = pow(debug4,tp_fullExp);
1851 //double debug6 = 1.0-debug5;
1852
1853
1854 beta_noExp = gfd("betaCoef",regId,ft,dc, DATA_NOW)*betaCoef_multiplier_now;
1855 double betaCoef_multiplier_dynamic = px->getMultiplier("betaCoef_multiplier",ft,thisYear+ceil(cumTp_u));
1856 beta_fullExp = gfd("betaCoef",regId,ft,dc, thisYear+ceil(cumTp_u))*betaCoef_multiplier_dynamic;
1857 mort_exp = mort_fullExp*expType+mort_noExp*(1-expType);
1858 beta_exp = beta_fullExp*expType+beta_noExp*(1-expType);
1859 vHa_u_exp = vHa_exp_temp[u-1]*beta_exp*(1-mort_exp); // BUG !!! mort is yearly value, not between diameter class. SOLVED 20121108
1860 }
1861 }
1862 vHa_exp_temp.push_back(vHa_u_exp);
1863 cumAlive_exp_u = max(0.,cumAlive_exp_temp[u-1]*(1-mort_exp));
1864 cumAlive_exp_temp.push_back(cumAlive_exp_u);
1865
1866 //cout << "********" << endl;
1867 //cout << "dc;mort;cumAlive;cumAlive_exp "<< endl ;
1868 //cout << dClasses[u] << ";"<< mort << ";" << cumAlive_u << ";" << cumAlive_exp_u << endl;
1869
1870 }
1871 // debug stuff on vHa
1872 //double vHa_new = gfd("vHa",regId,ft,dc,DATA_NOW);
1873 //double hv2fa_old = gfd("hv2fa",regId,ft,dc,DATA_NOW);
1874 //cout << "Reg|Ft|dc|vHa (new)|1/hv2fa (old): " << regId << " | " << ft;
1875 //cout << " | " << dc << " | " << vHa_new << " | " << 1/hv2fa_old << endl;
1876
1877 } // end of each diam
1878 //double pixID = px->getID();
1879 //cout << thisYear << ";"<< regIds2[r2] << ";" << pixID << ";" << ft << ";" << cumTp_exp_temp[3] << ";" << vHa_exp_temp[3] << endl;
1880 px->cumTp.push_back(cumTp_temp);
1881 px->vHa.push_back(vHa_temp);
1882 px->cumAlive.push_back(cumAlive_temp);
1883 px->cumTp_exp.push_back(cumTp_exp_temp);
1884 px->vHa_exp.push_back(vHa_exp_temp);
1885 px->cumAlive_exp.push_back(cumAlive_exp_temp);
1886
1887 //sumCumTP += cumTp_exp_temp[3];
1888 //sumVHa += vHa_exp_temp[3];
1889 //count ++;
1890
1891
1892 } // end of each ft
1893 //double debug = 0.0;
1894 } // end of each pixel
1895 } // end of each region
1897 //avg_sumCumTp = sumCumTP/ count;
1898 //avg_sumVHa = sumVHa / count;
1899 //cout << "Avg sumCumTp_35 and sumVha_35: " << avg_sumCumTp << " and " << avg_sumVHa << " (" << count << ")" << endl;
1900 //exit(0);
1901}
1902
1903void
// NOTE(review): the declarator line (original line 1904) was lost in this
// extracted listing; from the Doxygen index and the msgOut below this is
// presumably `ModelCoreSpatial::resetPixelValues(){` — TODO confirm in repo.
// Purpose: at the start of a simulation year, archive last year's pixel state
// (vol -> vol_l, area -> area_l via px->swap) and clear the per-pixel vectors
// that will be re-filled during the year's computations.
1905 msgOut(MSG_INFO, "Starting resetting pixel level values");
1906 for(uint r2= 0; r2<regIds2.size();r2++){
1907 int regId = regIds2[r2];
1908 regPx = MTHREAD->MD->getRegion(regId)->getMyPixels();
1909 for (uint p=0;p<regPx.size();p++){
1910 Pixel* px = regPx[p];
1911 px->swap(VAR_VOL); // vol_l = vol
1912 px->swap(VAR_AREA); // area_l = area
1913 // 20121108 BUG! Solved, used empty (just return true if the vector is empty) instead of clear (it actually clears the vector)
1914 px->vol.clear(); // by ft,dc
 // area is the one exception below: it is seeded with last year's value
 // rather than cleared, so pixels keep a valid area until recomputed.
1915 px->area = px->area_l; // ATTENCTION, DIFFERENT FROM THE OTHERS. Here it is not cleared, it is assigned the previous year as default
1916 /*px->area.clear(); // by ft,dc*/
1917 px->hArea.clear(); // by ft, dc
1918 px->deltaArea.clear();
1919 //px->regArea.clear(); // by year, ft NO, this one is a map, it doesn't need to be changed
1920 px->hVol.clear(); // by ft, dc
1921 px->hVol_byPrd.clear(); // by ft, dc, pp
1922 //px->in.clear(); // by pp
1923 //px->hr.clear(); // by pp
1924 px->vReg.clear(); // by ft
1925 px->expectedReturns.clear(); // by ft
1926 px->beta.clear();
1927 px->mort.clear();
1928 px->addMort.clear(); // fire mortality
1929 px->tp.clear();
1930 px->cumTp.clear();
1931 px->vHa.clear();
1932 px->cumTp_exp.clear();
1933 px->vHa_exp.clear();
1934 px->cumAlive.clear();
1935 px->cumAlive_exp.clear();
1936 px->vMort.clear();
1937 px->vMortAdd.clear(); // fire mortality
1938 //std::fill(rpx[j]->vMort.begin(), rpx[j]->vMort.end(), 0.0);
1939
1940 }
1941 }
1942}
1943
1944void
// NOTE(review): declarator line (original 1945) lost in extraction;
// presumably `ModelCoreSpatial::cachePixelExogenousData(){` — TODO confirm.
// Purpose: cache, per pixel / forest type / diameter class, the exogenous
// coefficients (time-of-passage tp, increment betaCoef, mortality mortCoef,
// additional fire mortality, availability coefficient) so the rest of the
// yearly computation does not re-query the data store.
1946
1947 msgOut(MSG_INFO, "Starting cashing on pixel spatial-level exogenous data");
1948 bool applyAvalCoef = MTHREAD->MD->getBoolSetting("applyAvalCoef");
1949
1950 for(uint r2= 0; r2<regIds2.size();r2++){
1951 int regId = regIds2[r2];
1952 regPx = MTHREAD->MD->getRegion(regId)->getMyPixels();
1953 for (uint p=0;p<regPx.size();p++){
1954 Pixel* px = regPx[p];
1955 px->tp.clear();
1956 px->beta.clear();
1957 px->mort.clear();
1958 px->addMort.clear(); // fire mortality
1959 px->avalCoef.clear();
1960
1961 for(uint j=0;j<fTypes.size();j++){
1962 string ft = fTypes[j];
1963 vector <double> tp_byu;
1964 vector <double> beta_byu;
1965 vector <double> mort_byu;
1966 vector <double> addMort_byu; // fire mortality
1967
1968 double tp_multiplier_now = px->getMultiplier("tp_multiplier",ft,DATA_NOW);
1969 double mortCoef_multiplier_now = px->getMultiplier("mortCoef_multiplier",ft,DATA_NOW);
1970
1971 double betaCoef_multiplier_now = px->getMultiplier("betaCoef_multiplier",ft,DATA_NOW);
 // avalCoef defaults to 1.0 (no restriction) when the setting is off
1972 double avalCoef_ft = applyAvalCoef?px->getSTData("avalCoef",ft,DATA_NOW):1.0;
1973
1974 if(avalCoef_ft == MTHREAD->MD->getDoubleSetting("noValue")){
1975 msgOut(MSG_CRITICAL_ERROR,"applyAvalCoef has benn asked for, but I can't find the avalCoef in the data!");
1976 }
1977
1978 for (uint u=0; u<dClasses.size(); u++){
1979 string dc = dClasses[u];
1980 double pathMortality = px->getPathMortality(ft,dc,DATA_NOW);
1981 double additionalMortality_multiplier_reg_ft_dc = gfd("additionalMortality_multiplier",regId,ft,dc, DATA_NOW);
1982 double additionalMortality = px->getSTData("additionalMortality", ft, DATA_NOW, dc, 0.0)*additionalMortality_multiplier_reg_ft_dc; // these are volumes not rates !
1983 double tp, beta_real, mort_real;
1984 if (u==0){
1985 // tp of first diameter class not making it change across the time dimension, otherwise problems in getting the rigth past
1986 // regenerations. BUT good, px->tp.at(0) is used only to pick up the right regeneration, so the remaining of the model
1987 // uses the getMultiplier version and cumTp consider the dynamic effects also in the first dc.
1988 tp = gfd("tp",regId,ft,dClasses[u],firstYear)*px->getMultiplier("tp_multiplier",ft,firstYear); // tp is defined also in the first diameter class, as it is the years to reach the NEXT diameter class
1989 } else {
1990 tp = gfd("tp",regId,ft,dClasses[u],DATA_NOW)*tp_multiplier_now; // tp is defined also in the first diameter class, as it is the years to reach the NEXT diameter class
1991 }
 // first diameter class (u==0) has no increment/mortality coefficients
1992 beta_real = u?gfd("betaCoef",regId,ft,dClasses[u],DATA_NOW)*betaCoef_multiplier_now:0;
1993 mort_real = min(u?gfd("mortCoef",regId,ft,dClasses[u],DATA_NOW)*mortCoef_multiplier_now+pathMortality :0,1.0); // 20230428 bugfix Antonello, removed additionalMortality //Antonello, bug fixed 20160203: In any case, natural plus pathogen mortality can not be larger than 1!
1994 tp_byu.push_back(tp);
1995 beta_byu.push_back(beta_real);
1996 mort_byu.push_back(mort_real);
1997 addMort_byu.push_back(additionalMortality); // fire mortality
1998 } // end of each dc
1999 px->tp.push_back(tp_byu);
2000 px->beta.push_back(beta_byu);
2001 px->mort.push_back(mort_byu);
2002 px->addMort.push_back(addMort_byu); // fire mortality
2003 px->avalCoef.push_back(avalCoef_ft);
2004 } // end of each ft
2005 } // end of each pixel
2006 } // end of each region
2007}
2008
2009void
// NOTE(review): declarator line (original 2010) lost in extraction;
// presumably `ModelCoreSpatial::computeInventary(){` — TODO confirm.
// Purpose: aggregate, per region and primary product, the standing timber
// inventory available this year ("in"), the usable dead timber
// ("in_deathTimber"), and the per-product-combination upper bounds used by
// the optimiser (REG->inResByAnyCombination).
2011 msgOut(MSG_INFO, "Starting computing inventary available for this year..");
 // one bound per subset of primary products: 2^|priProducts| combinations
2012 int nbounds = pow(2,priProducts.size());
2013 vector<vector<int>> concernedPriProductsTotal = MTHREAD->MD->createCombinationsVector(priProducts.size());
2014 int currentYear = MTHREAD->SCD->getYear();
2015
2016 for(uint i=0;i<regIds2.size();i++){
2017 int r2 = regIds2[i];
2018 ModelRegion* REG = MTHREAD->MD->getRegion(r2);
2019 //Gis* GIS = MTHREAD->GIS;
2020 regPx = REG->getMyPixels();
2021 vector <double> in_reg(priProducts.size(),0.); // should have ceated a vector of size priProducts.size(), all filled with zeros
2022 vector <double> in_deathTimber_reg(priProducts.size(),0.); // should have ceated a vector of size priProducts.size(), all filled with zeros
2023 for (uint p=0;p<regPx.size();p++){
2024 Pixel* px = regPx[p];
2025 //int debugPx = px->getID();
2026 //int debug2 = debugPx;
2027 //px->in.clear();
2028 for(uint pp=0;pp<priProducts.size();pp++){
2029 double in = 0;
2030 for(uint ft=0;ft<fTypes.size();ft++){
2031 for(uint dc=0;dc<dClasses.size();dc++){
 // app() gives the (ft,dc)->product appartenance share; vol_l is
 // last year's volume, scaled by the availability coefficient
2032 in += app(priProducts[pp],fTypes[ft],dClasses[dc])*px->vol_l.at(ft).at(dc)*px->avalCoef.at(ft);
2033 }
2034 }
2035 //px->in.push_back(in);
2036 in_reg.at(pp) += in;
2037 } // end of each priProduct
2038 } // end each pixel
2039
2040
2041 for(uint pp=0;pp<priProducts.size();pp++){
2042 vector<string> priProducts_vector;
2043 priProducts_vector.push_back(priProducts[pp]);
2044
2045 double in_deathMortality = MD->getAvailableDeathTimber(priProducts_vector,r2,currentYear-1);
2046 in_deathTimber_reg.at(pp) += in_deathMortality;
2047
2048 // Even if I fixed all the lower bounds to zero in Opt::get_bounds_info still the model
2049 // doesn't solve with no-forest in a region.
2050 // Even with 0.0001 doesn't solve !!
2051 // With 0.001 some scenarios doesn't solve in 2093
2052 // With 0.003 vRegFixed doesn't solve in 2096
2053 // Tried with 0.2 but no changes, so put it back on 0.003
2054 //spd(max(0.001,in_reg.at(pp)),"in",r2,priProducts[pp],DATA_NOW,true);
2055 spd(in_reg.at(pp),"in",r2,priProducts[pp],DATA_NOW,true);
2056 spd(in_deathTimber_reg.at(pp),"in_deathTimber",r2,priProducts[pp],DATA_NOW,true);
2057 #ifdef QT_DEBUG
2058 if (in_reg.at(pp) < -0.0){
2059 msgOut(MSG_CRITICAL_ERROR,"Negative inventory");
2060 }
2061 #endif
2062 }
2063
2064 // ##### Now creating a set of bonds for the optimisation that account of the fact that the same ft,dc can be used for multiple products:
2065
2066 // 20160928: Solved a big bug: for each combination instead of taking the UNION of the various priProduct inventory sets I was taking the sum
2067 // Now both the alive and the death timber are made from the union
2068 // 20150116: As the same (ft,dc) can be used in more than one product knowing -and bounding the supply in the optimisation- each single
2069 // in(pp) is NOT enought.
2070 // We need to bound the supply for each possible combination, that is for 2^(number of prim.pr)
2071 // Here we compute the detailed inventory. TO.DO: Create the pounds in Opt. done
2072 // 20160209: Rewritten and corrected a bug that was not giving enought inv to multiproduct combinations
2073 for (uint i=0; i<nbounds; i++){
2074 vector<int> concernedPriProducts = concernedPriProductsTotal[i];
2075 vector<string> concernedPriProducts_ids = positionsToContent(priProducts,concernedPriProducts);
2076 //double debug = 0.0;
2077 //for(uint z=0;z<concernedPriProducts.size();z++){
2078 // debug += gpd("in",r2,priProducts[concernedPriProducts[z]]); // to.do: this will need to be rewritten checked!
2079 //}
2080 double bound_alive = MD->getAvailableAliveTimber(concernedPriProducts_ids,r2); // From px->vol_l, as in "in"
2081 double bound_deathTimber = MD->getAvailableDeathTimber(concernedPriProducts_ids,r2,currentYear-1); // From deathTimberInventory map
2082 double bound_total = bound_alive + bound_deathTimber;
2083
2084 REG->inResByAnyCombination[i] = bound_total;
2085 //REG->inResByAnyCombination_deathTimber[i] = bound_deathTimber;
2086 } // end for each bond
2087 } // end each region
2088}
2089
2090void
// NOTE(review): declarator line (original 2091) lost in extraction;
// presumably `ModelCoreSpatial::updateMapAreas(){` — TODO confirm.
// Purpose: write the per-forest-type forest area back into the GIS layer
// "forArea_<ft>" for every pixel. Two modes: the new one takes vSum(px->area)
// directly; the legacy one (oldVol2AreaMethod) updates the previous layer
// value with harvested (-) and regenerated (+) areas.
// Unit note: model areas are in ha, layer values in m^2, hence the *10000.
2092 msgOut(MSG_INFO, "Updating map areas..");
2093
2094 if (!oldVol2AreaMethod){
2095 if(!MD->getBoolSetting("usePixelData")) return;
2096 for(uint i=0;i<regIds2.size();i++){
2097 ModelRegion* reg = MD->getRegion(regIds2[i]);
2098 vector <Pixel*> rpx = MTHREAD->GIS->getAllPlotsByRegion(regIds2[i]);
2099 for (uint p=0;p<rpx.size();p++){
2100 Pixel* px = rpx[p];
2101 double pxid= px->getID();
2102 for(uint j=0;j<fTypes.size();j++){
2103 string ft = fTypes[j];
2104 double forArea = vSum(px->area.at(j));
2105 #ifdef QT_DEBUG
2106 if(forArea < 0.0 ){
2107 msgOut(MSG_CRITICAL_ERROR, "Negative forArea in updateMapAreas");
2108 }
2109 #endif
2110 px->changeValue("forArea_"+ft, forArea*10000);
2111 } // end ft
2112 } // end px
2113 } // end region
2114 } else {
2115 int currentYear = MTHREAD->SCD->getYear();
2116 map<int,double> forestArea; // foresta area by each region
2117 pair<int,double > forestAreaPair;
2118 vector<int> l2Regions = MTHREAD->MD->getRegionIds(2, true);
2119 vector <string> fTypes = MTHREAD->MD->getForTypeIds();
2120 int nFTypes = fTypes.size();
2121 int nL2Regions = l2Regions.size();
2122 for(int i=0;i<nL2Regions;i++){
2123 int regId = l2Regions[i];
2124 vector<Pixel*> rpx = MTHREAD->GIS->getAllPlotsByRegion(regId);
2125 for(int j=0;j<nFTypes;j++){
2126 string ft = fTypes[j];
2127 //double regForArea = reg->getValue("forArea_"+ft);
2128 //double harvestedArea = gfd("harvestedArea",regId,ft,DIAM_ALL);
2129 //double regArea = gfd("regArea",regId,ft,DIAM_ALL);
2130 //cout << "Regid/ft/area/harvested/regeneration: " <<regId<<";"<<ft<<";"<<regForArea<<";"<<harvestedArea<<";" <<regArea<<endl;
2131 //double newAreaNet = regArea-harvestedArea;
2132 //double newAreaRatio = newAreaNet / regForArea;
2133 for(uint z=0;z<rpx.size();z++){
2134 Pixel* px = rpx[z];
2135 double oldValue = px->getDoubleValue("forArea_"+ft,true)/10000;
2136 double hArea = vSum(px->hArea.at(j)); //bug 20140205 areas in the model are in ha, in the layer in m^2
2137 double regArea = findMap(px->regArea,currentYear).at(j); //bug 20140205 areas in the model are in ha, in the layer in m^2
2138 //double newValue = oldValue*(1. + newAreaRatio);
2139 double newValue = oldValue-hArea+regArea;
2140 double areaNetOfRegeneration = oldValue-hArea;
2141 #ifdef QT_DEBUG
2142 if (areaNetOfRegeneration<0.0){
2143 msgOut(MSG_CRITICAL_ERROR,"areaNetOfRegeneration negative in updateMapAreas");
2144 }
2145 if (newValue<0.0){
2146 msgOut(MSG_CRITICAL_ERROR,"for area negative in updateMapAreas");
2147 }
2148 #endif
2149 rpx[z]->changeValue("forArea_"+ft, newValue*10000);
2150 }
2151 }
2152 }
2153 }
2154}
2155
2156void
// NOTE(review): declarator line (original 2157) lost in extraction;
// presumably `ModelCoreSpatial::updateOtherMapData(){` — TODO confirm.
// Purpose: push per-pixel volume and expected-return values into the GIS
// layers "vol_<ft>" / "expectedReturns_<ft>" (if those layers exist) and
// refresh the corresponding GUI images.
2158
2159vector<int> l2Regions = MTHREAD->MD->getRegionIds(2, true);
2160vector <string> fTypes = MTHREAD->MD->getForTypeIds();
2161int nFTypes = fTypes.size();
2162int nL2Regions = l2Regions.size();
2163for(int i=0;i<nL2Regions;i++){
2164 int regId = l2Regions[i];
2165 vector<Pixel*> rpx = MTHREAD->GIS->getAllPlotsByRegion(regId);
2166 for(int j=0;j<nFTypes;j++){
2167 string ft = fTypes[j];
2168 for(uint z=0;z<rpx.size();z++){
2169 Pixel* px = rpx[z];
2170 double vol = vSum(px->vol.at(j));
2171 double expectedReturns = px->expectedReturns.at(j);
 // layers are optional: write only where the layer was configured
2172 if(MTHREAD->GIS->layerExist("vol_"+ft)){
2173 rpx[z]->changeValue("vol_"+ft, vol);
2174 }
2175 if(MTHREAD->GIS->layerExist("expectedReturns_"+ft)){
2176 rpx[z]->changeValue("expectedReturns_"+ft, expectedReturns);
2177 }
2178 }
2179 }
2180}
2181
2182// update GUI image..
2183for(int j=0;j<nFTypes;j++){
2184 string ft = fTypes[j];
2185 MTHREAD->GIS->updateImage("vol_"+ft);
2186 MTHREAD->GIS->updateImage("expectedReturns_"+ft);
2187}
2188
2189
2190}
2191
2192
2193void
// NOTE(review): declarator line (original 2194) lost in extraction;
// presumably `ModelCoreSpatial::sumRegionalForData(){` — TODO confirm.
// Purpose: aggregate pixel-level results up to the regional level and store
// them in the regional data store (sfd): volumes, harvested volumes/areas,
// mortality, productivity, regeneration, forest areas, expected returns
// (overall, by parent forest-type group and by forest type), pathogen
// invasion share and usable dead timber.
2195
2196 msgOut(MSG_INFO, "Summing data pixels->region..");
2197 //vector <string> outForVariables = MTHREAD->MD->getStringVectorSetting("outForVariables");
2198 int currentYear = MTHREAD->SCD->getYear();
2199
2200 //map<vector<string>,double> hVol_byPrd; // by regid, ft, dc, pp
2201 map<tr1::array<string, 4>,double> hVol_byPrd; // by regid, ft, dc, pp
2202
2203 // OLD CODE TO
2204 for(uint r2= 0; r2<regIds2.size();r2++){
2205 int regId = regIds2[r2];
2206 regPx = MTHREAD->MD->getRegion(regId)->getMyPixels(); // by regId, ft, dc, pp
2207
2208 // vector < vector <vector <double> > >hVol_byPrd; // by ft, dc, pp
2209
2210 for(uint j=0;j<fTypes.size();j++){
2211 string ft = fTypes[j];
2212
2213 double regArea = 0.;
2214 double sumAreaByFt = 0.;
2215 double pxForAreaByFt = 0.;
2216 double vReg = 0.;
2217 double sumSumHProductivity = 0.;
2218 double sumHArea = 0.;
2219
2220 for (uint u=0; u<dClasses.size(); u++){
2221 string dc = dClasses[u];
2222 double vol =0.;
2223 double hV = 0.;
2224 double hArea = 0.;
2225 double vMort = 0.;
2226 double vMortAdd = 0; // fire mortality
2227 double sumHProductivity = 0.; // productivity * area
2228 for (uint pi=0;pi<regPx.size();pi++){
2229 Pixel* px = regPx[pi];
2230 vol += px->vol.at(j).at(u);
2231 hV += px->hVol.at(j).at(u);
2232 hArea += px->hArea.at(j).at(u);
2233 vMort += px->vMort.at(j).at(u);
2234 vMortAdd += px->vMortAdd.at(j).at(u); // fire mortality
2235 sumHProductivity += (px->hProductivity.at(j).at(u) * px->hArea.at(j).at(u));
2236 if(MD->getBoolSetting("outDetailedHv",DATA_NOW)){
2237 for(uint pp=0;pp<priProducts.size();pp++){
2238 string pr = priProducts[pp];
2239 // vector <string> hVKey = {i2s(regId), ft, dc, pr};
2240 tr1::array<string, 4> hVKey = {i2s(regId), ft, dc, pr};
2241 //double debug = px->hVol_byPrd.at(j).at(u).at(pp);
2242 //cout << px->getID() << '\t' << j << '\t' << u << '\t' << pp << '\t' << debug << endl;
2243 incrOrAddMapValue(hVol_byPrd, hVKey, px->hVol_byPrd.at(j).at(u).at(pp));
2244 }
2245 }
2246 }
 // u==0 (regeneration class) is skipped: regional stats start at dc 1
2247 if(u){
2248 sfd(vol,"vol",regId,ft,dc,DATA_NOW);
2249 sfd(hV,"hV",regId,ft,dc,DATA_NOW,true);
2250 double hProductivityByDc = hArea ? sumHProductivity/hArea : 0.;
2251 sumSumHProductivity += (hProductivityByDc*hArea);
2252 sumHArea += hArea;
2253 sfd(hProductivityByDc,"hProductivityByDc",regId,ft,dc,DATA_NOW,true);
2254 sfd(hArea,"harvestedArea",regId,ft,dc,DATA_NOW, true);
2255 sfd(vMort,"vMort",regId,ft,dc,DATA_NOW,true);
2256 sfd(vMortAdd,"vMortAdd",regId,ft,dc,DATA_NOW,true); // fire mortality
 // harvesting rate = harvested volume over PREVIOUS year's stock
2257 double vol_1 = gfd("vol",regId,ft,dc,currentYear-1);
2258 if(vol_1){
2259 sfd(hV/vol_1,"hr",regId,ft,dc,DATA_NOW, true);
2260 } else {
2261 sfd(0.,"hr",regId,ft,dc,DATA_NOW, true);
2262 }
2263
2264 }
2265 } // end foreach dc
2266 double hProductivity = sumHArea? sumSumHProductivity / sumHArea : 0.;
2267 sfd(hProductivity,"hProductivity",regId,ft,"",DATA_NOW,true);
2268
2269 for (uint p=0;p<regPx.size();p++){
2270 Pixel* px = regPx[p];
2271 vReg += px->vReg.at(j);
2272 regArea += findMap(px->regArea,currentYear).at(j);
2273 pxForAreaByFt = (px->getDoubleValue("forArea_"+ft,true)/10000);
2274
2275 sumAreaByFt += pxForAreaByFt;
2276 //double debug1 = sumAreaByFt;
2277 if(! (sumAreaByFt >= 0.0) ){
2278 msgOut(MSG_CRITICAL_ERROR,"sumAreaByFt is not non negative.");
2279 }
2280 }
2281 sfd(vReg,"vReg",regId,ft,"",DATA_NOW, true);
2282 sfd(regArea,"regArea",regId,ft,"",DATA_NOW, true);
2283 sfd(sumAreaByFt,"forArea",regId,ft,"",DATA_NOW, true);
2284 } // end of for each ft
2285
2286 for (uint p=0;p<regPx.size();p++){
2287 Pixel* px = regPx[p];
2288 double totPxForArea = vSum(px->area);
2289
2290#ifdef QT_DEBUG
 // cross-check: pixel vector areas must match the GIS layer areas
2291 double totPxForArea_debug = 0.0;
2292 for(uint j=0;j<fTypes.size();j++){
2293 string ft = fTypes[j];
2294 totPxForArea_debug += (px->getDoubleValue("forArea_"+ft,true)/10000);
2295 }
2296
2297 if ( (totPxForArea - totPxForArea_debug) > 0.0001 || (totPxForArea - totPxForArea_debug) < -0.0001 ){
2298 cout << "*** ERROR: area discrepance in pixel " << px->getID() << " of " << (totPxForArea - totPxForArea_debug) << " ha!" << endl;
2299 msgOut(MSG_CRITICAL_ERROR,"Total forest area in pixel do not coincide if token from layer forArea or (pixel) vector area!");
2300 }
2301#endif
2302 } // end of each pixel
2303
2304
2305 } // end each region
2306 //cout << "done" << endl;
2307 MTHREAD->DO->printDetailedHV(hVol_byPrd);
2308
2309
2310 // Taking care of expected returns here..
2311 // (Changed 25/08/2016 afternoon: expRet{ft,r} are now sum{px}{expRet{ft,px}*fArea_{px}}/fArea{r} and no longer sum{px}{expRet{ft,px}*fArea_{px,ft}}/fArea{r,ft} )
2312 // Also now we report the expReturns by group and by forest, each of which is made only with the best ones within their group
2313
2314 vector<string> parentFtypes = MTHREAD->MD->getForTypeParents();
2315
2316 for(uint r2= 0; r2<regIds2.size();r2++){
2317 int regId = regIds2[r2];
2318 regPx = MTHREAD->MD->getRegion(regId)->getMyPixels();
2319 double totRegionForArea = 0.;
2320 double totSumExpRet = 0.;
2321 vector <double> totSumExpRet_byFTParent(parentFtypes.size(),0.0);
2322 vector <double> totSumExpRet_byFTypes(fTypes.size(),0.0);
2323
2324 // First computing the sumExpectedReturns..
2325 for (uint p=0;p<regPx.size();p++){
2326 Pixel* px = regPx[p];
2327 //int debug_pxid = px->getID();
2328 double pxForArea = vSum(px->area);
2329 totRegionForArea += pxForArea;
2330 double bestPxExpectedRet = getMax(px->expectedReturnsNotCorrByRa);
2331 for(uint i=0;i<parentFtypes.size();i++){
2332 vector <string> childIds = MTHREAD->MD->getForTypeChilds(parentFtypes[i]);
2333 vector <int> childPos = MTHREAD->MD->getForTypeChilds_pos(parentFtypes[i]);
2334 vector<double> pxExpReturnsByChilds(childPos.size(),0.0);
2335 for(uint j=0;j<childPos.size();j++){
2336 double pxExpReturn_singleFt = px->expectedReturns.at(childPos[j]);
2337 // Manual fix to not have the expected returns of ash within the general "broadL" expected returns.
2338 // To do: remove it after we work on the ash project.. I don't like manual fixes !!!
2339 pxExpReturnsByChilds.at(j) = (childIds.at(j) == "ash") ? 0.0 : pxExpReturn_singleFt;
2340 //pxExpReturnsByChilds.at(j) = pxExpReturn_singleFt;
2341 totSumExpRet_byFTypes.at(childPos[j]) += pxExpReturn_singleFt*pxForArea;
2342 } // end of each ft
2343 totSumExpRet_byFTParent[i] += getMax(pxExpReturnsByChilds)*pxForArea;
2344 } // end for each partentFt
2345 totSumExpRet += bestPxExpectedRet * pxForArea;
2346 } // end for each px
2347
2348 // ..and now computing the expReturns and storing them
2349 for(uint i=0;i<parentFtypes.size();i++){
2350 vector <int> childPos = MTHREAD->MD->getForTypeChilds_pos(parentFtypes[i]);
2351 for(uint j=0;j<childPos.size();j++){
2352 //double debug1 = totSumExpRet_byFTypes.at(childPos[j])/totRegionForArea;
2353 sfd(totSumExpRet_byFTypes.at(childPos[j]),"sumExpReturns",regId,fTypes.at(childPos[j]),"",DATA_NOW, true);
2354 sfd(totSumExpRet_byFTypes.at(childPos[j])/totRegionForArea,"expReturns",regId,fTypes.at(childPos[j]),"",DATA_NOW, true);
2355 } // end of each ft
2356 //double debug2 = totSumExpRet_byFTParent.at(i)/totRegionForArea;
2357 sfd(totSumExpRet_byFTParent.at(i),"sumExpReturns",regId,parentFtypes[i],"",DATA_NOW, true);
2358 sfd(totSumExpRet_byFTParent.at(i)/totRegionForArea,"expReturns",regId,parentFtypes[i],"",DATA_NOW, true);
2359
2360 } // end for each partentFt
2361 //double debug3 = totSumExpRet/totRegionForArea;
2362 sfd(totSumExpRet,"sumExpReturns",regId,"","",DATA_NOW, true);
2363 sfd(totSumExpRet/totRegionForArea,"expReturns",regId,"","",DATA_NOW, true);
2364
2365 } // end for each region
2366
2367 // Computing pathogens share of forest invasion
2368 if(MD->getBoolSetting("usePathogenModule")){
2369 for(uint r2= 0; r2<regIds2.size();r2++){
2370 int regId = regIds2[r2];
2371 regPx = MTHREAD->MD->getRegion(regId)->getMyPixels();
2372 double totalForArea = 0.0;
2373 double invadedArea = 0.0;
2374 for (uint p=0;p<regPx.size();p++){
2375 Pixel* px = regPx[p];
 // a pixel counts as invaded if ANY (ft,dc) has pathogen mortality
2376 int invaded = 0.0;
2377 for(uint j=0;j<fTypes.size();j++){
2378 for (uint u=0; u<dClasses.size(); u++){
2379 if(px->getPathMortality(fTypes[j],dClasses[u]) > 0){
2380 invaded = 1.0;
2381 }
2382 }
2383 }
2384 totalForArea += vSum(px->area);
2385 invadedArea += vSum(px->area)*invaded;
2386 }
2387 sfd(invadedArea/totalForArea,"totalShareInvadedArea",regId,"","",DATA_NOW, true);
2388 }
2389 } // end we are using path model
2390
2391 // Storing the overall deathTimber in the region
2392 vector <string> priPr = MTHREAD->MD->getStringVectorSetting("priProducts");
2393 for(uint r2= 0; r2<regIds2.size();r2++){
2394 int regId = regIds2[r2];
2395 float deadTimber = MTHREAD->MD->getAvailableDeathTimber(priPr,regId,currentYear);
2396 sfd(deadTimber, "usableDeadTimber", regId, "", "", currentYear, true);
2397 }
2398}
2399
2400/**
2401 * This function call registerHarvesting() (accounts for emissions from for. operations), registerDeathBiomass() (registers new stocks of death biomass),
2402 * registerProducts() (registers new stock of products) and registerTransports() (accounts for emissions from transportation).
2403 *
2404 * It passes to registerProducts():
2405 * - for primary products, the primary products exported out of the country, but not those exported to other regions or used in the region as
2406 * these are assumed to be totally transformed to secondary products;
2407 * - for secondary products, those produced in the region from locally or regionally imported primary product plus those secondary products
2408 * imported from other regions, less those exported to other regions. It doesn't include the secondary products imported from abroad the country.
2409 */
2410void
// NOTE(review): declarator line (original 2411) lost in extraction;
// presumably `ModelCoreSpatial::registerCarbonEvents(){` — TODO confirm.
// Purpose: feed the yearly carbon-balance module (CBAL) with this year's
// events: death biomass stocks, harvesting emissions, product stocks
// (primary exports, secondary consumption) and transport emissions, then
// trigger the end-of-life energy-substitution accounting.
2412
2413 //void registerHarvesting(const int & regId, const string & fType, const double & value); ///< register the harvesting of trees -> cumEmittedForOper
2414 //void registerDeathBiomass(const double &value, const int & regId, const string &fType);
2415 //void registerProducts(const double &value, const int & regId, const string &productName);
2416 //void registerTransports(const double &distQ, const int & regId);
2417
2418 for(uint i=0;i<regIds2.size();i++){
2419 for(uint j=0;j<fTypes.size();j++){
2420 double deathBiomass = gfd("vMort",regIds2[i],fTypes[j],DIAM_ALL,DATA_NOW)+ gfd("vMortAdd",regIds2[i],fTypes[j],DIAM_ALL,DATA_NOW);
2421 double harvesting = gfd("hV",regIds2[i],fTypes[j],DIAM_ALL,DATA_NOW);
2422 MTHREAD->CBAL->registerDeathBiomass(deathBiomass, regIds2[i], fTypes[j]); // register new stock
2423 MTHREAD->CBAL->registerHarvesting(harvesting, regIds2[i], fTypes[j]); // account for emissions. Added 201500715: it also moves the extra biomass to the death biomass pool
2424 }
2425
2426 for(uint p=0;p<priProducts.size();p++){
2427 // for the primary products we consider only the exports as the domestic consumption is entirely transformed in secondary products
2428 double int_exports = gpd("sa",regIds2[i],priProducts[p],DATA_NOW);
2429 MTHREAD->CBAL->registerProducts(int_exports, regIds2[i], priProducts[p]); // register new stock
2430 }
2431 for(uint p=0;p<secProducts.size();p++){
2432 // for the transformed products we skip those that are imported, hence derived from other forest systems
2433 // but we consider those coming from other regions
2434 double consumption = gpd("dl",regIds2[i],secProducts[p],DATA_NOW); // dl = sl + net regional imports
2435 MTHREAD->CBAL->registerProducts(consumption, regIds2[i], secProducts[p]); // register new stock
2436 }
2437
2438 }
 // transport emissions: distance * transported quantity, summed over all
 // products and all destination regions within the same level-1 region
2439 for (uint r1=0;r1<l2r.size();r1++){
2440 for (uint r2=0;r2<l2r[r1].size();r2++){
2441 int rfrom= l2r[r1][r2];
2442 double distQProd = 0.0;
2443 for (uint r3=0;r3<l2r[r1].size();r3++){
2444 int rto = l2r[r1][r3];
2445 double dist = gpd("dist",rfrom,"",DATA_NOW,i2s(rto)); //km
2446 for(uint p=0;p<allProducts.size();p++){
2447 distQProd += dist*gpd("rt",rfrom,allProducts[p],DATA_NOW,i2s(rto)); //km*Mm^3
2448 }
2449 }
2450 MTHREAD->CBAL->registerTransports(distQProd, rfrom);
2451 }
2452 }
2453 MTHREAD->CBAL->HWP_eol2energy(); // used to compute the energy substitution from hwp that reach the end of life and doesn't go to landfil. Previously the energy substitution was computed in registerProducts(), that is at the time when the product was produced.
2454
2455}
2456
2457/**
2458 * Compute the expectation weighted price based on the ratio of the international (world) price between the future and now.
2459 *
2460 * @param curLocPrice The local current price
2461 * @param worldCurPrice The world current price
2462 * @param worldFutPrice The world future price
2463 * @param sl Supply local
2464 * @param sa Supply abroad
2465 * @param expCoef The expectation coefficient for prices for the agent [0,1]
2466 * @return The expType-averaged local (or weighted) price
2467 */
2468double
2469ModelCoreSpatial::computeExpectedPrice(const double & curLocPrice, const double & worldCurPrice, const double & worldFutPrice, const double & sl, const double & sa, const double & expCoef){
2470 double fullExpWPrice = (curLocPrice*(worldFutPrice/worldCurPrice)*sl+worldFutPrice*sa)/(sa+sl);
2471 double curWPrice = (curLocPrice*sl+worldCurPrice*sa)/(sl+sa);
2472 return curWPrice * (1-expCoef) + fullExpWPrice * expCoef;
2473}
2474
2475/**
2476 * It uses volumes from gis data to "move" volumes from one forest type to the other (when called with what="vol"). Then it moves areas
2477 * proportionally and, as dc0 volumes are not defined but area it is, compute, again proportionally, area in destination forest times for dc=0
2478 * It acts on the pix->vol, pix->area and pix->area_l vectors. It also creates/updates the px->values layer map for the area, but it doesn't cache the
2479 * results in forDataMap.
2480 *
2481 * It is called first with parameter what="vol" in initializePixelVolumes() and then with what="area" in initializePixelAreas().
2482 * As we need the original volumes in the area allocation, original_vols is set as a static variable.
2483 *
2484 */
2485void
// NOTE(review): declarator line (original 2486) lost in extraction; per the
// Doxygen comment above, this is the function called with what="vol" from
// initializePixelVolumes() and what="area" from initializePixelAreas()
// (presumably `ModelCoreSpatial::loadExogenousForestLayers(const string & what)`
// — TODO confirm). It moves exogenous spatially-explicit volumes between
// forest types, then reallocates areas proportionally to the moved volumes.
2487 if(!MD->getBoolSetting("useSpExplicitForestTypes")) return;
2488
2489 int nFTypes = fTypes.size();
2490 int nDC = dClasses.size();
2491 int pxC = 0;
2492
 // count the total number of pixels across all level-2 regions
2493 for(uint ir=0;ir<regIds2.size();ir++){
2494 int r2 = regIds2[ir];
2495 ModelRegion* REG = MTHREAD->MD->getRegion(r2);
2496 regPx = REG->getMyPixels();
2497 pxC += regPx.size();
2498 }
2499
 // static: the original volumes saved in the what="vol" pass are needed
 // again in the later what="area" pass of the same run
2500 static vector<vector<vector<double>>> original_vols(pxC, vector<vector<double>>(nFTypes, vector<double>(nDC, 0.0))); // by px counter, ftype, dc
2501
2502 if(what=="vol"){
2503 // first, before transfering volumes, saving the original ones..
2504 for(uint i=0;i<fTypes.size();i++){
2505 for (uint u=0; u<dClasses.size(); u++){
2506 int pxC_loc = 0;
2507 for(uint ir=0;ir<regIds2.size();ir++){
2508 int r2 = regIds2[ir];
2509 ModelRegion* REG = MTHREAD->MD->getRegion(r2);
2510 regPx = REG->getMyPixels();
2511 for (uint p=0;p<regPx.size();p++){
2512 Pixel* px = regPx[p];
2513 original_vols[pxC_loc][i][u] += px->vol[i][u];
2514 pxC_loc ++;
2515 }
2516 }
2517 }
2518 }
2519 for(uint i=0;i<fTypes.size();i++){
2520 string fti = fTypes[i];
2521 for(uint o=0;o<fTypes.size();o++){
2522 string fto = fTypes[o];
2523 for (uint u=1; u<dClasses.size(); u++){ // first diameter class volumes are computed from the model..
2524 string layerName = "spInput#vol#"+fto+"#"+fti+"#"+i2s(u);
2525 if (MTHREAD->GIS->layerExist(layerName)){
2526 for(uint ir=0;ir<regIds2.size();ir++){
2527 int r2 = regIds2[ir];
2528 ModelRegion* REG = MTHREAD->MD->getRegion(r2);
2529 regPx = REG->getMyPixels();
2530 for (uint p=0;p<regPx.size();p++){
2531 Pixel* px = regPx[p];
 // never move more volume than the source ft actually has
2532 double vol_transfer = min(px->getDoubleValue(layerName,true)/1000000,px->vol[i][u]) ; // Vol in the layer are in m^3, in the model in Mm^3
2533 px->vol[i][u] -= vol_transfer;
2534 px->vol[o][u] += vol_transfer;
2535 }
2536 }
2537 }
2538 }
2539 }
2540 }
2541 }
2542
2543 if(what=="area"){
2544 /**
2545 Allocate area proportionally to volumes (see file test_proportional_computation_of_areas_from_volumes.ods)
2546 Example:
2547 FtIn FtOut Vtrasfer
2548 con ash 0.2
2549 brHf ash 0.1
2550 brCopp ash 0.3
2551 con oak 0.3
2552 brHf oak 0.2
2553 brCopp oak 0.1
2554
2555 Vorig Aorig Vnew Anew
2556 con 10 30 9.5 28.5 Aorig-Aorig*(Vtrasfer1/Vorig)-Aorig(Vtrasfer2/Vorig)
2557 brHf 5 20 4.7 18.8
2558 brCopp 2 20 1.6 16
2559 ash 0 0 0.6 4 Aorig1*Vtrasfer1/(Vorig1)+Aorig2*Vtrasfer2/(Vorig2)+...
2560 oak 0 0 0.6 2.7
2561 70 70
2562 */
2563 // first, before transfering areas, saving the original ones (we already saved the vols in the what="vol" section, that is called before this one)..
2564 vector<vector<vector<double>>> original_areas(pxC, vector<vector<double>>(nFTypes, vector<double>(nDC, 0.0))); // by px counter, ftype, dc
2565 for(uint i=0;i<fTypes.size();i++){
2566 for (uint u=0; u<dClasses.size(); u++){
2567 int pxC_loc = 0;
2568 for(uint ir=0;ir<regIds2.size();ir++){
2569 int r2 = regIds2[ir];
2570 ModelRegion* REG = MTHREAD->MD->getRegion(r2);
2571 regPx = REG->getMyPixels();
2572 for (uint p=0;p<regPx.size();p++){
2573 Pixel* px = regPx[p];
2574 original_areas[pxC_loc][i][u] += px->area_l[i][u];
2575 pxC_loc ++;
2576 }
2577 }
2578 }
2579 }
2580
2581
2582 // transferred areas ordered by pxcounter, i and then o ftype. Used to then repart the 0 diameter class..
2583 vector<vector<vector<double>>> transferred_areas(pxC, vector<vector<double>>(nFTypes, vector<double>(nFTypes, 0.0))); // initialize a 3d vector of nFTypes zeros.
2584
2585 for(uint i=0;i<fTypes.size();i++){
2586 string fti = fTypes[i];
2587 for(uint o=0;o<fTypes.size();o++){
2588 string fto = fTypes[o];
2589 for (uint u=1; u<dClasses.size(); u++){ // first diameter class area is comuted proportionally..
2590 string layerName = "spInput#vol#"+fto+"#"+fti+"#"+i2s(u);
2591 if (MTHREAD->GIS->layerExist(layerName)){
2592 int pxC_loc = 0;
2593 for(uint ir=0;ir<regIds2.size();ir++){
2594 int r2 = regIds2[ir];
2595 ModelRegion* REG = MTHREAD->MD->getRegion(r2);
2596 regPx = REG->getMyPixels();
2597 for (uint p=0;p<regPx.size();p++){
2598 Pixel* px = regPx[p];
2599 double vol_i_orig = original_vols[pxC_loc][i][u];
2600 double vol_transfer = vol_i_orig?px->getDoubleValue(layerName,true)/1000000:0.0; // Vol in the layer are in m^3, in the model in Mm^3
2601 double area_i_orig = original_areas[pxC_loc][i][u];
 // area moves in the same proportion as the moved volume
2602 double area_transfer = vol_i_orig?area_i_orig*vol_transfer/vol_i_orig:0.0;
2603 px->area_l[i][u] -= area_transfer;
2604 px->area[i][u] = px->area_l[i][u];
2605 px->area_l[o][u] += area_transfer;
2606 px->area[o][u] = px->area_l[o][u];
2607 transferred_areas[pxC_loc][i][o] += area_transfer;
2608 pxC_loc ++;
2609 }
2610 }
2611 }
2612 }
2613 }
2614 }
2615
2616 // Moving the area in the 0 diameter class, for which no info is normally available..
2617 double pxC_loc = 0;
2618 for(uint ir=0;ir<regIds2.size();ir++){
2619 int r2 = regIds2[ir];
2620 ModelRegion* REG = MTHREAD->MD->getRegion(r2);
2621 regPx = REG->getMyPixels();
2622 for (uint p=0;p<regPx.size();p++){
2623 Pixel* px = regPx[p];
2624 for(uint i=0;i<fTypes.size();i++){
2625 for(uint o=0;o<fTypes.size();o++){
2626 double area_i_orig = 0.0;
2627 for (uint u=1; u<dClasses.size(); u++){ // we want to skip the 0 diameter class, this is why we don't simply use vSum()..
2628 area_i_orig += original_areas[pxC_loc][i][u];
2629 }
2630 double area_transfer_u0 = area_i_orig?original_areas[pxC_loc][i][0]*(transferred_areas[pxC_loc][i][o]/area_i_orig):0.0;
2631 px->area_l[i][0] -= area_transfer_u0 ;
2632 px->area[i][0] = px->area_l[i][0];
2633 px->area_l[o][0] += area_transfer_u0 ; // bug corrected 20151130: it was 0 instead of o (output) !!
 // NOTE(review): the right-hand side below reads px->area_l[0][0],
 // not px->area_l[o][0] — despite the 20151130 bug-fix note this
 // still looks like an index typo (0 vs o); verify against repo.
2634 px->area[o][0] = px->area_l[0][0]; // bug corrected 20151130: it was 0 instead of o (output) !!
2635 }
2636 }
2637 pxC_loc++;
2638 }
2639 }
2640
2641 // Alligning the area memorised in the px layers to the new areas of the ft..
2642 for(uint i=0;i<fTypes.size();i++){
2643 string fti_id = fTypes[i];
2644 forType* fti = MTHREAD->MD->getForType(fti_id);
2645 int ft_memType = fti->memType;
2646 string ft_layerName = fti->forLayer;
2647 //if(ft_memType==3){
2648 // MTHREAD->GIS->addLayer(ft_layerName,ft_layerName,false,true); //20151130: no needed as we already added it in applyForestReclassification (yes, odd, as memory type 3 layerrs do not have any reclassification rule associated, but if I don't add the layer at that time I got other errors)
2649 // }
2650 if(ft_memType==3 ||ft_memType==2){
2651 for(uint ir=0;ir<regIds2.size();ir++){
2652 int r2 = regIds2[ir];
2653 ModelRegion* REG = MTHREAD->MD->getRegion(r2);
2654 regPx = REG->getMyPixels();
2655 for (uint p=0;p<regPx.size();p++){
2656 Pixel* px = regPx[p];
2657 double area_px = vSum(px->area[i]);
2658 px->changeValue(ft_layerName,area_px*10000);
2659 }
2660 }
2661 }
2662 }
2663 } // end if what is area
2664}
2665
2666void
2668 // Print debug stats on inventory and supplies in each region..
2669 cout << "Printing debug information on initial regional inventories and supplies.." << endl;
2670 cout << "Reg\tProduct\t\tInv\tSt\tSa\tSl" << endl;
2671 for(uint r1=0;r1<l2r.size();r1++){
2672 for(uint r2c=0;r2c<l2r[r1].size();r2c++){
2673 for(uint p=0;p<priProducts.size();p++){
2674 int r2 = l2r[r1][r2c];
2675 double inv = gpd("in",r2,priProducts[p],secondYear);
2676 double st = gpd("st",r2,priProducts[p],secondYear);
2677 double sl = gpd("sl",r2,priProducts[p],secondYear);
2678 double sa = gpd("sa",r2,priProducts[p],secondYear);
2679 cout << r2 << "\t" << priProducts[p] << "\t\t" << inv << "\t" << st << "\t" << sl << "\t" << sa << endl;
2680 }
2681 }
2682 } // end of r1 region
2683 exit(0);
2684
2685}
2686
/**
 * @brief ModelCoreSpatial::allocateHarvesting
 * @param total_st vector of total supply by primary products
 * @return a vector of the remaining supply that is allocated to alive timber (that is, to harvesting)
 *
 * The algorithm is such that it loops over the deathTimberInventory map for each year (newer to older), dc (higher to smaller) and ft.
 * It computes the primary products allocable from that combination and allocates the cell amount to decrease the total_st of those products
 * in a way proportional to what still remains of the allocable products.
 *
 * It is called in the runMarketModule() function.
 *
 */
2699
2700vector <double>
2701ModelCoreSpatial::allocateHarvesting(vector<double> total_st, const int &regId){
2702 if(!MD->getBoolSetting("useDeathTimber",DATA_NOW)) return total_st;
2703 //vector <double> st_fromHarv(priProducts.size(),0.);
2704 //map<iisskey, double > * deathTimberInventory= MD->getDeathTimberInventory();
2705 int maxYears = MD->getMaxYearUsableDeathTimber();
2706 int currentYear = MTHREAD->SCD->getYear();
2707 for(uint y = currentYear-1; y>currentYear-1-maxYears; y--){
2708 for (int u = dClasses.size()-1; u>=0; u--){ // I need to specify u as an integer !
2709 string dc = dClasses.at(u);
2710 for (uint f=0; f<fTypes.size(); f++){
2711 string ft = fTypes[f];
2712 vector<int>allocableProducts = MD->getAllocableProductIdsFromDeathTimber(regId, ft, dc, y, currentYear-1);
2713 iisskey key(y,regId,ft,dc);
2714 double deathTimber = MD->deathTimberInventory_get(key);
2715 double sum_total_st_allocable = 0;
2716 // Computing shares/weights or remaining st to allcate
2717 for(uint ap=0;ap<allocableProducts.size();ap++){
2718 sum_total_st_allocable += total_st.at(allocableProducts[ap]);
2719 }
2720 for(uint ap=0;ap<allocableProducts.size();ap++){
2721 double allocableShare = sum_total_st_allocable?total_st.at(allocableProducts[ap])/sum_total_st_allocable:0.0;
2722 double allocated = min(total_st[allocableProducts[ap]],deathTimber*allocableShare);
2723 MD->deathTimberInventory_incrOrAdd(key,-allocated);
2724 total_st[allocableProducts[ap]] -= allocated;
2725 }
2726 }
2727 }
2728 }
2729 return total_st;
2730}
2731
2732
/**
 * This function has two tasks:
 * 1) compute the policy balances (- -> public cost, subsidy; + -> public revenue, tax) for pol_mktDirInt_s, pol_mktDirInt_d, pol_trSub and pol_tcSub
 *    (pol_fiSub are handled directly in the runManagementModule() function)
 * 2) compute the producer/consumer surpluses
 */
2739void
2741 //int thisYear = MTHREAD->SCD->getYear();
2742 int previousYear = MTHREAD->SCD->getYear()-1;
2743
2744 for(uint r2=0; r2<regIds2.size();r2++){
2745 int regId = regIds2[r2];
2746 ModelRegion* reg = MD->getRegion(regId);
2747
2748 for(uint p=0;p<priProducts.size();p++){
2749 double pol_mktDirInt_s = gpd("pol_mktDirInt_s",regId,priProducts[p]);
2750 double pol_mktDirInt_s_1 = gpd("pol_mktDirInt_s",regId,priProducts[p],previousYear);
2751 double sigma = gpd("sigma",regId,priProducts[p]);
2752 double in = gpd("in",regId,priProducts[p]);
2753 double in_1 = gpd("in",regId,priProducts[p], previousYear);
2754 double pc = gpd("pc",regId,priProducts[p]);
2755 double pc_1 = gpd("pc",regId,priProducts[p], previousYear);
2756 double sc = gpd("sc",regId,priProducts[p]);
2757 double sc_1 = gpd("sc",regId,priProducts[p],previousYear);
2758 double gamma_incr = gpd("gamma_incr",regId,priProducts[p]); // elast supply to increasing stocks
2759 double gamma_decr = gpd("gamma_decr",regId,priProducts[p]); // elast supply to decreasing stocks
2760
2761 double gamma = in>in_1 ? gamma_incr: gamma_decr;
2762 double polBal_mktDirInt_s = pc * (1-pol_mktDirInt_s) * sc;
2763 double surplus_prod = pc * sc -
2764 pc_1
2765 * pol_mktDirInt_s_1
2766 * pow(pol_mktDirInt_s,-1)
2767 * pow(sc_1,-1/sigma)
2768 * pow(in_1/in,gamma/sigma)
2769 * (sigma/(sigma+1))
2770 * pow(sc,(sigma+1)/sigma);
2771
2772 spd(polBal_mktDirInt_s,"polBal_mktDirInt_s",regId,priProducts[p],DATA_NOW,true); // M€
2773 spd(surplus_prod,"surplus_prod",regId,priProducts[p],DATA_NOW,true);
2774 }
2775 for(uint p=0;p<secProducts.size();p++){
2776 double pol_mktDirInt_d = gpd("pol_mktDirInt_d",regId,secProducts[p]);
2777 double pol_mktDirInt_d_1 = gpd("pol_mktDirInt_d",regId,secProducts[p],previousYear);
2778 double sigma = gpd("sigma",regId,secProducts[p]);
2779 double pol_trSub = gpd("pol_trSub",regId,secProducts[p]);
2780 double pc = gpd("pc",regId,secProducts[p]);
2781 double pc_1 = gpd("pc",regId,secProducts[p],previousYear);
2782 double dc = gpd("dc",regId,secProducts[p]);
2783 double dc_1 = gpd("dc",regId,secProducts[p],previousYear);
2784 double dc0 = gpd("dc",regId,secProducts[p],secondYear);
2785 double m = gpd("m",regId,secProducts[p]);
2786 double sl = gpd("sl",regId,secProducts[p]); // sl because dl include also the production from byproduct that has not subsides in the model
2787
2788 double polBal_mktDirInt_d = pc * (pol_mktDirInt_d - 1) * dc;
2789 double polBal_trSub = m * (pol_trSub-1) * sl;
2790 double trs = 0.5; // 0.1: we consider 90 percent of the demand integral
2791 double surplus_cons = pc_1
2792 * pol_mktDirInt_d_1
2793 * pow(dc_1,-1/sigma)
2794 * pow(pol_mktDirInt_d,-1)
2795 * (sigma/(sigma+1))
2796 * (pow(dc,(sigma+1)/sigma) - pow(trs*dc0,(sigma+1)/sigma))
2797 - (dc - trs * dc0) * pc ;
2798
2799 spd(polBal_mktDirInt_d,"polBal_mktDirInt_d",regId,secProducts[p],DATA_NOW,true); // M€
2800 spd(polBal_trSub,"polBal_trSub",regId,secProducts[p],DATA_NOW,true); // M€
2801 spd(surplus_cons,"surplus_cons",regId,secProducts[p],DATA_NOW,true); // M€
2802 }
2803
2804 for(uint p=0;p<allProducts.size();p++){
2805 double pol_tcSub = gpd("pol_tcSub",regId,allProducts[p]);
2806 double polBal_tcSub = 0;
2807 vector<ModelRegion*> siblings = reg->getSiblings();
2808 for(uint rTo=0;rTo<siblings.size();rTo++){
2809 int regId2 = siblings[rTo]->getRegId();
2810 double ct = gpd("ct",regId,allProducts[p],DATA_NOW,i2s(regId2));
2811 double rt = gpd("rt",regId,allProducts[p],DATA_NOW,i2s(regId2));
2812 polBal_tcSub += ct*(pol_tcSub-1) * rt;
2813 }
2814 spd(polBal_tcSub,"polBal_tcSub",regId,allProducts[p],DATA_NOW,true); // M€
2815 }
2816
2817 }
2818
2819}
2820
2821/**
2822 * Return the average age using cumTp vector
2823 */
2824double
2826 if(dc == dClasses.size()-1) { return px->cumTp[ft][dc] * 1.2;}
2827 else {return (px->cumTp[ft][dc]+px->cumTp[ft][dc+1])/2; }
2828}
2829
2830
2831
2832
2833// added for carbon project
2834
2835double
2836ModelCoreSpatial::getVHaByYear(const Pixel* px, const int& ft, const int& year, const double& extraBiomass_ratio, const int& regId ) const {
2837 // This function computes and send back the expected volume/ha in a forest type after a specific amount of years
2838 // It uses expected volumes/ha at each diameter class, and makes a linear interpolation between the corresponding times of passage
2839 // Ancient version was a "stairs" function of time, now it is a succession of linear approximations
2840
2841 double vHa = 0;
2842 int thisYear = MTHREAD->SCD->getYear();
2843
2844 // This loop goes through all diameter classes to find the last one that was reached at year tested
2845 for (int u = 0; u<dClasses.size(); u++){
2846 // We retrieve the expected time of passage for the current diameter class
2847 // and we compare it to the year tested
2848 double cumTP = px->cumTp_exp[ft][u];
2849 if (year>=cumTP) {
2850 // Forest has already reached this class,
2851 // We do nothing and go to the next one
2852 }
2853 else {// Forest has not reached this class yet, we stop here
2854 // Assumption : Volume growth is a linear approximation between volume at previous DC and at current DC
2855 // There would be a problem if "year" was negative, but it should not happen since year starts at 0
2856 double cumTP_plus = px->cumTp_exp[ft][u];
2857 double cumTP_minus = px->cumTp_exp[ft][u-1];
2858 double volHa_plus = px->vHa_exp[ft][u];
2859 double volHa_minus = px->vHa_exp[ft][u-1];
2860 double slope = (volHa_plus - volHa_minus)/(cumTP_plus - cumTP_minus);
2861
2862 vHa = (volHa_minus + (year-cumTP_minus)*slope)*(1+extraBiomass_ratio);
2863 return vHa;
2864 }
2865 }
2866 // if we arrive here, we have exited the loop
2867 // and the forest has reached the last diameter class
2868 int dc = dClasses.size()-1; // index of the last diameter class
2869 double mortCoeff = gfd("mort",regId,fTypes[ft],dClasses[dc],thisYear); // mortality rate of the last diameter class
2870 double cumTP_max = px-> cumTp_exp[ft][dc]; // cumulative TP to reach last diameter class
2871 double time_elapsed = year - cumTP_max; // time since the last class was reached
2872 double volHa_max = px->cumTp_exp[ft][dc]; // volume/ha when the last class was reached
2873
2874 // Now we compute the volume/ha at the year tested
2875 // Assumption: trees don't grow anymore but slowly die according to mortality rate
2876 vHa = volHa_max*pow(1-mortCoeff,time_elapsed)*(1+extraBiomass_ratio);
2877 return vHa;
2878} // end of function
2879
2880
2881
2882/** ANCIENT VERSION OF FUNCTION. VOLUME WAS A STAIRS FUNCTION OF TIME
2883 * Return the Volume/ha in a forest after a given number of year after planting, for a specific forest type
2884
2885double
2886ModelCoreSpatial::getVHaByYear(const Pixel* px, const int& ft, const int& year, const double& extraBiomass_ratio) const {
2887 double vHa = 0;
2888 //int regId = px->getMyRegion()->getRegId();
2889 //double extraBiomass_ratio = gfd("extraBiomass_ratio",regId,fTypes[ft],"",DATA_NOW);
2890
2891 for (int u = 0; u<dClasses.size(); u++){
2892 double cumTP_exp = px->cumTp_exp[ft][u];
2893 if (year>=cumTP_exp){
2894 vHa = px->vHa_exp[ft][u]*(1+extraBiomass_ratio);
2895 } else {
2896 return vHa;
2897 }
2898 }
2899 return vHa;
2900}
2901*/
@ OP_SUM
Perform a SUM operation.
Definition BaseClass.h:77
@ DATA_NOW
The required data is for the current year.
Definition BaseClass.h:73
@ VAR_VOL
Definition BaseClass.h:122
@ VAR_AREA
Definition BaseClass.h:123
#define DIAM_ALL
All diameter classes.
Definition BaseClass.h:157
@ MSG_CRITICAL_ERROR
Print an error message and stop the model.
Definition BaseClass.h:62
@ MSG_ERROR
Print an ERROR message, but don't stop the model.
Definition BaseClass.h:61
@ MSG_DEBUG
Print a debug message, normally filtered out.
Definition BaseClass.h:58
@ MSG_WARNING
Print a WARNING message.
Definition BaseClass.h:60
@ MSG_INFO
Print an INFO message.
Definition BaseClass.h:59
@ MSG_NO_MSG
Do not actually output any message.
Definition BaseClass.h:57
ThreadManager * MTHREAD
Pointer to the Thread manager.
Definition BaseClass.h:467
int vSum(const vector< int > &vector_h) const
Definition BaseClass.h:276
vector< T > positionsToContent(const vector< T > &vector_h, const vector< int > &positions)
Return a vector of content from a vector and a vector of positions (int)
Definition BaseClass.h:359
void incrOrAddMapValue(map< K, V > &mymap, const K &key, const V &value)
Increments a value stored in a map of the specified value, given the key.
Definition BaseClass.h:325
int getMaxPos(const vector< K > &v)
Returns the position of the maximum element in the vector (the last one in case of multiple equivalen...
Definition BaseClass.h:383
void incrMapValue(map< K, V > &mymap, const K &key, const V &value, const int &error_level=MSG_CRITICAL_ERROR)
Increments a value stored in a map of the specified value, given the key.
Definition BaseClass.h:312
string d2s(const double &double_h) const
double to string conversion
K getMax(const vector< K > &v)
Returns the value of the maximum element in the vector (the last one in case of multiple equivalent m...
Definition BaseClass.h:391
map< K, V > vectorToMap(const vector< K > &keys, const V &value=0.0)
Returns a map built using the given vector and the given (scalar) value as keys/values pairs.
Definition BaseClass.h:349
double getAvg(const vector< K > &v)
Returns the average of the elements in the vector.
Definition BaseClass.h:399
V findMap(const map< K, V > &mymap, const K &key, const int &error_level=MSG_CRITICAL_ERROR, const V &notFoundValue=numeric_limits< V >::min()) const
Lookup a map for a value. Return the value starting from the key.
Definition BaseClass.h:286
void resetMapValues(map< K, V > &mymap, const V &value)
Reset all values stored in a map to the specified one.
Definition BaseClass.h:341
void msgOut(const int &msgCode_h, const string &msg_h, const bool &refreshGUI_h=true) const
Overloaded function to print the output log.
Definition BaseClass.cpp:50
string i2s(const int &int_h) const
integer to string conversion
int getPos(const K &element, const vector< K > &v, const int &msgCode_h=MSG_CRITICAL_ERROR)
Definition BaseClass.h:421
double getSd(const vector< K > &v, bool sample=true)
Definition BaseClass.h:408
void initialiseDeathBiomassStocks(const vector< double > &deathByFt, const int &regId)
Initialises the stocks of death biomass for the avgLive_* years before the simulation starts.
Definition Carbon.cpp:169
void HWP_eol2energy()
Computes the energy substitution for the quota of HWP that reachs end of life and doesn't go to landf...
Definition Carbon.cpp:288
void initialiseProductsStocks(const vector< double > &qByProduct, const int &regId)
Initialises the stocks of products for the avgLive_* years before the simulation starts.
Definition Carbon.cpp:203
void registerDeathBiomass(const double &value, const int &regId, const string &fType)
Registers the "death" of a given amount of biomass, storing it in the deathBiomass map.
Definition Carbon.cpp:243
void registerTransports(const double &distQ, const int &regId)
Registers the quantities emitted by transport of wood FROM a given region.
Definition Carbon.cpp:277
void registerProducts(const double &value, const int &regId, const string &productName)
Registers the production of a given amount of products, storing it in the products maps....
Definition Carbon.cpp:257
void initialiseEmissionCounters()
Initialises the emission counters to zero.
Definition Carbon.cpp:158
void registerHarvesting(const double &value, const int &regId, const string &fType)
Registers the harvesting of trees increasing the value of cumEmittedForOper.
Definition Carbon.cpp:225
vector< Pixel * > getAllPlotsByRegion(ModelRegion &region_h, bool shuffle=false)
Return the vector of all plots by a specific region (main region or subregion), optionally shuffled;.
Definition Gis.cpp:855
bool layerExist(const string &layerName_h, bool exactMatch=true) const
Return a pointer to a layer given its name.
Definition Gis.cpp:552
void updateImage(string layerName_h)
Add one layer to the system.
Definition Gis.cpp:700
void runMarketModule()
computes st (supply total) and pw (weighted price). Optimisation inside.
vector< double > allocateHarvesting(vector< double > total_st, const int &regId)
Using the deathTimberInventory map, this function allocate the total st in st from death timber (that...
void sumRegionalForData()
computes vol, hV, harvestedArea, regArea and expReturns at reg level from the pixel level
vector< string > allProducts
vector< string > fTypes
void cachePixelExogenousData()
computes pixel level tp, meta and mort
void cacheSettings()
just cache exogenous settings from ModelData
void computeInventary()
in=f(vol_t-1)
ModelCoreSpatial(ThreadManager *MTHREAD_h)
void registerCarbonEvents()
call registerHarvesting(), registerDeathBiomass(), registerProducts() and registerTransports()
void printDebugInitRegionalValues()
print initial inv, st, sl and sa in each region
void computeEconomicBalances()
compute the policy balances (- -> costs; + -> rev) and the producer/consumer surpluses
void loadExogenousForestLayers(const string &what)
Set pixel volumes (what="vol") OR areas (what="area") by specific forest types as defined in gis laye...
void runBiologicalModule()
computes hV, hArea and new vol at end of year
void resetPixelValues()
swap volumes->lagged_volumes and reset the other pixel vectors
vector< Pixel * > regPx
void updateMapAreas()
computes forArea_{ft}
vector< string > priProducts
void assignSpMultiplierPropToVols()
ModelCoreSpatial::assignSpMultiplierPropToVols assigns the spatial multiplier (used in the time of re...
void runManagementModule()
computes regArea and expectedReturns
double getAvgAgeByDc(Pixel *px, int ft, int dc)
return the average age of a tree at a given diameter class, using the cumTp vector
double gfd(const string &type_h, const int &regId_h, const string &forType_h, const string &freeDim_h, const int &year=DATA_NOW) const
void computeCumulativeData()
computes cumTp_exp, vHa_exp, vHa
void initMarketModule()
computes st and pw for second year and several needed-only-at-t0-vars for the market module
vector< vector< int > > l2r
void initializePixelVolumes()
distribuite regional exogenous volumes to pixel volumes using corine land cover area as weight
vector< string > secProducts
void sfd(const double &value_h, const string &type_h, const int &regId_h, const string &forType_h, const string &freeDim_h, const int &year=DATA_NOW, const bool &allowCreate=false) const
double gpd(const string &type_h, const int &regId_h, const string &prodId_h, const int &year=DATA_NOW, const string &freeDim_h="") const
double computeExpectedPrice(const double &curLocPrice, const double &worldCurPrice, const double &worldFutPrice, const double &sl, const double &sa, const double &expCoef)
Compute weighted expected price for a given product.
bool app(const string &prod_h, const string &forType_h, const string &dClass_h) const
vector< string > dClasses
vector< string > pDClasses
void initialiseDeathTimber()
Set deathTimberInventory to zero for the previous years (under the hipotesis that we don't have advan...
double getVHaByYear(const Pixel *px, const int &ft, const int &year, const double &extraBiomass_ratio, const int &regId) const
return the Volume/ha in a forest after a given number of year after planting, for a specific forest t...
void spd(const double &value_h, const string &type_h, const int &regId_h, const string &prodId_h, const int &year=DATA_NOW, const bool &allowCreate=false, const string &freeDim_h="") const
void initialiseCarbonModule()
call initialiseDeathBiomassStocks(), initialiseProductsStocks() and initialiseEmissionCounters()
void initializePixelArea()
compute px->area for each ft and dc
vector< int > regIds2
void updateOtherMapData()
update (if the layer exists) other gis-based data, as volumes and expected returns,...
void cacheDynamicSettings()
cache settings that may change with time
vector< string > getForTypeParents()
forType * getForType(int position)
Definition ModelData.h:128
vector< int > getAllocableProductIdsFromDeathTimber(const int &regId_h, const string &ft, const string &dc, const int &harvesting_year, int request_year=DATA_NOW)
Returns the ids of the primary products that is possible to obtain using the timber recorded death in...
double getAvailableAliveTimber(const vector< string > &primProd_h, int regId_h)
Returns the timber available for a given set of primary products as stored in the px->vol_l vector.
bool getBoolSetting(const string &name_h, int position=0, int reg=WORLD) const
const int getMaxYearUsableDeathTimber(const string &prod_h, const string &forType_h, const string &dClass_h)
double getDoubleSetting(const string &name_h, int position=0, int reg=WORLD) const
vector< string > getForTypeIds(bool all=false)
By default it doesn't return forTypes used only as input.
vector< vector< int > > createCombinationsVector(const int &nItems)
Return a vector containing any possible combination of nItems items (including any possible subset)....
vector< int > getForTypeChilds_pos(const string &forTypeId_h, bool all=false)
int getErrorLevel()
Definition ModelData.h:144
string getForTypeParentId(const string &forTypeId_h)
Definition ModelData.cpp:90
vector< string > getStringVectorSetting(const string &name_h, int reg=WORLD) const
vector< string > getForTypeChilds(const string &forTypeId_h)
Definition ModelData.cpp:98
double deathTimberInventory_get(const iisskey &thekey)
Definition ModelData.h:197
bool getTempBool()
Definition ModelData.h:145
const double getForData(const string &type_h, const int &regId_h, const string &forType_h, const string &freeDim_h, const int &year=DATA_NOW)
vector< int > getRegionIds(int level_h, bool excludeResidual=true)
ModelRegion * getRegion(int regId_h)
int getNForTypesChilds(const string &forTypeId_h)
void setErrorLevel(int errorLevel_h)
Definition ModelData.h:143
int getIntSetting(const string &name_h, int position=0, int reg=WORLD) const
void setTempBool(bool tempBool_h)
Definition ModelData.h:146
string getStringSetting(const string &name_h, int position=0, int reg=WORLD) const
void deathTimberInventory_incrOrAdd(const iisskey &thekey, double value_h)
Definition ModelData.h:195
double calculateAnnualisedEquivalent(const double &amount_h, const int &years_h, const double &ir) const
Calculate the annual equivalent flow.
double getAvailableDeathTimber(const vector< string > &primProd_h, int regId_h, int year_h)
Returns the timber available for a given set of primary products as stored in the deathTimberInventor...
double getValue(string layerName, int op=OP_SUM)
return the values of its own pixels for the specified layer. Possible operations: OP_SUM or OP_AVG
vector< ModelRegion * > getSiblings(bool excludeResidual=true)
Return a vector of pointers to the siblings regions.
vector< Pixel * > getMyPixels()
Definition ModelRegion.h:86
string getRegSName() const
Definition ModelRegion.h:67
vector< double > inResByAnyCombination
Vector of inventory resource for each possible combination of primary products. This store both alive...
Definition ModelRegion.h:88
double getArea(const string &fType_h, const string &dClass_h)
Get area by ft and dc (from pixel->area matrix)
void printOptLog(bool optimal, int &nIterations, double &obj)
Definition Output.cpp:827
void print(bool earlyPrint=true)
Print output. If earlyPrinting it doesn't print some stuff for which we don't yet have values.
Definition Output.cpp:426
void printDetailedHV(map< tr1::array< string, 4 >, double > hVol_byPrd)
Definition Output.cpp:1173
Pixel-level class.
Definition Pixel.h:47
vector< double > expectedAnnualIncome_carbon
Definition Pixel.h:124
map< int, vector< double > > regArea
Definition Pixel.h:114
vector< vector< vector< double > > > hVol_byPrd
Definition Pixel.h:113
vector< vector< double > > area
Definition Pixel.h:107
vector< vector< double > > area_l
store the areas of the previous year
Definition Pixel.h:130
vector< double > expectedReturns
Definition Pixel.h:120
vector< vector< double > > mort
Definition Pixel.h:133
vector< vector< double > > vHa_exp
This is the expected version of vHa, used for calculating profits.
Definition Pixel.h:140
vector< vector< double > > hVol
Definition Pixel.h:111
vector< double > expectedReturnsNotCorrByRa
by ft. Attenction, reported expReturns at "forest" level (compared with those at forest type level) d...
Definition Pixel.h:127
double getSTData(const string &parName, const string &forName, int year, const string &d2="", double naToReturn=RETNA)
Definition Pixel.cpp:246
void swap(const int &swap_what)
Assign to the delayed value the current values, e.g. vol_l = vol.
Definition Pixel.cpp:422
double getPathMortality(const string &forType, const string &dC, int year=DATA_NOW)
Return the INCREASED mortality due to pathogen presence for a given ft and dc in a certain year (defa...
Definition Pixel.cpp:329
void setSpModifier(const double &value, const int &ftindex)
Definition Pixel.h:87
vector< double > avalCoef
Availability (of wood resources) coefficient. A [0,1] coefficient (new: by forest type) that reduces ...
Definition Pixel.h:148
vector< double > expectedAnnualIncome_timber
Definition Pixel.h:125
double expTypePrices
Sampling derived expectation types of this agent (prices)
Definition Pixel.h:146
double expType
Sampling derived expectation types of this agent (forest bilogical parameters: growth,...
Definition Pixel.h:145
vector< vector< double > > hProductivity
Definition Pixel.h:112
vector< vector< double > > cumAlive_exp
This is the expected version of cumAlive, used for calculating profits.
Definition Pixel.h:141
vector< vector< double > > vMort
Definition Pixel.h:118
double getPastRegArea(const int &ft_idx, const int &year)
Definition Pixel.cpp:390
vector< double > initialDc0Area
Definition Pixel.h:109
vector< int > optFtChosen
Definition Pixel.h:122
double getID() const
Definition Pixel.h:67
void changeValue(const string &layerName_h, const double &value_h, const bool &setNoValueForZero=false)
Change the value of an existing layerMTHREAD->GIS->pack(parName, forName, dClass, year),...
Definition Pixel.cpp:135
vector< vector< double > > vHa
Volume at hectar by each diameter class [m^3/ha].
Definition Pixel.h:137
vector< vector< double > > vol_l
store the volumes of the previous year
Definition Pixel.h:129
vector< vector< double > > vol
Definition Pixel.h:106
vector< vector< double > > deltaArea
Definition Pixel.h:108
vector< vector< double > > addMort
Definition Pixel.h:134
vector< vector< double > > tp
Definition Pixel.h:135
vector< vector< double > > cumTp_exp
This is the expected version of cumTp, used for calculating profits.
Definition Pixel.h:139
vector< vector< double > > hArea
Definition Pixel.h:110
vector< vector< double > > vMortAdd
Definition Pixel.h:119
vector< vector< double > > cumAlive
Cumulative prob of remaining alive at beginnin of a given diam class.
Definition Pixel.h:138
double getDoubleValue(const string &layerName_h, const bool &returnZeroForNoValue=false) const
Return the value for a specific layer.
Definition Pixel.cpp:158
vector< int > optDc
Definition Pixel.h:121
vector< int > optDcChosen
Definition Pixel.h:123
vector< vector< double > > cumTp
This is time of passage to REACH a diameter class (while the exogenous tp by diameter class is the ti...
Definition Pixel.h:136
vector< double > vReg
Definition Pixel.h:117
vector< vector< double > > beta
Definition Pixel.h:132
double getMultiplier(const string &multiplierName, const string &forName, int year=DATA_NOW)
Definition Pixel.cpp:184
int getYear()
Definition Scheduler.h:49
void advanceYear()
Definition Scheduler.h:51
Thread manager. Responsable to manage the main thread and "speak" with the GUI.
Carbon * CBAL
Module for the Carbon Balance.
Scheduler * SCD
the scheduler object (simulation-loops scheduler)
Gis * GIS
GIS information and methods.
ModelData * MD
the model data object
Ipopt::SmartPtr< Ipopt::TNLP > OPT
Market optimisation.
Output * DO
data output
Class to provide a simple integer-integer-string-string key in std maps.
Definition BaseClass.h:213
Forest types (struct)
Definition ModelData.h:293
string forLayer
Definition ModelData.h:297
int memType
Definition ModelData.h:296