diff --git a/BVDecompTest.m b/BVDecompTest.m new file mode 100644 index 0000000..37dc751 --- /dev/null +++ b/BVDecompTest.m @@ -0,0 +1,39 @@ +function [chi, p] = BVDecompTest(K,NQ,N) +%function [chi, p] = DecompositionTest(K,NQ,N) +%tests whether the components of a kernel deviates from the composite +% +%K is the matrix of component kernels, from +%[K,grid] = KernelDecomposition(data,partition,kernel) +%NQ>1 is the number of quantiles +%N is the number of data used to estimate the kernels +% +%chi is the test-statistic, distributed as a chi-squared with degrees of +%freedom equal to the number of quantiles minus one times the number of +%component minus one +%p is the p-value +% +%First version, Richard Tol, 11 March 2020 +%This version, Richard Tol, 11 March 2020 +P = size(K,2); +G = size(K,1); +CDF = K(:,1); +for i=2:G, + CDF(i) = CDF(i-1)+K(i,1); +end + +for i=1:NQ, + Q(i,:) = sum(K(CDF<=i/NQ & CDF > (i-1)/NQ,2:P)); +end + +w = sum(K(:,2:P),1); + +for i=1:NQ, + Pearson(i,:) = ((Q(i,:) - w/NQ).^2)./(w/NQ); +end + +chi = sum(sum(Pearson))*N/NQ; + +p = 1-chi2cdf(chi,(P-2)*(NQ-1)); + +end + diff --git a/Characteristics.m b/Characteristics.m index 6f3818e..fa90220 100644 --- a/Characteristics.m +++ b/Characteristics.m @@ -1,50 +1,55 @@ -%% Characteristics -% -% First version: Richard Tol, 2 November 2011 -% This version: Richard Tol, 2 November 2011 - -display('Characteristics'); - -for i=1:NFilter, - KernelMean(i) = SCCgrid*JointPDF(:,i); - vmeansq= (SCCgrid.*SCCgrid)*JointPDF(:,i); - var = vmeansq-KernelMean(i)*KernelMean(i); - KernelStDev(i) = sqrt(var); - [vmax vind] = max(JointPDF(:,i)); - KernelMode(i) = SCCgrid(vind); - vsmaller = (JointCDF(:,i) < 0.5); - vlarger = (JointCDF(:,i) > 0.5); - KernelMedian(i) = (min(SCCgrid(vlarger)) + max(SCCgrid(vsmaller)))/2; - vsmaller = (JointCDF(:,i) < 0.05); - vlarger = (JointCDF(:,i) > 0.05); - Kernel05(i) = (min(SCCgrid(vlarger)) + max(SCCgrid(vsmaller)))/2; - vsmaller = (JointCDF(:,i) < 0.95); - vlarger = (JointCDF(:,i) > 0.95); - Kernel95(i) = (min(SCCgrid(vlarger)) + max(SCCgrid(vsmaller)))/2; -end - - -%{ -for i=1:NTime, - for f=1:NFilter, - if sum(CondCDF(:,i,f)) > 0, - TimeMean(i,f) = SCCgrid*CondPDF(:,i,f); - vmeansq = (SCCgrid.*SCCgrid)*CondPDF(:,i,f); - var = vmeansq - TimeMean(i,f)*TimeMean(i,f); - TimeStDev(i,f) = sqrt(var); - [vmax vind] = max(CondPDF(:,i,f)); - TimeMode(i,f) = SCCgrid(vind); - vsmaller = (CondCDF(:,i,f) < 0.5); - vlarger = (CondCDF(:,i,f) > 0.5); - TimeMedian(i,f) = (min(SCCgrid(vlarger)) + max(SCCgrid(vsmaller)))/2; - vsmaller = (CondCDF(:,i,f) < 0.05); - vlarger = (CondCDF(:,i,f) > 0.05); - Time05(i,f) = (min(SCCgrid(vlarger)) + max(SCCgrid(vsmaller)))/2; - vsmaller = (CondCDF(:,i,f) < 0.95); - vlarger = (CondCDF(:,i,f) > 0.95); - Time95(i,f) = (min(SCCgrid(vlarger)) + max(SCCgrid(vsmaller)))/2; - end - end -end -%} +%% Characteristics +% +% First version: Richard Tol, 2 November 2011 +% This version: Richard Tol, 2 November 2011 + +display('Characteristics'); + +for i=1:NFilter, + KernelMean(i) = SCCgrid*JointPDF(:,i); + vmeansq= (SCCgrid.*SCCgrid)*JointPDF(:,i); + var = vmeansq-KernelMean(i)*KernelMean(i); + KernelStDev(i) = sqrt(var); + [vmax vind] = max(JointPDF(:,i)); + KernelMode(i) = SCCgrid(vind); + vsmaller = (JointCDF(:,i) < 0.5); + vlarger = (JointCDF(:,i) > 0.5); + KernelMedian(i) = (min(SCCgrid(vlarger)) + max(SCCgrid(vsmaller)))/2; + vsmaller = (JointCDF(:,i) < 0.05); + vlarger = (JointCDF(:,i) > 0.05); + Kernel05(i) = (min(SCCgrid(vlarger)) + max(SCCgrid(vsmaller)))/2; + vsmaller = (JointCDF(:,i) < 
0.95); + vlarger = (JointCDF(:,i) > 0.95); + Kernel95(i) = (min(SCCgrid(vlarger)) + max(SCCgrid(vsmaller)))/2; +end + +%% kernelmeans.tex +Report = [KernelMean(18:23);KernelMean(24:29);KernelMean(30:35);KernelMean(36:41);KernelMean(42:47)]; +Report2 = [KernelMean(1) KernelMean(2) KernelMean(3) KernelMean(5) KernelMean(7); + KernelStDev(1) KernelStDev(2) KernelStDev(3) KernelStDev(5) KernelStDev(7); + KernelMedian(1) KernelMedian(2) KernelMedian(3) KernelMedian(5) KernelMedian(7); + KernelMode(1) KernelMode(2) KernelMode(3) KernelMode(5) KernelMode(7)]; +%{ +for i=1:NTime, + for f=1:NFilter, + if sum(CondCDF(:,i,f)) > 0, + TimeMean(i,f) = SCCgrid*CondPDF(:,i,f); + vmeansq = (SCCgrid.*SCCgrid)*CondPDF(:,i,f); + var = vmeansq - TimeMean(i,f)*TimeMean(i,f); + TimeStDev(i,f) = sqrt(var); + [vmax vind] = max(CondPDF(:,i,f)); + TimeMode(i,f) = SCCgrid(vind); + vsmaller = (CondCDF(:,i,f) < 0.5); + vlarger = (CondCDF(:,i,f) > 0.5); + TimeMedian(i,f) = (min(SCCgrid(vlarger)) + max(SCCgrid(vsmaller)))/2; + vsmaller = (CondCDF(:,i,f) < 0.05); + vlarger = (CondCDF(:,i,f) > 0.05); + Time05(i,f) = (min(SCCgrid(vlarger)) + max(SCCgrid(vsmaller)))/2; + vsmaller = (CondCDF(:,i,f) < 0.95); + vlarger = (CondCDF(:,i,f) > 0.95); + Time95(i,f) = (min(SCCgrid(vlarger)) + max(SCCgrid(vsmaller)))/2; + end + end +end +%} clear v* \ No newline at end of file diff --git a/ConfirmationBias.m b/ConfirmationBias.m index d7ddf80..37b866e 100644 --- a/ConfirmationBias.m +++ b/ConfirmationBias.m @@ -1,44 +1,44 @@ -%% ConfirmationBias - Tests for confirmation bias -% -% First version: Richard Tol, 1 November 2011 -% This version: Richard Tol, 4 April 2015 - -display('Test confirmation bias'); - -StartYear = 1991; -EndYear = Newest; -BiasYear(1)= StartYear; - -for j=1:NFilter, - clear Average StDev Max Min Mean; - FilterYear = (Year <= 1991) & Filter(:,j); - Average(1) = sum(SCC(FilterYear).*AuthorWeight(FilterYear))/sum(AuthorWeight(FilterYear)); - AveSq = sum(SCC(FilterYear).*SCC(FilterYear).*AuthorWeight(FilterYear))/sum(AuthorWeight(FilterYear)); - StDev(1) = sqrt(AveSq-Average(1)*Average(1)); - - for i= 2:(EndYear - StartYear + 1), - BiasYear(i) = StartYear+i-1; - FilterYear = (Year <= BiasYear(i)) & Filter(:,j); - Average(i) = sum(SCC(FilterYear).*AuthorWeight(FilterYear))/sum(AuthorWeight(FilterYear)); - AveSq = sum(SCC(FilterYear).*SCC(FilterYear).*AuthorWeight(FilterYear))/sum(AuthorWeight(FilterYear)); - StDev(i) = sqrt(AveSq-Average(i)*Average(i)); - FilterYear = (Year == BiasYear(i)) & Filter(:,j); - Mean(i) = sum(SCC(FilterYear).*AuthorWeight(FilterYear))/sum(AuthorWeight(FilterYear)); - if length(SCC(FilterYear)) > 0, - Max(i) = max(SCC(FilterYear)); - Min(i) = min(SCC(FilterYear)); - else - Max(i) = NaN; - Min(i) = NaN; - end - end - - subplot(NFilter,1,j); - plot(BiasYear,Average,'b',BiasYear,Average-2*StDev,'b:',BiasYear,Average+2*StDev,'b:',BiasYear,Mean,'r*',BiasYear,Min,'rx',BiasYear,Max,'rx'); - title(Titles(j,:)); - xlabel('year of publication') - ylabel('dollar per metric ton of carbon'); - legend('average of previous studies','upper bound of previous studies','lower bound of previous studies','mean of current studies','maximum of current studies','minimum of current studies'); -end - +%% ConfirmationBias - Tests for confirmation bias +% +% First version: Richard Tol, 1 November 2011 +% This version: Richard Tol, 4 April 2015 + +display('Test confirmation bias'); + +StartYear = 1991; +EndYear = Newest; +BiasYear(1)= StartYear; + +for j=1:NFilter, + clear Average StDev Max Min Mean; + FilterYear = (Year 
<= 1991) & Filter(:,j); + Average(1) = sum(SCC(FilterYear).*AuthorWeight(FilterYear))/sum(AuthorWeight(FilterYear)); + AveSq = sum(SCC(FilterYear).*SCC(FilterYear).*AuthorWeight(FilterYear))/sum(AuthorWeight(FilterYear)); + StDev(1) = sqrt(AveSq-Average(1)*Average(1)); + + for i= 2:(EndYear - StartYear + 1), + BiasYear(i) = StartYear+i-1; + FilterYear = (Year <= BiasYear(i)) & Filter(:,j); + Average(i) = sum(SCC(FilterYear).*AuthorWeight(FilterYear))/sum(AuthorWeight(FilterYear)); + AveSq = sum(SCC(FilterYear).*SCC(FilterYear).*AuthorWeight(FilterYear))/sum(AuthorWeight(FilterYear)); + StDev(i) = sqrt(AveSq-Average(i)*Average(i)); + FilterYear = (Year == BiasYear(i)) & Filter(:,j); + Mean(i) = sum(SCC(FilterYear).*AuthorWeight(FilterYear))/sum(AuthorWeight(FilterYear)); + if length(SCC(FilterYear)) > 0, + Max(i) = max(SCC(FilterYear)); + Min(i) = min(SCC(FilterYear)); + else + Max(i) = NaN; + Min(i) = NaN; + end + end + + subplot(NFilter,1,j); + plot(BiasYear,Average,'b',BiasYear,Average-2*StDev,'b:',BiasYear,Average+2*StDev,'b:',BiasYear,Mean,'r*',BiasYear,Min,'rx',BiasYear,Max,'rx'); + title(Titles(j,:)); + xlabel('year of publication') + ylabel('dollar per metric ton of carbon'); + legend('average of previous studies','upper bound of previous studies','lower bound of previous studies','mean of current studies','maximum of current studies','minimum of current studies'); +end + clear v* \ No newline at end of file diff --git a/ConstructBivarPDF.m b/ConstructBivarPDF.m index 2c174f0..ba72b0c 100644 --- a/ConstructBivarPDF.m +++ b/ConstructBivarPDF.m @@ -1,37 +1,37 @@ -%% Construct PDF Bivariate -% -% First version: Richard Tol, 30 October 2012 -% This version: Richard Tol, 30 October 2012 - -display('Construct bivariate PDF'); - -TimeGrid(1) = Oldest; -NTime = Newest-Oldest+1; -for i=2:NTime, - TimeGrid(i)=TimeGrid(i-1)+1; -end - -%Bivar = JointPDF(:,1)*PDFTime; - -Bivar = zeros(NGrid+1,NTime,NFilter); - -for i=1:NGrid+1, - for j=1:NTime, - for f=1:NFilter, - for k=1:NEstimates, - %Bivar(i,j) = Bivar(i,j) + exp(-(SCCgrid(i)-SCC(k))/SampleStDev(1)).*exp(-exp(-(SCCgrid(i)-SCC(k))/SampleStDev(1)))/SampleStDev(1) * Weight(k,1) * exp(-0.5*((TimeGrid(j)-Year(k))/StDevYear)^2); - if Year(k) >= TimeGrid(j), - Bivar(i,j,f) = Bivar(i,j,f) + exp(-(SCCgrid(i)-SCC(k))/SampleStDev(f)).*exp(-exp(-(SCCgrid(i)-SCC(k))/SampleStDev(f)))/SampleStDev(f) * Weight(k,f) * exp(-(Year(k)-TimeGrid(j))); - end - end - end - end -end -for f = 1:NFilter, - vsum= sum(sum(Bivar(:,:,f))); - Bivar(:,:,f) = Bivar(:,:,f)/vsum; - MargTime(:,f) = sum(Bivar(:,:,f),1); - MargSCC(:,f) = sum(Bivar(:,:,f),2); -end - +%% Construct PDF Bivariate +% +% First version: Richard Tol, 30 October 2012 +% This version: Richard Tol, 30 October 2012 + +display('Construct bivariate PDF'); + +TimeGrid(1) = Oldest; +NTime = Newest-Oldest+1; +for i=2:NTime, + TimeGrid(i)=TimeGrid(i-1)+1; +end + +%Bivar = JointPDF(:,1)*PDFTime; + +Bivar = zeros(NGrid+1,NTime,NFilter); + +for i=1:NGrid+1, + for j=1:NTime, + for f=1:NFilter, + for k=1:NEstimates, + %Bivar(i,j) = Bivar(i,j) + exp(-(SCCgrid(i)-SCC(k))/SampleStDev(1)).*exp(-exp(-(SCCgrid(i)-SCC(k))/SampleStDev(1)))/SampleStDev(1) * Weight(k,1) * exp(-0.5*((TimeGrid(j)-Year(k))/StDevYear)^2); + if Year(k) >= TimeGrid(j), + Bivar(i,j,f) = Bivar(i,j,f) + exp(-(SCCgrid(i)-SCC(k))/SampleStDev(f)).*exp(-exp(-(SCCgrid(i)-SCC(k))/SampleStDev(f)))/SampleStDev(f) * Weight(k,f) * exp(-(Year(k)-TimeGrid(j))); + end + end + end + end +end +for f = 1:NFilter, + vsum= sum(sum(Bivar(:,:,f))); + Bivar(:,:,f) = 
Bivar(:,:,f)/vsum; + MargTime(:,f) = sum(Bivar(:,:,f),1); + MargSCC(:,f) = sum(Bivar(:,:,f),2); +end + clear v* \ No newline at end of file diff --git a/ConstructCondPDF.m b/ConstructCondPDF.m index 1a4533c..ef12828 100644 --- a/ConstructCondPDF.m +++ b/ConstructCondPDF.m @@ -1,27 +1,27 @@ -%% Construct Conditional PDF -% -% First version: Richard Tol, 30 October 2012 -% This version: Richard Tol, 30 October 2012 - -display('Construct conditional PDF'); - -for i=1:NTime, - for f=1:NFilter, - if MargTime(i,f) > 0, - CondPDF(:,i,f) = Bivar(:,i,f)/MargTime(i,f); - else - CondPDF(:,i,f) = 0; - end - end -end; - -CondCDF = CondPDF; -for i=2:NGrid+1, - for j=1:NTime, - for f=1:NFilter, - CondCDF(i,j,f) = CondCDF(i-1,j,f) + CondPDF(i,j,f); - end - end -end - +%% Construct Conditional PDF +% +% First version: Richard Tol, 30 October 2012 +% This version: Richard Tol, 30 October 2012 + +display('Construct conditional PDF'); + +for i=1:NTime, + for f=1:NFilter, + if MargTime(i,f) > 0, + CondPDF(:,i,f) = Bivar(:,i,f)/MargTime(i,f); + else + CondPDF(:,i,f) = 0; + end + end +end; + +CondCDF = CondPDF; +for i=2:NGrid+1, + for j=1:NTime, + for f=1:NFilter, + CondCDF(i,j,f) = CondCDF(i-1,j,f) + CondPDF(i,j,f); + end + end +end + clear v* \ No newline at end of file diff --git a/ConstructHistogram.m b/ConstructHistogram.m index 1b75baa..efea899 100644 --- a/ConstructHistogram.m +++ b/ConstructHistogram.m @@ -6,11 +6,14 @@ %% hold on +figure %not used plot(SCCgrid(301:2101),JointPDFSilverman(301:2101,1),SCCgrid(301:2101),JointPDFGauss(301:2101,1),SCCgrid(301:2101),JointPDFJSUS(301:2101,1),SCCgrid(301:2101),JointPDFJSU(301:2101,1),SCCgrid(301:2101),JointPDFGaussGauss(301:2101,1),SCCgrid(301:2101),JointPDFGumbelGauss(301:2101,1),SCCgrid(301:2101),JointPDFGumbelWeibull(301:2101,1)) bar(SCCgrid(301:2101),histo(301:2101,1)) legend('Normal, Silverman', 'Normal', 'Johnson SU, Silverman', 'Johnson SU', 'Normal, Normal','Gumbel, Normal', 'Gumbel, Weibull', 'Histogram'); xlabel('dollar per tonne of carbon') -ylabel('Probability density') +ylabel('probability density') + +hold off %% MISE(1) = histo'*((histo - JointPDFSilverman).*(histo - JointPDFSilverman)); @@ -49,7 +52,7 @@ mean(7) = SCCgrid*JointPDFGumbelWeibull; mean(8) = SCCgrid*histo; -%% +%% Table S3 kernelchar = [mean; p0; pl; MISE*1000]; columnLabels = {'Normal, Silverman', 'Normal', 'Johnson SU, Silverman', 'Johnson SU', 'Normal, Normal','Gumbel, Normal', 'Gumbel, Weibull', 'Observations'}; rowLabels = {'Average','P(SCC<0)','P(SCC>1186)','MISE'}; diff --git a/ConstructPDF.m b/ConstructPDF.m index 2f7f9b9..6690795 100644 --- a/ConstructPDF.m +++ b/ConstructPDF.m @@ -1,48 +1,182 @@ -%% Construct PDF -% -% First version: Richard Tol, 2 November 2011 -% This version: Richard Tol, 4 April 2015 - -display('Construct joint PDF'); - -SCCgrid(1)= -500; -NGrid = 8110; %210500 for unrestricted -for i=1:NGrid, - SCCgrid(i+1)=SCCgrid(i)+1; -end - -for j=1:NFilter, - SampleAverage(j) = sum(SCC(Filter(:,j)).*TotalWeight(Filter(:,j)))/sum(TotalWeight(Filter(:,j))); - vAveSq = sum(SCC(Filter(:,j)).*SCC(Filter(:,j)).*TotalWeight(Filter(:,j)))/sum(TotalWeight(Filter(:,j))); - SampleStDev(j) = sqrt(vAveSq-SampleAverage(j)*SampleAverage(j)); - for i=1:NEstimates, - FTkernel(:,i) = exp(-(SCCgrid-SCC(i))/SampleStDev(j)).*exp(-exp(-(SCCgrid-SCC(i))/SampleStDev(j)))/SampleStDev(j); - end - FTkernel(isnan(FTkernel)) = 0; - for i=1:NEstimates, - if ((Tol(i)==0) & (Hope(i)==0)) - FTkernel(1:501,i) = 0; - end - end - scale = sum(FTkernel); - for i=1:NEstimates, - if scale(i)>0 - 
FTkernel(:,i) = FTkernel(:,i)/scale(i); - end - end - - Weight(:,j) = Filter(:,j).*TotalWeight; - JointPDF(:,j) = FTkernel*Weight(:,j); - vsum = sum(JointPDF(:,j)); - JointPDF(:,j) = JointPDF(:,j)/vsum; - subplot(NFilter,1,j) - plot(SCCgrid,JointPDF(:,j)) - title(Titles(j,:)) -end -JointCDF=zeros(NGrid+1,NFilter); -JointCDF(1,:)=JointPDF(1,:); -for i=2:NGrid+1, - JointCDF(i,:)=JointCDF(i-1,:)+JointPDF(i,:); -end - -clear v* \ No newline at end of file +%% Construct PDF +% +% First version: Richard Tol, 2 November 2011 +% This version: Richard Tol, 29 March 2021 + +%display('Construct joint PDF'); + +global M +global V + +SCCmin = -500; + +SCCgrid(1)= SCCmin; +NGrid = 8111; %7601 is the maximum +for i=1:NGrid-1 + SCCgrid(i+1)=SCCgrid(i)+1; +end + +SCCmax = SCCgrid(end); +if JohnsonSU + +gridamp = 50; + +SCCtgrid(1) = asinh(SCCmin); +SCCtstep = (asinh(SCCmax) - asinh(SCCmin))/(gridamp*NGrid-1); +for i=1:gridamp*NGrid-1 + SCCtgrid(i+1) = SCCtgrid(i) + SCCtstep; +end + +vkernel = zeros(gridamp*NGrid,NEstimates); + +%figure + + j = 1; + SCCt = asinh(SCC); + SampleAve = sum(SCCt(Filter(:,j)).*TotalWeight(Filter(:,j)))/sum(TotalWeight(Filter(:,j))); + vAveSq = sum(SCCt(Filter(:,j)).*SCCt(Filter(:,j)).*TotalWeight(Filter(:,j)))/sum(TotalWeight(Filter(:,j))); + SampleStDev = sqrt(vAveSq-SampleAve*SampleAve); + V = SampleStDev^2; + + for i=1:NEstimates, + mu = SCCt(i); + if Silverman + sig = 1.06*SampleStDev(j)/NEstimates^0.2; + else + sig = SampleStDev(j); + end + vkernel(:,i) = exp(-0.5*(SCCtgrid-mu).^2/sig/sig)/sig/sqrt(2*pi); + end + + vkernel(isnan(vkernel)) = 0; + + scale = sum(vkernel); + for i=1:NEstimates, + if scale(i)>0 + vkernel(:,i) = vkernel(:,i)/scale(i); + end + end + + Weight(:,j) = Filter(:,j).*TotalWeight; + PDFt = vkernel*Weight(:,j); + vsum = sum(PDFt); + PDFt = PDFt/vsum; + + %plot(SCCtgrid,PDFt) + + vgrid = sinh(SCCtgrid); + jacob = (1 + vgrid.*(vgrid.*vgrid+1).^-0.5)./(vgrid + (vgrid.*vgrid+1).^0.5); %note: divide and multiply by SCCtgrid + PDFtt = PDFt./jacob'; + + for i=1:NGrid + JointCDF(i,j) = sum(PDFtt(SCCtgrid0 + vkernel(:,i) = vkernel(:,i)/scale(i); + end + end + + Weight(:,j) = Filter(:,j).*TotalWeight; + JointPDF(:,j) = vkernel*Weight(:,j); + vsum = sum(JointPDF(:,j)); + JointPDF(:,j) = JointPDF(:,j)/vsum; + if j == 1 %store results for decomposition + allkernel = vkernel; + end + %subplot(NFilter,1,j) + %plot(SCCgrid,JointPDF(:,j)) + %title(Titles{j}) +end +end + +JointCDF=zeros(NGrid,NFilter); +JointCDF(1,:)=JointPDF(1,:); +for i=2:NGrid, + JointCDF(i,:)=JointCDF(i-1,:)+JointPDF(i,:); +end + +%clear v* \ No newline at end of file diff --git a/ConstructPDFTime.m b/ConstructPDFTime.m index e1cac90..0ad6af2 100644 --- a/ConstructPDFTime.m +++ b/ConstructPDFTime.m @@ -1,25 +1,25 @@ -%% Construct PDF Time -% -% First version: Richard Tol, 30 October 2012 -% This version: Richard Tol, 30 October 2012 - -display('Construct time PDF'); - -AveYear = TotalWeight'*Year/sum(TotalWeight); -AveSqYear = TotalWeight'*Year.^2/sum(TotalWeight); -StDevYear = sqrt(AveSqYear - AveYear*AveYear); -TimeGrid(1) = Oldest; -NTime = Newest-Oldest+1; -for i=2:NTime, - TimeGrid(i)=TimeGrid(i-1)+1; -end - -PDFTime = zeros(1,NTime); -for i=1:NEstimates, - for j=1:NTime, - PDFTime(j) = PDFTime(j) + exp(-0.5*((TimeGrid(j)-Year(i))/StDevYear)^2); - end -end -PDFTime = PDFTime/sum(PDFTime); - +%% Construct PDF Time +% +% First version: Richard Tol, 30 October 2012 +% This version: Richard Tol, 30 October 2012 + +display('Construct time PDF'); + +AveYear = TotalWeight'*Year/sum(TotalWeight); 
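+%StDevYear below is the weighted standard deviation of publication years, sqrt(E[Year^2]-E[Year]^2); it is used as the Gaussian kernel bandwidth on the time axis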
+AveSqYear = TotalWeight'*Year.^2/sum(TotalWeight); +StDevYear = sqrt(AveSqYear - AveYear*AveYear); +TimeGrid(1) = Oldest; +NTime = Newest-Oldest+1; +for i=2:NTime, + TimeGrid(i)=TimeGrid(i-1)+1; +end + +PDFTime = zeros(1,NTime); +for i=1:NEstimates, + for j=1:NTime, + PDFTime(j) = PDFTime(j) + exp(-0.5*((TimeGrid(j)-Year(i))/StDevYear)^2); + end +end +PDFTime = PDFTime/sum(PDFTime); + clear v* \ No newline at end of file diff --git a/ConstructPDFgrowth.m b/ConstructPDFgrowth.m new file mode 100644 index 0000000..81b8702 --- /dev/null +++ b/ConstructPDFgrowth.m @@ -0,0 +1,71 @@ +%% Construct PDF growth rate +% +% First version: Richard Tol, 2 November 2011 +% This version: Richard Tol, 27 March 2021 + +display('Construct PDF'); + +dist = 'normal'; +Silverman = true; + +grgrid(1)= -0.02; +Ngg = 1000; +for i=1:Ngg-1, + grgrid(i+1)=grgrid(i)+0.0001; +end + +vkernel = zeros(Ngg,1); + +ObsGrowth = Growth(~isnan(Growth)); +ObsWeight = GWeight(~isnan(Growth)); +NE = length(ObsGrowth); +Gilter = Filter(~isnan(Growth),1:8); %only separate by discount rate +NGilter = size(Gilter,2); + +vkernel = zeros(Ngg,NE); + +for j=1:NGilter, + SampleAveGr = sum(ObsGrowth(Gilter(:,j)).*ObsWeight(Gilter(:,j)))/sum(ObsWeight(Gilter(:,j))); + vAveSq = sum(ObsGrowth(Gilter(:,j)).*ObsGrowth(Gilter(:,j)).*ObsWeight(Gilter(:,j)))/sum(ObsWeight(Gilter(:,j))); + SampleSDgr = sqrt(vAveSq-SampleAveGr*SampleAveGr); + + for i=1:NE, + switch dist + case 'normal' + mu = ObsGrowth(i); + if Silverman + sig = 1.06*SampleSDgr/NE^0.2; + else + sig = SampleSDgr; + end + vkernel(:,i) = exp(-0.5*(grgrid-mu).^2/sig/sig)/sig/sqrt(2*pi); + otherwise + vkernel = zeros(NE,1); + end + end + vkernel(isnan(vkernel)) = 0; + + scale = sum(vkernel); %fine + for i=1:NE, + if scale(i)>0 + vkernel(:,i) = vkernel(:,i)/scale(i); + end + end + + allkernelgr = vkernel; %save for decomposition + veight = Gilter(:,j).*ObsWeight; + PDFgrowth(:,j) = vkernel*veight; + PDFgrowth(:,j) = PDFgrowth(:,j)/sum(PDFgrowth(:,j)); +end +%% +%figure %not used +%plot(grgrid,PDFgrowth(:,1)) +%xlabel('percent per year') +%ylabel('probability density') +%% +CDFgrowth = PDFgrowth; +for i=2:Ngg, + CDFgrowth(i,:) = CDFgrowth(i-1,:)+PDFgrowth(i,:); +end + +clear v* \ No newline at end of file diff --git a/ConstructPDFs.m b/ConstructPDFs.m new file mode 100644 index 0000000..80b663b --- /dev/null +++ b/ConstructPDFs.m @@ -0,0 +1,204 @@ +%% Construct PDF +% +% First version: Richard Tol, 29 March 2021 +% This version: Richard Tol, 29 March 2021 + +%% Sensitivity analysis on kernel functions +NFilterSave = NFilter; +NFilter = 1; +nosplit = true; +distpos = 'normal'; +distneg = 'normal'; + +%% +JohnsonSU = true; +Silverman = true; +ConstructPDF; +JointPDFJSUS = JointPDF; + +Silverman = false; +ConstructPDF; +JointPDFJSU = JointPDF; + +%% +Silverman = true; +JohnsonSU = false; +ConstructPDF; +JointPDFSilverman = JointPDF; + +%% adaptive kernel +TotalWeightSave = TotalWeight; +TotalWeight = TotalWeightNS; +ConstructPDF; +JointPDF0 = JointPDF; + +for i=1:NEstimates, + Aux = JointPDF0(SCCgrid==round(SCC(i))); + if isempty(Aux) + AdapWeight(i) = 0; + else + AdapWeight(i) = Aux(1); + end +end +TotalWeight = AdapWeight'.*TotalWeightNS; +ConstructPDF; +JointPDF1 = JointPDF; + +for i=1:NEstimates, + Aux = JointPDF1(SCCgrid==round(SCC(i))); + if isempty(Aux) + AdapWeight(i) = 0; + else + AdapWeight(i) = Aux(1); + end +end +TotalWeight = AdapWeight'.*TotalWeightNS; +ConstructPDF; +JointPDF2 = JointPDF; + +for i=1:NEstimates, + Aux = JointPDF2(SCCgrid==round(SCC(i))); + if isempty(Aux) + 
AdapWeight(i) = 0; + else + AdapWeight(i) = Aux(1); + end +end +TotalWeight = AdapWeight'.*TotalWeightNS; +ConstructPDF; +JointPDF3 = JointPDF; + +figure %S5 adaptive.jpg +plot(SCCgrid(301:2101),JointPDF0(301:2101,1),SCCgrid(301:2101),JointPDF1(301:2101,1),SCCgrid(301:2101),JointPDF2(301:2101,1),SCCgrid(301:2101),JointPDF3(301:2101,1)) +legend('Initial','Iteration 1', 'Iteration 2','Iteration 3'); +xlabel('dollar per tonne of carbon') +ylabel('Probability density') + +TotalWeight = TotalWeightSave; + +%% +Silverman = false; +JohnsonSU = false; +ConstructPDF; +JointPDFGauss = JointPDF; + +nosplit = false; +ConstructPDF; +JointPDFGaussGauss = JointPDF; + +distneg = 'gumbel'; +ConstructPDF; +JointPDFGumbelGauss = JointPDF; + +distpos = 'weibull'; +ConstructPDF; +JointPDFGumbelWeibull = JointPDF; + +%% +figure %S1 kernel.jpg +plot(SCCgrid(301:2101),JointPDFSilverman(301:2101,1),SCCgrid(301:2101),JointPDFGauss(301:2101,1),SCCgrid(301:2101),JointPDFGaussGauss(301:2101,1),SCCgrid(301:2101),JointPDFGumbelGauss(301:2101,1),SCCgrid(301:2101),JointPDFGumbelWeibull(301:2101,1)) +legend('Normal, Silverman', 'Normal', 'Normal, Normal','Gumbel, Normal', 'Gumbel, Weibull'); +xlabel('dollar per tonne of carbon') +ylabel('Probability density') + +%% Sensitivity analysis on weights +TotalWeightSave = TotalWeight; + +TotalWeight = ones(NEstimates,1); +ConstructPDF; +JointPDFUncensored = JointPDF; + +SCCsave = SCC; +SCC = SCCW; +ConstructPDF; +JointPDFWinsorized = JointPDF; +SCC = SCCsave; + +TotalWeight = Censor.*ones(NEstimates,1); +ConstructPDF; +JointPDFNone = JointPDF; + +TotalWeight = Censor.*PaperWeight; +ConstructPDF; +JointPDFPaper = JointPDF; + +TotalWeight = Censor.*AuthorWeight; +ConstructPDF; +JointPDFAuthor = JointPDF; + +TotalWeight = TotalWeightSave; +ConstructPDF; +JointPDFQuality = JointPDF; + +%% +figure %S2 weigth.jpg +plot(SCCgrid(301:2101),JointPDFUncensored(301:2101,1),SCCgrid(301:2101),JointPDFWinsorized(301:2101,1),SCCgrid(301:2101),JointPDFNone(301:2101,1),SCCgrid(301:2101),JointPDFPaper(301:2101,1),SCCgrid(301:2101),JointPDFAuthor(301:2101,1),SCCgrid(301:2101),JointPDFQuality(301:2101,1)) +legend('Uncensored','Winsorized','No weights','Paper weights', 'Author weights', 'Quality weights'); +xlabel('dollar per tonne of carbon') +ylabel('Probability density') + +%% More sensitivity analysis on censoring +TotalWeightSave = TotalWeight; + +TotalWeight = AltCensor1.*TotalWeightNS; +ConstructPDF; +JointPDFAltCensor1 = JointPDF; + +TotalWeight = AltCensor2.*TotalWeightNS; +ConstructPDF; +JointPDFAltCensor2 = JointPDF; + +figure %S4 censor.jpg +plot(SCCgrid(301:2101),JointPDFQuality(301:2101,1),SCCgrid(301:2101),JointPDFAltCensor1(301:2101,1),SCCgrid(301:2101),JointPDFAltCensor2(301:2101,1)) +legend('Top censor','Top and bottom censor','Asymmetric censor'); +xlabel('dollar per tonne of carbon') +ylabel('Probability density') + +TotalWeight = TotalWeightSave; +%% More sensitivity analysis on weights +TotalWeight = Censor.*TWeightPeer; +ConstructPDF; +JointPDFPeer = JointPDF; + +TotalWeight = Censor.*TWeightImpact; +ConstructPDF; +JointPDFImpact = JointPDF; + +TotalWeight = Censor.*TWeightMethod; +ConstructPDF; +JointPDFMethod = JointPDF; + +TotalWeight = Censor.*TWeightDyn; +ConstructPDF; +JointPDFDyn = JointPDF; + +TotalWeight = Censor.*TWeightScen; +ConstructPDF; +JointPDFScen = JointPDF; + +TotalWeight = Censor.*TWeightAge; +ConstructPDF; +JointPDFAge = JointPDF; + +figure %S3 qweight.jpg 
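+%overlay the composite quality-weighted PDF against each single-criterion weight variant computed above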
+plot(SCCgrid(301:2101),JointPDFQuality(301:2101,1),SCCgrid(301:2101),JointPDFPeer(301:2101,1),SCCgrid(301:2101),JointPDFImpact(301:2101,1),SCCgrid(301:2101),JointPDFMethod(301:2101,1),SCCgrid(301:2101),JointPDFDyn(301:2101,1),SCCgrid(301:2101),JointPDFScen(301:2101,1),SCCgrid(301:2101),JointPDFAge(301:2101,1))
+legend('Quality weights','Not peer-reviewed','No original estimate','Not marginal','Static vulnerability','No scenario','Age of publication');
+xlabel('dollar per tonne of carbon')
+ylabel('Probability density')
+
+TotalWeight = TotalWeightSave;
+
+%%
+%restore defaults
+distpos = 'weibull'; %normal gamma lognormal gumbel weibull=default
+distneg = 'gumbel'; %normal gumbel=default
+Silverman = false;
+nosplit = false;
+NFilter = NFilterSave;
+ConstructPDF
+
+figure
+plot(SCCgrid(301:2101),JointPDF(301:2101,1))
+legend('Test before bootstrap');
+xlabel('dollar per tonne of carbon')
+ylabel('Probability density')
\ No newline at end of file
diff --git a/DecomposEqual.m b/DecomposEqual.m
new file mode 100644
index 0000000..743cd45
--- /dev/null
+++ b/DecomposEqual.m
@@ -0,0 +1,217 @@
+%This version assumes equal bandwidth for all components
+
+CDF = JointCDF(:,1);
+TotW = sum(TotalWeight);
+
+%%
+dcauthor(:,1) = allkernel*((Filter(:,1)-Hope-Nordhaus-Tol-Ploeg).*TotalWeight);
+dcauthor(:,2) = allkernel*(Hope.*TotalWeight);
+dcauthor(:,3) = allkernel*(Nordhaus.*TotalWeight);
+dcauthor(:,4) = allkernel*(Tol.*TotalWeight);
+dcauthor(:,5) = allkernel*(Ploeg.*TotalWeight);
+
+w(1) = sum((Filter(:,1)-Hope-Nordhaus-Tol-Ploeg).*TotalWeight); %'Other' excludes Ploeg as well, matching dcauthor(:,1)
+w(2) = sum(Hope.*TotalWeight);
+w(3) = sum(Nordhaus.*TotalWeight);
+w(4) = sum(Tol.*TotalWeight);
+w(5) = sum(Ploeg.*TotalWeight);
+
+dcauthor = dcauthor/sum(w);
+
+area(SCCgrid(301:2101), dcauthor(301:2101,:))
+legend('Other','Hope','Nordhaus','Tol','Ploeg')
+xlabel('dollar per tonne of carbon')
+ylabel('Probability density')
+
+Q(1,:) = sum(dcauthor(CDF<=0.2,:));
+Q(2,:) = sum(dcauthor(CDF<=0.4 & CDF > 0.2,:));
+Q(3,:) = sum(dcauthor(CDF<=0.6 & CDF > 0.4,:));
+Q(4,:) = sum(dcauthor(CDF<=0.8 & CDF > 0.6,:));
+Q(5,:) = sum(dcauthor(CDF > 0.8,:));
+
+w = w/sum(w);
+
+columnLabels = {'Other', 'Hope','Nordhaus','Tol','Ploeg'};
+rowLabels = {'Q1', 'Q2', 'Q3', 'Q4', 'Q5','Null'};
+matrix2latex([Q; 0.2*w], 'author.tex', 'rowLabels', rowLabels, 'columnLabels', columnLabels, 'alignment', 'c', 'format', '%-6.4f', 'size', 'normal');
+
+for i=1:5,
+    P(i,:) = ((Q(i,:) - 0.2*w).^2)./(0.2*w);
+end
+
+chiauthor = sum(sum(P))*TotW/5;
+pauthor = 1-chi2cdf(chiauthor,(size(Q,1)-1)*(size(Q,2)-1));
+
+clear w Q P
+%%
+years = [1982 1991 1992 1993 1994 1995 1996 1997 1999 2000 2001 2002 2003 2004 2005 2006 2007 2008 2009 2010 2011 2012 2013 2014 2015 2016 2017 2018 2019 2020];
+nyears = size(years,2);
+
+for i= 1:nyears,
+    dcyear(:,i) = allkernel*((Year==years(i)).*TotalWeight);
+    w(i) = sum((Year==years(i)).*TotalWeight);
+end
+
+dcyear = dcyear/sum(w);
+area(SCCgrid, dcyear);
+
+dcperiod(:,1) = sum(dcyear(:,1:6),2);
+dcperiod(:,2) = sum(dcyear(:,7:11),2);
+dcperiod(:,3) = sum(dcyear(:,12:16),2);
+dcperiod(:,4) = sum(dcyear(:,17:23),2);
+dcperiod(:,5) = sum(dcyear(:,24:30),2);
+
+v = w;
+clear w
+w(1) = sum(v(1:6));
+w(2) = sum(v(7:11));
+w(3) = sum(v(12:16));
+w(4) = sum(v(17:23));
+w(5) = sum(v(24:30));
+
+area(SCCgrid(301:2101), dcperiod(301:2101,:));
+legend('1982-1995','1996-2001','2002-2006','2007-2013', '2014-2020');
+xlabel('dollar per tonne of carbon')
+ylabel('Probability density')
+
+Q(1,:) = sum(dcperiod(CDF<=0.2,:));
+Q(2,:) = sum(dcperiod(CDF<=0.4 & CDF > 0.2,:));
+Q(3,:) = sum(dcperiod(CDF<=0.6 & CDF > 0.4,:));
+Q(4,:) = sum(dcperiod(CDF<=0.8 & CDF > 0.6,:));
+Q(5,:) = sum(dcperiod(CDF > 0.8,:));
+
+w = w/sum(w);
+
+columnLabels = {'1982-1995','1996-2001','2002-2006','2007-2013', '2014-2020'};
+rowLabels = {'Q1', 'Q2', 'Q3', 'Q4', 'Q5','Null'};
+matrix2latex([Q; 0.2*w], 'period.tex', 'rowLabels', rowLabels, 'columnLabels', columnLabels, 'alignment', 'c', 'format', '%-6.4f', 'size', 'normal');
+
+for i=1:5,
+    P(i,:) = ((Q(i,:) - 0.2*w).^2)./(0.2*w);
+end
+
+chiperiod = sum(sum(P))*TotW/5;
+pperiod = 1-chi2cdf(chiperiod,(size(Q,1)-1)*(size(Q,2)-1));
+
+clear w Q P
+
+%%
+prtps = [3.0 2.0 1.5 1.0 0.1 0.0];
+nprtps = size(prtps,2);
+for i= 1:nprtps,
+    dcprtp(:,i) = allkernel*((PRTP==prtps(i)).*TotalWeight);
+    w(i) = sum((PRTP==prtps(i)).*TotalWeight);
+end
+dcprtp(:,nprtps+1) = allkernel*(isnan(PRTP).*TotalWeight);
+w(nprtps+1) = sum(isnan(PRTP).*TotalWeight);
+dcprtp = dcprtp/sum(w);
+area(SCCgrid(301:2101), dcprtp(301:2101,:));
+legend('3.0', '2.0', '1.5','1.0', '0.1', '0.0', 'other');
+xlabel('dollar per tonne of carbon')
+ylabel('Probability density')
+
+Q(1,:) = sum(dcprtp(CDF<=0.2,:));
+Q(2,:) = sum(dcprtp(CDF<=0.4 & CDF > 0.2,:));
+Q(3,:) = sum(dcprtp(CDF<=0.6 & CDF > 0.4,:));
+Q(4,:) = sum(dcprtp(CDF<=0.8 & CDF > 0.6,:));
+Q(5,:) = sum(dcprtp(CDF > 0.8,:));
+
+w = w/sum(w);
+
+columnLabels = {'3.0', '2.0', '1.5','1.0', '0.1', '0.0', 'other'};
+rowLabels = {'Q1', 'Q2', 'Q3', 'Q4', 'Q5','Null'};
+matrix2latex([Q; 0.2*w], 'discount.tex', 'rowLabels', rowLabels, 'columnLabels', columnLabels, 'alignment', 'c', 'format', '%-6.4f', 'size', 'normal');
+
+for i=1:5,
+    P(i,:) = ((Q(i,:) - 0.2*w).^2)./(0.2*w);
+end
+
+chiprtp = sum(sum(P))*TotW/5;
+pprtp = 1-chi2cdf(chiprtp,(size(Q,1)-1)*(size(Q,2)-1));
+
+clear w Q P
+
+%% Kruskal-Wallis
+SCC3 = SCC(PRTP==3);
+SCC2 = SCC(PRTP==2);
+SCC15 = SCC(PRTP==1.5);
+SCC1= SCC(PRTP==1);
+SCC01 = SCC(PRTP==0.1);
+SCC0 = SCC(PRTP==0);
+SCCO = SCC(isnan(PRTP));
+
+l3 = length(SCC3);
+l2 = length(SCC2);
+l15 = length(SCC15);
+l1= length(SCC1);
+l01 = length(SCC01);
+l0 = length(SCC0);
+lO = length(SCCO);
+
+maxl = max([l3, l2, l15, l1, l01, l0, lO]);
+
+vext = ones(maxl-l3,1);
+vext = NaN*vext;
+SCC3 = [SCC3; vext];
+vext = ones(maxl-l2,1);
+vext = NaN*vext;
+SCC2 = [SCC2; vext];
+vext = ones(maxl-l15,1); %pad the 1.5 percent group as well so all columns have equal length
+vext = NaN*vext;
+SCC15 = [SCC15; vext];
+vext = ones(maxl-l1,1);
+vext = NaN*vext;
+SCC1 = [SCC1; vext];
+vext = ones(maxl-l01,1);
+vext = NaN*vext;
+SCC01 = [SCC01; vext];
+vext = ones(maxl-l0,1);
+vext = NaN*vext;
+SCC0 = [SCC0; vext];
+vext = ones(maxl-lO,1);
+vext = NaN*vext;
+SCCO = [SCCO; vext];
+
+p = kruskalwallis([SCC3 SCC2 SCC15 SCC1 SCC01 SCC0 SCCO]); %last column is the 'other' group, not a repeat of SCC0
+
+%%
+scl = sum(dcprtp,2);
+dcprtps = dcprtp./scl;
+area(SCCgrid(301:2101),dcprtps(301:2101,:))
+%legend('3.0','2.0','1.5','1.0', '0.1', '0.0');
+
+%%
+vprtp = PRTP(~isnan(Growth));
+
+for i= 1:nprtps,
+    dcprtpgr(:,i) = allkernelgr*((vprtp==prtps(i)).*ObsWeight);
+    w(i) = sum((vprtp==prtps(i)).*ObsWeight);
+end
+dcprtpgr(:,nprtps+1) = allkernelgr*(isnan(vprtp).*ObsWeight);
+w(nprtps+1) = sum(isnan(vprtp).*ObsWeight);
+dcprtpgr = dcprtpgr/sum(w);
+area(grgrid, dcprtpgr);
+legend('3.0', '2.0', '1.5','1.0', '0.1', '0.0', 'other');
+xlabel('per year')
+ylabel('Probability density')
+
+Q(1,:) = sum(dcprtpgr(CDFgrowth<=0.2,:));
+Q(2,:) = sum(dcprtpgr(CDFgrowth<=0.4 & CDFgrowth > 0.2,:));
+Q(3,:) = sum(dcprtpgr(CDFgrowth<=0.6 & CDFgrowth > 0.4,:));
+Q(4,:) = sum(dcprtpgr(CDFgrowth<=0.8 & CDFgrowth > 0.6,:));
+Q(5,:) = sum(dcprtpgr(CDFgrowth > 0.8,:));
+
+w = w/sum(w);
+
+columnLabels = {'3.0', '2.0', '1.5','1.0', '0.1', '0.0', 
'other'}; +rowLabels = {'Q1', 'Q2', 'Q3', 'Q4', 'Q5','Null'}; +matrix2latex([Q; 0.2*w], 'growth.tex', 'rowLabels', rowLabels, 'columnLabels', columnLabels, 'alignment', 'c', 'format', '%-6.4f', 'size', 'normal'); + +for i=1:5, + P(i,:) = ((Q(i,:) - 0.2*w).^2)./(0.2*w); +end + +chigr = sum(sum(P))*sum(ObsWeight)/5; +pgr = 1-chi2cdf(chigr,(size(Q,1)-1)*(size(Q,2)-1)); + +clear v* w Q P \ No newline at end of file diff --git a/Decompose.m b/Decompose.m index 000d00d..acfb0f6 100644 --- a/Decompose.m +++ b/Decompose.m @@ -19,7 +19,7 @@ dcprtp(:,i) = JointPDF(:,i+1)*w(i)/sum(w); end -figure +figure %S19 prtp.jpg area(SCCgrid(301:2101), dcprtp(301:2101,:)); legend('3.0', '2.0', '1.5','1.0', '0.1', '0.0', 'other'); xlabel('dollar per tonne of carbon') @@ -55,7 +55,7 @@ dcauthor(:,i) = JointPDF(:,i+12)*w(i)/sum(w); end -figure +figure %S20 author.jpg area(SCCgrid(301:2101), dcauthor(301:2101,:)) legend('Hope','Ploeg','Nordhaus','Tol','Other') xlabel('dollar per tonne of carbon') @@ -91,12 +91,14 @@ dcperiod(:,i) = JointPDF(:,i+17)*w(i)/sum(w); end -figure +figure %2 period.jpg area(SCCgrid(301:2101), dcperiod(301:2101,:)); legend('1982-1995','1996-2001','2002-2006','2007-2013','2014-2017','2018-2021'); xlabel('dollar per tonne of carbon') ylabel('Probability density') +print('period','-dpng','-r1500'); + Q(1,:) = sum(dcperiod(CDF<=0.2,:)); Q(2,:) = sum(dcperiod(CDF<=0.4 & CDF > 0.2,:)); Q(3,:) = sum(dcperiod(CDF<=0.6 & CDF > 0.4,:)); @@ -130,7 +132,7 @@ dcperiod3(:,i) = JointPDF(:,i+23)*w(i)/sum(w); end -figure +figure %S17 period3.jpg area(SCCgrid(301:2101), dcperiod3(301:2101,:)); legend('1982-1995','1996-2001','2002-2006','2007-2013','2014-2017','2018-2021'); xlabel('dollar per tonne of carbon') @@ -169,7 +171,7 @@ dcperiod2(:,i) = JointPDF(:,i+29)*w(i)/sum(w); end -figure +figure %S16 period2.jpg area(SCCgrid(301:2101), dcperiod2(301:2101,:)); legend('1982-1995','1996-2001','2002-2006','2007-2013','2014-2017','2018-2021'); xlabel('dollar per tonne of carbon') @@ -208,7 +210,7 @@ dcperiod1(:,i) = JointPDF(:,i+35)*w(i)/sum(w); end -figure +figure %S15 period1.jpg area(SCCgrid(301:2101), dcperiod1(301:2101,:)); legend('1982-1995','1996-2001','2002-2006','2007-2013','2014-2017','2018-2021'); xlabel('dollar per tonne of carbon') @@ -247,7 +249,7 @@ dcperiod0(:,i) = JointPDF(:,i+41)*w(i)/sum(w); end -figure +figure %S14 period0.jpg area(SCCgrid(301:2101), dcperiod0(301:2101,:)); legend('1982-1995','1996-2001','2002-2006','2007-2013','2014-2017','2018-2021'); xlabel('dollar per tonne of carbon') @@ -287,7 +289,7 @@ dcprtpgr(:,i) = PDFgrowth(:,i+1)*w(i)/sum(w); end -figure +figure %S10 growth.jpg area(grgrid, dcprtpgr); legend('3.0', '2.0', '1.5','1.0', '0.1', '0.0', 'other'); xlabel('per year') @@ -358,7 +360,7 @@ disp(k) SCC = SCCsave(BSU(:,k)); TotalWeight = Weightsave(BSU(:,k)); - ConstructPDF + ConstructPDF %this would be faster without the graph CDF = JointCDF(:,1); TotW = sum(TotalWeight); for i=1:7, diff --git a/DecomposeShort.m b/DecomposeShort.m new file mode 100644 index 0000000..1706ba6 --- /dev/null +++ b/DecomposeShort.m @@ -0,0 +1,35 @@ +CDF = JointCDF(:,1); +TotW = sum(TotalWeight); + +%% Filter 18-23 are periods +for i=1:6, + w(i) = sum(Filter(:,i+17).*TotalWeight); +end + +for i=1:6, + dcperiod(:,i) = JointPDF(:,i+17)*w(i)/sum(w); +end + +figure +area(SCCgrid(301:2101), dcperiod(301:2101,:)); +legend('1982-1995','1996-2001','2002-2006','2007-2013','2014-2017','2018-2021'); +xlabel('dollar per tonne of carbon') +ylabel('Probability density') + +Q(1,:) = sum(dcperiod(CDF<=0.2,:)); 
+Q(2,:) = sum(dcperiod(CDF<=0.4 & CDF > 0.2,:));
+Q(3,:) = sum(dcperiod(CDF<=0.6 & CDF > 0.4,:));
+Q(4,:) = sum(dcperiod(CDF<=0.8 & CDF > 0.6,:));
+Q(5,:) = sum(dcperiod(CDF > 0.8,:));
+
+w = w/sum(w);
+
+for i=1:5,
+    P(i,:) = ((Q(i,:) - 0.2*w).^2)./(0.2*w);
+end
+
+chiperiod = sum(sum(P))*TotW/5;
+pperiod = 1-chi2cdf(chiperiod,(size(Q,1)-1)*(size(Q,2)-1));
+
+clear w Q P
+
diff --git a/DecompositionTest.m b/DecompositionTest.m
new file mode 100644
index 0000000..88f3d5a
--- /dev/null
+++ b/DecompositionTest.m
@@ -0,0 +1,39 @@
+function [chi, p] = DecompositionTest(K,NQ,N)
+%function [chi, p] = DecompositionTest(K,NQ,N)
+%tests whether the components of a kernel deviate from the composite
+%
+%K is the matrix of component kernels, from
+%[K,grid] = KernelDecomposition(data,partition,kernel)
+%NQ>1 is the number of quantiles
+%N is the number of data used to estimate the kernels
+%
+%chi is the test-statistic, distributed as a chi-squared with degrees of
+%freedom equal to the number of quantiles minus one times the number of
+%components minus one
+%p is the p-value
+%
+%First version, Richard Tol, 11 March 2020
+%This version, Richard Tol, 11 March 2020
+P = size(K,2);
+G = size(K,1);
+CDF = K(:,1);
+for i=2:G,
+    CDF(i) = CDF(i-1)+K(i,1);
+end
+
+for i=1:NQ,
+    Q(i,:) = sum(K(CDF<=i/NQ & CDF > (i-1)/NQ,2:P));
+end
+
+w = sum(K(:,2:P),1);
+
+for i=1:NQ,
+    Pearson(i,:) = ((Q(i,:) - w/NQ).^2)./(w/NQ);
+end
+
+chi = sum(sum(Pearson))*N/NQ;
+
+p = 1-chi2cdf(chi,(P-2)*(NQ-1));
+
+end
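A minimal, self-contained sketch of how DecompositionTest is meant to be called. Everything below (the grid, the two normal components, the 0.6/0.4 weights, and N = 200) is invented for illustration and is not part of the repository; the quintile tests in DecomposEqual.m perform the same computation inline, with TotW playing the role of N.

%Synthetic check: two weighted normal components and their composite on a common grid
grid = linspace(-10,10,2001)';
comp1 = normpdf(grid,0,1); comp1 = 0.6*comp1/sum(comp1); %component 1, weight 0.6
comp2 = normpdf(grid,2,1); comp2 = 0.4*comp2/sum(comp2); %component 2, weight 0.4
K = [comp1+comp2, comp1, comp2]; %first column is the composite kernel, which sums to one
N = 200; %pretend 200 estimates underlie the kernels
[chi, p] = DecompositionTest(K,5,N); %5 quantiles; a small p flags heterogeneity between components
fprintf('chi2 = %6.2f, p = %5.3f\n',chi,p);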