#setting work directory
#setwd("...")
rm(list = ls())

#reading Data.txt and means.txt
Data = read.table("Data.txt", sep = "\t", header = TRUE)
DailyMean = read.table("means.txt", sep = "\t", header = TRUE)

#creating daily maxima from the 96 quarter-hourly readings per day
DailyMax = rep(NA, nrow(Data)/96)
for(i in 1:length(DailyMax)){
  DailyMax[i] = max(Data[,5][(96*(i-1)+1):(96*i)], na.rm = TRUE)
}

#filling in missing daily maxima by regression on daily means
#days with no observations (max over all NAs returns -Inf) and the corresponding daily means
set = which(DailyMax == -Inf)
start = which(DailyMean[,1] == "1990-10-01")
DMean = DailyMean[,2][start:nrow(DailyMean)]

#regression part
fit = lm(DailyMax[-set] ~ DMean[-set])
DailyMax[set] = coef(fit)[1] + coef(fit)[2]*DMean[set]
summary(fit)

#deseasonalization of the data
#day-of-year index (mod 365) for the means file and for the daily maxima
DayFull = (1:nrow(DailyMean)) %% 365
Day = (1:length(DailyMax)) %% 365
DailyMean = cbind(DayFull, DailyMean)
Zeros = rep(0, length(DailyMax))
Normed = cbind(Day, DailyMax, Zeros, Zeros, Zeros)
#for each calendar day, seasonal mean and variance from a 13-day window (+/- 6 days, wrapping over year end)
for (i in 0:364){
  set = c()
  for(j in 1:nrow(DailyMean)){
    if (is.element(DailyMean[,1][j], ((i-6):(i+6)) %% 365)) {
      set = c(set, j)
    }
  }
  Normed[Normed[,1]==i, 3] = mean(DailyMean[,3][set])
  Normed[Normed[,1]==i, 4] = var(DailyMean[,3][set])
}
#standardize each daily maximum by its seasonal mean and standard deviation
Normed[,5] = (Normed[,2] - Normed[,3])/sqrt(Normed[,4])

#deseasonalized weekly maxima (complete weeks only)
WeekMax = rep(0, floor(length(Normed[,5])/7))
for(i in 1:length(WeekMax)){
  WeekMax[i] = max(Normed[,5][((7*i)-6):(7*i)])
}

#plot of deseasonalized weekly maxima
plot(WeekMax, type = 'l', ylab = "Deseasonalized Weekly Max Flow Rate", xlab = "Time",
     xaxt = 'n', main = "The Eagle River, Colorado")
axis(1, at = 14 + 104*(0:11), cex.axis = 0.55,
     labels = c("1/1/91", "1/1/93", "1/1/95", "1/1/97", "1/1/99", "1/1/01",
                "1/1/03", "1/1/05", "1/1/07", "1/1/09", "1/1/11", "1/1/13"))

#compare consecutive weekly maxima
MaxNow = WeekMax[1:(length(WeekMax)-1)]
MaxNext = WeekMax[2:length(WeekMax)]

#plot of comparison
plot(MaxNow, MaxNext, ylab = "Deseasonalized Weekly Max (Following Week)",
     xlab = "Deseasonalized Weekly Max (Present Week)", main = "The Eagle River, Colorado")

#original (not deseasonalized) weekly maxima
WeekMaxReal = rep(0, ceiling(length(DailyMax)/7))
for(i in 1:length(WeekMaxReal)){
  WeekMaxReal[i] = max(DailyMax[((7*i)-6):(7*i)])
}
plot(WeekMaxReal, type = 'l', ylab = "Weekly Max Flow Rate", xlab = "Time",
     xaxt = 'n', main = "The Eagle River, Colorado")
axis(1, at = 14 + 104*(0:11), cex.axis = 0.55,
     labels = c("1/1/91", "1/1/93", "1/1/95", "1/1/97", "1/1/99", "1/1/01",
                "1/1/03", "1/1/05", "1/1/07", "1/1/09", "1/1/11", "1/1/13"))

#real data analysis to estimate parameters using deseasonalized data
#install.packages('nloptr')   #run once if nloptr is not installed
library(stats)
library(nloptr)   #loaded for completeness; the estimation below uses stats::optim

#objective function: squared distance between empirical and model moments T1, T2
eval_f = function(x){
  a = x[1]
  c = x[2]
  d = 0.5*(a*c^(3/2)*sqrt(c*(a^2)+4) + (a^2)*(c^2) + 2*c)
  return((T1 - (d/(d+1)))^2 +
         (T2 - ((c*d + a*c*d*sqrt(d+1))/((d+1)*(c+1+a*c*sqrt(d+1)))))^2)
}

#'d' as a function of 'a' and 'c'
df = function(x){
  return(0.5*(x[1]*x[2]^(3/2)*sqrt(x[2]*(x[1]^2)+4) + (x[1]^2)*(x[2]^2) + 2*x[2]))
}

#putting deseasonalized data in model form
ZZ = exp(WeekMax)

#objective function sum parts (empirical moments)
t1 = exp(-1/(ZZ[2:length(ZZ)]))
T1 = mean(t1)
t2 = exp((-1/ZZ[1:(length(ZZ)-1)]) + (-1/ZZ[2:length(ZZ)]))
T2 = mean(t2)

#optimization and estimates
opt = optim(par = c(0.5, 0.5), fn = eval_f, method = "L-BFGS-B",
            lower = c(0, 0), upper = c(1, 1), control = list(maxit = 1000000))
opt
ahat = opt$par[1]
chat = opt$par[2]
dhat = df(c(ahat, chat))
ahat
chat
dhat

#simulation using the estimates
#setting seed
set.seed(77)

#Frechet and Levy values (n includes a 3000-step burn-in)
n = length(WeekMax) + 3000
U = runif(n+1)
epsilon = -1/log(U)
V = runif(n)
S = 0.5/((qnorm(1 - (V/2)))^2)

#MAP process
Y = rep(0, n)
Y[1] = max(ahat*chat*sqrt(S[1]*epsilon[n+1]), chat*epsilon[1])
for(j in 2:n){
  Y[j] = max(ahat*chat*sqrt(S[j]*Y[j-1]), chat*epsilon[j])
}

#deleting the non-stationary (burn-in) part
YY = Y[3001:n]

#Quantile Q-Q plot of observed vs simulated (log scale for the simulation)
plot(sort(WeekMax), sort(log(YY)), xlab = "Eagle River", ylab = "Simulation Data",
     main = "Quantile-Quantile Plot", xlim = c(-2, 8), ylim = c(-2, 8))
#45-degree reference line
x = c(-3, 10)
y = c(-3, 10)
lines(x, y)
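
#optional extra check (a minimal sketch, not part of the original analysis):
#a two-sample Kolmogorov-Smirnov test comparing the observed deseasonalized
#weekly maxima with the log of the simulated series; it ignores the serial
#dependence in the weekly maxima, so treat it only as a rough complement
#to the Q-Q plot above
ks.test(WeekMax, log(YY))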