Skip to content

Commit

Permalink
Partial update to make the code run under Python 3
Browse files Browse the repository at this point in the history
  • Loading branch information
RajaBellebon committed May 4, 2020
1 parent 4d21882 commit e6538fe
Show file tree
Hide file tree
Showing 6 changed files with 213 additions and 199 deletions.
6 changes: 6 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
.idea/**
__pycache__
venv/
*.csv
*.pdf
*.jpg
327 changes: 168 additions & 159 deletions AAA_RUN_KANSM2_Est_LB.py

Large diffs are not rendered by default.

25 changes: 12 additions & 13 deletions AAC_KAGM_SingleLoop_file.py
Original file line number Diff line number Diff line change
Expand Up @@ -73,7 +73,7 @@ def AAC_KAGM_SingleLoop(R_data, Tau_K, N, Parameters, Dt, dTau, KappaP_Constrain
F = numpy.matrix(scipy.linalg.expm(-KappaP*Dt))
D, V = numpy.linalg.eig(F)
if (numpy.any(numpy.abs(D)>1.0001)):
print 'SingleLoop:argChk abs(eig(F))>1'
print('SingleLoop:argChk abs(eig(F))>1')
exit()

Q = numpy.matrix([[G(2*d1,Dt), G(d1+d2,Dt)], [0, G(2*d2,Dt)]])
Expand Down Expand Up @@ -116,8 +116,8 @@ def AAC_KAGM_SingleLoop(R_data, Tau_K, N, Parameters, Dt, dTau, KappaP_Constrain
# Observations for time t.
y_Obs = numpy.matrix(0.01 * R_data[t,:]).getH()
y_Missing = numpy.squeeze(numpy.array(numpy.isnan(y_Obs))) #numpy.array(numpy.isnan(y_Obs))#[:,0]
y_Obs = y_Obs[-y_Missing]
R = numpy.diag(numpy.power(Sigma_Nu[-y_Missing], 2))
y_Obs = y_Obs[~y_Missing]
R = numpy.diag(numpy.power(Sigma_Nu[~y_Missing], 2))

x_Plus_i_Minus_1 = numpy.copy(x_Minus)
x_Plus_i0 = numpy.copy(x_Minus)
Expand All @@ -131,18 +131,18 @@ def AAC_KAGM_SingleLoop(R_data, Tau_K, N, Parameters, Dt, dTau, KappaP_Constrain

(y_Hat, H_i) = AAD_KAGM_R_and_dR_dx(x_Plus_i0, rL, KappaQ2, Sigma1, Sigma2, Rho12, Tau_K, dTau, ZLB_Imposed)

y_Hat = y_Hat[-y_Missing]
H_i = H_i[-y_Missing,:]
y_Hat = y_Hat[~y_Missing]
H_i = H_i[~y_Missing,:]
HPHR_i = (H_i * P_Minus * H_i.getH() + R)
K_i = numpy.linalg.solve(HPHR_i.getH(), (P_Minus*H_i.getH()).getH()).getH()
w_i = y_Obs - y_Hat - H_i * (x_Minus - x_Plus_i0)
x_Plus_i1 = x_Minus + K_i * w_i

if (IEKF_Count == 20):
# Using tolerance, so check for convergence.
# Using tolerance, so check for convergence.
if (i > 15):
# Large number of iterations, so print output to screen.
print t, i-1, numpy.matrix(x_Plus_i1).getH(), numpy.matrix(x_Plus_i0).getH(), numpy.matrix(x_Plus_i1).getH() - numpy.matrix(x_Plus_i0).getH()
print(t, i-1, numpy.matrix(x_Plus_i1).getH(), numpy.matrix(x_Plus_i0).getH(), numpy.matrix(x_Plus_i1).getH() - numpy.matrix(x_Plus_i0).getH())
if (numpy.all(numpy.abs(x_Plus_i1-x_Plus_i0)<x_Tolerance)):
# Difference from last update within tolerance, so exit.
break
Expand All @@ -161,9 +161,11 @@ def AAC_KAGM_SingleLoop(R_data, Tau_K, N, Parameters, Dt, dTau, KappaP_Constrain
P_Plus = (numpy.matrix(numpy.eye(N)) - K_i * H_i) * P_Minus
x_T[:,t] = x_Plus[:,0]
P_T[:,:,t] = numpy.copy(P_Plus)
logL = logL + numpy.log(numpy.linalg.det(HPHR_i)) + numpy.linalg.solve(HPHR_i.getH(),w_i).getH() * w_i
logL = logL + numpy.log(numpy.linalg.det(HPHR_i), where=numpy.linalg.det(HPHR_i)>0) + numpy.linalg.solve(HPHR_i.getH(),w_i).getH() * w_i

# Hold IEKF count.
if Max_IEKF_Count is None:
Max_IEKF_Count = IEKF_Count
if (i-1 > Max_IEKF_Count):
Max_IEKF_Count = i - 1
Max_IEKF_Point = t
Expand All @@ -182,7 +184,7 @@ def AAC_KAGM_SingleLoop(R_data, Tau_K, N, Parameters, Dt, dTau, KappaP_Constrain
# Negate the log likelihood value because fminunc minimizes.
EKF_logL = -EKF_logL

print EKF_logL*1e-3, Parameters[0:10], Rho12
print(EKF_logL*1e-3, Parameters[0:10], Rho12)

tmp1 = numpy.sum(x_T, axis=0)
figure = pyplot.figure(num=None, figsize=(8, 6), dpi=100, facecolor='w')
Expand All @@ -193,7 +195,4 @@ def AAC_KAGM_SingleLoop(R_data, Tau_K, N, Parameters, Dt, dTau, KappaP_Constrain
pyplot.savefig("plot.pdf")
pyplot.show()

return (EKF_logL,x_T)



return (EKF_logL,x_T)
2 changes: 1 addition & 1 deletion AAE_KAGM_f_and_df_dx_file.py
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,7 @@ def AAE_KAGM_f_and_df_dx(x_t, rL, KappaQ2, Sigma1, Sigma2, Rho12, TauGrid, ZLB_I
Omega = numpy.sqrt(Sigma1**2 * G_11 + Sigma2**2 * G_22 + 2 * Rho12 * Sigma1 * Sigma2 * G_12)

# Calculate cumulative normal probabilities for N(0,1) distribution.
d = numpy.divide(GATSM_f-rL, Omega)
d = numpy.divide(GATSM_f-rL, Omega,where=Omega!=0)
normsdist_erf_d = normsdist_erf(d)

# Calculate gradient and CAB_GATSM_f
Expand Down
46 changes: 23 additions & 23 deletions data_read.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,8 +8,8 @@
import os

def write_to_csv(idx,idx1,val,val1,sub_datenum,name):
daily_values_data=map(list,zip(*val))
daily_values_data1=map(list,zip(*val1))
daily_values_data=list(map(list,list(zip(*val))))
daily_values_data1=list(map(list,list(zip(*val1))))

daily_date_index=list(idx)
daily_date_index[:] = [x - sub_datenum for x in idx]
Expand All @@ -22,19 +22,19 @@ def write_to_csv(idx,idx1,val,val1,sub_datenum,name):
for i in range(len(daily_values_data1)):
daily_values_data1[i].insert(0,daily_date_index1[i])

with open(name+'.csv', 'wb') as csvfile:
with open(name+'.csv', 'w') as csvfile:
spamwriter = csv.writer(csvfile, delimiter=',',quotechar='|', quoting=csv.QUOTE_NONNUMERIC)
spamwriter.writerows(daily_values_data1)
spamwriter.writerows(daily_values_data)

def filter_data_index(a,thr,method):
if method is 'ge':
if method =='ge':
index=list([idx for idx,i in enumerate(a) if i<thr])
elif method is 'le':
elif method =='le':
index=list([idx for idx,i in enumerate(a) if i>thr])
elif method is 'l':
elif method =='l':
index=list([idx for idx,i in enumerate(a) if i>=thr])
elif method is 'g':
elif method =='g':
index=list([idx for idx,i in enumerate(a) if i<=thr])
return index

Expand Down Expand Up @@ -62,16 +62,16 @@ def filter_data(a,index,dim):
CurveType='OIS'
PathName=os.getcwd()
ExcelName=os.path.join(PathName, 'A_'+Country+'_All_Data_Bloomberg.xlsx')
if CurveType is 'OIS':
if CurveType =='OIS':
ExcelSheetName='D. Live OIS data'
if CurveType is 'Govt':
if CurveType =='Govt':
ExcelSheetName='D. Live Govt data';

Maturities=[0.25,0.5,1,2,3,4,5,7,10,15,20,30]

wb=openpyxl.load_workbook(ExcelName)
sheet_names=wb.get_sheet_names()
sheet=wb.get_sheet_by_name('D. Live OIS data')
sheet_names=wb.get_sheet_names
sheet=wb['D. Live OIS data']
datenum = [[] for i in range(len(datelist))]
values_data=[[] for i in range(len(datelist))]
for col_num in range(len(datelist)):
Expand All @@ -97,15 +97,15 @@ def filter_data(a,index,dim):
diff_dates=list(set(common_datenum) ^ set(datenum[i]))
for j in diff_dates:
index=datenum[i].index(j)
print(str(j)+' date num at '+str(index)+'removed')
print((str(j)+' date num at '+str(index)+'removed'))
value_to_remove = values_data[i][index]
values_data[i].remove(value_to_remove)

ref_friday=datetime.date(2013,05,24).toordinal()
ref_friday=datetime.date(2013,0o5,24).toordinal()
weeks_to_step_back = (ref_friday - common_datenum[0])/7
first_friday = ref_friday - (weeks_to_step_back*7)

week_date_index = range(first_friday,common_datenum[-1],7)
week_date_index = list(range(int(first_friday),common_datenum[-1],7))

week_values_data=[[] for i in range(len(datelist))]
for i in range(len(datelist)):
Expand Down Expand Up @@ -139,7 +139,7 @@ def filter_data(a,index,dim):


#GOVT
if Country is 'EA':
if Country =='EA':
wbG=openpyxl.load_workbook(os.path.join(PathName, 'A_GE_All_Data_Bloomberg.xlsx'))
sheet=wbG.get_sheet_by_name('D. Live Govt data')
datenumG = [[] for i in range(len(datelist))]
Expand Down Expand Up @@ -168,7 +168,7 @@ def filter_data(a,index,dim):
diff_dates=list(set(common_datenumG) ^ set(datenumG[i]))
for j in diff_dates:
index=datenumG[i].index(j)
print(str(j)+' date num at '+str(index)+'removed')
print((str(j)+' date num at '+str(index)+'removed'))
value_to_remove = values_dataG[i][index]
values_dataG[i].remove(value_to_remove)

Expand Down Expand Up @@ -210,7 +210,7 @@ def filter_data(a,index,dim):
diff_dates=list(set(common_datenumF) ^ set(datenumF[i]))
for j in diff_dates:
index=datenumG[i].index(j)
print(str(j)+' date num at '+str(index)+'removed')
print((str(j)+' date num at '+str(index)+'removed'))
value_to_remove = values_dataF[i][index]
values_dataF[i].remove(value_to_remove)

Expand All @@ -227,14 +227,14 @@ def filter_data(a,index,dim):
diff_dates=list(set(post_inter_index) ^ set(post_euro_fr_index))
for j in diff_dates:
index=post_euro_fr_index.index(j)
print(str(j)+' date num at '+str(index)+'removed')
print((str(j)+' date num at '+str(index)+'removed'))
value_to_remove = post_euro_fr_vlues[i][index]
post_euro_fr_vlues[i].remove(value_to_remove)
for i in range(len(post_euro_ge_vlues)):
diff_dates=list(set(post_inter_index) ^ set(post_euro_ge_index))
for j in diff_dates:
index=post_euro_ge_index.index(j)
print(str(j)+' date num at '+str(index)+'removed')
print((str(j)+' date num at '+str(index)+'removed'))
value_to_remove = post_euro_ge_vlues[i][index]
post_euro_ge_vlues[i].remove(value_to_remove)

Expand All @@ -246,7 +246,7 @@ def filter_data(a,index,dim):
values_data1[i] = pre_euro_ge_vlues[i] + post_euro_avg_values

else:
sheet=wb.get_sheet_by_name('D. Live Govt data')
sheet=wb['D. Live Govt data']
datenum1 = [[] for i in range(len(datelist))]
values_data1=[[] for i in range(len(datelist))]
for col_num in range(len(datelist)):
Expand All @@ -273,15 +273,15 @@ def filter_data(a,index,dim):
diff_dates=list(set(common_datenum1) ^ set(datenum1[i]))
for j in diff_dates:
index=datenum1[i].index(j)
print(str(j)+' date num at '+str(index)+'removed')
print((str(j)+' date num at '+str(index)+'removed'))
value_to_remove = values_data1[i][index]
values_data1[i].remove(value_to_remove)

ref_friday=datetime.date(2013,05,24).toordinal()
ref_friday=datetime.date(2013,0o5,24).toordinal()
weeks_to_step_back = (ref_friday - common_datenum1[0])/7
first_friday = ref_friday - (weeks_to_step_back*7)

week_date_index1 = range(first_friday,common_datenum1[-1],7)
week_date_index1 = list(range(int(first_friday),common_datenum1[-1],7))
week_values_data1=[[] for i in range(len(datelist))]
for i in range(len(datelist)):
f=interpolate.interp1d(common_datenum1,values_data1[i],'zero',bounds_error=False)
Expand Down
6 changes: 3 additions & 3 deletions functions.py
Original file line number Diff line number Diff line change
@@ -1,15 +1,15 @@
import datetime
import numpy


#######################################################################################################################
def datetime_to_matlab_datenum(dt):
    """Convert a Python datetime object to its MATLAB datenum() equivalent.

    MATLAB datenums count days from 1 Jan 0000 (day 1), while Python's
    earliest datetime is 1 Jan 0001 — hence the constant 367 offset
    (366 days for year 0, plus 1 for MATLAB's one-based day count).
    """
    epoch = datetime.datetime(1, 1, 1, 0, 0)
    seconds_per_day = datetime.timedelta(days=1).total_seconds()
    elapsed_days = (dt - epoch).total_seconds() / seconds_per_day
    return 367 + elapsed_days


#######################################################################################################################
def matlab_datenum_to_datetime(dn):
    """Convert a MATLAB datenum() value to its Python datetime equivalent.

    Inverse of datetime_to_matlab_datenum. The 367 offset undoes MATLAB's
    day count starting at 1 Jan 0000 (day 1) versus Python's 1 Jan 0001
    minimum. float(dn) preserves any fractional day (time of day), which
    an int() cast would silently discard, while still accepting numpy
    scalar inputs under Python 3.
    """
    return datetime.datetime(1, 1, 1, 0, 0) + datetime.timedelta(days=float(dn) - 367)

#######################################################################################################################


0 comments on commit e6538fe

Please sign in to comment.