# One-time package installation (these are not used below; safe to skip).
install.packages("learnr")
install.packages("tidyverse")

# Fresh session: clear workspace, close graphics devices, widen console output.
rm(list = ls())
graphics.off()
options(width = 300)

# Hand-specified autocorrelation values for lags 0..10.
x <- c(1, 0.02, 0.05, -0.09, 0.08, -0.02, 0.00, 0.12, 0.06, 0.02, -0.08)

# Build a template "acf" object from white noise, then overwrite its stored
# autocorrelations with the hand-specified values so plot.acf() can be reused.
a <- acf(x = rnorm(400), lag.max = 10, type = "correlation", plot = FALSE)
a$acf[, , 1] <- x

# Correlogram; the title is placed in the outer margin via mtext() below.
par(mfrow = c(1, 1))
par(oma = c(0, 2, 4, 2))
# BUG FIX: the original call supplied main= twice ("Correlogram" and ""),
# which errors in R because main is a formal argument of plot.acf();
# keep main = "" since mtext() adds the title in the outer margin.
plot(x = a,
     xlab = expression(paste("lag ", k, sep = "")),
     ylab = expression(paste("acf(", k, ")", sep = "")),
     col = "green4", main = "")
legend(x = "topright",
       legend = c("acf", "95% confidence bands "),
       col = c("green4", "blue"), lty = c(1, 2))
mtext(text = "Correlogram", side = 3, line = 0, outer = TRUE)
# Second correlogram, this time with the title passed directly to plot().
par(mfrow = c(1, 1))
par(oma = c(0, 2, 4, 2))
# BUG FIX: main= was supplied twice in the original call (an error in R);
# keep "Correlogram" here because this block has no mtext() outer title.
plot(x = a,
     xlab = expression(paste("lag ", k, sep = "")),
     ylab = expression(paste("acf(", k, ")", sep = "")),
     col = "green4", main = "Correlogram")
legend(x = "topright",
       legend = c("acf", "95% confidence bands "),
       col = c("green4", "blue"), lty = c(1, 2))
# Correlogram variant labelled with tau instead of k; no title and no legend.
par(mfrow=c(1,1));
par(oma=c(0,2,4,2));
plot(x=a,xlab=expression(paste("lag ",tau,sep="")),ylab=expression(paste("acf(",tau,")",sep="")),col="green4",main="");
# Correlogram with tau labels, legend, and outer-margin title.
par(mfrow = c(1, 1))
par(oma = c(0, 2, 4, 2))
# SYNTAX FIX: the original plot() call read 'sep="")=col="green4"' — a stray
# '=' replaced the closing '),', so the line could not parse; restored here.
plot(x = a,
     xlab = expression(paste("lag ", tau, sep = "")),
     ylab = expression(paste("acf(", tau, ")", sep = "")),
     col = "green4", main = "")
legend(x = "topright",
       legend = c("acf", "95% confidence bands "),
       col = c("green4", "blue"), lty = c(1, 2))
mtext(text = "Correlogram", side = 3, line = 0, outer = TRUE)
# Repeat of the previous correlogram (tau labels, legend, outer title).
par(mfrow = c(1, 1))
par(oma = c(0, 2, 4, 2))
# SYNTAX FIX: same stray '=' as before ('sep="")=col="green4"') replaced
# the closing '),' and made the call unparseable; restored here.
plot(x = a,
     xlab = expression(paste("lag ", tau, sep = "")),
     ylab = expression(paste("acf(", tau, ")", sep = "")),
     col = "green4", main = "")
legend(x = "topright",
       legend = c("acf", "95% confidence bands "),
       col = c("green4", "blue"), lty = c(1, 2))
mtext(text = "Correlogram", side = 3, line = 0, outer = TRUE)
# ---- Interactive scratch work (console-history residue, not a runnable script) ----

# Sum of some counts/amounts (purpose not recorded in this file).
2207 + 1550 + 1125 + 322 + 148 + 120 + 117 + 56 + 29
# Inspect environment variables, then set the session language to English.
Sys.getenv()
Sys.setenv(LANG = "en")
# NOTE(review): the q()/q()/n lines are residue of quitting an interactive
# session — 'n' was typed in answer to the "Save workspace image?" prompt.
# The bare symbol `n` would error with "object 'n' not found" if sourced.
q()
q()
n
# Scratch computations — partial geometric sums with ratio 0.5^2
# (presumably variances of moving-average/AR expressions; not documented here).
1 + 0.25
(1 - 0.5^4)/(1-0.5^2)
(1 - 0.5^8)/(1-0.5^2)
1.25 + 0.5^3
1.25 + 0.5^4
1 + 0.5^2 + 0.5^4
(1 - 0.5^3)/(1-0.5^2)
(1 - 0.5^6)/(1-0.5^2)
# Restore a previously saved workspace, then quit again.
load("class.RData")
q()
# Look up the root-finding function (first attempt used a wrong name).
help("poly.root")
help("polyroot")
# Roots of candidate polynomials 1 - 0.8z - 0.15z^2 + ... via polyroot();
# abs(x) gives the moduli of the complex roots.
# NOTE(review): presumably checking whether all roots lie outside the unit
# circle (AR stationarity condition) — confirm against the exercise sheet.
polyroot(c(1,-0.8,-0.15,0.09,-0.15))
x=polyroot(c(1,-0.8,-0.15,0.09,-0.15))
abs(x)
x=polyroot(c(1,-0.8,-0.35,0.15,-0.02))
abs(x)
x=polyroot(c(1,-0.8,-0.35,0.15,0.15))
abs(x)
# Exact repeat of the previous pair of lines (history residue).
x=polyroot(c(1,-0.8,-0.35,0.15,0.15))
abs(x)
x=polyroot(c(1,-0.8,-0.35,0.15,0.2))
abs(x)
x=polyroot(c(1,-0.8,-0.35,0.15,-0.2))
abs(x)
x=polyroot(c(1,-0.8,-0.35,0.15,0.2))
x
# Budget-style sums; the last line appears to correct a decimal-point slip
# in the previous one (126240 vs 1262.40).
900 + 1800 + 3600 + 1800 + 10000 + 10500 + 15000
900 + 1800 + 3600 + 1800 + 10000 + 10500 + 15000 + 126240
900 + 1800 + 3600 + 1800 + 10000 + 10500 + 15000 + 1262.40
q()
# NOTE(review): cal.ret.c.q1 is printed from a loaded workspace; it is not
# defined anywhere in this file and errors unless that workspace is loaded.
cal.ret.c.q1
# Poisson probabilities: P(X <= 5) for lambda = 1, then upper tails
# P(X > k) for lambda = 6.5 at several cutoffs k.
ppois(5, 1)
1-ppois(3, 6.5)
1-ppois(2, 6.5)
1-ppois(4, 6.5)
1-ppois(5, 6.5)
# Bayes-rule scratch work — a / (a + b) is a posterior probability.
# NOTE(review): the first attempt (a = 10000 * 0.95) looks like a slip that
# is corrected in the second attempt using prior 1/10000.
a = 10000 * 0.95
b = 0.05 / (999/1000)
a / (a +b)
a = (1/10000) * 0.95
b = (999/1000) * 0.05
a / (a +b)
a = (1/1000) * 0.95
a / (a +b)
# Temperature-difference conversion scratch work (factor 1.8 = 9/5).
x = 22.9 - 18.6
x / 1.8
x / 1.8 + 32
x * 1.8
x * 1.8 + 32
# Mean/variance scratch work; b and y are both set to 2.92.
a = 10 * 0.25
b = y = 2.92
4 * a +  4 * b
2 * a + 2 * b
sqrt(4*a + 4*b)
2 * a^0.5 + 2*b^0.5
(2/3) * (b^3 - a^3) / (b^2 - a^2)
a = 1
b = 2
(2/3) * (b^3 - a^3) / (b^2 - a^2)
# Variance of a sample proportion, p(1-p)/n ...
0.08 * 0.92 / 500
(2/3) * (b^3 - a^3) / (b^2 - a^2)
# ... and the corresponding standard errors, sqrt(p(1-p)/n).
(0.15 * 0.85 / 500)
(0.15 * 0.85 / 500)^0.5
(0.15 * 0.85 / 1000)
(0.15 * 0.85 / 1000)^0.5
# Scratch work involving the z critical values 1.645 (90%) and 1.96 (95%).
sqrt(120) * 0.1 / 1.645
sqrt(120) * 0.1 / 1.96
# Persist the workspace image and end the session.
save.image()
quit()
# Start of the permutation-test session: clear the workspace and load data.
rm(list=ls())
load("SmallNumberPerm.RData")
# NOTE(review): setwd() with an absolute personal path is not portable;
# prefer running from the project root (the first load() above already
# succeeded or failed relative to the old working directory).
setwd("~/Research/PhuongAnh/Code/SmallNumberPermR")
load("SmallNumberPerm.RData")
# Print the currently loaded version of the function before redefining it.
perm.test.caar
perm.test.caar <- function(ar.est, ar.ev, w.sizes, B = 9999, one.sided = FALSE, K = 2, rund = 3) {
#
# Permutation test of Section 4.1 of the paper Nguyen and Wolf (2026) for
# the cumulative average abnormal return (CAAR).
#
# ar.est    = n x F matrix of abnormal returns in the respective estimation
#             windows (rows = time periods, columns = firms; common size n)
# ar.ev     = m x F matrix of abnormal returns in the respective event
#             windows (a vector is accepted when m = 1)
# w.sizes   = W x 1 vector of sizes of common estimation windows
#             - first entry says how many firms share the first common window
#             - second entry says how many of the next firms share the second window, etc.
#             - the entries must sum to F, the total number of firms
#             - of course, the order of the firms in ar.est and ar.ev must align with w.sizes
# B         = number of permutations (the test implicitly adds one corresponding to the original data)
# one.sided = logical variable whether to use a one-sided test (as opposed to a two-sided test)
# K         = number of parameters estimated in abnormal-return model (degrees-of-freedom correction)
# rund      = number of significant digits used in rounding the results
#
# returns a list with the value of the test statistic (t) and the
# corresponding permutation p-value (pv)
#
# NOTE: requires the helper t.caar() to be available in the workspace.
#
  dim.est <- dim(ar.est)
  n.est <- dim.est[1]     # length of the (common) estimation window
  n.firms <- dim.est[2]   # F, the total number of firms
  if (sum(w.sizes) != n.firms)
    stop("function perm.test.caar: sum(w.sizes) does not equal number of firms")
  # Promote a vector event window (m = 1) to a 1 x F matrix.
  if (is.vector(ar.ev))
    ar.ev <- matrix(ar.ev, nrow = 1)
  dim.ev <- dim(ar.ev)
  n.ev <- dim.ev[1]       # length of the event window
  if (dim.ev[2] != n.firms)
    stop("function perm.test.caar: ar.est and ar.ev have different numbers of columns")
  # BUG FIX: the original repeated the check above verbatim a second time
  # with an unrelated error message; the dead duplicate has been removed.
  # (An unused intermediate car.vec <- apply(ar.ev, 2, sum) was also removed.)
  #
  # Column-index list: firms sharing a common estimation window are grouped
  # so they all receive the same row permutation below.
  W <- length(w.sizes)
  index.list <- vector("list", W)
  if (W == 1) {
    index.list[[1]] <- seq_len(n.firms)
  } else {
    index <- 1:w.sizes[1]
    index.list[[1]] <- index
    for (w in 2:W) {
      index <- tail(index, 1) + 1:w.sizes[w]
      index.list[[w]] <- index
    }
  }
  n.comb <- n.est + n.ev
  # Observed test statistic on the original data.
  t <- t.caar(ar.est, ar.ev, K)
  count <- 1  # the identity permutation counts as one of the B + 1 draws
  ar.comb <- rbind(ar.est, ar.ev)  # pool estimation and event rows
  ar.comb.perm <- ar.comb
  for (i in seq_len(B)) {
    # Permute the pooled rows window by window: one common permutation for
    # all firms that share an estimation window.
    for (w in seq_len(W)) {
      index <- index.list[[w]]
      index.perm <- sample(n.comb, n.comb, replace = FALSE)
      ar.comb.perm[, index] <- ar.comb[index.perm, index]
    }
    # drop = FALSE keeps the matrix shape when n.est or n.ev equals 1,
    # consistent with the explicit vector-to-matrix conversion of ar.ev above.
    ar.est.perm <- ar.comb.perm[1:n.est, , drop = FALSE]
    ar.ev.perm <- ar.comb.perm[(n.est + 1):n.comb, , drop = FALSE]
    t.perm <- t.caar(ar.est.perm, ar.ev.perm, K)
    if (one.sided) {
      if (t.perm >= t)
        count <- count + 1
    } else {
      if (abs(t.perm) >= abs(t))
        count <- count + 1
    }
  }
  pv <- count / (B + 1)
  list(t = round(t, rund), pv = round(pv, rund))
}
# Persist the workspace (including the redefined function) and end the session.
save.image()
quit()
