Skip to content

Commit

Permalink
Raise if k < mx. Handle k = mx separately.
Browse files Browse the repository at this point in the history
  • Loading branch information
mlondschien committed Aug 22, 2024
1 parent 8f829a1 commit d7a5b4f
Show file tree
Hide file tree
Showing 2 changed files with 48 additions and 27 deletions.
39 changes: 24 additions & 15 deletions ivmodels/models/kclass.py
Original file line number Diff line number Diff line change
Expand Up @@ -366,6 +366,11 @@ def fit(self, X, y, Z=None, C=None, *args, **kwargs):
n, k = Z.shape
mx, mc = X.shape[1], C.shape[1]

if k < mx:
raise ValueError(
f"Need at least as many instruments {k} as endogenous regressors {mx}."
)

# Including an intercept is equivalent to replacing y <- M_1 y, X <- M_1 X,
# C <- M_1 C, Z <- M_1 Z, where M_1 = Id - 1/n 1 1^T is the projection
# orthogonal to np.ones(n). I.e., center the columns of all design matrices.
Expand Down Expand Up @@ -418,21 +423,25 @@ def fit(self, X, y, Z=None, C=None, *args, **kwargs):
self.kappa_ = 0
else:
self.fuller_alpha_ = self._fuller_alpha(self.kappa)
# If C!=None, we compute the ar_min as if we removed C from all design
# matrices. I.e., we replace Z <- M_C Z, X <- M_C X, y <- M_C y.
# We also exclude the columns in X coming from C.
X_proj_C, y_proj_C = proj(C, X[:, :mx], y)
# Here ar_min = lambdamin (
# (X y)^T M_{[Z, C]} (X y)^{-1} (X y)^T P_{M_C Z} (X y)
# ).
# Thus X_proj <- P_[M_C Z] X = P_[Z, C] X - P_C X = X_proj - X_proj_C
            # and X <- M_C X = X - X_proj_C. Same for y.
self.ar_min_ = self.ar_min(
X[:, :mx] - X_proj_C,
y - y_proj_C,
X_proj=X_proj[:, :mx] - X_proj_C,
y_proj=y_proj - y_proj_C,
)

if mx == k:
self.ar_min_ = 0
else:
# If C!=None, we compute the ar_min as if we removed C from all design
# matrices. I.e., we replace Z <- M_C Z, X <- M_C X, y <- M_C y.
# We also exclude the columns in X coming from C.
X_proj_C, y_proj_C = proj(C, X[:, :mx], y)
# Here ar_min = lambdamin (
# (X y)^T M_{[Z, C]} (X y)^{-1} (X y)^T P_{M_C Z} (X y)
# ).
# Thus X_proj <- P_[M_C Z] X = P_[Z, C] X - P_C X = X_proj - X_proj_C
                # and X <- M_C X = X - X_proj_C. Same for y.
self.ar_min_ = self.ar_min(
X[:, :mx] - X_proj_C,
y - y_proj_C,
X_proj=X_proj[:, :mx] - X_proj_C,
y_proj=y_proj - y_proj_C,
)
self.kappa_liml_ = 1 + self.ar_min_
self.kappa_ = self.kappa_liml_ - self.fuller_alpha_ / (
n - k - mc - self.fit_intercept
Expand Down
36 changes: 24 additions & 12 deletions ivmodels/tests/likelihood_ratio.py
Original file line number Diff line number Diff line change
Expand Up @@ -76,6 +76,12 @@ def likelihood_ratio_test(Z, X, y, beta, W=None, C=None, D=None, fit_intercept=T
n, k = Z.shape
mx, mw, mc, md = X.shape[1], W.shape[1], C.shape[1], D.shape[1]

if k < mx + mw:
raise ValueError(
"The number of instruments must be at least the number of endogenous "
"regressors."
)

if fit_intercept:
C = np.hstack([np.ones((n, 1)), C])

Expand All @@ -91,11 +97,14 @@ def likelihood_ratio_test(Z, X, y, beta, W=None, C=None, D=None, fit_intercept=T

XWy_proj = np.hstack([X_proj, W_proj, y_proj.reshape(-1, 1)])

ar_min = _characteristic_roots(
a=oproj(D, XWy).T @ XWy_proj,
b=XWy.T @ (XWy - XWy_proj),
subset_by_index=[0, 0],
)[0]
if k == mx + mw:
ar_min = 0
else:
ar_min = _characteristic_roots(
a=oproj(D, XWy).T @ XWy_proj,
b=XWy.T @ (XWy - XWy_proj),
subset_by_index=[0, 0],
)[0]

if md > 0:
X = np.hstack([X, D])
Expand Down Expand Up @@ -189,13 +198,16 @@ def inverse_likelihood_ratio_test(
XWy_proj = np.concatenate([XW_proj, y_proj.reshape(-1, 1)], axis=1)
XWy = np.concatenate([XW, y.reshape(-1, 1)], axis=1)

kappa_liml = np.real(
_characteristic_roots(
a=oproj(D, XWy).T @ XWy_proj,
b=XWy.T @ (XWy - XWy_proj),
subset_by_index=[0, 0],
)[0]
)
if k == mx + mw:
kappa_liml = 0
else:
kappa_liml = np.real(
_characteristic_roots(
a=oproj(D, XWy).T @ XWy_proj,
b=XWy.T @ (XWy - XWy_proj),
subset_by_index=[0, 0],
)[0]
)

dfd = n - k - mc - md - fit_intercept
quantile = scipy.stats.chi2.ppf(1 - alpha, df=mx + md) + dfd * kappa_liml
Expand Down

0 comments on commit d7a5b4f

Please sign in to comment.