@@ -52,11 +52,11 @@ characteristics :math:`X` of the treated samples, then one can use this method.
5252
5353 # DML
5454 import numpy as np
55- X = np.random.choice(np.arange(5), size=(100,3))
55+ X = np.random.choice(6, size=(100,3))
5656 Y = np.random.normal(size=(100,2))
5757 y = np.random.normal(size=(100,))
58- T = T0 = T1 = np.random.choice(np.arange(3), size=(100,2))
59- t = t0 = t1 = T [:,0]
58+ (T, T0, T1) = (np.random.choice(np.arange(3), size=(100,2)) for _ in range(3))
59+ (t, t0, t1) = (a[:,0] for a in (T, T0, T1))
6060 W = np.random.normal(size=(100,2))
6161
6262.. testcode::
@@ -646,7 +646,7 @@ Then we can estimate the coefficients :math:`\alpha_i` by running:
646646 from sklearn.preprocessing import PolynomialFeatures
647647 est = LinearDML(model_y=RandomForestRegressor(),
648648 model_t=RandomForestRegressor(),
649-                        featurizer=PolynomialFeatures(degree=3, include_bias=True))
649+                        featurizer=PolynomialFeatures(degree=3, include_bias=False))
650650 est.fit(y, T, X=X, W=W)
651651
652652 # To get the coefficients of the polynomial fitted in the final stage we can
@@ -663,7 +663,7 @@ To add fixed effect heterogeneity, we can create one-hot encodings of the id, wh
663663 from econml.dml import LinearDML
664664 from sklearn.preprocessing import OneHotEncoder
665665 # removing one id to avoid colinearity, as is standard for fixed effects
666- X_oh = OneHotEncoder(sparse_output=False).fit_transform(X)[:, 1:]
666+ X_oh = OneHotEncoder(sparse_output=False, drop="first").fit_transform(X)
667667
668668 est = LinearDML(model_y=RandomForestRegressor(),
669669 model_t=RandomForestRegressor())
@@ -703,7 +703,7 @@ We can even create a Pipeline or Union of featurizers that will apply multiply f
703703 est = LinearDML(model_y=RandomForestRegressor(),
704704 model_t=RandomForestRegressor(),
705705 featurizer=Pipeline([('log', LogFeatures()),
706- ('poly', PolynomialFeatures(degree=2))]))
706+                                             ('poly', PolynomialFeatures(degree=2, include_bias=False))]))
707707 est.fit(y, T, X=X, W=W)
708708
709709
0 commit comments