diff --git a/FAQ.readme b/FAQ.readme index 16b313d..461c808 100644 --- a/FAQ.readme +++ b/FAQ.readme @@ -11,10 +11,10 @@ contributed by Rachael Mansbach: I've been using the glmnet python package (great code by the way!) and I've discovered that when trying to run this code with more up-to-date versions of numpy it errors out on line 260 of cvglmnet.py: - ma = scipy.tile(scipy.arange(nfolds), [1, int(scipy.floor(nobs/nfolds))]) + ma = np.tile(np.arange(nfolds), [1, int(np.floor(nobs/nfolds))]) Due to computation of nobs/nfolds as float division rather than int division. This is easily solved by changing the line to - ma = scipy.tile(scipy.arange(nfolds), [1, int(scipy.floor(int(nobs/nfolds)))]) + ma = np.tile(np.arange(nfolds), [1, int(np.floor(int(nobs/nfolds)))]) but I did have to install from source to get it to work. It might be worth a note in the installation section or a code update? diff --git a/build/lib/glmnet_python/GLMnet.so b/build/lib/glmnet_python/GLMnet.so new file mode 100755 index 0000000..8f1cc76 Binary files /dev/null and b/build/lib/glmnet_python/GLMnet.so differ diff --git a/build/lib/glmnet_python/__init__.py b/build/lib/glmnet_python/__init__.py new file mode 100644 index 0000000..8ccbf5d --- /dev/null +++ b/build/lib/glmnet_python/__init__.py @@ -0,0 +1,40 @@ +from __future__ import absolute_import +import sys +import os +sys.path.append(os.path.join(os.path.dirname(__file__))) + +from .glmnetSet import glmnetSet +from .glmnet import glmnet +from .glmnetPlot import glmnetPlot +from .glmnetPrint import glmnetPrint +from .glmnetCoef import glmnetCoef +from .glmnetPredict import glmnetPredict +from .cvglmnet import cvglmnet +from .cvglmnetCoef import cvglmnetCoef +from .cvglmnetPlot import cvglmnetPlot +from .cvglmnetPredict import cvglmnetPredict +from .coxnet import coxnet +from .cvelnet import cvelnet +from .cvlognet import cvlognet +from .cvmultnet import cvmultnet +from .fishnet import fishnet +from .glmnetControl import glmnetControl 
+from .lognet import lognet +from .printDict import printDict +from .wtmean import wtmean +from .cvcompute import cvcompute +from .cvfishnet import cvfishnet +from .cvmrelnet import cvmrelnet +from .elnet import elnet +from .loadGlmLib import loadGlmLib +from .mrelnet import mrelnet +from .structtype import structtype +from .dataprocess import dataprocess + +__all__ = ['glmnet', 'glmnetPlot', 'glmnetPrint', 'glmnetPrint', 'glmnetPredict', 'cvglmnet', 'cvglmnetCoef', + 'cvglmnetPlot', 'cvglmnetPredict' , 'coxnet', 'cvelnet', 'cvlognet', 'cvmultnet', 'fishnet', + 'glmnetControl', 'lognet', 'printDict', 'wtmean', 'cvcompute', 'cvfishnet', 'cvmrelnet', 'elnet', + 'glmnetSet', 'loadGlmLib', 'mrelnet', 'structtype', 'dataprocess'] + +#__version__ = get_versions()['version'] +#del get_versions diff --git a/build/lib/glmnet_python/coxnet.py b/build/lib/glmnet_python/coxnet.py new file mode 100644 index 0000000..1a6ee2d --- /dev/null +++ b/build/lib/glmnet_python/coxnet.py @@ -0,0 +1,186 @@ +# -*- coding: utf-8 -*- +""" +Internal function called by glmnet. 
See also glmnet, cvglmnet + +time -- column 0 +status -- column 1 +""" +# import packages/methods +import numpy as np +import numpy as np +import ctypes +from loadGlmLib import loadGlmLib + +def coxnet(x, is_sparse, irs, pcs, y, weights, offset, parm, + nobs, nvars, jd, vp, cl, ne, nx, nlam, flmin, ulam, + thresh, isd, maxit, family): + + # load shared fortran library + glmlib = loadGlmLib() + + # pre-process data + ty = y[:, 0] + tevent = y[:, 1] + if np.any(ty <= 0): + raise ValueError('negative event time not permitted for cox family') + if len(offset) == 0: + offset = ty*0 + is_offset = False + else: + is_offset = True + + # now convert types and allocate memory before calling + # glmnet fortran library + ###################################### + # --------- PROCESS INPUTS ----------- + ###################################### + # force inputs into fortran order and scipy float64 + copyFlag = False + x = x.astype(dtype = np.float64, order = 'F', copy = copyFlag) + irs = irs.astype(dtype = np.int32, order = 'F', copy = copyFlag) + pcs = pcs.astype(dtype = np.int32, order = 'F', copy = copyFlag) + ty = ty.astype(dtype = np.float64, order = 'F', copy = copyFlag) + tevent = tevent.astype(dtype = np.float64, order = 'F', copy = copyFlag) + offset = offset.astype(dtype = np.float64, order = 'F', copy = copyFlag) + weights = weights.astype(dtype = np.float64, order = 'F', copy = copyFlag) + jd = jd.astype(dtype = np.int32, order = 'F', copy = copyFlag) + vp = vp.astype(dtype = np.float64, order = 'F', copy = copyFlag) + cl = cl.astype(dtype = np.float64, order = 'F', copy = copyFlag) + ulam = ulam.astype(dtype = np.float64, order = 'F', copy = copyFlag) + + ###################################### + # --------- ALLOCATE OUTPUTS --------- + ###################################### + # lmu + lmu = -1 + lmu_r = ctypes.c_int(lmu) + # ca + ca = np.zeros([nx, nlam], dtype = np.float64) + ca = ca.astype(dtype = np.float64, order = 'F', copy = False) + ca_r = 
ca.ctypes.data_as(ctypes.POINTER(ctypes.c_double)) + # ia + ia = -1*np.ones([nx], dtype = np.int32) + ia = ia.astype(dtype = np.int32, order = 'F', copy = False) + ia_r = ia.ctypes.data_as(ctypes.POINTER(ctypes.c_int)) + # nin + nin = -1*np.ones([nlam], dtype = np.int32) + nin = nin.astype(dtype = np.int32, order = 'F', copy = False) + nin_r = nin.ctypes.data_as(ctypes.POINTER(ctypes.c_int)) + # dev + dev = -1*np.ones([nlam], dtype = np.float64) + dev = dev.astype(dtype = np.float64, order = 'F', copy = False) + dev_r = dev.ctypes.data_as(ctypes.POINTER(ctypes.c_double)) + # alm + alm = -1*np.ones([nlam], dtype = np.float64) + alm = alm.astype(dtype = np.float64, order = 'F', copy = False) + alm_r = alm.ctypes.data_as(ctypes.POINTER(ctypes.c_double)) + # nlp + nlp = -1 + nlp_r = ctypes.c_int(nlp) + # jerr + jerr = -1 + jerr_r = ctypes.c_int(jerr) + # dev0 + dev0 = -1 + dev0_r = ctypes.c_double(dev0) + + # ################################### + # main glmnet fortran caller + # ################################### + if is_sparse: + # no sparse coxnet implemented + raise ValueError('Cox model not implemented for sparse x in glmnet') + + else: + # call fortran coxnet routine + glmlib.coxnet_( + ctypes.byref(ctypes.c_double(parm)), + ctypes.byref(ctypes.c_int(nobs)), + ctypes.byref(ctypes.c_int(nvars)), + x.ctypes.data_as(ctypes.POINTER(ctypes.c_double)), + ty.ctypes.data_as(ctypes.POINTER(ctypes.c_double)), + tevent.ctypes.data_as(ctypes.POINTER(ctypes.c_double)), + offset.ctypes.data_as(ctypes.POINTER(ctypes.c_double)), + weights.ctypes.data_as(ctypes.POINTER(ctypes.c_double)), + jd.ctypes.data_as(ctypes.POINTER(ctypes.c_int)), + vp.ctypes.data_as(ctypes.POINTER(ctypes.c_double)), + cl.ctypes.data_as(ctypes.POINTER(ctypes.c_double)), + ctypes.byref(ctypes.c_int(ne)), + ctypes.byref(ctypes.c_int(nx)), + ctypes.byref(ctypes.c_int(nlam)), + ctypes.byref(ctypes.c_double(flmin)), + ulam.ctypes.data_as(ctypes.POINTER(ctypes.c_double)), + ctypes.byref(ctypes.c_double(thresh)), 
+ ctypes.byref(ctypes.c_int(maxit)), + ctypes.byref(ctypes.c_int(isd)), + ctypes.byref(lmu_r), + ca_r, + ia_r, + nin_r, + ctypes.byref(dev0_r), + dev_r, + alm_r, + ctypes.byref(nlp_r), + ctypes.byref(jerr_r) + ) + + # ################################### + # post process results + # ################################### + + # check for error + if (jerr_r.value > 0): + raise ValueError("Fatal glmnet error in library call : error code = ", jerr_r.value) + elif (jerr_r.value < 0): + print("Warning: Non-fatal error in glmnet library call: error code = ", jerr_r.value) + print("Check results for accuracy. Partial or no results returned.") + + # clip output to correct sizes + lmu = lmu_r.value + ca = ca[0:nx, 0:lmu] + ia = ia[0:nx] + nin = nin[0:lmu] + dev = dev[0:lmu] + alm = alm[0:lmu] + + # ninmax + ninmax = max(nin) + # fix first value of alm (from inf to correct value) + if ulam[0] == 0.0: + t1 = np.log(alm[1]) + t2 = np.log(alm[2]) + alm[0] = np.exp(2*t1 - t2) + # create return fit dictionary + if ninmax > 0: + ca = ca[0:ninmax, :] + df = np.sum(np.absolute(ca) > 0, axis=0) + ja = ia[0:ninmax] - 1 # ia is 1-indexed in fortran + oja = np.argsort(ja) + ja1 = ja[oja] + beta = np.zeros([nvars, lmu], dtype = np.float64) + beta[ja1, :] = ca[oja, :] + else: + beta = np.zeros([nvars, lmu], dtype = np.float64) + df = np.zeros([1, lmu], dtype = np.float64) + + fit = dict() + fit['beta'] = beta + fit['dev'] = dev + fit['nulldev'] = dev0_r.value + fit['df']= df + fit['lambdau'] = alm + fit['npasses'] = nlp_r.value + fit['jerr'] = jerr_r.value + fit['dim'] = np.array([nvars, lmu], dtype = np.integer) + fit['offset'] = is_offset + fit['class'] = 'coxnet' + + # ################################### + # return to caller + # ################################### + + return fit +#----------------------------------------- +# end of method coxnet +#----------------------------------------- + diff --git a/build/lib/glmnet_python/cvcompute.py b/build/lib/glmnet_python/cvcompute.py new file 
mode 100644 index 0000000..eef8e4b --- /dev/null +++ b/build/lib/glmnet_python/cvcompute.py @@ -0,0 +1,34 @@ +# -*- coding: utf-8 -*- +""" +Internal glmnet function. See also cvglmnet. + +Compute the weighted mean and SD within folds, and hence the SE of the mean +""" +import numpy as np +from wtmean import wtmean + +def cvcompute(mat, weights, foldid, nlams): + if len(weights.shape) > 1: + weights = np.reshape(weights, [weights.shape[0], ]) + wisum = np.bincount(foldid, weights = weights) + nfolds = np.amax(foldid) + 1 + outmat = np.ones([nfolds, mat.shape[1]])*np.NaN + good = np.zeros([nfolds, mat.shape[1]]) + mat[np.isinf(mat)] = np.NaN + for i in range(nfolds): + tf = foldid == i + mati = mat[tf, ] + wi = weights[tf, ] + outmat[i, :] = wtmean(mati, wi) + good[i, 0:nlams[i]] = 1 + N = np.sum(good, axis = 0) + cvcpt = dict() + cvcpt['cvraw'] = outmat + cvcpt['weights'] = wisum + cvcpt['N'] = N + + return(cvcpt) + +# end of cvcompute +#========================= + diff --git a/build/lib/glmnet_python/cvelnet.py b/build/lib/glmnet_python/cvelnet.py new file mode 100644 index 0000000..f4d6533 --- /dev/null +++ b/build/lib/glmnet_python/cvelnet.py @@ -0,0 +1,83 @@ +# -*- coding: utf-8 -*- +""" +Internal cvglmnet function. See also cvglmnet. 
+ +""" +import numpy as np +from glmnetPredict import glmnetPredict +from wtmean import wtmean +from cvcompute import cvcompute + +def cvelnet(fit, \ + lambdau, \ + x, \ + y, \ + weights, \ + offset, \ + foldid, \ + ptype, \ + grouped, \ + keep = False): + + typenames = {'deviance':'Mean-Squared Error', 'mse':'Mean-Squared Error', + 'mae':'Mean Absolute Error'} + if ptype == 'default': + ptype = 'mse' + + ptypeList = ['mse', 'mae', 'deviance'] + if not ptype in ptypeList: + print('Warning: only ', ptypeList, 'available for Gaussian models; ''mse'' used') + ptype = 'mse' + if len(offset) > 0: + y = y - offset + + predmat = np.ones([y.size, lambdau.size])*np.NAN + nfolds = np.amax(foldid) + 1 + nlams = [] + for i in range(nfolds): + which = foldid == i + fitobj = fit[i].copy() + fitobj['offset'] = False + preds = glmnetPredict(fitobj, x[which, ]) + nlami = np.size(fit[i]['lambdau']) + predmat[which, 0:nlami] = preds + nlams.append(nlami) + # convert nlams to scipy array + nlams = np.array(nlams, dtype = np.integer) + + N = y.shape[0] - np.sum(np.isnan(predmat), axis = 0) + yy = np.tile(y, [1, lambdau.size]) + + if ptype == 'mse': + cvraw = (yy - predmat)**2 + elif ptype == 'deviance': + cvraw = (yy - predmat)**2 + elif ptype == 'mae': + cvraw = np.absolute(yy - predmat) + + if y.size/nfolds < 3 and grouped == True: + print('Option grouped=false enforced in cv.glmnet, since < 3 observations per fold') + grouped = False + + if grouped == True: + cvob = cvcompute(cvraw, weights, foldid, nlams) + cvraw = cvob['cvraw'] + weights = cvob['weights'] + N = cvob['N'] + + cvm = wtmean(cvraw, weights) + sqccv = (cvraw - cvm)**2 + cvsd = np.sqrt(wtmean(sqccv, weights)/(N-1)) + + result = dict() + result['cvm'] = cvm + result['cvsd'] = cvsd + result['name'] = typenames[ptype] + + if keep: + result['fit_preval'] = predmat + + return(result) + +# end of cvelnet +#========================= diff --git a/build/lib/glmnet_python/cvfishnet.py b/build/lib/glmnet_python/cvfishnet.py new 
file mode 100644 index 0000000..4bf7ef7 --- /dev/null +++ b/build/lib/glmnet_python/cvfishnet.py @@ -0,0 +1,99 @@ +# -*- coding: utf-8 -*- +""" +Internal cvglmnet function. See also cvglmnet. + +""" +import numpy as np +from glmnetPredict import glmnetPredict +from wtmean import wtmean +from cvcompute import cvcompute + +def cvfishnet(fit, \ + lambdau, \ + x, \ + y, \ + weights, \ + offset, \ + foldid, \ + ptype, \ + grouped, \ + keep = False): + + typenames = {'deviance':'Poisson Deviance', 'mse':'Mean-Squared Error', + 'mae':'Mean Absolute Error'} + if ptype == 'default': + ptype = 'deviance' + + ptypeList = ['mse', 'mae', 'deviance'] + if not ptype in ptypeList: + print('Warning: only ', ptypeList, 'available for Poisson models; ''deviance'' used') + ptype = 'deviance' + + if len(offset) > 0: + is_offset = True + else: + is_offset = False + + predmat = np.ones([y.size, lambdau.size])*np.NAN + nfolds = np.amax(foldid) + 1 + nlams = [] + for i in range(nfolds): + which = foldid == i + fitobj = fit[i].copy() + if is_offset: + off_sub = offset[which] + else: + off_sub = np.empty([0]) + preds = glmnetPredict(fitobj, x[which, ], offset = off_sub) + nlami = np.size(fit[i]['lambdau']) + predmat[which, 0:nlami] = preds + nlams.append(nlami) + # convert nlams to np array + nlams = np.array(nlams, dtype = np.integer) + + N = y.shape[0] - np.sum(np.isnan(predmat), axis = 0) + yy = np.tile(y, [1, lambdau.size]) + + if ptype == 'mse': + cvraw = (yy - predmat)**2 + elif ptype == 'deviance': + cvraw = devi(yy, predmat) + elif ptype == 'mae': + cvraw = np.absolute(yy - predmat) + + if y.size/nfolds < 3 and grouped == True: + print('Option grouped=false enforced in cvglmnet, since < 3 observations per fold') + grouped = False + + if grouped == True: + cvob = cvcompute(cvraw, weights, foldid, nlams) + cvraw = cvob['cvraw'] + weights = cvob['weights'] + N = cvob['N'] + + cvm = wtmean(cvraw, weights) + sqccv = (cvraw - cvm)**2 + cvsd = np.sqrt(wtmean(sqccv, weights)/(N-1)) + + 
result = dict() + result['cvm'] = cvm + result['cvsd'] = cvsd + result['name'] = typenames[ptype] + + if keep: + result['fit_preval'] = predmat + + return(result) + +# end of cvfishnet +#========================= +def devi(yy, eta): + deveta = yy*eta - np.exp(eta) + devy = yy*np.log(yy) - yy + devy[yy == 0] = 0 + result = 2*(devy - deveta) + return(result) + + + + diff --git a/build/lib/glmnet_python/cvglmnet.py b/build/lib/glmnet_python/cvglmnet.py new file mode 100644 index 0000000..6fd4d0f --- /dev/null +++ b/build/lib/glmnet_python/cvglmnet.py @@ -0,0 +1,355 @@ +# -*- coding: utf-8 -*- +""" +-------------------------------------------------------------------------- + cvglmnet.m: cross-validation for glmnet +-------------------------------------------------------------------------- + + DESCRIPTION: + Does k-fold cross-validation for glmnet, produces a plot, and returns + a value for lambdau. Cross-validation is not implemented for Cox model yet. + + USAGE: + + Note that like glmnet, all arguments are keyword-only: + + CVerr = cvglmnet(x, y, family, options, type, nfolds, foldid, + parallel, keep, grouped); + + Fewer input arguments(more often) are allowed in the call. Default values + for the arguments are used unless specified by the user. + +======================= +INPUT ARGUMENTS + x nobs x nvar np 2D array of x parameters (as in glmnet). + y nobs x nc np Response y as in glmnet. + family Response type as family in glmnet. + options Options as in glmnet. + ptype loss to use for cross-validation. Currently five options, not + all available for all models. The default is ptype='deviance', which uses + squared-error for Gaussian models (a.k.a ptype='mse' there), deviance for + logistic and Poisson regression, and partial-likelihood for the Cox + model (Note that CV for cox model is not implemented yet). + ptype='class' applies to binomial and multinomial logistic + regression only, and gives misclassification error. 
ptype='auc' is for + two-class logistic regression only, and gives area under the ROC curve. + ptype='mse' or ptype='mae' (mean absolute error) can be used by all models + except the 'cox'; they measure the deviation from the fitted mean to the + response. + nfolds number of folds - default is 10. Although nfolds can be as + large as the sample size (leave-one-out CV), it is not recommended for + large datasets. Smallest value allowable is nfolds=3. + foldid an optional vector of values between 0 and nfold-1 identifying + what fold each observation is in. If supplied, nfold can be + missing. + parallel Number of CPU cores used to fit each fold . If given a value of -1, + all cores are used. + keep If keep=True, a prevalidated array is returned containing + fitted values for each observation and each value of lambda. + This means these fits are computed with this observation and + the rest of its fold omitted. The foldid vector is also + returned. Default is keep=False. + grouped This is an experimental argument, with default true, and can + be ignored by most users. For all models except the 'cox', + this refers to computing nfolds separate statistics, and then + using their mean and estimated standard error to describe the + CV curve. If grouped=false, an error matrix is built up at + the observation level from the predictions from the nfold + fits, and then summarized (does not apply to + type='auc'). For the 'cox' family, grouped=true obtains the + CV partial likelihood for the Kth fold by subtraction; by + subtracting the log partial likelihood evaluated on the full + dataset from that evaluated on the on the (K-1)/K dataset. + This makes more efficient use of risk sets. With + grouped=FALSE the log partial likelihood is computed only on + the Kth fold. + +======================= +OUTPUT ARGUMENTS: + A dict() is returned with the following fields. + lambdau the values of lambda used in the fits. 
+ cvm the mean cross-validated error - a vector of length + length(lambdau). + cvsd estimate of standard error of cvm. + cvup upper curve = cvm+cvsd. + cvlo lower curve = cvm-cvsd. + nzero number of non-zero coefficients at each lambda. + name a text string indicating type of measure (for plotting + purposes). + glmnet_fit a fitted glmnet object for the full data. + lambda_min value of lambda that gives minimum cvm. + lambda_1se largest value of lambda such that error is within 1 standard + error of the minimum. + class Type of regression - internal usage. + fit_preval if keep=true, this is the array of prevalidated fits. Some + entries can be NA, if that and subsequent values of lambda + are not reached for that fold. + foldid if keep=true, the fold assignments used. + + DETAILS: + The function runs glmnet nfolds+1 times; the first to get the lambda + sequence, and then the remainder to compute the fit with each of the + folds omitted. The error is accumulated, and the average error and + standard deviation over the folds is computed. Note that cvglmnet + does NOT search for values for alpha. A specific value should be + supplied, else alpha=1 is assumed by default. If users would like to + cross-validate alpha as well, they should call cvglmnet with a + pre-computed vector foldid, and then use this same fold vector in + separate calls to cvglmnet with different values of alpha. + + LICENSE: GPL-2 + + AUTHORS: + Algorithm was designed by Jerome Friedman, Trevor Hastie and Rob Tibshirani + Fortran code was written by Jerome Friedman + R wrapper (from which the MATLAB wrapper was adapted) was written by Trevor Hasite + The original MATLAB wrapper was written by Hui Jiang, + and is updated and maintained by Junyang Qian. + This Python wrapper (adapted from the Matlab and R wrappers) is written by Balakumar B.J., + Department of Statistics, Stanford University, Stanford, California, USA. + + REFERENCES: + Friedman, J., Hastie, T. and Tibshirani, R. 
(2008) Regularization Paths for Generalized Linear Models via Coordinate Descent, + http://www.jstatsoft.org/v33/i01/ + Journal of Statistical Software, Vol. 33(1), 1-22 Feb 2010 + + Simon, N., Friedman, J., Hastie, T., Tibshirani, R. (2011) Regularization Paths for Cox's Proportional Hazards Model via Coordinate Descent, + http://www.jstatsoft.org/v39/i05/ + Journal of Statistical Software, Vol. 39(5) 1-13 + + Tibshirani, Robert., Bien, J., Friedman, J.,Hastie, T.,Simon, N.,Taylor, J. and Tibshirani, Ryan. (2010) Strong Rules for Discarding Predictors in Lasso-type Problems, + http://www-stat.stanford.edu/~tibs/ftp/strong.pdf + Stanford Statistics Technical Report + + SEE ALSO: + cvglmnetPlot, cvglmnetCoef, cvglmnetPredict, and glmnet. + + EXAMPLES: + + # Gaussian + x = np.random.rand(100, 10) + y = np.random.rand(100, 1) + cvfit = cvglmnet(x = x, y = y) + cvglmnetPlot(cvfit) + print( cvglmnetCoef(cvfit) ) + print( cvglmnetPredict(cvfit, x[0:5, :], 'lambda_min') ) + cvfit1 = cvglmnet(x = x, y = y, ptype = 'mae') + cvglmnetPlot(cvfit1) + + # Binomial + x = np.random.rand(100, 10) + y = np.random.rand(100,1) + y = (y > 0.5)*1.0 + fit = cvglmnet(x = x, y = y, family = 'binomial', ptype = 'class') + cvglmnetPlot(fit) + + # poisson + x = np.random.rand(100,10) + y = np.random.poisson(size = [100, 1])*1.0 + cvfit = cvglmnet(x = x, y = y, family = 'poisson') + cvglmnetPlot(cvfit) + + # Multivariate Gaussian: + x = np.random.rand(100, 10) + y = np.random.rand(100,3) + cvfit = cvglmnet(x = x, y = y, family = 'mgaussian') + cvglmnetPlot(cvfit) + + # Multinomial + x = np.random.rand(100,10) + y = np.random.rand(100,1) + y[y < 0.3] = 1.0 + y[y < 0.6] = 2.0 + y[y < 1.0] = 3.0 + cvfit = cvglmnet(x = x, y = y, family = 'multinomial') + cvglmnetPlot(cvfit) + + #cox + Not implemented for cvglmnet.py + + + + % Cox + n=1000;p=30; + nzc=p/3; + x=randn(n,p); + beta=randn(nzc,1); + fx=x(:,1:nzc)*beta/3; + hx=exp(fx); + ty=exprnd(1./hx,n,1); + tcens=binornd(1,0.3,n,1); + 
y=cat(2,ty,1-tcens); + foldid=randsample(10,n,true); + fit1_cv=cvglmnet(x,y,'cox',[],[],[],foldid); + cvglmnetPlot(fit1_cv); + + % Parallel + matlabpool; + x=randn(1e3,100); + y=randn(1e3,1); + tic; + cvglmnet(x,y); + toc; + tic; + cvglmnet(x,y,[],[],[],[],[],true); + toc; + +""" +import sys +import joblib +import multiprocessing +from glmnetSet import glmnetSet +from glmnetPredict import glmnetPredict +import numpy as np +from glmnet import glmnet +from cvelnet import cvelnet +from cvlognet import cvlognet +from cvmultnet import cvmultnet +from cvmrelnet import cvmrelnet +from cvfishnet import cvfishnet + +def cvglmnet(*, x, + y, + family = 'gaussian', + ptype = 'default', + nfolds = 10, + foldid = np.empty([0]), + parallel = 1, + keep = False, + grouped = True, + **options): + + options = glmnetSet(options) + + if 0 < len(options['lambdau']) < 2: + raise ValueError('Need more than one value of lambda for cv.glmnet') + + nobs = x.shape[0] + + # we should not really need this. user must supply the right shape + # if y.shape[0] != nobs: + # y = np.transpose(y) + + # convert 1d python array of size nobs to 2d python array of size nobs x 1 + if len(y.shape) == 1: + y = np.reshape(y, [y.size, 1]) + + # we should not really need this. 
user must supply the right shape + # if (len(options['offset']) > 0) and (options['offset'].shape[0] != nobs): + # options['offset'] = np.transpose(options['offset']) + + if len(options['weights']) == 0: + options['weights'] = np.ones([nobs, 1], dtype = np.float64) + + # main call to glmnet + glmfit = glmnet(x = x, y = y, family = family, **options) + + is_offset = glmfit['offset'] + options['lambdau'] = glmfit['lambdau'] + + nz = glmnetPredict(glmfit, np.empty([0]), np.empty([0]), 'nonzero') + if glmfit['class'] == 'multnet': + nnz = np.zeros([len(options['lambdau']), len(nz)]) + for i in range(len(nz)): + nnz[:, i] = np.transpose(np.sum(nz[i], axis = 0)) + nz = np.ceil(np.median(nnz, axis = 1)) + elif glmfit['class'] == 'mrelnet': + nz = np.transpose(np.sum(nz[0], axis = 0)) + else: + nz = np.transpose(np.sum(nz, axis = 0)) + + if len(foldid) == 0: + ma = np.tile(np.arange(nfolds), [1, int(np.floor(nobs/nfolds))]) + mb = np.arange(np.mod(nobs, nfolds)) + mb = np.reshape(mb, [1, mb.size]) + population = np.append(ma, mb, axis = 1) + mc = np.random.permutation(len(population)) + mc = mc[0:nobs] + foldid = population[mc] + foldid = np.reshape(foldid, [foldid.size,]) + else: + nfolds = np.amax(foldid) + 1 + + if nfolds < 3: + raise ValueError('nfolds must be bigger than 3; nfolds = 10 recommended') + + cpredmat = list() + foldid = np.reshape(foldid, [foldid.size, ]) + if parallel != 1: + if parallel == -1: + num_cores = multiprocessing.cpu_count() + else: + num_cores = parallel + sys.stderr.write("[status]\tParallel glmnet cv with " + str(num_cores) + " cores\n") + cpredmat = joblib.Parallel(n_jobs=num_cores)(joblib.delayed(doCV)(i, x, y, family, foldid, nfolds, is_offset, **options) for i in range(nfolds)) + else: + for i in range(nfolds): + newFit = doCV(i, x, y, family, foldid, nfolds, is_offset, **options) + cpredmat.append(newFit) + + if cpredmat[0]['class'] == 'elnet': + cvstuff = cvelnet( cpredmat, options['lambdau'], x, y \ + , options['weights'], 
options['offset'] \ + , foldid, ptype, grouped, keep) + elif cpredmat[0]['class'] == 'lognet': + cvstuff = cvlognet(cpredmat, options['lambdau'], x, y \ + , options['weights'], options['offset'] \ + , foldid, ptype, grouped, keep) + elif cpredmat[0]['class'] == 'multnet': + cvstuff = cvmultnet(cpredmat, options['lambdau'], x, y \ + , options['weights'], options['offset'] \ + , foldid, ptype, grouped, keep) + elif cpredmat[0]['class'] == 'mrelnet': + cvstuff = cvmrelnet(cpredmat, options['lambdau'], x, y \ + , options['weights'], options['offset'] \ + , foldid, ptype, grouped, keep) + elif cpredmat[0]['class'] == 'fishnet': + cvstuff = cvfishnet(cpredmat, options['lambdau'], x, y \ + , options['weights'], options['offset'] \ + , foldid, ptype, grouped, keep) + elif cpredmat[0]['class'] == 'coxnet': + raise NotImplementedError('Cross-validation for coxnet not implemented yet.') + #cvstuff = cvcoxnet(cpredmat, options['lambdau'], x, y \ + # , options['weights'], options['offset'] \ + # , foldid, ptype, grouped, keep) + + cvm = cvstuff['cvm'] + cvsd = cvstuff['cvsd'] + cvname = cvstuff['name'] + + CVerr = dict() + CVerr['lambdau'] = options['lambdau'] + CVerr['cvm'] = np.transpose(cvm) + CVerr['cvsd'] = np.transpose(cvsd) + CVerr['cvup'] = np.transpose(cvm + cvsd) + CVerr['cvlo'] = np.transpose(cvm - cvsd) + CVerr['nzero'] = nz + CVerr['name'] = cvname + CVerr['glmnet_fit'] = glmfit + if keep: + CVerr['fit_preval'] = cvstuff['fit_preval'] + CVerr['foldid'] = foldid + if ptype == 'auc': + cvm = -cvm + CVerr['lambda_min'] = np.amax(options['lambdau'][cvm <= np.amin(cvm)]).reshape([1]) + idmin = options['lambdau'] == CVerr['lambda_min'] + semin = cvm[idmin] + cvsd[idmin] + CVerr['lambda_1se'] = np.amax(options['lambdau'][cvm <= semin]).reshape([1]) + CVerr['class'] = 'cvglmnet' + + return(CVerr) + +# end of cvglmnet +#========================== +def doCV(i, x, y, family, foldid, nfolds, is_offset, **options): + which = foldid == i + opts = options.copy() + opts['weights'] 
= opts['weights'][~which, ] + opts['lambdau'] = options['lambdau'] + if is_offset: + if opts['offset'].size > 0: + opts['offset'] = opts['offset'][~which, ] + xr = x[~which, ] + yr = y[~which, ] + newFit = glmnet(x = xr, y = yr, family = family, **opts) + return(newFit) + diff --git a/build/lib/glmnet_python/cvglmnetCoef.py b/build/lib/glmnet_python/cvglmnetCoef.py new file mode 100644 index 0000000..711ce2f --- /dev/null +++ b/build/lib/glmnet_python/cvglmnetCoef.py @@ -0,0 +1,95 @@ +# -*- coding: utf-8 -*- +""" +-------------------------------------------------------------------------- + cvglmnetCoef computes coefficients from a "cvglmnet" object. +-------------------------------------------------------------------------- + + DESCRIPTION: + This function extracts coefficients at certain lambdas if they are + in the lambda sequence of a "cvglmnet" object or make predictions + if they are not. + + USAGE: + mcoef=cvglmnetCoef(object); + ncoef=cvglmnetCoef(object, s); + + INPUT ARGUMENTS: + obj Fitted "cvglmnet" model object. + s Value(s) of the penalty parameter lambdau at which computation + is required. Default is the value s='lambda_1se' stored on + the CV object. Alternatively s='lambda_min' can be used. If s + is numeric, it is taken as the value(s) of lambda to be used. + + OUTPUT ARGUMENTS: + result If s is 'lambda_1se' or 'lambda_min', the coefficients at + that s is returned. If s is numeric, a (nvars+1) x length(s) + matrix is returned with each column being the coefficients + at an s. Note that the first row are the intercepts (0 if no + intercept in the original model). + + DETAILS: + The function uses linear interpolation to make predictions for values + of s that do not coincide with those used in the fitting algorithm. + Exact prediction is not supported currently. 
+ + LICENSE: GPL-2 + + AUTHORS: + Algorithm was designed by Jerome Friedman, Trevor Hastie and Rob Tibshirani + Fortran code was written by Jerome Friedman + R wrapper (from which the MATLAB wrapper was adapted) was written by Trevor Hasite + The original MATLAB wrapper was written by Hui Jiang, + and is updated and maintained by Junyang Qian. + This Python wrapper (adapted from the Matlab and R wrappers) + is written by Balakumar B.J., bbalasub@stanford.edu + Department of Statistics, Stanford University, Stanford, California, USA. + + + REFERENCES: + Friedman, J., Hastie, T. and Tibshirani, R. (2008) Regularization Paths for Generalized Linear Models via Coordinate Descent, + http://www.jstatsoft.org/v33/i01/ + Journal of Statistical Software, Vol. 33(1), 1-22 Feb 2010 + + Simon, N., Friedman, J., Hastie, T., Tibshirani, R. (2011) Regularization Paths for Cox's Proportional Hazards Model via Coordinate Descent, + http://www.jstatsoft.org/v39/i05/ + Journal of Statistical Software, Vol. 39(5) 1-13 + + Tibshirani, Robert., Bien, J., Friedman, J.,Hastie, T.,Simon, N.,Taylor, J. and Tibshirani, Ryan. (2010) Strong Rules for Discarding Predictors in Lasso-type Problems, + http://www-stat.stanford.edu/~tibs/ftp/strong.pdf + Stanford Statistics Technical Report + + SEE ALSO: + cvglmnet, cvglmnetPrint, and cvglmnetPredict. 
+ + EXAMPLES: + x=randn(100,20); + y=randn(100,1); + cvfit=cvglmnet(x,y); + ncoef=cvglmnetCoef(cvfit,'lambda_min'); + +""" + +import numpy as np +from glmnetCoef import glmnetCoef + +def cvglmnetCoef(obj, s = None): + + if s is None or len(s) == 0: + s = obj['lambda_1se'] + + if isinstance(s, np.ndarray): + lambdau = s + elif isinstance(s, str): + sbase = ['lambda_1se', 'lambda_min'] + indxtf = [x.startswith(s.lower()) for x in sbase] # find index of family in fambase + sind= [i for i in range(len(indxtf)) if indxtf[i] == True] + s = sbase[sind[0]] + lambdau = obj[s] + else: + raise ValueError('Invalid form of s') + + result = glmnetCoef(obj['glmnet_fit'], lambdau) + + return(result) + + \ No newline at end of file diff --git a/build/lib/glmnet_python/cvglmnetPlot.py b/build/lib/glmnet_python/cvglmnetPlot.py new file mode 100644 index 0000000..9392293 --- /dev/null +++ b/build/lib/glmnet_python/cvglmnetPlot.py @@ -0,0 +1,129 @@ +# -*- coding: utf-8 -*- +""" +-------------------------------------------------------------------------- + cvglmnetPlot.m: plot the cross-validation curve produced by cvglmnet +-------------------------------------------------------------------------- + + DESCRIPTION: + Plots the cross-validation curve, and upper and lower standard + deviation curves, as a function of the lambda values used. + + USAGE: + cvglmnetPlot(cvfit); + cvglmnetPlot(cvfit, sign_lambda) + cvglmnetPlot(cvfit, sign_lambda, options) + + INPUT ARGUMENTS: + cvobject fitted "cvglmnet" object + sign_lambda Either plot against log(lambda) (default) or its negative if + sign_lambda=-1. + varargin Other errorbar parameters. + + DETAILS: + A plot is produced, and nothing is returned. 
def cvglmnetPlot(cvobject, sign_lambda = 1.0, **options):
    """Plot the cross-validation curve produced by cvglmnet.

    Draws cvm +/- cvsd error bars against sign_lambda*log(lambda), marks
    lambda_min and lambda_1se with dashed vertical lines, and adds a
    secondary top axis labeled with the number of nonzero coefficients.

    cvobject    : dict returned by cvglmnet.
    sign_lambda : plot against log(lambda) (default 1.0) or against its
                  negative when sign_lambda is negative.
    options     : extra keyword arguments forwarded to plt.errorbar.

    A plot is produced; nothing is returned.
    """
    import matplotlib.pyplot as plt

    sloglam = sign_lambda*np.log(cvobject['lambdau'])

    fig = plt.gcf()
    ax1 = plt.gca()

    plt.errorbar(sloglam, cvobject['cvm'], cvobject['cvsd'], \
                 ecolor = (0.5, 0.5, 0.5), \
                 **options
                 )
    plt.plot(sloglam, cvobject['cvm'], linestyle = 'dashed',\
             marker = 'o', markerfacecolor = 'r')

    xlim1 = ax1.get_xlim()
    ylim1 = ax1.get_ylim()

    # dashed vertical line at lambda_min (and at lambda_1se when distinct)
    xval = sign_lambda*np.log(np.array([cvobject['lambda_min'], cvobject['lambda_min']]))
    plt.plot(xval, ylim1, color = 'b', linestyle = 'dashed', \
             linewidth = 1)

    if cvobject['lambda_min'] != cvobject['lambda_1se']:
        xval = sign_lambda*np.log([cvobject['lambda_1se'], cvobject['lambda_1se']])
        plt.plot(xval, ylim1, color = 'b', linestyle = 'dashed', \
                 linewidth = 1)

    # secondary x-axis on top: degrees of freedom (nonzero coefficients)
    ax2 = ax1.twiny()
    ax2.xaxis.tick_top()

    atdf = ax1.get_xticks()
    # np.integer is an abstract scalar type and is rejected as a dtype by
    # recent numpy; the builtin int is the correct dtype here.
    indat = np.ones(atdf.shape, dtype = int)
    # Map each tick position to the index of the nearest lambda. Compare
    # last against FIRST element to detect an increasing sequence (the
    # original compared sloglam[1], which raises for a length-1 path).
    if sloglam[-1] >= sloglam[0]:
        for j in range(len(sloglam)-1, -1, -1):
            indat[atdf <= sloglam[j]] = j
    else:
        for j in range(len(sloglam)):
            indat[atdf <= sloglam[j]] = j

    prettydf = cvobject['nzero'][indat]

    # matplotlib Axes have no 'XLim'/'XTicks'/'XTickLabels' properties
    # (those are MATLAB names, and Axes.set() raises on them); use the
    # explicit setters instead.
    ax2.set_xlim(xlim1)
    ax2.set_xticks(atdf)
    ax2.set_xticklabels(prettydf)
    ax2.grid()
    ax1.yaxis.grid()

    ax2.set_xlabel('Degrees of Freedom')

    if sign_lambda < 0:
        ax1.set_xlabel('-log(Lambda)')
    else:
        ax1.set_xlabel('log(Lambda)')

    ax1.set_ylabel(cvobject['name'])
+ s Value(s) of the penalty parameter lambda at which predictions + are required. Default is the value s='lambda_1se' stored on + the CV object. Alternatively s='lambda_min' can be used. If s + is numeric, it is taken as the value(s) of lambda to be used. + If s is numeric, it must be a scipy 1D array. + options Other arguments to predict (see glmnetPredict). + + OUTPUT ARGUMENTS: + If only the cvglmnet object is provided, the function returns the + coefficients at the default s = 'lambda_1se'. Otherwise, the object + returned depends on the ... argument which is passed on to the + glmnetPredict for glmnet objects. + + + DETAILS: + This function makes it easier to use the results of cross-validation + to make a prediction. + + LICENSE: GPL-2 + + AUTHORS: + Algorithm was designed by Jerome Friedman, Trevor Hastie and Rob Tibshirani + Fortran code was written by Jerome Friedman + R wrapper (from which the MATLAB wrapper was adapted) was written by Trevor Hasite + The original MATLAB wrapper was written by Hui Jiang, + and is updated and maintained by Junyang Qian. + This Python wrapper (adapted from the Matlab and R wrappers) + is written by Balakumar B.J., bbalasub@stanford.edu + Department of Statistics, Stanford University, Stanford, California, USA. + + REFERENCES: + Friedman, J., Hastie, T. and Tibshirani, R. (2008) Regularization Paths for Generalized Linear Models via Coordinate Descent, + http://www.jstatsoft.org/v33/i01/ + Journal of Statistical Software, Vol. 33(1), 1-22 Feb 2010 + + Simon, N., Friedman, J., Hastie, T., Tibshirani, R. (2011) Regularization Paths for Cox's Proportional Hazards Model via Coordinate Descent, + http://www.jstatsoft.org/v39/i05/ + Journal of Statistical Software, Vol. 39(5) 1-13 + + Tibshirani, Robert., Bien, J., Friedman, J.,Hastie, T.,Simon, N.,Taylor, J. and Tibshirani, Ryan. 
def cvglmnetPredict(obj, newx = None, s = 'lambda_1se', **options):
    """Make predictions from a cross-validated glmnet model.

    obj     : dict returned by cvglmnet (holds 'glmnet_fit' and the
              selected lambda values).
    newx    : 2D array of new predictor values; when None, the stored
              coefficients at the default s are returned instead.
    s       : 'lambda_1se' (default), 'lambda_min', or a numpy float
              array of lambda values.
    options : extra keyword arguments forwarded to glmnetPredict.

    Raises ValueError when s is neither a float array nor one of the
    two recognized strings.
    """
    # No new data: return the coefficients at the default lambda.
    if newx is None:
        CVpred = cvglmnetCoef(obj)
        return(CVpred)

    if isinstance(s, np.ndarray) and np.issubdtype(s.dtype, np.floating):
        # Numeric lambdas supplied directly. Accept any float dtype
        # (the original required exactly float64, silently rejecting
        # float32 arrays into the error branch below).
        lambdau = s
    elif isinstance(s, str) and s in ['lambda_1se', 'lambda_min']:
        # Checking isinstance(s, str) first avoids an ambiguous
        # elementwise comparison when s is a non-float ndarray.
        lambdau = obj[s]
    else:
        raise ValueError('Invalid form for s')

    CVpred = glmnetPredict(obj['glmnet_fit'], newx, lambdau, **options)

    return(CVpred)
def cvlognet(fit, \
             lambdau, \
             x, \
             y, \
             weights, \
             offset, \
             foldid, \
             ptype, \
             grouped, \
             keep = False):
    """Cross-validation statistics for binomial (logistic) glmnet fits.

    Internal function called by cvglmnet; computes the CV loss curve
    (cvm) and its standard error (cvsd) over the lambda path.

    fit     : list of per-fold glmnet fit dicts.
    lambdau : lambda sequence of the full-data fit.
    x, y    : predictors and response (one label column, or a two-column
              matrix of class counts/proportions).
    weights : observation weights.
    offset  : offset array (empty array when unused).
    foldid  : 0-based fold assignment per observation.
    ptype   : 'deviance' (default), 'mse', 'mae', 'auc' or 'class'.
    grouped : whether to use the grouped-CV computation (cvcompute).
    keep    : when True, also return the prevalidated predictions.

    Returns dict with 'cvm', 'cvsd', 'name' and optionally 'fit_preval'.
    """
    typenames = {'deviance':'Binomial Deviance', 'mse':'Mean-Squared Error',
                 'mae':'Mean Absolute Error', 'auc':'AUC', 'class':'Misclassification Error'}
    if ptype == 'default':
        ptype = 'deviance'

    ptypeList = ['mse', 'mae', 'deviance', 'auc', 'class']
    if not ptype in ptypeList:
        print('Warning: only ', ptypeList, 'available for binomial models; ''deviance'' used')
        ptype = 'deviance'

    # clamp probabilities away from 0/1 before taking logs (deviance)
    prob_min = 1.0e-5
    prob_max = 1 - prob_min
    # expand a single label column into a two-column indicator matrix
    nc = y.shape[1]
    if nc == 1:
        classes, sy = np.unique(y, return_inverse = True)
        nc = len(classes)
        indexes = np.eye(nc, nc)
        y = indexes[sy, :]
    else:
        classes = np.arange(nc) + 1 # 1:nc

    N = y.size
    nfolds = np.amax(foldid) + 1
    # BUG FIX: the original tested `type == 'auc'`, comparing the builtin
    # `type` to a string (always False); the intent is the requested loss.
    if (N/nfolds < 10) and (ptype == 'auc'):
        print('Warning: Too few (<10) observations per fold for type.measure=auc in cvlognet')
        print('Warning: changed to type.measure = deviance. Alternately, use smaller value ')
        print('Warning: for nfolds')
        ptype = 'deviance'

    if (N/nfolds < 3) and grouped:
        print('Warning: option grouped = False enforced in cvglmnet as there are < 3 observations per fold')
        grouped = False

    is_offset = not(len(offset) == 0)
    # prevalidated predictions; nan marks lambdas a fold did not reach
    # (np.nan, not the np.NAN alias removed in numpy 2.0)
    predmat = np.ones([y.shape[0], lambdau.size])*np.nan
    nfolds = np.amax(foldid) + 1
    nlams = []
    for i in range(nfolds):
        which = foldid == i
        fitobj = fit[i].copy()
        if is_offset:
            off_sub = offset[which, ]
        else:
            off_sub = np.empty([0])
        preds = glmnetPredict(fitobj, x[which, ], np.empty([0]), 'response', False, off_sub)
        nlami = np.size(fit[i]['lambdau'])
        predmat[which, 0:nlami] = preds
        nlams.append(nlami)
    # np.integer is not a valid dtype on recent numpy; builtin int is
    nlams = np.array(nlams, dtype = int)

    if ptype == 'auc':
        cvraw = np.zeros([nfolds, lambdau.size])*np.nan
        good = np.zeros([nfolds, lambdau.size])
        for i in range(nfolds):
            good[i, 0:nlams[i]] = 1
            which = foldid == i
            for j in range(nlams[i]):
                cvraw[i,j] = auc_mat(y[which,], predmat[which,j], weights[which])
        N = np.sum(good, axis = 0)
        sweights = np.zeros([nfolds, 1])
        for i in range(nfolds):
            sweights[i]= np.sum(weights[foldid == i], axis = 0)
        weights = sweights
    else:
        # normalize rows of y to proportions, folding totals into weights
        ywt = np.sum(y, axis = 1, keepdims = True)
        y = y/np.tile(ywt, [1, y.shape[1]])
        weights = weights*ywt
        N = y.shape[0] - np.sum(np.isnan(predmat), axis = 0, keepdims = True)
        yy1 = np.tile(y[:,0:1], [1, lambdau.size])
        yy2 = np.tile(y[:,1:2], [1, lambdau.size])

        if ptype == 'mse':
            cvraw = (yy1 - (1 - predmat))**2 + (yy2 - (1 - predmat))**2
        elif ptype == 'deviance':
            predmat = np.minimum(np.maximum(predmat, prob_min), prob_max)
            lp = yy1*np.log(1-predmat) + yy2*np.log(predmat)
            ly = np.log(y)
            ly[y == 0] = 0
            ly = np.dot(y*ly, np.array([1.0, 1.0]).reshape([2,1]))
            cvraw = 2*(np.tile(ly, [1, lambdau.size]) - lp)
        elif ptype == 'mae':
            cvraw = np.absolute(yy1 - (1 - predmat)) + np.absolute(yy2 - (1 - predmat))
        elif ptype == 'class':
            cvraw = yy1*(predmat > 0.5) + yy2*(predmat <= 0.5)

    if y.size/nfolds < 3 and grouped == True:
        print('Option grouped=false enforced in cv.glmnet, since < 3 observations per fold')
        grouped = False

    if grouped == True:
        cvob = cvcompute(cvraw, weights, foldid, nlams)
        cvraw = cvob['cvraw']
        weights = cvob['weights']
        N = cvob['N']

    cvm = wtmean(cvraw, weights)
    sqccv = (cvraw - cvm)**2
    cvsd = np.sqrt(wtmean(sqccv, weights)/(N-1))

    result = dict()
    result['cvm'] = cvm
    result['cvsd'] = cvsd
    result['name'] = typenames[ptype]

    if keep:
        result['fit_preval'] = predmat

    return(result)

# end of cvlognet
#=========================
#
#=========================
# Helper functions
#=========================
def auc_mat(y, prob, weights = None):
    """Weighted AUC for an indicator-matrix response y.

    Builds a doubled 0/1 response with the indicator columns folded into
    the weights, then delegates to auc().
    """
    # BUG FIX: `weights == None` compares elementwise for ndarrays and
    # makes the `or` ambiguous; identity test is the correct check.
    if weights is None or len(weights) == 0:
        weights = np.ones([y.shape[0], 1])
    wweights = weights*y
    wweights = wweights.flatten()
    wweights = np.reshape(wweights, [1, wweights.size])
    ny = y.shape[0]
    a = np.zeros([ny, 1])
    b = np.ones([ny, 1])
    yy = np.vstack((a, b))
    pprob = np.vstack((prob, prob))
    result = auc(yy, pprob, wweights)
    return(result)
#=========================
def auc(y, prob, w):
    """AUC of scores `prob` against binary labels `y`.

    With empty w: rank-based (Mann-Whitney U) computation, using a tiny
    random perturbation to break ties. With weights: weighted pair-count
    computation over the score ordering.
    """
    if len(w) == 0:
        mindiff = np.amin(np.diff(np.unique(prob)))
        pert = np.random.uniform(0, mindiff/3, prob.size)
        t, rprob = np.unique(prob + pert, return_inverse = True)
        n1 = np.sum(y, keepdims = True)
        n0 = y.shape[0] - n1
        u = np.sum(rprob[y == 1]) - n1*(n1 + 1)/2
        result = u/(n1*n0)
    else:
        op = np.argsort(prob)
        y = y[op]
        w = w[op]
        cw = np.cumsum(w)
        w1 = w[y == 1]
        cw1 = np.cumsum(w1)
        wauc = np.sum(w1*(cw[y == 1] - cw1))
        sumw = cw1[-1]
        # BUG FIX: the original referenced an undefined name `c1`; the
        # total weight is the last cumulative sum, cw[-1].
        sumw = sumw*(cw[-1] - sumw)
        result = wauc/sumw
    return(result)
#=========================
def cvmrelnet(fit, \
              lambdau, \
              x, \
              y, \
              weights, \
              offset, \
              foldid, \
              ptype, \
              grouped, \
              keep = False):
    """Cross-validation statistics for multi-response Gaussian fits.

    Internal function called by cvglmnet. Computes the CV loss curve
    (cvm) and its standard error (cvsd) over the lambda path for
    family='mgaussian'. Returns a dict with 'cvm', 'cvsd', 'name' and,
    when keep=True, the prevalidated prediction array 'fit_preval'.
    """
    typenames = {'deviance':'Mean-Squared Error', 'mse':'Mean-Squared Error',
                 'mae':'Mean Absolute Error'}
    if ptype == 'default':
        ptype = 'mse'

    # only squared-error style losses make sense for Gaussian responses
    ptypeList = ['mse', 'mae', 'deviance']
    if not ptype in ptypeList:
        print('Warning: only ', ptypeList, 'available for Gaussian models; ''mse'' used')
        ptype = 'mse'

    nobs, nc = y.shape

    if len(offset) > 0:
        y = y - offset

    # prevalidated predictions: [nobs, nresponses, nlambda], NaN where a
    # fold's path stopped short.
    # NOTE(review): np.NAN and (below) np.integer were removed from
    # recent numpy releases -- confirm the supported numpy version.
    predmat = np.ones([nobs, nc, lambdau.size])*np.NAN
    nfolds = np.amax(foldid) + 1
    nlams = []
    for i in range(nfolds):
        which = foldid == i
        fitobj = fit[i].copy()
        fitobj['offset'] = False
        preds = glmnetPredict(fitobj, x[which, ])
        nlami = np.size(fit[i]['lambdau'])
        # NOTE(review): this slices axis 1 (the response dimension) with
        # 0:nlami; the lambda axis is axis 2, so this looks like it
        # should be predmat[which, :, 0:nlami] -- confirm.
        predmat[which, 0:nlami] = preds
        nlams.append(nlami)
    # convert nlams to scipy array
    nlams = np.array(nlams, dtype = np.integer)

    # count of non-NaN predictions per lambda (probed via response col 1)
    N = nobs - np.reshape(np.sum(np.isnan(predmat[:, 1, :]), axis = 0), (1, -1))
    bigY = np.tile(y[:, :, None], [1, 1, lambdau.size])

    if ptype == 'mse':
        cvraw = np.sum((bigY - predmat)**2, axis = 1).squeeze()
    elif ptype == 'mae':
        cvraw = np.sum(np.absolute(bigY - predmat), axis = 1).squeeze()

    if y.size/nfolds < 3 and grouped == True:
        print('Option grouped=false enforced in cv.glmnet, since < 3 observations per fold')
        grouped = False

    if grouped == True:
        cvob = cvcompute(cvraw, weights, foldid, nlams)
        cvraw = cvob['cvraw']
        weights = cvob['weights']
        N = cvob['N']

    # weighted mean loss and its standard error across observations
    cvm = wtmean(cvraw, weights)
    sqccv = (cvraw - cvm)**2
    cvsd = np.sqrt(wtmean(sqccv, weights)/(N-1))

    result = dict()
    result['cvm'] = cvm
    result['cvsd'] = cvsd
    result['name'] = typenames[ptype]

    if keep:
        result['fit_preval'] = predmat

    return(result)

# end of cvelnet
#=========================

def cvmultnet(fit, \
              lambdau, \
              x, \
              y, \
              weights, \
              offset, \
              foldid, \
              ptype, \
              grouped, \
              keep = False):
    """Cross-validation statistics for multinomial glmnet fits.

    Internal function called by cvglmnet. Computes the CV loss curve
    (cvm) and its standard error (cvsd) over the lambda path for
    family='multinomial'. Returns a dict with 'cvm', 'cvsd', 'name'
    and, when keep=True, 'fit_preval'.
    """
    typenames = {'deviance':'Multinomial Deviance', 'mse':'Mean-Squared Error',
                 'mae':'Mean Absolute Error', 'class':'Misclassification Error'}
    if ptype == 'default':
        ptype = 'deviance'

    ptypeList = ['mse', 'mae', 'deviance', 'class']
    if not ptype in ptypeList:
        print('Warning: only ', ptypeList, 'available for multinomial models; ''deviance'' used')
        ptype = 'deviance'

    # clamp probabilities away from 0/1 before taking logs (deviance)
    prob_min = 1.0e-5
    prob_max = 1 - prob_min
    # expand a single label column into an indicator matrix with one
    # column per class
    nc = y.shape
    if nc[1] == 1:
        classes, sy = np.unique(y, return_inverse = True)
        nc = len(classes)
        indexes = np.eye(nc, nc)
        y = indexes[sy, :]
    else:
        nc = nc[1]

    is_offset = not(len(offset) == 0)
    # prevalidated predictions: [nobs, nclasses, nlambda]
    # NOTE(review): np.NAN / np.integer (below) are removed in recent
    # numpy -- confirm the supported numpy version.
    predmat = np.ones([y.shape[0], nc, lambdau.size])*np.NAN
    nfolds = np.amax(foldid) + 1
    nlams = []
    for i in range(nfolds):
        which = foldid == i
        fitobj = fit[i].copy()
        if is_offset:
            off_sub = offset[which, ]
        else:
            off_sub = np.empty([0])
        preds = glmnetPredict(fitobj, x[which, ], np.empty([0]), 'response', False, off_sub)
        nlami = np.size(fit[i]['lambdau'])
        predmat[which, 0:nlami] = preds
        nlams.append(nlami)
    # convert nlams to np array
    nlams = np.array(nlams, dtype = np.integer)

    # normalize rows of y to proportions, folding totals into weights
    ywt = np.sum(y, axis = 1, keepdims = True)
    y = y/np.tile(ywt, [1, y.shape[1]])
    weights = weights*ywt
    N = y.shape[0] - np.sum(np.isnan(predmat[:,1,:]), axis = 0, keepdims = True)
    bigY = np.tile(y[:, :, None], [1, 1, lambdau.size])

    if ptype == 'mse':
        cvraw = np.sum((bigY - predmat)**2, axis = 1).squeeze()
    elif ptype == 'deviance':
        predmat = np.minimum(np.maximum(predmat, prob_min), prob_max)
        lp = bigY*np.log(predmat)
        ly = bigY*np.log(bigY)
        ly[y == 0] = 0
        cvraw = np.sum(2*(ly - lp), axis = 1).squeeze()
    elif ptype == 'mae':
        cvraw = np.sum(np.absolute(bigY - predmat), axis = 1).squeeze()
    elif ptype == 'class':
        # NOTE(review): glmnet_softmax returns a [n, 1] array; assigning
        # it into the 1-D column classid[:, i] looks shape-incompatible
        # -- confirm this branch is exercised/tested.
        classid = np.zeros([y.shape[0], lambdau.size])*np.NaN
        for i in range(lambdau.size):
            classid[:, i] = glmnet_softmax(predmat[:,:,i])
        classid = classid.reshape([classid.size,1])
        yperm = bigY.transpose((0,2,1))
        yperm = yperm.reshape([yperm.size, 1])
        idx = sub2ind(yperm.shape, range(len(classid)), classid.transpose())
        cvraw = np.reshape(1 - yperm[idx], [-1, lambdau.size]);

    if grouped == True:
        cvob = cvcompute(cvraw, weights, foldid, nlams)
        cvraw = cvob['cvraw']
        weights = cvob['weights']
        N = cvob['N']

    # weighted mean loss and its standard error across observations
    cvm = wtmean(cvraw, weights)
    sqccv = (cvraw - cvm)**2
    cvsd = np.sqrt(wtmean(sqccv, weights)/(N-1))

    result = dict()
    result['cvm'] = cvm
    result['cvsd'] = cvsd
    result['name'] = typenames[ptype]

    if keep:
        result['fit_preval'] = predmat

    return(result)

# end of cvelnet
#=========================
#
#=========================
# Helper functions
#=========================
def sub2ind(array_shape, rows, cols):
    # row-major linear index of (rows, cols) in an array of the given
    # shape (MATLAB sub2ind analogue)
    return rows*array_shape[1] + cols
#=========================
def glmnet_softmax(x):
    # argmax-style class assignment per row of the probability matrix x,
    # with NaN rows propagated as NaN
    d = x.shape
    nas = np.any(np.isnan(x), axis = 1)
    if np.any(nas):
        pclass = np.zeros([d[0], 1])*np.NaN
        if np.sum(nas) < d[0]:
            # recurse on the NaN-free rows only
            pclass2 = glmnet_softmax(x[~nas, :])
            pclass[~nas] = pclass2
        result = pclass
    else:
        # NOTE(review): seeding maxdist with column 1 (not 0) looks like
        # a MATLAB 1-based leftover; column 0 never wins outright --
        # confirm against the MATLAB original.
        maxdist = x[:, 1]
        pclass = np.ones([d[0], 1])
        for i in range(1, d[1], 1):
            t = x[:, i] > maxdist
            pclass[t] = i
            maxdist[t] = x[t, i]
        result = pclass

    return(result)
#=========================
class dataprocess(object):
    """
    Data processing helpers for glmnet-python.
    """
    def __init__(self):
        """No state is kept; the class only groups helper methods."""
        pass

    def sparseDf(self, df, matrixType="csc"):
        """
        Convert a pandas sparse DataFrame to a scipy sparse matrix.

        :param df: pandas sparse DataFrame
        :param matrixType: "csc" or "csr"
        :return: scipy.sparse CSC or CSR matrix
        :raises ValueError: for any other matrixType
        """
        # NOTE(review): relies on the legacy pandas sparse API
        # (Series.sp_values / Series.sp_index), removed in pandas 1.0 --
        # confirm the supported pandas version.
        columns = df.columns
        # per column: non-fill values and their row indices
        dat, rows = map(list, zip(
            *[(df[col].sp_values - df[col].fill_value, df[col].sp_index.to_int_index().indices) for col in columns]))
        cols = [np.ones_like(a) * i for (i, a) in enumerate(dat)]
        datF, rowsF, colsF = np.concatenate(dat), np.concatenate(rows), np.concatenate(cols)
        arr = sparse.coo_matrix((datF, (rowsF, colsF)), df.shape, dtype=np.float64)
        if matrixType == "csc":
            return arr.tocsc()
        elif matrixType == "csr":
            # BUG FIX: the original returned tocsc() here too, so a "csr"
            # request silently produced a CSC matrix.
            return arr.tocsr()
        else:
            raise ValueError("Only accept csc or csr")

def main():
    # NOTE(review): calls the method unbound and without arguments, which
    # raises TypeError; this looks like placeholder/demo code.
    dataprocess.sparseDf()

if __name__ == '__main__':
    main()
def elnet(x, is_sparse, irs, pcs, y, weights, offset, gtype, parm, lempty,
          nvars, jd, vp, cl, ne, nx, nlam, flmin, ulam, thresh, isd, intr,
          maxit, family):
    """Gaussian-family driver: call the elnet/spelnet fortran routines.

    Internal function called by glmnet; see also glmnet, cvglmnet.
    Marshals all inputs into fortran-ordered ctypes buffers, invokes the
    dense (elnet_) or sparse (spelnet_) solver, and unpacks the outputs
    into a fit dict (a0, beta, dev, nulldev, df, lambdau, npasses, jerr,
    dim, offset, class).

    x/irs/pcs hold the data matrix; for the sparse path x contains the
    nonzero values with irs/pcs the row indices and column pointers.
    gtype selects the 'covariance' or 'naive' algorithm; parm is the
    elastic-net alpha; lempty flags an auto-generated lambda path; the
    remaining arguments are fortran control parameters passed through.
    `family` is unused here; kept for a uniform driver signature.
    """
    # load shared fortran library
    glmlib = loadGlmLib()

    # pre-process data: weighted mean of y
    ybar = np.dot(np.transpose(y), weights)
    ybar = ybar/sum(weights)
    # NOTE(review): this is a per-observation vector of weighted squared
    # residuals, not their sum -- confirm how callers consume 'nulldev'.
    nulldev = (y - ybar)**2 * weights
    # map gtype to the fortran 'ka' flag
    lst = ['covariance', 'naive']
    ka = [i for i in range(len(lst)) if lst[i] == gtype]
    if len(ka) == 0:
        raise ValueError('unrecognized type for ka');
    else:
        ka = ka[0] + 1 # convert from 0-based to 1-based index for fortran
    # offset: empty means none was supplied
    if len(offset) == 0:
        offset = y*0
        is_offset = False
    else:
        is_offset = True

    # remove offset from y
    y = y - offset

    # now convert types and allocate memory before calling
    # glmnet fortran library
    ######################################
    # --------- PROCESS INPUTS -----------
    ######################################
    # force inputs into fortran order and into the correct scipy datatype
    copyFlag = False
    x = x.astype(dtype = np.float64, order = 'F', copy = copyFlag)
    irs = irs.astype(dtype = np.int32, order = 'F', copy = copyFlag)
    pcs = pcs.astype(dtype = np.int32, order = 'F', copy = copyFlag)
    y = y.astype(dtype = np.float64, order = 'F', copy = copyFlag)
    weights = weights.astype(dtype = np.float64, order = 'F', copy = copyFlag)
    jd = jd.astype(dtype = np.int32, order = 'F', copy = copyFlag)
    vp = vp.astype(dtype = np.float64, order = 'F', copy = copyFlag)
    cl = cl.astype(dtype = np.float64, order = 'F', copy = copyFlag)
    ulam = ulam.astype(dtype = np.float64, order = 'F', copy = copyFlag)

    ######################################
    # --------- ALLOCATE OUTPUTS ---------
    ######################################
    # lmu: number of lambda values actually computed
    lmu = -1
    lmu_r = ctypes.c_int(lmu)
    # a0: intercept per lambda
    a0 = np.zeros([nlam], dtype = np.float64)
    a0 = a0.astype(dtype = np.float64, order = 'F', copy = False)
    a0_r = a0.ctypes.data_as(ctypes.POINTER(ctypes.c_double))
    # ca: compressed coefficient matrix
    ca = np.zeros([nx, nlam], dtype = np.float64)
    ca = ca.astype(dtype = np.float64, order = 'F', copy = False)
    ca_r = ca.ctypes.data_as(ctypes.POINTER(ctypes.c_double))
    # ia: 1-based indices of active variables
    ia = -1*np.ones([nx], dtype = np.int32)
    ia = ia.astype(dtype = np.int32, order = 'F', copy = False)
    ia_r = ia.ctypes.data_as(ctypes.POINTER(ctypes.c_int))
    # nin: number of active variables per lambda
    nin = -1*np.ones([nlam], dtype = np.int32)
    nin = nin.astype(dtype = np.int32, order = 'F', copy = False)
    nin_r = nin.ctypes.data_as(ctypes.POINTER(ctypes.c_int))
    # rsq: fraction of deviance explained per lambda
    rsq = -1*np.ones([nlam], dtype = np.float64)
    rsq = rsq.astype(dtype = np.float64, order = 'F', copy = False)
    rsq_r = rsq.ctypes.data_as(ctypes.POINTER(ctypes.c_double))
    # alm: lambda values used
    alm = -1*np.ones([nlam], dtype = np.float64)
    alm = alm.astype(dtype = np.float64, order = 'F', copy = False)
    alm_r = alm.ctypes.data_as(ctypes.POINTER(ctypes.c_double))
    # nlp: number of passes over the data
    nlp = -1
    nlp_r = ctypes.c_int(nlp)
    # jerr: fortran error flag (0 = ok, >0 fatal, <0 non-fatal)
    jerr = -1
    jerr_r = ctypes.c_int(jerr)


    # ###################################
    # main glmnet fortran caller
    # ###################################
    if is_sparse:
        # sparse elnet
        glmlib.spelnet_(
            ctypes.byref(ctypes.c_int(ka)),
            ctypes.byref(ctypes.c_double(parm)),
            ctypes.byref(ctypes.c_int(len(weights))),
            ctypes.byref(ctypes.c_int(nvars)),
            x.ctypes.data_as(ctypes.POINTER(ctypes.c_double)),
            pcs.ctypes.data_as(ctypes.POINTER(ctypes.c_int)),
            irs.ctypes.data_as(ctypes.POINTER(ctypes.c_int)),
            y.ctypes.data_as(ctypes.POINTER(ctypes.c_double)),
            weights.ctypes.data_as(ctypes.POINTER(ctypes.c_double)),
            jd.ctypes.data_as(ctypes.POINTER(ctypes.c_int)),
            vp.ctypes.data_as(ctypes.POINTER(ctypes.c_double)),
            cl.ctypes.data_as(ctypes.POINTER(ctypes.c_double)),
            ctypes.byref(ctypes.c_int(ne)),
            ctypes.byref(ctypes.c_int(nx)),
            ctypes.byref(ctypes.c_int(nlam)),
            ctypes.byref(ctypes.c_double(flmin)),
            ulam.ctypes.data_as(ctypes.POINTER(ctypes.c_double)),
            ctypes.byref(ctypes.c_double(thresh)),
            ctypes.byref(ctypes.c_int(isd)),
            ctypes.byref(ctypes.c_int(intr)),
            ctypes.byref(ctypes.c_int(maxit)),
            ctypes.byref(lmu_r),
            a0_r,
            ca_r,
            ia_r,
            nin_r,
            rsq_r,
            alm_r,
            ctypes.byref(nlp_r),
            ctypes.byref(jerr_r)
            )
    else:
        # call fortran elnet routine
        glmlib.elnet_(
            ctypes.byref(ctypes.c_int(ka)),
            ctypes.byref(ctypes.c_double(parm)),
            ctypes.byref(ctypes.c_int(len(weights))),
            ctypes.byref(ctypes.c_int(nvars)),
            x.ctypes.data_as(ctypes.POINTER(ctypes.c_double)),
            y.ctypes.data_as(ctypes.POINTER(ctypes.c_double)),
            weights.ctypes.data_as(ctypes.POINTER(ctypes.c_double)),
            jd.ctypes.data_as(ctypes.POINTER(ctypes.c_int)),
            vp.ctypes.data_as(ctypes.POINTER(ctypes.c_double)),
            cl.ctypes.data_as(ctypes.POINTER(ctypes.c_double)),
            ctypes.byref(ctypes.c_int(ne)),
            ctypes.byref(ctypes.c_int(nx)),
            ctypes.byref(ctypes.c_int(nlam)),
            ctypes.byref(ctypes.c_double(flmin)),
            ulam.ctypes.data_as(ctypes.POINTER(ctypes.c_double)),
            ctypes.byref(ctypes.c_double(thresh)),
            ctypes.byref(ctypes.c_int(isd)),
            ctypes.byref(ctypes.c_int(intr)),
            ctypes.byref(ctypes.c_int(maxit)),
            ctypes.byref(lmu_r),
            a0_r,
            ca_r,
            ia_r,
            nin_r,
            rsq_r,
            alm_r,
            ctypes.byref(nlp_r),
            ctypes.byref(jerr_r)
            )

    # ###################################
    # post process results
    # ###################################

    # check for error
    if (jerr_r.value > 0):
        raise ValueError("Fatal glmnet error in library call : error code = ", jerr_r.value)
    elif (jerr_r.value < 0):
        print("Warning: Non-fatal error in glmnet library call: error code = ", jerr_r.value)
        print("Check results for accuracy. Partial or no results returned.")

    # clip output to correct sizes
    lmu = lmu_r.value
    a0 = a0[0:lmu]
    ca = ca[0:nx, 0:lmu]
    ia = ia[0:nx]
    nin = nin[0:lmu]
    rsq = rsq[0:lmu]
    alm = alm[0:lmu]

    # ninmax: largest active set along the path
    ninmax = max(nin)
    # fix first value of alm (from inf to correct value) by
    # log-extrapolating from the next two lambdas
    if lempty:
        t1 = np.log(alm[1])
        t2 = np.log(alm[2])
        alm[0] = np.exp(2*t1 - t2)
    # create return fit dictionary: scatter the compressed coefficients
    # back into an nvars x lmu matrix
    if ninmax > 0:
        ca = ca[0:ninmax, :]
        df = np.sum(np.absolute(ca) > 0, axis=0)
        ja = ia[0:ninmax] - 1 # ia is 1-indexed in fortran
        oja = np.argsort(ja)
        ja1 = ja[oja]
        beta = np.zeros([nvars, lmu], dtype = np.float64)
        beta[ja1, :] = ca[oja, :]
    else:
        beta = np.zeros([nvars, lmu], dtype = np.float64)
        df = np.zeros([1, lmu], dtype = np.float64)

    fit = dict()
    fit['a0'] = a0
    fit['beta'] = beta
    fit['dev'] = rsq
    fit['nulldev'] = nulldev
    fit['df']= df
    fit['lambdau'] = alm
    fit['npasses'] = nlp_r.value
    fit['jerr'] = jerr_r.value
    fit['dim'] = np.array([nvars, lmu], dtype = np.integer)
    fit['offset'] = is_offset
    fit['class'] = 'elnet'

    # ###################################
    # return to caller
    # ###################################

    return fit
#-----------------------------------------
# end of method elmnet
#-----------------------------------------
def fishnet(x, is_sparse, irs, pcs, y, weights, offset, parm,
            nobs, nvars, jd, vp, cl, ne, nx, nlam, flmin, ulam,
            thresh, isd, intr, maxit, family):
    """Poisson-family driver: call the fishnet/spfishnet fortran routines.

    Internal function called by glmnet; see also glmnet, cvglmnet.
    Marshals all inputs into fortran-ordered ctypes buffers, invokes the
    dense (fishnet_) or sparse (spfishnet_) solver, and unpacks the
    outputs into a fit dict (a0, beta, dev, nulldev, df, lambdau,
    npasses, jerr, dim, offset, class).

    x/irs/pcs hold the data matrix; for the sparse path x contains the
    nonzero values with irs/pcs the row indices and column pointers.
    parm is the elastic-net alpha; the remaining arguments are fortran
    control parameters passed through. `family` is unused here; kept for
    a uniform driver signature. Raises ValueError when y has negative
    entries (invalid for Poisson counts).
    """
    # load shared fortran library
    glmlib = loadGlmLib()

    if np.any( y < 0):
        raise ValueError('negative responses not permitted for Poisson family')

    # offset: empty means none was supplied
    if len(offset) == 0:
        offset = y*0
        is_offset = False
    else:
        is_offset = True

    # now convert types and allocate memory before calling
    # glmnet fortran library
    ######################################
    # --------- PROCESS INPUTS -----------
    ######################################
    # force inputs into fortran order and scipy float64
    copyFlag = False
    x = x.astype(dtype = np.float64, order = 'F', copy = copyFlag)
    irs = irs.astype(dtype = np.int32, order = 'F', copy = copyFlag)
    pcs = pcs.astype(dtype = np.int32, order = 'F', copy = copyFlag)
    y = y.astype(dtype = np.float64, order = 'F', copy = copyFlag)
    weights = weights.astype(dtype = np.float64, order = 'F', copy = copyFlag)
    offset = offset.astype(dtype = np.float64, order = 'F', copy = copyFlag)
    jd = jd.astype(dtype = np.int32, order = 'F', copy = copyFlag)
    vp = vp.astype(dtype = np.float64, order = 'F', copy = copyFlag)
    cl = cl.astype(dtype = np.float64, order = 'F', copy = copyFlag)
    ulam = ulam.astype(dtype = np.float64, order = 'F', copy = copyFlag)

    ######################################
    # --------- ALLOCATE OUTPUTS ---------
    ######################################
    # lmu: number of lambda values actually computed
    lmu = -1
    lmu_r = ctypes.c_int(lmu)
    # a0: intercept per lambda
    a0 = np.zeros([nlam], dtype = np.float64)
    a0 = a0.astype(dtype = np.float64, order = 'F', copy = False)
    a0_r = a0.ctypes.data_as(ctypes.POINTER(ctypes.c_double))
    # ca: compressed coefficient matrix
    ca = np.zeros([nx, nlam], dtype = np.float64)
    ca = ca.astype(dtype = np.float64, order = 'F', copy = False)
    ca_r = ca.ctypes.data_as(ctypes.POINTER(ctypes.c_double))
    # ia: 1-based indices of active variables
    ia = -1*np.ones([nx], dtype = np.int32)
    ia = ia.astype(dtype = np.int32, order = 'F', copy = False)
    ia_r = ia.ctypes.data_as(ctypes.POINTER(ctypes.c_int))
    # nin: number of active variables per lambda
    nin = -1*np.ones([nlam], dtype = np.int32)
    nin = nin.astype(dtype = np.int32, order = 'F', copy = False)
    nin_r = nin.ctypes.data_as(ctypes.POINTER(ctypes.c_int))
    # dev: fraction of deviance explained per lambda
    dev = -1*np.ones([nlam], dtype = np.float64)
    dev = dev.astype(dtype = np.float64, order = 'F', copy = False)
    dev_r = dev.ctypes.data_as(ctypes.POINTER(ctypes.c_double))
    # alm: lambda values used
    alm = -1*np.ones([nlam], dtype = np.float64)
    alm = alm.astype(dtype = np.float64, order = 'F', copy = False)
    alm_r = alm.ctypes.data_as(ctypes.POINTER(ctypes.c_double))
    # nlp: number of passes over the data
    nlp = -1
    nlp_r = ctypes.c_int(nlp)
    # jerr: fortran error flag (0 = ok, >0 fatal, <0 non-fatal)
    jerr = -1
    jerr_r = ctypes.c_int(jerr)
    # dev0: null deviance
    dev0 = -1
    dev0_r = ctypes.c_double(dev0)

    # ###################################
    # main glmnet fortran caller
    # ###################################
    if is_sparse:
        # sparse fishnet
        glmlib.spfishnet_(
            ctypes.byref(ctypes.c_double(parm)),
            ctypes.byref(ctypes.c_int(nobs)),
            ctypes.byref(ctypes.c_int(nvars)),
            x.ctypes.data_as(ctypes.POINTER(ctypes.c_double)),
            pcs.ctypes.data_as(ctypes.POINTER(ctypes.c_int)),
            irs.ctypes.data_as(ctypes.POINTER(ctypes.c_int)),
            y.ctypes.data_as(ctypes.POINTER(ctypes.c_double)),
            offset.ctypes.data_as(ctypes.POINTER(ctypes.c_double)),
            weights.ctypes.data_as(ctypes.POINTER(ctypes.c_double)),
            jd.ctypes.data_as(ctypes.POINTER(ctypes.c_int)),
            vp.ctypes.data_as(ctypes.POINTER(ctypes.c_double)),
            cl.ctypes.data_as(ctypes.POINTER(ctypes.c_double)),
            ctypes.byref(ctypes.c_int(ne)),
            ctypes.byref(ctypes.c_int(nx)),
            ctypes.byref(ctypes.c_int(nlam)),
            ctypes.byref(ctypes.c_double(flmin)),
            ulam.ctypes.data_as(ctypes.POINTER(ctypes.c_double)),
            ctypes.byref(ctypes.c_double(thresh)),
            ctypes.byref(ctypes.c_int(isd)),
            ctypes.byref(ctypes.c_int(intr)),
            ctypes.byref(ctypes.c_int(maxit)),
            ctypes.byref(lmu_r),
            a0_r,
            ca_r,
            ia_r,
            nin_r,
            ctypes.byref(dev0_r),
            dev_r,
            alm_r,
            ctypes.byref(nlp_r),
            ctypes.byref(jerr_r)
            )
    else:
        glmlib.fishnet_(
            ctypes.byref(ctypes.c_double(parm)),
            ctypes.byref(ctypes.c_int(nobs)),
            ctypes.byref(ctypes.c_int(nvars)),
            x.ctypes.data_as(ctypes.POINTER(ctypes.c_double)),
            y.ctypes.data_as(ctypes.POINTER(ctypes.c_double)),
            offset.ctypes.data_as(ctypes.POINTER(ctypes.c_double)),
            weights.ctypes.data_as(ctypes.POINTER(ctypes.c_double)),
            jd.ctypes.data_as(ctypes.POINTER(ctypes.c_int)),
            vp.ctypes.data_as(ctypes.POINTER(ctypes.c_double)),
            cl.ctypes.data_as(ctypes.POINTER(ctypes.c_double)),
            ctypes.byref(ctypes.c_int(ne)),
            ctypes.byref(ctypes.c_int(nx)),
            ctypes.byref(ctypes.c_int(nlam)),
            ctypes.byref(ctypes.c_double(flmin)),
            ulam.ctypes.data_as(ctypes.POINTER(ctypes.c_double)),
            ctypes.byref(ctypes.c_double(thresh)),
            ctypes.byref(ctypes.c_int(isd)),
            ctypes.byref(ctypes.c_int(intr)),
            ctypes.byref(ctypes.c_int(maxit)),
            ctypes.byref(lmu_r),
            a0_r,
            ca_r,
            ia_r,
            nin_r,
            ctypes.byref(dev0_r),
            dev_r,
            alm_r,
            ctypes.byref(nlp_r),
            ctypes.byref(jerr_r)
            )

    # ###################################
    # post process results
    # ###################################

    # check for error
    if (jerr_r.value > 0):
        raise ValueError("Fatal glmnet error in library call : error code = ", jerr_r.value)
    elif (jerr_r.value < 0):
        print("Warning: Non-fatal error in glmnet library call: error code = ", jerr_r.value)
        print("Check results for accuracy. Partial or no results returned.")

    # clip output to correct sizes
    lmu = lmu_r.value
    a0 = a0[0:lmu]
    ca = ca[0:nx, 0:lmu]
    ia = ia[0:nx]
    nin = nin[0:lmu]
    dev = dev[0:lmu]
    alm = alm[0:lmu]

    # ninmax: largest active set along the path
    ninmax = max(nin)
    # fix first value of alm (from inf to correct value) by
    # log-extrapolating from the next two lambdas
    if ulam[0] == 0.0:
        t1 = np.log(alm[1])
        t2 = np.log(alm[2])
        alm[0] = np.exp(2*t1 - t2)
    # create return fit dictionary: scatter the compressed coefficients
    # back into an nvars x lmu matrix
    dd = np.array([nvars, lmu], dtype = np.integer)
    if ninmax > 0:
        ca = ca[0:ninmax, :]
        df = np.sum(np.absolute(ca) > 0, axis = 0)
        ja = ia[0:ninmax] - 1 # ia is 1-indexed in fortran
        oja = np.argsort(ja)
        ja1 = ja[oja]
        beta = np.zeros([nvars, lmu], dtype = np.float64)
        beta[ja1, :] = ca[oja, :]
    else:
        beta = np.zeros([nvars, lmu], dtype = np.float64)
        df = np.zeros([1, lmu], dtype = np.float64)

    fit = dict()
    fit['a0'] = a0
    fit['beta'] = beta
    fit['dev'] = dev
    fit['nulldev'] = dev0_r.value
    fit['df'] = df
    fit['lambdau'] = alm
    fit['npasses'] = nlp_r.value
    fit['jerr'] = jerr_r.value
    fit['dim'] = dd
    fit['offset'] = is_offset
    fit['class'] = 'fishnet'


    # ###################################
    # return to caller
    # ###################################

    return fit
#-----------------------------------------
# end of method lognet
#-----------------------------------------
+-------------------------------------------------------------------------- + +DESCRIPTION: +----------- + Fit a generalized linear model via penalized maximum likelihood. The + regularization path is computed for the lasso or elasticnet penalty + at a grid of values for the regularization parameter lambda. Can deal + with all shapes of data, including very large sparse data matrices. + Fits linear, logistic and multinomial, Poisson, and Cox regression + models. + +EXTERNAL FUNCTIONS: +------------------ + options = glmnetSet() # provided with this (glmnet python) package + +INPUT ARGUMENTS: +--------------- + x Input np 2D array of nobs x nvars (required). Each row is an + observation vector. Can be in sparse matrix format. Must be in + np csc_matrix format + + y Response variable (np 2D array of size nobs x 1, nobs x nc, etc). (required) + For family = 'gaussian', Quantitative column vector + For family = 'poisson' (non-negative counts), Quantitative column vector + For family = 'binomial', should be either a column vector with two + levels or a two column matrix of counts of proportions. + For family = 'multinomial', can be a column vector of nc >= 2 levels + or a matrix with nc columns of counts or proportions. + For family = 'cox', y should be a two-column array with the first column + for time and the second for status. The latter is a binary variable, + with 1 indicating death and 0 indicating right censored. + For family = 'mgaussian', y is an array of quantitative responses. + (see examples for illustrations) + + family Response type. Default is 'gaussian'. 
(optional) + Currently, 'gaussian', 'poisson', 'binomial', 'multinomial', 'mgaussian' + and 'cox' are supported + + options optional parameters that can be set and altered by glmnetSet() + Default values for some often used parameters: + alpha = 1.0 (elastic-net mixing parameter) + nlambda = 100 (number of lambda values) + lambdau depends on data, nlambda and lambda_min (user supplied lambda sequence) + standardize = True (variable standardization) + weights = all ones np vector (observation weights) + For more details see help for glmnetSet + +OUTPUT ARGUMENTS: +---------------- +fit glmnet(...) outputs a dict() of fit parameters with the following keys: + +a0 Intercept sequence of length len(fit['lambdau']) + +beta For 'elnet' and 'lognet' models, nvars x len(lambdau) array of coefficients + For 'multnet', a list of nc such matrices, one for each class + +lambdau The actual sequence of lambdau values used + +dev The fraction of (null) deviance explained (for 'elnet', this is the R-squared) + +nulldev Null deviance (per observation) + +df The number of nonzero coefficients for each value of lambdau. 
+           For 'multnet', this is the number of variables with a nonzero
+           coefficient for any class
+
+dfmat      For 'multnet' only: A 2D array consisting of the number of nonzero
+           coefficients per class
+
+dim        Dimension of coefficient matrix (ices)
+
+npasses    Total passes over the data summed over all lambdau values
+
+offset     A logical variable indicating whether an offset was included in the model
+
+jerr       Error flag, for warnings and errors (largely for internal debugging)
+
+class      Type of regression - internal usage
+
+EXAMPLES:
+--------
+    # Gaussian
+    x = np.random.rand(100, 10)
+    y = np.random.rand(100, 1)
+    fit = glmnet(x = x, y = y)
+    fit = glmnet(x = x, y = y, alpha = 0.5)
+    glmnetPrint(fit)
+    glmnetPredict(fit, np.empty([0]), np.array([0.01]), 'coef') # extract coefficients at a single value of lambdau
+    glmnetPredict(fit, x[0:10,:], np.array([0.01, 0.005])) # make predictions
+
+    # Multivariate Gaussian:
+    x = np.random.rand(100, 10)
+    y = np.random.rand(100,3)
+    fit = glmnet(x, y, 'mgaussian')
+    glmnetPlot(fit, 'norm', False, '2norm')
+
+    # Binomial
+    x = np.random.rand(100, 10)
+    y = np.random.rand(100,1)
+    y = (y > 0.5)*1.0
+    fit = glmnet(x = x, y = y, family = 'binomial', alpha = 0.5)
+
+    # Multinomial
+    x = np.random.rand(100,10)
+    y = np.random.rand(100,1)
+    y[y < 0.3] = 1.0
+    y[y < 0.6] = 2.0
+    y[y < 1.0] = 3.0
+    fit = glmnet(x = x, y = y, family = 'multinomial', mtype = 'grouped')
+
+    # poisson
+    x = np.random.rand(100,10)
+    y = np.random.poisson(size = [100, 1])*1.0
+    fit = glmnet(x = x, y = y, family = 'poisson')
+
+    # cox
+    N = 1000; p = 30;
+    nzc = int(p/3);
+    x = np.random.normal(size = [N, p])
+    beta = np.random.normal(size = [nzc, 1])
+    fx = np.dot(x[:, 0:nzc], beta/3)
+    hx = np.exp(fx)
+    ty = np.random.exponential(scale = 1/hx, size = [N, 1])
+    tcens = np.random.binomial(1, 0.3, size = [N, 1])
+    tcens = 1 - tcens
+    y = np.column_stack((ty, tcens))
+    fit = glmnet(x = x.copy(), y = y.copy(), family = 'cox')
+    glmnetPlot(fit)
+
+    # sparse example
+    N = 
1000000; + x = np.random.normal(size = [N,10]) + x[x < 3.0] = 0.0 + xs = scipy.sparse.csc_matrix(x, dtype = np.float64) + y = np.random.binomial(1, 0.5, size =[N,1]) + y = y*1.0 + st = time.time() + fit = glmnet.glmnet(x = xs, y = y, family = 'binomial') + en = time.time() + print("time elapsed (sparse) = ", en - st) + print("nbytes = ", xs.data.nbytes) + # non-sparse (same as sparse case) + st = time.time() + fit = glmnet.glmnet(x = x, y = y, family = 'binomial') + en = time.time() + print("time elapsed (full) = ", en - st) + print("nbytes = ", x.data.nbytes) + +DETAILS: +------- + The sequence of models implied by lambda is fit by coordinate descent. + For family='gaussian' this is the lasso sequence if alpha=1, else it + is the elasticnet sequence. For the other families, this is a lasso or + elasticnet regularization path for fitting the generalized linear + regression paths, by maximizing the appropriate penalized + log-likelihood (partial likelihood for the 'cox' model). Sometimes the + sequence is truncated before nlambda values of lambda have been used, + because of instabilities in the inverse link functions near a + saturated fit. glmnet(...,family='binomial') fits a traditional + logistic regression model for the log-odds. + glmnet(...,family='multinomial') fits a symmetric multinomial model, + where each class is represented by a linear model (on the log-scale). + The penalties take care of redundancies. A two-class 'multinomial' + model will produce the same fit as the corresponding 'binomial' model, + except the pair of coefficient matrices will be equal in magnitude and + opposite in sign, and half the 'binomial' values. Note that the + objective function for 'gaussian' is + + 1/2 RSS / nobs + lambda * penalty, + + and for the logistic models it is + + -loglik / nobs + lambda * penalty. 
+ + Note also that for 'gaussian', glmnet standardizes y to have unit + variance before computing its lambda sequence (and then unstandardizes + the resulting coefficients); if you wish to reproduce/compare results + with other software, best to supply a standardized y. The latest two + features in glmnet are the family='mgaussian' family and the + mtype='grouped' in options for multinomial fitting. The former + allows a multi-response gaussian model to be fit, using a "group + -lasso" penalty on the coefficients for each variable. Tying the + responses together like this is called "multi-task" learning in some + domains. The grouped multinomial allows the same penalty for the + family='multinomial' model, which is also multi-responsed. For both of + these the penalty on the coefficient vector for variable j is + + (1-alpha)/2 * ||beta_j||_2^2 + alpha * ||beta_j||_2 + + When alpha=1 this is a group-lasso penalty, and otherwise it mixes + with quadratic just like elasticnet. + +LICENSE: +------- + GPL-2 + +AUTHORS: +------- + Algorithm was designed by Jerome Friedman, Trevor Hastie and Rob Tibshirani + Fortran code was written by Jerome Friedman + R wrapper (from which the MATLAB wrapper was adapted) was written by Trevor Hasite + The original MATLAB wrapper was written by Hui Jiang, + and is updated and maintained by Junyang Qian. + This Python wrapper (adapted from the Matlab and R wrappers) + is written by Balakumar B.J., bbalasub@stanford.edu + Department of Statistics, Stanford University, Stanford, California, USA. + +REFERENCES: +---------- + Friedman, J., Hastie, T. and Tibshirani, R. (2008) Regularization Paths for Generalized Linear Models via Coordinate Descent, + http://www.jstatsoft.org/v33/i01/ + Journal of Statistical Software, Vol. 33(1), 1-22 Feb 2010 + + Simon, N., Friedman, J., Hastie, T., Tibshirani, R. 
# import packages/methods
from glmnetSet import glmnetSet
from glmnetControl import glmnetControl
import numpy as np
import scipy.sparse

from elnet import elnet
from lognet import lognet
from coxnet import coxnet
from mrelnet import mrelnet
from fishnet import fishnet


def glmnet(*, x, y, family='gaussian', **options):
    """Fit a GLM with lasso or elastic-net regularization.

    Keyword-only wrapper around the glmnet fortran routines.

    Args:
        x: np float64 2D array (nobs x nvars) of predictors, or a
            scipy.sparse csc_matrix of the same shape.
        y: np float64 response array; shape/interpretation depends on family.
        family: one of 'gaussian', 'binomial', 'poisson', 'multinomial',
            'cox', 'mgaussian'; unambiguous prefixes are accepted.
        **options: fit options; defaults are filled in by glmnetSet().

    Returns:
        dict describing the fitted model ('a0', 'beta', 'lambdau', 'dev',
        'df', 'dim', 'npasses', 'jerr', 'offset', 'class', ...).

    Raises:
        ValueError: on malformed inputs or inconsistent options.
    """
    # check inputs: make sure x and y are np, float64 arrays
    # fortran order is not checked as we force a convert later
    # NOTE: scipy.sparse.csc.csc_matrix (private submodule path) was removed in
    # modern SciPy; use the public scipy.sparse.csc_matrix name instead.
    if not isinstance(x, scipy.sparse.csc_matrix):
        if not (isinstance(x, np.ndarray) and x.dtype == 'float64'):
            raise ValueError('x input must be a np float64 ndarray')
    else:
        if not (x.dtype == 'float64'):
            raise ValueError('x input must be a float64 array')

    if not (isinstance(y, np.ndarray) and y.dtype == 'float64'):
        raise ValueError('y input must be a np float64 ndarray')

    # (the original guarded `options is None`, but **options is always a dict)

    ## match the family, abbreviation allowed
    fambase = ['gaussian', 'binomial', 'poisson', 'multinomial', 'cox', 'mgaussian']
    # indices of families matching the (possibly abbreviated) request;
    # the comprehension variable is NOT named x to avoid shadowing the data
    famind = [i for i, fam in enumerate(fambase) if fam.startswith(family.lower())]
    if len(famind) == 0:
        raise ValueError('Family should be one of ''gaussian'', ''binomial'', ''poisson'', ''multinomial'', ''cox'', ''mgaussian''')
    elif len(famind) > 1:
        raise ValueError('Family could not be uniquely determined : Use a longer description of the family string.')
    else:
        family = fambase[famind[0]]

    ## prepare options (fill defaults for anything the caller omitted)
    options = glmnetSet(options)

    ## error check options parameters
    alpha = np.float64(options['alpha'])
    if alpha > 1.0:
        print('Warning: alpha > 1.0; setting to 1.0')
        options['alpha'] = np.float64(1.0)

    if alpha < 0.0:
        print('Warning: alpha < 0.0; setting to 0.0')
        options['alpha'] = np.float64(0.0)

    parm = np.float64(options['alpha'])
    nlam = np.int32(options['nlambda'])
    nobs, nvars = x.shape

    # check weights length
    weights = options['weights'].copy()
    if len(weights) == 0:
        weights = np.ones([nobs, 1], dtype=np.float64)
    elif len(weights) != nobs:
        raise ValueError('Error: Number of elements in ''weights'' not equal to number of rows of ''x''')
    # check if weights are np nd array
    if not (isinstance(weights, np.ndarray) and weights.dtype == 'float64'):
        raise ValueError('weights input must be a np float64 ndarray')

    # check y length
    nrowy = y.shape[0]
    if nrowy != nobs:
        raise ValueError('Error: Number of elements in ''y'' not equal to number of rows of ''x''')

    # ne: max number of variables allowed in the model (dfmax)
    ne = options['dfmax']
    if len(ne) == 0:
        ne = nvars + 1

    # nx: max number of variables ever nonzero (pmax)
    nx = options['pmax']
    if len(nx) == 0:
        nx = min(ne * 2 + 20, nvars)

    # jd: excluded-variable spec passed to fortran
    exclude = options['exclude']
    if not (len(exclude) == 0):
        exclude = np.unique(exclude)
        if np.any(exclude < 0) or np.any(exclude >= nvars):
            raise ValueError('Error: Some excluded variables are out of range')
        jd = np.append(len(exclude), exclude + 1)  # indices are 1-based in fortran
    else:
        # dtype=int resolves exactly like the deprecated abstract np.integer
        jd = np.zeros([1, 1], dtype=int)

    # vp: per-variable penalty factors
    vp = options['penalty_factor']
    if len(vp) == 0:
        vp = np.ones([1, nvars])

    # internal parameters (fdev, big, ...)
    inparms = glmnetControl()

    # cl: coefficient box constraints -- row 0 is the lower, row 1 the upper bound
    cl = options['cl']
    if any(cl[0, :] > 0):
        raise ValueError('Error: The lower bound on cl must be non-positive')

    if any(cl[1, :] < 0):
        # bug fix: this row holds the *upper* bounds; the message said "lower"
        raise ValueError('Error: The upper bound on cl must be non-negative')

    cl[0, cl[0, :] == np.float64('-inf')] = -1.0 * inparms['big']
    cl[1, cl[1, :] == np.float64('inf')] = 1.0 * inparms['big']

    if cl.shape[1] < nvars:
        if cl.shape[1] == 1:
            cl = cl * np.ones([1, nvars])
        else:
            raise ValueError('Error: Require length 1 or nvars lower and upper limits')
    else:
        cl = cl[:, 0:nvars]

    # if any coefficient is pinned to zero, temporarily disable the fdev
    # early-exit so the full path is computed
    exit_rec = 0
    if np.any(cl == 0.0):
        fdev = inparms['fdev']
        if fdev != 0:
            optset = dict()
            optset['fdev'] = 0
            glmnetControl(optset)
            exit_rec = 1

    isd = np.int32(options['standardize'])
    intr = np.int32(options['intr'])
    if (intr == True) and (family == 'cox'):
        print('Warning: Cox model has no intercept!')

    jsd = np.int32(options['standardize_resp'])
    thresh = options['thresh']
    lambdau = options['lambdau']
    lambda_min = options['lambda_min']

    if len(lambda_min) == 0:
        if nobs < nvars:
            lambda_min = 0.01
        else:
            lambda_min = 1e-4

    lempty = (len(lambdau) == 0)
    if lempty:
        if lambda_min >= 1:
            raise ValueError('ERROR: lambda_min should be less than 1')
        flmin = lambda_min
        ulam = np.zeros([1, 1], dtype=np.float64)
    else:
        flmin = 1.0
        if any(lambdau < 0):
            raise ValueError('ERROR: lambdas should be non-negative')
        ulam = -np.sort(-lambdau)  # reverse sort
        nlam = lambdau.size

    maxit = np.int32(options['maxit'])
    gtype = options['gtype']
    if len(gtype) == 0:
        gtype = 'covariance' if nvars < 500 else 'naive'

    # ltype -> kopt (0: newton, 1: modified.newton), abbreviation allowed
    ltype = options['ltype']
    ltypelist = ['newton', 'modified.newton']
    indl = [i for i, nm in enumerate(ltypelist) if nm.startswith(ltype.lower())]
    if len(indl) != 1:
        raise ValueError('ERROR: ltype should be one of ''Newton'' or ''modified.Newton''')
    kopt = indl[0]

    if family == 'multinomial':
        mtype = options['mtype']
        mtypelist = ['ungrouped', 'grouped']
        indm = [i for i, nm in enumerate(mtypelist) if nm.startswith(mtype.lower())]
        if len(indm) == 0:
            raise ValueError('Error: mtype should be one of ''ungrouped'' or ''grouped''')
        elif indm[0] == 1:
            # bug fix: the original compared the *list* indm to 2 (never true),
            # so mtype='grouped' never selected the grouped penalty (kopt = 2)
            kopt = 2

    offset = options['offset']

    # sparse x: unpack the csc matrix into data plus 1-based row/column pointers
    is_sparse = False
    if scipy.sparse.issparse(x):
        is_sparse = True
        tx = scipy.sparse.csc_matrix(x, dtype=np.float64)
        x = tx.data
        x = x.reshape([len(x), 1])
        irs = np.reshape(tx.indices + 1, [len(tx.indices), ])
        pcs = np.reshape(tx.indptr + 1, [len(tx.indptr), ])
    else:
        irs = np.empty([0])
        pcs = np.empty([0])

    if scipy.sparse.issparse(y):
        y = y.todense()

    ## finally call the appropriate fit code
    if family == 'gaussian':
        # call elnet
        fit = elnet(x, is_sparse, irs, pcs, y, weights, offset, gtype, parm,
                    lempty, nvars, jd, vp, cl, ne, nx, nlam, flmin, ulam,
                    thresh, isd, intr, maxit, family)
    elif family in ('binomial', 'multinomial'):
        # call lognet
        fit = lognet(x, is_sparse, irs, pcs, y, weights, offset, parm,
                     nobs, nvars, jd, vp, cl, ne, nx, nlam, flmin, ulam,
                     thresh, isd, intr, maxit, kopt, family)
    elif family == 'cox':
        # call coxnet
        fit = coxnet(x, is_sparse, irs, pcs, y, weights, offset, parm,
                     nobs, nvars, jd, vp, cl, ne, nx, nlam, flmin, ulam,
                     thresh, isd, maxit, family)
    elif family == 'mgaussian':
        # call mrelnet
        fit = mrelnet(x, is_sparse, irs, pcs, y, weights, offset, parm,
                      nobs, nvars, jd, vp, cl, ne, nx, nlam, flmin, ulam,
                      thresh, isd, jsd, intr, maxit, family)
    elif family == 'poisson':
        # call fishnet
        fit = fishnet(x, is_sparse, irs, pcs, y, weights, offset, parm,
                      nobs, nvars, jd, vp, cl, ne, nx, nlam, flmin, ulam,
                      thresh, isd, intr, maxit, family)
    else:
        raise ValueError('calling a family of fits that has not been implemented yet')

    if exit_rec == 1:
        optset['fdev'] = fdev
        # TODO: Call glmnetControl(optset) to set persistent parameters

    # return fit
    return fit

#-----------------------------------------
# end of method glmnet
#-----------------------------------------
+ + LICENSE: GPL-2 + + AUTHORS: + Algorithm was designed by Jerome Friedman, Trevor Hastie and Rob Tibshirani + Fortran code was written by Jerome Friedman + R wrapper (from which the MATLAB wrapper was adapted) was written by Trevor Hasite + The original MATLAB wrapper was written by Hui Jiang, + and is updated and maintained by Junyang Qian. + This Python wrapper (adapted from the Matlab and R wrappers) + is written by Balakumar B.J., bbalasub@stanford.edu + Department of Statistics, Stanford University, Stanford, California, USA. + + REFERENCES: + Friedman, J., Hastie, T. and Tibshirani, R. (2008) Regularization Paths for Generalized Linear Models via Coordinate Descent, + http://www.jstatsoft.org/v33/i01/ + Journal of Statistical Software, Vol. 33(1), 1-22 Feb 2010 + + Simon, N., Friedman, J., Hastie, T., Tibshirani, R. (2011) Regularization Paths for Cox's Proportional Hazards Model via Coordinate Descent, + http://www.jstatsoft.org/v39/i05/ + Journal of Statistical Software, Vol. 39(5) 1-13 + + Tibshirani, Robert., Bien, J., Friedman, J.,Hastie, T.,Simon, N.,Taylor, J. and Tibshirani, Ryan. (2010) Strong Rules for Discarding Predictors in Lasso-type Problems, + http://www-stat.stanford.edu/~tibs/ftp/strong.pdf + Stanford Statistics Technical Report + + SEE ALSO: + glmnet, glmnetPrint, glmnetPredict, and cvglmnet. 
import numpy as np
from glmnetPredict import glmnetPredict


def glmnetCoef(obj, s=None, exact=False):
    """Compute coefficients from a fitted "glmnet" object.

    Args:
        obj: fitted "glmnet" model dict.
        s: value(s) of the penalty parameter lambda at which coefficients
            are required; defaults to the entire sequence used to fit obj.
        exact: must be False; exact refitting at new lambdas is not
            implemented, predictions use linear interpolation instead.

    Returns:
        (nvars+1) x len(s) np 2D array, one column of coefficients per s;
        the first row holds the intercepts.

    Raises:
        NotImplementedError: if exact=True is requested with a lambda sequence.
    """
    lam = obj['lambdau'] if s is None else s

    if exact and len(lam) > 0:
        raise NotImplementedError('exact = True not implemented in glmnetCoef')

    return glmnetPredict(obj, np.empty([0]), lam, 'coefficients')
+ mxit maximum iterations for multiresponse bounds adjustment + solution. factory default = 100. + factory If true, reset all the parameters to the factory default; + default is false. + + DETAILS: + If called with no arguments, glmnetControl() returns a structure with + the current settings of these parameters. Any arguments included in the + fields of the input structure sets those parameters to the new values, + and then silently returns. The values set are persistent for the + duration of the Matlab session. + + LICENSE: GPL-2 + + AUTHORS: + Algorithm was designed by Jerome Friedman, Trevor Hastie and Rob Tibshirani + Fortran code was written by Jerome Friedman + R wrapper (from which the MATLAB wrapper was adapted) was written by Trevor Hasite + The original MATLAB wrapper was written by Hui Jiang, + and is updated and maintained by Junyang Qian. + This Python wrapper (adapted from the Matlab and R wrappers) + is written by Balakumar B.J., bbalasub@stanford.edu + Department of Statistics, Stanford University, Stanford, California, USA. + + REFERENCES: + Friedman, J., Hastie, T. and Tibshirani, R. (2008) Regularization Paths for Generalized Linear Models via Coordinate Descent, + http://www.jstatsoft.org/v33/i01/ + Journal of Statistical Software, Vol. 33(1), 1-22 Feb 2010 + + Simon, N., Friedman, J., Hastie, T., Tibshirani, R. (2011) Regularization Paths for Cox's Proportional Hazards Model via Coordinate Descent, + http://www.jstatsoft.org/v39/i05/ + Journal of Statistical Software, Vol. 39(5) 1-13 + + Tibshirani, Robert., Bien, J., Friedman, J.,Hastie, T.,Simon, N.,Taylor, J. and Tibshirani, Ryan. (2010) Strong Rules for Discarding Predictors in Lasso-type Problems, + http://www-stat.stanford.edu/~tibs/ftp/strong.pdf + Stanford Statistics Technical Report + + SEE ALSO: + glmnet. 
def glmnetControl(pars=None):
    """View and/or change the factory default parameters for glmnet.

    Args:
        pars: optional dict whose keys are a subset of the known control
            names (fdev, devmax, eps, big, mnlam, pmin, exmx, prec, mxit).

    Returns:
        dict of control values: the factory defaults, overridden by any
        entries supplied in pars.

    Raises:
        ValueError: if pars contains a key that is not a known control.

    NOTE(review): the docstring above claims the values persist for the
    session, but this function is stateless -- it only returns a merged
    dict -- so persistence is not actually implemented here.
    """
    import numpy as np

    # factory defaults
    ivals = {
        'fdev': np.float64(1e-5),    # min fractional deviance change to continue path
        'devmax': np.float64(0.999), # max fraction of explained deviance
        'eps': np.float64(1e-6),     # min value of lambda.min.ratio
        'big': np.float64(9.9e35),   # stand-in for +/- inf in bounds
        'mnlam': np.float64(5),      # min number of path points
        'pmin': np.float64(1e-5),    # min null probability for any class
        'exmx': np.float64(250),     # max allowed exponent
        'prec': np.float64(1e-10),   # convergence threshold for bounds adjustment
        'mxit': np.float64(100),     # max iterations for bounds adjustment
    }

    # quick return if no user opts (bug fix: compare to None with `is`,
    # not `==`, which can misfire on objects overriding __eq__)
    if pars is None:
        return ivals

    # reject any key that is not a known control parameter
    unknown = set(pars.keys()) - set(ivals.keys())
    if len(unknown) > 0:
        raise ValueError('attempting to set glmnet controls that are not known to glmnetControl')

    return merge_dicts(ivals, pars)


def merge_dicts(*dict_args):
    """
    Given any number of dicts, shallow copy and merge into a new dict,
    precedence goes to key value pairs in latter dicts.
    """
    result = {}
    for dictionary in dict_args:
        result.update(dictionary)
    return result

# end of glmnetControl()
+ xvar What is on the X-axis. 'norm' plots against the L1-norm of + the coefficients, 'lambda' against the log-lambda sequence, + and 'dev' against the percent deviance explained. + label If true, label the curves with variable sequence numbers. + type If type='2norm' then a single curve per variable, else + if type='coef', a coefficient plot per response. + varargin Other graphical parameters to plot. + + DETAILS: + A coefficient profile plot is produced. If x is a multinomial model, a + coefficient plot is produced for each class. + + LICENSE: + GPL-2 + + AUTHORS: + Algorithm was designed by Jerome Friedman, Trevor Hastie and Rob Tibshirani + Fortran code was written by Jerome Friedman + R wrapper (from which the MATLAB wrapper was adapted) was written by Trevor Hasite + The original MATLAB wrapper was written by Hui Jiang, + and is updated and maintained by Junyang Qian. + This Python wrapper (adapted from the Matlab and R wrappers) is written by Balakumar B.J., + Department of Statistics, Stanford University, Stanford, California, USA. + + REFERENCES: + Friedman, J., Hastie, T. and Tibshirani, R. (2008) Regularization Paths for Generalized Linear Models via Coordinate Descent, + http://www.jstatsoft.org/v33/i01/ + Journal of Statistical Software, Vol. 33(1), 1-22 Feb 2010 + + Simon, N., Friedman, J., Hastie, T., Tibshirani, R. (2011) Regularization Paths for Cox's Proportional Hazards Model via Coordinate Descent, + http://www.jstatsoft.org/v39/i05/ + Journal of Statistical Software, Vol. 39(5) 1-13 + + Tibshirani, Robert., Bien, J., Friedman, J.,Hastie, T.,Simon, N.,Taylor, J. and Tibshirani, Ryan. (2010) Strong Rules for Discarding Predictors in Lasso-type Problems, + http://www-stat.stanford.edu/~tibs/ftp/strong.pdf + Stanford Statistics Technical Report + + SEE ALSO: + glmnet, glmnetSet, glmnetPrint, glmnetPredict and glmnetCoef. 
import numpy as np


def glmnetPlot(x, xvar='norm', label=False, ptype='coef', **options):
    """Plot coefficient profiles from a fitted "glmnet" object.

    Args:
        x: fitted "glmnet" model dict.
        xvar: X-axis variable -- 'norm' (L1 norm of coefficients),
            'lambda' (log-lambda), or 'dev' (fraction deviance explained).
        label: if True, label each curve with its variable index.
        ptype: 'coef' for one panel per response/class, '2norm' for a
            single panel of per-variable coefficient 2-norms
            (multinomial/mgaussian only).
        **options: extra keyword args forwarded to matplotlib's plot().

    Returns:
        dict with 'fig', 'ax1' (main axes) and 'ax2' (top df axis) handles.
    """
    import matplotlib.pyplot as plt

    # resolve possibly-abbreviated choices
    xvar = getFromList(xvar, ['norm', 'lambda', 'dev'], 'xvar should be one of ''norm'', ''lambda'', ''dev'' ')
    ptype = getFromList(ptype, ['coef', '2norm'], 'ptype should be one of ''coef'', ''2norm'' ')

    if x['class'] in ['elnet', 'lognet', 'coxnet', 'fishnet']:
        handle = plotCoef(x['beta'], [], x['lambdau'], x['df'], x['dev'],
                          label, xvar, '', 'Coefficients', **options)

    elif x['class'] in ['multnet', 'mrelnet']:
        beta = x['beta']
        if xvar == 'norm':
            norm = 0
            # bug fix: the original wrote `nzbeta = beta`, aliasing the
            # caller's fit['beta'] list and overwriting its entries below;
            # work on a shallow copy instead
            nzbeta = list(beta)
            for i in range(len(beta)):
                which = nonzeroCoef(beta[i])
                nzbeta[i] = beta[i][which, :]
                norm = norm + np.sum(np.absolute(nzbeta[i]), axis=0)
        else:
            norm = 0

        if ptype == 'coef':
            # one panel per class (multnet) / per response (mrelnet)
            ncl = x['dfmat'].shape[0]
            if x['class'] == 'multnet':
                fmt = 'Coefficients: Class %d'
            else:
                fmt = 'Coefficients: Response %d'
            for i in range(ncl):
                mstr = fmt % (i)
                handle = plotCoef(beta[i], norm, x['lambdau'], x['dfmat'][i, :],
                                  x['dev'], label, xvar, '', mstr, **options)
                if i < ncl - 1:
                    plt.figure()
        else:
            # single panel of per-variable coefficient 2-norms
            # (np.round_ was removed in NumPy 2.0; np.round is equivalent)
            dfseq = np.round(np.mean(x['dfmat'], axis=0))
            # beta[0]*0 gives the same zero array as the original beta[1]*0
            coefnorm = beta[0] * 0
            for i in range(len(beta)):
                coefnorm = coefnorm + np.absolute(beta[i]) ** 2
            coefnorm = np.sqrt(coefnorm)
            mstr = 'Coefficient 2Norms'
            if x['class'] == 'multnet':
                handle = plotCoef(coefnorm, norm, x['lambdau'], dfseq, x['dev'],
                                  label, xvar, '', mstr, **options)
            else:
                handle = plotCoef(coefnorm, norm, x['lambdau'], x['dfmat'][0, :], x['dev'],
                                  label, xvar, '', mstr, **options)

    return(handle)
# end of glmnetplot
# =========================================
#
# =========================================
# helper functions
# =========================================
def getFromList(xvar, xvarbase, errMsg):
    """Resolve a possibly-abbreviated option against a list of choices.

    Returns the first element of xvarbase that starts with xvar (case
    insensitive); raises ValueError(errMsg) when nothing matches.
    """
    matches = [i for i, base in enumerate(xvarbase) if base.startswith(xvar.lower())]
    if len(matches) == 0:
        raise ValueError(errMsg)
    return xvarbase[matches[0]]
# end of getFromList()
# =========================================
def nonzeroCoef(beta, bystep=False):
    """Boolean mask of nonzero coefficients.

    With bystep=False (default) returns a 1D mask per variable (nonzero at
    ANY lambda); with bystep=True returns the full 2D per-lambda mask.
    """
    result = np.absolute(beta) > 0
    if len(result.shape) == 1:
        # promote a 1D coefficient vector to a single-column 2D mask
        result = np.reshape(result, [result.shape[0], 1])
    if not bystep:
        result = np.any(result, axis=1)
    return(result)
# end of nonzeroCoef()
# =========================================
def plotCoef(beta, norm, lambdau, df, dev, label, xvar, xlab, ylab, **options):
    """Draw one coefficient-profile panel; returns fig/axes handles."""
    import matplotlib.pyplot as plt

    # keep only variables that are ever nonzero
    which = nonzeroCoef(beta)
    idwhich = [i for i in range(len(which)) if which[i] == True]
    nwhich = len(idwhich)
    if nwhich == 0:
        raise ValueError('No plot produced since all coefficients are zero')
    elif nwhich == 1:
        raise ValueError('1 or less nonzero coefficients; glmnet plot is not meaningful')

    beta = beta[which, :]
    if xvar == 'norm':
        if len(norm) == 0:
            index = np.sum(np.absolute(beta), axis=0)
        else:
            index = norm
        iname = 'L1 Norm'
    elif xvar == 'lambda':
        index = np.log(lambdau)
        iname = 'Log Lambda'
    elif xvar == 'dev':
        index = dev
        iname = 'Fraction Deviance Explained'

    if len(xlab) == 0:
        xlab = iname

    # draw into the current figure/axes
    fig = plt.gcf()
    ax1 = plt.gca()

    # plot x vs y
    beta = np.transpose(beta)
    ax1.plot(index, beta, **options)

    # secondary top axis showing degrees of freedom
    ax2 = ax1.twiny()
    ax2.xaxis.tick_top()

    xlim1 = ax1.get_xlim()
    ylim1 = ax1.get_ylim()

    # map each main-axis tick to the df of the nearest path point
    atdf = ax1.get_xticks()
    indat = np.ones(atdf.shape, dtype=int)
    if index[-1] >= index[1]:
        for j in range(len(index) - 1, -1, -1):
            indat[atdf <= index[j]] = j
    else:
        for j in range(len(index)):
            indat[atdf <= index[j]] = j
    prettydf = df[indat]
    prettydf[-1] = df[-1]

    # bug fix: ax.set(XLim=..., XTicks=..., XTickLabels=...) is MATLAB-style
    # and raises in matplotlib; use the explicit Axes setters
    ax2.set_xlim([min(index), max(index)])
    ax2.set_xticks(atdf)
    ax2.set_xticklabels(prettydf)
    ax2.grid()
    ax1.yaxis.grid()

    ax2.set_xlabel('Degrees of Freedom')
    ax1.set_xlabel(xlab)
    ax1.set_ylabel(ylab)

    # put the labels
    if label:
        xpos = max(index)
        adjpos = 1
        if xvar == 'lambda':
            xpos = min(index)
            adjpos = 0
        bsize = beta.shape
        for i in range(beta.shape[1]):
            # renamed from `str`, which shadowed the builtin
            curve_label = '%d' % idwhich[i]
            ax1.text(1 / 2 * xpos + 1 / 2 * xlim1[adjpos], beta[bsize[0] - 1, i], curve_label)

    plt.show()

    handle = dict()
    handle['fig'] = fig
    handle['ax1'] = ax1
    handle['ax2'] = ax2
    return(handle)

# end of plotCoef
# =========================================
+ + To make EXACT prediction, the input arguments originally passed to + "glmnet" MUST be VARIABLES (instead of expressions, or fields + extracted from some struct objects). Alternatively, users should + manually revise the "call" field in "object" (expressions or variable + names) to match the original call to glmnet in the parent environment. + + INPUT ARGUMENTS: + object Fitted "glmnet" model object. + s Value(s) of the penalty parameter lambda at which predictions + are required. Default is the entire sequence used to create + the model. + newx scipy 2D array of new values for x at which predictions are to be + made. Must be a 2D array; can be sparse. This argument is not + used for type='coefficients' or type='nonzero'. + ptype Type of prediction required. Type 'link' gives the linear + predictors for 'binomial', 'multinomial', 'poisson' or 'cox' + models; for 'gaussian' models it gives the fitted values. + Type 'response' gives the fitted probabilities for 'binomial' + or 'multinomial', fitted mean for 'poisson' and the fitted + relative-risk for 'cox'; for 'gaussian' type 'response' is + equivalent to type 'link'. Type 'coefficients' computes the + coefficients at the requested values for s. Note that for + 'binomial' models, results are returned only for the class + corresponding to the second level of the factor response. + Type 'class' applies only to 'binomial' or 'multinomial' + models, and produces the class label corresponding to the + maximum probability. Type 'nonzero' returns a matrix of + logical values with each column for each value of s, + indicating if the corresponding coefficient is nonzero or not. + exact If exact=false (default), then the predict function + uses linear interpolation to make predictions for values of s + that do not coincide with those used in the fitting + algorithm. exact = True is not implemented. 
+ offset If an offset is used in the fit, then one must be supplied + for making predictions (except for type='coefficients' or + type='nonzero') + + DETAILS: + The shape of the objects returned are different for "multinomial" + objects. glmnetCoef(fit, ...) is equivalent to + glmnetPredict(fit,np.empty([]),np.empty([]),'coefficients"). + + LICENSE: GPL-2 + + AUTHORS: + Algorithm was designed by Jerome Friedman, Trevor Hastie and Rob Tibshirani + Fortran code was written by Jerome Friedman + R wrapper (from which the MATLAB wrapper was adapted) was written by Trevor Hasite + The original MATLAB wrapper was written by Hui Jiang (14 Jul 2009), + and was updated and maintained by Junyang Qian (30 Aug 2013) junyangq@stanford.edu, + Department of Statistics, Stanford University, Stanford, California, USA. + + REFERENCES: + Friedman, J., Hastie, T. and Tibshirani, R. (2008) Regularization Paths for Generalized Linear Models via Coordinate Descent, + http://www.jstatsoft.org/v33/i01/ + Journal of Statistical Software, Vol. 33(1), 1-22 Feb 2010 + + Simon, N., Friedman, J., Hastie, T., Tibshirani, R. (2011) Regularization Paths for Cox's Proportional Hazards Model via Coordinate Descent, + http://www.jstatsoft.org/v39/i05/ + Journal of Statistical Software, Vol. 39(5) 1-13 + + Tibshirani, Robert., Bien, J., Friedman, J.,Hastie, T.,Simon, N.,Taylor, J. and Tibshirani, Ryan. (2010) Strong Rules for Discarding Predictors in Lasso-type Problems, + http://www-stat.stanford.edu/~tibs/ftp/strong.pdf + Stanford Statistics Technical Report + + SEE ALSO: + glmnet, glmnetPrint, glmnetCoef, and cvglmnet. 
+ +EXAMPLES: + + x = np.random.normal(size = [100,20]) + y = np.random.normal(size = [100,1]) + g2 = np.random.choice(2, size = [100, 1])*1.0 # must be float64 + g4 = np.random.choice(4, size = [100, 1])*1.0 # must be float64 + + fit1 = glmnet(x = x.copy(),y = y.copy()); + print( glmnetPredict(fit1,x[0:5,:],np.array([0.01,0.005])) ) + print( glmnetPredict(fit1, np.empty([0]), np.empty([0]), 'coefficients') ) + + fit2 = glmnet(x = x.copy(), y = g2.copy(), family = 'binomial'); + print(glmnetPredict(fit2, x[2:5,:],np.empty([0]), 'response')) + print(glmnetPredict(fit2, np.empty([0]), np.empty([0]), 'nonzero')) + + fit3 = glmnet(x = x.copy(), y = g4.copy(), family = 'multinomial'); + print(glmnetPredict(fit3, x[0:3,:], np.array([0.01, 0.5]), 'response')) + +""" +import numpy as np +import scipy.sparse +import scipy.interpolate + +def glmnetPredict(fit,\ + newx = np.empty([0]), \ + s = np.empty([0]), \ + ptype = 'link', \ + exact = False, \ + offset = np.empty([0])): + + typebase = ['link', 'response', 'coefficients', 'nonzero', 'class'] + indxtf = [x.startswith(ptype.lower()) for x in typebase] + indl = [i for i in range(len(indxtf)) if indxtf[i] == True] + ptype = typebase[indl[0]] + + if newx.shape[0] == 0 and ptype != 'coefficients' and ptype != 'nonzero': + raise ValueError('You need to supply a value for ''newx''') + + # python 1D arrays are not the same as matlab 1xn arrays + # check for this. newx = x[0:1, :] is a python 2D array and would work; + # but newx = x[0, :] is a python 1D array and should not be passed into + # glmnetPredict + if len(newx.shape) == 1 and newx.shape[0] > 0: + raise ValueError('newx must be a 2D (not a 1D) python array') + + if exact == True and len(s) > 0: + # It is very messy to go back into the caller namespace + # and call glmnet again. 
The user should really do this at their end + # by calling glmnet again using the correct array of lambda values that + # includes the lambda for which prediction is sought + raise NotImplementedError('exact = True option is not implemented in python') + + # we convert newx to full here since sparse and full operations do not seem to + # be overloaded completely in np. + if scipy.sparse.issparse(newx): + newx = newx.todense() + + # elnet + if fit['class'] in ['elnet', 'fishnet', 'lognet']: + if fit['class'] == 'lognet': + a0 = fit['a0'] + else: + a0 = np.transpose(fit['a0']) + + a0 = np.reshape(a0, [1, a0.size]) # convert to 1 x N for appending + nbeta = np.row_stack( (a0, fit['beta']) ) + if np.size(s) > 0: + lambdau = fit['lambdau'] + lamlist = lambda_interp(lambdau, s) + nbeta = nbeta[:, lamlist['left']]*np.tile(np.transpose(lamlist['frac']), [nbeta.shape[0], 1]) \ + + nbeta[:, lamlist['right']]*( 1 - np.tile(np.transpose(lamlist['frac']), [nbeta.shape[0], 1])) + + if ptype == 'coefficients': + result = nbeta + return(result) + + if ptype == 'nonzero': + result = nonzeroCoef(nbeta[1:nbeta.shape[0], :], True) + return(result) + # use scipy.sparse.hstack instead of column_stack for sparse matrices + result = np.dot(np.column_stack( (np.ones([newx.shape[0], 1]) , newx) ) , nbeta) + + if fit['offset']: + if len(offset) == 0: + raise ValueError('No offset provided for prediction, yet used in fit of glmnet') + if offset.shape[1] == 2: + offset = offset[:, 1] + + result = result + np.tile(offset, [1, result.shape[1]]) + + # fishnet + if fit['class'] == 'fishnet' and ptype == 'response': + result = np.exp(result) + + # lognet + if fit['class'] == 'lognet': + if ptype == 'response': + pp = np.exp(-result) + result = 1/(1 + pp) + elif ptype == 'class': + result = (result > 0)*1 + (result <= 0)*0 + result = fit['label'][result] + + # multnet / mrelnet + if fit['class'] == 'mrelnet' or fit['class'] == 'multnet': + if fit['class'] == 'mrelnet': + if type == 'response': + 
ptype = 'link' + fit['grouped'] = True + + a0 = fit['a0'] + nbeta = fit['beta'].copy() + nclass = a0.shape[0] + nlambda = s.size + + if len(s) > 0: + lambdau = fit['lambdau'] + lamlist = lambda_interp(lambdau, s) + for i in range(nclass): + kbeta = np.row_stack( (a0[i, :], nbeta[i]) ) + kbeta = kbeta[:, lamlist['left']]*np.tile(np.transpose(lamlist['frac']), [kbeta.shape[0], 1]) \ + + kbeta[:, lamlist['right']]*( 1 - np.tile(np.transpose(lamlist['frac']), [kbeta.shape[0], 1])) + nbeta[i] = kbeta + else: + for i in range(nclass): + nbeta[i] = np.row_stack( (a0[i, :], nbeta[i]) ) + nlambda = len(fit['lambdau']) + + if ptype == 'coefficients': + result = nbeta + return(result) + + if ptype == 'nonzero': + if fit['grouped']: + result = list() + tn = nbeta[0].shape[0] + result.append(nonzeroCoef(nbeta[0][1:tn, :], True)) + else: + result = list() + for i in range(nclass): + tn = nbeta[0].shape[0] + result.append(nonzeroCoef(nbeta[0][1:tn, :], True)) + return(result) + + npred = newx.shape[0] + dp = np.zeros([nclass, nlambda, npred], dtype = np.float64) + for i in range(nclass): + qq = np.column_stack( (np.ones([newx.shape[0], 1]), newx) ) + fitk = np.dot( qq, nbeta[i] ) + dp[i, :, :] = dp[i, :, :] + np.reshape(np.transpose(fitk), [1, nlambda, npred]) + + if fit['offset']: + if len(offset) == 0: + raise ValueError('No offset provided for prediction, yet used in fit of glmnet') + if offset.shape[1] != nclass: + raise ValueError('Offset should be dimension %d x %d' % (npred, nclass)) + toff = np.transpose(offset) + for i in range(nlambda): + dp[:, i, :] = dp[:, i, :] + toff + + if ptype == 'response': + pp = np.exp(dp) + psum = np.sum(pp, axis = 0, keepdims = True) + result = np.transpose(pp/np.tile(psum, [nclass, 1, 1]), [2, 0, 1]) + if ptype == 'link': + result = np.transpose(dp, [2, 0, 1]) + if ptype == 'class': + dp = np.transpose(dp, [2, 0, 1]) + result = list() + for i in range(dp.shape[2]): + t = softmax(dp[:, :, i]) + result = np.append(result, 
fit['label'][t['pclass']]) + + # coxnet + if fit['class'] == 'coxnet': + nbeta = fit['beta'] + if len(s) > 0: + lambdau = fit['lambdau'] + lamlist = lambda_interp(lambdau, s) + nbeta = nbeta[:, lamlist['left']]*np.tile(np.transpose(lamlist['frac']), [nbeta.shape[0], 1]) \ + + nbeta[:, lamlist['right']]*( 1 - np.tile(np.transpose(lamlist['frac']), [nbeta.shape[0], 1])) + + if ptype == 'coefficients': + result = nbeta + return(result) + + if ptype == 'nonzero': + result = nonzeroCoef(nbeta, True) + return(result) + + result = np.dot(newx, nbeta) + + if fit['offset']: + if len(offset) == 0: + raise ValueError('No offset provided for prediction, yet used in fit of glmnet') + + result = result + np.tile(offset, [1, result.shape[1]]) + + if ptype == 'response': + result = np.exp(result) + + return(result) + +# end of glmnetPredict +# ========================================= + + +# ========================================= +# helper functions +# ========================================= +def lambda_interp(lambdau, s): +# lambda is the index sequence that is produced by the model +# s is the new vector at which evaluations are required. +# the value is a vector of left and right indices, and a vector of fractions. +# the new values are interpolated bewteen the two using the fraction +# Note: lambda decreases. 
you take: +# sfrac*left+(1-sfrac*right) + if len(lambdau) == 1: + nums = len(s) + left = np.zeros([nums, 1], dtype = np.integer) + right = left + sfrac = np.zeros([nums, 1], dtype = np.float64) + else: + s[s > np.amax(lambdau)] = np.amax(lambdau) + s[s < np.amin(lambdau)] = np.amin(lambdau) + k = len(lambdau) + sfrac = (lambdau[0] - s)/(lambdau[0] - lambdau[k - 1]) + lambdau = (lambdau[0] - lambdau)/(lambdau[0] - lambdau[k - 1]) + coord = scipy.interpolate.interp1d(lambdau, range(k))(sfrac) + left = np.floor(coord).astype(np.integer, copy = False) + right = np.ceil(coord).astype(np.integer, copy = False) + # + tf = left != right + sfrac[tf] = (sfrac[tf] - lambdau[right[tf]])/(lambdau[left[tf]] - lambdau[right[tf]]) + sfrac[~tf] = 1.0 + #if left != right: + # sfrac = (sfrac - lambdau[right])/(lambdau[left] - lambdau[right]) + #else: + # sfrac[left == right] = 1.0 + + result = dict() + result['left'] = left + result['right'] = right + result['frac'] = sfrac + + return(result) +# end of lambda_interp +# ========================================= +def softmax(x, gap = False): + d = x.shape + maxdist = x[:, 0] + pclass = np.zeros([d[0], 1], dtype = np.integer) + for i in range(1, d[1], 1): + l = x[:, i] > maxdist + pclass[l] = i + maxdist[l] = x[l, i] + if gap == True: + x = np.absolute(maxdist - x) + x[0:d[0], pclass] = x*np.ones([d[1], d[1]]) + #gaps = pmin(x)# not sure what this means; gap is never called with True + raise ValueError('gap = True is not implemented yet') + + result = dict() + if gap == True: + result['pclass'] = pclass + #result['gaps'] = gaps + raise ValueError('gap = True is not implemented yet') + else: + result['pclass'] = pclass; + + return(result) +# end of softmax +# ========================================= +def nonzeroCoef(beta, bystep = False): + result = np.absolute(beta) > 0 + if not bystep: + result = np.any(result, axis = 1) + return(result) +# end of nonzeroCoef +# ========================================= + diff --git 
a/build/lib/glmnet_python/glmnetPrint.py b/build/lib/glmnet_python/glmnetPrint.py
new file mode 100644
index 0000000..5e867dd
--- /dev/null
+++ b/build/lib/glmnet_python/glmnetPrint.py
@@ -0,0 +1,59 @@
# -*- coding: utf-8 -*-
"""

--------------------------------------------------------------------------
 glmnetPrint.m: print a glmnet object
--------------------------------------------------------------------------

 DESCRIPTION:
    Print a summary of the glmnet path at each step along the path.

 USAGE:
    glmnetPrint(fit)

 INPUT ARGUMENTS:
    fit         fitted glmnet object

 DETAILS:
    Three-column matrix with columns Df, %Dev and Lambda is printed. The Df
    column is the number of nonzero coefficients (Df is a reasonable name
    only for lasso fits). %Dev is the percent deviance explained (relative
    to the null deviance).

 LICENSE: GPL-2

 AUTHORS:
    Algorithm was designed by Jerome Friedman, Trevor Hastie and Rob Tibshirani
    Fortran code was written by Jerome Friedman
    R wrapper (from which the MATLAB wrapper was adapted) was written by Trevor Hastie
    The original MATLAB wrapper was written by Hui Jiang,
    and is updated and maintained by Junyang Qian.
    This Python wrapper (adapted from the Matlab and R wrappers)
    is written by Balakumar B.J., bbalasub@stanford.edu
    Department of Statistics, Stanford University, Stanford, California, USA.

 REFERENCES:
    Friedman, J., Hastie, T. and Tibshirani, R. (2008) Regularization Paths for Generalized Linear Models via Coordinate Descent,
    http://www.jstatsoft.org/v33/i01/
    Journal of Statistical Software, Vol. 33(1), 1-22 Feb 2010

 SEE ALSO:
    glmnet, glmnetSet, glmnetPredict and glmnetCoef methods.
+ + EXAMPLES: + x = np.random.normal(size=[100,20]) + y = np.random.normal(size=[100,1]) + fit=glmnet(x = x,y = y); + glmnetPrint(fit); + +""" + +def glmnetPrint(fit): + + print('\t df \t %dev \t lambdau\n') + N = fit['lambdau'].size + for i in range(N): + line_p = '%d \t %f \t %f \t %f' % (i, fit['df'][i], fit['dev'][i], fit['lambdau'][i]) + print(line_p) + + diff --git a/build/lib/glmnet_python/glmnetSet.py b/build/lib/glmnet_python/glmnetSet.py new file mode 100644 index 0000000..ee026f1 --- /dev/null +++ b/build/lib/glmnet_python/glmnetSet.py @@ -0,0 +1,239 @@ +# -*- coding: utf-8 -*- +""" + +Sets parameters for glmnet. Returns a default dictionary of parameters +if nothing is passed in. The user is allowed to pass a partial dictionary +of parameters. Parameter values not in the user input are replaced by +default values. + +Note: The input 'opts' dictionary is expected to contain keys that are a subset +of the keys in the 'options' dictionary below. This check is enforced to make +sure that typos in the keynames do not modify behavior of code (as an example, a typo +that results in the passing of 'alpha' as 'alhpa' should not result in the +default value of 'alpha' getting passed on silently) + +INPUT ARGUMENTS: +----------------- + opts : dictionary of parameters + +OUTPUT ARGUMENTS: +---------------- + options : dictionary of parameters + +USAGE: +----- + # return default values as a dict() in options + options = glmnetSet() + # set default values for all parameters except + # for alpha, intr, maxit, offset parameters. Set + # given values for these parameters. + options = glmnetSet( alpha = 0.1, \ + intr = False, \ + maxit = np.int32(1e6), \ + offset = np.empty([0]) ) + # same as previous case, except we pass in a + # dict() object instead + opts = dict(); opts['alpha'] = 0.5; + options = glmnetSet(opts) + +.................................................................. 
+Parameter Default value + Description +.................................................................. +alpha + The elasticnet mixing parameter, with 0 < alpha <= 1. + The penalty is defined as + (1-alpha)/2(||beta||_2)^2+alpha||beta||_1. + Default is alpha = 1, which is the lasso penalty; + Currently alpha = 0 the ridge penalty. + +nlambda + The number of lambda values - default is + +lambdau + A user supplied lambda sequence. Typical usage is to + have the program compute its own lambda sequence + based on nlambda and lambda_min. Supplying a value of + lambda override this. WARNING: Use with care. Do not + supply a single value for lambda (for predictions + after CV use cvglmnetPredict() instead). Supply a + decreasing sequence of lambda values. glmnet relies + on its warm starts for speed, and it's often faster + to fit a whole path than compute a single fit. + +standardize + Logical flag for x variable standardization, prior to + fitting the model sequence. The coefficients are + always returned on the original scale. Default is + standardize = true. If variables are in the same + units already, you might not wish to standardize. See + details below for y standardization with + family='gaussian'. + +weights + Observation weights. Can be total counts if responses + are proportion matrices. Default is 1 for each + observation. + +intr + Should intercept(s) be fitted (default=true) or set + to zero (false). + +offset + A vector of length nobs that is included in the + linear predictor (a nobs x nc matrix for the + "multinomial" family). Useful for the "poisson" + family (e.g. log of exposure time), or for refining a + model by starting at a current fit. Default is []. If + supplied, then values must also be supplied to the + predict function. + +lambda_min + Smallest value for lambda, as a fraction of + lambda_max, the (data derived) entry value (i.e., the + smallest value for which all coefficients are zero). 
+ The default depends on the sample size nobs relative + to the number of variables nvars. If nobs > nvars, + the default is 0.0001, close to zero. If nobs < + nvars, the defaults is 0.01. A very small value of + lambda_min will lead to a saturated fit. This is + undefined for "binomial" and "multinomial" models, + and glmnet will exit gracefully when the percentage + deviance explained is almost 1. + +thresh + Convergence threshold for coordinate descent. Each + inner coordinate-descent loop continues until the + maximum change in the objective after any coefficient + update is less than thresh times the null deviance. + Defaults value is 1E-4. + +dfmax + Limit the maximum number of variables in the model. + Useful for very large nvars, if a partial path is + desired. Default is nvars + 1. + +pmax + Limit the maximum number of variables ever to be + nonzero. Default is min(dfmax * 2 + 20, nvars). + +exclude + Indices of variables to be excluded from the model. + Default is none. Equivalent to an infinite penalty + factor (next item). + +penalty_factor + Separate penalty factors can be applied to each + coefficient. This is a number that multiplies lambda + to allow differential shrinkage. Can be 0 for some + variables, which implies no shrinkage, and that + variable is always included in the model. Default is + 1 for all variables (and implicitly infinity for + variables listed in exclude). Note: the penalty + factors are internally rescaled to sum to nvars, and + the lambda sequence will reflect this change. + +maxit + Maximum number of passes over the data for all lambda + values; default is 10^5. + +cl + Two-row matrix with the first row being the lower + limits for each coefficient and the second the upper + limits. Can be presented as a single column (which + will then be replicated), else a matrix of nvars + columns. Default [-Inf;Inf]. + +gtype + Two algorithm types are supported for (only) + family = 'gaussian'. 
The default when nvar<500 is + options.gtype = 'covariance', and saves all + inner-products ever computed. This can be much faster + than options.gtype='naive', which loops through nobs + every time an inner-product is computed. The latter + can be far more efficient for nvar >> nobs + situations, or when nvar > 500. + +ltype + If 'Newton' then the exact hessian is used (default), + while 'modified.Newton' uses an upper-bound on the + hessian, and can be faster. + +standardize_resp + This is for the family='mgaussian' family, and allows + the user to standardize the response variables. + +mtype + If 'grouped' then a grouped lasso penalty is used on + the multinomial coefficients for a variable. This + ensures they are all in our out together. The default + is 'ungrouped'. + +LICENSE: +------- + GPL-2 + +AUTHORS: +------- + Algorithm was designed by Jerome Friedman, Trevor Hastie and Rob Tibshirani + Fortran code was written by Jerome Friedman + R wrapper (from which the MATLAB wrapper was adapted) was written by Trevor Hasite + The original MATLAB wrapper was written by Hui Jiang, + and is updated and maintained by Junyang Qian. + This Python wrapper (adapted from the Matlab and R wrappers) + is written by Balakumar B.J., bbalasub@stanford.edu + Department of Statistics, Stanford University, Stanford, California, USA. 
+ +""" + +def glmnetSet(opts = None): + import numpy as np + + # default options + options = { + "weights" : np.empty([0]), + "offset" : np.empty([0]), + "alpha" : np.float64(1.0), + "nlambda" : np.int32(100), + "lambda_min" : np.empty([0]), + "lambdau" : np.empty([0]), + "standardize" : True, + "intr" : True, + "thresh" : np.float64(1e-7), + "dfmax" : np.empty([0]), + "pmax" : np.empty([0]), + "exclude" : np.empty([0], dtype = np.integer), + "penalty_factor" : np.empty([0]), + "cl" : np.array([[np.float64(-np.inf)], [np.float64(np.inf)]]), + "maxit" : np.int32(1e5), + "gtype" : [], + "ltype" : 'Newton', + "standardize_resp" : False, + "mtype" : 'ungrouped' + } + + # quick return if no user opts + if opts == None: + print('pdco default options:') + print(options) + return options + + # if options are passed in by user, update options with values from opts + optsInOptions = set(opts.keys()) - set(options.keys()); + if len(optsInOptions) > 0: # assert 'opts' keys are subsets of 'options' keys + print(optsInOptions, ' : unknown option for glmnetSet') + raise ValueError('attempting to set glmnet options that are not known to glmnetSet') + else: + options = merge_dicts(options, opts) + + return options + +def merge_dicts(*dict_args): + """ + Given any number of dicts, shallow copy and merge into a new dict, + precedence goes to key value pairs in latter dicts. 
+ """ + result = {} + for dictionary in dict_args: + result.update(dictionary) + return result diff --git a/build/lib/glmnet_python/loadGlmLib.py b/build/lib/glmnet_python/loadGlmLib.py new file mode 100644 index 0000000..bfe2df4 --- /dev/null +++ b/build/lib/glmnet_python/loadGlmLib.py @@ -0,0 +1,32 @@ +# -*- coding: utf-8 -*- +""" +def loadGlmLib(): +======================= +INPUT ARGUMENTS: + + NONE + +======================= +OUTPUT ARGUMENTS: + +glmlib Returns a glmlib object with methods that are equivalent + to the fortran functions in GLMnet.f +======================= +""" +import ctypes +import os + +glmnet_so = os.path.dirname(__file__) + '/GLMnet.so' +glmnet_dll = os.path.dirname(__file__) + '/GLMnet.dll' + +def loadGlmLib(): + if os.name == 'posix': + glmlib = ctypes.cdll.LoadLibrary(glmnet_so) + return(glmlib) + elif os.name == 'nt': + # this does not currently work + raise ValueError('loadGlmlib does not currently work for windows') + # glmlib = ctypes.windll.LoadLibrary(glmnet_dll) + else: + raise ValueError('loadGlmLib not yet implemented for non-posix OS') + diff --git a/build/lib/glmnet_python/lognet.py b/build/lib/glmnet_python/lognet.py new file mode 100644 index 0000000..284cdd9 --- /dev/null +++ b/build/lib/glmnet_python/lognet.py @@ -0,0 +1,337 @@ +# -*- coding: utf-8 -*- +""" +Internal function called by glmnet. 
See also glmnet, cvglmnet + +""" +# import packages/methods +import numpy as np +import ctypes +from loadGlmLib import loadGlmLib + +def lognet(x, is_sparse, irs, pcs, y, weights, offset, parm, + nobs, nvars, jd, vp, cl, ne, nx, nlam, flmin, ulam, + thresh, isd, intr, maxit, kopt, family): + + # load shared fortran library + glmlib = loadGlmLib() + + # + noo = y.shape[0] + if len(y.shape) > 1: + nc = y.shape[1] + else: + nc = 1 + + if (noo != nobs): + raise ValueError('x and y have different number of rows in call to glmnet') + + if nc == 1: + classes, sy = np.unique(y, return_inverse = True) + nc = len(classes) + indexes = np.eye(nc, nc) + y = indexes[sy, :] + else: + classes = np.arange(nc) + 1 # 1:nc + # + if family == 'binomial': + if nc > 2: + raise ValueError('More than two classes in y. use multinomial family instead') + else: + nc = 1 + y = y[:, [1, 0]] + # + if (len(weights) != 0): + t = weights > 0 + if ~np.all(t): + t = np.reshape(t, (len(y), )) + y = y[t, :] + x = x[t, :] + weights = weights[t] + nobs = np.sum(t) + else: + t = np.empty([0], dtype = np.integer) + # + if len(y.shape) == 1: + mv = len(y) + ny = 1 + else: + mv, ny = y.shape + + y = y*np.tile(weights, (1, ny)) + + # + if len(offset) == 0: + offset = y*0 + is_offset = False + else: + if len(t) != 0: + offset = offset[t, :] + do = offset.shape + if do[0] != nobs: + raise ValueError('offset should have the same number of values as observations in binominal/multinomial call to glmnet') + if nc == 1: + if do[1] == 1: + offset = np.column_stack((offset, -offset), 1) + if do[1] > 2: + raise ValueError('offset should have 1 or 2 columns in binomial call to glmnet') + if (family == 'multinomial') and (do[1] != nc): + raise ValueError('offset should have same shape as y in multinomial call to glmnet') + is_offset = True + + # now convert types and allocate memory before calling + # glmnet fortran library + ###################################### + # --------- PROCESS INPUTS ----------- + 
###################################### + # force inputs into fortran order and scipy float64 + copyFlag = False + x = x.astype(dtype = np.float64, order = 'F', copy = copyFlag) + irs = irs.astype(dtype = np.int32, order = 'F', copy = copyFlag) + pcs = pcs.astype(dtype = np.int32, order = 'F', copy = copyFlag) + y = y.astype(dtype = np.float64, order = 'F', copy = copyFlag) + weights = weights.astype(dtype = np.float64, order = 'F', copy = copyFlag) + offset = offset.astype(dtype = np.float64, order = 'F', copy = copyFlag) + jd = jd.astype(dtype = np.int32, order = 'F', copy = copyFlag) + vp = vp.astype(dtype = np.float64, order = 'F', copy = copyFlag) + cl = cl.astype(dtype = np.float64, order = 'F', copy = copyFlag) + ulam = ulam.astype(dtype = np.float64, order = 'F', copy = copyFlag) + + ###################################### + # --------- ALLOCATE OUTPUTS --------- + ###################################### + # lmu + lmu = -1 + lmu_r = ctypes.c_int(lmu) + # a0, ca + if nc == 1: + a0 = np.zeros([nlam], dtype = np.float64) + ca = np.zeros([nx, nlam], dtype = np.float64) + else: + a0 = np.zeros([nc, nlam], dtype = np.float64) + ca = np.zeros([nx, nc, nlam], dtype = np.float64) + # a0 + a0 = a0.astype(dtype = np.float64, order = 'F', copy = False) + a0_r = a0.ctypes.data_as(ctypes.POINTER(ctypes.c_double)) + # ca + ca = ca.astype(dtype = np.float64, order = 'F', copy = False) + ca_r = ca.ctypes.data_as(ctypes.POINTER(ctypes.c_double)) + # ia + ia = -1*np.ones([nx], dtype = np.int32) + ia = ia.astype(dtype = np.int32, order = 'F', copy = False) + ia_r = ia.ctypes.data_as(ctypes.POINTER(ctypes.c_int)) + # nin + nin = -1*np.ones([nlam], dtype = np.int32) + nin = nin.astype(dtype = np.int32, order = 'F', copy = False) + nin_r = nin.ctypes.data_as(ctypes.POINTER(ctypes.c_int)) + # dev + dev = -1*np.ones([nlam], dtype = np.float64) + dev = dev.astype(dtype = np.float64, order = 'F', copy = False) + dev_r = dev.ctypes.data_as(ctypes.POINTER(ctypes.c_double)) + # alm + alm = 
-1*np.ones([nlam], dtype = np.float64) + alm = alm.astype(dtype = np.float64, order = 'F', copy = False) + alm_r = alm.ctypes.data_as(ctypes.POINTER(ctypes.c_double)) + # nlp + nlp = -1 + nlp_r = ctypes.c_int(nlp) + # jerr + jerr = -1 + jerr_r = ctypes.c_int(jerr) + # dev0 + dev0 = -1 + dev0_r = ctypes.c_double(dev0) + + # ################################### + # main glmnet fortran caller + # ################################### + if is_sparse: + # sparse lognet + glmlib.splognet_( + ctypes.byref(ctypes.c_double(parm)), + ctypes.byref(ctypes.c_int(nobs)), + ctypes.byref(ctypes.c_int(nvars)), + ctypes.byref(ctypes.c_int(nc)), + x.ctypes.data_as(ctypes.POINTER(ctypes.c_double)), + pcs.ctypes.data_as(ctypes.POINTER(ctypes.c_int)), + irs.ctypes.data_as(ctypes.POINTER(ctypes.c_int)), + y.ctypes.data_as(ctypes.POINTER(ctypes.c_double)), + offset.ctypes.data_as(ctypes.POINTER(ctypes.c_double)), + jd.ctypes.data_as(ctypes.POINTER(ctypes.c_int)), + vp.ctypes.data_as(ctypes.POINTER(ctypes.c_double)), + cl.ctypes.data_as(ctypes.POINTER(ctypes.c_double)), + ctypes.byref(ctypes.c_int(ne)), + ctypes.byref(ctypes.c_int(nx)), + ctypes.byref(ctypes.c_int(nlam)), + ctypes.byref(ctypes.c_double(flmin)), + ulam.ctypes.data_as(ctypes.POINTER(ctypes.c_double)), + ctypes.byref(ctypes.c_double(thresh)), + ctypes.byref(ctypes.c_int(isd)), + ctypes.byref(ctypes.c_int(intr)), + ctypes.byref(ctypes.c_int(maxit)), + ctypes.byref(ctypes.c_int(kopt)), + ctypes.byref(lmu_r), + a0_r, + ca_r, + ia_r, + nin_r, + ctypes.byref(dev0_r), + dev_r, + alm_r, + ctypes.byref(nlp_r), + ctypes.byref(jerr_r) + ) + else: + # call fortran lognet routine + glmlib.lognet_( + ctypes.byref(ctypes.c_double(parm)), + ctypes.byref(ctypes.c_int(nobs)), + ctypes.byref(ctypes.c_int(nvars)), + ctypes.byref(ctypes.c_int(nc)), + x.ctypes.data_as(ctypes.POINTER(ctypes.c_double)), + y.ctypes.data_as(ctypes.POINTER(ctypes.c_double)), + offset.ctypes.data_as(ctypes.POINTER(ctypes.c_double)), + 
jd.ctypes.data_as(ctypes.POINTER(ctypes.c_int)), + vp.ctypes.data_as(ctypes.POINTER(ctypes.c_double)), + cl.ctypes.data_as(ctypes.POINTER(ctypes.c_double)), + ctypes.byref(ctypes.c_int(ne)), + ctypes.byref(ctypes.c_int(nx)), + ctypes.byref(ctypes.c_int(nlam)), + ctypes.byref(ctypes.c_double(flmin)), + ulam.ctypes.data_as(ctypes.POINTER(ctypes.c_double)), + ctypes.byref(ctypes.c_double(thresh)), + ctypes.byref(ctypes.c_int(isd)), + ctypes.byref(ctypes.c_int(intr)), + ctypes.byref(ctypes.c_int(maxit)), + ctypes.byref(ctypes.c_int(kopt)), + ctypes.byref(lmu_r), + a0_r, + ca_r, + ia_r, + nin_r, + ctypes.byref(dev0_r), + dev_r, + alm_r, + ctypes.byref(nlp_r), + ctypes.byref(jerr_r) + ) + + # ################################### + # post process results + # ################################### + + # check for error + if (jerr_r.value > 0): + raise ValueError("Fatal glmnet error in library call : error code = ", jerr_r.value) + elif (jerr_r.value < 0): + print("Warning: Non-fatal error in glmnet library call: error code = ", jerr_r.value) + print("Check results for accuracy. Partial or no results returned.") + + # clip output to correct sizes + lmu = lmu_r.value + if nc == 1: + a0 = a0[0:lmu] + ca = ca[0:nx, 0:lmu] + else: + a0 = a0[0:nc, 0:lmu] + ca = ca[0:nx, 0:nc, 0:lmu] + ia = ia[0:nx] + nin = nin[0:lmu] + dev = dev[0:lmu] + alm = alm[0:lmu] + + # ninmax + ninmax = max(nin) + # fix first value of alm (from inf to correct value) + if ulam[0] == 0.0: + t1 = np.log(alm[1]) + t2 = np.log(alm[2]) + alm[0] = np.exp(2*t1 - t2) + # create return fit dictionary + + if family == 'multinomial': + a0 = a0 - np.tile(np.mean(a0), (nc, 1)) + dfmat = a0.copy() + dd = np.array([nvars, lmu], dtype = np.integer) + beta_list = list() + if ninmax > 0: + # TODO: is the reshape here done right? 
+ ca = np.reshape(ca, (nx, nc, lmu)) + ca = ca[0:ninmax, :, :] + ja = ia[0:ninmax] - 1 # ia is 1-indexed in fortran + oja = np.argsort(ja) + ja1 = ja[oja] + df = np.any(np.absolute(ca) > 0, axis=1) + df = np.sum(df) + df = np.reshape(df, (1, df.size)) + for k in range(0, nc): + ca1 = np.reshape(ca[:,k,:], (ninmax, lmu)) + cak = ca1[oja,:] + dfmat[k, :] = np.sum(np.absolute(cak) > 0, axis = 0) + beta = np.zeros([nvars, lmu], dtype = np.float64) + beta[ja1, :] = cak + beta_list.append(beta) + else: + for k in range(0, nc): + dfmat[k, :] = np.zeros([1, lmu], dtype = np.float64) + beta_list.append(np.zeros([nvars, lmu], dtype = np.float64)) + # + df = np.zeros([1, lmu], dtype = np.float64) + # + if kopt == 2: + grouped = True + else: + grouped = False + # + fit = dict() + fit['a0'] = a0 + fit['label'] = classes + fit['beta'] = beta_list + fit['dev'] = dev + fit['nulldev'] = dev0_r.value + fit['dfmat']= dfmat + fit['df'] = df + fit['lambdau'] = alm + fit['npasses'] = nlp_r.value + fit['jerr'] = jerr_r.value + fit['dim'] = dd + fit['grouped'] = grouped + fit['offset'] = is_offset + fit['class'] = 'multnet' + else: + dd = np.array([nvars, lmu], dtype = np.integer) + if ninmax > 0: + ca = ca[0:ninmax,:]; + df = np.sum(np.absolute(ca) > 0, axis = 0); + ja = ia[0:ninmax] - 1; # ia is 1-indexes in fortran + oja = np.argsort(ja) + ja1 = ja[oja] + beta = np.zeros([nvars, lmu], dtype = np.float64); + beta[ja1, :] = ca[oja, :]; + else: + beta = np.zeros([nvars,lmu], dtype = np.float64); + df = np.zeros([1,lmu], dtype = np.float64); + # + fit = dict() + fit['a0'] = a0 + fit['label'] = classes + fit['beta'] = beta + fit['dev'] = dev + fit['nulldev'] = dev0_r.value + fit['df'] = df + fit['lambdau'] = alm + fit['npasses'] = nlp_r.value + fit['jerr'] = jerr_r.value + fit['dim'] = dd + fit['offset'] = is_offset + fit['class'] = 'lognet' + + + # ################################### + # return to caller + # ################################### + + return fit 
# -*- coding: utf-8 -*-
"""
Internal function called by glmnet for the 'mgaussian' (multi-response
Gaussian) family. See also glmnet, cvglmnet.
"""
# import packages/methods
import numpy as np
import ctypes
from wtmean import wtmean
from loadGlmLib import loadGlmLib

def mrelnet(x, is_sparse, irs, pcs, y, weights, offset, parm,
            nobs, nvars, jd, vp, cl, ne, nx, nlam, flmin, ulam,
            thresh, isd, jsd, intr, maxit, family):
    """Fit a multi-response Gaussian elastic-net path.

    Thin ctypes wrapper around the compiled glmnet Fortran routines
    (multelnet_ / multspelnet_). Arguments mirror the Fortran interface;
    see glmnet() for their meaning. Returns a fit dictionary with keys
    a0, beta, dev, nulldev, df, lambdau, npasses, jerr, dim, offset,
    class (and dfmat when y has more than one response column).

    Bug fixes vs. the previous version:
      * nr == 1 path used `fit` before it was created (NameError).
      * nr == 1 path left `ca` 3-D, so `beta[ja1, :] = ca[oja, :]`
        shape-mismatched; `ca` is now flattened to (nx, lmu) first.
      * `dtype=np.integer` (abstract dtype, rejected by modern NumPy)
        replaced with the concrete np.int64.
    """
    # load shared fortran library
    glmlib = loadGlmLib()

    # number of response columns and weighted null deviance of y
    nr = y.shape[1]
    wym = wtmean(y, weights)
    wym = np.reshape(wym, (1, wym.size))
    yt2 = (y - np.tile(wym, (y.shape[0], 1)))**2
    nulldev = np.sum(wtmean(yt2, weights)*np.sum(weights))

    if len(offset) == 0:
        offset = y*0
        is_offset = False
    else:
        if offset.shape != y.shape:
            raise ValueError('Offset must match dimension of y')
        is_offset = True
    # the Fortran routine fits to the offset-adjusted response
    y = y - offset

    ######################################
    # --------- PROCESS INPUTS -----------
    ######################################
    # force inputs into fortran order and float64/int32, as the
    # Fortran library expects; copy only if a conversion is needed
    copyFlag = False
    x = x.astype(dtype = np.float64, order = 'F', copy = copyFlag)
    irs = irs.astype(dtype = np.int32, order = 'F', copy = copyFlag)
    pcs = pcs.astype(dtype = np.int32, order = 'F', copy = copyFlag)
    y = y.astype(dtype = np.float64, order = 'F', copy = copyFlag)
    weights = weights.astype(dtype = np.float64, order = 'F', copy = copyFlag)
    jd = jd.astype(dtype = np.int32, order = 'F', copy = copyFlag)
    vp = vp.astype(dtype = np.float64, order = 'F', copy = copyFlag)
    cl = cl.astype(dtype = np.float64, order = 'F', copy = copyFlag)
    ulam = ulam.astype(dtype = np.float64, order = 'F', copy = copyFlag)

    ######################################
    # --------- ALLOCATE OUTPUTS ---------
    ######################################
    # lmu: number of lambda values actually computed
    lmu = -1
    lmu_r = ctypes.c_int(lmu)
    # a0: intercepts, one row per response
    a0 = np.zeros([nr, nlam], dtype = np.float64)
    a0 = a0.astype(dtype = np.float64, order = 'F', copy = False)
    a0_r = a0.ctypes.data_as(ctypes.POINTER(ctypes.c_double))
    # ca: compressed coefficient array
    ca = np.zeros([nx, nr, nlam], dtype = np.float64)
    ca = ca.astype(dtype = np.float64, order = 'F', copy = False)
    ca_r = ca.ctypes.data_as(ctypes.POINTER(ctypes.c_double))
    # ia: 1-based indices of selected variables
    ia = -1*np.ones([nx], dtype = np.int32)
    ia = ia.astype(dtype = np.int32, order = 'F', copy = False)
    ia_r = ia.ctypes.data_as(ctypes.POINTER(ctypes.c_int))
    # nin: number of selected variables per lambda
    nin = -1*np.ones([nlam], dtype = np.int32)
    nin = nin.astype(dtype = np.int32, order = 'F', copy = False)
    nin_r = nin.ctypes.data_as(ctypes.POINTER(ctypes.c_int))
    # rsq: fraction of deviance explained per lambda
    rsq = -1*np.ones([nlam], dtype = np.float64)
    rsq = rsq.astype(dtype = np.float64, order = 'F', copy = False)
    rsq_r = rsq.ctypes.data_as(ctypes.POINTER(ctypes.c_double))
    # alm: lambda sequence actually used
    alm = -1*np.ones([nlam], dtype = np.float64)
    alm = alm.astype(dtype = np.float64, order = 'F', copy = False)
    alm_r = alm.ctypes.data_as(ctypes.POINTER(ctypes.c_double))
    # nlp: total passes over the data
    nlp = -1
    nlp_r = ctypes.c_int(nlp)
    # jerr: error flag from the library
    jerr = -1
    jerr_r = ctypes.c_int(jerr)

    # ###################################
    #   main glmnet fortran caller
    # ###################################
    if is_sparse:
        # sparse multi-response elastic net
        glmlib.multspelnet_(
            ctypes.byref(ctypes.c_double(parm)),
            ctypes.byref(ctypes.c_int(nobs)),
            ctypes.byref(ctypes.c_int(nvars)),
            ctypes.byref(ctypes.c_int(nr)),
            x.ctypes.data_as(ctypes.POINTER(ctypes.c_double)),
            pcs.ctypes.data_as(ctypes.POINTER(ctypes.c_int)),
            irs.ctypes.data_as(ctypes.POINTER(ctypes.c_int)),
            y.ctypes.data_as(ctypes.POINTER(ctypes.c_double)),
            weights.ctypes.data_as(ctypes.POINTER(ctypes.c_double)),
            jd.ctypes.data_as(ctypes.POINTER(ctypes.c_int)),
            vp.ctypes.data_as(ctypes.POINTER(ctypes.c_double)),
            cl.ctypes.data_as(ctypes.POINTER(ctypes.c_double)),
            ctypes.byref(ctypes.c_int(ne)),
            ctypes.byref(ctypes.c_int(nx)),
            ctypes.byref(ctypes.c_int(nlam)),
            ctypes.byref(ctypes.c_double(flmin)),
            ulam.ctypes.data_as(ctypes.POINTER(ctypes.c_double)),
            ctypes.byref(ctypes.c_double(thresh)),
            ctypes.byref(ctypes.c_int(isd)),
            ctypes.byref(ctypes.c_int(jsd)),
            ctypes.byref(ctypes.c_int(intr)),
            ctypes.byref(ctypes.c_int(maxit)),
            ctypes.byref(lmu_r),
            a0_r,
            ca_r,
            ia_r,
            nin_r,
            rsq_r,
            alm_r,
            ctypes.byref(nlp_r),
            ctypes.byref(jerr_r)
            )
    else:
        # dense multi-response elastic net
        glmlib.multelnet_(
            ctypes.byref(ctypes.c_double(parm)),
            ctypes.byref(ctypes.c_int(nobs)),
            ctypes.byref(ctypes.c_int(nvars)),
            ctypes.byref(ctypes.c_int(nr)),
            x.ctypes.data_as(ctypes.POINTER(ctypes.c_double)),
            y.ctypes.data_as(ctypes.POINTER(ctypes.c_double)),
            weights.ctypes.data_as(ctypes.POINTER(ctypes.c_double)),
            jd.ctypes.data_as(ctypes.POINTER(ctypes.c_int)),
            vp.ctypes.data_as(ctypes.POINTER(ctypes.c_double)),
            cl.ctypes.data_as(ctypes.POINTER(ctypes.c_double)),
            ctypes.byref(ctypes.c_int(ne)),
            ctypes.byref(ctypes.c_int(nx)),
            ctypes.byref(ctypes.c_int(nlam)),
            ctypes.byref(ctypes.c_double(flmin)),
            ulam.ctypes.data_as(ctypes.POINTER(ctypes.c_double)),
            ctypes.byref(ctypes.c_double(thresh)),
            ctypes.byref(ctypes.c_int(isd)),
            ctypes.byref(ctypes.c_int(jsd)),
            ctypes.byref(ctypes.c_int(intr)),
            ctypes.byref(ctypes.c_int(maxit)),
            ctypes.byref(lmu_r),
            a0_r,
            ca_r,
            ia_r,
            nin_r,
            rsq_r,
            alm_r,
            ctypes.byref(nlp_r),
            ctypes.byref(jerr_r)
            )

    # ###################################
    #   post process results
    # ###################################

    # check for error: positive codes are fatal, negative are warnings
    if (jerr_r.value > 0):
        raise ValueError("Fatal glmnet error in library call : error code = ", jerr_r.value)
    elif (jerr_r.value < 0):
        print("Warning: Non-fatal error in glmnet library call: error code = ", jerr_r.value)
        print("Check results for accuracy. Partial or no results returned.")

    # clip outputs to the lmu lambdas actually computed
    lmu = lmu_r.value
    a0 = a0[0:nr, 0:lmu]
    ca = ca[0:nx, 0:nr, 0:lmu]
    ia = ia[0:nx]
    nin = nin[0:lmu]
    rsq = rsq[0:lmu]
    alm = alm[0:lmu]

    # largest number of active variables along the path
    ninmax = max(nin)
    # fix first value of alm (from inf to a value extrapolated on log scale)
    if ulam[0] == 0.0:
        t1 = np.log(alm[1])
        t2 = np.log(alm[2])
        alm[0] = np.exp(2*t1 - t2)

    # create the fit dictionary up front so both branches can populate it
    # (bug fix: the nr == 1 branch previously assigned into an undefined fit)
    fit = dict()
    # np.int64 instead of the abstract np.integer (removed in modern NumPy)
    dd = np.array([nvars, lmu], dtype = np.int64)
    if nr > 1:
        dfmat = a0.copy()
        beta_list = list()
        if ninmax > 0:
            ca = np.reshape(ca, (nx, nr, lmu))
            ca = ca[0:ninmax, :, :]
            ja = ia[0:ninmax] - 1    # ia is 1-indexed in fortran
            oja = np.argsort(ja)
            ja1 = ja[oja]
            # a variable is "active" if any of its nr coefficients is nonzero
            df = np.any(np.absolute(ca) > 0, axis=1)
            df = np.sum(df, axis = 0)
            df = np.reshape(df, (1, df.size))
            for k in range(0, nr):
                ca1 = np.reshape(ca[:, k, :], (ninmax, lmu))
                cak = ca1[oja, :]
                dfmat[k, :] = np.sum(np.absolute(cak) > 0, axis = 0)
                beta = np.zeros([nvars, lmu], dtype = np.float64)
                beta[ja1, :] = cak
                beta_list.append(beta)
        else:
            for k in range(0, nr):
                dfmat[k, :] = np.zeros([1, lmu], dtype = np.float64)
                beta_list.append(np.zeros([nvars, lmu], dtype = np.float64))
            df = np.zeros([1, lmu], dtype = np.float64)
        fit['beta'] = beta_list
        fit['dfmat'] = dfmat
    else:
        # single response: drop the singleton response axis so the
        # 2-D slicing/assignment below is well-formed (bug fix)
        ca = np.reshape(ca, (nx, lmu))
        if ninmax > 0:
            ca = ca[0:ninmax, :]
            df = np.sum(np.absolute(ca) > 0, axis = 0)
            ja = ia[0:ninmax] - 1    # ia is 1-indexed in fortran
            oja = np.argsort(ja)
            ja1 = ja[oja]
            beta = np.zeros([nvars, lmu], dtype = np.float64)
            beta[ja1, :] = ca[oja, :]
        else:
            beta = np.zeros([nvars, lmu], dtype = np.float64)
            df = np.zeros([1, lmu], dtype = np.float64)
        fit['beta'] = beta

    fit['a0'] = a0
    fit['dev'] = rsq
    fit['nulldev'] = nulldev
    fit['df'] = df
    fit['lambdau'] = alm
    fit['npasses'] = nlp_r.value
    fit['jerr'] = jerr_r.value
    fit['dim'] = dd
    fit['offset'] = is_offset
    fit['class'] = 'mrelnet'

    # ###################################
    #   return to caller
    # ###################################
    return fit
#-----------------------------------------
# end of method mrelnet
#-----------------------------------------
# -*- coding: utf-8 -*-
"""
Calculate the nan-removed weighted mean of each column of a 2-D array.
The mean is computed in the axis=0 direction along each column.

INPUT ARGUMENTS:
---------------
    mat:     a 2D array of size N x K
    weights: a 2D array of size N x 1 or a 1-D array of size N

OUTPUT ARGUMENTS:
----------------
    returns the nan-removed weighted mean as a 1D array of size K
"""
import numpy as np

def wtmean(mat, weights):
    """Weighted column means of mat, skipping non-finite entries.

    Bug fix: the previous version zeroed non-finite entries of ``mat``
    in place, silently mutating the caller's array. The masking is now
    done on a copy (via np.where) so the input is left untouched.
    """
    # accept 1-D weights by promoting to an N x 1 column
    if len(weights.shape) == 1:
        weights = np.reshape(weights, [np.size(weights), 1])
    # per-entry weight, zeroed where mat is not finite
    wmat = isfinite(mat)*weights
    # zero non-finite entries on a copy so the caller's array is untouched
    mat = np.where(isnan(mat), 0, mat)
    swmat = mat*wmat
    # numerator excludes rows with zero weight (they contribute 0 anyway)
    tf = weights != 0
    tf = tf[:, 0]
    y = np.sum(swmat[tf, :], axis = 0)/np.sum(wmat, axis = 0)
    return y
# end of wtmean

def isnan(x):
    # NOTE: treats +/-inf as "nan" too (anything non-finite)
    return ~np.isfinite(x)
# end of isnan

def isfinite(x):
    return np.isfinite(x)
# end of isfinite
It can also fit multi-response linear regression.\n", + "\n", + "The authors of glmnet are Jerome Friedman, Trevor Hastie, Rob Tibshirani and Noah Simon. The Python package is maintained by B. J. Balakumar. The R package is maintained by Trevor Hastie. The matlab version of glmnet is maintained by Junyang Qian. This vignette describes the usage of glmnet in Python.\n", + "\n", + "`glmnet` solves the following problem:\n", + "$$\n", + " \\min_{\\beta_0, \\beta}\\frac{1}{N} \\sum_{i=1}^N w_i l(y_i, \\beta_0+ \\beta^T x_i)^2+\\lambda \\left[ (1-\\alpha)||\\beta||_2^2/2 + \\alpha||\\beta||_1\\right],\n", + "$$\n", + "\n", + "over a grid of values of $\\lambda$ covering the entire range. Here $l(y, \\eta)$ is the negative log-likelihood contribution for observation $i$; e.g. for the Gaussian case it is $\\frac{1}{2} l(y-\\eta)^2$. The elastic-net penalty is controlled by $\\alpha$, and bridges the gap between lasso ($\\alpha=1$, the default) and ridge ($\\alpha=0$). The tuning parameter $\\lambda$ controls the overall strength of the penalty." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "It is known that the ridge penalty shrinks the coefficients of correlated predictors towards each other while the lasso tends to pick one of them and discard the others. The elastic-net penalty mixes these two; if predictors are correlated in groups, an $\\alpha=0.5$ tends to select the groups in or out together. This is a higher level parameter, and users might pick a value upfront, else experiment with a few different values. 
One use of $\\alpha$ is for numerical stability; for example, the elastic net with $\\alpha = 1-\\varepsilon$ for some small $\\varepsilon>0$ performs much like the lasso, but removes any degeneracies and wild behavior caused by extreme correlations.\n", + "\n", + "The glmnet algorithms use cyclical coordinate descent, which successively optimizes the objective function over each parameter with others fixed, and cycles repeatedly until convergence. The package also makes use of the strong rules for efficient restriction of the active set. Due to highly efficient updates and techniques such as warm starts and active-set convergence, our algorithms can compute the solution path very fast.\n", + "\n", + "The code can handle sparse input-matrix formats, as well as range constraints on coefficients. The core of glmnet is a set of fortran subroutines, which make for very fast execution.\n", + "\n", + "The package also includes methods for prediction and plotting, and a function that performs K-fold cross-validation." 
+ ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Installation\n", + "\n", + "### Using pip (recommended, courtesy: Han Fan)\n", + "\n", + "    pip install glmnet_py\n", + "\n", + "### Compiled from source\n", + "\n", + "    git clone https://github.com/bbalasub1/glmnet_python.git\n", + "    cd glmnet_python\n", + "    python setup.py install\n", + "\n", + "### Requirement\n", + "Python 3, Linux\n", + "\n", + "Currently, the checked-in version of GLMnet.so is compiled for the following config:\n", + "\n", + " **Linux:** Linux version 2.6.32-573.26.1.el6.x86_64 (gcc version 4.4.7 20120313 (Red Hat 4.4.7-16) (GCC) ) \n", + " **OS:** CentOS 6.7 (Final) \n", + " **Hardware:** 8-core Intel(R) Core(TM) i7-2630QM \n", + " **gfortran:** version 4.4.7 20120313 (Red Hat 4.4.7-17) (GCC)\n", + "\n", + "\n", + "## Usage\n", + "    import glmnet_python\n", + "    from glmnet import glmnet\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Linear Regression\n", + "\n", + "Linear regression here refers to two families of models. One is `gaussian`, the Gaussian family, and the other is `mgaussian`, the multiresponse Gaussian family. We first discuss the ordinary Gaussian and the multiresponse one after that.\n", + "\n", + "### Linear Regression - Gaussian family\n", + "\n", + "`gaussian` is the default family option in the function `glmnet`. Suppose we have observations $x_i \in \mathbb{R}^p$ and the responses $y_i \in \mathbb{R}, i = 1, \ldots, N$. 
The objective function for the Gaussian family is\n", + "\n", + "$$\n", + "\\min_{(\\beta_0, \\beta) \\in \\mathbb{R}^{p+1}}\\frac{1}{2N} \\sum_{i=1}^N (y_i -\\beta_0-x_i^T \\beta)^2+\\lambda \\left[ (1-\\alpha)||\\beta||_2^2/2 + \\alpha||\\beta||_1\\right],\n", + "$$\n", + "\n", + "where \n", + "\n", + "$\\lambda \\geq 0$ is a complexity parameter and $0 \\leq \\alpha \\leq 1$ is a compromise between ridge ($\\alpha = 0$) and lasso ($\\alpha = 1$).\n", + "\n", + "Coordinate descent is applied to solve the problem. Specifically, suppose we have current estimates $\\tilde{\\beta_0}$ and $\\tilde{\\beta}_\\ell$ $\\forall j\\in 1,\\ldots,p$. By computing the gradient at $\\beta_j = \\tilde{\\beta}_j$ and simple calculus, the update is\n", + "$$\n", + "\\tilde{\\beta}_j \\leftarrow \\frac{S(\\frac{1}{N}\\sum_{i=1}^N x_{ij}(y_i-\\tilde{y}_i^{(j)}),\\lambda \\alpha)}{1+\\lambda(1-\\alpha)},\n", + "$$\n", + "\n", + "where \n", + "\n", + "$\\tilde{y}_i^{(j)} = \\tilde{\\beta}_0 + \\sum_{\\ell \\neq j} x_{i\\ell} \\tilde{\\beta}_\\ell$, and $S(z, \\gamma)$ is the soft-thresholding operator with value $\\text{sign}(z)(|z|-\\gamma)_+$.\n", + "\n", + "This formula above applies when the `x` variables are standardized to have unit variance (the default); it is slightly more complicated when they are not. Note that for \"family=gaussian\", `glmnet` standardizes $y$ to have unit variance before computing its lambda sequence (and then unstandardizes the resulting coefficients); if you wish to reproduce/compare results with other software, best to supply a standardized $y$ first (Using the \"1/N\" variance formula)." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "`glmnet` provides various options for users to customize the fit. We introduce some commonly used options here and they can be specified in the `glmnet` function.\n", + "\n", + "* `alpha` is for the elastic-net mixing parameter $\\alpha$, with range $\\alpha \\in [0,1]$. 
$\\alpha = 1$ is the lasso (default) and $\\alpha = 0$ is the ridge.\n", + "\n", + "* `weights` is for the observation weights. Default is 1 for each observation. (Note: `glmnet` rescales the weights to sum to N, the sample size.)\n", + "\n", + "* `nlambda` is the number of $\\lambda$ values in the sequence. Default is 100.\n", + "\n", + "* `lambda` can be provided, but is typically not and the program constructs a sequence. When automatically generated, the $\\lambda$ sequence is determined by `lambda.max` and `lambda.min.ratio`. The latter is the ratio of smallest value of the generated $\\lambda$ sequence (say `lambda.min`) to `lambda.max`. The program then generated `nlambda` values linear on the log scale from `lambda.max` down to `lambda.min`. `lambda.max` is not given, but easily computed from the input $x$ and $y$; it is the smallest value for `lambda` such that all the coefficients are zero. For `alpha=0` (ridge) `lambda.max` would be $\\infty$; hence for this case we pick a value corresponding to a small value for `alpha` close to zero.)\n", + "\n", + "* `standardize` is a logical flag for `x` variable standardization, prior to fitting the model sequence. The coefficients are always returned on the original scale. Default is `standardize=TRUE`.\n", + "\n", + "For more information, type `help(glmnet)` or simply `?glmnet`. 
Let us start by loading the data:" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": { + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } + }, + "outputs": [ + { + "data": { + "text/html": [ + "" + ], + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "# Jupyter setup to expand cell display to 100% width on your screen (optional)\n", + "from IPython.core.display import display, HTML\n", + "display(HTML(\"\"))" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": { + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } + }, + "outputs": [], + "source": [ + "# Import relevant modules and setup for calling glmnet\n", + "%reset -f\n", + "%matplotlib inline\n", + "\n", + "import sys\n", + "sys.path.append('../test')\n", + "sys.path.append('../lib')\n", + "import numpy as np\n", + "import importlib, pprint, matplotlib.pyplot as plt, warnings\n", + "from glmnet import glmnet; from glmnetPlot import glmnetPlot \n", + "from glmnetPrint import glmnetPrint; from glmnetCoef import glmnetCoef; from glmnetPredict import glmnetPredict\n", + "from cvglmnet import cvglmnet; from cvglmnetCoef import cvglmnetCoef\n", + "from cvglmnetPlot import cvglmnetPlot; from cvglmnetPredict import cvglmnetPredict\n", + "\n", + "# parameters\n", + "baseDataDir= '../data/'\n", + "\n", + "# load data\n", + "x = np.loadtxt(baseDataDir + 'QuickStartExampleX.dat', dtype = np.float64)\n", + "y = np.loadtxt(baseDataDir + 'QuickStartExampleY.dat', dtype = np.float64)\n", + "\n", + "# create weights\n", + "t = np.ones((50, 1), dtype = np.float64)\n", + "wts = np.row_stack((t, 2*t))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "As an example, we set $\\alpha = 0.2$ (more like a ridge regression), and give double weights to the latter half of the observations. To avoid too long a display here, we set `nlambda` to 20. 
In practice, however, the number of values of $\\lambda$ is recommended to be 100 (default) or more. In most cases, it does not come with extra cost because of the warm-starts used in the algorithm, and for nonlinear models leads to better convergence properties." + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": { + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } + }, + "outputs": [], + "source": [ + "# call glmnet\n", + "fit = glmnet(x = x.copy(), y = y.copy(), family = 'gaussian', \\\n", + " weights = wts, \\\n", + " alpha = 0.2, nlambda = 20\n", + " )" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "We can then print the `glmnet` object." + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": { + "collapsed": false, + "jupyter": { + "outputs_hidden": false + }, + "scrolled": true + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\t df \t %dev \t lambdau\n", + "\n", + "0 \t 0.000000 \t 0.000000 \t 7.939020\n", + "1 \t 4.000000 \t 0.178852 \t 4.889231\n", + "2 \t 7.000000 \t 0.444488 \t 3.011024\n", + "3 \t 7.000000 \t 0.656716 \t 1.854334\n", + "4 \t 8.000000 \t 0.784984 \t 1.141988\n", + "5 \t 9.000000 \t 0.853935 \t 0.703291\n", + "6 \t 10.000000 \t 0.886693 \t 0.433121\n", + "7 \t 11.000000 \t 0.902462 \t 0.266737\n", + "8 \t 14.000000 \t 0.910135 \t 0.164269\n", + "9 \t 17.000000 \t 0.913833 \t 0.101165\n", + "10 \t 17.000000 \t 0.915417 \t 0.062302\n", + "11 \t 17.000000 \t 0.916037 \t 0.038369\n", + "12 \t 19.000000 \t 0.916299 \t 0.023629\n", + "13 \t 20.000000 \t 0.916405 \t 0.014552\n", + "14 \t 20.000000 \t 0.916447 \t 0.008962\n", + "15 \t 20.000000 \t 0.916463 \t 0.005519\n", + "16 \t 20.000000 \t 0.916469 \t 0.003399\n" + ] + } + ], + "source": [ + "glmnetPrint(fit)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "This displays the call that produced the object `fit` and a three-column matrix with 
columns `Df` (the number of nonzero coefficients), `%dev` (the percent deviance explained) and `Lambda` (the corresponding value of $\\lambda$).\n", + "\n", + "(Note that the `digits` option can used to specify significant digits in the printout.)\n", + "\n", + "Here the actual number of $\\lambda$'s here is less than specified in the call. The reason lies in the stopping criteria of the algorithm. According to the default internal settings, the computations stop if either the fractional change in deviance down the path is less than $10^{-5}$ or the fraction of explained deviance reaches $0.999$. From the last few lines , we see the fraction of deviance does not change much and therefore the computation ends when meeting the stopping criteria. We can change such internal parameters. For details, see the Appendix section or type `help(glmnet.control)`.\n", + "\n", + "We can plot the fitted object as in the previous section. There are more options in the `plot` function.\n", + "\n", + "Users can decide what is on the X-axis. `xvar` allows three measures: \"norm\" for the $\\ell_1$-norm of the coefficients (default), \"lambda\" for the log-lambda value and \"dev\" for %deviance explained.\n", + "\n", + "Users can also label the curves with variable sequence numbers simply by setting `label = TRUE`. Let's plot \"fit\" against the log-lambda value and with each curve labeled." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": { + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } + }, + "outputs": [ + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYsAAAElCAYAAAAV9s4VAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzsnXd4XNW1t999pqg3S65yN7ZkjAs2BmyaMDh0SDAkgcAX\n00KoJo2QwA1cICHJvRAgEAjl0sK9ECChJKFjgUOxwcbdkotcJRdJtiyrjWbmrO+PfWY0kkbNljQj\neb9+9rPrnFlzND6/2W1tJSIYDAaDwdAeVqwNMBgMBkP8Y8TCYDAYDB1ixMJgMBgMHWLEwmAwGAwd\nYsTCYDAYDB1ixMJgMBgMHWLEwhDXKKWCSqllSqnVSqmvlVI/VkqpWNt1MCilblZKrVVKvdCi/BSl\nVJXzOb9WSr3XQ+//jFLqwp64tqH/4461AQZDB9SKyHQApVQO8H9AOnDXoV5YKWWJiH2o1+kC1wGn\niUhZlLpPROT8tl6olHKJSLDnTDMY2sf0LAx9BhGpAH4A3Aj6Ya+U+r1SarFSarlS6hqnXCml/uT8\nin9XKfXP0C9qpdRmpdRvlVJfARcppcYqpd5WSn2plPpYKTXBaZejlHrVufZipdQsp/wU59f/MqXU\nUqVUSks7nd7PKqXUSqXUzU7ZY8BY4G2l1IIoH69Vb8npCTymlPoC+J1SKlkp9bRS6gvnvc9v7z44\ndY8opdY5vZVBEeWnOZ9hhVLqKaWUJ+L+/Mb5jEuUUkcrpd5RSm1QSl3b9b+aod8gIiaYELcBqI5S\nthcYCFwD/NIp8wJfAqOAecA/nPLBTvsLnfxm4KcR1/oAGOekjwU+dNIvArOd9AhgrZN+E5jlpJMB\nq4Vt04EVQCKQAqwGpjp1JUBWlM9zClAFLHPCL5zyZ4A3I9r9GrjUSWcAxUBSO/fhW8C7TvlQYB9w\nIZAAbIv43M8BN0fcnx846QeA5c7nzAF2xfr7YELsghmGMvRlvgFMVkpd7OTTgfHAicArACKyWym1\nsMXrXgZwegWzgVci5kE8Tnw6MDGiPFUplQx8CvxBKfUi8DcRKW1x7ROBv4tIg/MefwNOQguIIkoP\nwqGtYahXWnze85RSP3PyXmBkO/fhZPSwHSKyUyn1oVOfB5SIyCYn/xxwPfCwk3/LiVcBKSJSB9Qp\npRqUUukiUt3GZzD0Y4xYGPoUSqmxQFBEyp0H+U0i8n6LNud0cJlaJ7aAfeLMibR8K+A4EfG3KP+d\nUuofwDnAp0qpb4jI+q5/kk5T2yI/T0Q2NDO08/dBtZFuic+J7Yg0gGCeGYctZs7CEO+EH2pKqYHA\nY8AfnaJ3geuVUm6nfnzEr/+LnLmLwUBBtAuLyAFgs1Lqooj3mOIk3wMWRJRPdeKxIrJGRH6PHu7J\nb3HZRcA3lVKJTs/lW8AnB/XJW/MucHOETdMiyqPdh0+A7zhzGkOBU532xcAoR3gBLgcKu8lGQz/F\n/EowxDuJSqll6CEXP/C8iPzBqXsKGA0sc35d7wG+CbwGzAHWANuBpcB+5zUt3Sx/D3hcKXUH+v/D\nS8BKtFA8qpRaAbjQD97rgVuUUqcCQef6b0deTES+Vko9ixYSAZ4QkZVtvHdHtGx/L/CgUmolWkQ3\nA+e3dR9E5O9KqdB92AZ85tjoU0pdAbyqlHI5tv65EzYaF9WHMUrE/P0N/Q+lVIqI1CqlBgCLgRNE\nZE+s7TIY+iqmZ2Hor/xDKZWJnrC+2wiFwXBomJ6FwWAwGDrETHAbDAaDoUOMWBgMBoOhQ4xYdBKl\n1HCl1EdKqTW
OK4eQG4cspdR7Sqlix7VERqxtBXDcQux2Vs6EyqYopT5zXDy8oZRKjaWNIdqw9SXH\nHcUyxwXFsljaGCKarRF1P1FK2c6kelzQxr2Ny+9sJEqpHyntPHKlUupFpZQ31ja1hVJqgmpyAfO1\nUmp/6PkQjyilzlRKFSml1iulft7Z1xmx6DwB4MciMgmYBdyglMoHbgM+EJE84CPgFzG0MZJngDNa\nlD0F3CoiU4G/A7f2ulXRaWWriHxXRKY7G+ZeA/4WE8taE+2+opQaDswFtva6Re0Tzd54/c4CoJQa\nBtwETBeRKeiFON+NrVVtIyLrReRo57s6A72R8u8xNisqSikLeAT9nZgEXOI8xzrEiEUnEZFdIrLc\nSdcA64DhwAVodwk48TdjY2FzROTfaF9AkYx3ykH7RJrXu1ZFpw1bI/k2jtuKWNOOrX8AfhalPKa0\nYW9cfmdb4AJSnI2GyUA0T73xyOnAJhHZHmtD2uBYYIOIbHW8E7yE/j50iBGLg0ApNRqYBnwBDBaR\n3aAFhQjPnnHIGuV4KkU/gIfH0pjOoJQ6Ce3AblOHjWOEc0+3i8iqWNvSSQbF83dWtAv3+9EbCUuB\nKhH5ILZWdZrvECc/bNogF71RNcQOp6xDjFh0EWec/1VggdPDaLn2OJ7XIl+JHj77Eu0RtTHG9nSG\nS4jj/3xKqSTgl8CdkcUxMudgiavvrLM/5gK059xhaCeOl8bWqo5R2s37+TR3/thvMGLRBZwu8avA\nCyLyhlO82/E/hFJqCNrVQlzijK2eISIz0d3PuP21DvrAH7RL7ZdjbUs7jEO72lihlNqM7q0tVUrF\n1a/1FsT7d/Z0tFfcvaIPfPob2jtwvHMWsFREymNtSDuUoj0VhxjulHWIEYuu8T/ocw0eiih7E5jv\npL8PvNHyRTGkmUtsxxFfaJLrDuDxGNkVjWjuu+cC6yT6yXKxJGyriKwWkSEiMlZExqC79UfH2Y7x\nlvc2nr+zoIefjnecMSrgNPQcYbwT171ghy+BI5RSo5wVZt9Ffx86xIhFJ1FKnYB2OjcnYpncmcDv\ngLlKqWL0l/q3sbQzhFLqf9GO4yYopbY5juMucexcC5SKyLOxtDFEG7ZCHI7/tmNrCCGOhqHasPe3\nxOF3NoSILEH34L+m6RyQJ2JqVAc4Xn5PJ35W7UXF6andiPaqvAZ4SUQ6JcTG3YfBYDAYOiTmPYv2\nNjk59acopaoiNmjd0ds2GgwGw+FOPHidfQZ9mM3z7bRp68hJg8FgMPQCMe9ZdGJDFsTRGLDBYDAc\njsRcLDrJLKXUcqXUP5VSR8baGIPBYDjciIdhqI5YCowUkTql1FnA68CEGNtkMBgMhxVxLxbOLulQ\n+m2l1J+UUgNEZG/Ltkops7TLYDAYuoiIdDjUHy/DUNE2ZOkKZ6epkz4Wvdy3lVCEEJGo4e233yYv\nL4/x48fz29/+ts12XQ133nlnt12rp4Ox1dhrbDW2trS1s8S8Z+FsGioAspVS29A+dryAiMgTwEVK\nqesAP1CP3qjVJWzb5sYbb+TDDz9k2LBhzJw5kwsuuID8/E555m2XLVu2HPI1egtja8/Rl+w1tvYM\n/d3WmIuFiLTrIExEHgUePZT3WLJkCePHj2fUqFEAfPe73+WNN97oFrEwGAyGw4F4GYbqUUpLSxkx\nYkQ4P3z4cEpLO+U7q0Pmz5/fLdfpDYytPUdfstfY2jP0d1sPC7HoSQoKCmJtQqcxtvYcfcleY2vP\n0N9tPSzEIjc3l23btoXzO3bsIDe3U+d9dEhhYWG3XKc3MLb2HH3JXmNrz9DfbT0sxGLmzJls3LiR\nrVu30tjYyEsvvcT55xvvIQaDwdBZ+pXXWaWUtPV53nnnHRYsWIBt25x//lW89dZtKEWzYFm0Kuuo\n3rKaykPptkJn2rhc4Hbr+GCC262Dx9O10PI1Xi8kJurgcvXyH9JgMPQaSimkE
/ssDhuxiMTngy1b\nQKR1sO3o5W3Vh/K23TodLbRVHww2xcEgBAJN6a6EQEAHv7/rIfJ1jY36PtXXa7EICUdXQ1ISpKa2\nHdLSmtKJiVpQDQZD72HEopcoLCzsMxNbB2OriBaRhoa2Q319+3W1tVBT0zwcONC6rLGxSTgsq5BB\ngwqaiUl2tg45OdHj1NTYiU1//x7ECmNrzxBpa2fFIub7LAzxjVJNQ1NpaT37XoFAk7B8+CFMmtQk\nKtXVsHcvVFbChg3wxRdQUaHzodjvb19McnJ0GDECRo6E9PSe/TwGQ3/C9CwM/YaGhibxaCkkoXjP\nHtixA7Zu1fMyo0Zp4YgWDx6s55EMhv6MGYYyGNpBRPdUtm7VYdu21vH+/TB8eHQhGT1aBzP5b+jr\nGLHoJfrqOGW8Ew+21tfD9u3RxWTzZt1LmTBBD5clJRVy/vkFTJoEY8bEd48kHu5tZzG29gxmzsJg\n6EaSkrQYTGjj9JTaWli3Dtasgbffhscf1+mKCsjP1yIyaRIcdZSOR46MbxExGNrD9CwMhm7mwAFY\nuxZWr9biEQr798PEiU0iMmkSTJ4MublmybAhdphhKIMhzqiqai4ea9bAypW6B3PqqU1h5MhYW2o4\nnOisWJhO8SHS3/3BxIq+ZCt0zt7MTDjhBPjBD+Chh+CDD2D3bnj3XTjuOPjnP+GYY2DcOLj6anjx\nRSgri42t8YKxtWc4GFvNnIXBEEOU0vMb+flw3XV6F//atbBwIbz2Gtx8s94bEup1FBToJb0GQ29j\nhqEMhjjGtmHFCi0eCxfCokV6jiNSPLKzY22loS9j5iwMhn5IIABff90kHp9+qpfqzpkDl10GM2bE\n2kJDX8PMWfQS/X2cMlb0JVuh9+x1u2HmTLj1Vr1ct7JSL9nNzIR583Td00/rZb2xtrU7MLb2DOY8\nC4PhMMPjgVmz4M47YdMmuPtuePNNvaLqxhth1apYW2joL5hhKIOhH7J9u+5hPPWUdkty7bVw8cXa\nDbzBEImZszAYDAQCeknu44/DV1/B//t/euluXl6sLTPEC2bOopfo7+OUsaIv2Qrxa6/bDRdcoOc3\nFi/WnnZnzSpkzhz461/1GSLxTLze12j0d1uNWBgMhwljx8J998HLL8MPf6h7GyNHwi9+oR0jGgzt\nYYahDIbDmOJieOIJeP55vXv8hz+E884zDg8PJ8ychcFg6DQNDfDqq/Dwwzr/xz9qFySG/o+Zs+gl\n+vs4ZazoS7ZC37I3mq2JiXpT3+LFcNNNcOGFcMUV2ndVLOnr9zVeMXMWBoPhkFAKLr8ciopg4EB9\nFscf/qDPNzcc3sR8GEop9TRwLrBbRKa00eZh4CygFpgvIsvbaGeGoQyGbqS4GBYs0CcEPvwwnH56\nrC0ydDd9aRjqGeCMtiqVUmcB40RkPHAt8HhvGWYwHO7k5ellt/fdp/dnzJsHW7bE2ipDLIi5WIjI\nv4F97TS5AHjeabsYyFBKxY2T5v4+Thkr+pKt0Lfs7aqtSum9GmvXwtFH61VTd92lzyjvafrzfY0l\n/XXOIhfYHpEvdcoMBkMvkpgId9wBy5bps8cnTtRnbpiR38ODmM9ZACilRgFvRZuzUEq9BdwnIp85\n+Q+AW0VkWZS2Zs7CYOglFi7UhzMNHqznM448MtYWGQ6Gzs5Z9IWT8kqBERH54U5ZVObPn8/o0aMB\nyMzMZNq0aRQUFABNXS+TN3mTP/S8UoU8+CCsWVPAKadAQUEh3/8+nHtufNhn8tHzofSWrk4+iUjM\nAzAaWNVG3dnAP5308cAX7VxHepuFCxf2+nseLMbWnqMv2dsTtu7eLXL11SJDhoj8z/+IBIPdc93D\n/b72FJG2Os/NDp/TMZ+zUEr9L/AZMEEptU0pdYVS6lql1A8ARORfwGal1Ebgz8D1MTTXYDBEYdAg\nePJJfZbGn/+sz9hYvz7WVhm6k7iYs+guz
JyFwRB7bFsLxl13aZ9TZ7S5MN4QDxjfUAaDIaYsWgTf\n/rY+AvaWW/QSXEP80Zc25fVpIieN4h1ja8/Rl+ztLVtPOgm++AKefRauugp8vq5fw9zXnuFgbDVi\nYTAYeoxRo+DTT2H/fpgzJ/aOCQ0HjxmGMhgMPY5tw913wzPPwOuv653ghvjAzFkYDIa449VX4brr\n4E9/gosvjrU1BjBzFr1Gfx+njBV9yVboW/bG0taLLoL334ef/Qx+9Svd42gPc197BjNnYTAY4p5p\n02DJEvjoIy0eNTWxtsjQGcwwlMFgiAk+H9xwgxaON98Ex0uPoZcxw1AR7Nixgzlz5jBp0iQmT57M\nw6GDhg0GQ8xISNC7vq++Wu/4/uSTWFtkaI/DQizcbjcPPPAAa9as4fPPP+fRRx+lqKioW67d38cp\nY0VfshX6lr3xZKtS2nPt88/rCe8nn2xeH0+2dkR/t/WwEIshQ4Ywbdo0AFJTU5k4cSKlpW06rjUY\nDL3M3Lnw73/DAw/ATTeZM7/jkcNuzmLLli0UFBSwevVqUlNTe8kyg8HQGfbvh0su0fMZr7wCAwbE\n2qL+j5mziEJNTQ0XXXQRDz30kBEKgyEOyciAt96CGTPg2GP1Ua6G+OCwEYtAIMBFF13E5ZdfzgUX\nXNBt1+3v45Sxoi/ZCn3L3ni31eWC3/9e78OYPbuQODc3TLzf10gOxta+cFJet3DllVdy5JFHsmDB\nAggEdH9XKbAsHUemo5VFpg0GQ4/z//4f7N2rPde+9pp2TGiIHYfFnMWnn37KySefzOTJk1FKoXw+\nfrN9O2e63fq0edvWcWQ6WlnktSOFJRRcrkNLu1zgdjeFyHxX6jye6MHr7Xyd1wuJia1DQoIRTEOv\n8uGHeh7j73+HE06ItTX9D+MbqqcIiUZIREJCEgw25Q8mHQqBQFPcMt2ZOr+//dDY2LnyxkY9y9jQ\n0Dz4fFowoglJtJCUBKmpkJ4OaWlNoa18aqoWT4Mhgvfeg8su05v3jj8+1tb0L4xY9BKFhYXhA9Hj\nnW6x1ba1kEQKSH19a1GJrDtwoHWoro5eVlcHSUkUer0UDBzYXExycmDgQB0GDWpKh0JSUrfcp4Ph\nsPse9BKRtr79NsyfD//4B8ycGVOzotJX72tnxeKwmbMwdBOW1dRr6AlsG2pr4d134aijmgtJRQXs\n2QM7dsDXX0N5uc6Xl+vg8UQXkUhxGTQIRo7UaTOc1qc46yx4+mk491z417/0iilD73HY9CxGjx5N\nRkYGlmXh8XhYsmRJL1tn6FFEtKi0FJBQCJXt3g3btulez+jRMGZM9Dgry4hJnPL663Dttfr3hLPX\n1nAImGGoFowdO5alS5eSlZXVy1YZ4pL9+2HrVti8GbZsaR5v3qyFIpqIhNLp6bG0/rDntdfgxhv1\nXMbkybG2pm9jhqFaICLYHTnPPwj66jhlvNPjtmZkwJQpOrREBPbtay4iGzbogxhCZYmJMH485OVB\nXh6Ffj8F8+bBEUf03BBdN9Efvgfz5un1HGecof8skyb1vm0t6Q/3tT0OG7FQSjF37lxcLhc/+MEP\nuOaaa2JtkiFeUUr7mRgwAKZPb10vooe1NmyA4mIdPv0U/u//tJgMGwYTJoSFJBxyc83QVjfy7W/r\nRYDf+AZ88AFMnBhri/o3h80w1M6dOxk6dCjl5eXMnTuXRx55hBNPPLGXLTT0e/x+3fMoLob165vE\npLhYn/LTUkTy8/U4ivuw+d3W7bzwAvziF3o/Rl5erK3pe5g5i3b4z//8T9LS0vjxj3/cC1YZDA5V\nVc0FZP16WLMGSkv19uQ5c+C00/QqMLPXpEs8+yz8x3/AwoV6JNDQeYxYRFBXV4dt26SmplJbW8vc\nuafzs59dwmmnHQcoJwAolGpKN8UqdP1W5YsWfcnJJx/r5K2INiriepF5K2pdU7kVTuvY6qCu88Ma\n/X1MN
ZYckr3l5VBYqH8af/SRni859VQtHHPm6KdfNw5f9aV72xVbn3oK7r5b38qxY3vUrKj01ftq\nJrgj2L17N9/61rdQShEIBLj44jMYOfJFNmz4C6DFRYtMSGia4ibxkajlW7bUkJ2dHFFmR6SbQmRe\nxI5SZzdLN7UJpe2odZpIIdGxUi4n3RSvXh3A601CKZfT1tUiHWofKnc7ITJ9MMGLZXnDsWUlRJQl\nRK2rr99MXV1us7aWlezk+9m4/8CB+uSfiy/W+e3btWh89BHcc4/uZcyZ0xSGD4+tvXHK1VfrSe85\nc7RgmGNau5fDomexfv16vvOd74QUlJKSEu655x5uvvnmGFjZvTQJSCgOEhKUpnSwWTpaWfO0jUgA\nCCISiAgt850Jfmzbj0gjtu1z4sZwvindiIivzTrbbsC26xHxY1nJuFwpuFzJTrozcVN7tzsDtzsz\nImTgcqXFpwiJ6In0kHgsXKgn3kPCceqpeme7Icyjj8L992vBGDky1tbEP2YYqg1s22b48OEsXryY\nESNG9JJlhu7CtgPYdj22XUcwWNflOBisIRisJhCoahaCwXrc7vQWIpIZRViagtc7jISE4bhcvehm\nxLZh1SotHB9+CIsW6X0fp50GF12kHSfFo+j1Mg89BH/8oxYM0xFrnz4jFkqpM4EH0WdrPC0iv2tR\nfwrwBlDiFP1NRO5t41odisV7773HPffcw6JFiw7Zdui745TxTm/batuBqCLSFPa3yO/F5yvD5yvF\n5Upl9epMZs/OJzFxBAkJw53QlHa5knvGcL8fli7Vu9P+93/1OMxll+kwblzUlxwu34P774fHH4eP\nP9armXuavnpf+8SchdKD5Y8ApwFlwJdKqTdEpKhF009E5PzueM+XX36ZSy65pDsuZehHWJYbyxqA\nx9O1czxFBL+/gurqvzNs2BB8vh34fDvYt++DcNrn24FlJTUTj1A6MXEEycn5eL3DDm4YzOPRvYnj\nj9fLgb76Sq8lnTVLbxq8/HK9IeEwPJ/0Jz/R+zBCcxhDhsTaor5NTHsWSqnjgTtF5Cwnfxsgkb0L\np2fxUxE5rxPXa7dn4ff7GTZsGGvXrmXgwIGH/gEMhk6gBaUyQjy2h9MNDVupq1uHiJ+UlKNaha6K\nVxi/XztPeuEFHc+Zo4Xj7LO1i/nDiF//Gl58UU/3DB4ca2vijz7RswByge0R+R3AsVHazVJKLQdK\ngZ+JyEGdzPv2228zY8YMIxSGXkUphdebg9ebQ1padM93jY17qK1dQ23tampqVrB794vU1q7G5Upp\nJSDJyUfidndwhrzHo92znnuu9oP16qt6IP+aa/TcxuWXw+zZh8X8xu23a+2cO1f3MA7DTla3EGux\n6AxLgZEiUqeUOgt4HZjQVuP58+cz2lkzl5mZybRp08Jjcw899BAzIxzhh86hDdUfTH758uXccsst\n3Xa9nsw/+OCDze5HrO1pLx95RnA82NPT9nq9g/jss7XAZAoKbgJg4cKF1NeXk5eXRm3tKt577yUa\nGjYzcWIpXu8Q1q4dQmLiWE477RxSUo5iyZI9WJYr+vtddRWF48bBrl3wwQcUXH01hfv3w9y5FNxx\nB4wfH1f3M5Tvrv9fd94JRUWFzJoFS5YUkJFx+P7/Anj22WfpKvEwDHWXiJzp5FsNQ0V5zWZghojs\njVLX5jBUXV0do0aNoqSkhPLyNG64IfSa1iFaeVtl5eWFDBlS0O6R3Z1JW1b0EHnyalfKQ6e0hk5c\ndbn0f5YpUwqa1bUVIk9obRm3LHO7u/8HamEfmiyE3rVXJEh9/SZqa1eHQ03NShoby0hLm0lGxmzS\n02eRnn581GGswsJCCk45RU+Mv/ACvPSS3sV2+eXwne9AdnavfI7O0J33VQRuugmWL9cjcykp3XLZ\nMH3pOxtpa59YDaX0DrBi9AT3TmAJcImIrItoM1hEdjvpY4G/isjoNq7
XKXcf1dXa71vkCamRR2wf\nTFlbR3Z3Nh159He0k1c7Wx6qizypNfK01s6GyNNa/f6mODIdioPB6OISeZx3QkJT3DLdXl0onZgI\nyck6pKRED8nJuv3hit+/l+rqxVRXf8b+/Z9x4MASEhJGkJ4+Oywgycl5zibMZi/Uq6leeEEfR3fx\nxdrZUhurqfoytq03723bpk/ci3MHwb1CnxALCC+dfYimpbO/VUpdi+5hPKGUugG4DvAD9cCPRGRx\nG9fq9WNVDU3iFE1MGhubgs/XFCLznalraNAnrtbW6hCZjgxKtS8moePAMzIgM1PHodAyn5LSt4f0\nbTtAbe0qRzw+p7r6MwKBKtLTZzniMZu0tJnN5z8qK+Hhh/XOtrPP1qLRz9y5hlYXHzgAf/vb4f0D\nA/qQWHQn5gzu9unvtopocWlPTGpqdM9y//6mUFXVPB8q8/ubhKWlmGRmNnkxHzAASku1vdnZOp+R\nEZ++AH2+nbz99pMcdVQ11dWfUVOzguTkvIjex2ySkkbrm/Doo/Dgg1BQAHfcEf3sjx6mp76zfr/u\nQHk82rN8dzj97av/v/rKaiiDodtQqmn4qjtWvPj9bYtKVZX291dcDHv3ao8cr7yif5jv3atFKTOT\nsHi0jEPp7Gy9YSw3VwtMT/dkEhKGkpl5MkccUQCAbfs4cGAZ1dWfUV7+Ghs3/gi3O4ucnG+Sc8M3\nSb/pRtSfn9CnDB13nBaNY47pWSN7AY8HXn4ZLrgArrgCnnsuPsU9njA9C4OhBwgEtJiExKNlHEpX\nVMDOndp3oIh2TZGbq+NQiMzn5PTsQ03E5sCBr6ioeIOKitcJBPaSnX0+OelnkfVaCdbv/qBdqN9x\nB5xwQs8Z0kvU1enRtrw8vdu7Lw87HixmGMpg6GNUV8OOHfp4ix07Wqd37NDj7MOGRReV0Cmv3Tlp\nW1e3ISwctbWrGZA5l5zVGQy49308g8dp0Tj11D79lD1wQJ+2d9xx8Ic/9OmPclAYsegl+uo4ZbzT\nl2yF3rO3vh7KyloLyfbtekispESLyMSJrUNm5qHZ2ti4m4qKt6isfIOqqo9JrxlJzuvlZG8bQeLN\n98CZZ3b7k7a37mtVld7kftZZesf3wdCXvrNmzsJg6OckJekVrW2tavX7YdMmWLdOh4UL4U9/gqIi\nSEvTopGeDqtXN4nI0KGde8Z7vYMZNuxqhg27mkCghn373qVi1N/ZvOsNkrZfSM5tA8g+4VZSzrkR\n5XJ17wfvYTIz9erhU07Rq+Zuvz3WFsUfnepZKKVOAJaLSK1S6jJgOvCQiGztaQO7ghmGMhiiY9u6\nBxISkcjg9+ujwCdOhCOP1F5AZs7s/JJS2/azf9/HVHz5ByrqPkAFISd5LkNOuofU9KN79oN1Mzt3\nasG4/npwNo73e7p1GEoptRKYCkwBngWeAr4tIqccop3dihELg6HrVFQ0CceaNfDvf+vjwWfN0qtm\nTz1VL4AB0FAqAAAgAElEQVTyeDq+ltg2Ne8+Svln97F7RiXenDyGjVvAwIHf6difVZywbZsWjNtu\ng2uvjbU1PU9nxQIR6TAAy5z4V8BVkWXxFPTHic6VV14pgwYNksmTJ4fL9u7dK3PnzpUJEybIN77x\nDamqqmrz9W2xcOHCLr8mVhhbe46+ZG9nbN27V+SNN0RuuUVk2jSRtDSRM84Que8+kS++EPH7O7hA\nMCjBxx+V8m+ky8qXj5BFn2RKUdG1Ul29tNtt7Qk2bhQZPlzkuec6/5q++h1wnpsdPl87uwjvgFLq\nF8BlwD+dcyg68Tsjfrjiiit49913m5X99re/5fTTT6e4uJg5c+Zw3333xcg6gyG+yMqC88/Xq4O+\n/hq2bIEf/lAP01xzjd4fcvbZ8F//BV9+qZcKN8OysK69npznipn892OYuSCNhG31rF59IV99NYOy\nsj8TCFTH4qN1inHj9BzGbbfp/TO
Gzg9DDQEuBb4UkUVKqZFAgYg839MGdoWOhqG2bt3Keeedx8qV\nKwHIz8/n448/ZvDgwezatYuCggKKilqeu2QwGFpSUaFPoFu4ULv93rEDTjxRD1kVFMC0adohZZh3\n3oHrr0eOn8neuy9gZ8NrVFV9RE7OPIYN+wFpaTPj8gz0FSv0stqnn9be3vsjnR2G6mzP4kci8oCI\nLAIQkW3ApEMxMB7Ys2cPg53TUIYMGcKePXtibJHB0DfIyYF58+CRR/TKqvXr4fvf1yuxLr9c119z\nDSxe7DjePPNMWL0aNXoc2bNv4ah/z2XmMWtISjqCtWsv4auvjqa09FH8/qpYf7RmTJ0Kb70FV14J\nH3wQa2tiTGfGqogyPwGs7MxrezPQzpzFgw8+KBMmTJCEhAR56KGHREQkKyurWZsBAwa0+fq26Kvj\nlPFOX7JVpG/Z2xu27tih5zfGjRM56iiRBx8UqahwKletEpk9W2TWLJGVK8W2g1JZ+b6sXv1t+eST\nDFm3br5UVX0mtm3HzX395BORgQN13BbxYmtn6PY5C6XUdUqpVUCeUmplRNgMrOphHes21qxZw9NP\nP81bb73FhAkT+Mc//kFJSQmDBw9m9+7dAOzatYtBgwbF2FKDoX+Qm6vH+9evhz/+Uc9rjBsHl1wC\nH+4+CvvjRdop02mnoW77BQMSZjFp0sscd9x6kpOPpKjo+3z55WTKy1+Li7mNk06C//1f3Zv68stY\nWxMb2p2zUEplAFnAfcBtEVUHJMrhQ7GmrTmLV199lXfffZfbb7+d8847j3nf/jYNLhd7KyrIyMri\n2p/8hCceeIDqqipuveceQoN3Sl8znA6XNb1fs7JQe9WyfURZZJ4WZZaTtpx6K/J1cTieazB0hX37\n9FnYTz6pXWxcdRXMP6ec3P+6BT77TO8ePOssQI94VFV9TFnZY1RVLWT48AXk5t6E250e08/w1lt6\neO2jj/SelP5At7v7cA4qGkzErm/RcxdxQ1tiUVRUxMyZM/H7/TQ2NiJAQn4+Qx98kN23305gzx5c\nQ4Yw6N57sdLSAGd4znl9ZBy6frOyiPYSWdeiLDJPG6+znXw4jvgcbYmJRZPYWBGxKxQ7Zc3iTtS7\nneACHYfyLWOnPlpddwavUiRYFl7LakorhdeySLAsPI7dhvhGRB/S99RT8Ne/6k2AVx/zNec8/108\nM6dpt+hDh4bb19UVs3Xrvezd+w65uTczfPjNuN0ZMbP/L3/Rx3x88gmMGRMzM7qN7t6UdyNwF7Cb\npueXiEjvO7hvh/ZWQ51wwgmUlpYyevRoJk6ciMvl4pFHHjnk9+wNfzCRghJNTEREx0DQKQ+KYIsQ\ndNoGRfjsk0+YedJJ2G3Uh8udOChCwAmhdBCa5zto42/RprNh95IlZBxzTDjvF6HRtmkUwWfbzdMR\ndW6lSHAEpC1xSXK5SLYskl0ukiwrnE62rDbrkiLahOIcjweP4wK2r/oFijW1tXpp6tNPw8YNwvfH\nfsJVRbcy/tfz4dprKfzkk7CtdXXrHdF4m9zcmxg+fEHMROORR7SmLVrUpGvxdF87oid9Q90C5IlI\n5cGbFzuqq6vZtWsXW7ZsAeD2229nxIgRsTWqC0QOW7kO4ZfzloQExicnd49RPUxhZSUFU6d26TUS\nISq+CAFptO2wqPhsm3rbpi4Y1HFkOhikNhikwu+nLhikzrapd+KW7WuDQfYGAmS43QzxevFu3MiR\ngwczxOuNGga43WYoMQopKTB/vg5FRYqnnz6FE4s/ZeJtq7j6/rsZ+OMRei0ukJw8gYkTn6eubgNb\nt/6axYuPIDf3RnJzF+DxZPaq3TfeqIfVzjhDLyHOyurVt48Jne1ZLATmikjLrTdxRVs9ixUrVjBn\nzhzq6uoQEZISEyl69FEWPPEE60tLQSn21dSQlZrKsief1F7VLKspjkxHq7Msvaj8UOLQNQ19BluE\
nSr+fXY2N7GxsZFc7oSYYZHAUERnq9TImMZHxSUmMSkwM91QOZxob4a03bJ66u5Qla1K44aRV/ORv\nJ5CR3fy3bV3dRrZt+zUVFW+Rm3sDw4ffgsfTe09tEfjJT+CLL+D997Xw9UW6exjqaSAP+CfgC5WL\nyAOHYmR305ZYfP7558yePRu3200wGCR/7FguBv7ziCO0hzURflpcTKbLxR2jRoXLmsXRykT0gb6h\n+lD6YGIRLRxud+eDxxO93OvVISHh0NLJyU0hdIh1crKuM8LWJXy2ze4oIlLm81HS0MCG+np2+nyM\nSEzkiKQkxiclNYtHH6ZCUvL5bu65aAX/3HMMP/2RzY135dCyc1xfv4mtW39NRcWb5OZez/DhP+o1\n0RDRE/U7dujJ74SEXnnbbqW7xeLOaOUi8p8HYVuP0ZZY7N69m+OPP57CwkLOO+88gsEgAwYMYNGi\nReE2I0eOZOHChYxry/dzG3TbOGVIeAKB9oPf33F9Y2NT8PnC6cK1aykYMSJqXau0z6cPTwgdaB0K\ntbX6PVoKSFvCkpKi/T9nZbUdorg37Uvjv9A99vpsm8319Wysr2dDi7jU52NEQoIWkOTkZkIypotC\n0pfubWFhIQUnn8y6O17kV/dn8Fnyadx+bzJXX6NafW3q60vYuvU3VFS8Tm7udY5odMP5uh0QCMB3\nvgMVFYV8+GFBt5zn3dP02JxFSBSUUskiUncoRsaCwYMHM3LkSEpKSrBtm3379jFlyhT279+PUorP\nP/+cQYMGMXjwYGpra/UcQWhZrJNuK2/bNiJy6OPRSjX1DHqKwsLw+O8hEQhoIYkUkEhBiSyrqdEn\ny6xdqwd5W4aqKi0WLQXE54M33mjKDxigZxJzc3VI7RseTLtCgmWRn5JCfpTxjEbbZnNDgxaQujo2\n1NXxr8pKNtbXs8PnY3hCAlNTU5memsqMtDSmp6UxuLM+xuMdy2Liby7nlUtXs/RbV3HHPbfy3/81\nlbvudvG97zW5FUlKGkt+/lPU19/Otm2/YfHi8Qwb9kNGjPgxHk92j5nndus9GLNnay+1Tz3VPzve\nne1ZzAKeBlJFZKRSaipwrYhc39MGdoX2VkN9/fXXzJo1C5/Px6hRo9i7V28TEREaGhqwLAu32x25\nG7zl7vA28xHv32GwLKvN8lBdKN1W6KiNy+VqFdxud7v5lmVutxu3243H4+l03JW24djlwtvYiKem\nBk9NDe4DB3AfOICruhpVVdUkKpWV2otdaakOHk+TcAwb1pSOzA8Z0rPiGyc02jYl9fWsqK1l6YED\nLDtwgGU1NSRZFtPT0poEJDWV3ISEvj3R3tAAt9/OJ89v4ZeDn2avncm998K3vtX6AV1fv4Vt2+6j\nvPxVhg27lhEjftqjPY2aGpg7V4vGf/933xGM7h6GWgxcBLwpIkc7ZatF5KhDtrQb6YwjwXPOOYdB\ngwbxyCOPcOSRRxIMBsnNzWXZsmUMGzbsoN87JByhnkZ7IVqbUFlk3F5oq00wGMS2bQKBAMFgsFlo\nWdZem0AgEA5+v7/DuDNtuhIHAoGwaIXEJSEhQQevl4EeDyMsi2EiDLFthgSDDAwEGOjzke3zkVVf\nT1pDA7VJSRxIS6MmM5O6zEzqs7PxjRpF8IgjsPLySBs4kPT09HBITk7u2w9TBxFha0MDy2pqWHbg\nAEtralh64ABAs97H9NRURicm9r3P/MEHyPwreGfmf3D7lquxXBa/+Y1+WLf8KA0NW505jdcZOfIX\n5ObegGX1TK9r7159FsZ3v9t3TtvrdrEQkeOUUl9HiMUKEena2sYepiOx2LJlC+eddx5nz5vHikCA\n2VdfzcaPP+bfjz/Olf/3f612Vofilju1I8s3fv4542fNar3jmug7t6026iI32rXcXBfaeNeyLtpG\nvMi45Sa7ZYsWcdzJJ3dqY16zTXlOHHrP3mDhwoWcdNJJzUTE5
/OFQ0NDQ7N8tLrGujrcFRUkVFaS\nWFlJ8r59pO7bR1ZlJYP37WNwbS173G42ut0UAav8ftYGg5SmpdGYkUFGZmZYRDIyMpqJSkZGBgMG\nDCA7O5vs7Gw2bNjA2WefTUZGRtw+eEWEssZGnnvnHfxTp2oROXCAWttmemoq09PSmJGaysmZmQyL\nk5nadudXKivh2muxizfw2vy3+I8nRzJkiD5D+4QTWjevrV3Lpk23UldXxNixv2XgwHnd+rcK2bpz\np3YP8qMfwQ03dNvlu5We3GexXSk1GxCllAdYAKw7WEN7m4qKCq677jo+/fRTKisrKb7vPrLmzWNv\nZSWbX3uN1DlzeKuyksi7pUD/RBFpSresB6qrqlhVXh4uk4jXtrz7oby0UdayfTTZ68wucmjaOWlH\nbLI7sGULSdnZzTfgRdmYZ0dsrovcfGdDq93cbe3ajtzV7XF2V3ujpL1K6bxTHkqX7dzJJzt2hNt6\nLYtEyyIxMZGk5GSSnHySy0WWZZHkhMRQ2uXCGzG3FBW/n+GbNzO8qIiCoiIoKsJetw6KipD9+2nI\nzORAWhr7Bg1id1YWZenp7PB6qaqpYefOnezdu5fKykoqKyspLS3lhz/8IfX19WRlZYVFJFqIFJns\n7GwGDhyIpzPH0B0iSilyExKYnZFBwejR4fLdjY187Qxd/bW8nBs2bGCI18vpWVmcnpXFKZmZpMfj\ncF52NrzyCtazz3LxrTP41i/+gxcybuTSSy0mT4Z779Wu0kOkpBzJlCn/YN++D9m06afs2PEA48bd\nT0bGrG41a+hQvZT25JP12o7vfa9bLx8zOtuzyAEeAk5HP9PeAxbE2ya9tnoWq1atYubMmfh8PpRS\n3HbXXQy+8kp2rFnDK7/8JX6fD5fbzTfvvZfcKVOi7phumQ7lI3dQ2xFxsEW+rbjljumWD+hARHk4\nHWXHdOQu6dDGNL+TjnwIeyMezN5oZRH50AM4tIM5USkSXa7wDunEUFul8LpczUTADXiUwm1ZKKBR\nBL+zMa6lfaF0OHbaRqZ9ERvq6oNBGkJp225KtygPiITFI1JE0l0u0t1u0l0uMpy4Vb62lozt20kv\nKSF9/XrS164lZdUqVGkpjB2rD62eNg1mzNBhyBAAGhsbm4lIZWVlq3y0kJ6eHl5k0VGckpLSo72X\noAjLa2r4YN8+Pti3jy+qq5mSksLpWVnMzcriuPT0+FvGu2kTXHYZpKfj+/OzPPHWUH7zG/3Avvtu\nyMtr3lzEZvfuF9i8+Q7S02czdux9JCWN7VaT1qyB007TvrDOO69bL92tdLtvqL5Ae8NQgwYNotzp\nAYTajBkzhn379oUnuLOzs9m+fXuv2dsbRO5qbmyRjnxIN7Z4WEc+mOs6SId2N0dL19k2FpDudpPm\nPKjTIh7QaREP77R2ykIP8q74fgqKaPGIEJE62+ZAIEB1MEi1E++Plo8o2+/EPtvW9oiQ6fMxZP9+\nhpWWkrt+PcNqasgdMIBhI0aQm5fH4KOPxtXJOTDbttm7dy+7d+9mz5494TgyHRmLSFQRGT16NGPH\njmXs2LGMGDECdzf1BuqDQT7dvz8sHhvq6zkpIyPc85jUw+LVaQIBPQb12GPw2GPUfuNb/PGPcP/9\n+tS/a66BmTObH8oUDNaxY8cf2L79DwwZ8n1GjbqjW/doLFkC55yjXZrE62rlbhELpdStIvJ7pdQf\niTIqIiI3H5qZ3Ut7YjFv3jwWLVpEeXk5l156Kc888wxTpkzB6/WybNkyLrzwQtavX9/lk/L63Jr1\nXrZVHOGpDgY54Dx0D0Q8mJulIx7QW7/4Avf06eH2VYEA9bZNjsfDQI+HQR4PA71eHXs8DHImvUPx\nQI+HzG52sRFwPkd1IEBVIMCuxkZKGxspbWjgq/feQ8aMobShgTLLojIxkUHV1eT6fAzzeMhNTyd3\n6FCG5eSQm5DAsIQEcr1eM
g7Cxtra2lYiEnJnU1JSQklJCbt27WL48OGMGzcuLCChUFZWxrmHcOxb\npd/Pwn37eN8Rjzrb5rTMzLB4DE9MPOhrt+SgvrOff657GaeeCg8+SFUglQcfhNde0wvq5s7VZzGd\ncUa4Q4jPt4stW+6iouJvjBz5S3Jzr+/yJHhbtn70kZ7w/te/4JhjuvZReoqemLMIzUt8dWimtY1S\n6kzgQfQc7tMi8rsobR4GzgJqgfkisrwr71FWVsabb75JwDkoOBAI8NJLL7F//3527dqFx+PB5XJx\n3HHHHfLnMTRHOUNXiS4XXTktpLCigoLp05uV+WybCr+f8sZG9vj9lPv97GlspNzvZ0l1NeUtykLi\nEiksg7xeRicmMjYxkbHOhrbkZud/to3bshhgWQyIMr9QmJfX7EHhDwbZuXkzZatXU7plC6V79lBW\nW8u6nBxKR42iLDub0uRkbMviiORk8pwwISkpnM5oo2eQkpISfvC3hc/nY9u2bWzatCksIF988QUl\nJSUUFxeTnJzcSkRCwjJixAhc7dyTbI+HiwYN4iLn/JeS+no+3LePt/fu5aebNjHQ6+W0zExmpqcz\nJSWFicnJJHbyHncLs2bB8uWwYAEcfTSZL77IXXcdy1136Z3W77yjd1vfcov2GnvWWXDmmUM4/vjH\nGT78JjZtupXS0ke6bRJ8zpymoaiPPoKJE7vnY/Y2MR2GUkpZwHrgNKAM+BL4rogURbQ5C7hRRM5R\nSh0HPCQix7dxvag9i7KyMiZNmsS5557LX/7yF84991wWLFjAeeedx+jRoykrKyMQCDB06FA2btzY\nEx/VEAMaHKeAIWEpd1xsbGlooKShgc0NDWxpaCDT7Q6LR8t4qNfbfW7PRWDrVu2f2wn7i4rYkJPD\n+hkzKM7Pp3jECIqzsljvdpPm8WgBSU4mL0JEurpju7UZQkVFRVhESkpKwqKyadMmKisrOeKII8jL\nyyM/P5/8/Hzy8vLIy8sjzXHh3xa2CCtqavhw3z6+rqlhZW0tG+vrGZ2YyJSUFCanpDAlNZXJKSmM\nSkzseZfyr70G11+vPf9deSUMHhzee+P3a79O77yjQ0mJnmM480w47rhFNDTchMuV4kyCR33kdInn\nn4c77tCuzSPWF8Sc7l46+z5wsYhUOfks4CUROeMQjTweuFNEznLyt6Fdn/8uos3jwEIRednJrwMK\nRGR3lOu1OQz18MMP85Of/IRAIMBll13GCy+8gGVZYdfl2dnZLF26FNu2o77e0D+xRdjZ2EhJfT0l\nDQ2t4qpAgFEJCa2EZHRiIlkeD6kuF2kuF95DmfCtrITiYigq0nFxMVJURGlNDcVTp7J+yhSKjziC\n4sGDKU5JocyyGJWYGBaPPMcFSLbbTZozz5Pmch20oNTW1oaHZIuLiykqKqKoqIgNGzaQlZXVSkTy\n8/MZPnw4Vhvv12jbFNXVsaq2lpU1NayqrWVVbS37AwGOaiEgk1NSyOrulWGlpXoN6+LFUFEBAwc2\n38jppHcljeG9kiN4Z9kg3iv0MHQonHjiGo488m5OPNFDfv69JCUd2gEWDz+sTw5ctKhpCCzWdLdY\nLBeRaS3KwnsuDhal1DzgDBH5gZO/DDg2ci5EKfUWcJ+IfObkPwBuFZFlUa4XVSyqqqqYN28e+fn5\n/OlPf+Kb3/wmF198MVdccQUej4f6+nqOPfZYli1bRllZGdnZnXcNcLDzACKCLTa22Ag6HbSDBCXY\nZjpoO3kn3VZbQW/0C9pBxBYkICDw9edfM3XGVMQW7KCt64ISjrEJl+vlXKBEhdMIEBCUP4jlC6Aa\nbazGgJMPYvmDqMagLvcHUH4b5T848V1WuoHpueMP6rXaWBsIgtggfgLBRvx2A/5gAMGOWH5sg4RW\nuIXSoTq9tNgn4AsKfr9NwC/Yfhs7KNqfpPOOG/btZHzWkKZ9NEjzPTYR+dBC6YDPS6MvgTZ
PNxbB\nFRRcwSCuoI0rEMRl22Ar9qelsW9AJvuyMqnKyKA6PR2f10PA7cHvcRNwe1AiuP1+PIEA7oAT+/3s\n27qS3EHjcQcCeAJ+3H4d63YBrHZ+MIkI2BGeDESwbWd9oLKwLGe5cpSl462uhSJoWYhSiGVhWy5s\nLBQ2lm2jRNhRsYmRA0Z39Ac/eET/8Zv/bTS2s8NJxEJQKBXEm1RHUloVKP0ipQSl9OffvKuMscOG\n6LxThxIUThzOQ1ACHLAH8esnHu65z9YOPbnPIqiUGinOyXhKqVFE3wYQc+bPn89op4+XmZnJtGnT\nqKioYMmSJXz00UcAvPHGGyz+xxeMUMMpqd2MV3lYtXgVttj8d/5vmZiQhxJY5ysGgSO9+QCs8xUB\niiO9eShRrG0sYqt/GzuTS0AU6xp1/SR3Pgis9ReBwFGeI1ECa/x6dO0olz6PcW1gHYhisnsSSmB1\ncB1KFFNcR2GJxcrgaiyxmGJNRqFYGVyNwmKqmowSixX2SsBimpqCEovlsgIlFtPUNJS4WM5yBJtp\nagogbJQiUH6OVkfhUjbLZSVgc7SahEL4WlahsJmujgRslstqlAgzyENhs0zWYiHMYDyihKUUg7KZ\nbo1HLGGpFANwtGccKJul9kZEwXSPHlv/OrAJEKa7x0XkiZIfixVoZDlrwnkBlgVKEBGOduvrLQuU\nADDNNRZBdF5gmnssNvC1Uz/VNRZFEsuDO1FOe4AVwRJAhfPLg03XUy3yTfWKaa4jWrWXoEcfOhWl\n/VRrHIJieWATIoqpriNAFCvsjYDNNNcYp/0m53rjwnl9vSParE9nDxe4xgF7W9V/LSXYysWkhAnY\nlosVwY364WxZ5CdZrGrcgm1ZTEg5iqDLxbqGdfiUxfhUvdd2fe1qACakHNVredUiH2xQjKmdEjN7\ndH4SAFtS3mHAyS9y1JhsxLZYubYBRDElPxkRi63v7sPKTmRKXioiipVFdfr//4R0EMWq4lqdH58B\nonjln+XNHtqFhYUAPZ4HePbZZ3n22WfpCp3tWZwJPAF8jBbgk4AfiMi7XXq31tc9HrhLRM508p0Z\nhioCTunKMNSSJUs48cQTCQQC2ukfMMNKosIOsIcA9QgeFF4Um7MmE9ZBJYR/9DX7mRixHU4pxHJ+\nSVggzq8HlHK2Zoeu4cSWfr04vz6aforaiEu3F0v061yCuAQsGxSIyw7nxdJ5ve1al4tl69d7gthu\nAY+N7QJxC7ZbX0vcgu3E4opIuwVxg+1S2C4IuhWNbheNbosGl0WjW+FzWTS4FI1YBGxFoyj8NjTa\nCr+T9os0ldlCow0+W2gISlPspH1B52CioNAYdPZh2KE9FvrXrUdZuLHw4MIrbhKCbhKCHhL8XryN\nCST6EklqSCW1IZ20ugzSarPIqBlAZs0AMmszyarJIv1AOomNiQQ8AQKJAfypfoJpQewMG9JBZSpc\nmS48Azx4B3hJzE4kKTuJlEEppOakkj4onZSMFFxW9EnaujrYsEGPIK1f3zx2ufQa/wkTmsfjxkFS\nUmf+lxjigb17P2DdukvJy3uanJyubZpo2NpA2eNlLHtyGfcF7yNpeBIvvvkio8aM6iFru0ZPnMGd\nA4Rmeb4QkYpDsC90TRdQjJ7g3gksAS4RkXURbc4GbnAmuI8HHuzqBDfA9OnTWbFiBbZtc+aFcymb\n/RmBWsGTplj36zoaK4X0yS5GfT9RDxiIOAMHAEKopx0mNHThxHo8Qr/G6Z3rfDjtdHJFCP1dBAFR\nRJrc9DdzlEmIsEOF31fpzm3IFH0N1WS3OIMponDSoaNXbUQJdqhMCXaoTNnY2NiWHo5xixuX7cIt\nbty2E0LpoBuP7cFtu/EGvbhtN56gJxy8Qa9OBzwkNSaR0JhAQmMCSb4kEhsSwyEhkIAXLwlKx4lW\nIglWAgmuBDweDypBYSVYqASFK8WFO8WNJ82DJ9WDN81
LQnoCnlQPVoqFK9WFKyUipLp0eYoLV7IL\n5Tr4ydTqati8WU+ClpToPWDr1+tQXq4f/tFEoQsjmoY4Zc+eV9iw4UYmTXqVzMyTOvUasYV9H+yj\n9NFSqhZVsfjYxfxuye/4+S9/zo9//OM253diQbcMQyml8kWkSCkVWsNY5sQjnWGpVvMGXUFEgkqf\n7/0eTUtn1ymlrtXV8oSI/EspdbZSaiN66ewVB/NeZWVl4fR7r39IzrIcfLU+qiuqmzzH7kxh4JJj\nUKIIjbhaYqFoyofSyrm3e8v3MnDgQCyxsJxuiOX8C7UNp8VJKxVur/076de5Iv5ZSteH0i1jN3p9\nvgsXLqVD6DUe5cGt3Lgtt46d9Lpd6zh6+NF4LI+uCwWXjj0uTzjtcrlQHoVy62B5rHBauVWzumbp\nKHkrwQqH0MM/nG/jAd7be0ICAb2sMiQGLUN9vd64HQoTJ8IFF2hRGDkSFi0y+216gljbWlr6OFu3\n3sPUqe+Rmtq+K7zCwkJOmHYCu5/bTemfSrGSLJLnJ/NH9x9ZW7yW9z96n2nTprV7jd7iYO5rR3MW\nPwZ+ANwfpU6AOV16t2gXEXkHfQpfZNmfW+RvPNT32bVrF1u3buW8885j5cqV4fKLL76YX/3qV5x/\n/vksXbqUAQO65sI41l/mrtCXbO1ORPRQUUUF7NkDW7ZoAYjsKWzfrldVRgrC+ec3pQcObOUezNCP\nEbpRFSgAACAASURBVBG2br2XXbue4+ijF3XoCqRmZQ3b7t+G+99uBpw1gPz/yefL+i+58IoLmTdv\nHs/95TmS+vi4Y0c7uC8WkVeUUmNFpKQX7TooOuOiPFIs3nzzTQoLC3nggQcYM2bMQYmFoXeJfPBX\nVOhVp9HSLfNK6Qd+To5e4x4pCmPHwqhRffNITEP3I2KzceMtVFV9wpQp75CQEH2Nq91oU/63csoe\nLaN+cz3Drh3G0GuGIpnCL3/5S/7617/yzDPPMHfu3F7+BF2ju1ZD/QJ4BXgVmN5B2z5FfX09v/nN\nb3j//ffDZf3JT1a80dgIBw7oUFPTPO5MWXW1fvhXOq4rc3KaQnZ2U3rixOb5UH3Lc5sNhmjYdiNF\nRfPx+XYwbVohHk9mqza+Uh9lT5Sx88mdJOcnM/yW4WRfkI3ltli5ciXfm/s98vPzWbFiRZeW4cc7\nHYnFXqXUe8BYpdSbLStF5PyeMavn2bRpE1u2bGHq1KmICDt27GDGjBksWbKEQYM675gi3od2gsGm\nY7kXLizk2GMLwvm24lC6vr4pNDR0nG6r7sAB3SNIS9MhNTV6HErn5EBpaSFnnFHQrC4kAvH44I/3\n70EkxtboBIO1rF49D8tKYMqUd3G5mg8bBRuCbLx5I+WvljPo0kFM/WAqKUfqI3Bt2+b666/nlVde\n4f777+fyyy+PD+eKbdATcxZno3sULxB93qJPEdpItGcP/POfR3HLLbv0xiob/vu/x3Dppct47LGs\n8AqmUF20dCi/bRu8/rpOH2oIBvVEazDYFFrmO9Mm8sEvok8g9Xr1UExysk6HytqLk5KaQmJiUzor\nK3p5y3wonZbW9P6dpbuOCzcYOoPfv5dVq84hOTmfCROexLKaPxp9u3ys/uZqksYkcfyW43GnN9Vv\n376d+fPns2fPHpYsWcKYMYe2yzte6WjO4gURuTzkfbYX7Too2puzuPTSSyksLKSyspKcnMFMmfKf\nTJlyBZalH2KPPTaWq676iuTkATgbUMN1LdMt8y6Xzh9KUEq7rHG5mkLLfGfLvN6mB77LZSZmDYb2\naGjYwcqVZ5CdfTZjx/6+VY+gZkUNq85fxdCrhjLqP0Y1q3/55Ze56aabuOWWW/j5z3/ergPGeKW7\nXJSvRR949DZQADS7oIjsPTQzu5eOJrgNBoMhkrq6YlasOIPc3BsYOfJnreor3qig+Jpixj8ynkHf\nbhqe3r9/PzfeeCN
ffvklf/nLXzgmXnyPHwSdFYuOdoY8DnwI5ANLW4Qec1vel4jcQh/vGFt7jr5k\nr7FVU139FcuXFzB69J2thEJE2Pa7bay/YT2T/zm5mVB8/PHHTJ06lfT0dJYtWxYWiv5+X9udsxCR\nh4GHlVKPich1B2mXwWAwxBXafccljvuO5ut0bJ9N8bXF1K6qZcbiGSTk6jXVtm1z11138dRTT/Hk\nk09yzjnnxML0mNEVdx8nAuNF5BnH9UeaiGzuUeu6iBmGMhgMHbFnz6ts2HC9477j5GZ1jeWNrP7W\narxDvEx8biKuFD0H0djYyJVXXklJSQmvv/56l1ZMxjvdNQwVutidwM/R+y4AvMBfDt48g8Fg6H3K\nyv7Mxo0LmDLlvVZCUbO6hmXHLSOzIJNJf50UFor9+/dz1llnUVtby4cfftivhKIrdNab1beA89G+\nmRCRMqD9I7MOE/r7OGWs6Eu2Qt+y93C0VUTYsuVetm37PUcf/Qlpac19NFX+q5IVc1Yw5p4xjL13\nLMrSP7RLS0s5+eSTmThxIq+++mq7Ljv6+33trFg0OuM72n+qUildfieDwWCIASJBNm68hfLyVzj6\n6H+TlDQuou7/t3fn4VGV5//H389MErIvbAFkB0EQLItsiohb7aIgVakGq7ijv6+tiLRavbSu1Vpx\nb/1+rRVRI3VpAS0uoAFBQCCCLLLvGCBk3yaZ7f79MZOQhKySyZmZ3K/rOtdZ5szkM0M4d87znHmO\ncOi5Q+y4ZQdDFg4hdVpq1WNbt27lnHPOIS0tjZdeeikkL4ttSU29n8W9wOnAJcCfgZuAdBF5KbDx\nmkf7LJRS1Xk8DrZtuw6XK5chQxbUGL7D6/Sy6392UfRNEUMXDSW6V3TVY8uXL2fq1KnMmTOHadOm\nWRG91QTifhaXAD/F912Lz0RkSSNPaXVaLJRSlZzOHLZsmUx0dC/OOOMNbLYTI0W6cl1svWor9kQ7\ng94ZRET8iQtDK79o9+6773LRRRdZEb1VtWgHt98mfHfKWwZ89yNzhZ1wb6e0SihlhdDK2xayOhx7\n2bDhXJKSzmPQoLdrFIrS7aVkjskkYVQCQ/49pKpQiAjPPvss9957L0uXLm12oQj3z7VJ9+A2xkwF\nnsFXKAzwkjFmtoh80OyfqJRSAVRUtI4tWybTq9eDnHbanTUey/s8j23XbaPv033pemPXqu0ej4dZ\ns2axdOlSVq1aRY8ePVo7dtBrap/Fd8AlIpLtX+8ELBWRhm8d1cq0GUqpti0n52N27Lixzi/b/fDK\nDxx4/ACD3xtM8nkn+i7Ky8u57rrryMnJYcGCBSQnnzwseThrqftZVLJVFgq/XJrXhKWUUgHluwXq\nIwwd+jGJiWOqtnscHnbP3E3hikKGfz2cmL4nLn/Ny8tj8uTJnHbaaXz22We00ztg1aupB/xPjTGf\nGWOmG2OmA/8FFgcuVugI93ZKq4RSVgitvOGWVUTYu/ePHD78LMOGrahRKEq3lvLt6G9xF7gZsWpE\njUJx4MABxo8fz5gxY0hPTz/lQhFun2ttDZ5ZGGP6A6kiMtsY8ytgvP+h1cA7zf5pSinVgnx3truJ\n8vI9DB++iqioToCvgGS9msX+h/bT9+m+dLmxS42hxTdu3Mhll13G7Nmz+d3vfmdV/JDS2BDlHwP3\ni8jmWtuHAk+KyOUBztcs2mehVNvhdheyZcuvsNsTGDw4HbvddwtFV66LHbfsoPxAOYPfHUzswJq3\nVlyyZAnTpk3jb3/7G1dddZUV0YNKS106m1q7UAD4t/X+kdmUUuqUlJcfYsOG8cTFDWbIkA+rCkX+\nsnzWD1tPdN9oRqwecVKhmDdvHtdddx0ffvihFopmaqxYNHRZQP2DpLQh4d5OaZVQygqhlTfUs5aU\nbGLDhnNITb2B/v1fxBg7XpeXvQ/sZVvaNga8NoD+z/bH1u7E4U1EePLJJ3nooYdYt
mwZ5513Xqtk\nDVaB+J7FemPMrSLyWvWNxphb8N0ASSmlWo3vPhRpnH76S3Tu/GsAHPscbEvbRkRyBGdvOJuo1Kga\nz6moqOCuu+5i7dq1rFq1im7dulkRPeQ11meRCvwHcHKiOJyNb4jyKSJyNOAJm0H7LJQKX0ePzmPP\nntmceeb7VcOLH3v3GLt/u5uef+xJ9991rxotttKBAwe4+uqr6dGjB2+88QaJiYlWRA9qLTo2lDHm\nAmCIf3WriHx5ivkCQouFUuFHRDhw4AmOHPkHZ521mLi4wbiL3ey6axdFq4sY/O5gEkacfMeETz/9\nlOnTpzN79mzuueeeGldDqRNadGwoEckQkZf8U1AWCquEezulVUIpK4RW3lDK+uWXX7Bz5+3k5Pyb\nESNWExc3mOLMYjJHZGLshpGZI08qFB6Ph4cffphbbrmF999/n1mzZrVKoQilzzVgY0MppVRrc7ny\n2bfvfjp0aM+wYcux2+I5+NeDHPrLIU5/+XQ6Tz35jnU5OTlMmzaNiooK1q9fT5cuXSxIHp6aPER5\ni/9gY1KAfwG9gP3AVBEprGO//UAh4AVcIjK6gdfUZiilwkBx8Ua2br2Sjh0n0bfvX3Ble9l+w3Y8\nxR4GpQ8ipvfJF2N+8803TJ06lWuvvZbHH3+ciAj9W7gpAjFEeUu7D99ghAOBLzlxf+/avMBEERne\nUKFQSoWHo0fnsWnTJfTp8wT9+z9H/qdFZA7PJHFMIsO+GnZSoRARXnnlFS6//HJefPFFnnrqKS0U\nAWBlsZgMvOlffhO4op79DEE8aGG4t1NaJZSyQmjlDdasXq+TnTv/HwcOPM5PfpJB+6greWvKW+yc\nsZPB/xpMn0f7YIuoeSgoKSlh2rRpvPbaa6xevZrJkydblD54P9e6BPIe3IHQWUSOAfgvwT25AdJH\ngCXGmHXGmFtbLZ1SqtWUlx9m48bzqaj4geFnfUPBG8msHbgWcQtnbzyb5Aknfz94+/btjBkzhujo\naFavXk2/fv3qeGXVUgJ6rmaMWQKkVt+E7+D/YB2719fZcK6IHPHfQ2OJMWabiKys72dOnz6d3r17\nA5CcnMywYcOYOHEicKKatvR6pUC9fkutV24LljwNrU+cODGo8oRb3mBa/8lPYNu2NA4e/AXR+35J\n2Zs7ie4ZTdFTRfTs25PI9pEnPf+9997j1ltv5bbbbuOZZ54JivdTuc3qz7Ox9crluXPn0hxWdnBv\nw9cXccwY0wXIEJFBjTznYaBYRObU87h2cCsVIkSEQ4ee5dChv9I7+v/Iua8XFYcr6PdsP9r/vH2d\nl7s6nU5+//vfs2jRIj744ANGjBhhQfLwEgod3IuA6f7lG4CFtXcwxsQaY+L9y3HAT4EtrRWwKapX\n62CnWQMnlPIGQ1a3u5jvv/81x7Lmk/x+Ovt/0YEOkztw9qaz6fCLDlWFonrWw4cPM3HiRPbs2UNm\nZmbQFYpg+Fyb6sdktbJYPA1cYozZAVwEPAVgjOnqHxodfE1YK40xG4A1wEci8rklaZVSLaK0dDuZ\nmWNwbLFTPuVp2tl7Mnr7aLr/T3dskXUfkr744gtGjRrFZZddxsKFC0lJSWnl1MqyZqhA0GYopYJb\ndvaH7NhyO2burSSXTKPv032JPT223v29Xi9//vOfefnll3n77be56KKLWjFt29DS9+BWSqkfzet1\ns2PVvWQf/xfRc59lwMxJpExs+Ozg0KFD3HbbbRQVFbF+/XpOO+20Vkqr6hK0318IFeHeTmmVUMoK\noZW3tbMW7zvI6nfHk73ha/oWL2H0v69vsFB4vV5eeeUVhg8fTteuXcnIyAiJQhHuvwN6ZqGUCgh3\niZtdf1vEsT53kOi6kqHTnyMyoV2Dz9m2bRu33HILAF999RXZ2dlERUU1+BzVOrTPQinVojylHo6+\nfZS9K1/Am/Y6p/f5O93OmNrgc5xOJ0899RQvv
vgijzzyCHfccQc2mzZ8tAbts1BKtarib4s58toR\njv13N/Y//J3IW/Zw1qjVxMYOaPB533zzDbfccgu9evViw4YN9OjRo5USq+bQ0n2Kwr2d0iqhlBVC\nK29LZnUXucn63yzWn72ezVM24/zJYmzv3Eynn/bm7HPXNlgoSkpKuPvuu7niiit44IEH+Oijj04q\nFG31cw007bNQSgWciFC8rpis/8si58Mcki9MptsTcDz1Tzhc2QwduJDExIYHiP7ss8+YMWMGEyZM\nYMuWLXTo0KGV0qsfS/sslFJN4ipwkf1ONln/l4WnxEPXW7vS+Yb2ZDtf4tChOfTseR/du9+NzVb/\n36A5OTncc889rFixgldffZVLL720Fd+Bqov2WSilTpmIULSqiKzXsshZkEP7S9vT79l+pFyYQlHx\nKjbv+CXR0b0YOXI9MTG9G3yd+fPnM3PmTK699lo2b95MfHx8670Rdcq0z+IUhXs7pVVCKSuEVt6m\nZHXlujj0/CHWDVnH9pu2EzckjjG7xnDmv84k4XzDzl0z2Lp1Kr17P8zQoR83WCgOHjzI5ZdfzpNP\nPsnChQt57rnnmlwowu1zDRahNjaUUiqIeCu85C3N4/tp37Om3xqK1xcz4G8DGL19ND3v7Ulkx0iO\nHZvPunVnYoyd0aO/p3Pnq+scHRZ8X657+eWXGTFiBGPHjiUzM5MxY8a08rtSLUX7LJRqo8QrlHxX\nQv7SfPKX5lO0qojYQbGkTksl9TepVfeRAHA49rJz5504nVkMGPC/JCWNa/C1v//+e2699VaMMbz2\n2msMGtTg3QeUhZraZ6HFQqk2xLHX4SsOX+RT8GUBER0iSLkohZSLU0iemExkSmSN/b1eF4cPz+Hg\nwWfo2XM23bvfg80WWc+rw44dO3jiiSf45JNPeOSRR5gxY4Z+uS7IhcL9LMJCuLdTWiWUskLw5nXm\nOMl+L5sdt+1gTd81bDh3A5+//zntf96ekd+OZMz2MQx4ZQCdpnQ6qVAUFq4mM3Mk+fkZjBy5lp49\n/1Bvodi2bRvTpk3jvPPOY+DAgezevZs777zzlAtFsH6udQn3rHo1lFJhxFPqoXBlYVXTkmOvg+QJ\nyaRcnEL333UndnAszuVOuk7sWu9ruFwF7Nv3R3JyFtCv3xw6d/51vf0SW7Zs4fHHHycjI4OZM2fy\n6quvkpCQEKi3pyykzVBKhSjxCo69Dkq3lFL6XSn5GfmUZJYQPzyelIt9TUsJoxLqvaHQSa8nHrKz\n32PPnnvp0OEy+vZ9isjIukeH3bRpE4899hgrVqxg1qxZ3HHHHXopbIjSPgulwoSI4Dzm9BWFzf5p\nSyml35cS2SGSuKFxxA+NJ2lCEknnJRER37wGA4+nnGPH5nHo0F+JiGhPv35/JTl5fJ37btiwgcce\ne4zVq1cze/Zsbr/9duLi4lribSqLaJ9FHbxeLyNGjGDSpEkt9prh3k5plVDKCi2X113spnBNIVmv\nZbHrt7vYeOFGVnVexboz13Hg8QOU7y0ncUwi/Z/vzzk/nMO4A+M46+Oz6PvnvnT4eYcmFYrKrC5X\nPgcOPMk33/QhJ2cRAwf+gxEjVtdZKDIzM5k8eTKXXXYZ559/Pnv27OGee+4JeKEIpd+DcM/apvos\nXnjhBQYPHkxRUZHVUVQbJSK48904jzpxHnNS8UMFZdvKqs4YnNlOYgfFEjfEd7bQ4bIOxA2JI6pr\nVL39Bs3ldGaze/csjh59gw4dJnHWWUuIjx9S575r167l0UcfZePGjdx3333Mnz+fmJiYFsmhQkub\naYY6fPgwN954Iw888ABz5sxh0aJFrZxOhTNPqcdXAKpPx5x1brPF2IjqElU1xQ2KI25IHHFD44jp\nF4Oxt0xRqK2kZAuHDj1Dbu5HdOlyI9273010dN3Dga9evZpHH32UrVu3ct9993HTTTcRHR0dkFzK\nWjo2VC0zZ
87kmWeeobCw0OooKsh5HB7ceW5cuS5ceS7fcu15rgtXtquqCIhbahSAyilhZEKN9cjU\nSOzR9lZ7LyJCYeEKDh78C8XF6+ne/bf07/98nR3XXq+Xr776iieffJKdO3dy//33s2DBAtq1a/ju\ndqptaBPF4r///S+pqakMGzaMZcuW0ZJnU8uWLWPixIkt9nqB1Fayel1ePMUe3IVuPEUe3EUn5u5C\nN+58d90FwD8XESI7RBLZPpKI9hFEpERULUe2jyS6dzSR7SOJSj1RBFZ8u4KxF4xt2Q/hFIh4yMlZ\nyMGDf8HtzqNHj3s588wPsNuja3y2IsLmzZtJT09n/vz5xMXFMXPmTK6//vqguJ1pW/mdbW0/Jmub\nKBZff/01ixYtYvHixTgcDoqLi7n++uuZN2+e1dHaLPEK3nKvb3J48Tg8Vcteh5eidUXkFOac2F7m\nLwBFtQpAofukbV6nl4jECCKSIrAn2olIrDmPbB9JZOdIYs+IrSoA1ef2mOb/5d9S/Qmnyndl01v+\nK5uS6dnzD3TsOBljar6nvXv38u677/Luu+9SXFzMtddey6JFixg6dGjQvBcVXNpMn0Wl5cuX8+yz\nzwZln4V4BfEKePzLHgEvNebirWdbteeI27/uPrFee46Hhh9zC16nF3EJ4jqxXLXNKXhdtZadde/r\ndZxcFMQp2NrZsMX4p2jf3B5jr1quvf2kA3/SyYUgIjECW4ytzR3wXK4CsrJe5YcfXiQ+fjg9e/6e\npKQJNT6HY8eO8d5775Gens6ePXu4+uqrSUtLY9y4cTokRxumfRYN8JR4WDtkrW9FTkwicmKdOrZV\nrtd+nldqzPH69/PW8Xhd2/zPAcAOxmZ8nZw23/JJ26rN63w8wrdcfY697u0NPh5lsEXaMFEGE2mw\nx9uJiIzARBpsUbYa89r71nhejL3Ggd8WY8PWru0d0Fuax1NKbu5ijh9/j7y8z+nYcTJnnfUZ8fFD\nq/YpLCzkP//5D+np6axdu5ZJkybx0EMPcfHFFxMZWf8YT0rV1ubOLMDXgenY7QBT9Tzfsn+qWqeO\nbZWT/3lfrfmKCedO8B20DVVzbP7n2Dixva5ttecBFO5tqlZqrbweT5m/QLxPXt6nJCaOoVOnqXTs\neAVRUR0BcDgcLF68mPT0dJYuXcqFF15IWloav/zlL4mNjQ2pz1azBkb1rHpm0QB7jJ34oS0zNEHU\nviiiu+slhSpwPB4HeXmfkJ39Hnl5n5CYOJpOnaZy+umvVBUIt9vN559/Tnp6OgsXLmTkyJGkpaXx\n+uuvk5ycbPE7UOGgTZ5ZKBXsfAXiU44ff5/c3MUkJJxN585T6dhxClFRnRARtm/fTkZGBl9++SXL\nli2jX79+pKWlMXXqVLp2rX+gQKWqC/qxoYwxVwF/AgYBo0Tk23r2+xnwPL7Gm9dF5OkGXlOLhQpZ\nHk85+fmfkZ39Hrm5/yUhYQSdOk2lU6dfERnZid27d5ORkUFGRgbLli0jOjqaCy64oGrq3r271W9B\nhaBQGBtqMzAFWF7fDsYYG/AycClwJnCtMeaM1onXNOE+HoxVQikr/Pi8Xm8FOTmL2LbtN6xe3ZXD\nh58nKWk8Y8bsIDn5n3z+eTtuvnkWPXr04IILLmDFihVccsklrFq1in379vHPf/6T3/zmN80qFKH0\n2WrWwAipsaFEZAeAafiSmNHALhE54N93PjAZ2B74hEq1PK+3guLi9RQULKegYDlFRWuIjx9O585X\nExMzkxUrtvrPHv5CWVkZF1xwARdeeCEPPfQQ/fv31yvIlGUs77MwxmQAs+pqhjLGXAlcKiK3+dev\nA0aLyG/reS1thlJBxeNxUFS0hoKC5RQWfkVx8TpiYgaSlHQeLtdgNm0SMjIyycjIIC8vj4kTJ1Y1\nKw0aNEiLgwq4oLgayhizBEitvgnftwoeEJGPAvmzlbKC211MUdEqCgq+oqB
gOSUlG4mPH0ps7DkU\nFFzK+vXn8/XXG1mzZj4AY8eO5fzzz+fOO+9k6NCh+uU4FbQCWixE5JJTfIkfgJ7V1rv7t9Vr+vTp\n9O7dG4Dk5GSGDRtWdT1xZTtdS65v3LiRu+++O2Cv35Lrzz//fMA/j5Zar96mGgx56lt3u0sYNgw+\n++wtSkq+o7z8ABMmjMbjOZNFi3qweXMS+/dnsXPnq/Tq1YvBgwdzzTXX8Pzzz7N3716MMa2ev3Jb\nMHx+ja3r/6/A/PvPnTuX5gqWZqh7RSSzjsfswA7gIuAIsBa4VkS21fNa9TZD3XzzzXz88cekpqay\nadMmwHdryBkzZlBaWkrv3r155513mn1ryGUh+kWcYBdsWb1eNxUVB3A4dlNWtouysu0UFX2Nw7Gb\nmJiRLF/ejoiI01i69AdWrVpPQkICY8eOrZqGDx8eNKO3Bttn2xDNGhjVs4bCpbNXAC8BHYECYKOI\n/NwY0xV4TUQu8+/3M+AFTlw6+1QDr1lvsVi5ciXx8fFcf/31VcVi9OjRzJkzh/HjxzN37lz27t3L\no48+2pJvU4WQyoJQVrYLh2M3DseJeXn5AaKiumC396KsLIXs7CgyM8v55JM97N17kJEjRzJu3DjG\njh3LmDFj9HsOKmQEfbEIhMbOLBYuXEhpaSkOhwOAmJgYBgwYgM1mIykpiSNHjrBjx47WjKxamdfr\nprx8/0nFwOHYTXn5QaKiuhAd3Y+Kio7k5kZz8KCHbduKyMw8wtatu7Db7QwcOJAzzjiDUaNGMXbs\nWIYOHUpERJscDEGFAS0WtaxcuZKioiKuvPLKqmIxbtw47r//fiZNmsSUKVNYvHgxFRUVzfqZoXrq\nGeyak1XEi8uVi9N5FKfzGC7XsarlE3PfstudR7t2pxEd3R+RbuTnx/LDD7BjRykbNx7j++93kZWV\nRZ8+fRg4cOBJU8eOHU85r9U0a2CEataguBoqmIwfP56vv/66xrY333yTu+66i8cee4zk5GS9EiUI\niHhwu4txOo9SUrIJt7sQj6cIt7ug2kHfd+CvLAouVw52exJRUalERXUhKioVaI/DEU1xcTfy8rpx\n7JiTw4fL2L8/n+3bd7Nz5xpiY2OrFYKzuP1233KfPn10RFalamkzZxbguwnSxRdfXHVmAfDggw8y\nb948YmJiSEhIYP369a0RNSyIePF6y/F6HXi95Xg8Dv+yo9q2Mv/BvrBqqr7u8RTidhdVLXs8Zdjt\n8UREJBERkYTdnlS1HBnZGUihpCSSwkIbx497OHKkgoMHi8jKyiYrK4sjR46QlZWFx+OhW7dudO3a\ntca8W7dunH766QwcOFAH2FMKbYaq08qVK7n44ospLy8H4Pjx43Tq1Amv18uoUaPo0aMHCxYsaLE8\nviyCiBfwNmHuQcSNiAfw1FhveNuJdd/kRMSF1+uqtuyb17dc9zbfQb9mESiv2ibixGZrh80Wjc0W\nUzXZ7TE1tp048CcSEZEExFJeHoHDYaO0VCguFgoLPeTnV5CX56CgoIiCggIKCgrIz8+vmh85cgQR\nqTro1y4E1eeJiYn6hTalmkCboWpJS0vjiy++wOl00qNHV264wUNZmYcPPywCYNSoaDZs2MSqVd2p\nfmcj34G8crlyu7dqecMGF8OG2ajr4O/b13dzC98wVw3NDcZEAHaMqZwi/LfDbN42my0KYyIxJgqb\nLbJqee3aI4wb1x9jIrHb4zDGt59vn8rlyudGAlE4nYaKCqiogPJyD+XlXioqfJ9daamTsrJySktL\nKSsrq3NeWlpKQcGuqoN/QUEBHo+HlJQUUlJSSE5OPmlq3749LpeLCy+8sGpbSkoKXbp0ISEhISiL\nQKi2Vwc7zRoYPyZrmykW6enp7N+/n8svv5zvvltPefkh9u49wBNP9AEMf//7PETWMWLE3wEQ6HvG\nXgAACcFJREFUOTF5vb5CUXObb56bu5q
BA8fi9UrVNo/Ht+zb5q2aPB5Pg8sulxu3u/7J4/E0+Hjl\nVFFRgdPppKKiwr/soKKigqysLN56q7Dqsdrz2ster5e4uDhiY2OJjY2tWm5o3qVLl5O2Vx7sKw/8\n0dHRjR7wQ+k/nlJtQZtphkpLS2PZsmXk5ubSvn17cnNz8Xg8/iJw8nN8f+kbbDYbNputarmuud1u\nx2aznTSvb7m+xyMjI4mIiCAiIgK73V61XNdU3+N2u5127drRrl07oqKiTlpuzraoqKig/CteKdVy\ntM+iAR6Ph/Ly8hoH/doFQCml2oJQuJ+FZex2O3FxccTExBAdHU1UVFTVX+XNLRTVx1sJdpo1cEIp\nr2YNjHDP2iaLhVJKqeZpk81QSimlfLQZSimlVIvRYnGKwr2d0iqhlBVCK69mDYxwz6rFQimlVKO0\nz0Ippdow7bNQSinVYrRYnKJwb6e0SihlhdDKq1kDI9yzarFQSinVKO2zUEqpNkz7LJRSSrUYLRan\nKNzbKa0SSlkhtPJq1sAI96xaLJRSSjVK+yyUUqoN0z4LpZRSLUaLxSkK93ZKq4RSVgitvJo1MMI9\nqxYLpZRSjdI+C6WUasO0z0IppVSLsaxYGGOuMsZsMcZ4jDEjGthvvzHmO2PMBmPM2tbM2BTh3k5p\nlVDKCqGVV7MGRrhntfLMYjMwBVjeyH5eYKKIDBeR0YGP1bKC8RdIMzVNMGaC4MylmZomGDM1lWXF\nQkR2iMguoLG2MkMQN5dNnDixwceD6ZejMmswZapUO1Njn2traM7n1Jp5T/XfLxBZA/U7dSpZW/v3\nvClZg+X/3o/5XIP2IFyNAEuMMeuMMbdaHUYppdqigBYLY8wSY8ymatNm//zyZrzMuSIyAvgF8P+M\nMeMDFPdHCZa/FJpCswZOKOXVrIER7lktv3TWGJMBzBKRb5uw78NAsYjMqedxvW5WKaWaqSmXzka0\nRpAmqDOoMSYWsIlIiTEmDvgp8Eh9L9KUN6yUUqr5rLx09gpjzCFgLPCxMeYT//auxpiP/bulAiuN\nMRuANcBHIvK5NYmVUqrtsrwZSimlVPALhauhmsUYc5cxZpu/M/0pq/OAr6/FGHPYGPOtf/qZ1Zkq\nGWNmGWO8xpj2QZDl0WpfwPzUGNMlCDL9xf/7tNEY86ExJjEIMjXpC62tlOVnxpjtxpidxpg/WJml\nkjHmdWPMMWPMJquzVDLGdDfGfGmM2eo/Nv02CDK1M8Z84///ttnfJ1z//uF0ZmGMmQj8EfiFiLiN\nMR1FJMfiWI12zFvFGNMd+AcwEBgpInkW54kXkRL/8l3AYBG5w+JMFwNfiojX/8eHiMj9FmcaiO/L\nqv8L3NuUi0MClMMG7AQuArKAdcA1IrLdijzVco0HSoB5InKWlVkq+f/w6SIiG40x8UAmMDkIPqtY\nESkzxtiBr4HfikidI2WE25nFHcBTIuIGCIZCUU0wdr4/B8y2OkSlykLhF4fvgGgpEVkqIpU51gDd\nrcwDzfpCa6CNBnaJyAERcQHzgckWZ0JEVgL5VueoTkSOishG/3IJsA04zdpUICJl/sV2+C54qvfs\nIdyKxQBggjFmjTEmwxhzttWBqvkff1PGP4wxSVaHMcZMAg6JyGars1RnjHncGHMQSAMesjpPLTcB\nn1gdIoicBhyqtn6YIDgABjtjTG9gGPCNtUl8Z4f+C4iOAktEZF19+wbLpbNNZoxZgu8qqapN+Krh\ng/jeT4qIjDXGjALeA/panOsB4G/AoyIixpjHgTnAzRZmehBfc90ltR4LuIY+JxH5SEQeBB70t3/f\nBfzJ6kz+fR4AXCKSHug8Tc2kQo+/CeoD4He1zqQt4T9rHu7vi1tgjBksIt/XtW/IFQsRuaS+x4wx\nM4B/+/db5++47SAiuVbmquU1oFX+s9eXyRgzBOgNfGeMMfiaVjKNMaNFJNuKTHVIBxbTCsWisUzG\nmOn
4RhC4MNBZKjXjc7LSD0DPauvd/dtUHYwxEfgKxVsistDqPNWJSJH/C9I/A+osFuHWDLUA/39o\nY8wAILI1CkVjal3V8ytgi1VZAERki4h0EZG+ItIHX/PB8EAXisYYY/pXW70CX7uupfxXrs0GJolI\nhdV56mBlv8U6oL8xppcxJgq4BlhkYZ7qDNb36dT2T+B7EXnB6iAAxpiOlU3ixpgYfC0N9Xa4h9vV\nUJH4/kGGARX4hhFpbAj0gDPGzMOXyQvsB24XkWOWhqrGGLMXODsIrob6AF+/kxc4AMwQkSMWZ9oF\nRAGVf3SsEZE7LYyEMeYK4CWgI1AAbBSRn1uU5WfAC/j+8HxdRCy/XN0Ykw5MBDoAx4CHReQNizOd\nC3yF79YM4p/+KCKfWphpKPAmvn87G/AvEXmi3v3DqVgopZQKjHBrhlJKKRUAWiyUUko1SouFUkqp\nRmmxUEop1SgtFkoppRqlxUIppVSjtFioNsUYUxwir7mvKcPGB+JnK1UXLRaqrQnEF4usfE39opRq\nFVosVJvnH67iC/+owEv89/nAGNPXGLPaf0Omx5rzV7wx5jL/6MeZxpjPjTGd/NsfNsbMNcZ85T97\nmGKMedoYs8kYs9h/XwHwDVXxB//2NcaYvv7n9zbGrKrMVO3nxRljlhpj1vsfm9Ryn5BSWiyUAt/Q\nGW+IyDB8Axi+5N/+AvCciPwE3/hZzfkrfoWIjBWRkcC/gN9Xe6wvvuEoJgNvA1/4b9JTDvyy2n75\n/u2v+LNUZnrFn6n6UCjlwBUicja+8dGebUZWpRqlw32oNsUYUyQiibW2Hcd3FzOPf2TQLBHpbIzJ\nATr775KXAPxQ+7kNvOYQfAfsrkAksE9EfuG/a6JTRP7sH/G3TERi/M95BMgVkReNMfuAC0Rkvz/T\nERHp5M+U6s9alcm/z3PABHxjaw0A+lg9OKQKH3pmoVTTzhiaO4LpS8CL/jODGUB0tccqwHd/VsBV\nbbuXmrcNkEaWq2eahm9gweEiMhzIrvUzlTolWixUW1PXQX8VcK1/+TpghX95NXCVf/maZr5mIr77\nUgPc0MznVvp1tZ+92r+8slrWadX2TQKy/WdBFwC9GnhdpZot5G5+pNQpivHftrXyznNz8N2Rb64x\n5l7gOHCjf9+ZwNvGmD8CnwGFzXjNPwEfGGPygC/x3WyqLvWd1QiQYoz5Dl9/RGWBuBtIN8b8Hqh+\nA513gI/8+68nCO4FosKL9lkoVQ9jTIyIOPzLvwauEZEpFsdSyhJ6ZqFU/UYaY17Gd8aQD9xkcR6l\nLKNnFkoppRqlHdxKKaUapcVCKaVUo7RYKKWUapQWC6WUUo3SYqGUUqpRWiyUUko16v8Dd87HAvVu\nBMgAAAAASUVORK5CYII=\n", + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "glmnetPlot(fit, xvar = 'lambda', label = True);" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Now when we plot against %deviance we get a very different picture. This is percent deviance explained on the training data. What we see here is that toward the end of the path this value are not changing much, but the coefficients are \"blowing up\" a bit. This lets us focus attention on the parts of the fit that matter. 
This will especially be true for other models, such as logistic regression." + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": { + "collapsed": false, + "jupyter": { + "outputs_hidden": false + }, + "scrolled": true + }, + "outputs": [ + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAZAAAAElCAYAAADKuLQKAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzsnXl8VNX5/98n62QHAiSsCYGwhH0LgiiLYN0QFSxqrdXa\n2tparXb/+f3W2k2prVVrrd9atWpbqYorVoWiUZQlkJAECGHLAgQSEsg6ycxkZs7vjzOZzGRhwmQm\nk0nO+/W6r9x75+aeMyc385nnec7zHCGlRKPRaDSaCyUk0B3QaDQaTXCiBUSj0Wg0XqEFRKPRaDRe\noQVEo9FoNF6hBUSj0Wg0XqEFRKPRaDReoQVE06cRQtiEELlCiP1CiL1CiAeEECLQ/fIGIcS9QohC\nIcQr7c4vEULUOt7nXiHEZj+1/6IQ4gZ/3FszMAkLdAc0Gg8YpZRzAIQQQ4FXgXjgFz29sRAiREpp\n7+l9LoC7gcuklKc6ee0zKeW1Xf2iECJUSmnzX9c0mgtHWyCaoEFKWQ3cBdwDSgCEEL8TQuwSQuQJ\nIb7pOC+EEM84vu1/JIR4v/WbtxCiRAjxqBBiD7BWCJEmhPhACLFbCPGpEGKi47qhQog3HPfeJYRY\n6Di/xGEl5AohcoQQMe376bCS9gkhCoQQ9zrO/QVIAz4QQtzXydvrYFU5LIa/CCF2AuuFENFCiOeF\nEDsdbV97vnFwvPa0EOKgw6oZ7nL+Msd7yBdC/E0IEe4yPr91vMdsIcRsIcSHQogjQohvXfhfTdOv\nkVLqTW99dgPqOzl3DhgGfBP4f45zEcBuIAVYA2xynE9yXH+D47gE+KHLvf4LjHfsZwJbHfv/BBY5\n9scAhY79d4GFjv1oIKRd3+YA+YABiAH2AzMdrxUDgzt5P0uAWiDXsf3Mcf5F4F2X634D3OLYTwAO\nAVHnGYfrgY8c50cANcANQCRw3OV9vwTc6zI+dzn2HwfyHO9zKFAR6OdBb31r0y4sTTBzOTBdCHGj\n4zgeSAcWA68DSCkrhRCftPu9fwM4rIdFwOsucZVwx88VwBSX87FCiGjgC+CPQoh/Am9KKcvb3Xsx\n8JaU0uRo403gEpSoCDqxNBx05cJ6vd37XSWE+JHjOAIYe55xuBTl8kNKeVoIsdXx+iSgWEp5zHH8\nEvAd4CnH8XuOn/uAGCllE9AkhDAJIeKllPVdvAfNAEMLiCaoEEKkATYpZZXjw/17Usot7a652sNt\njI6fIUCNdMRY2jcFLJBStrQ7v14IsQm4GvhCCHG5lPLwhb+TbmNsd7xGSnnEraPdHwfRxX57zI6f\ndpd9AIn+zNC4oGMgmr6O84NOCDEM+AvwJ8epj4DvCCHCHK+nu1gJax2xkCRgaWc3llI2ACVCiLUu\nbcxw7G4G7nM5P9PxM01KeUBK+TuUq2hyu9tuA64TQhgcFs71wGdevfOOfATc69KnWS7nOxuHz4B1\njhjJCGCZ4/pDQIpDjAG+CmT5qI+aAYT+NqHp6xiEELkod00L8LKU8o+O1/4GpAK5jm/hZ4DrgI3A\ncuAAcALIAeocv9O+/PRXgGeFEP+D+n/YABSgxOPPQoh8IBT1Yfwd4PtCiGWAzXH/D1xvJqXcK4T4\nO0pcJPBXKWVBF217ov31vwaeEEIUoIS1BLi2q3GQUr4lhGgdh+PAdkcfzUKIO4A3hBChjr
7+Xzf6\nqEt3a9wQUupnQtP/EELESCmNQoghwC7gYinlmUD3S6PpT2gLRNNf2SSEGIQKiv9Si4dG43u0BaLR\naDQar9BBdI1Go9F4hRYQjUaj0XhFvxAQIcQVQogiIcRhIcRPAt2fvoYQYqJL+Y29Qoi61hIbGoUe\nI8/oMeoeQoj7HKVs9vX38Qn6GIgQIgQ4DFwGnEJNSbxJSlkU0I71URzjdRKVJHci0P3pi+gx8owe\no84RQkxFZf/PB6yoad7fllIWB7RjfqI/WCCZwBEpZZkja3gDsDrAferLrACO6X/686LHyDN6jDpn\nCrBLSmmWqnryZ6j6Y/2S/iAgo1DJYq2cdJzTdM46HPWRNF2ix8gzeow6Zz9wiRBisKMawFWoYpz9\nkv4gIJpu4ijZfS3uBfo0Lugx8oweo65xuM7XA1uA/wB7UVUL+iX9QUDKURVJWxntOKfpyJVAjpSy\nKtAd6cPoMfKMHqPzIKV8UUo5T0q5FFWm35/FNgNKfxCQ3cAEIUSKECICuAm1ZoOmIzej3Q6e0GPk\nGT1G58FR9BMhxFhUMc1/BbZH/iPoZ2GBmsYLPIkSxOellI8GuEt9Doc/tgxIc1Sh1bRDj5Fn9Bh5\nRgjxGTAEVfzzfillVmB75D/6hYBoNBqNpvcJuAtLqDWeKx0lqjt7fYkQotaRvJTrKLut0Wg0mgDT\nF6rxvohaIOjl81zT1XKfGo1GowkQAbdApJSfAzUeLjvf8psajUajCQABF5BuslAIkSeEeF8IkRHo\nzmg0Go2mb7iwPJEDjJVSNgkhrgTeBiYGuE8ajUYz4OnzAiKlbHTZ/0AI8YwQYoiU8lz7a4UQekqZ\nRqPRXCBSSq/CBH3FhSXoIs4hhEhy2c9ETT3uIB6tSCn1dp7toYceCngf+vqmx0iPUTCP0QcffMCk\nSZNIT0/n0Ucf9Xh9Twi4BSKE+BewFEgUQhwHHgIiACml/CuwVghxNyoppxlVxE3jJaWlpYHuQp9H\nj5Fn9Bh5JhBjZLfbueeee9i6dSsjR45k/vz5rF69msmTJ/ulvYALiJTyFg+v/xn4cy91R6PRaIKW\n7Oxs0tPTSUlJAeCmm27inXfe8ZuA9BUXlqaXuP322wPdhT6PHiPP6DHyTCDGqLy8nDFj2qrHjx49\nmvJy/9WW1QIywFi6dGmgu9Dn0WPkGT1GnhkIY6QFZICRlZUV6C70efQYeUaPkWcCMUajRo3i+PHj\nzuOTJ08yapT/1tfTAqLRaDT9hPnz53P06FHKysqwWCxs2LCBa6/1XxWoflWNVwgh+9P70Wg0A4uq\nKrj2Wtixw/t7fPjhh9x3333Y7XbuvPNOfvrTn573eiEE0ss8EC0gGo1G00fYvx9uvBEOHuy9Nnsi\nINqFNcDQvmvP6DHyjB4jz3gzRtnZMHOm7/viL7SAaDQaTR/hX/+C668PdC+6j3ZhaTQaTR9g505Y\nuxaOHYPIyN5rV7uwNBqNJoix2+G+++C3v+1d8egpWkAGGNp37Rk9Rp7RY+SZCxmj9eshNBRuvdV/\n/fEHAa+FpdFoNAOZN96Av/xFubBCguwrvY6BaDQaTYDYvRuuugo2b4bZswPTBx0D0Wg0miBj+3ZY\ntQqeey5w4tFTtIAMMLTv2jN6jDyjx8gz5xuj11+H1avhxRfhuut6r0++RsdANBqNppeQEn7/e3jq\nKdiyBWbNCnSPeoaOgWg0Gk0vYLXC976nXFfvvw+jRwe6R4qexEC0BaLRaDR+5uxZuO02JSLbtkF8\nfKB75Bt0DGSAoX3XntFj5Bk9Rp5pHaO33oLp02HSJNi0qf+IB2gLRKPRaPxCXR3cfDPs2QOvvQaL\nFwe6R75Hx0A0Go3Gx7z1Fnz3u3DTTfDrX0N0dKB71DU6BqLRaDR9gOpqFSjPyem/VocrOgYywNC+\na8/oMfKMHqOOvPmminWMGAF5eWC1ZgW6S35HWyAajU
bTA6qr4Z57IDdX1bW6+OJA96j30DEQjUaj\n8QIpYeNG5bK65Rb41a/6dqyjK3QMRKPRaHoJq1WVInnsMbBYBp7V4YqOgQwwtO/aM3qMPDMQx8ho\nhD/9CdLT4dln4Ze/hIKCrsVjIIyRtkA0Go3mPJw5A08/rUTj0kthwwZYsCDQveobBDwGIoR4HrgG\nqJRSzujimqeAKwEjcLuUMq+L63QMRKPR+ISjR+Hxx+HVV2HdOvjBD5T10d8I9vVAXgS+1NWLQogr\ngfFSynTgW8CzvdUxjUYz8Ni9G268ERYuhCFDoKhIWR/9UTx6SsAFREr5OVBznktWAy87rt0FJAgh\nknqjb/2RgeCX7Sl6jDzT38ZISvjPf2DZMli7ViUAlpSoLPIkLz9t+tsYdUYwxEBGASdcjssd5yoD\n0x2NRtNfsFhUTOOxxyA0FH70I/jylyE8PNA9Cw4CHgMBEEKkAO91FgMRQrwHPCKl3O44/i/wYyll\nbifX6hiIRqPxSH29Wkr2iSdUldwf/xhWrgThVSQguOnveSDlwBiX49GOc51y++23k5qaCsCgQYOY\nNWsWS5cuBdpMSn2sj/XxwDw+exb27FnKc8/BjBlZ/O//wl139Z3+9cZx635paSk9pa9YIKkoC2R6\nJ69dBXxXSnm1EOIi4Akp5UVd3EdbIB7IyspyPlCaztFj5JlgG6OiIrWU7MaNcOut8MADMG6cf9sM\nljEKagtECPEvYCmQKIQ4DjwERABSSvlXKeV/hBBXCSGOoqbx3hG43mo0mmDBbIYPP4QXXoCdO+E7\n34EjR2Do0ED3rP/QJywQX6EtEI1mYGO1wscfq8D422/DjBmqTtWttwZnnareoCcWiBYQjUYT1Njt\n8MUXSjTeeANSU9VCTl/+MowaFeje9X2CPZFQ04u4BtI0naPHyDOBHiMp1VKxP/whpKSo1f9GjYLt\n22HXLrj//sCLR6DHqDcIeAxEo9FousuBA8rS2LBBicjNN6s4x9Spge7ZwES7sDQaTZ+muLhNNGpq\nVF2qm26CuXMHZt6Gr9ExEAdaQDSa/kF5uVpTfMMGVVLkxhuVaFx8MYRox7tP0TEQTbcZCH7ZnqLH\nyDP+GKPqalW0cOlStbb4vn1qlb9Tp+DPf4ZLLgku8RgIz5GOgWg0moBRV6em227YADt2wJVXqgD4\nFVdAZGSge6fxhHZhaTSaXqWpCd5/X62zsXWrqoB7002wahXExAS6dwMP7cLSaDR9GosFNm2Cr3wF\nRo5UhQxXrYKyMmWB3HSTFg9fcPLkSZYvX87UqVOZPn06Tz31lF/b0wIywBgIftmeosfIM90ZI5tN\nWRjf+AaMGAHr16sg+OHDsHkz3HEHDBrk/74GikA8R2FhYTz++OMcOHCAHTt28Oc//5mioiL/tee3\nO2s0mgGH3a7qTr36Krz+OowerXI1HnoIxozx/PuanpGcnExycjIAsbGxTJkyhfLyciZPnuyX9nQM\nRKPR9AgpIS+vLVcjNlaJxrp1ehnYQFJaWsrSpUvZv38/sbGxXV4X1NV4NRpNcFJU1CYaFosSjU2b\nYNo0neAXaBobG1m7di1PPvnkecWjp+gYyABD+/c9o8eoc4xG+Ogj+MlPYMKELC67TK3s9/LLcOwY\n/OY3Kn9Di4ciUM+R1Wpl7dq1fPWrX2X16tV+bUtbIBqNplPMZhXP+Phjte3dq8qHLF8O992n1tcI\nDQ10L/sZdrtKjhk82OtbfP3rXycjI4P77rvPhx3rHB0D0Wg0gFpLIyenTTB27oSMDCUYy5erGVR6\nTQ0/c/o0zJ4NFRVe/foXX3zBpZdeyvTp0xFCIITgt7/9LVdccUWXv6NrYTnQAqLRdB+7HfbvV1Nt\nP/4Ytm1TpdFbBePSSyEhIdC9HGDk5cFXv6rquPQSOpFQ0220f98z/XWMpFQ5GM8+qxZbSkqCtWvh\n0CG47Ta13Gt+Pv
zxjyrJ73zi0V/HyJd4NUbZ2TBrls/74i90DESj6ceUlcEnn7S5pUJClHVxzTXw\nhz/o3Iw+x8aNKsMySNAuLI2mH1FZ6S4Y9fVtLqnly2H8eD1Lqs9y4IAqDFZa2qNgU2pqKgkJCYSE\nhBAeHk52dvZ5r9cxEAdaQDQDjZoa+PTTNsEoL4clS9oEY+pULRhBw+rV6o/3wAM9uk1aWho5OTkM\n7uZMLh0D0XQb7bv2TF8eo8ZGtYTrj38M8+apoPezz6qSIX//u1pT4+234d57/ZvQ15fHqK9wQWP0\n+utQUKAWd+8hUkrsdnuP79MddAxEo+nDmEzuuRh5eUo4li+HJ56AzEyIiAh0LzU94qOP4J571E8f\nLIIihGDlypWEhoZy11138c1vftMHneyirf7k8tEuLE2wY7XCnj1tgrFrl3JDtbqkFi3SuRj9ii++\ngOuvV2bjokU+ueXp06cZMWIEVVVVrFy5kqeffprFixd3eb2OgTjQAqIJNux25bloFYxt2yA1VYnF\nZZepZVx1LkY/5cMPVc7HP/8Jl1/ulyYefvhh4uLieOA8cRUdA9F0G+279ow/x0hKVYTwmWdUDsbw\n4apq7ZEj8LWvwdGjbbkY11zTd8VDP0eeOe8Yvfgi3H67sjxcxMNub6GhIcfrNpuammhsbATAaDSy\nefNmpk2b5vX9PKFjIBqNnyktbbMwPv4YwsKUdbF6tYpjjB4d6B5qeg0p4Ve/UjMePv0UJk1ye9lk\nKmX//utZuPC4V7evrKzk+uuvRwiB1WrlK1/5Cpf7yboB7cLSaHzO6dPuuRhGo3suRlqanlo7ILFa\n4e67ITdXLQrvWPipFSklRUW3ExY2mPT0J7xq4vDhw6xbt67VLUVxcTG/+tWvuPfee7v8HR0DcaAF\nRBMIzp2DrKw2wTh9GpYubROMjAwtGAOeM2dUhrndDq+9BnFxbi/b7VaOHLmbxsY8Zs78hLCwnq/h\nYbfbGT16NLt27WLMeUoOBHUMRAhxhRCiSAhxWAjxk05eXyKEqBVC5Dq2/wlEP/sL2nftGU9j1NAA\nH3wAP/qRKm+emgrPPadyMl55ReVivPUWfO97/TeRTz9HnsnKylJWx5/+pB6EqVPh3Xc7iIfZXEF+\n/grM5nKfiQfAf//7X8aPH39e8egpAY2BCCFCgKeBy4BTwG4hxDtSyvarwH8mpby21zuo0aByMXbs\naLMw8vNh/nxlXTz1lNrXuRiaDuzbpxZOSUxUJurUqR0uqa39jMLCWxgx4k5SU3+OEL5bYOXf//43\nN998s8/u1xkBdWEJIS4CHpJSXuk4/ikgpZTrXa5ZAvxQSrmqG/fTLixNj6mrU+tibN+uYhm7dqms\n7ssua8vFiIoKdC81fZbTp1WpgKws+P3vVenjdmaoxVJJcfHPOHfuAyZNep7ExKt82oWWlhZGjhxJ\nYWEhw4YNO++1wbwm+ijghMvxSSCzk+sWCiHygHLgR1LKwt7onKb/Y7GoPIzs7Lbt+HFVUXvBArj/\nfp2LoekmLS3w9NNqbd8774SDB6HdeuR2ewvl5U9z/PhvSUr6GpmZRYSF+f7h+uCDD5g7d65H8egp\ngRaQ7pADjJVSNgkhrgTeBiZ2dfHtt99OamoqAIMGDWLWrFksXboUaPPbDuTjvLw8vv/97/eZ/vTm\n8SefZFFeDkIsZdcu+O9/syguhokTl5KZCYmJWTzwgIpprFix1Pn7CQl9o/996dg1BtIX+hPw46ws\nsu64A4YMYennn8PkyTzxxBPOzx8pJe+99wfKy//EokWTmDXrM3bvruTkyb1+6c+rr77KrFmzyMrK\n6vB6635paSk9pS+4sH4hpbzCcdzBhdXJ75QAc6WU5zp5TbuwPOD6QPV3KivdLYvdu1X8MjOzbZs7\nt8OXxAE1Rt6ix8hBeTn88IfK3/n443DDDU53VVZWFgsXTqSy8h9UVLyElC2kpf2O
oUNXI7qYWVFT\no9aaf/VV77vU1NRESkoKxcXFxLUL2HdG0E7jFSpidAgVRD8NZAM3SykPulyTJKWsdOxnAq9JKVO7\nuJ8WkAFKY6OaXp+drWIW2dlqLQxXsZg/v8PUe43mwpESPv8cXngB3nlH5Xb8v/8HMTEA2GzNVFe/\nTUXFSzQ0ZDN06A0kJ3+NhITFXQpHK1lZqpp7bm4vvA8HQRsDkVLahBD3AJtRU4qfl1IeFEJ8S70s\n/wqsFULcDbQAzcC6wPVY0xewWtVa3q7WxbFjMH26EorVq5UbesIEtQKfRuMTTp6El15SWeQRESqv\n45FHIDkZKSX1dV9QUfESVVVvEBc3n+TkrzFt2puEhnav+mVNjZq0df/9/n0bvkQnEg4wgs31IKUq\nBeIqFnv3qqVYFyxosy5mzPDdVNpgG6NAMGDGyGRSVsaLL6qH78tfhq9/XZmzQtDcXEpl5StUVr6M\nEOEkJ3+NpKRbiYwcdUFjtGOHuu3VV8Njj/Vu7lDQWiAaTXvOnlWxilY3VHY2hIe3icUvfqHWw9Cz\nojR+Q0rlQ3rxRdiwQU3J+/rX4a23MIkz1NZ+Rt3h56it/YyWlrMMH76OKVP+RVzcPI8uqvbNbN+u\nSmMVFcGvfw233urH9+UHtAWiCRjNzcqacLUuzpxRAuFqXYwaFeieagYEVVWqtPoLL0BjI/L2r9F8\ny6XURh+jru4zams/xW43MWjQpSQkXMqgQUuIiZmGyofuPidOqIoFL78MNhv84AdKn3xlQV8oQRtE\n9zVaQPouNpv6ltUqFLt2qeOMDPdA96RJEOq7ZFyN5vxYrWpdjhdeQH6yFeNtl1J3XRq1w09TV7cN\nIcJISFjCoEFLGDToUqKiJl6QldHKmTOq/M0rr6gvTTfeqMr3X3RR4EvdaAFxoAXEM73hu5ZSxRtd\nLYucHEhKahOKBQuUZ8Bg8GtXvGLA+Pd7QNCP0cGD2P/+PI2f/526RbHULhlEXcJxwiMSHdbFpSQk\nLMFgSPFKMBoa4Omns6iqWsrWrVBWBsuWwS23wKpVfeu51zEQTUCprVXLsLpaFzZbmxvqpz9VbqnE\nxED3VDOQsddU0fDeY9QWbqAuuZK6FRB5zVgGJa1keMKlTBx0KZGRI726t8WCIzkVtm5Va9dPmKAs\njf/7P/X8h/XDT9tuWSBCiIuBPCmlUQhxKzAHeFJKWebvDl4I2gLxP2azKiboal2cPAlz5rhbF2PH\nBt401wxc7HYrJlMxxvp8Gg++R93pzTQMriSqYZCyLmZ8lYQhS4mIGOrV/evrlUhkZyvB+OILmDhR\n1Uu77DJYvDh41q73uwtLCFEAzARmAH8H/gZ8WUq5xJtG/YUWEN9it6ulVl3FYv9+SE93j1tkZPTP\nb1eavo9TKIwHMBoP0NRUiPFsDs0txUTUhxNzuIWY+kQSxl1LwtU/ISx5/AW3UVOj4hY5OWpyVk6O\nSkCfMUNVMli2TG1DhvjhDfYCvSEguVLKOUKInwPlUsrnW89506i/0ALimfP5risq3N1Qu3fD4MHu\nYjFnjjPhtt8S9P79XqC3x0hKG83Nx9pEwiEYzc1HiAgZSkzdYKIPtxCz/SQx5xKInrSS0KVXqE/2\nCygoWF2tRKJVKHJzVQB85kwlFnPmqJ+TJ3v+0hSo5+jOO+9k06ZNJCUlUVBQAEBNTQ3r1q2jrKyM\n1NRUXnvtNRIcc+F7IwbSIIT4GXArcKljHY9wbxrU9A0aGtQ/iKt1YTS2CcX3v69ypYYPD3RPNQOJ\nNqEopKnpgLtQRCQTEzOVaFIZcmwQY3akEf1OHaG1JrjMUW//95eplb26QWWlu1Dk5Kh43uzZSiiu\nuw5++UvlmgqmmYF33HEH3/ve97jtttuc5x599FFWrFjBj3/8Y9avX88jjzzCo48+2uO2umuBJAO3\nALullNuEEGOBpVLKl3vcAx+iLZDOaWlxL/2x
axeUlKhvVa5xC71Wt6a36FwoCmluPkxERDLR0RnE\nxEx1CEYKMXuqCN36hQo4HD8Ol16qBGPFCo9rBksJp051tCyampRQtFoVc+bA+PH9o/xNWVkZq1at\nclogkydP5tNPPyUpKYmKigqWLl1KUZFat683LJD7pZTO5WallMeFEB2X19IEHCmhuNjdssjPVyXK\nW8Xiu99VdaPCtQ2p8TNKKIodriclEsqicBeKIUOuYPToB4iJmUKoNQx27oQ3tsLW/1MPcGamEoy/\n/vW8U5qqq6Gw0H3bv1+le8ydq7bbboMnn1T/EwPlC9OZM2dISkoCIDk5mTNnzvjkvhcUA2l3rkBK\nOcMnvfARA9ECqapSsQpXwYiK6liyPD5eXa/9+57RY+SZ9mPkLhSuMYrDREQkER091WFRZDj2pxAa\n6gim2WxqStPWrWrbvh2mTGmb0nTxxW5LQEqp4nWtAnHwYNu+xaJWjs3IcN9Gj+59sQjUc/Tkk0/y\nzDPPUFZWxu9+9zvuvfdehgwZwrlzbStgJCYmcvbsWcCPFoijCu53gDTHTKxW4oDt3jSo6Rl1dbBp\nk9p27YJz51SsIjMTvvUt+NvfYKR3U9k1Go9YrQ2YTCXU1n5OWdn2LoQigyFDLmf06PvdhaIVKdX0\nvtakiawsFWxbsUKVRt+wAQYPdiakFn7W0aoIDXUXijVr1M/k5IFjVXTGgQMHeP7553nvvfdYu3Yt\nmzZt4pprriEpKYnKykqnC2u4j4Kb57VAhBAJwGDgEeCnLi81dLagU6DprxZIdbUqCLpxo1qGYMkS\nVbJ88WIV4OsPPltN38BqbcRsLsNkKqW5uQSTqdSxqX273YTBMA6DIZWYGOV+io6eSnT0ZMLCYru+\n8alTbRbG1q3qnMPCsC9dTpl1VAeRaF0RdsqUjhaFn1dqDRhWu53jZjNpLhbXhfDGG2/w0Ucf8eCD\nD7Jq1SrWrVuHwWCgqqqKIUOG8JOf/IT169dTU1PjDKL3SikTx+JPSbhYLVLK49406i/6k4CcOgVv\nvQVvvqmyvC+/XH3LuuqqNneURnOh2GzNLqLQJgyt+zZbIwZDqmMb12E/PHxo90p71NQoy6JVMM6c\nwbrkMkpmXkfhsCUU1o6k8KCgsFDVREtM7CgSU6aoaeQDiUNNTazIz+fEwoVe/X5RURHz58+npaUF\ni8UCwLx58/joo4+48cYbOXHiBCkpKbz22msMGjQI6J08kHuAXwCVgN1xWuoYiG8pKVGC8eab6tvX\n1VerFTK/9CXfZbVq/75ngnmMbDYTZvPxdsLQZk1YrbUYDCmdikRU1DjCw4d3SyA6jFFzszKPt27F\nsuVTjha1UJh+HYXDl1IoMjhYMZgjRwTJyR2FYvLk/vml6EKeIyklH547x71Hj/L15GR+1s2pyJ1x\n8cUXU15eTmpqKlOmTCE0NJSnn366y+t7YxbW94FJUsqz3jSi6ZqiIuWaevNNVeZ59Wr43/+F5csD\nV95Z03ex2y2YTMe7tCBaWs4SGTmGqKg2YUhMvMYpFBERyRdcfrxTbDZMn+7i8BsFFH5cQeGRcArj\nFlAY8gAcn6PNAAAgAElEQVQl9b9hzFhBRmoIGRlwTQb8OENVWu7vSajeUGE2850jRzhgNPKH8eO5\ndqh35VUA6uvrqaiooLS0FIAHH3yQMWPG+KinHemuBfIJsFJKafVbT3xAMFggUqpZiRs3qq2uTlkZ\na9aomIYuCTKwsdtbMJtPdhCG1n2L5QyRkaO6dDNFRo5AeZt9Q309lOw3UvzpCYpzaig+ZKG4PJKj\ndcM4KUeRNriWjCmSjMVDyJgdSUaGKnXTl6rN9lWsdjsvV1bys+JivjFiBD9PTSWypQW2bVMTCrwg\nPz+f5cuX09TUhJSSqKgoioqKuO+++zh8+DCgstIHDx5MrmPh9d6wQIqBLCHE+4C59aSU8nFvGh1o\n2O1qem2r
pSGlEowXXlCzp3QQfOBgt1uxWMo7CVIrobBYKoiISHYThsGDVzj3IyJGERLiu28ZNpua\n6VRcDMVH7RTn1lC8z0hxSQjF1XE0tYSTRilpCdWkjTIzeVIEV90STdrSRCbMMxAenuyzvgwUWux2\nXqms5JHjx0mOiOA/M2YwNy5OvVhdrRJVTp3y6t5NTU2cO3eOsLAwbDYbaWlpPPPMM2zYsMF5zQ9/\n+ENn/KOndPdJPO7YIhybxgM2m/oisXGjCobHxyvR2LhRZYAHaqphMPv3e4uejJGUNszm011aEGZz\nORERw92shkGDLsVguM1hQYwmJMS3GZ51dQ6BcN0OWSg+bOVEZQTDIutJCyklrekAaXFVXJMGaV+K\nI21RMsMvmYSYMLlDLY+srCymhC/1aT/7G+2fI7PdzounT/Po8eNMiIrib5MmsaT9B3lVFfTAhZWW\nlkZqaipZWVmsWrUKm83Gxx9/zMMPP+y85rXXXuOTTz7xug1XuiUgUsqHAYQQ0VLKJp+03A+xWODj\nj5VIvPMOjBmj3FNbtqgZJZrgR0o7FktFB2FotSbM5hOEhye6WRDx8YsYPvwWx/FYQkJ8+x3MalXx\nsw4iUQzFxRKLyU7a0HrSIstJaznEtJocrrUcIm1yBClXDcMwJ0OVlp22un9GswOITUp21NWxsbqa\n186cYVZsLK9mZLDQUciwPfbCQmypqV4XGkxKSmLs2LEUFxdjt9upqalh7ty5zte3bdtGcnIy48df\neFXizuhuDGQh8DwQK6UcK4SYCXxLSvkdn/TCRwQiBtLcDB99pETj/ffVjJI1a5RwjBvXq13R+ADl\nYjqF2XwCk+lEJxbEcUJDE5yzltosidYYxFhCQ30fAKip6UoglAsqOVmSNtpCWmwVaaKEtMYC0ip3\nkHbiU4amxiJmzlAi0brpBVv8RovdTlZtLW9WV/N2dTVJ4eHcMGwYa4cNI8PDLIKGO+/k6dde42cN\nDV63v3fvXhYuXIjZbGbixIlkZ2c7K+9+5zvfIT09nfvvv995fW9M490FrAXelVLOdpzbL6Wc5k2j\n/qK3BKS+Hv7zHyUamzerUiFr1sD11+ss8L6MlDYslkqnOJjNbZs6PklLyxnCw4djMIwhMnJMJ4Hq\nFEJDfb9SkMWiagQWF6vp3O1Fwm5XxS7T0iBtbAtpkadIsxSRdnY3KWWfEXFgrwquzZzpLhQZGW5l\nQDT+wWSzsaWmho1VVbx39izpUVGsGTaM64cOZUI35+BbW1qoHjaMJzIzeXTz5h71p6ysjKuvvprh\nw4fz9NNPk5GRgc1mY9SoUeTm5jLS5YOqV5a0lVKeaDc/3OZNg8HKuXPw7rtKND79VM2YWrMG/vKX\nHrkse53+GgOR0k5LS5WLMJxsJw4nsFhOExY2xCkOrVtcXKbzXETECD777AsWLVrq4/7B2bNdWxGn\nT6svH06RSIO1ayRpUadJa8hnSEkOYl8BFBTAf8rUnNgZM1RVzFt+oPZ7sY5Hf32OuouUknKzmS/q\n63mrqooPz51jdlwcNwwdyq/HjWO0wUBWVhYTxo7t1v1OnjzJ72+4ge8KQdx5cjYupH9CCJYtW8aH\nH35IRkYGW7ZsYcqUKW7i0VO6KyAnhBCLACmECAfuAw76rBd9lIoKePttJRrZ2arywk03wT/+AV24\nMDV+QEqJ1Xqug9Wgpru27pcTFhZHZORoN3GIjZ3p2B9NZOQoQkIi/dZPsxnKyroWidBQd4GYPx/W\nrVP7YwfVE160TwlEQQG8vw/27YO4uDZr4rrr4Oc/V+KhSyn3KmdbWtjT0EB2fT27GxrY3dCATUoW\nxMdzbWIiT6WnM9yLxK3KykoeffRRXnrpJbYlJVG8fj1PVFXx4MSJXvWzurqau+++my+++IKzZ8/y\nm9/8hrvvvhuAf//739x8881e3bcruuvCGgo8CawABLAZuK+vJRb6woVVVq
ZmTW3cqMpAX3WVimdc\ncYVOgvIHUkpstvouXEptlkRISKSLMIx2uJfGuJ0LDfWvq0ZKNUmmK4GorFQTJ1xFonUbN85RlsNm\ng6NH24SidTtzRlUHdHU/TZ+uanxoehWjzUZuQwPZDQ3sdghGVUsLc+PiyIyLY35cHPPj4xkbGdm9\nsi7tOHz4MO+99x7vvfceubm53H777fzvunWcvvdernzqKSxSUrV4sVd937dvH/Pnz8dsNiOE4Fe/\n+hUPPvgg+fn5fPvb38ZkMhEeHs4zzzzDvHnzgF6qhRUMeCsghw+r/IyNG5X/efVqJRorVkCk/76w\nDghUcT53MWgvFoCb1aCEwd2SOG+hvh5gs6mYVl1d28/W/bNnO8YjDIbOBSItTZUMd0sEra5uE4h9\nDuuisFC5mma0C2qnpQXXsnf9BIvdzj6j0c2yKG5uZlpMDJnx8Uos4uKYFB1NiJfuwYqKCnbu3Mm2\nbdvYtGkTjY2NXHPNNaxatYrly5cTHR7OP773Pe5fu5a70tLYVlfHZ7Nne/2ehg8fTlVVFaC+oAGM\nGzeOmpoaTCYTISEhJCYmcuKE+t/zm4AIIX4spfydEOJPQIcLpZT3etOov+iugEip/p9bRaO6WgXA\n16xRlW77cza4L33XNltzJ7GGk27iYLeb3cSgffwhMnI0YWEJF/xNTkowmTr/4L+Q/eZm5SVKSFAz\nWBMSwGrNYty4pQwZoiwHVyuiU9elxaJq0rS3KozGjkIxbZpqMMgJxhiIXUoONTW5WRb7jUbSoqLc\nLIvpMTFEeJndazabycvLY+fOnbzzzjsUFxdTX1/PggULWLRoEVdddRVz5sxxPu8NVivfff11dkdE\nsOHyy3mzupomu53HejDNds2aNWzbto2qqipuueUWXnzxRWbMmEFERAS5ubnccMMNHD58uFdWJGyN\nc+zx5ubdQQhxBfAEEAI8L6Vc38k1TwFXAkbgdill3oW2I6VaeKlVNFpalJXx7LOwcKHOBm+P3W7B\nbC7vxKXUJhJWawORkaOcQmAwjCE2dgaJiVc7xSIsbEgHcbDb1Zrs585596HfehwS0vah37q5HsfH\nw4gRbcX6OrsmJqbj3z4rCzr9bJQSyk+1WROt25EjSl1aReKee5T7SU+V7TXsUlJntXK2pYWzrT8d\nW7nFwp6GBnIbGhgaHu60LNYNH87s2FhivfzGaLPZOHHiBNnZ2ezcuZMdO3ZQUFBAeno6CxcuJDMz\nk7/85S+kp6cT0u4hs9rt/KOykl8cOsSKoiL2fPvbRMXGcmNhIf/oQdLYqVOnePfdd7FaVdUpq9XK\nhg0bqKuro6KigvDwcEJDQ1mwYIHXbbgSUBeWUFXdDgOXAaeA3cBNUsoil2uuBO6RUl4thFgAPCml\nvKiL+7lZIDYbfPFFW4Xb6Oi2HI05cwbu/7ZrroN7ILpNLKzWc0REjOhgNcBYzOYUzObRNDUNob4+\npNMP9/PtG43qg7urD/3u7vfYvWizKeuhpaXtp+t+XR0cOOAuFiEhHafKTpmip8r6EIvd3iYALmJQ\n3cm51uOalhZiQkNJDA9XW1iYc39ERARz4uKYFxdH4gVMPjCbzZw4cYKysrJOt/LychITE5k/fz4X\nXXQRCxcuZN68ecTGdu1utUvJ61VVPHTsGEmVlfz697/nkgcfhCuvZGNVFY+UlbF77lyvYiugBGTq\n1Klcc801/OMf/+Caa67hvvvuY9WqVaSmpnLq1CmsVisjRozg6NGjQC9M4xVCbAFulFLWOo4HAxuk\nlF/yplEXMoEjUsoyx303AKuBIpdrVgMvA0gpdwkhEoQQSVLKys5u2NICn3yiBOPtt5W7ec0a+OAD\nNSW+v4tG57kOJ2luPklNTTXnztVTW9uC2ZyCxTIOszkFk2ksJlMmTU3DaGoajNEYR2NjNHV1ooMA\n2O2uH+KShHg78bF2EmLtJMRYiY+xMS
y6hQmjW4hPt5BgsJBgMJMQaSI+wkRCpIm40CZCbF18aLue\nq7bA6W5c5+05UCWPw8M7/gwPV6sZZTiytK++Wv1MSur/D5GPkFLSYLN1EIPqdh/+7cXAZLczxEUA\nhroIwvCICKbExLgJRGJ4OEPCwgi/QDdCQ0NDl+JQVlZGdXU1I0eOJCUlxbktXryYr3zlK6SkpDB2\n7FgMHqpGto5BVUsLeY2N/LKkBENVFU//8Y9cNnIk4uWXIS0Nq93Oz0tK+MP48V6LB8DIkSN5+OGH\n+cEPfgDAoEGDWLFiBWazmaFDh2I2m0lMTCQnJ8frNlzp7iysPCnlrHbn9rYmFXrduBBrgC9JKe9y\nHN8KZLrGVoQQ7wGPSCm3O47/C/xYSpnbyf3k+rtvUfsADlF1vsN2IttBdNtfj+N12XYsZeevyU6u\n7c7rbve7gHtJWyjGmiFIu/t3ANe+t72/tvd54twxxgxJazsjpPO32o2O20+h7kjPaWuls7u1f8vn\n+XUPF3rP8XMljB2iywicj+PnShmbOA4phHpehXo6ZEjbkyJFCEgQ2NVfyS4JwfG8SRDScV46zkup\nHse2/wCfIB1tqM86id2O4x9PAiHKhSmE+uAWQrUdIhAIj4+XPSQEc0QE5kgDpshITJGRmCMjMUca\nOF15iOiJmQhpJ9JsJq6xkVn5+xl1upzmaAPW8FBAYohtwBIiCDWEkz5sMKOWLWbqbau9eq+1tbWs\nWbOGyZMn88wzz3Dddddx4403cscddxAeHk5zczOZmZnk5uZy6tQpEhMTeyWR0CaEGNu6AqEQIoXO\n//8Dzkc5HzJ8SCQIiIkKJW10FNMnxiGQ7DvcAAKmT4wDt2Nlcu473AhCMn1iLEJICg41ApIZk9Xr\n6hhmTFaZpQWHjQDMnBTd7vUYx7HRcRzlOFZlxGZOcRwXOV6fEg1ICg42g5DMaH3d5VgIx+ugjiPM\nHCirwlqbyMS4DEwnx5GXJ7FUJTMhLBOAw8b9AEyMmeo8tptgvGlqu9entzueNqCPRzCeccZpfaY/\nffG4xRhCyxkcx+2fp65/39bp68H7/AkkGZFphFutHGksINRmY2boBMKttWwyZjOR48wWaQDk2Y4B\nkBqa4XIsSA+dTmiolTxbMUXWZqJetsBtaqIC4Jys0J3jTz/9lOzsbGexxHfeeYeioiK+9KUv8f77\n7xMREcGBAwew2+1OK6UndNcCuQL4K/ApSpMvAe6SUn7Uo8aFuAj4hZTyCsfxT1ErHa53ueZZ4BMp\n5b8dx0XAks5cWEIIWVpayo5PdrDz453sztlN/rF8hhmGMTV2KpNtk5lQO4F0QzoJ4xIwjDNgSHXZ\nHMdhscExDctma8JoPEBjY55jy8doLCAsbDCxsTOJjZ3l/GkwjPPNQkIajcZnmM0qP3TYMPifbxzl\nmce+y7e3n2Ty2QNe3S87O5vFixdjtVqd2egLFiygqqqK06dPYzKZCAsLIzIykvr6eqD31kQfCrQG\nr3dKKau9abDdPUOBQ6gg+mkgG7hZSnnQ5ZqrgO86gugXAU90N4gOaqZEYWEh2dnZzu3QoUNMTp3M\nrDGzmDZoGlNCpjCidgSWMgumUhMh0SEYUg1EjYvqKDApBkJj+u58fSntmEwlbqLS2JiH1VpLTMwM\nN1GJiZnm9+Q7jUZzfpqaVKLyvHmwcNQG7qn+NpWP1Hp9vzlz5pCfn4/dbue2227jb3/7G2fPniU5\nOZkJEyZw7Ngx1qxZwxtvvAH4Nw9kspSySAgxp7PXO4tDXHAHlHXzJG3TeB8VQnxL3V7+1XHN08AV\nqGm8d3TVbnfzQJqamti7d69TUHbt2sW5c+eYN28e8+fPZ86kOcwYOoPBxsGYSk2YSk00lzRjKjVh\nLjMTGhfqZrF0EJiovicwLS3naGzMZ8uWN5g+3UhjYx7NzYcwGMY5RKVNWCIikgLd3YASjDkOvY0e\nI8
9cyBjV1qoKNa/9o4LfvXAn77/6vtftJicnU1VVhd1uJzQ0lNtuu42SkhI+//xz5/TexYsXs23b\nNsC/AvJXKeVdjiVt2yOllMu9adRf9KSUSVVVFbt373azVAwGAwsWLCAzM5PMzEzmzp1LXGwcljMW\nTCUmp7g4txITpuMmwgaFdWnBRI6NJNQQOIFxfajtdgtNTQfdLJXGxjyEiOggKtHRE326VGpfRn84\nekaPkWcudIweewx27ZK88UbPpxCUlZWxatUqCgoKnOduvPFGfv7zn3PttdeSk5PDkCFDAP8KyI1S\nyteFEGlSymJvGuhNfFnOXUpJSUmJm5WSl5dHamqqU1AyMzOZMWMG4S5zy6VdYqmwtAmKi8A0lzRj\nPmEmPDG8awtmjIGQyMDFKqSUmM0nO4iKxXKamJip7VxgMwgLC/6sao2mL9DUpPJRv/gCJkzo2b3a\nC8i7775LVlYWjz/+OOPGjes1AcmVUs5p/elNA72Jv9cDaWlpYf/+/W5WSnFxMTNnznQTlfHnmcst\nbRLzaXOnAmMqMWEuNxM+LLxrC2ZMJCHhvS8wVmsDRmOBm6gYjQeIiBjhJiqt1W97MpddoxmoPPCA\nqnTjsgKtV7gKSHNzM8uWLWPLli3ExcUxbtw49uzZQ6KjUKc/BeS/gB2V8PdZ+9ellNd606i/CMSK\nhA0NDeTk5LiJitFoZP78+U731/z58xk+fHi37me32rGc6mjBtMZgLKctRCRFYBhnIGZqDDFTY4ie\nGk3MtBgihnouJ+1L14PdbqW5+UgHa0VKSztRmUV09BSfL+XqL7R7xjN6jDzjzRht3gyPPKKSoXuC\nq4Ds37+fFStWEB0djZSSkydPMmrUKLKzsxk+fLhf80CuAuYArwB/8KaB/k5cXBxLly51e1BOnz7t\njKc8+eST7N69m0GDBrlZKXPmzCGmk/rwIWEhGMYaMIw1wKUd27O32DGXmzEdM2EsNNJY0Ejlq5UY\n9xsJiQwhZpoSldaf0VOjCR/kn7UjQkLCiImZQkzMFJKS2tYZMJsrMBrzaWzM59y5jzh+fD0mUwlR\nUZM6WCvh4UP80jeNJhhJT1dVn3uKdCZOwrRp06ioqHC+Nm7cOHJzcxk8eHCP2/Fkgbwipfxqa1Xe\nHrfmZwJhgXQHu93OkSNH3KyU/fv3k56e7iYqGRkZhHlZ2E1KieWUBeN+I8YDRufPpsImQhNC24TF\nIS7RGdG9mu9iszVjNO5v5wIrICxsULu4ykyiotJ0zopmQNLQAH/8o1o3zFtuueUWsrKyOHv2LElJ\nSTz88MPccccdztfT0tLYs2dPr8RAClGLSH0ALKVdYr+U8pw3jfqLviognWE2mykoKHAG6LOzsykv\nL2f27NluM7/Gjh3bo3iCtEtMx000HWhyE5emoiYikiKc7i+nsEyO7rVpyG05K/lueStWa40jZ6XN\nUlE5K75fi1yjGej4U0DuBe4G0oBy3AVESinTvGnUXwSTgHRGbW0te/bscZv5Zbfb3ayU+fPnO785\neEOrX1baJM3FzW2CcqAJ4wEjzUeaiRwT6RZbiZkaQ/SkaEIiescqaM1ZaRUWozGfpqYiR85KW1wl\nJmYmkZHJPm9f+/c9o8fIM8EyRn7PRBdC/EVKebc3DfQmwS4g7WkNeLm6vnJyckhOTmb+/PnMnDmT\nGTNmMGPGDEaMGNEtS8XTQ21vsdN8tNndWjnQhKnUpAL3rtbK1GiiJkQREuZ/YWnLWcl3C9oLEe4m\nKrGxM4mKmkhIiPfuuWD5xw8keow8Eyxj1FulTBYD6VLKFx1lTeKklCXeNOov+puAdIbNZqOoqIjd\nu3dTUFBAQUEB+fn5SCmdYtK6ZWRkEB3tG7eP3Wyn6ZCLG8whLpZyC1ETo9wC9zHTYjCMMyBC/DuV\nty1npc1SaWzMw2w+RUxMhpulEhs7g7CweL/2R6MJRnrDAnkImAdM
klJOFEKMBF6XUl7sTaP+YiAI\nSGdIKamsrHQKSut26NAhUlJSOghLSkqKz/I0bE02mg42uQXujfuNtFS3ED0luoOwRI6J9HuOiMpZ\n2deuyOR+R86Ke5FJnbOiGej0hoDkAbOB3NY1QIQQBVLKGd406i8GqoB0RUtLC4cOHXITld27d2Ox\nWJg+fbqbqEybNo34eN99Q7fWWzEWGt2D9weM2BpsRGdEd5huHDEiwq8f5FLaaGo64jIDTFkrdrvZ\nbQZYbOwsdu+uYvnylX7rS38gWNwzgSRYxqg3BCRbSpnpkpkeA+zQAhJ8ZGVlMX36dPbt2+cmLAcO\nHCApKckpKK0CM2HCBEJDfTcrq6WmxT1wv1/tS5vsELiPmRZDxDD/Jh9aLJUd4io7dhxh0aIpxMbO\nJj7+IhISFhITM23A1ALrDsHy4RhIgmWMekNAfgikAyuBR4CvA/+SUv7Jm0b9hRYQ77HZbBw7doyC\nggI3camoqCAjI6ODG6y1DIKvsJyxdHCDNR1oQoSLDoH7mKkxhA/2T3IktOasHKCxMYf6+p3U1e3A\nYjlFXFwmCQkLiY9fSHz8RToJUtMv6K0g+krgctRU3o+klFu8adCfaAHxPQ0NDezfv79DfCU2NraD\nqEyaNImICN9ZDFJKLKc7SY480ERofBfJkXH+SY5saTnrFJP6+h00NOwmImIkCQmLHIKykJiYDJ0A\nqQk6ektAkoD5jsNsKeUZbxr0J1pAPOMLs1pKyfHjxzuISmlpKRMnTuwgLMnJyT6Nb0i7xHzC3FFY\nipoIHxbeIXAfPTma0Ojuu5+6M0ZS2jAa91NXt536eiUqFksV8fGZxMcvIiFhIXFxCwgPH9TDd9s3\nCRb3TCAJljHy+5roQogvA48BWSgL5E9CiB9JKd/wplFNcCOEICUlhZSUFFatWuU839zcTGFhoVNQ\nPvzwQ/Lz8xFCdDrFOCrKu9UQRYjAkKIW70q8us2VJm2S5pJmZ2zl3IfnOPH7EzQfaSZiVETHOmGT\nor0unS9EqCP4PpNRo1SKlMVSRX39Turrt1NW9giNjTlERo4lPn6hw/W1iOjoSdpK0fQbuhsDyQdW\ntlodQohhwH+llDP93L8LQlsgfQ8pJRUVFR2slcOHD5OamtpBWHpauqUznMmRB9xnhZlKTBhSDW2x\nFYewRE2I8knJfLvditFYQH39DqelYrXWEh+/wOn2io9foPNTNAGlN4Lo+6SU012OQ4B813N9AS0g\nwYPFYukwxbigoIDGxka3WWCt+3Fxvl+4ym6203S4qUPg3nzSTFR6lBKUGTHEzowldmasT6Yam80V\nDiulNZaSS1RUmlNQEhIWEhU1UeemaHqN3hCQx4AZwKuOU+uAAinlT7xp1F9oAfFMX/fLVldXd5hi\nXFhY6DbFuHUbP368T6cYt7L1w61kDs/EuE+Vy2/Mb8SYbwQgZmaboMTOjCV6Ss9qhNntFhob852C\nUle3A5ut0Tl9OD5+IXFxmYSFxfrq7fmEvv4c9QWCZYz8FgMRQkwAkqSUPxJC3AAsdry0A/inNw1q\nNOdj6NChLFu2jGXLljnP2Ww2jh496hSUV155hX379lFZWcnUqVPdRGX69Ok9nmIcagglbk4ccXPa\nrJ7WGWGN+UpQzn1wjuOPHsdUYiJqYpRTUFoFprv5KyEhEcTHzyc+fj5wLwBm8ymnmJSU/C+NjXlE\nRaW7zfiKiup61UuNprfwVI13E/AzKeW+duenA7+VUq7q/DcDg7ZABhb19fWdTjGOj4/vdIqx69r1\nvsLWbFPur3yjU1wa8xsJjQ51E5TYmbFETfSu8KTdbqahYa/TSqmv34HdbiY+/iKXGV/zCA3tuECZ\nRuMJf5Zz3y2lnN/Fa/t0DETT15BSUlZW1kFUysrKmDRpUgdhSUpK8vk3eSkl5uNmN0Ex5hsxnzIT\nPSXazQUWMzPGqxUjTaaTLm6v
7RiN+4iOnuyIoyhLxWBI1VaKxiP+FJAjUsr0Ll47KqWc4E2j/kIL\niGeCxS/ra5qamtymGLdWMQ4NDe0gKtXV1Vx++eU+74O10ariKg5BacxvxLjPSNiQsA4usKjxURdU\nzdhmM9HYmOt0fdXXb0dKu0vmfKuV4t3U6fYM1OfoQgiWMfKngLwKfCylfK7d+W+gpvWu86ZRf6EF\nxDPB8lD3BlJKTp8+3cFaKSoqYvz48cycOZM5c+YwZ84cZs+e7fPyLaCSIpuLmzu4wKxnrWoWmIsL\nLGZ6TLcz7VWp++POzPn6+h0YjQeIiZnqNuMrMtK7adP6OfJMsIyRPwUkCXgLsAA5jtPzgAjgeill\nRVe/Gwi0gGh8gcVioaioiLy8PPbu3Utubi579+5lyJAhTkFp3ZKTfb8iIkBLbQvGAqObC8xYaCRi\nREQHF5ghxdAtEbDZmmlo2OM240uIUJdEx4XExs4hNNTgl/ek6Zv0xjTeZcA0x+EBKeXH3jTmb7SA\naPyF3W7n2LFj5ObmOrecnBwMBgNz5851E5XRo0f7JfZgt9ppPtLs5gJrzG/EZrQRO8PdBRYzLcbj\n2vZSSkymUurrtzstlaamImJjZ7gkOi7EYBjt8/ei6Tv0Si2sYEALiGeCxawOJN0do9aAfXtRsdvt\nHSyVtLQ0vwW0LdWWDi6w5sPNGFIMHfJWIkaePxnSZjPS0LDHrcZXSIiB+PhFTkslNnY2n322XT9H\nHgiW/zUtIA60gHgmWB7qQNKTMWqNq7QXlYaGhg6ikp6e7pdESAC7xU5TUZObC6wxvxFplx1cYDEZ\nMTbFhF8AAB0sSURBVF0mQ0opaW4+5jbjq7n5KIcOpXHFFV9j2LC1GAwpfnkPwU6w/K8FpYAIIQYD\n/wZSgFLgy1LKuk6uKwXqADvQIqXMPM89tYBo+iRnzpxxxlNatzNnzrgF6ufMmcOUKVP8kq8CjmTI\nCksHF5ip2ERUelSHvJWI4Z0nQ1qtDdTVfU519ZtUV7+NwTCOYcPWMmzYWqKi0vzSd43/CFYBWQ+c\nlVL+TgjxE2CwlPKnnVxXDMyVUtZ0455aQDRBQ01NDXl5eU4rJTc3l+PHjzNt2jS3uMq0adOIjIz0\nWz9sJhtNB5o65K2EGEI6uMCiJrknQ9rtVurqPuXMmdeprn6LyMhRDBt2I8OGrSE6eqLf+qzxHcEq\nIEXAEillpRAiGciSUk7u5LoSYJ6U8mw37qkFxAPBYlYHkkCOUUNDA/n5+W6WytGjR5k0aZKbpTJz\n5kyio6P91g8p1Zor7V1g5pMqGbIwuZCVa1aScEkCUROiHB9CNmprt1FV9QbV1RsJDx/utExiYqb4\nra99lWD5X/P7eiB+YriUshJASlkhhBjexXUS2CKEsAF/bZ+TotH0J+Li4li8eDGLFy92nmtubmbf\nvn1OS+WFF17g4MGDjBs3zs1SmTVrFvHxvikNL4TAMNaAYayBoauGOs/bjDYa9zVSsqGEmq01lP6i\nFLvFTsLiBAZdMoiES+aSPnMJ6elPUle3g6qqN8jPX0lYWIKLmEzTGfL9BL9aIEKILUCS6ymUIPwP\n8Hcp5RCXa89KKTtkagkhRkgpTzvWINkC3COl/LyL9uTXvvY1UlNTARg0aBCzZs1yfgvIysoC0Mf6\nOOiPLRYLL730EocPH6apqcmZqzJ06FAWL17MnDlzCA0NZcKECaxevdqv/blo3EXUbqtl82ubMRYY\nmVYzjfiL4jk4+iCx02O54q4vYWzZw6ZNf6S29lPmzRvMsGFrKSpKJSpqvLNwZl8a3/583LpfWloK\nwEsvvRSULqyDwFIXF9YnUsrz2rlCiIeABinl4128rl1YmgGL1Wrl0KFDbjGVvLw8EhMTO8wAS0pK\n8nxDL7FUW6j/op66z+uo3VaLcb+R2BmxJFySQPwl8YTOOkqN+S2qqt4AQpyWSVzcXG2ZBIBgjY
Gs\nB85JKdd3FUQXQkQDIVLKRiFEDLAZeFhKubmLe2oB8UBWkPhlA0l/GiO73c7Ro0fdYiq5ublERUV1\nEJULSYC8kDGyGW3U71KCUretjvpd9RhSDMRfEk/ksuNY0rdQY34LKa0uYpIZ9GISLM9RsMZA1gOv\nCSG+DpQBXwblsgKek1Jeg3J/vSWEkKi+/rMr8dBoNB0JCQlh4sSJTJw4kZtuugloS4BstVKeffZZ\ncnJUpaL2ojJu3Lgef5CHxoQyePlgBi8fDKglhhvzGqnbVkfdqyOo27YaEXMdsdeewbgki6rhX0WG\nmRg2bA3Dhq0lPn6hXke+j6ITCTUaDVJKTp061cFSaWxsZPbs2U5BmTt3Lunp6YSE+O4DXUpJ06Em\np4VSu60Wa/xRItbtwDbrY2RUPcNGrGF40o0kJFyMEP5JvhyoBKULyx9oAdFofMuZM2c6iEpVVRWz\nZ89m+fLlrFy5kszMTJ8nP5rLzc4YSs2hfExjNyNWfgaDzzE4fBUjJt/MkORlhIQE0onSP9AC4kAL\niGeCxS8bSPQYnZ+amhqee+45qqqq2LJlC6WlpSxZsoSVK1dy+eWXk56e7vP4RUttC/Xb66nOyeNc\n89uYx21BjDhDdPXlDE1cw4gFV2EY3rdWZAyW5yhYYyAajSYIGTx4MJmZmc4Px8rKSrZu3cqWLVt4\n9NFHCQ0NZeXKlaxcuZLLLruMoUOHnv+G3SB8UDiJVyWSeNVlwGXYTDaqs/dTUf1vTjb8krKd3yR0\n36XEW1YxfPyXGHTJMAyp3Stzr/EebYFoNBqfIaWkqKiILVu2sGXLFj777DMmTJjgFJSLL74Yg8H3\n6400G0sp3/cq1WffxBx6GJG9iJDs5QyKX8HgS4aTeE0ihjF6nZPO0C4sB1pANJq+hcViYdeuXWze\nvJktW7Zw4MABFi1axOWXX87KlSuZPn26z60Ek+kkVVUbOXPydYxN+4koXkbLn28gKnQKQ68bytDV\nQ4mZHqOtEwdaQBxoAfFMsPhlA4keI894O0Y1NTV88sknTgulsbGRFStWOC2UkSNH+rSfZvNpKipe\n4uTJJ4huySTyk29Q/7KqmjT0uqEkrk4kYXGCW4FIXxEsz5GOgWg0mqBg8ODB3HDDDdxwww0AlJSU\nsGXLFjZt2sQDDzzw/9u79/ioqmuB47+VB8hDQiQQwyuK3CQQQQgFpEZAK4JpBVq8LVKKBqpcW3zU\nR2s/ol7F1qu9BWm1ctXw8l2lChWURwEREaVCAj4IECFAEoEEeQQBQ7LuH+cAY0wyk0kyk8ms7+cz\nn8ycs2efNWsmZ8/Z58zenH/++WcakyFDhtC6des6ba958wQSE++lc+dbKSx8hj3N/otzr/seHcru\n4vjiKPLuzuPErhO0y2hH3Og4Yq+OJaq17RZ9ZUcgxphGoby8nI0bN545OtmwYQP9+vU7c3VXv379\n6jwBV3n5cYqKstiz5zFatbqYxMT7aX4kjZJFJRQvLObI+iPEDI5xurqujaNZfNVzojQl1oXlsgbE\nmKajtLSUNWvWnGlQCgsLz/z2ZNiwYXTr5v/kVRUVJ/nyy7nk5z9KixYXccEFD9C27RDKDpVx8O2D\nFC8s5uA7B2nVsxVxo+KIGx1Hy+SGGz4/mKwBcVkD4l2o9MsGk+XIu2DkqLCwkBUrVrB8+XJWrFhB\ny5YtzzQmV155JbGxsbWus6KijH37XiA//w80b96RxMT7iY29ChGh4mQFh1YfonhhMcULi4k8N/JM\nY9JmYBskouZ9bqh8jurSgNgAM8aYkNCxY0cmTJjA888/T2FhIW+++SZJSUk899xzJCYmMnDgQKZO\nncqaNWv45ptvfKozIiKahIRMBgzYSkLCzezYcRsbNw6ipGQJ0kw4b/h5JP0tiUF7BtHj+R5ItLDt\n5m2s67iO3JtyKXm7BC0P3y+tdgRijAl5J0+eZN26dWe6u7
Zt28bll1/OsGHDGDFiBMnJyT7Vo1rO\ngQMLyM+fhkhzEhOnEhc38juDOR7PO07xwmL2v7qfsgNldLq1EwkTE4iKCb0T8NaF5bIGxBgDUFJS\nwsqVK1m2bBmLFy+mS5cuTJo0ibFjx/o0a6NqBcXFC8nPn4ZqOYmJU2nffkyVowIfXn+YgpkFHFx6\nkPjx8XS6tRMt/yN0zpdYA+KyBsS7UOmXDSbLkXehlKNTp06xbNkysrKyWLlyJaNGjWLSpEmkp6d7\n/TGhqlJSspj8/GmUlx8lMfE+2rf/WZWDOJ4sOEnB3wooeraINgPbsHPoTq6989pG/4NFOwdijDHV\niIqKIiMjgwULFpCbm0uvXr2YPHkyKSkpPPbYYxQVFVX7XBEhLu5HpKWtp3v3JygoeJoNG3pQVDSX\nioqyb5Vt3qk53f7QjUvzL6XdqHYUPFnAhl4bKHy2kPLj5Q39MoPCjkCMMWFHVVm/fj1ZWVksWLCA\nyy+/nEmTJpGRkVHj0PSqyqFDq8nPn8aJEztJTJzK+ednVtm1paocWnmIvTP3cmT9ERJuTqDrb7sS\n1aZxnSexLiyXNSDGmNoqLS3ltddeIysri7y8PCZMmMDEiRO9nng/dGgteXl30axZPCkpc4mOPq/a\nsl/v+Jrdf9zNwaUH6fZYN+J/Ht9ourasC8v4bPXq1cEOodGzHHnXlHLUunVrMjMzWbt2LatWrUJV\nGTJkCOnp6cyZM4fS0tIqn9e2bTp9+75HixYX8fHH/ThyZMO31nvmqGX3lqTMTiF1QSp7Z+wle3A2\npTlV11sfKioqSEtLY+TIkQ22DbAGxBhjzkhJSeHxxx9nz5493HPPPbzxxht06dKFm266ifXr11O5\nhyMiohndu8/goov+ly1bMti798nvlPEUc2kM/T7qR/z4eHKG5bD91u2cOnyq3l/HzJkz6dmzZ73X\nW5l1YRljTA0KCwuZP38+s2fPJjo6mkmTJjF+/Hg6dOjwrXJff72Dzz77T1q0SCI5+Vmiomq+XLis\npIwv7v2Cg0sPkjQriXYZ7eol3r1795KZmcl9993H9OnTWbRoUY3lrQvLGGMaSMeOHbn33nvJzc3l\n6aefJicnh6SkJMaMGcM777xz5oijZcvu9O37AVFRsXz88fcoLc2psd7odtEkP5tMytwUtk/ZTu5N\nuVScqqhzvL/5zW/405/+FJBzLNaAhJmm1HfdUCxH3oVjjkSEwYMHM2/ePHbv3s3w4cO5++67ueKK\nK8jOzgYgMvIckpNnkZj4AHPmDKaoKKvGLi2A2Ctj6b+lPyfyT5A7Mddr+ZosXryY+Ph4+vTpg6rW\nqS5fWANijDG11KZNG26++Ways7MZO3Ysw4cPZ/LkyRw4cACA888fT/fuM9mzZzpbt2ZSXn6sxvoi\nW0Vy8ZsXc3TjUYoXFvsd1/vvv8+iRYvo1q0b119/PatWrWLChAl+1+eNnQMxxpg6+uqrr3j44Yd5\n4YUX+P3vf8+UKVNo1qwZ5eXH2LbtFo4e3Uhq6uu0apVSYz37XtnHvnn76P127zrH9O677/LnP//Z\nzoEYY0xjFhsby4wZM3jvvfdYvnw5vXr1YsmSJURGtiIlZR6dO99BdvZgr+dFYi6LoXRzw13eW9/s\nCCTMhNIYRsFiOfLOclSzJUuWMHnyZC6++GKmT59Ojx492L//VfLy7qFv33Wcc07nKp9XcaqC49uO\n06pnq4DFakcgxhjTiGRkZDB79myuvvpqBg8ezB133EF09NV06jSFLVt+yKlTR6p8XkRUREAbj7qy\nIxBjjGlABw4c4P7772fhwoUsWLCAuLgXOH58B716LSYiovpxtwIlJI9AROQ6EflERMpFJK2GciNE\nZKuIbBOR3wUyRmOMqav27dsza9YssrKyGDVqFFu2DEWkGdu2TW7wy2wbWjC7sLYAPwbera6AOENc\nPgkMB1KB60Wk5ssYTI
3C8fr92rIceWc58q5yjjIyMli6dCm3334nK1deRmnpZvLzHwlOcPUkaA2I\nquaq6nagpkOnAcB2Vc1X1TLgFWBUQAI0xph6lpaWxrp163juuRd5+eVeFBVlUVLyTrDD8lvQz4GI\nyCrgLlXdWMW6McBwVb3ZfTweGKCqt1VTl50DMcY0eocPH2bMmDGkph7l+uu/YsCAT4iIaBaUWBrt\nORARWS4imz1uW9y/1zbkdo0xpjGLiYlhyZIl5OcnsHv3KQoK/hrskPzSoFNjqeqwOlZRAHT1eNzZ\nXVatG2+8kQsuuACAtm3b0qdPnzPXq5/ukwznx9nZ2dxxxx2NJp7G+Pj0ssYST2N8XDlXwY6nMT5+\n4oknatz/rFu3jhtvvJGHHvolM2Y8wtatFxIdfV5APt+rV69m165d1NnpAbeCdQNWAf2qWRcJ7AAS\ngWZANtCjhrrU1GzVqlXBDqHRsxx5Zznyztccvfjiizp1apx++mlmvWx34sSJ2qFDB+3Vq9eZZTk5\nOTpo0CDt3bu3jhw5Uo8ePXpmnbvf9Gv/HbRzICIyGvgrEAccArJV9RoRSQCeVdUfueVGADNxutuy\nVPV/aqhTg/V6jDHGH6rKmDEjuOWWtaSnb6FFi251qm/t2rW0bt2aCRMmsHnzZgAGDBjA9OnTSU9P\nZ+7cuXzxxRc8/PDDgM2JfoY1IMaYUJSfn8/jj6cwefLP6N17bp3qmjRpEgsXLuTYsWMcP34cgBYt\nWpCUlERERAQxMTEUFRWRm5sLNOKT6Kbx8ewHNVWzHHlnOfKuNjlKTEzk5MkfsX//q5SVHazTdjMz\nM5k/f/63lvXp04dp06axadMmYmNj6+f8B9aAGGNMozBlyn1s2gT7979Rp3rS09OJiYn51rJ58+bx\n1FNP0b9/f0pLS4mIqJ9dvzUgYeb0FRmmepYj7yxH3tU2R3369CEvrz07dvy93mNJSkqif//+7Nu3\nj927d5Oamlov9VoDYowxjcSFF17FkSPZda5Hz16ZCjgDOj7yyCPs2rWL1q1b07lz1cPJ15Y1IGHG\n+q69sxx5Zznyzp8cpaYORuRwnbY7btw4xowZwzfffEPXrl2ZM2cOL7/8MsnJyfTs2ZNBgwaRl5dX\np22cZg2IMcY0Et269ePRRxPqVMdLL73Ehx9+SGpqKrt37yYzM5OMjAxyc3PZunUrycnJ9OjRo17i\ntct4jTGmkSgqKqJv3758+eWXftcxbtw4Vq9eTUlJCfHx8Tz00EMsXryY3NxcIiMjSUxMZNasWSQk\nOA2V/Q7EZQ2IMSaUVVRUcOLECVq2bBmwbdrvQIzPrO/aO8uRd5Yj7/zJUUREREAbj7qyBsQYY4xf\nrAvLGGPCmHVhGWOMCThrQMKM9V17ZznyznLkXTjkyBoQY4wxfrFzIMYYE8bsHIgxxpiAswYkzIRD\nv2xdWY68sxx5Fw45sgbEGGOMX+wciDHGhDE7B2KMMSbgrAEJM+HQL1tXliPvLEfehUOOrAExxhjj\nFzsHYowxYczOgRhjjAk4a0DCTDj0y9aV5cg7y5F34ZAja0CMMcb4xc6BGGNMGLNzIMYYYwIuaA2I\niFwnIp+ISLmIpNVQbpeI5IjIJhH5KJAxNkXh0C9bV5Yj7yxH3oVDjoJ5BLIF+DHwrpdyFcBQVe2r\nqgMaPqymIRw+vL6wPJxluTjLclE/gtaAqGquqm4HvPW9CdbVVmvV/YMMHTo0oHEEmz87iqaao/rc\naYZ6jgLRgIR6jnwRCjtmBZaLyAYRuSnYwRhjjHE0aAMiIstFZLPHbYv799paVHOZqqYBGcCvRSS9\ngcINC3bo7p3lyDvLkXfhkKOgX8YrIquAu1R1ow9lHwSOqur0atbbNbzGGFNL/l7GG1XfgfipyuBF\npCUQoaqlItIKuBp4qLpK/E2CMcaY2gvmZbyjRWQPcCnwloi87S5PEJG33GLxwFoR2QSs
B/6pqsuC\nE7ExxhhPQe/CMsYYE5pC4SqsbxGRESKyVUS2icjvqinzFxHZLiLZItIn0DEGirdciMg490eYOSKy\nVkR6BSPOQPDlc+GW6y8iZSLyk0DGF0g+/o8MdX+c+4l7HrJJ8uF/pI2ILHL3FVtE5MYghNngRCRL\nRPaJyOYaytR+v6mqIXPDafB2AIlANJANpFQqcw2w2L0/EFgf7LiDmItLgRj3/ohwzoVHuX8BbwE/\nCXbcQfxcxACfAp3cx3HBjjuIufg98OjpPAAlQFSwY2+AXKQDfYDN1az3a78ZakcgA4DtqpqvqmXA\nK8CoSmVGAfMBVPVDIEZE4gMbZkB4zYWqrlfVw+7D9UCnAMcYKL58LgBuBV4H9gcyuADzJRfjgAWq\nWgCgqsUBjjFQfMmFAue6988FSlT1VABjDAhVXQt8VUMRv/abodaAdAL2eDzey3d3ipXLFFRRpinw\nJReefgm83aARBY/XXIhIR2C0qj6N99EPQpkvn4sk4DwRWeX+QPcXAYsusHzJxZNATxEpBHKA2wMU\nW2Pj136zsVzGaxqQiFwBZOIcxoarJwDPPvCm3Ih4EwWkAVcCrYAPROQDVd0R3LCCYjiwSVWvFJGL\ncEa96K2qpcEOLBSEWgNSAHT1eNzZXVa5TBcvZZoCX3KBiPQGngFGqGpNh7ChzJdcfA94RUQEp6/7\nGhEpU9VFAYoxUHzJxV6gWFVPACdEZA1wCc75gqbEl1xkAo8CqGqeiOwEUoB/ByTCxsOv/WaodWFt\nALqLSKKINAPGApV3AIuACQAicilwSFX3BTbMgPCaCxHpCiwAfqGqeUGIMVC85kJVu7m3C3HOg/yq\nCTYe4Nv/yEIgXUQi3R/rDgQ+D3CcgeBLLvKBqwDcPv8k4IuARhk4QvVH3n7tN0PqCERVy0VkCrAM\np/HLUtXPRWSys1qfUdUlIpIhIjuAYzjfMJocX3IB3A+cB/zN/eZdpk1wSHwfc/GtpwQ8yADx8X9k\nq4gsBTYD5cAzqvpZEMNuED5+Lh4B5npc3vpbVT0YpJAbjIi8BAwF2onIbuBBoBl13G/aDwmNMcb4\nJdS6sIwxxjQS1oAYY4zxizUgxhhj/GINiDHGGL9YA2KMMcYv1oAYY4zxizUgpl6JSLmIbHSHCt/o\n/pixrnWOEpEUj8cPiciV9VDvEBE5JCIfu0N+rxaRH9ahvskiMr6ucfmx3RtEZH+lvKd4f2aVdR31\nocxaf+quop5EEdlSH3WZ4AipHxKakHBMVdOqWykikapaXss6R+MMwb4VQFUfrEN8la1R1ZFubJcA\nb4rI16pa6zkyVPX/6jGu2npFVW+rh3q8/jBMVetzTDX7IVoIsyMQU9++M1SC+w15oYj8C1ghIq1E\nZIWI/Nud7GqkR9kJ7rJNIjJPRAYBI4HH3W/WF4rInNMTQonID9zlOSLynIhEu8t3ish/u0cXOSKS\n5C1wVc0BHsYZ9h0RiROR10XkQ/c2SBw7RaSNR8zbRKS9iDwoIne6y34pIh+5r+M1ETnHXT5HRGaK\nyPsiskM8JrYSkd+JyGb3OX90l3UTkbfFGTX33RpeR1V5Hy0iK9z7CSKSKyId3PfjTXFG480VkQeq\neG5N79FR9+8Qt47XRORzEXneo0yae0S3wY0/3l3eT5wJizYBv/b2nphGLtgTnditad2AU8BGYBPO\nnBMANwC7OTu5VQTQ2r3fDmfOBoBUnKOMWPdxW/fvHDwmgDr9GGju1nuRu3wecJt7fyfOeFcAtwDP\nVhHrEGBRpWWXAJ+6918Evu/e7wJ85t6fAdzg3h8ALHPvPwjc6d6P9ahzGvBrj9hfde/38Hjt1wBr\ngeaVXvsKj9c3APhXFa/jBpw5Tk7nfaNHPfNxdtT/BH7qUb4AaAucA2wB0tx1R9y/kVW9R5XKDMGZ\nYyIBpwFbB3wfp2fjfaCdW+6nOMOIgDNk+mXu/cep
ZoIju4XGzbqwTH37WqvuwlquZye3igAeFZHB\nQAXQUUQ6AFcAr6k7arCqHvKyrWTgCz07UOQ84FfAX9zHb7h/PwZ+7GP8nt/krwJ6iMjpZa3FGXzw\n78AD7vbGAq9WUU9vEZmGs5NuBSz1WPcmgDrjMnVwl/0AmKOqJ911h0SkFc4O+TWPGKKribu6Lqzb\ngE+AD1T17x7Ll5/Or4j8A2eo/40er1+o4j1S1cqTcX2kqkVuPdnABcBh4GKcodEF5/0uFJEYnC8R\n77vPfR5npkwToqwBMYFyzOP+z3GGVO+rqhXiDKF9jruutvN01FT+pPu3HN8/62mcHZlWgIHqzGbn\n6QMRuUhE4nDOz0yrop45wEhV/UREbsD5tl45Lm/xRwBfVdMg+6oLTgNQeXa5yucetNLfmt4jT56v\n5XSeBfhEVS/zLOg2IKYJsXMgpr750gDEAPvdHdMVOHNWA6wErhOR8wBEJNZdfhRo891qyAUSRaSb\n+/gXwGp/4xVn7pSpOLPUgTOK6+0e6y/xeN4bwHScbq2q5llpDXzpnpP5uQ/bXw5kikgLd1uxqnoU\n2Cki11WKscbX4VE2CsjCOUr6XETu8lg9TETautsbjdN95llPde9RlduqJBdoL86w4IhIlIj0dI9A\nD4nI991yNeXFhAA7AjH1zZeral4E/ikiOTgT93wOoKqficgfgHdF5BROf/5EnLmsnxWRW4HrTm9D\nVU+KSCbwuohE4sz/cPpKKF+v7kkXkY9xupn2AVNUdbW77nbgKTfOSGANThcZON1YH+GcT6jKA+76\n/cCHnJ13u8pv/qq61G2g/i0iJ4ElOI3ZeOBpEZmK8//6Cs4w7JX9VEQuw9m5qxvnMJyrzNaJM1z5\nRyLyllv+I+AfONOWPq+qmyrFV+V7VM1rqPxaytxG76/uUUckzoyQn+G8n7NFpAKngTYhzIZzNybM\nuF1q/ao5Z2KMz6wLyxhjjF/sCMQYY4xf7AjEGGOMX6wBMcYY4xdrQIwxxvjFGhBjjDF+sQbEGGOM\nX6wBMcYY45f/B/qsVxnVj1pSAAAAAElFTkSuQmCC\n", + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "glmnetPlot(fit, xvar = 'dev', label = True);" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "We can extract the coefficients and make predictions at certain values of $\\lambda$. Two commonly used options are:\n", + "\n", + "* `s` specifies the value(s) of $\\lambda$ at which extraction is made.\n", + "\n", + "* `exact` indicates whether the exact values of coefficients are desired or not. That is, if `exact = TRUE`, and predictions are to be made at values of s not included in the original fit, these values of s are merged with `object$lambda`, and the model is refit before predictions are made. 
If `exact=FALSE` (default), then the predict function uses linear interpolation to make predictions for values of s that do not coincide with lambdas used in the fitting algorithm.\n", + "\n", + "A simple example is:" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": { + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } + }, + "outputs": [ + { + "data": { + "text/plain": [ + "False" + ] + }, + "execution_count": 7, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "any(fit['lambdau'] == 0.5)" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": { + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } + }, + "outputs": [ + { + "data": { + "text/plain": [ + "array([[ 0.19909875],\n", + " [ 1.17465045],\n", + " [ 0. ],\n", + " [ 0.53193465],\n", + " [ 0. ],\n", + " [-0.76095948],\n", + " [ 0.46820941],\n", + " [ 0.06192676],\n", + " [ 0.38030149],\n", + " [ 0. ],\n", + " [ 0. ],\n", + " [ 0.14326099],\n", + " [ 0. ],\n", + " [ 0. ],\n", + " [-0.91120737],\n", + " [ 0. ],\n", + " [ 0. ],\n", + " [ 0. ],\n", + " [ 0.00919663],\n", + " [ 0. ],\n", + " [-0.86311705]])" + ] + }, + "execution_count": 8, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "glmnetCoef(fit, s = np.float64([0.5]), exact = False)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The output is for `False`.*(TBD) The exact = 'True' option is not yet implemented*. \n", + "\n", + "Users can make predictions from the fitted object. In addition to the options in `coef`, the primary argument is `newx`, a matrix of new values for `x`. 
The `type` option allows users to choose the type of prediction:\n", + "* \"link\" gives the fitted values\n", + "\n", + "* \"response\" the sames as \"link\" for \"gaussian\" family.\n", + "\n", + "* \"coefficients\" computes the coefficients at values of `s`\n", + "\n", + "* \"nonzero\" retuns a list of the indices of the nonzero coefficients for each value of `s`.\n", + "\n", + "For example," + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": { + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[[-0.98025907]\n", + " [ 2.29924528]\n", + " [ 0.60108862]\n", + " [ 2.35726679]\n", + " [ 1.75204208]]\n" + ] + } + ], + "source": [ + "fc = glmnetPredict(fit, x[0:5,:], ptype = 'response', \\\n", + " s = np.float64([0.05]))\n", + "print(fc)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "gives the fitted values for the first 5 observations at $\\lambda = 0.05$. If multiple values of `s` are supplied, a matrix of predictions is produced.\n", + "\n", + "Users can customize K-fold cross-validation. 
In addition to all the `glmnet` parameters, `cvglmnet` has its special parameters including `nfolds` (the number of folds), `foldid` (user-supplied folds), `ptype`(the loss used for cross-validation):\n", + "\n", + "* \"deviance\" or \"mse\" uses squared loss\n", + "\n", + "* \"mae\" uses mean absolute error\n", + "\n", + "As an example," + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": { + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } + }, + "outputs": [], + "source": [ + "warnings.filterwarnings('ignore') \n", + "cvfit = cvglmnet(x = x.copy(), y = y.copy(), ptype = 'mse', nfolds = 20)\n", + "warnings.filterwarnings('default')" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "does 20-fold cross-validation, based on mean squared error criterion (default though).\n", + "\n", + "Parallel computing is also supported by `cvglmnet`. Parallel processing is turned off by default. It can be turned on using `parallel=True` in the `cvglmnet` call. " + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Parallel computing can significantly speed up the computation process, especially for large-scale problems. But for smaller problems, it could result in a reduction in speed due to the additional overhead. User discretion is advised.\n", + "\n", + "Functions `coef` and `predict` on cv.glmnet object are similar to those for a `glmnet` object, except that two special strings are also supported by `s` (the values of $\\lambda$ requested):\n", + "\n", + "* \"lambda.1se\": the largest $\\lambda$ at which the MSE is within one standard error of the minimal MSE.\n", + "\n", + "* \"lambda.min\": the $\\lambda$ at which the minimal MSE is achieved." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": { + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } + }, + "outputs": [ + { + "data": { + "text/plain": [ + "array([ 0.07569327])" + ] + }, + "execution_count": 11, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "cvfit['lambda_min']" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": { + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } + }, + "outputs": [ + { + "data": { + "text/plain": [ + "array([[ 0.14867414],\n", + " [ 1.33377821],\n", + " [ 0. ],\n", + " [ 0.69787701],\n", + " [ 0. ],\n", + " [-0.83726751],\n", + " [ 0.54334327],\n", + " [ 0.02668633],\n", + " [ 0.33741131],\n", + " [ 0. ],\n", + " [ 0. ],\n", + " [ 0.17105029],\n", + " [ 0. ],\n", + " [ 0. ],\n", + " [-1.0755268 ],\n", + " [ 0. ],\n", + " [ 0. ],\n", + " [ 0. ],\n", + " [ 0. ],\n", + " [ 0. ],\n", + " [-1.05278699]])" + ] + }, + "execution_count": 12, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "cvglmnetCoef(cvfit, s = 'lambda_min')" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "metadata": { + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } + }, + "outputs": [ + { + "data": { + "text/plain": [ + "array([[-1.36388479],\n", + " [ 2.57134278],\n", + " [ 0.57297855],\n", + " [ 1.98814222],\n", + " [ 1.51798822]])" + ] + }, + "execution_count": 13, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "cvglmnetPredict(cvfit, newx = x[0:5,], s='lambda_min')" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Users can control the folds used. Here we use the same folds so we can also select a value for $\\alpha$." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 14, + "metadata": { + "collapsed": true, + "jupyter": { + "outputs_hidden": true + } + }, + "outputs": [], + "source": [ + "foldid = np.random.choice(10, size = y.shape[0], replace = True)\n", + "cv1=cvglmnet(x = x.copy(),y = y.copy(),foldid=foldid,alpha=1)\n", + "cv0p5=cvglmnet(x = x.copy(),y = y.copy(),foldid=foldid,alpha=0.5)\n", + "cv0=cvglmnet(x = x.copy(),y = y.copy(),foldid=foldid,alpha=0)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "There are no built-in plot functions to put them all on the same plot, so we are on our own here:" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "metadata": { + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } + }, + "outputs": [ + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYAAAAElCAYAAADtFjXiAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzsnXmYFNXVuN8zCyAgW0AQNTOACuLCEhfUqKOgkp8RFBPj\nEgUSl0+NgIa4RMgwavxMYkSDUaMxojFq4sYSPxEXBjQiijAii7jQMyAKYpBVBGbm/P64VT01Pb1U\nd1dP98zc93n66a7t3FvVp+reOvecc0VVsVgsFkvLIy/bFbBYLBZLdrANgMVisbRQbANgsVgsLRTb\nAFgsFksLxTYAFovF0kKxDYDFYrG0UGwDAIhIjYgsEZHlIrJURK4XEcl2vVJBRMaJyEoR+XvE+lNE\nZItznktFZG6Gyn9UREZlQrYleaxuB1p+s9PtgmxXIEfYqaqDAUSkK/AU0AGYkq5gEclT1dp05STB\nVcBQVf08yrYFqjoi1oEikq+qNZmrmiULWN3G6nYs7BtABKr6FXAF8AswSi4ivxeRRSJSISKXO+tF\nRO53eiQvi8iLbu9AREIicqeILAZ+JCK9ReQlEXlXROaLyKHOfl1F5FlH9iIROd5Zf4rTk1kiIu+J\nSLvIejo9uQ9EZJmIjHPWPQD0Bl4SkfFRTq9Bz8/p1TwgIm8DvxORtiLyiIi87ZQ9It51cLbdJyKr\nnJ7Xfp71Q51zeF9E/ioihZ7rc4dzju+IyCARmSMiH4vIlcn/axY/WN22ut0AVW3xH2BblHWbgW7A\n5cCvnXWtgHeBIuA84N/O+u7O/qOc5RAw0SPrVaCP8/tY4DXn9z+AE5zfBwErnd+zgOOd322BvIi6\nDQbeB9oA7YDlwABn2xqgc5TzOQXYAixxPjc76x8FZnn2+y1wkfO7I7Aa2CfOdTgXeNlZvz/wNTAK\naA2s9Zz3Y8A4z/W5wvl9N1DhnGdXYEO29aE5faxuW92O97EmoMScARwpIj92ljsAhwDfB54BUNWN\nIjIv4rh/Ajg9nBOAZ0TCttdC53sYcJhnfXsRaQv8B5gqIv8AnlfV9RGyvw+8oKrfOmU8D5yEuXGE\nKL0hh1ivyc9EnO/ZIvIrZ7kV8N041+FkjFkBVf1CRF5ztvcF1qjqp87yY8DVwJ+c5dnO9wdAO1X9\nBv
hGRL4VkQ6qui3GOViCw+p2C9dt2wBEQUR6AzWquslR4GtV9ZWIfc5KIGan850HfK2OHTayKOA4\nVd0bsf53IvJv4CzgPyJyhqp+lPyZ+GZnxPJ5qvpxvYr6vw4S43cku53vWs9vAMXqZcawum1124sd\nAzCE/0wR6QY8AExzVr0MXC0iBc72Qzw9mR859tLuQEk0waq6HQiJyI88ZRzl/JwLjPesH+B891bV\nFar6e8zraL8IsW8A54hIG6cXdi6wIKUzb8jLwDhPnQZ61ke7DguAnzh21P2BU539VwNFzgMH4BKg\nPKA6WvxjdbsOq9sR5FRrlEXaiMgSzCvhXuBxVZ3qbPsrUAwscXoKXwLnAM8BpwErgHXAe8BW55jI\nFKsXAw+KyCTMNX8aWIa5Qf4sIu8D+RiFuxqYICKnAjWO/Je8wlR1qYhMx9xACjykqstilJ2IyP1v\nB+4RkWWYh0cIGBHrOqjqCyLiXoe1wFtOHXeLyFjgWRHJd+r6Fx91tOlpg8Xqdh1WtyMQZ7DCkgIi\n0k5Vd4pIF2ARcKKqfpntelks6WJ1u2Vg3wDS498i0gkz8HWrvUEszQir2y0A+wZgsVgsLRQ7CGyx\nWCwtFNsAWCwWSwslJxoAETlQRF4XkRViQsDd8O/OIjJXRFaLCUnvmKTcR0RkozPq7647SkTeEhPC\nPVNE2qdQ38aU+3sxoegVIvKciHRIVm5EGdeJSQy2TET+ISKt0pHnyDxU6sL7l4rIVvc/TFNuVL3I\nVbnJlJWKbgcsKzAdjiHraUcflohJj7DEj6wossc755r2/xSk7otIRxF5xrk3V4jIcSnKCUwXU5KV\n7VBkZwyiBzDQ+d0e42fbD/gdcIOz/kbgziTlfh8YCCzzrHsH+L7zewxmgCvZ+jam3GE44fLAncD/\npnGde2LC6Vs5y/8ELg34v8wDPgcOypRe5KrcZMpKRbcDlhWYDkeTFbH9LmBSCtfucIxLaWuMK+lc\noHeK/0Ogug9MB8Y6vwuADtnWxVRkBa7wQXyAGc6D70Ogu+fkPkxBVlGEkn/t+X0gsCLFOjaK3Iht\n5wB/T+O69gSqgM6O0s4GhgX8350BvJFBvRjaVOTGKSso3U5LVpA6nEBvw3lzkjy/HwEPe5Yn4clD\nlKSswHQfkyri0wzqRyC66EdWTpiAvIhIMaY38TZGqTcCqOoGPNn40mCFOFkAgfMxih4EmZLr5WdE\nBM4kg5o0un/E3JDrgS2q+mpAdXP5CU7+lCDx6MWipiA3QVlp63aG7pPAdVhETsIkQfs04c4NWQ6c\n5Ji42gL/D5NYLmkC1v1ewFdiso0uEZGHRGSfFGWFCVIX/crKqQbAsTk+C4xX1R00jJwLwmf1Z8A1\nIvIuJtvgngBkZlIuACJyC7BXVZ9MQ0YnYCSmt9YTk6DrooCqiJiUuCOon4ArCLmRepHTcn2WlbJu\nZ/A+yYQOX0iKHQJV/RBj3noF+D9gKSaCOGkC1v0CTNbSP6vJg/QNcFOKstz6BaaLycjKmQZATB6O\nZzEmjpnO6o1icpEgIj0wIdppoaofqeqZqnoMJmw9lZ5Jo8kFEJExmN5Pug/rYZgshpvVTI7xPCab\nY1D8AHhPVTcFJTCGXuSs3CTKSkm3M3mfBK3DYtIkjMLJHppinR5V1aNVtQST8jnVxHFB6v5nwDpV\nXewsP4tpEFIiSF1MVlbONADA3zA5w+/1rJuFGYwCGA2kcnHqpZAVkxALEcnD2BQfTKWyjSh3OPAr\nYISq7o55lD/WAkPEJNoSYCiwKk2ZXlLu7cUhml7ksly/ZaWq20HKClKHo6VqPh1YpdFn8PIntK5O\n38Ukhkv1DTgw3XfMbevEmfzGkbUyxXpBsLqYnKxMDGSkMFhxIubVrgLzmrcEGA50wUw4sRrjAdAp\nSblPYjxSdmMUYCwmG+BqzMDZHSnWtzHlfowZvHInu7g/zWtdilH8
ZZg85oUB/YdtgU3AvpnWi1yV\nm2ndDlhWYDocTZaz/lGcyVHSuH4LMGMBS4GSNGUFpvvAAEwSuArM20THbOtiKrJsKgiLxWJpoWTU\nBBQjQCSt4C6LJRewum1pDmR6DOBR4MyIdTcBr6pqX+B14OYM18FiyQRWty1NnoybgESkCJitqkc5\nyx8Cp6iZa7QHUK6qkbMCWSw5j9VtS1MnG15A+2nwwV0WSy5gddvSpMgFN1A7Cm1prljdtuQ02ZgR\nbKOIdPe8JscMWhERewNZMoqqRvqup4PVbUvO4Ee3474BiEieiJyfZj0iA0SSClpJx+831mf06NFN\nQqZfuZCa3FSOay7XtqYmpYwCOanbQV6fXJPl6mgyshLpda6dYyZk+SVuA6CqtcANvqVFICJPAm8B\nh4rIWhEZi0lpfLqIrMZE0N2ZqvxUKS4ubhIyrdzMyczLS8/6mUu6HeT1sbKahyy/+DEBvSoiEzH5\nPHa6K1V1c6IDVTVW7pph/qpnsWQWq9uWloyfBuAnzvc1nnUK9A6+Oo1Dp06dmoRMK9efzMrKSior\nK8O/3Z5UcXGxn17VNTRx3Q7ymltZ2ZNVW1tLeXk5kJIep0TCBkBVe2Wk5CwycODAJiHTr9zS0tTk\npnKcH7mNLdN7g5SVlTFmzBjfspuDfgd5zXNNlqujychKpNe5do4up512GiUlJUDyepwqCQPBnBzv\nVwEnO6vKgb+o6t7MVs14SiQzoGGxlJWVUeqzZTNJIRmP1W1LjpGMHkdDRFAfXkB+TEAPAIXA/c7y\nJc66y1KuncWSO3wPq9uWLBLLhOn9nSn8uEIco6qjVfV15zMWOCajtcowrp0t12VaufFlVlZWUl5e\nTnl5OdOnT69nP/VLc9DtIK+5ldX4soqLi8Omn6qqqvDvxvAK8tMA1IhIH3dBRHqT4rRsFkuQuDdO\nSUlJyjeO1W1LS8aPCehXwDwRWYMJeinCTB7RZHEfFLku08rNnEwPTV63g7w+VlZ2Zc2fPz8weX5I\nGAkM7AIOwcwWdC3QV1XnNULdLD6ZMqVxj8tFtmzeTNlPf8qn06dT9tOfUhUKxd2/trbW/Wl1O4dJ\nRUebkl5HmjG3bN7M1cOG+dbjtPERUrw0qPDkFMKZNRPMmzevScj0KzeVyzRv3ryUjvMjt7FlVq5Z\no2M7d9YdoAq6A/SXffpo5Zo1cY9z9KvJ63aQ1zzXZLmXKRlZiS5trp2jy3mjRqWkx9Hwq9t+xgBe\nE5HznImULZacY/rkyUz7+mvaOcvtgLJPP2X65MkJj7W6bckFqkIh1rz8Mj2//pq7MJOAJ6PHqeIn\nDmC7U5dq4FuMrVRVtUPGalVXtiaqnwVETJehsY7LNpFuc0vuvZc/VVQ02K/01FMpe/31mHKc575i\ndTtnSUVHm5peV4VCTDv9dMo+/ZR2mJwkpRibZBGJ9TgafuMAEo0BCHC4quapaitV7aCq+wZxg4jI\ndSKyXESWicg/RKRVujItLQOv98/7S5eyfssWJgFlmJ4TmJsor2fPmDLch6/VbUu2mT55cvjhD07P\nH5hOYj1Ol0TZQBV4MehCRaQnpoEbrGY6vQLggqDLiYWNA2hacmPJrAqF2Pr3v/N4ZSW3AxOBacAq\noLRPH8bcdltMmZmy+mRDt3PNr93KSo7a9etphwlDd2kH7CWxHqeLHzfQJSJyjKq+G3DZ+UA7EakF\n2gKfByy/xZBqxHgmcgE1JlFt/8ClxcXc/corFPVKnObH6nZuk4qONgW99poxQ99+W5eK1mEnsCoJ\nPU4VP2MAHwIHY96ud1JnJz0qrYJFxgG/Bb4B5qrqJVH2sXZSS0xKTz2Vsig9ML82U+ctoAar25Ys\nctmYMWx89lkG7NxJIXA+8IfOnSl9772UH/5B5gI6M6UaxEFEOgEjMWMcW4FnReQiVX0y6LIszZe8\nAw5gJ4TfACAlm2mfxLv4x+q2
JRmqQiFqZ83i6Z07wwPA17ZvT96IERnt+bvEbABE5DQ1+VGqRKSX\nqoY820ZRN96WCsOANepMvCEizwMnAA1ukjFjxoRD+zt16sTAgQPD0Xeu/S3ZZXddqsdHW46Una48\nd7miooIJEyYEJq8p1jeyzu72vmedxdnPzGH2nv/SDngJ+FP79jzo2Ewj97/nnnuoqKhAVelVd3Pl\nNXXdDvK/jJSZK7pwzz33BHLv59r1mv/XvzLt6695F6gAJgDTduzgBwsWUF5entT1qaioSD5/UKwA\nAWBJtN/RlpP9AMcCHwBtMK/d04FrouznP/IhCZpbIFhzlxtNZuWaNTrl4ov1tILB+v96FOuoAw7Q\nKRdfrBPGjUsob9CgQapqgmWag27namCTlZWY35SUmMg10HnOt4JeUlycllx8BoLFHAMQkaWqOijy\nd7TlVBCRUox3xF5gKXCZRuRht3ZSixd34GzDF1/w+vjxTN20qe612bGZTn/88YR51AcNGsTSpUvd\nMYAKq9uWxiIyhuWdJ57gD6+91sCMeemRR/LcsmUplxNEHIDG+B1tOWlUtUxVD1PVo9Sk5M34JBzN\nlZaSC8j1/1/94ovhhz+YMYBpX3/tO2IywgXU6nYO09xyAUXGsLRr145LaM1k6jwRSvv04bunntoo\n9YnXAPQWkVkiMtvz211u0tPoee13uSzTr9yystTkpnKcH7mZlun6TXtpB/x35UqKioood5JrxZoX\nYM2aNYwYMcJdbPK6HeQ1zzVZro4mIyuRXufCOboxLFNmzeJ5dnMTcFleHj869FCOvu02BgwalFCP\ngyCeF9BIz++7IrZFLlssjUYs75/v9O/vax7VmTNnAjB79myor+dgddvSCESLYbm+tpa/tm7NBRde\n2Gj1SBgHkE2sndQfLS0XUFUoxA0DBvK37dsajAEk4zrn106aCaxu+6O55gKKFcNyRb9+XPTAA4Ax\nF6U6K1iQcQAWS85QFQoxffJkNraBHxR0oVvbfejerx/9zziDUFUVoaqqtG4ciyUTeAd/F7/zDotX\nrWISZrL1MZigkZ3AAd/7Xti1s1Hw4yqUrQ/WDdTOB6CqTz31lM6bN0+fevJJvbxbt3r50sd27pxS\nvnRV/65ymfgEqdu55NYYtKzmNh9A1LkrQFeCDm/fPmVdjsSvbvuZD8CS4zT3XEA9evQIxPvH0vRo\nbrmAYuWvmlRcTLszzmiU6F8v8eIAZhPHJU5VR8TaFhTWTmrxkm7uH5ezzz4bEXEHgWdHbre6bckU\nsXR43KBBDB43Lmy6TNeMGcQYgOsNMQroATzhLF8IbEy5ZhZLigSU+4eJEycCYS+gXVjdtjQS6Xqw\nBU4iGxGw2M+6THywYwBWrkdm5Zo1enG77hkdA2iKup1rdnsrqz6hUCg8hnX+wIH6o7xCnQRaGaHD\nQdYrmm5H+/jxAmonIr1VdQ2AiPSCBnE4FkvguJ4TFRUVLH7nHdbMncummh6c3aGAzvsSiPeP1W1L\npikuLkZU+fdll/E3z7SP/1NQQNezzqJjURFFvXoRqkonB2Fq+JkPYDjwELAGk9yqCLhSVV/OeOWs\nndSCcf0s+973woNnqfr9R+KkhFiH1W1Lhin76U+Z+I9/NDD93HXxxXDIIQnzVyVLIHMCA6jqHOAQ\nYDwwDujbGDeIxT/NPRdQNM+JAL1/rG7nMM0lF1C66UsyRcIGQETaAr8CfqGq7wPfFZEfpluwiHQU\nkWdEZJWIrBCR49KV6RebC6hp5QJas3x51Jun9vNAZlps8rqdC7ltMiWrueQC2taxI5OBUozbp5v4\nzR38dYO/GjuA0c8YwKPAe8DxzvJ64Bng32mWfS/wf6r6YxEpwMydarE0QLp1C8T7JwZ7sLptyQDe\n9OXb33iD2yFswpwMrO/Qgd9ncMJ3P/gZA1isqkdHzA/wvqoOSLlQkQ7AUlWNOx2ftZP6o7nlAo
oM\nm18xcyZb336bw2truQzoSnBjAKoqVrdzl+aQCyiW/f+Cvn2Z/eGHGSkzyFxAe0RkH5ygMBHpA+xO\ns369gK9E5FFgALAYGK+qu9KUa2kGuN48VaEQj48axX2ewd//KShAjjmGo845J5DcP1a3LZkmlv1/\nf5GwCSlb+av8NAClwBzgIBH5B3AiJn9RuuUOxkyVt1hE7gFucsqqh50TOPG8qpB6fcvLc3dO4NIr\nruAnznypJcC7wEXV1bzTuzcTb7ih3s3jR16MeVObvG4HqXuRMrOtC65uN+U5gfMOOICXgH3CZ2Pm\nr95z4IH19p8xY0bK1yvwOYGd11MBDgK+A5wF/BDo6ifAIIHc7piJs93l7wOzo+yXahxEXJpbIFhp\naWpyUznOj9ygcOdL9c6VqqC/OfXUtGXX1ta6cwI3ed1uykFSiXB1NBlZifS6Mc+xcs0anThihJ4r\n+fWCv37Zp0+DAMZsBIL5GQP4QFWPTK5ZSYyIzAcuV9WPnDlU26rqjRH7aKL6WZonVaEQ1592GodV\nVjZImXvXxRdT+sQTcY/3Q6bmA7C63bKJN3f1/xQU8FXv3jw4Z05GE78FOQawRESOUdV3A6iXl3HA\nP0SkEBOIMzZg+ZYmRuSN87jnxikFfg78oXNnSgP0nLC6bQka155f9tOfNshe+2B1NVd16ZIzc1f4\nSQd9HLBQRD4VkWUi8oGIpD5dvYOqvq+qx6jqQFUdpapb05XpF6/9LpdltjS5sSZ9fxfjOz2xe3f6\n33QToaoqyoMLmmnyuh3kf2llBScr1uBvr332ocSZGN778M/UPRkPP28AZ2a8FhaLh1g3ztH9+zPx\nhhuCLi6uu6bFkipBZa/NJL7nBBaR/YA27rKqrs1UpTxlWjtpCyRe3pQgbP8unjgAq9uWQKkKhbhv\nwgTWzJ5Df90TaPyKHwLLBSQiI0TkYyAEzAcqMV5MlhyhOeUCqgqF2LF9O5fmFTCZupD5azt3ZkwG\noiatbuc2TSkXUGVlJeXl5Tz91FP89rjjmDJrFs/pHm4CJhUUcNXxx2fChJkeidyEgPcxrnJLneVT\ngUf8uBil+8G6gbaIOYG9+dIj5/z9aUGBnt6/v/7hd7/TefPm6bx58zQUCgVSV+rcQJu0buea62aQ\nsprinMBTLr44rMPq0eUpF1/caPUiwPkA9qrqf0UkT0TyVHWeE9xisQRCIq+Ja3r2zITtHwCr25ag\niTWGFVDywkDxEwfwKnAO8L8YM9aXwDGqekLGK2ftpL5oLrmAgprz1y/OfAD7YnU7Z2lquYAaI37F\nD4GNAQAjMfOmXocJm/8UODu96lks9akKhVheWckk6tLlQqN4TVjdtgRCVSjEtNNP5/HKSm4HJgLT\ngFXAdd260fess3LH9u/ix06UrQ92DKBFjAFUrlmjv+zTp57t/5egK0Ev79ZNJ0+aFKjt3wWfdtJM\nfILU7Vyz2wcpqymMAbhjWFcNHRrV9j+quNjXvNU5OQYgIttxsiUCrYBCYKeqdshMk2RJllRnkwt4\nFrqkcSN//3X77fzBmSsVjL20DLi0uJi7X3+dUFVVOOlV0IjINuen1e0cJBUdbWy9dsew5pWVRbX9\nH9GrV8bdPlPFdxwAgBij6UhgiKrelLFa1ZWnydTP0jRpbNu/i9dOanXbki6NFb/ihyDHAMI4bxcz\nsNHBlgBxIya9NHbEpNVtSzq48StjGil+JSj8BIKN8nx+JCJ3At8GUbjjfrdERGYFIc8vNhdQ7sgN\nB37RKuaNk8kcKc1Bt5tinpzmIuvpp5/m6aee4vff/z5TZs3imdrqlAO/cjUXkNcrohoTLTkyoPLH\nAysBa3NtQSRKlxs549eGDRsyWR1Xv61uW5KmR48ezP/rX/n95583iF+5y5m4KKfxM1KciQ9wIPAK\nZpKcWTH2SXcw3JLDpBoxGRRkyAvI6nbLwp24KPITxMRFqe
JXt/14Af0pQQMyLsW2ZyrwK6Bjisdb\nHKZMST1nSjbzAeVCxGQ8/ba6nX1S0dHG1uttHTsyGWNPz8MEf3Ult7J+xsKPCagN0B/4p7P8Y8yr\n7cJUCxWRs4CNqlohIiWYqSejYucETjyvallZCVOmJF/fsjIoKcnOnMBVoRDzVq1iDXAw5qYJYaKy\n3Bsn6P8rxrypg2niuh2k7kXKbAxdiLfs6nYycwIbvY69PYjr9fTTT7NhwwY2//e/bH/jDc7AzPl7\nDDAZeL99e07s1Yvy8nKKi4vD9v9MXa+MzAls3iR4GyjwLBcCb/t5vYgj8w5gLWa2pC+AHcDjUfbL\nyOuRDQTLbiBYosCvp558sl7gV6auLSa+pcnrdq4FbwUpK9cDwUYPGxbVjDlxxIikZeXqnMCrgeNV\ndbOz3Nm5Sfom19TElH8K8EtVHRFlmyaqn6Xp5AKqF/j12msN/KXdwK/GCppxcgF9x+p27pLLuYCq\nQiGuGzKEI7/8Mmz6KXK2ZTqGJRFBzgl8J7BUROZhXmdPBqakVz1LSyRHIyatbluSxs378/cvv6w3\nb/W1GPv/prw8Kisrszrfrx8SxgGo6qOYeYFfAJ7DvA08FlQFVHV+tB5SJsmEv20mZDZHuakkfctU\nXR2avG4HeX2srPi4k7787vLLKfv0U9511rvpS/4KlPbpw40PP5z0wz/Deh6VmA2AiBSJSEcAVd0A\nbAOGAheJSKtGqp/FB00lF1CuZEusqqpi61YzT7vV7dwm13IBFRcXU1JSQreamqhvsVXdu3PtK6/k\nbO6fSGKOAYjIIuBcVf1cRAYCr2Lyph+FmSTmsoxXztpJmwW5Zvs/7rjjeOGFFzjggAMABmF125IE\nuZLzPx5BjAHso6quQ/ZPgb+p6h9FJA+oCKKSlpZBcXExosqGDz7g91BvwCwbtv9du3bRs87cZHXb\n4hvvW6zX9v9zYGq3bpzmvMW64125TrwxAG/rcRrwGoCq1ma0Ro2AHQNoXLneAbMy6kw/bt6fRAEz\nQdc1oufd5HU7V+zjzVlWpO3ffYt9F2P7n1RczC2LFnHBhRdSUlKS0sM/G2MA8d4AXheRf2F8mTsD\nrwOIyP7Ankaom6WZMH3y5Ho3jTtgdiewMQu9ptNOO43zzz/fXbS6bUlIrLdYyP2c//GINwYgwE+A\n/YF/qep6Z/0gYD9VfTnjlbN20iZNZWUlby9cyBPXXMP3vv66ga/0pd27c9vChY1+46gq//znP7nw\nwgsBDrS6bUmE+xbrdmQi3T5/NXQo50+alDOmH79jAMlGOf4wmf3T/WATZvmitLRxj/NLrIjfykZO\n+hYLPNGSVrdzk1R0NGi9DoVCesnxx+sk0N+ATvHo8CTQX/bp42vKx8YEn5HAySrtkmT2T/eTqZvE\npoLIbCqIRHOkToqR8iGezEwQ0QA0Wd3OtfQNQcrKhVQQlWvW6Ng2baJ2ZE7v3Dmwh39Ozgkc+WaR\n5P6WFkgir5+qLJl+EmB121IPr/vytG+/jTqGtU+vXoSqqlCRnDD9JEuycwIfq6rvZLA+keVpMvVr\nqeRaLqBE9tJc9JW2up2bZDsXULx8Pxe3acMdK1fmWkcG8D8G4KsBEJETgGI8XkOq+ng6FfSDvUn8\nkSsNgDvo+/frrmPQxo0NgmTCXj/33kuP/ffP+oCZkwzuRKxu5yzZaABcPX75vvv45p13OLS6mssw\nnRdvR2bKiBH8YebM1AvKIIFNCi8ifwfuAr6PSXd9DHB0mpU7UEReF5EVIvKBiKQ68UZK2DiAzMgV\nVV6YOJF/bdxYL9VDFXWmn1R8pTPsH93kdTvbPvLNTZaosnjyZO576y3+WW3m+J0GfEVdvp8bevbk\nexdcwNNPP91o9coEfsYAjgb6B9xdqQauVzNpRnvgPRGZq6ofBlhGiyHbuYC8Pf8uGzdyF3U9/zLM\nE3Yi0P6II3LRXnqi1e
3cpbFzAXnTPETT41KcMaw336TImfClSZNolBh4Btjfz4hyqh9gBjA0yvp0\nBsItjUQ8d08FvSWHXeWsbltc3liwQK/u2TOmHv/GWXfV0KG+vNeyCQFOCDMPGAi8A+z2NByBpLkV\nkWKgHDhN6keOAAAgAElEQVRCVXdEbNNE9bNkj0Q2f7fnf3737lwydSpDjj8+l3r+7hjAFqxut3gq\nKyv5zUUXUbRwYYO5fV09vhPY1adPk8j2GVggGHBKtI+f1sWH7PbAYmBkjO0Bt4uG5hYHkA25lWvW\n6MQRI/SiNm10kicw5vwM9PwzPCVkk9ftXPPdb2qyXF3+sUg9XXZ7/7eAXlJYqJccf7w+9eST9Xr+\nuXqOBBUHoKrzfTY6SSEiBcCzwN9VNeZQeqYmhU/n+MZcrqioyKn6bPjiC97429/YvGABBXv2cDZw\nNsY2ejRmhvXpmB7Tq507c8aFF6Kmp531/yvaxNmZ0O9s6nYu3RtB6m5FRUXg5xupy6Oor8tDMQO+\nS7t353uXX07/ww/nggsuyMnrleqk8H5MQEMwg+CHAa2AfGCnqnZIqqSGch8HvlLV6+Pso4nqZ2kc\nqkIhfn/FFWxesICD9+yJ6hbnDpJNAr5tAq/KjgloMVa3WxRVoRD3TZjA53Pn0vvbb+Pqci77+scj\nMDdQ4D7gQuBjYB/gMuDPaVbuROBi4DQRWSoiS0RkeDoyWzJTpmTuuKpQiF+NHMmv+/eny6uvcuee\nPQ3c4qZj3DxrMbb/pd27c/Rtt4V7/jmO1e0cJhXdjnWMV5fbzJrFHd9+m1CXWw0e3FT0ODUS2YiA\nxc73Ms+6pX7sS+l+sGMAWcsFFMvGXy+ZW4R3xCTQkd/5jr6xYEHyFUpQ10zg6FeT1+1ctUPnUi6g\nN+fP17Pbt9dbQEeDrvShy1f37JlQl3Pterm4up3o4ycO4BtnntQKEfk9Zn4AP28OliZCVSjE9MmT\nqV2/nm86dGDbzp3s+M9/6P3tt9xB/ddjrz+020uqBa5u1Yo2J5/MBT/7Gd8/6aRsnUrSWN1unrg6\n/c2nn/LhunXUrF/PIMwr3krgEcwsXtOJrcu/fuihJmf6SRY/YwBFwEaMjfQ6oCNwv6p+kvHKxbCT\nev/cTzZsYL9Onfhyy5asfx/UvTudDz6YMbfd1qiKk2zou3v9pvzjCUYccCCyaRNHeuz6k4HxxLaL\nlgI3YNzi1jbhm8UZA9iHHNJtv7h27I1vv83Wmhr2tGlDcbduYX38fNMmCnbtolqkwTY/+ySzre+B\nB2ZM7/3oduTDftb6zxiWl89htbX8kjo9/jnmwe/V51qahy5HEnQuoH2A76rq6iAq55doN0msRGPu\nn5sL379r3ZqNXbtGveky8f18ZYhRxb187f/5pk3hB/7tKDuQmDdHKXX+/M3pwe/iNABtyRHd9ktV\nKMTdJSXcsXZt+B6YDOzAtGKR+vhwnG1+9vG7zav3QTUuc9Z/1kC3vfvsys+n7ZYt9Tow7VFWIuG6\n3UJ9f36vs4LQPHQ5kiDjAM4GVgMhZ3kgMMuPfSndD1GMeRNHjoyaY35KEt//l+T+fr5HRyxPcmyM\nK9P8nu5jP9CU5ILWs4FG2kLV89s9p0tbtdIrhg2L6dvflMZXAM0l3fZL5D0wz/P/xNLPeNu8+0Tq\nsZ/jJxFd7893vq8DvTyGLiba5ur2+T6Pr3SOiaybV6cvdbadIRJXl/3Q1McA/Ng7pwDHYiImUdUK\nIGvNZNutW8N5uV3cUXu/3/skub+fb41YzsPYy/+V5vc8H/vdzJSU5N7MlHpeD17vB1cxXLvotfvs\nw46RI7n1ww/5S467dyZJzui2X2LdA3nE1s9427z7ROqxn+PziK73Y5zv24DuRNfFRNtOcnR7jM/j\npwOlTGlQN1enw/rcvj2/KS9vbrqcNH4agL2qujViXdYcmPMOOICdEeu8f66f72OS3N/P
d+8o64No\nWIp87HcHZSnJdY+LvDkmY264ndQ9+EtXrGDqjBkJbxY3MCVIMiHTJZd02y+R90AJdf9dLP2Mt827\nTzQ9Tla2q2c/IPkGJPL7VEdHf5DE8b+irF7dajEmn/Mx+tx15EhKly3jxJNPTvLKNyRI3cyknsfC\nTwOwQkQuAvJF5BARmQa8leF6xWTolVdyQ8+e4RvAHQM4P8e+xxBsA5Op78ib48uhQ9GRI/nbqaea\niVt8PvibKrmk234ZeuWVTOjatd49MBnjqRFNH+Nt87NPMtvGkH4DkkrDFU2fJwMfFhay+IADqB4y\nhH+1AH1OFj9eQG0x4yhnYMZMXgZuU9VvM165BF5AOz/9lE83bKBbp05s2rLF9/fe/HwKa2qSPi7e\n9weffUa3bdvqDUYFMTh9JPBBAHKiyZ3nDNr1PeggOvfpE4gXR3l5eeA9mUzIhPAg8B3kkG77JewF\ntGgRn+7aRYd996WoW7ewPn6xaRMF335LNbCnTZt62+Lts/KLL+i///5xj/duizYI6+r9FOcTxACz\nKyvRIPTnHTuyb00NWlBA0ZAhTJg6tYFOB6lPuSorUC+gbJGpcPlMPaR6FRWl1TBF+3ZvyKAaqk1b\ntnBQjx5sad+esgx4PTS1BsDPTZIJgtTtbD+EonXIvti0ia+3b6dzYWHcBsTvNm+nLXKfZDsw2b5e\njSEr7QZARGbFO1ADSpkbD5svxZIJRowwqjt79myA2ZHbrW5bmjpB5AI6HjgQeAPjOvvHiE+zIT8/\nn8GDB3PEEUcwaNAg7r77bprSzenNffKnP/2J/v37c8kll9TbZ/78+XTq1InBgwczaNAgzjjjjJRz\nCMVj7NixPP/888ELDpCFCxfy2WefuYvNWrebOkHmArJEIZZ/KCYz4nDgMWApcDtwuB/f0qA+pOEr\nHY9If9t99903/HvTpk06bNgwLS0tTUumS01NTZK18yfXi/cy9evXT9evX99gn/Lycj377LPryY28\nvNXV1alWM8zw4cP1ueeeS1uOl6DjAKqrq/Wll15y4wCavG7nqi96LuUC8pJr55gJWaQbB6CqNao6\nR1VHA0OAT4ByEflFEA2PiAwXkQ9F5CMRuTEImX5xc4tHo2vXrjz00EPcd999ANTW1nLDDTdw3HHH\nMXDgQB5++GHANJxXX301/fv358wzz+Tqq68O93x79erFTTfdxNFHH82zzz7LmjVr+MEPfsAxxxzD\nKaecwkcffQTAV199xY9+9COOO+44jjvuOBYuXAiY3vqgQYMYPHgwl1xyCTt3Rjq+wt13382RRx7J\nUUcdBfwJgKuuuipc1r333tvgGPW81bjXYOzYsVx11VUMGTKEG2+8kW+++Yaf//znDBkyhO9973vM\nmjUr7nUA+MUvfsFhhx3GGWecQVVVVXj9a6+9xuDBgxkwYACXXXYZe/fuDV+fX//61wwaNIhjjz2W\npUuXMnz4cA455BD+8pe/JPV/pUJ+fj7Dh4cTdDZ53Q7y+lhZzUOWX+ImgxOR1sBZmJS5xZgnzQvp\nFioieZg000OBz4F3RWSmNtLE2Vu2bIm7vVevXtTW1rJp0yZmzJhBp06dWLRoEXv27OHEE0/kjDPO\nYPHixaxdu5aVK1eycePGBhMxdO3alcWLFwMwbNgw/vKXv9CnTx/eeecdrrrqKl577TXGjx/P9ddf\nzwknnMC6des488wzWblyJX/84x+5//77Of7447nlllvYZ5996slesmQJjz32GO+++y41NTW0b38c\n779/Cg888AAvv/wy5eXldO7cucF5vfHGGwwePBjA2T4BgPXr1/P2228DcMsttzB06FAeeeQRtm7d\nyrHHHsvpp5/OE088EfU6LFmyhI8//phVq1bxxRdf0Lt3bwB2797N2LFjmTdvHn369GH06NE88MAD\njBs3DoDi4mKWLl3K9ddfz9ixY3nrrbf45ptvOOKI
I7jyyiuT+r9SYffu8AyQT9DEdTvI62NlNQ9Z\nfonZADiTWhwB/B9QpqrLAyz3WOBjVa1yynoaGAk0SgOQDHPnzuWDDz7gmWeeAWDbtm18/PHHvPnm\nm/z4xz8GoHv37vSK8D74yU9+AsDOnTt56623+PGPfxzugbs94VdffZVVq1aF1+/YsYNvvvmGE088\nkeuuu46LL76YXbt2kZdX/0XtzTff5Nxzz6VNmzbOmlG88cYbDBgwwGtiaMDJJ58c7tFPmTKF1183\n693zcM939uzZ/OEPfwBgz549rF27NuZ1WLBgARdeeCEA+++/f/g6rF69mt69e9OnTx8ARo8ezf33\n3x9uAM4++2wAjjzySHbu3Enbtm1p27Ytbdq0Ydu2bXTokNacLHG59NJLWb48rM4tVrctlnhvAD/F\nxFaMB8ZJ3aQIgrEvpXOHHgCs8yx/hrlxGoXKysq429esWUN+fj7dunVDVZk2bRqnn356vX1efPHF\nesuRZpp27Uywfm1tLZ07d2bJkiUNylFVFi1aRGFhYb31N954Iz/84Q958cUXeeihh/if//kfDj30\nUL+n5wvvNXDr6vLcc89xyCGHNKhrstchVkME0Lp1awDy8vLCv8F4L1RXV8esaxA88cQT3nN+q6nr\ndpDXx8pqHrL8kpU4ABE5DzhTVa9wln8KHKuq4yL2azquOJYmiQYcB2B125Ir+NHtbE1+sR74rmf5\nQGddPVRVGuMDVGO8QVYAFcD1nm15mGjRD4DlwOtAB2f9A8AqYC7wCjDMOSYEfMcjoxiYA7zvlDHZ\nWd8V+CewzFn/gLN+mlPW+8CTQKsodb7O2ecDYJxnfb2yPetLgNlR1j8KnOdZ3gf4i+d8Z8e7Ds62\n+zAmjrnAi6484DTnui7DBG+2iqwjJnvAtET1z+B/HzQ5pdv203I/fpRVNDtvAPmYNLxDMbMwvQNc\nqKqrGr0yaSAi7VR1p4h0ARYBJ6rql9mulyV7NBfdtrQM/EwJGTiqWuO43M3F9CwfaaI3yL9FpBNQ\nCNxqH/6WZqTblhZAVt4ALBaLxZJ9msQE2CJyrYisEpEPROTOAOSVishnIrLE+QxPfFRS8n8pIrWO\naSgIebeKyPsislRE5ohIj4Dk/t65rhUi8pyIpO17KSI/EpHlIlIjIoMDkBd4UJWIPCIiG0VkWRDy\n0qhHIHqdCX0OQoeD1NsgdTUIHQ1SL4PSRxE5UEReF5EVjk6NS3iQNmL4eyofzODlXKDAWe4agMxS\nzEBvJup7IGbANwR0CUhme8/vazGDxUHIHQbkOb/vBP43AJl9gUMwg8SD05SVh4nSLcKY2SqAfgHU\n8fuY6R+XZUIHfNYhML0OWp+D0uEg9TZIXU1XR4PWy6D0EegBDHSvPWYsKm69cuINIFbLJSKdgaeB\nw4AXRaSjqn4VVLGJyhaRuSKyWkReFpGOPuVOBX6VsPAkylXVHZ5D3YmPUsYtG7gXeN8p+22gd4rn\nHEZVV6vqxzjXN1q5SVzrcFCVqu7F6MLIdM5ZRFZgvLdGJig701wF3Kmq1QAB6HWQHk2+dDgRQeqt\nqr6qqu7xb2MaqVRlxdRRnwSml0593gS+TvV4j5wNaqY1da/9KkxcSkxyogHAuGFer6qHY7KQXiMi\n/YCbgBqMC+FhwDIROTqgMn8hIhXA74DfxCj7VVXti+kp3JxIoIiMANap6gc+yo93zg3KFZHbRWQt\ncBHwmyTPNWHZwDiMC2hS55xuuQmudbSgqrgKnUTZlwKt45SdaQ4FThaRt0VkXgB6/QvHPPLXdBqx\nJHXYj7wg9dblZ8BLAclKhSD1MiOISDHmrWJRvP2y4gUUhb8D3aUuIrMHJgVFK2Ab0BnT6r6Nmf+5\ndyKBIvIKZr7o8CpM9sdbgPsxXjsqIrcDY4E3VXWHiKzC9C5GAqc4xz4GlAM3xZE7Cfg1cHrEtqio\n6gZgg/N7h/O2
839AT2CNiFyCycjaAbhJVScBkxx747WYCZISkuA6VDhl12BeZQ8FLo8852RkqmqD\n/PpxzjnutfZzfn6JUvYnGDNAxspOoCsFQGdVHSIix5BAr5PU57sxE2alUi/fOpyoXqo6Oxm99aNX\nInILZp7yJ9OpV7xjmzoi0h54Fhgf8RbWkHRsTpn4YIKmKjE2rK8xD8VTnG2bMba37wRYXhGO7S2y\n7Ij9NieQcwTmAbMGYzvd68jaL9lzjlcucBDwQYDnPxH4FuiS7DknkDuPOPZVP+eMydQ5x7P+JuDG\ngHRsHSagLbBzTrIOYb12lgPRa68+p3BsyjrsQ3baeosJGvwP0Dqg/yCujsY5LnC9TOd/i5BTgBm/\nGe9n/1wxAQFRWy4FZmAiSsG04oWq+t80y/F6I4wClsco20tcf1lVXa6qPVS1t6r2wrwWDtIEsQF+\nyhWRgz3L52Bse2kjIucAtwGXqermaGWnW0SMcv1e63eBg0WkSERaARcAcWeqS1ihurJvdcoJ+pz9\nEtZrETmUNPQ6mj6nIidVHY5Tr8D0Voxn06+AEaq6O9H+yYhO4ZjA9dKpRxDjOH8DVqpqw3zw0Qii\nJQ2oNW7QclE3iPF35/cuPL2mNMp6HJOeoAJzI/aMUXZ353cPYFWSZawhgQdFnHOuVy7mgeXWdyaw\nf0DXeyfmrWqJ8/k6nXN2jjsH07vehYmEfSmVc/ZsG47xZvgYYwoLQsfew6Rq3o3p5Y5P55xTrEuh\no9cfAIvT0eso+tw9oDom1OEExwemt87/X+XR1fvTkBVXR33KCFIvn/To41pgbIpyTsSMmVZgUrAs\nAYbHOyZnAsHEpJ/+SlWv96z7HeaV/HeODbGzqgZqG85m2facG6/cbJdtseQiOdEAiMiJwAJMb8h9\nLf81Jo/KvzD2wyrgfFUNdNaEbJVtz7llnLPFksvkRANgsVgslsYno4PAEiXEOYuBNxZLoyAi451A\nN3/h+BZLlsi0F9CjwJkR67IVeGOxZBwRORzjg380JhDnhyKSMG7FYskGGW0ANHqI80hMwA3O9zmZ\nrIPF0sgcBixS1d2qWoMZdxiV5TpZLFHJRhzAfqq6EcLRmftloQ4WS6ZYDpzkmDrbAv8PM8BsseQc\nuZAKIuYotNh5Uy0ZRn1OnZeEvA8d19JXgB0Yf+yayP2sblsyjR/dzsYbwEYR6Q7hCMa4UYZ1QQ5B\nBLb422/06NFpl5XqJ1tlt8RzzhSq+qiqHq2qJcAW4KN4ut2Yn9LS0hZVbks9Z7/EbQBEJE9Ezk9G\n+aOJoX6I8yxMTg+A0ZgIQYslJwhC50Wkm/P9XeBcTKSnxZJzxG0A1OTfviFV4SLyJPAWcKiIrBWR\nsZjJHE4XEXfi7LRn+Aqa4uLiFld2SzznaKSr8w7PichyTOfmalXdFm/n4uJiRKRRPmVlZY1WlvvJ\npf+3Htu3w8KF5jvRrru3s3DdQrbvTrxvU8LPGMCrIjIR+CcmdwwAapKHxUVVL4qxaZi/6mWHkpKS\nFld2SzznOKSs885+JydTWFVVVVKv7U0NEck9/dq+HU46CVasgMMPhzfegH33jXr89t3bOenRk1ix\naQWHdzucN8a+wb6to++bsNwcw08D8BPn+xrPOsVHTn6LpYlidT5gcq4BWL7cPPyrq2HlSvN7yJCo\nxy//cjkrNq2guraalZtWsmLTCoYcGH3fhOXmGAkbADVpYbNOaWluyLAEz4YNG5gxYwYbNmxgy5Yt\n1NbWUlNTQ35+PrW1ZhbAvLw8ampqEBFUtd62goICqqurw79ra2tp1aoVXbt2pVOnTgwcODApM0RW\ndX77dvNwOuKI6D3SRNst/jjiCNPzX7kS+vc3v6Owffd2du7dSb/v9GP1f1fTv1t/Du8Wfd+mSMJc\nQCJSiJm/1H2tLQf+omYuzNQLFhkPXOYsPqyqf4qyjzbnV+OWQGVlJRUVFWzZso
WvvvqKPXv2kJeX\nF/OBXVVVFT62tLSUsrKyBr/jbYv8HQ+nMYk2d3FaOi8i12GigWsxyefGquqeiH3Cuu02agnNEkmY\nLVyuu+46pk6dWm/dY489xsCBAxkwYICf02nA3r17ueOOO9i6dSt33313wv3D55dLbN8OixaBCBx7\nbNTr6DX99PtOP+4Zfg/HHnCsL/NPtoml25H4MQE9gMldfr+zfImz7rKYRySunDdcvhp4SUT+rapr\nUpVpyS6VlZVUVlayZcsWPvnkEwoKCvjmm28AaNu2LdXV1ezcaczpo0eP5rHHTDD4pEmTKCsrY+/e\nvUycOLHeQz6LpKzzItITM/VhP1XdIyL/xEwY8njCUhOZJXyYLZYtW8aCBQv4+OOPmTp1KiJCVVUV\nEyZMYNSoUezdu5eCggKef/55HnzwQcaNG8fmzZt57733+OKLL/jf//3fsKxp06aF37IOPPBAzjvv\nPAAKCwspLS3l+uuvp0kSrSGNgtf0s/q/q2nXql2TePgng584gGNUdbSqvu58xgLHpFluTofLl5eX\nt7iyky23srKS8vJyZsyYwV133cWMGTN4++23+eSTT9i5cyfFxcXs3buXvXv3MmHChPDDHxp6/YRC\noQDOIFDS1fl8oJ2IFABtMZN9JMY1SxQWRjdLJNoO7Ny5k9atW7NlyxY+//zzcM974MCBXHLJJaxY\nsQKAc889lyuvvJLFixezfft22rVrx/Ll9ScS83ryRCPnevV+idaQRuGI/Y7g8G6HU5hX2OxMPy5+\n3gBqRKSPqn4K4CS2ahDZmCTLgdvFTIS+GxMu/26aMi0ZJNKUU11dTZs2bcI9+1NOOYX58+eze7eZ\nre+cc87h/fffz3KtUyZlnVfVz0Xkj5iZnb4B5qrqq75K3Xdf0xt1e6aRZolE2zFvAB07dqSmpoba\n2trww3vJkiVMmzaNvn37AmZMxQ0aWrFiBT179mTv3voWrl/84hcxq/rnP/+ZiooKFi5cyPHHH+/r\n9HIGn/b/fVvvyxtj3wh7/zS33j/4awB+BcwTkTWYgK4iYGw6harPcPlskXMeC1kq1/vQ37LFzJHS\nqVOncG/+pptuCptsSkpKmD9/fkpl9+qVE34GXlLWeRHphEl4WARsBZ4VkYtU1V8w2L77xvRG8bP9\nyiuvBOCCCy4A4O6776aqqoqhQ4dy7bXXNtjfOw5w4YUX+qoiwDXXXMM111yTeMdcxEdDCmYMYPmX\nyzlivyOa1MPfR1hDmLgNgIjkYebNPATo66xerQFMyqyqj2LSRSMiv8XM0dmAMWPGUFxcTHk5nHOO\n8ehwH1au2cLv8pgx5YwZ43//lri8YcMGevTowZYtW3j11Vdp3749PXr0YOvWrYRCIYYPH45LeXk5\noVAo/AB3TTmRyy7Rlr2mp0h5qciPlOc9v3vuuYeKioq4HkEB6PwwYI0bMyAizwMnECUaeMqUKT5F\npkdRURHjxtlpCeqRoCFN1fc/W5SXl1NeXs727fDoo/6P8+MFtFRVB6VZv2hyu6nqJidcfg4wJDJi\nsr6nBKRrcvQro7y8PGs98WyVXV5eTnFxMZWVlYDp/VdVVYVNOy5+vG+S3TZmzBh69eqVkowtmzdT\nUFnJp6+9xvbaWmTffcnfto2a/HwOHzaMCVOnUhTjDSOOF1DKOi8ixwKPYMYMdmM6Oe+q6p8j9mvo\nBdRMaYrnt3DdQk6efjLVtdUU5hWyYOwCX77/2cD1DP7Od+C002D9eoDgvIBeE5HzgOcD9sl8TkS6\nAHvxES5vyRyuqcf9XVxcHHbHTMe0E4uqUIhlzz1H682bGfnww3zx5ZesLyjgjPvvJ2/rVvZUV7O3\noID/3H8/+du2US0SdVve1q1U79lDf1WmAl2Bybt2sQO4Dnh45kx+u2QJt8yfH7MRiEHKOq+q74jI\nsxiz5l7n+yG/xycyOzRVs0TO4DOOwh0AXr
lpZU4PAH/+OZxyCoRCkJ8Pe/YkPsaLnwbgSuB6oFpE\nvsXYRFVVOyRdWw/Jhss3Ji1lDMBr4weoqKhg69atdOrUKS25WzZv5lcjR4Z75d6H9578fF6+9Va+\nV1tLV+B8THf553v38rD34V1Tw/Yvv0Tc5XjbgN8CtwC3YZJL/cv9vW4d0ydPpvSJJ5I5hbR0XlXL\ngKT9WROZHVIxS2QiDmDHjh385je/oX379px33nlhOTNnzmTevHn07t07N01OScRR5PoAsBvGcOWV\nsMZxnq9JYRQ10RiAAIer6toU6mjJUVyffYAtW7ZQXFxcr5fv14Mnsie/+6uvzEO+pobBEL1XXlPD\ndoxtJPzwx/PAjvhNEtumA6UY3+ZaoJ37+3N/XpiQXZ1PlHLAT0qCxogDePXVVxk1ahQnnHACN998\nc7gBaNeuHR06dGDv3r2oakz30ayRRPqHXH7T8vb6vQ/9Vq2gthaKi+GTT/zJSpQNVIEXU69q06Q5\nxwF4H/6unR/8++K7D/2VDzzAmEMPpecHH9B3/XruXL+eI3bv5s81NRyLGUX9LfAV5gHdnbqHdQ/P\nchnwOyIe2J7fDR7mCbbtdL7zvL979vR9fbKp84n8zv34pTdWHIC7zrtt2LBh3HrrrfTu3Ttws2Eg\n+IijgLo3rZOnn8xJj56UUxlAP//cBC5/8kndwz8/Hw4+GD74wLzULFniX54fE9ASETlGVQP10/cT\nLu/F5gJKj8gHf6Sdf/r06VGPqwqFWPzUUxR+9hnbamrqmW+mYHrwsXryELtXjme5hvoP7ALPbyKW\n420DmAzht43JwI6DDuKW225L5lJBGjovIodisogqxnTUG5gcLdVJJInMDn7MEo0RBzBs2DBKS0uZ\nO3cuF110EatXr2b9+vXk5+ezaNEiKisr+e1vf5vwWjU6Pt0/U03+lmm2bzc9fzPIa+jdGx56KGY2\ni4T48QL6EDgYqMLca6499KjkiwvL7Am8Sf1w+RdV9fGI/WwuoACI1uv3evdEetiMufRSrh85koIv\nv2TjV19xZE1N2F7/MHUPWO9DfyJwF+ZBXIZ56LvcgGkQCpz93MbBXb4Q8xbglf0wsB3q2fnjbVsD\n/Ld1a/bp2DEIL6BAdN5xKf0MOE5V10VsazQvoKqqKmbOnJk1u3xT8wJy3wDcAeBccAHdvh2eegqu\nvrqu53/ggWYcINoLbpC5gM5Msq5+ccPla0kmXN6SNMXFxWHf90SBW+tCIa496iiO2rGDQhr28i/H\nOLS75hv3od8O4/JSSP0eOTTslXsf3lfk57M9P59x+fnIvvuSt3Ur1ziePu7DfJxIzG3jnQd9r6Ii\nBvm25iQAACAASURBVHXpklQyuDgEpfPDgE8jH/6NjY0DSI5cGwD22vwLC407e3ExzJ8f/eGfDDEb\nABE5zcmDUiUivVQ15Nk2CtM7Som0wuUbgeYSBxDN7BONqlCIOY88Qo+vvmLbrl0cjnnQd8X05N1e\nvvvA95pzvA/9ZZievPuQ30Rdr9z78G7ftSttgUldunD3zJmUlpWlHAcQbVuqZEDnfwI8lXbFLMHg\nwwXUO/ibbbNPNE8fEXjgAfjJT4LJBh5vEPguz+/nIrZNSqfQiHD5nkB7EYk1e5glBWIN9kayLhTi\nF0ccwcHr1jFw1y7uw/QK3AHcMszD3/vA9w6yLsO8HVyRn8+2wkKuyc9ncZs2tB85kl7jxjHs5puZ\nu3Ejx998M6dMnsywm2/mpc8+46jLL+eo885L1j8/0wSm805K6RHAM8kcl2iWwiRmMbR4cV1ATz7Z\nfEe5gLk0+OtW9wc/qHv4g+n5B/Xwh/gmIInxO9pysvgOl3dTQQDhyT1yKXVCJpZdUj3ejeitqKhg\nw4YN7LPPPoCZeMVNreD2+lutW8fJmDzHKzEmnymYP6IUGENdL/8l4HXgCIz5ZmReHts6dGDSQQeF\ne/JAuD
c/ZswYvt66NXw+sVI3xEr9kIVUEEHq/A+A91R1U6wdIlNBZGA6gEaNA8hpfLiA5tLg76JF\npspeT59evYzZJ9p/7qaCSJZ4DYDG+B1tOVnWAkNEpA3GJXwoMbKBut4pU6bAhAn1t0WaShItl5eX\n4L3nkj2+KS0XFxdTUlJSzzRywQUXsHr16rCdf6hj5z8fmIZJYv836sw8RZh8BndjTDu3Aa3324//\nFhYyqUsXHpk5k+mPm3H7ol69GiR187PsHYuIHJeIXE5FnpcJEQoUxWwUpM5fSALzj9sAuPUIYDqA\nrMYB5DQ+MoDmQvSva/YZN67u4X/YYTBtWnxPn5KSknr67tckGq8B6C0is3Bc2ZzfOMtpvbenEi5f\nVgYRHaak8SujqY4BJLL5uy6dbVavZgCm1+/a+Y+kzmVzL3Xd3SsKCtjbpQuTunfnu6eeypP33htW\nrqDMN95efpYJROdFpC3mLfeKZApP9Izyk8U4URzAL3/5SwYMGMC5554LwOLFi+nWrVvcOIAY51jv\nO+fx4QKa7cFf7xueM2EeBQXm4T90aGbKjNcAjPT8vitiW+Ry0qQaLm+JTjSbv7cBcHv9A6L0+ssw\ns58XUWfXL8jLY9fBB/PgnDnhXn4LIBCdV9VvgG7JFh7AdABZiQNoMvjIAJrN6N9oZp/DDzc9/0wR\nswFQ1RwM5WscmmIuoFiunjNfeIFPXn45aq//59T1+g/EDO5eLsJ+Q4eyb//+dOrSpVEGaf30/h98\n8EFat24dnjD+jjvuID8/H4A777yz3u/CwkJatWrF9OnTk5oUPhd0Ps3pABplPoD27dvzxz/+0de+\nTYVsp3/evh2uuy45s08Q+IkDsOQw8cw+/1mwgFUPPsjRe/ZE7fW7Pvw7gXdEyD/kEPoNH85Uj5kn\nCGbMmAGYXDF79uxp8MB2f99xxx3k5eWFA4fuuOOO8MO8TZs29OvXz/fD3FKHjQMgoQtotgeAFy2C\nVavM70ybfbzYBiAKTWkMILLnP2bMGObPn8+6UIjbbr+do6urww9/N3J3OvVt/de2b0/hiSdybBpT\n+8V6yOfl5VFZWUm7du04+OCD6dSpU706p3LOFktS+HCfytYAcLRB3379Mmv28ZKVBiCVfCk2F1BD\nInv/AE8/9VR4oPcYGpp8XJ9+b6//wTlzwi6ciXBdzdq1a8c999xDYWFhuPx4D/mWhIh0BP6K8Zit\nBX6mqoti7V9UVNR0BlNToKioKLsV8OE+lY0B4FiDvvfck1mzj5d4kcCzieP6pqojUi1UVT8CBjnl\nuPlSXoh3TBCz5/mV0RTGAKIN+m7ZvJmFN93E02vX0g7zkC+lvsnH2+v/7nnncVCvXlFdOL3luHTs\n2JHKyko6duxIcXFxYA/6XOn9B6jz9wL/p6o/FpECTKqTmHivsSUDJHCfylb0bzYGfSOJ9wbgej2M\nwmTwdWfUuBDYGGAdciJfSlMj2qDvh3PmcPTatfwe48c/hroHv2vy8fb6Y3n3eHv5M2bMoHXr1hQU\nFNCpUyc6depESUlJc+3dp63zItIBOElVxwCoajVgZ7vLJnHcp7I1+JutQd9IEnoBicgfVfVoz6bZ\nIrI4wDrkXL6UXB4DiDbou2XzZj55+WXaf/QRAD+jzuxzLXUmn8UFBXz34ovDvX6vTNcXP1O9/Hjk\nyhhAQDrfC/hKRB4FBgCLgfGquivY2lp84R38jeI+lY3BXzezZzYGfSPxMwbQTkR6q+oaABHphUkL\nkzaefCk3xdonG6kgXLKRCqKiosL3/mPGjGH4mWey9e9/5+mvv+ZdzEQsrqfPUEwCm2LgisJCzvrD\nH3hv6dLw+bmpFNwGZdeuXeEHv3eCePf6N/Xr7SMVhEs6Ol8ADAauUdXFInIPRr8bjEB5U0FERnJa\nAiAHB3+9VXIzewZh9kk1FYSf+QCGY6J012DMx0XAlar6cvLVbCB7BGZC
+OExttv5ADxEs/t/+vLL\nFC1cGJ4hawym9++afX4M7Dj00Abune58AB07dqRTp07hqSFb0gBunPkAUtZ5EekOLFTV3s7y94Eb\nVfXsiP2sbmeahQtN8rfqavO0XbAg6lvA9t3bG23w99VXYfhwY/opLIT77w82uZtLYPMBqOocETkE\n6Oes+lBVd6dbQYeE+VJcpkxJfyA4CBnZJNLuvy4U4stFi/gu5uHv9fN3zT7f9u3LkBgBPkVFRWF5\nLeWh74d0dF5VN4rIOhE51HF2GIrJs2dpbPzkzsB4ADXG4G+k3b9v38w8/JPBzxtAW+B6oEhVL3du\njL6q+u+0CjZyq4Deqho172r9WZMg3Q6TXxm5NgYQze4/84UX+PKBB3ho7956Hj8/py6l6q4+fag5\n6yw6dekC1PX6oz34c+2cG4M4bwBp6byIDMC4gRZi3iLGqurWiH3sG0BjsH173NwZjZn+wdv7LyiA\nOXMyZ/f3+wYQbz4Al0eBPYAbJbQeuD2NugEmX4qqdov18LfU4Wb2LCkpoaqqil5FRSx77DF67d3L\nXZhWtB11ufs/BBYfeijXvvJK+OHvYnv9vkhL51X1fVU9RlUHquqoyIe/pZFwB4DjPPwbK/9/ZO+/\nMYO94uGnAeijqr/HeBG6ia6ab9QKuR0HsGXzZqadfjqztm7ldsycutOoawT24ph9LroI9QQXucE4\n7kM/0pUzl885C7Q4nW92+JgAJpoHUKaqEun105jBXvHw4wW0R0T2wQmQEZE+mBz+lkYg0vzz4Zw5\nHP3pp1F9/ScCy9q35+AzzwzvD7bXnwJW55s6PqJ/G8MDKFNeP0Hh5w2gFJgDHCQi/wBeA25It2AR\n6Sgiz4jIKhFZISLHpSszKFJxp8pU2W5vvVdREc/deWc9X3+39/8Vpqt6RWEhN774Yj2zj9+Hfy6d\ncw6Qls6LSKWIvC8iS0XknUxV0hIHdwC4sDDmALCb/mHB2AUZCwBzo32rq43554EH/M3k1ljEfQMQ\nk6DkQ0xk5BDMa/B4Vf0qgLKTCpdvabmAvD3/xe+8Q+jee3n688+jpni4E1jYsSNHjR7NXmcGp0iT\nj+31+yMgna8FSlT16wxU0eIHH5MnZHoAOBe9fiLx4wX0gaoeGWihJlx+qar2SbCf9ZQAftC3L0d/\n9FFMX//z8/M54Jpr6NSlC6ecckq9tND24R+bOF5Aaem8iISAo1X1v3H2sbqdKRKkfobGSQHhDUPI\ntNdPJIHFAQBLROQYVY06Z2+K2HD5OET2/ruuWcNN0KD37/r67zr4YDN5i+31B0W6Oq/AKyJSAzyk\nqg8HWDdLPHxE/0LmU0Bs3w47dxpvn9WrjRUqV+z+Xvw0AMcBF4tIFeZ5I4Cq6lFplusrXD5bqSBK\nSkqylgpiwoQJFBcX8/RTT/Hkrbfy/6qruQvoi8lQ5pp9KoGf9ewZHvR1r1Mq5Ueee2Odr7fMHEoF\nka7On6iqX4hIN0xDsEpV34zcyaaCyAA+Bn8hswPA3jaoXz946aXMJ3nLZCqIqMm8VbUq6dLqZOZ0\nuHw2g6KefvppevTowYYvvuD18eOZumlTg55/EXUpHkZPmcLqjz5K29PHBoLVWx+YzotIKbBdVe+O\nWG9NQJnAffq60b9xRlwzlQIiMt1DjAwUGcWvCShhA+ARuB/Qxl1W1bWpVw9EZD5wuap+5NwkbVX1\nxoh9WuxNct0559B+5syodv+JwAV9+3L0hRdam38aJLpJUtF5J4o4T1V3iEg7YC5QpqpzI/Zrsbqd\ncbIY/bt9O5xwgnkRATMM8dZbjT/wG1gksIiMEJGPgRAwH2N5eCntGsI44B8iUoEZB7gj3s6NOSFM\ntqkKhdj68svchDH3RLp73uCYfRIFd1lSI02d7w68KSJLgbeB2ZEPf0sGyWL0by4HfMVEVeN+gPeB\n72C8dgBOBR5JdFwQH1M9g+dnyviV
MW/evPQLS5JQKKTz5s3TqVOn6iVDhugOk7Yo/NkBOgn0tI4d\ntXLNGp0yZUqg5WfjnLNdtqNfWdF5glBoS322bVMdMEC1oMB8b9vWYJe31r6lBbcWKFPQwlsLdeG6\nhYEX3aZN3Co0CrF0O/LjJxBsrxp3tjwRyVPVecDRiQ6yJIfbgx84cOD/b+/M46Oszj3+fbKwBFkU\nkEUkAakSwCpeRVAUtNdbBJde7624VqnX5boh1d5alQra24teqyCtvcVakFZwobVuoEIFihWCCyhb\nrchMQoGAgCGBQCDhuX+c900mk5nJJPO+M5PJ+X4+88m7zXvOwFmf85zfw4GtW2tdPKdSJ/PweXY2\n37zhBgLFxeTn59cu/NiQgp5jy3xLJNICcBju4m9uVq6ni7/pvuErGvF4AZWJyDHAXzAmm12YNcmM\nJdkLkuFun+127eI+aKDy6bp7+pE/qwVUj1ZX5jOCOOWfn/iXJxCEYScM82QNoCVs+IpGPF5AHYBD\nGFe4a4HOwAsaY5NLXAmLBIF9GHf2I6rawEs2VXLQqaI4EOD7Q4cyYt8+cjELv/mYlufK9u05+eab\nOW3oULvg6xExvIASLvMikoXZ3/IPjRBM3i4C+0SMBWC/Nn8lU+Y5XjxbBFbVA6pao6rVqvq8qj6d\naOPv4G6XHxqp8U8lqdCmKQ4EmHnRRdwXReXzxH79fG38rRZQHR6V+YnYQDDJpZEFYD/UP9NV5jle\nGjUBiUgFjioi0AYT5OKAqnZKMG0hPjE6IDO1gEJNPzMnTWLQl1/yIrCahiqfvYYO5cYbb0xNRlsZ\niZZ5EekDjAX+GxNYxuI3KYr/u349/O1v5rhFeP2EEfc+AKgVyrocGK6qUQO5x/muLUAZEHW7fGuZ\nJhcHAjx08sn8X3V1g01fzwK7unfnwhkz6NmrlzX7eEg80+TmlHkReQXT+HcG7rUmoCSQgvi/FRVm\n8XfSpDq5h3RZ+PV8I1jYy9eo6tBm5azuHb00ZLs8cKeGbZfP5Erijv5Ld+zgt3fdxbA9exrY/acB\nH3TuzG/XrCG/X79UZjcjibeSOM/GVeZFZBxwsareKSKjMR3ApRGey9iynRIa2QHs9eavcLmH6dP9\nl3toCp6JwYnIFSGnWRh3uEMJ5A0AVd3h/P1KRF4FhgEN9FIyVQuooKCAVStX8up99/Hqnj10wOw0\nuhGYg+kEVmRlUTh+PIHiYgLFxZSWltKzZ09f8mO1gOpIsMyfC1wmImOB9kBHEZmrqt8Lf9BqAXmE\na/tfuBBKShqsAfix+Ou6fdbUmNF/hw6pbfz91AKaHXJajdkV+ayq7mpyanXvTOvt8snSpokk97AR\nswbgyj284RoYfcZqAdW77kmZF5FRWBOQv8Rh+1+5dSXnzzmf6qPV5Gbl8pcJf0lI+TNd5B5i4asJ\nKFFEpB/wKmahLQfjYjctwnMZWUmCwSCrVq5k0Y038szhwxHt/l/37k2/iRM503EpsLZ/72mKCaiZ\n77cdgN/EYft3ZwDu4m8iMwBX7uH229PL7TMczzoAEXk61n1VvbuJeYub0EoyZUriWj5evMMrpl53\nHfe98AIdQq65dv+irl35/syZdtHXZ2LMAHwv87YD8Ig41D8rqioo2laU8Oav0MlGTo7pc2KEHEgp\nXnYAs4BBwEvOpe9iLBUrAVT1+cSyGjPtlGwE88skEer2OeummzhlyxaOUmf+yQdGZ2Xx/ObNSV/0\ntSagetd9L/O2A/CQJG3+Cpd5fuaZ9N3x62VEsG8CI1W12nnx/wErVPW2BPPY6nBH88WBALJ1a0S5\nB+nXr3bR147+U4Yt8y2FRsI/ehX5qyXLPcQinhnA58AIVd3rnB8LrFLVU3zPXAZKQRQHAvzgwgsp\nDAYbuH1efcwxzPzsM+vymSRizAB8L/N2BuABcSwAe2X/T0e5h1h4OQOYBqwRkaWY3bvnA1MSy56h\n
Mb2UTMJd+F1x333M3b49YpSv3n37EiguRkXsyD+1NLvMi0hbjIhcG0z9WqCqU33KZ+umkfCPru//\nwmsWUlJe0uzNXy1d7iEW8WgBzcbESH0V+ANmZOSV3T8t9VL80KYpKCig6KWXOG77dh7HyDzsdv7O\nwXQGvYYOrX022VgtoDoSKfOqWgVc4GwaOx24WEQypLlIM1z1z9zcBuqfoYFfxs4bm1Dj3+KCvDSB\nqDMAJy5qmaruU9VSESkHvgMMEJFfqOrhRBJuql5KS9UCCt3xu2fhQn4KDUb/bpSv88aNo7S0NPmZ\ntADelXlVrXQO22LqmLX1eE0jm7+8sP2HWphyc40JefDgzBn9A9EjggFFQG/n+HTMgPVe4HngN9G+\nF+8HeMV57yjg9SjPaCYQ3LJFrygo0AdBp4AGw6J8XVFQoMEtW1KdzVYHYVGTvCrzmJn1GqAc+J8o\nzyT3x2YScUT+2rZvmw6YMUBzpuboab86TcsPNT001+LFqtnZJihfbq7qs8+mLsJXUwkv29E+sdYA\n2qvqduf4OuC3qvpzx26/NpFOx9FL2amqax29lKiLFamQgvDyvHTHDj6aPJm5wSAfAgcxMs93YQLO\nvp+Vxa0/+xmB4mJWFhX5JvVgz+OSgvCkzKvqUWCoiHQC/iQig1S1ganTSkE0kzhs/2PnjSW4L0jB\nsQUsvGZhk80/Lc3rp7lSELFGMetCjj8Bvh1y/lk8vUuMd/8MKAG2ADuA/cDcCM/50Tk2ilcxagOB\ngF4/YoQ+BPqTkNH/fud4P+h9l13mS9pNxcYE9qfMA5OBH0S4nrTfmXFs26Y6YEDUGUCicX/Ly1V/\n/eu60X9OjuqSJV7+AP8JL9vRPrFmAO+JyMtOA30s8B4YFU8gIfu/qj4APOC8z90u30Asq6UjquSs\nWcP9xLD7X3UVwWDQev2kBwmXeRHpholwt09E2gMXYbyKLF5QUQFjx0IwCAUFZg0gbFie3zmfgs4F\nBMuCTdb9374dRo2CQCCD7f4hRN0H4Oigjwd6AS+r6jbn+lDgeFV9x5MMZKheSjAY5CfXXEP+ypX1\nxN66YVqDjQUFPPnee9bnP4WE+0p7UeZF5FTMmoH73/6Sqv53hOdabNlOKY1o/4Tu/C3oUsDyG5bT\nu1PvuF5dUQFnnAGbN5vznBwT3D2dTT/RSHgfgFM6Xwx76SWq+qYH+QtNZzmwvLHnWpoWUKzR/99z\ncviuY/e3Pv/pgxdlXlXXAWd4nTcLpoU+cMA44rsRWMICv4d6/xSXFVNSXhJ3B1BUZEb+LgUFLbPx\nbwpxh2R0eMSXXMTBVA+20sT7jkT80oPBIC/On8/tI0bQ+9AhnqAuru9U4DdA37FjuerqqxntxAXw\nKu1EsPsAopKyMm8JwfXJvPhic75oUcSgLweOHGBg14HkZuXGbf6pqDA7fe++u27Rt39/WL48sxt/\niG8ncCi+SedmCqLKR5Mn8/LOnRF3+37Rpg3fsXb/loQt8+lAIxFYQk0/A7sOZNG1i+JS/gz19a+u\nNtdycmDWLOgd38ShRdPUmMDDVHV1wonGuV2+pWkBxbL7u8Hdp1x2Gf/72mv+ZcISN3HGBG5SmXc2\nOM4FegBHMYFkGshL2zWAJhBHBJYlW5Yw5vdjqNGaJgV9CdX4AcjONq9PR4nnpuClFhAicg5QAOSI\nyEAAVZ3b3MypapWIXKCqlSKSDfxVRBZ50bmkEuv1kzkkUOarMW6fa0XkGOBjEXlXVZMT2i0TKSqK\nqcVQUVXBpLcnUaOmFT+l6ymNmn7cgO6hZp/CQpg5M71i+/pNo2sAIvI7zAB2JHCW8zkz0YQ1jbfL\nN9UmHY/df1NBAf/1/vtcdfXVMRt/uwaQehIp86paqqprneP9wCbgBJ+ymvk0osRWUVXB/PXz2bTb\ndBA5WTlMHzM9puln+3bj7TNmTP1+ZeZMo/DZWhp/iG8GcCYwyO
v5qrO78mPgJOCXqvphrOfTWQuo\nMbu/9fppcXhS5kWkACMpUeRBnlofjSixbS/fzqg5owiUBcjNzkVqhMHdBzPshMhO++6o/9ZbYcuW\nuuvZ2Znt6x+LeDqA9UBPzOYYz9A4t8uHSkFMn56YFMTo0ctYtsxjqYfSUhY+/TT5X37J7ZgVw6nO\n5w6MU7nr9bNs2bJ65p9o73dJplTC6NGjUy7VkAZSEC4Jl3nH/LMAmOjMBBpgpSBi0IgSW0VVBaPm\njGLz18ZpX2qEX13yK8YPHh9x9B+6wcudTIDx9pk1q+WbfZorBRFPQJilmFHMaqDKvR5p41ZzEZHJ\nwAFVfTLsetovlBUHAkwdNIiZhw5FDe5+3hNPMHzECDvyTzNiBIRJqMyLSA7wJrBIVWdEeSbty3ZK\naST+YuiiL8CAYwfwya2fRG38hw2DbdvqrmVnQ79+xtUzE7194l0EjkfLZFSkTzw6EzHe2Q3o7By3\nx3gEjY3wXHSxCx+JR5smEAjo/HnzdGyPHlFVPsf26KHz583TQCDgadp+YLWAvCvzGC+gJxt5Jnk/\ntCVRXm5kOAsLjRAPqA4ZUqv3U36oXBd/uVgLZxYqU1CmoP1n9Ndt+7ZFfVX//nWvAnO+ZEnLUfZs\nDtHKdvinUROQmp26XtMLeN5ZB3C3yy/0IR3f2FZSwrxbbmHo/v3kAldSp/KZD2xp145nVq60Ug8t\nkETKvIicC1wLrBORNRjnhgdU9W2v8pexRLLThNj9Q23+7sg/JyuHWZfMqt3t64YJ6NoVxo1raPLp\n08e4eGbiqL85xGMCGo5p2woxfvvZGHNNJ98zl6bT5PdXrODZb3+bZw4ebBDU/WWMv/+d55zDwy+8\nYM0+aUwME5DvZT5dy3bKiGanGTKEij8vpGjfRm5941a2lNWt3mZLNkOOH8KKCSvgcEeWLjUOQ8XF\n5quHD9d/VSabfMKJ1wQUTwfwEXAVJoDLmcD3gJNV9cdeZLSRtGsrSbpoATW22esIcOikk7hr8WI7\n+k9zYnQAvpd52wE4RHPNcVZntw86kVGvjKs36gfo36U/0y94Dt12FhzpwP331zkLhZMpC71NId4O\nIC4tIFXdDGSrao2aeKljEs1gU0kHLaDiQIBfTpzIoVWrAPg+ZrQ/ExM66giwpkcPznz0UVSapyBg\n9wGkB+lQ5jOeUIf80Ma/Tx8q/ryIJf2U816+mM2lpdRsHgVfXohUnEDvr65nco/V/Oiq0Vw+tgOX\nX96w8W/TxliPBgwwJp/W5t8fL/G4gVaKSBtgrYg8jnGNa6qIXD3i3S6fTix46SVmT5jA0IMHGUx9\nm/9UjMTzmvbt+fErrzDyvPNSmVVL4jS7zIvIc8AlmIh334w7RXckDEblcqPjEX322bUtV0VVBUXb\nzDODug1i4+6NtcfF+4oZcvyQZgU+TyYV2ytYv+Bv5Hfcw8YHfg+lfRnEfjYyiEo6wImd2DPzEh54\n/BF2HtiJdhJ4YQ183R+A7DbK9sNZTPhl9AFW//5GK27v3gahgi1hxGMCygd2Ymyhk4DOwDPOCKl5\niYr0BHpqyHZ54HIN2y6fLlpAjdn8HwauzMpi4tKlnHv++Yll0pI0YpiAml3mRWQkdRHuonYA9UxA\n4Vo3bdrUGbALC2HaNCpqKjnnix+z/mDQPEIOhzHqZW3JoZqj9OvUl7dO+gklVV9RecLx5G3bxaC8\nvmzsLlBczCDpzkZtxr28vmzMz4O8vNqOp/JIJXm5eQ06ovB7xTs2MiRQScecPLZ36sao0bkED34D\noYYjtHV+SxWHaYvZRaOQVQVHzT2yDjvHsWfUhYXw2GOQl9e6TD3R8GwNwHlZe6Cvqn7uReYivP9P\nwExV/XPY9ZR2AMFgkFUrV/K7SZMYunMnuRh7fz6mE3gCM335L+yib0skViVJpMw7HcgbcXcAoUFO\norCyD5w/AaqznQtKXZvoHi
vk1sCR7LrvtamGwzkNj5t1T+p3PFD/PPxeW3KorqmmXxm89BJcdv5w\ntv1xORxtE+UHEOE85MchDfpG2+hHxrM1ABG5FBMQ+23n/HQReT3xLNa+v4A02y7v7th947XX+N1N\nN9U2/q7Zx9X5OYLTAfTuzU3TpnnS+Ns1gNTjd5lvwJAhRuPGpU2bho/sgoG7Me2gmkbZPc6tobad\nPJJt/roft+EOP27WPeCw1u+kQs/D71VpNTXZsPk4OPsW2DZgPXTfaEb42YcwtecoZLnHGnZ+lDZt\nlJwc6N9feP1149a5ZIn5FBXBpZda+34ixLMGMAUYBiwDcMw2nri3xLNd3iWZWkClpaWs+/RT3rz3\nXs6qqalt/J/DmH3mYBZ/P8zKouDCC3lg1izr8ZNZTMGnMh+Rjh2NvPFqRwy3sNCsalZWwo9+BJs2\n0fEwfPAcrM7PhuoaCr+CTb3M8YllMO46CHaGbIWqREb5jdxryjtqZyPizFxy9sOE88guHUzNccWw\nuxAUcnp8SfWuk+BIe/p27c7U717NVyXdGdjtFP7pmx0oKalvy28NbpzJIp4OwA1wHXotYf811Qxi\nSgAADB5JREFUZ7v8AuB3qhpVID+ZWkAzn36at2bNotPWrewpL6czMAS4FGPnPxV4DOgD3HXMMVzy\n059y6mmn1Tb+XmnXuFgtIG/Pm6AF5EuZD6eBFtC3vlV3023lRo82HUNlJR3z8viW2zkAvUM6ik/a\nwIbjoe+OSjZVltTa8gvz+rLJseUXSnc2hdj5m3Rv5k+gZKvpeLpBZfss8g4drdcRhd87sQzGTcgl\n2KmGnKwcqqmh4NguvHXbM2yt+bp2raCwW2GtmmekIC62wW8cP7WAngP+DNwP/BtwN5Crqrc1PZv1\n3jsX2K2qP4jxTFJ8pYsDAR6/5Rb2LF/ON44c4T8wfv2TMat5D1Lfz39Njx5c/9RTVt+nhRNjETih\nMu+YNd9Q1VNjPNOy9gFUVNR2ROTl1c1SoP6MJexexemFbDhYQt9OfSkpL2Fw98Fp76mUCXi5ESwP\n0wb+C8YS+A7wqKoeSiBz52L0f9ZRa8lsuF3ez0pSHAgwZ/JkStesYdPnn3NqTQ3dMKaeKcDjmEZ/\nGmaa9DDwEI6r5zvv+ObquWzZspSoQqYq3VSmHaMDaHaZF5F5wGigK8aT6GFnH0H4cy2rA7C0KDyL\nCKYmcMuDzscTVPWvmO31Sac4EOCR669nzwcfIKoUAL/C2PddO/9YjJ3/Ycwq+VGM18+H2dncPHu2\n9fPPcBIp86p6jfc5slj8IeoMoDGvB/VQDjoaiYySigMBfnHPPexctYp9NTUczM6m/cGDHNi/n0Gq\n3Et9M88k6nR8Qt07pznvK2nfnktnz+bfx49P9GdZ0oTwUVIyy7ydAVj8JGETkIh8BWwF5mNcNOuv\niPmjEhqeBw1u2cIv7rmHd5eMpKD9YxzMziavqorKQ4egXTto25aO1dVUi3AwO5uO1dUcrK5mb3k5\nZ0GtWedZYDkP83um8iz1bfuumecoZlfvQ86PPQCUZGfT9YILrKdPBhKhA0hambcdgMVPvNgH0BN4\nAOMIMwO4CLNou9yLiiAiz4nIThH5LNZzT44ezZTXX+ezyh8yb88ehuzaxfH79jGjqooh+/aRv2sX\nXffuZZpzr+vevfy8vJxzMJE8XLPOzcAnTOFl4FGMBsUcjD9/FmZxNwvT6L8NFHXujF5+OT//4gt+\nnURhN7sPIKV4UuZFZIyI/E1E/i4iP/IprxZLwkTtABwRrLdV9QZgOLAZWCYid3qU9mzg24099LOS\nEjo4xx2oa7zdhrxn2Hmk46nOXzCjfLfRd237R4HPMJ3F7e3bM+T663m3rIyn/vSnpI/6165dm9T0\nUp1uqtMOxYsy78S4+AWmbA8GrhaRgbG/lTzsACPz020KMXcCi0hbEbkC+D0mxO3TwKteJKyq
7wNf\nN/ZchwjnbuPtHoefRzo+4nzfHeU7exCZjHFF6lRYyMvXXssjGzZQ0L9/Qr8tEcrKylpVuqlOOxwP\nyvww4AtVLVbVI8CLwOXe57R5tMbGsDX+5niJ6gXk+OkPARYCU1V1fdJyFcIB6ncCbuOdE3JM2Hmk\nY9fOdCWm0Q8CB7t2ZeDIkcx46ilr37d4VeZPwKwjuPwD0ylYLGlHLDfQ6zBt50Tg7pBdkYKJN+l7\nRDCAe7p1Y/ru3eBkJtRrZzJQ4WRoUoR77vEtWVmUdekCe+HBHj3IHz6cp2I0+sFg0NffFItUpd0a\nf3ME0qLMWyzJIi41UN8Sb0Q1UUSsm4TFV+LxlGgKTjjJKao6xjm/3ySjj4U9Z8u2xVc82QjmM67+\nYES8rpwWSxL4EBjgDG52YEJLXh3+kC3blnQgocheieBsmf8AOFlESkRkQqryYrF4harWAHcC7wIb\ngBdVNUq0WosltaTUBGSxWCyW1JGyGUC8iMjjIrJJRNaKyB9ExNeFuFRs4hGRPiLynohsEJF1InJ3\nMtINy0OWiHzia+CThml2FpFXnP/fDSJydhLTniQi60XkMxF5wYkBnFRE5C7nt68TkWmNf8Pz9O8V\nkaMiclyS0sv4uuykm9L63JS6nPYdAGYqPVhVTwe+AH7sV0Ip3MRTDfxAVQcDI4A7UrB5aCKwMclp\nzgAWqmohcBqQFFOJiPQG7gLOcBwQcjC2+qQhIqMxoSZOdWSjn0hy+n0wO52Lk5hsa6jLkPr6HHdd\nTvsOQFWXqKrr7r8KE4/FL1KyiUdVS1V1rXO8H9MQnuB3ui5OYzAW+E0S0+wEnOdKJatqtaqWJyt9\njBptBycwUR6wPYlpA/wnME3VxFFU1d1JTv8p4IfJTLA11GVIbX1ual1O+w4gjO8Di3x8f6RNPElr\niCFlMZLdxiCZC0L9gN0iMtuZrs4SE4jdd1R1O/BzoATYBpSp6pJkpB3CycD5IrJKRJaKyJnJSlhE\nLgO2quq6ZKUZgYyvy5CS+tykupxqN1AARGQxRrqn9hLmBzyoqm84zzyICdU3LwVZTArShBjJHqY5\nDtjpxL0dTQy3XI/JAc4A7lDVj0RkOiYClwfRn2MjIl0wo8F8YB+wQESu8bpsxSjXD2F+/7GqOlxE\nzsLIVXmmQdJI2g9gzD+h9/xOt1XVZUh+fW5OXU6LDkBVL4p1X0RuxExrLvQ5K9uAviHnfZxrviNx\nxkj2gXOBy0RkLNAe6Cgic1X1ez6n+w/MKPQj53wBkKyFun8GtqjqXgAR+SNwDuBpgxSrXIvIbcAf\nnec+dBZju6rqHj/TFpEhQAHwqZitzn2Aj0VkmKru8ivdkPRvJMPrMqSsPje9LqtqWn+AMRh/6q5J\nSCsbowCZD7QB1gKFSfqdc4EnU/xvPQp4PYnpLQdOdo4fBh5LUrrDMBqA7TCjpDmYmUgy/61vwegN\ngTEHFafo/zyAmYkkI61WUZed9FNan+Oty2kxA2iEmZj/wMWONssqVb3dj4RUtcaR/n0Xsz7ynCZh\nE4+YGMnXAutEZA1RYiRnIHcDL4hILrAFSMpmQFVdLSILgDUYodg1wKxkpB3CbOC3IrIOE7rC7xlX\nNJTkmf0yvi5Dy6rPdiOYxWKxtFJamheQxWKxWDzCdgAWi8XSSrEdgMVisbRSbAdgsVgsrRTbAVgs\nFksrxXYAFovF0kqxHYAPiEhFgt9/xdEQQUQCXsv1OtozZ8TxXKNpi8hiEensXe4s6Yot15mH7QD8\nodmbK0RkEJClqsFE3+UB8aQ9F7jD74xY0gJbrjMM2wH4jIj8rxMU4lMRudK5JiLyjIhsFJF3ROQt\nEbnC+cq1QKh2SINdmiJyloh8ICIfi8j7IvIN5/oNIvKqiLwrIltE5A4xgU8+cZ7vEvKa74nIGjEB\nUc5yvn+ck591IvJsaNrOez907v1HyHveIELMW0tmY8t1
hpAqrYpM/gDlzt9/A95xjo/HBN/o4Vx/\n07neA9gLXOGcL8MEzXDfFQCOC3v/MZjRFMC3gAXO8Q3A3zH69t2AMuBm596TwN3O8VLg187xecA6\n53gG8JBzPBaocdMGujh/22F0dI4Nyc/nJElPxn5subbl2rtPS9ACasmcC8wHUNVdIrIMI0Q2EnjF\nub5TRJaGfKcX8FUj7+0CzHVGSEp9VdelqloJVIpIGfCmc30dcGrIc26+VohIR8feeT7wr871hSLy\ndcjz94jId5zjPsA3gNXO+VdAbyD0eUvmYst1hmBNQMnF1UaPxUHMaCQWjwLvqQkleGnY81Uhxxpy\nfpT6FSo8H0dpiACIyCiMfO/ZasL5rQ1Ls52Tb0vrxJbrFortAPzBtTGuAMaLCdLcHTMtXQ38Ffh3\nx2baAxgd8t1NwIAo73PpRJ22eXMVNMcDiMhIYJ+qVgB/wdhqEZGLMSMygM7A16paJSa26fCwd/UA\ngs3Mh6XlYMt1hmFNQP6gAKr6qogMBz7FjER+6EyZ/4AZeWzAhK37GBOZCuAt4ALgvZB3fSoi6hy/\nDDyOmSo/5DwfMx9Rrh8SkU8wZcCtbFOB+SJyFfABJmQiwNvAbSKyAWMXXem+SET+CSPrG2mkZcks\nbLnOMKwcdIoQkQ6qekCMP3IRcK5TidphKsm52gL+c8SEcnxNVZc2+rAl47HlumVhZwCp403HfS0X\neESdcHyqekhEHsYEsP5HKjMYJ+syvZJYmoQt1y0IOwOwWCyWVopdBLZYLJZWiu0ALBaLpZViOwCL\nxWJppdgOwGKxWFoptgOwWCyWVortACwWi6WV8v/VSqeHPvaK7gAAAABJRU5ErkJggg==\n", + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "f = plt.figure()\n", + "f.add_subplot(2,2,1)\n", + "cvglmnetPlot(cv1)\n", + "f.add_subplot(2,2,2)\n", + "cvglmnetPlot(cv0p5)\n", + "f.add_subplot(2,2,3)\n", + "cvglmnetPlot(cv0)\n", + "f.add_subplot(2,2,4)\n", + "plt.plot( np.log(cv1['lambdau']), cv1['cvm'], 'r.')\n", + "plt.hold(True)\n", + "plt.plot( np.log(cv0p5['lambdau']), cv0p5['cvm'], 'g.')\n", + "plt.plot( np.log(cv0['lambdau']), cv0['cvm'], 'b.')\n", + "plt.xlabel('log(Lambda)')\n", + "plt.ylabel(cv1['name'])\n", + "plt.xlim(-6, 4)\n", + "plt.ylim(0, 9)\n", + "plt.legend( ('alpha = 1', 'alpha = 0.5', 'alpha = 0'), loc = 'upper left', prop={'size':6});" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "We see that lasso (`alpha=1`) does about the best here. We also see that the range of lambdas used differs with alpha.\n", + "\n", + "### Coefficient upper and lower bounds\n", + "\n", + "These are recently added features that enhance the scope of the models. 
Suppose we want to fit our model, but limit the coefficients to be bigger than -0.7 and less than 0.5. This is easily achieved via the `upper.limits` and `lower.limits` arguments:" + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "metadata": { + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } + }, + "outputs": [ + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYsAAAElCAYAAAAV9s4VAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzsnXd8VUX2wL+TkAAhpBdKAiSUhEivKhYQFaxYWHRhUfTn\n6loQy+pakUXsyqqoiCioCIKyiggiIBKVIhBIoYUSElJJL5CevPP7475gFklIyMsryXw/n/vJm/vm\nzZx7ct87d86ZOaNEBI1Go9Fo6sPJ1gJoNBqNxv7RxkKj0Wg050QbC41Go9GcE20sNBqNRnNOtLHQ\naDQazTnRxkKj0Wg050QbC41do5SqVkrtUUrtU0pFK6UeU0opW8t1PiilHlZKHVBKLTnj/OVKqQLz\ndUYrpTY0U/+LlVK3NEfbmpZPG1sLoNGcg2IRGQKglPIDvgQ8gFlNbVgp5SQipqa20wjuB8aKSPpZ\n3vtVRG6s64NKKWcRqW4+0TSa+tEjC43DICI5wL3AQ2D82CulXldK7VBKxSil/m4+r5RSH5if4tcr\npdbWPFErpRKVUq8qpaKAiUqpUKXUOqXULqXUL0qpPuZ6fkqplea2dyilLjKfv9z89L9HKbVbKdXh\nTDnNo5+9Sqk4pdTD5nPzgVBgnVJqxlku70+jJfNIYL5S6nfgNaWUm1LqE6XU7+a+b6xPD+b33lNK\nHTSPVgJqnR9rvoZYpdTHSimXWvp52XyNO5VSg5VSPyqljiil7mv8f03TYhARfejDbg+g6Czn8gB/\n4O/AM+ZzrsAuoDtwK7DGfD7QXP8WczkR+Gettn4CeppfjwA2mV8vBS42vw4GDphfrwYuMr92A5zO\nkG0IEAu0AzoA+4CB5veOAd5nuZ7LgQJgj/l42nx+MbC6Vr2XgMnm157AIaB9PXq4GVhvPt8ZyAdu\nAdoCybWu+zPg4Vr6udf8ei4QY75OP+CEre8Hfdju0G4ojSNzNdBfKfUXc9kD6A1cAnwNICKZSqnN\nZ3xuBYB5VHAx8HWtOIiL+e+VQN9a592VUm7AVuA/SqmlwDciknZG25cA34pImbmPb4BLMQyI4iwj\nCDN1uaG+PuN6b1BKPWEuuwLd6tHDZRhuO0QkQym1yfx+GHBMRBLM5c+AB4B3zeXvzX/3Ah1EpAQo\nUUqVKaU8RKSojmvQtGC0sdA4FEqpUKBaRLLNP+TTRWTjGXWuO0czxea/TkC+mGMiZ3YFjBSRyjPO\nv6aUWgNcB2xVSl0tIocbfyUNpviM8q0icuR/BG24HlQdr8+k3PzXVOs1gKB/M1otOmahsXdO/6gp\npfyB+cA886n1wANKqTbm93vXevqfaI5dBAKjz9awiJwEEpVSE2v1McD8cgMwo9b5gea/oSKyX0Re\nx3D3hJ/R7G/ATUqpduaRy83Ar+d15X9mPfBwLZkG1Tp/Nj38Ctxmjml0BsaY6x8CupsNL8BUINJC\nMmpaKPopQWPvtFNK7cFwuVQCn4vIf8zvfQz0APaYn66zgJuA/wJXAPuBFGA3UGj+zJlplqcAHyql\nnsP4PiwH4jAMxftKqVjAGeOH9wHgEaXUGKDa3P662o2JSLRS6lMMQyLARyISV0f
f5+LM+nOAt5VS\ncRhGNBG4sS49iMi3SqkaPSQD28wyliul7gJWKqWczbIuaICMOkV1K0aJ6P+/puWhlOogIsVKKR9g\nBzBKRLJsLZdG46jokYWmpbJGKeWFEbCerQ2FRtM09MhCo9FoNOdEB7g1Go1Gc060sdBoNBrNOWkR\nxkIpNV4pFa+UOqyU+pet5bEl5mmSe5RSq20ti61QSnkqpb42p7nYr5QaaWuZrI3WgYFS6lFlJKGM\nU0otVUq52loma2BOC5NpnjlXc26gUmp7rVQuwxrTpsMbC6WUE/AeMA64APirUurMue+tiRnAAVsL\nYWPeAX4Qkb7AQOCgjeWxBa1eB0qpLsB0YIiIDMCY0HO7baWyGosxfhNr8zrwgogMBl4A3mhMgw5v\nLDDy+RwRkePm1bbLgQk2lskmKKWCgGsx5t23SpRSHsClIrIYQESqWlt6Cq2D/8EZ6GBesOgGnC3j\nb4tDRLZg5AKrjQkjpxiAF3Bmqpp6aQnGoivGwqsaUs3nWiP/AZ6gdS+eCgFyzBlb9yilPlJKtbe1\nUFZG6wAQIxX8WxgLEtOAAhH5ybZS2ZRHgTeVUskYo4ynG/PhlmAsNJzOA5QpIjHUn7CupdMGI/Pr\n++acTyXAU7YVyepoHQDmdTYTMDLwdsFIBjnZtlLZlPuBGSLSDcNwLGrMh1uCsUjDyLxZQxCNHF61\nEEYBNyqljmFkGh2jlPrcxjLZglQgRUSizOWVGD+crQmtA4MrMbLr5omxcdQ3GFmGWyt3isgqABFZ\nieHCbzAtwVjsAnoppbqbZzrcjrHnQKtCRJ4RkW4iEoqhg59F5A5by2VtRCQTSFHmTYyAsbSygL/W\nwWmSgQvNSR0Vhh5aU6D/TA9DmlLqcjA2vwIalS3Z4dN9iEi1UuohjCyhTsAnItKabgjNn3kYWKqM\n3d+OAXfZWB5b0Op1ICI7lVIrgWiMJJTRwEe2lco6KKWWYWRb9jXHKF7A2CTrXXPyyDKMXScb3qZO\n96HRaDSac2FzN1RDFtQppUabF5LsU3/e9Uyj0Wg0zYxNRxbmBXWHMXyJ6Rjxh9tFJL5WHU+MPPxX\ni0iaUspPRHJsIrBGo9G0Umw9smjIgrrJwH9r9jrWhkKj0Wisj62NRUMW1PUBfJRSm5VSu5RSU60m\nnUaj0WgAx5gNVbPA6AqgA7BdKbVdRI7aViyNRqNpPdjaWDRkQV0qkCMiZUCZUupXjMRofzIWSik9\ntUuj0WgaiYicM+ODrd1QDVlQ9x1wiVLKWSnlBoyknoU1ItKqjxdeeMHmMtjDofWg9aB10DA9NBSb\njiykjgV1Sqn7jLflIxGJV0qtB+KAauAjEWmNq1EbRFJSkq1FsAu0Hgy0HrQOamiqHmzthkJEfgTC\nzji34Izym8Cb1pRLo9FoNH9gazeUxsJMmzbN1iLYBVoPBloPWgc1NFUPLSrdh1JKWtL1aDQaTXOj\nlEIcIMCtsTCRkZG2FsEu0How0HrQOqihqXrQxkKj0Wg050S7oTQajaYVo91QGo1Go7EY2li0MLR/\n1kDrwUDrQeugBh2z0Gg0Gk2zo2MWGo1G04rRMQuNRqPRWAxtLFoY2j9roPVgoPWgdVBDU/Vg89xQ\nGliwAGJiICsLsrONv1lZUF7e+Laqq8HZ2fIyOhpaDwYN0YNSJrp1209ExFb69t1GRMRWvL1PnH7f\nmWqcpRonTDhhamaJLU9sHDiVNkPD0fdw2TMLm6Fh+0THLGzMihXw/PPwyCPg7w8BAcbh7w/t29ta\nOk1LRcREbu5S8vKWcerU77Rp44+7+8V07DgKd/dRuLp2AxTO36zA9YWnqXzsKUz9B2Lq1QdcXW0t\nvl3Q1t2dtu5uthajyTQ0ZqGNhQ3JyIBBg+D772HECFtLo2ktnDy5hyNHHkKkim7dnsLT8xJcXQP+\nXHHxYnjuOdiwAS64wPqCaqyCDnDbOSLw97/
Dvfda1lBo/6yB1oNBbT1UVuZy+PD9xMVdS+fO/8eQ\nIb/j73/L2Q3FBx/ACy/A5s0Obyj0vWCgYxYOyqJFkJYG33xja0k0LR2RajIyPiYxcSYBAZMYMeIg\nLi7eZ69cVQVz5sBnn0FkJISGWlVWjf2i3VA2ICkJhg+Hn3+G/v1tLY2mJVNYuJ0jRx7C2bkDvXvP\nw919YN2VExNh6lRo1w4+/xy6dLGeoBqbod1QdorJBHfdBf/8pzYUmuajvPwEBw9OY//+iQQHP86g\nQb/UbShEjJHEiBFwyy1GjEIbCs0ZaGNhZebNg4oKw1g0B9o/a9Ba9WAyVZKS8jZRUf1xdQ2gtHQh\ngYGTUaqOB8e8PLjtNnjjDfjpJ3jsMXBqWT8LrfVeOBOdG8qBOHQIXnzReIjTawA0liY/fzNRUYPJ\ny/uBQYN+o2fP13F2rmdq57p1MHCgMYqIijJeazR1oGMWVqKqCkaNgjvugAcftLU0mpZEWVkKCQn/\npKhoB716/Qc/v5vqHkkAHDkCjz5qPL28/z5cfbX1hNXYHTpmYWe89hp4eMD999taEk1LwWQq5/jx\nV4iKGoSbWzgjRhzA3//mug1FURE8+SRcdBFcdhns26cNhabBaGNhBWJi4O23jemyze0O1v5Zg5au\nh/z8Teza1Y+iot8ZOnQXISH/PqvLKTIy0phVsXgxhIcb+WT27TOMRtu21hfcBrT0e6GhOPw6C6XU\neOBtDMP1iYi8Vke94cA24DYRcZjVCeXlhuvpzTchONjW0mgcnaqqQhIS/kle3nr69JmPr+919X9g\n/3544glo0wa++86Ys63RnAc2jVkopZyAw8BYIB3YBdwuIvFnqbcRKAUW1WUs7DFm8fTTcPAgfPst\n1OdG1mjORU7OGo4cuR8fn+vo2fN12rTxqLtyWho89ZSxmOfVV2HKlBY3y0ljGRoas7D1yGIEcERE\njgMopZYDE4D4M+pNB1YCDvVYtG2bMfqPjdWGQnP+VFbmcvToIxQWbiM8/HO8vcfUXbmsDObOhbfe\ngvvug/h46NjResJqWiy2ftToCqTUKqeaz51GKdUFuElE5gMO85NbXAzTphmTTQIDrdev9s8atBQ9\nZGWtZNeu/ri4+DF8eFzdhkLEGL5GRMCuXbBzJ7z8MpG7d1tXYDukpdwLTcXhYxYN4G3gX7XK9RqM\nadOm0aNHDwC8vLwYNGgQo0ePBv5QljXKTz0F3btH4usLYL3+Y2JibHK9umzZcmVlPl98cROlpYlM\nmbIST8+L667v5wePPELk0aMwfTqjH3/89Pv6fvgDe5HHVuWYmBhqiIyMJCkpicZg65jFhcAsERlv\nLj8FSO0gt1LqWM1LwA8oBu4VkdVnac8uYhabNhmjirg48K4jX5tGUxclJYfZu/cGfHzGERr6Os7O\n7c5eMS/PyAy7fDnMnGnMy27jCM9/GnvCUdZZ7AJ6KaW6K6VcgduB/zECIhJqPkIw4hYPnM1Q2AuF\nhXD33bBwoTYUmsaTn7+J6OhLCQ5+nN693z27oRCBJUugb19jK7yDB2H6dG0oNM2KTY2FiFQDDwEb\ngP3AchE5qJS6Tyl179k+YlUBz4NHHoFrroHx423T/5lD79aKI+ohPX0BBw5MJiJiOV26nO32549c\nTq++aqTr+OAD8POrs01H1IOl0TowaKoebP4oIiI/AmFnnFtQR927rSLUebJ6Nfz6qzH7SaNpKCZT\nFQkJj5OXt57Bg7fg5tb77BU3bjSGrRMnGinE29XhntJomgGdG8pCZGcbedhWrIBLL7WJCBoHpKqq\nkAMHbkekmoiIr3Bx8fpzpdJSY83EN98Yc7GvvNL6gmpaLI4Ss2gRiBixxSlTtKHQNJzc3B/YtWsA\n7dv3pn//H85uKKKjYdgwyMw0hqzaUGhshDYWFuDLL40Y44sv2loS7Z+twZ71UFGRzYEDkzlyZDrh\n4Yvo3ft
dnJzO8AibTEb2yXHj4JlnjJvMx6fRfdmzHqyFpXSQV1nJrqIilmdm8vLx4+w+edIi7VoL\nh49ZODppaUZQe9067ULW1I+IkJn5BQkJ/6RTpzsYPnzv2febMJmM1df79hn7THTrZn1hWzEiwuHS\nUrYVFvJ7URF7i4s5VFJClUnoTHs8itvRJqs9wX1g6EW2ltZ66JhFExCBa6+FCy80prtrNHVRWprE\n4cP/oKLiBOHhn9Cx49CzV6wxFPHx8MMPOlWHFSiorGT3qVNsLyxke1ERvxcV4YYzwUWecMCDwmh3\nTuxwozTDhT69FeHh0KcPTJoE/frZWvqm09CYhTYWTeCjj4xj+3ZwcbFatxoHQqSa1NR5HD8+h+Dg\nfxIc/DhOTnXcLCYT3HsvHD5sGAp3d+sK24qILy7mm5wcVuXkcLCkhIi2HQjM8aQi2oOE1R7kHmnL\n6NEwZowxcaVXL+jcuWXmeNPGopk5dgxGjoRffjHS8dgLkZGRp5f3t2bsQQ9FRVEcOfIgTk7tCQtb\nWPeUWDAMxd//DkePwtq1FjMU9qAHW1OjgwPFxazIyuLr7GyKqqoYUeGPyw4/4ld4kpTgxKhRcMUV\nxjFwYMvb+riue8FRss46JCaTkc7jqafsy1Bo7IPi4gMkJj5PUdHvhITMplOnuzCy7NeByQT33AMJ\nCRY1FBrIKC9neWYmM3btIqu8koH5AQRtCmPXEg+O91Bcfz3MeM/Y5kN7B+pHjyzOg7lzYdUq2Ly5\n5T19aM6f0tJEkpJmkZe3juDgJ+na9UGcndvX/6HqasNQJCYahqJDB+sI24KpMpn4MS+PhRkZROYX\nEp7lR8l3gRz/3osrRiuuuQauvx66dj13W60B7YZqJg4cgMsvhx07IDS0WbvSOAjl5RkcPz6HrKzl\ndO06neDgx+rfmKiGkhJjiJqdDWvWaEPRRBJLS1l04gSL0jNod7Idzus7k/u1P7dc04ZbbjHiD3rG\n4p/Ri/KagcpKY4vUOXPs11DoefUG1tBDZWUuCQn/Yteufjg5tWfEiEOEhMxqmKFISTFWcLZta8y7\nbiZD0dLvh3KTiRVZWYyNjmXg9t0s/66Kwn8MYPDHQ3jtss6kJ7RhypRIrrlGGwq9zsKKvPIK+Psb\nE1Y0rZeqqpOkpr5Nauo7+PtPZNiwWNq1C2p4A9u3G/mdZsww9sduiVNsmpkDxcUsTM/g07RMOmR2\noHBZZwae7Medf3Vm4q8643NzoN1QDWT3biObbHS09nW2Vqqry0hPn09y8mt4e19Jjx6zcHPr1bhG\nPvvMMBCLF8N11zWPoC2UU1VVfJWdzXuJGRw9WYbzT53w2taZ/7umPVOmQEiIrSV0TPRsKAtSVma4\nn/7zH20oWiMmUyUnTnzK8eOzcXcfwsCBG3F379+4Rqqrjelz334LkZF6Gl0DERGiTp7kvcQMvs7J\npt0hT0zfd+POMB/umOLEsNf1wMxa6JhFA5g509hnZvJkW0tyblq6j7qhWEIPIiYyM5eza9cFZGWt\n4IILVtK//3eNNxSFhXDDDbBnjzEzwoqGwlHvh/zKSv6TlErIpihGbz7AinntuPrL4Szp3p/s1X7M\ne9uJ4cMbZigcVQeWRscsmpktW+CLL4yEn/oJpnUgIuTmriUx8VmcnNrTp898vL3Hnl9jR47AjTfC\n2LHG0FRP5q8TEeGX/EJejsvgl4ocZKcP/Y73ZOYobybOVXg0YN6ApvnQMYt6OHXKWMk5dy5MmGCx\nZjV2TH5+JImJz1BdfZKQkJfw9b0Bdb5PCT/8AHfdBbNnG/meNGcls6KCN+JOsCgzg1MFCr+dnbm3\nRyB/n+Sq3b5WQK+zsAAPPGBMhf/0U4s1qbFTiop2kZj4LKWlCYSEzCYg4HaUOs8Vl6Wl8OSTxtaJ\nX3yhNzk5C9UiLE/I47V9GRxom4/rTn9udunMk9d5MHCgHsJbE73Oools2
GAsqH3nHVtL0ji0f9ag\noXooLt7Pvn23sG/fzfj738qIEfEEBk45f0Oxdy+MGAFZWRATY3NDYW/3Q2JBBbetPo776t+58+ck\nOuz34b8uF3HyuXCWPuPZLIbC3nRgK3TMohnIz4f/+z9jROHpaWtpNM1Baekxc2qO9XTr9iR9+y49\nd2qO+hCBefOMHbDefNOYPqeDXIChmvmRRbyZkEpipzyCEv14tnM/Hp3SUS9adyC0G+osTJ0KXl7G\nd1/TsigvTzen5viKoKDpBAU92rAV1/WRmWnEJnJzYelSI5+1BhF4a30hLyYnUeJbwnVlQbw6uhPh\nXXWQ357Q6yzOk2++MWY3RkfbWhKNJamoyCEl5TUyMhbRufPdjBgRj6urX9MbXrvWSAT4f/9n7ICl\nZzsBsHxbMffuO0qZbyl3derG29d0or2L9no7Mvq/V4usLHjwQWORraMOj7V/1qBGD1VVRSQl/Zud\nO8Oorj7F8OFx9Oz5RtMNRWkpTJ9uzIJYscJIGGaHhsLa90N1Ndz+YRaTc2K4tZMvJyeMYMGNXWxq\nKPR3wsDhYxZKqfHA2xiG6xMRee2M9ycD/zIXTwL3i8heS8shYsxunDYNLmpF++q2VKqry0lJeYvk\n5Nfx8bmaoUN30r59T8s0vnevsUIzIsIIYutERAAUFgn9308gOzyHtREDuKaX3hK2JWHTmIUydoQ5\nDIwF0oFdwO0iEl+rzoXAQREpNBuWWSJyYR3tnXfMYskSeOMN2LXLSASqcUyM1ByLSEp6EQ+P4fTo\n8SLu7hbaKFkHseukshIiXk2iMDyX/TcMwL+d/Y2yNGfHUWIWI4AjInIcQCm1HJgAnDYWIvJ7rfq/\nAxZfppOSAo8/bkyX1YbCMRGpJitrOYmJL9C+fQj9+n2Dh8cIy3VQO4i9fbsOYtdCBK59KYfUIRkc\nvnKINhQtFFvHLLoCKbXKqdRvDO4B1llSABEjNjljBgwaZMmWbUNr88+KmMjOXkVU1CDS0t4jLOwj\nBg7cyJ49JZbrZO1a4+YYMsTI/+JAhsIa98PrX5Syeegh1o24gGA3+3vaam3fibpw+JhFQ1FKjQHu\nAi6pr960adPo0aMHAF5eXgwaNOj0JuU1yqpdXrUKCgtH869/nf19RyvHxMTYlTzNVRapZtWq2WRm\nLmHYMC9CQl5i796OxMYqavakb3J/69fDhx8yes8eWLGCSJMJtm61i+tvaLm574fkFOG5U148Gt4N\n4vcQGW9f118be5HHVuWYmBhqiIyMJCkpicZg65jFhRgxiPHm8lOAnCXIPQD4LzBeRBLqaa9RMYuj\nR+HCC42HxfDw87oEjZUxmarIyvqS5OSXcXb2pEeP5/Hxufb88zfVRe0g9ocf6iD2WaishNBnkmk3\nOpdD1w7CScdvHBJHiVnsAnoppboDGcDtwF9rV1BKdcMwFFPrMxSNpbramPn0/PPaUDgCJlMFJ058\nTnLyK7RtG0SvXvPw9h5reSOhg9gN5v43TpE5Jpn40UO1oWgF2DRmISLVwEPABmA/sFxEDiql7lNK\n1Wxe+jzgA3yglIpWSu20RN9vvWVMi58+3RKt2Q9nDr0dnerqMtLS3mfHjl5kZ39NePhiBg/+BR+f\nK+s1FOelh8xMY/e6pUuNIPaddzq8oWiu+2FjpInPusTzWs+ehLo1IU2KFWhp34nzpal6sPXIAhH5\nEQg749yCWq//Dvzdkn3u2/fHNFknW4f4NWelurqY9PQFpKS8SceOw7jggq/x8BjZfB3qldgNJi8P\nJn6XxODrXXmkTydbi2MVqqqqiI2NJTY2lv379xMXF8eDDz7ITTfdZGvRrEaryw1VUWHEKR580Phd\n0NgXVVVFpKW9T2rq23h6Xkr37s/RsWMzTlOrnU58yRK47LLm66sFIAJjpxfy+/X7SBgzjM4tdK65\niLB//35+/PFHIiMj2bJlC127dmXIk
CH069eP/v37M3LkSHx9fW0tapNxlJiF1ZkzB7p0gbvvtrUk\nmtpUVuaTlvYuaWnv4e19NYMG/UyHDhc0b6d6JXajWfBZNVsvi2dx/94tzlAUFRWxadMm1q1bx7p1\n62jTpg3jx4/nzjvvZNGiRQQEBNhaRNsiIi3mMC6nbnbuFAkIEElPr7eaQ7N582Zbi9AoKipyJSHh\nWfntNx85ePAuKS4+bJF269WDySTyzjsifn4iixcb5RaKJe+HgwdF2j15WK7btt9ibVqD+nQQHx8v\nL7/8sowePVrc3d3lqquukrlz58rBgwfFdMZ9caqqSiLz8+WVpCS5IS5OvsvObmbJLUtdejD/bp7z\n97XVjCxKS42JLe++C50721oaTVVVEamp/yE1dR7+/jczdOhu2rfv0fwdZ2Ya0+Dy8vRK7EZQXAxX\nP5+L2705LBk2zNbiNImsrCxWrFjB559/TmpqKhMnTuTxxx9nzJgxdKiVQbSsupqfCwrYXFDArwUF\n7C8upl+HDlzk6cnfAgO5qJVtCt5qYhaPPw5pabB8uZWF0vwPVVWnSEt7j9TUufj4XEOPHjMtl+Dv\nXOgg9nkhAhPvLWfdzbv54aIIRnt72Vqk8yI2NpY333yTNWvWcP311/O3v/2NK6+8EmfnP3ZFzK+s\nZG1uLqtyctiYn88gd3eu9PbmUk9PRnp40N75PHdQtGN0zKIWBQXw8cfGIjyNbaiuLiU9fT7Jya/j\n5TWaQYN+pUMHKy1wqR3EXrFCB7Ebydy3hQ1D43mkV2eHMxQiwm+//cYrr7xCbGwsM2bMYN68eXh5\n/XEdKWVlfJeTw6qcHHaePMkYLy8m+PnxYZ8++Lm62lB6O6MhvipHOagjZrFwocgtt9TpymtR2FvM\norq6TFJT35OtW7vI3r03ycmTsVbp97Qe4uJELrhAZNIkkbw8q/RtTzTlfjCZRJ59VsRnRqIM27ZH\nKqurLSdYM2MymWTDhg1yySWXSJcuXWTBggVSVlZ2+v3UsjKZm5wsI6OixOe332TqgQPyTVaWFFdV\n2VDq5kXHLBrAkiXw6KO2lqJ1YaQK/5Tjx+fQoUM/+vdfTceOQ60ngAi8844x/U2vxG40FRWGx+6X\ngBQ8Jp1g1ZDBtHGARUkiwrp165g9ezaFhYU899xzdOrUibFjx1JpMvFlZibz09PZV1zMBD8//h0S\nwhVeXrg4wLXZmhYfs0hKgmHDID0d9Iiy+TFyNy0lKWk27dqFEBLyIp6eVt5NqnYQW++J3WgKC+HW\nWyHzklSKx6Xyy+BBBLdrZ2ux6sVkMrF69WpefPFFKioqeP7557n11ltxdnamoLKSBRkZzEtNpbeb\nG9O7duV6X19ctYEAdMziNF98Abfdpg1FcyNiIivrK5KSZuHq6k9Y2Cd4e4+2viA6iN0k1qyBhx6C\n7o+kcmpkKpF2bihMJhPffPMNL774Is7Ozjz//PNMmDABJycnMisqeCspiY8zMrjWx4c1/fszqKPe\nve+8aYivylEOzohZmEwiffqIbN9+Tndei8HaMQuTySTZ2atl585+EhU1QnJz1/9pfrpVKCkRefBB\nkW7dRH75xe5iN7aioXpITRW59VaRnr1N8n8/JUn3bdsksaSkeYVrAlVVVbJs2TKJiIiQ4cOHy/ff\nf3/6vkuu0/zLAAAgAElEQVQrK5NHjhwR799+kwcOHZIv16+3sbT2gY5Z1MPOnYbremQzphRqzZw6\nFcfRo49RUZFGaOjr+Ppeb/kssA0hLs5Yid2vH8TGgpcX6ORxDaK6GubPh3//G6Y+XEHRswc5INX8\nGjGYbnY4oqiqqmLZsmW89NJL+Pn5MXfuXK6++mqUUiSXlfFacjJfZmVxZ6dO7Bs+nC5t2xKZnm5r\nsVsGDbEojnJwxsjiwQdFZs9umNXVNJzy8hMSH/932bLFX1JS5kl1dYVtBKmoEHnxRWMl9qeftuiV\n2
M3Brl0iI0aIXHqpyMLoXOm8das8d+yYXc56Ki8vl4ULF0poaKiMHj1aNm3adHokkVBSIvfEx4v3\nb7/Jk0ePSmZ5uY2ldSxo7SOLigpjSv1OiyQ010BNuvB3SE5+g06d7mDEiEO4uNgon1JcnBHEDgyE\nPXsgONg2cjgg+/bBzJnGd2PmbBNHLktkVlYmS/v2ZYyd5ccqLy9n0aJFvPrqq4SHh/Ppp59y6aWX\nAnC4pISXjx9nTW4u93ftypGRI/HVMarmoyEWxVEOao0sVq0ynphaG83hqzeZTJKZ+ZVs3x4icXE3\nSnHxIYv30WDKy0VmzRLx9xdZtKjO0YSOWRjU1sORIyKTJxv50d56SyQur1iGR0XJDXFxkm1nT+Ml\nJSXyzjvvSNeuXeXaa6+V7bUCj/tOnZK/7t8vflu2yOzERMmvqH9kq+8FAx2zqIMlS2DqVFtL4fgU\nFUWRkPAoVVVFhIV9jLf3FbYTJibGGE107WqMJoKCbCeLA5GcbGz8t2oVzJgB78038XFBKmP2J/NC\njx481LWrbWJNZ6G4uJgFCxbw5ptvMmLECL777juGDjXW58SeOsWc48f5taCAR4OC+LBPHzzatNif\nMPujIRYFGAV0ML/+GzAX6N6Qz1rzwDyyyMsT8fAQyc9vvPXVGJSVpcqBA3fI1q2dJC1toZhMNlzZ\nWl4uMnOmMZr47DMdm2ggGRki06eL+PiIPPOM8b2IOXlShu7aJVdER0uCHc12KioqkldeeUUCAwNl\n4sSJEhMTc/q9nYWFcmNcnHTeulXeSk6WUy14lbUtwMIji/nAQKXUQOBx4GPgc+Byy5ouy/DVVzBu\nnDEpRtM4qqtLSEl5g9TUd+nS5T5GjDhMmzY2nJu+Zw/cdRd062aMLLp0sZ0sDkJuLrz+upEP7Y47\n4OBB8PCt5sXjx1mYkcGroaHc1amTXYwmCgoKmDdvHvPmzeOqq67i559/JiIiAoBthYXMTkpif0kJ\n/woOZnlERItM5OcoNHQJY5XZAk0A3hOR9wG7Xd3Sml1Q57vProiJEye+YOfOMIqLDzB06G5CQ1+2\nnaEoL4fnn4fx4+GJJ4wkgI0wFK1x3+WiImMKbFiYsQo7NhYmTIjksGsBg6KiOFRSQuywYdzdubPN\nDUVeXh4zZ86kV69eJCQksGXLFpYuXUrfvn2JzM9nbEwMUw4e5BZ/f46OHMlDQUHnbSha471wNqy1\nB/dJpdTTGC6oy5RSToBdTjtISIDDh43fGM25ETGRm/s9x4+/BEBExHI8PUfZVqioKGM0ERpq/OLp\nDUjqpaQE3nvPSIE1fjzs2AE9e8LJqire3pzCLldX5vXuzS3+/rYWlYKCAubOncv777/PLbfcws6d\nOwkNDUVE2JiXx+zjxzlRUcEz3brxt8BAnbPJnmiIrwroBDwGXGoudwPuaMhnrXkAMmuW4afV1E9V\nVbGkpn4gv//eW6Kihklm5goxmWw8v76sTOTpp43pOkuX6tjEOSgrE5k3T6RzZ5GJE0X219rAbk1O\njgRv2yZ3HzwoeeeYLWQNioqKZM6cOeLn5yd33XWXHDt2TESMmXZrcnJkZFSU9N2xQ744ccIu13m0\nZLBwzOJREflXLQOTrJRq5g2Sz48lS+DLL20thf1SUZFJWtp7pKcvwMPjIsLCPsbT81KbuyXYudMY\nTYSFGaOJTp1sK4+ds3KlsaFX//5GOqzBg43z2RUVPHL0KL8XFbE4PJyxdrBuYs2aNdx7772MGTOG\nrVu30qdPH0wifJOdzZzjx6kS4bnu3bnV3x9nW9+HjUBEbP+9sSINyjqrlNojIkPOOBcnIgOaTbLz\nQCkl4eHCgQOtNxt1ZGQko0eP/tP54uL9pKTMJSfnGwICbico6FHc3PpYX8DaFBXBzz8b8YgffjBS\nik+aZJF/Xl16cHROnIAHH4QDB+Cjj8C8Pg0RYVlWFo8fPcqUwEB
mh4TQwdnZpno4efIkjz32GD/9\n9BOfffYZl112GSLC6txcnk9MxEUpnu/enRv9/HBqxi/s+erAJCYO5RxiX9Y+jhw6Qn50Pk4JTril\nuOGd7k3AgwHc/tjtlhe4mahLDxbJOquUuh94AAhVSsXVeqsjsK1xotbZx3jgbYxg+yci8tpZ6rwL\nXAMUA9NEJKau9qZObb2G4kxEhIKCn0lJeYuTJ/fQteuDjBhxBFdXP9sIZDIZo4YffzSOPXvgoosM\nR/urr0JAgG3kcgBEjFHzE08YSXWXLoWa1E3JZWXcf/gwKeXlfN+/P8PtYG/o3bt385e//IUrrriC\n2NhYOnbsyE95eTybmEipycSckBBu8PW1qyfzsqoyotKj2JK8hZjoGNgMI46MICy1DyMqhtKmaxHu\n7vl4qnS82uzA0/MmW4tsVeodWSilPAFv4BXgqVpvnRSRvCZ3bgTKDwNjgXRgF3C7iMTXqnMN8JCI\nXKeUGgm8IyIX1tGeJCUJ3bs3VTLHxmSqICtrBampczGZygkOfpyAgCk4O9sgMVx2NmzcaBiHDRvA\n09MwDuPHw+WXg5ub9WVyMFJS4L77jD3kFy+GIeYxvkmE+enpzEpKYkbXrjzZrZtd7NGwdu1apk2b\nxvz585k4cSL7Tp3ikaNHSS4vZ3aPHkwKCGjWkURDKa0s5dfjv7IpcRPbj2/n1O5TXJt8LcPjh+OZ\n7Y5vcCaBpzbhcWIT7YJdUWF9IDz89CED+qM8HX9+fkNHFg3e/Egp5QwEUms0IiLJ5y2h0eaFwAsi\nco25/JTR7B+jC6XUh8BmEVlhLh8ERotI5lnak4ZeT0uksrKAjIwFpKbOw80tjODgx/HxGY9hk61E\nVZUxHWf9esNAHDoEY8YYxmHcOAgJsZ4sDo7JBAsXwnPPwcMPw7/+BaVOVRwqKSG+pISFGRmYRPg4\nLIy+HTrYWlxEhAULFvDvf/+bb7/9lvChQ5mVlMSyrCxe6NGD+zp3tvlueymFKaw7uo7vD3/PjkM7\nuCnnJsYkjKFTVCfaezjh3+k4vmkr8aiKxWnSrcYuUMOGIa4unDoVQ2HhFgoLtlKUE0kP3yfo3P+f\nNr0eS2BRY6GUegiYBWQCJvNpaWrMQil1KzBORO41l/8GjBCRh2vV+R54RUS2mcs/AU+KyJ6ztCcv\n3jMVbP/QYhEKTnTGVN3QGcqCoprk3BSCfUMQse6XUiFgMqFEEKWMM05O4GSbf0ZKbhLBvj1s0vd5\nIcZ/EAFQiIBJnDA5KVRbMDkrqpUTohTOYsLJZMKlqpJ21ZX1Npuce5xuvs0/1DaZTFSWlVPtrCgN\nDqTA24eUwK50y0xleHwsHSrKadvGFWcnKy+qE9iXc5xgD29OlhdhqlAEVAXTOWsAHTODqfLLpq3H\nXnrnrKFjVQrxAy7g0MALSOsWBE5OKKdy/IO2UeqyBxdxoXemJ703Z+Gd24X2z7yPumKsda+nCTRr\nzKIWjwBhIpLbOPGsT+S+1QT6GdvidWjvRM/g9gwIM5664g4VAzhE2cmtmOgoZ3K/mUZ4W2OqS3yJ\nETYKdxtQZ9mpPJcLci9qcP2WWm5T0g7M4157kMdyZSHcbeDpcjXt6q1fXe5G75xRVpOvDTDyUG/a\nlZeRULQZ16oKBjv1BSDadASAwU69rVoOIoRBTr3N5Ur6unjg1PYbYty3UVZdTrt2HXjxMi+2VvuB\nysLZVAJJu6hOO4WYFNUB/nRqG4Bb9gmy3dIp7W+ih7diauoOLo50Pv0DXLPozV7LMTF/hHojIyNJ\nSkqiMTR0ZLEZuEpEqhrV+rnbvRCYJSLjzeWGuKHigcvrckNVV1eTmZlJSkoKycnJJCcnk5KSwqZN\nm7jpppuYPXu2JS+h2RCpJubXCRTvrsLtu5fo824Y7gPdbS2WRnNOyjPKyV2bS/r8dJSTIvTVUNzG\nePJZRjpzDm6hXckRelUksCf
pR4I9g5nSfwq3XXAbnTva5+LLo0eNBY8rVsAtt8C0vxfjFXKMgA4B\nBLoH2lq8JmNpN9QnQBiwFiivOS8ic5sopDNwCCPAnQHsBP4qIgdr1bkWeNAc4L4QeLu+AHdd13Pi\nxAkGDx7MihUruOyyy5oittWori4lLnYcKimMU/ffScBfAgiZHYKLj10untdo/gcxCdlfZ5P4XCJt\nu7claHoQHuO8+G9hLq8kJ2MyVXG1UxLZqT+y5vBqhnUZxuR+k7ml7y14tvO0tfh/IjPTmGDw4YfG\nxL2334aLL7a1VE2nocaioY7tZGAj4IoxbbbmaBIiUg08BGwA9gPLReSgUuo+pdS95jo/AIlKqaPA\nAoypvI2mU6dOfPLJJ0ydOpX8/Pymim4VnJ3b06//d1QEbafrL1vABDsjdpK+MB2pPrtR1HlwDLQe\nDGypB+WkCLgtgOEHhhM4JZCUuSnsDPqdITML+KWkF2/37MOhthewIeBe/nHLNiYNuIvVh1fT7e1u\nTPxqIt8e/JayqrImy2EpHQQGwlNPGSmFZs4EX1+LNGs1mqqHBs+GAlBKuYlISZN6bEYaMhtqxowZ\npKen89VXX9nVHO/6KCtLJTp6FCEhc+iQfhNHph9BKoTe7/XGY+T/zqlvqYvRGovWg4G96aEsuYys\nL7PIXJpJZW4l/rf6U3p9R+Z3K2R5TjYTfH35P393DiWvZ9neZcSciOHm8JuZMmAKl3e//LwC5Pam\nA1vR1AB3Q91QFwGfAO4i0s2cqvw+ETmvp/zmoiHGoqysjBEjRvDII49w9913W0myplNcfJCYmDGE\nh3+Kj884Mr/I5Ni/juEz3ofQV0NxDXC1tYgaTaMoji8m++tssldmU5ldiftNPvx2ObzSJZc+HTvw\naFAQg1zK+Gr/CpbtW0bGyQxu73c7k/tPZmjnoQ7zsGfvWNpY7AAmAqtFZLD53D4R6ddkSS1IQ9dZ\n7N+/n9GjR7NlyxbCwsKsIJllKCzcxr59E+jffy0eHiOoKqoi6d9JZH6eSegboXSeZp8BQo3mXJQc\nKiH7v9lkf51NeUY5+eM78MXIMmIGwPQewUzr1ImU/CMs27uMZXuX4ezkzOR+k5ncfzK9fXvbWnyH\nxuLGQkRGKqWiaxmLWBEZaAFZLUZjFuXNnz+fjz/+mO3bt+Pq6jhP5Tk5qzl8+D4GDfrldG6nU3tP\ncWDSATwu9iBtUhpjxznO3O/mQrseDBxRDyVHS8heaRiOUyll7B3dhm8urmDkNV14qHsQXdu2ZWfa\nTpbtXcaK/Svo5tmNyf0n1zmjyhF10Bw01Q3V0AB3ilLqYkCUUi5KqX8CB8/1IXvmH//4B0FBQTz3\n3HO2FqVR+PndSI8eLxIXN57y8gwA3Pu7M2TXEEylJo48cISSI3YbVtJozolbLze6P9WdYbuHMfL3\noUwY1oWXlrRnzMXpfDBpB098Egvtw3nnmndIfSyVl654iZgTMUR8EMFVS65icfRiCssKbX0ZLY6G\njiz8gHeAKzHWR28AZtjbIr3GpvvIyclh0KBBfPrpp1x55ZXNKJnlSUqaQ3b2SgYP/oU2bYxphiJC\n+ofpJM1Mos+HffC/1fab3Wg0lqI0qZSUrzI58mUG1UnlHL7chd63d+aam7vh2rYNpZWlrD2ylqV7\nl/Jz4s9cGXolU/pP4dre19KujQ3yojkIFs8N5QicT26on376iWnTphETE4Ofn42ysZ4HIsKRIw9R\nUhLPgAE/4OTU9vR7RVFFHPjLAXwn+NLz9Z44udo+uZxGY0lOJZYQueQ4ef/NwSepmvJxHRk+NZig\ncX44uTqRX5rPfw/+l6V7lxJzIobrel/HxIiJjOs5jvYu7W0tvl1hEWOhlHpSRF5XSs3DnLWmNrVz\nONkD55tI8Mknn+TQoUOsWrXKoWZYiFSzf/9tKOVMRMSXKOV02i9ZmV9J/B3xVOZUEvFVBO2CW
9eT\nlfZTG7QGPew4mMOmz5PwXnuKnskK7+t9CbmtEz5X++DU1on//vBfTvidYOXBlezJ2MPVPa9mYt+J\nXNfnOtxdW09WhOaOWdTEJaKA3Wc5WgRz5swhNTWVDz/80NaiNAqlnOnb9wsqKjI5cuQhahtKF28X\n+n3XD7+b/Ng9fDe5P9qVx1CjsRgj+/rxzCvDuGHnhWxd24l3/fJYPyueXwO3cmDqAZz3OPOPfv9g\n852bOTr9KON7jmdxzGK6vNWFCcsnsCh6EWlFaba+DLun1buhajh8+DCjRo0iMjKSCy6wyx1j66Sq\nqojY2Cvx8rqc0NDX/zQ6Kvi1gAN/PUDn/+tMjxd6oJwdZ/Sk0TSW4upqPjtxgsUxKVz8i4lrtrfB\nLbYc79Fe+F7vi8+1PrQLakd+aT5rj6zl+8Pf89Oxn+jasSvje41nXM9xXNLtEtq2aXvuzloAlp46\nuxH4i4gUmMveGKk5xjVZUgvS1P0sFi1axNtvv83OnTtp186x3DaVlXnExIzBz+9mQkJm/en98hPl\nHJx8EJwgYlmEXsSnafGYRPghN5f30tI4lH6SBw525OLfFbKpCBc/F7xGe+E1xguvy71oE9iGXem7\n+PHoj6w7uo74nHgu634Z43uO57LulxHhH2H99OpWwtLGIkZEBp1x7vSaC3uhqcZCRLjtttvo3Lkz\n77zzjgUlsw4VFVl8/PEwrr9+Ot26PfGn901VJpJeSOLEZyeI+DICr0sdf5evumgNvvqGoPVg6KD7\nyJEsPnGCz06cwMfJmbvzvLlsrzNO24op/LWQdj3a4TPOB++rvPEc5Ul+dT4bEjaw8dhGfkv+jazi\nLIZ3Gc5FQRdxSbdLuDDoQrtMdlgf1lpnUa2U6lar8e6cJeDt6CilWLBgAatWrWLt2rW2FqfRuLoG\n0KvXW6Snf0ha2vt/et+pjROhL4US9lEY+yfuJ/mNZFqSG1KjqYuQ9u2ZHRJC4oUXMrdPb/aFmrhy\nVAb3PF/Bjt3d8HmjO6qN4tjTx9jqv5W0v6Rx6c+XMq/PPA4/dJij04/y2EWPYRITL295maD/BPHl\n3i9tfVlWpaEji/HAR8AvGOssLgXuFZH1zSte47DUtqq//vort912G9HR0XTq1MkCklmX0tJEYmIu\np0ePf9O5811nrVN2vIz9k/bj2smV8E/DcfHWac81rYtKk4nNBQV8mZXFt9nZXOzpyd8CA7nOyZPy\nyCLyN+STtyEPp7ZO+E/yx3+iP+4D3VFKUVldSZWpqkVMw22OPbj9gJp9JH4XkZwmyNcsWHIP7uef\nf56oqCjWrl2Lk433DT4fSkoOERMzhl69/kNAwG1nrWOqMJHwzwRy1+RywdcX0HFok7POazQOSXF1\nNatycvgiM5PthYXc6OfH3wIDGePpSWl0Mdkrssn+bzZSJfhc44Pvtb54jfWijXtDNxu1XyzihlJK\nhZv/DgG6Aenmo5v5XItl5syZFBQU8O6779palEZRk7PezS2MAQPWc+TIDHJyVp+1rpOrE73f7U3o\nq6HEjY8j5e2UFuOW0vtZGGg9NEwHHZydmRIYyLoBAzg0ciRDO3bk2cREgn//nXvbp7D6YVeq9oTT\n4/sI3MLcSJ2XSt4Pec0vvAVp6r1wLrP4GHAv8NZZ3hPgiib1bse4uLiwdOlSRo4cyejRoxk0aNC5\nP2RnuLv3p3//Nezdey1OTl/g43P1WesFTAqg49COHJh8gPyN+YQvDtezpTStlkBXV2YEBTEjKIhj\npaX8VljIjqIivszKIr6khHYjnQgf48bM7m1wrCRBTeNcK7j/IiJfK6VCReSYFeU6Lyzphqph6dKl\nzJkzh927d+Pm5mbRtq1FYeFW9u27mQsuWImXV91bypoqTSTNTOLE5ycI/zQcn6t8rCilRmP/iAjp\nFRUcKimhZ/v2dHewKfZnw1LpPvaIyJCavxaVsBloDmMBM
HXqVDp06OBwK7xrk5+/iQMH/kr//mvw\n8BhRf91N+Ry88yCBkwMJmROic0tpNC0YS02dzVNKbQBClVKrzzwsI6r98/7777Nx40a+/fZbW4ty\nTuryS3p7jyU8fDF7997AqVOx9bbhPdabYTHDKIkvIXpUtEOmPNe+egOtB62DGpo7ZnEtMARYwtnj\nFq0CDw8Pli5dyoQJExg+fDhBQUG2Fum88PW9jt693yMubjwDB/5Mhw5966zr6udKv+/6kfZ+GtEX\nR9PzzZ4E3hHoUIkWNRqN5TiXG2qJiEytyT5rRbnOi+ZyQ9Xw0ksvsWnTJjZu3Iizs+Mu/T9x4nMS\nE59l0KBI2rfvec76p+JOceCvB3Af6E6f+X1o4+n40wU1Go2BpdxQQ5VSXYApSilvpZRP7cMyojoO\nTz31FNXV1bzxxhu2FqVJdOp0B926PUts7JWUlaWcs777AHeG7hpKG882RA2OovB3vQuZRtPaOJex\n+BDYBITz5/TkUc0rmv3h7OzMF198wdy5c9m5c6etxTkrDfVLdu36D7p2nU5s7FjKy0+cs76zmzN9\n5veh51s92TdhH8dfPo5U2++aDO2nNtB60Dqooal6qNdYiMi7ItIXWCQioSISUusIbUrH5pHKBqXU\nIaXUeqXUn7JyKaWClFI/K6X2K6X2KqVsvtlScHAwH3zwAZMnT+bkyZO2FqdJBAc/RmDgVOLirqKy\nsmH7Xfjf7M/QqKHkrc8j9qpYytPKm1lKjUZjDzQm3cclQG8RWWxO/dFRRBLPu2OlXgNyzTvx/Qvw\nFpGnzqjTCegkIjFKKXeMEc0EEYmvo81mjVnU5p577qGqqopPP/3UKv01FyLCsWNPk5+/kYEDf8LF\nxbthn6sWjr9ynLT30ghbEIbfBMfZklaj0fyBpVOUvwAMA8JEpI85jvG1iIxqgoDxwOUikmk2CpEi\nEn6Oz6wC5onIpjret5qxOHXqFEOHDmXWrFn89a9/tUqfzYWIkJDwGIWFWxgwYEODDQZA4bZCDk45\niPc4b3q+0ZM2HXXwW6NxJCydovxm4EagGEBE0oGmZp0LEJFMc3sngID6KiulegCDgB1N7NciuLu7\ns2zZMh5++GGSkpJsLc5pzscvqZSiZ8+5eHpeQlzc1VRW5jf4s54XezIsZhhSKUQNiCL/54Z/tjnR\nfmoDrQetgxqae51FDRUiIkopAVBKdWjIh8w77AXWPoWRU+q5s1Svc0hgdkGtBGaIyKn6+pw2bRo9\nevQAwMvLi0GDBp3e8KNGWZYqnzx5kokTJzJlyhR++eUXtmzZYtH2z6ccExNzXp9XSpGSciPp6SnA\n1QwYsIGtW2Mb/PnwT8L57rXv2H3bbq6aeBWhr4eyZbft9dHay+d7P7Skcg32Io+tyjExMdQQGRnZ\n6Ifchrqh/gn0Bq4CXgHuBpaJyLxG9fa/bR4ERtdyQ202B9PPrNcGWAOsE5F6t6+zphuqBpPJxLhx\n4xg1ahSzZs2yat/Nwf+6pDbi4tK43fQqCypJeDyBgp8LCPskDO8rGu7S0mg01qc59rO4CrgaY3Sw\nXkQ2NlHA14A8EXmtrgC3ud7nQI6IPNaANq1uLAAyMjIYPHgwK1eu5JJLLrF6/5ZGRDh69FGKirae\nl8EAyF2Xy+F7D+N7vS+hr4fqWIZGY6dYOmYBEIexU14kUH9yoYbxGnCVUuoQMBZ4FUAp1Vkptcb8\nehQwBbhCKRWtlNpj3rXPrujcuTMLFy7kb3/7GwUFBTaV5cyh9/mglKJXr//g4THKPK228dfke40v\nw/YOw1RuskkswxJ6aAloPWgd1NBUPTTIWCilJgE7gb8Ak4AdSqmJTelYRPJE5EoRCRORq0WkwHw+\nQ0SuN7/eKiLOIjJIRAaLyBAR+bEp/TYXN9xwA9dffz3/+Mc/WsQGQpYwGC5eLoQvCqf3+72JvzOe\nww8cpupUVTNIq9Fom
puGxixigatEJMtc9gd+EpGBzSxfo7CVG6qG0tJShg8fzhNPPMGdd95pMzks\niSVcUmCOZTyWQMFmHcvQaOwJS6+z2Csi/WuVnYDY2ufsAVsbC4C9e/dyxRVXsG3bNnr37m1TWSyF\nYTAeoahou3kdxvkZDIDcH3I5fN9hfG8wxzJawB7GGo0jY+mYxY/mlBzTlFLTgLXAD00RsKXSv39/\nZs6cyeTJk6moqLB6/83hnzVcUm/j4XGReR3G+cdlfK81xzLKTET1b75YhvZTG2g9aB3U0KwxC6VU\nL6XUKBF5AlgADDAf24GPmtRzC+ahhx4iICCAF154wdaiWAxLGgwdy9BoHI9z7WexBnhaRPaecb4/\n8LKI3NDM8jUKe3BD1ZCVlcWgQYP44osvuOKKK2wtjsWwpEsKzLGMRxMo+KWAsEVheI/WsQyNxppY\nag/uXSIyvI739uqYRf2sX7+ee+65h5iYGHx9fW0tjsUwDMYMiop2MGDA+iYbDICcNTkcvu8w/hP9\nCX0lFGc3x91cSqNxJCwVs6jvV6B940RqfYwbN45JkyZxzz33WG06rTX8s4ZL6h08PEYSFzeuSS6p\nGvyu92N43HAqcyqJGhRF4bambbCk/dQGWg9aBzU09zqLKKXU3888qZS6ByNduOYcvPzyyyQlJfHR\nRy0rxNMcBsPF14WIpRGEvhLK/lv3k/BkAtVl1RaQVqPRNJVzuaECgW+BCv4wDsMAV+Bmc7ZYu8He\n3FA1xMfHc8kll/Dbb7/Rt++f0l85NDUxDCOX1DpcXetNHtxgKrIqOPzAYUoOlhD+WTgewzws0q5G\no/lfLL3OYgzQz1zcLyI/N1G+ZsFejQXAwoULef/999mxYwdt27a1tTgWRURISnqBrKwVDBy4gXbt\nupFlxgUAABupSURBVFus3azlWRx95Chd7u1C9+e74+TamAw1Go3mXFh0nYWIbBaReebDLg2FvXPP\nPffQs2dPnn766Wbtxxb+WaUUISGz6dr1AaKjL6W4+IDF2g38ayDDYoZxKuYUu0fs5lRsvRnqT6P9\n1AZaD1oHNVglN5Sm6SilWLhwIStXruTHH+0yvVWTCQqaQUjIy8TEXEFRkeX2qGrbuS39Vvcj6JEg\nYq+MJWlOEqYqk8Xa12g056bBKcodAXt2Q9UQGRnJ5MmTiY6OJjAw8NwfcEByc9cSHz+Nvn2X4uNz\ntUXbLksp49A9h6jKqyL8s3A6RDRoHy6NRlMHFt/PwhFwBGMB8OyzzxIdHc3atWtR6pz/I4ekoGAL\n+/ffSu/e8wgImGTRtkWEjI8ySHwukeAngwl+LBjl3DL1qNE0N82xn4XGQsyaNYvc3FzmzTvvjQbr\nxF78s15elzBw4EaOHn2UtLT5Fm1bKUWX+7owZOcQ8n7II/rSaEoTSv+njr3owdZoPWgd1KBjFg6I\ni4sLy5Yt48UXXyQuLs7W4jQb7u4DGDz4V1JS3iQpaY7FFya2D2nPwE0D8Z/kz56L9pD1dZZF29do\nNH+g3VA25PPPP+e1115j165duLm52VqcZqO8PIO4uHF4eV1Br15zMTLcW5aiqCIO3HYAn/E+9Hyr\nJ87tdLoQjaYh6JiFAyAiTJkyBS8vLz744ANbi9OsVFbms3fvDbRvH0JY2CKcnFws3kdVYRWH7jlE\naUIpESsicOvdcg2wRmMpdMzCAVBKMX/+fL777jtiYy2xrbn9+mddXLwZOHADlZV57Nt3M9XVJRbv\no41nGyK+iqDz/3Xmk2GfkLVCu6Xs9X6wJloHBjpm4eB4enoybdo0lixZYmtRmh1nZzf69VuFi4u3\nxfJJnYlSiq4PdqXnGz059uwxDt9/WOeX0mgsgHZD2QEHDx5k7NixpKSk4Ozc8n3tIiaOHv3/9u49\nPqryXPT470nCLQnhTkJQiBFyI5AEKKBUxIIoUooCVbygeEo9PUqlIG5QVPZupQU9Vnr
abmvRAipb\ntFWUzT7dggpCxYNcciM3EAyXhORwhxAuuTz7j5lsMSaEy8ysmcnz/Xz4ZNbkzXqfeTPMk/W8a71r\nBsePr6dfv49o1SrGK/1Un6ym6KdFVBZV0ufdPoQnWFnKmPqsDBVAkpOTiY2N5dNPm8dKKiIh9Oq1\niC5dfkxm5lDOnNnjlX7CosJIWZFC7M9iyRyaSfnb5V7px5jmwJKFn3jggQd46623rno/gVKfFRHi\n4p7h2mtnkZl5E6dOZXl0/3XjICJ0/1l3+q3tR/G8YooeKaLmTPMpSwXK+8GbbAxcAnbOQkQ6iMga\nESkSkY9EpN1F2oaIyHYRWeXLGH3p3nvvZdWqVZw+fdrpUHyqe/f/Ra9ei8jJuZXDh1d7rZ+26W0Z\nsG0ANRU1bB+8ndOFzWucjblajs1ZiMhC4IiqviAis4EOqjqnkbYzgAFAlKr+6CL7DMg5izqjR49m\n8uTJ3HfffU6H4nMnTvw/8vLG06PHbLp3f9xry6CoKgdfO8jXT39Nr0W9iL4/ONfnMuZSBcKcxThg\nmfvxMuDOhhqJyDXAHcBrPorLMZ4qRQWidu2GkJGxidLSxeza9Ri1tdVe6UdEiP1pLGmfpFH8q2IK\npxZSU9l8ylLGXCknk0VXVS0HcN9xr7FbrL0MPAkE7iHDJbrzzjvZtGkT5eVXPhEbyPXZNm3i6N//\nc86c2UNu7hiqq6/8PtxNjUNkv0gGbBlA7dlaV1mqIDjLUoH8fvAUGwOXqx2HMM+E0TARWQtceJwv\nuD70n2mg+XeSgYiMAcpVNUtEhrt//qKmTJlCXFwcAO3btyc9PZ3hw4cD3wyWv25v2bKFQYMGsWLF\nCqZPn35F+8vKyvKb13Ol28OGrearr6azeHEa8fELuO22SV7p7x/b/oH+REnak0TWsCzKp5bT8baO\njr9+T24Hw/vharfr+Es8Tm1nZX1zEsn69espLi7mcjg5Z1EADFfVchGJAdapanK9Nr8GHgCqgTZA\nW+B9VX2wkX0G9JwFwJo1a5g7dy5btmxxOhRHqSolJb9n374F9OnzPu3aDfFqfxW5FeTfnU/UDVH0\n/n1vQiOC/3oXYyAw5ixWAVPcjx8CPqzfQFWfVtUeqhoPTAI+bSxRBIsRI0ZQUlJCYWGh06E4SkS4\n5prHSUxczI4dYykvX+HV/iL7RtJ/S3+0Wtk2aBun84OzLGXMlXIyWSwEbhWRImAEsABARLqJiPfO\nofRzoaGh3HvvvVc80V3/0DvQdeo0hrS0j9mz558oLv7VJS9zfiXjEBYZRtKyJK6ddS1ZN2dxcOnB\ny96Hvwm298OVsDFwudpxcCxZqOpRVR2pqomqOkpVj7ufP6iqP2yg/WcXO202mDzwwAMsX76c2lq7\nzzRAZGQa/ftv5siRVRQWPkht7Tmv9SUidHu4G+nr09n/wn4KHiqg5rSdLWWMrQ3lh1SVvn37smjR\nIkaOHOl0OH6jpqaSgoIHqaoqp0+flbRs2dm7/Z2uYde0XZzcfJKUd1OITI30an/GOCEQ5ixMI0SE\n+fPnM2XKFPbt2+d0OH4jNDScPn3epV2777N9+xBOn/buvE5oRChJS5LoMbsH2bdkc/AvBz1+tz9j\nAoUlCz81btw4Zs6cyZgxYzhx4tKvNwj2+qxICPHxv6Fnz7lkZd3MsWOfNNjOk+MQ81AM6Z+ls/+3\n+yl8sJDqCu9cMOgNwf5+uBQ2Bi4BO2dhmjZjxgxuvvlmJk6cSFVVldPh+JVu3R4mJeUd8vPvo7TU\n+xf3R6REMODLAUhLYdvAbVTkVni9T2P8ic1Z+Lnq6mruvPNOYmJiWLx4sdfWTApUlZU7yc0dQ+fO\nE4iP/7VX7u9dX9mbZeyeuZvrfnMd3X7SzX4nJqDZPbiDSEVFBcOGDWPixIk8/fTTTofjd86fP8yO\nHeNo3boHiYlLCA1t7fU+TxeeJv/H+UT0iyDhTwm
EtfXqYgjGeI1NcAeRyMhIVq9ezauvvsrbb799\n0bbNsT7bsmVn0tI+RrWGnJxbqao66vVxiEiKoP/m/oSGh7rKUtn+WZZqju+H+mwMXGzOopmIjY1l\n9erVTJ8+nY0bNzodjt8JDW1DSsoKoqJuYPv2Gzl3rtT7fYaHkrg4kbjn4sgemU3pn0vtbCkTtKwM\nFWDWrFnD5MmT2bhxIwkJCU6H45dKSv6VvXufJzX1A6KiBvmkz8qiSvJ+nEdEnwgSXk0gLMrKUiYw\nWBkqSI0aNYr58+dzxx13cOjQIafD8Uvduz9KQsKr5OaO4fDh7yw55hXhieGuslRUKNsGbONU5imf\n9GuMr1iyCEBTp07l7rvvZty4cZw5c+Zb37P6rMuOHW3p2/fv7Nz5KAcO/B+f9BnaJpTEVxOJ+2Uc\nOaNyKHmlxPGylL0fbAzq2JxFM/X888/Ts2dPHnroIVtDqhFRUQPJyPic0tI/8dVXM1D1zRpP0fdG\nk/F5BqWvllJwfwE1Z2xtKRP4bM4igJ09e5aRI0cydOhQFi5c6HQ4fquq6hg7dtxFixadSE5+i9DQ\nNj7pt+ZMDUU/KeLMV2dI/TCVVt1a+aRfYy6HzVk0A61bt+aDDz5g5cqVzJw5k1OnrE7ekBYtOpCW\n9hEhIW3Izv4B58/7Zq4ntE0oycuT6fSjTmwfvN3mMUxAs2QR4Dp37szGjRs5cuQIycnJPPfcc47X\nyf1B/fpsSEgrkpPfpH37EWzffgOVlTt9EoeIEPdMHNf/9npyRuVwaKVvT0qwer2NQR2bszBER0ez\nbNkyVqxYwfLlyxkxYgT5+flOh+V3RIT4+Ofp0WMOmZnDOHHic5/13XViV/r+vS9fPf4Ve3+z1xK6\nCTg2ZxFkqqureeWVV/jlL3/JlClTeO6552jbtq3TYfmdo0c/oqBgMr17/5GuXX/ss37PlZwjd1wu\nEckRJCxOILS13evbOMvmLJqpsLAwfv7zn7Njxw4OHTpEcnIyK1assL9k6+nY8TbS0taye/dM9u17\n0Wfj06p7KzI2ZFB7tpbsH2Rzvvy8T/o15mpZsggydXXJ6Oholi5dyjvvvMOCBQsYMWIEeXl5zgbn\nQ5dSn42MTCMjYxPl5W+ya9c0amt9c5+K0PBQUt5JocOtHdg2eBsVO7y3rpTV620M6tichbmooUOH\nsnXrVsaPH8/w4cOZNWuWnTV1gdatryUjYyNnzuwkL+8uampO+6RfCRGu+5friJ8fT/bIbE5tt9+J\n8W82Z9GMlJeXM2fOHNauXcuLL77IpEmT7F4MbrW1Vezc+QgVFbn07buaVq1ifNb3oZWH2PmznfT9\n975EDYryWb/GgN3PwlzEpk2beOyxx2jfvj1/+MMf6NOnj9Mh+QVVZe/e5ykr+wt9+/4HEREpPuv7\n8OrDFP2PIlI/SKXdje181q8xNsHdTF1KXfLGG29ky5YtTJgwIWhLU1dSnxUR4uKeJS7uX8jKGs6x\nY5e/jyvV+YedSX4zmR3jdnB8w3GP7dfq9TYGdQJ2zkJEOojIGhEpEpGPRKTBP6dEpJ2I/FVECkQk\nT0QG+zrWYBQWFsa0adPIy8vjyJEjJCUlsWTJEltnCoiJeZCUlBXk599NWdlbPuu3420dSVmRQt6E\nPI59csxn/RpzKRwrQ4nIQuCIqr4gIrOBDqo6p4F2S4HPVHWJiIQB4ap6spF9WhnqCm3evJkZM2Zw\n9uxZXn75ZW6++WanQ3Lc6dN55OSMoVu3qfTsOddn8zvHNxwnb2IeSW8k0en2Tj7p0zRffj9nISKF\nwM2qWi4iMcB6VU2q1yYKyFTV6y9xn5YsroKq8u677zJ79mz69+/PCy+8QK9evZwOy1Hnzh0kN3cM\nkZH9SUh4hZCQFj7p98SmE+y4awex/zOWns/2JKSFVYyNdwTCnEVXVS0HUNUyoGsDba4DDovIEhHZ\nLiJ/FhHfLBk
aoK6mLiki3HPPPRQUFPC9732PIUOGMGvWLI4f91wN3Vc8Vadu1aob6ekbOH/+ILm5\nY6mubvCg1uPa3diOgVkDObX1FNuHbOd0/pWd0mv1ehuDOlc7Dl6996OIrAWiL3wKUOCZBpo3dEgQ\nBvQHHlPVrSKyCJgDzGuszylTphAXFwdA+/btSU9PZ/jw4cA3gxXM21lZWR7Z31NPPUViYiKvv/46\niYmJzJs3j8TEREJDQ/3q9fpqOzX1Q956azxffJHBlCmf0br1NV7v/4uiL9AnlcSdiWQOy6R0Uild\nxnfhlh/ccsn789T7IZC36/hLPE5tZ2VlUWf9+vUUFxdzOZwsQxUAwy8oQ61T1eR6baKBL1Q13r39\nfWC2qo5tZJ9WhvKC7OxsnnjiCUpLS3nppZcYPXq00yE5QlXZv/8FSkr+SN++q4mM7Oezviu/qqTw\nwUJCWoeQtDSJ1j1a+6xvE9wCoQy1CpjifvwQ8J2bJbvLVPtFJMH91AjAllP1sbS0NNauXcvChQv5\nxS9+we23396slg6pIyL06DGb+PgXyM4eydGja3zWd3ivcNI3pLuWCBmwjbI3ymy9L+NTTiaLhcCt\nIlKEKwksABCRbiKy+oJ2jwPLRSQLSAN+7fNIA0j9Q29PERHGjh1Lbm4uo0eP5pZbbuHRRx/l0CHf\n3p/hUnlrHACioyfRp897FBRM5uDBv3itn/pCwkLo+VRP+q3tx74X9pE7NpeK3IuvK+XNcQgUNgYu\nVzsOjiULVT2qqiNVNVFVR6nqcffzB1X1hxe0y1bV76lquqqOV9UTTsVsoGXLlkyfPp3CwkJatGhB\ncnIyL774IufOnXM6NJ9q3/4mMjI2sHfvfL7++lmf/pXfNr0tA7YOoMMPOpA9Mpv8+/Kp3FXps/5N\n82TLfZirUlRUxJNPPklWVhZPPPEEU6dOJSIiwumwfOb8+f9Pbu5YwsMTSUx8jZCQlj7tv/pUNQd+\nd4ADiw7Q5a4u9Hy2p81nmMsSCHMWJggkJiayatUq3n//fTZs2EB8fDzz588PyNNtr0TLll1JT19H\ndfVJcnJup6rKt687rG0Ycc/EMXjnYFp0acHWjK3smr7L7pNhPM6SRZBxqj47cOBA3nvvPdavX8+u\nXbu4/vrrmTNnDuXl5Y7E48txCA0NJzX1PSIi+pKZOZSzZ/f6rO86LTq2IP7X8QzKHwQCX6Z8yZ6n\n9vDxqo99Hou/sTkLl4CdszDBKTk5maVLl7Jt2zYqKipITk5m2rRp7N3r+w9QXxIJpXfv3xEb+wjb\nt9/IqVPbHImjZXRLei/qzcCsgVQdqaJgcgHFvyqm+pRvbuxkgpfNWRivKisrY9GiRSxevJixY8cy\ne/ZskpOTm/7BAHbo0Ep27nyEpKSldOo0xtFYKndVUvzPxRz7+Bg9/qkHsY/GEtrG7vttvmFzFsYv\nxMTEsGDBAnbv3k3v3r0ZPnw4EyZMYOvWrU6H5jVdutxFauq/U1Q0lX37XqS21rn5g/De4aQsTyHt\nkzROfH6Czb03U/JKCbXnbXVhc3ksWQQZf63Ptm/fnrlz57Jnzx6GDRvGXXfdxahRo1i/fr1XTjt1\nehzatRtCRsbnHD/+KVu2pHLo0EpHLqKrG4fI1EhS308l9YNUDn94mC+TvqRsWRlaE/xH4k6/F/yF\nzVmYgBIREcH06dPZvXs399xzD4888ghDhw5l0aJFrFu3jiNHjjgdose0aRNPv35/p3fvP1Bc/M9k\nZt7EyZObHY0pamAUaf+ZRtLSJA6+dpAtaVs4uvaoozGZwGBzFsZRNTU1rFy5knXr1pGTk0NOTg5t\n27alX79+pKWl/ffXhIQEwsK8uu6lV6nWUFb2Bl9//Szt2n2f+Pjf0KbNdQ7HpBz+8DC7Z+0mPCmc\n6//39UQkNZ9rZIyL39/PwhssWQQ+132w95KdnU1OTg7Z2dlkZ2dTUlJCUlISa
Wlp30oinToF1s2B\nampOs3//bzlwYBExMQ/Ts+dcWrTo4GhMtedqOfD7A+xbsI/o+6OJmxdHi46+uW+HcZ4li2Zq/fr1\n/70kcTCpqKggLy/vW0kkJyeHyMhI+vXrR8eOHb/Vvry8nOjo6Eb25rw2bc4wYEAucXH7KSmJ8Vo/\nRUVnSUy8tCu6tQaqj1dRc7qWkDbBU6HeueccCfGtPL7fuC6PccfUpz2+X29p7LPhUpNF4B7Xm2Yl\nMjKSwYMHM3jwN7dgrzsKycnJ4dSpU99qn5+fT0pKiq/DvEwTOHmyhI4di73WQ9u2JXTs2P3Sf6AL\nVB+r5vyh4LkCPELKiBLPJ+TY69I8vk9/ZkcWxhjTjNl1FsYYYzzGkkWQsXPKXWwcXGwcbAzq2HUW\nxhhjvM7mLIwxphmzOQtjjDEeY8kiyFh91sXGwcXGwcagjs1ZGGOM8TqbszDGmGbM5iyMMcZ4jGPJ\nQkQ6iMgaESkSkY9EpF0j7WaIyA4RyRGR5SLS0texBhKrz7rYOLjYONgY1AnkOYs5wMeqmgh8CjxV\nv4GIxAI/B/qraj9ca1lN8mmUfiLY3/D2+gKbvb7g52SyGAcscz9eBtzZSLtQIEJEwoBwoNQHsfmd\nS32zBuqKs57+z+hv4+DUh42vxsGfP0w9MQb+/Pou1dWOg5PJoquqlgOoahnQtX4DVS0FXgL2ASXA\ncVX92KdRGmOM8W6yEJG17rmGun+57q8/aqD5d05jEpH2uI5AegKxQKSI3OfNmANdMPwF5Ak2Di42\nDjYGda52HBw7dVZECoDhqlouIjHAOlVNrtdmInCbqv7UvT0ZGKyq0xrZp503a4wxl8nfb360CpgC\nLAQeAj5soM0+YIiItAbOASOALY3t8FJesDHGmMvn5JFFR+Bd4FpgL3C3qh4XkW7AYlX9obvdPFxn\nQFUBmcBUVa1yJGhjjGmmguoKbmOMMd4RFFdwi8jtIlIoIjtFZLbT8XiSiLwuIuUikuN0LN4gIteI\nyKcikuc+AeJxp2PyJBFpJSKbRSTT/frmOR2Tp4lIiIhsF5FVTsfiaSJSLCLZ7t/fl07H42ki0k5E\n/ioiBe7/g4MbbRvoRxYiEgLsxDWfUYprTmOSqhY6GpiHiMj3gQrgDfeFiUHFfXJDjKpmiUgksA0Y\nFyy/PwARCVfVShEJBT4HHlfVoPngEZEZwAAgSlUbOtMxYInIHmCAqh5zOhZvEJGlwGequqTuWjZV\nPdlQ22A4shgE7FLVve65jBW4TrcNCqr6DyAo36jgusZGVbPcjyuAAqC7s1F5lqpWuh+2wnVSSWD/\nhXYBEbkGuAN4zelYvEQIjs/J7xCRKOAmVV0CoKrVjSUKCI5B6A7sv2D7AEH2YdNciEgckA5sdjYS\nz3KXaTKBMmCtqjZ6Rl8Aehl4kiBKgPUosFZEtojIT50OxsOuAw6LyBJ3GfHPItKmscbBkCxMEHCX\noP4GTHcfYQQNVa1V1QzgGmCwiKQ4HZMniMgYoNx9ZCjuf8FmqKr2x3X09Ji7LBwswoD+wB/dr7ES\n15p9DQqGZFEC9Lhg+xr3cyZAuGulfwPeVNWGrrcJCu5D/HXA7U7H4iFDgR+56/pvA7eIyBsOx+RR\nqnrQ/fUQsBJX2TtYHAD2q+pW9/bfcCWPBgVDstgC9BKRnu7lyyfhuuAvmATrX211/gLkq+rvnA7E\n00Skc93y++5D/FuBoJi8V9WnVbWHqsbj+n/3qao+6HRcniIi4e4jXkQkAhgF7HA2Ks9xr823X0QS\n3E+NAPIba+/kFdweoao1IjINWIMr+b2uqgUOh+UxIvJvwHCgk4jsA+bVTUgFAxEZCtwP5Lrr+go8\nrar/6WxkHtMNWOY+ay8EeEdV/6/DMZlLEw2sdC8jFAYsV9U1DsfkaY8Dy0WkBbAHeLixhgF/6qwx\nxhjvC4YylDHGGC+zZGGMMaZJliyMMcY0y
ZKFMcaYJlmyMMYY0yRLFsYYY5pkycKYBojIqQaeu0lE\ntolIlYiMv8jP1orIixdsPyEiz3krVmN8wZKFMQ1r6AKkvbhuAby8iZ89B4x33w3ysrmXMjfGrwT8\nFdzG+Iqq7gNwX9F7MdXAn4GZwDMXfkNEeuJa3qQTcAh4WFUPiMgS4CyuVXc/dx/ZXAfE47r18Exg\nCDAa15o+Y1W1xkMvzZgm2ZGFMZ6nwB+B+0Wkbb3v/R5YoqrpwL+5t+t0V9UbVHWWezse11Iv44C3\ngE/cN8A6C4zxYvzGfIclC2O8wL3M+jJger1v3YBrhVaAN3Gt3Frnr/Xa/l1Va4FcIOSCdYlygTiP\nBmxMEyxZGOM9vwN+AkRc8NzFSlin622fA1DXAm5VFzxfi5WQjY9ZsjCmYU0tCX+x7wuA+77N7+JK\nGHU2Afe6Hz8AbPRQPMZ4lSULYxrWRkT2ich+99dfiMhAEdkPTAT+JCK5jfzshUcPL+GazK577nHg\nYRHJwrU0+/QGfqapfRrjc7ZEuTHGmCbZkYUxxpgmWbIwxhjTJEsWxhhjmmTJwhhjTJMsWRhjjGmS\nJQtjjDFNsmRhjDGmSZYsjDHGNOm/AAumGaOKWa6EAAAAAElFTkSuQmCC\n", + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "cl = np.array([[-0.7], [0.5]], dtype = np.float64)\n", + "tfit=glmnet(x = x.copy(),y= y.copy(), cl = cl)\n", + "glmnetPlot(tfit);" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "These are rather arbitrary limits; often we want the coefficients to be positive, so we can set only `lower.limit` to be 0.\n", + "(Note, the lower limit must be no bigger than zero, and the upper limit no smaller than zero.)\n", + "These bounds can be a vector, with different values for each coefficient. If given as a scalar, the same number gets recycled for all." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Penalty factors\n", + "\n", + "This argument allows users to apply separate penalty factors to each coefficient. Its default is 1 for each parameter, but other values can be specified. In particular, any variable with `penalty.factor` equal to zero is not penalized at all! Let $v_j$ denote the penalty factor for $j$ th variable. 
The penalty term becomes\n", + "\n", + "$$\n", + "\\lambda \\sum_{j=1}^p \\boldsymbol{v_j} P_\\alpha(\\beta_j) = \\lambda \\sum_{j=1}^p \\boldsymbol{v_j} \\left[ (1-\\alpha)\\frac{1}{2} \\beta_j^2 + \\alpha |\\beta_j| \\right].\n", + "$$\n", + "\n", + "Note the penalty factors are internally rescaled to sum to nvars.\n", + "\n", + "This is very useful when people have prior knowledge or preference over the variables. In many cases, some variables may be so important that one wants to keep them all the time, which can be achieved by setting corresponding penalty factors to 0:" + ] + }, + { + "cell_type": "code", + "execution_count": 17, + "metadata": { + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } + }, + "outputs": [ + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYsAAAElCAYAAAAV9s4VAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzsnXl4lNXZ/z9nsm9kIyRhJywBFFkEN1BBQMGdgopLEbRo\ntb4ufW3R1rcuVavWt5VqtbX6w+XVqnUpoqIgEESoLELY95CFhCSQfZnMen5/nJnMJCRkm8lMZs7n\nus71LPMs585Mnu9z7vuc+wgpJRqNRqPRnAmDryug0Wg0Gv9Hi4VGo9Fo2kSLhUaj0WjaRIuFRqPR\naNpEi4VGo9Fo2kSLhUaj0WjaRIuFxq8RQtiEENuFEHuEEDuEEL8UQghf16szCCHuF0LsE0K822z/\npUKISoedO4QQq7x0/2VCiJ9449qawCfU1xXQaNqgTko5AUAI0Rv4J9ALeKKrFxZCGKSU9q5epwPc\nA0yXUha18Nl3UsprWztRCBEipbR5r2oazZnRLQtNj0FKeQq4C7gP1MNeCPGCEGKzECJbCLHYsV8I\nIV51vMV/I4T40vlGLYQ4JoR4TgixDZgnhMgQQqwUQmwVQqwXQoxwHNdbCPGx49qbhRAXOvZf6nj7\n3y6E+FEIEdO8no7Wz24hxC4hxP2Ofa8BGcBKIcQDLZh3WmvJ0RJ4TQjxA/C8ECJaCPGmEOIHx72v\nPdPfwfHZK0KI/Y7WSh+3/dMdNuwUQrwhhAhz+/s867BxixBivBDiayHEYSHE3R3/1jQBg5RSF138\ntgDVLewrB1KAxcBvHPvCga3AIGAu8IVjf6rj+J84to8BD7td61tgqGP9PGCNY/094CLH+gBgn2P9\nc+BCx3o0YGhWtwnATiASiAH2AGMdn+UAiS3YcylQCWx3lEcd+5cBn7sd9wxwi2M9HjgIRJ3h7zAH\n+MaxPx2oAH4CRAD5bna/Ddzv9ve5y7H+JyDbYWdvoNjXvwddfFe0G0rTk7kcGCOEuMGx3QsYDkwB\n/gUgpSwRQqxrdt6HAI5WwUXAv9ziIGGO5QxglNv+WCFENLAR+LMQ4j3gUyllYbNrTwE+k1I2OO7x\nKXAxSkAELbQgHLTmhvpXM3uvEUL8yrEdDgw8w9/hEpTbDinlCSHEGsfnmUCOlPKoY/tt4F7gL47t\nFY7lbiBGSlkP1AshGoQQvaSU1a3YoAlgtFhoehRCiAzAJqU86Xi
Q/5eUcnWzY65q4zJ1jqUBqJCO\nmEjzWwHnSyktzfY/L4T4ArgK2CiEuFxKeajjlrSbumbbc6WUh5tUtP1/B9HKenNMjqXdbR1Aop8Z\nQYuOWWj8ncaHmhAiBXgNeNmx6xvgXiFEqOPz4W5v//McsYtUYGpLF5ZS1gDHhBDz3O5xjmN1FfCA\n2/6xjmWGlHKvlPIFlLtnZLPLbgCuF0JEOlouc4DvOmX56XwD3O9Wp3Fu+1v6O3wH3OSIaaQD0xzH\nHwQGOYQX4KdAlofqqAlQ9FuCxt+JFEJsR7lcLMA7Uso/Oz57AxgMbHe8XZcC1wOfAJcBe4EC4Eeg\nynFO8zTLtwJ/E0I8hvp/+ADYhRKKvwohdgIhqAfvvcCDQohpgM1x/ZXuF5NS7hBCvIUSEgm8LqXc\n1cq926L58U8DLwkhdqFE9BhwbWt/BynlZ0II598hH9jkqKNJCLEI+FgIEeKo69/bUUedojqIEVLq\n718TeAghYqSUdUKIJGAzMFlKWerremk0PRXdstAEKl8IIRJQAeuntFBoNF1Dtyw0Go1G0yY6wK3R\naDSaNtFiodFoNJo20WLRAo6uhtuFEJ/7ui7eRAjxkFAJ+nYJId4TQoT7uk7eQAjxgCP9xm5n+o1A\nJZBtdaQ6KXH0BnPuSxRCrBJCHBQqtUu8L+voKVqx9RwhxCZHipblQojY7qyTFouWeQDY5+tKeBMh\nRF/gv4AJUspzUJ0d5vu2Vp5HCHEWcCcwERgHXO02viCgCAJblwFXNNv3CPCtlDITWAs82u218g4t\n2foG8Gsp5VjgM+DX3VkhLRbNEEL0B65EfTGBTggQ4xjMFQ20lA21pzMK2CylNEmVtfU7VH6kQCSg\nbZVSfo/Kb+XOdah0JTiW13drpbxEK7YOd+wHldNsbnfWSYvF6fwZ+BUBPgBJqjTZ/4sarFUIVEop\nv/VtrbzCHuBih7siGvUiMMDHdfIWwWSrkz5SyhIAKWUxbpl1A5C9zkzDwI1A/+68uRYLNxy5dEqk\nlNmcOelbj8cxBuE6VHbSvqhEebf4tlaeR0p5AHgeWA18BexAjb4OOILJ1jMQyC95dwC/EEJsRWU0\nNnfnzbVYNGUycK0QIgeVrXOaEOIdH9fJW8xAZR4td7gsPkVlYA04pJTLpJQTpZRTUanAvZn4z6cE\nk60OShz5vxBCpKFSnQQkUspDUsorpJSTUGlpjrZ1jifRYuGGlPI3UsqBUsoMVLB3rZRyga/r5SXy\ngQscCe8EMB3Y7+M6eQVHAkKEEANRif3e922NvEcQ2Nq8xf85sNCxfjuwvLsr5EWa2Or23RqAx4C/\ndWdldLqPIEVKuUUI8THKVWFxLF/3ba28xieOHFEW4N4An48hYG0VQryPyiCcLITIBx4HnkPNR3IH\nkIfy5fd4WrE1TgjxC5Sr7VMp5VvdWied7kOj0Wg0beFzN1RLg0+afX6pEKLSMUhuuyOVtEaj0Wi6\nEX9wQy1DTWZzpkBya1NOajQajaYb8HnLopXBJ80J2C6sGo1G0xPwuVi0kwuFENlCiC+FEKN9XRmN\nRqMJNvzBDdUWPwIDpZT1QojZwL+BET6uk0aj0QQVfi8WUspat/WVQohXhRBJUsry5scKIXTXLo1G\no+kgUso2Xf3+4oZqNbWGc3SmY/08VHff04TCiZTSI+Xxxx/32LX8uQSLndrWwCzBYmd7bV25ciWZ\nmZkMHz6c5557rl3XbS8+b1m0MvgkHJBSyteBeUKIe1CDjIzATd1Rr9zc3O64jc8JFjtB2xqIBIud\n0Latdrud++67jzVr1tC3b18mTZrEddddx8iRIz1yf5+LhZTyjMnrpJR/Bf7aTdXRaDSaHsmWLVsY\nPnw4gwYNAmD+/PksX77cY2LhL24ov2PhwoW+rkK3ECx2grY1EAkWO6FtWwsLCxkwwJWRvn///hQW\nFnrs/losWmHq1Km+rkK3ECx
2grY1EAkWO8H3tmqxaIWsrCxfV6FbCBY7QdsaiASLndC2rf369SM/\nP79x+/jx4/Tr189j99diodFoNAHApEmTOHLkCHl5eZjNZj744AOuvdZzWZICKuusEEIGkj0ajabn\nYTJBdTVUVbmKc7v5svm+zz6DYcM6f++vv/6aBx54ALvdzp133skjjzzS5jlCCGQ7xllosdBoNBoH\nUoLRCOXlcOqUWlZUqFJZ2VQAWnv4Swnx8dCrl2vpvh4f7yru2716QWYmRER0r81aLLpIVlaWzwNK\n3UGw2Ana1kCkNTulhJoaOHECiotdpaTEJQLV1eqY2lpVamrUwz4kBJKSIDnZVRISVHF/0MfHQ1zc\n6Q/9yEgQXkh96q3vtL1i4fNxFhqNRtMRjEY4flyVVatg0yYlCs5SXKyWBgOkp0NammuZmgoDByoB\niI+H2FiIiVEP/dhYta+73+x7CrplodFo/IrKSjh6FI4cgbw8KCiA/Hy1LChQLYC+fWHAAFX69VPb\n7sKQnq4e/pq20W4ojUbjl0gJZWVKDFoqJpMK8g4dCoMHK0EYONAlDikpqtWg8QxaLLpIsPt8AxFt\na/chpXIFOQXA2VJwrgsBw4crQRg2zFWGDlWuovb6/H1tZ3eiYxYajaZHYrOpuIF7q8ApCkePKjfQ\n0KEuUbjuOtd2UpKva6/pKLplodFoWsVigdzcllsHubnQu7erReAUAud2XJyva69pD9oNpdFo2o3T\nbZSdDTt2qGV2tgos9+vX1E3kFIWMDIiK8nXNNV1Fi0UXCRZfaLDYCdpWd+x22L0bvv0W1q6FbduU\nW2n8eBg3Ti3HjlWiEB7effXuKPo77To6ZqHRaJpw7JgShzVrlEAkJMD06bBoEfz976oF4Y3BZJrA\nQLcsNJoApbRUicKaNaoYjUocnGXgQF/XUOMPaDeURhNk1NTAd9+5xCEvDy69VAnDjBkwapRuOWhO\np71ioYe2tEKw5MkPFjsh8Gw1m2HDBnjiCZgyRY1afvFF1S317ruzOHUKli+H+++H0aMDUygC7Ts9\nE762VccsNJoegt0Ou3a5Wg4bN6oA9PTp8LvfKcGIjlbHZmVBqP7v1ngQ7YbSaPyYnJymQemkJFfM\nYdo0PbhN03V0zEKj6YE4g9JOgWhocMUcpk9XuZE0Gk+iYxZdxNf+we4iWOwE/7S1tha++gp++Us1\nrmHECPjnP+Gcc+CLL6CoCP7v/2Dhwo4JhT/a6g2CxU7wva3aq6nRdCNmM2ze7Io7ZGfDxImq5fD3\nv6t1HWvQ+CPaDaXReBH3kdJr1sD336vWg9O1NHmyKyit0fgCHbPQaHxETo4SBmcqjcREV8xBB6U1\n/oaOWXQRX/sHu4tgsRO8Z2tpKXzwASxeDEOGwEUXwfr1MGsW/PgjHDoEr74Kc+d2n1AEy/caLHaC\n723V3lGNpoPU1qqR0k7XUl4eXHKJaj08+GDgDoDTBDc+d0MJId4ErgZKpJTntHLMX4DZQB2wUEqZ\n3cpx2g2l8ThWK2zZAqtWKXHYsQMmTXLFHXRQWtOT6TExCyHEFKAWeKclsRBCzAbuk1JeJYQ4H1gq\npbyglWtpsdB4hKIi+OYbWLlStSAGDoTLL1fi4D5SWqPp6fSYmIWU8nug4gyHXAe84zh2MxAvhEj1\ndr187R/sLoLFTjizrRaLijM88oiaz+Hss5VQzJ4Ne/aoLq4vvKAEoycIRbB8r8FiJ/je1p7QeO4H\nFLhtFzr2lfimOppA4cQJNSDuyy9Vr6Vhw5Q4/PWvcP752rWk0bjjczcUgBBiELCiFTfUCuAPUspN\nju1vgV9LKbe3cKx2Q2laRUoVb/jiC1ixQs0lfcUVcNVVatmnj69rqNF0P4E0U14h4J7ooL9jX4ss\nXLiQwYMHA5CQkMC4ceMapyJ0NuP0dvBsNzSAxTKVL76ATz7JIjISbrppKi+8ADZbFqGh/lVfv
a23\nvb3tXM/NzaUj+EvLYjCqZTGmhc+uBH7hCHBfALzUHQHurCCZ2zcQ7TxxQrUePv9cxSHOPReuvhr6\n9Mnipz+d6uvqdQuB+L22RLDYCXoOboQQ7wNTgWQhRD7wOBAOSCnl61LKr4QQVwohjqC6zi7yXW01\n/oiUsHevEofly9UguFmz4JZb4J131AhqUHM8aDSazuEXLQtPoWMWwYPFomaJ+/xzVex2uPZaVS65\nBMLDfV1DjaZn0GNaFhpNe6mvV2MfPvlE9WIaOlSJw7//DWPG6FHTGo038fk4C38lK0h8Fv5uZ02N\nyrt0ww1qjumXX4YLL1SZXLduhf/5HzX3Q3uEwt9t9STBYmuw2Am+t1W3LDR+R0WFci198omKM0ye\nrJLwvfoqpKT4unYaTXCiYxYav+DkSeVO+uQT2LQJLrtMCcQ110BCgq9rp9EELj0mN5Qn0WLRsygs\nhM8+UwKxY4caGDd3Llx5JcTG+rp2Gk1w0GNyQ/krvvYPdhfdbWduLvzv/6o5H8aMUVOMPvigGhvx\n4Ydw443eE4pg+U4heGwNFjvB97bqmIXG6xw8qFoPn3wC+flw3XUqMD19uu7iqtF4iuPHj7NgwQJK\nSkowGAwsXryY+++/32PX124ojceRUmVqdQpEWRnMmaNcTJdcohP0aTTeoLi4mOLiYsaNG0dtbS3n\nnnsuy5cvZ+TIkWc8T4+z0HQrUqopRJ0CYTIpcfjb31RXV4N2eGo0XiUtLY20tDQAYmNjGTVqFIWF\nhW2KRXvR/8Kt4Gv/YHfRFTvtdti4EX75SzX39C23qP3vv69iE3/6k+r26i9CESzfKQSPrcFiJ3TM\n1tzcXLKzszn//PM9dn/dstB0CKtVzT/9ySeqJ1NysmpBrFihJgzSo6g1Gt9SW1vLvHnzWLp0KbEe\n7C2iYxaadrF7t3IpffQRDBqkBGLuXBgxwtc102g0TqxWK1dffTWzZ8/mgQceaNc5Omah6TImk2pB\nvPqqcistXqxSbDimC9FoNJ7E+aLbheb5HXfcwejRo9stFB3BT7zJ/kew+EJbsvPYMTUX9cCBsGyZ\nikkcOwaPP96zhSJYvlMIHlt7jJ1SqjQF27erPPqvvKL+yW65BS6+WP1jRUbCgQOtXqItWzdu3Mh7\n773H2rVrGT9+PBMmTODrr7/2mAm6ZaEBwGaDlSvhtdfUQLkFC1QKcO1m0mjaQXW1GkRUUHB6yc+H\n48chKgoGDHCVgQPVpO/O7b591TGdZPLkydhsNg8a1RQdswhySkvhzTfh73+H1FS45x646aYu/WY1\nmsBCSiguVpO25+Soh39zYbDbm4qAuyg4S0yMry1pEZ0bSnNGzGZ47jn4859VoPqee9T0oxpNUCKl\nGj16+LCaavHwYZV64PBhJRLR0TBsGGRkqB4ezQUhPr7HdgXUYtFFAnlu323b4I471G/89tuzuPHG\nqb6uUrcQyN9pc4LF1g7b2dCgHv4HD7rKoUNqabcrv+vw4WqZmanWhw+HXr28ZkN7aY+tgwcPJj4+\nHoPBQFhYGFu2bGnzuro3lOY0jEZ44gl46y2VzO/WW2H9el/XSqPxAtXVKli8bx/s36/Kvn0qdjB4\nsBKCkSNV/pnFi9V27949tnXgxGAwkJWVRaJz4nkPolsWQcKGDfCzn8HYsWq2udRUX9dIo+kizljC\nwYNKGJyisH+/cillZsLo0TBqlGs5bBiEhfm65l5jyJAhbNu2jeTk5Hafo91QGkBNS/roo2q09Suv\nqIR+Gk2PoqFBuYoOHDjdfRQe7moljBqllqNHq7iCv+SZ6UYyMjJISEggJCSEu+66i8WLF7d5jnZD\ndZFA8PmuWgV33QXTpqkssC21TAPBzvaibe0BlJeroNrWrars2aNcRxkZSggyM2HGDPjFLyAzk6xd\nu3qmnZ2gPd/pxo0bSU9P5+TJk8ycOZNRo0YxZcoUj9xfi
0UAUlGhBtKtW6e6xF5xha9rpNG0QH29\nmiJxyxaXOBQXq255kyapAWtjxgS868iTpKenA5CSksKcOXPYsmWLx8RCu6ECjM8+g/vuU+6mP/wB\n4uJ8XSONBrBYYO9eJQhOcTh0SLmMzjtPicN556nWQ0iIr2vbI6mvr8dutxMbG0tdXR2XX345jz/+\nOJdffvkZz9NuqCCjtBT+67/Ui9oHH6gMAhqNT7DbVfdUZ2thyxbYtUuNTXCKwuLFqrdFRISva+t1\npJTYbDWYzScwmYowm0+4rRdhMp3AbC7GYill/PiNxMSM6tR9SkpKmDNnDkIIrFYrt956a5tC0RF0\ny6IVeorPV0o1f8QvfwkLF6qusR0Zfd1T7PQE2lYvUVjYtMWwbZsal+DeYpgwwStjFXz5nSoRqHZ7\n6DcXAuf6CUASEdGX8PC+hIenO9bT3dZTCQtLJSwsCSFaDsx7y1bdsggCjh+Hn/9cZR748kuYONHX\nNdIEPBUVSgzc4wwmk0sYHnxQLfv08XVNu4TdbsZkKsJkOt5YzObC04RBiFDCw/sSEZHu9vDvT1zc\nJMd+JQqhoT3fH6xbFj2UDRtg3jzVKeSRR1QPQo3GoxiNyq/p3mo4cQLGj1fi4BSIwYN75GA2KSUm\nUz51dXupq9tDff1+jMajGI1HsVhOEh6eRkREf0fpR3h4PyIi+rm1CvoSGuq5yYV8hR5nEcB8+inc\nfTe89x540CWpCXYKCmDTJvjPf9Ry7141dsHpSpo0SW33sAC0lDaMxmPU1+9vLHV1+6iv30dISC9i\nYs4iJuYsoqNHExU1nKioDCIi+iFEz7Kzs/QYsRBCzAJeQs2t8aaU8vlmn18KLAdyHLs+lVI+3cq1\nAj5m8de/wjPPwBdfKDdwV/FXO72BttUNKdXAtqwsVTZuVO6kiy5S5cILlV/Tz9MPu9tpszVgNB6k\nrm4/9fUHGoXBaDxMWFgqMTGjiI4eRXT0aMf6aMLCPJ8Ww1sEdcxCqEjOK8B0oAjYKoRYLqVsPgPI\nd1LKa7u9gn6ElPDYY/Cvf8H336sxShpNu5FSdVXNylIDcLKyVE+kadNg1iz1BpKR0WPcSWZzKTU1\n2yku/pi9e/9Kbe1uTKY8IiMziI4eRUzMKHr3vpbo6CVER2cSEuKf6cF7Ej5tWQghLgAel1LOdmw/\nAkj31oWjZfGwlPKadlwvIN1QFosaib13rwpkp6T4ukYav0dK1X3VKQxZWcp9NG2aKlOnwpAhPq5k\n20gpMZuLqKnZTk3Nj9TWbqemZjt2ex2xsROIi5tAbOx4YmLGEB2dicGgg3cdpUe0LIB+QIHb9nHg\nvBaOu1AIkQ0UAr+SUu7rjsr5A7W1cMMNKs3NunV+O3+KxtdIqSbmcRcHUMIwfTo8/bQSBz9vOZjN\nJVRXb6WmZis1NduoqfkRsBEbey5xcRNIS7udYcOWEhk5GOHntgQavhaL9vAjMFBKWS+EmA38G2h1\nss+FCxcy2DFRdEJCAuPGjWv08znnsG3Ptvt8t5053xPbn32WxSOPwJQpU/nb32DjRs/fLzs7mwcf\nfNAn9nX39ksvvdTp34PfbUtJ1j//CdnZTD1xArKyyDIaYfx4pt50E1mzZ6tpOoXwj/q2sP3ttyuo\nrz/I2LEWamq2sn79Buz2Bi655ELi4iaSk3MB0dG3M3PmDQgh3M4f0ng9/ft1ff7uu+/y1FNPERcX\nh5SSw4cPs2jRIl5++eUmxzvXc3Nz6Qj+4IZ6Qko5y7F9mhuqhXOOAedKKctb+CxgAtx2O1x2meqA\n8sIL3nsh9LWd3UmPtzU3t2nMwWxu6lYaNqzxh+JvtlqttdTW7nC0GFSrwWwudriSJhIXN4levSYR\nGZnRoRaDv9npTTpiq91up3///mzevJkBAwac8dge0RtKqL5pB1EB7hPAFuBmKeV+t2NSpZQljvXz\ngI+klINbuV7AxCyWL
lXB7PXre1xPRY2nyMtzuZTWrVOpuqdOdYnDiBF+6Vay203U1u50uJG2Ul29\nlYaGY8TEnN1EGKKjRwZN99TuZtWqVfz+979nw4YNbR7bI2IWUkqbEOI+YBWurrP7hRB3q4/l68A8\nIcQ9gAUwAjf5rsbdw4ED8Pvfww8/aKEIKgoKmrYc6uqUKEydCkuWqPTcfiYOdruV+vp9jaJQU7ON\n+vp9REUNd4jCBfTr91/ExJytg8/dyIcffsjNN9/s0Wv6fJyFJwkEN5TVCpMnw+23w733ev9+uhnv\nQwoLmwakq6qathxGjeq0OHjDVintGI2HqanZ1hiErq3d6UhvMZFevSYRFzeJ2NhxhIREe/TereF3\n36kXaa+tFouFvn37sm/fPlLa0XWyR7QsNKfzwgsq39rPf+7rmmg8TlFRU7dSRYWr5fDgg3DWWX7T\ncnCmwmjeMyk0NKFRFHr3/j1xcecSGhrv6+pq3Fi5ciXnnntuu4SiI+iWhR+xc6eaBOzHH1U2Z00P\nx9FLqbGcOgWXXuoSiLPP9pupP02mYjdRUEswNAqDKhMJD9eDfPydm2++mVmzZnH77be36/geEeD2\nND1ZLMxm1fPpoYdUqnFND6S4WPVIcLYcSkubisOYMX4hDhZLmUMUtjW6lOx2oyP4PLFRGFR+JP9o\n6fRkpITqavVzKClpunSunzwJZWWwfDkMHdr5e9XX1zNo0CBycnKIa+fMZ1osukh3+0J/+1s13fC/\n/929ngjt8+0CpaVNWw4nTsAll7jiDmPG+KyHgtNWq7XKMfp5a6M4WCxlxMVNcBOGczvcZdVf8NXv\n12xWD3j3B35L684SHq6ytqemNl0611NSoHdv1cGttfmgvGWrjln0IPbtg9dfh927/cZlrWmJkydV\ny8EZlC4qUlMSTp0KP/uZmvnNh93XbLY6amuzqa7eSm7ul2zefDcmUyGxsWOJi5tEcvK1DB78FNHR\nI1qdYCdYcb79nzzZtLgLwIkTqvFYUgI1NeoBn5LSVABSUlS/hOZC4Of5GNtFu1oWQojJQLaUsk4I\ncRswAVgqpczzdgU7Qk91Q111lYpVPPSQr2uiaYLVqvovf/WVKrm5MGWKq7fSuHE+E4emYxlUnMFo\nPOoYy3BuY5whOnoUBkNwvxNarapXck6OGrpSUKAmDnM++IuLlSCEhakHfO/eatlcDNLT1XpaGiQl\n+YVH0SN41A0lhNgFjAXOAd4C3gBulFJe2sV6ehRfioXN1rnnxrffqp5P+/bpCYz8guJi+PprWLkS\nVq9WE/vMng1XXgnnnw+h3f/gtdst1NXtbRJ8rq/fT3R0plucYSIxMWOCciyDlFBeDkePKkE4dqzp\nsrBQPeQzMtTXOWAA9O/vevg7xSC6e3r7epU777yTL774gtTUVHbt2gVARUUFN910E3l5eQwePJiP\nPvqI+HhXDzZPi8V2KeUEIcTvgEIp5ZvOfZ03y/P4MmaxZAksW6YyLgwd2nQ5bBgkJ5/uYrLZ1JwU\nv/sdzJ3rkWp3mKCPWdhsahY4Z+shJ0c18668UqXuTk/v1jpKaaO+/kCT4HNd3W4iIwc3CT7Hxo4l\nJKR130Ygfq+1tXD4sJqG49AhVbZuzaKkZCqg/t+GDFHLjAy1npGhehYGwotYe77T77//ntjYWBYs\nWNAoFkuWLCE5OZlf//rXPP/881RUVPDcc881nuPpmEWNEOJR4DbgEsc8FGHtPDcoeO451VX+yBH1\nhnPkiJqg6MgRVeB0ATl4ECIjYc4c39Y96JBSTRH6z3/Chx8qJb/qKvjzn9WkP2Hd89NWg9yONgk+\n19buIDw8rVEYUlJuIDZ2fEDM4dxeqqpg1y7VlXz/fpXR4OBB1XoYPlwFgYcPh5kz1dd1000tv4wF\nI1OmTCEvr2l0YPny5axfvx6A22+/nalTpzYRi/bS3pZFGnALsFVKuUEIMRCYKqV8p8N
39CL+GrNw\nNpOdwnHkiPrxf/wxxMaqnhUjR8I998BPfxoYb0F+yb59SiD++U/lcL75ZlVGjvT6raWUNDTkNXEl\nOQe5Od3pHRUBAAAgAElEQVRIvXpNIjZ2Qo+ava0rSAn5+UoUsrNdpaREdSQbO1aNUxw5UmU6GTAg\ncOIE3iQvL49rrrmmsWWRlJREebkr72rzbU+3LB6SUi5xbkgp84UQZ7W38sGOEOrNJzlZub0BnngC\n5s2D999XvTC2boXnn4ennoJHH4VFi1rvQqfpAHl58MEHSiBOnoT589X2ued69VXUZCpq0mKoqdmG\nEGGNbqQBA/7bMcitj9fq4E+YTKqV4BQEp0BERal+AmPHqhbCs8+qVoPOieY9OttFur1iMRNY0mzf\n7Bb2BQze9PkWFcHLL8P27Wq7Vy81P8306fCf/6h5ap5+WsVBfvYz73a7C0TfNmVlKmXv//2f8mHM\nnQsvvUSWzcbU6dM9fjuz+WSTXkk1Nduw282O0c8T6dv3Hscgt74ev3dr+PJ7PXVKiYGzZGerWENG\nhhKF8eOV12/sWBVY7goB+ftthc7ampqaSklJCampqRQXF9Onk3/0M4qFI9vrvUCGo0eUkzhgU6fu\nqOF//gcWL4ZBg07/7MIL1dSp27YpwXj2Wfjv/1Y9pmJju7+uPQajEVasgPfeU2MgZs1SanvFFS6/\nntvkL53FYqls0lqoqdmK1VrV6EpKTV3AsGF/ITJyUI8c5NYR7HYVn3NvKWRnqzEIY8eqFsMll8D9\n9yt3UmSkr2scHEgpcXfHX3vttbz11lssWbKEt99+m+uuu65T1z1jzEIIEQ8kAn8AHnH7qKalyYd8\njb/GLNzZuVM9vw4ehPh25F/buROeeUY95x54AO67r33nBQU2mxog9957auj7xIlw222qx0CvXl2+\nvJQ26ur2UFW1iaqqjdTUbHZM2DO+Sc+kqKihAT/Irb5eDRp1F4bdu5Vr1elGcrYaBg/WweYzYbTZ\nOGE2c8Jspshk4oTZTLFj+9khQ0jvgv/5lltuISsri7KyMlJTU3nyySe5/vrrueGGGygoKGDQoEF8\n9NFHJCQkNJ7j8XQfjomKUnFrjUgp8ztujvfwd7GQEi6/HK6/Hn7xi46du38//OEPqnfnvfcq4UhO\n9k49/Z6dO+Gdd1TsIS0Nbr1VxSL6ds3NY7VWU129mepqJQ7V1ZsJD08nPn4y8fEX0avXBUExYU9N\njWrZbtkCO3YoYcjPV4FmpzCMGwfnnAOJwRGLbxe1VmujCLgLgft6kcmE0W4nPTyc9IgI0sPD6Rse\nTpqjzE1JIbGbeuM58fQ4i/uAJ4ASwO7YLaWU53Slkp7G33NDffUV/PKX6o2ss7+Ho0eVaHz6qYpn\nPPhg156RPcbnW1amWhDLlqmuZbfdpsqoUe2+RHNbzeaTVFaupbJyPVVVmzAajxAXd65DGC6iV68L\nCQ/v7QVjvE97v1e7HfbuVbGyzZtVOXZMCcL556txQOPGKaHo5mdYu/D271dKSaW7CJhMFLu1BIrc\nhMAiZePDPz0iQi2biUJ6RARJoaGdclG2x9alS5fyxhtvALB48WLuv//+Nq/r6d5QDwKZUsqydh6v\naYbVCg8/DH/8Y9f+6YYOhTfeUAP5/vhH5Qu+5hqVKmT8eM/V1y+wWmHVKiUQq1bB1Vcroy+7rFN9\nKG02I2VlK6mo+JaKijU0NOSSkHAJCQnTSEtbSGzsuIAfAS2livmvW+dKcZWQoCbcOu881Wo95xz/\nFAZvUGO1ctRo5GhDA0eNRo6bTBw3mShyE4VIg4E0x4M+zSEAqeHhjI2NJT08nH4OMYjvpAh4ir17\n9/L666+TmZnJvn37+M1vfkP//v35yU9+4pHrt7dlsQ6YKaW0euSuXsKf3VB/+xt89BGsWeNZf255\nOfzjH6p31YgRquVy5ZU9vD/6wYNKIN59V+VlWLR
IuZnc/KztwW63UF29mcrKNVRUfEtNzQ7i4iaS\nmDiDxMTpxMVNCvi8SVKq1qhTHNatU/H+yy5TKa6mTVPjFwIZKSXHTSb21NWxt66OA/X1HKiv54jR\nSLXNRkZkJMOiohgaFcXAyEj6hYfT1yEMaeHhRPeQfrwff/wxv/nNb3j00UdZtGgRTz31FAaDgcce\ne+yM53naDfUmkAl8CZic+6WUf2rz5G7EX8Wiulo9yFeu9N7bv9mseov+6U8qLcKDD6qpWXtMvpvq\naqWmy5app9tttymROKv9w3mklNTX76eiYjXl5aupqtpAVNQwEhOnk5g4g/j4Kd023acvyctzCcPa\ntaofgLs4DBkSuAHocouFPXV1jWW3YxkhBGNiYzkrOppRMTFkRkUxIjqa9PDwgOm1tm3bNi666CJK\nSkqIiIhgxowZTJo0iaVLl57xPE+7ofIdJdxRAh5P+kL/8AfVk9ObbqLwcBXnveUW2LBBicbvfgd3\n3aWC6a3FNXwas7DbVcrvZcvg88/Vk2zJEpW4r51+ELO5hIqKbykvX01FxbcIEUpS0kzS0hYwcuRb\nTWIOPSY+00GKipq2HGpqYPToLObPn8pvfqNeVALkedgEs93O37/6iroxY9hWU8O2mhrKrVbOjolp\nLDekpHB2TAwpAZAWoa3fb1hYGP3792f48OGYTCYGDhyIJ1+e2yUWUsonAYQQ0VLKeo/dPQjIy1Nz\nVeza1faxnkAI1bf9kkvUQKilS9Xsnc64xrhx3VOPM5KXB2+/DW+9pQaPLFoEL77YrhFaNlsDVVXf\nUV7+DRUVqzGZCkhImEpi4kwGDfotUVHDAuZNsTVOnlSxhrVrm07Id9llrqm8169XWdQDCZPdzpbq\natZXVpJVWcnmmhrSCgu5KjOTuSkpPJuRwbCoKAwB/v23Rn19Pbm5uQwfPpzw8HDKy8s5evSox67f\nXjfUhcCbQKyUcqAQYixwt5TyXo/VxAP4oxvq1ltVUPqpp3xXB/e4RmamEo1uj2sYjaoL17Jlqj/m\n/PlKJNqRdsNozKG8fCVlZSupqvqOmJgxJCVdQWLizKCIO1RUuOZcWrdOdWOdMsXlWjrnnMBMj2F2\niENWZSXrKivZUlNDZlQUUxMSuDQhgYvj40noaZF4KdX/Qn091NWpUl/vKlOmdHr0bUlJCUOHDiUm\nJobExESOHTvGhAkTGDRoEIcOHQJUuvLExES2O9NH4PmYxWZgHvC5lHK8Y98eKeXZnbLKS/ibWGzd\nCtddp1Ip+8Po6+ZxjYceggULvBjXkFJ11l+2TMUjzjtPCcR1151xOK+r9bCSsrKvsFqrSEqaRXLy\nbBITZxIWluSlCvsHNTXKleiMORw+rEb2T5umBGLCBJ9Mq+F1pJQcqK9nVUUFq8rL2VBVxfCoKKYl\nJDAtMZEp8fHEd6fhUqp/lIoKVcrLobJS7XMW94e+88HffN19aTQqn3F0NMTENC5lVBSWsDDEP/5B\n2LBhna5yTEwMvXv3prS0lOjoaK666ireeceV7/Xhhx8mISGhSdDb42IhpTxfCLHDTSx2SinHdsoi\nL+FP4yykVK6BBQvUeAh/QkpXXGPVqizGjp3K2WerTJ/OZUpKF25QXKx6Mi1bBhaLEogFC1TPplZo\nufUwm+Tk2cTGjvfICGl/jVnU18PGja6Ww+7dMGmSSxzOO6/jmYj91dbmnDKb+baigtUVFayqqEAA\nVyQlcXliIpclJpLcRsuhVTulVNkLa2pU54nqarVeVaVKZaWrOMXAfV9lpTonMlKNPExMVNPjJSRA\nXJx6+4uJcZVmD//GZUwM5tBQSmpqKKyspKCsjOMnTlBUVERhYWFjKSoqIiIigs2bNzNixIiO2erG\n9ddfz/LlywEYMWIEW7ZsaTLR0cCBA1m3bh1Dhw5t3OfpAHeBEOIiQAohwoAHgP3tPDco+fe/1e9t\n0SJf1+R03OM
aK1ao3/+ePeoh9a9/qWVkJKcJyOjRZ2ghmc0qqdWyZUqJ5sxRwZrJk1t0MzVtPazE\naq0kKWkWaWk/ZdSodwK69WAyqdlanTGH7dtVLGnaNJUL7IILAmPO5pYw2+1sqqpqbD0cNhq5NCGB\nyxMT+XV6OiOsVkRNjcpTfviwesDX1rqW7us1NWqyqqgolyjU1LjWDQaV9iUurukyIUHlzElMVNPk\nZWaq9YQE19J5zBlaMlJKysvLmzz0Cw8cOE0EKisrSU1NpV+/fk3K2LFjG9f79u1LrAfcDw8//DB3\n3XUXP/nJT+jXrx+FhYWNYrFhwwbS0tKaCEVHaG/LojewFJgBCGAV8IC/DdLzFzeU2ayCjH/9q0rv\n0dOQUk1FuXu3S0T27FGDuRIT1cxjjSW0iAF7VjLw+/cZODKa5LvmIm6Y16KqGI3HKC9fSXn5Sior\n13ul9eCPWCzKJekUhy1b1MBzZ8xh8mT/cFN2Cqu16QO62VLW1HDAYmF1VBSrEhL4LiWFkWVlXH7o\nEJfv2sUFe/YQXlGhjrdYXA9058PdWWJjVXGut7TfWZzndjHHf2VlJUePHuXYsWPk5+e32hpoLgLN\nS0pKCiHdFFS68847+eyzz6isrOTJJ58kJiaG6dOn8/Of/5wjR44QFRXFp59+ysSJExvP8XhuqJ6A\nv4jF0qWuaZwDCatVddPM31NNwb9+IH/1QfIr4yjoez75IRnkl0TQ0KBEZMAAGDDARp8+uSQkbCEu\nbhXJyfvJzDybvn1nkJR0ecC2HqxWFcN3isOmTWpmROc4h4sv9oNkkGazavo63TLV1a5lS6UVMcBk\nOu3N/VRqKmtGjmTV0KGs6tcPYTBwRXU1M81mphsMJMfGNhUE5zIqqtv7+FqtVnJycti/fz/79+/n\n4MGDHDx4kEOHDmEymRg6dChDhgxh4MCB9O/fn759+za2BPr160dMTEy31vdMnDp1im3btlFcXMwd\nd9zBlClTeOSRR1i6dCkPPvggixYt4sUXX+TNN99k3bp1jed5xA0lhPi1lPIFIcTLwGlPYSll24lH\neiid9flWVKgssW7fhV/TbjutVkJXr2bg228zcOVKNRbizUVqvmq3t6aTJ/PYu3cTBw7sJyenglOn\nJnDkyARKS6+iqCiO48cFvXo1a504xMW5nprqnZ5a3vLj2+2qa7QzIL1hg7Jn2jS4+241wVWSJ3XR\nanU95J3+d/ftqiqy9uxhalzcafsbj7ValWI1L86HeK9eqhk5aNDpbhz3t//oaMxS8p/qalaVl7Oq\nooKD9fWNrqVfJSaSGR3tte7M7f1OpZQUFBSwc+dOdu3axa5du9i3bx9HjhwhLS2N0aNHM2rUKCZP\nnsyiRYsYMWIEqampXqt3nc1GjtHIUaORg0Yj++rqOFxfz1ujRjGilR4nbdl64sQJfvazn1FaWoqU\nkqqqKmbMmMHLL7/M999/z6hRowgJCaFfv36dqnNbMQtnXGJbp67eDoQQs4CXAAPwppTy+RaO+Qtq\nsqU6YKGUMttb9ekqTz+t3PUdGHjsv0ipMsu9957qzTR4sApUv/pq49PPbjdRWb620b1ksZSTmnoF\no0bNdrQemqbGtdvVuID8/Kbl++/V8IuCAvUs69//dEFxFxZfum2kVFmAnS2HrCzo3VuJw09/Cm++\neYYhI3a76y2+pYd9a+vu+xoamvre4+ObrsfHq+9nwoTTxcB5XBfe4qWUHHT2Wjp6lA1VVYyIjmZm\nYiIvDh3Khb16Ee7DfDM2m42DBw+yY8cOduzYQXZ2Njt27CA8PJyxY8dyzjnncPXVV7NkyRIyMzM9\n2jow2e2UWSycNJs51tDAPkdakbyGBk6YzZyyWKiyWrFISagQSMAuJTEhISSHhdFgs3X63snJyRQW\nFjZuHzlyhHvvvZfi4mKeffZZQAnO5Z30jfvUDSWUk/oQMB0oArYC86WUB9yOm
Q3cJ6W8SghxPrBU\nSnlBK9fzqRvq6FGVqXPvXvV23GM5eFAJxPvvq1d859Dw4cMBMBpzHeLwlSP2cDZJSbNJSppNXNyE\nLscejEYlGgUFp4tKfr7aHxmpBKV5GTDAtR4X54k/hnoJP3gQNn4v2biqjl3fVZISXsXU8VVcMKqK\ncYMrSQpp4wHvXK+rU71kWnvIt/Rgb74eG9vt7poyi4U1jqD0qooKAC5PTOTypCSmt6PXkrcwm83s\n3r2bHTt2sH37drZv387u3btJT09n/PjxjWXcuHGkp6d3+Po2KSk2myl0JBd0PvBPWSyctFgodCQc\nLLNYqLHZsEhJiBDYpUQAcSEhJIWFkRYeTv+ICDIiIxkZHc3w6Gj6hIXROyysMQGh89nV2dZMUVER\nmZmZzJs3j7feeourr76aBx54gDvvvBOr1UpeXh7z5s1j+/bt5Oe7ZpfwdNfZ1cANUspKx3Yi8IGU\n8opOWeW67gXA41LK2Y7tR1Cpz593O+ZvwDop5YeO7f3AVCllSQvX86lY3HCD6tXy29/6rAqdp7hY\nzQ/xf/8Hx49ju2UeppsuwzQ8ngZTASZTPg0NeVRXb8JiKScp6QqHQJzeevA2Uqou78ePK+E4frxp\nce4LDVWi0a+fY9lX0j/FRP/4GvrFVNI/8hTJ8hSiytVd0l5eSWVuJdUFlZhKKpEVlYTXV5IoKukl\nq7CHRyIS4glNauFB357tuDi/z/IopaTEbGZ3XR1ZlZWNrqVL4uO5PCmJmYmJjPSia6k1LBYL+/bt\nY9u2bY1l7969DB06lAkTJjBhwgTGjx/P2LFjm3QXbQuT3c6B+nr21tVx2OEaym1oIL+hgSKzmfjQ\nUBJCQoh0fG9Gu51qm41Kq5VYg4G+4SH0CxX0D7WTZjCTTD2x9mqkpZoacw01phpqzDXUmmsbt1ta\nr7fUs/ue3YxOGd3pv1Hv3r0pK1P9jqKjo3nllVe48847SUhIoKqqihdeeIFHH30Us9nceI6nxSJb\nSjmu2b7GMRedRQgxF7hCSnmXY/s24Dz3WIgQYgXwBynlJsf2t8CvpZTbW7iez8ZZLPn5TSSGNtDZ\nfx8hBGFhp3sFq0/2wVTb9VnfWiPvZD6Deg8ApHpbFbLRBsd7jqO41mWnrew6wlErgwQh7RgAJBiQ\nIKXaj0RIsNsN2KyhWO2h2Gyh5JQXkN4rE7M9FIs9HJstFIkBg8GKwWAjRNgxGKyEGOwYDDYMoTYM\nITYMIVYMoTZEiB2DwY5rShf/Je9kPoNSBrbvYAFgQAoD0mBAqr8qAjvCbsNgtyFk590jnUaCtNuR\nNjt2mw273YYwGAgJCcUQYsAQGkJe2XEGpw5Ghtixh0rshjP//9tFKA2hUdSHxVAVmUhldCL1EbFE\nm2rpZawiylhFREMNoQ11GEz1hFjMhEiJQQgMgE1YKI22URxWjdF2igiDgejQaKLCoogKjSIyLJKo\n0KjGbff1yNBI1/5mx0fHRhOXHkevs3phiGj5ZaKtZ1JlZSXTp0/nkksu4aWXXmqcIW/BggWEh4dj\nMpkwGAykpKRQVFTUeJ6nx1nYhBADnTPjCSEG0ULA2x9YuHAhgwcPBiAhIYFx48Y1/oGzHHMwe2N7\nQLSRautBAMYMV/6P3YdrurS950g1saMLOKvyak6tvp6DFSrPy4gYNXD+UN2eLm+bjBEMqDzfY9dr\n77bo5vsBHD6Vg6iuY0yTz8Xpx0c7t/diA4bGjG2nPbtdn4vWjpfdYq+xLhqjw9PQteuFdtv3097f\nyxC3z+uNEaSfnHja8bKV6wkpGRM5nDCrhZyqnUSYGzif/gi7JNuagwTGhWYgZRzZtpMAjAvJQAI7\nbUcxSMGN9mFEWuE/4himcMHo2JHYoqPYJQoQUdGcm34hEaER7K3cS2hIKOf1Pg+Arae2YsbMiN4j\nGrcBJiZPxGg08uWxLxny+yFccaty2DR/3
mRnZzfZbv750qVLKS4uZtmyZQCsXbuWvLw8+vfvj9Fo\nxGKxNI4NeeKJJ8jNzaUjtLdlMQt4HViPeg+5GLhLSvlNh+52+nUvAJ6QUs5ybLfHDXUAuNQf3VBd\n4ciRI0yZMoUPP/yQSy+9tHF/Q0MBOTmPUlmZRUbGs6Sm3uYf4xGkVN0nnWkQnKNgz7TtXNbUKHdM\nUpJrZKz7KNkzLWNifJpC1T0DhPvg3+altc+EcJnqLM6xYK1tO/f5IFTR47BUWKjbW0fdHlXq99ZT\nu7sW7BBzdgwxZ8UQNTyKqOFRRA6JJLRvA5awAhoajtHQkIvJVITZXIjZXILZXIzZXIzNVkd4eCoW\nQyqlllhKTGEU1pgx5pYTerSM+Pxq+hbVM+yUZOQpiLLA0d4GDvSG/Ul2jveLpXJwGraMDFKSB5Ae\nl056bDrpcen0jetLemw6qbGphId0LTPuli1buPPOO1m8eDEPPPAAt99+O5MmTeLLL7/knHPO4bXX\nXuOtt95i/vz5FBUVkeyYl9kbc3D3BpyB5R+klKc6a5TbNUOAg6gA9wlgC3CzlHK/2zFXAr9wBLgv\nAF7y1wB3V1mzZg233XYbmzZtYsiQIU0+q6r6gSNHHgBg2LCXiI+/0PMVsNvh1Ck4cUINqDhx4vRy\n6pQrR05U1OkP+9a23ffFxwdm5rs2kFJ1ZOqIuLjvN5tbF5K2xKZXL78PlXgNKSWWUosSkH11GA8b\nMR420pDXQEN+AyJEEDkwkoiBEUQMiCCifwSRAyLV+oAIwtIl1pBSzOYTmM0nsFhOYTafxGotw2I5\nhcVShsVykvL6UgpqTlJx0oQhP5LIPEGv4zb6FFkZWGxlUAWU9BIUpIZRkB5BXlo4h3sb+DHewp6Q\nKqLDY9l21zZGJLec7qM9PPnkk7z66quUlpayYMEC3njjDe655x6WL19OZmYmI0aM4MMPP6Surq7x\nHI+IhRBipJTygBBiQkuftxQ36CiOVstSXF1nnxNC3K0uL193HPMKMAvVdXZRa/f1p9xQneXll1/m\nH//4B5s2bTpt+L+UdkpK3icn5xESEi4hI+N5IiM7Mc1ZQ4MaRrxhA1krVzLVZFJCUFqqnirp6WoC\njPT000tKiitHTg/L+NlT8iW1htncfmHJyckCpjbuq6tzDZ1oSUzcs1y0VM6Q99GndD2Hm8RaYcVU\nYKIhvwFTgUmtFzjWj5swFZoIiQohLDWM8D7hhCaFEpYYRmhCKCHxIYTGhxKa4CjxoRjibRBXhT26\nEhlRiVUqUSmryufkvj0Y9+UijpQSnVdDcqGZASfsCDscToaoD97g7Cl3dtrWmJgY6utds0hMmTKF\n7Oxs6urqkFJiMBiYOXMmX3/9deMxnopZ/BK4C/jfFj6TwGVt3aAtpJRfo2bhc9/392bb93X1Pj2F\n++67j127drFgwQI+/vhjDG6vg0IYSEu7jZSUOeTnv8C2beMZPvwVUlPnn/mi1dVqGPGGDfDdd2p4\n8ejRaijxjBlqgF16OqSldTxjnabbCA9XXbLb0y07K6vpfBbOcXytiU1lJeTmuj5zLp3nGAyuDl0t\nlTN9lpDgk8HZ7UIIQVhSGGFJYcSObXnwjpQSa7kVc4kZc4kZa4VVlSpVGvIasO60YquyYamwYKuy\nqc8qrVirQzBEpBGa0J/Q+IkkJtxIikNcQkaFEHphKNXxoYSGlJBav5PeQ67pkj3z589n+fLl1NbW\n0tDQ0OSzYcOGkZOTw/vvv9+pa7fVsrhBSvkvIUSGlDKnU3foRnq6G8qJ2Wxm+vTpXHbZZTz55JOt\nHldbu4vdu68hPf1nDBr0mKsrY2mpEgZnOXgQJk5UmQMvvljlu+6xyYg03Y3TfeYcMuIUkeZJWp3D\nSdyTuTqHl1itpwtLWwKTkADJyWrAYw9rxDYipcRWZ8NaqcTEWml1CYlzWWnFVm3DWm1l6AtDiejX\n+ZxWM
2fOJDs7m7KyMvr378+TTz5JYmIi99xzD6WlpQghuPTSS1mzZk3jOZ5yQ22XUk5wLjttQTcR\nKGIBUFpaynnnncdzzz3H/PmttxxMDUXs2T6b6IpYMj/PxLB+kxozMXmyEoaLL1ZC0cWkahpNVzCZ\nXK0U97GKLQmP+2enTkFZmXKhpaSoVlWfPqo4W1mpqa4BmX36BG9sxkleXh7XXHMNu9ym57zhhhv4\n3e9+x7XXXsuPP/5Iklv+GU+5ocqFEKuADCHE580/lFJe234Teha+9m/36dOHFStWMGPGDHr37s2M\nGTNcH544AatWwerVRKxfzzhhZv/jYey8tpiz7n6D8LGXtDuA7Gs7uxNtq++IiHA95DuK3a76VJSW\nqillS0pUKS2FL7/MQoipjYMxa2pUSpjBg1UZNMi1Pniw8rb2VDHp7Hf6+eefM2DAAMaMGdOl+7cl\nFlcCE4B3aTluofEiY8aM4eOPP2bu3Ll8+fjjTMrNVSKRnw/Tp6v8548/TsiwYZyF5Nix37K9dDFj\nGr4gJmakr6uv0XgEg0G5onr3Pv2z5rGZ+nr175GbC8eOqXxjK1ao7dxc1Vrp21e1QpxhOmfrJC1N\ntV5SUtS9AqGrstFo5Nlnn2X16tWN+zrrfWnLDfWulPKnzuyznbpDNxJIbihyctRkQitXsiIri7us\nVtbddRcjb71VTaPWyqQsJ04sIyfnEUaPfp/ExOndXGmNxr8xGtVcLcePK29tcbGrpXLihGq5nDyp\n3F9Wq6und69eKkgfE+OaQqN5It7m02m4l5iY7mvRuLuh9uzZw4wZM4iOjkZKyfHjx+nXrx9btmyh\nj6OZ56mYxT7UhEcrganQNM+DlLK8CzZ5nB4tFlYr/Oc/8MUXqpw6BVddBVdeCdOn89by5TzxxBN8\n//339D/D9KQAFRVZ7Nt3E0OGPEPfvn42p6tG00NwjolxTvNhNKouyM5J+lqa4sO9uO9raFCC4RQT\nd7FxiklsLDz8sGrhdIXc3FyuueYadu/efdpnQ4YMYfv27SQmJjbu81TM4m/AGiAD+JGmYiEd+wOS\nTvkH7Xb1y2ipm0hL3UXct0+eVFldr75aTU06cWKTV5GFCxdy6tQprrjiCjZs2NAkQNWcxMSpjB+/\ngd27r8ZoPERGxnOtjvr2N9+2N9G2Bh7etDMy0jW8qKvYbEpkWhKVmhqXCJ2p11d7bL3lllvIysqi\nrKyMgQMH8uSTT7LIbW5n9+y2HeWMYiGl/AvwFyHEa1LKezp1h2Dht7+F555TrwfN+wc6RzzFx6sU\nqCY3Gl4AACAASURBVKNHnz4KKjlZLc/Aww8/TGlpKVdddRXffvvtGfPwR0ePYMKE/7Bnz1z27p3H\nqFHvEhLiP7N6aTTBREiIK/mwN2lrDEVOTudHQHQk3ccUYLiUcpkj9UeclPJYp+/sBXzqhjKZVBzB\ny2kspJTccccdFBcX8/nnnxPWRgd0u93MoUN3U1u7izFjVhAR0der9dNoND0LT6cofxyYCGRKKUcI\nIfoC/5JSTu56VT1Hj45ZdACr1cqcOXOIj4/n7bffbnMyeCkl+fnPU1T0KmefvZy4uC5lltdoNAFE\ne8WivfH5OcC1qNxMSCmLAA/NQ+afONP/+iOhoaF89NFHFBUVMX/+/NOG9TdHCMGgQY8wdOif2LXr\nck6dWtH4mT/b6Wm0rYFHsNgJvre1vWJhdryySwAhhHZ++5ioqCi++uorhBDMmjWLysrKNs/p02ce\nY8Z8yaFDP6eg4M+dDnRpNJrgo71uqIeB4cBM4A/AHcD7UsqXvVu9jhEsbih37HY7Dz30EGvXrmXl\nypVtdqsFaGjIY/fua+jV6yKGD38Zg6GHJt7RaDRdxhvzWcwELkd1n/1GSrm6jVO6nWAUC1AxiRdf\nfJFXXnmFr776irPOOqvNc6zWavbtm4/d3sDw4a8QE9P5eX81Gk3PxdM
xC4BdqJnysoCdnaxXj8HX\n/sGOIITgV7/6Fc888wyXXXYZ33//fZvnhIb24uyzP+fQoUyys6dy4MDPMJkKu6G2vqMnfaddJVhs\nDRY7wfe2tksshBA3omaxuwG4EdgshJjnzYppOs5tt93Gu+++y5w5c/j000/bPN5gCKVPn5s477xD\nhIensHXrOeTkPIrF0nb8Q6PRBBftjVnsBGZKKUsd2ynAt1LKsV6uX4cIVjdUc7Zv387VV1/NY489\nxr333tvu8xoajpOX9ySnTi1n4MAl9O37C0JC/HSKNI1G4xE8Pc5it5RyjNu2Adjpvs8f0GLhIicn\nh1mzZjFv3jyeeeYZ18RI7aCubj85OY9SW7uDIUOeIjX1NtR06RqNJtDwdMziayHEN0KIhUKIhcCX\nwFddqaC/42v/YFfJyMhg48aNrFmzhjvuuAOLxdLicS3ZGRMzijFj/s3o0e9TVPQPtm0bT1nZVz2+\nq21P/047QrDYGix2gu9tPaNYCCGGCSEmSyl/BfwdOMdR/gO83g3103SBlJQU1q5dS2lpKddddx21\ntbUdOj8+fjLjx29gyJCnOXr0YbKzp1FZ+V2PFw2NRtNx2kpR/gXwqJRyd7P9Y4BnpZRdm13cw2g3\nVMtYrVbuvvtudu3axZdfftmYx74j2O1WSkreIT//OUJDExgw4GF69/4JBkNbiYs1Go0/46n5LLZK\nKSe18tluHbPoOUgpefzxx3n//ff55ptvGDp0aCevY6esbAUFBS9iMh2nf/+HSEu7g9DQWA/XWKPR\ndAeeilmcKWd2VMeq1LPwtX/Q0wgheOqpp/jVr37FxRdfzLZt24CO2ymEgd69r2P8+A2MGvVPqqo2\n8MMPg8nJ+Q0m0wkv1NxzBNp3eiaCxdZgsRN8b2tbYrFNCLG4+U4hxM9QkyFpehh33303r776Klde\neSXffPNNl64VH38BZ531L849dzM2Ww1bt57FgQN3UFe310O11Wg0/kJbbqhU4DPAjEscJgLhwBwp\nZbHXa9gBtBuq/WzcuJG5c+fyzDPPcMcdd3Soa21rWCxlFBX9jcLCV4iNHc+AAQ+TkDDNI9fWaDTe\nwdPjLKYBZzs290op13axfl5Bi0XH2L9/PzfffDMGg4HHHnuM66+/HoMHZpW32RooLX2PgoL/xWCI\nZMCAh0lJuUEnLNRo/BCPJxLsCXhSLIJlDuO1a9dSW1vL008/TV1dHb/97W+58cYbCQ3tei8nKe2U\nl6+koOBFjMaj9O//IOnpiwkN9c1UKMHynULw2BosdoL3bPVGIkFNAGIwGLj22mvZvHkzf/7zn3nt\ntdcYNWoU/+///b9WB/K1FyEMJCdfxbhx6zjrrE+prt7CDz8M5ujRJQGftFCjCTR81rIQQiQCHwKD\ngFzgRillVQvH5QJVgB2wSCnPO8M1tRvKA6xfv56nn36aw4cPs2TJEhYtWkRkpGdyRBmNuRw//hIl\nJe+QnHwNAwb8N7Gx53jk2hqNpuP4vRtKCPE8UCalfEEIsQRIlFI+0sJxOcC5UsqKdlxTi4UH+eGH\nH3jmmWfYvn07Dz/8MHfddRcxMZ6ZJNFiqaCo6O8UFv6FmJgxDBjwMImJM3QwXKPpZnqCG+o64G3H\n+tvA9a0cJ/BBPX3dp7m7OJOdF1xwAStWrOCLL75g48aNZGRk8Nhjj3HkyJEu3zcsLJFBgx7hgguO\n0afPzRw58hDbto2nuPhd7HZzl6/fEsHynULw2BosdoLvbfWlWPSRUpYAOLrgtpaDQgKrhRBbWxrz\nofE+48eP5+OPP2bdunXU1dVx0UUXcfHFF/Pmm29SXV3dpWsbDBGkpy9k0qTdZGT8geLit9i8eSj5\n+S9itZ7mldRoND7Cq24oIcRqINV9F+rh/xjwlpQyye3YMillcgvXSJdSnnDMobEauE9K2eJUcEII\nefvttzN48GAAEhISGDduXGMPAqc
y6+2ubV900UWsXLmSP/7xj2RnZzNnzhwWLlyI+P/t3Xl0VHWa\n8PHvU0uSqqRSWQgJEpGtWURDBBJQNKiNAjruetxGxfFMnz6nR9qxtcdu+7y2Pf/YZ0bQY/f7jv12\nj9jTkB7bt1tRBtoFDYIgCIRFQqAJSIKAWYuQvap+7x9VKRIgJJWtqlLP55x76t5bN5ffQ1Xuk99y\nf1cEi8Uy4PPPnp1KZeXLfPzxe2RkLOGee/6dpKRLoyZ+3dbtWN7uXD969CgAb775ZtT3WZQB1xtj\nTolIDvCJMWZ6Lz/zAtBojFnew/vaZzHMqqurKS4uZuXKldTU1PDYY4/x2GOPMXny5AGfu7X1GFVV\nr3Ly5BtkZNzCpZf+CJfrqkEotVKqUyz0WawBlgbXHwPePfcAEXGKSEpwPRm4Gdg3HIXrmoVHsoHG\nmZWVxbJly9i5cyfvvfceZ86cGbRmqqSkcUye/DJz51aQkjKTvXtvo7R0IbW16/s1TXq8fKYQP7HG\nS5wQ+VgjmSx+CdwkIuXAd4GXINDsFJwaHQJNWJtEZBewFXjPGPNBREqrejVz5kxWrFhBVVUVzzzz\nDGvXrmXcuHE88sgjfPzxx/j9/n6d125PY9y4Z5k3r4KcnMeoqPgXtm+fwfHj/xuvN7xndCil+kfv\n4FZDqrq6mtWrV7Ny5Urq6up49NFHWbp0ab+nSIfAdOsNDSUcP/4aDQ2fkpPzKJdc8gOczoE3fSkV\nb6L+PouhoMkiupWWlvLmm2+yatUqpk2bxtKlS7nvvvtwufo//Udr69ccP/5/OHnyd7hchYwd+yQZ\nGTcTeEy8Uqo3sdBnEdUi3T44XIYzzvz8/FAz1dNPP82aNWu49NJLefTRR/nkk0/61UyVlHQZkya9\nxLx5x8jKuoeKiufYtm06VVWv4fV27y+Jl88U4ifWeIkTIh+rJgs17BISErjzzjt55513OHjwILNm\nzeKpp55i4sSJvPDCC1RUVIR9TqvVwZgx/8CcObuYOvW3oYcyHTz4TzQ17R+CKJSKL9oMpaKCMYbS\n0lJWrlzJ6tWrufzyy3n88ce59957SUnp3yNbW1urOHHidU6c+C1O53TGjv0BmZm361TpSnWhfRYq\nZrW3t7N27VpWrlxJSUkJd955J0uXLqWoqKhfz9vw+9uprv4z33zza1pajnDJJd9jzJh/JDFxzBCU\nXqnYon0WAxTp9sHhEo1xJiQkcNddd/Huu+9SXl5OXl4eTz75JJMnT+bFF1/kyJEjYZ3PYkkgO/sB\nPJ5/JS9vLW1tx9m+/XK++uoBGho29uuejWgXjZ/rUIiXOCHysWqyUFEtOzubp59+mj179vD2229T\nW1tLYWEhRUVFvP7669TW1oZ1vpSUmUyd+jpz5x7B7b6G8vLvsW3bdCoqnqexceeITBxKDQZthlIx\np729nfXr17N69WrWr19PUVERDz/8MLfddhtOpzOscxljaGzcRnX1n6mu/n+Aj1Gj7iYr6x5SU+fp\nEFw14mmfhYoLjY2NvPPOO6xatYovvviC2267jYceeoiFCxeG/WhYYwxNTXtCicPrrWPUqLvIyroH\nt7sIi2Xgj5pVKtpon8UARbp9cLjEepwul4tHHnmE9evXc+DAAQoKCvj5z3/O2LFjefLJJ9myZUuo\naam3WEWElJSZTJjwIoWF+8jP/4TExFwOH/4xW7aMoazsMU6dWk17+7fDENnAxPrn2lfxEidEPlZN\nFmrEyM7O5sknn2Tr1q18/vnnjB49mieeeIIJEybw3HPP8be//S2sPgmncyqXXfYT5sz5ktmzvyQ1\ndR7V1X/iiy+m8OWXszh8+Dnq6zfg97cNYVRKRQdthlIjmjGGvXv38sc//pHi4mKSkpJ44IEHePDB\nB5kyZUq/zun3d3D69BfU139AXd0HNDfvx+2+loyMRaSn34zTOU0fD6tihvZZKHUOYwzbtm2juLiY\
nt956i5ycHB588EHuvfdeJkyY0O/zdnTUUV+/IZg8/gr4SU+/mYyMm0lL+y4JCaMGLwilBpkmiwH6\n9NNPQ0+YGsniJU7oHqvP52Pjxo0UFxezZs0a0tLSWLRoEYsXL2bBggVhj6rqZIyhpeUgdXUfUl//\nVxoaSnA6p4aSR2rq1VgsCYMY1YXFy+caL3HC0MWqHdxKXYTVauWGG27gN7/5Dd988w3FxcXk5OTw\n0ksvkZ2dzaJFi1ixYgVlZWVh9XOICE7nVHJz/4krr3yP+fNrmDTp3wE4fPgZNm/OYu/e26mq+hXN\nzQf1vg4VM7RmodQ5PB4PGzZsYP369axfvx6AxYsXs3DhQoqKisjOzu7lDD1rb6+hoeFj6ur+Sl3d\nB4jYyMi4mfT0m0lP/y52e/pghaFUn2gzlFKDwBhDeXk569atY8OGDWzatIns7GwWLFhAUVERCxYs\nIDc3t9/nbm4uo67uA+rrP8Dj2YTTeTkZGTeTkbEIl6tQJz1UQ06TxQDFS1tovMQJgxOrz+dj7969\nbNy4kZKSEjZu3IjL5eqWPCZMmNCv0VB+fxsez+Zg8vgrLS1HSE+/IdTf4XD0/emC8fK5xkucEPk+\nC70lVakwWK1W8vPzyc/PZ9myZRhjOHDgACUlJXz44Yf87Gc/Q0RCiaOoqIhp0/o2lNZiSSQ9/UbS\n028EXqK9/Vvq6z+iru4Dvv76X7FYHMEmq0Wkp9+AzeYe+oBVzPH7/cyZM4fc3FzWrFkzaOfVmoVS\ng8gYw+HDh7vVPJqamroljyuvvDLsqdYDU5F8Fbq34/Tpz0lOzgv1d7hcc3Q6EgXAihUr2LFjB6dP\nn+5TstBmKKWixLFjx0LJo6SkhJqaGq699tpQ8rjqqqvCnsfK52vF4/kslDza2ipJS7sxmDxuwuHo\n/30jKnZVVVXx+OOP8/zzz7N8+XJNFj3RPovwxUucED2xnjhxgo0bN4aWY8eOcfXVV4eSR0FBAQkJ\n4d2L0dZ2gvr6j0LJY/duOwsX3hm8MfB6bLbUIYomsqLlMx0OfYn1vvvu4/nnn8fj8fDyyy8ParLQ\neqtSw2zMmDHcf//93H///QDU1NSwadMmNm7cyLJlyzh48CAFBQWh5DF37txebxJMTBxDTs4j5OQ8\ngjF+Tp/+T5KS6jl+/DXKyh4mJeWqUEe5yzUbEetwhKqG0dq1a8nOziY/P59PP/100O/h0ZqFUlHG\n4/GwefPmUNPV3r17yc/PD/V7XHPNNbhcrj6fz+drxuP5LHRvR3v7CdLTF4b6O5KSLh3CaNRw+elP\nf8of/vAHbDYbLS0tNDY2cvfdd/P73//+oj+nzVBKjRBNTU1s2bIllDx27NjB9OnTQzWP6667jvT0\nvt/M19Z2PDgdyQfU13+I3T6K9PRFZGQsJi1tAVarYwijUcOhpKRk0JuhNFn0IF7aQuMlThg5sba2\ntrJt27ZQ8ti6dSsTJ04MJY+ioiL279/fp1iN8XPmzK5grWM9Z87sIjV1PhkZi8nIWIzTOTWqZ9Dt\n7TP1e/34W/z4m/34Wnz4mwPbvhZfYH9r8P3OfU1+fGd8+M748DZ6A9stPvytfky7wXSYQPOOHzBg\n/IH1bvuMAUNoOe+9ixyXtz4P5+QLNzmG8/3VZNELTRbhi5c4YeTG2tHRwY4dO0LJY/PmzaSmpnLL\nLbeEmq7Gjh3bp3N5vR7q6zunI1kHWMjMXEJGxhLS0m7EZksZ2mC6MMbgb/bTUdsRWry13sB6TWB7\n8/7NzLLPwnvai68xcJHvmhiMz2BxWLA6rVicFqwOKxaH5eyS1GVfkgWry4o12Rp4TQmsW5yB9ywJ\nFsQmYAlcYLEAAmKRbq+dS+gYur93weOC60mXJWFJuPCw6kjflKfJQqkRxufzsXv37lDy+Oyzz3C7\n3d3u9ejLXeaB6Uj2U1u7jrq6dTQ2bsPlmhtMHrf06bkdxhh8j
T689V68DV466jsC651L130NZ/d3\n1HfgbfAiItgybdgz7Ngz7dhH2QPbwXV7ph1bhg1bqi1wgXdZA4mhMxkkWqK6ZhQNoj5ZiMi9wM+B\n6UCBMWZnD8ctBl4hkKN/Z4z55UXOqclCqXP4/X7KyspCNwmWlJRgs9m6JY+pU3tubvJ7/XgbvLTV\n1lNfu4GGlg9o5COMHxx1C0isug5rRQH+6sTzE4DHi9VhxZZuCyxpZ1/t6fbz9of2pQUWq1NHbQ21\nWEgWUwm03r0OPHOhZCEiFuAg8F3gG2A78IAx5kAP59RmqDDFS5wQv7Ean8F7OviXe4OXjoYODpUd\n4rPtn7Fl3xa2HtpKc1szMzNncqXrSmbYZzDNN43ExsDF39fiw+bufqG3pdtg/FE6Jm2mbcxG2ly7\ncXbMItV6E2muxaSkX449w47NbcNiH7onIcTrZzqYov4+C2NMOYBcvI5YCBwyxnwdPPaPwB3ABZOF\nUiORv8OP13P2Yt918Xl8Z7e7HHOg6gCJvsTAMU0+bC4bVrc19Be7M83Jre5bueOaO7AtsVEjNeyu\n2c3Oqp28cfgN9lXsY9L4SRTcUEDh1YUUzi3kiiuuwG7vOgvuFcDfAeD1NtLQsIHa2v+hou4eqLWQ\nyRIyGP6+DjU0It5nISKfAD/qoWZxD7DIGPO94PbfA4XGmGU9nEuboVTU8bX6zr+wn3Nxv9g+f5s/\ncJF3n22e6bZ+7nLOe1aXNdCpGob29nZKS0vZvn17aDl69Ch5eXkUFBSElilTppw3z9Vg9HWo4RMV\nzVAi8iHQ9UkxQmCg2PPGmPeCx2iyUFHN13K2g7YvF/dz9+OnWzt8jxf7Lvu61gKsydaouLg2Njay\nc+fObgmktraW2bNnd0sg48aN61Zer7cxOMJqXcRHWKnzRUUzlDHmpgGe4jgwrst2bnBfj5YuXcr4\n8eMBSEtLIz8/P9TO9+mnnwL0abtzva/Hx+p2aWkpTz31VNSUZ6i2jd+w/JfLuWLcFVz9navpqOug\n5LMSfKd9zMmcg7fey+dln+Nr9JFvzcdb72XbiW14z3i5ynIV9nQ7u+27sbqszBs3D1uajR1NO7Cm\nWJmfN5+kCUl88c0XWFOsXH/d9djSbGzevxlripUbb74REel7eecPPN6h+P7u2LEDgGeeeSb0vsfj\nITExke3bt7N8+XIOHDiAzWajoKCArKwspk6dyhNPPMHo0Xfy1VdpGPMAhYWjqa1dxzvv/Jzm5vsp\nKppPZuYS9u0bRVLSOG644YY+ly9evr8Ar7zySr+vZ123O9ePHj1KOKKlGeoZY8yOC7xnBcoJdHCf\nALYBDxpjyno4l3ZwhykW4/S1+s6Otz93/P0Ftr11gdE5exL3UJhdiC0jOPQyIzgMMz04/DI9MESz\nc4RO536rI/ZG5ETqczXGcPz48W61jy+//JLU1FQKCgqYM2cOhYWFzJ49G7fbPeBaRyx+f/trqGKN\nimaoi/7DIncCrwGjgAag1BizRETGAP/XGPN3weMWA69ydujsSxc5pzZDxRjjM3TUBW+y6lyqz9k+\nZ5+/zX92jH3nmPvMi2wHE8BQjspRPfP7/Rw+fLhbAiktLSU3N7db89XMmTMx5ki3vo6UlKtwu4tI\nS1uA230NVmtypMMZcaI+WQwFTRaR1XkDVk8X/Pbq9vP2eRu8gTH3owI3WSVkJYTWQ0tW922rKzra\n8FX/eb1e9u/f3y2BlJWVMXXq1FDymDNnBrm5HhobN+PxlNDYuIvk5Bm43deRlnYdbve12O2ZkQ4l\nqjzxxBO8//77ZGdns2fPHgD27NnD97//fZqamhg/fjyrVq0iJeVsjU2TxQDFS/X2YnH62/2hi357\ndXuvf/F31HRgSbR0v7hnBi/251zwQ0kg3Y5Yh+fCHy+fKcRmrK2trezevbtbAvn666+ZMWMGs2bN\nYvbsK8jLS2DUqBOcOfM5p
09vZd++dG68cREuVwEu12ySk2dgsYT3LJBY0ZfP9NZbb2XLli20tLTQ\n0tICwNixY3E4HLhcLrxeLzfddBPLly8P/UxUdHCr6OTv8NNa0UrzgWa+Xf8th9cdDlz0z0kM/mZ/\n97/su1z0ndOd59UCbJk2rEmx176vokNSUhJz585l7ty5oX1NTU3s3r2bnTt38vnnO/jVr3Zy6NAh\npkyZwuzZd5GR0cqMGQ5aWz+iqmoFra1HcDi+Q3LylaSkXElych4pKXkkJFwSF7XRn/zkJ5w+fZp7\n7rkntK+5uZnjxwPjgn7xi1/wyiuvdEsWfaU1ixHM6/HSfKD5vKXlSAuJYxNxTnPi+I6DhOwE7Fld\nmoCCScGWZouLXzAVW1pbW9mzZw+7du1i165d7Ny5k/379+N2u5k5cxpz5oxi6lQ7Y8a0kJz8LV5v\nOcb4QgnE6byc5OTLcTqnYbePHnHf8c2bN7Nw4cJQzeLaa6/lxz/+Mbfffju33norH330EW1tbaHj\ntRkqThi/oa2q7fykUNaMt9GLc6oT53QnzmlnF8dkh9YA1Iji9/s5duwYX331FeXl5ZSXl3PgwAHK\ny8tpbm5m1qwJzJ07iilT7OTktJGSUotIFSJ+nM4pOBxTgq+TcTgmkZQ0Cbs9MyYTybnJ4uDBgyxZ\nsoTKykrS09Px+XzU1NSEjtdkMUDR2ubbXt2OZ5MHz0YPnk0emsqasLlt3ZJB55KYm9jrlz1a4xwK\nGuvI05c46+vrQwmkvLyciooKKioqqKyspK2tmry8dKZPdzFxop0xY/ykp7fgcNRjsUBCwjiSkyfi\ncFxGQsIYEhKysdncWK1ubLbU4HoqNlsqVmvKkD6utq+f6bnJoqtnn32W4uJiqqqqQvu0z2KEaK1q\nxbPRQ8PGBjyfeWirasM93427yM2kFZNIyUvBlqofo1I9SU9PZ968ecybN++897xeLydPnuT48eNU\nVVVRWVnJrl2VVFZWUl19hPb2r7HbDzF+fDK5uQ5Gj7bjcllIToakJD+JiR3YbB1YLC2ItCHiwGZL\nxW4/m1A6k0kgsbiC253vubokHzcJCWOwWAb2+2yM6fb87erqarKysvD7/Rw5cqTf59WaRRRqOdzC\nt3/8llPFp2g/2U5aURruIjdpRWkk5yVjsen9AkoNF6/Xy6lTpzhx4gQnT56krq6O2tra85a6uhqa\nmmpoba0jKclLTk4qWVkpjB7tJDMzibS0BNxuGykpgtMpJCX5SEz0YrO1Y7G0AGfIz/8Ep3NKv8v6\n0EMP8fHHH1NdXU1ubi4vvvgiFRUVvPXWW4gI48aNIy0tjbfeeiv0M1qziBHGb/C3+mn/tp2av9Tw\nbfG3tH7dStZ9WUz5jym4r3GHPQmcUmrw2Gw2xo4d2+enDUKgE76uro66ujpqamqCySSQZA4dquu2\nXVvbRF3daWpra9m928f06QMrr9Vq7TY7cFlZGQkJCVitVhwOB6+++mq/zqs1ix6E2+Zb834N9R/W\nd3umr7/1ws/67brfdBgsiRasbiuZSzIZ/eBo0m5MG7baQ7y0bYPGOhKNpDg7r1099TNGeroPrVkM\nEpvLhmOiI/Qs39BrcL3rc35D7+tjH5VSQdF+HdCahVJKxbG+1iy0p1QppVSvNFn0oOvc7yNZvMQJ\nGutIFC9xQuRj1WShlFKqV9pnoZRScUz7LJRSSg0aTRY9iHT74HCJlzhBYx2J4iVOiHysmiyUUkr1\nSvsslFIqjmmfhVJKqUGjyaIHkW4fHC7xEidorCNRvMQJkY9Vk4VSSqleaZ+FUkrFMe2zUEopNWg0\nWfQg0u2DwyVe4gSNdSSKlzgh8rFqslBKKdUr7bNQSqk4pn0WSimlBk3EkoWI3Csi+0TEJyKzLnLc\nURHZLSK7RGTbcJUv0u2DwyVe4gSNdSSKlzgh8rFGsmaxF7gLKOnlOD9wvTHmKmNM4dAXKzI
i/UUY\nKC1/ZGn5IyvWy98XEUsWxphyY8whoLe2MiEC5bz++uuH9d+L1JdtsOKMhV+Wi8UaC+W/mHPLP9zf\n34Hq7/9/tMQ5HN+fSMcaC30WBvhQRLaLyD9GujBKKRWPhjRZiMiHIrKny7I3+HpbGKeZb4yZBdwC\n/EBErh2i4nYT639p9lW8xAka60gUL3FC5GON+NBZEfkE+JExZmcfjn0BaDTGLO/hfR03q5RSYerL\n0FnbcBSkDy5YUBFxAhZjzBkRSQZuBl7s6SR9CVgppVT4Ijl09k4RqQTmAe+LyLrg/jEi8n7wsGxg\nk4jsArYC7xljPohMiZVSKn5FvBlKKaVU9IuF0VC9EpHficgpEdkT6bKES0RyRWSDiHwVHACwLNJl\nCoeIJIrIF8GbJvcG+5ViiohYRGSniKyJdFn6I1I3rg4GEXGLyJ9EpCz4OzA30mXqKxGZEvw/3xl8\n9cTg7+8/B2+O3iMiq0QkocdjR0LNIjhC6gzwe2NMXqTLEw4RyQFyjDGlIpIC7ADuMMYciHDR1F11\nEgAABH1JREFU+kxEnMaYZhGxApuBZcaYmLloicg/A7OBVGPM7ZEuT7hEpAKYbYypj3RZwiUiK4ES\nY8wbImIDnMaY0xEuVthExAJUAXONMZWRLk9fiMglwCZgmjGmXUT+G1hrjPn9hY4fETULY8wmIOZ+\nUQCMMSeNMaXB9TNAGTA2sqUKjzGmObiaSGDQRMz8BSIiuQSGZf820mUZgIjcuDpQIpIKXGeMeQPA\nGOONxUQRtBA4HCuJogsrkNyZqIFvejow5r5gI5mIjAfygS8iW5LwBJtxdgEngQ+NMdsjXaYwrACe\nJYYS3AXE6o2rE4AaEXkj2JTzGxFxRLpQ/XQ/UBzpQoTDGPMN8DJwDDgONBhjPurpeE0WUSLYBPU2\n8MNgDSNmGGP8xpirgFxgrohcHuky9YWI3AqcCtbshN6nnolWEblxdRDYgFnAr4Plbwaei2yRwici\nduB24E+RLks4RCQNuAO4DLgESBGRh3o6XpNFFAhWAd8G/ssY826ky9NfwSaET4DFkS5LH80Hbg+2\n+RcDN4jIBdtro5kx5kTwtRr4CxArE25WAZXGmC+D228TSB6xZgmwI/j/H0sWAhXGmDpjjA/4M3BN\nTwePpGQRy38Z/iew3xjzaqQLEi4RGSUi7uC6A7gJiInOeWPMT40x44wxE4EHgA3GmEcjXa5wiIgz\nWCuly42r+yJbqr4xxpwCKkVkSnDXd4H9ESxSfz1IjDVBBR0D5olIkogIgf//sp4OjpY7uAdERFYD\n1wOZInIMeKGz0yzaich84GFgb7Dd3wA/Ncasj2zJ+mwM8GZwNIgF+G9jzP9EuEzxJBv4S3CqGxuw\nKsZuXF0GrAo25VQAj0e4PGEJzjKxEPhepMsSLmPMNhF5G9gFdARff9PT8SNi6KxSSqmhNZKaoZRS\nSg0RTRZKKaV6pclCKaVUrzRZKKWU6pUmC6WUUr3SZKGUUqpXmiyUugARabzAvutEZIeIdIjI3Rf5\nWb+I/FuX7R+JyP8aqrIqNRw0WSh1YRe6Aelr4DFgVS8/2wbcLSIZ/fmHg1O9KxVVRsQd3EoNB2PM\nMYDg3dIX4yVwJ+zTwM+6viEilxGY3iUTqAYeN8ZUicgbQCuBWYc3B2s2E4CJwKXBc80jMA9RFXBb\ncD4fpYaF1iyUGnwG+DXwsIi4znnvNeANY0w+sDq43WmsMeZqY8wzwe2JBKaxuQP4A/Bx8OFercCt\nQ1h+pc6jyUKpIRCcZv5N4IfnvHU1Zyed+y8CM992OneK63XGGD+wF7B0mfNpLzB+UAusVC80WSg1\ndF4FngCSu+y7WBNW0znbbQAmMIFbR5f9frQJWQ0zTRZKXVhv091f7H0BCD4T+y0CCaPT5wSmtAb4\ne+CzQSqPUkNKk4VSF+YQkWMiUhl8fUpE5ohIJXAv8B8
isreHn+1ae3iZQGd2575lwOMiUkpgavof\nXuBnejunUsNOpyhXSinVK61ZKKWU6pUmC6WUUr3SZKGUUqpXmiyUUkr1SpOFUkqpXmmyUEop1StN\nFkoppXqlyUIppVSv/j9McajkIqwDngAAAABJRU5ErkJggg==\n", + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "pfac = np.ones([1, 20])\n", + "pfac[0, 4] = 0; pfac[0, 9] = 0; pfac[0, 14] = 0\n", + "pfit = glmnet(x = x.copy(), y = y.copy(), penalty_factor = pfac)\n", + "glmnetPlot(pfit, label = True);" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "We see from the labels that the three variables with 0 penalty factors always stay in the model, while the others follow typical regularization paths and shrunken to 0 eventually.\n", + "\n", + "Some other useful arguments. `exclude` allows one to block certain variables from being the model at all. Of course, one could simply subset these out of `x`, but sometimes `exclude` is more useful, since it returns a full vector of coefficients, just with the excluded ones set to zero. There is also an `intercept` argument which defaults to `True`; if `False` the intercept is forced to be zero.\n", + "\n", + "### Customizing plots\n", + "\n", + "Sometimes, especially when the number of variables is small, we want to add variable labels to a plot. Since `glmnet` is intended primarily for wide data, this is not supprted in `plot.glmnet`. However, it is easy to do, as the following little toy example shows.\n", + "\n", + "We first generate some data, with 10 variables, and for lack of imagination and ease we give them simple character names. We then fit a glmnet model, and make the standard plot." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 18, + "metadata": { + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } + }, + "outputs": [ + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAZYAAAElCAYAAADHpsRNAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzsvXd4XFe1v/9u9TqSZTuyLBdZttzlJnc7lrudBlxISMgN\nJIEUCCEQuNwQvkBoFwj8KAm5kIQETCCQkNwbSnIT27El23HvkrsdS66yXFRmpFGf9ftjz4xGsiRL\nsqQzR9rv85znnLPnlHWO5fnMWmvvtZWIYDAYDAZDVxFitQEGg8Fg6F0YYTEYDAZDl2KExWAwGAxd\nihEWg8FgMHQpRlgMBoPB0KUYYTEYDAZDl2KExWBLlFINSqk9SqkDSqm9SqmvKqWU1XZ1BqXUY0qp\nQ0qpPzVrz1ZKlXmfc69Sak033f8PSqmPd8e1DX2TMKsNMBg6SaWITANQSg0A/go4gO9e74WVUiEi\n4rne63SALwBLROR8C59tFJGPtHaiUipURBq6zzSDoeMYj8Vge0TkMvAQ8ChoYVBK/VQptV0ptU8p\n9aC3XSmlfuP1DlYrpd7x/VJXShUopX6ilNoF3K6USldKvauU2qmU2qCUGu09boBS6k3vtbcrpeZ4\n27O9XsUepdRupVRsczu9XlW+UipPKfWYt+23QDrwrlLqyy083lVemNfD+K1SahvwtFIqRin1slJq\nm/feH2nrPXg/e04pddjrBd0Q0L7E+wz7lVIvKaXCA97Pj7zPuEMpNVUp9Z5S6rhS6uGO/6sZejUi\nYhaz2G4BnC20lQADgQeBb3rbIoCdwHDgE8Db3vZk7/Ef9+4XAP8RcK33gZHe7ZnAOu/2q8Bc7/ZQ\n4JB3+5/AHO92DBDSzLZpwH4gCogFDgCTvZ+dBPq18DzZQBmwx7s86W3/A/DPgOP+C7jbu50AHAWi\n23gP/was9ranAKXAx4FI4HTAc/8ReCzg/Tzk3f4FsM/7nAOAC1b/PZgluBYTCjP0RpYDmUqpO7z7\nDiADmA+8ASAixUqpnGbnvQ7g9TbmAm8E5G3CveulwLiA9jilVAywGfilUupV4H9F5Fyza88H3hKR\nau89/he4ES02ihY8Ey+thcLeaPa8tymlvu7djwCGtfEeFqBDh4hIkVJqnffzMcBJEfnQu/9H4BHg\nWe/+v7zrfCBWRNyAWylVrZRyiIizlWcw9DGMsBh6BUqpdKBBRC55v/S/JCJrmx1zyzUuU+ldhwCl\n4s3hNL8VMEtE6pq1P62Uehu4BdislFouIsc6/iTtprLZ/idE5HgTQ9v/HlQr282p8a49AdsAgvku\nMQRgciwGu+L/AlRKDQR+C/za27QaeEQpFeb9PCPAq7jdm2tJBha2dGERcQEFSqnbA+4xybu5Bvhy\nQPtk7zpdRA6KyE/RIaexzS67CfiYUirK6xH9G7CxU09+NauBxwJsmhLQ3tJ72Ajc6c3BpACLvMcf\nBYZ7RRrg00BuF9lo6EOYXxkGuxKllNqDDvvUAa+IyC+9n70EpAF7vL/aLwIfA/4HWAwcBM4Au4Fy\n7znNy3z/O/C8Uupb6P8nrwF5aFH5b6XUfiAU/SX9CPAVpdQioMF7/XcDLyYie5VSq9CiI8CLIpLX\nyr2vRfPjfwj8SimVhxbcAuAjrb0HEXlLKeV7D6eBLV4ba5RS9wNvKqVCvba+0A4bTYl0QxOUiPmb\nMPQdlFKxIlKplEoCtgPzROSi1XYZDL0J47EY+hpvK6US0cn47xtRMRi6HuOxGAwGg6FLMcl7g8Fg\nMHQpRlgMBo
PB0KX0amFRSq1USh1RSh1TSj1htT0dxdsddI9S6p9W29JelFKjA0qb7FVKlfvKlwQ7\ndrXdrnb7UEo9rnQx0Tyl1KtKqQirbWoPdrLbW/Kn2Ntz0NfWTym1Ril1VOkSRwlddr/emmNRSoUA\nx4AlwHl018m7ROSIpYZ1AKXU40AW4Ghl9HVQ4/03OIseUHjGans6gl1tt5vdSqnBwAfAWBGpVUq9\nDrwjIq9YbFqb2M1updR8oALdLX+St+1p4IqI/NT7w7ufiHyjK+7Xmz2WmcBxETnlHSX9GvBRi21q\nN0qpIcDN6LEIdmUp8KEdvuBawK6229HuUCDWO5AzBv1D0A7Yxm4R+QBdEy6Qj6LL9uBdf6yr7teb\nhSUVPQjOx1lvm134JfB17D347E68NalsiF1tt5XdoqcK+Dl6oOY5oExE3rfWqmtjV7ubcYOIFAOI\nyAUCqlxfL71ZWGyLt5ZTsYjso+0ChUGL0uXWP0LTYom2wK6229Fu75iij6KrLg9GF/W821qrro1d\n7b4GXfYjtjcLyzl0hVcfQ7xtdmAe8BGl1En0r89FSqmgjN22wU3AbhG5ZLUhncCuttvR7qXoisol\noics+190Zelgx652B1LsrZmHUmoQuuRPl9CbhWUnMEopNdzbW+Mu9JwZQY+IfFNEholIOtru9SLy\nGavt6iCfwkYhmWbY1XY72n0amO0tzqnQnW0OW2xTe7Cj3c2jH/8E7vNu3wv8o6tu1GuFxfsr4lF0\nNdqDwGsiEuz/8L0CbwXdpehfcbbCrrbb1W4R2QG8CeylcW6aFy01qh3YzW6l1F/QxUZHK6VOe4uN\n/gRYppQ6ihbGn3TZ/Xprd2ODwWAwWIPlHsu1BjEqpcYopbYoPUvdV5t9Vqj03Nx7lVI7es5qg8Fg\nMLSGpdWNvYO5niNgEKNS6h/NBjFeAb5Ey32sPcBCEWneP9tgMBgMFmG1x3LNQYwicllEdgP1LZyv\nsP4ZDAaDwRCA1V/K1zuIUYC1SqmdSqkHu9Qyg8FgMHQKu0/0NU9EipSe83ytUuqwt3SBwWAwGCzC\namG5rkGMIlLkXV9SSr2FDq1dJSxKKdP1zWAwGDqBiHS48ofVobCODmL0P6BSKkYpFefdjgWWAwda\nO1FEbLk89dRTltvQl+w2thu7je2NS2ex1GMRkQallG8QYwjwsogcVko9rD+WF70lB3YB8YBHKfVl\nYDwwEHjL642EAa+KyBprnqT7KCwstNqETmFXu8HYbgV2tRuM7S1hdSgMEXkPGNOs7YWA7WJgaAun\nVgBTutc6g8FgMHQUq0Nhhmtw3333WW1Cp7Cr3WBstwK72g3G9pboEyVdlFLSF57TYDAYuhKlFGLD\n5L3hGuTm5lptQqewq91gbLcCu9oNxvaWMMJiMBgMhi7FhMIMBoPB0CImFGYwGAyGoMAIS5Bj1/it\nXe0GY7sV2NVuMLa3hBEWg8FgMHQpJsdiMBgMhhYxORaDwWAwBAVGWIIcu8Zv7Wo3GNutwK52g7G9\nJYywGAwGg6FLMTkWg8FgMLSIybEYDAaDISgwwhLk2DV+a1e7wdhuBXa1G4ztLWGExWAwGAxdismx\nGAwGg6FFTI7FYDAYDEGBEZYgx67xW7vaDcZ2K7Cr3WBsbwkjLAaDwWDoUkyOxWAwGAwtYnIsBoPB\nYAgKjLAEOXaN39rVbjC2W4Fd7QZje0tYLixKqZVKqSNKqWNKqSda+HyMUmqLUqpaKfXVjpxrMBgM\nhp7H0hyLUioEOAYsAc4DO4G7RORIwDEDgOHAx4BSEflFe88NuIbJsRgMhqBEPEJ1YTWV+ZVU5FcQ\nkxHDDXfeYLVZQOdzLGHdYUwHmAkcF5FTAEqp14CPAn5xEJHLwGWl1K0dPddgMBiCibordVTkV1CZ\nV+kXEvdBN2GJYcRmxhKbGUvEoAirzbxurA6FpQJnAvbPetu6+1zbYNf4rV3t
BmO7FdjVbmjZ9obK\nBpw7nBS9XMSJx0+wb+k+tqRsYdvIbRR8q4DKQ5XETYlj5M9GMvv0bOacmcOk/5vEyKdHkpidaKnt\nXYHVHkuPcd9995GWlgZAYmIiU6ZMYeHChUDjyzX7Xbe/b9++oLKnI/v79u0LKnv6wr5d/1489R62\nr95OaW4pUzxTqMyvZOOOjdRdrmPe+HnEZsaSF5NH1PIoVvxhBZFDItmwYQNOnEFhf/P93NxcVq1a\nBeD/vuwMVudYZgPfFZGV3v1vACIiT7dw7FOAKyDH0pFzTY7FYDB0GhGh5lwNlQd0CMu3uI+6iUiJ\nIG5SnD+UFTsxluiMaELCrA4IXT92zbHsBEYppYYDRcBdwKfaOD7wATt6rsFgMFyTutK6pgJyQC8q\nUhGXGUfMhBgSsxNJfTSV2AmxhMaGWm1y0GGpsIhIg1LqUWANOt/zsogcVko9rD+WF5VSycAuIB7w\nKKW+DIwXkYqWzrXoUbqN3Nxcv8tqJ+xqNxjbrcAKuz01HioPN/VAKg9UUl9WT+xE7XnEZsYy8JMD\niZ0YS8TAlpPqdn3n0H22W+2xICLvAWOatb0QsF0MDG3vuQaDwRCIeITqgmrdGytARKoLq4kaGUXs\nxFjiMuMY/IXBxGbGEjU8ChXS4eiPIQBTK8xgMPQaGiobqNhXQcX+xqXyQCXh/cP9+Y+4TJ0PiRkT\nQ0iktXmQuoY6Dl46yK7zu9h1fhe7i3azcPhCfrb8Z5ba5aOzORYjLAaDwZY0uBuo2F+Ba5cL124X\nrl0uqk9WEzM+hvip8cROjiVushaR8MRwq82l3lPPoUuH2H1+txaSol0cuHiA4QnDmT54OlkpWUwf\nPJ3JyZOJi4yz2lzACEub2FlY7Bq/tavdYGy3gmvZ3VClRaRid6OQVJ2o0iKSFa+X6fHETowlJKJn\nvZCWbG/wNHDk8hG/F7Lr/C72F+9niGMI0wdPZ3rKdKYPns745Ml8WAs7nE52ulzscLlY0a8fv8rI\nsMz2QOzaK8xgMBia0FDdQGVeZRNPpOp4FTFjY4ifHo9jtoPBXxxMXGac5aEs0CJy+NJhv4DsOr+L\nfRf2kRKf4heRT4z7BJMGTeF8Qxg7vSLy18suDpzaR0Z0NDMdDuYnJPD4kCFMjI21+pGuG+OxGAwG\ny/DUeKjI93ohu1xU7K7AfdRNzJgY4rLiiJ+uvZHYzFhCo6zv1usRDydKTvgFxCciA2MHkpWSxYzB\nM8ganMXUQVO5IpHsdLm0J+J0sr+ykuTwcGY4HMyMj2emw8HUuDhiQq1/rtYwobA2MMJiMFiPp9ZD\nZX6l3wtx7XbhPuwmOiPaH8qKz9K5kWAQERHhw9IP/TmR3UW72V20m35R/ZiROsOfE5mWMo3qkFgt\nIk4nO1wudrlcxIaGMiM+3i8i0+LiSAy3PtfTEYywtIGdhaW3xsyDGWP79eOp9VB5sGk4y33ITfTI\n6CaeSNzkOEJjQi23W0QoLCts9ESKdrGnaA/xEfH+xPqM1BlMS5lGWHgCu7z5kJ0uFx/k5uKZMkWL\niMPBjPh4ZsTHMygy0rLnaS8mx2IwGIIST50H9yF3ExGpPFBJVHqUP7E+6DODtIgEwSh1EeF0+Wl/\nTsS3jg6LJmtwFtNTpvMfc/6DrMFZxEb1Z49XQFa5XDya/yFFtbVMjYtjRnw8dw4cyO0ZGdw9bx5K\nmbEvPozHYjAY2o2n3isiAeGsyvxKotKimvTOipsSPCJyznWuyTiRXed3ERYS5k+sZw3WIa2kmBvI\nr6z0J9d3ulx8WFXFxNhYpnu9kOnx8YyLjSW0j4iICYW1gREWg6HjeOo9uI+4/Ul11y4XFfkVRA2N\n0uLhDWnFTYkjLC44gh/V9dVsP7udrWe3sv3cdraf3U6dp44Zg2doIfGGtVLiB3PU7fYLyE6nk/zK\nStKjo/2hrBnx8UyKiyMixPqeZ1ZhhKUN
7CwsVseeO4td7Ya+abs0iBaRAE+kYn8FkamR/nxI/PR4\n4qbGERbf9SLSWbvrGurYdX4X6wvWs75wPTvO7WDcgHHMGzqPWUNmMSt1FsMThnOmtraJJ7Lb5WJA\neLgWEG8vralxccSFdfzZevPfi8mxGAyGdiENgvtYs5zI/koiBkX4vZABHx9A/NR4whKC6yuiwdPA\n/uL9WkgK1rP5zGbS+6WzKG0Rj89+nBuH3YiExrDD28X31bMudhzaigJ/Yv0/hw5lhsNBf5v10LIT\nxmMxGHox4mkUkYrdFdoT2VtBeHJ4ky6+cdPigqLsSXNEhIOXDpJTkMP6wvVsKNzAoLhBLEpbxOIR\ni1kwfAElKpYt5eVscTrZUl7O6ZoasuLimBkwXmRoZKRJrncCEwprAyMshr6AT0R8AuLa5aJiXwXh\nA8P9ifW4rDjis+IJ7xd8IgJaSE6UnCCnMIf1BevJKcwhNjyWJSOWsGjEImYNXcAZifWLyFanE0dY\nGHMcDuYlJDDH4WBSbCxhfTgv0pUYYWkDOwuLXeO3drUb7GN7bXEtzm1OnNudOLc5ce12kR+TT/aN\n2Y0iMi2e8KTgFBEfp8tP85s3fkPRgCLWF6zHIx4tJGmLGDN4LmdUPzaXl7OlvJzDbjeT4uKaCMlg\ni8eL2OXvpSVMjsVg6MN4ajy49rpwbXdpMdnmpL68HscsB47ZDoY9MYz4GfE05DUwYeEEq81tkwsV\nF8gpyPF7Jc4aJxPcE/jE5Du4beqXOaMGsNXl4tvl5dSduMzchDrmORzcmZFBVlwcUUFcAsWgMR6L\nwRCE1BTV4NzipHxzOeVbyqnMryRmdAyO2Q7/Ep0RbYsJqa64r7Dh1AZ/aOu86zzZw7OZMfRG4gfM\n4FxoCltdFexxuRgVHc2chATmORzMTUhgRFSUyY1YiAmFtYERFkMwIw1C5cFKyjeX+8Wkvqwex1wH\nCfMSSJibQPz0+KAYcNgenDVONp7a6E+4f1jyIXOHzmN86jyikqZzJmwI2yoquVRby2yvgMx1OJjp\ncODoRHdfQ/dhhKUN7Cwsdo3f2tVu6H7b6yvqcW13+b0R5zYnETdEkDAvAcc8BwlzE4gZG9Mpb8SK\n9+6uc7P59GZ/aOvAxQNkpc5kxKDZhPbL4nT4cHZWVJEcEeEXkbkJCYyLiSHE642YvxdrMDkWg8Gm\nVJ+u1gKyWXsj7qNu4qbEkTAvgdQvpDLulXFE3BBhtZntpqa+hu3ntvvHkuwp2sPYGyaRmjyLG8Y8\nwoiJ6eyu9UB8PHMTEviow8Ech4OBEfZ5RsP1YTwWg6EL8dR7qNxfSfmWch3a2uzEU+NpDGvNSyA+\nKz4oJqhqL/Weenaf3+0f3b7t7DaGJGaQfMMsahMmczx8JFERjT215jocTO7jpVB6CyYU1gZGWAzd\nRX15PeVbvd7IlnJcO1xEDovUYS2vmESPirZVAtojHvKK8/weycbTm0iKG0K/ATOojM/kbNRYJiYO\n8oe05jgcDI2KstpsQzdghKUN7Cwsdo3f2tVuaN12EaG6oFrnRryJ9qqTVcRPj/cn2R1zHJaOG+nM\nexcRDl8+TE5BDu8XrCOnMJfIyCQc/adTFpdJQ8Ik5g9MY67DwZyEBGbEx3f5rIe98e/FDvTaHItS\naiXwKyAEeFlEnm7hmGeBm4BK4H4R2ettLwTKAQ9QJyIze8puQ+/HU+uhYm9FY5J9sxMU/iR7yudS\niJsSR0i4vUI+IsLJ0pOsL1jPeyfXsb4wB1HhRPefTmlcJsPnfJbs5Ay/R5IRbS+Py2A9lnosSqkQ\n4BiwBDgP7ATuEpEjAcfcBDwqIrcopWYBz4jIbO9nJ4EsESm9xn1s67EYeo66K3VNw1q7XUSPim70\nRuY5iBpuz3EVZ51nef/kOv5x4n02nsqhqr6W8KQsah2TyRq6gCUp45jjcDDb4bDd9LmG7sOuHstM\n4LiI
nAJQSr0GfBQ4EnDMR4FXAERku1IqQSmVLCLFgEJ7OgZDhxAR3Efd/nEjzi1Oas7X6JHscx0M\n/9ZwHLMchDms/i/SOYorinnv5DreOL6Gbac34Kwug8QpOPpPZ978F1iZOpl5iYlM7EOTVhl6Dqv/\n16QCZwL2z6LFpq1jznnbigEB1iqlGoAXReR33WirJdg1fhtsdjdUNeDa6Wrs9ru1nNC4UH9PrdQv\npRKXGYcKVUFne3socZfwxom1vPTWK5xMOE5ZZREqYRLDBs3h1huf47Zh05mXkBi087Db8Z37MLZf\njdXCcr3ME5EipdRAtMAcFpEPWjrwvvvuIy0tDYDExESmTJnif6G5ubkAZr8L9/ft22fp/WtLapkq\nUynfXM6699ZRfbKa+ZPmkzAvgRNZJ4j9TCzL71jeeH4ZLAzV5+/bt6/H7e3ofk1DLWdSGnjj2Lts\nXfc2Lvc5IqZMZjDDWRRzP9kjx/PQLbcRGRKizz90mEFBZH/zfav/Xq5n3w5/L+3dz83NZdWqVQD+\n78vOYHWOZTbwXRFZ6d3/BiCBCXyl1PNAjoi87t0/AmR7Q2GB13oKcInIL1q4j8mx9GL8JVECBiH6\nS6LM1d1+HTMdhMbYoyRKS1TV1/N64XbeOPYeO07ncvnyHqJihzMm9UZuGrmce0cvZkxcoi3zP4bg\nxa45lp3AKKXUcKAIuAv4VLNj/gl8EXjdK0RlIlKslIoBQkSkQikVCywHvteDthsswl8SxTcIMaAk\nSkJ2AsOeHNbpkijBgqu+nncunOAvR95hW+F6Ll3aRmRYNKMHz+e+Kffz4Lg3GJ0wyGozDYYWsXwc\ni7e78TM0djf+iVLqYbTn8qL3mOeAlTR2N96jlBoBvIXOs4QBr4rIT1q5h209llybxm+70u7qM9VN\nCjT6S6LMTfAPRIwY2HXlQqx452V1deSUXOS1EzlsLFjLxeIthNQUMyplLstHLueBcbeSOXD0Na9j\n/l56nt5su109FkTkPWBMs7YXmu0/2sJ5BcCU7rXO0NN46j1U5lU2GYToqfb4x45k3JVhu5IoLXG5\ntpaNZWX848xe1p1cy4XizVCeT0piBsvTl3Hv/JdZMGwuYSGW/xc1GDqM5R5LT2Bnj6W3U19ej3Ob\n0y8krp0uIodG+ntrOeY6bFcSpSUu1NSwobyctRdPs+bkOi4Ubya0dBcRSlgwYimfGnMLK0ctJyk6\nyWpTDQY/pqRLGxhhCQ78JVECkuzVBdXET4/319VyzHEE7XzsHeFMdTUbysrILS1hzemtXCreQnT5\nHtzO40xNnc0nRt/MTaNWMH7geNuLpqH3YoSlDewsLHaN3+bm5rJg7gJdEiWg0i8hNCnQGIwlUTr6\nzkWEgupqNpaVsaG8nHUXjlF6cSuJrr2UXNrO4PjB3DZqBStHreTGYTcSHR4dNLYHC3a1G3q37bbN\nsRh6D4ElUY6/c5yQkyH+kigD/20gI/+/kbYtiRKIiHCsqooNZWV6KblIVcleBlbsw3lpG1U1l7kl\nfRkrpt3J8pEvk+pItdpkg6FHMR6LoVOICFXHqpoUaKw55y2JMs8b1rJxSZRAPCIcrKxkQ1kZG8vL\n2VBaSoi7kFR3HlWXt3Pq4h6mDprC8pHLWTFqBVkpWYSG2HfMjMHgw4TC2sAIy/XTUN2Aa5fLnxsp\n3+ItieLr8jvPQezEWELCgius1RkaRNhfUeH3SDaVl5MgboZX5VN/ZSfHz28kOiySFSNXsGLkChaP\nWExCVILVZhsMXY4Rljaws7BYFb+tL6+nLLeMsk1lODc7qcirIHZ8bGOSfa6DqCGtT+5kp7hzncfD\nbpeLDeXlOk+Sm8vQmdMZXVdASNluCoo2cfLKUbLTsv1iMippVFCG9Oz03gOxq93Qu203ORbDddFQ\n3YBzq5PSdaWUvl+K+6Abx2wHCdkJjPjRCF0SJbZ3hHeqGxrY4XL5Q1
vbnE5GRkUxJcxFatluJlz6\nO0dzv0N0v3RWjFzBl5f/jLlD5xIRauZsNxjag/FY+ijSILj2uihbV0bpulKcW53ETIih35J+9FvS\nD8dcB6FRvUNIKhsa2FpervMjZWXsdrkYHxvLnNhwEioOcOHCZnIL1uKscbJ85HKWj1zOsvRlJMcl\nW226wWApJhTWBkZYvMn241V+j6Qst4yI5Aj6LdVCkpCdQHii/cePADjr69nsFZEN5eXkV1QwJS6O\nBQkOUuvPcrl4C7kFa9l1fhczBs/Q4a1RK5iUPIkQZf8ckcHQVRhhaQM7C8v1xG9rimooXVfq90oQ\nSFySqMVkcT8iB3ff3Bw9GXcuqatjk1dINpaVcbSqihnx8SxISCAzopbyS9vJKVjLmg/XkBiV6M+T\nZKdlExcRZ6ntXY1dbber3dC7bTc5FgOgJ7S69D+XKHqpiMq8ShIXJdJvST+GPTmM6Az7l0YBKK6t\nZZPXG9lQVkZhdTVzHA6yExP52Yhh1Jbnk3PyNf6xfTW/Ki1g8YjFrBi5gh8s+gFpiWlWm28w9HqM\nx9JLcO1xUfRSERdfv4hjloOUz6XQ/7b+hETYP7RzrqbG741sKC/nQm0t8xMSWOBd4mqLWH9yLWtO\nrmHjqY2MHTDW75XMTJ1JeGjvCPEZDD2NCYW1QW8VlrrSOopfLebCyxeoK60j5XMpDLpvEFFDW+8G\nbAcKq6r83sjGsjLK6utZkJhIdmIiCxISGB7WQG7hetZ8uIbVH66mrqGOFSNXsHzkcpamL6V/TH+r\nH8Fg6BUYYWkDOwtL8xioeISyDWUUvVTElXeu0P+m/gz63CD6Le4XVBNbtTfuLCIcr6ryeyMby8qo\n8XjI9gpJdmIio6Mi2XthD6tPrGb1h6vZX7yfuUPn+r2Sri7k2Jtj5sGKXe2GTtpeUwNlZVBaCuXl\nel1aqtvGjIHFi7vF1uaYHEsfp+Z8DRdWXaDo5SJCY0NJeSCFjGczCO9vrzCPiHDI7fZ7IxvLywlT\nyu+NfHv4cDKioznnOseaD1fz/a2ref/k+6TEpbBi5Aq+veDbLBi+oFsLORoM10QE3G4tBidPQmho\nU3EIXPu2S0r0UlYGdXXQrx8kJjaufds33GD101037fJYlFLzgH0iUqmUugeYBjwjIqe628CuwK4e\ni4jg3Ork7LNnKV1TysA7BpLyQArx0+Ntk4RvECG/osIf2tpUXo4jNNTvjSxISCAtKorq+mo2ntrI\n6g9Xs+bDNRRVFLE0fak/xDXEMcTqRzH0NjwecDpbF4KWtktKGtvCwpqKg28JFIvmnycl6f3YWLDB\n/+FuDYUppfKAycAkYBXwEvBJEcnu6A2twG7C0lDdwKXXL3H22bM0OBtI/VIqg+4bZIuCjvUeD3sq\nKvyhrQ/KyxkUEcGChAS/kAyJitKey6VDrP5Qh7e2nNnC5OTJ/jElppCjocOIwIULUFgI589DcbHe\nLy5uFISe8+9HAAAgAElEQVRAoXA6ISbmamFoLga+7aSkpm2R3dddP1jobmHZIyLTlFLfAc6JyMu+\nts4Y29PYRVhqztVw7rfnKPpdEfHT4kl9LJW8yDwWLV5ktWmtUuvxsNNXHqWsjK1OJ8Ojohh59Ch3\nL1/OgsREkiN0KZQr7iusK1jH6hOrWXNyDaEq1C8ki0csJjEq0eKn0fS5eH8Q0C67ReDyZS0cBQVX\nr0+dAocD0tIgNRWSk2HQIB1a6t//aqFISNBeR0/YHqRYnWNxKaWeBO4BFiilQgB7BfeDFBHBucUb\n7lpbSvI9yUzdOJWYMTEAqNzgcperGhrY7nT6Q1s7XS5GR0eTnZjIF1JTeXX8ePqHh5NbWcn8AUls\nP7ud//Z6JYcvHWbB8AWsGLmCJ+Y/QUZShm1CeoYeoqysUShaEo+ICC0cI0boZcIEuPVW3ZaWpkNM\ndqK6Go4ehYMH9XLoEMydC1//ut
WWXRft9VgGAXcDO0Vkk1JqGLBQRF7pbgO7gmD0WBqqG7j42kXO\nPXuOhgpvuOve4At3VdTXs8Xp9Ie29rpcTIyN9edI5iUkkBDwq6+wrNDfeyunMIfhCcP9Xsm8ofOI\nDOv94QNDG1RUtO5xFBZCfb0WjEDx8G2npWkvw460JCAHD8KZM5CeDuPHa5GcMAGmT9fPGwR0dyjs\naRF54lptwUowCUt9eT3nnjvH2WfPEp+lw11Jy5OCpquwu6GBzeXl5JSVkVNWRn5FBdPi4/35kTkO\nB3EBQlJRW0FuYa5/TElpVame8GrkCpaNXMaguEEWPo2hx6mu1gLhWwKFo6AAKisbvYuWxCMpyRZJ\n7VbxCYhPOHwicvp0o4c1YUKjkGRkaC8sSOmRHEuztjwRmdTRG1pBMAhLXWkdZ585y/n/Pk/STUkM\n++YwYsde223v7vhtjcfDNqeT9aWl5JSVscflYkpcHIv79WNRYiKzHQ6iQxuT6B7xkFec5/dKdp7f\nyfTB0/1jSiYPmkyICunVcedgptttr6vTX5IteRwFBTopPnTo1Z6Gb52c3KJw2O6dV1fDsWNw8CC5\n//d/LKys1CJiMwGxJMeilPoC8AiQ7u0Z5iMe2NLRm7Vyj5XAr4AQ4GURebqFY54FbgIqgftEZF97\nz7Wauit1nPnlGc7/9jwDPjaAqVunEjMqxjp7vMn2nLIy1peWssPlYnxMDIsSE/nW8OHMS0ggNrRp\nb6yLlRf9HsnaD9fiiHSwYuQKHp/9OItGLGqxkKPBpjQ0wNmzrYerioshJaWpYKxc2bidkqLHdPQW\nampaDmGdOqVDWBMm6J5ld90V1ALS07TpsSilEoB+wI+BbwR85BKRkuu+ue4EcAxYApwHdgJ3iciR\ngGNuAh4VkVuUUrPQ42dmt+fcgGv0uMdSe7GWMz8/Q9FLRQy8fSDDvjGM6BE9P6ivQYQ9XiHJKStj\nc3k5o6KjWZSYyKLERG5MTPTnSOoa6igsK+R4yXFOlJzg+JXjbD6zmZOlJ1k0YpHfKxnRLzjiv4ZO\n4PHoLrjNPQ3f9rlzMHBg6x7HkCEQ3gv77QQKSGAYy+eBBOZAxo+H0aP7hIB0e0kXpVQokEyAlyMi\npzt6w2bXnA08JSI3efe/oS/b6HkopZ4HckTkde/+YWAhMOJa5wZco8eEpaaohjM/O8OFVRe44e4b\nGPafw4ga1r21uxpEKK6t5WxNDWdqavS6upqjVVV8UF5OakSEP7Q1zxGHq/I8x0uOc/zK8UYRKTnO\nmfIzDI4fzKikUWQkZZDRP4Ppg6czK3WWKeRoF0TgypWrBcO3feqUToC3JhzDhvXu8Rk+AQkUj0AP\npI8KSGt0d47lUeC7QDHg8TbL9eZYlFKfAFaIyEPe/XuAmSLyWMAx/wJ+LCJbvPtrgSfQwtLmuQHX\nkK8vv/kaxgghCEp5iIysIiqiqsPPE1EaQfSVKFSK4BnpISw+hJCQ66sufLSwlDFp/QCoVYrKkBDc\nhOBWIbhVKFVKb0eKhxg8RIuHWI/ejmqoJaKuEme9m4t1Ls7XlHGptoLE8BhSIxMZHJlISmQCgyMT\nSY3sR3JEPOEhXdMrbdPxs9yYYc/R8kFnu8ejvxCrqqGmCqproNq3rm5cXC42nbnMjY5wPW4jKUmv\nfUtSEiT1D8ovyk15x7lxUkYXXU0Id4cT4QohpLRCj33xiUlhYWMOJFBErkNAbJcfCsDqcSxfAcaI\nyJWO3qAb6FSXkbyNq0kJ1V/ysQoywkOZGqFjwXtrGwAC9j0oapkWpdv31uhrTPX+kGtrX4C9hSBb\nYaL373RvrTZ6apRe763VTzE9WhEaCntqtLhnRetH213VuF9fJdR7n2FGlEIFfD7d6wjt8mrg9Oim\n+zOiIVLB3mpIAxbGQISCzVUuwMWCmLMAbHILJ1EMiVGgYJNb23ljjPJ/Tgf386uFBUkhnT7fyv38
\nEg8qSgWNPZvcAgpujA0FFJvcHlCKG+NCQSk2VXj05/FhqPBwPqhWcK6EG51OKCxkk0v/Bd0Yr/+7\nB+N+vruBBcmRXXK9ja56GqIamJlaR0NCJJurYyE1mTkPLSRs/Cx25bmJihrC0qUfBfSXK5cv+79g\nc3NzAdq9v2/fvg4dH8z7ubm5rFq1CoC0tDQ6S3s9lhxgmYjUX/Pgjtxch8K+KyIrvfvtCYUdAbLR\nHkub5wZcQ/70J+Gee9pn18GDcPvtepzSc89B9HWkRmpqaqioqMDlcjVZnz17lnXr1vH+++8THx/P\nsmXLWL58OYsWLSKhG/rq19TXcMl9iUuVl7hYeZFLbu+6+b53XdtQyw2xNzAgZgBJ0Un0j+7fdB2j\n14FtSdFJJmRmCBpEPNTWXqS6upCqqhNUVR2nquo4bvcxqqqOo1Qo0dEZREdnEBOT4d+Ojs4gPDw4\nqkBYTXeHwl4GxgDvADW+dhH5RUdv2Oy6ocBRdAK+CNgBfEpEDgccczPwRW/yfjbwK2/y/prnBlxD\nbrhB2L9fV3hoDxUV8PDDcOAAvPmm7uzRHYgI+fn5rFmzhrVr17JlyxYyMzNZtmwZy5YtY9asWYRb\nkCytqqvikvsSl92XKakq4Yr7il5XNVsHtJdWlRITHuMXndbEqPl+v6h+pi6YoUcREerqLlFVdcIv\nNI3Cc5zQ0OgmQhMoPGFh8Vab32N0t7A81VK7iHyvozds4dorgWdo7DL8E6XUw/ry8qL3mOeAleju\nxveLyJ7Wzm3lHvLkk8LRo1ok2jv+SgReeAG+8x34zW+0F9PdVFdXs3nzZtauXcvatWs5evQoixcv\nZvny5SxevJhx48YFbRkUj3hw1jgpqSphzftrGDF1RIsC1FyYnDVOHJGOlsUnQISaC1JCZEK3vIve\nHDMPVoLJbhGhtvZCE6FpFJ4PCQ2NbyI0e/bUsmzZx4iOHkVoqL1KynRXjqVDE30ppWJExN3Rm1iN\nUkqqqoSpU+H734c77ujY+bt363Nuuw1+9rOezX3+/e9/p7q6mrVr15KTk4Pb7WbhwoUsWrSIhQsX\nMnr06KAUmo58UTR4GiirLmvVE/Ktm7dV1lbSL7pf6x5RC+39Y/oTGx7b5jsLpi+5jmJX2+1itxad\n803EZuPGLUyYUEZ19YeEhfUnOnqUV3hG+72dqKiRhIYG38yulgqLUmoO8DIQJyLDlFKTgYdF5JGO\n3tAKfN2Nt26Fj38c8vNhwICOXaO0FO6/Xw8BeP11GD68e2y9FoWFheTm5pKTk0NOTg4NDQ1NhGbk\nyJFBKTTdQV1DHaXVpVxxX2m3IF1xX6FBGq7yhHyiEyhAzYUpIjT4elMZggeRBmpqzjbzcHROp7r6\nFBERyURHj/KKzeiAUFs6ISHB+bfV3cKyHbgd+KeITPW2HRCRiR221AICx7F89ataHP7yl45fRwR+\n/nPttfzhD3DzNXowdzciwsmTJ5sITUhIiF9ksrOzSU9P7zNC016q6qpaFZ22PKSosKjWw3OteE39\novsR1kVduA32xeOpp6bm9FWhNbf7GDU1Z4iMHOL3bpKSbqZ//5usNhnoAWERkVlKqb0BwrJfRCZ3\nwtYeJ1BY3G6YNAl+8Qv4yEc6d70PPoBPfQo+/WkdWuuCKR1apSMhAhHh+PHj5OTkkJuby6ZNm6is\nrGTatGlMmzaNrKwspk2bxqhRo657fE1X2h1stGS7iOCqdbXsFQWKU7O2suoy4iLiOhSqS4pOIjEq\nkRDV8X8ju753u9oN12+7x1NHdXWBX3Sio0cyYMBtXWdgG1g9juWMUmouIEqpcODLwFW9r+xATAy8\n9BLccw8sWKDn/Oko8+frvMu//zssWwZ//Wv7e5t1J0opRo8ezejRo3n44YcBKC4uZu/evezevZs3\n3niDJ598kitXrjB16tQmgjNmzBhCe1ONpy5GKYUj0oEj0kFa
Ylq7z/OIh/Lq8hY9oCvuK5woOcH2\nc9spqSpp8pmrxkVCVELL4tOKIPWP7o/VxVYNHSckJJyYmNHExIymf3+rreka2uuxDED3vlqKHju3\nBvhykAyYvCYtlXR55BE9mPnllzt/3YYG+MEP4He/g1dfBbv84CopKWHPnj1NlvPnzzNp0qQmns34\n8eMt6epsgHpPPWXVZU3Ccz4xaq3L92X3ZWobahkQM4D+Mf0ZEDNAb0f3b7IO/Lx/dH8ckQ4TLjW0\nSI/0CrMrLQmLywUTJ2pRWL78+q6/Zg3ce68Oj33jG3omVLtRXl7Ovn37/EKze/duCgsLGTduHJMn\nT2bSpEn+df/e8rOqF1JdX80V9xUuuy9zpUqvL7sv+9suV12+6vPq+mq/1+MTG78YxbS8nRiVaMYe\n9QG6RViUUv8pIj9VSv0aXa2kCS3V5QpGWitCuXo1PPSQHgQZf51jni5cgP/6L90p4IEH9MyiHe15\n1hJWxp4rKyvJz88nLy+PvLw89u/fT15eHnFxcX6R8S1jxoxp4t305Zi5lXTG9pr6Gr/H4wvZBQqP\nfx3Q7qpxkRiV2FSMYvozIPpqMfJ93lZlhr72zoMFq3IsvjzKro5e2A6sWAFLlmgv47//+/quNWgQ\n/PrX8J//CT/6EYwZA5//PHzta7r2nx2JjY1l9uzZzJ49298mIpw+fdovMm+99Rbf+973OHPmDGPG\njPF7NiLChAkTGDhwoIVPYGgPkWGRpMSnkBKf0u5z6j31lFaVtipGx68cb+oxeSszxEXENREb39p5\n1Mnh2MNXiVH/mP5EhQXf+A9D2/TZUJiP0lLIzIQ//7lrcySnTmkP5n/+B774RXj8cejXr+uuH2y4\n3W4OHDhwlXcTFRXl92p8Xs7YsWOJCMIKu4buxdeRobkX1MQjqrq6LSI04uqwXPPQXTOxigmPMXmj\nLqC7uxuvBe4QkTLvfj/gNRFZ0WFLLeBa87H861/wla9AXh7EdnFFhoIC+OEP4R//gC99Sd+nG2pM\nBiUiwtmzZ/0i4xOcwsJCRo8efZXgDAqGrnWGoMLXzdsnNv5cUaCn1IIY1Xnq/L34HJEOYsNjiY2I\nJSY8Ri9hMf7twPbY8IBjmn0eFxFHXEQc0WHRfUa0ultY9onIlGZt/jEtwU57Jvr6zGe0R/HMM91j\nw4kTWmDeeQe+/GV47DFwOK59nl3jt23ZXVVVxaFDh64SnLCwsKvEZty4cUT28MRTdn3nYF/bu9ru\n2oZanDVOnDVOyqvLcde5/UtlXWXT/dpKquqrrvqssrbS3+bbrqitoLq+mujwaL/wSIEwcMLANsUp\nJjyG+Ih4HJEOYsJj/OdHh0U3uVZ0WDTxkfE9Fv6zehxLg1JqmG/GSKXUcFpI5tuZX/1Kh8Q+8Qk9\nvqWrGTUKVq2CY8d0F+X0dN2T7JFHYOTIrr9fMBMdHU1WVhZZWVn+NhHh/PnzfrF57733ePrppzl5\n8iSjRo26SnBSUlL6zK9GQ8fxhc8GxHRBD5pmNHgaqK6v9otQTk4OmbMy/QLlE6HAbXedm4uVF3HW\nOP2f+dqr6qr8bVV1Vdw7+V5+vuLnXW53T9Jej2Ul8CKwAe8cUMBDIrK6e83rGto7NfE//6mT7fv3\n64GU3UlBATz/PPz+9zBzJjz6qO5M0M0D4m1HdXU1hw8fvsq7Aa4Sm/HjxxMVZRK9BkNX0RNz3g8A\nfN2DtonI5Y7ezCo6Muf9PffoPMtvf9szX/JVVfDaa7pHmculPZj77+9cRYC+gohw4cKFJp0E8vLy\nOH78OOnp6U3EZvLkyQwePNh4NwZDJ+iucSxjReSIUmpaS5/75kUJdjoiLCUl8LGPaXH505+6ZixK\nexCBbdv0jJX/93/wyU/q3mQlJSZm3l5qamo4cuTIVYJTV1d3ldiMHz+e6FamBrVrngLsa7td7Ybe\nbXt35Vi+CjwEtBTwE2Bx
R28Y7CQlwfr18P/+H2Rl6RL5AcM4ug2lYM4cvVy4oCsC3HSTFrZvfUuL\nnamu0jaRkZFMnjyZyZMn8+lPf9rfXlxc7Beb3NxcnnnmGY4dO0ZaWlqTgZ6TJ09myJAhFj6BwdA7\nuJbHcoeIvKGUSheRkz1oV5fSEY8lkH/+Ex58EL75Td2Lq6ejKXV18Pe/ay/mxAlty+c+B0OH9qwd\nvZHa2lqOHj3q92x865qamiYVBSZPnsyECROI6e6km8EQhHRXKGyPiEzzra/LQgvprLCATrLfcQek\npemClVaNQcnLgxdf1CVj5s6Fhx/WHk13luzvi1y8eNEvNPv37yc/P5+jR48ybNiwqzoLDBs2zORu\nDL2a7hKW9wEPMBPY2PxzEenkjCY9y/UIC+gqyF/9qi42+eabMLkHZ6FpHgOtrIS//U2LzJkz2oN5\n4IHg82J6U9y5rq6Oo0ePXpW7qaysvMq7mThxIrFdPcr2Omy3C3a1G3q37d2VY7kZmAb8iZbzLH2C\nyEhdS+yvf4WlS+EnP4HPfrbnQ2OgOxXcf79e9u/XuZjJk7UX89BDelZL48V0LeHh4UycOJGJEydy\n9913+9svXbrkL9K5detWnn/+eY4cOUJqaupVuZu0tDTj3Rj6DNfyWP4kIp/2VTnuQbu6lOv1WAI5\nfBhuvx1mzIDf/Kb7x7u0h8pKeOMNeOGFRi/mc5+DYcOstqzvUV9fz7Fjx67ybpxOJ5mZmVd5N/HX\nW1bbYOhGuisUdgg9ude7wEL04Eg/IlLS0RtaQVcKC+gv8i98Afbu1aGxMWO67NLXTV6e9mL+8hct\nfh/5CNxyCwwfbrVlfZsrV66Qn5/fpLPA4cOHGTRo0FXz3YwYMaLbp442GNpDdwnLY8AXgHTgHE2F\nRUQkvaM3tIKuFhbQ405eekn3GHvuObjzzi69vJ/Oxm/dbl1c85134N13ISVFC8ytt+ru0909C3Fv\njjt3FfX19Zw4ceIq76akpKSJdzNp0iQyMzNJaEfPEbu+d7vaDb3b9m7JsYjIs8CzSqnfisgXOnrx\ntvBWSH4dGA4UAp8UkfIWjlsJ/AoIAV4Wkae97U8BDwIXvYd+U0Te60ob20Ip3f03K0v3Gtu0CX7+\nc52PCQZiYrTY3XmnnkJ5xw54+2096PLsWVi5UgvNypW9u5x/MBMWFsbYsWMZO3Ysn/zkJ/3tpaWl\n/tzN3r17+eMf/8jBgwdJSkoiMzOTzMxMJk6cSGZmJmPHju3xIp0Gw7XoSEmX+UCGiPzBW94lXkQK\nOn1jpZ4GrnhnqHwC6Cci32h2TAhwDFgCnAd2And5qwE8BbhE5BftuFeXeyyBlJXpnlk7d+qJvj77\nWWhlUHdQcOaMHt3/9tuwYQNMmaI9mVtugfHjremUYGgbj8dDQUEBBw4cID8/378UFBSQnp7u71ww\nYcIEJkyYwMiRIwkzvTgM10l3l81/CpgOjBGR0UqpwcAbIjKv46b6r3kEyBaRYqXUICBXRMY2O2Y2\n8JSI3OTd/wY6BPe016YKEblmb7XuFhYf27bBj3+svYOvfEXnYdpTGt9KqqogJ0eHzN5+W9dH84XM\nFi4EU9MxuPGVscnPz+fAgQMcPHiQgwcPcuHCBTIyMvxC41vS09MJ7e44qKHX0O3zsQBTgT2+OViU\nUnkiMqnDljZes0REklrb97Z9AlghIg959+8BZorIY15huQ8oR0+d/LWWQmne83pEWHzk5+suyatX\n66KSjz3W+ZpjPRm/FYEDBxpFJi8PbrwRFi/Wy+TJ7S/M2ZvjzsGMz/bKykqOHDniFxrfUlxczJgx\nY64SnLS0NEsFpze8czti9XwstSIiSinx3qxdI8C8M08mBzaha4x9q4XDO/rN/xvg+167fgj8Avhc\nawffd999pKWlAZCYmMiUKVP8LzQ3Nxegy/avXMnlwQfhe99byE9/CiNG5LJyJfzqVwtJTe
36+3Xl\nfmamtn/2bMjMXEhODvzpT7k88wy43QtZuBCGDMll2jT49KcXolTL19u3b19QPE9n9vft2xdU9lzP\nflZWFrm5udx0000sXLiQiooK/vznP1NYWEhJSQnPP/88u3fvpry8nPHjxzNhwgSio6NJS0vjrrvu\nIi0tjY0bN3a7vebvJTj2c3NzWbVqFYD/+7IztNdj+Q8gA1gG/Bj4LPAXEfl1p2+s1GFgYUAoLEdE\nxjU7ZjbwXRFZ6d33h8KaHTcc+FdrHlRPeyzNOXsWfvELPdHXHXfAE0/oib7sxrlzOmy2fj2sW6dr\nmfm8mcWLddkbgz1xuVwcOnToKg+npKSEcePGMWHCBL/wTJgwgeHDh5su0X2AnpiPZRmwHO11rBaR\ntR29WbPrPQ2UePMlrSXvQ4Gj6OR9EbAD+JSIHFZKDRKRC97jHgdmiMjdtIDVwuLj0iV49lk918vy\n5TpMNm+ePZPlIrqO2vr1jUtMTKPILFqkuzgb7E15eblfcAKFp6yszC84gYupn9a76AlhSQZmeHd3\niMjFto5vx/WSgL8BQ4FT6O7GZUqpFOB3InKr97iVwDM0djf+ibf9FWAKupZZIfCwiBS3cq+gEBYf\nTqcuaPnCC3o8yec/D5/+dMuTe+XaJH4roqsS+ERm7dpchg5d6BeahQv1lAR2wC7vvCV6yvaysjIO\nHTp0lZfjdDr9nk2ghzN06NA2Bce8c2u4lu3dmmNRSn0S+BmQi/ZYfq2U+rqIvNnRG/rwjtpf2kJ7\nEXBrwP57wFVj20XkM529t9U4HPD447rn2IYNWmC+8x0958rnP6+nKrbbjz6ldFfl8eP1NMvr1unx\nMevX60oA998Po0Y1ejTz54OpZmJfEhMTmTt3LnPnzm3SXlpa2kRs3nvvPQ4ePEhlZWUTofEtqamp\nxsPphbQ3x7IfWObzUpRSA4H3RaQH6/x2nmDzWFri4kWdg3nxRYiL02Xx//3fg7+7cnupq9PjfHwe\nzc6dMGlSo9DMmWO6NvdmSkpKrgqnHTx4kOrq6hY9HDOddHDQ3d2N80UkM2A/BNgf2BbM2EFYfHg8\n+tf+Cy/o9R13aC9mmm1nw2mZqirYurWxI8CBA9pT8wnN9Olmxsy+wJUrV1oUnNra2hYFJyUlxQhO\nD9LdwvIzYBLwV2/TnUCeiDzR0RtagZ2EJZCiIvjWt3JZt24hAwfqisV33mmPEiwdjTs7nbosjs+j\nOXlSh8s6M4bmeunNMfNgpbndly5darGXWkNDQ4shteTkZMsEx67vHCzKsSilRgHJIvJ1pdTHgfne\nj7YCr3b0ZoaOkZKik/ovvqgHW65apbsqr1gB996r172laofDoUf833KL3r98Weef1q/XxT4vXoTs\nbFiyRAvN2LH2y0MZ2s/AgQPJzs4mOzu7SfvFixc5dOiQv8rAm2++ycGDBwGaCM7YsWPJyMhg6NCh\nptKABVyruvHbwJMikt+sPRP4kYjc1s32dQl29VhaoqQEXn8d/vhHOHVK52HuvRcybRGU7Dznzzcd\nQ1NT03QMzYgRVltosAoRobi4uImHc+zYMY4fP87ly5dJT08nPT2dESNGXLU4eksSs5vorrL5O0Vk\nRiuf5Zsci7UcOQKvvAJ/+hMMHKgF5u679XZvp/kYmqiopmNoBg+22kJDMOB2uzlx4gQFBQUUFBRw\n8uRJ/3ZBQQHR0dEtCk56ejrDhw8nIiLC6kewlO4SluMiktHKZydEZFRHb2gFdhaW9sRvGxr0r/lV\nq3SNr+xsLTK33gpW/b/oybiziBZZn8jk5sINN9BkDE3//u2/Xm+OmQcrVtgtIly8eLGJ0AQKz7lz\n57jhhhuaiE2g+AwePJiQkBDbvnOwbhzLLqXUgyLyu2Y3ewDY3dGbGbqH0FBYulQvTqee1fKZZ3Qp\n/2XL4Kab9LwrgwZZbWn3oBSMG6eXL35RC+3+/Vpsf/
97PY1Benqj0Nx4Y+/pxm3oPEopkpOTSU5O\nZvbs2Vd9Xl9fz9mzZ5sIzurVq/3CU1payrBhw0hISCArK+sq4UlKSuqzPdiu5bEkA28BtTQKyXQg\nAvg3X0mVYMfOHsv1cP48vPeenntl3Tr95XrzzVpoZs3q/lkkg4XmY2h27NA5KZ/QzJ0b3PPnGIIT\nt9tNYWFhE48nUISAFkNsI0aMIC0tjZiYGIuf4Np0d3fjRcBE7+5BEVnf0RtZSV8VlkDq6mDLFj1N\n8bvv6sKYy5droVmxQoeO+gqBY2jWr9fTA8yY0Sg0M2eaMTSG60NEKC0tbVV0Tp06RWJiYothtnHj\nxpESJIX2ur1WmJ2xs7B0V/z27Fntzbz7rvZmMjIavZkZM67fm7FT3NnlajqG5siRXLKzG+ucTZli\nH+/OTu89ELvaDZ2z3ePxUFRU1ERwCgsLOXnyJEuXLuXb3/529xjbDEtrhRl6H0OG6BzMAw9AbW2j\nN/Pgg3DhgvZmbrpJr3u7NxMfr0X15pv1/j/+ofM069bpcUQXLugOET6hMdM3G66XkJAQUlNTSU1N\nZf78+dc+wWYYj8VwFWfONOZmcnJg2DA9MHHJEliwoO8lvouKGsfQrF8Pbrfu0uwTmvR0IzSG3okJ\nhbWBEZbOU18Pu3frX+/r1jUmvn1CM2cOREZabWXPUlDQVGjCw5uOoRkyxGoLDYauobPCYqaAC3J8\n0xWwM50AABYaSURBVIZaRViY7kH2zW9qYbl4EX7wAx0qeuIJGDBAh8uefhp27dLtwWD39XAt20eM\n0F2Y//xnPavmmjU64f+Pf+h8zJgx8IUvwBtv6MndehK7vne72g3G9pYwORZDh4iObvRWAMrKdE2v\ndev0oMyiIp2PGDYMkpN7f00vpbSQ+MTE44H8fO3JvPKKzmGlpTV6NAsWQEKC1VYbDN2LCYUZupSi\nosZ6XuvW6VDa4sWNYjR0qNUW9iy+UKIvbLZtm07++4Rm3jw9pbPBEIyYHEsbGGGxBhH48MNGkVm/\nXk9P7BOZRYs6VmqlN1BdDdu3N76PffsgK6tRaGbNsq4Mj8HQHJNj6aXYNX6bm5uLUno64ocfhr/9\nTedn/vY33fb73+tcxbRp8PWv615olZVWW63pznceFaVDhd//Pnzwge7K/OST+tkff1wL7YoV8NOf\nNs1ZtRc7/73YFWP71Zgci6HHCAnRye0pU+BrX9PjZ3bs0L/ef/Qj2LNHC83Spdqj6Qsj4OPidB23\nlSv1fmlp4zw0992nOwcsWKALaWZn6wnP7DJY09B3MaEwQ9BQUaFHwPtCZ75ZJH2hs8zMnptFMlgo\nLtZdmzds0EtRkc7L+IRm6tTeM9mbIfgwOZY2MMJiTy5f1l+qPqEpK2vaEaAvDkwsLoaNGxuF5vRp\nXURz4ULt2WRlmRyNoeswOZZeil3jt11h94ABcMcd8PzzcPy47l21cqX+Yp0/X3fj/exn4dVXda6i\nqwjmd56crN/Jc8/pbs0ffqjL8Jw/D488AgkJuSxYoMcdvfOODq3ZgWB+59fC2H41ljnRSql+wOvA\ncKAQ+KSIlLdw3MvArUCxiEzq6PmG3sOwYXD//XoRgcOHdS7izTfh0UchNbXRm8nO7hvjRQYMgI9/\nXC+gxSQ8HDZvhl/+Ej71Kf3e5s3TYjxvnu400dc8PUPPYlkoTCn1NHBFRH6qlHoC6Cci32jhuPlA\nBfBKM2Fp1/neY00orJfT0KCT/76wmW+8iE9o5s3TPbL6GvX1etKzzZt1L7QPPtDtgUIzZYrJ0xha\nxnY5FqXUESBbRIqVUoOAXBEZ28qxw4F/NROWjpxvhKWPUV2t51zxCc2BA7qXmU9osrL65pepCBQW\nNgrN5s1w6pSeKsEnNLNn971Co4aWsWOO5QYRKQbwzkTZ0eLs13u+LbBr/NZqu6Oi9ADMH/5QC8zZ\ns/CVr+ixNA88AA
MHwkc/Cs8+CwcP6i9cH1bbfj1cy3aldCjsnnt07io/XwvL176mJ4P74Q9h8GDd\n2+xLX4LXXtPvzmq7gxlj+9V06282pdRaIDmwCRDgWy0cfr0uRZvn33fffaSlpQGQmJjIlClT/BPc\n+F6u2e+6/X379gWVPQC33baQ227T+yUlUFOz0DuGJpfaWrj55oUsWQIFBfuCwt6e2t+/P5eYGPjR\nj/T+2rW5HD8ObvdCXn8dHn44l6goWLp0IfPmQWRkLmlpsGRJ19kTjH8v7d3ft6/3/L3k5uayatUq\nAP/3ZWewMhR2GFgYEMrKEZFxrRzbUiisI+ebUJihTU6ebFp6JiFBh8x8pVYGDLDaQusQ0b3yfDma\nzZt1t+c5cxpzNTNnmppnvRE75lieBkpE5Ol2JN/T0MKS2cnzjbAY2o3Ho3MyPqHZtEmHjwInO4uL\ns9pKa7l4Uc866svV5OXBxIlNOwUkJ1/7OobgprPCgohYsgBJwPvAUWANkOhtTwHeDjjuL8B5oAY4\nDdzf1vmt3EvsSk5OjtUmdAq72i1yte21tSKbN4t8//si2dkisbEi8+aJfOc7Ihs2iNTUWGJmi1j1\n3t1u/S5+9CORm28WSUwUGTVK5N57RV58UeTQIRGPp/Xze9Pfi524lu3e784Of79b1i9GREqApS20\nF6HHrfj27+7I+QZDVxMerke3z50L3/62npr4gw+0N/PVr8KxY/ozn0czZUrfKz0THa09uQUL9L7H\nA4cOaY9m40b48Y/B6dTvyefRTJ/e92Yf7SuYki4Gw3VSUgK5uY2hs0uXdI80n9BkZJgBiaCrAwR2\ncz5yRIuwT2jmzu170ygEO7bLsfQkRlgMPcnZs00nOwsJaVrjbPBgqy0MDioq9Nw0PqHZtg2GDGnM\n08yf3zfrwQUTdhzHYmgHvq6AdsOudsP12z5kCHzmM/DHP8KZM7B2re419fe/6wrN48bpEjRvvdX1\ntbzs9N7j4rTQPvUUfPObugv4q6/qqQHefVeX5UlJgdtv1+Vpdu7UY22CDTu98+Z0l+19cOyxwdBz\nKAVjxujlkUd06Zl9+7Qn88ILWoDGjm30ZubP1/mKvkhYmB6YOXWqFl4RXb3Z59H84Q9QUKCrBPi8\nmtmz+0ZNOLthQmEGg4XU1OgQkC9stn+//uL0Cc2MGX2z9ExrlJXpSgq+XM2uXXpG0sBuzsOGWW1l\n78HkWNrACIvBLrhc/3979x5kZX3fcfz9aYiKRkGMRDTxRgS0gW4gRYnssKbNVFOjDM3Q1ksC1Zlq\nmqo1idJO1ElrpiUdM7XmUq1RTFvrxMYYMLVoo5suNaLCbiSCVk28gHipaZxIQAG//eN3juewnt09\nu57zXHY/r5lnOOfsc5aPD7vn6+/6pFlU1ULz9NPQ2ZkmA3R1+Q6S/b3+OvT27jkpYK+90uLNE06A\n449PdyUdq63At8uFZRBlLizd3d1vbr1QJmXNDcXK/uKLacZZ9di6tXar4mqhqZ/aXKTsw9Gq3BHw\nxBOpFbh2bTo2boRp09LGo3PmpGnOM2e2brfrsl5zGDr7SAuLG9lmBTZ5MixenA5INzT74Q9Tkbn2\n2lR4qoVmwYI0hjOWSWl69zHHwNlnp9d27Eg7Azz0UDqqN46bPr1WbObMgVmzxuatFdrBLRazEtu6\ntVZourtT4Zk3L403dHam2Wj+sHyr7dtTsVm3LhWbdeveWmxa3bIpI3eFDcKFxcaKl15K4ww9Pel4\n5JG0CLGzMx0f/jAceGDeKYupvthUC85Yb9m4sAyizIWlrP23Zc0Noyv7tm1pvGHNmlRo1q5NG2pW\nWzSdnWndTd6Kes37F5t169IWPtOmpRbNnDkA3Sxd2lXKYuMxFjMbtv32q01dhrTAsK8vFZlbb4UL\nLkjndHbWis2MGWNvr7OBjB+fZpYdf3zttf5jNt3d6UZpY7ll059bLGZjWAQ89lit
RdPTkzaLPPHE\nWrGZPTtN4bWBDdSyKXuxcVfYIFxYzJq3ZUvtpl49PfDkk2mhZrVFM2+e70fTjGa60YpebFxYBlHm\nwlLUvuehlDU3OHt/1dXu1RZNb2/qLqu2aObPf/s39Ror17zajVY/G61/y6arK+0nlwWPsZhZLiZO\nhFNOSQekD8d161KRueEGOPdcOPjgPcdppk71rsSN7LNPmgI+d27ttR070lY+1WKza1d2haVd3GIx\ns7eleivnnp5a99nu3XvOPJs1y1vRlJG7wgbhwmKWnQh46qk9JwQ891wam6m2aubO9f5dZeD7sYxS\nZb3XQ1lzg7O/XVJaK3P22XDddbBpU9q/67zz0v1nLrkkdZ2deCJceinccQesXJl/7pEqwjUfKd+P\nxcxK6+CDYeHCdEBauLl2bWrNXH112i1g6tRa99n8+d7+vszcFWZmudu5Mw1g14/TjB+/54SAY4/1\nws2seYxlEC4sZuUSkfbpqo7R9PSkac/1CzfnzPHCzXbzGMsoVdb+27LmBmfPQ//cUlpIeM45sGJF\nWqS5YQOcdVa6XfGnPw2TJqU1H5ddBqtXp5uk5aGs1xzalz23wiLpQEl3SXpM0mpJDe9cLembkl6Q\n9HC/16+QtFnS+spxcjbJzSwPhx6a7ktzzTVpkeaWLbBsWZru/KUvwZQpqRVz0UVwyy3w05+mlo9l\nL7euMEnLgZcj4suSLgUOjIhlDc6bD7wKfCsiZtW9fgXwy4j4ShN/l7vCzEa5115LCwx7euCBB9Kx\nY0fajmbOHPjgB9Nx1FFevNms0o2xSHoUWBARL0g6BOiOiBkDnHsEsKpBYXk1Iq5q4u9yYTEbg557\nDh58ENavT0dvb5qR1tGRiszs2enP6dNhnOfIvkUZx1gmR8QLABHxPDB5BN/jM5L6JF0/UFda2ZW1\n/7asucHZ89Cu3IceCqefDl/8IqxaBZs3p92cly1Lt31etSpNgZ4wIW2Nf/75ae3NQw+l1k6e2bNQ\nynUsku4G6renExDAFxqcPtwmxdeBv4yIkHQl8BXgnIFOXrJkCUceeSQAEydOpKOj483N16oX189b\n97yvr69QeYbzvK+vr1B5xsLzLH9eNm7sZu+9Ydmy2te3bYMDDuhi/Xq47bZuli+HrVu7eP/7YcqU\nbo45BhYv7qKjA9av3/P7jaafl+7ublasWAHw5uflSOTZFbYJ6KrrCrs3IhpuvdaoK2yYX3dXmJkN\ny44d6dbO9d1oGzakSQLVLrTqn5NH0t9SAmXc3XglsARYDnwK+N4g56py1F6QDql0oQEsAn7Shoxm\nNkbts09tK/uqXbvSNve9vanYLF+eHu+7b63IVI8jjhi7kwTyHGNZDnxU0mPAbwF/AyBpiqQ7qidJ\nuhm4D5gm6RlJSytf+rKkhyX1AQuAP8s2fjaqzdSyKWtucPY8lCX3uHFw3HFw5plw1VVwzz2p62zN\nGli6NE1vvv76tOHmQQelW0J//vNw881pz7Tdu/P+L9hTu657bi2WiPg58NsNXt8KnFr3/IwB3v/J\n9qUzM2tOddPNo46CRYtqrz//fGrN9PbCd78Ll1+eXps5c89utA98APbeO7/87eAtXczMMvLKK9DX\nV+tK6+1NOz9Pn14rNCedlIpNEZRuHUuWXFjMrKi2b0+TAqqFpqMjTXsugjKuY7EmlKXvub+y5gZn\nz0NZc8Pbzz5+fLrx2XnnwbXXZltU2nXdXVjMzKyl3BVmZmYNuSvMzMwKwYWl4Mra91zW3ODseShr\nbnD2RlxYzMyspTzGYmZmDXmMxczMCsGFpeDK2n9b1tzg7Hkoa25w9kZcWMzMrKU8xmJmZg15jMXM\nzArBhaXgytp/W9bc4Ox5KGtucPZGXFjMzKylPMZiZmYNeYzFzMwKwYWl4Mraf1vW3ODseShrbnD2\nRlxYzMyspTzGYmZmDXmMxczMCiG3wiLpQEl3
SXpM0mpJExqc815J90h6RNIGSRcM5/2jQVn7b8ua\nG5w9D2XNDc7eSJ4tlmXAf0bEdOAe4M8bnLMLuDgifh2YB/yJpBnDeH+plOUH1DlbpwwZwTlbrSw5\nRyrPwnI6cFPl8U3Awv4nRMTzEdFXefwqsAk4rNn3l02jH7aurq7McwylmV+KIuQe6S9vltlb/QHT\nruzt/iBsVe48PrBHkr0ohaVdPy95FpbJEfECpAICTB7sZElHAh3A/SN5v5mZZaOthUXS3ZIerjs2\nVP48rcHpA07bkvQu4N+ACyNi2wCnjcppX0X5P5vhKmtucPY8lDU3OHsjuU03lrQJ6IqIFyQdAtwb\nEcc2OG8ccAdwZ0RcPdz3V84dlUXHzKzdRjLdeFw7gjRpJbAEWA58CvjeAOfdAGysLyrDfP+ILoyZ\nmY1Mni2WScC3gfcBTwOLI+IXkqYA/xgRp0o6EfgvYAOpqyuAv4iI/xjo/Xn8t5iZWc2YWHlvZmbZ\nGVUr7yWdLOlRSf8j6dIBzvl7SY9L6pPUUbSMkqZLuk/SDkkXZ52vLsdQOc+Q9OPKsUbSzILmPK2S\nsVfSA5VWcOFy1p33m5J2SlqUZb66v3+o67lA0i8kra8cXyhizso5XZV/959IujfrjJUMQ13Pz1Uy\nrq9MbtolaWLBMh4gaWXlM3ODpCVDftOIGBUHqUg+ARwBvBPoA2b0O+cU4PuVx8cD9xcw47uBOcBf\nkRaHFvVangBMqDw+OetrOYyc+9Y9nglsKmLOuvN+QJqssqiIOYEFwMqss40g5wTgEeCwyvN3FzFn\nv/NPJS36LlRG0uLzv65eR+BlYNxg33c0tVjmAo9HxNMRsRO4hbSIst7pwLcAImItMEHSe4qUMSL+\nNyLWkXYdyEszOe+PiFcqT++ntnA1S83k/FXd03cBb2SYr6qZn02APyVNq38xy3B1ms2Z92SYZnKe\nAXwnIrZA+r3KOCM0fz2r/hD410yS1TSTMYD9K4/3B16OiEE/n0ZTYTkMeLbu+Wbe+mHX/5wtDc5p\np2YyFsFwc54L3NnWRI01lVPSwsr09FXAH2WUrd6QOSUdCiyMiG+Q3wd3s//u8yrdIt+XdFw20fbQ\nTM5pwCRJ90p6UNLZmaWrafr3SNJ4Usv/OxnkqtdMxq8Cx0l6DvgxcOFQ3zTP6cY2Ckg6CVgKzM87\ny0Ai4nbgdknzgSuBj+YcqZG/A+r7t/NuFQxkHXB4RPxK0inA7aQP8aIZB8wGPgLsB/xI0o8i4ol8\nYw3o48CaKObM1t8BeiPiI5KmAndLmhVpm62GRlOLZQtweN3z91Ze63/O+4Y4p52ayVgETeWUNAu4\nDjgtIv4vo2z1hnU9I2INcHRlqnqWmsn5IeAWST8DPgF8bYAdKtppyJwR8Wq1ezEi7gTeWdDruRlY\nHRE7IuJl0rKF38goX9Vwfj7/gOy7waC5jEuB2wAi4kngZ8AMBpP1gFYbB6HeQW0Qai/SINSx/c75\nGLXB+xPIfvB+yIx1514BfLbA1/Jw4HHghIL/m0+tezwbeLaIOfudfyP5DN43cz3fU/d4LvBUQXPO\nAO6unLsvaS3ccUXLWTlvAmlAfHxBr+XXgCuq//6krrNJg33fUdMVFhG7JX0GuIvUEvtmRGyS9Mfp\ny3FdRPy7pI9JegLYRqrEhcpYmUzwEGmQ7A1JF5J+IQZsduaRE7gMmAR8XZKAnRExN6uMw8j5e5I+\nCbwObAcWZ5lxGDn3eEvWGaHpnJ+QdD6wk3Q9f7+IOSPiUUmrgYeB3cB1EbGxaDkrpy4kta62Z5lv\nGBmvBFZIerjytksi4ueDfV8vkDQzs5YaTWMsZmZWAC4sZmbWUi4sZmbWUi4sZmbWUi4sZmbWUi4s\nZmbWUi4sZsMg6ZcNXuuUtG6o7e4lvSHpb+uef1bS5e3KapYXFxaz4Wm08Otp0u2x/2WI974GLBrp\nFiiS3jGS
95llbdSsvDfLS0Q8AyBpqNXGu0h7q10M7HGDLElHADcABwEvAUsjYrOkG4EdQAfw35UW\n01HA0aR97y4mbU90Cml/rI9HxO4W/aeZjYhbLGbZCdK+S2dK2r/f164BboyIDuDmyvOqwyJiXkR8\nrvL8aKCLdN+MfwZ+EBGzSAXod9uY36wpLixmGars+XYTb72nxTxqu9v+E1B/C+Vb+517Z0S8QdpY\n8dci4q7K6xuAI1sa2GwEXFjMsnc1cA7pPiFVg3Wjbev3/DVIOwSSNoOsegN3b1sBuLCYDc9QN+Aa\n7OsCiHTvmm+TikvVfaRb0wKcBfS0KI9Z5lxYzIZnvKRnJD1b+fMiSR+S9CzpBl3/IGnDAO+tb5Vc\nRRqor752AbBUUh9wJrWusqEmBHh7ciscb5tvZmYt5RaLmZm1lAuLmZm1lAuLmZm1lAuLmZm1lAuL\nmZm1lAuLmZm1lAuLmZm1lAuLmZm11P8DHWVFDfWNk0YAAAAASUVORK5CYII=\n", + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "np.random.seed(101)\n", + "x = np.random.rand(100,10)\n", + "y = np.random.rand(100,1)\n", + "fit = glmnet(x = x, y = y)\n", + "glmnetPlot(fit);" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "We wish to label the curves with the variable names. Here's a simple way to do this, using the `matplotlib` library in python (and a little research into how to customize it). We need to have the positions of the coefficients at the end of the path. " + ] + }, + { + "cell_type": "code", + "execution_count": 19, + "metadata": { + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } + }, + "outputs": [], + "source": [ + "%%capture\n", + "# Output from this sample code has been suppressed due to (possible) Jupyter limitations\n", + "# The code works just fine from ipython (tested on spyder)\n", + "c = glmnetCoef(fit)\n", + "c = c[1:, -1] # remove intercept and get the coefficients at the end of the path \n", + "h = glmnetPlot(fit)\n", + "ax1 = h['ax1']\n", + "xloc = plt.xlim()\n", + "xloc = xloc[1]\n", + "for i in range(len(c)):\n", + " ax1.text(xloc, c[i], 'var' + str(i)); " + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "We have done nothing here to avoid overwriting of labels, in the event that they are close together. This would be a bit more work, but perhaps best left alone, anyway." 
+ ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Linear Regression - Multiresponse Gaussian Family" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The multiresponse Gaussian family is obtained using `family = \"mgaussian\"` option in `glmnet`. It is very similar to the single-response case above. This is useful when there are a number of (correlated) responses - the so-called \"multi-task learning\" problem. Here the sharing involves which variables are selected, since when a variable is selected, a coefficient is fit for each response. Most of the options are the same, so we focus here on the differences with the single response model.\n", + "\n", + "Obviously, as the name suggests, $y$ is not a vector, but a matrix of quantitative responses in this section. The coefficients at each value of lambda are also a matrix as a result.\n", + "\n", + "Here we solve the following problem:\n", + "\n", + "$$\n", + "\\min_{(\\beta_0, \\beta) \\in \\mathbb{R}^{(p+1)\\times K}}\\frac{1}{2N} \\sum_{i=1}^N ||y_i -\\beta_0-\\beta^T x_i||^2_F+\\lambda \\left[ (1-\\alpha)||\\beta||_F^2/2 + \\alpha\\sum_{j=1}^p||\\beta_j||_2\\right].\n", + "$$\n", + "\n", + "Here, $\\beta_j$ is the jth row of the $p\\times K$ coefficient matrix $\\beta$, and we replace the absolute penalty on each single coefficient by a group-lasso penalty on each coefficient K-vector $\\beta_j$ for a single predictor $x_j$.\n", + "\n", + "We use a set of data generated beforehand for illustration." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 20, + "metadata": { + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } + }, + "outputs": [], + "source": [ + "# Import relevant modules and setup for calling glmnet\n", + "%reset -f\n", + "%matplotlib inline\n", + "\n", + "import sys\n", + "sys.path.append('../test')\n", + "sys.path.append('../lib')\n", + "import numpy as np\n", + "import importlib, pprint, matplotlib.pyplot as plt, warnings\n", + "from glmnet import glmnet; from glmnetPlot import glmnetPlot \n", + "from glmnetPrint import glmnetPrint; from glmnetCoef import glmnetCoef; from glmnetPredict import glmnetPredict\n", + "from cvglmnet import cvglmnet; from cvglmnetCoef import cvglmnetCoef\n", + "from cvglmnetPlot import cvglmnetPlot; from cvglmnetPredict import cvglmnetPredict\n", + "\n", + "# parameters\n", + "baseDataDir= '../data/'\n", + "\n", + "# load data\n", + "x = np.loadtxt(baseDataDir + 'MultiGaussianExampleX.dat', dtype = np.float64, delimiter = ',')\n", + "y = np.loadtxt(baseDataDir + 'MultiGaussianExampleY.dat', dtype = np.float64, delimiter = ',')" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "We fit the data, with an object \"mfit\" returned." + ] + }, + { + "cell_type": "code", + "execution_count": 21, + "metadata": { + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } + }, + "outputs": [], + "source": [ + "mfit = glmnet(x = x.copy(), y = y.copy(), family = 'mgaussian')" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "For multiresponse Gaussian, the options in `glmnet` are almost the same as the single-response case, such as `alpha`, `weights`, `nlambda`, `standardize`. A exception to be noticed is that `standardize.response` is only for `mgaussian` family. The default value is `FALSE`. If `standardize.response = TRUE`, it standardizes the response variables.\n", + "\n", + "To visualize the coefficients, we use the `plot` function." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 22, + "metadata": { + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } + }, + "outputs": [ + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYMAAAElCAYAAAAGIY7hAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzsnXl4VNX5xz9nkslOViBhD5uyKlA2UQSLUhBBrChqLULd\nfi60VFvr1lqtC6hV0bqhVbTWlYqgIoJCRFFBhLDvJGEPkAVC1lne3x9nJhlCQibJJDOTnM/zvM9Z\n7r3nfufmZt45uxIRDAaDwdC8sfhbgMFgMBj8j3EGBoPBYDDOwGAwGAzGGRgMBoMB4wwMBoPBgHEG\nBoPBYMA4A4OfUUo5lFJrlVKblFLrlFJ3KaWUv3XVBaXU75VSW5RS/6mUP0Iple/6nOuUUksa6P5v\nKqV+3RBlG5o+of4WYGj2FIrIAAClVEvgPSAW+Ht9C1ZKWUTEWd9yasFtwCgROVjFsRUiMqG6C5VS\nISLiaDhpBsOZMTUDQ8AgIseAW4A7QX+ZK6WeVEqtUkqlK6VuduUrpdRLrl/hXyqlPnf/IlZKZSil\nZiql1gCTlFJdlFJfKKV+Ukp9o5Q6y3VeS6XUPFfZq5RS57nyR7h+va9VSv2slIqurNNVe9molNqg\nlPq9K+9loAvwhVLqD1V8vNNqO65f8i8rpX4EZimlopRS/1ZK/ei694QzPQfXsX8ppba6ahutPfJH\nuT7DeqXU60opq8fzedz1GVcrpforpRYrpXYqpW6t/V/N0GQQEWPG/GbAiSrycoFWwM3A/a68MOAn\noBNwJfCZKz/Zdf6vXekM4E8eZX0FdHXFBwNfu+L/BYa54h2ALa74QuA8VzwKsFTSNgBYD0QA0cAm\n4FzXsT1AQhWfZwSQD6x12X2u/DeBhR7nPQZc54rHAduByDM8hyuAL135bYA84NdAOLDX43O/Bfze\n4/nc4oo/A6S7PmdL4LC/3wdj/jPTTGQIZEYDfZVSV7nSsUB34ALgIwARyVZKLa903QcArl/1w4CP\nPPohrK7wYqCnR36MUioKWAk8q5T6L/CxiByoVPYFwHwRKXHd42NgONpBKKqoAbiorpnoo0qfd7xS\n6s+udBjQ8QzP4UJ0sxoickgp9bXr+NnAHhHZ7Uq/BdwOPO9Kf+oKNwLRIlIEFCmlSpRSsSJyoprP\nYGjCGGdgCCiUUl0Ah4gcdX1RTxeRpZXOGVdDMYWu0ALkiatPovKtgCEiYquUP0sp9RkwDliplBot\nIjtq/0m8prBS+koR2XmKUO+fg6omXplSV+j0iAMI5juh2WL6DAz+pvxLSynVCngZeMGV9SVwu1Iq\n1HW8u8ev90muvoNkYGRVBYtIAZChlJrkcY9zXNElwB888s91hV1EZLOIPIlujulRqdhvgYlKqQhX\nzeMKYEWdPvnpfAn83kNTP4/8qp7DCmCyq0+hDXCR6/ztQCeXYwX4LZDmI42GJor5FWDwNxFKqbXo\nJhEb8LaIPOs69jqQCqx1/To+AkwE/gf8EtgM7AN+Bo67rqm8DO9vgFeUUg+i3/f3gQ1oR/CiUmo9\nEIL+Yr0dmKGUughwuMr/wrMwEVmnlJqLdhQCzBGRDdXcuyYqn/8o8JxSagPaSWYAE6p7DiIyXynl\nfg57ge9dGkuVUtOAeUqpEJfWV73QaJYwbsYoEfP3NwQfSqloESlUSiUCq4DzReSIv3UZDMGKqRkY\ngpXPlFLx6A7hR4wjMBjqh6kZGAwGg8F0IBsMBoPBOAODwWAwECDOQCnVXim1TCm12TXN3z3FP0Ep\ntUQptd217E
BcNdePUUptU0rtUEr9xWhv2tqDVXcT0P5vpVS2a7STO+9cpdQPqmJ5i4HVXOs33dXo\n+YPr+Zf/Dao453mll+lI9xjm6zeqe3eqOK9uuv09BdrVZ5EC9HPFY9DjpHsAs4B7XPl/AWZWca0F\n2IWenm9FT6/vYbQ3Xe3BqrsJaL8A6Ads8Mj7Ehjtio8Flgea7ir09EYPLw5HDyteAnSpdM5Y4HNX\nfAjwo7/01vTu+Ep3QNQMROSwiKS74ieBrUB74HL0VHpc4cQqLh8M7BSRLNGzSd93XdcoGO2Nrz1Y\ndUPQa/8Ovf6RJ070OkoA8UDl5TvAz7qroCewSkRKRa8UuwK9ppMnlwNvA4jIKiBO6QmOfqOad6dd\npdPqrDsgnIEnSqlU9K+PH4FkEckG/SDwWJXRg3boiUdu9nP6A2oUjPbG1x6suiG4tXvwR+BppdRe\n4EngvirOCTTdm4Dhrqa5KOBS9GKFnlTWfAD/P+tyPN6dVZUO1Vl3QDkDpVQMMA/4g8vzVR73GrDj\nYI32xidYdUNwa6/EbejP0BHtGN7ws54aEZFt6Ga5pcAiYB16xnlQUMW74xMCxhkove7KPOA/IrLA\nlZ3truIopVLQ0/ArcwC9sqOb9lRdVW0wjHagkbUHq24Ibu1VcIOIfAIgIvPQTUKVCTjdIvKmiAwU\nkZHo5cUrL0Z4gFNrC37XDNW+O57UWXfAOAP0L4otIjLbI28hMNUVvwGo6sP/BHRTSnVSSoUB17iu\na0yM9sbXHqy6Ibi1V16m+4BSagToDXU4/UsVAkP3KSi9KCJKqY7oxQbfrXTKQmCK65yhQL67Gc/P\nVPXueFJ33f7uIXf1ep+Prqalo6tsa4ExQCJ6c5Lt6B7/eNf5bXBtbuJKj3GdsxO412hv2tqDVXcT\n0P4ucBC97PVeYBp6v4g1rs/yA9A/0HRX81lWoPsO1gEjXXm34tr4x5X+F3oU1HpgQABoru7d8Ylu\nsxyFwWAwGBq2mUgpFa70vq3rXJMkHqrmvICa3GEwGAzNjQZdtVT0uuoXiUiR0uuqr1RKfSEiq93n\nKKXGovdq7a6UGgK8AgxtSF0Gg8FgOJUG70AWvb8q6Nl+oZw+ZM4vkzsCbXq8wWAw+JMGdwZKb8m3\nDjgMLBWRnyqd0uiTO5RSFnQny6/QU9OvVUpV3t7QYDAYmg2NUTNwikh/9HjXIUqpXg19Ty8ItOnx\nBoPB4FcabaczETmhlFqOHgq1xeOQV5MklFI+H/ZUuUyl1Exf38NgMBj8jYioms5p6NFELd1L8Sql\nIoFLgG2VTvN6koSI8NBDD9V7vO68efO4+eaby9P/+c9/mD59eoOOEfaFbn+Z0W50G+2Bb9Vp95aG\nrhm0Ad5ytdFbgA9EZJFS6lZARGSOK32pUmoXUIieyFItmZmZ9RbVrl079u7dW57ev38/7do17BpU\nvtDtL4z2xidYdYPR7i/qq72hh5ZuBAZUkf9qpfSdDamjMoMGDWLXrl1kZWXRpk0b3n//fd57773G\nlGAwGAwBRaP1GfiKqVOn1ruMkJAQ/vWvfzF69GicTic33ngjPXv29Pr6rCwYPRqUAoulwiqnPe3E\niamMHFmRDgk5Ne5Ou+OeZrFAaKiOe4ZVmdV6elidhYVVhG4LD9fmGffFM/cXwao9WHWD0e4v6qs9\naJajUEpJoGgtK4OMDHA6QQQcDh1WlfaMu9Pu0DPumedp7ny7vSLPHbfbwWY7Ne5Ou+Pu/LKyirzK\n6bKyCist1eYZt1q1U4iIqLDIyFMtKqpqi46GmJiKsEWLijA2VofR0dqRGgwG36OUQrzoQA46Z5CW\nlsbIkSP9LafWBKtuEVi6NI2hQ0dSUkK5FRefakVF2jzjJ09CYaE2d7ygQNvJk3DihLbSUu0Y4uK0\nxcdXWEICJCZWhImJkJRUEcbHn9mRBOtzD1bdYLT7i+q0e+sMgq6ZyNC4KKWb
jGJjtTUEdrt2CseP\nV1heHuTnQ26ujm/fDjk5Op6To/OPHdPOp2VLaN1aW0oKJCfrsE0bOHJEh+3b6xqIwWComqCrGRgM\nnpSWaqdw5AhkZ1fYoUMVduCAtogI7RQ6dICOHSusc2fo0kU7ENNcZWhqNNlmIoOhLojo2sSBA7B3\nb4VlZen+n4wM3XzVuTN06wbdu+uwRw9tycnGURiCkybrDIK1TS9YdUPz0X7ypHYKO3fCrl2wY4du\nntq2TXeo9+oFvXtDnz7azjlHN035W3egYbT7B9NnYDD4iJgY6NtXW2WOHYOtW2HTJti8GebPhw0b\n9Eiqc8+FAQNg4EBt7dubWoQh+Ai6moHBECiI6Kam9eth7VpYswZ+cq3JO3SotvPOg0GDTOe1wX80\n2WYigyGQEYF9+2DVKvjhB20bNugmpeHDYcQIuPBCPb/CYGgMvHUGDb6Eta9JS0vzt4Q6Eay6wWiv\nDUrpEUpXXQXPPKOdwdGj8Pjjuhnqn//UQ12HDYO//hW+/VYPrfW3bl9itPuH+moPOmdgMAQbUVFw\n0UXwt7/BsmXaOfzjH9oJ/OEP0KoVXHkl/PvfelisweAPTDORweBnDh+GJUvgs8902Ls3XH45TJqk\n5z8YDPXB9BkYDEFIaSksXw6ffAIffwydOsHVV8Pkybr5yWCoLabPIMAIVt1gtDcm4eEwZgxcc00a\nBw/CzJl63sOAAbqp6Y039NIdgUywPXNPmrP2oHMGBkNzITQURo2COXP0zOnp0+HTT3UNYepU+P57\nPXrJYPAFppnIYAgyjh6Ft9/WTiI0FG67DW64wQxXNVSN6TMwGJo4IvDNN/Dii/D11zBlCtx5p15T\nyWBwY/oMAoxg1Q1Guz/wRrdSMHIkfPQRpKfrpTHOO0+PQnLPhPYHwfrMoXlrDzpnYDAYTqdjR3ji\nCb3Q3vDhet7CqFF6ZJLB4A2mmchgaILYbPDuu/DYY9C2LTz8sF4Kw9D8MH0GBoMBux3++18947lT\nJ5g1S6+samg+mD6DACNYdYPR7g98pTs0VI802rpVT1y7/HK47jrdnNRQBOszh+atPeicgcFgqD1W\nK9xyi96sp0cPXTu45x69u5vBAKaZyGBolhw6BPfeC199BU8+qWsLZkOeponpMzAYDDXy/fd6bkKL\nFnoS29ln+1uRwdeYPoMAI1h1g9HuDxpL97Bhek7ClVfC+efr4ak2W/3KDNZnDs1be9A5A4PB4FtC\nQuD3v9fbdqalweDBenc2Q/OiQZuJlFLtgbeBZMAJvCYiz1c6ZwSwANjjyvpYRB6toizTTGQwNDAi\nMHeu7lx+4AHtJCzmJ2NQExB9BkqpFCBFRNKVUjHAz8DlIrLN45wRwN0iMqGGsowzMBgaid274frr\nIS4O3nxTb9VpCE4Cos9ARA6LSLorfhLYCrSr4lSvxzEEa5tesOoGo90f+Ft3166wYoVuMhowQC+E\n5y3+1l4fmrP2RqsAKqVSgX7AqioOn6eUSldKfa6U6tVYmgwGQ/VYrfDII/DOO7qW8Nhj4HT6W5Wh\noWiUoaWuJqI04B8isqCKY04RKVJKjQVmi8hZVZRhmokMBj9x4ICewRwbq51DYqK/FRm8xdtmotBG\nEBIKzAP+U9kRQHnzkTv+hVLqJaVUoojkVj536tSppKamAhAfH0+/fv0YOXIkUFFFMmmTNmnfp3fu\nTOPvf4fPPx/JkCHwwANppKYGjj6TrkinpaUxd+5cgPLvS68QkQY19GiiZ85wPNkjPhjIrOY8ERFZ\nvny5BCPBqlvEaPcHgax77lyRVq1EFi6s+ngga6+Jpqjd9d1Z43d1g9YMlFLnA78BNiql1gEC3A90\ncgmcA0xSSt0G2IBiYHJDajIYDPXjhhv0TOVJk2DzZvjLX8xSFk0BsxyFwWCoEwcOwPjx0L8/vPwy\nhIX5W5GhKgJiaKnBYGi6tGunh58ePQpj
x0Jenr8VGepD0DkDd0dJsBGsusFo9wfBojsmBubPh759\n9TpHu3cHj/aqaM7ag84ZGAyGwCIkBJ57DqZP14vdrV/vb0WGutAs+wxuvPFGPvvsM5KTk9ngsSLX\nCy+8wEsvvURoaCjjxo1j5syZPrmfwdBcWLpUT1CbOROmTfO3GgMEyNpEvsSXzuC7774jJiaGKVOm\nlDuDtLQ0Hn/8cRYtWkRoaCjHjh2jZcuWPrmfwdCc2LZNdyxPnKidQkiIvxU1b5psB7Iv2vQuuOAC\nEhISTsl7+eWXuffeewkN1aNtfe0ImnNbpD8JVu3Bqhvg8OE0Vq2Cn3/WDiGYttYM5udu+gx8xI4d\nO1ixYgVDhw7loosuYs2aNf6WZDAELYmJ8OWXerXTYcMgM9Pfigw10SybiQCysrIYP358eTNR3759\n+eUvf8ns2bP56aefmDx5Mnv27KmhFIPBcCZE4PnndXPRggV6FVRD49Jkm4kaig4dOvDrX/8agEGD\nBmGxWMjJyfGzKoMhuFEK/vAHePVVGDcOvvjC34oM1RF0zsBXbXr//ve/2bFjB3379uX5559n4sSJ\nLFu2DNBNRjabjaSkJJ/cC5p3W6Q/CVbtwaobqtY+YQIsXKhHGLnWUAtImtpzrw0NvmppIDJu3DiW\nLFmCxWLh+PHjvPrqq8yfP59//OMf9O3bl/DwcN5+++3qC9i1C7p31z97lNL7ArrjnmmLpcLsdggP\nr0h7Hg8JOfXcyvmeYU0WGloRuq1y2mo9Ne5OW616TQF3nju+fTuUlup0WJj+HFVZRETFZzQYKnHe\neXqP5bFjITtbr2lkCByaZZ/BvHnz+PLLL3nttdcAePTRR4mIiOBPf/qT94WIaHM6tbnT7jx33OE4\nNc8zXdk8jzkcp4femN1eEbfZTs2z2XTcbq+Ie4Y2G5SVnR4vK6uw0lJtnvHSUigp0eZ2GpGR2jlE\nRlZYVNTpFh1dYTExFdaiRYXFxlaY1eqTd8DgPw4ehIsvhiuv1JvnmEXuGpaA2c8gEOnTpw8PPvgg\neXl5hIeHs2jRIgYNGlS7QjxrAYYKRCqcQ3GxtpISKCqqSBcW6rQ7PHlSjz88fFiH7vTJk3DihI4f\nP67jVivEx+vNeePjtSUkaEtMrLCkJGjZssLi4823ToDQti188w1ccol+Bf75T/OnCQSCrmaQlpZW\nvqFDfXjzzTd58cUXiYmJoXfv3oSHh/PMM8/UX2g1+Eq3PwgY7SLaeRw/ri0/X1teXoXl5EBuLhw7\nBjk5pO3bx8iTJ/V1rVpB69aQnKwtJUVbmzb6G6ptW736WlSUvz9p4DzzOuCt9rw83WTUrx+89FJg\n/K5qis/d1AxqYNq0aUxzzZd/4IEH6NChg58VGWpEqYompbZtvbsmLQ1GjtS1laNHtR0+rButs7Nh\n715YtUq3XRw4oMOoKOjQAdq3h44doVMnbZ07a2vd2vyU9QEJCXr5irFj4Y47tEMwj9V/BF3NwFcc\nPXqUVq1asXfvXsaMGcOPP/5IbGysdxfbbPoLxbMjuLqwqg5jzzzz9gcWIrp2sX8/7NsHWVkVlpkJ\ne/bopq7OnaFbNz2QoFs3OOss6NFD1zTM37RWnDgBv/oVDBoEs2ebx+drzNpENXDhhReSm5uL1Wrl\n2WefrV3VMDMTLrig6g7kyukzdSCLnO4kPEcWeTOayHO0UHWjiWoaQVR59JB71JCnhYefPpLIPXrI\n3VnstshIXXZT/a8uKNBOYdcubTt3wo4delGe0lK9DVjv3tCrlw779tW1jKb6PHxAfr7uQ7jwQnj6\nafOofEmTdQbB2qZXpe7KDsQ9cqg2o4k8Rw+5RwpVHk1U3cghL0cRpWVlMTIh4cwjiEpLKzqLi4u1\n5qioilFEkZG6ecdzBFFMTEXYokVF6B455I67O4tjYmr9LdHo70turnYKmzdX2IYN+nmee662AQPg\nF7/Q
TiO06pbaYH3Poe7ac3Nh1Ci9yN0jj/helzc0xedu+gxq4Pjx49x0001s2rQJi8XCG2+8wZAh\nQxpXRLCMSHK3u9cGu/30EUTu0HMUUWFhxciho0d13G0nTlR0Fh8/rq+Pja0YPeQ5gigpSVurVnr0\nkLuzuKysIZ5I9SQm6sV4hg07NT87WzuF9HQ9DffRR3X/RP/+eo2GIUNg6FDdR9FMca9nNHy4/hP+\n/vf+VtS8CLqaga+YOnUqI0aMYNq0adjtdoqKirzvMzD4B7tdOwXP0UO5ubqN323Hjmk7ehSOHNEW\nGVkxcsg9eqhdu4rRQ+7O4vDwxv08J07AmjW6A3vVKvjhB90UN2yY3iVmxAjdxBToPxZ8TGamdgiz\nZsF11/lbTfDTZJuJfMGJEyfo378/u3fv9kl5hgBGRDdIZ2fDoUO64//gwYrRQwcO6I7iQ4d0U1Sn\nTpCaWmFduugO4k6dGn7Cm4jui/juO20rVmjHduGFcNFFulG9R49m0aC+aZNuMnrrLRgzxt9qgpsm\n6wx80aa3fv16brnlFnr16sX69esZOHAgs2fPJjIy0jdiq6AptkUGA15rdzq1w3CPGsrMhIwMvanv\n7t3aeXTooEcNnXWWbu93dxA3wCZI5boPHtTNdMuW6XGYDod2CmPHwujR2oEFGL56X77/Hi6/XPvE\nnj3rr8sbmuK7bvoMzoDdbmft2rW8+OKLDBw4kBkzZjBz5kwefvhhf0sz+AuLRTcftWmj2+4rU1am\nncOOHXqtpp9/hnfe0R3EYWHQp4/uHD7nHB327u2bZqe2bXVbyXXX6ZrDrl2wZIn+yXzTTbrP4bLL\n9C4y3bvX/34BxLBhuqnoyith9Wo9fsDQcARdzcAXZGdnc95555XvV/Ddd98xa9YsPv30U5+Ub2hG\niOgmpo0bdQexu5N4927dpPOLX+gO4qFDdU3Cl3tAFhfD8uXw6ad6s4DERO0UrrpKO6Um0px04416\nvMG77zaZj9So+KyZSCkVDRSLiFMpdRbQA/hCRGy+keodvu5AHjFiBK+99hpnnXUWDz/8MEVFRcya\nNctn5RuaOUVF2jGsWaN/1q5apZ3GoEF6jsrw4XoZz+ho39zP6YSffoL//Q8++kjXVq6+Gq69Vjuh\nIKa4WNcSfvc7mD7d32qCD186g5+B4UACsBL4CSgTkd/4Qqi3+Hqewfr167npppuw2Wx06dKFN998\nk7i4OK+u3bt3L2PHjnXrOsU88ywWS3m8oKCAuLi40/I9zZ3vGVaOe6ZDQkJOya/K3OeEhIScEvfM\nCw0NPS0eGhpabrt27aJPnz6n5Fmt1vKwOgsLCzvFwsPDsVqt5c+pMQioNuDcXD1i6NtvdQdxerr+\nBT9qlLbzzitvWqqXbhHthD74AN57T6/D9NvfaseQkuK7z1MNDfHM9+zRlavPPmvY3dIC6n2pJY3R\nZ6BEpEgpdSPwkog8qZRKr73UwOLcc8/lp59+qtO1ycnJfPjhh4jIKQaclud0OhER1qxZw4ABA07L\nr+pcz7jT6Sy3ymmn04nD4Tgl3zPtcDjKQ7d5nuc2u91OSUlJedxut59ybN++fRw8eBC73Y7NZis/\nx2azndHKysqw2WyUlpZSWlpanm+1WgkPDy+3iIgIIiIiTolHRkaeYlFRUeVhdHR0eRgdHU1MTMwp\n1qJFC1q0aEF4Yw8VrYnERL3d17hxOl1cDCtX6s7hv/wFtm7Vw0nHjtVzKOqKUroGMmiQbnRfvhz+\n8x89k2vECN3XMGZMtRPeApEuXfT2mb/7Haxdqys+Bt/iTc1gHXA78Cxwo4hsVkptFJG+jSHQQ4dP\nm4kM/kFEKCsrK3cQpaWllJSUlIfFxcWUlpZSXFx8mhUWFpaHRUVFFBYWUlhYyMmTJ0+xgoICCgoK\nEBFiY2PLLS4ujri4OOLj40lISCi3xMREEhMTSUpKIikpiVatWpXX4h
qVnBw9YuiLL2DxYv2LfuJE\nbf3717/BvKAAPvwQXn9dL9B3001wyy16rkUQIKL7ys87Dx580N9qggdfNhONAO4GVorILKVUF2CG\niDTq/EDjDAy1pbS0lIKCAo4fP86JEyfIz8/n+PHj5Ofnk5eXV265ubnk5uaSk5NDTk4Ox44do7i4\nmKSkJFq3bl1uKSkppKSk0KZNG9q0aUPbtm1p165dw0xWdDh0P8P8+docDpg8Ga65Ro9Wqq9j2LQJ\nXnlF98qOGqUb44cPD/ge2r179WoeK1fq0b2GmgmIeQZKqfbA20Ay4AReE5HnqzjveWAsUAhMFZHT\nmqF82Wdw44038tlnn5GcnMyGDRsAyMvLY/LkyWRlZZGamsqHH37odR+CNzTFtshgoK7ay8rKOHr0\nKEePHuXIkSNkZ2eTnZ3NoUOHyu3AgQMcOHAAi8VCx44dT7HU1FQ6d+5M586dSUlJqXUt4xTdIrB+\nve4DeP993a8wZYruB6jv0usnTsDbb8MLL+ilPu6+GyZNqlcTUkO/L7Nnw8cf69YvX0/Oborvus/6\nDJRSA4H7gVTP80XkHC/02YG7RCRdKRUD/KyUWiIi2zzKHwt0FZHuSqkhwCtAFQO9fce0adOYPn06\nU6ZMKc+bOXMmF198Mffccw+zZs3iiSeeYObMmQ0pwxDAhIWF0a5dO9rV0IQiIuTn57N//3727t3L\n3r17ycrK4tNPPyUjI4OMjAyKioro2rUr3bp1o1u3bvTo0aPcErzpG1BK7wDTrx88/riuMcydq2sI\nAwfq5p6JE+vWkB4bC3feCbffrntn//lPuPde+NOfdLkREbUvs4G5805doXnjDS3R4Bu8aSbaDvwZ\n2Ij+dQ+AiGTV+mZKfQK8ICJfe+S9AiwXkQ9c6a3ASBHJrnStT5uJsrKyGD9+fHnNoEePHnzzzTck\nJydz+PBhRo4cybZt22ooxWComRMnTrB792527drFjh072L59O9u2bWPbtm3ExMTQp08f+vTpQ9++\nfenXrx+9e/cmzJsv9uJi+OQTmDNHr5R64426D6C+i92tXg2PPaaHqv75z3DrrQGx+5sn69fridjb\nt9evr7054Ms+g+9E5AIfCEoF0oA+InLSI/9T4AkR+d6V/gq4R0TWVrq+QZ1BYmIiubm55ccrpw0G\nXyMi7Nu3j02bNrFp0yY2bNjAunXryMjI4Oyzz2bgwIEMHjyYQYMGlQ/trZatW3UfwDvv6N3m7767\n/mMw09P16qorV8L992tHE0AjtG6+WTuCJ5/0t5LAxpdDSx9SSr0OfA2UujNF5ONaiIkB5gF/8HQE\ntWXq1KkApKamEh8fT79+/crbyNLS0gC8Tv/www8UFhaWl223209pc3M4HKeka1t+5fRzzz1XL73+\nTLvjgaKnNunKn8HfeqpKd+zYkaioKAYPHszIkSMpLi7mvvvuQ0T47rvveOaZZ8jMzKRHjx5cdtll\nXHDBBTibRXIqAAAgAElEQVSdTiIjIyvKy86GK65g5KOPwr//TdqECdC6NSOfeAIuvZS0b76pm755\n82DdOtL+7//gsccYOWsWXH89ad9+W+31jfW+jBkDt9wykjvugIwM35Rf+TMEwvvhbTo9PZ0ZM2aQ\nlpbG3LlzAf1d6TWVx7pXMXb+HWAN8BbwpsveqOk6j+tDgcVoR1DV8VeAyR7pbUByFeeJiMjy5cvF\nF2RmZkrfvn3L0z169JDDhw+LiMihQ4ekR48ePrmPG1/p9gdGe+NTWXdubq58/vnncv/998vw4cMl\nOjpazjvvPHnggQdk+fLlUlpaemoBNpvI+++L9O0rMmCAyCefiDid9RO1YoXI+eeL9Okj8tln1ZbX\nmM/8b38T+c1vfFdesL4vItVrd3131vxdXeMJsN2bgs5w/dvAM2c4finwuSs+FPixmvPq/bA8ycjI\nkD59+pSn77nnHpk5c6aIiMycOV
P+8pe/+PR+BoMvKSwslKVLl8p9990ngwYNktjYWLn88svllVde\nkf3791ec6HCIzJ8v0r+/SL9+IosW1c8pOJ0iCxaI9OwpMmKEyOrV9f4s9aGgQKRNG5E1a/wqI6Dx\n1hl402fwJvCUiGzxvr5Rfu35wAp057O47H6gk0vgHNd5/wLGoIeWTpNK/QWuc6Qmrd5y3XXXkZaW\nRk5ODsnJyTz88MNMnDiRq666in379tGpUyc+/PBD4qtZHvhgaSk3b9+OAixKYXGFldPVhSHu0JUX\nUinumRfiOj/UFfcMz2RWj9BqsejQZWEWC2Gu/DCPdKjHkhqG4OLo0aMsXbqURYsWsWjRIrp168bE\niRO58sorOfvss/Xw1E8+0W3/KSm6oX3QoLrf0G7XI5r+9jc9E+zxxxtkKW9vePVVPer2668DfpqE\nX/BlB/JWoCuQge4zUOgvcm+GlvqMQNoDudDhIC0/H6cIAjhc4Wlpj/iWlSs5a9gwHCI4RXC4jrnj\nThGdduU7PM5xiGD3yLN75Nk84u50edzpxObKs3mky0Qo8zhW5nRS6nTiAMKUItxi0eaKO9atI3HQ\nICIslnKLdFtISHk8KiSEKI8wOiSE6JAQYkJCiLZYiHHFW4SGEhMSQkgj/OcGwvtSF+qj22azsWLF\nCubPn8/HH39McnIy11xzDZMnTya1fXv9Jf73v+tNc556qn4zkI8fh4ce0msgPfII3HwzaStWNOoz\nt9v1Ek/PPgu/+lX9ygrW9wUaZ20is89QJaJDQhiXlFSra9KSkhjZpk0DKfINTrdjEKHU5SBKnU6+\nzcvjnLPPpsTpLLdih4Nip7PCHA6KnE6yy8oocjopcjgodDopdDgodDg46QoLXFbocBBhsdAiJITY\n0FDiQkOJDQkhLjSUeA9LcFmi1UpCaChJVitJVivxoaGN4kyCEavVyqhRoxg1ahSzZ8/m22+/5f33\n32fgwIH07duXadOmceXatUTPnq2/Rf/8Z/jjH+s2UiguDp57DqZNg9tu0xMAbrnF9x/qDISGwgMP\nwBNP1N8ZNGfOWDNQSoUAm0WkR+NJqlaLz5qJDP7HKUKRw8EJh4MTdjsnHA6O2+3aHA7y7Xby7Xby\nbDby7Hby7HZybDZyXeEJu5240FBaWa20CgujtdVK67Awkl1hSlgYbVyWEhZGhC/3EQhSSktL+fTT\nT3nzzTf5/vvvueaaa/jjhAmc9fLLep7Ca6/phezqisOhpwc//riuLdxxR6Pt32y3691JP/gAhgxp\nlFsGDb5sJloATBeRvb4SVxeMMzB44hAh12bjqNvKysi22ThSVkZ2WRmHy8o45LLDZWXEhoTQPjyc\nduHhtA8Pp2NEBB1dYWpEBO3CwghtpC+uQODgwYO89tprzJkzh27duvH44MEMe/dd1IQJeqXT+qy3\ntH27Xl7UatU7snXq5DvhZ+CFF/QSFR97Pei9eeBLZ7AC6A+sRnfwAiAiE+orsjYEUp9BXQhW3RD8\n2i8cMYKjNhsHSks5UFrKPpftLSkhq6SEzJISjtpstAsPp0tEBF0jI+kWGUnXyEjOcsUbu2bRWM/c\nZrMxf/58Zs+ezcn9+3m3XTt67tuH5Y039BTfOpCWlsbI4cPh6acrbMqUBu/dLSqCzp3hm2/0JnN1\nIdjf9YbuM/hrHXQZDAGDRSmSw8JIDgtjQIsWVZ5T6nSyt6SEPSUl7C4uZldxMSuOH2dnURGZJSW0\nDQ+nR1QUvaKi6B0dTa/oaHpHRRETRHsCVIXVauXqq6/m6quv5vvvv+fBp54iZMsW3pg0ibDrryfi\nmWfq1pcQEqL3aBgzRi+ot3ChXjq7AdeOiIrSLVNPPQX//neD3abJ4tWqpUqpZMA9Dm21iBxpUFVV\nazDNRAa/YHM6ySgpYVtREVsKC9lSVMTmwkK2FhXRNiyMc2Ji6BcTw4CYGPq3aEHbsLCgHqK7fft2
\n/vXww4yZN4/+CQmEf/wxSeefX/cCS0v14ncLFuj9FAYO9J3YSuTkQPfuekvqINmmocHxZTPR1cBT\n6HWFFHoLzD+LyDwf6PQa4wwMgYbd6WRXcTHrCwtZf/IkawsK+PnkSSzA4NhYBrdoUR4mWK3+lltr\nMjMy+H7qVEZ/+y3fTJrEmDffJLo+ezb/7396xNHDD8P//V+DNRvNmKG7K556qkGKDzq8dQbezCBe\nD7T2SLcC1nszo82Xho+Xo2hsglW3iNFeG5xOp2QVF8u8I0fknl27ZOS6dRKzYoX0XrVKbtm2Td4+\ndEiyiotrLCeQnvm+hQvlcHS0vBodLW+8+qo4HI4znn9G7Tt2iJxzjsgNN4iUlPhUp5vMTJHERJGT\nJ2t/bSA999pS3+UovBk+YZFTm4VywKvrDIZmh1KKjhERXNmqFbO6dmV5v37knn8+b/XsSe/oaBbm\n5DDw55/p/OOPTNu2jXcOH+ZwaWnNBfuR9uPHk7x3L1f37cu5f/oT111yCTk5OXUrrHt3+OEHOHlS\nr6567JhvxaIHL51/vt4HyOA93jQTPQWcA7znypoMbBCRvzSwtso6pCatBkMwICJsLSoiLT+fr/Ly\nWJ6fT4fwcH6VmMiliYmcHxdHWCAOc3U4cDzwAPmvvMJVUVHM/OQTBtd1mWynE/76V/2N/dln0LOn\nT6V+8YXeJ3nNGrNEhU+3vVRKXQm4e5C+FZH59dRXa4wzMDRV7E4nawoK+DIvj0U5OWwvKmJUQgKX\nt2zJZUlJJAZaf8PcuZTMmMG1wOgnnuC2226re1lvv61nQP/vf3BBvbdNKcfp1JPQ3n+//ts6BDs+\n6zMIFMP0GfgNo71xyS4tlb/MmyeXb9ggLVaskFHr1slL+/dLduVlqv3J11+LLSlJ7m3bVm6//Xax\n2Wzlh2r9zJcsEWnVSq+o6kNmzRKZOrV21wTj++KmwfoMlFIZSqk91dhu3/ktg8HgSeuwMMYkJfFJ\n374cGjaMO9q149vjxzlr1SouWb+e1w8eJN9m86/IX/6S0G+/5TGLhb7LljF27Fjy8vLqVtYll+hh\np1On6vUkfMTvfqcXajUbFnpHtc1ESqnKK7FZgKuBPwFrReTKBtZWWY9Up9VgaA4UORwsysnhvSNH\n+Dovj18lJjIlJYXRCQlY/dXHkJWFXHIJixMT+WNeHl8sXkznzp3rVtbGjXqS2qOP6oXvfMBvfwv9\n+8Ndd/mkuKDEl/MMLMBvgT8D6cDjUoe9DeqLcQYGQwV5NhsfHj3KW4cPk1lSwrSUFG5q04bOkZGN\nLyY7G0aPZl1iIhN27mTxl1/Su3fvupW1cydcdJFe7G7KlHpL++EHuOEGvQ5fIPbJNwbeOoMzNRNZ\nlVK3AlvQE80misj1/nAEnnjuVRpMBKtuMNr9QU26E6xWbm3blu8HDOCrc8+l2Olk8Nq1jF6/noXH\njuFozB9OycmQlkb/4mK+6tWL4RdcwKpVq+pWVvfusHSpnrH83ns1n18DQ4fqZSq8fQ2C9X2B+ms/\nk6/MAO5D71G8CDhHKfVrt9Xrrn7mxhtvJDk5mXPOadT9eQyGBqFXdDTPdOvGvqFD+W1yMo9mZdF9\n1Sqe2bev8foWEhLgyy85Oz+ff/XqxWXjxvH111/XrayePWHJEt22M69+Cx0opbsi3n67XsU0C87U\nZzAXvU1lVYiI/K6hRFWFL5uJvvvuO2JiYpgyZQobNmzwSZkGQyCx6sQJZu/fz5e5uUxLSWFG+/a0\nj4ho+Bvn58OoUew76ywGLF3KR/Pm1X0V0PXrYfRoPT70oovqLCk7W69ium8fxMTUuZigxafzDAIB\nX/cZZGVlMX78eOMMDE2avSUlPLt/P28dPszlLVtyf8eOdI+Katib5ubCqFFk9OnD4MWLWbBgAcOG\nDatbWcuWwTXX6HWp6zEx7bLLYPJk3aHc3Kh3n4GrkB5KqVFK
qZhK+X7bCjNY2/SCVTcY7f7AV7o7\nRkTwbLdu7BoyhM4REQxbt44pW7eyo6jIJ+VXRdqGDbBkCZ1Xr+abq69m4sSJrFmzpm6F/fKXesW5\nceP0T/w6MmWKd01Fwfq+QAP2GSilfg8sAKYDm5RSl3scfrxedzUYDI1KotXK31JT2TVkCGdFRnL+\nunVM3bqVrJKShrlhq1aweDG9Fizg82nTGDduHFu21HHsyQ036J/0EyboHWzqwIQJsHatbioyVM2Z\n+gw2AueJyEmlVCowD/iPiMxWSq0Tkf6NJ9M0ExkMvuSE3c7T+/bx4oED3JCSwv0dO9IyLMz3N0pP\nh9GjWXLTTdzy7rt8//33tG3btvbliMBvfqOHBr3+ep2k3Hqr3gnt3nvrdHnQUu8+A6XUZhHp7ZGO\nQTuELcAvRaSfr8R6g6+dQWZmJuPHj2fjxo21vvZ4yXE+2PwBFmXBoiyEqBAdWkJOS7vjoZbQ8rQ7\ndOeFWkKrNavFWhEPsWK1WIN64xRDYHG4tJR/ZGXx4dGj3NexI3e2a+f7RfJc7f6vXncdL6elsWLF\nCmLrssdyQYGeQfbkk/Dr2g9oXLkSbr4ZNm9uXovX+cIZLAPuEpF0j7xQ4A3gNyLSqJvC+nIP5Ouu\nu460tDRycnJITk7m4YcfZlotZjweKTzCg8sexClOHOLA4XTgFOdpac/4kc1HiOsRh8PpKM+3O+04\nxBU6HdictvJ8m9OG3WkvN5vDVp4XokIICwnDGmLVoUWHlS08NFyHIeGEh4YTHhJORGjEKeaZF2mN\nJMoaRWSoDt22afUmLrroIqKt0USHRRMZGhk0DilY97RtbN3bCgu5a/dudhcX82y3blyaVHkBAu+p\nUvvcuchjj3H3+eez6eBBPv/8c6x1WYBv1aqKNp9abmUmUrF43aBBVZ8TrO8LNOweyFMAu2eGiNiB\nKUqpV2upM6B4991363V96+jWzBk/p1bX+OolE5FyZ2Fz2ChzlGFz6rDMUUapvbQi7igtzyt1lFJi\nLymPF9uKy/NyinMosZdQbCumyF5Esa2YYnsxhWWFFNmKOLL5CA9lPkShrZDCskJsThsxYTHlFhse\nS4uwFsSGxxIbHktceJwOI+JIiEggPiKe+Ih4EiITSIxMJDEykdjwWCyqmU4JDUB6REez6JxzWJST\nwx937eLVgwf5V/fudPDVcNSpU1EbN/J0ejq/jojg9ttvZ86cObX/UTFkCEyfrnuEly6t1bRipSo6\nkqtzBs2ZZju01FB37E47hWWFnCw7yYnSExSUFVBQWsCJ0hOcKD3B8dLjnCg9QX5JPsdLjpNfmk9e\ncR55JXnkFueSW5xLYVkhCZEJJEUmkRSVRKuoVrSObl0eJsckkxydTEpMCm1atCEuPC5oaiPBTqnT\nyay9e3l+/37+lprKHe3aEeKLZ+9wwGWXUdaxIwNWruT222/n9ttvr1s5I0fqGsKf/1yrS3fuhOHD\n4cABCGnUtg3/YeYZGAIau9NObnEux4qOcazoGEcLj3K06ChHC49ypPAI2YXZZBdmc/jkYQ4VHMLu\ntNOmRRvatWhHu9h2tGvRjvax7ekY15GOcR3pFNeJllEtjcPwIdsKC7l1xw5KnU7e6tmTs30xPyE/\nH4YO5ciUKfSdPZuPPvqICy+8sPblZGbCwIGwejV06VKrSwcMgGee0f6kOdBknUGwtukFq24IDO0n\ny05yqOAQBwsOcqDgAPtP7Gf/if3sPb6Xvcf3knU8i1J7KZ0TOtM5vjNdErrQPbE7hTsLuXLslaTG\npxJiCZ6fgoHwzAGcIrxy8CB/y8jgb6mp3NmuHZYaHG6N2rdtg+HD+eGRR7jyH/9g1apVdOjQofbi\nZs7UvcKfflrry/buhZdeOv1YoDz3utCQfQbugmZJpS0uq8qr5tp/A5cB2SJy2kJASqkR6LkMe1xZ\nH4vIozWVa2h+xITF0D2p
O92Tuld7zonSE2TkZZCRn8Hu3N1sOrKJ1ZtW8+LRFzlSeIRuid3o0bIH\nPVv2pHer3vRp3Yezks7CGhJgO4kFEBaluL1dOy5OSOCGbdtYcOwYb/fsSbvw8LoX2qMHzJ7NeQ89\nxL23384VV1zBd999R0Rt+yfuugvefFNvm3nZZV5fdtVVMGwYPP88hNb4Ddh88GYJ67UiMqBS3oaq\nvtyruPYC4CTw9hmcwd0iMsGLskwzkaHOFNmK2JGzg+3HtrP56GY2H93MpiOb2Ht8L2cnnU2/lH70\nS+lH/5T+DGgzgBbhLfwtOeCwO53M3LuXfx04wNwePRhTjxFHANx6K5Kfz9UOBylt2vDCCy/Uvowl\nS+C222DTJqjF8t0DB8KsWTBqVO1vGWz4YmjpbcDtQBfAc2ezFsBKEbneSyGdgE/P4Az+JCLjvSjH\nOAODzym2FbPpyCbSD6ez7vA61h1ex4bsDXSK68SgdoMY2m4owzoMo0/rPkHVzNSQfJOfz2+2bOGG\nlBQeTk0ltK7zEoqL4bzzKLr+enq/+CLPPPMMV1xxRe3LmTQJ+vaFhx7y+pInn4Tdu+HVoB4X6R31\n3gMZiANSgfeATh6W6M1+mh7ldAI2VHNsBHAMvWnO50CvM5QjIsG7R2mw6hZpftrL7GWSfihd5qyZ\nI9M+mSY9/tVDWjzeQi5++2L5xzf/kBWZK6TYVux7sR4E+jPPLi2VS9LTZcTatXK40t7MtdK+Y4dI\ny5ay/p13pHXr1pKZmVl7MVlZIklJInv2eH3Jnj0iLVuKeGzdLCKB/9zPRH33QK62xUxEjgPHgWuV\nUiFAMrqPIUYpFSMie+vgpCrzM9BRRIqUUmOBT4Czqjt56tSpgO4oiY+Pp1+/fuUdJu5FmgI1nZ6e\nHlB6mkvaTW2ut4ZYyduWR3e6c/PlNwOwYPECNh/ZTG5JLncvuZuNqzfSq1Uvrr70ai7pegn52/Kx\nKEuzeV+2fP8994qQlprKwJ9/5r7cXHpFR9etvJkzyX3oIa6cMIFrr72Wb775hpUrV3p/fceOpF12\nGdx6KyOXLPHq/llZaSQlwfLlI7nkkvq9L4GSTk9PZ+TIkaSlpTF37lwAUlNT8RZv+gzuBP4OZANO\nV7aIF30GruurbSaq4twM4BcictoW1qaZyBBIHC85TlpmGl/t+Yqle5aSV5LH2G5jGdd9HKO7jiYu\nIs7fEhuNhceOcdP27TzauTO31HXdocsvR3r1Ymx6OoMHD+aRRx6pXRknTujpxcuXg5dbbj79NGzf\nDq+9VnvJwYQv90DeBQwRkZw6CklFO4O+VRxLFpFsV3ww8KGIpFZTjnEGhoAlIy+DRTsX8fnOz/lu\n73cM6zCMK3pcweU9LiclJsXf8hqcHUVFXLFpE0NjY/lX9+5E1nZGV3Y29OvHsVdeofctt/D5558z\ncODA2pXx1FPw44/wv/95dXpWFvziF3DoENRlZYxgod59BlLRVr8cCPWmzamKa98FDgKlwF5gGnAr\ncIvr+B3AJmAd8D3a6Zg+gwDDaK8dBaUF8tHmj+TaeddK/Mx4GfHmCHnlp1fkWOExr8sIxmdeYLPJ\n5E2bpPsrr0hGUVHtC5g/X6RLF/ng9delV69eUlxcy36ZwkKRtm1FfvrJ60sGDRJZurQiHYzP3U19\n+wy8GQawB0hTSt2nlLrLbd54JBG5TkTaiki4iHQUkTdF5FURmeM6/qKI9BGR/iIyTETquIu2wRA4\nxITFMKnXJN698l0O332YGUNnsCxzGV2e78Jl717GvC3zKLWX+lumz4kJDeW9Xr0YnZDAkLVrWZJ7\nWmvvmZk4ES68kKt+/pkePXrw97//vXbXR0XBAw/Agw96fcnll8PChbW7TVPFm2aiKsdricjDDaKo\neh1Sk1aDIZApKC1g/rb5vJn+JpuObOLaPtdyyy9uoU/rPv6W5nNW5OczecsWHuzUiTtqs7
pobi70\n7k3u66/T68Yb+eSTTxg6dKj315eVwdlnw9y5MGJEjadv2qQ3UcvMbLrLWvt8OQqlVJSINNxeeTXf\n3zgDQ5NhT94e5qbP5fW1r9M9qTu3DbyNX/f8NWEhDbDBjJ/YU1zMuI0buTghgWe7dvV+PsL778Nj\nj/G/Bx7ggb//nfT09NrNTn7rLXjjDb1vcg24l7X++GM491zvbxFM+GQPZFdB5ymltgDbXOlzlVJV\nrOrROFQeAhYsBKtuMNobgi4JXXjkokfImpHF9MHTmfPzHFKfS+Xxbx8ntzg3YHV7g1t7l8hIfujf\nn21FRUzYtImTdvuZL3QzeTJ06MCVGRn07t2bxx+v5S67v/mNXnzohx9qPFUpvfjpggWnag9G6qvd\nG1f9HPArIAdARNYDdVhm0GAwVMYaYmVSr0ksu2EZX17/JTtzd9Lt+W7M/nE2mfmZ/pZXb+KtVhb1\n7UubsDBGrV/PsbKymi9SSq8i989/8uKMGbz88su12z85NBTuvltPM/aCyy+vcAbNGW/6DFaJyBDP\nfY+VUutFpFErVaaZyNBcOFRwiNmrZvPa2teYePZE7h9+P10Tu/pbVr0QEe7bs4cFOTksOecc7zbN\n+ec/4YsveOmKK3j3vfdYsWIFFm+bmgoL9YbHK1bohfHOgN0Oycmwfj20b+9d8cGEz5qJgH1KqWGA\nKKWsSqk/AVvrrdBgMFRJmxZtmHnxTHZO30n72PYMeX0IUz+ZSlZ+lr+l1RmlFDO7duXGlBQuWLeO\nbYWFNV/0hz/A4cP8X7t2OJ1OXqvN7LDoaLjjDj33oAZCQ+HSS82oIm/mCrQE/ouegXwEeAdI8mbc\nqi+NBphn4HA4pH///jJ+/HiflVkdTXH8cjAQrNo9decX58tfl/1VEmclyowvZsiRk0f8J8wLanrm\nbxw8KG1XrpQtJ0/WXNiiRSJnny0b166Vli1bysGDB70XcuyYSEKCyIEDNZ760Ucio0cH7/si0gjz\nDETkmIj8RkSSRaS1iFwvdZyNHCjs2LGD/v3707FjRzIzM/niiy94/vnn/S3LYKiSuIg4HrnoEbbc\nvgW7007PF3vy5Mong3auwrQ2bXiiSxcuXr+erTXVEMaMgQ4d6PPDD9x00038uTbbXCYlwW9/C889\nV+Opv/qV7m/2psLSZKnOSwD3uMIXgOcrmzeexpeGq2bgK/bt2ycXX3yxfP311xIeHi579+71afkG\nQ0OxM2enTHhvgnSd3VUWbFsgTqfT35LqxFuHDnlXQ0hPF0lOlpMHDkiHDh3km2++8f4mmZkiiYki\neXk1njpmjMgHH3hfdLCAD2oG7n6BNejVRStbUPPHP/6Rp556irVr1xIdHV23bfcMBj/QLbEbC65Z\nwEvjXuK+r+/j0ncvJSMvw9+yas2UlBRmumoIu4rOMIXp3HPh0kuJfuEFnn76aaZPn47d22GqnTrB\n6NF6EloNTJhQ6x00mxbeeIxAMHzYZ/DZZ5/JHXfcISIiY8eOlT59+tS7zJpoim2RwUCwavdWd5m9\nTGZ9N0uSZiXJzG9nSpm9rGGFeUFtn/mcAwek8w8/yIGSkupP2r9fJDFRnBkZctFFF8kLL7zg/Q2+\n/Vake3cRh+OMp+3ZIxIfv7ym0wKWBu8zUEotVUrFe6QTlFJfNqB/anBWrlzJwoUL6dy5M4sXL2bP\nnj1MmTLF37IMhlpjDbFyz/n3sPrm1SzPXM7A1way7tA6f8uqFTe3bcstbdrwqw0byLXZqj6pXTu4\n9VbUk0/ywgsv8PDDD3P06FHvbnD++XpLzK++OuNpnTtDixbg2kqi2eHNPIN0EelXKa98zkFj0RDz\nDBYuXMijjz5KSkoKC2sxrsx+3E72u9koiwIFWPTQOSyAojxfWXTeKee580LUqcfdYYgOVUgV8RBV\nbuXpUHV66GHl2gxNHhHhnQ3vcPeSu7lz8J3cd8F9WE
OCY21mEeHPu3fz/YkTLD33XKKrWgL7yBE9\nZ2DLFu568kkKCgq8H246Zw4sWgSffHLG02bMgNat4f776/AhAhRf7mfwM3CFuHY2c21WM19EBvhE\nqZc0hDO49tprSU1NZfPmzbVyBkd3lfHKjbkoQAkoER1HyuO481zVLyWu405xnQfK6T7mus4pWNzn\nOAWLCBan65hDUOIkxImOO536mEOHFocrbnel7U4sNichCCEhEBLm4SSs2ixWS0U8zBUPc8UrhZZw\nCypcYQnXcUuEy8ItWCJdcY8wJDIES5SFkCiPMFqHymKcU0Oy/8R+blp4E8eKjvGfK/5Dz1Y9/S3J\nK0SEadu2kWe383GfPoRU9SPmzjuhRQuO33svZ511Fl999RV9+562VcrpFBZCx46wbp0Oq2HxYnj8\ncT1XrangS2cwBpgDfIP+fTscvR9BozYVuZ1BWlpa+VZv9aGoqIhOnTqxZ88eWrRoUatrjxzRvxxE\ntDmdp8Yrp0UgOzuNli1H4nCcfp477nCcHvcMK8erMru9Im6zgcMh2O0KpYTQUD3BxuqykBCwhgrW\nEFc8RHTa4oqHCKEWoaDwG1LihhOqBKsSwpSTUAQrTsLEiRUnVqcTq9NBmNOB1eEkzO4g1GYnzObA\nWmrXVmIjrMROVIQQHaOIjFaEtgghJCaEkBbaQmNDCYl1hXE6DI33sASXxYdiCa15zqSv3pfGpr66\nRa9CYI8AACAASURBVIQ5P8/hweUPMuviWUzrN63Raoj10V7mdDJ6/XoGx8byZNcqZl1nZMDAgbBn\nD8+/9RaLFy9m0aJF3hU+Y4Ze5voMax0tXpzGVVeNZP9+iAuyzeqqe+7eOoNq90B2IyKLlVIDAPc6\nsjNE5FhthQYaUVFR3rc5VqJ1a3j99dpdk5YG/vlO0u+A06mw2bSzsNspj9tsOt9tZWWnxsvK4Oef\nwzj77EjKyqC0VJtnvLQUSkrgZIkO3VZcXGFFRe5QKCyEogKw5UJUBNo5hAtRYU6irEJUqJMoi4No\ni4MosRPltBNlLyGyzEZkSRmRRaVEFZYSG+kkMUmR0EoR0TKUsFZhWFtZyy0sOYzC/YWUdC4hLCUM\nS7iXSxk0AZRS3DrwVoZ3Gs7keZP5as9XvHLZK8SGx/pb2hkJs1j4X58+DPn5Z3pGRTGtTZtTT+jc\nGcaOhZdf5v/uuovZs2ezbNkyfvnLX9Zc+G23wYUXwkMPQXh4ladERMCwYbBsGVxxhQ8+UBBRbc1A\nKdVDRLa5HMFpiMjaBlV2uh6fNxMZ/Ivdrh3EyZO6Fn/yJBQUVIQFBXprW7cdP64tPx/y84W8HMjL\ng+MFEBUuxEc5iY9wEhdqJ95iI85ZRouyMmKLSmhRUExSlIPkFGjbwUJ8Jyvh7cIJbx9OeMdwIjpG\nEN4xnNAWNf4+CjqKbcX88cs/8tWer/h48seck+zV9uV+ZVthIRemp/NR796MiI8/9eDGjXq46J49\nfLBwIU899RSrV6/2bt2iSy6BG26A66+v9pRnn4Vt2+DVV+v5IQKEejcTKaXmiMgtSqnlVRwWEfHC\nFfsO4wwM1eF0ameRmws5OXDsWIUdPartSLZw+KCQfQiyjylCLUKraDstrXZaUkpSWTEJBUWkhNvo\n0F5IPctCmx5WorpHEtlNW3i78KDujP/vhv8y48sZvDD2Ba7pc42/5dTI0txcfrt1K+sGDqRN5V/y\n48fDuHE4b7mFIUOGcNddd3HttdfWXOj8+XoBvO++q/aUrVv1xOemsuFNvfdABq5yhV28GaPa0IbZ\nA9lvNDXtTqeekLpli8hXX4m89ZbI44+L3HabUy77lUP6dLdLfLRDIqwO6RpXIhck5Muvow7KHWG7\n5J9ddsjCcTtl218z5cjHR6Rwe6E47b6fAdxQz3zdoXXS+bnOcveXd4vNYWuQe/hS+1/37JEx69ef\nPsv6229FunYVsd
tl2bJlkpqaKiVnmqfgpqxMpHVrke3bqzy8fPlycTpFOnbU70cwUd95BmeqE98H\nfATMAxp15JDB0JAoBfHx2nqeMtBG4e5jAd1MlZkZTkZGOBkZceza6uTLjQ5e+lmxb7GFVuF2Okoh\n7cty6d7OTp9zFf0utNJ2WDQx/WIIiapieKSf6ZfSj59u/olr/3ctl717GR9e9WFA9yP8tVMnzl+3\njhcPHOBOz/Wlzz8fEhJg0SIuGj+enj178vrrr3PHHXecuUCrVTcRzZ1bbUeyUrpm8OWXld+Pps2Z\nmom+ApzAYOC0gVYiMqFhpZ2mR6rTajA0NjabHtiyfTtsTnew8UcHmzfBzoOhtFB2ujgK6JFUyrl9\nhMEXh9J3QgzRvaICZlit3Wln+qLprNy3ks+u+4yOcdUPt/Q3O4qKGLZ2LSv696dXdHTFgXfe0Vtc\nLl3K6tWrmTRpEjt37iS8ms7hcjZt0t/2WVl6GF0VfPyxnpqweLEPP4if8EWfQRi6RvAf4KbKx0Wk\n5g1GfYgvnUFpaSkXXnghZWVl2O12Jk2axEMPPeSTsg3NG6dTtzWvW+Pkp6U21q1ysmF3KIUlirMt\nBfTrWMbQCxQjJ0fQ+ZIYLFb/jXASEZ778Tme/uFpFlyzgIFtB/pNS03MOXiQlw8e/H/2zju8qiL9\n459zbi/pvSeEDtKrgEgVRVlgLYAuIojY1t7Wn66iLpZFxcJaV1gLYkFsKCpIEJASkBYCIaEEQklI\nu2m3njO/P85NSCCBAAmg8n2e95l35szMec/JzX3vzLyFtd26Yaw+KHa7ITkZli6F9u0ZMWIEY8eO\n5ZZbbjn5hL16wfTpmmVSPXA4tEQ3BQWa8/LvGU1xZvCBv3yoMftNzU008ZlBZWWlEEIIn88nevfu\nLdauXdsk8zaEP9q+++8F54vshw4JsfBDj7j/mgrRL6FC2GWviJOqxKi4IjFzXJHY+n2lUJSj++Jn\nU+4vt38pIl6IEIuzFzfJfM0hu6qq4sotW8TTe/bUvfDkk0JMmyaEEGLVqlUiOTlZeDyNiM/0xhtC\nXHPNcc21Ze/bV4ilS89A6LOM5oxN1F2SpFjgen88otDadOb66tzCarUC2irB5/P9rq1ELuD8R3Q0\njL7ewMxPbazcZ8Ph1fPNCj39Ljfw8zo9l16pJ8boYWwbB28+UM6Rg8pZk+0vbf/CwusWMvHLiXyS\n8clZu++pQJIkXm3Zkpfz8jjgrpXHYdo0+OQTKCnh4osvJjU1lQ8//PDkE44bBz/+qJmfNYDBgzV/\ngz8NGtISwF1oYazdwG5gTy3a3RhN05REE+czUBRFdOnSRQQEBIhHHnmkSee+gAs4VSiKKjZ8Wyme\nGl0kBoWXCDse0S6oStw9qlws/8ErfL7ml2Hz4c0i9sVY8Z91/2n+m50m/rFrl/jbsWY+f/ubEP/+\ntxBCiOXLl4vU1FTh9TbCUmrCBCFefbXBy0uWaKuD3zto5MqgMV/CbzRmouamplYG1XA4HGLQoEFi\n27ZtzTL/BVzA6aAq3yMW/qNQTGmZL1rIFSLE6BUTLq0SXy3wicZYUJ4ucopyRItXWogXVr7QfDc5\nA5R5vSJ21SqxxuE42pieLkRSkhB+BXDJJZeIDz744OST/fSTEF27Nni5qkoIm02IsrIzFPoco7HK\noDFpL2+TJKm/JEk3AUiSFC5JUkqTL1EaibS0tCadLzAwkEGDBrG4mc0Gmlrus4kLsp99rM1cxegZ\nYbybHUlWiYnFz5YQc6iYf15bQUSAwnXD3SxaJGgo4vPpIjU0leWTlvP2b2/z71UnTyZfH5rznQfo\n9fwrJYV7cnKqfyRqsYri4moy2j/22GM899xzR683hMGDtW2irVtrmmrLbrFAz54n9E87r3Cm7/2k\nvveSJD0B9ADaAHMAI/Ah0O+M7nwOUVhYyNtvv828efOQJIn8/PzGh8IFir1eXs3L
Q5Kk6sjUNbwE\nyPXwuwsK2JyXh1yrrZpvbKmTJHS1eBm0tmN4HaCv5iWphtdX8/7reknCIMtH2y+cm5yX0Afq6XVf\nBL3uA0++h4y385n3hpd/XB3KRNnK1dfAzXfo6NGjaTxm4wPjWXbjMgb9bxAAD/Y7hbzDZwETo6OZ\nffAg8woKuD4qSmu85x4tjsTYsQwdOhSAZcuWnThmkSxrZwcffwwNRD4dNEg7N2jA6OgPhUblMwC6\nAr8Jfw4DSZK2CCFOGuBEkqT/AlcC+Q31lyTpVeByoBKYJISoN7VEU5qWLl26lKFDh2IyaeEFJEni\nzTffbHSCm2Kvl1fy8hCAAFQhanghBOoJ+Oq+dXj/NaVWXTlmjOK/Xs2rQqDUwyt+3ndMm69Wm68e\n8vrfrV6SMFSTLNfwRlnG6OdNslxTry5NsqxRLd58DFmqSafDIstY/bxVlrHqdNhkGZtOh1Wnu6CY\nTgIhBOXp5aS/fIT5X+r5QR+DPULHlDtkJk2SCAs783vkleUx6H+DuKXbLeedQljlcDA+M5OdvXph\n1um0QFepqbBgAfTowVtvvcXixYtZuHDhiSfatAnGjoVdu+rVpCtXanpm/fpmepCzgKYMYb1OCNFL\nkqTfhBDdJEmyAasbqQz6AxXA+/X1lyTpcuBOIcRISZJ6A68IIfoc28/ft8mUwcGDB0lMTCQ7O5uE\nhATGjBnD3XffXfOL4s+KakXhVVW8fgXhPabu9vMeVcXtr9fma0gIXH7eqao4FQVXNe+nKkWpKSur\nS0WhSlUxyzJ2nQ67TkdArTJArydQpyNQrydApyNYrydIrydIpyPEYCBYrydErydUrydQr0f+EygV\nb6mXg/89zKIXy1jkiWaVK4TRf5W4/Q6Jnj3PbLWQV5bHJXMu4aF+D3Frj1ubTugmwJVbtnBZaCh/\nr/ZMnjlT+3L/8EMqKytJSkpi/fr1JCcnNzyJEJqb8dy50Of4rx6PB8LDNf+0kJBmeYxmR1MqgweA\nVsAw4FlgMjBPCPFaIwVJAr5pQBm8CSwTQnzir28HLhVC5NfTVwjRdPkMwsLCcLvd2Gw2hg8fzgcf\nfHDGc54Iv9e4+nD2ZVeFwKmqVCoK5YpChaJQ7vNR7q+XKwplPh8Onw+HolDq50t9Pkp8Pkq8Xop9\nPqoUBdvWrUT36kW4wVBDEQYDkUYjUUYjkQYD0UYj0UYjYQbDeaM8TuedC1VQ9G0Rm/91gIU5wXwj\nxRGVouOBByXGjtVyWZwOdhXvYuDcgfx72L8Zf9HJg8Gdrc/Lb+XlXLl1Kzm9e2PV6bRwti1aaGcA\ncXHcf//96HQ6XnjhhRNPNH26FuXwlVfqlf2yy7To16NHN9+zNAXORj6DmZIkDQPK0M4N/imE+Ok0\nZK0PccD+WvUD/rbjlEFTorS0lKqqKlq1aoXRaCQjI4N58+YxYcKE5rztBTQSsiRh0+mw6XREnsE8\nXlXlW6+Xth07UuT1UuinAq+XfS4X68vLyfd4yPd4OOzx4FAUIg0G4kwm4kwmYo1GEkwmEsxmEkwm\nksxm4oxG9I0JlXwOIMkS4aPCGTIqnO4rS7n5ue0sXm1g5j9SePghI/fcKzF1qpbf5VSQGprK4hsW\nM/T9oQSaAhnZemTzPMApoltAAH0CA3nj4EHuT0jQgk1dfz3Mng0zZnDHHXfQq1cvnnzyyRq/onox\nfjwMHAgvvVTv5cGDYdmy818ZnCka+1thC1Ad8GNzM8lyUkyaNInk5GTS0tIIDg6mS5cuNZqw+iS9\nMfUlS5bQr18/HnvsMTp06ED37t158803iY2NPa35GlOvbmuu+Zuzfumll55X8pxKfcywYTX1YGD0\nCfp7VZU2vXtz0O3mh2XLKPR6KerWjU2FhWSsWkW+10tZx47EGI0EZ2QQazLRb+BAUs1mHBs2EG8y\nMXLIkCaRv7rtdMdv8m2CB+COgO5c8UQWH6xa
z0evxfHcc8O5/36JDh3SsFgaP19hZiH/TPonN311\nE1+O+xLPLk+D/c/m52V6jx4M3byZ9jt3YtHpuPSuu+Dii0m75BIwm7n44ov56KOPaNWqVcPztW5N\nWkAAvPIKl95333HXBw+Ga69NY8yYc/95Plm9mp87dy7AibfIjkFjtomuBf4NpHE07eWDQojPG3WD\nU9sm2gEMPNE2UVNg3bp1TJkyhfT0dEwmE127dqVFixZ88cUXTTL/Bfxx4VFV9rvd7HE62e1yscvp\nZJfTSY7TSbbTSaBeT2uLhfY2G+2tVtrbbHS02YgyGs+p3I41DvY8uodte3V8GtuaNdlGHn5Y4vbb\ntexejcXinMXc9NVNrJq8ihYhLZpP4FPA+MxMOtls/CMpSWsYNQpGjoRp0/jpp5+4//772bx584mj\nDMycqUUdrMeq0OeDiAjtcuSZLFXPEZryzGAzMEwIUeCvRwBLhBCdGylIMpoyOM52S5KkK4A7/AfI\nfYBZJztArv1r6XRRVVXFv/71L7744gt0Oh0FBQXMnTuXK664olHj9zn2MXLeSGRJPo50kq5uXdbq\njh0OwtuHo5N16CRdvaVe0tfU9bK+hnRy3bpe1mOQDVqpM2CQDRh0Wt2oM2KQDVqp08raZNKZtFJv\nqsMbZEOD/yxN8c7PFc6m7KoQHHS7yXI62V5ZSWZVFdsqK8morEQnSXSy2ehkt9PNbqdbQABtrNYG\nraaaQ24hBMXfFbPrwV3sswcyx9aSjN16nnpKi+rcQADP4zB73Wxmp8/m1ym/EmwOPu762f68ZFVV\nMWDjRrJ79yZIr9f2dG67DTIzEZJE+/btefvttxkwYEDDk+zbB127kjZ/Ppf6V5O1MWqU9o6uvbYZ\nH+QM0dB7b7IzA0CuVgR+FMHJndX8QswDLgXCJEnaBzyB5qcghBBvCyG+kyTpCkmSctBMS29qzLxn\nig4dOrB3714kSaJDhw7cd999VFVV0bFjRzIzMwHNFyE0tP4QTMF6H4+3NyGQEQJUJL9JqIRA9ZuM\nKqhIqAIEEjsKqmgZVYbq768IzZBB8dfVWrwiQFWFlthegE+AW602P0UzB1X9ZqKqwCdUf6lZ/vhU\nFZ9Q8SoqPqHgURS8qoJH8eFRfVqpeP28F7fiRVEVTHojZp0Js95cQxaDFU+Ol5j9MVj0FqwGaw3Z\nDDasBit2ox270Y7NaMNutBNgDNBKUwCBpsCauk4+/+L7NyVkSSLebCbebGZILdMTIQSHPB62VFSw\nubKSb4qKmJ6byyG3my52O70DA+kVGEjvgACSzOZmi5MlSRJhI8MIuSyEuHcOEfPEWvZfmsirb8Tz\n4osSr72mbZ2fDHf0uoOdRTu55rNr+G7Cdxh0hmaRt7FoY7VyeWgor+Tl8c/kZC3ZuNUK332HdOWV\nTJ48mTlz5pxYGSQmQvv2kJ6upcY8BoMGaTrmfFYGZ4rGrAz+DXQCPvY3XQdsFUI81MyyHStHk20T\n/ec//yEsLIxJkybhdDoByMrKoqCggNGjR2M2m9m6dWuDykBRnJSVrQEEQqjg9ybQ5FP9bcLfptTq\nd7Su9VNqxmtl7bri73uU1+pKnboQvlptPj9V895j2r0I4UVVvX7eV1P6FDduxYNb8eLyabzL58Wl\neHErPjyqHrfQ+0sdHlWPS9XhUiXcqoxLkXAqUKUIqnyaKWmFz0eV10eF14NT8WLVmwgwWgg02gk2\nBxBkCiLEEkyIOZRQayThtmjCrdGEWyMIs4YRbg0n3BpOgDHgDxlI0OHzsaG8nLVlZawrL2e1w4Fe\nkugXFET/oCAuDQ6mg83WbBZOniMedj+ym6Lvi9l+XXue/iKIvn0lZs7UwjefCIqq8Jf5fyEhMIE3\nrnyjWeQ7FWRXVXHxxo1k9+pFsMEA8+ZpCQnS0jh8+DDt2rVj//792O32hieZPRtWr9byJByD9eth\n0iQtFcLv
DU22TeSfbCzQ319dIYQ4iSdH06MplcGUKVNYuHAhpaWlqKoKwLhx4/jhhx/weDy43W46\ndOjA5s3n7Kz8vIIQKqrqQQgPqupGVd0I4UZVXX6q5p2oqgtFcfr5Kj9fhddXQbm7lFKXg1J3GeXu\nMkrd5TjclTjcVTg8TkrdbhxeH+U+HQ6vTJkPSj0KioAQk4kws41wSwCR1hAireFE26OJC0wkPiiF\n+KCWJIV2wGZqAm+rcwQhBLtdLlY6HKwoLSWttBSHonBpcDBDgoO5LDSUlGYIru9Y7WDnLTsRSRYW\ntmrLOx/oeewxuOsuzUm3IZS5y+j5Tk8e7f8oN3a5scnlOlVM2r6dFIuFJ5KTtexDqalalpoePRg1\nahRjxozhpptOsPlw4IDmiXz4MBxzxuPzQWioltCoKRz6ziaaIrlNSyBKCLHqmPb+wCEhxK4mkbSR\naMozg5UrV5KTk8PkyZNrlMHXX39NWloaGzduJD09nbvuuosZDaTFOx1c2HdvHIRQUJQKfL5yFKUM\nn89BmTOfgso88isOkF9xmPyKfAqqCsmvKqGgykGBs5ICp5sit4JND5FmA9FWK3G2INRcM/0vbkNK\nSCotw9oRGZCK0RiDyRSHThd43q44qt/5fpeLn0tLWVJSwo/FxQTp9VwWGspVYWEMDA7GdKJv61OA\n6lbJfSaXg28fRHqgNY98E46qSrz3HrRu3fC4jIIMBv1vEEv+toTO0Z3ryH62scvppPeGDeT07q2t\nDl58UftJ//HHLFy4kJdffplffjkuaWMdpLVvz6WzZsHw4cddGz4c7rxTOz84H9GcZwaz0PIgHwuH\n/9pVjZTxvEP//v3Jzz9qsOR0OpkxYwY//fQTo0aNwuVyMXbs2HMo4Z8XkqRDrw9Crw+qaQsKgoRG\njFWFyqGyvewt3sae4u3kluawevdGvs3dRW7GGvaVl6CXINaiI9bsI8EqkxwYRqvgBNqGtybU3gKT\nKRGzOdlPCcjySVIoNjMSzGZujI7mxuhoVCHYXFHB4uJinty7l8zKSoaHhjI6PJyrwsIIOF2vMkA2\nyaQ8nUL4mHB23LSD11PzWdqzHRdfrDvhKqFjZEdmXTaLqz+7mvSp6fUeKJ8tpFosjAoP5+W8PKan\npMDUqVqe49xcRo4cybRp08jJyaFly5YNT9K/PyxcWK8yGDAAVqw4f5XBmeJEK4N0IUTPBq5trc86\nqDnR1DmQFyxYwDXXXIOqqmRkZDB06FCsVisHDhzA4/GQlJTEunXriPw92pJdQL0QQlDkLCKnOIfs\nomyyCjPYcWQrWUVZ5JTkEWIykxoYQIpNJsniJNFUSuuQaIJsrbBYUrFYWmGxtMZqbY3FknrOFUW+\nx8O3RUUsPHKEXxwOBgcHc21kJH8JD8fWWNOgeqC6VXY/spsjC45gfr4Dt78aSEiIlm44IqL+MXd+\ndyd5ZXksvG7hOV1tVa8Osnv3JsRggAce0HKRvvQS9957LzabjWeeeabhCXbu1A6g8/KO035pafDI\nI7BmTbM+QpOjKbaJsoUQrRq4liOEOIF6bXo0tTL4/PPPufbaa2u2iaoRFxdHRUUFe/fuJeT3Gozk\nAk4ZiqqQ68gl80gmW/O3srVgK1vyt7C7ZBctgmJpFxpB20ArbeweEoz54N2P2ZyI1doem60dNltH\nbLZOWK1tkOWz71NQ6vXyVVERnxQU8KvDwajwcG6IimJISMhpB/0r+q6IrClZhN8cw3ueZD78SOL9\n9zWP3GPh9rnpP6c/N3e9mWk9pp3h05wZpuzYQZzJxFMpKbB/P3TuDLt3s2XfPkaOHMnevXvRnUhZ\ndugA774LffvWaXY6NWWYnw82WzM/RBOiKZTBx8DPQoh3jmm/Gc3v4LomkbSRaMozA7vdTmVlJQA6\nnY6JEydy0UUX8fDDD+P1B4jv0aMH6enp9Y7PXuniyoFeJMkfvhqBLFWHrB
b+kNYar7VBubqCEH1/\nZH+7LIHOX8ryUV4nC3Syv00W6CTN/lvvb9fpQCeDXifQ67R4M9WlQX+0NBgEeoOE0SAwGCWMRjCY\nwGAAoxlMJgmDCUxmCaMFTBYJk0XCbJUwWiV0JhnJKCGbZFZuXsnA/gORjTKSSUI2yxqZ5PN2z70a\nZ/p5cflcZBRksPHQRjYc2kD6wXS2H9lO2/A29IhuR9fwCDoGyoRIeVRWbsXtzsViaU1AQDfs9m4E\nBHTHbu+CTndqMSDORO58j4f5BQV8cPgwBV4vk6OjmRwTQ+KpeJf54T7kJnNcJjqbjrwp7bn573ru\nvhseeuj4AHiZRzK5ZM4lvNbuNcZfdfIYRs2FXU4nfX77jX19+mDR6bRwE336wN1306NHD2bMmMHw\neraBwP/elyzRItTVE9OoXz946inwO5qfVzjTM4MTKYMoYCHgATb4m3ug+QmMEUIcPk2ZTwtNfYBc\nVlbG2LFjcblcAFx22WVccsklLFmyhIceeogXXniBZcuW1Tu+uMLHZ+sdSKoEKqg+kAQIFYQCwicd\nrXslELAzYyUtW/VH+CSEIhCKhPBKCEUbjw9Un4TwaqXqBdULQpFQvKB4JRSfQPFJ+HwCr0+zcPB6\nQVXA6wOvV8LrBZ/iJy94FfD6JO26T8KngMcn4fGBT5XwKhJeBTyKhEeV8aqaz4NRFhgkFaMkQCwj\nQB6AERWjUDAKf6mqmGUVs17FoheY9SpWo8BiEljNArsFrFaB3QZ2OwQEQmCQREAIBIbKGAN06Ox+\nCtChD9CjC9ShD9RK2XzmyqY5DjNdPhcbD21kdd5qft3/K6v2r0In6RiYPJABCX3pHRlDpP4IFRUb\nKS/fQFXVdqzW1gQE9CYwsA9BQf2wWFqe8NmaSu7NFRW844/9f3FgIH+Pj2dYSMgpmauqXpXdD+2m\n8OtCQt64iPGP2OjUCd56C0zH7JS9sOoFPv7mYzY8uwFZOncxnK7csoWxERFMjonR4lBPmQLbtzP7\njTdYtWoV8+bNq3dcWloalwYEaHkOdu48TuM98ojmsf3kk2fhIU4RzaYMak00COjor24TQpyTFNFN\nuU00YcIElixZQmFhIfHx8UyfPp1PP/0Ul8vF+PHjCQgIYNGiRQ0m1s5zubguM7NOLoLaeQ1Olpeg\ndv6CY/MTVLfXzkUgAB21ktfUSlRzIjLUKo/NT1Cdo6B2roKanAXIyKqETpWRfTKyIiN5NR6PjHDL\n4JZRnBJqhYxSIeF1SHgcEt5SmaoyicpyQUUFVFZKVFRBRSVUOiUqXBKVLpkqr4RZr2LXq9hkBbus\nYMOHXXix+XzYvB4ChI8gi0KIXSUkCEJDBGFhEBElYw3XoQ/VYwgzHKVwA4YIjZd0Z2/FIoRgV8ku\nlu9dTlpuGj/v+Rm9rGdYi2EMazGMwckDMCr7KC9fR1nZahyOlaiqh6CgfgQHX0pw8CBstg5Izfjl\nWaUofFxQwKt5eXiE4O9xcdwYHX1KZwv58/LJuTuHuFmtuf+LCAoKNMvN2ucIiqrQf05/brjoBu7o\ndUczPEnjsLioiH/s2cNv3bsjAXTpAjNncqRLF1q1asWBAwewNbTXIwQkJ8OiRdCxY51LixZp8eyW\nLm3uJ2g6NKmfwfmApvYz+Oqrr6isrKxxOrv99tt59913AW3raOXKlXTv3r1J7nemEPUkq6nJPXBM\nwprqenX+gYbyE3iO4T21Snd1eUxeAtcxVJ2nwOmvV/lzEugk6WjCGn8uAptcKz+BXo9N1mFSdBi8\nOnQeHTqnHlGlR5TrUBx6vCV6XAV6Kg5IFB+B4kIoKoZih0RJuYxRJwixKIQafATrvITgJdjnJtjp\nJNjlIsKuEBMpiI2FwFgDxmgjxhiNTLEmjHFGTHEm9AGnb4Fzor/X9sLt/LTrJ37c/SMrclfQrQFD\npwAAIABJREFUJboLI1uN5Ko2V9EuvB
1u934cjhWUlqZRUrIMRSkjOHgQoaEjCA29DJMptsnlqpbt\nF4eDWXl5rHI4uDMujjvj4gg1NM6LuHxjORmjMoi5I463yxL49FOJH3/UIkdXI6swi37v9WPtzWtJ\nDU1tluc4GVQhaLtuHXPatqVfUJC2jPn+e/jySy6//HImTpzI+PEn2Mq6+24tkcHjj9dpLi2FhAQt\n4nUjX9k5xx9WGTTlNtFf//rXOspg+PDhjB49msmTJ/Pjjz+Sl5fXBJJr+LP4GQi/kqlOWFPpT1hT\nUauszlFQ5s9TUObPT1CmKFqOAn9uglKfjwpFIcCfsCZEryfMYCBMbyAAPSa3AWOVAbnMiFpswHvE\nQFWekdJcPQf3wcEDcCB/OTbTQKIDfURbvETIHiIVF+HOKsJKK4g2uIlPBHuSGVOiCXOSGXOyGXOK\nGUuKBUNkwzGbGgun18myvctYtHMRX+/8GpvBxth2YxnbbizdY7ojSRIu1z5KSpZQXPwDJSU/kZER\nzPDh4wgLG0VgYK9mWTXsqKzkhf37+bKwkCkxMTyUkEBEIwLqufJcbL1yK4E9A1nSuRXPPifz3XfQ\nyR+KMi0tjQ3GDSzKXsTSiUvP2bnSK3l5rHY4mN+hA1RUQFISbNzI+2lpLFiwgK+++uq4MTWf9eXL\n4d574bffjuvTubPm3Ny791l4iFNAs28TnS9o6uQ2q1atYujQoTXKIDg4mNLSUgCee+45nnjiCdxu\n9xnfpxp/FmXQ1FCEwOFPWlPsT1pT5PXWyU9wxE/5Hg8F/rwE4QYDMUYjxs2badmjHzanCWOpCXHE\nhGu/CcdOM4dydOTugYIjEBumkhDsJd7kJk44ia6sILLQQYy3iqBWZiwtLVhbWbG0sWBtbcXa1ooh\n9NR/GgohWH9wPV9s/4IF2xegCIVxHcYx/qLxdIzs6O+jsGjRm7Rvf4DCwq/xegsJDx9FRMTVBAcP\nQpab9ifpfpeL5/btY35BAbfFxnJ/QoJmlnkC+Mp9ZI7LRCiCzAkXcc+DMl98oR2wpqWl0f+S/vR8\npycP9H2A6ztd36TyNhYOn4/kNWvY1rMnsSaT9mvfZqPskUdISEio12Kw5rOuKBAdrSmDhLpeLnfe\nqe0iPfDA2XuWxuBPpwyaCitXrmTo0KE1B8gdOnSgW7duLF++HL1eT1BQEBs3bqx3bLUc57slzZ8V\nXlWlwOvlkNvNIY+Hgx4PB91u8txuDrjd7He72ed2IwOJZjOJRjNhHjPmUjPSIQvOHAtHtpjZlalj\n3z5BXKSgRbiXFKuLFLWSeIeDqH3F2Gxg62DTqKMNe2c7to42dLbG7cMLIdh4eCPzM+YzP2M+IZYQ\nJnWexA2dbiDCdnQj3uncxZEjCzly5DOczl1ERIwhMnICwcEDm3TFkOty8fTevXxVVMRDCQncFR9/\nQg9n1aeSNTkL1x4Xh+/rxI3TdMyff9T0dE3eGsZ8Mobtd2w/Z85ot+/cSYTBoDmhZWXBJZfAvn1c\nff31XH755UyZMqXhwTfcoPW/5ZY6zZ9+qoUv+vrrZha+iXBBGZwAEyZMYOnSpRw5cqTmALlNmzbc\nddddKIpCcXExI0aM4K233qp3fFVOFetarQMJ7aBS1kpJJ4HuKF+nrj/aJulPQIbjS9mgmXlKBkkz\n76wu/aafkrFuvZrqmIEeQzqL31rHKP0plZrwrzhy3W72ulzscTrZ43Kx2+Uix+lkj9NJuMFAK4uV\naI8Va5EVdY+V0k02dqcbyd4JcdGC9nE+WtmdpHrLSMkvxrrLgSnBREC3AOzd7QR0DyCgewD6wBOf\nTahC5ZfcX5izaQ5f7fiKwSmDmdZ9GsNSh9WxynG5ciko+JT8/A/x+UqIirqeqKgbsdnaNtm7yaqq\n4qFdu8iorOSF1FTGhoc3+BkRqiD779mUrS2j9PHOjL/ZwIIF2ncowLRvpmHQGXj9itebTL5TwbbK\nSo
Zu3kxunz4YZVmLSDppEgvMZt544w2WLFnS8OCPPoLPPoMvv6zTfOiQdq5cWHhm+aXPFv6wyqCp\ntixyc3O56qqr2LJly3HX9u/fzxVXXMHWrVsbHC+02NUIVSAUAQp+k1F/XT1aR4HlK5czoOcAhPdo\nnxre5yfv0VL1qkfbvALVox7PewSqW+NVt1pDwl23rrr85PTXnX5yqQhFaMrBqkO2+kubjM6mq6EN\n5Rvo26ovuoBaZqB+809dgA59kP4oBeuRTefOpPBYnO7nRRGC/S4XO51OdlRVsaOqiu3+/AQeVaW9\n1UaC147tsA3PdjuHVtrZtEaHySTo0kbholAnbZUyWhwqRNrmwJxsJrB3IIF9AwnqH4S1jbXBL9gy\ndxnT507nZ36m3F3ObT1u46auNxFqqRtFt6JiC/n5H5Kf/wEWS0tiYm4hIuJqdLqmCWa3tKSEe3Ny\nCDMYeLN1a9o0kDpSCMGeR/dQ+HUhxdO7Mu7mVXz33aVcfDEUO4tpP7s93074lh6xPZpErlPFoE2b\nuD02lmsiI7VQEy+9hPPHH4mNjWX79u1ER0fX9K3zeSks1ILdFRQcZ0ObmKiFtE49N+fj9eJs5DP4\nQ0IIQW1FWDtmyZdffkm7du0aHOtwOPj444+RZc0OvqGympdlmcydmZSbymvqDZHOon0Z19R1ujp8\nfaTX6dHr9Ufrfl6v19eQ3MByX/X5FUWVilKlaGWlglKpoFZqvG29DUu8BaVcwVfmw33QjVKm1NR9\nDh+KQ8FX6sNX6kPSS+hD9OhD9BhCDZoJaKgBfZj+qAmo3wzUGGnEEGFAH6w/r1YoOkki2WIh2WJh\n+DGhzI94PGRUVrKlspItweXsiD9EZr8qUsxm2soBBOUHcHBTIL8ujWFTZhwJCYJerbx08VbS9ptC\nAp/ORVSpBPUPIvjSYIIHB2PrYEOStecPNAVyVZurmDlwJmsPrGV2+mxavtqSCRdN4J4+99AyVPuc\n2u2dsNtfICXlXxQVfcOhQ++Qk3MvMTE3ERt7BxZL8hm9gyEhIWzs0YP/HDhAv99+4674eB5OTDxu\n60iSJFo820Jbsf5rMw/fqzB6NHz7LfTqFcrzQ5/n1m9vZe3Na89JTovJ0dHMOXxYUwZXXgm33ool\nL4+rrrqKTz/9lLvuuqv+geHhWo6DFStg6NA6l3r00GLgnU/K4Ezxu1sZNAUmTJhAWloaRUVFREVF\nMX36dBYtWkRWVhY6nY6kpCTefPNNYmJi6h1fUFDA448/rvkPqCqqqtYol/rqtUtFUWrqtfnqen18\ndf1k5PP58Pl8deqKouD1epEkqUYxGAyGOnxtMhqNNWVt3mQy1bSZTKY6ZDabj5LJjEW2YFEtmD1m\nzF4zJo8Jg9OAvkqPrkKHVC5BKfgKfXgKPHgLvKhuVTP/jKpl/hljxBhrxBRvqiF90PmlNKrhUVW2\nVVayvryc9f4cBdlOJ51sdlq6AzHvDKZwWRDrlhiQJBjQS6FnWCWdyguxry9AKVMIHhxM6IhQQi8L\nxRRT95foofJDvL7udd7+7W36J/bn0f6P0jPu+NBhTuceDhyYzeHDcwkOvoT4+PsIDu5/XL9TxX6X\nizuzs8l2OvmwXTu6BQQc10cIQc49OZRvKGffPZ257e86VqyA1FTBgDkDmNx1MpO7Tj5jWU4VVYpC\n3OrVZPTsSZzJBPffD2Yz3/fvz1NPPcXq1asbHvz001BSojkX1MKMGVrzv//dzMI3Af6w20QXcHpQ\nVRWv11ujMKp5r9dbhzweTx3+WHK73XXI5XLV8E6nE5fLVUNVVVU4nU6cTmcNX1VVRWVlJVVVVQDY\nbDZsNhsh1hBiTDFEGaMIl8MJI4wQNYRATyABrgAsFRaMDqO24oqS0MfpsSRbCGwTSFDbICypFiyp\nFvRB589it8LnI728nFUOByscDlaXlZFoMtFNDiEwO4T8H4NY8b0B
mw0G9/HRO6iMiw7m411ehDnZ\nTNiVYYSNCiOge0DNqqHSU8l7G9/jhV9foENEBx6/5HH6JfY77t4+XwX5+R+wf/+LGI3RJCX9g9DQ\nK85IkQohmF9QwF05OfwrJYWpMTHHzSdUwY7JO/Ac9vDrVRfx8isyv/4Ku93rGD1/NDv/vhO78QQJ\nZpoJU7OySDWbeSQpSctQM2IE3pwcYhMSSE9Pbzhx/IYNcP31sGNHneYff4Rnn9W2is53/GGVwe/V\nRPP3Kjc0n+wej4fKykoqKipqyvLycioqKigrK6O8vJyysjLKyspwOBw4Sh24ilyQD/piPeZSMwGV\nAUT4IoiX44lWo/HpfZTZy6gKr0KJVcix5NB1UFfCOoYRFRtFVFQUERER6M8g3PPpwqeqbKyo4OfS\nUpaWlLC6rIyLbDa6+kIxbAxl+8IAVv8q0aO7IDnoRyZEtCNgxSGUcoXwv4QTcXUEQZcEIetl3D43\nczfN5blVz9EytCUzBs+od6Wgqj4KCxeQm/ssIEhOfpLw8NFnpBSyqqq4ets2utjtvNm69XFezD8v\n/ZnI2ZHIJpn3k9qxYoXEkiUw9fsbaBHSgqcGPXXa9z5drHY4mLRjBzt69dKevWdPePppbv78czp0\n6MC9994L1PNZV1WIjYVff63jWVdUpFVLSk6cAOhs4sKZwQX8blG97XSm0WE9Hg8lJSUUFhZStLMI\nkSnQZelQ9ihYNlqwr7QjV8lsNW7la75mh2cHRwKP4Ip3EZgYSHx8fA0lJCSQmJhIYmIi5tMI7HYi\n6GWZnoGB9AwM5OHERNyqyorSUhYXF7O46w4KOnoZHRRG4v5wNr6t58bvEgkMTOSKkV4GUEjcA7vw\nHHATMTaCyAmR3NLvFiZ3ncx/N/6X0Z+Mpm98X54Z/Axtw49aFsmynsjI64iIuJaiokXs3ftPcnOf\nISXlqdNeKbSxWlnbrRt3ZGfTe8MGFnbsSKtah8uyTqbdvHZsHryZqS33kpuYwsSJ8O+3Z9D9na5M\n7TaVhKDGZKhoOvQJDEQCVpeVcXFQEEyeDHPmMGbiRJ5//vkaZXAcZBkuv1zzXr7jaHiNsDCNsrOh\nTZuz8wzNjd/dyuACLuB04KvwUbW9isqtlZRtLKN0QynOrU5Uk0plXCX5wfnsNuxmk3sTWQezyMvL\nIyQkhJSUFFJSUmjRogWpqam0bNmSVq1aERUV1eRnF3ucTr4uKuLrwkLSy8sZGhJCj7JIir8P45tP\ndbjdMGaYl0HGQqKW7Uc4VaJuiCJqYhQkw+vrXmfmrzO5pv01TB80nXBr+HH3EEKlsPBL9ux5HIMh\njNTUFwkMrDdtyUkhhOCtgwd5Yu9e3mvblpHH5IP0FHj4rc9vxDyezPX/jWb4cPD0f4xcRy4fjPng\ntO55Jnh+3z5ynE7eadNG+0mfkoIrM5Po9u3JysoiKiqq/oGffQZz52qBiWrh2mvhL3/RdpHOZ/xh\nt4ku4AKaCkIIXHtdlK8v1yi9nPIN5RhjjAT2DUS0FxTHF7PHu4c9e/eQk5NDTk4O2dnZuN1u2rRp\nQ9u2bWnbti0dOnSgY8eOpKSknDhWfiNR7PXyZWEhnxYUsLqsjBGhofR3RnHo61A+ny8jhOCaoR6G\nuA9jXpSHtZ2V2KmxyCNknlr7FJ9s+4T/G/B/3N7zdgy6472JhVA4dGgOe/f+k+DgQbRoMQOzOem0\nZF3tcHDNtm3cEhvLY0lJdSKiVmZWsunSTYS/05EhdwTx0mtV3LO7JV+N+6reba3mxEG3m47p6ezv\n21fb2powAS6+mHErVzJkyBCmTp1a/8DSUs2WND8fauWgfuEFOHgQZs06Sw9wmmisMqixejnfSRNV\niGXLlonfI36vcgvx55Jd9amifFO5yHsjT2y7fpv4NeFXsTJqpci4JkPk/SdPVGZXClVVRXFxsVi9\nerV47733xIMPPihGjhwpkpKS
hNVqFd27dxdTpkwRr732mli5cqUoLy8/I7kLPR7xRl6euHjDBhG5\ncqX4+86dYt6acnHXXUJERgpxcV9VvHxbmfh16FaxImyFyL4/W2xet1kM/2C46DC7g1iRu6LB+3i9\n5WL37n+KFSvCxN69zwhFcZ2yrEIIcdDlEv02bBBXbtkivvnppzrXin4sEquiV4m0L5wiIkKIpxbM\nF4P/N/i07nOmuGLzZvH+oUNa5ccfhejWTXzyySdixIgRQogTfF4GDBDiu+/qNP38sxD9+jWjsKeI\nhmT3f3ee9Dv2wpnBBVxALUg6CXtnO/bOduJujQPAudeJY7mDkp9LyH06F9kkEzIshNTLU+lxdY86\nkU/LysrIyMhg8+bNbNq0iffff59t27aRkpJCz5496dWrF3379qVjx46NPsQOMxi4NS6OW+PiyKmq\n4v38fB5UthA/0cQzD8YSvDGSee8F8GR6R64a6uMvhw4TNeIwM/vNZNeYXYz7bBwjWo3g+aHPE2at\nu5Wj19tJSZlOdPRN5OTcTXp6J1q1mk1o6NAGpKkfMSYTP3fpwoO7djFt506+79OHTnbNaih0WCgJ\nDyWQ/68Mnn6yG7Meuxb3pGf5ec/PDE6pJ21aM2JSdDRvHTzI36KjtbgZR44wMimJm1etwuFwNDyw\n+tzg8strmrp1g02btLwi58AeoclxYZvoAi7gFCCEoGp7FcU/FFP8XTFla8oI7BNI2KgwwkeHY044\n/tDZ4/GQkZFBeno6a9euZfXq1Rw4cICePXsyYMAABg4cSJ8+fbBYGu857FNVFhcX89ahQ6x2OLgp\nJoZrjbEs+9jCW29BUIBgXEcHvdZlYw0ULBu+jFdCXuHVka9ydfurG5y3sPAbcnLuIiioPy1bzsJg\nCGuwb0OYl5/P3Tk5vNqyJeP9+/BCCDKvzUQfqudF0YYtuXuRr72eVZNXnlW/EaeiELt6Ndt79iTa\nZNJSthkMXLVlC+PHj2fChAn1D0xPh0mTYNu2Os1t2sCCBcelPTivcOHM4AIu4CzAV+6jZEkJhV8V\nUvRtEZYUC+F/DSfyukgsKQ1/uRcXF7N69Wp++eUXli9fTkZGBt27d2fYsGEMGzaMHj16NPrsYY/T\nyewDB5h7+DCXBAfzQHwC5WuD+M9/YNUqwYRLnFy2bxe2wiL+1/9/uK908+qoV+s9YAZQlEr27HmM\ngoL5tGz5GpGRDSuPhrClooJRW7cyNTaWRxMTkSQJX7mPDT03EHFfIqNfj6bwoid498k+XNHqilOe\n/0wwcft2egYE8Pf4eC0q6TXX8N6jj/L94sV89tln9Q9SFC2Lz7ZtUMsZ9frra8Idnbe4cGZwnuH3\nKrcQF2RvLBSPIoqXFIusW7PEyvCVYn3v9WL/rP3Cfdh90rHl5eXiu+++E/fee6/o2LGjsNvt4rrr\nrhMffPCBOHLkSKPuX+71itfz8kTy6tVi4G+/ie8KC0VOjiruuUeIkBAhxgxyiw97ZIpvY74V464d\nJ77e+vUJ5ystXSXWrm0rMjKuFh5PYaNkEOLoOz/ocolu6eliyvbtwqMoQgghKrZViJXhK0X6ggoR\nGOISbf85Vqiq2ui5mwLfFRaKvhs2aBVVFaJ1a1Hyww8iMDBQLF68uOGBY8YI8cEHdZpeekmIO+5o\nRmFPAWd6ZnCeuEucXUyZMoWoqCg6VWfjAKZPn058fDzdunWjW7duLF68+BxKeAG/R8gGmZAhIbR+\nozV9D/Yl+clkyjeUs7bNWraO2sqRhUdQPWq9Y+12O5dffjkvvfQSW7duZc6cOQwbNowvvviC1NRU\nBgwYwMsvv8zevXsbvL9dr+eOuDiye/XilthYHt69m2tLNjD0sSL27BH0v9LIw4fa8WLccHplPEL5\noAqe/b9nqfJU1TtfUNDFdO++EZMpkfXru1BScmq5HmNMJpZ36cJhj4eRW7dS7vNha2+j5ayWKI
9m\n8MLTOvb+9zk+3vjlySdrQgwNCSHb6WSv06mFHR0/nuDFi+nWrRvr169veOCQIcflu6yOUfRHwJ9y\nm+jKK69k9erVNSESAG677Ta+//57goKCSE5O5qOPPsJuP/tu8xfwx4Ov3MeRz49weO5hqnZUETM5\nhphpMViSG3dG4Ha7+fnnn2uycyUmJjJ+/HjGjRtHfHx8g+NUIfiysJAn9u7FKss8lZLCpbYQ5s2T\neP55gRUPl5eupaV5N11mdabr6K4NzlVc/BM7dtxEVNQEUlKeQZZPnhGt5vlVlVt37mRbVRXfXXQR\nIQYDmddnogvSM3WvnYzK5RSmXVcnVHdz49asLFIsFh5OTNRCTQwZwmsPPshvmzczZ86c+gdlZWkB\n6/btq4ldXVEBUVGa9en5mgbzvNkmAkYAO4CdwMP1XB8IlAK/+emxBuY5w0XUUaxYsUIsWrRImM3m\nmrbY2Fhxh3+9N2fOHPH444832f0u4AKqUbmjUmTfky1WhK0Qm6/YLIoWF53SNonX6xVLliwRU6ZM\nESEhIWLgwIHi3XffFWVlZQ2OUVRVzM/PF63XrBFDNm4Uv5WVCZ9PiPnzhWjXThUtEwrFP+zLxbdD\nFgnnXmeD87jdR8SWLVeJDRv6CKdz/yk9t6qq4p7sbNElPV0UuN3CU+IRvyb9KnbNLxTGsAPisbcb\nNn9tDqSVlIjO69YdbejSRRz46CMRFRUlFP+W1nFQVSHi4oTIyqrT3L69EBs3NqOwZwgauU3U3IpA\nBnKAJMAAbALaHtNnIPB1I+YSQjTdHvDKlSvrKAOz2SySk5NF586dxbhx40Tbtm2b5D7VuLDvfm5w\nvsruq/SJg/89KNZdtE6s7bBWHHz3oPA5fTXXGyO3y+USCxcuFKNHjxbBwcFi0qRJ4pdffmlQuXgU\nRczOyxNRK1eKv2Vmiv1OZ41SSG7hFHGh2eJl62qR/Vi28FX66p1DVRWRm/ucWLUqWhQXL6m3T0Oy\nq6oqHt+9W7Rdu1bkuVyiZHmJWBWzSkx/eZUwhBwSJSVn7+xAUVURu2qVyKyo0Bqee06IadNEQkKC\nSE9Pb3jgjTcK8Z//1GmaOFGId95pPlkbi/P9zKAXkC2EyBVCeIH5wF/q6XfOYxJ36tSJWbNmsWnT\nJgoLC9m1a9e5FukC/sDQWXXETI6hx+YetJzVkiNfHGFtylr2zdyHr8LXqDlMJhOjR49m4cKF7Nix\ng44dOzJ16lQ6derEm2++SUVFRZ3+Blnm9rg4dvbuTYLJRJf165l5YB9jrlHJzjLz8DOx/J+xNWNf\nl/i09SaKfyo+7p6SJJOY+DDt2n3E9u03kJv7XPWPtZNCkiSeSklhUnQ0gzdtwt3HSvSkaEYvtWNv\nv5xxUw80ap6mgCxJXBcZyccFBVrDddfBggX07dWLRceEnaiDIUPgmOxoXbpo/ga/ezRGY5wuAX8F\n3q5VvwF49Zg+A4FCtFXDIqB9A3OdqeKsg2NXBllZWWL48OGiR48e4r777hM6na5J73cBF3AylG8p\nF9vGbRMrI1aKPU/vEV6H95TnUFVVLFmyRIwZM0aEhoaKe++9V+Tm5tbbN6eqSozcvFm0XrNG/FhU\nJIQQoqJSEZdNWyoMplIxzr5PrLtuh3Afqd8ayuncL9LTu4nt2ycJRTm5xVRtPLVnj+i4bp0oqHCJ\ndZ3XiQ8f/0qYwg4d6+TbrFjncIiWa9YcXUn17Ss2zZghevXq1fCgAwc00yzf0ZXT0qVC9O/fzMKe\nAThPVgaNwQYgUQjRBXgdOCumBeKokgEgMDCQH374gfT0dEwmE8HB5yaB9wX8eWG/yE77j9vTdUVX\nnDudrG2lrRQUp9LoOSRJYsiQIXzxxRds3LgRWZbp2rUrN9xwA5s3b67TN9Vi4dtOnXgxNZVbdu5k\n4vbtuAwKi98czNs/rebrVqsZ8XUST7fMpWDBkePuZTbH07
XrL3i9xWzZMgKvt6TRcj6WlMTI0FAu\n376V+DdbkvxWGLFXPc6kKW5KSxs9zRmhR0AAQgg2Va+gxo2j4/btZGVlUVC9YjgWsbEQHQ0bN9Y0\nde4MW7ZAIxdI5y2a1ZpIkqQ+wJNCiBH++iNoWur5E4zZA3QXQhQf0y5uvPFGAJKTkwkODqZLly41\n8bvT0tIAGlWfMGEC33//PaWlpSQkJDB9+nT+97//sX//fux2OyUlJYwaNYqrr7663vEHyvZz/cyr\nAYmojhFI6CjILEaWJKI7RiNJOgq2HUGSZGIvikOWZLZ9vY2I1AgSOiWgk3Uc3HoQGZmkLknoZB15\nm/OQZZnUrqnoZT25m3ORkWndozU6ScfujbvRyTo69OyATtaRvSEbnayjc+/O6GU9memZ6GU93S/u\njkE2sHXdVnSSjr4D+mKQDfy2+jf0sp6Blw7EqDOyduVa9LKeoYOHIknSCd9XNd/Y93s+1Y99hnMt\nT2Prs2bNok1QGxK+SaBsXRkHxx0k7PIwBg0ZdMrzlZaW8vDDD/P5558zYMAAnnjiiZrQC9X9v1+6\nlP8ePsyqlBReTk0lKjOT3SW7eXzlN+g+fxNL/gZu6l7KA4uuxRBiqDO/EArz5o3H4VjD5MkrWLNm\nT/WrP6F8QggWxsezobycac9lsm/PXt4N6s5lqZczfvzZed+LEhKwyDKDc3O1XMdTpzJ78GBS27Rh\nxIgR9Y+/6y7SXC6YMKHmekREGq++CuPHN6+8J6pv2rSJe+65h7S0NObOnQto35XTp09HnGsPZEmS\ndEAWMAQ4BKwDxgshttfqEyWEyPfzvYBPhRDJ9cwlhGia5Db1pb0sLy9n9uzZSJLE2LFjmTFjRoPj\ni8q2smVjH0BoJNSjPALQbMkltHcrkNi0CTp30aEdj0gIP9XmBRJC1K2rQvLPqF1ThcYr1aWq3c0n\ntCTuigo+ITRSwSdUvKrAq2qlR1XxKCpu1Ydb0XgVGSQDoAdJjyQbkSSNZNnEkR1eYjtGoNdb0Ms2\nDDorBr0do96O2RCIxWDHarBiNVixGW3YDDZsRht2o70OBRgD6o2g2Zz4vSYVqi132boydj2wC5/D\nR8uXWxIy+PTyPzidTt555x2ef/55evTowVNPPUXnzp3r9FlbVsaUHTtoZbXyVuvW4C1JqSEQAAAg\nAElEQVTlqo9GI/16J1mfXMtYwyGe+dhGzMjQ4+bPy3uFvLxZOBz/4rLLGgjrcAxUIbhxxw7cFT7u\nHV/BP/u8xKaf5rHoGwO9ep3WY54SfnU4mLZzJ1t7ahFU09q0wTlqFHP37eOTTz6pf9BXX8Hrr8NP\nP9U0jRwJU6fC6NHNL3NDONPkNs3uZyBJ0gjgFTTLov8KIZ6TJGka2grhbUmS7gBuA7yAE7hXCLG2\nnnlEc8vaHBB+RaGVKkIodXitrH1N8bcrtfoqCOGr57oPVfXW6utDCK+f96KqXn9bdbu3VrvHz3vw\nqS58ikaK6sKnOFFUN6rqQlFcqMKNUN2oqhv84yThRsKLjBchQEGPImS8QodHlXCrEi4VnD5BlaJQ\n6VUo9/pwqzJCMiEkC5JsRae3o9MFYDQEY9KHYjWFYzNFEGiJJcQaSagllFBLKGGWMEItoWddmZwv\nEEJwZMERdj+4G1tnzXGrsX4Kx6JaKcyYMYPhw4fz9NNPk5R0NHy1W1V5Ys8e/pefzxutWjE82Ma4\nz8dRWmDHOv+/5KxXmfW3Eq54KxbZUHen+cCBN9m3bwadOy/Bam3dKHk8qsqILVsYvEVPzwcPMGPs\nfirW3cy6ddAE0cBPCFUI4lav5pcuXbQEPTNmULlrFwkLF5Kfn4+hPucBhwPi4qC4GIyav8Wjj4LJ\nBE880bzyng7OG2XQVPi9KoM/A1TVi6q6UNUqFMWJqlahqk4UpRJFqfS3V+DzVeD2OnB6i3F5SnB7\nS/B4HXh9DlSlAlWtQB
KVyMKFATcKEi5FR4VPosynUurx4VQMKJIVIdvR6YIxGCKwmqKxW+IIsaUQ\nGdia2MBkYgNisRlt5/rVNDkUl0Lei3nsf3k/iQ8mEn9f/HFfyI1FWVkZM2fOZPbs2UyePJnHH3+c\nwMDAmuurHA5u3L6dAcHBvJyawiM/3M2avLXcVLWU6f8I5LrIfF74OZSAVnWV0qFD77Fnz+N07vwT\nNlv7RslS6vXSf+NGHp8pWHfgM36VZvC3CUZuv/20Hu2UcNvOnaSYzTyUmAiZmXDZZfSIjOTFl15i\n4MCB9Q/q0gXefBP69AHgk080+uKL5pf3VPGHVQZNseyfMmUK3377LVFRUWzZsgWAhx56iG+++QaT\nyURqaipz5syp849xpvi9blfAuZFdCIGqOvH5Sv1UgsdbRFnVQRzO/VS4DuF0H8btKUTxFSOpDnSi\nEovswqtCiQccPpmMLUZSOoWhM0RgNsUTZE0hIrAdCaHdSAq9CIvh9H5dNzdO9s6du51k35mNa5+L\nNm+1Iahf0Gnf69ChQzz22GN8//33zJgxg4kTJyLLmoKp8Pn4e04Oa8rK+KRdO776bRZzN89l7sCl\n/N+10RTt9vL+2wr/396Zx0VVfn/8/cywyg6CiAvuG2mKoqa5tPvVX1lmmlaaqWXZrpWlWZmVtptl\ni1mmqWVqWmnmkisuKIqioLiACAICgsg6w8z5/XEHREVFAUG779frce6989xnPnMd7rnPcs5pP+zs\ngov169fTosVxjh4dR7t2m3B2bnSxjz6H+Px87lkbzieDc1k9Np6fvxjOvn2ah29lsurUKd6Ki2Nr\ncDDr162j56hRfNOtG7E+PkydepHpzdGjtSTIY8YAmhNznz5QlSvSyztMdANE4b5y+vTpw969e4mM\njKR169aMHDmSu+++mylTpmAwGBg3bhwffPABH3zwQVVL/c+ilMJorIHRWANHx4Di476XOU9EsFiy\nKChI5lT2EQxZf9KonuJMbhwFBfFYs8LJOJ2J+UQ+cVjJMBvJtbpiMfri4FgXD5fmBHgF08i3KzXd\nW6BNe1U/nBs503p5a1IXp7L/of34PexHw8kNMda4cr21a9dm1qxZ7Nixg2effZZvvvmGGTNmEBwc\njKudHT+2aMGc5GTu2LuX95sOZ2wNXx5e25Xl61fwzwdNuXOEE2+uTOfFBd4YDNo9x9//MSyWbPbs\nuYt27Tbj6Fj7MiqgvpMTc7u1Ztrjuwn52cKjQ8y8+qo9P/10xV/piujp6cnB3FxOFBRoYSYeeIB7\nk5O5Z/nyixuDrl1h0aJiY9C0KSQnQ1YWVOAz5DXluusZVATJycns2rWLcePGsWXLFtq3b8+yZcto\n0UJLJL506VIWL17M3LnXPk+rzrXDZD5N/KlwEk7tIjUrkjM5hyk0JWAv6bgbc3GzE7IszpiUHw6O\nDfH2aEND3x4EeHfCwSHgmsbhvxTmdDOHnjvEmZ1naPFji3L1EqxWKz/99BPjxo3j4Ycf5t133y3u\nIR/IyeGhqCg6uLlxZ2EkL/09mkUDFuF8sBMP97XSzNfE/B2ueNU6a5Di4iaTmrqQtm03YG9ftonv\nH+NPYOgeRcHI07z91YP88YcWEK4yeTQqiq4eHjxdpw5s344MG4Zfaiq7du2iXr16F55w7Bh07KhZ\nANvvoFMn+PRTzU5UJ8raM6gOfgbXHH9/f4KCggAtWmTLli1JTDzr/fjDDz/wvxIZjXRuTBzsPWhS\n63Z6thzLQ51+4onbQ3my1zGG/S+bfndZaBEcg0fdzzHVuJOE3DzC435l+Y4B/L2xPqvW2fP7v7VZ\nsbUHO6NfITVtOQUFSWX2xq1I7H3saTW/FY2mNGJ///0cnXAUq7n06KiXw2AwMGzYMKKiosjOzqZV\nq1YssQ2Et3BxYWu7dmRbLHxR0Igv+s6n/8L+pDZcy54TDrg7WghuaCYyzFzcXmDgeLy8
7iAy8v+w\nWPLKpGFY/QDCXlF4fOHKhNcLeeWVyl/D/4CvL0vS0rSdkBBUZiaPhoSw5jxv42Lq19ci05UYF7r5\nZjjPleP6oiyeadWhUMGxieLi4qR169YSGxsrgYGBxXlqJ0+eLP369auQzyhJdY2RUxZ07WexWq1y\n/PRxWX7gF/liw5My8Y9gGf+bh3y+1CjL19jJyn+dZeXmm2TX/qckOXm+5OTEiNV6kcBnlaA7Pylf\nIu6JkPDO4ZJ7JPeq2ijJxo0bpVmzZjJw4MDivApWq1Umx8VJQGiofBcTKn4f+cmv+34Vq8Uq792R\nLK6G1fLbj6biNqxWi+zfP1j27XuozNcir7BQJnf5Sz5+6l9p2VJk+fJyf5VLkl1YKG4bN8ofRfmb\nn35atvfrJ4MHD774SQMGiMyeXbz75ZciTz5ZuTovRXWPTVStsVqt9O/fn2nTpuHq6srs2bNZsWIF\n8+fPr2ppOtUUpRR13evSu/lAnuv+Le/cG87k/pkMuTsVhwYr2Gf3IitPuvLd7p/5acsw1my9mbUb\nXNkU1pkjR94gLe0vzOb0StPn6O9ImxVt8B3gy65Ouzi58CKetGWkW7duREREULduXdq0acPvv/+O\nUorxgYF826wZ408KL9/3Fy/98xKzImbx+mo/XuuTybMjrLw9Rlt2rJSB5s1nUVCQSGzsm2X6XCej\nEZdX02k6H4Y9n8Wrr2rJxioLF6OR2z092ZqVpR144AFuPnqUNWvWYLVepJfVtSuEhhbvXu89g//k\nnAHA4cOHadeuHZMnT+aFF15g5cqVjBkzho0bN+Ljc+m8r1ZrAbm5BylyIFPKUGJboY2+nb9d8vXc\n7bK9qmozRq1zeUSEuMw4thzfwo7jq0hKX4e3IYVONd2o55SNvUNt/Lxvx9OzB56ePXFyql/hGs7s\nOsP+h/bjc68PjT9qfNVLUIsIDQ1l2LBhdO7cmenTp+Ph4cGe7GzujYykv6cji//px6j2TzHu1nFE\nfJjMgxM86D/Cnqkz7FEKTKZUdu3qTGDgm9Su/fhlPy/PnMe4Pl/h4dKRdendGPa44oknyvUVLsns\npCT+Sk9n0U03gdkMfn508fTkm2XLzkmEVUx4OAwZUpwXOStLi1Zx+nTl+0dcCTfs0tKKYPDgwSxd\nuhSTyURAQADvvPMO77//PiaTqdgQdO7cmRkzZpR6fn7WISIjeiOqSI+12NdY+9fmkayKtrWjUuyd\nbNsWq+2YaPXEUlxHcyw766ymtW2wrW7RXjUjYbRtG23vFW3blThuZztud8liMNjbtu2Li3bMAUOx\nV7I9BoNj8b72qu1rx51s+9q2VpwxGJwwGp0xGJxtbf/3DFtabhob4jbwb+xqDiatxN8uldtq+9DA\nKRMney98vO/Ey0srDg6XWzdVNswZZqIfi6Yws5CghUE4BjiWq72cnBzGjh3L33//zZw5c+jevTsn\nCgq4NzKSxo4GokNH0KN+F6b1mkb09DQeeMWV3o868PkszSDk5EQTEdGDoKDf8PS8yBr+EoxZMIbb\nR/Vh0fuNWP1BAw4eBJdKch9JMZloERbGyS5dsDcYYMAA5qSlkdqnD2Nsq4bOobAQvLy0yWRvzSO7\nUSP4+29o3rxyNF4NN6wxqIg176GhoXTv3p3WrVujlPbE/f7779OrV6+yNRAfD/feq81qWa1aKdoW\n0fqzIudsr8/Npae9/dn3rFZtu7RXqxUMBu3xwmgEgwGxMyL2RrA3InZGcDAi9naIQ8l9o7Zvb0Ts\nDeBgh9gbztZzMJSoYzj7WlQcVPG21U4h9loJPZBC53b+iB1Y7a2IEax2ghgFq8GC1WjBarAgqhCr\nKsRq81bWHNGKSl5xEbFiNNawGYkatu0aGI0utuJ6XnHDaHTDzs4No9EdOzt326uHrXhiMJR+k6vO\n/h0JWQn8c/gfVh75mwNJq7jN34Pufi7UNCRyMLoW
d901AB+fPri7dy7XElexCsfeP8aJr08QtDgI\nj85Xv9qoiOXLlzNy5Egef/xxJk2aRIFSDIqKIrfQROrCUTRpV595/eYR++1p+r3kQs8BjsyYqxkE\nLWvaUIKDw3ByunimNoA9yXv4bvhP1DHdzyqXzvTq7MC4ceWWf1Gaf/st3w4YQE8vL/jxRxJmzmSk\nhwd///136SfccQe8/LLmZAA88AAMGgQDBlSexotR7cNRVBQVaQyqgivSXdJYFJXCwnP3i46ZzaXv\nm81aKTp2fjGZSt83maCg4OxrQQHr4+Pp6ekJ+fnasfx8reTlnS35+ZCbq7Xh7Aw1amjFxeXsq6sr\nuLpidauB1cMJq4cjFndHrG72WFztsbjaYXUxYHFWWnGyYrEvxCK5FBaewWLJwmI5Q2FhFhZLFoWF\np20lAzBgb++Fnd3ZYm/vw65dudx6a1vs7Wtib+9bXBwcamFn51lteiiF1kJC40P5M+ZP/jq4FJe4\ndJ7s1YTmLpnYWU/h7f0/ata8H2/vXtjZXV061rS/0jj4xEGaTGtCrUHl9+RKTU1l8ODBWK1WFixY\ngHfNmjwZE8PWDRtoUXMLmdnx/DXoLxJnZ/PAszW44xEnpv2gGYRjx6aQlraUdu02XNSQF9F1elcm\nTprMhJc9ifusLUePKNzcyi2/VIYuWECtjh35sHFjSErC2qoVPoWFJKel4ehYis6JE7W/MVsss7ff\n1v4E3nuvcvRdiv+cMdCp5lgsZw1ETo5WcnO1ZLE5OdprUTlzRnvNytK2s7K0AdeSJStLMyZeXlrx\n9gYfn7Ovvr5ITR+sNd0prOlEobcdZjcoNORgNp/CbE6nsDAdszkNkykVszkNs/kkJtNJrNZc7O39\ncHDwx9GxNg4OWnF0rIujYx1bqX/NjYaIEJ0WzeKoxSyKXoTFlMzwFi1p75GPMkXh6dkDX9+HqFmz\nL3Z2V/aUn703m8j7IvEf6k+DtxqgDOX7XhaLhYkTJzJ37lx+++03OnbsyOtHj7IsLY3WKT+TnL6H\n5YOXkzgrl3tfdGXAMw6897k9IsL+/f1wcAigWbOvLvkZ3+78luQZyTQ5fBfvet3EkE7uvPFGuWRf\nlLCsLIYdOMD+oih5wcE8lZ/PoBkzSn+Y++cf+OADsEUR/f13mDUL/vqrcvRdDbox0LkxsFo1Q5GR\noQUGy8iA9HRtOy1N205N1UpamhaG+ORJcHLS4s77+0Pt2trMXkCAFmCsTh2oVw+LvzdmdRqTKRmT\nKYmCgiRMphMUFCTaSgIFBfGIWHFyqo+TUyBOTg2Li7NzE5ydG2NnV0mPqTZi0mP4dd+vzIuchx35\nPB3UlvbuZzDn7sDT83Zq1RqMj899GI1OZWrPlGJi3wP7cGroRIsfW2BwKP+iwmXLljFy5Eg+++wz\nHnnkET45fpwZiYl0TlvAsZPhrHhkBUc+yuaBSV6Met2ece/YUVh4mvDwjgQGjsfff8hF284qyKLR\nR41Y8v1Sxj7mxOGv2xN7ROFR/tGuC7CK4L9lC2HBwTRwdoYJE1i/bh1rbruNyZMnX3jC6dNQt672\ne7S35+hR6NlTG0muLtywxuA/MUxUzbjutItAZiYkJ7N+5Up61qoFSUmQmAgnTkBCAhw/rh3z9obA\nQK00aKCVxo21mcDAQHBwoLDwNPn5x8jPjyc/P5b8/Djy84+Sl3eEvLwjGI2u1KjRDGfn5tSo0Zwa\nNVrg4hKEk1MD2yT/lVPaNRcRdifvZu6euczfN5+2fk0Y2bwljRyOkpsTga9vf2rVGoKHR9fL9mQs\neRaiBkVhzbEStCQIO7fyR6bZv38/vXv35u677+a7777j4+PHmZWUROe0XziYvI1/Hv2HfeNP0/8L\nPyZ+aGTUi3bk5OwnIqInbdtuuGRQuyeWPcGtO27Ff00Qw2o2YXRHbyZOrPje2vr16/mhVi1ucXfX\nvJFDQ8keOpQ7
fHzYvv2CYMoaN98MM2dCx45YreDmpv20rnVYCj02kY7O+Sh1dlgpJUV7VCsNi0UL\nJ3DsmFbi4rSF4kuWwNGjmvGoWxe7pk1xbdoU1+bNoWUraPkgNK4N2h8ZJtMJcnMPkZd3kNzcg2Rm\n/ktOzn7M5lPUqNECV9fWuLjcjKurVuztL8wFULavpQiuHUxw7WCm3jWV5THL+SHiB0LjIxje5j4e\n8nbhdMxIRISAgJHUqjXkoquSjM5GghYFcWj0ISJ6RtBmRRscajlcla4igoKC2LJlC927d+eZZ55h\n+vTp5FqtLDIMpoNYuXfBvax4fwVzTyUz8NUAate30LdfEI0aTSEqahDBwdsv2rsZETyC4XHD+fHU\nbDr0iufjaV48/7yiMhIS9vHxYW5ysmYMOnXCJSODzORkMjIy8PIqJaRGly6wZQt07IjBoK0kOnCA\na5KPoSK57noGOjrXDJNJMxAxMXDoEBw8CNHR2l96QQEEBcFNN2mlTRstrHGJsYvCwixycqLIydlL\ndvYesrP3kJOzF3t7X9zc2uPm1gF39064uXXAaLz69ZIJWQnMDJ/J97u/p5FXQ8a0u4dmDjGkp/+J\nj08f6tR5Dnf3TqX2FkSEY5OOkTw3mbZr2+IUWLahpkuRlZVF//79cXZ2ZsGCBUxKSmJ1RgZN4r/i\nTF4Kvw/4ncX/S+TZTfVZudFISEeIihqIg0NtmjadVmqbIkLQjCC+Nn+N+rUGvbzr83xwTaa8W/EL\n+jPMZgK3bSOlSxecjUYYNIjP9u0j8J136Nev34UnzJ4Nq1aBzVn10UfhrrvAlpixyrlhh4l0dKoF\n6emas1FkpFb27tWKn59mFDp0gJAQaN++eA06aMmOcnNjyM4OJytrB2fObCc7ey81ajTD3b0Lnp7d\n8fDodk6k1rJitphZemAp07ZPIyErgRdDhtMnwEh6yizs7b2pU+d5/PwGYjBc2AM4/vlxEqclcvO/\nN+PcsPxhvU0mE0OHDiUpKYmlS5fy+smTHMrNocaBd3E0GpnXZx5fdkhganx9tu61o169DHbubEuz\nZjPw8elTapsfhX7EwZSDjHxjJD8P8+D7KU05GW+slJVF3XbvZnz9+vTy8YE5c4j56COm9+zJ9OnT\nL6y8f7+W4uzQIUBbSZSVBRcLeHqtKasxqPKYQ2UtVHBsomvN9apbRNdeZgoLRQ4cEJk/X2TMGJEe\nPUTc3ESaNhUZMkTk669F9uwRsZwbn8diyZfMzK0SH/+x7N3bVzZt8pavvgqQAwdGSkrKr1JQcPKK\npYQlhMngxYPFe6q3vLb6FTl0fI7s3n27bNlSV+LjPxGzOeuCcxK+SpAt9bZITkzO1V4BETl7zS0W\ni4wePVratm0riUlJct/evTJ4X6TcMedOGbFshBSkFsgY31hpXMskGRkiGRkbJTTUX/Lzk0ptNzEr\nUbymeEn8nHjZ1nmnONx+UsZNKSiX1otpfz8uTp6NidEOpqSI2dVVQtq2Lf2kwkLt/zk9XUREliwR\n+b//q1BZZUKPTaSjU10wGrUB40GD4OOPteWGGRmweLEWx2b7dujfH2rWhPvug48+gp07MYgdHh6d\nqVdvDK1bL6Vr11QaNnwXF5cgUlLmsn17E3buDObo0QmcPr3V5p1+aULqhDCv3zzCnwwn25RLx3kv\n8G1CM2o2+IasrDC2bWtIbOxEzOZTxefUeaYOgRMDibgtgtyY3HJfDoPBwPTp0+nbty89unXjI3d3\njhaYaNt5OuFJ4XwU9RHvbq1FuzNp9L/bhJtbN2rXHkFMzJNFD4DnEOAWQIeADmxpswV12sLL3XOY\n9rm2rr+i6ePjw4r0dE2Hnx+G5s3xOXCAM2fOXFjZaITgYNi5E4BWrbTRxOuOsliM6lCw9Qx0dK57\nkpJEFi4UGT1apGVLEW9vkX79tJ5DbOwF1S0Wk2RkbJQjR8ZJWFgb2bTJR6Kihk
pq6lIpLCxbZNKU\n7BQZt3qceE/1llF/jpIjKRslOvoJ2bTJW44enSAmU3px3RPfn5AtgVskLz6vor6xfPLJJ9K4cWOJ\niI2Vptu2yZQj+yXws0CZEzFHUlafkvb2GfLSSJNYLAUSFtZakpN/LrWduXvmSu95vSXllxTZ0Wmn\nOLbLlLe+K19PpjSsVqsEhIZKTI6t7TfflLl168qqVatKP+GVV0TefVdERMxmEScnkdzyB42tEChj\nz6DKb/JlLbox0LlhSUwUmTtX5LHHRPz8RJo3F3nxRZG1a0VMpguq5+Udk+PHv5Ddu2+TjRvdZd++\n/nLy5KIyGYbUnFR5bfVr4j3VW55f8bwkpIVJdPRw2bTJR+Li3pfCQu3mF/9xvGxvsV0KTlbcMEyR\nQdh45Ij4bd4sPx7dJb4f+sqaI2tk7weJEmCfJ7NnFkpWVrhs3uxX6nBRdkG2eHzgIUmZSbKt2TZ5\n5Y0T4tIsRywWa4XpLGJIVJR8nZCg7fz7rxwLCJC33nqr9MoLF4rcd1/xblCQyO7dFS7pqrhhjcH1\nOn59veoW0bVfUywWkZ07Zd0TT4iEhIh4eYkMHqzdbLKzL6huMqVJYuJM2b37Dtm0yVOioh6TU6fW\nXDZvQEp2ijy/4nnxmeoj76x/R1Izd8m+ff0lNLSOnDjxvVithXLkjSOyo/0OMZ82X9FXuNQ1LzII\nPx84ILU2b5YFMevE90NfiToZJcv6HhUvB7OEhVnlyJHxEhl5v1itF97kh/w+RD7f+rkkzkyU3fdE\niGOjHHlrYcYVaSyL9jlJSfJgZKS2k5srZicn6dOzZ+knxsaK+PuL2PQ+9JA2dXQt0ecMdHRuJAwG\nbQXSY49BWBjs2wfdu8P332se1P37w8KFWogPwN7eh4CAEbRtu4aQkGjc3Npz5MjY4jmBvLzYUj/G\nz8WPaf+bRtjIMA6mH6TN933YVtCLlq0Wkpw8m/Dwjvi8ehL3EHf23b8Pa8HVZU47n5dffpmnn36a\nd/v25VkfHz7IdOGt26dw/6/3c+tPHrxW/xj97rHg4fEmubmHOHny1wvaeKzNY8zZOwf/x/zJjcxh\n9EP5fPSxovBieQeukju8vPg3MxOLCDg7I23bYty+ncLCwgsrBwZqfiu2jIktW0JUVIXKqXzKYjGq\nQ0EfJtL5r5OWJvL99yJ33y3i6amtUFq1SlvNch5ZWbslJuYF2bTJR/bs6S2pqX+K1XphvSJ2JO6Q\nLrO6SLtv2smG2A2SlDRXQkNry4HoEbJn0CbZ/8j+Up/Sr5YJEyZIcPv28sjevfJgZKQ8u+I56fVz\nL8k5liP9nROl960mycwMk82ba50znyEiUmgplDqf1JH9J/fLsQ+Pye4BUeLgWyATV6VUmL4iWm3f\nLjtOny4SLd/UrCnh4eGlV+7dW2TxYhER+eUXbRqoOoDeM9DRucHw8YHhw7XgaNHR2gqW11/XnkrH\njz8nH6+bW1uaNv2cW245jq/vQxw79i7btjXm+PFPKSzMuqDpDgEd2DxsM692fZVHf3+UsVtW0qDV\negxGJ86M7k9WjaUcffNohX2VSZMm0TEkhMQxY4jLy6N+q5cxWUy8ffBtvvjFkdjtBXw7rR1+fg8R\nGzv+nHONBiOPtH6EuXvmEvBUANlr0xkysIBPvwBzBfcO7vTyYk1GhrbTowd3GI1s3ry59ModO8KO\nHYC2ouh66xlcd8ZgvS064PXG9aobdO1VwWV1+/vDCy9oyxlXrtSixN5yC9x2mzaMZDIBYDQ6U7v2\n47Rvv52goN84c2YH27Y15PDhseTnJ5zTpFKKh296mOjR0dRxq0PbmbeyJrMVrYKWoIb9TKLPMOJ/\n2FV+7bbP+vLLL/Hz8qLmjBl8lJDIq71mszh6MesbruKb4aeY+p6QdOI90tKWkpW145zzh9w8hDl7\n5yAuQsDIAIbnnCR/gzdfRZUvzef52s8xBr
fcQmBmJjs2bCj95JAQbWgPaNYMYmOL/xuuCeX9rV93\nxkBHR+c8broJPv1UC8D3zDPw9ddawL2JE7XAfDbc3UNo1WoBHTrsAqzs3NmGgwefIi/v3Cd+FwcX\npt41lX+H/sv8ffPpvXgM7o1/wb/brRz1uYPDq6cXDd2WC6PRyJw5czAdO8YtW7bw5JEEfnjwN55f\n+Ty13yngzSbxDOpbAx+fTzh06BlK+lcE+QXRyKsRf8b8SZ0X6lC4NIm77zEz6StThfYOenh6sv3M\nGfIsFnBxwdKqFaYNG0r//iEhmnG2WnF0hPr14fDhCpNS+ZRlLKk6FPQ5Ax2dsrN/v+bH4OWlzS1E\nRFxQpaAgVY4enVDst5CbG3tBHYvVIjPCZkjND2vKh5s/lIT1G2Tdj01kV+j/JN5DcCwAAAyySURB\nVD8/sUKkpqenS7NmzeTOJUvk//bulWnbpkvwt8FyOu603O98Qgb0KpDw8K6SmPjtOefN2ztP7pxz\np4iIHHzmoPwxJF4caxXIt8dOVIiuIrqEh8tqm3exddw4+cTFRWJL8QcREZEGDUSio0VEpG9fkd9+\nq1ApVwX6nIGOzn+YVq3gyy+1eYSWLaF3by162rp1WohvwMGhJg0bvkunTodxcqpPeHh7Dh16EZMp\ntbgZgzLwdMjThI0IY8XhFTx0eBzeBb+S/Vttdoa1IyXll3JL9fb25q+//mLP6NEcTkvDXPs+6nvU\nZ2L0RD791sjWtRb27Z5LbOwETKa04vMebPkge1P2cij9EIHjA/H68xgt6sGbc89UaO+g5FCR6tmT\ne5ydCQ0NLb1ySEjxvEHLlteXJ3KlGwOlVC+l1AGlVIxS6rWL1PlCKXVIKRWhlGp7qfZu2DHgaoyu\n/dpTYbq9vGDcOG0A+5FHYNQoLTTG8uXFRsHe3pOGDSfRsWMUIoWEhbUgPv5DrNazA94NvRqydsha\nBgQN4M6sXuTkP4TDtE+Ji51ITMzTWCz55dLetGlTFs6fT+rzzzM5NpYxd37J7wd+J7LjNqbenszL\nz9VDqdEcPDiieIjG0c6RYW2H8c3Ob3AMcMR/mD/Da6ZRsCiAuSkpV3W5StN+zrxBly40zcoi7GLz\nBh07Fs8bXOtJ5Go9Z6C0zB5fAvcAQcAgpVSL8+r8D2gsIk2Bp4BvKlNTRVJdbzTVUZeuqWxUmiYH\nB3j8ce3u9MIL2iqkTp200MvFPYVaNGv2JcHB28jMXMfOnW3JyFhXrMmgDLzY+UX+HPQno5qO4lCa\nwunT2ZjyU9m9uwt5eUcu/vlloGfPnnz6yisYZs5k5MFjzH5gHiP+GEHDT1K41z6Zt14cR0FBEgkJ\nnwHatXqq/VPM2TuHPHMe9V+rT+vtR3FKdmbsihR2lxZH6Cro5O5OTF4e6WYzuLlhatqUrNWrS503\nWG9nB6GhYLVedyuKKrtn0BE4JCLHRMQM/AL0Pa9OX2AOgIhsBzyUUhfN1l2dMm5dyR/utdRd0TeU\nitBeVTfeS2mvzsag0n4vRiMMHAgRETB2LDz3nJb8Z9Om4io1ajSldesVNGz4HgcOPM5vvz13zsqj\nTnU7ET4qnPmPz2dD7E4yn34RX48h7Np1CydOfEePHt2vWt6QIUOY0KEDx7dvZ3GWBz/0/YH7VtzH\nY1/EkhBhZvonq4mKmsHp01tYv349Db0aEhIQwsL9C3Hwc6D+MwE8UjedwHmtuGdTFBFXaBBKu+4O\nBgPdPDz455QW1M+5Vy86FxQwcOBAsrLOXaa7Pi0NXFygb19a1D7NoUOaL9q1oLy/mco2BnWA4yX2\nE2zHLlUnsZQ6Ojo6FYnBAAMGaLH4H39cK7fcAosWgcWCUgpf3wfo2DEKOzsvdu68mbi4ycXDQT41\nfPhj6B84fuXIH+oPIu9pQmO/pSQn/8ju3d3Izo68amkvvfgio3Jy+O7IEVSNtsx/cD4Pn3yQKcPD\nyVySw5
B++5gxeQkWi+aF/UzIM8zYOQOAemPq0evoYTrWVhQMDaHbG2mEpZW/h/BUQADPHTrEoKgo\n9txxByOaNsXby4v27duzZ8+esxXt7GDtWggMxKVnCF099xNbuhN4teO6S3t53eXjtXG96gZde1Vw\nzXTb2cGwYTBkCCxbBp98Aq++qmVxA4yA18Fk2qe15UiPrwir9T6uaWezyXQBmjxmYf2yE5ja309q\n3XtJbLiC7qNuoSA+ALFcXSaynu5wa7wdBUmuFIiVb1waYLlpLEM/FYaiAMWvyxNZ8tV6AF4Dlnxl\nyzM5SRuXvqeLtpvwq/YUWhb2xWRxU7PSkxfPtL3GAXED4G52c3cbL45sHskRmx9adFgiS2augJZg\nbOHBC9ahzH2vKe/8uOBKL8EVU97fTKVmOlNKdQbeFpFetv1xaMucppao8w2wTkR+te0fAHqISMp5\nbelpznR0dHSuAilDprPK7hnsAJoopQKBJOBhYNB5df4ARgO/2oxH5vmGAMr2ZXR0dHR0ro5KNQYi\nYlFKPQusQpufmCUi0Uqpp7S35TsRWaGU6q2UOgzkAMMqU5OOjo6OzoVU6jCRjo6Ojs71wXXlgayU\n+kUptctWYpVSl4+adQ1QSj2nlIpWSkUqpaZUAz1vKaUSSlyrXlWtqSRKqTFKKatSyrsaaJmklNqj\nlNqtlFqplPKvBpo+tP2eIpRSi5VSpc9oXltN/ZVS+5RSFqVUcBVruawj67VGKTVLKZWilNpb1VqK\nUErVVUr9q5Tab7s3PX/J+tdrz0Ap9THa/MLkKtbRE3gD6C0ihUqpmiKSdpnTKlvTW8AZEfm0KnWU\nhlKqLvA90BxoLyKnLnNKZetxFZFs2/ZzQCsRebqKNd0J/CsiVtvDhYjI61WsqTlgBb4FxopIlTyI\n2RxZY4A7gBNo85IPi8iBqtBTQtetQDYwR0TaVKWWImwPNv4iEqGUcgXCgb4Xu1bXVc/gPAYAlb9e\n6/I8DUwRkUKAqjYEJaiuE+6fAa9UtYgiigyBDRe0G16VIiJrRKRIxzagblXqARCRgyJyiKr/XZXF\nkfWaIyKbgYyq1lESEUkWkQjbdjYQzSV8uK5LY6CU6gYki0j5/N8rhmZAd6XUNqXUOqVUh6oWZONZ\n2zDD90opj6oWA6CUug84LiJX75FUCSilJiul4oHBwMSq1nMeTwB/V7WIakRZHFl1zkMp1QBoC2y/\nWJ1q53SmlFoNlAxHoQABxovIn7Zjg7iGvYJLaJqAdg29RKSzUioEWAg0qkJN44EZwCQREaXUZOBT\nYHhla7qMrglow2l3nfdeVWoaLyJ/isgEYIJt/Pk54O2q1mSrMx4wi8j8ytZTVk061x+2IaJFwAvn\n9YTPodoZAxG561LvK6WMQD/gmk1iXUqTUmoUsMRWb4dtYtRHRNKrStN5zASu2R/yxXQppW4CGgB7\nlFIKbegjXCnVUUTKl57qKjWVwnxgBdfAGJThd/440Bu4vbK1FHEF16kqSQTql9ivazumUwpKKTs0\nQzBXRJZdqu71OEx0FxAtIicuW/PasBTbH6xSqhlgX9mG4HKctyKmH7CvqrQUISL7RMRfRBqJSEO0\n7n27yjYEl0Mp1aTE7v1o46pVim311yvAfSJSUNV6SqEq5w2KHVmVUg5ojqx/VKGekiiqfk7lfH4A\nokRk2uUqXo/GYCDVY+K4iB+BRkqpSLQnyyFVrAfgQ6XUXqVUBNADeKmqBZWCUD3+cKaUuFZ3Ai9U\ntSBgOuAKrLYtDZ5R1YKUUvcrpY4DnYG/lFJVMo8hWu7LIkfW/cAvIlIdDPh8YAvQTCkVr5SqcudZ\npVRX4BHgdtvS6UsuM79ul5bq6Ojo6FQc12PPQEdHR0engtGNgY6Ojo6Obgx0dHR0dHRjoKOjo6OD\nbgx0dHR0dNCNgY6Ojo4OujHQucFQSpU/+/m1aTO2LCG8K+OzdXRKQzcG
OjcaleE4U5Vt6o5AOtcE\n3Rjo3PDYQhestUVxXW3LqYBSqpFSaqstuc27V/IUrpT6P1uk2nCl1CqllK/t+FtKqdlKqY22p/8H\nlFJTbV7OK2yxtUDzvn7NdnybUqqR7fwGSqktRZpKfJ6LUmqNUmqn7b37Ku4K6ejoxkDnv8F04EcR\naYsWMmS67fg04DMRuRktVtKVPIVvEpHOItIe+BV4tcR7jYCeaHH2fwbW2hKe5AN9StTLsB3/yqal\nSNNXNk1JJermA/eLSAe0WFifXIFWHZ3Looej0LmhUEpliYj7ecdS0TI+WWxRHE+IiJ9SKg3ws2UU\ncwMSzz/3Em3ehHZDrg3YA7Ei0tuWZc4kIh/YorPmioiz7Zx3gHQR+UIpFQvcJiJxNk1JIuJr01TL\nprVYk63OZ0B3tAQ8zYCGVR3oT+fGQe8Z6PwXKMsTz5UGzZsOfGF7sh8FOJV4rwC0XJWAucRxK+eG\njZfLbJfU9AhQEy3Sazvg5HmfqaNTLnRjoHOjUdpNfQtaQiSAR4FNtu2tQH/b9sNX2KY7Wg5egKFX\neG4RA0t89lbb9uYSWh8pUdcDOGnrxdwGBF6iXR2dK6baJbfR0SknzrYUlkVZuj5Fy142Wyk1FkgF\nisILvwT8rJR6A/gHOH0Fbb4NLFJKnQL+RUvcUxoX65UI4KWU2oM2H1BkAF4E5iulXgVKJiOZB/xp\nq7+TapB3QefGQp8z0PnPopRyFpE82/ZA4GEReaCKZenoVAl6z0Dnv0x7pdSXaE/8GWjJ53V0/pPo\nPQMdHR0dHX0CWUdHR0dHNwY6Ojo6OujGQEdHR0cH3Rjo6Ojo6KAbAx0dHR0ddGOgo6OjowP8P8S3\n99uo441lAAAAAElFTkSuQmCC\n", + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "glmnetPlot(mfit, xvar = 'lambda', label = True, ptype = '2norm');" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Note that we set `type.coef = \"2norm\"`. Under this setting, a single curve is plotted per variable, with value equal to the $\\ell_2$ norm. The default setting is `type.coef = \"coef\"`, where a coefficient plot is created for each response (multiple figures).\n", + "\n", + "`xvar` and `label` are two other options besides ordinary graphical parameters. They are the same as the single-response case.\n", + "\n", + "We can extract the coefficients at requested values of $\\lambda$ by using the function `coef` and make predictions by `predict`. The usage is similar and we only provide an example of `predict` here." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 23, + "metadata": { + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[[-4.71062632 -1.16345744 0.60276341 3.74098912]\n", + " [ 4.13017346 -3.05079679 -1.21226299 4.97014084]\n", + " [ 3.15952287 -0.57596208 0.2607981 2.05397555]\n", + " [ 0.64592424 2.12056049 -0.22520497 3.14628582]\n", + " [-1.17918903 0.10562619 -7.33529649 3.24836992]] \n", + "\n", + "[[-4.6415158 -1.22902821 0.61182888 3.77952124]\n", + " [ 4.47128428 -3.25296583 -1.25725829 5.2660386 ]\n", + " [ 3.47352281 -0.69292309 0.46840369 2.05557354]\n", + " [ 0.73533106 2.29650827 -0.21902966 2.98937089]\n", + " [-1.27599301 0.28925358 -7.82592058 3.20521075]]\n" + ] + } + ], + "source": [ + "f = glmnetPredict(mfit, x[0:5,:], s = np.float64([0.1, 0.01]))\n", + "print(f[:,:,0], '\\n')\n", + "print(f[:,:,1])" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The prediction result is saved in a three-dimensional array with the first two dimensions being the prediction matrix for each response variable and the third indicating the response variables.\n", + "\n", + "We can also do k-fold cross-validation. The options are almost the same as the ordinary Gaussian family and we do not expand here." + ] + }, + { + "cell_type": "code", + "execution_count": 24, + "metadata": { + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } + }, + "outputs": [], + "source": [ + "warnings.filterwarnings('ignore')\n", + "cvmfit = cvglmnet(x = x.copy(), y = y.copy(), family = \"mgaussian\")\n", + "warnings.filterwarnings('default')" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "We plot the resulting `cv.glmnet` object \"cvmfit\"." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 25, + "metadata": { + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } + }, + "outputs": [ + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYAAAAElCAYAAADtFjXiAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzt3Xt8FPW9//HXJ6CIgIJWQbAmkFpRq0VFisXWoNL2lOOl\n6lHbUBs91Vatoh7UUk2TbdrjpbTF2tZfWy8BuVir1XqpVVTWC14LIqLosSGJFgSvKASxSD6/P2Y2\nTjabZDbZ3flO9vN8PPbBzuztnUmY7873+53PiKpijDGm+JREHcAYY0w0rAEwxpgiZQ2AMcYUKWsA\njDGmSFkDYIwxRcoaAGOMKVLWAJiCEpFtIrJMRFaKyHMicpGISNS5ekJEzheRl0Tk5rT1R4jIBv/n\nfE5EHsjT598kIifk471NcegfdQBTdFpU9WAAEfkUsBDYCajt7RuLSImqtvb2fbJwNnCUqq7N8Nij\nqnpsZy8UkX6qui1/0Yzpnh0BmMio6tvAWcAPwNuBi8jVIvK0iCwXkTP99SIiv/O/bd8vIvemvvmK\nSKOIXCki/wBOEpExInKfiDwrIo+IyGf9531KRG7z3/tpETnMX3+E/y19mYgsFZFB6Tn9o5QXRGSF\niJzvr7sOGAPcJyLTM/x4HY5q/G/s14nIU8BVIrKjiNwgIk/5n31sV9vBf+w3IrLKP6rYPbD+KP9n\neF5ErheR7QLb53/9n/EZETlIRP4uIq+KyPey/62ZPkVV7Wa3gt2ADzKsexfYDTgT+JG/bnvgWaAU\nOBG4x18/3H/+Cf5yIzAj8F4PAuX+/QnAQ/79+cAX/fufBl7y798FHObf3xEoSct2MPA8sAMwCFgJ\nfN5/bDUwLMPPcwSwAVjm32b6628C7go872fAt/z7OwOvAAO72A7fAO731+8BvAecAAwAXgv83HOA\n8wPb5yz//i+B5f7P+SlgXdR/D3aL9mZdQMYlXwEOEJH/8pd3AvYGDgf+DKCq60Vkcdrr/gTgf3v/\nIvDnwLjCdv6/RwP7BtYPFpEdgSXAr0RkPvAXVV2T9t6HA3eo6hb/M/4CfAmvURAyfNP3ddYF9Oe0\nn/cYEbnYX94e2KuL7fBlvC4zVPUNEXnIf3wfYLWqNvjLc4BzgF/7y3f7/74ADFLVzcBmEdkiIjup\n6ged/Aymj7MGwERKRMYA21T1LX/nfJ6qLkp7ztRu3qbF/7cEeE/9MYb0jwK+oKpb09ZfJSL3AFOB\nJSLyFVX9v+x/ktBa0pZPVNVX2wUNvx2kk/vpPvL/bQ3cB1BsH1DUbAzAFFrbjkpEdgOuA671V90P\nnCMi/f3H9w58Sz/JHwsYDlRkemNV3Qg0ishJgc840L/7ADA9sP7z/r9jVPVFVb0ar6tlbNrbPgYc\nLyI7+EcY3wAe7dFP3tH9wPmBTOMC6zNth0eBU/wxgj2Ayf7zXwFK/cYU4NtAMkcZTR9mrb8ptB1E\nZBled8dWYK6q/sp/7HqgDFjmfwt+EzgeuB04EngReB1YCrzvvya9nG0l8P9E5HK8v+9bgBV4O//f\nisjzQD+8nek5wAUiMhnY5r//fcE3U9XnRKQer3FQ4A+quqKTz+5O+vN/CswWkRV4DWMjcGxn20FV\n7xCR1HZ4DXjCz/iRiJwO3CYi/fysvw+R0UoBFzlRtb8B4z4RGaSqLSKyC/A0MElV34w6lzFxZkcA\nJi7uEZGheIO6P7GdvzG9Z0cAxhhTpGwQ2BhjipQ1AMYYU6ScaABEZE8ReVhEXvRPuU+dbj9MRB4Q\nkVf8EgA7F3
suy2SZ8pDrBhFZ789GSq37vIg8KZ+UkBhfyEwZMk73t1nbdos4T8bfZewyRX0qsj8G\nMQIY598fjDeveSxwFXCJv/5S4Mpiz2WZLFMech0OjANWBNbdD3zFv/8fwOJCZkrLtz/eVN4BeFN4\nHwDGRJWnq99l3DI5cQSgqutUdbl/fxOwCtgTOA7vtHb8f48v9lyWyTLlIdfjeHWFglrx6hMBDAXS\nS2QU0r7A06r6kXoVVB/Fq4EUmU5+l6NilynKFquTVqwMaMJrwd5Le+xdy2WZLFNe8pTS/ghgLNCM\nd8LZ68Cno9hOgSwvA8PwCtk9AVwTVZ6ufpdRZ8k2kxNHACkiMhi4DZiuXguWPkc1kjmrLuayTJYp\nz87Gy7cXcCFwY1RBVPVlvG6yRcDfgOfwztyOXIbfZeSyyeRMA+DXPbkNuFlV/+qvXu/XfkFERuCd\nEl/0uSyTZSqA76jqnQCqehteae3IqOpNqjpeVSvwSm3ns2BfKJ38LiOVbSZnGgC8bxgvqeo1gXV3\nAVX+/e8AUWxkF3NZJsuUa+mlrdeIyBHgXWyGiHe44hUORET2wivItyDKPL5Mv8uoZZcp6r4qv79q\nEt4h3XK8w7tlwNeAXfAu8PEK3sj/0GLPZZksUx5yLQDW4pWKfg04He+6Cv/wcz4JHFTITBkyPop3\nMZ7ngIoos3T1u4xbJisFYYwxRSrvxeBEpAmvdG8rsFVVJ4jIMLyrOJXijVSfrKrvd/omxhhjcq4Q\nYwCteIdsB6lqaiDph8CDqroP8DAwswA5jDHGBBSiAZAMnxPpiS/GGGMK0wAosEhEnhWR7/rrhqvq\nevDOXgN2L0AOY4wxAYW4IMwkVX3Dn8b1gIi8gpsnvhhjTFHJewOgqm/4/74lInfinVCyXkSGq+r6\nrk58ERFrGIwxpgdUVbp7Tl67gERkR/+0ZERkEPAV4AWyOPEl6vm+6beamprIM8Qhk2u5amrcy5S+\nnVIZXbi5vJ1cu7mYK6x8jwEMBx4XkeeAp4C7VfUBvLoeU/zuoKOAK/OcI2eampqijtCBi5nArVyJ\nhPevS5lSUplSGV3g8nZyjau5wshrF5CqNuLVGU9f/y5wdD4/2xhjCqmpqamtMWhqaqKsrAyAsrKy\ntvuuKcQgcJ9SVVUVdYQOXMwEbuayTOFYpvBSuYI7+kQi4WzeIKdLQYiIupzPxIcIuP6nFIeMJpxE\nIkFNTU1kny8iaNSDwH1RMpmMOkIHLmYCN3NZpnAsU3iu5grDGgBTFCL8MhZaHDKavsW6gIwxJses\nC8gYY4zTrAHIkov9fS5mAjdzWaZwLFP3mpqaSCaTzJ49m/r6epLJJMlkMlbnBdg0UGOM6YHgtM9H\nHnkkFtM+09kYgDHG9FKqz7+5sZH66moaliyhfNIkqurqKB09uuB5bAzAmIDa2qgTdC8OGU3nmhsb\nuXbKFGbMn8/cpiZmzJ/PtVOm0NzYGHW0TlkDkCXX+iHBzUzgVq5UnR2XMqWkMrlUC8jl7eSaVK76\n6moSDQ0M8tcPAhINDdRXV0cVrVvWABhjTA60rlnTtvNPGQS0rl0bRZxQbAzAFIU4lFmIQ0aTWSKR\ngFdfZcb8+e0agRZgVmUlNfPmFTSPjQEYY0wB9Zt8HNN27k+Lv9wC1JSXU1VXF2WsLlkDkCUX+yFd\nzARu5rJM4Vim8FK5rl05Bz23hlmVlZxWVsasykrOW7QokllAYdl5AKYoxKHOThwymvaaGxup/9nP\n2LjqVXbeaT0/uXUWB35ubOSlIMKyMQBjjOmB1LTP1MyfVJfPeYsWUT93bixqAdkRgDHGhBS86teN\nM2dyXYZpn7Oqq2HvvaOKmBUbA8iSi/2QLmYCN3NZpnAsU2ZlZWVUVFRQUVEB69YxCEgGHnd92mc6\nawCMMaYHWoYMaZvx07YOKBk5Moo4PWJjAMYY0wMXTp9O65338r+vxXcMwI4A
TFGIQ52dOGQ0nxi6\nyy5sO/UmTvrMqbGZ9pnOGoAsudAPmc7FTOBWLqsFlB2Xt5NL5v77h4w4dRzlVVXUzJsXq50/2Cwg\nY4zpkfUt29i4w0uc9MUa/vHMk+1mCJWWlrY1WMHrBrjGxgBMUYhDnZ04ZDSf+PwZ57Jh6Ls0/3Kh\ncyd+2RiAMcbk0T/7r+Qb+34j6hi9Yg1Allzsh3QxE7iZyzKFY5k619zYyKUnncLB9z/OgPtv55aF\nC6OO1GM2BmCKgkNH552KQ8ZiE+zXb2pqYocBA3hsxgyuXrvWm/r52q2c8eTjjDzppEhz9pSNARhj\nTAhd1fw/7YADuH3FiqiidWBjAMYYk2OdXfVr0MaNUcTpNWsAsuRKP2SQi5nAzVyWKRzLlFnJqFEd\nyj/cBzBiBMlkkmQy2dZlFAc2BmCMMSFV1dVx9l1Pcd3GT8o/1I8cydULFsTuJDCwMQBjjAklkUhw\n2WU1DN+tkTOOqGb980sonzSJqro653b+NgZgTEAc6uzEIWOx+83djzHg6zfx8zvnxbb8Q5A1AFly\noR8ynYuZwK1cVgsoOy5vpyjd+PSfOGDfge3WuZCrp2wMwBhjQtjW2sqLrXdw1VcfjjpKzhRkDEBE\nSoB/AP9S1WNFZBjwJ6AUaAJOVtX3M7zOxgBMTsShzk4cMhazY86ZyaId7mLLL18EcK7+T5BrYwDT\ngZcCyz8EHlTVfYCHgZkFymGMMVlpbmwkMW0aH95xI4csKqG5sTHqSDmT9wZARPYEvg5cH1h9HDDH\nvz8HOD7fOXLFxf4+FzOBm7ksUziWydPc2Mi1U6YwY/58Hlz3Jg+sXMm1U6a0awRc3FZhFeII4FfA\nxUDw4Ha4qq4HUNV1wO4FyGGKmKNH6u3EIWOxqa+uJtHQ0Hb27yAg0dBAfXV1lLFyJq+DwCIyFViv\nqstFpKKLp3ba81lVVdV2MYWhQ4cybtw4Kiq8t0q1vIVeTonq8+OynFrnQp7aWvd/fxUVSZJJN/JU\nVFREvj1c+HtavXJl284/6f9bAbSuXUtz//7O/H0nk0nq6+sBsrr4TF4HgUXkf4FpwMfAQGAIcAcw\nHqhQ1fUiMgJYrKr7Zni9DQIbYwoqWAH0xpkzue6ppzoUf5tVWQl7722DwF1R1R+p6l6qOgY4FXhY\nVb8N3A1U+U/7DvDXfObIpfRvkS5wMRO4mcsyhVPMmcrKytqOgnadMIEfjylvq//TAlwyciT7TJ3a\ndtnH2bNnx6r+T1BU5wFcCdwqImcAzcDJEeUwxphODd1lFw64ehGTv1vN2KFe6YdLMpR+cPWav92x\nWkDGGNOJRCLB+/2/xZa3d2f40NnOdvmkc6ILyBhXxKHOThwyFqM5b53DLgc/EnWMvLAGIEvF3Dea\nLZdyWS2g7Li8nQppy8fKu4Oe4syvfLnT57i4rcKyWkDGGNOJ5W9tYceBYykdPjTqKHlhYwCmKMSh\nzk4cMhabsqrTGT5qd57+2VVO1/5JZ2MAxhjTS+sGvsQxB0yOOkbeWAOQJRf7+1zMBG7mskzhFHum\n5sZGqk+tZMKfX2fT7Td1WQDOxW0Vlo0BmKIQhyP3OGQsBqkCcImGBuqAlttupea5pWybOjXqaDln\nYwDGGBOQmDaNGfPndyj/cPbEiZxxxRWAd+KXyyd/hR0DsCMAY4wJaF2zpt3OH7wqoKMHDmxXmK4v\nsDGALLnY3+diJnAzl2UKp5gzlYwa1Vb7J6UFKBk5MuPzXdxWYVkDYIwxAVV1dVwyqn0BuJrycqrq\n6qKMlRc2BmCMMWlOnPH/2HDvbYza0kD5pElUZSgA5zIbAzAmoLbW/Vo7ccjYFwXr/zc1NVFWVsbi\nD+/kG2eeyl4b18Tm5K+e6LILSERKRMRKNQe42N/nYiZwK5fVAsqOy9sp14L1/5ubmzn8S19mw5Bn\nOfeYr0eaqxC6bABUtRW4pEBZjDEmcvc9
+wr9Pt6Zg/fOPOjbl3Q7BiAiVwJvA3+CTwbHVfXd/Eaz\nMQCTO3GosxOHjH1dIpGgYeeRPP76o6z+xc2xqv8TlMsxgFP8f88NrFNgTE+CGWOMy5741xIO23NS\n1DEKottpoKo6OsOtaHf+Lvb3uZgJ3MxlmcIp5kxbHjuH7x5+XOjnu7itwur2CEBEtgPOBlJXREgC\nv1fVrXnMZUxOxeEoPg4Z+7qNGwex+dUJHHFI1EkKI8wYwPXAdsAcf9W3gW2q+t08Z7MxAGNMQZ1y\nyq1s2nQy997rLdsYAByqqp8PLD8sIs/3PJoxxrilubGR+upqWu5bxY5j7qK5MV4nfvVUmFIQ20Sk\nPLUgImOAbfmL5DYX+/tczARu5rJM4RRTplT55xnz53PPxmXUPz+fqw8/nFsWLqS0tJRkMkkymWw7\nWaxQuQohzBHAxcBiEVkNCFAKnJ7XVMYYUyD11dUkGhraKoAOAq5eu5ZZ995Lzbx5UUbLuy7HAESk\nBJgILAX28Ve/oqofFSCbjQEYY/KuZvJkEhm+xddMnkzi4YcLHygHcnJNYP9M4N+q6kequsK/FWTn\nb0wuxaHGThwy9kXZln/uS8KMATwkIieKSLetSTFwsb/PxUzgVi6rBZQdl7dTrlXV1VFT3vPyzy5u\nq7DCjAF8D7gI+FhEtuCNA6iq7pTXZMYYkyfpFUDHXHI54645ky+07Mneh0/ivJiVf+6p7sYABPi0\nqr5WuEjtPt/GAExOxKHOThwy9kWJRIJt5Yfwm3/8iunDvhzLef/pcjUGoMC9OUtljDEOWrRyGeN3\nPSrqGAUXZgxgmYgcmvckMeFif5+LmcDNXJYpnGLLtPm+H1Nz9KU9eq2L2yqsMA3AF4AnRaRBRFaI\nyAsisiLfwYzJpTgc1cchY1+0adOONDfDFw7tF3WUggtTC6g003pVbc5LovafbWMAxpi8+q//uo2P\nPjqJu+6Kb+2fdL0eAxCRI6FtR1+iqs2pG1AktfKMMX1dY+Nojjwy6hTR6KoLaFbg/u1pj12ehyyx\n4GJ/n4uZwM1climcYsq0evVojurF+K+L2yqsrhoA6eR+pmVjjImV5sZGph9zEgdsnsLtV06jubEx\n6kgF1+kYgIgsU9WD0+9nWu70zUUGAI8C2+OddHabqiZEZBjeNYZLgSbgZFV9P8PrbQzAGJNzqQqg\nqSJwLcAlI0cyevp0xk+YAEBZWRllZWVRxuyxXJwHMEZE7hKRuwP3U8uhTpHz6wZNVtWDgHHAf4jI\nBOCHwIOqug/wMDAzzPsZ01NxqLMTh4x9RWcVQFtWrKCiooKKiorY7vyz0VUDcBzwC7yxgNT91PLx\nYT9AVTf7dwfgHQWo/36pK4zNyeb9ouZif5+LmcCtXFYLKDsub6dcaF2zpm3nnzIIaF27Nuv3cnFb\nhdVpLSBVfSQXH+CXlF4KlONVFn1WRIar6nr/c9aJyO65+CxjjMkkvfbPW/360QLtGoFiqQAa1O15\nADn7IJGdgDuA84HHVHWXwGPvqOquGV5jYwAmJ+JQZycOGfuCRCJB1WmncfHB47hpwwdtYwA15eWc\nt2hRnygCl8trAueEqn4gIknga8D61FGAiIwA3uzsdVVVVW19cUOHDmXcuHFUVFQAnxx62bIt94Vl\nSJJMupOnry4D7FU2mr+NOplTPvMiu7z9BuWTJjF+6lQam5vbGgBX8oZZTiaT1NfXA2Q3dqGqebsB\nnwJ29u8PxJsR9HXgKuBSf/2lwJWdvF5ds3jx4qgjdOBiJlW3cqX+lFzKlJLK5NKfu8vbqbdqa2t1\n2TLVz3xGtbXVW3YhVy75+85u99GdHgH4s306PSBV1WNDtC97AHP8cYAS4E+q+jcReQq4VUTOAJqB\nk0O8lzE9Foez++OQsa/461/huOO8brdi1tV5AEf4d08ARgCpqyN/E1ivqhfmPZyNARhjciyRSHDH\nHTX8
5jdw+OF9p/5PUK/HANSfBSQiv1DV8YGH7haRf+QgozHGFNx77+3M2rVw2GFRJ4lemHLQg0Rk\nTGpBREZDhym0RSM4kOQKFzOBm7ksUzh9MVNzYyOJadNouvkmDh4yjX+9lpvSDy5uq7DCzAK6EEiK\nyGq8GkCleNcJNsaYWOhQ+uHdFdRMeYrzFi2KOlqkQp0H4Nf0GesvvqxeiYe8szEAY0wuJKZNY8b8\n+R1O/Lr4qKOYMG1a29TJONf/CcrZeQAisiNwEVCqqmeKyN4iso+q3pOLoMYUQm2t+7V24pAxToJn\n/65ZujRj6YfdWlupqqoqcDJ3hBkDuAn4N5AaMlkD/DRviRznYn+fi5nArVxWCyg7Lm+nsMrKyqjw\nC7u9s912tKQ9nqvSDy5uq7DCNADlqno1sBXairsV+exZY0yc7DV5Mv8z6tNtjUCq9ENVXV2UsSIX\n5prATwBHAUtU9WARKQcWquqEvIezMQCTI3GosxOHjHGVSCRYtB62PXYre2/aTPmkSVTV1fWJuj+Z\n5OJ6ACk1wN+BT4vIfOAh4JJe5jPGmILZtk1YfWcNN9z6AuVVVdTMm9dnd/7Z6LIBEBEBXsY7G7gK\nWAiMV9Vk3pM5ysX+PhczgZu5LFM4fS3T6tXl7LUX7LdvmO+82XFxW4XV5dbw+1/+pqrvqOq9qnqP\nqr5doGzG5EwczvSPQ8a4SZ389fYd11DeWpzX/e1KmDGAOcBvVPXZwkRq99k2BmCM6ZFM1/2tKS9n\n29Sp/Oqaa6KOl1dhxwDCNAAvA5/Bq9rZgjcDSFX1wFwE7eazrQEwxoSSftWvZ+bN4+cPPdTh5K/T\nDjiA21esiCJiweRyEPireJdzPBI4BvhP/9+i5GJ/n4uZwM1climcOGYKzvtvbm5mt23bMp78tevW\nrSSTSZLJZFuDkc9cLuv2TGBVbQbwr9u7Q94TGWNMDpSMGpXxur+jDjkkcBW24hamC+hY4BfASLxL\nN5YCq1R1/7yHsy4gY0wPpK77e9FB+zP3/Q/75HV/u5LLLqA6YCLwf6o6Gu+ksKd6mc+YgopDjZ04\nZIyTp1//gDtPHswVp5zKaWVlzKqsLIqdfzbCNABbVfUdoERESlR1MTC+uxf1VS7297mYCdzKZbWA\nsuPydgpr5l2/4CulF/HTWxbm9eQvF7dVWGGuB7BBRAbjXdB9voi8CR3qKhljTOSaGxupr67m5Uee\noN+QN7jspiejjuS0MGMAg4AteNM/K4Gdgfn+UUF+w9kYgMmRONTZiUNGl3U27/+8RYuonzu3z133\ntys5ux6Aqga/7c/pVSpjjMmh4Nz/G2fO5Dp/5w/e7J9EQwOzqqth772jiui0bscARGSjiHzg37aI\nyDYR+aAQ4VzkYn+fi5nAzVyWKZy4ZArO/Wfduozz/lvXri14rrgIcwQwJHXfLw53HN6sIGNiIw5H\n/3HI6LKWIUMyzvt/q6SECaWlbTvqvnLZx1wIdU3gDi8SeU5VD8pDnvTPsTEAY0woF06fzvs338u1\n73UcAyi2qZ+5vCbwCYHFErwpoFt6kc0YY3Jv4DDmH74fQ3UCb698kvJJkzivD1/0JRfCnAdwTOD2\nVWAjXjdQUXKxv8/FTOBmLssUTpwypUo+v1j/e/Z/dSXTf/2zgl70xcVtFVaYMYDTCxHEGGOyFZz6\nWQO0rIeaKVPYNnVq1NFiIcx5AL/u6nFVPT+nidp/to0BGGM6lZg2jRnz5xdlyeeu5LIW0A7AwcCr\n/m0csD2w1L8Z47w41NmJQ0YXNDU1tZVzXrN0acapn4M2bowiWuyEaQAOBCpU9VpVvRavGNw4VZ2j\nqkV3YpiL/X0uZgK3clktoOy4vJ2Cc//f2W67DnVpWgBGjMhpzf8wueIoTC2gYcBOwLv+8mB/nTHG\nRGqvyZM5/eXN3LS1/dTPugULbPZPCGHGAE4HaoHFePWAvgzUFuLbv4
0BmFyJQ52dOGR0zbRp83lu\n6Rf5r4OrWf3EEsonTaLKpn7m7prA/puNAL4AKPCMqq7rfcTuWQNgciUOO9c4ZHRFqurnktv/yR4H\nfYafzK8ruoJvXen1ILCIlIrIzgD+Dv8DvP7/b4nI9jlLGjMu9ve5mAnczGWZwnE5U2rq54z583lg\ny9P87sn5XDtlChvefbfrN8hzrjjqahD4VvyyGiIyDvgz8BrweeB3+Y9mTO7E4YthHDK6oL66uq3k\nM3xS9fO1xYujjBVLnXYBicgKVT3Qvz8LaFXVS0SkBFieeiyv4awLyBiTpmbyZBIZvnWfNXYs37ru\nOsAKvuWiFlDwxUcCMwFUtdUrChoqxJ7AXGA40Ar8UVV/LSLDgD/hXWC+CThZVd8P9abGmKISrPnf\n1NTEW/36Zaz6OeqQQ7yy0Ca0rrqAHhaRW0XkGrxpnw8DiMgewL9Dvv/HwEWquj9wGHCuiIwFfgg8\nqKr7+O87s6c/QKG52N/nYiZwM5dlCselTKl5/wDNzc1Mu+KXVA4taZv/n5r6WVVXF0k+l7ZVtro6\nArgAOAXYAzhcVbf660cAl4V5c3/weJ1/f5OIrAL2xCsmd4T/tDlAEq9RMMaYLv1s0f2sOPE/mbVl\nCA1LlljVz17I6noAIvKfqnpPjz5IpAxvR/854HVVHRZ47F1V3SXDa2wMwBjTZmZ1LVd/9EcWTL2X\nU44YRyKRsKmfGeSyFlDQT3oYZjBwGzBdVTfhnU8QZHt5k1dxqLMTh4xRaSv5/PsFfO7OHZi4185R\nR+oTwpSCCAo3+ht8gUh/vJ3/zar6V3/1ehEZrqrr/ZPM3uzs9VVVVW2j+UOHDmXcuHFt/YGpvrdC\nLi9fvpwLLrggss/PtJxa50qe1PLs2bMj/32llhMJqKhw+/eXSCSpqIg+T/BvKarPDy6PLi3l2ilT\n2K2hgYuAQ9/ySj6Pr6ujsbGxLWfUv7+of1/19fUA2c1+UtXQN2BCNs/3XzMX+GXauquAS/37lwJX\ndvJadc3ixYujjtCBi5lU3cqV+lNyKVNKKpNLf+4ubafaykrdBLrYO1FaFXQTaG1lpdbW1kYdz6lt\nleLvO7vdP4ctBfFFoIzAEYOqzg3xuknAo8ALeN08CvwIeAbvRLNPA81400A3ZHi9hslnTHfiUGYh\nDhmj0Nm8//MPOoiDzz+/7Rtvsc/9D8rlNYFvBsqB5cA2f7XifbPvkqouAfp18vDR3b3eGFOcgnP/\nG7dsyTjvf9f99qOqqqrw4fqQMIPA44FJqnqOqp7n3/J2FTDXJTN8E4mai5nAzVyWKZyoMwVr/u86\nYQJnDhzDff5jUc/7Txf1tuqNMIPAK/Hm/r+R5yzG5E0cZgrGIWMU1rQO5s5vDKX/G0ezsPGfNu8/\nh8JcD2Ax3mUgnwE+Sq1X1WPzG83GAIwpZqmSz8mH7qN1972Ye+dfrORzSDkbA8C7GIwxxhRMquRz\noqGBGqAapdvRAAAU9ElEQVRl3bvUTJnCtqlTo47Wp3Q7BqCqj2S6FSKci1zs73MxE7iZyzKFE3Wm\nTCWfj3K05HPU26o3wswCmghcC+wLbI83q6dFVXfKczZjTJFqXbOm3awfgIHArlu3tu1wbdpn74UZ\nA/gHcCreBWHGA6cBn1XVvFfwtDEAY4pDesnnZ+bN4+cPPdRh6uesykpq5s2LImKs5LQWkKr+E+in\nqttU9Sbga70NaEwhxaHOThwy5ktw2mdzczPTav/IaSXlzpR87qvCNACb/WsALxeRq0XkwpCv65Nc\n7O9zMRO4lSuR8P51KVNKKlMqowui3k5zbh7NiLMWMauyktPKyphVWcl4R6d+Rr2teiPMjvzb/vN+\ngNcQfxo4MZ+hjDHFJ1Xx85U/zmNJ/TTOPBNq5s2jvKqKmnnzGLHHHlFH7HPC1gIaCOylqq/kP1K7\nz7UxAJMTcaizE4eM+RKc9jmIT7
p8zlu0yOb+90DOxgBE5Bi8OkB/95fHichdvY9ojDGeTNM+Ew0N\n1FdXRxmrzwvTBVQLTAA2AKjqcsC9jrgCcbG/z8VM4GYuyxROoTNlmvY5CHjnpZcoLS0lmUwye/bs\ntplCLnHx9xdWmDOBt6rq+yLtjiaK9EDVxFUcehDikDGXelLx0+b951aY8wBuAB7Cu2j7icD5wHaq\n+v28h7MxAGOKwoXTp/P6nOuZ8/7mDmMALs78cV0uawGdB1yGVwhuIXA/YJNxjTE50/jxIO4+eTCf\nff8Y/vXM01bxs0DC1ALarKqXqeqhqjrev7+lEOFc5GJ/n4uZwM1climcQmVKTf3ccMdvOXJpKd+7\n8oq2aZ/pO38XtxO4myuMTo8AupvpU4hy0MaYvqtDxc83nrWKnwXW6RiAiLwFvI7X7fM00H4UuAAV\nQW0MwJi+KzFtGjPmz+8w8Hv2xImcccUVgBV866lcnAcwAu8C7p8DrgGmAG8XezloE09xqLMTh4y5\n1NnUz9EDB7bVBbKdf3512gD4hd/+rqrfASYC/wSSIvKDgqVzkIv9fS5mArdyWS2g7ORjOzU1NZFM\nJkkmk9TX17NO+rcVe0tpAUpGjixYplxwNVcYXQ4Ci8gAETkBmAecC/wauKMQwYwxfUt6xc+dDv4D\n3xtkFT+j1NUYwFy87p+/Abeo6spCBvMz2BiAyYk41NmJQ8beSl3nd9Wjj/P8tkH8ZNZ1vHTvH2hY\nsoTySZOosqmfORF2DKCrBqAV2hrn4JME0EJcEcwaAJMrcdi5xiFjb1jBt8Lp9SCwqpao6hD/tlPg\nNqSYLwfpYn+fi5nAzVyWKZx8ZOqs4NtVZ57ZVu8nmUx2Wu/Hxe0E7uYKI8yZwMbEXhy+XMYhY7aC\n9X7WLF2acdbPbq2t7er9mMIJdT2AqFgXkDF9x4kHHsjcF16w6/wWQE6vCWyMMT2VKvcg6zZxTL/+\nrPLX26yf6FkDkCUX+/tczARu5rJM4eQqU2rgd8b8+dz2ViN3b/uYnw8ezImjRjGrsjKrap8ubidw\nN1cY1gAYY/Im08DvtZs2wS67ZCz4ZgrLxgCMMTkTHPRtampi2TXX8Ovlyzs876yxY/nWddcBVu8n\nH2wMwJiAONTZiUPG7pSVlTG6tJRHrr+ehxMJGt/c0Nbnn9ICjDrkEKv34wBrALLkYn+fi5nArVxW\nCyg7Pd1OwT7/uU1N3LK2iStKcjPw6+LvDtzNFYadB2CMyZlMff7XtX7MaWVlDAK70pdjbAzAFIU4\nlFmIQ8ZMgv3+C84+mz+8/HKH55x/0EEcfP75bd091u+fX7m8JrAxxnSqrKwMUaW+upq3X3+dauC7\nQKn/eAuw63772dm+DsrrGICI3CAi60VkRWDdMBF5QEReEZH7RWTnfGbINRf7+1zMBG7mskzhZJMp\n2O//l5YWfoh3Balmcnuyl4vbCdzNFUa+jwBuAq4F5gbW/RB4UFWvFpFLgZn+OmPyJg51duKQETpO\n9Xxm3jx+ntbvXwd8Y9AgJh1/vPX5OyzvYwAiUgrcraoH+ssvA0eo6noRGQEkVXVsJ6+1MQBjHJSq\n69+wZAmbNm/mV2++2dblk2Jz/aPj8hjA7qq6HkBV14nI7hFkMMb0UKa6/tXAdNr3+6fm+ht3uTAI\n3OVX/KqqqrZvDkOHDmXcuHFtf1SpvrdCLi9fvpwLLrggss/PtJxa50qe1PLs2bMj/32lL9vvL9xy\nerampibuvPNOAJb96U9c19DAs6nH8bp8vg/8N3AoXr//+KlTSSaTffrvKbiNov591dfXA2R3pKWq\neb3hfSlYEVheBQz3748AVnXxWnXN4sWLo47QgYuZVN3MZZnCyZSpafVqra2s1JMGDNBa0CZv1mrb\n7dvDh+u3y8q0trJSm1avLkgmF7iYy993drt/LsQYQBneGMAB/vJVwLuqepU/CDxMVTMOAtsYgDFu
\nyHg5R+A8vG94LcDFRx3FhGnTbK6/A3p9TeAchViAd4S4K7Ae72/mTuDPwKfxZoqdrKobOnm9NQAm\nJ2pr3a+141LGpqYmnnrySR694QY+XrOGNe+9x6z169k38JwWYBYwg0+u7WuzfdwQtgHIexdQb25Y\nF1AoLmZSdStX6k/JpUwpqUwu/bkvXLBA/6e8XDf53TubQP8nQ7fPSQMG5K3LJ52LvztVN3MRsgvI\nisEZYzr4+403dryAO1AfeE4LMPCggzjiu9+lsbm504u5G3dZLSBTFOJQZyfqjMFun3eeeIL9PvyQ\nKmg3v/9y4Kd8coavdfu4yeXzAIwxDhJV/lFd3XZWb6aB3ueGD+esYcPoP2oUX/7v/0al+25m4y7r\nAspScO6vK1zMBG7mskztNTU1ccvChZxz9NHM+PznGdjQwNtAkvbdPqlv/L978kn+sGoVv3vwQU79\n5jcLOsvHxd8duJsrDDsCMEUhDnV2osjY2bf+8f7jg4CVAwYw66STrKZPH2RjAMYUkWymd9b498+e\nOJEzrrgCsLn9cWFjAMYYoP1O/8PGRt584w1mffgh+9Kxnx+8b/2tfNLtU7dggX3z76NsDCBLLvb3\nuZgJ3MxVLJmaGxtJTJtGzeTJ/Hb6dB6+9FJ+/tBD3LR6Nbd++CE34J2F2dn0zvt23JGLjzqK8XV1\nzgz0uvi7A3dzhWFHAMb0EakSze++9BL/fPnldt/yq4G38Xb4qZ1+qptnELDVf4/Ut/6L6uo49Zvf\nLPwPYQrKxgCMianUDr91zRre7t+fjc8/z3VvvdXpFM7UDj+lBq8haAFOHj6cUYHpnRMPO8z6+mPM\nxgCMCXCpzk5nwmTszbf81sD7tOD1/7YAl4wcycxbbuHwL30ptz+QcZ4dAWQpWN/cFS5mArdypc6y\ndSlTSipTKmPwm/3mnXbiYxF2ev/9Xn/LvxKvbn8LcN7gwez0uc8xzL9eb/ogr8vbyTUu5rIjAGMc\nEtypl4waxdFnncWDf/gDrWvW8PK//81fd9sNuJNzp0zpsJOvBs4APkXPv+VfNHw4H44Zw1nvvUf/\nUaP4inXzGOwIIJSu/vMGv6EF73f1PJceczVXrjPXzp/HjGOPiyRz62uvdeiuObukhJmtre26b36F\nsgnJeHnF4Lz8sN/yzxk4kK177MHQ0aOtX7/IWDnoHHns0Uf1nJEj25XF/XZJib4UWL7QL5Obfr+r\n57n0mKu5cpkZNPLMwXLKm0Br+aSs8iY/Y6bHFPTHndxPPXcT6FnDh+u3DztMzxw7Vs8+6ihduGCB\nNjY2Rv1fyETA33d2v48N86Sobi40ALWVlW07fwVd3Ml/3tpu7ufzse/k8D1dzdXbzPh/7n+L4PfT\n2WOpHfli/99Uxs528qn7lwfuTxswQE8pK8v5Dt/FGvcuZlJ1M1fYBsDGALrRumZNW030lPS+1uBy\nZ/fz+Zj28HX5zpzLXL3NXEMtAANz/Nk9fSzVP09g+Qg/Y0va81JdQqm+fA44gJpt2ygZOZKfWn0e\n0wvWAHRj88470wJtjUAFmf/zlnRzP5+Pjcnhe7qaq7eZa0gAcCjwSIFydfZYcAwglakamEOiw05+\n85AhqAg3fvABJSNH8qMC7fBdm9UCbmYCd3OFEuYwIaqbFy9aTatXd7g03nf69w/VN93V8wr9WLGP\nAbjw2OmDB+v0iRO1trJSH3/kEa2trNQfT56sM449Vi847jj98eTJBbu8ounb/H1nt/tYmwUUQtss\noLVrae7XjzOrq72ZHmvXsnnIEG8WyAcftLtfMnLkJzNC0p6X68febW5mzP779+g9Xc2Vr8yN77zD\n/ttvX9DfT+qxTPPtwc155JYpPBdz2SygPHFxwMfFTKpu5rJM4Vim8FzMhR0BGGNMcQp7BGDloE1R\ncL0OEMQjo+lbrAHIkou1v13MBG7lSniTgJzKlJLKlMroApe3
k2tczRWGNQDGGFOkbAzAFIVUpU2X\nxSGjiQcbAzDGGNMlawCy5GJ/n4uZwM1clikcyxSeq7nCsAbAFIWamqgTdC8OGU3fYmMAxhjTx9gY\ngDHGmC5ZA5AlF/v7XMwEbuayTOFYpvBczRWGNQDGGFOkbAzAGGP6GBsDMCYgDnV24pDR9C2RNQAi\n8jUReVlE/k9ELo0qR7Zc7O9zMRO4lctqAWXH5e3kGldzhRFJAyAiJcBvgK8C+wPfFJGxUWTJ1vLl\ny6OO0IGLmcDNXJYpHMsUnqu5wojqCGAC8KqqNqvqVuAW4LiIsmRlw4YNUUfowMVM4GYuyxSOZQrP\n1VxhRNUAjAJeDyz/y19njDGmQGwQOEtNTU1RR+jAxUzgZi7LFI5lCs/VXGFEMg1URCYCtar6NX/5\nh3jXsLwq7Xk2B9QYY3ogzDTQqBqAfsArwFHAG8AzwDdVdVXBwxhjTJHqH8WHquo2EfkB8ABeN9QN\ntvM3xpjCcvpMYGOMMfnj/CCwiNwiIsv8W6OILIs6E4CInCciq0TkBRG50oE8NSLyr8C2+lrUmVJE\n5H9EpFVEdnEgy09E5HkReU5E/i4iI6LOBCAiV/t/T8tF5HYR2cmBTCeJyEoR2SYiB0ecxakTR0Xk\nBhFZLyIros6SIiJ7isjDIvKiv186v9vXxOkIQERmARtU9acR56gAfgR8XVU/FpFPqerbEWeqATaq\n6i+jzJFORPYErgf2AQ5R1XcjzjNYVTf5988D9lPVs6PM5Gc5GnhYVVv9LxSqqjMjzrQP0Ar8Hpih\nqpF8+fJPHP0/vDHDtcCzwKmq+nIUefxMhwObgLmqemBUOYL8LzMjVHW5iAwGlgLHdbWdnD8CSHMy\nsDDqEMDZwJWq+jFA1Dv/gG5H/SPwK+DiqEOkpHb+vkF4O7jIqeqDqprK8hSwZ5R5AFT1FVV9lej/\nrpw7cVRVHwfeizJDOlVdp6rL/fubgFV0c35VbBoAEfkSsE5VG6LOAnwW+LKIPCUii0VkfNSBfD/w\nuxCuF5Gdow4jIscCr6vqC1FnCRKRn4rIa8C3gB9HnSeDM4D7og7hEDtxNEsiUgaMA57u6nmRzAJK\nJyKLgOHBVYACl6nq3f66b1LAb/9dZLocb7sNU9WJInIocCswJsJMlwG/A36iqioiPwV+Cfx3hJku\nx+smm5L2WN519/ekqpcDl/t9yecBtS7k8p9zGbBVVRe4ksnEi9/9cxswPe2ItwMnGgBVndLV4/55\nAycABRuI6iqTiHwf+Iv/vGf9Ac5dVfWdqDKl+SNQkP+8nWUSkc8BZcDzIiJ4XRpLRWSCqr4ZRaYM\nFgB/o0ANQIi/8yrg68CRhcgDWW2rKK0B9gos7+mvM2lEpD/ezv9mVf1rd8+PSxfQFGCVqq6NOojv\nTvz/pCLyWWC7fO/8u5M2m+UEYGVUWQBUdaWqjlDVMao6Gu+w/aB87/y7IyKfCSwej9dPGjl/1tbF\nwLGq+lHUeTKIchzgWeAzIlIqItsDpwJ3RZgnRYh+fCTdjcBLqnpNmCfHpQE4BTcGf1NuAsaIyAt4\n3yJPizgPwNUiskJElgNHABdGHSiN4sZ/lisD2+loYHrUgXzXAoOBRf403t9FHUhEjheR14GJwD0i\nEsm4hKpuA1Injr4I3BL1iaMisgB4AvisiLwmIqdHmcfPNAmoBI70pzl3Ox08VtNAjTHG5E5cjgCM\nMcbkmDUAxhhTpKwBMMaYImUNgDHGFClrAIwxpkhZA2CMMUXKGgATayKysZev/7NfNwW/3HhOS1b7\ntaK6PYM9zGeLyCIXajyZvsMaABN3PT6RRUT2A0pUtam375UDYT57LnBuvoOY4mENgOkzROTn/oUw\nnheRk/11IiK/E5GXROR+EblXRE7wX1IJBOuldDhTWUQOFZEnRGSpiDwuInv7678jIneIyAMislpE\nzhWRC/2zL58QkaGBtznN
PzNzhV88EBHZxc/zgoj8MfjZ/vs+6z/23cD73I1XFNGYnLAGwPQJInIi\ncKCqHoBXO+rnIjIcry7SXqq6H17JjsMCL5uEd9GMrqwCDlfVQ4Aa4IrAY/vj1ROaAPwM2KSqB+PV\n8w+WBxmoqgfhfXu/0V9XAzzm572D9sXOTlfVQ4FDgekiMgxAVTcA26eWjektJ6qBGpMDk/DrRanq\nmyKSxNsxHw782V+/XkQWB16zB/BWN+87FJjrf/NX2v+fWayqm4HNIrIBuMdf/wJwQOB5qVyPicgQ\nvx//y8A3/PV/E5HgxUUuEJHj/ft7AnsDz/jLbwEjcexiJCae7AjA9FWpuvZd+RDYoZvn1OFdqvEA\n4Ji05werdmpguZX2DUV6jkxXIRMAETkCr9LsF1R1HLA87TN38HMb02vWAJi4S/WdPwacIiIlIrIb\n8CW8b81LgJP8sYDhQEXgtauAYHno4Pul7MQnted7WvHxFGi7juz7qroReBRvDAIR+Q+8Iw2AnYH3\nVPUjERmLV4kzaDjQ1MMcxrRjXUAm7hRAVe8QkYnA83jfsC/2u4Jux/tG/SLeZQWXAu/7r70XmAw8\nHHiv50VE/fu3AlfjdQFd7j+/yxydrN8iIsvw/r+lGpEEsFBETsUrK/yav/7vwPdF5EXgFeDJ1BuJ\nyCHAU4FrBxvTK1YO2vR5IjJIVVv8efZPA5P8xmEHvJ3/JI3BfwQRmQ38VVUXd/tkY0KwIwBTDO7x\np2Vuh3fd5DcBVHWLiNTgXWD8X1EGDOkF2/mbXLIjAGOMKVI2CGyMMUXKGgBjjClS1gAYY0yRsgbA\nGGOKlDUAxhhTpKwBMMaYIvX/AcB+Ek/d2JNlAAAAAElFTkSuQmCC\n", + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "cvglmnetPlot(cvmfit)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "To show explicitly the selected optimal values of $\\lambda$, type" + ] + }, + { + "cell_type": "code", + "execution_count": 26, + "metadata": { + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } + }, + "outputs": [ + { + "data": { + "text/plain": [ + "array([ 0.04731812])" + ] + }, + "execution_count": 26, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "cvmfit['lambda_min']" + ] + }, + { + "cell_type": "code", + "execution_count": 27, + "metadata": { + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } + }, + "outputs": [ + { + "data": { + "text/plain": [ + "array([ 0.1445027])" + ] + }, + "execution_count": 27, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "cvmfit['lambda_1se']" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "As before, the first one is the value at 
which the minimal mean squared error is achieved and the second is for the most regularized model whose mean squared error is within one standard error of the minimal.\n", + "\n", + "Prediction for `cvglmnet` object works almost the same as for `glmnet` object. We omit the details here." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Logistic Regression\n", + "\n", + "Logistic regression is another widely-used model when the response is categorical. If there are two possible outcomes, we use the binomial distribution, else we use the multinomial.\n", + "\n", + "### Logistic Regression: Binomial Models\n", + "\n", + "For the binomial model, suppose the response variable takes value in $\mathcal{G}=\{1,2\}$. Denote $y_i = I(g_i=1)$. We model\n", + "\n", + "$$\n", + "\mbox{Pr}(G=2|X=x)=\frac{e^{\beta_0+\beta^Tx}}{1+e^{\beta_0+\beta^Tx}},\n", + "$$\n", + "\n", + "which can be written in the following form\n", + "\n", + "$$\n", + "\log\frac{\mbox{Pr}(G=2|X=x)}{\mbox{Pr}(G=1|X=x)}=\beta_0+\beta^Tx,\n", + "$$\n", + "\n", + "the so-called \"logistic\" or log-odds transformation.\n", + "\n", + "The objective function for the penalized logistic regression uses the negative binomial log-likelihood, and is\n", + "\n", + "$$\n", + "\min_{(\beta_0, \beta) \in \mathbb{R}^{p+1}} -\left[\frac{1}{N} \sum_{i=1}^N y_i \cdot (\beta_0 + x_i^T \beta) - \log (1+e^{(\beta_0+x_i^T \beta)})\right] + \lambda \big[ (1-\alpha)||\beta||_2^2/2 + \alpha||\beta||_1\big].\n", + "$$\n", + "\n", + "Logistic regression is often plagued with degeneracies when $p > N$ and exhibits wild behavior even when $N$ is close to $p$;\n", + "the elastic-net penalty alleviates these issues, and regularizes and selects variables as well.\n", + "\n", + "Our algorithm uses a quadratic approximation to the log-likelihood, and then coordinate descent on the resulting penalized weighted least-squares problem. 
These constitute an outer and inner loop.\n", + "\n", + "For illustration purposes, we load pre-generated input matrix `x` and the response vector `y` from the data file." + ] + }, + { + "cell_type": "code", + "execution_count": 28, + "metadata": { + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } + }, + "outputs": [], + "source": [ + "# Import relevant modules and setup for calling glmnet\n", + "%reset -f\n", + "%matplotlib inline\n", + "\n", + "import sys\n", + "sys.path.append('../test')\n", + "sys.path.append('../lib')\n", + "import numpy as np\n", + "import importlib, pprint, matplotlib.pyplot as plt, warnings\n", + "from glmnet import glmnet; from glmnetPlot import glmnetPlot \n", + "from glmnetPrint import glmnetPrint; from glmnetCoef import glmnetCoef; from glmnetPredict import glmnetPredict\n", + "from cvglmnet import cvglmnet; from cvglmnetCoef import cvglmnetCoef\n", + "from cvglmnetPlot import cvglmnetPlot; from cvglmnetPredict import cvglmnetPredict\n", + "\n", + "# parameters\n", + "baseDataDir= '../data/'\n", + "\n", + "# load data\n", + "x = np.loadtxt(baseDataDir + 'BinomialExampleX.dat', dtype = np.float64, delimiter = ',')\n", + "y = np.loadtxt(baseDataDir + 'BinomialExampleY.dat', dtype = np.float64)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The input matrix $x$ is the same as other families. For binomial logistic regression, the response variable $y$ should be either a factor with two levels, or a two-column matrix of counts or proportions.\n", + "\n", + "Other optional arguments of `glmnet` for binomial regression are almost the same as those for the Gaussian family. Don't forget to set `family` option to \"binomial\"." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 29, + "metadata": { + "collapsed": true, + "jupyter": { + "outputs_hidden": true + } + }, + "outputs": [], + "source": [ + "fit = glmnet(x = x.copy(), y = y.copy(), family = 'binomial')" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Like before, we can print and plot the fitted object, extract the coefficients at specific $\\lambda$'s and also make predictions. For plotting, the optional arguments such as `xvar` and `label` are similar to the Gaussian. We plot against the deviance explained and show the labels." + ] + }, + { + "cell_type": "code", + "execution_count": 30, + "metadata": { + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } + }, + "outputs": [ + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAZEAAAElCAYAAAAlet80AAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzsnXmcXUWV+L/n3vvWfr2mkzRpSELCDkJAwioaREccEBWV\nQdyijOi4j86Cjg7+XEZ0RgQdGUVBREUUFEVRFglhUUAMSQiQkBCyJ93p7vT21rud3x/3dr9OZ+uE\nhO501/fzqU8tt27deqdf3/OqTtUpUVUMBoPBYNgXrNHugMFgMBgOXowSMRgMBsM+Y5SIwWAwGPYZ\no0QMBoPBsM8YJWIwGAyGfcYoEYPBYDDsM0aJGMYcIhKIyFMi8oyILBaRT4uIjHa/9gUR+YSIPCci\nPxlW/hoR6Yk/52IRue8APf9HInLxgWjbYABwRrsDBsNOKKjqKQAi0gz8HKgDvvhSGxYRS1XDl9rO\nXvBPwHmqunkn1x5W1Yt2daOI2KoaHLiuGQwvHTMSMYxpVLUTuAL4GERKQES+ISJPiMgSEflgXC4i\ncn38q/9eEbl74Be4iKwRkatF5G/A20Vkloj8UUSeFJGHROSouF6ziNwRt/2EiJwZl78mHi08JSKL\nRKRmeD/j0dIyEXlaRD4Rl/0fMAv4o4h8cicfb4fRVTxy+D8ReRz4uohkReRGEXk8fvZFu5NDfO1/\nRWR5PLqZMqT8vPgzLBWRH4pIYoh8/iv+jH8VkZNF5B4RWSUiH9r7v5phQqGqJpgwpgLQt5OybcBk\n4IPA5+KyJPAkMAN4G/D7uHxqXP/iOL8G+Jchbf0JmB2nTwMeiNM/A86K04cBz8Xpu4Az43QWsIb1\n7RRgKZAGaoBngJPiay8CjTv5PK8BeoCn4vDZuPxHwF1D6n0VuCxO1wPPA5ndyOGtwL1x+SFAN3Ax\nkALWD/ncPwY+MUQ+V8Tpa4Al8edsBtpG+/tgwtgOZjrLcLDxd8ArROQdcb4OOBJ4FXA7gKq2i8iD\nw+77BUA8ijgLuH2InSURx68Djh1SnhORLPBn4Fsi8jPg16q6aVjbrwLuVNVy/IxfA+cQKRZhJyOO\nmF1NZ90+7PO+SUT+Nc4ngem7kcOriab/UNUtIvJAfP1o4EVVXR3nfwx8BPh2nP9dHC8DalS1CBRF\npCwidarat4vPYJjgGCViGPOIyCwgUN
WO+AX/cVW9f1idC/bQTCGOLaBbY5vL8EcBp6uqN6z86yLy\ne+AC4M8i8nequnLvP8mIKQzLv01VV23X0ZHLQXaRHk4ljsMhaQDFvCcMu8HYRAxjkcGXnYhMBv4P\n+E5cdC/wERFx4utHDhktvD22jUwF5u2sYVXtB9aIyNuHPOPEOHkf8Mkh5SfF8SxVfVZVv0E0bXTM\nsGYfAd4iIul4pPNW4OF9+uQ7ci/wiSF9mjOkfGdyeBj4h9hmcghwblz/eWBGrJAB3gMs3E99NExg\nzC8Mw1gkLSJPEU3deMAtqvqt+NoPgZnAU/Gv8a3AW4BfAa8FngU2AIuA3vie4a6q3wV8T0Q+T/Q/\ncBvwNJEC+a6ILAVsohfyR4BPici5QBC3/8ehjanqYhG5mUjBKHCDqj69i2fvieH1vwJcKyJPEynX\nNcBFu5KDqt4pIgNyWA/8Je5jRUTeD9whInbc1++PoI/Gzbdht4iq+Y4YxgciUqOqBRFpAp4AzlbV\nraPdL4NhPGNGIobxxO9FpIHIUP4lo0AMhgOPGYkYDAaDYZ8xhnWDwWAw7DNGiRgMBoNhnzlolYiI\nnC8iK0RkpYj8+2j3Z7QQkaOGuORYLCK9A243JgKxS5D2ePXSQNk3YrcfS0TkVyJSN5p9fLnYmSzi\n8o/H8lgmIlePVv9eTkTkUBFZICLPxp976DLpCSUPEUnF7nEWx5/5qri8UUTuE5HnJXIVVL9P7R+M\nNhERsYCVwHnAZqLlipeq6opR7dgoE8tlI9GGuQ2j3Z+XAxF5FZAnWgZ8Ylz2OmCBqobxS0JV9bOj\n2c+Xg13IYh7wOeDvVdUXkWaN/JGNa0SkBWhR1SUikiNa8v1moIWJKY+sqhbj5d1/Jtp79DagS1W/\nEf8Qb1TVK/e27YN1JHIasEpV18W7i28j+oJMdF4HrJ4oCgRAVR8l8g81tOxPWvXU+zhw6MvesVFg\nZ7Ig8iJ8tar6cZ1x/8IEUNU2VV0Sp/PAcqCViSuPYpxMEa3KVaJ35o/j8h8T7bfaaw5WJdJKtKFs\ngI1x2UTnH4j9JhkG+QDDNgdOMI4CXi2RF+AHReTU0e7Qy42IzATmEO0dmpDyiD0YLAbagPtV9Ulg\nqqq2Q6R0GeLxeW8w+0TGCRK59b4I2Ovh6HhFRP4D8FT11tHuyyjiEE1TnCEic4FfErmnnxDEU1l3\nAJ9U1XzsJmbCySMemZ8c2wfvFJHj2dEbwT7ZNg7WkcgmIk+mAxwal01k3ggsUtWO0e7IWEBE5gN/\nD1w2yl0ZbTYAvwaIf32GIjJpdLv08hArjDuAn6jqb+PiCSsPgNgb80LgfKA99jM3YEPap825B6sS\neRI4QkRmiEgSuJTozIeJzDuZuFNZ27lbF5HzgX8FLlLVyi7vGp8Mdz3/GyKfYkh0+FZCVbtGo2Oj\nwE1EZ8JcN6RswslDosPW6uN0Bng9kY3oLmB+XO19wG932sCe2j8YV2fB4IviOiJFeKOqjvulersi\n9t66DpgVe6mdMIjIrUQeeycB7cBVRKtvksDAy+FxVf3IqHTwZWQXsvgJ0UFXc4hcvH9GVR8arT6+\nXIjI2UQONJcRTdMo0ffiASLlMmHkISKvIDKcW3H4hap+NfYx90uiA9jWAZeoas9et3+wKhGDwWAw\njD6jPp21i81iV4nIxngD3VPxqMNgMBgMY4xRVyJEQ+037KT8GlU9JQ73vNydMhgMBsOeGXUlsosN\nUrD7ozwNBoPBMAYYdSWyGz4W+z764b76dDEYDAbDgWWsKpHriVYazSHaYXnNKPfHYDAYDDthTO5Y\nH7Zh7gfA73ZWT0TM0jKDwWDYB1R1v5gMxspIZPhmsZYh1y4GntnVjapqgipXXXXVqPdhrAQjCyML\nI4vdh/3JqI9Ehm6QEpH1RBukzhWROUAIrAU+NGodPEhYu3btaHdhzGBkUcXIooqRxYFh1JWIqu7M\nt9
GPXvaOGAwGg2GvGSvTWYaXyPz580e7C2MGI4sqRhZVjCwODAe12xMR0YO5/waDwTAaiAg6zgzr\nhpfIwoULR7sLYwYjiypGFlWMLA4MRokYDAaDYZ8x01kGg8EwwTDTWQaDwWAYExglMk4w871VjCyq\nGFlUMbI4MBglYjAYDIZ9xthEDAaDYYJhbCIGg8FgGBMYJTJOMPO9VYwsqhhZVDGyODAYJWIwGAyG\nfcbYRAwGg2GCYWwiBoPBYBgTGCUyTjDzvVWMLKoYWVQxsjgwGCViMBgMhn3G2EQMBoNhgiEiADcB\nFwLtqnrivrZlRiIGg8EwMfkR8IaX2ohRIuMEM99bxciiipFFFSOL7VHVR4Hul9qOUSIGg8Fg2GeM\nTcRgMBgmGAP7RERkBvA7YxMxGAwGw6hglMg4wcz3VjGyqGJkUcXIIuIXz/xiaFbisM8YJWIwGAwT\niJ8u+ykAInIr8BfgKBFZLyLv35f2jE3EYDAYJhDn//R87n3PvcZ3lsFgMBj2nkpQ2a/tGSUyTjDz\nvVWMLKoYWVQxsohwA3e/tmeUiMFgMEwgKv7+HYmMuk1ERG5kmP8WEWkEfgHMANYCl6hq707uNTYR\ng8Fg2AtOuP4Env3os+PKJrIz/y1XAn9S1aOBBcBnX/ZeGQwGwzik7Jf3a3ujrkR24b/lzcCP4/SP\ngbe8rJ06CDHzvVWMLKoYWVQxsogo+aX92t6oK5FdMEVV2wFUtQ2YMsr9MRgMhnFBydu/SmTUbSIA\nw/23iMg2VW0acr1LVSft5D5jEzEYDIa9IP2VNJUvVPabTcTZH40cANpFZKqqtotIC7B1VxXnz5/P\nzJkzAWhoaGDOnDnMmzcPqA5fTd7kTd7kJ3J+4cKF3HzzzYQaUlk3zlZnAYjITKKRyCvi/NeBbar6\ndRH5d6BRVa/cyX1mJBKzcOHCwS/PRMfIooqRRRUjC+ir9NF6TSv5z+XHz+qsXfhvuRp4vYg8D5wX\n5w0Gg8HwEsi7eXLJ3H5tc0yMRPYVMxIxGAyGkbOyayUX3nohqz6xavyMRAwGg8Hw8tBX6aMuVbdf\n2zRKZJwwYEQzGFkMxciiipEF9JR7qE/X79c2jRIxGAyGCUJvuZf61P5VIsYmYjAYDBOEHz71Qx7b\n8Bg3veUmYxMxGAwGw97RVeyiMdO4X9s0SmScYOZ7qxhZVDGyqGJkAV2lLiZnJ+/XNo0SMRgMhglC\nR7GD5mzzfm3T2EQMBoNhgnDBrRfw4Vd+mIuOucjYRAwGg8Gwd7Tl22jJtezXNo0SGSeY+d4qRhZV\njCyqGFnAlv4tTKudtl/bNErEYDAYJgBe4NFZ7GRKTXQ8k4gcKiILRORZEVkmIh+Py78hIstFZImI\n/EpEdrvF3dhEDAaDYQKwvnc9Z954Jps+vQkRATgF+A0wNa6iwGnAt4GBHYmzgLKq7nIObKyeJ2Iw\nGAyG/ci6nnXMbJg5tCgEbKAOSBIdU/5xVT13oIKI/AbY7qbhmOmscYKZ761iZFHFyKLKRJfFmp41\nw5VIBxAANUSjEQHWDbvt74Cbd9euGYkYDAbDBGD1ttXMapg1mFfVzSLyTWA9kAY84LqB6yJyA9FU\n1rW7a9fYRAwGg2ECcNmvLuP8I87nvSe9d8Am0gj8mmhGahawCfiOqt4qIvOBrwHfVNX/2V27ZjrL\nYDAYJgDLO5dzbPOxFL3iQNHrgEOBGcA3geuBs0TkfOBfifTDrXtq1yiRccJEn+8dipFFFSOLKhNZ\nFn7o83zn8xzTfAwn/9vJA8UNwJFADpgPXEtkF/kO0Axkgd+LyPW7a9vYRAwGg2Gc88K2F2jJtVCb\nqmXNPWsGipcTLeutBY4BisAfVPWjIvIj4DFVvWFPbRubiMFgMIxzfr7s5/xq+a+445I7cI5wCFYH\nxneWwWAwGEbGk5uf5JWHvBKAsBICICI3iki7iDw9UE9ErhKRjSLy
VBzO31PbRomMEybyfO9wjCyq\nGFlUmciyeHzj45x+6OkAqDs4e/Mj4A07qX6Nqp4Sh3v21LZRIgaDwTCOKXkllrYv5fTWSIlQiSJV\nfZRol/pw9mqay9hEDAaDYRyzYM0C/mPBf/DY5Y8RaoidsaECqioiMgP4naqeCNF0FtFKrV7gb8Bn\nVLV3d+2bkYjBYDCMY+5ffT/nHX4eAJ35TnB3W/16YJaqzgHagGv21L5RIuOEiTzfOxwjiypGFlUm\nqizuWX0P5x8R2ceXrF0SOTjZBaraMWR65wfA3D21b5SIwWAwjFPW965nQ+8Gzjj0DAAWr16MZLcz\neQhDbCAiMtTl+8XAM3t6hrGJGAwGwzjlW499i2Vbl3HTm28CYP635nPLdbeg6xTg58A8YBLQDlwF\nnAvMIXITvxb4kKq27+4ZZse6wWAwjFNufeZWvnLuVwbzq9avwqq1CAhQ1ct2csuP9vYZY3o6S0TW\nishSEVksIn8d7f6MZSbqfO/OMLKoYmRRZaLJ4rmO59jUt4nzZp03WLZxw0ashv372h/rI5EQmKeq\nO1vLbDAYDIZdcMOiG3j/nPfjWNXXfNeWLmTSfvF2MsiYtomIyBrgVFXt2sV1YxMxGAyGYfRV+jj8\nusN56oqnmNEwY7Dcnm1jv8rGu8Ub2CdyPpH3Xgu4UVW/vrfPGtPTWUQeJu8XkSdF5IOj3RmDwWA4\nGPjBoh/w+lmv306BdJe6CTtCwqZB31kW8L9Erk+OB94pIsfs7bPG+nTW2aq6RUQmEymT5fFW/UHm\nz5/PzJkzAWhoaGDOnDnMmzcPqM6BToT80PnesdCf0cwPlI2V/oxmfsmSJXzqU58aM/0Zzfy11147\nId4Pp519Gt987Jt86fAvsXDhwsH/hav++yrIQ+65HL30ApwGrFLVdQAichvwZmAFe4OqHhSBaPnZ\np4eVqSHiwQcfHO0ujBmMLKoYWVSZKLK4+pGr9W2/eNsO5X//5b9Xe6atr/nRa5RoludtwA1afZ++\nG/i27uW7eczaREQkC1iqmheRGuA+4P+p6n1D6uhY7b/BYDC83LTn2znh/07g0fc/ytHNR293rfnC\nZijBWz75Fm58840AVwBfBHqIFjEtBnpU9RMAIvIZ4L+BZlXdtqtnjmWbyFTgURFZDDxO5CTsvj3c\nYzAYDBOWKx+4kvee+N4dFEjRK7JtxTYOOeEQjmkeNHtsAtaq6vHAmcAbiT1ricihwOuBdXt65phV\nIqq6RlXnqOrJqvoKVb16tPs0lhlqD5joGFlUMbKoMt5lcf/q+1mwZgFfnPfFHa7d9cxd6EYlNTvF\nsY1zBorvBQ4RkbuBJ4EmoBRf+xbwryN57phVIgaDwWAYGdtK27j8rsu54cIbqE3V7nD9az/+GslD\nkzxfeJ7v/sepAKhqAKwGTiFyy9gHXC8iFwEbVHXZSJ49Zm0iI8HYRAwGw0RHVbn4lxczo34G155/\n7Q7Xe8u9NJ3exCvnvhJ/rk/trU/x8MMCUE9kBzkJWAh8mcj2/CDwelXt39NePRj7S3wNBoPBsBuu\nfvRq2vJt3Pa223Z6/bqF1xGuDDnu346jtaWV31bVweFAJ9GSXgUuADYCM4GlIiLAocAiETlNVbfu\nrH0znTVOGO/zvXuDkUUVI4sq41EWdzx3B9f/7XrueMcdpJzUDtfdwOXq/72a5pOaeajzId5x/Dvo\n6Bi87BCdF/KQqh4GFIGLVLVFVWep6uFESuXkXSkQMErEYDAYDkoeXPMgH7n7I9x16V201rXutM53\nH/supUdKvPuKd9OQbuC4ppPofO2GgcvfJDpL5J0iUgIuAy6PHd+WRESJbCW7dbY1IpuIiJwNLFHV\ngoi8m8gQc53GOx1HC2MTMRgME5FH1z/Kxb+4mNvfcTuvmfmandbpLfcy9e1TyW7MctI/n8T8k+Zz\nTt37OPaeRbgfPRWi3elfA2zg
VKLzQ34HXE20b+RuYKmqvmN3fRnpSOT/gKKInAR8hsiif8sI7zUY\nDAbDfuL+1ffz1l+8lZ9d/LNdKhCA99/6fioLK3zyPz7J5v7NvOvEd7FqFQQN0W9/Vb2L6H0+HXiU\naI/Inar6vKquIlIu4Z76M1Il4sc/+d8M/K+qfhfYcR2ZYdQYj/O9+4qRRRUjiyrjQRa3LL2Fd/36\nXfz6kl/z+tmv32W9O5ffyW+v+S1nXXgWt2y5hevOvw7Hclj6TEjQvN2reznwAvAWIiWyQES+IiLr\niTZ8f29PfRrp6qx+EfkskW+VV8feHxMjvNdgMBgML4EgDPjPB/+TW5+5lYXzF3Lc5ON2WXf1ttVc\n8rlLyPRkmP7m6RydPZrzjzgfgAWri1gtncOHFxZwB/BJVc0Dnwc+LyKrgUuJlvzukpHaRFqIjC5P\nquojIjKd6LCoUZ3SMjYRg8Ew3tla2Mq7f/1u/NDntrffxpSaKbus21HoYNa/zqJ4S5GPXv9RHi4+\nzJ8/8GdqkjWoQsN715I/7AeEX/uvgfNEZgFLgc+r6nVD2xKRvwCTVfXI3fVvpNNZ/6yq16jqIwCq\nup7I/7zBYDAYDhB/WPUH5nxvDnOnzeW+99y3WwWyoXcDs/9zNqWflnjfVe/jd92/4+7L7qYmWQPA\nkiVQmbuB1k0bh97230B5QIGIyBFDrjUTGdt3y0iVyM4m3944wnsNLwPjYb53f2FkUcXIosrBJIvu\nUjeX//ZyPnL3R/j5237OV8/76nbH3A7nDyv/wOwvzKZ0U4k3fPgNPJJ+hAff9+B2S3+v/02J8IgK\n2+6+GAARuQ+4GJgkIq6IrAV+JCLrRMQFZgMnisgfd9fX3SoREfknEVkGHC0iTw8Ja4AR+VUxGAwG\nw8hQVX769E85/vrjySayLPunZbtdgdWeb+fSOy7lom9chNwiHHfpcfQf289fPvAXZjbMHKxXLMJP\nUi/S+tcn8POHDRR/gMjesRx4HrhGVc8BLgL+BjxD5EF9t0t8d2sTEZF6oJFoLfGVQy71786//MuF\nsYkYDIbxwpObnuST93wSN3D5zhu/w5mHnbnLup3FTq59/Fque/w63L+6yINC9h1Zrrj0Cr587pdJ\n2Nuve7r0pjZ+3/Ai1ofeTL5rKaqtAIcALaq6RERyRIrjrcCPiQ4AfFRE5gOzVPU/d9WXETtgFBGb\naMnX4Jgqto2MGkaJGAyGg53lHcv5woNf4LGNj/Hlc7/M/DnzsWTHSSJVZdGWRXz/b9/n9uduZ3py\nOs/f8jx2h80hlx/CLf94C2dPP3uH+25a0ck/rnieyxY9wbZFT3DPPTejmkVVt9uJLiK/ITpz/XZV\nbYzLDgXujc8c2SkjsomIyMeAduB+ol2MdwO/H8m9hpeHg2m+90BjZFHFyKLKWJPFsvZlXPary3j1\nza/mtNbTWPXxVXzg5A/soEDWdK/h649+nRO/dyLvuP0dJKwE2dVZnv3Ss0iNcNXPrmL5Vct3UCCh\nKl9fs4EPr3qe965o5d7vfY1E4nSmTt3RDZaIzATmEE1fPRu7gwe4hMgJ4y4Z6T6RTwFH784dsMFg\nMBh2j6qyYM0Crn3iWv62+W/88xn/zPcu/B51qbrBOqGGLGlbwu9X/p67nr+L9b3reesxb+W/Xvtf\nfPv33+b7n/4+2qO856r3cN0/XUdDumGH5ywvFPinlat4ZmXIq393Ctv6LuVd73oXP/iB8s531nLj\njdW68VTW4D4REbkc+LaIfAG4i/i0w10x0n0iA/7l/RFJ6mXCTGcZDIaDgZ5yD7cuu5XvPvldLLH4\n+Gkf5z0nvodMIoOqsqJzBY+sf4QFaxawYM0CGjONXHDkBVx41IXMapzFB3/yQR64+QF4Hs5/3/n8\n5OqfMKl20g7PeaFY5Or16/lNZxfTHpjBlD+3MveVV7Fgwb20th7GggXXc889UzjzTBnYJ+IQzS
r9\ncfg+EQARORL4iaqesavPNtKRyIvAwvgYxcpAoapeM8L7DQaDYULhhz4L1izgx0t/zN0r7+YNR7yB\n77zxO5w45USeanuKbz72TZ7Y9ASPb3ycXDLHOdPP4e9m/x1ff93XmVIzhdueuY0P3vhBXrz7RayV\nFhdcdgE3338zk5omDXtOyL3d3Xx/82Ye6+vjrdY0mj91Gicf7zDr7P/ipz/9OaeeeipLl85i5sxm\nlqduHnr7TcBzQxWIiExW1Y7YM8nn2YPrk5GORK7aWbmq/r893nwAMSORKgsXLmTevHmj3Y0xgZFF\nFSOLKi+HLPzQ59H1j/LLZ3/JHc/dQUuuhVOnncqkzCRe6H6BxVsW013u5uSWkzmt9TRObz2d0w89\nnUPrDqWn3MMfVv6BG5+6kYfuf4jwiRCnw+HyD13OV6/8Kk1NTYPPccOQh3t6uLOzkzs6OpiVyfAP\ndS28eMNUfv4jm699rcxjj32MRx55mFwuR0vL0Tz33M/4/vctvrD+NJ684kmAVwEPE23X0Dh8DjgK\n+Gic/7Wqfm53n3lEI5EBZSEiWVUt7r1oDQaDYXyypnsNtz1zG3evupuntjxF2kmTclL0u/3k3Bxd\npS5aa1t55wnv5L9f/9/MapyFJRZBGLBoyyJuWXoLv1nxG5Y+txRdogSLA1oOaeHzn/0873/X+0mn\n04SqLMvneainhz91d7Owp4ejs1ne0tzM72aewt03ZPjKd+Gtb4Wf/WwRn/jEuwiCgO7ubj760c/w\nxz9ewQUXCGe8ppfl31oOgKr+mchT73DuAb490s8/0pHImcCNQE5Vp8cu4T+kqh8Z6YMOBGYkYjAY\nDjSqSlepixe7X2RN9xqWbV3GExufYEXnCtoKbfihT22yliOajuCc6edw6rRTOW7ycRzTfMygyxGI\nRimLtyzm4XUP8/D6h1m4diGpfAp3mUv/U/0k80nedPGb+MzHPkviiCNYVijwdD7P4jhMTiQ4p76e\n8xobOa++kRVPJLnxRrjrLrjkErj88g6+9a2P89vf/hbbtvnHf/xH5s//Ah/72CQOPRR++lP49pPX\n8NjGx7jjkjt2WOK7r4xUiTwBvB24S1VPjsueUdUT9kcn9hWjRAwGw0slCAO25Lewvnf9YFjXs451\nvetY27OWNT1rUFWyiSxu4FLxK8xums1prafxpqPexIVHXbjD0bSqyoa+DSzavIgnNj3BYxsfY9Hm\nRUytmUq6M8OmJdvoX9qL1aPMmPcqjnzzJbhzTuYF12Wr53FEJsMramp4RU0Np9TWckouR5Od5PHH\n4c474Ze/hPp6mD8fzjprBV/60j9z//33k06n+ehHP8qHP/wv/OIXk/mf/4FPfxquvBKe3rqE1//k\n9Tw0/yGOn3L8flMiIzWso6obonPbq7LfHx0w7B/M3HcVI4sqRhbROeMb+zby23t+S+OxjaztWbtd\n2Ny/mUnZSUzJTqEmWYOIUPJKdBQ62FrcypFNR3LGoWcwd9pc5rbO5cSpJ27nx8oLPJ5uf4bH2p7l\nya3Ps6x7PSv62lCnnqb62fh9KXqfnklxqcuLS1ZAtobk6Wcy+7PncdyZZzErl2N2JsPsdJqjsllm\npNPY8bt2wwZ44C741P1w770wbRq85S1w882buf/+7/Ctb/2Ef/mXTcyePZvvf/8GTjjhvfz85w5z\n58JrXwt/+QvMPiLg5qU/5so/Xcn3Lvjebt3I7wsjVSIbROQsQEUkAXySyN+KwWAwjAqqSm+lly39\nW9iS38KW/i1s6t/Epr5NbOzfyMa+jWzo3UBnsZNDag+hZlMNh4eHk0lkAKhN1tJa10qoIe2FreTS\nzRxSN4vpTcdySMMRtDTMpq5mGoUQun2fVZUif2rfxsY1f2KrW6Tb88mr4FlpsLMkw1pSnIDdX0O4\nrExx6dMUnv8jVrHCjFNP5LXnvYHLvv1Dzjz2WFLWjvu8gw
Ceew5ufCx6+T/8MPT3R8rg3HMDLrnk\nSf70p59x00138tWvbiGTyfC6183n3//9KpYtm8yXvgTJJFx6KTz5JGjDi9zx3B388N4fMrlmMve9\n5z7mtMzXNwasAAAgAElEQVQZfJ6I3AhcCLSr6olD+yIinyHy8Nu8JxdXI53OagauA15HdGj7fUQb\nU0Z186GZzjIYxhdFr0hHoYPOYicdxTgudLC1sJWOYhS3F9ppz7fTlm8jYSdozjZTn2oklZ6EnWxA\n7Ro8SVHCJh9CfxDSFwQkUo1k05NJpppwkvVg1+BbKco45EMlIUKtbZEhIKEu4hfwvV7K5S76i5sp\nlTpoTmaYlqllUiJJJvTJr1vHluXPs375KtwNFdgMuYYcJ556IhecdwFvfO0bOf7447GGKQ3PgxUr\nIvfsixfDokVR3NICZ54JZ56pzJixmlWr7uFXv7qdJ55YShjORuR4Zsz4e5qbX8e6dZNwXeGss+Ds\neUVmzH2W7vRT/HXTEzyy/hH6K/1cdPRFvO+k93HWYWcxdCYpTp8D5IFbhiqR2NXJD4GjgVfuFyUy\nVjFKxGAYm1T8Cr2VXrpL3XSXu+kudbOttI3ucjddxS66SlHoLG6jvdxPe7mfbW6Z0M6QTU8ilWzE\nTtYidg3YGQIrjW8l8aQaAiuNlagDJ0dopUioSxqfGgmpsy0aHYfmVIYpqRoOydTR4CRQvx+30k2h\ntJW+wma29W9ga9+LbOlexfqeNSTtJIfVHcbU3FTqU/WknBRe2WPrhq1sXLuRzWs247a5JLoSuFtd\nGqY0cMzxx3DGqWdw3tnnMXfuXJqbm6tyqMALL0QKY8UKePZZeOYZWLUKZs6Ek06COXPgxBN9isXn\neeCBRSxc+CIvvhjg+9MJw5k4zjGEOpVpR2zmiFM6aT5iPdnWNYT1L9Lhr2Zl10ra8m0cNeko5rTM\n4bTW03jV9FdxwpQTduqDCyIlEm82nAH8bpgSuR34EtFu9ZemRETk31T1GyLyHaI1w9uhqp/Y7Tfp\nAGOUSBUz913FyKLK3spCVakEFfoqffRV+ugt99Jb6aW33EtPuYdt5V62lvvYWu6ns1Jkm1ei13Xp\nD3wKYUgphLJaqJ3GSdRiOTnEqQEni1oZ1M4QWilCO0NopcFKQFBCgjKOuiTUJy0BGVFqLCFn2zQ4\nCRoTKSalMkxO5ZiWqaM1W09rtpEmJ4EdliiVO+ksdtCWb6Mt31ad4spvYXP/Zjb2bqT3+V6mHD+F\nxkwjdck60ok0glDJV+jr7KOnvYfOtk68bR6ZQga718btdPEKHlMOncLsI2fzimNfwdyT5nL88cdz\n7LHHUlOTo6sL1q2DNWui8OKLkeJYuRK2bIHW1ig0N0fG8GRS6e7uZcWKbWzcXKE/8AgzLtS8ALUr\nSDVtoq61h4bDiiSa+uiXzXSUN5NNZDms7jBmNMzg8IbDObzhcI5oOoKjJh3F4Y2H7/a8keHEI5Hb\ngLcRmTWeBX5AdAjVvwCnADXAOuAkVe3dVVt7euqA3eNvI+7dfkREzgeuJXIUeaOqfn00+mEwjFVC\nDSl6RXpKPWwtdtNW6qaz3EdHuZeuSoFnlizle96f6fdc+vwKBd+nGPqUQqUcgquCi+BjE1gJAkmC\nnUHsDNjpKFhp1E6D3QoyHYISWCUkWcZKuNhpjwQ+SXwaRMlaQo1lUWNb1NsOdYlICTQmUjQns0xJ\n52jJ1DMtU09LtoGGdP3gL2Y/9OkudUcjlWIXHYUONvdvZlP/Jtq2trGk0M79xQ66il10l7vpK/dh\nWzbZRJaUkyJpJ7HUIigHuP0uld4Kxd4iQT4guSWJu8aNpsj6O3B7XIrdRRzHYXLLZA6ddijzDp/H\nEScdwYwZM5g+fRaZzCzy+WmsXWuxfj2sXx8tlW1rg44O6O4Gy4JMbQUn14OV6cWze6hID6X6HtLT\nt9GZa2NrahN+chO+30
bodMIh/TC7CGmPZJBmcmYS05sP4/Dmw2nJtdCSO4pptdOYVjuN1rpWWmtb\nt1suvJ/4OfAo8D/AmcBTRO9aB/gK8CHgduCzbH8UyHaM2emseMv9SuA8YDPwJHCpqq4YUseMRAwH\nFFUl0AA3cHF9l1LgUvArFAOPol+h6LuUAo+i71KI01Hwo+B7Uf0wiku+RzHwKQc+ZQ1wg5CKKq4q\nnoKngg8EWIRiE2ATio2KEwUrgUoy+gVvJcBKgpWKYvUhdOPgIaGLpS6W+ljqY6uHowFJCUkCKQuy\nlkXWtsnZCeoTKRoTaRqSGWodh5zlUGNb1DoOtbZDzrbJ2DYJK0FIiBu4eIGHF3qD6bybHxzFDIR+\nt5+8m6fgFih4BQpugZJfouSXKPtlym4Zt+LiuR7qKRII4gnqKeoqlmfhBA4JP4ETOCT9JI6XwHET\nSMVCS4pX8HHzFcr9JcqFIslMhlxtI7W1k6mtmUou20Im04JlTSUIp+AFzZTDBspBLXkPil6BUtBP\nhTy+1U/g9EMyCpLux0rnsTP9WJl+7Ew/ku6DVB9Bog+XPpQQJ0gjFQctKUHBJciXoazYnk2tXcvU\nuqkcN/M4XvXKV3H2yWczu2U2jelGbGtn+/0OLEOms84CHlDVjIg8QLSL3SJ6504D2oiOz93lOesj\nGv+IyP3AO1S1J843Arep6hte4mfZHacBq1R1XfzM24A3Ayt2e5dh1FFV3DCMXq6BSyXwB1+2ldCn\n7MflcboSxi/VwKcS+rhhQCUMcOO0G4a4YYinUdrXEDdUfA3xNMQPFU+j4A8NQKAMxgFSjYFAJX5Z\nC4pFiBW/rG1UbBgIViKOk6ABhD5ogKgfvbjVj8t8COProY+GfrTkJgwQDZEwxArB0hBbBTsULLWx\nFWwFRyGjYKvgaPTPaWlAIjb0OoCjigPYhDiqJAiR0IewQqAevrr4YRR76kZ5davX1MULKvh+hYJX\npscvE/gefuDiBy6h7xIEHhoESGAjvoP4Nng2BBbiWqhvgS9o/KLHB/WIBO0DviCBQAASDAh/IISR\nTIIQDX00cEFCSCaRZBZJJJFECrFTiJMGO4XaKXw7iWcnUMsBJ4kkHSRhY6UtpM5GEhaSABKQSASo\nU6LXKdLjFAntTaizCnUKkCxAoghqYQc12FpDUnNk7ByNTo76dC2NNTmaarPUpx2SgJf36NnaQ097\nD93t3fSs7qG3o5dCdwG3z4UKJKwEjY01tLa2cuSRR3LKKadwzjnnMHfuXBxn5NNMo4DAoCv42UTv\n16OALcAU4L3Ar3bXwEg/3eQBBQKgqt0isusT4/cPrcCGIfmNRIplO17xxrcd4G6MZQQVQKDQ1UFN\n89TIAU5cNnBcjEps0IpXZ6iADqzUECGU+J6oSVQkqm8JYdxMiKBWdD0UieO4riVRPNiOICgoiMYu\neVQRQEId6HnVWw+KoMhAPq6LxmUD5rgw/lgA4ZB7qLaDQnnrVjLNk+M2ohe0Pdifaj1QRMP4fh0s\nF8L4eogVqRuEACFELD+6LmH0hpQQsQIEH6wQEQ8kxLK8uI6PSAB4YAWgAaoKGiIDjw0hDC00hDCE\nMBQ0FDQELwRXIQyEMLotqqO6k7QSapQPw6jM6w+RLNuVA1iWYEkUbEsQLFIiWGJhkUawsMTGilJY\nccpWCxELseKXtiNIOtZ4CYUEhIkQdULCREgYx4ETEiSCKHYCfDvAd0J8O0QQnCDA8ZVE4OEEJZzA\nwvZtbF+wfRvLs7F8G/FtrMBGAhsCG0ILKxCsMkgBnNDCCWzswCYROiQCh0RYT0In0dPTR0t6ElZF\nUF/xfQ/P9/FcD8/z8P1uPH8r5SBgoyqbEDT+7jm2QyKZYHIqxczsTHK1OeoPraepqYna2trB1VcD\n/1e6uMijS//EI9aDqGWjloXaTpS2E6hlE9oO6kSKMXQSqJ2I5sZ29p8uFslkmnQ6R01N
PTU1jeRy\njSQSyZ3WH45qtCKsrw/uvnugTbmVaKYnDawGvgucSzRDdYaIbAJ+VP0H3DkjVSKBiEwfOMkwtuiP\niXmkZ1Y+hOTi3aIJG2mqwWqJfPOHbX0AEyIfen3gbt3/7evI62tbH7o/n98+LL91SF52fT9psHKl\n/fT5BaulHpD4uiAt9YCDtvUCAlMbo8/f3gukkalNcf3od5dMnQQI2t4d9W/K5EgbtseLXqY2Rdqt\nvRNQrJZGQKG9C1DsQxqAEG3vRkSxW+oQUcItPYgozrRaRELCLX0gIcnWHJaEeJv7ka4StXMaEVG8\nzXlElOz0NCJKeUMRsUKyh6WwJKS0sYyIUjMzgVghxfUuYoXkDncQ26O41gNLqTtSSNpK6cWAhCjT\njoaUhPSuDkkAhx8FKYW2lZBUOPZISAewdiUkKzBnGqR8WLkKnABObQW8kMVrAvDhlMkWVkVZskGx\nPTitFhIlWLIVnAq8xoJMEf7aC5YLZwVQEWEB4IpwomNRFOGxUCmLMNu2KSLc5fo02g5T7AR9arHS\n8ymoRUITFAPok+jYjHoriSNCn3qICA1W9KLe5rngQUMxSWcn9IRR/YHr1XwCAXqDCoLSYDmIKj2h\ni6BMEgvRkG71EVWaBURDthGiQJPYhNh0oYRi0yAJfLHYGvqU4zp96tOrHg4WTVaSeiuJpVBrJZjt\n1NFop+nTgJyV4dTs4azTgAcrG6mk6ph8/OnR91X1MhH5MpG9Y4WqfkJE/gDUichUVW0VkTXAbrdy\njHSfyPnADcBD0X8N5wBXqOq9e7x5HxGRM4Avqur5cf5KQIca13dmE/HCaE14r+/TMyR0+z7bPI/u\nON0dp7f5Pr2+TykMKQUBxTCkHIakLIusZZGxLGpVafQ8Gn2fhkqFBteloVKhvlKhrlKhtlwmV6mQ\nK5XIlstky2XSpRKZYpFUqUSqWCRRKODEwSoUkHwe8nnwfcjloLY2CnV11fTQfF3d9unaWshkIJWC\ndDqKB8JAPpEYHH3sDlWNfg37iueGeGWPSqVEpVzBLbtUKhXcikel4uK7HpWKR8V18Vwf1/VxvSj2\nvQC3HOBVAvxKiF9RgkqI5ymBB6GnBC4EHgQ+hJ5Es0OeRL+0PQsNLPAdCJIQOBAkILQhdOJfnjYS\nWmhgg1pIaEFoQSiIRnGUB1SiwYBGv+oHRjqigsSjHNHoC22j0RSRKBaKI1E+KtfB6wNpa0iZhWJr\ntcwaCEqcjmKBOB2N1SIsfBSfAVuI4IsQiBAI+JYQIgRWVBZaQmBZBJYQShzbVlxmRWW2jS8Wgdh4\nYuFLnMfCUyuacVILXwVfBS+M4kAFX8EPo7JAJUoPxOFAfYblozqBWtgSkpCQhBXiSEjCCnAkwLEC\nEhLgWD6OBNjiR2nbx3F8bMfHcTxsx8dKeVgpF0l62KkKknSxk2XsZIVEqoiTLJFKFUgkS6QSBZKJ\nMpYTInaAFT/L0YBk6JFyfTKeT6ZSoaZSJlsqkSuWqM0XqevL09BVoG6zjwTg1QluvU2lLoGbS1Cp\nTVOqy5CvzdHd2ETb1Gl0NrfS2zCb8uQjSTYfwuR0PVOTSaYmk0xLJmlNpajd1+krVXDdaF1wqVSN\ny+UoeF4UwhBU0SAgXyzS2dtLV28vHb29bO3upqOnh7bubtq6utiybRsbu7rYsG0bWcfhuEyGc8tl\nvlQsAhwGLAJSwMOqepGIXEE0hfU74E7gCeD7qvrSDevxhsOBg0keV9XOfZPUyIjPdH+eaLi1Bfgr\n8E5VXT6kzgExrIeqlMOQUhhSjBXLgIIpBAGlOB7IF4emh5QV4nQ+CHaIbRFqbJsGVaaWy0ypVGgu\nl2kul2kql2kslWgoFqkrFqktlcgVCtQUCmQKBdL5PMlCAadcxqpUsCoVpFLBcl2kUom+cJUKEgTR\nFtYBxTKQTqW2Tw/PD782oJSGKqvhZen0rkMmE7U5
QoUWhhXCsEQYluNQGoyDoLRD2UA6ulYNI80H\nQQnVJKq1QA7IEYY5IEsY5hDJEoY1qNagmiUMo6CaGZaOQhCkCcN0HKcIgij4XgK/bOGXArxyHEo+\nXjnALwZ4pRC/HOCXFb8c4rvgVxTfhWBAEfuC71mEgRAEggZCGFoEoYWGFmCTxCMtHik80rik1CeF\nSwqPpISkJCBphSQsHYwTFiQsxbEhYYFjCY4FtiXYdjTlZVsWdjwFZllSnRIbmC9VwfMFzwPXg4oH\nng+eF5W7PnhiETgWoWMTOFE6sKMQWoJvWQSxcgzEipQpFj6Cx4CiixWbWngKvoJHiEuIR4hHZB9z\nUVzAk2gVmh8Kfmjh+Q6+n8DzEnhuGpGQZKpEKlUmmSiTdspkrBI5yVNLnrqgj8ZKD435HurCfrK1\nBTINfaRatpGZ2kdykos0W/hNSToyTWzMTKE70UIxeQhp91CmlCcx008yQ5PMlhR16QRW1sLO2tg1\nNlaNhZ2zsXM2Tp2DXWtjJUZ0cvmIUVXa29t5+umnufOaa/jevfdCdOT5VKqzSuuBjwDvAy4gmqla\nRnQgYc9OmgX2vE/kGFVdISKn7KJjT+3LBxop8QjoOqpLfK8edv2gXJ2l8WqcQhDQHyuVfBDQ7/uD\n+f6BEJftLF8Jw+ifJQzJL1qEnHxybICO/okSquR8n1rPI+f7ZHyfjOdFIQhIx+lUXJ6O02nPIzUQ\nXJe075N2XZKuS8rzSMbppOeRrFRIDMSuG4VymUSlglOp4LguTrmM7XkEqdRgCDMZgnSaMJMhTKfR\nTAbNZtFY6Wgmg2QySKyIrHQaiWMrk8FOp7EzGaxMBieOBxTWwqeeYt5rXxspr3R6l/PMQ/8equ5u\nlU4QFHdxrRini3Gd4XEhvl4YbEMkiW3XYNtZLCs7JK4ZzA9NW1bNdnV2V9+yavD9NK5rUalE+0RO\nPnkelQqU8x7lngqVfpdKX4Vyn0u536OS9yj1+1SKAeVCEMUlpVzS6IdwBUpli7IrlFybkutQ9BMU\ngxQFP00hTFPQLB4JshTJSYEaKVFjl8k5JWoTZXIJl1zSoy7tUZ/xqcuE1NVAbQ3U1VrU1Vrk6pPU\n1iWoqUuSrk1BMkNoZ1ArRYiDekpYCQkrIVpRwnJIWA4JSkGULkUhKAZRuhin4/hvvX9jTjCHoBRE\n07QpC0la+Gml0lCmWJ+nUFugkC1QSJcoJssUnQplK8CTEBcolmoo9kyl2D2JQncj+d4G8oVa+ku1\n9Pk5fGwarDwNiTx1mT7qGruob+ymLlUim60QNrq4U8CuS9IgNTR0TuWQ9gTT10NNZ0iQDwj6A6yk\nhdPg4NQ7OE0OTqMT5QfKGrYPiaYETlMU23X2drvTd0Z8/buq+jERmQd8RlXftC/vsz0pkRtU9Yr4\neNyd/O/pa/flofuLg1WJHAiGbyrTeIWSGysZT5UgLhuIfVUCqKZ3cn2kwRuW9sKwmlYlCAK0UkHK\nZaxiEatcxiqVsMtlrHKZRJx24pAolbBdd1AZJeJ0Ik4PKrJKhXSlQtp1I0XouizO53mtKmnXJeW6\n+LZNJZXCTaVwk8koTqdxUym8dBovlcJPp/FTKfxYufnpNOFAiJWbZrNVBZfNRiGTwaqpwaqpwc7l\nSNTUkLQsUpYVxSLbpZMiJHCxdUAZFYYpnmo+Sg9XQlF5VUENLysShmUsK41t51i61OLUU5ux7Vys\ngGridO12sePUxukoRPm6OF2HZWX2+GLyfSj2euQ7yxS2Vch3VSh0u/Rv88j3eOR7Avp6Qvr7lP4+\npa9f6M3b9BVseopJesspuisZer0sxTBNnZWngR4atDuKnTwNySKN6TKN2QpNOZfGuoCmRqVxktA0\n2aZpapKGQzLYjXXRzr76+sGp4IVLljDvda8DIPSHKCJ3iGJyQ9SNlVWc9koB7cUKm/Jl2grdbPPX\nU9QNhPYmJmfb
mJJroza7hWRuPV45RWHtkfSvnk7fqsPoW9tK75ZWOgvT6NIGOqiliyxdYYaeME3O\n8miWMs24TAldplBmarJMS9ZjZl3A5JoQO2NhJS3Ekej0D4mXrgSK+lGfw1KI3+8TbAsIKyGJSQkS\nkxMkpiRITkmSnJokdViK7LFZGuY14GQdiJbxNhEvBAQWqepp8SGEHwS2xn/az6nqPbv6u+9JibxD\nVW8XkVmq+uKeXmQvN0aJGAYIhijLASU2qMwqFfxikaBUIigWCctlwjjWUikKxSKUSmi5jJRKUYgV\nnpTL2LHis0ulSNHFcaJcJlUskiyXSZVKOJ5HJZWilMlQTqUoZDIUMhnymQz96TR9mQx96TT96TSl\nbJZyNku5poZKNksll8OtqcHL5fBqa/Fraghra6GmhrRtk7Ys0pZFZmh6wHZn22QG80KaMlmpkNYS\naSmT0iIJLeFopGh8v58gyBMEQ+P+uLxvh7Sqj23X4Th1cVyP4zQMCQP5xsGQSDTiOE0kEpOw7exe\n/T19P1pJ1NMDvb3QvdWjp61M95Yy3R0e3R0B3V0h3d3Q3Sts63PYlk+xrZSmz01T5xRpsntpkh6a\ntIumoINmfwuTnF6as0WacxWa6z2am0ImT4bmqTbJSbXQ1ASNjVHc1ASTJkWhqSmyMQ77zr1QKrE0\nn2dpPs/i/n5W9m9kqm7grFQ7JzibmaZryJaWE3gdZPpryawJqF2Up25ZgJdvZB1n8oI/lzWJWbTl\nErQnkrR5tbT1TGbz1pn4QYKpk7cxfUqFVxyS4NQpaY5vKNPoVfC2efhdPm6Hi9fu4ba7OE0OmcMz\npGakSB2WIn1YGrvexuvwqGyokF+ap7y6zP9v777D46rOxI9/X/Uuy6ruwg3bsY1cMdU24NBCTUKA\nhA0lJIQQIBBgCRv40ULIbkLbTSELDi0hBIdiggOGdQnVNrZl417lIlnVarbKSPP+/jhXnrGsblmS\npffzPPeZuWXOnHllzzv3nHvPOXXPqQADgAwgEbgbGIm7jeJbQEVbpz9vLYmsVNXJDY/t+lfQBSyJ\nmB6nvt51hh44cPjScCGFt/grKvBXVFDvPforKty3ZkUFUlmJVFQQUllJWEUFITU11MXE4IuNpTY2\nlprYWKpjY6mKjaUqJoYDMTGHElW5t5RGRbE/Jobi6GiKoqMp8JbCqChCo6KIDQ0lLjSU2NBQ7+7y\noPWG5yEhxHnP40P9JHCQOA4SQyXReoAorSBcKwjzl0N9GXV1pd6y/9Di85Xg8xUjIocSiltSCAtr\neJ5KeHgK4eEpREQEnoeExLR69tPcn6CsDIqLoaTELcXFUFykFOX5KMqtpbiwnsICKCwWisrCKa6I\nIDbCR2pUBanhpaSGlJBGPml1uaRW7yGtKoe06ArS+teRnqYkD4wkLCMF0tIgPd0tGRloejq5ycl8\nocryykqWl5ezrKKCtJBqzovJY1pYDpm6hYgDq6iq2kx0WSyxm3wkLa0kMi+cKoZR7ptJ8e5zqIuC\n8qH7yMmsZksC5JQlsmvHaLZsnkJMrJ9TTq7mnLOTmTMnghNPBFSpzaulalsVBzcepHJ1JWUflVGz\nt4a0K9MY8tMhRJ8QTf6f88n4dsah+UREZCZwJ+5ylGdwNxxWquqv2xLv1pLIB17B03Fz8R5GVS9u\n7x+4M1kSCbDxogJ6XSzq613yqagILA3rTT1WVh5KSIt37WJWSIhbLy9Hy8vdvUH9+lGfkEBdYiK1\n8fHUJCRQlZjIgcREKhITKU9IoCwhgeL4eAoTEylISGBffDz7RajwLhAJ7s+rVyU+LIyE0FASwsJI\nbHgMC6NfaCj9Q2pJDqkkSSpIpJx4yonRUqL8pUT49xNSV0xdXTE+XyE+XxE+XzGq9UGJJbBERKQT\nEZER9DiAiIg03LU4zWvp34Xf7856CgsPXwoK3FJYoBTk1pG/z09BobC/Ioyk6B
rSY8pJj9hPuhQw\noH4vA2p2MqByMwM1l8HpPgYNgqghKWwZPZpPhw/n44wMPoqJYa8IMxOjOC8ml0khG4k/uJyykiVI\njY+EjSH0/78KIvMiqIwYRemBiyndOBMJC8cfWUHlhVtZOS6fHSWh5K09kVUrzyUyIppLLgnl6qtj\nmTHj8GtYqndVk/tsLrm/z2XEf41gwLUDDt2x3nCMd7PhYmA8LqFcC5Thhry6s6Wxs1pLIhG4gbhe\nAr7XeL+qLmn2xV3AkkhAr/viPAoWi4AjYqHqrt4rK3NLQ3tRaakbCKrhp3tJiVsvKvJ+xntLTIwb\nSTA11S1paZCWRl1qKtUpKVSmplKWmsr+5GSK4+Mpa+Jy+4ZL7oMvtS+rqyMuNJT+4eEkhYWRFBZG\nalgd6SEVpEk5yVJOEqXEU0qsv5hIfxFhdYX4ffnU1uZRV1dCeHgqkZGDiIwcTGTkECIjBxMVNYzI\nyKFERQ3jk082MXv27E6Ja12dC01BgRtHq2HJy4PcXMjdXc/e3X5yC0KJi/AxJKGMoVEFZIbs4gTf\nZpLZRPHwA2w69QQWZU2gOC6OOZUVXBxdwuS0vfgivmR/yQdEVISR9HEN/T+upZpkSsMuo3jNJUh9\nDP4apey0Oj6/8nMOhOWw/19DWbLo3wgPS+LWW2O44YYQYoJaEQ9sPED2OdmMeX4MyecmB5+JxOES\nyMOq+paIpAJF6sZFeQQYoKo3NBeL1pLIS6p6TcNovp0S/U5kScSYLqTqEk5R0ZE/1fPzA9+o+fnu\n8cABN0HGwIFuGNuGx8GDD18iI/GrUubdtxV8D9d+n4+SujqKgx4LfT4Ka2sp8vk46PeTFh7OwPAQ\nhoeVkxlWymApIk0KSdICYupzCfPtpa4mh/r6CqKiTiA6eiQxMaOJiRnjLeMID086ZiErKnIDN+bk\nuGX7dm+k3y1KTo4ysH81maNziZiwm6IxZWwcEc2ULZv55qZ1nB+3l5CJBylK3UItxSR9EULKB9XU\nSAqlYT9k/8dnIqGh1IfCZ9f7yf7qe2Rt2cOC177Lxo2n8eijkVx3nRy6QLFofhE7H9jJtFXTGsbO\nCgPeARao6lON69/UUPFHHNNKElmPm4hqATCLoFEnXIBaHmf+WLMkYkwPVl3tksneve7n+d69sGdP\n4HHPHre9Xz8YNgyGDnVLZiaccIJbMjPdzbjNqPH7ya+tJb+2ln21teTV1pJbU8Pe2lr21NSwu7qa\nPTU11KkyMrKeCWGFjArdx1B2k+rPIbZuO1K9ifCweGJjxxMbO5G4uInExWUREzOWkJDwZt+7M/h8\nbo0PO2EAACAASURBVPj4devcsmYNrFxXz570EhLP30f5+BJGFxzgxmWLuWrRHzl4VjgFp9dRHV9J\n6mIlIRsOpF5IYfaPqc0JR0X4182RLL9gKedu/ZTfPfEYGRnDePXVSJKTQf3KJwM/4fT80xuSyIu4\ns447GuokIhmqus97/hNgmqpe3dxnaC2J3Ar8EBgO7OXwJKKqOvzoQnh0LIkEWBNOgMUioMfHwu93\nZy45OYGf6zt3Bibn2LnTXaI7cmRgGTUKTjwRRo+G2LYNj15eV8e8hQvJmD6dnOpqcmpqyKmuZmd1\nNTurqgjx7WVa+B4mhuUwgm2k1m0iqj6XiOgJJCdOIynxVBITTyMqaugxDUeDsjI3xe3Sz+p5q6CY\ndYPzYXwZY3fHcePuPK7Z9wf2xyxk36xqIguEAQv81Az8KsWrfs7BNfX400J56JE6Lhv5Vz59ZiLL\nl13HkiWRDBwI6765jvGvjwfXgb4Ud0Nhw8h1PwOuBrJw/eE7gR+oan5zdW3rsCe/U9UfHm1gOpsl\nkYAe/2XRhSwWAcd9LFRdR8PWrbBtm5sScPNmt2zd6i6/HTsWxoxxj+PHuyXpyOaplmJRXV/Pzupq\ntldXs72qiq1VVeQcLMR3YDX9atcwOWQDY3
QtEhJJfexppCSdzYlp55IU2zW/o30+WLCshqe37OOj\nfrn48iKZvHUwtySXMyv+fgoT5uGnlgHzwX/iPex57iLq9tfx4R3R7L/sQ6r/GMKKFbfx2WdRREQo\nISEhh3WsH432DHtyOjBKVed6Q6DEq+qOzqhER1kSMaYP8/vdmcvGjbBhg1u+/NK1CyUkwIQJMHGi\nW046ySWaDoxr5fP72VZVxYYDB9hRvo4DZUuIPfgJw+uXUy2J5EedSXjiVxmRcg5TEvqTEtG2kXU7\nqs7v55WcYh7avIuCynr8z2VyRUYKN1/8B/z1dxKZU0XKF4mUV71L4Xwf666OZsfN81l+zyzOPHMm\nDz0U1nDp9BDgRdzQJ37gj6r6tIicBPweN7qvD7hZVZudmLCtZyIPAFOBE1V1tIgMBP6mqqcdbUCO\nhiURY8wRVF1yWbvWdTKsWQOrV7s+mAkTYPJkmDoVpk2DceMgtGOTQtXW17G+6FN2Fb5Dffn7RPm2\n8TkzWBt2FpGJc5iemMaMhAQmx8cT0crQOx2hqry/fz93b95OaWEIVY+PYnq/KO6/+S5q659i0DwI\nLX6K7Qsm8vH1kfQ7/2nuu+55duyIpn9/Ae9mQ1Vd7V2htQK4DDeb7K9V9X0ROR+4W1WbvaytrUlk\nNTAJWKmqk7xta1rqse8KlkQCjvtmi05ksQiwWAQsfvddZiUkwIoVrsNh+XLXVDZpEkyf7pZTToEh\nQzpUfk1NLoWFb7Ar/zWqD2SzM/JsFujZ/LNmHNMS+zGzXz9m9+vHjISETk0qflX+tG8f927fzsTc\nDNb85ARuvGYVF5x8Cv0/9REz/yds33Qxv39mFwXz4vn65V/jllsiAJ4Hvgbkq+pEEXkTd6XWf+Lu\nEcnGjeR7jqp+p7n3b+u5Xa13zbA3t5F0+mS/xhhzTMXEwOmnu6VBaalLKsuWwSuvwC23uBGnTz0V\nzjgDZs50fSxt+NKPjBzI4ME/YvDgH1FTs5dRBa8yYd+z/CSijMrIq/mo9mvcua2YTQcPcnpiIuf3\n788FycmMiI4+qo8VIsL1AwbwteRkbty0icHzVrHpha/w7oN7+e09Q/BHP03SU8O58d8n8OZd/+SN\nNxoGY2cu7g71F72bDbOAwcCPgMdwHe+zcDcgNqutZyI/BUYBc7zCrwf+rKrPtP8jdx47EzHGdCpV\ndxPHJ5/A0qWwZIm7yXL2bJgzxy3D29eZXl6+gry8Zyks/BtJSeeSOOBWPqsbyYKSEt4tKSExNJTL\nUlO5PCWFqfHxHRrqJVB95Ve7d/PMnj18f/tEXnmwjBceHErKynj2PjmPRXcv4df/fQ+VFXHgzkQu\nxs1/sA54GHgBdxYSBmzBjae1QVXnNPee7elYnwN8FXeZ73uqurDDn7STWBIxxhxzubnw4YewcKFb\nEhLgwgvha19zZyvhbbuXpK6unLy859iz5ymiooYwdOi99Es6j5WVlbxRVMS8wkJ8qlyVlsa309MZ\n18bLl5vy14ICbtuyhf8oO4lPnnqNm2++gdQHLiKn5jKuKryQkuJ0cJMLJgJvA3eo6lMisgdIU9UI\nEXkHd59gjaomNvde7Uki6cA0b3WZqha0dHxXsCQSYG3fARaLAItFQKfEwu+HVavcROXz57t7WS66\nCL7+dXeWEhnZhiLqKCqaR07OI4hEkpn5AMnJXwMgu7KSPxcU8Ep+PkMiI7lhwACuTk8ntgOd/6/m\n53PX9u3cumEyU3aMJn54HpXf+we3jIlj/fpTwZ2JfAeICBoCJR9IwQ0N/wJwDa4vfFozb0ObendE\n5ArczILfBK4APheRb7T7UxljzPEsJASmTIH773cd8ytXQlYWPP64G9bl+993TWB+fwtFhJGW9i2m\nTs1m2LD72LHjPlatOoPy8k/Jio/nVyNGkDNjBvdnZvJOcTHDPv2Uu7ZtI7empl1VvTI9ne8NGMA/\nJq3n1d
y/4A/xEXPyq4wZkd1wyEQgAkBEqkRkN65paxvukt8rcTni+y29T1v7RLJxUyQWeOupwAeq\nelK7PlUnszMRY0yPsWsX/OUv8PLLbtywG26Aa69144W1QLWe/PyX2bHj5yQkzGDEiN8QFTX40P6d\nVVU8uWcPL+bnc1VaGj8fNoyMNpzxgJvzZNbq1Zwdksq5r04gPEUoDh/Oebd9AW7ukJnAHUA8br71\n4bipce8FYoApqtriBVhtvc4spFHzVXE7XmuMMb3f0KFwzz3uvpTXXoPdu919KZddBosWuU77JoiE\nkpHxXaZP30RMzFhWrMhi165f4ffXAZAZHc2To0axafp0okJC+Mry5TyycyfV9fWtVilUhN+PHs1v\nK3P4ghupGruf0HcPTUh7JXCd93w9cAA39MmLuOk/6rxtLWprIviniLwnIteKyLXAP4B32/ha0wUW\nL17c3VXoMSwWARaLgC6LhYi7mfH3v3dnJ+ee6y4dnjgR5s6F2tomXxYaGs0JJzzIlCmfs3//Qlau\nnEFl5dpD+1MjIvj1yJF8MWUKqyorGb98OYv372+1Ol+JjeXC5GRWnvstYjcJMVUVDbvuxo2JCHAQ\nGAa8AkTimrlmAHUi8suWym8xiYjISBE5TVXvAv6Aa0ObCHwKPNtq7Y0xpi+Li4ObbnLDsTzxBPz5\nzzBiBDz5JBw82ORLoqNHMHHi+wwceBPZ2Wexe/eTBDfbZ0ZHM2/8eJ4YOZLvbNjAnVu3UtNCHwzA\nT4cMYUF4DSElYYRN/LRh89NAQ1vbCbh8EE2gpWkrsAb4r5bKbm0U33eAe1V1baPtE4BfqOpFLdb8\nGLM+EWPMcWfFCvjFL+Dzz+FnP4Mbb3Q3ODahqmo769dfRXh4CmPHvnzEvCfFPh83bNzIvtpa5o0f\nz6AW+kpOW7mSn867gYHpO5lxWym4sbPewk08mAcU4fpEcoIeRwBDGoaGb0przVnpjRMIgLcts5XX\nGmOMaWzqVPj73+Htt+Gdd9zAkK+/3mSfSXT0cCZN+oiYmNF88cXUw5q3AJLDw3lj/HguSknhlJUr\nWVtZ2ezbfjM1lU+Sz8I35FBz1vO44azAXdY7AagHRuNuNhyByxH/3dLHaS2J9Gth39Hdq286lbV9\nB1gsAiwWAT0uFlOmwIIF8Nxz8PDDMGuW65RvJCQknJEjnyAz80Gys89m//4PD9svItw3bBiPDx/O\nnOxsVlZUHFEGwAXJySwYOp36mEMd8uMIzBEVhutU3w003JRSjhs7q9m71aH1JLJCRG5svFFEvoe7\nHMwYY8zRmD3b3W9y1VVwzjlw771QVXXEYRkZ32HcuNdYv/4qCgpeP2L/Venp/G70aC5Ys4ZNTfS3\njIqOpjAljciiQ5uuxI3OXonrUK8F/h34J/AXXGI5EygWkeTmqt9an0g6LhPVEkgaU3E995e11E7W\nFaxPxBjTq+zbB7ff7pLKSy/ByScfcUhFxWrWrj2fESOeID39yiP2z83L49GcHJZNmUL/RkOyXLT6\nC37++TROvkkbpsedh7tXRIF5qnqTiHwf1+G+CngUGKSqzbZKtXgmoqr5qnoq8CBumsSdwIOqekp3\nJxBjjOl1MjLg1VfhscfgkkvgwQeh0f0g8fFZTJy4kK1bb6eo6O0jirhuwAAuSknh2o0bafwje1Ji\nf7Ta3TsoIqcBlwJJQH/gVBE5Dze673Dc/SJxQElLVW7TfSKqukhVn/GW/2vLa0zX6nHtvd3IYhFg\nsQg4rmLx9a+7MbqWLIHzzoPCwsN2x8WNZ8KEd9i06XuUlX1yxMsfHz6cvTU1PL/v8N/6Y2Ni4O2B\nAKjqx8CbuOashao6UVX/ibtfZCOwEHd5b7OzGoLddW6MMT3TgAHw/vtuBsapUyE7+7DdCQlTGTPm\nBdat+zpVVTsP2xcREsLzY8Zw7/btFAXd3Dg8OpqPhk4FQETmA5cDCcBY
EVkpIotxk1E9hJuw6mHg\ntpaq2eZRfLuSNx3vjUDDUCs/8zJk4+OsT8QY0/v99a/urvcXXoALLjhs1+7dT5Cf/zKTJn1MaGjU\nYftu2byZUBGeGjUKgLyaGsZ8tJDycy4CNxR8Cq7fO0FVK7zmrF/jOtTvBUpV9ZGWqtaTz0R+o6qT\nveWIBGKMMX3Gt77l7iu5/no3yGOQwYNvJzp6BNu23XnEy36emclL+fmHRgBOi4jgYOihuzNuxo1E\nosA6EbkON9NhHK4p61ZgTGtV68lJpOPTe/VBx1V77zFmsQiwWAQc97E45RT44AO46y74058ObRYR\nRo9+luLidyguPnxIw/SICL6dns7/7HVDZIWKkBTmrthS1atxAy1+qapDVXWuqo5S1WG4JqzVLc2t\n3qAnJ5FbRGS1iPyviDQ7q5YxxvQZ48e7WRbvu89dxeUJD+/HmDFz2bz5Jurqyg97yS2DBvFcXh4+\nb3yt5IjDm7yacRXuXpFWtThO/LEkIguB9OBNuNOq+4DfAg+pu5D5EeA3wA1NlXPttdeSmZkJQL9+\n/cjKyjo0e1nDL4++sD5r1qweVR9b7znrDXpKfbprvWFbT6nPUa2/9x6LzzgDcnOZdccdAGRnh7Br\n13hSUx9g5MgnDjs+Zd06znvmGYZERVHRylwkIhKK63Cf3OKBDcf39I5pERkGzFfViU3ss451Y0zf\ntHgxXHEF/OtfcOKJANTWFrJ8+Tiysv5FbGygO+OZPXtYUVHBC2PHcl52Nu9lZTXcbJiJ+36d0HCs\n17l+j6rObks1emRzlohkBK1eDnzZXXU5XjT+1dmXWSwCLBYBvS4Ws2bBI4+4Sa+88bIiIlIZMuRu\nduy497BDL0lJ4d2SEvyqnJXkRgIWkT8DnwCjRWSX17EO8C3a2JQFPTSJAL8SkTUishp3S/5PurtC\nxhjT43z/+67D/ZZbDm0aNOjHlJcvp6Ji5aFtQ6OiSAkPZ3VlJXcPHQq4jnVVHaiqkQ0d697261S1\nzfNF9fjmrJZYc5Yxps87cMCNCPzQQ655C9iz5ylKS5cyfvy8Q4fdtGkTY2JiuH3IEEQEVe2UK2B7\n6pmIMcaYtoiNdTch3norFLkhejMybqCsbClVVdsOHXZKYiKfNzNM/NGwJNJL9Lr23qNgsQiwWAT0\n6licfDJceSXccw8AYWFxZGRcx969vzt0yNT4+GbnGjkalkSMMaY3ePBBN8nVCjde4sCBPyA//0X8\nfjd21ujoaHbV1HCw0ajAR8v6RIwxprd49lk3ztYHH4AIq1adyeDBd5CaeikA45ct4+WxY5mUkGB9\nIsYYYxq5/nrYvRsWLQIgPf3bFBQErtYdFRPDliZmTTwalkR6iV7d3ttOFosAi0VAn4hFWJgbEuXR\nRwFISbmMkpL3qK+vBmB4VBQ7qqs79S0tiRhjTG9y9dWweTOsXk1ERBpxcRMpLXVnJkOjotjVyUnE\n+kSMMaa3+eUvYdMmmDuXnJxfUluby6hRT/N6QQGvFBTw5oQJ1idijDGmGddfD2+8AaWl9O//VUpK\n3gdgQGQk+4JmOuwMlkR6iT7R3ttGFosAi0VAn4pFWhrMmQOvvUZcXBY+XwE1NftICw+n0JKIMcaY\nVl1zDbz8MiIhJCScSnn5x6SEh1Pk83Xq21ifiDHG9EY1NZCRAevXk1P7PD7ffoaP+E/ClyzBP3s2\nQBSwFIjAzS31uqo+2N63sTMRY4zpjSIj4cIL4a23iI+fRkXFckJEiA8NBUBVa4DZqjoJyALOF5Hp\n7X0bSyK9RJ9q722FxSLAYhHQJ2Nx0UUwfz5xcZOorFyNqpIQFpjQVlUPek8jcWcj7W7asSRijDG9\n1Zw58K9/EaEJhIbGUl2dc+hMBEBEQkRkFbAPWKiqy9v7FtYnYowxvdnJJ8Pjj5Od9AsGD76NC3Zm\nsGzq1MPuExGRBOBN4BZVXd+e4u1M
xBhjerNZs2DxYmJixnDw4CZig85EGqhqObAIOK+9xVsS6SX6\nZHtvMywWARaLgD4bizPOgI8/Jjp6NFVVW4gOcV/7IpIiIone82hgDrCxvcVbEjHGmN7slFNg2TKi\nI4dTVbWVy1NTG/YMABaJyGrgc+A9VX23vcVbn4gxxvR2I0Zw8G/PsKb2VmbM2GpzrBtjjGmHKVOI\nWrePmprdqPo7tWhLIr1En23vbYLFIsBiEdCnY5GVRcjajYSGxuPzFXVq0ZZEjDGmtxs/Hr78ksjI\nQdTU7O3Uoq1PxBhjertt2+Css8h+ewyDB99GSsqF1idijDGmjTIzoaCAyPoUamvzO7VoSyK9RJ9u\n723EYhFgsQjo07EIDYXhw4ndF47PV9ipRVsSMcaYvmDECKL21Hd6ErE+EWOM6Qtuu42yfnnkXRXH\n2LFzj/8+ERH5hoh8KSL1IjK50b57RWSLiGwQka92Vx2NMabXyMwkPPcAdXX7O7XY7mzOWgtcBiwJ\n3igiY4ErgLHA+cBvRaRTMmZv1qfbexuxWARYLAL6fCyGDiUst5S6utJOLbbbkoiqblLVLUDjBHEJ\n8Kqq1qnqTmAL0O7ZtowxxgQZPJjQvF6URFowCNgdtL7X22ZaMGvWrO6uQo9hsQiwWAT0+VgMHEhI\nXhF1deWdWmxY64d0nIgsBNKDN+GmX7xPVecfy/c2xhgTJCMDikqor+3csbOOaRJR1TkdeNleYEjQ\n+mBvW5OuvfZaMjMzAejXrx9ZWVmHfnE0tIH2hfXg9t6eUJ/uXG/Y1lPq053rq1ev5vbbb+8x9enO\n9SeffLJPfz/86U9/QkNCCPtd53asd/slviKyCPipqn7hrY8DXgFOxjVjLQRGNXUtr13iG7B48eJD\n/3j6OotFgMUiwGIBOm4cK+7awPTr6bRLfLstiYjIpcAzQApQCqxW1fO9ffcCNwA+4DZVfb+ZMiyJ\nGGNMW82cSfbly8i6vbrTksgxbc5qiaq+iZsYvql9jwGPdW2NjDGml0tJIbIiAqjutCJ74tVZpgOC\n+wP6OotFgMUiwGIBJCcTXhE4dxCRnSKSLSKrRGRZR4rstjMRY4wxXSwpiYjK0OAtfmCWqna4t73b\nO9aPhvWJGGNMO/zyl+xb/wQDXipAVUVEdgBTVbW4o0Vac5YxxvQV/foRWnnYFgUWishyEbmxI0Va\nEuklrL03wGIRYLEIsFgAiYmEHTys9eY0VZ0MXAD8SEROb2+RlkSMMaaviI8ntDJwx7qq5nmPhcAb\ndGCcQusTMcaYvmLpUip/chnxK0sAYoEQVa0UkVjgfeDB5u7La45dnWWMMX1FfDwR1VENa+nAGyKi\nuFzwSnsTCFhzVq9h7b0BFosAi0WAxQKIjSWiNgYAVd2hqlmqOklVJ6jqLztSpCURY4zpK2Jj4eDB\nTi3S+kSMMaavKC2FzEykrOz4n2PdGGNMF4uO7vQzEUsivYS19wZYLAIsFgEWCyAiAurqOrVISyLG\nGNNXiLizkc4s8njuU7A+EWOMaafkZKSkxPpEjDHGdEBUVOvHtIMlkV7C2nsDLBYBFosAi4UnMrJT\ni7MkYowxfUlQEhGR20Rkrbfc2pHirE/EGGP6kqwsJDsbYDzwF2AaUAcsAG5S1e3tKc7ORIwxpi+J\niGh4Nhb4XFVrVLUeWApc3t7iLIn0EtbeG2CxCLBYBFgsPIEk8iVwhogkiUgMbk6RIe0tzkbxNcaY\nvsRLIqq6UUQeBxYClcAqoL69xVmfiDHG9CXnnou8//4R94mIyKPAblX9fXuKszMRY4zpS8LDDz0V\nkVRVLRSRocBlwIz2Fmd9Ir2EtfcGWCwCLBYBFgvPoEHBa/NE5EvgLeBmVS1vb3GWRIwxpi/5wx8O\nPVXVM1V1vDcx1eKOFGd9IsYY08eIiI2dZYwxpvt1WxIRkW+IyJciUi8ik4O2DxORgyKy0lt+2111\n
PJ5Ye2+AxSLAYhFgsTg2uvNMZC3uaoAlTezbqqqTveXmLq7XcWn16tXdXYUew2IRYLEIsFgcG912\nia+qbgIQkaba5Tqlra4vKS0t7e4q9BgWiwCLRYDF4tjoqX0imV5T1iIROb27K2OMMaZpx/RMREQW\nAunBmwAF7lPV+c28LBcYqqr7vb6SN0VknKpWHsu6Hu927tzZ3VXoMSwWARaLAIvFsdHtl/iKyCLg\nTlVd2d79ImLX9xpjTAd01iW+PWXYk0MfRkRSgBJV9YvIcGAk0OT49p0VBGOMMR3TnZf4Xioiu3Fj\ntbwjIgu8XWcCa0RkJfAa8ANVtR4xY4zpgbq9OcsYY8zxq6denXUYETlPRDaKyGYRuaeZY54WkS0i\nslpEsrq6jl2ltViIyNUiku0tH4nIhO6oZ1doy78L77hpIuITkXbP2na8aOP/kVkissq7yXdRV9ex\nq7Th/0iCiLztfVesFZFru6Gax5yIPCci+SKypoVjjv57U1V79IJLdFuBYUA4sBoY0+iY84F/eM9P\nBj7r7np3YyxmAIne8/P6ciyCjvsQeAe4vLvr3Y3/LhKBdcAgbz2lu+vdjbG4F3isIQ5AMRDW3XU/\nBrE4HcgC1jSzv1O+N4+HM5HpwBZVzVFVH/AqcEmjYy4BXgRQ1c+BRBFJp/dpNRaq+pmqlnmrnwGD\n6J3a8u8C4MfA60BBV1aui7UlFlcD81R1L4CqFnVxHbtKW2KhQLz3PB4oVtW6Lqxjl1DVj4D9LRzS\nKd+bx0MSGQTsDlrfw5FfjI2P2dvEMb1BW2IR7HvAghb2H89ajYWIDAQuVdXf0btHQWjLv4vRQH/v\nBt7lInJNl9Wua7UlFv8NjBORXCAbuK2L6tbTdMr3Zk+5xNd0MhGZDVyHO6Xtq54EgtvEe3MiaU0Y\nMBk4C4gFPhWRT1V1a/dWq1ucC6xS1bNEZASwUEQmqt3Q3CHHQxLZCwwNWh/sbWt8zJBWjukN2hIL\nRGQi8Cxwnqq2dDp7PGtLLKYCr3rjs6UA54uIT1Xf7qI6dpW2xGIPUKSq1UC1iCwFTsL1H/QmbYnF\ndcBjAKq6TUR2AGOAFV1Sw56jU743j4fmrOXASG+I+AjgSqDxl8DbwL8BiMgMoFRV87u2ml2i1Vh4\ncyXPA65R1W3dUMeu0mosVHW4t5yA6xe5uRcmEGjb/5G3gNNFJFREYnAdqRu6uJ5doS2xyAHOAfD6\nAEbTzA3NvYDQ/Bl4p3xv9vgzEVWtF5FbgPdxSe85Vd0gIj9wu/VZVX1XRC4Qka3AAdwvjV6nLbEA\nfg70B37r/QL3qer07qv1sdHGWBz2ki6vZBdp4/+RjSLyHrAGqAeeVdX13VjtY6KN/y4eAf4UdOnr\n3apa0k1VPmZE5M/ALCBZRHYBDwARdPL3pt1saIwxpsOOh+YsY4wxPZQlEWOMMR1mScQYY0yHWRIx\nxhjTYZZEjDHGdJglEWOMMR1mScQcFRGpF5GV3hDjK72bHY+2zEtEZEzQ+oMiclYnlDtTREpF5Atv\nqPDFInLhUZT3AxH5ztHWqwPv+10RKWgU9zGtv7LJsiracMxHHSm7iXKGicjazijL9Bw9/mZD0+Md\nUNXJze0UkVBVrW9nmZfihm7fCKCqDxxF/RpbqqoXe3U7CXhTRA6qarvn11DVP3RivdrrVVW9tRPK\nafVGMVXtzPHX7Ma0XsbORMzROmJIBe+X8lsi8iHwgYjEisgHIrLCmyzr4qBj/83btkpEXhCRU4CL\ngV95v7BPEJG5DRNKicjZ3vZsEflfEQn3tu8Qkf/nnWVki8jo1iquqtnAQ7jh4hGRFBF5XUQ+95ZT\nxNkhIglBdd4sIqki8oCI3OFt+56ILPM+x99EJMrbPldEnhKRj0VkqwRNjCUi94jIGu81v/C2DReR\nBeJG2l3SwudoKu6XisgH3vMBIrJJRNK8v8eb4kbw3SQi9zfx2p
b+RhXe40yvjL+JyAYReSnomMne\nmd1yr/7p3vYp4iY8WgX8qLW/iTkOdffEKbYc3wtQB6wEVuHmqwD4LrCLwORYIUCc9zwZN98DwFdw\nZxtJ3no/73EuQRNINawDkV65I7ztLwC3es934MbGAvgh8Mcm6joTeLvRtpOAdd7zV4BTvedDgPXe\n8yeA73rPpwPve88fAO7wnicFlfkw8KOguv/Vez426LOfD3wERDb67B8Efb7pwIdNfI7v4uZHaYj7\nyqByXsR9Wc8Hrgg6fi/QD4gC1gKTvX3l3mNoU3+jRsfMxM1PMQCXxD4BTsW1aHwMJHvHXYEbbgTc\nUOunec9/RTMTJNly/C7WnGWO1kFtujlroQYmxwoBHhORMwE/MFBE0oDZwN/UG2lYVUtbea8Tge0a\nGFjyBeBm4Glv/Q3v8QvgsjbWP/gX/TnAWBFp2BYnbrDC14D7vfe7EvhrE+VMFJGHcV/UscB7Qfve\nBFA3hlOat+1sYK6q1nj7SkUkFvel/LegOoQ3U+/mmrNuBb4EPlXV14K2L2yIr4j8HTdFwMqgoqSh\nlAAAAmpJREFUzy808TdS1caTeS1T1TyvnNVAJlAGjMcNqS64v3euiCTifkh87L32Jdxsm6YXsSRi\njpUDQc+/jRuKfZKq+sUNvR3l7WvvHB8tHV/jPdbT9n/bkwmMZivAyepmxAv2qYiMEJEUXH/Nw02U\nMxe4WFW/FJHv4n61N65Xa/UPAfY3k5TbagguCTSeoa5xX4Q2emzpbxQs+LM0xFmAL1X1tOADvSRi\nejnrEzFHqy1JIBEo8L6cZuPmvwb4P+AbItIfQESSvO0VQMKRxbAJGCYiw731a4DFHa2vuHlX/gM3\n0x24kV9vC9p/UtDr3gB+g2viamqOljhgn9dH8+02vP9C4DoRifbeK0lVK4AdIvKNRnVs8XMEHRsG\nPIc7W9ogIncG7Z4jIv2897sU15QWXE5zf6Mm36uRTUCquOHEEZEwERnnnYmWisip3nEtxcUcp+xM\nxByttlxt8wowX0SycRP/bABQ1fUi8iiwRETqcO371+Pmxf6jiPwY+EbDe6hqjYhcB7wuIqG4uSMa\nrpBq61U/p4vIF7gmp3zgFlVd7O27Dfgfr56hwFJccxm4Jq1luP6Fptzv7S8APicwh3eTZwCq+p6X\npFaISA3wLi6hfQf4nYj8B+7/56u44dsbu0JETsN9watXzzm4q88+ETfM+TIRecc7fhnwd9z0py+p\n6qpG9Wvyb9TMZ2j8WXxe4nvGO/sIxc0quR7393xeRPy4JG16GRsK3phezmtem9JMH4oxR8Was4wx\nxnSYnYkYY4zpMDsTMcYY02GWRIwxxnSYJRFjjDEdZknEGGNMh1kSMcYY02GWRIwxxnTY/wfSUt9c\nOafS6wAAAABJRU5ErkJggg==\n", + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "glmnetPlot(fit, xvar = 'dev', label = True);" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Prediction is a little different for logistic from Gaussian, mainly in the option `type`. \"link\" and \"response\" are never equivalent and \"class\" is only available for logistic regression. 
In summary,\n", + "* \"link\" gives the linear predictors\n", + "\n", + "* \"response\" gives the fitted probabilities\n", + "\n", + "* \"class\" produces the class label corresponding to the maximum probability.\n", + "\n", + "* \"coefficients\" computes the coefficients at values of `s`\n", + "\n", + "* \"nonzero\" retuns a list of the indices of the nonzero coefficients for each value of `s`.\n", + "\n", + "For \"binomial\" models, results (\"link\", \"response\", \"coefficients\", \"nonzero\") are returned only for the class corresponding to the second level of the factor response.\n", + "\n", + "In the following example, we make prediction of the class labels at $\\lambda = 0.05, 0.01$." + ] + }, + { + "cell_type": "code", + "execution_count": 31, + "metadata": { + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } + }, + "outputs": [ + { + "data": { + "text/plain": [ + "array([[ 0., 0.],\n", + " [ 1., 1.],\n", + " [ 1., 1.],\n", + " [ 0., 0.],\n", + " [ 1., 1.]])" + ] + }, + "execution_count": 31, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "glmnetPredict(fit, newx = x[0:5,], ptype='class', s = np.array([0.05, 0.01]))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "For logistic regression, `cvglmnet` has similar arguments and usage as Gaussian. `nfolds`, `weights`, `lambda`, `parallel` are all available to users. There are some differences in `ptype`: \"deviance\" and \"mse\" do not both mean squared loss and \"class\" is enabled. 
Hence,\n", + "* \"mse\" uses squared loss.\n", + "\n", + "* \"deviance\" uses actual deviance.\n", + "\n", + "* \"mae\" uses mean absolute error.\n", + "\n", + "* \"class\" gives misclassification error.\n", + "\n", + "* \"auc\" (for two-class logistic regression ONLY) gives area under the ROC curve.\n", + "\n", + "For example," + ] + }, + { + "cell_type": "code", + "execution_count": 32, + "metadata": { + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } + }, + "outputs": [], + "source": [ + "warnings.filterwarnings('ignore')\n", + "cvfit = cvglmnet(x = x.copy(), y = y.copy(), family = 'binomial', ptype = 'class')\n", + "warnings.filterwarnings('default')" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "It uses misclassification error as the criterion for 10-fold cross-validation.\n", + "\n", + "We plot the object and show the optimal values of $\\lambda$." + ] + }, + { + "cell_type": "code", + "execution_count": 33, + "metadata": { + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } + }, + "outputs": [ + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAYkAAAElCAYAAAARAx4oAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJztnXucFNWV+L8HFJHhMQwoCsrMiIma+EBdXRNMGGI0JmaR\nRDfRFbXzUFkVJYnGJBt+w2SS3cS4UYOJms1jfLAxMcZnEoVkGTZqiKiIxoCyOA0KMshjgBkEcTi/\nP6qqqWn6UT3TNd1Vfb6fT3+67q17b53TVV2n6p57zxVVxTAMwzAyMaDUAhiGYRjlixkJwzAMIytm\nJAzDMIysmJEwDMMwsmJGwjAMw8iKGQnDMAwjK2YkjLJERLpF5HkR+ZuILBWRL4uIlFqu3iAi14jI\n30XknrT8ySLS4eq5VETmh3T8X4jIp8No24g/+5VaAMPIQpeqngQgIqOBXwLDgTl9bVhEBqjqnr62\nUwD/Cpyhqusy7PtfVZ2araKIDFTV7vBEM4zc2JuEUfao6kbgcuBqcG7yInKjiPxVRF4QkcvcfBGR\nH7tP7U+IyO+8J2gRaROR74rIs8D5InKEiPxBRJaIyCIRea9bbrSI/MZt+68i8gE3f7L7tP+8iDwn\nIlXpcrpvOy+JyIsico2bdztwBPAHEbk2g3r7vB25T/63i8hi4HsiMkREfiYii91jT831O7j7bhOR\n5e7bycG+/DNcHZaJyE9FZH/f7/Pvro7PiMiJIvK4iKwUkSsKP2tGbFBV+9in7D7Atgx5m4GDgMuA\nb7h5g4AlQC1wHvCYmz/GLf9pN90GXOdr64/ABHf7VOBP7vY84IPu9uHA393tR4APuNtDgAFpsp0E\nLAMGA1XA34AT3H2vASMz6DMZ6ACedz9fd/N/ATziK/cd4F/c7RHAK8CBOX6HTwFPuPmHAluATwMH\nAGt8et8FXOP7fS53t38AvODqORpYX+rrwT6l+1h3kxFFzgKOE5F/dtPDgfcApwP3A6hqu4gsTKv3\nKwD3LeCDwP0+P8f+7vdHgWN8+UNFZAjwFHCziMwDfquqa9PaPh14UFV3usf4LfAhHMMhZHhjcMnW\n3XR/mr7/JCLXu+lBwPgcv8OHcbrnUNU3ReRP7v6jgNdUdZWbvgu4Evihm37U/X4JqFLVHcAOEdkp\nIsNVdVsWHYwYY0bCiAQicgTQrapvuTfwmaq6IK3MOXma6XK/BwBb1PV5pB8K+EdV3Z2W/z0ReQw4\nB3hKRM5S1VcL1yQwXWnp81R1ZQ9Bg/8OkmU7nV3u9x7fNoBi94qKxXwSRrmSupmJyEHA7cBcN+sJ\n4EoR2c/d/x7f0/75rm9iDNCQqWFV3Q60icj5vmMc727OB6715Z/gfh+hqi+r6o043TpHpzX7Z2Ca\niAx231Q+BfxvrzTflyeAa3wyTfTlZ/od/hf4rOuzOBSY4pZ/Bah1DS7AxUBrkWQ0Yoo9HRjlymAR\neR6na2U3cLeq3uzu+ylQBzzvPk1vAKYBDwAfAV4GXgeeA7a6ddLDHV8E3CEi38T5H9wHvIhjIH4k\nIsuAgTg33CuBWSIyBeh22/+DvzFVXSoiLTgGRIGfqOqLWY6dj/Ty3wZuEZEXcYxnGzA12++gqg+K\niPc7rAGedmXcJSKfA34jIgNdWe8MIKOFiq5gRNXOvxEfRKRKVbtEpAb4KzBJVTeUWi7DiCr2JmHE\njcdEpBrHEf0tMxCG0TfsTcIwDMPIijmuDcMwjKyYkTAMwzCyEikjISIHuCEIlrrhDxrd/JEiMl9E\nXnHDMYwotay9oQL0O0xE/kdEXnb180JXnCAiT7uhIh4WkaGllrVQ8uj2F1+4i38otay9IYd+sbg2\n/WTTNS6IE+Kl3R0tl59ST/ku9AMMcb8HAotxQip8D/iqm38D8N1Sy2n6ZdTtEGCiuz0UWAEcAzwD\nnO7mJ3AcziWXt0i6PQGc5eZ/HFhYalmLpN8rOHNFYnFtBtG11
HIVUb/TgYnAi0HKR+pNAkCdUAHg\nxKHZD2cM97k4IQZwv6eVQLSiEGf9VHW9qr7gbnfi3EjHAe9R1SfdYn/EicEUKbLoNhZn9rL3dF0N\npIfziAQZ9FsOHEZMrk0/WXQdV1qpiof7X9sStHzkjIQ7i3QpsB5YoKpLgDGq2g7OCcYX9TJqxF0/\nDxGpw3maWQy8LG5kU+AzODefyOLT7a/Al4CbRGQNcCPw9dJJVhzSzl3srk0/aeeyIomckVDVPap6\nIs6N5FQReT/7zgiN7LjeuOsH4PocfgNc6z6pfQG4SkSW4ERQfaeU8vWFDLr9q7s9Hsdg/LyU8vWV\nDPrF6tr0k0HXiiRyRsJDnYiUrcDZQLsbqwcROQQnPEGkiat+bpyh3wD3qOrDAKr6iqp+TFVPwQmP\nsSpXG+VKJt2AS1X1IQBV/Q2OjymSZNEvNtemnyy6ViSRMhLiLAgzwt0+EDgTp7/wERyHJ8ClQCRP\natz1c/k5zhoNt3oZ4gTwQ0QGAN8E7iiRbH1lH92AtSIyGZwFf4AwI8eGTSb94nRt+smka5zIFb6+\nZ0HX2x0JROQ4HOfYAPfzK1X9jhun59c4i8SsBj6jqh2lk7R3VIB+k3AC5r2E0y2hwDeA9wJXuenf\nquo3SiZkL8mh2zac9RoGAjuBK1V1aank7C059HuGGFybfrLpqqqPl1SwIiEi/40TIXkU0A40quov\nspaPkpEwDMMw+pfQu5tE5GwRWSEir4rIDRn2TxaRDnHW3X3eDd0cqK5hGIYRLqG+Sbh9zK8CZwDr\ncOLXX6CqK3xlJgNf0bQlHIPUNQzDMMIl7DeJU4GVqrpaneUg78OZfJNOJgdK0LqGYRhGSIRtJMbh\nrBDm8QaZZy5+QEReEJHficj7CqxrGIZhhEQ5LDr0HDBeVXeIyMeBh3BGuxiGYRglJmwjsRYY70sf\nRlrsGv9MRlX9g4j82B3ymbeuh4jYEC3DMIwCUdW8cyXC7m5aAhwpIrUiMgi4AGfyTQpvtqa7fSqO\nM31zkLp+Sh1ZMaxPY2NjyWUw/cLTr7GxWMcpT/2i/omzfkEJ1UioajdwNTAfeBm4T1WXi8gVInK5\nW+x8EfmbG9TuFuCzueqGKW85kkwmSy1CqFS6fk1NxTlOsdoplEo/f5VA6D4JdWYpHpWWd6dv+0fA\nj4LWNQzDMPqPSMVuqkQSiUSpRQgV0y/amH7xJxZhOURE46CHUXmIQDEu3WK1Y/SOZDKZ6ppasWIF\ngwcPBmDnzp0cffTRANTV1VFXV1ciCfdFRNAycFwbfaS1tbXUIoSK6RdtTD+Huro6GhoaaGhooL29\nnUQiQSKRoL29PZVfTgaiEMxIGEYJaWwsr3YMIx0zEmVOQ0NDqUUIlUrXb86c4hynWO0USqWfv0rA\njIRhGIaRFTMSZY71+UYb0y/axF2/IJiRMAzDMLJiRqLMiXufqOkXbUy/+GNGwjBKSNQd10b8MSNR\n5sS9T7TS9Yt67KZKP3+VgBkJwzAMIytmJMqcuPeJmn7RxvSLP2YkDMMwjKyYkShz4t4navpFG9Mv\n/piRMIwSYrGbjHLHjESZE/c+0UrXL+pDYCv9/FUCZiQMwzCMrJiRKHPi3idq+kUb0y/+hG4kRORs\nEVkhIq+KyA05yp0iIrtF5NO+vKSILBORpSLyTNiyGoZhGD3ZL8zGRWQAcBtwBrAOWCIiD6vqigzl\nvgs8kdbEHqBBVbeEKWc5E/c+UdMv2ph+8SfsN4lTgZWqulpVdwP3AedmKDcT+A2wIS1fsC4xI8ZE\n3XFtxJ+wb8DjgNd96TfcvBQiMhaYpqq34xgFPwosEJElInJZqJKWKXHvE610/Sx2U3kTd/2CEGp3\nU0BuAfy+Cr+hmKSqb4rIQTjGYrmqPpmpkUQikVpovLq6mokTJ6ZeFb0TbWlLxzkN5SVPpabb2tpo\nbW0tG3n2Xh/OdjKZpBBEV
QuqUFDjIqcBc1T1bDf9NUBV9Xu+Mq95m8BooAu4XFUfSWurEdiuqj/I\ncBwNUw/DCAsRKMalW6x2jL7T1NREozu70b9dbogIqpree7MPYb9JLAGOFJFa4E3gAuBCfwFVPcLb\nFpFfAI+q6iMiMgQYoKqdIlIFnAWU6KXaMAyjJ8lkMvVUnkwmUz0ZcSNUn4SqdgNXA/OBl4H7VHW5\niFwhIpdnquLbHgM8KSJLgcU4xmN+mPKWI/5XxThi+kWbStavrq6OhoYGGhoaWL16NfW1tSz66U9Z\n1dLC9eeey5emTWNVSwtN06ezuq2t/4QuMqH7JFT1ceCotLw7s5T9vG+7DZgYrnSGUVosdlM86Ni8\nmblnnknTqlVUAV3JJLOBZmB0Mknj4sXMXLCA2vr6EktaOKH6JPoL80kYhlFKzjv+eO5+6SWqfHld\nwE1Ao7d90UU03ntvSeTLRFCfhM1BMAzD6CNV27f3MBAAVTizgVPb69b1r1BFwoxEmVPJfb5xwPSL\nNkH16xo2jK70PPbeYLuAAWPHFk+wfsSMhGEYRh8ZP2UKjRMmpAxFFzAbSLjbjRMmkGhuLpV4fcJ8\nEoZhGH2kqamJxCWXcO6HLuJIbaf+H47lXRE2LVvGhEmTSDQ3l53T2nwShhEBLHZTfKitr2fDnqep\nveAqvv/ww9z80ENMSCRovPfesjMQhWBGosyxPt9ok08/i91U3hSiX3c3vPUWVFWleyeijRkJwzCM\nIrBpE4wYAQMH7slfOEKYkShzvCBdccX0izam317a2+GQQ8KTpVSYkTAMwygC69ebkTBKgPX5RhvT\nL9oUot+fk0+z68hfhydMiSiH9SQMo2Kx2E3x4bm3nuLtUeuB4aUWpajYm0SZY32+0SafflEfAlvp\n589Pe+d6xlTFr7/JjIRhGEYR2LSrncOrzUgY/Yz1+UYb0y/aFKLf1u711B1kRsIwDMPIwI4B63nP\noWNKLUbRMSNR5lifb7Qx/aJNIfrt98z1nFhfF5YoJcOMhGGUkKg7rg2H7u4BvPPMpdQdGq+RTWBG\nouyxPt9oY7Gbok3g9SS6qhg9GgbE8I4aukoicraIrBCRV0XkhhzlThGR3SLy6ULrGoZhlJLOzqGx\nnG0NIRsJERkA3AZ8DHg/cKGIHJ2l3HeBJwqtG3eszzfamH7RJqh+ZiR6z6nASlVdraq7gfuAczOU\nmwn8BtjQi7qGYRglxYxE7xkHvO5Lv+HmpRCRscA0Vb0dkELqVgLW5xttTL9ok0m/ZDJJa2srra2t\ntLS00NraStsBL9A+4pf9L2A/UA6xm24B+uxvSCQS1NXVAVBdXc3EiRNTr4reiba0pcst3dhYnPYu\nvRSg9PpUQjqZTKbSTU1N1NXV8ca2hRz5DyMAaGtro7W1tWzk9dLetid/YFQ1tA9wGvC4L/014Ia0\nMq+5nzZgO7AemBqkrm+fGoZh9Ddz5sxRVdXhX/iYXvOT+3rkpW+XG+59M+99POw3iSXAkSJSC7wJ\nXABcmGakjvC2ReQXwKOq+oiIDMxX1zAMoxzYtf8Wjjg4nk6JnD4JERkoIjf1tnFV7QauBuYDLwP3\nqepyEblCRC7PVCVf3d7KElX8r4pxxPSLNpWuX8fmzTRNn86pj73I32+/idVtbf0jWD+S801CVbtF\n5PS+HEBVHweOSsu7M0vZz+eraxiGUQ6sbmtj6z338O0tW2gEut54jMYzl9N9zjmlFq2oiNM1laOA\nyO04o4ruB7q8fFX9bbiiBUdENJ8ehmEYxaRp+nSumzePKl9eF3DJccfxwIsvOmWammgs0xWhRARV\nlXzlgvgkBgObgI/48hQoGyNhGFFlzpzixF0qVjtGcPasXdvDQABUASN37aKlpQWAMWPGpLqs6urq\nUiMwI0UQ73a5f4jx6KaFCxeWWoRQqXT9inXpluovUMnnb85FF2knOD++++kEnXPRRf0nYB8
g4Oim\nvJPpROQwEXlQRDa4nwdE5LDQrZdhGEYZk2huZubIkak++C6gccIEEs3NpRSr6ATxSSwA/hu4x82a\nDlykqmeGLFtgzCdhRBUR5xG0XNoxCuNL117Ly49uomrbi0w8+3gSzc3U1teXWqxAFNMncZCq/sKX\nbhGRWb0XzTAMIx5U19Sw9aBbOXzKz2j82RdKLU4oBIndtElEprtzJgaKyHQcR7bRD1T6OPSoY/pF\nm3z67VHl+fd+iurR7f0jUAkIYiQ+D3wGJ1zGm8D5wOfCFMowKoVijY4s01GWsae9S+ke+xTDh+wu\ntSihkdMn4YbGuEZVb+4/kQrHfBKGYZSCj834GosHP82XR55RtvMhshHUJ5HzTUKd0BgWL8kwDCMD\n63Zt49D94x0UIkh301MicpuIfEhETvI+oUtmANbnG3VMv2iTT7+NbOI9NfFeMDPI6KaJ7ve3fHlK\nzxnYhmEYFcfW/d/kxMOPglXPlVqU0MjnkxgAnK+qv+4/kQrHfBKGYZSC4Uc+xaJHjuWR+2+pWJ/E\nHuCrRZPKMIweFCveksVt6n927IAdyX/kuPeOKLUooRLEJ/FHEblORA4XkRrvE7pkBmB9vlEnn35N\nTcU5TrHaKZRKPn8rV0JNzWb2K4dFoEMkiHqfdb+v8uUpcESGsoZhGLEjmUym1oZOJpMMPuAAfv3v\n8zipawNN099Lx6hRpRUwRPLGbooC5pMwoorFbooeX7r2Wgb+7nc0rVpFFU5gv5kjR9L43HORidsE\nRfBJiMhXfdv/nLbv3/smnmEYRjRZs3BhykCAs4bE3C1baJk9u5RihUYun8QFvu2vp+07OwRZjAxU\ncp9vHDD9ok0m/aq2b8+42NCedev6Q6R+J5eRkCzbmdLZGxE5W0RWiMirInJDhv1TRWSZiCwVkWdE\nZJJvX9K/L+gxDSMqWOym6NE1bNjedZy9PGDA2LGlECd0svokROR5VT0pfTtTOmvjzjyLV4EzgHXA\nEuACVV3hKzNEVXe428cBv1bVY9z0a8DJqrolz3HMJ2EYRr/wpWuvRR57jObXXqsIn0Su0U0niMg2\nnLeGA91t3PTggHKcCqxU1dWuUPcB5wIpI+EZCJehwB5fWgg2TNcwDKNfqK6p4bjb7+LEWWdx2ttj\nmDBpEiNGjYqUgSiErDdgVR2oqsNVdZiq7udue+n9A7Y/Dnjdl37DzeuBiEwTkeXAozihyVNiAAtE\nZImIXBbwmLGiEvt844TpF22y6bf+HXjzrIlMSCRovPdeqmviO3WsLKaBqOpDwEMicjrwbcBbGnWS\nqr4pIgfhGIvlqvpkpjYSiQR1dXUAVFdXM3HiRBoaGoC9J9rSlra0pfuabmtrY+nWxxk5YHxqf1tb\nGx6lli9b2tv25nsEJdR5EiJyGjBHVc92018DVFW/l6POKuAUVd2clt8IbFfVH2SoYz4JwzD6haam\nJmTQ9azf0smYqttpbGykqampMmM3FYElwJEiUisig3CG1T7iLyAiE3zbJwGDVHWziAwRkaFufhVw\nFvC3kOU1jH7FYjdFk/Y3hnDM4QeXWox+IVQj4S5adDUwH3gZuE9Vl4vIFSJyuVvsPBH5m4g8D8zF\nWSoVYAzwpIgsBRYDj6rq/DDlLUf8r4pxpNL1s9hN5U02/dasgfHj+1eWUpHXJyEinwa+BxyMM9pI\ncLqMhgc5gKo+DhyVlnenb/tG4MYM9drYu5aFYRhGv7N+/fqUoUgmkym/5//93zuMHz+IF14onWz9\nRZA3iRuBqao6wje6KZCBMPqO53yKK6ZftIm7fhdccAENDQ00NDSwevXqlL7r1w+qmDeJIEaiXVWX\nhy6JYRhGBNi1axC7dkGMR732IIiReFZEfiUiF4rIp71P6JIZQOX2+cYF0y/aZNKvrXMH+vlJSODg\nRNEmyDyJ4cAOnNFFHgr8NhSJDKOCsNhN0aN95y6GDEk
P8Rdf8hoJVf1cfwhiZCbufb6Vrl/Uh8BW\n4vnbuLuL0ftXiEOCAN1NInKYiDwoIhvczwMiclh/CGcYhlFudGgn44aOT81cbm1tpba2ltbW1l7N\naC53gvgkfoEzAW6s+3nUzTP6gUrs840Tpl+0yaRf54DN1NeMTw2HbWhoIJFIpEZBeflxIYiROEhV\nf6Gq77qfFuCgkOUyDMMoS94etJFjxlp3k59NIjJdRAa6n+nAprAFMxwqsc83Tph+0SaTfoMf+C2f\nOPZD/S9MiQgyuunzOOEybsYZ1fQ0YM5swygCc+YUx+lcrHYMZ2a151fwz7Lu2LyZORdN57iNb/DL\nxsP44n80l07I/kRVI/9x1IgnCxcuLLUIoVLp+hXr0i3VXyDu5+/SSy9VVdXka6/p50aO1E5QBe0E\n/cqECTrrmmtKK2AfcO+bee+vWd8kROSrqnqjiMzFeYNINy7XhGe6DMMwyoeW2bOZu2UL3uyIKqBp\n1SouWbiwlGL1C7m6m7xQHM/2hyBGZiqxzzdOmH7Rpt5dknTP2rWkT5+rAqq2b+93mfqbrEZCVR91\nN3eo6v3+fSLyz6FKZRiGUUYMGDeOLuhhKLqArmHDSiRR/xFkdNPXA+YZIVCJ49DjhOkXbbxlSRPN\nzcwcOZIuN78L+OrYsXxg+vTYTqLzyOWT+DjwCWCciPzQt2s48G7YghlGJWCxm6JBbX09Iy6+mJOf\nuJv3dY7ghIbT+WpzM7Vud1ScybrGtYicgLPoz7eA/+fbtR1YqKpbwhcvGLbGtWEYfSHbsNdFixal\n1q7+f42NNHd/n6/suZ6b/r1ESwEWkaBrXOfySSwDlonIf6vq7qJKZxiGUUbU1dWlDENTUxOJRAJw\njITHG9uVgfvVMKwq1FWfy44g2taJyG9E5O8i8pr3CV0yA4h/n6/pF23KSb9kMpnyD7S0tBTFV+D5\nJACSnTsZ/s7RRZA0WgQN8Hc7jh9iCnA3cG/QA4jI2SKyQkReFZEbMuyfKiLLRGSpiDwjIpOC1jUM\nw/Coq6vbZ6nRYgbce2tnN+MPOK4obUWJIEbiQFX9E47/YrWqzgHOCdK4iAwAbgM+BrwfuFBE0k3x\nH1X1BFU9EfgC8NMC6saeuI9DN/2iTdz1q/c5pgcvv5AvHn5zCaUpDUGMxC73hr1SRK4WkU8BQwO2\nfyqw0jUuu4H7gHP9BVR1hy85FNgTtK5hRJ2oLzpUSWzcOIqjjiq1FP1PECNxLTAEuAY4GZgOXBqw\n/XHA6770G25eD0Rkmogsx1mr4vOF1I075dTnGwaVrl9TkQbJFKudQin38+f3U9xxxx20tLTQ0tLC\nHXfcEchn4fdJbNw4mqMrri8j2PKlS9zNTkKK/qqqDwEPicjpwLeBMwttI5FIpPoeq6urmThxYupV\n2LuQLW3pOKehvOQpZdq7udfV1aWMQHt7O42NjbS6ju0ZM2akyieTyVT9trY2Wltbe7TX2Qm7dn2Q\nceMy7y+1vkHSfl0LIl8EQGABUO1LjwSeCBI9EDgNeNyX/hpwQ546q4CaQuoS4yiwRryJehTYcmXO\nnDk58zLtz1Xur39VPfTQtXnrRgkCRoEN0t00WlU7fEZlC3BwQBu0BDhSRGpFZBBwAc5SqClEZIJv\n+yRgkKpuDlLXMAyjP1jy9w2MOHRVqcUoCUGMxB4RSa3VJyK1ZAgdnglV7QauBuYDLwP3qepyEblC\nRC53i50nIn8TkedxFjf6TK66AfWKDf5XxThi+kWbuOvX1tbG6rY25v3nRzh87T/TNH06HZs3l1qs\nfiXIynT/BjwpIosAAT4EXJ67yl5U9XHgqLS8O33bNwI3Bq1rGHHCYjeVN53btzP3zDNZsGoVVUDX\nvHnMHDmS1bNmVUTcJgjmuH7c7QY6zc2apaobwxXL8PCcT3Gl0vWL+hDYcjx/nmPWc9L2ZTKdrlxJ\nk2sgwAkVPnfLFm6
aPZvGewPPKY40WbubvIlrroEYD6xzP+PdPMMwjLLDMwoNaTOve0PV9u0ZFxva\ns25dX0SMFLl8El92v/8zw+emkOUyXOLe52v6RZu465cUSa0h4dEFDBg7thTilIRc3U0L3O8vqKoF\n9DMMIxZk647yR4L1OPjEE7miYwd3bml3fBLAzJEjaWxu7k+RS0ouI/F14H7gN4B1L5WIcuzzLSam\nX7SJon7+7ih/WPBMHHf88Tyx9ia+fOBs3k4+xYRJkxgxalTFOK0hd3fTJhGZD9SLyCPpn/4S0DDi\nTNQd13Gnu3sAL71cz40P3cuERILGe++luqam1GL1K7mMxDk4K9JtJLNfwugH4t7nW+n6Weym4ISx\nXkQ+nntuK0ccASNGhHaIsifXynTvAItF5IOq+lY/ymQYhrEP2VaPC5O33jqYSZPyl4szWY2EiNyi\nqrOAn4vIPjOsVXVqqJIZQDT7fAvB9Is2cdDPexvyO7G97c7OT3L66aWTrRzI5bi+x/224a6GYcQW\nz9B5byeLFi1CVJlz0UUctPavPP+rUxl/+L8CjkGpra1NGZZMI6LiRq7upufc79RK4CIyEjhcVV/s\nB9kM6BGSOI6YftGm1Polk8mUT6Kvs6s9OjZvZu6ZZ9K0ahUNwCmPrKLx5WfoPuecWJ/LbOQNyyEi\nrcBUt+xzwAYReUpVv5yzomEYebHYTbnJZwSy+SkWLVpEb1mzcCF3p4XiaFq1iksWLux1m1EmSBTY\nEaq6Dfg0cLeq/iPw0XDFMjzi/uRS6fpFfQhs2Oevrq4uFVajryE2guIPxeEdqcrNr0SCGIn9RORQ\nnBDej4Usj2EYRknpGjYsYyiOrmHDSiFOyQliJL4FPAH8n6ouEZEjgJXhimV4VPo8gqhj+kWP8VOm\ncMmIIXQBrTgGonHCBMZPmVJawUpEkFDh9+OE5/DSrwHnhSmUYRhGPnINXe0NHZs30zR9Om8uXMIr\n3afytU8M5ZXnljDpox9lZnMzLXffXRzBI0YQx/WNwLeBt4HHgeOBL6lqZQRTLzGV3mcfdUy/3pHN\nYe3fzjR0tbcGYnVbG1vvuYdvb9niBvJ7lcZXJvD+z36Wxltv7ZMuUSdId9NZruP6k0ASOBK4Pkyh\nDKNSiLrjOiwyOay9/DBomT2bua6BgL0jmtZU6IgmP4Ec1+73OcD9qro1RHmMNOLY5+un0vWz2E3l\nwZ61azMuLrR9w4ZSiFNWBDESj4nICuBk4E8ichCwM+gBRORsEVkhIq+KyA0Z9v+LiCxzP0+KyPG+\nfUk3f6mKfZ5wAAAgAElEQVSIPBP0mIZhGIUwYNy4jCOadg4ZUgpxyoogjuuvuX6JraraLSJdwLlB\nGheRAcBtwBk4S58uEZGHVXWFr9hrwIdVdauInA38hL3rae8BGlR1S3CV4oX1aUcb069/Wd3WRsvs\n2ax66imaVq6kY9SoHnnXP/8874qwadmy1H6ARHMzM3//+1SXkzei6eRzzimpPuVAXiPhMhb4qIgM\n9uUFcfWfCqxU1dUAInIfjoFJGQlVXewrvxgY50sLwd52DMOocPzhNKqArmSSK4cP5zsPPsjNr7+e\nypsNNAOjk0lmjhzJ6lmzqK2vZ8TFF/OBJ37L4dvg1I9MrugRTX7y3oBFpBGY636mADfihOkIwjjg\ndV/6DXoagXS+CPzBl1ZggYgsEZHLAh4zVsSlzzcbpl+0KSf91ixcmDIQ4PgUfrxtG2NcA+HlNQMt\n7vbcLVtomT0bgOqaGl75xDCGnn8pjffeS219PW1tbf2sRfkR5E3ifOAEYKmqfk5ExgBFH/4qIlOA\nzwH+wLyTVPVN1w+yQESWq+qTmeonEonUyIfq6momTpyYehX2LmRLW7rc0o2NxWnv0kvBCyJRTvoV\nI93W1tbDGLW2tva4eXv7vXAaXskGHEOwxs1rcPOX4PRx4+5/7eWXaW1tZXe38k7Va
wxIvk6rL3Ch\n/9jl8Hv0Nu1tF7pIk6jus1REzwIiz6jqqSLyHM6bxHZguaoenbdxkdOAOap6tpv+GqCq+r20cscD\nDwBnq+qqLG01AttV9QcZ9mk+PQzDyE+2+QleID3//hUrVjB48GB27txJZ2cno0ePpqOjg7q6Oqqr\nq4sWRrupqYnGxsbUtz/Pv33e8cdz90sv9Ril1AV8F+ftwZ93E9DobV90EY333svF187mvv3n8c1h\nl+Y8TlwQEVRV8pUL8ibxrIhUA/+FEwW2E/hLQDmWAEeKSC3wJnABcGGaoONxDMTFfgMhIkOAAara\nKSJVwFlAiQb6GUZlkG/1N//+RYsW9biBXnfddTQ1NTFt2rSCjukZno6ODpLJJNXV1T2MTVDGT5nC\n7K4dNL+2KuV8vnL4cDrfHUHXjtdTebOBa93tmSNH0tjsmJA1O95m5NC8z74VR16fhKpeqaodqnoH\ncCZwqap+LkjjqtoNXA3MB14G7lPV5SJyhYhc7habDdQAP04b6joGeFJEluI4tB9V1fkFaRcD0l91\n44bpF22KoZ83cW7atGls3bqVRCLB1q1bmTZtWqrLJAjVNTV8/McL+PCwizjvkHF8ePgANg8ewYp3\nTuSGs8/lkro65kydip57LrNq3sNHB9WxdcgQWmbPZnVbG5u7BnHk4NN6tGk+idzLl56Ua5+qPh/k\nAKr6OHBUWt6dvu3LgH2c0qraBkwMcgzDMAyA7V311JzSzIilJ/O/2/ZQte11unidxpUT2H/qVL5/\n662sbmuj6eSTufedLVStha5582hcvBg94BYunfxJ1q+3Dgs/ubqb/jPHPgU+UmRZjAwU8iQVRUy/\naFNu+q1YAUPezBxiw1s0KFsIjj8O+SlHH/1J1q/f2159fX0/Sl+e5Fq+tDLj4hpGPzJnTnHiLs2a\n1cG0aS8AmR3KGzduZOjQoQD75A0ePJht27YxfPhwAHbu3MnRRzt9872Jqtra2trDv+A/jr/tMNaH\nfuUVGLMnc4gNb9GgbCE4Rry9haOPhpj3EBZMkHkSV7mOay89UkSuDFcsw8P6tKNNPv2KFXPp1lur\naXAD4rW3t5NIJJgxYwZdXV0kEgm6urqYMWNGxrxEIsGWLVtIJBIkEgna29tTbwj5buKZ9Ev3L/iP\n47Xd0NAQSrC+P+w3g3fHDc+5aFC2EBztMpaDD+6Z7w2vbW1tpba2NrVd6DDSKBNkdNNlqvojL6Gq\nW9yJbT8OTyzDMIzC2KPKW2N+ySVfWMjMpX/eJ8SGt2hQphAcVxwwhNHdr/Cti6enQnV4lFuXWn8T\nxEgMFN9EBBEZCAwKVyzDI+4XqOlX/mSbOwHlpV971x5k4CAaPngSD198MTdt2sSqp55iwqRJPUJs\neCE4btq0iVcWLWLDhnbm7trBMSyla97SHqE6zCcRLC7S48CvROQMETkD+KWbZxhGBZBpbYdyMg4e\nye3vMGyn4++orqmh8d57mZBIpEJs+PH27xo5koff2c0xbn56qA4j2JvEDcDlwL+66QXAT0OTyOiB\nPzxAHDH9yhvP55Btdbje6Jfu2O7NxLlMrN21nUOGHZW/oA8vlEePPGDPunWAzZOAYJPp9qjqHap6\nPo6x+Is7Sc4wjD5SrCgPYUWLSH+DgL6vDleMiXOZ2Mgm3lNT2IzprmHDMjqxB4wd2ydZ4kSQ0U2t\nIjJcRGpwwnL8l4jcHL5oBpRXn28YVLp+UV++tJzO3wFLZ/DZo6cXVGf8lCk0TpiQMhReqI6EG6rD\nfBLBuptGqOo2EfkicLeqNorIi2ELZhiGkU76okKJ5mY6Nm+mafp06v+2mhd+UcuHJzbnb8iluqaG\nxIIF3OS2OWHSJEaMGrWPD6OSCbTGtYgcCnwGeCxkeYw0Kn0eQdQx/YqHt6jQdfPmcXcyyXXz5vGD\nhgbWtbRw3bx5LOx+km89Oo+5Z55Jx+bNgdutr
a/v4eSurqlJ7TOfRLA3iW8BTwBPquoSETkCWBmu\nWIZh+CkkRHdcWbNwIXenLSr072vW8F1328vzh+Aw+k4Qx/X9qnq8ql7ppl9T1fPCF82A8urzDQPT\nLxj+YaiZZlQXy/lbKP15vGwjkdJvYv4QHH3FfBI5jISIfNX9nisiP0z/9J+IhhFfiuVwXrhwcnEa\nKmOyjUTakyHPC8Fh9J1cbxLL3e9ncUY1pX+MfsD6tKNNf8VuWrSooTgNFUh/nD/PMX3A5s1M3X9g\n6sbUBVw1ZAjP4ixKs5p9Q3D0FfNJ5I4C+6j7fVf/iWMYhrGX1W1tbL3nHr7ti7N0ddVQtgytonPj\nVubu2MExbv6M/fZj9DnnMOvmm1MhOHqLZ/wOOeSQ1HYYUWujQK5Fhx7JVVFVpxZfnPiQz9GYKUxz\nttDOzz77bMF1enOcoHWKuY6x+SSiTdj6ZVr74bauTqbuN5BHunf2yL/j3Xe5aejQogxf9fSK+/kL\nQq7upg8AhwF/xlk3/D/TPkYO8jkaM4Vp7k1o51LU8TtJK/HJyug/sq39ULNzZ85wGkbxyGUkDgG+\nARwL3IqzvvVGVV2kqov6QzjD+uyjjunXN7Kt/bB58OB+CacR9/MXhFw+iW6caK+Pi8gBwIVAq4g0\nqeptQQ8gImcDt+AYpJ+p6vfS9v8LThBBgO3Alar6YpC6RunItfpYId1a69atI5lMZl0ZLe79wMWK\nuTR5civQUJzGyohEczNX/e4P/Khjc4+1IepPP53GJ5+kyZ034YXTaGwOPtvaj9c17F9cCGC9fy3T\nCiXnZDrXOJyDYyDqgB8CDwZtXEQGALcBZwDrgCUi8rCqrvAVew34sKpudY3CT4DTAtaNPQ0NDSxa\nVH4vbl5fbVNTE7NmzaKpqYnrrrsulXfNNdektq+77rp99nt5P/jBDzLWmTFjRj9rFA69id2UyZ8F\nuZcVnTJlEaUwEmH22XshODZ2H8Inhg5n9IjdHN/QkFobItHYWLRwGt5vaT6Ifck1T+Ju4C/ASUCT\nqp6iqs2quraA9k8FVqrqalXdDdwHnOsvoKqLVXWrm1wMjAta1zDiSCZ/VqHLikYdfwiOx7b/nd93\nJhmxYweJ5uaUIcgVTsMoHrl8EtOB9wDXAk+LyDb3s11EtgVsfxzwui/9BnuNQCa+CPyhl3VjSdz7\nRE2/aBOWfmsWLkx1JUHpFgOK+/kLQi6fRJDgf0VDRKYAnwNO7039RCKRerqqrq5m4sSJqacu70SX\nKu0tpu7f75+kE3S/R6H70y/0fPszyZNvfxj6lMv5K5e0RyVcT9s3bEgZCK90A87opbDlLZfzHcb1\n09ramurKDEqQAH99YS0w3pc+zM3rgYgcj+OLOFtVtxRS16OlpSWrEOn9jP2drq+v75HXkOZnyLXf\n286Ul14/236v7aD7M8mTb39QfdLlTW871/GjmE7f15f2wvj9e7PfL0+m81eM62nuwQfT1d5OFXs9\nLd7opULl9RgzZkzqPnHaaaelbp7e4Ah//XRZ/ZTT9VVo2r99113B5kmHbSSWAEeKSC3wJnABjhM8\nhYiMBx4ALlbVVYXUNYyoM2dOceI3LVw4ObTV6UrB4VOmMH3NCu7durvPo5e8G2OmG/6iRYti79/p\nK6EaCVXtFpGrgfnsHca6XESucHbrT3DCrtQAPxYRAXar6qnZ6oYpbzmS/iocN/zdDNBzKGKumeoQ\n7qzyoHUg95DddP3SaWoqjpEoZuwmL1aSf2EfP/6Ffz47fz5jDzqI9mef5byWFti9O2OdfHhtvtLa\nynktLWzr2snrow7km5M/zqYXXyzqYkD+0WP+4a6ZyHf+KoGw3yRQ1ceBo9Ly7vRtXwZcFrSuEW/8\nQxEXLVpEo/t4nG8obW+G3xazTl+G7LamzTkp5doQ+8RKSiZpXLyY7nPOAfaOOvKcyn9IJlkA/Acw\nGmgEPjNvH
nN9dfKR3maX284P34KfycuMmjqVxltvpalI0RCzzb0px6Hm5cCAUgtg5CbuTzGmn1Nm\n2rRpbN26taRrQ0DmWElNq1axxl3EJ33U0ceBZqDFKwv8Oq1OPjKNZOpNO2EQ9+szCKG/SRiGER2y\nxUryFvHJtvDPnrTtQhb+ydVmoQsIeV1H/m6k9ImHRmGYkShzKs0nETX8PhT/zcjr0vDr5+8L31u2\noaxuYl6sJP9N27+Ij7fwj3946ins7ZLocrcLWfgnvc3etgPOk79/iGdtbW3qd+9NiJeoX5/FwIxE\niPgdfNc//zzvivRw8Hl5m5Yty7q/bdMmBiWTBdXpzXEKrdMb52Qc8ftQmpqaSCQSOct65b2yjY3F\nmT09eXIrq9tqA11vuc7ztq4uLh4ovL9b+SKOn+HqqqF0t7dzXn0973Tt4J/2hx/thmOAt3FGnlzL\nXl/CF4CrhlSl6uS7nt7e+i5TZSi3aWdqbQivnd4sIBT3eF/9jRmJkFjd1tbTGZdMMpueDr7Pr13L\nf+H06Y7Osf/aXtTpzXEKqVOoczIbcX9Ky6dfsZYvPfGEB5l75u8CXW/5zvM33P1XDBzI6zqIO7o6\nOaars8fN+/tDh7J1xAiOOPlkVISvP/ccu/bbj+6332bm5k3M3dHFMTu6UnXyXU9em5urqhh44IGw\neze/btgbp6lUxP36DII5rkOiZfbsfZxxmRx8/rx8+8utTqmdisZeMjl/+3qe7+zu5sN73uYYX5te\nnbmdnVBTw/cffpibH3qIo774RR5oa2PgwQfz8O53M9bJdz3N7exk4MEH80BbG8dfdhmN995blCGv\nuUgmk7S2ttLqi/7am1nJccbeJEIimwMwk4MvPc+/vaQXdXpznN7WKcSpmIm49/n2l36FOJQLOc/p\nT5Hp5z5dv3xO6LCvp0LJ1zUV9+szCPYmERLZFkvJ5OBLz/Nvv92LOr05Tm/rFOJUNMLDc/72yKPv\n59m7kae3me3c55LDrqdoYm8SIZFobqZx8eIeE4QyOfj8eZn2/1cv6vTmOL2p8/+OKNypmE7cntLS\nJ8Zt3Lixx8zxo48+mo6OjlTZYo1sGj9lCpeueY27tnYFvt7ynecrhw/ngBEj6Hr99R6T3PwO5fTz\nN37KFBp37NhnYlyQ66k3Tuqwidv12RvMSIREbX09MxcsSC2KMub443s4+Ni9m5+7Tr/Zy5bl3F9u\ndXT3bj51wFbeW3MwQx94gPMeeaSoo6g2LVtG08qVdIwaVerTWDDeTSXbYkzezOxly5bR0NBAQ0Mr\nra1OnXwzfv3hMtJ/yxffuho59B2u/8An6Fzx9z6d59nLljFh0iRqRo1i1qxZ3DR7Nq8sWpSqk8uh\nXF1TQ8K97v118l2D5eCkNjJj3U0h4l8UJd3Bd/xll6Xycu0/d9asguv05jiF1Kk97zxO3Xogv3z2\nL/z32rXcnUxSv3Ytn3/kEeThh2lOJpnjbv9Hhv3+vGMz1GlOJrlu3jy23nMPq30hnaNIWx75g8Zc\n8sJlXDdvHndn+H3/r+srPPx/uxmy8hVGTZ3ap/PsX8THu4b9dfwO5UzzeDLVyXcN9oeTujfEfZ5S\nEMxIGAWzZuFCbt/0VsYwCoWOolqYo04pFpkpVzKFy/D/VmAjzoxwMCNR5pRjn2hfR7D482rz1Vm3\nrtji9yv1RXo6zjdaDkozQqgcr89iEnf9gmBGwiiYvo5gKWgU1dixxRY/kuQbLQc2QsgIB3Nclznl\n2CfalxEs6XnH5agzY7/9GLJ+PV+aNq0koUeK4WjP55PIRnpIl21dXVw8QHj/nr3hMvy/FcBn6P8R\nQqWeR+CPneUP6les0Byl1q8cMCNhFExvR7BkGl3zem0tYw86KFWnq6uLryxs5Zg93Xz73XcZ/ac/\nlSz0SNA63vbMkSNZPWtWQQ7YyZNb2btAp8M+6yukhcuYsd9+DJk8mSFDh6Z
+y6M3/YBfT72o4kYI\n+WNnGeFg3U1lTrle/L0ZwZIp71dPPdWjzthDDuGuPd00s9dfEZVwJZkc7fl8ElOm7DvsNV+IjTve\nfZexhxzS47e84CvbSzJCqFyvz2IRd/2CYEbCKCv6Es6kLMKVFMHRHijERsQd+kZ0CN1IiMjZIrJC\nRF4VkRsy7D9KRJ4WkZ0i8uW0fUkRWSYiS0XkmbBlLUfK0SdRTNL160s4k7IIV5LmaO+NTyJQiI0y\ncej39/WZKRhf2MerdEL1SYjIAOA24AxgHbBERB5W1RW+YpuAmcC0DE3sARpUdUuYchrlQ6K5mZm/\n/31qTkCpQ48UUmfmyJE0FmGNjfFTpnDl6h38eFvmkC7FOk4UydT9Y2tTh0vYjutTgZWquhpARO4D\nzgVSRkJVNwIbReSTGeoLFd4l1tDQEOs/Qfqfvra+nhEXX8xNmzblDWdSLuFKLnvqL3S808nhQ4bQ\nMnt2j8WY0n0S/lFL3ogoL++V1lbOa2mhe+e7vLrjJK4761i6Xt03XMaIUaPKZnZyqfrs/av8hTGq\nycN8EuHfgMcBr/vSb7h5QVFggYgsEZHLiiqZUbZU19QECmdSDuFKZt18M2Pf3c3CbTt4YO1arps3\nj7lnnknH5s376OWNWvJCa1w3bx7/c8chfGfyZK6bNy8V4uTI9W/wwLuPULXqbz1CbPjDZaSzcOHk\n/jg1ZUNdXZ0b+6qBRCKR2rYV6YpPuT+lT1LVk4BPAFeJyOmlFqi/iXufaNT1a5k9mx919AyX4Q+N\n4fdJZBq19OI7X2eMG2U1VZ/CQ2wEjQFVbKJ+/vIRd/2CEHZ301pgvC99mJsXCFV90/1+S0QexOm+\nejJT2UQikXqKqK6uZuLEialXRe9Elyrd1tbWY1JOa2trj5tH0P0ehe5Pv9Dz7c8kT779pdSnlPLu\nWbuWJW5+g/u9BNi+YcM+8nqjllrTyq9x8/z1X6Pnwj7B9G3osz5B9pfqekpvv1z+31FJe9uFrroX\ntpFYAhwpIrXAm8AFwIU5yktqQ2QIMEBVO0WkCjgL5yErIy0tLVkbTe9X7I+0tywiwGmnnUYymeSO\nO+6gqqqKZDLJscceyx133MHgwYM56aSTsu4HOPbYY0kmk1RVVfWo09LSws6dO/fZ79VpaWmhqqoq\ntZ7ByJEje9Tx7weoqqqipaWFY489lhUrVpBMJhk5ciTJZJKdO3em9k+cOJGOjo4eF6Pfb1JfX9/j\nN8m2f9GiRaly2fZnqp9eJ9Px8u3vrbzp+weMG8cp0GPI6inAsIMP3qftue6opQZ6Mj4t7xRgEXtD\nbATVtxj6BNnvlyfT+Svm+Ukmkym/TrrvIZv+xUyn7wv7eGGn/dt33XUXQQjVSKhqt4hcDczH6dr6\nmaouF5ErnN36ExEZAzwLDAP2iMi1wPuAg4AHRURdOeep6vww5S0mfgdapgstLsTZqR6ETKOxrq4a\nSnd7O+fV1/cI5TFww3amMpTb6OQY9obT+B8O5F94O5VXzovw9DeV8B8qd0IPy6GqjwNHpeXd6dtu\nBw7PULUTmBiudOWP/zU8jkRdP/9orFcWLeKt9Vv4YVcnx3R10gV8nr2hPL6PE1bjqiFD2PD225ys\nCsB83mbm0KFsrqpi4IEHRmoRnqifv3zEXb8glLvj2jDKHm801q6RI3no3S6OcfOrgAT7hvL40Y4d\nnKxKM9DIHCekR2cnAw8+uNeL8DgxoAyj+JiRKHPi/hQTJ/0yhdP4OJlDeXh/vDmum62v60BkigHV\nH8Tp/GUi7voFwYyEYRSJQtbZ2JOhnK0DYZQjZiTKnPThgnEjTvqNnzKFxgkTUobC80l8BiesRsLN\n+8b48bw6fHiPclF1Uod5/lozxGkqdPhmMWSodGw9CcMoEpnW2Xirs5Off/CDPcJqfLm5mVtuuSUV\nemTCpEmRcFL3N9bVUx6YkShzyvGP4sX
N6ejoYMSIEan5Ft4cDv98DG9uhX8Ohz/PPx/Dm+syZsyY\n1BNcMpmMVKgFb52NpqYmGhsbU99Aj+3qmhoab721R14UKdb12dra2uN6GjFiBA899BDV1dVFj8dU\nCOX4/+tvzEgYBdNff9pFixZFykD0hoULJ1MMG1GsdrItB+rHyxszZsw+DwP+m3sh2M24fDEjUebE\nfZx23PXLt55EsWIuFaudbJPXMs2obmhoyHn+4jDRMu7XZxDMSBiGkZFs4bj72gVYrl1LRmbMSJQ5\ncX+KSdfPf2PK1J2RzbfRG39IMep4cma7geZb47qc8W7Y/hFFtbW1qXPkhesulChd01GSNSzMSBhl\nRZRjXsXVh2JP95WNzZMoc+I+Tjvu+vVmjesoEffzF3f9gmBvEoZRQpyYSw1Face7n/Wlm86r7w/J\nXchbRL7h0X1p2ygNZiTKnKh1uRRK3PXL55NwYi419Pk4U6YsoqHBGQPbn79p+rHiduOP+/UZBDMS\nhhEB/PMX8s1PiNuN2igtZiTKnLiP0467fsXySfid+eX0e8X9/MVdvyCYkTCMMiR9VrO3bf35Rn9j\nRqLMiftTTNT1yzbhzCPdJ+Htzzc5zT+ruZwpd/n6Stz1C4IZCcPoA9me6LOFpEi/6cyZA42N+9Yv\nlDlznI9hFJvQ50mIyNkiskJEXhWRGzLsP0pEnhaRnSLy5ULqVgJxH6cdd/36K3ZTU1NRmimYuJ+/\nuOsXhFCNhIgMAG4DPga8H7hQRI5OK7YJmImzTnyhdWPPCy+8UGoRQiXu+q1fvz5vmdbWVh566KF9\nYhmVYpGdQon7+Yu7fkEIu7vpVGClqq4GEJH7gHOBFV4BVd0IbBSRTxZatxLo6OgotQihEnf9du7c\nmbdMlPu9437+4q5fEMI2EuOA133pN3Bu/mHXNYx+J9Ns40GDBu0z29i78ThrLjTYiCWjrDHHdZlT\n7t0NfSVO+mW6ybe2tjJjxoyc9aL8JhGn85eJuOsXBFHV8BoXOQ2Yo6pnu+mvAaqq38tQthHYrqo/\n6EXd8JQwDMOIKaoq+cqE/SaxBDhSRGqBN4ELgAtzlPcLHLhuEEUNwzCMwgnVSKhqt4hcDczHGUn1\nM1VdLiJXOLv1JyIyBngWGAbsEZFrgfepamemumHKaxiGYfQk1O4mwzAMI9pEdtEhETlfRP4mIt0i\ncpIv/6Mi8qyILBORJSIypZRy9pZs+rn7vi4iK0VkuYicVSoZi4WInCAifxGRpSLyjIj8Q6llKjYi\nMtM9Xy+JyHdLLU8YiMhXRGSPiNSUWpZiISI3uuftBRF5QESGl1qmYlDIROXIGgngJeBTQHr8g7eA\nT6rqCUACuKef5SoWGfUTkWOAzwDHAB8HfiwiUffJ3Ag0quqJQCNpEyujjog0AP8EHKeqxwE3lVai\n4iMihwFnAqtLLUuRmQ+8X1UnAiuBr5dYnj5T6ETlyBoJVX1FVVfS09mNqi5T1fXu9svAYBHZvxQy\n9oVs+uFMKLxPVd9V1STOhRv1+SN7gBHudjWwtoSyhMG/At9V1XchNYE0btwMXF9qIYqNqv5RVfe4\nycXAYaWUp0ikJiqr6m7Am6ickcgaiSCIyPnA8+4PERfSJxmudfOizJeAm0RkDc5bReSf1tJ4L/Bh\nEVksIgvj1p0mIlOB11X1pVLLEjKfB/5QaiGKQKaJylnvIWU9mU5EFgBj/FmAAv+mqo/mqft+4D9w\nXoHLkr7oFzVy6Qp8FLhWVR9yDfvPKePzlokc+n0T5382UlVPE5FTgF8DR/S/lL0nj37foOf5ilT3\nZ5D/oYj8G7BbVf+7BCKWlLI2EqraqxuF2z/6W+Bit0umLOmlfmuBw33pw4hA90wuXUXkHlW91i33\nGxH5Wf9JVhzy6DcD53pEVZe4zt1Rqrqp3wTsI9n0E5FjgTpgmesbOwx4TkROVdUN/Shir8n3PxSR\nBPA
J4CP9IlD4rAXG+9I57yFx6W5KPbmIyAjgMeAGVV1cOpGKiv/J7BHgAhEZJCL1wJHAM6URq2is\nFZHJACJyBvBqieUpNg/h3mBE5L3A/lEyELlQ1b+p6iGqeoSq1uN0XZwYFQORDxE5G8fXMlVVd5Va\nniKRmqgsIoNwJio/kq1wZOdJiMg0YC4wGugAXlDVj7uvhV/Dceh6r41nRc1ZmE0/d9/XgS8Au3G6\naeaXTNAiICIfBH4IDAR2Aleq6tLSSlU83IETPwcmAruAr6hq5lWJIo6IvAb8g6puLrUsxUBEVgKD\ncJY0AFisqleWUKSi4Bq/W9k7UTnrsOzIGgnDMAwjfOLS3WQYhmGEgBkJwzAMIytmJAzDMIysmJEw\nDMMwsmJGwjAMw8iKGQnDMAwjK2YkjNgjItv7WP9+Ealzt9uKHQrbjed0UoByeY8tIgvcCaWGURTM\nSBiVQK8nA4nI+4ABvvAupZxYFOTYdwNXhS2IUTmYkTAqChH5vrvwzzIR+YybJyLyYxH5u4g8ISK/\nE6vvlYsAAAK0SURBVJFPu1UuAh72N5GhzVNE5GkReU5EnhSR97j5l4rIgyIyX0ReE5GrRORLIvK8\nW77a18wl7qJLL7pBABGRGleel0Tkv+gZfuZBcRbVeklEvuhr51FyryNvGAVhRsKoGETkPOB4d+Gf\nM4Hvi7PG+qeB8ar6PuAS4AO+apOA5/I0vRw4XVVPxlk06T98+94PTMOJ4f8doFNVT8JZm+ASX7kD\n3UWXrsIJ4YHb1p9deR+kZ1C2z6nqKcApwLUiMhJAVTuAQV7aMPpKWUeBNYwiMwn4JYCqbhCRVpyb\n9+nA/W5+u4gs9NU5FGe1w1xUA3e7bxBKz//VQlXdAewQkQ6c4JPgrDx4nK+cJ9efRWSY61f4MM7q\nhKjq70Vki6/8LDe+FzhRPN/D3kCPbwFjAX95w+gV9iZhVDJeAMhcvA0MzlOmGfgf94n/n9LK+yOH\nqi+9h57GJF2OPeyLALgRcz8C/KO7rOYLaccc7MptGH3GjIRRCXh9+X8GPisiA0TkIOBDOE/fTwHn\nu76JMUCDr+5ynHDsmdrzGM7eePyf66WMnwUQkdOBraq6HfhfHJ8IIvJxnDcWcJZ63aKqu9y1iU9L\na2sMkOylHIbRA+tuMioBBVDVB0XkNGAZzpP69W630wM4T+Yv4yzr+Byw1a37O2AK8D++tpaJiLrb\nv8ZZcvVuEfmmWz6nHFnyd4rI8zj/Sc/QNAG/FJELgKeBNW7+48AMEXkZeAX4i9eQiJyME84605uI\nYRSMhQo3DEBEqlS1y52H8FdgkmtABuMYiEkagT+LiNwCPKyqC/MWNowA2JuEYTg85g5J3R/4lrey\nmqruFJFGnIXi3yilgAF5yQyEUUzsTcIwDMPIijmuDcMwjKyYkTAMwzCyYkbCMAzDyIoZCcMwDCMr\nZiQMwzCMrJiRMAzDMLLy/wE6hB6+hilfqQAAAABJRU5ErkJggg==\n", + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "cvglmnetPlot(cvfit)" + ] + }, + { + "cell_type": "code", + "execution_count": 34, + "metadata": { + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } + }, + "outputs": [ + { + "data": { + "text/plain": [ + "array([ 0.00333032])" + ] + }, + "execution_count": 34, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + 
"cvfit['lambda_min']" + ] + }, + { + "cell_type": "code", + "execution_count": 35, + "metadata": { + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } + }, + "outputs": [ + { + "data": { + "text/plain": [ + "array([ 0.00638726])" + ] + }, + "execution_count": 35, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "cvfit['lambda_1se']" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "`coef` and `predict` are simliar to the Gaussian case and we omit the details. We review by some examples." + ] + }, + { + "cell_type": "code", + "execution_count": 36, + "metadata": { + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } + }, + "outputs": [ + { + "data": { + "text/plain": [ + "array([[ 0.1834094 ],\n", + " [ 0.63979413],\n", + " [ 1.75552224],\n", + " [-1.01816297],\n", + " [-2.04021446],\n", + " [-0.3708456 ],\n", + " [-2.17833787],\n", + " [ 0.37214969],\n", + " [-1.11649964],\n", + " [ 1.59942098],\n", + " [-3.00907083],\n", + " [-0.3709413 ],\n", + " [-0.50788757],\n", + " [-0.54759695],\n", + " [ 0.37853469],\n", + " [ 0. ],\n", + " [ 1.22026778],\n", + " [-0.00760482],\n", + " [-0.8171956 ],\n", + " [-0.4683986 ],\n", + " [-0.44077522],\n", + " [ 0. ],\n", + " [ 0.51053862],\n", + " [ 1.06639664],\n", + " [-0.57196411],\n", + " [ 1.10470005],\n", + " [-0.529917 ],\n", + " [-0.67932357],\n", + " [ 1.02441643],\n", + " [-0.49368737],\n", + " [ 0.41948873]])" + ] + }, + "execution_count": 36, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "cvglmnetCoef(cvfit, s = 'lambda_min')" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "As mentioned previously, the results returned here are only for the second level of the factor response." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 37, + "metadata": { + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } + }, + "outputs": [ + { + "data": { + "text/plain": [ + "array([[ 0.],\n", + " [ 1.],\n", + " [ 1.],\n", + " [ 0.],\n", + " [ 1.],\n", + " [ 0.],\n", + " [ 0.],\n", + " [ 0.],\n", + " [ 1.],\n", + " [ 1.]])" + ] + }, + "execution_count": 37, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "cvglmnetPredict(cvfit, newx = x[0:10, ], s = 'lambda_min', ptype = 'class')" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Like other GLMs, glmnet allows for an \"offset\". This is a fixed vector of N numbers that is added into the linear predictor.\n", + "For example, you may have fitted some other logistic regression using other variables (and data), and now you want to see if the present variables can add anything. So you use the predicted logit from the other model as an offset in." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Like other GLMs, glmnet allows for an \"offset\". This is a fixed vector of N numbers that is added into the linear predictor.\n", + "For example, you may have fitted some other logistic regression using other variables (and data), and now you want to see if the present variables can add anything. So you use the predicted logit from the other model as an offset in." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Logistic Regression - Multinomial Models" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "For the multinomial model, suppose the response variable has $K$ levels ${\\cal G}=\\{1,2,\\ldots,K\\}$. Here we model\n", + "$$\\mbox{Pr}(G=k|X=x)=\\frac{e^{\\beta_{0k}+\\beta_k^Tx}}{\\sum_{\\ell=1}^Ke^{\\beta_{0\\ell}+\\beta_\\ell^Tx}}.$$\n", + "\n", + "Let ${Y}$ be the $N \\times K$ indicator response matrix, with elements $y_{i\\ell} = I(g_i=\\ell)$. 
Then the elastic-net penalized negative log-likelihood function becomes\n", + "$$\n", + "\\ell(\\{\\beta_{0k},\\beta_{k}\\}_1^K) = -\\left[\\frac{1}{N} \\sum_{i=1}^N \\Big(\\sum_{k=1}^Ky_{il} (\\beta_{0k} + x_i^T \\beta_k)- \\log \\big(\\sum_{k=1}^K e^{\\beta_{0k}+x_i^T \\beta_k}\\big)\\Big)\\right] +\\lambda \\left[ (1-\\alpha)||\\beta||_F^2/2 + \\alpha\\sum_{j=1}^p||\\beta_j||_q\\right].\n", + "$$\n", + "\n", + "\n", + "Here we really abuse notation! $\\beta$ is a $p\\times K$ matrix of coefficients. $\\beta_k$ refers to the kth column (for outcome category k), and $\\beta_j$ the jth row (vector of K coefficients for variable j).\n", + "The last penalty term is $||\\beta_j||_q$, we have two options for q: $q\\in \\{1,2\\}$.\n", + "When q=1, this is a lasso penalty on each of the parameters. When q=2, this is a grouped-lasso penalty on all the K coefficients for a particular variables, which makes them all be zero or nonzero together.\n", + "\n", + "The standard Newton algorithm can be tedious here. Instead, we use a so-called partial Newton algorithm by making a partial quadratic approximation to the log-likelihood, allowing only $(\\beta_{0k}, \\beta_k)$ to vary for a single class at a time.\n", + "For each value of $\\lambda$, we first cycle over all classes indexed by $k$, computing each time a partial quadratic approximation about the parameters of the current class. Then the inner procedure is almost the same as for the binomial case.\n", + "This is the case for lasso (q=1). When q=2, we use a different approach, which we wont dwell on here.\n", + "\n", + "For the multinomial case, the usage is similar to logistic regression, and we mainly illustrate by examples and address any differences. We load a set of generated data." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 38, + "metadata": { + "collapsed": true, + "jupyter": { + "outputs_hidden": true + } + }, + "outputs": [], + "source": [ + "# Import relevant modules and setup for calling glmnet\n", + "%reset -f\n", + "%matplotlib inline\n", + "\n", + "import sys\n", + "sys.path.append('../test')\n", + "sys.path.append('../lib')\n", + "import numpy as np\n", + "import importlib, pprint, matplotlib.pyplot as plt, warnings\n", + "from glmnet import glmnet; from glmnetPlot import glmnetPlot \n", + "from glmnetPrint import glmnetPrint; from glmnetCoef import glmnetCoef; from glmnetPredict import glmnetPredict\n", + "from cvglmnet import cvglmnet; from cvglmnetCoef import cvglmnetCoef\n", + "from cvglmnetPlot import cvglmnetPlot; from cvglmnetPredict import cvglmnetPredict\n", + "\n", + "# parameters\n", + "baseDataDir= '../data/'\n", + "\n", + "# load data\n", + "x = np.loadtxt(baseDataDir + 'MultinomialExampleX.dat', dtype = np.float64, delimiter = ',')\n", + "y = np.loadtxt(baseDataDir + 'MultinomialExampleY.dat', dtype = np.float64)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The optional arguments in `glmnet` for multinomial logistic regression are mostly similar to binomial regression except for a few cases.\n", + "\n", + "The response variable can be a `nc >= 2` level factor, or a `nc`-column matrix of counts or proportions.\n", + "Internally glmnet will make the rows of this matrix sum to 1, and absorb the total mass into the weight for that observation.\n", + "\n", + "`offset` should be a `nobs x nc` matrix if there is one.\n", + "\n", + "A special option for multinomial regression is `mtype`, which allows the usage of a grouped lasso penalty if `mtype = 'grouped'`. This will ensure that the multinomial coefficients for a variable are all in or out together, just like for the multi-response Gaussian." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 39, + "metadata": { + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } + }, + "outputs": [], + "source": [ + "fit = glmnet(x = x.copy(), y = y.copy(), family = 'multinomial', mtype = 'grouped')" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "We plot the resulting object \"fit\"." + ] + }, + { + "cell_type": "code", + "execution_count": 40, + "metadata": { + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } + }, + "outputs": [ + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYcAAAElCAYAAAAPyi6bAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzsnXd8VMX2wL+T3fTegEDoXXpRQBCCiIICjypKe6jYC8+u\n71nfs7+f3WdBUAQrVRGUJgREBEQIIL130uum7u78/pgNCSEJaVuZ7+dzPnPv3bv3npO72bNzzswZ\nIaVEo9FoNJrSeDlbAY1Go9G4Hto5aDQajeYitHPQaDQazUVo56DRaDSai9DOQaPRaDQXoZ2DRqPR\naC5COweNUxFCWIQQ24QQfwkhtgshHhFCCGfrVROEEA8JIfYIIeaWOT5ACJFhs3O7EGKlne7/uRBi\ntD2urbn8MDpbAc1lj0lK2R1ACBEFfAOEAC/U9sJCCC8ppbW216kG9wKDpJRnynltvZRyREVvFEIY\npJQW+6mm0VQP3XPQuAxSyhTgLuABUF/uQog3hBCbhRAJQog7bceFEOJD26/0FUKIZcW/mIUQR4UQ\nrwkhtgJjhRAthBA/CyH+EEKsE0K0sZ0XJYRYYLv2ZiFEH9vxAbZf99uEEH8KIQLL6mnr3ewSQuwU\nQjxkO/YR0AL4WQgxvRzzLuoN2X7pfySE2AS8LoQIEELMEkJsst17RGV/B9trHwgh9tp6I/VKHR9k\ns2GHEGKmEMK71N/nFZuNW4QQ3YQQy4UQB4UQd1f/qWk8FimlFi1OEyCrnGNpQDRwJ/BP2zEf4A+g\nKTAGWGo7Xt92/mjb/lHgsVLXWg20tG1fBfxi2/4KuNq23RjYY9teAvSxbQcAXmV06w7sAPyAQOAv\noIvttSNAeDn2DAAygG02edp2/HNgSanzXgYm2LZDgf2AfyV/h1HACtvxGCAdGA34AidK2f0F8FCp\nv89dtu23gASbnVHAOWd/HrS4juiwksaVuR7oJIQYZ9sPAVoD/YD5AFLKRCHE2jLv+w7A9qv/amB+\nqTyGt629Dmhf6niQECIA+A14WwjxFbBISnm6zLX7AYullPm2eywCrkE5DEE5PQQbFYWV5pexd7gQ\n4nHbvg/QpJK/Q39UGA4p5VkhxC+219sCR6SUh237XwD3Ae/Z9n+0tbuAQCllLpArhMgXQoRIKbMq\nsEFzGaGdg8alEEK0ACxSymTbF/eDUspVZc656RKXMdlaLyBd2nIaZW8F9JJSFpU5/roQYilwE/Cb\nEOJ6KeWB6ltSZUxl9sdIKQ9eoGjV/w6igu2yFNhaa6ltAIn+TtDY0DkHjbM5/yUmhIgGPgLetx1a\nAdwnhDDaXm9d6tf9WFvuoT4QV96FpZTZwFEhxNhS9+hs21wJTC91vIutbSGl3C2lfAMVvmlX5rK/\nAiOFEH62nskoYH2NLL
+YFcBDpXTqWup4eX+H9cB4W04iBhhoO38/0NTmaAEmA/F1pKPmMkH/StA4\nGz8hxDZUCKUImCOlfNv22kygGbDN9us5CRgJLASuBXYDJ4E/gUzbe8qWGZ4IfCyEeAb1ef8W2Ily\nDP8TQuwADKgv2vuAfwghBgIW2/V/Ln0xKeV2IcRslOOQwAwp5c4K7n0pyp7/EvCOEGInymkeBUZU\n9HeQUi4WQhT/HU4AG206FgghbgMWCCEMNl0/qYKOukSz5jxCSv150LgfQohAKaVJCBEBbAb6SimT\nnK2XRuMp6J6Dxl1ZKoQIQyWY/60dg0ZTt+ieg0aj0WguQiekNRqNRnMR2jloNBqN5iK0c6gjhBCx\nQog1QojdttIKxWUVwoUQK4UQ+22lHkIreP8QIcQ+IcQBIcSTjtX+Il08xhabPrOEEIm2UUCljz9o\nKz2xSwjxWgXvdSlboHx7hBBjhSpeaBFClDevo/g8l7OnNEKIY7aSH9uFEFsqOOc9W7mPhFLDfV2O\nij53Zc5xXVucPUXbUwRoAHS1bQehxpq3A14HnrAdfxJ4rZz3egGHUCURvFElDdppW+rMnn5AV2Bn\nqWNxqLkORtt+lDvYUok9bVGzptcA3St4n0vaU0bHckuQlHp9KLDMtt0L2ORsnavznNzJFt1zqCOk\nlOeklAm27RxgLxAL/A1VvgBbO7Kct18FHJRSHpdqxu63tvc5BU+yBUBKuQFVd6g096Kcm9l2Tko5\nb3U5W6B8e6SU+6WaWV3ZzGiXtKcMgsojGn8D5gBIKTcDobaJkC5HBZ+70ri0Ldo52AEhRDPUL4ZN\nQH0pZSKoL11KVc4sRSPUZK5iTtmOOR1PsqUMbYD+QlVAXSuE6FnOOe5iS1VxB3sksEqoKrp3lvN6\nWRtO43o2VBWXtkXPc6hjhBBBwAJgupQyRwhRdqyw24wd9iRbysGICl/0FkJcCcxDldzWOJe+UhUR\njEY5ib22X+AaB6N7DnWIrfbNAmCulPIH2+HE4q6iEKIBqvRBWU6jqm8WE2s75jQ8yZYKOAksApBS\n/gFYhRCRZc5xF1uqisvbI6U8a2uTgcWoUFhpTqNKrBfjcjZUA5e2RTuHuuUz1LoA75Y6tgSYatv+\nO/BD2Tehat+0EkI0FUL4ALfY3udMPMkWuLic9veo+kwItQCQt5Qytcx7XNUWqLw8eEXHXdkehFrs\nKMi2HYgqVf5XmdOWAFNs5/QGMopDnS5KZc/JtW1xdkbcUwToiyrWlgBsRy3qMgSIQC04sx81OibM\ndn4MtgVrbPtDbOccBJ7SttSpPV8DZ1DlqU8At6HCSnNRaxpsBQa4gy2V2DMS1RvKA84CP7uLPaV0\na17qM7erWD/gbmwLFNn2P0CNutpBBSOzXEEqeE5uY4sun6HRaDSai7BrWKmiyVTlnOe6E0E0Go3m\nMsTeo5XMwCNSygRbLPFPIcRKKeW+4hOEEENRa922FkL0Aj4GettZL41Go9FUgl17DrL8yVRlx/G6\n9EQQjUajuRxx2GilUpOpNpd5yaUngmg0Gs3liEOcQ9nJVI64p0aj0Whqjt1nSFcwmao0VZoIUs7s\nXI1Go9FUASllZTW3ysURPYfyJlOVpsoTQZw97rcu5Pnnn3e6Dtoez7fF0+zRttRcaopdew5CiL7A\nRGCXEGI7qhbPP1Elg6WUcoaU8ichxI1CiEOACTVRxGM5duyYs1WoUzzJHk+yBTzLHm2L47Grc5BS\n/gYYqnDeA/bUQ6PRaDTVw+1qK7399tt07NiRzp07M3HiRAoLC52tUrWYOnWqs1WoUzzJHk+yBTzL\nHm2L43Gb8hlCCHn69Gn69evHvn378PHxYfz48dx0001MmTLF2eppNBqNSyKEQLpoQrpOsVgsmEwm\nzGYzubm5NGzY0NkqVYv4+Hhnq1CneJI9nmQLeJY92hbH41bOoWHDhjz66KM0adKERo0a
ERYWxnXX\nXedstTQajcbjcKuwUnp6OmPGjGH+/PmEhoYyduxYxo0bx4QJE5ytnkaj0bgkl0VYafXq1bRo0YKI\niAgMBgOjR49m48aNzlZLo9FoPA63cg5nvc7yy6+/sOHwBnae28ninxbTtGXTWk30cDTuEm+sKp5k\njyfZAp5lj7bF8di9fEZd8kPOD5jbmhncbzDSS+LdyJvVHVfzwqsv0Ci4EbEhsdQPqk+AMYAA7wD8\nvf0J8A4gwj+CqIAoogOiiQ6MPr/ta/R1tkkajUbjkrhVzqEiXbMKsjiddZrT2adJzEkktyiXPHMe\nuUW55Bblkp6XTnJuMim5KSTnJpNsUtv+3v5EB0RTL7AeMcExNApupCSkETFBMcQExxATFEOYXxhC\nVDtkp9FoNE6npjkHj3AONUFKSUZ+Bsm5ySSZkjibfZbT2ac5k33mfHs2+yxnc85SaCkkJiiG2JBY\nYkNiaRzSmNiQWJqGNaVZWDOahzUn2De4znTTaDSaukI7BztiKjSddxonM09yKusUJ7NOcjzzOMcy\njnEs4xh+Rj+ahzWnZURLWobbJKIlrSNa0zC44fmeR3x8PHFxcU6xwx54kj2eZAt4lj3alppTU+fg\nVjkHZxHoE0jryNa0jmxd7utSSpJzkzmSfoQj6Uc4nHaYX0/8yuwdszmUdojsgmxaR7amTWQbfE74\ncDriNO2j29M2si2BPoEOtkaj0Wguje45OICsgiwOph7kQOoB9qXsY1/qPval7ONg6kGiA6PpEN2B\nTvU60bFeRzrW60j76Pb4Gf2crbZGo/EAdFjJDbFYLRzLOMbu5N3sStzFX8l/sStxF4fTD9MqohVd\n6neha4Ou5yUqIMrZKms0GjdDOwc3oSrxxgJzAXuS95BwLoEdiTvYfm47CecSCPUNpVtMN7o16EaP\nmB70ju1NdGC0YxSvAB0Ldl08yR5tS83ROQcPwtfoq5xATLfzx6zSytH0o2w/t53tZ7fz3pb3mLx4\nMlEBUfRp3IfejXrTr0k/OtXvhJdwq7mNGo3GBdE9BzfGKq3sTd7LplOb2HhyIxtObiDJlMTVja/m\nmibXMKDpAHo27Im3wdvZqmo0Giehw0oaABJzEtlwYgO/nviVdcfXcST9CH0b92Vgs4Fc2/xausV0\n0z0LjeYy4rIovOcJ2LuuSv2g+oy5YgzvDHmH7Xdv58hDR5jWfRonMk8wefFk6v9ffW5deCuzE2Zz\nJvtMre/nLnViqoIn2QKeZY+2xfHonIOHExkQyej2oxndfjQAJzNPsuLwCpYdXMYjKx6hSWgThrUZ\nxrA2w7iq0VW6V6HRaAAdVrqsMVvNbDq1iaUHlrL0wFKSc5O5sfWNjGo3isEtBuPv7e9sFTUaTS3R\nOQdNrTmafpQl+5eweN9itp/bzg0tb2B0+9Hc1PomXTtKo3FTdM7BTXDleGPz8OZM7z2d+KnxHHrw\nEDe0vIE5O+YQ+3YsY+eNZcGeBeQW5V7wHle2p7p4ki3gWfZoWxyPdg6acokOjOaO7nfw08SfODr9\nKENbDeWTPz+h4ZsNmbhoIssPLcdsNTtbTY1GYyd0WElTLZJMSczbPY+5O+dyIvMEt3a8lSldptC1\nQVdnq6bRaMpB5xw0Dmd/yn6+3Pklc3fOJdw/nGndpjGh0wTC/cOdrZpGo7Ghcw5ugrvEG6tC26i2\nDPIaxJHpR/jv4P/y64lfaf5ucyYtmkT8sXi3WtsbPOvZgGfZo21xPHqeg6bWeAkvrmtxHde1uI7U\n3FS+3Pkl9/90Pxarhbt73M3fu/6dCP8IZ6up0WiqgQ4raeyClJLfTv7Gx1s/ZumBpYxoO4IHr3qQ\nKxtd6WzVNJrLCp1z0LgsqbmpfJ7wOe9veZ/GIY35R+9/MLLdSIxeuuOq0dgbnXNwE9wl3lhVqmJP\nZEAkj139GIcfOsz0XtN5e9PbtHqvFW///jY5hTn2
V7KKXI7Pxl3Qtjge7Rw0DsPoZWRch3H8dvtv\nfDf2Ozae2kjzd5vz3NrnSDYlO1s9jUZTCh1W0jiVA6kH+L+N/8f8PfOZ1GkST/R9gsahjZ2tlkbj\nMeiwksYtaRPZhhnDZ7Dnvj34e/vT5eMu3LP0Ho5nHHe2ahrNZY12Dg7GXeKNVaWu7IkJjuGNwW9w\n4MEDRPpH0n1Gd+5ccidH04/WyfWrgn42rou2xfFo56BxKaIConh50MscfPAgMcExXPnpldy/7P46\nWZhIo9FUHZ1z0Lg0KbkpvL7hdT5L+Izbu97Ok/2eJCogytlqaTRug845aDySqIAo/nv9f9l17y5M\nRSbafdCO1za8Rr4539mqaTQejXYODsZd4o1VxVH2NAxuyIc3fcjvd/zOltNbaPdBO77e9TVWaa2z\ne+hn47poWxyPdg4at6J1ZGsWjV/EnFFzeOv3t+g9szcbT250tloajcehcw4at8UqrXy18yue/uVp\nBjYfyOvXvU7D4IbOVkujcSl0zkFz2eElvJjcZTL7HthH45DGdP6oM69veJ0Cc4GzVdNo3B7tHByM\nu8Qbq4or2BPkE8Qrg15h07RN/HbyNzp+1JHVR1ZX+zquYEtd4kn2aFscj3YOGo+hVUQrlty6hLdv\neJtpS6YxadEkEnMSna2WRuOW6JyDxiMxFZp4If4FvtjxBS9d+xLTuk/DS+jfQprLD72eg0ZTDjsT\nd3L30rvxMfgwc/hMWke2drZKGo1D0QlpN8Fd4o1VxdXt6Vy/Mxtu28CodqPoM6sPb258E4vVUu65\nrm5LdfEke7QtjseuzkEIMUsIkSiE2FnB6wOEEBlCiG02ecae+mguTwxeBv7R+x9snraZpQeX0vez\nvuxJ3uNstTQal8auYSUhRD8gB5gjpexczusDgEellCOqcC0dVtLUGqu0MuPPGTy79lmeuPoJHunz\nCAYvg7PV0mjshkuGlaSUG4D0S5xWbaU1mpriJby4p+c9bJm2haUHlxL3RRyH0w47Wy2NxuVwhZxD\nHyFEghBimRDiCmcrY2/cJd5YVdzVnubhzVn797WMbjea3rN68/HWj1m7dq2z1apT3PXZlIe2xfEY\nnXz/P4EmUspcIcRQ4HugTUUnT506lWbNmgEQFhZG165diYuLA0r+4K6+X4yr6HO52/Nw3MMMaTWE\nka+NxJBkYO2Va6kfVN9l9KvNfkJCgkvpU5v9hIQEl9LHlffj4+OZPXs2wPnvy5pg96GsQoimwI/l\n5RzKOfco0ENKmVbOazrnoLEbRZYiXlz3IrO2z2LGsBkMbzvc2SppNHWCS+YcbAgqyCsIIeqX2r4K\n5awucgwajb3xNnjz0rUvMW/sPB5a/hD3LL0HU6HJ2WppNE7D3kNZvwY2Am2EECeEELcJIe4WQtxl\nO2WsEOIvIcR24B1gvD31cQXKhmPcHU+yJz4+nmuaXkPC3QmYikz0/LQnOxPLHYXtFnjas/EU3MUW\nu+YcpJQTLvH6/4D/2VMHjaa6hPqFMnfUXObsmMOgOYP4d9y/uafnPQihB9ZpLh90+QyNphIOpB5g\n/ILxtAxvycwRMwnzC3O2ShpNtXDlnING47a0iWzD73f8TkxQDN0+6cYfp/9wtkoajUPQzsHBuEu8\nsap4kj0V2eJn9OP9G9/nzevf5Kavb+KDLR/gDr3Yy+HZuCPuYot2DhpNFRndfjS/3/E7n23/jFsW\n3kJ2QbazVdJo7IbOOWg01STfnM/0n6cTfzyeBeMW0Kl+J2erpNFUiN1yDkKIQCHUKilCiDZCiBFC\nCO+aKKnReAJ+Rj8+Gf4Jz/Z/lmvnXMvXu752tkoaTZ1TlbDSesBPCNEIWAlMBmbbUylPxl3ijVXF\nk+ypri2TOk/ilym/8Nza53jo54cotBTaR7Eacjk/G1fGXWypinMQUspcYDTwoZRyHNDBvmppNO5B\n5/qd2XrXVo5m
HGXgFwM5k33G2SppNHXCJXMOttnL9wFvA3dIKXcLIXZJKR0aaNU5B40rY5VWXvn1\nFT7a+hHzxs6jb5O+zlZJowHsO8/hH8DTwGKbY2gBeFZtY42mlngJL57p/wwzh89k1Hej+GTrJ85W\nSaOpFZd0DlLKdVLKEVLK1237R6SUD9lfNc/EXeKNVcWT7KkLW4a2Hspvt//Gu5vf5e4f73ZqHkI/\nG9fEXWypymilnkKIRbY1nncWiyOU02jckdaRrdk8bTNJuUkM/GIg53LOOVsljabaVCXnsB94HNgF\nWIuPSymP21e1i/TQOQeNW2GVVv6z7j/M2j6L72/5nu4x3Z2tkuYypKY5h6o4hw1Syn411qyO0M5B\n464s3LOQe5bdw4c3fsi4DuOcrY7mMsOeCennhRAzhRC3CiFGF0sNdNTgPvHGquJJ9tjLljFXjGHl\npJU8tuoxXoh/Aau0XvpNdYB+Nq6Ju9hSlfUcbgPaAd6UhJUksMheSmk0nka3mG5snraZ0d+NZnfy\nbr4Y+QUB3gHOVkujqZAq5RyklG0dpE9leuiwksbtKTAXcMeSOziQeoAlty6hQVADZ6uk8XDsGVba\nKIS4ogY6aTSaMvgafZk7ai7D2gyj98ze7Erc5WyVNJpyqYpz6A0kCCH224ax7tJDWWuOu8Qbq4on\n2eMoW4QQPDfgOV4Z9AqD5gxi+aHldrmPfjauibvYUpWcwxC7a6HRXIZM6DSBpqFNGTt/LM/1f457\nr7zX2SppNOepNOcghDAAu6WU7RynUoW66JyDxiM5nHaYG7++kRFtRvD64NfxEnoNLk3dYZecg5TS\nAuwXQjSpsWYajaZSWka0ZOPtG9l8ejPjF4wnryjP2SppNFXKOYQDu4UQvwghlhSLvRXzVNwl3lhV\nPMkeZ9oSGRDJqsmr8PbyZtCcQSSbkmt9Tf1sXBN3saUqOYdn7a6FRqPB1+jLl6O/5Nk1z9JnVh9+\nmvgTbSLbOFstzWVKldaQFkLUB6607W6RUibZVavyddA5B81lw8xtM3lmzTMsuHkB/Zo4vXqNxo2x\n5xrSNwNbgHHAzcBmIcTY6quo0WiqyrTu05gzag6jvxvNd39952x1NJchVck5/Au4Ukr5dynlFOAq\ndKipxrhLvLGqeJI9rmbL9S2vZ9XkVTy+6nFe2/Aa1e05u5o9tUHb4niq4hy8yoSRUqv4Po1GU0u6\nNOjC73f8zrd/fcu9y+7FbDU7WyXNZUJVaiv9F+gMfGM7NB7YKaV80s66ldVD5xw0ly1ZBVmMmTeG\nAO8AvhnzjS7ap6kydlvPwXbxMUDxium/SikXV/dGtUU7B83lTqGlkGlLpnEw7SA/3vojUQFRzlZJ\n4wbYs/AeUsqFUspHbOJwx+BJuEu8sap4kj2ubouPwYcvRn7BwGYDuXrW1RxJP1Lp+a5uT3XQtjie\nCuc5CCGOotZtKA8ppWxpH5U0Gk1FCCF4ZdArNA5pzDWfX8OyCcvo2qCrs9XSeCAVhpWEEJFlDnmh\nhrI+BmyTUo6xs25l9ZFSSt59911mzpwJwJ133slDDz3kSDU0GpdhwZ4F3LfsPhbcvID+Tfs7Wx2N\ni1LnYSUpZaqUMhVIB4YBa4E+wE2OdgzF7N69m1mzZrF161YSEhJYunQpR45U3rXWaDyVsVeM5Zsx\n3zB23liW7NcVbTR1S4XOQQjhLYS4G9gDXAOMlFJOklLucZh2Zdi7dy+9evXC19cXg8FA//79WbTI\nvVYrdZd4Y1XxJHvc0ZZBLQbx08SfuHvp3Xy+/fMLXnNHeypC2+J4KqutdBQwA+8AJ4DOQojOxS9K\nKR3+rdyxY0eeeeYZ0tPT8fX15aeffuLKK6+89Bs1Gg+mZ8OexP89nhu+vIHUvFQeu/oxZ6uk8QAq\nyznMpvKE9O32Uqo8inMOn3/+Of/73/8ICgqiQ4cO+Pr68tZbbzlSFY3GJTmZeZ
Lrv7yekW1H8sqg\nVxCi2mFmjQdi13kOrkB58xz+9a9/0bhxY+655x4naaXRuBYpuSnc+NWNdG3QlY9u+giDl8HZKmmc\njF3mOQgh2gkhBgkhgsocd87SoceOkbxrF6SkcGL3bhYvWsSEW291iio1xV3ijVXFk+zxBFuiAqL4\nZcovHE4/zKB/D6LAXOBsleoET3g2xbiLLZXNc3gIuB/YC8wSQkyXUv5ge/kVwD6roldGXBxjzp4l\nzWLBW0o+9PIipF49iIgokdBQ8PcHX1/w81MSFATBwRASUiJl98PD1fs0Gjcn2DeYZROWMfjfgxnx\n7QgW3byIQJ9AZ6ulcTMqyznsAvpIKXOEEM2ABcBcKeW7QojtUspujlOzkvIZ+fmQng5paUoyMqCg\nQB3Pz4e8PMjJgexsyMoqkbL7aWng5QWRkSWOJiysRMLD1bGoKHVOZCRERyvRTkXjgpitZu788U4O\npB5g2YRlhPmFOVsljROo85yDEGK3lLJDqf0glIPYA1wrpXTotEy711aSEnJzlZNITVVtZqZyNhkZ\nJQ4oJUW9npKiJClJ9VKio6FePWjQAOrXV22xxMRAw4Zq29vbfjZoNGWwSisPL3+Y9SfWs2LSCuoF\n1nO2ShoHYw/nsAZ4REqZUOqYEfgMmCildGimy2UL70mpeh7JyZCYqOTcuQvlzBk4exaSkogPDCSu\neXNo1KhEmjRR0rixEj8/Z1tVZeLj44mLi3O2GnWCJ9kCJfZIKXlx3Yt8+9e3rJq8isahjZ2tWrXx\npGfjaFtq6hwqm+cwBTXP4TxSSjMwRQjxSXVv5LEIofIcoaHQqlXl51os8P330KwZnD6t5NQpWLsW\nTpxQcuqUCmE1a6akeXPVtmihtps0AR8f+9ul8RiEELwQ9wKhvqFc8/k1rJy8Uq9Nrbkkbj2U1SOx\nWlVv49gxOHq0pD16FI4cUb2QmBho2RLatIG2bVXbpo1yHgY9dFFTMZ9t/4xn1jzDzxN/pkuDLs5W\nR+MALst5DpclRUWqh3HwoJIDB5Ts369CWq1aQfv2Sjp0gI4doXVrnevQnGf+7vk88PMDLB6/mKsb\nX+1sdTR2RjsHN8Gu8cbcXOUk9u5Vsns3/PUXnDypHETHjtC1K3Tpotr69Wt9Sx0Ldl0qs2f5oeVM\nXjyZr0d/zeCWgx2rWA3wpGfjCTmH4gu/XnZJ0PKOVfDeWaiKrolSys4VnPMeMBQwAVNLJ8DL4447\n7mDp0qXUr1+fnTt3XkqFy4uAAOjWTUlp8vKUs9i1C3bsgOXLISFBjbLq1g169oQePZQ0aqTyKBqP\nZkirISwev5jR343m42EfM7r9aGerpHExqrKG9DYpZfcyx3ZW9GVf5rx+QA4wp7zzhRBDgQeklDcJ\nIXoB70ope1dwLSmlZMOGDQQFBTFlyhTtHGqDlKpHsW0b/PknbN2qxGiEq66C3r2hVy+48ko1YVDj\nkWw/u50bv76R1wa9xt+7/t3Z6mjsQJ33HIQQ9wL3AS2EEKW/hYOB36pycSnlBiFE00pO+Rswx3bu\nZiFEqBCivpQysaI39OvXj+PHj1fl9prKEKJkCO3IkeqYlCqfsXkzbNoEzz6rehitWkHfvtCvn2qb\nNNG9Cw+hW0w31v59LdfPvZ7swmweuOoBZ6ukcREqq630NTAcWGJri6WHlHJSHd2/EXCy1P5p2zGP\nxaXrqggBTZvCzTfDW2/Bb7+pyX8zZigHsXCh6lU0aQKTJ8PMmcR/9ZVyKh6ASz+bGlBVe9pFtWP9\nbet5Z9M7vLz+ZVwxt+dJz8ZdbKmw5yClzAQygVuFEAagvu38ICFEkJTyhIN0PM/UqVNp1qwZGRkZ\npKSkXJAR190wAAAgAElEQVTYKf6Du/p+Ma6iT5X2e/UiPi8PuncnbsECOHiQ+Bkz4NtvVc/iySeJ\nv+IK6NmTuAcegIYNXUv/Ku4nJCS4lD6OtO
dYwjFeb/U6L+x+gcyCTIYahyKEcBl7EhISnHp/d9qP\nj49n9uzZADRr1oyaUpWcwwPAC0AiYLUdllXJOdje3xT4sYKcw8fAWinld7b9fcCA8sJKpUcrHT9+\nnOHDh+ucgysgJRw6BL/8AqtXw5o1qlTI4MEwZAj0769rT7kRaXlpDP1qKF3qd9Elvz0Eu5TstvEP\noK2UsoOUspNNquQYinWzSXksQc3ERgjRG8ioLN9QjJTSJbu+lyVCqGGy99wDCxaoMiKzZ6sChS+9\npOpNDR0K774Lhw87W1vNJYjwj2D15NUcTj/MxEUTKbQUOlsljZOoSs9hLTDYVjqjehcX4msgDohE\n9TyeB3xQPY8ZtnM+AIaghrLeJqXcVsG1ZOTrkZi+NVF4uBCryYoxxEjsiFjaDW5HhH8EEX4RhPqF\n4mPwwcfgg7eXNz4GH4J8ggjxDblIQv1CCfQOdOiKWfEeNF4bqmBPRobqVfz0EyxbpqrZjhih5Kqr\nXGpG92X3bCoh35zP+AXjMVvNzB83nwDvgLpVrpp40rNxtC12m+cAHAHihRDLgPMrh0gpL7k2p5Ry\nQhXOqfLwiL3376Xo3iKKLEUUWYsotBSSXZBNal4qaXlppOWlkZmfSaGlkNyiXAothRSYCzAVmcgq\nyDovmQWZqrWdG+IbQrh/OOF+4SWtXzgR/hGE+6u2WCL9I4kMiCTSPxJfo29VVb98CQuDMWOUWK3w\nxx+wZAncdZeqajtyJIweDXFxeha3C+Fn9GPBuAXc9sNtDP1qKD/e+iMhviHOVkvjQKrSc3i+vONS\nyhftolHFethlhnSRpYjMgkwy8jNIz0snPT+d9Lx00vLSLtgu7YBS81JJzU3F1+hLvcB61AusR3RA\nNPUC61E/sD4NghpQP0i1DYIaEBMUQ7CvnitwEYcOwaJFSg4ehGHD1EipwYN1cUEXwSqtPPjTg2w+\nvZnlk5YTFRDlbJU01cTu5TOEEAFSytxqa1ZHuFr5DCkl2YXZJJmSSDIlkWxKJtGUSGJOIudyzpFo\nUu25nHOcyT6DwctATFAMDYMb0iikEbHBscSGxNIopBFNQpvQOKQx9QLrXb6Lwp86pZzEd9+pEiAj\nR8L48TBwoJqYp3EaUkqeWfMMi/ctZuXklcSGxDpbJU01sJtzEEL0AWYBQVLKJkKILsDdUsr7aqZq\nzXA151AdpJRkFWRxNucsy1YuI7pDNKezTnMq6xSnsk9xKusUJzJPkF2QTePQxjQNbUqzsGYXScPg\nhniJqowhcBx2iZ+eOAHz5ytHceKEchITJ6rZ2nZ0np4U14a6t+eN397go60fsWryKlpFXKI8fR3j\nSc/Gk3IO7wA3oEYWIaXcIYToX90b1RUHDhxg/PjxxQZz5MgR/vOf//DQQw85S6VLIoQg1C+UUL9Q\nzjU8R1yXuHLPyy3K5WTmSY5nHudYxjGOZRzjp4M/cTTjKMcyjpGel06T0CY0D29Oy/CWtIlsc16a\nhTXD6OUhv7CbNIFHH1Vy8CB8/bVyDkKodsoUVZ5c41Ce6PsEYX5hDJg9gJ8n/kzn+tUZtKhxN6rS\nc9gspexVet1oIcQOKaVDi8GX13OwWq3ExsayefNmGjd2v9WtqktuUS7HM45zNOMoh9IOcTD1IAfS\nDnAg9QBns8/SIrwF7aPb0z5KSYd6HWgf1d4zEudSqmT23Llq8l3HjnDbbSrRHRjobO0uK77961um\nL5/OkluW0Cu2l7PV0VwCe4aVFgBvAR8AvYDpQE8p5S01UbSmlOccVq5cyX/+8x9+/fVXR6rikuQV\n5XEw7SB7k/eyN0XJ7qTdHE4/TLOwZnSs15FO9TrRtUFXujboSuOQxu6b3ygogKVL4fPPVYmPsWPV\n6KeePXXNJwex7MAypv4wlXlj5zGw+UBnq6OpBHs6hyjgXeA61GS2lcB0KWVqTRStKeU5hzvuuIMe\nPXpw33
0OTX/UCkfHGwsthexP2c+upF3sStzFjsQdJJxLIM+cR9cGXeneoDs9GvagZ8OetIpoVe2c\nhtNjwWfPqkl3n36qlmq96y6YMEFtVxOn21LH2NuetUfXcvOCm5n9t9nc1OYmu90HPOvZeEzOQUqZ\nAkyskVZ2wmq10qNHD/bs2cNrr73mbHVcGh+DD53qd6JT/U7QqeR4kimJhHMJbDu7jUV7F/GvNf8i\nLS+N7jHd6d2oN71ie9E7tjcNgho4T/mqEBMDTz8NTz6pJtvNmAH//KdKYt9/P3TqdOlraGrEwOYD\nWXrrUkZ8O4L3hrzH+I7jna2Spg6psOcghHhCSvmGEOJ94KKTpJQOzQCX7jm8/fbb/PDDD+zZs4ek\npCRHquHRpOSmsPXMVjaf2sym05vYcnoLwT7B9Gnch36N+9G3SV861evk+vV2zp5VPYkZM6BFC7jv\nPjXRTs+dsAs7E3cy9Kuh/Gfgf7i92+3OVkdThjoPKwkhhkspfxRClLsCiJTyi+rerDYIIeRdd0ny\n80+xdu1tgJX8/DTuu287QUFqPZqgIJWb9PZWQ+OLxd9fHQ8IUFK87eVao0JdDiklB9MOsvHkRjac\n2MBvJ3/jTPYZ+sT2YUDTAcQ1i6Nnw554G1x0ZnNRkZqN/cEHap3te+9VYad69ZytmcdxIPUA1825\njif7Psn9V93vbHU0pbgs1pD++GPJJ5+Mo3//R/jkk6E0aXI1t9zyEzk5kJ0NOTlgMoHZXCJFRWqV\nzNzcEjGZVOvrqxxFUFCJlN4PDi6RkJCL25AQVR0iNFSdX5VcqLvHTpNNyWw4sYF1x9ex7vg69m3d\nxzX9r+Ha5tdyXYvr6Nagm2v2LHbtgvffV/Mn/vY3mD79ouVU3f3ZlMXR9hxNP8qgOYO4t+e9PN73\n8Tq9tic9G4/JOQghVgHjpJQZtv1w4Fsp5Q3VV7N2xMYu4+qr6/POO30YOfJ73nzzTV6sYREPKZXT\nKHYoJpPaLt7Pzi6RrCw4erRku7jNylJ15TIzIT9fOYvwcOUwwsNLJDISIiJUe+aM6s1ERSkJD3ep\n2nOXJDowmlHtRzGq/SgAfmz2I9amVtYcXcOUxVNINCUqR9H8Oq5veT3Nw11kPkKnTirM9OqrMHOm\nKvzXqpWaS3HjjbobWQc0D2/O+tvWc92c68gtyuW5Ac+574g4TZVGKyVIKbuWOXZ+zoOjEELIp59+\nmi+//BKj0UheXh7Z2dmMHj2aOXPmOFKVcjGblZNIT1eSkaHatLQLJSUFUlNVm5Ki3hMWpiId0dGq\nrVcP6tdX0qBBSdugAfj5OdvSyjmddZo1R9ew8shKVh5eSahvKDe0vIHrW17PoBaDnF7d8zxFRTBv\nHrz5pupGPvywmlyn156oNYk5iQyeO5ghrYbw+nWvawfhZOw5lPVPYFTxym+2xXsWSym710jTGiKE\nkMdeOca57HM8Mv8RTqSdIN2UzpOjn+Tu0XdjDDZiCDZgCDQgvIUSo2q9/Lww+Bvw8vNCGFzrg2o2\nK6eRlKSWQkhOhsTEEjl3TknxfkCAchIxMRAbC40aKYmNhcaN1eTiqCjXGO5vlVZ2Ju5kxaEVLD+8\nnD/P/En/pv0Z3mY4w9oMo1GIC6wIKyWsW6ecxNat8NBDKjcRFuZszdya1NxUhnw1hF6NevHe0Pdc\nruzL5YQ9ncMQYAawDjXP4RrgLinlipooWlOEEPLwU4dJykwiKSuJ7LxsZmydwYmME7zb612a0hRL\ntgWLyYIskkizxFpkRRZJrPlWrHlWrPlWhFHg5e+Fl9+FYggyYAg2XOBkvAK88PK3OZYAdY4xxPa6\nTYwhRgwh6n1VcTy1iTdKqRzJuXMqPHX6dImcOgUnT6pSRHl5ylk0bQrNml0oLVoo51JXzqM69mTk\nZ7D80HJ+PPAjyw8tp0V4C0a3G83o9qNpG9W2bhSqBfGff07c2rVq3Yk7
7lC9iZgYZ6tVY5wdp8/M\nz+Smr2+idWRrZg6fWatclLNtqUs8JucgpVwuhOgO9LYd+odt7oPDafFqC1rQ4vz+RCYycuRIAh8M\npMugS1fzkFIiCyWWPAuyQLXFjsNisijnUiwmizqea6EotQjLCQuWHNtrttacbcaSZcGcZcaSbcHL\n3wtjqFFJWKk23NaGGUlJSiEpJQnvcG+MEeo170hvDEGGS3a/hVB5i8hI6NCh4vNMJuUkTpyAY8eU\nLF2q8iZHjqjXmzdXjqJVK2jTRi3m1rq1cir2Cr+H+YVxS8dbuKXjLZitZtYfX8+ivYu4ds61hPmF\nMbrdaMZ1GEenep2cE4po3lyV5Dh+HN56S/2RJ0xQcygug/IsdU2oXygrJq1g5HcjmbBoAl+O+tJ1\nR7ZpLqKyoaztpJT7bI7hIipasc1elDdD+tixY8TFxfHXX38RFBTkSHUuQlolFpMFc6YZS6ZqzRml\nJN1MUXoR5nTzeSlKU/tFqUVIs8Q7yhvvSG/VRivxifZR2/W88annc741hhtr/AVanGA/fFgtqXDw\noJIDB1SupE0baNeuRNq3h7Zt7ZfvsEorW05vYeGehczbM49A70DGdxjP+I7jaRfVzj43rQqJicpJ\nfPopjBsHTz2lC/7VgHxzPuPmj0MgmDduHn5GF0+ceRj2mOcwQ0p5l22Z0LJIKeW11b1ZbSjrHHJy\ncoiLi+PZZ5/lb3/7myNVsQuWPNVDMaeaKUwupCi56LwUJhVe0BYlFWExWfCp74NPTIn4NvI9Lz6N\nfPCN9cUYWj0nkp2tnMS+fUr27oU9e1SPo2lT9WO6Y0fo3FlJy5Z1O9pKSsmmU5uYt3se8/bMo15g\nPSZ1msStnW6lYXDDurtRdUhJgXfegY8/VutMPPOMitFpqkyRpYhJiyeRmpvKD7f8QKCPLpboKOzh\nHMZJKecLIVpIKY/UWsNaUto5mM1mhg0bxtChQ5k+fbqTNasedRVvtORbKDxXSOHZEik4XXBeCk8X\nUnCqACklfo398I31xbexL35N/fBr5odvU1/VxvriZbx0HKmwUDmN3bvhr7/UtIGdO+HMmXg6dYqj\nWzfOS+fOKnFeW6zSyvrj65m7Yy6L9y2mR8MeTOo0iTFXjCHIp+57ipd8Nmlpqifx0UeqGuy//qU8\npovianF6i9XCtB+ncTD1IMsmLCPUr+r1r1zNltrgLjmHypzDNill9+K21hrWEiGElD/8AEYjU956\ni6iwMN564AEV6yg9/dnf/8Lp0QaDawzdseHoD4Y500zBqQLyT+ZTcLKA/OP55B/Lp+B4AfnH8ilM\nKsQ31hf/Fv74tfDDv4U//q388W/tj39LfwyBlXcLli2LJywsjoQE2L4dtm1TPY4WLaBHD1UotWdP\n6NKldg4jryiPpQeWMmfnHDac2MCodqO4rett9GvSr87yE1V+NqmpanTTJ5+oZU2feUYNGXMxXPEL\n1SqtTP95OhtPbWTFpBVVXnbUFW2pKZ7gHFYDVuAqYH3Z16WUI6p7s9oghJBy2DB+S0mh/6ZNdAoK\nQkiJkJJXoqMZYjCoTGte3oVTpK1WVVPH11c5kuK2WIr3i51LafH3V1K8XVx7o7SUPeZmS1paC6zk\nH88n70ge+UfzyTuUd17yj+RjjDAS0CYA/7b+BLQLIKBtAAHtAvBr6ofwKv/zVlioehd//qlGh/75\npwpNtWsHvXuXSOvWNfPb53LOMXfHXD5P+JwiaxG3d72dqV2nEhPs4JFFKSnw+uswa5ZKZD/1lJqs\noqkUKSX//OWfLDmwhNWTVzv+uV1m2MM5+ADdgbnAtLKvSynXVfdmtaHGy4RarerbqqBASX6+ktLb\neXkX19jIzb3weF5eSd2N4inVpfeLW2/vC2tvFNfZKJbQ0JI2NFSNpy89pTosTDkZF+jtSKuk4FQB\nuftzyd2fS97+PHL35WLaa8Kcbiag
XQCBHQIJuCKAwI6BBHUKwreJb7m/5PPzISEBfv8dNm1SbV4e\n9O0L/fop6d69erXxpJRsPr2ZWdtmsWDvAgY0HcC07tMY0mqIY1fFO3sWXn4ZvvlGFfl77LEalQy/\n3Hh5/cvM3jGbX6b8QpPQJs5Wx2Oxh3OYK6WcXFydtdYa1hK3WENaSvUtWLruRulaG1lZxG/bRlxU\nlJoanZmphgcVS/HU6qKiknobxW2xREWpNjq6ZEp1dLRyRA50KOYsM7l7c1m1aBVdLF0w/WXCtNOE\nxWRRjqJzEEFdlQR2CsQQcHF46tQptVbPhg1KDh6EXr0gLg4GDFDbvlVcxC67IJt5u+cxc/tMTmae\nZFr3aUzrPo3YkNgq21Tr7v6xY/Dii2qexBNPqJLhTpxx7Q6hmHc2vcM7m97hlym/0DKiZYXnuYMt\nVcVdwkqV/bzqIYRoCEwUQnyKmgB3HillWnVv5vEIURKKqqjyZ3y8+varjIKCknobqakXSnKyCuoX\nT6dOSlJiNpfU3ChdeyMmpqSNiYGGDav+jVsJxhAjIb1CiMyLpFVcyWLzRalF5OzKwbTDRNbmLM7M\nOEPu3lz8mvoR1D2I4J7BBPcMJqhbELGxRsaPV0svgPKVGzaoP9Fjj6lQVK9ecO21MGiQyl1UFLUL\n9g3mju53cEf3O9iZuJNPtn5C5486M6DZAO7pcQ+DWw62/yzdZs3U6nR79qg8xDvvwPPPq5CTm4Ub\nHcU/ev8Df6M/A2YPYNXkVbSPbu9slTQ2Kus5PATcC7QATnOhc5BSyhblvtFOFPccCgoK6N+/P4WF\nhZjNZsaOHcvzzz/vSFVck9zcC+tuFNfeOHtWSfG06nPnVMijuO5G48YldTeK29jYOl37wFpkJXdv\nLtnbssneqsS0y4RfMz9CeoUQ0iuE4F7BBHYMvGDkVFYWrF+v1vBZs0bNTevfH66/XsmlchY5hTl8\ns+sbPtr6ETmFOTxw1QNM7TqVEN+QOrOtUjZvVgsRnT4NL72kljN1gXChKzJnxxyeWv0UP0/8mS4N\nHLo8vcdjz/IZH0kp762xZnVE6bBSbm4uAQEBWCwW+vbty3vvvcdVV13lZA3dBKtV9TSK626cPHmh\nnDihnEl0tPol3LSpGnpUWho1qvU0amuRFdMu1bvI2pxF9uZsCk4VEHxlMCFXhxDaN5SQ3iF4h5fM\nqE1KUk5i1SpYsUL9GL/hBiXXXadSOeUhpWTjyY28v+V9Vh5eycROE3ngqgccU7JDSli9WiWrhYDX\nXlPKai5i/u75PPDzA/x4649c1Uj/P9cVdl3PQQjRD2gtpfzctqZ0sJTyaA30rDHl5Rxyc3Pp378/\nH330EVdeeaUj1akxbhE7NZuV4zh+XMXRi+tuFEtampop3KoV8X5+xA0cqKZVt2mjeh01/HVclF5E\n1qYssjZmkflbJtl/ZOPXzI/Q/qGEXhNKWP8wfBuqkJiUaoLeihWwfDls3KiGzg4dqipwd+xYvhqn\ns07z8daPmbFtBr0a9eLRPo/Sv2l/hBD2fTZWKyxYoMJNTZooJ9Gzp33uZcMtPmtl+HH/j9yx5A4W\n3ryQa5pec/64O9pSEe6Sc6hKz+F5oCfQVkrZxpaHmC+l7FszVWtGsXNYvnw506dP5/jx4wA8/PDD\nvPrqq45UpVZ4xIc8N1c5iYMHiV+xgjirVc2Q279fJeBbt1b1Nq64QtXeuOIKdayaoSprkZWchBwy\nf80kY30GmRsyMYYZCRsQRlicEr/GqhSDyaRyFT/9pMRqhZtugmHDYODAi/PCeUV5zNkxh7c2vUWw\nTzCP9nmUesn1GHTtoDr6I1VAUZEa+vrvf8M116hwU+vWdrmVu37WVh1exYRFE/hmzDdc10L1stzV\nlvLwJOeQAHQDthWv4SCE2Cml7FwjTWuIEEJaLBbatGnDL7/8QsOGDenevTuBgYF89tlnXHHFFY5U\n
R1MRmZlq2NHevSWyZ4/qhbRooX7Sd+pUUoOjefMqh6ikVWLaYyJzXSYZ8RlkrMvAEGQ47yjCBoTh\n19TvfK9i6VI1cGj7duUghg9XzqJBg5JrWqWVZQeW8d+N/+VU1ikev/pxpnadir+3nUcZmUzw7rtq\nxvXNN8Nzz12o2GXOr8d/Zcy8MXz2t88Y1maYs9Vxa+zpHLZIKa8qNWM6EPjdGc7h999/58UXX+Tn\nn38G4LXXXmPNmjUMGTKERx55xJHqaKpLQYEaZfXXXxfW38jIUM6iSxcl3bur/SpU+ZNSkrsnl4x1\nGSXOIsDmLAYq8WvsR1qaCj0tWaLCUG3bqoXgRo1SHZtiNp7cyGsbXuOPM38wvdd07u15b7VKPNSI\nlBR45RX44gu1lsSjj6p5Mhq2nN7C8G+G88HQDxjXYZyz1XFbauocVBnrSgR4DPgEOALcCfwOPHip\n99W1AHL+/Ply8uTJMiMjQ0op5axZs2TDhg3lsmXLpLuwdu1aZ6tQp9TantRUKdeulfKdd6ScOlXK\nLl2k9PdX7W23SfnBB1Ju3ixlfv4lL2W1WmXOnhx56sNT8q9xf8kNURvk7y1/l/um7ZOJ3ybKguQC\nWVAg5erVUj7wgJSxsVK2bSvl009LuWWLlGvWKFt2ntspJy6cKCNfj5TPr31epuWm1c7GqnDkiJS3\n3iplTIyUn3wiZVFRrS/pCZ+1hLMJMub/YuRTnz7lbFXqDEc/F/U1X/3v3KompAcD16OGs66QUq6q\ntheqJUIIyQsvwC+/IM6dUxnJrCwM0dE0njmTQIOBQC8vAgwGjEKcF28h8PXywr9YDAYCbNsBtu3i\nNtBgIMB2nUCDgUCDgSBba3B0/R43wS725OernkVx/Y0//lChqiuugKuuUrU3+vRRi1FU8lykVWLa\nbSJjTQbpq9PJWJ9BQJsAwq8PJ+L6CIL7hLBthxeLF8OiRZCWFs+ECXGMHq1mbh/NPMTLv77Mkv1L\nuK/nfTzc52Ei/CPq1taybN0Kjz+uhhy/8YaKg9Xws+cpn7W9yXvp/0J/XrrtJe7uebez1ak1HpNz\nsF28PlA8HGiLlDKpujeqLUIIuXHjRl544QWW/PwzZin57+uvY5aS2x55BJPFgsliIddqxSzleSmy\nWimQkjyLhTyrlTyrldwy27lW6/n3Fl/HZLFgslrJsW37enkRZHMWxRJsMBBiMBBiNKpto5FQg4FQ\no/H8dpjReF7Cvb3x1QvZ14zcXFV/Y8sWVXujuP5G797qm/yaa9Ton0om+FkLrWRtyiJtZRrpK9LJ\nO5RH2LVhRN4YScSQCA5n+rJwoXIUZ8+q6txjx0Ljzof576ZXWbxvMfdfeT+P9HmEMD87LiMqZcks\n63r14P/+z+4jm1ydw2mHGTRnENN7TefhPg87Wx23wp45h5uB/wLxlCwT+riUckEN9KwxQgj58MNm\n/ve/cAwGX4KDG+DlZWDSpG/YseNzdu/+EV9fX2JjW/LPf35OWFgIRqMqdeTnVzJxuViqswaBlPK8\n48ixSbatzTKbyba1WRYLmWazklLb6WYzGTYxCEGE0Ui40UiEtzcRtjbS25tIo5Eo23a0tzfRPj5E\neXsTbjTipSdPXcypU8pJFNfg2LdP1Qy/5ho1W65vX1VWpAIKEwtJW5FG2s9ppK1Mw6+pH5HDI4kc\nFklSWDCLvxcsWKAGZo0YAX1vOEu817P8fPQHpveazvRe0wn2rfj6tcZshs8+gxdeUBn1V15x6RLh\n9uZE5gkGzRnE1C5T+Vf/fzlbHbfBns5hBzC4uLcghIgGVkspHTqNUQgh33hDsnbt/7Fx4/vk5Jyj\nf/8X6dbtKfbtW01w8LXk5Hixc+dTWK2C2NhXKSpSIwdL19YrFqPxQmdRulZecPDFRVcDAi58vbxa\nen5+lUcApJQsX7OGTn37kmZzGmlFRaQWFZFqNqu2qIgUmyTbJM
diIcJopJ6PD9He3tTz9qa+jw8N\nbBJjaxv5+hLp7e1QR+JSoYvsbFXV79dfYd06FZa64grlKAYOVE6jgplyVrOVn/73E+1PtSflxxQs\nmRYiR0QSPTqazBZhfL/Ui/nz1Yjda4dmktrif+wK+IAnrnmE+6+8376jm3JyVO/h/ffhzjvVrOsq\nFPZzqWdTS4ptOZt9luvmXsfItiN56dqXnLOcbC3xmLCSEGKXlLJTqX0vYEfpY46g9CS448ePM3z4\ncHbu3HnRed9//z0LFy5k7ty5FV5LSlWotXTRVZNJfbfk5JS0pQuu5uSUvFa6nl5mZkkrZUmR1dJS\nXHA1PBySk+Pp0yfuopp6lQ3OKbJazzuLpMJCkouKOFdYSGJhIWcLCzlna88UFJBtsdDAx4eGvr7E\nlpHGvr408fUlxtf38sih5OerMNS6dbB2rdru1Ek5ikGDVM+i1B++tC25B3NJ+SGFlIUp5O7PJXJY\nJFGjo8i5IoKFSwx89x0cOWYmtPsqstt8wr8nD2Vaj9vtu0by6dNqyOuyZfDss3DXXaprXAEu/Wyq\nSWlbkk3JXP/l9cQ1jeOtG95yOwfhSc7hv0Bn4BvbofHALinlE9XWshZU1TmMGDGCW265hQkTJjhS\nPUB9F5UttJqRoSYUp6crKa6nV7auno+PKrgaFVVSbLWsFNfTi46u+Dsh32LhXGEhpwsLOV1QwMmC\nAk4VFHAyP5+TBQWcKCggraiIRjZH0czPj+b+/jT386O5nx8t/P2J8fHxzDBWfr4KQ61Zowo27dql\nEtuDB6uSFl26lDvnouB0ASnfp5C8IJmchBwih0USPT6a9JYRzF/sxcwvcjmbmo1ft+956p4mPD76\nBvsW+duxQ1UmPHlSzZO48Ub73ctFSc9LZ8hXQ+jWoBsf3vSh/YsqujH2TkiPBvrZdn+VUi6u7o1q\nS1Wcw8svv8y2bdtYuHCho9WrFVKqXklyshr2npR0YcHVxMSSNjFROZOwMFVgtVhiYlTlitISGVl+\nmCvfYuFUQQHHCwo4lp/P0bw8jubnn5dMs5nmfn609Penlb8/rf39aRMQQBt/fxr5+nqO48jMVNOq\nVwWVVLEAACAASURBVK2ClSvVfnGxpuuvL3fhnoKzBSQvTCb5u2RMe0xEjYyi3oT6nIwI49VPjrN4\nnh9eAVlMngwvPNiGhvZa9lpKNRX8kUfU5MI331QhtMuIrIIshn09jObhzZk1YpZj1/BwI+yxnkMr\noL6U8rcyx/sBZ6WUh2ukaQ0p7RxiY2NJTU2lXbt2eHt7s2XLFmbPns2nn37KmjVr8K2DktT2oi66\nlBaLch7FBVfPnCmpo3fqlJKTJ9W8s+JCq02bqrZZMzUpuXlz5VDKS8znmM0cyc/nUF7eeTmQm8uB\nvDwyzWZa+/vTLiCA9oGBWLZvZ8zgwbT198evOll+FyT+m2+Iy8xUM+bi41WtqGHDlHTrdpGnzT+V\nT9K3SSR9lURhUiH1bq1H9K31eWfnNt6akUTuziH06Gnl/mmhjBplp7lthYXw4YdqsaFbblHrSUSo\n4baeGlYqjanQxMjvRhLhH8GXo760b1ivjnD7sJIQYinwtJRyV5njnYBXpJTDa6RpDRFCyKys7Qhh\npF27wQQFBbJ9+2oMBn9WrfqdJ574F+vXrycyMtKRalUbR34wcnJKCq2eOKEqWBw9WlJLLy1NOYxW\nraBlSyWtWqnvxObNyw9dZZnNHMjNZX9eHntNJn5dt46UDh04kp9PY19fOgQG0iEggI6BgXQKCqKN\nvz/ebjJ894JnU1ioRkH9+KOSvDxVrGnkSLXARJkfIKY9JhK/TiTxy0SMoUaip0SzrPVanpm3Bf+9\n08g51JURww1MmqQiWHW+vENKispHLFyo6jXdfjvxv/7q8c4BIN+cz9h5YzF6Gflu7Hf4Gl33xyF4\nhnP4Q0pZbqnTsklqRyCEkF
u2dOGZZ46xenUmBgNERBi4/fYA5szJwWyWhIR4IYQXHTr48sQT0Qjh\njRDeeHn54eXlj8Hgj5dXsZQ+5ofBEITBEFymDcDLK6BUG4zRGIyXl2t/+KpKXp5yFIcPw6FDJe2B\nA6oX0qSJKjXRtq0qM9G+vVoHOqKceWBFVisH8/LYbTKx22TiL5OJXSYTJwsKaO3vT6fAQLoGBdEt\nOJhuQUFEVJJIdUn271f1N374QZX/uOEGVX/jxhsvGAElrZKMdRmcm32OlB9SCO4fzKY+m3jW9D5t\nkp8hb9sYzpz0YeJEmDpV5cfrlO3b4YEHlHP74AO1WtJlQKGlkFsX3kpeUR4Lb15o/9pYboQ9nMNB\nKWW55SKFEIeklK3Ke81elA4rtWjRgrCwMAwGA3fddRd33nknUlqwWHKxWnOxWouQshApi7Bai7Ba\n87Fa886LxZJX6lg+VmsuFosJiyUbiyUHs1m16vVcLJbi13OwWLIAbI4iBIMhBKMxFKMxFIMhFKMx\nzLZfug3HaAzH2zvcth2KEK4dgikoUOP79+9X0weKa+jt26eG9XbooGrndeyotjt1Kn9KQa7Fwh6T\niZ0mEwk5OWzPyWFHTg4RRiPdg4PpaZMewcFEuovDSExUvYnFi9Ww2QEDYMwYNRmilOc0Z5tJnpfM\n2ZlnyTuRx95r9/JG4zfo3+pOQvY9wPxv/KlXTzmJCRNUjqhOkBLmzoUnn1TO69VXK16Z0IMwW81M\nWTyFRFMiS25ZQqBPoLNVcgns4Ry+AdZIKT8tc3waat7D+BppWkNKO4ezZ88SExNDcnIygwcP5oMP\nPqBfv36XuELdYbUW2BxIFmZz5vm2WCyWTMzmjPNSVJSO2azkjz+S6NQpH6MxxOYwIjAaI/H2jrJJ\n8XY03t5R+Pio1miMxMsFEm5SqpzG7t3/z955x0dRrX38O9s3fdN7Qu8gXXpUlKIIgiL6Ktiwe+3X\nit177b1e5YpdUa8oKigtIL13pEN6zybZvjtz3j/OJgQISCAQLD8+z+ecmczOnsPMzjPnKb9HvkDP\nnZtNWVkW27ZJUtGuXQ9w6HXvLn0dh/qvNSHY7XazpqaG1UFZ63AQazTSNyKCMyMi6BseTvfw8FOa\nUX5cy/2qKkn/+s03sqhP//6SZfWii2TschCODQ4K3iug6NMictvkMq3LNM66YihdPLfzxScWfvxR\nPscnT5ZVZJvC55/9ww9kzZ8vFcUjj8BNN/1hy5Ue67VRNZXJMyezs2InP17+46mr+tcI/BnMSgnA\nt4APWBPc3QswARcJIYqOc6zHhYaK/QA8/vjjhIeH/2FYWbOzsxkyZBCBQBV+fwWBQAV+fzl+f1m9\ntrSBthKDIbyeEonHZErEZErCbE4KtimYTCmYTPEopyi0r/ZGV1VJf7Rxo5R166R4PFJJdO8uGSB6\n95bBNQ0pjO0uFyuqq1leXc2Kmhp2uFx0CQ1lQGQkAyMj6R8ZSUITli890lyOGw6HzEGYPl0qioED\nZYHsMWPqTE+qU6XkixJ2v7ab8uJyfujzA33v6MuFXa/ii88MvPeeXLVdd50sPd1AwFTj57NlC9x2\nm/RLvPGGTAr8g6Ex10YTGrf8eAvritYx+4rZJ5fq5Djwh1cO9U58FtA5uLlFCDH/OMZ3wlAUReTm\nvoLbrSKEQlhYKG53gMsue4V77hnP2Wf3RaczB/0BehTFgKLUtsbg30woimx1OnOw/8d4kxJCDa5C\npALx+Yrx+Qrx+QrxemvbfHy+fAKBqqCySA1KGhZLWrDNwGzOwGiMOSXJQ0VFUkmsXXuAQ8/tlkqi\nlkPvzDMb9mM4VZVV1dUsqa5mSVUVy6qriTUaGRgZyaCgtLZaT88kqJoaaXr68ksZ+TRsGPzf/8Hw\n4WA2I4SgekU1G5/fiGO2g9VnrKb7A90ZOfJ8Vq5UePddmDFDriZuvFHqmROaphDw1VcyP6Jf
P3j2\nWRm69ieFEII7f76TRfsXMefKOcSEnN6BKicTJzXP4XSAoihix47byMmp5NZbZ6EoEAionH9+Jtde\n2wohvEH/gRch1KAEgiJ9EJrmRdN8wWOlAEHFYQlK/b6lznF9aHuwc7t2OwS9PrSuPSBh6PVh6HSh\np0QZaZoXr7cArzcfrze3TjyeXLze/Xg8+9A0PxZLBhZLC6zWllgsLbFaW9X19fqT59ArLJRKopZD\nb9UqGVZ75pnyIThwoHR8N7S62OJ0sriqil+DEhCCQZGRDImKYnBkJJ1CQ0+/PIyKClki9NNPpS3u\n4oulo+HMM0FR8BZ7WfivhbinuSlNK6XTfZ048//OxF6l8NFH8M47Mjfv5pth4sSj0kX9PlwueP55\neO01qXUeeOBPWz9CCMED8x7gp50/MefKOSSEJTT3kJoFfwnlcDLGqmkBNM1TT2F4gm2ts9pT57w+\n2JFd37l9wHF9wLntQtOc9RzZDlTVyfr1enr3jgxGRMmoqFrHdkNO7lpHt3RmRzWZQzsQqMLj2Y/H\nsxe3e09d63bvwuvdj9EYh9XaBqu1NSEhbQkJaY/V2g6LJfMgBddUeRtbtkhFsXixlJoaqSQGDZL+\n3jPOONxcLoRgv8fDoqoqFtrtLLTbsQcCDI6KYqjNxrk2W6NWFqdkuZ+bK5XEBx/I7auukk/8lBR8\nbh/fPvct7v+4iVQiaXN/GzpO7ohi0rFwIbz5pkzsvvxyuOWWgwsVNXo+eXlSMcyfLwn9rrzymCvy\nNQeO99oIIXh84eN8ueVL5k2cR3L4ycpKPHb8acxKJwpFUYYDrwA6YKoQ4tlD/j4E+A5ZTAjgf0KI\npxo4z0lRDqcSQgjmz/+ZQYN6HBIZVevcrj6Cc7sq6NCWzm1VralzaBsM0UGndnTQmS2l1sktHdrS\nqa3XhxzjOFU8nhzc7l243Ttxubbjcm3H7d6Oz1eExdKS0NCOhIR0ZP16OPfcSwgJaYtO13T+gLw8\nqSRqOfTy8qSvNytLphl0795wAl++18uCykrmVlYyp7ISo6JwbnQ059lsDLXZsJ0uXERCSJLAadOk\nuadfP7jhBhg5Eofq4f033od3oX1Fe1rd3YoWt7TAEGEgLw/+8x8pnTvLBOnhwxt+rh/TfFaskP4I\nvV76I3r2PBmzPWGc6LX516//4uONH5M9KbvZVxB/KwfqSPp2AOcABcAqYIIQ4rd6xwwB7hZCXPg7\n5/rDK4emwgH/Q20UVAV+v3RsBwLl9RzbZXUObZ+vFEXRYzIlBJ3Z8cF+QtChnVjn4DaZko5oVlJV\nd1BZbMPp3ILLtRWncwtebw5Wa2tCQ7sQGtqFsLCuhIZ2xWxObRKfQGmpVBTZ2fLtubBQrijOOUcm\nlbVrd7gZSgjBNpeLOZWV/FxRweKqKjqFhnKezcbw6Gj6REQ0GQHhCcHlkk7sd9+VWvDaa+G668iP\nUHhp6ktEfxzNmXvOJOPmDNLvTMcUZ8Lnk+6MF1+UKQ133QVXXHFM1VUPh6ZJJfXQQ7LQ9tNPn5gn\n/DTF49mPM33rdLInZRMX+ueb35FwuiqHM4FHhRAjgtv3I0vWPVvvmCHAPb+Xcf23cjgxCCFQVQd+\nfwk+Xwk+XzF+f3HQsV1UT6SDW68PwWRKxmxOxmxOqefclg5uszkdo/FAFIhUGltxODbhdG7C6dyI\nw7EJIbyEhnYlLKwrYWE9CQ/vSUhIhxP2vRQWSovI3LlSFEXy5517rlQYDT3bPKrKkupqfq6oYFZF\nBYVeL8Oiozk/JoZh0dGnR57Fxo1yWfDZZ3KZdPPNrOtg44nPnqLXzF4M2DiA1KtTSbsnDXOKGSEk\n4eyLL0qG8ttvlyanIzCTHx12u6Tf+PRTePJJGVd7GpuaGgshBFMWTGHmjpnMnzj/L+OkPl2Vwzhg\nmBDi+uD2FUAfIcQ/6h0zBPgGyAPykYWEtjZwrj+Fcvgj
8N0IIQgEKuqc2j5fPl5vXtChnRd0cOcA\nOrZsiaZ//45YLJn1pAVWawsMhmj8/pKgwthATc1aampW4/XmExbWjfDw3kRE9CEioi8WS8vjXmEI\nIZP15s6VHHoLF0KbNtLcMmyY9Ps2FN6f4/Ewq6KCn8rLWWC3k/Hbb0waNowLY2NpG3JsJriTBodD\nPqTffBO8XsSNN/J9v2genf0yV626ih5Le5A4IZH0B9KxpMvlwtatMt9t9myZ0tCzZzajR2c1/rs3\nbpQnUFV4+21pw2tmNNXvRgjB/XPvZ86eOcybOA+b1fb7H2pi/FHMSqdDHOcaIF0I4VIUZQQwA2jb\n0IFXXXUVmcHwu6ioKM4444y6/+Ts7GyA0367FqfLeBraVhSFJUs2BbeHN3j8ggULUFUHGRnFJCfH\ns2DBXHy+lXTrtgyPZx9Ll+4ABP37t8NqbcX69UYslhTOOed9TKZE5s6didu9nU6dvmH37n+yZk0N\nISEdGTp0FBER/Vm71oNebznG8UJRUTadO8Ott2bh98Nbb2WzciX84x9Z7N0L3bpl068f3HVXFnFx\nBz5/Q1YWNyQn88u8eXxdXMxOt5us9esxbNjAwMhI7h41ih5hYSxcuPDUXo/Vq6FdO7I2bIAlS1j4\nyCNETlnJ6ssuZeq4nVwa8yyTtk/i/DPOJ2FCAvuy9mGKN/Hxx1ns3g23357NCy+s57bbsrj3Xti8\nuRHf37Ur2U8+CbNmkTV8OFx2Gdnnnguhoc12f65fv77JzvfM0GfY8/we+k3px8qnVxJhjjitfn8n\nup2dnc20adMA6p6Xx4NTYVZ6TAgxPLh9mFmpgc/sBXoKISoO2f+nWDn8VSBXH5W43btxu3fj8ezG\n5dqJ270Dl2sHQvgJCWlHSEh7QkI6YDTGo2lO3O691NQsx+HYSGhoRyIjBxEVNYTIyEEYjQ0kQxwD\niopg1iyZyDx3rqT7uOACmZvWoUPDIbOra2r4tqyMr0tLCQjBuNhYxsXF0TciovlCZQsL4b334N13\n8WSk8u4gC29bCvn3/meJ+z6O+MviyXgwA3OK5P7KzYVnnoEvvpBRq3ff3XA+yVFRViZrWc+ZIyvR\njRnT9PNqBgghuPnHm9latpVZ/zeLEGMzrxRPIk5Xs5Ie2I50SBcCK4HLhBDb6h2TIIQoDvb7ANOF\nEJkNnEsIIcjLy2PixIkUFxej0+m4/vrrue2223jkkUf47rvv0Ol0JCQkMG3aNBITE0/a3P7GicHv\nLw86tn8LOre34XJtw+crxGptQ0hIOwyGSFTVg9e7H4djPRZLC6KiziImZgSRkUPQ6xvvffV6YdGi\nAxx6Fot83o0ZI81PukNM7EIINjmdfFNaylelpThUlfFxcVwaH0+v8PDmScDz+2WG3DPP4PLU8NQ5\nRrJTInhy11OYvzGTeE0i6fenY4qV0WP790sf8//+J/0R99xzHLkSCxfKaKoOHaSSSE1t+nmdYmhC\nY+K3E6lwVzBjwgxM+pOXfd+cOF7lgBDipAowHKkgdgL3B/fdAFwf7N8CbAbWAUuBvkc4jxBCiMLC\nQrFu3TohhBA1NTWibdu2Ytu2baKmpkbU4rXXXhM33nijOB2xYMGC5h5Ck6Kp5xMIOEV19WpRWPih\n2LXrXrFhwwixZEmKWLgwTKxY0UWsXt1HLFvWSixcGCo2bDhf5Oe/Kzye/OP6Lk0TYs0aIaZMEaJz\nZyGioxeIm28WYv58Ifz+ho7XxKaaGvHwnj2izfLlouWyZeKB3bvFpnr33imFpgkxY4bQunUTFZ1a\nietuSBLjXh0rll61VPwa/av4bOJnwm8/MJHdu4W48kohEhOFePNNIXy+Rn6fxyPEo48KERMjxKuv\nChEINOl0joaT9bvxq34x5osxYtyX44RfbeCinwSc6mdA8NnZ6Gf3Hz4JbsyYMdx2222cc845dfue\neeYZcnNzefPNN0/l
EI8JfwSHdGNwqubj91fgdG7C4diAw7GBmprVuFzb0OksaJoXkykBm20oiYnX\nEBk54Lje6D/+OJu8vCy+/lqaZMaMkfVzhgw5PKdCCMF6h4MvSkr4vKSEKIOBy+LjuSw+nkzrKaaL\n1jT49lu0xx6lRDj4Rz87ib2upNP0znTe1Jm0e9NIuSUFfYicxLp10lKUkyPNTmPGNJKa47ff5CrC\n45Fmrq5dT8686uFk3mfegJdRn48iJSKFqRdOPeklR/8oDuk/tHLYt28fWVlZbN68mbCwMB5++GE+\n+ugjoqKiWLBgwWlf+OdvnBg0zY/L9Rs1NasoL/+B6url+HxFKIoei6UV0dHDiI+/lPDwno2uwbF3\nr8xN+/JLWdvikkukoujX73DTkyYES6qq+KykhK9LS+kQEsKVCQlcEhdH1KkMj9U0mD4d9dFHyDW5\nub1/FX373s/ImSNxrXSR8XAGSdcmoTPpEEJWRr33XoiMlKWoezdYveUo3zV1Kjz4oAx5nTIFTrVS\nbEI4fU6GfTKMXsm9eHnYy6cnX9dx4i+nHBwOB1lZWUyZMoXRo0cfdOyzzz6L2+3mscceO8Wj/BvN\nDU0LUF7+PUVFH1JVtYRAoAoQhIS0wWYbTlTUICIjB2AyHXuW7I4dMkft889lvtrll8uEs4boK3ya\nxqyKCj4uKmJuZSXnRUczKTGRYTYbhkO1yslCIACff47/0SnsDPNx11leLun+HD0/64lnt4eWz7Yk\nblwciqKgqpLJ45FHZH7Iv//dSHdCYaFMrli3TuZnnHXWSZvWyYbdY2fItCFc0vESHh78cHMPp8lw\n2vocmkoI+hyEEMLv94thw4aJV155pUEbW05OjujcufMxWONOPf72OZxauFy7xb59/xIrVrQXCxeG\niKVL08TChWFi2bJWYtu2a0Rh4TThcu0Rmqb97lw0TYh164S4+24hkpKE6NFDiJdfFqK4uOHjK3w+\n8W5+vui3Zo1IXrJE3Ldrl/jN6Wz6SR4BC+bMEeI//xHepHixqHeCGPZQhpgxdYZY2XWlWDtoraha\nVVV3bHW1EA89JER0tPTBOByN/LLvvxciNVWIyZOFqKxs2omIU3efFVQXiFavthJvrXzrpH3HH8Xn\n8MdKf5w5k2uHDSM8JITVy5dze48esGoVj9x0E+1btaJ7ly4MHzqUj95/nw7t2snsqL/xl4bV2pKM\njAfo02cbvXtvISXlVkJC2hEI2PF49lBU9Alr1/Zn+fJ09u17moKC93C5dta+kBwERZEEgC+8cCBM\ndO1aWXN7zBgZQOTzHTjeZjRyfXIyS3v0YG63bmhA1vr1DFy7lmmFhThV9eRO3mCAyZMx7drLoIvv\n4vu37KhTr+HpK27BOcrJ5gs3s23SNrz5XsLDZenpdetkXY4OHeRq6Zh/QqNGScZZvV6SPs2YcVKn\ndrKQFJ7EL1f+wtO/Ps0Xm79o7uE0K/5YZqULLuDp7dt5eOdOFMCsKMQaDLyWkMA/CgspU1VUQADt\ngU0gDcQWywGxWg+09SUk5GAJDT28DQ2V9Ma1ba2Ehv6paAb+CnC5dlBc/BklJZ8CCjbbMEymJNzu\nbdjt2QihEhWVRVTUWURFnYXV2uqIdujqasnIPW2azNS+8kpZrKd9+8OP9WsaP1VU8H5hIUuqqrg0\nPp7JSUn0OCEe7mNEZSXav/+F7z9v88aZeraOHc4t2/6J+2M36femk3pHKjqzvI8XLZJ8fNHRkt27\nUbWuFy6Ufohu3SSZX8Ifjyp7U/Emhn48lA/HfMjw1sObezgnhL+Mz6GoqIi1a9dy//33s3TpUnr2\n7Ml3331H++Av8ZlnnuGLL77g4osv5uGHHpIUAF6vrDDj8Uhxuw8Xl+tgcToPb51OSWtQ2zocklva\n7ZYKIjxcktpEREgvX/1+rURFSbHZpERFyV9geHjT1Ib8G42CEIKamlUUF39CSckXWK
1tSUycSHh4\nXxyONdjtC6isXICiKERFnYXNNhSbbShmc8PUzzt3Sj/ttGlyRXHdddKZ3ZCvNs/j4YOiIqYWFhJr\nNHJTSgoT4uMJbYhutimxdy+B+/6JK/sX7j9LJfa8OxnzzUX4d/lp/VprYobLQI5AQAYjPfqo9LE8\n8UQjSj+43ZKn6YMP4OWX4bLL/nD399LcpYz+YjTfTfiO/mn9m3s4x42/jHIA2L9/P6NGjWLjxo11\noawLFiyoi1SqqKhg4cKFtGrV6tQMTlWlwqiulsqiulpKVdWB1m6Hqiqyf/uNLItFbtvtUFkpxe0+\noCiioyE2Vlacr9/GxR1o4+KkcmnmH9yfKTR3/vw5dO3qpqjoQyor5xITcz5JSdcTGTkYj2c3dvt8\nKivnUlk5H5MpEZvtXKKjzyMqKuswOnS/XxaCe+89Wcxo0iRJV9S69eHfqwrBLxUVvFNQwOKqKi5P\nSOCm5GQ6hoae0Hx+99osW4bv9tsorNjP7UP9jGn7Cu3fbk9ox1Bav9Iaawup0crKJOvrr79KqqXh\njXmRXrUKrrkGWrSQH05JOTlzOUmYvWs2k2ZMYu6Vc+mS0Jjl05HxdyhrE6Mh5fD9998fFMoKcOON\nNzJz5kzy8/Obc7hHxBFvDL9fKomKCigvl1JWdqCtldJS2ZaUyBVNfLxctickQGKiLKlW2yYnHxDT\nycn+/DMph/pz8fvLKS7+lIKC/yCEn6SkySQmTsJkikMIlZqatVRWzqGi4mccjrVERPQnOnoE0dHD\nCQlpd5AJas8eycb93//Kcgk33wznn99wPYocj4f3Cwt5v7CQ9iEh3JqSwoUxMccV6XRM10YImD4d\n3713sTrWz9PDIvmH5w1CPwol9c5U0u9NrzM1/fKLpOHo1w9eeaURrN4+nywo9Oabku31+usbbYZt\nzvvs802fc++ce1l09SJa2lqe8Pn+Vg5NjEOVw8iRI7FarYeFsk6cOJE5c+ZQWFjYXEM9dfB6pZIo\nLpZSVCRDC+tLfr7cHxUl39pSUyEt7YCkp0NGhvxbQ9Slf3EIIaiuXkZBwX8oK5tBbOwoUlJuJTy8\nT50CCASqqKycR0XFbCoqZqEoJmJizic6emRwVSFpPjwe6eR9/XW5aLzzTrmiaGiB4NM0/ldaypsF\nBezzeLgxOZnrkpJIOElKHo8H8dpr+J95mq+66Pn5nAFcv/Q+jHuMtHmjDdHnSVImpxMeeww++kjm\nRlx+eSMWr5s2yeQ5RZFhr506nZy5nAS8teotXlr2EouvWUxi2B+LlucvoxyuvfZaZsyYQWVlJS+/\n/DK33347P/zwA//6179wOBxs27aNoUOHMmvWrOYe8ukDTZNKJD9fFpPJzT0gOTmSfKe4WK42MjOl\nCaBlS9m2aCFtIQkJzW7Cam74/RUUFX1Afv6bGI0xpKTcSlzcpQdxPAkhcDo3UV7+I+XlP+J0biQq\nakhwVTECq7UFQsgqdy+/LE01kyfDrbfKBV5DWF9Twxv5+XxTVsbI6GhuSUmhX0TEyUnUKitDe+xR\nvJ9+xLMDQel8O0O/GEFU3yhav9wac5JMJly9WlqL0tNljetjzo3QNLmMeuQRqSgeeugPkzz35MIn\n+Xrb1yy+ejHh5lMQQNBE+Esoh5SUFMrKyvB6vQAYDAYSEhIoKipCr9ejKAqBQIC0tDTOOOMMjEYj\nJpMJk8mExWLBYrEQEhJykISGhmKxWLBarXXH1N+u35pMphP+QZ62ZhifTyqOfftkenCt7NkDu3dL\nE1br1tCqlSyW0LYttG1LdlkZWaNH/ykUx7FeGyFUKipmk5f3Og7HelJSbiI5+SZMpvjDjvX7y6mo\nmENFxSwqKmZjMNiIiZGKIjJyMHv3Wnj1VVm64aKLJCnekWpDV/r9fFBUxFv5+YQbDNyaksLl8fFY\nj+DAPqF77bff8N99B9XrVvDPIXqy9C/R4ueWtH
iiBck3JKPoFHw+mTT3xhuS2G/y5EbcBrXJc2vW\nyHCo888/6uGnw+9GCMG131+LKlQ+HPPhcZ/nb7NSE0NRFDF69GiWLl1KaWkpAGFhYQghcLvdJCYm\nUlRUVPsfQUpKCpqmEQgEUFW1rq3ta5qGqqoy2UOnQ1EUdEE7aK0CqJ8QomkaQgj0ej0GgwGDwXCQ\n8jGbzXVKJDQ0lNDQUMLDwwkPDycyMpKoqChsNhtFRUUMGTKE2NhYYmJi6o7Rn+wIlRNFVZVUErt2\nyZCcHTtg+3ayN28my2iUcZsdO8onW4cOsp+R8YcK8T2eH63TuY28vFcoLZ1OXNzFpKbeSWhoBPgB\nagAAIABJREFUxwaPFULD4VhHefksKit/xuHYQGTkAKKjh6PTnc/Uqa154w3o21dyHw0c2PB3akEH\n9uv5+ayuqeH65GRuTk4myXwwRUiTPITmzcN7+63sEmX8q28LJq1/ihhzDO3ebUdYV+nn27wZrr5a\nxkdMnSqtlceMX36RMbPt2sGrr8qVagM4HZQDSJqNXu/14qFBD3FF1yuO6xx/K4cmRq1ZafHixVRX\nVzNu3DjcbjcAXbp0Qa/Xs3r1al577TUee+wxqqurj+m8mqbh8/nw+Xx4vd66tiFxuVxUV1dTU1ND\ndXU1DofjIHE6nbhcLpxOJ263G7fbjdfrxePx4PP58Pv9+P3+OiV1yPwOUji1q5ywsDDCwsLqFEx0\ndDSxsbHEx8eTkpJCWloaaWlpREdHN4+CEUI6yH/7TZYi27ZNytat0rDeoYO0LXfuLAnaunWTTvQ/\nGXy+UgoK3iE//00iIvqSnn4/kZH9jvoZv9+O3T6P8vJZVFT8hMEQSWTkhSxfPoqnnupHYqKeBx6A\nESOO/Ea+3eXitbw8Pi8p4fyYGO5MTW36nAlVhXffxffIQ3zVxcimpAmM+GUcaTekkTElA71FTyAA\nzz4rHdUvvAATJzZiFeH1SgfGiy/CP/4h+ZpOY//XhqINDP14KEuvWUqbmDbNPZzfxV9GOVx77bV8\n++232O12NE0DYODAgaxfvx5N04iOjsbpdFJZWdnMIz46hBD4fD4cDgc1NTWUlJTUSWlpKWVlZVRU\nVFBZWUlVVVWdMnI6nXg8HjweD16vF7/fX5fNq9PpMBgMdYolPDwcm81GbGwsCQkJJCcnk56eTmZm\nJvHx8cTExBAbG4v1ZNl87XapJLZskc7IjRthwwaZhNitmyw/2aOHlJYt/xSmKVV1U1Q0jdzc5zGb\n00hPv5/o6OG/a44UQqOmZg3l5TMpK/sen68Au30s//3vePbuHcx99xm4+OKGI5xAmpzeLyzk9fx8\nWlos3JWWxgUxMU1bmKisDO3BB/H870se72+jY9njtClrQ8epHYkaJOuJr18vneyZmdK10KiSKjk5\ncO21YDRKxsNTkRh4nHhj5Rt8sP4Dll6zFLOhcaSOpxp/GeWwePFidu3axTXXXFOnHCIiIhg9ejQ7\nd+6ksrKS/Px8HA5HM4+4YZyMJWUgEKC8vJycnBz27dtHbm4u+fn5FBUVUVxcTHl5OZWVlVRXV+N0\nOvH7/XWrDFVV0ev1hIaGEhUVRVxcHElJSWRkZJCZmUlCQgKJiYl1EhMTc9CDrtHzEUI6wjdskE+S\ntWul3bm6WiqL3r2l9OkjvZ2nUGE05bXRtAClpV+Rk/MMiqIjPf0h4uLGohwjHbTbvYfS0q8oKfmK\n6upcVq4cx+LFlzFhwgAmTNAd8cXar2l8XVrKS3l5FK5YwYOjR3NVYiIhTbmqXL2awI03kOMt4V8t\nOjJ++d2kXZRGm+fbYIgw4PXKhLmpU2Vqw0UXNeLcfr+sSLRypSzdF/R0ny5mpVoIIbjoy4toaWvJ\nS8NeatRn/zYrNTHqh7J+/fXXjB8/vk45hIWFMXjwYH744Qd69uzJ1q1b65zWpxtOh5vc7/dTWlpK\nYWEhhYWF7N
mzh927d7N//37y8/PrFIrH48FisWA0GutWOn6/H5vNRlJSEpmZmeh0Onr37k1ycjKp\nqamkpKSQmppKRERE4wZVWioVxapVUlaulOaMvn1libZ+/aTSOIlvkyfj2gghKC//kf37n0JVq0hP\nf5D4+MvQ6Y7dbOJ276akZDp79nyC3e5i6dL/o3v3Kxg/vv0RlYQQgtd//JH5GRksra7mxuRkbklJ\nabpQWFWFd94h8OgUZvRN47fyi+i/fzBd3ulC3GiZALF0qaQSGTJEuhOO+dIJAc8/L2N+v/8eunc/\nLX43h6LcVU73d7vz7gXvMqLNiGP+3N/KoYmhKIpYubIz996bw+LF1agqGAwKQ4fGsXlzDQUFnjrz\nil6v47LLOjFlShaKYkSnM6EoZnQ6EzqdGUWR7cF9KzqdBZ3Oil5vrbdtDrYWFOXEo5X+SPB4POTn\n55Obm0tOTg45OTns3buXnTt3sn//fgoLCzGbzYSHh2MOOkM9Hg92ux29Xk96ejppaWmkpqaSlpZG\nZmZmnaSmpmI4ml1ZCBl6u2IFLFsmZf16GTE1cCAMGiTlODNuTzWEEFRWzmP//qfwenNJT3+AxMSJ\n6HTH/rAWQuBwbGD16k+oqfmMiooUbLarGT78Msxm2xE/t8Pl4uW8PL4oKeHiuDjuSk2lwwlmX9eh\noADuvBPXsl95qmdnei+7ieQzk+n+bndMCSZqamQ+x4IFMjdiwIBGnPurr2TG4EcfScfLaYi5e+Yy\neeZktt2yDYuh8WVrTwX+EsqhpmYjRUVF/PLLr9x117Ps2vU9Z511HZGRIZjNRoxGhaysTjzzzDes\nWvU0NpsZTfMjhA9N8yGEF03zoWneYL9221NP3PXEe9DfhFCDyiMEnS7koFavDw32Q4P9UPT6sDox\nGMLrbYej10cE98n9x2puOJ2gaRrFxcXs3buXvXv3snv37jrZuXMndrudhIQEbDZbnV/D7XZTXl5O\naWkpSUlJtGzZ8iBp3bo1rVu3Jioq6vAv9Pnk6mLJEpkgsHixfB0dMgSysqRkZp7K/4Ljgt3+K/v3\nP4nLtZ309PtJSrqm0cWIhFCZP38umzd/QJs2szEaR9Cjx9VERw894r1U6vPxVkEBb+Xn0zcignvS\n0hgUGdk0LzyzZiFuuonN7ZL42NmLoVtG0fa5tmRcl4GiKMyYIbOrb75ZpjYcs5Vr6VJpl3rlFcnP\ndBrioi8vondybx4c9GBzD6VB/CWUg2jbljEFBfzkcOAHjIpCvMGAXVWx6vVUBQL4hcCi0+G+7DLp\n2DIYJHXEoWI2H9w3m6Wj9NC2Xl8zG9DMoJkEquJDEx5U1YmmuVBVV7DvRFUdqKozKI6g1KCqDlas\nyOGMMwzB7RoCgWo0zR1UIJFBpRGJwRBVTyIxGGwYDDaMRluwH43RGI3RGINOF9JsK5qjLZGdTudB\nymL79u389ttvbNu2DYAWLVqQmJhIWFgYiqLgdDrJy8tj165dmM1m2rRpQ5s2bWjXrl2dtGnTBosl\n+IamaTJKatEiyM6WYrFIJXHOOVKOlFnWyLmcDFRVLWf//idxODYElcR1ByXUHQuEgJkzK5g9+3MG\nDJhKSkolLVpMJjHxGpYt+63B+bhVlQ+LingxLw+bwcA9aWmMjY098WJENTXw8MOoX37Bf88djCn7\nAlq1bEXPaT2xtrBSUCAzqg0G+OSTRjirN20i++yzyXriCUlQdZphd8Vu+rzfh003bSI5/Pfvt7/N\nSk0MRVFE+dQNFFcWs3HfVh7/3+tMvfZxBj89iRRbHJqmMbJzHz5e/gug8PPND9MzJQNF9QfFhxLw\noQQ8KH4PqH6UgBfF50HxeWU4ndcrOQ5qWVzr76vP6OrxyAfTobTftdTftTTf9am+gzTf2QUFZHXv\nfhDltwi1EgjXo4YIAhaVgNGHqlYTCNgJBOz4/ZUEApXBbdn3+ysIBCrw+8sR
QgsqiliMxrhgK/sm\nUzxGYzwmU1ywTcRgiGoyZXI8N7oQgtLSUrZt28aWLVvqZPPmzSiKQpcuXWjdujUxMTGYTCacTie7\ndu1i+/bt7N27l7S0NDp27EjHjh3p1KkTnTp1okOHDljMZsmZPX8+zJsnbRlJSTB0KJx3nlQaRzGn\nNJddu7p6Ffv3P0FNzbrjVhKqCl98AVOnrubCC9/ljDO+Zu/erowePQWb7ZwGr7cqBN+XlfFibi75\nPh93pqZyTWIiYScaRrpsGVx3Hbmxobxo6MKotZfT7rF2pNyagioUnnxSEhJ+9JG8NMeC7M8+I2vK\nFBnN9MADp11k2/1z76fIUcS0MdN+99i/lUMTQ1EUsX7oeu7fdD+rK1dj99lBgXhjPJclXMYbeW9g\n1VnxCz+XR17OjzU/8nXy1whVgApCFYiAkK1f1G2jAXrQGXUoRgXFpBzoG5QDrUFBZ6p/jIKiF+j0\nGopOQ6dTURQVnaKiU/wo+NEJHzrhQxFedKoXnepGF3CjC7jQ+Z3ofA4UTw06TzV6tx2dy47OWYHO\n70IXbkQXaUUXGSZpv6OiDlB+17ZB2m/VFoI/SkcgXOAL9eM3OPH7y4JSgs9Xgt9fis9XjM9XjKa5\nMZkSgpKEyZSM2VzbJmMypWA2p2I0xpzSFUktJfvGjRvrZP369ezevZt27drRvXt3unXrRmLwlXP7\n9u11SmXPnj1kZmbSpUsXunbtSvfu3enRrRtJRUUwZ45Mtlq9Wjq4hw+X0qnTafWQqa5ezf79j1NT\ns46MjAdISrqu0eYmnw/efx9efLGaSZM+5eyz38Zo9JKcfBOJiZMwGhv2TSyrquLF3Fyy7XauS0ri\n1pQUUi0nYEP3euGppwi88zZPDMqg045/0iqsFR2mdSC0fSjz5kln9eTJkknjmMxMBQUwbJiU558/\nra5djbeGdm+0Y8aEGfRJ6dPcwzkIfwnlUDvWQCDABRdcQN++ffnwww/ZvHkz48eP57777uOxxx7j\nxRdfZPz48axYsYKYmJijnleIoNLwS9H8GsIX3A4cEM2vyX0+geaTx9QeW7ftPbCteTWEV+6rFeEV\naJ4D25rnEHHXtiqqW24rOtCZQWfU0Bs1dIYAep0fneJDjwed5kavOtD7Heh9Vei9lehUF/pQHfpw\nA4YoE/poK/rYEPRx4ehTolDSwtBSDfhiNXwRAXxmB75AMV5vAT5fAV5vPl5vHqrqwmxOwWxOw2JJ\nr9emY7FkYrFkHEZVfTLgdrvZtGkTa9euZc2aNaxcuZJdu3bRpUsX+vbty5lnnkmPHj1wu91s2bKF\nDRs2sG7dOtatW4fBYKBHjx707NmTXp060cvtJnnlSpTZs+Xq7/zzYeRIOPtsudI7DVBdvYp9+x7D\n6dxERsbDJCZejU5nbNQ5nE7JSvHSS4Lrr1/CRRe9jcfzE7GxY0lJuYXw8B4Nfm63282reXl8UlzM\nuTYbt6emnhiP0+rVqBOvZGWYg+9SL2TkoktpOaUlKbelUFyiMGGCtNp++qlko/9dVFRIxd6nj4xm\nOo0UxAfrPuC9te+x5Jolp1Xgyl9KOUyYMIHVq1eTn59PXFwcX331FRs2bCA/P59Fixbxj3/8gzvu\nuIP9+/c386gPR2OWlEJIJaW6VKk43FpdX3WqaC7Zqk4VzRnsO1TUKh9quQu10o1q96JW+eV+pyDg\nBtVrQA0Y0On8GHBi0BzojV4MZhVDqMAQoUdvM6JL1EELLyLDjUh1o8ZX4bcU4Vdy8Wm5eH05bNhg\npn//tlgsLbBYWmC1tsRiaYnV2hKzOb1RIZuNgcPhYPXq1axYsYLly5ezbNkyFEWhX79+9OvXj4ED\nB9K9e3dKSkpYt24da9asYdWqVaxevRqDwUCvXr3ok5lJb7eb3tu2EbNpE9mdOpF1zTVwwQXSHNXM\nqKpaxr59j+B27yYj4xESEq5o1P9ndnY2
Xbtm8dxz0oxz/fUlTJw4Fbv9HczmZJKTbyE+/pIGVydV\ngQDTiop4PS8Pm9HIrSkpXBIXd3z5Eh4P4tFHcb7/Fg+fncG4fW8THhZOuw/aYUy18uCDkq32q69k\ntPKR5lL3u6mqkgrijDMkDfhpQtGiCY0+7/Xhrn53cXmXy4943N9mpSaGoihi2bJMNm70c8MN+ZhM\nCpGRBmw2I5Mnp5OVlcxjj21n/vwikpJCePTRM+nTJ/U4Qllr5dBtS12I64lEFp0u8dpCE6gOlUB1\nALXCR2B/GYH9Zah5dgKFVQSKnQRK3QQqfQSqNQIOhYDPhF8XSUCJIKCGgFFlk+1XenWKRdeiHDLL\nILkENaoANWw/qrEco0jFYmxFSGhrQm3tCY3sQEhIO8zm1CZ9uxJCsG/fPpYvX87SpUtZsmQJO3bs\noHv37gwcOJDBgwczYMAAwsPDycnJYdWqVXWyZs0aYqOjyQgJYZTZTN9du+jerh3Wiy6CceMk708z\nwm5fxN69U/D7y2jZ8l/ExFx4TP939e+1wkJZSmH6dLjttgBXX/0jlZVv4HBsJDn5epKTb8FsPtxD\nrArBT+XlvFtQwLLqai5PSOD6pCS6HHNJuHpYuhTn+IuYnuog5czphH4aRcvnW5I4KZFvv1W48cYD\n5R4Ond5hv5vqarni69BBpmKfJgpiSc4SJnwzge23bifE2PBK9G/l0MRQFEW4XHuoqqqkTZtBTJo0\nln//+45giKovGHbqZcyYB3jkkcvp2jUtuP/YQlnltgdVddcLafU2EOLqQVGMDYSzHhzK2lA468Fh\nrQfEYIhAr484aW/ZTYZAQFJ75+UhcnJRd+UT2FWMf28F/rxq/EVOAm4T/og0/KHJ+EIj8cZ58cVU\n4LcVo8bkQnoupOWC1YW+KhOTuw1mrS0hpg6ERnYiLL4t5mQrpgQTOuOJ/eBrampYvnw5ixcvZtGi\nRaxatYqOHTsyZMgQsrKyGDx4MOHh4Wiaxvbt21m5cqWUFSvYumUL7cLC6O9y0c9mo//YsWReey1K\n167NYsoQQlBRMYs9e+5Hrw+nZctniYo6AjPfUbBnj6zH8PPPktzv6qu3U1b2KiUlnxMbO5bU1DsJ\nC+vc4GdzPB7+GyxElGo2Mzk5mUvj4hrnwK6qouyq8VQsnc/6Ox4h85NhhPcIp81bbdhTaGDcOFkQ\n6e23j4HJ2+GQZsGWLaWj5TQhr7x4+sX0Tu7NfQPva+6hAH8R5WC1ClR1Gj7f1SiKDSE8KEoIMTFT\nMRgEZWW3oapl6PVRhIaeQadOszAYZOic0SjFZDq8PZLURrvWRrpKEZjNHkwmNyaTG7PZhcnkwmBw\nYTA460QIRzCs1VkXxlo/pFWGsdYEt6sJBGrQ6cx1iuJACGv9tjaMNQqjMbounNVgiMZgiDw9ciWc\nTsmRcyjt9549iF27CZij8ad1xZ2eiSPDijPBizu6HF94Lv6IXWjWcpT8TMTOTHTFrTE5OmAVXbBE\nJ2BONWNOMWNOM2NJs2BONaMPPfYHgsfjYeXKlWRnZ7NgwQJWrVpFt27dGDp0KOeccw5nnnkmpmAG\nscfjYe3atSxbupRlP/7IklWrwONhgNXKoAEDGHjNNXQbO/boiXwnAUKoFBd/yt69UwgL606rVs8T\nEtJ48rctW2DKFJmIPmUKXHllGSUl71BQ8CZhYWeQlnYfUVFDGlyhBDSN2RUVTC0qIttu5+K4OK5L\nSqJPePgxrwZL3n0Zwz33smB8Fh3F61Qvqabjlx3RtQnn+uslJdc338jn/lHhdMKoUZLN9b33TosV\nxPay7Qz8YCDbb91OtDW6uYfz11AODodg1KjzWLBgDkajiVat2hIVFUN5eRmqGqCwMBe/P4DBYODt\ntxfSpk0v/H75wuv3S/H5Du97vQe2a6U2irV+vyGpjXytH+16tEhXvz+bpKSsgyJdZSsID3cREVFF\neHgV
oaFVhITYsVqrMJvtmM12jEY7en0limInEKgIhrSW4/dXoKoOjEZbXRirwRCD0RgbDGWNC4a1\nxmE0yiglozGuSVYqjVoiCyGLDu3ceUC2b5eyezfExxPo1gZn71icbUxU21w4THm42IouEIbB3gF9\nflvE9raoa1rh2xSJ3qrHnG7Gkm6pay0ZFiyZFiwtLBjjjEd8YLlcLpYsWcK8efOYN28eW7ZsISsr\ni6FDhzJ06FA6d+5cR+MuhGDf3r0s+egjFn/7Lb9u3UquptGvRQuGjB7NkLFj6d27d51yOdlQVQ/5\n+a+Rk/MciYlXkZHxMEbjwYmDx3JtVq6USWn79kk+pEsu8VBS8gm5uc9hNMaQnn4/MTGjjvjiUej1\n8lFxMe8XFmJWFK5NSuKKhATijuH/oXrrOgpHDqYiMZLUSfPIeaSMzCcySbohmTffVHjqKfjgA2k9\nOupcHA4ZwdSrl0yWOw2cwdfPvJ4oSxTPnfvcYX/726zUxKh1SPft25eVK1eiKAodOnSgsLCQiIgI\nCgoK0Ol0qKpKVFQU7du359dff2228QYCB9Ii3G5ZK8fthsWLs2nfPguXS+5zOg+0teJwHGgdDplb\nVCvV1fL40FAZ0RoRISUyMkBCQgWxsWXExJRjs5USEVFGWFgpISGlmM2l6PUl6HTFaFoxgUAFBkMU\nJlNiMJxVyoGQ1hTM5hRMpqSjUjw02Y2uqrIiXS3ddy2b67ZtiIhwPANa4ehtw9FaUBNbQY1uFyAI\nNffA6jsDY0VX9Ps74d8bine/F/deN569HjSvJhVFrWTU67ewYIw5oDy+//57AoEAc+fOZc6cOdTU\n1DB8+HBGjhzJueeei81WLwxU0yj/+WcWv/EGCxcsYKGisENV6dunD8NGjWLEiBF06tTppEeteL1F\n7Ns3hbKymWRmPhYMf5VKvzHXZt48yZTt9cpyz8OHq5SVfUtOzr/RNA/p6f8kPv7yI0ZNCSH4taqK\n/xYWMqOsjHNsNq5JSmKYzXbU5Dqvo4pF43rRfn0epjdms/8xK3Fj48h8IpOlSxUuvRSuuw4GD87m\n7LOPMhe7XUacjRwJTz11THM+mcivzqfrO13ZcOMGUiMOLpP3t3JoYtQqh++++4677rqLvLw8ysvL\niYmJIT09nby8PB588EHee+89Bg4cyKZNm9i0aVNzD/ukQFUPVhbV1TKAo77Y7bKtrJRit8sowMpK\n2VqtKunppWRkFJGSUkRiYiFxcYVERRUSEZGP1VqA0ZiPohSj10djtaZhsaRhNqfVhbRaLBmYzemY\nTAknz6SladJMtWWLrCoTpP4WO3fg7ZFOzeBEaroYqEmsotqwC6MphvDwPkRG9iMioh8WtTO+HA3P\nPg+e/Z4D7V4pIiCwtLBgaWkhtEMooZ2lhLQPYV/+PmbNmsWsWbNYtGgR3bp144ILLuDCCy+kffv2\nBx78gQDMm4d92jQWzZzJ7JgYZrndBEwmho8YwYgRIxg6dGjjyQgbgZqa9ezadQeBQCWtW7+KzZbV\n6HMIATNmyJVEfDw89xz07i2orJxLTs6zuN07SE29i6Sk6zAYjuyQrg4E+LKkhKmFheR4vUxKTOSa\nxETaHCFUWBManz9wISPenI3vgWfI/WII0SOjafnvlhQXK1x8McTFyaS5o5L3lZZKKpVJk+C+5rf3\nPzD3AUpdpbx/4fvNOo6/hHIYOXIkmzZtoqCgAFVVCQsLw+FwYLPZ6uo3KIpSxyR66aWXotPp0Ol0\n6PX6Btvafm11t9q+Xq8/TOpXgKvt198+WmsymTAajYdJcxToEUKuSMrLpaIoL5f1emrbsjL5Oyst\nhbIyFb+/GIMhl4yMXDIzc0lJySUhIQebbT9hYfsxGKqBdMzmTCIiWhAenlkvpLUVRuNJsLt6vXKV\nUY/6W2xYh7tdGNXnJFHVVU91fBluQwlhYWcQGTmAyMhBREb2P2g8fr
tfKoo9HpxbnTg3S/Hs8WBt\nbSW8bzgRfSOwdLewsnglM3+aycyZMzGZTIwaNYrRo0czcODAA74HpxNmzEBMm8aOlSuZ1bEjszSN\npVu30rNnT0aMGMHIkSPp3Llzk68qZOb51+zefQ8REX1p1ep5LJaMRp8nEIBp06Tjun9/uZJo3Vom\n6eXmPovdnk1y8i2kpt6G0Xj0PKItTicfFBbycXEx7UJCeCIzkyxbw4l4H3/xIH3veJ6IAePI230X\nUUOjafV8K/x+hdtuk1Ra330nx3JE5OfD4MFw112S+rsZYffYaft6WxZetZAOcUeo/XoK8JdQDjNn\nzmTp0qV89NFH5Ofn1z1cBwwYwLx58wAwGo2EhYVRVVXF22+/jaqqaJpWVxb0SG1t+dCGyooe+vdA\nIFBX0c3v99dt1/YP3a7t+/1+XC4XQB39taIodYqjtuRo/dKjh/brtxaLpa49klit1jqpX0PbarUe\n1Nf9jiNPVaUiKS6WUlQkwyNXrcrGbO6Dy5WDpu3FYNhLQsI+MjP3kJKym+jo3SiKHlVthdHYmoiI\nNsTHtyEqqg0hIe2aVnFomnR+r1kjs6FXryawbQ3VvUKpzoqlqp2P6vA8zNZMomyDiYwcQlTUEMxm\nmdNQf7mveTWcm51Ur6imenk11Suq8RX6CO8TTmT/SMqSyvgl9xe+mf0NeXl5XHjhhYwdO5Zzzjmn\njqGWvDyZ3fXBBzhVleyBA5kF/LRwYV0i5/nnn8/ZZ5/dpAWXVNVFTs5z/PDDS1xwwZ2kpd171Df9\nI8Hlkib8l16CCRNkJnN8PLhcO8jJeY6ysm9JSrqW1NS7GgyDrY/aOhMP7NlD34gInm/VivQGMrC/\nX/0pXHMN/f0d2Gd4i8hz4mj9cmsWLlzI9u1ZPPIIfPihTHM4Ivbulcy9r78OY8c2et5NieeXPM/S\nvKV8e+m3dfv+Nis1MWrNSkVFRXz44Yfcf//9pKSkIITgq6++YujQoXg8Hmw2GwaDgZKSEk7HuR16\nY9Qqj9pSpX6/v65caUPlSw8tY1pbFa62Mlxt3+1217WHisvlwu1215Uzra3bEBISUlf/ur6EhYUR\nHh5eV7K0drs2Z6Bfv35EREQQGRlJeHgEEEFFhZW8PIXcXEFRURlVVbvx+3dhMOwkNHQnaWk7SUnZ\ngRAm3O52QDtCQtoTG9uBFi06EhOT2TSmKk2T9a5XrIDly9FWLcfh30bVuQnYexmpii/GaIknKuZs\ntmxJ5IILbsVkSmjwVP5yP1XLqqheUk3Vkipq1tYQ0iYE5QyFDWIDX/72JSu3r+SCCy5g/PjxnHfe\neVJRCCH5hv77X/jmG8TAgfw2ciQ/1tTww08/sXbtWs4++2zGjh3LqFGjDvZtnAB++eULUlO/x25f\nSIsWT5KYOAlFafxKtaxMmvE/+QTuuENScIeGgseTQ27u8xQXf0pCwv+RlvZPLJajF5B2qSrP5eTw\nen4+d6Smck9aGtZDVs9Lc5Yw5+bh3Lk0nJ3xnxJ+Vgr5l+Rz1tlnsXgxjB8Pd98tFweGZWtkAAAg\nAElEQVRHXHytWSM1yA8/SMqUZoLb76btG22ZfvF0+qXJsrF/K4cmhqIoYtZKN68+fSO//PAJmqpy\n+30vsG7VQi4aN4n77roCv89Lx45nkJ+/j5qaaoqKAuh0MrpNr2+4PQ0CG5odmqbVKQ2n03mQ1JYm\nrV8ru6am5iCpratdW860qqqKQCBAZGRkXe3r2jYqKoqIiEhMpmRUNQVFsWA0VmE0lmC15hARsYu4\nuG1ERJRTXt4Ol6sjitKJ8PCOJCV1onXrFiQl6U/sujmdsqDQkiWIJYtxFi7B3sdM5YAQqlLKMFlS\nsMUPx2Y7l6ioLAyGhg3dmk+jZk0NVYuqsC+0U7WkCn2CnsKkQuaUzeGn/J/IujCLCRMmcO6552I0\nGqU9b/p0GXaZlwfXXUfl2LH8sH
4933zzDfPnz6dfv36MGzeOcePG/S79y7GgunoFu3bdhao6adXq\nBaKjj5Ht7hDs3i39Eb/+Co8/DldfLX9HXm8ReXkvUVg4lfj48aSn3/+75qx9bjd3797NeoeDd9q2\n5dzog1eQO8t38vijQ3jjswB7o97HMqA17T5oj86gIycHRo+WlWbfeUcS8TaIH36Q5E1LlhxDTOzJ\nw3tr3uOrrV/xy5W/NMv3/yWUg+6y69Bmz4DKstqdso3JBGcleGtAKCACENsZOl4LqgE0g2wDJtn3\nG0E1yW2hB80IGFGEEaWuNaGjfmtEr5jQ6QwYDAp6PegNAr0B9LW5FAYwGA/0jUYwGRTMJjAZFUxG\nsBgVLCawmBQsRoUQs4LVpGA2K0fMvai/r36/NvfCZDrALG42nx65QD6fj6qqqjqx2+11bW2/srIS\nu91OZWXlQVJeXkFoaDzt2mXSokU4qakaKSmVJCXlERFRSU5Oe4qK2lNT0xa9vj1xcd3o3LkFnTub\nSU4+DoWvadLhvXgx2q/ZOHLnU9nJS+XAEGqS7YRZumBLvpDo6GGEh/c84tu3UAWO9Q4qF1Rin2/H\n/qsdZ5STVdoqsp3ZdLq0ExMmTqBfv37SjLdhg1QSn38ujfs33IBj0CBmz5nD9OnT+fnnnxk0aBCX\nXXYZo0ePJux4spJrxyYEpaXfsGePfHC3aPEkkZH9j+tcK1fCPffIIIcXXpCEtwA+Xyl5eS9RUPAf\n4uLGkp7+IFZri6Oe66fycm7esYNBUVG81KrVQSGwJc4SrntzGC+/U4zD8wz6Xt3o+FUXdGYdTidc\ndZXUr99+exT677fekiRTS5dCdPPkHPhUH21fb8vn4z6vWz2cSvwllIP1KSue/3gQOfXGbAJEsHUe\n2B3zeCy6ED2gCzr+akUeXisI0ESQx0gINDSEpqEJFU3IVohAcDuAQEXBiE4xoFOCikMxoWBCwXyg\nFRYQJsACmgU0M0Iz499TjpLWEi1gQQtYUANWRMCKErCiD4Si84eg94cFJRSdNxSdLxTFF4IuoIeA\nAgEF4deh+RQ0n4LqUwgExedRMBiFVBQWCLGC1QIWi4LFcnDOhcUicyxqmcYPlaMwjxMWJj+3cOHJ\nWSLXFgWqqKigvLy8rl9RkY/TuZdAoAyrtZK4uDLS0grx+czs3t2VPXu6kZubQmVlEooSR2amia5d\nLfTvH0fv3umYzccQliuE9F0sXIi68BfsJXOo7Oqj4kwD/ggVm20o0cljiY4ehskUd8TzaX6NmpU1\nVMypoPD7QlxbXWw3bme9cT3p49MZe9tYOnXqJFcy06dLGoiCAhm7ee211ERE8N133/H555+zePFi\nRo4cycSJEzn33HOPKfmuIfOFpvkpLv6IffueIDS0E5mZTxAR0etYL0sdaiOb/vlP6SB+4QVJcgvg\n95eTm/syBQXvEBs7ivT0BwgJaXvEczlVlUf37uXj4mKeadmSqxIT65z1Tp+TKz+7mIHPr+G8/AdR\nu/aj86xe6EP1aJqk25g6VSqInj2P8AX33CNXir/8It+emgHvrn6Xb3/7ltlXzP7brNTUUBRF7Pp+\nFyX2Euatnscjrz/CnVffyTc/f0NZZRnjLhjH9z9/T2xMLPtz93PJJZcw+cbJqIp6mPjxoyoqASVA\n3T8lQEAECGgB/Jofn+rDr/rxa37+v70zj5OiOvf3c2rrfdaeYWeYAWRRQFwiKCKuPyXRSNSg0WDQ\nGzVxwdwbjYm5N3ivJjG57kaNMS5BzabXqFFciIJCAJHVhW3YZwZmmK27Z7q7uqvq/P6o7mGGRRlk\njfXM53zOqerqqrd7quutOud9vydr55adLKZlkrbSpK00pm2SyqYwbXedaZmYttlR59+T34/lWCgb\nFbSBGpqioQgFRSgIoSBw+7ikBAfpOibHxnIsbCeDlA6q6kNTfWiqH00LompBVC2EqgVRtBBoYVBD
\n2ATJyCAZO0TaDmJbYQIUE7QLCcgSwlYRfsvAZ2n4shpaRkUzNdSMikirYKrItIKTVrBSCtmUINku\nuuRitLe7AUM+32yKiiZ0np6CSKRrnW9HIjvyMnbkZ+woeveERzuQUpJOb2H79gXU1n5AY+NHOM4a\ngsE6Ghr6sG7dMFauPJE1a45j06ZiDCNLRUWKo49WGTu2iNNP70dlZW/mzJmz+x+tlO64xTvvkJ7/\nMs3p92gep9MyLE1Qq6K0/zcpLb+QcPjYz4xAyrZkaZnVQvUfq4m/FafFbGFV0Sp6TerFV2/7Kv2q\n+rmRV48/7k7OcOaZ7tRpEybQ2NTEX/7yF/7whz+wceNGLrvsMqZMmcKxx+75mJ91EXIck61bf8+m\nTT+noOAkKivvIhQa2u3vPpNxu3buugsuvNBNpOuRG7LJZluorX2I2tqHKC4+m4qK2wmFjt7jvpYm\nEvzb6tVEdZ3fDRnSMWCdtbOc//OvMXH5Gs5+bQqpQaczcu44tELXQb74ojvL3IMP7mGyOMeBSy5x\nT7gnnzwkfckZO8OgBwfx10v+Sqo65TmH/YkQQv7oyh/x6pxXWb15NbZjUxAsIGgEaYw3Yjs2EknY\nCNOeaScajHLbmNsQtkC1VYQlUC0VxVJ2lGyuziioWRXVUlGliqqoaIqGpmroQkdXdTTVDVvtmONB\n7zTvQ345N9+DYigIQ3RpK4YCBliGRdbIkvFlyGpZMkaGtJ4mo2UwNdMtqompmKTVNGmRJpn7S9gJ\nEk6CRNYtbZk22jJtJLNJ2rPtHQ7KkQ66oqMq7mcBgQRs6WA5FpadQVd96HoInxbGMArQjUI0oxBF\nLwS9CEcvxFTDJJUQbSKEz1dMaaCUUn+YEk0jquuUKDqFWYNQxi0BU8dn6mhpHSWpkk4quyTy5fMy\nds7NiMdd57DzlBVFRW5vQEmJO31FNOpO7ta3r1s+K+7dttMkkytpa1tOW9sympsX096+jFSqgM2b\nj+Kjj0aydOkYVqw4k1QqTmFhLX37Jhg+XDBmTAFnn92P4cP77xrJZVmwaBHOrJnEVr9IU3k1jeM1\nnJBBaclEogMup6jojM+csEc6kvjiOEsfWUrja434G/1sLN9Iz4t6cvbtZ1MYUWDGDFd1VErXSVx5\nJUQirFmzhmeffZYZM2YQDoeZMmUKl19+Ob27Mevdju8oRW3tw2zZ8iui0UkMGDAdn6/7+2lpcQet\nn3nGHSy++eYd2kiWFaeu7lG2bLmX0tKvUVX18z0P+jsOv96yhftqarirspLv9uqVv7hx26zbaPzb\nn7j1yUtp63UmIz88Hb3EvaNYscIdh7j0UteOXbpW29rcCayvugqmTev259sfPPbhY7yy+hVev/z1\ng3rcL4VzuOG1G6ieV80bP38DcCfokbbEX+gn1ZoiWBrEztqYMROhCs77w3nYjo0t83fgbm05Fra0\nd1nO2tmO5c7t/NMEgK7orsNQdDThOg+NnBMh10bHwECTGrrU3eLkajvXtnQM23Bry0DLauhZHSNj\noJv6jpLS0dM6RsrAaDcw2gz0Nh2/9BMwAvh8PpSAghJUUAMqSlDBCTmkw2lSoZRbgilS/hTtvnaS\nviRxPU6z2kyLaKGFFlqdVlrtVtrsNtos19lk7AyaonU4F0c6WE4WIVR8epiAUYjfV4zPH0XzlaL6\nynCMEtJaEQklQkwpIBKI0tMfolzX6WEY9OxUeuXq3j4fZbqOQJBMdk3iyyfv5RP3Wlrc3Iu6Orev\nuabGHd/p39+dOrqyckddVeWWnZ2HlA6pVDWJxIckEouIxz+grW05Uvahvv5oPvpoKP/85ygWLx5N\nPN4PaKCwsIb+/dsYOVLltNNKOP/8QfTo0SmhrakJ+dabpOb9mcbkLJpOhrYKm2LfWKKDplBadsHn\n5gPE1sWY88s5NL7SSM+GnjT3bqbf5f0Y+8MxGJ/Oh4cfdme4
mzIFbrgBBg3CcRzmzp3LH/7wB158\n8UVOOukkpk6dyoUXXrgjnHYvyWab2bz5l2zd+gS9e19H//63oWndT9pbt87NP1u0CO6+GyZP3nGj\nblkxNm78H+rrn6F//9vp0+f6PWZcf9LeztRVqyhQVZ4YMoQBOU9z3/z7eO2lX/HIo5cTKziDUcvO\nwihzuwobG+Hii90n1eeec28wurBxI4wd6zrdvZ2Cbj9iWiaDHxrMC9984aBOCPSlcA5SSpYsWcLU\nqVNZtWoViqJgGAaXXHIJzzzzDNFolJaWFrLZLIFAgLa2tv1qg+3YXbqJdq47d0Vl7MwuxbRMli9c\nTtXoKkzL7XbKd0N16aqyUl3WpawUqWyKZDZJynLrZDZJKptCIAhqQQJqgIASICACdPw5AYJOEL/t\nJ2AFCGTc4k/7CaQD+Np9+JN+/HE//liuNPvxt/vxhXykS9MkS5O0FbfRVthGrCBGU7iJhkADjUYj\njVojdRvqyFZmaZNtJO0kilBQhYpEYjk2mmoQ9BUS8JUQDJTjC/ZC8/dE+nuQ1EpoUYtJqEX09Ifp\nbRj08fno6/N11H19PvrlamOnO3gpXeexebP7u9+4cRetP0IhGDjQ7Rc/6igYPHhHnXccjpOlvf1j\n3nhjBscc00Q8voBMZivh8Ikkk8exfHkls2cPZMmSYurqSkil+qDrdZSX1zN0aIZx4yJceOEARo0q\nRTg2fPABmTf/QnPNCzQObqBltCSiDCFadSXR3pd8biTPtg3beP1nr9P6civD2oZhVpkMuWYIg84x\n0P/0hKtAOmYM3HSTe5ETgmQyyUsvvcSTTz7JihUruOKKKxg5ciRTp07t1jmeTm9hw4af0tLyFgMG\n3EGvXlfvU/jr+++7PqxHD3dMuHPiWnv7Kqqrp2GaWxg8+DcUF5++231YjsM9NTXcs2UL362v585J\nkxBC8MyyZ3johVv546PfpkWMZ9SKc/D1znVBZd2nlvfegzlzdjMGPWeOGws7b97nZNMdGB5Z9Agz\nXp7B/DvnH7RjfimcwxWffsqcG2+kYfFizFgMPRzGMU1O//GPeeeuuwDoO3Iksbo6jECAO95/H00I\nDEXBEAJfrjYUpaOdr/2Kgj+3Pt/WhNjvWaz7ezAqa2c7nEW+tGfbac+071K3Zdpoz7aTMBO0Zdvc\nOtNGIpMgYSZIZBLEzThxM46CQoFeQIFWQFgJEyFCgSwgbIWJZCOEzTChVIj6NfWMiIwgFAsRaApA\nHMx2k6SaJFYeo75nPdtKttEQaaAp2ESL3kJciZMkicD9fm1po2s+Qr4SwoFyQqE++EP9UAJ9yPh6\n0axFaRARooaffj4fFX4/FT4f/f3+jnaF30+RpnX5f0npJuutWwfV1e6wQb5UV7sXjqFDd5RMZjbf\n+tYEevQAy3KdRCw2n3h8PonEIgyjN4WFJxMInMxHH1Xyxht+Fi3KUF0doqWlAiF8lJRsYejQFKee\nGuIb3xjA6JJGeP0FWj75A42ln9J0soJP7UW032VE+3+LUGjPmdJSShbMXsDbd72N8p7CifJE1NEq\nR19XSY/ULJTf/sa9Gt5wg/tEkfN269at46mnnuKxxx5j0KBBXHPNNUyePJnQZ8yfvTOJxGKqq/8d\ny2ph4MB7KCk5u/vnZhYeeAB++Us3N+KWW9zIuvxna2x8merqG4lGL6Kq6heo6u4TAZclElz47LOM\nGT+eR486imJd54VPX+AnL3yPVx//Dq3xUzh2+Zn4Knc8Jt5yixugNGvWbqS/H3nE7a6bP98dhziI\nmJZJ32l9mXn7TE7o3f1AgH3hS+EcLr7tNj58/XU2f/IJmmFw2lVX8Y/f/pbRkyax6p13MNvbsdJp\nVJ+PHiNHUnXuudiAIwS2EFiADVidlq3csgVkc8USggwgFQVdVTE6FZ+m4cvVfk3Dr6oEdB2/phHI\nlZCuE9J1gppGyDAI6zph
XSdiGEQMgyKfjwLDoNDnI5KT0VAUZb87on1FSolpm8TNOLF0jJgZ61K3\npluJmW7dmm6lJd1CS6qlS521s5T4SijWiylWiimWxRRmCylIFhBJRIi0RFCaFKxWi1QiRSwao7Fn\nI/Ul9TSEG2jWmkmIBBmZ6ehzDhhhIoEyCkL9iEQqMSIDkIEBxI1e1Dh+EIL+eefh93e0+/v9VPr9\n9DQMlNx3nJdsWrVqR8lr/YEbeXPMMTBypBtPf/TRNoryMbHYPGKxucRi83CcJAUFp1BUdCqRyCl8\n+mkJr7xSx9y57axa5ae5uQIhelNWVsuIEWnOO9XggtAKSjY8SLPxAY3jFQiGiPa4iGjF5RQUnLxH\npdy2tjb+8sxfWHTvIobWDmW4GE7RuYUMObWByD+fQbzzD7jiClcyIjc5kWVZvPHGGzz++OPMnTuX\nyZMnc8011zB69Oi9Pg8aG//GunW3EAoNY+DAe/dJHnzTJrjxRtchP/ssHNdphtJstpm1a6+nrW0Z\nQ4fO2GPkVMq2+dH69fytsZGnhw7ljOJiXl/7Otf99UrefPpq4rUnMeqjs/BXuQ7CcVx/2dYGL7zg\ndj92+mDuCPa2bW6Y00GW+X74g4d5a91bvHLZKwfleF8K53DbbbexePFiZs2ahcjd1TuOw8SJE3n3\n3Xc7pCmEEEyePJnS0lJs23ZDVHMSGlLKXdbl13de7pDMcBwsxyFrWVi2TTa/zrLcuvN2O8lt5IuT\nL46DY1lIx0HmamzbPZMdB1QVoWkoquoWTUPNaz5pGkZOq8mn6xi6jt8wCBgGAZ8PI+dkOms47SzH\nsTtpDp/P10WK47PkOPIyHNpOd+i7w7RMmlJNNCWbdls3JhtpTDa67fZGGtsbacu2UaQWUSJLKM4U\nU9ReRDgWRokpWEmLZHGSRHGC1kgrrb5W4iJOUiaRSBShEPYVUBzqQ3FBJSVFQwkVDUOGBlIvg2w0\nTeK2TYXPR2UgQJXfT1UgQKXf39Eu0LQOVfG8zt/y5W759FN3IPy443aU4cNr0fX3iMXeJxZ7n3R6\nIwUFYygsHE9R0Xj8/tEsXLiOl17axJw5JmvXFpNMHoOmBamq2MaZfes4M/Q2A8ueJHtOO2YZlJZ8\nldJ+l1FScs4e5+f+6KOPmPHgDLY9v42v6l+l3Cin8hKNPvar6C89406fef317kQ4uZHZmpoannrq\nKX73u9/Rq1cvrrvuOiZPnkxwL+bNdhyTmpr72bz51/TqdTUVFT/dY2LgnpDSDcC66SZ3wHjn2d7q\n6/9EdfU0+vT5Pv37375HJ/lGUxNXr17N1J49mT5gAHM3v8e3/nQJbz95FYm6sRy74iz8VW4+SCbj\nzvhaWelGVHU5ZTMZmDDB/Y5uv71bn+WLkrbSVD5Qyaxvz+Lo8j1Hb+0vvhTOIW/runXrGDVqFCUl\nJTz00EMMHDiQ448/nmg0SkNDA7ZtEwwGWb9+PeXl5YfY8q7s3K1kS0mbbRPLZGjJZGg1TZpTKWKZ\nDK2ZDLF0mtZ0mlgmQ8w0iZkmcdMknk4TN00SpkkykyEgJWEpCTsOISkJOg4BxyEoJX7HwWfbGJaF\nYdvoto2SzZLtJM/RWX6jszRHXl6jc9txnC6aTMXFxR3SG3m9pj1JcEQiEQoKCjrqfBZ1YWEhQhU0\npZrY3r6d7cntHXVDewMNiQbqm+qpb62nKdlEc7aZVlrxWT5CqRCGY6AYCpbPIq25UV6mNLGljUAQ\n8UXoFenLgOhI+pWfQLTsRFK+XmxIp1mfTrMhlUJZvpzBY8dS6fd3OJBBgQCDAgF6qT7WVyssXQpL\nlrhl6VJ30PMrX3HnPh49Os6gQfNwnHdpbX2P9vaPiURG5wT/xlFQcDKtrVlefXUpL7+8jQ8/VNi2\nrQI4nvJgMycXr+D4nm9See4yep6ymGjpKZT2voTS0q/tVruovb2d5557jhf/90VOaDqBCd
kJFA8N\nMGjkUgqXzWBOzWYm/Pu/u3kTuc5327aZOXMmjz32GAsWLOCKK67ge9/7HkP2YipU09zK+vW30dLy\nNlVVv6RHj293+2l39Wo3qnTkSPeC3TmvzzRrWbXqKiyrlWHDnu3ylNL5d1OfyfDtlStJOQ7PDxtG\nbeMyLppxPm8/eSXxrady7PIz8Q90d5xIuD7g6193taG6UFvr/uOefnpHJt9BYPbs2cxT5rGuZR1P\nfv3JA368L5VzmDRpEq+99hq//vWvmTZtGq+88gqzZ8/m3nvvpbKykkwmQ1FREZ/k+wgOIw5EAowt\nJTHLojmbpSlf59qN2WxH2Z7JsD2bpSGbpdWyKFRVehhGR+kcTdQ7F1HUy+ejZKcnhWw226HT9O67\n7zJixIgu8hs7y3DkJTg6S23E4/EOqY287Iau6x0SG/lSUlJCaWlpR4lGo0SjUcrKyohGoxgRg1gm\nRt3GOurW1rF181a2bdvG9sbtNCYaaSlsob6snqZgEwklQYZMl+9OV3QK/YX0L+hPeXM/Lpo0jVDR\nMDaZJuvSadalUlSnUjRkMlT4/QwLBhkeCjE8GGRoIIRva5AVi1UWLXIzh5cvd6Okxo6Fr3wlzdFH\nL6G8/E0SifdJJBbh9w+kqOhUiorOoKjoNLJZP/PnL+LFF1fz7rvtrK8uISzGYNq9OTk6hxPHz2Xk\nBX9n0BA/PftcTY8eV+xy1y6l5P333+fh+x8mNivG1B5T6d3Qm7rh/+AbhavRF76JuOQSt29nxIiO\n923atInHH3+cJ554guOOO45p06ZxzjnnfK4IYzy+kDVrrkdVQxx11KOEQsO7db4mk+4wyYIFbpfP\n8E5vl1JSW/sbNm26g8rKO+nV6xqEELv8bhwp+dXmzdxXU8MTQ4ZQlq7m4hnn8/bvpxDbNr6Lg6iv\ndxPQ/+u/3GjgLsyZ44ZULVjghrodBGbPns2Ir4xg8EOD+fj7H9M70v3Q4e7wpXAOt923mJqNy3n2\ngasQQqG872AkEjub4bJpD1PWu4L7/uNc0qk4A48Zw+U/vB+hCRRFoORqVRUoqkDVBIqqoGruOlVz\n25oq0HJtXVPQdAVdV9BUBV2IHUXZsWzk2vnBbkMI1AMwmL0/saWkKZulPpNxSzbLtkyGbZkMW02T\nrZkMWzMZ6kyTtOPQOxdB1McwdkQR+d0B4n4+Hz069efvC1JKkslkF4mNlpaWLhnSTU1NNDY20tjY\nyPbt29m+fTtNTU2Ew+EOh5F3GnlnUkABoViI0PYQ/k1+jNUG2USW1AkpPj32UxaVL2KNuoZt5jYS\nmQSOdADQFI0ifxHDosM4o/IMTqs8i9KSkaxNm3zS3s6n7e18kkyyLpWi0u/n2HCY0eEwRxth9I0F\nrFyksWCBO+bZ0uJqv40ZYzFqVDWDB7+B47xJLDaPQGAwxcVnUFr6NQoLx5HN2ixevJjXX/+AmS+3\n0/JJP1rl2RQYbYwbN5NTJ/2V004/mr4VN+w2oWzDhg089NBD/O2pv/FvFf/GKbFTiCjtVFX9g8KP\n/ogYNsTt27nggo4up3Q6zR//+EceeOABTNPkpptuYsqUKZ85gC2lTW3to2zadAe9en2Xioqf7rEb\nbE889ZSbYf3oo24Iamfa21eycuUVGEYvhg79/R7zIubFYnzr00/5Znk5XzcauGzGBbz9+8tpbTiD\n0SvPxtfXHY1eudKd6uHll13H3YX77nMHQ+bO3YuJq/cfN828iZAe4hdn/eKAHudL4RxmfvVDPm5c\nxq0Lr0FTNPoFB5B1MjSbjW6egsxiSwtD+Lh3yBNU+AaCI5EOCHtHjQPYEuEAtrtO5NfZIBxQLHed\ncEC1wBHgqLmiga3uVBSwcu2sCpaW21YHRxNILVfrIHWB1AToAgy3FrlaMRQUn0DxKaiGguJT0AwF\n1a+g+xT0gIbuVzD8Kr6Aij+g4QuoBIMqwaBOMKgSCu
sEDBV1Pw20JW2bOtOkNpOh1jSpyZUtpsmW\ndJrNpkncsujXKYpoQG4QuDLX7u3zfSHnsSccx6G1tZXt27fv4jQ6O5T8+oaGBkzTJFoYJeqLUmQV\nUdhWSFG2iD4Vfeg9sjeNxzayrGwZn6Q+oSZRQ8bOIF2xFfyan36RfhxdfjRj+47lzIHnooQrWdbW\nxrK2Npbm6mJN47hIhNHhMJXtEayVBaxapLFggWDJEje89tRTbU44YS3Dh/8dIZ7DNLdSVjaJsrKL\nKSw8DUXRyGazLF68hBeeXsryF2FVyxnIgM6pZ7/AGafOY+Ill9K7zyW7hJvG43GefPJJ7r/vfsYW\njuU75d8h/KFBv4Ef0Kv9RbR0A+L66+Hqqzu6nKSUvPfee9x///3MnTuXa665huuvv/4zk+tMcyvr\n1v078fgCjjrqMUpK/l+3/n+LF8NFF7nJa3fd1TV5zXEybNx4B9u2Pc2wYc/tcQKjpmyWK1auJGnb\n3Fqc5nvPX8Cbj36bRPI0Rq85D73UDZF67TV3rGPhQjeBsgMp3dTqYNDV4jhIN3UbWjZw4u9OZMO0\nDUR83RvD6Q6HrXMQQpwL3A8owO+llHfvZpsHgfNw1ZG+I6Vctptt5LJlrq11dRu56abzeemljzr+\nj0K45eyzK3nxxSUUFRV3rNtdySuy7t02EiFBSJlzJDLnXHa0cdzXsCVO1sHOSrKmTSbrkDVtslmH\njOkwf9lcRg0eQ9a0sTLOjmK6tW26xck4OKbM1Q7SlJBxwJQIUyIybq2YEjUjURKSklcAABCGSURB\nVE3QMhLdBD0DQkLWgIwfsj6wfALLL3D8AiegIAMCAgoiqKCEVNSQghZS0UMaRljFH9EIFOgEIxrh\nAp1IoUGkyECPaKgRFUV3HU/+cT9l22w2TTal02xKp9mYTrMhX1IpWi2LAX4/AwOBjpLvz6/0+3fJ\nYTiQpFIpGhoaqK+vZ9u2bdTX11NTXcPC2QuxW23qttVRn6gnJmNEA1F6DOiBb6SPRGWCWn8tSZL4\nFB82NmknDUCxv5iKogpG9xzNuQMnMrjveNZkYEkiwdK2NpYkEmhCcFwkwigjTMGGYuJLw3w8X2Pe\nPEHPnjBhQivHH/8PBg16GE37mGj0IsrLJ1NUNL7j4p9Om7zw0Bxee2Q7c+vHICIWp43/KyeM3sS5\nF13HkCE7BIZmz57NuHHj+L//+z/uuece2hvb+cnYn3DUuqMw1i6nstffCW+ejfjOFHeO0B477s6r\nq6t54IEHeO655zj//PO5+eabPzPKqbn5TVavvpaiogkMGnRvt+bp2L7ddQ6q6moQ7ixE29z8Fs8/\nfynnn/9D+ve/bbdS7raU/M/GjTyxdSv/WQa/fv4CXnnwalKhcYz69Dy0sDvA/atfuTJW773n+oIO\n2trcR4qLL4af/Wyvbd8XOneRTX5hMmP7juXmMTcfsOMdls5BuP/FNcCZQB2wCLhUSrmq0zbnATdI\nKb8qhDgJeEBKOWY3+5IjR0o2bfoW7e2zse0mVLUH5eV3UFg4lfzHWLeuiv79P0RRSpCSzyyOs/v1\nsOO1PdW2vet+HCdvKx1S4XkHI+VsdH0CjjMbw5jQRTZ8Z1nx3a1T1W4W4aDrWVQ1i6pZqEoWTdgo\nwkLFQpUWqmOhOjaqbaNaNpplo2UctKyNZjoYaQfDlBhpyfqWpYyWxxJIQTDpityaQcESdRlDS47H\nCSmQcxx6gUagSCdcYlAcNSgu90GhQkPIoSZgscGXYY2Rodo2WZtMUmOa9PH5GBQIMDgQ4KhgsKMe\n4Pej7oc7ub0Z6+m8jXQksU9irH1rLWvfX8v6ZevZXLeZ5pJmakpr2BjdSGNpI7JKouoqkWwEXdPJ\n6BniahwHh5AvxKCSQYzvP57LR1xOz9IRLG1v7+Iw2h2HYwNh+taU4nxYzOb5AZYtVDjmGJNx4xZy\n7LGPMGDAe2zYMJ
bzz7+VgoKTOrorpSOZ/fBCnn50PTNrJlBYvo3jq/7MgFFJzp90Kel0mjPPPNPd\nVkrmzZvH3XffzbJly5g+dTrjEuOIzVhNZeDPlLbORNz0fcStt3RJLW5ububxxx/nN7/5DZWVlUyb\nNo2vf/3ruxX+s6wE69f/mMbGlxg8+GHKyibt9XdvWfDjH7tjEM8+6ypddOatt/5KNPoAmlbAsGEz\n9phxPrOpie+sWsU3Cxze+fM3+Mt938OqGMOIpeei+BSkdENcLQuef36nh4T6ence6osucjXJcy/u\n73HCzvtbVLuIi/96MdU3VqOr+ygu9jkcrs5hDPAzKeV5ueXbANn56UEI8RjwrpTyz7nllcAEKWX9\nTvuSR0IXWGeHkXcgjgN33jmdH/94ese6fATrzss7R7fml/elWFbX9s7L2WzX1/LrOrfzy4sXT2fo\nsJ+REjZJslhkkDIDTgZVZlHJoJHBkFn8joXfsQg6NkHLJpyWhFsFkZgg0gaRdkkoLcmqgnZDI6Wr\npHSdpKLTZmi0+hWa/YLtfkljCJQSg1C5j2jPAJVRP0OK/RSGlC4Ks51LMOjmNnX+4U+fPp3p06d/\nof+t1WaRWJggNi9GbF6M1vmtJHokWDV6FfP7zGdFcAWb1E3oMZ3MugyGZWCUG5hlJqlQCsVR6FPQ\nh3FV47hsxGVMHDyRJstmSSLBh4kEi3Ilk4aqtT3RFkbZPCeEk3YoKb6eqVMFJ5zwPv37f5MePS7v\nEs1jx9t56+dv8NvXbP6x/hyOrpxLgfo8/U6IcNlllzBhwoSOC/oHH3zA9OnT+eSTT/jpbT9lYngi\nLfcvo3zVI5SKhcibf4B287WukFWObDbLSy+9xAMPPEBtbS3f//73mTp1KmVlu6rStra+x+rV/0Yw\nOIyKitu5997X9/q7f/llNwXhyivd63NnFRDHybJhw+3U1z9PRcVP6Nnzqt3qV21MpZiyahXb03HU\nWbfw/J2TUI47geHvnYmiK6TT7vjDKafAf/6nq9nVQUODm3X+ta+5/VxC7Jdz57OY8PQErj3+Wi4b\nsTvVwC/O4eocLgL+n5TymtzyFcBXpJQ3ddrmVeAXUsp/5pZnAbdKKZfstK8jwjnsiQN9gh1ovoj9\nGcehLpmlpj1DbSpDbTJDXdqksdUk0WSSajWxWjMQtyiOCcpiKtEWhaIWQbhVEmqRBGM2oaRNWlVo\n1jUSmk67ZtCuGrQpPmIYNDs+tlsGW02DurRBoEjtEOxrbp7OmDHTKS11uy2iUbeUlXWt90IJuwNp\nS9qWt9E6p9Wd7Of9GDIqqT6nmvmD5jMrO4tYJkaVqMK/3c+2jdvY3LoZq4eF098BFQqsAo4pP4Yz\nhpzBeSPP46R+J1GftVgYjzM/HuefsRiLV9poDz1PxLmL1pU6Y07+mHEnPcFJJ1UzaNBoysrOoLBw\nXMeAcGzFGp7+3zeZ8cEIVteM4riKWRSqr1HxlSDfuPwShg4dSo8ePVi4cCHTp09n0aJFTJw4kYtG\nXcQxcxUKZj5KVM7DPGYC6s3X4vv2xC6JYh9++CEPP/wwL7/8Mueddx7XXnst48eP7xKAYdsptm79\nHVu23MMzzwjuuutRSkrO3asgjYYGd2xg/XpXBmnUqK6vx+MfsGnT/5BILKFfv1vo3fuaXQbDpZTM\nqK/nB2tXEfnkDV78QSkpRhIdI4l+pxLztMHc8hOFmTPdMNdrr3UVSYTAFWk66yw3vPXuu5l+xx0H\n9Lf79zV/57/e/S8WX7P4gASxeM7hMCd/cT3YWu77iz05h/35eaSUtFhWx6B3baeB782mSU0yRWuj\nSahF0ieuUtoqKGiWBJsdwk2SomYobYGSZihsAUuHlmJBaxG80vg05/SaipSuLLoEHCkg1yWII1jX\ntoSB4eNASIRCfvqPbuGOTeGOQUnYHq5hXY+lbCxbwYbocjJait4tR6E6qhvtIKQ7
2C12Pre7Hrxx\nUS3RE/tgS0Fb1k+b5SNla9hSRVOzGEYaTbW6vMuqi6H13ll9bv8hpSSTlWSyNgCKsvsvzF5egzqq\n725f21ucbXGUngdX6iLP/rC/M3v6LMdu78+imX/db8fJc7g6hzHAdCnlubnlvelWWgWctrtupQNm\nqIeHh8e/MPviHLrxEL1PLAIGCSEqgK3ApcDOHWuvANcDf845k9adHQPs24fz8PDw8Ng3DqhzkFLa\nQogbgLfYEcq6UghxrfuyfFxK+boQYqIQoho3lLV7GsMeHh4eHvudIyYJzsPDw8Pj4HFwtWq/IEKI\nUUKI+UKIpUKID4QQB0cQfT8hhPiTEGJJrmwQQiz5/HcdXgghbhRCrBRCfCSE+OWhtmdvEUL8TAhR\n0+n7P/dQ27QvCCH+QwjhCCH2PsvsMEAI8d9CiOW53+4bQohdlQQPY4QQv8qd98uEEC8KIQ7N6Pg+\nIoS4WAjxsRDCFkIc9/nvOMKeHIQQbwL3SCnfyiXP3Sql3P00Uoc5Qoj/xR1fufNQ27K3CCEmAD8B\nJkopLSFEVErZeIjN2iuEED8DElLKew+1LfuKEKIv8AQwBDheStl8iE3aa4QQYSllW659IzBcSvm9\nQ2zWXiOEOAt4R0rp5G6KpJTyx4farr1FCDEEVzjot8APd44G3R1H1JMD7ofLx+YVAbWH0JYvyjeB\nPx5qI7rJ94BfSiktgCPFMXTiSA9quA+45VAbsS/kHUOOEO5v+YhBSjlLSpm3eQGw/2JbDwJSytVS\nyrV04zdwpDmHHwD/K4TYDPwKOGI8d2eEEKcC26SU6w61Ld3kKGC8EGKBEOLdI61bD7gh1y3whBDi\nwCUAHACEEBcAW6SUHx1qW/YVIcSdud/ut4CdZ1c4krgKmHmojTjQHOhQ1m4jhHgb6KzPK3Bzlm4H\nzgKmSSn/JoS4GHgS6P7ktgeQz7JfSvlqbt1lHKZPDZ9h/09xz5diKeUYIcSJwF+AqoNv5e75nHPn\nEeC/pZRSCHEncC9w9cG3cs98znf/E7qe64fdU9DnnftSyp8CPxVC/Ai4EZh+8K3cM3vz2xVC3A5k\npZTPHwITP5O9vPbs/f6OsDGHVillUaflmJTySLsDVHG7w46TUtYdanu6gxDideBuKeWc3HI1cJKU\nsunQWtY9cnk3r0opRx5qW/YGIcQxwCwgifuD74t7Dn1FStlwKG3bF4QQ/YDXpZQjPnfjwwghxHeA\n7wJnSCnNQ2zOPiGEeBf4j3/FMYdaIcRpAEKIM3EVX480zgZWHmmOIcffgDMAhBBHAfqR4hh2io75\nBvDxobKlu0gpP5ZS9pRSVkkpK4EaYPSR5BiEEIM6LV4IrDxUtuwLuei2W4ALjlTH0Im9euo87LqV\nPofvAg/m7r7TwDWH2J59YTKHaZfSXvAU8KQQ4iPABKYcYnu6w6+EEMfiDoRuBK49tOZ8ISSHYbfS\n5/DL3A2FA2wCrjvE9nSXhwADeDsnjrdASvn9Q2vS3iOEuBD3M0SBvwshluXVsvf4niOpW8nDw8PD\n4+BwpHUreXh4eHgcBDzn4OHh4eGxC55z8PDw8PDYBc85eHh4eHjsguccPDw8PDx2wXMOHh4eHh67\n4DkHj38phBCJI2SfG/ZGdvtAHNvDY2/wnIPHvxoHInHnUO7TS0TyOCR4zsHjXx4hRIUQ4h85Rda3\nc/MiIISoyk0etVwI8T/duUsXQnwtp067WAjxlhCiLLf+Z0KIp4UQ7+WeDiYJIe4WQqwQQryey+4H\nN8P5R7n1C4QQVbn3DxBC/DNvU6fjhYQQs4QQH+Zeu2D/fUMeHrviOQePLwMPAU9JKY8Fns8tAzwA\n3CelHIWrV9Sdu/T3pZRjpJTHA38Gbu30WhUwAfg68Czwj5zIXxr4aqftWnLrf5OzJW/Tb3I2be20\nbRq4UEp5Aq6+1T3dsNXDo9t48hke/1IIIeJS
yoKd1m0HekopbSGEBtRJKcuFEI1AeW52rwhQu/N7\nP2Ofx+BeoHsBOrBBSjkxN+NcRkr5C+GK8CSllIHce+4AmqSUDwohNgCnSyk35mzaKqUsy9nUI2dr\nh025be4DxuPqEx0FVB5J4nseRxbek4PHl4G9uQPqrpDdQ8CDuTv/6wB/p9dMcOeRBLKd1jt0FbuU\nn9PubNPluKJpo6WUo4GGnY7p4bFf8ZyDx78au7vI/xN3giWAK4D3c+35wMW59qXd3GcBkJddv7Kb\n780zudOx5+faczvZenmnbQuBhtxTzulAxWfs18PjC3OkSXZ7eHwegdxUlPlZsO7FnXXsaSHED4Ht\nwNTctj8AnhVC/AR4E4h1Y5/TgReEEM3AO8CAPbx3T08tEigWQizHHU/IO4SbgeeFELcCL3fa/jng\n1dz2H3KEzYfgceThjTl4fGkRQgSklKlcezJwqZRy0iE2y8PjsMB7cvD4MnO8EOJh3CeCFtyJ4z08\nPPCeHDw8PDw8doM3IO3h4eHhsQuec/Dw8PDw2AXPOXh4eHh47ILnHDw8PDw8dsFzDh4eHh4eu+A5\nBw8PDw+PXfj/qL1TZoIQrUIAAAAASUVORK5CYII=\n", + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "glmnetPlot(fit, xvar = 'lambda', label = True, ptype = '2norm');" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The options are `xvar`, `label` and `ptype`, in addition to other ordinary graphical parameters.\n", + "\n", + "`xvar` and `label` are the same as other families while `ptype` is only for multinomial regression and multiresponse Gaussian model. It can produce a figure of coefficients for each response variable if `ptype = \"coef\"` or a figure showing the $\\ell_2$-norm in one figure if `ptype = \"2norm\"`\n", + "\n", + "We can also do cross-validation and plot the returned object." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 41, + "metadata": { + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } + }, + "outputs": [ + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYgAAAElCAYAAAD+wXUWAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzt3X2cXPPd//HXJ4mbSBClTSTIxl5oVQkVolHZKC3SlqpW\na1OWEldpKJebUrl2p9FeSi51oTdSZBuJpHcSdw2ishEhKElEhEbsbjRBUUHi5ify+f1xzkxmZ8/s\nnJnZM+d8Zz/Px2MemZuzZ947mZ3PnO/dEVXFGGOMydUr7gDGGGOSyQqEMcaYQFYgjDHGBLICYYwx\nJpAVCGOMMYGsQBhjjAlkBcLESkQ+FpGnReRZEVkiIheKiMSdqxQicp6IPCcit+XcP1pE1vu/5xIR\neSCi558qIidGsW/TM/WJO4Dp8Taq6kEAIrILMBPYAWgqd8ci0ktVN5e7nyL8APiSqq4LeOxhVf16\nvh8Ukd6q+nF00Ywpnh1BmMRQ1TeA8cAPwfuAF5GrReRxEVkqImf594uI/Nr/tn6/iNyb/uYsIq0i\ncpWI/B04SUT2FJG5IvKkiCwQkb397XYRkT/7+35cRA7z7x/tf8t/WkSeEpF+uTn9o5zlIvKMiJzn\n3/cbYE9groicH/DrdToq8r/x/0ZEFgO/EJHtROQWEVnsP/fXu3od/MduFJGV/lHJp7Lu/5L/OywT\nkZtFZKus1+fn/u/4hIgcKCL3icgqETm7+P81U9VU1S52ie0CvBNw37+BTwJnAZf7920NPAkMBb4J\n3OPfP9Df/kT/ditwUda+HgRq/euHAH/zr88AvuBf3x14zr9+F3CYf307oFdOtoOAZcC2QD/gWeAA\n/7GXgJ0Cfp/RwHrgaf9ymX//VOCurO1+BpziX98ReAHo28Xr8A3gfv/+XYG3gBOBbYA1Wb/374Hz\nsl6f8f71a4Gl/u+5C/Bq3O8HuyTrYk1MJsm+DHxORL7l394B2As4HPgTgKq+JiLzc37uDwD+t/8v\nAH/K6tfYyv/3KOAzWff3F5HtgEXAL0VkBnCHqq7N2ffhwGxV/cB/jjuAL+IVDSHgSMGXr4npTzm/\n79dE5GL/9tbAHl28DkfgNcmhqq+IyN/8x/cBXlLV1f7t3wPnANf7t+/2/10O9FPV94D3ROQDEdlB\nVd/J8zuYHsYKhEkUEdkT+FhVX/c/vCeo6rycbcYW2M1G/99ewFvq93HkPhVwqKp+lHP/L0TkHmAs\nsEhEvqyq/yj+NwltY87tb6rqqg5Bw78Okud6rg/9fzdnXQdQ7DPBZLE+CBO3zAeZiHwS+A1wg3/X\n/cA5ItLHf3yvrG/5J/l9EQOBuqAdq+q7QKuInJT1HPv7Vx8Azs+6/wD/3z1VdYWqXo3XlPPpnN0u\nBE4QkW39I5RvAA+X9Jt3dj9wXlam4Vn3B70ODwMn+30UuwJj/O1fAIb6xRbge0BLN2U0PYh9WzBx\n21ZEnsZrTvkImKaqv/QfuxmoAZ72v0X/CzgB+AtwJLACeBl4Cnjb/5nc5Ynrgd+KyBV47/dZwDN4\nxeFXIrIM6I33YXsO8CMRGQN87O9/bvbOVHWJiDTjFQ8FpqjqM3meu5Dc7a8ErhORZ/AKZyvw9Xyv\ng6rOFpH067AGeNTP+KGInA78WUR6+1lvCpHRlnY2HYiqvSeMe0Skn6puFJFPAI8Do1T1X3HnMqaa\n2BGEcdU9IjIAr9P5p1YcjOl+dgRhjDEmkHVSG2OMCWQFwhhjTKDYC4S/tMBr/siN9H0HiMhjWcsB\nHJznZ48Rke
dF5B8icmnlUnfIYPkTlt+/f4K/BMVyEbkqz8/Gnj8nz/l+3uXiL+ERsM31/rIYS7OG\nwSaCiLSJt7THEhF5Is82Sc5f8P3Q4/LHPZUbb2bqcOCZrPvuB77sXz8WmB/wc72AF/GWHNgKb8mA\nT1t+y483L+IBoI9/e5ek5s/K81m84bfb4A27fQDYM2ebY4F7/euHAovjypvndwhcasSF/GHeDz0x\nf+xHEKr6CN4aMtk2461FAzAAyF3uALx1dVapart6s2FnAcdHFjQPy5/I/D8ArlLVTf42bwT8aCLy\nZ/kM8Liqfqjeqq4P462rlO14YBqAqj4O7OhPFEwKoetWiSTnD/N+6HH5Yy8QeVwATBaRNcDVwGUB\n2wzBmySV9k//viSw/PHaGzhCvFVR5+dpIkta/meBL4rITv4s6ePwFhHMlpt5Lcl5zcGbaDdPvJVz\nzwp4PMn5w7wfelz+pBaIHwDnq+oeeB9Wt8acp1iWP1598Jo6RgKXAH+MOU9Bqvo88AtgHvBXYAne\nbG6XjFJv3avjgHNF5PC4A5nyJLVAnKaqcwBU9c94h0+51uKtdJm2G8FNIXGw/PF6GbgDQFWfBDaL\nyM452yQuv6pOVdWDVbUOb3nw3EUC19LxqCL2zNlU9RX/39eB2XR+3yQ5f5j3Q4/Ln5QCkbtM8loR\nGQ3eiU/o/IcC3voy/yEiQ0Vka+A7eGv5x8HyJyv/HLy1mhDvBEFbqeqbOT+TpPxAZrFCRGQPvEUA\nb8/Z5C7gVH+bkcB6VX2toiHzEO9kR/396/3wlih/NmezxOYn3Puh5+VPQO/77cA6vGWH1wCn463h\n/3e8w+zHgAP9bXfFP1GMf/sYvJUrVwE/tvyW38/fB7gN73wHfwdGJzV/zu/yMN6H6hKgzr/vbPwT\n/Pi3b8QbrbIMOCjuzFm5huGNnFniv+4/dil/vvdDT89vS20YY4wJFGkTk4jsJiIPiciKrib/+NuO\nEJGPxD+3sDHGmHhFvZrrJuBCVV3qt08+JSIPqDdiI0NEegFX4U3QMsYYkwCRHkGo6ququtS/vgFY\nSfC42wnAn/FOhGKMMSYBKjaKSURq8JZEeDzn/sF4Z8f6DV2fR9cYY0wFVaRA+M1Lf8abfLUh5+Hr\ngOyFpaxIGGNMAkQ+ikm8E63fA8xV1f8LePyl9FVgF2Aj3rCsu3K2s+FWxhhTAlUt6Yt3JY4gbgWe\nCyoOAKq6p38ZhneUcU5uccja1tlLY2Nj7Bksf/w5emJ+l7NXQ/5yRDqKSURGAfXAchFZgreY1+V4\nS9Kqqk7J+ZGqPUpoa2uLO0JZLH+8XM7vcnZwP385Ii0QqroIb237sNufEWEcY4wxRUjKWkxVr6Gh\nIe4IZbH88XI5v8vZwf385XBmqQ0RUVeyGmNMUogImuBOagO0tLTEHaEslj9eLud3OTu4n78cViCM\nMcYEsiYmY4ypYtbEZIwxpttZgagQ19sxLX+8XM7vcnZwP385rEAYY4wJZH0QxhhTxawPwhhjTLez\nAlEhrrdjWv54uZzf5ezgfv5yWIEwxhgTyPogjDGmilkfhDHGmG5nBaJCXG/HtPzxcjm/y9nB/fzl\nsAJhjDEmkPVBGGNMFbM+CGOMMd3OCkSFuN6Oafnj5XJ+l7OD+/nLYQXCGGNMIOuDMMaYKmZ9EMYY\nY7qdFYgKcb0d0/LHy+X8LmcH9/OXwwqEMcaYQNYHYYwxVcz6IIwxxnQ7KxAV4no7puWPl8v5Xc4O\n7ucvhxUIY4wxgSLtgxCR3YBpwEBgM/A7Vb0+Z5tTgEv9m+8CP1DV5QH7sj4IY4wpUjl9EFEXiEHA\nIFVdKiL9gaeA41X1+axtRgIrVfVtETkGaFLVkQH7sgJhjDFFSmwntaq+qqpL/esbgJXAkJxtFqvq\n2/7NxbmPVwvX2zEtf7xczu9ydnA/fzkq1gchIjXAcODxLjY7E5hbiTzGGGO6
VpF5EH7zUgswSVXv\nzLPNGOBG4HBVfSvgcWtiMsZUnfbWVponTmTz2rX0GjKEhkmTGDpsWLftv5wmpj7dliIPEekD/Bm4\nrYvisD8wBTgmqDikNTQ0UFNTA8CAAQMYPnw4dXV1wJbDQLttt+223Xbl9rChQ7nh6KP50urV9AVG\nAI2LF3PwpEkM2nXXkvbf0tJCc3MzQObzsmSqGukFbxTTtV08vgewChhZYD/qsvnz58cdoSyWP14u\n53c5u2q0+S86/njdAKpZlw2gFx1/fLc9h//ZWdLnd6RHECIyCqgHlovIEkCBy4GhfugpwETgE8Cv\nRUSAj1T1kChzGWNMEmz39tv0y7mvH7DdO+/EEacTW4vJGGNictHxx5O6664ORWIj0HT88VwzZ063\nPEdih7kaY4zJb+len+V7O27DRv/2RqCxtpYf/vKXccbKsAJRIelOJFdZ/mBNTZHstpOw+SuVpxj2\n3tmivbWV1LhxNI4Zw+lHHcdDG37Dd25/gMn19TSOGcPk+nomzJvXraOYyhH5KCZjqlkqlawP5aTl\nMVu0t7Zyw9FHk1q9mn54Rwubl32KQy/dnW9Pnx53vEDWB2FMGUS8oSdJkbQ8ZouLTziBpjvvjLS/\nIYj1QRhjTMIlfcRSECsQFWLtsPGy/PFxOTt0X/5eQ4ZkOqPTNgK9Bg/ulv1HwQqEMcZUwJgzz+aM\nrYd1GLF0yeDBfOnss+OM1SXrgzCmDE1NyeoUTloes8WPfv4ci+f05Sv/MRFeXUevwYO7fd2lIIk9\nH0R3sgJhjHFNeiG+tctX0vLRCq6/8VGOOfKgimawTmoHWDtsvCx/fFzODqXnf2ThQq4+/HAumjGD\nKc88zZKVH3L3977GIwsXdm/ACFmBMMaYCPztppu4et26zMilfsDV69bxt5tuijNWUayJyRhjItA4\nZgypgKOPxjFjSD30UMVyWBOTMcYkzIr1OwYOa31vhx3iiFMSKxAV0lPbYZPC1mKKT09477S1tTFr\n5kzOOeooxn/mM3znoOOYt+qHXLzr0MQuxBeGrcVkTBmStvZR0vL0FKLK3ydO5JrMOkvPc8ngf3DK\nzGlMnjKFzeu8Ya0TKjCstTtZH4QxZUja2kdJy9NTpMaN46IZMzqtszS5vp7GmBfisz4IY4yJ0ea1\nawPXWdq8bl0ccbqNFYgK6QntsElm+ePjcnYIzp/b5/DosytYmbNN0tdZCqNgH4SIDAR+DgxW1WNF\nZF/gMFW9JfJ0xhiTQJ37HOBsEX6iymfYss7SdxO8zlIYBfsgRGQuMBX4iaoeICJ9gCWq+rlKBMzK\nYX0QJnGStvZR0vJUq3x9DqfW1LDfsGEVW2cpjHL6IMKMYtpFVf8oIpcBqOomEfm4lCczptok7cM4\naXmqVb4+h/2GDavoJLiohemD2CgiOwMKICIjgbcjTVWFqrEd1iWWPz4uZwcvf26fw6JlL1Rln0Ou\nMEcQFwJ3AbUisgj4JHBSpKmMMSZBgvoczundhx9/vCnT59BYW8uESZPiDdrNQs2D8Psd9gEEeEFV\nP4o6WEAG64MwxsQiX5/DtwcOZMhOO9FnyBCO+P73GXnYYdTU1MSUMlikfRAici4wQ1VX+Ld3EpHv\nquqvS3lCY4xJsra2Ntra2jLXa2pqeO2ZZwP7HA7ed9+q6nPIFaYP4ixVXZ++oapvAWdFF6k6VUM7\nrMtsLab4uPbeqampYdjQoSy4+WYeSqX4v8uuYN6GVufOJ90dwhSI3iKSOTwRkd7A1tFFMsYdqVTc\nCTpKWh4Xtbe2csPRR3PRjBlMa2tj/OJFjPh3Ly7YbfdOC+81VFmfQ64w8yCuAYYC6bNcnA28rKr/\nFXG23BzWB2ESJ2lrHyUtj4vy9Tf88AtfoG/fvmxauzbRfQ65op4HcSleUfiBf3secHMpT2aMMUmT\n2+fw5ooVgf0Ne2yzDakHH6x0vFgVbGJS
1c2q+htVPcm/3KSqoSbKichuIvKQiKwQkeUicl6e7a4X\nkVUislREhhf7S7jAtXbYXJY/Xi7nT3r23D6H519e32GOQws9o78hSJhRTKOAJrxmpj54Q11VVfcM\nsf9NwIWqulRE+gNPicgDqvp81v6PBWpVdS8RORT4LTCy+F/FGGOKl+5zSOWZ4/A+1TnHIYwwfRDP\nAxcATwGZIwdVfbPoJxOZA9ygqn/Luu+3wHxV/YN/eyVQp6qv5fys9UGYxEna2kdJy+MCl+c4hBF1\nH8Tbqjq3lJ1nE5EaYDjweM5DQ4CXs26v9e97DWMSLmkfxknL44J86ypV+xyHMMIUiPn+SKY7gA/T\nd6rq02GfxG9e+jNwvqpuKDqlr6GhIVPBBwwYwPDhw6mrqwO2tHMm9fZ1113nVF7Ln6zbLufP7oNI\nQp7c2+/s8AnmAn2BOj/nXKC9d+8O2yYlb6HbLS0tNDc3A5R9xBOmiWl+wN2qqkeGegJvmY57gLmq\n+n8Bj+c2MT0PjK62JqaWlpbMf6aLLH+8XM6fpOy5I5YG77Y7J067lLF3r+bW9eszfRCNtbVMmDeP\nocOGJSp/KcppYor8nNQiMg14Q1UvzPP4ccC5qjrWXyn2OlXt1EnteoEwxiRDe2srzRMnsnrRIlb1\n247nR+zEE5c1c/tPm1i9aBG1o0Yl5lwO3SHyAiEiY4HPAtum71PVn4b4uVHAw8ByvOXCFbgcb0SU\nquoUf7sbgWPwivfpQc1XViCMMeUKGrF08ZDdOOKaqxm0666ZtZfAa55xsVM6VzkFAlXt8oI37HQa\nXkdyI96H/S2Ffq67L15Ud82fPz/uCGWx/MEaGyPZbSdh81cqTzGS9N5pqq/XDd5k88xlA2hTfX3e\nn0lS/lL4n50lfe6GWYvpC6p6KvCWqqaAw4C9S6pGxlSZpK19lLQ8SZNvxNLmdeviiJN4YQrE+/6/\n74nIYOAjYNfoIlUnlzu5wPLHzeX8Screa8iQoldlTVL+SgszzPUeERkAXAM8jdePYGsxGWMSL3fU\n0s6HfIPv3f4Yt+lLHUcs9cBZ0mGEWYtpkqquV9W/4HUuf1pVJ0Yfrbpkj6V2keWPl8v548xeU1ND\nXV0ddXV1tLW1c89fv8meFzzI5Pp6Tq2pYXJ9fWY4az4uv/blynsEISJHqupDInJiwGOo6h3RRjPG\nmPKlh7Uuu28Zb256ngPrv86Q3c9k6FFHUVNTQ2t7OypSFSOWulveYa4iklLVRhGZGvCwquoZ0Ubr\nlEfzZTUmLklb+yhpeeIWNKw1exJcTxDpPAgR6a0hl/eOkhUIY0yx8i3EN7m+nsbp0+OKVVHlFIgw\no5haRWSKiHwp+9Sjpjiut2Na/ni5nD/O7N0xrNXl175cYQrEp4EHgXPxisWNInJ4tLGMMaZ8pQxr\nNVsUtRaTiOwE/B9Qr6q9I0sV/NzWxGSMKUrznAeZc/pXmLF+s/VBlPKzYT50RWQ0cDLeekl/B/7g\nD3utGCsQxphijR3/BB9vN5/D3lhelQvxhRFpH4SItAE/AhYCn1PVb1e6OFQD19sxLX+wSo0YCps/\niSOYKvXeaWtro6WlJXM+hJkzF7Nw1gF8b8R+jD7zTI5sbGT0mWfS2t6emTwXhuvv/XKEmUm9v6q+\nE3kSYxyUSiXrQzlpeSope/XVVCrFunUNnHce1NePjTeYw8IMc90b+A0wUFX3E5H9ga+r6pWVCJiV\nw5qYTOKIeEuCJkXS8lRaelLcCwsWs+xfI5n+6CQO/HzPaU4KEvU8iAXAxcBNqnqgf9+zqrpfKU9Y\nKisQJomS9oGctDyVZJPigkU9D2I7VX0i575NpTxZT+Z6O6blj5fL+SuVvXnixExxAG++Q2r1apon\nlrd0nMuvfbnCFIg3RKQWbxVXROQk4JVIUxljTJHsXA/dL0wn9bnAFODTIrIWaAXqI01VhVxfU97y\nB2ts
jGS3nYTNX6k8xajUeyc9KS53WY1yJ8W5/t4vR+iJciLSD+ilqu9GGynv81sfhDEmr/bWVi77\n/MH87q1/Wx9Elsj6IERkHxH5XxG5F/gj0OiPajJFcr0d0/LHy+X8lcred8CnmPUtuOArY0Of6yEM\nl1/7cnV1PojDgDuAm/CamAQ4EGgRkRNVdXFlIhpjTGe5Z4u7ZcWLfKLPYVz+2xv5/e9/T2MS29sc\n09X5IOYCv1DVlpz7RwM/VtVjo4/X4XmtickYE2jixEn85p46zv3+K4zZ71O0tbVlJs1lT6DriSKZ\nByEi/1DVwOYkEXlBVfcp5QlLZQXCGJMrPTFu6X3LeHOrA7jt0Z61zlIYUfVBdNUZnbuCrinA9XZM\nyx/M1mIqLKrXPj0x7qIZM5j95rPMfXUGNxx9NO2trd36PK6/98vRVYHYXUSuD7jcAAypVEBjkiyV\nijtBR0nLE6WoJsaZLbpqYjqtqx9U1d9HkigPa2IySZS0pS2SlidKjWPGkAr4dt84Zgyphx6qfKCE\nKqeJKe8opkoXAGOMKUZUE+PMFmGW2jDdwPV2TMsfL5fzR5X95Cv+m3E79sl0iKYnxjVMmtStz+Py\na1+uMEttlExEbgG+CrymqvsHPL4DMB3YA+gN/K+qNkeZyRjjntw5DzU1NVx53wKePP4QJn88LHO2\nuAk97GxxkVPVyC7A4cBw4Jk8j18G/I9/fRfgTaBPnm3VmKRpbIw7QUdJyxOFpqYmffPt97TXRUO0\n+YEnMveZYP5nZ0mf4V3NpL4BfwXXPIXlvBDF5xERGdrVJsD2/vXtgTdV1ZYSN85I2rDSpOWJymm/\n+jUDNx3CaUePiDtKVeuqD+LvwFNdXLrDjcC+IrIOWAac3037TRzX2zEtf7xczt9d2dtbW0mNG8c/\nbm3mhduuYMI+38ycg3ro0KGZ68WcbzoMl1/7csU9iukrwBJVPdI/58Q8EdlfVTcEbdzQ0JCZMj9g\nwACGDx+eWYo3/Z+Y1NtLly5NVB7Ln6x81Z6/3NuzZs5k9kUXceu6dfQD5gLNky7hlEceYeiwYbHn\nS9LtlpYWmpubAcpeYiTMKUc/CVwK7Atsm75fVY8M9QReE9PdGtxJfQ9eH8Qi//bfgEtV9e8B22qh\nrMaY6pQaN46LZszoNKR1cn09jdOnxxXLCVGfcnQGsBIYBqSANuDJIp5D/EuQduAoABEZCOwNvFTE\nvo0xPYCdLS4eYQrEzqp6C/CRqi5Q1TOAsEcPtwOPAnuLyBoROV1EzhaR8f4mVwJfEJFngHnAJar6\n7xJ+j8RLHwK6yvIHs7WYCivltW9ra6PF71Nobm7m9d69Oy0AV6lJca6/98sRpkB85P/7ioiMFZED\ngU+E2bmqnqKqg1V1G1XdQ1WnqupNqjrFf/wVVf2Kqu7vX2aW+HsYE4ukrX2UtDylqqmpoa6ujrq6\nOtrb27lkyu84q29t5JPiTEdh+iC+CiwEdgduAHYAUqp6V/TxOuSwPgiTOElb+yhpebpDKpVi054H\n8etZD/DDHd+i9TFvUlyDTYoLJZLzQSSNFQiTREn7QE5annKkz/Xw4iOP8ETff/O1H1zP5PMaSKVS\ndra4IkTSSS0il/j/3hC07HepYXsq19sxLX+8XM5fSvbscz3c1t7O08+/y/u/+AmzZs6MdM5DEJdf\n+3J1tRbTSv/fTkNOjTEmSkHnerh63Tom33uvDWutoK4myt3t/2vLfneD9IQWV1n+YJVq6QibP4kt\nL6W89kka1ur6e78cBVdzFZGDgZ8AQ7O3D5r4ZkxPk7RhpUnLUyo710MyhJ0oNxX4JvC1rIspguvt\nmJY/Xi7nD5M9d97DXseMpaHPsEQMa3X5tS9XmPNBvF7pIa3GmJ6lpqYms25QKpVixx0beWXESK4Z\nNpGXHrVzPcQlzDyILwHfBf4GfJi+X1XviDZapxw2zNWYHuDCC69l2r
QLWbQI9tkHG9ZapkjOSZ3l\ndODTwFbAZv8+BSpaIIwx1S097+G52f/giD2fZtutJ+EtAWfiEqYPYoSqHqyqp6nq6f7ljMiTVRnX\n2zEtfzBbi6mwMNmz5z3c996T3PbsDK4+/PBY5j3kcv29X44wBeJREdk38iTGOChpax8lLU9Y+eY9\nvHDvvTQ0NGTWZSr3/AamOGH6IFYCtUArXh+E4J3jtKLDXK0PwiRR0pa2SFqeIG1tbZkjgba2Nmpq\narjjggu43j8pUrbGMWNIPfRQhRNWl6j7II4pZcfGGBMkd8RSQ0MDv+q/k817SKCCTUyq2g4MYMv8\nhwH+faYIrrdjWv54uZy/UPb219Yz+zMrOW/gromY95DL5de+XAULhIicjzdZ7lP+ZbqITIg6mDGm\nerW3tpIaN47Vzc2cNuo77MtJ/Pdji5hcX8+pNTVMrq9nwrx5Nu8hbqra5QV4BuiXdbsf8Eyhn+vu\nixfVmGRpbIw7QUdJyxOk7aWX9L9qa3WD112iG0AvGFarbS+9pKqqTU1NMSesLv5nZ0mfu2FGMQnw\ncdbtj8l/jmljepSkDStNWp4gQSOWJrWupnnixDhjmQBhCsRU4HERaRKRJmAxcEukqaqQ6+2Ylj9e\nLuefNWtWh3WW3lyxInCl1jefe46WlpbY5z3kcvm1L1fBUUyqeq2ILABG+XedrqpLoo1ljKkWgwYN\nyiyZnUql2Pmzn2Xj0qWdRiztvO++PXpp7SQKdcpREekNDKTjct9rIswVlEHDZDXGJFcqlWLQvody\n32njmP7+m/Rjy4gl65SORqTnpPZHLDUCr7Gl/0HVJsoZY0JKr7O06pFF/L3vW+z15bM5+M21rF7k\nrdTaYCu1RiaSc1JnOR/YR1U/q6r7q+rnKl0cqoHr7ZiWP5itxVTYrJkzM+ssTW9v46nn32bve/5C\nw6RJ1DY00Dh9eqKLg+vv/XKEmUn9MvB21EGMcVEqlawP5bjzBC2jMXPyZG7PGbX005dWM3niRNhr\nr7iimhDCFIiXgBYRuZeO54O4NrJUVcj1zjfLHy9X8gctozF/hx3ynl+6lwMFwpXXPgphCsQa/7K1\nfzHGmNA2Dww+v/TrvXpxiD+kFToWF5MMYYa5OrqAcLK0tLQ4/U3E8sfL1fyq8GDbWNq2X8xv313d\nYdTSpb/7XaL7HtJcfe27Q94CISLXqeqPRORuvDPIdaCqX480mTHGSekRS6sXLeLbs1fx+jtjaX5i\nHpOvnJgZtWTnl3ZD3mGuIvJ5VX1KREYHPa6qCwruXOQW4KvAa/lGPolIHfBLvFOavq6qY/JsZ8Nc\nTeI0NSWrk7rSeXI7pbfdZhsWXnQRV69blzla+MnQWi6Y781xsPNLV14kw1xV9Sn/6nBVXZB9AYaH\n3P9U4Cv5HhSRHYFfAV9V1f2Ab4XcrzGJkKTiAJXPU1NTkznbW3t7Oy/ce2+mOIDX7/CzdltnyVVh\n5kGcFnA0BerpAAAXDElEQVRfQ5idq+ojwFtdbHIK8BdVXetv/0aY/brI9bHUlj9eruTfvHZtpxFL\nT+KNWHKVK699FLrqg/gu3gf4MBG5K+uh7YF/d9Pz7w1sJSLzgf7A9ap6Wzft2xhTYR/s/KlOI5be\nxxuxlL0QH9ioJRd0NYrpUeAVYBfgf7PufxfvHBHd9fwHAUfivaceE5HHVPXFoI0bGhoyb6gBAwYw\nfPjwzOiC9JsuqbfT9yUlj+VPVj6X87e3ttI4fjxrly9nXb+PWbF9P859dyN9gRHA32prOeL73we8\nv+H0z6cn0sWdv9Dturq6ROUpdLvFXzUXKLsAh1qsr6wnEBkK3B3USS0ilwLbpofSisjNwFxV/UvA\nttZJbUyMgmZJv/rKKzz24x/z8zVrMp3Sl+2+B70POpA3ly2zdZYSIJJOahF5V0TeCbi8KyLvFJOP\n/CcYuhM4XER6i8h2wKHAyiL27Y
x0hXeV5Q/Wk9Ziyu2Qrqur44V7780UB/CaAf7n5TUM6N8/s85S\na7vbp7B3/b1fjq5GMW2vqjsEXLZX1R3C7FxEbsdrqtpbRNaIyOkicraIjPef43ngfrwmq8XAFFV9\nrvxfy5jKSCVsGmml8wR1SqeX0TDuKziTWkT2CLo/zPkgVPWUENtMBiYX2s512W3JLrL88Upq/ufX\nBy+j0Wvw4MztpGYPy/X85QhzPojlWTe3BYYBL6jqZ6MMFpBDp06dSk1NDevXrwe8juq2tjYGDBjA\n+vXrM/+G3QZsJIUpj4i3nERSdGeefH0OT/3hD7y2bBkf7TyKhf8cz0nbnsHP2rcso3HJ4MF8cfJk\nPvjwQ/s7S4By+iDCrMX0uZwnOwg4p5QnK1d7e3tmFERaesXIfLe72ib9B5C+RFloskeguMjyxyuO\n/Lkrs44ZPZp7zjyTlL9098a2Ni7fYzHfmnYrk6dMySyjcUlOp3RLS4vTxcH19045wqzm2oGqPi0i\nh0YRptKCliZOC1to6urqMgUm/YcQVFQWL17M+vXr7ejFOKt54sRMcQCvWenna1YzecoUGqdPt2U0\nqlCYPogLs272wpu3YD1Qvq6KTPZ9J5xwQofbYY5ecvcfJ9e/QUWVv1Kfh2Hzl5onqDkJOr7/Pi6x\nQ9reO+4KcwSxfdb1TcC9QKd5CqY4YY5eXCsiPVG1rMWU7/3Y3tpKatw4XnxkEaveeoPvAp/J+rnc\nDmlTXex8EBVSSjtmkoqI6+2wlr947a2t3HD00Vv6HIBz+vThx5s28Rn/9uV77MFhY8fS0sUyGvba\nu6urtZjuyvcY2PkgkqJQEQnqI8n9OdPzdNWklBbU5/DrTZs4taaGfkDtqFFcaLOkq1pXRxCHAS8D\nM4HHyT8b2oQQ1zeQoAJSylGH69+gLH9HQe+L9tZWmq+4gtWLFpFatYrVT74Y2Oew37Bh9Bo9OnSH\ntL327uqqQAwCjgbSq7reC8xU1RWVCGaiU0rTlR15uK3QEUOn5qS2Nk6X/qzE+hx6sq6W2vhYVe9T\n1dOAkcCLQIuI/LBi6XKsbm4mNW4c7a2tmc6z9H2LHn64w+2w21SKa+u51OSsu/Pqq68C3odLc3Mz\nLS0tzJkzhzlz5mRWj2xpacms0pk0PX0tptz/z/T1dIEIak6aqhu4tH9/Nvr3pSfB7TN2bKa/Icz/\nt2vv/Vyu5y9Hl53UIrINMBbvKKIGuB6YHX2sYNPa2rzJOQsX8r4qv3z55cy3nXP/8AcuTXeehdym\ncfFivnHrrTzoT/JJrVrFUePHd7jdMGkSQOYcu/m2qfZ22EGDBmUOtfMN5+2J/R2pVLJGMhWTJ/vc\n0e++9x5v0HHJjH7Ap/fbj8m1tXknwZnq1lUn9TRgP+CvQEpVn61Yqi54k3PWcBV0+Lbzq02bmAw0\nFrFNavVqvjt2LDM3bCir0Fy+cCG9DvSWN+6qqKTGjXO2qIRphw3T3xFX0XC9HbmY/GHmNASNUJoI\nnA8M9fezEehXW1v2JLie9NpXm66OIMbhvUfOB84TyfRRC6BhV3SNQj86t431AzaXsM3+fnFI3y6l\n0Px8zRquWrOGXxL90YtrhSW3aPTEo4xKKzR5E4KblCYBV/n/bgQaa2uZ4L8HTc+Ut0Coat7+ibht\npOMHffq+XiVss1XONqUWml5Z14OKysmbNvFHco5ejhvLzI3FHb2EOVqJooh011jwuI4yXB/L3lX+\n7CMG2HL2s9zXLrtJ6a23g5uU2gcO5NS+fakdNYoTzj6b1vZ2WtvbyzpVaDW/9tWu6LWY4paenPO+\nKhvTH5zAuX36cOmmTUVtM6F/fy7esKHT/kspNNm3g4pKXwKOXjYWf/QS5mglt4gkvc+k0FGGzSDv\nWu7rEPRhFrZJqfaoo2CvvWxNJQM4ViBOranJTM4BmOx/G6odNYqzxo/nj1krSobZ5vvjx3PLGWd0
\n+KMppdCk/9DSgorKCGBBzjalHr0UOlrJLSKlNm9lF5FKfoOKYhhutazFVKh/YfToFqCuw9FCatUq\n3nrnXX4Wskmpedq0SLK7yvX85XCqQNQ2NHT4ZpPbeTbqiCM6daYV2ma3efPKKjQD99+fDUuWsMvL\nLwOVP3oJU0RK6ZwPKiJJOvJISt9GpUcwFepfGDNmAe2tQzvNaTiztxRsUpqQoP9fkwxOFYgoDB02\nrOxC097a2qGABBWVz48YwR+ffLLso5cwRytBRaTYzvncIjK3rY1rY+r/CKNQ38Z9993HyJEjnWuq\nSudfunQpAwYM6DTBLS37iOHC5maubGvr8P9788eaOVpIy25SOu2002htayu7vyGI6234rucvR48v\nEN0hqMhAx6LS0tJC3fnnl3X0EvZoJaiIlNI5n11E+lJa/0ecRx3ZH27Nzc2ZJdchecNwofBs5wUL\nFgSOSArqX2gEJrClf6Ef8NK227Lxgw+2bJPVpJTkImniYwWiQoK+gZRy9FLoaCWoiJTavJVdRNLp\ni+3/KKW/IwrD8uw/TFNVobMKlnomwtxtsgtCbjFI3wd06l9Yv2EDV+b0L6Qgc0QI3v/n4C9/mcnb\nb59571SqScn1b9+u5y+HFQjHhDlayS0ipXTOhykiYfo/UqtX853jjmPWxo1O9HcUauMvdObBUn4m\n331BgtZMOnfrbQP7Fz7yr2eOFq67jqHDhpFKpSJtUjLVwwpEhVSyHTPMkUmh5q3cIjIXmEdp/R8H\n+MUhfTtMp3l3D9VtjWjdrfnzR0c+kqm9tZXG8ePZ/OKLgf0Lv/p/H2T6F5popIkUG4GVNTWcCoFH\nC5UsBK634buevxxWIHqoYovIh4MHs+PLL5fU/1Fsf0epQ3XjOOpYsKCuW/eX23x01PjxzD7jDE5e\nvZpjKdy/kKKJi0nRWFvLtfPm0Txtms1pMCWzAlEhLn4DyS0ipfR/lNLfAaUN1e1qlvnmRYtIjRuX\nqKG60LEgXPz007y9ZEmHTv8fzrmTSzZuyCy5Xah/gRkwub6+0xFDdgd4pZuUXHzvZ3M9fzmcKhDp\nN3Z2597QoUOZM2dOpgOwmG3A2lyLUUr/R3f2dxR71FHqUiXd1YkedDSQ21k/O/u1aWtjImT6E/oB\nN27c0KEYpH/XfP0LTTO8/49c9j43pXCqQITpxCtG9vDGUgtN2MLjejtm2PxBRaTY/o5yhurmG2XV\ngjcSq7sXVoTppMaNC/Xhn7vf9NFB7uzm7IKQLgbp/Onfvav+hTiPFoL0lPd+NXKqQHS3qP5gggrP\n4sWLWb9+fd7Ck2+BtWpQbH9Hdw3VLXWpkmJmnjcxnYtmzOj84X/nnVxSYDJivqOD7COljcAz/fsz\nyv9d00cMXfUv2NGC6S49ukBEJegPNHuSVpDcopK0ZrKov0EV6u8o9agj/WFbl3VfmCISfuZ5U/CH\n/4bCH/7ZTUVBmdPF4FL/6GVmiPkL6bWYksT1b9+u5y9HpAVCRG4Bvgq8pqr7d7HdCOBR4GRVvSPK\nTElV6od8mGay9P5d0h1HHd21VElXI7FSpDrczn486MM/d7/P9O/PxvSRiZ+514EHcuqyZR2KQdAE\nyrTsJqWGhrbYm5RM9Yj6CGIqcAOQd3lIEemFt6jk/RFniVVU7ZiFPgTCHJmEKSJJbIctZpTVogcf\n5IBDD410YcVCH/65+00fHUzOWV4lPZktuxh0dV7kpBeCJL53iuF6/nJEWiBU9RERGVpgswnAn/FW\nxDbdLMyHR5gisnTp0sz+kqqrUVYNDQ1c09wcamHFUmaeh/nwz91vmKODXEnrgDbVLdY+CBEZDJyg\nqmNE5JA4s0Qtyd9AuquIJLk5K70WU5ihulD8zPOwH/7FFAPoXBDS12tqahL9nsrmSs58XM9fjrg7\nqa8DLs26LXEFMV3rCUWkK2H6RIr98A/DjgxMnOIuEAcDs0RE
gF2AY0XkI1W9K2jjhoaGzB/LgAED\nGD58eKa6pz94knr7uuuucypvd+VPj95Kr4xaU1NDW1sbc+bM4f33388UkWeffZb+/ftnisjSpUsZ\nNGgQablt8MXmf+yxxzq0Jbe2tna43dLS0mG9pqDbQc+fXoup3Hytra3MmjUr8zu///77mde8pqaG\nOXPmhHr9W1rqaGqK//2SfTv7tUlCnmrP39LSQnNzM9ANX8RUNdILUAMsD7HdVODELh5Xl82fPz/u\nCGWpZP7W1ladP3++zp8/X6dOnaqzZ8/u8G/6sdbWVlVVbWpq6vDzubdVVU877bSC24TZT+59uW/L\nYvab+3vm/l7Zwr7+Sfwzsfd+vPzPzpI+v6Me5no73qDsnUVkDd6w8K39wFNya1WUWeKWrvSuqmT+\nUpqzCs2Cz/4mWIkmm9y+g3LnsLj8/nE5O7ifvxziFZjkExF1JatJpqAztpVyYp/sbb7xjROYPXtO\np5+BePoPRMD+TEw2EUFVS+rftQJRIdnt3S6y/MEq9YEcNn8SC4S9d+JVToHIXWXAGGOMAewIwpiy\nNDV5l6RIWh4TP2tiMsYYE8iamByQO07eNZY/Xi7ndzk7uJ+/HFYgjDHGBLImJmOMqWLWxGSMMabb\nWYGoENfbMS1/sEqNGAqbP4kjmOy94y4rEMaUIZWKO0FHSctj3GZ9EMaUIWkzl5OWx8TP+iCMMcZ0\nOysQFeJ6O6blj5fL+V3ODu7nL4cVCGOMMYGsD8KYMiRt7aOk5THxs7WYjDHGBLJOage43o5p+ePl\ncn6Xs4P7+cthBcIYY0wga2IyxpgqZk1Mxhhjup0ViApxvR3T8sfL5fwuZwf385fDCoQxxphA1gdh\njDFVzPogjDHGdDsrEBXiejum5Y+Xy/ldzg7u5y+HFQhjjDGBrA/CGGOqmPVBGGOM6XaRFggRuUVE\nXhORZ/I8foqILPMvj4jI56LMEyfX2zEtf7xczu9ydnA/fzmiPoKYCnyli8dfAo5Q1QOAK4HfRZwn\nNkuXLo07Qlksf7xczu9ydnA/fzn6RLlzVX1ERIZ28fjirJuLgSFR5onT+vXr445QFssfL5fzu5wd\n3M9fjiT1QZwJzI07hDHGGE+kRxBhicgY4HTg8LizRKWtrS3uCGWx/PFyOb/L2cH9/OWIfJir38R0\nt6run+fx/YG/AMeo6uou9mNjXI0xpgSlDnOtxBGE+JfOD4jsgVccvtdVcYDSf0FjjDGlifQIQkRu\nB+qAnYHXgEZga0BVdYqI/A44EWjHKyIfqeohkQUyxhgTmjMzqY0xxlRWkkYxFSQiB4jIYyKyRESe\nEJGD485UDBGZJSJP+5dWEXk67kzFEpEJIrJSRJaLyFVx5wlLRBpF5J9Zr/8xcWcqhYj8l4hsFpFP\nxJ2lGCLyU39C7BIRuU9EBsWdqRgicrX/vl8qIn8RkR3izlQMETlJRJ4VkY9F5KDQP+fSEYSI3A/8\nr6o+ICLHApeo6pi4c5VCRCYD61X1yrizhCUidcDlwHGquklEdlHVN2KOFYqINALvquq1cWcplYjs\nBtwM7AN8XlX/HXOk0ESkv6pu8K9PAPZV1R/EHCs0ETkKeEhVN/tfjFRVL4s7V1gisg+wGbgJuEhV\nQ305deoIAu8X3NG/PgBYG2OWcn0bmBl3iCL9ALhKVTcBuFIcsrg+0OGXwMVxhyhFujj4+uH9LTtD\nVR9U1XTmxcBuceYplqq+oKqrKPJvwLUCcQEwWUTWAFcDzlTwbCLyReDVQiO3Emhv4AgRWSwi811r\n4gN+6DcR3CwiOxbePDlE5OvAy6q6PO4spRKRK/2/3VOA/447TxnOoIdM6k3ERLlsIjIPGJh9F6DA\nT4CjgPNVdY6InATcChxd+ZT5dZVfVe/27/suCT166CL/FXjvl51UdaSIjAD+COxZ+ZTBCrx3fg38\nVFVVRK4ErgW+X/mU+RV4
7S+n43s9cUdDhd77qnoFcIWIXApMAJoqnzK/MH+7IvITvNGWt8cQsUsh\nP3uK26djfRDrVXVA1u23VdW1b4K98ZrGDlLVdXHnKYaI/BX4haou8G+/CByqqm/Gm6w4hSZvJo2I\n7Ac8CLyH90e/G9576BBV/Vec2UohIrsDf1VVp1ZvFpEG4CzgSFX9MOY4JRGR+cB/VWsfxFoRGQ0g\nIl8C/hFznlIcDax0rTj45gBHAojI3sBWrhSHnFEzJwLPxpWlWKr6rKoOUtU9VXUY8E/gQJeKg4j8\nR9bNE4CVcWUphT/q7WLg664Whyyhjz4T18RUwFnA9f638A+A8THnKcXJJLR5KYSpwK0ishz4EDg1\n5jzFuFpEhuN1jrYBZ8cbpyxKApuYCrjK/1KxGW9i7H/GnKdYN+BN8p0nIgCLVfWceCOFJyIn4P0O\nuwD3iMhSVT224M+51MRkjDGmclxrYjLGGFMhViCMMcYEsgJhjDEmkBUIY4wxgaxAGGOMCWQFwhhj\nTCArEKZqiMi7Zf78n0Skxr/e2t1LavvrVxVcajnMc4vIPNfWkzLusQJhqknJk3pEZF+gl6q2lbuv\nbhDmuacB50YdxPRsViBMVRKRa/yTGi0TkW/794mI/FpEnhOR+0XkXhE50f+ReuDO7F0E7HOEiDwq\nIk+JyCMispd//2kiMltEHhCRl0TkXBG5wD8x0aMiMiBrN6f6J815xl/wEBH5hJ9nuXin4ZWs55wt\nIk/6j52ZtZ+78RZ9NCYyViBM1RGRbwL7+4vBHQ1cIyID8dZg2kNV98VbJuSwrB8bBTxVYNcrgcNV\n9fN451f/n6zHPou3xtAhwM+ADap6EN65A7KXJOmrqgfiffu/1b+vEVjo550N7JG1/emqOgIYAZwv\nIjsBqOp6YOv0bWOi4NpaTMaEMQp/vStV/ZeItOB9cB8O/Mm//zV/Zcu0XYHXC+x3ADDNP3JQOv79\nzFfV94D3RGQ9cI9//3Ige9XSdK6FIrK9349wBPAN//6/ishbWdv/yF9HB7xVXPcCnvBvvw4MBrK3\nN6bb2BGE6QnS6+J35X1g2wLbTMI77eTngK/lbJ+9wqdm3d5Mx0KSmyPozGoC4K9cfCTekurDgaU5\nz7mtn9uYSFiBMNUk3Xa/EDhZRHqJyCeBL+J9614EnOT3RQwE6rJ+diWQvSR19v7SdmDLaW5PLzHj\nyQAicjjwtqq+CzyM1weCeOdaT/dZ7Ai8paofisingZE5+xqItzKtMZGwJiZTTRRAVWeLyEhgGd43\n9Iv9pqa/4H0jXwG8jNfn8Lb/s/cCY4CHsva1TETUv/5HvNPcThORK/ztu8yR5/4PRORpvL+9dJFJ\nATNF5DvAo8Aa//77gP8UkRXAC8Bj6R2JyOfxlpx26tzOxi223LfpUUSkn6pu9OcZPA6M8ovHtnjF\nYZQ68EchItcBd6rq/IIbG1MiO4IwPc09/rDTrfDOUf0vAFX9QEQagSF4Z2xLuuVWHEzU7AjCGGNM\nIOukNsYYE8gKhDHGmEBWIIwxxgSyAmGMMSaQFQhjjDGBrEAYY4wJ9P8BL0ufhyDLyKMAAAAASUVO\nRK5CYII=\n", + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "warnings.filterwarnings('ignore')\n", + "cvfit=cvglmnet(x = x.copy(), y = y.copy(), family='multinomial', mtype = 'grouped');\n", + "warnings.filterwarnings('default')\n", + "cvglmnetPlot(cvfit)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + 
"source": [ + "Note that although `mtype` is not a typical argument in `cvglmnet`, in fact any argument that can be passed to `glmnet` is valid in the argument list of `cvglmnet`. We also use parallel computing to accelerate the calculation.\n", + "\n", + "Users may wish to predict at the optimally selected $\\lambda$:" + ] + }, + { + "cell_type": "code", + "execution_count": 42, + "metadata": { + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } + }, + "outputs": [ + { + "data": { + "text/plain": [ + "array([ 3., 2., 2., 1., 1., 3., 3., 1., 1., 2.])" + ] + }, + "execution_count": 42, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "cvglmnetPredict(cvfit, newx = x[0:10, :], s = 'lambda_min', ptype = 'class')" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Poisson Models" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Poisson regression is used to model count data under the assumption of Poisson error, or otherwise non-negative data where the mean and variance are proportional. Like the Gaussian and binomial model, the Poisson is a member of the exponential family of distributions. We usually model its positive mean on the log scale: $\\log \\mu(x) = \\beta_0+\\beta' x$.\n", + "The log-likelihood for observations $\\{x_i,y_i\\}_1^N$ is given my\n", + "$$\n", + "l(\\beta|X, Y) = \\sum_{i=1}^N (y_i (\\beta_0+\\beta' x_i) - e^{\\beta_0+\\beta^Tx_i}.\n", + "$$\n", + "\n", + "\n", + "As before, we optimize the penalized log-likelihood:\n", + "\n", + "$$\n", + "\\min_{\\beta_0,\\beta} -\\frac1N l(\\beta|X, Y) + \\lambda \\left((1-\\alpha) \\sum_{i=1}^N \\beta_i^2/2) +\\alpha \\sum_{i=1}^N |\\beta_i|\\right).\n", + "$$\n", + "\n", + "Glmnet uses an outer Newton loop, and an inner weighted least-squares loop (as in logistic regression) to optimize this criterion.\n", + "\n", + "First, we load a pre-generated set of Poisson data." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 43, + "metadata": { + "collapsed": true, + "jupyter": { + "outputs_hidden": true + } + }, + "outputs": [], + "source": [ + "# Import relevant modules and setup for calling glmnet\n", + "%reset -f\n", + "%matplotlib inline\n", + "\n", + "import sys\n", + "sys.path.append('../test')\n", + "sys.path.append('../lib')\n", + "import numpy as np\n", + "import importlib, pprint, matplotlib.pyplot as plt, warnings\n", + "from glmnet import glmnet; from glmnetPlot import glmnetPlot \n", + "from glmnetPrint import glmnetPrint; from glmnetCoef import glmnetCoef; from glmnetPredict import glmnetPredict\n", + "from cvglmnet import cvglmnet; from cvglmnetCoef import cvglmnetCoef\n", + "from cvglmnetPlot import cvglmnetPlot; from cvglmnetPredict import cvglmnetPredict\n", + "\n", + "# parameters\n", + "baseDataDir= '../data/'\n", + "\n", + "# load data\n", + "x = np.loadtxt(baseDataDir + 'PoissonExampleX.dat', dtype = np.float64, delimiter = ',')\n", + "y = np.loadtxt(baseDataDir + 'PoissonExampleY.dat', dtype = np.float64, delimiter = ',')" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "We apply the function `glmnet` with the `\"poisson\"` option." + ] + }, + { + "cell_type": "code", + "execution_count": 44, + "metadata": { + "collapsed": true, + "jupyter": { + "outputs_hidden": true + } + }, + "outputs": [], + "source": [ + "fit = glmnet(x = x.copy(), y = y.copy(), family = 'poisson')" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The optional input arguments of `glmnet` for `\"poisson\"` family are similar to those for others.\n", + "\n", + "`offset` is a useful argument particularly in Poisson models.\n", + "\n", + "When dealing with rate data in Poisson models, the counts collected are often based on different exposures, such as length of time observed, area and years. 
A Poisson rate $\\mu(x)$ is relative to a unit exposure time, so if an observation $y_i$ was exposed for $E_i$ units of time, then the expected count would be $E_i\\mu(x)$, and the log mean would be $\\log(E_i)+\\log(\\mu(x))$. In a case like this, we would supply an *offset* $\\log(E_i)$ for each observation.\n", + "Hence `offset` is a vector of length `nobs` that is included in the linear predictor. Other families can also use options, typically for different reasons.\n", + "\n", + "(Warning: if `offset` is supplied in `glmnet`, offsets must also be supplied to `predict` to make reasonable predictions.)\n", + "\n", + "Again, we plot the coefficients to have a first sense of the result." + ] + }, + { + "cell_type": "code", + "execution_count": 45, + "metadata": { + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } + }, + "outputs": [ + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAZAAAAElCAYAAADKuLQKAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJztnXd4XOWV/p/XLNLJvARJ29nZ5paW
HOjwJ2quoOb/9R4DbgAW//GW+9EchS1TKgTEQqRKSLqp5s5D0YnQwzECNUiMgwoEZV\ni7wv9y+q6kv1rrnqNLcp9dYR4Jh6OZb6jwKmqWp1vePfE5FngauAZSJyqaq+febvpNmU1tufr6rb\nTxHa/HKQRrbrU+mtYwnbAIp9ZxgJWA7ECDq1X3Qi0hv4BfCgd+gF4DYRSfbOn5NQS7jey4X0BeY2\ndGNVLQbeFZHrE54x3tt8Ebgz4fgEbz1MVTep6vdxoaJz6932NeCDIpLu1XA+BCxp0Tt/Ly8AdyRo\nmphwvKFyWALc4OVI+gMXe9dvA4Z4ZgzwKSC/lTQanQj7NWEEnXQRWYML11QDf1DVn3jnfgPkAWu8\nX+GHgA8CC4H3AZuAPcBq4IT3mvrDT38C+KWIfBP3//AXYAPOPH4mIuuBJNyX8W3Al0TkYqDGu/8/\nE2+mqmtF5Pc4c1HgYVXd0MizT0f96/8HuF9ENuCM9V3gmsbKQVWfEpF4ORQAyz2NlSLyGeAJEUny\ntP6qGRpt6G7jFETVPhNGx0NEslS1VER6ACuAWap6yG9dhtGRsBqI0VF5VkS64ZLi95l5GEbrYzUQ\nwzAMo0VYEt0wDMNoEWYghmEYRovoEAYiIpeLyFYReVtEFvitpzG8YSgOeq1oAouIDBKRV0Vkkzck\nxx2nf1X7IyJp3vAdaz2d9/itqSm85rRrRORpv7U0hYjs8oY4WSsiK/3W0xgi0lVEHveGatkkItP8\n1tQQIjJS6oa/WSsiJ4L6P3WmhD4HIiIR4G3g/cA+XJPEj6rqVl+FNYCIzAZKcE1Rx5/uer8QkX5A\nP1VdJyLZuGaw1wa0TDNVtcxrjroMNyRHIL/0ROTLwCSgSyO9zgOBiOwEJqnqMb+1NIXXXHqxqv7O\n6wOTGfRe8t731V5cJ9U9fus5WzpCDWQqsF1Vd3u9hv8CXOuzpgZR1aW48YgCjaoeUNV13nYJsAUY\n6K+qhvGG2QA3vlMyAe2rICKDgCtxfTaCjhDw7wYR6QJcqKq/A1DVaNDNw+MSYEdHMA8I+IekmQzE\ndRaLs5eAftmFERHJAybi+lIEDi8stBY4ALykqqv81tQIPwHuIqAGVw8FXhI3QvHnT3u1PwwFDosb\nsXiNiDwsIhl+i2oGN+CNT9YR6AgGYrQRXvjqCeBOryYSOFQ1pqrnA4OAaSIyxm9N9fHGpDro1eqa\nGlAxKMzyxge7ErjdC70GjWTcyMc/87SWAXf7K6lpxA2Zfw2nDpAZajqCgRTiRiSNM8g7ZpwFXkz5\nCeCPqvoPv/WcDi98sQi43G8tDTALuMbLLfwZuFhE/uCzpkZR1f3eugh4ChcmDhp7gT2q+qa3/wTO\nUILMFcBqr1w7BB3BQFYBI0RkiIikAh/FzdkQVMLwCxTgt7g5MH7qt5DGEDfpU1dvOwOYBwQu0a+q\n/6Gquao6DPf5fFVVP+23roYQN2lVtredhRsq/i1/Vb0XVT0I7BFvAjBcI5rNPkpqDh+jA4WvoAMM\nZaKqNSLyBdzoqRHgEVXd4rOsBhGRP+FGhu0pIgXAPfEkYJAQkVm4QQY3evkFxU1Y9C9/lb2H/sCj\nXsuWCPBXVX3eZ01hpy/wlIjEh25/TFXbZI72VuAO4DEvNLQT+IzPehrFGx35EtyMmh2G0DfjNQzD\nMPzB1xBWczusicgD4uZkXpcwB4JhGIbhI36HsKLAVxI7rInIi4kd1kTkCtzczed4PU1/CUz3Sa9h\nGIbh4WsNpJkd1q4F/uBdswLo6s0yZxiGYfhIYFphNdFhrX5HwUKso6BhGIbvBMJAwtBhzTAMwzgV\nv3MgzemwVggMTthvtKOg1/TQMAzDOANUtUV904JQAzldh7WngU8DiMh04LjXiahBVDXwyz333OO7\nho6kM0xaw6IzTFpN59ktZ4OvNZDGOqwBQwBV1YdV9XkRuVJE3gFKCXBnoeay
a9cuvyU0i7DohPBo\nDYtOCI9W0+kfvhqIqi4Dkppx3RfaQY5hGIZxBgQhhNXpuPHGG/2W0CzCohPCozUsOiE8Wk2nf3So\noUxERDvS+zEMw2hrRAQNcRK905Gfn++3hGYRFp0QHq1h0Qnh0Wo6/cMMxDAMw2gRFsIyDMPoxFgI\nyzAMw2h3zEB8ICyx0LDohPBoDYtOCI9W0+kfZiCGYRhGi7AciGEYRifGciCGYRhGu2MG4gNhiYWG\nRSeER2tYdEJ4tJpO/zADMQzDMFqE5UAMwzA6MZYDMQzDMNodMxAfCEssNCw6ITxaw6ITwqPVdPqH\nGYhhGIbRIiwHYhiG0YmxHIhhGIbR7piB+EBYYqFh0Qnh0RoWnRAerabTP8xADMMwjBZhORDDMIxO\njOVADMMwjHbHDMQHwhILDYtOCI/WsOiE8Gg1nf5hBmIYhmG0CMuBGIZhdGIsB2IYhmG0O2YgPhCW\nWGhYdEJ4tIZFJ4RHq+n0DzMQwzAMo0X4ngMRkUeADwAHVXV8A+fnAP8AdnqHnlTV/2nkXpYDMQzD\nOAPOJgeS3NpiWsDvgAeBPzRxzRJVvaad9BiGYXRMysuhqgq6dm2V2/kewlLVpcCx01zWIncMKmGJ\nhYZFJ4RHa1h0Qni0mk6PigrYvh1eeQV+/3u47z74/Ofhsstg7Fjo3t0t3/1uqz0yCDWQ5jBDRNYB\nhcBdqrrZb0GGYRjtRkUFFBbC3r1uKSyEPXvc9p49UFAAJ07AwIGQm+uWwYNh8mT40Idg0CB3rkcP\nkNb7Pe57DgRARIYAzzSSA8kGYqpaJiJXAD9V1ZGN3MdyIIZhhAtVKCpytYddu9yyZ0+dQRQWQnEx\nDBhQZwSDBrll8OC6pW9fiJx5UCnsOZAmUdWShO1/isjPRaSHqh5t6Pobb7yRvLw8ALp168bEiROZ\nO3cuUFeFtH3bt33bb/f9V16BvXuZm5UFW7eSv3ix29+/H5KTye/TB/r1Y+6UKTB+PPmDB0Pv3sy9\n9lro3Zv8JUsavv/UqWekJ769a9cuzpag1EDycDWQcQ2c66uqB73tqcDfVDWvkfuEogaSn59f+0cN\nMmHRCeHRGhadEB6tgdOp6moPGzfCW2+5ZeNG8jdvZm5uLowbB+eeC6NGwciRbt2zp29yQ10DEZE/\nAXOBniJSANwDpAKqqg8D14vIrUA1UA7c4JdWwzCMUygqcgaxadOphpGZCeed58zife+DL34RjhyB\nK67wW3GrEogaSGsRlhqIYRgh4/hxZxKbNtWZxFtvQXW1M4qxY+sMY+xY6NXLb8XN5mxqIGYghmEY\ncYqLYfPmU81i0yZnIGPGOJOIG8a4cdC/f6u2avIDG0wxZCQms4JMWHRCeLSGRSeER2uLdJaWwqpV\nrr/EXXfBlVfCkCHQrx/cdhvk50OfPi70tHQpnDwJK1fCb38LX/mK61sxYMAZmUdYyvNM8D0HYhiG\n0WZUVcG2baeGnd56C/bvdwnssWPdcsstbp2XB0lJfqsODRbCMgwj/NTUwI4ddSGnuFHs3OlMIZ6f\niIeghg2DZPv9DJYDqcUMxDA6OLGY63UdN4i4WWzb5sJPifmJ885zTWTT0vxWHWjMQDzCYiCBa7fe\nCGHRCeHRGhad4LNWVdcLu34ye8sW6NatrtXT2LHkV1Qw91Ofguxsf7Q2k6D+7UPdD8QwjE6MKuzb\nV2cU8WXzZteXIp6jmDEDPvc5t92t26n3yM8PvHl0VKwGYhhG25NoFPFmsvF1WlqdUYwd65rLjh3r\na+/szoSFsDzMQAzDZ+Khp0STiC/p6XXmkLgOUae7joj1AwkZYWkPHhadEB6tYdEJzdB68qTrI/HQ\nQy68NG2am6ho2jT4wQ/cqLKTJ7v5J3bsgAMH4NVX4cEH4dZb4aKLWsU8wlKmYdF5JlgOxDCM03P8\nuOtIt3IlrFvnlv37XWunCRNg0iS48UZX
o+jRw2+1RjthISzDME6lshI2bHBmsWKFWxcWwgUXwNSp\nziwmToRzzrFOdx0Ay4F4mIEYxhkSi8HWrW5Yj5Ur3fqtt2DECBeKmjrVrceMsY53HRTLgYSMsMRC\nw6ITwqPVV52qrhPeE0/AggVw8cVujuxrroF//cuZxo9/7IYo37CB/E98ws2pPX58oM3D/vb+EdxP\nhWEYZ0dJiQtBLV9el78QcbWKqVPh7rtdktuayxotxEJYhtFR2LsXli2rW7Ztc7mKmTNh+nSYMsXN\nox3y4ceN1sVyIB5mIEanIRp1M+AtW+ZqGMuWQVkZzJpVt0yaZONAGafFciAhIyyx0LDohPBobbHO\nEyfghRfgv/8bLrnENZX9xCdg/Xq49FJ46SU4dAj+/nc3v8XMmWdtHh2+TNuZsOg8EywHYhhBQ9V1\nvFu+vG7ZudPlK2bOhC9/2Y0NZf0tDJ+xEJZh+E15OaxefaphpKa6MNTMmW6ZMMEdM4xWxnIgHmYg\nRijYuxdef90Zxeuvu1zGmDF1ZjFzJgwe7LdKo5NgOZCQEZZYaFh0QoC1VlW55rP33w833EB+nz5w\n/vnwxz9C377wve+53MWqVfDTn8INNwTGPAJbpvUwnf5hORDDaE0OHHC1ingNY906GD7c5Syuugqu\nvtolv60prdEBsBCWYbSU6mrXCipuGK+/7lpLTZ/uDGPGDNdhr0sXv5UaRqNYDsTDDMRoU06ccLWK\n115zw5ivWQNDh9aZxYwZMHIkRCwybIQHy4GEjLDEQsOiE9pI68GDbtyoO+90eYuBA+H734eUFPiv\n/3Ij1G7cCA8/DJ/5DJx77mnNo9OXaRtgOv3DciCGEaegABYtcjWMJUvcoIKzZrmJj37+c9ez25rS\nGkYtFsIyOi8nT0J+vuvF/eKLcPSoG6H2oovcct55Fo4yOjyWA/EwAzGaJBqFN9+sM4x169xcF/Pm\nueFAJkwwwzA6HaHOgYjIIyJyUEQ2NHHNAyKyXUTWicjE9tTXFoQlFhoWndCE1h074Je/hPnzoU8f\nuOkmlwz/5jddjuPll93cGOef3y7m0SHKNGCYTv8IQg7kd8CDwB8aOikiVwDDVfUcEZkG/BKY3o76\njDBx9Ci8+qqrZbz0khsmZN48+OAH4aGHoH9/vxUaRochECEsERkCPKOq4xs490tgkar+1dvfAsxV\n1YMNXGshrM5GZaXrfxE3jK1bYfZsZxrz5sHYsdZpzzCa4GxCWEGogZyOgcCehP1C79h7DMToBKi6\nObtfftkZxtKlrvnsvHmuie2MGTYHhmG0E2EwkDPixhtvJC8vD4Bu3boxceJE5s6dC9TFIP3ejx8L\nip7G9u+///5glN+IEfDyy+Q/9hisXs3cHj3gkkvInzYNbrmFuddcU1e2r7/uv94m9tetW8eXvvSl\nwOhpaj8wf3/7f2rV/fj2rl27OGtU1fcFGAJsaOTcL4EbEva3An0buVbDwKJFi/yW0Cx803nihOo/\n/qH6xS+qnnuuas+eqh/5iOrDD6vu3NngS6xMW5+waDWdZ4f3vdmi7+6g5EDycDmQcQ2cuxK4XVWv\nEpHpwP2q2mAS3XIgIaWqClascGGpl1+GDRvceFKXXOJCUxMnWvNaw2gjQt0PRET+BMwFeuLyGvcA\nqThXfNi75iHgcqAU+IyqrmnkXmYgYSAxj/Hyy67n98iRzjDe/36XBM/I8FulYXQKQt0PRFU/rqoD\nVDVNVXNV9Xeq+qu4eXjXfEFVR6jqhMbMI0wkxiKDTKvqLCiA3/4WPv5x6NcPPvQh2LbNjSH17ruu\ng993v+tqHC0wj05Zpm1MWLSaTv/ocEl0IyBUVsK//gUvvOBqGceOudrFJZfA//6vG8XWMIxQ43sI\nqzWxEJbPxGKuWe3//R8sXAjjxsEHPuBMY/x4y2MYRgDp6P1AjKCzebMzjcceg5wc+NSnYO1ayM31\nW5lh
GG2I/ST0gbDEQpvUuW8f/OhHcMEFbiDCaBSeftrNj7FgQbubR4co04ARFq2m0z+sBmI0n4oK\n+PvfXTJ81SqXCP/hD2HOHEhK8ludYRjtTLNyICIyC1inqqUi8kngAuCnqrq7rQWeCZYDaSPWrYNH\nHoE//9mNWvvZz7rBCa2prWGEnvbIgfwCmCAiE4CvAr/BjZ47pyUPNULAsWPwpz+52sbhw6657Ztv\ngjdMjGEYRnNzIFHvp/21wEOq+jMgp+1kdWwCGwuNxeCVV+ATn4ChQ8lfuBC+8x3YuRPuvTfQ5hHY\nMq1HWHRCeLSaTv9obg2kWES+AXwSuEhEIkBK28ky2pW9e+H3v3e1jS5d4N//HR54wCXEvYHYDMMw\n6tPcHEg/4OPAKlV9TURycXNyNDgJlF9YDuQMqK6GZ5+F3/zGzadxww3OOCZNsvkzDKMT0R45kC+r\n6oL4jqoWiMjYljzQ8Jlt21xC/A9/cONPfe5z8PjjkJnptzLDMEJGc3Mg8xo4dkVrCulMtHsstLQU\nHn0ULrzQNbkVgcWLYckS+PSnGzWPMMVsw6I1LDohPFpNp380WQMRkVuB24BhIrIh4VQOsLwthRmt\nwOrVLkT117/CzJnwla+4oUVSLH1lGMbZ02QORES6At2B7wB3J5wqVtWjbaztjLEcCHDihBtS5De/\ngaNHXV7jM5+BQYP8VmYYRgBpl/lARCQJ6EtCrUVVC1ry0Lai0xqIKixbBr/+NfzjH25I9M9/3g1i\naAMYGobRBG0+H4iIfAE32dNLwHPe8mxLHmi0Yiy0qMiNRzVmjEuGjxsHb7/tkuKXXnrW5hGmmG1Y\ntIZFJ4RHq+n0j+a2wvoSMEpVj7SlGKMZxDv7/frX8OKLcM018PDDbhY/a35rGEY70tx+IIuAeaoa\nbXtJLafDh7DKyuBjH4MdO+DWW93sft27+63KMIwQ0x79QHYC+SLyHFAZP6iqP27JQ40WcOgQXH01\nnHsurFkDqal+KzIMo5PT3CB5AS7/kYprwhtfjBZwxrHQt992zXAvu8wNOdJO5hGmmG1YtIZFJ4RH\nq+n0j2bVQFT1WwAikqmqZW0ryTiF5cvhuuvgf/7HJcoNwzACQnNzIDOAR4BsVc31hnW/WVVva2uB\nZ0KHy4E89RTcfLPrRX6Fdfw3DKP1aY8cyP3AZcDTAKq6XkQuaskDjWby4IPwve/Bv/7lpo01DMNo\nBqquvc2xY3XL8eNw8iSUlMCoUXDxxa3zrGZPaauqe+TUZqI1rSOh85Gfn8/cxoZJj8Xg7rvhmWdg\n6VJf5+BoUmfACIvWsOiE8GjtyDpjMfflf+jQe5eiolMNItEwkpJcA80ePaBbN7d06QI5OdCzZ+u9\np+YayB4RmQmoiKQAdwJbWk+GAUBVlRt2ZPdu17O8Rw+/FRmG0YaUlLhW+Tt2uHnbCgrcsncv7Nvn\nJgPNzIS+faF371PX557rviK6d3cG0b173XZ6evvob24OpBfwU+ASQIAXgTuD1rEw1DmQEydg/nz3\nM+Gxx2y+ccPoIFRWOoN4+203m0J8/c47Lqw0bBiMGAFDh8KQITB4sFsGDIA+fdq+0WW7jIUVBkJr\nIIWFcOWVrjf5Aw+4+qdhGKGhpgb27HHmkGgU27e7mkRurss9jBrlpuEZNcqZRv/+/g9X12YGIiJf\nV9Xvi8iDwHsuVNU7WvLQtiIsBnJKLHTzZtfC6tZbYcGCQA1HEpbYMoRHa1h0Qni0tpdOVZd3iJtE\n4rJjh8stjBoF55xz6jovz82gENTybMtWWPE8x5stuXlzEJHLca28IsAjqvq9eufnAP/A9YYHeFJV\n/6et9LQrr70G118PP/whfOpTfqsxDAMoLnY1h4aMIhI51Rw++tG62kRWlt/K2x9fQ1giEgHeBt4P\n7ANWAR9V1a0J18wBvqqq1zTjfqGogQDwxBNw220u3zGvoQkfDcNoK6
qqXNI6HmpKXE6ccAYxcmSd\nUcS3e/XyW3nr0+b9QETkJeDDqnrc2+8O/EVVL2vJQxOYCmxX1d3eff8CXAtsrXddcOI6rcEDD7g+\nHi+8AOef77caw+iwHD4MW7c6o9i6tW7Zs8clqkeOdMsFF7jaxMiRMHCg/3mJsNDcZry94+YBoKrH\nRKRPKzx/ILAnYX8vzlTqM0NE1gGFwF2qurkVnt3+eH088v/yF+YuW+ZrH4/mENSYbUOERWtYdEJ4\ntL7ySj5Dhsw9xSDiSzTqmruOGgWjR7sJOkeNguHD23880rCU55nQXAOpEZHc+AyEIjKEBpLqbcRq\nIFdVy0TkCuDvwMjGLr7xxhvJ876Yu3XrxsSJE2v/aPHBzHzZr6wk/8or4eBB18s8L89fPc3YX7du\nXaD0dIT9devWBUpPU/tB+/s//3w+e/ZAVpYzi9dey6egwDViHDgQevfOJzcXLrlkLp/6FBw5kk/3\n7nDxxafeb/Rof/QHpTzj27t27eJsaW4/kMuBh4HFuHDShcBNqvrCWT1cZDpwr6pe7u3fDWj9RHq9\n17wLTGpoTvbA5kBOnHADInbpAn/6k/XxMIxGUIX9+2HLllNrElu2wNGjLsQ0erSrVcSXc85xne2M\nltFec6L3AqZ7u2+o6uGWPLDePZOAbbgk+n5gJfAxVd2ScE1fVT3obU8F/qaqeY3cL3gGYn08DOM9\nVFe7pq9xc0hcZ2TUhZwSzSI313ITbUFb9gM5V1W3ikiDo/mp6pqWPLTeMy7H9XKPN+P9rojc7G6v\nD4vI7cCtQDVQDnxZVVc0cq9gGUgjfTzyQxILDYtOCI/WsOiE1tFaXOwS2Fu2nGoU774LgwY5Y4ib\nRHx9piP4hKVMg6qzLVthfQW4CfhRA+cUeF9LHnrKTVT/BYyqd+xXCds/A352ts9pd6yPh9GJOHIE\nNm58r1EcOVIXdho92rV0Gj3ahZ3aa7wmo+04XQ3kw6r6uIgMU9WdjV4YEAJTA7E+HkYHZv9+N6ty\n4nL8OIwbd2rYafRoN7aThZ2CTVuGsNao6gXxdYsVthOBMJAHH4Tvfheefdb6eBihJxqFdevc4NBL\nl7p1VRVMmuT6Tpx/vlsPG2ZGEVbOxkBO9yc/KiIvAsNE5On6S0se2GGJxeDrX4ef/cz9lzVhHonN\n6YJMWHRCeLQGXefJk/Dii3DPPTBpUj49ergZBrZuhQ9+0M2wXFTk+sB+5zvwkY+4YTz8NI+gl2mc\nsOg8E06XA7kSuAD4Iw3nQQyom8dj1y5nHq05Y4thtCEFBe4jG1+2b3c1ilmzXArvllvcHBOG0RCn\nC2H9UVU/FR+Vtx11tQhfQljxeTxycqyPhxFoolGX6I6HopYtc3NVzJ7tDGPWLGceqe3cQ9vwl7bM\ngWzGTSL1T2Au9cakaqgzn5+0u4Hs2+ea6VofDyOAlJbCG284w1i6FFascE1n42Yxa5YLPwVoBgHD\nB9oyB/JL4BXgXNyQIolLmw3xHgo2b4aZM+FjH4OHHjoj8whLLDQsOiE8WttSZ1ER/P3v8NWvwtSp\nbtrTe+6B8nK4807X92LzZvj1r+HGG11T2qbMw8q0dQmLzjOhyRyIqj4APCAiv1DVW9tJU/CxPh6G\nz6g6Q4jXLl57zVWIZ850FeIf/hCmTLGIqtG2nMlQJrOBc1T1d96wJjmq+m6bqjtD2iWEtXCh61lu\nfTyMdqSmBjZsqDOMpUudiVx4oTOM2bNh/HiLohpnTpuPhSUi9wCTgVGqOlJEBgCPq+qsljy0rWhz\nA3nwQTePx7PPwsSJbfcco9NTVuZyFvH+F2+8AQMG1JnF7NkwdKjlL4yzpy1zIHE+BFwDlAKo6j4g\npyUPDCXxPh4//7n7bz5L8whLLDQsOiE8WhvTefAgPPmky19Mmwa9e8N//qfrl3Hrra557ebN8PDD\n8OlPu457bW0eYS/ToBEWnWdCc+
cDqVJVFREFEJHOM/tvZaXr4xFvMH+mI70ZRj1U3QCDib27i4pc\n/mLWLPjBDyx/YYSD5oawvgacA8wDvgN8FviTqj7YtvLOjFYPYcXn8eja1eU87D/aaAGVlbB69an9\nL3JyTm1Oe955NhSI4Q/tNR/IPOBSXF+QF1T1pZY8sC1pVQOJz+Nx4YXw059adtJoNocPuyE/li93\nZrF2rZsXFwSCAAAgAElEQVTfIrHD3sCBfqs0DEdbDueeyAYgzdte35KHhYb4PB633eZyH60cbA7q\nvAD1CYtO8E9rPBwVN4tly9xotdOmOaO45x63nZNTp3PgwPbX2RLC8vfvzDqrYzFKamooi8Uoa2Jd\nHou5paaG83NyuKqVhltqloGIyEeAHwD5uBrIgyJyl6o+0SoqgkS8j8ePfgSf/KTfaoyAUV3twlGL\nFzuzWL4csrPrahZ33OHCUVZhNU6HqnI8GmVfVRUHqqo4VFXF8WiUE9Eox6NRTtbUUFxTQ0lNDcXR\nKCXedknC8RpVspKSapeMSITMSKRuOymJTG+dHomQ0cpx0ubmQNYD81T1kLffG3hZVSe0qpqz5KxD\nWDaPh1GPuGHk57tl+XLXAuqii+pCUhaOMppCVdlXVcXq4mLeLC5mbUkJW0pLKayqIlWE/qmp9E9L\no09KCt2Tk+manEy35GS6JCeTk5REdlLSe9fJyWRFIqRFIshZRkjaox/IRlUdl7AfAdYnHgsCZ2Ug\nDzxQ18fD5vHotESjboKkRYucYSxb5vpbzJ0LF1/sUmI22LLRFPsqK1ldXFxrGKtLSoiqMjknh0nZ\n2Zyfk8PYzExy09PJDEBVtT0M5AfAeODP3qEbgA2quqAlD20rWmQgsRjcfTc8/TT861+Ql9cm2hLp\nzDHbtuJstBYWwnPPuSU/382iFzeMiy5qXcPoLGXanvipc79nFnGjWF1cTFUsxqScnNplck4OuWlp\nLF68OJDl2WZJdBEZAfRV1btE5DpgtnfqdeCxljwwUMT7eOzebfN4dCJqamDVKlfZfO4518Xnssvg\nhhvgN79xnfgMI5HEMNTq4mLWeGZRGYu5mkVODjf268dD55xDblraWYeVwsLphnN/FviGqm6sd3wc\n8P9U9eo21ndGnFENJN7Ho0sXm8ejE3D8uJtp79lnXUWzb1/4wAfgqqtg+nRIPpP2iEaHRlUpqKxk\nTYJRrCkupgaYlJ19Su2iI5hFW84HskpVpzRybmNocyDxPh42j0eHJd68Nl7LWL3a5S8+8AH3px8y\nxG+FRhBQVXaUl7OmpKTWMNYUF5MSidTmK+KmMagDmEVDtKWBbFfVcxo5946qjmjJQ9uKZhlIvI/H\nLbe43IcPHwiLLbc++fn5zJgxl8WLnWE8+6ybaThey3jf+yAz02+V4SvTMGhtrs6YZxaJOYu1xcV0\nSU5mUk4OF2Rnc0FODudnZzMgLe2092srne1NW3YkfFNEPq+qv673wM/hJpUKFzaPR4dj/354/nn4\n3e/cdK1jxzrTeOopGDfORqvtrKgq71ZU1JrFm17uomtycm3O4u7cXC7Izqa3zeHbYk5XA+kLPAVU\nUWcYk4FU4EOqeqDNFZ4BTdZArI9HhyAWc81sn33WLTt2uAT4VVfB5ZdbArwzEs9ZJJrFm8XFZEYi\nTPZaQcVzFn3MLN5DezTjvRg4z9vdpKqvtuRhbU2jBmJ9PEJNSQm89FJdPqNbt7rQ1OzZkJLit0Kj\nvVBV9tZrOvtmcTHJInVmkZ3N5Jwc+rVBGKoj0i6DKYaB9xiID308mkNQY6H18VPnzp11uYzly11L\nqbhpjGgg82Zl2vr4rbV+D+74GqjtXzEpJ4eqNWu4ft68wCe4/S7PxmivwRTDhfXxCBXRKLz+el1o\nqqjImcVNN8Hjj7vW1kbHZn9lZa1RxGsWiT24P9e/P78cOfI9raHyU1MDbx4dFd9rICJyOXA/bnbE
\nR1T1ew1c8wBwBW5GxBtVdV0j93I1EOvjEQqOHXMVw3jfjNxcZxpXX+0mVLL5MTou8R7c8d7bbzbR\ng9vMoW0JbQjLG1PrbeD9wD5gFfBRVd2acM0VwBdU9SoRmQb8VFWnN3I/1b17rY9HQEnsm/Hssy4Z\nPmdOXWhq0CC/FRptwYHKytoaRbwnd0WCWcRrGEPS00NnFqpK/BtUG9pPuI6E4zGgKhajMhajSpWq\nWIyod433hY4CUVWiqlTXW0dVqY7FqAGisRpi0aMQPYxGj0DNUSLRo0j0MJGaI0RqjiI1x4jUHCcp\ndoKUbtdyydgf1+oJcwhrKrBdVXcDiMhfgGuBrQnXXAv8AUBVV4hIVxHpq6oHG7zjzJluEukFCwLb\nhjOosdD6tIbOqipYsqTONCornWHcdZcba6q1+mZ0pjJtLxrTWqNKRSz2nqUyFmN/ZSVvlZWxpbSU\nLWVlbC8vpyoWY2hGBkPS0hiclsaUAQPokpREFDefxZbSUjaUlFCd+EXpfaE2tdTgvmCLVq2iy6RJ\n7pi3xM/XJByL78dUicW3oXZf42vvWOK6dqm3H0cS1gK1RigJ53TtWiJeI56ICKkipHkj6qaKkOS9\nRgFRJZuT9JCjdNMjdOcYXfUIXfUIXfQoOXqYbD1CZuwoWbGTlFT24nBZHsdKB3CiuB8nivty8uRg\nik+Op/RkV0pPZlNRnEFFSQYXnl/MJb9ohQ8I/tdA5gOXqepN3v4ngamqekfCNc8A31HV5d7+y8DX\nVXVNA/fT2z/2eaLdurfPG2gh+woLGDAwt1nX1n0ApdYQI9I+sZ3Cwt0MHNhwl+1YjRtTKlYD0Rqo\niXnb1a7tQnXUnS8thdR06NbFtZ5qq2jinj27GTz47LuXS5JActv98NhTsIvBuXltdv/GqEGIihCt\n3YYoQhShRqBaICZCFUIMqBbh8O6d5Awd7l4j7vU1dV+HRFQRb8H7YhZVkgWSEVIRUrztCCAoyYBo\n3ec6AiShtXdNQolo3bmI9/UU8V4XIf5aJf5fsH/3uwwcMvQ95xK/1OP3kfjXvgixSIRYRIhGItRE\nkqhOihCNRKhOSqImKUJ1JEI0KUKNdywaceej3nXudadu14h4x9z9o96xWESo3PgWMmECsYhQI4KI\nEtEYEa0hQowINdQgxBBqSKbGlQaKuBISQeMm41yq3l+53nd5pIHPsSpTD+9mxYdvrD0U5hpIq/PI\nvrdJivYBQDIzSRk6lNTzxgJQ9dYmAP/33z8/WHoa3U8NmJ6m9s8PmJ5G9keNCJYetPHzSVGguunX\nC6SO8/n9XPeBeufPa+J6IfW8sYgqun49SdEoXc85h/TKSio3bya1upr+gweRXlbJ8e07SIlWk9ev\nL6nV1RzaXUBKNMrInt1Jra5iz/4DJNfUMLZLDqnV1bxz+Agp0SgTszJIromy+XgxyTVRJqelklIT\nZX1JGUmxGNOTI2RUR3mzMsrO7qloXg5be6XyZnk1hzPg2KgMolINu0qAGshNAY3C7ipvPwm0Ggq8\nX2793WEKYi421jfi1vtxceO+np0erAGU7UkjuXfTLnbt2sXZ4ncNZDpwr6pe7u3fDWhiIl1Efgks\nUtW/evtbgTkNhbBadU70ALNi7wruW3IfGw5uYMGsBXzugs+Rnpzut6wOgapSfaSail0VVOyqoHJ3\npdveXVF7DCBjRAbZE7Pdcn422ROySe7S4X6PGW3J0aOwYQOsXw9vvYVu3Qq7CuDAfjQ9m5qsnkRT\nu1NNVyprcqiozKayogtVZTlUpXWhIiuL0ux0jmdEOJBazmE5yqHYIQ5GD1JUVURRZRFHKo5QWl5K\nRUUFsViM5JQ0Lrn0Mp5/5u+1MsKcRE8CtuGS6PuBlcDHVHVLwjVXArd7SfTpwP1NJtFDYCCtFQdf\nVbiK+5bcx9r9a/n6rK/z+Qs+T0ZK68WIOkK8vrVRVaLHo5S/
XU7JuhJK1pVQvLaY0o2lpPZPJef8\nnDpTmZhNav9Tm5hambY+HU5nLObM5cABOHzYtWmPr48cQYsOoweK0IOH4ehR5MRRpLKMWGo2Ncld\niEoO1ZpNdXU2VVVZ1KR3g5wuxLp2I9Ylh5QrxzPovitqHxfaEJaq1ojIF4AXqWvGu0VEbnan9WFV\nfV5ErhSRd3DNeD/jp+YgMWXgFJ752DOs3rea+5bcx3eXfpevz/o6N0+6uVWNxKhDREjpnkLKtBS6\nTKvrnKI1StnbZc5U1paw9/69lKwtgQhkT8iura2UV5UTi8aIJFsbZaMRIhHo1cstDZCYmK8lGiXp\n+HGSjh0j9dgx10b+6FFih48SKzxM7MBRtKgAPXIcLU/B9Yo4e3zvB9KahKUG0las3b+Wby/5Nm/s\nfYOvzfwaN0+6mazULL9ldVpUlcrCSkrXl9bWVkrWlVC5r5KssVl1IbCJ2WSNyyI5x0JgRvsT2hBW\na9PZDSTO+gPr+faSb7O0YClfnfFVbptymxlJgIgWRyndcKqplG52IbDsidl1NZYJ2aQNto50Rtti\nBuIRFgNpr5jtxoMb+faSb7Nk95IWGUlYYssQHq2N6YxFYy6vsr6EkvUltbWWWGWM7AnZZE3IcsYy\nPpvMsZkkpbd9B9mwl2nQCKrO0OZAjLZlXN9x/O3Df+OtQ2/x7SXfZvgDw61GElAiyRGyxmSRNSaL\nvh/rW3u86lBVrakcf/U4e3+yl/Lt5aQPSyd7fIKxTHhvwt4w2hqrgXQi4kayeNdiM5IQE6uMUba1\nrNZY4jUWVXWmMj6htjImk6QMG87HaBwLYXmYgTSPuJHk78qvNZLs1Gy/ZRlngapSdbDKhb42lLgc\ny4YSyt8uJz0vncyxmWSNzSLrvCyyxmaRcU4GkRRrCWaYgdQSFgMJSix006FNfHvJt1m0a1GDRhIU\nnc0hLFrbW2esytVWSjeVUrqplLJNbrtyTyXpw9OdqXhL5thMMkZk1DYxtjJtXYKq03IgRosY22cs\nf7n+L7VGEs+R3D7ldgttdRAiqRGyx7twViI15TW1xlK2qYwDjx6g9K1SqvZXkTEyg6yxWRzIOEDR\nsSKyxmSRPjzd+q4Y78FqIEYtmw5t4r4l97F412K+NvNr3Dr5VjOSTkZNaQ2lW7yayuZSyja7ddW+\nKjLOySBzTCaZozLJHJlJxsgMMs7JIKWbzSkcZiyE5WEG0jq8degtvrX4W7y2+zXumnkXt065lcyU\nVhp33QglNWVejWVzKeVvl1P2dhnl28spf7ucSEbEmcmIDDKGZ5AxLIP0YelkDMsgpU+KtQwLOGYg\nHmExkKDGQuvzyJOP8Hz0eZbvWc7XZ36dWybfEtghUsJSpmHRCc3TqqpUHaii/O1yyt8pp3xnORU7\nKyjfWU75jnJi5bE6QxleZywZwzNIz0snknb2YbGwlGlQdVoOxGgThvcYzsK5C1l3YB33Lb6PHyz/\nAQtmLeCmSTcF1kiM9kVESOufRlr/NLrN6fae89ETUWcq7zpTKdtcxpFnjlCxs4KKPRWk9k4lfXj6\nKbWWuNmk9LLaS9CxGojRbNbuX8u3Fn+LVftW8dUZX+WmSTdZ81+jxWiNUrGnwpnLjrqaS8VOt69V\nWmcq9U0mL51IqiX1WwMLYXmYgbQPa/ev5TtLv8OiXYu4bfJtfHHaF+mV2fDIoYbRUqqPV7/HVOL7\nlXsrSe2X+t6wmLed3CPZai/NxAzEIywGEtRYaH1Op3P7ke18f9n3WbhlIZ+e8Gm+OuOrDO46uP0E\nJtBRyjRIBFlrLBqjck8l5TvKefWfr3J+yvlU7KjLvQANhsUyhmeQNjjNl06UQS1Py4EYvnBOz3P4\n9TW/5t659/KTN37ChF9O4Npzr2XBrAWc2+tcv+UZHZhIcoSMoRlkDM2gV3Ivhs8dfsr56qPVruay\nw9Vcit8spuhvRZTvKKfq
QBVpA9OcqQx15pI+1Nsemk5Kb8u9NBergRitxtHyo/x81c95cOWDzBo8\ni7tn383UgVP9lmUYpxCrirkpit+tqE3u126/W06sIkZ6Xp2hxJeMYW6/o83bYiEsDzOQYFBaVcpv\n1/6WH73+I7JSs5g/ej7Xj7mecX3G2S87I/BET0ZrzSRuLLVGs6vC9XsZlvEeY8kYlkFarj/hsbPB\nDMQjLAYS1Fhofc5WZ0xjrNi7goVbFvLE5idISUrh+tHXM3/MfCb1n9SqZtJZyrQ9CYvW9tSpqlQf\nqj6laXKiyVTuqyS1f2pd3mVoXR5mxb4VXPLBSwL3I8pyIEYgiUiEGYNnMGPwDH4w7wes2b+GhVsW\n8vGFH6eqpor5o+czf8x8pg+aTkTC9avN6JyICKl9U0ntm0rXGV3fcz5WXS88tqOcw08dpnxnOVu3\nbSVN00jPSydtUBrJ3ZJJzkkmKSeJpC5JJHdNJqV3Cqm9U0npneKWnikkZQZ3OH6rgRjtjqry1qG3\neGLzEyzcspBjFce47tzruH7M9czOnU1SJLj/MIZxNlQfr6ZiVwVVhVVET0SpKa4hWuytj0epLqp2\ny+FqqoqqqD5cjYiQ0iuF5J7JpPRMqV2SeyaT3CXZmU+OWyflJNWZUnzJSmqy1mMhLA8zkHCy9fBW\nFm5eyMItCyksLuSDoz7I9WOuZ27eXFKSbKA+o/OiqsTKYlQfqa5bDlcTPRKl+kg10ZPOfGpO1hlR\nfIkWR6k5WUOsMkZSZhKRzAiRjAj9P9OfvHvyap9hBuIRFgOx2HLj7Dy2s9ZM3jn6DlePupr5o+cz\nb9g80pLTGn2dlWnrExatprNptEapKa2hpqyGWLkzk9S+qbXnz8ZALPBsBIph3Ydx16y7eONzb7D2\n5rVM7DuR7y/7Pv1+1I9PPPkJntzyJGXVZX7LNIzQIElCcpdk0vqlkTE04xTzOOt7h+EXe3MJSw3E\nOHMOlBzgqS1PsXDLQlbtW8W8YfOYP3o+V428ii5pXfyWZxihxUJYHmYgnYPDZYd5etvTPLH5CZYW\nLGVO3hzmj57PNaOuoUdGD7/lGUaosBBWyMjPz/dbQrMIqs5emb347Pmf5flPPE/Blwu4YewN/ObJ\n35B3fx6X/vFSfvXmrzhYctBvmQ0S1DJtiLBoNZ3+Yf1AjFDTLb0bnxz/SQYdHcTkmZP55/Z/snDL\nQha8vIAJ/SYwf/R8rht9HYO6DPJbqmF0OCyEZXRIKqIVvLjjRRZuWcgz255hZM+RtR0Xh3Uf5rc8\nwwgMocyBiEh34K/AEGAX8BFVPdHAdbuAE0AMqFbVRkfnMwMxGqKqpopF7y7iyS1P8vdtf2dAzgCu\nO/c65o+Zz5jeY/yWZxi+EtYcyN3Ay6o6CngV+EYj18WAuap6flPmESbCEgsNi05oWmtqUiqXjbiM\nX139K/Z9ZR/3X3Y/RWVFXPrHSxn9s9F889Vvsnb/Wtrjx0dHKdMgYTr9w88cyLXAHG/7USAfZyr1\nESzZb7QSSZEk5uTNYU7eHO6//H5WFa5i4ZaFXP/49agq14y6hsuGX8acvDlkpmT6LdcwAo2fIayj\nqtqjsf2E4zuB40AN8LCq/rqJe1oIy2gRqsr6g+t57u3neHHni6zZv4ZpA6dx6fBLuWz4ZYzvOz5w\no6gaRmsQ2ByIiLwE9E08BCjwTeD39QzkiKr2bOAe/VV1v4j0Bl4CvqCqSxt5nhmI0SoUVxazaNci\nXtzxIi/seIHiymIuHX4plw6/lHnD5tE3u+/pb2IYISCwBtLkg0W24HIbB0WkH7BIVUef5jX3AMWq\n+uNGzuu//du/kZeXB0C3bt2YOHFi7fgz8Rik3/vxY0HR09j+/fffH8jya2i/ftm29v13HtvJQ397\niFWFq9iYuZEh3YYw4sQIJvSbwG0fvo1emb2adb9169bxpS99yffyas5+WP7+8WNB0RP08o
xv79q1\nC4BHH300lAbyPeCoqn5PRBYA3VX17nrXZAIRVS0RkSzgReBbqvpiI/cMRQ0k3wZ/a3XaU2t1TTWr\n969m8a7F5O/OZ/me5eR2zWXOkDluyZtDn6w+vus8W8Ki1XSeHWGtgfQA/gYMBnbjmvEeF5H+wK9V\n9QMiMhR4Chf2SgYeU9XvNnHPUBiI0bGIxqKs3b+WxbsXs3j3Yl7b/RoDcgbUmsmcIXPon9Pfb5mG\n0SChNJC2wAzECAI1sRrWH1zP4l2eoRS8Rs+Mnlw05KJaU8ntmuu3TMMAzEBqCYuBBLUqW5+w6IRg\na41pjE2HNrF492Ief/5xtmZvJSM5o7Z2MmfIHIZ1Hxa4Vl5BLtNETOfZYXOiG0aAiUiEcX3HMa7v\nOM4rO485c+aw7cg2Fu9azMs7X+a/Fv0XgnDRkItql9G9RgfOUAyjPlYDMQyfUVV2HtvJkt1LWFKw\nhMW7FlNcVezMJNcZyvi+422ueKNNsBCWhxmI0VHYc2IPrxW8xuJdi1lSsIT9xfuZlTuLi3IvYk7e\nHC7ofwGpSa03s5zReQnrWFidlsT22EEmLDohPFqbq3Nw18F8fNzH+dXVv2LL7VvY9oVtfHbiZ9lX\nvI9bn7uVnt/vySV/uIT7Ft9H/q58yqvLfdPqN6bTPywHYhghoG92X+aPccPRAxyvOM6ygmUs2b2E\nb7zyDTYc3MDEfhNrQ14zB8+ka3pXn1UbHR0LYRlGB6C0qpQ39r7Bkt1LeK3gNVYWrmRkz5FcNOQi\nLsy9kNm5s234FaNBLAfiYQZiGI6qmipW71tdm5hfvmc5vTN7Myt3FrMHz2Z27mxG9hxpLb0My4GE\njbDEQsOiE8Kjtb10pialMmPwDBbMXsBzH3+OI18/wpM3PMm0gdNYtGsRl/3fZfT5YR8+9NcP8cPl\nP+T1Pa9TVVPli9azxXT6h+VADKMTEJEI5/U5j/P6nMctk28BYO/JvSwtWMqygmU8tvExth/ZzgX9\nL2DW4FnMzp1NrDLms2oj6FgIyzAMAE5WnuSNvW+wrGAZS/csZWXhSoZ0HcLs3Nm1y5CuQyzs1cGw\nHIiHGYhhtB7VNdWsP7iepQVLa5fkSDIzBs9g+sDpzBg8gwv6X0B6crrfUo2zwAzEIywGEtQxceoT\nFp0QHq1h0Qnv1aqq7Di2gzf2vsEbe9/g9b2vs6VoC+f1OY/pg6YzbeA0pg6cyogeI9q1lhKWMg2q\nThsLyzCMNkdEGNFjBCN6jOCT4z8JQFl1Gav3reb1va/z1Nan+MYr36CkqoTJAyYzdeBUpg6cypQB\nU2w4+w6K1UAMw2hVDpYcZNW+VawsXFm7zkzJrDWTqQOnMnnAZLqkdfFbqoGFsGoxAzGM4KGqvHv8\nXVYWrqxd1h1YR27X3NpaytSBUxnfd7yN7+UDZiAeYTGQoMZC6xMWnRAerWHRCW2rtbqmmk1Fm1ix\nd0VtLWXHsR2M6zPuFFMZ0WMEEWm6u1pYyjSoOi0HYhhGqEhJSmFiv4lM7DeRm7kZgJKqEtbuX8vK\nwpU8ve1p/mvRf3G84nht2CuepLchWYKD1UAMwwgsifmUFYUrWFW4ipy0HFdDGTCVaYOmMan/JLJS\ns/yWGloshOVhBmIYHRtV5Z2j79TmUlYUrmDjoY0M7z68toYybdA0xvQeQ3LEAizNwQzEIywGEtRY\naH3CohPCozUsOiE8Wl965SW6j+7Oir0rWLlvJSv2rqCwuJAL+l9QW0uZOnAqg7sM9rUXfVDL03Ig\nhmF0WlKSUpg8YDKTB0zmdm4H3HwpqwpXsaJwBY+uf5Tbn7+diEROyaVMHjCZbundfFYfbqwGYhhG\nh0dVKThRwIrCFbWhr3UH1jEgZ0Bt/5QpA6Ywsd9EMlIy/JbbrlgIy8MMxDCM5hKNRdlStKW2w+Oq\nfavYUrSFUb1GMXXAVKYMdKYyts/YDp1PMQPxCIuBBD
UWWp+w6ITwaA2LTgiP1tbUWV5dzvqD61lV\nuKq29dfek3uZ2G/iKT3ph3Ufdsb5lKCWp+VADMMwWoGMlAymD5rO9EHTa4+dqDjB6v2rWVm4kie2\nPMGClxdQWl1aayZTBkxhysAp9Mvu56Nyf7AaiGEYxhlyoOQAqwpPHe8rOzX7lF70k/pPIictx2+p\np8VCWB5mIIZh+EF8qPt4y6+VhStZf3A9Q7sNZcrAKUwdUDfeV0pSit9yTyGUBiIi1wP3AqOBKaq6\nppHrLgfux83f/oiqfq+Je4bCQIIaC61PWHRCeLSGRSeER2tQdVbXVLPx0MZaU1m0aBGH+hxifN/x\nTOo/ifP6nMeY3mMY03sMvTJ7+aYzrDmQjcCHgF81doGIRICHgPcD+4BVIvIPVd3aPhJbn/z8fL8l\nNIsw6Qzil0d9TGfrEgadKUkpnNx2kpvn3szNk28mv2s+k2ZMYs3+Nazev5pVhat4dP2jbCnaQnIk\nmZE9R5LbNZfBXQYzsMtABuQMoF92P/pn96dfdj+yU7MDN52wbwaiqtsApOkSmQpsV9Xd3rV/Aa4F\nQm0g9957r98yTkuYdM6dOzfwXyZh0wkEWmuYdc7Jm8OcvDm116gqB0sPsv3IdgpOFLD35F52HtvJ\n0oKlHCw9yP7i/RwoOYCi9M3q60wlpz/9svrRJ6sPvbN6u3Vmb3pn9aZXZi96ZvQkKZLU5u8v6K2w\nBgJ7Evb34kzFMAyjQyAi9Mvud9pWXMWVxacYyoGSAxSVFbHh4AYOlR6iqKyIotIiisqKOFFxgm7p\n3WoNpmdGT7qnd6dbejfm5s3l6lFXt4r2NjUQEXkJSBx7WQAF/lNVn2nLZweZMFS/ITw6ITxaw6IT\nwqO1s+jMScshJy2HET1GnPbaaCzK0fKjFJUWcaj0EEfKj3Cs/BjHK463aqdI31thicgi4KsNJdFF\nZDpwr6pe7u3fDWhjiXQRCX4G3TAMI2CEMYmeSGPiVwEjRGQIsB/4KPCxxm7S0kIwDMMwzpym54ps\nQ0TkgyKyB5gOPCsi//SO9xeRZwFUtQb4AvAisAn4i6pu8UuzYRiGUYfvISzDMAwjnPhWA2kpInK5\niGwVkbdFZEEj1zwgIttFZJ2ITGxvjZ6GJnWKyBwROS4ia7zlmz5ofEREDorIhiauCUJZNqkzCGXp\n6RgkIq+KyCYR2SgidzRyna9l2hydfpepiKSJyAoRWetpvKeR6/wuy9Pq9Lss62mJeBqebuT8mZWn\nqoZmwRneO8AQIAVYB5xb75orgOe87WnAGwHVOQd42ufynA1MBDY0ct73smymTt/L0tPRD5jobWcD\n2wL6+WyOTt/LFMj01knAG8DUoJVlM3X6XpYJWr4M/F9DelpSnmGrgdR2LFTVaiDesTCRa4E/AKjq\nCnIAtEMAAAQ6SURBVKCriPSlfWmOTmi88UC7oKpLgWNNXBKEsmyOTvC5LAFU9YCqrvO2S4AtuL5M\nifheps3UCf5/Psu8zTRcg5/68Xbfy9J79ul0QgA+nyIyCLgS+E0jl5xxeYbNQBrqWFj/g1//msIG\nrmlrmqMTYIZXVXxORMa0j7QzIghl2VwCVZYikoerNa2odypQZdqETvC5TL1wy1rgAPCSqq6qd0kg\nyrIZOiEYn8+fAHfRsMFBC8ozbAbSkVgN5KrqRNx4X3/3WU+YCVRZikg28ARwp/cLP5CcRqfvZaqq\nMVU9HxgETAvCD4OGaIZO38tSRK4CDno1T6GVakRhM5BCIDdhf5B3rP41g09zTVtzWp2qWhKv+qrq\nP4EUEenRfhKbRRDK8rQEqSxFJBn3pfxHVf1HA5cEokxPpzNIZaqqJ4FFwOX1TgWiLOM0pjMgZTkL\nuEZEdgJ/Bi4WkT/Uu+aMyzNsBlLbsVBEUnEdC+u3Jnga+DTU9mQ/rqoH21fm6XUmxhZFZCquSfXR\n9pXpHk/jv0aCUJ
ZxGtUZoLIE+C2wWVV/2sj5oJRpkzr9LlMR6SUiXb3tDGAe7x1E1feybI5Ov8sS\nQFX/Q1VzVXUY7vvoVVX9dL3Lzrg8g9ITvVmoao2IxDsWxucH2SIiN7vT+rCqPi8iV4rIO0Ap8Jkg\n6gSuF5FbgWqgHLihvXWKyJ+AuUBPESkA7gFSCVBZNkcnAShLT+cs4BPARi8mrsB/4FrjBaZMm6MT\n/8u0P/CouCkdIsBfvbIL1P96c3Tif1k2ytmWp3UkNAzDMFpE2EJYhmEYRkAwAzEMwzBahBmIYRiG\n0SLMQAzDMIwWYQZiGIZhtAgzEMMwDKNFmIEYRgOISHEDxy4UkdUiUi0i1zXx2piI/CBh/6si8t9t\npdUw/MIMxDAapqEOUruBfwMeO81rK4HrWjpchYgkteR1htHehKonumH4iaoWAIjI6XrfRoGHga8A\np0weJCJDcMOI9ASKgM+o6l4R+R1QgRsZd5lXAxoKDMONT/QV3PTPV+BGd75a3ZTPhuEbVgMxjNZH\ngZ8BnxCRnHrnHgR+543M+idvP85AVZ2hql/z9ofhhnC5FjcJ0CuqOh5nNFe1oX7DaBZmIIbRBnjD\noz8K3Fnv1AzcaKgAf8SNkhrn8XrX/lNVY8BGIKKqL3rHNwJ5rSrYMFqAGYhhtB0/Bf4dyEo41lT4\nq7TefiW4ke5wA/HFiWHhZyMAmIEYRsOcbsKdps4LgKoeA/6GM5E4y4GPedufBF5rJT2G0e6YgRhG\nw2SISIGI7PHWXxKRySKyB7ge+KWIbGzktYm1jB/hEubxY3cAnxGRdbgh1e9s4DWnu6dhBAIbzt0w\nDMNoEVYDMQzDMFqEGYhhGIbRIsxADMMwjBZhBmIYhmG0CDMQwzAMo0WYgRiGYRgtwgzEMAzDaBFm\nIIZhGEaL+P82rrVwmcP3yQAAAABJRU5ErkJggg==\n", + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "glmnetPlot(fit);" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Like before, we can extract the coefficients and make predictions at certain $\\lambda$'s by using `coef` and `predict` respectively. The optional input arguments are similar to those for other families. In function `predict`, the option `type`, which is the type of prediction required, has its own specialties for Poisson family. 
That is,\n", + "* \"link\" (default) gives the linear predictors like others\n", + "* \"response\" gives the fitted mean\n", + "* \"coefficients\" computes the coefficients at the requested values for `s`, which can also be realized by `coef` function\n", + "* \"nonzero\" returns a a list of the indices of the nonzero coefficients for each value of `s`.\n", + "\n", + "For example, we can do as follows:" + ] + }, + { + "cell_type": "code", + "execution_count": 46, + "metadata": { + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } + }, + "outputs": [ + { + "data": { + "text/plain": [ + "array([[ 0.61123371],\n", + " [ 0.45819758],\n", + " [-0.77060709],\n", + " [ 1.34015128],\n", + " [ 0.043505 ],\n", + " [-0.20325967],\n", + " [ 0. ],\n", + " [ 0. ],\n", + " [ 0. ],\n", + " [ 0. ],\n", + " [ 0. ],\n", + " [ 0. ],\n", + " [ 0.01816309],\n", + " [ 0. ],\n", + " [ 0. ],\n", + " [ 0. ],\n", + " [ 0. ],\n", + " [ 0. ],\n", + " [ 0. ],\n", + " [ 0. ],\n", + " [ 0. ]])" + ] + }, + "execution_count": 46, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "glmnetCoef(fit, s = np.float64([1.0]))" + ] + }, + { + "cell_type": "code", + "execution_count": 47, + "metadata": { + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } + }, + "outputs": [ + { + "data": { + "text/plain": [ + "array([[ 2.49442322, 2.54623385],\n", + " [ 10.35131198, 10.33773624],\n", + " [ 0.11797039, 0.10639897],\n", + " [ 0.97134115, 0.92329512],\n", + " [ 1.11334721, 1.07256799]])" + ] + }, + "execution_count": 47, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "glmnetPredict(fit, x[0:5,:], ptype = 'response', s = np.float64([0.1, 0.01]))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "We may also use cross-validation to find the optimal $\\lambda$'s and thus make inferences." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 48, + "metadata": { + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } + }, + "outputs": [], + "source": [ + "warnings.filterwarnings('ignore')\n", + "cvfit = cvglmnet(x.copy(), y.copy(), family = 'poisson')\n", + "warnings.filterwarnings('default')" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Options are almost the same as the Gaussian family except that for `type.measure`,\n", + "* \"deviance\" (default) gives the deviance\n", + "* \"mse\" stands for mean squared error\n", + "* \"mae\" is for mean absolute error.\n", + "\n", + "We can plot the `cvglmnet` object." + ] + }, + { + "cell_type": "code", + "execution_count": 49, + "metadata": { + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } + }, + "outputs": [ + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYAAAAElCAYAAADtFjXiAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJztnXmYXFW1t99fCDIkhqBIwiDpEEBxIqDgEIZmCIL3UxD5\nPi92JA0IDhhzQVBQQncTrgLm4oDiFSUJuQk4c0FkipIKEgUioSFABEy6EwQCDkxJQEOyvj/qVFOp\nrqo+1V2nTu2q9T5PPXX2qV3n/Gqns9fZa6+9tswMx3Ecp/kYlrYAx3EcJx3cADiO4zQpbgAcx3Ga\nFDcAjuM4TYobAMdxnCbFDYDjOE6T4gbAqSmSNklaJukhSfdLOluS0tY1GCR9QdIjkv6n4Pxhkp6P\nfuf9km5P6P5zJJ2QxLWd5mB42gKcpmO9mR0AIGkn4DpgFNA51AtLGmZmm4d6nQr4LHCkmT1V5LM7\nzewjpb4oaSsz25ScNMcZGB8BOKlhZn8DzgA+D9kOXNJlku6R1C3p9Oi8JF0ZPW3fJunXuSdfST2S\nLpH0R+BESXtKukXSUkmLJe0T1dtJ0s+ja98j6f3R+cOip/Rlku6TNKJQZzRKWS7pQUlfiM59H9gT\nuEXS9CI/r9+oJnpi/76ku4FLJW0v6WpJd0f3/ki5dog++66kFdGoYue880dGv+EBST+StHVe+3wt\n+o33Stpf0q2SHpf06cr/1ZyGwsz85a+avYAXi5z7B/Am4HTgK9G51wFLgXHAx4CbovNjovonROUe\n4Jy8a/0GmBAdHwT8NjpeAHwgOn4z8Eh0fCPw/uh4e2BYgbYDgAeAbYERwEPAftFnq4Adi/yew4Dn\ngWXR6/zo/Bzgxrx6/wl8IjreAXgU2K5MO3wUuC06vwvwHHACsA2wJu93XwN8Ia99zoiOLwe6o9+5\nE7A27b8Hf6X7cheQU08cDbxT0v+NyqOAvYGDgZ8BmNkzkhYVfO8nANHT+weAn+XNK2wdvR8F7Jt3\nfqSk7YElwDclLQB+aWZPFlz7YOB6M3sluscvgUPIGgVR5Ek/opQL6GcFv/fDks6Nyq8D9ijTDoeS\ndZlhZk9L+m30+VuAVWa2MipfA3wO+E5U/lX0
vhwYYWYbgA2SXpE0ysxeLPEbnAbHDYCTKpL2BDaZ\n2V+jznmamS0sqPNvA1xmffQ+DHjOojmGwlsB7zWzjQXnL5V0E/BvwBJJR5vZY5X/ktisLyh/zMwe\n30Jo/HZQieNC/hm9b847BjC8D2hqfA7AqTV9HZWkNwHfB66ITt0GfE7S8OjzvfOe0k+M5gLGAK3F\nLmxmLwE9kk7Mu8e7osPbgel55/eL3vc0s4fN7DKyrpa3Flz2d8DxkraNRhgfBe4c1C/vz23AF/I0\nTcw7X6wd7gQ+Hs0R7AIcHtV/FBgXGVOATwKZKml0Ghi3/k6t2VbSMrLujo3APDP7ZvTZj4AWYFn0\nFPwscDzwC+AI4GHgCeA+4IXoO4XpbNuA/5Z0Adm/7x8DD5Lt/L8n6QFgK7Kd6eeA/5B0OLApuv4t\n+Rczs/slzSVrHAy4ysweLHHvgSisfzHwLUkPkjWMPcBHSrWDmV0vKdcOa4DfRxr/KekU4OeStoq0\n/iCGRk8F3OTIzP8GnPpH0ggzWy/pDcA9wCQzezZtXY4TMj4CcELhJkmjyU7qXuSdv+MMHR8BOI7j\nNCk+Cew4jtOkuAFwHMdpUurGAERL4p+JIiJy5/aT9Ie8ZezvSVNjOYrpj85Pi5buL5d0SVr6KkHS\n9Ejv8lzqg1CQtE9eaof7Jb0Q4G/YQdLPor+bhyW9N21NlRCFqS6TdGPaWiql1P/jUKhUf90YALLL\n5D9YcO4yoMPM9gc6gG/UXFV8+umX1Ap8GHinmb0TmJWCroqQ9HbgNOA9wETg/+TFl9c9ZvaYme0f\nLQZ7N9mFV9enLKtSvg3cbGb7AvsBK1LWUynTgUfSFjFIivVDIVGR/roxAGZ2F9ncJvlsJpsjBWA0\nULhMv24oof+zwCVm9mpU5281F1Y5+wL3mNk/LZut8k6y+WZC5ChgpZk9kbaQuEgaBRxiZnMAzOzV\nkFI1SNod+BDZtQzBUeL/cTBUqr9uDEAJzgJmSVpDdjRwfsp6KmUf4FBlsz0uqmcXVh4PAYdI2jFa\nffohssnTQuTjRLlzAmI88DdlM4cuk3SVpO3SFlUB3wTOxReZBUG9G4DPAtPNbA+yxmB2ynoqZTjZ\nbJHvA74E/DRlPQNiZn8CLgUWAjcD95NdJRsUUTrkj7Bl8rUQGE42A+n3IjfWBuC8dCXFI8pV9IyZ\ndVM+UZ5TJ9S7AZhqZv8LYGY/J5veNySeAH4JYGZLgc2S3piupIExszlm9h4zayWb1jjJ5GhJcSxw\nn5n9NW0hFfIX4Akz+2NU/jlZgxACk4CPSFpFduR1uKR5KWtyylBvBqDwqeFJSYdBdsML6r8jKtT/\nv2Rz2KDsxiRbm9nf0xBWCVGSNiTtQTb52bXpKhoUJxGe+wczewZ4Ivp7ATiSQCZUzewrZraHme0J\n/Dtwh5mdnLauQRD66CW2/rpJBSHpWrJZHt8Y+fw7yG6M8Z0owdUrZHePqktK6J8NzJG0nGwa3lD+\nM/wiyrmzEfhcSJOQANHcxVHU8d/LAHwBWBC5sVYBp6Ssp2ko9v84NyEfApXq91QQjuM4TUpNXECF\nC0OiCJPbJT2q7B6vOwx0DcdxHKe61GoOoHBhyHnAb8zsLcAdhBfe6TiOEzyJG4ASC0OOI7tvKdH7\n8UnrcBzHcbakFiOAYgtDxkTRDpjZWmDnGuhwHMdx8kjUABRZGFIKn4l2HMepMUmHgeYWhnwI2A54\nvaT/AdZKGmNmz0gaS3bP035IcsPgOI4zCMxswLUAiY4ASiwM+STwK6A9qjYVuKHMNVJ5dXRU4xod\nqemvThskq78abextX58v15/uKy5prQS+BJgs6VGyKx3rLk9+V9fQr9Hb2zv0i6RI0vqr0cal8LZP\nF9cfBjVbCWxmi4HF0fE/yK7UdBzHcVKi3nIBNRTt7e1pSxgSIesPWTu4/rQJXX9c6joVhCRLS58E\nddw0DYG3
seMkgyQs7UngZieTyaQtYUiErD9k7eD60yZ0/XFxA1CCjo60FTQ+3saOky7uAnIcx2kw\n3AXkOI7jlMUNQIKE7kcMWX/I2sH1p00S+nt7e8lkMmQyGebOndt3nOaag7rZEcxxHKeRaWlpoaWl\nBYCurq66CDX1OQDHcZwa09XVRUeCURA+BzBEOjvTVtD4eBs7Trq4AShBNfLUuB+0PEnmAvK2TxfX\nHwZuABzHcZoUnwMoeW9PU5A03sZOs1IvcwAeBeQ4jpMAvb29fSGevb29fRFAufd6wF1ACRK6HzFk\n/SFrB9efNtXQ39LSQmtrK62traxevbrv2A1AAHiemuTxNnacdPE5AMdxnIQp9PnXyxyAjwAcx3Ga\nFDcACeJ+0PQIWTu4/rQJXX9cEjUAkraRdI+k+yUtl9QRne+Q9BdJy6LXMUnqcBzHcfqTaBiomf1T\n0uFmtkHSVsASSbdEH19uZpcnef+0aW1tTVvCkAhZf8jawfWnTej645K4C8jMNkSH25A1OLlZ3QEn\nKNLE89Qkj7ex46RL4gZA0jBJ9wNrgYVmtjT66POSuiX9SNIOSeuoFM8F5LmA0sT1p0ul+usx138c\najEC2Gxm+wO7AwdJehtwJbCnmU0kaxga2hXkOE5jE8Kir2LULBWEmb0oKQMcU+D7/yHwq1Lfa29v\n72vE0aNHM3HixD7/XM5K12s5d65e9NSbfsiQySSjv7W1NfX2c/31o6eW+nPUUm8mGnlAZakmEl0I\nJmknYKOZvSBpO+A24BJgmZmtjeqcBRxoZp8o8n1PBtfAeBs7jUixRV7NuhBsF2CRpG7gHuA2M7sZ\nuEzSg9H5w4CzEtaRCoVPBKERsv6QtYPrT5vQ9ccl6TDQ5cABRc6fnOR9q4HnqUkeb2PHSRdfCVyC\naoQo5vvSQyRp/UmGgXrbp4vrDwPfD8BxHKdCQsj1HwcfASRI6H7EkPWHrB1cf9oMpD/UsM9C3AA4\njuM0KW4AEiR0P2LI+kPWDq4/bULXHxc3ACXwPDXJ423sOOniBqAEngvIcwGlietPl3z9oeb5iYNH\nATmO45ShpaWlb3K3q6uL9vb2VPVUEx8BJEjofsSQ9YesHVx/2oSuPy5uABzHcZoUNwAJ0kh+0NAI\nWTu4/rQJXX9c3ACUwPPUJI+3seOkixuAEnguIM8FlCauP11C1x8XNwCO4zhNihuABAndjxiy/pC1\ng+tPm9D1x8UNgOM4TpPiBiBBQvcjhqw/ZO3g+tMmdP1xcQNQAs9Tkzzexo6TLm4ASuC5gDwXUJq4\n/nQJXX9cEjUAkraRdI+k+yUtl9QRnd9R0u2SHpV0m6QdktThOI7j9CdRA2Bm/wQON7P9gYnAsZIO\nAs4DfmNmbwHuAM5PUkdahO5HDFl/yNrB9adN6PrjkrgLyMw2RIfbkM0+asBxwDXR+WuA45PW4TiO\n42xJ4gZA0jBJ9wNrgYVmthQYY2bPAJjZWmDnpHWkQeh+xJD1h6wdXH/ahK4/LonvB2Bmm4H9JY0C\nrpf0drKjgC2qlfp+e3t7Xy7u0aNHM3HixL7hWe4fKYlyR8fQr9fd3Z2YvlqUk9Y/dWqGTKZ+fq+X\nvQzZ/P/d3d10d3ezdu1ajjnmGADWrl1LT08POXL1ByrXQn8m2qwmpz82ZlazFzAD+CKwguwoAGAs\nsKJEfXMcx0mLzs7OsuVK6/SuWmWdbW32yZYW62xrs95Vq6qo9jWivnPAPjnREYCknYCNZvaCpO2A\nycAlwI1AO3ApMBW4IUkdjuM4abO6p4crJk+ma+VKRgDre3vpuPtupi1cyLjx41PRlPQcwC7AIknd\nwD3AbWZ2M9mOf7KkR4EjyRqFhqNwSBgaIesPWTu4/rRJQv/cGTP6On+AEUDXypXMnTGj6veKS6Ij\nADNbDhxQ5Pw/gKOSvLfjOE49sfnJJ/s6/xwjgM1PPZWGHMBXAidKbrImVE
LWH7J2cP1pk4T+Ybvt\nxvqCc+uBYbvuWvV7xcUNQAk8T03yeBs7zcTHzp/BlB2G9xmB9UDHhAm0z5yZmiY3ACXwXECeCyhN\nXH+6JKF/xk238rsTDmZWWxsnt7Qwq60t1QlgqME6AMdxnGbnuZc3c8OGr/GL037LRye9g66uLjrq\nYFNsNwAJ4n7Q9AhZO7j+tKmW/tU9PcydMYM/37ScD4zZiwM+UzgNnC5uABzHcRKgX9z/C9AxeTLT\nFi5MW1ofPgeQIO4HTY+QtYPrT5tq6K/HuP9C3ACUoA7ccw2Pt7HTyNRj3H8hbgBKUI0QRfeDlifJ\nMFBv+3Rx/fUZ919I7DkASdvba7n9Hcdxgqe3t5fe3t6+41wmzZaWlsqyahZh7EemcvLPMsz7V3Yk\nkIv7nzZzJnPnzRvStavFgCMASR+Q9Ajwp6i8n6QrE1fWALgfND1C1g6uv1a0tLTQ2tpKa2srq1ev\n7jvOGYWh0PWHK1k/vb2u4v4LiTMC+CbwQbIZPDGzByQdmqgqx3GcgJn/2/t4Zut7ufaCa3nDqO3q\nJu6/kFhzAGb2RMGpTQloaTjcD5oeIWsH1582Q9X/xZs6OeFN5/GGUdtVR1BCxDEAT0j6AGCStpZ0\nDtkNXRoaz1OTPN7GTiOxuqeHrilT6L5qNqNv+y0zjp2ctqQBiWMAPgOcCewGPAlMjMoNjecC8lxA\naeL606VS/blFX+csWMD1T61h2YqXmXf8/2F13haS9ciABsDM/mZmbWY2xsx2NrMpZvb3WohzHMcJ\ngRAWfRUjThTQNZJG55V3lDQ7WVmNQbP7QdMkZO3g+tOmUv0hLPoqRhwX0LvM7PlcwcyeA/ZPTpLj\nOE5YhLDoqxhxDMAwSTvmCpLegCeRi0Wz+UHriZC1g+tPm0r1H3nGTE4eNqGuNnuJQ5yO/L+AP0j6\nGSDgROA/41xc0u7APGAMsBm4ysyukNQBnA48G1X9ipndWqn4JKnDkN2Gw9vYaQQ2m3HR13fjgC8u\nZNZTM1i5ZAkTJk1i2syZdbXoqxgDGgAzmyfpPuDw6NQJZvZIzOu/CpxtZt2SRgL3ScrlQr3czC6v\nXHJt8FxAngsoTVx/usTRn8v1/8ffLubJEXO48tsZ9tpnft0u+ipGXFfOn4DncvUl7WFmawb6kpmt\nBdZGx+skrSAbTgrZ0YTjOE7NKJf7pxL65foHOj5UX7n+4xAnCmga8AywELgJ+HX0XhGSWsiuIbgn\nOvV5Sd2SfiRph0qvFwLN5getJ0LWDq4/KUrl/ik0AAPpDzXss5A4I4DpwFuGEvsfuX9+DkyPRgJX\nAheZmUm6GLgcOK3Yd9vb2/v+cUaPHs3EiRP7hme5f6R6LXd3d9eVnmbT72Uvlyv39PSQyWRKljOZ\nDD15C7nyy5uffJKl0fnW6H0psOrhh9lz771r/nsymQxz584FKhzNmFnZF7AIGD5QvTLfHw7cSrbz\nL/b5OODBEp+Z4zhOEnR2dpYtl6vz6aOPt3VglvdaB9bZ1lbRdZIi6jsH7J/jhIGuAjKSzpd0du4V\n38QwG3jEzL6dOyFpbN7nJwAPVXC9muB5apLH29gJkeefh1tW/BfTd94juLDPQuIYgDVk/f+vA16f\n9xoQSZOANuAISfdLWibpGOAySQ9K6gYOA84alPoE8VxAngsoTVx/uhTqzyV6WzlnLse/fQpHHiFm\n3J2p61z/cYgTBjro/6ZmtgTYqshHdRXz7ziOU4r+ET+9XPi7u4GFdMwPK+yzkDhRQG+S9A1JN0u6\nI/eqhbjQyU3WhErI+kPWDq4/bfL1F4v4uWhVeBE/xYjjAlpAdh3AeKAL6IW+CXDHcZyGJtREb3GI\nYwDeaGZXAxvNbLGZnQockbCuhqDR/KAhEbJ2cP1pk68/1ERvcYhjADZG709L+jdJ+wNvSFBTXRCo\nSy8ovI2dEGifOZNP7fiG4CN+ihFnId
jF0UrdLwJXAKOow6idauO5gDwXUJq4/nTJ1z9mtzfzsxO3\nZrueY3n1zyuCSfQWhzhRQLm0Dy/wWkI4x3GcpqDrupvYfvsJzF54c9ARP8Uo6QKS9KXo/QpJ3yl8\n1U5iuDSSHzQ0QtYOrr8a9Pb2kslk+tIk5I5zyeDKka//qvuvZMpbP5ec0BQpNwJYEb3/sRZCHMdx\nqklLS0tfXpyuri7a29srvsbf/j6aV3rfwyX/eWJ1xdUJJQ2Amf0qOlxuZstqpKehaCQ/aGiErB1c\nf9qMHzeOrilT6PnVCibvvi/PPfsUoxrA519InCig/5K0QtJMSe9IXFGd4Hlqksfb2KlHcit/z1mw\ngJteXMaCRxZwxeTJrM7LDNooDGgAzOxwspO/fwV+IGm5pAsSV5YyngvIcwGlietPj7kzZnBkA+T6\nj0OcEQBmttbMvgN8BugGLkxUleM4TkpsfvJJtis41ygrfwuJkwtoX0mdkh4iuw7g98DuiStrAEL3\ng4asP2Tt4PrTZNhuu3FgwblGWflbSJwRwGyy+wEfbWatZvZ9M3s2YV2O4zipcHLXRUzZYZuGXPlb\nSJw5gPcDVxFzDwDnNUL2g0LY+kPWDq4/TX6ybCU3feCNXHbSJ4LO9R+HOC6gD5P1+98alSdKujFp\nYWnTQIv96hZvY6ce+frvZ3LsW9rpunYBE9rb6Zg/vyE7f4jnAuoEDgKeBzCzbrKpoRsazwXkuYDS\nxPWnw3duWMyGYU/x00sTDFGrI2JlAzWzFwrOWRJiHMdx0qQrM5Mp477Ctq+LkyczfOIYgIclfQLY\nStLeknKRQM4AhOwHhbD1h6wdXH8tye33+8h/X8vYnzzPuZMnBaV/KMQxANOAtwP/BK4DXgT+I87F\nJe0ebSH5cLSA7AvR+R0l3S7pUUm3RemmHcdxakr+qt+frH2Me5++j9kf/jfWPv102tJqQpx00BuA\nr0avSnkVONvMuiWNBO6TdDtwCvAbM7tM0peB84HzBnH9uiZUP2iOkPWHrB1c/2Do7e3ty/TZ29vb\nlwgu916MYvv9dq1cyaxf/xpOOikxfePGjesbZeQnras1ZQ2ApKnAdOAt0akVwHfMbF6ci5vZWmBt\ndLxO0gqyi8iOAw6Lql0DZKgzA9DZ6blqksbb2Kkmg8n+Wcv9ftPs6EtRbj+AqWRdPV8EdgV2A74E\nTJf0yUpvJKkFmAjcDYwxs2egz0jsXOn1ksZzAXkuoDRx/bWh1H6/q7faKg05NafcCOCzwEfNrDfv\n3B2SPgb8GPifuDeJ3D8/B6ZHI4HCKKKSUUXt7e19VnP06NFMnDixb3iZ+yOr13J3d3dd6ak3/ZAh\nk6mf3+vl5ir39PTwwWM/xCnX38GcDU+zFHgZ+O2ECRxz6qlkMhl68jKAFivnU6pci9+TiTa9gfIu\nr36YWdEX8MhgPitSdzjZRWTT886tIDsKABgLrCjxXUuLFG/dNHgbO0nR2dlZtpw717ng1/a6U/ey\nGSedZJ9sabHOtjbrXbWq4utUWidpor5zwP65XBTQy4P8rJDZkcH4dt65G4H26HgqcEMF13Mcxxky\nmzZv5tKlFzD94Eu56NprG37VbzHKGYB9JT1Y5LUceGuci0uaBLQBR0i6X9IySccAlwKTJT0KHAlc\nMtQfUo8UDglDI2T9IWsH158kubj/ZVfNZtwtq/nsoRP71aln/dWk3BzAvkO9uJktAUrNphw11Osn\nieepSR5vY6fW5OL+c6Gf69dCxwePZtrChWlLS4VyewKvrqWQesNzAXkuoDRx/clQMu5/xgzYe+++\nevWqv9o0R8ILx3Ecysf9D8szANWgHhd+FRJrS0hncITuRwxZf8jawfUnRam4/8Ldvqqhv6WlhdbW\nVlpbW2lvb+87rpfOH9wAOI7TRLRdOJP24ROaYrevOAzoAooieTqBcVF9kY0x3TNZaeETuh8xZP0h\naw
fXH4fB5P65867xPH3gQmbtOYOVS5YwYdIkps2c2S/0M/T2j0ucEcDVwOXAwcCBwHui94bGc9Qk\nj7exMxTyXSyrV68e0MWycSNcfDF8/dLxdMyf35Rx/4XEMQAvmNktZvasmf0990pcWcp4LiDPBZQm\nrr/6TP/BL3j9u3/FIYcMXLce9SdBnCigRZK+AfyS7J4AAJjZssRUOY7jVIHVPT3MnTGDP9+1hHu3\n+xufOOuHaUuqK+IYgPdG7+/JO2fAEdWX01iE7kcMWX/I2sH1V4N+i76AjssuYPXk9w7o9qkH/bUg\nzoYwh9dCiOM4TjUpt+irY/78NKXVDQPOAUjaQdLlkv4Yvf7Lt3CMR+h+xJD1h6wdXH81GMpmL5Xq\n7+3tJZPJkMlk+hZ9ZTKZviileiWOC2g28BDw/6LyJ4E5wAlJiaoHPE9N8ngbO0mSW/SVbwSKLfqq\nBvW0urcS4kQBTTCzDjNbFb26gIZfA+C5gDwXUJq4/qHTPnMmX95tcIu+6kF/LYgzAnhZ0sFmdhf0\nLQyrZD8Ax3GcmjNu/HhWvXMhp+8yg+F/K73oq5mJMwL4LPA9Sb2SVgPfBT6TrKzGoB78oEMhZP0h\nawfXX0i+j33u3LmxfOyrVsG9S8dz1aLKF32F3v5xiRMF1A3sJ2lUVH4xcVWO4zh55PvYu7q6aG9v\nH/A73/kOnHYajByZrLaQiRMFND3q/F8CLo929To6eWnhE7ofMWT9IWsH1z9UXnllG+bNg2nTBvf9\ntPXXijguoFOjp/6jgTeSjQJqyC0c8/E8NcnjbexUm9x2j/fP+R77vvEQNm3sSeQ+oYZ9FhLHACh6\n/xAwz8wezjvXsHguIM8FlCauv3JyK3/PWbCAW5/5C7f/+S6umDyZ1T2VG4GB9IeQ6z8OcQzAfZJu\nJ2sAbpP0emBznItLulrSM5IezDvXIekvkSspt0m84zjOkCi18nfujBlpyqpr4hiA04DzgAPNbAOw\nNXBKzOvPAT5Y5PzlZnZA9Lo15rWCI3Q/Ysj6Q9YOrn8wDGXlbyGht39c4hiA9wOPmtnzkqYAFwAv\nxLl4tHbguSIfNbwLyXGc2hJ3u0fnNeIYgO8DGyTtB3wRWAnMG+J9Py+pW9KPGjmvkPtx0yNk7eD6\nB8OUjpl8cuSoqmz3GHr7xyXOSuBXzcwkHQd818yulnTaEO55JXBRdM2Lye42VvJ67e3tfRMro0eP\nZuLEiX3Ds9w/UhLljo6hX6+7uzsxfbUoJ61/6tQMmUz9/F4vh1HOUfj5D69ezZ92+2++8e5fs+r3\nSxi2114cc+qpfYu/Cuv39PSQyWRKljOZDD09PX1bT3Z3d/Pyyy/3XWft2rWMHTs29fZobW0lEy2Q\ng/JbYvbDzMq+gMXA+cBjwFiyo4blA30v7/vjgAcr/Sz63BzHaT56enps0aJFtmjRIpszZ07fcU9P\nj3V2dvarf8EFF9n48WaLF2fLhXWKfWcwdUIh6jsH7J/jjAA+DnwCOM3M1kraA/hGfBODyPP5Sxpr\nZmuj4glkM406juP0UenK3+7u/dl7bzj00OS1NRIDzgGY2Vozu9zMfheV15hZrDkASdcCvwf2kbRG\n0inAZZIelNQNHAacNQT9dU3hkDU0QtYfsnZw/XFZ3dPDhSdNYd0tl7KPpgwq5r8Yobd/XEqOACTd\nZWYHS3qJ7BaQyn83s1EDXdzMPlHk9JzBinUcx8mRv+XjRcD62+6iY/LdTFu4MG1pwVDSAJjZwdH7\n62snp7HITdaESsj6Q9YOrj8O5bZ8ZO+9B33dXDqHTF6aBwh305dyxJkDIAoBPSQq3mlmD5ar3wh0\ndnqumqTxNnaGQrmFX8OGYAAasaMvRaxsoMACYOfotUDSIHPshYPnAvJcQGni+gfmHyNGJ7bwK/T2\nj0vcVBDvNbMLzexC4H3A6cnKchzHKc+Nu8BpO46uysKvZiWOC0jA
przyJjyVQyzcj5seIWsH1z8Q\n837zR/6y4z2c/Zs7mXX5paxcMrgtH3OLvIB+/v5mII4BmAPcI+n6qHw8cHVykhzHcUpjBt+7dAz/\ncfx8DjrgnRw0fz5dXV10dHRUfK1m8vcXI846gMvJZv/8R/Q6xcy+lbSwRiB0P2LI+kPWDs2lP+5+\nv7nNXh7xkE62AAAV70lEQVS68qdsXno+Z34wuc3dQ2//uJRbB7At2c3f9wKWA1ea2au1EpY2g3iY\ncCrE29iBeKt+82P+RwDreYSOY7Ix/5W4fJwtKTcCuAZ4D9nO/1hgVk0U1QnVCE90P255kgwB9bZP\nl2rrr/VmL6G3f1zKzQG8zczeCdmdvYB7ayPJcRxnS6q12Uu5Sd9mnAsoNwLYmDtoJtdPNQndjxiy\n/pC1g+svpFqbvcTdyzf09o9LOQOwn6QXo9dLwLtyx5JerJVAx3GcjQd/iCk7DPeY/ypTLhfQVrUU\n0oiE7kcMWX/I2sH15/OvTcasxy7mtK9ewawH7hp0zH8lhN7+cYmzErgp8Rw1yeNt7JQjF/b5x6uu\nYp+bn+fcE46mY/58JrS30zF/vkf/VAE3ACXwXECeCyhNGll/nLj/XNjnOQsWcNszT3P3o0/zvQ8e\nXVG+//z75CZ8i60vqFR/IxErG6jjOE61iBP3X41Uz80a2VMJPgJIkND9iCHrD1k7uP5qhX0OltDb\nPy5uABzHqTs27VydsE+nPG4AEiR0P2LI+kPWDq5/7YiZfGbkhNTCPkNv/7gkagAkXS3pGUkP5p3b\nUdLtkh6VdJukHZLUMFg8T03yeBs7xbj3Xrjp5vF8cfFCZrW1cXJLC7Pa2jzvTwLIzJK7uHQwsA6Y\nZ2bvis5dCvzdzC6T9GVgRzM7r8T3LUl9juOkSy6N8+qeHubOmMGf71rC8n/uwSlfmsP0s/bcok7h\ndwqvM3Xq1L4In97e3r4J4GacDJaEmQ24b0uiUUBmdpekcQWnjwMOi46vATJAUQPgOE7j0z/TZy8X\nfvdoVh9f2RN/M3b0QyWNOYCdzewZADNbS3af4YYkdD9iyPpD1g6Noz9OzH+xkM+LViWX6TMOobd/\nXOphHUBZH097e3ufVR89ejQTJ07sC9HK/SPVa7m7u7uu9DSbfi/XV7m9vb0vAVvu856eHsZFIZ/Z\n2tBK1gisevjhLTriwk457d9TT+VMZGChwu0szSzRFzAOeDCvvAIYEx2PBVaU+a45jtMYdHZ2Fj3X\n2dZm67I7Pfa91oF1trUV/V6p6zivEfWdA/bPtXABiS03kb8RaI+OpwI31EBDxXiemuTxNnYA9j1u\nBlNGbeeZPlMg6TDQa4HfA/tIWiPpFOASYLKkR4Ejo3Ld4bmAPBdQmjS6/lyitz/PvoZLTp3Je865\nteKQz6Hk+hmq/kYh6SigT5T46Kgk7+s4Tv3SP+qnh45rsvv7zp03r1+IZyk86mfo1MMkcMOSm6wJ\nlZD1h6wdwtWfv+Xi3Llzt4jFz1GNRG9JE2r7V4obAMdxqkacTJ/lEr0NqxMD0Cx4LqAECd2PGLL+\nkLVDY+t/5Y07132it9DbPy4+AiiB56lJHm/j5uNfGzcxe9Rf6Rk9ijnPvxjNAWSjfqbNnMncefOK\nfi/ftZSb8AWfBxgqbgBKUI0QxdD9iEnrTzIM1Ns+XVpbW1m8eDFAX56flUuWcNjPf8q/DhzJRX9Y\nyqyLL4q9v2+tO/rQ2z8ubgAcx0mM/hE/cP5LLWw3Y2s65s8vmtjNqR0+B5AgofsRQ9YfsnYIQ3+5\nPD85/cUifr6+ujfVPD9xCKH9q4GPABzHGRTlIn5y/vq0t3Z0yuMGIEFC9yOGrD9k7dAY+hcvXswr\no7NbO+YbgTgRP2lP+obe/nFxA1CCzk7PVZM03saNSW7S9/Hf/YEVz72D3lFvZvaLT/SL+CmHR/fU\nBp8DKIHnAvJcQGkSqv7cpO9B
CxYwf80q7nzpRsaOFhccd1xQWzuG2v6V4iMAx3Fike+WKdxyMUdu\n0ndpVB4BfG3NGmYdcgij29s94qfOcAOQIKH7EUPWH7J2qE/9cdI8vLI66+ppzTs3UJqHtP39xajH\n9k8CNwCO4wya/EVeX17+CNc/+RAXUNmkr/v708PnABIkdD9iyPpD1g5h6M/5+89ZsIB5vb1c+Iuf\nst/f/8VZu+/OLVGdUDd3CaH9q4GPAErgrsrk8TauX8r5+3PHxRZ5zX1xHZ2tR/CTt76V6/7851hp\nHpz0cANQAs8F5LmA0iRt/UNJ67z9Sy8x9447SqZ5qEeffyFpt3+tcAPgOE5s8n3+z6yDFcC+eZ/H\nWeRVTx19s5OaAZDUC7wAbAY2mtlBaWlJikwmE/STRMj6Q9YOtdcfJ8SzWGK3z201nPM2vcq+bLnI\nK3Qfeuh/P3FJcwSwGWg1s+dS1OA4DvFcPsV8/lduepWTW1oYAVv4+3tWr66RcmcopGkARINHIYX+\nBBGy/pC1Q33oz3f3dD72GMuW3FPU5/+O8eMZdthhW/j7c7mAQvD3F6Me2r8WpGkADFgoaRNwlZn9\nMEUt/fA8NcnjbVy/9HP39PZyytaq2Odf7x19s5PmE/gkMzsA+BBwpqSDU9TSD88F5LmA0iRp/cVy\n+f/4uus49/jjWTl3LmcfcQSnFbh75mw0vjxyZN9+vuVi/L39wyC1EYCZPR29/1XS9cBBwF2F9drb\n2/ueIEaPHs3EiRP7hme5f6R6LXd3d9eVnnrTDxkymfr5vc1Ubmlp6XPNrF69msMPO4zvt7XR/tRT\nHEu2cz8V+Cjw72RZCmz/5jcz64ADWLlkCcP22otjTj21L8Y/k8mwdu1axo4dS3d3Ny+//DLf+ta3\nmDhx4hb3q4ff32jlTGTIgYpGXDKz2JWrhaTtgWFmtk7SCOB2oMvMbi+oZ2noy94bUrp10+BtXBsG\nWtTV1dUFjz/OOQsW9EvhMAvoyC+3tRXdytG3dqwvJGFmGqheWiOAMcD1kizSsKCw83ccpzqUivBZ\n3dND15QprFyyhJc2bOBvbJnDZwSwMTqOm8ffCYtU5gDMrMfMJprZ/mb2TjO7JA0dSZMbooVKyPpD\n1g7J6y/M4zP/2Wf5NpAfvLkeWNHSUjKPf/48Qi7CJ1OwJ3CohK4/Lr4SuAQ+mk0eb+NkKObyWfv0\n09z3k5/wzAMP0PX44zy/bh0XF0zyzgQuid5zT/yXL1zI3Hnzirp3ykX45O7v1DepzAHEJc05AMdp\nBLq6umg/+eR+K3hP32o4X9/0KuMK6p88Zgxstx0TJk2iPVrUVcy/7z7/+qbe5wAcx6kCpSZ4h2+1\nFb/9wQ9YuWQJZ8+dy8W9vVs87f9w06t9T/s51gMTjjoK9t472CRuTmU09ErctAndjxiy/pC1Q3z9\nLS0tjB83jsU/+hF3dHWx+Ec/Yuthw/jfU07p8+/P6+3larb0748AVm27bayY/vx7tba20traSnt7\ne99xsc6/Wdo/dHwE4DgBUfjEv+022/C7c87hsqee6luxO+2GGzh33botnvi76B/SuevRRzPr9a9n\n5ZIl/fL2+9N+c+BzAI4TGPk5etYDF/f29kvPkN/Z57gAuJi8kM4oqsdj+hsPnwMYIp6nJnm8jcvT\n29vL3X/4A3defTWvPvkkw3fbjX0//GH+fPnlfG3Nmr4J3Q5gGvRN6ObH7+foC+mEort0+RN/c+Ij\ngJL3Hvoq1UzgOcWT1p/kSuAQ2z6/w1/z2GO86c1vZtPjj/P9v/61r7P//IiRfGn9urJP/OuBk0aO\n5LrIDZT/xJ8L6Yyz5eNQCLH98wldv48AHKeOKfd0/401a1gKHPjEE8yAvhW6I4Dvrl/Xz71TbMXu\nl2fPZtZVVxX174M/2TtZfARQ8t6epyZpmq2Nc777zU8+yYZRo3jh/vv55hNP9D2lTxs5knPXlX
+6\nh9d8+fl18jdlac/r7Lu6upg6dWqiT/tO/eEjAMdJmfwO/2/Dh/PSAw9s4c4pfLq/Yl3xp/vNeeX1\nwIMjR7K+wL1zeYF7J+fDHzduXF/nnwvjdJwcbgASJHQ/Ysj6a609v7MftttuvP3DH2bR2We/Fp5J\n/w5/Jls+3ee7cjJAa/S9nAEo5t4Zs99+vOfjH6dn9eq6mrwN+W8HwtcfFzcAJfAouOQJpY0LO/fc\nYqlyT/ef++UvOe/ll/vl2ins8Es93efKX9ljD4btvz8nP/AAEyZN4vhPf5qNmzZx2Kc+xbijjtrC\nndMMHZZTXXwOwGkqCjvzo844g99cdVXJzr2Yr/5Lu+7KSxs39nPnTOe1UMxSsfgdZBdl5eoUJl/7\n6OzZ/Cbv6f7dH/84Y3fZxX33TkXEnQNwA+AEQaUdd7E6R51xBtefeuoWSdHOHD6cL7/6KvtSWede\nLI9OnMna/A7/7DFjeHnPPdn2uecYvttuTDzxRHYeO5bRo0d7h+8MiaYzANXoIKpVJ1de9dBD7PmO\nd9T03tX8DT+cOZNxr76aur5iT+GFHfdX9tiDl8366twC/KSgzpnbb8+XN2wYMIY+Tuee/yRf7Fyx\nWPzCDv/Q007jfe9/f8lcOiG7dFx/usQ1AJhZ3b6y8gamd9Uq++KECbYuG1Vo68CmDh9uj+SVv7DH\nHnb6m99ckzq58qIU7l3N3zC3jvSdBdYblXPnOgvKF+SVF5Wok1/OvS4coFx4rvBehefWgX16zBj7\n2kUX2WePPNJOf+tb7bNHHmnXXXut9fT0xPqbXrRoUax69YrrT5eo7xy4j41TKa1XXAPQ2dbW11nE\n7SCSrpPmvZvhNwym4zawrw7y3vmd+6d22sna3vjGLQzUGWPG2BlHHWUXHn64dba1We+qVZX9j3Wc\nKhLXADREFNBzK1ZssZcp9I+wGEH/3Nfl6nTSQSddg77OUO5dL3WSvvdQ2nh9QZ31BZ+XqrNs221Z\n/8orr0XrbLUV523a1Pd5zpW0Ps/dlIvE6XjxRYbtuisXRC6qWTNmsPmppxi26658pWClreOEQGoG\nQNIxwLfI/h+92swuHey1dtx3X9YvW7aFERhsB5Gr00UnnXQN+jrDKB7LXS19tahTuD9ste9daRvn\nx8Pn5gBy5cKOO38OIFenY8IEvhrF0Oc67jPOOIOf5pXPLtK5n12ic++YP7/fuWoRug/a9YdBKgZA\n0jDgu8CRwFPAUkk3mNmfBnO99pkz6bj77qLRHVD6ya5cHYZwnVz5NuDAQdy7HuqcOXw4YxO+dyVt\nXPgUfvoAHfc9L7zAjG9+c4s6uXw4kw49dIu/n8IyJNu5x6G7uzvoDsj1h0FaI4CDgMfNbDWApB8D\nxwGDMgDjxo9n2sKFWzy1DdRBDFSHBTCrrW1Q18mV71iyhBcmTar43vVQ5/QzzuCrn/88HTvtlNi9\nK2njYk/h5Truzs5OJh16aNE6IfD888+nLWFIuP5AiDNRUO0X8DHgqrzyFOA7RepVf3YkJtW4dUdH\nx9AvkiJJ60/yn9fbPl1cf7oQcxLY9wROkFwSrlAJWX/I2sH1p03o+uOSykIwSe8DOs3smKh8HlmL\ndWlBvdqLcxzHaQCsXlcCS9oKeJTsJPDTwL3ASWa2ouZiHMdxmpRUJoHNbJOkzwO381oYqHf+juM4\nNaSucwE5juM4yRHMJLCkL0raLOkNaWupBEkXSXpA0v2SbpU0Nm1NcZF0maQVkrol/ULSqLQ1VYKk\nEyU9JGmTpAPS1hMXScdI+pOkxyR9OW09lSDpaknPSHowbS2VIml3SXdIeljScklfSFtTJUjaRtI9\nUV+zXFLHQN8JwgBI2h2YzJaLU0PhMjPbz8z2B35N/xTx9cztwNvNbCLwOHB+ynoqZTnwUWBx2kLi\nkrdI8oPA24GTJL01XVUVMYes9hB5FTjbzN4OvB84M6S2N7
N/AodHfc1E4FhJB5X7ThAGAPgmcG7a\nIgaDma3LKxamtKlrzOw3ZpbTezewe5p6KsXMHjWzx4GB0+LWD32LJM1sI5BbJBkEZnYX8FzaOgaD\nma01s+7oeB2wAtgtXVWVYWYbosNtyM7xlvXx170BkPQR4AkzW562lsEi6WJJa4BPABemrWeQnEo2\nxY6TLLsBT+SV/0JgnVAjIKmF7FP0PekqqQxJwyTdD6wFFprZ0nL16yIbqKSFwJj8U2Qt1wXAV8i6\nf/I/qyvK6P+qmf3KzC4ALoj8udOAztqrLM5A2qM6XwU2mtm1KUgsSxz9jlMJkkYCPwemF4zg655o\nxL5/NF/3v5LeZmaPlKpfFwbAzCYXOy/pHUAL8IAkkXVB3CfpIDN7toYSy1JKfxGuBW6mjgzAQNol\ntQMfAo6oiaAKqaDtQ+FJYI+88u7ROacGSBpOtvP/HzO7IW09g8XMXpS0CDgGKGkA6toFZGYPmdlY\nM9vTzMaTHQ7vX0+d/0BI2iuveDxZv2IQRCm7zwU+Ek0whUzdjRxLsBTYS9I4Sa8D/h24MWVNlSLC\nae9CZgOPmNm30xZSKZJ2krRDdLwdWc9J2QSbdW0AimCE94d1iaQHJXUDR5HdXzwUrgBGAgslLZN0\nZdqCKkHS8ZKeAN4H3CSp7ucwzGwTkFsk+TDw45AWSUq6Fvg9sI+kNZJOSVtTXCRNAtqAI6JQymXR\nQ1Ao7AIsivqae4DbzOzmcl/whWCO4zhNSmgjAMdxHKdKuAFwHMdpUtwAOI7jNCluABzHcZoUNwCO\n4zhNihsAx3GcJsUNgBM0kl4a4vd/FuV9QVJPtdONS1oUJxV1nHtLWphb6OM41cANgBM6g17IIult\nwDAz6x3qtapAnHvPA85MWojTPLgBcBoGSd+INsJ4QNL/i85J0pWSHpF0m6RfSzoh+kobkJ/vpd8q\nc0kHSvq9pPsk3SVp7+j8VEnXS7pd0ipJZ0o6K1o9+ntJo/Muc3K0svRBSQdG339DpGe5pB/m3zu6\n7tLos0/lXedXwElVaSzHwQ2A0yBI+hjwLjN7J9kcKN+QNAY4AdjDzN4GnEx2o48ck4D7Brj0CuBg\nM3s32c18vp732dvJ5nc6CPhPYJ2ZHUB274ST8+ptF23ScSbZXDNE1/pdpPd6tkwAd4qZHQgcCEyX\ntCOAmT0PvC5XdpyhUhfZQB2nCkwCrgMws2clZch2zAcDP4vOPxNlSMyxC/DXAa47GpgXPfkbW/6f\nWRRtwLFB0vPATdH55cA78+rldP1O0usjP/6hZHcrw8xulpS/icp/SDo+Ot4d2Bu4Nyr/FdiVQDdd\nceoLHwE4jUpuX4ByvAxsO0CdmcAd0ZP6hwvq52dItbzyZrY0FIU6iu0KJwBJh5FNvf3eaCvO7oJ7\nbhvpdpwh4wbACZ2c7/x3wMejHZHeBBxC9ql5CXBiNBcwBmjN++4KID9dd/71cozitXz8g81s+XEA\nSQcDL5jZS8CdZOcgkHQs2ZEGwA7Ac2b2z2g/2vcVXGsM0DtIHY6zBe4CckLHAMzseknvAx4g+4R9\nbuQK+gXZJ+qHyW61eB/wQvTdXwOHA3fkXesBSRYd/xS4jKwL6IKoflkdJc6/ImkZ2f9vOSPSBVwn\n6d/Jpk9eE52/FfiMpIeBR4E/5C4k6d3A3Xn7NDvOkPB00E7DI2mEma2P4uzvASZFxmFbsp3/JAvg\nP4KkbwE3mNmiASs7Tgx8BOA0AzdFYZlbAxfldpQzs1ckdZDddP0vaQqMyXLv/J1q4iMAx3GcJsUn\ngR3HcZoUNwCO4zhNihsAx3GcJsUNgOM4TpPiBsBxHKdJcQPgOI7TpPx/CyuTbnYl7h4AAAAASUVO\nRK5CYII=\n", + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" 
+ } + ], + "source": [ + "cvglmnetPlot(cvfit)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "We can also show the optimal $\\lambda$'s and the corresponding coefficients." + ] + }, + { + "cell_type": "code", + "execution_count": 50, + "metadata": { + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } + }, + "outputs": [ + { + "data": { + "text/plain": [ + "array([[ 2.72128916e-02, 1.85696196e-01],\n", + " [ 6.20006263e-01, 5.75373801e-01],\n", + " [ -9.85744959e-01, -9.32121975e-01],\n", + " [ 1.52693390e+00, 1.47056730e+00],\n", + " [ 2.32156777e-01, 1.96923579e-01],\n", + " [ -3.37405607e-01, -3.04694503e-01],\n", + " [ 1.22308275e-03, 0.00000000e+00],\n", + " [ -1.35769399e-02, 0.00000000e+00],\n", + " [ 0.00000000e+00, 0.00000000e+00],\n", + " [ 0.00000000e+00, 0.00000000e+00],\n", + " [ 1.69722836e-02, 0.00000000e+00],\n", + " [ 0.00000000e+00, 0.00000000e+00],\n", + " [ 3.10187944e-02, 2.58501705e-02],\n", + " [ -2.92817638e-02, 0.00000000e+00],\n", + " [ 3.38822516e-02, 0.00000000e+00],\n", + " [ -6.66067519e-03, 0.00000000e+00],\n", + " [ 1.83937264e-02, 0.00000000e+00],\n", + " [ 0.00000000e+00, 0.00000000e+00],\n", + " [ 4.54888769e-03, 0.00000000e+00],\n", + " [ -3.45423073e-02, 0.00000000e+00],\n", + " [ 1.20550886e-02, 9.92954798e-03]])" + ] + }, + "execution_count": 50, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "optlam = np.array([cvfit['lambda_min'], cvfit['lambda_1se']]).reshape([2,])\n", + "cvglmnetCoef(cvfit, s = optlam)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The `predict` method is similar and we do not repeat it here." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Cox Models" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The Cox proportional hazards model is commonly used for the study of the relationship beteween predictor variables and survival time. 
In the usual survival analysis framework, we have data of the form $(y_1, x_1, \\delta_1), \\ldots, (y_n, x_n, \\delta_n)$ where $y_i$, the observed time, is a time of failure if $\\delta_i$ is 1 or right-censoring if $\\delta_i$ is 0. We also let $t_1 < t_2 < \\ldots < t_m$ be the increasing list of unique failure times, and $j(i)$ denote the index of the observation failing at time $t_i$.\n", + "\n", + "The Cox model assumes a semi-parametric form for the hazard\n", + "\n", + "\n", + "$$\n", + "h_i(t) = h_0(t) e^{x_i^T \\beta},\n", + "$$\n", + "\n", + "\n", + "where $h_i(t)$ is the hazard for patient $i$ at time $t$, $h_0(t)$ is a shared baseline hazard, and $\\beta$ is a fixed, length $p$ vector. In the classic setting $n \\geq p$, inference is made via the partial likelihood\n", + "\n", + "\n", + "$$\n", + "L(\\beta) = \\prod_{i=1}^m \\frac{e^{x_{j(i)}^T \\beta}}{\\sum_{j \\in R_i} e^{x_j^T \\beta}},\n", + "$$\n", + "\n", + "\n", + "where $R_i$ is the set of indices $j$ with $y_j \\geq t_i$ (those at risk at time $t_i$).\n", + "\n", + "Note there is no intercept in the Cox model (it's built into the baseline hazard, and, like it, would cancel in the partial likelihood).\n", + "\n", + "We penalize the negative log of the partial likelihood, just like the other models, with an elastic-net penalty.\n", + "\n", + "We use a pre-generated set of sample data and response. Users can load their own data and follow a similar procedure. In this case $x$ must be an $n\\times p$ matrix of covariate values — each row corresponds to a patient and each column a covariate. $y$ is an $n \\times 2$ matrix, with a column \"time\" of failure/censoring times, and \"status\" a 0/1 indicator, with 1 meaning the time is a failure time, and zero a censoring time."
+ ] + }, + { + "cell_type": "code", + "execution_count": 51, + "metadata": { + "collapsed": true, + "jupyter": { + "outputs_hidden": true + } + }, + "outputs": [], + "source": [ + "# Import relevant modules and setup for calling glmnet\n", + "%reset -f\n", + "%matplotlib inline\n", + "\n", + "import sys\n", + "sys.path.append('../test')\n", + "sys.path.append('../lib')\n", + "import numpy as np\n", + "import importlib, pprint, matplotlib.pyplot as plt, warnings\n", + "from glmnet import glmnet; from glmnetPlot import glmnetPlot \n", + "from glmnetPrint import glmnetPrint; from glmnetCoef import glmnetCoef; from glmnetPredict import glmnetPredict\n", + "from cvglmnet import cvglmnet; from cvglmnetCoef import cvglmnetCoef\n", + "from cvglmnetPlot import cvglmnetPlot; from cvglmnetPredict import cvglmnetPredict\n", + "\n", + "# parameters\n", + "baseDataDir= '../data/'\n", + "\n", + "# load data\n", + "x = np.loadtxt(baseDataDir + 'CoxExampleX.dat', dtype = np.float64, delimiter = ',')\n", + "y = np.loadtxt(baseDataDir + 'CoxExampleY.dat', dtype = np.float64, delimiter = ',')" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The `Surv` function in the package `survival` can create such a matrix. Note, however, that the `coxph` and related linear models can handle interval and other forms of censoring, while glmnet can only handle right censoring in its present form.\n", + "\n", + "We apply the `glmnet` function to compute the solution path under default settings."
+ ] + }, + { + "cell_type": "code", + "execution_count": 52, + "metadata": { + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Warning: Cox model has no intercept!\n" + ] + } + ], + "source": [ + "fit = glmnet(x = x.copy(), y = y.copy(), family = 'cox')" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "All the standard options are available such as `alpha`, `weights`, `nlambda` and `standardize`. Their usage is similar as in the Gaussian case and we omit the details here. Users can also refer to the help file `help(glmnet)`.\n", + "\n", + "We can plot the coefficients." + ] + }, + { + "cell_type": "code", + "execution_count": 53, + "metadata": { + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } + }, + "outputs": [ + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAZAAAAElCAYAAADKuLQKAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzsnXd4nFeV/z9XM6Peiy3ZkotkWe6SHctN7o6DnZDEKUAC\nyxLYJVliNkA2hCzsEkpgScgPCBuWbIBNAULYZNMMSUicWLHjKluW5SJbsmXJkmX1Lk2f+/vjzoyK\nq2RJU3w/z3Oft8z4fe+ZV57vnHPuPVdIKdFoNBqNZqiE+LoDGo1GowlMtIBoNBqNZlhoAdFoNBrN\nsNACotFoNJphoQVEo9FoNMNCC4hGo9FohoUWEI1fI4RwCiGKhRBHhBAHhRAPCiGEr/s1HIQQDwgh\njgkhfj/o/CohRLvbzoNCiPdG6f7PCSFuH41ra65NjL7ugEZzGXqklAsAhBDJwJ+AWOB7V3thIUSI\nlNJ1tdcZAl8B1kkp6y7w2nYp5S0X+4dCCIOU0jl6XdNoho72QDQBg5SyGbgX+CooARBCPCGE2CuE\nKBFCfNl9Xggh/sv9a/9vQoi/en55CyFOCyF+IoTYD9wphMgUQrwjhCgSQnwkhJjufl+yEOJV97X3\nCiGWus+vcnsJxUKIA0KIqMH9dHtJh4UQpUKIB9znfg1kAu8IIb52AfPO86rcHsOvhRB7gMeFEJFC\niN8JIfa4733LpT4H92tPCyHK3F7NuH7n17ltOCSE+K0QwtTv8/mx28Z9Qoj5Qoh3hRAVQoj7hv7U\nNEGNlFI33fy2AZ0XONcKpABfBr7tPhcKFAGTgTuAv7jPj3e//3b38WngoX7X2gpkufcXAR+49/8I\nLHPvZwDH3PtvAUvd+5FAyKC+LQAOAeFAFHAEyHW/VgkkXMCeVUA7UOxu/+o+/xzwVr/3/Qj4rHs/\nDjgBRFzic7gN+Jv7fBrQBtwOhAFn+tn9AvBAv8/nXvf+z4ASt53JQL2v/x5086+mQ1iaQOYGYK4Q\n4lPu41ggG1gOvAIgpWwQQmwb9O/+DOD2HpYBr/TLq5jc2+uBmf3ORwshIoGdwM+FEH8EXpNSnh10\n7eXA61
JKi/serwErUKIiuICn4eZiIaxXBtl7sxDim+7jUGDSJT6HlaiQH1LKc0KID9yv5wCVUspT\n7uMXgPuBX7qPt7i3h4EoKWUv0CuEsAghYqWUnRexQXONoQVEE1AIITIBp5Syyf3l/s9SyvcHveem\ny1ymx70NAdqkO8cy+FbAYimlfdD5x4UQfwFuAnYKIW6QUpYP3ZIrpmfQ8R1SyooBHb3yz0FcZH8w\nVvfW1W8fQKK/MzT90DkQjb/j/aITQqQAvwb+033qb8D9Qgij+/Xsfl7Cne5cyHhg9YUuLKXsAk4L\nIe7sd4957t33gK/1O5/r3mZKKY9KKZ9AhYpmDLrsDmCTECLc7eHcBmwfluXn8zfggX59yut3/kKf\nw3bgM+4cSRqwxv3+E8BktxgDfB4oHKE+aq4h9K8Jjb8TLoQoRoVr7MCLUsqfu1/7LTAFKHb/Cm8E\nNgH/B6wFjgI1wAGgw/1vBpef/hzwjBDi31D/H14GSlHi8SshxCHAgPoyvh/4uhBiDeB0X/+d/heT\nUh4UQjyPEhcJPCulLL3IvS/H4Pc/BvxCCFGKEtbTwC0X+xyklK8LITyfwxlgl7uPViHEF4FXhRAG\nd1//+wr6qEt3awYgpNR/E5rgQwgRJaXsEUIkAnuBAillo6/7pdEEE9oD0QQrfxFCxKOS4j/Q4qHR\njDzaA9FoNBrNsNBJdI1Go9EMCy0gGo1GoxkWQSEgQogNQojjQohyIcS3fN2fq8E95LJYCPGWr/sy\nXIQQ3xCq+GGpEOKPQohQX/fpSnCXCWlwj3LynEsQQrwnhDghVFmUOF/28XIEgw1wUTt+4C69ctBd\nXiXVl328Ei5ixzwhxC63LW8KIaJ92cerIeAFRAgRAjwNfAKYDdwthBg8Nj+Q+BpwzNedGC5CiAnA\nPwMLpJTzUAM17vJtr66Y51B/R/15BNgqpcwBPgT+dcx7NTSCwQa4sB1PSClzpZTzgb8Cj459t4bM\nhez4LfCwlDIXeB14eMx7NUIEvICg6hdVSCmr3bOGXwZu9XGfhoUQIh24EfUHFsgYgCj3xLZI4ELV\nZ/0OKeXHqHpR/bkVVeoD93bTmHZqiASDDXBhO6SU3f0Oo1Az5f2aizyPbPd5ULXY7hjbXo0cwSAg\nE1GTxTzUus8FIj8HvkkAT9iSqlT5/0NNXDsLtEspt/q2V1fFOCllA4CUsp5+FW0DiGCwAQAhxGNC\niDPAZ4Hv+ro/w+SocFdSBj4NpPuyM1dDMAhIUOCuW9QgpSzh0kX3/Br33ItbUdVgJ6CKEH7Wt70a\nUQJW3PsRsDZIKf9NSjkJVS35n33dn2HyJWCzEKII5UnZfNyfYRMMAnIWVZHUQ7r7XKBRANwihKhE\nVVBdI4R40cd9Gg7Xoyq9tkq1ANJrqIq3gUqDu54W7qRtIE5IDAYbBvMSARr6kVKWSyk/IaXMR4Xc\nT13u3/grwSAgRcA0IcRk92ifu1BrNgQUUspvSyknSSkzUTZ8KKX8e1/3axicAZa4iwkKYB1Q5uM+\nDYXB3t9bwD3u/S8Ab451h4ZBMNgAg+wQQkzr99omAufvarAdKe5tCPBvwDM+6tdVE/ClTKSUTiHE\nV1HVU0OA30kpA+UPK+iQUu4TQrwKHEQVPzwIPOvbXl0ZQoiXUJV7k9xx9keBn6DWC/kSUI2KWfst\nwWADXNSOm4QQOahCltXAP/muh1fGReyIEUJsRoUSX5NSPu+7Hl4dupSJRqPRaIaFz0NYVzIJUAix\n2j156Ig4f3U5jUaj0fgAn3og7hhgOSpOXofKZ9wlpTze7z1xqHUMbpBSnhVCJEspm33SYY1Go9F4\n8bUHciWTAD8L/J9n7WktHhqNRuMf+FpArmQS4HQgUQixTQhRJIT4/Jj1TqPRaDQXJRBGYRmBBagl\nSqOA3UKI3VLKk77tlkaj0Vzb+FpArmQSYC3QLKW0ABYhxHYgFzhPQIQQ
ekiZRqPRDBEp5bAqX/g6\nhHUlkwDfBJYLIQxCiEhgMZeYQCSlDOj26KOP+rwP2g5thz+2YLDBH+24GnzqgciLTAIUQtynXpbP\nSimPCyH+BpSiJhA9K6UM2HLnl6OqqsrXXRgRtB3+RTDYEQw2QPDYAb4PYSGlfBfIGXTuvwcdPwk8\nOZb90mg0Gs2l8XUISzOIe+65x9ddGBG0Hf5FMNgRDDZA8NgBQVbKRAghg8kejUajGW2EEMgATaJr\nBlFYWOjrLowI2g7/IhjsCAYbIHjsAC0gGo1GoxkmOoSl0Wg01zBXE8Ly+SgsjUaj0Qwfux2OHVMt\nNBTuGMN1GnUIy88IlviotsO/CAY7gsEGGL4dXV2wZw/87nfw4IPwiU9AZibExsJdd8Hrr0Nt7cj2\n9XJoD0Sj0Wj8CIsFysrg6FE4cqSvNTXBjBkwZw7Mng3r1sH06ZCeDhERvumrzoFoNBqND3A64dQp\nKC1VAnH0KBw+DNXVkJXVJxRz56ptZiYYDCPfj6vJgWgB0Wg0mlGmu1sJxaFDUFKitkeOQEoKzJvX\nJxJz5kBOjspljBVaQNwEg4AUFhayevVqX3fjqtF2+BfBYEcg2CClykP0F4qSEqirg1mzIC8PIiML\nufPO1cybB3Fxvu6xHoWl0Wg0Y47LBRUVcPAgFBf3bY1GJRS5ubBpE3z/+ypXYXR/2xYWwooVPu36\niKE9EI1Go7kMnqGy/YXi0CEVgpo/X7UFC9Q2Lc3XvR0aOoTlRguIRqO5Wsxmla/wCEVxsRKPyZP7\nRGLBAuVlJCb6urdXj66FFURc62Pd/Q1th/8wGjZ0dcGOHfDUU/CFL6hkdlIS/NM/QVGRSnD/8pfQ\n2KiG1v7xj/DQQ7B27fDFYzTscEkX9d31VLZVjvi1L4XOgWg0mmuC1taBXkVxsUp4z5unPIoVK+Br\nX1OjocLCfN3b8+mx9VDZVsmptlNUtlVS2VZJVXsVtZ21nGw9SYQpgk9O/yTP3frcmPVJh7A0Gk3Q\n0dMDBw6omdt79yqxaGkZmKu47jo1ZNboRz+je2w9nGw9SUVrBRUtFd79k60nabO0MTV+KlmJWWTG\nZ5KVmMXkuMmkx6YzLXEaceHDG9KlcyButIBoNNcentFQe/b0tfJyFY5asgQWL4aFC9XkvBA/CNrb\nnDZOtZ7iRMsJTjSfoLylnJNtJznZepJWcytZCVlkJ2WTnZjNtMRpZCdmk5WYRXpsOiFi5A3QAuIm\nGAQkEMa6XwnaDv8iGOzw2NDRobyKPXtg9261HxfXJxZLl6oEt6/DUK3mVo43H+d483HKmso43qL2\nq0uqmZw3mZykHHKScpieNJ3spGyyEpRIGEJGYbr5JdDzQDQaTVAiJVRVwc6d8MorKkdx6pQKQy1b\nBvfdB//zP74bOiulpLazlrLmMo41HaOsqYyy5jKONx/H6rSSk5TDzJSZzEiawZfyvsSM5BnUzqpl\n/br1vunwCKM9EI1G4zfYbCrRvWuXEo1du5SIFBT0tby8sS31AeB0Oalqr1Ii4RGL5jLKmsqINEUy\nK2UWs1JmMTN5JjNTZjIzeSap0akIMawf9mOKDmG50QKi0QQWzc0qDOURi+JimDZNeRcFBSocNXUq\njNX3sN1p52TrSY41HVOtWXkV5S3lpESlKIFInsnscbO9YpEYEdiTQbSAuAkGAQmGWDVoO/wNf7DD\n5YLjx5VQeDyM+nqVu1i6VAnG4sVqfYsLMZI22Jw2ylvKOdZ0jKONRznWrATjVOspMuIylEeRPIuZ\nKTOZlTKLGckziA6NHpF7+8Oz6I/OgWg0Gr/DaoX9++Hjj9VkvV27ICFBCcWyZfD1r6s5F6NRotyD\n3WnnRMsJjjYe5WjTUSUYTUepaq9iSvwUr1DcPuN2/m3Fv5GTnEO4MXz0OhRkaA9Eo9GMCB0dSiR2\n7FCiUVysFkBavlxN0isogNTU0bt/
j62H0oZSDtYf5OC5g5Q0lHC08SjpsenMHT+X2SmzmZUyi9kp\ns5meNJ0wox/OFvQBOoTlRguIRjN29PQoofjwQ9i2TdWLys9XYrF8uQpLxcSMzr3bzG1eoSiuL6b4\nXDHV7dXMTJnJ/NT5qqXNZ974eSMWegpWtIC4CQYB8bf46HDRdvgXI2GHxaLmXngE4+BBNZx27VpY\ns0blMkZj7kWPrYeD9Qf505Y/0ZbaRlFdEfXd9eSl5nnFYkHaAmamzCTUMMbDs4aBv/1N6RyIRqMZ\ncex2lcP48EPV9u1TiyKtXQvf/a7KY0RFjfA9nXYONx6m6GwRRXWqnWw9yeyU2UzonMCmgk18Z8V3\nmJE8Y8wn3GnOx+ceiBBiA/ALVGXg30kpH7/I+/KBXcBnpJSvXeQ9Ae+BaDS+wulUa1x4BGPnTjWE\ndu1a1VasGNkV9FzSxYnmE0oo3IJxuPEwU+Onkj8xn0UTFpE/MZ+54+bqfMUoErAhLCFECFAOrAPq\ngCLgLinl8Qu8733ADPyPFhCN5uqREo4e7QtJffQRjB/fJxirVkFy8kjdS1LVXsX+uv1ez6L4XDHJ\nkcnkT8hn4YSFLJq4iAVpC3TOYowJZAFZAjwqpdzoPn4EkIO9ECHE1wAbkA/8JZgFxN/io8NF2+Ff\nFBYWsmrVaioqlFh4RCMmpi+HsWbNyJUEaeppoqiuiH1n93m3oYZQr1h4tkmRSUOyIViehT/ZEcg5\nkIlATb/jWmBR/zcIISYAm6SUa4QQA17TaDSX5vRpJRR/+hN87nOqGu3atbBxIzzxhFpl72qxOW0c\nPHeQnTU72Xt2L/vO7qPN3Eb+xHzyJ+Tz5QVf5tlPPsvE2IlXfzONX+FrD+QO4BNSynvdx38HLJJS\nPtDvPf8LPCml3CeEeA7lgfzfRa4X8B6IRnM11NYqwfA0s7nPu1i7VpU0v9qyIG3mNnbX7mbnmZ18\nXPMxB+oOMC1xGgUZBSxJX8KiiYvITsoeldLjmpEnkD2Qs8Ckfsfp7nP9WQi8LFRVsmRgoxDCLqV8\n60IXvOeee5gyZQoA8fHx5OXled1Fz1KS+lgfB8txayvYbKvZtg3++tdCurpg/frVrFkDK1cWMnky\nrFnT9/7a2qFdX0rJ1PlT+fjMx/zvX/+Xw42HaRnXQv7EfCa2TOSmcTfx1oNvERcep/59G+Tk5vjN\n56OPzz/27FdVVXG1+NoDMQAnUEn0c8A+4G4pZdlF3v8csEXnQPwfbcfo0NgIhYWqbdumakmtXNnn\nZcyde+FFk67UDofLQWlDKR+f+ZidNTv5+MzHuKSL5ZOWszxjOQWTCsgdn4vJYBpp0y6Lvz2L4eJv\ndgSsByKldAohvgq8R98w3jIhxH3qZfns4H8y5p3UaHxIc7MaHbVtmxKN2lo1nHbNGvjHf4Tc3Kur\nJdVl7WJP7R521uxUOYzavWTEZbA8Yzk3Zd/Ef6z7D6bGTw2IsuSascfn80BGkmDwQDTXNm1tfYKx\nbRtUV6saUh4PY/78qxOM2s5adp7Z6fUuylvKWZC2gIKMAgomFbAsY1nAlye/1pBS0tzcTHV1NQAL\nFy4c0r8P2GG8I40WEE2g0dGhig96BOPkSVVDyiMY110HxmHGCZwuJ4cbD3sFY2fNTnrtvUosMgpY\nPmk5C9IW6El6fo7D4aC2tpaqqipvq66uprq6mtraWurr6zEajUyePJk1a9bws5/9bEjX1wLiJhgE\nxN/io8NF23FhurrUDG+PYJSVwaJFfYKRnz/81fa6bd3srd3rFYs9tXtIi06jIKOApMYk/uG2f2B6\n0vSADUcF899UW1sblZWVnDp1yrs9ffo0lZWVnD17lnHjxjF16lQmT57MlClTmDx5MpMnTyY9PZ20\ntDTi4+OH3Z+AzYFoNMFOT0+fYBQWwuHDsHAhrF4NP/3p1RUgrOmoYWfNTnbV7GJnzU5ONJ8gLzWP\n
gowCNudv5o+3/5HkSDWVvLCwkJzknBGzSzM0pJS0tLRQXl7O3/72Nz744ANOnjzpbU6nk8zMTDIz\nM8nKymLBggXceeedXtEIG40qlSOA9kA0mhGkt1ct0eoRjJISlbfweBhLlkBExNCva3faKW0o9XoX\nu2p2YXVYKZikwlHLMpZxXdp1OhzlY7q7uykvL6eiooLy8nJvq6iowOVyMX36dLKzs71t2rRpZGVl\nkZSU5DPPUIew3GgB0Yw1FosSDM+w2uJiNTJq9WolGMuWQWTk0K/bZm7zjo7aVbOLoroiJsdN9opF\nwaQCshKyAjYcFci4XC5qa2s5ceIEx48fH7BtaWlh2rRpTJ8+3SsWOTk5ZGdnk5yc7JfPSwuIm2AQ\nkGCO8wYig+2wWGDv3j7B2L8f5szpE4yCAogeYi1AKSWn2k6pUJQ74V3dUU3+hHwlFu4Z3gkRCSNm\nRyAy1jb09vZSUVHB8ePHve3EiROUl5cTGxvLjBkzyMnJIScnhxkzZjBjxgwmTZpEyIUm4vTD356F\nzoFoNKOEzQbbt/dN3isqUmtirF4NjzyiBGOoq+7ZnDYO1B3w5i521ezCGGL0hqO+fN2XfTZZ71pD\nSklDQ8MAkfC0hoYGMjMzmTlzJjk5Odx44408+OCDTJ8+nbiRrGsfwGgPRKPph82mFk7yeBj79ql1\nvdesUaKxfDnExg7tmk09Teyq2eUVjJL6ErKTsr3DaQsmFZARm+GX4Y1gwW63U1lZ6RWHsrIy777J\nZPJ6Ex6xmDFjBlOmTME43DHUAYQOYbnRAqIZKlKqtbzff1+1HTsgO7tPMIa6iJJLuihrKhvgXTT2\nNLIkfYk3f7Fo4iJiwkZpsfBrHLvdTkVFBUePHuXo0aMcOXKEo0ePcvr0adLT072hJo9YzJgxg+SR\nWvQkQNEC4iYYBMTf4qPDxZ/tqK+HrVuVYGzdquZdrF+v2tq1kNRviYrL2dFt66bobJFXLHbX7iYp\nIknN6k5Xye5ZKbN8XpnWn5/HldLfBpfLRVVVFUeOHBnQKioqSE9PZ86cOcyePdu7zc7OJjw83LcG\nuPG3Z6FzIBrNJejtVXkMj5dRU6M8jPXr4d///cpLnEspOdNxxhuO2lW7i+PNx8lLzWNZ+jLuve5e\nnrv1OcZHjx99o64huru7KS0t5c033+Tll1/m0KFDHDlyhISEBObMmcOcOXPYsGEDDz30EDNmzCBy\nOMPeNMNCeyCaoENKOH4c/vIXeOcdlfhesEAJxvXXq4l8VxLatjltlNSXDAhHOV1OlmUs846O0qVA\nRg4pJWfOnOHQoUMDWl1dHbNmzSI3N9fb5s2bd1WzrzV96BCWGy0g1y5Wq/Iy/vIX1ex2uPlmtfLe\nqlVXNlKq1dzqHUa7q2YXxeeKyUrM8uYulmUs05VpRwiz2cyRI0coLS31CkVpaSkRERHk5eUNEIvs\n7OxrIpntK7SAuAkGAfG3+OhwGQs7Ghrg7beVYHzwAcyeDZ/8pGpz5lw+LHW28yw7zuxgR/UOtp/Z\nTlV7FYsnLmb5pOUUZBSwOH0xxbuL9fO4Surr6ykpKeHgwYNesaiqqmL69OkDhCI3N5eUlJSLXkf/\n3xgddA5Ec00gpSoN4vEyysvhhhtg0yZ45hm4xHcPUkoqWivYUb2DHWd2sL16O53WTpZPWs6KSSv4\nQt4XmJ86X8+9uAqklFRWVnLw4MEBzWq1Mn/+fPLy8rjxxhv513/9V2bOnEnocKtGavwG7YFo/BqL\nBT78EN56S4lGRIQKTX3yk2qIreki3/dSSg43HqawqtDrZYQaQlkxeQUrJqk2M2Wmz0dHBSpSSmpq\nati/fz9FRUUUFRVx4MABoqOjmT9//oA2adIkHfbzY3QIy40WkOCgoQH++lfYskWJR24u3HKLEo6c\nSxSUrW6vZmvlVj44/QEfnP6AmNAY1kxZw8rJK1k5eSWT4yePnR
FBRnNzs1coPE1KSX5+vrctXLiQ\ncePG+bqrmiGiBcRNMAiIv8VHh8tQ7JASjh5VXsaWLWqNjBtuUKKxcePAeRn9aeltYVvVNq9odFg6\nWJe5juunXs+6zHVMiZ8ypnb4M0Oxo6uriwMHDgwQi7a2Nq677roBgpGRMbaz56/FZzEW6ByIJuDw\n1JjaskUJh5TKw/jBD9SoqQuFx812Mx+f+ZitlVvZenorFS0VrJi8gnVT13F//v3MGTdHh6SGiNVq\n5dChQxQVFbFv3z6Kioqorq5m3rx5LFq0iFtuuYUf/vCHZGdnX7ZIoObaQ3sgmjGjtVXNy3jrLXjv\nPRWOuvlm5WlcaNSUS7ooqS/h/VPv837l++yp3UNeah7rpq7j+szrWZy+mFCDTsReKU6nk7KyMq9X\nsW/fPo4dO8b06dMHeBZz5szBdLHkkibo0CEsN1pA/I+Kir7QVHGxmgF+yy1w002Qmnr++2s6ani/\nUgnG1sqtJEUksT5zPeuz1rN6ympiw4ZYyfAaRUrJ6dOnB4jFwYMHSUtLIz8/n0WLFpGfn09eXp6e\nuX2NowXETTAIiL/FR4eKw6EWWPrVrwopKVlNZ6fyMm6+GdatO381vi5rF4VVhV7RaOpp4vrM672i\nMSlukm8McRMoz6O+vn6AWOzfv5/w8HCvUISGhvKlL32JhIThryniawLlWVwOf7ND50A0PqWzE/72\nN+VlvP02ZGTA3Lnwhz+oEiL9Q+cOl4P9dfu9YamD9QdZNHER6zPX84fb/sD8tPk6j3EZ2tvbBwyf\nLSoqore3l4ULF7Jo0SLuv/9+8vPzSUtL8/6bwsLCgBYPjX+iPRDNsGhvh9dfh//9X9i5Uy2s5Jmf\nMWmQ03Cq9RTvnXqP9yvfZ1vVNjJiM7wexsrJK4k06RDKxejt7aW4uHiAWNTX1zN//vwBeYvMzEw9\n10IzLHQIy40WkNGlu1vlM15+GT76SJU+/8xnVD6jf62pVnMrH57+0OtlWBwW1metZ33meq7PvJ7U\n6AskPzTY7XaOHDniHQ1VVFRERUUFs2fPHiAWM2fOxGAw+Lq7miBBC4ibYBAQf4uPms0qLPXyy2rk\n1PLlcNddcOutfSvz2Zw2dtXs8grG8ebjzOyeyV2fvIv1WeuZnTI7YH8dj9bzcLlcVFRUDBg+W1pa\nypQpU7xCsWjRIubNm0dY2NVX+/W3v6vhEAw2gP/ZoXMgmhHFalVi8ec/q/IhCxcq0Xjmmb5Jfc29\nzTx3cAuvH3+dwqpCZiTPYH3mep5Y/wRL05ey++PdrF662qd2+AtSSmpraweEofbv309CQoJXKO64\n4w4WLFhAzFAXWNdofIj2QDQAuFxqYt/vfw9vvKEq237mM3DnnTDevT7S2c6zvHH8DV47/hr76/az\nPnM9t824jQ3TNpAUeZHp4tcgLS0tA8Ri3759A8p+LFq0iIULF16y8qxGM1boEJYbLSBD59QpePFF\n1WJi4AtfUMKRnq5eP9l6ktfKXuO1stcobynnk9M/ye0zb+eGrBt08hu1Wt7gJHdTUxMLFy4cIBhj\nXfZDo7lSAlpAhBAbgF8AIcDvpJSPD3r9s8C33IddwFeklIcvcq2AF5CxiI92dcErr8Dzz6uV++6+\nG+65B/LyACSlDaW8fvx1Xit7jcaeRm6bcRu3zbyN1VNWX/HMb3+L8w6X/nbYbDZKS0sHiMWpU6eY\nO3fugMl5OTk5flf2IxieRzDYAP5nR8DmQIQQIcDTwDqgDigSQrwppTze722VwEopZYdbbH4DLBn7\n3gY2LpeqbPvCC2q+xpo18OCDcOONqu5UeUs5jxb+gZcOv4RTOrl9xu38+qZfsyR9CYaQa2/Ej8vl\n4sSJE7z77ru8+uqrFBUVceTIETIzM71isXnzZubOnavXtdBcs/jUAxFCLAEelVJudB8/AsjBXki/\n98cDh6WUGRd5PeA9kJHm5E
nlabz4IiQnK0/j7rvV4ktNPU38+eif+X3p76lur+buOXfzd/P+jgVp\nC665cEtdXR179+5lz5497Nu3j+LiYpKTkwd4FvPnzyc6OtrXXdVcozilpNlu55zVyjmbzdvqbTbO\nWa3U22xkRUTwwsyZQ7puwHogwESgpt9xLbDoEu//R+CdUe1RkHDwIPz4x2q+xuc/r0ZTzZunKtpu\nKd/C77cykE5PAAAgAElEQVT+nh3VO7hp+k18f/X3uT7zeowhvv5zGBvMZjPFxcXs2bPHKxo9PT0s\nWbKExYsX861vfYv8/HySLlZHXqMZQaRbGM5ardTZbNT1256z2bz7jXY78UYjaaGh3pYaGkpWeDgr\n4uJIDQ1l0ggM+R4KAfONIYRYA3wRWH6p991zzz1MmTIFgPj4ePLy8rzxxsLCQgC/Pi4pKeHrX//6\nsP/94cPwzjurOXQINm0q5IUX4BMbVrK9ejs3PvYEO87sYMmKJXx+3uf5StJXiAyNZPW0kbfHsz/W\nn9/gYykl6enp7Nmzh9dee41jx45RW1vLrFmzSE9PZ9asWfz4xz8mKyuLjz766Lx/f7XPw1+O/eV5\nXM3xL37xi4D7/yylZE5BAWesVt754AMa7XYabTZC5s/nyM6dNNnttM2eTaTBQNyRI6QYjcxdvpy0\n0FAiSkvJN5m4Ye1a0kJDOb5rFyYpWV1QcNH7VQFTLtM/73urqrha/CGE9T0p5Qb38QVDWEKIecD/\nARuklKcucb2AD2EVDiPBJqWat/GjH8HZs/Ctb6nRVG32en6171e8WPoi8eHxfH7e57l7zt1MjJ04\nOp3vx3DsGAmsVisHDhxgx44dfPzxx+zatYuoqCivd7FkyRIWLFhAxOCqjhfBV3aMNMFghz/aYHE6\nqbFaOWO1Um2xUG2xcMZqpca9rbVaiQgJISMsjIzwcCaFhWErLmbV6tWkh4WRHhbGhLAwIn1YWSBg\nR2EJIQzACVQS/RywD7hbSlnW7z2TgA+Az0sp91zmegEvIEPB5VJzNn78Y7V2+Le/DZ/+NFR2lPPk\nrid59dir3D3nbu5beB/zxs/zdXdHhc7OTnbt2uUVjAMHDjB9+nRWrFjB8uXLKSgoYMKECb7upiYA\nkVLS5nB4RaHaYuGMxUK11aq2FgttDgcTw8KYHB7O5LAwJrlFwrNNDwsj2ujfgZ6AFRDwDuN9ir5h\nvD8RQtyH8kSeFUL8BrgdqAYEYJdSXjBPcq0IiNOpZok/9hhER8N3vqMKGRbV7eXxnY/z8ZmPuT//\nfjbnbyYlKrgmq507d84rFjt27KCiooKFCxd6BWPp0qXExuo1QzSXxyUlDTYbVW4xqHKLQ39PIgSU\nOAwSCM+58aGhGAJ8wElAC8hIEgwCcik3XUpVAfe731V1qL7/fVi7zsW7J9/hiV1PcKbjDP+y9F/4\nYt4XiQqNGtuOD2Ikwg2eRZG2b9/ubW1tbRQUFHgF47rrrhvVYbT+GDYZDsFgx1Bt8CSnKy0WTpvN\nVFksnHa3Krc3EWc0Mjk8nCn9RMIrGOHhxI2C9+BvzyKQR2FprgApVUHDf/93tezrE0/AuhtsvHzk\nT+Q+81NMBhMPL3uYT83+VECPpJJSUlZWNkAwXC4XK1euZOXKlXzjG99g9uzZfjdJT+M7rC4XVRYL\nlWYzlRfYhgrB1PBwpkZEMDU8nNzoaDYlJ3sFw5e5h2BAeyB+jJTwwQdKOLq74Qc/gBtvtvHioRf4\n0Y4fMS1xGt8q+BbXZ14fkPM2nE4nhw4d8orFjh07iImJ8QrGypUrycrKCkjbNCOHxenklMVCeW8v\nFWYzFWYz5b29VFosNNpsTAoPJzM8nEy3SGRFRJAZHs7U8HDi9drul0WHsNwEk4Ds2KGE49w5Fara\ndIeNF0uf58c7fkxOcg6PrnqUZRnLfN3NIWGz2Thw4ADbt2/no48+YteuXUyYMMErFitWrCAj
44Jz\nRDVBjt3tSZSbzVS4hcKzX2+zMSU8nOmRkWRHRHi3WRERpIeFBXwOwtdoAXETDALy618X8uabqykv\nV7mOT99t4w9HlHDMSJ7Bo6seZWnGUl9387IUFhayaNEi9u7d6/Uw9u3bx7Rp0wYIxrhx43zd1Uvi\nb/Hq4eIPdrikpMZq9XoS/T2KMxYLE8LCmB4RQbZHKNz7k8PCMIaE+IUNI4G/2aFzIEFASYkSjN27\n1eiqz/29jZeOPc+sXyvh+NMdf/J74fAMqd2+fTtbtmzh9OnTzJ07l5UrV/Lggw9SUFBAfHy8r7up\nGWU6HA5O9PZyoreX4+7tCbOZU2YziUbjAE9ibUIC0yMimBoRQZjObQ2d3l6or4fmZmhsVIXtbrhh\nzG5/RR6IEKIAKJFS9ggh/g5YADwlpawe7Q4OhUD0QI4ehUcfhV274JFH4J5/sPHy8ef50Y4fMTN5\npl97HN3d3Wzbto3CwkI++ugjjh8/zsKFC70extKlS4mK8u1oMM3o0et0crSnh9KeHkq7uznc00NZ\nby9dDgc5kZF9LSKCGZGRZEdGEqWT1pfHbFaicKHW0KCEwrO12yE1VRW3S0mBggI1rn8IjHoISwhR\nCuQC84Dngd8Cn5ZSrhrOTUeLQBKQ8nKV29i6Fb75TfjH+2z8b7l/C4eUkqNHj/Luu+/yzjvvsG/f\nPvLz81m3bh2rVq0iPz9/RJZf1fgXLimpslgo7e72ikVpTw81ViszIiOZFxXFvOho5kZFMSsykolh\nYXrgw4Uwm1VSs65uYOt/7tw59b7x4yEtTYlDWpo6Tk1V2/HjYdw4tY2JUUMzr4KxEJBiKeUCIcR3\ngbNSyt95zg3npqNFIAjI6dNqNNVf/gJf/zr802Yb/3eqTzhuDr2ZzZ/e7Otueuno6GDr1q28++67\nvPvuuxiNRjZu3MiGDRtYs2bNRZdg9bc473C51uzocDg4PEgoDvf0kGA0Mi8qirnR0eS6BSM7IgLT\nGIad/PZZSAmtrVBbq9rZsxdu3d2QlkZhdDSrZ86ECROUOKSlwcSJajthAsTHX7UoDIWxyIF0CSH+\nFfg7YKV7HQ89Pm4I1NSoWlWvvAKbN8PR4zbeqHqe655XOY6X73iZpRlLBxQ88wUul4tDhw55vYyD\nBw9SUFDAhg0beOihh5g+fbr+dRkEOFwuTprNA4SitLubZrudOW6BmBcVxWfHj2duVBQJ1+pwWJcL\nmpr6xKF/q6npE4yICLWMZ3q6EoOJE2HRor79iRMhKQlCQqCwEPxRCIfBlXogqcBngSIp5Q53farV\nUsoXR7uDQ8EfPZBz51Stqpdegi9/Gb72oI0tNf41qspisfDhhx/yxhtvsGXLFmJiYtiwYQMbN25k\n1apVREbqpWsDmSabjcODhOJYby8TQkO9QuHZZkZEEHKt/ECQElpalBCcOaO2nuYRiLo6VfbBIw4e\ngcjIUPsZGeo4gHN9Y+GBfENK6VlWFinlGSHE7OHc8FqhsRF+8hO1mNMXvwiHjth4p+4Flr70I3KS\nc3jpjpd8Oo+jvb2dt99+mzfeeIP33nuPuXPnsmnTJr75zW+SnZ3ts35pho/N5eJ4b+95uYpep9Mr\nEItjY7k3LY3ZUVHE+HmRv6tGSjU66dQpqKo6v505A2FhSgQmTVLbjAyYM2egOFxh5eZrkSHlQAad\nK5VS+lWJV3/wQJqb4ckn4Te/gc99Dh78po33G9XM8elJ0/ne6u9dUjhGM85bW1vLm2++yRtvvMHe\nvXtZvXo1t956KzfffPOIz8fw23j1EPFHO6SUnLPZBgjFoZ4eTprNTA0PH+BRzIuOJiMsjI8++sjv\n7BgqF3wWLpdy80+eHNhOnVLNaISsLJgypa9NntzXLpLDG3M7BmGzQWenal1dfdvubtV6etS2qws6\nOlQ17p4emDoV/uM/htafUfNAhBBfAe4HMt0jsTzEALuG
c8Ngpa0NfvYz+K//UiXVi4rtfNjyAmte\n+xHZidk+8zhOnDjBq6++yhtvvEFlZSU33XQTX/nKV3j99df18qwBgMXp5Oggr+JQdzdCCG8y+/qE\nBB7MyGBWZCThwThM1uFQ4aT9+6GsTAmDRygqK1WIado01bKy4I471DYrCxITx7y7Uqov87Y21drb\nVfPsl5TAW2+p/Y6Ovtbe3icaDgfExSl9i41V25gYFSnzbKOjVUtLg8hI1SZPHltbL+mBCCHigATg\nP4BH+r3UJaVsHeW+DRlfeCAdHfDUU/DLX8KmTfCtb9vZ3v4ij+14jOzEbB5d9SgFkwrGtE8Oh4Mt\nW7bw9NNPc+zYMT71qU+xadMmVqxYgelaTYb6OVJKzlit54WfqiwWsiMiyB2Uq0gNDQ2uwQwWixqi\n6BGH/tszZ9SQ1aysPpHIyoLsbMjMHDUvwulUX+qtrVfePKIRGgoJCUoEEhJUi4/v28bF9W0v1MLD\nx24g1piUMnEv/jSefl6LlPLMcG46WoylgDid8OtfqyG5GzfCI9+xs6v7RX6040dkJWbx6KpHWT7p\nkqvvjjiNjY389re/5ZlnnmHSpEls3ryZO+64Y1TLnWuGTqfDwZFBSe3DPT1EGwznJbVzIiMJDZYZ\n2t3dfaGlweGm+nr189kjEv2FYupU9Y16FbhcSgyamlR+sqlJhZv7t5YWtW1tVfudnerXf1KScmQ8\nLSFBnUtIGHjseT0+XglIoDDqSXQhxFeB7wENgMt9WqImFl5zlJTAvfeqv+n3PrBT7Pg9N73zGJkJ\nmbyw6QVWTF4x7GsPJ+a+b98+nn76abZs2cIdd9zBm2++yfz584fdh5HAH3MHw+Fq7HBKqYbKDvIq\nGm02ZkVFeUNQn05JYW50NEmj6B2O2fOw21WC+sQJNVvWsy0vVz/NMzP7xGH+fPjUp9T+pEkqXzFE\nG+x2NSm7/3y8c+f6Wl1dX6WPyEg1/84zaTslBZKTVZ48N1eJQHKyEoGkJCUEoxERDJb/G3Dlo7C+\nDuRIKVtGszP+Tk8PfO978MILKlEVv/R17tj6EFPip1y1cAwVq9XKn//8Z55++mmamprYvHkzP//5\nz0lKShqzPmj6uNBQ2bLeXlL7DZX9+9RU5kZFkRUREfgVZKVUeYnS0oGtslJNhps+HXJyYN48JRLT\np6uRTVfoTXlG2Hrm4NXVwccfq+HwnuO6OqVJKSl9c/A82/z8vjl6nkofukjCyHOlo7C2AeullI7R\n79LwGc0Q1ttvw/33w4oV8PAPzvG9oq9ypPEIv77p16ydunZU7nkhOjo6eOaZZ3jqqaeYM2cODzzw\nABs3bsQQjMlTP8TqclHmrv/UXzDM/YbKerZBM1S2p0cVbTt0aKBYhIcrgcjNVdu5c5VoXCbcZLGo\nL/8LTdb2nK+rU6Nn+8/DmzhRiUP/7bhxo+MlXEuMxTyQSqBQCPFXwOo5KaX82XBuGkjU1amSI8XF\n8OyzkjPJv2Pdq9/m3uvu5Y+3/5Fw49XFZq+Uc+fO8dRTT/Gb3/yGDRs28M4775Cbmzsm974Wke7S\n44O9ikqLhazwcOa6ReKB9HTmRUWRHgz1n6RUCWuPUHi2NTVKGDxCsWmTEosLDP22WqH21MA5ef3n\n5p09q3ILqanni8PChQOFQs9f9X+u1AN59ELnpZTfH/EeXQUj6YG4XPDMM6pS7n33wWe+UsEDW++l\nx9bDb2/5LfPGj076Z3B8tLy8nCeffJJXX32Vz33uc/zLv/wLU6ZMGZV7jySBFOc1u6vKlnR3U9JP\nLCIMBtKPHWP1mjXMjYpiXlQUM6OiArLs+HnPo7cXjhw5XywiI/uEwrPNyQF3fsZshurqC8/Lq65W\nCei0tPPn5nkmbk+cqMJJw/kIA+lv6lL4mx2j7oF4hEIIESml7B3OjQKJI0dUkjwkBN7/0M7fOn7G\nmj/+lO+s+A4PLH4A
Q8jo+8xFRUU8/vjjfPTRR9x///2cOHGClJSUUb9vsNNos3HILRSedto9VDYv\nOpq86GhuS0lhXlQUKaGhFNpsrM7K8nW3h4+U6qf/7t2wc6cSikOHlKcxY8ZAr2LePCwxKQMF4o9q\ndK3nuL1dCUP/eXm33NI3Py81VYeUriWu1ANZCvwOiJZSThJC5AL3SSnvH+0ODoWr9UAsFlXw8Jln\n4Ic/hIU3F3PvX/6R5Mhk/vuT/83UhKkj2NvzkVLy3nvv8fjjj3Pq1CkefPBB/uEf/kFP+BsGUkpO\nmc0UDxILs8tFnruirEcwAtWrOA+LpS9X4WmlpWpMaW4u5ObimDWPc+NyKQ+Zwelak1ccPNuWFuUt\nTJ06UCQ8x6mpw/MeNP7LWJRz3wvcCbwlpZzvPndESjlnODcdLa5GQLZvV17HrFnw059b+G3F9/nd\nwd/x0/U/5e9z/35U49sOh4NXX32Vxx9/HIfDwcMPP8xdd92lJ/0NgXNWK/u6uijq7KSoq4uiri6i\nDQaui4lhfnQ0uW6xmBQMuQpQkxlKSpRIeLanTiGnTaN3Wi71qblURudSInM50jTeKxINDSq/0F8U\n+m/T0rQHca0xJkvaSilrBv3Hcw7nhv5Gezs8/LAaZfWf/wlp+Xu4+c0vMTNlJoe/cpjx0eNH7d5m\ns5nnnnuOJ598kvT0dB577DEiIyNZs2bNqN1zrBjNOG+73c5+t0gUdXWxr7MTs8tFfkwMi2Jj+erE\nieTHxJA6AuM2fR6vdjqhosIrFLKkBNfBQ8heM63puVQn5HHEtJ69UQ+xfcIsTpWHkdw6UBRWrYK8\nvEI2bVpNero3nRFw+PxZjBDBYgdcuYDUCCGWAVIIYQK+BpSNXrdGHynh1Vfha19T4d/9h8z8vwPf\n5fcv/55fbvwln5r1qVH7pdrW1savfvUrnn76aRYvXswf/vAHli1TdbJ8vR6Iv2FzuTjU3c3ezk72\nusXirNXK/JgYFsXE8OmUFJ7MymJqeHjgexZdXVBaiq3oED27lFcRVXWUrvBxVETlUezK46POr1Bm\nysU0bRJTMwWZmUokNk2Fb0xVeYgLjaItLFTv0wQfTosTR6sDR5sDQiBq5tiVlr/SEFYy8BRwPSCA\n94Cv+dvEwisNYdXUqEWdTp5UVXPFpF188c0vkjs+l6dvfJpxUSNbmbbvvjX8/Oc/5/nnn+fWW2/l\nm9/8JrNmzRqVewUiUkoqLRYlFu52uKeHaRERLIqNZbHbw5gVGYkxgAPxToekYX8NrR+WYNt/iLCy\nEpJrDxHbe47jhtkcdOZSk5xH19RcXHPmkZYTS2YmXrGIj/e1BZqRREqJy+zC3mLH0erA3mbH0ebo\na+3nn/Met6upeaYkE8Z4I3Er48h5JmdI9x+TWliBwJUIyJ//DF/9qmoP/EsvP9z5b/zpyJ/4z43/\nyZ2z7hyVfpWVlfHEE0/w5ptv8sUvfpFvfOMbpKenj8q9AolWu52iri6vWOzr6iJMCBbHxnrbddHR\nRAfgZLyODndi+oSVjj1liEMlxJwqIa3pEDnmEqwhEVTF5tI0MQ9LTi6mhbkkLs4mc7qRCRN0ojrQ\nkFLi7HFe9Ev+cudEiMCYYMSUaMKYaFT7CUoUjAlGtU10n/McJxgxxBmwuCy0t7fT2tqKEIK5c+cO\nqe+jWc79YSnlE0KI/0TVvhqAlPKB4dzUF/T0wAMPqGT5u+9Cb/IOFj33JRZOWMjhrxwmOTJ5xO+5\nZ88efvKTn7B7927++Z//mZMnT5J4mfLSwRIfHWyHtV8oap9bNOptNq6LiWFxTAz/mJbGszk5TPSz\nehMXex5Op/JkKytVLcDKSmgsayG07BDJtSXMtJVwnfEQG+0VtMZn0jE1D3l9HlHLbiL8+lwSJo8j\n1Q/sCCTGwgbplDg6HNhb3d5A/22b4/xzHo+h1YEwioFf8gkDhSAiO4KYhBj21u5l1Y
pVA95niDDg\ncrloaWmhqamJpqYmWlpaaG5upqWlhdaaVlpKWmhtbfW2trY2WlpaCAkJITExkfj4eFavXs3TTz89\nqp9Rfy73086T59g/Wh0QQmwAfgGEAL+TUj5+gff8EtgI9AD3SClLhnKPgwfhrrtg6VL489YTPHv4\n57xV+Ba/uvFX3DbztpEww4uUknfffZfHH3+c6upqHnroIV566aVrallYKSU1Fgt/qK/35i2O9PSQ\nHRHB4thYVsfH83BGBrOiovy6JlRXlwpztrX1icSpU3D6lAvjmUqWxxyiILqEfFnCpztKiLB3YsnJ\nxXh3HhFLVyPyvgazZ5MWHk6ar425BpEuib3Vjr3Jjr35AttmO/YWuxKCFocKIXU6MMa6f+0nmgZ4\nBaZEE2GTwojKjeo75xGBRCOG8POHr3V3d9PY2Eh9Yz0NDQ00Njay99RetjVso7GxkaamJhobG2lo\naKClpYXY2FjGjRtHcnIyycnJJCUlkZSUREpKCjNmzCAxMZHExEQSEhK8+778bvFpCEsIEQKUA+uA\nOqAIuEtKebzfezYCX5VS3iSEWAw8JaVccpHrDQhhSanW6njsxw4+/9hbHA7/Lw43HuZLeV/ioWUP\nkRQ5coUHHQ4Hr7zyCo8//jgul4tHHnmET3/60xgDMPwyVJpstgGeRVFXFzEGA4tjY1kUE8Pi2FgW\nxMQQ5YfjQ1tblUhUVKjWfzG77m6YMcXCmpQj5IeWMMtaQnpLCXHVpYQkJiDm50Geu+XmqgSFHwti\noONyuHC0OLA12bA3KiGwNdmUIDSdf+xoc2CIMWBKMamWbCI0JdS7b0oyYUwyYkoyeZsx3ogwXPoZ\n9vb2Ul+vBKG+vv6CrbGxkcbGRqSUjB8/nnHjxg1oKSkp3m1KSgqpqamkpKT4ZOj+WMwDeR/4lJSy\n3X2cALwspfzEcG7a77pLgEellBvdx48Asr8XIoR4Btgmpfyz+7gMWC2lbLjA9bwC0tgId325jvKY\n3+DM/Q1ZSVO4P/9+7ph5B2HGkQuT9B+Km5GRwSOPPMKGDRsCf0RQP1xSUm+zUWk2c9piodJi4bTZ\nTKXFQqXZTLfTSX4/sVg0QkNoR4oLiYRn67S7mJ/ZQd7EJmalNJEV28RU50lS60sIP1GCqKxUlWQ9\nIuHZ+mClu2BDSqkSxI12bI3qi9/WaOs77re1N9lxtDuUR+AWhNBx/cQgpZ84eFqSiRDTlSWTXC4X\nzc3NnDt3jvr6eu/2Qvs2m43U1FRvGz9+PGlpaQOOPaIRFRU1qt8FFoua/NnWBi2tLjDYWFUwtPp8\nYyEgJVLKvEHnDnomFQ4XIcQdwCeklPe6j/8OWNQ/tyKE2AL8h5Ryl/t4K/CwlLL4AteTmxcsvZou\nXRUj8WdS29VBekzcCFypj9HwMS9n62jYcTkuZacYtL1Sars7x9yO0WCkn4dEeD9w2e9Tlc4QXA4T\nUoZc+IFIvOdFv1PeE/0fkKDPoxNQ19PKhGgl3C4EbaZwGsMiaQ6NxDFo1IHofzEJdunC6XThkC4c\nLjt2lwO7w4rD0YvdbsbhtBISYsJkiiDUFIHJGIHJGElYaDhhxkiM7vOhxggMhvNXixLu2wkkhhAX\nJpMLk8lJqNGJ0SC9XQkRgua606ROzsZgMGI0GjEaDSAkEokdMxZ6MMtuehw99Dp7MTt7sTrN2JxW\nbC4zFmnFihWroRenqRfCusFgQRpsTHWtp/IH717u8Q3q++hPJHQKISZ5ViAUQkxmdL6Xrpqysv2M\nN6o/pighyAw1MDdMuYWHrXYAvz622Jx8ojfcb/oz3OPDVju04Tf9Ge6x2ebkhp4geR7uRahH/X7h\n6mvlsMVxkfeHghz69d/sNjO508jcMBMGKanutpBkdXCrw0mHycBbRgOtoUbGxUdSHxFKkcNJS6gR\nV2o8jeGh1HWZ6TUaMKQmIBC4GjoQCMJSkwgnFG
dDGxIzMjURG2Z6608CAkOqEi1nvfoAL3gsPccC\nkZqI0yFw1LbhkgLGJ2EQEhqbCUFiAExNp3A1tBAiICw1DgMhOOvbMDpNJCRnEO4Mx36ulTAZSlZS\nFuGuMFqbz2GQJqZFzSGuO5S2c6eJ6whlfsh1iOhIjhrLMM7py4d45pR5Bh70n2NWWFhIVVUVV8uV\neiAbgGeBj1BaugK4V0r5t6u6uQphfU9KucF9fCUhrOPAqouFsFwuV1CFj66Izs6++Ex5+cB9lwu5\nchU9ubfRxgLaDkLHjg7Cp4aTsC6BhLUJxK2Mwxhz8d8SDpeDTmsn7ZZ2b+uwdNBuaafV3MrhxsPs\nqtlFQ08DiyYuYmn6UpamL2VJ+hISIhLG8IPQ+ANSShyOVqzWWiyWGqzWGszmSiyW01itNdhs9djt\nzUhpIyQkipCQUCAEKZ1IacXp7CEkJIrQ0BRCQycQFjaB0NDxhIaOx2QaN2h/HAbC1ZKD/ZciHNwa\nG5ENDeBw4ExOxJIUjzk2AnOYAbPBhVk4sDqs2B1WnBYLwmrB2GslqttKfI+TxF5JmBPaogy0R5vo\niAmnIzaC9tgo2mJjaImLoTkunvrYeM7GJVETm4R13ETCE1JJNiWSYI0myhxOZG8oYb2hGLtNhPSa\noMeAo9NAb6uT9iYHbW0uOjpcdHYKenoMmM1GrFYTUgqMRjMGg5mQkB5kiBlpMOM09CKNFowhkogw\nIxkZZkq23zik5zVWa6InA57k9R4pZfNwbjjomgbgBCqJfg7YB9wtpSzr954bgc3uJPoS4BeXSqI3\nNv4fKSm3X23XgofGRvjgA1Wr5d13ITUV1yduomvqBtqaJ9Fe2ElnUSfR86JJWJdA/Np4YhfHYogc\nesK7ubeZPbV72F2zm921uymqKyI9Np1l6ctYmqFEZWbKTEKEnuSgAaezB6v1HDZbHVbrWWy2c1it\nddhsZ7FYzmC1nsVubwAEBkM0ISERCKF+6Ehpw+m04HR2AAZCQ1MwmVIJDR3n3h+HyZTi3k/BZEpW\nzR6BodmM8CyMPrh5Fkj37FutkJSEKykJe0Is9sgwLKYQrAaJBSdWpxWHzYLDZgGLGUN3L+HdFmK6\nrMR12TG4JM2R0BIZQlu0kbaYMNpjI2iNiaQ1Lorm2BjqY2M5GxdPfXwSlsTxRMUkkRAeT2J4HCnh\ncaRExpNIDFH2MMItJsJtJowWEyaLAdFrxNFjwNwL7e024uLsbN48tOKroyYgQogZUsrjQogFF3r9\nQnmIIXdAeTdP0TeM9ydCiPvU5eWz7vc8DWxADeP94sXuK4SQe/fOJj//EEqbAo9RHevudML+/UpM\n3n5beSnXX4/z+pvoSFhB+6EQ2j5so+dwD9HzoolbHkfcijjiCuIwJQ1tdEhhYSHLVy7ncMNhdtcq\nQZ6DDhEAACAASURBVNlds5vm3mYWpy/2eimL0xcTH+6/U6uDYf4EBK4dUkqczk6s1nN8+OHb5OeP\n6yc4Ddhs9dhsddhs9bhcFozGeAyGqAFi43I5kNKKy9WLw9GFy2XFaEwkNDQZkykJkykJozFx0H6i\n2jqjMHYITB1OQlq6EC0tSlw8W4/YtLT0iY/JpBZVT0qCxERkVCT2UCM2IbHhpLC+nvy4CFzmXlzm\nHgyd3Zi6eojo6CW6y4rVGEJ7VAgtkYLmCElTmJOWCElbtImOqFDao8Jpiw6nLSqSppgommLi6I2N\nQkTEkDt+LoUbvz2kz3g0BeRZKeW97iVtByOllGO3lusVIISQBw4sYeLEBxg//m5fd2dYjOl/9IYG\n5ZW8/Ta8/76qvrdxI87l19MZMouOfWY6dnTQuaeTsIww4lbEEb8inrjlcYRPvvRIj4vZ0djTOMBL\nOXDuAJPiJnlDXv7mpQTqF+9ggsGOy9ngdJrdgtKA3e4Rl3rvOU+z2xtwuRyYTAkYDLGDBEcAEint\nuFwWnM5uHI
5unM42pHQMFBdTwqDjRIyGBIyWcEydAlOXC2ObA0O7lZDWdiUura0UHjvGaqNRiU5r\n6wBPh6QkiI2FqCi1pq/JBCEhuKTE4bDhsFlwWs24zD3QY0Z0d2Hs6CK0sxcpoHLpbHIKhzRNblQF\n5FNSyleEEJlSysrh3GAsEULIlpb3qajYTH7+UUJCgn8OxojhcKhFh955R4W8jh2DJUtg7Vpcq9bS\nY8yhfVcXHR930LGjg5CwEOWdLI8jdnEsUXOiCAkd+pe+w+WgtKGU3TW72XN2T0B6KZrAw+nswWZr\nwm5vdIuKZ9vkPa/21TYkJByTKRmjMQGDIcYtOmFu0QlBiY4Dl8uKy2XB4ejE6WzH4WjD4WhHiFCM\nxgS36MRjNA7cmlyxhHYaMHUZMHaBsd2JsctOSIeFkPZeRHu7Gqvb1qZKiPffj4qCuDjVli6F//7v\nIX0WoykgxVLKBZ7tcG4wlniS6EeO3AIIZs78A0ZjrK+7FZh0dMBHHykx+eADtZj1qlWwbh1yzRrM\noVPp+LhTeShFnVgqLUTOiiTmuhhvi5oTRUjY0EWlv5ey5+we9tftJyM2w+ulLE5fzOyU2WOyMqRG\n4wmjecSkT2T6t+Z+55oBlzvvkuIWnnhvHickJBwhjAgRgpQupHTicllxOtux29u8ouNwdOB0duB0\n9mIwxGA0xmI0xrkFTDVjSAyhvRGYzAaMnYKwiKkkrv3GkOwbTQHZCriARcD2wa9LKW8Zzk1HC89E\nQpfLxsmTX6e9fRtz5rxBZOTQqlP6Er8NNdTXw7ZtfYJiscDatbBuHaxYgTNtKt2lPXT9//bOPLit\n6773n0OAxEKAu7gvEiVSIrVRliXLllwri1TXWVxnHCdpVqd106aZOJOldtJMm3lvmjeOX1+dPS9u\n7Nhp4xcnbRM3S2M7FmNLkWTJWkiTFClKoriKIkUSBAgQIIHz/jgACFJcIZJYdD4zZ+6Cy3t/v3NJ\nfnF+v7O84cT5hpPf//731PbXYq2ZEhXbThu2rbYli8pkYDKcSznec5zj3cfpcfaws2gnt5Xcxm2l\nt3FbyW2UZJQsu9tx+z6WSDL4kUg++P1uJiYGZxWXw4cbuOWW1PDnPt8Afr8j2EJZExYe1QEgD6Mx\nF4PBhsFgQYhUhAj1WvMyOelgcnKEQMCN3+8kLa2YsrLVE5CFYjz3ALcAPwL+KZoHxIKUlDSqq79D\nb++TnD59J5s2PU1u7jtibVZiU1gIH/iAKqAmhnrlFZU7+cpXMLjdZO7ZQ+aePfDh2+l/oJy9+/fi\nOuvCdcrF6PFRer7Tg+e8B+tGK7adtqmWyrb0WecRCmFMMbKjaAc7inbwyV1qFeVhzzAnek9wvPs4\nT51+ik/88hOYDKawmNxWchs7i3diS9PLAWtWH4PBisFQjtlcft1nnZ31bNmyf9q5QGCSyclrM1o2\nSmA8nvZpx2o7GAyrrZnW28xsXrs6DgZZqAXyIynlh0Oz8q6iXVEx23TuDscfaGp6LyUlf0N5+Rdv\nvjEiq0VPDxw7psrRo2oGy/XrVUx2zx61ra7G75WMRbRUnG848bR5sFRbpoe/tqVjsCw+RCWl5NLI\nJY53H1etlJ7jNPQ3UJldyY7CHaoU7aCusE7nUzQJz/Sw2tVwK8dozCA//4El3WslQ1jNqEWkfgPs\nZ8ZMEFLKoWgeulLMtR6I19vDm2++B7O5nI0bn8Zo1N9KVxyfTy3DGhKUY8dUwu+226YEZfduyMrC\nP+6fJiquN1y4W91q+uvI8Nd225JExTvppWmgidN9pzl9RZWG/gbyrHnTRGVH4Q6K7cX6y4XmpmQl\nBeTTwF8DlUAP0wVESikro3noSjHfglJ+/zjnz38Sp/MkW7b8HIslrkwPk0hx3vmY1Y8rV+D4cSUo\nR4/CqVNQVjbVStmzB2prwWBQotI4FhYU5xtO3OfcWNZbsO2wYdthw36LHVud
DWPm4nvb+QN+2ofa\nlaBECItAhMWkrrCObQXbqM6t5vCrh5P3fSQYyeADxJ8fK5YDkVJ+A/iGEOK7Usq/jsq6OMFgMLNx\n4w/o6fkWp07dTlXVt1iz5n79rXM1KSyEe+9VBVTX4cZG1To5fBgef1yNTdm1C8OePWTs2UPG/Xvg\nr1RyPOANMNY0huu0C+cpJwM/HcDV4CKtIG1KUILiYiqcfTZgQ4qBjXkb2Zi3kfdveT+gwgE9zp6w\noPys+Wf8/aG/p3u0m+Jrxewd2cu2/G1sK1ClwFawKtWl0cQ7S5nKZB9QJaV8OjitiV1KeWlFrVsi\ni10T3eE4Smvrx7FYNlJd/R1MpuJVsE6zKK5dU62UUOjr9dchP3+qhbJnD2zbpgZYoVaQc5934zrl\nUsJy2onrtIuUtBQlJnU20rekk745Hesm65J6gI35xmgaaKKxv5GG/gYarjbQ0N+AQRjCYhIqtWtq\nMRuXNo22RhMPrMZ07v8A3ApslFJWCyGKgZ9KKfdG89CVYrECAhAIeLl8+R/p7f0u69Z9laKiv9Ct\nkXjE74dz56YE5fhxtdj49u0qnxIqFRXhqb+llHi7vDhPORk7O8ZYkyrjF8cxrzVj3WwNi0r6lnQs\nGyyLXjdCSkmvs5fGq0FR6W/gbP9Z2ofaWZu1lq35W8OisjV/K2uz1urfK01csyrrgQA7gFOhNUCE\nEA1Sym3RPHSlWIqAhHC5Gmht/XMMBjvV1d/Hat2wQtYtjniLj0bLivoxOqrm9Dp+fKoEAlNismcP\n7NqlpoSIIOAN4G5zK0F5UxV3kxtvtxdLlWWqpbLZSnptOuZKM68efnVRfvj8PloHW8OiEhKYUe8o\nWwu2XicsmebVXWMkGX6vksEHiD8/VmM9EJ+UUgohZPCB6dE8LB6x2baxY8dRenq+zqlTeygvf5TS\n0s/oaVDimYwMNYjxrcGp2KSErq4pMfnKV1Q34ooKJSQ7d8LOnaTU1WHbqgYzRuJ3+3G3TAlL37/0\n4W524+vzca7wHGt2rcFaq0TFWmPFWn19KCzNkKaEomArH+SD4fNDnqFwCOzMlTM8e/ZZ3rz6JrnW\n3LCohLbVudWkGlZ/SVONJloW2wL5PFAFHAD+F/Bx4MdSym+urHlLI5oWSCQezwVaW/+SyUkHmzb9\nAJtt+zJap1lVJiZUgv7kSXjjDVWam6GyEm65JSwq1NWBbfZu3X63H3erW4lL8xjuFjfuZjfjHeOY\nykzTRCW9VuVYDOkLdzMOyACXhi9Na6k0Xm2ky9FFdW71NFHZVrCNQluhDoNpVozVWg/kAHAQ1ZX3\nt1LKl6J54EpyowICKsZ95cpTXLz4RYqKHqKi4ssYDJZlslATU3w+aGqaEpRTp+DNN6G8fEpQQqKS\nMfccagFfAE+7Z0pYmt2MtYzhafOQmp86XVRqrVhrrKRmLdyycE+4abraNE1UGvobkFJeJyqb8zdj\nTbUueE+NZiFWS0AKgF3Bw9ellFejeeBKshwCEsLr7aO9/WGczlNUV3+XnJwDy3LfhYi3+Gi0JIwf\nExOqZXLq1JSwNDZCQQFs3059Zib777tPJe3Ly6fW6J4F6ZeMd4xPExV3s2rBGOwG1WKpmRKV9Np0\nUtekztu6kFLSP9Z/XW6ldbCV0oxSthZsZVv+NrUt2EZlduWsU+EnzPuYh2TwAeLPjxXPgQghHgAe\nB+pRLZBvCiG+IKX8WTQPTQRMpiI2b36ea9d+RWvrQ2Rm7mPDhv9DWlp+rE3TLCepqUoctm+HBx9U\n5/x+aG+HM2fghRfge99T+x6Puq6uTpXt29XAR5MacyIMAst6C5b1FnjX1COklHi7vWFRcZ12cfXH\nVxlrGoMUprdYaqxYa62YSkwIIRBCUGgrpNBWyMH1B8P3nPBPcH7ovBKV/kaePvM0jf2NDLoH2Zy/\nma35U4n7rQVbV7NGNTcRi82BnAUOhFod
Qog1wMtSyrhKEixnCyQSv3+Mjo6vcOXKM8Euvx9HxMmC\nR5pV5OpVNT3LmTNT2wsXoKpqSlBC27y8BW8npWTi6kS4pRKZZ/G7/Vg3TYXBQltzhRmRMveXRce4\ngzevvjktDNbY34gl1TItab+1YKseu6IBVqcbb6OUcmvEcQpwNvJcPLBSAhLC6TxDW9snSEkxUV39\nPdLTa1fsWZoEYXxc5VEiheXsWbDbrxeVDRsgZXFfPCaGJ8I9wyJzLRPXJmYVFkulBWGY/X+AlJKu\n0S4a+xuVoATFJXLsSkhUthVsY23W2rhZEVKz8qyGgDwObAOeC556H9AgpXwkmoeuFCstIABS+unt\n/R4dHV+huPivKC//OwyG5fsWF2/x0Wi5qf2QEjo6prdUzp5VS5du2TJdWLZuVSvKLZLJ0Unc5yKS\n901q67viw1JtmZa8T68NDpJMS5nVj8ixK5HCMjI+wuY1m6eJytb8reRac5dWD8vMTf07tYKsWA5E\nCLEBKJBSfkEI8R5gX/Cjo8C/RfPAREcIAyUlf0Ne3p9y/vzDvP76RsrKPkdR0Z9jMCTN8BjNjSAE\nrFunyn33TZ0fGYGGBiUoJ07Ak09CSwuUll7fWikpmTVhb8wwkrE7g4zd03uJ+cf8SliC4bD+f+1X\nXY47x7Gss3BpzSUq7qoI51isG62kWabGrkQy7BkOh8Ea+xt5vul5Gq82kp6aHh4UGRKXmrwaLKm6\nl+LNykKz8f4S+KKUsnHG+a3AV6WU75r9J2PDarRAZjI6+jqdnY/hcLxGcfEnKSn5FGlpC8e/NRpA\n9QJrbZ0KfZ05o4rfPyUmIWGpqYG0tCXd3j/ux3PeM5VjCXU5bvdgKjVdFwqzbrJitF3/vTIUBgsl\n7UMtlvahdioyK8LCsnnNZjbnb2Z99no9KDJBWMnp3E9IKXfN8VnjzZYDmQ+3u5Wurv/NwMC/U1Dw\nYcrKPovZXBETWzQJjpRq6vuZeZVLl6C6erqwLDJhP5PARADPhRnC0hwcy7Im9XphmWMsSygMFmqt\nNA000TzQTI+zh/XZ69mcv5navFpq16hSlVtFmmFpIqhZWVZSQM5LKavm+KxdShnbiaNmEEsBCeH1\n9tLd/QR9fT8gN/ceysr+Fptt8Tobb/HRaNF+rAAez1TCPiQuDQ0qYR8Z/gol7A1To+IX68d1Y1lC\nPcMix7KExCU4piVtzfWC4JnwcG7wHM0DzTQPNIeFpdPRSXlmOTVratiUu4lNeZvUft6mBVeKjKt3\ncQPEmx8rOQ7kpBDiISnlkzMe+BfAG9E8MNkxmYpZv/5rlJd/id7e79HQcBCbbSfl5Y+QmblPT0mh\niR6LRc3ttSsiKDAzYf/cc/Doo6rL8ZYtU4Li96tR9nb7vI+YcyxLQI1lCQmK6w0X/T9SeRZhFOFW\ninWTFWuVFUu1he1rt7OjaMe0+3snvVwYvsC5wXO0DLTwSscrfPvEtzk3eA67ya4EJa9m2rY0o1T/\n3cQpC7VACoD/BHxMCcatQBpwn5TyyopbuATioQUyE79/nP7+Z+jsfJy0tALKyx8hN/edehyJZmVx\nOFTrJLK10twMRUXT8yqLGGE/H1JKfP2+aSEwd5sbz3kP3h4v5gozlioL1morliqL2q+yYio1Tet2\nLKWke7RbCctgS3jbMtDC2MQYG3M3hlstNWtqqMmroTK7EpNx9oXDNItnNbrxvgXYEjxsklK+Es3D\nVpp4FJAQUvoZGPh3OjsfIxAYp6zsCxQU/BkpKToerFklJifh/Pnrcysej1qkKzIEtnkzmG+se3rA\nG8Bz0TMlKm0eNYfYeTeT1yYxr1PiYtkwJSyWDRZMZaZpgyWHPcO0XmulZaBlmsB0OjopzSilOrea\n6txqNuZuDO+XZJTosSyLZFXmwkoE4llAQkgpGR7+HV1dj+F2n6O09LMUFT2E0ahmhI23+Gi0aD/i\ni3n9
CI2wjyznz6uZiyOT9du3q2WJlyGc5Hf78VxQguI5r4r7vBtPu0eJS+WUuFirVOvlxMAJDrz3\nQFhcfH4fl4Yv0XqtlbZrbbQOttJ6rZXzQ+cZ9Y5SlVMVFpT12etZl72OyuxKSuwlGFIWnjV5pYi3\n36nVWA9Es0wIIcjJeTs5OW/H6XyDzs7H6Oz8KsXFf0VJyadjbZ7mZiQ/Hw4cUCWE16vGqIRaKr/9\nrdoajaq1EikqmzYtuXuxwWqYdW0WUGNaPBemRGX0xCj9P+6n7c02zA+albhsUHkaW6WNvZV7eXvl\n2zHvNIfXaXGMOzg/dJ62a220XWvjUMchnjrzFJeGLzHgHqAso4y1WWspzyynIrOCiqwKKjIrKM8s\npzSjVIfGFknMWiBCiGzgJ0AF0AE8IKV0zLimFHgWKAACwJNSym/Mc8+4b4HMhtvdHuwC/Dz5+X9G\nWdnnsFjWxdosjWY6UkJPjxKSyPzK5cuqe/H27Spxv3mzKjeQW5mLSdck4xfGcZ93M35xHM9FT3jr\n7fKSlp+GeV1QZNZZMFeaVahsnYW0ojREimB8cpzLI5e57Lgc3nY6OsPbXmcvOZYcyjPLKcsooyyj\nTO1nqv2yzDIKbYVJEyJLyBCWEOIx4JqU8mtCiEeAbCnlozOuKQQKpZRnhBA2VCL/XinluTnumZAC\nEsLrvUJPz9fp7X2SnJyDlJc/ohe10sQ/brdaZ+XsWdXNuKlJFZdLDX4MCUqolJYuu7AABCYDeLu9\njF8aV6JySYnL+KVxxjvGmRiewFRqwrzWrErF9G1acRopxhT8AT/9Y/1KVEYu0zXaRZejS21Hu+h0\ndDIyPkKxvTgsKCGhKcssCwtPjiUnIXqPJaqAnAPuklL2B4WiXkq5aYGf+TnwTSnl7+b4PKEFBFR8\ndN++W+jt/b90dz+BzbaNsrJHyMq6KyF+GUPEW5w3WrQfN8DQkOr5FRKUpiZ1PDamhKW2VglKba0q\n5eXzTjZ5oz74x/14O72MdwRF5XKwdKjtxMAEaUVpmMvNmMpN07dlamvMVFF/76SX7tHusKCEBKbT\n0UnXaBfdo914J71hcQm3ZjLLGGoZ4p4D91BiLyHLnBXzv+tEzYHkSyn7AaSUV4QQ8y60IYRYC9QB\nx1fetNhiNGZQXv4FSks/zZUrP6Kt7RMYjVmUlz9CXt6f6i7AmsQgJwf27VMlkqEhlV9pblblpZeU\nuIyMqHxKSFBCZd26aYMio8VgNmCtVmvaz0bAG8Db42W8cxzvZbV1nnQy+J+DeLu8eLu8IAiLianM\nxNqytWws26iON5owlZowWJStTq/zOpE52nWUhuYGfjjyQ3qcPUz4JyjJKKHEXkKxvZgSewmlGaXh\ncyUZJRTZiuJ2WpgVbYEIIV5C5S/CpwAJfBn4oZQyJ+Laa1LKWaf7DIav6oH/KaX8xTzPkx/96EdZ\nu3YtAFlZWdTV1YW/tdTX1wMk3PFdd93J4OAv+I//+Dv8fhf33fcVCgo+xKuvHo0L+/SxPl6W41/9\nCi5fZr/FAk1N1L/2GnR0sN/phOpq6nNzYe1a9r/jHVBbS313NxiNq2bfoUOH8I/52VO+B2+Xl1d+\n9woT/RPUiTq8XV6Oth3FN+hjV9YuTGUmGswNpK5J5Y/2/BGmUhMnBk+QuiaVA+85gMFqoL6+Hs+E\nh8odlfQ6e3n5lZcZcA9g3mCme7SblpMtDI4N4ihykGXOwt5nJ8eSw+Zdmym0FeJqc5FjyeGtb3kr\nhbZC2k+1k56azlve8pZ5/Qntd3R0APDMM88kZAirBdgfEcI6JKWsmeU6I/BL4DdSyq8vcM+ED2HN\nh5SSkZF6OjsfY2yskdLST1NU9JekpmbH2jSNZuVwueDcuakWSygs1turuhqHwmE1Naps3KhG7ccA\nGZD4rvrwdntV6fJev9/rxZBuwFRiwlRiIq0kTe0XB/eLTaQVp5G2Jg
1hEPgDfgbcA/Q5++hz9XHF\ndYU+Zx/9Y/1ccV0Jlz5XH7eV3MbLH3l5STYnag7kMWBISvnYXEn04HXPAoNSys8u4p4JLyD1i4zz\nOp1n6O7+Z65de4H8/A9QUvJp0tPnTSGtKov1I97RfsQP1/ng8UBbmxKUlpapsNiFC1BcPCUoNTVq\n1ciqqmUbx3IjHDp0iL1b9uLt8eLr8eHt8YaLr9eHt1edn3RMkromVQlKUVq4mIoijgvTSF2TisGs\nwmYT/oklh7sSNQfyGPC8EOLjwGXgAQAhRBGqu+47hRB7gQ8CjUKI06jw15eklP8dK6PjBbu9jpqa\nZ/B6++jt/R5nztyF3b6T0tLPkJ19IOaJOY1mxbFYpsaiRDI5CRcvTonKq6/CU0+pwZFut5posqpK\nbdevnyolJcuSa1kIIQRpa1QLg7q5rwv4Aviu+PD1BUWlT+07Tzrx9nnVZ1d8TAxMYLAZSCtMI/ut\n2VR9c9b5b1fGl0T/xh5JMrRAosXvH+fq1efo7n4CKScpLX2YgoIPYTDMnjDUaG5KHA5ob1dicuGC\n2r9wQZVr16CiQolJZeVUCS0OlpGx8P1jgAxIJoYm8PX5IAC27dcPzpyPhAxhrQQ3s4CECOVJuruf\nYHT0KEVFf0Fh4YNYrav3rUSjSUjcbjWz8cWLqly4oLaXLqlisUwXlHXrpo7Ly5c8Gj9e0AISJBkE\nZDlj1W53O72936a//znM5jLy89/PmjXvw2wuXZb7z0cyxNxB+xFPxNQHKdWcYZGCErnf2wsFBVPC\nUlEBa9dOldJSSE2NvR+zkKg5EM0KY7VuYMOGf6ay8nFGRuq5evU5Ll/eRnr6VvLzP8CaNffr5Xc1\nmsUghBKIggK4/fbrP5+chK4uJSYdHarU16vjy5ehr08l8Csq1CzHL72k9kOlrAzS01fZqRtHt0Bu\nMgIBL0ND/01//3MMDf2GzMx95Od/gLy8ezEa519sSKPRRMnEBHR3KzGJLJ2datvVpUJkZWWqlJdP\n7ZeWqlJcDNblz2nqEFYQLSBLY3LSxbVrL3D16nOMjLxKTs4fk5//fnJy/hiDIfG+DWk0CYuUKonf\n1aVEpbNTCU5Xl9p2d6swmcWieosVF6tSWKhmUy4tVedLS1WLZgloAQmSDAISq/joxMQQAwP/ztWr\nP8HpfJ2srP3k5d1Lbu67SEubd5aZWYm3OG+0aD/ih2TwAW7Aj5DI9PWpWZH7+lTp758SmKoqePbZ\nJd1W50A0N0xqag7FxQ9RXPwQExPDDA39msHBX9De/jnS07eQl3cveXn3YrVWx9pUjebmRAjIy1Nl\n69ZYWwPoFohmAQIBL8PDrzA4+AuuXXsBozEr2DK5l4yM3XpiR40mwdEhrCBaQFYWKQM4nScZHPw5\ng4O/YHJyiNzcd5Obew9ZWW/BaIzPgVYajWZubkRA9NfHOCNyxsx4Q4gUMjJ2U1n5VXbvbqKu7lUs\nlip6er7F0aMlnD59Jx0d/5PR0eMcOjTrki0JRzy/j6WQDH4kgw+QPH6AzoFobgCrtYry8s9TXv55\n/H43DsdrDA29SGvrQ7z55iXy8+8mO/sg2dkHsFjWxtpcjUazzOgQlmZF8Hp7GR5+maGhFxkefgmj\nMYvs7APk5BwMhrv0mBONJh7QOZAgWkDiEykDuFwNDA+/yNDQizidx7HZdpCdfZCcnIPY7TsRYuVn\nQdVoNNejcyBJRLLERyP9ECIFu72O8vK/pa7uZe64o5/y8i8xOTnEuXMf58iRfJqa3kdf3w8YH++M\nndGzkIzvI1FJBh8gefwAnQPRxACDwUpu7t3k5t4NgNfbw9DQSwwPv8jFi4+SmpoXbp1kZt6F0bi0\n6ak1Gs3qoENYmrhChbvOBHMnL+J0nsBuvzUsKDbbDj32RKNZRnQOJIgWkORjctKFw/FqUFB+i883\nQHb228jJUb27zObyWJuo0SQ0Og
eSRCRLfHS5/DAabeTm3kNV1RPs3t3CrbeeJifnboaHX+aNN3Zy\n/PhG2to+FRzYOLosz4xEv4/4IRl8gOTxA3QORJNgmM1lFBU9SFHRg8Fw11mGh1+ip+dbtLR8iPT0\n7eTkHCA7+wB2+25SUvSvuEazUugQliZp8Ps9OByvMTz8EkNDLzE+3kFW1l3Bsh+bbbvuLqzRzEDn\nQIJoAdFE4vP1MzJSHyy/x+frIzNzH5mZIUGp0y0UzU2PzoEkEckSH40HP9LSCsjPfx/V1d9l9+5m\ndu8+R0HBRxgf7+DcuY9x5EgeDQ330Nn5NUZHjxMITFx3j3jwYzlIBj+SwQdIHj9A50A0NxFKUN5L\nfv57AfD5BnA4XmVk5Pe0tj7E+HgHGRl3kJV1J5mZ+7Dbd8XYYo0mvtEhLI0myMTENUZGXsXhOIzD\ncYSxsUbS07eQmbmXzMy9ZGTsxWQqjLWZGs2yonMgQbSAaJYTv9+D03kCh+MIDscRRkf/gNGYPU1Q\n0tNr9cBGTUKjcyBJRLLER5PBD4PBwpkzASoqvsi2bb9k795Btm79LzIz78ThOEpT030cOZJLQ8M9\nXL78jwwP1+P3u2Nt9qwkw/tIBh8gefwAnQPRaBaNECmkp9eSnl5LcfFDgOrp5XD8AYfjCBcvhoo3\nbAAADitJREFUPqrDXpqbCh3C0miWkelhr8PBsFduWFAyM/ditdbosJcmbkjIHIgQIhv4CVABdAAP\nSCkdc1ybApwEuqWU757nnlpANHGFlAHc7pZwYt7hOMLk5DAZGXcEx6TsxW7fhcFgjrWpmpuURM2B\nPAq8LKXcCLwCfHGeax8GmlfFqhiTLPFR7YdChb02U1z8CWpqnmXPngvs2tVEYeHH8PmucOHCZzly\nJJdTp/Zy4cLfMjj4Aj7f4PIYH0EyvI9k8AGSxw+IbQ7kXuCu4P4zQD1KVKYhhCgF7gH+Efjsahmn\n0awUJlMR+fn3k59/P6BmHHY6X8fhOExPz7dpafkwJlNxsIWyj4yMvVgs6xEiqi+JGs2KEcsQ1pCU\nMmeu44jzP0WJRybwOR3C0iQ7gcAkY2ON4TyKw3EYKSeDORQV9rLZdpCSkhprUzVJwI2EsFa0BSKE\neAkoiDwFSODLs1x+3X9+IcQ7gH4p5RkhxP7gz2s0SU1KihG7fQd2+w5KSz+FlBKvtzMsKFeu/JDx\n8YvY7beGWyiZmbdjNGbG2nTNTcaKCoiU8sBcnwkh+oUQBVLKfiFEIXB1lsv2Au8WQtwDWAC7EOJZ\nKeVH5rrvxz72MdauXQtAVlYWdXV17N+/H5iKPcbz8ZkzZ/jMZz4TN/ZEexwZ540He6I9jpf3YTZX\ncOzYJeAB9u//DhMTI/z6199nbKyR2trDNDWdoLm5gPT0bRw8+D4yM/dx7Fh7+OeT4X088cQTCff3\nPNtx6Fwsn19fX09HRwc3SixDWI8BQ1LKx4QQjwDZUsrrciAR19/FTRDCqq+vD7/wREb7sboEAj5c\nrjPhkJfDcZiUFGs4j9LQYOLuuz+a0N2HE+VdLES8+ZGo3XhzgOeBMuAyqhvviBCiCHhSSvnOGdff\nFAKi0SwHUko8nvM4HK+FBWVi4lq4+3BW1p3Y7beSkmKKtamaGJOQArISaAHRaObG670SHNx4hJGR\n13C7z2G37wi2Uu4kI+MOUlOzYm2mZpVJ1HEgmlmIjFMmMtqP+KK+vh6TqZD8/PvZsOGfufXWk9xx\nRx8VFf+AECa6uv6JY8fKOHFiG21tn6S//znGx7tibfY0kuldJAt6LiyN5ibFaLSTk/N2cnLeDkAg\nMIHLdRqH4zADAz+lvf3haXmUrKw79TQsmmnoEJZGo5kVlUdpY2QklEd5jcnJETIz7yAzM7To1k6d\nR0lwdA4kiBYQjWZl8Xp7IwY4vobb3YbdvjNi1PxuUlNzY22mZgnoHEgSkSzxUe1HfLFcfphMxeTn
\nv5eqqq9z662nuOOOXioq/g4hDHR1fY1jx9Zx7Nh6mpreR2fn4wwPH2JycnRZnq3fRfyhcyAajSZq\njMYMcnIOkpNzEAjNPtyK03kSp/Mkg4P/ict1FpOpDLt9J3b7LdhsO7DZdpCamh1j6zU3ig5haTSa\nFSUQmMTtbsbpPInLdRqn8zRjY2cxGnOnCYrdfgtpaUV60shVRudAgmgB0WgSAyn9eDztOJ2ncblO\n43Kdwuk8jRCGCFG5Bbt9B2ZzpRaVFUQLSJBkEJB4m+YgWrQf8UUi+KEmjewOtlJOhYVlctKJzVZH\nc/Ma3va2e7Hbd2CxbCQlJTEj8PH2LuJ2Nl6NRqNZLEIIzOYyzOYy8vKmZizy+QZxuU7T1vZTrl37\nLy5f/h94vT2kp28mPX0bNtt2bLbtpKdv0yPpVxndAtFoNAnH5OQoY2ONuFwNuFxnGRtrYGysEaMx\nB5ttW1hY0tO3YbFsSNjWymqgQ1hBtIBoNDcvUgbweC4GhUWJisvVgM/Xh9VajdVaS3p6bXhrNq/X\nwoIWkDDJICDxFh+NFu1HfJEMfkTrw+SkC7f7HG53M2NjzeGtz9eD2VyJ1VpDenoNVmstVmsNVutG\nDAbL8jsQJN7ehc6BaDQazRwYjTYyMm4lI+PWaef9fg8eTxtjYy243S0MDv4HY2MteDztmEwlQWEJ\niYoSGb3q43R0C0Sj0WgiCAQmGB+/GBSWZtzulmDLpRWjMTNCVGrDLZe0tDWxNjtqdAgriBYQjUaz\nUkgZYHy8E7e7JUJUlMiAISwska0Wk6k07sewaAEJkgwCEm/x0WjRfsQXyeBHvPogpcTnuzJDVFTx\n+11YrZvC+ZX09BpOnhzl4MEPxE0CX+dANBqNJkYIITCZijCZisjOfuu0zyYmhqe1WHp7X+XixVMc\nPvyXWCwbwi0Vq3UjFks1Vms1RmNGjDxZOroFotFoNKuM3+/G7W6NaK204Xa34vGcx2jMxmrdGC4h\nYTGZKlak1aJDWEG0gGg0mkRGygBeb1ew23FIVNpwu9vw+a5gNldgsWzAYlmPxbIes3kdJlMZFssG\njEZ7VM/UAhIkGQQkXuO8S0X7EV8kgx/J4ANE74ffP874+EU8ngt4PO14PBcYH+/A6+3E42nHYMgg\nN/cdbNr0gyXdV+dANBqNJskxGMykp6tR9DORMoDP14ff71lVm3QLRKPRaG5i9JK2Go1Go1l1tIDE\nGcmyXrL2I75IBj+SwQdIHj9AC4hGo9FookTnQDQajeYmRudANBqNRrPqxExAhBDZQogXhRCtQojf\nCiFmnSdZCJEphPipEKJFCNEkhLhttW1dTZIlPqr9iC+SwY9k8AGSxw+IbQvkUeBlKeVG4BXgi3Nc\n93Xg11LKGmA70LJK9q0qif5Lpe2PLdr+2JLo9kdLLAXkXuCZ4P4zwJ/OvEAIkQHcKaV8GkBKOSml\nHF09E1eP0C9goo60nfkHlGh+zPUPIFH8WOgfWLz7sZh/wPHsw1IEJJ79WCqxFJB8KWU/gJTyCpA/\nyzXrgEEhxNNCiFNCiO8LIVZurUmNRqPRLJoVFRAhxEtCiIaI0hjcvnuWy2frPmUEbgG+LaW8BXCj\nQl9JS7I0hbUf8UUy+JEMPkDy+AEx7MYrhGgB9ksp+4UQhcChYJ4j8poC4KiUsjJ4vA94REr5rjnu\nqfvwajQazRJJxMkUXwA+BjwGfBT4xcwLguLSJYSollK2AW8Dmue6YbSVoNFoNJqlE8sWSA7wPFAG\nXAYekFKOCCGKgCellO8MXrcd+BcgFbgIPCildMTEaI1Go9GESaqR6BqNRqNZPRJuJLoQ4m4hxDkh\nRJsQ4pE5rvmGEOK8EOKMEKJutW2cj4XsF0LcJYQYCfY6OyWE+HIs7JwNIcQPhBD9QoiGea6J57qf\n1/54rnsAIUSpEOKV4IDaRiHEp+e4Lu7ewWJsj+f6F0KYhBDH
hRCng/b/wxzXxV3dw+Lsj6r+pZQJ\nU1CC1w5UoEJaZ4BNM675E+BXwf3bgGOxtnuJ9t8FvBBrW+ewfx9QBzTM8Xnc1v0i7Y/bug/aVwjU\nBfdtQGui/P4v0vZ4r39rcGsAjgG7E6Hul2D/kus/0Vogu4HzUsrLUsoJ4P+hBiRGci/wLICU8jiQ\nGezNFQ8sxn6AuOwMIKU8DAzPc0k81/1i7Ic4rXtQ46WklGeC+y7UrAwlMy6Ly3ewSNshvuvfHdw1\noTogzYz/x2Xdh1iE/bDE+k80ASkBuiKOu7n+l3DmNT2zXBMrFmM/wO3BJvCvhBDXr18Zv8Rz3S+W\nhKh7IcRaVGvq+IyP4v4dzGM7xHH9CyFShBCngSvAS1LKEzMuieu6X4T9sMT612uixx9vAOVSSrcQ\n4k+AnwPVMbbpZiEh6l4IYQN+Bjwc/DafMCxge1zXv5QyAOwITrH0cyFErZRyzmEF8cYi7F9y/Sda\nC6QHKI84Lg2em3lN2QLXxIoF7ZdSukJNTSnlb4DUYJfnRCCe635BEqHuhRBG1D/gH0kprxs7RRy/\ng4VsT4T6B5BqPr5DwN0zPorbuo9kLvujqf9EE5ATwAYhRIUQIg14P2pAYiQvAB8BEELsAUZkcM6t\nOGBB+yNjpkKI3aiu1kOra+a8COaOk8Zz3YeY0/4EqHuAp4BmKeXX5/g8nt/BvLbHc/0LIfJEcMkJ\noebjOwCcm3FZ3Nb9YuyPpv4TKoQlpfQLIT4FvIgSvx9IKVuEEJ9QH8vvSyl/LYS4RwjRDowBD8bS\n5kgWYz9wvxDir4EJwAO8L3YWT0cI8WNgP5ArhOgE/gFIIwHqHha2nziuewAhxF7gg0BjMJYtgS+h\nevXF9TtYjO3Ed/0XAc8IIVJQf7s/CdZ1QvzvYRH2E0X964GEGo1Go4mKRAthaTQajSZO0AKi0Wg0\nmqjQAqLRaDSaqNACotFoNJqo0AKi0Wg0mqjQAqLRaDSaqNACotHMghDCOcu5O4UQbwghJoQQ75nn\nZwNCiMcjjj8nhPj7lbJVo4kVWkA0mtmZbYDUZdTyy/+2wM96gfdEOw2HEMIQzc9pNKtNQo1E12hi\niZSyE0AIsdDo20ng+8BngWmL8gghKlBTeuQCA6glmruFEE8D46hZao8EW0DrgErU/EqfBfag1pzo\nBt4lpfQvk2saTVToFohGs/xI4NvAB4UQ9hmffRN4WkpZB/w4eByiREp5u5Ty88HjStTUK/cC/wr8\nTkq5DSU071hB+zWaRaEFRKNZAYJTlT8DPDzjo9uB54L7PwL2Rnz20xnX/iY4BXcjkCKlfDF4vhFY\nu6wGazRRoAVEo1k5vg78OZAecW6+8NfYjGMvqJnuUBPchQigw8+aOEALiEYzOwst7Tnf5wJASjkM\nPI8SkRB/AD4Q3P8Q8Noy2aPRrDpaQDSa2bEIITqFEF3B7WeEELcKIbqA+4HvCSEa5/jZyFbGP6ES\n5qFznwYeFEKcQU1v/vAsP7PQPTWauEBP567RaDSaqNAtEI1Go9FEhRYQjUaj0USFFhCNRqPRRIUW\nEI1Go9FEhRYQjUaj0USFFhCNRqPRRIUWEI1Go9FEhRYQjUaj0UTF/wffj/WREO+cUwAAAABJRU5E\nrkJggg==\n", + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "glmnetPlot(fit);" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "As before, we can extract the coefficients at certain values of $\\lambda$." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 54, + "metadata": { + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } + }, + "outputs": [ + { + "data": { + "text/plain": [ + "array([[ 0.37693638],\n", + " [-0.09547797],\n", + " [-0.13595972],\n", + " [ 0.09814146],\n", + " [-0.11437545],\n", + " [-0.38898545],\n", + " [ 0.242914 ],\n", + " [ 0.03647596],\n", + " [ 0.34739813],\n", + " [ 0.03865115],\n", + " [ 0. ],\n", + " [ 0. ],\n", + " [ 0. ],\n", + " [ 0. ],\n", + " [ 0. ],\n", + " [ 0. ],\n", + " [ 0. ],\n", + " [ 0. ],\n", + " [ 0. ],\n", + " [ 0. ],\n", + " [ 0. ],\n", + " [ 0. ],\n", + " [ 0. ],\n", + " [ 0. ],\n", + " [ 0. ],\n", + " [ 0. ],\n", + " [ 0. ],\n", + " [ 0. ],\n", + " [ 0. ],\n", + " [ 0. ]])" + ] + }, + "execution_count": 54, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "glmnetCoef(fit, s = np.float64([0.05]))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Since the Cox Model is not commonly used for prediction, we do not give an illustrative example on prediction. If needed, users can refer to the help file by typing `help(predict.glmnet)`." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Currently, cross-validation is not implemented for cox case. But this is not difficult to do using the existing `glmnet` calls that work perfectly well for this case. (TBD: `cvglmnet` to be implemented for cox)." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## References\n", + "\n", + "

Jerome Friedman, Trevor Hastie and Rob Tibshirani. (2008).
\n", + "Regularization Paths for Generalized Linear Models via Coordinate Descent
\n", + "Journal of Statistical Software, Vol. 33(1), 1-22 Feb 2010.

\n", + "

Noah Simon, Jerome Friedman, Trevor Hastie and Rob Tibshirani. (2011).
\n", + "Regularization Paths for Cox's Proportional Hazards Model via Coordinate Descent
\n", + "Journal of Statistical Software, Vol. 39(5) 1-13.

\n", + "

Robert Tibshirani, Jacob Bien, Jerome Friedman, Trevor Hastie, Noah Simon, Jonathan Taylor, Ryan J. Tibshirani. (2010).
\n", + "Strong Rules for Discarding Predictors in Lasso-type Problems
\n", + "Journal of the Royal Statistical Society: Series B (Statistical Methodology), 74(2), 245-266.

\n", + "

Noah Simon, Jerome Friedman and Trevor Hastie (2013).
\n", + "A Blockwise Descent Algorithm for Group-penalized Multiresponse and Multinomial Regression
" + ] + } + ], + "metadata": { + "celltoolbar": "Raw Cell Format", + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.6.9" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/docs/_sources/glmnet_vignette.ipynb.txt b/docs/_sources/glmnet_vignette.ipynb.txt index 0319135..6cd9c0a 100644 --- a/docs/_sources/glmnet_vignette.ipynb.txt +++ b/docs/_sources/glmnet_vignette.ipynb.txt @@ -176,12 +176,12 @@ "baseDataDir= '../data/'\n", "\n", "# load data\n", - "x = scipy.loadtxt(baseDataDir + 'QuickStartExampleX.dat', dtype = scipy.float64)\n", - "y = scipy.loadtxt(baseDataDir + 'QuickStartExampleY.dat', dtype = scipy.float64)\n", + "x = np.loadtxt(baseDataDir + 'QuickStartExampleX.dat', dtype = np.float64)\n", + "y = np.loadtxt(baseDataDir + 'QuickStartExampleY.dat', dtype = np.float64)\n", "\n", "# create weights\n", - "t = scipy.ones((50, 1), dtype = scipy.float64)\n", - "wts = scipy.row_stack((t, 2*t))" + "t = np.ones((50, 1), dtype = np.float64)\n", + "wts = np.row_stack((t, 2*t))" ] }, { @@ -394,7 +394,7 @@ } ], "source": [ - "glmnetCoef(fit, s = scipy.float64([0.5]), exact = False)" + "glmnetCoef(fit, s = np.float64([0.5]), exact = False)" ] }, { @@ -436,7 +436,7 @@ ], "source": [ "fc = glmnetPredict(fit, x[0:5,:], ptype = 'response', \\\n", - " s = scipy.float64([0.05]))\n", + " s = np.float64([0.05]))\n", "print(fc)" ] }, @@ -595,7 +595,7 @@ }, "outputs": [], "source": [ - "foldid = scipy.random.choice(10, size = y.shape[0], replace = True)\n", + "foldid = np.random.choice(10, size = y.shape[0], replace = True)\n", "cv1=cvglmnet(x = x.copy(),y = y.copy(),foldid=foldid,alpha=1)\n", "cv0p5=cvglmnet(x = x.copy(),y = y.copy(),foldid=foldid,alpha=0.5)\n", "cv0=cvglmnet(x = 
x.copy(),y = y.copy(),foldid=foldid,alpha=0)" @@ -635,10 +635,10 @@ "f.add_subplot(2,2,3)\n", "cvglmnetPlot(cv0)\n", "f.add_subplot(2,2,4)\n", - "plt.plot( scipy.log(cv1['lambdau']), cv1['cvm'], 'r.')\n", + "plt.plot( np.log(cv1['lambdau']), cv1['cvm'], 'r.')\n", "plt.hold(True)\n", - "plt.plot( scipy.log(cv0p5['lambdau']), cv0p5['cvm'], 'g.')\n", - "plt.plot( scipy.log(cv0['lambdau']), cv0['cvm'], 'b.')\n", + "plt.plot( np.log(cv0p5['lambdau']), cv0p5['cvm'], 'g.')\n", + "plt.plot( np.log(cv0['lambdau']), cv0['cvm'], 'b.')\n", "plt.xlabel('log(Lambda)')\n", "plt.ylabel(cv1['name'])\n", "plt.xlim(-6, 4)\n", @@ -676,7 +676,7 @@ } ], "source": [ - "cl = scipy.array([[-0.7], [0.5]], dtype = scipy.float64)\n", + "cl = np.array([[-0.7], [0.5]], dtype = np.float64)\n", "tfit=glmnet(x = x.copy(),y= y.copy(), cl = cl)\n", "glmnetPlot(tfit);" ] @@ -724,7 +724,7 @@ } ], "source": [ - "pfac = scipy.ones([1, 20])\n", + "pfac = np.ones([1, 20])\n", "pfac[0, 4] = 0; pfac[0, 9] = 0; pfac[0, 14] = 0\n", "pfit = glmnet(x = x.copy(), y = y.copy(), penalty_factor = pfac)\n", "glmnetPlot(pfit, label = True);" @@ -764,9 +764,9 @@ } ], "source": [ - "scipy.random.seed(101)\n", - "x = scipy.random.rand(100,10)\n", - "y = scipy.random.rand(100,1)\n", + "np.random.seed(101)\n", + "x = np.random.rand(100,10)\n", + "y = np.random.rand(100,1)\n", "fit = glmnet(x = x, y = y)\n", "glmnetPlot(fit);" ] @@ -857,8 +857,8 @@ "baseDataDir= '../data/'\n", "\n", "# load data\n", - "x = scipy.loadtxt(baseDataDir + 'MultiGaussianExampleX.dat', dtype = scipy.float64, delimiter = ',')\n", - "y = scipy.loadtxt(baseDataDir + 'MultiGaussianExampleY.dat', dtype = scipy.float64, delimiter = ',')" + "x = np.loadtxt(baseDataDir + 'MultiGaussianExampleX.dat', dtype = np.float64, delimiter = ',')\n", + "y = np.loadtxt(baseDataDir + 'MultiGaussianExampleY.dat', dtype = np.float64, delimiter = ',')" ] }, { @@ -947,7 +947,7 @@ } ], "source": [ - "f = glmnetPredict(mfit, x[0:5,:], s = scipy.float64([0.1, 0.01]))\n", + 
"f = glmnetPredict(mfit, x[0:5,:], s = np.float64([0.1, 0.01]))\n", "print(f[:,:,0], '\\n')\n", "print(f[:,:,1])" ] @@ -1126,8 +1126,8 @@ "baseDataDir= '../data/'\n", "\n", "# load data\n", - "x = scipy.loadtxt(baseDataDir + 'BinomialExampleX.dat', dtype = scipy.float64, delimiter = ',')\n", - "y = scipy.loadtxt(baseDataDir + 'BinomialExampleY.dat', dtype = scipy.float64)" + "x = np.loadtxt(baseDataDir + 'BinomialExampleX.dat', dtype = np.float64, delimiter = ',')\n", + "y = np.loadtxt(baseDataDir + 'BinomialExampleY.dat', dtype = np.float64)" ] }, { @@ -1222,7 +1222,7 @@ } ], "source": [ - "glmnetPredict(fit, newx = x[0:5,], ptype='class', s = scipy.array([0.05, 0.01]))" + "glmnetPredict(fit, newx = x[0:5,], ptype='class', s = np.array([0.05, 0.01]))" ] }, { @@ -1500,8 +1500,8 @@ "baseDataDir= '../data/'\n", "\n", "# load data\n", - "x = scipy.loadtxt(baseDataDir + 'MultinomialExampleX.dat', dtype = scipy.float64, delimiter = ',')\n", - "y = scipy.loadtxt(baseDataDir + 'MultinomialExampleY.dat', dtype = scipy.float64)" + "x = np.loadtxt(baseDataDir + 'MultinomialExampleX.dat', dtype = np.float64, delimiter = ',')\n", + "y = np.loadtxt(baseDataDir + 'MultinomialExampleY.dat', dtype = np.float64)" ] }, { @@ -1681,8 +1681,8 @@ "baseDataDir= '../data/'\n", "\n", "# load data\n", - "x = scipy.loadtxt(baseDataDir + 'PoissonExampleX.dat', dtype = scipy.float64, delimiter = ',')\n", - "y = scipy.loadtxt(baseDataDir + 'PoissonExampleY.dat', dtype = scipy.float64, delimiter = ',')" + "x = np.loadtxt(baseDataDir + 'PoissonExampleX.dat', dtype = np.float64, delimiter = ',')\n", + "y = np.loadtxt(baseDataDir + 'PoissonExampleY.dat', dtype = np.float64, delimiter = ',')" ] }, { @@ -1799,7 +1799,7 @@ } ], "source": [ - "glmnetCoef(fit, s = scipy.float64([1.0]))" + "glmnetCoef(fit, s = np.float64([1.0]))" ] }, { @@ -1825,7 +1825,7 @@ } ], "source": [ - "glmnetPredict(fit, x[0:5,:], ptype = 'response', s = scipy.float64([0.1, 0.01]))" + "glmnetPredict(fit, x[0:5,:], ptype = 
'response', s = np.float64([0.1, 0.01]))" ] }, { @@ -1930,7 +1930,7 @@ } ], "source": [ - "optlam = scipy.array([cvfit['lambda_min'], cvfit['lambda_1se']]).reshape([2,])\n", + "optlam = np.array([cvfit['lambda_min'], cvfit['lambda_1se']]).reshape([2,])\n", "cvglmnetCoef(cvfit, s = optlam)" ] }, @@ -2008,8 +2008,8 @@ "baseDataDir= '../data/'\n", "\n", "# load data\n", - "x = scipy.loadtxt(baseDataDir + 'CoxExampleX.dat', dtype = scipy.float64, delimiter = ',')\n", - "y = scipy.loadtxt(baseDataDir + 'CoxExampleY.dat', dtype = scipy.float64, delimiter = ',')" + "x = np.loadtxt(baseDataDir + 'CoxExampleX.dat', dtype = np.float64, delimiter = ',')\n", + "y = np.loadtxt(baseDataDir + 'CoxExampleY.dat', dtype = np.float64, delimiter = ',')" ] }, { @@ -2126,7 +2126,7 @@ } ], "source": [ - "glmnetCoef(fit, s = scipy.float64([0.05]))" + "glmnetCoef(fit, s = np.float64([0.05]))" ] }, { diff --git a/docs/glmnet_vignette.ipynb b/docs/glmnet_vignette.ipynb index 816a581..9325480 100644 --- a/docs/glmnet_vignette.ipynb +++ b/docs/glmnet_vignette.ipynb @@ -129,7 +129,10 @@ "cell_type": "code", "execution_count": 1, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [ { @@ -155,7 +158,10 @@ "cell_type": "code", "execution_count": 2, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [], "source": [ @@ -166,7 +172,8 @@ "import sys\n", "sys.path.append('../test')\n", "sys.path.append('../lib')\n", - "import scipy, importlib, pprint, matplotlib.pyplot as plt, warnings\n", + "import numpy as np\n", + "import importlib, pprint, matplotlib.pyplot as plt, warnings\n", "from glmnet import glmnet; from glmnetPlot import glmnetPlot \n", "from glmnetPrint import glmnetPrint; from glmnetCoef import glmnetCoef; from glmnetPredict import glmnetPredict\n", "from cvglmnet import cvglmnet; from cvglmnetCoef import cvglmnetCoef\n", @@ -176,12 +183,12 @@ "baseDataDir= 
'../data/'\n", "\n", "# load data\n", - "x = scipy.loadtxt(baseDataDir + 'QuickStartExampleX.dat', dtype = scipy.float64)\n", - "y = scipy.loadtxt(baseDataDir + 'QuickStartExampleY.dat', dtype = scipy.float64)\n", + "x = np.loadtxt(baseDataDir + 'QuickStartExampleX.dat', dtype = np.float64)\n", + "y = np.loadtxt(baseDataDir + 'QuickStartExampleY.dat', dtype = np.float64)\n", "\n", "# create weights\n", - "t = scipy.ones((50, 1), dtype = scipy.float64)\n", - "wts = scipy.row_stack((t, 2*t))" + "t = np.ones((50, 1), dtype = np.float64)\n", + "wts = np.row_stack((t, 2*t))" ] }, { @@ -195,7 +202,10 @@ "cell_type": "code", "execution_count": 3, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [], "source": [ @@ -218,6 +228,9 @@ "execution_count": 4, "metadata": { "collapsed": false, + "jupyter": { + "outputs_hidden": false + }, "scrolled": true }, "outputs": [ @@ -272,7 +285,10 @@ "cell_type": "code", "execution_count": 5, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [ { @@ -302,6 +318,9 @@ "execution_count": 6, "metadata": { "collapsed": false, + "jupyter": { + "outputs_hidden": false + }, "scrolled": true }, "outputs": [ @@ -337,7 +356,10 @@ "cell_type": "code", "execution_count": 7, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [ { @@ -359,7 +381,10 @@ "cell_type": "code", "execution_count": 8, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [ { @@ -394,7 +419,7 @@ } ], "source": [ - "glmnetCoef(fit, s = scipy.float64([0.5]), exact = False)" + "glmnetCoef(fit, s = np.float64([0.5]), exact = False)" ] }, { @@ -419,7 +444,10 @@ "cell_type": "code", "execution_count": 9, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [ { @@ -436,7 
+464,7 @@ ], "source": [ "fc = glmnetPredict(fit, x[0:5,:], ptype = 'response', \\\n", - " s = scipy.float64([0.05]))\n", + " s = np.float64([0.05]))\n", "print(fc)" ] }, @@ -459,7 +487,10 @@ "cell_type": "code", "execution_count": 10, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [], "source": [ @@ -494,7 +525,10 @@ "cell_type": "code", "execution_count": 11, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [ { @@ -516,7 +550,10 @@ "cell_type": "code", "execution_count": 12, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [ { @@ -558,7 +595,10 @@ "cell_type": "code", "execution_count": 13, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [ { @@ -591,11 +631,14 @@ "cell_type": "code", "execution_count": 14, "metadata": { - "collapsed": true + "collapsed": true, + "jupyter": { + "outputs_hidden": true + } }, "outputs": [], "source": [ - "foldid = scipy.random.choice(10, size = y.shape[0], replace = True)\n", + "foldid = np.random.choice(10, size = y.shape[0], replace = True)\n", "cv1=cvglmnet(x = x.copy(),y = y.copy(),foldid=foldid,alpha=1)\n", "cv0p5=cvglmnet(x = x.copy(),y = y.copy(),foldid=foldid,alpha=0.5)\n", "cv0=cvglmnet(x = x.copy(),y = y.copy(),foldid=foldid,alpha=0)" @@ -612,7 +655,10 @@ "cell_type": "code", "execution_count": 15, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [ { @@ -635,10 +681,10 @@ "f.add_subplot(2,2,3)\n", "cvglmnetPlot(cv0)\n", "f.add_subplot(2,2,4)\n", - "plt.plot( scipy.log(cv1['lambdau']), cv1['cvm'], 'r.')\n", + "plt.plot( np.log(cv1['lambdau']), cv1['cvm'], 'r.')\n", "plt.hold(True)\n", - "plt.plot( scipy.log(cv0p5['lambdau']), cv0p5['cvm'], 'g.')\n", - "plt.plot( scipy.log(cv0['lambdau']), 
cv0['cvm'], 'b.')\n", + "plt.plot( np.log(cv0p5['lambdau']), cv0p5['cvm'], 'g.')\n", + "plt.plot( np.log(cv0['lambdau']), cv0['cvm'], 'b.')\n", "plt.xlabel('log(Lambda)')\n", "plt.ylabel(cv1['name'])\n", "plt.xlim(-6, 4)\n", @@ -661,7 +707,10 @@ "cell_type": "code", "execution_count": 16, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [ { @@ -676,7 +725,7 @@ } ], "source": [ - "cl = scipy.array([[-0.7], [0.5]], dtype = scipy.float64)\n", + "cl = np.array([[-0.7], [0.5]], dtype = np.float64)\n", "tfit=glmnet(x = x.copy(),y= y.copy(), cl = cl)\n", "glmnetPlot(tfit);" ] @@ -711,7 +760,10 @@ "cell_type": "code", "execution_count": 17, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [ { @@ -726,7 +778,7 @@ } ], "source": [ - "pfac = scipy.ones([1, 20])\n", + "pfac = np.ones([1, 20])\n", "pfac[0, 4] = 0; pfac[0, 9] = 0; pfac[0, 14] = 0\n", "pfit = glmnet(x = x.copy(), y = y.copy(), penalty_factor = pfac)\n", "glmnetPlot(pfit, label = True);" @@ -751,7 +803,10 @@ "cell_type": "code", "execution_count": 18, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [ { @@ -766,9 +821,9 @@ } ], "source": [ - "scipy.random.seed(101)\n", - "x = scipy.random.rand(100,10)\n", - "y = scipy.random.rand(100,1)\n", + "np.random.seed(101)\n", + "x = np.random.rand(100,10)\n", + "y = np.random.rand(100,1)\n", "fit = glmnet(x = x, y = y)\n", "glmnetPlot(fit);" ] @@ -784,7 +839,10 @@ "cell_type": "code", "execution_count": 19, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [], "source": [ @@ -838,7 +896,10 @@ "cell_type": "code", "execution_count": 20, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [], "source": [ @@ -849,7 +910,8 @@ "import sys\n", 
"sys.path.append('../test')\n", "sys.path.append('../lib')\n", - "import scipy, importlib, pprint, matplotlib.pyplot as plt, warnings\n", + "import numpy as np\n", + "import importlib, pprint, matplotlib.pyplot as plt, warnings\n", "from glmnet import glmnet; from glmnetPlot import glmnetPlot \n", "from glmnetPrint import glmnetPrint; from glmnetCoef import glmnetCoef; from glmnetPredict import glmnetPredict\n", "from cvglmnet import cvglmnet; from cvglmnetCoef import cvglmnetCoef\n", @@ -859,8 +921,8 @@ "baseDataDir= '../data/'\n", "\n", "# load data\n", - "x = scipy.loadtxt(baseDataDir + 'MultiGaussianExampleX.dat', dtype = scipy.float64, delimiter = ',')\n", - "y = scipy.loadtxt(baseDataDir + 'MultiGaussianExampleY.dat', dtype = scipy.float64, delimiter = ',')" + "x = np.loadtxt(baseDataDir + 'MultiGaussianExampleX.dat', dtype = np.float64, delimiter = ',')\n", + "y = np.loadtxt(baseDataDir + 'MultiGaussianExampleY.dat', dtype = np.float64, delimiter = ',')" ] }, { @@ -874,7 +936,10 @@ "cell_type": "code", "execution_count": 21, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [], "source": [ @@ -894,7 +959,10 @@ "cell_type": "code", "execution_count": 22, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [ { @@ -927,7 +995,10 @@ "cell_type": "code", "execution_count": 23, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [ { @@ -949,7 +1020,7 @@ } ], "source": [ - "f = glmnetPredict(mfit, x[0:5,:], s = scipy.float64([0.1, 0.01]))\n", + "f = glmnetPredict(mfit, x[0:5,:], s = np.float64([0.1, 0.01]))\n", "print(f[:,:,0], '\\n')\n", "print(f[:,:,1])" ] @@ -967,7 +1038,10 @@ "cell_type": "code", "execution_count": 24, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [], "source": [ @@ -987,7 +1061,10 @@ 
"cell_type": "code", "execution_count": 25, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [ { @@ -1016,7 +1093,10 @@ "cell_type": "code", "execution_count": 26, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [ { @@ -1038,7 +1118,10 @@ "cell_type": "code", "execution_count": 27, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [ { @@ -1107,7 +1190,10 @@ "cell_type": "code", "execution_count": 28, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [], "source": [ @@ -1118,7 +1204,8 @@ "import sys\n", "sys.path.append('../test')\n", "sys.path.append('../lib')\n", - "import scipy, importlib, pprint, matplotlib.pyplot as plt, warnings\n", + "import numpy as np\n", + "import importlib, pprint, matplotlib.pyplot as plt, warnings\n", "from glmnet import glmnet; from glmnetPlot import glmnetPlot \n", "from glmnetPrint import glmnetPrint; from glmnetCoef import glmnetCoef; from glmnetPredict import glmnetPredict\n", "from cvglmnet import cvglmnet; from cvglmnetCoef import cvglmnetCoef\n", @@ -1128,8 +1215,8 @@ "baseDataDir= '../data/'\n", "\n", "# load data\n", - "x = scipy.loadtxt(baseDataDir + 'BinomialExampleX.dat', dtype = scipy.float64, delimiter = ',')\n", - "y = scipy.loadtxt(baseDataDir + 'BinomialExampleY.dat', dtype = scipy.float64)" + "x = np.loadtxt(baseDataDir + 'BinomialExampleX.dat', dtype = np.float64, delimiter = ',')\n", + "y = np.loadtxt(baseDataDir + 'BinomialExampleY.dat', dtype = np.float64)" ] }, { @@ -1145,7 +1232,10 @@ "cell_type": "code", "execution_count": 29, "metadata": { - "collapsed": true + "collapsed": true, + "jupyter": { + "outputs_hidden": true + } }, "outputs": [], "source": [ @@ -1163,7 +1253,10 @@ "cell_type": "code", "execution_count": 30, "metadata": { - "collapsed": 
false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [ { @@ -1205,7 +1298,10 @@ "cell_type": "code", "execution_count": 31, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [ { @@ -1224,7 +1320,7 @@ } ], "source": [ - "glmnetPredict(fit, newx = x[0:5,], ptype='class', s = scipy.array([0.05, 0.01]))" + "glmnetPredict(fit, newx = x[0:5,], ptype='class', s = np.array([0.05, 0.01]))" ] }, { @@ -1249,7 +1345,10 @@ "cell_type": "code", "execution_count": 32, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [], "source": [ @@ -1271,7 +1370,10 @@ "cell_type": "code", "execution_count": 33, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [ { @@ -1293,7 +1395,10 @@ "cell_type": "code", "execution_count": 34, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [ { @@ -1315,7 +1420,10 @@ "cell_type": "code", "execution_count": 35, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [ { @@ -1344,7 +1452,10 @@ "cell_type": "code", "execution_count": 36, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [ { @@ -1403,7 +1514,10 @@ "cell_type": "code", "execution_count": 37, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [ { @@ -1481,7 +1595,10 @@ "cell_type": "code", "execution_count": 38, "metadata": { - "collapsed": true + "collapsed": true, + "jupyter": { + "outputs_hidden": true + } }, "outputs": [], "source": [ @@ -1492,7 +1609,8 @@ "import sys\n", "sys.path.append('../test')\n", "sys.path.append('../lib')\n", - "import scipy, importlib, pprint, matplotlib.pyplot as plt, warnings\n", + 
"import numpy as np\n", + "import importlib, pprint, matplotlib.pyplot as plt, warnings\n", "from glmnet import glmnet; from glmnetPlot import glmnetPlot \n", "from glmnetPrint import glmnetPrint; from glmnetCoef import glmnetCoef; from glmnetPredict import glmnetPredict\n", "from cvglmnet import cvglmnet; from cvglmnetCoef import cvglmnetCoef\n", @@ -1502,8 +1620,8 @@ "baseDataDir= '../data/'\n", "\n", "# load data\n", - "x = scipy.loadtxt(baseDataDir + 'MultinomialExampleX.dat', dtype = scipy.float64, delimiter = ',')\n", - "y = scipy.loadtxt(baseDataDir + 'MultinomialExampleY.dat', dtype = scipy.float64)" + "x = np.loadtxt(baseDataDir + 'MultinomialExampleX.dat', dtype = np.float64, delimiter = ',')\n", + "y = np.loadtxt(baseDataDir + 'MultinomialExampleY.dat', dtype = np.float64)" ] }, { @@ -1524,7 +1642,10 @@ "cell_type": "code", "execution_count": 39, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [], "source": [ @@ -1542,7 +1663,10 @@ "cell_type": "code", "execution_count": 40, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [ { @@ -1575,7 +1699,10 @@ "cell_type": "code", "execution_count": 41, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [ { @@ -1609,7 +1736,10 @@ "cell_type": "code", "execution_count": 42, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [ { @@ -1636,9 +1766,7 @@ }, { "cell_type": "markdown", - "metadata": { - "collapsed": true - }, + "metadata": {}, "source": [ "Poisson regression is used to model count data under the assumption of Poisson error, or otherwise non-negative data where the mean and variance are proportional. Like the Gaussian and binomial model, the Poisson is a member of the exponential family of distributions. 
We usually model its positive mean on the log scale: $\\log \\mu(x) = \\beta_0+\\beta' x$.\n", "The log-likelihood for observations $\\{x_i,y_i\\}_1^N$ is given my\n", @@ -1662,7 +1790,10 @@ "cell_type": "code", "execution_count": 43, "metadata": { - "collapsed": true + "collapsed": true, + "jupyter": { + "outputs_hidden": true + } }, "outputs": [], "source": [ @@ -1673,7 +1804,8 @@ "import sys\n", "sys.path.append('../test')\n", "sys.path.append('../lib')\n", - "import scipy, importlib, pprint, matplotlib.pyplot as plt, warnings\n", + "import numpy as np\n", + "import importlib, pprint, matplotlib.pyplot as plt, warnings\n", "from glmnet import glmnet; from glmnetPlot import glmnetPlot \n", "from glmnetPrint import glmnetPrint; from glmnetCoef import glmnetCoef; from glmnetPredict import glmnetPredict\n", "from cvglmnet import cvglmnet; from cvglmnetCoef import cvglmnetCoef\n", @@ -1683,15 +1815,13 @@ "baseDataDir= '../data/'\n", "\n", "# load data\n", - "x = scipy.loadtxt(baseDataDir + 'PoissonExampleX.dat', dtype = scipy.float64, delimiter = ',')\n", - "y = scipy.loadtxt(baseDataDir + 'PoissonExampleY.dat', dtype = scipy.float64, delimiter = ',')" + "x = np.loadtxt(baseDataDir + 'PoissonExampleX.dat', dtype = np.float64, delimiter = ',')\n", + "y = np.loadtxt(baseDataDir + 'PoissonExampleY.dat', dtype = np.float64, delimiter = ',')" ] }, { "cell_type": "markdown", - "metadata": { - "collapsed": true - }, + "metadata": {}, "source": [ "We apply the function `glmnet` with the `\"poisson\"` option." 
] @@ -1700,7 +1830,10 @@ "cell_type": "code", "execution_count": 44, "metadata": { - "collapsed": true + "collapsed": true, + "jupyter": { + "outputs_hidden": true + } }, "outputs": [], "source": [ @@ -1709,9 +1842,7 @@ }, { "cell_type": "markdown", - "metadata": { - "collapsed": true - }, + "metadata": {}, "source": [ "The optional input arguments of `glmnet` for `\"poisson\"` family are similar to those for others.\n", "\n", @@ -1729,7 +1860,10 @@ "cell_type": "code", "execution_count": 45, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [ { @@ -1749,9 +1883,7 @@ }, { "cell_type": "markdown", - "metadata": { - "collapsed": true - }, + "metadata": {}, "source": [ "Like before, we can extract the coefficients and make predictions at certain $\\lambda$'s by using `coef` and `predict` respectively. The optional input arguments are similar to those for other families. In function `predict`, the option `type`, which is the type of prediction required, has its own specialties for Poisson family. 
That is,\n", "* \"link\" (default) gives the linear predictors like others\n", @@ -1766,7 +1898,10 @@ "cell_type": "code", "execution_count": 46, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [ { @@ -1801,14 +1936,17 @@ } ], "source": [ - "glmnetCoef(fit, s = scipy.float64([1.0]))" + "glmnetCoef(fit, s = np.float64([1.0]))" ] }, { "cell_type": "code", "execution_count": 47, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [ { @@ -1827,14 +1965,12 @@ } ], "source": [ - "glmnetPredict(fit, x[0:5,:], ptype = 'response', s = scipy.float64([0.1, 0.01]))" + "glmnetPredict(fit, x[0:5,:], ptype = 'response', s = np.float64([0.1, 0.01]))" ] }, { "cell_type": "markdown", - "metadata": { - "collapsed": true - }, + "metadata": {}, "source": [ "We may also use cross-validation to find the optimal $\\lambda$'s and thus make inferences." ] @@ -1843,7 +1979,10 @@ "cell_type": "code", "execution_count": 48, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [], "source": [ @@ -1868,7 +2007,10 @@ "cell_type": "code", "execution_count": 49, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [ { @@ -1897,7 +2039,10 @@ "cell_type": "code", "execution_count": 50, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [ { @@ -1932,7 +2077,7 @@ } ], "source": [ - "optlam = scipy.array([cvfit['lambda_min'], cvfit['lambda_1se']]).reshape([2,])\n", + "optlam = np.array([cvfit['lambda_min'], cvfit['lambda_1se']]).reshape([2,])\n", "cvglmnetCoef(cvfit, s = optlam)" ] }, @@ -1945,18 +2090,14 @@ }, { "cell_type": "markdown", - "metadata": { - "collapsed": true - }, + "metadata": {}, "source": [ "## Cox Models" ] }, { "cell_type": "markdown", - "metadata": { - "collapsed": 
true - }, + "metadata": {}, "source": [ "The Cox proportional hazards model is commonly used for the study of the relationship beteween predictor variables and survival time. In the usual survival analysis framework, we have data of the form $(y_1, x_1, \\delta_1), \\ldots, (y_n, x_n, \\delta_n)$ where $y_i$, the observed time, is a time of failure if $\\delta_i$ is 1 or right-censoring if $\\delta_i$ is 0. We also let $t_1 < t_2 < \\ldots < t_m$ be the increasing list of unique failure times, and $j(i)$ denote the index of the observation failing at time $t_i$.\n", "\n", @@ -1989,7 +2130,10 @@ "cell_type": "code", "execution_count": 51, "metadata": { - "collapsed": true + "collapsed": true, + "jupyter": { + "outputs_hidden": true + } }, "outputs": [], "source": [ @@ -2000,7 +2144,8 @@ "import sys\n", "sys.path.append('../test')\n", "sys.path.append('../lib')\n", - "import scipy, importlib, pprint, matplotlib.pyplot as plt, warnings\n", + "import numpy as np\n", + "import importlib, pprint, matplotlib.pyplot as plt, warnings\n", "from glmnet import glmnet; from glmnetPlot import glmnetPlot \n", "from glmnetPrint import glmnetPrint; from glmnetCoef import glmnetCoef; from glmnetPredict import glmnetPredict\n", "from cvglmnet import cvglmnet; from cvglmnetCoef import cvglmnetCoef\n", @@ -2010,8 +2155,8 @@ "baseDataDir= '../data/'\n", "\n", "# load data\n", - "x = scipy.loadtxt(baseDataDir + 'CoxExampleX.dat', dtype = scipy.float64, delimiter = ',')\n", - "y = scipy.loadtxt(baseDataDir + 'CoxExampleY.dat', dtype = scipy.float64, delimiter = ',')" + "x = np.loadtxt(baseDataDir + 'CoxExampleX.dat', dtype = np.float64, delimiter = ',')\n", + "y = np.loadtxt(baseDataDir + 'CoxExampleY.dat', dtype = np.float64, delimiter = ',')" ] }, { @@ -2027,7 +2172,10 @@ "cell_type": "code", "execution_count": 52, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [ { @@ -2055,7 +2203,10 @@ "cell_type": "code", 
"execution_count": 53, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [ { @@ -2084,7 +2235,10 @@ "cell_type": "code", "execution_count": 54, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [ { @@ -2128,7 +2282,7 @@ } ], "source": [ - "glmnetCoef(fit, s = scipy.float64([0.05]))" + "glmnetCoef(fit, s = np.float64([0.05]))" ] }, { @@ -2182,9 +2336,9 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.4.5" + "version": "3.6.9" } }, "nbformat": 4, - "nbformat_minor": 0 + "nbformat_minor": 4 } diff --git a/glmnet_python.egg-info/PKG-INFO b/glmnet_python.egg-info/PKG-INFO new file mode 100644 index 0000000..a802c64 --- /dev/null +++ b/glmnet_python.egg-info/PKG-INFO @@ -0,0 +1,100 @@ +Metadata-Version: 1.1 +Name: glmnet-python +Version: 0.2.0 +Summary: Python version of glmnet, from Stanford University +Home-page: https://github.com/bbalasub1/glmnet_python +Author: Trevor Hastie, Balakumar B.J. +Author-email: bbalasub@gmail.com +License: GPL-2 +Description: # Glmnet for python + + [![PyPI version](https://badge.fury.io/py/glmnet-py.svg)](https://badge.fury.io/py/glmnet-py) + [![GPL Licence](https://badges.frapsoft.com/os/gpl/gpl.svg?v=103)](https://opensource.org/licenses/GPL-2.0/) + [![Documentation Status](https://readthedocs.org/projects/glmnet-python/badge/?version=latest)](http://glmnet-python.readthedocs.io/en/latest/?badge=latest) + + ## Install + + Using pip (recommended) + + pip install glmnet_py + + Complied from source + + git clone https://github.com/bbalasub1/glmnet_python.git + cd glmnet_python + python setup.py install + (use python setup.py install --user if you get a permission denied message. 
This does a local install for the user) + + Requirement: Python 3, Linux + + Currently, the checked-in version of GLMnet.so is compiled for the following config: + + **Linux:** Linux version 2.6.32-573.26.1.el6.x86_64 (gcc version 4.4.7 20120313 (Red Hat 4.4.7-16) (GCC) ) + **OS:** CentOS 6.7 (Final) + **Hardware:** 8-core Intel(R) Core(TM) i7-2630QM + **gfortran:** version 4.4.7 20120313 (Red Hat 4.4.7-17) (GCC) + + + ## Documentation + Read the Docs: [![Documentation Status](https://readthedocs.org/projects/glmnet-python/badge/?version=latest)](http://glmnet-python.readthedocs.io/en/latest/?badge=latest) or click [me](http://glmnet-python.readthedocs.io/en/latest/glmnet_vignette.html) + + + ## Usage + import glmnet_python + from glmnet import glmnet + + For more examples, see [iPython notebook](https://github.com/bbalasub1/glmnet_python/blob/master/test/glmnet_examples.ipynb "iPython Notebook") + + + + ## Introduction + + This is a python version of the popular `glmnet` library (beta release). Glmnet fits the entire lasso or elastic-net regularization path for `linear` regression, `logistic` and `multinomial` regression models, `poisson` regression and the `cox` model. + + The underlying fortran codes are the same as the `R` version, and uses a cyclical path-wise coordinate descent algorithm as described in the papers linked below. + + Currently, `glmnet` library methods for gaussian, multi-variate gaussian, binomial, multinomial, poisson and cox models are implemented for both normal and sparse matrices. + + Additionally, cross-validation is also implemented for gaussian, multivariate gaussian, binomial, multinomial and poisson models. CV for cox models is yet to be implemented. + + CV can be done in both serial and parallel manner. Parallellization is done using `multiprocessing` and `joblib` libraries. + + During installation, the fortran code is compiled in the local machine using `gfortran`, and is called by the python code. 
+ + *The best starting point to use this library is to start with the Jupyter notebooks in the `test` directory ([iPython notebook](https://github.com/bbalasub1/glmnet_python/blob/master/test/glmnet_examples.ipynb "iPython Notebook")). Detailed explanations of function calls and parameter values along with plenty of examples are provided there to get you started.* + + ## Authors: + + Algorithm was designed by Jerome Friedman, Trevor Hastie and Rob Tibshirani. Fortran code was written by Jerome Friedman. R wrapper (from which the MATLAB wrapper was adapted) was written by Trevor Hastie. + + The original MATLAB wrapper was written by Hui Jiang (14 Jul 2009), and was updated and is maintained by Junyang Qian (30 Aug 2013). + + This python wrapper (which was adapted from the MATLAB and R wrappers) was originally written by B. J. Balakumar (5 Sep 2016). + + List of other contributors along with a summary of their contributions is included in the contributors.dat file. + + B. J. Balakumar, bbalasub@gmail.com (Sep 5, 2016). Department of Statistics, Stanford University, Stanford, CA + + REFERENCES: + * Friedman, J., Hastie, T. and Tibshirani, R. (2008) Regularization Paths for Generalized Linear Models via Coordinate Descent, + http://www.jstatsoft.org/v33/i01/ + *Journal of Statistical Software, Vol. 33(1), 1-22 Feb 2010* + + * Simon, N., Friedman, J., Hastie, T., Tibshirani, R. (2011) Regularization Paths for Cox's Proportional Hazards Model via Coordinate Descent, + http://www.jstatsoft.org/v39/i05/ + *Journal of Statistical Software, Vol. 39(5) 1-13* + + * Tibshirani, Robert., Bien, J., Friedman, J.,Hastie, T.,Simon, N.,Taylor, J. and Tibshirani, Ryan. 
(2010) Strong Rules for Discarding Predictors in Lasso-type Problems, + http://www-stat.stanford.edu/~tibs/ftp/strong.pdf + *Stanford Statistics Technical Report* + + +Keywords: glm glmnet ridge lasso elasticnet +Platform: UNKNOWN +Classifier: Development Status :: 4 - Beta +Classifier: Intended Audience :: Science/Research +Classifier: Topic :: Scientific/Engineering :: Mathematics +Classifier: License :: OSI Approved :: GNU General Public License v2 (GPLv2) +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Operating System :: Unix diff --git a/glmnet_python.egg-info/SOURCES.txt b/glmnet_python.egg-info/SOURCES.txt new file mode 100644 index 0000000..dad132d --- /dev/null +++ b/glmnet_python.egg-info/SOURCES.txt @@ -0,0 +1,59 @@ +MANIFEST.in +README.md +setup.cfg +setup.py +data/BinomialExample.RData +data/BinomialExampleX.dat +data/BinomialExampleY.dat +data/CVXResults.RData +data/CoxExample.RData +data/CoxExampleX.dat +data/CoxExampleY.dat +data/MultiGaussianExample.RData +data/MultiGaussianExampleX.dat +data/MultiGaussianExampleY.dat +data/MultinomialExample.RData +data/MultinomialExampleX.dat +data/MultinomialExampleY.dat +data/PoissonExample.RData +data/PoissonExampleX.dat +data/PoissonExampleY.dat +data/QuickStartExample.RData +data/QuickStartExampleX.dat +data/QuickStartExampleY.dat +data/SparseExample.RData +data/convertToDat.R +glmnet_python/GLMnet.so +glmnet_python/__init__.py +glmnet_python/coxnet.py +glmnet_python/cvcompute.py +glmnet_python/cvelnet.py +glmnet_python/cvfishnet.py +glmnet_python/cvglmnet.py +glmnet_python/cvglmnetCoef.py +glmnet_python/cvglmnetPlot.py +glmnet_python/cvglmnetPredict.py +glmnet_python/cvlognet.py +glmnet_python/cvmrelnet.py +glmnet_python/cvmultnet.py +glmnet_python/dataprocess.py +glmnet_python/elnet.py +glmnet_python/fishnet.py +glmnet_python/glmnet.py +glmnet_python/glmnetCoef.py +glmnet_python/glmnetControl.py +glmnet_python/glmnetPlot.py 
+glmnet_python/glmnetPredict.py +glmnet_python/glmnetPrint.py +glmnet_python/glmnetSet.py +glmnet_python/loadGlmLib.py +glmnet_python/lognet.py +glmnet_python/mrelnet.py +glmnet_python/printDict.py +glmnet_python/structtype.py +glmnet_python/wtmean.py +glmnet_python.egg-info/PKG-INFO +glmnet_python.egg-info/SOURCES.txt +glmnet_python.egg-info/dependency_links.txt +glmnet_python.egg-info/requires.txt +glmnet_python.egg-info/top_level.txt \ No newline at end of file diff --git a/glmnet_python.egg-info/dependency_links.txt b/glmnet_python.egg-info/dependency_links.txt new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/glmnet_python.egg-info/dependency_links.txt @@ -0,0 +1 @@ + diff --git a/glmnet_python.egg-info/requires.txt b/glmnet_python.egg-info/requires.txt new file mode 100644 index 0000000..ebc1a51 --- /dev/null +++ b/glmnet_python.egg-info/requires.txt @@ -0,0 +1 @@ +joblib>=0.10.3 diff --git a/glmnet_python.egg-info/top_level.txt b/glmnet_python.egg-info/top_level.txt new file mode 100644 index 0000000..aac4a62 --- /dev/null +++ b/glmnet_python.egg-info/top_level.txt @@ -0,0 +1 @@ +glmnet_python diff --git a/glmnet_python/.ipynb_checkpoints/Untitled-checkpoint.ipynb b/glmnet_python/.ipynb_checkpoints/Untitled-checkpoint.ipynb new file mode 100644 index 0000000..7fec515 --- /dev/null +++ b/glmnet_python/.ipynb_checkpoints/Untitled-checkpoint.ipynb @@ -0,0 +1,6 @@ +{ + "cells": [], + "metadata": {}, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/glmnet_python/GLMnet.so b/glmnet_python/GLMnet.so index 28ffdcc..8f1cc76 100755 Binary files a/glmnet_python/GLMnet.so and b/glmnet_python/GLMnet.so differ diff --git a/glmnet_python/Untitled.ipynb b/glmnet_python/Untitled.ipynb new file mode 100644 index 0000000..14a5065 --- /dev/null +++ b/glmnet_python/Untitled.ipynb @@ -0,0 +1,100 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [], + "source": [ + "import cvglmnet\n", + "import numpy as np" + 
] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [], + "source": [ + "X = np.random.rand(50,3)\n", + "y = np.random.rand(50)" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Option grouped=false enforced in cv.glmnet, since < 3 observations per fold\n" + ] + } + ], + "source": [ + "fit1 = cvglmnet.cvglmnet(x=X,y=y,nfolds=20)" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [], + "source": [ + "from cvglmnetPlot import cvglmnetPlot" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": {}, + "outputs": [ + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAY8AAAEjCAYAAADKRI1yAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+j8jraAAAgAElEQVR4nO3deXxVxfnH8c83GyRhCbKJIOACKoiiAiKiRq0WV1BRtC5gF1pbd1FRKyK29YdapYttpVZwxwUXtCiu1w0XUFkERRHDpqgFIoQ9yfP7457gJWS5N8nJzfK8X6/74t45M3NmkpAnc+acGZkZzjnnXCJSkt0A55xz9Y8HD+eccwnz4OGccy5hHjycc84lzIOHc865hHnwcM45lzAPHq7eklQkaY6kBZLmSrpKUr38mZb0qKR5kq4olT5W0sqgn3Mk/V8I586T1Kam63UNW1qyG+BcNWwys94AktoBjwAtgJuqW7GkVDMrqm49cZ5rV6Cvme1dTpa7zOyOcsqmmVlheK1zrmz18q8050ozs++AkcDFikqVdLukWcFf9L8GkJQi6R+SPpP0sqTpkoYGx/IkjZf0EXCmpOMlvSvpI0lPSGoW5DtE0huSPpQ0Q1KHIP1SSQuD800p3UZJTSVNkjRf0seSjg4OvQR0DEYWR1TWV0mTJf1L0vvAbZL2kvRi0J63JO0b5GsraWrwNZgl6fAgvbWkl4IR272AYuq+UtInwevyIK1r8PWaLOlzSQ9L+omkdyR9IalfFb9trj4zM3/5q16+gIIy0vKB9kQDye+DtCbAbGAPYCgwnegfTrsCa4GhQb484JrgfRvgTSA7+HwtMAZIB2YCbYP0YcB9wfuvgSbB+5wy2nZVTN59gWVAU6Ar8Ek5fRwLrATmBK+fApOB54HUIM+rQLfg/aHAa8H7R4CBwfvOwKfB+78CY4L3JwEW9PcQYD6QDTQDFgAHBe0rBHoFX7cPgfuIBp3BwDPJ/lnwV+2//LKVa6iOBw4oGVUALYFuwEDgCTMrBlZJer1UuceCf/sDPYB3JAFkAO8C+wD7Ay8H6anAN0GZecDDkp4BnimjTQOBvwGY2WeSlgLdgXWV9GWHy1aSzgn6UBSMhgYATwTtgWiwBPgJ0CMmvUWQ/0jg9KAd/5W0NqZ9T5vZhuA8TwFHANOAr8xsfpC+AHjVzEzSfKLBxTUyHjxcgyFpT6AI+I7oX8WXmNmMUnlOrKSa
DSVZgZfN7JxS5XsBC8zssDLKnkT0F/MpwA2Sell48xEl7UwB8i2Y+yklBehvZptjE2OCSSK2xLwvjvlcjP8eaZR8zsM1CJLaAv8C/m5mBswALpKUHhzvLikbeAc4I5j7aA/kllPle8DhkvYOymdL6g4sAtpKOixIT5fUM7jLa3cze53oJa6WRC/9xHoLOLekPUQvJS2qTr/NbB3wlaQzg3ol6cDg8EvAJSV5JZUEmDeBnwVpJwCtYto3RFJW8LU6LUhzbif+F4OrzzIlzSE6D1EIPAjcGRy7l+jllI8U/VP7e2AIMBU4FlgILAc+An4oXbGZfS9pBPCopJLLQL83s8+DS2F/ldSS6P+hCcDnwENBmoC/mll+qWr/AfwzuNRTCIwwsy1VHAnEOjeo9/fB12IKMBe4FLhb0rygnW8CvwFuDvq1gOj8zbKgzx9Jmgx8ENR7r5l9LKlrdRvoGh5F/0hzrvGQ1MzMCiS1JvqL8nAzW5XsdjlXn/jIwzVGz0vKIToJfosHDucS5yMP55xzCfMJc+eccwnz4JFkwVPHHyi6NtMCSTcnu001zfvYcEjaXdLrwZP0CyRdluw21bTG0Eeofj/9slWSBXcCZQcTuOnA28BlZvZekptWY7yPDYeiS7F0CO7Mak70afMhZrYwyU2rMY2hj1D9fvrII8ksqiD4mB68GlRE9z42HGb2jZl9FLxfD3wKdExuq2pWY+gjVL+fHjzqAEUX8ZtD9Mnol83s/WS3qaZ5Hxue4PmPg4AG28/G0EeoWj89eNQBZlYULC/RCegnaf9kt6mmeR8blmCNrKnA5cFT7g1OY+gjVL2fHjzqkOCJ5NeBQcluS1i8j/VfMKczFXjYzJ5KdnvC0Bj6CNXrpwePJFN0z4Wc4H0mcBzwWXJbVbO8jw1HcGPAf4gu735nZfnro8bQR6h+P/1uqySTdABwP9GlvVOAx81sXHJbVbO8jw2HpIFEF0ucT3RFXYDrzWx68lpVsxpDH6H6/fTg4ZxzLmF+2co551zCPHg455xLmAcP55xzCfPg4ZxzLmEePOoISSOT3YawNYY+QuPoZ2PoIzSOfla1jx486o4G/0NK4+gjNI5+NoY+QuPopwcP55xztaNRPOeRkpJimZmZyW5GhZo0acKWLVuS3YxQNYY+QuPoZ2PoIzSOflbUx40bN5qZlTnIaBR7mGdmZrJhw4ZkN6NCkUiE3NzcZDcjVI2hj9A4+tkY+giNo58V9VHSpvLK+WUr55xzCfPg4ZxzLmEePJxzziXMg4dzzrmEefBwzjmXMA8ezjnnEubBwznnXMI8eDjnnEuYBw/nnGugxo4Nr24PHs4510DdfHN4dXvwcM65Buqee8Kr24OHc841UCNDXFDeg4dzzjVQUnh1e/BwzjmXMA8ezjnnEubBwznnGqiTTw6vbg8ezjnXQD33XHh1e/CowOTJk5k8eXKlaYnkLa+8c87VtFNOCa9uDx51xOrVq6sVfKobvDz4OdfwPP98eHWHGjwkDZK0SNJiSaPLOH6kpI8kFUoaGpPeW9K7khZImidpWMyxyZK+kjQnePUOsw8uPrUVvJxzdUNaWBVLSgXuBo4DVgCzJE0zs4Ux2ZYBI4BRpYpvBC4wsy8k7QZ8KGmGmeUHx682syfDaruru0qCyYgRIypNLy+vc676QgseQD9gsZktAZA0BRgMbA8eZpYXHCuOLWhmn8e8/1rSd0BbIB/nqskDjWsszMKrO8zg0RFYHvN5BXBoopVI6gdkAF/GJP9R0hjgVWC0mW0po9xIYCRAWloakUgk0VOTnx+NVbFly0pLJG955YuKisjPz69y+eqevzbKFxQUJL39ieSdM2cOAL1773hltLz0EgUFBVX6eatPGkMfof7387nnOnDKKd9UmKeqfQwzeFSbpA7Ag8BwMysZnVwHrCIaUCYC1wLjSpc1s4nBcbKzsy03Nzfh8+fl5QEQW7astETylld+6tSp5OTkVLl8dc9fG+UjkQg5
OTlJbX9Y54oduUQiEXJzcxv0aKakjw1dfe/n0UfDn/+8T4V5qtrHMCfMVwK7x3zuFKTFRVIL4L/ADWb2Xkm6mX1jUVuASUQvjzlXb/jNAa4hCDN4zAK6SdpDUgZwNjAtnoJB/qeBB0pPjAejESQJGAJ8UqOtdq6O8IDi6rLQgoeZFQIXAzOAT4HHzWyBpHGSTgWQ1FfSCuBM4B5JC4LiZwFHAiPKuCX3YUnzgflAG+APYfXBubrIg4qL17S4/lyvmlDnPMxsOjC9VNqYmPeziF7OKl3uIeChcuo8poab6Vy915DnV1zVHXJIeHX7E+bONVA+QnEdO4ZXtwcP5xoZDyquJnjwcM55QHEJ8+DhnCuTB5T671e/Cq/uOv2QoHOu7pk8eTLNmzdPdjNcHCZODK9uH3k456rNRyl1k99t5ZxzLmEffRRe3R48nHOh8NFIw+bBwzlXqzyo1J4OHcKr24OHc841UF9/HV7dHjycc0nno5FwjB0bXt0ePJxzdZIHlOq7+ebw6vbg4ZyrVzyo1A0ePJxzziXMg4dzrt7z0UjZZs8Or24PHs65BskDSrg8eDjnGpXGFFT69Amvbg8ezrlGrzEFlJoSavCQNEjSIkmLJY0u4/iRkj6SVChpaEx6b0nvSlogaZ6kYTHH9pD0flDnY5IywuyDc65xmjx5MqtXr052M+qs0IKHpFTgbuAEoAdwjqQepbItA0YAj5RK3whcYGY9gUHABEk5wbHxwF1mtjewFvhFOD1wzrmd1adRyk03hVd3mCOPfsBiM1tiZluBKcDg2Axmlmdm84DiUumfm9kXwfuvge+AtpIEHAM8GWS9HxgSYh+cc65SdTWghPmEeZibQXUElsd8XgEcmmglkvoBGcCXQGsg38wKY+osc4t3SSOBkQBpaWlEIpFET01+fj7ADmXLSkskb3nli4qKyM/Pr3L56p6/NsoXFBQkvf21ca6CggIikUjS+xpm+aysrHrd/njLl+5nIuXnzJkDQO/evUmWoUMP48kn360wT8nPa6Lq9E6CkjoADwLDzaw4OvCIj5lNBCYCZGdnW25ubsLnz8vLAyC2bFlpieQtr/zUqVPJycmpcvnqnr82ykciEXJycpLa/to4VyQSITc3t15/ryorn5qaWq/bH2/50v2sifPXptWrKz9/yc9rosK8bLUS2D3mc6cgLS6SWgD/BW4ws/eC5NVAjqSSoJdQnc45l2x19RJXosIMHrOAbsHdURnA2cC0eAoG+Z8GHjCzkvkNzMyA14GSO7OGA8/WaKudc66WhRVQDj64xqvcLrTgEcxLXAzMAD4FHjezBZLGSToVQFJfSSuAM4F7JC0Iip8FHAmMkDQneJVcOLwWuFLSYqJzIP8Jqw/OOVefffhheHWHOudhZtOB6aXSxsS8n0X00lPpcg8BD5VT5xKid3I555yrwMiRMHFiOHX7E+bOOVcH1cSlrH//u2baUhYPHs455xLmwcM551zCPHg451w9ksjlrJUhPshQYfCQlCrp4fBO75xzLixh3m1VYfAwsyKgi69c65xz9c+pp4ZXdzy36i4B3pE0DdhQkmhmd4bWKuecc3VaPMHjy+CVAjQPtznOOecSVTIHMmLEiFo7Z6XBw8xuBpDULPhcEHajnHPOVd8994RXd6V3W0naX9LHwAJggaQPJfUMr0nOOedqwsiR4dUdz626E4ErzayLmXUBrgJCfG7ROedcTUhgF4uExRM8ss3s9ZIPZhYBskNrkXPOuTovnuCxRNKNkroGr98TvQPLOedcHRX2niHxBI+fA22Bp4CpQJsgzTnnXB124IHLK89URRXebSUpFXjKzI4OrQXOOedCcfnlrwIjQqk7nifMiyW1DOXszjnnQjNhwrGh1R3PQ4IFwHxJL7PjE+aXhtYq55xz1TZ37u6h1R1P8HgqeDnnnHNAHKvqAiPM7P7Sr3gqlzRI0iJJiyWNLuP4kZI+klQoaWipYy9Kypf0fKn0yZK+KmNvc+ec
c7UktDmPIPDcDZwA9ADOkdSjVLZlRGdzHimjituB88up/moz6x285iTaNuecawwmTZocWt1hznn0Axab2RIASVOAwcDCmDrygmPFpQub2auScuNon3POuTJEIt0Ja63EMOc8OgKxNxmvAA6tQj1l+aOkMcCrwGgz21I6g6SRwEiAtLQ0IpFIwifJz88H2KFsWWmJ5C2vfFFREfn5+VUuX93z10b5goKCpLe/Ns5VUFBAJBJJel/DLJ+VlVWv2x9v+dL9rG/tv//+AYwYsWP50kp+XhNVbvCQ1MLM1pU1vyGpc8JnqjnXAauADKLrbl0LjCudycwmBsfJzs623NzchE+Ul5cHQGzZstISyVte+alTp5KTk1Pl8tU9f22Uj0Qi5OTkJLX9tXGuSCRCbm5uvf5eVVY+NTW1Xrc/3vKl+1nf2l9W+dJKfl4TVdGcR6TkjaRXSx17Jo66VwKx94l1CtKqxcy+sagtwCSil8ecc87VooqCR+x6jLtUcKw8s4BukvYItrE9G5iWYPt2bpTUIfhXwBDgk+rW6ZxzDdFll70SWt0VBQ8r531Zn3cubFYIXAzMAD4FHjezBZLGSToVQFJfSSuAM4F7JC0oKS/pLeAJ4FhJKyT9NDj0sKT5wHyi62z9obK2OOdcY9Sly+rQ6q5owrydpCuJjjJK3hN8bhtP5WY2HZheKm1MzPtZRC9nlVX2iHLSj4nn3M4519hdeeUwrrginLorCh7/5sc9y2PfA9wbTnOcc87VB+UGj5K9y51zzrnS4tnPwznnXD101FGLQqvbg4dzzjVQI0a8G1rdHjycc66BGjv25NDqrugJ8yvLOwZgZnfWfHPqjmc/XsHEhZmsUhaPjPsvvzvlQICd0gYf1CnuvBWV/35zCrd92rrK5ctKd841bkuXtgmt7orutiq5u2ofoC8/PuB3CvBBaC2qA579eAV3PPgWd0y9lb4rFjKrUw9GrRnNltQ0/vbkH7enXZt/HR/m9eT1txZUmrey8qP3WMuiOy6tcvnY85ekv1KLwS+u8pu2eZBzroGo9G4rSW8CB5vZ+uDzWOC/tdK6JLn7ubmMn3orb+xxCBMO/xkpZrTK/54vW+/OP/ufyT1WTIoZbb5fySPvZnHQt8t48OCTefigE0kpLqbrqq+Y26E7U/c/lqd7HkOqFbPf8s94f/f9mdF9AC91O4xUK+aAL+fwWGomg774kIJ9enL3YWeRVlzEkZ+9y4vdD2Nh+71Y1LYrqcXFnDDvNZ7ueTQrW7Tj231bk2rFnP7Bc/w7pQkXvfs0W9IyeK9zL9KKirjwjUe4g+Fc/9okDli1mM/aduUPa3/JVqXyh+l/45CvP+OT9ntxc/7lfPDlfrwx89NqB794yp/fJ5M7HtuxPEQf56np4OVByTnIydkIZIVSdzyr6rYHtsZ83hqkNViLNxp9Vyzkra4Hk2JGsURm4RY2p2VQkJFFsVIwQZFSKFQq65o2Iz+zeTQdUZSSwoaMTN7r3ItipVCkFIpTUihoksWzPY6KflYKRSmpbElJ4/n9jmTvZsaEgefu0I4/HPPLndp29Uk7P/Hz56OGl9mP3w+6ZKe0S4Zct8Pnh2evhIwWnDfsD6QVF5FRVAjAptQmXH3iFaQXFZJevI30TRv4X/M23DXwPDKKtpFeVEj7b5fz6LtZ9F+5mOf2O4oZ3QeQUbSNfos/5PW9+jBv1+4satuVJoVbGfzxDDhkMBvTo1+Xptu28uvX7udPmVmkbNrEmP/+lf5L57Ng1z25Zs21oYy8EhklefBxDcFddz1OdMukmhdP8HgA+EDS08HnIUBcOwnWV3tnKfpL6M0fuzmzcy+uG3QJTz80aoe0i4beyE2v3MOAZfN3yvvGxJGVpl009Eb++eQtbG0/nCXjr6YwJZWZXXpx43G/5b+TL6MoJZWilBTe79STPx39c6Y8ej2FqWkUKYUPO+7LLcf9mtGv/Yce331FkVIpTEllXoe9uf/gkxn38j0UpkbT
NqdmcPWJl/Onl+6mMCWVbSlpbElLZ/xRw7n0nSkUp6SwLSWNwtQ0tqSm89BBJ3DYsrlsTU2nMCWatjSnA2nFhWxOy2B9kyy2pGWwTal81aoji9p0YWtaOltSM9iSlk5xSir/d/SFO3xdr0op5JdDx+yQRhGQ0ZyLTrthe1JaUSHFSuHKk68ic9sWmm7bQrP1a1mTsyuTDzmVxw84nqytm4ORW1OGLniLr1p15LtmrcneupEL3nqMCZzHuJfuof+y+XzepjM35o+Ke5RU7eDTN6NaP3/O1ZRnnumdvP08zOyPkl6g5PoCXGhmH4fTnLrhd6ccyLX51zE+9pfMadFfMjM79/rxl8wZ1zF44D5ca5Xnraz86Iy1FKWkMLvTflx/6ii2pKbxya57bc/7xxN+x5bUNJbntN+e9vef/JzTB3bnHrtgh/Pf2+90hs15kZ98+ePU1MzOvej8wyrOmTtjh7SWhZvpv3z+TsHvrT0O4o7pf9khbXGb3Xl0yg07pF009EZue2HCTuVHD7qEFyddwua0JmxNTefdzvvzbY9LmHb/KDanNWFzWgZzOnTj7gHDGPvyPWxLTWdzegab0pqwISOTif1O46glH7IxvSmb0puyIaMpn7fpwrKcXdmQkcmm9CZsSM9kS2oGDx90Upnfx6tOvmr7+9TiIh54bxlttxYy7tiRtNhcQIstG9jrmy+Z06E773U+gIXt96LlpvWc+d4z/It0xrxyL4cvncuXu3Tixvyr4w4+8/ccy5in58UdfHyU48Ly7LPh7dIdz8gDohfN1pnZJEltJe1hZl+F1qoki/5nPoJRaSmsUhZ7Z6dwbfAX5qjUG7anjSr5hdB1l0rzVlZ++cJZ7DPq6SqXjz3/Cb1259n0VAYsm1drwa+s8nM6dN+edvtxIzk/JYWCjMztaU/0H8Iumel0yf9mp+Azo/thjH/xbzukfd2iLS9OumSHtN8MvZG7nhnP/t99xYb0pmzIyOT93Xvynz5DuPbNB1ifkcmGjCx+aNqMf/YfyuFL51KQkcn6JtmsbNGO9U2yWd8km78NGIZpxzvXR594+fb3TQq38uB7S+m8oYC7DxtGq03r2GXjOg77Yhav79WHNVktmd2xB+02rKVD/nf8/cNC7nmqepfYnKvLKg0ekm4C+hC962oSkA48BBwebtOSa/BBnVg7dxOwiREx476y0hLJW175qUveZ8x+q6tcvnR6PAGtJoNfXOVXfcqoYTuWB7h2Q9WD15CB+zDWrtqh/D39z2TYnBcZsjCy/Ws0s3MvHulzEmfNe6nMS4yvT/w165tksa5pM97s2pt/9D+L6yP3kd+0OfmZzVmd1ZL7+gxmr9UryM9sztct9mJ1VkvWNW0GwMWDR2+v86pWhazPSOXyk0fRdsNa2m7IZ6/li5gyM5Nz5kX4oWlz5u3ajS5rV/HHp8ZzsV3Pv8oIND5ycXVZPCOP04CDgI8AzOxrSc0rLuKSrbaDXzx5I5HFjOyxc754RnnVHXlVNkp6r/P+9F2xkAXt92TCEecxbM6LnLTone0tnNm5F1N7H88vZz29Q/B5s8uBXD/oEu6degtrslryfXYOG/b6Ba02pXH0ktl8n92K77Nb8V2zXdiaksb9fU7l/j6nbi8vK8YQfzr6F3RY9z27rf8fAxe8zRNkcO0bD3LM4lksa7Ur1+WP9pGLS9hNNz1H9OmKmhdP8NhqZibJACRlh9IS12iFEbxKB5RERkmJBJ/RQ65hS2oaa7Ja0HfFAmZ16sHXTZtx7sdPMerth7e3seQS2/jn76Jz/iq+bdaaVc1bM7tjD17Zux+tN+azrFUH3uvci/XBaGbscRcx9riLSC/aRquN63j43TwGrvicWZ16sqp5Gzrnr2L09L8z2i4r8xKZBxQXpniCx+OS7gFyJP0K+DnRJdqdq7PKC0jxBqrqBJ8RzVN4pO9JOwWfIQP34U92CeOn3soReR8zq1MP7jriPC746PkdAs0re/Zh3LEjufnV
e1jZoh0rW7Rject2PL/fkXzabk/e2LPPjp0148bjf0uXtd/QJf8bjpkX4Qky+OOMfzJo0Uw+7rhvws/UeKBpGG6++RTGjg2n7gqDR7DV62PAvsA6ovMeY8zs5XCa41zdUJ3gE4lEGHV+r7iCT1mjnOtOvIxhc17k6CUfbq9/ZudevNWtLxOeu52DV37GypbtWN6yPW/scQhP98yl2/+WsTQnOnLZmJEJwBWnXM01J26jc/4q2n+7nGunfEjTwq1c8foDnPLpW3zWrmvCtyo7V6LC4BFcrppuZr0ADxjOxSmR4FPVOZtvm+3C8z2O5LyPp28fuRgwvfsAbj3651w6cwpfterIkl06srj17mwmlc3pWdx0/G+56fjf0mHd97Rd+y15rXZjRcv2NNu6iYO/XsT4qbfyG/v9TpP4sSOXnx2YxvEeVBq1eC5bfSSpb7BlbEIkDQL+AqQC95rZ/5U6fiQwATgAONvMnow59iLQH3jbzE6OSd8DmAK0Bj4Ezjez2CfgnatXqjNnU1agGXv8RQyb8yJnzX9l+zlmdu7FL8+4kWfvv5K8XXbji9ad+aJNZz5r25V1TbK5JrgtOaW4iD3WfM26jCw+3m1ftqRlsP+qLxk/9VauycpCGzdyx9Rb2dJ+OAf/5/4dgoqrewYPngOE86xHPMHjUOBcSUuBDUT3MDczO6CiQpJSgbuB44AVwCxJ08xsYUy2ZUSfnR+1cw3cTvT5kl+XSh8P3GVmUyT9C/gF8M84+uFcvRHGyKVldlO+b9aK4xZ/wHGLow+QljzQef8TY/msbVc+bbcH73Q5kLTiIm6PWfam/fr/8e22VM5Y9AEb0zMhJZUBy+ZvDyrFW/0SV100ZEhyg8dPq1h3P2CxmS0BkDQFGAxsDx5mlhccKy5d2MxelZQbmxbMwRwD/CxIuh8YiwcP14jFO3KB8p+p+aZ5a36y+H1abl7Po70H8Zv3nmDkB0+zsP2efNJ+LyJ79uG77F14av9jmNrrJ1zVrpAxF93H/qsWs2JbCje8/ihnfPIqn7Xbw+dM6pArrjgrqcuTLAWQ1A5omkDdHYHlMZ9XEB3FVEdrIN/MCmPq7FhWRkkjgZEAaWlpRCKRhE+Wn58PsEPZstISyVte+aKiIvLz86tcvrrnr43yBQUFSW9/bZyroKCASCSS1L62BM7a7RsAevfuDT8sBuCkQ1pzZcp1fJvajN1Tt3Jqz+gjW1fqx7QDdsvk6ZRB20cuxRKTBgyldco2bnviT7TYspG8G66iz4rPeH/3XqAU/njsr/jTMb9gzzUr6bJyMY+/3YTrX3+A0z95jXkdunH16mv4dGE7AJ75pAnfpjbjvuuncmLP5vTvmFGnvn+xaVlZWfXy/1pJWn7+zu0vreTnNVHxPGF+KvBnYDfgO6AL8CnQM+Gz1SIzmwhMBMjOzrbc3NyE68jLywMgtmxZaYnkLa/81KlTycnJqXL56p6/NspHIhFycnKS2v7aOFckEiE3N7dOfq9ygV23TQa27HA5rHTasx+vYFST9O0jl2uCkctNW6NP87femM/Zc2cws8uB3PLi39l93XfM69Cdebt2Y26HbmxOb8KY43/LLcf+ip7ffskBSxfwSGYzsrZs5s6nbt3hctrGrJ688nWH6LnWbdg+SqkLX7/U1NQ69f2rifKllfy8Jiqey1a3EJ24fsXMDpJ0NHBeHOVWArvHfO4UpFXHaqLPm6QFo4+aqNM5V0p5cy4lqwH8bNemTPvFraRvK2avtSsZsGw+uV99BMA7nXtxw/G/Y/Qb9zNnt+58tNu+vLZXX7aQzrom6Yw66QoO/OYLDvjmcy54+zH+aj/zhxxD0qXL/4BwdhOMJ3hsM7PVklIkpZjZ65ImxMQZ6XMAABvnSURBVFFuFtAtuDtqJXA2P85VVElw6/DrwFCid1wNB56tTp3OufiVBJXmTdN4acxgnv14xU7zKJefeg3D5rzIoC/eZdAX7wLwZtfejDzt94x660HmdujO
3A7deWGfH5fH+/3xv+WAVYvp8d0Sznv7cf5m5zCxgluFPajEZ+zY50nmfh75kpoBbwIPS/qO6F1XFTKzQkkXAzOI3qp7n5ktkDQOmG1m0yT1BZ4GWgGnSLrZzHoCSHqL6MOJzSStAH5hZjOAa4Epkv4AfAz8J9FOO+dqRlkrUJd1t9cNp11Dq+wMeny3hF/Ojv69t7Zpc6YccBz39judPdd8zfu7788zPY/eXveVJ19Fz2+XcPDKTxnx5hTGZ2aRummjr+2VgMmTD0vehDnRO6Q2A1cA5xKdixsXT+VmNh2YXiptTMz7WUQvPZVVtsybx4O7t/rFc37nXPiqerfXp+26cl+/0zhnzgvbH3Jck9mCJ/c/hon9TuewpfOY16Ebr+4d/HcvNPZc/wOv7H0o+Zkt6LNiYaUPNDZ2b7yxT2h1x3O3Vewoo0HvIOicqxmVzZmUN0r5rG0X/n3oGZw958XtAWVt0+Y80vunTDj8XNpsyOeR3oO4r+8QAHbP/4Z1GVnktdqNVpvW02/5ggqfPfEl7WtOPHdbrSe66gFABtH9PDaYWYswG+aca3iq8pDjp+26MuWw02mTmcrl7zxCnxWfsrD9nszu1IMZ3frzdYt2XD8ouklY022b2ff7PFZsS2XkBy9y0qK3+S67FePyr0hoSXvwZVgqE8/IY/veHcFDeoOJ3n3lnHPVVtXLXhvTm3DPoWfwm3cf58xPXmNuh+7M69CNd7ocgDAm9h/KxP5DAcjcupmH3l3Kgd9/zQv7HM7Hu+1Lu4I1/OydJ/k7adz53F0cvWQ2s0ttLVzWMiz1aeRy552PAcNCqTvebWiB6N1OwDPB7oKjK8vvnHNVkchlr2npqRy+bB4nLnqbthvW8GyPo7j9+TvpvWoxn7Xrytct2rK8ZXsePOgkNqVnMG2/o/ghc8f97H59xo002baFDutXk/PD/1ia04F3uvRmr8wW7LFtC7c8czu/j1nbq75M2C9d2jq0uuO5bHV6zMcUolvSbg6tRc45V454Rinp24rZrWA1e69Zwd5rVgDRNbym9TqGMa/+mwHL5rM5LYPvslvx6l59uaffGYyc9TTfNm/N183b8E2LNmzIyOSeQ8/gspZw5QV3klpcRNG2FAau/JKF7fdia2o6e6z9mj89NZ7fJbCNcG37y19+woR4HqyognhGHrF7GBYCeUQvXTnnXNKVDihlPXtS1pL2K1q24x8DhjFszov8/MNp2+sr2df+tX//htfuvJN7nprCi90PY9p+uXzRpjNv73HQ9rzpRdvYlpLGfX2G8Maefdjn+zwuenUyt9ovdnrwEY6oMyOSmhDPnMeFtdEQ55yrCWU9e5LIkvYli0W+v3tP0ooKabJlA7O796VdU/Hnx++g+/+WsWSXTuS12o23uh7Ia3v1ZXlOe97c42C2pqVHGxHs7tjz2y858quPuP75v3JXTvQeo/o0Z1KReC5b/bWi42Z2ac01xznnqq86S9rHbi1csgxLWRP2hSkpvNflAi6cPY1Rbz9MoVLI22U3pu17JA8ddCJ7rl7Be7v3YlqPXGTF2Abj9w+9z/Wv/IczPnmN2bVwiWv48JnAgBqpq7R4Lls1BXoQ3Y4W4Eyiy6q/G0qLnHOuFlW0tXDJMiw/qvg5le+yW/HoQSfws+DBRwMWtN+LyQefzFP7H8P69KZcd8JlTBh4Hkd99SGnffAckyy1zEtcNSE393OSGTwOAAaWLIMebMD0lpn9JpQWOedcHVWV51TWNcni/X0PxSRm/mM4b3c9mDf2PJgXug9gfdNmYMadR5zHsYs/4JRP36zRDbYuvHBEUpcnaQW0ANYEn5sFac451+jF+5zK3c/NJa/Vbpw1/2XOmv8yhUrhwYNO5G8DzmZLagbjcy9kfO6FHLJiISu2pfCvZyZw7Jez6uyEezzB4/+Aj4PVbAUcSXT3Puecc2Uo71LYtfk73gX2jwHDtl/iWtayPc/tdySPHvhTUAq/G3IdRy35kCvefpjx
U29lbE6L+hU8zGySpBeI7gJowLVmtir0ljnnXAMSzwrEB339GZP6nMqoyGQ2NMnm8QOO49ThdzFkwet8sdESXpvrwAOXs+O2SjWn3OAhqQvRLV9/MLNVktYBQ4C9Jf3dzLaG0iLnnGug4n3I8eBvFjFg2Xwueu8J7j7sLP7TdwhmcOPD7/OXp25j4NK5cU2uX375q4S1n0dKBcceB7IBJPUGngCWAQcC/wilNc4518gMPqgTI3tsYsx+q3lpzEmMPv0grj3jOmZ27kVm4RaO+upD2hRuokkKrEtryvWDLuGVvQ9lwLL5jJ96K3c/N7fcuidMODa0dld02SrTzL4O3p9HdDOnP0tKAeaE1iLnnGvEyrq8dd0pB3LFY3N45JHr+OMxv+Ci065n0KJ3GPPqRBZvtHLrmjs3nEtWUHHwUMz7Y4DrAMysOLq4rnPOuTCUdXnr7ufmguDZB65kYr/TmTDwZ7zV9SCyKeKehZl8W8tPrVcUPF6T9DjwDdFbc18DkNQB8PkO55yrRb875cDtd2v9atbTtC1Yw3UnXEohaey5aiVTpt3Gypbtam0nxYrmPC4HniK6EOJAM9sWpO8K3BBP5ZIGSVokabGknZZwl3SkpI8kFUoaWurYcElfBK/hMemRoM45watdPG1xzrn6bPBBnRh1/hGMGnYD+4x6mn+fdSWtm6RwwYfP8XmbLpw6fAKFKWk7zINMmjQ5tPaUGzwsaoqZ3WVmKwEknWxmH5vZjMoqlpQK3A2cQHR5k3Mk9SiVbRnRWwEeKVV2F+AmorcH9wNukhT7YOK5ZtY7eH1XaS+dc64BKD25/n2huPG1e3lh0iXsWrCaEWeOZW6H7nwRzINEIt1Da0tFI4+yjEsgbz9gsZktCW7rnUKppdzNLM/M5gHFpcr+FHjZzNaY2VrgZWBQgm11zrkGbe8sMatTD7rmf8NTD47ip1+8x/jcC0krLuZfCzO5//4BHD/uvzz78YoaP3dCOwmy4yR6ZToCy2M+ryA6kqhq2Y4xnydJKgKmAn8IdjjcsaHSSGAkQFpaGpFIJP6WB/Lz8wF2KFtWWiJ5yytfVFREfn5+lctX9/y1Ub6goCDp7a+NcxUUFBCJRJLe1zDLZ2Vl1ev2x1u+dD/rWvuP2Sudq4dcw+3P3EbfFQs576PneXOPg9mQ3pQm+etIbbGRsf+5jqtXX8OnC9vRv2MGpZX8vCYq0eDx64TPUPPONbOVkpoTDR7nAw+UzmRmE4GJANnZ2Zabm5vwifLy8gCILVtWWiJ5yys/depUcnJyqly+uuevjfKRSIScnJyktr82zhWJRMjNza3X36vKyqemptbr9sdbvnQ/61r7c4H9eqxgVJP07bf1tthWzG9fuZ9/9T+TDsPfodWU9dz+zG2MbX0ro889ntJKfl4TFVfwkDQA6AqkSdoXwMx2+oVdykp2fC6+U5AWj5VAbqmykeC8K4N/10t6hOjlscra4pxzDVLp23r3HP08Iz94mkGfv8vVA6+m69pv2Hv18gqfB6mKSuc8JD0I3AEMBPoGrz5x1D0L6CZpD0kZwNnAtErKlJgBHC+pVTBRfjwwQ1KapDZBu9KBk4FP4qzTOecavJJ5kL3WrOTv08aTWbiFWZ16sHdWzT6fF8/Iow/Qo6x5hYqYWaGki4kGglSiT6gvkDQOmG1m0yT1BZ4m+hzJKZJuNrOeZrZG0i1EAxDAuCAtm2gQSQ/qfAX4dyLtcs65hiz2eZDDl83jnc4HcO0Z123fDbGmxBM8PiH6bMc3iVZuZtOB6aXSxsS8n0X0klRZZe8D7iuVtgE4JNF2OOdcYxG7vAm3w9hgG92afuo8nuDRBlgo6QNgS0mimZ1aoy1xzjlXI0rmQd4FXhpzUijniCd4jA3lzM4550J11FGLgH1CqTuezaDeCOXMzjnnQjVixLuEFTziuduqv6RZkgokbZVUFGwM5Zxzrg4bO/bk0OqOZ3mSvwPnAF8AmcAvia5Z5Zxzrg5burRN
aHXHtbaVmS0GUs2syMwm4etMOedcoxbPhPnG4CG/OZJuI3rLbqILKjrnnKtlOTkbgaxQ6o4nCJwf5LsY2EB0yZEzQmmNc865GnPXXY+HVnelwcPMlhJdTbeDmd1sZlcGl7Gcc87VYc880zu0uuO52+oUYA7wYvC5t6R416hyzjmXJM8+m8TgQfQhwX5APoCZzQH2CK1Fzjnn6rx4gsc2M/uhVFrNru3rnHOuXokneCyQ9DMgVVI3SX8DZobcLuecc9V0003PhVZ3PMHjEqAn0UURHwXWAZeH1iLnnHN1Xjx3W200sxvMrK+Z9Qneb66NxjnnnKu6m28+JbS6y31IsLI7qnxJdueca7wqesL8MGA50UtV7xN91sM555yr8LLVrsD1wP7AX4DjgP+Z2Ru+TLtzztV9gwfPCa3ucoNHsAjii2Y2HOgPLAYiwb7kcZE0SNIiSYsljS7j+JGSPpJUKGloqWPDJX0RvIbHpB8iaX5Q518l+YjIOefKMGRIEoIHgKQmkk4HHgJ+B/wVeDqeiiWlEl26/QSgB3COpB6lsi0DRgCPlCq7C3ATcCjRBxRvktQqOPxP4FdAt+DlK/w651wZrrjirNDqrmjC/AGil6ymAzeb2ScJ1t0PWGxmS4L6pgCDgYUlGcwsLzhWXKrsT4GXzWxNcPxlYJCkCNDCzN6LaeMQ4IUE2+accw1efn44K+pCxRPm5xFdRfcy4NKYq0MCzMxaVFJ3R6IT7iVWEB1JxKOssh2D14oy0nciaSQwEiAtLY1IJBLnqX+Un58PsEPZstISyVte+aKiIvLz86tcvrrnr43yBQUFSW9/bZyroKCASCSS9L6GWT4rK6tetz/e8qX7Wd/aX1b50kp+XhNVbvAws3q9Z4eZTQQmAmRnZ1tubm7CdeTl5QEQW7astETylld+6tSp5OTkVLl8dc9fG+UjkQg5OTlJbX9tnCsSiZCbm1uvv1eVlU9NTa3X7Y+3fOl+1rf2d+nyv53Kl1by85qoMAPESqJ7f5ToFKRVp+zK4H1V6nTOuUZl7NjnQ6s7zOAxC+gmaY9gJ8KzgXiXcp8BHC+pVTBRfjwww8y+AdZJ6h/cZXUB8GwYjXfOufpu8uTDQqs7tOBhZoVEdx+cAXwKPG5mCySNk3QqgKS+klYAZwL3SFoQlF0D3EI0AM0CxpVMngO/Be4leuvwl/hkuXPOlemNN/YJre549jCvMjObTvRurdi0MTHvZ7HjZajYfPcB95WRPpvoXWDOOeeSJNTgUd+NGDEirrRE8pZXvnXr1pxxxhmV5g3r/Inkdc45Dx4uIckOXh7QnIvfnXc+BgwLpW4PHq5B8FGWcztburR1aHV78HCuHB58XH33l7/8hAkTwqnbg4dzIUkk+DhX33jwcK4O8NGMq288eDhXz3igcfEaPnwmMCCUuj14ONdAeUBxubmf48HDOVcjfOTSeFx44QjC+tZ68HDOlckDiqtIvV523TlX+0aMGEHr1uE9P+DqBx95OOeqzUcpddOBBy5nx90tao4HD+dcKDygJN/ll78KjAilbg8ezrla5UGl9kyYcKxPmDvnGi6/Aywcc+eGc8kKPHg45+oZDyp1gwcP51y95wGl9nnwcM41SL4wJUyaNJmwJsxDfc5D0iBJiyQtljS6jONNJD0WHH9fUtcgPUPSJEnzJc2VlBtTJhLUOSd4tQuzD865hm/EiBE7BZWG8DxLJNI9tLpDG3lISgXuBo4DVgCzJE0zs4Ux2X4BrDWzvSWdDYwnuu3VrwDMrFcQHF6Q1NfMioNy5wZ7mTvnXK2qTyOX++8fwOTJ4dQd5mWrfsBiM1sCIGkKMBiIDR6DgbHB+yeBv0sS0AN4DcDMvpOUD/QBPgixvc45VyWN8W6xMINHR2B5zOcVwKHl5TGzQkk/AK2BucCpkh4l+njkIcG/JcFjkqQiYCrwBzOz0ieXNBIYCZCWlkYkEqmhboWjoKCgzrexuhpDH6Fx9LMx9BHC6Wd+fj7A
DvXGm1YT5Uurah/r6oT5fcB+wGxgKTATKAqOnWtmKyU1Jxo8zgceKF2BmU0EJgJkZ2dbbm5uLTS76iKRCHW9jdXVGPoIjaOfjaGPEE4/y6ovLy9vp2NlpSWSNy8vj8sue4Xc3J9U2J6q9jHM4LGSHRdV6RSklZVnhaQ0oCWwOhhJXFGSSdJM4HMAM1sZ/Lte0iNEL4/tFDycc66+COsSV5cuq0OpF8K922oW0E3SHpIygLOBaaXyTAOGB++HAq+ZmUnKkpQNIOk4oNDMFkpKk9QmSE8HTgY+CbEPzjmXFGXdAZaoK68cVjONKUNoI49gDuNiYAaQCtxnZgskjQNmm9k04D/Ag5IWA2uIBhiAdsAMScVERyfnB+lNgvT0oM5XgH+H1QfnnHNlC3XOw8ymA9NLpY2Jeb8ZOLOMcnnAPmWkbyA6ee6ccy6J6uqEuXPOuTIkcinrqKMWUcbf4TXCdxJ0zrkGasSId0Or24OHc841UGPHnhxa3X7Zyjnn6rnyLmUtXdomtHP6yMM551zCPHg451wDlZOzMbS6PXg451wDddddj4dWtwcP55xrgEaMGEFe3ojQ6vfg4ZxzDdTNN4dXtwcP55xzCfPg4ZxzLmEePJxzroGaHeJm3R48nHPOJcyDh3PONVB9+oRXtwcP55xzCfPg4ZxzLmEePJxzroG66abw6vbg4ZxzDdTYseHVHWrwkDRI0iJJiyWNLuN4E0mPBcffl9Q1SM+QNEnSfElzJeXGlDkkSF8s6a+SFGYfnHPO7Sy04CEpFbgbOAHoAZwjqUepbL8A1prZ3sBdwPgg/VcAZtYLOA74s6SStv4zON4teA0Kqw/OOefKFubIox+w2MyWmNlWYAowuFSewcD9wfsngWODkUQP4DUAM/sOyAf6SOoAtDCz98zMgAeAISH2wTnnXBnC3EmwI7A85vMK4NDy8phZoaQfgNbAXOBUSY8CuwOHBP8WB/XE1tmxrJNLGgmMBEhLSyMSiVSzO+EqKCio822srsbQR2gc/WwMfYTG0c+q9rGubkN7H7AfMBtYCswEihKpwMwmAhMBsrOzLTc3t4abWLMikQh1vY3V1Rj6CI2jn42hj9A4+lnVPoYZPFYSHS2U6BSklZVnhaQ0oCWwOrgkdUVJJkkzgc+BtUE9FdXpnHMuZGHOecwCuknaQ1IGcDYwrVSeacDw4P1Q4DUzM0lZkrIBJB0HFJrZQjP7BlgnqX8wN3IB8GyIfXDOOVeG0EYewRzGxcAMIBW4z8wWSBoHzDazacB/gAclLQbWEA0wAO2AGZKKiY4szo+p+rfAZCATeCF4Oeecq0WKXiFq2IIgtCnZ7ahEGlCY7EaErDH0ERpHPxtDH6Fx9LOiPmaaWZlXqBpF8KgPJM02sxDXwEy+xtBHaBz9bAx9hMbRz6r20Zcncc45lzAPHs455xLmwaPumJjsBtSCxtBHaBz9bAx9hMbRzyr10ec8nHPOJcxHHs455xLmwcM551zCPHjUIZJ6S3pP0hxJsyX1S3abalqwf8uc4JUnaU6y2xQWSZdI+kzSAkm3Jbs9NU3SWEkrY76fJya7TWGRdJUkk9Qm2W0Jg6RbJM0Lvo8vSdqt0jI+51F3SHoJuMvMXgj+I15jZrlJblZoJP0Z+MHMxiW7LTVN0tHADcBJZrZFUrtge4EGQ9JYoMDM7kh2W8IkaXfgXmBf4BAz+1+Sm1TjJLUws3XB+0uBHmb2m4rK+MijbjGgRfC+JfB1EtsSqmBtsrOAR5PdlpBcBPyfmW2B7fvSuPrpLuAaov8/G6SSwBHIJo6+evCoWy4Hbpe0HLgDuC7J7QnTEcC3ZvZFshsSku7AEcH2ym9I6pvsBoXk4uByx32SWiW7MTVN0mBgpZnNTXZbwibpj8HvnnOBMZXm98tWtUvSK8CuZRy6ATgWeMPMpko6CxhpZj+p1QbWgIr6aGbPBnn+SXSnyT/XauNqUCXfyz8CrwOXAn2Bx4A9rZ79h6ukj+8B/yP6
V+otQAcz+3ktNq9GVNLH64HjzewHSXlAn/p62Sqe/5dBvuuApmZ2U4X11bOf5QYt2EkxJ1iWXkTnA1pUVq6+CfZuWUn0+vGKyvLXR5JeBMab2evB5y+B/mb2fXJbFg5JXYHnzWz/JDelxkjqBbwKbAySOhG9lNzPzFYlrWEhk9QZmF7Z99IvW9UtXwNHBe+PARrqJZ2fAJ811MAReAY4GkBSdyCD6F/pDYakDjEfTwM+SVZbwmBm882snZl1NbOuRLe9PrghBg5J3WI+DgY+q6xMXd2GtrH6FfCX4C/zzQR7sDdAZ9NwJ8pL3AfcJ+kTYCswvL5dsorDbZJ6E71slQf8OrnNcdXwf5L2AYqJbv1d4Z1W4JetnHPOVYFftnLOOZcwDx7OOecS5sHDOedcwjx4OOecS5gHD+eccwnz4OEcIKmgmuWflLRn8D6vpldflRSR1CeOfJWeW9IrDXEpEVe7PHg4V02SegKpZrYk2W2J04PAb5PdCFe/efBwLoaibpf0iaT5koYF6SmS/hHsz/GypOmShgbFzgWeLb9WkNRP0ruSPpY0M3ggC0kjJD0T1Jkn6WJJVwb53pO0S0w15wf7LXxSsteLpNbB/gsLJN0LKOacz0j6MDgW+8DpNOCc6n+1XGPmwcO5HZ0O9AYOJLqMyu3BMhynA12BHsD5wGExZQ4HPqyk3s+AI8zsIKIrlv4p5tj+Qf19iS6ouDHI9y5wQUy+LDPrTXTUcF+QdhPwtpn1BJ4GOsfk/7mZHQL0AS6V1BrAzNYCTUo+O1cVvjyJczsaCDxqZkXAt5LeIPpLfSDwhJkVA6skvR5TpgNQ2YKHLYH7gzWEDEiPOfa6ma0H1geLYz4XpM8HDojJ9yiAmb0pqYWkHOBIooEHM/uvpLUx+S+VdFrwfnegG7A6+PwdsFvMZ+cS4iMP56pvE9C0kjy3EA0S+wOnlMq/JeZ9ccznYnb8A6/0WkLlri0kKZfoyOkwMzsQ+LjUOZsG7XauSjx4OLejt4BhklIltSX6l/0HwDvAGcHcR3sgN6bMp8DeldTbkugy9AAjqti2kvmXgUSX6/8BeBP4WZB+AlByF1VLYK2ZbZS0L9C/pJJguf9diS5m6FyV+GUr53b0NNH5jLlE/7K/xsxWSZpKdLOuhcBy4CPgh6DMf4kGk1di6pknqTh4/zhwG9HLVr8P8lfFZkkfE73kVbLp0s3Ao5IWADOBZUH6i8BvJH0KLCK6cVOJQ4D3zKywiu1wzlfVdS5ekpqZWUEw0fwBcHgQWDKJ7hp4eDBXUqdJ+gswzcxeTXZbXP3lIw/n4vd8MEmdAdxSsimQmW2SdBPQkR//8q/LPvHA4arLRx7OOecS5hPmzjnnEubBwznnXMI8eDjnnEuYBw/nnHMJ8+DhnHMuYf8PxWE6FfiyD6wAAAAASUVORK5CYII=\n", + "text/plain": [ + "

" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "cvglmnetPlot(fit1)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.6.9" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/glmnet_python/__pycache__/__init__.cpython-36.pyc b/glmnet_python/__pycache__/__init__.cpython-36.pyc new file mode 100644 index 0000000..3caa67d Binary files /dev/null and b/glmnet_python/__pycache__/__init__.cpython-36.pyc differ diff --git a/glmnet_python/__pycache__/coxnet.cpython-36.pyc b/glmnet_python/__pycache__/coxnet.cpython-36.pyc new file mode 100644 index 0000000..772698c Binary files /dev/null and b/glmnet_python/__pycache__/coxnet.cpython-36.pyc differ diff --git a/glmnet_python/__pycache__/cvcompute.cpython-36.pyc b/glmnet_python/__pycache__/cvcompute.cpython-36.pyc new file mode 100644 index 0000000..17cc44a Binary files /dev/null and b/glmnet_python/__pycache__/cvcompute.cpython-36.pyc differ diff --git a/glmnet_python/__pycache__/cvelnet.cpython-36.pyc b/glmnet_python/__pycache__/cvelnet.cpython-36.pyc new file mode 100644 index 0000000..4868ead Binary files /dev/null and b/glmnet_python/__pycache__/cvelnet.cpython-36.pyc differ diff --git a/glmnet_python/__pycache__/cvfishnet.cpython-36.pyc b/glmnet_python/__pycache__/cvfishnet.cpython-36.pyc new file mode 100644 index 0000000..41c210f Binary files /dev/null and b/glmnet_python/__pycache__/cvfishnet.cpython-36.pyc differ diff --git a/glmnet_python/__pycache__/cvglmnet.cpython-36.pyc 
b/glmnet_python/__pycache__/cvglmnet.cpython-36.pyc new file mode 100644 index 0000000..979deda Binary files /dev/null and b/glmnet_python/__pycache__/cvglmnet.cpython-36.pyc differ diff --git a/glmnet_python/__pycache__/cvglmnetCoef.cpython-36.pyc b/glmnet_python/__pycache__/cvglmnetCoef.cpython-36.pyc new file mode 100644 index 0000000..707b52e Binary files /dev/null and b/glmnet_python/__pycache__/cvglmnetCoef.cpython-36.pyc differ diff --git a/glmnet_python/__pycache__/cvglmnetPlot.cpython-36.pyc b/glmnet_python/__pycache__/cvglmnetPlot.cpython-36.pyc new file mode 100644 index 0000000..9cc3e57 Binary files /dev/null and b/glmnet_python/__pycache__/cvglmnetPlot.cpython-36.pyc differ diff --git a/glmnet_python/__pycache__/cvglmnetPredict.cpython-36.pyc b/glmnet_python/__pycache__/cvglmnetPredict.cpython-36.pyc new file mode 100644 index 0000000..84a4693 Binary files /dev/null and b/glmnet_python/__pycache__/cvglmnetPredict.cpython-36.pyc differ diff --git a/glmnet_python/__pycache__/cvlognet.cpython-36.pyc b/glmnet_python/__pycache__/cvlognet.cpython-36.pyc new file mode 100644 index 0000000..5b5790b Binary files /dev/null and b/glmnet_python/__pycache__/cvlognet.cpython-36.pyc differ diff --git a/glmnet_python/__pycache__/cvmrelnet.cpython-36.pyc b/glmnet_python/__pycache__/cvmrelnet.cpython-36.pyc new file mode 100644 index 0000000..7c8130d Binary files /dev/null and b/glmnet_python/__pycache__/cvmrelnet.cpython-36.pyc differ diff --git a/glmnet_python/__pycache__/cvmultnet.cpython-36.pyc b/glmnet_python/__pycache__/cvmultnet.cpython-36.pyc new file mode 100644 index 0000000..2ea7f86 Binary files /dev/null and b/glmnet_python/__pycache__/cvmultnet.cpython-36.pyc differ diff --git a/glmnet_python/__pycache__/dataprocess.cpython-36.pyc b/glmnet_python/__pycache__/dataprocess.cpython-36.pyc new file mode 100644 index 0000000..a661e35 Binary files /dev/null and b/glmnet_python/__pycache__/dataprocess.cpython-36.pyc differ diff --git 
a/glmnet_python/__pycache__/elnet.cpython-36.pyc b/glmnet_python/__pycache__/elnet.cpython-36.pyc new file mode 100644 index 0000000..d90c589 Binary files /dev/null and b/glmnet_python/__pycache__/elnet.cpython-36.pyc differ diff --git a/glmnet_python/__pycache__/fishnet.cpython-36.pyc b/glmnet_python/__pycache__/fishnet.cpython-36.pyc new file mode 100644 index 0000000..a92ddb0 Binary files /dev/null and b/glmnet_python/__pycache__/fishnet.cpython-36.pyc differ diff --git a/glmnet_python/__pycache__/glmnet.cpython-36.pyc b/glmnet_python/__pycache__/glmnet.cpython-36.pyc new file mode 100644 index 0000000..d40055f Binary files /dev/null and b/glmnet_python/__pycache__/glmnet.cpython-36.pyc differ diff --git a/glmnet_python/__pycache__/glmnetCoef.cpython-36.pyc b/glmnet_python/__pycache__/glmnetCoef.cpython-36.pyc new file mode 100644 index 0000000..dfcceb1 Binary files /dev/null and b/glmnet_python/__pycache__/glmnetCoef.cpython-36.pyc differ diff --git a/glmnet_python/__pycache__/glmnetControl.cpython-36.pyc b/glmnet_python/__pycache__/glmnetControl.cpython-36.pyc new file mode 100644 index 0000000..757aff9 Binary files /dev/null and b/glmnet_python/__pycache__/glmnetControl.cpython-36.pyc differ diff --git a/glmnet_python/__pycache__/glmnetPlot.cpython-36.pyc b/glmnet_python/__pycache__/glmnetPlot.cpython-36.pyc new file mode 100644 index 0000000..836be63 Binary files /dev/null and b/glmnet_python/__pycache__/glmnetPlot.cpython-36.pyc differ diff --git a/glmnet_python/__pycache__/glmnetPredict.cpython-36.pyc b/glmnet_python/__pycache__/glmnetPredict.cpython-36.pyc new file mode 100644 index 0000000..92559a2 Binary files /dev/null and b/glmnet_python/__pycache__/glmnetPredict.cpython-36.pyc differ diff --git a/glmnet_python/__pycache__/glmnetPrint.cpython-36.pyc b/glmnet_python/__pycache__/glmnetPrint.cpython-36.pyc new file mode 100644 index 0000000..438c8ac Binary files /dev/null and b/glmnet_python/__pycache__/glmnetPrint.cpython-36.pyc differ diff --git 
a/glmnet_python/__pycache__/glmnetSet.cpython-36.pyc b/glmnet_python/__pycache__/glmnetSet.cpython-36.pyc new file mode 100644 index 0000000..827f17a Binary files /dev/null and b/glmnet_python/__pycache__/glmnetSet.cpython-36.pyc differ diff --git a/glmnet_python/__pycache__/loadGlmLib.cpython-36.pyc b/glmnet_python/__pycache__/loadGlmLib.cpython-36.pyc new file mode 100644 index 0000000..43e6951 Binary files /dev/null and b/glmnet_python/__pycache__/loadGlmLib.cpython-36.pyc differ diff --git a/glmnet_python/__pycache__/lognet.cpython-36.pyc b/glmnet_python/__pycache__/lognet.cpython-36.pyc new file mode 100644 index 0000000..596704d Binary files /dev/null and b/glmnet_python/__pycache__/lognet.cpython-36.pyc differ diff --git a/glmnet_python/__pycache__/mrelnet.cpython-36.pyc b/glmnet_python/__pycache__/mrelnet.cpython-36.pyc new file mode 100644 index 0000000..74fdeab Binary files /dev/null and b/glmnet_python/__pycache__/mrelnet.cpython-36.pyc differ diff --git a/glmnet_python/__pycache__/printDict.cpython-36.pyc b/glmnet_python/__pycache__/printDict.cpython-36.pyc new file mode 100644 index 0000000..b99089f Binary files /dev/null and b/glmnet_python/__pycache__/printDict.cpython-36.pyc differ diff --git a/glmnet_python/__pycache__/structtype.cpython-36.pyc b/glmnet_python/__pycache__/structtype.cpython-36.pyc new file mode 100644 index 0000000..99d3ef1 Binary files /dev/null and b/glmnet_python/__pycache__/structtype.cpython-36.pyc differ diff --git a/glmnet_python/__pycache__/wtmean.cpython-36.pyc b/glmnet_python/__pycache__/wtmean.cpython-36.pyc new file mode 100644 index 0000000..cb69059 Binary files /dev/null and b/glmnet_python/__pycache__/wtmean.cpython-36.pyc differ diff --git a/glmnet_python/coxnet.py b/glmnet_python/coxnet.py index 30fb6e1..1a6ee2d 100644 --- a/glmnet_python/coxnet.py +++ b/glmnet_python/coxnet.py @@ -6,7 +6,8 @@ status -- column 1 """ # import packages/methods -import scipy +import numpy as np +import numpy as np import ctypes from 
loadGlmLib import loadGlmLib @@ -20,7 +21,7 @@ def coxnet(x, is_sparse, irs, pcs, y, weights, offset, parm, # pre-process data ty = y[:, 0] tevent = y[:, 1] - if scipy.any(ty <= 0): + if np.any(ty <= 0): raise ValueError('negative event time not permitted for cox family') if len(offset) == 0: offset = ty*0 @@ -35,17 +36,17 @@ def coxnet(x, is_sparse, irs, pcs, y, weights, offset, parm, ###################################### # force inputs into fortran order and scipy float64 copyFlag = False - x = x.astype(dtype = scipy.float64, order = 'F', copy = copyFlag) - irs = irs.astype(dtype = scipy.int32, order = 'F', copy = copyFlag) - pcs = pcs.astype(dtype = scipy.int32, order = 'F', copy = copyFlag) - ty = ty.astype(dtype = scipy.float64, order = 'F', copy = copyFlag) - tevent = tevent.astype(dtype = scipy.float64, order = 'F', copy = copyFlag) - offset = offset.astype(dtype = scipy.float64, order = 'F', copy = copyFlag) - weights = weights.astype(dtype = scipy.float64, order = 'F', copy = copyFlag) - jd = jd.astype(dtype = scipy.int32, order = 'F', copy = copyFlag) - vp = vp.astype(dtype = scipy.float64, order = 'F', copy = copyFlag) - cl = cl.astype(dtype = scipy.float64, order = 'F', copy = copyFlag) - ulam = ulam.astype(dtype = scipy.float64, order = 'F', copy = copyFlag) + x = x.astype(dtype = np.float64, order = 'F', copy = copyFlag) + irs = irs.astype(dtype = np.int32, order = 'F', copy = copyFlag) + pcs = pcs.astype(dtype = np.int32, order = 'F', copy = copyFlag) + ty = ty.astype(dtype = np.float64, order = 'F', copy = copyFlag) + tevent = tevent.astype(dtype = np.float64, order = 'F', copy = copyFlag) + offset = offset.astype(dtype = np.float64, order = 'F', copy = copyFlag) + weights = weights.astype(dtype = np.float64, order = 'F', copy = copyFlag) + jd = jd.astype(dtype = np.int32, order = 'F', copy = copyFlag) + vp = vp.astype(dtype = np.float64, order = 'F', copy = copyFlag) + cl = cl.astype(dtype = np.float64, order = 'F', copy = copyFlag) + ulam = 
ulam.astype(dtype = np.float64, order = 'F', copy = copyFlag) ###################################### # --------- ALLOCATE OUTPUTS --------- @@ -54,24 +55,24 @@ def coxnet(x, is_sparse, irs, pcs, y, weights, offset, parm, lmu = -1 lmu_r = ctypes.c_int(lmu) # ca - ca = scipy.zeros([nx, nlam], dtype = scipy.float64) - ca = ca.astype(dtype = scipy.float64, order = 'F', copy = False) + ca = np.zeros([nx, nlam], dtype = np.float64) + ca = ca.astype(dtype = np.float64, order = 'F', copy = False) ca_r = ca.ctypes.data_as(ctypes.POINTER(ctypes.c_double)) # ia - ia = -1*scipy.ones([nx], dtype = scipy.int32) - ia = ia.astype(dtype = scipy.int32, order = 'F', copy = False) + ia = -1*np.ones([nx], dtype = np.int32) + ia = ia.astype(dtype = np.int32, order = 'F', copy = False) ia_r = ia.ctypes.data_as(ctypes.POINTER(ctypes.c_int)) # nin - nin = -1*scipy.ones([nlam], dtype = scipy.int32) - nin = nin.astype(dtype = scipy.int32, order = 'F', copy = False) + nin = -1*np.ones([nlam], dtype = np.int32) + nin = nin.astype(dtype = np.int32, order = 'F', copy = False) nin_r = nin.ctypes.data_as(ctypes.POINTER(ctypes.c_int)) # dev - dev = -1*scipy.ones([nlam], dtype = scipy.float64) - dev = dev.astype(dtype = scipy.float64, order = 'F', copy = False) + dev = -1*np.ones([nlam], dtype = np.float64) + dev = dev.astype(dtype = np.float64, order = 'F', copy = False) dev_r = dev.ctypes.data_as(ctypes.POINTER(ctypes.c_double)) # alm - alm = -1*scipy.ones([nlam], dtype = scipy.float64) - alm = alm.astype(dtype = scipy.float64, order = 'F', copy = False) + alm = -1*np.ones([nlam], dtype = np.float64) + alm = alm.astype(dtype = np.float64, order = 'F', copy = False) alm_r = alm.ctypes.data_as(ctypes.POINTER(ctypes.c_double)) # nlp nlp = -1 @@ -146,21 +147,21 @@ def coxnet(x, is_sparse, irs, pcs, y, weights, offset, parm, ninmax = max(nin) # fix first value of alm (from inf to correct value) if ulam[0] == 0.0: - t1 = scipy.log(alm[1]) - t2 = scipy.log(alm[2]) - alm[0] = scipy.exp(2*t1 - t2) + t1 = 
np.log(alm[1]) + t2 = np.log(alm[2]) + alm[0] = np.exp(2*t1 - t2) # create return fit dictionary if ninmax > 0: ca = ca[0:ninmax, :] - df = scipy.sum(scipy.absolute(ca) > 0, axis=0) + df = np.sum(np.absolute(ca) > 0, axis=0) ja = ia[0:ninmax] - 1 # ia is 1-indexed in fortran - oja = scipy.argsort(ja) + oja = np.argsort(ja) ja1 = ja[oja] - beta = scipy.zeros([nvars, lmu], dtype = scipy.float64) + beta = np.zeros([nvars, lmu], dtype = np.float64) beta[ja1, :] = ca[oja, :] else: - beta = scipy.zeros([nvars, lmu], dtype = scipy.float64) - df = scipy.zeros([1, lmu], dtype = scipy.float64) + beta = np.zeros([nvars, lmu], dtype = np.float64) + df = np.zeros([1, lmu], dtype = np.float64) fit = dict() fit['beta'] = beta @@ -170,7 +171,7 @@ def coxnet(x, is_sparse, irs, pcs, y, weights, offset, parm, fit['lambdau'] = alm fit['npasses'] = nlp_r.value fit['jerr'] = jerr_r.value - fit['dim'] = scipy.array([nvars, lmu], dtype = scipy.integer) + fit['dim'] = np.array([nvars, lmu], dtype = np.integer) fit['offset'] = is_offset fit['class'] = 'coxnet' diff --git a/glmnet_python/cvcompute.py b/glmnet_python/cvcompute.py index 7dcac66..eef8e4b 100644 --- a/glmnet_python/cvcompute.py +++ b/glmnet_python/cvcompute.py @@ -4,24 +4,24 @@ Compute the weighted mean and SD within folds, and hence the SE of the mean """ -import scipy +import numpy as np from wtmean import wtmean def cvcompute(mat, weights, foldid, nlams): if len(weights.shape) > 1: - weights = scipy.reshape(weights, [weights.shape[0], ]) - wisum = scipy.bincount(foldid, weights = weights) - nfolds = scipy.amax(foldid) + 1 - outmat = scipy.ones([nfolds, mat.shape[1]])*scipy.NaN - good = scipy.zeros([nfolds, mat.shape[1]]) - mat[scipy.isinf(mat)] = scipy.NaN + weights = np.reshape(weights, [weights.shape[0], ]) + wisum = np.bincount(foldid, weights = weights) + nfolds = np.amax(foldid) + 1 + outmat = np.ones([nfolds, mat.shape[1]])*np.NaN + good = np.zeros([nfolds, mat.shape[1]]) + mat[np.isinf(mat)] = np.NaN for i in 
range(nfolds): tf = foldid == i mati = mat[tf, ] wi = weights[tf, ] outmat[i, :] = wtmean(mati, wi) good[i, 0:nlams[i]] = 1 - N = scipy.sum(good, axis = 0) + N = np.sum(good, axis = 0) cvcpt = dict() cvcpt['cvraw'] = outmat cvcpt['weights'] = wisum diff --git a/glmnet_python/cvelnet.py b/glmnet_python/cvelnet.py index e9f6349..f4d6533 100644 --- a/glmnet_python/cvelnet.py +++ b/glmnet_python/cvelnet.py @@ -3,7 +3,7 @@ Internal cvglmnet function. See also cvglmnet. """ -import scipy +import numpy as np from glmnetPredict import glmnetPredict from wtmean import wtmean from cvcompute import cvcompute @@ -31,29 +31,29 @@ def cvelnet(fit, \ if len(offset) > 0: y = y - offset - predmat = scipy.ones([y.size, lambdau.size])*scipy.NAN - nfolds = scipy.amax(foldid) + 1 + predmat = np.ones([y.size, lambdau.size])*np.NAN + nfolds = np.amax(foldid) + 1 nlams = [] for i in range(nfolds): which = foldid == i fitobj = fit[i].copy() fitobj['offset'] = False preds = glmnetPredict(fitobj, x[which, ]) - nlami = scipy.size(fit[i]['lambdau']) + nlami = np.size(fit[i]['lambdau']) predmat[which, 0:nlami] = preds nlams.append(nlami) # convert nlams to scipy array - nlams = scipy.array(nlams, dtype = scipy.integer) + nlams = np.array(nlams, dtype = np.integer) - N = y.shape[0] - scipy.sum(scipy.isnan(predmat), axis = 0) - yy = scipy.tile(y, [1, lambdau.size]) + N = y.shape[0] - np.sum(np.isnan(predmat), axis = 0) + yy = np.tile(y, [1, lambdau.size]) if ptype == 'mse': cvraw = (yy - predmat)**2 elif ptype == 'deviance': cvraw = (yy - predmat)**2 elif ptype == 'mae': - cvraw = scipy.absolute(yy - predmat) + cvraw = np.absolute(yy - predmat) if y.size/nfolds < 3 and grouped == True: print('Option grouped=false enforced in cv.glmnet, since < 3 observations per fold') @@ -67,7 +67,7 @@ def cvelnet(fit, \ cvm = wtmean(cvraw, weights) sqccv = (cvraw - cvm)**2 - cvsd = scipy.sqrt(wtmean(sqccv, weights)/(N-1)) + cvsd = np.sqrt(wtmean(sqccv, weights)/(N-1)) result = dict() result['cvm'] = cvm diff 
--git a/glmnet_python/cvfishnet.py b/glmnet_python/cvfishnet.py index 6dbd3bd..4bf7ef7 100644 --- a/glmnet_python/cvfishnet.py +++ b/glmnet_python/cvfishnet.py @@ -3,7 +3,7 @@ Internal cvglmnet function. See also cvglmnet. """ -import scipy +import numpy as np from glmnetPredict import glmnetPredict from wtmean import wtmean from cvcompute import cvcompute @@ -34,8 +34,8 @@ def cvfishnet(fit, \ else: is_offset = False - predmat = scipy.ones([y.size, lambdau.size])*scipy.NAN - nfolds = scipy.amax(foldid) + 1 + predmat = np.ones([y.size, lambdau.size])*np.NAN + nfolds = np.amax(foldid) + 1 nlams = [] for i in range(nfolds): which = foldid == i @@ -43,23 +43,23 @@ def cvfishnet(fit, \ if is_offset: off_sub = offset[which] else: - off_sub = scipy.empty([0]) + off_sub = np.empty([0]) preds = glmnetPredict(fitobj, x[which, ], offset = off_sub) - nlami = scipy.size(fit[i]['lambdau']) + nlami = np.size(fit[i]['lambdau']) predmat[which, 0:nlami] = preds nlams.append(nlami) - # convert nlams to scipy array - nlams = scipy.array(nlams, dtype = scipy.integer) + # convert nlams to np array + nlams = np.array(nlams, dtype = np.integer) - N = y.shape[0] - scipy.sum(scipy.isnan(predmat), axis = 0) - yy = scipy.tile(y, [1, lambdau.size]) + N = y.shape[0] - np.sum(np.isnan(predmat), axis = 0) + yy = np.tile(y, [1, lambdau.size]) if ptype == 'mse': cvraw = (yy - predmat)**2 elif ptype == 'deviance': cvraw = devi(yy, predmat) elif ptype == 'mae': - cvraw = scipy.absolute(yy - predmat) + cvraw = np.absolute(yy - predmat) if y.size/nfolds < 3 and grouped == True: print('Option grouped=false enforced in cvglmnet, since < 3 observations per fold') @@ -73,7 +73,7 @@ def cvfishnet(fit, \ cvm = wtmean(cvraw, weights) sqccv = (cvraw - cvm)**2 - cvsd = scipy.sqrt(wtmean(sqccv, weights)/(N-1)) + cvsd = np.sqrt(wtmean(sqccv, weights)/(N-1)) result = dict() result['cvm'] = cvm @@ -88,8 +88,8 @@ def cvfishnet(fit, \ # end of cvfishnet #========================= def devi(yy, eta): - deveta = yy*eta 
- scipy.exp(eta) - devy = yy*scipy.log(yy) - yy + deveta = yy*eta - np.exp(eta) + devy = yy*np.log(yy) - yy devy[yy == 0] = 0 result = 2*(devy - deveta) return(result) diff --git a/glmnet_python/cvglmnet.py b/glmnet_python/cvglmnet.py index a539d54..6fd4d0f 100644 --- a/glmnet_python/cvglmnet.py +++ b/glmnet_python/cvglmnet.py @@ -20,8 +20,8 @@ ======================= INPUT ARGUMENTS - x nobs x nvar scipy 2D array of x parameters (as in glmnet). - y nobs x nc scipy Response y as in glmnet. + x nobs x nvar np 2D array of x parameters (as in glmnet). + y nobs x nc np Response y as in glmnet. family Response type as family in glmnet. options Options as in glmnet. ptype loss to use for cross-validation. Currently five options, not @@ -126,8 +126,8 @@ class Type of regression - internal usage. EXAMPLES: # Gaussian - x = scipy.random.rand(100, 10) - y = scipy.random.rand(100, 1) + x = np.random.rand(100, 10) + y = np.random.rand(100, 1) cvfit = cvglmnet(x = x, y = y) cvglmnetPlot(cvfit) print( cvglmnetCoef(cvfit) ) @@ -136,27 +136,27 @@ class Type of regression - internal usage. 
cvglmnetPlot(cvfit1) # Binomial - x = scipy.random.rand(100, 10) - y = scipy.random.rand(100,1) + x = np.random.rand(100, 10) + y = np.random.rand(100,1) y = (y > 0.5)*1.0 fit = cvglmnet(x = x, y = y, family = 'binomial', ptype = 'class') cvglmnetPlot(fit) # poisson - x = scipy.random.rand(100,10) - y = scipy.random.poisson(size = [100, 1])*1.0 + x = np.random.rand(100,10) + y = np.random.poisson(size = [100, 1])*1.0 cvfit = cvglmnet(x = x, y = y, family = 'poisson') cvglmnetPlot(cvfit) # Multivariate Gaussian: - x = scipy.random.rand(100, 10) - y = scipy.random.rand(100,3) + x = np.random.rand(100, 10) + y = np.random.rand(100,3) cvfit = cvglmnet(x = x, y = y, family = 'mgaussian') cvglmnetPlot(cvfit) # Multinomial - x = scipy.random.rand(100,10) - y = scipy.random.rand(100,1) + x = np.random.rand(100,10) + y = np.random.rand(100,1) y[y < 0.3] = 1.0 y[y < 0.6] = 2.0 y[y < 1.0] = 3.0 @@ -199,7 +199,7 @@ class Type of regression - internal usage. import multiprocessing from glmnetSet import glmnetSet from glmnetPredict import glmnetPredict -import scipy +import numpy as np from glmnet import glmnet from cvelnet import cvelnet from cvlognet import cvlognet @@ -212,7 +212,7 @@ def cvglmnet(*, x, family = 'gaussian', ptype = 'default', nfolds = 10, - foldid = scipy.empty([0]), + foldid = np.empty([0]), parallel = 1, keep = False, grouped = True, @@ -227,18 +227,18 @@ def cvglmnet(*, x, # we should not really need this. user must supply the right shape # if y.shape[0] != nobs: - # y = scipy.transpose(y) + # y = np.transpose(y) # convert 1d python array of size nobs to 2d python array of size nobs x 1 if len(y.shape) == 1: - y = scipy.reshape(y, [y.size, 1]) + y = np.reshape(y, [y.size, 1]) # we should not really need this. 
user must supply the right shape # if (len(options['offset']) > 0) and (options['offset'].shape[0] != nobs): - # options['offset'] = scipy.transpose(options['offset']) + # options['offset'] = np.transpose(options['offset']) if len(options['weights']) == 0: - options['weights'] = scipy.ones([nobs, 1], dtype = scipy.float64) + options['weights'] = np.ones([nobs, 1], dtype = np.float64) # main call to glmnet glmfit = glmnet(x = x, y = y, family = family, **options) @@ -246,34 +246,34 @@ def cvglmnet(*, x, is_offset = glmfit['offset'] options['lambdau'] = glmfit['lambdau'] - nz = glmnetPredict(glmfit, scipy.empty([0]), scipy.empty([0]), 'nonzero') + nz = glmnetPredict(glmfit, np.empty([0]), np.empty([0]), 'nonzero') if glmfit['class'] == 'multnet': - nnz = scipy.zeros([len(options['lambdau']), len(nz)]) + nnz = np.zeros([len(options['lambdau']), len(nz)]) for i in range(len(nz)): - nnz[:, i] = scipy.transpose(scipy.sum(nz[i], axis = 0)) - nz = scipy.ceil(scipy.median(nnz, axis = 1)) + nnz[:, i] = np.transpose(np.sum(nz[i], axis = 0)) + nz = np.ceil(np.median(nnz, axis = 1)) elif glmfit['class'] == 'mrelnet': - nz = scipy.transpose(scipy.sum(nz[0], axis = 0)) + nz = np.transpose(np.sum(nz[0], axis = 0)) else: - nz = scipy.transpose(scipy.sum(nz, axis = 0)) + nz = np.transpose(np.sum(nz, axis = 0)) if len(foldid) == 0: - ma = scipy.tile(scipy.arange(nfolds), [1, int(scipy.floor(nobs/nfolds))]) - mb = scipy.arange(scipy.mod(nobs, nfolds)) - mb = scipy.reshape(mb, [1, mb.size]) - population = scipy.append(ma, mb, axis = 1) - mc = scipy.random.permutation(len(population)) + ma = np.tile(np.arange(nfolds), [1, int(np.floor(nobs/nfolds))]) + mb = np.arange(np.mod(nobs, nfolds)) + mb = np.reshape(mb, [1, mb.size]) + population = np.append(ma, mb, axis = 1) + mc = np.random.permutation(len(population)) mc = mc[0:nobs] foldid = population[mc] - foldid = scipy.reshape(foldid, [foldid.size,]) + foldid = np.reshape(foldid, [foldid.size,]) else: - nfolds = scipy.amax(foldid) + 1 + 
nfolds = np.amax(foldid) + 1 if nfolds < 3: raise ValueError('nfolds must be bigger than 3; nfolds = 10 recommended') cpredmat = list() - foldid = scipy.reshape(foldid, [foldid.size, ]) + foldid = np.reshape(foldid, [foldid.size, ]) if parallel != 1: if parallel == -1: num_cores = multiprocessing.cpu_count() @@ -318,10 +318,10 @@ def cvglmnet(*, x, CVerr = dict() CVerr['lambdau'] = options['lambdau'] - CVerr['cvm'] = scipy.transpose(cvm) - CVerr['cvsd'] = scipy.transpose(cvsd) - CVerr['cvup'] = scipy.transpose(cvm + cvsd) - CVerr['cvlo'] = scipy.transpose(cvm - cvsd) + CVerr['cvm'] = np.transpose(cvm) + CVerr['cvsd'] = np.transpose(cvsd) + CVerr['cvup'] = np.transpose(cvm + cvsd) + CVerr['cvlo'] = np.transpose(cvm - cvsd) CVerr['nzero'] = nz CVerr['name'] = cvname CVerr['glmnet_fit'] = glmfit @@ -330,10 +330,10 @@ def cvglmnet(*, x, CVerr['foldid'] = foldid if ptype == 'auc': cvm = -cvm - CVerr['lambda_min'] = scipy.amax(options['lambdau'][cvm <= scipy.amin(cvm)]).reshape([1]) + CVerr['lambda_min'] = np.amax(options['lambdau'][cvm <= np.amin(cvm)]).reshape([1]) idmin = options['lambdau'] == CVerr['lambda_min'] semin = cvm[idmin] + cvsd[idmin] - CVerr['lambda_1se'] = scipy.amax(options['lambdau'][cvm <= semin]).reshape([1]) + CVerr['lambda_1se'] = np.amax(options['lambdau'][cvm <= semin]).reshape([1]) CVerr['class'] = 'cvglmnet' return(CVerr) diff --git a/glmnet_python/cvglmnetCoef.py b/glmnet_python/cvglmnetCoef.py index e0f0adf..711ce2f 100644 --- a/glmnet_python/cvglmnetCoef.py +++ b/glmnet_python/cvglmnetCoef.py @@ -69,7 +69,7 @@ """ -import scipy +import numpy as np from glmnetCoef import glmnetCoef def cvglmnetCoef(obj, s = None): @@ -77,7 +77,7 @@ def cvglmnetCoef(obj, s = None): if s is None or len(s) == 0: s = obj['lambda_1se'] - if isinstance(s, scipy.ndarray): + if isinstance(s, np.ndarray): lambdau = s elif isinstance(s, str): sbase = ['lambda_1se', 'lambda_min'] diff --git a/glmnet_python/cvglmnetPlot.py b/glmnet_python/cvglmnetPlot.py index 
f55a97d..f9a866e 100644 --- a/glmnet_python/cvglmnetPlot.py +++ b/glmnet_python/cvglmnetPlot.py @@ -44,11 +44,11 @@ EXAMPLES: - scipy.random.seed(1) - x=scipy.random.normal(size = (100,20)) - y=scipy.random.normal(size = (100,1)) - g2=scipy.random.choice(2,size = (100,1))*1.0 - g4=scipy.random.choice(4,size = (100,1))*1.0 + np.random.seed(1) + x=np.random.normal(size = (100,20)) + y=np.random.normal(size = (100,1)) + g2=np.random.choice(2,size = (100,1))*1.0 + g4=np.random.choice(4,size = (100,1))*1.0 plt.figure() fit1=cvglmnet(x = x.copy(),y = y.copy()) @@ -64,43 +64,43 @@ """ -import scipy +import numpy as np def cvglmnetPlot(cvobject, sign_lambda = 1.0, **options): import matplotlib.pyplot as plt - sloglam = sign_lambda*scipy.log(cvobject['lambdau']) + sloglam = sign_lambda*np.log(cvobject['lambdau']) fig = plt.gcf() ax1 = plt.gca() #fig, ax1 = plt.subplots() plt.errorbar(sloglam, cvobject['cvm'], cvobject['cvsd'], \ - ecolor = (0.5, 0.5, 0.5), \ + ecolor = (0.5, 0.5, 0.5),zorder=10, \ **options ) #plt.hold(True) plt.plot(sloglam, cvobject['cvm'], linestyle = 'dashed',\ - marker = 'o', markerfacecolor = 'r') + marker = 'o',zorder=5, markerfacecolor = 'r') xlim1 = ax1.get_xlim() ylim1 = ax1.get_ylim() - xval = sign_lambda*scipy.log(scipy.array([cvobject['lambda_min'], cvobject['lambda_min']])) - plt.plot(xval, ylim1, color = 'b', linestyle = 'dashed', \ - linewidth = 1) + xval = sign_lambda*np.log(np.array([cvobject['lambda_min'], cvobject['lambda_min']])) + plt.plot(xval, ylim1, color = 'k', linestyle = 'dashed', \ + linewidth = 1,zorder=2) if cvobject['lambda_min'] != cvobject['lambda_1se']: - xval = sign_lambda*scipy.log([cvobject['lambda_1se'], cvobject['lambda_1se']]) - plt.plot(xval, ylim1, color = 'b', linestyle = 'dashed', \ - linewidth = 1) + xval = sign_lambda*np.log([cvobject['lambda_1se'], cvobject['lambda_1se']]) + plt.plot(xval, ylim1, color = 'k', linestyle = 'dashed', \ + linewidth = 1,zorder=2) ax2 = ax1.twiny() ax2.xaxis.tick_top() atdf = 
ax1.get_xticks() - indat = scipy.ones(atdf.shape, dtype = scipy.integer) + indat = np.ones(atdf.shape, dtype = np.integer) if sloglam[-1] >= sloglam[1]: for j in range(len(sloglam)-1, -1, -1): indat[atdf <= sloglam[j]] = j diff --git a/glmnet_python/cvglmnetPredict.py b/glmnet_python/cvglmnetPredict.py index 7d325c5..04cdfaa 100644 --- a/glmnet_python/cvglmnetPredict.py +++ b/glmnet_python/cvglmnetPredict.py @@ -67,23 +67,23 @@ cvglmnet and glmnetPredict. EXAMPLES: - x = scipy.random.rand(100, 10) - y = scipy.random.rand(100, 1) + x = np.random.rand(100, 10) + y = np.random.rand(100, 1) cvfit = cvglmnet(x = x, y = y) cvglmnetPredict(cvfit, x[0:5, :], 'lambda_min') - cvglmnetPredict(cvfit, x[0:5, :], scipy.array([0.0866, 0.2323])) + cvglmnetPredict(cvfit, x[0:5, :], np.array([0.0866, 0.2323])) """ from cvglmnetCoef import cvglmnetCoef from glmnetPredict import glmnetPredict -import scipy +import numpy as np def cvglmnetPredict(obj, newx = None, s = 'lambda_1se', **options): if newx is None: CVpred = cvglmnetCoef(obj) return(CVpred) - if type(s) == scipy.ndarray and s.dtype == 'float64': + if type(s) == np.ndarray and s.dtype == 'float64': lambdau = s elif s in ['lambda_1se', 'lambda_min']: lambdau = obj[s] @@ -92,5 +92,5 @@ def cvglmnetPredict(obj, newx = None, s = 'lambda_1se', **options): CVpred = glmnetPredict(obj['glmnet_fit'], newx, lambdau, **options) - return(CVpred) + return (CVpred) \ No newline at end of file diff --git a/glmnet_python/cvlognet.py b/glmnet_python/cvlognet.py index adae68c..1b90cdb 100644 --- a/glmnet_python/cvlognet.py +++ b/glmnet_python/cvlognet.py @@ -3,7 +3,7 @@ Internal function called by cvglmnet. 
See also cvglmnet """ -import scipy +import numpy as np from glmnetPredict import glmnetPredict from wtmean import wtmean from cvcompute import cvcompute @@ -33,15 +33,15 @@ def cvlognet(fit, \ prob_max = 1 - prob_min nc = y.shape[1] if nc == 1: - classes, sy = scipy.unique(y, return_inverse = True) + classes, sy = np.unique(y, return_inverse = True) nc = len(classes) - indexes = scipy.eye(nc, nc) + indexes = np.eye(nc, nc) y = indexes[sy, :] else: - classes = scipy.arange(nc) + 1 # 1:nc + classes = np.arange(nc) + 1 # 1:nc N = y.size - nfolds = scipy.amax(foldid) + 1 + nfolds = np.amax(foldid) + 1 if (N/nfolds < 10) and (type == 'auc'): print('Warning: Too few (<10) observations per fold for type.measure=auc in cvlognet') print('Warning: changed to type.measure = deviance. Alternately, use smaller value ') @@ -53,8 +53,8 @@ def cvlognet(fit, \ grouped = False is_offset = not(len(offset) == 0) - predmat = scipy.ones([y.shape[0], lambdau.size])*scipy.NAN - nfolds = scipy.amax(foldid) + 1 + predmat = np.ones([y.shape[0], lambdau.size])*np.NAN + nfolds = np.amax(foldid) + 1 nlams = [] for i in range(nfolds): which = foldid == i @@ -62,46 +62,46 @@ def cvlognet(fit, \ if is_offset: off_sub = offset[which, ] else: - off_sub = scipy.empty([0]) - preds = glmnetPredict(fitobj, x[which, ], scipy.empty([0]), 'response', False, off_sub) - nlami = scipy.size(fit[i]['lambdau']) + off_sub = np.empty([0]) + preds = glmnetPredict(fitobj, x[which, ], np.empty([0]), 'response', False, off_sub) + nlami = np.size(fit[i]['lambdau']) predmat[which, 0:nlami] = preds nlams.append(nlami) - # convert nlams to scipy array - nlams = scipy.array(nlams, dtype = scipy.integer) + # convert nlams to np array + nlams = np.array(nlams, dtype = np.integer) if ptype == 'auc': - cvraw = scipy.zeros([nfolds, lambdau.size])*scipy.NaN - good = scipy.zeros([nfolds, lambdau.size]) + cvraw = np.zeros([nfolds, lambdau.size])*np.NaN + good = np.zeros([nfolds, lambdau.size]) for i in range(nfolds): good[i, 
0:nlams[i]] = 1 which = foldid == i for j in range(nlams[i]): cvraw[i,j] = auc_mat(y[which,], predmat[which,j], weights[which]) - N = scipy.sum(good, axis = 0) - sweights = scipy.zeros([nfolds, 1]) + N = np.sum(good, axis = 0) + sweights = np.zeros([nfolds, 1]) for i in range(nfolds): - sweights[i]= scipy.sum(weights[foldid == i], axis = 0) + sweights[i]= np.sum(weights[foldid == i], axis = 0) weights = sweights else: - ywt = scipy.sum(y, axis = 1, keepdims = True) - y = y/scipy.tile(ywt, [1, y.shape[1]]) + ywt = np.sum(y, axis = 1, keepdims = True) + y = y/np.tile(ywt, [1, y.shape[1]]) weights = weights*ywt - N = y.shape[0] - scipy.sum(scipy.isnan(predmat), axis = 0, keepdims = True) - yy1 = scipy.tile(y[:,0:1], [1, lambdau.size]) - yy2 = scipy.tile(y[:,1:2], [1, lambdau.size]) + N = y.shape[0] - np.sum(np.isnan(predmat), axis = 0, keepdims = True) + yy1 = np.tile(y[:,0:1], [1, lambdau.size]) + yy2 = np.tile(y[:,1:2], [1, lambdau.size]) if ptype == 'mse': cvraw = (yy1 - (1 - predmat))**2 + (yy2 - (1 - predmat))**2 elif ptype == 'deviance': - predmat = scipy.minimum(scipy.maximum(predmat, prob_min), prob_max) - lp = yy1*scipy.log(1-predmat) + yy2*scipy.log(predmat) - ly = scipy.log(y) + predmat = np.minimum(np.maximum(predmat, prob_min), prob_max) + lp = yy1*np.log(1-predmat) + yy2*np.log(predmat) + ly = np.log(y) ly[y == 0] = 0 - ly = scipy.dot(y*ly, scipy.array([1.0, 1.0]).reshape([2,1])) - cvraw = 2*(scipy.tile(ly, [1, lambdau.size]) - lp) + ly = np.dot(y*ly, np.array([1.0, 1.0]).reshape([2,1])) + cvraw = 2*(np.tile(ly, [1, lambdau.size]) - lp) elif ptype == 'mae': - cvraw = scipy.absolute(yy1 - (1 - predmat)) + scipy.absolute(yy2 - (1 - predmat)) + cvraw = np.absolute(yy1 - (1 - predmat)) + np.absolute(yy2 - (1 - predmat)) elif ptype == 'class': cvraw = yy1*(predmat > 0.5) + yy2*(predmat <= 0.5) @@ -117,7 +117,7 @@ def cvlognet(fit, \ cvm = wtmean(cvraw, weights) sqccv = (cvraw - cvm)**2 - cvsd = scipy.sqrt(wtmean(sqccv, weights)/(N-1)) + cvsd = 
np.sqrt(wtmean(sqccv, weights)/(N-1)) result = dict() result['cvm'] = cvm @@ -137,35 +137,35 @@ def cvlognet(fit, \ #========================= def auc_mat(y, prob, weights = None): if weights == None or len(weights) == 0: - weights = scipy.ones([y.shape[0], 1]) + weights = np.ones([y.shape[0], 1]) wweights = weights*y wweights = wweights.flatten() - wweights = scipy.reshape(wweights, [1, wweights.size]) + wweights = np.reshape(wweights, [1, wweights.size]) ny= y.shape[0] - a = scipy.zeros([ny, 1]) - b = scipy.ones([ny, 1]) - yy = scipy.vstack((a, b)) - pprob = scipy.vstack((prob,prob)) + a = np.zeros([ny, 1]) + b = np.ones([ny, 1]) + yy = np.vstack((a, b)) + pprob = np.vstack((prob,prob)) result = auc(yy, pprob, wweights) return(result) #========================= def auc(y, prob, w): if len(w) == 0: - mindiff = scipy.amin(scipy.diff(scipy.unique(prob))) - pert = scipy.random.uniform(0, mindiff/3, prob.size) - t, rprob = scipy.unique(prob + pert, return_inverse = True) - n1 = scipy.sum(y, keepdims = True) + mindiff = np.amin(np.diff(np.unique(prob))) + pert = np.random.uniform(0, mindiff/3, prob.size) + t, rprob = np.unique(prob + pert, return_inverse = True) + n1 = np.sum(y, keepdims = True) n0 = y.shape[0] - n1 - u = scipy.sum(rprob[y == 1]) - n1*(n1 + 1)/2 + u = np.sum(rprob[y == 1]) - n1*(n1 + 1)/2 result = u/(n1*n0) else: - op = scipy.argsort(prob) + op = np.argsort(prob) y = y[op] w = w[op] - cw = scipy.cumsum(w) + cw = np.cumsum(w) w1 = w[y == 1] - cw1 = scipy.cumsum(w1) - wauc = scipy.sum(w1*(cw[y == 1] - cw1)) + cw1 = np.cumsum(w1) + wauc = np.sum(w1*(cw[y == 1] - cw1)) sumw = cw1[-1] sumw = sumw*(c1[-1] - sumw) result = wauc/sumw diff --git a/glmnet_python/cvmrelnet.py b/glmnet_python/cvmrelnet.py index 40e8540..c3fe8f8 100644 --- a/glmnet_python/cvmrelnet.py +++ b/glmnet_python/cvmrelnet.py @@ -3,7 +3,7 @@ Internal function called by cvglmnet. 
See also cvglmnet """ -import scipy +import numpy as np from glmnetPredict import glmnetPredict from wtmean import wtmean from cvcompute import cvcompute @@ -34,27 +34,27 @@ def cvmrelnet(fit, \ if len(offset) > 0: y = y - offset - predmat = scipy.ones([nobs, nc, lambdau.size])*scipy.NAN - nfolds = scipy.amax(foldid) + 1 + predmat = np.ones([nobs, nc, lambdau.size])*np.NAN + nfolds = np.amax(foldid) + 1 nlams = [] for i in range(nfolds): which = foldid == i fitobj = fit[i].copy() fitobj['offset'] = False preds = glmnetPredict(fitobj, x[which, ]) - nlami = scipy.size(fit[i]['lambdau']) + nlami = np.size(fit[i]['lambdau']) predmat[which, 0:nlami] = preds nlams.append(nlami) # convert nlams to scipy array - nlams = scipy.array(nlams, dtype = scipy.integer) + nlams = np.array(nlams, dtype = np.integer) - N = nobs - scipy.reshape(scipy.sum(scipy.isnan(predmat[:, 1, :]), axis = 0), (1, -1)) - bigY = scipy.tile(y[:, :, None], [1, 1, lambdau.size]) + N = nobs - np.reshape(np.sum(np.isnan(predmat[:, 1, :]), axis = 0), (1, -1)) + bigY = np.tile(y[:, :, None], [1, 1, lambdau.size]) if ptype == 'mse': - cvraw = scipy.sum((bigY - predmat)**2, axis = 1).squeeze() + cvraw = np.sum((bigY - predmat)**2, axis = 1).squeeze() elif ptype == 'mae': - cvraw = scipy.sum(scipy.absolute(bigY - predmat), axis = 1).squeeze() + cvraw = np.sum(np.absolute(bigY - predmat), axis = 1).squeeze() if y.size/nfolds < 3 and grouped == True: print('Option grouped=false enforced in cv.glmnet, since < 3 observations per fold') @@ -68,7 +68,7 @@ def cvmrelnet(fit, \ cvm = wtmean(cvraw, weights) sqccv = (cvraw - cvm)**2 - cvsd = scipy.sqrt(wtmean(sqccv, weights)/(N-1)) + cvsd = np.sqrt(wtmean(sqccv, weights)/(N-1)) result = dict() result['cvm'] = cvm diff --git a/glmnet_python/cvmultnet.py b/glmnet_python/cvmultnet.py index aa3efed..f14a2ba 100644 --- a/glmnet_python/cvmultnet.py +++ b/glmnet_python/cvmultnet.py @@ -3,7 +3,7 @@ Internal function called by cvglmnet. 
See also cvglmnet """ -import scipy +import numpy as np from glmnetPredict import glmnetPredict from wtmean import wtmean from cvcompute import cvcompute @@ -33,16 +33,16 @@ def cvmultnet(fit, \ prob_max = 1 - prob_min nc = y.shape if nc[1] == 1: - classes, sy = scipy.unique(y, return_inverse = True) + classes, sy = np.unique(y, return_inverse = True) nc = len(classes) - indexes = scipy.eye(nc, nc) + indexes = np.eye(nc, nc) y = indexes[sy, :] else: nc = nc[1] is_offset = not(len(offset) == 0) - predmat = scipy.ones([y.shape[0], nc, lambdau.size])*scipy.NAN - nfolds = scipy.amax(foldid) + 1 + predmat = np.ones([y.shape[0], nc, lambdau.size])*np.NAN + nfolds = np.amax(foldid) + 1 nlams = [] for i in range(nfolds): which = foldid == i @@ -50,39 +50,39 @@ def cvmultnet(fit, \ if is_offset: off_sub = offset[which, ] else: - off_sub = scipy.empty([0]) - preds = glmnetPredict(fitobj, x[which, ], scipy.empty([0]), 'response', False, off_sub) - nlami = scipy.size(fit[i]['lambdau']) + off_sub = np.empty([0]) + preds = glmnetPredict(fitobj, x[which, ], np.empty([0]), 'response', False, off_sub) + nlami = np.size(fit[i]['lambdau']) predmat[which, 0:nlami] = preds nlams.append(nlami) - # convert nlams to scipy array - nlams = scipy.array(nlams, dtype = scipy.integer) + # convert nlams to np array + nlams = np.array(nlams, dtype = np.integer) - ywt = scipy.sum(y, axis = 1, keepdims = True) - y = y/scipy.tile(ywt, [1, y.shape[1]]) + ywt = np.sum(y, axis = 1, keepdims = True) + y = y/np.tile(ywt, [1, y.shape[1]]) weights = weights*ywt - N = y.shape[0] - scipy.sum(scipy.isnan(predmat[:,1,:]), axis = 0, keepdims = True) - bigY = scipy.tile(y[:, :, None], [1, 1, lambdau.size]) + N = y.shape[0] - np.sum(np.isnan(predmat[:,1,:]), axis = 0, keepdims = True) + bigY = np.tile(y[:, :, None], [1, 1, lambdau.size]) if ptype == 'mse': - cvraw = scipy.sum((bigY - predmat)**2, axis = 1).squeeze() + cvraw = np.sum((bigY - predmat)**2, axis = 1).squeeze() elif ptype == 'deviance': - predmat = 
scipy.minimum(scipy.maximum(predmat, prob_min), prob_max) - lp = bigY*scipy.log(predmat) - ly = bigY*scipy.log(bigY) + predmat = np.minimum(np.maximum(predmat, prob_min), prob_max) + lp = bigY*np.log(predmat) + ly = bigY*np.log(bigY) ly[y == 0] = 0 - cvraw = scipy.sum(2*(ly - lp), axis = 1).squeeze() + cvraw = np.sum(2*(ly - lp), axis = 1).squeeze() elif ptype == 'mae': - cvraw = scipy.sum(scipy.absolute(bigY - predmat), axis = 1).squeeze() + cvraw = np.sum(np.absolute(bigY - predmat), axis = 1).squeeze() elif ptype == 'class': - classid = scipy.zeros([y.shape[0], lambdau.size])*scipy.NaN + classid = np.zeros([y.shape[0], lambdau.size])*np.NaN for i in range(lambdau.size): classid[:, i] = glmnet_softmax(predmat[:,:,i]) classid = classid.reshape([classid.size,1]) yperm = bigY.transpose((0,2,1)) yperm = yperm.reshape([yperm.size, 1]) idx = sub2ind(yperm.shape, range(len(classid)), classid.transpose()) - cvraw = scipy.reshape(1 - yperm[idx], [-1, lambdau.size]); + cvraw = np.reshape(1 - yperm[idx], [-1, lambdau.size]); if grouped == True: cvob = cvcompute(cvraw, weights, foldid, nlams) @@ -92,7 +92,7 @@ def cvmultnet(fit, \ cvm = wtmean(cvraw, weights) sqccv = (cvraw - cvm)**2 - cvsd = scipy.sqrt(wtmean(sqccv, weights)/(N-1)) + cvsd = np.sqrt(wtmean(sqccv, weights)/(N-1)) result = dict() result['cvm'] = cvm @@ -115,16 +115,16 @@ def sub2ind(array_shape, rows, cols): #========================= def glmnet_softmax(x): d = x.shape - nas = scipy.any(scipy.isnan(x), axis = 1) - if scipy.any(nas): - pclass = scipy.zeros([d[0], 1])*scipy.NaN - if scipy.sum(nas) < d[0]: + nas = np.any(np.isnan(x), axis = 1) + if np.any(nas): + pclass = np.zeros([d[0], 1])*np.NaN + if np.sum(nas) < d[0]: pclass2 = glmnet_softmax(x[~nas, :]) pclass[~nas] = pclass2 result = pclass else: maxdist = x[:, 1] - pclass = scipy.ones([d[0], 1]) + pclass = np.ones([d[0], 1]) for i in range(1, d[1], 1): t = x[:, i] > maxdist pclass[t] = i diff --git a/glmnet_python/elnet.py b/glmnet_python/elnet.py index 
0f462cd..d0aba5c 100644 --- a/glmnet_python/elnet.py +++ b/glmnet_python/elnet.py @@ -5,7 +5,7 @@ # import packages/methods -import scipy +import numpy as np import ctypes from loadGlmLib import loadGlmLib @@ -17,7 +17,7 @@ def elnet(x, is_sparse, irs, pcs, y, weights, offset, gtype, parm, lempty, glmlib = loadGlmLib() # pre-process data - ybar = scipy.dot(scipy.transpose(y), weights) + ybar = np.dot(np.transpose(y), weights) ybar = ybar/sum(weights) nulldev = (y - ybar)**2 * weights # ka @@ -44,15 +44,15 @@ def elnet(x, is_sparse, irs, pcs, y, weights, offset, gtype, parm, lempty, ###################################### # force inputs into fortran order and into the correct scipy datatype copyFlag = False - x = x.astype(dtype = scipy.float64, order = 'F', copy = copyFlag) - irs = irs.astype(dtype = scipy.int32, order = 'F', copy = copyFlag) - pcs = pcs.astype(dtype = scipy.int32, order = 'F', copy = copyFlag) - y = y.astype(dtype = scipy.float64, order = 'F', copy = copyFlag) - weights = weights.astype(dtype = scipy.float64, order = 'F', copy = copyFlag) - jd = jd.astype(dtype = scipy.int32, order = 'F', copy = copyFlag) - vp = vp.astype(dtype = scipy.float64, order = 'F', copy = copyFlag) - cl = cl.astype(dtype = scipy.float64, order = 'F', copy = copyFlag) - ulam = ulam.astype(dtype = scipy.float64, order = 'F', copy = copyFlag) + x = x.astype(dtype = np.float64, order = 'F', copy = copyFlag) + irs = irs.astype(dtype = np.int32, order = 'F', copy = copyFlag) + pcs = pcs.astype(dtype = np.int32, order = 'F', copy = copyFlag) + y = y.astype(dtype = np.float64, order = 'F', copy = copyFlag) + weights = weights.astype(dtype = np.float64, order = 'F', copy = copyFlag) + jd = jd.astype(dtype = np.int32, order = 'F', copy = copyFlag) + vp = vp.astype(dtype = np.float64, order = 'F', copy = copyFlag) + cl = cl.astype(dtype = np.float64, order = 'F', copy = copyFlag) + ulam = ulam.astype(dtype = np.float64, order = 'F', copy = copyFlag) 
###################################### # --------- ALLOCATE OUTPUTS --------- @@ -61,28 +61,28 @@ def elnet(x, is_sparse, irs, pcs, y, weights, offset, gtype, parm, lempty, lmu = -1 lmu_r = ctypes.c_int(lmu) # a0 - a0 = scipy.zeros([nlam], dtype = scipy.float64) - a0 = a0.astype(dtype = scipy.float64, order = 'F', copy = False) + a0 = np.zeros([nlam], dtype = np.float64) + a0 = a0.astype(dtype = np.float64, order = 'F', copy = False) a0_r = a0.ctypes.data_as(ctypes.POINTER(ctypes.c_double)) # ca - ca = scipy.zeros([nx, nlam], dtype = scipy.float64) - ca = ca.astype(dtype = scipy.float64, order = 'F', copy = False) + ca = np.zeros([nx, nlam], dtype = np.float64) + ca = ca.astype(dtype = np.float64, order = 'F', copy = False) ca_r = ca.ctypes.data_as(ctypes.POINTER(ctypes.c_double)) # ia - ia = -1*scipy.ones([nx], dtype = scipy.int32) - ia = ia.astype(dtype = scipy.int32, order = 'F', copy = False) + ia = -1*np.ones([nx], dtype = np.int32) + ia = ia.astype(dtype = np.int32, order = 'F', copy = False) ia_r = ia.ctypes.data_as(ctypes.POINTER(ctypes.c_int)) # nin - nin = -1*scipy.ones([nlam], dtype = scipy.int32) - nin = nin.astype(dtype = scipy.int32, order = 'F', copy = False) + nin = -1*np.ones([nlam], dtype = np.int32) + nin = nin.astype(dtype = np.int32, order = 'F', copy = False) nin_r = nin.ctypes.data_as(ctypes.POINTER(ctypes.c_int)) # rsq - rsq = -1*scipy.ones([nlam], dtype = scipy.float64) - rsq = rsq.astype(dtype = scipy.float64, order = 'F', copy = False) + rsq = -1*np.ones([nlam], dtype = np.float64) + rsq = rsq.astype(dtype = np.float64, order = 'F', copy = False) rsq_r = rsq.ctypes.data_as(ctypes.POINTER(ctypes.c_double)) # alm - alm = -1*scipy.ones([nlam], dtype = scipy.float64) - alm = alm.astype(dtype = scipy.float64, order = 'F', copy = False) + alm = -1*np.ones([nlam], dtype = np.float64) + alm = alm.astype(dtype = np.float64, order = 'F', copy = False) alm_r = alm.ctypes.data_as(ctypes.POINTER(ctypes.c_double)) # nlp nlp = -1 @@ -186,21 +186,21 @@ 
def elnet(x, is_sparse, irs, pcs, y, weights, offset, gtype, parm, lempty, ninmax = max(nin) # fix first value of alm (from inf to correct value) if lempty: - t1 = scipy.log(alm[1]) - t2 = scipy.log(alm[2]) - alm[0] = scipy.exp(2*t1 - t2) + t1 = np.log(alm[1]) + t2 = np.log(alm[2]) + alm[0] = np.exp(2*t1 - t2) # create return fit dictionary if ninmax > 0: ca = ca[0:ninmax, :] - df = scipy.sum(scipy.absolute(ca) > 0, axis=0) + df = np.sum(np.absolute(ca) > 0, axis=0) ja = ia[0:ninmax] - 1 # ia is 1-indexed in fortran - oja = scipy.argsort(ja) + oja = np.argsort(ja) ja1 = ja[oja] - beta = scipy.zeros([nvars, lmu], dtype = scipy.float64) + beta = np.zeros([nvars, lmu], dtype = np.float64) beta[ja1, :] = ca[oja, :] else: - beta = scipy.zeros([nvars, lmu], dtype = scipy.float64) - df = scipy.zeros([1, lmu], dtype = scipy.float64) + beta = np.zeros([nvars, lmu], dtype = np.float64) + df = np.zeros([1, lmu], dtype = np.float64) fit = dict() fit['a0'] = a0 @@ -211,7 +211,7 @@ def elnet(x, is_sparse, irs, pcs, y, weights, offset, gtype, parm, lempty, fit['lambdau'] = alm fit['npasses'] = nlp_r.value fit['jerr'] = jerr_r.value - fit['dim'] = scipy.array([nvars, lmu], dtype = scipy.integer) + fit['dim'] = np.array([nvars, lmu], dtype = np.integer) fit['offset'] = is_offset fit['class'] = 'elnet' diff --git a/glmnet_python/fishnet.py b/glmnet_python/fishnet.py index 7b022ab..e71b8fd 100644 --- a/glmnet_python/fishnet.py +++ b/glmnet_python/fishnet.py @@ -3,7 +3,7 @@ Internal function called by glmnet. 
See also glmnet, cvglmnet """ # import packages/methods -import scipy +import numpy as np import ctypes from loadGlmLib import loadGlmLib @@ -14,7 +14,7 @@ def fishnet(x, is_sparse, irs, pcs, y, weights, offset, parm, # load shared fortran library glmlib = loadGlmLib() - if scipy.any( y < 0): + if np.any( y < 0): raise ValueError('negative responses not permitted for Poisson family') if len(offset) == 0: @@ -30,16 +30,16 @@ def fishnet(x, is_sparse, irs, pcs, y, weights, offset, parm, ###################################### # force inputs into fortran order and scipy float64 copyFlag = False - x = x.astype(dtype = scipy.float64, order = 'F', copy = copyFlag) - irs = irs.astype(dtype = scipy.int32, order = 'F', copy = copyFlag) - pcs = pcs.astype(dtype = scipy.int32, order = 'F', copy = copyFlag) - y = y.astype(dtype = scipy.float64, order = 'F', copy = copyFlag) - weights = weights.astype(dtype = scipy.float64, order = 'F', copy = copyFlag) - offset = offset.astype(dtype = scipy.float64, order = 'F', copy = copyFlag) - jd = jd.astype(dtype = scipy.int32, order = 'F', copy = copyFlag) - vp = vp.astype(dtype = scipy.float64, order = 'F', copy = copyFlag) - cl = cl.astype(dtype = scipy.float64, order = 'F', copy = copyFlag) - ulam = ulam.astype(dtype = scipy.float64, order = 'F', copy = copyFlag) + x = x.astype(dtype = np.float64, order = 'F', copy = copyFlag) + irs = irs.astype(dtype = np.int32, order = 'F', copy = copyFlag) + pcs = pcs.astype(dtype = np.int32, order = 'F', copy = copyFlag) + y = y.astype(dtype = np.float64, order = 'F', copy = copyFlag) + weights = weights.astype(dtype = np.float64, order = 'F', copy = copyFlag) + offset = offset.astype(dtype = np.float64, order = 'F', copy = copyFlag) + jd = jd.astype(dtype = np.int32, order = 'F', copy = copyFlag) + vp = vp.astype(dtype = np.float64, order = 'F', copy = copyFlag) + cl = cl.astype(dtype = np.float64, order = 'F', copy = copyFlag) + ulam = ulam.astype(dtype = np.float64, order = 'F', copy = copyFlag) 
###################################### # --------- ALLOCATE OUTPUTS --------- @@ -48,28 +48,28 @@ def fishnet(x, is_sparse, irs, pcs, y, weights, offset, parm, lmu = -1 lmu_r = ctypes.c_int(lmu) # a0 - a0 = scipy.zeros([nlam], dtype = scipy.float64) - a0 = a0.astype(dtype = scipy.float64, order = 'F', copy = False) + a0 = np.zeros([nlam], dtype = np.float64) + a0 = a0.astype(dtype = np.float64, order = 'F', copy = False) a0_r = a0.ctypes.data_as(ctypes.POINTER(ctypes.c_double)) # ca - ca = scipy.zeros([nx, nlam], dtype = scipy.float64) - ca = ca.astype(dtype = scipy.float64, order = 'F', copy = False) + ca = np.zeros([nx, nlam], dtype = np.float64) + ca = ca.astype(dtype = np.float64, order = 'F', copy = False) ca_r = ca.ctypes.data_as(ctypes.POINTER(ctypes.c_double)) # ia - ia = -1*scipy.ones([nx], dtype = scipy.int32) - ia = ia.astype(dtype = scipy.int32, order = 'F', copy = False) + ia = -1*np.ones([nx], dtype = np.int32) + ia = ia.astype(dtype = np.int32, order = 'F', copy = False) ia_r = ia.ctypes.data_as(ctypes.POINTER(ctypes.c_int)) # nin - nin = -1*scipy.ones([nlam], dtype = scipy.int32) - nin = nin.astype(dtype = scipy.int32, order = 'F', copy = False) + nin = -1*np.ones([nlam], dtype = np.int32) + nin = nin.astype(dtype = np.int32, order = 'F', copy = False) nin_r = nin.ctypes.data_as(ctypes.POINTER(ctypes.c_int)) # dev - dev = -1*scipy.ones([nlam], dtype = scipy.float64) - dev = dev.astype(dtype = scipy.float64, order = 'F', copy = False) + dev = -1*np.ones([nlam], dtype = np.float64) + dev = dev.astype(dtype = np.float64, order = 'F', copy = False) dev_r = dev.ctypes.data_as(ctypes.POINTER(ctypes.c_double)) # alm - alm = -1*scipy.ones([nlam], dtype = scipy.float64) - alm = alm.astype(dtype = scipy.float64, order = 'F', copy = False) + alm = -1*np.ones([nlam], dtype = np.float64) + alm = alm.astype(dtype = np.float64, order = 'F', copy = False) alm_r = alm.ctypes.data_as(ctypes.POINTER(ctypes.c_double)) # nlp nlp = -1 @@ -176,22 +176,22 @@ def fishnet(x, 
is_sparse, irs, pcs, y, weights, offset, parm, ninmax = max(nin) # fix first value of alm (from inf to correct value) if ulam[0] == 0.0: - t1 = scipy.log(alm[1]) - t2 = scipy.log(alm[2]) - alm[0] = scipy.exp(2*t1 - t2) + t1 = np.log(alm[1]) + t2 = np.log(alm[2]) + alm[0] = np.exp(2*t1 - t2) # create return fit dictionary - dd = scipy.array([nvars, lmu], dtype = scipy.integer) + dd = np.array([nvars, lmu], dtype = np.integer) if ninmax > 0: ca = ca[0:ninmax, :] - df = scipy.sum(scipy.absolute(ca) > 0, axis = 0) + df = np.sum(np.absolute(ca) > 0, axis = 0) ja = ia[0:ninmax] - 1 # ia is 1-indexed in fortran - oja = scipy.argsort(ja) + oja = np.argsort(ja) ja1 = ja[oja] - beta = scipy.zeros([nvars, lmu], dtype = scipy.float64) + beta = np.zeros([nvars, lmu], dtype = np.float64) beta[ja1, :] = ca[oja, :] else: - beta = scipy.zeros([nvars, lmu], dtype = scipy.float64) - df = scipy.zeros([1, lmu], dtype = scipy.float64) + beta = np.zeros([nvars, lmu], dtype = np.float64) + df = np.zeros([1, lmu], dtype = np.float64) fit = dict() fit['a0'] = a0 diff --git a/glmnet_python/glmnet.py b/glmnet_python/glmnet.py index 5d46e1e..a293ab2 100644 --- a/glmnet_python/glmnet.py +++ b/glmnet_python/glmnet.py @@ -22,11 +22,11 @@ INPUT ARGUMENTS: --------------- - x Input scipy 2D array of nobs x nvars (required). Each row is an + x Input np 2D array of nobs x nvars (required). Each row is an observation vector. Can be in sparse matrix format. Must be in - scipy csc_matrix format + np csc_matrix format - y Response variable (scipy 2D array of size nobs x 1, nobs x nc, etc). (required) + y Response variable (np 2D array of size nobs x 1, nobs x nc, etc). 
(required) For family = 'gaussian', Quantitative column vector For family = 'poisson' (non-negative counts), Quantitative column vector For family = 'binomial', should be either a column vector with two @@ -49,7 +49,7 @@ nlambda = 100 (number of lambda values) lambdau depends on data, nlambda and lambda_min (user supplied lambda sequence) standardize = True (variable standardization) - weights = all ones scipy vector (observation weights) + weights = all ones np vector (observation weights) For more details see help for glmnetSet OUTPUT ARGUMENTS: @@ -87,59 +87,59 @@ class Type of regression - internal usage EXAMPLES: -------- # Gaussian - x = scipy.random.rand(100, 10) - y = scipy.random.rand(100, 1) + x = np.random.rand(100, 10) + y = np.random.rand(100, 1) fit = glmnet(x = x, y = y) fit = glmnet(x = x, y = y, alpha = 0.5) glmnetPrint(fit) - glmnetPredict(fit, scipy.empty([0]), scipy.array([0.01]), 'coef') # extract coefficients at a single value of lambdau - glmnetPredict(fit, x[0:10,:], scipy.array([0.01, 0.005])) # make predictions + glmnetPredict(fit, np.empty([0]), np.array([0.01]), 'coef') # extract coefficients at a single value of lambdau + glmnetPredict(fit, x[0:10,:], np.array([0.01, 0.005])) # make predictions # Multivariate Gaussian: - x = scipy.random.rand(100, 10) - y = scipy.random.rand(100,3) + x = np.random.rand(100, 10) + y = np.random.rand(100,3) fit = glmnet(x, y, 'mgaussian') glmnetPlot(fit, 'norm', False, '2norm') # Binomial - x = scipy.random.rand(100, 10) - y = scipy.random.rand(100,1) + x = np.random.rand(100, 10) + y = np.random.rand(100,1) y = (y > 0.5)*1.0 fit = glmnet(x = x, y = y, family = 'binomial', alpha = 0.5) # Multinomial - x = scipy.random.rand(100,10) - y = scipy.random.rand(100,1) + x = np.random.rand(100,10) + y = np.random.rand(100,1) y[y < 0.3] = 1.0 y[y < 0.6] = 2.0 y[y < 1.0] = 3.0 fit = glmnet(x = x, y = y, family = 'multinomial', mtype = 'grouped') # poisson - x = scipy.random.rand(100,10) - y = 
scipy.random.poisson(size = [100, 1])*1.0 + x = np.random.rand(100,10) + y = np.random.poisson(size = [100, 1])*1.0 fit = glmnet(x = x, y = y, family = 'poisson') # cox N = 1000; p = 30; nzc = p/3; - x = scipy.random.normal(size = [N, p]) - beta = scipy.random.normal(size = [nzc, 1]) - fx = scipy.dot(x[:, 0:nzc], beta/3) - hx = scipy.exp(fx) - ty = scipy.random.exponential(scale = 1/hx, size = [N, 1]) - tcens = scipy.random.binomial(1, 0.3, size = [N, 1]) + x = np.random.normal(size = [N, p]) + beta = np.random.normal(size = [nzc, 1]) + fx = np.dot(x[:, 0:nzc], beta/3) + hx = np.exp(fx) + ty = np.random.exponential(scale = 1/hx, size = [N, 1]) + tcens = np.random.binomial(1, 0.3, size = [N, 1]) tcens = 1 - tcens - y = scipy.column_stack((ty, tcens)) + y = np.column_stack((ty, tcens)) fit = glmnet(x = x.copy(), y = y.copy(), family = 'cox') glmnetPlot(fit) # sparse example N = 1000000; - x = scipy.random.normal(size = [N,10]) + x = np.random.normal(size = [N,10]) x[x < 3.0] = 0.0 - xs = scipy.sparse.csc_matrix(x, dtype = scipy.float64) - y = scipy.random.binomial(1, 0.5, size =[N,1]) + xs = scipy.sparse.csc_matrix(x, dtype = np.float64) + y = np.random.binomial(1, 0.5, size =[N,1]) y = y*1.0 st = time.time() fit = glmnet.glmnet(x = xs, y = y, family = 'binomial') @@ -236,7 +236,8 @@ class Type of regression - internal usage # import packages/methods from glmnetSet import glmnetSet from glmnetControl import glmnetControl -import scipy +import numpy as np +import scipy.sparse from elnet import elnet from lognet import lognet from coxnet import coxnet @@ -245,17 +246,17 @@ class Type of regression - internal usage def glmnet(*, x, y, family='gaussian', **options): - # check inputs: make sure x and y are scipy, float64 arrays + # check inputs: make sure x and y are np, float64 arrays # fortran order is not checked as we force a convert later if not( isinstance(x, scipy.sparse.csc.csc_matrix) ): - if not( isinstance(x, scipy.ndarray) and x.dtype == 'float64'): - raise 
ValueError('x input must be a scipy float64 ndarray') + if not( isinstance(x, np.ndarray) and x.dtype == 'float64'): + raise ValueError('x input must be a np float64 ndarray') else: if not (x.dtype == 'float64'): raise ValueError('x input must be a float64 array') - if not( isinstance(y, scipy.ndarray) and y.dtype == 'float64'): - raise ValueError('y input must be a scipy float64 ndarray') + if not( isinstance(y, np.ndarray) and y.dtype == 'float64'): + raise ValueError('y input must be a np float64 ndarray') # create options if options is None: @@ -279,28 +280,28 @@ def glmnet(*, x, y, family='gaussian', **options): #print(options) ## error check options parameters - alpha = scipy.float64(options['alpha']) + alpha = np.float64(options['alpha']) if alpha > 1.0 : print('Warning: alpha > 1.0; setting to 1.0') - options['alpha'] = scipy.float64(1.0) + options['alpha'] = np.float64(1.0) if alpha < 0.0 : print('Warning: alpha < 0.0; setting to 0.0') - options['alpha'] = scipy.float64(0.0) + options['alpha'] = np.float64(0.0) - parm = scipy.float64(options['alpha']) - nlam = scipy.int32(options['nlambda']) + parm = np.float64(options['alpha']) + nlam = np.int32(options['nlambda']) nobs, nvars = x.shape # check weights length weights = options['weights'].copy() if len(weights) == 0: - weights = scipy.ones([nobs, 1], dtype = scipy.float64) + weights = np.ones([nobs, 1], dtype = np.float64) elif len(weights) != nobs: raise ValueError('Error: Number of elements in ''weights'' not equal to number of rows of ''x''') - # check if weights are scipy nd array - if not( isinstance(weights, scipy.ndarray) and weights.dtype == 'float64'): - raise ValueError('weights input must be a scipy float64 ndarray') + # check if weights are np nd array + if not( isinstance(weights, np.ndarray) and weights.dtype == 'float64'): + raise ValueError('weights input must be a np float64 ndarray') # check y length nrowy = y.shape[0] @@ -321,18 +322,18 @@ def glmnet(*, x, y, family='gaussian', 
**options): exclude = options['exclude'] # TBD: test this if not (len(exclude) == 0): - exclude = scipy.unique(exclude) - if scipy.any(exclude < 0) or scipy.any(exclude >= nvars): + exclude = np.unique(exclude) + if np.any(exclude < 0) or np.any(exclude >= nvars): raise ValueError('Error: Some excluded variables are out of range') else: - jd = scipy.append(len(exclude), exclude + 1) # indices are 1-based in fortran + jd = np.append(len(exclude), exclude + 1) # indices are 1-based in fortran else: - jd = scipy.zeros([1,1], dtype = scipy.integer) + jd = np.zeros([1,1], dtype = np.integer) # check vp vp = options['penalty_factor'] if len(vp) == 0: - vp = scipy.ones([1, nvars]) + vp = np.ones([1, nvars]) # inparms inparms = glmnetControl() @@ -345,12 +346,12 @@ def glmnet(*, x, y, family='gaussian', **options): if any(cl[1,:] < 0): raise ValueError('Error: The lower bound on cl must be non-negative') - cl[0, cl[0, :] == scipy.float64('-inf')] = -1.0*inparms['big'] - cl[1, cl[1, :] == scipy.float64('inf')] = 1.0*inparms['big'] + cl[0, cl[0, :] == np.float64('-inf')] = -1.0*inparms['big'] + cl[1, cl[1, :] == np.float64('inf')] = 1.0*inparms['big'] if cl.shape[1] < nvars: if cl.shape[1] == 1: - cl = cl*scipy.ones([1, nvars]) + cl = cl*np.ones([1, nvars]) else: raise ValueError('Error: Require length 1 or nvars lower and upper limits') else: @@ -358,7 +359,7 @@ def glmnet(*, x, y, family='gaussian', **options): exit_rec = 0 - if scipy.any(cl == 0.0): + if np.any(cl == 0.0): fdev = inparms['fdev'] if fdev != 0: optset = dict() @@ -366,12 +367,12 @@ def glmnet(*, x, y, family='gaussian', **options): glmnetControl(optset) exit_rec = 1 - isd = scipy.int32(options['standardize']) - intr = scipy.int32(options['intr']) + isd = np.int32(options['standardize']) + intr = np.int32(options['intr']) if (intr == True) and (family == 'cox'): print('Warning: Cox model has no intercept!') - jsd = scipy.int32(options['standardize_resp']) + jsd = np.int32(options['standardize_resp']) thresh 
= options['thresh'] lambdau = options['lambdau'] lambda_min = options['lambda_min'] @@ -387,16 +388,16 @@ def glmnet(*, x, y, family='gaussian', **options): if (lambda_min >= 1): raise ValueError('ERROR: lambda_min should be less than 1') flmin = lambda_min - ulam = scipy.zeros([1,1], dtype = scipy.float64) + ulam = np.zeros([1,1], dtype = np.float64) else: flmin = 1.0 if any(lambdau < 0): raise ValueError('ERROR: lambdas should be non-negative') - ulam = -scipy.sort(-lambdau) # reverse sort + ulam = -np.sort(-lambdau) # reverse sort nlam = lambdau.size - maxit = scipy.int32(options['maxit']) + maxit = np.int32(options['maxit']) gtype = options['gtype'] if len(gtype) == 0: if (nvars < 500): @@ -429,15 +430,15 @@ def glmnet(*, x, y, family='gaussian', **options): is_sparse = False if scipy.sparse.issparse(x): is_sparse = True - tx = scipy.sparse.csc_matrix(x, dtype = scipy.float64) + tx = scipy.sparse.csc_matrix(x, dtype = np.float64) x = tx.data; x = x.reshape([len(x), 1]) irs = tx.indices + 1 pcs = tx.indptr + 1 - irs = scipy.reshape(irs, [len(irs),]) - pcs = scipy.reshape(pcs, [len(pcs),]) + irs = np.reshape(irs, [len(irs),]) + pcs = np.reshape(pcs, [len(pcs),]) else: - irs = scipy.empty([0]) - pcs = scipy.empty([0]) + irs = np.empty([0]) + pcs = np.empty([0]) if scipy.sparse.issparse(y): y = y.todense() diff --git a/glmnet_python/glmnetCoef.py b/glmnet_python/glmnetCoef.py index 574552a..9fa0907 100644 --- a/glmnet_python/glmnetCoef.py +++ b/glmnet_python/glmnetCoef.py @@ -14,8 +14,8 @@ Fewer input arguments (more often) are allowed in the call, but must come in the order listed above. To set default values on the way, use - scipy.empty([0]). - For example, ncoef = glmnetCoef(fit,scipy.empty([0]),False). + np.empty([0]). + For example, ncoef = glmnetCoef(fit,np.empty([0]),False). INPUT ARGUMENTS: obj Fitted "glmnet" model object. @@ -28,7 +28,7 @@ algorithm. Note that exact = True is not implemented. 
OUTPUT ARGUMENTS: - result A (nvars+1) x length(s) scipy 2D array with each column being the + result A (nvars+1) x length(s) np 2D array with each column being the coefficients at an s. Note that the first row are the intercepts (0 if no intercept in the original model). @@ -61,14 +61,14 @@ glmnet, glmnetPrint, glmnetPredict, and cvglmnet. EXAMPLES: - x = scipy.random.rand(100,20); - y = scipy.random.rand(100,1); + x = np.random.rand(100,20); + y = np.random.rand(100,1); fit = glmnet(x = x.copy(),y = y.copy()); - ncoef = glmnetCoef(fit,scipy.array([0.01, 0.001])); + ncoef = glmnetCoef(fit,np.array([0.01, 0.001])); """ -import scipy +import numpy as np from glmnetPredict import glmnetPredict def glmnetCoef(obj, s = None, exact = False): @@ -79,7 +79,7 @@ def glmnetCoef(obj, s = None, exact = False): if exact and len(s) > 0: raise NotImplementedError('exact = True not implemented in glmnetCoef') - result = glmnetPredict(obj, scipy.empty([0]), s, 'coefficients') + result = glmnetPredict(obj, np.empty([0]), s, 'coefficients') return(result) diff --git a/glmnet_python/glmnetControl.py b/glmnet_python/glmnetControl.py index fae3b60..f9681a3 100644 --- a/glmnet_python/glmnetControl.py +++ b/glmnet_python/glmnetControl.py @@ -70,19 +70,19 @@ """ def glmnetControl(pars = None): - import scipy + import numpy as np # default options ivals = dict(); - ivals["fdev"] = scipy.float64(1e-5) - ivals["devmax"] = scipy.float64(0.999) - ivals["eps"] = scipy.float64(1e-6) - ivals["big"] = scipy.float64(9.9e35) - ivals["mnlam"] = scipy.float64(5) - ivals["pmin"] = scipy.float64(1e-5) - ivals["exmx"] = scipy.float64(250) - ivals["prec"] = scipy.float64(1e-10) - ivals["mxit"] = scipy.float64(100) + ivals["fdev"] = np.float64(1e-5) + ivals["devmax"] = np.float64(0.999) + ivals["eps"] = np.float64(1e-6) + ivals["big"] = np.float64(9.9e35) + ivals["mnlam"] = np.float64(5) + ivals["pmin"] = np.float64(1e-5) + ivals["exmx"] = np.float64(250) + ivals["prec"] = np.float64(1e-10) + ivals["mxit"] 
= np.float64(100) # quick return if no user opts if pars == None: diff --git a/glmnet_python/glmnetPlot.py b/glmnet_python/glmnetPlot.py index 76b9437..bbaad9e 100644 --- a/glmnet_python/glmnetPlot.py +++ b/glmnet_python/glmnetPlot.py @@ -59,10 +59,10 @@ EXAMPLES: import matplotlib.pyplot as plt - scipy.random.seed(1) - x=scipy.random.normal(size = (100,20)) - y=scipy.random.normal(size = (100,1)) - g4=scipy.random.choice(4,size = (100,1))*1.0 + np.random.seed(1) + x=np.random.normal(size = (100,20)) + y=np.random.normal(size = (100,1)) + g4=np.random.choice(4,size = (100,1))*1.0 fit1=glmnet(x = x.copy(),y = y.copy()) glmnetPlot(fit1) plt.figure() @@ -71,7 +71,7 @@ plt.figure() glmnetPlot(fit3) """ -import scipy +import numpy as np def glmnetPlot(x, xvar = 'norm', label = False, ptype = 'coef', **options): @@ -93,7 +93,7 @@ def glmnetPlot(x, xvar = 'norm', label = False, ptype = 'coef', **options): for i in range(len(beta)): which = nonzeroCoef(beta[i]) nzbeta[i] = beta[i][which, :] - norm = norm + scipy.sum(scipy.absolute(nzbeta[i]), axis = 0) + norm = norm + np.sum(np.absolute(nzbeta[i]), axis = 0) else: norm = 0 @@ -114,11 +114,11 @@ def glmnetPlot(x, xvar = 'norm', label = False, ptype = 'coef', **options): if i < ncl - 1: plt.figure() else: - dfseq = scipy.round_(scipy.mean(x['dfmat'], axis = 0)) + dfseq = np.round_(np.mean(x['dfmat'], axis = 0)) coefnorm = beta[1]*0 for i in range(len(beta)): - coefnorm = coefnorm + scipy.absolute(beta[i])**2 - coefnorm = scipy.sqrt(coefnorm) + coefnorm = coefnorm + np.absolute(beta[i])**2 + coefnorm = np.sqrt(coefnorm) if x['class'] == 'multnet': mstr = 'Coefficient 2Norms' handle = plotCoef(coefnorm, norm, x['lambdau'], dfseq, x['dev'], @@ -146,11 +146,11 @@ def getFromList(xvar, xvarbase, errMsg): # end of getFromList() # ========================================= def nonzeroCoef(beta, bystep = False): - result = scipy.absolute(beta) > 0 + result = np.absolute(beta) > 0 if len(result.shape) == 1: - result = 
scipy.reshape(result, [result.shape[0], 1]) + result = np.reshape(result, [result.shape[0], 1]) if not bystep: - result = scipy.any(result, axis = 1) + result = np.any(result, axis = 1) return(result) # end of nonzeroCoef() @@ -169,12 +169,12 @@ def plotCoef(beta, norm, lambdau, df, dev, label, xvar, xlab, ylab, **options): beta = beta[which, :] if xvar == 'norm': if len(norm) == 0: - index = scipy.sum(scipy.absolute(beta), axis = 0) + index = np.sum(np.absolute(beta), axis = 0) else: index = norm iname = 'L1 Norm' elif xvar == 'lambda': - index = scipy.log(lambdau) + index = np.log(lambdau) iname = 'Log Lambda' elif xvar == 'dev': index = dev @@ -189,7 +189,7 @@ def plotCoef(beta, norm, lambdau, df, dev, label, xvar, xlab, ylab, **options): ax1 = plt.gca() # plot x vs y - beta = scipy.transpose(beta) + beta = np.transpose(beta) ax1.plot(index, beta, **options) ax2 = ax1.twiny() @@ -199,7 +199,7 @@ def plotCoef(beta, norm, lambdau, df, dev, label, xvar, xlab, ylab, **options): ylim1 = ax1.get_ylim() atdf = ax1.get_xticks() - indat = scipy.ones(atdf.shape, dtype = scipy.integer) + indat = np.ones(atdf.shape, dtype = np.integer) if index[-1] >= index[1]: for j in range(len(index)-1, -1, -1): indat[atdf <= index[j]] = j diff --git a/glmnet_python/glmnetPredict.py b/glmnet_python/glmnetPredict.py index 9b732a9..e5ee403 100644 --- a/glmnet_python/glmnetPredict.py +++ b/glmnet_python/glmnetPredict.py @@ -56,7 +56,7 @@ DETAILS: The shape of the objects returned are different for "multinomial" objects. glmnetCoef(fit, ...) is equivalent to - glmnetPredict(fit,scipy.empty([]),scipy.empty([]),'coefficients"). + glmnetPredict(fit,np.empty([]),np.empty([]),'coefficients"). 
LICENSE: GPL-2 @@ -86,32 +86,33 @@ EXAMPLES: - x = scipy.random.normal(size = [100,20]) - y = scipy.random.normal(size = [100,1]) - g2 = scipy.random.choice(2, size = [100, 1])*1.0 # must be float64 - g4 = scipy.random.choice(4, size = [100, 1])*1.0 # must be float64 + x = np.random.normal(size = [100,20]) + y = np.random.normal(size = [100,1]) + g2 = np.random.choice(2, size = [100, 1])*1.0 # must be float64 + g4 = np.random.choice(4, size = [100, 1])*1.0 # must be float64 fit1 = glmnet(x = x.copy(),y = y.copy()); - print( glmnetPredict(fit1,x[0:5,:],scipy.array([0.01,0.005])) ) - print( glmnetPredict(fit1, scipy.empty([0]), scipy.empty([0]), 'coefficients') ) + print( glmnetPredict(fit1,x[0:5,:],np.array([0.01,0.005])) ) + print( glmnetPredict(fit1, np.empty([0]), np.empty([0]), 'coefficients') ) fit2 = glmnet(x = x.copy(), y = g2.copy(), family = 'binomial'); - print(glmnetPredict(fit2, x[2:5,:],scipy.empty([0]), 'response')) - print(glmnetPredict(fit2, scipy.empty([0]), scipy.empty([0]), 'nonzero')) + print(glmnetPredict(fit2, x[2:5,:],np.empty([0]), 'response')) + print(glmnetPredict(fit2, np.empty([0]), np.empty([0]), 'nonzero')) fit3 = glmnet(x = x.copy(), y = g4.copy(), family = 'multinomial'); - print(glmnetPredict(fit3, x[0:3,:], scipy.array([0.01, 0.5]), 'response')) + print(glmnetPredict(fit3, x[0:3,:], np.array([0.01, 0.5]), 'response')) """ -import scipy +import numpy as np +import scipy.sparse import scipy.interpolate def glmnetPredict(fit,\ - newx = scipy.empty([0]), \ - s = scipy.empty([0]), \ + newx = np.empty([0]), \ + s = np.empty([0]), \ ptype = 'link', \ exact = False, \ - offset = scipy.empty([0])): + offset = np.empty([0])): typebase = ['link', 'response', 'coefficients', 'nonzero', 'class'] indxtf = [x.startswith(ptype.lower()) for x in typebase] @@ -136,7 +137,7 @@ def glmnetPredict(fit,\ raise NotImplementedError('exact = True option is not implemented in python') # we convert newx to full here since sparse and full operations do not seem 
to - # be overloaded completely in scipy. + # be overloaded completely in np. if scipy.sparse.issparse(newx): newx = newx.todense() @@ -145,15 +146,15 @@ def glmnetPredict(fit,\ if fit['class'] == 'lognet': a0 = fit['a0'] else: - a0 = scipy.transpose(fit['a0']) + a0 = np.transpose(fit['a0']) - a0 = scipy.reshape(a0, [1, a0.size]) # convert to 1 x N for appending - nbeta = scipy.row_stack( (a0, fit['beta']) ) - if scipy.size(s) > 0: + a0 = np.reshape(a0, [1, a0.size]) # convert to 1 x N for appending + nbeta = np.row_stack( (a0, fit['beta']) ) + if np.size(s) > 0: lambdau = fit['lambdau'] lamlist = lambda_interp(lambdau, s) - nbeta = nbeta[:, lamlist['left']]*scipy.tile(scipy.transpose(lamlist['frac']), [nbeta.shape[0], 1]) \ - + nbeta[:, lamlist['right']]*( 1 - scipy.tile(scipy.transpose(lamlist['frac']), [nbeta.shape[0], 1])) + nbeta = nbeta[:, lamlist['left']]*np.tile(np.transpose(lamlist['frac']), [nbeta.shape[0], 1]) \ + + nbeta[:, lamlist['right']]*( 1 - np.tile(np.transpose(lamlist['frac']), [nbeta.shape[0], 1])) if ptype == 'coefficients': result = nbeta @@ -163,7 +164,7 @@ def glmnetPredict(fit,\ result = nonzeroCoef(nbeta[1:nbeta.shape[0], :], True) return(result) # use scipy.sparse.hstack instead of column_stack for sparse matrices - result = scipy.dot(scipy.column_stack( (scipy.ones([newx.shape[0], 1]) , newx) ) , nbeta) + result = np.dot(np.column_stack( (np.ones([newx.shape[0], 1]) , newx) ) , nbeta) if fit['offset']: if len(offset) == 0: @@ -171,16 +172,16 @@ def glmnetPredict(fit,\ if offset.shape[1] == 2: offset = offset[:, 1] - result = result + scipy.tile(offset, [1, result.shape[1]]) + result = result + np.tile(offset, [1, result.shape[1]]) # fishnet if fit['class'] == 'fishnet' and ptype == 'response': - result = scipy.exp(result) + result = np.exp(result) # lognet if fit['class'] == 'lognet': if ptype == 'response': - pp = scipy.exp(-result) + pp = np.exp(-result) result = 1/(1 + pp) elif ptype == 'class': result = (result > 0)*1 + (result <= 
0)*0 @@ -202,13 +203,13 @@ def glmnetPredict(fit,\ lambdau = fit['lambdau'] lamlist = lambda_interp(lambdau, s) for i in range(nclass): - kbeta = scipy.row_stack( (a0[i, :], nbeta[i]) ) - kbeta = kbeta[:, lamlist['left']]*scipy.tile(scipy.transpose(lamlist['frac']), [kbeta.shape[0], 1]) \ - + kbeta[:, lamlist['right']]*( 1 - scipy.tile(scipy.transpose(lamlist['frac']), [kbeta.shape[0], 1])) + kbeta = np.row_stack( (a0[i, :], nbeta[i]) ) + kbeta = kbeta[:, lamlist['left']]*np.tile(np.transpose(lamlist['frac']), [kbeta.shape[0], 1]) \ + + kbeta[:, lamlist['right']]*( 1 - np.tile(np.transpose(lamlist['frac']), [kbeta.shape[0], 1])) nbeta[i] = kbeta else: for i in range(nclass): - nbeta[i] = scipy.row_stack( (a0[i, :], nbeta[i]) ) + nbeta[i] = np.row_stack( (a0[i, :], nbeta[i]) ) nlambda = len(fit['lambdau']) if ptype == 'coefficients': @@ -228,33 +229,33 @@ def glmnetPredict(fit,\ return(result) npred = newx.shape[0] - dp = scipy.zeros([nclass, nlambda, npred], dtype = scipy.float64) + dp = np.zeros([nclass, nlambda, npred], dtype = np.float64) for i in range(nclass): - qq = scipy.column_stack( (scipy.ones([newx.shape[0], 1]), newx) ) - fitk = scipy.dot( qq, nbeta[i] ) - dp[i, :, :] = dp[i, :, :] + scipy.reshape(scipy.transpose(fitk), [1, nlambda, npred]) + qq = np.column_stack( (np.ones([newx.shape[0], 1]), newx) ) + fitk = np.dot( qq, nbeta[i] ) + dp[i, :, :] = dp[i, :, :] + np.reshape(np.transpose(fitk), [1, nlambda, npred]) if fit['offset']: if len(offset) == 0: raise ValueError('No offset provided for prediction, yet used in fit of glmnet') if offset.shape[1] != nclass: raise ValueError('Offset should be dimension %d x %d' % (npred, nclass)) - toff = scipy.transpose(offset) + toff = np.transpose(offset) for i in range(nlambda): dp[:, i, :] = dp[:, i, :] + toff if ptype == 'response': - pp = scipy.exp(dp) - psum = scipy.sum(pp, axis = 0, keepdims = True) - result = scipy.transpose(pp/scipy.tile(psum, [nclass, 1, 1]), [2, 0, 1]) + pp = np.exp(dp) + psum = 
np.sum(pp, axis = 0, keepdims = True) + result = np.transpose(pp/np.tile(psum, [nclass, 1, 1]), [2, 0, 1]) if ptype == 'link': - result = scipy.transpose(dp, [2, 0, 1]) + result = np.transpose(dp, [2, 0, 1]) if ptype == 'class': - dp = scipy.transpose(dp, [2, 0, 1]) + dp = np.transpose(dp, [2, 0, 1]) result = list() for i in range(dp.shape[2]): t = softmax(dp[:, :, i]) - result = scipy.append(result, fit['label'][t['pclass']]) + result = np.append(result, fit['label'][t['pclass']]) # coxnet if fit['class'] == 'coxnet': @@ -262,8 +263,8 @@ def glmnetPredict(fit,\ if len(s) > 0: lambdau = fit['lambdau'] lamlist = lambda_interp(lambdau, s) - nbeta = nbeta[:, lamlist['left']]*scipy.tile(scipy.transpose(lamlist['frac']), [nbeta.shape[0], 1]) \ - + nbeta[:, lamlist['right']]*( 1 - scipy.tile(scipy.transpose(lamlist['frac']), [nbeta.shape[0], 1])) + nbeta = nbeta[:, lamlist['left']]*np.tile(np.transpose(lamlist['frac']), [nbeta.shape[0], 1]) \ + + nbeta[:, lamlist['right']]*( 1 - np.tile(np.transpose(lamlist['frac']), [nbeta.shape[0], 1])) if ptype == 'coefficients': result = nbeta @@ -273,16 +274,16 @@ def glmnetPredict(fit,\ result = nonzeroCoef(nbeta, True) return(result) - result = scipy.dot(newx, nbeta) + result = np.dot(newx, nbeta) if fit['offset']: if len(offset) == 0: raise ValueError('No offset provided for prediction, yet used in fit of glmnet') - result = result + scipy.tile(offset, [1, result.shape[1]]) + result = result + np.tile(offset, [1, result.shape[1]]) if ptype == 'response': - result = scipy.exp(result) + result = np.exp(result) return(result) @@ -302,18 +303,18 @@ def lambda_interp(lambdau, s): # sfrac*left+(1-sfrac*right) if len(lambdau) == 1: nums = len(s) - left = scipy.zeros([nums, 1], dtype = scipy.integer) + left = np.zeros([nums, 1], dtype = np.integer) right = left - sfrac = scipy.zeros([nums, 1], dtype = scipy.float64) + sfrac = np.zeros([nums, 1], dtype = np.float64) else: - s[s > scipy.amax(lambdau)] = scipy.amax(lambdau) - s[s < 
scipy.amin(lambdau)] = scipy.amin(lambdau) + s[s > np.amax(lambdau)] = np.amax(lambdau) + s[s < np.amin(lambdau)] = np.amin(lambdau) k = len(lambdau) sfrac = (lambdau[0] - s)/(lambdau[0] - lambdau[k - 1]) lambdau = (lambdau[0] - lambdau)/(lambdau[0] - lambdau[k - 1]) coord = scipy.interpolate.interp1d(lambdau, range(k))(sfrac) - left = scipy.floor(coord).astype(scipy.integer, copy = False) - right = scipy.ceil(coord).astype(scipy.integer, copy = False) + left = np.floor(coord).astype(np.integer, copy = False) + right = np.ceil(coord).astype(np.integer, copy = False) # tf = left != right sfrac[tf] = (sfrac[tf] - lambdau[right[tf]])/(lambdau[left[tf]] - lambdau[right[tf]]) @@ -334,14 +335,14 @@ def lambda_interp(lambdau, s): def softmax(x, gap = False): d = x.shape maxdist = x[:, 0] - pclass = scipy.zeros([d[0], 1], dtype = scipy.integer) + pclass = np.zeros([d[0], 1], dtype = np.integer) for i in range(1, d[1], 1): l = x[:, i] > maxdist pclass[l] = i maxdist[l] = x[l, i] if gap == True: - x = scipy.absolute(maxdist - x) - x[0:d[0], pclass] = x*scipy.ones([d[1], d[1]]) + x = np.absolute(maxdist - x) + x[0:d[0], pclass] = x*np.ones([d[1], d[1]]) #gaps = pmin(x)# not sure what this means; gap is never called with True raise ValueError('gap = True is not implemented yet') @@ -357,9 +358,9 @@ def softmax(x, gap = False): # end of softmax # ========================================= def nonzeroCoef(beta, bystep = False): - result = scipy.absolute(beta) > 0 + result = np.absolute(beta) > 0 if not bystep: - result = scipy.any(result, axis = 1) + result = np.any(result, axis = 1) return(result) # end of nonzeroCoef # ========================================= diff --git a/glmnet_python/glmnetPrint.py b/glmnet_python/glmnetPrint.py index fd2a373..5e867dd 100644 --- a/glmnet_python/glmnetPrint.py +++ b/glmnet_python/glmnetPrint.py @@ -41,8 +41,8 @@ glmnet, glmnetSet, glmnetPredict and glmnetCoef methods. 
EXAMPLES: - x = scipy.random.normal(size=[100,20]) - y = scipy.random.normal(size=[100,1]) + x = np.random.normal(size=[100,20]) + y = np.random.normal(size=[100,1]) fit=glmnet(x = x,y = y); glmnetPrint(fit); diff --git a/glmnet_python/glmnetSet.py b/glmnet_python/glmnetSet.py index 9f7b9ca..ee026f1 100644 --- a/glmnet_python/glmnetSet.py +++ b/glmnet_python/glmnetSet.py @@ -29,8 +29,8 @@ # given values for these parameters. options = glmnetSet( alpha = 0.1, \ intr = False, \ - maxit = scipy.int32(1e6), \ - offset = scipy.empty([0]) ) + maxit = np.int32(1e6), \ + offset = np.empty([0]) ) # same as previous case, except we pass in a # dict() object instead opts = dict(); opts['alpha'] = 0.5; @@ -40,17 +40,17 @@ Parameter Default value Description .................................................................. -alpha +alpha The elasticnet mixing parameter, with 0 < alpha <= 1. The penalty is defined as (1-alpha)/2(||beta||_2)^2+alpha||beta||_1. Default is alpha = 1, which is the lasso penalty; Currently alpha = 0 the ridge penalty. -nlambda +nlambda The number of lambda values - default is -lambdau +lambdau A user supplied lambda sequence. Typical usage is to have the program compute its own lambda sequence based on nlambda and lambda_min. Supplying a value of @@ -70,7 +70,7 @@ details below for y standardization with family='gaussian'. -weights +weights Observation weights. Can be total counts if responses are proportion matrices. Default is 1 for each observation. @@ -79,7 +79,7 @@ Should intercept(s) be fitted (default=true) or set to zero (false). -offset +offset A vector of length nobs that is included in the linear predictor (a nobs x nc matrix for the "multinomial" family). Useful for the "poisson" @@ -88,7 +88,7 @@ supplied, then values must also be supplied to the predict function. -lambda_min +lambda_min Smallest value for lambda, as a fraction of lambda_max, the (data derived) entry value (i.e., the smallest value for which all coefficients are zero). 
@@ -101,28 +101,28 @@ and glmnet will exit gracefully when the percentage deviance explained is almost 1. -thresh +thresh Convergence threshold for coordinate descent. Each inner coordinate-descent loop continues until the maximum change in the objective after any coefficient update is less than thresh times the null deviance. Defaults value is 1E-4. -dfmax +dfmax Limit the maximum number of variables in the model. Useful for very large nvars, if a partial path is desired. Default is nvars + 1. -pmax +pmax Limit the maximum number of variables ever to be nonzero. Default is min(dfmax * 2 + 20, nvars). -exclude +exclude Indices of variables to be excluded from the model. Default is none. Equivalent to an infinite penalty factor (next item). -penalty_factor +penalty_factor Separate penalty factors can be applied to each coefficient. This is a number that multiplies lambda to allow differential shrinkage. Can be 0 for some @@ -133,11 +133,11 @@ factors are internally rescaled to sum to nvars, and the lambda sequence will reflect this change. -maxit +maxit Maximum number of passes over the data for all lambda values; default is 10^5. -cl +cl Two-row matrix with the first row being the lower limits for each coefficient and the second the upper limits. 
Can be presented as a single column (which @@ -187,25 +187,25 @@ """ def glmnetSet(opts = None): - import scipy + import numpy as np # default options options = { - "weights" : scipy.empty([0]), - "offset" : scipy.empty([0]), - "alpha" : scipy.float64(1.0), - "nlambda" : scipy.int32(100), - "lambda_min" : scipy.empty([0]), - "lambdau" : scipy.empty([0]), + "weights" : np.empty([0]), + "offset" : np.empty([0]), + "alpha" : np.float64(1.0), + "nlambda" : np.int32(100), + "lambda_min" : np.empty([0]), + "lambdau" : np.empty([0]), "standardize" : True, "intr" : True, - "thresh" : scipy.float64(1e-7), - "dfmax" : scipy.empty([0]), - "pmax" : scipy.empty([0]), - "exclude" : scipy.empty([0], dtype = scipy.integer), - "penalty_factor" : scipy.empty([0]), - "cl" : scipy.array([[scipy.float64(-scipy.inf)], [scipy.float64(scipy.inf)]]), - "maxit" : scipy.int32(1e5), + "thresh" : np.float64(1e-7), + "dfmax" : np.empty([0]), + "pmax" : np.empty([0]), + "exclude" : np.empty([0], dtype = np.integer), + "penalty_factor" : np.empty([0]), + "cl" : np.array([[np.float64(-np.inf)], [np.float64(np.inf)]]), + "maxit" : np.int32(1e5), "gtype" : [], "ltype" : 'Newton', "standardize_resp" : False, diff --git a/glmnet_python/lognet.py b/glmnet_python/lognet.py index d231ec7..284cdd9 100644 --- a/glmnet_python/lognet.py +++ b/glmnet_python/lognet.py @@ -4,7 +4,7 @@ """ # import packages/methods -import scipy +import numpy as np import ctypes from loadGlmLib import loadGlmLib @@ -26,12 +26,12 @@ def lognet(x, is_sparse, irs, pcs, y, weights, offset, parm, raise ValueError('x and y have different number of rows in call to glmnet') if nc == 1: - classes, sy = scipy.unique(y, return_inverse = True) + classes, sy = np.unique(y, return_inverse = True) nc = len(classes) - indexes = scipy.eye(nc, nc) + indexes = np.eye(nc, nc) y = indexes[sy, :] else: - classes = scipy.arange(nc) + 1 # 1:nc + classes = np.arange(nc) + 1 # 1:nc # if family == 'binomial': if nc > 2: @@ -42,14 +42,14 @@ def lognet(x, 
is_sparse, irs, pcs, y, weights, offset, parm, # if (len(weights) != 0): t = weights > 0 - if ~scipy.all(t): - t = scipy.reshape(t, (len(y), )) + if ~np.all(t): + t = np.reshape(t, (len(y), )) y = y[t, :] x = x[t, :] weights = weights[t] - nobs = scipy.sum(t) + nobs = np.sum(t) else: - t = scipy.empty([0], dtype = scipy.integer) + t = np.empty([0], dtype = np.integer) # if len(y.shape) == 1: mv = len(y) @@ -57,7 +57,7 @@ def lognet(x, is_sparse, irs, pcs, y, weights, offset, parm, else: mv, ny = y.shape - y = y*scipy.tile(weights, (1, ny)) + y = y*np.tile(weights, (1, ny)) # if len(offset) == 0: @@ -71,7 +71,7 @@ def lognet(x, is_sparse, irs, pcs, y, weights, offset, parm, raise ValueError('offset should have the same number of values as observations in binominal/multinomial call to glmnet') if nc == 1: if do[1] == 1: - offset = scipy.column_stack((offset, -offset), 1) + offset = np.column_stack((offset, -offset), 1) if do[1] > 2: raise ValueError('offset should have 1 or 2 columns in binomial call to glmnet') if (family == 'multinomial') and (do[1] != nc): @@ -85,16 +85,16 @@ def lognet(x, is_sparse, irs, pcs, y, weights, offset, parm, ###################################### # force inputs into fortran order and scipy float64 copyFlag = False - x = x.astype(dtype = scipy.float64, order = 'F', copy = copyFlag) - irs = irs.astype(dtype = scipy.int32, order = 'F', copy = copyFlag) - pcs = pcs.astype(dtype = scipy.int32, order = 'F', copy = copyFlag) - y = y.astype(dtype = scipy.float64, order = 'F', copy = copyFlag) - weights = weights.astype(dtype = scipy.float64, order = 'F', copy = copyFlag) - offset = offset.astype(dtype = scipy.float64, order = 'F', copy = copyFlag) - jd = jd.astype(dtype = scipy.int32, order = 'F', copy = copyFlag) - vp = vp.astype(dtype = scipy.float64, order = 'F', copy = copyFlag) - cl = cl.astype(dtype = scipy.float64, order = 'F', copy = copyFlag) - ulam = ulam.astype(dtype = scipy.float64, order = 'F', copy = copyFlag) + x = x.astype(dtype 
= np.float64, order = 'F', copy = copyFlag) + irs = irs.astype(dtype = np.int32, order = 'F', copy = copyFlag) + pcs = pcs.astype(dtype = np.int32, order = 'F', copy = copyFlag) + y = y.astype(dtype = np.float64, order = 'F', copy = copyFlag) + weights = weights.astype(dtype = np.float64, order = 'F', copy = copyFlag) + offset = offset.astype(dtype = np.float64, order = 'F', copy = copyFlag) + jd = jd.astype(dtype = np.int32, order = 'F', copy = copyFlag) + vp = vp.astype(dtype = np.float64, order = 'F', copy = copyFlag) + cl = cl.astype(dtype = np.float64, order = 'F', copy = copyFlag) + ulam = ulam.astype(dtype = np.float64, order = 'F', copy = copyFlag) ###################################### # --------- ALLOCATE OUTPUTS --------- @@ -104,32 +104,32 @@ def lognet(x, is_sparse, irs, pcs, y, weights, offset, parm, lmu_r = ctypes.c_int(lmu) # a0, ca if nc == 1: - a0 = scipy.zeros([nlam], dtype = scipy.float64) - ca = scipy.zeros([nx, nlam], dtype = scipy.float64) + a0 = np.zeros([nlam], dtype = np.float64) + ca = np.zeros([nx, nlam], dtype = np.float64) else: - a0 = scipy.zeros([nc, nlam], dtype = scipy.float64) - ca = scipy.zeros([nx, nc, nlam], dtype = scipy.float64) + a0 = np.zeros([nc, nlam], dtype = np.float64) + ca = np.zeros([nx, nc, nlam], dtype = np.float64) # a0 - a0 = a0.astype(dtype = scipy.float64, order = 'F', copy = False) + a0 = a0.astype(dtype = np.float64, order = 'F', copy = False) a0_r = a0.ctypes.data_as(ctypes.POINTER(ctypes.c_double)) # ca - ca = ca.astype(dtype = scipy.float64, order = 'F', copy = False) + ca = ca.astype(dtype = np.float64, order = 'F', copy = False) ca_r = ca.ctypes.data_as(ctypes.POINTER(ctypes.c_double)) # ia - ia = -1*scipy.ones([nx], dtype = scipy.int32) - ia = ia.astype(dtype = scipy.int32, order = 'F', copy = False) + ia = -1*np.ones([nx], dtype = np.int32) + ia = ia.astype(dtype = np.int32, order = 'F', copy = False) ia_r = ia.ctypes.data_as(ctypes.POINTER(ctypes.c_int)) # nin - nin = -1*scipy.ones([nlam], dtype = 
scipy.int32) - nin = nin.astype(dtype = scipy.int32, order = 'F', copy = False) + nin = -1*np.ones([nlam], dtype = np.int32) + nin = nin.astype(dtype = np.int32, order = 'F', copy = False) nin_r = nin.ctypes.data_as(ctypes.POINTER(ctypes.c_int)) # dev - dev = -1*scipy.ones([nlam], dtype = scipy.float64) - dev = dev.astype(dtype = scipy.float64, order = 'F', copy = False) + dev = -1*np.ones([nlam], dtype = np.float64) + dev = dev.astype(dtype = np.float64, order = 'F', copy = False) dev_r = dev.ctypes.data_as(ctypes.POINTER(ctypes.c_double)) # alm - alm = -1*scipy.ones([nlam], dtype = scipy.float64) - alm = alm.astype(dtype = scipy.float64, order = 'F', copy = False) + alm = -1*np.ones([nlam], dtype = np.float64) + alm = alm.astype(dtype = np.float64, order = 'F', copy = False) alm_r = alm.ctypes.data_as(ctypes.POINTER(ctypes.c_double)) # nlp nlp = -1 @@ -243,39 +243,39 @@ def lognet(x, is_sparse, irs, pcs, y, weights, offset, parm, ninmax = max(nin) # fix first value of alm (from inf to correct value) if ulam[0] == 0.0: - t1 = scipy.log(alm[1]) - t2 = scipy.log(alm[2]) - alm[0] = scipy.exp(2*t1 - t2) + t1 = np.log(alm[1]) + t2 = np.log(alm[2]) + alm[0] = np.exp(2*t1 - t2) # create return fit dictionary if family == 'multinomial': - a0 = a0 - scipy.tile(scipy.mean(a0), (nc, 1)) + a0 = a0 - np.tile(np.mean(a0), (nc, 1)) dfmat = a0.copy() - dd = scipy.array([nvars, lmu], dtype = scipy.integer) + dd = np.array([nvars, lmu], dtype = np.integer) beta_list = list() if ninmax > 0: # TODO: is the reshape here done right? 
- ca = scipy.reshape(ca, (nx, nc, lmu)) + ca = np.reshape(ca, (nx, nc, lmu)) ca = ca[0:ninmax, :, :] ja = ia[0:ninmax] - 1 # ia is 1-indexed in fortran - oja = scipy.argsort(ja) + oja = np.argsort(ja) ja1 = ja[oja] - df = scipy.any(scipy.absolute(ca) > 0, axis=1) - df = scipy.sum(df) - df = scipy.reshape(df, (1, df.size)) + df = np.any(np.absolute(ca) > 0, axis=1) + df = np.sum(df) + df = np.reshape(df, (1, df.size)) for k in range(0, nc): - ca1 = scipy.reshape(ca[:,k,:], (ninmax, lmu)) + ca1 = np.reshape(ca[:,k,:], (ninmax, lmu)) cak = ca1[oja,:] - dfmat[k, :] = scipy.sum(scipy.absolute(cak) > 0, axis = 0) - beta = scipy.zeros([nvars, lmu], dtype = scipy.float64) + dfmat[k, :] = np.sum(np.absolute(cak) > 0, axis = 0) + beta = np.zeros([nvars, lmu], dtype = np.float64) beta[ja1, :] = cak beta_list.append(beta) else: for k in range(0, nc): - dfmat[k, :] = scipy.zeros([1, lmu], dtype = scipy.float64) - beta_list.append(scipy.zeros([nvars, lmu], dtype = scipy.float64)) + dfmat[k, :] = np.zeros([1, lmu], dtype = np.float64) + beta_list.append(np.zeros([nvars, lmu], dtype = np.float64)) # - df = scipy.zeros([1, lmu], dtype = scipy.float64) + df = np.zeros([1, lmu], dtype = np.float64) # if kopt == 2: grouped = True @@ -298,18 +298,18 @@ def lognet(x, is_sparse, irs, pcs, y, weights, offset, parm, fit['offset'] = is_offset fit['class'] = 'multnet' else: - dd = scipy.array([nvars, lmu], dtype = scipy.integer) + dd = np.array([nvars, lmu], dtype = np.integer) if ninmax > 0: ca = ca[0:ninmax,:]; - df = scipy.sum(scipy.absolute(ca) > 0, axis = 0); + df = np.sum(np.absolute(ca) > 0, axis = 0); ja = ia[0:ninmax] - 1; # ia is 1-indexes in fortran - oja = scipy.argsort(ja) + oja = np.argsort(ja) ja1 = ja[oja] - beta = scipy.zeros([nvars, lmu], dtype = scipy.float64); + beta = np.zeros([nvars, lmu], dtype = np.float64); beta[ja1, :] = ca[oja, :]; else: - beta = scipy.zeros([nvars,lmu], dtype = scipy.float64); - df = scipy.zeros([1,lmu], dtype = scipy.float64); + beta = 
np.zeros([nvars,lmu], dtype = np.float64); + df = np.zeros([1,lmu], dtype = np.float64); # fit = dict() fit['a0'] = a0 diff --git a/glmnet_python/mrelnet.py b/glmnet_python/mrelnet.py index 3613e49..006a97f 100644 --- a/glmnet_python/mrelnet.py +++ b/glmnet_python/mrelnet.py @@ -4,7 +4,7 @@ """ # import packages/methods -import scipy +import numpy as np import ctypes from wtmean import wtmean from loadGlmLib import loadGlmLib @@ -19,9 +19,9 @@ def mrelnet(x, is_sparse, irs, pcs, y, weights, offset, parm, # nr = y.shape[1] wym = wtmean(y, weights) - wym = scipy.reshape(wym, (1, wym.size)) - yt2 = (y - scipy.tile(wym, (y.shape[0], 1)))**2 - nulldev = scipy.sum(wtmean(yt2,weights)*scipy.sum(weights)) + wym = np.reshape(wym, (1, wym.size)) + yt2 = (y - np.tile(wym, (y.shape[0], 1)))**2 + nulldev = np.sum(wtmean(yt2,weights)*np.sum(weights)) if len(offset) == 0: offset = y*0 @@ -39,15 +39,15 @@ def mrelnet(x, is_sparse, irs, pcs, y, weights, offset, parm, ###################################### # force inputs into fortran order and scipy float64 copyFlag = False - x = x.astype(dtype = scipy.float64, order = 'F', copy = copyFlag) - irs = irs.astype(dtype = scipy.int32, order = 'F', copy = copyFlag) - pcs = pcs.astype(dtype = scipy.int32, order = 'F', copy = copyFlag) - y = y.astype(dtype = scipy.float64, order = 'F', copy = copyFlag) - weights = weights.astype(dtype = scipy.float64, order = 'F', copy = copyFlag) - jd = jd.astype(dtype = scipy.int32, order = 'F', copy = copyFlag) - vp = vp.astype(dtype = scipy.float64, order = 'F', copy = copyFlag) - cl = cl.astype(dtype = scipy.float64, order = 'F', copy = copyFlag) - ulam = ulam.astype(dtype = scipy.float64, order = 'F', copy = copyFlag) + x = x.astype(dtype = np.float64, order = 'F', copy = copyFlag) + irs = irs.astype(dtype = np.int32, order = 'F', copy = copyFlag) + pcs = pcs.astype(dtype = np.int32, order = 'F', copy = copyFlag) + y = y.astype(dtype = np.float64, order = 'F', copy = copyFlag) + weights = 
weights.astype(dtype = np.float64, order = 'F', copy = copyFlag) + jd = jd.astype(dtype = np.int32, order = 'F', copy = copyFlag) + vp = vp.astype(dtype = np.float64, order = 'F', copy = copyFlag) + cl = cl.astype(dtype = np.float64, order = 'F', copy = copyFlag) + ulam = ulam.astype(dtype = np.float64, order = 'F', copy = copyFlag) ###################################### # --------- ALLOCATE OUTPUTS --------- @@ -56,28 +56,28 @@ def mrelnet(x, is_sparse, irs, pcs, y, weights, offset, parm, lmu = -1 lmu_r = ctypes.c_int(lmu) # a0 - a0 = scipy.zeros([nr, nlam], dtype = scipy.float64) - a0 = a0.astype(dtype = scipy.float64, order = 'F', copy = False) + a0 = np.zeros([nr, nlam], dtype = np.float64) + a0 = a0.astype(dtype = np.float64, order = 'F', copy = False) a0_r = a0.ctypes.data_as(ctypes.POINTER(ctypes.c_double)) # ca - ca = scipy.zeros([nx, nr, nlam], dtype = scipy.float64) - ca = ca.astype(dtype = scipy.float64, order = 'F', copy = False) + ca = np.zeros([nx, nr, nlam], dtype = np.float64) + ca = ca.astype(dtype = np.float64, order = 'F', copy = False) ca_r = ca.ctypes.data_as(ctypes.POINTER(ctypes.c_double)) # ia - ia = -1*scipy.ones([nx], dtype = scipy.int32) - ia = ia.astype(dtype = scipy.int32, order = 'F', copy = False) + ia = -1*np.ones([nx], dtype = np.int32) + ia = ia.astype(dtype = np.int32, order = 'F', copy = False) ia_r = ia.ctypes.data_as(ctypes.POINTER(ctypes.c_int)) # nin - nin = -1*scipy.ones([nlam], dtype = scipy.int32) - nin = nin.astype(dtype = scipy.int32, order = 'F', copy = False) + nin = -1*np.ones([nlam], dtype = np.int32) + nin = nin.astype(dtype = np.int32, order = 'F', copy = False) nin_r = nin.ctypes.data_as(ctypes.POINTER(ctypes.c_int)) # rsq - rsq = -1*scipy.ones([nlam], dtype = scipy.float64) - rsq = rsq.astype(dtype = scipy.float64, order = 'F', copy = False) + rsq = -1*np.ones([nlam], dtype = np.float64) + rsq = rsq.astype(dtype = np.float64, order = 'F', copy = False) rsq_r = rsq.ctypes.data_as(ctypes.POINTER(ctypes.c_double)) # 
alm - alm = -1*scipy.ones([nlam], dtype = scipy.float64) - alm = alm.astype(dtype = scipy.float64, order = 'F', copy = False) + alm = -1*np.ones([nlam], dtype = np.float64) + alm = alm.astype(dtype = np.float64, order = 'F', copy = False) alm_r = alm.ctypes.data_as(ctypes.POINTER(ctypes.c_double)) # nlp nlp = -1 @@ -182,54 +182,54 @@ def mrelnet(x, is_sparse, irs, pcs, y, weights, offset, parm, ninmax = max(nin) # fix first value of alm (from inf to correct value) if ulam[0] == 0.0: - t1 = scipy.log(alm[1]) - t2 = scipy.log(alm[2]) - alm[0] = scipy.exp(2*t1 - t2) + t1 = np.log(alm[1]) + t2 = np.log(alm[2]) + alm[0] = np.exp(2*t1 - t2) # create return fit dictionary if nr > 1: dfmat = a0.copy() - dd = scipy.array([nvars, lmu], dtype = scipy.integer) + dd = np.array([nvars, lmu], dtype = np.integer) beta_list = list() if ninmax > 0: # TODO: is the reshape here done right? - ca = scipy.reshape(ca, (nx, nr, lmu)) + ca = np.reshape(ca, (nx, nr, lmu)) ca = ca[0:ninmax, :, :] ja = ia[0:ninmax] - 1 # ia is 1-indexed in fortran - oja = scipy.argsort(ja) + oja = np.argsort(ja) ja1 = ja[oja] - df = scipy.any(scipy.absolute(ca) > 0, axis=1) - df = scipy.sum(df, axis = 0) - df = scipy.reshape(df, (1, df.size)) + df = np.any(np.absolute(ca) > 0, axis=1) + df = np.sum(df, axis = 0) + df = np.reshape(df, (1, df.size)) for k in range(0, nr): - ca1 = scipy.reshape(ca[:,k,:], (ninmax, lmu)) + ca1 = np.reshape(ca[:,k,:], (ninmax, lmu)) cak = ca1[oja,:] - dfmat[k, :] = scipy.sum(scipy.absolute(cak) > 0, axis = 0) - beta = scipy.zeros([nvars, lmu], dtype = scipy.float64) + dfmat[k, :] = np.sum(np.absolute(cak) > 0, axis = 0) + beta = np.zeros([nvars, lmu], dtype = np.float64) beta[ja1, :] = cak beta_list.append(beta) else: for k in range(0, nr): - dfmat[k, :] = scipy.zeros([1, lmu], dtype = scipy.float64) - beta_list.append(scipy.zeros([nvars, lmu], dtype = scipy.float64)) + dfmat[k, :] = np.zeros([1, lmu], dtype = np.float64) + beta_list.append(np.zeros([nvars, lmu], dtype = 
np.float64)) # - df = scipy.zeros([1, lmu], dtype = scipy.float64) + df = np.zeros([1, lmu], dtype = np.float64) # fit = dict() fit['beta'] = beta_list fit['dfmat']= dfmat else: - dd = scipy.array([nvars, lmu], dtype = scipy.integer) + dd = np.array([nvars, lmu], dtype = np.integer) if ninmax > 0: ca = ca[0:ninmax,:]; - df = scipy.sum(scipy.absolute(ca) > 0, axis = 0); + df = np.sum(np.absolute(ca) > 0, axis = 0); ja = ia[0:ninmax] - 1; # ia is 1-indexes in fortran - oja = scipy.argsort(ja) + oja = np.argsort(ja) ja1 = ja[oja] - beta = scipy.zeros([nvars, lmu], dtype = scipy.float64); + beta = np.zeros([nvars, lmu], dtype = np.float64); beta[ja1, :] = ca[oja, :]; else: - beta = scipy.zeros([nvars,lmu], dtype = scipy.float64); - df = scipy.zeros([1,lmu], dtype = scipy.float64); + beta = np.zeros([nvars,lmu], dtype = np.float64); + df = np.zeros([1,lmu], dtype = np.float64); fit['beta'] = beta fit['a0'] = a0 diff --git a/glmnet_python/wtmean.py b/glmnet_python/wtmean.py index c3c84aa..ad831fc 100644 --- a/glmnet_python/wtmean.py +++ b/glmnet_python/wtmean.py @@ -13,25 +13,25 @@ returns nan-removed weighted mean as a 1D array of size K """ -import scipy +import numpy as np def wtmean(mat,weights): if len(weights.shape) == 1: - weights = scipy.reshape(weights, [scipy.size(weights), 1]) + weights = np.reshape(weights, [np.size(weights), 1]) wmat = isfinite(mat)*weights mat[isnan(mat)] = 0 swmat = mat*wmat tf = weights != 0 tf = tf[:,0] - y = scipy.sum(swmat[tf, :], axis = 0)/scipy.sum(wmat, axis = 0) + y = np.sum(swmat[tf, :], axis = 0)/np.sum(wmat, axis = 0) return y # end of wtmean def isnan(x): - return ~scipy.isfinite(x) + return ~np.isfinite(x) # end of isnan def isfinite(x): - return scipy.isfinite(x) + return np.isfinite(x) # end of isfinite diff --git a/test/.ipynb_checkpoints/glmnet_examples-checkpoint.ipynb b/test/.ipynb_checkpoints/glmnet_examples-checkpoint.ipynb index 29d9e46..0a41d72 100644 --- a/test/.ipynb_checkpoints/glmnet_examples-checkpoint.ipynb +++ 
b/test/.ipynb_checkpoints/glmnet_examples-checkpoint.ipynb @@ -65,7 +65,10 @@ "cell_type": "code", "execution_count": 1, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [ { @@ -91,7 +94,10 @@ "cell_type": "code", "execution_count": 2, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [], "source": [ @@ -102,7 +108,8 @@ "import sys\n", "sys.path.append('../test')\n", "sys.path.append('../lib')\n", - "import scipy, importlib, pprint, matplotlib.pyplot as plt, warnings\n", + "import numpy as np\n", + "import importlib, pprint, matplotlib.pyplot as plt, warnings\n", "from glmnet import glmnet; from glmnetPlot import glmnetPlot \n", "from glmnetPrint import glmnetPrint; from glmnetCoef import glmnetCoef; from glmnetPredict import glmnetPredict\n", "from cvglmnet import cvglmnet; from cvglmnetCoef import cvglmnetCoef\n", @@ -112,12 +119,12 @@ "baseDataDir= '../data/'\n", "\n", "# load data\n", - "x = scipy.loadtxt(baseDataDir + 'QuickStartExampleX.dat', dtype = scipy.float64)\n", - "y = scipy.loadtxt(baseDataDir + 'QuickStartExampleY.dat', dtype = scipy.float64)\n", + "x = np.loadtxt(baseDataDir + 'QuickStartExampleX.dat', dtype = np.float64)\n", + "y = np.loadtxt(baseDataDir + 'QuickStartExampleY.dat', dtype = np.float64)\n", "\n", "# create weights\n", - "t = scipy.ones((50, 1), dtype = scipy.float64)\n", - "wts = scipy.row_stack((t, 2*t))" + "t = np.ones((50, 1), dtype = np.float64)\n", + "wts = np.row_stack((t, 2*t))" ] }, { @@ -131,7 +138,10 @@ "cell_type": "code", "execution_count": 3, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [], "source": [ @@ -154,6 +164,9 @@ "execution_count": 4, "metadata": { "collapsed": false, + "jupyter": { + "outputs_hidden": false + }, "scrolled": true }, "outputs": [ @@ -208,7 +221,10 @@ "cell_type": "code", "execution_count": 5, 
"metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [ { @@ -238,6 +254,9 @@ "execution_count": 6, "metadata": { "collapsed": false, + "jupyter": { + "outputs_hidden": false + }, "scrolled": true }, "outputs": [ @@ -273,7 +292,10 @@ "cell_type": "code", "execution_count": 7, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [ { @@ -295,7 +317,10 @@ "cell_type": "code", "execution_count": 8, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [ { @@ -330,7 +355,7 @@ } ], "source": [ - "glmnetCoef(fit, s = scipy.float64([0.5]), exact = False)" + "glmnetCoef(fit, s = np.float64([0.5]), exact = False)" ] }, { @@ -355,7 +380,10 @@ "cell_type": "code", "execution_count": 9, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [ { @@ -372,7 +400,7 @@ ], "source": [ "fc = glmnetPredict(fit, x[0:5,:], ptype = 'response', \\\n", - " s = scipy.float64([0.05]))\n", + " s = np.float64([0.05]))\n", "print(fc)" ] }, @@ -395,7 +423,10 @@ "cell_type": "code", "execution_count": 10, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [], "source": [ @@ -430,7 +461,10 @@ "cell_type": "code", "execution_count": 11, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [ { @@ -452,7 +486,10 @@ "cell_type": "code", "execution_count": 12, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [ { @@ -494,7 +531,10 @@ "cell_type": "code", "execution_count": 13, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [ { @@ -527,11 +567,14 @@ "cell_type": "code", "execution_count": 14, 
"metadata": { - "collapsed": true + "collapsed": true, + "jupyter": { + "outputs_hidden": true + } }, "outputs": [], "source": [ - "foldid = scipy.random.choice(10, size = y.shape[0], replace = True)\n", + "foldid = np.random.choice(10, size = y.shape[0], replace = True)\n", "cv1=cvglmnet(x = x.copy(),y = y.copy(),foldid=foldid,alpha=1)\n", "cv0p5=cvglmnet(x = x.copy(),y = y.copy(),foldid=foldid,alpha=0.5)\n", "cv0=cvglmnet(x = x.copy(),y = y.copy(),foldid=foldid,alpha=0)" @@ -548,7 +591,10 @@ "cell_type": "code", "execution_count": 15, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [ { @@ -571,10 +617,10 @@ "f.add_subplot(2,2,3)\n", "cvglmnetPlot(cv0)\n", "f.add_subplot(2,2,4)\n", - "plt.plot( scipy.log(cv1['lambdau']), cv1['cvm'], 'r.')\n", + "plt.plot( np.log(cv1['lambdau']), cv1['cvm'], 'r.')\n", "plt.hold(True)\n", - "plt.plot( scipy.log(cv0p5['lambdau']), cv0p5['cvm'], 'g.')\n", - "plt.plot( scipy.log(cv0['lambdau']), cv0['cvm'], 'b.')\n", + "plt.plot( np.log(cv0p5['lambdau']), cv0p5['cvm'], 'g.')\n", + "plt.plot( np.log(cv0['lambdau']), cv0['cvm'], 'b.')\n", "plt.xlabel('log(Lambda)')\n", "plt.ylabel(cv1['name'])\n", "plt.xlim(-6, 4)\n", @@ -597,7 +643,10 @@ "cell_type": "code", "execution_count": 16, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [ { @@ -612,7 +661,7 @@ } ], "source": [ - "cl = scipy.array([[-0.7], [0.5]], dtype = scipy.float64)\n", + "cl = np.array([[-0.7], [0.5]], dtype = np.float64)\n", "tfit=glmnet(x = x.copy(),y= y.copy(), cl = cl)\n", "glmnetPlot(tfit);" ] @@ -645,7 +694,10 @@ "cell_type": "code", "execution_count": 17, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [ { @@ -660,7 +712,7 @@ } ], "source": [ - "pfac = scipy.ones([1, 20])\n", + "pfac = np.ones([1, 20])\n", "pfac[0, 4] = 0; pfac[0, 9] = 0; pfac[0, 14] = 0\n", "pfit = 
glmnet(x = x.copy(), y = y.copy(), penalty_factor = pfac)\n", "glmnetPlot(pfit, label = True);" @@ -685,7 +737,10 @@ "cell_type": "code", "execution_count": 18, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [ { @@ -700,9 +755,9 @@ } ], "source": [ - "scipy.random.seed(101)\n", - "x = scipy.random.rand(100,10)\n", - "y = scipy.random.rand(100,1)\n", + "np.random.seed(101)\n", + "x = np.random.rand(100,10)\n", + "y = np.random.rand(100,1)\n", "fit = glmnet(x = x, y = y)\n", "glmnetPlot(fit);" ] @@ -718,7 +773,10 @@ "cell_type": "code", "execution_count": 19, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [], "source": [ @@ -773,7 +831,10 @@ "cell_type": "code", "execution_count": 20, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [], "source": [ @@ -784,7 +845,8 @@ "import sys\n", "sys.path.append('../test')\n", "sys.path.append('../lib')\n", - "import scipy, importlib, pprint, matplotlib.pyplot as plt, warnings\n", + "import numpy as np\n", + "import importlib, pprint, matplotlib.pyplot as plt, warnings\n", "from glmnet import glmnet; from glmnetPlot import glmnetPlot \n", "from glmnetPrint import glmnetPrint; from glmnetCoef import glmnetCoef; from glmnetPredict import glmnetPredict\n", "from cvglmnet import cvglmnet; from cvglmnetCoef import cvglmnetCoef\n", @@ -794,8 +856,8 @@ "baseDataDir= '../data/'\n", "\n", "# load data\n", - "x = scipy.loadtxt(baseDataDir + 'MultiGaussianExampleX.dat', dtype = scipy.float64, delimiter = ',')\n", - "y = scipy.loadtxt(baseDataDir + 'MultiGaussianExampleY.dat', dtype = scipy.float64, delimiter = ',')" + "x = np.loadtxt(baseDataDir + 'MultiGaussianExampleX.dat', dtype = np.float64, delimiter = ',')\n", + "y = np.loadtxt(baseDataDir + 'MultiGaussianExampleY.dat', dtype = np.float64, delimiter = ',')" ] }, { @@ -809,7 +871,10 
@@ "cell_type": "code", "execution_count": 21, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [], "source": [ @@ -829,7 +894,10 @@ "cell_type": "code", "execution_count": 22, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [ { @@ -862,7 +930,10 @@ "cell_type": "code", "execution_count": 23, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [ { @@ -884,7 +955,7 @@ } ], "source": [ - "f = glmnetPredict(mfit, x[0:5,:], s = scipy.float64([0.1, 0.01]))\n", + "f = glmnetPredict(mfit, x[0:5,:], s = np.float64([0.1, 0.01]))\n", "print(f[:,:,0], '\\n')\n", "print(f[:,:,1])" ] @@ -902,7 +973,10 @@ "cell_type": "code", "execution_count": 24, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [], "source": [ @@ -922,7 +996,10 @@ "cell_type": "code", "execution_count": 25, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [ { @@ -951,7 +1028,10 @@ "cell_type": "code", "execution_count": 26, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [ { @@ -973,7 +1053,10 @@ "cell_type": "code", "execution_count": 27, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [ { @@ -1035,7 +1118,10 @@ "cell_type": "code", "execution_count": 28, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [], "source": [ @@ -1046,7 +1132,8 @@ "import sys\n", "sys.path.append('../test')\n", "sys.path.append('../lib')\n", - "import scipy, importlib, pprint, matplotlib.pyplot as plt, warnings\n", + "import numpy as np\n", + "import importlib, pprint, matplotlib.pyplot as plt, warnings\n", 
"from glmnet import glmnet; from glmnetPlot import glmnetPlot \n", "from glmnetPrint import glmnetPrint; from glmnetCoef import glmnetCoef; from glmnetPredict import glmnetPredict\n", "from cvglmnet import cvglmnet; from cvglmnetCoef import cvglmnetCoef\n", @@ -1056,8 +1143,8 @@ "baseDataDir= '../data/'\n", "\n", "# load data\n", - "x = scipy.loadtxt(baseDataDir + 'BinomialExampleX.dat', dtype = scipy.float64, delimiter = ',')\n", - "y = scipy.loadtxt(baseDataDir + 'BinomialExampleY.dat', dtype = scipy.float64)" + "x = np.loadtxt(baseDataDir + 'BinomialExampleX.dat', dtype = np.float64, delimiter = ',')\n", + "y = np.loadtxt(baseDataDir + 'BinomialExampleY.dat', dtype = np.float64)" ] }, { @@ -1073,7 +1160,10 @@ "cell_type": "code", "execution_count": 29, "metadata": { - "collapsed": true + "collapsed": true, + "jupyter": { + "outputs_hidden": true + } }, "outputs": [], "source": [ @@ -1091,7 +1181,10 @@ "cell_type": "code", "execution_count": 30, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [ { @@ -1133,7 +1226,10 @@ "cell_type": "code", "execution_count": 31, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [ { @@ -1152,7 +1248,7 @@ } ], "source": [ - "glmnetPredict(fit, newx = x[0:5,], ptype='class', s = scipy.array([0.05, 0.01]))" + "glmnetPredict(fit, newx = x[0:5,], ptype='class', s = np.array([0.05, 0.01]))" ] }, { @@ -1177,7 +1273,10 @@ "cell_type": "code", "execution_count": 32, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [], "source": [ @@ -1199,7 +1298,10 @@ "cell_type": "code", "execution_count": 33, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [ { @@ -1221,7 +1323,10 @@ "cell_type": "code", "execution_count": 34, "metadata": { - "collapsed": false + "collapsed": false, + 
"jupyter": { + "outputs_hidden": false + } }, "outputs": [ { @@ -1243,7 +1348,10 @@ "cell_type": "code", "execution_count": 35, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [ { @@ -1272,7 +1380,10 @@ "cell_type": "code", "execution_count": 36, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [ { @@ -1331,7 +1442,10 @@ "cell_type": "code", "execution_count": 37, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [ { @@ -1410,7 +1524,10 @@ "cell_type": "code", "execution_count": 38, "metadata": { - "collapsed": true + "collapsed": true, + "jupyter": { + "outputs_hidden": true + } }, "outputs": [], "source": [ @@ -1421,7 +1538,8 @@ "import sys\n", "sys.path.append('../test')\n", "sys.path.append('../lib')\n", - "import scipy, importlib, pprint, matplotlib.pyplot as plt, warnings\n", + "import numpy as np\n", + "import importlib, pprint, matplotlib.pyplot as plt, warnings\n", "from glmnet import glmnet; from glmnetPlot import glmnetPlot \n", "from glmnetPrint import glmnetPrint; from glmnetCoef import glmnetCoef; from glmnetPredict import glmnetPredict\n", "from cvglmnet import cvglmnet; from cvglmnetCoef import cvglmnetCoef\n", @@ -1431,8 +1549,8 @@ "baseDataDir= '../data/'\n", "\n", "# load data\n", - "x = scipy.loadtxt(baseDataDir + 'MultinomialExampleX.dat', dtype = scipy.float64, delimiter = ',')\n", - "y = scipy.loadtxt(baseDataDir + 'MultinomialExampleY.dat', dtype = scipy.float64)" + "x = np.loadtxt(baseDataDir + 'MultinomialExampleX.dat', dtype = np.float64, delimiter = ',')\n", + "y = np.loadtxt(baseDataDir + 'MultinomialExampleY.dat', dtype = np.float64)" ] }, { @@ -1453,7 +1571,10 @@ "cell_type": "code", "execution_count": 39, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [], "source": [ 
@@ -1471,7 +1592,10 @@ "cell_type": "code", "execution_count": 40, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [ { @@ -1504,7 +1628,10 @@ "cell_type": "code", "execution_count": 41, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [ { @@ -1538,7 +1665,10 @@ "cell_type": "code", "execution_count": 42, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [ { @@ -1568,9 +1698,7 @@ }, { "cell_type": "markdown", - "metadata": { - "collapsed": true - }, + "metadata": {}, "source": [ "Poisson regression is used to model count data under the assumption of Poisson error, or otherwise non-negative data where the mean and variance are proportional. Like the Gaussian and binomial model, the Poisson is a member of the exponential family of distributions. We usually model its positive mean on the log scale: $\\log \\mu(x) = \\beta_0+\\beta' x$.\n", "The log-likelihood for observations $\\{x_i,y_i\\}_1^N$ is given my\n", @@ -1591,7 +1719,10 @@ "cell_type": "code", "execution_count": 43, "metadata": { - "collapsed": true + "collapsed": true, + "jupyter": { + "outputs_hidden": true + } }, "outputs": [], "source": [ @@ -1602,7 +1733,8 @@ "import sys\n", "sys.path.append('../test')\n", "sys.path.append('../lib')\n", - "import scipy, importlib, pprint, matplotlib.pyplot as plt, warnings\n", + "import numpy as np\n", + "import importlib, pprint, matplotlib.pyplot as plt, warnings\n", "from glmnet import glmnet; from glmnetPlot import glmnetPlot \n", "from glmnetPrint import glmnetPrint; from glmnetCoef import glmnetCoef; from glmnetPredict import glmnetPredict\n", "from cvglmnet import cvglmnet; from cvglmnetCoef import cvglmnetCoef\n", @@ -1612,15 +1744,13 @@ "baseDataDir= '../data/'\n", "\n", "# load data\n", - "x = scipy.loadtxt(baseDataDir + 'PoissonExampleX.dat', dtype = 
scipy.float64, delimiter = ',')\n", - "y = scipy.loadtxt(baseDataDir + 'PoissonExampleY.dat', dtype = scipy.float64, delimiter = ',')" + "x = np.loadtxt(baseDataDir + 'PoissonExampleX.dat', dtype = np.float64, delimiter = ',')\n", + "y = np.loadtxt(baseDataDir + 'PoissonExampleY.dat', dtype = np.float64, delimiter = ',')" ] }, { "cell_type": "markdown", - "metadata": { - "collapsed": true - }, + "metadata": {}, "source": [ "We apply the function `glmnet` with the `\"poisson\"` option." ] @@ -1629,7 +1759,10 @@ "cell_type": "code", "execution_count": 44, "metadata": { - "collapsed": true + "collapsed": true, + "jupyter": { + "outputs_hidden": true + } }, "outputs": [], "source": [ @@ -1638,9 +1771,7 @@ }, { "cell_type": "markdown", - "metadata": { - "collapsed": true - }, + "metadata": {}, "source": [ "The optional input arguments of `glmnet` for `\"poisson\"` family are similar to those for others.\n", "\n", @@ -1658,7 +1789,10 @@ "cell_type": "code", "execution_count": 45, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [ { @@ -1678,9 +1812,7 @@ }, { "cell_type": "markdown", - "metadata": { - "collapsed": true - }, + "metadata": {}, "source": [ "Like before, we can extract the coefficients and make predictions at certain $\\lambda$'s by using `coef` and `predict` respectively. The optional input arguments are similar to those for other families. In function `predict`, the option `type`, which is the type of prediction required, has its own specialties for Poisson family. 
That is,\n", "* \"link\" (default) gives the linear predictors like others\n", @@ -1695,7 +1827,10 @@ "cell_type": "code", "execution_count": 46, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [ { @@ -1730,14 +1865,17 @@ } ], "source": [ - "glmnetCoef(fit, s = scipy.float64([1.0]))" + "glmnetCoef(fit, s = np.float64([1.0]))" ] }, { "cell_type": "code", "execution_count": 47, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [ { @@ -1756,14 +1894,12 @@ } ], "source": [ - "glmnetPredict(fit, x[0:5,:], ptype = 'response', s = scipy.float64([0.1, 0.01]))" + "glmnetPredict(fit, x[0:5,:], ptype = 'response', s = np.float64([0.1, 0.01]))" ] }, { "cell_type": "markdown", - "metadata": { - "collapsed": true - }, + "metadata": {}, "source": [ "We may also use cross-validation to find the optimal $\\lambda$'s and thus make inferences." ] @@ -1772,7 +1908,10 @@ "cell_type": "code", "execution_count": 48, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [], "source": [ @@ -1797,7 +1936,10 @@ "cell_type": "code", "execution_count": 49, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [ { @@ -1826,7 +1968,10 @@ "cell_type": "code", "execution_count": 50, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [ { @@ -1861,7 +2006,7 @@ } ], "source": [ - "optlam = scipy.array([cvfit['lambda_min'], cvfit['lambda_1se']]).reshape([2,])\n", + "optlam = np.array([cvfit['lambda_min'], cvfit['lambda_1se']]).reshape([2,])\n", "cvglmnetCoef(cvfit, s = optlam)" ] }, @@ -1874,9 +2019,7 @@ }, { "cell_type": "markdown", - "metadata": { - "collapsed": true - }, + "metadata": {}, "source": [ "[Back to Table of Contents]\n", "\n", @@ -1886,9 +2029,7 @@ }, { "cell_type": 
"markdown", - "metadata": { - "collapsed": true - }, + "metadata": {}, "source": [ "The Cox proportional hazards model is commonly used for the study of the relationship beteween predictor variables and survival time. In the usual survival analysis framework, we have data of the form $(y_1, x_1, \\delta_1), \\ldots, (y_n, x_n, \\delta_n)$ where $y_i$, the observed time, is a time of failure if $\\delta_i$ is 1 or right-censoring if $\\delta_i$ is 0. We also let $t_1 < t_2 < \\ldots < t_m$ be the increasing list of unique failure times, and $j(i)$ denote the index of the observation failing at time $t_i$.\n", "\n", @@ -1913,7 +2054,10 @@ "cell_type": "code", "execution_count": 51, "metadata": { - "collapsed": true + "collapsed": true, + "jupyter": { + "outputs_hidden": true + } }, "outputs": [], "source": [ @@ -1924,7 +2068,8 @@ "import sys\n", "sys.path.append('../test')\n", "sys.path.append('../lib')\n", - "import scipy, importlib, pprint, matplotlib.pyplot as plt, warnings\n", + "import numpy as np\n", + "import importlib, pprint, matplotlib.pyplot as plt, warnings\n", "from glmnet import glmnet; from glmnetPlot import glmnetPlot \n", "from glmnetPrint import glmnetPrint; from glmnetCoef import glmnetCoef; from glmnetPredict import glmnetPredict\n", "from cvglmnet import cvglmnet; from cvglmnetCoef import cvglmnetCoef\n", @@ -1934,8 +2079,8 @@ "baseDataDir= '../data/'\n", "\n", "# load data\n", - "x = scipy.loadtxt(baseDataDir + 'CoxExampleX.dat', dtype = scipy.float64, delimiter = ',')\n", - "y = scipy.loadtxt(baseDataDir + 'CoxExampleY.dat', dtype = scipy.float64, delimiter = ',')" + "x = np.loadtxt(baseDataDir + 'CoxExampleX.dat', dtype = np.float64, delimiter = ',')\n", + "y = np.loadtxt(baseDataDir + 'CoxExampleY.dat', dtype = np.float64, delimiter = ',')" ] }, { @@ -1951,7 +2096,10 @@ "cell_type": "code", "execution_count": 52, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [ { @@ -1979,7 
+2127,10 @@ "cell_type": "code", "execution_count": 53, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [ { @@ -2008,7 +2159,10 @@ "cell_type": "code", "execution_count": 54, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [ { @@ -2052,7 +2206,7 @@ } ], "source": [ - "glmnetCoef(fit, s = scipy.float64([0.05]))" + "glmnetCoef(fit, s = np.float64([0.05]))" ] }, { @@ -2110,9 +2264,9 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.5.2" + "version": "3.6.9" } }, "nbformat": 4, - "nbformat_minor": 0 + "nbformat_minor": 4 } diff --git a/test/example_binomial.py b/test/example_binomial.py index 5bced62..04450c4 100644 --- a/test/example_binomial.py +++ b/test/example_binomial.py @@ -4,7 +4,7 @@ sys.path.append('../test') sys.path.append('../lib') -import scipy +import numpy as np import importlib import matplotlib.pyplot as plt @@ -34,15 +34,15 @@ baseDataDir= '../data/' # load data -x = scipy.loadtxt(baseDataDir + 'BinomialExampleX.dat', dtype = scipy.float64, delimiter = ',') -y = scipy.loadtxt(baseDataDir + 'BinomialExampleY.dat', dtype = scipy.float64) +x = np.loadtxt(baseDataDir + 'BinomialExampleX.dat', dtype = np.float64, delimiter = ',') +y = np.loadtxt(baseDataDir + 'BinomialExampleY.dat', dtype = np.float64) # call glmnet fit = glmnet.glmnet(x = x.copy(), y = y.copy(), family = 'binomial') glmnetPlot.glmnetPlot(fit, xvar = 'dev', label = True); -glmnetPredict.glmnetPredict(fit, newx = x[0:5,], ptype='class', s = scipy.array([0.05, 0.01])) +glmnetPredict.glmnetPredict(fit, newx = x[0:5,], ptype='class', s = np.array([0.05, 0.01])) cvfit = cvglmnet.cvglmnet(x = x.copy(), y = y.copy(), family = 'binomial', ptype = 'class') diff --git a/test/example_cox.py b/test/example_cox.py index e737daa..a11966d 100644 --- a/test/example_cox.py +++ b/test/example_cox.py @@ -4,7 +4,7 @@ 
sys.path.append('../test') sys.path.append('../lib') -import scipy +import numpy as np import importlib import matplotlib.pyplot as plt @@ -34,8 +34,8 @@ baseDataDir= '../data/' # load data -x = scipy.loadtxt(baseDataDir + 'CoxExampleX.dat', dtype = scipy.float64, delimiter = ',') -y = scipy.loadtxt(baseDataDir + 'CoxExampleY.dat', dtype = scipy.float64, delimiter = ',') +x = np.loadtxt(baseDataDir + 'CoxExampleX.dat', dtype = np.float64, delimiter = ',') +y = np.loadtxt(baseDataDir + 'CoxExampleY.dat', dtype = np.float64, delimiter = ',') print(y[0:5, :]) @@ -44,5 +44,5 @@ glmnetPlot.glmnetPlot(fit) -c = glmnetCoef.glmnetCoef(fit, s = scipy.float64([0.05])) +c = glmnetCoef.glmnetCoef(fit, s = np.float64([0.05])) print(c) diff --git a/test/example_gaussian.py b/test/example_gaussian.py index 1b7e112..52fa36e 100644 --- a/test/example_gaussian.py +++ b/test/example_gaussian.py @@ -4,7 +4,7 @@ sys.path.append('../test') sys.path.append('../lib') -import scipy +import numpy as np import importlib import matplotlib.pyplot as plt @@ -34,12 +34,12 @@ baseDataDir= '../data/' # load data -x = scipy.loadtxt(baseDataDir + 'QuickStartExampleX.dat', dtype = scipy.float64) -y = scipy.loadtxt(baseDataDir + 'QuickStartExampleY.dat', dtype = scipy.float64) +x = np.loadtxt(baseDataDir + 'QuickStartExampleX.dat', dtype = np.float64) +y = np.loadtxt(baseDataDir + 'QuickStartExampleY.dat', dtype = np.float64) # create weights -t = scipy.ones((50, 1), dtype = scipy.float64) -wts = scipy.row_stack((t, 2*t)) +t = np.ones((50, 1), dtype = np.float64) +wts = np.row_stack((t, 2*t)) # call glmnet fit = glmnet.glmnet(x = x.copy(), y = y.copy(), family = 'gaussian', \ @@ -53,11 +53,11 @@ # any(fit['lambdau'] == 0.5) # -coefApprx = glmnetCoef.glmnetCoef(fit, s = scipy.float64([0.5]), exact = False) +coefApprx = glmnetCoef.glmnetCoef(fit, s = np.float64([0.5]), exact = False) print(coefApprx) # fc = glmnetPredict.glmnetPredict(fit, x[0:5,:], ptype = 'response', \ - s = scipy.float64([0.05])) + s 
= np.float64([0.05])) print(fc) # cvfit = cvglmnet.cvglmnet(x = x.copy(), y = y.copy(), ptype = 'mse', nfolds = 20) @@ -67,7 +67,7 @@ cvglmnetPredict.cvglmnetPredict(cvfit, newx = x[0:5,], s='lambda_min') #%% -foldid = scipy.random.choice(10, size = y.shape[0], replace = True) +foldid = np.random.choice(10, size = y.shape[0], replace = True) cv1=cvglmnet.cvglmnet(x = x.copy(),y = y.copy(),foldid=foldid,alpha=1) cv0p5=cvglmnet.cvglmnet(x = x.copy(),y = y.copy(),foldid=foldid,alpha=0.5) @@ -82,10 +82,10 @@ f.add_subplot(2,2,3) cvglmnetPlot.cvglmnetPlot(cv0) f.add_subplot(2,2,4) -plt.plot( scipy.log(cv1['lambdau']), cv1['cvm'], 'r.') +plt.plot( np.log(cv1['lambdau']), cv1['cvm'], 'r.') #plt.hold(True) -plt.plot( scipy.log(cv0p5['lambdau']), cv0p5['cvm'], 'g.') -plt.plot( scipy.log(cv0['lambdau']), cv0['cvm'], 'b.') +plt.plot( np.log(cv0p5['lambdau']), cv0p5['cvm'], 'g.') +plt.plot( np.log(cv0['lambdau']), cv0['cvm'], 'b.') plt.xlabel('log(Lambda)') plt.ylabel(cv1['name']) plt.xlim(-6, 4) @@ -94,22 +94,22 @@ #%% plt.figure() -cl = scipy.array([[-0.7], [0.5]], dtype = scipy.float64) +cl = np.array([[-0.7], [0.5]], dtype = np.float64) tfit=glmnet.glmnet(x = x.copy(),y= y.copy(), cl = cl) glmnetPlot.glmnetPlot(tfit) #%% plt.figure() -pfac = scipy.ones([1, 20]) +pfac = np.ones([1, 20]) pfac[0, 4] = 0; pfac[0, 9] = 0; pfac[0, 14] = 0 pfit = glmnet.glmnet(x = x.copy(), y = y.copy(), penalty_factor = pfac) glmnetPlot.glmnetPlot(pfit, label = True) #%% plt.figure() -scipy.random.seed(101) -x = scipy.random.rand(100,10) -y = scipy.random.rand(100,1) +np.random.seed(101) +x = np.random.rand(100,10) +y = np.random.rand(100,1) fit = glmnet.glmnet(x = x, y = y) glmnetPlot.glmnetPlot(fit) diff --git a/test/example_glmnet.py b/test/example_glmnet.py index 68ae5d8..97f03dd 100644 --- a/test/example_glmnet.py +++ b/test/example_glmnet.py @@ -9,7 +9,7 @@ sys.path.append('../test') sys.path.append('../lib') -import scipy +import numpy as np import glmnet import importlib import pprint @@ 
-28,8 +28,8 @@ # call test functions if testType == 'gaussian': ## elnet caller - y = scipy.loadtxt(baseDataDir + 'QuickStartExampleY.dat', dtype = scipy.float64) - x = scipy.loadtxt(baseDataDir + 'QuickStartExampleX.dat', dtype = scipy.float64) + y = np.loadtxt(baseDataDir + 'QuickStartExampleY.dat', dtype = np.float64) + x = np.loadtxt(baseDataDir + 'QuickStartExampleX.dat', dtype = np.float64) fit = glmnet.glmnet(x = x, y = y, family = 'gaussian') #fit = glmnet.glmnet(x = x, y = y, family = 'gaussian', alpha = 0.5) print('fit:') @@ -37,40 +37,40 @@ if testType == 'binomial': # lognet caller - x = scipy.loadtxt(baseDataDir + 'BinomialExampleX.dat', dtype = scipy.float64, delimiter = ',') - y = scipy.loadtxt(baseDataDir + 'BinomialExampleY.dat', dtype = scipy.float64) + x = np.loadtxt(baseDataDir + 'BinomialExampleX.dat', dtype = np.float64, delimiter = ',') + y = np.loadtxt(baseDataDir + 'BinomialExampleY.dat', dtype = np.float64) fit = glmnet.glmnet(x = x, y = y, family = 'binomial') print('fit:') pprint.pprint(fit) if testType == 'multinomial': # multinomial caller - x = scipy.loadtxt(baseDataDir + 'MultinomialExampleX.dat', dtype = scipy.float64, delimiter = ',') - y = scipy.loadtxt(baseDataDir + 'MultinomialExampleY.dat', dtype = scipy.float64, delimiter = ',') + x = np.loadtxt(baseDataDir + 'MultinomialExampleX.dat', dtype = np.float64, delimiter = ',') + y = np.loadtxt(baseDataDir + 'MultinomialExampleY.dat', dtype = np.float64, delimiter = ',') fit = glmnet.glmnet(x = x, y = y, family = 'multinomial') print('fit:') pprint.pprint(fit) if testType == 'cox': # coxnet caller - x = scipy.loadtxt(baseDataDir + 'CoxExampleX.dat', dtype = scipy.float64, delimiter = ',') - y = scipy.loadtxt(baseDataDir + 'CoxExampleY.dat', dtype = scipy.float64, delimiter = ',') + x = np.loadtxt(baseDataDir + 'CoxExampleX.dat', dtype = np.float64, delimiter = ',') + y = np.loadtxt(baseDataDir + 'CoxExampleY.dat', dtype = np.float64, delimiter = ',') fit = glmnet.glmnet(x = x, y = 
y, family = 'cox') print('fit:') pprint.pprint(fit) if testType == 'mgaussian': # mgaussian caller - x = scipy.loadtxt(baseDataDir + 'MultiGaussianExampleX.dat', dtype = scipy.float64, delimiter = ',') - y = scipy.loadtxt(baseDataDir + 'MultiGaussianExampleY.dat', dtype = scipy.float64, delimiter = ',') + x = np.loadtxt(baseDataDir + 'MultiGaussianExampleX.dat', dtype = np.float64, delimiter = ',') + y = np.loadtxt(baseDataDir + 'MultiGaussianExampleY.dat', dtype = np.float64, delimiter = ',') fit = glmnet.glmnet(x = x, y = y, family = 'mgaussian') print('fit:') pprint.pprint(fit) if testType == 'poisson': # poisson caller - x = scipy.loadtxt(baseDataDir + 'PoissonExampleX.dat', dtype = scipy.float64, delimiter = ',') - y = scipy.loadtxt(baseDataDir + 'PoissonExampleY.dat', dtype = scipy.float64, delimiter = ',') + x = np.loadtxt(baseDataDir + 'PoissonExampleX.dat', dtype = np.float64, delimiter = ',') + y = np.loadtxt(baseDataDir + 'PoissonExampleY.dat', dtype = np.float64, delimiter = ',') fit = glmnet.glmnet(x = x, y = y, family = 'poisson') print('fit:') pprint.pprint(fit) diff --git a/test/example_mgaussian.py b/test/example_mgaussian.py index 8cce70c..4910b01 100644 --- a/test/example_mgaussian.py +++ b/test/example_mgaussian.py @@ -4,7 +4,7 @@ sys.path.append('../test') sys.path.append('../lib') -import scipy +import numpy as np import importlib import matplotlib.pyplot as plt import time @@ -35,8 +35,8 @@ baseDataDir= '../data/' # load data -x = scipy.loadtxt(baseDataDir + 'MultiGaussianExampleX.dat', dtype = scipy.float64, delimiter = ',') -y = scipy.loadtxt(baseDataDir + 'MultiGaussianExampleY.dat', dtype = scipy.float64, delimiter = ',') +x = np.loadtxt(baseDataDir + 'MultiGaussianExampleX.dat', dtype = np.float64, delimiter = ',') +y = np.loadtxt(baseDataDir + 'MultiGaussianExampleY.dat', dtype = np.float64, delimiter = ',') # call glmnet mfit = glmnet.glmnet(x = x.copy(), y = y.copy(), family = 'mgaussian') @@ -44,7 +44,7 @@ plt.figure() 
glmnetPlot(mfit, xvar = 'lambda', label = True, ptype = '2norm') -f = glmnetPredict.glmnetPredict(mfit, x[0:5,:], s = scipy.float64([0.1, 0.01])) +f = glmnetPredict.glmnetPredict(mfit, x[0:5,:], s = np.float64([0.1, 0.01])) print(f[:,:,0]) print(f[:,:,1]) diff --git a/test/example_multinomial.py b/test/example_multinomial.py index 8d1ed8a..0bf4b19 100644 --- a/test/example_multinomial.py +++ b/test/example_multinomial.py @@ -4,7 +4,7 @@ sys.path.append('../test') sys.path.append('../lib') -import scipy +import numpy as np import importlib import matplotlib.pyplot as plt import warnings @@ -35,8 +35,8 @@ baseDataDir= '../data/' # load data -x = scipy.loadtxt(baseDataDir + 'MultinomialExampleX.dat', dtype = scipy.float64, delimiter = ',') -y = scipy.loadtxt(baseDataDir + 'MultinomialExampleY.dat', dtype = scipy.float64, delimiter = ',') +x = np.loadtxt(baseDataDir + 'MultinomialExampleX.dat', dtype = np.float64, delimiter = ',') +y = np.loadtxt(baseDataDir + 'MultinomialExampleY.dat', dtype = np.float64, delimiter = ',') # call glmnet fit = glmnet.glmnet(x = x.copy(), y = y.copy(), family = 'multinomial', mtype = 'grouped') diff --git a/test/example_poisson.py b/test/example_poisson.py index 2ca51f5..2eb4e52 100644 --- a/test/example_poisson.py +++ b/test/example_poisson.py @@ -4,7 +4,7 @@ sys.path.append('../test') sys.path.append('../lib') -import scipy +import numpy as np import importlib import matplotlib.pyplot as plt @@ -34,19 +34,19 @@ baseDataDir= '../data/' # load data -x = scipy.loadtxt(baseDataDir + 'PoissonExampleX.dat', dtype = scipy.float64, delimiter = ',') -y = scipy.loadtxt(baseDataDir + 'PoissonExampleY.dat', dtype = scipy.float64, delimiter = ',') +x = np.loadtxt(baseDataDir + 'PoissonExampleX.dat', dtype = np.float64, delimiter = ',') +y = np.loadtxt(baseDataDir + 'PoissonExampleY.dat', dtype = np.float64, delimiter = ',') # call glmnet fit = glmnet.glmnet(x = x.copy(), y = y.copy(), family = 'poisson') glmnetPlot.glmnetPlot(fit) 
-glmnetCoef.glmnetCoef(fit, s = scipy.float64([1.0])) +glmnetCoef.glmnetCoef(fit, s = np.float64([1.0])) -f = glmnetPredict.glmnetPredict(fit, x[0:5,:], ptype = 'response', s = scipy.float64([0.1, 0.01])) +f = glmnetPredict.glmnetPredict(fit, x[0:5,:], ptype = 'response', s = np.float64([0.1, 0.01])) print(f) cvfit = cvglmnet.cvglmnet(x.copy(), y.copy(), family = 'poisson') -optlam = scipy.array([cvfit['lambda_min'], cvfit['lambda_1se']]).reshape(2,) +optlam = np.array([cvfit['lambda_min'], cvfit['lambda_1se']]).reshape(2,) cvglmnetCoef.cvglmnetCoef(cvfit, s = optlam) diff --git a/test/glmnet_examples.ipynb b/test/glmnet_examples.ipynb index 29d9e46..0a41d72 100644 --- a/test/glmnet_examples.ipynb +++ b/test/glmnet_examples.ipynb @@ -65,7 +65,10 @@ "cell_type": "code", "execution_count": 1, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [ { @@ -91,7 +94,10 @@ "cell_type": "code", "execution_count": 2, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [], "source": [ @@ -102,7 +108,8 @@ "import sys\n", "sys.path.append('../test')\n", "sys.path.append('../lib')\n", - "import scipy, importlib, pprint, matplotlib.pyplot as plt, warnings\n", + "import numpy as np\n", + "import importlib, pprint, matplotlib.pyplot as plt, warnings\n", "from glmnet import glmnet; from glmnetPlot import glmnetPlot \n", "from glmnetPrint import glmnetPrint; from glmnetCoef import glmnetCoef; from glmnetPredict import glmnetPredict\n", "from cvglmnet import cvglmnet; from cvglmnetCoef import cvglmnetCoef\n", @@ -112,12 +119,12 @@ "baseDataDir= '../data/'\n", "\n", "# load data\n", - "x = scipy.loadtxt(baseDataDir + 'QuickStartExampleX.dat', dtype = scipy.float64)\n", - "y = scipy.loadtxt(baseDataDir + 'QuickStartExampleY.dat', dtype = scipy.float64)\n", + "x = np.loadtxt(baseDataDir + 'QuickStartExampleX.dat', dtype = np.float64)\n", + "y = 
np.loadtxt(baseDataDir + 'QuickStartExampleY.dat', dtype = np.float64)\n", "\n", "# create weights\n", - "t = scipy.ones((50, 1), dtype = scipy.float64)\n", - "wts = scipy.row_stack((t, 2*t))" + "t = np.ones((50, 1), dtype = np.float64)\n", + "wts = np.row_stack((t, 2*t))" ] }, { @@ -131,7 +138,10 @@ "cell_type": "code", "execution_count": 3, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [], "source": [ @@ -154,6 +164,9 @@ "execution_count": 4, "metadata": { "collapsed": false, + "jupyter": { + "outputs_hidden": false + }, "scrolled": true }, "outputs": [ @@ -208,7 +221,10 @@ "cell_type": "code", "execution_count": 5, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [ { @@ -238,6 +254,9 @@ "execution_count": 6, "metadata": { "collapsed": false, + "jupyter": { + "outputs_hidden": false + }, "scrolled": true }, "outputs": [ @@ -273,7 +292,10 @@ "cell_type": "code", "execution_count": 7, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [ { @@ -295,7 +317,10 @@ "cell_type": "code", "execution_count": 8, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [ { @@ -330,7 +355,7 @@ } ], "source": [ - "glmnetCoef(fit, s = scipy.float64([0.5]), exact = False)" + "glmnetCoef(fit, s = np.float64([0.5]), exact = False)" ] }, { @@ -355,7 +380,10 @@ "cell_type": "code", "execution_count": 9, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [ { @@ -372,7 +400,7 @@ ], "source": [ "fc = glmnetPredict(fit, x[0:5,:], ptype = 'response', \\\n", - " s = scipy.float64([0.05]))\n", + " s = np.float64([0.05]))\n", "print(fc)" ] }, @@ -395,7 +423,10 @@ "cell_type": "code", "execution_count": 10, "metadata": { - "collapsed": false + "collapsed": false, + 
"jupyter": { + "outputs_hidden": false + } }, "outputs": [], "source": [ @@ -430,7 +461,10 @@ "cell_type": "code", "execution_count": 11, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [ { @@ -452,7 +486,10 @@ "cell_type": "code", "execution_count": 12, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [ { @@ -494,7 +531,10 @@ "cell_type": "code", "execution_count": 13, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [ { @@ -527,11 +567,14 @@ "cell_type": "code", "execution_count": 14, "metadata": { - "collapsed": true + "collapsed": true, + "jupyter": { + "outputs_hidden": true + } }, "outputs": [], "source": [ - "foldid = scipy.random.choice(10, size = y.shape[0], replace = True)\n", + "foldid = np.random.choice(10, size = y.shape[0], replace = True)\n", "cv1=cvglmnet(x = x.copy(),y = y.copy(),foldid=foldid,alpha=1)\n", "cv0p5=cvglmnet(x = x.copy(),y = y.copy(),foldid=foldid,alpha=0.5)\n", "cv0=cvglmnet(x = x.copy(),y = y.copy(),foldid=foldid,alpha=0)" @@ -548,7 +591,10 @@ "cell_type": "code", "execution_count": 15, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [ { @@ -571,10 +617,10 @@ "f.add_subplot(2,2,3)\n", "cvglmnetPlot(cv0)\n", "f.add_subplot(2,2,4)\n", - "plt.plot( scipy.log(cv1['lambdau']), cv1['cvm'], 'r.')\n", + "plt.plot( np.log(cv1['lambdau']), cv1['cvm'], 'r.')\n", "plt.hold(True)\n", - "plt.plot( scipy.log(cv0p5['lambdau']), cv0p5['cvm'], 'g.')\n", - "plt.plot( scipy.log(cv0['lambdau']), cv0['cvm'], 'b.')\n", + "plt.plot( np.log(cv0p5['lambdau']), cv0p5['cvm'], 'g.')\n", + "plt.plot( np.log(cv0['lambdau']), cv0['cvm'], 'b.')\n", "plt.xlabel('log(Lambda)')\n", "plt.ylabel(cv1['name'])\n", "plt.xlim(-6, 4)\n", @@ -597,7 +643,10 @@ "cell_type": "code", "execution_count": 16, 
"metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [ { @@ -612,7 +661,7 @@ } ], "source": [ - "cl = scipy.array([[-0.7], [0.5]], dtype = scipy.float64)\n", + "cl = np.array([[-0.7], [0.5]], dtype = np.float64)\n", "tfit=glmnet(x = x.copy(),y= y.copy(), cl = cl)\n", "glmnetPlot(tfit);" ] @@ -645,7 +694,10 @@ "cell_type": "code", "execution_count": 17, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [ { @@ -660,7 +712,7 @@ } ], "source": [ - "pfac = scipy.ones([1, 20])\n", + "pfac = np.ones([1, 20])\n", "pfac[0, 4] = 0; pfac[0, 9] = 0; pfac[0, 14] = 0\n", "pfit = glmnet(x = x.copy(), y = y.copy(), penalty_factor = pfac)\n", "glmnetPlot(pfit, label = True);" @@ -685,7 +737,10 @@ "cell_type": "code", "execution_count": 18, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [ { @@ -700,9 +755,9 @@ } ], "source": [ - "scipy.random.seed(101)\n", - "x = scipy.random.rand(100,10)\n", - "y = scipy.random.rand(100,1)\n", + "np.random.seed(101)\n", + "x = np.random.rand(100,10)\n", + "y = np.random.rand(100,1)\n", "fit = glmnet(x = x, y = y)\n", "glmnetPlot(fit);" ] @@ -718,7 +773,10 @@ "cell_type": "code", "execution_count": 19, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [], "source": [ @@ -773,7 +831,10 @@ "cell_type": "code", "execution_count": 20, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [], "source": [ @@ -784,7 +845,8 @@ "import sys\n", "sys.path.append('../test')\n", "sys.path.append('../lib')\n", - "import scipy, importlib, pprint, matplotlib.pyplot as plt, warnings\n", + "import numpy as np\n", + "import importlib, pprint, matplotlib.pyplot as plt, warnings\n", "from glmnet import glmnet; from glmnetPlot import glmnetPlot \n", 
"from glmnetPrint import glmnetPrint; from glmnetCoef import glmnetCoef; from glmnetPredict import glmnetPredict\n", "from cvglmnet import cvglmnet; from cvglmnetCoef import cvglmnetCoef\n", @@ -794,8 +856,8 @@ "baseDataDir= '../data/'\n", "\n", "# load data\n", - "x = scipy.loadtxt(baseDataDir + 'MultiGaussianExampleX.dat', dtype = scipy.float64, delimiter = ',')\n", - "y = scipy.loadtxt(baseDataDir + 'MultiGaussianExampleY.dat', dtype = scipy.float64, delimiter = ',')" + "x = np.loadtxt(baseDataDir + 'MultiGaussianExampleX.dat', dtype = np.float64, delimiter = ',')\n", + "y = np.loadtxt(baseDataDir + 'MultiGaussianExampleY.dat', dtype = np.float64, delimiter = ',')" ] }, { @@ -809,7 +871,10 @@ "cell_type": "code", "execution_count": 21, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [], "source": [ @@ -829,7 +894,10 @@ "cell_type": "code", "execution_count": 22, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [ { @@ -862,7 +930,10 @@ "cell_type": "code", "execution_count": 23, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [ { @@ -884,7 +955,7 @@ } ], "source": [ - "f = glmnetPredict(mfit, x[0:5,:], s = scipy.float64([0.1, 0.01]))\n", + "f = glmnetPredict(mfit, x[0:5,:], s = np.float64([0.1, 0.01]))\n", "print(f[:,:,0], '\\n')\n", "print(f[:,:,1])" ] @@ -902,7 +973,10 @@ "cell_type": "code", "execution_count": 24, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [], "source": [ @@ -922,7 +996,10 @@ "cell_type": "code", "execution_count": 25, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [ { @@ -951,7 +1028,10 @@ "cell_type": "code", "execution_count": 26, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { 
+ "outputs_hidden": false + } }, "outputs": [ { @@ -973,7 +1053,10 @@ "cell_type": "code", "execution_count": 27, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [ { @@ -1035,7 +1118,10 @@ "cell_type": "code", "execution_count": 28, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [], "source": [ @@ -1046,7 +1132,8 @@ "import sys\n", "sys.path.append('../test')\n", "sys.path.append('../lib')\n", - "import scipy, importlib, pprint, matplotlib.pyplot as plt, warnings\n", + "import numpy as np\n", + "import importlib, pprint, matplotlib.pyplot as plt, warnings\n", "from glmnet import glmnet; from glmnetPlot import glmnetPlot \n", "from glmnetPrint import glmnetPrint; from glmnetCoef import glmnetCoef; from glmnetPredict import glmnetPredict\n", "from cvglmnet import cvglmnet; from cvglmnetCoef import cvglmnetCoef\n", @@ -1056,8 +1143,8 @@ "baseDataDir= '../data/'\n", "\n", "# load data\n", - "x = scipy.loadtxt(baseDataDir + 'BinomialExampleX.dat', dtype = scipy.float64, delimiter = ',')\n", - "y = scipy.loadtxt(baseDataDir + 'BinomialExampleY.dat', dtype = scipy.float64)" + "x = np.loadtxt(baseDataDir + 'BinomialExampleX.dat', dtype = np.float64, delimiter = ',')\n", + "y = np.loadtxt(baseDataDir + 'BinomialExampleY.dat', dtype = np.float64)" ] }, { @@ -1073,7 +1160,10 @@ "cell_type": "code", "execution_count": 29, "metadata": { - "collapsed": true + "collapsed": true, + "jupyter": { + "outputs_hidden": true + } }, "outputs": [], "source": [ @@ -1091,7 +1181,10 @@ "cell_type": "code", "execution_count": 30, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [ { @@ -1133,7 +1226,10 @@ "cell_type": "code", "execution_count": 31, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [ { @@ -1152,7 +1248,7 @@ } ], 
"source": [ - "glmnetPredict(fit, newx = x[0:5,], ptype='class', s = scipy.array([0.05, 0.01]))" + "glmnetPredict(fit, newx = x[0:5,], ptype='class', s = np.array([0.05, 0.01]))" ] }, { @@ -1177,7 +1273,10 @@ "cell_type": "code", "execution_count": 32, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [], "source": [ @@ -1199,7 +1298,10 @@ "cell_type": "code", "execution_count": 33, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [ { @@ -1221,7 +1323,10 @@ "cell_type": "code", "execution_count": 34, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [ { @@ -1243,7 +1348,10 @@ "cell_type": "code", "execution_count": 35, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [ { @@ -1272,7 +1380,10 @@ "cell_type": "code", "execution_count": 36, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [ { @@ -1331,7 +1442,10 @@ "cell_type": "code", "execution_count": 37, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [ { @@ -1410,7 +1524,10 @@ "cell_type": "code", "execution_count": 38, "metadata": { - "collapsed": true + "collapsed": true, + "jupyter": { + "outputs_hidden": true + } }, "outputs": [], "source": [ @@ -1421,7 +1538,8 @@ "import sys\n", "sys.path.append('../test')\n", "sys.path.append('../lib')\n", - "import scipy, importlib, pprint, matplotlib.pyplot as plt, warnings\n", + "import numpy as np\n", + "import importlib, pprint, matplotlib.pyplot as plt, warnings\n", "from glmnet import glmnet; from glmnetPlot import glmnetPlot \n", "from glmnetPrint import glmnetPrint; from glmnetCoef import glmnetCoef; from glmnetPredict import glmnetPredict\n", "from cvglmnet import 
cvglmnet; from cvglmnetCoef import cvglmnetCoef\n", @@ -1431,8 +1549,8 @@ "baseDataDir= '../data/'\n", "\n", "# load data\n", - "x = scipy.loadtxt(baseDataDir + 'MultinomialExampleX.dat', dtype = scipy.float64, delimiter = ',')\n", - "y = scipy.loadtxt(baseDataDir + 'MultinomialExampleY.dat', dtype = scipy.float64)" + "x = np.loadtxt(baseDataDir + 'MultinomialExampleX.dat', dtype = np.float64, delimiter = ',')\n", + "y = np.loadtxt(baseDataDir + 'MultinomialExampleY.dat', dtype = np.float64)" ] }, { @@ -1453,7 +1571,10 @@ "cell_type": "code", "execution_count": 39, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [], "source": [ @@ -1471,7 +1592,10 @@ "cell_type": "code", "execution_count": 40, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [ { @@ -1504,7 +1628,10 @@ "cell_type": "code", "execution_count": 41, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [ { @@ -1538,7 +1665,10 @@ "cell_type": "code", "execution_count": 42, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [ { @@ -1568,9 +1698,7 @@ }, { "cell_type": "markdown", - "metadata": { - "collapsed": true - }, + "metadata": {}, "source": [ "Poisson regression is used to model count data under the assumption of Poisson error, or otherwise non-negative data where the mean and variance are proportional. Like the Gaussian and binomial model, the Poisson is a member of the exponential family of distributions. 
We usually model its positive mean on the log scale: $\\log \\mu(x) = \\beta_0+\\beta' x$.\n", "The log-likelihood for observations $\\{x_i,y_i\\}_1^N$ is given my\n", @@ -1591,7 +1719,10 @@ "cell_type": "code", "execution_count": 43, "metadata": { - "collapsed": true + "collapsed": true, + "jupyter": { + "outputs_hidden": true + } }, "outputs": [], "source": [ @@ -1602,7 +1733,8 @@ "import sys\n", "sys.path.append('../test')\n", "sys.path.append('../lib')\n", - "import scipy, importlib, pprint, matplotlib.pyplot as plt, warnings\n", + "import numpy as np\n", + "import importlib, pprint, matplotlib.pyplot as plt, warnings\n", "from glmnet import glmnet; from glmnetPlot import glmnetPlot \n", "from glmnetPrint import glmnetPrint; from glmnetCoef import glmnetCoef; from glmnetPredict import glmnetPredict\n", "from cvglmnet import cvglmnet; from cvglmnetCoef import cvglmnetCoef\n", @@ -1612,15 +1744,13 @@ "baseDataDir= '../data/'\n", "\n", "# load data\n", - "x = scipy.loadtxt(baseDataDir + 'PoissonExampleX.dat', dtype = scipy.float64, delimiter = ',')\n", - "y = scipy.loadtxt(baseDataDir + 'PoissonExampleY.dat', dtype = scipy.float64, delimiter = ',')" + "x = np.loadtxt(baseDataDir + 'PoissonExampleX.dat', dtype = np.float64, delimiter = ',')\n", + "y = np.loadtxt(baseDataDir + 'PoissonExampleY.dat', dtype = np.float64, delimiter = ',')" ] }, { "cell_type": "markdown", - "metadata": { - "collapsed": true - }, + "metadata": {}, "source": [ "We apply the function `glmnet` with the `\"poisson\"` option." 
] @@ -1629,7 +1759,10 @@ "cell_type": "code", "execution_count": 44, "metadata": { - "collapsed": true + "collapsed": true, + "jupyter": { + "outputs_hidden": true + } }, "outputs": [], "source": [ @@ -1638,9 +1771,7 @@ }, { "cell_type": "markdown", - "metadata": { - "collapsed": true - }, + "metadata": {}, "source": [ "The optional input arguments of `glmnet` for `\"poisson\"` family are similar to those for others.\n", "\n", @@ -1658,7 +1789,10 @@ "cell_type": "code", "execution_count": 45, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [ { @@ -1678,9 +1812,7 @@ }, { "cell_type": "markdown", - "metadata": { - "collapsed": true - }, + "metadata": {}, "source": [ "Like before, we can extract the coefficients and make predictions at certain $\\lambda$'s by using `coef` and `predict` respectively. The optional input arguments are similar to those for other families. In function `predict`, the option `type`, which is the type of prediction required, has its own specialties for Poisson family. 
That is,\n", "* \"link\" (default) gives the linear predictors like others\n", @@ -1695,7 +1827,10 @@ "cell_type": "code", "execution_count": 46, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [ { @@ -1730,14 +1865,17 @@ } ], "source": [ - "glmnetCoef(fit, s = scipy.float64([1.0]))" + "glmnetCoef(fit, s = np.float64([1.0]))" ] }, { "cell_type": "code", "execution_count": 47, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [ { @@ -1756,14 +1894,12 @@ } ], "source": [ - "glmnetPredict(fit, x[0:5,:], ptype = 'response', s = scipy.float64([0.1, 0.01]))" + "glmnetPredict(fit, x[0:5,:], ptype = 'response', s = np.float64([0.1, 0.01]))" ] }, { "cell_type": "markdown", - "metadata": { - "collapsed": true - }, + "metadata": {}, "source": [ "We may also use cross-validation to find the optimal $\\lambda$'s and thus make inferences." ] @@ -1772,7 +1908,10 @@ "cell_type": "code", "execution_count": 48, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [], "source": [ @@ -1797,7 +1936,10 @@ "cell_type": "code", "execution_count": 49, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [ { @@ -1826,7 +1968,10 @@ "cell_type": "code", "execution_count": 50, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [ { @@ -1861,7 +2006,7 @@ } ], "source": [ - "optlam = scipy.array([cvfit['lambda_min'], cvfit['lambda_1se']]).reshape([2,])\n", + "optlam = np.array([cvfit['lambda_min'], cvfit['lambda_1se']]).reshape([2,])\n", "cvglmnetCoef(cvfit, s = optlam)" ] }, @@ -1874,9 +2019,7 @@ }, { "cell_type": "markdown", - "metadata": { - "collapsed": true - }, + "metadata": {}, "source": [ "[Back to Table of Contents]\n", "\n", @@ -1886,9 +2029,7 @@ }, { "cell_type": 
"markdown", - "metadata": { - "collapsed": true - }, + "metadata": {}, "source": [ "The Cox proportional hazards model is commonly used for the study of the relationship beteween predictor variables and survival time. In the usual survival analysis framework, we have data of the form $(y_1, x_1, \\delta_1), \\ldots, (y_n, x_n, \\delta_n)$ where $y_i$, the observed time, is a time of failure if $\\delta_i$ is 1 or right-censoring if $\\delta_i$ is 0. We also let $t_1 < t_2 < \\ldots < t_m$ be the increasing list of unique failure times, and $j(i)$ denote the index of the observation failing at time $t_i$.\n", "\n", @@ -1913,7 +2054,10 @@ "cell_type": "code", "execution_count": 51, "metadata": { - "collapsed": true + "collapsed": true, + "jupyter": { + "outputs_hidden": true + } }, "outputs": [], "source": [ @@ -1924,7 +2068,8 @@ "import sys\n", "sys.path.append('../test')\n", "sys.path.append('../lib')\n", - "import scipy, importlib, pprint, matplotlib.pyplot as plt, warnings\n", + "import numpy as np\n", + "import importlib, pprint, matplotlib.pyplot as plt, warnings\n", "from glmnet import glmnet; from glmnetPlot import glmnetPlot \n", "from glmnetPrint import glmnetPrint; from glmnetCoef import glmnetCoef; from glmnetPredict import glmnetPredict\n", "from cvglmnet import cvglmnet; from cvglmnetCoef import cvglmnetCoef\n", @@ -1934,8 +2079,8 @@ "baseDataDir= '../data/'\n", "\n", "# load data\n", - "x = scipy.loadtxt(baseDataDir + 'CoxExampleX.dat', dtype = scipy.float64, delimiter = ',')\n", - "y = scipy.loadtxt(baseDataDir + 'CoxExampleY.dat', dtype = scipy.float64, delimiter = ',')" + "x = np.loadtxt(baseDataDir + 'CoxExampleX.dat', dtype = np.float64, delimiter = ',')\n", + "y = np.loadtxt(baseDataDir + 'CoxExampleY.dat', dtype = np.float64, delimiter = ',')" ] }, { @@ -1951,7 +2096,10 @@ "cell_type": "code", "execution_count": 52, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [ { @@ -1979,7 
+2127,10 @@ "cell_type": "code", "execution_count": 53, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [ { @@ -2008,7 +2159,10 @@ "cell_type": "code", "execution_count": 54, "metadata": { - "collapsed": false + "collapsed": false, + "jupyter": { + "outputs_hidden": false + } }, "outputs": [ { @@ -2052,7 +2206,7 @@ } ], "source": [ - "glmnetCoef(fit, s = scipy.float64([0.05]))" + "glmnetCoef(fit, s = np.float64([0.05]))" ] }, { @@ -2110,9 +2264,9 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.5.2" + "version": "3.6.9" } }, "nbformat": 4, - "nbformat_minor": 0 + "nbformat_minor": 4 } diff --git a/test/misc/example_elnet.py b/test/misc/example_elnet.py index 390acc4..45ac9b4 100644 --- a/test/misc/example_elnet.py +++ b/test/misc/example_elnet.py @@ -2,7 +2,7 @@ # Sample caller code for elnet # -import scipy +import numpy as np import ctypes from glmnet import glmnet from glmnetControl import glmnetControl @@ -22,12 +22,12 @@ # type.multinomial=c("ungrouped","grouped")){ baseDataDir= '/home/bbalasub/Desktop/Summer2016/glmnet/glmnet_R/' -y = scipy.loadtxt(baseDataDir + 'QuickStartExampleY.dat', dtype = scipy.float64) -x = scipy.loadtxt(baseDataDir + 'QuickStartExampleX.dat', dtype = scipy.float64) +y = np.loadtxt(baseDataDir + 'QuickStartExampleY.dat', dtype = np.float64) +x = np.loadtxt(baseDataDir + 'QuickStartExampleX.dat', dtype = np.float64) # convert x and y to 'F' (fortran) order and scipy float64 -y = y.astype(dtype = scipy.float64, order = 'C', copy = True) -x = x.astype(dtype = scipy.float64, order = 'C', copy = True) +y = y.astype(dtype = np.float64, order = 'C', copy = True) +x = x.astype(dtype = np.float64, order = 'C', copy = True) # call elnet directly # subroutine elnet (ka,parm,no,ni,x,y,w,jd,vp,cl,ne,nx,nlam,flmin,u 787 @@ -50,31 +50,31 @@ # y y_r = y.ctypes.data_as(ctypes.POINTER(ctypes.c_double)) # w -w = scipy.ones([no], dtype = 
scipy.float64) -w = w.astype(dtype = scipy.float64, order = 'F', copy = True) +w = np.ones([no], dtype = np.float64) +w = w.astype(dtype = np.float64, order = 'F', copy = True) w_r = w.ctypes.data_as(ctypes.POINTER(ctypes.c_double)) # jd -jd = scipy.ones([1], dtype = scipy.int32) -jd = jd.astype(dtype = scipy.int32, order = 'F', copy = True) +jd = np.ones([1], dtype = np.int32) +jd = jd.astype(dtype = np.int32, order = 'F', copy = True) jd_r = jd.ctypes.data_as(ctypes.POINTER(ctypes.c_int)) # vp -vp = scipy.ones([ni], dtype = scipy.float64) -vp = vp.astype(dtype = scipy.float64, order = 'F', copy = True) +vp = np.ones([ni], dtype = np.float64) +vp = vp.astype(dtype = np.float64, order = 'F', copy = True) vp_r = vp.ctypes.data_as(ctypes.POINTER(ctypes.c_double)) # cl options = glmnetSet() inparms = glmnetControl() cl = options['cl'] -cl[0, cl[0, :] == scipy.double('-inf')] = -1.0*inparms['big'] -cl[1, cl[1, :] == scipy.double('inf')] = 1.0*inparms['big'] +cl[0, cl[0, :] == np.double('-inf')] = -1.0*inparms['big'] +cl[1, cl[1, :] == np.double('inf')] = 1.0*inparms['big'] if cl.shape[1] < ni: if cl.shape[1] == 1: - cl = cl*scipy.ones([1, ni], dtype = scipy.float64) + cl = cl*np.ones([1, ni], dtype = np.float64) else: raise ValueError('ERROR: Require length 1 or nvars lower and upper limits') else: cl = cl[:, 0:ni-1] -cl = cl.astype(dtype = scipy.float64, order = 'F', copy = True) +cl = cl.astype(dtype = np.float64, order = 'F', copy = True) cl_r = cl.ctypes.data_as(ctypes.POINTER(ctypes.c_double)) # ne ne = ni + 1 @@ -89,8 +89,8 @@ flmin = 1.0e-4 flmin_r = ctypes.c_double(flmin) # ulam -ulam = scipy.zeros([1], dtype = scipy.float64) -ulam = ulam.astype(dtype = scipy.float64, order = 'F', copy = True) +ulam = np.zeros([1], dtype = np.float64) +ulam = ulam.astype(dtype = np.float64, order = 'F', copy = True) ulam_r = ulam.ctypes.data_as(ctypes.POINTER(ctypes.c_double)) # thr thr = 1.0e-7 @@ -111,28 +111,28 @@ lmu = -1 lmu_r = ctypes.c_int(lmu) # a0 -a0 = 
scipy.zeros([nlam], dtype = scipy.float64) -a0 = a0.astype(dtype = scipy.float64, order = 'F', copy = True) +a0 = np.zeros([nlam], dtype = np.float64) +a0 = a0.astype(dtype = np.float64, order = 'F', copy = True) a0_r = a0.ctypes.data_as(ctypes.POINTER(ctypes.c_double)) # ca -ca = scipy.zeros([nx, nlam], dtype = scipy.float64) -ca = ca.astype(dtype = scipy.float64, order = 'F', copy = True) +ca = np.zeros([nx, nlam], dtype = np.float64) +ca = ca.astype(dtype = np.float64, order = 'F', copy = True) ca_r = ca.ctypes.data_as(ctypes.POINTER(ctypes.c_double)) # ia -ia = -1*scipy.ones([nx], dtype = scipy.int32) -ia = ia.astype(dtype = scipy.int32, order = 'F', copy = True) +ia = -1*np.ones([nx], dtype = np.int32) +ia = ia.astype(dtype = np.int32, order = 'F', copy = True) ia_r = ia.ctypes.data_as(ctypes.POINTER(ctypes.c_int)) # nin -nin = -1*scipy.ones([nlam], dtype = scipy.int32) -nin = nin.astype(dtype = scipy.int32, order = 'F', copy = True) +nin = -1*np.ones([nlam], dtype = np.int32) +nin = nin.astype(dtype = np.int32, order = 'F', copy = True) nin_r = nin.ctypes.data_as(ctypes.POINTER(ctypes.c_int)) # rsq -rsq = -1*scipy.ones([nlam], dtype = scipy.float64) -rsq = rsq.astype(dtype = scipy.float64, order = 'F', copy = True) +rsq = -1*np.ones([nlam], dtype = np.float64) +rsq = rsq.astype(dtype = np.float64, order = 'F', copy = True) rsq_r = rsq.ctypes.data_as(ctypes.POINTER(ctypes.c_double)) # alm -alm = -1*scipy.ones([nlam], dtype = scipy.float64) -alm = alm.astype(dtype = scipy.float64, order = 'F', copy = True) +alm = -1*np.ones([nlam], dtype = np.float64) +alm = alm.astype(dtype = np.float64, order = 'F', copy = True) alm_r = alm.ctypes.data_as(ctypes.POINTER(ctypes.c_double)) # nlp nlp = -1 diff --git a/test/misc/example_elnet2.py b/test/misc/example_elnet2.py index c64b56b..99f120f 100644 --- a/test/misc/example_elnet2.py +++ b/test/misc/example_elnet2.py @@ -4,7 +4,7 @@ @author: bbalasub """ -import scipy +import numpy as np import ctypes import glmnet import 
importlib @@ -15,7 +15,7 @@ # unless a new python console is started baseDataDir= '/home/bbalasub/Desktop/Summer2016/glmnet/glmnet_R/' -y = scipy.loadtxt(baseDataDir + 'QuickStartExampleY.dat', dtype = scipy.float64) -x = scipy.loadtxt(baseDataDir + 'QuickStartExampleX.dat', dtype = scipy.float64) +y = np.loadtxt(baseDataDir + 'QuickStartExampleY.dat', dtype = np.float64) +x = np.loadtxt(baseDataDir + 'QuickStartExampleX.dat', dtype = np.float64) fit = glmnet.glmnet(x = x, y = y) diff --git a/test/misc/temp.py b/test/misc/temp.py index beeff0e..682e9ec 100644 --- a/test/misc/temp.py +++ b/test/misc/temp.py @@ -1,7 +1,8 @@ import sys sys.path.append('../../test') sys.path.append('../../lib') -import scipy +import numpy as np +import scipy.sparse import glmnet from glmnetPlot import glmnetPlot from glmnetPredict import glmnetPredict @@ -19,13 +20,13 @@ if section == 1: # create x and y - scipy.random.seed(1) - x = scipy.random.normal(size = [10,3]) - y = scipy.random.binomial(1, 0.5, size =[10,1])*1.0 + np.random.seed(1) + x = np.random.normal(size = [10,3]) + y = np.random.binomial(1, 0.5, size =[10,1])*1.0 x[x < 0.0] = 0.0 # x is made sparse - xs = scipy.sparse.csc_matrix(x, dtype = scipy.float64) + xs = scipy.sparse.csc_matrix(x, dtype = np.float64) print("xs = ", xs.todense()) # nobs, nvars can be obtained from sparse x @@ -35,22 +36,22 @@ # tfs = xs[:,0] > 1.0 tfs = tfs.toarray(); - tf = scipy.reshape(tfs, [len(tfs), ]) + tf = np.reshape(tfs, [len(tfs), ]) elif section == 2: # sparse caller for glmnet baseDataDir= '../../data/' # load data - x = scipy.loadtxt(baseDataDir + 'QuickStartExampleX.dat', dtype = scipy.float64) - y = scipy.loadtxt(baseDataDir + 'QuickStartExampleY.dat', dtype = scipy.float64) - xs = scipy.sparse.csc_matrix(x, dtype = scipy.float64) - scipy.random.seed(1) - lambda_min = scipy.random.rand(y.size) - exclude = scipy.array([0, 1, 2, 3]) - penalty_factor = scipy.ones(x.shape[1]) + x = np.loadtxt(baseDataDir + 'QuickStartExampleX.dat', dtype 
= np.float64) + y = np.loadtxt(baseDataDir + 'QuickStartExampleY.dat', dtype = np.float64) + xs = scipy.sparse.csc_matrix(x, dtype = np.float64) + np.random.seed(1) + lambda_min = np.random.rand(y.size) + exclude = np.array([0, 1, 2, 3]) + penalty_factor = np.ones(x.shape[1]) penalty_factor[0] = 500 - pmax = scipy.array([18]) + pmax = np.array([18]) fit = glmnet.glmnet(x = xs.copy(), y = y.copy(), family = 'gaussian', exclude = exclude) print(fit['a0']) print(fit['beta'][:,-1]) @@ -58,10 +59,10 @@ elif section == 3: N = 1000; family = 'binomial' - x = scipy.random.normal(size = [N,10]) + x = np.random.normal(size = [N,10]) x[x < 2.0] = 0.0 - xs = scipy.sparse.csc_matrix(x, dtype = scipy.float64) - y = scipy.random.normal(size = [N,1]) + xs = scipy.sparse.csc_matrix(x, dtype = np.float64) + y = np.random.normal(size = [N,1]) y[y > 0] = 1.0 y[y < 0] = 0.0 st = time.time() @@ -78,10 +79,10 @@ elif section == 4: N = 1000; family = 'binomial' - x = scipy.random.normal(size = [N,10]) + x = np.random.normal(size = [N,10]) x[x < 2.0] = 0.0 - xs = scipy.sparse.csc_matrix(x, dtype = scipy.float64) - y = scipy.random.normal(size = [N,1]) + xs = scipy.sparse.csc_matrix(x, dtype = np.float64) + y = np.random.normal(size = [N,1]) y[y > 0] = 1.0 y[y < 0] = 0.0 st = time.time() @@ -91,10 +92,10 @@ elif section == 5: import matplotlib.pyplot as plt - scipy.random.seed(1) - x=scipy.random.normal(size = (100,20)) - y=scipy.random.normal(size = (100,1)) - g4=scipy.random.choice(4,size = (100,1))*1.0 + np.random.seed(1) + x=np.random.normal(size = (100,20)) + y=np.random.normal(size = (100,1)) + g4=np.random.choice(4,size = (100,1))*1.0 fit1=glmnet.glmnet(x = x.copy(),y = y.copy()) glmnetPlot(fit1) plt.figure() @@ -104,43 +105,43 @@ glmnetPlot(fit3) elif section == 6: - x = scipy.random.rand(100, 10) - y = scipy.random.rand(100, 1) + x = np.random.rand(100, 10) + y = np.random.rand(100, 1) fit = glmnet.glmnet(x = x, y = y) - f = glmnetPredict(fit, x[0:5, :], scipy.array([0.0866, 
0.2323])) + f = glmnetPredict(fit, x[0:5, :], np.array([0.0866, 0.2323])) print(f) elif section == 7: - x = scipy.random.normal(size = [100,20]) - y = scipy.random.normal(size = [100,1]) - g2 = scipy.random.choice(2, size = [100, 1])*1.0 - g4 = scipy.random.choice(4, size = [100, 1])*1.0 + x = np.random.normal(size = [100,20]) + y = np.random.normal(size = [100,1]) + g2 = np.random.choice(2, size = [100, 1])*1.0 + g4 = np.random.choice(4, size = [100, 1])*1.0 fit1 = glmnet.glmnet(x = x.copy(),y = y.copy()); - print( glmnetPredict(fit1,x[0:5,:],scipy.array([0.01,0.005])) ) - print( glmnetPredict(fit1, scipy.empty([0]), scipy.empty([0]), 'coefficients') ) + print( glmnetPredict(fit1,x[0:5,:],np.array([0.01,0.005])) ) + print( glmnetPredict(fit1, np.empty([0]), np.empty([0]), 'coefficients') ) fit2 = glmnet.glmnet(x = x.copy(), y = g2.copy(), family = 'binomial'); - print(glmnetPredict(fit2, x[2:5,:],scipy.empty([0]), 'response')) - print(glmnetPredict(fit2, scipy.empty([0]), scipy.empty([0]), 'nonzero')) + print(glmnetPredict(fit2, x[2:5,:],np.empty([0]), 'response')) + print(glmnetPredict(fit2, np.empty([0]), np.empty([0]), 'nonzero')) fit3 = glmnet.glmnet(x = x.copy(), y = g4.copy(), family = 'multinomial'); - print(glmnetPredict(fit3, x[0:3,:], scipy.array([0.01]), 'response')) - print(glmnetPredict(fit3, x[0:3,:], scipy.array([0.01, 0.5]), 'response')) + print(glmnetPredict(fit3, x[0:3,:], np.array([0.01]), 'response')) + print(glmnetPredict(fit3, x[0:3,:], np.array([0.01, 0.5]), 'response')) elif section == 8: - x=scipy.random.rand(100,20); - y=scipy.random.rand(100,1); + x=np.random.rand(100,20); + y=np.random.rand(100,1); fit=glmnet.glmnet(x = x.copy(),y = y.copy()); - ncoef=glmnetCoef(fit,scipy.array([0.01, 0.001])); + ncoef=glmnetCoef(fit,np.array([0.01, 0.001])); elif section == 9: - scipy.random.seed(1) - x=scipy.random.normal(size = (100,20)) - y=scipy.random.normal(size = (100,1)) - g2=scipy.random.choice(2,size = (100,1))*1.0 - 
g4=scipy.random.choice(4,size = (100,1))*1.0 + np.random.seed(1) + x=np.random.normal(size = (100,20)) + y=np.random.normal(size = (100,1)) + g2=np.random.choice(2,size = (100,1))*1.0 + g4=np.random.choice(4,size = (100,1))*1.0 plt.figure() fit1=cvglmnet(x = x.copy(),y = y.copy())