julia> using GLM, DataFrames

julia> df = DataFrame( y = repeat( [1,0], 5 ), x1 = 1:10, x2 = sin.(1:10) )
10×3 DataFrame
│ Row │ y     │ x1    │ x2        │
│     │ Int64 │ Int64 │ Float64   │
├─────┼───────┼───────┼───────────┤
│ 1   │ 1     │ 1     │ 0.841471  │
│ 2   │ 0     │ 2     │ 0.909297  │
│ 3   │ 1     │ 3     │ 0.14112   │
│ 4   │ 0     │ 4     │ -0.756802 │
│ 5   │ 1     │ 5     │ -0.958924 │
│ 6   │ 0     │ 6     │ -0.279415 │
│ 7   │ 1     │ 7     │ 0.656987  │
│ 8   │ 0     │ 8     │ 0.989358  │
│ 9   │ 1     │ 9     │ 0.412118  │
│ 10  │ 0     │ 10    │ -0.544021 │

julia> lm( @formula( y ~ x1 + x2 ), df )
StatsModels.DataFrameRegressionModel{LinearModel{LmResp{Array{Float64,1}},DensePredChol{Float64,LinearAlgebra.Cholesky{Float64,Array{Float64,2}}}},Array{Float64,2}}

Formula: y ~ 1 + x1 + x2

Coefficients:
               Estimate  Std.Error    t value  Pr(>|t|)
(Intercept)    0.644264   0.412811    1.56068    0.1626
x1           -0.0277944  0.0655179  -0.424227    0.6841
x2            0.0609811   0.271558    0.22456    0.8287

julia> glm( @formula( y ~ x1 + x2 ), df, Binomial(), ProbitLink() )  ## f(mu_Y) = invnorm(P)
StatsModels.DataFrameRegressionModel{GeneralizedLinearModel{GlmResp{Array{Float64,1},Binomial{Float64},ProbitLink},DensePredChol{Float64,LinearAlgebra.Cholesky{Float64,Array{Float64,2}}}},Array{Float64,2}}

Formula: y ~ 1 + x1 + x2

Coefficients:
               Estimate  Std.Error    z value  Pr(>|z|)
(Intercept)    0.374207   0.897261   0.417054    0.6766
x1           -0.0722244   0.142676  -0.506213    0.6127
x2             0.157343   0.584762   0.269072    0.7879

julia> glm( @formula( y ~ x1 + x2 ), df, Binomial(), LogitLink() )  ## f(mu_Y) = log(P/(1-P))
StatsModels.DataFrameRegressionModel{GeneralizedLinearModel{GlmResp{Array{Float64,1},Binomial{Float64},LogitLink},DensePredChol{Float64,LinearAlgebra.Cholesky{Float64,Array{Float64,2}}}},Array{Float64,2}}

Formula: y ~ 1 + x1 + x2

Coefficients:
              Estimate  Std.Error    z value  Pr(>|z|)
(Intercept)   0.596395    1.44895   0.411604    0.6806
x1           -0.114614   0.230644  -0.496932    0.6192
x2            0.251826   0.938053   0.268456    0.7883