# Example: fit y = 1*x + 0.1*x**2 + 10 by ordinary least squares (OLS)
# on simulated data and print the regression summary.
import numpy as np
import statsmodels.api as sm

nsample = 100  # number of observations

x = np.linspace(0, 10, nsample)

# Design matrix: regressors x and x**2, with the intercept column APPENDED
# last so column order matches beta below. Older statsmodels appended the
# constant by default; modern versions prepend it, so be explicit.
X = sm.add_constant(np.column_stack((x, x**2)), prepend=False)

# True parameters: [coef on x, coef on x**2, intercept].
beta = np.array([1, 0.1, 10])

# Response with unit-variance Gaussian noise. NOTE: no seed is set, so the
# summary statistics below will differ slightly between runs.
y = np.dot(X, beta) + np.random.normal(size=nsample)

results = sm.OLS(y, X).fit()
print(results.summary())  # print() call: valid on both Python 2 and 3
OLS Regression Results
==============================================================================
Dep. Variable: y R-squared: 0.976
Model: OLS Adj. R-squared: 0.976
Method: Least Squares F-statistic: 1980.
Date: Sat, 02 Jun 2012 Prob (F-statistic): 2.30e-79
Time: 19:17:16 Log-Likelihood: -133.06
No. Observations: 100 AIC: 272.1
Df Residuals: 97 BIC: 279.9
Df Model: 2
==============================================================================
coef std err t P>|t| [95.0% Conf. Int.]
------------------------------------------------------------------------------
x1 1.0946 0.126 8.664 0.000 0.844 1.345
x2 0.0898 0.012 7.343 0.000 0.066 0.114
const 9.9258 0.273 36.311 0.000 9.383 10.468
==============================================================================
Omnibus: 4.394 Durbin-Watson: 2.601
Prob(Omnibus): 0.111 Jarque-Bera (JB): 2.583
Skew: -0.167 Prob(JB): 0.275
Kurtosis: 2.287 Cond. No. 144.
==============================================================================