# 【9】例子--1--general--保序回归(Isotonic Regression)

## 二、算法过程说明

原始序列：<9, 10, 14>（序列已满足非降序，无需合并，保序回归结果不变：<9, 10, 14>）



原始序列：<9, 14, 10>（14 > 10 违反保序约束，将 14 与 10 合并取平均 12，结果：<9, 12, 12>）



原始序列：<14, 9, 10, 15>（14 > 9 违反保序约束，依次合并 14、9、10 取平均 (14+9+10)/3 = 11，结果：<11, 11, 11, 15>）



## 四、代码

print(__doc__)

# Author: Nelle Varoquaux <nelle.varoquaux@gmail.com>
#         Alexandre Gramfort <alexandre.gramfort@inria.fr>

import numpy as np
import matplotlib.pyplot as plt
from matplotlib.collections import LineCollection

from sklearn.linear_model import LinearRegression
from sklearn.isotonic import IsotonicRegression
from sklearn.utils import check_random_state

# Synthetic data: an increasing log trend plus uniform integer noise in
# [-50, 50), so the signal is monotone "on average" but not point-wise.
n = 100
x = np.arange(n)
rs = check_random_state(0)  # reproducible RNG
y = rs.randint(-50, 50, size=(n,)) + 50. * np.log(1 + np.arange(n))

# Non-parametric monotone (non-decreasing) fit.
ir = IsotonicRegression()
y_ = ir.fit_transform(x, y)

# Ordinary least-squares fit for comparison.
lr = LinearRegression()
lr.fit(x[:, np.newaxis], y)  # x needs to be 2d for LinearRegression

# One vertical segment per sample connecting the raw point (i, y[i])
# to its isotonic estimate (i, y_[i]).
segments = [[[i, y[i]], [i, y_[i]]] for i in range(n)]
lc = LineCollection(segments, zorder=0)
lc.set_array(np.ones(len(y)))
lc.set_linewidths(0.5 * np.ones(n))

fig = plt.figure()
plt.plot(x, y, 'r.', markersize=12)
plt.plot(x, y_, 'g.-', markersize=12)
plt.plot(x, lr.predict(x[:, np.newaxis]), 'b-')
# BUGFIX: the segment collection was built but never attached to the axes,
# so the vertical residual lines did not render. Add it explicitly.
plt.gca().add_collection(lc)
plt.legend(('Data', 'Isotonic Fit', 'Linear Fit'), loc='lower right')
plt.title('Isotonic regression')
plt.savefig('isotonic_regression',dpi=600)
plt.show()


## 五、撸代码

1.rs = check_random_state(0)

2.rs.randint(-50,50,size=(n,3))

[[ 41 -10 -14]
[ -2 -25 17]
[-15 -20 -21]
[-17 -32 -33]]

3. x[:,np.newaxis]

[[94] [95] [96] [97] [98] [99]]

type(np.newaxis)
NoneType


np.newaxis 在使用和功能上等价于 None，其实就是 None 的一个别名。 为numpy.ndarray（多维数组）增加一个轴

4.算法是个毛？

ir = IsotonicRegression()
y_ = ir.fit_transform(x, y)

lr = LinearRegression()
lr.fit(x[:, np.newaxis], y)
lr.predict(x[:, np.newaxis])


5.segments那部分没看明白

http://blog.csdn.net/bea_tree/article/details/51009810

http://www.cnblogs.com/lc1217/p/7015639.html

http://scikit-learn.org/stable/auto_examples/plot_isotonic_regression.html#sphx-glr-auto-examples-plot-isotonic-regression-py