This article collects typical usage examples of the Adaline class from Python's mlxtend.classifier module. If you are unsure what the Adaline class does or how to use it, the curated examples below should help.
Nineteen code examples of the Adaline class are shown below, ordered roughly by popularity.
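Most of the test snippets below reference module-level fixtures (X, y, X_std, y0, y1, y2) that are defined at the top of mlxtend's test_adaline.py but were not captured here. The following is only a sketch of what that setup looks like, inferred from Example 18 and from the tests themselves; the exact fixture definitions (in particular y2) are assumptions, not the verbatim test code.

import numpy as np
from mlxtend.data import iris_data
from mlxtend.classifier import Adaline   # used by every snippet below

# Sketch of the shared fixtures (inferred, not the verbatim test setup):
X, y = iris_data()
X = X[0:100, [0, 3]]                 # first two Iris classes, two features
y = y[0:100]                         # labels 0 and 1

X_std = np.copy(X)                   # standardized copy of the features
X_std[:, 0] = (X[:, 0] - X[:, 0].mean()) / X[:, 0].std()
X_std[:, 1] = (X[:, 1] - X[:, 1].mean()) / X[:, 1].std()

y0 = y                               # labels encoded as {0, 1}
y1 = np.where(y == 0, -1, 1)         # labels encoded as {-1, 1}
y2 = y + 3                           # assumed: labels outside the accepted set,
                                     # used to provoke the ValueError in Example 7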
Example 1: test_print_progress_2

def test_print_progress_2():
    ada = Adaline(epochs=30,
                  eta=0.01,
                  minibatches=1,
                  print_progress=2,
                  random_seed=1)
    ada.fit(X_std, y1)

Source: rasbt/mlxtend, test_adaline.py
Example 2: test_gradient_descent

def test_gradient_descent():
    t1 = np.array([-5.21e-16, -7.86e-02, 1.02e+00])
    ada = Adaline(epochs=30, eta=0.01, learning='gd', random_seed=1)
    ada.fit(X_std, y1)
    np.testing.assert_almost_equal(ada.w_, t1, 2)
    assert (y1 == ada.predict(X_std)).all()

Source: Afey/mlxtend, test_adaline.py
Example 3: test_0_1_class

def test_0_1_class():
    t1 = np.array([0.51, -0.04, 0.51])
    ada = Adaline(epochs=30, eta=0.01, learning='sgd', random_seed=1)
    ada.fit(X_std, y0)
    np.testing.assert_almost_equal(ada.w_, t1, 2)
    assert (y0 == ada.predict(X_std)).all()

Source: Afey/mlxtend, test_adaline.py
Example 4: test_stochastic_gradient_descent

def test_stochastic_gradient_descent():
    t1 = np.array([0.03, -0.09, 1.02])
    ada = Adaline(epochs=30, eta=0.01, learning='sgd', random_seed=1)
    ada.fit(X_std, y1)
    np.testing.assert_almost_equal(ada.w_, t1, 2)
    assert (y1 == ada.predict(X_std)).all()

Source: Afey/mlxtend, test_adaline.py
Example 5: test_ary_persistency_in_shuffling

def test_ary_persistency_in_shuffling():
    orig = X_std.copy()
    ada = Adaline(epochs=30,
                  eta=0.01,
                  minibatches=len(y),
                  random_seed=1)
    ada.fit(X_std, y1)
    np.testing.assert_almost_equal(orig, X_std, 6)

Source: blahblueray/mlxtend, test_adaline.py
Example 6: test_score_function

def test_score_function():
    ada = Adaline(epochs=30,
                  eta=0.01,
                  minibatches=1,
                  random_seed=1)
    ada.fit(X_std, y1)
    acc = ada.score(X_std, y1)
    assert acc == 1.0, acc

Source: rasbt/mlxtend, test_adaline.py
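The score call above (the same pattern appears again in Example 10) behaves like plain classification accuracy on this perfectly separable subset. A hand-rolled check, assuming score is mean prediction accuracy and reusing the ada fitted in Example 6, is a sketch rather than mlxtend's internal code:

import numpy as np

# Manual accuracy; should agree with ada.score(X_std, y1) if score is
# plain classification accuracy (assumption, see the note above).
manual_acc = np.mean(ada.predict(X_std) == y1)
assert manual_acc == 1.0, manual_acc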
Example 7: test_invalid_class

def test_invalid_class():
    ada = Adaline(epochs=40, eta=0.01, random_seed=1)
    try:
        ada.fit(X, y2)  # y2 is not a valid 0/1 class encoding
        assert False, 'expected a ValueError'
    except ValueError:
        pass

Source: Afey/mlxtend, test_adaline.py
Example 8: test_stochastic_gradient_descent

def test_stochastic_gradient_descent():
    t1 = np.array([[-0.08], [1.02]])
    ada = Adaline(epochs=30,
                  eta=0.01,
                  minibatches=len(y),
                  random_seed=1)
    ada.fit(X_std, y1)
    np.testing.assert_almost_equal(ada.w_, t1, 2)
    assert (y1 == ada.predict(X_std)).all()

Source: rasbt/mlxtend, test_adaline.py
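Across these snippets the minibatches argument is what selects the update scheme, matching the inline comments in Example 18 further below. A hedged summary, inferred from the examples themselves rather than from mlxtend's documentation:

from mlxtend.classifier import Adaline

# How the examples in this article configure the optimizer via `minibatches`
# (inferred from the snippets; check mlxtend's docs for the definitive rules):
ada_gd = Adaline(epochs=30, eta=0.01, minibatches=1, random_seed=1)        # full-batch gradient descent
ada_sgd = Adaline(epochs=30, eta=0.01, minibatches=len(y1), random_seed=1)  # stochastic gradient descent
ada_ne = Adaline(epochs=30, eta=0.01, minibatches=None, random_seed=1)      # closed-form (normal equation) fit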
Example 9: test_normal_equation

def test_normal_equation():
    t1 = np.array([-5.21e-16, -7.86e-02, 1.02e+00])
    ada = Adaline(epochs=30,
                  eta=0.01,
                  minibatches=None,
                  random_seed=1)
    ada.fit(X_std, y1)
    np.testing.assert_almost_equal(ada.w_, t1, 2)
    assert (y1 == ada.predict(X_std)).all()

Source: datasci-co/mlxtend, test_adaline.py
Example 10: test_score_function

def test_score_function():
    t1 = np.array([-5.21e-16, -7.86e-02, 1.02e+00])
    ada = Adaline(epochs=30,
                  eta=0.01,
                  minibatches=1,
                  random_seed=1)
    ada.fit(X_std, y1)
    acc = ada.score(X_std, y1)
    assert acc == 1.0, acc

Source: GQiuQi/mlxtend, test_adaline.py
Example 11: test_standardized_iris_data_with_shuffle

def test_standardized_iris_data_with_shuffle():
    t1 = np.array([-5.21e-16, -7.86e-02, 1.02e+00])
    ada = Adaline(epochs=30,
                  eta=0.01,
                  solver='gd',
                  random_seed=1,
                  shuffle=True)
    ada.fit(X_std, y1)
    np.testing.assert_almost_equal(ada.w_, t1, 2)
    assert (y1 == ada.predict(X_std)).all()

Source: beingzy/mlxtend, test_adaline.py
Example 12: test_standardized_iris_data_with_zero_weights

def test_standardized_iris_data_with_zero_weights():
    t1 = np.array([-5.21e-16, -7.86e-02, 1.02e+00])
    ada = Adaline(epochs=30,
                  eta=0.01,
                  minibatches=1,
                  random_seed=1,
                  zero_init_weight=True)
    ada.fit(X_std, y1)
    np.testing.assert_almost_equal(ada.w_, t1, 2)
    assert (y1 == ada.predict(X_std)).all()

Source: datasci-co/mlxtend, test_adaline.py
Example 13: test_gradient_descent

def test_gradient_descent():
    t1 = np.array([[-0.08], [1.02]])
    b1 = np.array([0.00])
    ada = Adaline(epochs=30,
                  eta=0.01,
                  minibatches=1,
                  random_seed=1)
    ada.fit(X_std, y1)
    np.testing.assert_almost_equal(ada.w_, t1, decimal=2)
    np.testing.assert_almost_equal(ada.b_, b1, decimal=2)
    assert (y1 == ada.predict(X_std)).all()

Source: rasbt/mlxtend, test_adaline.py
Example 14: test_normal_equation

def test_normal_equation():
    t1 = np.array([[-0.08], [1.02]])
    b1 = np.array([0.00])
    ada = Adaline(epochs=30,
                  eta=0.01,
                  minibatches=None,
                  random_seed=None)
    ada.fit(X_std, y1)
    np.testing.assert_almost_equal(ada.w_, t1, decimal=2)
    np.testing.assert_almost_equal(ada.b_, b1, decimal=2)
    assert (y1 == ada.predict(X_std)).all(), ada.predict(X_std)

Source: rasbt/mlxtend, test_adaline.py
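With minibatches=None, the two normal-equation tests above expect essentially the same weights that gradient descent converges to, which is consistent with a direct least-squares fit. As an illustration of that closed-form idea in plain numpy (not mlxtend's actual implementation):

import numpy as np

# Least-squares fit with an explicit bias column: solves
# min ||[1, X_std] @ theta - y1||^2 in closed form.
Xb = np.hstack([np.ones((X_std.shape[0], 1)), X_std])
theta, *_ = np.linalg.lstsq(Xb, y1, rcond=None)
b_closed, w_closed = theta[0], theta[1:]
# b_closed and w_closed should be close to ada.b_ and ada.w_.ravel() above.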
Example 15: test_refit_weights

def test_refit_weights():
    t1 = np.array([-5.21e-16, -7.86e-02, 1.02e+00])
    ada = Adaline(epochs=15, eta=0.01, solver='gd', random_seed=1)
    ada.fit(X_std, y1, init_weights=True)
    ada.fit(X_std, y1, init_weights=False)
    np.testing.assert_almost_equal(ada.w_, t1, 2)
    assert (y1 == ada.predict(X_std)).all()

Source: beingzy/mlxtend, test_adaline.py
Example 16: test_refit_weights

def test_refit_weights():
    t1 = np.array([[-0.08], [1.02]])
    ada = Adaline(epochs=15,
                  eta=0.01,
                  minibatches=1,
                  random_seed=1)
    ada.fit(X_std, y1, init_params=True)
    ada.fit(X_std, y1, init_params=False)
    np.testing.assert_almost_equal(ada.w_, t1, 2)
    assert (y1 == ada.predict(X_std)).all()

Source: rasbt/mlxtend, test_adaline.py
Example 17: test_invalid_solver

def test_invalid_solver():
    t1 = np.array([-5.21e-16, -7.86e-02, 1.02e+00])  # reference weights (unused in this snippet)
    # 'bla' is not a valid solver name, so fitting is expected to fail
    ada = Adaline(epochs=30, eta=0.01, solver='bla', random_seed=1)
    ada.fit(X_std, y1)

Source: beingzy/mlxtend, test_adaline.py
Example 18: iris_data

from mlxtend.data import iris_data   # needed for iris_data(); missing from the scraped snippet
from mlxtend.evaluate import plot_decision_regions
from mlxtend.classifier import Adaline
import matplotlib.pyplot as plt

X, y = iris_data()
X = X[:, [0, 3]]     # two of the four features
X = X[0:100]         # first two Iris classes only
y = y[0:100]

# standardize both features
X[:, 0] = (X[:, 0] - X[:, 0].mean()) / X[:, 0].std()
X[:, 1] = (X[:, 1] - X[:, 1].mean()) / X[:, 1].std()

# Closed Form Solution
ada = Adaline(epochs=30,
              eta=0.01,
              minibatches=None,
              random_seed=1)
ada.fit(X, y)
plot_decision_regions(X, y, clf=ada)
plt.title('Adaline - Closed Form Solution')
plt.show()

# (Stochastic) Gradient Descent
ada2 = Adaline(epochs=30,
               eta=0.01,
               # minibatches=len(y),  # len(y) for SGD learning
               # minibatches=5,       # SGD learning with minibatch size 20
               minibatches=1)         # 1 for GD learning
# The original snippet is truncated at this point; it presumably continues
# with ada2.fit(X, y) and a second plot_decision_regions(...) call.

Source: clover9gu/simplemining, line.py
Example 19: test_array_dimensions

def test_array_dimensions():
    ada = Adaline(epochs=15, eta=0.01, random_seed=1)
    ada = ada.fit(np.array([1, 2, 3]), [-1])

Source: datasci-co/mlxtend, test_adaline.py
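The snippet above feeds a one-dimensional array to fit, presumably to exercise mlxtend's input-dimension handling. If the intent is one sample with three features, reshaping the input into the (n_samples, n_features) layout is the unambiguous form; a small illustrative sketch that is not part of the original test:

import numpy as np

# One sample with three features, reshaped to the 2D layout expected by fit.
X_single = np.array([1, 2, 3]).reshape(1, -1)   # shape (1, 3)
y_single = np.array([-1])                       # one label for the one sample
# ada.fit(X_single, y_single) would then receive unambiguous 2D input.
print(X_single.shape, y_single.shape)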
Note: the mlxtend.classifier.Adaline examples in this article were compiled by 纯净天空 from GitHub, MSDocs, and other source-code and documentation platforms. The snippets were selected from open-source projects contributed by various developers, and copyright remains with the original authors; please consult each project's license before using or redistributing the code, and do not reproduce this article without permission.