01-08 Optimizers: Building a Parameterized Model

2019-06-01  非常暴龙兽

Optimizers: building a parameterized model

Optimizer:

Steps:

  1. provide a function to minimize, like f(x) = x^2 + 5
  2. provide an initial guess
  3. call the optimizer (a minimal sketch follows these steps)
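
A minimal sketch of those three steps, assuming only SciPy is installed (the function name g and the initial guess 3.0 are illustrative choices, not from the lesson):

import scipy.optimize as spo

def g(x):
    return x ** 2 + 5  # step 1: the function to minimize

# step 2: provide an initial guess; step 3: call the optimizer
result = spo.minimize(g, 3.0, method='SLSQP')
print("minimum at x = {}, g(x) = {}".format(result.x, result.fun))
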
# Minimizer in Python
# Minimize an objective function, using SciPy
import pandas as pd
import matplotlib.pyplot as plt
import numpy as np
import scipy.optimize as spo

def f(X):
    """Given a scalar X, return some value (a real number)."""
    Y = (X - 1.5) ** 2 + 0.5
    print("X = {}, Y = {}".format(X, Y))  # for tracing
    return Y

def test_run():
    Xguess = 2.0
    min_result = spo.minimize(f, Xguess, method='SLSQP', options={'disp': True})
    print("Minima found at:")
    print("X = {}, Y = {}".format(min_result.x, min_result.fun))

    # Plot function values, mark minima
    Xplot = np.linspace(0.5, 2.5, 21)
    Yplot = f(Xplot)
    plt.plot(Xplot, Yplot)
    plt.plot(min_result.x, min_result.fun, 'ro')
    plt.title("Minima of an objective function")
    plt.show()

if __name__ == '__main__':
    test_run()

Convex problems (convex):

"Draw a line segment between any two points on the function's graph; for a convex function, that segment lies on or above the graph." Such a function has no separate local minima, which is why it is the easy case for a minimizer like the one above.
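
As a quick numerical illustration of that chord property (a sketch only; the objective is the one from the example above, and the interval [0, 2.5] is my own illustrative choice):

import numpy as np

def f(x):
    return (x - 1.5) ** 2 + 0.5  # the convex objective from the first example

x1, x2 = 0.0, 2.5  # two points on the X-axis
for t in np.linspace(0.0, 1.0, 11):
    xm = t * x1 + (1 - t) * x2           # a point between x1 and x2
    chord = t * f(x1) + (1 - t) * f(x2)  # height of the line segment at xm
    assert f(xm) <= chord + 1e-12        # convexity: the graph never rises above the chord
print("The segment between (x1, f(x1)) and (x2, f(x2)) stays on or above the graph.")
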
Building a parameterized model
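
The optimizer in the code below does not fit the line directly; it minimizes an error function over the line's two parameters. With C0 the slope and C1 the Y-intercept, the error metric is the sum of squared vertical differences between the observed points (x_i, y_i) and the line's predictions:

    error(C0, C1) = sum_i ( y_i - (C0 * x_i + C1) )^2

fit_line hands this function to spo.minimize, which varies (C0, C1) until the error is as small as it can make it.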

# Fit a line to a given set of data points using optimization.
import pandas as pd
import matplotlib.pyplot as plt
import numpy as np
import scipy.optimize as spo

def error(line, data):
    """Compute error between given line model and observed data.

    Parameters
    ----------
    line: tuple/list/array (C0, C1) where C0 is slope and C1 is Y-intercept.
    data: 2D array where each row is a point (X, Y)

    Returns error as a single real value.
    """
    # Metric: Sum of squared Y-axis differences
    err = np.sum((data[:, 1] - (line[0] * data[:, 0] + line[1])) ** 2)
    return err

def fit_line(data, error_func):
    """Fit a line to given data, using a supplied error function.

    Parameters
    ----------
    data: 2D array where each row is a point (X, Y)
    error_func: function that computes the error between a line and observed data

    Returns line that minimizes the error function.
    """
    # Generate initial guess for line model: slope = 0, intercept = mean(y values)
    line = np.float32([0, np.mean(data[:, 1])])

    # Plot initial guess (optional)
    x_ends = np.float32([-5, 5])
    plt.plot(x_ends,
             line[0] * x_ends + line[1],
             'm--',
             linewidth=2.0,
             label="Initial guess")

    # Call optimizer to minimize error function
    result = spo.minimize(error_func,
                          line,
                          args=(data,),
                          method='SLSQP',
                          options={'disp': True})
    return result.x

def test_run():
    # Define original line
    l_orig = np.float32([4, 2])
    print("Original line: C0 = {}, C1 = {}".format(l_orig[0], l_orig[1]))
    Xorig = np.linspace(0, 10, 21)
    Yorig = l_orig[0] * Xorig + l_orig[1]
    plt.plot(Xorig, Yorig, 'b--', linewidth=2.0, label="Original line")

    # Generate noisy data points (add some noise to the line)
    noise_sigma = 3.0
    noise = np.random.normal(0, noise_sigma, Yorig.shape)
    data = np.asarray([Xorig, Yorig + noise]).T
    plt.plot(data[:, 0], data[:, 1], 'go', label="Data points")

    # Try to fit a line to this data
    l_fit = fit_line(data, error)
    print("Fitted line: C0 = {}, C1 = {}".format(l_fit[0], l_fit[1]))
    plt.plot(data[:, 0],
             l_fit[0] * data[:, 0] + l_fit[1],
             'r--',
             linewidth=2.0,
             label="Fitted line")
    plt.legend(loc='upper left')
    plt.show()
def error_poly(C, data):
    """Compute error between given polynomial and observed data.

    Parameters
    ----------
    C: numpy.poly1d object or equivalent array representing polynomial coefficients.
    data: 2D array where each row is a point (X, Y)

    Returns error as a single real value.
    """
    # Metric: Sum of squared Y-axis differences
    err = np.sum((data[:, 1] - np.polyval(C, data[:, 0])) ** 2)
    return err

def fit_poly(data, error_func, degree=3):
    """Fit a polynomial to given data, using a supplied error function.

    Parameters
    ----------
    data: 2D array where each row is a point (X, Y)
    error_func: function that computes the error between a polynomial and observed data.

    Returns polynomial that minimizes the error function.
    """
    # Generate initial guess for polynomial model (all coeffs = 1)
    Cguess = np.poly1d(np.ones(degree + 1, dtype=np.float32))

    # Plot initial guess (optional)
    x = np.linspace(-5, 5, 21)
    plt.plot(x,
             np.polyval(Cguess, x),
             'm--',
             linewidth=2.0,
             label="Initial guess")

    # Call optimizer to minimize error function
    result = spo.minimize(error_func,
                          Cguess,
                          args=(data,),
                          method='SLSQP',
                          options={'disp': True})
    return np.poly1d(result.x)  # convert optimal result into a poly1d object
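
The listing above defines fit_poly but never exercises it. A sketch of how it could be driven, assuming error_poly and fit_poly are in scope (e.g. appended to the same script); the cubic coefficients and noise level are illustrative choices, not from the original post:

import numpy as np
import matplotlib.pyplot as plt

C_orig = np.poly1d(np.float32([1.5, -10, -5, 60]))  # original cubic: 1.5x^3 - 10x^2 - 5x + 60
Xorig = np.linspace(-5, 5, 21)
Yorig = np.polyval(C_orig, Xorig)
noise = np.random.normal(0, 30.0, Yorig.shape)       # noise level chosen for illustration
data = np.asarray([Xorig, Yorig + noise]).T

C_fit = fit_poly(data, error_poly, degree=3)         # search over the 4 cubic coefficients
print("Fitted coefficients: {}".format(C_fit.coeffs))
plt.plot(Xorig, Yorig, 'b--', linewidth=2.0, label="Original polynomial")
plt.plot(data[:, 0], data[:, 1], 'go', label="Data points")
plt.plot(Xorig, np.polyval(C_fit, Xorig), 'r--', linewidth=2.0, label="Fitted polynomial")
plt.legend(loc='upper left')
plt.show()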