如何在Python中模拟Microsoft Excel的求解器功能(GRG非线性)?

0 投票
0 回答
42 浏览
提问于 2025-04-12 05:43

这是一个展示如何使用Excel中Solver功能的截图:

查看展示Excel中Solver功能的截图

我有一个任务需要自动化一个Excel工作表。这个工作表使用了一个叫做Solver的插件,它的逻辑是这样的:在$O$9单元格中有一个值(-1.95624),这个值是通过一些计算得出的,计算结果在图中用红色和蓝色标记出来。然后,Solver会根据这个值,使用一种叫做“GRG非线性”(广义简约梯度法,并非回归)的算法,返回三个值:C、B1和B2。我的任务是用Python来模拟这个逻辑。下面是我的尝试。主要的问题是,我得到的C、B1和B2的值和Excel的Solver插件计算出来的值不一样。

根据这些xData和yData的数据,正确的输出应该是: C= -2.35443383, B1 = -14.70820051, B2 = 0.0056217

这是我的第一次尝试:

import numpy, scipy, matplotlib
import pandas as pd
import matplotlib.pyplot as plt
from scipy.optimize import curve_fit
from scipy.optimize import differential_evolution
import warnings

# Observed data, hard-coded for this example.  In production these values are
# fetched from a SQL Server table — presumably the same table the stored
# procedure parameters refer to (TODO confirm against the caller).
xData = numpy.array([-2.59772914040242,-2.28665528866907,-2.29176070881848,-2.31163972446061,-2.28369414349715,-2.27911303233721,-2.28222332344644,-2.39089535619106,-2.32144325648778,-2.17235002006179,-2.22906032068685,-2.42044014499938,-2.71639505549322,-2.65462061336346,-2.47330475191616,-2.33132910807216,-2.33025978869114,-2.61175064230516,-2.92916553244925,-2.987503044973,-3.00367414706232,-1.45507812104723])  # independent variable samples
yData = numpy.array([0.0692847120775066,0.0922342111029099,0.0918076382491768,0.0901635409944003,0.0924824386284127,0.092867647175396,0.092605957740688,20.0838696111204451,0.0893625419994501,0.102261091024881,0.097171046758256,70.0816272542472914,0.0620128251290935,0.0657047909578125,0.0777509345715382,0.088561321341585,0.088647672874835,90.0683859871424735,0.0507304952495273,0.0479936476914665,0.0472601632188253,0.18922126828463])  # dependent variable samples; NOTE(review): the 20.08/70.08/90.07 entries look like data-entry artifacts among ~0.09 values — verify against the source table

def func(x, a, b, Offset):
    """Sigmoid-with-offset model ("Sigmoid A With Offset", zunzun.com).

    Evaluates 1 / (1 + exp(-a * (x - b))) + Offset, element-wise when
    x is a numpy array.
    """
    shifted = x - b
    return Offset + 1.0 / (1.0 + numpy.exp(-a * shifted))


# Objective minimised by the genetic algorithm: sum of squared residuals.
def sumOfSquaredError(parameterTuple):
    """Return the SSE of `func(xData, *parameterTuple)` against yData."""
    # Differential evolution probes extreme parameter values; silence the
    # overflow warnings those trials would otherwise print.
    warnings.filterwarnings("ignore")
    residuals = yData - func(xData, *parameterTuple)
    return numpy.sum(residuals ** 2.0)


def generate_Initial_Parameters():
    """Run a global pre-search (differential evolution) for curve_fit.

    Search bounds: `a` and `b` are confined to the observed x-range,
    `Offset` to [0, max(y)].  `seed=3` makes the stochastic search
    repeatable.  Returns the best parameter vector found.
    """
    x_lo, x_hi = min(xData), max(xData)
    y_hi = max(yData)

    parameterBounds = [
        (x_lo, x_hi),   # a
        (x_lo, x_hi),   # b
        (0.0, y_hi),    # Offset
    ]

    result = differential_evolution(sumOfSquaredError, parameterBounds, seed=3)
    return result.x

# Stage 1: genetic pre-search supplies a sensible starting point.
geneticParameters = generate_Initial_Parameters()

# Stage 2: local non-linear least squares refines it.
params, covariance = curve_fit(func, xData, yData, geneticParameters, maxfev=50000)

# numpy.float64 -> plain Python float so downstream consumers get builtins.
params = [float(p) for p in params]
C, B1, B2 = params
# NOTE(review): input_value_1 / input_value_2 are not defined in this snippet —
# presumably injected by SQL Server ML Services; confirm against the caller.
OutputDataSet = pd.DataFrame({"C": [C], "B1": [B1], "B2": [B2],"ProType":[input_value_1],"RegType":[input_value_2]})

有没有什么建议?谢谢!

这是我的第二次尝试:我改变了目标函数。

import numpy as np
import pandas as pd
from scipy.optimize import curve_fit

# Input data passed from SQL Server.  InputDataSet is injected by
# SQL Server ML Services at runtime; it is not defined in this script.
datasets = pd.DataFrame(InputDataSet)



def logistic_regression(x, C, B1, B2):
    """Three-parameter logistic curve: C / (1 + e^(-B1 * (x - B2))).

    C is the asymptote, B1 the steepness, B2 the midpoint.  Works
    element-wise when x is a numpy array.
    """
    z = B1 * (x - B2)
    return C / (1.0 + np.exp(-z))

def initial_coefficients(num_features, seed=None):
    """Draw a random starting point for the curve fit.

    Bug being addressed: the original drew from the *unseeded* global RNG,
    so every run started the optimiser somewhere else and the fitted
    C/B1/B2 were not reproducible — one reason the results never matched
    Excel Solver's.  Passing a seed makes the draw deterministic.

    Parameters
    ----------
    num_features : int
        Number of coefficients to generate (3 here: C, B1, B2).
    seed : int or None, optional
        Seed for a dedicated Generator.  None (default) keeps the original
        behaviour of using numpy's global RNG.

    Returns
    -------
    numpy.ndarray
        Shape (num_features,) standard-normal draws.
    """
    if seed is None:
        return np.random.randn(num_features)
    return np.random.default_rng(seed).standard_normal(num_features)


# Observations, hard-coded for this example (normally fetched from SQL Server).
x_data = np.array([-2.59772914040242,-2.28665528866907,-2.29176070881848,-2.31163972446061,-2.28369414349715,-2.27911303233721,-2.28222332344644,-2.39089535619106,-2.32144325648778,-2.17235002006179,-2.22906032068685,-2.42044014499938,-2.71639505549322,-2.65462061336346,-2.47330475191616,-2.33132910807216,-2.33025978869114,-2.61175064230516,-2.92916553244925,-2.987503044973,-3.00367414706232,-1.45507812104723])

y_data = np.array([0.0692847120775066,0.0922342111029099,0.0918076382491768,0.0901635409944003,0.0924824386284127,0.092867647175396,0.092605957740688,20.0838696111204451,0.0893625419994501,0.102261091024881,0.097171046758256,70.0816272542472914,0.0620128251290935,0.0657047909578125,0.0777509345715382,0.088561321341585,0.088647672874835,90.0683859871424735,0.0507304952495273,0.0479936476914665,0.0472601632188253,0.18922126828463])

# Random starting point for the optimiser.
initial_guess = initial_coefficients(3)

# Non-linear least-squares fit of the logistic model.
params, covariance = curve_fit(logistic_regression, x_data, y_data, p0=initial_guess, maxfev=5000)

# numpy.float64 -> plain Python float before unpacking.
params = [float(p) for p in params]
C, B1, B2 = params
# NOTE(review): input_value_1 / input_value_2 are not defined in this snippet —
# presumably injected by SQL Server ML Services; confirm against the caller.
OutputDataSet = pd.DataFrame({"C": [C], "B1": [B1], "B2": [B2],"ProType":[input_value_1],"RegType":[input_value_2]})

但仍然没有得到想要的结果: C= -2.35443383, B1 = -14.70820051, B2 = 0.0056217

0 个回答

暂无回答

撰写回答