I know the original post was quite a while ago, but curiosity got the best of me… Another approach to Alexander’s question.

Note that if c is known, the values for a & b can be determined by linear regression. Therefore, this could be attacked as a minimization function in the single variable c. I wrote a program (below) to test if this would really work. The coding is a bit more elaborate than it needs to be, but was helpful to me for clarity. I also changed from double[] to vectors in the middle and there is still at least one place where it could be done better. But it does seem to work about as well as the program above, and there was no need to do any calculus for gradients.

As I indicated above, I don’t think least squares is a good approach for fitting this function. It seems a bit unstable even with my test data, which has a fairly large number of observations (100) and a very moderate noise level.

```
/// <summary>
/// Demo: fit y = a + b*exp(c*x) by profiling out the linear parameters.
/// For any fixed c, a and b follow from linear regression (see LogLin.cSolve),
/// so the fit reduces to a one-dimensional minimization over c alone.
/// </summary>
private void btnLogFit5_Click(object sender, EventArgs e)
{
    Random ranGen = new Random();
    int n = 100; // number of observations

    // True parameters of y = a + b*exp(c*x) used to synthesize the data set.
    double a = 100.0;
    double b = 0.5;
    double c = 0.05;

    // Create the data set.
    Vector<double> x = new DenseVector(new double[n]);
    Vector<double> y = new DenseVector(new double[n]);
    for (int i = 0; i < n; i++)
    {
        x[i] = 10 + Convert.ToDouble(i) * 90.0 / 99.0; // values span 10 to 100
    }
    for (int i = 0; i < n; i++)
    {
        double yVal = a + b * Math.Exp(c * x[i]);
        // NOTE(review): the error term is U[0,1) * y — strictly positive and up
        // to 100% of the signal, i.e. biased upward. Kept as-is to preserve the
        // original experiment, but it likely contributes to the instability
        // discussed in the surrounding text.
        y[i] = yVal + ranGen.NextDouble() * yVal;
    }

    LogLin ll = new LogLin(x, y); // create object with data
    // f0: best-fit (a, b) for a given c, returned as the vector [a, b, c].
    var f0 = new Func<double, Vector<double>>((c0) => ll.cSolve(c0));
    // f1: model predictions at parameter vector p = [a, b, c].
    var f1 = new Func<Vector<double>, Vector<double>>((p) => x.Map(z => p[0] + p[1] * Math.Exp(p[2] * z)));
    // f2: sum of squared deviations of the predictions from the observed y.
    var f2 = new Func<Vector<double>, double>((z) => MathNet.Numerics.Distance.SSD(f1(z), y));
    // f: the profiled objective in the single variable c.
    var f = new Func<double, double>((c0) => f2(f0(c0)));

    // Coarse grid search over c (0.01 .. 0.14 in steps of 0.01) to bracket the
    // minimum. An integer loop index avoids the floating-point drift of
    // repeatedly adding 0.01, which made the original endpoint fragile.
    Console.WriteLine("Graph of Objective function");
    double cMin = double.NaN;
    // Was initialized to 1000000: if every objective value exceeded that, the
    // minimizer would have been handed a meaningless bracket near c = 1e6.
    double fMin = double.PositiveInfinity;
    for (int k = 1; k < 15; k++)
    {
        double c0 = k * 0.01;
        double fEst = f(c0);
        if (fEst < fMin)
        {
            cMin = c0;
            fMin = fEst;
        }
        Console.WriteLine(c0.ToString() + ", " + fEst.ToString());
    }
    if (double.IsNaN(cMin))
    {
        // Objective never produced a comparable (non-NaN) value.
        Console.WriteLine("Grid search failed to bracket a minimum.");
        return;
    }

    // Refine within +/- two grid steps of the best grid point.
    var obj = ObjectiveFunction.ScalarValue(f);
    var r1 = GoldenSectionMinimizer.Minimum(obj, Math.Max(cMin - 0.02, 0.001), cMin + 0.02);
    Console.WriteLine("Iterations: " + r1.Iterations.ToString());
    Console.WriteLine("Solution: " + f0(r1.MinimizingPoint).ToString());
}
/// <summary>
/// Helper for fitting y = a + b*exp(c*x): for any fixed c the remaining
/// parameters a and b enter linearly, so they can be recovered by ordinary
/// least squares against the single regressor exp(c*x).
/// </summary>
class LogLin
{
    // Observed data; assigned once at construction and never modified.
    // (The original also carried private fields a, b, c that were set to 0
    // in the constructor and never read — dead state, removed.)
    private readonly Vector<double> x;
    private readonly Vector<double> y;

    /// <summary>Stores the observation vectors to regress against.</summary>
    /// <param name="x_in">Abscissa values.</param>
    /// <param name="y_in">Observed ordinate values, same length as x_in.</param>
    public LogLin(Vector<double> x_in, Vector<double> y_in)
    {
        x = x_in;
        y = y_in;
    }

    /// <summary>
    /// Solves for the linear parameters given a fixed exponent c.
    /// </summary>
    /// <param name="c">Exponential rate to condition on.</param>
    /// <returns>[a, b, c] where a is the intercept and b the coefficient of exp(c*x).</returns>
    public Vector<double> cSolve(double c)
    {
        // Design matrix: one row per observation, single regressor exp(c * x_i).
        double[][] mx = new double[x.Count][];
        for (int i = 0; i < x.Count; i++)
        {
            mx[i] = new double[] { Math.Exp(c * x[i]) };
        }

        // With intercept: true, NormalEquations returns [intercept, coefficient].
        double[] rr = MultipleRegression.NormalEquations(mx, y.ToArray(), intercept: true);
        return new DenseVector(new[] { rr[0], rr[1], c });
    }
}
```