This webpage is for programmers who need examples of use of the functions of the module. The examples are designed to illustrate the syntax; they do not correspond to any meaningful model. For examples of models, visit biogeme.epfl.ch.
# Record when the examples were run, and with which version of Biogeme.
import datetime
print(datetime.datetime.now())
import biogeme.version as ver
print(ver.getText())
import biogeme.optimization as opt
import numpy as np
import biogeme.messaging as msg
# Request a detailed level of reporting.
logger = msg.bioMessage()
logger.setDetailed()
A = np.array([[ 0.3571, -0.1030,  0.0274, -0.0459],
              [-0.1030,  0.2525,  0.0736, -0.3845],
              [ 0.0274,  0.0736,  0.2340, -0.2878],
              [-0.0459, -0.3845, -0.2878,  0.5549]])
A
L, E, P = opt.schnabelEskow(A)
L
The factor $L$ is such that $A + E = P L L^T P^T$. Therefore, the expression below should be the null matrix, up to numerical rounding.
P @ L @ L.T @ P.T - E - A
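As a quick numerical check of the same identity (a sketch relying only on numpy, with the default tolerances of allclose):
# The reconstruction should match A + E up to floating-point rounding.
print(np.allclose(P @ L @ L.T @ P.T, A + E))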
A2 = np.array([[ 1890.3, -1705.6,  -315.8,  3000.3],
               [-1705.6,  1538.3,   284.9, -2706.6],
               [ -315.8,   284.9,    52.5,  -501.2],
               [ 3000.3, -2706.6,  -501.2,  4760.8]])
A2
L, E, P = opt.schnabelEskow(A2)
L
The factor $L$ is such that $A2 + E = P L L^T P^T$. Therefore, the expression below should be the null matrix, up to numerical rounding.
P @ L @ L.T @ P.T - E - A2
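If $A2$ is not positive definite, the perturbation $E$ is nonzero; in all cases, $A2 + E$ must be positive semidefinite. A sketch of a check, inspecting the eigenvalues with numpy's routine for symmetric matrices:
# All eigenvalues of the perturbed matrix should be nonnegative (up to rounding).
print(np.linalg.eigvalsh(A2 + E))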
def rosenbrock(x, hessian=False):
    """Rosenbrock function, with its analytical gradient and, optionally, its Hessian."""
    n = len(x)
    f = sum(100.0 * (x[i + 1] - x[i]**2)**2 + (1.0 - x[i])**2 for i in range(n - 1))
    g = np.zeros(n)
    for i in range(n - 1):
        g[i] += -400 * x[i] * (x[i + 1] - x[i]**2) - 2 * (1 - x[i])
        g[i + 1] += 200 * (x[i + 1] - x[i]**2)
    if hessian:
        H = np.zeros((n, n))
        for i in range(n - 1):
            H[i, i] += -400 * x[i + 1] + 1200 * x[i]**2 + 2
            H[i + 1, i] += -400 * x[i]
            H[i, i + 1] += -400 * x[i]
            H[i + 1, i + 1] += 200
        return f, g, H
    return f, g, None
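Before feeding rosenbrock to the optimization routines, it is worth verifying the analytical gradient against finite differences. A minimal sketch; the helper check_gradient and the step size 1.0e-6 are illustrative choices, not part of biogeme:
def check_gradient(fct, x, eps=1.0e-6):
    """Maximum discrepancy between the analytical gradient and central differences."""
    _, g, _ = fct(x)
    g_num = np.zeros_like(g)
    for i in range(len(x)):
        e = np.zeros_like(x)
        e[i] = eps
        # Central difference approximation of the i-th partial derivative.
        g_num[i] = (fct(x + e)[0] - fct(x - e)[0]) / (2 * eps)
    return np.max(np.abs(g - g_num))

print(check_gradient(rosenbrock, np.array([-1.5, 1.5])))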
x = np.array([-1.5, 1.5])
def function(x):
    """Wrapper around rosenbrock that returns only the value and the gradient."""
    f, g, _ = rosenbrock(x)
    return f, g
f, g = function(x)
alpha, nfev = opt.lineSearch(function, x, -g)
print(f"alpha={alpha} nfev={nfev}")
x0 = np.array([-1.5, 1.5])
# The last returned value is named message, not msg, to avoid shadowing
# the biogeme.messaging alias imported above.
xstar, nit, nfev, message = opt.newtonLineSearch(rosenbrock, x0)
xstar
print(f"nit={nit} nfev={nfev} {message}")
import pandas as pd
import biogeme.biogeme as bio
import biogeme.database as db
from biogeme.expressions import Beta, Variable, bioLogLogit
df = pd.DataFrame({'Person': [1, 1, 1, 2, 2],
                   'Exclude': [0, 0, 1, 0, 1],
                   'Variable1': [1, 2, 3, 4, 5],
                   'Variable2': [10, 20, 30, 40, 50],
                   'Choice': [1, 2, 3, 1, 2],
                   'Av1': [0, 1, 1, 1, 1],
                   'Av2': [1, 1, 1, 1, 1],
                   'Av3': [0, 1, 1, 1, 1]})
myData = db.Database('test', df)
Choice = Variable('Choice')
Variable1 = Variable('Variable1')
Variable2 = Variable('Variable2')
beta1 = Beta('beta1', 0, None, None, 0)
beta2 = Beta('beta2', 0, None, None, 0)
V1 = beta1 * Variable1
V2 = beta2 * Variable2
V3 = 0
V = {1: V1, 2: V2, 3: V3}
likelihood = bioLogLogit(V, av=None, choice=Choice)
myBiogeme = bio.BIOGEME(myData, likelihood)
myBiogeme.modelName = 'simpleExample'
print(myBiogeme)
First, we use the default optimization algorithm, from scipy. We include an irrelevant parameter to illustrate the warning that is issued.
results = myBiogeme.estimate(algoParameters={'myparam':3})
results.getEstimatedParameters()
print(f"nit={results.data.numberOfIterations} nfev={results.data.numberOfFunctionEval}: {results.data.optimizationMessage}")
results = myBiogeme.estimate(algorithm=opt.newtonLineSearchForBiogeme)
results.getEstimatedParameters()
print(f"nit={results.data.numberOfIterations} nfev={results.data.numberOfFunctionEval}: {results.data.optimizationMessage}")
Changing the requested precision
results = myBiogeme.estimate(algorithm=opt.newtonLineSearchForBiogeme,algoParameters = {'tolerance': 0.1})
results.getEstimatedParameters()
print(f"nit={results.data.numberOfIterations} nfev={results.data.numberOfFunctionEval}: {results.data.optimizationMessage}")
CFSQP algorithm, selected by passing algorithm=None.
results = myBiogeme.estimate(algorithm=None)
results.getEstimatedParameters()
print(f"nit={results.data.numberOfIterations} nfev={results.data.numberOfFunctionEval}: {results.data.optimizationMessage}")
x0 = np.array([-1.5, 1.5])
xstar, nit, nfev, message = opt.newtonTrustRegion(rosenbrock, x0)
xstar
print(f"nit={nit} nfev={nfev} {message}")
trBiogeme = bio.BIOGEME(myData,likelihood)
results = trBiogeme.estimate(algorithm=opt.newtonTrustRegionForBiogeme)
results.getEstimatedParameters()
print(f"nit={results.data.numberOfIterations} nfev={results.data.numberOfFunctionEval}: {results.data.optimizationMessage}")
We now illustrate the parameters: we use the truncated conjugate gradient instead of dogleg for the trust region subproblem, start with a small trust region of radius 0.001, and allow a maximum of 3 iterations.
results = trBiogeme.estimate(algorithm=opt.newtonTrustRegionForBiogeme,algoParameters={'dogleg':False,'radius':0.001,'maxiter':3})
results.getEstimatedParameters()
print(f"nit={results.data.numberOfIterations} nfev={results.data.numberOfFunctionEval}: {results.data.optimizationMessage}")
Changing the requested precision
results = trBiogeme.estimate(algorithm=opt.newtonTrustRegionForBiogeme,algoParameters={'tolerance':0.1})
results.getEstimatedParameters()
print(f"nit={results.data.numberOfIterations} nfev={results.data.numberOfFunctionEval}: {results.data.optimizationMessage}")