This webpage is for programmers who need examples of use of the functions of the module. The examples are designed to illustrate the syntax; they do not correspond to any meaningful model. For examples of models, visit biogeme.epfl.ch.
# Record when this notebook was run.
import datetime
print(datetime.datetime.now())
# Display the version of Biogeme used to produce these examples.
import biogeme.version as ver
print(ver.getText())
# Module illustrated by this notebook: Biogeme's optimization algorithms.
import biogeme.optimization as opt
import numpy as np
# IPython magic: render matplotlib figures inline in the notebook.
%matplotlib inline
import matplotlib
import matplotlib.pyplot as plt
Define the verbosity of Biogeme
# Configure the verbosity of Biogeme's logger.
import biogeme.messaging as msg
logger = msg.bioMessage()
# Silent by default; uncomment one of the lines below for more output.
logger.setSilent()
#logger.setDetailed()
#logger.setDebug()
# A 4x4 symmetric matrix used to illustrate the modified Cholesky
# factorization of Schnabel and Eskow.
A = np.array([[0.3571,-0.1030,0.0274,-0.0459],[-0.1030,0.2525,0.0736,-0.3845],[0.0274,0.0736,0.2340,-0.2878],[-0.0459,-0.3845,-0.2878,0.5549]])
A
# Returns the triangular factor L, the diagonal perturbation E and the
# permutation P such that A + E = P L L^T P^T (see the check below).
L,E,P = opt.schnabelEskow(A)
L
The factor $L$ is such that $A + E = PLL^TP^T$. Therefore, the expression below should be the null matrix.
# Verification of the factorization: since A + E = P L L^T P^T, this
# expression should be (numerically) the null matrix.
P @ L @ L.T @ P.T - E - A
# A second symmetric matrix, with much larger entries than the first.
A2 = np.array([[1890.3,-1705.6,-315.8,3000.3],[-1705.6,1538.3,284.9,-2706.6],[-315.8,284.9,52.5,-501.2],[3000.3,-2706.6,-501.2,4760.8]])
A2
# Same factorization as above, on the second example.
L,E,P = opt.schnabelEskow(A2)
L
The factor $L$ is such that $A + E = PLL^TP^T$. Therefore, the expression below should be the null matrix.
# Verification: should be (numerically) the null matrix.
P @ L @ L.T @ P.T - E - A2
class rosenbrock(opt.functionToMinimize):
    """Rosenbrock test function for unconstrained optimization.

    f(x) = sum_{i=0}^{n-2} 100 (x_{i+1} - x_i^2)^2 + (1 - x_i)^2

    Implements Biogeme's ``functionToMinimize`` interface: the current
    point is stored with :meth:`setVariables`, and the value, gradient
    and Hessian are then queried separately.

    NOTE(review): the raises below use ``excep.biogemeError``; the file
    needs ``import biogeme.exceptions as excep`` (added to the import
    block), otherwise the raise path fails with a NameError.
    """

    def __init__(self):
        # Current point; must be set with setVariables before evaluating.
        self.x = None

    def setVariables(self, x):
        """Store the point at which the function will be evaluated."""
        self.x = x

    def f(self, batch=None):
        """Return the value of the function at the current point.

        :param batch: not supported; must be None.
        :raise excep.biogemeError: if a batch is requested, since this
            function is not data driven.
        """
        if batch is not None:
            raise excep.biogemeError('This function is not data driven.')
        n = len(self.x)
        return sum(
            100.0 * (self.x[i + 1] - self.x[i] ** 2) ** 2
            + (1.0 - self.x[i]) ** 2
            for i in range(n - 1)
        )

    def g(self):
        """Return the gradient of the function at the current point."""
        n = len(self.x)
        g = np.zeros(n)
        for i in range(n - 1):
            # Each term of the sum contributes to the two coordinates
            # it involves.
            g[i] += (
                -400 * self.x[i] * (self.x[i + 1] - self.x[i] ** 2)
                - 2 * (1 - self.x[i])
            )
            g[i + 1] += 200 * (self.x[i + 1] - self.x[i] ** 2)
        return g

    def h(self):
        """Return the (dense) Hessian of the function at the current point."""
        n = len(self.x)
        H = np.zeros((n, n))
        for i in range(n - 1):
            # Scalar indexing H[i, i] is the idiomatic (and equivalent)
            # form of the original fancy indexing H[[i], [i]].
            H[i, i] += -400 * self.x[i + 1] + 1200 * self.x[i] ** 2 + 2
            H[i + 1, i] += -400 * self.x[i]
            H[i, i + 1] += -400 * self.x[i]
            H[i + 1, i + 1] += 200
        return H

    def f_g(self, batch=None):
        """Return the function value and its gradient."""
        if batch is not None:
            raise excep.biogemeError('This function is not data driven.')
        return self.f(), self.g()

    def f_g_h(self, batch=None):
        """Return the function value, its gradient and its Hessian."""
        if batch is not None:
            raise excep.biogemeError('This function is not data driven.')
        return self.f(), self.g(), self.h()

    def f_g_bhhh(self, batch=None):
        """The BHHH approximation is unavailable: not data driven."""
        raise excep.biogemeError('This function is not data driven.')

theFunction = rosenbrock()
class example58(opt.functionToMinimize):
    """Two-variable test function f(x) = 0.5 x_0^2 + x_0 cos(x_1).

    'Example 5.8' — presumably from the optimization textbook the other
    examples follow; confirm against the original reference. Implements
    Biogeme's ``functionToMinimize`` interface.

    NOTE(review): the raises below use ``excep.biogemeError``; the file
    needs ``import biogeme.exceptions as excep`` (added to the import
    block), otherwise the raise path fails with a NameError.
    """

    def __init__(self):
        # Current point; must be set with setVariables before evaluating.
        self.x = None

    def setVariables(self, x):
        """Store the point at which the function will be evaluated."""
        self.x = x

    def f(self, batch=None):
        """Return the value of the function at the current point.

        :param batch: not supported; must be None.
        :raise excep.biogemeError: if a batch is requested, since this
            function is not data driven.
        """
        if batch is not None:
            raise excep.biogemeError('This function is not data driven.')
        # The original computed an unused len(self.x); removed.
        return 0.5 * self.x[0] * self.x[0] + self.x[0] * np.cos(self.x[1])

    def g(self):
        """Return the gradient of the function at the current point."""
        return np.array(
            [self.x[0] + np.cos(self.x[1]), -self.x[0] * np.sin(self.x[1])]
        )

    def h(self):
        """Return the Hessian of the function at the current point."""
        return np.array(
            [
                [1, -np.sin(self.x[1])],
                [-np.sin(self.x[1]), -self.x[0] * np.cos(self.x[1])],
            ]
        )

    def f_g(self, batch=None):
        """Return the function value and its gradient."""
        if batch is not None:
            raise excep.biogemeError('This function is not data driven.')
        return self.f(), self.g()

    def f_g_h(self, batch=None):
        """Return the function value, its gradient and its Hessian."""
        if batch is not None:
            raise excep.biogemeError('This function is not data driven.')
        return self.f(), self.g(), self.h()

    def f_g_bhhh(self, batch=None):
        """The BHHH approximation is unavailable: not data driven."""
        raise excep.biogemeError('This function is not data driven.')

ex58 = example58()
# Evaluate f and its gradient at x, then perform a line search along
# the steepest-descent direction -g.
x = np.array([-1.5,1.5])
theFunction.setVariables(x)
f,g = theFunction.f_g()
alpha,nfev = opt.lineSearch(theFunction,x,f,g,-g)
print(f"alpha={alpha} nfev={nfev}")
Rosenbrock
# Newton's method with line search on the Rosenbrock function.
x0 = np.array([-1.5,1.5])
xstar,nit,nfev,msg = opt.newtonLineSearch(theFunction,x0)
xstar
print(f"nit={nit} nfev={nfev} {msg}")
Example 5.8
# Newton's method with line search on example 5.8.
x0 = np.array([1,1])
xstar,nit,nfev,msg = opt.newtonLineSearch(ex58,x0)
xstar
print(f"nit={nit} nfev={nfev} {msg}")
Rosenbrock
# Newton's method with trust region on the Rosenbrock function.
x0 = np.array([-1.5,1.5])
xstar,nit,nfev,msg = opt.newtonTrustRegion(theFunction,x0)
xstar
print(f"nit={nit} nfev={nfev} {msg}")
Example 5.8
# Newton's method with trust region on example 5.8.
x0 = np.array([1.0,1.0])
xstar,nit,nfev,msg = opt.newtonTrustRegion(ex58,x0)
xstar
print(f"nit={nit} nfev={nfev} {msg}")
Rosenbrock
# BFGS with line search on the Rosenbrock function, capped at 10000
# iterations.
x0 = np.array([-1.5,1.5])
xstar,nit,nfev,msg = opt.bfgsLineSearch(theFunction,x0,maxiter=10000)
xstar
print(f"nit={nit} nfev={nfev} {msg}")
Example 5.8
# BFGS with line search on example 5.8, capped at 10000 iterations.
x0 = np.array([1,1])
xstar,nit,nfev,msg = opt.bfgsLineSearch(ex58,x0,maxiter=10000)
xstar
print(f"nit={nit} nfev={nfev} {msg}")
Rosenbrock
# BFGS with trust region on the Rosenbrock function.
# NOTE(review): the original cell called opt.bfgsLineSearch again, a
# verbatim duplicate of the previous Rosenbrock cell; the section
# structure (line-search cells above, trust-region cells here, with the
# matching example 5.8 cell using bfgsTrustRegion) shows bfgsTrustRegion
# was intended.
x0 = np.array([-1.5,1.5])
xstar,nit,nfev,msg = opt.bfgsTrustRegion(theFunction,x0,maxiter=10000)
xstar
print(f"nit={nit} nfev={nfev} {msg}")
Example 5.8
# BFGS with trust region on example 5.8, capped at 10000 iterations.
x0 = np.array([1,1])
xstar,nit,nfev,msg = opt.bfgsTrustRegion(ex58,x0,maxiter=10000)
xstar
print(f"nit={nit} nfev={nfev} {msg}")
import pandas as pd
import biogeme.biogeme as bio
import biogeme.database as db
import biogeme.exceptions as excep
import biogeme.models as models
from biogeme.expressions import *
# A small artificial dataset with 5 observations.
df = pd.DataFrame({'Person':[1,1,1,2,2],
'Exclude':[0,0,1,0,1],
'Variable1':[1,2,3,4,5],
'Variable2':[10,20,30,40,50],
'Choice':[1,2,3,1,2],
'Av1':[0,1,1,1,1],
'Av2':[1,1,1,1,1],
'Av3':[0,1,1,1,1]})
myData = db.Database('test',df)
# Expressions referring to the columns of the database.
Choice=Variable('Choice')
Variable1=Variable('Variable1')
Variable2=Variable('Variable2')
# Parameters to be estimated: name, starting value, no lower/upper
# bounds; the last argument 0 presumably means "estimate this
# parameter" (vs. fixed) — confirm with Beta's documentation.
beta1 = Beta('beta1',0,None,None,0)
beta2 = Beta('beta2',0,None,None,0)
# Utility of each of the three alternatives.
V1 = beta1 * Variable1
V2 = beta2 * Variable2
V3 = 0
V ={1:V1,2:V2,3:V3}
# Log likelihood of a logit model; av=None: no availability conditions
# are passed (presumably all alternatives available — verify).
likelihood = models.loglogit(V,av=None,i=Choice)
myBiogeme = bio.BIOGEME(myData,likelihood)
myBiogeme.modelName = 'simpleExample'
print(myBiogeme)
The default optimization algorithm is from scipy. It is possible to transfer parameters to the algorithm. We include here an irrelevant parameter to illustrate the warning.
# Estimate with the default algorithm; the unknown parameter 'myparam'
# is irrelevant and only triggers a warning.
results = myBiogeme.estimate(algoParameters={'myparam':3})
results.getEstimatedParameters()
print(f"nit={results.data.numberOfIterations} nfev={results.data.numberOfFunctionEval}: {results.data.optimizationMessage}")
# Use Biogeme's own Newton algorithm with line search instead.
results = myBiogeme.estimate(algorithm=opt.newtonLineSearchForBiogeme)
results.getEstimatedParameters()
print(f"nit={results.data.numberOfIterations} nfev={results.data.numberOfFunctionEval}: {results.data.optimizationMessage}")
Changing the requested precision
# Newton with line search, stopping at a looser tolerance of 0.1.
results = myBiogeme.estimate(algorithm=opt.newtonLineSearchForBiogeme,algoParameters = {'tolerance': 0.1})
results.getEstimatedParameters()
print(f"nit={results.data.numberOfIterations} nfev={results.data.numberOfFunctionEval}: {results.data.optimizationMessage}")
# algorithm=None explicitly requests the default optimization algorithm.
results = myBiogeme.estimate(algorithm=None)
results.getEstimatedParameters()
print(f"nit={results.data.numberOfIterations} nfev={results.data.numberOfFunctionEval}: {results.data.optimizationMessage}")
# Newton with trust region.
results = myBiogeme.estimate(algorithm=opt.newtonTrustRegionForBiogeme)
results.getEstimatedParameters()
print(f"nit={results.data.numberOfIterations} nfev={results.data.numberOfFunctionEval}: {results.data.optimizationMessage}")
We illustrate the parameters. We use the truncated conjugate gradient instead of dogleg for the trust region subproblem, starting with a small trust region of radius 0.001, and a maximum of 3 iterations.
# Trust-region parameters: 'dogleg':False selects the truncated
# conjugate gradient for the subproblem, with an initial radius of
# 0.001 and at most 3 iterations.
results = myBiogeme.estimate(algorithm=opt.newtonTrustRegionForBiogeme,algoParameters={'dogleg':False,'radius':0.001,'maxiter':3})
results.getEstimatedParameters()
print(f"nit={results.data.numberOfIterations} nfev={results.data.numberOfFunctionEval}: {results.data.optimizationMessage}")
Changing the requested precision
# Newton with trust region and a looser tolerance of 0.1.
results = myBiogeme.estimate(algorithm=opt.newtonTrustRegionForBiogeme,algoParameters={'tolerance':0.1})
results.getEstimatedParameters()
print(f"nit={results.data.numberOfIterations} nfev={results.data.numberOfFunctionEval}: {results.data.optimizationMessage}")
# BFGS with line search.
results = myBiogeme.estimate(algorithm=opt.bfgsLineSearchForBiogeme)
results.getEstimatedParameters()
print(f"nit={results.data.numberOfIterations} nfev={results.data.numberOfFunctionEval}: {results.data.optimizationMessage}")
# BFGS with trust region.
results = myBiogeme.estimate(algorithm=opt.bfgsTrustRegionForBiogeme)
results.getEstimatedParameters()
print(f"nit={results.data.numberOfIterations} nfev={results.data.numberOfFunctionEval}: {results.data.optimizationMessage}")