by Hans Dembinski (maintainer), Max Planck Institute for Nuclear Physics, Heidelberg
This is a gentle introduction to using iminuit. Feel free to ask any questions!
# basic setup of the notebook
# !pip install iminuit matplotlib numpy
%matplotlib inline
from matplotlib import pyplot as plt
plt.rcParams["font.size"] = 20
import numpy as np
# let's make a line model
def line(x, a, b):
    return a + x * b
a_true = 1.0
b_true = 2.0
# let's make some data
x = np.linspace(0, 1, 10)
# precomputed random numbers from standard normal distribution
z = np.array([-0.49783783, -0.33041722, -1.71800806, 1.60229399,
              1.36682387, -1.15424221, -0.91425267, -0.03395604,
              -1.27611719, -0.7004073])
sigma_y = 0.1 * np.ones_like(x)
y = line(x, a_true, b_true) + sigma_y * z
plt.errorbar(x, y, sigma_y, fmt="o")
plt.xlim(-0.1, 1.1);
# least-squares score function = sum of squared data residuals, weighted by the measurement errors
def LSQ(a, b):
    return np.sum((y - line(x, a, b)) ** 2 / sigma_y ** 2)
# everything in iminuit is done through the Minuit object, so we import it
from iminuit import Minuit
# create instance of Minuit and pass score function to minimize
m = Minuit(LSQ)
# set start values via keywords for a and b
m = Minuit(LSQ, a=5, b=5)
Notice how iminuit figured out that the arguments of LSQ are called "a" and "b". It does this by introspecting the signature of the score function, which is a convenient feature of iminuit.
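You can run the same introspection yourself with iminuit's describe utility; a small sketch (assuming describe is exposed at the package top level, otherwise it lives in iminuit.util):
# inspect the parameter names that iminuit detects from the function signature
from iminuit import describe  # also available as iminuit.util.describe
describe(LSQ)  # should return ['a', 'b']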
# set step size with error_<name>=... keyword
m = Minuit(LSQ, a=5, b=5, error_a=0.1, error_b=0.1)
Set errordef=1 for a least-squares score function and errordef=0.5 for a maximum-likelihood (negative log-likelihood) score function.
# set errordef=1 for least-squares score function
m = Minuit(LSQ, a=5, b=5, error_a=0.1, error_b=0.1, errordef=1)
# no more warnings! :-D
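For illustration, here is a sketch of the maximum-likelihood case: with Gaussian uncertainties the negative log-likelihood is just half of the least-squares sum (up to a constant), so it needs errordef=0.5 to yield the same parameter errors. The NLL function below is an assumed example, not part of the tutorial data.
# sketch: negative log-likelihood for the same Gaussian data (assumed example)
def NLL(a, b):
    # for Gaussian errors, -log(likelihood) = 0.5 * chi2 + constant
    return 0.5 * np.sum((y - line(x, a, b)) ** 2 / sigma_y ** 2)

m_nll = Minuit(NLL, a=5, b=5, error_a=0.1, error_b=0.1, errordef=0.5)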
# fast alternative: just silence all warnings
m = Minuit(LSQ, pedantic=False)
import matplotlib.image as mpimg
img = mpimg.imread("img/trollface.png")
plt.imshow(img);
# check current parameter state (do this at any time)
m.get_param_states()
# one-sided limit a > 0, two-sided limit 0 < b < 10
m = Minuit(LSQ, a=5, b=5,
error_a=0.1, error_b=0.1,
limit_a=(0, None), limit_b=(0, 10),
errordef=1)
m.get_param_states()
# fix parameter a
m = Minuit(LSQ, a=2, b=5, fix_a=True,
error_a=0.1, error_b=0.1,
errordef=1)
m.get_param_states()
# run migrad; will not vary a because we fixed it, only b
m.migrad()
# get parameter values
a_fit = m.values["a"] # m.values[0] also works
b_fit = m.values["b"] # m.values[1] also works
plt.errorbar(x, y, sigma_y, fmt="o")
plt.plot(x, line(x, a_fit, b_fit));
# release fix on "a" and minimize again
m.fixed["a"] = False # m.fixed[0] = False also works
m.migrad()
iminuit can fail, so carefully check the Migrad status report. It distinguishes common reasons for outright failures, acceptable issues (which you can often repair by calling m.hesse(); see the sketch below), and possibly tolerable issues (which still indicate that something is fishy).
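If you prefer a programmatic check over reading the printed report, here is a minimal sketch, assuming the iminuit 1.x accessor get_fmin() and its is_valid / has_accurate_covar flags:
# minimal sketch of a programmatic status check (iminuit 1.x API assumed)
fmin = m.get_fmin()
print(fmin.is_valid)            # did Migrad converge to a valid minimum?
print(fmin.has_accurate_covar)  # is the covariance estimate accurate?
if not fmin.has_accurate_covar:
    m.hesse()  # recompute the error matrix explicitly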
# get better parameter values
a_fit = m.values["a"]
b_fit = m.values["b"]
plt.errorbar(x, y, sigma_y, fmt="o")
plt.plot(x, line(x, a_fit, b_fit))
plt.xlim(-0.1, 1.1);
def LSQ_numpy(par):  # par is a numpy array here
    ym = np.polyval(par, x)  # for len(par) == 2 this is a line
    return np.sum((y - ym) ** 2 / sigma_y ** 2)
# pass starting values and step sizes as numpy arrays
m = Minuit.from_array_func(LSQ_numpy, (5, 5), error=(0.1, 0.1), errordef=1)
# automatic parameter names are assigned x0, x1, ...
m.get_param_states()
# can easily change number of fitted parameters and assign names
m = Minuit.from_array_func(LSQ_numpy, (2, 1, 3, 5), error=0.1,
name=("a", "b", "c", "d"), errordef=1)
m.get_param_states()
# fit the thing
m.migrad()
# can also use score function with scipy.optimize.minimize-like interface
from iminuit import minimize # has same interface as scipy.optimize.minimize
minimize(LSQ_numpy, (5, 5, 5, 5))
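The object returned by minimize mimics scipy's OptimizeResult; a small sketch of how one might inspect it, assuming the fields x, fun and minuit documented by iminuit:
# sketch: inspect the result like a scipy OptimizeResult (fields assumed per iminuit docs)
result = minimize(LSQ_numpy, (5, 5, 5, 5))
print(result.x)       # fitted parameter values
print(result.fun)     # value of the score function at the minimum
print(result.minuit)  # underlying Minuit object for further analysis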
# get parameter values as arrays
par_fit = m.np_values()
plt.errorbar(x, y, sigma_y, fmt="o")
plt.plot(x, np.polyval(par_fit, x), label="pol4")
plt.plot(x, line(x, a_fit, b_fit), label="pol2")
plt.legend()
plt.xlim(-0.1, 1.1);
# check reduced chi2, goodness-of-fit estimate, should be around 1
m.fval / (len(y) - len(m.values))
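If scipy happens to be available (it is not required by iminuit), the chi2 minimum can also be converted into a p-value; a small optional sketch:
# optional: convert the chi2 minimum into a p-value (requires scipy)
from scipy import stats
ndof = len(y) - len(m.values)  # degrees of freedom
print(stats.chi2.sf(m.fval, ndof))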
Call m.hesse() explicitly after m.migrad() to (re)compute the parabolic (Hesse) errors, and call m.minos() to compute the asymmetric Minos errors.
# calling hesse explicitly
m.hesse()
# get full correlation matrix (automatically prints nicely in notebook)
m.matrix(correlation=True)
# or get covariance matrix
m.matrix()
# or get matrix as numpy array
m.np_matrix()
# access individual elements of matrices
corr = m.np_matrix(correlation=True)
print(corr[0, 1])
print(corr[0, 3])
m.minos()
# Minos errors now appear in parameter table
m.get_param_states()
# plot parameters with errors
v = m.np_values()
ve = m.np_errors()
vm = m.np_merrors()
npar = len(v)
indices = np.arange(npar)
# plot hesse errors
plt.errorbar(indices - 0.05, v, ve, fmt="ob")
# plot minos errors
plt.errorbar(indices + 0.05, v, vm, fmt="or")
# make nice labels
plt.xticks(indices, m.values.keys())
plt.xlim(-0.2, indices[-1] + 0.2)
plt.xlabel("parameter")
plt.ylabel("value");
m.draw_mncontour("a", "b", nsigma=4);  # nsigma=4 says: draw four contours, from sigma=1 to 4
m.draw_profile("a");
m.draw_mnprofile("b");
# get scan data to plot it yourself
px, py = m.profile('a', subtract_min=True)
plt.plot(px, py);
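The contour points can be retrieved in the same way to plot them yourself; a sketch assuming the iminuit 1.x mncontour signature, which returns the Minos errors of both parameters together with a list of contour points:
# sketch: get the 1-sigma MINOS contour points and plot them yourself
# (assumes the 1.x signature mncontour(x, y, numpoints=..., sigma=...))
a_err, b_err, contour = m.mncontour("a", "b", numpoints=30, sigma=1)
contour = np.array(contour)
plt.plot(contour[:, 0], contour[:, 1])
plt.xlabel("a")
plt.ylabel("b");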