#!/usr/bin/env python3
# plotting a few distribution functions
import math                        # math
import matplotlib.pyplot as plt    # plotting

max_X = 5.0
nPoints = 100
xData = [ii*max_X/nPoints for ii in range(nPoints)]
yNormal = [math.exp(-0.5*xx*xx) for xx in xData]    # Gaussian exp(-x^2/2)
yExp = [math.exp(-xx) for xx in xData]              # exponential exp(-x)
yTsallis = [1.0/(1.0+xx)**2 for xx in xData]        # Tsallis-Pareto 1/(1+x)^2

plt.plot(xData, yNormal, "ob", markersize=4, label="normal")
plt.plot(xData, yExp, "og", markersize=4, label="exponential")
plt.plot(xData, yTsallis, "or", markersize=4, label="Tsallis-Pareto")
#
plt.xlim(0, max_X)
plt.ylim(0, 1.05)                  # axis limits
plt.legend(loc="upper right")      # legend location
plt.savefig('foo.svg')             # export figure
plt.show()
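# A minimal vectorized sketch of the same three curves, assuming NumPy arrays
# are acceptable in place of the list comprehensions above; the names x,
# y_normal, y_exp and y_tsallis are illustrative and not from the original script.
import numpy as np
import matplotlib.pyplot as plt

x = np.linspace(0.0, 5.0, 100, endpoint=False)   # same support points as above
y_normal = np.exp(-0.5*x**2)                     # Gaussian exp(-x^2/2)
y_exp = np.exp(-x)                               # exponential exp(-x)
y_tsallis = 1.0/(1.0 + x)**2                     # Tsallis-Pareto 1/(1+x)^2

plt.plot(x, y_normal, "ob", markersize=4, label="normal")
plt.plot(x, y_exp, "og", markersize=4, label="exponential")
plt.plot(x, y_tsallis, "or", markersize=4, label="Tsallis-Pareto")
plt.xlim(0, 5.0)
plt.ylim(0, 1.05)
plt.legend(loc="upper right")
plt.show()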
#!/usr/bin/env python3
# sum of uniform distributions, binning with np.histogram()
import numpy as np                 # numerics
import matplotlib.pyplot as plt    # plotting

nData = 10000
nBins = 20
# plotting at bin midpoints
xData = [(iBin+0.5)/nBins for iBin in range(nBins)]

# (adding) uniform distributions in [0,1]
data_1 = [np.random.uniform() for _ in range(nData)]
data_2 = [(np.random.uniform() +
           np.random.uniform())/2.0 for _ in range(nData)]
data_3 = [(np.random.uniform() +
           np.random.uniform() +
           np.random.uniform())/3.0 for _ in range(nData)]

hist_1, bins = np.histogram(data_1, bins=nBins, range=(0.0, 1.0))
hist_2, bins = np.histogram(data_2, bins=nBins, range=(0.0, 1.0))
hist_3, bins = np.histogram(data_3, bins=nBins, range=(0.0, 1.0))
#print("Bin Edges", bins)
#print("Histogram Counts", hist_1)

y_one = hist_1*nBins*1.0/nData     # normalize counts to a density
y_two = hist_2*nBins*1.0/nData     # (bin width is 1/nBins)
y_three = hist_3*nBins*1.0/nData

plt.plot(xData, y_one, "ob", markersize=4, label="flat-one")
plt.plot(xData, y_two, "og", markersize=4, label="flat-two")
plt.plot(xData, y_three, "or", markersize=4, label="flat-three")
plt.plot(xData, y_three, "r")      # connecting line for flat-three
#
plt.xlim(0, 1)
plt.ylim(0, 2.5)
plt.legend(loc="upper right")
plt.savefig('foo.svg')
plt.show()
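# A minimal sketch showing that the normalization y = hist*nBins/nData above
# is the same as letting np.histogram() return a density directly via its
# density=True option (counts divided by nData and by the bin width 1/nBins);
# the comparison print below is illustrative only.
import numpy as np

nData, nBins = 10000, 20
data = [np.random.uniform() for _ in range(nData)]

counts, edges = np.histogram(data, bins=nBins, range=(0.0, 1.0))
density, _ = np.histogram(data, bins=nBins, range=(0.0, 1.0), density=True)

print(np.allclose(counts*nBins/nData, density))   # True: identical densities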
#!/usr/bin/env python3
# Bayesian updating of a coin-bias estimate on a discretized grid
import numpy as np

# ***
# *** global variables
# ***
trueBias = 0.3                              # true coin bias
nX = 11                                     # numerical discretization
xx = [i*1.0/(nX-1.0) for i in range(nX)]    # support points in [0,1]
pp = [1.0/nX for _ in range(nX)]            # starting prior (flat)

# ***
# *** Bayesian updating
# ***
def updatePrior():
    """Bayesian inference for coin flips; the marginal likelihood
    corresponds to the normalization of the posterior"""
    #
    evidence = 0.0 if (np.random.uniform() > trueBias) else 1.0   # coin flip
    for iX in range(nX):
        prior = pp[iX]
        likelihood = xx[iX] if (evidence == 1) else 1.0 - xx[iX]
        pp[iX] = likelihood*prior           # Bayes theorem (unnormalized)
    #
    norm = sum(pp)
    for iX in range(nX):                    # normalization of posterior
        pp[iX] = pp[iX]/norm
    #
    return evidence

# ***
# *** main
# ***
ee = 0.0
for _ in range(200):                        # iteration over observations
    for iX in range(nX):
        print(f'{pp[iX]:6.3f}', end="")
    print(f' | {ee:3.1f}')
    ee = updatePrior()
#
for iX in range(nX):                        # support points
    print(f'{xx[iX]:4.1f} ', end="")
print()
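# A minimal sketch, assuming the same grid-based posterior as above, that
# summarizes the result by its posterior mean and its maximum (MAP) instead
# of printing the full distribution; the names head, like, postMean and
# postMAP are illustrative. With a flat prior the exact continuous posterior
# after h heads in n flips is Beta(1+h, 1+n-h), which this grid approximates.
import numpy as np

trueBias = 0.3
nX = 11
xx = [i/(nX - 1.0) for i in range(nX)]      # support points for the bias
pp = [1.0/nX for _ in range(nX)]            # flat starting prior

for _ in range(200):                        # repeated Bayesian updates
    head = np.random.uniform() < trueBias   # coin flip with bias trueBias
    like = [x if head else 1.0 - x for x in xx]
    pp = [l*p for l, p in zip(like, pp)]    # unnormalized posterior
    norm = sum(pp)                          # marginal likelihood of the flip
    pp = [p/norm for p in pp]               # normalized posterior

postMean = sum(x*p for x, p in zip(xx, pp)) # posterior mean of the bias
postMAP = xx[int(np.argmax(pp))]            # grid point with maximal posterior
print(f'posterior mean {postMean:5.3f}, MAP {postMAP:3.1f}, true {trueBias:3.1f}')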