I have the following code, which was actually intended to be an MWE for a different issue.
However, when run, it causes something strange to happen to the video output and then signs my user out of macOS.
If I remove plt.show(), everything exits with exit code 0,
except that the graph is not shown.
import pandas as pd
import numpy as np


def simulate_failures(n, p, num_simulations=10000, rng=None):
    """Simulate binomial failure counts for K groups and return them long-form.

    Parameters
    ----------
    n : sequence of int
        Number of trials per group (K entries).
    p : sequence of float
        Per-group success probability; failures are drawn with q = 1 - p.
    num_simulations : int
        Number of simulated rows to draw per group.
    rng : numpy.random.Generator, optional
        Randomness source; a fresh default generator is created when omitted.

    Returns
    -------
    pandas.DataFrame
        Long-format frame with columns "group" (letters 'a', 'b', ...) and
        "value" (failure counts), num_simulations rows per group.
    """
    rng = np.random.default_rng() if rng is None else rng
    q = [1 - x for x in p]
    k = len(n)
    # A single (num_simulations, k) draw is the whole computation. The
    # original copied this array column-by-column into a transposed
    # "master_matrix", then rebuilt each column through a {index: value}
    # dict into `arr` — which ends up exactly equal to `simulations`.
    simulations = rng.binomial(n, q, size=(num_simulations, k))
    groups = pd.Series([chr(i + ord("a")) for i in range(k)], name="group")
    wide = pd.DataFrame(simulations, columns=groups, index=range(num_simulations))
    return wide.melt(value_name="value")


def main():
    """Run the simulation and plot the failure-count distribution for group 'c'."""
    # Imported lazily so the simulation helper stays importable on a
    # headless machine (no GUI backend needed just to import this module).
    import matplotlib.pyplot as plt
    import seaborn as sns

    # Trial counts are integers (the original's 1e4 floats relied on
    # implicit truncation by the legacy RandomState API).
    df1 = simulate_failures(
        n=[10_000, 10_000, 10_000],
        p=[0.999, 0.993, 0.995],
    )
    # Direct boolean filter replaces the original loop over unique group
    # labels, whose enumerate() index was never used.
    df_c = df1.loc[df1["group"] == "c"]
    # sns.distplot was deprecated in seaborn 0.11 and removed in 0.14;
    # histplot(kde=True) is the documented replacement.
    sns.histplot(df_c["value"], kde=True)
    # NOTE(review): the reported macOS session logout on plt.show() points
    # at a GUI-backend problem, not this script's logic — try selecting a
    # different backend (e.g. matplotlib.use("macosx") or "TkAgg") before
    # importing pyplot, and confirm the matplotlib install is healthy.
    plt.show()


if __name__ == "__main__":
    main()