ParametricEM
pyAgrum 0.18.0

This pyAgrum notebook is licensed under a Creative Commons Attribution-NonCommercial 4.0 International License.

In [1]:
import pyAgrum as gum
import pyAgrum.lib.notebook as gnb

from pyAgrum.lib._utils.oslike import head

import os
# the generated databases will be saved in "out/*.csv"
os.makedirs("out", exist_ok=True)
EMnomissing = os.path.join("out", "EM_nomissing.csv")
EMmissing = os.path.join("out", "EM_missing.csv")

generating data with missing values (at random)

In [2]:
src=gum.fastBN("A->B<-C->D->E<-B;D->F")
gum.generateCSV(src,EMnomissing,5000,random_order=False)
src
Out[2]:
[drawing of src: arcs A->B, C->B, C->D, B->E, D->E, D->F]
In [3]:
import pandas as pd
import numpy as np

def add_missing(src, dst, proba):
  # mask each cell independently with probability `proba` (missing completely at random)
  df = pd.read_csv(src)
  mask = np.random.choice([True, False], size=df.shape, p=[proba, 1 - proba])
  df.mask(mask).to_csv(dst, na_rep='?', index=False, float_format='%.0f')

gum.generateCSV(src,EMnomissing,5000,random_order=False)
add_missing(EMnomissing,EMmissing,proba=0.1)
In [4]:
print("No missing")
head(EMnomissing)
print("Missing")
head(EMmissing)
No missing
A,B,C,D,E,F
0,0,0,0,1,1
1,0,1,1,1,0
1,0,0,1,0,1
1,1,1,1,1,1
0,1,0,0,1,1
1,0,0,0,0,0
1,0,0,1,0,1
0,1,1,1,0,1
1,0,0,1,0,1

Missing
A,B,C,D,E,F
0,0,0,?,1,1
1,0,1,1,1,0
1,0,0,1,0,1
1,1,1,?,1,1
0,1,0,?,1,1
?,0,0,0,?,0
1,0,0,1,0,1
0,1,1,1,0,1
1,0,0,1,0,?
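
As a quick sanity check (not in the original notebook), the actual fraction of missing cells in the generated file can be measured with pandas; a minimal sketch:

# the observed missing rate should be close to proba=0.1
df_check = pd.read_csv(EMmissing, na_values="?")
print(f"observed missing rate: {df_check.isna().values.mean():.3f}")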

learning with missing data

In [5]:
learner = gum.BNLearner(EMmissing, ["?"])
print(f"Missing values in {EMmissing} : {learner.hasMissingValues()}")
Missing values in out/EM_missing.csv : True
In [6]:
# this would fail: the database contains missing values and EM is not enabled yet
# learner.learnParameters(src.dag())
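
If EM is not enabled, learnParameters raises an error on a database that contains missing values. A hedged sketch of how to observe this (the exact exception class depends on the pyAgrum version, so it is caught broadly here):

learner_ko = gum.BNLearner(EMmissing, ["?"])
try:
    # no useEM() call: parameter learning cannot handle the '?' cells
    learner_ko.learnParameters(src.dag())
except Exception as e:
    print(f"learning failed as expected: {e}")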
In [7]:
learner.useEM(1e-3)            # enable EM with stopping criterion 1e-3
learner.useAprioriSmoothing()  # add a smoothing prior on the counts
bn=learner.learnParameters(src.dag())
print(f"# iterations : {learner.nbrIterations()}")
gnb.sideBySide(gnb.getInference(src),gnb.getInference(bn))
# iterations : 5
[side-by-side: inference in the original BN src (left) and in the EM-learned BN bn (right)]
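
Beyond the side-by-side inference displays, the learned parameters can be compared directly with the ground truth, for instance on a single CPT; a minimal sketch (the printed layout depends on the pyAgrum version):

# ground-truth CPT of D versus its EM estimate
print(src.cpt("D"))
print(bn.cpt("D"))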

learning with smaller error (and no smoothing)

In [8]:
learner = gum.BNLearner(EMmissing, ["?"])
learner.setVerbosity(True)  # record the error at each EM iteration (needed for history())
learner.useEM(1e-8)         # tighter stopping criterion, and no smoothing prior this time
bn2=learner.learnParameters(src.dag())
print(f"# iterations : {learner.nbrIterations()}")
gnb.sideBySide(gnb.getInference(src),gnb.getInference(bn2))
# iterations : 14
[side-by-side: inference in the original BN src (left) and in the EM-learned BN bn2 (right)]
In [9]:
print(learner.history())
(0.29931046961739805, 0.039081653010927093, 0.007425025797295249, 0.0022960551280704184, 0.0006587408703660158, 0.00018224595697919875, 4.94682199966976e-05, 1.3288148402941336e-05, 3.5483471210275698e-06, 9.441935320722696e-07, 2.506968040231101e-07, 6.646893891140688e-08, 1.760614292941759e-08, 4.660183794176849e-09)
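
The history can also be plotted to visualize the roughly geometric convergence of EM; a small sketch, assuming matplotlib is available in the notebook environment:

import matplotlib.pyplot as plt

# EM error at each iteration, on a log scale to show the geometric decrease
plt.semilogy(learner.history(), marker="o")
plt.xlabel("iteration")
plt.ylabel("EM stopping criterion")
plt.show()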