In [2]:
import neukrill_net.highlevelfeatures as hlf
In [4]:
import neukrill_net.utils
In [18]:
import numpy as np
In [10]:
import imp
In [6]:
# Move to the repository root so settings.json and the image paths resolve
cd ..
In [7]:
# Load the settings
settings = neukrill_net.utils.Settings("settings.json")
# Load raw training data
rawdata, labels = neukrill_net.utils.load_rawdata(settings.image_fnames, classes=settings.classes)
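A quick sanity check on what was loaded (assuming load_rawdata returns a list of per-image pixel arrays alongside a parallel list of class labels):

print(len(rawdata))      # number of training images
print(len(labels))       # parallel list of class labels
print(rawdata[0].shape)  # dimensions of the first image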
In [48]:
imp.reload(hlf)
# Sanity check: the base class accepts an arbitrary preprocessing function
foo = hlf.HighLevelFeatureBase(preprocessing_func=lambda x: np.array([1]))
In [104]:
imp.reload(hlf)
# Extract simple per-image attributes; here the pixel mean and standard deviation
attrlist = ['mean', 'std']
boa = hlf.BasicAttributes(attrlist, preprocessing_func=None, augment_func=None)
In [46]:
X1 = boa.transform(rawdata)
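With attrlist = ['mean', 'std'], each row of X1 should hold the pixel mean and standard deviation of one image, so a manual NumPy computation ought to agree (assuming that is all BasicAttributes computes, and squeezing any singleton augmentation axis):

X1_manual = np.array([[im.mean(), im.std()] for im in rawdata])
print(np.allclose(np.squeeze(X1), X1_manual))  # expected True under the assumption above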
In [55]:
# transform should also accept a flat list of image file paths
pths = [pth for k in settings.image_fnames['train'].keys() for pth in settings.image_fnames['train'][k]]
X2 = boa.transform(pths)
In [50]:
# The flat test-set path list can be passed directly
X3 = boa.transform(settings.image_fnames['test'])
In [56]:
print(X1.shape)
print(X2.shape)
print(X3.shape)
In [57]:
X1
Out[57]:
In [58]:
X2
Out[58]:
In [59]:
X3
Out[59]:
In [116]:
imp.reload(hlf)
boa2 = hlf.BasicAttributes(['mean', 'height'])
# Combine feature extractors explicitly, or via the overloaded + operator
mHLF1 = hlf.MultiHighLevelFeature([boa, boa2])
mHLF2 = boa + boa2
In [106]:
print(boa.transform(rawdata[:10]))
print(boa2.transform(rawdata[:10]))
print(mHLF1.transform(rawdata[:10]))
print(mHLF2.transform(rawdata[:10]))
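If transform returns an (n_images, n_features) array, the combined extractor should be equivalent to column-wise concatenation of the children's outputs, in the order they were combined; a hedged check:

X_a = np.squeeze(boa.transform(rawdata[:10]))
X_b = np.squeeze(boa2.transform(rawdata[:10]))
print(np.allclose(np.squeeze(mHLF2.transform(rawdata[:10])), np.hstack([X_a, X_b])))  # expected True if features are simply stacked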
In [107]:
mHLF2._childHLFs
Out[107]:
In [108]:
# Raw feature vector the first child extracts from a single image
np.ravel(mHLF2._childHLFs[0]._preprocess_extract_image(rawdata[0]))
Out[108]:
In [109]:
[child._preprocess_extract_image(rawdata[0]).ravel() for child in mHLF2._childHLFs]
Out[109]:
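Presumably a row of the combined feature matrix is just these per-child vectors raveled and joined end to end; a hedged reconstruction for the first image:

row0 = np.concatenate([child._preprocess_extract_image(rawdata[0]).ravel() for child in mHLF2._childHLFs])
print(row0)  # expected to match the first row of mHLF2.transform(rawdata[:1])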
In [121]:
# No preprocessing; cluster 17x17 patches into 10 visual words with k-means
bow = hlf.BagOfWords(preprocessing_func=None, n_features_max=500, patch_size=17, clusteralgo='kmeans', n_clusters=10, random_seed=42)
In [123]:
bow.transform(X1)
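For reference, a minimal sketch of a generic bag-of-visual-words pipeline along the lines the parameters above suggest (patch sampling, k-means clustering, per-image histograms). It uses scikit-learn rather than the neukrill_net implementation; the helper name and the use of KMeans/extract_patches_2d are assumptions, not the library's API:

import numpy as np
from sklearn.cluster import KMeans
from sklearn.feature_extraction.image import extract_patches_2d

def bag_of_words_sketch(images, patch_size=17, n_clusters=10, n_features_max=500, seed=42):
    # Sample up to n_features_max square patches from each image and flatten them
    rng = np.random.RandomState(seed)
    patches = []
    for im in images:
        p = extract_patches_2d(im, (patch_size, patch_size),
                               max_patches=n_features_max, random_state=rng)
        patches.append(p.reshape(len(p), -1))
    # Cluster all patches into a visual vocabulary
    km = KMeans(n_clusters=n_clusters, random_state=seed).fit(np.vstack(patches))
    # Describe each image by its normalised histogram of visual-word assignments
    hists = []
    for p in patches:
        counts = np.bincount(km.predict(p), minlength=n_clusters)
        hists.append(counts / float(counts.sum()))
    return np.array(hists)

The sketch operates on the raw image arrays and assumes each image is at least patch_size pixels along each side.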