In [1]:
from __future__ import print_function, division

from copy import deepcopy

import matplotlib.pyplot as plt
import pandas as pd

from neuralnilm.data.loadactivations import load_nilmtk_activations
from neuralnilm.data.syntheticaggregatesource import SyntheticAggregateSource
from neuralnilm.data.datapipeline import DataPipeline
from neuralnilm.data.processing import DivideBy, IndependentlyCenter
from neuralnilm.data.datathread import DataThread
In [2]:
# --- Experiment configuration -------------------------------------------
# NOTE(review): absolute local path — parameterize if this notebook is
# shared across machines.
NILMTK_FILENAME = '/data/mine/vadeec/merged/ukdale.h5'

TARGET_APPLIANCE = 'kettle'

SEQ_LENGTH = 256       # samples per training sequence
SAMPLE_PERIOD = 6      # seconds between samples
STRIDE = SEQ_LENGTH    # stride == seq length -> non-overlapping windows

# Date windows per fold: {fold_name: {building_id: (start, end)}}
WINDOWS = {
    'train': {1: ("2014-01-01", "2014-06-01")},
    'unseen_activations_of_seen_appliances': {1: ("2014-06-02", "2014-07-02")},
    'unseen_appliances': {2: ("2013-06-01", "2013-06-07")},
}

# Keyword arguments for load_nilmtk_activations, keyed by loader name.
LOADER_CONFIG = {
    'nilmtk_activations': {
        'appliances': ['kettle', 'microwave', 'washing machine'],
        'filename': NILMTK_FILENAME,
        'sample_period': SAMPLE_PERIOD,
        'windows': WINDOWS,
    },
}
In [ ]:
from neuralnilm.data.stridesource import StrideSource
In [ ]:
# Deterministic source: slides a SEQ_LENGTH-sample window across the real
# aggregate signal with a fixed stride (STRIDE == SEQ_LENGTH, so windows
# are non-overlapping) — presumably for evaluation; confirm semantics in
# neuralnilm.data.stridesource.
stride_source = StrideSource(
target_appliance=TARGET_APPLIANCE,
seq_length=SEQ_LENGTH,
filename=NILMTK_FILENAME,
windows=WINDOWS,
sample_period=SAMPLE_PERIOD,
stride=STRIDE
)
In [3]:
# Extract per-appliance activation segments from the NILMTK dataset for
# the appliances, buildings and date windows configured above.
nilmtk_activations = load_nilmtk_activations(**LOADER_CONFIG['nilmtk_activations'])
In [7]:
from neuralnilm.data.realaggregatesource import RealAggregateSource
In [8]:
# Real-aggregate source: draws sequences from the actual mains signal,
# aligned with the extracted target-appliance activations.
ras = RealAggregateSource(
    target_appliance=TARGET_APPLIANCE,
    activations=nilmtk_activations,
    filename=NILMTK_FILENAME,
    windows=WINDOWS,
    seq_length=SEQ_LENGTH,
    sample_period=SAMPLE_PERIOD,
)
In [9]:
# Profile sequence extraction to find hotspots (IPython magic — needs a
# live kernel). 'train' presumably selects the training fold's windows;
# confirm in RealAggregateSource.
%prun ras._get_sequence_without_target('train')
In [ ]:
# Synthetic-aggregate source built from the extracted activations;
# allow_incomplete_target=False means every sequence should contain a
# complete target activation (presumably — confirm in
# neuralnilm.data.syntheticaggregatesource).
source = SyntheticAggregateSource(
    target_appliance=TARGET_APPLIANCE,
    activations=nilmtk_activations,
    seq_length=SEQ_LENGTH,
    sample_period=SAMPLE_PERIOD,
    allow_incomplete_target=False,
)
In [ ]:
# Draw one large synthetic batch to estimate the input/target scale used
# by the DivideBy processing steps in the pipeline below.
# Fix: use the builtin next() rather than the generator's .next() method,
# which exists only in Python 2 — next() works on Python 2.6+ and 3.
sample = next(source.get_batch(num_seq_per_batch=1024))
In [ ]:
# Combine all three sources into one training pipeline.
# Inputs/targets are scaled by the std() of the synthetic sample batch
# drawn above; IndependentlyCenter presumably subtracts each sequence's
# own mean — confirm in neuralnilm.data.processing.
pipeline = DataPipeline(
sources=[source, ras, stride_source],
num_seq_per_batch=64,
input_processing=[DivideBy(sample.before_processing.input.flatten().std()), IndependentlyCenter()],
target_processing=[DivideBy(sample.before_processing.target.flatten().std())]
)
# Disaggregation sketch: run the trained net over building 5 with a
# strided source, reusing the training pipeline's processing steps.
# Fixes: the statements were fused onto single lines (syntax errors), and
# the Disaggregator call's closing paren was swallowed by an inline
# comment ("output_path=PATH # )").
# NOTE(review): NILMTKDisagSource, Disaggregator and PATH are not
# defined or imported anywhere in this notebook — resolve before running.
nilmtk_disag_source = NILMTKDisagSource(
    filename=NILMTK_FILENAME,
    target_appliance=TARGET_APPLIANCE,
    seq_length=SEQ_LENGTH,
    buildings=[5],
    window_per_building={},
    stride=STRIDE,
    sample_period=SAMPLE_PERIOD,
)

# Copy the pipeline so the training pipeline keeps its own sources.
disag_pipeline = deepcopy(pipeline)
disag_pipeline.source = nilmtk_disag_source

disaggregator = Disaggregator(
    pipeline=disag_pipeline,
    output_path=PATH,  # TODO: PATH is undefined — set the output directory
)
Disaggregator ideas:
In [ ]:
from lasagne.layers import InputLayer, RecurrentLayer, DenseLayer, ReshapeLayer
def get_net_0(input_shape, target_shape=None):
    """Build a small fully-connected autoencoder-style lasagne network.

    Three hidden DenseLayers (100 -> 50 -> 100 units, default
    nonlinearity), followed by a linear DenseLayer sized to the
    flattened target and a ReshapeLayer back to ``target_shape``.

    Parameters
    ----------
    input_shape : tuple
        Shape of the input batch, e.g. (batch, seq_length, features).
    target_shape : tuple, optional
        Shape of the network output. Defaults to ``input_shape``.

    Returns
    -------
    lasagne.layers.Layer
        The final (ReshapeLayer) layer of the network.
    """
    if target_shape is None:
        target_shape = input_shape

    # Hidden stack: 100 -> 50 -> 100 units (bottleneck in the middle).
    network = InputLayer(shape=input_shape)
    for n_units in (100, 50, 100):
        network = DenseLayer(network, num_units=n_units)

    # Linear read-out sized to the flattened target, then reshape.
    network = DenseLayer(
        network,
        num_units=target_shape[1] * target_shape[2],
        nonlinearity=None,  # None -> identity (linear) in lasagne
    )
    return ReshapeLayer(network, shape=target_shape)
In [ ]:
from neuralnilm.net import Net
# Pull one processed batch just to discover the input/target tensor
# shapes needed to size the network.
batch = pipeline.get_batch()
output_layer = get_net_0(
batch.after_processing.input.shape,
batch.after_processing.target.shape
)
net = Net(output_layer, tags=['AE'], description="Just testing new NeuralNILM code!")
In [ ]:
from neuralnilm.trainer import Trainer
from neuralnilm.metrics import Metrics
# Configure training:
# * learning rate 1e-2, dropped to 1e-3 from iteration 10000
# * save params and plot estimates every 5000 iterations
# * validate every 500 iterations
# NOTE(review): experiment_id is a list and state_boundaries=[4] — their
# semantics are defined in neuralnilm; confirm against Trainer/Metrics.
trainer = Trainer(
net=net,
data_pipeline=pipeline,
experiment_id=["5"],
metrics=Metrics(state_boundaries=[4]),
learning_rates={0: 1E-2, 10000: 1E-3},
repeat_callbacks=[
(5000, Trainer.save_params),
(5000, Trainer.plot_estimates),
( 500, Trainer.validate)
]
)
In [ ]:
# Attach the data-loading configuration to the experiment report so the
# run can be reproduced from its recorded metadata.
contents = [(['data'], {'activations': LOADER_CONFIG})]
trainer.submit_report(additional_report_contents=contents)
In [ ]:
# Train for 500,000 iterations (long-running).
trainer.fit(500000)
In [ ]: