-
Notifications
You must be signed in to change notification settings - Fork 11
/
lasagne_utils.py
72 lines (65 loc) · 2.58 KB
/
lasagne_utils.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
import os
import lasagne
import cPickle as pickle
import numpy as np
import theano.tensor as T
# Theano provides built-in tensor types only up to 4D, so declare a 5D
# float32 type with all axes non-broadcastable. Exact axis meaning depends
# on the caller — presumably (batch, time, channel, H, W)-style data; verify.
ftensor5 = T.TensorType(dtype="float32", broadcastable=(False,) * 5)
def save_model(filename, suffix, model, log=None, announce=True, log_only=False):
    """Pickle a Lasagne model's parameters (and optionally a log) under ./models/.

    Parameters
    ----------
    filename : str
        Base name; final path is './models/<filename>_<suffix>'.
    suffix : str
        Tag appended to the base name (e.g. an epoch number).
    model : lasagne layer
        Output layer whose parameter values are dumped to '<name>.params'.
    log : dict or None
        Optional training log, dumped to '<name>.log' when given.
    announce : bool
        Print the target path when True.
    log_only : bool
        Skip the parameter dump and save only the log.
    """
    # Build filename and store in a dedicated directory
    filename = os.path.join('./models/', '{}_{}'.format(filename, suffix))
    # Ensure the target directory exists — the first save would otherwise
    # fail with an IOError on a fresh checkout.
    models_dir = os.path.dirname(filename)
    if models_dir and not os.path.isdir(models_dir):
        os.makedirs(models_dir)
    # Inform user
    if announce:
        print('Saving to: {}'.format(filename))
    # Generate parameter filename and dump
    param_filename = '%s.params' % (filename)
    if not log_only:
        # Acquire parameter values from the network.
        data = lasagne.layers.get_all_param_values(model)
        # 'wb': pickles are binary data; text mode corrupts them on Windows
        # and fails outright under Python 3.
        with open(param_filename, 'wb') as f:
            pickle.dump(data, f)
    # Generate log filename and dump
    if log is not None:
        log_filename = '%s.log' % (filename)
        with open(log_filename, 'wb') as f:
            pickle.dump(log, f)
def load_model(filename, model):
    """Load pickled parameters from './models/<filename>.params' into `model`.

    Parameters
    ----------
    filename : str
        Base name (without the '.params' extension) under ./models/.
    model : lasagne layer
        Network whose parameters are overwritten in place.

    Returns
    -------
    The same `model`, with parameter values set from the pickle.
    """
    # Build filename
    param_path = os.path.join('./models/', '%s.params' % (filename))
    # 'rb': pickle files are binary; text-mode reads break under Python 3
    # and on Windows.
    with open(param_path, 'rb') as f:
        data = pickle.load(f)
    lasagne.layers.set_all_param_values(model, data)
    return model
def load_log(filename, append_dir=True):
    """Load a pickled training log.

    Parameters
    ----------
    filename : str
        Base name (a '.log' extension and './models/' prefix are added when
        `append_dir` is True); otherwise a complete path used as-is.
    append_dir : bool
        Whether to resolve the name inside the ./models/ directory.

    Returns
    -------
    The unpickled log object (a dict of lists, as written by save_model).
    """
    if append_dir:
        filename = os.path.join('./models/', '%s.log' % (filename))
    # 'rb': pickles are binary; text mode fails under Python 3.
    with open(filename, 'rb') as f:
        log = pickle.load(f)
    return log
def store_in_log(log, kv_pairs):
    """Append each value in `kv_pairs` to the matching list in `log`.

    `log` maps keys to lists, and every key in `kv_pairs` must already be
    present. The log is mutated in place and returned for chaining.
    """
    for key in kv_pairs:
        log[key].append(kv_pairs[key])
    return log
def non_flattening_dense(l_in, batch_size, seq_len, *args, **kwargs):
    """Apply a DenseLayer per time step without losing the sequence axis.

    The input is collapsed to (batch*seq, features), run through a dense
    layer, then reshaped back to (batch_size, seq_len, num_units). Extra
    args/kwargs are forwarded to lasagne.layers.DenseLayer.
    """
    # Collapse every axis except the feature axis so the dense layer sees 2D
    flattened = lasagne.layers.ReshapeLayer(l_in, (-1, [2]))
    dense = lasagne.layers.DenseLayer(flattened, *args, **kwargs)
    # Restore the explicit (batch, time, units) shape; the explicit sizes
    # make the reshape raise on a mismatch instead of hiding it
    return lasagne.layers.ReshapeLayer(
        dense, (batch_size, seq_len, dense.output_shape[1]))
def get_layer_output_fn(fn_inputs, network, on_unused_input='raise'):
    """Compile a Theano function returning the deterministic output of every
    layer in `network` (in the order given by get_all_layers).

    `on_unused_input` is forwarded to theano.function so callers can relax
    the check when some inputs do not reach every layer.
    """
    import theano
    layer_outputs = [
        lasagne.layers.get_output(layer, deterministic=True)
        for layer in lasagne.layers.get_all_layers(network)
    ]
    return theano.function(fn_inputs, layer_outputs,
                           on_unused_input=on_unused_input)
def get_output_fn(fn_inputs, network):
    """Compile a Theano function that evaluates `network` deterministically
    (i.e. with dropout and other stochastic layers disabled)."""
    import theano
    deterministic_output = lasagne.layers.get_output(network, deterministic=True)
    return theano.function(fn_inputs, deterministic_output)