train.py
"""
Train our RNN on bottleneck or prediction files generated from our CNN.
"""
from keras.callbacks import TensorBoard, ModelCheckpoint, EarlyStopping, CSVLogger
from models import ResearchModels
from data import DataSet
import time

def train(data_type, seq_length, model, saved_model=None,
          concat=False, class_limit=None, image_shape=None,
          load_to_memory=False):
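    """Train the given model on the processed data.

    Arguments (as set in main() below):
        data_type: 'images' or 'features'.
        seq_length: number of frames per sequence.
        model: model name defined in models.py (e.g. 'lstm').
        saved_model: None, or a weights file to resume training from.
        concat: flatten features into a single vector (used for the MLP).
        class_limit: int to limit the number of classes, or None for all.
        image_shape: (height, width, channels), only when training on images.
        load_to_memory: load all sequences into RAM instead of using generators.
    """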
    # Set variables.
    nb_epoch = 1000
    batch_size = 32

    # Helper: Save the model.
    checkpointer = ModelCheckpoint(
        filepath='./data/checkpoints/' + model + '-' + data_type + \
            '.{epoch:03d}-{val_loss:.3f}.hdf5',
        verbose=1,
        save_best_only=True)

    # Helper: TensorBoard
    tb = TensorBoard(log_dir='./data/logs')

    # Helper: Stop when we stop learning.
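    # (EarlyStopping monitors val_loss by default; patience is counted in epochs.)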
    early_stopper = EarlyStopping(patience=10)

    # Helper: Save results.
    timestamp = time.time()
    csv_logger = CSVLogger('./data/logs/' + model + '-' + 'training-' + \
        str(timestamp) + '.log')

    # Get the data and process it.
    if image_shape is None:
        data = DataSet(
            seq_length=seq_length,
            class_limit=class_limit
        )
    else:
        data = DataSet(
            seq_length=seq_length,
            class_limit=class_limit,
            image_shape=image_shape
        )

    # Get samples per epoch.
    # Multiply by 0.7 to estimate how much of data.data is in the train split,
    # then round down to a whole number of batches.
    samples_per_epoch = ((len(data.data) * 0.7) // batch_size) * batch_size

    if load_to_memory:
        # Get data.
        X, y = data.get_all_sequences_in_memory(batch_size, 'train', data_type, concat)
        X_test, y_test = data.get_all_sequences_in_memory(batch_size, 'test', data_type, concat)
    else:
        # Get generators.
        generator = data.frame_generator(batch_size, 'train', data_type, concat)
        val_generator = data.frame_generator(batch_size, 'test', data_type, concat)
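        # Note: frame_generator is assumed to yield batches indefinitely, so
        # fit_generator below bounds each epoch with samples_per_epoch.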

    # Get the model.
    rm = ResearchModels(len(data.classes), model, seq_length, saved_model)

    # Fit!
    if load_to_memory:
        # Use standard fit.
        rm.model.fit(
            X,
            y,
            batch_size=batch_size,
            validation_data=(X_test, y_test),
            verbose=1,
            callbacks=[checkpointer, tb, early_stopper, csv_logger],
            nb_epoch=nb_epoch)
    else:
        # Use fit generator.
        rm.model.fit_generator(
            generator=generator,
            samples_per_epoch=samples_per_epoch,
            nb_epoch=nb_epoch,
            verbose=1,
            callbacks=[checkpointer, tb, early_stopper, csv_logger],
            validation_data=val_generator,
            nb_val_samples=256)

def main():
    """These are the main training settings. Set each before running
    this file."""
    model = 'lstm'  # see `models.py` for more
    saved_model = None  # None or weights file
    class_limit = None  # int, can be 1-101 or None
    seq_length = 40
    load_to_memory = True  # pre-load the sequences into memory

    # Choose images or features and image shape based on network.
    if model == 'conv_3d' or model == 'crnn':
        data_type = 'images'
        image_shape = (80, 80, 3)
    else:
        data_type = 'features'
        image_shape = None

    # MLP requires flattened features.
    if model == 'mlp':
        concat = True
    else:
        concat = False

    train(data_type, seq_length, model, saved_model=saved_model,
          class_limit=class_limit, concat=concat, image_shape=image_shape,
          load_to_memory=load_to_memory)

if __name__ == '__main__':
    main()