This repository was archived by the owner on Dec 21, 2017. It is now read-only.
4 changes: 1 addition & 3 deletions pelops/features/resnet50.py
@@ -19,9 +19,7 @@ def __init__(self, chip_producer):

if resnet_model is None:
# include_top needs to be True for this to work
base_model = ResNet50(weights='imagenet', include_top=True)
resnet_model = Model(input=base_model.input,
output=base_model.get_layer('flatten_1').output)
resnet_model = ResNet50(weights='imagenet', include_top=False)

self.resnet_model = resnet_model

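For context (not part of the diff): with include_top=False, the Keras-1-era ResNet50 stops at its final average-pooling layer instead of exposing the 'flatten_1' layer, so features come back with singleton spatial dimensions. A minimal sketch of that assumed behaviour, which the test change at the bottom of this PR accounts for:

import numpy as np
from keras.applications.resnet50 import ResNet50

# Assumed-behaviour sketch, not code from this PR.
model = ResNet50(weights='imagenet', include_top=False)
feats = model.predict(np.zeros((1, 224, 224, 3)))
print(feats.shape)            # assumed (1, 1, 1, 2048) with TensorFlow dim ordering
print(feats.squeeze().shape)  # (2048,), matching the updated test assertion
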
175 changes: 175 additions & 0 deletions pelops/models/pelops_model.py
@@ -0,0 +1,175 @@
from abc import ABCMeta, abstractmethod
import numpy as np
from keras.models import model_from_json


class PelopsModel(metaclass=ABCMeta):
"""
A base class for all Pelops Models
"""
def __init__(self,
train_exp_gen,
test_exp_gen,
num_experiments,
*args,
**kwargs):
"""

Args:
train_exp_gen: Training data experiment generator
test_exp_gen: Test data experiment generator
num_experiments: Number of epxeriments to use for validation, training is 10x
feature_transformer: A hook to transform feature vectors if desired
truth_function: A function that takes two chips and returns the desired "truth"
e.g. same car or same car, color, vehicle type

Returns:

"""
self.train_exp_gen = train_exp_gen
self.test_exp_gen = test_exp_gen
self.num_experiments = num_experiments
self.feature_transformer = kwargs.get('feature_transformer', lambda x: x)
self.truth_function = kwargs.get('truth_function', PelopsModel.make_carid_type_color_truth)

@abstractmethod
    def define_model(self):
        """Build and compile the Keras model and store it as self.model."""
        raise NotImplementedError()

@staticmethod
def make_carid_type_color_truth(chip1, chip2):
"""
Takes two chips and returns if the chips represent the same [car_id, color, vehicle type]
Args:
chip1:
chip2:

Returns:
if the two chips have the same [car_id, color, vehicle type]
"""
same_vehicle = chip1.car_id == chip2.car_id
same_type = chip1.misc['vehicle_type'] == chip2.misc['vehicle_type']
same_color = chip1.misc['color'] == chip2.misc['color']
return [same_vehicle, same_type, same_color]

@staticmethod
def make_carid_truth(chip1, chip2):
"""
Takes two chips and returns if the chips represent the same car_id
Args:
chip1:
chip2:

Returns:
if the two chips have the same [car_id]
"""
same_vehicle = chip1.car_id == chip2.car_id
return [same_vehicle]

@staticmethod
def make_batch(experiment_generator,
batch_size,
feature_transformer,
truth_function):
"""
Make a set of training or test data to be used

Args:
experiment_generator: Pelops.experiment_api.experiment.Experiment
batch_size: Number of examples to create
feature_transformer: A hook to transform feature vectors if desired
truth_maker: A function that takes two chips and returns the desired "truth"
e.g. same car or same car, color, vehicle type

Returns:
[Input
"""
truths = []
left_feats = []
right_feats = []

for i in range(batch_size):
# Generate Example
cam0, cam1 = experiment_generator.generate()

# Find true match
true_match = set([x.car_id for x in cam0]) & set([x.car_id for x in cam1])

# Figure out which car in camera 0 is the "true" match
for car in cam0:
if car.car_id in true_match:
true_car = car

true_car_feats = experiment_generator.dataset.get_feats_for_chip(true_car)

# Construct examples
for car_num, right_car in enumerate(cam1):
truth = truth_function(true_car, right_car)
right_car_feat = experiment_generator.dataset.get_feats_for_chip(right_car)

# Add forward example
left_feats.append(true_car_feats)
right_feats.append(right_car_feat)
truths.append(truth)

# Add reversed example
left_feats.append(right_car_feat)
right_feats.append(true_car_feats)
truths.append(truth)

left_feats = feature_transformer(np.array(left_feats))
right_feats = feature_transformer(np.array(right_feats))
return [left_feats, right_feats], np.array(truths, dtype=np.uint8)

def prep_train(self):
self.X_train, self.Y_train = self.make_batch(self.train_exp_gen,
10*self.num_experiments,
self.feature_transformer,
self.truth_function)

def prep_test(self):
self.X_test, self.Y_test = self.make_batch(self.test_exp_gen,
self.num_experiments,
self.feature_transformer,
self.truth_function)

def prep(self):
self.define_model()
self.prep_train()
self.prep_test()

def train(self,
epochs,
batch_size=128,
callbacks=None):
self.model.fit(self.X_train,
self.Y_train,
validation_data=(self.X_test, self.Y_test),
batch_size=batch_size,
nb_epoch=epochs,
callbacks=callbacks,
verbose=2)

def save(self, base_filename):
json_filename = base_filename + '.json'
weights_filename = base_filename + '.weights'

# serialize model to JSON
model_json = self.model.to_json()
with open(json_filename, 'w') as json_file:
json_file.write(model_json)

# serialize weights to HDF5
self.model.save_weights(weights_filename)

def load(self, base_filename):
json_filename = base_filename + '.json'
weights_filename = base_filename + '.weights'

# load json and create model
        with open(json_filename, 'r') as json_file:
            loaded_model_json = json_file.read()
        self.model = model_from_json(loaded_model_json)
        # load weights into new model
        self.model.load_weights(weights_filename)
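
A minimal sketch, not part of this PR, of how a concrete subclass might use PelopsModel. The class name DenseSiameseModel, the layer sizes, and the 2048-dimensional feature assumption are illustrative only; the Keras calls mirror the Keras-1-style API already used in this PR (merge, nb_epoch).

from keras.models import Model
from keras.layers import Dense, Input, merge

from pelops.models.pelops_model import PelopsModel


class DenseSiameseModel(PelopsModel):
    def define_model(self):
        # Two feature vectors in, three truth values (car_id, vehicle type, color) out.
        left = Input(shape=[2048])
        right = Input(shape=[2048])
        hidden = merge([left, right], mode='concat')
        hidden = Dense(256, activation='relu')(hidden)
        predictions = Dense(3, activation='sigmoid')(hidden)
        self.model = Model([left, right], output=predictions)
        self.model.compile(optimizer='adam',
                           loss='binary_crossentropy',
                           metrics=['accuracy'])


# Assuming train_gen and test_gen are pelops experiment generators:
# model = DenseSiameseModel(train_gen, test_gen, num_experiments=100)
# model.prep()                # runs define_model() and builds train/test batches
# model.train(epochs=10)
# model.save('dense_siamese')
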
54 changes: 54 additions & 0 deletions pelops/models/siamese_pca_model.py
@@ -0,0 +1,54 @@
import numpy as np

from keras.models import Model
from keras.layers import Dense, Input, merge, Reshape, GlobalAveragePooling2D
from keras.layers.normalization import BatchNormalization

from sklearn import decomposition

from pelops.models.pelops_model import PelopsModel

class SiamesePCAModel(PelopsModel):
def __init__(self,
train_exp_gen,
test_exp_gen,
num_experiments,
*args,
**kwargs):

self.output_size = kwargs.get('pca_dim', 32)
self.pca = decomposition.PCA(n_components=self.output_size)

cars = set(train_exp_gen.list_of_cars)
feats = []
for chip in train_exp_gen.dataset.chips.values():
if chip.car_id in cars:
feats.append(train_exp_gen.dataset.get_feats_for_chip(chip))
self.pca.fit(np.array(feats))
kwargs['feature_transformer'] = self.pca.transform

super().__init__(train_exp_gen,
test_exp_gen,
num_experiments,
*args,
**kwargs)


def define_model(self):
processed_left = Input(shape=[self.output_size])
processed_right = Input(shape=[self.output_size])

my_layer = merge([processed_left, processed_right], mode='concat')
my_layer = Dense(self.output_size, activation='relu')(my_layer)
my_layer = BatchNormalization()(my_layer)

        my_layer = Dense(self.output_size // 2, activation='relu')(my_layer)

        num_training_classes = 3  # one sigmoid output per truth value: car_id, vehicle type, color
predictions = Dense(num_training_classes, activation='sigmoid')(my_layer)

self.model = Model([processed_left, processed_right], output=predictions)

self.model.compile(optimizer='adam',
loss='binary_crossentropy',
metrics=['accuracy'])
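
A hedged usage sketch, not part of this PR: assuming train_gen and test_gen are pelops experiment generators backed by a dataset of precomputed features, a training run with the PCA model might look like the following (the function name, dimensions, and epoch count are illustrative).

from pelops.models.siamese_pca_model import SiamesePCAModel


def train_siamese_pca(train_gen, test_gen, num_experiments=100, base_filename='siamese_pca'):
    # PCA is fit on the training chips' features inside __init__, controlled by pca_dim.
    model = SiamesePCAModel(train_gen, test_gen, num_experiments, pca_dim=32)
    model.prep()                            # build the Keras model and the PCA-transformed batches
    model.train(epochs=20, batch_size=128)
    model.save(base_filename)
    return model
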
2 changes: 1 addition & 1 deletion testci/test_resnet50_feature.py
@@ -46,7 +46,7 @@ def feature_producer(chip_producer):
def test_features(feature_producer, chip_producer):
for _, chip in chip_producer["chips"].items():
features = feature_producer.produce_features(chip)
assert features.shape == (1, 2048)
assert features.squeeze().shape == (2048,)
assert np.sum(features) != 0

