
Commit

Adding demo files
mathinking committed Mar 27, 2021
0 parents commit 7e3f4b7
Showing 12 changed files with 666 additions and 0 deletions.
141 changes: 141 additions & 0 deletions CIFARTraining/cifar10CNN.m
@@ -0,0 +1,141 @@
%% Classifying the CIFAR-10 dataset using Convolutional Neural Networks
% This example shows how to train a Convolutional Neural Network (CNN) from
% scratch on the CIFAR-10 dataset.
%
% Data Credit: Krizhevsky, A., & Hinton, G. (2009). Learning multiple
% layers of features from tiny images.

% Copyright 2016 The MathWorks, Inc.

%% Download the CIFAR-10 dataset
if ~exist('cifar-10-batches-mat','dir')
    cifar10Dataset = 'cifar-10-matlab';
    disp('Downloading 174MB CIFAR-10 dataset...');
    websave([cifar10Dataset,'.tar.gz'],...
        ['https://www.cs.toronto.edu/~kriz/',cifar10Dataset,'.tar.gz']);
    gunzip([cifar10Dataset,'.tar.gz'])
    delete([cifar10Dataset,'.tar.gz'])
    untar([cifar10Dataset,'.tar'])
    delete([cifar10Dataset,'.tar'])
end

%% Prepare the CIFAR-10 dataset
if ~exist('cifar10Train','dir')
    disp('Saving the images in folders. This might take some time...');
    saveCIFAR10AsFolderOfImages('cifar-10-batches-mat', pwd, true);
end

%% Load the CIFAR-10 training image set (50,000 32x32 colour images in 10 classes)
imsetTrain = imageSet('cifar10Train','recursive');

%% Display Sampling of Image Data
numClasses = size(imsetTrain,2);
imagesPerClass = 10;
imagesInMontage = cell(imagesPerClass,numClasses);
for i = 1:size(imagesInMontage,2)
    imagesInMontage(:,i) = ...
        imsetTrain(i).ImageLocation(randi(imsetTrain(i).Count, 1, ...
        imagesPerClass));
end

montage(imagesInMontage(:)','Size',[numClasses,imagesPerClass]);
title('Sample of Training Data (Credit: Learning Multiple Layers of Features from Tiny Images, Alex Krizhevsky, 2009.)')

%% Prepare the data for Training
% Read all images and store them in a 4D uint8 array for training, together
% with their corresponding class labels.

trainNames = {imsetTrain.Description};
XTrain = zeros(32,32,3,sum([imsetTrain.Count]),'uint8');
TTrain = categorical(discretize((1:sum([imsetTrain.Count]))',...
    [0,cumsum([imsetTrain.Count])],'categorical',trainNames));

j = 0;
tic;
for c = 1:length(imsetTrain)
    for i = 1:imsetTrain(c).Count
        XTrain(:,:,:,i+j) = read(imsetTrain(c),i);
    end
    j = j + imsetTrain(c).Count;
end
toc;

%% Define a CNN architecture
conv1 = convolution2dLayer(5,32,'Padding',2,...
    'BiasLearnRateFactor',2);
conv1.Weights = gpuArray(single(randn([5 5 3 32])*0.0001));
fc1 = fullyConnectedLayer(64,'BiasLearnRateFactor',2);
fc1.Weights = gpuArray(single(randn([64 576])*0.1));
fc2 = fullyConnectedLayer(10,'BiasLearnRateFactor',2);
fc2.Weights = gpuArray(single(randn([10 64])*0.1));

layers = [ ...
    imageInputLayer([32 32 3]);
    conv1;
    maxPooling2dLayer(3,'Stride',2);
    reluLayer();
    convolution2dLayer(5,32,'Padding',2,'BiasLearnRateFactor',2);
    reluLayer();
    averagePooling2dLayer(3,'Stride',2);
    convolution2dLayer(5,64,'Padding',2,'BiasLearnRateFactor',2);
    reluLayer();
    averagePooling2dLayer(3,'Stride',2);
    fc1;
    reluLayer();
    fc2;
    softmaxLayer()
    classificationLayer()];

% Define the training options.
opts = trainingOptions('sgdm', ...
    'InitialLearnRate', 0.001, ...
    'LearnRateSchedule', 'piecewise', ...
    'LearnRateDropFactor', 0.1, ...
    'LearnRateDropPeriod', 8, ...
    'L2Regularization', 0.004, ...
    'MaxEpochs', 10, ...
    'MiniBatchSize', 100, ...
    'Verbose', true);

%% Training the CNN
[net, info] = trainNetwork(XTrain, TTrain, layers, opts);

% Alternative way using imageDatastore
% imdsTrain = imageDatastore(fullfile(pwd,'cifar10Train'),...
%     'IncludeSubfolders',true,'LabelSource','foldernames');
% [net, info] = trainNetwork(imdsTrain, layers, opts);

%% Visualise the first layer weights.
figure;
montage(mat2gray(gather(net.Layers(2).Weights)));
title('First Layer Weights');

%% Load Test Data

imsetTest = imageSet('cifar10Test','recursive');

testNames = {imsetTest.Description};
XTest = zeros(32,32,3,sum([imsetTest.Count]),'uint8');
TTest = categorical(discretize((1:sum([imsetTest.Count]))',...
    [0,cumsum([imsetTest.Count])],'categorical',testNames));
j = 0;
tic;
for c = 1:length(imsetTest)
    for i = 1:imsetTest(c).Count
        XTest(:,:,:,i+j) = read(imsetTest(c),i);
    end
    j = j + imsetTest(c).Count;
end
toc;

%% Run the network on the test set

YTest = classify(net, XTest);

% Alternative way using imageDatastore
% imdsTest = imageDatastore(fullfile(pwd, 'cifar10Test'),...
%     'IncludeSubfolders',true,'LabelSource','foldernames');
% YTest = classify(net, imdsTest);

% Calculate the accuracy.
accuracy = sum(YTest == TTest)/numel(TTest)
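
% A per-class view can complement the overall accuracy. Minimal sketch,
% commented out like the alternatives above, assuming the Statistics and
% Machine Learning Toolbox (which provides confusionmat) is available:
% [confMat, classOrder] = confusionmat(TTest, YTest);
% confMat       % rows = true class, columns = predicted class
% classOrder    % class labels corresponding to the rows/columns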
33 changes: 33 additions & 0 deletions CIFARTraining/loadCIFAR10AsAFourDimensionalArray.m
@@ -0,0 +1,33 @@
% Copyright 2016 The MathWorks, Inc.

function [XTrain, TTrain, XTest, TTest] = loadCIFAR10AsAFourDimensionalArray()

% loadCIFAR10AsAFourDimensionalArray Load the CIFAR-10 data
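%
% Example (run from the folder containing the CIFAR-10 .mat batch files):
%   [XTrain, TTrain, XTest, TTest] = loadCIFAR10AsAFourDimensionalArray();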

[XTrain1, TTrain1] = iLoadBatchAsFourDimensionalArray('data_batch_1.mat');
[XTrain2, TTrain2] = iLoadBatchAsFourDimensionalArray('data_batch_2.mat');
[XTrain3, TTrain3] = iLoadBatchAsFourDimensionalArray('data_batch_3.mat');
[XTrain4, TTrain4] = iLoadBatchAsFourDimensionalArray('data_batch_4.mat');
[XTrain5, TTrain5] = iLoadBatchAsFourDimensionalArray('data_batch_5.mat');

XTrain = cat(4, XTrain1, XTrain2, XTrain3, XTrain4, XTrain5);
TTrain = [TTrain1; TTrain2; TTrain3; TTrain4; TTrain5];

[XTest, TTest] = iLoadBatchAsFourDimensionalArray('test_batch.mat');

XTrain = double(XTrain);
XTest = double(XTest);
end

function [XBatch, TBatch] = iLoadBatchAsFourDimensionalArray(batchFileName)
load(batchFileName);
XBatch = data';
XBatch = reshape(XBatch, 32,32,3,[]);
XBatch = permute(XBatch, [2 1 3 4]);
TBatch = iConvertLabelsToCategorical(labels);
end

function categoricalLabels = iConvertLabelsToCategorical(integerLabels)
load('batches.meta.mat');
categoricalLabels = categorical(integerLabels, 0:9, label_names);
end
79 changes: 79 additions & 0 deletions CIFARTraining/saveCIFAR10AsFolderOfImages.m
@@ -0,0 +1,79 @@
% Copyright 2016 The MathWorks, Inc.

function saveCIFAR10AsFolderOfImages(inputPath, outputPath, varargin)
% saveCIFAR10AsFolderOfImages Save the CIFAR-10 dataset as a folder of images
% saveCIFAR10AsFolderOfImages(inputPath, outputPath) takes the CIFAR-10
% dataset located at inputPath and saves it as a folder of images to the
% directory outputPath. If inputPath or outputPath is an empty string, it
% is assumed that the current folder should be used.
%
% saveCIFAR10AsFolderOfImages(..., labelDirectories) will save the
% CIFAR-10 data so that instances with the same label will be saved to
% sub-directories with the name of that label.
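%
% Example (mirrors the call in cifar10CNN.m; assumes the extracted
% 'cifar-10-batches-mat' folder is in the current directory):
%   saveCIFAR10AsFolderOfImages('cifar-10-batches-mat', pwd, true);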

% Check input directories are valid
if(~isempty(inputPath))
    assert(exist(inputPath,'dir') == 7);
end
if(~isempty(outputPath))
    assert(exist(outputPath,'dir') == 7);
end

% Check if we want to save each set with the same labels to its own
% directory.
if(isempty(varargin))
    labelDirectories = false;
else
    assert(nargin == 3);
    labelDirectories = varargin{1};
end

% Set names for directories
trainDirectoryName = 'cifar10Train';
testDirectoryName = 'cifar10Test';

% Create directories for the output
mkdir(fullfile(outputPath, trainDirectoryName));
mkdir(fullfile(outputPath, testDirectoryName));

if(labelDirectories)
    labelNames = {'airplane','automobile','bird','cat','deer','dog','frog','horse','ship','truck'};
    iMakeTheseDirectories(fullfile(outputPath, trainDirectoryName), labelNames);
    iMakeTheseDirectories(fullfile(outputPath, testDirectoryName), labelNames);
    for i = 1:5
        iLoadBatchAndWriteAsImagesToLabelFolders(fullfile(inputPath,['data_batch_' num2str(i) '.mat']), fullfile(outputPath, trainDirectoryName), labelNames, (i-1)*10000);
    end
    iLoadBatchAndWriteAsImagesToLabelFolders(fullfile(inputPath,'test_batch.mat'), fullfile(outputPath, testDirectoryName), labelNames, 0);
else
    for i = 1:5
        iLoadBatchAndWriteAsImages(fullfile(inputPath,['data_batch_' num2str(i) '.mat']), fullfile(outputPath, trainDirectoryName), (i-1)*10000);
    end
    iLoadBatchAndWriteAsImages(fullfile(inputPath,'test_batch.mat'), fullfile(outputPath, testDirectoryName), 0);
end
end

function iLoadBatchAndWriteAsImagesToLabelFolders(fullInputBatchPath, fullOutputDirectoryPath, labelNames, nameIndexOffset)
load(fullInputBatchPath);
data = data'; %#ok<NODEF>
data = reshape(data, 32,32,3,[]);
data = permute(data, [2 1 3 4]);
for i = 1:size(data,4)
    imwrite(data(:,:,:,i), fullfile(fullOutputDirectoryPath, labelNames{labels(i)+1}, ['image' num2str(i + nameIndexOffset) '.png']));
end
end

function iLoadBatchAndWriteAsImages(fullInputBatchPath, fullOutputDirectoryPath, nameIndexOffset)
load(fullInputBatchPath);
data = data'; %#ok<NODEF>
data = reshape(data, 32,32,3,[]);
data = permute(data, [2 1 3 4]);
for i = 1:size(data,4)
    imwrite(data(:,:,:,i), fullfile(fullOutputDirectoryPath, ['image' num2str(i + nameIndexOffset) '.png']));
end
end

function iMakeTheseDirectories(outputPath, directoryNames)
for i = 1:numel(directoryNames)
    mkdir(fullfile(outputPath, directoryNames{i}));
end
end
27 changes: 27 additions & 0 deletions LICENSE.md
@@ -0,0 +1,27 @@
Copyright (c) 2017, The MathWorks, Inc.
All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:

* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in
the documentation and/or other materials provided with the distribution.
* In all cases, the software is, and all modifications and derivatives
of the software shall be, licensed to you solely for use in conjunction
with MathWorks products and service offerings.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
17 changes: 17 additions & 0 deletions README.md
@@ -0,0 +1,17 @@
# DeepLearning-for-ComputerVision-with-MATLAB
[![View Example files for "Deep Learning for Computer Vision with MATLAB" Webinar on File Exchange](https://www.mathworks.com/matlabcentral/images/matlab-file-exchange.svg)](https://www.mathworks.com/matlabcentral/fileexchange/58030-example-files-for-deep-learning-for-computer-vision-with-matlab-webinar)

These are the example files used in the webinar ["Aprendizaje Profundo para Visión Artificial con MATLAB"](https://www.mathworks.com/videos/deep-learning-for-computer-vision-with-matlab-1540981496452.html) ("Deep Learning for Computer Vision with MATLAB", delivered in Spanish).


Deep Learning is an area of Machine Learning that uses multiple nonlinear processing layers to learn useful representations of features directly from data. This webinar shows the fundamentals of Deep Learning for Computer Vision and how to use Convolutional Neural Networks (popularly known as CNNs or ConvNets) to solve object classification/recognition problems.

The source code consists of 3 different examples:
1. Running a trained CNN (/WebcamClassification)
2. Training a CNN from scratch (/CIFARTraining)
3. Fine-tuning a pre-trained CNN via transfer learning (/TransferLearning)

Examples 1) and 3) make use of AlexNet [1]. To download the trained CNN [2], run the file downloadAndPrepareCNN.m (if using R2016a) or download the AlexNet Network support package (if using R2016b or later).
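
As a rough illustration of example 1 (classifying images with a trained CNN), here is a minimal sketch, assuming the AlexNet support package is installed (R2016b or later) and using `peppers.png`, a demo image that ships with MATLAB, in place of a webcam frame; the actual webinar files may differ:

```matlab
% Minimal sketch: classify one image with the pretrained AlexNet.
net = alexnet;                                        % load the pretrained network
img = imread('peppers.png');                          % demo image shipped with MATLAB
img = imresize(img, net.Layers(1).InputSize(1:2));    % resize to the network's input size (227x227)
label = classify(net, img)                            % predicted ImageNet class label
```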

References:
[1] Krizhevsky, A., Sutskever, I., & Hinton, G. E. (2012). ImageNet Classification with Deep Convolutional Neural Networks. Advances in Neural Information Processing Systems (pp. 1097-1105).
[2] Vedaldi, A., & Lenc, K. (2015). MatConvNet: Convolutional Neural Networks for MATLAB. Proceedings of the 23rd ACM International Conference on Multimedia (pp. 689-692). ACM.
13 changes: 13 additions & 0 deletions TransferLearning/DisplayImageMontage.m
@@ -0,0 +1,13 @@
% Copyright 2016 The MathWorks, Inc.

function DisplayImageMontage(cellArrayOfImages)
% Displays a montage of images. Images are resized to handle different
% image sizes.
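%
% Example (hypothetical file names):
%   DisplayImageMontage({'cat1.jpg','cat2.jpg','dog1.jpg'})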

thumbnails = [];
for i = 1:numel(cellArrayOfImages)
    img = imread(cellArrayOfImages{i});
    thumbnails = cat(4, thumbnails, imresize(img, [200 200]));
end

montage(thumbnails)
