SparseConvNet.h
// Ben Graham, University of Warwick, 2015, [email protected]
// SparseConvNet is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// SparseConvNet is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
#pragma once
#include "SpatiallySparseDataset.h"
#include <memory>
#include <string>
#include <fstream>
class SparseConvNetCUDA;
// Host-side interface to the network; the implementation lives in
// SparseConvNetCUDA.
class SparseConvNet {
public:
  std::unique_ptr<SparseConvNetCUDA> cnn;
  SparseConvNet(int dimension, int nInputFeatures, int nClasses,
                int pciBusID = -1, int nTop = 1);
  ~SparseConvNet();
  // Convolution (filterSize, filterStride) followed by max pooling
  // (poolSize, poolStride).
  void addLeNetLayerMP(int nFeatures, int filterSize, int filterStride,
                       int poolSize, int poolStride,
                       ActivationFunction activationFn = RELU,
                       float dropout = 0.0f, int minActiveInputs = 1);
  // Convolution followed by fractional max pooling with spatial shrink
  // factor fmpShrink: PO/RO = pseudorandom/random overlapping,
  // PD/RD = pseudorandom/random disjoint pooling regions.
  void addLeNetLayerPOFMP(int nFeatures, int filterSize, int filterStride,
                          int poolSize, float fmpShrink,
                          ActivationFunction activationFn = RELU,
                          float dropout = 0.0f, int minActiveInputs = 1);
  void addLeNetLayerROFMP(int nFeatures, int filterSize, int filterStride,
                          int poolSize, float fmpShrink,
                          ActivationFunction activationFn = RELU,
                          float dropout = 0.0f, int minActiveInputs = 1);
  void addLeNetLayerPDFMP(int nFeatures, int filterSize, int filterStride,
                          int poolSize, float fmpShrink,
                          ActivationFunction activationFn = RELU,
                          float dropout = 0.0f, int minActiveInputs = 1);
  void addLeNetLayerRDFMP(int nFeatures, int filterSize, int filterStride,
                          int poolSize, float fmpShrink,
                          ActivationFunction activationFn = RELU,
                          float dropout = 0.0f, int minActiveInputs = 1);
  // Pool the remaining spatial sites down to a single site.
  void addTerminalPoolingLayer(int poolSize);
  void addSoftmaxLayer();
  void addIndexLearnerLayer();
  // One pass over the dataset; learningRate > 0 enables weight updates.
  float processDataset(SpatiallySparseDataset &dataset, int batchSize = 100,
                       float learningRate = 0, float momentum = 0.99);
  // Repeated test passes (nReps) with averaged predictions; optionally
  // writes per-sample predictions and a confusion matrix to file.
  void processDatasetRepeatTest(SpatiallySparseDataset &dataset,
                                int batchSize = 100, int nReps = 12,
                                std::string predictionsFilename = "",
                                std::string confusionMatrixFilename = "");
  float processIndexLearnerDataset(SpatiallySparseDataset &dataset,
                                   int batchSize = 100,
                                   float learningRate = 0.0,
                                   float momentum = 0.99);
  void processDatasetDumpTopLevelFeatures(SpatiallySparseDataset &dataset,
                                          int batchSize, int reps = 1);
  // Save/load weights (and optionally the momentum state) to/from files
  // named from baseName and epoch.
  void loadWeights(std::string baseName, int epoch, bool momentum = false,
                   int firstNlayers = 1000000);
  void saveWeights(std::string baseName, int epoch, bool momentum = false);
  // Compute per-input-feature scaling constants from the dataset.
  void calculateInputRegularizingConstants(SpatiallySparseDataset dataset);
};
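
// Example usage (a minimal sketch, not taken from the library itself): it
// assumes a 2D dataset with 3 input features and 10 classes, and
// SpatiallySparseDataset objects "trainSet" and "testSet" built elsewhere by
// one of the dataset loaders. Layer sizes and hyperparameters are
// illustrative only.
//
//   SparseConvNet cnn(2, 3, 10);
//   cnn.addLeNetLayerMP(32, 3, 1, 2, 2, RELU);     // 3x3 conv, 2x2 max pool
//   cnn.addLeNetLayerMP(64, 3, 1, 2, 2, RELU);
//   cnn.addTerminalPoolingLayer(8);                // collapse to one site
//   cnn.addSoftmaxLayer();
//   for (int epoch = 1; epoch <= 10; ++epoch) {
//     cnn.processDataset(trainSet, 100, 0.003f);   // train: learningRate > 0
//     cnn.processDatasetRepeatTest(testSet, 100, 3);
//     cnn.saveWeights("weights/example", epoch);
//   }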
// Reduced variant of SparseConvNet: the same training/testing entry points,
// but only the convolution + max-pooling layer type, with the implementation
// pointer kept private.
class SparseConvTriangLeNet {
private:
  std::unique_ptr<SparseConvNetCUDA> cnn;

public:
  SparseConvTriangLeNet(int dimension, int nInputFeatures, int nClasses,
                        int pciBusID = -1, int nTop = 1);
  ~SparseConvTriangLeNet();
  void addLeNetLayerMP(int nFeatures, int filterSize, int filterStride,
                       int poolSize, int poolStride,
                       ActivationFunction activationFn = RELU,
                       float dropout = 0.0f, int minActiveInputs = 1);
  void addTerminalPoolingLayer(int poolSize);
  void addSoftmaxLayer();
  void addIndexLearnerLayer();
  float processDataset(SpatiallySparseDataset &dataset, int batchSize = 100,
                       float learningRate = 0, float momentum = 0.99);
  void processDatasetRepeatTest(SpatiallySparseDataset &dataset,
                                int batchSize = 100, int nReps = 12,
                                std::string predictionsFilename = "",
                                std::string confusionMatrixFilename = "");
  float processIndexLearnerDataset(SpatiallySparseDataset &dataset,
                                   int batchSize = 100, float learningRate = 0,
                                   float momentum = 0.99);
  void processDatasetDumpTopLevelFeatures(SpatiallySparseDataset &dataset,
                                          int batchSize, int reps = 1);
  void loadWeights(std::string baseName, int epoch, bool momentum = false,
                   int firstNlayers = 1000000);
  void saveWeights(std::string baseName, int epoch, bool momentum = false);
  void calculateInputRegularizingConstants(SpatiallySparseDataset dataset);
};
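
// The SparseConvTriangLeNet wrapper is driven the same way (again only a
// sketch, reusing the assumed "trainSet" dataset from the example above):
//
//   SparseConvTriangLeNet tcnn(2, 3, 10);
//   tcnn.addLeNetLayerMP(32, 3, 1, 3, 2, RELU);
//   tcnn.addTerminalPoolingLayer(8);
//   tcnn.addSoftmaxLayer();
//   tcnn.processDataset(trainSet, 100, 0.003f);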