Commit e9fcd84

Updated instantiation code
szaman19 committed Jun 18, 2024
1 parent 8eef5a0 · commit e9fcd84
Showing 5 changed files with 36 additions and 41 deletions.
include/lbann/layers/misc/channelwise_softmax.hpp (2 changes: 1 addition & 1 deletion)
@@ -1,5 +1,5 @@
 ////////////////////////////////////////////////////////////////////////////////
-// Copyright (c) 2014-2023, Lawrence Livermore National Security, LLC.
+// Copyright (c) 2014-2024, Lawrence Livermore National Security, LLC.
 // Produced at the Lawrence Livermore National Laboratory.
 // Written by the LBANN Research Team (B. Van Essen, et al.) listed in
 // the CONTRIBUTORS file. <[email protected]>
include/lbann/layers/misc/channelwise_softmax_impl.hpp (2 changes: 1 addition & 1 deletion)
@@ -1,5 +1,5 @@
 ////////////////////////////////////////////////////////////////////////////////
-// Copyright (c) 2014-2023, Lawrence Livermore National Security, LLC.
+// Copyright (c) 2014-2024, Lawrence Livermore National Security, LLC.
 // Produced at the Lawrence Livermore National Laboratory.
 // Written by the LBANN Research Team (B. Van Essen, et al.) listed in
 // the CONTRIBUTORS file. <[email protected]>
include/lbann/layers/misc/distconv/distconv_channelwise_softmax.hpp (55 changes: 27 additions & 28 deletions)
@@ -1,5 +1,5 @@
 ////////////////////////////////////////////////////////////////////////////////
-// Copyright (c) 2014-2022, Lawrence Livermore National Security, LLC.
+// Copyright (c) 2014-2024, Lawrence Livermore National Security, LLC.
 // Produced at the Lawrence Livermore National Laboratory.
 // Written by the LBANN Research Team (B. Van Essen, et al.) listed in
 // the CONTRIBUTORS file. <[email protected]>
@@ -29,33 +29,32 @@
 #include "lbann/utils/distconv.hpp"
 
 #ifdef LBANN_HAS_DISTCONV
-namespace distconv{
-template <typename Backend, typename DataType>
-class ChannelwiseSoftmax{
-using LocaleMPI = tensor::LocaleMPI;
-
-public:
-ChannelwiseSoftmax(Backend &backend):m_be(backend){};
-
-template <typename Allocator>
-int forward(
-const tensor::Tensor<DataType, LocaleMPI, Allocator> &input_0,
-tensor::Tensor<DataType, LocaleMPI, Allocator> &output);
-
-template <typename Allocator>
-int backward(
-const tensor::Tensor<DataType, LocaleMPI, Allocator> &input_0,
-const tensor::Tensor<DataType, LocaleMPI, Allocator> &output_grad,
-tensor::Tensor<DataType, LocaleMPI, Allocator> &input_grad_0);
-
-protected:
-Backend &m_be;
-
-};
-
-extern template class ChannelwiseSoftmax<::distconv::BackendDNNLib, float>;
-extern template class ChannelwiseSoftmax<::distconv::BackendDNNLib, double>;
-}
+namespace distconv {
+template <typename Backend, typename DataType>
+class ChannelwiseSoftmax
+{
+  using LocaleMPI = tensor::LocaleMPI;
+
+public:
+  ChannelwiseSoftmax(Backend& backend) : m_be(backend){};
+
+  template <typename Allocator>
+  int forward(const tensor::Tensor<DataType, LocaleMPI, Allocator>& input_0,
+              tensor::Tensor<DataType, LocaleMPI, Allocator>& output);
+
+  template <typename Allocator>
+  int backward(
+    const tensor::Tensor<DataType, LocaleMPI, Allocator>& input_0,
+    const tensor::Tensor<DataType, LocaleMPI, Allocator>& output_grad,
+    tensor::Tensor<DataType, LocaleMPI, Allocator>& input_grad_0);
+
+protected:
+  Backend& m_be;
+};
+
+extern template class ChannelwiseSoftmax<::distconv::BackendDNNLib, float>;
+extern template class ChannelwiseSoftmax<::distconv::BackendDNNLib, double>;
+} // namespace distconv
 
 #endif // LBANN_HAS_DISTCONV
 #endif // LBANN_LAYERS_MISC_DISTCONV_CHANNELWISE_SOFTMAX
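
Aside from the copyright bump, the change to this header is a pure clang-format reflow; the ChannelwiseSoftmax wrapper's interface is unchanged: forward() and backward() act on MPI-distributed distconv tensors and return an int status. For orientation, a minimal usage sketch assuming an already-configured distconv Backend; the free function here is hypothetical, and the real call sites live in the layer's distconv adapter (channelwise_softmax_impl.hpp):

  // Hypothetical usage sketch; "apply_channelwise_softmax" is illustrative,
  // not an LBANN API. Assumes tensors already distributed via LocaleMPI.
  #include "lbann/layers/misc/distconv/distconv_channelwise_softmax.hpp"

  template <typename Backend, typename DataType, typename Allocator>
  int apply_channelwise_softmax(
    Backend& backend,
    const distconv::tensor::Tensor<DataType,
                                   distconv::tensor::LocaleMPI,
                                   Allocator>& input,
    distconv::tensor::Tensor<DataType,
                             distconv::tensor::LocaleMPI,
                             Allocator>& output)
  {
    // The wrapper stores only a reference to the backend; forward() runs
    // the softmax over each channel of the distributed tensor.
    distconv::ChannelwiseSoftmax<Backend, DataType> softmax(backend);
    return softmax.forward(input, output);
  }
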
src/layers/misc/channelwise_softmax_kernels.cuh (1 change: 1 addition & 0 deletions)
@@ -25,6 +25,7 @@
 ////////////////////////////////////////////////////////////////////////////////
 #ifndef LBANN_LAYERS_MISC_CHANNELWISE_SOFTMAX_KERNELS
 #define LBANN_LAYERS_MISC_CHANNELWISE_SOFTMAX_KERNELS
+#include "lbann/utils/gpu/helpers.hpp"
 namespace lbann{
 namespace{
 using Size3 = gpu_lib::array<size_t,3>;
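
The single added line makes this header self-sufficient: Size3 above aliases gpu_lib::array<size_t,3>, which lives in lbann/utils/gpu/helpers.hpp, so the kernels no longer rely on whichever file included this one having pulled that header in first. As an illustration of how such a Size3 extent is typically consumed (a stand-in kernel, not one of this file's actual softmax kernels, and the meaning of the three extents is an assumption here):

  // Stand-in CUDA kernel: a Size3 packs three extents (assumed here to be
  // values per channel, channels, and mini-batch samples) that bound a
  // flat 1-D launch.
  __global__ void passthrough_kernel(Size3 dims, const float* x, float* y)
  {
    const size_t gid = blockIdx.x * blockDim.x + threadIdx.x;
    const size_t total = dims[0] * dims[1] * dims[2];
    if (gid < total) {
      y[gid] = x[gid]; // a real softmax kernel does per-channel max/exp/sum
    }
  }
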
src/layers/misc/distconv/distconv_channelwise_softmax.cu (17 changes: 6 additions & 11 deletions)
@@ -123,26 +123,21 @@ int ChannelwiseSoftmax<Backend, DataType>::backward(
 // Explicit template instantiation
 // =========================================================
 
-#define ETI(T, Backend) \
-template class ChannelwiseSoftmax<Backend, T>; \
-template int ChannelwiseSoftmax<Backend, T>::forward<tensor::CUDAAllocator>( \
+#define PROTO(T) \
+template class ChannelwiseSoftmax<BackendDNNLib, T>; \
+template int \
+ChannelwiseSoftmax<BackendDNNLib, T>::forward<tensor::CUDAAllocator>( \
 const tensor::Tensor<T, tensor::LocaleMPI, tensor::CUDAAllocator>& \
 input_0, \
 tensor::Tensor<T, tensor::LocaleMPI, tensor::CUDAAllocator>& output_0); \
 template int \
-ChannelwiseSoftmax<Backend, T>::backward<tensor::CUDAAllocator>( \
+ChannelwiseSoftmax<BackendDNNLib, T>::backward<tensor::CUDAAllocator>( \
 const tensor::Tensor<T, tensor::LocaleMPI, tensor::CUDAAllocator>& \
 input_0, \
 const tensor::Tensor<T, tensor::LocaleMPI, tensor::CUDAAllocator>& \
 input_1, \
 tensor::Tensor<T, tensor::LocaleMPI, tensor::CUDAAllocator>& output_grad);
 
-/// @todo: fp16
-ETI(float, BackendDNNLib)
-#ifdef LBANN_HAS_DOUBLE
-ETI(double, BackendDNNLib)
-#endif // LBANN_HAS_DOUBLE
-
-#undef ETI
+#include "lbann/macros/instantiate.hpp"
 } // namespace distconv
 #endif // LBANN_HAS_DISTCONV
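
This hunk is the commit's substance. Previously the file defined a local ETI macro and invoked it by hand, once for float and once for double behind LBANN_HAS_DOUBLE, with fp16 flagged as a TODO. It now defines PROTO(T) and includes lbann/macros/instantiate.hpp, LBANN's shared instantiation header, which expands PROTO once for each data type the build supports. A simplified sketch of the idiom, not the real contents of that header:

  // Simplified sketch of the PROTO/instantiate.hpp convention; the actual
  // macro header is more elaborate (e.g. around half-precision support).
  #define PROTO(T) template class ChannelwiseSoftmax<BackendDNNLib, T>;

  // Roughly what the include expands to for a standard build:
  PROTO(float)
  #ifdef LBANN_HAS_DOUBLE
  PROTO(double)
  #endif // LBANN_HAS_DOUBLE
  #undef PROTO

Centralizing the type list this way means each translation unit picks up newly supported types from one place instead of maintaining its own guarded ETI list.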
