
Commit

adding stackedrnn
MartinuzziFrancesco committed Dec 20, 2024
1 parent ff95400 commit 6795bc4
Showing 14 changed files with 61 additions and 11 deletions.
27 changes: 16 additions & 11 deletions src/RecurrentLayers.jl
@@ -45,21 +45,26 @@ end
export MGUCell, LiGRUCell, IndRNNCell, RANCell, LightRUCell, RHNCell,
RHNCellUnit, NASCell, MUT1Cell, MUT2Cell, MUT3Cell, SCRNCell, PeepholeLSTMCell,
FastRNNCell, FastGRNNCell

export MGU, LiGRU, IndRNN, RAN, LightRU, NAS, RHN, MUT1, MUT2, MUT3,
SCRN, PeepholeLSTM, FastRNN, FastGRNN

+export StackedRNN

@compat(public, (initialstates))

include("mgu_cell.jl")
include("ligru_cell.jl")
include("indrnn_cell.jl")
include("ran_cell.jl")
include("lightru_cell.jl")
include("rhn_cell.jl")
include("nas_cell.jl")
include("mut_cell.jl")
include("scrn_cell.jl")
include("peepholelstm_cell.jl")
include("fastrnn_cell.jl")
include("cells/mgu_cell.jl")
include("cells/ligru_cell.jl")
include("cells/indrnn_cell.jl")
include("cells/ran_cell.jl")
include("cells/lightru_cell.jl")
include("cells/rhn_cell.jl")
include("cells/nas_cell.jl")
include("cells/mut_cell.jl")
include("cells/scrn_cell.jl")
include("cells/peepholelstm_cell.jl")
include("cells/fastrnn_cell.jl")

include("wrappers/stackedrnn.jl")

end #module
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
45 changes: 45 additions & 0 deletions src/wrappers/stackedrnn.jl
@@ -0,0 +1,45 @@
# based on https://fluxml.ai/Flux.jl/stable/guide/models/recurrence/
struct StackedRNN{L,S}
    layers::L
    states::S
end

Flux.@layer StackedRNN

"""
    StackedRNN(rlayer, input_size => hidden_size, args...;
        num_layers = 1, kwargs...)

Constructs a stack of recurrent layers given the recurrent layer type.

Arguments:
  - `rlayer`: Any recurrent layer such as [MGU](@ref), [RHN](@ref), etc., or
    [RNN](@extref), [LSTM](@extref), etc.
  - `input_size`: Defines the input dimension for the first layer.
  - `hidden_size`: Defines the dimension of the hidden state in each layer.
  - `num_layers`: The number of layers to stack. Default is 1.
  - `args...`: Additional positional arguments passed to the recurrent layers.
  - `kwargs...`: Additional keyword arguments passed to the recurrent layers.

Returns:
  A `StackedRNN` instance containing the specified number of recurrent layers
  and their initial states.
"""
function StackedRNN(rlayer, (input_size, hidden_size)::Pair, args...;
        num_layers::Int = 1,
        kwargs...)
    layers = []
    # the first layer maps input_size => hidden_size; every following
    # layer maps hidden_size => hidden_size
    for idx in 1:num_layers
        in_size = idx == 1 ? input_size : hidden_size
        push!(layers, rlayer(in_size => hidden_size, args...; kwargs...))
    end
    states = [initialstates(layer) for layer in layers]

    return StackedRNN(layers, states)
end

function (stackedrnn::StackedRNN)(inp::AbstractArray)
    # run the input through each layer in turn, pairing it with that layer's state
    for (layer, state) in zip(stackedrnn.layers, stackedrnn.states)
        inp = layer(inp, state)
    end
    return inp
end
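
For reference, a minimal usage sketch of the new wrapper (not part of the commit; the layer type, sizes, and input shape are illustrative, and it assumes `MGU` accepts an `input_size => hidden_size` pair like the other layers exported above):

using Flux, RecurrentLayers

# three stacked MGU layers: the first maps 4 => 8, the rest 8 => 8
model = StackedRNN(MGU, 4 => 8; num_layers = 3)

# one illustrative input sequence with 4 features and 10 time steps
inp = rand(Float32, 4, 10)
out = model(inp)  # output produced by the last layer in the stack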
