Skip to content
This repository has been archived by the owner on Dec 20, 2024. It is now read-only.

Commit

Permalink
update tests
Browse files Browse the repository at this point in the history
  • Loading branch information
JPXKQX committed Jul 8, 2024
1 parent 0dbdd6c commit 1754690
Show file tree
Hide file tree
Showing 7 changed files with 30 additions and 3 deletions.
11 changes: 9 additions & 2 deletions src/anemoi/models/layers/mapper.py
Original file line number Diff line number Diff line change
Expand Up @@ -135,8 +135,7 @@ def _register_edges(
Trainable tensor size
"""
         if edge_attributes is None:
-            LOGGER.warning("No edge attributes provided.")
-            edge_attributes = []
+            raise ValueError("Edge attributes must be provided")

edge_attr_tensor = torch.cat([sub_graph[attr] for attr in edge_attributes], axis=1)

Expand Down Expand Up @@ -288,6 +287,7 @@ def __init__(
num_heads: int = 16,
mlp_hidden_ratio: int = 4,
sub_graph: Optional[HeteroData] = None,
sub_graph_edge_attributes: Optional[list[str]] = None,
src_grid_size: int = 0,
dst_grid_size: int = 0,
) -> None:
Expand Down Expand Up @@ -326,6 +326,7 @@ def __init__(
num_heads=num_heads,
mlp_hidden_ratio=mlp_hidden_ratio,
sub_graph=sub_graph,
sub_graph_edge_attributes=sub_graph_edge_attributes,
src_grid_size=src_grid_size,
dst_grid_size=dst_grid_size,
)
Expand Down Expand Up @@ -359,6 +360,7 @@ def __init__(
num_heads: int = 16,
mlp_hidden_ratio: int = 4,
sub_graph: Optional[HeteroData] = None,
sub_graph_edge_attributes: Optional[list[str]] = None,
src_grid_size: int = 0,
dst_grid_size: int = 0,
) -> None:
Expand Down Expand Up @@ -397,6 +399,7 @@ def __init__(
num_heads=num_heads,
mlp_hidden_ratio=mlp_hidden_ratio,
sub_graph=sub_graph,
sub_graph_edge_attributes=sub_graph_edge_attributes,
src_grid_size=src_grid_size,
dst_grid_size=dst_grid_size,
)
Expand Down Expand Up @@ -533,6 +536,7 @@ def __init__(
activation: str = "SiLU",
mlp_extra_layers: int = 0,
sub_graph: Optional[HeteroData] = None,
sub_graph_edge_attributes: Optional[list[str]] = None,
src_grid_size: int = 0,
dst_grid_size: int = 0,
) -> None:
Expand Down Expand Up @@ -570,6 +574,7 @@ def __init__(
activation,
mlp_extra_layers,
sub_graph=sub_graph,
sub_graph_edge_attributes=sub_graph_edge_attributes,
src_grid_size=src_grid_size,
dst_grid_size=dst_grid_size,
)
Expand Down Expand Up @@ -617,6 +622,7 @@ def __init__(
activation: str = "SiLU",
mlp_extra_layers: int = 0,
sub_graph: Optional[HeteroData] = None,
sub_graph_edge_attributes: Optional[list[str]] = None,
src_grid_size: int = 0,
dst_grid_size: int = 0,
) -> None:
Expand Down Expand Up @@ -654,6 +660,7 @@ def __init__(
activation=activation,
mlp_extra_layers=mlp_extra_layers,
sub_graph=sub_graph,
sub_graph_edge_attributes=sub_graph_edge_attributes,
src_grid_size=src_grid_size,
dst_grid_size=dst_grid_size,
)
Expand Down
3 changes: 2 additions & 1 deletion src/anemoi/models/layers/processor.py
Original file line number Diff line number Diff line change
Expand Up @@ -171,6 +171,7 @@ def __init__(
activation: str = "SiLU",
cpu_offload: bool = False,
sub_graph: Optional[HeteroData] = None,
sub_graph_edge_attributes: Optional[list[str]] = None,
src_grid_size: int = 0,
dst_grid_size: int = 0,
**kwargs,
Expand Down Expand Up @@ -201,7 +202,7 @@ def __init__(
mlp_extra_layers=mlp_extra_layers,
)

-        self._register_edges(sub_graph, src_grid_size, dst_grid_size, trainable_size)
+        self._register_edges(sub_graph, sub_graph_edge_attributes, src_grid_size, dst_grid_size, trainable_size)

self.trainable = TrainableTensor(trainable_size=trainable_size, tensor_size=self.edge_attr.shape[0])

Expand Down
1 change: 1 addition & 0 deletions tests/layers/mapper/test_base_mapper.py
Original file line number Diff line number Diff line change
Expand Up @@ -55,6 +55,7 @@ def mapper(self, mapper_init, fake_graph):
cpu_offload=cpu_offload,
activation=activation,
sub_graph=fake_graph[("src", "to", "dst")],
sub_graph_edge_attributes=["edge_attr1", "edge_attr2"],
trainable_size=trainable_size,
)

Expand Down
3 changes: 3 additions & 0 deletions tests/layers/mapper/test_graphconv_mapper.py
Original file line number Diff line number Diff line change
Expand Up @@ -58,6 +58,7 @@ def mapper(self, mapper_init, fake_graph):
cpu_offload=cpu_offload,
activation=activation,
sub_graph=fake_graph[("src", "to", "dst")],
sub_graph_edge_attributes=["edge_attr1", "edge_attr2"],
trainable_size=trainable_size,
)

Expand Down Expand Up @@ -166,6 +167,7 @@ def mapper(self, mapper_init, fake_graph):
cpu_offload=cpu_offload,
activation=activation,
sub_graph=fake_graph[("src", "to", "dst")],
sub_graph_edge_attributes=["edge_attr1", "edge_attr2"],
trainable_size=trainable_size,
)

Expand Down Expand Up @@ -254,6 +256,7 @@ def mapper(self, mapper_init, fake_graph):
cpu_offload=cpu_offload,
activation=activation,
sub_graph=fake_graph[("src", "to", "dst")],
sub_graph_edge_attributes=["edge_attr1", "edge_attr2"],
trainable_size=trainable_size,
)

Expand Down
3 changes: 3 additions & 0 deletions tests/layers/mapper/test_graphtransformer_mapper.py
Original file line number Diff line number Diff line change
Expand Up @@ -64,6 +64,7 @@ def mapper(self, mapper_init, fake_graph):
cpu_offload=cpu_offload,
activation=activation,
sub_graph=fake_graph[("src", "to", "dst")],
sub_graph_edge_attributes=["edge_attr1", "edge_attr2"],
trainable_size=trainable_size,
num_heads=num_heads,
mlp_hidden_ratio=mlp_hidden_ratio,
Expand Down Expand Up @@ -182,6 +183,7 @@ def mapper(self, mapper_init, fake_graph):
cpu_offload=cpu_offload,
activation=activation,
sub_graph=fake_graph[("src", "to", "dst")],
sub_graph_edge_attributes=["edge_attr1", "edge_attr2"],
trainable_size=trainable_size,
num_heads=num_heads,
mlp_hidden_ratio=mlp_hidden_ratio,
Expand Down Expand Up @@ -278,6 +280,7 @@ def mapper(self, mapper_init, fake_graph):
cpu_offload=cpu_offload,
activation=activation,
sub_graph=fake_graph[("src", "to", "dst")],
sub_graph_edge_attributes=["edge_attr1", "edge_attr2"],
trainable_size=trainable_size,
)

Expand Down
6 changes: 6 additions & 0 deletions tests/layers/processor/test_graphconv_processor.py
Original file line number Diff line number Diff line change
Expand Up @@ -35,6 +35,7 @@ def graphconv_init(fake_graph: HeteroData):
activation = "SiLU"
cpu_offload = False
sub_graph = fake_graph[("nodes", "to", "nodes")]
edge_attributes = ["edge_attr1", "edge_attr2"]
src_grid_size = 0
dst_grid_size = 0
trainable_size = 8
Expand All @@ -46,6 +47,7 @@ def graphconv_init(fake_graph: HeteroData):
activation,
cpu_offload,
sub_graph,
edge_attributes,
src_grid_size,
dst_grid_size,
trainable_size,
Expand All @@ -62,6 +64,7 @@ def graphconv_processor(graphconv_init):
activation,
cpu_offload,
sub_graph,
edge_attributes,
src_grid_size,
dst_grid_size,
trainable_size,
Expand All @@ -74,6 +77,7 @@ def graphconv_processor(graphconv_init):
activation=activation,
cpu_offload=cpu_offload,
sub_graph=sub_graph,
sub_graph_edge_attributes=edge_attributes,
src_grid_size=src_grid_size,
dst_grid_size=dst_grid_size,
trainable_size=trainable_size,
Expand All @@ -89,6 +93,7 @@ def test_graphconv_processor_init(graphconv_processor, graphconv_init):
_activation,
_cpu_offload,
_sub_graph,
_edge_attributes,
_src_grid_size,
_dst_grid_size,
_trainable_size,
Expand All @@ -109,6 +114,7 @@ def test_forward(graphconv_processor, graphconv_init):
_activation,
_cpu_offload,
_sub_graph,
_edge_attributes,
_src_grid_size,
_dst_grid_size,
trainable_size,
Expand Down
6 changes: 6 additions & 0 deletions tests/layers/processor/test_graphtransformer_processor.py
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,7 @@ def graphtransformer_init(fake_graph: HeteroData):
activation = "GELU"
cpu_offload = False
sub_graph = fake_graph[("nodes", "to", "nodes")]
edge_attributes = ["edge_attr1", "edge_attr2"]
src_grid_size = 0
dst_grid_size = 0
trainable_size = 6
Expand All @@ -48,6 +49,7 @@ def graphtransformer_init(fake_graph: HeteroData):
activation,
cpu_offload,
sub_graph,
edge_attributes,
src_grid_size,
dst_grid_size,
trainable_size,
Expand All @@ -65,6 +67,7 @@ def graphtransformer_processor(graphtransformer_init):
activation,
cpu_offload,
sub_graph,
edge_attributes,
src_grid_size,
dst_grid_size,
trainable_size,
Expand All @@ -78,6 +81,7 @@ def graphtransformer_processor(graphtransformer_init):
activation=activation,
cpu_offload=cpu_offload,
sub_graph=sub_graph,
sub_graph_edge_attributes=edge_attributes,
src_grid_size=src_grid_size,
dst_grid_size=dst_grid_size,
trainable_size=trainable_size,
Expand All @@ -94,6 +98,7 @@ def test_graphtransformer_processor_init(graphtransformer_processor, graphtransf
_activation,
_cpu_offload,
_sub_graph,
_edge_attributes,
_src_grid_size,
_dst_grid_size,
_trainable_size,
Expand All @@ -115,6 +120,7 @@ def test_forward(graphtransformer_processor, graphtransformer_init):
_activation,
_cpu_offload,
_sub_graph,
_edge_attributes,
_src_grid_size,
_dst_grid_size,
trainable_size,
Expand Down

0 comments on commit 1754690

Please sign in to comment.