Coverage for NeuralTSNE/NeuralTSNE/TSNE/NeuralNetwork/neural_network.py: 94% (30 statements)
coverage.py v7.8.0, created at 2025-05-18 16:32 +0000
from typing import List
from collections import OrderedDict
from abc import ABC, abstractmethod

from torch import nn
class BaseModel(nn.Module, ABC):
    """
    Base class for neural network models.
    """

    @property
    @abstractmethod
    def in_features(self) -> int:
        """Return the number of input features."""
class NeuralNetwork(BaseModel):
    """
    Neural network model for dimensionality reduction.

    Parameters
    ----------
    `initial_features` : `int`, optional
        Number of input features.
    `n_components` : `int`, optional
        Number of components in the output.
    `multipliers` : `List[float]`, optional
        List of multipliers for the sizes of the hidden layers.
    `pre_filled_layers` : `OrderedDict | nn.Sequential`, optional
        Pre-filled `OrderedDict` or `nn.Sequential` of layers. Defaults to `None`.

    Note
    ----
    The network is designed for dimensionality reduction, with hidden
    layer sizes determined by the list of multipliers and ReLU
    activations applied between layers. If `pre_filled_layers` is
    provided, the network is initialized with the given layers and all
    other parameters are ignored; otherwise `initial_features`,
    `n_components`, and `multipliers` must all be given.
    """
    def __init__(
        self,
        initial_features: int | None = None,
        n_components: int | None = None,
        multipliers: List[float] | None = None,
        pre_filled_layers: OrderedDict | nn.Sequential | None = None,
    ) -> None:
        super().__init__()
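        # If pre-built layers were supplied, adopt them as-is (wrapping a
        # plain OrderedDict in nn.Sequential) and skip the construction below.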
        if pre_filled_layers is not None:
            self.sequential_stack = (
                nn.Sequential(pre_filled_layers)
                if isinstance(pre_filled_layers, OrderedDict)
                else pre_filled_layers
            )
            return
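        # Build the stack from scratch: Linear layer "0" maps initial_features
        # to int(multipliers[0] * initial_features), each subsequent Linear
        # layer rescales by the next multiplier, and ReLU activations named
        # "ReLu{i}" sit between consecutive Linear layers. (The loop
        # re-assigns "ReLu{i-1}" on each pass; the final layout is unchanged.)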
        layers = OrderedDict()
        layers["0"] = nn.Linear(
            initial_features, int(multipliers[0] * initial_features)
        )
        for i in range(1, len(multipliers)):
            layers["ReLu" + str(i - 1)] = nn.ReLU()
            layers[str(i)] = nn.Linear(
                int(multipliers[i - 1] * initial_features),
                int(multipliers[i] * initial_features),
            )
            layers["ReLu" + str(i)] = nn.ReLU()
        if len(multipliers) == 1:  # coverage: branch never taken by the tests
            layers["ReLu" + str(len(multipliers) - 1)] = nn.ReLU()
        layers[str(len(multipliers))] = nn.Linear(
            int(multipliers[-1] * initial_features), n_components
        )
        self.sequential_stack = nn.Sequential(layers)
    def forward(self, x):
        """
        Forward pass through the neural network.

        Parameters
        ----------
        `x` : `torch.Tensor`
            Input tensor.

        Returns
        -------
        `torch.Tensor`
            Output tensor.
        """
        return self.sequential_stack(x)
    @property
    def in_features(self) -> int:
        """Return the number of input features of the first linear layer."""
        return self.sequential_stack[0].in_features
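
# --- Usage sketch (illustrative addition, not part of the original module) ---
# Shows both construction paths under assumed sizes: the feature count,
# multipliers, and batch size below are made-up example values.
if __name__ == "__main__":
    import torch

    # Multiplier path: Linear layers sized 8 -> 16 -> 8 -> 2 with ReLUs between.
    net = NeuralNetwork(initial_features=8, n_components=2, multipliers=[2.0, 1.0])
    x = torch.randn(4, 8)       # batch of 4 samples with 8 features each
    print(net(x).shape)         # torch.Size([4, 2])
    print(net.in_features)      # 8

    # Pre-filled path: hand-built layers; the sizing arguments are ignored.
    prebuilt = OrderedDict(
        [("0", nn.Linear(8, 4)), ("ReLu0", nn.ReLU()), ("1", nn.Linear(4, 2))]
    )
    net2 = NeuralNetwork(pre_filled_layers=prebuilt)
    print(net2(x).shape)        # torch.Size([4, 2])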