So my question is: I have a Layer trait with the following input and output types:
pub trait Layer {
    type Input: Dimension;
    type Output: Dimension;

    fn forward(&mut self, input: &ArrayBase<OwnedRepr<f32>, Self::Input>) -> ArrayBase<OwnedRepr<f32>, Self::Output>;
}
with this forward function:
impl<A: Activation> Layer for DenseLayer<A> {
    type Input = Ix2;
    type Output = Ix2;

    fn forward(&mut self, input: &Array2<f32>) -> Array2<f32> {
        assert_eq!(input.shape()[1], self.weights.shape()[0], "Input width must match weight height.");
        let z = input.dot(&self.weights) + &self.biases;
        self.activation.activate(&z)
    }
}
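For context, here is a sketch (not code I actually have, the name is made up) of the kind of layer I have in mind that collapses a 2-D input into a 1-D output, assuming the Layer trait above is in scope:

use ndarray::{ArrayBase, Axis, Ix1, Ix2, OwnedRepr};

// Sketch only: a layer whose output has one dimension fewer than its input.
pub struct SumPoolLayer;

impl Layer for SumPoolLayer {
    type Input = Ix2;   // takes a 2-D batch
    type Output = Ix1;  // returns a 1-D array

    fn forward(&mut self, input: &ArrayBase<OwnedRepr<f32>, Ix2>) -> ArrayBase<OwnedRepr<f32>, Ix1> {
        // Collapse the second axis, e.g. by summing over it.
        input.sum_axis(Axis(1))
    }
}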
I have the separate Input and Output associated types precisely so that a layer's forward or backward function can accept, for example, a 2-dimensional array but still output an array with only 1 dimension. I then have a wrapper around this Layer trait that I want to forward through all the layers:
pub struct NeuralNetwork<'a, L>
where
    L: Layer + 'a,
{
    layers: Vec<L>,
    loss_function: &'a dyn Cost,
}

impl<'a, L> NeuralNetwork<'a, L>
where
    L: Layer + 'a,
{
    pub fn new(layers: Vec<L>, loss_function: &'a dyn Cost) -> Self {
        NeuralNetwork { layers, loss_function }
    }

    pub fn forward(&mut self, input: &ArrayBase<OwnedRepr<f32>, L::Input>) -> ArrayBase<OwnedRepr<f32>, L::Output> {
        let mut output = input.clone();
        // todo fix the layer forward changing input to output
        // causing mismatch in the input and output dimensions of forward
        for layer in &mut self.layers {
            output = layer.forward(&output);
        }
        output
    }
}
Now, in the for loop I start with a value of the Input type and get back the Output type from layer.forward, so on the next iteration the loop is feeding an Output value into a function that only accepts the Input type. At least I think that is what is happening. It seems like a really simple problem, but I'm honestly not sure how to solve it.
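If it helps, I think the same issue can be reproduced without ndarray at all. This is just a made-up reduction (the Step trait and chain function are not real code of mine), but as far as I can tell it fails with the same kind of mismatch:

trait Step {
    type In;
    type Out;
    fn run(&mut self, x: &Self::In) -> Self::Out;
}

fn chain<S: Step>(steps: &mut [S], input: &S::In) -> S::Out
where
    S::In: Clone,
{
    // `value` is inferred to have type S::In here...
    let mut value = input.clone();
    for s in steps.iter_mut() {
        // ...but `s.run` returns S::Out, so this reassignment does not type-check,
        // even if every concrete Step happens to use the same type for In and Out.
        value = s.run(&value);
    }
    value
}

So even though DenseLayer uses Ix2 for both Input and Output, the generic forward in NeuralNetwork is checked against the trait bounds alone, where L::Input and L::Output are not known to be the same type.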
EDIT 1:
Reproducible example:
// Requires the `ndarray`, `ndarray-rand` and `rand` crates.
use ndarray::{Array, Array2, ArrayBase, Dimension, OwnedRepr};
use ndarray_rand::RandomExt; // provides `Array::random`

pub trait Layer {
    type Input: Dimension;
    type Output: Dimension;

    fn forward(&mut self, input: &ArrayBase<OwnedRepr<f32>, Self::Input>) -> ArrayBase<OwnedRepr<f32>, Self::Output>;
}

// A dense layer struct
pub struct DenseLayer {
    weights: Array2<f32>,
    biases: Array2<f32>,
}

impl DenseLayer {
    pub fn new(input_size: usize, output_size: usize) -> Self {
        let weights = Array::random((input_size, output_size), rand::distributions::Uniform::new(-0.5, 0.5));
        let biases = Array::zeros((1, output_size));
        DenseLayer { weights, biases }
    }
}

impl Layer for DenseLayer {
    type Input = ndarray::Ix2;  // two-dimensional input
    type Output = ndarray::Ix2; // two-dimensional output

    fn forward(&mut self, input: &ArrayBase<OwnedRepr<f32>, Self::Input>) -> ArrayBase<OwnedRepr<f32>, Self::Output> {
        assert_eq!(input.shape()[1], self.weights.shape()[0], "Input width must match weight height.");
        let z = input.dot(&self.weights) + &self.biases;
        z // return the output directly, without an activation
    }
}

// Neural network struct (loss function omitted in this reduced example)
pub struct NeuralNetwork<L>
where
    L: Layer,
{
    layers: Vec<L>,
}

impl<L> NeuralNetwork<L>
where
    L: Layer,
{
    pub fn new(layers: Vec<L>) -> Self {
        NeuralNetwork { layers }
    }

    pub fn forward(&mut self, input: &ArrayBase<OwnedRepr<f32>, L::Input>) -> ArrayBase<OwnedRepr<f32>, L::Output> {
        // `output` starts out with dimension type L::Input...
        let mut output = input.clone();
        for layer in &mut self.layers {
            // ...but `layer.forward` returns L::Output, so this reassignment
            // is where the mismatched-types error occurs.
            output = layer.forward(&output);
        }
        output
    }
}

fn main() {
    // Create a neural network with one dense layer
    let dense_layer = DenseLayer::new(3, 2);
    let mut nn = NeuralNetwork::new(vec![dense_layer]);

    // Create an example input (1 batch, 3 features)
    let input = Array::from_shape_vec((1, 3), vec![1.0, 2.0, 3.0]).unwrap();

    // Forward pass
    let output = nn.forward(&input);
    println!("Output: {:?}", output);
}