Implementing the Affine/Softmax Layers in C#

I'm working my way through "ゼロから作るDeep Learning" (Deep Learning from Scratch) in C#, and chapter 5 has now reached section 5.6.

This time, again, it's just a matter of transcribing the Affine layer and the Softmax-with-Loss layer into C#, using the Numpy.NET bindings (using Numpy;) as before.

using Numpy;

public class Affine
{
    // Weights
    public NDarray W { get; }

    // Bias
    public NDarray b { get; }

    // Input cached by forward for use in backward
    public NDarray? x { get; private set; }

    // Gradient with respect to W
    public NDarray? dW { get; private set; }

    // Gradient with respect to b
    public NDarray? db { get; private set; }

    public Affine(NDarray W, NDarray b)
    {
        this.W = W;
        this.b = b;
        x = null;
        dW = null;
        db = null;
    }

    public NDarray forward(NDarray x)
    {
        this.x = x;
        // out = x · W + b (b is broadcast across the batch)
        var @out = np.dot(x, W) + b;
        return @out;
    }

    public NDarray backward(NDarray dout)
    {
        // dx = dout · Wᵀ, dW = xᵀ · dout, db = column-wise sum of dout
        var dx = np.dot(dout, W.T);
        dW = np.dot(x!.T, dout);
        db = np.sum(dout, axis: 0);
        return dx;
    }
}
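
A quick shape check makes it easier to trust the transcription. This is just a minimal sketch: the weight, bias, and input values below are made up (not from the book), and it assumes the Affine class above is in scope.

// Made-up values: W is (2, 3), b is (3,), x is a batch of two samples (2, 2).
var W = np.array(new double[,] { { 1, 2, 3 }, { 4, 5, 6 } });
var b = np.array(new double[] { 1, 2, 3 });
var affine = new Affine(W, b);

var x = np.array(new double[,] { { 1, 2 }, { 3, 4 } });
var @out = affine.forward(x);        // (2, 3) = x · W + b

var dout = np.ones_like(@out);       // stand-in for the upstream gradient
var dx = affine.backward(dout);

Console.WriteLine(dx.shape);         // (2, 2), same as x
Console.WriteLine(affine.dW!.shape); // (2, 3), same as W
Console.WriteLine(affine.db!.shape); // (3,), same as b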

public class SoftmaxWithLoss
{
    // Loss
    public NDarray? loss { get; private set; }

    // Output of softmax
    public NDarray? y { get; private set; }

    // Teacher data (one-hot vectors)
    public NDarray? t { get; private set; }

    public NDarray forward(NDarray x, NDarray t)
    {
        this.t = t;
        this.y = softmax(x);
        this.loss = cross_entropy_error(this.y, this.t);
        return this.loss;
    }

    public NDarray backward(NDarray dout)
    {
        // dout is 1 at the loss node (the book's default) and is not used here.
        // Softmax combined with cross-entropy backpropagates the clean gradient
        // (y - t), divided by batch_size because the loss is the per-batch mean.
        var batch_size = this.t!.shape[0];
        var dx = (this.y! - this.t!) / batch_size;
        return dx;
    }

    private static NDarray softmax(NDarray a)
    {
        // Shift by the max so exp cannot overflow; softmax is invariant to a
        // constant offset. (The book's batch version subtracts the per-row max;
        // the global max is used here to stick to the same Numpy.NET calls as
        // the rest of this post.)
        var c = np.max(a);
        var exp_a = np.exp(a - c);
        // Normalize each sample separately, since backward assumes mini-batch
        // input: transpose so np.sum(axis: 0) broadcasts across each column,
        // then transpose back. The original whole-array sum would mix rows.
        var y = (exp_a.T / np.sum(exp_a.T, axis: 0)).T;
        return y;
    }

    private static NDarray cross_entropy_error(NDarray y, NDarray t)
    {
        // delta guards against log(0). Dividing by batch_size averages the loss
        // over the mini-batch, matching the book's batch version and the
        // (y - t) / batch_size gradient in backward above.
        var delta = 1e-7;
        var batch_size = y.shape[0];
        return (-1) * np.sum(t * np.log(y + delta)) / batch_size;
    }
}
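
The same kind of smoke test for SoftmaxWithLoss. Again, the scores and one-hot labels are made-up values; note that backward ignores its dout argument (in the book it defaults to 1 at the loss node), so any placeholder will do.

// Made-up scores and one-hot labels for a batch of two samples.
var layer = new SoftmaxWithLoss();
var x = np.array(new double[,] { { 0.3, 2.9, 4.0 }, { 0.1, 0.2, 0.7 } });
var t = np.array(new double[,] { { 0.0, 0.0, 1.0 }, { 0.0, 1.0, 0.0 } });

var loss = layer.forward(x, t);            // scalar: mean cross-entropy
var dout = np.array(new double[] { 1.0 }); // ignored by backward
var dx = layer.backward(dout);

Console.WriteLine(loss);
Console.WriteLine(dx.shape); // (2, 3): (y - t) / batch_size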