sac-group / sac2c · Issues · #2381

Closed · Created Feb 13, 2024 by Thomas Koopman (@thomas)

Assertion "(actual_cls != C_scl) && (formal_cls != C_scl)" fails

Not-so-minimal example that triggers the assertion:

use MathArray: all;
use Structures: all;
use StdIO: all;

#define CATEGORIES 10
#define TRAIN_SIZE 60000

/* Sums x over its leading m axes, yielding an array of shape shpi. */
inline
float[n:shpi] rsum(int m, float[m:shpo,n:shpi] x)
{
  return {iv -> sum({jv -> x[jv++iv] | jv < shpo}) | iv < shpi};
}
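
/* Hedged worked example for rsum (illustrative): with m == 1,
 * rsum(1, [[1f,2f,3f],[4f,5f,6f]]) sums over the leading axis and
 * yields [5f,7f,9f]. */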

/* Maps Convolve over the m weight maps and adds a per-map bias. */
noinline
float[m:mshp,n:oshp,b:bshp] MultiConv(float[n:ishp,b:bshp] in,
                                      float[m:mshp,n:wshp] weights,
                                      float[m:mshp] bias)
  | all(oshp == ishp - wshp + 1)
{
  x = {iv -> Convolve(in, weights[iv]) + bias[iv] | iv < mshp};
  print(shape(x));
  return x;
}

/* Valid (unpadded) convolution: oshp == ishp - wshp + 1. */
noinline
float[n:oshp,b:bshp] Convolve(float[n:ishp,b:bshp] in, float[n:wshp] weights)
{
  oshp = ishp - wshp + 1;
  return {iv -> rsum(n, {jv -> weights[jv] * in[iv + jv]}) | iv < oshp};
}
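
/* Hedged worked example for Convolve (illustrative): for
 * in == [1f,2f,3f,4f,5f] and weights == [1f,1f,1f], oshp == [3] and the
 * result is [6f,9f,12f]; each element is the dot product of weights with
 * one window of in. */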

inline
float average( float[*] array)
{
   return sum( array) / tof( prod( shape( array)));
}

inline
float[*] AveragePool( float[*] in, int[.] filter)
//
// assert( dim(in) >= shape(filter)[0] )
// assert( shape(out) == shape(in)/filter )
// 
{
  ones = genarray( [dim( in)], 1);
  filter = drop( shape( filter), ones) ++ filter;
  shp = shape( in) / filter;
  /*
   * out = { iv -> average( { ov -> in[iv+ov] | ov < filter})
   *             | iv < shp};
   */
  out = with {
          (. <= iv <= .) : average( with {
                                      (. <= ov <= .) : in[iv*filter+ov];
                                    } : genarray( filter, 0f));
        } : genarray( shp, 0f);
  return out;
}
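
/* Hedged worked example for AveragePool (illustrative):
 * AveragePool([[1f,2f],[3f,4f]], [2,2]) averages the single 2x2 window,
 * yielding [[2.5f]]. */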

/* Distributes each pooled gradient uniformly over its filter window. */
inline
float[*] BackAveragePool( float[*] d_out, int[.] filter )
{
  ones = genarray( [dim( d_out)], 1);
  filter = drop( shape( filter), ones) ++ filter;
  shp = shape( d_out) * filter;
  d_in = with {
           (. <= iv <=.) : d_out[iv/filter] / tof( prod( filter));
         } : genarray( shp, 0f);
  return d_in;
}
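
/* Hedged worked example for BackAveragePool (illustrative):
 * BackAveragePool([[4f]], [2,2]) spreads each gradient evenly over its
 * window, yielding [[1f,1f],[1f,1f]]. */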

/* Gradient of Convolve with respect to its weights. */
inline
float[*] BackWeights2( float[*] d_out, float[*] weights, float[*] in)
{
  return with {
           (. <= ov <= .) :
             with {
               (0*shape( d_out) <= iv < shape( d_out)) : in[iv+ov] * d_out[iv];
             } : fold( +, 0f);
         } : genarray( shape( weights), 0f);
}
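
/* Hedged worked example for BackWeights2 (illustrative): with
 * d_out == [1f,1f], weights of shape [2], and in == [1f,2f,3f], the
 * result is [3f,5f], since d_weights[ov] == sum_iv in[iv+ov] * d_out[iv]. */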

/* Gradients of MultiConv with respect to its input, weights, and bias. */
float[*], float[*], float[*]
BackMultiConv( float[*] d_out, float[*] weights, float[*] in, float[*] bias)
{
  shp_act_map = take( -[dim(in)], shape(weights));
  shp_maps = drop( -[dim(in)], shape(weights));
  d_in = with {
            ( . <= iv <= .) :
                   with {
                      (0*shp_maps <= ov < shp_maps) {
                           lb = max( 0*shp_act_map, iv - take( -[dim(in)], shape(d_out)) + 1);
                           ub = min( shp_act_map, iv+1 );
                        } : with { 
                              ( lb <= ov2 < ub) : weights[ov ++ ov2] * d_out[ov ++ (iv-ov2)];
                            } : fold( +, 0f);
                   } : fold( +, 0f);
         } : genarray( shape(in), 0f);
  d_weights = with {
                (. <= iv <= .) : BackWeights2( d_out[iv], weights[iv], in);
              } : genarray( shp_maps, genarray( take( -[dim(in)], shape(weights)), 0f));
  d_bias = with {
             (. <= iv <= .) : sum( d_out[iv]);
           } : genarray( shp_maps, 0f);

  return ( d_in, d_weights, d_bias);
}

inline
float[*] BackLogistic( float[*] d_out, float[*] out)
{
  return d_out * out * (1f - out);
}
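
/* Hedged note on BackLogistic (illustrative): this is the chain rule for
 * the logistic function, whose derivative is out * (1 - out); e.g. for
 * d_out == 1f and out == 0.5f the result is 0.25f. */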

/* One forward/backward pass; returns the parameter gradients and the
 * error. */
inline
float[6,5,5], float[6], float[12,6,5,5], float[12], float[CATEGORIES,12,1,4,4], float[CATEGORIES], float
TrainZhang( float[28,28] in, float[6,5,5] k1, float[6] b1,
                             float[12,6,5,5] k2, float[12] b2,
                             float[CATEGORIES,12,1,4,4] fc, float[CATEGORIES] b,
                             float[CATEGORIES,1,1,1,1] target)
{
  float[6,24,24] c1, d_c1;
  float[6,12,12] s1, d_s1;
  float[12,1,8,8] c2, d_c2;
  float[12,1,4,4] s2, d_s2;
  float[CATEGORIES,1,1,1,1] out, d_out;

  c1 = MultiConv( in, k1, b1 );
  s1 = AveragePool( c1, [2,2]);
  c2 = MultiConv( s1, k2, b2);
  s2 = AveragePool( c2, [2,2]);
  out = MultiConv( s2, fc, b);

  d_out = out - target;
  error = 0f;

  d_s2, d_fc, d_b = BackMultiConv( BackLogistic( d_out, out), fc, s2, b);
  d_c2 = BackAveragePool( d_s2, [2,2]);
  d_s1, d_k2, d_b2 = BackMultiConv( BackLogistic( d_c2, c2), k2, s1, b2);
  d_c1 = BackAveragePool( d_s1, [2,2]);
  _, d_k1, d_b1 = BackMultiConv( BackLogistic( d_c1, c1), k1, in, b1);

  return ( d_k1, d_b1, d_k2, d_b2, d_fc, d_b, error);
}

int main()
{
  batchsize = 100;

  k1 = genarray( [6,5,5], 1f/25f);
  b1 = genarray( [6], 1f/6f);
  k2 = genarray( [12,6,5,5], 1f/150f);
  b2 = genarray( [12], 1f/12f);
  fc = genarray( [CATEGORIES,12,1,4,4], 1f/192f);
  b = genarray( [CATEGORIES], 1f/tof(CATEGORIES));

  training_images = genarray([TRAIN_SIZE,28,28], 0f);

  training_labels = genarray([TRAIN_SIZE], 3);

  error = 0d;
  delta_k1, delta_b1, delta_k2, delta_b2, delta_fc, delta_b, berr =
    with {
      ([0] <= iv < [batchsize]) {
        in = training_images[iv];
        target = genarray([CATEGORIES,1,1,1,1], 0f);
        target[[training_labels[iv],0,0,0,0]] = 1f;
        d_k1, d_b1, d_k2, d_b2, d_fc, d_b, err =
          TrainZhang(in, k1, b1, k2, b2, fc, b, target);
      }: (d_k1, d_b1, d_k2, d_b2, d_fc, d_b, err);
    }: (fold(+, 0f * k1),
        fold(+, 0f * b1),
        fold(+, 0f * k2),
        fold(+, 0f * b2),
        fold(+, 0f * fc),
        fold(+, 0f * b),
        fold(+, 0f));

  k1 = k1 - delta_k1;
  b1 = b1 - delta_b1;
  k2 = k2 - delta_k2;
  b2 = b2 - delta_b2;
  fc = fc - delta_fc;
  b  = b  - delta_b;
  error += tod(berr);

  print(k1);

  return 0;
}

Using

sac2c 1.3.3-MijasCosta-1153-g14eb
build-type: DEBUG
built-by: "thomas" at 2024-02-12T12:49:22

with check -c
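
A hedged reproduction sketch (the file name bug2381.sac is hypothetical,
and this assumes "check -c" refers to sac2c's -check c option):

  sac2c -check c bug2381.sac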
