diff --git a/compile_demo_project_with_blob.sh b/compile_demo_project_with_blob.sh new file mode 100755 index 00000000..f0f90009 --- /dev/null +++ b/compile_demo_project_with_blob.sh @@ -0,0 +1,18 @@ +#!/bin/sh +echo "##########" +echo "Compiling Demo..." +echo "##########" + +gcc -o rnnoise_demo \ + examples/rnnoise_demo.c \ + src/celt_lpc.c src/denoise.c src/kiss_fft.c \ + src/nnet.c src/nnet_default.c src/parse_lpcnet_weights.c \ + src/pitch.c src/rnn.c src/rnnoise_tables.c \ + -I./include -I./src \ + -lm \ + -DUSE_WEIGHTS_FILE + +echo "##########" +echo "Compiled rnnoise_demo !!" +ls -alh | grep rnnoise_demo +echo "##########" diff --git a/compile_demo_project_without_blob.sh b/compile_demo_project_without_blob.sh new file mode 100755 index 00000000..a2b51611 --- /dev/null +++ b/compile_demo_project_without_blob.sh @@ -0,0 +1,17 @@ +#!/bin/sh +echo "##########" +echo "Compiling Demo..." +echo "##########" + +gcc -o rnnoise_demo \ + examples/rnnoise_demo.c \ + src/celt_lpc.c src/denoise.c src/kiss_fft.c \ + src/nnet.c src/nnet_default.c src/parse_lpcnet_weights.c \ + src/pitch.c src/rnn.c src/rnnoise_data.c src/rnnoise_tables.c \ + -I./include -I./src \ + -lm + +echo "##########" +echo "Compiled rnnoise_demo !!" +ls -alh | grep rnnoise_demo +echo "##########" diff --git a/generate_weights_blob.sh b/generate_weights_blob.sh new file mode 100755 index 00000000..926400ce --- /dev/null +++ b/generate_weights_blob.sh @@ -0,0 +1,18 @@ +#!/bin/sh +echo "##########" +echo "Compiling Generator..." +echo "##########" + +gcc -o generate_weights_blob \ + src/write_weights.c src/parse_lpcnet_weights.c + +echo "##########" +echo "Generating weights_blob.bin..." +echo "##########" + +./generate_weights_blob + +echo "##########" +echo "Generated weights_blob.bin !!" 
+ls -alh | grep weights_blob.bin +echo "##########" diff --git a/src/denoise.c b/src/denoise.c index b6fc3d4a..17c9a909 100644 --- a/src/denoise.c +++ b/src/denoise.c @@ -282,6 +282,23 @@ int rnnoise_get_frame_size(void) { return FRAME_SIZE; } +#ifndef DUMP_BINARY_WEIGHTS +int init_rnnoise(RNNoise *model, const WeightArray *arrays) { + if (linear_init(&model->conv1, arrays, "conv1_bias", NULL, NULL,"conv1_weights_float", NULL, NULL, NULL, 195, 128)) return 1; + if (linear_init(&model->conv2, arrays, "conv2_bias", "conv2_subias", "conv2_weights_int8","conv2_weights_float", NULL, NULL, "conv2_scale", 384, 384)) return 1; + if (linear_init(&model->gru1_input, arrays, "gru1_input_bias", "gru1_input_subias", "gru1_input_weights_int8","gru1_input_weights_float", "gru1_input_weights_idx", NULL, "gru1_input_scale", 384, 1152)) return 1; + if (linear_init(&model->gru1_recurrent, arrays, "gru1_recurrent_bias", "gru1_recurrent_subias", "gru1_recurrent_weights_int8","gru1_recurrent_weights_float", "gru1_recurrent_weights_idx", "gru1_recurrent_weights_diag", "gru1_recurrent_scale", 384, 1152)) return 1; + if (linear_init(&model->gru2_input, arrays, "gru2_input_bias", "gru2_input_subias", "gru2_input_weights_int8","gru2_input_weights_float", "gru2_input_weights_idx", NULL, "gru2_input_scale", 384, 1152)) return 1; + if (linear_init(&model->gru2_recurrent, arrays, "gru2_recurrent_bias", "gru2_recurrent_subias", "gru2_recurrent_weights_int8","gru2_recurrent_weights_float", "gru2_recurrent_weights_idx", "gru2_recurrent_weights_diag", "gru2_recurrent_scale", 384, 1152)) return 1; + if (linear_init(&model->gru3_input, arrays, "gru3_input_bias", "gru3_input_subias", "gru3_input_weights_int8","gru3_input_weights_float", "gru3_input_weights_idx", NULL, "gru3_input_scale", 384, 1152)) return 1; + if (linear_init(&model->gru3_recurrent, arrays, "gru3_recurrent_bias", "gru3_recurrent_subias", "gru3_recurrent_weights_int8","gru3_recurrent_weights_float", "gru3_recurrent_weights_idx", 
"gru3_recurrent_weights_diag", "gru3_recurrent_scale", 384, 1152)) return 1; + if (linear_init(&model->dense_out, arrays, "dense_out_bias", NULL, NULL,"dense_out_weights_float", NULL, NULL, NULL, 1536, 32)) return 1; + if (linear_init(&model->vad_dense, arrays, "vad_dense_bias", NULL, NULL,"vad_dense_weights_float", NULL, NULL, NULL, 1536, 1)) return 1; + + return 0; +} +#endif + int rnnoise_init(DenoiseState *st, RNNModel *model) { memset(st, 0, sizeof(*st)); #if !TRAINING diff --git a/src/write_weights.c b/src/write_weights.c index 0f492cdc..30a036fa 100644 --- a/src/write_weights.c +++ b/src/write_weights.c @@ -45,33 +45,49 @@ void write_weights(const WeightArray *list, FILE *fout) { - int i=0; - unsigned char zeros[WEIGHT_BLOCK_SIZE] = {0}; - while (list[i].name != NULL) { - WeightHead h; - if (strlen(list[i].name) >= sizeof(h.name) - 1) { - printf("[write_weights] warning: name %s too long\n", list[i].name); - } - memcpy(h.head, "DNNw", 4); - h.version = WEIGHT_BLOB_VERSION; - h.type = list[i].type; - h.size = list[i].size; - h.block_size = (h.size+WEIGHT_BLOCK_SIZE-1)/WEIGHT_BLOCK_SIZE*WEIGHT_BLOCK_SIZE; - RNN_CLEAR(h.name, sizeof(h.name)); - strncpy(h.name, list[i].name, sizeof(h.name)); - h.name[sizeof(h.name)-1] = 0; - celt_assert(sizeof(h) == WEIGHT_BLOCK_SIZE); - fwrite(&h, 1, WEIGHT_BLOCK_SIZE, fout); - fwrite(list[i].data, 1, h.size, fout); - fwrite(zeros, 1, h.block_size-h.size, fout); - i++; - } + unsigned char zeros[WEIGHT_BLOCK_SIZE] = {0}; + + for(int i = 0; list[i].name != NULL; i++) + { + const WeightArray *arr = &list[i]; + WeightHead head; + + printf("## Writing: %s, arr->type: %d, arr->size: %d\n", arr->name, arr->type, arr->size); + + // Write magic bit + memcpy(head.head, "DNNw", 4); + head.version = WEIGHT_BLOB_VERSION; + head.type = arr->type; + head.size = arr->size; + head.block_size = (arr->size + WEIGHT_BLOCK_SIZE - 1) / WEIGHT_BLOCK_SIZE * WEIGHT_BLOCK_SIZE; + + memset(head.name, 0, sizeof(head.name)); + strncpy(head.name, arr->name, 
sizeof(head.name) - 1); + head.name[sizeof(head.name) - 1] = '\0'; + + // Check if Head Size is wrong + if(sizeof(head) != WEIGHT_BLOCK_SIZE) + { + printf("ERR: Header Size is Wrong!! - [%zu]\n", sizeof(head)); + continue; + } + + // Write Header to File + fwrite(&head, 1, WEIGHT_BLOCK_SIZE, fout); + // Write Data to File + fwrite(arr->data, 1, arr->size, fout); + // Fill 0 to empty padding + fwrite(zeros, 1, head.block_size - arr->size, fout); + + printf("## Wrote: %s, head->type: %d, head->size: %d, head->block_size: %d\n", head.name, head.type, head.size, head.block_size); + } } int main(void) { - FILE *fout = fopen("weights_blob.bin", "w"); - write_weights(rnnoise_arrays, fout); - fclose(fout); - return 0; + FILE *fout = fopen("weights_blob.bin", "wb"); + write_weights(rnnoise_arrays, fout); + + fclose(fout); + return 0; }