Diffstat (limited to 'lib/rbcodec/codecs/libopus/celt/bands.c')
-rw-r--r--  lib/rbcodec/codecs/libopus/celt/bands.c  28
1 file changed, 20 insertions, 8 deletions
diff --git a/lib/rbcodec/codecs/libopus/celt/bands.c b/lib/rbcodec/codecs/libopus/celt/bands.c
index 5c715aff53..1ad786d795 100644
--- a/lib/rbcodec/codecs/libopus/celt/bands.c
+++ b/lib/rbcodec/codecs/libopus/celt/bands.c
@@ -216,7 +216,9 @@ void denormalise_bands(const CELTMode *m, const celt_norm * OPUS_RESTRICT X,
          j=M*eBands[i];
          band_end = M*eBands[i+1];
          lg = ADD16(bandLogE[i+c*m->nbEBands], SHL16((opus_val16)eMeans[i],6));
-#ifdef FIXED_POINT
+#ifndef FIXED_POINT
+         g = celt_exp2(lg);
+#else
          /* Handle the integer part of the log energy */
          shift = 16-(lg>>DB_SHIFT);
          if (shift>31)
@@ -227,9 +229,23 @@ void denormalise_bands(const CELTMode *m, const celt_norm * OPUS_RESTRICT X,
             /* Handle the fractional part. */
             g = celt_exp2_frac(lg&((1<<DB_SHIFT)-1));
          }
-#else
-         g = celt_exp2(lg);
+         /* Handle extreme gains with negative shift. */
+         if (shift<0)
+         {
+            /* For shift < -2 we'd be likely to overflow, so we're capping
+               the gain here. This shouldn't happen unless the bitstream is
+               already corrupted. */
+            if (shift < -2)
+            {
+               g = 32767;
+               shift = -2;
+            }
+            do {
+               *f++ = SHL32(MULT16_16(*x++, g), -shift);
+            } while (++j<band_end);
+         } else
 #endif
+         /* Be careful of the fixed-point "else" just above when changing this code */
          do {
             *f++ = SHR32(MULT16_16(*x++, g), shift);
          } while (++j<band_end);
@@ -495,7 +511,7 @@ int spreading_decision(const CELTMode *m, celt_norm *X, int *average,
       *tapset_decision=0;
    }
    /*printf("%d %d %d\n", hf_sum, *hf_average, *tapset_decision);*/
-   celt_assert(nbBands>0); /*M*(eBands[end]-eBands[end-1]) <= 8 assures this*/
+   celt_assert(nbBands>0); /* end has to be non-zero */
    sum /= nbBands;
    /* Recursive averaging */
    sum = (sum+*average)>>1;
@@ -873,7 +889,6 @@ static unsigned quant_partition(struct band_ctx *ctx, celt_norm *X,
    int q;
    int curr_bits;
    int imid=0, iside=0;
-   int N_B=N;
    int B0=B;
    opus_val16 mid=0, side=0;
    unsigned cm=0;
@@ -895,8 +910,6 @@ static unsigned quant_partition(struct band_ctx *ctx, celt_norm *X,
    spread = ctx->spread;
    ec = ctx->ec;
 
-   N_B /= B;
-
    /* If we need 1.5 more bit than we can produce, split the band in two. */
    cache = m->cache.bits + m->cache.index[(LM+1)*m->nbEBands+i];
    if (LM != -1 && b > cache[cache[0]]+12 && N>2)
@@ -1076,7 +1089,6 @@ static unsigned quant_band(struct band_ctx *ctx, celt_norm *X,
    longBlocks = B0==1;
 
    N_B /= B;
-   N_B0 = N_B;
 
    /* Special case for one sample */
    if (N==1)
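
The main behavioural change above is in the fixed-point branch of denormalise_bands(): the band gain is applied as a 16-bit mantissa g together with a right shift, and when the decoded log energy is large enough that shift goes negative, the product must be left-shifted instead and capped so a corrupted bitstream cannot force a 32-bit overflow. The standalone sketch below only illustrates that shape of the logic; it is not the Rockbox/Opus build, and apply_gain plus the sample values are illustrative assumptions, not the library's API.

#include <stdio.h>
#include <stdint.h>

/* Apply gain = g * 2^(-shift) to one 16-bit sample, mirroring the logic in
   the diff: a non-negative shift right-shifts the 32-bit product, a negative
   shift left-shifts it, and shifts below -2 are capped (g forced to 32767,
   shift to -2) so an extreme decoded gain stays within 32-bit range. */
static int32_t apply_gain(int16_t x, int16_t g, int shift)
{
    if (shift < 0)
    {
        if (shift < -2)
        {
            g = 32767;
            shift = -2;
        }
        return ((int32_t)x * g) << -shift;
    }
    return ((int32_t)x * g) >> shift;
}

int main(void)
{
    /* Normal case: shift >= 0 behaves like the pre-existing code path. */
    printf("%ld\n", (long)apply_gain(1000, 16384, 5));
    /* Extreme gain: shift = -7 gets capped to g = 32767, shift = -2. */
    printf("%ld\n", (long)apply_gain(1000, 20000, -7));
    return 0;
}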