path: root/apps/codecs/libtremor/misc.h
Diffstat (limited to 'apps/codecs/libtremor/misc.h')
-rw-r--r--  apps/codecs/libtremor/misc.h | 32
1 file changed, 16 insertions(+), 16 deletions(-)
diff --git a/apps/codecs/libtremor/misc.h b/apps/codecs/libtremor/misc.h
index 59ce6dbb74..6e22f20008 100644
--- a/apps/codecs/libtremor/misc.h
+++ b/apps/codecs/libtremor/misc.h
@@ -119,33 +119,33 @@ static inline ogg_int32_t MULT31_SHIFT15(ogg_int32_t x, ogg_int32_t y) {

/* replaced XPROD32 with a macro to avoid memory reference
   _x, _y are the results (must be l-values) */
#define XPROD32(_a, _b, _t, _v, _x, _y) \
  { (_x)=MULT32(_a,_t)+MULT32(_b,_v); \
    (_y)=MULT32(_b,_t)-MULT32(_a,_v); }


#ifdef __i386__

#define XPROD31(_a, _b, _t, _v, _x, _y) \
  { *(_x)=MULT31(_a,_t)+MULT31(_b,_v); \
    *(_y)=MULT31(_b,_t)-MULT31(_a,_v); }
#define XNPROD31(_a, _b, _t, _v, _x, _y) \
  { *(_x)=MULT31(_a,_t)-MULT31(_b,_v); \
    *(_y)=MULT31(_b,_t)+MULT31(_a,_v); }

#else

static inline void XPROD31(ogg_int32_t a, ogg_int32_t b,
                           ogg_int32_t t, ogg_int32_t v,
                           ogg_int32_t *x, ogg_int32_t *y)
{
  *x = MULT31(a, t) + MULT31(b, v);
  *y = MULT31(b, t) - MULT31(a, v);
}

static inline void XNPROD31(ogg_int32_t a, ogg_int32_t b,
                            ogg_int32_t t, ogg_int32_t v,
                            ogg_int32_t *x, ogg_int32_t *y)
{
  *x = MULT31(a, t) - MULT31(b, v);
  *y = MULT31(b, t) + MULT31(a, v);
}
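For orientation between the hunks, here is a minimal standalone sketch (not from the tree) of the arithmetic behind XPROD31, assuming MULT31(a,b) behaves like the Q31 product ((int64_t)a*b)>>31; the header's real macro may differ in rounding. The output pair (x, y) equals the complex product (a + jb)*(t - jv), i.e. a fixed-point rotation of (a, b), which is how these helpers are typically used.

/* sketch only: reference XPROD31 under the Q31 multiply assumption above */
#include <stdint.h>
#include <stdio.h>

static int32_t mult31_ref(int32_t a, int32_t b) {
    return (int32_t)(((int64_t)a * b) >> 31);    /* assumed Q31 multiply */
}

static void xprod31_ref(int32_t a, int32_t b, int32_t t, int32_t v,
                        int32_t *x, int32_t *y) {
    *x = mult31_ref(a, t) + mult31_ref(b, v);
    *y = mult31_ref(b, t) - mult31_ref(a, v);
}

int main(void) {
    /* hypothetical test values: cos/sin of ~30 degrees in Q31 */
    int32_t t = (int32_t)(0.86602540 * 2147483648.0);
    int32_t v = (int32_t)(0.5        * 2147483648.0);
    int32_t x, y;
    xprod31_ref(1 << 30, 0, t, v, &x, &y);       /* rotate (0.5, 0) */
    printf("x=%ld y=%ld\n", (long)x, (long)y);   /* roughly 0.433 and -0.25 in Q31 */
    return 0;
}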
@@ -217,8 +217,8 @@ static inline ogg_int32_t CLIP_TO_15(ogg_int32_t x) {
#endif

static inline ogg_int32_t VFLOAT_MULT(ogg_int32_t a,ogg_int32_t ap,
                                      ogg_int32_t b,ogg_int32_t bp,
                                      ogg_int32_t *p){
  if(a && b){
#ifndef _LOW_ACCURACY_
    *p=ap+bp+32;
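A minimal sketch (not part of the diff) of the mantissa/exponent representation VFLOAT_MULT works on, assuming a value is reconstructed as mantissa * 2^exponent and that the mantissa product keeps the high 32 bits of the 64-bit multiply, which is what the ap+bp+32 adjustment in the hunk above would compensate for.

/* sketch only: 0.75 * 0.5 in mantissa/exponent form, checked via ldexp */
#include <stdint.h>
#include <stdio.h>
#include <math.h>

int main(void) {
    /* hypothetical operands as mantissa/exponent pairs */
    int32_t a = 0x60000000, ap = -31;              /* 0x60000000 * 2^-31 = 0.75 */
    int32_t b = 0x40000000, bp = -31;              /* 0x40000000 * 2^-31 = 0.5  */
    int32_t m = (int32_t)(((int64_t)a * b) >> 32); /* high word of the product  */
    int32_t e = ap + bp + 32;                      /* exponent sum plus shift compensation */
    printf("%f\n", ldexp((double)m, e));           /* prints 0.375000 */
    return 0;
}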
@@ -232,16 +232,16 @@ static inline ogg_int32_t VFLOAT_MULT(ogg_int32_t a,ogg_int32_t ap,
}

static inline ogg_int32_t VFLOAT_MULTI(ogg_int32_t a,ogg_int32_t ap,
                                       ogg_int32_t i,
                                       ogg_int32_t *p){

  int ip=_ilog(abs(i))-31;
  return VFLOAT_MULT(a,ap,i<<-ip,ip,p);
}

static inline ogg_int32_t VFLOAT_ADD(ogg_int32_t a,ogg_int32_t ap,
                                     ogg_int32_t b,ogg_int32_t bp,
                                     ogg_int32_t *p){

  if(!a){
    *p=bp;
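After the last hunk shown, a sketch (not from the tree) of the normalization step inside VFLOAT_MULTI, assuming _ilog(x) returns the bit length of x; ilog_ref below is a made-up reference helper for illustration. The exponent ip comes out negative, and i<<-ip scales the integer up into a full 31-bit mantissa before the call to VFLOAT_MULT.

/* sketch only: normalizing a plain integer into mantissa/exponent form */
#include <stdint.h>
#include <stdio.h>

static int ilog_ref(uint32_t v) {                /* reference bit-length helper */
    int ret = 0;
    while (v) { ret++; v >>= 1; }
    return ret;
}

int main(void) {
    int32_t i  = 1000;                           /* hypothetical integer operand */
    int     ip = ilog_ref((uint32_t)i) - 31;     /* 10 - 31 = -21 */
    int32_t m  = i << -ip;                       /* mantissa: 1000 * 2^21 */
    /* m * 2^ip reconstructs the original value: 1000 * 2^21 * 2^-21 = 1000 */
    printf("mantissa=%ld exponent=%d\n", (long)m, ip);
    return 0;
}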