WebRtc_Word32 => int32_t etc. in audio_coding/
BUG=314
Review URL: https://webrtc-codereview.appspot.com/1271006
git-svn-id: http://webrtc.googlecode.com/svn/trunk@3789 4adac7df-926f-26a2-2b94-8c16560cd09d
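
The mechanical renames in this CL map the legacy WebRtc fixed-width typedefs onto the <stdint.h> names. A minimal sketch of the correspondence (illustrative only; the old names come from typedefs.h, which is still included below, and this patch does not redefine them):

    #include <stdint.h>

    /* Illustrative compatibility aliases showing the mapping applied in this CL.
     * The real typedefs live in typedefs.h (assumption); they are unchanged here. */
    typedef int16_t WebRtc_Word16;   /* -> int16_t */
    typedef int32_t WebRtc_Word32;   /* -> int32_t */
    typedef uint8_t WebRtc_UWord8;   /* -> uint8_t */
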
diff --git a/webrtc/modules/audio_coding/codecs/g722/g722_decode.c b/webrtc/modules/audio_coding/codecs/g722/g722_decode.c
index 499cc8f..e62af98 100644
--- a/webrtc/modules/audio_coding/codecs/g722/g722_decode.c
+++ b/webrtc/modules/audio_coding/codecs/g722/g722_decode.c
@@ -49,12 +49,12 @@
#define TRUE (!FALSE)
#endif
-static __inline WebRtc_Word16 saturate(WebRtc_Word32 amp)
+static __inline int16_t saturate(int32_t amp)
{
- WebRtc_Word16 amp16;
+ int16_t amp16;
/* Hopefully this is optimised for the common case - not clipping */
- amp16 = (WebRtc_Word16) amp;
+ amp16 = (int16_t) amp;
if (amp == amp16)
return amp16;
if (amp > WEBRTC_INT16_MAX)
@@ -190,8 +190,8 @@
}
/*- End of function --------------------------------------------------------*/
-int WebRtc_g722_decode(g722_decode_state_t *s, WebRtc_Word16 amp[],
- const WebRtc_UWord8 g722_data[], int len)
+int WebRtc_g722_decode(g722_decode_state_t *s, int16_t amp[],
+ const uint8_t g722_data[], int len)
{
static const int wl[8] = {-60, -30, 58, 172, 334, 538, 1198, 3042 };
static const int rl42[16] = {0, 7, 6, 5, 4, 3, 2, 1,
@@ -372,14 +372,14 @@
if (s->itu_test_mode)
{
- amp[outlen++] = (WebRtc_Word16) (rlow << 1);
- amp[outlen++] = (WebRtc_Word16) (rhigh << 1);
+ amp[outlen++] = (int16_t) (rlow << 1);
+ amp[outlen++] = (int16_t) (rhigh << 1);
}
else
{
if (s->eight_k)
{
- amp[outlen++] = (WebRtc_Word16) (rlow << 1);
+ amp[outlen++] = (int16_t) (rlow << 1);
}
else
{
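
The saturate() helper above uses a truncate-and-compare idiom: casting to int16_t and comparing against the original round-trips exactly when the value already fits, so the comparison cheaply detects when clipping is needed. A self-contained sketch of the same idiom, using plain <stdint.h> limits instead of the codec's WEBRTC_INT16_MAX macro:

    #include <stdint.h>

    static int16_t saturate_sketch(int32_t amp)
    {
        int16_t amp16 = (int16_t) amp;              /* truncate to 16 bits          */
        if (amp == amp16)
            return amp16;                           /* common case: nothing clipped */
        return (amp > 0) ? INT16_MAX : INT16_MIN;   /* clip out-of-range values     */
    }
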
diff --git a/webrtc/modules/audio_coding/codecs/g722/g722_enc_dec.h b/webrtc/modules/audio_coding/codecs/g722/g722_enc_dec.h
index d2d19b0..ef279ac 100644
--- a/webrtc/modules/audio_coding/codecs/g722/g722_enc_dec.h
+++ b/webrtc/modules/audio_coding/codecs/g722/g722_enc_dec.h
@@ -138,8 +138,8 @@
int options);
int WebRtc_g722_encode_release(g722_encode_state_t *s);
int WebRtc_g722_encode(g722_encode_state_t *s,
- WebRtc_UWord8 g722_data[],
- const WebRtc_Word16 amp[],
+ uint8_t g722_data[],
+ const int16_t amp[],
int len);
g722_decode_state_t *WebRtc_g722_decode_init(g722_decode_state_t *s,
@@ -147,8 +147,8 @@
int options);
int WebRtc_g722_decode_release(g722_decode_state_t *s);
int WebRtc_g722_decode(g722_decode_state_t *s,
- WebRtc_Word16 amp[],
- const WebRtc_UWord8 g722_data[],
+ int16_t amp[],
+ const uint8_t g722_data[],
int len);
#ifdef __cplusplus
diff --git a/webrtc/modules/audio_coding/codecs/g722/g722_encode.c b/webrtc/modules/audio_coding/codecs/g722/g722_encode.c
index 7487b64..5b07615 100644
--- a/webrtc/modules/audio_coding/codecs/g722/g722_encode.c
+++ b/webrtc/modules/audio_coding/codecs/g722/g722_encode.c
@@ -48,12 +48,12 @@
#define TRUE (!FALSE)
#endif
-static __inline WebRtc_Word16 saturate(WebRtc_Word32 amp)
+static __inline int16_t saturate(int32_t amp)
{
- WebRtc_Word16 amp16;
+ int16_t amp16;
/* Hopefully this is optimised for the common case - not clipping */
- amp16 = (WebRtc_Word16) amp;
+ amp16 = (int16_t) amp;
if (amp == amp16)
return amp16;
if (amp > WEBRTC_INT16_MAX)
@@ -191,10 +191,10 @@
*/
//#define RUN_LIKE_REFERENCE_G722
#ifdef RUN_LIKE_REFERENCE_G722
-WebRtc_Word16 limitValues (WebRtc_Word16 rl)
+int16_t limitValues (int16_t rl)
{
- WebRtc_Word16 yl;
+ int16_t yl;
yl = (rl > 16383) ? 16383 : ((rl < -16384) ? -16384 : rl);
@@ -202,8 +202,8 @@
}
#endif
-int WebRtc_g722_encode(g722_encode_state_t *s, WebRtc_UWord8 g722_data[],
- const WebRtc_Word16 amp[], int len)
+int WebRtc_g722_encode(g722_encode_state_t *s, uint8_t g722_data[],
+ const int16_t amp[], int len)
{
static const int q6[32] =
{
@@ -418,14 +418,14 @@
s->out_bits += s->bits_per_sample;
if (s->out_bits >= 8)
{
- g722_data[g722_bytes++] = (WebRtc_UWord8) (s->out_buffer & 0xFF);
+ g722_data[g722_bytes++] = (uint8_t) (s->out_buffer & 0xFF);
s->out_bits -= 8;
s->out_buffer >>= 8;
}
}
else
{
- g722_data[g722_bytes++] = (WebRtc_UWord8) code;
+ g722_data[g722_bytes++] = (uint8_t) code;
}
}
return g722_bytes;
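
The hunk above is the tail of the encoder's bit packing: codes narrower than a byte accumulate in s->out_buffer and are flushed one byte at a time once at least eight bits are pending. A hypothetical stand-alone sketch of that pattern (only the flush step is visible in the hunk; the accumulate step is an assumption):

    #include <stdint.h>

    typedef struct { uint32_t out_buffer; int out_bits; } bitpacker_t;

    /* Append one 'bits'-wide code LSB-first; returns the updated byte count. */
    static int pack_code(bitpacker_t *p, uint8_t *out, int nbytes,
                         unsigned code, int bits)
    {
        p->out_buffer |= (uint32_t) code << p->out_bits;   /* accumulate (assumed)   */
        p->out_bits += bits;
        if (p->out_bits >= 8) {                            /* flush, as in the hunk  */
            out[nbytes++] = (uint8_t) (p->out_buffer & 0xFF);
            p->out_bits -= 8;
            p->out_buffer >>= 8;
        }
        return nbytes;
    }
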
diff --git a/webrtc/modules/audio_coding/codecs/g722/g722_interface.c b/webrtc/modules/audio_coding/codecs/g722/g722_interface.c
index d559014..7075669 100644
--- a/webrtc/modules/audio_coding/codecs/g722/g722_interface.c
+++ b/webrtc/modules/audio_coding/codecs/g722/g722_interface.c
@@ -17,7 +17,7 @@
#include "typedefs.h"
-WebRtc_Word16 WebRtcG722_CreateEncoder(G722EncInst **G722enc_inst)
+int16_t WebRtcG722_CreateEncoder(G722EncInst **G722enc_inst)
{
*G722enc_inst=(G722EncInst*)malloc(sizeof(g722_encode_state_t));
if (*G722enc_inst!=NULL) {
@@ -27,7 +27,7 @@
}
}
-WebRtc_Word16 WebRtcG722_EncoderInit(G722EncInst *G722enc_inst)
+int16_t WebRtcG722_EncoderInit(G722EncInst *G722enc_inst)
{
// Create and/or reset the G.722 encoder
// Bitrate 64 kbps and wideband mode (2)
@@ -40,16 +40,16 @@
}
}
-WebRtc_Word16 WebRtcG722_FreeEncoder(G722EncInst *G722enc_inst)
+int16_t WebRtcG722_FreeEncoder(G722EncInst *G722enc_inst)
{
// Free encoder memory
return WebRtc_g722_encode_release((g722_encode_state_t*) G722enc_inst);
}
-WebRtc_Word16 WebRtcG722_Encode(G722EncInst *G722enc_inst,
- WebRtc_Word16 *speechIn,
- WebRtc_Word16 len,
- WebRtc_Word16 *encoded)
+int16_t WebRtcG722_Encode(G722EncInst *G722enc_inst,
+ int16_t *speechIn,
+ int16_t len,
+ int16_t *encoded)
{
unsigned char *codechar = (unsigned char*) encoded;
// Encode the input speech vector
@@ -57,7 +57,7 @@
codechar, speechIn, len);
}
-WebRtc_Word16 WebRtcG722_CreateDecoder(G722DecInst **G722dec_inst)
+int16_t WebRtcG722_CreateDecoder(G722DecInst **G722dec_inst)
{
*G722dec_inst=(G722DecInst*)malloc(sizeof(g722_decode_state_t));
if (*G722dec_inst!=NULL) {
@@ -67,7 +67,7 @@
}
}
-WebRtc_Word16 WebRtcG722_DecoderInit(G722DecInst *G722dec_inst)
+int16_t WebRtcG722_DecoderInit(G722DecInst *G722dec_inst)
{
// Create and/or reset the G.722 decoder
// Bitrate 64 kbps and wideband mode (2)
@@ -80,25 +80,25 @@
}
}
-WebRtc_Word16 WebRtcG722_FreeDecoder(G722DecInst *G722dec_inst)
+int16_t WebRtcG722_FreeDecoder(G722DecInst *G722dec_inst)
{
// Free encoder memory
return WebRtc_g722_decode_release((g722_decode_state_t*) G722dec_inst);
}
-WebRtc_Word16 WebRtcG722_Decode(G722DecInst *G722dec_inst,
- WebRtc_Word16 *encoded,
- WebRtc_Word16 len,
- WebRtc_Word16 *decoded,
- WebRtc_Word16 *speechType)
+int16_t WebRtcG722_Decode(G722DecInst *G722dec_inst,
+ int16_t *encoded,
+ int16_t len,
+ int16_t *decoded,
+ int16_t *speechType)
{
// Decode the G.722 encoder stream
*speechType=G722_WEBRTC_SPEECH;
return WebRtc_g722_decode((g722_decode_state_t*) G722dec_inst,
- decoded, (WebRtc_UWord8*) encoded, len);
+ decoded, (uint8_t*) encoded, len);
}
-WebRtc_Word16 WebRtcG722_Version(char *versionStr, short len)
+int16_t WebRtcG722_Version(char *versionStr, short len)
{
// Get version string
char version[30] = "2.0.0\n";
diff --git a/webrtc/modules/audio_coding/codecs/g722/include/g722_interface.h b/webrtc/modules/audio_coding/codecs/g722/include/g722_interface.h
index e50d66f..0948a18 100644
--- a/webrtc/modules/audio_coding/codecs/g722/include/g722_interface.h
+++ b/webrtc/modules/audio_coding/codecs/g722/include/g722_interface.h
@@ -43,7 +43,7 @@
* Return value : 0 - Ok
* -1 - Error
*/
-WebRtc_Word16 WebRtcG722_CreateEncoder(G722EncInst **G722enc_inst);
+int16_t WebRtcG722_CreateEncoder(G722EncInst **G722enc_inst);
/****************************************************************************
@@ -59,7 +59,7 @@
* -1 - Error
*/
-WebRtc_Word16 WebRtcG722_EncoderInit(G722EncInst *G722enc_inst);
+int16_t WebRtcG722_EncoderInit(G722EncInst *G722enc_inst);
/****************************************************************************
@@ -73,7 +73,7 @@
* Return value : 0 - Ok
* -1 - Error
*/
-WebRtc_Word16 WebRtcG722_FreeEncoder(G722EncInst *G722enc_inst);
+int16_t WebRtcG722_FreeEncoder(G722EncInst *G722enc_inst);
@@ -95,10 +95,10 @@
* -1 - Error
*/
-WebRtc_Word16 WebRtcG722_Encode(G722EncInst *G722enc_inst,
- WebRtc_Word16 *speechIn,
- WebRtc_Word16 len,
- WebRtc_Word16 *encoded);
+int16_t WebRtcG722_Encode(G722EncInst *G722enc_inst,
+ int16_t *speechIn,
+ int16_t len,
+ int16_t *encoded);
/****************************************************************************
@@ -112,7 +112,7 @@
* Return value : 0 - Ok
* -1 - Error
*/
-WebRtc_Word16 WebRtcG722_CreateDecoder(G722DecInst **G722dec_inst);
+int16_t WebRtcG722_CreateDecoder(G722DecInst **G722dec_inst);
/****************************************************************************
@@ -128,7 +128,7 @@
* -1 - Error
*/
-WebRtc_Word16 WebRtcG722_DecoderInit(G722DecInst *G722dec_inst);
+int16_t WebRtcG722_DecoderInit(G722DecInst *G722dec_inst);
/****************************************************************************
@@ -143,7 +143,7 @@
* -1 - Error
*/
-WebRtc_Word16 WebRtcG722_FreeDecoder(G722DecInst *G722dec_inst);
+int16_t WebRtcG722_FreeDecoder(G722DecInst *G722dec_inst);
/****************************************************************************
@@ -167,11 +167,11 @@
* -1 - Error
*/
-WebRtc_Word16 WebRtcG722_Decode(G722DecInst *G722dec_inst,
- WebRtc_Word16 *encoded,
- WebRtc_Word16 len,
- WebRtc_Word16 *decoded,
- WebRtc_Word16 *speechType);
+int16_t WebRtcG722_Decode(G722DecInst *G722dec_inst,
+ int16_t *encoded,
+ int16_t len,
+ int16_t *decoded,
+ int16_t *speechType);
/****************************************************************************
* WebRtcG722_Version(...)
@@ -179,7 +179,7 @@
* Get a string with the current version of the codec
*/
-WebRtc_Word16 WebRtcG722_Version(char *versionStr, short len);
+int16_t WebRtcG722_Version(char *versionStr, short len);
#ifdef __cplusplus
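
For reference, a typical call sequence for the encoder side of the interface declared above; this is a hedged usage sketch only (it assumes g722_interface.h is included, and the frame size, buffer sizes, and the exact meaning of the Encode return value are assumptions not spelled out in these hunks):

    G722EncInst *enc = NULL;
    int16_t speech[160];     /* one input frame; 160 samples is an assumption    */
    int16_t encoded[80];     /* encoded bytes carried in an int16_t buffer       */
    int16_t ret;

    if (WebRtcG722_CreateEncoder(&enc) != 0) { /* -1 on error, per the header */ }
    if (WebRtcG722_EncoderInit(enc) != 0)     { /* -1 on error                 */ }
    ret = WebRtcG722_Encode(enc, speech, 160, encoded);
    /* ret < 0 signals failure; otherwise it is the encoded length (assumption) */
    WebRtcG722_FreeEncoder(enc);
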
diff --git a/webrtc/modules/audio_coding/codecs/g722/test/testG722.cc b/webrtc/modules/audio_coding/codecs/g722/test/testG722.cc
index d2fdca3..d51301d 100644
--- a/webrtc/modules/audio_coding/codecs/g722/test/testG722.cc
+++ b/webrtc/modules/audio_coding/codecs/g722/test/testG722.cc
@@ -29,11 +29,11 @@
typedef struct WebRtcG722DecInst G722DecInst;
/* function for reading audio data from PCM file */
-int readframe(WebRtc_Word16 *data, FILE *inp, int length)
+int readframe(int16_t *data, FILE *inp, int length)
{
short k, rlen, status = 0;
- rlen = (short)fread(data, sizeof(WebRtc_Word16), length, inp);
+ rlen = (short)fread(data, sizeof(int16_t), length, inp);
if (rlen < length) {
for (k = rlen; k < length; k++)
data[k] = 0;
@@ -49,7 +49,7 @@
FILE *inp, *outbitp, *outp;
int framecnt, endfile;
- WebRtc_Word16 framelength = 160;
+ int16_t framelength = 160;
G722EncInst *G722enc_inst;
G722DecInst *G722dec_inst;
int err;
@@ -59,11 +59,11 @@
double runtime = 0;
double length_file;
- WebRtc_Word16 stream_len = 0;
- WebRtc_Word16 shortdata[960];
- WebRtc_Word16 decoded[960];
- WebRtc_Word16 streamdata[80*3];
- WebRtc_Word16 speechType[1];
+ int16_t stream_len = 0;
+ int16_t shortdata[960];
+ int16_t decoded[960];
+ int16_t streamdata[80*3];
+ int16_t speechType[1];
/* handling wrong input arguments in the command line */
if (argc!=5) {
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/abs_quant.c b/webrtc/modules/audio_coding/codecs/ilbc/abs_quant.c
index 4a70c8b..8a8b133 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/abs_quant.c
+++ b/webrtc/modules/audio_coding/codecs/ilbc/abs_quant.c
@@ -32,16 +32,16 @@
iLBC_bits *iLBC_encbits, /* (i/o) Encoded bits (outputs idxForMax
and idxVec, uses state_first as
input) */
- WebRtc_Word16 *in, /* (i) vector to encode */
- WebRtc_Word16 *weightDenum /* (i) denominator of synthesis filter */
+ int16_t *in, /* (i) vector to encode */
+ int16_t *weightDenum /* (i) denominator of synthesis filter */
) {
- WebRtc_Word16 *syntOut;
- WebRtc_Word16 quantLen[2];
+ int16_t *syntOut;
+ int16_t quantLen[2];
/* Stack based */
- WebRtc_Word16 syntOutBuf[LPC_FILTERORDER+STATE_SHORT_LEN_30MS];
- WebRtc_Word16 in_weightedVec[STATE_SHORT_LEN_30MS+LPC_FILTERORDER];
- WebRtc_Word16 *in_weighted = &in_weightedVec[LPC_FILTERORDER];
+ int16_t syntOutBuf[LPC_FILTERORDER+STATE_SHORT_LEN_30MS];
+ int16_t in_weightedVec[STATE_SHORT_LEN_30MS+LPC_FILTERORDER];
+ int16_t *in_weighted = &in_weightedVec[LPC_FILTERORDER];
/* Initialize the buffers */
WebRtcSpl_MemSetW16(syntOutBuf, 0, LPC_FILTERORDER+STATE_SHORT_LEN_30MS);
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/abs_quant.h b/webrtc/modules/audio_coding/codecs/ilbc/abs_quant.h
index fa59593..c061ff4 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/abs_quant.h
+++ b/webrtc/modules/audio_coding/codecs/ilbc/abs_quant.h
@@ -32,8 +32,8 @@
iLBC_bits *iLBC_encbits, /* (i/o) Encoded bits (outputs idxForMax
and idxVec, uses state_first as
input) */
- WebRtc_Word16 *in, /* (i) vector to encode */
- WebRtc_Word16 *weightDenum /* (i) denominator of synthesis filter */
+ int16_t *in, /* (i) vector to encode */
+ int16_t *weightDenum /* (i) denominator of synthesis filter */
);
#endif
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/abs_quant_loop.c b/webrtc/modules/audio_coding/codecs/ilbc/abs_quant_loop.c
index 4eebc3e..1a18a1d 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/abs_quant_loop.c
+++ b/webrtc/modules/audio_coding/codecs/ilbc/abs_quant_loop.c
@@ -20,26 +20,21 @@
#include "constants.h"
#include "sort_sq.h"
-void WebRtcIlbcfix_AbsQuantLoop(
- WebRtc_Word16 *syntOutIN,
- WebRtc_Word16 *in_weightedIN,
- WebRtc_Word16 *weightDenumIN,
- WebRtc_Word16 *quantLenIN,
- WebRtc_Word16 *idxVecIN
- )
-{
+void WebRtcIlbcfix_AbsQuantLoop(int16_t *syntOutIN, int16_t *in_weightedIN,
+ int16_t *weightDenumIN, int16_t *quantLenIN,
+ int16_t *idxVecIN ) {
int n, k1, k2;
- WebRtc_Word16 index;
- WebRtc_Word32 toQW32;
- WebRtc_Word32 toQ32;
- WebRtc_Word16 tmp16a;
- WebRtc_Word16 xq;
+ int16_t index;
+ int32_t toQW32;
+ int32_t toQ32;
+ int16_t tmp16a;
+ int16_t xq;
- WebRtc_Word16 *syntOut = syntOutIN;
- WebRtc_Word16 *in_weighted = in_weightedIN;
- WebRtc_Word16 *weightDenum = weightDenumIN;
- WebRtc_Word16 *quantLen = quantLenIN;
- WebRtc_Word16 *idxVec = idxVecIN;
+ int16_t *syntOut = syntOutIN;
+ int16_t *in_weighted = in_weightedIN;
+ int16_t *weightDenum = weightDenumIN;
+ int16_t *quantLen = quantLenIN;
+ int16_t *idxVec = idxVecIN;
n=0;
@@ -52,14 +47,14 @@
weightDenum, LPC_FILTERORDER+1, 1);
/* the quantizer */
- toQW32 = (WebRtc_Word32)(*in_weighted) - (WebRtc_Word32)(*syntOut);
+ toQW32 = (int32_t)(*in_weighted) - (int32_t)(*syntOut);
- toQ32 = (((WebRtc_Word32)toQW32)<<2);
+ toQ32 = (((int32_t)toQW32)<<2);
if (toQ32 > 32767) {
- toQ32 = (WebRtc_Word32) 32767;
+ toQ32 = (int32_t) 32767;
} else if (toQ32 < -32768) {
- toQ32 = (WebRtc_Word32) -32768;
+ toQ32 = (int32_t) -32768;
}
/* Quantize the state */
@@ -74,7 +69,7 @@
(state_sq3Tbl is in Q13 and toQ is in Q11)
*/
WebRtcIlbcfix_SortSq(&xq, &index,
- (WebRtc_Word16)toQ32,
+ (int16_t)toQ32,
WebRtcIlbcfix_kStateSq3, 8);
}
@@ -84,7 +79,7 @@
/* Compute decoded sample and update of the prediction filter */
tmp16a = ((WebRtcIlbcfix_kStateSq3[index] + 2 ) >> 2);
- *syntOut = (WebRtc_Word16) (tmp16a + (WebRtc_Word32)(*in_weighted) - toQW32);
+ *syntOut = (int16_t) (tmp16a + (int32_t)(*in_weighted) - toQW32);
n++;
syntOut++; in_weighted++;
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/abs_quant_loop.h b/webrtc/modules/audio_coding/codecs/ilbc/abs_quant_loop.h
index f506e8e..50c6ffe 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/abs_quant_loop.h
+++ b/webrtc/modules/audio_coding/codecs/ilbc/abs_quant_loop.h
@@ -26,12 +26,8 @@
* (subrutine for WebRtcIlbcfix_StateSearch)
*---------------------------------------------------------------*/
-void WebRtcIlbcfix_AbsQuantLoop(
- WebRtc_Word16 *syntOutIN,
- WebRtc_Word16 *in_weightedIN,
- WebRtc_Word16 *weightDenumIN,
- WebRtc_Word16 *quantLenIN,
- WebRtc_Word16 *idxVecIN
- );
+void WebRtcIlbcfix_AbsQuantLoop(int16_t *syntOutIN, int16_t *in_weightedIN,
+ int16_t *weightDenumIN, int16_t *quantLenIN,
+ int16_t *idxVecIN);
#endif
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/augmented_cb_corr.c b/webrtc/modules/audio_coding/codecs/ilbc/augmented_cb_corr.c
index 6011e92..d8f8c93 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/augmented_cb_corr.c
+++ b/webrtc/modules/audio_coding/codecs/ilbc/augmented_cb_corr.c
@@ -21,24 +21,24 @@
#include "augmented_cb_corr.h"
void WebRtcIlbcfix_AugmentedCbCorr(
- WebRtc_Word16 *target, /* (i) Target vector */
- WebRtc_Word16 *buffer, /* (i) Memory buffer */
- WebRtc_Word16 *interpSamples, /* (i) buffer with
+ int16_t *target, /* (i) Target vector */
+ int16_t *buffer, /* (i) Memory buffer */
+ int16_t *interpSamples, /* (i) buffer with
interpolated samples */
- WebRtc_Word32 *crossDot, /* (o) The cross correlation between
+ int32_t *crossDot, /* (o) The cross correlation between
the target and the Augmented
vector */
- WebRtc_Word16 low, /* (i) Lag to start from (typically
+ int16_t low, /* (i) Lag to start from (typically
20) */
- WebRtc_Word16 high, /* (i) Lag to end at (typically 39) */
- WebRtc_Word16 scale) /* (i) Scale factor to use for
+ int16_t high, /* (i) Lag to end at (typically 39) */
+ int16_t scale) /* (i) Scale factor to use for
the crossDot */
{
int lagcount;
- WebRtc_Word16 ilow;
- WebRtc_Word16 *targetPtr;
- WebRtc_Word32 *crossDotPtr;
- WebRtc_Word16 *iSPtr=interpSamples;
+ int16_t ilow;
+ int16_t *targetPtr;
+ int32_t *crossDotPtr;
+ int16_t *iSPtr=interpSamples;
/* Calculate the correlation between the target and the
interpolated codebook. The correlation is calculated in
@@ -46,7 +46,7 @@
crossDotPtr=crossDot;
for (lagcount=low; lagcount<=high; lagcount++) {
- ilow = (WebRtc_Word16) (lagcount-4);
+ ilow = (int16_t) (lagcount-4);
/* Compute dot product for the first (lagcount-4) samples */
(*crossDotPtr) = WebRtcSpl_DotProductWithScale(target, buffer-lagcount, ilow, scale);
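
The correlation loop above, and the energy computations in the files that follow, lean on WebRtcSpl_DotProductWithScale. Its assumed semantics (it is not defined in this patch) are a dot product with each partial product right-shifted by 'scale' so the sum stays within 32 bits:

    #include <stdint.h>

    /* Assumed semantics only; illustrative, not the SPL implementation. */
    static int32_t dot_product_with_scale(const int16_t *a, const int16_t *b,
                                          int length, int scale)
    {
        int32_t sum = 0;
        int i;
        for (i = 0; i < length; i++)
            sum += ((int32_t) a[i] * b[i]) >> scale;
        return sum;
    }
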
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/augmented_cb_corr.h b/webrtc/modules/audio_coding/codecs/ilbc/augmented_cb_corr.h
index 8e097fe..533d0a4 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/augmented_cb_corr.h
+++ b/webrtc/modules/audio_coding/codecs/ilbc/augmented_cb_corr.h
@@ -26,17 +26,17 @@
*---------------------------------------------------------------*/
void WebRtcIlbcfix_AugmentedCbCorr(
- WebRtc_Word16 *target, /* (i) Target vector */
- WebRtc_Word16 *buffer, /* (i) Memory buffer */
- WebRtc_Word16 *interpSamples, /* (i) buffer with
+ int16_t *target, /* (i) Target vector */
+ int16_t *buffer, /* (i) Memory buffer */
+ int16_t *interpSamples, /* (i) buffer with
interpolated samples */
- WebRtc_Word32 *crossDot, /* (o) The cross correlation between
+ int32_t *crossDot, /* (o) The cross correlation between
the target and the Augmented
vector */
- WebRtc_Word16 low, /* (i) Lag to start from (typically
+ int16_t low, /* (i) Lag to start from (typically
20) */
- WebRtc_Word16 high, /* (i) Lag to end at (typically 39 */
- WebRtc_Word16 scale); /* (i) Scale factor to use for
+ int16_t high, /* (i) Lag to end at (typically 39 */
+ int16_t scale); /* (i) Scale factor to use for
the crossDot */
#endif
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/bw_expand.c b/webrtc/modules/audio_coding/codecs/ilbc/bw_expand.c
index a2287aa..4c29bb1 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/bw_expand.c
+++ b/webrtc/modules/audio_coding/codecs/ilbc/bw_expand.c
@@ -24,11 +24,11 @@
/* The output is in the same domain as the input */
void WebRtcIlbcfix_BwExpand(
- WebRtc_Word16 *out, /* (o) the bandwidth expanded lpc coefficients */
- WebRtc_Word16 *in, /* (i) the lpc coefficients before bandwidth
+ int16_t *out, /* (o) the bandwidth expanded lpc coefficients */
+ int16_t *in, /* (i) the lpc coefficients before bandwidth
expansion */
- WebRtc_Word16 *coef, /* (i) the bandwidth expansion factor Q15 */
- WebRtc_Word16 length /* (i) the length of lpc coefficient vectors */
+ int16_t *coef, /* (i) the bandwidth expansion factor Q15 */
+ int16_t length /* (i) the length of lpc coefficient vectors */
) {
int i;
@@ -37,6 +37,6 @@
/* out[i] = coef[i] * in[i] with rounding.
in[] and out[] are in Q12 and coef[] is in Q15
*/
- out[i] = (WebRtc_Word16)((WEBRTC_SPL_MUL_16_16(coef[i], in[i])+16384)>>15);
+ out[i] = (int16_t)((WEBRTC_SPL_MUL_16_16(coef[i], in[i])+16384)>>15);
}
}
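
The loop body above is a Q15 multiply with rounding: WEBRTC_SPL_MUL_16_16 is a 16x16 -> 32-bit multiply, and adding 16384 (2^14) before the right shift by 15 rounds to nearest instead of truncating. A plain-C sketch of the same operation:

    #include <stdint.h>

    /* coef is Q15 and in is Q12, so the product is Q27; >>15 returns to Q12. */
    static int16_t mul_q15_round(int16_t coef_q15, int16_t in_q12)
    {
        int32_t prod = (int32_t) coef_q15 * in_q12;        /* Q27          */
        return (int16_t) ((prod + (1 << 14)) >> 15);       /* Q12, rounded */
    }
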
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/bw_expand.h b/webrtc/modules/audio_coding/codecs/ilbc/bw_expand.h
index c9f3fab..b3b16d5 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/bw_expand.h
+++ b/webrtc/modules/audio_coding/codecs/ilbc/bw_expand.h
@@ -26,11 +26,11 @@
*---------------------------------------------------------------*/
void WebRtcIlbcfix_BwExpand(
- WebRtc_Word16 *out, /* (o) the bandwidth expanded lpc coefficients */
- WebRtc_Word16 *in, /* (i) the lpc coefficients before bandwidth
+ int16_t *out, /* (o) the bandwidth expanded lpc coefficients */
+ int16_t *in, /* (i) the lpc coefficients before bandwidth
expansion */
- WebRtc_Word16 *coef, /* (i) the bandwidth expansion factor Q15 */
- WebRtc_Word16 length /* (i) the length of lpc coefficient vectors */
+ int16_t *coef, /* (i) the bandwidth expansion factor Q15 */
+ int16_t length /* (i) the length of lpc coefficient vectors */
);
#endif
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/cb_construct.c b/webrtc/modules/audio_coding/codecs/ilbc/cb_construct.c
index 094a7e4..808451f 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/cb_construct.c
+++ b/webrtc/modules/audio_coding/codecs/ilbc/cb_construct.c
@@ -25,21 +25,21 @@
*---------------------------------------------------------------*/
void WebRtcIlbcfix_CbConstruct(
- WebRtc_Word16 *decvector, /* (o) Decoded vector */
- WebRtc_Word16 *index, /* (i) Codebook indices */
- WebRtc_Word16 *gain_index, /* (i) Gain quantization indices */
- WebRtc_Word16 *mem, /* (i) Buffer for codevector construction */
- WebRtc_Word16 lMem, /* (i) Length of buffer */
- WebRtc_Word16 veclen /* (i) Length of vector */
+ int16_t *decvector, /* (o) Decoded vector */
+ int16_t *index, /* (i) Codebook indices */
+ int16_t *gain_index, /* (i) Gain quantization indices */
+ int16_t *mem, /* (i) Buffer for codevector construction */
+ int16_t lMem, /* (i) Length of buffer */
+ int16_t veclen /* (i) Length of vector */
){
int j;
- WebRtc_Word16 gain[CB_NSTAGES];
+ int16_t gain[CB_NSTAGES];
/* Stack based */
- WebRtc_Word16 cbvec0[SUBL];
- WebRtc_Word16 cbvec1[SUBL];
- WebRtc_Word16 cbvec2[SUBL];
- WebRtc_Word32 a32;
- WebRtc_Word16 *gainPtr;
+ int16_t cbvec0[SUBL];
+ int16_t cbvec1[SUBL];
+ int16_t cbvec2[SUBL];
+ int32_t a32;
+ int16_t *gainPtr;
/* gain de-quantization */
@@ -60,7 +60,7 @@
a32 += WEBRTC_SPL_MUL_16_16(*gainPtr++, cbvec1[j]);
a32 += WEBRTC_SPL_MUL_16_16(*gainPtr, cbvec2[j]);
gainPtr -= 2;
- decvector[j] = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(a32 + 8192, 14);
+ decvector[j] = (int16_t) WEBRTC_SPL_RSHIFT_W32(a32 + 8192, 14);
}
return;
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/cb_construct.h b/webrtc/modules/audio_coding/codecs/ilbc/cb_construct.h
index bec759f..2e9080f 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/cb_construct.h
+++ b/webrtc/modules/audio_coding/codecs/ilbc/cb_construct.h
@@ -26,12 +26,12 @@
*---------------------------------------------------------------*/
void WebRtcIlbcfix_CbConstruct(
- WebRtc_Word16 *decvector, /* (o) Decoded vector */
- WebRtc_Word16 *index, /* (i) Codebook indices */
- WebRtc_Word16 *gain_index, /* (i) Gain quantization indices */
- WebRtc_Word16 *mem, /* (i) Buffer for codevector construction */
- WebRtc_Word16 lMem, /* (i) Length of buffer */
- WebRtc_Word16 veclen /* (i) Length of vector */
+ int16_t *decvector, /* (o) Decoded vector */
+ int16_t *index, /* (i) Codebook indices */
+ int16_t *gain_index, /* (i) Gain quantization indices */
+ int16_t *mem, /* (i) Buffer for codevector construction */
+ int16_t lMem, /* (i) Length of buffer */
+ int16_t veclen /* (i) Length of vector */
);
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/cb_mem_energy.c b/webrtc/modules/audio_coding/codecs/ilbc/cb_mem_energy.c
index 8613fa2..f883287 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/cb_mem_energy.c
+++ b/webrtc/modules/audio_coding/codecs/ilbc/cb_mem_energy.c
@@ -27,18 +27,18 @@
*----------------------------------------------------------------*/
void WebRtcIlbcfix_CbMemEnergy(
- WebRtc_Word16 range,
- WebRtc_Word16 *CB, /* (i) The CB memory (1:st section) */
- WebRtc_Word16 *filteredCB, /* (i) The filtered CB memory (2:nd section) */
- WebRtc_Word16 lMem, /* (i) Length of the CB memory */
- WebRtc_Word16 lTarget, /* (i) Length of the target vector */
- WebRtc_Word16 *energyW16, /* (o) Energy in the CB vectors */
- WebRtc_Word16 *energyShifts, /* (o) Shift value of the energy */
- WebRtc_Word16 scale, /* (i) The scaling of all energy values */
- WebRtc_Word16 base_size /* (i) Index to where the energy values should be stored */
+ int16_t range,
+ int16_t *CB, /* (i) The CB memory (1:st section) */
+ int16_t *filteredCB, /* (i) The filtered CB memory (2:nd section) */
+ int16_t lMem, /* (i) Length of the CB memory */
+ int16_t lTarget, /* (i) Length of the target vector */
+ int16_t *energyW16, /* (o) Energy in the CB vectors */
+ int16_t *energyShifts, /* (o) Shift value of the energy */
+ int16_t scale, /* (i) The scaling of all energy values */
+ int16_t base_size /* (i) Index to where the energy values should be stored */
) {
- WebRtc_Word16 *ppi, *ppo, *pp;
- WebRtc_Word32 energy, tmp32;
+ int16_t *ppi, *ppo, *pp;
+ int32_t energy, tmp32;
/* Compute the energy and store it in a vector. Also the
* corresponding shift values are stored. The energy values
@@ -52,9 +52,9 @@
energy = WebRtcSpl_DotProductWithScale( pp, pp, lTarget, scale);
/* Normalize the energy and store the number of shifts */
- energyShifts[0] = (WebRtc_Word16)WebRtcSpl_NormW32(energy);
+ energyShifts[0] = (int16_t)WebRtcSpl_NormW32(energy);
tmp32 = WEBRTC_SPL_LSHIFT_W32(energy, energyShifts[0]);
- energyW16[0] = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(tmp32, 16);
+ energyW16[0] = (int16_t)WEBRTC_SPL_RSHIFT_W32(tmp32, 16);
/* Compute the energy of the rest of the cb memory
* by step wise adding and subtracting the next
@@ -68,9 +68,9 @@
energy = WebRtcSpl_DotProductWithScale( pp, pp, lTarget, scale);
/* Normalize the energy and store the number of shifts */
- energyShifts[base_size] = (WebRtc_Word16)WebRtcSpl_NormW32(energy);
+ energyShifts[base_size] = (int16_t)WebRtcSpl_NormW32(energy);
tmp32 = WEBRTC_SPL_LSHIFT_W32(energy, energyShifts[base_size]);
- energyW16[base_size] = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(tmp32, 16);
+ energyW16[base_size] = (int16_t)WEBRTC_SPL_RSHIFT_W32(tmp32, 16);
ppi = filteredCB + lMem - 1 - lTarget;
ppo = filteredCB + lMem - 1;
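
The normalize-and-truncate idiom above (store a shift count plus the top 16 bits) recurs throughout the codebook energy code. A sketch of what it computes, assuming WebRtcSpl_NormW32 returns the number of left shifts needed to normalize a positive 32-bit value (an assumption; it is not defined in this patch):

    #include <stdint.h>

    /* Illustrative only: energy is then approximately top16 * 2^(16 - shift). */
    static void normalize_energy(int32_t energy, int16_t *top16, int16_t *shift)
    {
        int16_t sh = 0;
        /* Emulate the assumed behaviour of WebRtcSpl_NormW32 for positive input. */
        while (energy > 0 && (energy << sh) < 0x40000000)
            sh++;
        *shift = sh;
        *top16 = (int16_t) ((energy << sh) >> 16);   /* keep the top 16 bits */
    }
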
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/cb_mem_energy.h b/webrtc/modules/audio_coding/codecs/ilbc/cb_mem_energy.h
index 1aa2b7b..1b50c0b 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/cb_mem_energy.h
+++ b/webrtc/modules/audio_coding/codecs/ilbc/cb_mem_energy.h
@@ -20,15 +20,15 @@
#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_CB_MEM_ENERGY_H_
void WebRtcIlbcfix_CbMemEnergy(
- WebRtc_Word16 range,
- WebRtc_Word16 *CB, /* (i) The CB memory (1:st section) */
- WebRtc_Word16 *filteredCB, /* (i) The filtered CB memory (2:nd section) */
- WebRtc_Word16 lMem, /* (i) Length of the CB memory */
- WebRtc_Word16 lTarget, /* (i) Length of the target vector */
- WebRtc_Word16 *energyW16, /* (o) Energy in the CB vectors */
- WebRtc_Word16 *energyShifts, /* (o) Shift value of the energy */
- WebRtc_Word16 scale, /* (i) The scaling of all energy values */
- WebRtc_Word16 base_size /* (i) Index to where the energy values should be stored */
+ int16_t range,
+ int16_t *CB, /* (i) The CB memory (1:st section) */
+ int16_t *filteredCB, /* (i) The filtered CB memory (2:nd section) */
+ int16_t lMem, /* (i) Length of the CB memory */
+ int16_t lTarget, /* (i) Length of the target vector */
+ int16_t *energyW16, /* (o) Energy in the CB vectors */
+ int16_t *energyShifts, /* (o) Shift value of the energy */
+ int16_t scale, /* (i) The scaling of all energy values */
+ int16_t base_size /* (i) Index to where the energy values should be stored */
);
#endif
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/cb_mem_energy_augmentation.c b/webrtc/modules/audio_coding/codecs/ilbc/cb_mem_energy_augmentation.c
index 0c6f479..29f499f 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/cb_mem_energy_augmentation.c
+++ b/webrtc/modules/audio_coding/codecs/ilbc/cb_mem_energy_augmentation.c
@@ -20,19 +20,19 @@
#include "constants.h"
void WebRtcIlbcfix_CbMemEnergyAugmentation(
- WebRtc_Word16 *interpSamples, /* (i) The interpolated samples */
- WebRtc_Word16 *CBmem, /* (i) The CB memory */
- WebRtc_Word16 scale, /* (i) The scaling of all energy values */
- WebRtc_Word16 base_size, /* (i) Index to where the energy values should be stored */
- WebRtc_Word16 *energyW16, /* (o) Energy in the CB vectors */
- WebRtc_Word16 *energyShifts /* (o) Shift value of the energy */
+ int16_t *interpSamples, /* (i) The interpolated samples */
+ int16_t *CBmem, /* (i) The CB memory */
+ int16_t scale, /* (i) The scaling of all energy values */
+ int16_t base_size, /* (i) Index to where the energy values should be stored */
+ int16_t *energyW16, /* (o) Energy in the CB vectors */
+ int16_t *energyShifts /* (o) Shift value of the energy */
){
- WebRtc_Word32 energy, tmp32;
- WebRtc_Word16 *ppe, *pp, *interpSamplesPtr;
- WebRtc_Word16 *CBmemPtr, lagcount;
- WebRtc_Word16 *enPtr=&energyW16[base_size-20];
- WebRtc_Word16 *enShPtr=&energyShifts[base_size-20];
- WebRtc_Word32 nrjRecursive;
+ int32_t energy, tmp32;
+ int16_t *ppe, *pp, *interpSamplesPtr;
+ int16_t *CBmemPtr, lagcount;
+ int16_t *enPtr=&energyW16[base_size-20];
+ int16_t *enShPtr=&energyShifts[base_size-20];
+ int32_t nrjRecursive;
CBmemPtr = CBmem+147;
interpSamplesPtr = interpSamples;
@@ -58,9 +58,9 @@
energy += WebRtcSpl_DotProductWithScale(pp, pp, SUBL-lagcount, scale);
/* Normalize the energy and store the number of shifts */
- (*enShPtr) = (WebRtc_Word16)WebRtcSpl_NormW32(energy);
+ (*enShPtr) = (int16_t)WebRtcSpl_NormW32(energy);
tmp32 = WEBRTC_SPL_LSHIFT_W32(energy, (*enShPtr));
- (*enPtr) = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(tmp32, 16);
+ (*enPtr) = (int16_t)WEBRTC_SPL_RSHIFT_W32(tmp32, 16);
enShPtr++;
enPtr++;
}
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/cb_mem_energy_augmentation.h b/webrtc/modules/audio_coding/codecs/ilbc/cb_mem_energy_augmentation.h
index 938b87e..6c181bd 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/cb_mem_energy_augmentation.h
+++ b/webrtc/modules/audio_coding/codecs/ilbc/cb_mem_energy_augmentation.h
@@ -20,12 +20,12 @@
#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_CB_MEM_ENERGY_AUGMENTATION_H_
void WebRtcIlbcfix_CbMemEnergyAugmentation(
- WebRtc_Word16 *interpSamples, /* (i) The interpolated samples */
- WebRtc_Word16 *CBmem, /* (i) The CB memory */
- WebRtc_Word16 scale, /* (i) The scaling of all energy values */
- WebRtc_Word16 base_size, /* (i) Index to where the energy values should be stored */
- WebRtc_Word16 *energyW16, /* (o) Energy in the CB vectors */
- WebRtc_Word16 *energyShifts /* (o) Shift value of the energy */
+ int16_t *interpSamples, /* (i) The interpolated samples */
+ int16_t *CBmem, /* (i) The CB memory */
+ int16_t scale, /* (i) The scaling of all energy values */
+ int16_t base_size, /* (i) Index to where the energy values should be stored */
+ int16_t *energyW16, /* (o) Energy in the CB vectors */
+ int16_t *energyShifts /* (o) Shift value of the energy */
);
#endif
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/cb_mem_energy_calc.c b/webrtc/modules/audio_coding/codecs/ilbc/cb_mem_energy_calc.c
index 40bb708..a2bc9b8 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/cb_mem_energy_calc.c
+++ b/webrtc/modules/audio_coding/codecs/ilbc/cb_mem_energy_calc.c
@@ -22,20 +22,20 @@
* by step wise adding and subtracting the next
* sample and the last sample respectively */
void WebRtcIlbcfix_CbMemEnergyCalc(
- WebRtc_Word32 energy, /* (i) input start energy */
- WebRtc_Word16 range, /* (i) number of iterations */
- WebRtc_Word16 *ppi, /* (i) input pointer 1 */
- WebRtc_Word16 *ppo, /* (i) input pointer 2 */
- WebRtc_Word16 *energyW16, /* (o) Energy in the CB vectors */
- WebRtc_Word16 *energyShifts, /* (o) Shift value of the energy */
- WebRtc_Word16 scale, /* (i) The scaling of all energy values */
- WebRtc_Word16 base_size /* (i) Index to where the energy values should be stored */
+ int32_t energy, /* (i) input start energy */
+ int16_t range, /* (i) number of iterations */
+ int16_t *ppi, /* (i) input pointer 1 */
+ int16_t *ppo, /* (i) input pointer 2 */
+ int16_t *energyW16, /* (o) Energy in the CB vectors */
+ int16_t *energyShifts, /* (o) Shift value of the energy */
+ int16_t scale, /* (i) The scaling of all energy values */
+ int16_t base_size /* (i) Index to where the energy values should be stored */
)
{
- WebRtc_Word16 j,shft;
- WebRtc_Word32 tmp;
- WebRtc_Word16 *eSh_ptr;
- WebRtc_Word16 *eW16_ptr;
+ int16_t j,shft;
+ int32_t tmp;
+ int16_t *eSh_ptr;
+ int16_t *eW16_ptr;
eSh_ptr = &energyShifts[1+base_size];
@@ -53,13 +53,13 @@
ppi--;
ppo--;
- /* Normalize the energy into a WebRtc_Word16 and store
+ /* Normalize the energy into an int16_t and store
the number of shifts */
- shft = (WebRtc_Word16)WebRtcSpl_NormW32(energy);
+ shft = (int16_t)WebRtcSpl_NormW32(energy);
*eSh_ptr++ = shft;
tmp = WEBRTC_SPL_LSHIFT_W32(energy, shft);
- *eW16_ptr++ = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(tmp, 16);
+ *eW16_ptr++ = (int16_t)WEBRTC_SPL_RSHIFT_W32(tmp, 16);
}
}
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/cb_mem_energy_calc.h b/webrtc/modules/audio_coding/codecs/ilbc/cb_mem_energy_calc.h
index ee2e285..c7e1e54 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/cb_mem_energy_calc.h
+++ b/webrtc/modules/audio_coding/codecs/ilbc/cb_mem_energy_calc.h
@@ -20,14 +20,14 @@
#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_CB_MEM_ENERGY_CALC_H_
void WebRtcIlbcfix_CbMemEnergyCalc(
- WebRtc_Word32 energy, /* (i) input start energy */
- WebRtc_Word16 range, /* (i) number of iterations */
- WebRtc_Word16 *ppi, /* (i) input pointer 1 */
- WebRtc_Word16 *ppo, /* (i) input pointer 2 */
- WebRtc_Word16 *energyW16, /* (o) Energy in the CB vectors */
- WebRtc_Word16 *energyShifts, /* (o) Shift value of the energy */
- WebRtc_Word16 scale, /* (i) The scaling of all energy values */
- WebRtc_Word16 base_size /* (i) Index to where the energy values should be stored */
+ int32_t energy, /* (i) input start energy */
+ int16_t range, /* (i) number of iterations */
+ int16_t *ppi, /* (i) input pointer 1 */
+ int16_t *ppo, /* (i) input pointer 2 */
+ int16_t *energyW16, /* (o) Energy in the CB vectors */
+ int16_t *energyShifts, /* (o) Shift value of the energy */
+ int16_t scale, /* (i) The scaling of all energy values */
+ int16_t base_size /* (i) Index to where the energy values should be stored */
);
#endif
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/cb_search.c b/webrtc/modules/audio_coding/codecs/ilbc/cb_search.c
index 551a9a2..667240a 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/cb_search.c
+++ b/webrtc/modules/audio_coding/codecs/ilbc/cb_search.c
@@ -36,51 +36,51 @@
void WebRtcIlbcfix_CbSearch(
iLBC_Enc_Inst_t *iLBCenc_inst,
/* (i) the encoder state structure */
- WebRtc_Word16 *index, /* (o) Codebook indices */
- WebRtc_Word16 *gain_index, /* (o) Gain quantization indices */
- WebRtc_Word16 *intarget, /* (i) Target vector for encoding */
- WebRtc_Word16 *decResidual,/* (i) Decoded residual for codebook construction */
- WebRtc_Word16 lMem, /* (i) Length of buffer */
- WebRtc_Word16 lTarget, /* (i) Length of vector */
- WebRtc_Word16 *weightDenum,/* (i) weighting filter coefficients in Q12 */
- WebRtc_Word16 block /* (i) the subblock number */
+ int16_t *index, /* (o) Codebook indices */
+ int16_t *gain_index, /* (o) Gain quantization indices */
+ int16_t *intarget, /* (i) Target vector for encoding */
+ int16_t *decResidual,/* (i) Decoded residual for codebook construction */
+ int16_t lMem, /* (i) Length of buffer */
+ int16_t lTarget, /* (i) Length of vector */
+ int16_t *weightDenum,/* (i) weighting filter coefficients in Q12 */
+ int16_t block /* (i) the subblock number */
) {
- WebRtc_Word16 i, j, stage, range;
- WebRtc_Word16 *pp, scale, tmp;
- WebRtc_Word16 bits, temp1, temp2;
- WebRtc_Word16 base_size;
- WebRtc_Word32 codedEner, targetEner;
- WebRtc_Word16 gains[CB_NSTAGES+1];
- WebRtc_Word16 *cb_vecPtr;
- WebRtc_Word16 indexOffset, sInd, eInd;
- WebRtc_Word32 CritMax=0;
- WebRtc_Word16 shTotMax=WEBRTC_SPL_WORD16_MIN;
- WebRtc_Word16 bestIndex=0;
- WebRtc_Word16 bestGain=0;
- WebRtc_Word16 indexNew, CritNewSh;
- WebRtc_Word32 CritNew;
- WebRtc_Word32 *cDotPtr;
- WebRtc_Word16 noOfZeros;
- WebRtc_Word16 *gainPtr;
- WebRtc_Word32 t32, tmpW32;
- WebRtc_Word16 *WebRtcIlbcfix_kGainSq5_ptr;
+ int16_t i, j, stage, range;
+ int16_t *pp, scale, tmp;
+ int16_t bits, temp1, temp2;
+ int16_t base_size;
+ int32_t codedEner, targetEner;
+ int16_t gains[CB_NSTAGES+1];
+ int16_t *cb_vecPtr;
+ int16_t indexOffset, sInd, eInd;
+ int32_t CritMax=0;
+ int16_t shTotMax=WEBRTC_SPL_WORD16_MIN;
+ int16_t bestIndex=0;
+ int16_t bestGain=0;
+ int16_t indexNew, CritNewSh;
+ int32_t CritNew;
+ int32_t *cDotPtr;
+ int16_t noOfZeros;
+ int16_t *gainPtr;
+ int32_t t32, tmpW32;
+ int16_t *WebRtcIlbcfix_kGainSq5_ptr;
/* Stack based */
- WebRtc_Word16 CBbuf[CB_MEML+LPC_FILTERORDER+CB_HALFFILTERLEN];
- WebRtc_Word32 cDot[128];
- WebRtc_Word32 Crit[128];
- WebRtc_Word16 targetVec[SUBL+LPC_FILTERORDER];
- WebRtc_Word16 cbvectors[CB_MEML + 1]; /* Adding one extra position for
+ int16_t CBbuf[CB_MEML+LPC_FILTERORDER+CB_HALFFILTERLEN];
+ int32_t cDot[128];
+ int32_t Crit[128];
+ int16_t targetVec[SUBL+LPC_FILTERORDER];
+ int16_t cbvectors[CB_MEML + 1]; /* Adding one extra position for
Coverity warnings. */
- WebRtc_Word16 codedVec[SUBL];
- WebRtc_Word16 interpSamples[20*4];
- WebRtc_Word16 interpSamplesFilt[20*4];
- WebRtc_Word16 energyW16[CB_EXPAND*128];
- WebRtc_Word16 energyShifts[CB_EXPAND*128];
- WebRtc_Word16 *inverseEnergy=energyW16; /* Reuse memory */
- WebRtc_Word16 *inverseEnergyShifts=energyShifts; /* Reuse memory */
- WebRtc_Word16 *buf = &CBbuf[LPC_FILTERORDER];
- WebRtc_Word16 *target = &targetVec[LPC_FILTERORDER];
- WebRtc_Word16 *aug_vec = (WebRtc_Word16*)cDot; /* length [SUBL], reuse memory */
+ int16_t codedVec[SUBL];
+ int16_t interpSamples[20*4];
+ int16_t interpSamplesFilt[20*4];
+ int16_t energyW16[CB_EXPAND*128];
+ int16_t energyShifts[CB_EXPAND*128];
+ int16_t *inverseEnergy=energyW16; /* Reuse memory */
+ int16_t *inverseEnergyShifts=energyShifts; /* Reuse memory */
+ int16_t *buf = &CBbuf[LPC_FILTERORDER];
+ int16_t *target = &targetVec[LPC_FILTERORDER];
+ int16_t *aug_vec = (int16_t*)cDot; /* length [SUBL], reuse memory */
/* Determine size of codebook sections */
@@ -108,8 +108,8 @@
/* Find the highest absolute value to calculate proper
vector scale factor (so that it uses 12 bits) */
- temp1 = WebRtcSpl_MaxAbsValueW16(buf, (WebRtc_Word16)lMem);
- temp2 = WebRtcSpl_MaxAbsValueW16(target, (WebRtc_Word16)lTarget);
+ temp1 = WebRtcSpl_MaxAbsValueW16(buf, (int16_t)lMem);
+ temp2 = WebRtcSpl_MaxAbsValueW16(target, (int16_t)lTarget);
if ((temp1>0)&&(temp2>0)) {
temp1 = WEBRTC_SPL_MAX(temp1, temp2);
@@ -147,7 +147,7 @@
/* Compute the CB vectors' energies for the second cb section (filtered cb) */
WebRtcIlbcfix_CbMemEnergyAugmentation(interpSamplesFilt, cbvectors,
- scale, (WebRtc_Word16)(base_size+20), energyW16, energyShifts);
+ scale, (int16_t)(base_size+20), energyW16, energyShifts);
/* Compute the CB vectors' energies and store them in the vector
* energyW16. Also the corresponding shift values are stored. The
@@ -221,11 +221,11 @@
/* Update the global best index and the corresponding gain */
WebRtcIlbcfix_CbUpdateBestIndex(
- CritNew, CritNewSh, (WebRtc_Word16)(indexNew+indexOffset), cDot[indexNew+indexOffset],
+ CritNew, CritNewSh, (int16_t)(indexNew+indexOffset), cDot[indexNew+indexOffset],
inverseEnergy[indexNew+indexOffset], inverseEnergyShifts[indexNew+indexOffset],
&CritMax, &shTotMax, &bestIndex, &bestGain);
- sInd=bestIndex-(WebRtc_Word16)(CB_RESRANGE>>1);
+ sInd=bestIndex-(int16_t)(CB_RESRANGE>>1);
eInd=sInd+CB_RESRANGE;
if (sInd<0) {
eInd-=sInd;
@@ -243,7 +243,7 @@
if (sInd<20) {
WebRtcIlbcfix_AugmentedCbCorr(target, cbvectors+lMem,
interpSamplesFilt, cDot,
- (WebRtc_Word16)(sInd+20), (WebRtc_Word16)(WEBRTC_SPL_MIN(39, (eInd+20))), scale);
+ (int16_t)(sInd+20), (int16_t)(WEBRTC_SPL_MIN(39, (eInd+20))), scale);
i=20;
}
@@ -251,14 +251,14 @@
cb_vecPtr = cbvectors+lMem-20-i;
/* Calculate the cross correlations (main part of the filtered CB) */
- WebRtcSpl_CrossCorrelation(cDotPtr, target, cb_vecPtr, lTarget, (WebRtc_Word16)(eInd-i+1), scale, -1);
+ WebRtcSpl_CrossCorrelation(cDotPtr, target, cb_vecPtr, lTarget, (int16_t)(eInd-i+1), scale, -1);
} else {
cDotPtr = cDot;
cb_vecPtr = cbvectors+lMem-lTarget-sInd;
/* Calculate the cross correlations (main part of the filtered CB) */
- WebRtcSpl_CrossCorrelation(cDotPtr, target, cb_vecPtr, lTarget, (WebRtc_Word16)(eInd-sInd+1), scale, -1);
+ WebRtcSpl_CrossCorrelation(cDotPtr, target, cb_vecPtr, lTarget, (int16_t)(eInd-sInd+1), scale, -1);
}
@@ -267,13 +267,13 @@
/* Search for best index in this part of the vector */
WebRtcIlbcfix_CbSearchCore(
- cDot, (WebRtc_Word16)(eInd-sInd+1), stage, inverseEnergy+indexOffset,
+ cDot, (int16_t)(eInd-sInd+1), stage, inverseEnergy+indexOffset,
inverseEnergyShifts+indexOffset, Crit,
&indexNew, &CritNew, &CritNewSh);
/* Update the global best index and the corresponding gain */
WebRtcIlbcfix_CbUpdateBestIndex(
- CritNew, CritNewSh, (WebRtc_Word16)(indexNew+indexOffset), cDot[indexNew],
+ CritNew, CritNewSh, (int16_t)(indexNew+indexOffset), cDot[indexNew],
inverseEnergy[indexNew+indexOffset], inverseEnergyShifts[indexNew+indexOffset],
&CritMax, &shTotMax, &bestIndex, &bestGain);
@@ -281,7 +281,7 @@
bestGain = WebRtcIlbcfix_GainQuant(bestGain,
- (WebRtc_Word16)WEBRTC_SPL_ABS_W16(gains[stage]), stage, &gain_index[stage]);
+ (int16_t)WEBRTC_SPL_ABS_W16(gains[stage]), stage, &gain_index[stage]);
/* Extract the best (according to measure) codebook vector
Also adjust the index, so that the augmented vectors are last.
@@ -308,7 +308,7 @@
/* Adjust index and extract vector */
index[stage]+=(base_size-20);
- WebRtcIlbcfix_CreateAugmentedVec((WebRtc_Word16)(index[stage]-base_size+40),
+ WebRtcIlbcfix_CreateAugmentedVec((int16_t)(index[stage]-base_size+40),
buf+lMem, aug_vec);
pp = aug_vec;
@@ -323,7 +323,7 @@
} else {
/* Adjust index and extract vector */
index[stage]+=(base_size-20);
- WebRtcIlbcfix_CreateAugmentedVec((WebRtc_Word16)(index[stage]-2*base_size+40),
+ WebRtcIlbcfix_CreateAugmentedVec((int16_t)(index[stage]-2*base_size+40),
cbvectors+lMem, aug_vec);
pp = aug_vec;
}
@@ -333,7 +333,7 @@
/* Subtract the best codebook vector, according
to measure, from the target vector */
- WebRtcSpl_AddAffineVectorToVector(target, pp, (WebRtc_Word16)(-bestGain), (WebRtc_Word32)8192, (WebRtc_Word16)14, (int)lTarget);
+ WebRtcSpl_AddAffineVectorToVector(target, pp, (int16_t)(-bestGain), (int32_t)8192, (int16_t)14, (int)lTarget);
/* record quantized gain */
gains[stage+1] = bestGain;
@@ -350,8 +350,8 @@
j=gain_index[0];
- temp1 = (WebRtc_Word16)WebRtcSpl_NormW32(codedEner);
- temp2 = (WebRtc_Word16)WebRtcSpl_NormW32(targetEner);
+ temp1 = (int16_t)WebRtcSpl_NormW32(codedEner);
+ temp2 = (int16_t)WebRtcSpl_NormW32(targetEner);
if(temp1 < temp2) {
bits = 16 - temp1;
@@ -359,19 +359,19 @@
bits = 16 - temp2;
}
- tmp = (WebRtc_Word16) WEBRTC_SPL_MUL_16_16_RSFT(gains[1],gains[1], 14);
+ tmp = (int16_t) WEBRTC_SPL_MUL_16_16_RSFT(gains[1],gains[1], 14);
targetEner = WEBRTC_SPL_MUL_16_16(
WEBRTC_SPL_SHIFT_W32(targetEner, -bits), tmp);
- tmpW32 = ((WebRtc_Word32)(gains[1]-1))<<1;
+ tmpW32 = ((int32_t)(gains[1]-1))<<1;
/* Pointer to the table that contains
gain_sq5TblFIX * gain_sq5TblFIX in Q14 */
- gainPtr=(WebRtc_Word16*)WebRtcIlbcfix_kGainSq5Sq+gain_index[0];
- temp1 = (WebRtc_Word16)WEBRTC_SPL_SHIFT_W32(codedEner, -bits);
+ gainPtr=(int16_t*)WebRtcIlbcfix_kGainSq5Sq+gain_index[0];
+ temp1 = (int16_t)WEBRTC_SPL_SHIFT_W32(codedEner, -bits);
- WebRtcIlbcfix_kGainSq5_ptr = (WebRtc_Word16*)&WebRtcIlbcfix_kGainSq5[j];
+ WebRtcIlbcfix_kGainSq5_ptr = (int16_t*)&WebRtcIlbcfix_kGainSq5[j];
/* targetEner and codedEner are in Q(-2*scale) */
for (i=gain_index[0];i<32;i++) {
@@ -386,7 +386,7 @@
if (t32 < 0) {
if ((*WebRtcIlbcfix_kGainSq5_ptr) < tmpW32) {
j=i;
- WebRtcIlbcfix_kGainSq5_ptr = (WebRtc_Word16*)&WebRtcIlbcfix_kGainSq5[i];
+ WebRtcIlbcfix_kGainSq5_ptr = (int16_t*)&WebRtcIlbcfix_kGainSq5[i];
}
}
gainPtr++;
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/cb_search.h b/webrtc/modules/audio_coding/codecs/ilbc/cb_search.h
index e4ad4b5..fc62190 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/cb_search.h
+++ b/webrtc/modules/audio_coding/codecs/ilbc/cb_search.h
@@ -22,14 +22,14 @@
void WebRtcIlbcfix_CbSearch(
iLBC_Enc_Inst_t *iLBCenc_inst,
/* (i) the encoder state structure */
- WebRtc_Word16 *index, /* (o) Codebook indices */
- WebRtc_Word16 *gain_index, /* (o) Gain quantization indices */
- WebRtc_Word16 *intarget, /* (i) Target vector for encoding */
- WebRtc_Word16 *decResidual,/* (i) Decoded residual for codebook construction */
- WebRtc_Word16 lMem, /* (i) Length of buffer */
- WebRtc_Word16 lTarget, /* (i) Length of vector */
- WebRtc_Word16 *weightDenum,/* (i) weighting filter coefficients in Q12 */
- WebRtc_Word16 block /* (i) the subblock number */
+ int16_t *index, /* (o) Codebook indices */
+ int16_t *gain_index, /* (o) Gain quantization indices */
+ int16_t *intarget, /* (i) Target vector for encoding */
+ int16_t *decResidual,/* (i) Decoded residual for codebook construction */
+ int16_t lMem, /* (i) Length of buffer */
+ int16_t lTarget, /* (i) Length of vector */
+ int16_t *weightDenum,/* (i) weighting filter coefficients in Q12 */
+ int16_t block /* (i) the subblock number */
);
#endif
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/cb_search_core.c b/webrtc/modules/audio_coding/codecs/ilbc/cb_search_core.c
index 711e2df..c2299d5 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/cb_search_core.c
+++ b/webrtc/modules/audio_coding/codecs/ilbc/cb_search_core.c
@@ -20,29 +20,29 @@
#include "constants.h"
void WebRtcIlbcfix_CbSearchCore(
- WebRtc_Word32 *cDot, /* (i) Cross Correlation */
- WebRtc_Word16 range, /* (i) Search range */
- WebRtc_Word16 stage, /* (i) Stage of this search */
- WebRtc_Word16 *inverseEnergy, /* (i) Inversed energy */
- WebRtc_Word16 *inverseEnergyShift, /* (i) Shifts of inversed energy
+ int32_t *cDot, /* (i) Cross Correlation */
+ int16_t range, /* (i) Search range */
+ int16_t stage, /* (i) Stage of this search */
+ int16_t *inverseEnergy, /* (i) Inversed energy */
+ int16_t *inverseEnergyShift, /* (i) Shifts of inversed energy
with the offset 2*16-29 */
- WebRtc_Word32 *Crit, /* (o) The criteria */
- WebRtc_Word16 *bestIndex, /* (o) Index that corresponds to
+ int32_t *Crit, /* (o) The criteria */
+ int16_t *bestIndex, /* (o) Index that corresponds to
maximum criteria (in this
vector) */
- WebRtc_Word32 *bestCrit, /* (o) Value of critera for the
+ int32_t *bestCrit, /* (o) Value of critera for the
chosen index */
- WebRtc_Word16 *bestCritSh) /* (o) The domain of the chosen
+ int16_t *bestCritSh) /* (o) The domain of the chosen
criteria */
{
- WebRtc_Word32 maxW32, tmp32;
- WebRtc_Word16 max, sh, tmp16;
+ int32_t maxW32, tmp32;
+ int16_t max, sh, tmp16;
int i;
- WebRtc_Word32 *cDotPtr;
- WebRtc_Word16 cDotSqW16;
- WebRtc_Word16 *inverseEnergyPtr;
- WebRtc_Word32 *critPtr;
- WebRtc_Word16 *inverseEnergyShiftPtr;
+ int32_t *cDotPtr;
+ int16_t cDotSqW16;
+ int16_t *inverseEnergyPtr;
+ int32_t *critPtr;
+ int16_t *inverseEnergyShiftPtr;
/* Don't allow negative values for stage 0 */
if (stage==0) {
@@ -53,10 +53,10 @@
}
}
- /* Normalize cDot to WebRtc_Word16, calculate the square of cDot and store the upper WebRtc_Word16 */
+ /* Normalize cDot to int16_t, calculate the square of cDot and store the upper int16_t */
maxW32 = WebRtcSpl_MaxAbsValueW32(cDot, range);
- sh = (WebRtc_Word16)WebRtcSpl_NormW32(maxW32);
+ sh = (int16_t)WebRtcSpl_NormW32(maxW32);
cDotPtr = cDot;
inverseEnergyPtr = inverseEnergy;
critPtr = Crit;
@@ -64,10 +64,10 @@
max=WEBRTC_SPL_WORD16_MIN;
for (i=0;i<range;i++) {
- /* Calculate cDot*cDot and put the result in a WebRtc_Word16 */
+ /* Calculate cDot*cDot and put the result in an int16_t */
tmp32 = WEBRTC_SPL_LSHIFT_W32(*cDotPtr,sh);
- tmp16 = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(tmp32,16);
- cDotSqW16 = (WebRtc_Word16)(((WebRtc_Word32)(tmp16)*(tmp16))>>16);
+ tmp16 = (int16_t)WEBRTC_SPL_RSHIFT_W32(tmp32,16);
+ cDotSqW16 = (int16_t)(((int32_t)(tmp16)*(tmp16))>>16);
/* Calculate the criteria (cDot*cDot/energy) */
*critPtr=WEBRTC_SPL_MUL_16_16(cDotSqW16, (*inverseEnergyPtr));
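
A hypothetical floating-point restatement of what the fixed-point search above computes, based on the declared outputs (it ignores the block scaling handled by the shift bookkeeping; for stage 0 the code first zeroes negative correlations):

    /* Illustrative only: the winning index maximizes crossCorr[i]^2 * inverseEnergy[i]. */
    static int search_core_ref(const double *cross_corr,
                               const double *inverse_energy, int range)
    {
        int i, best = 0;
        double crit, best_crit = -1.0;
        for (i = 0; i < range; i++) {
            crit = cross_corr[i] * cross_corr[i] * inverse_energy[i];
            if (crit > best_crit) { best_crit = crit; best = i; }
        }
        return best;
    }
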
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/cb_search_core.h b/webrtc/modules/audio_coding/codecs/ilbc/cb_search_core.h
index e074c52..e4f2e92 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/cb_search_core.h
+++ b/webrtc/modules/audio_coding/codecs/ilbc/cb_search_core.h
@@ -22,19 +22,19 @@
#include "defines.h"
void WebRtcIlbcfix_CbSearchCore(
- WebRtc_Word32 *cDot, /* (i) Cross Correlation */
- WebRtc_Word16 range, /* (i) Search range */
- WebRtc_Word16 stage, /* (i) Stage of this search */
- WebRtc_Word16 *inverseEnergy, /* (i) Inversed energy */
- WebRtc_Word16 *inverseEnergyShift, /* (i) Shifts of inversed energy
+ int32_t *cDot, /* (i) Cross Correlation */
+ int16_t range, /* (i) Search range */
+ int16_t stage, /* (i) Stage of this search */
+ int16_t *inverseEnergy, /* (i) Inversed energy */
+ int16_t *inverseEnergyShift, /* (i) Shifts of inversed energy
with the offset 2*16-29 */
- WebRtc_Word32 *Crit, /* (o) The criteria */
- WebRtc_Word16 *bestIndex, /* (o) Index that corresponds to
+ int32_t *Crit, /* (o) The criteria */
+ int16_t *bestIndex, /* (o) Index that corresponds to
maximum criteria (in this
vector) */
- WebRtc_Word32 *bestCrit, /* (o) Value of critera for the
+ int32_t *bestCrit, /* (o) Value of critera for the
chosen index */
- WebRtc_Word16 *bestCritSh); /* (o) The domain of the chosen
+ int16_t *bestCritSh); /* (o) The domain of the chosen
criteria */
#endif
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/cb_update_best_index.c b/webrtc/modules/audio_coding/codecs/ilbc/cb_update_best_index.c
index bf85408..88ea199 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/cb_update_best_index.c
+++ b/webrtc/modules/audio_coding/codecs/ilbc/cb_update_best_index.c
@@ -21,22 +21,22 @@
#include "constants.h"
void WebRtcIlbcfix_CbUpdateBestIndex(
- WebRtc_Word32 CritNew, /* (i) New Potentially best Criteria */
- WebRtc_Word16 CritNewSh, /* (i) Shift value of above Criteria */
- WebRtc_Word16 IndexNew, /* (i) Index of new Criteria */
- WebRtc_Word32 cDotNew, /* (i) Cross dot of new index */
- WebRtc_Word16 invEnergyNew, /* (i) Inversed energy new index */
- WebRtc_Word16 energyShiftNew, /* (i) Energy shifts of new index */
- WebRtc_Word32 *CritMax, /* (i/o) Maximum Criteria (so far) */
- WebRtc_Word16 *shTotMax, /* (i/o) Shifts of maximum criteria */
- WebRtc_Word16 *bestIndex, /* (i/o) Index that corresponds to
+ int32_t CritNew, /* (i) New Potentially best Criteria */
+ int16_t CritNewSh, /* (i) Shift value of above Criteria */
+ int16_t IndexNew, /* (i) Index of new Criteria */
+ int32_t cDotNew, /* (i) Cross dot of new index */
+ int16_t invEnergyNew, /* (i) Inversed energy new index */
+ int16_t energyShiftNew, /* (i) Energy shifts of new index */
+ int32_t *CritMax, /* (i/o) Maximum Criteria (so far) */
+ int16_t *shTotMax, /* (i/o) Shifts of maximum criteria */
+ int16_t *bestIndex, /* (i/o) Index that corresponds to
maximum criteria */
- WebRtc_Word16 *bestGain) /* (i/o) Gain in Q14 that corresponds
+ int16_t *bestGain) /* (i/o) Gain in Q14 that corresponds
to maximum criteria */
{
- WebRtc_Word16 shOld, shNew, tmp16;
- WebRtc_Word16 scaleTmp;
- WebRtc_Word32 gainW32;
+ int16_t shOld, shNew, tmp16;
+ int16_t scaleTmp;
+ int32_t gainW32;
/* Normalize the new and old Criteria to the same domain */
if (CritNewSh>(*shTotMax)) {
@@ -54,19 +54,19 @@
if (WEBRTC_SPL_RSHIFT_W32(CritNew, shNew)>
WEBRTC_SPL_RSHIFT_W32((*CritMax),shOld)) {
- tmp16 = (WebRtc_Word16)WebRtcSpl_NormW32(cDotNew);
+ tmp16 = (int16_t)WebRtcSpl_NormW32(cDotNew);
tmp16 = 16 - tmp16;
/* Calculate the gain in Q14
Compensate for inverseEnergyshift in Q29 and that the energy
- value was stored in a WebRtc_Word16 (shifted down 16 steps)
+ value was stored in an int16_t (shifted down 16 steps)
=> 29-14+16 = 31 */
scaleTmp = -energyShiftNew-tmp16+31;
scaleTmp = WEBRTC_SPL_MIN(31, scaleTmp);
gainW32 = WEBRTC_SPL_MUL_16_16_RSFT(
- ((WebRtc_Word16)WEBRTC_SPL_SHIFT_W32(cDotNew, -tmp16)), invEnergyNew, scaleTmp);
+ ((int16_t)WEBRTC_SPL_SHIFT_W32(cDotNew, -tmp16)), invEnergyNew, scaleTmp);
/* Check if criteria satisfies Gain criteria (max 1.3)
if it is larger set the gain to 1.3
@@ -77,7 +77,7 @@
} else if (gainW32<-21299) {
*bestGain=-21299;
} else {
- *bestGain=(WebRtc_Word16)gainW32;
+ *bestGain=(int16_t)gainW32;
}
*CritMax=CritNew;
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/cb_update_best_index.h b/webrtc/modules/audio_coding/codecs/ilbc/cb_update_best_index.h
index 9015187..e8519d4 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/cb_update_best_index.h
+++ b/webrtc/modules/audio_coding/codecs/ilbc/cb_update_best_index.h
@@ -22,17 +22,17 @@
#include "defines.h"
void WebRtcIlbcfix_CbUpdateBestIndex(
- WebRtc_Word32 CritNew, /* (i) New Potentially best Criteria */
- WebRtc_Word16 CritNewSh, /* (i) Shift value of above Criteria */
- WebRtc_Word16 IndexNew, /* (i) Index of new Criteria */
- WebRtc_Word32 cDotNew, /* (i) Cross dot of new index */
- WebRtc_Word16 invEnergyNew, /* (i) Inversed energy new index */
- WebRtc_Word16 energyShiftNew, /* (i) Energy shifts of new index */
- WebRtc_Word32 *CritMax, /* (i/o) Maximum Criteria (so far) */
- WebRtc_Word16 *shTotMax, /* (i/o) Shifts of maximum criteria */
- WebRtc_Word16 *bestIndex, /* (i/o) Index that corresponds to
+ int32_t CritNew, /* (i) New Potentially best Criteria */
+ int16_t CritNewSh, /* (i) Shift value of above Criteria */
+ int16_t IndexNew, /* (i) Index of new Criteria */
+ int32_t cDotNew, /* (i) Cross dot of new index */
+ int16_t invEnergyNew, /* (i) Inversed energy new index */
+ int16_t energyShiftNew, /* (i) Energy shifts of new index */
+ int32_t *CritMax, /* (i/o) Maximum Criteria (so far) */
+ int16_t *shTotMax, /* (i/o) Shifts of maximum criteria */
+ int16_t *bestIndex, /* (i/o) Index that corresponds to
maximum criteria */
- WebRtc_Word16 *bestGain); /* (i/o) Gain in Q14 that corresponds
+ int16_t *bestGain); /* (i/o) Gain in Q14 that corresponds
to maximum criteria */
#endif
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/chebyshev.c b/webrtc/modules/audio_coding/codecs/ilbc/chebyshev.c
index 90108ff..b49dd79 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/chebyshev.c
+++ b/webrtc/modules/audio_coding/codecs/ilbc/chebyshev.c
@@ -26,57 +26,57 @@
* T_i(x) is the i:th order Chebyshev polynomial
*------------------------------------------------------------------*/
-WebRtc_Word16 WebRtcIlbcfix_Chebyshev(
+int16_t WebRtcIlbcfix_Chebyshev(
/* (o) Result of C(x) */
- WebRtc_Word16 x, /* (i) Value to the Chevyshev polynomial */
- WebRtc_Word16 *f /* (i) The coefficients in the polynomial */
+ int16_t x, /* (i) Value to the Chevyshev polynomial */
+ int16_t *f /* (i) The coefficients in the polynomial */
) {
- WebRtc_Word16 b1_high, b1_low; /* Use the high, low format to increase the accuracy */
- WebRtc_Word32 b2;
- WebRtc_Word32 tmp1W32;
- WebRtc_Word32 tmp2W32;
+ int16_t b1_high, b1_low; /* Use the high, low format to increase the accuracy */
+ int32_t b2;
+ int32_t tmp1W32;
+ int32_t tmp2W32;
int i;
- b2 = (WebRtc_Word32)0x1000000; /* b2 = 1.0 (Q23) */
+ b2 = (int32_t)0x1000000; /* b2 = 1.0 (Q23) */
/* Calculate b1 = 2*x + f[1] */
- tmp1W32 = WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)x, 10);
- tmp1W32 += WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)f[1], 14);
+ tmp1W32 = WEBRTC_SPL_LSHIFT_W32((int32_t)x, 10);
+ tmp1W32 += WEBRTC_SPL_LSHIFT_W32((int32_t)f[1], 14);
for (i = 2; i < 5; i++) {
tmp2W32 = tmp1W32;
/* Split b1 (in tmp1W32) into a high and low part */
- b1_high = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(tmp1W32, 16);
- b1_low = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(tmp1W32-WEBRTC_SPL_LSHIFT_W32(((WebRtc_Word32)b1_high),16), 1);
+ b1_high = (int16_t)WEBRTC_SPL_RSHIFT_W32(tmp1W32, 16);
+ b1_low = (int16_t)WEBRTC_SPL_RSHIFT_W32(tmp1W32-WEBRTC_SPL_LSHIFT_W32(((int32_t)b1_high),16), 1);
/* Calculate 2*x*b1-b2+f[i] */
tmp1W32 = WEBRTC_SPL_LSHIFT_W32( (WEBRTC_SPL_MUL_16_16(b1_high, x) +
WEBRTC_SPL_MUL_16_16_RSFT(b1_low, x, 15)), 2);
tmp1W32 -= b2;
- tmp1W32 += WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)f[i], 14);
+ tmp1W32 += WEBRTC_SPL_LSHIFT_W32((int32_t)f[i], 14);
/* Update b2 for next round */
b2 = tmp2W32;
}
/* Split b1 (in tmp1W32) into a high and low part */
- b1_high = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(tmp1W32, 16);
- b1_low = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(tmp1W32-WEBRTC_SPL_LSHIFT_W32(((WebRtc_Word32)b1_high),16), 1);
+ b1_high = (int16_t)WEBRTC_SPL_RSHIFT_W32(tmp1W32, 16);
+ b1_low = (int16_t)WEBRTC_SPL_RSHIFT_W32(tmp1W32-WEBRTC_SPL_LSHIFT_W32(((int32_t)b1_high),16), 1);
/* tmp1W32 = x*b1 - b2 + f[i]/2 */
tmp1W32 = WEBRTC_SPL_LSHIFT_W32(WEBRTC_SPL_MUL_16_16(b1_high, x), 1) +
WEBRTC_SPL_LSHIFT_W32(WEBRTC_SPL_MUL_16_16_RSFT(b1_low, x, 15), 1);
tmp1W32 -= b2;
- tmp1W32 += WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)f[i], 13);
+ tmp1W32 += WEBRTC_SPL_LSHIFT_W32((int32_t)f[i], 13);
- /* Handle overflows and set to maximum or minimum WebRtc_Word16 instead */
- if (tmp1W32>((WebRtc_Word32)33553408)) {
+ /* Handle overflows and set to maximum or minimum int16_t instead */
+ if (tmp1W32>((int32_t)33553408)) {
return(WEBRTC_SPL_WORD16_MAX);
- } else if (tmp1W32<((WebRtc_Word32)-33554432)) {
+ } else if (tmp1W32<((int32_t)-33554432)) {
return(WEBRTC_SPL_WORD16_MIN);
} else {
- return((WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(tmp1W32, 10));
+ return((int16_t)WEBRTC_SPL_RSHIFT_W32(tmp1W32, 10));
}
}
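For orientation, the fixed-point loop in this hunk is a Clenshaw-style evaluation of a Chebyshev series. A floating-point sketch of the same recurrence (illustration only; the Q-domain shifts and the int16_t saturation of the real routine are omitted) looks like this:

static double chebyshev_ref(double x, const double f[6])
{
  /* b2 corresponds to "b2 = 1.0 (Q23)" above, b1 to the running tmp1W32. */
  double b2 = 1.0;
  double b1 = 2.0 * x + f[1];
  int i;
  for (i = 2; i < 5; i++) {
    double tmp = b1;
    b1 = 2.0 * x * b1 - b2 + f[i];   /* 2*x*b1 - b2 + f[i] */
    b2 = tmp;
  }
  return x * b1 - b2 + 0.5 * f[5];   /* final step uses f[5]/2 */
}

The overflow branch at the end of the fixed-point version simply clamps this result to the int16_t range.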
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/chebyshev.h b/webrtc/modules/audio_coding/codecs/ilbc/chebyshev.h
index 57aab99..bf10132 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/chebyshev.h
+++ b/webrtc/modules/audio_coding/codecs/ilbc/chebyshev.h
@@ -28,10 +28,10 @@
* T_i(x) is the i:th order Chebyshev polynomial
*------------------------------------------------------------------*/
-WebRtc_Word16 WebRtcIlbcfix_Chebyshev(
+int16_t WebRtcIlbcfix_Chebyshev(
/* (o) Result of C(x) */
- WebRtc_Word16 x, /* (i) Value to the Chevyshev polynomial */
- WebRtc_Word16 *f /* (i) The coefficients in the polynomial */
+ int16_t x, /* (i) Value to the Chevyshev polynomial */
+ int16_t *f /* (i) The coefficients in the polynomial */
);
#endif
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/comp_corr.c b/webrtc/modules/audio_coding/codecs/ilbc/comp_corr.c
index 3d7f93e..a53e8a7 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/comp_corr.c
+++ b/webrtc/modules/audio_coding/codecs/ilbc/comp_corr.c
@@ -24,15 +24,15 @@
*---------------------------------------------------------------*/
void WebRtcIlbcfix_CompCorr(
- WebRtc_Word32 *corr, /* (o) cross correlation */
- WebRtc_Word32 *ener, /* (o) energy */
- WebRtc_Word16 *buffer, /* (i) signal buffer */
- WebRtc_Word16 lag, /* (i) pitch lag */
- WebRtc_Word16 bLen, /* (i) length of buffer */
- WebRtc_Word16 sRange, /* (i) correlation search length */
- WebRtc_Word16 scale /* (i) number of rightshifts to use */
+ int32_t *corr, /* (o) cross correlation */
+ int32_t *ener, /* (o) energy */
+ int16_t *buffer, /* (i) signal buffer */
+ int16_t lag, /* (i) pitch lag */
+ int16_t bLen, /* (i) length of buffer */
+ int16_t sRange, /* (i) correlation search length */
+ int16_t scale /* (i) number of rightshifts to use */
){
- WebRtc_Word16 *w16ptr;
+ int16_t *w16ptr;
w16ptr=&buffer[bLen-sRange-lag];
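Only the signature and the pointer setup of WebRtcIlbcfix_CompCorr are visible in this hunk. As a rough, hedged sketch of the cross-correlation and energy accumulation the interface describes (names reused from the header; the per-sample right-shift by scale is assumed, and the exact rounding of the real fixed-point code may differ):

#include <stdint.h>

static void comp_corr_ref(int32_t *corr, int32_t *ener, const int16_t *buffer,
                          int lag, int bLen, int sRange, int scale)
{
  const int16_t *w16ptr = &buffer[bLen - sRange - lag];  /* lagged segment   */
  const int16_t *tail   = &buffer[bLen - sRange];        /* most recent part */
  int32_t c = 0, e = 0;
  int i;
  for (i = 0; i < sRange; i++) {
    c += ((int32_t)tail[i] * w16ptr[i]) >> scale;   /* cross correlation        */
    e += ((int32_t)w16ptr[i] * w16ptr[i]) >> scale; /* energy of lagged segment */
  }
  *corr = c;
  *ener = e;
}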
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/comp_corr.h b/webrtc/modules/audio_coding/codecs/ilbc/comp_corr.h
index cd46532..4ff80aa 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/comp_corr.h
+++ b/webrtc/modules/audio_coding/codecs/ilbc/comp_corr.h
@@ -27,13 +27,13 @@
*---------------------------------------------------------------*/
void WebRtcIlbcfix_CompCorr(
- WebRtc_Word32 *corr, /* (o) cross correlation */
- WebRtc_Word32 *ener, /* (o) energy */
- WebRtc_Word16 *buffer, /* (i) signal buffer */
- WebRtc_Word16 lag, /* (i) pitch lag */
- WebRtc_Word16 bLen, /* (i) length of buffer */
- WebRtc_Word16 sRange, /* (i) correlation search length */
- WebRtc_Word16 scale /* (i) number of rightshifts to use */
+ int32_t *corr, /* (o) cross correlation */
+ int32_t *ener, /* (o) energy */
+ int16_t *buffer, /* (i) signal buffer */
+ int16_t lag, /* (i) pitch lag */
+ int16_t bLen, /* (i) length of buffer */
+ int16_t sRange, /* (i) correlation search length */
+ int16_t scale /* (i) number of rightshifts to use */
);
#endif
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/constants.c b/webrtc/modules/audio_coding/codecs/ilbc/constants.c
index 5ebe9be..1d384b7 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/constants.c
+++ b/webrtc/modules/audio_coding/codecs/ilbc/constants.c
@@ -21,25 +21,25 @@
/* HP Filters {b[0] b[1] b[2] -a[1] -a[2]} */
-const WebRtc_Word16 WebRtcIlbcfix_kHpInCoefs[5] = {3798, -7596, 3798, 7807, -3733};
-const WebRtc_Word16 WebRtcIlbcfix_kHpOutCoefs[5] = {3849, -7699, 3849, 7918, -3833};
+const int16_t WebRtcIlbcfix_kHpInCoefs[5] = {3798, -7596, 3798, 7807, -3733};
+const int16_t WebRtcIlbcfix_kHpOutCoefs[5] = {3849, -7699, 3849, 7918, -3833};
/* Window in Q11 to window the energies of the 5 choises (3 for 20ms) in the choise for
the 80 sample start state
*/
-const WebRtc_Word16 WebRtcIlbcfix_kStartSequenceEnrgWin[NSUB_MAX-1]= {
+const int16_t WebRtcIlbcfix_kStartSequenceEnrgWin[NSUB_MAX-1]= {
1638, 1843, 2048, 1843, 1638
};
/* LP Filter coeffs used for downsampling */
-const WebRtc_Word16 WebRtcIlbcfix_kLpFiltCoefs[FILTERORDER_DS_PLUS1]= {
+const int16_t WebRtcIlbcfix_kLpFiltCoefs[FILTERORDER_DS_PLUS1]= {
-273, 512, 1297, 1696, 1297, 512, -273
};
/* Constants used in the LPC calculations */
/* Hanning LPC window (in Q15) */
-const WebRtc_Word16 WebRtcIlbcfix_kLpcWin[BLOCKL_MAX] = {
+const int16_t WebRtcIlbcfix_kLpcWin[BLOCKL_MAX] = {
6, 22, 50, 89, 139, 200, 272, 355, 449, 554, 669, 795,
932, 1079, 1237, 1405, 1583, 1771, 1969, 2177, 2395, 2622, 2858, 3104,
3359, 3622, 3894, 4175, 4464, 4761, 5066, 5379, 5699, 6026, 6361, 6702,
@@ -63,7 +63,7 @@
};
/* Asymmetric LPC window (in Q15)*/
-const WebRtc_Word16 WebRtcIlbcfix_kLpcAsymWin[BLOCKL_MAX] = {
+const int16_t WebRtcIlbcfix_kLpcAsymWin[BLOCKL_MAX] = {
2, 7, 15, 27, 42, 60, 81, 106, 135, 166, 201, 239,
280, 325, 373, 424, 478, 536, 597, 661, 728, 798, 872, 949,
1028, 1111, 1197, 1287, 1379, 1474, 1572, 1674, 1778, 1885, 1995, 2108,
@@ -87,7 +87,7 @@
};
/* Lag window for LPC (Q31) */
-const WebRtc_Word32 WebRtcIlbcfix_kLpcLagWin[LPC_FILTERORDER + 1]={
+const int32_t WebRtcIlbcfix_kLpcLagWin[LPC_FILTERORDER + 1]={
2147483647, 2144885453, 2137754373, 2125918626, 2109459810,
2088483140, 2063130336, 2033564590, 1999977009, 1962580174,
1921610283};
@@ -95,7 +95,7 @@
/* WebRtcIlbcfix_kLpcChirpSyntDenum vector in Q15 corresponding
* floating point vector {1 0.9025 0.9025^2 0.9025^3 ...}
*/
-const WebRtc_Word16 WebRtcIlbcfix_kLpcChirpSyntDenum[LPC_FILTERORDER + 1] = {
+const int16_t WebRtcIlbcfix_kLpcChirpSyntDenum[LPC_FILTERORDER + 1] = {
32767, 29573, 26690, 24087,
21739, 19619, 17707, 15980,
14422, 13016, 11747};
@@ -103,12 +103,12 @@
/* WebRtcIlbcfix_kLpcChirpWeightDenum in Q15 corresponding to
* floating point vector {1 0.4222 0.4222^2... }
*/
-const WebRtc_Word16 WebRtcIlbcfix_kLpcChirpWeightDenum[LPC_FILTERORDER + 1] = {
+const int16_t WebRtcIlbcfix_kLpcChirpWeightDenum[LPC_FILTERORDER + 1] = {
32767, 13835, 5841, 2466, 1041, 440,
186, 78, 33, 14, 6};
/* LSF quantization Q13 domain */
-const WebRtc_Word16 WebRtcIlbcfix_kLsfCb[64 * 3 + 128 * 3 + 128 * 4] = {
+const int16_t WebRtcIlbcfix_kLsfCb[64 * 3 + 128 * 3 + 128 * 4] = {
1273, 2238, 3696,
3199, 5309, 8209,
3606, 5671, 7829,
@@ -431,22 +431,22 @@
15878, 18550, 20718, 22906
};
-const WebRtc_Word16 WebRtcIlbcfix_kLsfDimCb[LSF_NSPLIT] = {3, 3, 4};
-const WebRtc_Word16 WebRtcIlbcfix_kLsfSizeCb[LSF_NSPLIT] = {64,128,128};
+const int16_t WebRtcIlbcfix_kLsfDimCb[LSF_NSPLIT] = {3, 3, 4};
+const int16_t WebRtcIlbcfix_kLsfSizeCb[LSF_NSPLIT] = {64,128,128};
-const WebRtc_Word16 WebRtcIlbcfix_kLsfMean[LPC_FILTERORDER] = {
+const int16_t WebRtcIlbcfix_kLsfMean[LPC_FILTERORDER] = {
2308, 3652, 5434, 7885,
10255, 12559, 15160, 17513,
20328, 22752};
-const WebRtc_Word16 WebRtcIlbcfix_kLspMean[LPC_FILTERORDER] = {
+const int16_t WebRtcIlbcfix_kLspMean[LPC_FILTERORDER] = {
31476, 29565, 25819, 18725, 10276,
1236, -9049, -17600, -25884, -30618
};
/* Q14 */
-const WebRtc_Word16 WebRtcIlbcfix_kLsfWeight20ms[4] = {12288, 8192, 4096, 0};
-const WebRtc_Word16 WebRtcIlbcfix_kLsfWeight30ms[6] = {8192, 16384, 10923, 5461, 0, 0};
+const int16_t WebRtcIlbcfix_kLsfWeight20ms[4] = {12288, 8192, 4096, 0};
+const int16_t WebRtcIlbcfix_kLsfWeight30ms[6] = {8192, 16384, 10923, 5461, 0, 0};
/*
cos(x) in Q15
@@ -454,7 +454,7 @@
used in WebRtcIlbcfix_Lsp2Lsf()
*/
-const WebRtc_Word16 WebRtcIlbcfix_kCos[64] = {
+const int16_t WebRtcIlbcfix_kCos[64] = {
32767, 32729, 32610, 32413, 32138, 31786, 31357, 30853,
30274, 29622, 28899, 28106, 27246, 26320, 25330, 24279,
23170, 22006, 20788, 19520, 18205, 16846, 15447, 14010,
@@ -469,7 +469,7 @@
Derivative in Q19, used to interpolate between the
WebRtcIlbcfix_kCos[] values to get a more exact y = cos(x)
*/
-const WebRtc_Word16 WebRtcIlbcfix_kCosDerivative[64] = {
+const int16_t WebRtcIlbcfix_kCosDerivative[64] = {
-632, -1893, -3150, -4399, -5638, -6863, -8072, -9261,
-10428, -11570, -12684, -13767, -14817, -15832, -16808, -17744,
-18637, -19486, -20287, -21039, -21741, -22390, -22986, -23526,
@@ -484,7 +484,7 @@
WebRtcIlbcfix_kCosGrid[i] = cos((2*pi*i)/(float)(2*COS_GRID_POINTS));
*/
-const WebRtc_Word16 WebRtcIlbcfix_kCosGrid[COS_GRID_POINTS + 1] = {
+const int16_t WebRtcIlbcfix_kCosGrid[COS_GRID_POINTS + 1] = {
32760, 32723, 32588, 32364, 32051, 31651, 31164, 30591,
29935, 29196, 28377, 27481, 26509, 25465, 24351, 23170,
21926, 20621, 19260, 17846, 16384, 14876, 13327, 11743,
@@ -500,7 +500,7 @@
used in WebRtcIlbcfix_Lsp2Lsf()
*/
-const WebRtc_Word16 WebRtcIlbcfix_kAcosDerivative[64] = {
+const int16_t WebRtcIlbcfix_kAcosDerivative[64] = {
-26887, -8812, -5323, -3813, -2979, -2444, -2081, -1811,
-1608, -1450, -1322, -1219, -1132, -1059, -998, -946,
-901, -861, -827, -797, -772, -750, -730, -713,
@@ -515,7 +515,7 @@
/* Tables for quantization of start state */
/* State quantization tables */
-const WebRtc_Word16 WebRtcIlbcfix_kStateSq3[8] = { /* Values in Q13 */
+const int16_t WebRtcIlbcfix_kStateSq3[8] = { /* Values in Q13 */
-30473, -17838, -9257, -2537,
3639, 10893, 19958, 32636
};
@@ -524,7 +524,7 @@
less or equal than value 0 => index = 0
less or equal than value k => index = k
*/
-const WebRtc_Word32 WebRtcIlbcfix_kChooseFrgQuant[64] = {
+const int32_t WebRtcIlbcfix_kChooseFrgQuant[64] = {
118, 163, 222, 305, 425, 604,
851, 1174, 1617, 2222, 3080, 4191,
5525, 7215, 9193, 11540, 14397, 17604,
@@ -538,7 +538,7 @@
37302935, 58819185, 109782723, WEBRTC_SPL_WORD32_MAX
};
-const WebRtc_Word16 WebRtcIlbcfix_kScale[64] = {
+const int16_t WebRtcIlbcfix_kScale[64] = {
/* Values in Q16 */
29485, 25003, 21345, 18316, 15578, 13128, 10973, 9310, 7955,
6762, 5789, 4877, 4255, 3699, 3258, 2904, 2595, 2328,
@@ -560,7 +560,7 @@
37:58 in Q5
59:63 in Q3
*/
-const WebRtc_Word16 WebRtcIlbcfix_kFrgQuantMod[64] = {
+const int16_t WebRtcIlbcfix_kFrgQuantMod[64] = {
/* First 37 values in Q8 */
569, 671, 786, 916, 1077, 1278,
1529, 1802, 2109, 2481, 2898, 3440,
@@ -583,36 +583,36 @@
/* Expansion filter to get additional cb section.
* Q12 and reversed compared to flp
*/
-const WebRtc_Word16 WebRtcIlbcfix_kCbFiltersRev[CB_FILTERLEN]={
+const int16_t WebRtcIlbcfix_kCbFiltersRev[CB_FILTERLEN]={
-140, 446, -755, 3302, 2922, -590, 343, -138};
/* Weighting coefficients for short lags.
* [0.2 0.4 0.6 0.8] in Q15 */
-const WebRtc_Word16 WebRtcIlbcfix_kAlpha[4]={
+const int16_t WebRtcIlbcfix_kAlpha[4]={
6554, 13107, 19661, 26214};
/* Ranges for search and filters at different subframes */
-const WebRtc_Word16 WebRtcIlbcfix_kSearchRange[5][CB_NSTAGES]={
+const int16_t WebRtcIlbcfix_kSearchRange[5][CB_NSTAGES]={
{58,58,58}, {108,44,44}, {108,108,108}, {108,108,108}, {108,108,108}};
-const WebRtc_Word16 WebRtcIlbcfix_kFilterRange[5]={63, 85, 125, 147, 147};
+const int16_t WebRtcIlbcfix_kFilterRange[5]={63, 85, 125, 147, 147};
/* Gain Quantization for the codebook gains of the 3 stages */
-/* Q14 (one extra value (max WebRtc_Word16) to simplify for the search) */
-const WebRtc_Word16 WebRtcIlbcfix_kGainSq3[9]={
+/* Q14 (one extra value (max int16_t) to simplify for the search) */
+const int16_t WebRtcIlbcfix_kGainSq3[9]={
-16384, -10813, -5407, 0, 4096, 8192,
12288, 16384, 32767};
-/* Q14 (one extra value (max WebRtc_Word16) to simplify for the search) */
-const WebRtc_Word16 WebRtcIlbcfix_kGainSq4[17]={
+/* Q14 (one extra value (max int16_t) to simplify for the search) */
+const int16_t WebRtcIlbcfix_kGainSq4[17]={
-17203, -14746, -12288, -9830, -7373, -4915,
-2458, 0, 2458, 4915, 7373, 9830,
12288, 14746, 17203, 19661, 32767};
-/* Q14 (one extra value (max WebRtc_Word16) to simplify for the search) */
-const WebRtc_Word16 WebRtcIlbcfix_kGainSq5[33]={
+/* Q14 (one extra value (max int16_t) to simplify for the search) */
+const int16_t WebRtcIlbcfix_kGainSq5[33]={
614, 1229, 1843, 2458, 3072, 3686,
4301, 4915, 5530, 6144, 6758, 7373,
7987, 8602, 9216, 9830, 10445, 11059,
@@ -621,7 +621,7 @@
19046, 19661, 32767};
/* Q14 gain_sq5Tbl squared in Q14 */
-const WebRtc_Word16 WebRtcIlbcfix_kGainSq5Sq[32] = {
+const int16_t WebRtcIlbcfix_kGainSq5Sq[32] = {
23, 92, 207, 368, 576, 829,
1129, 1474, 1866, 2304, 2787, 3317,
3893, 4516, 5184, 5897, 6658, 7464,
@@ -630,37 +630,37 @@
22140, 23593
};
-const WebRtc_Word16* const WebRtcIlbcfix_kGain[3] =
+const int16_t* const WebRtcIlbcfix_kGain[3] =
{WebRtcIlbcfix_kGainSq5, WebRtcIlbcfix_kGainSq4, WebRtcIlbcfix_kGainSq3};
/* Tables for the Enhancer, using upsamling factor 4 (ENH_UPS0 = 4) */
-const WebRtc_Word16 WebRtcIlbcfix_kEnhPolyPhaser[ENH_UPS0][ENH_FLO_MULT2_PLUS1]={
+const int16_t WebRtcIlbcfix_kEnhPolyPhaser[ENH_UPS0][ENH_FLO_MULT2_PLUS1]={
{0, 0, 0, 4096, 0, 0, 0},
{64, -315, 1181, 3531, -436, 77, -64},
{97, -509, 2464, 2464, -509, 97, -97},
{77, -436, 3531, 1181, -315, 64, -77}
};
-const WebRtc_Word16 WebRtcIlbcfix_kEnhWt[3] = {
+const int16_t WebRtcIlbcfix_kEnhWt[3] = {
4800, 16384, 27968 /* Q16 */
};
-const WebRtc_Word16 WebRtcIlbcfix_kEnhPlocs[ENH_NBLOCKS_TOT] = {
+const int16_t WebRtcIlbcfix_kEnhPlocs[ENH_NBLOCKS_TOT] = {
160, 480, 800, 1120, 1440, 1760, 2080, 2400 /* Q(-2) */
};
/* PLC table */
-const WebRtc_Word16 WebRtcIlbcfix_kPlcPerSqr[6] = { /* Grid points for square of periodiciy in Q15 */
+const int16_t WebRtcIlbcfix_kPlcPerSqr[6] = { /* Grid points for square of periodiciy in Q15 */
839, 1343, 2048, 2998, 4247, 5849
};
-const WebRtc_Word16 WebRtcIlbcfix_kPlcPitchFact[6] = { /* Value of y=(x^4-0.4)/(0.7-0.4) in grid points in Q15 */
+const int16_t WebRtcIlbcfix_kPlcPitchFact[6] = { /* Value of y=(x^4-0.4)/(0.7-0.4) in grid points in Q15 */
0, 5462, 10922, 16384, 21846, 27306
};
-const WebRtc_Word16 WebRtcIlbcfix_kPlcPfSlope[6] = { /* Slope of y=(x^4-0.4)/(0.7-0.4) in Q11 */
+const int16_t WebRtcIlbcfix_kPlcPfSlope[6] = { /* Slope of y=(x^4-0.4)/(0.7-0.4) in Q11 */
26667, 18729, 13653, 10258, 7901, 6214
};
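All of these tables are annotated with a Q-format (Q11, Q13, Q14, Q15, Q31, ...). As a quick reminder of the convention, hedged as a standalone illustration rather than project code, a QN integer stores round(x * 2^N):

#include <stdint.h>
#include <stdio.h>

/* Valid for n up to 30 with 32-bit int; the Q31 lag window would need 64-bit math. */
static double from_q(int32_t v, int n) { return (double)v / (double)(1 << n); }
static int32_t to_q(double x, int n)   { return (int32_t)(x * (1 << n) + (x >= 0 ? 0.5 : -0.5)); }

int main(void)
{
  printf("%f\n", from_q(16384, 14));   /* 1.0: the Q14 gain entry 16384           */
  printf("%f\n", from_q(32767, 15));   /* ~1.0: the Q15 cosine table maximum      */
  printf("%d\n",  to_q(0.9025, 15));   /* 29573: cf. WebRtcIlbcfix_kLpcChirpSyntDenum[1] */
  return 0;
}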
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/constants.h b/webrtc/modules/audio_coding/codecs/ilbc/constants.h
index f787f74..cdc8a9c 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/constants.h
+++ b/webrtc/modules/audio_coding/codecs/ilbc/constants.h
@@ -24,69 +24,69 @@
/* high pass filters */
-extern const WebRtc_Word16 WebRtcIlbcfix_kHpInCoefs[];
-extern const WebRtc_Word16 WebRtcIlbcfix_kHpOutCoefs[];
+extern const int16_t WebRtcIlbcfix_kHpInCoefs[];
+extern const int16_t WebRtcIlbcfix_kHpOutCoefs[];
/* Window for start state decision */
-extern const WebRtc_Word16 WebRtcIlbcfix_kStartSequenceEnrgWin[];
+extern const int16_t WebRtcIlbcfix_kStartSequenceEnrgWin[];
/* low pass filter used for downsampling */
-extern const WebRtc_Word16 WebRtcIlbcfix_kLpFiltCoefs[];
+extern const int16_t WebRtcIlbcfix_kLpFiltCoefs[];
/* LPC analysis and quantization */
-extern const WebRtc_Word16 WebRtcIlbcfix_kLpcWin[];
-extern const WebRtc_Word16 WebRtcIlbcfix_kLpcAsymWin[];
-extern const WebRtc_Word32 WebRtcIlbcfix_kLpcLagWin[];
-extern const WebRtc_Word16 WebRtcIlbcfix_kLpcChirpSyntDenum[];
-extern const WebRtc_Word16 WebRtcIlbcfix_kLpcChirpWeightDenum[];
-extern const WebRtc_Word16 WebRtcIlbcfix_kLsfDimCb[];
-extern const WebRtc_Word16 WebRtcIlbcfix_kLsfSizeCb[];
-extern const WebRtc_Word16 WebRtcIlbcfix_kLsfCb[];
-extern const WebRtc_Word16 WebRtcIlbcfix_kLsfWeight20ms[];
-extern const WebRtc_Word16 WebRtcIlbcfix_kLsfWeight30ms[];
-extern const WebRtc_Word16 WebRtcIlbcfix_kLsfMean[];
-extern const WebRtc_Word16 WebRtcIlbcfix_kLspMean[];
-extern const WebRtc_Word16 WebRtcIlbcfix_kCos[];
-extern const WebRtc_Word16 WebRtcIlbcfix_kCosDerivative[];
-extern const WebRtc_Word16 WebRtcIlbcfix_kCosGrid[];
-extern const WebRtc_Word16 WebRtcIlbcfix_kAcosDerivative[];
+extern const int16_t WebRtcIlbcfix_kLpcWin[];
+extern const int16_t WebRtcIlbcfix_kLpcAsymWin[];
+extern const int32_t WebRtcIlbcfix_kLpcLagWin[];
+extern const int16_t WebRtcIlbcfix_kLpcChirpSyntDenum[];
+extern const int16_t WebRtcIlbcfix_kLpcChirpWeightDenum[];
+extern const int16_t WebRtcIlbcfix_kLsfDimCb[];
+extern const int16_t WebRtcIlbcfix_kLsfSizeCb[];
+extern const int16_t WebRtcIlbcfix_kLsfCb[];
+extern const int16_t WebRtcIlbcfix_kLsfWeight20ms[];
+extern const int16_t WebRtcIlbcfix_kLsfWeight30ms[];
+extern const int16_t WebRtcIlbcfix_kLsfMean[];
+extern const int16_t WebRtcIlbcfix_kLspMean[];
+extern const int16_t WebRtcIlbcfix_kCos[];
+extern const int16_t WebRtcIlbcfix_kCosDerivative[];
+extern const int16_t WebRtcIlbcfix_kCosGrid[];
+extern const int16_t WebRtcIlbcfix_kAcosDerivative[];
/* state quantization tables */
-extern const WebRtc_Word16 WebRtcIlbcfix_kStateSq3[];
-extern const WebRtc_Word32 WebRtcIlbcfix_kChooseFrgQuant[];
-extern const WebRtc_Word16 WebRtcIlbcfix_kScale[];
-extern const WebRtc_Word16 WebRtcIlbcfix_kFrgQuantMod[];
+extern const int16_t WebRtcIlbcfix_kStateSq3[];
+extern const int32_t WebRtcIlbcfix_kChooseFrgQuant[];
+extern const int16_t WebRtcIlbcfix_kScale[];
+extern const int16_t WebRtcIlbcfix_kFrgQuantMod[];
/* Ranges for search and filters at different subframes */
-extern const WebRtc_Word16 WebRtcIlbcfix_kSearchRange[5][CB_NSTAGES];
-extern const WebRtc_Word16 WebRtcIlbcfix_kFilterRange[];
+extern const int16_t WebRtcIlbcfix_kSearchRange[5][CB_NSTAGES];
+extern const int16_t WebRtcIlbcfix_kFilterRange[];
/* gain quantization tables */
-extern const WebRtc_Word16 WebRtcIlbcfix_kGainSq3[];
-extern const WebRtc_Word16 WebRtcIlbcfix_kGainSq4[];
-extern const WebRtc_Word16 WebRtcIlbcfix_kGainSq5[];
-extern const WebRtc_Word16 WebRtcIlbcfix_kGainSq5Sq[];
-extern const WebRtc_Word16* const WebRtcIlbcfix_kGain[];
+extern const int16_t WebRtcIlbcfix_kGainSq3[];
+extern const int16_t WebRtcIlbcfix_kGainSq4[];
+extern const int16_t WebRtcIlbcfix_kGainSq5[];
+extern const int16_t WebRtcIlbcfix_kGainSq5Sq[];
+extern const int16_t* const WebRtcIlbcfix_kGain[];
/* adaptive codebook definitions */
-extern const WebRtc_Word16 WebRtcIlbcfix_kCbFiltersRev[];
-extern const WebRtc_Word16 WebRtcIlbcfix_kAlpha[];
+extern const int16_t WebRtcIlbcfix_kCbFiltersRev[];
+extern const int16_t WebRtcIlbcfix_kAlpha[];
/* enhancer definitions */
-extern const WebRtc_Word16 WebRtcIlbcfix_kEnhPolyPhaser[ENH_UPS0][ENH_FLO_MULT2_PLUS1];
-extern const WebRtc_Word16 WebRtcIlbcfix_kEnhWt[];
-extern const WebRtc_Word16 WebRtcIlbcfix_kEnhPlocs[];
+extern const int16_t WebRtcIlbcfix_kEnhPolyPhaser[ENH_UPS0][ENH_FLO_MULT2_PLUS1];
+extern const int16_t WebRtcIlbcfix_kEnhWt[];
+extern const int16_t WebRtcIlbcfix_kEnhPlocs[];
/* PLC tables */
-extern const WebRtc_Word16 WebRtcIlbcfix_kPlcPerSqr[];
-extern const WebRtc_Word16 WebRtcIlbcfix_kPlcPitchFact[];
-extern const WebRtc_Word16 WebRtcIlbcfix_kPlcPfSlope[];
+extern const int16_t WebRtcIlbcfix_kPlcPerSqr[];
+extern const int16_t WebRtcIlbcfix_kPlcPitchFact[];
+extern const int16_t WebRtcIlbcfix_kPlcPfSlope[];
#endif
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/create_augmented_vec.c b/webrtc/modules/audio_coding/codecs/ilbc/create_augmented_vec.c
index f021c4d..965cbe0 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/create_augmented_vec.c
+++ b/webrtc/modules/audio_coding/codecs/ilbc/create_augmented_vec.c
@@ -25,14 +25,14 @@
*----------------------------------------------------------------*/
void WebRtcIlbcfix_CreateAugmentedVec(
- WebRtc_Word16 index, /* (i) Index for the augmented vector to be created */
- WebRtc_Word16 *buffer, /* (i) Pointer to the end of the codebook memory that
+ int16_t index, /* (i) Index for the augmented vector to be created */
+ int16_t *buffer, /* (i) Pointer to the end of the codebook memory that
is used for creation of the augmented codebook */
- WebRtc_Word16 *cbVec /* (o) The construced codebook vector */
+ int16_t *cbVec /* (o) The construced codebook vector */
) {
- WebRtc_Word16 ilow;
- WebRtc_Word16 *ppo, *ppi;
- WebRtc_Word16 cbVecTmp[4];
+ int16_t ilow;
+ int16_t *ppo, *ppi;
+ int16_t cbVecTmp[4];
ilow = index-4;
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/create_augmented_vec.h b/webrtc/modules/audio_coding/codecs/ilbc/create_augmented_vec.h
index 970a9be..e3c3c7b 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/create_augmented_vec.h
+++ b/webrtc/modules/audio_coding/codecs/ilbc/create_augmented_vec.h
@@ -27,10 +27,10 @@
*----------------------------------------------------------------*/
void WebRtcIlbcfix_CreateAugmentedVec(
- WebRtc_Word16 index, /* (i) Index for the augmented vector to be created */
- WebRtc_Word16 *buffer, /* (i) Pointer to the end of the codebook memory that
+ int16_t index, /* (i) Index for the augmented vector to be created */
+ int16_t *buffer, /* (i) Pointer to the end of the codebook memory that
is used for creation of the augmented codebook */
- WebRtc_Word16 *cbVec /* (o) The construced codebook vector */
+ int16_t *cbVec /* (o) The construced codebook vector */
);
#endif
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/decode.c b/webrtc/modules/audio_coding/codecs/ilbc/decode.c
index 3bca764..5da9685 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/decode.c
+++ b/webrtc/modules/audio_coding/codecs/ilbc/decode.c
@@ -37,25 +37,25 @@
*---------------------------------------------------------------*/
void WebRtcIlbcfix_DecodeImpl(
- WebRtc_Word16 *decblock, /* (o) decoded signal block */
- const WebRtc_UWord16 *bytes, /* (i) encoded signal bits */
+ int16_t *decblock, /* (o) decoded signal block */
+ const uint16_t *bytes, /* (i) encoded signal bits */
iLBC_Dec_Inst_t *iLBCdec_inst, /* (i/o) the decoder state
structure */
- WebRtc_Word16 mode /* (i) 0: bad packet, PLC,
+ int16_t mode /* (i) 0: bad packet, PLC,
1: normal */
) {
int i;
- WebRtc_Word16 order_plus_one;
+ int16_t order_plus_one;
- WebRtc_Word16 last_bit;
- WebRtc_Word16 *data;
+ int16_t last_bit;
+ int16_t *data;
/* Stack based */
- WebRtc_Word16 decresidual[BLOCKL_MAX];
- WebRtc_Word16 PLCresidual[BLOCKL_MAX + LPC_FILTERORDER];
- WebRtc_Word16 syntdenum[NSUB_MAX*(LPC_FILTERORDER+1)];
- WebRtc_Word16 PLClpc[LPC_FILTERORDER + 1];
+ int16_t decresidual[BLOCKL_MAX];
+ int16_t PLCresidual[BLOCKL_MAX + LPC_FILTERORDER];
+ int16_t syntdenum[NSUB_MAX*(LPC_FILTERORDER+1)];
+ int16_t PLClpc[LPC_FILTERORDER + 1];
#ifndef WEBRTC_BIG_ENDIAN
- WebRtc_UWord16 swapped[NO_OF_WORDS_30MS];
+ uint16_t swapped[NO_OF_WORDS_30MS];
#endif
iLBC_bits *iLBCbits_inst = (iLBC_bits*)PLCresidual;
@@ -87,14 +87,14 @@
if (mode) { /* No bit errors was detected, continue decoding */
/* Stack based */
- WebRtc_Word16 lsfdeq[LPC_FILTERORDER*LPC_N_MAX];
- WebRtc_Word16 weightdenum[(LPC_FILTERORDER + 1)*NSUB_MAX];
+ int16_t lsfdeq[LPC_FILTERORDER*LPC_N_MAX];
+ int16_t weightdenum[(LPC_FILTERORDER + 1)*NSUB_MAX];
/* adjust index */
WebRtcIlbcfix_IndexConvDec(iLBCbits_inst->cb_index);
/* decode the lsf */
- WebRtcIlbcfix_SimpleLsfDeQ(lsfdeq, (WebRtc_Word16*)(iLBCbits_inst->lsf), iLBCdec_inst->lpc_n);
+ WebRtcIlbcfix_SimpleLsfDeQ(lsfdeq, (int16_t*)(iLBCbits_inst->lsf), iLBCdec_inst->lpc_n);
WebRtcIlbcfix_LsfCheck(lsfdeq, LPC_FILTERORDER, iLBCdec_inst->lpc_n);
WebRtcIlbcfix_DecoderInterpolateLsp(syntdenum, weightdenum,
lsfdeq, LPC_FILTERORDER, iLBCdec_inst);
@@ -105,7 +105,7 @@
/* preparing the plc for a future loss! */
WebRtcIlbcfix_DoThePlc( PLCresidual, PLClpc, 0,
decresidual, syntdenum + (LPC_FILTERORDER + 1)*(iLBCdec_inst->nsub - 1),
- (WebRtc_Word16)(iLBCdec_inst->last_lag), iLBCdec_inst);
+ (int16_t)(iLBCdec_inst->last_lag), iLBCdec_inst);
/* Use the output from doThePLC */
WEBRTC_SPL_MEMCPY_W16(decresidual, PLCresidual, iLBCdec_inst->blockl);
@@ -121,7 +121,7 @@
/* packet loss conceal */
WebRtcIlbcfix_DoThePlc( PLCresidual, PLClpc, 1,
- decresidual, syntdenum, (WebRtc_Word16)(iLBCdec_inst->last_lag), iLBCdec_inst);
+ decresidual, syntdenum, (int16_t)(iLBCdec_inst->last_lag), iLBCdec_inst);
WEBRTC_SPL_MEMCPY_W16(decresidual, PLCresidual, iLBCdec_inst->blockl);
@@ -187,18 +187,18 @@
WEBRTC_SPL_MEMCPY_W16(iLBCdec_inst->syntMem, &data[iLBCdec_inst->blockl-LPC_FILTERORDER], LPC_FILTERORDER);
} else { /* Enhancer not activated */
- WebRtc_Word16 lag;
+ int16_t lag;
/* Find last lag (since the enhancer is not called to give this info) */
lag = 20;
if (iLBCdec_inst->mode==20) {
- lag = (WebRtc_Word16)WebRtcIlbcfix_XcorrCoef(
+ lag = (int16_t)WebRtcIlbcfix_XcorrCoef(
&decresidual[iLBCdec_inst->blockl-60],
&decresidual[iLBCdec_inst->blockl-60-lag],
60,
80, lag, -1);
} else {
- lag = (WebRtc_Word16)WebRtcIlbcfix_XcorrCoef(
+ lag = (int16_t)WebRtcIlbcfix_XcorrCoef(
&decresidual[iLBCdec_inst->blockl-ENH_BLOCKL],
&decresidual[iLBCdec_inst->blockl-ENH_BLOCKL-lag],
ENH_BLOCKL,
@@ -228,7 +228,7 @@
WEBRTC_SPL_MEMCPY_W16(decblock,data,iLBCdec_inst->blockl);
/* High pass filter the signal (with upscaling a factor 2 and saturation) */
- WebRtcIlbcfix_HpOutput(decblock, (WebRtc_Word16*)WebRtcIlbcfix_kHpOutCoefs,
+ WebRtcIlbcfix_HpOutput(decblock, (int16_t*)WebRtcIlbcfix_kHpOutCoefs,
iLBCdec_inst->hpimemy, iLBCdec_inst->hpimemx,
iLBCdec_inst->blockl);
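When the enhancer is off, the decoder estimates the last pitch lag with WebRtcIlbcfix_XcorrCoef over the tail of the decoded residual. A hedged floating-point sketch of that kind of lag search (argument names and the exact search window are illustrative, and the fixed-point routine additionally tracks shift counts to avoid overflow):

static int estimate_lag_ref(const float *x, int len, int min_lag, int max_lag)
{
  int d, i, best = min_lag;
  float best_measure = 0.0f;
  for (d = min_lag; d <= max_lag && d < len; d++) {
    float cross = 0.0f, energy = 1e-9f;
    for (i = d; i < len; i++) {
      cross  += x[i] * x[i - d];        /* correlation with the lagged signal */
      energy += x[i - d] * x[i - d];    /* energy of the lagged segment       */
    }
    /* compare cross^2 / energy so no square root is needed */
    if (cross > 0.0f && cross * cross / energy > best_measure) {
      best_measure = cross * cross / energy;
      best = d;
    }
  }
  return best;
}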
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/decode.h b/webrtc/modules/audio_coding/codecs/ilbc/decode.h
index 2c9b5a2..71e3802 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/decode.h
+++ b/webrtc/modules/audio_coding/codecs/ilbc/decode.h
@@ -26,11 +26,11 @@
*---------------------------------------------------------------*/
void WebRtcIlbcfix_DecodeImpl(
- WebRtc_Word16 *decblock, /* (o) decoded signal block */
- const WebRtc_UWord16 *bytes, /* (i) encoded signal bits */
+ int16_t *decblock, /* (o) decoded signal block */
+ const uint16_t *bytes, /* (i) encoded signal bits */
iLBC_Dec_Inst_t *iLBCdec_inst, /* (i/o) the decoder state
structure */
- WebRtc_Word16 mode /* (i) 0: bad packet, PLC,
+ int16_t mode /* (i) 0: bad packet, PLC,
1: normal */
);
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/decode_residual.c b/webrtc/modules/audio_coding/codecs/ilbc/decode_residual.c
index da56db4..d70fd38 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/decode_residual.c
+++ b/webrtc/modules/audio_coding/codecs/ilbc/decode_residual.c
@@ -35,15 +35,15 @@
/* (i/o) the decoder state structure */
iLBC_bits *iLBC_encbits, /* (i/o) Encoded bits, which are used
for the decoding */
- WebRtc_Word16 *decresidual, /* (o) decoded residual frame */
- WebRtc_Word16 *syntdenum /* (i) the decoded synthesis filter
+ int16_t *decresidual, /* (o) decoded residual frame */
+ int16_t *syntdenum /* (i) the decoded synthesis filter
coefficients */
) {
- WebRtc_Word16 meml_gotten, Nfor, Nback, diff, start_pos;
- WebRtc_Word16 subcount, subframe;
- WebRtc_Word16 *reverseDecresidual = iLBCdec_inst->enh_buf; /* Reversed decoded data, used for decoding backwards in time (reuse memory in state) */
- WebRtc_Word16 *memVec = iLBCdec_inst->prevResidual; /* Memory for codebook and filter state (reuse memory in state) */
- WebRtc_Word16 *mem = &memVec[CB_HALFFILTERLEN]; /* Memory for codebook */
+ int16_t meml_gotten, Nfor, Nback, diff, start_pos;
+ int16_t subcount, subframe;
+ int16_t *reverseDecresidual = iLBCdec_inst->enh_buf; /* Reversed decoded data, used for decoding backwards in time (reuse memory in state) */
+ int16_t *memVec = iLBCdec_inst->prevResidual; /* Memory for codebook and filter state (reuse memory in state) */
+ int16_t *mem = &memVec[CB_HALFFILTERLEN]; /* Memory for codebook */
diff = STATE_LEN - iLBCdec_inst->state_short_len;
@@ -64,7 +64,7 @@
/* setup memory */
- WebRtcSpl_MemSetW16(mem, 0, (WebRtc_Word16)(CB_MEML-iLBCdec_inst->state_short_len));
+ WebRtcSpl_MemSetW16(mem, 0, (int16_t)(CB_MEML-iLBCdec_inst->state_short_len));
WEBRTC_SPL_MEMCPY_W16(mem+CB_MEML-iLBCdec_inst->state_short_len, decresidual+start_pos,
iLBCdec_inst->state_short_len);
@@ -74,7 +74,7 @@
&decresidual[start_pos+iLBCdec_inst->state_short_len],
iLBC_encbits->cb_index, iLBC_encbits->gain_index,
mem+CB_MEML-ST_MEM_L_TBL,
- ST_MEM_L_TBL, (WebRtc_Word16)diff
+ ST_MEM_L_TBL, (int16_t)diff
);
}
@@ -85,7 +85,7 @@
meml_gotten = iLBCdec_inst->state_short_len;
WebRtcSpl_MemCpyReversedOrder(mem+CB_MEML-1,
decresidual+start_pos, meml_gotten);
- WebRtcSpl_MemSetW16(mem, 0, (WebRtc_Word16)(CB_MEML-meml_gotten));
+ WebRtcSpl_MemSetW16(mem, 0, (int16_t)(CB_MEML-meml_gotten));
/* construct decoded vector */
@@ -154,7 +154,7 @@
WebRtcSpl_MemCpyReversedOrder(mem+CB_MEML-1,
decresidual+(iLBC_encbits->startIdx-1)*SUBL, meml_gotten);
- WebRtcSpl_MemSetW16(mem, 0, (WebRtc_Word16)(CB_MEML-meml_gotten));
+ WebRtcSpl_MemSetW16(mem, 0, (int16_t)(CB_MEML-meml_gotten));
/* loop over subframes to decode */
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/decode_residual.h b/webrtc/modules/audio_coding/codecs/ilbc/decode_residual.h
index ea7208a..04d6227 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/decode_residual.h
+++ b/webrtc/modules/audio_coding/codecs/ilbc/decode_residual.h
@@ -30,8 +30,8 @@
/* (i/o) the decoder state structure */
iLBC_bits *iLBC_encbits, /* (i/o) Encoded bits, which are used
for the decoding */
- WebRtc_Word16 *decresidual, /* (o) decoded residual frame */
- WebRtc_Word16 *syntdenum /* (i) the decoded synthesis filter
+ int16_t *decresidual, /* (o) decoded residual frame */
+ int16_t *syntdenum /* (i) the decoded synthesis filter
coefficients */
);
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/decoder_interpolate_lsf.c b/webrtc/modules/audio_coding/codecs/ilbc/decoder_interpolate_lsf.c
index eee3105..3557665 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/decoder_interpolate_lsf.c
+++ b/webrtc/modules/audio_coding/codecs/ilbc/decoder_interpolate_lsf.c
@@ -26,16 +26,16 @@
*---------------------------------------------------------------*/
void WebRtcIlbcfix_DecoderInterpolateLsp(
- WebRtc_Word16 *syntdenum, /* (o) synthesis filter coefficients */
- WebRtc_Word16 *weightdenum, /* (o) weighting denumerator
+ int16_t *syntdenum, /* (o) synthesis filter coefficients */
+ int16_t *weightdenum, /* (o) weighting denumerator
coefficients */
- WebRtc_Word16 *lsfdeq, /* (i) dequantized lsf coefficients */
- WebRtc_Word16 length, /* (i) length of lsf coefficient vector */
+ int16_t *lsfdeq, /* (i) dequantized lsf coefficients */
+ int16_t length, /* (i) length of lsf coefficient vector */
iLBC_Dec_Inst_t *iLBCdec_inst
/* (i) the decoder state structure */
){
int i, pos, lp_length;
- WebRtc_Word16 lp[LPC_FILTERORDER + 1], *lsfdeq2;
+ int16_t lp[LPC_FILTERORDER + 1], *lsfdeq2;
lsfdeq2 = lsfdeq + length;
lp_length = length + 1;
@@ -46,7 +46,7 @@
WebRtcIlbcfix_LspInterpolate2PolyDec(lp, (*iLBCdec_inst).lsfdeqold, lsfdeq,
WebRtcIlbcfix_kLsfWeight30ms[0], length);
WEBRTC_SPL_MEMCPY_W16(syntdenum,lp,lp_length);
- WebRtcIlbcfix_BwExpand(weightdenum, lp, (WebRtc_Word16*)WebRtcIlbcfix_kLpcChirpSyntDenum, (WebRtc_Word16)lp_length);
+ WebRtcIlbcfix_BwExpand(weightdenum, lp, (int16_t*)WebRtcIlbcfix_kLpcChirpSyntDenum, (int16_t)lp_length);
/* subframes 2 to 6: interpolation between first and last LSF */
@@ -56,7 +56,7 @@
WebRtcIlbcfix_kLsfWeight30ms[i], length);
WEBRTC_SPL_MEMCPY_W16(syntdenum + pos,lp,lp_length);
WebRtcIlbcfix_BwExpand(weightdenum + pos, lp,
- (WebRtc_Word16*)WebRtcIlbcfix_kLpcChirpSyntDenum, (WebRtc_Word16)lp_length);
+ (int16_t*)WebRtcIlbcfix_kLpcChirpSyntDenum, (int16_t)lp_length);
pos += lp_length;
}
} else { /* iLBCdec_inst->mode=20 */
@@ -67,7 +67,7 @@
WebRtcIlbcfix_kLsfWeight20ms[i], length);
WEBRTC_SPL_MEMCPY_W16(syntdenum+pos,lp,lp_length);
WebRtcIlbcfix_BwExpand(weightdenum+pos, lp,
- (WebRtc_Word16*)WebRtcIlbcfix_kLpcChirpSyntDenum, (WebRtc_Word16)lp_length);
+ (int16_t*)WebRtcIlbcfix_kLpcChirpSyntDenum, (int16_t)lp_length);
pos += lp_length;
}
}
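The loops above interpolate the LSF vector per subframe using the Q14 weights in WebRtcIlbcfix_kLsfWeight20ms/30ms and then bandwidth-expand the result with the chirp vector. In floating point the interpolation step amounts to a convex combination of the two LSF endpoints (sketch only; which endpoint the table weight is applied to follows the LspInterpolate2PolyDec convention, assumed here to be the earlier one):

static void lsf_interp_ref(float *out, const float *lsf_prev, const float *lsf_cur,
                           float w, int order)
{
  int i;
  for (i = 0; i < order; i++) {
    /* w = table value / 16384.0 (Q14); complementary weights sum to 1 */
    out[i] = w * lsf_prev[i] + (1.0f - w) * lsf_cur[i];
  }
}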
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/decoder_interpolate_lsf.h b/webrtc/modules/audio_coding/codecs/ilbc/decoder_interpolate_lsf.h
index 3896ca9..02c653a 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/decoder_interpolate_lsf.h
+++ b/webrtc/modules/audio_coding/codecs/ilbc/decoder_interpolate_lsf.h
@@ -26,11 +26,11 @@
*---------------------------------------------------------------*/
void WebRtcIlbcfix_DecoderInterpolateLsp(
- WebRtc_Word16 *syntdenum, /* (o) synthesis filter coefficients */
- WebRtc_Word16 *weightdenum, /* (o) weighting denumerator
+ int16_t *syntdenum, /* (o) synthesis filter coefficients */
+ int16_t *weightdenum, /* (o) weighting denumerator
coefficients */
- WebRtc_Word16 *lsfdeq, /* (i) dequantized lsf coefficients */
- WebRtc_Word16 length, /* (i) length of lsf coefficient vector */
+ int16_t *lsfdeq, /* (i) dequantized lsf coefficients */
+ int16_t length, /* (i) length of lsf coefficient vector */
iLBC_Dec_Inst_t *iLBCdec_inst
/* (i) the decoder state structure */
);
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/defines.h b/webrtc/modules/audio_coding/codecs/ilbc/defines.h
index bdeba01..b60eaf4 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/defines.h
+++ b/webrtc/modules/audio_coding/codecs/ilbc/defines.h
@@ -118,53 +118,53 @@
/* Struct for the bits */
typedef struct iLBC_bits_t_ {
- WebRtc_Word16 lsf[LSF_NSPLIT*LPC_N_MAX];
- WebRtc_Word16 cb_index[CB_NSTAGES*(NASUB_MAX+1)]; /* First CB_NSTAGES values contains extra CB index */
- WebRtc_Word16 gain_index[CB_NSTAGES*(NASUB_MAX+1)]; /* First CB_NSTAGES values contains extra CB gain */
- WebRtc_Word16 idxForMax;
- WebRtc_Word16 state_first;
- WebRtc_Word16 idxVec[STATE_SHORT_LEN_30MS];
- WebRtc_Word16 firstbits;
- WebRtc_Word16 startIdx;
+ int16_t lsf[LSF_NSPLIT*LPC_N_MAX];
+ int16_t cb_index[CB_NSTAGES*(NASUB_MAX+1)]; /* First CB_NSTAGES values contains extra CB index */
+ int16_t gain_index[CB_NSTAGES*(NASUB_MAX+1)]; /* First CB_NSTAGES values contains extra CB gain */
+ int16_t idxForMax;
+ int16_t state_first;
+ int16_t idxVec[STATE_SHORT_LEN_30MS];
+ int16_t firstbits;
+ int16_t startIdx;
} iLBC_bits;
/* type definition encoder instance */
typedef struct iLBC_Enc_Inst_t_ {
/* flag for frame size mode */
- WebRtc_Word16 mode;
+ int16_t mode;
/* basic parameters for different frame sizes */
- WebRtc_Word16 blockl;
- WebRtc_Word16 nsub;
- WebRtc_Word16 nasub;
- WebRtc_Word16 no_of_bytes, no_of_words;
- WebRtc_Word16 lpc_n;
- WebRtc_Word16 state_short_len;
+ int16_t blockl;
+ int16_t nsub;
+ int16_t nasub;
+ int16_t no_of_bytes, no_of_words;
+ int16_t lpc_n;
+ int16_t state_short_len;
/* analysis filter state */
- WebRtc_Word16 anaMem[LPC_FILTERORDER];
+ int16_t anaMem[LPC_FILTERORDER];
/* Fix-point old lsf parameters for interpolation */
- WebRtc_Word16 lsfold[LPC_FILTERORDER];
- WebRtc_Word16 lsfdeqold[LPC_FILTERORDER];
+ int16_t lsfold[LPC_FILTERORDER];
+ int16_t lsfdeqold[LPC_FILTERORDER];
/* signal buffer for LP analysis */
- WebRtc_Word16 lpc_buffer[LPC_LOOKBACK + BLOCKL_MAX];
+ int16_t lpc_buffer[LPC_LOOKBACK + BLOCKL_MAX];
/* state of input HP filter */
- WebRtc_Word16 hpimemx[2];
- WebRtc_Word16 hpimemy[4];
+ int16_t hpimemx[2];
+ int16_t hpimemy[4];
#ifdef SPLIT_10MS
- WebRtc_Word16 weightdenumbuf[66];
- WebRtc_Word16 past_samples[160];
- WebRtc_UWord16 bytes[25];
- WebRtc_Word16 section;
- WebRtc_Word16 Nfor_flag;
- WebRtc_Word16 Nback_flag;
- WebRtc_Word16 start_pos;
- WebRtc_Word16 diff;
+ int16_t weightdenumbuf[66];
+ int16_t past_samples[160];
+ uint16_t bytes[25];
+ int16_t section;
+ int16_t Nfor_flag;
+ int16_t Nback_flag;
+ int16_t start_pos;
+ int16_t diff;
#endif
} iLBC_Enc_Inst_t;
@@ -173,46 +173,46 @@
typedef struct iLBC_Dec_Inst_t_ {
/* flag for frame size mode */
- WebRtc_Word16 mode;
+ int16_t mode;
/* basic parameters for different frame sizes */
- WebRtc_Word16 blockl;
- WebRtc_Word16 nsub;
- WebRtc_Word16 nasub;
- WebRtc_Word16 no_of_bytes, no_of_words;
- WebRtc_Word16 lpc_n;
- WebRtc_Word16 state_short_len;
+ int16_t blockl;
+ int16_t nsub;
+ int16_t nasub;
+ int16_t no_of_bytes, no_of_words;
+ int16_t lpc_n;
+ int16_t state_short_len;
/* synthesis filter state */
- WebRtc_Word16 syntMem[LPC_FILTERORDER];
+ int16_t syntMem[LPC_FILTERORDER];
/* old LSF for interpolation */
- WebRtc_Word16 lsfdeqold[LPC_FILTERORDER];
+ int16_t lsfdeqold[LPC_FILTERORDER];
/* pitch lag estimated in enhancer and used in PLC */
int last_lag;
/* PLC state information */
int consPLICount, prev_enh_pl;
- WebRtc_Word16 perSquare;
+ int16_t perSquare;
- WebRtc_Word16 prevScale, prevPLI;
- WebRtc_Word16 prevLag, prevLpc[LPC_FILTERORDER+1];
- WebRtc_Word16 prevResidual[NSUB_MAX*SUBL];
- WebRtc_Word16 seed;
+ int16_t prevScale, prevPLI;
+ int16_t prevLag, prevLpc[LPC_FILTERORDER+1];
+ int16_t prevResidual[NSUB_MAX*SUBL];
+ int16_t seed;
/* previous synthesis filter parameters */
- WebRtc_Word16 old_syntdenum[(LPC_FILTERORDER + 1)*NSUB_MAX];
+ int16_t old_syntdenum[(LPC_FILTERORDER + 1)*NSUB_MAX];
/* state of output HP filter */
- WebRtc_Word16 hpimemx[2];
- WebRtc_Word16 hpimemy[4];
+ int16_t hpimemx[2];
+ int16_t hpimemy[4];
/* enhancer state information */
int use_enhancer;
- WebRtc_Word16 enh_buf[ENH_BUFL+ENH_BUFL_FILTEROVERHEAD];
- WebRtc_Word16 enh_period[ENH_NBLOCKS_TOT];
+ int16_t enh_buf[ENH_BUFL+ENH_BUFL_FILTEROVERHEAD];
+ int16_t enh_period[ENH_NBLOCKS_TOT];
} iLBC_Dec_Inst_t;
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/do_plc.c b/webrtc/modules/audio_coding/codecs/ilbc/do_plc.c
index 0dfae2b..c0f5368 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/do_plc.c
+++ b/webrtc/modules/audio_coding/codecs/ilbc/do_plc.c
@@ -27,36 +27,36 @@
*---------------------------------------------------------------*/
void WebRtcIlbcfix_DoThePlc(
- WebRtc_Word16 *PLCresidual, /* (o) concealed residual */
- WebRtc_Word16 *PLClpc, /* (o) concealed LP parameters */
- WebRtc_Word16 PLI, /* (i) packet loss indicator
+ int16_t *PLCresidual, /* (o) concealed residual */
+ int16_t *PLClpc, /* (o) concealed LP parameters */
+ int16_t PLI, /* (i) packet loss indicator
0 - no PL, 1 = PL */
- WebRtc_Word16 *decresidual, /* (i) decoded residual */
- WebRtc_Word16 *lpc, /* (i) decoded LPC (only used for no PL) */
- WebRtc_Word16 inlag, /* (i) pitch lag */
+ int16_t *decresidual, /* (i) decoded residual */
+ int16_t *lpc, /* (i) decoded LPC (only used for no PL) */
+ int16_t inlag, /* (i) pitch lag */
iLBC_Dec_Inst_t *iLBCdec_inst
/* (i/o) decoder instance */
){
- WebRtc_Word16 i, pick;
- WebRtc_Word32 cross, ener, cross_comp, ener_comp = 0;
- WebRtc_Word32 measure, maxMeasure, energy;
- WebRtc_Word16 max, crossSquareMax, crossSquare;
- WebRtc_Word16 j, lag, tmp1, tmp2, randlag;
- WebRtc_Word16 shift1, shift2, shift3, shiftMax;
- WebRtc_Word16 scale3;
- WebRtc_Word16 corrLen;
- WebRtc_Word32 tmpW32, tmp2W32;
- WebRtc_Word16 use_gain;
- WebRtc_Word16 tot_gain;
- WebRtc_Word16 max_perSquare;
- WebRtc_Word16 scale1, scale2;
- WebRtc_Word16 totscale;
- WebRtc_Word32 nom;
- WebRtc_Word16 denom;
- WebRtc_Word16 pitchfact;
- WebRtc_Word16 use_lag;
+ int16_t i, pick;
+ int32_t cross, ener, cross_comp, ener_comp = 0;
+ int32_t measure, maxMeasure, energy;
+ int16_t max, crossSquareMax, crossSquare;
+ int16_t j, lag, tmp1, tmp2, randlag;
+ int16_t shift1, shift2, shift3, shiftMax;
+ int16_t scale3;
+ int16_t corrLen;
+ int32_t tmpW32, tmp2W32;
+ int16_t use_gain;
+ int16_t tot_gain;
+ int16_t max_perSquare;
+ int16_t scale1, scale2;
+ int16_t totscale;
+ int32_t nom;
+ int16_t denom;
+ int16_t pitchfact;
+ int16_t use_lag;
int ind;
- WebRtc_Word16 randvec[BLOCKL_MAX];
+ int16_t randvec[BLOCKL_MAX];
/* Packet Loss */
if (PLI == 1) {
@@ -70,7 +70,7 @@
/* Maximum 60 samples are correlated, preserve as high accuracy
as possible without getting overflow */
- max = WebRtcSpl_MaxAbsValueW16((*iLBCdec_inst).prevResidual, (WebRtc_Word16)iLBCdec_inst->blockl);
+ max = WebRtcSpl_MaxAbsValueW16((*iLBCdec_inst).prevResidual, (int16_t)iLBCdec_inst->blockl);
scale3 = (WebRtcSpl_GetSizeInBits(max)<<1) - 25;
if (scale3 < 0) {
scale3 = 0;
@@ -92,7 +92,7 @@
/* Normalize and store cross^2 and the number of shifts */
shiftMax = WebRtcSpl_GetSizeInBits(WEBRTC_SPL_ABS_W32(cross))-15;
- crossSquareMax = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(WEBRTC_SPL_SHIFT_W32(cross, -shiftMax),
+ crossSquareMax = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(WEBRTC_SPL_SHIFT_W32(cross, -shiftMax),
WEBRTC_SPL_SHIFT_W32(cross, -shiftMax), 15);
for (j=inlag-2;j<=inlag+3;j++) {
@@ -103,7 +103,7 @@
this lag is better or not. To avoid the division,
do a cross multiplication */
shift1 = WebRtcSpl_GetSizeInBits(WEBRTC_SPL_ABS_W32(cross_comp))-15;
- crossSquare = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(WEBRTC_SPL_SHIFT_W32(cross_comp, -shift1),
+ crossSquare = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(WEBRTC_SPL_SHIFT_W32(cross_comp, -shift1),
WEBRTC_SPL_SHIFT_W32(cross_comp, -shift1), 15);
shift2 = WebRtcSpl_GetSizeInBits(ener)-15;
@@ -147,25 +147,25 @@
corrLen, scale3);
if ((tmp2W32>0)&&(ener_comp>0)) {
- /* norm energies to WebRtc_Word16, compute the product of the energies and
- use the upper WebRtc_Word16 as the denominator */
+ /* norm energies to int16_t, compute the product of the energies and
+ use the upper int16_t as the denominator */
- scale1=(WebRtc_Word16)WebRtcSpl_NormW32(tmp2W32)-16;
- tmp1=(WebRtc_Word16)WEBRTC_SPL_SHIFT_W32(tmp2W32, scale1);
+ scale1=(int16_t)WebRtcSpl_NormW32(tmp2W32)-16;
+ tmp1=(int16_t)WEBRTC_SPL_SHIFT_W32(tmp2W32, scale1);
- scale2=(WebRtc_Word16)WebRtcSpl_NormW32(ener)-16;
- tmp2=(WebRtc_Word16)WEBRTC_SPL_SHIFT_W32(ener, scale2);
- denom=(WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(tmp1, tmp2, 16); /* denom in Q(scale1+scale2-16) */
+ scale2=(int16_t)WebRtcSpl_NormW32(ener)-16;
+ tmp2=(int16_t)WEBRTC_SPL_SHIFT_W32(ener, scale2);
+ denom=(int16_t)WEBRTC_SPL_MUL_16_16_RSFT(tmp1, tmp2, 16); /* denom in Q(scale1+scale2-16) */
/* Square the cross correlation and norm it such that max_perSquare
will be in Q15 after the division */
totscale = scale1+scale2-1;
- tmp1 = (WebRtc_Word16)WEBRTC_SPL_SHIFT_W32(cross, (totscale>>1));
- tmp2 = (WebRtc_Word16)WEBRTC_SPL_SHIFT_W32(cross, totscale-(totscale>>1));
+ tmp1 = (int16_t)WEBRTC_SPL_SHIFT_W32(cross, (totscale>>1));
+ tmp2 = (int16_t)WEBRTC_SPL_SHIFT_W32(cross, totscale-(totscale>>1));
nom = WEBRTC_SPL_MUL_16_16(tmp1, tmp2);
- max_perSquare = (WebRtc_Word16)WebRtcSpl_DivW32W16(nom, denom);
+ max_perSquare = (int16_t)WebRtcSpl_DivW32W16(nom, denom);
} else {
max_perSquare = 0;
@@ -209,10 +209,10 @@
ind--;
}
/* pitch fact is approximated by first order */
- tmpW32 = (WebRtc_Word32)WebRtcIlbcfix_kPlcPitchFact[ind] +
+ tmpW32 = (int32_t)WebRtcIlbcfix_kPlcPitchFact[ind] +
WEBRTC_SPL_MUL_16_16_RSFT(WebRtcIlbcfix_kPlcPfSlope[ind], (max_perSquare-WebRtcIlbcfix_kPlcPerSqr[ind]), 11);
- pitchfact = (WebRtc_Word16)WEBRTC_SPL_MIN(tmpW32, 32767); /* guard against overflow */
+ pitchfact = (int16_t)WEBRTC_SPL_MIN(tmpW32, 32767); /* guard against overflow */
} else { /* periodicity < 0.4 */
pitchfact = 0;
@@ -230,8 +230,8 @@
for (i=0; i<iLBCdec_inst->blockl; i++) {
/* noise component - 52 < randlagFIX < 117 */
- iLBCdec_inst->seed = (WebRtc_Word16)(WEBRTC_SPL_MUL_16_16(iLBCdec_inst->seed, 31821)+(WebRtc_Word32)13849);
- randlag = 53 + (WebRtc_Word16)(iLBCdec_inst->seed & 63);
+ iLBCdec_inst->seed = (int16_t)(WEBRTC_SPL_MUL_16_16(iLBCdec_inst->seed, 31821)+(int32_t)13849);
+ randlag = 53 + (int16_t)(iLBCdec_inst->seed & 63);
pick = i - randlag;
@@ -254,16 +254,16 @@
if (i<80) {
tot_gain=use_gain;
} else if (i<160) {
- tot_gain=(WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(31130, use_gain, 15); /* 0.95*use_gain */
+ tot_gain=(int16_t)WEBRTC_SPL_MUL_16_16_RSFT(31130, use_gain, 15); /* 0.95*use_gain */
} else {
- tot_gain=(WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(29491, use_gain, 15); /* 0.9*use_gain */
+ tot_gain=(int16_t)WEBRTC_SPL_MUL_16_16_RSFT(29491, use_gain, 15); /* 0.9*use_gain */
}
/* mix noise and pitch repeatition */
- PLCresidual[i] = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(tot_gain,
- (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32( (WEBRTC_SPL_MUL_16_16(pitchfact, PLCresidual[i]) +
+ PLCresidual[i] = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(tot_gain,
+ (int16_t)WEBRTC_SPL_RSHIFT_W32( (WEBRTC_SPL_MUL_16_16(pitchfact, PLCresidual[i]) +
WEBRTC_SPL_MUL_16_16((32767-pitchfact), randvec[i]) + 16384),
15),
15);
@@ -276,7 +276,7 @@
}
/* less than 30 dB, use only noise */
- if (energy < (WEBRTC_SPL_SHIFT_W32(((WebRtc_Word32)iLBCdec_inst->blockl*900),-(iLBCdec_inst->prevScale+1)))) {
+ if (energy < (WEBRTC_SPL_SHIFT_W32(((int32_t)iLBCdec_inst->blockl*900),-(iLBCdec_inst->prevScale+1)))) {
energy = 0;
for (i=0; i<iLBCdec_inst->blockl; i++) {
PLCresidual[i] = randvec[i];
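Conceptually, the concealment above builds each replacement sample as a periodicity-weighted blend of the pitch-repeated residual and a pseudo-random excitation, with the overall gain stepped down across the lost frame. A floating-point rendering of that mixing rule (illustration only; 31130 and 29491 are the Q15 forms of roughly 0.95 and 0.9 used above):

static float plc_sample_ref(float pitch_repeat, float noise,
                            float pitchfact, float use_gain, int i)
{
  float tot_gain = use_gain;           /* full gain for the first 80 samples */
  if (i >= 160)     tot_gain *= 0.9f;  /* ~29491/32768 for the remainder     */
  else if (i >= 80) tot_gain *= 0.95f; /* ~31130/32768 for samples 80..159   */
  return tot_gain * (pitchfact * pitch_repeat + (1.0f - pitchfact) * noise);
}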
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/do_plc.h b/webrtc/modules/audio_coding/codecs/ilbc/do_plc.h
index c5bcc52..15e68ec 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/do_plc.h
+++ b/webrtc/modules/audio_coding/codecs/ilbc/do_plc.h
@@ -27,13 +27,13 @@
*---------------------------------------------------------------*/
void WebRtcIlbcfix_DoThePlc(
- WebRtc_Word16 *PLCresidual, /* (o) concealed residual */
- WebRtc_Word16 *PLClpc, /* (o) concealed LP parameters */
- WebRtc_Word16 PLI, /* (i) packet loss indicator
+ int16_t *PLCresidual, /* (o) concealed residual */
+ int16_t *PLClpc, /* (o) concealed LP parameters */
+ int16_t PLI, /* (i) packet loss indicator
0 - no PL, 1 = PL */
- WebRtc_Word16 *decresidual, /* (i) decoded residual */
- WebRtc_Word16 *lpc, /* (i) decoded LPC (only used for no PL) */
- WebRtc_Word16 inlag, /* (i) pitch lag */
+ int16_t *decresidual, /* (i) decoded residual */
+ int16_t *lpc, /* (i) decoded LPC (only used for no PL) */
+ int16_t inlag, /* (i) pitch lag */
iLBC_Dec_Inst_t *iLBCdec_inst
/* (i/o) decoder instance */
);
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/encode.c b/webrtc/modules/audio_coding/codecs/ilbc/encode.c
index d3d3ba8..75d1672 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/encode.c
+++ b/webrtc/modules/audio_coding/codecs/ilbc/encode.c
@@ -41,38 +41,38 @@
*---------------------------------------------------------------*/
void WebRtcIlbcfix_EncodeImpl(
- WebRtc_UWord16 *bytes, /* (o) encoded data bits iLBC */
- const WebRtc_Word16 *block, /* (i) speech vector to encode */
+ uint16_t *bytes, /* (o) encoded data bits iLBC */
+ const int16_t *block, /* (i) speech vector to encode */
iLBC_Enc_Inst_t *iLBCenc_inst /* (i/o) the general encoder
state */
){
int n, meml_gotten, Nfor, Nback;
- WebRtc_Word16 diff, start_pos;
+ int16_t diff, start_pos;
int index;
int subcount, subframe;
- WebRtc_Word16 start_count, end_count;
- WebRtc_Word16 *residual;
- WebRtc_Word32 en1, en2;
- WebRtc_Word16 scale, max;
- WebRtc_Word16 *syntdenum;
- WebRtc_Word16 *decresidual;
- WebRtc_Word16 *reverseResidual;
- WebRtc_Word16 *reverseDecresidual;
+ int16_t start_count, end_count;
+ int16_t *residual;
+ int32_t en1, en2;
+ int16_t scale, max;
+ int16_t *syntdenum;
+ int16_t *decresidual;
+ int16_t *reverseResidual;
+ int16_t *reverseDecresidual;
/* Stack based */
- WebRtc_Word16 weightdenum[(LPC_FILTERORDER + 1)*NSUB_MAX];
- WebRtc_Word16 dataVec[BLOCKL_MAX + LPC_FILTERORDER];
- WebRtc_Word16 memVec[CB_MEML+CB_FILTERLEN];
- WebRtc_Word16 bitsMemory[sizeof(iLBC_bits)/sizeof(WebRtc_Word16)];
+ int16_t weightdenum[(LPC_FILTERORDER + 1)*NSUB_MAX];
+ int16_t dataVec[BLOCKL_MAX + LPC_FILTERORDER];
+ int16_t memVec[CB_MEML+CB_FILTERLEN];
+ int16_t bitsMemory[sizeof(iLBC_bits)/sizeof(int16_t)];
iLBC_bits *iLBCbits_inst = (iLBC_bits*)bitsMemory;
#ifdef SPLIT_10MS
- WebRtc_Word16 *weightdenumbuf = iLBCenc_inst->weightdenumbuf;
- WebRtc_Word16 last_bit;
+ int16_t *weightdenumbuf = iLBCenc_inst->weightdenumbuf;
+ int16_t last_bit;
#endif
- WebRtc_Word16 *data = &dataVec[LPC_FILTERORDER];
- WebRtc_Word16 *mem = &memVec[CB_HALFFILTERLEN];
+ int16_t *data = &dataVec[LPC_FILTERORDER];
+ int16_t *mem = &memVec[CB_HALFFILTERLEN];
/* Reuse som buffers to save stack memory */
residual = &iLBCenc_inst->lpc_buffer[LPC_LOOKBACK+BLOCKL_MAX-iLBCenc_inst->blockl];
@@ -83,8 +83,8 @@
#ifdef SPLIT_10MS
- WebRtcSpl_MemSetW16 ( (WebRtc_Word16 *) iLBCbits_inst, 0,
- (WebRtc_Word16) (sizeof(iLBC_bits) / sizeof(WebRtc_Word16)) );
+ WebRtcSpl_MemSetW16 ( (int16_t *) iLBCbits_inst, 0,
+ (int16_t) (sizeof(iLBC_bits) / sizeof(int16_t)) );
start_pos = iLBCenc_inst->start_pos;
diff = iLBCenc_inst->diff;
@@ -121,7 +121,7 @@
#endif
/* high pass filtering of input signal and scale down the residual (*0.5) */
- WebRtcIlbcfix_HpInput(data, (WebRtc_Word16*)WebRtcIlbcfix_kHpInCoefs,
+ WebRtcIlbcfix_HpInput(data, (int16_t*)WebRtcIlbcfix_kHpInCoefs,
iLBCenc_inst->hpimemy, iLBCenc_inst->hpimemx,
iLBCenc_inst->blockl);
@@ -191,7 +191,7 @@
/* setup memory */
- WebRtcSpl_MemSetW16(mem, 0, (WebRtc_Word16)(CB_MEML-iLBCenc_inst->state_short_len));
+ WebRtcSpl_MemSetW16(mem, 0, (int16_t)(CB_MEML-iLBCenc_inst->state_short_len));
WEBRTC_SPL_MEMCPY_W16(mem+CB_MEML-iLBCenc_inst->state_short_len,
decresidual+start_pos, iLBCenc_inst->state_short_len);
@@ -222,7 +222,7 @@
meml_gotten = iLBCenc_inst->state_short_len;
WebRtcSpl_MemCpyReversedOrder(&mem[CB_MEML-1], &decresidual[start_pos], meml_gotten);
- WebRtcSpl_MemSetW16(mem, 0, (WebRtc_Word16)(CB_MEML-iLBCenc_inst->state_short_len));
+ WebRtcSpl_MemSetW16(mem, 0, (int16_t)(CB_MEML-iLBCenc_inst->state_short_len));
/* encode subframes */
WebRtcIlbcfix_CbSearch(iLBCenc_inst, iLBCbits_inst->cb_index, iLBCbits_inst->gain_index,
@@ -325,7 +325,7 @@
}
#else
start_count = 0;
- end_count = (WebRtc_Word16)Nfor;
+ end_count = (int16_t)Nfor;
#endif
/* loop over subframes to encode */
@@ -339,7 +339,7 @@
&residual[(iLBCbits_inst->startIdx+1+subframe)*SUBL],
mem, MEM_LF_TBL, SUBL,
&weightdenum[(iLBCbits_inst->startIdx+1+subframe)*(LPC_FILTERORDER+1)],
- (WebRtc_Word16)subcount);
+ (int16_t)subcount);
/* construct decoded vector */
@@ -396,7 +396,7 @@
}
WebRtcSpl_MemCpyReversedOrder(&mem[CB_MEML-1], &decresidual[Nback*SUBL], meml_gotten);
- WebRtcSpl_MemSetW16(mem, 0, (WebRtc_Word16)(CB_MEML-meml_gotten));
+ WebRtcSpl_MemSetW16(mem, 0, (int16_t)(CB_MEML-meml_gotten));
#ifdef SPLIT_10MS
if (iLBCenc_inst->Nback_flag > 0)
@@ -433,7 +433,7 @@
}
#else
start_count = 0;
- end_count = (WebRtc_Word16)Nback;
+ end_count = (int16_t)Nback;
#endif
/* loop over subframes to encode */
@@ -446,7 +446,7 @@
iLBCbits_inst->gain_index+subcount*CB_NSTAGES, &reverseResidual[subframe*SUBL],
mem, MEM_LF_TBL, SUBL,
&weightdenum[(iLBCbits_inst->startIdx-2-subframe)*(LPC_FILTERORDER+1)],
- (WebRtc_Word16)subcount);
+ (int16_t)subcount);
/* construct decoded vector */
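Earlier in this file the input block is high-pass filtered with WebRtcIlbcfix_kHpInCoefs before analysis (the decoder does the same on its output with WebRtcIlbcfix_kHpOutCoefs). Those tables are stored as {b0 b1 b2 -a1 -a2}; a hedged float sketch of such a second-order (biquad) filter, ignoring the fixed-point state handling and the 0.5 output scaling mentioned in the comment:

static void hp_filter_ref(float *x, int len,
                          const float b[3], const float a_neg[2],
                          float xmem[2], float ymem[2])
{
  int i;
  for (i = 0; i < len; i++) {
    /* direct-form I: a_neg already holds -a1 and -a2 */
    float y = b[0] * x[i] + b[1] * xmem[0] + b[2] * xmem[1]
            + a_neg[0] * ymem[0] + a_neg[1] * ymem[1];
    xmem[1] = xmem[0]; xmem[0] = x[i];
    ymem[1] = ymem[0]; ymem[0] = y;
    x[i] = y;
  }
}

Assuming the coefficients are Q12, they would enter this sketch as b = {3798, -7596, 3798}/4096 and a_neg = {7807, -3733}/4096.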
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/encode.h b/webrtc/modules/audio_coding/codecs/ilbc/encode.h
index b7d93d7..5a47c58 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/encode.h
+++ b/webrtc/modules/audio_coding/codecs/ilbc/encode.h
@@ -26,8 +26,8 @@
*---------------------------------------------------------------*/
void WebRtcIlbcfix_EncodeImpl(
- WebRtc_UWord16 *bytes, /* (o) encoded data bits iLBC */
- const WebRtc_Word16 *block, /* (i) speech vector to encode */
+ uint16_t *bytes, /* (o) encoded data bits iLBC */
+ const int16_t *block, /* (i) speech vector to encode */
iLBC_Enc_Inst_t *iLBCenc_inst /* (i/o) the general encoder
state */
);
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/energy_inverse.c b/webrtc/modules/audio_coding/codecs/ilbc/energy_inverse.c
index d56069b..a6b1c75 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/energy_inverse.c
+++ b/webrtc/modules/audio_coding/codecs/ilbc/energy_inverse.c
@@ -21,13 +21,13 @@
#include "energy_inverse.h"
void WebRtcIlbcfix_EnergyInverse(
- WebRtc_Word16 *energy, /* (i/o) Energy and inverse
+ int16_t *energy, /* (i/o) Energy and inverse
energy (in Q29) */
int noOfEnergies) /* (i) The length of the energy
vector */
{
- WebRtc_Word32 Nom=(WebRtc_Word32)0x1FFFFFFF;
- WebRtc_Word16 *energyPtr;
+ int32_t Nom=(int32_t)0x1FFFFFFF;
+ int16_t *energyPtr;
int i;
/* Set the minimum energy value to 16384 to avoid overflow */
@@ -40,7 +40,7 @@
/* Calculate inverse energy in Q29 */
energyPtr=energy;
for (i=0; i<noOfEnergies; i++) {
- (*energyPtr) = (WebRtc_Word16)WebRtcSpl_DivW32W16(Nom, (*energyPtr));
+ (*energyPtr) = (int16_t)WebRtcSpl_DivW32W16(Nom, (*energyPtr));
energyPtr++;
}
}
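Numerically, the nominator 0x1FFFFFFF together with the documented lower clamp of 16384 on the energies guarantees that the resulting Q29 inverse always fits in an int16_t. A tiny standalone check (not part of the sources):

#include <stdint.h>
#include <stdio.h>

int main(void)
{
  int32_t nom = (int32_t)0x1FFFFFFF;
  printf("%d\n", nom / 16384);   /* 32767: inverse of the smallest allowed energy   */
  printf("%d\n", nom / 32767);   /* 16384: the inverse halves as the energy doubles */
  return 0;
}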
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/energy_inverse.h b/webrtc/modules/audio_coding/codecs/ilbc/energy_inverse.h
index db13589..7bb6721 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/energy_inverse.h
+++ b/webrtc/modules/audio_coding/codecs/ilbc/energy_inverse.h
@@ -24,7 +24,7 @@
/* Inverses the in vector in into Q29 domain */
void WebRtcIlbcfix_EnergyInverse(
- WebRtc_Word16 *energy, /* (i/o) Energy and inverse
+ int16_t *energy, /* (i/o) Energy and inverse
energy (in Q29) */
int noOfEnergies); /* (i) The length of the energy
vector */
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/enh_upsample.c b/webrtc/modules/audio_coding/codecs/ilbc/enh_upsample.c
index 3343816..91f3970 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/enh_upsample.c
+++ b/webrtc/modules/audio_coding/codecs/ilbc/enh_upsample.c
@@ -24,13 +24,13 @@
*---------------------------------------------------------------*/
void WebRtcIlbcfix_EnhUpsample(
- WebRtc_Word32 *useq1, /* (o) upsampled output sequence */
- WebRtc_Word16 *seq1 /* (i) unupsampled sequence */
+ int32_t *useq1, /* (o) upsampled output sequence */
+ int16_t *seq1 /* (i) unupsampled sequence */
){
int j;
- WebRtc_Word32 *pu1, *pu11;
- WebRtc_Word16 *ps, *w16tmp;
- const WebRtc_Word16 *pp;
+ int32_t *pu1, *pu11;
+ int16_t *ps, *w16tmp;
+ const int16_t *pp;
/* filtering: filter overhangs left side of sequence */
pu1=useq1;
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/enh_upsample.h b/webrtc/modules/audio_coding/codecs/ilbc/enh_upsample.h
index 53534cc..00bb28b 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/enh_upsample.h
+++ b/webrtc/modules/audio_coding/codecs/ilbc/enh_upsample.h
@@ -26,8 +26,8 @@
*---------------------------------------------------------------*/
void WebRtcIlbcfix_EnhUpsample(
- WebRtc_Word32 *useq1, /* (o) upsampled output sequence */
- WebRtc_Word16 *seq1 /* (i) unupsampled sequence */
+ int32_t *useq1, /* (o) upsampled output sequence */
+ int16_t *seq1 /* (i) unupsampled sequence */
);
#endif
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/enhancer.c b/webrtc/modules/audio_coding/codecs/ilbc/enhancer.c
index b8f3335..38c3de3 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/enhancer.c
+++ b/webrtc/modules/audio_coding/codecs/ilbc/enhancer.c
@@ -27,16 +27,16 @@
*---------------------------------------------------------------*/
void WebRtcIlbcfix_Enhancer(
- WebRtc_Word16 *odata, /* (o) smoothed block, dimension blockl */
- WebRtc_Word16 *idata, /* (i) data buffer used for enhancing */
- WebRtc_Word16 idatal, /* (i) dimension idata */
- WebRtc_Word16 centerStartPos, /* (i) first sample current block within idata */
- WebRtc_Word16 *period, /* (i) pitch period array (pitch bward-in time) */
- WebRtc_Word16 *plocs, /* (i) locations where period array values valid */
- WebRtc_Word16 periodl /* (i) dimension of period and plocs */
+ int16_t *odata, /* (o) smoothed block, dimension blockl */
+ int16_t *idata, /* (i) data buffer used for enhancing */
+ int16_t idatal, /* (i) dimension idata */
+ int16_t centerStartPos, /* (i) first sample current block within idata */
+ int16_t *period, /* (i) pitch period array (pitch bward-in time) */
+ int16_t *plocs, /* (i) locations where period array values valid */
+ int16_t periodl /* (i) dimension of period and plocs */
){
/* Stack based */
- WebRtc_Word16 surround[ENH_BLOCKL];
+ int16_t surround[ENH_BLOCKL];
WebRtcSpl_MemSetW16(surround, 0, ENH_BLOCKL);
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/enhancer.h b/webrtc/modules/audio_coding/codecs/ilbc/enhancer.h
index e14f559..83f48b0 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/enhancer.h
+++ b/webrtc/modules/audio_coding/codecs/ilbc/enhancer.h
@@ -27,13 +27,13 @@
*---------------------------------------------------------------*/
void WebRtcIlbcfix_Enhancer(
- WebRtc_Word16 *odata, /* (o) smoothed block, dimension blockl */
- WebRtc_Word16 *idata, /* (i) data buffer used for enhancing */
- WebRtc_Word16 idatal, /* (i) dimension idata */
- WebRtc_Word16 centerStartPos, /* (i) first sample current block within idata */
- WebRtc_Word16 *period, /* (i) pitch period array (pitch bward-in time) */
- WebRtc_Word16 *plocs, /* (i) locations where period array values valid */
- WebRtc_Word16 periodl /* (i) dimension of period and plocs */
+ int16_t *odata, /* (o) smoothed block, dimension blockl */
+ int16_t *idata, /* (i) data buffer used for enhancing */
+ int16_t idatal, /* (i) dimension idata */
+ int16_t centerStartPos, /* (i) first sample current block within idata */
+ int16_t *period, /* (i) pitch period array (pitch bward-in time) */
+ int16_t *plocs, /* (i) locations where period array values valid */
+ int16_t periodl /* (i) dimension of period and plocs */
);
#endif
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/enhancer_interface.c b/webrtc/modules/audio_coding/codecs/ilbc/enhancer_interface.c
index 61b71d1..809f48f 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/enhancer_interface.c
+++ b/webrtc/modules/audio_coding/codecs/ilbc/enhancer_interface.c
@@ -29,41 +29,41 @@
*---------------------------------------------------------------*/
int WebRtcIlbcfix_EnhancerInterface( /* (o) Estimated lag in end of in[] */
- WebRtc_Word16 *out, /* (o) enhanced signal */
- WebRtc_Word16 *in, /* (i) unenhanced signal */
+ int16_t *out, /* (o) enhanced signal */
+ int16_t *in, /* (i) unenhanced signal */
iLBC_Dec_Inst_t *iLBCdec_inst /* (i) buffers etc */
){
int iblock;
int lag=20, tlag=20;
int inLen=iLBCdec_inst->blockl+120;
- WebRtc_Word16 scale, scale1, plc_blockl;
- WebRtc_Word16 *enh_buf, *enh_period;
- WebRtc_Word32 tmp1, tmp2, max, new_blocks;
- WebRtc_Word16 *enh_bufPtr1;
+ int16_t scale, scale1, plc_blockl;
+ int16_t *enh_buf, *enh_period;
+ int32_t tmp1, tmp2, max, new_blocks;
+ int16_t *enh_bufPtr1;
int i, k;
- WebRtc_Word16 EnChange;
- WebRtc_Word16 SqrtEnChange;
- WebRtc_Word16 inc;
- WebRtc_Word16 win;
- WebRtc_Word16 *tmpW16ptr;
- WebRtc_Word16 startPos;
- WebRtc_Word16 *plc_pred;
- WebRtc_Word16 *target, *regressor;
- WebRtc_Word16 max16;
+ int16_t EnChange;
+ int16_t SqrtEnChange;
+ int16_t inc;
+ int16_t win;
+ int16_t *tmpW16ptr;
+ int16_t startPos;
+ int16_t *plc_pred;
+ int16_t *target, *regressor;
+ int16_t max16;
int shifts;
- WebRtc_Word32 ener;
- WebRtc_Word16 enerSh;
- WebRtc_Word16 corrSh;
- WebRtc_Word16 ind, sh;
- WebRtc_Word16 start, stop;
+ int32_t ener;
+ int16_t enerSh;
+ int16_t corrSh;
+ int16_t ind, sh;
+ int16_t start, stop;
/* Stack based */
- WebRtc_Word16 totsh[3];
- WebRtc_Word16 downsampled[(BLOCKL_MAX+120)>>1]; /* length 180 */
- WebRtc_Word32 corr32[50];
- WebRtc_Word32 corrmax[3];
- WebRtc_Word16 corr16[3];
- WebRtc_Word16 en16[3];
- WebRtc_Word16 lagmax[3];
+ int16_t totsh[3];
+ int16_t downsampled[(BLOCKL_MAX+120)>>1]; /* length 180 */
+ int32_t corr32[50];
+ int32_t corrmax[3];
+ int16_t corr16[3];
+ int16_t en16[3];
+ int16_t lagmax[3];
plc_pred = downsampled; /* Reuse memory since plc_pred[ENH_BLOCKL] and
downsampled are non overlapping */
@@ -97,10 +97,10 @@
k=WebRtcSpl_DownsampleFast(
enh_buf+ENH_BUFL-inLen, /* Input samples */
- (WebRtc_Word16)(inLen+ENH_BUFL_FILTEROVERHEAD),
+ (int16_t)(inLen+ENH_BUFL_FILTEROVERHEAD),
downsampled,
- (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W16(inLen, 1),
- (WebRtc_Word16*)WebRtcIlbcfix_kLpFiltCoefs, /* Coefficients in Q12 */
+ (int16_t)WEBRTC_SPL_RSHIFT_W16(inLen, 1),
+ (int16_t*)WebRtcIlbcfix_kLpFiltCoefs, /* Coefficients in Q12 */
FILTERORDER_DS_PLUS1, /* Length of filter (order-1) */
FACTOR_DS,
DELAY_DS);
@@ -115,13 +115,13 @@
/* scaling */
max16=WebRtcSpl_MaxAbsValueW16(&regressor[-50],
- (WebRtc_Word16)(ENH_BLOCKL_HALF+50-1));
+ (int16_t)(ENH_BLOCKL_HALF+50-1));
shifts = WebRtcSpl_GetSizeInBits(WEBRTC_SPL_MUL_16_16(max16, max16)) - 25;
shifts = WEBRTC_SPL_MAX(0, shifts);
/* compute cross correlation */
WebRtcSpl_CrossCorrelation(corr32, target, regressor,
- ENH_BLOCKL_HALF, 50, (WebRtc_Word16)shifts, -1);
+ ENH_BLOCKL_HALF, 50, (int16_t)shifts, -1);
/* Find 3 highest correlations that should be compared for the
highest (corr*corr)/ener */
@@ -147,10 +147,10 @@
&regressor[-lagmax[i]],
ENH_BLOCKL_HALF, shifts);
enerSh = 15-WebRtcSpl_GetSizeInBits(ener);
- corr16[i] = (WebRtc_Word16)WEBRTC_SPL_SHIFT_W32(corrmax[i], corrSh);
- corr16[i] = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(corr16[i],
+ corr16[i] = (int16_t)WEBRTC_SPL_SHIFT_W32(corrmax[i], corrSh);
+ corr16[i] = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(corr16[i],
corr16[i], 16);
- en16[i] = (WebRtc_Word16)WEBRTC_SPL_SHIFT_W32(ener, enerSh);
+ en16[i] = (int16_t)WEBRTC_SPL_SHIFT_W32(ener, enerSh);
totsh[i] = enerSh - WEBRTC_SPL_LSHIFT_W32(corrSh, 1);
}
@@ -176,7 +176,7 @@
/* Store the estimated lag in the non-downsampled domain */
enh_period[ENH_NBLOCKS_TOT-new_blocks+iblock] =
- (WebRtc_Word16)WEBRTC_SPL_MUL_16_16(lag, 8);
+ (int16_t)WEBRTC_SPL_MUL_16_16(lag, 8);
/* Store the estimated lag for backward PLC */
if (iLBCdec_inst->prev_enh_pl==1) {
@@ -203,7 +203,7 @@
regressor=in+tlag-1;
/* scaling */
- max16=WebRtcSpl_MaxAbsValueW16(regressor, (WebRtc_Word16)(plc_blockl+3-1));
+ max16=WebRtcSpl_MaxAbsValueW16(regressor, (int16_t)(plc_blockl+3-1));
if (max16>5000)
shifts=2;
else
@@ -211,7 +211,7 @@
/* compute cross correlation */
WebRtcSpl_CrossCorrelation(corr32, target, regressor,
- plc_blockl, 3, (WebRtc_Word16)shifts, 1);
+ plc_blockl, 3, (int16_t)shifts, 1);
/* find lag */
lag=WebRtcSpl_MaxIndexW32(corr32, 3);
@@ -257,7 +257,7 @@
&enh_buf[ENH_BUFL-iLBCdec_inst->blockl-plc_blockl], plc_blockl);
max16=WebRtcSpl_MaxAbsValueW16(plc_pred, plc_blockl);
max = WEBRTC_SPL_MAX(max, max16);
- scale=22-(WebRtc_Word16)WebRtcSpl_NormW32(max);
+ scale=22-(int16_t)WebRtcSpl_NormW32(max);
scale=WEBRTC_SPL_MAX(scale,0);
tmp2 = WebRtcSpl_DotProductWithScale(
@@ -273,21 +273,21 @@
Calculate EnChange=tmp2/tmp1 in Q16
*/
- scale1=(WebRtc_Word16)WebRtcSpl_NormW32(tmp1);
+ scale1=(int16_t)WebRtcSpl_NormW32(tmp1);
tmp1=WEBRTC_SPL_SHIFT_W32(tmp1, (scale1-16)); /* using 15 bits */
tmp2=WEBRTC_SPL_SHIFT_W32(tmp2, (scale1));
- EnChange = (WebRtc_Word16)WebRtcSpl_DivW32W16(tmp2,
- (WebRtc_Word16)tmp1);
+ EnChange = (int16_t)WebRtcSpl_DivW32W16(tmp2,
+ (int16_t)tmp1);
/* Calculate the Sqrt of the energy in Q15 ((14+16)/2) */
- SqrtEnChange = (WebRtc_Word16)WebRtcSpl_SqrtFloor(
- WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)EnChange, 14));
+ SqrtEnChange = (int16_t)WebRtcSpl_SqrtFloor(
+ WEBRTC_SPL_LSHIFT_W32((int32_t)EnChange, 14));
/* Multiply first part of vector with 2*SqrtEnChange */
WebRtcSpl_ScaleVector(plc_pred, plc_pred, SqrtEnChange,
- (WebRtc_Word16)(plc_blockl-16), 14);
+ (int16_t)(plc_blockl-16), 14);
/* Calculate increase parameter for window part (16 last samples) */
/* (1-2*SqrtEnChange)/16 in Q15 */
@@ -297,7 +297,7 @@
tmpW16ptr=&plc_pred[plc_blockl-16];
for (i=16;i>0;i--) {
- (*tmpW16ptr)=(WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(
+ (*tmpW16ptr)=(int16_t)WEBRTC_SPL_MUL_16_16_RSFT(
(*tmpW16ptr), (SqrtEnChange+(win>>1)), 14);
/* multiply by (2.0*SqrtEnChange+win) */
@@ -320,13 +320,13 @@
for (i=0; i<plc_blockl; i++) {
win+=inc;
*enh_bufPtr1 =
- (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT((*enh_bufPtr1), win, 14);
- *enh_bufPtr1 += (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(
+ (int16_t)WEBRTC_SPL_MUL_16_16_RSFT((*enh_bufPtr1), win, 14);
+ *enh_bufPtr1 += (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(
(16384-win), plc_pred[plc_blockl-1-i], 14);
enh_bufPtr1--;
}
} else {
- WebRtc_Word16 *synt = &downsampled[LPC_FILTERORDER];
+ int16_t *synt = &downsampled[LPC_FILTERORDER];
enh_bufPtr1=&enh_buf[ENH_BUFL-iLBCdec_inst->blockl-plc_blockl];
WEBRTC_SPL_MEMCPY_W16(enh_bufPtr1, plc_pred, plc_blockl);
@@ -344,24 +344,24 @@
synt,
&iLBCdec_inst->old_syntdenum[
(iLBCdec_inst->nsub-1)*(LPC_FILTERORDER+1)],
- LPC_FILTERORDER+1, (WebRtc_Word16)lag);
+ LPC_FILTERORDER+1, (int16_t)lag);
WEBRTC_SPL_MEMCPY_W16(&synt[-LPC_FILTERORDER], &synt[lag-LPC_FILTERORDER],
LPC_FILTERORDER);
- WebRtcIlbcfix_HpOutput(synt, (WebRtc_Word16*)WebRtcIlbcfix_kHpOutCoefs,
+ WebRtcIlbcfix_HpOutput(synt, (int16_t*)WebRtcIlbcfix_kHpOutCoefs,
iLBCdec_inst->hpimemy, iLBCdec_inst->hpimemx,
- (WebRtc_Word16)lag);
+ (int16_t)lag);
WebRtcSpl_FilterARFastQ12(
enh_bufPtr1, synt,
&iLBCdec_inst->old_syntdenum[
(iLBCdec_inst->nsub-1)*(LPC_FILTERORDER+1)],
- LPC_FILTERORDER+1, (WebRtc_Word16)lag);
+ LPC_FILTERORDER+1, (int16_t)lag);
WEBRTC_SPL_MEMCPY_W16(iLBCdec_inst->syntMem, &synt[lag-LPC_FILTERORDER],
LPC_FILTERORDER);
- WebRtcIlbcfix_HpOutput(synt, (WebRtc_Word16*)WebRtcIlbcfix_kHpOutCoefs,
+ WebRtcIlbcfix_HpOutput(synt, (int16_t*)WebRtcIlbcfix_kHpOutCoefs,
iLBCdec_inst->hpimemy, iLBCdec_inst->hpimemx,
- (WebRtc_Word16)lag);
+ (int16_t)lag);
}
}
@@ -372,9 +372,9 @@
WebRtcIlbcfix_Enhancer(out+WEBRTC_SPL_MUL_16_16(iblock, ENH_BLOCKL),
enh_buf,
ENH_BUFL,
- (WebRtc_Word16)(WEBRTC_SPL_MUL_16_16(iblock, ENH_BLOCKL)+startPos),
+ (int16_t)(WEBRTC_SPL_MUL_16_16(iblock, ENH_BLOCKL)+startPos),
enh_period,
- (WebRtc_Word16*)WebRtcIlbcfix_kEnhPlocs, ENH_NBLOCKS_TOT);
+ (int16_t*)WebRtcIlbcfix_kEnhPlocs, ENH_NBLOCKS_TOT);
}
return (lag);
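
Editor's note: the lag-selection loop above ranks candidates by corr^2/energy without ever dividing; it normalizes corr and ener down to 16 bits and keeps per-candidate shift counts (totsh) so the ratios can be compared by cross-multiplication. A minimal standalone sketch of that idea, with invented values and without the shift bookkeeping, assuming the inputs are small enough for 64-bit products:

```c
#include <stdint.h>
#include <stdio.h>

/* Return the index of the candidate with the largest corr^2/ener,
 * comparing the ratios by cross-multiplication instead of dividing.
 * Assumes the inputs are bounded so the 64-bit products cannot
 * overflow; the fixed-point code above instead normalizes corr/ener
 * to 16 bits and tracks per-candidate shifts. */
static int best_candidate(const int32_t corr[], const int32_t ener[], int n) {
  int best = 0;
  for (int i = 1; i < n; i++) {
    int64_t lhs = (int64_t)corr[i] * corr[i] * ener[best];
    int64_t rhs = (int64_t)corr[best] * corr[best] * ener[i];
    if (lhs > rhs) best = i;
  }
  return best;
}

int main(void) {
  int32_t corr[3] = {900, 1200, 1100};
  int32_t ener[3] = {4000, 9000, 5000};
  printf("best index = %d\n", best_candidate(corr, ener, 3));  /* prints 2 */
  return 0;
}
```

Cross-multiplying trades one division for two multiplies, the usual choice on fixed-point DSP targets.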
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/enhancer_interface.h b/webrtc/modules/audio_coding/codecs/ilbc/enhancer_interface.h
index 37b27e2..f6ebed6 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/enhancer_interface.h
+++ b/webrtc/modules/audio_coding/codecs/ilbc/enhancer_interface.h
@@ -26,8 +26,8 @@
*---------------------------------------------------------------*/
int WebRtcIlbcfix_EnhancerInterface( /* (o) Estimated lag in end of in[] */
- WebRtc_Word16 *out, /* (o) enhanced signal */
- WebRtc_Word16 *in, /* (i) unenhanced signal */
+ int16_t *out, /* (o) enhanced signal */
+ int16_t *in, /* (i) unenhanced signal */
iLBC_Dec_Inst_t *iLBCdec_inst /* (i) buffers etc */
);
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/filtered_cb_vecs.c b/webrtc/modules/audio_coding/codecs/ilbc/filtered_cb_vecs.c
index 7cece26..aa8170c 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/filtered_cb_vecs.c
+++ b/webrtc/modules/audio_coding/codecs/ilbc/filtered_cb_vecs.c
@@ -26,11 +26,11 @@
*---------------------------------------------------------------*/
void WebRtcIlbcfix_FilteredCbVecs(
- WebRtc_Word16 *cbvectors, /* (o) Codebook vector for the higher section */
- WebRtc_Word16 *CBmem, /* (i) Codebook memory that is filtered to create a
+ int16_t *cbvectors, /* (o) Codebook vector for the higher section */
+ int16_t *CBmem, /* (i) Codebook memory that is filtered to create a
second CB section */
int lMem, /* (i) Length of codebook memory */
- WebRtc_Word16 samples /* (i) Number of samples to filter */
+ int16_t samples /* (i) Number of samples to filter */
) {
/* Set up the memory, start with zero state */
@@ -42,7 +42,7 @@
WebRtcSpl_FilterMAFastQ12(
CBmem+CB_HALFFILTERLEN+lMem-samples, cbvectors+lMem-samples,
- (WebRtc_Word16*)WebRtcIlbcfix_kCbFiltersRev, CB_FILTERLEN, samples);
+ (int16_t*)WebRtcIlbcfix_kCbFiltersRev, CB_FILTERLEN, samples);
return;
}
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/filtered_cb_vecs.h b/webrtc/modules/audio_coding/codecs/ilbc/filtered_cb_vecs.h
index c502e8f..99e89a0 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/filtered_cb_vecs.h
+++ b/webrtc/modules/audio_coding/codecs/ilbc/filtered_cb_vecs.h
@@ -28,11 +28,11 @@
*---------------------------------------------------------------*/
void WebRtcIlbcfix_FilteredCbVecs(
- WebRtc_Word16 *cbvectors, /* (o) Codebook vector for the higher section */
- WebRtc_Word16 *CBmem, /* (i) Codebook memory that is filtered to create a
+ int16_t *cbvectors, /* (o) Codebook vector for the higher section */
+ int16_t *CBmem, /* (i) Codebook memory that is filtered to create a
second CB section */
int lMem, /* (i) Length of codebook memory */
- WebRtc_Word16 samples /* (i) Number of samples to filter */
+ int16_t samples /* (i) Number of samples to filter */
);
#endif
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/frame_classify.c b/webrtc/modules/audio_coding/codecs/ilbc/frame_classify.c
index ea3675e..c2c6f75 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/frame_classify.c
+++ b/webrtc/modules/audio_coding/codecs/ilbc/frame_classify.c
@@ -23,19 +23,19 @@
* Classification of subframes to localize start state
*---------------------------------------------------------------*/
-WebRtc_Word16 WebRtcIlbcfix_FrameClassify(
+int16_t WebRtcIlbcfix_FrameClassify(
/* (o) Index to the max-energy sub frame */
iLBC_Enc_Inst_t *iLBCenc_inst,
/* (i/o) the encoder state structure */
- WebRtc_Word16 *residualFIX /* (i) lpc residual signal */
+ int16_t *residualFIX /* (i) lpc residual signal */
){
- WebRtc_Word16 max, scale;
- WebRtc_Word32 ssqEn[NSUB_MAX-1];
- WebRtc_Word16 *ssqPtr;
- WebRtc_Word32 *seqEnPtr;
- WebRtc_Word32 maxW32;
- WebRtc_Word16 scale1;
- WebRtc_Word16 pos;
+ int16_t max, scale;
+ int32_t ssqEn[NSUB_MAX-1];
+ int16_t *ssqPtr;
+ int32_t *seqEnPtr;
+ int32_t maxW32;
+ int16_t scale1;
+ int16_t pos;
int n;
/*
@@ -62,7 +62,7 @@
}
/* Scale to maximum 20 bits in order to allow for the 11 bit window */
- maxW32 = WebRtcSpl_MaxValueW32(ssqEn, (WebRtc_Word16)(iLBCenc_inst->nsub-1));
+ maxW32 = WebRtcSpl_MaxValueW32(ssqEn, (int16_t)(iLBCenc_inst->nsub-1));
scale = WebRtcSpl_GetSizeInBits(maxW32) - 20;
scale1 = WEBRTC_SPL_MAX(0, scale);
@@ -71,9 +71,9 @@
*/
seqEnPtr=ssqEn;
if (iLBCenc_inst->mode==20) {
- ssqPtr=(WebRtc_Word16*)WebRtcIlbcfix_kStartSequenceEnrgWin+1;
+ ssqPtr=(int16_t*)WebRtcIlbcfix_kStartSequenceEnrgWin+1;
} else {
- ssqPtr=(WebRtc_Word16*)WebRtcIlbcfix_kStartSequenceEnrgWin;
+ ssqPtr=(int16_t*)WebRtcIlbcfix_kStartSequenceEnrgWin;
}
for (n=(iLBCenc_inst->nsub-1); n>0; n--) {
(*seqEnPtr)=WEBRTC_SPL_MUL(((*seqEnPtr)>>scale1), (*ssqPtr));
@@ -82,7 +82,7 @@
}
/* Extract the best choise of start state */
- pos = WebRtcSpl_MaxIndexW32(ssqEn, (WebRtc_Word16)(iLBCenc_inst->nsub-1)) + 1;
+ pos = WebRtcSpl_MaxIndexW32(ssqEn, (int16_t)(iLBCenc_inst->nsub-1)) + 1;
return(pos);
}
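
Editor's note: WebRtcIlbcfix_FrameClassify above picks the sub-frame with the largest windowed energy, first scaling the energies down to at most 20 bits so the 11-bit window multiply cannot overflow 32 bits. A simplified sketch of the same idea; sub-frame sizes and window values here are placeholders, not the real constants:

```c
#include <stdint.h>
#include <stdio.h>

#define NSUB 4
#define SUBL 40

/* Index of the sub-frame with the largest windowed energy.  Energies
 * are scaled so at most 20 bits remain before applying the Q11 window. */
static int max_energy_subframe(const int16_t *residual,
                               const int16_t win_q11[NSUB]) {
  int64_t ssq[NSUB];
  int64_t maxEn = 0;

  for (int n = 0; n < NSUB; n++) {
    int64_t e = 0;
    for (int k = 0; k < SUBL; k++) {
      int32_t s = residual[n * SUBL + k];
      e += (int64_t)s * s;                     /* sum of squares */
    }
    ssq[n] = e;
    if (e > maxEn) maxEn = e;
  }

  int shift = 0;
  while ((maxEn >> shift) >= (1 << 20)) shift++;   /* keep <= 20 bits */

  int best = 0;
  int32_t bestVal = -1;
  for (int n = 0; n < NSUB; n++) {
    int32_t v = (int32_t)(ssq[n] >> shift) * win_q11[n];
    if (v > bestVal) { bestVal = v; best = n; }
  }
  return best;
}

int main(void) {
  int16_t res[NSUB * SUBL] = {0};
  for (int k = 0; k < SUBL; k++) res[2 * SUBL + k] = 1000;  /* energy in sub-frame 2 */
  const int16_t win[NSUB] = {1024, 1024, 1024, 1024};       /* flat window, Q11 */
  printf("%d\n", max_energy_subframe(res, win));            /* prints 2 */
  return 0;
}
```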
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/frame_classify.h b/webrtc/modules/audio_coding/codecs/ilbc/frame_classify.h
index faf4666..ba9b9fe 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/frame_classify.h
+++ b/webrtc/modules/audio_coding/codecs/ilbc/frame_classify.h
@@ -19,11 +19,11 @@
#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_FRAME_CLASSIFY_H_
#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_FRAME_CLASSIFY_H_
-WebRtc_Word16 WebRtcIlbcfix_FrameClassify(
+int16_t WebRtcIlbcfix_FrameClassify(
/* (o) Index to the max-energy sub frame */
iLBC_Enc_Inst_t *iLBCenc_inst,
/* (i/o) the encoder state structure */
- WebRtc_Word16 *residualFIX /* (i) lpc residual signal */
+ int16_t *residualFIX /* (i) lpc residual signal */
);
#endif
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/gain_dequant.c b/webrtc/modules/audio_coding/codecs/ilbc/gain_dequant.c
index 9450a80..8570c8b 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/gain_dequant.c
+++ b/webrtc/modules/audio_coding/codecs/ilbc/gain_dequant.c
@@ -24,14 +24,14 @@
* residual
*---------------------------------------------------------------*/
-WebRtc_Word16 WebRtcIlbcfix_GainDequant(
+int16_t WebRtcIlbcfix_GainDequant(
/* (o) quantized gain value (Q14) */
- WebRtc_Word16 index, /* (i) quantization index */
- WebRtc_Word16 maxIn, /* (i) maximum of unquantized gain (Q14) */
- WebRtc_Word16 stage /* (i) The stage of the search */
+ int16_t index, /* (i) quantization index */
+ int16_t maxIn, /* (i) maximum of unquantized gain (Q14) */
+ int16_t stage /* (i) The stage of the search */
){
- WebRtc_Word16 scale;
- const WebRtc_Word16 *gain;
+ int16_t scale;
+ const int16_t *gain;
/* obtain correct scale factor */
@@ -41,5 +41,5 @@
/* select the quantization table and return the decoded value */
gain = WebRtcIlbcfix_kGain[stage];
- return((WebRtc_Word16)((WEBRTC_SPL_MUL_16_16(scale, gain[index])+8192)>>14));
+ return((int16_t)((WEBRTC_SPL_MUL_16_16(scale, gain[index])+8192)>>14));
}
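
Editor's note: the return statement above is a Q14 multiply with rounding: the Q14 scale times the Q14 codebook gain gives a Q28 product, and adding 8192 before shifting right by 14 rounds it back to Q14. A tiny standalone illustration; the helper name and values are made up:

```c
#include <stdint.h>
#include <stdio.h>

/* Multiply two Q14 values and round the Q28 product back to Q14. */
static int16_t mul_q14(int16_t a_q14, int16_t b_q14) {
  return (int16_t)(((int32_t)a_q14 * b_q14 + 8192) >> 14);
}

int main(void) {
  int16_t half  = 8192;    /* 0.5  in Q14 */
  int16_t three = 12288;   /* 0.75 in Q14 */
  printf("%d\n", mul_q14(half, three));  /* prints 6144 = 0.375 in Q14 */
  return 0;
}
```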
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/gain_dequant.h b/webrtc/modules/audio_coding/codecs/ilbc/gain_dequant.h
index 28f2ceb..6bda066 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/gain_dequant.h
+++ b/webrtc/modules/audio_coding/codecs/ilbc/gain_dequant.h
@@ -26,11 +26,11 @@
* residual
*---------------------------------------------------------------*/
-WebRtc_Word16 WebRtcIlbcfix_GainDequant(
+int16_t WebRtcIlbcfix_GainDequant(
/* (o) quantized gain value (Q14) */
- WebRtc_Word16 index, /* (i) quantization index */
- WebRtc_Word16 maxIn, /* (i) maximum of unquantized gain (Q14) */
- WebRtc_Word16 stage /* (i) The stage of the search */
+ int16_t index, /* (i) quantization index */
+ int16_t maxIn, /* (i) maximum of unquantized gain (Q14) */
+ int16_t stage /* (i) The stage of the search */
);
#endif
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/gain_quant.c b/webrtc/modules/audio_coding/codecs/ilbc/gain_quant.c
index bdf88a5..f7a8083 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/gain_quant.c
+++ b/webrtc/modules/audio_coding/codecs/ilbc/gain_quant.c
@@ -23,16 +23,16 @@
* quantizer for the gain in the gain-shape coding of residual
*---------------------------------------------------------------*/
-WebRtc_Word16 WebRtcIlbcfix_GainQuant( /* (o) quantized gain value */
- WebRtc_Word16 gain, /* (i) gain value Q14 */
- WebRtc_Word16 maxIn, /* (i) maximum of gain value Q14 */
- WebRtc_Word16 stage, /* (i) The stage of the search */
- WebRtc_Word16 *index /* (o) quantization index */
+int16_t WebRtcIlbcfix_GainQuant( /* (o) quantized gain value */
+ int16_t gain, /* (i) gain value Q14 */
+ int16_t maxIn, /* (i) maximum of gain value Q14 */
+ int16_t stage, /* (i) The stage of the search */
+ int16_t *index /* (o) quantization index */
) {
- WebRtc_Word16 scale, returnVal, cblen;
- WebRtc_Word32 gainW32, measure1, measure2;
- const WebRtc_Word16 *cbPtr, *cb;
+ int16_t scale, returnVal, cblen;
+ int32_t gainW32, measure1, measure2;
+ const int16_t *cbPtr, *cb;
int loc, noMoves, noChecks, i;
/* ensure a lower bound (0.1) on the scaling factor */
@@ -48,7 +48,7 @@
/* Multiply the gain with 2^14 to make the comparison
easier and with higher precision */
- gainW32 = WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)gain, 14);
+ gainW32 = WEBRTC_SPL_LSHIFT_W32((int32_t)gain, 14);
/* Do a binary search, starting in the middle of the CB
loc - defines the current position in the table
@@ -99,7 +99,7 @@
*index=loc;
/* Calculate the quantized gain value (in Q14) */
- returnVal=(WebRtc_Word16)((WEBRTC_SPL_MUL_16_16(scale, cb[loc])+8192)>>14);
+ returnVal=(int16_t)((WEBRTC_SPL_MUL_16_16(scale, cb[loc])+8192)>>14);
/* return the quantized value */
return(returnVal);
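
Editor's note: WebRtcIlbcfix_GainQuant above does a binary search over a sorted gain codebook and then refines around the landing point. A much-reduced sketch of quantizing a value against a sorted table this way; the codebook contents are invented, and the real routine works on Q-scaled values rather than raw differences:

```c
#include <stdint.h>
#include <stdlib.h>
#include <stdio.h>

/* Index of the codebook entry closest to target, for a sorted table:
 * binary search down to two candidates, then pick the nearer one. */
static int quantize_sorted(int32_t target, const int16_t *cb, int cblen) {
  int lo = 0, hi = cblen - 1;
  while (hi - lo > 1) {
    int mid = (lo + hi) / 2;
    if ((int32_t)cb[mid] <= target) lo = mid; else hi = mid;
  }
  return (abs(cb[hi] - target) < abs(cb[lo] - target)) ? hi : lo;
}

int main(void) {
  const int16_t cb[8] = {-2048, -1024, -256, 0, 256, 1024, 2048, 4096};
  int idx = quantize_sorted(700, cb, 8);
  printf("index %d, value %d\n", idx, cb[idx]);  /* index 5, value 1024 */
  return 0;
}
```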
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/gain_quant.h b/webrtc/modules/audio_coding/codecs/ilbc/gain_quant.h
index a2f0596..3954364 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/gain_quant.h
+++ b/webrtc/modules/audio_coding/codecs/ilbc/gain_quant.h
@@ -25,11 +25,11 @@
* quantizer for the gain in the gain-shape coding of residual
*---------------------------------------------------------------*/
-WebRtc_Word16 WebRtcIlbcfix_GainQuant( /* (o) quantized gain value */
- WebRtc_Word16 gain, /* (i) gain value Q14 */
- WebRtc_Word16 maxIn, /* (i) maximum of gain value Q14 */
- WebRtc_Word16 stage, /* (i) The stage of the search */
- WebRtc_Word16 *index /* (o) quantization index */
+int16_t WebRtcIlbcfix_GainQuant( /* (o) quantized gain value */
+ int16_t gain, /* (i) gain value Q14 */
+ int16_t maxIn, /* (i) maximum of gain value Q14 */
+ int16_t stage, /* (i) The stage of the search */
+ int16_t *index /* (o) quantization index */
);
#endif
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/get_cd_vec.c b/webrtc/modules/audio_coding/codecs/ilbc/get_cd_vec.c
index aba3e31..ca07b0a 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/get_cd_vec.c
+++ b/webrtc/modules/audio_coding/codecs/ilbc/get_cd_vec.c
@@ -25,16 +25,16 @@
*---------------------------------------------------------------*/
void WebRtcIlbcfix_GetCbVec(
- WebRtc_Word16 *cbvec, /* (o) Constructed codebook vector */
- WebRtc_Word16 *mem, /* (i) Codebook buffer */
- WebRtc_Word16 index, /* (i) Codebook index */
- WebRtc_Word16 lMem, /* (i) Length of codebook buffer */
- WebRtc_Word16 cbveclen /* (i) Codebook vector length */
+ int16_t *cbvec, /* (o) Constructed codebook vector */
+ int16_t *mem, /* (i) Codebook buffer */
+ int16_t index, /* (i) Codebook index */
+ int16_t lMem, /* (i) Length of codebook buffer */
+ int16_t cbveclen /* (i) Codebook vector length */
){
- WebRtc_Word16 k, base_size;
- WebRtc_Word16 lag;
+ int16_t k, base_size;
+ int16_t lag;
/* Stack based */
- WebRtc_Word16 tempbuff2[SUBL+5];
+ int16_t tempbuff2[SUBL+5];
/* Determine size of codebook sections */
@@ -58,7 +58,7 @@
/* Calculate lag */
- k=(WebRtc_Word16)WEBRTC_SPL_MUL_16_16(2, (index-(lMem-cbveclen+1)))+cbveclen;
+ k=(int16_t)WEBRTC_SPL_MUL_16_16(2, (index-(lMem-cbveclen+1)))+cbveclen;
lag=WEBRTC_SPL_RSHIFT_W16(k, 1);
@@ -70,7 +70,7 @@
else {
- WebRtc_Word16 memIndTest;
+ int16_t memIndTest;
/* first non-interpolated vectors */
@@ -86,7 +86,7 @@
/* do filtering to get the codebook vector */
WebRtcSpl_FilterMAFastQ12(
- &mem[memIndTest+4], cbvec, (WebRtc_Word16*)WebRtcIlbcfix_kCbFiltersRev,
+ &mem[memIndTest+4], cbvec, (int16_t*)WebRtcIlbcfix_kCbFiltersRev,
CB_FILTERLEN, cbveclen);
}
@@ -99,8 +99,8 @@
/* do filtering */
WebRtcSpl_FilterMAFastQ12(
- &mem[memIndTest+7], tempbuff2, (WebRtc_Word16*)WebRtcIlbcfix_kCbFiltersRev,
- CB_FILTERLEN, (WebRtc_Word16)(cbveclen+5));
+ &mem[memIndTest+7], tempbuff2, (int16_t*)WebRtcIlbcfix_kCbFiltersRev,
+ CB_FILTERLEN, (int16_t)(cbveclen+5));
/* Calculate lag index */
lag = (cbveclen<<1)-20+index-base_size-lMem-1;
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/get_cd_vec.h b/webrtc/modules/audio_coding/codecs/ilbc/get_cd_vec.h
index 99b5d4e..1c5ac8f 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/get_cd_vec.h
+++ b/webrtc/modules/audio_coding/codecs/ilbc/get_cd_vec.h
@@ -20,11 +20,11 @@
#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_GET_CD_VEC_H_
void WebRtcIlbcfix_GetCbVec(
- WebRtc_Word16 *cbvec, /* (o) Constructed codebook vector */
- WebRtc_Word16 *mem, /* (i) Codebook buffer */
- WebRtc_Word16 index, /* (i) Codebook index */
- WebRtc_Word16 lMem, /* (i) Length of codebook buffer */
- WebRtc_Word16 cbveclen /* (i) Codebook vector length */
+ int16_t *cbvec, /* (o) Constructed codebook vector */
+ int16_t *mem, /* (i) Codebook buffer */
+ int16_t index, /* (i) Codebook index */
+ int16_t lMem, /* (i) Length of codebook buffer */
+ int16_t cbveclen /* (i) Codebook vector length */
);
#endif
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/get_lsp_poly.c b/webrtc/modules/audio_coding/codecs/ilbc/get_lsp_poly.c
index c55e918..d44380f 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/get_lsp_poly.c
+++ b/webrtc/modules/audio_coding/codecs/ilbc/get_lsp_poly.c
@@ -38,19 +38,19 @@
*---------------------------------------------------------------*/
void WebRtcIlbcfix_GetLspPoly(
- WebRtc_Word16 *lsp, /* (i) LSP in Q15 */
- WebRtc_Word32 *f) /* (o) polonymial in Q24 */
+ int16_t *lsp, /* (i) LSP in Q15 */
+ int32_t *f) /* (o) polonymial in Q24 */
{
- WebRtc_Word32 tmpW32;
+ int32_t tmpW32;
int i, j;
- WebRtc_Word16 high, low;
- WebRtc_Word16 *lspPtr;
- WebRtc_Word32 *fPtr;
+ int16_t high, low;
+ int16_t *lspPtr;
+ int32_t *fPtr;
lspPtr = lsp;
fPtr = f;
/* f[0] = 1.0 (Q24) */
- (*fPtr) = (WebRtc_Word32)16777216;
+ (*fPtr) = (int32_t)16777216;
fPtr++;
(*fPtr) = WEBRTC_SPL_MUL((*lspPtr), -1024);
@@ -64,8 +64,8 @@
for(j=i; j>1; j--)
{
/* Compute f[j] = f[j] + tmp*f[j-1] + f[j-2]; */
- high = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(fPtr[-1], 16);
- low = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(fPtr[-1]-WEBRTC_SPL_LSHIFT_W32(((WebRtc_Word32)high),16), 1);
+ high = (int16_t)WEBRTC_SPL_RSHIFT_W32(fPtr[-1], 16);
+ low = (int16_t)WEBRTC_SPL_RSHIFT_W32(fPtr[-1]-WEBRTC_SPL_LSHIFT_W32(((int32_t)high),16), 1);
tmpW32 = WEBRTC_SPL_LSHIFT_W32(WEBRTC_SPL_MUL_16_16(high, (*lspPtr)), 2) +
WEBRTC_SPL_LSHIFT_W32(WEBRTC_SPL_MUL_16_16_RSFT(low, (*lspPtr), 15), 2);
@@ -74,7 +74,7 @@
(*fPtr) -= tmpW32;
fPtr--;
}
- (*fPtr) -= (WebRtc_Word32)WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)(*lspPtr), 10);
+ (*fPtr) -= (int32_t)WEBRTC_SPL_LSHIFT_W32((int32_t)(*lspPtr), 10);
fPtr+=i;
lspPtr+=2;
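
Editor's note: the loop above evaluates the recursion f[j] += tmp*f[j-1] + f[j-2], where tmp stands for -2*cos(w) carried as a Q15 LSP value; the high/low split lets a 32x16-bit product be built from two 16x16 multiplies. A standalone sketch of just that split multiply, with an illustrative function name and test values:

```c
#include <stdint.h>
#include <stdio.h>

/* Approximately (a_q24 * b_q15) >> 14 using only 16x16->32 multiplies,
 * the same high/low splitting trick as the loop above.  Assumes |a_q24|
 * stays small (as the polynomial coefficients do) so high*b*4 fits in
 * 32 bits; the two lowest bits of a_q24 are dropped. */
static int32_t mul_split(int32_t a_q24, int16_t b_q15) {
  int16_t high = (int16_t)(a_q24 >> 16);                       /* top 16 bits */
  int16_t low  = (int16_t)((a_q24 - (int32_t)high * 65536) >> 1); /* next 15 bits */
  return ((int32_t)high * b_q15) * 4 +
         (((int32_t)low * b_q15) >> 15) * 4;
}

int main(void) {
  int32_t one_q24  = 1 << 24;   /* 1.0 in Q24 */
  int16_t half_q15 = 1 << 14;   /* 0.5 in Q15, i.e. an effective factor of 1.0 */
  printf("%d\n", (int)mul_split(one_q24, half_q15));  /* prints 16777216 */
  return 0;
}
```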
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/get_lsp_poly.h b/webrtc/modules/audio_coding/codecs/ilbc/get_lsp_poly.h
index b0520b4..46ade48 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/get_lsp_poly.h
+++ b/webrtc/modules/audio_coding/codecs/ilbc/get_lsp_poly.h
@@ -41,7 +41,7 @@
*---------------------------------------------------------------*/
void WebRtcIlbcfix_GetLspPoly(
- WebRtc_Word16 *lsp, /* (i) LSP in Q15 */
- WebRtc_Word32 *f); /* (o) polonymial in Q24 */
+ int16_t *lsp, /* (i) LSP in Q15 */
+ int32_t *f); /* (o) polonymial in Q24 */
#endif
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/get_sync_seq.c b/webrtc/modules/audio_coding/codecs/ilbc/get_sync_seq.c
index ce72865..695631a 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/get_sync_seq.c
+++ b/webrtc/modules/audio_coding/codecs/ilbc/get_sync_seq.c
@@ -26,31 +26,31 @@
*---------------------------------------------------------------*/
void WebRtcIlbcfix_GetSyncSeq(
- WebRtc_Word16 *idata, /* (i) original data */
- WebRtc_Word16 idatal, /* (i) dimension of data */
- WebRtc_Word16 centerStartPos, /* (i) where current block starts */
- WebRtc_Word16 *period, /* (i) rough-pitch-period array (Q-2) */
- WebRtc_Word16 *plocs, /* (i) where periods of period array are taken (Q-2) */
- WebRtc_Word16 periodl, /* (i) dimension period array */
- WebRtc_Word16 hl, /* (i) 2*hl+1 is the number of sequences */
- WebRtc_Word16 *surround /* (i/o) The contribution from this sequence
+ int16_t *idata, /* (i) original data */
+ int16_t idatal, /* (i) dimension of data */
+ int16_t centerStartPos, /* (i) where current block starts */
+ int16_t *period, /* (i) rough-pitch-period array (Q-2) */
+ int16_t *plocs, /* (i) where periods of period array are taken (Q-2) */
+ int16_t periodl, /* (i) dimension period array */
+ int16_t hl, /* (i) 2*hl+1 is the number of sequences */
+ int16_t *surround /* (i/o) The contribution from this sequence
summed with earlier contributions */
){
- WebRtc_Word16 i,centerEndPos,q;
+ int16_t i,centerEndPos,q;
/* Stack based */
- WebRtc_Word16 lagBlock[2*ENH_HL+1];
- WebRtc_Word16 blockStartPos[2*ENH_HL+1]; /* Defines the position to search around (Q2) */
- WebRtc_Word16 plocs2[ENH_PLOCSL];
+ int16_t lagBlock[2*ENH_HL+1];
+ int16_t blockStartPos[2*ENH_HL+1]; /* Defines the position to search around (Q2) */
+ int16_t plocs2[ENH_PLOCSL];
centerEndPos=centerStartPos+ENH_BLOCKL-1;
/* present (find predicted lag from this position) */
WebRtcIlbcfix_NearestNeighbor(lagBlock+hl,plocs,
- (WebRtc_Word16)WEBRTC_SPL_MUL_16_16(2, (centerStartPos+centerEndPos)),
+ (int16_t)WEBRTC_SPL_MUL_16_16(2, (centerStartPos+centerEndPos)),
periodl);
- blockStartPos[hl]=(WebRtc_Word16)WEBRTC_SPL_MUL_16_16(4, centerStartPos);
+ blockStartPos[hl]=(int16_t)WEBRTC_SPL_MUL_16_16(4, centerStartPos);
/* past (find predicted position and perform a refined
search to find the best sequence) */
@@ -59,10 +59,10 @@
blockStartPos[q]=blockStartPos[q+1]-period[lagBlock[q+1]];
WebRtcIlbcfix_NearestNeighbor(lagBlock+q, plocs,
- (WebRtc_Word16)(blockStartPos[q] + (WebRtc_Word16)WEBRTC_SPL_MUL_16_16(4, ENH_BLOCKL_HALF)-period[lagBlock[q+1]]),
+ (int16_t)(blockStartPos[q] + (int16_t)WEBRTC_SPL_MUL_16_16(4, ENH_BLOCKL_HALF)-period[lagBlock[q+1]]),
periodl);
- if((blockStartPos[q]-(WebRtc_Word16)WEBRTC_SPL_MUL_16_16(4, ENH_OVERHANG))>=0) {
+ if((blockStartPos[q]-(int16_t)WEBRTC_SPL_MUL_16_16(4, ENH_OVERHANG))>=0) {
/* Find the best possible sequence in the 4 times upsampled
domain around blockStartPos+q */
@@ -85,14 +85,14 @@
for(q=hl+1;q<=WEBRTC_SPL_MUL_16_16(2, hl);q++) {
WebRtcIlbcfix_NearestNeighbor(lagBlock+q,plocs2,
- (WebRtc_Word16)(blockStartPos[q-1]+
- (WebRtc_Word16)WEBRTC_SPL_MUL_16_16(4, ENH_BLOCKL_HALF)),periodl);
+ (int16_t)(blockStartPos[q-1]+
+ (int16_t)WEBRTC_SPL_MUL_16_16(4, ENH_BLOCKL_HALF)),periodl);
blockStartPos[q]=blockStartPos[q-1]+period[lagBlock[q]];
- if( (blockStartPos[q]+(WebRtc_Word16)WEBRTC_SPL_MUL_16_16(4, (ENH_BLOCKL+ENH_OVERHANG)))
+ if( (blockStartPos[q]+(int16_t)WEBRTC_SPL_MUL_16_16(4, (ENH_BLOCKL+ENH_OVERHANG)))
<
- (WebRtc_Word16)WEBRTC_SPL_MUL_16_16(4, idatal)) {
+ (int16_t)WEBRTC_SPL_MUL_16_16(4, idatal)) {
/* Find the best possible sequence in the 4 times upsampled
domain around blockStartPos+q */
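
Editor's note: WebRtcIlbcfix_NearestNeighbor, called repeatedly above, only has to return the index of the pitch-location entry closest to a predicted position (positions are kept in a quarter-sample domain). A plain-C sketch of such a search, ignoring the fixed-point details of the real routine; the test positions are made up:

```c
#include <stdint.h>
#include <stdio.h>

/* Index of the entry in locs[] closest to target (linear scan). */
static int nearest_index(const int16_t *locs, int len, int16_t target) {
  int best = 0;
  int32_t bestDist = locs[0] > target ? locs[0] - target : target - locs[0];
  for (int i = 1; i < len; i++) {
    int32_t d = locs[i] > target ? locs[i] - target : target - locs[i];
    if (d < bestDist) { bestDist = d; best = i; }
  }
  return best;
}

int main(void) {
  int16_t plocs[4] = {80, 240, 400, 560};        /* made-up Q2 positions */
  printf("%d\n", nearest_index(plocs, 4, 300));  /* prints 1 */
  return 0;
}
```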
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/get_sync_seq.h b/webrtc/modules/audio_coding/codecs/ilbc/get_sync_seq.h
index a0ffd39..f9b08b7 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/get_sync_seq.h
+++ b/webrtc/modules/audio_coding/codecs/ilbc/get_sync_seq.h
@@ -26,14 +26,14 @@
*---------------------------------------------------------------*/
void WebRtcIlbcfix_GetSyncSeq(
- WebRtc_Word16 *idata, /* (i) original data */
- WebRtc_Word16 idatal, /* (i) dimension of data */
- WebRtc_Word16 centerStartPos, /* (i) where current block starts */
- WebRtc_Word16 *period, /* (i) rough-pitch-period array (Q-2) */
- WebRtc_Word16 *plocs, /* (i) where periods of period array are taken (Q-2) */
- WebRtc_Word16 periodl, /* (i) dimension period array */
- WebRtc_Word16 hl, /* (i) 2*hl+1 is the number of sequences */
- WebRtc_Word16 *surround /* (i/o) The contribution from this sequence
+ int16_t *idata, /* (i) original data */
+ int16_t idatal, /* (i) dimension of data */
+ int16_t centerStartPos, /* (i) where current block starts */
+ int16_t *period, /* (i) rough-pitch-period array (Q-2) */
+ int16_t *plocs, /* (i) where periods of period array are taken (Q-2) */
+ int16_t periodl, /* (i) dimension period array */
+ int16_t hl, /* (i) 2*hl+1 is the number of sequences */
+ int16_t *surround /* (i/o) The contribution from this sequence
summed with earlier contributions */
);
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/hp_input.c b/webrtc/modules/audio_coding/codecs/ilbc/hp_input.c
index f202f62a..48bd7c4 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/hp_input.c
+++ b/webrtc/modules/audio_coding/codecs/ilbc/hp_input.c
@@ -23,18 +23,18 @@
*---------------------------------------------------------------*/
void WebRtcIlbcfix_HpInput(
- WebRtc_Word16 *signal, /* (i/o) signal vector */
- WebRtc_Word16 *ba, /* (i) B- and A-coefficients (2:nd order)
+ int16_t *signal, /* (i/o) signal vector */
+ int16_t *ba, /* (i) B- and A-coefficients (2:nd order)
{b[0] b[1] b[2] -a[1] -a[2]} a[0]
is assumed to be 1.0 */
- WebRtc_Word16 *y, /* (i/o) Filter state yhi[n-1] ylow[n-1]
+ int16_t *y, /* (i/o) Filter state yhi[n-1] ylow[n-1]
yhi[n-2] ylow[n-2] */
- WebRtc_Word16 *x, /* (i/o) Filter state x[n-1] x[n-2] */
- WebRtc_Word16 len) /* (i) Number of samples to filter */
+ int16_t *x, /* (i/o) Filter state x[n-1] x[n-2] */
+ int16_t len) /* (i) Number of samples to filter */
{
int i;
- WebRtc_Word32 tmpW32;
- WebRtc_Word32 tmpW32b;
+ int32_t tmpW32;
+ int32_t tmpW32b;
for (i=0; i<len; i++) {
@@ -62,10 +62,10 @@
tmpW32b = tmpW32 + 4096;
/* Saturate (to 2^28) so that the HP filtered signal does not overflow */
- tmpW32b = WEBRTC_SPL_SAT((WebRtc_Word32)268435455, tmpW32b, (WebRtc_Word32)-268435456);
+ tmpW32b = WEBRTC_SPL_SAT((int32_t)268435455, tmpW32b, (int32_t)-268435456);
/* Convert back to Q0 and multiply with 0.5 */
- signal[i] = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(tmpW32b, 13);
+ signal[i] = (int16_t)WEBRTC_SPL_RSHIFT_W32(tmpW32b, 13);
/* Update state (filtered part) */
y[2] = y[0];
@@ -80,8 +80,8 @@
tmpW32 = WEBRTC_SPL_LSHIFT_W32(tmpW32, 3);
}
- y[0] = (WebRtc_Word16)(tmpW32 >> 16);
- y[1] = (WebRtc_Word16)((tmpW32 - WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)y[0], 16))>>1);
+ y[0] = (int16_t)(tmpW32 >> 16);
+ y[1] = (int16_t)((tmpW32 - WEBRTC_SPL_LSHIFT_W32((int32_t)y[0], 16))>>1);
}
return;
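
Editor's note: the code above is a second-order IIR high-pass in Q13: a feed-forward/feedback accumulation, rounding with +4096 and a shift by 13, saturation of the intermediate sum, and a high-precision output state carried as yhi/ylow word pairs. A simplified sketch of the same filter structure, keeping the output history at 16-bit precision and using placeholder coefficients rather than the real WebRtcIlbcfix tables:

```c
#include <stdint.h>
#include <stdio.h>

static int16_t sat16(int64_t v) {
  if (v > 32767) return 32767;
  if (v < -32768) return -32768;
  return (int16_t)v;
}

/* In-place 2nd-order filter; ba = {b0, b1, b2, -a1, -a2} in Q13. */
static void hp2_q13(int16_t *signal, int len,
                    const int16_t ba[5], int16_t x[2], int16_t y[2]) {
  for (int i = 0; i < len; i++) {
    int64_t acc = (int64_t)ba[0] * signal[i]
                + (int64_t)ba[1] * x[0]
                + (int64_t)ba[2] * x[1]
                + (int64_t)ba[3] * y[0]
                + (int64_t)ba[4] * y[1];
    int16_t out = sat16((acc + 4096) >> 13);     /* round back to Q0 */
    x[1] = x[0];  x[0] = signal[i];              /* update input state */
    y[1] = y[0];  y[0] = out;                    /* update output state */
    signal[i] = out;
  }
}

int main(void) {
  /* Placeholder Q13 high-pass coefficients (b sums to zero, so DC is rejected). */
  const int16_t ba[5] = {7701, -15402, 7701, 15402, -7266};
  int16_t x[2] = {0, 0}, y[2] = {0, 0};
  int16_t sig[4] = {1000, 1000, 1000, 1000};     /* DC input should decay */
  hp2_q13(sig, 4, ba, x, y);
  printf("%d %d %d %d\n", sig[0], sig[1], sig[2], sig[3]);
  return 0;
}
```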
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/hp_input.h b/webrtc/modules/audio_coding/codecs/ilbc/hp_input.h
index f56c4f7..a30f703 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/hp_input.h
+++ b/webrtc/modules/audio_coding/codecs/ilbc/hp_input.h
@@ -22,13 +22,13 @@
#include "defines.h"
void WebRtcIlbcfix_HpInput(
- WebRtc_Word16 *signal, /* (i/o) signal vector */
- WebRtc_Word16 *ba, /* (i) B- and A-coefficients (2:nd order)
+ int16_t *signal, /* (i/o) signal vector */
+ int16_t *ba, /* (i) B- and A-coefficients (2:nd order)
{b[0] b[1] b[2] -a[1] -a[2]} a[0]
is assumed to be 1.0 */
- WebRtc_Word16 *y, /* (i/o) Filter state yhi[n-1] ylow[n-1]
+ int16_t *y, /* (i/o) Filter state yhi[n-1] ylow[n-1]
yhi[n-2] ylow[n-2] */
- WebRtc_Word16 *x, /* (i/o) Filter state x[n-1] x[n-2] */
- WebRtc_Word16 len); /* (i) Number of samples to filter */
+ int16_t *x, /* (i/o) Filter state x[n-1] x[n-2] */
+ int16_t len); /* (i) Number of samples to filter */
#endif
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/hp_output.c b/webrtc/modules/audio_coding/codecs/ilbc/hp_output.c
index 8e1c919..432fdee 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/hp_output.c
+++ b/webrtc/modules/audio_coding/codecs/ilbc/hp_output.c
@@ -23,18 +23,18 @@
*---------------------------------------------------------------*/
void WebRtcIlbcfix_HpOutput(
- WebRtc_Word16 *signal, /* (i/o) signal vector */
- WebRtc_Word16 *ba, /* (i) B- and A-coefficients (2:nd order)
+ int16_t *signal, /* (i/o) signal vector */
+ int16_t *ba, /* (i) B- and A-coefficients (2:nd order)
{b[0] b[1] b[2] -a[1] -a[2]} a[0]
is assumed to be 1.0 */
- WebRtc_Word16 *y, /* (i/o) Filter state yhi[n-1] ylow[n-1]
+ int16_t *y, /* (i/o) Filter state yhi[n-1] ylow[n-1]
yhi[n-2] ylow[n-2] */
- WebRtc_Word16 *x, /* (i/o) Filter state x[n-1] x[n-2] */
- WebRtc_Word16 len) /* (i) Number of samples to filter */
+ int16_t *x, /* (i/o) Filter state x[n-1] x[n-2] */
+ int16_t len) /* (i) Number of samples to filter */
{
int i;
- WebRtc_Word32 tmpW32;
- WebRtc_Word32 tmpW32b;
+ int32_t tmpW32;
+ int32_t tmpW32b;
for (i=0; i<len; i++) {
@@ -62,10 +62,10 @@
tmpW32b = tmpW32 + 1024;
/* Saturate (to 2^26) so that the HP filtered signal does not overflow */
- tmpW32b = WEBRTC_SPL_SAT((WebRtc_Word32)67108863, tmpW32b, (WebRtc_Word32)-67108864);
+ tmpW32b = WEBRTC_SPL_SAT((int32_t)67108863, tmpW32b, (int32_t)-67108864);
/* Convert back to Q0 and multiply with 2 */
- signal[i] = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(tmpW32b, 11);
+ signal[i] = (int16_t)WEBRTC_SPL_RSHIFT_W32(tmpW32b, 11);
/* Update state (filtered part) */
y[2] = y[0];
@@ -80,8 +80,8 @@
tmpW32 = WEBRTC_SPL_LSHIFT_W32(tmpW32, 3);
}
- y[0] = (WebRtc_Word16)(tmpW32 >> 16);
- y[1] = (WebRtc_Word16)((tmpW32 - WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)y[0], 16))>>1);
+ y[0] = (int16_t)(tmpW32 >> 16);
+ y[1] = (int16_t)((tmpW32 - WEBRTC_SPL_LSHIFT_W32((int32_t)y[0], 16))>>1);
}
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/hp_output.h b/webrtc/modules/audio_coding/codecs/ilbc/hp_output.h
index c9a7426..7937ba0 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/hp_output.h
+++ b/webrtc/modules/audio_coding/codecs/ilbc/hp_output.h
@@ -22,13 +22,13 @@
#include "defines.h"
void WebRtcIlbcfix_HpOutput(
- WebRtc_Word16 *signal, /* (i/o) signal vector */
- WebRtc_Word16 *ba, /* (i) B- and A-coefficients (2:nd order)
+ int16_t *signal, /* (i/o) signal vector */
+ int16_t *ba, /* (i) B- and A-coefficients (2:nd order)
{b[0] b[1] b[2] -a[1] -a[2]} a[0]
is assumed to be 1.0 */
- WebRtc_Word16 *y, /* (i/o) Filter state yhi[n-1] ylow[n-1]
+ int16_t *y, /* (i/o) Filter state yhi[n-1] ylow[n-1]
yhi[n-2] ylow[n-2] */
- WebRtc_Word16 *x, /* (i/o) Filter state x[n-1] x[n-2] */
- WebRtc_Word16 len); /* (i) Number of samples to filter */
+ int16_t *x, /* (i/o) Filter state x[n-1] x[n-2] */
+ int16_t len); /* (i) Number of samples to filter */
#endif
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/ilbc.c b/webrtc/modules/audio_coding/codecs/ilbc/ilbc.c
index 9a93d61..21d159f 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/ilbc.c
+++ b/webrtc/modules/audio_coding/codecs/ilbc/ilbc.c
@@ -25,9 +25,9 @@
#include <stdlib.h>
-WebRtc_Word16 WebRtcIlbcfix_EncoderAssign(iLBC_encinst_t **iLBC_encinst, WebRtc_Word16 *ILBCENC_inst_Addr, WebRtc_Word16 *size) {
+int16_t WebRtcIlbcfix_EncoderAssign(iLBC_encinst_t **iLBC_encinst, int16_t *ILBCENC_inst_Addr, int16_t *size) {
*iLBC_encinst=(iLBC_encinst_t*)ILBCENC_inst_Addr;
- *size=sizeof(iLBC_Enc_Inst_t)/sizeof(WebRtc_Word16);
+ *size=sizeof(iLBC_Enc_Inst_t)/sizeof(int16_t);
if (*iLBC_encinst!=NULL) {
return(0);
} else {
@@ -35,9 +35,9 @@
}
}
-WebRtc_Word16 WebRtcIlbcfix_DecoderAssign(iLBC_decinst_t **iLBC_decinst, WebRtc_Word16 *ILBCDEC_inst_Addr, WebRtc_Word16 *size) {
+int16_t WebRtcIlbcfix_DecoderAssign(iLBC_decinst_t **iLBC_decinst, int16_t *ILBCDEC_inst_Addr, int16_t *size) {
*iLBC_decinst=(iLBC_decinst_t*)ILBCDEC_inst_Addr;
- *size=sizeof(iLBC_Dec_Inst_t)/sizeof(WebRtc_Word16);
+ *size=sizeof(iLBC_Dec_Inst_t)/sizeof(int16_t);
if (*iLBC_decinst!=NULL) {
return(0);
} else {
@@ -45,7 +45,7 @@
}
}
-WebRtc_Word16 WebRtcIlbcfix_EncoderCreate(iLBC_encinst_t **iLBC_encinst) {
+int16_t WebRtcIlbcfix_EncoderCreate(iLBC_encinst_t **iLBC_encinst) {
*iLBC_encinst=(iLBC_encinst_t*)malloc(sizeof(iLBC_Enc_Inst_t));
if (*iLBC_encinst!=NULL) {
WebRtcSpl_Init();
@@ -55,7 +55,7 @@
}
}
-WebRtc_Word16 WebRtcIlbcfix_DecoderCreate(iLBC_decinst_t **iLBC_decinst) {
+int16_t WebRtcIlbcfix_DecoderCreate(iLBC_decinst_t **iLBC_decinst) {
*iLBC_decinst=(iLBC_decinst_t*)malloc(sizeof(iLBC_Dec_Inst_t));
if (*iLBC_decinst!=NULL) {
WebRtcSpl_Init();
@@ -65,18 +65,18 @@
}
}
-WebRtc_Word16 WebRtcIlbcfix_EncoderFree(iLBC_encinst_t *iLBC_encinst) {
+int16_t WebRtcIlbcfix_EncoderFree(iLBC_encinst_t *iLBC_encinst) {
free(iLBC_encinst);
return(0);
}
-WebRtc_Word16 WebRtcIlbcfix_DecoderFree(iLBC_decinst_t *iLBC_decinst) {
+int16_t WebRtcIlbcfix_DecoderFree(iLBC_decinst_t *iLBC_decinst) {
free(iLBC_decinst);
return(0);
}
-WebRtc_Word16 WebRtcIlbcfix_EncoderInit(iLBC_encinst_t *iLBCenc_inst, WebRtc_Word16 mode)
+int16_t WebRtcIlbcfix_EncoderInit(iLBC_encinst_t *iLBCenc_inst, int16_t mode)
{
if ((mode==20)||(mode==30)) {
WebRtcIlbcfix_InitEncode((iLBC_Enc_Inst_t*) iLBCenc_inst, mode);
@@ -86,10 +86,10 @@
}
}
-WebRtc_Word16 WebRtcIlbcfix_Encode(iLBC_encinst_t *iLBCenc_inst, const WebRtc_Word16 *speechIn, WebRtc_Word16 len, WebRtc_Word16 *encoded) {
+int16_t WebRtcIlbcfix_Encode(iLBC_encinst_t *iLBCenc_inst, const int16_t *speechIn, int16_t len, int16_t *encoded) {
- WebRtc_Word16 pos = 0;
- WebRtc_Word16 encpos = 0;
+ int16_t pos = 0;
+ int16_t encpos = 0;
if ((len != ((iLBC_Enc_Inst_t*)iLBCenc_inst)->blockl) &&
#ifdef SPLIT_10MS
@@ -104,7 +104,7 @@
/* call encoder */
while (pos<len) {
- WebRtcIlbcfix_EncodeImpl((WebRtc_UWord16*) &encoded[encpos], &speechIn[pos], (iLBC_Enc_Inst_t*) iLBCenc_inst);
+ WebRtcIlbcfix_EncodeImpl((uint16_t*) &encoded[encpos], &speechIn[pos], (iLBC_Enc_Inst_t*) iLBCenc_inst);
#ifdef SPLIT_10MS
pos += 80;
if(((iLBC_Enc_Inst_t*)iLBCenc_inst)->section == 0)
@@ -117,7 +117,7 @@
}
}
-WebRtc_Word16 WebRtcIlbcfix_DecoderInit(iLBC_decinst_t *iLBCdec_inst, WebRtc_Word16 mode) {
+int16_t WebRtcIlbcfix_DecoderInit(iLBC_decinst_t *iLBCdec_inst, int16_t mode) {
if ((mode==20)||(mode==30)) {
WebRtcIlbcfix_InitDecode((iLBC_Dec_Inst_t*) iLBCdec_inst, mode, 1);
return(0);
@@ -125,21 +125,21 @@
return(-1);
}
}
-WebRtc_Word16 WebRtcIlbcfix_DecoderInit20Ms(iLBC_decinst_t *iLBCdec_inst) {
+int16_t WebRtcIlbcfix_DecoderInit20Ms(iLBC_decinst_t *iLBCdec_inst) {
WebRtcIlbcfix_InitDecode((iLBC_Dec_Inst_t*) iLBCdec_inst, 20, 1);
return(0);
}
-WebRtc_Word16 WebRtcIlbcfix_Decoderinit30Ms(iLBC_decinst_t *iLBCdec_inst) {
+int16_t WebRtcIlbcfix_Decoderinit30Ms(iLBC_decinst_t *iLBCdec_inst) {
WebRtcIlbcfix_InitDecode((iLBC_Dec_Inst_t*) iLBCdec_inst, 30, 1);
return(0);
}
-WebRtc_Word16 WebRtcIlbcfix_Decode(iLBC_decinst_t *iLBCdec_inst,
- const WebRtc_Word16 *encoded,
- WebRtc_Word16 len,
- WebRtc_Word16 *decoded,
- WebRtc_Word16 *speechType)
+int16_t WebRtcIlbcfix_Decode(iLBC_decinst_t *iLBCdec_inst,
+ const int16_t *encoded,
+ int16_t len,
+ int16_t *decoded,
+ int16_t *speechType)
{
int i=0;
/* Allow for automatic switching between the frame sizes
@@ -172,7 +172,7 @@
}
while ((i*((iLBC_Dec_Inst_t*)iLBCdec_inst)->no_of_bytes)<len) {
- WebRtcIlbcfix_DecodeImpl(&decoded[i*((iLBC_Dec_Inst_t*)iLBCdec_inst)->blockl], (const WebRtc_UWord16*) &encoded[i*((iLBC_Dec_Inst_t*)iLBCdec_inst)->no_of_words], (iLBC_Dec_Inst_t*) iLBCdec_inst, 1);
+ WebRtcIlbcfix_DecodeImpl(&decoded[i*((iLBC_Dec_Inst_t*)iLBCdec_inst)->blockl], (const uint16_t*) &encoded[i*((iLBC_Dec_Inst_t*)iLBCdec_inst)->no_of_words], (iLBC_Dec_Inst_t*) iLBCdec_inst, 1);
i++;
}
/* iLBC does not support VAD/CNG yet */
@@ -180,11 +180,11 @@
return(i*((iLBC_Dec_Inst_t*)iLBCdec_inst)->blockl);
}
-WebRtc_Word16 WebRtcIlbcfix_Decode20Ms(iLBC_decinst_t *iLBCdec_inst,
- const WebRtc_Word16 *encoded,
- WebRtc_Word16 len,
- WebRtc_Word16 *decoded,
- WebRtc_Word16 *speechType)
+int16_t WebRtcIlbcfix_Decode20Ms(iLBC_decinst_t *iLBCdec_inst,
+ const int16_t *encoded,
+ int16_t len,
+ int16_t *decoded,
+ int16_t *speechType)
{
int i=0;
if ((len==((iLBC_Dec_Inst_t*)iLBCdec_inst)->no_of_bytes)||
@@ -196,7 +196,7 @@
}
while ((i*((iLBC_Dec_Inst_t*)iLBCdec_inst)->no_of_bytes)<len) {
- WebRtcIlbcfix_DecodeImpl(&decoded[i*((iLBC_Dec_Inst_t*)iLBCdec_inst)->blockl], (const WebRtc_UWord16*) &encoded[i*((iLBC_Dec_Inst_t*)iLBCdec_inst)->no_of_words], (iLBC_Dec_Inst_t*) iLBCdec_inst, 1);
+ WebRtcIlbcfix_DecodeImpl(&decoded[i*((iLBC_Dec_Inst_t*)iLBCdec_inst)->blockl], (const uint16_t*) &encoded[i*((iLBC_Dec_Inst_t*)iLBCdec_inst)->no_of_words], (iLBC_Dec_Inst_t*) iLBCdec_inst, 1);
i++;
}
/* iLBC does not support VAD/CNG yet */
@@ -204,11 +204,11 @@
return(i*((iLBC_Dec_Inst_t*)iLBCdec_inst)->blockl);
}
-WebRtc_Word16 WebRtcIlbcfix_Decode30Ms(iLBC_decinst_t *iLBCdec_inst,
- const WebRtc_Word16 *encoded,
- WebRtc_Word16 len,
- WebRtc_Word16 *decoded,
- WebRtc_Word16 *speechType)
+int16_t WebRtcIlbcfix_Decode30Ms(iLBC_decinst_t *iLBCdec_inst,
+ const int16_t *encoded,
+ int16_t len,
+ int16_t *decoded,
+ int16_t *speechType)
{
int i=0;
if ((len==((iLBC_Dec_Inst_t*)iLBCdec_inst)->no_of_bytes)||
@@ -220,7 +220,7 @@
}
while ((i*((iLBC_Dec_Inst_t*)iLBCdec_inst)->no_of_bytes)<len) {
- WebRtcIlbcfix_DecodeImpl(&decoded[i*((iLBC_Dec_Inst_t*)iLBCdec_inst)->blockl], (const WebRtc_UWord16*) &encoded[i*((iLBC_Dec_Inst_t*)iLBCdec_inst)->no_of_words], (iLBC_Dec_Inst_t*) iLBCdec_inst, 1);
+ WebRtcIlbcfix_DecodeImpl(&decoded[i*((iLBC_Dec_Inst_t*)iLBCdec_inst)->blockl], (const uint16_t*) &encoded[i*((iLBC_Dec_Inst_t*)iLBCdec_inst)->no_of_words], (iLBC_Dec_Inst_t*) iLBCdec_inst, 1);
i++;
}
/* iLBC does not support VAD/CNG yet */
@@ -228,9 +228,9 @@
return(i*((iLBC_Dec_Inst_t*)iLBCdec_inst)->blockl);
}
-WebRtc_Word16 WebRtcIlbcfix_DecodePlc(iLBC_decinst_t *iLBCdec_inst, WebRtc_Word16 *decoded, WebRtc_Word16 noOfLostFrames) {
+int16_t WebRtcIlbcfix_DecodePlc(iLBC_decinst_t *iLBCdec_inst, int16_t *decoded, int16_t noOfLostFrames) {
int i;
- WebRtc_UWord16 dummy;
+ uint16_t dummy;
for (i=0;i<noOfLostFrames;i++) {
/* call decoder */
@@ -239,7 +239,7 @@
return (noOfLostFrames*((iLBC_Dec_Inst_t*)iLBCdec_inst)->blockl);
}
-WebRtc_Word16 WebRtcIlbcfix_NetEqPlc(iLBC_decinst_t *iLBCdec_inst, WebRtc_Word16 *decoded, WebRtc_Word16 noOfLostFrames) {
+int16_t WebRtcIlbcfix_NetEqPlc(iLBC_decinst_t *iLBCdec_inst, int16_t *decoded, int16_t noOfLostFrames) {
/* Two input parameters not used, but needed for function pointers in NetEQ */
(void)(decoded = NULL);
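
Editor's note: the EncoderAssign/DecoderAssign functions above place the instance into caller-provided memory and report its size in 16-bit words, which is why sizeof(...) is divided by sizeof(int16_t). A hypothetical usage sketch of that pattern; the struct and function names below are stand-ins, not the real iLBC API:

```c
#include <stdint.h>
#include <stdio.h>

/* Stand-in instance type and assign function (illustrative only). */
typedef struct { int16_t mode; int16_t state[127]; } codec_inst_t;

static int16_t codec_assign(codec_inst_t **inst, int16_t *addr, int16_t *size) {
  *inst = (codec_inst_t *)addr;                            /* no allocation */
  *size = (int16_t)(sizeof(codec_inst_t) / sizeof(int16_t));
  return (*inst != NULL) ? 0 : -1;
}

int main(void) {
  /* Caller-owned storage, provisioned in 16-bit words like the API expects. */
  static int16_t storage[sizeof(codec_inst_t) / sizeof(int16_t)];
  codec_inst_t *inst = NULL;
  int16_t words = 0;
  if (codec_assign(&inst, storage, &words) == 0) {
    inst->mode = 20;
    printf("instance uses %d 16-bit words\n", words);
  }
  return 0;
}
```

The point of the pattern is that no heap allocation happens: the caller reserves a block once, and the codec state lives inside it.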
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/index_conv_dec.c b/webrtc/modules/audio_coding/codecs/ilbc/index_conv_dec.c
index 0d6346a..033a0a3 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/index_conv_dec.c
+++ b/webrtc/modules/audio_coding/codecs/ilbc/index_conv_dec.c
@@ -19,7 +19,7 @@
#include "defines.h"
void WebRtcIlbcfix_IndexConvDec(
- WebRtc_Word16 *index /* (i/o) Codebook indexes */
+ int16_t *index /* (i/o) Codebook indexes */
){
int k;
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/index_conv_dec.h b/webrtc/modules/audio_coding/codecs/ilbc/index_conv_dec.h
index f29ee23..354c5b8 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/index_conv_dec.h
+++ b/webrtc/modules/audio_coding/codecs/ilbc/index_conv_dec.h
@@ -22,7 +22,7 @@
#include "defines.h"
void WebRtcIlbcfix_IndexConvDec(
- WebRtc_Word16 *index /* (i/o) Codebook indexes */
+ int16_t *index /* (i/o) Codebook indexes */
);
#endif
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/index_conv_enc.c b/webrtc/modules/audio_coding/codecs/ilbc/index_conv_enc.c
index cbc04b6..0e7701b 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/index_conv_enc.c
+++ b/webrtc/modules/audio_coding/codecs/ilbc/index_conv_enc.c
@@ -22,7 +22,7 @@
*---------------------------------------------------------------*/
void WebRtcIlbcfix_IndexConvEnc(
- WebRtc_Word16 *index /* (i/o) Codebook indexes */
+ int16_t *index /* (i/o) Codebook indexes */
){
int k;
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/index_conv_enc.h b/webrtc/modules/audio_coding/codecs/ilbc/index_conv_enc.h
index d28a6e2..d686331 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/index_conv_enc.h
+++ b/webrtc/modules/audio_coding/codecs/ilbc/index_conv_enc.h
@@ -26,7 +26,7 @@
*---------------------------------------------------------------*/
void WebRtcIlbcfix_IndexConvEnc(
- WebRtc_Word16 *index /* (i/o) Codebook indexes */
+ int16_t *index /* (i/o) Codebook indexes */
);
#endif
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/init_decode.c b/webrtc/modules/audio_coding/codecs/ilbc/init_decode.c
index ea16a32..f76ade0 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/init_decode.c
+++ b/webrtc/modules/audio_coding/codecs/ilbc/init_decode.c
@@ -23,9 +23,9 @@
* Initiation of decoder instance.
*---------------------------------------------------------------*/
-WebRtc_Word16 WebRtcIlbcfix_InitDecode( /* (o) Number of decoded samples */
+int16_t WebRtcIlbcfix_InitDecode( /* (o) Number of decoded samples */
iLBC_Dec_Inst_t *iLBCdec_inst, /* (i/o) Decoder instance */
- WebRtc_Word16 mode, /* (i) frame size mode */
+ int16_t mode, /* (i) frame size mode */
int use_enhancer) { /* (i) 1: use enhancer, 0: no enhancer */
int i;
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/init_decode.h b/webrtc/modules/audio_coding/codecs/ilbc/init_decode.h
index 3452f34..2938c74 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/init_decode.h
+++ b/webrtc/modules/audio_coding/codecs/ilbc/init_decode.h
@@ -25,9 +25,9 @@
* Initiation of decoder instance.
*---------------------------------------------------------------*/
-WebRtc_Word16 WebRtcIlbcfix_InitDecode( /* (o) Number of decoded samples */
+int16_t WebRtcIlbcfix_InitDecode( /* (o) Number of decoded samples */
iLBC_Dec_Inst_t *iLBCdec_inst, /* (i/o) Decoder instance */
- WebRtc_Word16 mode, /* (i) frame size mode */
+ int16_t mode, /* (i) frame size mode */
int use_enhancer /* (i) 1 to use enhancer
0 to run without enhancer */
);
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/init_encode.c b/webrtc/modules/audio_coding/codecs/ilbc/init_encode.c
index e1c0fc6..f6da158 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/init_encode.c
+++ b/webrtc/modules/audio_coding/codecs/ilbc/init_encode.c
@@ -23,9 +23,9 @@
* Initiation of encoder instance.
*---------------------------------------------------------------*/
-WebRtc_Word16 WebRtcIlbcfix_InitEncode( /* (o) Number of bytes encoded */
+int16_t WebRtcIlbcfix_InitEncode( /* (o) Number of bytes encoded */
iLBC_Enc_Inst_t *iLBCenc_inst, /* (i/o) Encoder instance */
- WebRtc_Word16 mode) { /* (i) frame size mode */
+ int16_t mode) { /* (i) frame size mode */
iLBCenc_inst->mode = mode;
/* Set all the variables that are dependent on the frame size mode */
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/init_encode.h b/webrtc/modules/audio_coding/codecs/ilbc/init_encode.h
index f1d1858..562efb1 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/init_encode.h
+++ b/webrtc/modules/audio_coding/codecs/ilbc/init_encode.h
@@ -25,9 +25,9 @@
* Initiation of encoder instance.
*---------------------------------------------------------------*/
-WebRtc_Word16 WebRtcIlbcfix_InitEncode( /* (o) Number of bytes encoded */
+int16_t WebRtcIlbcfix_InitEncode( /* (o) Number of bytes encoded */
iLBC_Enc_Inst_t *iLBCenc_inst, /* (i/o) Encoder instance */
- WebRtc_Word16 mode /* (i) frame size mode */
+ int16_t mode /* (i) frame size mode */
);
#endif
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/interface/ilbc.h b/webrtc/modules/audio_coding/codecs/ilbc/interface/ilbc.h
index 6208962..9ab2e86 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/interface/ilbc.h
+++ b/webrtc/modules/audio_coding/codecs/ilbc/interface/ilbc.h
@@ -60,12 +60,12 @@
* -1 - Error
*/
- WebRtc_Word16 WebRtcIlbcfix_EncoderAssign(iLBC_encinst_t **iLBC_encinst,
- WebRtc_Word16 *ILBCENC_inst_Addr,
- WebRtc_Word16 *size);
- WebRtc_Word16 WebRtcIlbcfix_DecoderAssign(iLBC_decinst_t **iLBC_decinst,
- WebRtc_Word16 *ILBCDEC_inst_Addr,
- WebRtc_Word16 *size);
+ int16_t WebRtcIlbcfix_EncoderAssign(iLBC_encinst_t **iLBC_encinst,
+ int16_t *ILBCENC_inst_Addr,
+ int16_t *size);
+ int16_t WebRtcIlbcfix_DecoderAssign(iLBC_decinst_t **iLBC_decinst,
+ int16_t *ILBCDEC_inst_Addr,
+ int16_t *size);
/****************************************************************************
@@ -80,8 +80,8 @@
* -1 - Error
*/
- WebRtc_Word16 WebRtcIlbcfix_EncoderCreate(iLBC_encinst_t **iLBC_encinst);
- WebRtc_Word16 WebRtcIlbcfix_DecoderCreate(iLBC_decinst_t **iLBC_decinst);
+ int16_t WebRtcIlbcfix_EncoderCreate(iLBC_encinst_t **iLBC_encinst);
+ int16_t WebRtcIlbcfix_DecoderCreate(iLBC_decinst_t **iLBC_decinst);
/****************************************************************************
* WebRtcIlbcfix_XxxFree(...)
@@ -95,8 +95,8 @@
* -1 - Error
*/
- WebRtc_Word16 WebRtcIlbcfix_EncoderFree(iLBC_encinst_t *iLBC_encinst);
- WebRtc_Word16 WebRtcIlbcfix_DecoderFree(iLBC_decinst_t *iLBC_decinst);
+ int16_t WebRtcIlbcfix_EncoderFree(iLBC_encinst_t *iLBC_encinst);
+ int16_t WebRtcIlbcfix_DecoderFree(iLBC_decinst_t *iLBC_decinst);
/****************************************************************************
@@ -113,8 +113,8 @@
* -1 - Error
*/
- WebRtc_Word16 WebRtcIlbcfix_EncoderInit(iLBC_encinst_t *iLBCenc_inst,
- WebRtc_Word16 frameLen);
+ int16_t WebRtcIlbcfix_EncoderInit(iLBC_encinst_t *iLBCenc_inst,
+ int16_t frameLen);
/****************************************************************************
* WebRtcIlbcfix_Encode(...)
@@ -135,10 +135,10 @@
* -1 - Error
*/
- WebRtc_Word16 WebRtcIlbcfix_Encode(iLBC_encinst_t *iLBCenc_inst,
- const WebRtc_Word16 *speechIn,
- WebRtc_Word16 len,
- WebRtc_Word16 *encoded);
+ int16_t WebRtcIlbcfix_Encode(iLBC_encinst_t *iLBCenc_inst,
+ const int16_t *speechIn,
+ int16_t len,
+ int16_t *encoded);
/****************************************************************************
* WebRtcIlbcfix_DecoderInit(...)
@@ -156,10 +156,10 @@
* -1 - Error
*/
- WebRtc_Word16 WebRtcIlbcfix_DecoderInit(iLBC_decinst_t *iLBCdec_inst,
- WebRtc_Word16 frameLen);
- WebRtc_Word16 WebRtcIlbcfix_DecoderInit20Ms(iLBC_decinst_t *iLBCdec_inst);
- WebRtc_Word16 WebRtcIlbcfix_Decoderinit30Ms(iLBC_decinst_t *iLBCdec_inst);
+ int16_t WebRtcIlbcfix_DecoderInit(iLBC_decinst_t *iLBCdec_inst,
+ int16_t frameLen);
+ int16_t WebRtcIlbcfix_DecoderInit20Ms(iLBC_decinst_t *iLBCdec_inst);
+ int16_t WebRtcIlbcfix_Decoderinit30Ms(iLBC_decinst_t *iLBCdec_inst);
/****************************************************************************
* WebRtcIlbcfix_Decode(...)
@@ -181,21 +181,21 @@
* -1 - Error
*/
- WebRtc_Word16 WebRtcIlbcfix_Decode(iLBC_decinst_t *iLBCdec_inst,
- const WebRtc_Word16* encoded,
- WebRtc_Word16 len,
- WebRtc_Word16 *decoded,
- WebRtc_Word16 *speechType);
- WebRtc_Word16 WebRtcIlbcfix_Decode20Ms(iLBC_decinst_t *iLBCdec_inst,
- const WebRtc_Word16 *encoded,
- WebRtc_Word16 len,
- WebRtc_Word16 *decoded,
- WebRtc_Word16 *speechType);
- WebRtc_Word16 WebRtcIlbcfix_Decode30Ms(iLBC_decinst_t *iLBCdec_inst,
- const WebRtc_Word16 *encoded,
- WebRtc_Word16 len,
- WebRtc_Word16 *decoded,
- WebRtc_Word16 *speechType);
+ int16_t WebRtcIlbcfix_Decode(iLBC_decinst_t *iLBCdec_inst,
+ const int16_t* encoded,
+ int16_t len,
+ int16_t *decoded,
+ int16_t *speechType);
+ int16_t WebRtcIlbcfix_Decode20Ms(iLBC_decinst_t *iLBCdec_inst,
+ const int16_t *encoded,
+ int16_t len,
+ int16_t *decoded,
+ int16_t *speechType);
+ int16_t WebRtcIlbcfix_Decode30Ms(iLBC_decinst_t *iLBCdec_inst,
+ const int16_t *encoded,
+ int16_t len,
+ int16_t *decoded,
+ int16_t *speechType);
/****************************************************************************
* WebRtcIlbcfix_DecodePlc(...)
@@ -215,9 +215,9 @@
* -1 - Error
*/
- WebRtc_Word16 WebRtcIlbcfix_DecodePlc(iLBC_decinst_t *iLBCdec_inst,
- WebRtc_Word16 *decoded,
- WebRtc_Word16 noOfLostFrames);
+ int16_t WebRtcIlbcfix_DecodePlc(iLBC_decinst_t *iLBCdec_inst,
+ int16_t *decoded,
+ int16_t noOfLostFrames);
/****************************************************************************
* WebRtcIlbcfix_NetEqPlc(...)
@@ -237,9 +237,9 @@
* -1 - Error
*/
- WebRtc_Word16 WebRtcIlbcfix_NetEqPlc(iLBC_decinst_t *iLBCdec_inst,
- WebRtc_Word16 *decoded,
- WebRtc_Word16 noOfLostFrames);
+ int16_t WebRtcIlbcfix_NetEqPlc(iLBC_decinst_t *iLBCdec_inst,
+ int16_t *decoded,
+ int16_t noOfLostFrames);
/****************************************************************************
* WebRtcIlbcfix_version(...)
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/interpolate.c b/webrtc/modules/audio_coding/codecs/ilbc/interpolate.c
index 11cb33c..b6ea201 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/interpolate.c
+++ b/webrtc/modules/audio_coding/codecs/ilbc/interpolate.c
@@ -24,14 +24,14 @@
*---------------------------------------------------------------*/
void WebRtcIlbcfix_Interpolate(
- WebRtc_Word16 *out, /* (o) output vector */
- WebRtc_Word16 *in1, /* (i) first input vector */
- WebRtc_Word16 *in2, /* (i) second input vector */
- WebRtc_Word16 coef, /* (i) weight coefficient in Q14 */
- WebRtc_Word16 length) /* (i) number of sample is vectors */
+ int16_t *out, /* (o) output vector */
+ int16_t *in1, /* (i) first input vector */
+ int16_t *in2, /* (i) second input vector */
+ int16_t coef, /* (i) weight coefficient in Q14 */
+ int16_t length) /* (i) number of sample is vectors */
{
int i;
- WebRtc_Word16 invcoef;
+ int16_t invcoef;
/*
Performs the operation out[i] = in[i]*coef + (1-coef)*in2[i] (with rounding)
@@ -39,7 +39,7 @@
invcoef = 16384 - coef; /* 16384 = 1.0 (Q14)*/
for (i = 0; i < length; i++) {
- out[i] = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(
+ out[i] = (int16_t) WEBRTC_SPL_RSHIFT_W32(
(WEBRTC_SPL_MUL_16_16(coef, in1[i]) + WEBRTC_SPL_MUL_16_16(invcoef, in2[i]))+8192,
14);
}
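
Editor's note: the interpolation above computes out[i] = coef*in1[i] + (1-coef)*in2[i] entirely in Q14: invcoef = 16384 - coef, and the +8192 / >>14 pair rounds the Q14 products back to the sample domain. A standalone sketch with made-up inputs:

```c
#include <stdint.h>
#include <stdio.h>

/* out[i] = coef*in1[i] + (1-coef)*in2[i], coef in Q14, with rounding. */
static void interpolate_q14(int16_t *out, const int16_t *in1,
                            const int16_t *in2, int16_t coef_q14, int len) {
  int16_t invcoef = (int16_t)(16384 - coef_q14);   /* 1.0 - coef in Q14 */
  for (int i = 0; i < len; i++) {
    int32_t acc = (int32_t)coef_q14 * in1[i] + (int32_t)invcoef * in2[i];
    out[i] = (int16_t)((acc + 8192) >> 14);
  }
}

int main(void) {
  int16_t a[3] = {1000, -2000, 4000};
  int16_t b[3] = {0, 0, 0};
  int16_t out[3];
  interpolate_q14(out, a, b, 4096, 3);             /* coef = 0.25 */
  printf("%d %d %d\n", out[0], out[1], out[2]);    /* 250 -500 1000 */
  return 0;
}
```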
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/interpolate.h b/webrtc/modules/audio_coding/codecs/ilbc/interpolate.h
index a12021c..0483232 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/interpolate.h
+++ b/webrtc/modules/audio_coding/codecs/ilbc/interpolate.h
@@ -26,10 +26,10 @@
*---------------------------------------------------------------*/
void WebRtcIlbcfix_Interpolate(
- WebRtc_Word16 *out, /* (o) output vector */
- WebRtc_Word16 *in1, /* (i) first input vector */
- WebRtc_Word16 *in2, /* (i) second input vector */
- WebRtc_Word16 coef, /* (i) weight coefficient in Q14 */
- WebRtc_Word16 length); /* (i) number of sample is vectors */
+ int16_t *out, /* (o) output vector */
+ int16_t *in1, /* (i) first input vector */
+ int16_t *in2, /* (i) second input vector */
+ int16_t coef, /* (i) weight coefficient in Q14 */
+ int16_t length); /* (i) number of sample is vectors */
#endif
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/interpolate_samples.c b/webrtc/modules/audio_coding/codecs/ilbc/interpolate_samples.c
index 31eb52e..219eda7 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/interpolate_samples.c
+++ b/webrtc/modules/audio_coding/codecs/ilbc/interpolate_samples.c
@@ -20,12 +20,12 @@
#include "constants.h"
void WebRtcIlbcfix_InterpolateSamples(
- WebRtc_Word16 *interpSamples, /* (o) The interpolated samples */
- WebRtc_Word16 *CBmem, /* (i) The CB memory */
- WebRtc_Word16 lMem /* (i) Length of the CB memory */
+ int16_t *interpSamples, /* (o) The interpolated samples */
+ int16_t *CBmem, /* (i) The CB memory */
+ int16_t lMem /* (i) Length of the CB memory */
) {
- WebRtc_Word16 *ppi, *ppo, i, j, temp1, temp2;
- WebRtc_Word16 *tmpPtr;
+ int16_t *ppi, *ppo, i, j, temp1, temp2;
+ int16_t *tmpPtr;
/* Calculate the 20 vectors of interpolated samples (4 samples each)
that are used in the codebooks for lag 20 to 39 */
@@ -37,8 +37,8 @@
ppi = CBmem+lMem-j-24;
for (i=0; i<4; i++) {
- *tmpPtr++ = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(WebRtcIlbcfix_kAlpha[temp2],*ppo, 15) +
- (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(WebRtcIlbcfix_kAlpha[temp1], *ppi, 15);
+ *tmpPtr++ = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(WebRtcIlbcfix_kAlpha[temp2],*ppo, 15) +
+ (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(WebRtcIlbcfix_kAlpha[temp1], *ppi, 15);
ppo++;
ppi++;
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/interpolate_samples.h b/webrtc/modules/audio_coding/codecs/ilbc/interpolate_samples.h
index 5c98aaf..586c27d 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/interpolate_samples.h
+++ b/webrtc/modules/audio_coding/codecs/ilbc/interpolate_samples.h
@@ -26,9 +26,9 @@
*---------------------------------------------------------------*/
void WebRtcIlbcfix_InterpolateSamples(
- WebRtc_Word16 *interpSamples, /* (o) The interpolated samples */
- WebRtc_Word16 *CBmem, /* (i) The CB memory */
- WebRtc_Word16 lMem /* (i) Length of the CB memory */
+ int16_t *interpSamples, /* (o) The interpolated samples */
+ int16_t *CBmem, /* (i) The CB memory */
+ int16_t lMem /* (i) Length of the CB memory */
);
#endif
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/lpc_encode.c b/webrtc/modules/audio_coding/codecs/ilbc/lpc_encode.c
index 73d67a0..8f9a3c0 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/lpc_encode.c
+++ b/webrtc/modules/audio_coding/codecs/ilbc/lpc_encode.c
@@ -28,18 +28,18 @@
*---------------------------------------------------------------*/
void WebRtcIlbcfix_LpcEncode(
- WebRtc_Word16 *syntdenum, /* (i/o) synthesis filter coefficients
+ int16_t *syntdenum, /* (i/o) synthesis filter coefficients
before/after encoding */
- WebRtc_Word16 *weightdenum, /* (i/o) weighting denumerator coefficients
+ int16_t *weightdenum, /* (i/o) weighting denumerator coefficients
before/after encoding */
- WebRtc_Word16 *lsf_index, /* (o) lsf quantization index */
- WebRtc_Word16 *data, /* (i) Speech to do LPC analysis on */
+ int16_t *lsf_index, /* (o) lsf quantization index */
+ int16_t *data, /* (i) Speech to do LPC analysis on */
iLBC_Enc_Inst_t *iLBCenc_inst
/* (i/o) the encoder state structure */
) {
/* Stack based */
- WebRtc_Word16 lsf[LPC_FILTERORDER * LPC_N_MAX];
- WebRtc_Word16 lsfdeq[LPC_FILTERORDER * LPC_N_MAX];
+ int16_t lsf[LPC_FILTERORDER * LPC_N_MAX];
+ int16_t lsfdeq[LPC_FILTERORDER * LPC_N_MAX];
/* Calculate LSF's from the input speech */
WebRtcIlbcfix_SimpleLpcAnalysis(lsf, data, iLBCenc_inst);
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/lpc_encode.h b/webrtc/modules/audio_coding/codecs/ilbc/lpc_encode.h
index 36967a3..8d87b0a 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/lpc_encode.h
+++ b/webrtc/modules/audio_coding/codecs/ilbc/lpc_encode.h
@@ -26,12 +26,12 @@
*---------------------------------------------------------------*/
void WebRtcIlbcfix_LpcEncode(
- WebRtc_Word16 *syntdenum, /* (i/o) synthesis filter coefficients
+ int16_t *syntdenum, /* (i/o) synthesis filter coefficients
before/after encoding */
- WebRtc_Word16 *weightdenum, /* (i/o) weighting denumerator coefficients
+ int16_t *weightdenum, /* (i/o) weighting denumerator coefficients
before/after encoding */
- WebRtc_Word16 *lsf_index, /* (o) lsf quantization index */
- WebRtc_Word16 *data, /* (i) Speech to do LPC analysis on */
+ int16_t *lsf_index, /* (o) lsf quantization index */
+ int16_t *data, /* (i) Speech to do LPC analysis on */
iLBC_Enc_Inst_t *iLBCenc_inst
/* (i/o) the encoder state structure */
);
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/lsf_check.c b/webrtc/modules/audio_coding/codecs/ilbc/lsf_check.c
index 7097d74..0e3bd84 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/lsf_check.c
+++ b/webrtc/modules/audio_coding/codecs/ilbc/lsf_check.c
@@ -24,15 +24,15 @@
*---------------------------------------------------------------*/
int WebRtcIlbcfix_LsfCheck(
- WebRtc_Word16 *lsf, /* LSF parameters */
+ int16_t *lsf, /* LSF parameters */
int dim, /* dimension of LSF */
int NoAn) /* No of analysis per frame */
{
int k,n,m, Nit=2, change=0,pos;
- const WebRtc_Word16 eps=319; /* 0.039 in Q13 (50 Hz)*/
- const WebRtc_Word16 eps2=160; /* eps/2.0 in Q13;*/
- const WebRtc_Word16 maxlsf=25723; /* 3.14; (4000 Hz)*/
- const WebRtc_Word16 minlsf=82; /* 0.01; (0 Hz)*/
+ const int16_t eps=319; /* 0.039 in Q13 (50 Hz)*/
+ const int16_t eps2=160; /* eps/2.0 in Q13;*/
+ const int16_t maxlsf=25723; /* 3.14; (4000 Hz)*/
+ const int16_t minlsf=82; /* 0.01; (0 Hz)*/
/* LSF separation check*/
for (n=0;n<Nit;n++) { /* Run through a 2 times */
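The constants above encode the separation rule in Q13: eps is roughly 50 Hz, and minlsf/maxlsf keep the LSFs away from 0 and pi. The repair loop itself lies outside this hunk; the following is a hypothetical single-pass sketch of a rule with that shape (illustrative names), not the exact WebRtcIlbcfix_LsfCheck body:

#include <stdint.h>

/* Hypothetical LSF separation repair in Q13; the real function runs the
 * pass Nit times per analysis and counts changes. */
static int lsf_separate_q13(int16_t *lsf, int dim) {
  const int16_t eps = 319;      /* 0.039 rad in Q13 (~50 Hz) */
  const int16_t eps2 = 160;     /* eps / 2 */
  const int16_t maxlsf = 25723; /* ~pi in Q13 (4000 Hz) */
  const int16_t minlsf = 82;    /* ~0.01 rad in Q13 (near 0 Hz) */
  int changed = 0;
  for (int k = 0; k < dim - 1; k++) {
    if (lsf[k + 1] - lsf[k] < eps) {            /* too close: push apart */
      int16_t mid = (int16_t)((lsf[k] + lsf[k + 1]) >> 1);
      lsf[k] = (int16_t)(mid - eps2);
      lsf[k + 1] = (int16_t)(mid + eps2);
      changed++;
    }
  }
  for (int k = 0; k < dim; k++) {               /* keep inside (0, pi) */
    if (lsf[k] < minlsf) { lsf[k] = minlsf; changed++; }
    if (lsf[k] > maxlsf) { lsf[k] = maxlsf; changed++; }
  }
  return changed;
}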
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/lsf_check.h b/webrtc/modules/audio_coding/codecs/ilbc/lsf_check.h
index 830bbed..2f4ac8c 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/lsf_check.h
+++ b/webrtc/modules/audio_coding/codecs/ilbc/lsf_check.h
@@ -26,7 +26,7 @@
*---------------------------------------------------------------*/
int WebRtcIlbcfix_LsfCheck(
- WebRtc_Word16 *lsf, /* LSF parameters */
+ int16_t *lsf, /* LSF parameters */
int dim, /* dimension of LSF */
int NoAn); /* No of analysis per frame */
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/lsf_interpolate_to_poly_dec.c b/webrtc/modules/audio_coding/codecs/ilbc/lsf_interpolate_to_poly_dec.c
index 3bb23d0..66bbde8 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/lsf_interpolate_to_poly_dec.c
+++ b/webrtc/modules/audio_coding/codecs/ilbc/lsf_interpolate_to_poly_dec.c
@@ -25,14 +25,14 @@
*---------------------------------------------------------------*/
void WebRtcIlbcfix_LspInterpolate2PolyDec(
- WebRtc_Word16 *a, /* (o) lpc coefficients Q12 */
- WebRtc_Word16 *lsf1, /* (i) first set of lsf coefficients Q13 */
- WebRtc_Word16 *lsf2, /* (i) second set of lsf coefficients Q13 */
- WebRtc_Word16 coef, /* (i) weighting coefficient to use between
+ int16_t *a, /* (o) lpc coefficients Q12 */
+ int16_t *lsf1, /* (i) first set of lsf coefficients Q13 */
+ int16_t *lsf2, /* (i) second set of lsf coefficients Q13 */
+ int16_t coef, /* (i) weighting coefficient to use between
lsf1 and lsf2 Q14 */
- WebRtc_Word16 length /* (i) length of coefficient vectors */
+ int16_t length /* (i) length of coefficient vectors */
){
- WebRtc_Word16 lsftmp[LPC_FILTERORDER];
+ int16_t lsftmp[LPC_FILTERORDER];
/* interpolate LSF */
WebRtcIlbcfix_Interpolate(lsftmp, lsf1, lsf2, coef, length);
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/lsf_interpolate_to_poly_dec.h b/webrtc/modules/audio_coding/codecs/ilbc/lsf_interpolate_to_poly_dec.h
index 23fe3a7..3540c1c 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/lsf_interpolate_to_poly_dec.h
+++ b/webrtc/modules/audio_coding/codecs/ilbc/lsf_interpolate_to_poly_dec.h
@@ -26,12 +26,12 @@
*---------------------------------------------------------------*/
void WebRtcIlbcfix_LspInterpolate2PolyDec(
- WebRtc_Word16 *a, /* (o) lpc coefficients Q12 */
- WebRtc_Word16 *lsf1, /* (i) first set of lsf coefficients Q13 */
- WebRtc_Word16 *lsf2, /* (i) second set of lsf coefficients Q13 */
- WebRtc_Word16 coef, /* (i) weighting coefficient to use between
+ int16_t *a, /* (o) lpc coefficients Q12 */
+ int16_t *lsf1, /* (i) first set of lsf coefficients Q13 */
+ int16_t *lsf2, /* (i) second set of lsf coefficients Q13 */
+ int16_t coef, /* (i) weighting coefficient to use between
lsf1 and lsf2 Q14 */
- WebRtc_Word16 length /* (i) length of coefficient vectors */
+ int16_t length /* (i) length of coefficient vectors */
);
#endif
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/lsf_interpolate_to_poly_enc.c b/webrtc/modules/audio_coding/codecs/ilbc/lsf_interpolate_to_poly_enc.c
index 3b0a34d..cf67ecc 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/lsf_interpolate_to_poly_enc.c
+++ b/webrtc/modules/audio_coding/codecs/ilbc/lsf_interpolate_to_poly_enc.c
@@ -26,15 +26,15 @@
*---------------------------------------------------------------*/
void WebRtcIlbcfix_LsfInterpolate2PloyEnc(
- WebRtc_Word16 *a, /* (o) lpc coefficients Q12 */
- WebRtc_Word16 *lsf1, /* (i) first set of lsf coefficients Q13 */
- WebRtc_Word16 *lsf2, /* (i) second set of lsf coefficients Q13 */
- WebRtc_Word16 coef, /* (i) weighting coefficient to use between
+ int16_t *a, /* (o) lpc coefficients Q12 */
+ int16_t *lsf1, /* (i) first set of lsf coefficients Q13 */
+ int16_t *lsf2, /* (i) second set of lsf coefficients Q13 */
+ int16_t coef, /* (i) weighting coefficient to use between
lsf1 and lsf2 Q14 */
- WebRtc_Word16 length /* (i) length of coefficient vectors */
+ int16_t length /* (i) length of coefficient vectors */
) {
/* Stack based */
- WebRtc_Word16 lsftmp[LPC_FILTERORDER];
+ int16_t lsftmp[LPC_FILTERORDER];
/* interpolate LSF */
WebRtcIlbcfix_Interpolate(lsftmp, lsf1, lsf2, coef, length);
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/lsf_interpolate_to_poly_enc.h b/webrtc/modules/audio_coding/codecs/ilbc/lsf_interpolate_to_poly_enc.h
index 1bbbb80..799c100 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/lsf_interpolate_to_poly_enc.h
+++ b/webrtc/modules/audio_coding/codecs/ilbc/lsf_interpolate_to_poly_enc.h
@@ -27,12 +27,12 @@
*---------------------------------------------------------------*/
void WebRtcIlbcfix_LsfInterpolate2PloyEnc(
- WebRtc_Word16 *a, /* (o) lpc coefficients Q12 */
- WebRtc_Word16 *lsf1, /* (i) first set of lsf coefficients Q13 */
- WebRtc_Word16 *lsf2, /* (i) second set of lsf coefficients Q13 */
- WebRtc_Word16 coef, /* (i) weighting coefficient to use between
+ int16_t *a, /* (o) lpc coefficients Q12 */
+ int16_t *lsf1, /* (i) first set of lsf coefficients Q13 */
+ int16_t *lsf2, /* (i) second set of lsf coefficients Q13 */
+ int16_t coef, /* (i) weighting coefficient to use between
lsf1 and lsf2 Q14 */
- WebRtc_Word16 length /* (i) length of coefficient vectors */
+ int16_t length /* (i) length of coefficient vectors */
);
#endif
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/lsf_to_lsp.c b/webrtc/modules/audio_coding/codecs/ilbc/lsf_to_lsp.c
index 84278a4..40737bb 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/lsf_to_lsp.c
+++ b/webrtc/modules/audio_coding/codecs/ilbc/lsf_to_lsp.c
@@ -24,19 +24,19 @@
*---------------------------------------------------------------*/
void WebRtcIlbcfix_Lsf2Lsp(
- WebRtc_Word16 *lsf, /* (i) lsf in Q13 values between 0 and pi */
- WebRtc_Word16 *lsp, /* (o) lsp in Q15 values between -1 and 1 */
- WebRtc_Word16 m /* (i) number of coefficients */
+ int16_t *lsf, /* (i) lsf in Q13 values between 0 and pi */
+ int16_t *lsp, /* (o) lsp in Q15 values between -1 and 1 */
+ int16_t m /* (i) number of coefficients */
) {
- WebRtc_Word16 i, k;
- WebRtc_Word16 diff; /* difference, which is used for the
+ int16_t i, k;
+ int16_t diff; /* difference, which is used for the
linear approximation (Q8) */
- WebRtc_Word16 freq; /* normalized frequency in Q15 (0..1) */
- WebRtc_Word32 tmpW32;
+ int16_t freq; /* normalized frequency in Q15 (0..1) */
+ int32_t tmpW32;
for(i=0; i<m; i++)
{
- freq = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(lsf[i], 20861, 15);
+ freq = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(lsf[i], 20861, 15);
/* 20861: 1.0/(2.0*PI) in Q17 */
/*
Upper 8 bits give the index k and
@@ -54,7 +54,7 @@
/* Calculate linear approximation */
tmpW32 = WEBRTC_SPL_MUL_16_16(WebRtcIlbcfix_kCosDerivative[k], diff);
- lsp[i] = WebRtcIlbcfix_kCos[k]+(WebRtc_Word16)(WEBRTC_SPL_RSHIFT_W32(tmpW32, 12));
+ lsp[i] = WebRtcIlbcfix_kCos[k]+(int16_t)(WEBRTC_SPL_RSHIFT_W32(tmpW32, 12));
}
return;
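The conversion above is lsp = cos(lsf) evaluated with a 64-entry cosine table plus a first-order correction: the Q13 LSF is first mapped to a normalized Q15 frequency (20861 ~ 1/(2*pi) in Q17), the upper bits of that frequency select the table entry and the remainder drives the linear term. A sketch of that structure, where the index/remainder split is inferred from the comments in the original file and kCos / kCosDerivative stand in for the WebRtcIlbcfix tables:

#include <stdint.h>

/* Table-plus-slope evaluation of cos(lsf); lsf_q13 is assumed to stay below
 * pi (the LSF check caps it), so the index stays within the 64-entry table. */
static int16_t lsf_to_lsp_q15(int16_t lsf_q13,
                              const int16_t *kCos,            /* 64 entries, Q15 */
                              const int16_t *kCosDerivative) {
  int16_t freq = (int16_t)(((int32_t)lsf_q13 * 20861) >> 15); /* Q13*Q17 >> 15 = Q15 */
  int16_t k = (int16_t)(freq >> 8);        /* upper bits: table index (inferred) */
  int16_t diff = (int16_t)(freq & 0xff);   /* lower bits: interpolation remainder */
  int32_t corr = (int32_t)kCosDerivative[k] * diff;
  return (int16_t)(kCos[k] + (corr >> 12));
}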
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/lsf_to_lsp.h b/webrtc/modules/audio_coding/codecs/ilbc/lsf_to_lsp.h
index db6549b..b2104d7 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/lsf_to_lsp.h
+++ b/webrtc/modules/audio_coding/codecs/ilbc/lsf_to_lsp.h
@@ -26,9 +26,9 @@
*---------------------------------------------------------------*/
void WebRtcIlbcfix_Lsf2Lsp(
- WebRtc_Word16 *lsf, /* (i) lsf in Q13 values between 0 and pi */
- WebRtc_Word16 *lsp, /* (o) lsp in Q15 values between -1 and 1 */
- WebRtc_Word16 m /* (i) number of coefficients */
+ int16_t *lsf, /* (i) lsf in Q13 values between 0 and pi */
+ int16_t *lsp, /* (o) lsp in Q15 values between -1 and 1 */
+ int16_t m /* (i) number of coefficients */
);
#endif
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/lsf_to_poly.c b/webrtc/modules/audio_coding/codecs/ilbc/lsf_to_poly.c
index f1c4a9e..acc5ac8 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/lsf_to_poly.c
+++ b/webrtc/modules/audio_coding/codecs/ilbc/lsf_to_poly.c
@@ -22,15 +22,15 @@
#include "constants.h"
void WebRtcIlbcfix_Lsf2Poly(
- WebRtc_Word16 *a, /* (o) predictor coefficients (order = 10) in Q12 */
- WebRtc_Word16 *lsf /* (i) line spectral frequencies in Q13 */
+ int16_t *a, /* (o) predictor coefficients (order = 10) in Q12 */
+ int16_t *lsf /* (i) line spectral frequencies in Q13 */
) {
- WebRtc_Word32 f[2][6]; /* f[0][] and f[1][] corresponds to
+ int32_t f[2][6]; /* f[0][] and f[1][] corresponds to
F1(z) and F2(z) respectivly */
- WebRtc_Word32 *f1ptr, *f2ptr;
- WebRtc_Word16 *a1ptr, *a2ptr;
- WebRtc_Word32 tmpW32;
- WebRtc_Word16 lsp[10];
+ int32_t *f1ptr, *f2ptr;
+ int16_t *a1ptr, *a2ptr;
+ int32_t tmpW32;
+ int16_t lsp[10];
int i;
/* Convert lsf to lsp */
@@ -71,10 +71,10 @@
for (i=5; i>0; i--)
{
tmpW32 = (*f1ptr) + (*f2ptr);
- (*a1ptr) = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32((tmpW32+4096),13);
+ (*a1ptr) = (int16_t)WEBRTC_SPL_RSHIFT_W32((tmpW32+4096),13);
tmpW32 = (*f1ptr) - (*f2ptr);
- (*a2ptr) = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32((tmpW32+4096),13);
+ (*a2ptr) = (int16_t)WEBRTC_SPL_RSHIFT_W32((tmpW32+4096),13);
a1ptr++;
a2ptr--;
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/lsf_to_poly.h b/webrtc/modules/audio_coding/codecs/ilbc/lsf_to_poly.h
index a00693b..d85f510 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/lsf_to_poly.h
+++ b/webrtc/modules/audio_coding/codecs/ilbc/lsf_to_poly.h
@@ -26,8 +26,8 @@
*---------------------------------------------------------------*/
void WebRtcIlbcfix_Lsf2Poly(
- WebRtc_Word16 *a, /* (o) predictor coefficients (order = 10) in Q12 */
- WebRtc_Word16 *lsf /* (i) line spectral frequencies in Q13 */
+ int16_t *a, /* (o) predictor coefficients (order = 10) in Q12 */
+ int16_t *lsf /* (i) line spectral frequencies in Q13 */
);
#endif
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/lsp_to_lsf.c b/webrtc/modules/audio_coding/codecs/ilbc/lsp_to_lsf.c
index 134afbb..7afa5af 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/lsp_to_lsf.c
+++ b/webrtc/modules/audio_coding/codecs/ilbc/lsp_to_lsf.c
@@ -24,17 +24,17 @@
*---------------------------------------------------------------*/
void WebRtcIlbcfix_Lsp2Lsf(
- WebRtc_Word16 *lsp, /* (i) lsp vector -1...+1 in Q15 */
- WebRtc_Word16 *lsf, /* (o) Lsf vector 0...Pi in Q13
+ int16_t *lsp, /* (i) lsp vector -1...+1 in Q15 */
+ int16_t *lsf, /* (o) Lsf vector 0...Pi in Q13
(ordered, so that lsf[i]<lsf[i+1]) */
- WebRtc_Word16 m /* (i) Number of coefficients */
+ int16_t m /* (i) Number of coefficients */
)
{
- WebRtc_Word16 i, k;
- WebRtc_Word16 diff; /* diff between table value and desired value (Q15) */
- WebRtc_Word16 freq; /* lsf/(2*pi) (Q16) */
- WebRtc_Word16 *lspPtr, *lsfPtr, *cosTblPtr;
- WebRtc_Word16 tmp;
+ int16_t i, k;
+ int16_t diff; /* diff between table value and desired value (Q15) */
+ int16_t freq; /* lsf/(2*pi) (Q16) */
+ int16_t *lspPtr, *lsfPtr, *cosTblPtr;
+ int16_t tmp;
/* set the index to maximum index value in WebRtcIlbcfix_kCos */
k = 63;
@@ -46,14 +46,14 @@
*/
lspPtr = &lsp[9];
lsfPtr = &lsf[9];
- cosTblPtr=(WebRtc_Word16*)&WebRtcIlbcfix_kCos[k];
+ cosTblPtr=(int16_t*)&WebRtcIlbcfix_kCos[k];
for(i=m-1; i>=0; i--)
{
/*
locate value in the table, which is just above lsp[i],
basically an approximation to acos(x)
*/
- while( (((WebRtc_Word32)(*cosTblPtr)-(*lspPtr)) < 0)&&(k>0) )
+ while( (((int32_t)(*cosTblPtr)-(*lspPtr)) < 0)&&(k>0) )
{
k-=1;
cosTblPtr--;
@@ -68,13 +68,13 @@
*/
/* tmp (linear offset) in Q16 */
- tmp = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(WebRtcIlbcfix_kAcosDerivative[k],diff, 11);
+ tmp = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(WebRtcIlbcfix_kAcosDerivative[k],diff, 11);
/* freq in Q16 */
- freq = (WebRtc_Word16)WEBRTC_SPL_LSHIFT_W16(k,9)+tmp;
+ freq = (int16_t)WEBRTC_SPL_LSHIFT_W16(k,9)+tmp;
/* lsf = freq*2*pi */
- (*lsfPtr) = (WebRtc_Word16)(((WebRtc_Word32)freq*25736)>>15);
+ (*lsfPtr) = (int16_t)(((int32_t)freq*25736)>>15);
lsfPtr--;
lspPtr--;
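The inverse mapping above is lsf = acos(lsp), done with the same cosine table searched backwards plus a linear correction, followed by a rescaling by 2*pi (25736 ~ 2*pi in Q12) to land in Q13. The floating-point relation it approximates is simply:

#include <math.h>

/* Floating-point reference for the fixed-point table search above. */
static double lsp_to_lsf_ref(double lsp /* -1..1 */) {
  return acos(lsp); /* radians in 0..pi, i.e. the Q13 LSF domain */
}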
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/lsp_to_lsf.h b/webrtc/modules/audio_coding/codecs/ilbc/lsp_to_lsf.h
index 97ba7e4..a2bcaff 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/lsp_to_lsf.h
+++ b/webrtc/modules/audio_coding/codecs/ilbc/lsp_to_lsf.h
@@ -26,10 +26,10 @@
*---------------------------------------------------------------*/
void WebRtcIlbcfix_Lsp2Lsf(
- WebRtc_Word16 *lsp, /* (i) lsp vector -1...+1 in Q15 */
- WebRtc_Word16 *lsf, /* (o) Lsf vector 0...Pi in Q13
+ int16_t *lsp, /* (i) lsp vector -1...+1 in Q15 */
+ int16_t *lsf, /* (o) Lsf vector 0...Pi in Q13
(ordered, so that lsf[i]<lsf[i+1]) */
- WebRtc_Word16 m /* (i) Number of coefficients */
+ int16_t m /* (i) Number of coefficients */
);
#endif
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/my_corr.c b/webrtc/modules/audio_coding/codecs/ilbc/my_corr.c
index 2162205..c6cd834 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/my_corr.c
+++ b/webrtc/modules/audio_coding/codecs/ilbc/my_corr.c
@@ -23,13 +23,13 @@
*---------------------------------------------------------------*/
void WebRtcIlbcfix_MyCorr(
- WebRtc_Word32 *corr, /* (o) correlation of seq1 and seq2 */
- WebRtc_Word16 *seq1, /* (i) first sequence */
- WebRtc_Word16 dim1, /* (i) dimension first seq1 */
- const WebRtc_Word16 *seq2, /* (i) second sequence */
- WebRtc_Word16 dim2 /* (i) dimension seq2 */
+ int32_t *corr, /* (o) correlation of seq1 and seq2 */
+ int16_t *seq1, /* (i) first sequence */
+ int16_t dim1, /* (i) dimension first seq1 */
+ const int16_t *seq2, /* (i) second sequence */
+ int16_t dim2 /* (i) dimension seq2 */
){
- WebRtc_Word16 max, scale, loops;
+ int16_t max, scale, loops;
/* Calculate correlation between the two sequences. Scale the
result of the multiplcication to maximum 26 bits in order
@@ -37,7 +37,7 @@
max=WebRtcSpl_MaxAbsValueW16(seq1, dim1);
scale=WebRtcSpl_GetSizeInBits(max);
- scale = (WebRtc_Word16)(WEBRTC_SPL_MUL_16_16(2,scale)-26);
+ scale = (int16_t)(WEBRTC_SPL_MUL_16_16(2,scale)-26);
if (scale<0) {
scale=0;
}
@@ -45,7 +45,7 @@
loops=dim1-dim2+1;
/* Calculate the cross correlations */
- WebRtcSpl_CrossCorrelation(corr, (WebRtc_Word16*)seq2, seq1, dim2, loops, scale, 1);
+ WebRtcSpl_CrossCorrelation(corr, (int16_t*)seq2, seq1, dim2, loops, scale, 1);
return;
}
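The pre-scaling above limits each 16x16 product to roughly 26 bits before it enters the 32-bit accumulation (scale = max(0, 2*bits(max|seq1|) - 26)). A standalone sketch of the resulting correlation, with illustrative names in place of WebRtcSpl_CrossCorrelation and the exact shift placement assumed rather than taken from the SPL source:

#include <stdint.h>

/* Cross-correlation of seq2 against every offset of seq1, each product
 * right-shifted by a precomputed scale so the int32_t sum cannot overflow. */
static void my_corr_sketch(int32_t *corr, const int16_t *seq1, int dim1,
                           const int16_t *seq2, int dim2, int scale) {
  int loops = dim1 - dim2 + 1;               /* one output per alignment */
  for (int lag = 0; lag < loops; lag++) {
    int32_t sum = 0;
    for (int n = 0; n < dim2; n++) {
      sum += ((int32_t)seq1[lag + n] * seq2[n]) >> scale;
    }
    corr[lag] = sum;
  }
}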
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/my_corr.h b/webrtc/modules/audio_coding/codecs/ilbc/my_corr.h
index f588c53..ee66998 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/my_corr.h
+++ b/webrtc/modules/audio_coding/codecs/ilbc/my_corr.h
@@ -26,11 +26,11 @@
*---------------------------------------------------------------*/
void WebRtcIlbcfix_MyCorr(
- WebRtc_Word32 *corr, /* (o) correlation of seq1 and seq2 */
- WebRtc_Word16 *seq1, /* (i) first sequence */
- WebRtc_Word16 dim1, /* (i) dimension first seq1 */
- const WebRtc_Word16 *seq2, /* (i) second sequence */
- WebRtc_Word16 dim2 /* (i) dimension seq2 */
+ int32_t *corr, /* (o) correlation of seq1 and seq2 */
+ int16_t *seq1, /* (i) first sequence */
+ int16_t dim1, /* (i) dimension first seq1 */
+ const int16_t *seq2, /* (i) second sequence */
+ int16_t dim2 /* (i) dimension seq2 */
);
#endif
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/nearest_neighbor.c b/webrtc/modules/audio_coding/codecs/ilbc/nearest_neighbor.c
index ea9e1eb..8d1272f 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/nearest_neighbor.c
+++ b/webrtc/modules/audio_coding/codecs/ilbc/nearest_neighbor.c
@@ -25,15 +25,15 @@
*---------------------------------------------------------------*/
void WebRtcIlbcfix_NearestNeighbor(
- WebRtc_Word16 *index, /* (o) index of array element closest to value */
- WebRtc_Word16 *array, /* (i) data array (Q2) */
- WebRtc_Word16 value, /* (i) value (Q2) */
- WebRtc_Word16 arlength /* (i) dimension of data array (==8) */
+ int16_t *index, /* (o) index of array element closest to value */
+ int16_t *array, /* (i) data array (Q2) */
+ int16_t value, /* (i) value (Q2) */
+ int16_t arlength /* (i) dimension of data array (==8) */
){
int i;
- WebRtc_Word16 diff;
+ int16_t diff;
/* Stack based */
- WebRtc_Word32 crit[8];
+ int32_t crit[8];
/* Calculate square distance */
for(i=0;i<arlength;i++){
@@ -42,5 +42,5 @@
}
/* Find the minimum square distance */
- *index=WebRtcSpl_MinIndexW32(crit, (WebRtc_Word16)arlength);
+ *index=WebRtcSpl_MinIndexW32(crit, (int16_t)arlength);
}
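The function above is a plain nearest-neighbour search: a squared distance per slot (the body is elided by the hunk but described by the comment), then the index of the minimum. An equivalent sketch without the SPL helpers:

#include <stdint.h>

/* Index of the array element closest to value; array and value are Q2. */
static void nearest_neighbor_sketch(int16_t *index, const int16_t *array,
                                    int16_t value, int16_t arlength) {
  int32_t best = INT32_MAX;
  *index = 0;
  for (int i = 0; i < arlength; i++) {
    int32_t d = (int32_t)array[i] - value;
    int32_t crit = d * d;                    /* squared distance (Q4) */
    if (crit < best) {
      best = crit;
      *index = (int16_t)i;
    }
  }
}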
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/nearest_neighbor.h b/webrtc/modules/audio_coding/codecs/ilbc/nearest_neighbor.h
index 705e17a..0c03470 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/nearest_neighbor.h
+++ b/webrtc/modules/audio_coding/codecs/ilbc/nearest_neighbor.h
@@ -28,10 +28,10 @@
*---------------------------------------------------------------*/
void WebRtcIlbcfix_NearestNeighbor(
- WebRtc_Word16 *index, /* (o) index of array element closest to value */
- WebRtc_Word16 *array, /* (i) data array (Q2) */
- WebRtc_Word16 value, /* (i) value (Q2) */
- WebRtc_Word16 arlength /* (i) dimension of data array (==8) */
+ int16_t *index, /* (o) index of array element closest to value */
+ int16_t *array, /* (i) data array (Q2) */
+ int16_t value, /* (i) value (Q2) */
+ int16_t arlength /* (i) dimension of data array (==8) */
);
#endif
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/pack_bits.c b/webrtc/modules/audio_coding/codecs/ilbc/pack_bits.c
index 3990fbe..9475ecb 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/pack_bits.c
+++ b/webrtc/modules/audio_coding/codecs/ilbc/pack_bits.c
@@ -23,24 +23,24 @@
*---------------------------------------------------------------*/
void WebRtcIlbcfix_PackBits(
- WebRtc_UWord16 *bitstream, /* (o) The packetized bitstream */
+ uint16_t *bitstream, /* (o) The packetized bitstream */
iLBC_bits *enc_bits, /* (i) Encoded bits */
- WebRtc_Word16 mode /* (i) Codec mode (20 or 30) */
+ int16_t mode /* (i) Codec mode (20 or 30) */
){
- WebRtc_UWord16 *bitstreamPtr;
+ uint16_t *bitstreamPtr;
int i, k;
- WebRtc_Word16 *tmpPtr;
+ int16_t *tmpPtr;
bitstreamPtr=bitstream;
/* Class 1 bits of ULP */
- /* First WebRtc_Word16 */
- (*bitstreamPtr) = ((WebRtc_UWord16)enc_bits->lsf[0])<<10; /* Bit 0..5 */
+ /* First int16_t */
+ (*bitstreamPtr) = ((uint16_t)enc_bits->lsf[0])<<10; /* Bit 0..5 */
(*bitstreamPtr) |= (enc_bits->lsf[1])<<3; /* Bit 6..12 */
(*bitstreamPtr) |= (enc_bits->lsf[2]&0x70)>>4; /* Bit 13..15 */
bitstreamPtr++;
- /* Second WebRtc_Word16 */
- (*bitstreamPtr) = ((WebRtc_UWord16)enc_bits->lsf[2]&0xF)<<12; /* Bit 0..3 */
+ /* Second int16_t */
+ (*bitstreamPtr) = ((uint16_t)enc_bits->lsf[2]&0xF)<<12; /* Bit 0..3 */
if (mode==20) {
(*bitstreamPtr) |= (enc_bits->startIdx)<<10; /* Bit 4..5 */
@@ -48,7 +48,7 @@
(*bitstreamPtr) |= (enc_bits->idxForMax)<<3; /* Bit 7..12 */
(*bitstreamPtr) |= ((enc_bits->cb_index[0])&0x70)>>4; /* Bit 13..15 */
bitstreamPtr++;
- /* Third WebRtc_Word16 */
+ /* Third int16_t */
(*bitstreamPtr) = ((enc_bits->cb_index[0])&0xE)<<12; /* Bit 0..2 */
(*bitstreamPtr) |= ((enc_bits->gain_index[0])&0x18)<<8; /* Bit 3..4 */
(*bitstreamPtr) |= ((enc_bits->gain_index[1])&0x8)<<7; /* Bit 5 */
@@ -60,15 +60,15 @@
(*bitstreamPtr) |= (enc_bits->lsf[3])<<6; /* Bit 4..9 */
(*bitstreamPtr) |= (enc_bits->lsf[4]&0x7E)>>1; /* Bit 10..15 */
bitstreamPtr++;
- /* Third WebRtc_Word16 */
- (*bitstreamPtr) = ((WebRtc_UWord16)enc_bits->lsf[4]&0x1)<<15; /* Bit 0 */
+ /* Third int16_t */
+ (*bitstreamPtr) = ((uint16_t)enc_bits->lsf[4]&0x1)<<15; /* Bit 0 */
(*bitstreamPtr) |= (enc_bits->lsf[5])<<8; /* Bit 1..7 */
(*bitstreamPtr) |= (enc_bits->startIdx)<<5; /* Bit 8..10 */
(*bitstreamPtr) |= (enc_bits->state_first)<<4; /* Bit 11 */
(*bitstreamPtr) |= ((enc_bits->idxForMax)&0x3C)>>2; /* Bit 12..15 */
bitstreamPtr++;
- /* 4:th WebRtc_Word16 */
- (*bitstreamPtr) = ((WebRtc_UWord16)enc_bits->idxForMax&0x3)<<14; /* Bit 0..1 */
+ /* 4:th int16_t */
+ (*bitstreamPtr) = ((uint16_t)enc_bits->idxForMax&0x3)<<14; /* Bit 0..1 */
(*bitstreamPtr) |= (enc_bits->cb_index[0]&0x78)<<7; /* Bit 2..5 */
(*bitstreamPtr) |= (enc_bits->gain_index[0]&0x10)<<5; /* Bit 6 */
(*bitstreamPtr) |= (enc_bits->gain_index[1]&0x8)<<5; /* Bit 7 */
@@ -77,14 +77,14 @@
(*bitstreamPtr) |= (enc_bits->gain_index[4]&0x8)>>3; /* Bit 15 */
}
/* Class 2 bits of ULP */
- /* 4:th to 6:th WebRtc_Word16 for 20 ms case
- 5:th to 7:th WebRtc_Word16 for 30 ms case */
+ /* 4:th to 6:th int16_t for 20 ms case
+ 5:th to 7:th int16_t for 30 ms case */
bitstreamPtr++;
tmpPtr=enc_bits->idxVec;
for (k=0; k<3; k++) {
(*bitstreamPtr) = 0;
for (i=15; i>=0; i--) {
- (*bitstreamPtr) |= ((WebRtc_UWord16)((*tmpPtr)&0x4)>>2)<<i;
+ (*bitstreamPtr) |= ((uint16_t)((*tmpPtr)&0x4)>>2)<<i;
/* Bit 15-i */
tmpPtr++;
}
@@ -92,10 +92,10 @@
}
if (mode==20) {
- /* 7:th WebRtc_Word16 */
+ /* 7:th int16_t */
(*bitstreamPtr) = 0;
for (i=15; i>6; i--) {
- (*bitstreamPtr) |= ((WebRtc_UWord16)((*tmpPtr)&0x4)>>2)<<i;
+ (*bitstreamPtr) |= ((uint16_t)((*tmpPtr)&0x4)>>2)<<i;
/* Bit 15-i */
tmpPtr++;
}
@@ -106,10 +106,10 @@
(*bitstreamPtr) |= (enc_bits->gain_index[7]&0xC)>>2; /* Bit 14..15 */
} else { /* mode==30 */
- /* 8:th WebRtc_Word16 */
+ /* 8:th int16_t */
(*bitstreamPtr) = 0;
for (i=15; i>5; i--) {
- (*bitstreamPtr) |= ((WebRtc_UWord16)((*tmpPtr)&0x4)>>2)<<i;
+ (*bitstreamPtr) |= ((uint16_t)((*tmpPtr)&0x4)>>2)<<i;
/* Bit 15-i */
tmpPtr++;
}
@@ -119,13 +119,13 @@
(*bitstreamPtr) |= (enc_bits->cb_index[3]&0x2); /* Bit 14 */
(*bitstreamPtr) |= (enc_bits->cb_index[6]&0x80)>>7; /* Bit 15 */
bitstreamPtr++;
- /* 9:th WebRtc_Word16 */
- (*bitstreamPtr) = ((WebRtc_UWord16)enc_bits->cb_index[6]&0x7E)<<9;/* Bit 0..5 */
+ /* 9:th int16_t */
+ (*bitstreamPtr) = ((uint16_t)enc_bits->cb_index[6]&0x7E)<<9;/* Bit 0..5 */
(*bitstreamPtr) |= (enc_bits->cb_index[9]&0xFE)<<2; /* Bit 6..12 */
(*bitstreamPtr) |= (enc_bits->cb_index[12]&0xE0)>>5; /* Bit 13..15 */
bitstreamPtr++;
- /* 10:th WebRtc_Word16 */
- (*bitstreamPtr) = ((WebRtc_UWord16)enc_bits->cb_index[12]&0x1E)<<11;/* Bit 0..3 */
+ /* 10:th int16_t */
+ (*bitstreamPtr) = ((uint16_t)enc_bits->cb_index[12]&0x1E)<<11;/* Bit 0..3 */
(*bitstreamPtr) |= (enc_bits->gain_index[3]&0xC)<<8; /* Bit 4..5 */
(*bitstreamPtr) |= (enc_bits->gain_index[4]&0x6)<<7; /* Bit 6..7 */
(*bitstreamPtr) |= (enc_bits->gain_index[6]&0x18)<<3; /* Bit 8..9 */
@@ -137,27 +137,27 @@
}
bitstreamPtr++;
/* Class 3 bits of ULP */
- /* 8:th to 14:th WebRtc_Word16 for 20 ms case
- 11:th to 17:th WebRtc_Word16 for 30 ms case */
+ /* 8:th to 14:th int16_t for 20 ms case
+ 11:th to 17:th int16_t for 30 ms case */
tmpPtr=enc_bits->idxVec;
for (k=0; k<7; k++) {
(*bitstreamPtr) = 0;
for (i=14; i>=0; i-=2) {
- (*bitstreamPtr) |= ((WebRtc_UWord16)((*tmpPtr)&0x3))<<i; /* Bit 15-i..14-i*/
+ (*bitstreamPtr) |= ((uint16_t)((*tmpPtr)&0x3))<<i; /* Bit 15-i..14-i*/
tmpPtr++;
}
bitstreamPtr++;
}
if (mode==20) {
- /* 15:th WebRtc_Word16 */
- (*bitstreamPtr) = ((WebRtc_UWord16)((enc_bits->idxVec[56])&0x3))<<14;/* Bit 0..1 */
+ /* 15:th int16_t */
+ (*bitstreamPtr) = ((uint16_t)((enc_bits->idxVec[56])&0x3))<<14;/* Bit 0..1 */
(*bitstreamPtr) |= (((enc_bits->cb_index[0])&1))<<13; /* Bit 2 */
(*bitstreamPtr) |= ((enc_bits->cb_index[1]))<<6; /* Bit 3..9 */
(*bitstreamPtr) |= ((enc_bits->cb_index[2])&0x7E)>>1; /* Bit 10..15 */
bitstreamPtr++;
- /* 16:th WebRtc_Word16 */
- (*bitstreamPtr) = ((WebRtc_UWord16)((enc_bits->cb_index[2])&0x1))<<15;
+ /* 16:th int16_t */
+ (*bitstreamPtr) = ((uint16_t)((enc_bits->cb_index[2])&0x1))<<15;
/* Bit 0 */
(*bitstreamPtr) |= ((enc_bits->gain_index[0])&0x7)<<12; /* Bit 1..3 */
(*bitstreamPtr) |= ((enc_bits->gain_index[1])&0x3)<<10; /* Bit 4..5 */
@@ -165,18 +165,18 @@
(*bitstreamPtr) |= ((enc_bits->cb_index[3])&0x1)<<6; /* Bit 9 */
(*bitstreamPtr) |= ((enc_bits->cb_index[4])&0x7E)>>1; /* Bit 10..15 */
bitstreamPtr++;
- /* 17:th WebRtc_Word16 */
- (*bitstreamPtr) = ((WebRtc_UWord16)((enc_bits->cb_index[4])&0x1))<<15;
+ /* 17:th int16_t */
+ (*bitstreamPtr) = ((uint16_t)((enc_bits->cb_index[4])&0x1))<<15;
/* Bit 0 */
(*bitstreamPtr) |= (enc_bits->cb_index[5])<<8; /* Bit 1..7 */
(*bitstreamPtr) |= (enc_bits->cb_index[6]); /* Bit 8..15 */
bitstreamPtr++;
- /* 18:th WebRtc_Word16 */
- (*bitstreamPtr) = ((WebRtc_UWord16)(enc_bits->cb_index[7]))<<8; /* Bit 0..7 */
+ /* 18:th int16_t */
+ (*bitstreamPtr) = ((uint16_t)(enc_bits->cb_index[7]))<<8; /* Bit 0..7 */
(*bitstreamPtr) |= (enc_bits->cb_index[8]); /* Bit 8..15 */
bitstreamPtr++;
- /* 19:th WebRtc_Word16 */
- (*bitstreamPtr) = ((WebRtc_UWord16)((enc_bits->gain_index[3])&0x3))<<14;
+ /* 19:th int16_t */
+ (*bitstreamPtr) = ((uint16_t)((enc_bits->gain_index[3])&0x3))<<14;
/* Bit 0..1 */
(*bitstreamPtr) |= ((enc_bits->gain_index[4])&0x3)<<12; /* Bit 2..3 */
(*bitstreamPtr) |= ((enc_bits->gain_index[5]))<<9; /* Bit 4..6 */
@@ -184,15 +184,15 @@
(*bitstreamPtr) |= ((enc_bits->gain_index[7])&0x3)<<4; /* Bit 10..11 */
(*bitstreamPtr) |= (enc_bits->gain_index[8])<<1; /* Bit 12..14 */
} else { /* mode==30 */
- /* 18:th WebRtc_Word16 */
- (*bitstreamPtr) = ((WebRtc_UWord16)((enc_bits->idxVec[56])&0x3))<<14;/* Bit 0..1 */
+ /* 18:th int16_t */
+ (*bitstreamPtr) = ((uint16_t)((enc_bits->idxVec[56])&0x3))<<14;/* Bit 0..1 */
(*bitstreamPtr) |= (((enc_bits->idxVec[57])&0x3))<<12; /* Bit 2..3 */
(*bitstreamPtr) |= (((enc_bits->cb_index[0])&1))<<11; /* Bit 4 */
(*bitstreamPtr) |= ((enc_bits->cb_index[1]))<<4; /* Bit 5..11 */
(*bitstreamPtr) |= ((enc_bits->cb_index[2])&0x78)>>3; /* Bit 12..15 */
bitstreamPtr++;
- /* 19:th WebRtc_Word16 */
- (*bitstreamPtr) = ((WebRtc_UWord16)(enc_bits->cb_index[2])&0x7)<<13;
+ /* 19:th int16_t */
+ (*bitstreamPtr) = ((uint16_t)(enc_bits->cb_index[2])&0x7)<<13;
/* Bit 0..2 */
(*bitstreamPtr) |= ((enc_bits->gain_index[0])&0x7)<<10; /* Bit 3..5 */
(*bitstreamPtr) |= ((enc_bits->gain_index[1])&0x3)<<8; /* Bit 6..7 */
@@ -200,36 +200,36 @@
(*bitstreamPtr) |= ((enc_bits->cb_index[3])&0x1)<<4; /* Bit 11 */
(*bitstreamPtr) |= ((enc_bits->cb_index[4])&0x78)>>3; /* Bit 12..15 */
bitstreamPtr++;
- /* 20:th WebRtc_Word16 */
- (*bitstreamPtr) = ((WebRtc_UWord16)(enc_bits->cb_index[4])&0x7)<<13;
+ /* 20:th int16_t */
+ (*bitstreamPtr) = ((uint16_t)(enc_bits->cb_index[4])&0x7)<<13;
/* Bit 0..2 */
(*bitstreamPtr) |= ((enc_bits->cb_index[5]))<<6; /* Bit 3..9 */
(*bitstreamPtr) |= ((enc_bits->cb_index[6])&0x1)<<5; /* Bit 10 */
(*bitstreamPtr) |= ((enc_bits->cb_index[7])&0xF8)>>3; /* Bit 11..15 */
bitstreamPtr++;
- /* 21:st WebRtc_Word16 */
- (*bitstreamPtr) = ((WebRtc_UWord16)(enc_bits->cb_index[7])&0x7)<<13;
+ /* 21:st int16_t */
+ (*bitstreamPtr) = ((uint16_t)(enc_bits->cb_index[7])&0x7)<<13;
/* Bit 0..2 */
(*bitstreamPtr) |= ((enc_bits->cb_index[8]))<<5; /* Bit 3..10 */
(*bitstreamPtr) |= ((enc_bits->cb_index[9])&0x1)<<4; /* Bit 11 */
(*bitstreamPtr) |= ((enc_bits->cb_index[10])&0xF0)>>4; /* Bit 12..15 */
bitstreamPtr++;
- /* 22:nd WebRtc_Word16 */
- (*bitstreamPtr) = ((WebRtc_UWord16)(enc_bits->cb_index[10])&0xF)<<12;
+ /* 22:nd int16_t */
+ (*bitstreamPtr) = ((uint16_t)(enc_bits->cb_index[10])&0xF)<<12;
/* Bit 0..3 */
(*bitstreamPtr) |= ((enc_bits->cb_index[11]))<<4; /* Bit 4..11 */
(*bitstreamPtr) |= ((enc_bits->cb_index[12])&0x1)<<3; /* Bit 12 */
(*bitstreamPtr) |= ((enc_bits->cb_index[13])&0xE0)>>5; /* Bit 13..15 */
bitstreamPtr++;
- /* 23:rd WebRtc_Word16 */
- (*bitstreamPtr) = ((WebRtc_UWord16)(enc_bits->cb_index[13])&0x1F)<<11;
+ /* 23:rd int16_t */
+ (*bitstreamPtr) = ((uint16_t)(enc_bits->cb_index[13])&0x1F)<<11;
/* Bit 0..4 */
(*bitstreamPtr) |= ((enc_bits->cb_index[14]))<<3; /* Bit 5..12 */
(*bitstreamPtr) |= ((enc_bits->gain_index[3])&0x3)<<1; /* Bit 13..14 */
(*bitstreamPtr) |= ((enc_bits->gain_index[4])&0x1); /* Bit 15 */
bitstreamPtr++;
- /* 24:rd WebRtc_Word16 */
- (*bitstreamPtr) = ((WebRtc_UWord16)(enc_bits->gain_index[5]))<<13;
+ /* 24:rd int16_t */
+ (*bitstreamPtr) = ((uint16_t)(enc_bits->gain_index[5]))<<13;
/* Bit 0..2 */
(*bitstreamPtr) |= ((enc_bits->gain_index[6])&0x7)<<10; /* Bit 3..5 */
(*bitstreamPtr) |= ((enc_bits->gain_index[7])&0x3)<<8; /* Bit 6..7 */
@@ -237,8 +237,8 @@
(*bitstreamPtr) |= ((enc_bits->gain_index[9])&0xF)<<1; /* Bit 11..14 */
(*bitstreamPtr) |= ((enc_bits->gain_index[10])&0x4)>>2; /* Bit 15 */
bitstreamPtr++;
- /* 25:rd WebRtc_Word16 */
- (*bitstreamPtr) = ((WebRtc_UWord16)(enc_bits->gain_index[10])&0x3)<<14;
+ /* 25:rd int16_t */
+ (*bitstreamPtr) = ((uint16_t)(enc_bits->gain_index[10])&0x3)<<14;
/* Bit 0..1 */
(*bitstreamPtr) |= ((enc_bits->gain_index[11]))<<11; /* Bit 2..4 */
(*bitstreamPtr) |= ((enc_bits->gain_index[12])&0xF)<<7; /* Bit 5..8 */
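All of the packing above follows the same MSB-first pattern: each field is shifted so that its most significant bit lands at the next free bit of the current 16-bit word, and fields that straddle a word boundary are split with masks. As a worked example, the first word of the layout (lsf[0] in bits 0..5, lsf[1] in bits 6..12, the top three bits of lsf[2] in bits 13..15) can be built like this, with an illustrative helper name:

#include <stdint.h>

/* First 16-bit word of the ULP class-1 bits, mirroring the shifts above. */
static uint16_t pack_first_word(uint16_t lsf0, uint16_t lsf1, uint16_t lsf2) {
  uint16_t w = (uint16_t)(lsf0 << 10);       /* 6-bit field at bits 0..5  */
  w |= (uint16_t)(lsf1 << 3);                /* 7-bit field at bits 6..12 */
  w |= (uint16_t)((lsf2 & 0x70) >> 4);       /* top 3 of 7 bits at 13..15 */
  return w;
}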
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/pack_bits.h b/webrtc/modules/audio_coding/codecs/ilbc/pack_bits.h
index ed3f224..603ddd4 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/pack_bits.h
+++ b/webrtc/modules/audio_coding/codecs/ilbc/pack_bits.h
@@ -26,9 +26,9 @@
*---------------------------------------------------------------*/
void WebRtcIlbcfix_PackBits(
- WebRtc_UWord16 *bitstream, /* (o) The packetized bitstream */
+ uint16_t *bitstream, /* (o) The packetized bitstream */
iLBC_bits *enc_bits, /* (i) Encoded bits */
- WebRtc_Word16 mode /* (i) Codec mode (20 or 30) */
+ int16_t mode /* (i) Codec mode (20 or 30) */
);
#endif
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/poly_to_lsf.c b/webrtc/modules/audio_coding/codecs/ilbc/poly_to_lsf.c
index fe91851..df8a78b 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/poly_to_lsf.c
+++ b/webrtc/modules/audio_coding/codecs/ilbc/poly_to_lsf.c
@@ -22,10 +22,10 @@
#include "lsp_to_lsf.h"
void WebRtcIlbcfix_Poly2Lsf(
- WebRtc_Word16 *lsf, /* (o) lsf coefficients (Q13) */
- WebRtc_Word16 *a /* (i) A coefficients (Q12) */
+ int16_t *lsf, /* (o) lsf coefficients (Q13) */
+ int16_t *a /* (i) A coefficients (Q12) */
) {
- WebRtc_Word16 lsp[10];
- WebRtcIlbcfix_Poly2Lsp(a, lsp, (WebRtc_Word16*)WebRtcIlbcfix_kLspMean);
+ int16_t lsp[10];
+ WebRtcIlbcfix_Poly2Lsp(a, lsp, (int16_t*)WebRtcIlbcfix_kLspMean);
WebRtcIlbcfix_Lsp2Lsf(lsp, lsf, 10);
}
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/poly_to_lsf.h b/webrtc/modules/audio_coding/codecs/ilbc/poly_to_lsf.h
index 0ea595e..5a7f7bb 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/poly_to_lsf.h
+++ b/webrtc/modules/audio_coding/codecs/ilbc/poly_to_lsf.h
@@ -26,8 +26,8 @@
*---------------------------------------------------------------*/
void WebRtcIlbcfix_Poly2Lsf(
- WebRtc_Word16 *lsf, /* (o) lsf coefficients (Q13) */
- WebRtc_Word16 *a /* (i) A coefficients (Q12) */
+ int16_t *lsf, /* (o) lsf coefficients (Q13) */
+ int16_t *a /* (i) A coefficients (Q12) */
);
#endif
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/poly_to_lsp.c b/webrtc/modules/audio_coding/codecs/ilbc/poly_to_lsp.c
index 29b4213..3add966 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/poly_to_lsp.c
+++ b/webrtc/modules/audio_coding/codecs/ilbc/poly_to_lsp.c
@@ -26,17 +26,17 @@
*---------------------------------------------------------------*/
void WebRtcIlbcfix_Poly2Lsp(
- WebRtc_Word16 *a, /* (o) A coefficients in Q12 */
- WebRtc_Word16 *lsp, /* (i) LSP coefficients in Q15 */
- WebRtc_Word16 *old_lsp /* (i) old LSP coefficients that are used if the new
+ int16_t *a, /* (o) A coefficients in Q12 */
+ int16_t *lsp, /* (i) LSP coefficients in Q15 */
+ int16_t *old_lsp /* (i) old LSP coefficients that are used if the new
coefficients turn out to be unstable */
) {
- WebRtc_Word16 f[2][6]; /* f[0][] represents f1 and f[1][] represents f2 */
- WebRtc_Word16 *a_i_ptr, *a_10mi_ptr;
- WebRtc_Word16 *f1ptr, *f2ptr;
- WebRtc_Word32 tmpW32;
- WebRtc_Word16 x, y, xlow, ylow, xmid, ymid, xhigh, yhigh, xint;
- WebRtc_Word16 shifts, sign;
+ int16_t f[2][6]; /* f[0][] represents f1 and f[1][] represents f2 */
+ int16_t *a_i_ptr, *a_10mi_ptr;
+ int16_t *f1ptr, *f2ptr;
+ int32_t tmpW32;
+ int16_t x, y, xlow, ylow, xmid, ymid, xhigh, yhigh, xint;
+ int16_t shifts, sign;
int i, j;
int foundFreqs;
int fi_select;
@@ -56,8 +56,8 @@
(*f1ptr) = 1024; /* 1.0 in Q10 */
(*f2ptr) = 1024; /* 1.0 in Q10 */
for (i = 0; i < 5; i++) {
- (*(f1ptr+1)) = (WebRtc_Word16)(WEBRTC_SPL_RSHIFT_W32(((WebRtc_Word32)(*a_i_ptr)+(*a_10mi_ptr)), 2) - (*f1ptr));
- (*(f2ptr+1)) = (WebRtc_Word16)(WEBRTC_SPL_RSHIFT_W32(((WebRtc_Word32)(*a_i_ptr)-(*a_10mi_ptr)), 2) + (*f2ptr));
+ (*(f1ptr+1)) = (int16_t)(WEBRTC_SPL_RSHIFT_W32(((int32_t)(*a_i_ptr)+(*a_10mi_ptr)), 2) - (*f1ptr));
+ (*(f2ptr+1)) = (int16_t)(WEBRTC_SPL_RSHIFT_W32(((int32_t)(*a_i_ptr)-(*a_10mi_ptr)), 2) + (*f2ptr));
a_i_ptr++;
a_10mi_ptr--;
f1ptr++;
@@ -116,25 +116,25 @@
} else {
sign = y;
y = WEBRTC_SPL_ABS_W16(y);
- shifts = (WebRtc_Word16)WebRtcSpl_NormW32(y)-16;
+ shifts = (int16_t)WebRtcSpl_NormW32(y)-16;
y = WEBRTC_SPL_LSHIFT_W16(y, shifts);
- y = (WebRtc_Word16)WebRtcSpl_DivW32W16(536838144, y); /* 1/(yhigh-ylow) */
+ y = (int16_t)WebRtcSpl_DivW32W16(536838144, y); /* 1/(yhigh-ylow) */
tmpW32 = WEBRTC_SPL_MUL_16_16_RSFT(x, y, (19-shifts));
/* y=(xhigh-xlow)/(yhigh-ylow) */
- y = (WebRtc_Word16)(tmpW32&0xFFFF);
+ y = (int16_t)(tmpW32&0xFFFF);
if (sign < 0) {
y = -y;
}
/* tmpW32 = ylow*(xhigh-xlow)/(yhigh-ylow) */
tmpW32 = WEBRTC_SPL_MUL_16_16_RSFT(ylow, y, 10);
- xint = xlow-(WebRtc_Word16)(tmpW32&0xFFFF);
+ xint = xlow-(int16_t)(tmpW32&0xFFFF);
}
/* Store the calculated lsp */
- lsp[foundFreqs] = (WebRtc_Word16)xint;
+ lsp[foundFreqs] = (int16_t)xint;
foundFreqs++;
/* if needed, set xlow and ylow for next recursion */
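The fixed-point arithmetic above (the reciprocal of 536838144 ~ 2^29, the normalization shifts, and the Q10 product) implements a single secant step between the bracketing points (xlow, ylow) and (xhigh, yhigh). In floating point the update is just:

/* Floating-point view of the zero-crossing refinement above. */
static double secant_step(double xlow, double ylow,
                          double xhigh, double yhigh) {
  double slope = (xhigh - xlow) / (yhigh - ylow); /* the fixed-point code guards the small-denominator case */
  return xlow - ylow * slope;
}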
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/poly_to_lsp.h b/webrtc/modules/audio_coding/codecs/ilbc/poly_to_lsp.h
index 7eebb25..ed20fd9 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/poly_to_lsp.h
+++ b/webrtc/modules/audio_coding/codecs/ilbc/poly_to_lsp.h
@@ -27,9 +27,9 @@
*---------------------------------------------------------------*/
void WebRtcIlbcfix_Poly2Lsp(
- WebRtc_Word16 *a, /* (o) A coefficients in Q12 */
- WebRtc_Word16 *lsp, /* (i) LSP coefficients in Q15 */
- WebRtc_Word16 *old_lsp /* (i) old LSP coefficients that are used if the new
+ int16_t *a, /* (o) A coefficients in Q12 */
+ int16_t *lsp, /* (i) LSP coefficients in Q15 */
+ int16_t *old_lsp /* (i) old LSP coefficients that are used if the new
coefficients turn out to be unstable */
);
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/refiner.c b/webrtc/modules/audio_coding/codecs/ilbc/refiner.c
index 9210092..50d30c9 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/refiner.c
+++ b/webrtc/modules/audio_coding/codecs/ilbc/refiner.c
@@ -30,26 +30,26 @@
*---------------------------------------------------------------*/
void WebRtcIlbcfix_Refiner(
- WebRtc_Word16 *updStartPos, /* (o) updated start point (Q-2) */
- WebRtc_Word16 *idata, /* (i) original data buffer */
- WebRtc_Word16 idatal, /* (i) dimension of idata */
- WebRtc_Word16 centerStartPos, /* (i) beginning center segment */
- WebRtc_Word16 estSegPos, /* (i) estimated beginning other segment (Q-2) */
- WebRtc_Word16 *surround, /* (i/o) The contribution from this sequence
+ int16_t *updStartPos, /* (o) updated start point (Q-2) */
+ int16_t *idata, /* (i) original data buffer */
+ int16_t idatal, /* (i) dimension of idata */
+ int16_t centerStartPos, /* (i) beginning center segment */
+ int16_t estSegPos, /* (i) estimated beginning other segment (Q-2) */
+ int16_t *surround, /* (i/o) The contribution from this sequence
summed with earlier contributions */
- WebRtc_Word16 gain /* (i) Gain to use for this sequence */
+ int16_t gain /* (i) Gain to use for this sequence */
){
- WebRtc_Word16 estSegPosRounded,searchSegStartPos,searchSegEndPos,corrdim;
- WebRtc_Word16 tloc,tloc2,i,st,en,fraction;
+ int16_t estSegPosRounded,searchSegStartPos,searchSegEndPos,corrdim;
+ int16_t tloc,tloc2,i,st,en,fraction;
- WebRtc_Word32 maxtemp, scalefact;
- WebRtc_Word16 *filtStatePtr, *polyPtr;
+ int32_t maxtemp, scalefact;
+ int16_t *filtStatePtr, *polyPtr;
/* Stack based */
- WebRtc_Word16 filt[7];
- WebRtc_Word32 corrVecUps[ENH_CORRDIM*ENH_UPS0];
- WebRtc_Word32 corrVecTemp[ENH_CORRDIM];
- WebRtc_Word16 vect[ENH_VECTL];
- WebRtc_Word16 corrVec[ENH_CORRDIM];
+ int16_t filt[7];
+ int32_t corrVecUps[ENH_CORRDIM*ENH_UPS0];
+ int32_t corrVecTemp[ENH_CORRDIM];
+ int16_t vect[ENH_VECTL];
+ int16_t corrVec[ENH_CORRDIM];
/* defining array bounds */
@@ -71,21 +71,21 @@
location of max */
WebRtcIlbcfix_MyCorr(corrVecTemp,idata+searchSegStartPos,
- (WebRtc_Word16)(corrdim+ENH_BLOCKL-1),idata+centerStartPos,ENH_BLOCKL);
+ (int16_t)(corrdim+ENH_BLOCKL-1),idata+centerStartPos,ENH_BLOCKL);
/* Calculate the rescaling factor for the correlation in order to
- put the correlation in a WebRtc_Word16 vector instead */
- maxtemp=WebRtcSpl_MaxAbsValueW32(corrVecTemp, (WebRtc_Word16)corrdim);
+ put the correlation in a int16_t vector instead */
+ maxtemp=WebRtcSpl_MaxAbsValueW32(corrVecTemp, (int16_t)corrdim);
scalefact=WebRtcSpl_GetSizeInBits(maxtemp)-15;
if (scalefact>0) {
for (i=0;i<corrdim;i++) {
- corrVec[i]=(WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(corrVecTemp[i], scalefact);
+ corrVec[i]=(int16_t)WEBRTC_SPL_RSHIFT_W32(corrVecTemp[i], scalefact);
}
} else {
for (i=0;i<corrdim;i++) {
- corrVec[i]=(WebRtc_Word16)corrVecTemp[i];
+ corrVec[i]=(int16_t)corrVecTemp[i];
}
}
/* In order to guarantee that all values are initialized */
@@ -97,11 +97,11 @@
WebRtcIlbcfix_EnhUpsample(corrVecUps,corrVec);
/* Find maximum */
- tloc=WebRtcSpl_MaxIndexW32(corrVecUps, (WebRtc_Word16) (ENH_UPS0*corrdim));
+ tloc=WebRtcSpl_MaxIndexW32(corrVecUps, (int16_t) (ENH_UPS0*corrdim));
/* make vector can be upsampled without ever running outside
bounds */
- *updStartPos = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16(searchSegStartPos,4) + tloc + 4;
+ *updStartPos = (int16_t)WEBRTC_SPL_MUL_16_16(searchSegStartPos,4) + tloc + 4;
tloc2 = WEBRTC_SPL_RSHIFT_W16((tloc+3), 2);
@@ -110,7 +110,7 @@
/* initialize the vector to be filtered, stuff with zeros
when data is outside idata buffer */
if(st<0){
- WebRtcSpl_MemSetW16(vect, 0, (WebRtc_Word16)(-st));
+ WebRtcSpl_MemSetW16(vect, 0, (int16_t)(-st));
WEBRTC_SPL_MEMCPY_W16(&vect[-st], idata, (ENH_VECTL+st));
}
else{
@@ -120,19 +120,19 @@
WEBRTC_SPL_MEMCPY_W16(vect, &idata[st],
(ENH_VECTL-(en-idatal)));
WebRtcSpl_MemSetW16(&vect[ENH_VECTL-(en-idatal)], 0,
- (WebRtc_Word16)(en-idatal));
+ (int16_t)(en-idatal));
}
else {
WEBRTC_SPL_MEMCPY_W16(vect, &idata[st], ENH_VECTL);
}
}
/* Calculate which of the 4 fractions to use */
- fraction=(WebRtc_Word16)WEBRTC_SPL_MUL_16_16(tloc2,ENH_UPS0)-tloc;
+ fraction=(int16_t)WEBRTC_SPL_MUL_16_16(tloc2,ENH_UPS0)-tloc;
/* compute the segment (this is actually a convolution) */
filtStatePtr = filt + 6;
- polyPtr = (WebRtc_Word16*)WebRtcIlbcfix_kEnhPolyPhaser[fraction];
+ polyPtr = (int16_t*)WebRtcIlbcfix_kEnhPolyPhaser[fraction];
for (i=0;i<7;i++) {
*filtStatePtr-- = *polyPtr++;
}
@@ -144,7 +144,7 @@
/* Add the contribution from this vector (scaled with gain) to the total surround vector */
WebRtcSpl_AddAffineVectorToVector(
surround, vect, gain,
- (WebRtc_Word32)32768, 16, ENH_BLOCKL);
+ (int32_t)32768, 16, ENH_BLOCKL);
return;
}
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/refiner.h b/webrtc/modules/audio_coding/codecs/ilbc/refiner.h
index 559555c..d139961 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/refiner.h
+++ b/webrtc/modules/audio_coding/codecs/ilbc/refiner.h
@@ -30,14 +30,14 @@
*---------------------------------------------------------------*/
void WebRtcIlbcfix_Refiner(
- WebRtc_Word16 *updStartPos, /* (o) updated start point (Q-2) */
- WebRtc_Word16 *idata, /* (i) original data buffer */
- WebRtc_Word16 idatal, /* (i) dimension of idata */
- WebRtc_Word16 centerStartPos, /* (i) beginning center segment */
- WebRtc_Word16 estSegPos, /* (i) estimated beginning other segment (Q-2) */
- WebRtc_Word16 *surround, /* (i/o) The contribution from this sequence
+ int16_t *updStartPos, /* (o) updated start point (Q-2) */
+ int16_t *idata, /* (i) original data buffer */
+ int16_t idatal, /* (i) dimension of idata */
+ int16_t centerStartPos, /* (i) beginning center segment */
+ int16_t estSegPos, /* (i) estimated beginning other segment (Q-2) */
+ int16_t *surround, /* (i/o) The contribution from this sequence
summed with earlier contributions */
- WebRtc_Word16 gain /* (i) Gain to use for this sequence */
+ int16_t gain /* (i) Gain to use for this sequence */
);
#endif
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/simple_interpolate_lsf.c b/webrtc/modules/audio_coding/codecs/ilbc/simple_interpolate_lsf.c
index ee5e643..9055493 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/simple_interpolate_lsf.c
+++ b/webrtc/modules/audio_coding/codecs/ilbc/simple_interpolate_lsf.c
@@ -26,27 +26,27 @@
*---------------------------------------------------------------*/
void WebRtcIlbcfix_SimpleInterpolateLsf(
- WebRtc_Word16 *syntdenum, /* (o) the synthesis filter denominator
+ int16_t *syntdenum, /* (o) the synthesis filter denominator
resulting from the quantized
interpolated lsf Q12 */
- WebRtc_Word16 *weightdenum, /* (o) the weighting filter denominator
+ int16_t *weightdenum, /* (o) the weighting filter denominator
resulting from the unquantized
interpolated lsf Q12 */
- WebRtc_Word16 *lsf, /* (i) the unquantized lsf coefficients Q13 */
- WebRtc_Word16 *lsfdeq, /* (i) the dequantized lsf coefficients Q13 */
- WebRtc_Word16 *lsfold, /* (i) the unquantized lsf coefficients of
+ int16_t *lsf, /* (i) the unquantized lsf coefficients Q13 */
+ int16_t *lsfdeq, /* (i) the dequantized lsf coefficients Q13 */
+ int16_t *lsfold, /* (i) the unquantized lsf coefficients of
the previous signal frame Q13 */
- WebRtc_Word16 *lsfdeqold, /* (i) the dequantized lsf coefficients of the
+ int16_t *lsfdeqold, /* (i) the dequantized lsf coefficients of the
previous signal frame Q13 */
- WebRtc_Word16 length, /* (i) should equate FILTERORDER */
+ int16_t length, /* (i) should equate FILTERORDER */
iLBC_Enc_Inst_t *iLBCenc_inst
/* (i/o) the encoder state structure */
) {
int i, pos, lp_length;
- WebRtc_Word16 *lsf2, *lsfdeq2;
+ int16_t *lsf2, *lsfdeq2;
/* Stack based */
- WebRtc_Word16 lp[LPC_FILTERORDER + 1];
+ int16_t lp[LPC_FILTERORDER + 1];
lsf2 = lsf + length;
lsfdeq2 = lsfdeq + length;
@@ -67,8 +67,8 @@
WebRtcIlbcfix_kLsfWeight30ms[0],
length);
WebRtcIlbcfix_BwExpand(weightdenum, lp,
- (WebRtc_Word16*)WebRtcIlbcfix_kLpcChirpWeightDenum,
- (WebRtc_Word16)lp_length);
+ (int16_t*)WebRtcIlbcfix_kLpcChirpWeightDenum,
+ (int16_t)lp_length);
/* subframe 2 to 6: Interpolation between first and second
set of lsf coefficients */
@@ -87,8 +87,8 @@
WebRtcIlbcfix_kLsfWeight30ms[i],
length);
WebRtcIlbcfix_BwExpand(weightdenum + pos, lp,
- (WebRtc_Word16*)WebRtcIlbcfix_kLpcChirpWeightDenum,
- (WebRtc_Word16)lp_length);
+ (int16_t*)WebRtcIlbcfix_kLpcChirpWeightDenum,
+ (int16_t)lp_length);
pos += lp_length;
}
@@ -113,8 +113,8 @@
WebRtcIlbcfix_kLsfWeight20ms[i],
length);
WebRtcIlbcfix_BwExpand(weightdenum+pos, lp,
- (WebRtc_Word16*)WebRtcIlbcfix_kLpcChirpWeightDenum,
- (WebRtc_Word16)lp_length);
+ (int16_t*)WebRtcIlbcfix_kLpcChirpWeightDenum,
+ (int16_t)lp_length);
pos += lp_length;
}
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/simple_interpolate_lsf.h b/webrtc/modules/audio_coding/codecs/ilbc/simple_interpolate_lsf.h
index 8cdd7da..b11f5d8 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/simple_interpolate_lsf.h
+++ b/webrtc/modules/audio_coding/codecs/ilbc/simple_interpolate_lsf.h
@@ -26,19 +26,19 @@
*---------------------------------------------------------------*/
void WebRtcIlbcfix_SimpleInterpolateLsf(
- WebRtc_Word16 *syntdenum, /* (o) the synthesis filter denominator
+ int16_t *syntdenum, /* (o) the synthesis filter denominator
resulting from the quantized
interpolated lsf Q12 */
- WebRtc_Word16 *weightdenum, /* (o) the weighting filter denominator
+ int16_t *weightdenum, /* (o) the weighting filter denominator
resulting from the unquantized
interpolated lsf Q12 */
- WebRtc_Word16 *lsf, /* (i) the unquantized lsf coefficients Q13 */
- WebRtc_Word16 *lsfdeq, /* (i) the dequantized lsf coefficients Q13 */
- WebRtc_Word16 *lsfold, /* (i) the unquantized lsf coefficients of
+ int16_t *lsf, /* (i) the unquantized lsf coefficients Q13 */
+ int16_t *lsfdeq, /* (i) the dequantized lsf coefficients Q13 */
+ int16_t *lsfold, /* (i) the unquantized lsf coefficients of
the previous signal frame Q13 */
- WebRtc_Word16 *lsfdeqold, /* (i) the dequantized lsf coefficients of the
+ int16_t *lsfdeqold, /* (i) the dequantized lsf coefficients of the
previous signal frame Q13 */
- WebRtc_Word16 length, /* (i) should equate FILTERORDER */
+ int16_t length, /* (i) should equate FILTERORDER */
iLBC_Enc_Inst_t *iLBCenc_inst
/* (i/o) the encoder state structure */
);
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/simple_lpc_analysis.c b/webrtc/modules/audio_coding/codecs/ilbc/simple_lpc_analysis.c
index 2d19edd..f8125e1 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/simple_lpc_analysis.c
+++ b/webrtc/modules/audio_coding/codecs/ilbc/simple_lpc_analysis.c
@@ -27,20 +27,20 @@
*---------------------------------------------------------------*/
void WebRtcIlbcfix_SimpleLpcAnalysis(
- WebRtc_Word16 *lsf, /* (o) lsf coefficients */
- WebRtc_Word16 *data, /* (i) new block of speech */
+ int16_t *lsf, /* (o) lsf coefficients */
+ int16_t *data, /* (i) new block of speech */
iLBC_Enc_Inst_t *iLBCenc_inst
/* (i/o) the encoder state structure */
) {
int k;
int scale;
- WebRtc_Word16 is;
- WebRtc_Word16 stability;
+ int16_t is;
+ int16_t stability;
/* Stack based */
- WebRtc_Word16 A[LPC_FILTERORDER + 1];
- WebRtc_Word32 R[LPC_FILTERORDER + 1];
- WebRtc_Word16 windowedData[BLOCKL_MAX];
- WebRtc_Word16 rc[LPC_FILTERORDER];
+ int16_t A[LPC_FILTERORDER + 1];
+ int32_t R[LPC_FILTERORDER + 1];
+ int16_t windowedData[BLOCKL_MAX];
+ int16_t rc[LPC_FILTERORDER];
is=LPC_LOOKBACK+BLOCKL_MAX-iLBCenc_inst->blockl;
WEBRTC_SPL_MEMCPY_W16(iLBCenc_inst->lpc_buffer+is,data,iLBCenc_inst->blockl);
@@ -80,7 +80,7 @@
}
/* Bandwidth expand the filter coefficients */
- WebRtcIlbcfix_BwExpand(A, A, (WebRtc_Word16*)WebRtcIlbcfix_kLpcChirpSyntDenum, LPC_FILTERORDER+1);
+ WebRtcIlbcfix_BwExpand(A, A, (int16_t*)WebRtcIlbcfix_kLpcChirpSyntDenum, LPC_FILTERORDER+1);
/* Convert from A to LSF representation */
WebRtcIlbcfix_Poly2Lsf(lsf + k*LPC_FILTERORDER, A);
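The WebRtcIlbcfix_BwExpand call applied above is the usual LPC bandwidth expansion: coefficient i is scaled by the i-th power of a chirp factor (pre-tabulated in the kLpcChirp* tables), which pulls the poles towards the origin before the LSF conversion. Conceptually, in floating point (the table's Q format is not shown in this hunk, so this is only the reference form):

/* Conceptual bandwidth expansion: a[i] -> a[i] * chirp^i. */
static void bw_expand_ref(double *a, int order_plus_one, double chirp) {
  double c = 1.0;
  for (int i = 0; i < order_plus_one; i++) {
    a[i] *= c;   /* a[0] (the leading 1.0) is left unchanged since c == 1 */
    c *= chirp;
  }
}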
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/simple_lpc_analysis.h b/webrtc/modules/audio_coding/codecs/ilbc/simple_lpc_analysis.h
index 83c1e5b..a3d1985 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/simple_lpc_analysis.h
+++ b/webrtc/modules/audio_coding/codecs/ilbc/simple_lpc_analysis.h
@@ -26,8 +26,8 @@
*---------------------------------------------------------------*/
void WebRtcIlbcfix_SimpleLpcAnalysis(
- WebRtc_Word16 *lsf, /* (o) lsf coefficients */
- WebRtc_Word16 *data, /* (i) new block of speech */
+ int16_t *lsf, /* (o) lsf coefficients */
+ int16_t *data, /* (i) new block of speech */
iLBC_Enc_Inst_t *iLBCenc_inst
/* (i/o) the encoder state structure */
);
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/simple_lsf_dequant.c b/webrtc/modules/audio_coding/codecs/ilbc/simple_lsf_dequant.c
index 7b5efa0..ef54883 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/simple_lsf_dequant.c
+++ b/webrtc/modules/audio_coding/codecs/ilbc/simple_lsf_dequant.c
@@ -24,9 +24,9 @@
*---------------------------------------------------------------*/
void WebRtcIlbcfix_SimpleLsfDeQ(
- WebRtc_Word16 *lsfdeq, /* (o) dequantized lsf coefficients */
- WebRtc_Word16 *index, /* (i) quantization index */
- WebRtc_Word16 lpc_n /* (i) number of LPCs */
+ int16_t *lsfdeq, /* (o) dequantized lsf coefficients */
+ int16_t *index, /* (i) quantization index */
+ int16_t lpc_n /* (i) number of LPCs */
){
int i, j, pos, cb_pos;
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/simple_lsf_dequant.h b/webrtc/modules/audio_coding/codecs/ilbc/simple_lsf_dequant.h
index efd3103..353edb2 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/simple_lsf_dequant.h
+++ b/webrtc/modules/audio_coding/codecs/ilbc/simple_lsf_dequant.h
@@ -26,9 +26,9 @@
*---------------------------------------------------------------*/
void WebRtcIlbcfix_SimpleLsfDeQ(
- WebRtc_Word16 *lsfdeq, /* (o) dequantized lsf coefficients */
- WebRtc_Word16 *index, /* (i) quantization index */
- WebRtc_Word16 lpc_n /* (i) number of LPCs */
+ int16_t *lsfdeq, /* (o) dequantized lsf coefficients */
+ int16_t *index, /* (i) quantization index */
+ int16_t lpc_n /* (i) number of LPCs */
);
#endif
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/simple_lsf_quant.c b/webrtc/modules/audio_coding/codecs/ilbc/simple_lsf_quant.c
index aa27fb4..8daba50 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/simple_lsf_quant.c
+++ b/webrtc/modules/audio_coding/codecs/ilbc/simple_lsf_quant.c
@@ -25,23 +25,23 @@
*---------------------------------------------------------------*/
void WebRtcIlbcfix_SimpleLsfQ(
- WebRtc_Word16 *lsfdeq, /* (o) dequantized lsf coefficients
+ int16_t *lsfdeq, /* (o) dequantized lsf coefficients
(dimension FILTERORDER) Q13 */
- WebRtc_Word16 *index, /* (o) quantization index */
- WebRtc_Word16 *lsf, /* (i) the lsf coefficient vector to be
+ int16_t *index, /* (o) quantization index */
+ int16_t *lsf, /* (i) the lsf coefficient vector to be
quantized (dimension FILTERORDER) Q13 */
- WebRtc_Word16 lpc_n /* (i) number of lsf sets to quantize */
+ int16_t lpc_n /* (i) number of lsf sets to quantize */
){
/* Quantize first LSF with memoryless split VQ */
WebRtcIlbcfix_SplitVq( lsfdeq, index, lsf,
- (WebRtc_Word16*)WebRtcIlbcfix_kLsfCb, (WebRtc_Word16*)WebRtcIlbcfix_kLsfDimCb, (WebRtc_Word16*)WebRtcIlbcfix_kLsfSizeCb);
+ (int16_t*)WebRtcIlbcfix_kLsfCb, (int16_t*)WebRtcIlbcfix_kLsfDimCb, (int16_t*)WebRtcIlbcfix_kLsfSizeCb);
if (lpc_n==2) {
/* Quantize second LSF with memoryless split VQ */
WebRtcIlbcfix_SplitVq( lsfdeq + LPC_FILTERORDER, index + LSF_NSPLIT,
- lsf + LPC_FILTERORDER, (WebRtc_Word16*)WebRtcIlbcfix_kLsfCb,
- (WebRtc_Word16*)WebRtcIlbcfix_kLsfDimCb, (WebRtc_Word16*)WebRtcIlbcfix_kLsfSizeCb);
+ lsf + LPC_FILTERORDER, (int16_t*)WebRtcIlbcfix_kLsfCb,
+ (int16_t*)WebRtcIlbcfix_kLsfDimCb, (int16_t*)WebRtcIlbcfix_kLsfSizeCb);
}
return;
}
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/simple_lsf_quant.h b/webrtc/modules/audio_coding/codecs/ilbc/simple_lsf_quant.h
index fd17b2e..94f804b 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/simple_lsf_quant.h
+++ b/webrtc/modules/audio_coding/codecs/ilbc/simple_lsf_quant.h
@@ -26,12 +26,12 @@
*---------------------------------------------------------------*/
void WebRtcIlbcfix_SimpleLsfQ(
- WebRtc_Word16 *lsfdeq, /* (o) dequantized lsf coefficients
+ int16_t *lsfdeq, /* (o) dequantized lsf coefficients
(dimension FILTERORDER) Q13 */
- WebRtc_Word16 *index, /* (o) quantization index */
- WebRtc_Word16 *lsf, /* (i) the lsf coefficient vector to be
+ int16_t *index, /* (o) quantization index */
+ int16_t *lsf, /* (i) the lsf coefficient vector to be
quantized (dimension FILTERORDER) Q13 */
- WebRtc_Word16 lpc_n /* (i) number of lsf sets to quantize */
+ int16_t lpc_n /* (i) number of lsf sets to quantize */
);
#endif
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/smooth.c b/webrtc/modules/audio_coding/codecs/ilbc/smooth.c
index b606077..c975098 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/smooth.c
+++ b/webrtc/modules/audio_coding/codecs/ilbc/smooth.c
@@ -25,22 +25,22 @@
*---------------------------------------------------------------*/
void WebRtcIlbcfix_Smooth(
- WebRtc_Word16 *odata, /* (o) smoothed output */
- WebRtc_Word16 *current, /* (i) the un enhanced residual for
+ int16_t *odata, /* (o) smoothed output */
+ int16_t *current, /* (i) the un enhanced residual for
this block */
- WebRtc_Word16 *surround /* (i) The approximation from the
+ int16_t *surround /* (i) The approximation from the
surrounding sequences */
) {
- WebRtc_Word16 maxtot, scale, scale1, scale2;
- WebRtc_Word16 A, B, C, denomW16;
- WebRtc_Word32 B_W32, denom, num;
- WebRtc_Word32 errs;
- WebRtc_Word32 w00,w10,w11, endiff, crit;
- WebRtc_Word32 w00prim, w10prim, w11_div_w00;
- WebRtc_Word16 w11prim;
- WebRtc_Word16 bitsw00, bitsw10, bitsw11;
- WebRtc_Word32 w11w00, w10w10, w00w00;
- WebRtc_Word16 max1, max2;
+ int16_t maxtot, scale, scale1, scale2;
+ int16_t A, B, C, denomW16;
+ int32_t B_W32, denom, num;
+ int32_t errs;
+ int32_t w00,w10,w11, endiff, crit;
+ int32_t w00prim, w10prim, w11_div_w00;
+ int16_t w11prim;
+ int16_t bitsw00, bitsw10, bitsw11;
+ int32_t w11w00, w10w10, w00w00;
+ int16_t max1, max2;
/* compute some inner products (ensure no overflow by first calculating proper scale factor) */
@@ -51,7 +51,7 @@
maxtot=WEBRTC_SPL_MAX(max1, max2);
scale=WebRtcSpl_GetSizeInBits(maxtot);
- scale = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16(2,scale)-26;
+ scale = (int16_t)WEBRTC_SPL_MUL_16_16(2,scale)-26;
scale=WEBRTC_SPL_MAX(0, scale);
w00=WebRtcSpl_DotProductWithScale(current,current,ENH_BLOCKL,scale);
@@ -77,13 +77,13 @@
}
w00prim = WEBRTC_SPL_LSHIFT_W32(w00, scale1);
- w11prim = (WebRtc_Word16) WEBRTC_SPL_SHIFT_W32(w11, scale2);
+ w11prim = (int16_t) WEBRTC_SPL_SHIFT_W32(w11, scale2);
/* Perform C = sqrt(w11/w00) (C is in Q11 since (16+6)/2=11) */
if (w11prim>64) {
endiff = WEBRTC_SPL_LSHIFT_W32(
- (WebRtc_Word32)WebRtcSpl_DivW32W16(w00prim, w11prim), 6);
- C = (WebRtc_Word16)WebRtcSpl_SqrtFloor(endiff); /* C is in Q11 */
+ (int32_t)WebRtcSpl_DivW32W16(w00prim, w11prim), 6);
+ C = (int16_t)WebRtcSpl_SqrtFloor(endiff); /* C is in Q11 */
} else {
C = 1;
}
@@ -123,23 +123,23 @@
}
w11w00 = WEBRTC_SPL_MUL_16_16(
- (WebRtc_Word16)WEBRTC_SPL_SHIFT_W32(w11, -scale),
- (WebRtc_Word16)WEBRTC_SPL_SHIFT_W32(w00, -scale));
+ (int16_t)WEBRTC_SPL_SHIFT_W32(w11, -scale),
+ (int16_t)WEBRTC_SPL_SHIFT_W32(w00, -scale));
w10w10 = WEBRTC_SPL_MUL_16_16(
- (WebRtc_Word16)WEBRTC_SPL_SHIFT_W32(w10, -scale),
- (WebRtc_Word16)WEBRTC_SPL_SHIFT_W32(w10, -scale));
+ (int16_t)WEBRTC_SPL_SHIFT_W32(w10, -scale),
+ (int16_t)WEBRTC_SPL_SHIFT_W32(w10, -scale));
w00w00 = WEBRTC_SPL_MUL_16_16(
- (WebRtc_Word16)WEBRTC_SPL_SHIFT_W32(w00, -scale),
- (WebRtc_Word16)WEBRTC_SPL_SHIFT_W32(w00, -scale));
+ (int16_t)WEBRTC_SPL_SHIFT_W32(w00, -scale),
+ (int16_t)WEBRTC_SPL_SHIFT_W32(w00, -scale));
/* Calculate (w11*w00-w10*w10)/(w00*w00) in Q16 */
if (w00w00>65536) {
endiff = (w11w00-w10w10);
endiff = WEBRTC_SPL_MAX(0, endiff);
/* denom is in Q16 */
- denom = WebRtcSpl_DivW32W16(endiff, (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(w00w00, 16));
+ denom = WebRtcSpl_DivW32W16(endiff, (int16_t)WEBRTC_SPL_RSHIFT_W32(w00w00, 16));
} else {
denom = 65536;
}
@@ -151,20 +151,20 @@
if (scale>0) {
/* denomW16 is in Q(16+scale) */
- denomW16=(WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(denom, scale);
+ denomW16=(int16_t)WEBRTC_SPL_RSHIFT_W32(denom, scale);
/* num in Q(34-scale) */
num=WEBRTC_SPL_RSHIFT_W32(ENH_A0_MINUS_A0A0DIV4, scale);
} else {
/* denomW16 is in Q16 */
- denomW16=(WebRtc_Word16)denom;
+ denomW16=(int16_t)denom;
/* num in Q34 */
num=ENH_A0_MINUS_A0A0DIV4;
}
/* A sqrt( (ENH_A0-(ENH_A0^2)/4)*(w00*w00)/(w11*w00 + w10*w10) ) in Q9 */
- A = (WebRtc_Word16)WebRtcSpl_SqrtFloor(WebRtcSpl_DivW32W16(num, denomW16));
+ A = (int16_t)WebRtcSpl_SqrtFloor(WebRtcSpl_DivW32W16(num, denomW16));
/* B_W32 is in Q30 ( B = 1 - ENH_A0/2 - A * w10/w00 ) */
scale1 = 31-bitsw10;
@@ -179,15 +179,15 @@
}
if ((w00prim>0)&&(w10prim>0)) {
- w11_div_w00=WebRtcSpl_DivW32W16(w10prim, (WebRtc_Word16)w00prim);
+ w11_div_w00=WebRtcSpl_DivW32W16(w10prim, (int16_t)w00prim);
if (WebRtcSpl_GetSizeInBits(w11_div_w00)+WebRtcSpl_GetSizeInBits(A)>31) {
B_W32 = 0;
} else {
- B_W32 = (WebRtc_Word32)1073741824 - (WebRtc_Word32)ENH_A0DIV2 -
+ B_W32 = (int32_t)1073741824 - (int32_t)ENH_A0DIV2 -
WEBRTC_SPL_MUL(A, w11_div_w00);
}
- B = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(B_W32, 16); /* B in Q14 */
+ B = (int16_t)WEBRTC_SPL_RSHIFT_W32(B_W32, 16); /* B in Q14 */
} else {
/* No smoothing */
A = 0;
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/smooth.h b/webrtc/modules/audio_coding/codecs/ilbc/smooth.h
index 88ce805..add0c7b 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/smooth.h
+++ b/webrtc/modules/audio_coding/codecs/ilbc/smooth.h
@@ -26,10 +26,10 @@
*---------------------------------------------------------------*/
void WebRtcIlbcfix_Smooth(
- WebRtc_Word16 *odata, /* (o) smoothed output */
- WebRtc_Word16 *current, /* (i) the un enhanced residual for
+ int16_t *odata, /* (o) smoothed output */
+ int16_t *current, /* (i) the un enhanced residual for
this block */
- WebRtc_Word16 *surround /* (i) The approximation from the
+ int16_t *surround /* (i) The approximation from the
surrounding sequences */
);
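
Note: the fixed-point arithmetic in smooth.c is easier to follow against a small reference sketch. The hunk picks a right-shift "scale" for the three dot products so that the ENH_BLOCKL products of 16-bit samples cannot overflow a 32-bit accumulator: each product needs at most 2*bits(maxtot) bits, and the code keeps roughly 26 of them (scale = 2*GetSizeInBits(maxtot) - 26, clamped at 0) so the sum still fits. A minimal sketch of that idea; ENH_BLOCKL = 80 and the test signal are illustrative stand-ins, only the scale formula is taken from the hunk:

#include <stdint.h>
#include <stdio.h>

#define ENH_BLOCKL 80  /* illustrative; the real constant lives in defines.h */

/* Number of bits needed to represent |v| (0 for v == 0), like WebRtcSpl_GetSizeInBits. */
static int bits_needed(int32_t v) {
  int n = 0;
  uint32_t u = (uint32_t)(v < 0 ? -v : v);
  while (u) { n++; u >>= 1; }
  return n;
}

/* Dot product with each product right-shifted by 'scale', mirroring
 * WebRtcSpl_DotProductWithScale. */
static int32_t dot_scaled(const int16_t *a, const int16_t *b, int len, int scale) {
  int32_t acc = 0;
  for (int i = 0; i < len; i++)
    acc += ((int32_t)a[i] * b[i]) >> scale;
  return acc;
}

int main(void) {
  int16_t current[ENH_BLOCKL], surround[ENH_BLOCKL];
  for (int i = 0; i < ENH_BLOCKL; i++) {   /* some decaying test signal */
    current[i] = (int16_t)(8000 - 100 * i);
    surround[i] = (int16_t)(7000 - 90 * i);
  }
  int16_t max1 = 8000, max2 = 7000;        /* max |sample| of each vector */
  int maxtot = max1 > max2 ? max1 : max2;

  /* Each product uses at most 2*bits(maxtot) bits; keep roughly 26 of them so
   * the 80-term sum stays inside 32 bits (mirrors "2*scale - 26" in smooth.c). */
  int scale = 2 * bits_needed(maxtot) - 26;
  if (scale < 0) scale = 0;

  int32_t w00 = dot_scaled(current, current, ENH_BLOCKL, scale);
  int32_t w10 = dot_scaled(surround, current, ENH_BLOCKL, scale);
  int32_t w11 = dot_scaled(surround, surround, ENH_BLOCKL, scale);
  printf("scale=%d w00=%d w10=%d w11=%d\n", scale, (int)w00, (int)w10, (int)w11);
  return 0;
}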
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/smooth_out_data.c b/webrtc/modules/audio_coding/codecs/ilbc/smooth_out_data.c
index 9bacd85..df3a3b7 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/smooth_out_data.c
+++ b/webrtc/modules/audio_coding/codecs/ilbc/smooth_out_data.c
@@ -19,25 +19,25 @@
#include "defines.h"
#include "constants.h"
-WebRtc_Word32 WebRtcIlbcfix_Smooth_odata(
- WebRtc_Word16 *odata,
- WebRtc_Word16 *psseq,
- WebRtc_Word16 *surround,
- WebRtc_Word16 C)
+int32_t WebRtcIlbcfix_Smooth_odata(
+ int16_t *odata,
+ int16_t *psseq,
+ int16_t *surround,
+ int16_t C)
{
int i;
- WebRtc_Word16 err;
- WebRtc_Word32 errs;
+ int16_t err;
+ int32_t errs;
for(i=0;i<80;i++) {
- odata[i]= (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(
+ odata[i]= (int16_t)WEBRTC_SPL_RSHIFT_W32(
(WEBRTC_SPL_MUL_16_16(C, surround[i])+1024), 11);
}
errs=0;
for(i=0;i<80;i++) {
- err=(WebRtc_Word16)WEBRTC_SPL_RSHIFT_W16((psseq[i]-odata[i]), 3);
+ err=(int16_t)WEBRTC_SPL_RSHIFT_W16((psseq[i]-odata[i]), 3);
errs+=WEBRTC_SPL_MUL_16_16(err, err); /* errs in Q-6 */
}
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/smooth_out_data.h b/webrtc/modules/audio_coding/codecs/ilbc/smooth_out_data.h
index 6fbe694..8324439 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/smooth_out_data.h
+++ b/webrtc/modules/audio_coding/codecs/ilbc/smooth_out_data.h
@@ -25,11 +25,11 @@
* help function to WebRtcIlbcfix_Smooth()
*---------------------------------------------------------------*/
-WebRtc_Word32 WebRtcIlbcfix_Smooth_odata(
- WebRtc_Word16 *odata,
- WebRtc_Word16 *psseq,
- WebRtc_Word16 *surround,
- WebRtc_Word16 C);
+int32_t WebRtcIlbcfix_Smooth_odata(
+ int16_t *odata,
+ int16_t *psseq,
+ int16_t *surround,
+ int16_t C);
#endif
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/sort_sq.c b/webrtc/modules/audio_coding/codecs/ilbc/sort_sq.c
index 9276a7b..dcfd8bd 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/sort_sq.c
+++ b/webrtc/modules/audio_coding/codecs/ilbc/sort_sq.c
@@ -23,11 +23,11 @@
*---------------------------------------------------------------*/
void WebRtcIlbcfix_SortSq(
- WebRtc_Word16 *xq, /* (o) the quantized value */
- WebRtc_Word16 *index, /* (o) the quantization index */
- WebRtc_Word16 x, /* (i) the value to quantize */
- const WebRtc_Word16 *cb, /* (i) the quantization codebook */
- WebRtc_Word16 cb_size /* (i) the size of the quantization codebook */
+ int16_t *xq, /* (o) the quantized value */
+ int16_t *index, /* (o) the quantization index */
+ int16_t x, /* (i) the value to quantize */
+ const int16_t *cb, /* (i) the quantization codebook */
+ int16_t cb_size /* (i) the size of the quantization codebook */
){
int i;
@@ -40,7 +40,7 @@
i++;
}
- if (x > WEBRTC_SPL_RSHIFT_W32(( (WebRtc_Word32)cb[i] + cb[i - 1] + 1),1)) {
+ if (x > WEBRTC_SPL_RSHIFT_W32(( (int32_t)cb[i] + cb[i - 1] + 1),1)) {
*index = i;
*xq = cb[i];
} else {
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/sort_sq.h b/webrtc/modules/audio_coding/codecs/ilbc/sort_sq.h
index 2863dc5..eaf175b 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/sort_sq.h
+++ b/webrtc/modules/audio_coding/codecs/ilbc/sort_sq.h
@@ -26,11 +26,11 @@
*---------------------------------------------------------------*/
void WebRtcIlbcfix_SortSq(
- WebRtc_Word16 *xq, /* (o) the quantized value */
- WebRtc_Word16 *index, /* (o) the quantization index */
- WebRtc_Word16 x, /* (i) the value to quantize */
- const WebRtc_Word16 *cb, /* (i) the quantization codebook */
- WebRtc_Word16 cb_size /* (i) the size of the quantization codebook */
+ int16_t *xq, /* (o) the quantized value */
+ int16_t *index, /* (o) the quantization index */
+ int16_t x, /* (i) the value to quantize */
+ const int16_t *cb, /* (i) the quantization codebook */
+ int16_t cb_size /* (i) the size of the quantization codebook */
);
#endif
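
Note: WebRtcIlbcfix_SortSq quantizes a scalar against a codebook that is assumed to be sorted in ascending order: it walks to the first entry not below x and then compares x with the rounded midpoint of that entry and its predecessor, exactly the comparison visible in the hunk above. A small self-contained sketch of that logic (the codebook values and test input are made up for illustration):

#include <stdint.h>
#include <stdio.h>

/* Scalar quantization against an ascending codebook, mirroring the structure
 * of WebRtcIlbcfix_SortSq. */
static void sort_sq(int16_t *xq, int16_t *index,
                    int16_t x, const int16_t *cb, int16_t cb_size) {
  int i = 0;
  /* Advance while the codebook entry is still below x. */
  while (i < cb_size - 1 && x > cb[i])
    i++;
  if (i > 0 && x > (((int32_t)cb[i] + cb[i - 1] + 1) >> 1)) {
    *index = (int16_t)i;        /* x is closer to the upper entry */
    *xq = cb[i];
  } else if (i > 0) {
    *index = (int16_t)(i - 1);  /* x is closer to the lower entry */
    *xq = cb[i - 1];
  } else {
    *index = 0;                 /* x at or below the first entry */
    *xq = cb[0];
  }
}

int main(void) {
  const int16_t cb[] = {-1200, -400, 0, 400, 1200};  /* illustrative codebook */
  int16_t xq, index;
  sort_sq(&xq, &index, 500, cb, 5);
  printf("x=500 -> index=%d xq=%d\n", index, xq);    /* expect index 3, xq 400 */
  return 0;
}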
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/split_vq.c b/webrtc/modules/audio_coding/codecs/ilbc/split_vq.c
index d908fa2..39b6e1b 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/split_vq.c
+++ b/webrtc/modules/audio_coding/codecs/ilbc/split_vq.c
@@ -26,16 +26,16 @@
*---------------------------------------------------------------*/
void WebRtcIlbcfix_SplitVq(
- WebRtc_Word16 *qX, /* (o) the quantized vector in Q13 */
- WebRtc_Word16 *index, /* (o) a vector of indexes for all vector
+ int16_t *qX, /* (o) the quantized vector in Q13 */
+ int16_t *index, /* (o) a vector of indexes for all vector
codebooks in the split */
- WebRtc_Word16 *X, /* (i) the vector to quantize */
- WebRtc_Word16 *CB, /* (i) the quantizer codebook in Q13 */
- WebRtc_Word16 *dim, /* (i) the dimension of X and qX */
- WebRtc_Word16 *cbsize /* (i) the number of vectors in the codebook */
+ int16_t *X, /* (i) the vector to quantize */
+ int16_t *CB, /* (i) the quantizer codebook in Q13 */
+ int16_t *dim, /* (i) the dimension of X and qX */
+ int16_t *cbsize /* (i) the number of vectors in the codebook */
) {
- WebRtc_Word16 *qXPtr, *indexPtr, *CBPtr, *XPtr;
+ int16_t *qXPtr, *indexPtr, *CBPtr, *XPtr;
/* Quantize X with the 3 vectror quantization tables */
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/split_vq.h b/webrtc/modules/audio_coding/codecs/ilbc/split_vq.h
index 7264a21..2ca98cb 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/split_vq.h
+++ b/webrtc/modules/audio_coding/codecs/ilbc/split_vq.h
@@ -26,13 +26,13 @@
*---------------------------------------------------------------*/
void WebRtcIlbcfix_SplitVq(
- WebRtc_Word16 *qX, /* (o) the quantized vector in Q13 */
- WebRtc_Word16 *index, /* (o) a vector of indexes for all vector
+ int16_t *qX, /* (o) the quantized vector in Q13 */
+ int16_t *index, /* (o) a vector of indexes for all vector
codebooks in the split */
- WebRtc_Word16 *X, /* (i) the vector to quantize */
- WebRtc_Word16 *CB, /* (i) the quantizer codebook in Q13 */
- WebRtc_Word16 *dim, /* (i) the dimension of X and qX */
- WebRtc_Word16 *cbsize /* (i) the number of vectors in the codebook */
+ int16_t *X, /* (i) the vector to quantize */
+ int16_t *CB, /* (i) the quantizer codebook in Q13 */
+ int16_t *dim, /* (i) the dimension of X and qX */
+ int16_t *cbsize /* (i) the number of vectors in the codebook */
);
#endif
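
Note: split VQ simply partitions the input vector into sub-vectors and quantizes each one independently with its own codebook, which keeps the codebook sizes manageable. A rough sketch of the idea, assuming a plain full-search per split (the dimensions and codebook contents below are invented; the real function walks the WebRtcIlbcfix_kLsfCb/kLsfDimCb/kLsfSizeCb tables and dispatches to the Vq3/Vq4 routines):

#include <stdint.h>
#include <stdio.h>

/* Full-search quantization of one sub-vector: pick the codebook row with the
 * smallest squared error. */
static void vq_one_split(int16_t *qX, int16_t *index, const int16_t *X,
                         const int16_t *CB, int dim, int cbsize) {
  int32_t best = INT32_MAX;
  int best_i = 0;
  for (int i = 0; i < cbsize; i++) {
    int32_t dist = 0;
    for (int j = 0; j < dim; j++) {
      int32_t d = X[j] - CB[i * dim + j];
      dist += d * d;
    }
    if (dist < best) { best = dist; best_i = i; }
  }
  *index = (int16_t)best_i;
  for (int j = 0; j < dim; j++) qX[j] = CB[best_i * dim + j];
}

/* Split VQ: quantize each sub-vector with its own codebook, advancing through
 * the concatenated codebook the way WebRtcIlbcfix_SplitVq advances CBPtr. */
static void split_vq(int16_t *qX, int16_t *index, const int16_t *X,
                     const int16_t *CB, const int16_t *dim,
                     const int16_t *cbsize, int nsplit) {
  const int16_t *cb_ptr = CB;
  int offset = 0;
  for (int s = 0; s < nsplit; s++) {
    vq_one_split(qX + offset, index + s, X + offset, cb_ptr, dim[s], cbsize[s]);
    cb_ptr += dim[s] * cbsize[s];
    offset += dim[s];
  }
}

int main(void) {
  /* Invented toy setup: a 4-dim vector split into 2+2, two tiny codebooks. */
  const int16_t dim[2] = {2, 2};
  const int16_t cbsize[2] = {3, 2};
  const int16_t CB[] = { 0, 0,  100, 100,  200, 200,   /* codebook for split 0 */
                         -50, 50,  50, -50 };          /* codebook for split 1 */
  const int16_t X[4] = {90, 120, 40, -60};
  int16_t qX[4], index[2];
  split_vq(qX, index, X, CB, dim, cbsize, 2);
  printf("indices: %d %d\n", index[0], index[1]);
  return 0;
}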
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/state_construct.c b/webrtc/modules/audio_coding/codecs/ilbc/state_construct.c
index 9d03cc3..492ad59 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/state_construct.c
+++ b/webrtc/modules/audio_coding/codecs/ilbc/state_construct.c
@@ -24,23 +24,23 @@
*---------------------------------------------------------------*/
void WebRtcIlbcfix_StateConstruct(
- WebRtc_Word16 idxForMax, /* (i) 6-bit index for the quantization of
+ int16_t idxForMax, /* (i) 6-bit index for the quantization of
max amplitude */
- WebRtc_Word16 *idxVec, /* (i) vector of quantization indexes */
- WebRtc_Word16 *syntDenum, /* (i) synthesis filter denumerator */
- WebRtc_Word16 *Out_fix, /* (o) the decoded state vector */
- WebRtc_Word16 len /* (i) length of a state vector */
+ int16_t *idxVec, /* (i) vector of quantization indexes */
+ int16_t *syntDenum, /* (i) synthesis filter denumerator */
+ int16_t *Out_fix, /* (o) the decoded state vector */
+ int16_t len /* (i) length of a state vector */
) {
int k;
- WebRtc_Word16 maxVal;
- WebRtc_Word16 *tmp1, *tmp2, *tmp3;
+ int16_t maxVal;
+ int16_t *tmp1, *tmp2, *tmp3;
/* Stack based */
- WebRtc_Word16 numerator[1+LPC_FILTERORDER];
- WebRtc_Word16 sampleValVec[2*STATE_SHORT_LEN_30MS+LPC_FILTERORDER];
- WebRtc_Word16 sampleMaVec[2*STATE_SHORT_LEN_30MS+LPC_FILTERORDER];
- WebRtc_Word16 *sampleVal = &sampleValVec[LPC_FILTERORDER];
- WebRtc_Word16 *sampleMa = &sampleMaVec[LPC_FILTERORDER];
- WebRtc_Word16 *sampleAr = &sampleValVec[LPC_FILTERORDER];
+ int16_t numerator[1+LPC_FILTERORDER];
+ int16_t sampleValVec[2*STATE_SHORT_LEN_30MS+LPC_FILTERORDER];
+ int16_t sampleMaVec[2*STATE_SHORT_LEN_30MS+LPC_FILTERORDER];
+ int16_t *sampleVal = &sampleValVec[LPC_FILTERORDER];
+ int16_t *sampleMa = &sampleMaVec[LPC_FILTERORDER];
+ int16_t *sampleAr = &sampleValVec[LPC_FILTERORDER];
/* initialization of coefficients */
@@ -60,7 +60,7 @@
for(k=0; k<len; k++){
/*the shifting is due to the Q13 in sq4_fixQ13[i], also the adding of 2097152 (= 0.5 << 22)
maxVal is in Q8 and result is in Q(-1) */
- (*tmp1) = (WebRtc_Word16) ((WEBRTC_SPL_MUL_16_16(maxVal,WebRtcIlbcfix_kStateSq3[(*tmp2)])+(WebRtc_Word32)2097152) >> 22);
+ (*tmp1) = (int16_t) ((WEBRTC_SPL_MUL_16_16(maxVal,WebRtcIlbcfix_kStateSq3[(*tmp2)])+(int32_t)2097152) >> 22);
tmp1++;
tmp2--;
}
@@ -68,7 +68,7 @@
for(k=0; k<len; k++){
/*the shifting is due to the Q13 in sq4_fixQ13[i], also the adding of 262144 (= 0.5 << 19)
maxVal is in Q5 and result is in Q(-1) */
- (*tmp1) = (WebRtc_Word16) ((WEBRTC_SPL_MUL_16_16(maxVal,WebRtcIlbcfix_kStateSq3[(*tmp2)])+(WebRtc_Word32)262144) >> 19);
+ (*tmp1) = (int16_t) ((WEBRTC_SPL_MUL_16_16(maxVal,WebRtcIlbcfix_kStateSq3[(*tmp2)])+(int32_t)262144) >> 19);
tmp1++;
tmp2--;
}
@@ -76,7 +76,7 @@
for(k=0; k<len; k++){
/*the shifting is due to the Q13 in sq4_fixQ13[i], also the adding of 65536 (= 0.5 << 17)
maxVal is in Q3 and result is in Q(-1) */
- (*tmp1) = (WebRtc_Word16) ((WEBRTC_SPL_MUL_16_16(maxVal,WebRtcIlbcfix_kStateSq3[(*tmp2)])+(WebRtc_Word32)65536) >> 17);
+ (*tmp1) = (int16_t) ((WEBRTC_SPL_MUL_16_16(maxVal,WebRtcIlbcfix_kStateSq3[(*tmp2)])+(int32_t)65536) >> 17);
tmp1++;
tmp2--;
}
@@ -93,11 +93,11 @@
/* Run MA filter + AR filter */
WebRtcSpl_FilterMAFastQ12(
sampleVal, sampleMa,
- numerator, LPC_FILTERORDER+1, (WebRtc_Word16)(len + LPC_FILTERORDER));
+ numerator, LPC_FILTERORDER+1, (int16_t)(len + LPC_FILTERORDER));
WebRtcSpl_MemSetW16(&sampleMa[len + LPC_FILTERORDER], 0, (len - LPC_FILTERORDER));
WebRtcSpl_FilterARFastQ12(
sampleMa, sampleAr,
- syntDenum, LPC_FILTERORDER+1, (WebRtc_Word16)(2*len));
+ syntDenum, LPC_FILTERORDER+1, (int16_t)(2*len));
tmp1 = &sampleAr[len-1];
tmp2 = &sampleAr[2*len-1];
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/state_construct.h b/webrtc/modules/audio_coding/codecs/ilbc/state_construct.h
index 465699b..22d75e2 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/state_construct.h
+++ b/webrtc/modules/audio_coding/codecs/ilbc/state_construct.h
@@ -24,12 +24,12 @@
*---------------------------------------------------------------*/
void WebRtcIlbcfix_StateConstruct(
- WebRtc_Word16 idxForMax, /* (i) 6-bit index for the quantization of
+ int16_t idxForMax, /* (i) 6-bit index for the quantization of
max amplitude */
- WebRtc_Word16 *idxVec, /* (i) vector of quantization indexes */
- WebRtc_Word16 *syntDenum, /* (i) synthesis filter denumerator */
- WebRtc_Word16 *Out_fix, /* (o) the decoded state vector */
- WebRtc_Word16 len /* (i) length of a state vector */
+ int16_t *idxVec, /* (i) vector of quantization indexes */
+ int16_t *syntDenum, /* (i) synthesis filter denumerator */
+ int16_t *Out_fix, /* (o) the decoded state vector */
+ int16_t len /* (i) length of a state vector */
);
#endif
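
Note: the three dequantization loops in state_construct.c all follow the same fixed-point pattern: a Q-format gain (maxVal) is multiplied by a Q13 codebook entry, a rounding constant equal to half the final divisor is added, and the sum is shifted down to the target Q-domain, e.g. Q8 * Q13 -> Q(-1) with shift 22 and rounding constant 1 << 21 (the 2097152 in the hunk). A small sketch of that pattern with illustrative values (the real maxVal and WebRtcIlbcfix_kStateSq3 entries come from the decoder state):

#include <stdint.h>
#include <stdio.h>

/* Multiply a Qa value by a Qb value and return the result in Qc with
 * round-to-nearest; assumes qa + qb > qc so the shift is at least 1. */
static int16_t qmul_round(int16_t x_qa, int16_t y_qb, int qa, int qb, int qc) {
  int shift = qa + qb - qc;                 /* how far down to the target Q */
  int32_t prod = (int32_t)x_qa * y_qb;      /* product is in Q(qa+qb) */
  int32_t rounded = (prod + ((int32_t)1 << (shift - 1))) >> shift;
  return (int16_t)rounded;
}

int main(void) {
  int16_t maxVal_q8 = 640;   /* 2.5 in Q8, illustrative */
  int16_t cb_q13 = 4096;     /* 0.5 in Q13, illustrative */
  /* 2.5 * 0.5 = 1.25; on the Q(-1) grid (step 2) this rounds to 2, stored as 1. */
  int16_t out = qmul_round(maxVal_q8, cb_q13, 8, 13, -1);
  printf("out (Q-1) = %d, i.e. %.2f\n", out, out * 2.0);
  return 0;
}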
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/state_search.c b/webrtc/modules/audio_coding/codecs/ilbc/state_search.c
index 824a0ba..bf9c7a9 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/state_search.c
+++ b/webrtc/modules/audio_coding/codecs/ilbc/state_search.c
@@ -29,23 +29,23 @@
/* (i) Encoder instance */
iLBC_bits *iLBC_encbits,/* (i/o) Encoded bits (output idxForMax
and idxVec, input state_first) */
- WebRtc_Word16 *residual, /* (i) target residual vector */
- WebRtc_Word16 *syntDenum, /* (i) lpc synthesis filter */
- WebRtc_Word16 *weightDenum /* (i) weighting filter denuminator */
+ int16_t *residual, /* (i) target residual vector */
+ int16_t *syntDenum, /* (i) lpc synthesis filter */
+ int16_t *weightDenum /* (i) weighting filter denuminator */
) {
- WebRtc_Word16 k, index;
- WebRtc_Word16 maxVal;
- WebRtc_Word16 scale, shift;
- WebRtc_Word32 maxValsq;
- WebRtc_Word16 scaleRes;
- WebRtc_Word16 max;
+ int16_t k, index;
+ int16_t maxVal;
+ int16_t scale, shift;
+ int32_t maxValsq;
+ int16_t scaleRes;
+ int16_t max;
int i;
/* Stack based */
- WebRtc_Word16 numerator[1+LPC_FILTERORDER];
- WebRtc_Word16 residualLongVec[2*STATE_SHORT_LEN_30MS+LPC_FILTERORDER];
- WebRtc_Word16 sampleMa[2*STATE_SHORT_LEN_30MS];
- WebRtc_Word16 *residualLong = &residualLongVec[LPC_FILTERORDER];
- WebRtc_Word16 *sampleAr = residualLong;
+ int16_t numerator[1+LPC_FILTERORDER];
+ int16_t residualLongVec[2*STATE_SHORT_LEN_30MS+LPC_FILTERORDER];
+ int16_t sampleMa[2*STATE_SHORT_LEN_30MS];
+ int16_t *residualLong = &residualLongVec[LPC_FILTERORDER];
+ int16_t *sampleAr = residualLong;
/* Scale to maximum 12 bits to avoid saturation in circular convolution filter */
max = WebRtcSpl_MaxAbsValueW16(residual, iLBCenc_inst->state_short_len);
@@ -66,12 +66,12 @@
WebRtcSpl_MemSetW16(residualLongVec, 0, LPC_FILTERORDER);
WebRtcSpl_FilterMAFastQ12(
residualLong, sampleMa,
- numerator, LPC_FILTERORDER+1, (WebRtc_Word16)(iLBCenc_inst->state_short_len + LPC_FILTERORDER));
+ numerator, LPC_FILTERORDER+1, (int16_t)(iLBCenc_inst->state_short_len + LPC_FILTERORDER));
WebRtcSpl_MemSetW16(&sampleMa[iLBCenc_inst->state_short_len + LPC_FILTERORDER], 0, iLBCenc_inst->state_short_len - LPC_FILTERORDER);
WebRtcSpl_FilterARFastQ12(
sampleMa, sampleAr,
- syntDenum, LPC_FILTERORDER+1, (WebRtc_Word16)(2*iLBCenc_inst->state_short_len));
+ syntDenum, LPC_FILTERORDER+1, (int16_t)(2*iLBCenc_inst->state_short_len));
for(k=0;k<iLBCenc_inst->state_short_len;k++){
sampleAr[k] += sampleAr[k+iLBCenc_inst->state_short_len];
@@ -82,10 +82,10 @@
/* Find the best index */
- if ((((WebRtc_Word32)maxVal)<<scaleRes)<23170) {
- maxValsq=((WebRtc_Word32)maxVal*maxVal)<<(2+2*scaleRes);
+ if ((((int32_t)maxVal)<<scaleRes)<23170) {
+ maxValsq=((int32_t)maxVal*maxVal)<<(2+2*scaleRes);
} else {
- maxValsq=(WebRtc_Word32)WEBRTC_SPL_WORD32_MAX;
+ maxValsq=(int32_t)WEBRTC_SPL_WORD32_MAX;
}
index=0;
@@ -110,7 +110,7 @@
/* Set up vectors for AbsQuant and rescale it with the scale factor */
WebRtcSpl_ScaleVectorWithSat(sampleAr, sampleAr, scale,
- iLBCenc_inst->state_short_len, (WebRtc_Word16)(shift-scaleRes));
+ iLBCenc_inst->state_short_len, (int16_t)(shift-scaleRes));
/* Quantize the values in fout[] */
WebRtcIlbcfix_AbsQuant(iLBCenc_inst, iLBC_encbits, sampleAr, weightDenum);
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/state_search.h b/webrtc/modules/audio_coding/codecs/ilbc/state_search.h
index 8b7f298..80a4f13 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/state_search.h
+++ b/webrtc/modules/audio_coding/codecs/ilbc/state_search.h
@@ -30,9 +30,9 @@
/* (i) Encoder instance */
iLBC_bits *iLBC_encbits,/* (i/o) Encoded bits (output idxForMax
and idxVec, input state_first) */
- WebRtc_Word16 *residual, /* (i) target residual vector */
- WebRtc_Word16 *syntDenum, /* (i) lpc synthesis filter */
- WebRtc_Word16 *weightDenum /* (i) weighting filter denuminator */
+ int16_t *residual, /* (i) target residual vector */
+ int16_t *syntDenum, /* (i) lpc synthesis filter */
+ int16_t *weightDenum /* (i) weighting filter denuminator */
);
#endif
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/swap_bytes.c b/webrtc/modules/audio_coding/codecs/ilbc/swap_bytes.c
index a48a066..8bbac42 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/swap_bytes.c
+++ b/webrtc/modules/audio_coding/codecs/ilbc/swap_bytes.c
@@ -23,9 +23,9 @@
*---------------------------------------------------------------*/
void WebRtcIlbcfix_SwapBytes(
- const WebRtc_UWord16* input, /* (i) the sequence to swap */
- WebRtc_Word16 wordLength, /* (i) number or WebRtc_UWord16 to swap */
- WebRtc_UWord16* output /* (o) the swapped sequence */
+ const uint16_t* input, /* (i) the sequence to swap */
+ int16_t wordLength, /* (i) number or uint16_t to swap */
+ uint16_t* output /* (o) the swapped sequence */
) {
int k;
for (k = wordLength; k > 0; k--) {
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/swap_bytes.h b/webrtc/modules/audio_coding/codecs/ilbc/swap_bytes.h
index 1632311..a909b2c 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/swap_bytes.h
+++ b/webrtc/modules/audio_coding/codecs/ilbc/swap_bytes.h
@@ -26,9 +26,9 @@
*---------------------------------------------------------------*/
void WebRtcIlbcfix_SwapBytes(
- const WebRtc_UWord16* input, /* (i) the sequence to swap */
- WebRtc_Word16 wordLength, /* (i) number or WebRtc_UWord16 to swap */
- WebRtc_UWord16* output /* (o) the swapped sequence */
+ const uint16_t* input, /* (i) the sequence to swap */
+ int16_t wordLength, /* (i) number or uint16_t to swap */
+ uint16_t* output /* (o) the swapped sequence */
);
#endif
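
Note: WebRtcIlbcfix_SwapBytes converts the packed bitstream between big-endian byte order and the host's 16-bit word order by swapping the two bytes of every uint16_t. A minimal sketch; the shift-and-or form below is one common way to write the swap and is assumed to match what the loop in swap_bytes.c does:

#include <stdint.h>
#include <stdio.h>

/* Swap the two bytes of each 16-bit word in the buffer. */
static void swap_bytes(const uint16_t *input, int16_t wordLength, uint16_t *output) {
  for (int k = 0; k < wordLength; k++) {
    uint16_t w = input[k];
    output[k] = (uint16_t)((w >> 8) | (w << 8));
  }
}

int main(void) {
  const uint16_t in[2] = {0x1234, 0xABCD};
  uint16_t out[2];
  swap_bytes(in, 2, out);
  printf("%04X %04X\n", (unsigned)out[0], (unsigned)out[1]);  /* prints 3412 CDAB */
  return 0;
}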
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/test/iLBC_test.c b/webrtc/modules/audio_coding/codecs/ilbc/test/iLBC_test.c
index 19569ac..4b86b91 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/test/iLBC_test.c
+++ b/webrtc/modules/audio_coding/codecs/ilbc/test/iLBC_test.c
@@ -44,14 +44,14 @@
{
FILE *ifileid,*efileid,*ofileid, *cfileid;
- WebRtc_Word16 data[BLOCKL_MAX];
- WebRtc_Word16 encoded_data[ILBCNOOFWORDS_MAX], decoded_data[BLOCKL_MAX];
+ int16_t data[BLOCKL_MAX];
+ int16_t encoded_data[ILBCNOOFWORDS_MAX], decoded_data[BLOCKL_MAX];
int len;
short pli, mode;
int blockcount = 0;
int packetlosscount = 0;
int frameLen;
- WebRtc_Word16 speechType;
+ int16_t speechType;
iLBC_encinst_t *Enc_Inst;
iLBC_decinst_t *Dec_Inst;
@@ -155,7 +155,7 @@
/* loop over input blocks */
- while (((WebRtc_Word16)fread(data,sizeof(WebRtc_Word16),frameLen,ifileid))==
+ while (((int16_t)fread(data,sizeof(int16_t),frameLen,ifileid))==
frameLen) {
blockcount++;
@@ -163,20 +163,20 @@
/* encoding */
fprintf(stderr, "--- Encoding block %i --- ",blockcount);
- len=WebRtcIlbcfix_Encode(Enc_Inst, data, (WebRtc_Word16)frameLen, encoded_data);
+ len=WebRtcIlbcfix_Encode(Enc_Inst, data, (int16_t)frameLen, encoded_data);
fprintf(stderr, "\r");
/* write byte file */
- if (fwrite(encoded_data, sizeof(WebRtc_Word16),
- ((len+1)/sizeof(WebRtc_Word16)), efileid) !=
- (size_t)(((len+1)/sizeof(WebRtc_Word16)))) {
+ if (fwrite(encoded_data, sizeof(int16_t),
+ ((len+1)/sizeof(int16_t)), efileid) !=
+ (size_t)(((len+1)/sizeof(int16_t)))) {
return -1;
}
/* get channel data if provided */
if (argc==6) {
- if (fread(&pli, sizeof(WebRtc_Word16), 1, cfileid)) {
+ if (fread(&pli, sizeof(int16_t), 1, cfileid)) {
if ((pli!=0)&&(pli!=1)) {
fprintf(stderr, "Error in channel file\n");
exit(0);
@@ -184,7 +184,7 @@
if (pli==0) {
/* Packet loss -> remove info from frame */
memset(encoded_data, 0,
- sizeof(WebRtc_Word16)*ILBCNOOFWORDS_MAX);
+ sizeof(int16_t)*ILBCNOOFWORDS_MAX);
packetlosscount++;
}
} else {
@@ -200,7 +200,7 @@
fprintf(stderr, "--- Decoding block %i --- ",blockcount);
if (pli==1) {
len=WebRtcIlbcfix_Decode(Dec_Inst, encoded_data,
- (WebRtc_Word16)len, decoded_data,&speechType);
+ (int16_t)len, decoded_data,&speechType);
} else {
len=WebRtcIlbcfix_DecodePlc(Dec_Inst, decoded_data, 1);
}
@@ -208,7 +208,7 @@
/* write output file */
- if (fwrite(decoded_data, sizeof(WebRtc_Word16), len,
+ if (fwrite(decoded_data, sizeof(int16_t), len,
ofileid) != (size_t)len) {
return -1;
}
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/test/iLBC_testLib.c b/webrtc/modules/audio_coding/codecs/ilbc/test/iLBC_testLib.c
index ee5e484..87ec489 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/test/iLBC_testLib.c
+++ b/webrtc/modules/audio_coding/codecs/ilbc/test/iLBC_testLib.c
@@ -153,14 +153,14 @@
if(len != 0){ //len may be 0 in 10ms split case
/* get channel data if provided */
if (argc==6) {
- if (fread(&pli, sizeof(WebRtc_Word16), 1, chfileid)) {
+ if (fread(&pli, sizeof(int16_t), 1, chfileid)) {
if ((pli!=0)&&(pli!=1)) {
fprintf(stderr, "Error in channel file\n");
exit(0);
}
if (pli==0) {
/* Packet loss -> remove info from frame */
- memset(encoded_data, 0, sizeof(WebRtc_Word16)*25);
+ memset(encoded_data, 0, sizeof(int16_t)*25);
}
} else {
fprintf(stderr, "Error. Channel file too short\n");
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/test/iLBC_testprogram.c b/webrtc/modules/audio_coding/codecs/ilbc/test/iLBC_testprogram.c
index f67945e..934d4be 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/test/iLBC_testprogram.c
+++ b/webrtc/modules/audio_coding/codecs/ilbc/test/iLBC_testprogram.c
@@ -41,12 +41,12 @@
short encode( /* (o) Number of bytes encoded */
iLBC_Enc_Inst_t *iLBCenc_inst, /* (i/o) Encoder instance */
- WebRtc_Word16 *encoded_data, /* (o) The encoded bytes */
- WebRtc_Word16 *data /* (i) The signal block to encode */
+ int16_t *encoded_data, /* (o) The encoded bytes */
+ int16_t *data /* (i) The signal block to encode */
){
/* do the actual encoding */
- WebRtcIlbcfix_Encode((WebRtc_UWord16 *)encoded_data, data, iLBCenc_inst);
+ WebRtcIlbcfix_Encode((uint16_t *)encoded_data, data, iLBCenc_inst);
return (iLBCenc_inst->no_of_bytes);
}
@@ -69,7 +69,7 @@
/* do actual decoding of block */
- WebRtcIlbcfix_Decode(decoded_data, (WebRtc_UWord16 *)encoded_data,
+ WebRtcIlbcfix_Decode(decoded_data, (uint16_t *)encoded_data,
iLBCdec_inst, mode);
return (iLBCdec_inst->blockl);
@@ -213,7 +213,7 @@
frameLen = Enc_Inst.blockl;
while( fread(&inputdata[noOfBlocks*Enc_Inst.blockl],sizeof(short),
- Enc_Inst.blockl,ifileid)==(WebRtc_UWord16)Enc_Inst.blockl){
+ Enc_Inst.blockl,ifileid)==(uint16_t)Enc_Inst.blockl){
noOfBlocks++;
}
#endif
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/unpack_bits.c b/webrtc/modules/audio_coding/codecs/ilbc/unpack_bits.c
index 6ed9265..fae06f6 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/unpack_bits.c
+++ b/webrtc/modules/audio_coding/codecs/ilbc/unpack_bits.c
@@ -22,23 +22,23 @@
* unpacking of bits from bitstream, i.e., vector of bytes
*---------------------------------------------------------------*/
-WebRtc_Word16 WebRtcIlbcfix_UnpackBits( /* (o) "Empty" frame indicator */
- const WebRtc_UWord16 *bitstream, /* (i) The packatized bitstream */
+int16_t WebRtcIlbcfix_UnpackBits( /* (o) "Empty" frame indicator */
+ const uint16_t *bitstream, /* (i) The packatized bitstream */
iLBC_bits *enc_bits, /* (o) Paramerers from bitstream */
- WebRtc_Word16 mode /* (i) Codec mode (20 or 30) */
+ int16_t mode /* (i) Codec mode (20 or 30) */
) {
- const WebRtc_UWord16 *bitstreamPtr;
+ const uint16_t *bitstreamPtr;
int i, k;
- WebRtc_Word16 *tmpPtr;
+ int16_t *tmpPtr;
bitstreamPtr=bitstream;
- /* First WebRtc_Word16 */
+ /* First int16_t */
enc_bits->lsf[0] = (*bitstreamPtr)>>10; /* Bit 0..5 */
enc_bits->lsf[1] = ((*bitstreamPtr)>>3)&0x7F; /* Bit 6..12 */
enc_bits->lsf[2] = ((*bitstreamPtr)&0x7)<<4; /* Bit 13..15 */
bitstreamPtr++;
- /* Second WebRtc_Word16 */
+ /* Second int16_t */
enc_bits->lsf[2] |= ((*bitstreamPtr)>>12)&0xF; /* Bit 0..3 */
if (mode==20) {
@@ -47,7 +47,7 @@
enc_bits->idxForMax = ((*bitstreamPtr)>>3)&0x3F; /* Bit 7..12 */
enc_bits->cb_index[0] = ((*bitstreamPtr)&0x7)<<4; /* Bit 13..15 */
bitstreamPtr++;
- /* Third WebRtc_Word16 */
+ /* Third int16_t */
enc_bits->cb_index[0] |= ((*bitstreamPtr)>>12)&0xE; /* Bit 0..2 */
enc_bits->gain_index[0] = ((*bitstreamPtr)>>8)&0x18; /* Bit 3..4 */
enc_bits->gain_index[1] = ((*bitstreamPtr)>>7)&0x8; /* Bit 5 */
@@ -59,14 +59,14 @@
enc_bits->lsf[3] = ((*bitstreamPtr)>>6)&0x3F; /* Bit 4..9 */
enc_bits->lsf[4] = ((*bitstreamPtr)<<1)&0x7E; /* Bit 10..15 */
bitstreamPtr++;
- /* Third WebRtc_Word16 */
+ /* Third int16_t */
enc_bits->lsf[4] |= ((*bitstreamPtr)>>15)&0x1; /* Bit 0 */
enc_bits->lsf[5] = ((*bitstreamPtr)>>8)&0x7F; /* Bit 1..7 */
enc_bits->startIdx = ((*bitstreamPtr)>>5)&0x7; /* Bit 8..10 */
enc_bits->state_first = ((*bitstreamPtr)>>4)&0x1; /* Bit 11 */
enc_bits->idxForMax = ((*bitstreamPtr)<<2)&0x3C; /* Bit 12..15 */
bitstreamPtr++;
- /* 4:th WebRtc_Word16 */
+ /* 4:th int16_t */
enc_bits->idxForMax |= ((*bitstreamPtr)>>14)&0x3; /* Bit 0..1 */
enc_bits->cb_index[0] = ((*bitstreamPtr)>>7)&0x78; /* Bit 2..5 */
enc_bits->gain_index[0] = ((*bitstreamPtr)>>5)&0x10; /* Bit 6 */
@@ -76,8 +76,8 @@
enc_bits->gain_index[4] = ((*bitstreamPtr)<<3)&0x8; /* Bit 15 */
}
/* Class 2 bits of ULP */
- /* 4:th to 6:th WebRtc_Word16 for 20 ms case
- 5:th to 7:th WebRtc_Word16 for 30 ms case */
+ /* 4:th to 6:th int16_t for 20 ms case
+ 5:th to 7:th int16_t for 30 ms case */
bitstreamPtr++;
tmpPtr=enc_bits->idxVec;
for (k=0; k<3; k++) {
@@ -90,7 +90,7 @@
}
if (mode==20) {
- /* 7:th WebRtc_Word16 */
+ /* 7:th int16_t */
for (i=15; i>6; i--) {
(*tmpPtr) = (((*bitstreamPtr)>>i)<<2)&0x4;
/* Bit 15-i */
@@ -103,7 +103,7 @@
enc_bits->gain_index[7] = ((*bitstreamPtr)<<2)&0xC; /* Bit 14..15 */
} else { /* mode==30 */
- /* 8:th WebRtc_Word16 */
+ /* 8:th int16_t */
for (i=15; i>5; i--) {
(*tmpPtr) = (((*bitstreamPtr)>>i)<<2)&0x4;
/* Bit 15-i */
@@ -115,12 +115,12 @@
enc_bits->cb_index[3] |= ((*bitstreamPtr))&0x2; /* Bit 14 */
enc_bits->cb_index[6] = ((*bitstreamPtr)<<7)&0x80; /* Bit 15 */
bitstreamPtr++;
- /* 9:th WebRtc_Word16 */
+ /* 9:th int16_t */
enc_bits->cb_index[6] |= ((*bitstreamPtr)>>9)&0x7E; /* Bit 0..5 */
enc_bits->cb_index[9] = ((*bitstreamPtr)>>2)&0xFE; /* Bit 6..12 */
enc_bits->cb_index[12] = ((*bitstreamPtr)<<5)&0xE0; /* Bit 13..15 */
bitstreamPtr++;
- /* 10:th WebRtc_Word16 */
+ /* 10:th int16_t */
enc_bits->cb_index[12] |= ((*bitstreamPtr)>>11)&0x1E;/* Bit 0..3 */
enc_bits->gain_index[3] |= ((*bitstreamPtr)>>8)&0xC; /* Bit 4..5 */
enc_bits->gain_index[4] |= ((*bitstreamPtr)>>7)&0x6; /* Bit 6..7 */
@@ -133,8 +133,8 @@
}
bitstreamPtr++;
/* Class 3 bits of ULP */
- /* 8:th to 14:th WebRtc_Word16 for 20 ms case
- 11:th to 17:th WebRtc_Word16 for 30 ms case */
+ /* 8:th to 14:th int16_t for 20 ms case
+ 11:th to 17:th int16_t for 30 ms case */
tmpPtr=enc_bits->idxVec;
for (k=0; k<7; k++) {
for (i=14; i>=0; i-=2) {
@@ -145,13 +145,13 @@
}
if (mode==20) {
- /* 15:th WebRtc_Word16 */
+ /* 15:th int16_t */
enc_bits->idxVec[56] |= ((*bitstreamPtr)>>14)&0x3; /* Bit 0..1 */
enc_bits->cb_index[0] |= ((*bitstreamPtr)>>13)&0x1; /* Bit 2 */
enc_bits->cb_index[1] = ((*bitstreamPtr)>>6)&0x7F; /* Bit 3..9 */
enc_bits->cb_index[2] = ((*bitstreamPtr)<<1)&0x7E; /* Bit 10..15 */
bitstreamPtr++;
- /* 16:th WebRtc_Word16 */
+ /* 16:th int16_t */
enc_bits->cb_index[2] |= ((*bitstreamPtr)>>15)&0x1; /* Bit 0 */
enc_bits->gain_index[0] |= ((*bitstreamPtr)>>12)&0x7; /* Bit 1..3 */
enc_bits->gain_index[1] |= ((*bitstreamPtr)>>10)&0x3; /* Bit 4..5 */
@@ -159,16 +159,16 @@
enc_bits->cb_index[3] |= ((*bitstreamPtr)>>6)&0x1; /* Bit 9 */
enc_bits->cb_index[4] = ((*bitstreamPtr)<<1)&0x7E; /* Bit 10..15 */
bitstreamPtr++;
- /* 17:th WebRtc_Word16 */
+ /* 17:th int16_t */
enc_bits->cb_index[4] |= ((*bitstreamPtr)>>15)&0x1; /* Bit 0 */
enc_bits->cb_index[5] = ((*bitstreamPtr)>>8)&0x7F; /* Bit 1..7 */
enc_bits->cb_index[6] = ((*bitstreamPtr))&0xFF; /* Bit 8..15 */
bitstreamPtr++;
- /* 18:th WebRtc_Word16 */
+ /* 18:th int16_t */
enc_bits->cb_index[7] = (*bitstreamPtr)>>8; /* Bit 0..7 */
enc_bits->cb_index[8] = (*bitstreamPtr)&0xFF; /* Bit 8..15 */
bitstreamPtr++;
- /* 19:th WebRtc_Word16 */
+ /* 19:th int16_t */
enc_bits->gain_index[3] |= ((*bitstreamPtr)>>14)&0x3; /* Bit 0..1 */
enc_bits->gain_index[4] |= ((*bitstreamPtr)>>12)&0x3; /* Bit 2..3 */
enc_bits->gain_index[5] = ((*bitstreamPtr)>>9)&0x7; /* Bit 4..6 */
@@ -176,14 +176,14 @@
enc_bits->gain_index[7] |= ((*bitstreamPtr)>>4)&0x3; /* Bit 10..11 */
enc_bits->gain_index[8] = ((*bitstreamPtr)>>1)&0x7; /* Bit 12..14 */
} else { /* mode==30 */
- /* 18:th WebRtc_Word16 */
+ /* 18:th int16_t */
enc_bits->idxVec[56] |= ((*bitstreamPtr)>>14)&0x3; /* Bit 0..1 */
enc_bits->idxVec[57] |= ((*bitstreamPtr)>>12)&0x3; /* Bit 2..3 */
enc_bits->cb_index[0] |= ((*bitstreamPtr)>>11)&1; /* Bit 4 */
enc_bits->cb_index[1] = ((*bitstreamPtr)>>4)&0x7F; /* Bit 5..11 */
enc_bits->cb_index[2] = ((*bitstreamPtr)<<3)&0x78; /* Bit 12..15 */
bitstreamPtr++;
- /* 19:th WebRtc_Word16 */
+ /* 19:th int16_t */
enc_bits->cb_index[2] |= ((*bitstreamPtr)>>13)&0x7; /* Bit 0..2 */
enc_bits->gain_index[0] |= ((*bitstreamPtr)>>10)&0x7; /* Bit 3..5 */
enc_bits->gain_index[1] |= ((*bitstreamPtr)>>8)&0x3; /* Bit 6..7 */
@@ -191,31 +191,31 @@
enc_bits->cb_index[3] |= ((*bitstreamPtr)>>4)&0x1; /* Bit 11 */
enc_bits->cb_index[4] = ((*bitstreamPtr)<<3)&0x78; /* Bit 12..15 */
bitstreamPtr++;
- /* 20:th WebRtc_Word16 */
+ /* 20:th int16_t */
enc_bits->cb_index[4] |= ((*bitstreamPtr)>>13)&0x7; /* Bit 0..2 */
enc_bits->cb_index[5] = ((*bitstreamPtr)>>6)&0x7F; /* Bit 3..9 */
enc_bits->cb_index[6] |= ((*bitstreamPtr)>>5)&0x1; /* Bit 10 */
enc_bits->cb_index[7] = ((*bitstreamPtr)<<3)&0xF8; /* Bit 11..15 */
bitstreamPtr++;
- /* 21:st WebRtc_Word16 */
+ /* 21:st int16_t */
enc_bits->cb_index[7] |= ((*bitstreamPtr)>>13)&0x7; /* Bit 0..2 */
enc_bits->cb_index[8] = ((*bitstreamPtr)>>5)&0xFF; /* Bit 3..10 */
enc_bits->cb_index[9] |= ((*bitstreamPtr)>>4)&0x1; /* Bit 11 */
enc_bits->cb_index[10] = ((*bitstreamPtr)<<4)&0xF0; /* Bit 12..15 */
bitstreamPtr++;
- /* 22:nd WebRtc_Word16 */
+ /* 22:nd int16_t */
enc_bits->cb_index[10] |= ((*bitstreamPtr)>>12)&0xF; /* Bit 0..3 */
enc_bits->cb_index[11] = ((*bitstreamPtr)>>4)&0xFF; /* Bit 4..11 */
enc_bits->cb_index[12] |= ((*bitstreamPtr)>>3)&0x1; /* Bit 12 */
enc_bits->cb_index[13] = ((*bitstreamPtr)<<5)&0xE0; /* Bit 13..15 */
bitstreamPtr++;
- /* 23:rd WebRtc_Word16 */
+ /* 23:rd int16_t */
enc_bits->cb_index[13] |= ((*bitstreamPtr)>>11)&0x1F;/* Bit 0..4 */
enc_bits->cb_index[14] = ((*bitstreamPtr)>>3)&0xFF; /* Bit 5..12 */
enc_bits->gain_index[3] |= ((*bitstreamPtr)>>1)&0x3; /* Bit 13..14 */
enc_bits->gain_index[4] |= ((*bitstreamPtr)&0x1); /* Bit 15 */
bitstreamPtr++;
- /* 24:rd WebRtc_Word16 */
+ /* 24:rd int16_t */
enc_bits->gain_index[5] = ((*bitstreamPtr)>>13)&0x7; /* Bit 0..2 */
enc_bits->gain_index[6] |= ((*bitstreamPtr)>>10)&0x7; /* Bit 3..5 */
enc_bits->gain_index[7] |= ((*bitstreamPtr)>>8)&0x3; /* Bit 6..7 */
@@ -223,7 +223,7 @@
enc_bits->gain_index[9] |= ((*bitstreamPtr)>>1)&0xF; /* Bit 11..14 */
enc_bits->gain_index[10] |= ((*bitstreamPtr)<<2)&0x4; /* Bit 15 */
bitstreamPtr++;
- /* 25:rd WebRtc_Word16 */
+ /* 25:rd int16_t */
enc_bits->gain_index[10] |= ((*bitstreamPtr)>>14)&0x3; /* Bit 0..1 */
enc_bits->gain_index[11] = ((*bitstreamPtr)>>11)&0x7; /* Bit 2..4 */
enc_bits->gain_index[12] |= ((*bitstreamPtr)>>7)&0xF; /* Bit 5..8 */
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/unpack_bits.h b/webrtc/modules/audio_coding/codecs/ilbc/unpack_bits.h
index 31c728e..9586a12 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/unpack_bits.h
+++ b/webrtc/modules/audio_coding/codecs/ilbc/unpack_bits.h
@@ -25,10 +25,10 @@
* unpacking of bits from bitstream, i.e., vector of bytes
*---------------------------------------------------------------*/
-WebRtc_Word16 WebRtcIlbcfix_UnpackBits( /* (o) "Empty" frame indicator */
- const WebRtc_UWord16 *bitstream, /* (i) The packatized bitstream */
+int16_t WebRtcIlbcfix_UnpackBits( /* (o) "Empty" frame indicator */
+ const uint16_t *bitstream, /* (i) The packatized bitstream */
iLBC_bits *enc_bits, /* (o) Paramerers from bitstream */
- WebRtc_Word16 mode /* (i) Codec mode (20 or 30) */
+ int16_t mode /* (i) Codec mode (20 or 30) */
);
#endif
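
Note: the unpacking code reads the iLBC parameters as bit fields laid out MSB-first across consecutive 16-bit words; a field that straddles a word boundary is assembled from a high part shifted up and a low part OR-ed in, exactly as enc_bits->lsf[2] is built from the tail of the first word and the head of the second. A small sketch of that idiom with a made-up 7-bit field split 3+4 across two words:

#include <stdint.h>
#include <stdio.h>

int main(void) {
  /* Two consecutive 16-bit words of a packed, MSB-first bitstream. */
  uint16_t w0 = 0xA5C3;   /* its last 3 bits (bit positions 13..15) start our field */
  uint16_t w1 = 0x9F00;   /* its first 4 bits (bit positions 0..3) finish the field */

  /* 7-bit field: high 3 bits from the tail of w0, low 4 bits from the head of
   * w1 -- the same pattern as the lsf[2] assembly in unpack_bits.c. */
  uint16_t field = (uint16_t)((w0 & 0x7) << 4);   /* bits 13..15 of w0 */
  field |= (uint16_t)((w1 >> 12) & 0xF);          /* bits 0..3 of w1 */

  printf("field = 0x%02X\n", (unsigned)field);    /* 011 ++ 1001 -> 0x39 */
  return 0;
}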
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/vq3.c b/webrtc/modules/audio_coding/codecs/ilbc/vq3.c
index 81d1bfa..a6c6cdb 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/vq3.c
+++ b/webrtc/modules/audio_coding/codecs/ilbc/vq3.c
@@ -24,16 +24,16 @@
*---------------------------------------------------------------*/
void WebRtcIlbcfix_Vq3(
- WebRtc_Word16 *Xq, /* quantized vector (Q13) */
- WebRtc_Word16 *index,
- WebRtc_Word16 *CB, /* codebook in Q13 */
- WebRtc_Word16 *X, /* vector to quantize (Q13) */
- WebRtc_Word16 n_cb
+ int16_t *Xq, /* quantized vector (Q13) */
+ int16_t *index,
+ int16_t *CB, /* codebook in Q13 */
+ int16_t *X, /* vector to quantize (Q13) */
+ int16_t n_cb
){
- WebRtc_Word16 i, j;
- WebRtc_Word16 pos, minindex=0;
- WebRtc_Word16 tmp;
- WebRtc_Word32 dist, mindist;
+ int16_t i, j;
+ int16_t pos, minindex=0;
+ int16_t tmp;
+ int32_t dist, mindist;
pos = 0;
mindist = WEBRTC_SPL_WORD32_MAX; /* start value */
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/vq3.h b/webrtc/modules/audio_coding/codecs/ilbc/vq3.h
index f2628e0..e2e2ab5 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/vq3.h
+++ b/webrtc/modules/audio_coding/codecs/ilbc/vq3.h
@@ -26,11 +26,11 @@
*---------------------------------------------------------------*/
void WebRtcIlbcfix_Vq3(
- WebRtc_Word16 *Xq, /* (o) the quantized vector (Q13) */
- WebRtc_Word16 *index, /* (o) the quantization index */
- WebRtc_Word16 *CB, /* (i) the vector quantization codebook (Q13) */
- WebRtc_Word16 *X, /* (i) the vector to quantize (Q13) */
- WebRtc_Word16 n_cb /* (i) the number of vectors in the codebook */
+ int16_t *Xq, /* (o) the quantized vector (Q13) */
+ int16_t *index, /* (o) the quantization index */
+ int16_t *CB, /* (i) the vector quantization codebook (Q13) */
+ int16_t *X, /* (i) the vector to quantize (Q13) */
+ int16_t n_cb /* (i) the number of vectors in the codebook */
);
#endif
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/vq4.c b/webrtc/modules/audio_coding/codecs/ilbc/vq4.c
index 3d4c26d..7776dfb 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/vq4.c
+++ b/webrtc/modules/audio_coding/codecs/ilbc/vq4.c
@@ -24,16 +24,16 @@
*---------------------------------------------------------------*/
void WebRtcIlbcfix_Vq4(
- WebRtc_Word16 *Xq, /* quantized vector (Q13) */
- WebRtc_Word16 *index,
- WebRtc_Word16 *CB, /* codebook in Q13 */
- WebRtc_Word16 *X, /* vector to quantize (Q13) */
- WebRtc_Word16 n_cb
+ int16_t *Xq, /* quantized vector (Q13) */
+ int16_t *index,
+ int16_t *CB, /* codebook in Q13 */
+ int16_t *X, /* vector to quantize (Q13) */
+ int16_t n_cb
){
- WebRtc_Word16 i, j;
- WebRtc_Word16 pos, minindex=0;
- WebRtc_Word16 tmp;
- WebRtc_Word32 dist, mindist;
+ int16_t i, j;
+ int16_t pos, minindex=0;
+ int16_t tmp;
+ int32_t dist, mindist;
pos = 0;
mindist = WEBRTC_SPL_WORD32_MAX; /* start value */
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/vq4.h b/webrtc/modules/audio_coding/codecs/ilbc/vq4.h
index 1b8cff2..5ae4c87 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/vq4.h
+++ b/webrtc/modules/audio_coding/codecs/ilbc/vq4.h
@@ -26,11 +26,11 @@
*---------------------------------------------------------------*/
void WebRtcIlbcfix_Vq4(
- WebRtc_Word16 *Xq, /* (o) the quantized vector (Q13) */
- WebRtc_Word16 *index, /* (o) the quantization index */
- WebRtc_Word16 *CB, /* (i) the vector quantization codebook (Q13) */
- WebRtc_Word16 *X, /* (i) the vector to quantize (Q13) */
- WebRtc_Word16 n_cb /* (i) the number of vectors in the codebook */
+ int16_t *Xq, /* (o) the quantized vector (Q13) */
+ int16_t *index, /* (o) the quantization index */
+ int16_t *CB, /* (i) the vector quantization codebook (Q13) */
+ int16_t *X, /* (i) the vector to quantize (Q13) */
+ int16_t n_cb /* (i) the number of vectors in the codebook */
);
#endif
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/window32_w32.c b/webrtc/modules/audio_coding/codecs/ilbc/window32_w32.c
index b0e8406..9ff1be3 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/window32_w32.c
+++ b/webrtc/modules/audio_coding/codecs/ilbc/window32_w32.c
@@ -23,18 +23,18 @@
*---------------------------------------------------------------*/
void WebRtcIlbcfix_Window32W32(
- WebRtc_Word32 *z, /* Output */
- WebRtc_Word32 *x, /* Input (same domain as Output)*/
- const WebRtc_Word32 *y, /* Q31 Window */
- WebRtc_Word16 N /* length to process */
+ int32_t *z, /* Output */
+ int32_t *x, /* Input (same domain as Output)*/
+ const int32_t *y, /* Q31 Window */
+ int16_t N /* length to process */
) {
- WebRtc_Word16 i;
- WebRtc_Word16 x_low, x_hi, y_low, y_hi;
- WebRtc_Word16 left_shifts;
- WebRtc_Word32 temp;
+ int16_t i;
+ int16_t x_low, x_hi, y_low, y_hi;
+ int16_t left_shifts;
+ int32_t temp;
- left_shifts = (WebRtc_Word16)WebRtcSpl_NormW32(x[0]);
- WebRtcSpl_VectorBitShiftW32(x, N, x, (WebRtc_Word16)(-left_shifts));
+ left_shifts = (int16_t)WebRtcSpl_NormW32(x[0]);
+ WebRtcSpl_VectorBitShiftW32(x, N, x, (int16_t)(-left_shifts));
/* The double precision numbers use a special representation:
@@ -42,15 +42,15 @@
*/
for (i = 0; i < N; i++) {
/* Extract higher bytes */
- x_hi = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(x[i], 16);
- y_hi = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(y[i], 16);
+ x_hi = (int16_t) WEBRTC_SPL_RSHIFT_W32(x[i], 16);
+ y_hi = (int16_t) WEBRTC_SPL_RSHIFT_W32(y[i], 16);
/* Extract lower bytes, defined as (w32 - hi<<16)>>1 */
- temp = WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)x_hi, 16);
- x_low = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32((x[i] - temp), 1);
+ temp = WEBRTC_SPL_LSHIFT_W32((int32_t)x_hi, 16);
+ x_low = (int16_t) WEBRTC_SPL_RSHIFT_W32((x[i] - temp), 1);
- temp = WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)y_hi, 16);
- y_low = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32((y[i] - temp), 1);
+ temp = WEBRTC_SPL_LSHIFT_W32((int32_t)y_hi, 16);
+ y_low = (int16_t) WEBRTC_SPL_RSHIFT_W32((y[i] - temp), 1);
/* Calculate z by a 32 bit multiplication using both low and high from x and y */
temp = WEBRTC_SPL_LSHIFT_W32(WEBRTC_SPL_MUL_16_16(x_hi, y_hi), 1);
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/window32_w32.h b/webrtc/modules/audio_coding/codecs/ilbc/window32_w32.h
index 121188a..4ee6fce 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/window32_w32.h
+++ b/webrtc/modules/audio_coding/codecs/ilbc/window32_w32.h
@@ -26,10 +26,10 @@
*---------------------------------------------------------------*/
void WebRtcIlbcfix_Window32W32(
- WebRtc_Word32 *z, /* Output */
- WebRtc_Word32 *x, /* Input (same domain as Output)*/
- const WebRtc_Word32 *y, /* Q31 Window */
- WebRtc_Word16 N /* length to process */
+ int32_t *z, /* Output */
+ int32_t *x, /* Input (same domain as Output)*/
+ const int32_t *y, /* Q31 Window */
+ int16_t N /* length to process */
);
#endif
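
Note: Window32W32 multiplies two 32-bit values without a 64-bit multiply by splitting each operand into a 16-bit high part and a 15-bit low part (w32 ~= hi<<16 + lo<<1, as the comment in the hunk says) and combining the 16x16 partial products. A sketch of the same decomposition, checked against a 64-bit reference product; the exact shift bookkeeping in window32_w32.c differs slightly, so treat this as an illustration of the splitting idea only:

#include <stdint.h>
#include <stdio.h>

/* Approximate (x*y) >> 32 using only 16x16->32 multiplies, by splitting each
 * operand as w32 ~= hi<<16 + lo<<1 (hi: top 16 bits, lo: next 15 bits). */
static int32_t mul32_by_halves(int32_t x, int32_t y) {
  int16_t x_hi = (int16_t)(x >> 16);
  int16_t y_hi = (int16_t)(y >> 16);
  int16_t x_lo = (int16_t)((x - ((int32_t)x_hi << 16)) >> 1);
  int16_t y_lo = (int16_t)((y - ((int32_t)y_hi << 16)) >> 1);

  /* (hi<<16 + lo<<1)(hi'<<16 + lo'<<1) >> 32
   *   = hi*hi' + (hi*lo')>>15 + (lo*hi')>>15 + a tiny lo*lo' term (dropped). */
  int32_t result = (int32_t)x_hi * y_hi;
  result += ((int32_t)x_hi * y_lo) >> 15;
  result += ((int32_t)x_lo * y_hi) >> 15;
  return result;
}

int main(void) {
  int32_t x = 1518500250;              /* ~0.707 in Q31 */
  int32_t y = 1073741824;              /* 0.5 in Q31 */
  int64_t ref = ((int64_t)x * y) >> 32;
  printf("halves: %d  64-bit ref: %lld\n", mul32_by_halves(x, y), (long long)ref);
  return 0;
}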
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/xcorr_coef.c b/webrtc/modules/audio_coding/codecs/ilbc/xcorr_coef.c
index 04170ad..eb7f828 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/xcorr_coef.c
+++ b/webrtc/modules/audio_coding/codecs/ilbc/xcorr_coef.c
@@ -24,26 +24,26 @@
*---------------------------------------------------------------*/
int WebRtcIlbcfix_XcorrCoef(
- WebRtc_Word16 *target, /* (i) first array */
- WebRtc_Word16 *regressor, /* (i) second array */
- WebRtc_Word16 subl, /* (i) dimension arrays */
- WebRtc_Word16 searchLen, /* (i) the search lenght */
- WebRtc_Word16 offset, /* (i) samples offset between arrays */
- WebRtc_Word16 step /* (i) +1 or -1 */
+ int16_t *target, /* (i) first array */
+ int16_t *regressor, /* (i) second array */
+ int16_t subl, /* (i) dimension arrays */
+ int16_t searchLen, /* (i) the search lenght */
+ int16_t offset, /* (i) samples offset between arrays */
+ int16_t step /* (i) +1 or -1 */
){
int k;
- WebRtc_Word16 maxlag;
- WebRtc_Word16 pos;
- WebRtc_Word16 max;
- WebRtc_Word16 crossCorrScale, Energyscale;
- WebRtc_Word16 crossCorrSqMod, crossCorrSqMod_Max;
- WebRtc_Word32 crossCorr, Energy;
- WebRtc_Word16 crossCorrmod, EnergyMod, EnergyMod_Max;
- WebRtc_Word16 *tp, *rp;
- WebRtc_Word16 *rp_beg, *rp_end;
- WebRtc_Word16 totscale, totscale_max;
- WebRtc_Word16 scalediff;
- WebRtc_Word32 newCrit, maxCrit;
+ int16_t maxlag;
+ int16_t pos;
+ int16_t max;
+ int16_t crossCorrScale, Energyscale;
+ int16_t crossCorrSqMod, crossCorrSqMod_Max;
+ int32_t crossCorr, Energy;
+ int16_t crossCorrmod, EnergyMod, EnergyMod_Max;
+ int16_t *tp, *rp;
+ int16_t *rp_beg, *rp_end;
+ int16_t totscale, totscale_max;
+ int16_t scalediff;
+ int32_t newCrit, maxCrit;
int shifts;
/* Initializations, to make sure that the first one is selected */
@@ -55,16 +55,16 @@
/* Find scale value and start position */
if (step==1) {
- max=WebRtcSpl_MaxAbsValueW16(regressor, (WebRtc_Word16)(subl+searchLen-1));
+ max=WebRtcSpl_MaxAbsValueW16(regressor, (int16_t)(subl+searchLen-1));
rp_beg = regressor;
rp_end = &regressor[subl];
} else { /* step==-1 */
- max=WebRtcSpl_MaxAbsValueW16(&regressor[-searchLen], (WebRtc_Word16)(subl+searchLen-1));
+ max=WebRtcSpl_MaxAbsValueW16(&regressor[-searchLen], (int16_t)(subl+searchLen-1));
rp_beg = &regressor[-1];
rp_end = &regressor[subl-1];
}
- /* Introduce a scale factor on the Energy in WebRtc_Word32 in
+ /* Introduce a scale factor on the Energy in int32_t in
order to make sure that the calculation does not
overflow */
@@ -86,13 +86,13 @@
if ((Energy>0)&&(crossCorr>0)) {
/* Put cross correlation and energy on 16 bit word */
- crossCorrScale=(WebRtc_Word16)WebRtcSpl_NormW32(crossCorr)-16;
- crossCorrmod=(WebRtc_Word16)WEBRTC_SPL_SHIFT_W32(crossCorr, crossCorrScale);
- Energyscale=(WebRtc_Word16)WebRtcSpl_NormW32(Energy)-16;
- EnergyMod=(WebRtc_Word16)WEBRTC_SPL_SHIFT_W32(Energy, Energyscale);
+ crossCorrScale=(int16_t)WebRtcSpl_NormW32(crossCorr)-16;
+ crossCorrmod=(int16_t)WEBRTC_SPL_SHIFT_W32(crossCorr, crossCorrScale);
+ Energyscale=(int16_t)WebRtcSpl_NormW32(Energy)-16;
+ EnergyMod=(int16_t)WEBRTC_SPL_SHIFT_W32(Energy, Energyscale);
- /* Square cross correlation and store upper WebRtc_Word16 */
- crossCorrSqMod=(WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(crossCorrmod, crossCorrmod, 16);
+ /* Square cross correlation and store upper int16_t */
+ crossCorrSqMod=(int16_t)WEBRTC_SPL_MUL_16_16_RSFT(crossCorrmod, crossCorrmod, 16);
/* Calculate the total number of (dynamic) right shifts that have
been performed on (crossCorr*crossCorr)/energy
@@ -111,11 +111,11 @@
division */
if (scalediff<0) {
- newCrit = ((WebRtc_Word32)crossCorrSqMod*EnergyMod_Max)>>(-scalediff);
- maxCrit = ((WebRtc_Word32)crossCorrSqMod_Max*EnergyMod);
+ newCrit = ((int32_t)crossCorrSqMod*EnergyMod_Max)>>(-scalediff);
+ maxCrit = ((int32_t)crossCorrSqMod_Max*EnergyMod);
} else {
- newCrit = ((WebRtc_Word32)crossCorrSqMod*EnergyMod_Max);
- maxCrit = ((WebRtc_Word32)crossCorrSqMod_Max*EnergyMod)>>scalediff;
+ newCrit = ((int32_t)crossCorrSqMod*EnergyMod_Max);
+ maxCrit = ((int32_t)crossCorrSqMod_Max*EnergyMod)>>scalediff;
}
/* Store the new lag value if the new criteria is larger
@@ -132,7 +132,7 @@
/* Do a +/- to get the next energy */
Energy += step*(WEBRTC_SPL_RSHIFT_W32(
- ((WebRtc_Word32)(*rp_end)*(*rp_end)) - ((WebRtc_Word32)(*rp_beg)*(*rp_beg)),
+ ((int32_t)(*rp_end)*(*rp_end)) - ((int32_t)(*rp_beg)*(*rp_beg)),
shifts));
rp_beg+=step;
rp_end+=step;
diff --git a/webrtc/modules/audio_coding/codecs/ilbc/xcorr_coef.h b/webrtc/modules/audio_coding/codecs/ilbc/xcorr_coef.h
index ac885c4..1f4c58d 100644
--- a/webrtc/modules/audio_coding/codecs/ilbc/xcorr_coef.h
+++ b/webrtc/modules/audio_coding/codecs/ilbc/xcorr_coef.h
@@ -27,12 +27,12 @@
*---------------------------------------------------------------*/
int WebRtcIlbcfix_XcorrCoef(
- WebRtc_Word16 *target, /* (i) first array */
- WebRtc_Word16 *regressor, /* (i) second array */
- WebRtc_Word16 subl, /* (i) dimension arrays */
- WebRtc_Word16 searchLen, /* (i) the search lenght */
- WebRtc_Word16 offset, /* (i) samples offset between arrays */
- WebRtc_Word16 step /* (i) +1 or -1 */
+ int16_t *target, /* (i) first array */
+ int16_t *regressor, /* (i) second array */
+ int16_t subl, /* (i) dimension arrays */
+ int16_t searchLen, /* (i) the search lenght */
+ int16_t offset, /* (i) samples offset between arrays */
+ int16_t step /* (i) +1 or -1 */
);
#endif
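
Note: the lag search in xcorr_coef.c never divides. To decide whether crossCorr^2/Energy at the current lag beats the best lag so far, it cross-multiplies (a^2/b > c^2/d is equivalent to a^2*d > c^2*b when both energies are positive) and uses the accumulated scale difference as a shift so both sides stay in 32 bits. A small sketch of the cross-multiplication idea, checked against a floating-point division; the toy values and the 64-bit intermediates are illustrative, the real code keeps everything in 16/32 bits via the scale tracking shown above:

#include <stdint.h>
#include <stdio.h>

/* Return 1 if cc_new^2 / en_new > cc_best^2 / en_best, using only multiplies
 * (energies assumed positive), like the newCrit/maxCrit comparison. */
static int better_lag(int32_t cc_new, int32_t en_new,
                      int32_t cc_best, int32_t en_best) {
  int64_t newCrit = (int64_t)cc_new * cc_new * en_best;
  int64_t maxCrit = (int64_t)cc_best * cc_best * en_new;
  return newCrit > maxCrit;
}

int main(void) {
  int32_t cc_new = 900, en_new = 5000;    /* toy correlation / energy pairs */
  int32_t cc_best = 800, en_best = 4100;
  double ratio_new = (double)cc_new * cc_new / en_new;
  double ratio_best = (double)cc_best * cc_best / en_best;
  printf("mul test: %d   float test: %d\n",
         better_lag(cc_new, en_new, cc_best, en_best),
         ratio_new > ratio_best);
  return 0;
}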
diff --git a/webrtc/modules/audio_coding/codecs/isac/fix/interface/isacfix.h b/webrtc/modules/audio_coding/codecs/isac/fix/interface/isacfix.h
index 28e9429..da7163e 100644
--- a/webrtc/modules/audio_coding/codecs/isac/fix/interface/isacfix.h
+++ b/webrtc/modules/audio_coding/codecs/isac/fix/interface/isacfix.h
@@ -35,7 +35,7 @@
*
*/
- WebRtc_Word16 WebRtcIsacfix_AssignSize(int *sizeinbytes);
+ int16_t WebRtcIsacfix_AssignSize(int *sizeinbytes);
/**************************************************************************
* WebRtcIsacfix_Assign(...)
@@ -52,7 +52,7 @@
* -1 - Error
*/
- WebRtc_Word16 WebRtcIsacfix_Assign(ISACFIX_MainStruct **inst,
+ int16_t WebRtcIsacfix_Assign(ISACFIX_MainStruct **inst,
void *ISACFIX_inst_Addr);
/****************************************************************************
@@ -68,7 +68,7 @@
* -1 - Error
*/
- WebRtc_Word16 WebRtcIsacfix_Create(ISACFIX_MainStruct **ISAC_main_inst);
+ int16_t WebRtcIsacfix_Create(ISACFIX_MainStruct **ISAC_main_inst);
/****************************************************************************
@@ -83,7 +83,7 @@
* -1 - Error
*/
- WebRtc_Word16 WebRtcIsacfix_Free(ISACFIX_MainStruct *ISAC_main_inst);
+ int16_t WebRtcIsacfix_Free(ISACFIX_MainStruct *ISAC_main_inst);
/****************************************************************************
@@ -104,8 +104,8 @@
* -1 - Error
*/
- WebRtc_Word16 WebRtcIsacfix_EncoderInit(ISACFIX_MainStruct *ISAC_main_inst,
- WebRtc_Word16 CodingMode);
+ int16_t WebRtcIsacfix_EncoderInit(ISACFIX_MainStruct *ISAC_main_inst,
+ int16_t CodingMode);
/****************************************************************************
@@ -129,9 +129,9 @@
* -1 - Error
*/
- WebRtc_Word16 WebRtcIsacfix_Encode(ISACFIX_MainStruct *ISAC_main_inst,
- const WebRtc_Word16 *speechIn,
- WebRtc_Word16 *encoded);
+ int16_t WebRtcIsacfix_Encode(ISACFIX_MainStruct *ISAC_main_inst,
+ const int16_t *speechIn,
+ int16_t *encoded);
@@ -161,9 +161,9 @@
#ifdef WEBRTC_ISAC_FIX_NB_CALLS_ENABLED
- WebRtc_Word16 WebRtcIsacfix_EncodeNb(ISACFIX_MainStruct *ISAC_main_inst,
- const WebRtc_Word16 *speechIn,
- WebRtc_Word16 *encoded);
+ int16_t WebRtcIsacfix_EncodeNb(ISACFIX_MainStruct *ISAC_main_inst,
+ const int16_t *speechIn,
+ int16_t *encoded);
#endif // WEBRTC_ISAC_FIX_NB_CALLS_ENABLED
@@ -181,7 +181,7 @@
* -1 - Error
*/
- WebRtc_Word16 WebRtcIsacfix_DecoderInit(ISACFIX_MainStruct *ISAC_main_inst);
+ int16_t WebRtcIsacfix_DecoderInit(ISACFIX_MainStruct *ISAC_main_inst);
/****************************************************************************
@@ -201,11 +201,11 @@
* -1 - Error
*/
- WebRtc_Word16 WebRtcIsacfix_UpdateBwEstimate1(ISACFIX_MainStruct *ISAC_main_inst,
- const WebRtc_UWord16 *encoded,
- WebRtc_Word32 packet_size,
- WebRtc_UWord16 rtp_seq_number,
- WebRtc_UWord32 arr_ts);
+ int16_t WebRtcIsacfix_UpdateBwEstimate1(ISACFIX_MainStruct *ISAC_main_inst,
+ const uint16_t *encoded,
+ int32_t packet_size,
+ uint16_t rtp_seq_number,
+ uint32_t arr_ts);
/****************************************************************************
* WebRtcIsacfix_UpdateBwEstimate(...)
@@ -226,12 +226,12 @@
* -1 - Error
*/
- WebRtc_Word16 WebRtcIsacfix_UpdateBwEstimate(ISACFIX_MainStruct *ISAC_main_inst,
- const WebRtc_UWord16 *encoded,
- WebRtc_Word32 packet_size,
- WebRtc_UWord16 rtp_seq_number,
- WebRtc_UWord32 send_ts,
- WebRtc_UWord32 arr_ts);
+ int16_t WebRtcIsacfix_UpdateBwEstimate(ISACFIX_MainStruct *ISAC_main_inst,
+ const uint16_t *encoded,
+ int32_t packet_size,
+ uint16_t rtp_seq_number,
+ uint32_t send_ts,
+ uint32_t arr_ts);
/****************************************************************************
* WebRtcIsacfix_Decode(...)
@@ -252,11 +252,11 @@
* -1 - Error
*/
- WebRtc_Word16 WebRtcIsacfix_Decode(ISACFIX_MainStruct *ISAC_main_inst,
- const WebRtc_UWord16 *encoded,
- WebRtc_Word16 len,
- WebRtc_Word16 *decoded,
- WebRtc_Word16 *speechType);
+ int16_t WebRtcIsacfix_Decode(ISACFIX_MainStruct *ISAC_main_inst,
+ const uint16_t *encoded,
+ int16_t len,
+ int16_t *decoded,
+ int16_t *speechType);
/****************************************************************************
@@ -281,11 +281,11 @@
*/
#ifdef WEBRTC_ISAC_FIX_NB_CALLS_ENABLED
- WebRtc_Word16 WebRtcIsacfix_DecodeNb(ISACFIX_MainStruct *ISAC_main_inst,
- const WebRtc_UWord16 *encoded,
- WebRtc_Word16 len,
- WebRtc_Word16 *decoded,
- WebRtc_Word16 *speechType);
+ int16_t WebRtcIsacfix_DecodeNb(ISACFIX_MainStruct *ISAC_main_inst,
+ const uint16_t *encoded,
+ int16_t len,
+ int16_t *decoded,
+ int16_t *speechType);
#endif // WEBRTC_ISAC_FIX_NB_CALLS_ENABLED
@@ -311,9 +311,9 @@
*/
#ifdef WEBRTC_ISAC_FIX_NB_CALLS_ENABLED
- WebRtc_Word16 WebRtcIsacfix_DecodePlcNb(ISACFIX_MainStruct *ISAC_main_inst,
- WebRtc_Word16 *decoded,
- WebRtc_Word16 noOfLostFrames );
+ int16_t WebRtcIsacfix_DecodePlcNb(ISACFIX_MainStruct *ISAC_main_inst,
+ int16_t *decoded,
+ int16_t noOfLostFrames);
#endif // WEBRTC_ISAC_FIX_NB_CALLS_ENABLED
@@ -339,9 +339,9 @@
* -1 - Error
*/
- WebRtc_Word16 WebRtcIsacfix_DecodePlc(ISACFIX_MainStruct *ISAC_main_inst,
- WebRtc_Word16 *decoded,
- WebRtc_Word16 noOfLostFrames );
+ int16_t WebRtcIsacfix_DecodePlc(ISACFIX_MainStruct *ISAC_main_inst,
+ int16_t *decoded,
+ int16_t noOfLostFrames );
/****************************************************************************
@@ -357,8 +357,8 @@
*
*/
- WebRtc_Word16 WebRtcIsacfix_ReadFrameLen(const WebRtc_Word16* encoded,
- WebRtc_Word16* frameLength);
+ int16_t WebRtcIsacfix_ReadFrameLen(const int16_t* encoded,
+ int16_t* frameLength);
/****************************************************************************
* WebRtcIsacfix_Control(...)
@@ -376,9 +376,9 @@
* -1 - Error
*/
- WebRtc_Word16 WebRtcIsacfix_Control(ISACFIX_MainStruct *ISAC_main_inst,
- WebRtc_Word16 rate,
- WebRtc_Word16 framesize);
+ int16_t WebRtcIsacfix_Control(ISACFIX_MainStruct *ISAC_main_inst,
+ int16_t rate,
+ int16_t framesize);
@@ -404,10 +404,10 @@
* -1 - Error
*/
- WebRtc_Word16 WebRtcIsacfix_ControlBwe(ISACFIX_MainStruct *ISAC_main_inst,
- WebRtc_Word16 rateBPS,
- WebRtc_Word16 frameSizeMs,
- WebRtc_Word16 enforceFrameSize);
+ int16_t WebRtcIsacfix_ControlBwe(ISACFIX_MainStruct *ISAC_main_inst,
+ int16_t rateBPS,
+ int16_t frameSizeMs,
+ int16_t enforceFrameSize);
@@ -438,7 +438,7 @@
* Return value : Error code
*/
- WebRtc_Word16 WebRtcIsacfix_GetErrorCode(ISACFIX_MainStruct *ISAC_main_inst);
+ int16_t WebRtcIsacfix_GetErrorCode(ISACFIX_MainStruct *ISAC_main_inst);
/****************************************************************************
@@ -453,7 +453,7 @@
* else bitrate
*/
- WebRtc_Word32 WebRtcIsacfix_GetUplinkBw(ISACFIX_MainStruct *ISAC_main_inst);
+ int32_t WebRtcIsacfix_GetUplinkBw(ISACFIX_MainStruct *ISAC_main_inst);
/****************************************************************************
@@ -474,8 +474,8 @@
* -1 if error happens
*/
- WebRtc_Word16 WebRtcIsacfix_SetMaxPayloadSize(ISACFIX_MainStruct *ISAC_main_inst,
- WebRtc_Word16 maxPayloadBytes);
+ int16_t WebRtcIsacfix_SetMaxPayloadSize(ISACFIX_MainStruct *ISAC_main_inst,
+ int16_t maxPayloadBytes);
/****************************************************************************
@@ -501,8 +501,8 @@
* -1 if error happens
*/
- WebRtc_Word16 WebRtcIsacfix_SetMaxRate(ISACFIX_MainStruct *ISAC_main_inst,
- WebRtc_Word32 maxRate);
+ int16_t WebRtcIsacfix_SetMaxRate(ISACFIX_MainStruct *ISAC_main_inst,
+ int32_t maxRate);
/****************************************************************************
* WebRtcIsacfix_CreateInternal(...)
@@ -516,7 +516,7 @@
* -1 - Error
*/
- WebRtc_Word16 WebRtcIsacfix_CreateInternal(ISACFIX_MainStruct *ISAC_main_inst);
+ int16_t WebRtcIsacfix_CreateInternal(ISACFIX_MainStruct *ISAC_main_inst);
/****************************************************************************
@@ -531,7 +531,7 @@
* -1 - Error
*/
- WebRtc_Word16 WebRtcIsacfix_FreeInternal(ISACFIX_MainStruct *ISAC_main_inst);
+ int16_t WebRtcIsacfix_FreeInternal(ISACFIX_MainStruct *ISAC_main_inst);
/****************************************************************************
@@ -553,10 +553,10 @@
* -1 - Error
*/
- WebRtc_Word16 WebRtcIsacfix_GetNewBitStream(ISACFIX_MainStruct *ISAC_main_inst,
- WebRtc_Word16 bweIndex,
- float scale,
- WebRtc_Word16 *encoded);
+ int16_t WebRtcIsacfix_GetNewBitStream(ISACFIX_MainStruct *ISAC_main_inst,
+ int16_t bweIndex,
+ float scale,
+ int16_t *encoded);
/****************************************************************************
@@ -573,8 +573,8 @@
*
*/
- WebRtc_Word16 WebRtcIsacfix_GetDownLinkBwIndex(ISACFIX_MainStruct* ISAC_main_inst,
- WebRtc_Word16* rateIndex);
+ int16_t WebRtcIsacfix_GetDownLinkBwIndex(ISACFIX_MainStruct* ISAC_main_inst,
+ int16_t* rateIndex);
/****************************************************************************
@@ -589,8 +589,8 @@
*
*/
- WebRtc_Word16 WebRtcIsacfix_UpdateUplinkBw(ISACFIX_MainStruct* ISAC_main_inst,
- WebRtc_Word16 rateIndex);
+ int16_t WebRtcIsacfix_UpdateUplinkBw(ISACFIX_MainStruct* ISAC_main_inst,
+ int16_t rateIndex);
/****************************************************************************
@@ -606,8 +606,8 @@
*
*/
- WebRtc_Word16 WebRtcIsacfix_ReadBwIndex(const WebRtc_Word16* encoded,
- WebRtc_Word16* rateIndex);
+ int16_t WebRtcIsacfix_ReadBwIndex(const int16_t* encoded,
+ int16_t* rateIndex);
/****************************************************************************
@@ -621,7 +621,7 @@
* Return value : frame lenght in samples
*/
- WebRtc_Word16 WebRtcIsacfix_GetNewFrameLen(ISACFIX_MainStruct *ISAC_main_inst);
+ int16_t WebRtcIsacfix_GetNewFrameLen(ISACFIX_MainStruct *ISAC_main_inst);
#if defined(__cplusplus)
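
Note: taken together, the declarations above form the public fixed-point iSAC API touched by this rename. A rough calling-order sketch, under the assumptions that the encoder is fed 10 ms blocks of 16 kHz audio (160 samples per call), that WebRtcIsacfix_Encode returns 0 until a whole frame has been buffered and the payload size once it has, and that CodingMode 1 selects instantaneous (channel-independent) coding; buffer sizes are generous guesses, and the sketch links against the isacfix library rather than standing alone:

#include <stdint.h>
#include "isacfix.h"   /* webrtc/modules/audio_coding/codecs/isac/fix/interface */

#define SAMPLES_PER_10MS 160   /* assumed: 16 kHz input, 10 ms per Encode call */

int encode_decode_one_frame(const int16_t *speech /* >= 480 samples assumed */) {
  ISACFIX_MainStruct *inst = NULL;
  int16_t encoded[400];          /* payload buffer size is an assumption */
  int16_t decoded[960];
  int16_t speechType;
  int len = 0;

  if (WebRtcIsacfix_Create(&inst) != 0) return -1;
  WebRtcIsacfix_EncoderInit(inst, 1 /* CodingMode: instantaneous (assumed) */);
  WebRtcIsacfix_DecoderInit(inst);

  /* Feed 10 ms at a time; Encode reports the payload size once a frame is done. */
  for (int i = 0; i < 3 && len <= 0; i++) {
    len = WebRtcIsacfix_Encode(inst, speech + i * SAMPLES_PER_10MS, encoded);
    if (len < 0) { WebRtcIsacfix_Free(inst); return -1; }
  }
  if (len > 0) {
    int samples = WebRtcIsacfix_Decode(inst, (const uint16_t *)encoded,
                                       (int16_t)len, decoded, &speechType);
    (void)samples;
  }
  WebRtcIsacfix_Free(inst);
  return len;
}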
diff --git a/webrtc/modules/audio_coding/codecs/isac/fix/source/arith_routines.c b/webrtc/modules/audio_coding/codecs/isac/fix/source/arith_routines.c
index ee62bad..38eecb7 100644
--- a/webrtc/modules/audio_coding/codecs/isac/fix/source/arith_routines.c
+++ b/webrtc/modules/audio_coding/codecs/isac/fix/source/arith_routines.c
@@ -30,10 +30,10 @@
*
* Return value : number of bytes in the stream
*/
-WebRtc_Word16 WebRtcIsacfix_EncTerminate(Bitstr_enc *streamData)
+int16_t WebRtcIsacfix_EncTerminate(Bitstr_enc *streamData)
{
- WebRtc_UWord16 *streamPtr;
- WebRtc_UWord16 negCarry;
+ uint16_t *streamPtr;
+ uint16_t negCarry;
/* point to the right place in the stream buffer */
streamPtr = streamData->stream + streamData->stream_index;
@@ -72,10 +72,10 @@
}
/* write remaining data to bitstream, if "full == 0" first byte has data */
if (streamData->full == 0) {
- *streamPtr++ += (WebRtc_UWord16) WEBRTC_SPL_RSHIFT_W32(streamData->streamval, 24);
+ *streamPtr++ += (uint16_t) WEBRTC_SPL_RSHIFT_W32(streamData->streamval, 24);
streamData->full = 1;
} else {
- *streamPtr = (WebRtc_UWord16) WEBRTC_SPL_LSHIFT_W32(
+ *streamPtr = (uint16_t) WEBRTC_SPL_LSHIFT_W32(
WEBRTC_SPL_RSHIFT_W32(streamData->streamval, 24), 8);
streamData->full = 0;
}
@@ -111,10 +111,10 @@
}
/* write remaining data (2 bytes) to bitstream */
if (streamData->full) {
- *streamPtr++ = (WebRtc_UWord16) WEBRTC_SPL_RSHIFT_W32(streamData->streamval, 16);
+ *streamPtr++ = (uint16_t) WEBRTC_SPL_RSHIFT_W32(streamData->streamval, 16);
} else {
- *streamPtr++ |= (WebRtc_UWord16) WEBRTC_SPL_RSHIFT_W32(streamData->streamval, 24);
- *streamPtr = (WebRtc_UWord16) WEBRTC_SPL_RSHIFT_W32(streamData->streamval, 8)
+ *streamPtr++ |= (uint16_t) WEBRTC_SPL_RSHIFT_W32(streamData->streamval, 24);
+ *streamPtr = (uint16_t) WEBRTC_SPL_RSHIFT_W32(streamData->streamval, 8)
& 0xFF00;
}
}
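Note on the flush logic above: WebRtcIsacfix_EncTerminate empties the 32-bit streamval accumulator into the 16-bit stream buffer one byte at a time, with the "full" flag recording whether the current 16-bit word still needs a byte. The following is a minimal standalone sketch of that byte-packing idea only (names and the simplified pointer handling are illustrative, not the library's API):

#include <stdint.h>

/* Sketch: move the top byte of a 32-bit accumulator into a 16-bit word.
 * Here 'full == 0' means the current word still needs its low byte;
 * 'full == 1' means the next byte starts a fresh word in the high half. */
static void flush_byte(uint16_t *word, int *full, uint32_t *accum) {
  if (*full == 0) {
    *word += (uint16_t)(*accum >> 24);          /* add as the low byte */
    *full = 1;
  } else {
    *word = (uint16_t)((*accum >> 24) << 8);    /* place in the high byte */
    *full = 0;
  }
  *accum <<= 8;  /* the byte just written is consumed */
}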
diff --git a/webrtc/modules/audio_coding/codecs/isac/fix/source/arith_routines_hist.c b/webrtc/modules/audio_coding/codecs/isac/fix/source/arith_routines_hist.c
index 14f1add..5c23f7a 100644
--- a/webrtc/modules/audio_coding/codecs/isac/fix/source/arith_routines_hist.c
+++ b/webrtc/modules/audio_coding/codecs/isac/fix/source/arith_routines_hist.c
@@ -33,20 +33,20 @@
* <0 if error detected
*/
int WebRtcIsacfix_EncHistMulti(Bitstr_enc *streamData,
- const WebRtc_Word16 *data,
- const WebRtc_UWord16 **cdf,
- const WebRtc_Word16 lenData)
+ const int16_t *data,
+ const uint16_t **cdf,
+ const int16_t lenData)
{
- WebRtc_UWord32 W_lower;
- WebRtc_UWord32 W_upper;
- WebRtc_UWord32 W_upper_LSB;
- WebRtc_UWord32 W_upper_MSB;
- WebRtc_UWord16 *streamPtr;
- WebRtc_UWord16 negCarry;
- WebRtc_UWord16 *maxStreamPtr;
- WebRtc_UWord16 *streamPtrCarry;
- WebRtc_UWord32 cdfLo;
- WebRtc_UWord32 cdfHi;
+ uint32_t W_lower;
+ uint32_t W_upper;
+ uint32_t W_upper_LSB;
+ uint32_t W_upper_MSB;
+ uint16_t *streamPtr;
+ uint16_t negCarry;
+ uint16_t *maxStreamPtr;
+ uint16_t *streamPtrCarry;
+ uint32_t cdfLo;
+ uint32_t cdfHi;
int k;
@@ -60,8 +60,8 @@
for (k = lenData; k > 0; k--)
{
/* fetch cdf_lower and cdf_upper from cdf tables */
- cdfLo = (WebRtc_UWord32) *(*cdf + (WebRtc_UWord32)*data);
- cdfHi = (WebRtc_UWord32) *(*cdf++ + (WebRtc_UWord32)*data++ + 1);
+ cdfLo = (uint32_t) *(*cdf + (uint32_t)*data);
+ cdfHi = (uint32_t) *(*cdf++ + (uint32_t)*data++ + 1);
/* update interval */
W_upper_LSB = W_upper & 0x0000FFFF;
@@ -103,10 +103,10 @@
{
W_upper = WEBRTC_SPL_LSHIFT_W32(W_upper, 8);
if (streamData->full == 0) {
- *streamPtr++ += (WebRtc_UWord16) WEBRTC_SPL_RSHIFT_W32(streamData->streamval, 24);
+ *streamPtr++ += (uint16_t) WEBRTC_SPL_RSHIFT_W32(streamData->streamval, 24);
streamData->full = 1;
} else {
- *streamPtr = (WebRtc_UWord16) WEBRTC_SPL_LSHIFT_W32(
+ *streamPtr = (uint16_t) WEBRTC_SPL_LSHIFT_W32(
WEBRTC_SPL_RSHIFT_W32(streamData->streamval, 24), 8);
streamData->full = 0;
}
@@ -145,21 +145,21 @@
* Return value : number of bytes in the stream
* <0 if error detected
*/
-WebRtc_Word16 WebRtcIsacfix_DecHistBisectMulti(WebRtc_Word16 *data,
- Bitstr_dec *streamData,
- const WebRtc_UWord16 **cdf,
- const WebRtc_UWord16 *cdfSize,
- const WebRtc_Word16 lenData)
+int16_t WebRtcIsacfix_DecHistBisectMulti(int16_t *data,
+ Bitstr_dec *streamData,
+ const uint16_t **cdf,
+ const uint16_t *cdfSize,
+ const int16_t lenData)
{
- WebRtc_UWord32 W_lower = 0;
- WebRtc_UWord32 W_upper;
- WebRtc_UWord32 W_tmp;
- WebRtc_UWord32 W_upper_LSB;
- WebRtc_UWord32 W_upper_MSB;
- WebRtc_UWord32 streamval;
- const WebRtc_UWord16 *streamPtr;
- const WebRtc_UWord16 *cdfPtr;
- WebRtc_Word16 sizeTmp;
+ uint32_t W_lower = 0;
+ uint32_t W_upper;
+ uint32_t W_tmp;
+ uint32_t W_upper_LSB;
+ uint32_t W_upper_MSB;
+ uint32_t streamval;
+ const uint16_t *streamPtr;
+ const uint16_t *cdfPtr;
+ int16_t sizeTmp;
int k;
@@ -175,7 +175,7 @@
if (streamData->stream_index == 0)
{
/* read first word from bytestream */
- streamval = WEBRTC_SPL_LSHIFT_W32((WebRtc_UWord32)*streamPtr++, 16);
+ streamval = WEBRTC_SPL_LSHIFT_W32((uint32_t)*streamPtr++, 16);
streamval |= *streamPtr++;
} else {
streamval = streamData->streamval;
@@ -282,20 +282,20 @@
* Return value : number of bytes in original stream
* <0 if error detected
*/
-WebRtc_Word16 WebRtcIsacfix_DecHistOneStepMulti(WebRtc_Word16 *data,
- Bitstr_dec *streamData,
- const WebRtc_UWord16 **cdf,
- const WebRtc_UWord16 *initIndex,
- const WebRtc_Word16 lenData)
+int16_t WebRtcIsacfix_DecHistOneStepMulti(int16_t *data,
+ Bitstr_dec *streamData,
+ const uint16_t **cdf,
+ const uint16_t *initIndex,
+ const int16_t lenData)
{
- WebRtc_UWord32 W_lower;
- WebRtc_UWord32 W_upper;
- WebRtc_UWord32 W_tmp;
- WebRtc_UWord32 W_upper_LSB;
- WebRtc_UWord32 W_upper_MSB;
- WebRtc_UWord32 streamval;
- const WebRtc_UWord16 *streamPtr;
- const WebRtc_UWord16 *cdfPtr;
+ uint32_t W_lower;
+ uint32_t W_upper;
+ uint32_t W_tmp;
+ uint32_t W_upper_LSB;
+ uint32_t W_upper_MSB;
+ uint32_t streamval;
+ const uint16_t *streamPtr;
+ const uint16_t *cdfPtr;
int k;
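For context on the interval updates in this file: W_upper is split into 16-bit MSB/LSB halves so that the product of a 32-bit CDF value and the 32-bit interval width can be formed with 32x16 multiplies, keeping only the top bits. A self-contained sketch of that trick, under the assumption that the CDF value fits in 16 bits (names are illustrative):

#include <stdint.h>

/* Sketch: compute approximately (cdf * W) >> 16 without a 64-bit multiply,
 * by splitting W into 16-bit halves as the interval updates above do.
 * Truncation of the low partial product can lose at most one LSB. */
static uint32_t scale_interval(uint32_t W, uint32_t cdf_q16) {
  uint32_t W_lsb = W & 0x0000FFFF;
  uint32_t W_msb = W >> 16;
  return cdf_q16 * W_msb + ((cdf_q16 * W_lsb) >> 16);
}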
diff --git a/webrtc/modules/audio_coding/codecs/isac/fix/source/arith_routines_logist.c b/webrtc/modules/audio_coding/codecs/isac/fix/source/arith_routines_logist.c
index 39c437e..b540ed5 100644
--- a/webrtc/modules/audio_coding/codecs/isac/fix/source/arith_routines_logist.c
+++ b/webrtc/modules/audio_coding/codecs/isac/fix/source/arith_routines_logist.c
@@ -21,7 +21,7 @@
/* Tables for piecewise linear cdf functions: y = k*x */
/* x Points for function piecewise() in Q15 */
-static const WebRtc_Word32 kHistEdges[51] = {
+static const int32_t kHistEdges[51] = {
-327680, -314573, -301466, -288359, -275252, -262144, -249037, -235930, -222823, -209716,
-196608, -183501, -170394, -157287, -144180, -131072, -117965, -104858, -91751, -78644,
-65536, -52429, -39322, -26215, -13108, 0, 13107, 26214, 39321, 52428,
@@ -32,7 +32,7 @@
/* k Points for function piecewise() in Q0 */
-static const WebRtc_UWord16 kCdfSlope[51] = {
+static const uint16_t kCdfSlope[51] = {
5, 5, 5, 5, 5, 5, 5, 5, 5, 5,
5, 5, 13, 23, 47, 87, 154, 315, 700, 1088,
2471, 6064, 14221, 21463, 36634, 36924, 19750, 13270, 5806, 2312,
@@ -42,7 +42,7 @@
};
/* y Points for function piecewise() in Q0 */
-static const WebRtc_UWord16 kCdfLogistic[51] = {
+static const uint16_t kCdfLogistic[51] = {
0, 2, 4, 6, 8, 10, 12, 14, 16, 18,
20, 22, 24, 29, 38, 57, 92, 153, 279, 559,
994, 1983, 4408, 10097, 18682, 33336, 48105, 56005, 61313, 63636,
@@ -64,10 +64,10 @@
*/
-static __inline WebRtc_UWord16 WebRtcIsacfix_Piecewise(WebRtc_Word32 xinQ15) {
- WebRtc_Word32 ind;
- WebRtc_Word32 qtmp1;
- WebRtc_UWord16 qtmp2;
+static __inline uint16_t WebRtcIsacfix_Piecewise(int32_t xinQ15) {
+ int32_t ind;
+ int32_t qtmp1;
+ uint16_t qtmp2;
/* Find index for x-value */
qtmp1 = WEBRTC_SPL_SAT(kHistEdges[50],xinQ15,kHistEdges[0]);
@@ -76,7 +76,7 @@
/* Calculate corresponding y-value and return */
qtmp1 = qtmp1 - kHistEdges[ind];
- qtmp2 = (WebRtc_UWord16)WEBRTC_SPL_RSHIFT_U32(
+ qtmp2 = (uint16_t)WEBRTC_SPL_RSHIFT_U32(
WEBRTC_SPL_UMUL_32_16(qtmp1,kCdfSlope[ind]), 15);
return (kCdfLogistic[ind] + qtmp2);
}
@@ -97,20 +97,20 @@
* <0 otherwise.
*/
int WebRtcIsacfix_EncLogisticMulti2(Bitstr_enc *streamData,
- WebRtc_Word16 *dataQ7,
- const WebRtc_UWord16 *envQ8,
- const WebRtc_Word16 lenData)
+ int16_t *dataQ7,
+ const uint16_t *envQ8,
+ const int16_t lenData)
{
- WebRtc_UWord32 W_lower;
- WebRtc_UWord32 W_upper;
- WebRtc_UWord16 W_upper_LSB;
- WebRtc_UWord16 W_upper_MSB;
- WebRtc_UWord16 *streamPtr;
- WebRtc_UWord16 *maxStreamPtr;
- WebRtc_UWord16 *streamPtrCarry;
- WebRtc_UWord16 negcarry;
- WebRtc_UWord32 cdfLo;
- WebRtc_UWord32 cdfHi;
+ uint32_t W_lower;
+ uint32_t W_upper;
+ uint16_t W_upper_LSB;
+ uint16_t W_upper_MSB;
+ uint16_t *streamPtr;
+ uint16_t *maxStreamPtr;
+ uint16_t *streamPtrCarry;
+ uint16_t negcarry;
+ uint32_t cdfLo;
+ uint32_t cdfHi;
int k;
/* point to beginning of stream buffer
@@ -148,8 +148,8 @@
/* update interval */
- W_upper_LSB = (WebRtc_UWord16)W_upper;
- W_upper_MSB = (WebRtc_UWord16)WEBRTC_SPL_RSHIFT_U32(W_upper, 16);
+ W_upper_LSB = (uint16_t)W_upper;
+ W_upper_MSB = (uint16_t)WEBRTC_SPL_RSHIFT_U32(W_upper, 16);
W_lower = WEBRTC_SPL_UMUL_32_16(cdfLo, W_upper_MSB);
W_lower += WEBRTC_SPL_UMUL_32_16_RSFT16(cdfLo, W_upper_LSB);
W_upper = WEBRTC_SPL_UMUL_32_16(cdfHi, W_upper_MSB);
@@ -187,11 +187,11 @@
{
W_upper = WEBRTC_SPL_LSHIFT_U32(W_upper, 8);
if (streamData->full == 0) {
- *streamPtr++ += (WebRtc_UWord16) WEBRTC_SPL_RSHIFT_U32(
+ *streamPtr++ += (uint16_t) WEBRTC_SPL_RSHIFT_U32(
streamData->streamval, 24);
streamData->full = 1;
} else {
- *streamPtr = (WebRtc_UWord16) WEBRTC_SPL_LSHIFT_U32(
+ *streamPtr = (uint16_t) WEBRTC_SPL_LSHIFT_U32(
WEBRTC_SPL_RSHIFT_U32(streamData->streamval, 24), 8);
streamData->full = 0;
}
@@ -228,25 +228,25 @@
* Return value : number of bytes in the stream so far
* -1 if error detected
*/
-WebRtc_Word16 WebRtcIsacfix_DecLogisticMulti2(WebRtc_Word16 *dataQ7,
- Bitstr_dec *streamData,
- const WebRtc_Word32 *envQ8,
- const WebRtc_Word16 lenData)
+int16_t WebRtcIsacfix_DecLogisticMulti2(int16_t *dataQ7,
+ Bitstr_dec *streamData,
+ const int32_t *envQ8,
+ const int16_t lenData)
{
- WebRtc_UWord32 W_lower;
- WebRtc_UWord32 W_upper;
- WebRtc_UWord32 W_tmp;
- WebRtc_UWord16 W_upper_LSB;
- WebRtc_UWord16 W_upper_MSB;
- WebRtc_UWord32 streamVal;
- WebRtc_UWord16 cdfTmp;
- WebRtc_Word32 res;
- WebRtc_Word32 inSqrt;
- WebRtc_Word32 newRes;
- const WebRtc_UWord16 *streamPtr;
- WebRtc_Word16 candQ7;
- WebRtc_Word16 envCount;
- WebRtc_UWord16 tmpARSpecQ8 = 0;
+ uint32_t W_lower;
+ uint32_t W_upper;
+ uint32_t W_tmp;
+ uint16_t W_upper_LSB;
+ uint16_t W_upper_MSB;
+ uint32_t streamVal;
+ uint16_t cdfTmp;
+ int32_t res;
+ int32_t inSqrt;
+ int32_t newRes;
+ const uint16_t *streamPtr;
+ int16_t candQ7;
+ int16_t envCount;
+ uint16_t tmpARSpecQ8 = 0;
int k, i;
@@ -266,7 +266,7 @@
}
- res = WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)1,
+ res = WEBRTC_SPL_LSHIFT_W32((int32_t)1,
WEBRTC_SPL_RSHIFT_W16(WebRtcSpl_GetSizeInBits(envQ8[0]), 1));
envCount = 0;
@@ -290,13 +290,13 @@
newRes = WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_DIV(inSqrt, res) + res, 1);
} while (newRes != res && i-- > 0);
- tmpARSpecQ8 = (WebRtc_UWord16)newRes;
+ tmpARSpecQ8 = (uint16_t)newRes;
for(k4 = 0; k4 < 4; k4++)
{
/* find the integer *data for which streamVal lies in [W_lower+1, W_upper] */
- W_upper_LSB = (WebRtc_UWord16) (W_upper & 0x0000FFFF);
- W_upper_MSB = (WebRtc_UWord16) WEBRTC_SPL_RSHIFT_U32(W_upper, 16);
+ W_upper_LSB = (uint16_t) (W_upper & 0x0000FFFF);
+ W_upper_MSB = (uint16_t) WEBRTC_SPL_RSHIFT_U32(W_upper, 16);
/* find first candidate by inverting the logistic cdf
* Input dither value collected from io-stream */
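The Piecewise() helper changed above evaluates a piecewise-linear CDF: the Q15 input is clamped to the table range, a segment index is found, and slope * (x - edge) >> 15 is added to the tabulated y value. A reduced, self-contained sketch of the same lookup, using made-up 3-segment tables instead of the real 51-entry kHistEdges/kCdfSlope/kCdfLogistic tables:

#include <stdint.h>

/* Reduced sketch of a piecewise-linear table lookup in Q15; tables are
 * hypothetical 3-segment examples. */
static const int32_t edges_q15[4] = {-32768, 0, 16384, 32767}; /* x breakpoints */
static const uint16_t slope_q0[3] = {1, 4, 2};                 /* slope per segment */
static const uint16_t y_q0[4]     = {0, 100, 300, 400};        /* y at breakpoints */

static uint16_t piecewise_q15(int32_t x_q15) {
  int ind = 0;
  /* Clamp the input to the table range. */
  if (x_q15 < edges_q15[0]) x_q15 = edges_q15[0];
  if (x_q15 > edges_q15[3]) x_q15 = edges_q15[3];
  /* Find the segment containing x (the real code derives this by division). */
  while (ind < 2 && x_q15 >= edges_q15[ind + 1]) ind++;
  /* y = y[ind] + (slope[ind] * (x - edge[ind]) >> 15), dropping the Q15 scale. */
  return (uint16_t)(y_q0[ind] +
      (((uint32_t)(x_q15 - edges_q15[ind]) * slope_q0[ind]) >> 15));
}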
diff --git a/webrtc/modules/audio_coding/codecs/isac/fix/source/arith_routins.h b/webrtc/modules/audio_coding/codecs/isac/fix/source/arith_routins.h
index 9aa49da..584bc47 100644
--- a/webrtc/modules/audio_coding/codecs/isac/fix/source/arith_routins.h
+++ b/webrtc/modules/audio_coding/codecs/isac/fix/source/arith_routins.h
@@ -38,9 +38,9 @@
*/
int WebRtcIsacfix_EncLogisticMulti2(
Bitstr_enc *streamData,
- WebRtc_Word16 *dataQ7,
- const WebRtc_UWord16 *env,
- const WebRtc_Word16 lenData);
+ int16_t *dataQ7,
+ const uint16_t *env,
+ const int16_t lenData);
/****************************************************************************
@@ -54,7 +54,7 @@
*
* Return value : number of bytes in the stream
*/
-WebRtc_Word16 WebRtcIsacfix_EncTerminate(Bitstr_enc *streamData);
+int16_t WebRtcIsacfix_EncTerminate(Bitstr_enc *streamData);
/****************************************************************************
@@ -74,11 +74,11 @@
* Return value : number of bytes in the stream so far
* <0 if error detected
*/
-WebRtc_Word16 WebRtcIsacfix_DecLogisticMulti2(
- WebRtc_Word16 *data,
+int16_t WebRtcIsacfix_DecLogisticMulti2(
+ int16_t *data,
Bitstr_dec *streamData,
- const WebRtc_Word32 *env,
- const WebRtc_Word16 lenData);
+ const int32_t *env,
+ const int16_t lenData);
/****************************************************************************
@@ -97,9 +97,9 @@
*/
int WebRtcIsacfix_EncHistMulti(
Bitstr_enc *streamData,
- const WebRtc_Word16 *data,
- const WebRtc_UWord16 **cdf,
- const WebRtc_Word16 lenData);
+ const int16_t *data,
+ const uint16_t **cdf,
+ const int16_t lenData);
/****************************************************************************
@@ -122,12 +122,12 @@
* Return value : number of bytes in the stream
* <0 if error detected
*/
-WebRtc_Word16 WebRtcIsacfix_DecHistBisectMulti(
- WebRtc_Word16 *data,
+int16_t WebRtcIsacfix_DecHistBisectMulti(
+ int16_t *data,
Bitstr_dec *streamData,
- const WebRtc_UWord16 **cdf,
- const WebRtc_UWord16 *cdfSize,
- const WebRtc_Word16 lenData);
+ const uint16_t **cdf,
+ const uint16_t *cdfSize,
+ const int16_t lenData);
/****************************************************************************
@@ -150,11 +150,11 @@
* Return value : number of bytes in original stream
* <0 if error detected
*/
-WebRtc_Word16 WebRtcIsacfix_DecHistOneStepMulti(
- WebRtc_Word16 *data,
+int16_t WebRtcIsacfix_DecHistOneStepMulti(
+ int16_t *data,
Bitstr_dec *streamData,
- const WebRtc_UWord16 **cdf,
- const WebRtc_UWord16 *initIndex,
- const WebRtc_Word16 lenData);
+ const uint16_t **cdf,
+ const uint16_t *initIndex,
+ const int16_t lenData);
#endif /* WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_ARITH_ROUTINS_H_ */
diff --git a/webrtc/modules/audio_coding/codecs/isac/fix/source/bandwidth_estimator.c b/webrtc/modules/audio_coding/codecs/isac/fix/source/bandwidth_estimator.c
index 8e21b29..724a900 100644
--- a/webrtc/modules/audio_coding/codecs/isac/fix/source/bandwidth_estimator.c
+++ b/webrtc/modules/audio_coding/codecs/isac/fix/source/bandwidth_estimator.c
@@ -24,14 +24,14 @@
/* array of quantization levels for bottle neck info; Matlab code: */
/* sprintf('%4.1ff, ', logspace(log10(5000), log10(40000), 12)) */
-static const WebRtc_Word16 kQRateTable[12] = {
+static const int16_t kQRateTable[12] = {
10000, 11115, 12355, 13733, 15265, 16967,
18860, 20963, 23301, 25900, 28789, 32000
};
/* 0.1 times the values in the table kQRateTable */
/* values are in Q16 */
-static const WebRtc_Word32 KQRate01[12] = {
+static const int32_t KQRate01[12] = {
65536000, 72843264, 80969728, 90000589, 100040704, 111194931,
123600896, 137383117, 152705434, 169738240, 188671590, 209715200
};
@@ -40,10 +40,10 @@
* 8 bits/byte * 1000 msec/sec * 1/framelength (in msec)->bits/byte*sec
* frame length will either be 30 or 60 msec. 8738 is 1/60 in Q19 and 1/30 in Q18
* The following number is either in Q15 or Q14 depending on the current frame length */
-static const WebRtc_Word32 kBitsByteSec = 4369000;
+static const int32_t kBitsByteSec = 4369000;
/* Received header rate. First value is for 30 ms packets and second for 60 ms */
-static const WebRtc_Word16 kRecHeaderRate[2] = {
+static const int16_t kRecHeaderRate[2] = {
9333, 4666
};
@@ -51,13 +51,13 @@
minBwInv 30 ms, maxBwInv 30 ms,
minBwInv 60 ms, maxBwInv 69 ms
*/
-static const WebRtc_Word32 kInvBandwidth[4] = {
+static const int32_t kInvBandwidth[4] = {
55539, 25978,
73213, 29284
};
/* Number of samples in 25 msec */
-static const WebRtc_Word32 kSamplesIn25msec = 400;
+static const int32_t kSamplesIn25msec = 400;
/****************************************************************************
@@ -70,7 +70,7 @@
*
* Return value : 0
*/
-WebRtc_Word32 WebRtcIsacfix_InitBandwidthEstimator(BwEstimatorstr *bweStr)
+int32_t WebRtcIsacfix_InitBandwidthEstimator(BwEstimatorstr *bweStr)
{
bweStr->prevFrameSizeMs = INIT_FRAME_LEN;
bweStr->prevRtpNumber = 0;
@@ -93,15 +93,15 @@
bweStr->recBw = INIT_BN_EST;
bweStr->recBwAvgQ = INIT_BN_EST_Q7;
bweStr->recBwAvg = INIT_REC_BN_EST_Q5;
- bweStr->recJitter = (WebRtc_Word32) 327680; /* 10 in Q15 */
+ bweStr->recJitter = (int32_t) 327680; /* 10 in Q15 */
bweStr->recJitterShortTerm = 0;
- bweStr->recJitterShortTermAbs = (WebRtc_Word32) 40960; /* 5 in Q13 */
- bweStr->recMaxDelay = (WebRtc_Word32) 10;
- bweStr->recMaxDelayAvgQ = (WebRtc_Word32) 5120; /* 10 in Q9 */
+ bweStr->recJitterShortTermAbs = (int32_t) 40960; /* 5 in Q13 */
+ bweStr->recMaxDelay = (int32_t) 10;
+ bweStr->recMaxDelayAvgQ = (int32_t) 5120; /* 10 in Q9 */
bweStr->recHeaderRate = INIT_HDR_RATE;
bweStr->countRecPkts = 0;
bweStr->sendBwAvg = INIT_BN_EST_Q7;
- bweStr->sendMaxDelayAvg = (WebRtc_Word32) 5120; /* 10 in Q9 */
+ bweStr->sendMaxDelayAvg = (int32_t) 5120; /* 10 in Q9 */
bweStr->countHighSpeedRec = 0;
bweStr->highSpeedRec = 0;
@@ -139,42 +139,42 @@
* Return value : 0 if everything went fine,
* -1 otherwise
*/
-WebRtc_Word32 WebRtcIsacfix_UpdateUplinkBwImpl(BwEstimatorstr *bweStr,
- const WebRtc_UWord16 rtpNumber,
- const WebRtc_Word16 frameSize,
- const WebRtc_UWord32 sendTime,
- const WebRtc_UWord32 arrivalTime,
- const WebRtc_Word16 pksize,
- const WebRtc_UWord16 Index)
+int32_t WebRtcIsacfix_UpdateUplinkBwImpl(BwEstimatorstr *bweStr,
+ const uint16_t rtpNumber,
+ const int16_t frameSize,
+ const uint32_t sendTime,
+ const uint32_t arrivalTime,
+ const int16_t pksize,
+ const uint16_t Index)
{
- WebRtc_UWord16 weight = 0;
- WebRtc_UWord32 currBwInv = 0;
- WebRtc_UWord16 recRtpRate;
- WebRtc_UWord32 arrTimeProj;
- WebRtc_Word32 arrTimeDiff;
- WebRtc_Word32 arrTimeNoise;
- WebRtc_Word32 arrTimeNoiseAbs;
- WebRtc_Word32 sendTimeDiff;
+ uint16_t weight = 0;
+ uint32_t currBwInv = 0;
+ uint16_t recRtpRate;
+ uint32_t arrTimeProj;
+ int32_t arrTimeDiff;
+ int32_t arrTimeNoise;
+ int32_t arrTimeNoiseAbs;
+ int32_t sendTimeDiff;
- WebRtc_Word32 delayCorrFactor = DELAY_CORRECTION_MED;
- WebRtc_Word32 lateDiff = 0;
- WebRtc_Word16 immediateSet = 0;
- WebRtc_Word32 frameSizeSampl;
+ int32_t delayCorrFactor = DELAY_CORRECTION_MED;
+ int32_t lateDiff = 0;
+ int16_t immediateSet = 0;
+ int32_t frameSizeSampl;
- WebRtc_Word32 temp;
- WebRtc_Word32 msec;
- WebRtc_UWord32 exponent;
- WebRtc_UWord32 reductionFactor;
- WebRtc_UWord32 numBytesInv;
- WebRtc_Word32 sign;
+ int32_t temp;
+ int32_t msec;
+ uint32_t exponent;
+ uint32_t reductionFactor;
+ uint32_t numBytesInv;
+ int32_t sign;
- WebRtc_UWord32 byteSecondsPerBit;
- WebRtc_UWord32 tempLower;
- WebRtc_UWord32 tempUpper;
- WebRtc_Word32 recBwAvgInv;
- WebRtc_Word32 numPktsExpected;
+ uint32_t byteSecondsPerBit;
+ uint32_t tempLower;
+ uint32_t tempUpper;
+ int32_t recBwAvgInv;
+ int32_t numPktsExpected;
- WebRtc_Word16 errCode;
+ int16_t errCode;
/* UPDATE ESTIMATES FROM OTHER SIDE */
@@ -200,8 +200,8 @@
}
/* kBitsByteSec is in Q15 */
- recRtpRate = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL(kBitsByteSec,
- (WebRtc_Word32)pksize), 15) + bweStr->recHeaderRate;
+ recRtpRate = (int16_t)WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL(kBitsByteSec,
+ (int32_t)pksize), 15) + bweStr->recHeaderRate;
} else {
/* If frameSize changed since last call, from 60 to 30, recalculate some values */
@@ -215,8 +215,8 @@
}
/* kBitsByteSec is in Q14 */
- recRtpRate = (WebRtc_UWord16)WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL(kBitsByteSec,
- (WebRtc_Word32)pksize), 14) + bweStr->recHeaderRate;
+ recRtpRate = (uint16_t)WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL(kBitsByteSec,
+ (int32_t)pksize), 14) + bweStr->recHeaderRate;
}
@@ -243,7 +243,7 @@
bweStr->countRecPkts++;
/* Calculate framesize in msec */
- frameSizeSampl = WEBRTC_SPL_MUL_16_16((WebRtc_Word16)SAMPLES_PER_MSEC, frameSize);
+ frameSizeSampl = WEBRTC_SPL_MUL_16_16((int16_t)SAMPLES_PER_MSEC, frameSize);
/* Check that it's not one of the first 9 packets */
if ( bweStr->countUpdates > 0 ) {
@@ -292,13 +292,13 @@
reductionFactor = WEBRTC_SPL_RSHIFT_U32(reductionFactor, 11);
if ( reductionFactor != 0 ) {
- bweStr->recBwInv = WEBRTC_SPL_MUL((WebRtc_Word32)bweStr->recBwInv, (WebRtc_Word32)reductionFactor);
- bweStr->recBwInv = WEBRTC_SPL_RSHIFT_W32((WebRtc_Word32)bweStr->recBwInv, 13);
+ bweStr->recBwInv = WEBRTC_SPL_MUL((int32_t)bweStr->recBwInv, (int32_t)reductionFactor);
+ bweStr->recBwInv = WEBRTC_SPL_RSHIFT_W32((int32_t)bweStr->recBwInv, 13);
} else {
/* recBwInv = 1 / (INIT_BN_EST + INIT_HDR_RATE) in Q26 (Q30??)*/
bweStr->recBwInv = WEBRTC_SPL_DIV((1073741824 +
- WEBRTC_SPL_LSHIFT_W32(((WebRtc_Word32)INIT_BN_EST + INIT_HDR_RATE), 1)), INIT_BN_EST + INIT_HDR_RATE);
+ WEBRTC_SPL_LSHIFT_W32(((int32_t)INIT_BN_EST + INIT_HDR_RATE), 1)), INIT_BN_EST + INIT_HDR_RATE);
}
/* reset time-since-update counter */
@@ -332,12 +332,12 @@
/* 8000 is 1/2 second (in samples at FS) */
if (lateDiff > 8000) {
- delayCorrFactor = (WebRtc_Word32) DELAY_CORRECTION_MAX;
+ delayCorrFactor = (int32_t) DELAY_CORRECTION_MAX;
bweStr->inWaitPeriod = 1;
bweStr->startWaitPeriod = arrivalTime;
immediateSet = 1;
} else if (lateDiff > 5120) {
- delayCorrFactor = (WebRtc_Word32) DELAY_CORRECTION_MED;
+ delayCorrFactor = (int32_t) DELAY_CORRECTION_MED;
immediateSet = 1;
bweStr->inWaitPeriod = 1;
bweStr->startWaitPeriod = arrivalTime;
@@ -345,19 +345,19 @@
}
}
- if ((bweStr->prevRtpRate > WEBRTC_SPL_RSHIFT_W32((WebRtc_Word32) bweStr->recBwAvg, 5)) &&
- (recRtpRate > WEBRTC_SPL_RSHIFT_W32((WebRtc_Word32)bweStr->recBwAvg, 5)) &&
+ if ((bweStr->prevRtpRate > WEBRTC_SPL_RSHIFT_W32((int32_t) bweStr->recBwAvg, 5)) &&
+ (recRtpRate > WEBRTC_SPL_RSHIFT_W32((int32_t)bweStr->recBwAvg, 5)) &&
!bweStr->inWaitPeriod) {
/* test if still in initiation period and increment counter */
if (bweStr->countUpdates++ > 99) {
/* constant weight after initiation part, 0.01 in Q13 */
- weight = (WebRtc_UWord16) 82;
+ weight = (uint16_t) 82;
} else {
/* weight decreases with number of updates, 1/countUpdates in Q13 */
- weight = (WebRtc_UWord16) WebRtcSpl_DivW32W16(
- (WebRtc_Word32)(8192 + WEBRTC_SPL_RSHIFT_W32((WebRtc_Word32) bweStr->countUpdates, 1)),
- (WebRtc_Word16)bweStr->countUpdates);
+ weight = (uint16_t) WebRtcSpl_DivW32W16(
+ (int32_t)(8192 + WEBRTC_SPL_RSHIFT_W32((int32_t) bweStr->countUpdates, 1)),
+ (int16_t)bweStr->countUpdates);
}
/* Bottle Neck Estimation */
@@ -373,9 +373,9 @@
}
/* compute inverse receiving rate for last packet, in Q19 */
- numBytesInv = (WebRtc_UWord16) WebRtcSpl_DivW32W16(
- (WebRtc_Word32)(524288 + WEBRTC_SPL_RSHIFT_W32(((WebRtc_Word32)pksize + HEADER_SIZE), 1)),
- (WebRtc_Word16)(pksize + HEADER_SIZE));
+ numBytesInv = (uint16_t) WebRtcSpl_DivW32W16(
+ (int32_t)(524288 + WEBRTC_SPL_RSHIFT_W32(((int32_t)pksize + HEADER_SIZE), 1)),
+ (int16_t)(pksize + HEADER_SIZE));
/* 8389 is ~ 1/128000 in Q30 */
byteSecondsPerBit = WEBRTC_SPL_MUL_16_16(arrTimeDiff, 8389);
@@ -402,7 +402,7 @@
/* update bottle neck rate estimate */
bweStr->recBwInv = WEBRTC_SPL_UMUL(weight, currBwInv) +
- WEBRTC_SPL_UMUL((WebRtc_UWord32) 8192 - weight, bweStr->recBwInv);
+ WEBRTC_SPL_UMUL((uint32_t) 8192 - weight, bweStr->recBwInv);
/* Shift back to Q30 from Q40 (actual used bits shouldn't be more than 27 based on minBwInv)
up to 30 bits used with Q13 weight */
@@ -417,24 +417,24 @@
and NOT right shifting recBwAvg 5 bits to an integer
At max 13 bits are used
shift to Q5 */
- recBwAvgInv = WEBRTC_SPL_UDIV((WebRtc_UWord32)(0x80000000 + WEBRTC_SPL_RSHIFT_U32(bweStr->recBwAvg, 1)),
+ recBwAvgInv = WEBRTC_SPL_UDIV((uint32_t)(0x80000000 + WEBRTC_SPL_RSHIFT_U32(bweStr->recBwAvg, 1)),
bweStr->recBwAvg);
/* Calculate Projected arrival time difference */
/* The numerator of the quotient can be 22 bits so right shift inv by 4 to avoid overflow
result in Q22 */
- arrTimeProj = WEBRTC_SPL_MUL((WebRtc_Word32)8000, recBwAvgInv);
+ arrTimeProj = WEBRTC_SPL_MUL((int32_t)8000, recBwAvgInv);
/* shift to Q22 */
arrTimeProj = WEBRTC_SPL_RSHIFT_U32(arrTimeProj, 4);
/* complete calculation */
- arrTimeProj = WEBRTC_SPL_MUL(((WebRtc_Word32)pksize + HEADER_SIZE), arrTimeProj);
+ arrTimeProj = WEBRTC_SPL_MUL(((int32_t)pksize + HEADER_SIZE), arrTimeProj);
/* shift to Q10 */
arrTimeProj = WEBRTC_SPL_RSHIFT_U32(arrTimeProj, 12);
/* difference between projected and actual arrival time differences */
/* Q9 (only shift arrTimeDiff by 5 to simulate divide by 16 (need to revisit if change sampling rate) DH */
- if (WEBRTC_SPL_LSHIFT_W32(arrTimeDiff, 6) > (WebRtc_Word32)arrTimeProj) {
+ if (WEBRTC_SPL_LSHIFT_W32(arrTimeDiff, 6) > (int32_t)arrTimeProj) {
arrTimeNoise = WEBRTC_SPL_LSHIFT_W32(arrTimeDiff, 6) - arrTimeProj;
sign = 1;
} else {
@@ -454,8 +454,8 @@
bweStr->recJitter = WEBRTC_SPL_RSHIFT_W32(bweStr->recJitter, 10);
/* Maximum jitter is 10 msec in Q15 */
- if (bweStr->recJitter > (WebRtc_Word32)327680) {
- bweStr->recJitter = (WebRtc_Word32)327680;
+ if (bweStr->recJitter > (int32_t)327680) {
+ bweStr->recJitter = (int32_t)327680;
}
/* short term averaged absolute jitter */
@@ -520,8 +520,8 @@
bweStr->recBw = WEBRTC_SPL_UMUL(delayCorrFactor, bweStr->recBw);
bweStr->recBw = WEBRTC_SPL_RSHIFT_U32(bweStr->recBw, 10);
- if (bweStr->recBw < (WebRtc_Word32) MIN_ISAC_BW) {
- bweStr->recBw = (WebRtc_Word32) MIN_ISAC_BW;
+ if (bweStr->recBw < (int32_t) MIN_ISAC_BW) {
+ bweStr->recBw = (int32_t) MIN_ISAC_BW;
}
bweStr->recBwAvg = WEBRTC_SPL_LSHIFT_U32(bweStr->recBw + bweStr->recHeaderRate, 5);
@@ -542,10 +542,10 @@
/* This function updates the send bottle neck rate */
/* Index - integer (range 0...23) indicating bottle neck & jitter as estimated by other side */
/* returns 0 if everything went fine, -1 otherwise */
-WebRtc_Word16 WebRtcIsacfix_UpdateUplinkBwRec(BwEstimatorstr *bweStr,
- const WebRtc_Word16 Index)
+int16_t WebRtcIsacfix_UpdateUplinkBwRec(BwEstimatorstr *bweStr,
+ const int16_t Index)
{
- WebRtc_UWord16 RateInd;
+ uint16_t RateInd;
if ( (Index < 0) || (Index > 23) ) {
return -ISAC_RANGE_ERROR_BW_ESTIMATOR;
@@ -558,7 +558,7 @@
/* compute the jitter estimate as decoded on the other side in Q9 */
/* sendMaxDelayAvg = 0.9 * sendMaxDelayAvg + 0.1 * MAX_ISAC_MD */
bweStr->sendMaxDelayAvg = WEBRTC_SPL_MUL(461, bweStr->sendMaxDelayAvg) +
- WEBRTC_SPL_MUL(51, WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)MAX_ISAC_MD, 9));
+ WEBRTC_SPL_MUL(51, WEBRTC_SPL_LSHIFT_W32((int32_t)MAX_ISAC_MD, 9));
bweStr->sendMaxDelayAvg = WEBRTC_SPL_RSHIFT_W32(bweStr->sendMaxDelayAvg, 9);
} else {
@@ -566,7 +566,7 @@
/* compute the jitter estimate as decoded on the other side in Q9 */
/* sendMaxDelayAvg = 0.9 * sendMaxDelayAvg + 0.1 * MIN_ISAC_MD */
bweStr->sendMaxDelayAvg = WEBRTC_SPL_MUL(461, bweStr->sendMaxDelayAvg) +
- WEBRTC_SPL_MUL(51, WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)MIN_ISAC_MD,9));
+ WEBRTC_SPL_MUL(51, WEBRTC_SPL_LSHIFT_W32((int32_t)MIN_ISAC_MD,9));
bweStr->sendMaxDelayAvg = WEBRTC_SPL_RSHIFT_W32(bweStr->sendMaxDelayAvg, 9);
}
@@ -605,18 +605,18 @@
* Return:
* bandwidth and jitter index (0..23)
*/
-WebRtc_UWord16 WebRtcIsacfix_GetDownlinkBwIndexImpl(BwEstimatorstr *bweStr)
+uint16_t WebRtcIsacfix_GetDownlinkBwIndexImpl(BwEstimatorstr *bweStr)
{
- WebRtc_Word32 rate;
- WebRtc_Word32 maxDelay;
- WebRtc_UWord16 rateInd;
- WebRtc_UWord16 maxDelayBit;
- WebRtc_Word32 tempTerm1;
- WebRtc_Word32 tempTerm2;
- WebRtc_Word32 tempTermX;
- WebRtc_Word32 tempTermY;
- WebRtc_Word32 tempMin;
- WebRtc_Word32 tempMax;
+ int32_t rate;
+ int32_t maxDelay;
+ uint16_t rateInd;
+ uint16_t maxDelayBit;
+ int32_t tempTerm1;
+ int32_t tempTerm2;
+ int32_t tempTermX;
+ int32_t tempTermY;
+ int32_t tempMin;
+ int32_t tempMax;
/* Get Rate Index */
@@ -627,7 +627,7 @@
/* recBwAvg = 0.9 * recBwAvg + 0.1 * (rate + bweStr->recHeaderRate), 0.9 and 0.1 in Q9 */
bweStr->recBwAvg = WEBRTC_SPL_UMUL(922, bweStr->recBwAvg) +
- WEBRTC_SPL_UMUL(102, WEBRTC_SPL_LSHIFT_U32((WebRtc_UWord32)rate + bweStr->recHeaderRate, 5));
+ WEBRTC_SPL_UMUL(102, WEBRTC_SPL_LSHIFT_U32((uint32_t)rate + bweStr->recHeaderRate, 5));
bweStr->recBwAvg = WEBRTC_SPL_RSHIFT_U32(bweStr->recBwAvg, 10);
/* Find quantization index that gives the closest rate after averaging.
@@ -649,7 +649,7 @@
tempTermX = WEBRTC_SPL_UMUL(461, bweStr->recBwAvgQ) - tempTerm1;
/* rate in Q16 */
- tempTermY = WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)rate, 16);
+ tempTermY = WEBRTC_SPL_LSHIFT_W32((int32_t)rate, 16);
/* 0.1 * kQRateTable[rateInd] = KQRate01[rateInd] */
tempTerm1 = tempTermX + KQRate01[rateInd] - tempTermY;
@@ -690,8 +690,8 @@
/* Update quantized max delay average */
tempMax = 652800; /* MAX_ISAC_MD * 0.1 in Q18 */
tempMin = 130560; /* MIN_ISAC_MD * 0.1 in Q18 */
- tempTermX = WEBRTC_SPL_MUL((WebRtc_Word32)bweStr->recMaxDelayAvgQ, (WebRtc_Word32)461);
- tempTermY = WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)maxDelay, 18);
+ tempTermX = WEBRTC_SPL_MUL((int32_t)bweStr->recMaxDelayAvgQ, (int32_t)461);
+ tempTermY = WEBRTC_SPL_LSHIFT_W32((int32_t)maxDelay, 18);
tempTerm1 = tempTermX + tempMax - tempTermY;
tempTerm2 = tempTermY - tempTermX - tempMin;
@@ -711,24 +711,24 @@
}
/* Return bandwidth and jitter index (0..23) */
- return (WebRtc_UWord16)(rateInd + maxDelayBit);
+ return (uint16_t)(rateInd + maxDelayBit);
}
/* get the bottle neck rate from far side to here, as estimated on this side */
-WebRtc_UWord16 WebRtcIsacfix_GetDownlinkBandwidth(const BwEstimatorstr *bweStr)
+uint16_t WebRtcIsacfix_GetDownlinkBandwidth(const BwEstimatorstr *bweStr)
{
- WebRtc_UWord32 recBw;
- WebRtc_Word32 jitter_sign; /* Q8 */
- WebRtc_Word32 bw_adjust; /* Q16 */
- WebRtc_Word32 rec_jitter_short_term_abs_inv; /* Q18 */
- WebRtc_Word32 temp;
+ uint32_t recBw;
+ int32_t jitter_sign; /* Q8 */
+ int32_t bw_adjust; /* Q16 */
+ int32_t rec_jitter_short_term_abs_inv; /* Q18 */
+ int32_t temp;
/* Q18 rec jitter short term abs is in Q13, multiply it by 2^13 to save precision
2^18 then needs to be shifted 13 bits to 2^31 */
rec_jitter_short_term_abs_inv = WEBRTC_SPL_UDIV(0x80000000, bweStr->recJitterShortTermAbs);
/* Q27 = 9 + 18 */
- jitter_sign = WEBRTC_SPL_MUL(WEBRTC_SPL_RSHIFT_W32(bweStr->recJitterShortTerm, 4), (WebRtc_Word32)rec_jitter_short_term_abs_inv);
+ jitter_sign = WEBRTC_SPL_MUL(WEBRTC_SPL_RSHIFT_W32(bweStr->recJitterShortTerm, 4), (int32_t)rec_jitter_short_term_abs_inv);
if (jitter_sign < 0) {
temp = -jitter_sign;
@@ -748,9 +748,9 @@
temp = WEBRTC_SPL_MUL(jitter_sign, temp);
temp = -temp;
temp = WEBRTC_SPL_RSHIFT_W32(temp, 8);
- bw_adjust = (WebRtc_UWord32)65536 + temp; /* (1 << 16) + temp; */
+ bw_adjust = (uint32_t)65536 + temp; /* (1 << 16) + temp; */
} else {
- bw_adjust = (WebRtc_UWord32)65536 - WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL(jitter_sign, temp), 8);/* (1 << 16) - ((jitter_sign * temp) >> 8); */
+ bw_adjust = (uint32_t)65536 - WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL(jitter_sign, temp), 8);/* (1 << 16) - ((jitter_sign * temp) >> 8); */
}
//make sure following multiplication won't overflow
@@ -769,15 +769,15 @@
recBw = MAX_ISAC_BW;
}
- return (WebRtc_UWord16) recBw;
+ return (uint16_t) recBw;
}
/* Returns the max delay (in ms) */
-WebRtc_Word16 WebRtcIsacfix_GetDownlinkMaxDelay(const BwEstimatorstr *bweStr)
+int16_t WebRtcIsacfix_GetDownlinkMaxDelay(const BwEstimatorstr *bweStr)
{
- WebRtc_Word16 recMaxDelay;
+ int16_t recMaxDelay;
- recMaxDelay = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(bweStr->recMaxDelay, 15);
+ recMaxDelay = (int16_t) WEBRTC_SPL_RSHIFT_W32(bweStr->recMaxDelay, 15);
/* limit range of jitter estimate */
if (recMaxDelay < MIN_ISAC_MD) {
@@ -790,11 +790,11 @@
}
/* get the bottle neck rate from here to far side, as estimated by far side */
-WebRtc_Word16 WebRtcIsacfix_GetUplinkBandwidth(const BwEstimatorstr *bweStr)
+int16_t WebRtcIsacfix_GetUplinkBandwidth(const BwEstimatorstr *bweStr)
{
- WebRtc_Word16 send_bw;
+ int16_t send_bw;
- send_bw = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_U32(bweStr->sendBwAvg, 7);
+ send_bw = (int16_t) WEBRTC_SPL_RSHIFT_U32(bweStr->sendBwAvg, 7);
/* limit range of bottle neck rate */
if (send_bw < MIN_ISAC_BW) {
@@ -809,11 +809,11 @@
/* Returns the max delay value from the other side in ms */
-WebRtc_Word16 WebRtcIsacfix_GetUplinkMaxDelay(const BwEstimatorstr *bweStr)
+int16_t WebRtcIsacfix_GetUplinkMaxDelay(const BwEstimatorstr *bweStr)
{
- WebRtc_Word16 send_max_delay;
+ int16_t send_max_delay;
- send_max_delay = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(bweStr->sendMaxDelayAvg, 9);
+ send_max_delay = (int16_t) WEBRTC_SPL_RSHIFT_W32(bweStr->sendMaxDelayAvg, 9);
/* limit range of jitter estimate */
if (send_max_delay < MIN_ISAC_MD) {
@@ -832,17 +832,17 @@
* update long-term average bitrate and amount of data in buffer
* returns minimum payload size (bytes)
*/
-WebRtc_UWord16 WebRtcIsacfix_GetMinBytes(RateModel *State,
- WebRtc_Word16 StreamSize, /* bytes in bitstream */
- const WebRtc_Word16 FrameSamples, /* samples per frame */
- const WebRtc_Word16 BottleNeck, /* bottle neck rate; excl headers (bps) */
- const WebRtc_Word16 DelayBuildUp) /* max delay from bottle neck buffering (ms) */
+uint16_t WebRtcIsacfix_GetMinBytes(RateModel *State,
+ int16_t StreamSize, /* bytes in bitstream */
+ const int16_t FrameSamples, /* samples per frame */
+ const int16_t BottleNeck, /* bottle neck rate; excl headers (bps) */
+ const int16_t DelayBuildUp) /* max delay from bottle neck buffering (ms) */
{
- WebRtc_Word32 MinRate = 0;
- WebRtc_UWord16 MinBytes;
- WebRtc_Word16 TransmissionTime;
- WebRtc_Word32 inv_Q12;
- WebRtc_Word32 den;
+ int32_t MinRate = 0;
+ uint16_t MinBytes;
+ int16_t TransmissionTime;
+ int32_t inv_Q12;
+ int32_t den;
/* first 10 packets @ low rate, then INIT_BURST_LEN packets @ fixed rate of INIT_RATE bps */
@@ -887,7 +887,7 @@
//round and shift before conversion
MinRate += 256;
MinRate = WEBRTC_SPL_RSHIFT_W32(MinRate, 9);
- MinBytes = (WebRtc_UWord16)WEBRTC_SPL_UDIV(WEBRTC_SPL_MUL(MinRate, FrameSamples), FS8);
+ MinBytes = (uint16_t)WEBRTC_SPL_UDIV(WEBRTC_SPL_MUL(MinRate, FrameSamples), FS8);
/* StreamSize will be adjusted if less than MinBytes */
if (StreamSize < MinBytes) {
@@ -904,12 +904,12 @@
State->ExceedAgo = 0;
}
} else {
- State->ExceedAgo += (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W16(FrameSamples, 4); /* ms */
+ State->ExceedAgo += (int16_t)WEBRTC_SPL_RSHIFT_W16(FrameSamples, 4); /* ms */
State->PrevExceed = 1;
}
} else {
State->PrevExceed = 0;
- State->ExceedAgo += (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W16(FrameSamples, 4); /* ms */
+ State->ExceedAgo += (int16_t)WEBRTC_SPL_RSHIFT_W16(FrameSamples, 4); /* ms */
}
/* set burst flag if bottle neck not exceeded for long time */
@@ -923,9 +923,9 @@
/* Update buffer delay */
- TransmissionTime = (WebRtc_Word16)WEBRTC_SPL_DIV(WEBRTC_SPL_MUL(StreamSize, 8000), BottleNeck); /* ms */
+ TransmissionTime = (int16_t)WEBRTC_SPL_DIV(WEBRTC_SPL_MUL(StreamSize, 8000), BottleNeck); /* ms */
State->StillBuffered += TransmissionTime;
- State->StillBuffered -= (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W16(FrameSamples, 4); //>>4 = SAMPLES_PER_MSEC /* ms */
+ State->StillBuffered -= (int16_t)WEBRTC_SPL_RSHIFT_W16(FrameSamples, 4); //>>4 = SAMPLES_PER_MSEC /* ms */
if (State->StillBuffered < 0) {
State->StillBuffered = 0;
}
@@ -942,19 +942,19 @@
* update long-term average bitrate and amount of data in buffer
*/
void WebRtcIsacfix_UpdateRateModel(RateModel *State,
- WebRtc_Word16 StreamSize, /* bytes in bitstream */
- const WebRtc_Word16 FrameSamples, /* samples per frame */
- const WebRtc_Word16 BottleNeck) /* bottle neck rate; excl headers (bps) */
+ int16_t StreamSize, /* bytes in bitstream */
+ const int16_t FrameSamples, /* samples per frame */
+ const int16_t BottleNeck) /* bottle neck rate; excl headers (bps) */
{
- WebRtc_Word16 TransmissionTime;
+ int16_t TransmissionTime;
/* avoid the initial "high-rate" burst */
State->InitCounter = 0;
/* Update buffer delay */
- TransmissionTime = (WebRtc_Word16)WEBRTC_SPL_DIV(WEBRTC_SPL_MUL(WEBRTC_SPL_MUL(StreamSize, 8), 1000), BottleNeck); /* ms */
+ TransmissionTime = (int16_t)WEBRTC_SPL_DIV(WEBRTC_SPL_MUL(WEBRTC_SPL_MUL(StreamSize, 8), 1000), BottleNeck); /* ms */
State->StillBuffered += TransmissionTime;
- State->StillBuffered -= (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W16(FrameSamples, 4); /* ms */
+ State->StillBuffered -= (int16_t)WEBRTC_SPL_RSHIFT_W16(FrameSamples, 4); /* ms */
if (State->StillBuffered < 0) {
State->StillBuffered = 0;
}
@@ -975,9 +975,9 @@
-WebRtc_Word16 WebRtcIsacfix_GetNewFrameLength(WebRtc_Word16 bottle_neck, WebRtc_Word16 current_framesamples)
+int16_t WebRtcIsacfix_GetNewFrameLength(int16_t bottle_neck, int16_t current_framesamples)
{
- WebRtc_Word16 new_framesamples;
+ int16_t new_framesamples;
new_framesamples = current_framesamples;
@@ -1000,20 +1000,20 @@
return new_framesamples;
}
-WebRtc_Word16 WebRtcIsacfix_GetSnr(WebRtc_Word16 bottle_neck, WebRtc_Word16 framesamples)
+int16_t WebRtcIsacfix_GetSnr(int16_t bottle_neck, int16_t framesamples)
{
- WebRtc_Word16 s2nr = 0;
+ int16_t s2nr = 0;
/* find new SNR value */
//consider BottleNeck to be in Q10 ( * 1 in Q10)
switch(framesamples) {
case 480:
/*s2nr = -1*(a_30 << 10) + ((b_30 * bottle_neck) >> 10);*/
- s2nr = -22500 + (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(500, bottle_neck, 10); //* 0.001; //+ c_30 * bottle_neck * bottle_neck * 0.000001;
+ s2nr = -22500 + (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(500, bottle_neck, 10); //* 0.001; //+ c_30 * bottle_neck * bottle_neck * 0.000001;
break;
case 960:
/*s2nr = -1*(a_60 << 10) + ((b_60 * bottle_neck) >> 10);*/
- s2nr = -22500 + (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(500, bottle_neck, 10); //* 0.001; //+ c_30 * bottle_neck * bottle_neck * 0.000001;
+ s2nr = -22500 + (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(500, bottle_neck, 10); //* 0.001; //+ c_30 * bottle_neck * bottle_neck * 0.000001;
break;
default:
s2nr = -1; /* Error */
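The bottleneck updates in this file repeatedly use the same Q13 exponential-averaging pattern: a weight of roughly 1/countUpdates during start-up, then a constant 82 (about 0.01 in Q13), blended as weight * sample + (8192 - weight) * avg and shifted back. A hedged standalone sketch of that pattern (illustrative names, 64-bit intermediates used here for simplicity rather than the library's scaling tricks):

#include <stdint.h>

/* Sketch of the Q13 exponential moving average used for the bottleneck
 * estimate: weight ~ 1/count while warming up, ~0.01 afterwards. */
static uint32_t ema_q13(uint32_t avg, uint32_t sample, int count) {
  uint16_t weight_q13;
  if (count > 99) {
    weight_q13 = 82;                                      /* ~0.01 in Q13 */
  } else if (count > 0) {
    weight_q13 = (uint16_t)((8192 + count / 2) / count);  /* ~1/count in Q13 */
  } else {
    weight_q13 = 8192;                                    /* no history yet */
  }
  /* avg' = weight * sample + (1 - weight) * avg, weights in Q13. */
  return (uint32_t)(((uint64_t)weight_q13 * sample +
                     (uint64_t)(8192 - weight_q13) * avg) >> 13);
}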
diff --git a/webrtc/modules/audio_coding/codecs/isac/fix/source/bandwidth_estimator.h b/webrtc/modules/audio_coding/codecs/isac/fix/source/bandwidth_estimator.h
index 76a50f8..acd5dd7 100644
--- a/webrtc/modules/audio_coding/codecs/isac/fix/source/bandwidth_estimator.h
+++ b/webrtc/modules/audio_coding/codecs/isac/fix/source/bandwidth_estimator.h
@@ -33,7 +33,7 @@
* Return value : 0
*/
-WebRtc_Word32 WebRtcIsacfix_InitBandwidthEstimator(BwEstimatorstr *bwest_str);
+int32_t WebRtcIsacfix_InitBandwidthEstimator(BwEstimatorstr *bwest_str);
/****************************************************************************
@@ -57,17 +57,17 @@
* -1 otherwise
*/
-WebRtc_Word32 WebRtcIsacfix_UpdateUplinkBwImpl(BwEstimatorstr *bwest_str,
- const WebRtc_UWord16 rtp_number,
- const WebRtc_Word16 frameSize,
- const WebRtc_UWord32 send_ts,
- const WebRtc_UWord32 arr_ts,
- const WebRtc_Word16 pksize,
- const WebRtc_UWord16 Index);
+int32_t WebRtcIsacfix_UpdateUplinkBwImpl(BwEstimatorstr *bwest_str,
+ const uint16_t rtp_number,
+ const int16_t frameSize,
+ const uint32_t send_ts,
+ const uint32_t arr_ts,
+ const int16_t pksize,
+ const uint16_t Index);
/* Update receiving estimates. Used when we only receive BWE index, no iSAC data packet. */
-WebRtc_Word16 WebRtcIsacfix_UpdateUplinkBwRec(BwEstimatorstr *bwest_str,
- const WebRtc_Word16 Index);
+int16_t WebRtcIsacfix_UpdateUplinkBwRec(BwEstimatorstr *bwest_str,
+ const int16_t Index);
/****************************************************************************
* WebRtcIsacfix_GetDownlinkBwIndexImpl(...)
@@ -81,47 +81,47 @@
* Return:
* bandwidth and jitter index (0..23)
*/
-WebRtc_UWord16 WebRtcIsacfix_GetDownlinkBwIndexImpl(BwEstimatorstr *bwest_str);
+uint16_t WebRtcIsacfix_GetDownlinkBwIndexImpl(BwEstimatorstr *bwest_str);
/* Returns the bandwidth estimation (in bps) */
-WebRtc_UWord16 WebRtcIsacfix_GetDownlinkBandwidth(const BwEstimatorstr *bwest_str);
+uint16_t WebRtcIsacfix_GetDownlinkBandwidth(const BwEstimatorstr *bwest_str);
/* Returns the bandwidth that iSAC should send with in bps */
-WebRtc_Word16 WebRtcIsacfix_GetUplinkBandwidth(const BwEstimatorstr *bwest_str);
+int16_t WebRtcIsacfix_GetUplinkBandwidth(const BwEstimatorstr *bwest_str);
/* Returns the max delay (in ms) */
-WebRtc_Word16 WebRtcIsacfix_GetDownlinkMaxDelay(const BwEstimatorstr *bwest_str);
+int16_t WebRtcIsacfix_GetDownlinkMaxDelay(const BwEstimatorstr *bwest_str);
/* Returns the max delay value from the other side in ms */
-WebRtc_Word16 WebRtcIsacfix_GetUplinkMaxDelay(const BwEstimatorstr *bwest_str);
+int16_t WebRtcIsacfix_GetUplinkMaxDelay(const BwEstimatorstr *bwest_str);
/*
* update amount of data in bottle neck buffer and burst handling
* returns minimum payload size (bytes)
*/
-WebRtc_UWord16 WebRtcIsacfix_GetMinBytes(RateModel *State,
- WebRtc_Word16 StreamSize, /* bytes in bitstream */
- const WebRtc_Word16 FrameLen, /* ms per frame */
- const WebRtc_Word16 BottleNeck, /* bottle neck rate; excl headers (bps) */
- const WebRtc_Word16 DelayBuildUp); /* max delay from bottle neck buffering (ms) */
+uint16_t WebRtcIsacfix_GetMinBytes(RateModel *State,
+ int16_t StreamSize, /* bytes in bitstream */
+ const int16_t FrameLen, /* ms per frame */
+ const int16_t BottleNeck, /* bottle neck rate; excl headers (bps) */
+ const int16_t DelayBuildUp); /* max delay from bottle neck buffering (ms) */
/*
* update long-term average bitrate and amount of data in buffer
*/
void WebRtcIsacfix_UpdateRateModel(RateModel *State,
- WebRtc_Word16 StreamSize, /* bytes in bitstream */
- const WebRtc_Word16 FrameSamples, /* samples per frame */
- const WebRtc_Word16 BottleNeck); /* bottle neck rate; excl headers (bps) */
+ int16_t StreamSize, /* bytes in bitstream */
+ const int16_t FrameSamples, /* samples per frame */
+ const int16_t BottleNeck); /* bottle neck rate; excl headers (bps) */
void WebRtcIsacfix_InitRateModel(RateModel *State);
/* Returns the new framelength value (input argument: bottle_neck) */
-WebRtc_Word16 WebRtcIsacfix_GetNewFrameLength(WebRtc_Word16 bottle_neck, WebRtc_Word16 current_framelength);
+int16_t WebRtcIsacfix_GetNewFrameLength(int16_t bottle_neck, int16_t current_framelength);
/* Returns the new SNR value (input argument: bottle_neck) */
//returns snr in Q10
-WebRtc_Word16 WebRtcIsacfix_GetSnr(WebRtc_Word16 bottle_neck, WebRtc_Word16 framesamples);
+int16_t WebRtcIsacfix_GetSnr(int16_t bottle_neck, int16_t framesamples);
#endif /* WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_BANDWIDTH_ESTIMATOR_H_ */
diff --git a/webrtc/modules/audio_coding/codecs/isac/fix/source/codec.h b/webrtc/modules/audio_coding/codecs/isac/fix/source/codec.h
index 689f329..88c7e1a 100644
--- a/webrtc/modules/audio_coding/codecs/isac/fix/source/codec.h
+++ b/webrtc/modules/audio_coding/codecs/isac/fix/source/codec.h
@@ -27,23 +27,23 @@
int WebRtcIsacfix_EstimateBandwidth(BwEstimatorstr* bwest_str,
Bitstr_dec* streamdata,
- WebRtc_Word32 packet_size,
- WebRtc_UWord16 rtp_seq_number,
- WebRtc_UWord32 send_ts,
- WebRtc_UWord32 arr_ts);
+ int32_t packet_size,
+ uint16_t rtp_seq_number,
+ uint32_t send_ts,
+ uint32_t arr_ts);
-WebRtc_Word16 WebRtcIsacfix_DecodeImpl(WebRtc_Word16* signal_out16,
+int16_t WebRtcIsacfix_DecodeImpl(int16_t* signal_out16,
ISACFIX_DecInst_t* ISACdec_obj,
- WebRtc_Word16* current_framesamples);
+ int16_t* current_framesamples);
-WebRtc_Word16 WebRtcIsacfix_DecodePlcImpl(WebRtc_Word16* decoded,
+int16_t WebRtcIsacfix_DecodePlcImpl(int16_t* decoded,
ISACFIX_DecInst_t* ISACdec_obj,
- WebRtc_Word16* current_framesample );
+ int16_t* current_framesample );
-int WebRtcIsacfix_EncodeImpl(WebRtc_Word16* in,
+int WebRtcIsacfix_EncodeImpl(int16_t* in,
ISACFIX_EncInst_t* ISACenc_obj,
BwEstimatorstr* bw_estimatordata,
- WebRtc_Word16 CodingMode);
+ int16_t CodingMode);
int WebRtcIsacfix_EncodeStoredData(ISACFIX_EncInst_t* ISACenc_obj,
int BWnumber,
@@ -69,93 +69,93 @@
void WebRtcIsacfix_InitTransform();
-typedef void (*Time2Spec)(WebRtc_Word16* inre1Q9,
- WebRtc_Word16* inre2Q9,
- WebRtc_Word16* outre,
- WebRtc_Word16* outim);
-typedef void (*Spec2Time)(WebRtc_Word16* inreQ7,
- WebRtc_Word16* inimQ7,
- WebRtc_Word32* outre1Q16,
- WebRtc_Word32* outre2Q16);
+typedef void (*Time2Spec)(int16_t* inre1Q9,
+ int16_t* inre2Q9,
+ int16_t* outre,
+ int16_t* outim);
+typedef void (*Spec2Time)(int16_t* inreQ7,
+ int16_t* inimQ7,
+ int32_t* outre1Q16,
+ int32_t* outre2Q16);
extern Time2Spec WebRtcIsacfix_Time2Spec;
extern Spec2Time WebRtcIsacfix_Spec2Time;
-void WebRtcIsacfix_Time2SpecC(WebRtc_Word16* inre1Q9,
- WebRtc_Word16* inre2Q9,
- WebRtc_Word16* outre,
- WebRtc_Word16* outim);
-void WebRtcIsacfix_Spec2TimeC(WebRtc_Word16* inreQ7,
- WebRtc_Word16* inimQ7,
- WebRtc_Word32* outre1Q16,
- WebRtc_Word32* outre2Q16);
+void WebRtcIsacfix_Time2SpecC(int16_t* inre1Q9,
+ int16_t* inre2Q9,
+ int16_t* outre,
+ int16_t* outim);
+void WebRtcIsacfix_Spec2TimeC(int16_t* inreQ7,
+ int16_t* inimQ7,
+ int32_t* outre1Q16,
+ int32_t* outre2Q16);
#if (defined WEBRTC_DETECT_ARM_NEON) || (defined WEBRTC_ARCH_ARM_NEON)
-void WebRtcIsacfix_Time2SpecNeon(WebRtc_Word16* inre1Q9,
- WebRtc_Word16* inre2Q9,
- WebRtc_Word16* outre,
- WebRtc_Word16* outim);
-void WebRtcIsacfix_Spec2TimeNeon(WebRtc_Word16* inreQ7,
- WebRtc_Word16* inimQ7,
- WebRtc_Word32* outre1Q16,
- WebRtc_Word32* outre2Q16);
+void WebRtcIsacfix_Time2SpecNeon(int16_t* inre1Q9,
+ int16_t* inre2Q9,
+ int16_t* outre,
+ int16_t* outim);
+void WebRtcIsacfix_Spec2TimeNeon(int16_t* inreQ7,
+ int16_t* inimQ7,
+ int32_t* outre1Q16,
+ int32_t* outre2Q16);
#endif
/* filterbank functions */
-void WebRtcIsacfix_SplitAndFilter1(WebRtc_Word16* in,
- WebRtc_Word16* LP16,
- WebRtc_Word16* HP16,
+void WebRtcIsacfix_SplitAndFilter1(int16_t* in,
+ int16_t* LP16,
+ int16_t* HP16,
PreFiltBankstr* prefiltdata);
-void WebRtcIsacfix_FilterAndCombine1(WebRtc_Word16* tempin_ch1,
- WebRtc_Word16* tempin_ch2,
- WebRtc_Word16* out16,
+void WebRtcIsacfix_FilterAndCombine1(int16_t* tempin_ch1,
+ int16_t* tempin_ch2,
+ int16_t* out16,
PostFiltBankstr* postfiltdata);
#ifdef WEBRTC_ISAC_FIX_NB_CALLS_ENABLED
-void WebRtcIsacfix_SplitAndFilter2(WebRtc_Word16* in,
- WebRtc_Word16* LP16,
- WebRtc_Word16* HP16,
+void WebRtcIsacfix_SplitAndFilter2(int16_t* in,
+ int16_t* LP16,
+ int16_t* HP16,
PreFiltBankstr* prefiltdata);
-void WebRtcIsacfix_FilterAndCombine2(WebRtc_Word16* tempin_ch1,
- WebRtc_Word16* tempin_ch2,
- WebRtc_Word16* out16,
+void WebRtcIsacfix_FilterAndCombine2(int16_t* tempin_ch1,
+ int16_t* tempin_ch2,
+ int16_t* out16,
PostFiltBankstr* postfiltdata,
- WebRtc_Word16 len);
+ int16_t len);
#endif
/* normalized lattice filters */
-void WebRtcIsacfix_NormLatticeFilterMa(WebRtc_Word16 orderCoef,
- WebRtc_Word32* stateGQ15,
- WebRtc_Word16* lat_inQ0,
- WebRtc_Word16* filt_coefQ15,
- WebRtc_Word32* gain_lo_hiQ17,
- WebRtc_Word16 lo_hi,
- WebRtc_Word16* lat_outQ9);
+void WebRtcIsacfix_NormLatticeFilterMa(int16_t orderCoef,
+ int32_t* stateGQ15,
+ int16_t* lat_inQ0,
+ int16_t* filt_coefQ15,
+ int32_t* gain_lo_hiQ17,
+ int16_t lo_hi,
+ int16_t* lat_outQ9);
-void WebRtcIsacfix_NormLatticeFilterAr(WebRtc_Word16 orderCoef,
- WebRtc_Word16* stateGQ0,
- WebRtc_Word32* lat_inQ25,
- WebRtc_Word16* filt_coefQ15,
- WebRtc_Word32* gain_lo_hiQ17,
- WebRtc_Word16 lo_hi,
- WebRtc_Word16* lat_outQ0);
+void WebRtcIsacfix_NormLatticeFilterAr(int16_t orderCoef,
+ int16_t* stateGQ0,
+ int32_t* lat_inQ25,
+ int16_t* filt_coefQ15,
+ int32_t* gain_lo_hiQ17,
+ int16_t lo_hi,
+ int16_t* lat_outQ0);
/* TODO(kma): Remove the following functions into individual header files. */
/* Internal functions in both C and ARM Neon versions */
-int WebRtcIsacfix_AutocorrC(WebRtc_Word32* __restrict r,
- const WebRtc_Word16* __restrict x,
- WebRtc_Word16 N,
- WebRtc_Word16 order,
- WebRtc_Word16* __restrict scale);
+int WebRtcIsacfix_AutocorrC(int32_t* __restrict r,
+ const int16_t* __restrict x,
+ int16_t N,
+ int16_t order,
+ int16_t* __restrict scale);
void WebRtcIsacfix_FilterMaLoopC(int16_t input0,
int16_t input1,
@@ -165,11 +165,11 @@
int32_t* ptr2);
#if (defined WEBRTC_DETECT_ARM_NEON) || (defined WEBRTC_ARCH_ARM_NEON)
-int WebRtcIsacfix_AutocorrNeon(WebRtc_Word32* __restrict r,
- const WebRtc_Word16* __restrict x,
- WebRtc_Word16 N,
- WebRtc_Word16 order,
- WebRtc_Word16* __restrict scale);
+int WebRtcIsacfix_AutocorrNeon(int32_t* __restrict r,
+ const int16_t* __restrict x,
+ int16_t N,
+ int16_t order,
+ int16_t* __restrict scale);
void WebRtcIsacfix_FilterMaLoopNeon(int16_t input0,
int16_t input1,
@@ -181,11 +181,11 @@
/* Function pointers associated with the above functions. */
-typedef int (*AutocorrFix)(WebRtc_Word32* __restrict r,
- const WebRtc_Word16* __restrict x,
- WebRtc_Word16 N,
- WebRtc_Word16 order,
- WebRtc_Word16* __restrict scale);
+typedef int (*AutocorrFix)(int32_t* __restrict r,
+ const int16_t* __restrict x,
+ int16_t N,
+ int16_t order,
+ int16_t* __restrict scale);
extern AutocorrFix WebRtcIsacfix_AutocorrFix;
typedef void (*FilterMaLoopFix)(int16_t input0,
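codec.h above declares both C and NEON variants of the same routines together with function-pointer types (Time2Spec, Spec2Time, AutocorrFix) and extern pointers. The usual pattern is to bind each pointer once at initialization based on CPU capability and route all later calls through it. A hedged sketch of that dispatch idea follows; the detection call and the variant bodies are placeholders, not WebRTC's actual init code:

#include <stdint.h>

/* Hypothetical run-time dispatch through a function pointer, mirroring the
 * Time2Spec typedef above. */
typedef void (*Time2SpecFn)(int16_t *re1, int16_t *re2,
                            int16_t *out_re, int16_t *out_im);

static void time2spec_c(int16_t *re1, int16_t *re2,
                        int16_t *out_re, int16_t *out_im) {
  (void)re1; (void)re2; (void)out_re; (void)out_im;  /* portable C version */
}
static void time2spec_neon(int16_t *re1, int16_t *re2,
                           int16_t *out_re, int16_t *out_im) {
  (void)re1; (void)re2; (void)out_re; (void)out_im;  /* NEON-optimized version */
}

static int cpu_has_neon(void) { return 0; }  /* placeholder for real detection */

Time2SpecFn time2spec = 0;

void init_transform(void) {
  /* Bind the pointer once; every later call goes through it. */
  time2spec = cpu_has_neon() ? time2spec_neon : time2spec_c;
}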
diff --git a/webrtc/modules/audio_coding/codecs/isac/fix/source/decode.c b/webrtc/modules/audio_coding/codecs/isac/fix/source/decode.c
index 2e15e7a..ae0d687 100644
--- a/webrtc/modules/audio_coding/codecs/isac/fix/source/decode.c
+++ b/webrtc/modules/audio_coding/codecs/isac/fix/source/decode.c
@@ -27,43 +27,43 @@
-WebRtc_Word16 WebRtcIsacfix_DecodeImpl(WebRtc_Word16 *signal_out16,
- ISACFIX_DecInst_t *ISACdec_obj,
- WebRtc_Word16 *current_framesamples)
+int16_t WebRtcIsacfix_DecodeImpl(int16_t *signal_out16,
+ ISACFIX_DecInst_t *ISACdec_obj,
+ int16_t *current_framesamples)
{
int k;
int err;
- WebRtc_Word16 BWno;
- WebRtc_Word16 len = 0;
+ int16_t BWno;
+ int16_t len = 0;
- WebRtc_Word16 model;
+ int16_t model;
- WebRtc_Word16 Vector_Word16_1[FRAMESAMPLES/2];
- WebRtc_Word16 Vector_Word16_2[FRAMESAMPLES/2];
+ int16_t Vector_Word16_1[FRAMESAMPLES/2];
+ int16_t Vector_Word16_2[FRAMESAMPLES/2];
- WebRtc_Word32 Vector_Word32_1[FRAMESAMPLES/2];
- WebRtc_Word32 Vector_Word32_2[FRAMESAMPLES/2];
+ int32_t Vector_Word32_1[FRAMESAMPLES/2];
+ int32_t Vector_Word32_2[FRAMESAMPLES/2];
- WebRtc_Word16 lofilt_coefQ15[ORDERLO*SUBFRAMES]; //refl. coeffs
- WebRtc_Word16 hifilt_coefQ15[ORDERHI*SUBFRAMES]; //refl. coeffs
- WebRtc_Word32 gain_lo_hiQ17[2*SUBFRAMES];
+ int16_t lofilt_coefQ15[ORDERLO*SUBFRAMES]; //refl. coeffs
+ int16_t hifilt_coefQ15[ORDERHI*SUBFRAMES]; //refl. coeffs
+ int32_t gain_lo_hiQ17[2*SUBFRAMES];
- WebRtc_Word16 PitchLags_Q7[PITCH_SUBFRAMES];
- WebRtc_Word16 PitchGains_Q12[PITCH_SUBFRAMES];
- WebRtc_Word16 AvgPitchGain_Q12;
+ int16_t PitchLags_Q7[PITCH_SUBFRAMES];
+ int16_t PitchGains_Q12[PITCH_SUBFRAMES];
+ int16_t AvgPitchGain_Q12;
- WebRtc_Word16 tmp_1, tmp_2;
- WebRtc_Word32 tmp32a, tmp32b;
- WebRtc_Word16 gainQ13;
+ int16_t tmp_1, tmp_2;
+ int32_t tmp32a, tmp32b;
+ int16_t gainQ13;
- WebRtc_Word16 frame_nb; /* counter */
- WebRtc_Word16 frame_mode; /* 0 for 20ms and 30ms, 1 for 60ms */
- WebRtc_Word16 processed_samples;
+ int16_t frame_nb; /* counter */
+ int16_t frame_mode; /* 0 for 20ms and 30ms, 1 for 60ms */
+ int16_t processed_samples;
/* PLC */
- WebRtc_Word16 overlapWin[ 240 ];
+ int16_t overlapWin[ 240 ];
(ISACdec_obj->bitstr_obj).W_upper = 0xFFFFFFFF;
(ISACdec_obj->bitstr_obj).streamval = 0;
@@ -76,8 +76,8 @@
if (err<0) // error check
return err;
- frame_mode = (WebRtc_Word16)WEBRTC_SPL_DIV(*current_framesamples, MAX_FRAMESAMPLES); /* 0, or 1 */
- processed_samples = (WebRtc_Word16)WEBRTC_SPL_DIV(*current_framesamples, frame_mode+1); /* either 320 (20ms) or 480 (30, 60 ms) */
+ frame_mode = (int16_t)WEBRTC_SPL_DIV(*current_framesamples, MAX_FRAMESAMPLES); /* 0, or 1 */
+ processed_samples = (int16_t)WEBRTC_SPL_DIV(*current_framesamples, frame_mode+1); /* either 320 (20ms) or 480 (30, 60 ms) */
err = WebRtcIsacfix_DecodeSendBandwidth(&ISACdec_obj->bitstr_obj, &BWno);
if (err<0) // error check
@@ -95,7 +95,7 @@
if (err<0) // error check
return err;
- AvgPitchGain_Q12 = (WebRtc_Word16)(((WebRtc_Word32)PitchGains_Q12[0] + PitchGains_Q12[1] + PitchGains_Q12[2] + PitchGains_Q12[3])>>2);
+ AvgPitchGain_Q12 = (int16_t)(((int32_t)PitchGains_Q12[0] + PitchGains_Q12[1] + PitchGains_Q12[2] + PitchGains_Q12[3])>>2);
/* decode & dequantize FiltCoef */
err = WebRtcIsacfix_DecodeLpc(gain_lo_hiQ17, lofilt_coefQ15, hifilt_coefQ15,
@@ -113,7 +113,7 @@
WebRtcIsacfix_Spec2Time(Vector_Word16_1, Vector_Word16_2, Vector_Word32_1, Vector_Word32_2);
for (k=0; k<FRAMESAMPLES/2; k++) {
- Vector_Word16_1[k] = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(Vector_Word32_1[k]+64, 7); //Q16 -> Q9
+ Vector_Word16_1[k] = (int16_t)WEBRTC_SPL_RSHIFT_W32(Vector_Word32_1[k]+64, 7); //Q16 -> Q9
}
/* ---- If this is recovery frame ---- */
@@ -129,14 +129,14 @@
ISACdec_obj->plcstr_obj.decayCoeffNoise = WEBRTC_SPL_WORD16_MAX; /* DECAY_RATE is in Q15 */
ISACdec_obj->plcstr_obj.pitchCycles = 0;
- PitchGains_Q12[0] = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(PitchGains_Q12[0], 700, 10 );
+ PitchGains_Q12[0] = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(PitchGains_Q12[0], 700, 10 );
/* ---- Add-overlap ---- */
WebRtcSpl_GetHanningWindow( overlapWin, RECOVERY_OVERLAP );
for( k = 0; k < RECOVERY_OVERLAP; k++ )
Vector_Word16_1[k] = WEBRTC_SPL_ADD_SAT_W16(
- (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT( (ISACdec_obj->plcstr_obj).overlapLP[k], overlapWin[RECOVERY_OVERLAP - k - 1], 14),
- (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT( Vector_Word16_1[k], overlapWin[k], 14) );
+ (int16_t)WEBRTC_SPL_MUL_16_16_RSFT( (ISACdec_obj->plcstr_obj).overlapLP[k], overlapWin[RECOVERY_OVERLAP - k - 1], 14),
+ (int16_t)WEBRTC_SPL_MUL_16_16_RSFT( Vector_Word16_1[k], overlapWin[k], 14) );
@@ -177,11 +177,11 @@
/* gain = 1.0f - 0.45f * AvgPitchGain; */
tmp32a = WEBRTC_SPL_MUL_16_16_RSFT(AvgPitchGain_Q12, 29, 0); // Q18
tmp32b = 262144 - tmp32a; // Q18
- gainQ13 = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(tmp32b, 5); // Q13
+ gainQ13 = (int16_t) WEBRTC_SPL_RSHIFT_W32(tmp32b, 5); // Q13
for (k = 0; k < FRAMESAMPLES/2; k++)
{
- Vector_Word32_1[k] = (WebRtc_Word32) WEBRTC_SPL_LSHIFT_W32(WEBRTC_SPL_MUL_16_16(Vector_Word16_2[k], gainQ13), 3); // Q25
+ Vector_Word32_1[k] = (int32_t) WEBRTC_SPL_LSHIFT_W32(WEBRTC_SPL_MUL_16_16(Vector_Word16_2[k], gainQ13), 3); // Q25
}
@@ -204,8 +204,8 @@
/* Form the polyphase signals, and compensate for DC offset */
for (k=0;k<FRAMESAMPLES/2;k++) {
- tmp_1 = (WebRtc_Word16)WebRtcSpl_SatW32ToW16(((WebRtc_Word32)Vector_Word16_1[k]+Vector_Word16_2[k] + 1)); /* Construct a new upper channel signal*/
- tmp_2 = (WebRtc_Word16)WebRtcSpl_SatW32ToW16(((WebRtc_Word32)Vector_Word16_1[k]-Vector_Word16_2[k])); /* Construct a new lower channel signal*/
+ tmp_1 = (int16_t)WebRtcSpl_SatW32ToW16(((int32_t)Vector_Word16_1[k]+Vector_Word16_2[k] + 1)); /* Construct a new upper channel signal*/
+ tmp_2 = (int16_t)WebRtcSpl_SatW32ToW16(((int32_t)Vector_Word16_1[k]-Vector_Word16_2[k])); /* Construct a new lower channel signal*/
Vector_Word16_1[k] = tmp_1;
Vector_Word16_2[k] = tmp_2;
}
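The loop at the end of the decoder above rebuilds the two polyphase channels from the sum and difference of the half-band vectors, saturating each result to 16 bits. A small self-contained sketch of that recombination (helper names are illustrative):

#include <stdint.h>

/* Sketch of sum/difference recombination with 16-bit saturation. */
static int16_t sat16(int32_t v) {
  if (v > 32767) return 32767;
  if (v < -32768) return -32768;
  return (int16_t)v;
}

static void recombine(int16_t *a, int16_t *b, int n) {
  int k;
  for (k = 0; k < n; k++) {
    int16_t upper = sat16((int32_t)a[k] + b[k] + 1);  /* new upper channel */
    int16_t lower = sat16((int32_t)a[k] - b[k]);      /* new lower channel */
    a[k] = upper;
    b[k] = lower;
  }
}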
diff --git a/webrtc/modules/audio_coding/codecs/isac/fix/source/decode_bwe.c b/webrtc/modules/audio_coding/codecs/isac/fix/source/decode_bwe.c
index 68c6003..c1221e7 100644
--- a/webrtc/modules/audio_coding/codecs/isac/fix/source/decode_bwe.c
+++ b/webrtc/modules/audio_coding/codecs/isac/fix/source/decode_bwe.c
@@ -26,13 +26,13 @@
int WebRtcIsacfix_EstimateBandwidth(BwEstimatorstr *bwest_str,
Bitstr_dec *streamdata,
- WebRtc_Word32 packet_size,
- WebRtc_UWord16 rtp_seq_number,
- WebRtc_UWord32 send_ts,
- WebRtc_UWord32 arr_ts)
+ int32_t packet_size,
+ uint16_t rtp_seq_number,
+ uint32_t send_ts,
+ uint32_t arr_ts)
{
- WebRtc_Word16 index;
- WebRtc_Word16 frame_samples;
+ int16_t index;
+ int16_t frame_samples;
int err;
/* decode framelength */
@@ -53,10 +53,10 @@
err = WebRtcIsacfix_UpdateUplinkBwImpl(
bwest_str,
rtp_seq_number,
- (WebRtc_UWord16)WEBRTC_SPL_UDIV(WEBRTC_SPL_UMUL(frame_samples,1000), FS),
+ (uint16_t)WEBRTC_SPL_UDIV(WEBRTC_SPL_UMUL(frame_samples,1000), FS),
send_ts,
arr_ts,
- (WebRtc_Word16) packet_size, /* in bytes */
+ (int16_t) packet_size, /* in bytes */
index);
/* error check */
diff --git a/webrtc/modules/audio_coding/codecs/isac/fix/source/decode_plc.c b/webrtc/modules/audio_coding/codecs/isac/fix/source/decode_plc.c
index de51658..0021675 100644
--- a/webrtc/modules/audio_coding/codecs/isac/fix/source/decode_plc.c
+++ b/webrtc/modules/audio_coding/codecs/isac/fix/source/decode_plc.c
@@ -33,29 +33,29 @@
* returns the total number of bytes in the stream
*/
-static WebRtc_Word16 plc_filterma_Fast(
- WebRtc_Word16 *In, /* (i) Vector to be filtered. InOut[-orderCoef+1]
+static int16_t plc_filterma_Fast(
+ int16_t *In, /* (i) Vector to be filtered. InOut[-orderCoef+1]
to InOut[-1] contains state */
- WebRtc_Word16 *Out, /* (o) Filtered vector */
- WebRtc_Word16 *B, /* (i) The filter coefficients (in Q0) */
- WebRtc_Word16 Blen, /* (i) Number of B coefficients */
- WebRtc_Word16 len, /* (i) Number of samples to be filtered */
- WebRtc_Word16 reduceDecay,
- WebRtc_Word16 decay,
- WebRtc_Word16 rshift )
+ int16_t *Out, /* (o) Filtered vector */
+ int16_t *B, /* (i) The filter coefficients (in Q0) */
+ int16_t Blen, /* (i) Number of B coefficients */
+ int16_t len, /* (i) Number of samples to be filtered */
+ int16_t reduceDecay,
+ int16_t decay,
+ int16_t rshift )
{
int i, j;
- WebRtc_Word32 o;
- WebRtc_Word32 lim;
+ int32_t o;
+ int32_t lim;
- lim = WEBRTC_SPL_LSHIFT_W32( (WebRtc_Word32)1, 15 + rshift )-1;
+ lim = WEBRTC_SPL_LSHIFT_W32( (int32_t)1, 15 + rshift )-1;
for (i = 0; i < len; i++)
{
- G_CONST WebRtc_Word16 *b_ptr = &B[0];
- G_CONST WebRtc_Word16 *x_ptr = &In[i];
+ G_CONST int16_t *b_ptr = &B[0];
+ G_CONST int16_t *x_ptr = &In[i];
- o = (WebRtc_Word32)0;
+ o = (int32_t)0;
for (j = 0;j < Blen; j++)
{
@@ -68,13 +68,13 @@
o = WEBRTC_SPL_ADD_SAT_W32( o, WEBRTC_SPL_LSHIFT_W32( 1, (rshift-1) ) );
/* saturate according to the domain of the filter coefficients */
- o = WEBRTC_SPL_SAT((WebRtc_Word32)lim, o, (WebRtc_Word32)-lim);
+ o = WEBRTC_SPL_SAT((int32_t)lim, o, (int32_t)-lim);
- /* o should be in the range of WebRtc_Word16 */
+ /* o should be in the range of int16_t */
o = WEBRTC_SPL_RSHIFT_W32( o, rshift );
/* decay the output signal; this is specific to plc */
- *Out++ = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT( (WebRtc_Word16)o, decay, 15); // ((o + (WebRtc_Word32)2048) >> 12);
+ *Out++ = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT( (int16_t)o, decay, 15); // ((o + (int32_t)2048) >> 12);
/* change the decay */
decay -= reduceDecay;
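
plc_filterma_Fast() above is a moving-average filter whose output is additionally faded by a Q15 decay factor that is reduced a little more for every sample. A toy standalone version of that idea follows; the rounding, saturation and rshift handling of the original are left out, and every name and number in it is illustrative only, not the iSAC API.

    #include <stdint.h>
    #include <stdio.h>

    /* MA filter whose output is scaled by a shrinking Q15 decay factor. */
    static void ma_filter_with_decay(const int16_t *in, int16_t *out,
                                     const int16_t *b, int blen, int len,
                                     int16_t decay_q15, int16_t reduce_decay) {
      for (int i = 0; i < len; i++) {
        int32_t acc = 0;
        for (int j = 0; j < blen; j++)
          acc += (int32_t)b[j] * in[i - j];    /* needs blen-1 samples of history */
        out[i] = (int16_t)(((int64_t)acc * decay_q15) >> 15);  /* fade in Q15 */
        decay_q15 -= reduce_decay;             /* fade a bit more next sample */
      }
    }

    int main(void) {
      int16_t buf[2 + 4] = {0, 0, 1000, 1000, 1000, 1000};  /* 2 history + 4 new samples */
      int16_t b[3] = {1, 1, 1};                              /* 3-tap sum filter, Q0 */
      int16_t out[4];
      ma_filter_with_decay(&buf[2], out, b, 3, 4, 32767, 4096);
      for (int i = 0; i < 4; i++) printf("%d ", out[i]);     /* faded filter output */
      printf("\n");
      return 0;
    }
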
@@ -91,13 +91,13 @@
-static __inline WebRtc_Word32 log2_Q8_T( WebRtc_UWord32 x ) {
+static __inline int32_t log2_Q8_T( uint32_t x ) {
- WebRtc_Word32 zeros, lg2;
- WebRtc_Word16 frac;
+ int32_t zeros, lg2;
+ int16_t frac;
zeros=WebRtcSpl_NormU32(x);
- frac=(WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(((WebRtc_UWord32)WEBRTC_SPL_LSHIFT_W32(x, zeros)&0x7FFFFFFF), 23);
+ frac=(int16_t)WEBRTC_SPL_RSHIFT_W32(((uint32_t)WEBRTC_SPL_LSHIFT_W32(x, zeros)&0x7FFFFFFF), 23);
/* log2(magn(i)) */
lg2= (WEBRTC_SPL_LSHIFT_W16((31-zeros), 8)+frac);
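
log2_Q8_T() above normalizes x so its leading one sits at bit 31, reads the integer part of log2 off that bit position and approximates the fractional part linearly from the next eight mantissa bits. A standalone sketch (WebRtcSpl_NormU32 replaced by a plain loop; x must be non-zero):

    #include <stdint.h>
    #include <stdio.h>

    static int32_t log2_q8(uint32_t x) {
      int zeros = 0;
      while (!((x << zeros) & 0x80000000u)) zeros++;      /* leading zeros, x > 0 */
      int32_t frac = (int32_t)(((x << zeros) & 0x7FFFFFFFu) >> 23); /* next 8 bits, Q8 */
      return ((31 - zeros) << 8) + frac;                  /* log2(x) in Q8 */
    }

    int main(void) {
      /* log2(12345) = 13.59...; the piecewise-linear Q8 result is 3457, i.e. 13.50 */
      printf("%d\n", (int)log2_q8(12345));
      return 0;
    }
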
@@ -105,16 +105,16 @@
}
-static __inline WebRtc_Word16 exp2_Q10_T(WebRtc_Word16 x) { // Both in and out in Q10
+static __inline int16_t exp2_Q10_T(int16_t x) { // Both in and out in Q10
- WebRtc_Word16 tmp16_1, tmp16_2;
+ int16_t tmp16_1, tmp16_2;
- tmp16_2=(WebRtc_Word16)(0x0400|(x&0x03FF));
- tmp16_1=-(WebRtc_Word16)WEBRTC_SPL_RSHIFT_W16(x,10);
+ tmp16_2=(int16_t)(0x0400|(x&0x03FF));
+ tmp16_1=-(int16_t)WEBRTC_SPL_RSHIFT_W16(x,10);
if(tmp16_1>0)
- return (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W16(tmp16_2, tmp16_1);
+ return (int16_t) WEBRTC_SPL_RSHIFT_W16(tmp16_2, tmp16_1);
else
- return (WebRtc_Word16) WEBRTC_SPL_LSHIFT_W16(tmp16_2, -tmp16_1);
+ return (int16_t) WEBRTC_SPL_LSHIFT_W16(tmp16_2, -tmp16_1);
}
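
exp2_Q10_T() splits its Q10 argument into integer and fractional bits, approximates 2^frac by (1 + frac) and applies the integer part as a shift. A standalone sketch, assuming arithmetic right shift of negative values just as the original does:

    #include <stdint.h>
    #include <stdio.h>

    static int16_t exp2_q10(int16_t x) {
      int16_t mant = (int16_t)(0x0400 | (x & 0x03FF));   /* (1 + frac) in Q10 */
      int16_t shift = (int16_t)-(x >> 10);               /* minus the integer part */
      return (shift > 0) ? (int16_t)(mant >> shift)      /* negative exponent */
                         : (int16_t)(mant << -shift);    /* non-negative exponent */
    }

    int main(void) {
      printf("%d\n", exp2_q10(1536));  /* 2^1.5: exact 2.828, linear approx 3072/1024 = 3.0 */
      printf("%d\n", exp2_q10(-512));  /* 2^-0.5: exact 0.707, linear approx 768/1024 = 0.75 */
      return 0;
    }
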
@@ -126,9 +126,9 @@
The function implements membership values for two sets. The mebership functions are
of second orders corresponding to half-bell-shapped pulses.
*/
-static void MemshipValQ15( WebRtc_Word16 in, WebRtc_Word16 *A, WebRtc_Word16 *B )
+static void MemshipValQ15( int16_t in, int16_t *A, int16_t *B )
{
- WebRtc_Word16 x;
+ int16_t x;
in -= 700; /* translate the lowLim to 0, limHigh = 5000 - 700, M = 2150 */
@@ -142,12 +142,12 @@
/* x = in / 2150 {in Q15} = x * 15.2409 {in Q15} =
x*15 + (x*983)/(2^12); note that 983/2^12 = 0.23999 */
- /* we are sure that x is in the range of WebRtc_Word16 */
- x = (WebRtc_Word16)( WEBRTC_SPL_MUL_16_16( in, 15 ) +
+ /* we are sure that x is in the range of int16_t */
+ x = (int16_t)( WEBRTC_SPL_MUL_16_16( in, 15 ) +
WEBRTC_SPL_MUL_16_16_RSFT( in, 983, 12) );
/* b = x^2 / 2 {in Q15} so a shift of 16 is required to
be in correct domain and one more for the division by 2 */
- *B = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32( WEBRTC_SPL_MUL_16_16( x, x ) + 0x00010000, 17 );
+ *B = (int16_t)WEBRTC_SPL_RSHIFT_W32( WEBRTC_SPL_MUL_16_16( x, x ) + 0x00010000, 17 );
*A = WEBRTC_SPL_WORD16_MAX - *B;
}
else
@@ -162,11 +162,11 @@
{
/* This is a mirror case of the above */
in = 4300 - in;
- x = (WebRtc_Word16)( WEBRTC_SPL_MUL_16_16( in, 15 ) +
+ x = (int16_t)( WEBRTC_SPL_MUL_16_16( in, 15 ) +
WEBRTC_SPL_MUL_16_16_RSFT( in, 983, 12) );
/* b = x^2 / 2 {in Q15} so a shift of 16 is required to
be in correct domain and one more for the division by 2 */
- *A = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32( WEBRTC_SPL_MUL_16_16( x, x ) + 0x00010000, 17 );
+ *A = (int16_t)WEBRTC_SPL_RSHIFT_W32( WEBRTC_SPL_MUL_16_16( x, x ) + 0x00010000, 17 );
*B = WEBRTC_SPL_WORD16_MAX - *A;
}
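
The hunk above replaces the division in/2150 by integer operations: expressed in Q15 it is in * 32768/2150 = in * 15.2409, approximated as in*15 + (in*983 >> 12) because 983/4096 = 0.23999. A quick standalone check of that approximation; the loop inputs are arbitrary test values:

    #include <stdint.h>
    #include <stdio.h>

    int main(void) {
      for (int16_t in = 500; in <= 2000; in += 500) {
        int32_t approx = in * 15 + ((in * 983) >> 12);   /* integer-only in/2150, Q15 */
        double exact = (double)in * 32768.0 / 2150.0;    /* floating-point reference */
        printf("in=%4d  approx=%6d  exact=%8.1f\n", in, (int)approx, exact);
      }
      return 0;
    }
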
@@ -181,11 +181,11 @@
-static void LinearResampler( WebRtc_Word16 *in, WebRtc_Word16 *out, WebRtc_Word16 lenIn, WebRtc_Word16 lenOut )
+static void LinearResampler( int16_t *in, int16_t *out, int16_t lenIn, int16_t lenOut )
{
- WebRtc_Word32 n;
- WebRtc_Word16 resOut, i, j, relativePos, diff; /* */
- WebRtc_UWord16 udiff;
+ int32_t n;
+ int16_t resOut, i, j, relativePos, diff; /* */
+ uint16_t udiff;
if( lenIn == lenOut )
{
@@ -193,8 +193,8 @@
return;
}
- n = WEBRTC_SPL_MUL_16_16( (WebRtc_Word16)(lenIn-1), RESAMP_RES );
- resOut = WebRtcSpl_DivW32W16ResW16( n, (WebRtc_Word16)(lenOut-1) );
+ n = WEBRTC_SPL_MUL_16_16( (int16_t)(lenIn-1), RESAMP_RES );
+ resOut = WebRtcSpl_DivW32W16ResW16( n, (int16_t)(lenOut-1) );
out[0] = in[0];
for( i = 1, j = 0, relativePos = 0; i < lenOut; i++ )
@@ -214,20 +214,20 @@
if( (in[ j ] > 0) && (in[j + 1] < 0) )
{
- udiff = (WebRtc_UWord16)(in[ j ] - in[j + 1]);
- out[ i ] = in[ j ] - (WebRtc_UWord16)( ((WebRtc_Word32)( udiff * relativePos )) >> RESAMP_RES_BIT);
+ udiff = (uint16_t)(in[ j ] - in[j + 1]);
+ out[ i ] = in[ j ] - (uint16_t)( ((int32_t)( udiff * relativePos )) >> RESAMP_RES_BIT);
}
else
{
if( (in[j] < 0) && (in[j+1] > 0) )
{
- udiff = (WebRtc_UWord16)( in[j + 1] - in[ j ] );
- out[ i ] = in[ j ] + (WebRtc_UWord16)( ((WebRtc_Word32)( udiff * relativePos )) >> RESAMP_RES_BIT);
+ udiff = (uint16_t)( in[j + 1] - in[ j ] );
+ out[ i ] = in[ j ] + (uint16_t)( ((int32_t)( udiff * relativePos )) >> RESAMP_RES_BIT);
}
else
{
diff = in[ j + 1 ] - in[ j ];
- out[ i ] = in[ j ] + (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT( diff, relativePos, RESAMP_RES_BIT );
+ out[ i ] = in[ j ] + (int16_t)WEBRTC_SPL_MUL_16_16_RSFT( diff, relativePos, RESAMP_RES_BIT );
}
}
}
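
LinearResampler() above walks through the input at a fixed-point rate of (lenIn-1)/(lenOut-1) and interpolates linearly between neighbouring samples; the two zero-crossing branches use an unsigned difference so the gap between a positive and a negative 16-bit sample cannot overflow. A minimal sketch without those special cases; RES_BIT stands in for RESAMP_RES_BIT and its value here is only a guess:

    #include <stdint.h>
    #include <stdio.h>

    #define RES_BIT 6
    #define RES (1 << RES_BIT)

    /* len_out must be > 1; the lengths used below are small purely for illustration. */
    static void linear_resample(const int16_t *in, int16_t *out, int len_in, int len_out) {
      if (len_in == len_out) {
        for (int i = 0; i < len_in; i++) out[i] = in[i];
        return;
      }
      int32_t step = ((len_in - 1) * RES) / (len_out - 1);  /* input step per output, Q6 */
      out[0] = in[0];
      for (int i = 1; i < len_out; i++) {
        int32_t pos = i * step;                 /* position in the input, Q6 */
        int j = pos >> RES_BIT;                 /* integer sample index */
        int frac = pos & (RES - 1);             /* fractional part, Q6 */
        if (j >= len_in - 1) { out[i] = in[len_in - 1]; continue; }
        out[i] = (int16_t)(in[j] + (((in[j + 1] - in[j]) * frac) >> RES_BIT));
      }
    }

    int main(void) {
      int16_t in[5] = {0, 100, 200, 300, 400};
      int16_t out[9];
      linear_resample(in, out, 5, 9);
      for (int i = 0; i < 9; i++) printf("%d ", out[i]);    /* 0 50 100 150 ... 400 */
      printf("\n");
      return 0;
    }
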
@@ -237,46 +237,46 @@
-WebRtc_Word16 WebRtcIsacfix_DecodePlcImpl(WebRtc_Word16 *signal_out16,
- ISACFIX_DecInst_t *ISACdec_obj,
- WebRtc_Word16 *current_framesamples )
+int16_t WebRtcIsacfix_DecodePlcImpl(int16_t *signal_out16,
+ ISACFIX_DecInst_t *ISACdec_obj,
+ int16_t *current_framesamples )
{
int subframecnt;
- WebRtc_Word16 len = 0;
+ int16_t len = 0;
- WebRtc_Word16* Vector_Word16_1;
- WebRtc_Word16 Vector_Word16_Extended_1[FRAMESAMPLES_HALF + NOISE_FILTER_LEN];
- WebRtc_Word16* Vector_Word16_2;
- WebRtc_Word16 Vector_Word16_Extended_2[FRAMESAMPLES_HALF + NOISE_FILTER_LEN];
+ int16_t* Vector_Word16_1;
+ int16_t Vector_Word16_Extended_1[FRAMESAMPLES_HALF + NOISE_FILTER_LEN];
+ int16_t* Vector_Word16_2;
+ int16_t Vector_Word16_Extended_2[FRAMESAMPLES_HALF + NOISE_FILTER_LEN];
- WebRtc_Word32 Vector_Word32_1[FRAMESAMPLES_HALF];
- WebRtc_Word32 Vector_Word32_2[FRAMESAMPLES_HALF];
+ int32_t Vector_Word32_1[FRAMESAMPLES_HALF];
+ int32_t Vector_Word32_2[FRAMESAMPLES_HALF];
- WebRtc_Word16 lofilt_coefQ15[ORDERLO*SUBFRAMES]; //refl. coeffs
- WebRtc_Word16 hifilt_coefQ15[ORDERHI*SUBFRAMES]; //refl. coeffs
+ int16_t lofilt_coefQ15[ORDERLO*SUBFRAMES]; //refl. coeffs
+ int16_t hifilt_coefQ15[ORDERHI*SUBFRAMES]; //refl. coeffs
- WebRtc_Word16 pitchLags_Q7[PITCH_SUBFRAMES];
- WebRtc_Word16 pitchGains_Q12[PITCH_SUBFRAMES];
+ int16_t pitchLags_Q7[PITCH_SUBFRAMES];
+ int16_t pitchGains_Q12[PITCH_SUBFRAMES];
- WebRtc_Word16 tmp_1, tmp_2;
- WebRtc_Word32 tmp32a, tmp32b;
- WebRtc_Word16 gainQ13;
+ int16_t tmp_1, tmp_2;
+ int32_t tmp32a, tmp32b;
+ int16_t gainQ13;
- WebRtc_Word16 myDecayRate;
+ int16_t myDecayRate;
/* ---------- PLC variables ------------ */
- WebRtc_Word16 lag0, i, k, noiseIndex;
- WebRtc_Word16 stretchPitchLP[PITCH_MAX_LAG + 10], stretchPitchLP1[PITCH_MAX_LAG + 10];
+ int16_t lag0, i, k, noiseIndex;
+ int16_t stretchPitchLP[PITCH_MAX_LAG + 10], stretchPitchLP1[PITCH_MAX_LAG + 10];
- WebRtc_Word32 gain_lo_hiQ17[2*SUBFRAMES];
+ int32_t gain_lo_hiQ17[2*SUBFRAMES];
- WebRtc_Word16 nLP, pLP, wNoisyLP, wPriodicLP, tmp16, minIdx;
- WebRtc_Word32 nHP, pHP, wNoisyHP, wPriodicHP, corr, minCorr, maxCoeff;
- WebRtc_Word16 noise1, rshift;
+ int16_t nLP, pLP, wNoisyLP, wPriodicLP, tmp16, minIdx;
+ int32_t nHP, pHP, wNoisyHP, wPriodicHP, corr, minCorr, maxCoeff;
+ int16_t noise1, rshift;
- WebRtc_Word16 ltpGain, pitchGain, myVoiceIndicator, myAbs, maxAbs;
- WebRtc_Word32 varIn, varOut, logVarIn, logVarOut, Q, logMaxAbs;
+ int16_t ltpGain, pitchGain, myVoiceIndicator, myAbs, maxAbs;
+ int32_t varIn, varOut, logVarIn, logVarOut, Q, logMaxAbs;
int rightShiftIn, rightShiftOut;
@@ -363,13 +363,13 @@
PITCH_MAX_LAG + 10 - lag0 + i] );
maxAbs = (myAbs > maxAbs)? myAbs:maxAbs;
}
- logVarIn = log2_Q8_T( (WebRtc_UWord32)( varIn ) ) +
- (WebRtc_Word32)(rightShiftIn << 8);
- logVarOut = log2_Q8_T( (WebRtc_UWord32)( varOut ) ) +
- (WebRtc_Word32)(rightShiftOut << 8);
- logMaxAbs = log2_Q8_T( (WebRtc_UWord32)( maxAbs ) );
+ logVarIn = log2_Q8_T( (uint32_t)( varIn ) ) +
+ (int32_t)(rightShiftIn << 8);
+ logVarOut = log2_Q8_T( (uint32_t)( varOut ) ) +
+ (int32_t)(rightShiftOut << 8);
+ logMaxAbs = log2_Q8_T( (uint32_t)( maxAbs ) );
- ltpGain = (WebRtc_Word16)(logVarOut - logVarIn);
+ ltpGain = (int16_t)(logVarOut - logVarIn);
Q = 2 * logMaxAbs - ( logVarOut - 1512 );
/*
@@ -381,8 +381,8 @@
* --
*/
- logVarIn -= log2_Q8_T( (WebRtc_UWord32)( lag0 ) );
- tmp16 = (WebRtc_Word16)((logVarIn<<1) - (4<<10) );
+ logVarIn -= log2_Q8_T( (uint32_t)( lag0 ) );
+ tmp16 = (int16_t)((logVarIn<<1) - (4<<10) );
rightShiftIn = 0;
if( tmp16 > 4096 )
{
@@ -457,11 +457,11 @@
pitchLags_Q7[0] = pitchLags_Q7[1] = pitchLags_Q7[2] = pitchLags_Q7[3] =
((ISACdec_obj->plcstr_obj).stretchLag<<7);
pitchGains_Q12[3] = ( (ISACdec_obj->plcstr_obj).lastPitchGain_Q12);
- pitchGains_Q12[2] = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(
+ pitchGains_Q12[2] = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(
pitchGains_Q12[3], 1010, 10 );
- pitchGains_Q12[1] = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(
+ pitchGains_Q12[1] = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(
pitchGains_Q12[2], 1010, 10 );
- pitchGains_Q12[0] = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(
+ pitchGains_Q12[0] = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(
pitchGains_Q12[1], 1010, 10 );
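
In the hunk above the last received pitch gain is faded across the four subframes of the concealed frame by repeated multiplication with 1010/1024 (a Q10 factor of about 0.986). A tiny standalone illustration; the 0.9 starting gain is just an example value:

    #include <stdint.h>
    #include <stdio.h>

    int main(void) {
      int16_t gains_q12[4];
      gains_q12[3] = 3686;                       /* last known gain, ~0.9 in Q12 */
      for (int k = 2; k >= 0; k--)               /* each earlier subframe a bit weaker */
        gains_q12[k] = (int16_t)(((int32_t)gains_q12[k + 1] * 1010) >> 10);
      for (int k = 0; k < 4; k++)
        printf("subframe %d: %d (%.3f)\n", k, gains_q12[k], gains_q12[k] / 4096.0);
      return 0;
    }
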
@@ -497,9 +497,9 @@
}
plc_filterma_Fast(Vector_Word16_1, Vector_Word16_Extended_1,
&(ISACdec_obj->plcstr_obj).prevPitchInvIn[FRAMESAMPLES_HALF -
- NOISE_FILTER_LEN], (WebRtc_Word16) NOISE_FILTER_LEN,
- (WebRtc_Word16) FRAMESAMPLES_HALF, (WebRtc_Word16)(5),
- (ISACdec_obj->plcstr_obj).decayCoeffNoise, (WebRtc_Word16)(6));
+ NOISE_FILTER_LEN], (int16_t) NOISE_FILTER_LEN,
+ (int16_t) FRAMESAMPLES_HALF, (int16_t)(5),
+ (ISACdec_obj->plcstr_obj).decayCoeffNoise, (int16_t)(6));
maxCoeff = WebRtcSpl_MaxAbsValueW32(
&(ISACdec_obj->plcstr_obj).prevHP[
@@ -513,7 +513,7 @@
}
for( i = 0; i < NOISE_FILTER_LEN; i++ ) {
Vector_Word16_1[ FRAMESAMPLES_HALF - NOISE_FILTER_LEN + i] =
- (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(
+ (int16_t)WEBRTC_SPL_RSHIFT_W32(
(ISACdec_obj->plcstr_obj).prevHP[
PITCH_MAX_LAG + 10 - NOISE_FILTER_LEN + i], rshift);
}
@@ -521,15 +521,15 @@
Vector_Word16_2,
Vector_Word16_Extended_2,
&Vector_Word16_1[FRAMESAMPLES_HALF - NOISE_FILTER_LEN],
- (WebRtc_Word16) NOISE_FILTER_LEN,
- (WebRtc_Word16) FRAMESAMPLES_HALF,
- (WebRtc_Word16) (5),
+ (int16_t) NOISE_FILTER_LEN,
+ (int16_t) FRAMESAMPLES_HALF,
+ (int16_t) (5),
(ISACdec_obj->plcstr_obj).decayCoeffNoise,
- (WebRtc_Word16) (7) );
+ (int16_t) (7) );
for( i = 0; i < FRAMESAMPLES_HALF; i++ )
Vector_Word32_2[i] = WEBRTC_SPL_LSHIFT_W32(
- (WebRtc_Word32)Vector_Word16_Extended_2[i], rshift );
+ (int32_t)Vector_Word16_Extended_2[i], rshift );
Vector_Word16_1 = Vector_Word16_Extended_1;
}
@@ -541,12 +541,12 @@
for( i = 0, noiseIndex = 0; i < FRAMESAMPLES_HALF; i++, noiseIndex++ )
{
/* --- Lowpass */
- pLP = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(
+ pLP = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(
stretchPitchLP[(ISACdec_obj->plcstr_obj).pitchIndex],
(ISACdec_obj->plcstr_obj).decayCoeffPriodic, 15 );
/* --- Highpass */
- pHP = (WebRtc_Word32)WEBRTC_SPL_MUL_16_32_RSFT15(
+ pHP = (int32_t)WEBRTC_SPL_MUL_16_32_RSFT15(
(ISACdec_obj->plcstr_obj).decayCoeffPriodic,
(ISACdec_obj->plcstr_obj).prevHP[PITCH_MAX_LAG + 10 -
(ISACdec_obj->plcstr_obj).stretchLag +
@@ -590,9 +590,9 @@
{
for( k=0; k<(ISACdec_obj->plcstr_obj).stretchLag; k++ )
{
- stretchPitchLP[k] = (WebRtc_Word16)((
- (WebRtc_Word32)stretchPitchLP[k]* 3 +
- (WebRtc_Word32)stretchPitchLP1[k])>>2);
+ stretchPitchLP[k] = (int16_t)((
+ (int32_t)stretchPitchLP[k]* 3 +
+ (int32_t)stretchPitchLP1[k])>>2);
}
break;
}
@@ -600,9 +600,9 @@
{
for( k=0; k<(ISACdec_obj->plcstr_obj).stretchLag; k++ )
{
- stretchPitchLP[k] = (WebRtc_Word16)((
- (WebRtc_Word32)stretchPitchLP[k] +
- (WebRtc_Word32)stretchPitchLP1[k] )>>1);
+ stretchPitchLP[k] = (int16_t)((
+ (int32_t)stretchPitchLP[k] +
+ (int32_t)stretchPitchLP1[k] )>>1);
}
break;
}
@@ -610,8 +610,8 @@
{
for( k=0; k<(ISACdec_obj->plcstr_obj).stretchLag; k++ )
{
- stretchPitchLP[k] = (WebRtc_Word16)((stretchPitchLP[k] +
- (WebRtc_Word32)stretchPitchLP1[k]*3 )>>2);
+ stretchPitchLP[k] = (int16_t)((stretchPitchLP[k] +
+ (int32_t)stretchPitchLP1[k]*3 )>>2);
}
break;
}
@@ -641,8 +641,8 @@
noise1 = WEBRTC_SPL_RSHIFT_W16(
(ISACdec_obj->plcstr_obj).seed, 10 ) - 16;
- nLP = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(
- (WebRtc_Word16)((noise1)*(ISACdec_obj->plcstr_obj).std),
+ nLP = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(
+ (int16_t)((noise1)*(ISACdec_obj->plcstr_obj).std),
(ISACdec_obj->plcstr_obj).decayCoeffNoise, 15 );
/* --- Highpass */
@@ -651,9 +651,9 @@
noise1 = WEBRTC_SPL_RSHIFT_W16(
(ISACdec_obj->plcstr_obj).seed, 11 ) - 8;
- nHP = (WebRtc_Word32)WEBRTC_SPL_MUL_16_32_RSFT15(
+ nHP = (int32_t)WEBRTC_SPL_MUL_16_32_RSFT15(
(ISACdec_obj->plcstr_obj).decayCoeffNoise,
- (WebRtc_Word32)(noise1*(ISACdec_obj->plcstr_obj).std) );
+ (int32_t)(noise1*(ISACdec_obj->plcstr_obj).std) );
/* --- lower the muliplier (more decay at next sample) --- */
(ISACdec_obj->plcstr_obj).decayCoeffNoise -= (myDecayRate);
@@ -662,12 +662,12 @@
/* ------ Periodic Vector --- */
/* --- Lowpass */
- pLP = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(
+ pLP = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(
stretchPitchLP[(ISACdec_obj->plcstr_obj).pitchIndex],
(ISACdec_obj->plcstr_obj).decayCoeffPriodic, 15 );
/* --- Highpass */
- pHP = (WebRtc_Word32)WEBRTC_SPL_MUL_16_32_RSFT15(
+ pHP = (int32_t)WEBRTC_SPL_MUL_16_32_RSFT15(
(ISACdec_obj->plcstr_obj).decayCoeffPriodic,
(ISACdec_obj->plcstr_obj).prevHP[PITCH_MAX_LAG + 10 -
(ISACdec_obj->plcstr_obj).stretchLag +
@@ -681,14 +681,14 @@
}
/* ------ Weighting the noisy and periodic vectors ------- */
- wNoisyLP = (WebRtc_Word16)(WEBRTC_SPL_MUL_16_16_RSFT(
+ wNoisyLP = (int16_t)(WEBRTC_SPL_MUL_16_16_RSFT(
(ISACdec_obj->plcstr_obj).A, nLP, 15 ) );
- wNoisyHP = (WebRtc_Word32)(WEBRTC_SPL_MUL_16_32_RSFT15(
+ wNoisyHP = (int32_t)(WEBRTC_SPL_MUL_16_32_RSFT15(
(ISACdec_obj->plcstr_obj).A, (nHP) ) );
- wPriodicLP = (WebRtc_Word16)(WEBRTC_SPL_MUL_16_16_RSFT(
+ wPriodicLP = (int16_t)(WEBRTC_SPL_MUL_16_16_RSFT(
(ISACdec_obj->plcstr_obj).B, pLP, 15));
- wPriodicHP = (WebRtc_Word32)(WEBRTC_SPL_MUL_16_32_RSFT15(
+ wPriodicHP = (int32_t)(WEBRTC_SPL_MUL_16_32_RSFT15(
(ISACdec_obj->plcstr_obj).B, pHP));
(ISACdec_obj->plcstr_obj).pitchIndex++;
@@ -720,9 +720,9 @@
{
for( k=0; k<(ISACdec_obj->plcstr_obj).stretchLag; k++ )
{
- stretchPitchLP[k] = (WebRtc_Word16)((
- (WebRtc_Word32)stretchPitchLP[k]* 3 +
- (WebRtc_Word32)stretchPitchLP1[k] )>>2);
+ stretchPitchLP[k] = (int16_t)((
+ (int32_t)stretchPitchLP[k]* 3 +
+ (int32_t)stretchPitchLP1[k] )>>2);
}
break;
}
@@ -730,9 +730,9 @@
{
for( k=0; k<(ISACdec_obj->plcstr_obj).stretchLag; k++ )
{
- stretchPitchLP[k] = (WebRtc_Word16)((
- (WebRtc_Word32)stretchPitchLP[k] +
- (WebRtc_Word32)stretchPitchLP1[k])>>1);
+ stretchPitchLP[k] = (int16_t)((
+ (int32_t)stretchPitchLP[k] +
+ (int32_t)stretchPitchLP1[k])>>1);
}
break;
}
@@ -740,9 +740,9 @@
{
for( k=0; k<(ISACdec_obj->plcstr_obj).stretchLag; k++ )
{
- stretchPitchLP[k] = (WebRtc_Word16)(
+ stretchPitchLP[k] = (int16_t)(
(stretchPitchLP[k] +
- (WebRtc_Word32)stretchPitchLP1[k]*3 )>>2);
+ (int32_t)stretchPitchLP1[k]*3 )>>2);
}
break;
}
@@ -756,9 +756,9 @@
}
/* ------ Sum the noisy and periodic signals ------ */
- Vector_Word16_1[i] = (WebRtc_Word16)WEBRTC_SPL_ADD_SAT_W16(
+ Vector_Word16_1[i] = (int16_t)WEBRTC_SPL_ADD_SAT_W16(
wNoisyLP, wPriodicLP );
- Vector_Word32_2[i] = (WebRtc_Word32)WEBRTC_SPL_ADD_SAT_W32(
+ Vector_Word32_2[i] = (int32_t)WEBRTC_SPL_ADD_SAT_W32(
wNoisyHP, wPriodicHP );
}
}
@@ -770,7 +770,7 @@
for( i = 0; i < RECOVERY_OVERLAP; i++ )
{
- (ISACdec_obj->plcstr_obj).overlapLP[i] = (WebRtc_Word16)(
+ (ISACdec_obj->plcstr_obj).overlapLP[i] = (int16_t)(
WEBRTC_SPL_MUL_16_16_RSFT(stretchPitchLP[k],
(ISACdec_obj->plcstr_obj).decayCoeffPriodic, 15) );
k = ( k < ((ISACdec_obj->plcstr_obj).stretchLag - 1) )? (k+1):0;
@@ -788,11 +788,11 @@
tmp32a = WEBRTC_SPL_MUL_16_16_RSFT((ISACdec_obj->plcstr_obj).AvgPitchGain_Q12,
29, 0); // Q18
tmp32b = 262144 - tmp32a; // Q18
- gainQ13 = (WebRtc_Word16) (tmp32b >> 5); // Q13
+ gainQ13 = (int16_t) (tmp32b >> 5); // Q13
/* perceptual post-filtering (using normalized lattice filter) */
for (k = 0; k < FRAMESAMPLES_HALF; k++)
- Vector_Word32_1[k] = (WebRtc_Word32) WEBRTC_SPL_MUL_16_16(
+ Vector_Word32_1[k] = (int32_t) WEBRTC_SPL_MUL_16_16(
Vector_Word16_2[k], gainQ13) << 3; // Q25
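
The post-filter gain above amounts to gain = 1 - (29/64) * AvgPitchGain: the Q12 gain times 29 is read as a Q18 number, subtracted from 1.0 (262144 in Q18) and shifted down to Q13. A worked standalone version; the 0.9 input gain is an arbitrary example:

    #include <stdint.h>
    #include <stdio.h>

    int main(void) {
      int16_t avg_pitch_gain_q12 = 3686;                  /* ~0.9 */
      int32_t tmp32a = (int32_t)avg_pitch_gain_q12 * 29;  /* Q12*29, i.e. (29/64)*gain in Q18 */
      int32_t tmp32b = 262144 - tmp32a;                   /* 1.0 in Q18 minus that */
      int16_t gain_q13 = (int16_t)(tmp32b >> 5);          /* Q18 -> Q13 */
      printf("gainQ13 = %d (%.4f)\n", gain_q13, gain_q13 / 8192.0);
      return 0;
    }
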
@@ -810,11 +810,11 @@
for (k=0;k<FRAMESAMPLES_HALF;k++)
{
/* Construct a new upper channel signal*/
- tmp_1 = (WebRtc_Word16)WebRtcSpl_SatW32ToW16(
- ((WebRtc_Word32)Vector_Word16_1[k]+Vector_Word16_2[k] + 1));
+ tmp_1 = (int16_t)WebRtcSpl_SatW32ToW16(
+ ((int32_t)Vector_Word16_1[k]+Vector_Word16_2[k] + 1));
/* Construct a new lower channel signal*/
- tmp_2 = (WebRtc_Word16)WebRtcSpl_SatW32ToW16(
- ((WebRtc_Word32)Vector_Word16_1[k]-Vector_Word16_2[k]));
+ tmp_2 = (int16_t)WebRtcSpl_SatW32ToW16(
+ ((int32_t)Vector_Word16_1[k]-Vector_Word16_2[k]));
Vector_Word16_1[k] = tmp_1;
Vector_Word16_2[k] = tmp_2;
}
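
The loop above recombines the two half-band vectors by forming their sum and difference and clamping each result to the int16_t range. A standalone stand-in for that WebRtcSpl_SatW32ToW16() pattern, with made-up sample values:

    #include <stdint.h>
    #include <stdio.h>

    static int16_t sat_w32_to_w16(int32_t v) {    /* clamp a 32-bit value to int16_t */
      if (v > 32767) return 32767;
      if (v < -32768) return -32768;
      return (int16_t)v;
    }

    int main(void) {
      int16_t v1 = 30000, v2 = 5000;                          /* example samples */
      int16_t upper = sat_w32_to_w16((int32_t)v1 + v2 + 1);   /* 35001 clamps to 32767 */
      int16_t lower = sat_w32_to_w16((int32_t)v1 - v2);       /* 25000, no clamping */
      printf("upper=%d lower=%d\n", upper, lower);
      return 0;
    }
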
diff --git a/webrtc/modules/audio_coding/codecs/isac/fix/source/encode.c b/webrtc/modules/audio_coding/codecs/isac/fix/source/encode.c
index cb531e5..e209c0e 100644
--- a/webrtc/modules/audio_coding/codecs/isac/fix/source/encode.c
+++ b/webrtc/modules/audio_coding/codecs/isac/fix/source/encode.c
@@ -28,40 +28,40 @@
#include <stdio.h>
-int WebRtcIsacfix_EncodeImpl(WebRtc_Word16 *in,
- ISACFIX_EncInst_t *ISACenc_obj,
- BwEstimatorstr *bw_estimatordata,
- WebRtc_Word16 CodingMode)
+int WebRtcIsacfix_EncodeImpl(int16_t *in,
+ ISACFIX_EncInst_t *ISACenc_obj,
+ BwEstimatorstr *bw_estimatordata,
+ int16_t CodingMode)
{
- WebRtc_Word16 stream_length = 0;
- WebRtc_Word16 usefulstr_len = 0;
+ int16_t stream_length = 0;
+ int16_t usefulstr_len = 0;
int k;
- WebRtc_Word16 BWno;
+ int16_t BWno;
- WebRtc_Word16 lofilt_coefQ15[(ORDERLO)*SUBFRAMES];
- WebRtc_Word16 hifilt_coefQ15[(ORDERHI)*SUBFRAMES];
- WebRtc_Word32 gain_lo_hiQ17[2*SUBFRAMES];
+ int16_t lofilt_coefQ15[(ORDERLO)*SUBFRAMES];
+ int16_t hifilt_coefQ15[(ORDERHI)*SUBFRAMES];
+ int32_t gain_lo_hiQ17[2*SUBFRAMES];
- WebRtc_Word16 LPandHP[FRAMESAMPLES/2 + QLOOKAHEAD];
- WebRtc_Word16 LP16a[FRAMESAMPLES/2 + QLOOKAHEAD];
- WebRtc_Word16 HP16a[FRAMESAMPLES/2 + QLOOKAHEAD];
+ int16_t LPandHP[FRAMESAMPLES/2 + QLOOKAHEAD];
+ int16_t LP16a[FRAMESAMPLES/2 + QLOOKAHEAD];
+ int16_t HP16a[FRAMESAMPLES/2 + QLOOKAHEAD];
- WebRtc_Word16 PitchLags_Q7[PITCH_SUBFRAMES];
- WebRtc_Word16 PitchGains_Q12[PITCH_SUBFRAMES];
- WebRtc_Word16 AvgPitchGain_Q12;
+ int16_t PitchLags_Q7[PITCH_SUBFRAMES];
+ int16_t PitchGains_Q12[PITCH_SUBFRAMES];
+ int16_t AvgPitchGain_Q12;
- WebRtc_Word16 frame_mode; /* 0 for 30ms, 1 for 60ms */
- WebRtc_Word16 processed_samples;
+ int16_t frame_mode; /* 0 for 30ms, 1 for 60ms */
+ int16_t processed_samples;
int status;
- WebRtc_Word32 bits_gainsQ11;
- WebRtc_Word16 MinBytes;
- WebRtc_Word16 bmodel;
+ int32_t bits_gainsQ11;
+ int16_t MinBytes;
+ int16_t bmodel;
transcode_obj transcodingParam;
- WebRtc_Word16 payloadLimitBytes;
- WebRtc_Word16 arithLenBeforeEncodingDFT;
- WebRtc_Word16 iterCntr;
+ int16_t payloadLimitBytes;
+ int16_t arithLenBeforeEncodingDFT;
+ int16_t iterCntr;
/* copy new frame length and bottle neck rate only for the first 10 ms data */
if (ISACenc_obj->buffer_index == 0) {
@@ -112,7 +112,7 @@
// multiply the bottleneck by 0.88 before computing SNR, 0.88 is tuned by experimenting on TIMIT
// 901/1024 is 0.87988281250000
- ISACenc_obj->s2nr = WebRtcIsacfix_GetSnr((WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(ISACenc_obj->BottleNeck, 901, 10),
+ ISACenc_obj->s2nr = WebRtcIsacfix_GetSnr((int16_t)WEBRTC_SPL_MUL_16_16_RSFT(ISACenc_obj->BottleNeck, 901, 10),
ISACenc_obj->current_framesamples);
/* encode frame length */
@@ -276,22 +276,22 @@
while((((ISACenc_obj->bitstr_obj.stream_index) << 1) > payloadLimitBytes) ||
(status == -ISAC_DISALLOWED_BITSTREAM_LENGTH))
{
- WebRtc_Word16 arithLenDFTByte;
- WebRtc_Word16 bytesLeftQ5;
- WebRtc_Word16 ratioQ5[8] = {0, 6, 9, 12, 16, 19, 22, 25};
+ int16_t arithLenDFTByte;
+ int16_t bytesLeftQ5;
+ int16_t ratioQ5[8] = {0, 6, 9, 12, 16, 19, 22, 25};
// According to experiments on TIMIT the following is proper for audio, but it is not agressive enough for tonal inputs
// such as DTMF, sweep-sine, ...
//
// (0.55 - (0.8 - ratio[i]/32) * 5 / 6) * 2^14
- // WebRtc_Word16 scaleQ14[8] = {0, 648, 1928, 3208, 4915, 6195, 7475, 8755};
+ // int16_t scaleQ14[8] = {0, 648, 1928, 3208, 4915, 6195, 7475, 8755};
// This is a supper-agressive scaling passed the tests (tonal inputs) tone with one iteration for payload limit
// of 120 (32kbps bottleneck), number of frames needed a rate-reduction was 58403
//
- WebRtc_Word16 scaleQ14[8] = {0, 348, 828, 1408, 2015, 3195, 3500, 3500};
- WebRtc_Word16 idx;
+ int16_t scaleQ14[8] = {0, 348, 828, 1408, 2015, 3195, 3500, 3500};
+ int16_t idx;
if(iterCntr >= MAX_PAYLOAD_LIMIT_ITERATION)
{
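
The commented-out table in the hunk above follows the quoted formula (0.55 - (0.8 - ratio/32) * 5/6) * 2^14 applied to the ratioQ5 entries; the short program below reproduces entries 1..7 of that table (for the first ratio the formula goes negative, and the table stores 0 instead). Illustrative only:

    #include <stdint.h>
    #include <stdio.h>

    int main(void) {
      const int16_t ratio_q5[8] = {0, 6, 9, 12, 16, 19, 22, 25};
      for (int i = 0; i < 8; i++) {
        double ratio = ratio_q5[i] / 32.0;                  /* ratioQ5 is Q5 */
        double scale = 0.55 - (0.8 - ratio) * 5.0 / 6.0;
        printf("ratioQ5=%2d -> scaleQ14 ~ %6.0f\n", ratio_q5[i], scale * 16384.0);
      }
      return 0;
    }
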
@@ -348,8 +348,8 @@
// scale FFT coefficients to reduce the bit-rate
for(k = 0; k < FRAMESAMPLES_HALF; k++)
{
- LP16a[k] = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(LP16a[k], scaleQ14[idx], 14);
- LPandHP[k] = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(LPandHP[k], scaleQ14[idx], 14);
+ LP16a[k] = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(LP16a[k], scaleQ14[idx], 14);
+ LPandHP[k] = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(LPandHP[k], scaleQ14[idx], 14);
}
// Save data for multiple packets memory
@@ -421,7 +421,7 @@
{
/* update rate model and get minimum number of bytes in this packet */
- MinBytes = WebRtcIsacfix_GetMinBytes(&ISACenc_obj->rate_data_obj, (WebRtc_Word16) stream_length,
+ MinBytes = WebRtcIsacfix_GetMinBytes(&ISACenc_obj->rate_data_obj, (int16_t) stream_length,
ISACenc_obj->current_framesamples, ISACenc_obj->BottleNeck, ISACenc_obj->MaxDelay);
/* if bitstream is too short, add garbage at the end */
@@ -452,7 +452,7 @@
{
if (stream_length & 0x0001){
ISACenc_obj->bitstr_seed = WEBRTC_SPL_RAND( ISACenc_obj->bitstr_seed );
- ISACenc_obj->bitstr_obj.stream[ WEBRTC_SPL_RSHIFT_W16(stream_length, 1) ] |= (WebRtc_UWord16)(ISACenc_obj->bitstr_seed & 0xFF);
+ ISACenc_obj->bitstr_obj.stream[ WEBRTC_SPL_RSHIFT_W16(stream_length, 1) ] |= (uint16_t)(ISACenc_obj->bitstr_seed & 0xFF);
} else {
ISACenc_obj->bitstr_seed = WEBRTC_SPL_RAND( ISACenc_obj->bitstr_seed );
ISACenc_obj->bitstr_obj.stream[ WEBRTC_SPL_RSHIFT_W16(stream_length, 1) ] = WEBRTC_SPL_LSHIFT_U16(ISACenc_obj->bitstr_seed, 8);
@@ -473,7 +473,7 @@
else
{
/* update rate model */
- WebRtcIsacfix_UpdateRateModel(&ISACenc_obj->rate_data_obj, (WebRtc_Word16) stream_length,
+ WebRtcIsacfix_UpdateRateModel(&ISACenc_obj->rate_data_obj, (int16_t) stream_length,
ISACenc_obj->current_framesamples, ISACenc_obj->BottleNeck);
}
return stream_length;
@@ -489,17 +489,17 @@
{
int ii;
int status;
- WebRtc_Word16 BWno = BWnumber;
+ int16_t BWno = BWnumber;
int stream_length = 0;
- WebRtc_Word16 model;
- const WebRtc_UWord16 *Q_PitchGain_cdf_ptr[1];
- const WebRtc_UWord16 **cdf;
+ int16_t model;
+ const uint16_t *Q_PitchGain_cdf_ptr[1];
+ const uint16_t **cdf;
const ISAC_SaveEncData_t *SaveEnc_str;
- WebRtc_Word32 tmpLPCcoeffs_g[KLT_ORDER_GAIN<<1];
- WebRtc_Word16 tmpLPCindex_g[KLT_ORDER_GAIN<<1];
- WebRtc_Word16 tmp_fre[FRAMESAMPLES];
- WebRtc_Word16 tmp_fim[FRAMESAMPLES];
+ int32_t tmpLPCcoeffs_g[KLT_ORDER_GAIN<<1];
+ int16_t tmpLPCindex_g[KLT_ORDER_GAIN<<1];
+ int16_t tmp_fre[FRAMESAMPLES];
+ int16_t tmp_fim[FRAMESAMPLES];
SaveEnc_str = ISACenc_obj->SaveEnc_ptr;
@@ -537,13 +537,13 @@
if ((0.0 < scale) && (scale < 1.0)) {
/* Compensate LPC gain */
for (ii = 0; ii < (KLT_ORDER_GAIN*(1+SaveEnc_str->startIdx)); ii++) {
- tmpLPCcoeffs_g[ii] = (WebRtc_Word32) ((scale) * (float) SaveEnc_str->LPCcoeffs_g[ii]);
+ tmpLPCcoeffs_g[ii] = (int32_t) ((scale) * (float) SaveEnc_str->LPCcoeffs_g[ii]);
}
/* Scale DFT */
for (ii = 0; ii < (FRAMESAMPLES_HALF*(1+SaveEnc_str->startIdx)); ii++) {
- tmp_fre[ii] = (WebRtc_Word16) ((scale) * (float) SaveEnc_str->fre[ii]) ;
- tmp_fim[ii] = (WebRtc_Word16) ((scale) * (float) SaveEnc_str->fim[ii]) ;
+ tmp_fre[ii] = (int16_t) ((scale) * (float) SaveEnc_str->fre[ii]) ;
+ tmp_fim[ii] = (int16_t) ((scale) * (float) SaveEnc_str->fim[ii]) ;
}
} else {
for (ii = 0; ii < (KLT_ORDER_GAIN*(1+SaveEnc_str->startIdx)); ii++) {
diff --git a/webrtc/modules/audio_coding/codecs/isac/fix/source/entropy_coding.c b/webrtc/modules/audio_coding/codecs/isac/fix/source/entropy_coding.c
index c2716fc..8b46d68 100644
--- a/webrtc/modules/audio_coding/codecs/isac/fix/source/entropy_coding.c
+++ b/webrtc/modules/audio_coding/codecs/isac/fix/source/entropy_coding.c
@@ -65,26 +65,26 @@
/*
This function implements the fix-point correspondant function to lrint.
- FLP: (WebRtc_Word32)floor(flt+.499999999999)
+ FLP: (int32_t)floor(flt+.499999999999)
FIP: (fixVal+roundVal)>>qDomain
where roundVal = 2^(qDomain-1) = 1<<(qDomain-1)
*/
-static __inline WebRtc_Word32 CalcLrIntQ(WebRtc_Word32 fixVal, WebRtc_Word16 qDomain) {
- WebRtc_Word32 intgr;
- WebRtc_Word32 roundVal;
+static __inline int32_t CalcLrIntQ(int32_t fixVal, int16_t qDomain) {
+ int32_t intgr;
+ int32_t roundVal;
- roundVal = WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)1, qDomain-1);
+ roundVal = WEBRTC_SPL_LSHIFT_W32((int32_t)1, qDomain-1);
intgr = WEBRTC_SPL_RSHIFT_W32(fixVal+roundVal, qDomain);
return intgr;
}
/*
- __inline WebRtc_UWord32 stepwise(WebRtc_Word32 dinQ10) {
+ __inline uint32_t stepwise(int32_t dinQ10) {
- WebRtc_Word32 ind, diQ10, dtQ10;
+ int32_t ind, diQ10, dtQ10;
diQ10 = dinQ10;
if (diQ10 < DPMIN_Q10)
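
CalcLrIntQ() in the hunk above rounds a Q-domain value to the nearest integer by adding half a unit, 1 << (qDomain-1), before the right shift. A standalone sketch with two spot checks:

    #include <stdint.h>
    #include <stdio.h>

    static int32_t lrint_q(int32_t fix_val, int16_t q) {
      int32_t round_val = (int32_t)1 << (q - 1);   /* half of one integer step */
      return (fix_val + round_val) >> q;
    }

    int main(void) {
      /* 2.25 in Q17 is 294912 -> 2;  2.75 in Q17 is 360448 -> 3 */
      printf("%d %d\n", (int)lrint_q(294912, 17), (int)lrint_q(360448, 17));
      return 0;
    }
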
@@ -110,13 +110,13 @@
177.445678 should be subtracted (since logN() returns a Q8 value).
For a X value in Q17, the value 177.445678*17 = 3017 should be
subtracted */
-static WebRtc_Word16 CalcLogN(WebRtc_Word32 arg) {
- WebRtc_Word16 zeros, log2, frac, logN;
+static int16_t CalcLogN(int32_t arg) {
+ int16_t zeros, log2, frac, logN;
zeros=WebRtcSpl_NormU32(arg);
- frac=(WebRtc_Word16)WEBRTC_SPL_RSHIFT_U32(WEBRTC_SPL_LSHIFT_W32(arg, zeros)&0x7FFFFFFF, 23);
- log2=(WebRtc_Word16)(WEBRTC_SPL_LSHIFT_W32(31-zeros, 8)+frac); // log2(x) in Q8
- logN=(WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(log2,22713,15); //Q8*Q15 log(2) = 0.693147 = 22713 in Q15
+ frac=(int16_t)WEBRTC_SPL_RSHIFT_U32(WEBRTC_SPL_LSHIFT_W32(arg, zeros)&0x7FFFFFFF, 23);
+ log2=(int16_t)(WEBRTC_SPL_LSHIFT_W32(31-zeros, 8)+frac); // log2(x) in Q8
+ logN=(int16_t)WEBRTC_SPL_MUL_16_16_RSFT(log2,22713,15); //Q8*Q15 log(2) = 0.693147 = 22713 in Q15
logN=logN+11; //Scalar compensation which minimizes the (log(x)-logN(x))^2 error over all x.
return logN;
@@ -126,22 +126,22 @@
/*
expN(x) = 2^(a*x), where a = log2(e) ~= 1.442695
- Input: Q8 (WebRtc_Word16)
- Output: Q17 (WebRtc_Word32)
+ Input: Q8 (int16_t)
+ Output: Q17 (int32_t)
a = log2(e) = log2(exp(1)) ~= 1.442695 ==> a = 23637 in Q14 (1.442688)
To this value, 700 is added or subtracted in order to get an average error
nearer zero, instead of always same-sign.
*/
-static WebRtc_Word32 CalcExpN(WebRtc_Word16 x) {
- WebRtc_Word16 ax, axINT, axFRAC;
- WebRtc_Word16 exp16;
- WebRtc_Word32 exp;
+static int32_t CalcExpN(int16_t x) {
+ int16_t ax, axINT, axFRAC;
+ int16_t exp16;
+ int32_t exp;
if (x>=0) {
- // ax=(WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(x, 23637-700, 14); //Q8
- ax=(WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(x, 23637, 14); //Q8
+ // ax=(int16_t)WEBRTC_SPL_MUL_16_16_RSFT(x, 23637-700, 14); //Q8
+ ax=(int16_t)WEBRTC_SPL_MUL_16_16_RSFT(x, 23637, 14); //Q8
axINT = WEBRTC_SPL_RSHIFT_W16(ax, 8); //Q0
axFRAC = ax&0x00FF;
exp16 = WEBRTC_SPL_LSHIFT_W32(1, axINT); //Q0
@@ -149,12 +149,12 @@
exp = WEBRTC_SPL_MUL_16_16(exp16, axFRAC); // Q0*Q8 = Q8
exp = WEBRTC_SPL_LSHIFT_W32(exp, 9); //Q17
} else {
- // ax=(WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(x, 23637+700, 14); //Q8
- ax=(WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(x, 23637, 14); //Q8
+ // ax=(int16_t)WEBRTC_SPL_MUL_16_16_RSFT(x, 23637+700, 14); //Q8
+ ax=(int16_t)WEBRTC_SPL_MUL_16_16_RSFT(x, 23637, 14); //Q8
ax = -ax;
axINT = 1 + WEBRTC_SPL_RSHIFT_W16(ax, 8); //Q0
axFRAC = 0x00FF - (ax&0x00FF);
- exp16 = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(32768, axINT); //Q15
+ exp16 = (int16_t) WEBRTC_SPL_RSHIFT_W32(32768, axINT); //Q15
axFRAC = axFRAC+256; //Q8
exp = WEBRTC_SPL_MUL_16_16(exp16, axFRAC); // Q15*Q8 = Q23
exp = WEBRTC_SPL_RSHIFT_W32(exp, 6); //Q17
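
CalcExpN() computes e^x as 2^(x*log2(e)), with log2(e) = 1.442695 stored as 23637 in Q14; the integer part of the exponent becomes a shift and the fractional part is handled linearly. The sketch below covers only small non-negative inputs and approximates the idea rather than the exact routine (part of which lies between the hunks shown):

    #include <stdint.h>
    #include <stdio.h>
    #include <math.h>

    static int32_t calc_expn_pos(int16_t x_q8) {
      int16_t ax = (int16_t)(((int32_t)x_q8 * 23637) >> 14);     /* x*log2(e), Q8 */
      int16_t ax_int = (int16_t)(ax >> 8);                       /* integer exponent */
      int16_t ax_frac = (int16_t)(ax & 0x00FF);                  /* fraction, Q8 */
      int32_t exp_q8 = ((int32_t)1 << ax_int) * (256 + ax_frac); /* 2^int*(1+frac), Q8 */
      return exp_q8 << 9;                                        /* Q8 -> Q17 */
    }

    int main(void) {
      int16_t x_q8 = 512;   /* x = 2.0 */
      printf("approx %.3f  exact %.3f\n", calc_expn_pos(x_q8) / 131072.0, exp(2.0));
      return 0;
    }
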
@@ -165,11 +165,11 @@
/* compute correlation from power spectrum */
-static void CalcCorrelation(WebRtc_Word32 *PSpecQ12, WebRtc_Word32 *CorrQ7)
+static void CalcCorrelation(int32_t *PSpecQ12, int32_t *CorrQ7)
{
- WebRtc_Word32 summ[FRAMESAMPLES/8];
- WebRtc_Word32 diff[FRAMESAMPLES/8];
- WebRtc_Word32 sum;
+ int32_t summ[FRAMESAMPLES/8];
+ int32_t diff[FRAMESAMPLES/8];
+ int32_t sum;
int k, n;
for (k = 0; k < FRAMESAMPLES/8; k++) {
@@ -199,16 +199,16 @@
/* compute inverse AR power spectrum */
-static void CalcInvArSpec(const WebRtc_Word16 *ARCoefQ12,
- const WebRtc_Word32 gainQ10,
- WebRtc_Word32 *CurveQ16)
+static void CalcInvArSpec(const int16_t *ARCoefQ12,
+ const int32_t gainQ10,
+ int32_t *CurveQ16)
{
- WebRtc_Word32 CorrQ11[AR_ORDER+1];
- WebRtc_Word32 sum, tmpGain;
- WebRtc_Word32 diffQ16[FRAMESAMPLES/8];
- const WebRtc_Word16 *CS_ptrQ9;
+ int32_t CorrQ11[AR_ORDER+1];
+ int32_t sum, tmpGain;
+ int32_t diffQ16[FRAMESAMPLES/8];
+ const int16_t *CS_ptrQ9;
int k, n;
- WebRtc_Word16 round, shftVal = 0, sh;
+ int16_t round, shftVal = 0, sh;
sum = 0;
for (n = 0; n < AR_ORDER+1; n++)
@@ -269,19 +269,19 @@
}
}
-static void CalcRootInvArSpec(const WebRtc_Word16 *ARCoefQ12,
- const WebRtc_Word32 gainQ10,
- WebRtc_UWord16 *CurveQ8)
+static void CalcRootInvArSpec(const int16_t *ARCoefQ12,
+ const int32_t gainQ10,
+ uint16_t *CurveQ8)
{
- WebRtc_Word32 CorrQ11[AR_ORDER+1];
- WebRtc_Word32 sum, tmpGain;
- WebRtc_Word32 summQ16[FRAMESAMPLES/8];
- WebRtc_Word32 diffQ16[FRAMESAMPLES/8];
+ int32_t CorrQ11[AR_ORDER+1];
+ int32_t sum, tmpGain;
+ int32_t summQ16[FRAMESAMPLES/8];
+ int32_t diffQ16[FRAMESAMPLES/8];
- const WebRtc_Word16 *CS_ptrQ9;
+ const int16_t *CS_ptrQ9;
int k, n, i;
- WebRtc_Word16 round, shftVal = 0, sh;
- WebRtc_Word32 res, in_sqrt, newRes;
+ int16_t round, shftVal = 0, sh;
+ int32_t res, in_sqrt, newRes;
sum = 0;
for (n = 0; n < AR_ORDER+1; n++)
@@ -357,7 +357,7 @@
newRes = WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_DIV(in_sqrt, res) + res, 1);
} while (newRes != res && i-- > 0);
- CurveQ8[k] = (WebRtc_Word16)newRes;
+ CurveQ8[k] = (int16_t)newRes;
}
for (k = FRAMESAMPLES/8; k < FRAMESAMPLES/4; k++) {
@@ -375,7 +375,7 @@
newRes = WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_DIV(in_sqrt, res) + res, 1);
} while (newRes != res && i-- > 0);
- CurveQ8[k] = (WebRtc_Word16)newRes;
+ CurveQ8[k] = (int16_t)newRes;
}
}
@@ -383,31 +383,31 @@
/* generate array of dither samples in Q7 */
-static void GenerateDitherQ7(WebRtc_Word16 *bufQ7,
- WebRtc_UWord32 seed,
- WebRtc_Word16 length,
- WebRtc_Word16 AvgPitchGain_Q12)
+static void GenerateDitherQ7(int16_t *bufQ7,
+ uint32_t seed,
+ int16_t length,
+ int16_t AvgPitchGain_Q12)
{
int k;
- WebRtc_Word16 dither1_Q7, dither2_Q7, dither_gain_Q14, shft;
+ int16_t dither1_Q7, dither2_Q7, dither_gain_Q14, shft;
if (AvgPitchGain_Q12 < 614) /* this threshold should be equal to that in decode_spec() */
{
for (k = 0; k < length-2; k += 3)
{
- /* new random unsigned WebRtc_Word32 */
+ /* new random unsigned int32_t */
seed = WEBRTC_SPL_UMUL(seed, 196314165) + 907633515;
/* fixed-point dither sample between -64 and 64 (Q7) */
- dither1_Q7 = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32((WebRtc_Word32)seed + 16777216, 25); // * 128/4294967295
+ dither1_Q7 = (int16_t)WEBRTC_SPL_RSHIFT_W32((int32_t)seed + 16777216, 25); // * 128/4294967295
- /* new random unsigned WebRtc_Word32 */
+ /* new random unsigned int32_t */
seed = WEBRTC_SPL_UMUL(seed, 196314165) + 907633515;
/* fixed-point dither sample between -64 and 64 */
- dither2_Q7 = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(seed + 16777216, 25);
+ dither2_Q7 = (int16_t)WEBRTC_SPL_RSHIFT_W32(seed + 16777216, 25);
- shft = (WebRtc_Word16)(WEBRTC_SPL_RSHIFT_U32(seed, 25) & 15);
+ shft = (int16_t)(WEBRTC_SPL_RSHIFT_U32(seed, 25) & 15);
if (shft < 5)
{
bufQ7[k] = dither1_Q7;
@@ -430,21 +430,21 @@
}
else
{
- dither_gain_Q14 = (WebRtc_Word16)(22528 - WEBRTC_SPL_MUL(10, AvgPitchGain_Q12));
+ dither_gain_Q14 = (int16_t)(22528 - WEBRTC_SPL_MUL(10, AvgPitchGain_Q12));
/* dither on half of the coefficients */
for (k = 0; k < length-1; k += 2)
{
- /* new random unsigned WebRtc_Word32 */
+ /* new random unsigned int32_t */
seed = WEBRTC_SPL_UMUL(seed, 196314165) + 907633515;
/* fixed-point dither sample between -64 and 64 */
- dither1_Q7 = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32((WebRtc_Word32)seed + 16777216, 25);
+ dither1_Q7 = (int16_t)WEBRTC_SPL_RSHIFT_W32((int32_t)seed + 16777216, 25);
/* dither sample is placed in either even or odd index */
- shft = (WebRtc_Word16)(WEBRTC_SPL_RSHIFT_U32(seed, 25) & 1); /* either 0 or 1 */
+ shft = (int16_t)(WEBRTC_SPL_RSHIFT_U32(seed, 25) & 1); /* either 0 or 1 */
- bufQ7[k + shft] = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL(dither_gain_Q14, dither1_Q7) + 8192, 14);
+ bufQ7[k + shft] = (int16_t)WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL(dither_gain_Q14, dither1_Q7) + 8192, 14);
bufQ7[k + 1 - shft] = 0;
}
}
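
GenerateDitherQ7() above draws its dither from a plain 32-bit linear congruential generator, seed = seed*196314165 + 907633515, and maps each new seed to a sample in [-64, 64], i.e. +/-0.5 in Q7. A minimal standalone version; the starting seed is arbitrary and a 64-bit intermediate is used so the addition cannot overflow:

    #include <stdint.h>
    #include <stdio.h>

    int main(void) {
      uint32_t seed = 777u;                             /* any starting seed */
      for (int k = 0; k < 8; k++) {
        seed = seed * 196314165u + 907633515u;          /* LCG step, mod 2^32 */
        /* reinterpret as signed (as the original macro does) and keep the top bits */
        int16_t dither_q7 = (int16_t)(((int64_t)(int32_t)seed + 16777216) >> 25);
        printf("%d ", dither_q7);                       /* values in [-64, 64] */
      }
      printf("\n");
      return 0;
    }
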
@@ -457,18 +457,18 @@
* function to decode the complex spectrum from the bitstream
* returns the total number of bytes in the stream
*/
-WebRtc_Word16 WebRtcIsacfix_DecodeSpec(Bitstr_dec *streamdata,
- WebRtc_Word16 *frQ7,
- WebRtc_Word16 *fiQ7,
- WebRtc_Word16 AvgPitchGain_Q12)
+int16_t WebRtcIsacfix_DecodeSpec(Bitstr_dec *streamdata,
+ int16_t *frQ7,
+ int16_t *fiQ7,
+ int16_t AvgPitchGain_Q12)
{
- WebRtc_Word16 data[FRAMESAMPLES];
- WebRtc_Word32 invARSpec2_Q16[FRAMESAMPLES/4];
- WebRtc_Word16 ARCoefQ12[AR_ORDER+1];
- WebRtc_Word16 RCQ15[AR_ORDER];
- WebRtc_Word16 gainQ10;
- WebRtc_Word32 gain2_Q10;
- WebRtc_Word16 len;
+ int16_t data[FRAMESAMPLES];
+ int32_t invARSpec2_Q16[FRAMESAMPLES/4];
+ int16_t ARCoefQ12[AR_ORDER+1];
+ int16_t RCQ15[AR_ORDER];
+ int16_t gainQ10;
+ int32_t gain2_Q10;
+ int16_t len;
int k;
/* create dither signal */
@@ -489,7 +489,7 @@
/* arithmetic decoding of spectrum */
/* 'data' input and output. Input = Dither */
- len = WebRtcIsacfix_DecLogisticMulti2(data, streamdata, invARSpec2_Q16, (WebRtc_Word16)FRAMESAMPLES);
+ len = WebRtcIsacfix_DecLogisticMulti2(data, streamdata, invARSpec2_Q16, (int16_t)FRAMESAMPLES);
if (len<1)
return -ISAC_RANGE_ERROR_DECODE_SPECTRUM;
@@ -499,24 +499,24 @@
{
for (k = 0; k < FRAMESAMPLES; k += 4)
{
- gainQ10 = WebRtcSpl_DivW32W16ResW16(WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)30, 10),
- (WebRtc_Word16)WEBRTC_SPL_RSHIFT_U32(invARSpec2_Q16[k>>2] + (WebRtc_UWord32)2195456, 16));
- *frQ7++ = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL(data[ k ], gainQ10) + 512, 10);
- *fiQ7++ = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL(data[k+1], gainQ10) + 512, 10);
- *frQ7++ = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL(data[k+2], gainQ10) + 512, 10);
- *fiQ7++ = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL(data[k+3], gainQ10) + 512, 10);
+ gainQ10 = WebRtcSpl_DivW32W16ResW16(WEBRTC_SPL_LSHIFT_W32((int32_t)30, 10),
+ (int16_t)WEBRTC_SPL_RSHIFT_U32(invARSpec2_Q16[k>>2] + (uint32_t)2195456, 16));
+ *frQ7++ = (int16_t)WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL(data[ k ], gainQ10) + 512, 10);
+ *fiQ7++ = (int16_t)WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL(data[k+1], gainQ10) + 512, 10);
+ *frQ7++ = (int16_t)WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL(data[k+2], gainQ10) + 512, 10);
+ *fiQ7++ = (int16_t)WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL(data[k+3], gainQ10) + 512, 10);
}
}
else
{
for (k = 0; k < FRAMESAMPLES; k += 4)
{
- gainQ10 = WebRtcSpl_DivW32W16ResW16(WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)36, 10),
- (WebRtc_Word16)WEBRTC_SPL_RSHIFT_U32(invARSpec2_Q16[k>>2] + (WebRtc_UWord32)2654208, 16));
- *frQ7++ = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL(data[ k ], gainQ10) + 512, 10);
- *fiQ7++ = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL(data[k+1], gainQ10) + 512, 10);
- *frQ7++ = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL(data[k+2], gainQ10) + 512, 10);
- *fiQ7++ = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL(data[k+3], gainQ10) + 512, 10);
+ gainQ10 = WebRtcSpl_DivW32W16ResW16(WEBRTC_SPL_LSHIFT_W32((int32_t)36, 10),
+ (int16_t)WEBRTC_SPL_RSHIFT_U32(invARSpec2_Q16[k>>2] + (uint32_t)2654208, 16));
+ *frQ7++ = (int16_t)WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL(data[ k ], gainQ10) + 512, 10);
+ *fiQ7++ = (int16_t)WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL(data[k+1], gainQ10) + 512, 10);
+ *frQ7++ = (int16_t)WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL(data[k+2], gainQ10) + 512, 10);
+ *fiQ7++ = (int16_t)WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL(data[k+3], gainQ10) + 512, 10);
}
}
@@ -524,24 +524,24 @@
}
-int WebRtcIsacfix_EncodeSpec(const WebRtc_Word16 *fr,
- const WebRtc_Word16 *fi,
+int WebRtcIsacfix_EncodeSpec(const int16_t *fr,
+ const int16_t *fi,
Bitstr_enc *streamdata,
- WebRtc_Word16 AvgPitchGain_Q12)
+ int16_t AvgPitchGain_Q12)
{
- WebRtc_Word16 dataQ7[FRAMESAMPLES];
- WebRtc_Word32 PSpec[FRAMESAMPLES/4];
- WebRtc_UWord16 invARSpecQ8[FRAMESAMPLES/4];
- WebRtc_Word32 CorrQ7[AR_ORDER+1];
- WebRtc_Word32 CorrQ7_norm[AR_ORDER+1];
- WebRtc_Word16 RCQ15[AR_ORDER];
- WebRtc_Word16 ARCoefQ12[AR_ORDER+1];
- WebRtc_Word32 gain2_Q10;
- WebRtc_Word16 val;
- WebRtc_Word32 nrg;
- WebRtc_UWord32 sum;
- WebRtc_Word16 lft_shft;
- WebRtc_Word16 status;
+ int16_t dataQ7[FRAMESAMPLES];
+ int32_t PSpec[FRAMESAMPLES/4];
+ uint16_t invARSpecQ8[FRAMESAMPLES/4];
+ int32_t CorrQ7[AR_ORDER+1];
+ int32_t CorrQ7_norm[AR_ORDER+1];
+ int16_t RCQ15[AR_ORDER];
+ int16_t ARCoefQ12[AR_ORDER+1];
+ int32_t gain2_Q10;
+ int16_t val;
+ int32_t nrg;
+ uint32_t sum;
+ int16_t lft_shft;
+ int16_t status;
int k, n, j;
@@ -627,7 +627,7 @@
/* arithmetic coding of spectrum */
- status = WebRtcIsacfix_EncLogisticMulti2(streamdata, dataQ7, invARSpecQ8, (WebRtc_Word16)FRAMESAMPLES);
+ status = WebRtcIsacfix_EncLogisticMulti2(streamdata, dataQ7, invARSpecQ8, (int16_t)FRAMESAMPLES);
if ( status )
return( status );
@@ -636,7 +636,7 @@
/* Matlab's LAR definition */
-static void Rc2LarFix(const WebRtc_Word16 *rcQ15, WebRtc_Word32 *larQ17, WebRtc_Word16 order) {
+static void Rc2LarFix(const int16_t *rcQ15, int32_t *larQ17, int16_t order) {
/*
@@ -670,8 +670,8 @@
*/
int k;
- WebRtc_Word16 rc;
- WebRtc_Word32 larAbsQ17;
+ int16_t rc;
+ int32_t larAbsQ17;
for (k = 0; k < order; k++) {
@@ -702,7 +702,7 @@
}
-static void Lar2RcFix(const WebRtc_Word32 *larQ17, WebRtc_Word16 *rcQ15, WebRtc_Word16 order) {
+static void Lar2RcFix(const int32_t *larQ17, int16_t *rcQ15, int16_t order) {
/*
This is a piece-wise implemenetation of a lar2rc-function
@@ -710,12 +710,12 @@
*/
int k;
- WebRtc_Word16 larAbsQ11;
- WebRtc_Word32 rc;
+ int16_t larAbsQ11;
+ int32_t rc;
for (k = 0; k < order; k++) {
- larAbsQ11 = (WebRtc_Word16) WEBRTC_SPL_ABS_W32(WEBRTC_SPL_RSHIFT_W32(larQ17[k]+32,6)); //Q11
+ larAbsQ11 = (int16_t) WEBRTC_SPL_ABS_W32(WEBRTC_SPL_RSHIFT_W32(larQ17[k]+32,6)); //Q11
if (larAbsQ11<4097) { //2.000012018559 in Q11
// Q11*Q16>>12 = Q15
@@ -735,21 +735,21 @@
rc = -rc;
}
- rcQ15[k] = (WebRtc_Word16) rc; // Q15
+ rcQ15[k] = (int16_t) rc; // Q15
}
}
-static void Poly2LarFix(WebRtc_Word16 *lowbandQ15,
- WebRtc_Word16 orderLo,
- WebRtc_Word16 *hibandQ15,
- WebRtc_Word16 orderHi,
- WebRtc_Word16 Nsub,
- WebRtc_Word32 *larsQ17) {
+static void Poly2LarFix(int16_t *lowbandQ15,
+ int16_t orderLo,
+ int16_t *hibandQ15,
+ int16_t orderHi,
+ int16_t Nsub,
+ int32_t *larsQ17) {
int k, n;
- WebRtc_Word32 *outpQ17;
- WebRtc_Word16 orderTot;
- WebRtc_Word32 larQ17[MAX_ORDER]; // Size 7+6 is enough
+ int32_t *outpQ17;
+ int16_t orderTot;
+ int32_t larQ17[MAX_ORDER]; // Size 7+6 is enough
orderTot = (orderLo + orderHi);
outpQ17 = larsQ17;
@@ -772,18 +772,18 @@
}
-static void Lar2polyFix(WebRtc_Word32 *larsQ17,
- WebRtc_Word16 *lowbandQ15,
- WebRtc_Word16 orderLo,
- WebRtc_Word16 *hibandQ15,
- WebRtc_Word16 orderHi,
- WebRtc_Word16 Nsub) {
+static void Lar2polyFix(int32_t *larsQ17,
+ int16_t *lowbandQ15,
+ int16_t orderLo,
+ int16_t *hibandQ15,
+ int16_t orderHi,
+ int16_t Nsub) {
int k, n;
- WebRtc_Word16 orderTot;
- WebRtc_Word16 *outplQ15, *outphQ15;
- WebRtc_Word32 *inpQ17;
- WebRtc_Word16 rcQ15[7+6];
+ int16_t orderTot;
+ int16_t *outplQ15, *outphQ15;
+ int32_t *inpQ17;
+ int16_t rcQ15[7+6];
orderTot = (orderLo + orderHi);
outplQ15 = lowbandQ15;
@@ -918,13 +918,13 @@
}
}
-int WebRtcIsacfix_DecodeLpc(WebRtc_Word32 *gain_lo_hiQ17,
- WebRtc_Word16 *LPCCoef_loQ15,
- WebRtc_Word16 *LPCCoef_hiQ15,
+int WebRtcIsacfix_DecodeLpc(int32_t *gain_lo_hiQ17,
+ int16_t *LPCCoef_loQ15,
+ int16_t *LPCCoef_hiQ15,
Bitstr_dec *streamdata,
- WebRtc_Word16 *outmodel) {
+ int16_t *outmodel) {
- WebRtc_Word32 larsQ17[KLT_ORDER_SHAPE]; // KLT_ORDER_GAIN+KLT_ORDER_SHAPE == (ORDERLO+ORDERHI)*SUBFRAMES
+ int32_t larsQ17[KLT_ORDER_SHAPE]; // KLT_ORDER_GAIN+KLT_ORDER_SHAPE == (ORDERLO+ORDERHI)*SUBFRAMES
int err;
err = WebRtcIsacfix_DecodeLpcCoef(streamdata, larsQ17, gain_lo_hiQ17, outmodel);
@@ -938,24 +938,24 @@
/* decode & dequantize LPC Coef */
int WebRtcIsacfix_DecodeLpcCoef(Bitstr_dec *streamdata,
- WebRtc_Word32 *LPCCoefQ17,
- WebRtc_Word32 *gain_lo_hiQ17,
- WebRtc_Word16 *outmodel)
+ int32_t *LPCCoefQ17,
+ int32_t *gain_lo_hiQ17,
+ int16_t *outmodel)
{
int j, k, n;
int err;
- WebRtc_Word16 pos, pos2, posg, poss;
- WebRtc_Word16 gainpos;
- WebRtc_Word16 model;
- WebRtc_Word16 index_QQ[KLT_ORDER_SHAPE];
- WebRtc_Word32 tmpcoeffs_gQ17[KLT_ORDER_GAIN];
- WebRtc_Word32 tmpcoeffs2_gQ21[KLT_ORDER_GAIN];
- WebRtc_Word16 tmpcoeffs_sQ10[KLT_ORDER_SHAPE];
- WebRtc_Word32 tmpcoeffs_sQ17[KLT_ORDER_SHAPE];
- WebRtc_Word32 tmpcoeffs2_sQ18[KLT_ORDER_SHAPE];
- WebRtc_Word32 sumQQ;
- WebRtc_Word16 sumQQ16;
- WebRtc_Word32 tmp32;
+ int16_t pos, pos2, posg, poss;
+ int16_t gainpos;
+ int16_t model;
+ int16_t index_QQ[KLT_ORDER_SHAPE];
+ int32_t tmpcoeffs_gQ17[KLT_ORDER_GAIN];
+ int32_t tmpcoeffs2_gQ21[KLT_ORDER_GAIN];
+ int16_t tmpcoeffs_sQ10[KLT_ORDER_SHAPE];
+ int32_t tmpcoeffs_sQ17[KLT_ORDER_SHAPE];
+ int32_t tmpcoeffs2_sQ18[KLT_ORDER_SHAPE];
+ int32_t sumQQ;
+ int16_t sumQQ16;
+ int32_t tmp32;
@@ -1020,14 +1020,14 @@
for (k=0; k<SUBFRAMES; k++) {
/* log gains */
- sumQQ16 = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(tmpcoeffs_gQ17[posg], 2+9); //Divide by 4 and get Q17 to Q8, i.e. shift 2+9
+ sumQQ16 = (int16_t) WEBRTC_SPL_RSHIFT_W32(tmpcoeffs_gQ17[posg], 2+9); //Divide by 4 and get Q17 to Q8, i.e. shift 2+9
sumQQ16 += WebRtcIsacfix_kMeansGainQ8[model][posg];
sumQQ = CalcExpN(sumQQ16); // Q8 in and Q17 out
gain_lo_hiQ17[gainpos] = sumQQ; //Q17
gainpos++;
posg++;
- sumQQ16 = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(tmpcoeffs_gQ17[posg], 2+9); //Divide by 4 and get Q17 to Q8, i.e. shift 2+9
+ sumQQ16 = (int16_t) WEBRTC_SPL_RSHIFT_W32(tmpcoeffs_gQ17[posg], 2+9); //Divide by 4 and get Q17 to Q8, i.e. shift 2+9
sumQQ16 += WebRtcIsacfix_kMeansGainQ8[model][posg];
sumQQ = CalcExpN(sumQQ16); // Q8 in and Q17 out
gain_lo_hiQ17[gainpos] = sumQQ; //Q17
@@ -1056,28 +1056,28 @@
}
/* estimate codel length of LPC Coef */
-static int EstCodeLpcCoef(WebRtc_Word32 *LPCCoefQ17,
- WebRtc_Word32 *gain_lo_hiQ17,
- WebRtc_Word16 *model,
- WebRtc_Word32 *sizeQ11,
+static int EstCodeLpcCoef(int32_t *LPCCoefQ17,
+ int32_t *gain_lo_hiQ17,
+ int16_t *model,
+ int32_t *sizeQ11,
Bitstr_enc *streamdata,
ISAC_SaveEncData_t* encData,
transcode_obj *transcodingParam) {
int j, k, n;
- WebRtc_Word16 posQQ, pos2QQ, gainpos;
- WebRtc_Word16 pos, poss, posg, offsg;
- WebRtc_Word16 index_gQQ[KLT_ORDER_GAIN], index_sQQ[KLT_ORDER_SHAPE];
- WebRtc_Word16 index_ovr_gQQ[KLT_ORDER_GAIN], index_ovr_sQQ[KLT_ORDER_SHAPE];
- WebRtc_Word32 BitsQQ;
+ int16_t posQQ, pos2QQ, gainpos;
+ int16_t pos, poss, posg, offsg;
+ int16_t index_gQQ[KLT_ORDER_GAIN], index_sQQ[KLT_ORDER_SHAPE];
+ int16_t index_ovr_gQQ[KLT_ORDER_GAIN], index_ovr_sQQ[KLT_ORDER_SHAPE];
+ int32_t BitsQQ;
- WebRtc_Word16 tmpcoeffs_gQ6[KLT_ORDER_GAIN];
- WebRtc_Word32 tmpcoeffs_gQ17[KLT_ORDER_GAIN];
- WebRtc_Word32 tmpcoeffs_sQ17[KLT_ORDER_SHAPE];
- WebRtc_Word32 tmpcoeffs2_gQ21[KLT_ORDER_GAIN];
- WebRtc_Word32 tmpcoeffs2_sQ17[KLT_ORDER_SHAPE];
- WebRtc_Word32 sumQQ;
- WebRtc_Word32 tmp32;
- WebRtc_Word16 sumQQ16;
+ int16_t tmpcoeffs_gQ6[KLT_ORDER_GAIN];
+ int32_t tmpcoeffs_gQ17[KLT_ORDER_GAIN];
+ int32_t tmpcoeffs_sQ17[KLT_ORDER_SHAPE];
+ int32_t tmpcoeffs2_gQ21[KLT_ORDER_GAIN];
+ int32_t tmpcoeffs2_sQ17[KLT_ORDER_SHAPE];
+ int32_t sumQQ;
+ int32_t tmp32;
+ int16_t sumQQ16;
int status = 0;
/* write LAR coefficients to statistics file */
@@ -1170,7 +1170,7 @@
for (k=0; k<KLT_ORDER_GAIN; k++) //ATTN: ok?
{
posQQ = WebRtcIsacfix_kSelIndGain[k];
- pos2QQ= (WebRtc_Word16)CalcLrIntQ(tmpcoeffs_gQ17[posQQ], 17);
+ pos2QQ= (int16_t)CalcLrIntQ(tmpcoeffs_gQ17[posQQ], 17);
index_gQQ[k] = pos2QQ + WebRtcIsacfix_kQuantMinGain[k]; //ATTN: ok?
if (index_gQQ[k] < 0) {
@@ -1194,7 +1194,7 @@
for (k=0; k<KLT_ORDER_SHAPE; k++) //ATTN: ok?
{
- index_sQQ[k] = (WebRtc_Word16)(CalcLrIntQ(tmpcoeffs_sQ17[WebRtcIsacfix_kSelIndShape[k]], 17) + WebRtcIsacfix_kQuantMinShape[k]); //ATTN: ok?
+ index_sQQ[k] = (int16_t)(CalcLrIntQ(tmpcoeffs_sQ17[WebRtcIsacfix_kSelIndShape[k]], 17) + WebRtcIsacfix_kQuantMinShape[k]); //ATTN: ok?
if (index_sQQ[k] < 0)
index_sQQ[k] = 0;
@@ -1321,7 +1321,7 @@
gainpos = 0;
for (k=0; k<2*SUBFRAMES; k++) {
- sumQQ16 = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(tmpcoeffs_gQ17[posg], 2+9); //Divide by 4 and get Q17 to Q8, i.e. shift 2+9
+ sumQQ16 = (int16_t) WEBRTC_SPL_RSHIFT_W32(tmpcoeffs_gQ17[posg], 2+9); //Divide by 4 and get Q17 to Q8, i.e. shift 2+9
sumQQ16 += WebRtcIsacfix_kMeansGainQ8[0][posg];
sumQQ = CalcExpN(sumQQ16); // Q8 in and Q17 out
gain_lo_hiQ17[gainpos] = sumQQ; //Q17
@@ -1333,18 +1333,18 @@
return 0;
}
-int WebRtcIsacfix_EstCodeLpcGain(WebRtc_Word32 *gain_lo_hiQ17,
+int WebRtcIsacfix_EstCodeLpcGain(int32_t *gain_lo_hiQ17,
Bitstr_enc *streamdata,
ISAC_SaveEncData_t* encData) {
int j, k;
- WebRtc_Word16 posQQ, pos2QQ, gainpos;
- WebRtc_Word16 posg;
- WebRtc_Word16 index_gQQ[KLT_ORDER_GAIN];
+ int16_t posQQ, pos2QQ, gainpos;
+ int16_t posg;
+ int16_t index_gQQ[KLT_ORDER_GAIN];
- WebRtc_Word16 tmpcoeffs_gQ6[KLT_ORDER_GAIN];
- WebRtc_Word32 tmpcoeffs_gQ17[KLT_ORDER_GAIN];
- WebRtc_Word32 tmpcoeffs2_gQ21[KLT_ORDER_GAIN];
- WebRtc_Word32 sumQQ;
+ int16_t tmpcoeffs_gQ6[KLT_ORDER_GAIN];
+ int32_t tmpcoeffs_gQ17[KLT_ORDER_GAIN];
+ int32_t tmpcoeffs2_gQ21[KLT_ORDER_GAIN];
+ int32_t sumQQ;
int status = 0;
/* write LAR coefficients to statistics file */
@@ -1409,7 +1409,7 @@
for (k=0; k<KLT_ORDER_GAIN; k++) //ATTN: ok?
{
posQQ = WebRtcIsacfix_kSelIndGain[k];
- pos2QQ= (WebRtc_Word16)CalcLrIntQ(tmpcoeffs_gQ17[posQQ], 17);
+ pos2QQ= (int16_t)CalcLrIntQ(tmpcoeffs_gQ17[posQQ], 17);
index_gQQ[k] = pos2QQ + WebRtcIsacfix_kQuantMinGain[k]; //ATTN: ok?
if (index_gQQ[k] < 0) {
@@ -1435,17 +1435,17 @@
}
-int WebRtcIsacfix_EncodeLpc(WebRtc_Word32 *gain_lo_hiQ17,
- WebRtc_Word16 *LPCCoef_loQ15,
- WebRtc_Word16 *LPCCoef_hiQ15,
- WebRtc_Word16 *model,
- WebRtc_Word32 *sizeQ11,
+int WebRtcIsacfix_EncodeLpc(int32_t *gain_lo_hiQ17,
+ int16_t *LPCCoef_loQ15,
+ int16_t *LPCCoef_hiQ15,
+ int16_t *model,
+ int32_t *sizeQ11,
Bitstr_enc *streamdata,
ISAC_SaveEncData_t* encData,
transcode_obj *transcodeParam)
{
int status = 0;
- WebRtc_Word32 larsQ17[KLT_ORDER_SHAPE]; // KLT_ORDER_SHAPE == (ORDERLO+ORDERHI)*SUBFRAMES
+ int32_t larsQ17[KLT_ORDER_SHAPE]; // KLT_ORDER_SHAPE == (ORDERLO+ORDERHI)*SUBFRAMES
// = (6+12)*6 == 108
Poly2LarFix(LPCCoef_loQ15, ORDERLO, LPCCoef_hiQ15, ORDERHI, SUBFRAMES, larsQ17);
@@ -1463,10 +1463,10 @@
/* decode & dequantize RC */
-int WebRtcIsacfix_DecodeRcCoef(Bitstr_dec *streamdata, WebRtc_Word16 *RCQ15)
+int WebRtcIsacfix_DecodeRcCoef(Bitstr_dec *streamdata, int16_t *RCQ15)
{
int k, err;
- WebRtc_Word16 index[AR_ORDER];
+ int16_t index[AR_ORDER];
/* entropy decoding of quantization indices */
err = WebRtcIsacfix_DecHistOneStepMulti(index, streamdata, WebRtcIsacfix_kRcCdfPtr, WebRtcIsacfix_kRcInitInd, AR_ORDER);
@@ -1485,10 +1485,10 @@
/* quantize & code RC */
-int WebRtcIsacfix_EncodeRcCoef(WebRtc_Word16 *RCQ15, Bitstr_enc *streamdata)
+int WebRtcIsacfix_EncodeRcCoef(int16_t *RCQ15, Bitstr_enc *streamdata)
{
int k;
- WebRtc_Word16 index[AR_ORDER];
+ int16_t index[AR_ORDER];
int status;
/* quantize reflection coefficients (add noise feedback?) */
@@ -1519,10 +1519,10 @@
/* decode & dequantize squared Gain */
-int WebRtcIsacfix_DecodeGain2(Bitstr_dec *streamdata, WebRtc_Word32 *gainQ10)
+int WebRtcIsacfix_DecodeGain2(Bitstr_dec *streamdata, int32_t *gainQ10)
{
int err;
- WebRtc_Word16 index;
+ int16_t index;
/* entropy decoding of quantization index */
err = WebRtcIsacfix_DecHistOneStepMulti(
@@ -1545,9 +1545,9 @@
/* quantize & code squared Gain */
-int WebRtcIsacfix_EncodeGain2(WebRtc_Word32 *gainQ10, Bitstr_enc *streamdata)
+int WebRtcIsacfix_EncodeGain2(int32_t *gainQ10, Bitstr_enc *streamdata)
{
- WebRtc_Word16 index;
+ int16_t index;
int status = 0;
/* find quantization index */
@@ -1576,11 +1576,11 @@
/* code and decode Pitch Gains and Lags functions */
/* decode & dequantize Pitch Gains */
-int WebRtcIsacfix_DecodePitchGain(Bitstr_dec *streamdata, WebRtc_Word16 *PitchGains_Q12)
+int WebRtcIsacfix_DecodePitchGain(Bitstr_dec *streamdata, int16_t *PitchGains_Q12)
{
int err;
- WebRtc_Word16 index_comb;
- const WebRtc_UWord16 *pitch_gain_cdf_ptr[1];
+ int16_t index_comb;
+ const uint16_t *pitch_gain_cdf_ptr[1];
/* entropy decoding of quantization indices */
*pitch_gain_cdf_ptr = WebRtcIsacfix_kPitchGainCdf;
@@ -1600,20 +1600,20 @@
/* quantize & code Pitch Gains */
-int WebRtcIsacfix_EncodePitchGain(WebRtc_Word16 *PitchGains_Q12, Bitstr_enc *streamdata, ISAC_SaveEncData_t* encData)
+int WebRtcIsacfix_EncodePitchGain(int16_t *PitchGains_Q12, Bitstr_enc *streamdata, ISAC_SaveEncData_t* encData)
{
int k,j;
- WebRtc_Word16 SQ15[PITCH_SUBFRAMES];
- WebRtc_Word16 index[3];
- WebRtc_Word16 index_comb;
- const WebRtc_UWord16 *pitch_gain_cdf_ptr[1];
- WebRtc_Word32 CQ17;
+ int16_t SQ15[PITCH_SUBFRAMES];
+ int16_t index[3];
+ int16_t index_comb;
+ const uint16_t *pitch_gain_cdf_ptr[1];
+ int32_t CQ17;
int status = 0;
/* get the approximate arcsine (almost linear)*/
for (k=0; k<PITCH_SUBFRAMES; k++)
- SQ15[k] = (WebRtc_Word16) WEBRTC_SPL_MUL_16_16_RSFT(PitchGains_Q12[k],33,2); //Q15
+ SQ15[k] = (int16_t) WEBRTC_SPL_MUL_16_16_RSFT(PitchGains_Q12[k],33,2); //Q15
/* find quantization index; only for the first three transform coefficients */
@@ -1625,7 +1625,7 @@
CQ17 += WEBRTC_SPL_MUL_16_16_RSFT(WebRtcIsacfix_kTransform[k][j], SQ15[j],10); // Q17
}
- index[k] = (WebRtc_Word16)((CQ17 + 8192)>>14); // Rounding and scaling with stepsize (=1/0.125=8)
+ index[k] = (int16_t)((CQ17 + 8192)>>14); // Rounding and scaling with stepsize (=1/0.125=8)
/* check that the index is not outside the boundaries of the table */
if (index[k] < WebRtcIsacfix_kLowerlimiGain[k]) index[k] = WebRtcIsacfix_kLowerlimiGain[k];
@@ -1634,7 +1634,7 @@
}
/* calculate unique overall index */
- index_comb = (WebRtc_Word16)(WEBRTC_SPL_MUL(WebRtcIsacfix_kMultsGain[0], index[0]) +
+ index_comb = (int16_t)(WEBRTC_SPL_MUL(WebRtcIsacfix_kMultsGain[0], index[0]) +
WEBRTC_SPL_MUL(WebRtcIsacfix_kMultsGain[1], index[1]) + index[2]);
/* unquantize back to pitch gains by table look-up */
@@ -1667,21 +1667,21 @@
/* decode & dequantize Pitch Lags */
int WebRtcIsacfix_DecodePitchLag(Bitstr_dec *streamdata,
- WebRtc_Word16 *PitchGain_Q12,
- WebRtc_Word16 *PitchLags_Q7)
+ int16_t *PitchGain_Q12,
+ int16_t *PitchLags_Q7)
{
int k, err;
- WebRtc_Word16 index[PITCH_SUBFRAMES];
- const WebRtc_Word16 *mean_val2Q10, *mean_val4Q10;
+ int16_t index[PITCH_SUBFRAMES];
+ const int16_t *mean_val2Q10, *mean_val4Q10;
- const WebRtc_Word16 *lower_limit;
- const WebRtc_UWord16 *init_index;
- const WebRtc_UWord16 *cdf_size;
- const WebRtc_UWord16 **cdf;
+ const int16_t *lower_limit;
+ const uint16_t *init_index;
+ const uint16_t *cdf_size;
+ const uint16_t **cdf;
- WebRtc_Word32 meangainQ12;
- WebRtc_Word32 CQ11, CQ10,tmp32a,tmp32b;
- WebRtc_Word16 shft,tmp16a,tmp16c;
+ int32_t meangainQ12;
+ int32_t CQ11, CQ10,tmp32a,tmp32b;
+ int16_t shft,tmp16a,tmp16c;
meangainQ12=0;
for (k = 0; k < 4; k++)
@@ -1727,25 +1727,25 @@
/* unquantize back to transform coefficients and do the inverse transform: S = T'*C */
- CQ11 = ((WebRtc_Word32)index[0] + lower_limit[0]); // Q0
+ CQ11 = ((int32_t)index[0] + lower_limit[0]); // Q0
CQ11 = WEBRTC_SPL_SHIFT_W32(CQ11,11-shft); // Scale with StepSize, Q11
for (k=0; k<PITCH_SUBFRAMES; k++) {
tmp32a = WEBRTC_SPL_MUL_16_32_RSFT11(WebRtcIsacfix_kTransform[0][k], CQ11);
- tmp16a = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(tmp32a, 5);
+ tmp16a = (int16_t) WEBRTC_SPL_RSHIFT_W32(tmp32a, 5);
PitchLags_Q7[k] = tmp16a;
}
CQ10 = mean_val2Q10[index[1]];
for (k=0; k<PITCH_SUBFRAMES; k++) {
- tmp32b = (WebRtc_Word32) WEBRTC_SPL_MUL_16_16_RSFT((WebRtc_Word16) WebRtcIsacfix_kTransform[1][k], (WebRtc_Word16) CQ10,10);
- tmp16c = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(tmp32b, 5);
+ tmp32b = (int32_t) WEBRTC_SPL_MUL_16_16_RSFT((int16_t) WebRtcIsacfix_kTransform[1][k], (int16_t) CQ10,10);
+ tmp16c = (int16_t) WEBRTC_SPL_RSHIFT_W32(tmp32b, 5);
PitchLags_Q7[k] += tmp16c;
}
CQ10 = mean_val4Q10[index[3]];
for (k=0; k<PITCH_SUBFRAMES; k++) {
- tmp32b = (WebRtc_Word32) WEBRTC_SPL_MUL_16_16_RSFT((WebRtc_Word16) WebRtcIsacfix_kTransform[3][k], (WebRtc_Word16) CQ10,10);
- tmp16c = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(tmp32b, 5);
+ tmp32b = (int32_t) WEBRTC_SPL_MUL_16_16_RSFT((int16_t) WebRtcIsacfix_kTransform[3][k], (int16_t) CQ10,10);
+ tmp16c = (int16_t) WEBRTC_SPL_RSHIFT_W32(tmp32b, 5);
PitchLags_Q7[k] += tmp16c;
}
@@ -1755,19 +1755,19 @@
/* quantize & code Pitch Lags */
-int WebRtcIsacfix_EncodePitchLag(WebRtc_Word16 *PitchLagsQ7,WebRtc_Word16 *PitchGain_Q12,
+int WebRtcIsacfix_EncodePitchLag(int16_t *PitchLagsQ7,int16_t *PitchGain_Q12,
Bitstr_enc *streamdata, ISAC_SaveEncData_t* encData)
{
int k, j;
- WebRtc_Word16 index[PITCH_SUBFRAMES];
- WebRtc_Word32 meangainQ12, CQ17;
- WebRtc_Word32 CQ11, CQ10,tmp32a;
+ int16_t index[PITCH_SUBFRAMES];
+ int32_t meangainQ12, CQ17;
+ int32_t CQ11, CQ10,tmp32a;
- const WebRtc_Word16 *mean_val2Q10,*mean_val4Q10;
- const WebRtc_Word16 *lower_limit, *upper_limit;
- const WebRtc_UWord16 **cdf;
- WebRtc_Word16 shft, tmp16a, tmp16b, tmp16c;
- WebRtc_Word32 tmp32b;
+ const int16_t *mean_val2Q10,*mean_val4Q10;
+ const int16_t *lower_limit, *upper_limit;
+ const uint16_t **cdf;
+ int16_t shft, tmp16a, tmp16b, tmp16c;
+ int32_t tmp32b;
int status = 0;
/* compute mean pitch gain */
@@ -1817,7 +1817,7 @@
CQ17 = WEBRTC_SPL_SHIFT_W32(CQ17,shft); // Scale with StepSize
/* quantize */
- tmp16b = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(CQ17 + 65536, 17 );
+ tmp16b = (int16_t) WEBRTC_SPL_RSHIFT_W32(CQ17 + 65536, 17 );
index[k] = tmp16b;
/* check that the index is not outside the boundaries of the table */
@@ -1837,21 +1837,21 @@
for (k=0; k<PITCH_SUBFRAMES; k++) {
tmp32a = WEBRTC_SPL_MUL_16_32_RSFT11(WebRtcIsacfix_kTransform[0][k], CQ11); // Q12
- tmp16a = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(tmp32a, 5);// Q7
+ tmp16a = (int16_t) WEBRTC_SPL_RSHIFT_W32(tmp32a, 5);// Q7
PitchLagsQ7[k] = tmp16a;
}
CQ10 = mean_val2Q10[index[1]];
for (k=0; k<PITCH_SUBFRAMES; k++) {
- tmp32b = (WebRtc_Word32) WEBRTC_SPL_MUL_16_16_RSFT((WebRtc_Word16) WebRtcIsacfix_kTransform[1][k], (WebRtc_Word16) CQ10,10);
- tmp16c = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(tmp32b, 5); // Q7
+ tmp32b = (int32_t) WEBRTC_SPL_MUL_16_16_RSFT((int16_t) WebRtcIsacfix_kTransform[1][k], (int16_t) CQ10,10);
+ tmp16c = (int16_t) WEBRTC_SPL_RSHIFT_W32(tmp32b, 5); // Q7
PitchLagsQ7[k] += tmp16c;
}
CQ10 = mean_val4Q10[index[3]];
for (k=0; k<PITCH_SUBFRAMES; k++) {
- tmp32b = (WebRtc_Word32) WEBRTC_SPL_MUL_16_16_RSFT((WebRtc_Word16) WebRtcIsacfix_kTransform[3][k], (WebRtc_Word16) CQ10,10);
- tmp16c = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(tmp32b, 5); // Q7
+ tmp32b = (int32_t) WEBRTC_SPL_MUL_16_16_RSFT((int16_t) WebRtcIsacfix_kTransform[3][k], (int16_t) CQ10,10);
+ tmp16c = (int16_t) WEBRTC_SPL_RSHIFT_W32(tmp32b, 5); // Q7
PitchLagsQ7[k] += tmp16c;
}
@@ -1870,22 +1870,22 @@
/* cdf array for frame length indicator */
-const WebRtc_UWord16 kFrameLenCdf[4] = {
+const uint16_t kFrameLenCdf[4] = {
0, 21845, 43690, 65535};
/* pointer to cdf array for frame length indicator */
-const WebRtc_UWord16 *kFrameLenCdfPtr[1] = {kFrameLenCdf};
+const uint16_t *kFrameLenCdfPtr[1] = {kFrameLenCdf};
/* initial cdf index for decoder of frame length indicator */
-const WebRtc_UWord16 kFrameLenInitIndex[1] = {1};
+const uint16_t kFrameLenInitIndex[1] = {1};
int WebRtcIsacfix_DecodeFrameLen(Bitstr_dec *streamdata,
- WebRtc_Word16 *framesamples)
+ int16_t *framesamples)
{
int err;
- WebRtc_Word16 frame_mode;
+ int16_t frame_mode;
err = 0;
/* entropy decoding of frame length [1:30ms,2:60ms] */
@@ -1908,10 +1908,10 @@
}
-int WebRtcIsacfix_EncodeFrameLen(WebRtc_Word16 framesamples, Bitstr_enc *streamdata) {
+int WebRtcIsacfix_EncodeFrameLen(int16_t framesamples, Bitstr_enc *streamdata) {
int status;
- WebRtc_Word16 frame_mode;
+ int16_t frame_mode;
status = 0;
frame_mode = 0;
@@ -1936,34 +1936,34 @@
}
/* cdf array for estimated bandwidth */
-const WebRtc_UWord16 kBwCdf[25] = {
+const uint16_t kBwCdf[25] = {
0, 2731, 5461, 8192, 10923, 13653, 16384, 19114, 21845, 24576, 27306, 30037,
32768, 35498, 38229, 40959, 43690, 46421, 49151, 51882, 54613, 57343, 60074,
62804, 65535};
/* pointer to cdf array for estimated bandwidth */
-const WebRtc_UWord16 *kBwCdfPtr[1] = {kBwCdf};
+const uint16_t *kBwCdfPtr[1] = {kBwCdf};
/* initial cdf index for decoder of estimated bandwidth*/
-const WebRtc_UWord16 kBwInitIndex[1] = {7};
+const uint16_t kBwInitIndex[1] = {7};
-int WebRtcIsacfix_DecodeSendBandwidth(Bitstr_dec *streamdata, WebRtc_Word16 *BWno) {
+int WebRtcIsacfix_DecodeSendBandwidth(Bitstr_dec *streamdata, int16_t *BWno) {
int err;
- WebRtc_Word16 BWno32;
+ int16_t BWno32;
/* entropy decoding of sender's BW estimation [0..23] */
err = WebRtcIsacfix_DecHistOneStepMulti(&BWno32, streamdata, kBwCdfPtr, kBwInitIndex, 1);
if (err<0) // error check
return -ISAC_RANGE_ERROR_DECODE_BANDWIDTH;
- *BWno = (WebRtc_Word16)BWno32;
+ *BWno = (int16_t)BWno32;
return err;
}
-int WebRtcIsacfix_EncodeReceiveBandwidth(WebRtc_Word16 *BWno, Bitstr_enc *streamdata)
+int WebRtcIsacfix_EncodeReceiveBandwidth(int16_t *BWno, Bitstr_enc *streamdata)
{
int status = 0;
/* entropy encoding of receiver's BW estimation [0..23] */
@@ -1973,15 +1973,15 @@
}
/* estimate code length of LPC Coef */
-void WebRtcIsacfix_TranscodeLpcCoef(WebRtc_Word32 *gain_lo_hiQ17,
- WebRtc_Word16 *index_gQQ) {
+void WebRtcIsacfix_TranscodeLpcCoef(int32_t *gain_lo_hiQ17,
+ int16_t *index_gQQ) {
int j, k;
- WebRtc_Word16 posQQ, pos2QQ;
- WebRtc_Word16 posg, offsg, gainpos;
- WebRtc_Word32 tmpcoeffs_gQ6[KLT_ORDER_GAIN];
- WebRtc_Word32 tmpcoeffs_gQ17[KLT_ORDER_GAIN];
- WebRtc_Word32 tmpcoeffs2_gQ21[KLT_ORDER_GAIN];
- WebRtc_Word32 sumQQ;
+ int16_t posQQ, pos2QQ;
+ int16_t posg, offsg, gainpos;
+ int32_t tmpcoeffs_gQ6[KLT_ORDER_GAIN];
+ int32_t tmpcoeffs_gQ17[KLT_ORDER_GAIN];
+ int32_t tmpcoeffs2_gQ21[KLT_ORDER_GAIN];
+ int32_t sumQQ;
/* log gains, mean removal and scaling */
@@ -2036,7 +2036,7 @@
for (k=0; k<KLT_ORDER_GAIN; k++) //ATTN: ok?
{
posQQ = WebRtcIsacfix_kSelIndGain[k];
- pos2QQ= (WebRtc_Word16)CalcLrIntQ(tmpcoeffs_gQ17[posQQ], 17);
+ pos2QQ= (int16_t)CalcLrIntQ(tmpcoeffs_gQ17[posQQ], 17);
index_gQQ[k] = pos2QQ + WebRtcIsacfix_kQuantMinGain[k]; //ATTN: ok?
if (index_gQQ[k] < 0) {
diff --git a/webrtc/modules/audio_coding/codecs/isac/fix/source/entropy_coding.h b/webrtc/modules/audio_coding/codecs/isac/fix/source/entropy_coding.h
index 941fd0b..dd85090 100644
--- a/webrtc/modules/audio_coding/codecs/isac/fix/source/entropy_coding.h
+++ b/webrtc/modules/audio_coding/codecs/isac/fix/source/entropy_coding.h
@@ -22,91 +22,91 @@
#include "structs.h"
/* decode complex spectrum (return number of bytes in stream) */
-WebRtc_Word16 WebRtcIsacfix_DecodeSpec(Bitstr_dec *streamdata,
- WebRtc_Word16 *frQ7,
- WebRtc_Word16 *fiQ7,
- WebRtc_Word16 AvgPitchGain_Q12);
+int16_t WebRtcIsacfix_DecodeSpec(Bitstr_dec *streamdata,
+ int16_t *frQ7,
+ int16_t *fiQ7,
+ int16_t AvgPitchGain_Q12);
/* encode complex spectrum */
-int WebRtcIsacfix_EncodeSpec(const WebRtc_Word16 *fr,
- const WebRtc_Word16 *fi,
+int WebRtcIsacfix_EncodeSpec(const int16_t *fr,
+ const int16_t *fi,
Bitstr_enc *streamdata,
- WebRtc_Word16 AvgPitchGain_Q12);
+ int16_t AvgPitchGain_Q12);
/* decode & dequantize LPC Coef */
int WebRtcIsacfix_DecodeLpcCoef(Bitstr_dec *streamdata,
- WebRtc_Word32 *LPCCoefQ17,
- WebRtc_Word32 *gain_lo_hiQ17,
- WebRtc_Word16 *outmodel);
+ int32_t *LPCCoefQ17,
+ int32_t *gain_lo_hiQ17,
+ int16_t *outmodel);
-int WebRtcIsacfix_DecodeLpc(WebRtc_Word32 *gain_lo_hiQ17,
- WebRtc_Word16 *LPCCoef_loQ15,
- WebRtc_Word16 *LPCCoef_hiQ15,
+int WebRtcIsacfix_DecodeLpc(int32_t *gain_lo_hiQ17,
+ int16_t *LPCCoef_loQ15,
+ int16_t *LPCCoef_hiQ15,
Bitstr_dec *streamdata,
- WebRtc_Word16 *outmodel);
+ int16_t *outmodel);
/* quantize & code LPC Coef */
-int WebRtcIsacfix_EncodeLpc(WebRtc_Word32 *gain_lo_hiQ17,
- WebRtc_Word16 *LPCCoef_loQ15,
- WebRtc_Word16 *LPCCoef_hiQ15,
- WebRtc_Word16 *model,
- WebRtc_Word32 *sizeQ11,
+int WebRtcIsacfix_EncodeLpc(int32_t *gain_lo_hiQ17,
+ int16_t *LPCCoef_loQ15,
+ int16_t *LPCCoef_hiQ15,
+ int16_t *model,
+ int32_t *sizeQ11,
Bitstr_enc *streamdata,
ISAC_SaveEncData_t* encData,
transcode_obj *transcodeParam);
-int WebRtcIsacfix_EstCodeLpcGain(WebRtc_Word32 *gain_lo_hiQ17,
+int WebRtcIsacfix_EstCodeLpcGain(int32_t *gain_lo_hiQ17,
Bitstr_enc *streamdata,
ISAC_SaveEncData_t* encData);
/* decode & dequantize RC */
int WebRtcIsacfix_DecodeRcCoef(Bitstr_dec *streamdata,
- WebRtc_Word16 *RCQ15);
+ int16_t *RCQ15);
/* quantize & code RC */
-int WebRtcIsacfix_EncodeRcCoef(WebRtc_Word16 *RCQ15,
+int WebRtcIsacfix_EncodeRcCoef(int16_t *RCQ15,
Bitstr_enc *streamdata);
/* decode & dequantize squared Gain */
int WebRtcIsacfix_DecodeGain2(Bitstr_dec *streamdata,
- WebRtc_Word32 *Gain2);
+ int32_t *Gain2);
/* quantize & code squared Gain (input is squared gain) */
-int WebRtcIsacfix_EncodeGain2(WebRtc_Word32 *gain2,
+int WebRtcIsacfix_EncodeGain2(int32_t *gain2,
Bitstr_enc *streamdata);
-int WebRtcIsacfix_EncodePitchGain(WebRtc_Word16 *PitchGains_Q12,
+int WebRtcIsacfix_EncodePitchGain(int16_t *PitchGains_Q12,
Bitstr_enc *streamdata,
ISAC_SaveEncData_t* encData);
-int WebRtcIsacfix_EncodePitchLag(WebRtc_Word16 *PitchLagQ7,
- WebRtc_Word16 *PitchGain_Q12,
+int WebRtcIsacfix_EncodePitchLag(int16_t *PitchLagQ7,
+ int16_t *PitchGain_Q12,
Bitstr_enc *streamdata,
ISAC_SaveEncData_t* encData);
int WebRtcIsacfix_DecodePitchGain(Bitstr_dec *streamdata,
- WebRtc_Word16 *PitchGain_Q12);
+ int16_t *PitchGain_Q12);
int WebRtcIsacfix_DecodePitchLag(Bitstr_dec *streamdata,
- WebRtc_Word16 *PitchGain_Q12,
- WebRtc_Word16 *PitchLagQ7);
+ int16_t *PitchGain_Q12,
+ int16_t *PitchLagQ7);
int WebRtcIsacfix_DecodeFrameLen(Bitstr_dec *streamdata,
- WebRtc_Word16 *framelength);
+ int16_t *framelength);
-int WebRtcIsacfix_EncodeFrameLen(WebRtc_Word16 framelength,
+int WebRtcIsacfix_EncodeFrameLen(int16_t framelength,
Bitstr_enc *streamdata);
int WebRtcIsacfix_DecodeSendBandwidth(Bitstr_dec *streamdata,
- WebRtc_Word16 *BWno);
+ int16_t *BWno);
-int WebRtcIsacfix_EncodeReceiveBandwidth(WebRtc_Word16 *BWno,
+int WebRtcIsacfix_EncodeReceiveBandwidth(int16_t *BWno,
Bitstr_enc *streamdata);
-void WebRtcIsacfix_TranscodeLpcCoef(WebRtc_Word32 *tmpcoeffs_gQ6,
- WebRtc_Word16 *index_gQQ);
+void WebRtcIsacfix_TranscodeLpcCoef(int32_t *tmpcoeffs_gQ6,
+ int16_t *index_gQQ);
// Pointer functions for LPC transforms.
diff --git a/webrtc/modules/audio_coding/codecs/isac/fix/source/fft.c b/webrtc/modules/audio_coding/codecs/isac/fix/source/fft.c
index fff35c4..d9cf8f2 100644
--- a/webrtc/modules/audio_coding/codecs/isac/fix/source/fft.c
+++ b/webrtc/modules/audio_coding/codecs/isac/fix/source/fft.c
@@ -18,7 +18,7 @@
#include "fft.h"
-const WebRtc_Word16 kSortTabFft[240] = {
+const int16_t kSortTabFft[240] = {
0, 60, 120, 180, 20, 80, 140, 200, 40, 100, 160, 220,
4, 64, 124, 184, 24, 84, 144, 204, 44, 104, 164, 224,
8, 68, 128, 188, 28, 88, 148, 208, 48, 108, 168, 228,
@@ -42,7 +42,7 @@
};
/* Cosine table in Q14 */
-const WebRtc_Word16 kCosTabFfftQ14[240] = {
+const int16_t kCosTabFfftQ14[240] = {
16384, 16378, 16362, 16333, 16294, 16244, 16182, 16110, 16026, 15931, 15826, 15709,
15582, 15444, 15296, 15137, 14968, 14788, 14598, 14399, 14189, 13970, 13741, 13502,
13255, 12998, 12733, 12458, 12176, 11885, 11585, 11278, 10963, 10641, 10311, 9974,
@@ -68,18 +68,18 @@
/* Uses 16x16 mul, without rounding, which is faster. Uses WEBRTC_SPL_MUL_16_16_RSFT */
-WebRtc_Word16 WebRtcIsacfix_FftRadix16Fastest(WebRtc_Word16 RexQx[], WebRtc_Word16 ImxQx[], WebRtc_Word16 iSign) {
+int16_t WebRtcIsacfix_FftRadix16Fastest(int16_t RexQx[], int16_t ImxQx[], int16_t iSign) {
- WebRtc_Word16 dd, ee, ff, gg, hh, ii;
- WebRtc_Word16 k0, k1, k2, k3, k4, kk;
- WebRtc_Word16 tmp116, tmp216;
+ int16_t dd, ee, ff, gg, hh, ii;
+ int16_t k0, k1, k2, k3, k4, kk;
+ int16_t tmp116, tmp216;
- WebRtc_Word16 ccc1Q14, ccc2Q14, ccc3Q14, sss1Q14, sss2Q14, sss3Q14;
- WebRtc_Word16 sss60Q14, ccc72Q14, sss72Q14;
- WebRtc_Word16 aaQx, ajQx, akQx, ajmQx, ajpQx, akmQx, akpQx;
- WebRtc_Word16 bbQx, bjQx, bkQx, bjmQx, bjpQx, bkmQx, bkpQx;
+ int16_t ccc1Q14, ccc2Q14, ccc3Q14, sss1Q14, sss2Q14, sss3Q14;
+ int16_t sss60Q14, ccc72Q14, sss72Q14;
+ int16_t aaQx, ajQx, akQx, ajmQx, ajpQx, akmQx, akpQx;
+ int16_t bbQx, bjQx, bkQx, bjmQx, bjpQx, bkmQx, bkpQx;
- WebRtc_Word16 ReDATAQx[240], ImDATAQx[240];
+ int16_t ReDATAQx[240], ImDATAQx[240];
sss60Q14 = kCosTabFfftQ14[20];
ccc72Q14 = kCosTabFfftQ14[48];
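
Note: the butterflies below lean on WEBRTC_SPL_MUL_16_16_RSFT, a 16x16-bit multiply whose 32-bit product is shifted right without rounding; with Q14 twiddle factors from kCosTabFfftQ14, a shift of 14 keeps the result in the Q-format of the input data. A sketch of that operation, under the assumption that the macro behaves like a plain signed multiply followed by an arithmetic shift:

#include <stdint.h>

/* Assumed behaviour of WEBRTC_SPL_MUL_16_16_RSFT(a, b, 14): widen to 32 bits,
 * multiply, shift right 14 with no rounding.  With b in Q14 and a in Qx the
 * result stays in Qx. */
static int32_t MulByQ14(int16_t a_qx, int16_t b_q14) {
  return ((int32_t)a_qx * b_q14) >> 14;
}
/* Example: MulByQ14(1000, 16384) == 1000, since 16384 is 1.0 in Q14. */
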
@@ -145,18 +145,18 @@
// ImxQ16[k2] = ajpQ16 * sss2Q14 + bjpQ16 * ccc2Q14;
// ImxQ16[k3] = akmQ16 * sss3Q14 + bkmQ16 * ccc3Q14;
- RexQx[k1] = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(ccc1Q14, akpQx, 14) -
- (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(sss1Q14, bkpQx, 14); // 6 non-mul + 2 mul cycles, i.e. 8 cycles (6+2*7=20 cycles if 16x32mul)
- RexQx[k2] = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(ccc2Q14, ajpQx, 14) -
- (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(sss2Q14, bjpQx, 14);
- RexQx[k3] = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(ccc3Q14, akmQx, 14) -
- (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(sss3Q14, bkmQx, 14);
- ImxQx[k1] = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(sss1Q14, akpQx, 14) +
- (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(ccc1Q14, bkpQx, 14);
- ImxQx[k2] = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(sss2Q14, ajpQx, 14) +
- (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(ccc2Q14, bjpQx, 14);
- ImxQx[k3] = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(sss3Q14, akmQx, 14) +
- (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(ccc3Q14, bkmQx, 14);
+ RexQx[k1] = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(ccc1Q14, akpQx, 14) -
+ (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(sss1Q14, bkpQx, 14); // 6 non-mul + 2 mul cycles, i.e. 8 cycles (6+2*7=20 cycles if 16x32mul)
+ RexQx[k2] = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(ccc2Q14, ajpQx, 14) -
+ (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(sss2Q14, bjpQx, 14);
+ RexQx[k3] = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(ccc3Q14, akmQx, 14) -
+ (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(sss3Q14, bkmQx, 14);
+ ImxQx[k1] = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(sss1Q14, akpQx, 14) +
+ (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(ccc1Q14, bkpQx, 14);
+ ImxQx[k2] = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(sss2Q14, ajpQx, 14) +
+ (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(ccc2Q14, bjpQx, 14);
+ ImxQx[k3] = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(sss3Q14, akmQx, 14) +
+ (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(ccc3Q14, bkmQx, 14);
//This mul segment needs 6*8 = 48 cycles for 16x16 muls, but 6*20 = 120 cycles for 16x32 muls
@@ -183,8 +183,8 @@
tmp116 = RexQx[k1] - RexQx[k2];
tmp216 = ImxQx[k1] - ImxQx[k2];
- ajQx = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(sss60Q14, tmp116, 14); // Q14*Qx>>14 = Qx
- bjQx = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(sss60Q14, tmp216, 14); // Q14*Qx>>14 = Qx
+ ajQx = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(sss60Q14, tmp116, 14); // Q14*Qx>>14 = Qx
+ bjQx = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(sss60Q14, tmp216, 14); // Q14*Qx>>14 = Qx
RexQx[k1] = akQx - bjQx;
RexQx[k2] = akQx + bjQx;
ImxQx[k1] = bkQx + ajQx;
@@ -211,7 +211,7 @@
kk += 20;
ff = ff+4;
for (hh=0; hh<2; hh++) {
- ee = ff + (WebRtc_Word16)WEBRTC_SPL_MUL_16_16(hh, ff);
+ ee = ff + (int16_t)WEBRTC_SPL_MUL_16_16(hh, ff);
dd = ee + 60;
ccc2Q14 = kCosTabFfftQ14[ee];
sss2Q14 = kCosTabFfftQ14[dd];
@@ -221,10 +221,10 @@
for (ii=0; ii<4; ii++) {
akQx = RexQx[kk];
bkQx = ImxQx[kk];
- RexQx[kk] = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(ccc2Q14, akQx, 14) - // Q14*Qx>>14 = Qx
- (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(sss2Q14, bkQx, 14);
- ImxQx[kk] = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(sss2Q14, akQx, 14) + // Q14*Qx>>14 = Qx
- (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(ccc2Q14, bkQx, 14);
+ RexQx[kk] = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(ccc2Q14, akQx, 14) - // Q14*Qx>>14 = Qx
+ (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(sss2Q14, bkQx, 14);
+ ImxQx[kk] = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(sss2Q14, akQx, 14) + // Q14*Qx>>14 = Qx
+ (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(ccc2Q14, bkQx, 14);
kk += 60;
@@ -264,14 +264,14 @@
RexQx[kk] = aaQx + akpQx + ajpQx;
ImxQx[kk] = bbQx + bkpQx + bjpQx;
- akQx = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(ccc72Q14, akpQx, 14) +
- (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(ccc2Q14, ajpQx, 14) + aaQx;
- bkQx = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(ccc72Q14, bkpQx, 14) +
- (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(ccc2Q14, bjpQx, 14) + bbQx;
- ajQx = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(sss72Q14, akmQx, 14) +
- (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(sss2Q14, ajmQx, 14);
- bjQx = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(sss72Q14, bkmQx, 14) +
- (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(sss2Q14, bjmQx, 14);
+ akQx = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(ccc72Q14, akpQx, 14) +
+ (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(ccc2Q14, ajpQx, 14) + aaQx;
+ bkQx = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(ccc72Q14, bkpQx, 14) +
+ (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(ccc2Q14, bjpQx, 14) + bbQx;
+ ajQx = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(sss72Q14, akmQx, 14) +
+ (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(sss2Q14, ajmQx, 14);
+ bjQx = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(sss72Q14, bkmQx, 14) +
+ (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(sss2Q14, bjmQx, 14);
// 32+4*8=64 or 32+4*20=112
RexQx[k1] = akQx - bjQx;
@@ -279,14 +279,14 @@
ImxQx[k1] = bkQx + ajQx;
ImxQx[k4] = bkQx - ajQx;
- akQx = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(ccc2Q14, akpQx, 14) +
- (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(ccc72Q14, ajpQx, 14) + aaQx;
- bkQx = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(ccc2Q14, bkpQx, 14) +
- (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(ccc72Q14, bjpQx, 14) + bbQx;
- ajQx = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(sss2Q14, akmQx, 14) -
- (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(sss72Q14, ajmQx, 14);
- bjQx = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(sss2Q14, bkmQx, 14) -
- (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(sss72Q14, bjmQx, 14);
+ akQx = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(ccc2Q14, akpQx, 14) +
+ (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(ccc72Q14, ajpQx, 14) + aaQx;
+ bkQx = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(ccc2Q14, bkpQx, 14) +
+ (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(ccc72Q14, bjpQx, 14) + bbQx;
+ ajQx = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(sss2Q14, akmQx, 14) -
+ (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(sss72Q14, ajmQx, 14);
+ bjQx = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(sss2Q14, bkmQx, 14) -
+ (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(sss72Q14, bjmQx, 14);
// 8+4*8=40 or 8+4*20=88
RexQx[k2] = akQx - bjQx;
@@ -308,7 +308,7 @@
for (gg=0; gg<3; gg++) {
kk += 4;
- dd = 12 + (WebRtc_Word16)WEBRTC_SPL_MUL_16_16(12, gg);
+ dd = 12 + (int16_t)WEBRTC_SPL_MUL_16_16(12, gg);
ff = 0;
for (hh=0; hh<4; hh++) {
ff = ff+dd;
@@ -324,10 +324,10 @@
sss2Q14 = -sss2Q14;
}
- RexQx[kk] = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(ccc2Q14, akQx, 14) -
- (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(sss2Q14, bkQx, 14);
- ImxQx[kk] = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(sss2Q14, akQx, 14) +
- (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(ccc2Q14, bkQx, 14);
+ RexQx[kk] = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(ccc2Q14, akQx, 14) -
+ (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(sss2Q14, bkQx, 14);
+ ImxQx[kk] = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(sss2Q14, akQx, 14) +
+ (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(ccc2Q14, bkQx, 14);
kk += 20;
}
diff --git a/webrtc/modules/audio_coding/codecs/isac/fix/source/fft.h b/webrtc/modules/audio_coding/codecs/isac/fix/source/fft.h
index efa116e..dc7cea8 100644
--- a/webrtc/modules/audio_coding/codecs/isac/fix/source/fft.h
+++ b/webrtc/modules/audio_coding/codecs/isac/fix/source/fft.h
@@ -34,7 +34,7 @@
#include "structs.h"
-WebRtc_Word16 WebRtcIsacfix_FftRadix16Fastest(WebRtc_Word16 RexQx[], WebRtc_Word16 ImxQx[], WebRtc_Word16 iSign);
+int16_t WebRtcIsacfix_FftRadix16Fastest(int16_t RexQx[], int16_t ImxQx[], int16_t iSign);
diff --git a/webrtc/modules/audio_coding/codecs/isac/fix/source/filterbanks.c b/webrtc/modules/audio_coding/codecs/isac/fix/source/filterbanks.c
index c7eb590..9c9d098 100644
--- a/webrtc/modules/audio_coding/codecs/isac/fix/source/filterbanks.c
+++ b/webrtc/modules/audio_coding/codecs/isac/fix/source/filterbanks.c
@@ -92,13 +92,13 @@
int32_t *state)
{
int k;
- WebRtc_Word32 a1 = 0, b1 = 0, c = 0, in = 0;
- WebRtc_Word32 a2 = 0, b2 = 0;
- WebRtc_Word32 state0 = state[0];
- WebRtc_Word32 state1 = state[1];
+ int32_t a1 = 0, b1 = 0, c = 0, in = 0;
+ int32_t a2 = 0, b2 = 0;
+ int32_t state0 = state[0];
+ int32_t state1 = state[1];
for (k=0; k<len; k++) {
- in = (WebRtc_Word32)io[k];
+ in = (int32_t)io[k];
#ifdef WEBRTC_ARCH_ARM_V7
{
@@ -134,11 +134,11 @@
b2 = WEBRTC_SPL_MUL_32_32_RSFT32(coefficient[3], coefficient[2], state1);
#endif
- c = ((WebRtc_Word32)in) + WEBRTC_SPL_RSHIFT_W32(a1+b1, 7); // Q0
- io[k] = (WebRtc_Word16)WebRtcSpl_SatW32ToW16(c); // Write output as Q0.
+ c = ((int32_t)in) + WEBRTC_SPL_RSHIFT_W32(a1+b1, 7); // Q0
+ io[k] = (int16_t)WebRtcSpl_SatW32ToW16(c); // Write output as Q0.
- c = WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)in, 2) - a2 - b2; // In Q2.
- c = (WebRtc_Word32)WEBRTC_SPL_SAT(536870911, c, -536870912);
+ c = WEBRTC_SPL_LSHIFT_W32((int32_t)in, 2) - a2 - b2; // In Q2.
+ c = (int32_t)WEBRTC_SPL_SAT(536870911, c, -536870912);
state1 = state0;
state0 = WEBRTC_SPL_LSHIFT_W32(c, 2); // Write state as Q4
@@ -148,9 +148,9 @@
}
-void WebRtcIsacfix_SplitAndFilter1(WebRtc_Word16 *pin,
- WebRtc_Word16 *LP16,
- WebRtc_Word16 *HP16,
+void WebRtcIsacfix_SplitAndFilter1(int16_t *pin,
+ int16_t *LP16,
+ int16_t *HP16,
PreFiltBankstr *prefiltdata)
{
/* Function WebRtcIsacfix_SplitAndFilter */
@@ -159,10 +159,10 @@
int k;
- WebRtc_Word16 tempin_ch1[FRAMESAMPLES/2 + QLOOKAHEAD];
- WebRtc_Word16 tempin_ch2[FRAMESAMPLES/2 + QLOOKAHEAD];
- WebRtc_Word32 tmpState_ch1[2 * (QORDER-1)]; /* 4 */
- WebRtc_Word32 tmpState_ch2[2 * (QORDER-1)]; /* 4 */
+ int16_t tempin_ch1[FRAMESAMPLES/2 + QLOOKAHEAD];
+ int16_t tempin_ch2[FRAMESAMPLES/2 + QLOOKAHEAD];
+ int32_t tmpState_ch1[2 * (QORDER-1)]; /* 4 */
+ int32_t tmpState_ch2[2 * (QORDER-1)]; /* 4 */
/* High pass filter */
WebRtcIsacfix_HighpassFilterFixDec32(pin, FRAMESAMPLES, WebRtcIsacfix_kHpStCoeffInQ30, prefiltdata->HPstates_fix);
@@ -213,13 +213,13 @@
/* Now Construct low-pass and high-pass signals as combinations of polyphase components */
for (k=0; k<FRAMESAMPLES/2 + QLOOKAHEAD; k++) {
- WebRtc_Word32 tmp1, tmp2, tmp3;
- tmp1 = (WebRtc_Word32)tempin_ch1[k]; // Q0 -> Q0
- tmp2 = (WebRtc_Word32)tempin_ch2[k]; // Q0 -> Q0
- tmp3 = (WebRtc_Word32)WEBRTC_SPL_RSHIFT_W32((tmp1 + tmp2), 1);/* low pass signal*/
- LP16[k] = (WebRtc_Word16)WebRtcSpl_SatW32ToW16(tmp3); /*low pass */
- tmp3 = (WebRtc_Word32)WEBRTC_SPL_RSHIFT_W32((tmp1 - tmp2), 1);/* high pass signal*/
- HP16[k] = (WebRtc_Word16)WebRtcSpl_SatW32ToW16(tmp3); /*high pass */
+ int32_t tmp1, tmp2, tmp3;
+ tmp1 = (int32_t)tempin_ch1[k]; // Q0 -> Q0
+ tmp2 = (int32_t)tempin_ch2[k]; // Q0 -> Q0
+ tmp3 = (int32_t)WEBRTC_SPL_RSHIFT_W32((tmp1 + tmp2), 1);/* low pass signal*/
+ LP16[k] = (int16_t)WebRtcSpl_SatW32ToW16(tmp3); /*low pass */
+ tmp3 = (int32_t)WEBRTC_SPL_RSHIFT_W32((tmp1 - tmp2), 1);/* high pass signal*/
+ HP16[k] = (int16_t)WebRtcSpl_SatW32ToW16(tmp3); /*high pass */
}
}/*end of WebRtcIsacfix_SplitAndFilter */
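
Note: the loop above combines the two all-pass polyphase branches into the analysis outputs: low pass = (ch1 + ch2) / 2 and high pass = (ch1 - ch2) / 2, each written back as a saturated 16-bit sample. A plain-C sketch of that combination step (hypothetical helper, not part of this patch):

#include <stdint.h>

/* Half-band combination of two polyphase branches: sum/2 gives the low-pass
 * signal, difference/2 the high-pass signal.  The sum of two int16_t values
 * fits in 17 bits, so the shifted results fit back into int16_t; the real
 * code still guards with WebRtcSpl_SatW32ToW16. */
static void CombinePolyphase(const int16_t* ch1, const int16_t* ch2,
                             int16_t* lp, int16_t* hp, int len) {
  int k;
  for (k = 0; k < len; k++) {
    lp[k] = (int16_t)(((int32_t)ch1[k] + ch2[k]) >> 1);
    hp[k] = (int16_t)(((int32_t)ch1[k] - ch2[k]) >> 1);
  }
}
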
@@ -228,9 +228,9 @@
#ifdef WEBRTC_ISAC_FIX_NB_CALLS_ENABLED
/* Without lookahead */
-void WebRtcIsacfix_SplitAndFilter2(WebRtc_Word16 *pin,
- WebRtc_Word16 *LP16,
- WebRtc_Word16 *HP16,
+void WebRtcIsacfix_SplitAndFilter2(int16_t *pin,
+ int16_t *LP16,
+ int16_t *HP16,
PreFiltBankstr *prefiltdata)
{
/* Function WebRtcIsacfix_SplitAndFilter2 */
@@ -239,8 +239,8 @@
int k;
- WebRtc_Word16 tempin_ch1[FRAMESAMPLES/2];
- WebRtc_Word16 tempin_ch2[FRAMESAMPLES/2];
+ int16_t tempin_ch1[FRAMESAMPLES/2];
+ int16_t tempin_ch2[FRAMESAMPLES/2];
/* High pass filter */
@@ -272,13 +272,13 @@
/* Now Construct low-pass and high-pass signals as combinations of polyphase components */
for (k=0; k<FRAMESAMPLES/2; k++) {
- WebRtc_Word32 tmp1, tmp2, tmp3;
- tmp1 = (WebRtc_Word32)tempin_ch1[k]; // Q0 -> Q0
- tmp2 = (WebRtc_Word32)tempin_ch2[k]; // Q0 -> Q0
- tmp3 = (WebRtc_Word32)WEBRTC_SPL_RSHIFT_W32((tmp1 + tmp2), 1);/* low pass signal*/
- LP16[k] = (WebRtc_Word16)WebRtcSpl_SatW32ToW16(tmp3); /*low pass */
- tmp3 = (WebRtc_Word32)WEBRTC_SPL_RSHIFT_W32((tmp1 - tmp2), 1);/* high pass signal*/
- HP16[k] = (WebRtc_Word16)WebRtcSpl_SatW32ToW16(tmp3); /*high pass */
+ int32_t tmp1, tmp2, tmp3;
+ tmp1 = (int32_t)tempin_ch1[k]; // Q0 -> Q0
+ tmp2 = (int32_t)tempin_ch2[k]; // Q0 -> Q0
+ tmp3 = (int32_t)WEBRTC_SPL_RSHIFT_W32((tmp1 + tmp2), 1);/* low pass signal*/
+ LP16[k] = (int16_t)WebRtcSpl_SatW32ToW16(tmp3); /*low pass */
+ tmp3 = (int32_t)WEBRTC_SPL_RSHIFT_W32((tmp1 - tmp2), 1);/* high pass signal*/
+ HP16[k] = (int16_t)WebRtcSpl_SatW32ToW16(tmp3); /*high pass */
}
}/*end of WebRtcIsacfix_SplitAndFilter */
@@ -308,13 +308,13 @@
high-pass signals.
postfiltdata: the input data structure containing the filterbank
states is updated for the next decoding iteration */
-void WebRtcIsacfix_FilterAndCombine1(WebRtc_Word16 *tempin_ch1,
- WebRtc_Word16 *tempin_ch2,
- WebRtc_Word16 *out16,
+void WebRtcIsacfix_FilterAndCombine1(int16_t *tempin_ch1,
+ int16_t *tempin_ch2,
+ int16_t *out16,
PostFiltBankstr *postfiltdata)
{
int k;
- WebRtc_Word16 in[FRAMESAMPLES];
+ int16_t in[FRAMESAMPLES];
/* all-pass filter the new upper and lower channel signal.
For upper channel, use the all-pass filter factors that were used as a
@@ -368,14 +368,14 @@
high-pass signals.
postfiltdata: the input data structure containing the filterbank
states is updated for the next decoding iteration */
-void WebRtcIsacfix_FilterAndCombine2(WebRtc_Word16 *tempin_ch1,
- WebRtc_Word16 *tempin_ch2,
- WebRtc_Word16 *out16,
+void WebRtcIsacfix_FilterAndCombine2(int16_t *tempin_ch1,
+ int16_t *tempin_ch2,
+ int16_t *out16,
PostFiltBankstr *postfiltdata,
- WebRtc_Word16 len)
+ int16_t len)
{
int k;
- WebRtc_Word16 in[FRAMESAMPLES];
+ int16_t in[FRAMESAMPLES];
/* all-pass filter the new upper and lower channel signal.
For upper channel, use the all-pass filter factors that were used as a
diff --git a/webrtc/modules/audio_coding/codecs/isac/fix/source/filters.c b/webrtc/modules/audio_coding/codecs/isac/fix/source/filters.c
index a5ebd39..eb0e87a 100644
--- a/webrtc/modules/audio_coding/codecs/isac/fix/source/filters.c
+++ b/webrtc/modules/audio_coding/codecs/isac/fix/source/filters.c
@@ -14,11 +14,11 @@
// Autocorrelation function in fixed point.
// NOTE! Different from SPLIB-version in how it scales the signal.
-int WebRtcIsacfix_AutocorrC(WebRtc_Word32* __restrict r,
- const WebRtc_Word16* __restrict x,
- WebRtc_Word16 N,
- WebRtc_Word16 order,
- WebRtc_Word16* __restrict scale) {
+int WebRtcIsacfix_AutocorrC(int32_t* __restrict r,
+ const int16_t* __restrict x,
+ int16_t N,
+ int16_t order,
+ int16_t* __restrict scale) {
int i = 0;
int j = 0;
int16_t scaling = 0;
@@ -59,17 +59,17 @@
return(order + 1);
}
-static const WebRtc_Word32 kApUpperQ15[ALLPASSSECTIONS] = { 1137, 12537 };
-static const WebRtc_Word32 kApLowerQ15[ALLPASSSECTIONS] = { 5059, 24379 };
+static const int32_t kApUpperQ15[ALLPASSSECTIONS] = { 1137, 12537 };
+static const int32_t kApLowerQ15[ALLPASSSECTIONS] = { 5059, 24379 };
-static void AllpassFilterForDec32(WebRtc_Word16 *InOut16, //Q0
- const WebRtc_Word32 *APSectionFactors, //Q15
- WebRtc_Word16 lengthInOut,
- WebRtc_Word32 *FilterState) //Q16
+static void AllpassFilterForDec32(int16_t *InOut16, //Q0
+ const int32_t *APSectionFactors, //Q15
+ int16_t lengthInOut,
+ int32_t *FilterState) //Q16
{
int n, j;
- WebRtc_Word32 a, b;
+ int32_t a, b;
for (j=0; j<ALLPASSSECTIONS; j++) {
for (n=0;n<lengthInOut;n+=2){
@@ -77,12 +77,12 @@
a = WEBRTC_SPL_LSHIFT_W32(a, 1); // Q15 -> Q16
b = WEBRTC_SPL_ADD_SAT_W32(a, FilterState[j]); //Q16+Q16=Q16
a = WEBRTC_SPL_MUL_16_32_RSFT16(
- (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(b, 16),
+ (int16_t) WEBRTC_SPL_RSHIFT_W32(b, 16),
-APSectionFactors[j]); //Q0*Q31=Q31 shifted 16 gives Q15
FilterState[j] = WEBRTC_SPL_ADD_SAT_W32(
WEBRTC_SPL_LSHIFT_W32(a,1),
- WEBRTC_SPL_LSHIFT_W32((WebRtc_UWord32)InOut16[n], 16)); // Q15<<1 + Q0<<16 = Q16 + Q16 = Q16
- InOut16[n] = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(b, 16); //Save as Q0
+ WEBRTC_SPL_LSHIFT_W32((uint32_t)InOut16[n], 16)); // Q15<<1 + Q0<<16 = Q16 + Q16 = Q16
+ InOut16[n] = (int16_t) WEBRTC_SPL_RSHIFT_W32(b, 16); //Save as Q0
}
}
}
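
Note: stripped of the Q-format bookkeeping, each section of AllpassFilterForDec32 appears to run the first-order all-pass recursion y[n] = c*x[n] + s, s' = x[n] - c*y[n] on every second sample (one polyphase branch). A floating-point reference sketch under that reading of the fixed-point code (the helper is illustrative only):

/* One first-order all-pass section, transfer function (c + z^-1)/(1 + c*z^-1),
 * applied to every second sample as in the decimator's polyphase branch.
 * The fixed-point version keeps c in Q15 and the state in Q16. */
static void AllpassSectionRef(float* io, int len, float c, float* state) {
  int n;
  float s = *state;
  for (n = 0; n < len; n += 2) {
    float y = c * io[n] + s;   /* output sample */
    s = io[n] - c * y;         /* next state    */
    io[n] = y;
  }
  *state = s;
}
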
@@ -90,20 +90,20 @@
-void WebRtcIsacfix_DecimateAllpass32(const WebRtc_Word16 *in,
- WebRtc_Word32 *state_in, /* array of size: 2*ALLPASSSECTIONS+1 */
- WebRtc_Word16 N, /* number of input samples */
- WebRtc_Word16 *out) /* array of size N/2 */
+void WebRtcIsacfix_DecimateAllpass32(const int16_t *in,
+ int32_t *state_in, /* array of size: 2*ALLPASSSECTIONS+1 */
+ int16_t N, /* number of input samples */
+ int16_t *out) /* array of size N/2 */
{
int n;
- WebRtc_Word16 data_vec[PITCH_FRAME_LEN];
+ int16_t data_vec[PITCH_FRAME_LEN];
/* copy input */
- memcpy(data_vec+1, in, WEBRTC_SPL_MUL_16_16(sizeof(WebRtc_Word16), (N-1)));
+ memcpy(data_vec+1, in, WEBRTC_SPL_MUL_16_16(sizeof(int16_t), (N-1)));
- data_vec[0] = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(state_in[WEBRTC_SPL_MUL_16_16(2, ALLPASSSECTIONS)],16); //the z^(-1) state
- state_in[WEBRTC_SPL_MUL_16_16(2, ALLPASSSECTIONS)] = WEBRTC_SPL_LSHIFT_W32((WebRtc_UWord32)in[N-1],16);
+ data_vec[0] = (int16_t) WEBRTC_SPL_RSHIFT_W32(state_in[WEBRTC_SPL_MUL_16_16(2, ALLPASSSECTIONS)],16); //the z^(-1) state
+ state_in[WEBRTC_SPL_MUL_16_16(2, ALLPASSSECTIONS)] = WEBRTC_SPL_LSHIFT_W32((uint32_t)in[N-1],16);
diff --git a/webrtc/modules/audio_coding/codecs/isac/fix/source/filters_neon.S b/webrtc/modules/audio_coding/codecs/isac/fix/source/filters_neon.S
index a970333..3c5ac64 100644
--- a/webrtc/modules/audio_coding/codecs/isac/fix/source/filters_neon.S
+++ b/webrtc/modules/audio_coding/codecs/isac/fix/source/filters_neon.S
@@ -15,11 +15,11 @@
.align 2
@ int WebRtcIsacfix_AutocorrNeon(
-@ WebRtc_Word32* __restrict r,
-@ const WebRtc_Word16* __restrict x,
-@ WebRtc_Word16 N,
-@ WebRtc_Word16 order,
-@ WebRtc_Word16* __restrict scale);
+@ int32_t* __restrict r,
+@ const int16_t* __restrict x,
+@ int16_t N,
+@ int16_t order,
+@ int16_t* __restrict scale);
DEFINE_FUNCTION WebRtcIsacfix_AutocorrNeon
push {r3 - r12}
diff --git a/webrtc/modules/audio_coding/codecs/isac/fix/source/initialize.c b/webrtc/modules/audio_coding/codecs/isac/fix/source/initialize.c
index 4d11af5..dbcd49c 100644
--- a/webrtc/modules/audio_coding/codecs/isac/fix/source/initialize.c
+++ b/webrtc/modules/audio_coding/codecs/isac/fix/source/initialize.c
@@ -25,18 +25,18 @@
int k;
for (k = 0; k < WINLEN; k++) {
- maskdata->DataBufferLoQ0[k] = (WebRtc_Word16) 0;
- maskdata->DataBufferHiQ0[k] = (WebRtc_Word16) 0;
+ maskdata->DataBufferLoQ0[k] = (int16_t) 0;
+ maskdata->DataBufferHiQ0[k] = (int16_t) 0;
}
for (k = 0; k < ORDERLO+1; k++) {
- maskdata->CorrBufLoQQ[k] = (WebRtc_Word32) 0;
+ maskdata->CorrBufLoQQ[k] = (int32_t) 0;
maskdata->CorrBufLoQdom[k] = 0;
maskdata->PreStateLoGQ15[k] = 0;
}
for (k = 0; k < ORDERHI+1; k++) {
- maskdata->CorrBufHiQQ[k] = (WebRtc_Word32) 0;
+ maskdata->CorrBufHiQQ[k] = (int32_t) 0;
maskdata->CorrBufHiQdom[k] = 0;
maskdata->PreStateHiGQ15[k] = 0;
}
diff --git a/webrtc/modules/audio_coding/codecs/isac/fix/source/isacfix.c b/webrtc/modules/audio_coding/codecs/isac/fix/source/isacfix.c
index f9c481d..945475f 100644
--- a/webrtc/modules/audio_coding/codecs/isac/fix/source/isacfix.c
+++ b/webrtc/modules/audio_coding/codecs/isac/fix/source/isacfix.c
@@ -42,8 +42,8 @@
*
*/
-WebRtc_Word16 WebRtcIsacfix_AssignSize(int *sizeinbytes) {
- *sizeinbytes=sizeof(ISACFIX_SubStruct)*2/sizeof(WebRtc_Word16);
+int16_t WebRtcIsacfix_AssignSize(int *sizeinbytes) {
+ *sizeinbytes=sizeof(ISACFIX_SubStruct)*2/sizeof(int16_t);
return(0);
}
@@ -56,7 +56,7 @@
* If successful, Return 0, else Return -1
*/
-WebRtc_Word16 WebRtcIsacfix_Assign(ISACFIX_MainStruct **inst, void *ISACFIX_inst_Addr) {
+int16_t WebRtcIsacfix_Assign(ISACFIX_MainStruct **inst, void *ISACFIX_inst_Addr) {
if (ISACFIX_inst_Addr!=NULL) {
*inst = (ISACFIX_MainStruct*)ISACFIX_inst_Addr;
(*(ISACFIX_SubStruct**)inst)->errorcode = 0;
@@ -84,7 +84,7 @@
* -1 - Error
*/
-WebRtc_Word16 WebRtcIsacfix_Create(ISACFIX_MainStruct **ISAC_main_inst)
+int16_t WebRtcIsacfix_Create(ISACFIX_MainStruct **ISAC_main_inst)
{
ISACFIX_SubStruct *tempo;
tempo = malloc(1 * sizeof(ISACFIX_SubStruct));
@@ -113,7 +113,7 @@
* -1 - Error
*/
-WebRtc_Word16 WebRtcIsacfix_CreateInternal(ISACFIX_MainStruct *ISAC_main_inst)
+int16_t WebRtcIsacfix_CreateInternal(ISACFIX_MainStruct *ISAC_main_inst)
{
ISACFIX_SubStruct *ISAC_inst;
@@ -147,7 +147,7 @@
* -1 - Error
*/
-WebRtc_Word16 WebRtcIsacfix_Free(ISACFIX_MainStruct *ISAC_main_inst)
+int16_t WebRtcIsacfix_Free(ISACFIX_MainStruct *ISAC_main_inst)
{
free(ISAC_main_inst);
return(0);
@@ -165,7 +165,7 @@
* -1 - Error
*/
-WebRtc_Word16 WebRtcIsacfix_FreeInternal(ISACFIX_MainStruct *ISAC_main_inst)
+int16_t WebRtcIsacfix_FreeInternal(ISACFIX_MainStruct *ISAC_main_inst)
{
ISACFIX_SubStruct *ISAC_inst;
@@ -217,11 +217,11 @@
* -1 - Error
*/
-WebRtc_Word16 WebRtcIsacfix_EncoderInit(ISACFIX_MainStruct *ISAC_main_inst,
- WebRtc_Word16 CodingMode)
+int16_t WebRtcIsacfix_EncoderInit(ISACFIX_MainStruct *ISAC_main_inst,
+ int16_t CodingMode)
{
int k;
- WebRtc_Word16 statusInit;
+ int16_t statusInit;
ISACFIX_SubStruct *ISAC_inst;
statusInit = 0;
@@ -321,12 +321,12 @@
* : -1 - Error
*/
-WebRtc_Word16 WebRtcIsacfix_Encode(ISACFIX_MainStruct *ISAC_main_inst,
- const WebRtc_Word16 *speechIn,
- WebRtc_Word16 *encoded)
+int16_t WebRtcIsacfix_Encode(ISACFIX_MainStruct *ISAC_main_inst,
+ const int16_t *speechIn,
+ int16_t *encoded)
{
ISACFIX_SubStruct *ISAC_inst;
- WebRtc_Word16 stream_len;
+ int16_t stream_len;
#ifndef WEBRTC_BIG_ENDIAN
int k;
#endif
@@ -341,20 +341,20 @@
return (-1);
}
- stream_len = WebRtcIsacfix_EncodeImpl((WebRtc_Word16*)speechIn,
- &ISAC_inst->ISACenc_obj,
- &ISAC_inst->bwestimator_obj,
- ISAC_inst->CodingMode);
+ stream_len = WebRtcIsacfix_EncodeImpl((int16_t*)speechIn,
+ &ISAC_inst->ISACenc_obj,
+ &ISAC_inst->bwestimator_obj,
+ ISAC_inst->CodingMode);
if (stream_len<0) {
ISAC_inst->errorcode = - stream_len;
return -1;
}
- /* convert from bytes to WebRtc_Word16 */
+ /* convert from bytes to int16_t */
#ifndef WEBRTC_BIG_ENDIAN
for (k=0;k<(stream_len+1)>>1;k++) {
- encoded[k] = (WebRtc_Word16)( ( (WebRtc_UWord16)(ISAC_inst->ISACenc_obj.bitstr_obj).stream[k] >> 8 )
+ encoded[k] = (int16_t)( ( (uint16_t)(ISAC_inst->ISACenc_obj.bitstr_obj).stream[k] >> 8 )
| (((ISAC_inst->ISACenc_obj.bitstr_obj).stream[k] & 0x00FF) << 8));
}
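
Note: on little-endian targets the loop above repacks the arithmetic coder's big-endian byte stream into int16_t words by swapping the two bytes of every 16-bit word. The same idiom as a standalone sketch (hypothetical helper, not part of this patch):

#include <stdint.h>

/* Swap the bytes of each 16-bit word so a big-endian byte stream can be
 * returned through an int16_t buffer on a little-endian host. */
static void SwapBytes16(uint16_t* words, int num_words) {
  int k;
  for (k = 0; k < num_words; k++) {
    words[k] = (uint16_t)((words[k] >> 8) | ((words[k] & 0x00FF) << 8));
  }
}
/* Example: 0x12AB becomes 0xAB12. */
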
@@ -396,15 +396,15 @@
* : -1 - Error
*/
#ifdef WEBRTC_ISAC_FIX_NB_CALLS_ENABLED
-WebRtc_Word16 WebRtcIsacfix_EncodeNb(ISACFIX_MainStruct *ISAC_main_inst,
- const WebRtc_Word16 *speechIn,
- WebRtc_Word16 *encoded)
+int16_t WebRtcIsacfix_EncodeNb(ISACFIX_MainStruct *ISAC_main_inst,
+ const int16_t *speechIn,
+ int16_t *encoded)
{
ISACFIX_SubStruct *ISAC_inst;
- WebRtc_Word16 stream_len;
- WebRtc_Word16 speechInWB[FRAMESAMPLES_10ms];
- WebRtc_Word16 Vector_Word16_1[FRAMESAMPLES_10ms/2];
- WebRtc_Word16 Vector_Word16_2[FRAMESAMPLES_10ms/2];
+ int16_t stream_len;
+ int16_t speechInWB[FRAMESAMPLES_10ms];
+ int16_t Vector_Word16_1[FRAMESAMPLES_10ms/2];
+ int16_t Vector_Word16_2[FRAMESAMPLES_10ms/2];
int k;
@@ -431,20 +431,20 @@
/* Encode WB signal */
- stream_len = WebRtcIsacfix_EncodeImpl((WebRtc_Word16*)speechInWB,
- &ISAC_inst->ISACenc_obj,
- &ISAC_inst->bwestimator_obj,
- ISAC_inst->CodingMode);
+ stream_len = WebRtcIsacfix_EncodeImpl((int16_t*)speechInWB,
+ &ISAC_inst->ISACenc_obj,
+ &ISAC_inst->bwestimator_obj,
+ ISAC_inst->CodingMode);
if (stream_len<0) {
ISAC_inst->errorcode = - stream_len;
return -1;
}
- /* convert from bytes to WebRtc_Word16 */
+ /* convert from bytes to int16_t */
#ifndef WEBRTC_BIG_ENDIAN
for (k=0;k<(stream_len+1)>>1;k++) {
- encoded[k] = (WebRtc_Word16)(((WebRtc_UWord16)(ISAC_inst->ISACenc_obj.bitstr_obj).stream[k] >> 8)
+ encoded[k] = (int16_t)(((uint16_t)(ISAC_inst->ISACenc_obj.bitstr_obj).stream[k] >> 8)
| (((ISAC_inst->ISACenc_obj.bitstr_obj).stream[k] & 0x00FF) << 8));
}
@@ -478,13 +478,13 @@
* : -1 - Error
*/
-WebRtc_Word16 WebRtcIsacfix_GetNewBitStream(ISACFIX_MainStruct *ISAC_main_inst,
- WebRtc_Word16 bweIndex,
- float scale,
- WebRtc_Word16 *encoded)
+int16_t WebRtcIsacfix_GetNewBitStream(ISACFIX_MainStruct *ISAC_main_inst,
+ int16_t bweIndex,
+ float scale,
+ int16_t *encoded)
{
ISACFIX_SubStruct *ISAC_inst;
- WebRtc_Word16 stream_len;
+ int16_t stream_len;
#ifndef WEBRTC_BIG_ENDIAN
int k;
#endif
@@ -509,7 +509,7 @@
#ifndef WEBRTC_BIG_ENDIAN
for (k=0;k<(stream_len+1)>>1;k++) {
- encoded[k] = (WebRtc_Word16)( ( (WebRtc_UWord16)(ISAC_inst->ISACenc_obj.bitstr_obj).stream[k] >> 8 )
+ encoded[k] = (int16_t)( ( (uint16_t)(ISAC_inst->ISACenc_obj.bitstr_obj).stream[k] >> 8 )
| (((ISAC_inst->ISACenc_obj.bitstr_obj).stream[k] & 0x00FF) << 8));
}
@@ -536,7 +536,7 @@
* -1 - Error
*/
-WebRtc_Word16 WebRtcIsacfix_DecoderInit(ISACFIX_MainStruct *ISAC_main_inst)
+int16_t WebRtcIsacfix_DecoderInit(ISACFIX_MainStruct *ISAC_main_inst)
{
ISACFIX_SubStruct *ISAC_inst;
@@ -579,22 +579,22 @@
* -1 - Error
*/
-WebRtc_Word16 WebRtcIsacfix_UpdateBwEstimate1(ISACFIX_MainStruct *ISAC_main_inst,
- const WebRtc_UWord16 *encoded,
- WebRtc_Word32 packet_size,
- WebRtc_UWord16 rtp_seq_number,
- WebRtc_UWord32 arr_ts)
+int16_t WebRtcIsacfix_UpdateBwEstimate1(ISACFIX_MainStruct *ISAC_main_inst,
+ const uint16_t *encoded,
+ int32_t packet_size,
+ uint16_t rtp_seq_number,
+ uint32_t arr_ts)
{
ISACFIX_SubStruct *ISAC_inst;
Bitstr_dec streamdata;
- WebRtc_UWord16 partOfStream[5];
+ uint16_t partOfStream[5];
#ifndef WEBRTC_BIG_ENDIAN
int k;
#endif
- WebRtc_Word16 err;
+ int16_t err;
/* Set stream pointer to point at partOfStream */
- streamdata.stream = (WebRtc_UWord16 *)partOfStream;
+ streamdata.stream = (uint16_t *)partOfStream;
/* typecast pointer to real structure */
ISAC_inst = (ISACFIX_SubStruct *)ISAC_main_inst;
@@ -623,7 +623,7 @@
#ifndef WEBRTC_BIG_ENDIAN
for (k=0; k<5; k++) {
- streamdata.stream[k] = (WebRtc_UWord16) (((WebRtc_UWord16)encoded[k] >> 8)|((encoded[k] & 0xFF)<<8));
+ streamdata.stream[k] = (uint16_t) (((uint16_t)encoded[k] >> 8)|((encoded[k] & 0xFF)<<8));
}
#else
memcpy(streamdata.stream, encoded, 5);
@@ -666,23 +666,23 @@
* -1 - Error
*/
-WebRtc_Word16 WebRtcIsacfix_UpdateBwEstimate(ISACFIX_MainStruct *ISAC_main_inst,
- const WebRtc_UWord16 *encoded,
- WebRtc_Word32 packet_size,
- WebRtc_UWord16 rtp_seq_number,
- WebRtc_UWord32 send_ts,
- WebRtc_UWord32 arr_ts)
+int16_t WebRtcIsacfix_UpdateBwEstimate(ISACFIX_MainStruct *ISAC_main_inst,
+ const uint16_t *encoded,
+ int32_t packet_size,
+ uint16_t rtp_seq_number,
+ uint32_t send_ts,
+ uint32_t arr_ts)
{
ISACFIX_SubStruct *ISAC_inst;
Bitstr_dec streamdata;
- WebRtc_UWord16 partOfStream[5];
+ uint16_t partOfStream[5];
#ifndef WEBRTC_BIG_ENDIAN
int k;
#endif
- WebRtc_Word16 err;
+ int16_t err;
/* Set stream pointer to point at partOfStream */
- streamdata.stream = (WebRtc_UWord16 *)partOfStream;
+ streamdata.stream = (uint16_t *)partOfStream;
/* typecast pointer to real structure */
ISAC_inst = (ISACFIX_SubStruct *)ISAC_main_inst;
@@ -711,7 +711,7 @@
#ifndef WEBRTC_BIG_ENDIAN
for (k=0; k<5; k++) {
- streamdata.stream[k] = (WebRtc_UWord16) ((encoded[k] >> 8)|((encoded[k] & 0xFF)<<8));
+ streamdata.stream[k] = (uint16_t) ((encoded[k] >> 8)|((encoded[k] & 0xFF)<<8));
}
#else
memcpy(streamdata.stream, encoded, 5);
@@ -755,20 +755,20 @@
*/
-WebRtc_Word16 WebRtcIsacfix_Decode(ISACFIX_MainStruct *ISAC_main_inst,
- const WebRtc_UWord16 *encoded,
- WebRtc_Word16 len,
- WebRtc_Word16 *decoded,
- WebRtc_Word16 *speechType)
+int16_t WebRtcIsacfix_Decode(ISACFIX_MainStruct *ISAC_main_inst,
+ const uint16_t *encoded,
+ int16_t len,
+ int16_t *decoded,
+ int16_t *speechType)
{
ISACFIX_SubStruct *ISAC_inst;
/* number of samples (480 or 960), output from decoder */
/* that were actually used in the encoder/decoder (determined on the fly) */
- WebRtc_Word16 number_of_samples;
+ int16_t number_of_samples;
#ifndef WEBRTC_BIG_ENDIAN
int k;
#endif
- WebRtc_Word16 declen = 0;
+ int16_t declen = 0;
/* typecast pointer to real structure */
ISAC_inst = (ISACFIX_SubStruct *)ISAC_main_inst;
@@ -790,15 +790,15 @@
return -1;
}
- (ISAC_inst->ISACdec_obj.bitstr_obj).stream = (WebRtc_UWord16 *)encoded;
+ (ISAC_inst->ISACdec_obj.bitstr_obj).stream = (uint16_t *)encoded;
- /* convert bitstream from WebRtc_Word16 to bytes */
+ /* convert bitstream from int16_t to bytes */
#ifndef WEBRTC_BIG_ENDIAN
for (k=0; k<(len>>1); k++) {
- (ISAC_inst->ISACdec_obj.bitstr_obj).stream[k] = (WebRtc_UWord16) ((encoded[k] >> 8)|((encoded[k] & 0xFF)<<8));
+ (ISAC_inst->ISACdec_obj.bitstr_obj).stream[k] = (uint16_t) ((encoded[k] >> 8)|((encoded[k] & 0xFF)<<8));
}
if (len & 0x0001)
- (ISAC_inst->ISACdec_obj.bitstr_obj).stream[k] = (WebRtc_UWord16) ((encoded[k] & 0xFF)<<8);
+ (ISAC_inst->ISACdec_obj.bitstr_obj).stream[k] = (uint16_t) ((encoded[k] & 0xFF)<<8);
#endif
/* added for NetEq purposes (VAD/DTX related) */
@@ -809,7 +809,7 @@
if (declen < 0) {
/* Some error inside the decoder */
ISAC_inst->errorcode = -declen;
- memset(decoded, 0, sizeof(WebRtc_Word16) * MAX_FRAMESAMPLES);
+ memset(decoded, 0, sizeof(int16_t) * MAX_FRAMESAMPLES);
return -1;
}
@@ -818,13 +818,13 @@
if (declen & 0x0001) {
if (len != declen && len != declen + (((ISAC_inst->ISACdec_obj.bitstr_obj).stream[declen>>1]) & 0x00FF) ) {
ISAC_inst->errorcode = ISAC_LENGTH_MISMATCH;
- memset(decoded, 0, sizeof(WebRtc_Word16) * number_of_samples);
+ memset(decoded, 0, sizeof(int16_t) * number_of_samples);
return -1;
}
} else {
if (len != declen && len != declen + (((ISAC_inst->ISACdec_obj.bitstr_obj).stream[declen>>1]) >> 8) ) {
ISAC_inst->errorcode = ISAC_LENGTH_MISMATCH;
- memset(decoded, 0, sizeof(WebRtc_Word16) * number_of_samples);
+ memset(decoded, 0, sizeof(int16_t) * number_of_samples);
return -1;
}
}
@@ -858,21 +858,21 @@
*/
#ifdef WEBRTC_ISAC_FIX_NB_CALLS_ENABLED
-WebRtc_Word16 WebRtcIsacfix_DecodeNb(ISACFIX_MainStruct *ISAC_main_inst,
- const WebRtc_UWord16 *encoded,
- WebRtc_Word16 len,
- WebRtc_Word16 *decoded,
- WebRtc_Word16 *speechType)
+int16_t WebRtcIsacfix_DecodeNb(ISACFIX_MainStruct *ISAC_main_inst,
+ const uint16_t *encoded,
+ int16_t len,
+ int16_t *decoded,
+ int16_t *speechType)
{
ISACFIX_SubStruct *ISAC_inst;
/* twice the number of samples (480 or 960), output from decoder */
/* that were actually used in the encoder/decoder (determined on the fly) */
- WebRtc_Word16 number_of_samples;
+ int16_t number_of_samples;
#ifndef WEBRTC_BIG_ENDIAN
int k;
#endif
- WebRtc_Word16 declen = 0;
- WebRtc_Word16 dummy[FRAMESAMPLES/2];
+ int16_t declen = 0;
+ int16_t dummy[FRAMESAMPLES/2];
/* typecast pointer to real structure */
@@ -891,15 +891,15 @@
return -1;
}
- (ISAC_inst->ISACdec_obj.bitstr_obj).stream = (WebRtc_UWord16 *)encoded;
+ (ISAC_inst->ISACdec_obj.bitstr_obj).stream = (uint16_t *)encoded;
- /* convert bitstream from WebRtc_Word16 to bytes */
+ /* convert bitstream from int16_t to bytes */
#ifndef WEBRTC_BIG_ENDIAN
for (k=0; k<(len>>1); k++) {
- (ISAC_inst->ISACdec_obj.bitstr_obj).stream[k] = (WebRtc_UWord16) ((encoded[k] >> 8)|((encoded[k] & 0xFF)<<8));
+ (ISAC_inst->ISACdec_obj.bitstr_obj).stream[k] = (uint16_t) ((encoded[k] >> 8)|((encoded[k] & 0xFF)<<8));
}
if (len & 0x0001)
- (ISAC_inst->ISACdec_obj.bitstr_obj).stream[k] = (WebRtc_UWord16) ((encoded[k] & 0xFF)<<8);
+ (ISAC_inst->ISACdec_obj.bitstr_obj).stream[k] = (uint16_t) ((encoded[k] & 0xFF)<<8);
#endif
/* added for NetEq purposes (VAD/DTX related) */
@@ -910,7 +910,7 @@
if (declen < 0) {
/* Some error inside the decoder */
ISAC_inst->errorcode = -declen;
- memset(decoded, 0, sizeof(WebRtc_Word16) * FRAMESAMPLES);
+ memset(decoded, 0, sizeof(int16_t) * FRAMESAMPLES);
return -1;
}
@@ -919,13 +919,13 @@
if (declen & 0x0001) {
if (len != declen && len != declen + (((ISAC_inst->ISACdec_obj.bitstr_obj).stream[declen>>1]) & 0x00FF) ) {
ISAC_inst->errorcode = ISAC_LENGTH_MISMATCH;
- memset(decoded, 0, sizeof(WebRtc_Word16) * number_of_samples);
+ memset(decoded, 0, sizeof(int16_t) * number_of_samples);
return -1;
}
} else {
if (len != declen && len != declen + (((ISAC_inst->ISACdec_obj.bitstr_obj).stream[declen>>1]) >> 8) ) {
ISAC_inst->errorcode = ISAC_LENGTH_MISMATCH;
- memset(decoded, 0, sizeof(WebRtc_Word16) * number_of_samples);
+ memset(decoded, 0, sizeof(int16_t) * number_of_samples);
return -1;
}
}
@@ -963,14 +963,14 @@
*/
#ifdef WEBRTC_ISAC_FIX_NB_CALLS_ENABLED
-WebRtc_Word16 WebRtcIsacfix_DecodePlcNb(ISACFIX_MainStruct *ISAC_main_inst,
- WebRtc_Word16 *decoded,
- WebRtc_Word16 noOfLostFrames )
+int16_t WebRtcIsacfix_DecodePlcNb(ISACFIX_MainStruct *ISAC_main_inst,
+ int16_t *decoded,
+ int16_t noOfLostFrames )
{
- WebRtc_Word16 no_of_samples, declen, k, ok;
- WebRtc_Word16 outframeNB[FRAMESAMPLES];
- WebRtc_Word16 outframeWB[FRAMESAMPLES];
- WebRtc_Word16 dummy[FRAMESAMPLES/2];
+ int16_t no_of_samples, declen, k, ok;
+ int16_t outframeNB[FRAMESAMPLES];
+ int16_t outframeWB[FRAMESAMPLES];
+ int16_t dummy[FRAMESAMPLES/2];
ISACFIX_SubStruct *ISAC_inst;
@@ -1029,13 +1029,13 @@
* -1 - Error
*/
-WebRtc_Word16 WebRtcIsacfix_DecodePlc(ISACFIX_MainStruct *ISAC_main_inst,
- WebRtc_Word16 *decoded,
- WebRtc_Word16 noOfLostFrames)
+int16_t WebRtcIsacfix_DecodePlc(ISACFIX_MainStruct *ISAC_main_inst,
+ int16_t *decoded,
+ int16_t noOfLostFrames)
{
- WebRtc_Word16 no_of_samples, declen, k, ok;
- WebRtc_Word16 outframe16[MAX_FRAMESAMPLES];
+ int16_t no_of_samples, declen, k, ok;
+ int16_t outframe16[MAX_FRAMESAMPLES];
ISACFIX_SubStruct *ISAC_inst;
/* typecast pointer to real structure */
@@ -1081,9 +1081,9 @@
* -1 - Error
*/
-WebRtc_Word16 WebRtcIsacfix_Control(ISACFIX_MainStruct *ISAC_main_inst,
- WebRtc_Word16 rate,
- WebRtc_Word16 framesize)
+int16_t WebRtcIsacfix_Control(ISACFIX_MainStruct *ISAC_main_inst,
+ int16_t rate,
+ int16_t framesize)
{
ISACFIX_SubStruct *ISAC_inst;
/* typecast pointer to real structure */
@@ -1140,10 +1140,10 @@
* -1 - Error
*/
-WebRtc_Word16 WebRtcIsacfix_ControlBwe(ISACFIX_MainStruct *ISAC_main_inst,
- WebRtc_Word16 rateBPS,
- WebRtc_Word16 frameSizeMs,
- WebRtc_Word16 enforceFrameSize)
+int16_t WebRtcIsacfix_ControlBwe(ISACFIX_MainStruct *ISAC_main_inst,
+ int16_t rateBPS,
+ int16_t frameSizeMs,
+ int16_t enforceFrameSize)
{
ISACFIX_SubStruct *ISAC_inst;
/* Typecast pointer to real structure */
@@ -1168,7 +1168,7 @@
/* Set initial rate, if value between 10000 and 32000, */
/* if rateBPS is 0, keep the default initial bottleneck value (15000) */
if ((rateBPS >= 10000) && (rateBPS <= 32000)) {
- ISAC_inst->bwestimator_obj.sendBwAvg = (((WebRtc_UWord32)rateBPS) << 7);
+ ISAC_inst->bwestimator_obj.sendBwAvg = (((uint32_t)rateBPS) << 7);
} else if (rateBPS != 0) {
ISAC_inst->errorcode = ISAC_DISALLOWED_BOTTLENECK;
return -1;
@@ -1203,8 +1203,8 @@
*
*/
-WebRtc_Word16 WebRtcIsacfix_GetDownLinkBwIndex(ISACFIX_MainStruct* ISAC_main_inst,
- WebRtc_Word16* rateIndex)
+int16_t WebRtcIsacfix_GetDownLinkBwIndex(ISACFIX_MainStruct* ISAC_main_inst,
+ int16_t* rateIndex)
{
ISACFIX_SubStruct *ISAC_inst;
@@ -1230,10 +1230,10 @@
*
*/
-WebRtc_Word16 WebRtcIsacfix_UpdateUplinkBw(ISACFIX_MainStruct* ISAC_main_inst,
- WebRtc_Word16 rateIndex)
+int16_t WebRtcIsacfix_UpdateUplinkBw(ISACFIX_MainStruct* ISAC_main_inst,
+ int16_t rateIndex)
{
- WebRtc_Word16 err = 0;
+ int16_t err = 0;
ISACFIX_SubStruct *ISAC_inst;
/* typecast pointer to real structure */
@@ -1262,18 +1262,18 @@
*
*/
-WebRtc_Word16 WebRtcIsacfix_ReadFrameLen(const WebRtc_Word16* encoded,
- WebRtc_Word16* frameLength)
+int16_t WebRtcIsacfix_ReadFrameLen(const int16_t* encoded,
+ int16_t* frameLength)
{
Bitstr_dec streamdata;
- WebRtc_UWord16 partOfStream[5];
+ uint16_t partOfStream[5];
#ifndef WEBRTC_BIG_ENDIAN
int k;
#endif
- WebRtc_Word16 err;
+ int16_t err;
/* Set stream pointer to point at partOfStream */
- streamdata.stream = (WebRtc_UWord16 *)partOfStream;
+ streamdata.stream = (uint16_t *)partOfStream;
streamdata.W_upper = 0xFFFFFFFF;
streamdata.streamval = 0;
@@ -1282,7 +1282,7 @@
#ifndef WEBRTC_BIG_ENDIAN
for (k=0; k<5; k++) {
- streamdata.stream[k] = (WebRtc_UWord16) (((WebRtc_UWord16)encoded[k] >> 8)|((encoded[k] & 0xFF)<<8));
+ streamdata.stream[k] = (uint16_t) (((uint16_t)encoded[k] >> 8)|((encoded[k] & 0xFF)<<8));
}
#else
memcpy(streamdata.stream, encoded, 5);
@@ -1311,18 +1311,18 @@
*
*/
-WebRtc_Word16 WebRtcIsacfix_ReadBwIndex(const WebRtc_Word16* encoded,
- WebRtc_Word16* rateIndex)
+int16_t WebRtcIsacfix_ReadBwIndex(const int16_t* encoded,
+ int16_t* rateIndex)
{
Bitstr_dec streamdata;
- WebRtc_UWord16 partOfStream[5];
+ uint16_t partOfStream[5];
#ifndef WEBRTC_BIG_ENDIAN
int k;
#endif
- WebRtc_Word16 err;
+ int16_t err;
/* Set stream pointer to point at partOfStream */
- streamdata.stream = (WebRtc_UWord16 *)partOfStream;
+ streamdata.stream = (uint16_t *)partOfStream;
streamdata.W_upper = 0xFFFFFFFF;
streamdata.streamval = 0;
@@ -1331,7 +1331,7 @@
#ifndef WEBRTC_BIG_ENDIAN
for (k=0; k<5; k++) {
- streamdata.stream[k] = (WebRtc_UWord16) (((WebRtc_UWord16)encoded[k] >> 8)|((encoded[k] & 0xFF)<<8));
+ streamdata.stream[k] = (uint16_t) (((uint16_t)encoded[k] >> 8)|((encoded[k] & 0xFF)<<8));
}
#else
memcpy(streamdata.stream, encoded, 5);
@@ -1367,7 +1367,7 @@
* Return value : Error code
*/
-WebRtc_Word16 WebRtcIsacfix_GetErrorCode(ISACFIX_MainStruct *ISAC_main_inst)
+int16_t WebRtcIsacfix_GetErrorCode(ISACFIX_MainStruct *ISAC_main_inst)
{
ISACFIX_SubStruct *ISAC_inst;
/* typecast pointer to real structure */
@@ -1389,12 +1389,12 @@
* Return value : bitrate
*/
-WebRtc_Word32 WebRtcIsacfix_GetUplinkBw(ISACFIX_MainStruct *ISAC_main_inst)
+int32_t WebRtcIsacfix_GetUplinkBw(ISACFIX_MainStruct *ISAC_main_inst)
{
ISACFIX_SubStruct *ISAC_inst = (ISACFIX_SubStruct *)ISAC_main_inst;
BwEstimatorstr * bw = (BwEstimatorstr*)&(ISAC_inst->bwestimator_obj);
- return (WebRtc_Word32) WebRtcIsacfix_GetUplinkBandwidth(bw);
+ return (int32_t) WebRtcIsacfix_GetUplinkBandwidth(bw);
}
/****************************************************************************
@@ -1408,7 +1408,7 @@
* Return value : frame length in samples
*/
-WebRtc_Word16 WebRtcIsacfix_GetNewFrameLen(ISACFIX_MainStruct *ISAC_main_inst)
+int16_t WebRtcIsacfix_GetNewFrameLen(ISACFIX_MainStruct *ISAC_main_inst)
{
ISACFIX_SubStruct *ISAC_inst = (ISACFIX_SubStruct *)ISAC_main_inst;
return ISAC_inst->ISACenc_obj.new_framelength;
@@ -1433,8 +1433,8 @@
* -1 if error happens
*/
-WebRtc_Word16 WebRtcIsacfix_SetMaxPayloadSize(ISACFIX_MainStruct *ISAC_main_inst,
- WebRtc_Word16 maxPayloadBytes)
+int16_t WebRtcIsacfix_SetMaxPayloadSize(ISACFIX_MainStruct *ISAC_main_inst,
+ int16_t maxPayloadBytes)
{
ISACFIX_SubStruct *ISAC_inst;
@@ -1492,11 +1492,11 @@
* -1 if error happens
*/
-WebRtc_Word16 WebRtcIsacfix_SetMaxRate(ISACFIX_MainStruct *ISAC_main_inst,
- WebRtc_Word32 maxRate)
+int16_t WebRtcIsacfix_SetMaxRate(ISACFIX_MainStruct *ISAC_main_inst,
+ int32_t maxRate)
{
ISACFIX_SubStruct *ISAC_inst;
- WebRtc_Word16 maxRateInBytes;
+ int16_t maxRateInBytes;
/* typecast pointer to real structure */
ISAC_inst = (ISACFIX_SubStruct *)ISAC_main_inst;
@@ -1512,7 +1512,7 @@
maximum rate. Multiply with 30/1000 to get number of bits per 30 msec,
divide by 8 to get number of bytes per 30 msec:
maxRateInBytes = floor((maxRate * 30/1000) / 8); */
- maxRateInBytes = (WebRtc_Word16)( WebRtcSpl_DivW32W16ResW16(WEBRTC_SPL_MUL(maxRate, 3), 800) );
+ maxRateInBytes = (int16_t)( WebRtcSpl_DivW32W16ResW16(WEBRTC_SPL_MUL(maxRate, 3), 800) );
/* Store the value for usage in the WebRtcIsacfix_SetMaxPayloadSize-function */
ISAC_inst->ISACenc_obj.maxRateInBytes = maxRateInBytes;
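
Note: the comment's formula floor((maxRate * 30/1000) / 8) reduces to maxRate * 3 / 800, which is what the WEBRTC_SPL_MUL(maxRate, 3) and WebRtcSpl_DivW32W16ResW16(..., 800) pair computes. A quick numeric check of that simplification (helper name is illustrative only):

#include <stdint.h>

/* (maxRate * 30 / 1000) / 8  ==  maxRate * 3 / 800.
 * Example: 32000 bit/s -> 32000 * 3 / 800 = 120 bytes per 30 ms frame. */
static int16_t MaxRateInBytesPer30ms(int32_t max_rate_bps) {
  return (int16_t)((max_rate_bps * 3) / 800);
}
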
diff --git a/webrtc/modules/audio_coding/codecs/isac/fix/source/lattice.c b/webrtc/modules/audio_coding/codecs/isac/fix/source/lattice.c
index bbc5993..6b3a6bf 100644
--- a/webrtc/modules/audio_coding/codecs/isac/fix/source/lattice.c
+++ b/webrtc/modules/audio_coding/codecs/isac/fix/source/lattice.c
@@ -19,17 +19,17 @@
#include "settings.h"
#define LATTICE_MUL_32_32_RSFT16(a32a, a32b, b32) \
- ((WebRtc_Word32)(WEBRTC_SPL_MUL(a32a, b32) + (WEBRTC_SPL_MUL_16_32_RSFT16(a32b, b32))))
+ ((int32_t)(WEBRTC_SPL_MUL(a32a, b32) + (WEBRTC_SPL_MUL_16_32_RSFT16(a32b, b32))))
/* This macro is FORBIDDEN to use elsewhere than in a function in this file and
its corresponding neon version. It might give unpredictable results, since a
- general WebRtc_Word32*WebRtc_Word32 multiplication results in a 64 bit value.
+ general int32_t*int32_t multiplication results in a 64 bit value.
The result is then shifted just 16 steps to the right, giving need for 48
- bits, i.e. in the generel case, it will NOT fit in a WebRtc_Word32. In the
- cases used in here, the WebRtc_Word32 will be enough, since (for a good
+ bits, i.e. in the generel case, it will NOT fit in a int32_t. In the
+ cases used in here, the int32_t will be enough, since (for a good
reason) the involved multiplicands aren't big enough to overflow a
- WebRtc_Word32 after shifting right 16 bits. I have compared the result of a
+ int32_t after shifting right 16 bits. I have compared the result of a
multiplication between t32 and tmp32, done in two ways:
- 1) Using (WebRtc_Word32) (((float)(tmp32))*((float)(tmp32b))/65536.0);
+ 1) Using (int32_t) (((float)(tmp32))*((float)(tmp32b))/65536.0);
2) Using LATTICE_MUL_32_32_RSFT16(t16a, t16b, tmp32b);
By running 25 files, I haven't found any bigger diff than 64 - this was in the
case when method 1) gave 650235648 and 2) gave 650235712.
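
Note: the exact value the macro approximates is the full 32x32 product shifted right 16 bits, which needs a 64-bit intermediate; the macro instead forms hi*b + ((lo*b) >> 16) from the signed high/low split (t16a, t16b) computed by the callers, which is where small differences such as the 64 noted above come from. A sketch of the exact 64-bit reference (illustrative only):

#include <stdint.h>

/* Exact reference for what LATTICE_MUL_32_32_RSFT16 approximates:
 * the full 32x32 product kept in 64 bits, shifted right 16, then truncated. */
static int32_t Mul32x32Rsft16Exact(int32_t a, int32_t b) {
  return (int32_t)(((int64_t)a * b) >> 16);
}
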
@@ -86,43 +86,43 @@
/* filter the signal using normalized lattice filter */
/* MA filter */
-void WebRtcIsacfix_NormLatticeFilterMa(WebRtc_Word16 orderCoef,
- WebRtc_Word32 *stateGQ15,
- WebRtc_Word16 *lat_inQ0,
- WebRtc_Word16 *filt_coefQ15,
- WebRtc_Word32 *gain_lo_hiQ17,
- WebRtc_Word16 lo_hi,
- WebRtc_Word16 *lat_outQ9)
+void WebRtcIsacfix_NormLatticeFilterMa(int16_t orderCoef,
+ int32_t *stateGQ15,
+ int16_t *lat_inQ0,
+ int16_t *filt_coefQ15,
+ int32_t *gain_lo_hiQ17,
+ int16_t lo_hi,
+ int16_t *lat_outQ9)
{
- WebRtc_Word16 sthQ15[MAX_AR_MODEL_ORDER];
- WebRtc_Word16 cthQ15[MAX_AR_MODEL_ORDER];
+ int16_t sthQ15[MAX_AR_MODEL_ORDER];
+ int16_t cthQ15[MAX_AR_MODEL_ORDER];
int u, i, k, n;
- WebRtc_Word16 temp2,temp3;
- WebRtc_Word16 ord_1 = orderCoef+1;
- WebRtc_Word32 inv_cthQ16[MAX_AR_MODEL_ORDER];
+ int16_t temp2,temp3;
+ int16_t ord_1 = orderCoef+1;
+ int32_t inv_cthQ16[MAX_AR_MODEL_ORDER];
- WebRtc_Word32 gain32, fQtmp;
- WebRtc_Word16 gain16;
- WebRtc_Word16 gain_sh;
+ int32_t gain32, fQtmp;
+ int16_t gain16;
+ int16_t gain_sh;
- WebRtc_Word32 tmp32, tmp32b;
- WebRtc_Word32 fQ15vec[HALF_SUBFRAMELEN];
- WebRtc_Word32 gQ15[MAX_AR_MODEL_ORDER+1][HALF_SUBFRAMELEN];
- WebRtc_Word16 sh;
- WebRtc_Word16 t16a;
- WebRtc_Word16 t16b;
+ int32_t tmp32, tmp32b;
+ int32_t fQ15vec[HALF_SUBFRAMELEN];
+ int32_t gQ15[MAX_AR_MODEL_ORDER+1][HALF_SUBFRAMELEN];
+ int16_t sh;
+ int16_t t16a;
+ int16_t t16b;
for (u=0;u<SUBFRAMES;u++)
{
int32_t temp1 = WEBRTC_SPL_MUL_16_16(u, HALF_SUBFRAMELEN);
/* set the Direct Form coefficients */
- temp2 = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16(u, orderCoef);
- temp3 = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16(2, u)+lo_hi;
+ temp2 = (int16_t)WEBRTC_SPL_MUL_16_16(u, orderCoef);
+ temp3 = (int16_t)WEBRTC_SPL_MUL_16_16(2, u)+lo_hi;
/* compute lattice filter coefficients */
- memcpy(sthQ15, &filt_coefQ15[temp2], orderCoef * sizeof(WebRtc_Word16));
+ memcpy(sthQ15, &filt_coefQ15[temp2], orderCoef * sizeof(int16_t));
WebRtcSpl_SqrtOfOneMinusXSquared(sthQ15, orderCoef, cthQ15);
@@ -134,9 +134,9 @@
for (k=0;k<orderCoef;k++)
{
gain32 = WEBRTC_SPL_MUL_16_32_RSFT15(cthQ15[k], gain32); //Q15*Q(17+gain_sh)>>15 = Q(17+gain_sh)
- inv_cthQ16[k] = WebRtcSpl_DivW32W16((WebRtc_Word32)2147483647, cthQ15[k]); // 1/cth[k] in Q31/Q15 = Q16
+ inv_cthQ16[k] = WebRtcSpl_DivW32W16((int32_t)2147483647, cthQ15[k]); // 1/cth[k] in Q31/Q15 = Q16
}
- gain16 = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(gain32, 16); //Q(1+gain_sh)
+ gain16 = (int16_t) WEBRTC_SPL_RSHIFT_W32(gain32, 16); //Q(1+gain_sh)
/* normalized lattice filter */
/*****************************/
@@ -144,8 +144,8 @@
/* initial conditions */
for (i=0;i<HALF_SUBFRAMELEN;i++)
{
- fQ15vec[i] = WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)lat_inQ0[i + temp1], 15); //Q15
- gQ15[0][i] = WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)lat_inQ0[i + temp1], 15); //Q15
+ fQ15vec[i] = WEBRTC_SPL_LSHIFT_W32((int32_t)lat_inQ0[i + temp1], 15); //Q15
+ gQ15[0][i] = WEBRTC_SPL_LSHIFT_W32((int32_t)lat_inQ0[i + temp1], 15); //Q15
}
@@ -158,8 +158,8 @@
tmp32 = WEBRTC_SPL_MUL_16_32_RSFT15(sthQ15[i-1], stateGQ15[i-1]);//Q15*Q15>>15 = Q15
tmp32b= fQtmp + tmp32; //Q15+Q15=Q15
tmp32 = inv_cthQ16[i-1]; //Q16
- t16a = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(tmp32, 16);
- t16b = (WebRtc_Word16) (tmp32-WEBRTC_SPL_LSHIFT_W32(((WebRtc_Word32)t16a), 16));
+ t16a = (int16_t) WEBRTC_SPL_RSHIFT_W32(tmp32, 16);
+ t16b = (int16_t) (tmp32-WEBRTC_SPL_LSHIFT_W32(((int32_t)t16a), 16));
if (t16b<0) t16a++;
tmp32 = LATTICE_MUL_32_32_RSFT16(t16a, t16b, tmp32b);
fQtmp = tmp32; // Q15
@@ -189,7 +189,7 @@
//gain32 = WEBRTC_SPL_RSHIFT_W32(gain32, gain_sh); // Q(17+gain_sh) -> Q17
tmp32 = WEBRTC_SPL_MUL_16_32_RSFT16(gain16, fQ15vec[n]); //Q(1+gain_sh)*Q15>>16 = Q(gain_sh)
sh = 9-gain_sh; //number of needed shifts to reach Q9
- t16a = (WebRtc_Word16) WEBRTC_SPL_SHIFT_W32(tmp32, sh);
+ t16a = (int16_t) WEBRTC_SPL_SHIFT_W32(tmp32, sh);
lat_outQ9[n + temp1] = t16a;
}
@@ -210,39 +210,39 @@
/* ----------------AR filter-------------------------*/
/* filter the signal using normalized lattice filter */
-void WebRtcIsacfix_NormLatticeFilterAr(WebRtc_Word16 orderCoef,
- WebRtc_Word16 *stateGQ0,
- WebRtc_Word32 *lat_inQ25,
- WebRtc_Word16 *filt_coefQ15,
- WebRtc_Word32 *gain_lo_hiQ17,
- WebRtc_Word16 lo_hi,
- WebRtc_Word16 *lat_outQ0)
+void WebRtcIsacfix_NormLatticeFilterAr(int16_t orderCoef,
+ int16_t *stateGQ0,
+ int32_t *lat_inQ25,
+ int16_t *filt_coefQ15,
+ int32_t *gain_lo_hiQ17,
+ int16_t lo_hi,
+ int16_t *lat_outQ0)
{
int ii,n,k,i,u;
- WebRtc_Word16 sthQ15[MAX_AR_MODEL_ORDER];
- WebRtc_Word16 cthQ15[MAX_AR_MODEL_ORDER];
- WebRtc_Word32 tmp32;
+ int16_t sthQ15[MAX_AR_MODEL_ORDER];
+ int16_t cthQ15[MAX_AR_MODEL_ORDER];
+ int32_t tmp32;
- WebRtc_Word16 tmpAR;
- WebRtc_Word16 ARfQ0vec[HALF_SUBFRAMELEN];
- WebRtc_Word16 ARgQ0vec[MAX_AR_MODEL_ORDER+1];
+ int16_t tmpAR;
+ int16_t ARfQ0vec[HALF_SUBFRAMELEN];
+ int16_t ARgQ0vec[MAX_AR_MODEL_ORDER+1];
- WebRtc_Word32 inv_gain32;
- WebRtc_Word16 inv_gain16;
- WebRtc_Word16 den16;
- WebRtc_Word16 sh;
+ int32_t inv_gain32;
+ int16_t inv_gain16;
+ int16_t den16;
+ int16_t sh;
- WebRtc_Word16 temp2,temp3;
- WebRtc_Word16 ord_1 = orderCoef+1;
+ int16_t temp2,temp3;
+ int16_t ord_1 = orderCoef+1;
for (u=0;u<SUBFRAMES;u++)
{
int32_t temp1 = WEBRTC_SPL_MUL_16_16(u, HALF_SUBFRAMELEN);
//set the denominator and numerator of the Direct Form
- temp2 = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16(u, orderCoef);
- temp3 = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16(2, u) + lo_hi;
+ temp2 = (int16_t)WEBRTC_SPL_MUL_16_16(u, orderCoef);
+ temp3 = (int16_t)WEBRTC_SPL_MUL_16_16(2, u) + lo_hi;
for (ii=0; ii<orderCoef; ii++) {
sthQ15[ii] = filt_coefQ15[temp2+ii];
@@ -263,11 +263,11 @@
}
sh = WebRtcSpl_NormW32(tmp32); // tmp32 is the gain
- den16 = (WebRtc_Word16) WEBRTC_SPL_SHIFT_W32(tmp32, sh-16); //Q(27+sh-16) = Q(sh+11) (all 16 bits are value bits)
- inv_gain32 = WebRtcSpl_DivW32W16((WebRtc_Word32)2147483647, den16); // 1/gain in Q31/Q(sh+11) = Q(20-sh)
+ den16 = (int16_t) WEBRTC_SPL_SHIFT_W32(tmp32, sh-16); //Q(27+sh-16) = Q(sh+11) (all 16 bits are value bits)
+ inv_gain32 = WebRtcSpl_DivW32W16((int32_t)2147483647, den16); // 1/gain in Q31/Q(sh+11) = Q(20-sh)
//initial conditions
- inv_gain16 = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(inv_gain32, 2); // 1/gain in Q(20-sh-2) = Q(18-sh)
+ inv_gain16 = (int16_t) WEBRTC_SPL_RSHIFT_W32(inv_gain32, 2); // 1/gain in Q(20-sh-2) = Q(18-sh)
for (i=0;i<HALF_SUBFRAMELEN;i++)
{
@@ -276,16 +276,16 @@
tmp32 = WEBRTC_SPL_MUL_16_32_RSFT16(inv_gain16, tmp32); //lat_in[]*inv_gain in (Q(18-sh)*Q26)>>16 = Q(28-sh)
tmp32 = WEBRTC_SPL_SHIFT_W32(tmp32, -(28-sh)); // lat_in[]*inv_gain in Q0
- ARfQ0vec[i] = (WebRtc_Word16)WebRtcSpl_SatW32ToW16(tmp32); // Q0
+ ARfQ0vec[i] = (int16_t)WebRtcSpl_SatW32ToW16(tmp32); // Q0
}
for (i=orderCoef-1;i>=0;i--) //get the state of f&g for the first input, for all orders
{
tmp32 = WEBRTC_SPL_RSHIFT_W32(((WEBRTC_SPL_MUL_16_16(cthQ15[i],ARfQ0vec[0])) - (WEBRTC_SPL_MUL_16_16(sthQ15[i],stateGQ0[i])) + 16384), 15);
- tmpAR = (WebRtc_Word16)WebRtcSpl_SatW32ToW16(tmp32); // Q0
+ tmpAR = (int16_t)WebRtcSpl_SatW32ToW16(tmp32); // Q0
tmp32 = WEBRTC_SPL_RSHIFT_W32(((WEBRTC_SPL_MUL_16_16(sthQ15[i],ARfQ0vec[0])) + (WEBRTC_SPL_MUL_16_16(cthQ15[i], stateGQ0[i])) + 16384), 15);
- ARgQ0vec[i+1] = (WebRtc_Word16)WebRtcSpl_SatW32ToW16(tmp32); // Q0
+ ARgQ0vec[i+1] = (int16_t)WebRtcSpl_SatW32ToW16(tmp32); // Q0
ARfQ0vec[0] = tmpAR;
}
ARgQ0vec[0] = ARfQ0vec[0];
diff --git a/webrtc/modules/audio_coding/codecs/isac/fix/source/lattice_c.c b/webrtc/modules/audio_coding/codecs/isac/fix/source/lattice_c.c
index 80ccf39..d5b2d0e 100644
--- a/webrtc/modules/audio_coding/codecs/isac/fix/source/lattice_c.c
+++ b/webrtc/modules/audio_coding/codecs/isac/fix/source/lattice_c.c
@@ -40,8 +40,8 @@
- (WEBRTC_SPL_MUL_16_16(sth_Q15[k], ar_g_Q0[k])) + 16384), 15);
tmp32_2 = WEBRTC_SPL_RSHIFT_W32(((WEBRTC_SPL_MUL_16_16(sth_Q15[k], tmpAR))
+ (WEBRTC_SPL_MUL_16_16(cth_Q15[k], ar_g_Q0[k])) + 16384), 15);
- tmpAR = (WebRtc_Word16)WebRtcSpl_SatW32ToW16(tmp32);
- ar_g_Q0[k + 1] = (WebRtc_Word16)WebRtcSpl_SatW32ToW16(tmp32_2);
+ tmpAR = (int16_t)WebRtcSpl_SatW32ToW16(tmp32);
+ ar_g_Q0[k + 1] = (int16_t)WebRtcSpl_SatW32ToW16(tmp32_2);
}
ar_f_Q0[n + 1] = tmpAR;
ar_g_Q0[0] = tmpAR;
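
The pattern repeated in both lattice loops, (a*b + 16384) >> 15 followed by WebRtcSpl_SatW32ToW16, is a rounded Q15 multiply with saturation to int16_t. A minimal stand-alone illustration; the helper names below are mine, not the SPL API.

    #include <stdint.h>
    #include <stdio.h>

    /* Clamp a 32-bit value into int16_t range. */
    static int16_t sat_w32_to_w16(int32_t v)
    {
        if (v > 32767)  return 32767;
        if (v < -32768) return -32768;
        return (int16_t)v;
    }

    /* Rounded Q15 multiply: (a*b + 16384) >> 15, then saturate. */
    static int16_t mul_q15_round_sat(int16_t a_q15, int16_t b_q0)
    {
        int32_t prod = (int32_t)a_q15 * b_q0;        /* Q15 * Q0 = Q15 */
        return sat_w32_to_w16((prod + 16384) >> 15); /* round, drop 15 bits */
    }

    int main(void)
    {
        /* 0.5 in Q15 is 16384; multiplying a Q0 sample by it halves it. */
        printf("%d\n", mul_q15_round_sat(16384, 1000)); /* prints 500 */
        return 0;
    }
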
diff --git a/webrtc/modules/audio_coding/codecs/isac/fix/source/lpc_masking_model.c b/webrtc/modules/audio_coding/codecs/isac/fix/source/lpc_masking_model.c
index 4be438e..0dc8174 100644
--- a/webrtc/modules/audio_coding/codecs/isac/fix/source/lpc_masking_model.c
+++ b/webrtc/modules/audio_coding/codecs/isac/fix/source/lpc_masking_model.c
@@ -24,36 +24,36 @@
/* The conversion is implemented by the step-down algorithm */
void WebRtcSpl_AToK_JSK(
- WebRtc_Word16 *a16, /* Q11 */
- WebRtc_Word16 useOrder,
- WebRtc_Word16 *k16 /* Q15 */
+ int16_t *a16, /* Q11 */
+ int16_t useOrder,
+ int16_t *k16 /* Q15 */
)
{
int m, k;
- WebRtc_Word32 tmp32[MAX_AR_MODEL_ORDER];
- WebRtc_Word32 tmp32b;
- WebRtc_Word32 tmp_inv_denum32;
- WebRtc_Word16 tmp_inv_denum16;
+ int32_t tmp32[MAX_AR_MODEL_ORDER];
+ int32_t tmp32b;
+ int32_t tmp_inv_denum32;
+ int16_t tmp_inv_denum16;
k16[useOrder-1]= WEBRTC_SPL_LSHIFT_W16(a16[useOrder], 4); //Q11<<4 => Q15
for (m=useOrder-1; m>0; m--) {
- tmp_inv_denum32 = ((WebRtc_Word32) 1073741823) - WEBRTC_SPL_MUL_16_16(k16[m], k16[m]); // (1 - k^2) in Q30
- tmp_inv_denum16 = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(tmp_inv_denum32, 15); // (1 - k^2) in Q15
+ tmp_inv_denum32 = ((int32_t) 1073741823) - WEBRTC_SPL_MUL_16_16(k16[m], k16[m]); // (1 - k^2) in Q30
+ tmp_inv_denum16 = (int16_t) WEBRTC_SPL_RSHIFT_W32(tmp_inv_denum32, 15); // (1 - k^2) in Q15
for (k=1; k<=m; k++) {
- tmp32b = WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)a16[k], 16) -
+ tmp32b = WEBRTC_SPL_LSHIFT_W32((int32_t)a16[k], 16) -
WEBRTC_SPL_LSHIFT_W32(WEBRTC_SPL_MUL_16_16(k16[m], a16[m-k+1]), 1);
tmp32[k] = WebRtcSpl_DivW32W16(tmp32b, tmp_inv_denum16); //Q27/Q15 = Q12
}
for (k=1; k<m; k++) {
- a16[k] = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(tmp32[k], 1); //Q12>>1 => Q11
+ a16[k] = (int16_t) WEBRTC_SPL_RSHIFT_W32(tmp32[k], 1); //Q12>>1 => Q11
}
tmp32[m] = WEBRTC_SPL_SAT(4092, tmp32[m], -4092);
- k16[m-1] = (WebRtc_Word16) WEBRTC_SPL_LSHIFT_W32(tmp32[m], 3); //Q12<<3 => Q15
+ k16[m-1] = (int16_t) WEBRTC_SPL_LSHIFT_W32(tmp32[m], 3); //Q12<<3 => Q15
}
return;
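
For comparison with the Q11/Q15 arithmetic in WebRtcSpl_AToK_JSK above, the textbook step-down recursion in floating point looks like this. Indexing and scaling details of the fixed-point code differ; this is only a reference sketch.

    #include <stdio.h>

    /* Step-down (LPC A-polynomial to reflection coefficients):
     * k[m-1] = a[m], then a[i] = (a[i] - k[m-1]*a[m-i]) / (1 - k[m-1]^2)
     * for the reduced-order polynomial.  Supports order < 32. */
    static void atok_float(double *a, int order, double *k)
    {
        for (int m = order; m >= 1; m--) {
            double km = a[m];
            k[m - 1] = km;
            double denom = 1.0 - km * km;
            double tmp[32];
            for (int i = 1; i < m; i++)
                tmp[i] = (a[i] - km * a[m - i]) / denom;
            for (int i = 1; i < m; i++)
                a[i] = tmp[i];
        }
    }

    int main(void)
    {
        double a[3] = {1.0, -0.9, 0.2};  /* A(z) = 1 - 0.9 z^-1 + 0.2 z^-2 */
        double k[2];
        atok_float(a, 2, k);
        printf("k0=%f k1=%f\n", k[0], k[1]);  /* -0.75, 0.20 : stable */
        return 0;
    }
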
@@ -63,25 +63,25 @@
-WebRtc_Word16 WebRtcSpl_LevinsonW32_JSK(
- WebRtc_Word32 *R, /* (i) Autocorrelation of length >= order+1 */
- WebRtc_Word16 *A, /* (o) A[0..order] LPC coefficients (Q11) */
- WebRtc_Word16 *K, /* (o) K[0...order-1] Reflection coefficients (Q15) */
- WebRtc_Word16 order /* (i) filter order */
+int16_t WebRtcSpl_LevinsonW32_JSK(
+ int32_t *R, /* (i) Autocorrelation of length >= order+1 */
+ int16_t *A, /* (o) A[0..order] LPC coefficients (Q11) */
+ int16_t *K, /* (o) K[0...order-1] Reflection coefficients (Q15) */
+ int16_t order /* (i) filter order */
) {
- WebRtc_Word16 i, j;
- WebRtc_Word16 R_hi[LEVINSON_MAX_ORDER+1], R_low[LEVINSON_MAX_ORDER+1];
+ int16_t i, j;
+ int16_t R_hi[LEVINSON_MAX_ORDER+1], R_low[LEVINSON_MAX_ORDER+1];
/* Autocorrelation coefficients in high precision */
- WebRtc_Word16 A_hi[LEVINSON_MAX_ORDER+1], A_low[LEVINSON_MAX_ORDER+1];
+ int16_t A_hi[LEVINSON_MAX_ORDER+1], A_low[LEVINSON_MAX_ORDER+1];
/* LPC coefficients in high precision */
- WebRtc_Word16 A_upd_hi[LEVINSON_MAX_ORDER+1], A_upd_low[LEVINSON_MAX_ORDER+1];
+ int16_t A_upd_hi[LEVINSON_MAX_ORDER+1], A_upd_low[LEVINSON_MAX_ORDER+1];
/* LPC coefficients for next iteration */
- WebRtc_Word16 K_hi, K_low; /* reflection coefficient in high precision */
- WebRtc_Word16 Alpha_hi, Alpha_low, Alpha_exp; /* Prediction gain Alpha in high precision
+ int16_t K_hi, K_low; /* reflection coefficient in high precision */
+ int16_t Alpha_hi, Alpha_low, Alpha_exp; /* Prediction gain Alpha in high precision
and with scale factor */
- WebRtc_Word16 tmp_hi, tmp_low;
- WebRtc_Word32 temp1W32, temp2W32, temp3W32;
- WebRtc_Word16 norm;
+ int16_t tmp_hi, tmp_low;
+ int32_t temp1W32, temp2W32, temp3W32;
+ int16_t norm;
/* Normalize the autocorrelation R[0]...R[order+1] */
@@ -90,14 +90,14 @@
for (i=order;i>=0;i--) {
temp1W32 = WEBRTC_SPL_LSHIFT_W32(R[i], norm);
/* Put R in hi and low format */
- R_hi[i] = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(temp1W32, 16);
- R_low[i] = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32((temp1W32 - WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)R_hi[i], 16)), 1);
+ R_hi[i] = (int16_t) WEBRTC_SPL_RSHIFT_W32(temp1W32, 16);
+ R_low[i] = (int16_t)WEBRTC_SPL_RSHIFT_W32((temp1W32 - WEBRTC_SPL_LSHIFT_W32((int32_t)R_hi[i], 16)), 1);
}
/* K = A[1] = -R[1] / R[0] */
- temp2W32 = WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)R_hi[1],16) +
- WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)R_low[1],1); /* R[1] in Q31 */
+ temp2W32 = WEBRTC_SPL_LSHIFT_W32((int32_t)R_hi[1],16) +
+ WEBRTC_SPL_LSHIFT_W32((int32_t)R_low[1],1); /* R[1] in Q31 */
temp3W32 = WEBRTC_SPL_ABS_W32(temp2W32); /* abs R[1] */
temp1W32 = WebRtcSpl_DivW32HiLow(temp3W32, R_hi[0], R_low[0]); /* abs(R[1])/R[0] in Q31 */
/* Put back the sign on R[1] */
@@ -106,8 +106,8 @@
}
/* Put K in hi and low format */
- K_hi = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(temp1W32, 16);
- K_low = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32((temp1W32 - WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)K_hi, 16)), 1);
+ K_hi = (int16_t) WEBRTC_SPL_RSHIFT_W32(temp1W32, 16);
+ K_low = (int16_t)WEBRTC_SPL_RSHIFT_W32((temp1W32 - WEBRTC_SPL_LSHIFT_W32((int32_t)K_hi, 16)), 1);
/* Store first reflection coefficient */
K[0] = K_hi;
@@ -115,8 +115,8 @@
temp1W32 = WEBRTC_SPL_RSHIFT_W32(temp1W32, 4); /* A[1] in Q27 */
/* Put A[1] in hi and low format */
- A_hi[1] = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(temp1W32, 16);
- A_low[1] = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32((temp1W32 - WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)A_hi[1], 16)), 1);
+ A_hi[1] = (int16_t) WEBRTC_SPL_RSHIFT_W32(temp1W32, 16);
+ A_low[1] = (int16_t)WEBRTC_SPL_RSHIFT_W32((temp1W32 - WEBRTC_SPL_LSHIFT_W32((int32_t)A_hi[1], 16)), 1);
/* Alpha = R[0] * (1-K^2) */
@@ -124,11 +124,11 @@
WEBRTC_SPL_MUL_16_16(K_hi, K_hi)), 1); /* temp1W32 = k^2 in Q31 */
temp1W32 = WEBRTC_SPL_ABS_W32(temp1W32); /* Guard against <0 */
- temp1W32 = (WebRtc_Word32)0x7fffffffL - temp1W32; /* temp1W32 = (1 - K[0]*K[0]) in Q31 */
+ temp1W32 = (int32_t)0x7fffffffL - temp1W32; /* temp1W32 = (1 - K[0]*K[0]) in Q31 */
/* Store temp1W32 = 1 - K[0]*K[0] on hi and low format */
- tmp_hi = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(temp1W32, 16);
- tmp_low = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32((temp1W32 - WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)tmp_hi, 16)), 1);
+ tmp_hi = (int16_t) WEBRTC_SPL_RSHIFT_W32(temp1W32, 16);
+ tmp_low = (int16_t)WEBRTC_SPL_RSHIFT_W32((temp1W32 - WEBRTC_SPL_LSHIFT_W32((int32_t)tmp_hi, 16)), 1);
/* Calculate Alpha in Q31 */
temp1W32 = WEBRTC_SPL_LSHIFT_W32((WEBRTC_SPL_MUL_16_16(R_hi[0], tmp_hi) +
@@ -139,8 +139,8 @@
Alpha_exp = WebRtcSpl_NormW32(temp1W32);
temp1W32 = WEBRTC_SPL_LSHIFT_W32(temp1W32, Alpha_exp);
- Alpha_hi = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(temp1W32, 16);
- Alpha_low = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32((temp1W32 - WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)Alpha_hi, 16)), 1);
+ Alpha_hi = (int16_t) WEBRTC_SPL_RSHIFT_W32(temp1W32, 16);
+ Alpha_low = (int16_t)WEBRTC_SPL_RSHIFT_W32((temp1W32 - WEBRTC_SPL_LSHIFT_W32((int32_t)Alpha_hi, 16)), 1);
/* Perform the iterative calculations in the
Levinson Durbin algorithm */
@@ -166,8 +166,8 @@
}
temp1W32 = WEBRTC_SPL_LSHIFT_W32(temp1W32, 4);
- temp1W32 += (WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)R_hi[i], 16) +
- WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)R_low[i], 1));
+ temp1W32 += (WEBRTC_SPL_LSHIFT_W32((int32_t)R_hi[i], 16) +
+ WEBRTC_SPL_LSHIFT_W32((int32_t)R_low[i], 1));
/* K = -temp1W32 / Alpha */
temp2W32 = WEBRTC_SPL_ABS_W32(temp1W32); /* abs(temp1W32) */
@@ -185,16 +185,16 @@
} else {
if (temp3W32 > 0)
{
- temp3W32 = (WebRtc_Word32)0x7fffffffL;
+ temp3W32 = (int32_t)0x7fffffffL;
} else
{
- temp3W32 = (WebRtc_Word32)0x80000000L;
+ temp3W32 = (int32_t)0x80000000L;
}
}
/* Put K on hi and low format */
- K_hi = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(temp3W32, 16);
- K_low = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32((temp3W32 - WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)K_hi, 16)), 1);
+ K_hi = (int16_t) WEBRTC_SPL_RSHIFT_W32(temp3W32, 16);
+ K_low = (int16_t)WEBRTC_SPL_RSHIFT_W32((temp3W32 - WEBRTC_SPL_LSHIFT_W32((int32_t)K_hi, 16)), 1);
/* Store Reflection coefficient in Q15 */
K[i-1] = K_hi;
@@ -203,7 +203,7 @@
user decide what to do in that case
*/
- if ((WebRtc_Word32)WEBRTC_SPL_ABS_W16(K_hi) > (WebRtc_Word32)32740) {
+ if ((int32_t)WEBRTC_SPL_ABS_W16(K_hi) > (int32_t)32740) {
return(-i); /* Unstable filter */
}
@@ -215,23 +215,23 @@
for(j=1; j<i; j++)
{
- temp1W32 = WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)A_hi[j],16) +
- WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)A_low[j],1); /* temp1W32 = A[j] in Q27 */
+ temp1W32 = WEBRTC_SPL_LSHIFT_W32((int32_t)A_hi[j],16) +
+ WEBRTC_SPL_LSHIFT_W32((int32_t)A_low[j],1); /* temp1W32 = A[j] in Q27 */
temp1W32 += WEBRTC_SPL_LSHIFT_W32(( WEBRTC_SPL_MUL_16_16(K_hi, A_hi[i-j]) +
WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL_16_16(K_hi, A_low[i-j]), 15) +
WEBRTC_SPL_RSHIFT_W32(WEBRTC_SPL_MUL_16_16(K_low, A_hi[i-j]), 15) ), 1); /* temp1W32 += K*A[i-j] in Q27 */
/* Put Anew in hi and low format */
- A_upd_hi[j] = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(temp1W32, 16);
- A_upd_low[j] = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32((temp1W32 - WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)A_upd_hi[j], 16)), 1);
+ A_upd_hi[j] = (int16_t) WEBRTC_SPL_RSHIFT_W32(temp1W32, 16);
+ A_upd_low[j] = (int16_t)WEBRTC_SPL_RSHIFT_W32((temp1W32 - WEBRTC_SPL_LSHIFT_W32((int32_t)A_upd_hi[j], 16)), 1);
}
temp3W32 = WEBRTC_SPL_RSHIFT_W32(temp3W32, 4); /* temp3W32 = K in Q27 (Convert from Q31 to Q27) */
/* Store Anew in hi and low format */
- A_upd_hi[i] = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(temp3W32, 16);
- A_upd_low[i] = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32((temp3W32 - WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)A_upd_hi[i], 16)), 1);
+ A_upd_hi[i] = (int16_t) WEBRTC_SPL_RSHIFT_W32(temp3W32, 16);
+ A_upd_low[i] = (int16_t)WEBRTC_SPL_RSHIFT_W32((temp3W32 - WEBRTC_SPL_LSHIFT_W32((int32_t)A_upd_hi[i], 16)), 1);
/* Alpha = Alpha * (1-K^2) */
@@ -239,11 +239,11 @@
WEBRTC_SPL_MUL_16_16(K_hi, K_hi)), 1); /* K*K in Q31 */
temp1W32 = WEBRTC_SPL_ABS_W32(temp1W32); /* Guard against <0 */
- temp1W32 = (WebRtc_Word32)0x7fffffffL - temp1W32; /* 1 - K*K in Q31 */
+ temp1W32 = (int32_t)0x7fffffffL - temp1W32; /* 1 - K*K in Q31 */
/* Convert 1- K^2 in hi and low format */
- tmp_hi = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(temp1W32, 16);
- tmp_low = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32((temp1W32 - WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)tmp_hi, 16)), 1);
+ tmp_hi = (int16_t) WEBRTC_SPL_RSHIFT_W32(temp1W32, 16);
+ tmp_low = (int16_t)WEBRTC_SPL_RSHIFT_W32((temp1W32 - WEBRTC_SPL_LSHIFT_W32((int32_t)tmp_hi, 16)), 1);
/* Calculate Alpha = Alpha * (1-K^2) in Q31 */
temp1W32 = WEBRTC_SPL_LSHIFT_W32(( WEBRTC_SPL_MUL_16_16(Alpha_hi, tmp_hi) +
@@ -255,8 +255,8 @@
norm = WebRtcSpl_NormW32(temp1W32);
temp1W32 = WEBRTC_SPL_LSHIFT_W32(temp1W32, norm);
- Alpha_hi = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(temp1W32, 16);
- Alpha_low = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32((temp1W32 - WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)Alpha_hi, 16)), 1);
+ Alpha_hi = (int16_t) WEBRTC_SPL_RSHIFT_W32(temp1W32, 16);
+ Alpha_low = (int16_t)WEBRTC_SPL_RSHIFT_W32((temp1W32 - WEBRTC_SPL_LSHIFT_W32((int32_t)Alpha_hi, 16)), 1);
/* Update the total normalization of Alpha */
Alpha_exp = Alpha_exp + norm;
@@ -279,10 +279,10 @@
for(i=1; i<=order; i++) {
/* temp1W32 in Q27 */
- temp1W32 = WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)A_hi[i], 16) +
- WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)A_low[i], 1);
+ temp1W32 = WEBRTC_SPL_LSHIFT_W32((int32_t)A_hi[i], 16) +
+ WEBRTC_SPL_LSHIFT_W32((int32_t)A_low[i], 1);
/* Round and store upper word */
- A[i] = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(temp1W32+(WebRtc_Word32)32768, 16);
+ A[i] = (int16_t)WEBRTC_SPL_RSHIFT_W32(temp1W32+(int32_t)32768, 16);
}
return(1); /* Stable filters */
}
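
Throughout the Levinson code above, 32-bit values are kept as a (hi, low) pair of int16_t: hi holds the upper 16 bits, low holds the remaining bits halved so it stays non-negative. A minimal round-trip check of that representation; the helper names are illustrative.

    #include <stdint.h>
    #include <stdio.h>

    /* hi  = w32 >> 16
     * low = (w32 - hi*2^16) >> 1, reconstructed as hi*2^16 + low*2.
     * Assumes an arithmetic right shift for negative values, as the SPL
     * macros do. */
    static void split_hi_low(int32_t w32, int16_t *hi, int16_t *low)
    {
        *hi  = (int16_t)(w32 >> 16);
        *low = (int16_t)((w32 - (int32_t)*hi * 65536) >> 1);
    }

    static int32_t join_hi_low(int16_t hi, int16_t low)
    {
        return (int32_t)hi * 65536 + (int32_t)low * 2;
    }

    int main(void)
    {
        int32_t x = -123456789;
        int16_t hi, low;
        split_hi_low(x, &hi, &low);
        /* The LSB is lost in the split, so the round trip is exact to within 1. */
        printf("x=%d rejoined=%d\n", x, join_hi_low(hi, low));
        return 0;
    }
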
@@ -297,7 +297,7 @@
* for k=1:16, fprintf(1, '%.8f, ', w(k*16 + (-15:0))); fprintf(1, '\n'); end
* All values are multiplied by 2^21 in fixed-point code.
*/
-static const WebRtc_Word16 kWindowAutocorr[WINLEN] = {
+static const int16_t kWindowAutocorr[WINLEN] = {
0, 0, 0, 0, 0, 1, 1, 2, 2, 3, 5, 6,
8, 10, 12, 14, 17, 20, 24, 28, 33, 38, 43, 49,
56, 63, 71, 79, 88, 98, 108, 119, 131, 143, 157, 171,
@@ -337,20 +337,20 @@
round(kPolyVecLo*32768)
round(kPolyVecHi*32768)
*/
-static const WebRtc_Word16 kPolyVecLo[12] = {
+static const int16_t kPolyVecLo[12] = {
29491, 26542, 23888, 21499, 19349, 17414, 15673, 14106, 12695, 11425, 10283, 9255
};
-static const WebRtc_Word16 kPolyVecHi[6] = {
+static const int16_t kPolyVecHi[6] = {
26214, 20972, 16777, 13422, 10737, 8590
};
-static __inline WebRtc_Word32 log2_Q8_LPC( WebRtc_UWord32 x ) {
+static __inline int32_t log2_Q8_LPC( uint32_t x ) {
- WebRtc_Word32 zeros, lg2;
- WebRtc_Word16 frac;
+ int32_t zeros, lg2;
+ int16_t frac;
zeros=WebRtcSpl_NormU32(x);
- frac=(WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(((WebRtc_UWord32)WEBRTC_SPL_LSHIFT_W32(x, zeros)&0x7FFFFFFF), 23);
+ frac=(int16_t)WEBRTC_SPL_RSHIFT_W32(((uint32_t)WEBRTC_SPL_LSHIFT_W32(x, zeros)&0x7FFFFFFF), 23);
/* log2(x) */
@@ -359,22 +359,22 @@
}
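
log2_Q8_LPC above (and the identical Log2Q8 in pitch_estimator.c) returns log2(x) in Q8 by normalizing x and using the top mantissa bits as a linear approximation of the fractional part. A plain-C equivalent for reference; x must be non-zero, and this is not the SPL implementation.

    #include <math.h>
    #include <stdint.h>
    #include <stdio.h>

    /* log2(x) ~= int_part + mantissa_fraction (linear approximation on [1,2)),
     * returned in Q8. */
    static int32_t log2_q8_approx(uint32_t x)
    {
        int zeros = 0;
        while (!(x & 0x80000000u)) { x <<= 1; zeros++; }    /* normalize */
        int32_t frac = (int32_t)((x & 0x7FFFFFFFu) >> 23);  /* top 8 fraction bits */
        return ((31 - zeros) << 8) + frac;                  /* Q8 */
    }

    int main(void)
    {
        uint32_t x = 48000;
        printf("approx=%f  exact=%f\n",
               log2_q8_approx(x) / 256.0, log2((double)x));
        return 0;
    }
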
-static const WebRtc_Word16 kMulPitchGain = -25; /* 200/256 in Q5 */
-static const WebRtc_Word16 kChngFactor = 3523; /* log10(2)*10/4*0.4/1.4=log10(2)/1.4= 0.2150 in Q14 */
-static const WebRtc_Word16 kExp2 = 11819; /* 1/log(2) */
+static const int16_t kMulPitchGain = -25; /* 200/256 in Q5 */
+static const int16_t kChngFactor = 3523; /* log10(2)*10/4*0.4/1.4=log10(2)/1.4= 0.2150 in Q14 */
+static const int16_t kExp2 = 11819; /* 1/log(2) */
const int kShiftLowerBand = 11; /* Shift value for lower band in Q domain. */
const int kShiftHigherBand = 12; /* Shift value for higher band in Q domain. */
-void WebRtcIsacfix_GetVars(const WebRtc_Word16 *input, const WebRtc_Word16 *pitchGains_Q12,
- WebRtc_UWord32 *oldEnergy, WebRtc_Word16 *varscale)
+void WebRtcIsacfix_GetVars(const int16_t *input, const int16_t *pitchGains_Q12,
+ uint32_t *oldEnergy, int16_t *varscale)
{
int k;
- WebRtc_UWord32 nrgQ[4];
- WebRtc_Word16 nrgQlog[4];
- WebRtc_Word16 tmp16, chng1, chng2, chng3, chng4, tmp, chngQ, oldNrgQlog, pgQ, pg3;
- WebRtc_Word32 expPg32;
- WebRtc_Word16 expPg, divVal;
- WebRtc_Word16 tmp16_1, tmp16_2;
+ uint32_t nrgQ[4];
+ int16_t nrgQlog[4];
+ int16_t tmp16, chng1, chng2, chng3, chng4, tmp, chngQ, oldNrgQlog, pgQ, pg3;
+ int32_t expPg32;
+ int16_t expPg, divVal;
+ int16_t tmp16_1, tmp16_2;
/* Calculate energies of first and second frame halves */
nrgQ[0]=0;
@@ -395,9 +395,9 @@
}
for ( k=0; k<4; k++) {
- nrgQlog[k] = (WebRtc_Word16)log2_Q8_LPC(nrgQ[k]); /* log2(nrgQ) */
+ nrgQlog[k] = (int16_t)log2_Q8_LPC(nrgQ[k]); /* log2(nrgQ) */
}
- oldNrgQlog = (WebRtc_Word16)log2_Q8_LPC(*oldEnergy);
+ oldNrgQlog = (int16_t)log2_Q8_LPC(*oldEnergy);
/* Calculate average level change */
chng1 = WEBRTC_SPL_ABS_W16(nrgQlog[3]-nrgQlog[2]);
@@ -405,7 +405,7 @@
chng3 = WEBRTC_SPL_ABS_W16(nrgQlog[1]-nrgQlog[0]);
chng4 = WEBRTC_SPL_ABS_W16(nrgQlog[0]-oldNrgQlog);
tmp = chng1+chng2+chng3+chng4;
- chngQ = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(tmp, kChngFactor, 10); /* Q12 */
+ chngQ = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(tmp, kChngFactor, 10); /* Q12 */
chngQ += 2926; /* + 1.0/1.4 in Q12 */
/* Find average pitch gain */
@@ -415,34 +415,34 @@
pgQ += pitchGains_Q12[k];
}
- pg3 = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(pgQ, pgQ,11); /* pgQ in Q(12+2)=Q14. Q14*Q14>>11 => Q17 */
- pg3 = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(pgQ, pg3,13); /* Q17*Q14>>13 =>Q18 */
- pg3 = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(pg3, kMulPitchGain ,5); /* Q10 kMulPitchGain = -25 = -200 in Q-3. */
+ pg3 = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(pgQ, pgQ,11); /* pgQ in Q(12+2)=Q14. Q14*Q14>>11 => Q17 */
+ pg3 = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(pgQ, pg3,13); /* Q17*Q14>>13 =>Q18 */
+ pg3 = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(pg3, kMulPitchGain ,5); /* Q10 kMulPitchGain = -25 = -200 in Q-3. */
- tmp16=(WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT_WITH_ROUND(kExp2,pg3,13);/* Q13*Q10>>13 => Q10*/
+ tmp16=(int16_t)WEBRTC_SPL_MUL_16_16_RSFT_WITH_ROUND(kExp2,pg3,13);/* Q13*Q10>>13 => Q10*/
if (tmp16<0) {
tmp16_2 = (0x0400 | (tmp16 & 0x03FF));
- tmp16_1 = (WEBRTC_SPL_RSHIFT_W16((WebRtc_UWord16)(tmp16 ^ 0xFFFF), 10)-3); /* Gives result in Q14 */
+ tmp16_1 = (WEBRTC_SPL_RSHIFT_W16((uint16_t)(tmp16 ^ 0xFFFF), 10)-3); /* Gives result in Q14 */
if (tmp16_1<0)
- expPg=(WebRtc_Word16) -WEBRTC_SPL_LSHIFT_W16(tmp16_2, -tmp16_1);
+ expPg=(int16_t) -WEBRTC_SPL_LSHIFT_W16(tmp16_2, -tmp16_1);
else
- expPg=(WebRtc_Word16) -WEBRTC_SPL_RSHIFT_W16(tmp16_2, tmp16_1);
+ expPg=(int16_t) -WEBRTC_SPL_RSHIFT_W16(tmp16_2, tmp16_1);
} else
- expPg = (WebRtc_Word16) -16384; /* 1 in Q14, since 2^0=1 */
+ expPg = (int16_t) -16384; /* 1 in Q14, since 2^0=1 */
- expPg32 = (WebRtc_Word32)WEBRTC_SPL_LSHIFT_W16((WebRtc_Word32)expPg, 8); /* Q22 */
+ expPg32 = (int32_t)WEBRTC_SPL_LSHIFT_W16((int32_t)expPg, 8); /* Q22 */
divVal = WebRtcSpl_DivW32W16ResW16(expPg32, chngQ); /* Q22/Q12=Q10 */
- tmp16=(WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT_WITH_ROUND(kExp2,divVal,13);/* Q13*Q10>>13 => Q10*/
+ tmp16=(int16_t)WEBRTC_SPL_MUL_16_16_RSFT_WITH_ROUND(kExp2,divVal,13);/* Q13*Q10>>13 => Q10*/
if (tmp16<0) {
tmp16_2 = (0x0400 | (tmp16 & 0x03FF));
- tmp16_1 = (WEBRTC_SPL_RSHIFT_W16((WebRtc_UWord16)(tmp16 ^ 0xFFFF), 10)-3); /* Gives result in Q14 */
+ tmp16_1 = (WEBRTC_SPL_RSHIFT_W16((uint16_t)(tmp16 ^ 0xFFFF), 10)-3); /* Gives result in Q14 */
if (tmp16_1<0)
- expPg=(WebRtc_Word16) WEBRTC_SPL_LSHIFT_W16(tmp16_2, -tmp16_1);
+ expPg=(int16_t) WEBRTC_SPL_LSHIFT_W16(tmp16_2, -tmp16_1);
else
- expPg=(WebRtc_Word16) WEBRTC_SPL_RSHIFT_W16(tmp16_2, tmp16_1);
+ expPg=(int16_t) WEBRTC_SPL_RSHIFT_W16(tmp16_2, tmp16_1);
} else
- expPg = (WebRtc_Word16) 16384; /* 1 in Q14, since 2^0=1 */
+ expPg = (int16_t) 16384; /* 1 in Q14, since 2^0=1 */
*varscale = expPg-1;
*oldEnergy = nrgQ[3];
@@ -450,16 +450,16 @@
-static __inline WebRtc_Word16 exp2_Q10_T(WebRtc_Word16 x) { // Both in and out in Q10
+static __inline int16_t exp2_Q10_T(int16_t x) { // Both in and out in Q10
- WebRtc_Word16 tmp16_1, tmp16_2;
+ int16_t tmp16_1, tmp16_2;
- tmp16_2=(WebRtc_Word16)(0x0400|(x&0x03FF));
- tmp16_1=-(WebRtc_Word16)WEBRTC_SPL_RSHIFT_W16(x,10);
+ tmp16_2=(int16_t)(0x0400|(x&0x03FF));
+ tmp16_1=-(int16_t)WEBRTC_SPL_RSHIFT_W16(x,10);
if(tmp16_1>0)
- return (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W16(tmp16_2, tmp16_1);
+ return (int16_t) WEBRTC_SPL_RSHIFT_W16(tmp16_2, tmp16_1);
else
- return (WebRtc_Word16) WEBRTC_SPL_LSHIFT_W16(tmp16_2, -tmp16_1);
+ return (int16_t) WEBRTC_SPL_LSHIFT_W16(tmp16_2, -tmp16_1);
}
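
exp2_Q10_T above is the inverse trick: split the Q10 argument into integer and fractional parts, approximate 2^frac by (1 + frac) via the 0x0400 | (x & 0x03FF) mask, then shift by the integer part. A floating-point comparison; it assumes an arithmetic right shift, as the SPL macros do, and is illustrative only.

    #include <math.h>
    #include <stdint.h>
    #include <stdio.h>

    static int16_t exp2_q10_approx(int16_t x_q10)
    {
        int16_t mant  = (int16_t)(0x0400 | (x_q10 & 0x03FF)); /* 1 + frac, Q10 */
        int16_t ipart = (int16_t)(-(x_q10 >> 10));            /* -floor(x)      */
        if (ipart > 0)
            return (int16_t)(mant >> ipart);
        return (int16_t)(mant << -ipart);
    }

    int main(void)
    {
        int16_t x = -1536;                       /* -1.5 in Q10 */
        printf("approx=%f  exact=%f\n",
               exp2_q10_approx(x) / 1024.0, pow(2.0, x / 1024.0));
        return 0;
    }
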
@@ -538,63 +538,63 @@
return residual_energy;
}
-void WebRtcIsacfix_GetLpcCoef(WebRtc_Word16 *inLoQ0,
- WebRtc_Word16 *inHiQ0,
+void WebRtcIsacfix_GetLpcCoef(int16_t *inLoQ0,
+ int16_t *inHiQ0,
MaskFiltstr_enc *maskdata,
- WebRtc_Word16 snrQ10,
- const WebRtc_Word16 *pitchGains_Q12,
- WebRtc_Word32 *gain_lo_hiQ17,
- WebRtc_Word16 *lo_coeffQ15,
- WebRtc_Word16 *hi_coeffQ15)
+ int16_t snrQ10,
+ const int16_t *pitchGains_Q12,
+ int32_t *gain_lo_hiQ17,
+ int16_t *lo_coeffQ15,
+ int16_t *hi_coeffQ15)
{
int k, n, ii;
int pos1, pos2;
int sh_lo, sh_hi, sh, ssh, shMem;
- WebRtc_Word16 varscaleQ14;
+ int16_t varscaleQ14;
- WebRtc_Word16 tmpQQlo, tmpQQhi;
- WebRtc_Word32 tmp32;
- WebRtc_Word16 tmp16,tmp16b;
+ int16_t tmpQQlo, tmpQQhi;
+ int32_t tmp32;
+ int16_t tmp16,tmp16b;
- WebRtc_Word16 polyHI[ORDERHI+1];
- WebRtc_Word16 rcQ15_lo[ORDERLO], rcQ15_hi[ORDERHI];
+ int16_t polyHI[ORDERHI+1];
+ int16_t rcQ15_lo[ORDERLO], rcQ15_hi[ORDERHI];
- WebRtc_Word16 DataLoQ6[WINLEN], DataHiQ6[WINLEN];
- WebRtc_Word32 corrloQQ[ORDERLO+2];
- WebRtc_Word32 corrhiQQ[ORDERHI+1];
- WebRtc_Word32 corrlo2QQ[ORDERLO+1];
- WebRtc_Word16 scale;
- WebRtc_Word16 QdomLO, QdomHI, newQdomHI, newQdomLO;
+ int16_t DataLoQ6[WINLEN], DataHiQ6[WINLEN];
+ int32_t corrloQQ[ORDERLO+2];
+ int32_t corrhiQQ[ORDERHI+1];
+ int32_t corrlo2QQ[ORDERLO+1];
+ int16_t scale;
+ int16_t QdomLO, QdomHI, newQdomHI, newQdomLO;
- WebRtc_Word32 res_nrgQQ;
- WebRtc_Word32 sqrt_nrg;
+ int32_t res_nrgQQ;
+ int32_t sqrt_nrg;
/* less-noise-at-low-frequencies factor */
- WebRtc_Word16 aaQ14;
+ int16_t aaQ14;
/* Multiplication with 1/sqrt(12) ~= 0.28901734104046 can be done by conversion to
Q15, i.e. round(0.28901734104046*32768) = 9471, and use 9471/32768.0 ~= 0.289032
*/
- WebRtc_Word16 snrq;
+ int16_t snrq;
int shft;
- WebRtc_Word16 tmp16a;
- WebRtc_Word32 tmp32a, tmp32b, tmp32c;
+ int16_t tmp16a;
+ int32_t tmp32a, tmp32b, tmp32c;
- WebRtc_Word16 a_LOQ11[ORDERLO+1];
- WebRtc_Word16 k_vecloQ15[ORDERLO];
- WebRtc_Word16 a_HIQ12[ORDERHI+1];
- WebRtc_Word16 k_vechiQ15[ORDERHI];
+ int16_t a_LOQ11[ORDERLO+1];
+ int16_t k_vecloQ15[ORDERLO];
+ int16_t a_HIQ12[ORDERHI+1];
+ int16_t k_vechiQ15[ORDERHI];
- WebRtc_Word16 stab;
+ int16_t stab;
snrq=snrQ10;
/* SNR= C * 2 ^ (D * snrq) ; C=0.289, D=0.05*log2(10)=0.166 (~=172 in Q10)*/
- tmp16 = (WebRtc_Word16) WEBRTC_SPL_MUL_16_16_RSFT(snrq, 172, 10); // Q10
+ tmp16 = (int16_t) WEBRTC_SPL_MUL_16_16_RSFT(snrq, 172, 10); // Q10
tmp16b = exp2_Q10_T(tmp16); // Q10
- snrq = (WebRtc_Word16) WEBRTC_SPL_MUL_16_16_RSFT(tmp16b, 285, 10); // Q10
+ snrq = (int16_t) WEBRTC_SPL_MUL_16_16_RSFT(tmp16b, 285, 10); // Q10
/* change quality level depending on pitch gains and level fluctuations */
WebRtcIsacfix_GetVars(inLoQ0, pitchGains_Q12, &(maskdata->OldEnergy), &varscaleQ14);
@@ -604,17 +604,17 @@
With 0.35 in Q16 (0.35 ~= 22938/65536.0 = 0.3500061) and varscaleQ14 in Q14,
we get Q16*Q14>>16 = Q14
*/
- aaQ14 = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(
+ aaQ14 = (int16_t) WEBRTC_SPL_RSHIFT_W32(
(WEBRTC_SPL_MUL_16_16(22938, (8192 + WEBRTC_SPL_RSHIFT_W32(varscaleQ14, 1)))
- + ((WebRtc_Word32)32768)), 16);
+ + ((int32_t)32768)), 16);
/* Calculate tmp = (1.0 + aa*aa); in Q12 */
- tmp16 = (WebRtc_Word16) WEBRTC_SPL_MUL_16_16_RSFT(aaQ14, aaQ14, 15); //Q14*Q14>>15 = Q13
+ tmp16 = (int16_t) WEBRTC_SPL_MUL_16_16_RSFT(aaQ14, aaQ14, 15); //Q14*Q14>>15 = Q13
tmpQQlo = 4096 + WEBRTC_SPL_RSHIFT_W16(tmp16, 1); // Q12 + Q13>>1 = Q12
/* Calculate tmp = (1.0+aa) * (1.0+aa); */
tmp16 = 8192 + WEBRTC_SPL_RSHIFT_W16(aaQ14, 1); // 1+a in Q13
- tmpQQhi = (WebRtc_Word16) WEBRTC_SPL_MUL_16_16_RSFT(tmp16, tmp16, 14); //Q13*Q13>>14 = Q12
+ tmpQQhi = (int16_t) WEBRTC_SPL_MUL_16_16_RSFT(tmp16, tmp16, 14); //Q13*Q13>>14 = Q12
/* replace data in buffer by new look-ahead data */
for (pos1 = 0; pos1 < QLOOKAHEAD; pos1++) {
@@ -627,18 +627,18 @@
for (pos1 = 0; pos1 < WINLEN - UPDATE/2; pos1++) {
maskdata->DataBufferLoQ0[pos1] = maskdata->DataBufferLoQ0[pos1 + UPDATE/2];
maskdata->DataBufferHiQ0[pos1] = maskdata->DataBufferHiQ0[pos1 + UPDATE/2];
- DataLoQ6[pos1] = (WebRtc_Word16) WEBRTC_SPL_MUL_16_16_RSFT(
+ DataLoQ6[pos1] = (int16_t) WEBRTC_SPL_MUL_16_16_RSFT(
maskdata->DataBufferLoQ0[pos1], kWindowAutocorr[pos1], 15); // Q0*Q21>>15 = Q6
- DataHiQ6[pos1] = (WebRtc_Word16) WEBRTC_SPL_MUL_16_16_RSFT(
+ DataHiQ6[pos1] = (int16_t) WEBRTC_SPL_MUL_16_16_RSFT(
maskdata->DataBufferHiQ0[pos1], kWindowAutocorr[pos1], 15); // Q0*Q21>>15 = Q6
}
- pos2 = (WebRtc_Word16)(WEBRTC_SPL_MUL_16_16(k, UPDATE)/2);
+ pos2 = (int16_t)(WEBRTC_SPL_MUL_16_16(k, UPDATE)/2);
for (n = 0; n < UPDATE/2; n++, pos1++) {
maskdata->DataBufferLoQ0[pos1] = inLoQ0[QLOOKAHEAD + pos2];
maskdata->DataBufferHiQ0[pos1] = inHiQ0[pos2++];
- DataLoQ6[pos1] = (WebRtc_Word16) WEBRTC_SPL_MUL_16_16_RSFT(
+ DataLoQ6[pos1] = (int16_t) WEBRTC_SPL_MUL_16_16_RSFT(
maskdata->DataBufferLoQ0[pos1], kWindowAutocorr[pos1], 15); // Q0*Q21>>15 = Q6
- DataHiQ6[pos1] = (WebRtc_Word16) WEBRTC_SPL_MUL_16_16_RSFT(
+ DataHiQ6[pos1] = (int16_t) WEBRTC_SPL_MUL_16_16_RSFT(
maskdata->DataBufferHiQ0[pos1], kWindowAutocorr[pos1], 15); // Q0*Q21>>15 = Q6
}
@@ -700,9 +700,9 @@
/* Calculate corrlo2[0] += 9.5367431640625e-7; and
corrhi[0] += 9.5367431640625e-7, where the constant is 1/2^20 */
- tmp32 = WEBRTC_SPL_SHIFT_W32((WebRtc_Word32) 1, QdomLO-20);
+ tmp32 = WEBRTC_SPL_SHIFT_W32((int32_t) 1, QdomLO-20);
corrlo2QQ[0] += tmp32;
- tmp32 = WEBRTC_SPL_SHIFT_W32((WebRtc_Word32) 1, QdomHI-20);
+ tmp32 = WEBRTC_SPL_SHIFT_W32((int32_t) 1, QdomHI-20);
corrhiQQ[0] += tmp32;
/* corrlo2QQ is in Q(QdomLO) and corrhiQQ is in Q(QdomHI) before the following
@@ -721,10 +721,10 @@
newQdomLO = QdomLO;
for (n = 0; n <= ORDERLO; n++) {
- WebRtc_Word32 tmp, tmpB, tmpCorr;
- WebRtc_Word16 alpha=328; //0.01 in Q15
- WebRtc_Word16 beta=324; //(1-0.01)*0.01=0.0099 in Q15
- WebRtc_Word16 gamma=32440; //(1-0.01)=0.99 in Q15
+ int32_t tmp, tmpB, tmpCorr;
+ int16_t alpha=328; //0.01 in Q15
+ int16_t beta=324; //(1-0.01)*0.01=0.0099 in Q15
+ int16_t gamma=32440; //(1-0.01)=0.99 in Q15
if (maskdata->CorrBufLoQQ[n] != 0) {
shMem=WebRtcSpl_NormW32(maskdata->CorrBufLoQQ[n]);
@@ -768,10 +768,10 @@
newQdomHI = QdomHI;
for (n = 0; n <= ORDERHI; n++) {
- WebRtc_Word32 tmp, tmpB, tmpCorr;
- WebRtc_Word16 alpha=328; //0.01 in Q15
- WebRtc_Word16 beta=324; //(1-0.01)*0.01=0.0099 in Q15
- WebRtc_Word16 gamma=32440; //(1-0.01)=0.99 in Q15
+ int32_t tmp, tmpB, tmpCorr;
+ int16_t alpha=328; //0.01 in Q15
+ int16_t beta=324; //(1-0.01)*0.01=0.0099 in Q15
+ int16_t gamma=32440; //(1-0.01)=0.99 in Q15
if (maskdata->CorrBufHiQQ[n] != 0) {
shMem=WebRtcSpl_NormW32(maskdata->CorrBufHiQQ[n]);
sh = QdomHI - maskdata->CorrBufHiQdom[n];
@@ -834,13 +834,13 @@
/* bandwidth expansion */
for (n = 1; n <= ORDERLO; n++) {
- a_LOQ11[n] = (WebRtc_Word16) WEBRTC_SPL_MUL_16_16_RSFT_WITH_FIXROUND(kPolyVecLo[n-1], a_LOQ11[n]);
+ a_LOQ11[n] = (int16_t) WEBRTC_SPL_MUL_16_16_RSFT_WITH_FIXROUND(kPolyVecLo[n-1], a_LOQ11[n]);
}
polyHI[0] = a_HIQ12[0];
for (n = 1; n <= ORDERHI; n++) {
- a_HIQ12[n] = (WebRtc_Word16) WEBRTC_SPL_MUL_16_16_RSFT_WITH_FIXROUND(kPolyVecHi[n-1], a_HIQ12[n]);
+ a_HIQ12[n] = (int16_t) WEBRTC_SPL_MUL_16_16_RSFT_WITH_FIXROUND(kPolyVecHi[n-1], a_HIQ12[n]);
polyHI[n] = a_HIQ12[n];
}
@@ -876,7 +876,7 @@
//tmp32a=WEBRTC_SPL_MUL_16_16_RSFT(varscaleQ14, H_T_HQ19, 17); // Q14
- tmp32a=WEBRTC_SPL_RSHIFT_W32((WebRtc_Word32) varscaleQ14,1); // H_T_HQ19=65536 (16-17=-1) ssh= WEBRTC_SPL_RSHIFT_W16(sh_lo, 1); // sqrt_nrg is in Qssh
+ tmp32a=WEBRTC_SPL_RSHIFT_W32((int32_t) varscaleQ14,1); // H_T_HQ19=65536 (16-17=-1) ssh= WEBRTC_SPL_RSHIFT_W16(sh_lo, 1); // sqrt_nrg is in Qssh
ssh= WEBRTC_SPL_RSHIFT_W16(sh_lo, 1); // sqrt_nrg is in Qssh
sh = ssh - 14;
tmp32b = WEBRTC_SPL_SHIFT_W32(tmp32a, sh); // Q14->Qssh
@@ -885,7 +885,7 @@
sh = WebRtcSpl_NormW32(tmp32c);
shft = 16 - sh;
- tmp16a = (WebRtc_Word16) WEBRTC_SPL_SHIFT_W32(tmp32c, -shft); // Q(ssh-shft) (denominator)
+ tmp16a = (int16_t) WEBRTC_SPL_SHIFT_W32(tmp32c, -shft); // Q(ssh-shft) (denominator)
tmp32b = WebRtcSpl_DivW32W16(tmp32a, tmp16a); // Q(24-ssh+shft)
sh = ssh-shft-7;
@@ -893,13 +893,13 @@
}
else
{
- *gain_lo_hiQ17 = 100; //(WebRtc_Word32)WEBRTC_SPL_LSHIFT_W32( (WebRtc_Word32)1, 17); // Gains in Q17
+ *gain_lo_hiQ17 = 100; //(int32_t)WEBRTC_SPL_LSHIFT_W32( (int32_t)1, 17); // Gains in Q17
}
gain_lo_hiQ17++;
/* copy coefficients to output array */
for (n = 0; n < ORDERLO; n++) {
- *lo_coeffQ15 = (WebRtc_Word16) (rcQ15_lo[n]);
+ *lo_coeffQ15 = (int16_t) (rcQ15_lo[n]);
lo_coeffQ15++;
}
/* residual energy */
@@ -925,7 +925,7 @@
/* hi_coeff = varscale * S_N_R / (sqrt_nrg + varscale * H_T_H); */
//tmp32a=WEBRTC_SPL_MUL_16_16_RSFT(varscaleQ14, H_T_HQ19, 17); // Q14
- tmp32a=WEBRTC_SPL_RSHIFT_W32((WebRtc_Word32) varscaleQ14,1); // H_T_HQ19=65536 (16-17=-1)
+ tmp32a=WEBRTC_SPL_RSHIFT_W32((int32_t) varscaleQ14,1); // H_T_HQ19=65536 (16-17=-1)
ssh= WEBRTC_SPL_RSHIFT_W32(sh_hi, 1); // sqrt_nrg is in Qssh
sh = ssh - 14;
@@ -935,7 +935,7 @@
sh = WebRtcSpl_NormW32(tmp32c);
shft = 16 - sh;
- tmp16a = (WebRtc_Word16) WEBRTC_SPL_SHIFT_W32(tmp32c, -shft); // Q(ssh-shft) (denominator)
+ tmp16a = (int16_t) WEBRTC_SPL_SHIFT_W32(tmp32c, -shft); // Q(ssh-shft) (denominator)
tmp32b = WebRtcSpl_DivW32W16(tmp32a, tmp16a); // Q(24-ssh+shft)
sh = ssh-shft-7;
@@ -943,7 +943,7 @@
}
else
{
- *gain_lo_hiQ17 = 100; //(WebRtc_Word32)WEBRTC_SPL_LSHIFT_W32( (WebRtc_Word32)1, 17); // Gains in Q17
+ *gain_lo_hiQ17 = 100; //(int32_t)WEBRTC_SPL_LSHIFT_W32( (int32_t)1, 17); // Gains in Q17
}
gain_lo_hiQ17++;
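
The values written through gain_lo_hiQ17 above are ordinary Q17 fixed point (value = q17 / 2^17); the fallback constant 100, for instance, corresponds to roughly 0.00076. Tiny conversion helpers for reference; the names are mine, not part of the codec.

    #include <stdint.h>
    #include <stdio.h>

    /* Q17 <-> double conversion, for interpreting gain_lo_hiQ17 values. */
    static double  q17_to_double(int32_t q17) { return q17 / 131072.0; }
    static int32_t double_to_q17(double v)
    {
        return (int32_t)(v * 131072.0 + (v >= 0.0 ? 0.5 : -0.5));
    }

    int main(void)
    {
        int32_t g = double_to_q17(0.75);
        printf("q17=%d  back=%f\n", g, q17_to_double(g));  /* 98304, 0.750000 */
        return 0;
    }
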
diff --git a/webrtc/modules/audio_coding/codecs/isac/fix/source/lpc_masking_model.h b/webrtc/modules/audio_coding/codecs/isac/fix/source/lpc_masking_model.h
index e06a207..72e0cfc 100644
--- a/webrtc/modules/audio_coding/codecs/isac/fix/source/lpc_masking_model.h
+++ b/webrtc/modules/audio_coding/codecs/isac/fix/source/lpc_masking_model.h
@@ -24,19 +24,19 @@
#include "structs.h"
-void WebRtcIsacfix_GetVars(const WebRtc_Word16 *input,
- const WebRtc_Word16 *pitchGains_Q12,
- WebRtc_UWord32 *oldEnergy,
- WebRtc_Word16 *varscale);
+void WebRtcIsacfix_GetVars(const int16_t *input,
+ const int16_t *pitchGains_Q12,
+ uint32_t *oldEnergy,
+ int16_t *varscale);
-void WebRtcIsacfix_GetLpcCoef(WebRtc_Word16 *inLoQ0,
- WebRtc_Word16 *inHiQ0,
+void WebRtcIsacfix_GetLpcCoef(int16_t *inLoQ0,
+ int16_t *inHiQ0,
MaskFiltstr_enc *maskdata,
- WebRtc_Word16 snrQ10,
- const WebRtc_Word16 *pitchGains_Q12,
- WebRtc_Word32 *gain_lo_hiQ17,
- WebRtc_Word16 *lo_coeffQ15,
- WebRtc_Word16 *hi_coeffQ15);
+ int16_t snrQ10,
+ const int16_t *pitchGains_Q12,
+ int32_t *gain_lo_hiQ17,
+ int16_t *lo_coeffQ15,
+ int16_t *hi_coeffQ15);
typedef int32_t (*CalculateResidualEnergy)(int lpc_order,
int32_t q_val_corr,
diff --git a/webrtc/modules/audio_coding/codecs/isac/fix/source/lpc_tables.c b/webrtc/modules/audio_coding/codecs/isac/fix/source/lpc_tables.c
index 90cc9af..bc0f129 100644
--- a/webrtc/modules/audio_coding/codecs/isac/fix/source/lpc_tables.c
+++ b/webrtc/modules/audio_coding/codecs/isac/fix/source/lpc_tables.c
@@ -20,11 +20,11 @@
#include "lpc_tables.h"
/* indices of KLT coefficients used */
-const WebRtc_UWord16 WebRtcIsacfix_kSelIndGain[12] = {
+const uint16_t WebRtcIsacfix_kSelIndGain[12] = {
0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
10, 11};
-const WebRtc_UWord16 WebRtcIsacfix_kSelIndShape[108] = {
+const uint16_t WebRtcIsacfix_kSelIndShape[108] = {
0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
10, 11, 12, 13, 14, 15, 16, 17, 18, 19,
20, 21, 22, 23, 24, 25, 26, 27, 28, 29,
@@ -39,26 +39,26 @@
};
/* cdf array for model indicator */
-const WebRtc_UWord16 WebRtcIsacfix_kModelCdf[4] = {
+const uint16_t WebRtcIsacfix_kModelCdf[4] = {
0, 15434, 37548, 65535
};
/* pointer to cdf array for model indicator */
-const WebRtc_UWord16 *WebRtcIsacfix_kModelCdfPtr[1] = {
+const uint16_t *WebRtcIsacfix_kModelCdfPtr[1] = {
WebRtcIsacfix_kModelCdf
};
/* initial cdf index for decoder of model indicator */
-const WebRtc_UWord16 WebRtcIsacfix_kModelInitIndex[1] = {
+const uint16_t WebRtcIsacfix_kModelInitIndex[1] = {
1
};
/* offset to go from rounded value to quantization index */
-const WebRtc_Word16 WebRtcIsacfix_kQuantMinGain[12] ={
+const int16_t WebRtcIsacfix_kQuantMinGain[12] ={
3, 6, 4, 6, 6, 9, 5, 16, 11, 34, 32, 47
};
-const WebRtc_Word16 WebRtcIsacfix_kQuantMinShape[108] = {
+const int16_t WebRtcIsacfix_kQuantMinShape[108] = {
0, 0, 0, 0, 0, 0, 0, 0, 0, 1,
1, 1, 1, 1, 2, 2, 2, 3, 0, 0,
0, 0, 1, 0, 0, 0, 0, 1, 1, 1,
@@ -73,11 +73,11 @@
};
/* maximum quantization index */
-const WebRtc_UWord16 WebRtcIsacfix_kMaxIndGain[12] = {
+const uint16_t WebRtcIsacfix_kMaxIndGain[12] = {
6, 12, 8, 14, 10, 19, 12, 31, 22, 56, 52, 138
};
-const WebRtc_UWord16 WebRtcIsacfix_kMaxIndShape[108] = {
+const uint16_t WebRtcIsacfix_kMaxIndShape[108] = {
0, 0, 0, 0, 0, 0, 0, 0, 0, 1,
2, 2, 2, 2, 4, 4, 5, 6, 0, 0,
0, 0, 1, 0, 0, 0, 0, 1, 2, 2,
@@ -92,13 +92,13 @@
};
/* index offset */
-const WebRtc_UWord16 WebRtcIsacfix_kOffsetGain[3][12] = {
+const uint16_t WebRtcIsacfix_kOffsetGain[3][12] = {
{ 0, 7, 20, 29, 44, 55, 75, 88, 120, 143, 200, 253},
{ 0, 7, 19, 27, 42, 53, 73, 86, 117, 140, 197, 249},
{ 0, 7, 20, 28, 44, 55, 75, 89, 121, 145, 202, 257}
};
-const WebRtc_UWord16 WebRtcIsacfix_kOffsetShape[3][108] = {
+const uint16_t WebRtcIsacfix_kOffsetShape[3][108] = {
{
0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
11, 14, 17, 20, 23, 28, 33, 39, 46, 47,
@@ -141,13 +141,13 @@
};
/* initial cdf index for KLT coefficients */
-const WebRtc_UWord16 WebRtcIsacfix_kInitIndexGain[3][12] = {
+const uint16_t WebRtcIsacfix_kInitIndexGain[3][12] = {
{ 3, 6, 4, 7, 5, 10, 6, 16, 11, 28, 26, 69},
{ 3, 6, 4, 7, 5, 10, 6, 15, 11, 28, 26, 69},
{ 3, 6, 4, 8, 5, 10, 7, 16, 12, 28, 27, 70}
};
-const WebRtc_UWord16 WebRtcIsacfix_kInitIndexShape[3][108] = {
+const uint16_t WebRtcIsacfix_kInitIndexShape[3][108] = {
{
0, 0, 0, 0, 0, 0, 0, 0, 0, 1,
1, 1, 1, 1, 2, 2, 3, 3, 0, 0,
@@ -190,11 +190,11 @@
};
/* offsets for quantizer representation levels*/
-const WebRtc_UWord16 WebRtcIsacfix_kOfLevelsGain[3] = {
+const uint16_t WebRtcIsacfix_kOfLevelsGain[3] = {
0, 392, 779
};
-const WebRtc_UWord16 WebRtcIsacfix_kOfLevelsShape[3] = {
+const uint16_t WebRtcIsacfix_kOfLevelsShape[3] = {
0, 578, 1152
};
@@ -202,7 +202,7 @@
-const WebRtc_Word32 WebRtcIsacfix_kLevelsGainQ17[1176] = {
+const int32_t WebRtcIsacfix_kLevelsGainQ17[1176] = {
-364547,-231664,-102123,-573,104807,238257,368823,-758583,-640135,-510291
,-377662,-252785,-113177,2627,112906,248601,389461,522691,644517,763974
,-538963,-368179,-245823,-106095,-890,104299,241111,350730,493190,-800763
@@ -325,7 +325,7 @@
-const WebRtc_Word16 WebRtcIsacfix_kLevelsShapeQ10[1735] = {
+const int16_t WebRtcIsacfix_kLevelsShapeQ10[1735] = {
0, 0, -1, 0, 0, 1, 0, 1, 0, -821
, 1, -763, -1, 656, -620, 0, 633, -636, 4, 615
, -630, 1, 649, -1773, -670, 5, 678, 1810, -1876, -676
@@ -503,7 +503,7 @@
};
/* cdf tables for quantizer indices */
-const WebRtc_UWord16 WebRtcIsacfix_kCdfGain[1212] = {
+const uint16_t WebRtcIsacfix_kCdfGain[1212] = {
0, 13, 301, 3730, 61784, 65167, 65489, 65535, 0, 17,
142, 314, 929, 2466, 7678, 56450, 63463, 64740, 65204, 65426,
65527, 65535, 0, 8, 100, 724, 6301, 60105, 65125, 65510,
@@ -628,7 +628,7 @@
65533, 65535
};
-const WebRtc_UWord16 WebRtcIsacfix_kCdfShape[2059] = {
+const uint16_t WebRtcIsacfix_kCdfShape[2059] = {
0, 65535, 0, 65535, 0, 65535, 0, 65535, 0, 65535,
0, 65535, 0, 65535, 0, 65535, 0, 65535, 0, 4,
65535, 0, 8, 65514, 65535, 0, 29, 65481, 65535, 0,
@@ -838,7 +838,7 @@
};
/* pointers to cdf tables for quantizer indices */
-const WebRtc_UWord16 *WebRtcIsacfix_kCdfGainPtr[3][12] = {
+const uint16_t *WebRtcIsacfix_kCdfGainPtr[3][12] = {
{ WebRtcIsacfix_kCdfGain +0 +0, WebRtcIsacfix_kCdfGain +0 +8, WebRtcIsacfix_kCdfGain +0 +22,
WebRtcIsacfix_kCdfGain +0 +32, WebRtcIsacfix_kCdfGain +0 +48, WebRtcIsacfix_kCdfGain +0 +60,
WebRtcIsacfix_kCdfGain +0 +81, WebRtcIsacfix_kCdfGain +0 +95, WebRtcIsacfix_kCdfGain +0 +128,
@@ -856,7 +856,7 @@
}
};
-const WebRtc_UWord16 *WebRtcIsacfix_kCdfShapePtr[3][108] = {
+const uint16_t *WebRtcIsacfix_kCdfShapePtr[3][108] = {
{ WebRtcIsacfix_kCdfShape +0 +0, WebRtcIsacfix_kCdfShape +0 +2, WebRtcIsacfix_kCdfShape +0 +4,
WebRtcIsacfix_kCdfShape +0 +6, WebRtcIsacfix_kCdfShape +0 +8, WebRtcIsacfix_kCdfShape +0 +10,
WebRtcIsacfix_kCdfShape +0 +12, WebRtcIsacfix_kCdfShape +0 +14, WebRtcIsacfix_kCdfShape +0 +16,
@@ -972,7 +972,7 @@
/* code length for all coefficients using different models */
-const WebRtc_Word16 WebRtcIsacfix_kCodeLenGainQ11[392] = {
+const int16_t WebRtcIsacfix_kCodeLenGainQ11[392] = {
25189, 16036, 8717, 358, 8757, 15706, 21456, 24397, 18502, 17559
, 13794, 11088, 7480, 873, 6603, 11636, 14627, 16805, 19132, 26624
, 26624, 19408, 13751, 7280, 583, 7591, 15178, 23773, 28672, 25189
@@ -1015,7 +1015,7 @@
, 30720, 30720
};
-const WebRtc_Word16 WebRtcIsacfix_kCodeLenShapeQ11[577] = {
+const int16_t WebRtcIsacfix_kCodeLenShapeQ11[577] = {
0, 0, 0, 0, 0, 0, 0, 0, 0, 28672
, 0, 26624, 1, 23773, 22819, 4, 20982, 18598, 10, 19282
, 16587, 22, 16442, 26624, 13126, 60, 14245, 26624, 26624, 12736
@@ -1077,7 +1077,7 @@
};
/* left KLT transforms */
-const WebRtc_Word16 WebRtcIsacfix_kT1GainQ15[3][4] = {
+const int16_t WebRtcIsacfix_kT1GainQ15[3][4] = {
{ -26130, 19773, 19773, 26130 },
{ -26664, 19046, 19046, 26664 },
{ -23538, 22797, 22797, 23538 }
@@ -1085,7 +1085,7 @@
-const WebRtc_Word16 WebRtcIsacfix_kT1ShapeQ15[3][324] = {
+const int16_t WebRtcIsacfix_kT1ShapeQ15[3][324] = {
{ 52,16,168,7,439,-138,-89,306,671,882,
157,1301,291,1598,-3571,-1943,-1119,32404,96,-12,
379,-64,-307,345,-836,539,1045,2541,-2865,-992,
@@ -1191,7 +1191,7 @@
};
/* right KLT transforms */
-const WebRtc_Word16 WebRtcIsacfix_kT2GainQ15[3][36] = {
+const int16_t WebRtcIsacfix_kT2GainQ15[3][36] = {
{ 4775, -14892, 20313, -17104, 10533, -3613, -6782, 16044, -8889,
-11019, 21330, -10720, 13193, -15678, -11101, 14461, 12250, -13096,
-16951, 2167, 16066, 15569, -702, -16754, -19195, -12823, -4321,
@@ -1209,7 +1209,7 @@
}
};
-const WebRtc_Word16 WebRtcIsacfix_kT2ShapeQ15[3][36] = {
+const int16_t WebRtcIsacfix_kT2ShapeQ15[3][36] = {
{ 4400, -11512, 17205, -19470, 14770, -5345, 9784, -19222, 11228,
6842, -18371, 9909, 14191, -13496, -11563, 14015, 11827, -14839,
-15439, 948, 17802, 14827, -2053, -17132, 18723, 14516, 4135,
@@ -1228,7 +1228,7 @@
};
/* means of log gains and LAR coefficients*/
-const WebRtc_Word16 WebRtcIsacfix_kMeansGainQ8[3][12] = {
+const int16_t WebRtcIsacfix_kMeansGainQ8[3][12] = {
{ -1758, -1370, -1758, -1373, -1757, -1375,
-1758, -1374, -1758, -1373, -1755, -1370
},
@@ -1241,7 +1241,7 @@
};
-const WebRtc_Word32 WebRtcIsacfix_kMeansShapeQ17[3][108] = {
+const int32_t WebRtcIsacfix_kMeansShapeQ17[3][108] = {
{ -119581, 34418, -44193, 11112, -4428, 18906, 9222, 8068, 1953, 5425,
1871, 1689, 109933, 33751, 10471, -2566, 1090, 2320, -119219, 33728,
-43759, 11450, -4870, 19117, 9174, 8037, 1972, 5331, 1872, 1843,
diff --git a/webrtc/modules/audio_coding/codecs/isac/fix/source/lpc_tables.h b/webrtc/modules/audio_coding/codecs/isac/fix/source/lpc_tables.h
index 4f2e0e7..587bcd4 100644
--- a/webrtc/modules/audio_coding/codecs/isac/fix/source/lpc_tables.h
+++ b/webrtc/modules/audio_coding/codecs/isac/fix/source/lpc_tables.h
@@ -22,77 +22,77 @@
/* indices of KLT coefficients used */
-extern const WebRtc_UWord16 WebRtcIsacfix_kSelIndGain[12];
+extern const uint16_t WebRtcIsacfix_kSelIndGain[12];
-extern const WebRtc_UWord16 WebRtcIsacfix_kSelIndShape[108];
+extern const uint16_t WebRtcIsacfix_kSelIndShape[108];
/* cdf array for model indicator */
-extern const WebRtc_UWord16 WebRtcIsacfix_kModelCdf[KLT_NUM_MODELS+1];
+extern const uint16_t WebRtcIsacfix_kModelCdf[KLT_NUM_MODELS+1];
/* pointer to cdf array for model indicator */
-extern const WebRtc_UWord16 *WebRtcIsacfix_kModelCdfPtr[1];
+extern const uint16_t *WebRtcIsacfix_kModelCdfPtr[1];
/* initial cdf index for decoder of model indicator */
-extern const WebRtc_UWord16 WebRtcIsacfix_kModelInitIndex[1];
+extern const uint16_t WebRtcIsacfix_kModelInitIndex[1];
/* offset to go from rounded value to quantization index */
-extern const WebRtc_Word16 WebRtcIsacfix_kQuantMinGain[12];
+extern const int16_t WebRtcIsacfix_kQuantMinGain[12];
-extern const WebRtc_Word16 WebRtcIsacfix_kQuantMinShape[108];
+extern const int16_t WebRtcIsacfix_kQuantMinShape[108];
/* maximum quantization index */
-extern const WebRtc_UWord16 WebRtcIsacfix_kMaxIndGain[12];
+extern const uint16_t WebRtcIsacfix_kMaxIndGain[12];
-extern const WebRtc_UWord16 WebRtcIsacfix_kMaxIndShape[108];
+extern const uint16_t WebRtcIsacfix_kMaxIndShape[108];
/* index offset */
-extern const WebRtc_UWord16 WebRtcIsacfix_kOffsetGain[KLT_NUM_MODELS][12];
+extern const uint16_t WebRtcIsacfix_kOffsetGain[KLT_NUM_MODELS][12];
-extern const WebRtc_UWord16 WebRtcIsacfix_kOffsetShape[KLT_NUM_MODELS][108];
+extern const uint16_t WebRtcIsacfix_kOffsetShape[KLT_NUM_MODELS][108];
/* initial cdf index for KLT coefficients */
-extern const WebRtc_UWord16 WebRtcIsacfix_kInitIndexGain[KLT_NUM_MODELS][12];
+extern const uint16_t WebRtcIsacfix_kInitIndexGain[KLT_NUM_MODELS][12];
-extern const WebRtc_UWord16 WebRtcIsacfix_kInitIndexShape[KLT_NUM_MODELS][108];
+extern const uint16_t WebRtcIsacfix_kInitIndexShape[KLT_NUM_MODELS][108];
/* offsets for quantizer representation levels */
-extern const WebRtc_UWord16 WebRtcIsacfix_kOfLevelsGain[3];
+extern const uint16_t WebRtcIsacfix_kOfLevelsGain[3];
-extern const WebRtc_UWord16 WebRtcIsacfix_kOfLevelsShape[3];
+extern const uint16_t WebRtcIsacfix_kOfLevelsShape[3];
/* quantizer representation levels */
-extern const WebRtc_Word32 WebRtcIsacfix_kLevelsGainQ17[1176];
+extern const int32_t WebRtcIsacfix_kLevelsGainQ17[1176];
-extern const WebRtc_Word16 WebRtcIsacfix_kLevelsShapeQ10[1735];
+extern const int16_t WebRtcIsacfix_kLevelsShapeQ10[1735];
/* cdf tables for quantizer indices */
-extern const WebRtc_UWord16 WebRtcIsacfix_kCdfGain[1212];
+extern const uint16_t WebRtcIsacfix_kCdfGain[1212];
-extern const WebRtc_UWord16 WebRtcIsacfix_kCdfShape[2059];
+extern const uint16_t WebRtcIsacfix_kCdfShape[2059];
/* pointers to cdf tables for quantizer indices */
-extern const WebRtc_UWord16 *WebRtcIsacfix_kCdfGainPtr[KLT_NUM_MODELS][12];
+extern const uint16_t *WebRtcIsacfix_kCdfGainPtr[KLT_NUM_MODELS][12];
-extern const WebRtc_UWord16 *WebRtcIsacfix_kCdfShapePtr[KLT_NUM_MODELS][108];
+extern const uint16_t *WebRtcIsacfix_kCdfShapePtr[KLT_NUM_MODELS][108];
/* code length for all coefficients using different models */
-extern const WebRtc_Word16 WebRtcIsacfix_kCodeLenGainQ11[392];
+extern const int16_t WebRtcIsacfix_kCodeLenGainQ11[392];
-extern const WebRtc_Word16 WebRtcIsacfix_kCodeLenShapeQ11[577];
+extern const int16_t WebRtcIsacfix_kCodeLenShapeQ11[577];
/* left KLT transforms */
-extern const WebRtc_Word16 WebRtcIsacfix_kT1GainQ15[KLT_NUM_MODELS][4];
+extern const int16_t WebRtcIsacfix_kT1GainQ15[KLT_NUM_MODELS][4];
-extern const WebRtc_Word16 WebRtcIsacfix_kT1ShapeQ15[KLT_NUM_MODELS][324];
+extern const int16_t WebRtcIsacfix_kT1ShapeQ15[KLT_NUM_MODELS][324];
/* right KLT transforms */
-extern const WebRtc_Word16 WebRtcIsacfix_kT2GainQ15[KLT_NUM_MODELS][36];
+extern const int16_t WebRtcIsacfix_kT2GainQ15[KLT_NUM_MODELS][36];
-extern const WebRtc_Word16 WebRtcIsacfix_kT2ShapeQ15[KLT_NUM_MODELS][36];
+extern const int16_t WebRtcIsacfix_kT2ShapeQ15[KLT_NUM_MODELS][36];
/* means of log gains and LAR coefficients */
-extern const WebRtc_Word16 WebRtcIsacfix_kMeansGainQ8[KLT_NUM_MODELS][12];
+extern const int16_t WebRtcIsacfix_kMeansGainQ8[KLT_NUM_MODELS][12];
-extern const WebRtc_Word32 WebRtcIsacfix_kMeansShapeQ17[3][108];
+extern const int32_t WebRtcIsacfix_kMeansShapeQ17[3][108];
#endif /* WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_LPC_TABLES_H_ */
diff --git a/webrtc/modules/audio_coding/codecs/isac/fix/source/pitch_estimator.c b/webrtc/modules/audio_coding/codecs/isac/fix/source/pitch_estimator.c
index 6af02d8..a1dced9 100644
--- a/webrtc/modules/audio_coding/codecs/isac/fix/source/pitch_estimator.c
+++ b/webrtc/modules/audio_coding/codecs/isac/fix/source/pitch_estimator.c
@@ -24,24 +24,24 @@
#include "system_wrappers/interface/compile_assert.h"
/* log2[0.2, 0.5, 0.98] in Q8 */
-static const WebRtc_Word16 kLogLagWinQ8[3] = {
+static const int16_t kLogLagWinQ8[3] = {
-594, -256, -7
};
/* [1 -0.75 0.25] in Q12 */
-static const WebRtc_Word16 kACoefQ12[3] = {
+static const int16_t kACoefQ12[3] = {
4096, -3072, 1024
};
-static __inline WebRtc_Word32 Log2Q8( WebRtc_UWord32 x ) {
+static __inline int32_t Log2Q8( uint32_t x ) {
- WebRtc_Word32 zeros, lg2;
- WebRtc_Word16 frac;
+ int32_t zeros, lg2;
+ int16_t frac;
zeros=WebRtcSpl_NormU32(x);
- frac=(WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(((WebRtc_UWord32)(WEBRTC_SPL_LSHIFT_W32(x, zeros))&0x7FFFFFFF), 23);
+ frac=(int16_t)WEBRTC_SPL_RSHIFT_W32(((uint32_t)(WEBRTC_SPL_LSHIFT_W32(x, zeros))&0x7FFFFFFF), 23);
/* log2(magn(i)) */
lg2= (WEBRTC_SPL_LSHIFT_W32((31-zeros), 8)+frac);
@@ -49,27 +49,27 @@
}
-static __inline WebRtc_Word16 Exp2Q10(WebRtc_Word16 x) { // Both in and out in Q10
+static __inline int16_t Exp2Q10(int16_t x) { // Both in and out in Q10
- WebRtc_Word16 tmp16_1, tmp16_2;
+ int16_t tmp16_1, tmp16_2;
- tmp16_2=(WebRtc_Word16)(0x0400|(x&0x03FF));
- tmp16_1=-(WebRtc_Word16)WEBRTC_SPL_RSHIFT_W16(x,10);
+ tmp16_2=(int16_t)(0x0400|(x&0x03FF));
+ tmp16_1=-(int16_t)WEBRTC_SPL_RSHIFT_W16(x,10);
if(tmp16_1>0)
- return (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W16(tmp16_2, tmp16_1);
+ return (int16_t) WEBRTC_SPL_RSHIFT_W16(tmp16_2, tmp16_1);
else
- return (WebRtc_Word16) WEBRTC_SPL_LSHIFT_W16(tmp16_2, -tmp16_1);
+ return (int16_t) WEBRTC_SPL_LSHIFT_W16(tmp16_2, -tmp16_1);
}
/* 1D parabolic interpolation . All input and output values are in Q8 */
-static __inline void Intrp1DQ8(WebRtc_Word32 *x, WebRtc_Word32 *fx, WebRtc_Word32 *y, WebRtc_Word32 *fy) {
+static __inline void Intrp1DQ8(int32_t *x, int32_t *fx, int32_t *y, int32_t *fy) {
- WebRtc_Word16 sign1=1, sign2=1;
- WebRtc_Word32 r32, q32, t32, nom32, den32;
- WebRtc_Word16 t16, tmp16, tmp16_1;
+ int16_t sign1=1, sign2=1;
+ int32_t r32, q32, t32, nom32, den32;
+ int16_t t16, tmp16, tmp16_1;
if ((fx[0]>0) && (fx[2]>0)) {
r32=fx[1]-fx[2];
@@ -85,7 +85,7 @@
/* (Signs are removed because WebRtcSpl_DivResultInQ31 can't handle negative numbers) */
t32=WebRtcSpl_DivResultInQ31(WEBRTC_SPL_MUL_32_16(nom32, sign1),WEBRTC_SPL_MUL_32_16(den32, sign2)); /* t in Q31, without signs */
- t16=(WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(t32, 23); /* Q8 */
+ t16=(int16_t)WEBRTC_SPL_RSHIFT_W32(t32, 23); /* Q8 */
t16=t16*sign1*sign2; /* t in Q8 with signs */
*y = x[0]+t16; /* Q8 */
@@ -95,9 +95,9 @@
/* fy = 0.5 * t * (t-1) * fx[0] + (1-t*t) * fx[1] + 0.5 * t * (t+1) * fx[2]; */
/* Part I: 0.5 * t * (t-1) * fx[0] */
- tmp16_1=(WebRtc_Word16)WEBRTC_SPL_MUL_16_16(t16,t16); /* Q8*Q8=Q16 */
+ tmp16_1=(int16_t)WEBRTC_SPL_MUL_16_16(t16,t16); /* Q8*Q8=Q16 */
tmp16_1 = WEBRTC_SPL_RSHIFT_W16(tmp16_1,2); /* Q16>>2 = Q14 */
- t16 = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16(t16, 64); /* Q8<<6 = Q14 */
+ t16 = (int16_t)WEBRTC_SPL_MUL_16_16(t16, 64); /* Q8<<6 = Q14 */
tmp16 = tmp16_1-t16;
*fy = WEBRTC_SPL_MUL_16_32_RSFT15(tmp16, fx[0]); /* (Q14 * Q8 >>15)/2 = Q8 */
@@ -115,10 +115,10 @@
}
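
Intrp1DQ8 above is 3-point parabolic peak interpolation in Q8. In floating point, with samples at unit spacing, it reduces to the following; this is reference only, since the fixed-point routine carries signs separately because WebRtcSpl_DivResultInQ31 cannot take negative inputs.

    #include <stdio.h>

    /* With samples f[0], f[1], f[2] at unit spacing, the vertex offset from the
     * centre sample is t = (f[0] - f[2]) / (2*(f[0] - 2*f[1] + f[2])), and the
     * interpolated value is the Lagrange quadratic evaluated at t. */
    static void intrp1d_float(const double f[3], double *t_out, double *fpeak)
    {
        double denom = 2.0 * (f[0] - 2.0 * f[1] + f[2]);
        double t = (denom != 0.0) ? (f[0] - f[2]) / denom : 0.0;
        *t_out = t;
        *fpeak = 0.5 * t * (t - 1.0) * f[0]
               + (1.0 - t * t)       * f[1]
               + 0.5 * t * (t + 1.0) * f[2];
    }

    int main(void)
    {
        const double f[3] = {1.0, 4.0, 3.0};  /* peak lies right of the centre */
        double t, fpeak;
        intrp1d_float(f, &t, &fpeak);
        printf("offset=%f  value=%f\n", t, fpeak);  /* 0.25, 4.125 */
        return 0;
    }
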
-static void FindFour32(WebRtc_Word32 *in, WebRtc_Word16 length, WebRtc_Word16 *bestind)
+static void FindFour32(int32_t *in, int16_t length, int16_t *bestind)
{
- WebRtc_Word32 best[4]= {-100, -100, -100, -100};
- WebRtc_Word16 k;
+ int32_t best[4]= {-100, -100, -100, -100};
+ int16_t k;
for (k=0; k<length; k++) {
if (in[k] > best[3]) {
@@ -159,35 +159,35 @@
-static void PCorr2Q32(const WebRtc_Word16 *in, WebRtc_Word32 *logcorQ8)
+static void PCorr2Q32(const int16_t *in, int32_t *logcorQ8)
{
- WebRtc_Word16 scaling,n,k;
- WebRtc_Word32 ysum32,csum32, lys, lcs;
- WebRtc_Word32 oneQ8;
+ int16_t scaling,n,k;
+ int32_t ysum32,csum32, lys, lcs;
+ int32_t oneQ8;
- const WebRtc_Word16 *x, *inptr;
+ const int16_t *x, *inptr;
- oneQ8 = WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)1, 8); // 1.00 in Q8
+ oneQ8 = WEBRTC_SPL_LSHIFT_W32((int32_t)1, 8); // 1.00 in Q8
x = in + PITCH_MAX_LAG/2 + 2;
- scaling = WebRtcSpl_GetScalingSquare ((WebRtc_Word16 *) in, PITCH_CORR_LEN2, PITCH_CORR_LEN2);
+ scaling = WebRtcSpl_GetScalingSquare ((int16_t *) in, PITCH_CORR_LEN2, PITCH_CORR_LEN2);
ysum32 = 1;
csum32 = 0;
x = in + PITCH_MAX_LAG/2 + 2;
for (n = 0; n < PITCH_CORR_LEN2; n++) {
- ysum32 += WEBRTC_SPL_MUL_16_16_RSFT( (WebRtc_Word16) in[n],(WebRtc_Word16) in[n], scaling); // Q0
- csum32 += WEBRTC_SPL_MUL_16_16_RSFT((WebRtc_Word16) x[n],(WebRtc_Word16) in[n], scaling); // Q0
+ ysum32 += WEBRTC_SPL_MUL_16_16_RSFT( (int16_t) in[n],(int16_t) in[n], scaling); // Q0
+ csum32 += WEBRTC_SPL_MUL_16_16_RSFT((int16_t) x[n],(int16_t) in[n], scaling); // Q0
}
logcorQ8 += PITCH_LAG_SPAN2 - 1;
- lys=Log2Q8((WebRtc_UWord32) ysum32); // Q8
+ lys=Log2Q8((uint32_t) ysum32); // Q8
lys=WEBRTC_SPL_RSHIFT_W32(lys, 1); //sqrt(ysum);
if (csum32>0) {
- lcs=Log2Q8((WebRtc_UWord32) csum32); // 2log(csum) in Q8
+ lcs=Log2Q8((uint32_t) csum32); // 2log(csum) in Q8
if (lcs>(lys + oneQ8) ){ // csum/sqrt(ysum) > 2 in Q8
*logcorQ8 = lcs - lys; // log2(csum/sqrt(ysum))
@@ -202,8 +202,8 @@
for (k = 1; k < PITCH_LAG_SPAN2; k++) {
inptr = &in[k];
- ysum32 -= WEBRTC_SPL_MUL_16_16_RSFT( (WebRtc_Word16) in[k-1],(WebRtc_Word16) in[k-1], scaling);
- ysum32 += WEBRTC_SPL_MUL_16_16_RSFT( (WebRtc_Word16) in[PITCH_CORR_LEN2 + k - 1],(WebRtc_Word16) in[PITCH_CORR_LEN2 + k - 1], scaling);
+ ysum32 -= WEBRTC_SPL_MUL_16_16_RSFT( (int16_t) in[k-1],(int16_t) in[k-1], scaling);
+ ysum32 += WEBRTC_SPL_MUL_16_16_RSFT( (int16_t) in[PITCH_CORR_LEN2 + k - 1],(int16_t) in[PITCH_CORR_LEN2 + k - 1], scaling);
#ifdef WEBRTC_ARCH_ARM_NEON
{
@@ -244,12 +244,12 @@
logcorQ8--;
- lys=Log2Q8((WebRtc_UWord32)ysum32); // Q8
+ lys=Log2Q8((uint32_t)ysum32); // Q8
lys=WEBRTC_SPL_RSHIFT_W32(lys, 1); //sqrt(ysum);
if (csum32>0) {
- lcs=Log2Q8((WebRtc_UWord32) csum32); // 2log(csum) in Q8
+ lcs=Log2Q8((uint32_t) csum32); // 2log(csum) in Q8
if (lcs>(lys + oneQ8) ){ // csum/sqrt(ysum) > 2
*logcorQ8 = lcs - lys; // log2(csum/sqrt(ysum))
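
PCorr2Q32 above evaluates, per lag, log2(csum / sqrt(ysum)): a log-domain correlation normalized by one window's energy, with that energy updated incrementally as the window slides. A generic floating-point sketch of the idea follows; window length, lag count and indexing are placeholders, not the codec's PITCH_* constants or its exact window offsets.

    #include <assert.h>
    #include <math.h>
    #include <stdio.h>

    /* Log-domain normalized correlation per lag, with incremental energy update. */
    static void corr_per_lag(const double *in, int len, int win, int nlags,
                             double *logcor)
    {
        assert(len >= win + nlags);
        double ysum = 1e-12;                        /* energy of the lagged window */
        for (int n = 0; n < win; n++)
            ysum += in[n] * in[n];
        for (int k = 0; k < nlags; k++) {
            if (k > 0) {                            /* slide energy window by one */
                ysum -= in[k - 1] * in[k - 1];
                ysum += in[win + k - 1] * in[win + k - 1];
            }
            double csum = 0.0;
            for (int n = 0; n < win; n++)
                csum += in[n] * in[k + n];          /* reference vs. lagged window */
            logcor[k] = (csum > 0.0) ? log2(csum / sqrt(ysum)) : 0.0;
        }
    }

    int main(void)
    {
        const double x[12] = {1, 2, 1, -1, -2, -1, 1, 2, 1, -1, -2, -1};
        double logcor[4];
        corr_per_lag(x, 12, 8, 4, logcor);
        for (int k = 0; k < 4; k++)
            printf("lag %d: %f\n", k, logcor[k]);
        return 0;
    }
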
@@ -265,34 +265,34 @@
-void WebRtcIsacfix_InitialPitch(const WebRtc_Word16 *in, /* Q0 */
+void WebRtcIsacfix_InitialPitch(const int16_t *in, /* Q0 */
PitchAnalysisStruct *State,
- WebRtc_Word16 *lagsQ7 /* Q7 */
+ int16_t *lagsQ7 /* Q7 */
)
{
- WebRtc_Word16 buf_dec16[PITCH_CORR_LEN2+PITCH_CORR_STEP2+PITCH_MAX_LAG/2+2];
- WebRtc_Word32 *crrvecQ8_1,*crrvecQ8_2;
- WebRtc_Word32 cv1q[PITCH_LAG_SPAN2+2],cv2q[PITCH_LAG_SPAN2+2], peakvq[PITCH_LAG_SPAN2+2];
+ int16_t buf_dec16[PITCH_CORR_LEN2+PITCH_CORR_STEP2+PITCH_MAX_LAG/2+2];
+ int32_t *crrvecQ8_1,*crrvecQ8_2;
+ int32_t cv1q[PITCH_LAG_SPAN2+2],cv2q[PITCH_LAG_SPAN2+2], peakvq[PITCH_LAG_SPAN2+2];
int k;
- WebRtc_Word16 peaks_indq;
- WebRtc_Word16 peakiq[PITCH_LAG_SPAN2];
- WebRtc_Word32 corr;
- WebRtc_Word32 corr32, corr_max32, corr_max_o32;
- WebRtc_Word16 npkq;
- WebRtc_Word16 best4q[4]={0,0,0,0};
- WebRtc_Word32 xq[3],yq[1],fyq[1];
- WebRtc_Word32 *fxq;
- WebRtc_Word32 best_lag1q, best_lag2q;
- WebRtc_Word32 tmp32a,tmp32b,lag32,ratq;
- WebRtc_Word16 start;
- WebRtc_Word16 oldgQ12, tmp16a, tmp16b, gain_bias16,tmp16c, tmp16d, bias16;
- WebRtc_Word32 tmp32c,tmp32d, tmp32e;
- WebRtc_Word16 old_lagQ;
- WebRtc_Word32 old_lagQ8;
- WebRtc_Word32 lagsQ8[4];
+ int16_t peaks_indq;
+ int16_t peakiq[PITCH_LAG_SPAN2];
+ int32_t corr;
+ int32_t corr32, corr_max32, corr_max_o32;
+ int16_t npkq;
+ int16_t best4q[4]={0,0,0,0};
+ int32_t xq[3],yq[1],fyq[1];
+ int32_t *fxq;
+ int32_t best_lag1q, best_lag2q;
+ int32_t tmp32a,tmp32b,lag32,ratq;
+ int16_t start;
+ int16_t oldgQ12, tmp16a, tmp16b, gain_bias16,tmp16c, tmp16d, bias16;
+ int32_t tmp32c,tmp32d, tmp32e;
+ int16_t old_lagQ;
+ int32_t old_lagQ8;
+ int32_t lagsQ8[4];
old_lagQ = State->PFstr_wght.oldlagQ7; // Q7
- old_lagQ8= WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)old_lagQ,1); //Q8
+ old_lagQ8= WEBRTC_SPL_LSHIFT_W32((int32_t)old_lagQ,1); //Q8
oldgQ12= State->PFstr_wght.oldgainQ12;
@@ -301,7 +301,7 @@
/* copy old values from state buffer */
- memcpy(buf_dec16, State->dec_buffer16, WEBRTC_SPL_MUL_16_16(sizeof(WebRtc_Word16), (PITCH_CORR_LEN2+PITCH_CORR_STEP2+PITCH_MAX_LAG/2-PITCH_FRAME_LEN/2+2)));
+ memcpy(buf_dec16, State->dec_buffer16, WEBRTC_SPL_MUL_16_16(sizeof(int16_t), (PITCH_CORR_LEN2+PITCH_CORR_STEP2+PITCH_MAX_LAG/2-PITCH_FRAME_LEN/2+2)));
/* decimation; put result after the old values */
WebRtcIsacfix_DecimateAllpass32(in, State->decimator_state32, PITCH_FRAME_LEN,
@@ -309,7 +309,7 @@
/* low-pass filtering */
start= PITCH_CORR_LEN2+PITCH_CORR_STEP2+PITCH_MAX_LAG/2-PITCH_FRAME_LEN/2+2;
- WebRtcSpl_FilterARFastQ12(&buf_dec16[start],&buf_dec16[start],(WebRtc_Word16*)kACoefQ12,3, PITCH_FRAME_LEN/2);
+ WebRtcSpl_FilterARFastQ12(&buf_dec16[start],&buf_dec16[start],(int16_t*)kACoefQ12,3, PITCH_FRAME_LEN/2);
/* copy end part back into state buffer */
for (k = 0; k < (PITCH_CORR_LEN2+PITCH_CORR_STEP2+PITCH_MAX_LAG/2-PITCH_FRAME_LEN/2+2); k++)
@@ -322,25 +322,25 @@
/* bias towards pitch lag of previous frame */
- tmp32a = Log2Q8((WebRtc_UWord32) old_lagQ8) - 2304; // log2(0.5*oldlag) in Q8
+ tmp32a = Log2Q8((uint32_t) old_lagQ8) - 2304; // log2(0.5*oldlag) in Q8
tmp32b = WEBRTC_SPL_MUL_16_16_RSFT(oldgQ12,oldgQ12, 10); //Q12 & * 4.0;
- gain_bias16 = (WebRtc_Word16) tmp32b; //Q12
+ gain_bias16 = (int16_t) tmp32b; //Q12
if (gain_bias16 > 3276) gain_bias16 = 3276; // 0.8 in Q12
for (k = 0; k < PITCH_LAG_SPAN2; k++)
{
if (crrvecQ8_1[k]>0) {
- tmp32b = Log2Q8((WebRtc_UWord32) (k + (PITCH_MIN_LAG/2-2)));
- tmp16a = (WebRtc_Word16) (tmp32b - tmp32a); // Q8 & fabs(ratio)<4
+ tmp32b = Log2Q8((uint32_t) (k + (PITCH_MIN_LAG/2-2)));
+ tmp16a = (int16_t) (tmp32b - tmp32a); // Q8 & fabs(ratio)<4
tmp32c = WEBRTC_SPL_MUL_16_16_RSFT(tmp16a,tmp16a, 6); //Q10
- tmp16b = (WebRtc_Word16) tmp32c; // Q10 & <8
+ tmp16b = (int16_t) tmp32c; // Q10 & <8
tmp32d = WEBRTC_SPL_MUL_16_16_RSFT(tmp16b, 177 , 8); // mult with ln2 in Q8
- tmp16c = (WebRtc_Word16) tmp32d; // Q10 & <4
- tmp16d = Exp2Q10((WebRtc_Word16) -tmp16c); //Q10
+ tmp16c = (int16_t) tmp32d; // Q10 & <4
+ tmp16d = Exp2Q10((int16_t) -tmp16c); //Q10
tmp32c = WEBRTC_SPL_MUL_16_16_RSFT(gain_bias16,tmp16d,13); // Q10 & * 0.5
- bias16 = (WebRtc_Word16) (1024 + tmp32c); // Q10
- tmp32b = Log2Q8((WebRtc_UWord32) bias16) - 2560; // Q10 in -> Q8 out with 10*2^8 offset
+ bias16 = (int16_t) (1024 + tmp32c); // Q10
+ tmp32b = Log2Q8((uint32_t) bias16) - 2560; // Q10 in -> Q8 out with 10*2^8 offset
crrvecQ8_1[k] += tmp32b ; // -10*2^8 offset
}
}
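
A floating-point reading of the "bias towards pitch lag of previous frame" loop above: each log-domain correlation value gets log2(bias) added, where the weight is bell-shaped around the previous lag and scaled by the previous pitch gain. The constants follow the Q-format comments above; treat this as a reading of the fixed-point code, not a drop-in replacement.

    #include <math.h>
    #include <stdio.h>

    /* r    = log2(lag / old_lag)
     * w    = 2^(-ln(2) * r*r)              (bell-shaped weight around old_lag)
     * bias = 1 + 0.5 * g * w,  g = min(4 * old_gain^2, 0.8)
     * Returns log2(bias), the amount added to the Q8 log-correlation. */
    static double lag_bias_log2(double lag, double old_lag, double old_gain)
    {
        const double ln2 = 0.6931471805599453;
        double g = 4.0 * old_gain * old_gain;
        if (g > 0.8) g = 0.8;
        double r = log2(lag / old_lag);
        double w = pow(2.0, -ln2 * r * r);
        return log2(1.0 + 0.5 * g * w);
    }

    int main(void)
    {
        /* With a strong previous gain, candidates near the old lag get the boost. */
        printf("lag 60: +%f bits\n", lag_bias_log2(60.0, 60.0, 0.9));
        printf("lag 90: +%f bits\n", lag_bias_log2(90.0, 60.0, 0.9));
        return 0;
    }
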
@@ -402,7 +402,7 @@
corr_max32=0;
best_lag1q =0;
if (peaks_indq > 0) {
- FindFour32(peakvq, (WebRtc_Word16) peaks_indq, best4q);
+ FindFour32(peakvq, (int16_t) peaks_indq, best4q);
npkq = WEBRTC_SPL_MIN(peaks_indq, 4);
for (k=0;k<npkq;k++) {
@@ -413,10 +413,10 @@
xq[0] = WEBRTC_SPL_LSHIFT_W32(xq[0], 8);
Intrp1DQ8(xq, fxq, yq, fyq);
- tmp32a= Log2Q8((WebRtc_UWord32) *yq) - 2048; // offset 8*2^8
+ tmp32a= Log2Q8((uint32_t) *yq) - 2048; // offset 8*2^8
/* Bias towards short lags */
/* log(pow(0.8, log(2.0 * *y )))/log(2.0) */
- tmp32b= WEBRTC_SPL_MUL_16_16_RSFT((WebRtc_Word16) tmp32a, -42, 8);
+ tmp32b= WEBRTC_SPL_MUL_16_16_RSFT((int16_t) tmp32a, -42, 8);
tmp32c= tmp32b + 256;
*fyq += tmp32c;
if (*fyq > corr_max32) {
@@ -440,12 +440,12 @@
for (k = 1; k <= PITCH_LAG_SPAN2; k++)
{
tmp32a = WEBRTC_SPL_LSHIFT_W32(k, 7); // 0.5*k Q8
- tmp32b = (WebRtc_Word32) (WEBRTC_SPL_LSHIFT_W32(tmp32a, 1)) - ratq; // Q8
- tmp32c = WEBRTC_SPL_MUL_16_16_RSFT((WebRtc_Word16) tmp32b, (WebRtc_Word16) tmp32b, 8); // Q8
+ tmp32b = (int32_t) (WEBRTC_SPL_LSHIFT_W32(tmp32a, 1)) - ratq; // Q8
+ tmp32c = WEBRTC_SPL_MUL_16_16_RSFT((int16_t) tmp32b, (int16_t) tmp32b, 8); // Q8
- tmp32b = (WebRtc_Word32) tmp32c + (WebRtc_Word32) WEBRTC_SPL_RSHIFT_W32(ratq, 1); // (k-r)^2 + 0.5 * r Q8
- tmp32c = Log2Q8((WebRtc_UWord32) tmp32a) - 2048; // offset 8*2^8 , log2(0.5*k) Q8
- tmp32d = Log2Q8((WebRtc_UWord32) tmp32b) - 2048; // offset 8*2^8 , log2(0.5*k) Q8
+ tmp32b = (int32_t) tmp32c + (int32_t) WEBRTC_SPL_RSHIFT_W32(ratq, 1); // (k-r)^2 + 0.5 * r Q8
+ tmp32c = Log2Q8((uint32_t) tmp32a) - 2048; // offset 8*2^8 , log2(0.5*k) Q8
+ tmp32d = Log2Q8((uint32_t) tmp32b) - 2048; // offset 8*2^8 , log2(0.5*k) Q8
tmp32e = tmp32c -tmp32d;
cv2q[k] += WEBRTC_SPL_RSHIFT_W32(tmp32e, 1);
@@ -474,7 +474,7 @@
best_lag2q =0;
if (peaks_indq > 0) {
- FindFour32(peakvq, (WebRtc_Word16) peaks_indq, best4q);
+ FindFour32(peakvq, (int16_t) peaks_indq, best4q);
npkq = WEBRTC_SPL_MIN(peaks_indq, 4);
for (k=0;k<npkq;k++) {
@@ -487,8 +487,8 @@
/* Bias towards short lags */
/* log(pow(0.8, log(2.0f * *y )))/log(2.0f) */
- tmp32a= Log2Q8((WebRtc_UWord32) *yq) - 2048; // offset 8*2^8
- tmp32b= WEBRTC_SPL_MUL_16_16_RSFT((WebRtc_Word16) tmp32a, -82, 8);
+ tmp32a= Log2Q8((uint32_t) *yq) - 2048; // offset 8*2^8
+ tmp32b= WEBRTC_SPL_MUL_16_16_RSFT((int16_t) tmp32a, -82, 8);
tmp32c= tmp32b + 256;
*fyq += tmp32c;
if (*fyq > corr_max32) {
@@ -506,24 +506,24 @@
lagsQ8[3] = lagsQ8[0];
}
- lagsQ7[0]=(WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(lagsQ8[0], 1);
- lagsQ7[1]=(WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(lagsQ8[1], 1);
- lagsQ7[2]=(WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(lagsQ8[2], 1);
- lagsQ7[3]=(WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(lagsQ8[3], 1);
+ lagsQ7[0]=(int16_t) WEBRTC_SPL_RSHIFT_W32(lagsQ8[0], 1);
+ lagsQ7[1]=(int16_t) WEBRTC_SPL_RSHIFT_W32(lagsQ8[1], 1);
+ lagsQ7[2]=(int16_t) WEBRTC_SPL_RSHIFT_W32(lagsQ8[2], 1);
+ lagsQ7[3]=(int16_t) WEBRTC_SPL_RSHIFT_W32(lagsQ8[3], 1);
}
-void WebRtcIsacfix_PitchAnalysis(const WebRtc_Word16 *inn, /* PITCH_FRAME_LEN samples */
- WebRtc_Word16 *outQ0, /* PITCH_FRAME_LEN+QLOOKAHEAD samples */
+void WebRtcIsacfix_PitchAnalysis(const int16_t *inn, /* PITCH_FRAME_LEN samples */
+ int16_t *outQ0, /* PITCH_FRAME_LEN+QLOOKAHEAD samples */
PitchAnalysisStruct *State,
- WebRtc_Word16 *PitchLags_Q7,
- WebRtc_Word16 *PitchGains_Q12)
+ int16_t *PitchLags_Q7,
+ int16_t *PitchGains_Q12)
{
- WebRtc_Word16 inbufQ0[PITCH_FRAME_LEN + QLOOKAHEAD];
- WebRtc_Word16 k;
+ int16_t inbufQ0[PITCH_FRAME_LEN + QLOOKAHEAD];
+ int16_t k;
/* initial pitch estimate */
WebRtcIsacfix_InitialPitch(inn, State, PitchLags_Q7);
@@ -537,7 +537,7 @@
inbufQ0[k] = State->inbuf[k];
}
for (k = 0; k < PITCH_FRAME_LEN; k++) {
- inbufQ0[k+QLOOKAHEAD] = (WebRtc_Word16) inn[k];
+ inbufQ0[k+QLOOKAHEAD] = (int16_t) inn[k];
}
/* lookahead pitch filtering for masking analysis */
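Illustrative note (sketch only, not from the patch): the Log2Q8 arithmetic in this file subtracts a constant offset after each call — 2560 (10*2^8) for a Q10 argument, 2048 (8*2^8) for a Q8 argument. Assuming Log2Q8(x) returns log2 of its integer argument in Q8, the subtraction simply removes the Q-domain bias, since log2(x_real * 2^n) = log2(x_real) + n. A minimal stand-alone sketch of the same arithmetic; log2_q8 below is a hypothetical stand-in built on libm, not the WebRTC routine:
#include <math.h>
#include <stdint.h>
/* Hypothetical stand-in for Log2Q8: log2 of a positive integer, in Q8. */
static int32_t log2_q8(uint32_t x) {
  return (int32_t)lrint(256.0 * log2((double)x));
}
/* A real value of 3.0 stored in Q10 is 3072:
 *   log2_q8(3072) = 256 * (log2(3.0) + 10) ~= 2966
 *   2966 - 2560   = 406 ~= 256 * log2(3.0)
 * i.e. subtracting 10*2^8 leaves log2 of the real value in Q8. */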
diff --git a/webrtc/modules/audio_coding/codecs/isac/fix/source/pitch_estimator.h b/webrtc/modules/audio_coding/codecs/isac/fix/source/pitch_estimator.h
index 6225256..93c81c8 100644
--- a/webrtc/modules/audio_coding/codecs/isac/fix/source/pitch_estimator.h
+++ b/webrtc/modules/audio_coding/codecs/isac/fix/source/pitch_estimator.h
@@ -20,42 +20,42 @@
#include "structs.h"
-void WebRtcIsacfix_PitchAnalysis(const WebRtc_Word16 *in, /* PITCH_FRAME_LEN samples */
- WebRtc_Word16 *outQ0, /* PITCH_FRAME_LEN+QLOOKAHEAD samples */
+void WebRtcIsacfix_PitchAnalysis(const int16_t *in, /* PITCH_FRAME_LEN samples */
+ int16_t *outQ0, /* PITCH_FRAME_LEN+QLOOKAHEAD samples */
PitchAnalysisStruct *State,
- WebRtc_Word16 *lagsQ7,
- WebRtc_Word16 *PitchGains_Q12);
+ int16_t *lagsQ7,
+ int16_t *PitchGains_Q12);
-void WebRtcIsacfix_InitialPitch(const WebRtc_Word16 *in,
+void WebRtcIsacfix_InitialPitch(const int16_t *in,
PitchAnalysisStruct *State,
- WebRtc_Word16 *qlags);
+ int16_t *qlags);
-void WebRtcIsacfix_PitchFilter(WebRtc_Word16 *indatFix,
- WebRtc_Word16 *outdatQQ,
+void WebRtcIsacfix_PitchFilter(int16_t *indatFix,
+ int16_t *outdatQQ,
PitchFiltstr *pfp,
- WebRtc_Word16 *lagsQ7,
- WebRtc_Word16 *gainsQ12,
- WebRtc_Word16 type);
+ int16_t *lagsQ7,
+ int16_t *gainsQ12,
+ int16_t type);
void WebRtcIsacfix_PitchFilterCore(int loopNumber,
- WebRtc_Word16 gain,
+ int16_t gain,
int index,
- WebRtc_Word16 sign,
- WebRtc_Word16* inputState,
- WebRtc_Word16* outputBuff2,
- const WebRtc_Word16* coefficient,
- WebRtc_Word16* inputBuf,
- WebRtc_Word16* outputBuf,
+ int16_t sign,
+ int16_t* inputState,
+ int16_t* outputBuff2,
+ const int16_t* coefficient,
+ int16_t* inputBuf,
+ int16_t* outputBuf,
int* index2);
-void WebRtcIsacfix_PitchFilterGains(const WebRtc_Word16 *indatQ0,
+void WebRtcIsacfix_PitchFilterGains(const int16_t *indatQ0,
PitchFiltstr *pfp,
- WebRtc_Word16 *lagsQ7,
- WebRtc_Word16 *gainsQ12);
+ int16_t *lagsQ7,
+ int16_t *gainsQ12);
-void WebRtcIsacfix_DecimateAllpass32(const WebRtc_Word16 *in,
- WebRtc_Word32 *state_in, /* array of size: 2*ALLPASSSECTIONS+1 */
- WebRtc_Word16 N, /* number of input samples */
- WebRtc_Word16 *out); /* array of size N/2 */
+void WebRtcIsacfix_DecimateAllpass32(const int16_t *in,
+ int32_t *state_in, /* array of size: 2*ALLPASSSECTIONS+1 */
+ int16_t N, /* number of input samples */
+ int16_t *out); /* array of size N/2 */
#endif /* WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_PITCH_ESTIMATOR_H_ */
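Illustrative note (sketch only, not from the patch): a minimal shape-check of the converted WebRtcIsacfix_PitchAnalysis() signature, sizing every buffer with the same macros the declarations use. PITCH_FRAME_LEN, QLOOKAHEAD and PITCH_SUBFRAMES are assumed to come from settings.h, and zeroing the state with memset is for illustration only — this is not a working encoder path.
#include <stdint.h>
#include <string.h>
#include "pitch_estimator.h"
#include "structs.h"
static void pitch_analysis_shape_check(void) {
  int16_t in[PITCH_FRAME_LEN] = {0};            /* PITCH_FRAME_LEN input samples   */
  int16_t out[PITCH_FRAME_LEN + QLOOKAHEAD];    /* filtered output incl. lookahead */
  int16_t lagsQ7[PITCH_SUBFRAMES];              /* one lag per subframe, Q7        */
  int16_t gainsQ12[PITCH_SUBFRAMES];            /* one gain per subframe, Q12      */
  PitchAnalysisStruct state;
  memset(&state, 0, sizeof(state));             /* illustration only */
  WebRtcIsacfix_PitchAnalysis(in, out, &state, lagsQ7, gainsQ12);
}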
diff --git a/webrtc/modules/audio_coding/codecs/isac/fix/source/pitch_filter.c b/webrtc/modules/audio_coding/codecs/isac/fix/source/pitch_filter.c
index f30293e..89d884a 100644
--- a/webrtc/modules/audio_coding/codecs/isac/fix/source/pitch_filter.c
+++ b/webrtc/modules/audio_coding/codecs/isac/fix/source/pitch_filter.c
@@ -25,16 +25,16 @@
static const int kSegments = 5;
// A division factor of 1/5 in Q15.
-static const WebRtc_Word16 kDivFactor = 6553;
+static const int16_t kDivFactor = 6553;
// Filter coefficients in Q15.
-static const WebRtc_Word16 kDampFilter[PITCH_DAMPORDER] = {
+static const int16_t kDampFilter[PITCH_DAMPORDER] = {
-2294, 8192, 20972, 8192, -2294
};
// Interpolation coefficients; generated by design_pitch_filter.m.
// Coefficients are stored in Q14.
-static const WebRtc_Word16 kIntrpCoef[PITCH_FRACS][PITCH_FRACORDER] = {
+static const int16_t kIntrpCoef[PITCH_FRACS][PITCH_FRACORDER] = {
{-367, 1090, -2706, 9945, 10596, -3318, 1626, -781, 287},
{-325, 953, -2292, 7301, 12963, -3320, 1570, -743, 271},
{-240, 693, -1622, 4634, 14809, -2782, 1262, -587, 212},
@@ -48,44 +48,44 @@
// Function prototype for pitch filtering.
// TODO(Turaj): Add descriptions of input and output parameters.
void WebRtcIsacfix_PitchFilterCore(int loopNumber,
- WebRtc_Word16 gain,
+ int16_t gain,
int index,
- WebRtc_Word16 sign,
- WebRtc_Word16* inputState,
- WebRtc_Word16* outputBuf2,
- const WebRtc_Word16* coefficient,
- WebRtc_Word16* inputBuf,
- WebRtc_Word16* outputBuf,
+ int16_t sign,
+ int16_t* inputState,
+ int16_t* outputBuf2,
+ const int16_t* coefficient,
+ int16_t* inputBuf,
+ int16_t* outputBuf,
int* index2);
-static __inline WebRtc_Word32 CalcLrIntQ(WebRtc_Word32 fixVal,
- WebRtc_Word16 qDomain) {
- WebRtc_Word32 intgr;
- WebRtc_Word32 roundVal;
+static __inline int32_t CalcLrIntQ(int32_t fixVal,
+ int16_t qDomain) {
+ int32_t intgr;
+ int32_t roundVal;
- roundVal = WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)1, qDomain - 1);
+ roundVal = WEBRTC_SPL_LSHIFT_W32((int32_t)1, qDomain - 1);
intgr = WEBRTC_SPL_RSHIFT_W32(fixVal + roundVal, qDomain);
return intgr;
}
-void WebRtcIsacfix_PitchFilter(WebRtc_Word16* indatQQ, // Q10 if type is 1 or 4,
+void WebRtcIsacfix_PitchFilter(int16_t* indatQQ, // Q10 if type is 1 or 4,
// Q0 if type is 2.
- WebRtc_Word16* outdatQQ,
+ int16_t* outdatQQ,
PitchFiltstr* pfp,
- WebRtc_Word16* lagsQ7,
- WebRtc_Word16* gainsQ12,
- WebRtc_Word16 type) {
+ int16_t* lagsQ7,
+ int16_t* gainsQ12,
+ int16_t type) {
int k, ind, cnt;
- WebRtc_Word16 sign = 1;
- WebRtc_Word16 inystateQQ[PITCH_DAMPORDER];
- WebRtc_Word16 ubufQQ[PITCH_INTBUFFSIZE + QLOOKAHEAD];
- const WebRtc_Word16 Gain = 21299; // 1.3 in Q14
- WebRtc_Word16 oldLagQ7;
- WebRtc_Word16 oldGainQ12, lagdeltaQ7, curLagQ7, gaindeltaQ12, curGainQ12;
+ int16_t sign = 1;
+ int16_t inystateQQ[PITCH_DAMPORDER];
+ int16_t ubufQQ[PITCH_INTBUFFSIZE + QLOOKAHEAD];
+ const int16_t Gain = 21299; // 1.3 in Q14
+ int16_t oldLagQ7;
+ int16_t oldGainQ12, lagdeltaQ7, curLagQ7, gaindeltaQ12, curGainQ12;
int indW32 = 0, frcQQ = 0;
- WebRtc_Word32 tmpW32;
- const WebRtc_Word16* fracoeffQQ = NULL;
+ int32_t tmpW32;
+ const int16_t* fracoeffQQ = NULL;
// Assumptions in ARM assembly for WebRtcIsacfix_PitchFilterCoreARM().
COMPILE_ASSERT(PITCH_FRACORDER == 9);
@@ -104,7 +104,7 @@
// Make output more periodic.
for (k = 0; k < PITCH_SUBFRAMES; k++) {
- gainsQ12[k] = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(
+ gainsQ12[k] = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(
gainsQ12[k], Gain, 14);
}
}
@@ -121,11 +121,11 @@
for (k = 0; k < PITCH_SUBFRAMES; k++) {
// Calculate interpolation steps.
lagdeltaQ7 = lagsQ7[k] - oldLagQ7;
- lagdeltaQ7 = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT_WITH_ROUND(
+ lagdeltaQ7 = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT_WITH_ROUND(
lagdeltaQ7, kDivFactor, 15);
curLagQ7 = oldLagQ7;
gaindeltaQ12 = gainsQ12[k] - oldGainQ12;
- gaindeltaQ12 = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(
+ gaindeltaQ12 = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(
gaindeltaQ12, kDivFactor, 15);
curGainQ12 = oldGainQ12;
@@ -172,18 +172,18 @@
}
-void WebRtcIsacfix_PitchFilterGains(const WebRtc_Word16* indatQ0,
+void WebRtcIsacfix_PitchFilterGains(const int16_t* indatQ0,
PitchFiltstr* pfp,
- WebRtc_Word16* lagsQ7,
- WebRtc_Word16* gainsQ12) {
+ int16_t* lagsQ7,
+ int16_t* gainsQ12) {
int k, n, m, ind, pos, pos3QQ;
- WebRtc_Word16 ubufQQ[PITCH_INTBUFFSIZE];
- WebRtc_Word16 oldLagQ7, lagdeltaQ7, curLagQ7;
- const WebRtc_Word16* fracoeffQQ = NULL;
- WebRtc_Word16 scale;
- WebRtc_Word16 cnt = 0, frcQQ, indW16 = 0, tmpW16;
- WebRtc_Word32 tmpW32, tmp2W32, csum1QQ, esumxQQ;
+ int16_t ubufQQ[PITCH_INTBUFFSIZE];
+ int16_t oldLagQ7, lagdeltaQ7, curLagQ7;
+ const int16_t* fracoeffQQ = NULL;
+ int16_t scale;
+ int16_t cnt = 0, frcQQ, indW16 = 0, tmpW16;
+ int32_t tmpW32, tmp2W32, csum1QQ, esumxQQ;
// Set up buffer and states.
memcpy(ubufQQ, pfp->ubufQQ, sizeof(pfp->ubufQQ));
@@ -202,7 +202,7 @@
// Calculate interpolation steps.
lagdeltaQ7 = lagsQ7[k] - oldLagQ7;
- lagdeltaQ7 = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT_WITH_ROUND(
+ lagdeltaQ7 = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT_WITH_ROUND(
lagdeltaQ7, kDivFactor, 15);
curLagQ7 = oldLagQ7;
oldLagQ7 = lagsQ7[k];
@@ -215,7 +215,7 @@
for (cnt = 0; cnt < kSegments; cnt++) {
// Update parameters for each segment.
curLagQ7 += lagdeltaQ7;
- indW16 = (WebRtc_Word16)CalcLrIntQ(curLagQ7, 7);
+ indW16 = (int16_t)CalcLrIntQ(curLagQ7, 7);
tmpW16 = WEBRTC_SPL_LSHIFT_W16(indW16, 7);
tmpW16 -= curLagQ7;
frcQQ = WEBRTC_SPL_RSHIFT_W16(tmpW16, 4);
@@ -241,7 +241,7 @@
tmp2W32 = WEBRTC_SPL_MUL_16_32_RSFT14(indatQ0[ind], tmpW32);
tmpW32 += 8192;
- tmpW16 = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(tmpW32, 14);
+ tmpW16 = (int16_t)WEBRTC_SPL_RSHIFT_W32(tmpW32, 14);
tmpW32 = WEBRTC_SPL_MUL_16_16(tmpW16, tmpW16);
if ((tmp2W32 > 1073700000) || (csum1QQ > 1073700000) ||
@@ -269,7 +269,7 @@
} else {
tmpW32 = 4096;
}
- gainsQ12[k] = (WebRtc_Word16)WEBRTC_SPL_SAT(PITCH_MAX_GAIN_Q12, tmpW32, 0);
+ gainsQ12[k] = (int16_t)WEBRTC_SPL_SAT(PITCH_MAX_GAIN_Q12, tmpW32, 0);
}
// Export buffer and states.
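Illustrative note (sketch only, not from the patch): the per-subframe interpolation above divides each lag/gain delta into kSegments equal steps with a Q15 multiply by kDivFactor (6553 ~= 1/5 in Q15). A plain-C sketch of the arithmetic, assuming the usual SPL macro semantics (16x16 multiply, optional rounding term, arithmetic right shift):
#include <stdint.h>
/* Assumed-equivalent plain-C forms of the SPL macros used above. */
static int32_t mul_16_16_rsft(int16_t a, int16_t b, int shift) {
  return ((int32_t)a * (int32_t)b) >> shift;
}
static int32_t mul_16_16_rsft_round(int16_t a, int16_t b, int shift) {
  return (((int32_t)a * (int32_t)b) + ((int32_t)1 << (shift - 1))) >> shift;
}
/* Example: a lag delta of 640 (5.0 in Q7) split over kSegments = 5 steps:
 *   mul_16_16_rsft_round(640, 6553, 15) = (4193920 + 16384) >> 15 = 128 = 1.0 in Q7 */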
diff --git a/webrtc/modules/audio_coding/codecs/isac/fix/source/pitch_filter_armv6.S b/webrtc/modules/audio_coding/codecs/isac/fix/source/pitch_filter_armv6.S
index ffd0e63..57796b0 100644
--- a/webrtc/modules/audio_coding/codecs/isac/fix/source/pitch_filter_armv6.S
+++ b/webrtc/modules/audio_coding/codecs/isac/fix/source/pitch_filter_armv6.S
@@ -20,14 +20,14 @@
.align 2
@ void WebRtcIsacfix_PitchFilterCore(int loopNumber,
-@ WebRtc_Word16 gain,
+@ int16_t gain,
@ int index,
-@ WebRtc_Word16 sign,
-@ WebRtc_Word16* inputState,
-@ WebRtc_Word16* outputBuf2,
-@ const WebRtc_Word16* coefficient,
-@ WebRtc_Word16* inputBuf,
-@ WebRtc_Word16* outputBuf,
+@ int16_t sign,
+@ int16_t* inputState,
+@ int16_t* outputBuf2,
+@ const int16_t* coefficient,
+@ int16_t* inputBuf,
+@ int16_t* outputBuf,
@ int* index2) {
DEFINE_FUNCTION WebRtcIsacfix_PitchFilterCore
push {r4-r11}
diff --git a/webrtc/modules/audio_coding/codecs/isac/fix/source/pitch_filter_c.c b/webrtc/modules/audio_coding/codecs/isac/fix/source/pitch_filter_c.c
index 29b4b6a..d3c90b3 100644
--- a/webrtc/modules/audio_coding/codecs/isac/fix/source/pitch_filter_c.c
+++ b/webrtc/modules/audio_coding/codecs/isac/fix/source/pitch_filter_c.c
@@ -12,26 +12,26 @@
#include "modules/audio_coding/codecs/isac/fix/source/pitch_estimator.h"
/* Filter coefficients in Q15. */
-static const WebRtc_Word16 kDampFilter[PITCH_DAMPORDER] = {
+static const int16_t kDampFilter[PITCH_DAMPORDER] = {
-2294, 8192, 20972, 8192, -2294
};
void WebRtcIsacfix_PitchFilterCore(int loopNumber,
- WebRtc_Word16 gain,
+ int16_t gain,
int index,
- WebRtc_Word16 sign,
- WebRtc_Word16* inputState,
- WebRtc_Word16* outputBuf2,
- const WebRtc_Word16* coefficient,
- WebRtc_Word16* inputBuf,
- WebRtc_Word16* outputBuf,
+ int16_t sign,
+ int16_t* inputState,
+ int16_t* outputBuf2,
+ const int16_t* coefficient,
+ int16_t* inputBuf,
+ int16_t* outputBuf,
int* index2) {
int i = 0, j = 0; /* Loop counters. */
- WebRtc_Word16* ubufQQpos2 = &outputBuf2[PITCH_BUFFSIZE - (index + 2)];
- WebRtc_Word16 tmpW16 = 0;
+ int16_t* ubufQQpos2 = &outputBuf2[PITCH_BUFFSIZE - (index + 2)];
+ int16_t tmpW16 = 0;
for (i = 0; i < loopNumber; i++) {
- WebRtc_Word32 tmpW32 = 0;
+ int32_t tmpW32 = 0;
/* Filter to get fractional pitch. */
for (j = 0; j < PITCH_FRACORDER; j++) {
@@ -41,12 +41,12 @@
/* Saturate to avoid overflow in tmpW16. */
tmpW32 = WEBRTC_SPL_SAT(536862719, tmpW32, -536879104);
tmpW32 += 8192;
- tmpW16 = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(tmpW32, 14);
+ tmpW16 = (int16_t)WEBRTC_SPL_RSHIFT_W32(tmpW32, 14);
/* Shift low pass filter state. */
memmove(&inputState[1], &inputState[0],
- (PITCH_DAMPORDER - 1) * sizeof(WebRtc_Word16));
- inputState[0] = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT_WITH_ROUND(
+ (PITCH_DAMPORDER - 1) * sizeof(int16_t));
+ inputState[0] = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT_WITH_ROUND(
gain, tmpW16, 12);
/* Low pass filter. */
@@ -60,7 +60,7 @@
/* Saturate to avoid overflow in tmpW16. */
tmpW32 = WEBRTC_SPL_SAT(1073725439, tmpW32, -1073758208);
tmpW32 += 16384;
- tmpW16 = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(tmpW32, 15);
+ tmpW16 = (int16_t)WEBRTC_SPL_RSHIFT_W32(tmpW32, 15);
/* Subtract from input and update buffer. */
tmpW32 = inputBuf[*index2] - WEBRTC_SPL_MUL_16_16(sign, tmpW16);
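Illustrative note (sketch only, not from the patch): a hedged reading of the clamp constants in the loop above, assuming WEBRTC_SPL_SAT(max, value, min) clamps value into [min, max] and relying, as the code itself does, on arithmetic right shift of negative values. The bounds are chosen so that after adding the rounding constant and shifting down, the result still fits int16_t:
#include <assert.h>
static void pitch_filter_clamp_check(void) {
  /* Q14 path: 8192 = half of 2^14 is the round-to-nearest term. */
  assert(((536862719 + 8192) >> 14) == 32767);      /*  536870911 = 2^29 - 1  */
  assert(((-536879104 + 8192) >> 14) == -32768);    /* -536870912 = -2^29     */
  /* Q15 path: 16384 = half of 2^15. */
  assert(((1073725439 + 16384) >> 15) == 32767);    /*  1073741823 = 2^30 - 1 */
  assert(((-1073758208 + 16384) >> 15) == -32768);  /* -1073741824 = -2^30    */
}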
diff --git a/webrtc/modules/audio_coding/codecs/isac/fix/source/pitch_gain_tables.c b/webrtc/modules/audio_coding/codecs/isac/fix/source/pitch_gain_tables.c
index 50ea658..bfd83b7 100644
--- a/webrtc/modules/audio_coding/codecs/isac/fix/source/pitch_gain_tables.c
+++ b/webrtc/modules/audio_coding/codecs/isac/fix/source/pitch_gain_tables.c
@@ -21,7 +21,7 @@
/********************* Pitch Filter Gain Coefficient Tables ************************/
/* cdf for quantized pitch filter gains */
-const WebRtc_UWord16 WebRtcIsacfix_kPitchGainCdf[255] = {
+const uint16_t WebRtcIsacfix_kPitchGainCdf[255] = {
0, 2, 4, 6, 64, 901, 903, 905, 16954, 16956,
16961, 17360, 17362, 17364, 17366, 17368, 17370, 17372, 17374, 17411,
17514, 17516, 17583, 18790, 18796, 18802, 20760, 20777, 20782, 21722,
@@ -51,25 +51,25 @@
};
/* index limits and ranges */
-const WebRtc_Word16 WebRtcIsacfix_kLowerlimiGain[3] = {
+const int16_t WebRtcIsacfix_kLowerlimiGain[3] = {
-7, -2, -1
};
-const WebRtc_Word16 WebRtcIsacfix_kUpperlimitGain[3] = {
+const int16_t WebRtcIsacfix_kUpperlimitGain[3] = {
0, 3, 1
};
-const WebRtc_UWord16 WebRtcIsacfix_kMultsGain[2] = {
+const uint16_t WebRtcIsacfix_kMultsGain[2] = {
18, 3
};
/* size of cdf table */
-const WebRtc_UWord16 WebRtcIsacfix_kCdfTableSizeGain[1] = {
+const uint16_t WebRtcIsacfix_kCdfTableSizeGain[1] = {
256
};
/* mean values of pitch filter gains in FIXED point Q12 */
-const WebRtc_Word16 WebRtcIsacfix_kPitchGain1[144] = {
+const int16_t WebRtcIsacfix_kPitchGain1[144] = {
843, 1092, 1336, 1222, 1405, 1656, 1500, 1815, 1843, 1838, 1839,
1843, 1843, 1843, 1843, 1843, 1843, 1843, 814, 846, 1092, 1013,
1174, 1383, 1391, 1511, 1584, 1734, 1753, 1843, 1843, 1843, 1843,
@@ -86,7 +86,7 @@
1265
};
-const WebRtc_Word16 WebRtcIsacfix_kPitchGain2[144] = {
+const int16_t WebRtcIsacfix_kPitchGain2[144] = {
1760, 1525, 1285, 1747, 1671, 1393, 1843, 1826, 1555, 1843, 1784,
1606, 1843, 1843, 1711, 1843, 1843, 1814, 1389, 1275, 1040, 1564,
1414, 1252, 1610, 1495, 1343, 1753, 1592, 1405, 1804, 1720, 1475,
@@ -103,7 +103,7 @@
87
};
-const WebRtc_Word16 WebRtcIsacfix_kPitchGain3[144] = {
+const int16_t WebRtcIsacfix_kPitchGain3[144] = {
1843, 1843, 1711, 1843, 1818, 1606, 1843, 1827, 1511, 1814, 1639,
1393, 1760, 1525, 1285, 1656, 1419, 1176, 1835, 1718, 1475, 1841,
1650, 1387, 1648, 1498, 1287, 1600, 1411, 1176, 1522, 1299, 1040,
@@ -121,7 +121,7 @@
};
-const WebRtc_Word16 WebRtcIsacfix_kPitchGain4[144] = {
+const int16_t WebRtcIsacfix_kPitchGain4[144] = {
1843, 1843, 1843, 1843, 1841, 1843, 1500, 1821, 1843, 1222, 1434,
1656, 843, 1092, 1336, 504, 757, 1007, 1843, 1843, 1843, 1838,
1791, 1843, 1265, 1505, 1599, 965, 1219, 1425, 730, 821, 1092,
@@ -141,7 +141,7 @@
/* transform matrix in Q12*/
-const WebRtc_Word16 WebRtcIsacfix_kTransform[4][4] = {
+const int16_t WebRtcIsacfix_kTransform[4][4] = {
{ -2048, -2048, -2048, -2048 },
{ 2748, 916, -916, -2748 },
{ 2048, -2048, -2048, 2048 },
diff --git a/webrtc/modules/audio_coding/codecs/isac/fix/source/pitch_gain_tables.h b/webrtc/modules/audio_coding/codecs/isac/fix/source/pitch_gain_tables.h
index 788e553..c4e0be5 100644
--- a/webrtc/modules/audio_coding/codecs/isac/fix/source/pitch_gain_tables.h
+++ b/webrtc/modules/audio_coding/codecs/isac/fix/source/pitch_gain_tables.h
@@ -23,23 +23,23 @@
/********************* Pitch Filter Gain Coefficient Tables ************************/
/* cdf for quantized pitch filter gains */
-extern const WebRtc_UWord16 WebRtcIsacfix_kPitchGainCdf[255];
+extern const uint16_t WebRtcIsacfix_kPitchGainCdf[255];
/* index limits and ranges */
-extern const WebRtc_Word16 WebRtcIsacfix_kLowerlimiGain[3];
-extern const WebRtc_Word16 WebRtcIsacfix_kUpperlimitGain[3];
-extern const WebRtc_UWord16 WebRtcIsacfix_kMultsGain[2];
+extern const int16_t WebRtcIsacfix_kLowerlimiGain[3];
+extern const int16_t WebRtcIsacfix_kUpperlimitGain[3];
+extern const uint16_t WebRtcIsacfix_kMultsGain[2];
/* mean values of pitch filter gains in Q12*/
-extern const WebRtc_Word16 WebRtcIsacfix_kPitchGain1[144];
-extern const WebRtc_Word16 WebRtcIsacfix_kPitchGain2[144];
-extern const WebRtc_Word16 WebRtcIsacfix_kPitchGain3[144];
-extern const WebRtc_Word16 WebRtcIsacfix_kPitchGain4[144];
+extern const int16_t WebRtcIsacfix_kPitchGain1[144];
+extern const int16_t WebRtcIsacfix_kPitchGain2[144];
+extern const int16_t WebRtcIsacfix_kPitchGain3[144];
+extern const int16_t WebRtcIsacfix_kPitchGain4[144];
/* size of cdf table */
-extern const WebRtc_UWord16 WebRtcIsacfix_kCdfTableSizeGain[1];
+extern const uint16_t WebRtcIsacfix_kCdfTableSizeGain[1];
/* transform matrix */
-extern const WebRtc_Word16 WebRtcIsacfix_kTransform[4][4];
+extern const int16_t WebRtcIsacfix_kTransform[4][4];
#endif /* WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_PITCH_GAIN_TABLES_H_ */
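Illustrative note (sketch only, not from the patch): the mean-gain tables declared above are in Q12, so the recurring ceiling value 1843 corresponds to 1843/4096 ~= 0.45, which appears to match the PITCH_MAX_GAIN_Q12 clamp in pitch_filter.c earlier in this patch. A tiny conversion helper for inspecting the tables (illustration only):
#include <stdint.h>
#include "pitch_gain_tables.h"
/* Q12 -> float, e.g. WebRtcIsacfix_kPitchGain1[0] = 843 -> ~0.206,
 * and the table ceiling 1843 -> ~0.45. */
static float q12_to_float(int16_t v) {
  return (float)v / 4096.0f;
}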
diff --git a/webrtc/modules/audio_coding/codecs/isac/fix/source/pitch_lag_tables.c b/webrtc/modules/audio_coding/codecs/isac/fix/source/pitch_lag_tables.c
index 81700e4..4566b6eb 100644
--- a/webrtc/modules/audio_coding/codecs/isac/fix/source/pitch_lag_tables.c
+++ b/webrtc/modules/audio_coding/codecs/isac/fix/source/pitch_lag_tables.c
@@ -24,7 +24,7 @@
/* tables for use with small pitch gain */
/* cdf for quantized pitch filter lags */
-const WebRtc_UWord16 WebRtcIsacfix_kPitchLagCdf1Lo[127] = {
+const uint16_t WebRtcIsacfix_kPitchLagCdf1Lo[127] = {
0, 134, 336, 549, 778, 998, 1264, 1512, 1777, 2070,
2423, 2794, 3051, 3361, 3708, 3979, 4315, 4610, 4933, 5269,
5575, 5896, 6155, 6480, 6816, 7129, 7477, 7764, 8061, 8358,
@@ -40,20 +40,20 @@
65152, 65535, 65535, 65535, 65535, 65535, 65535
};
-const WebRtc_UWord16 WebRtcIsacfix_kPitchLagCdf2Lo[20] = {
+const uint16_t WebRtcIsacfix_kPitchLagCdf2Lo[20] = {
0, 429, 3558, 5861, 8558, 11639, 15210, 19502, 24773, 31983,
42602, 48567, 52601, 55676, 58160, 60172, 61889, 63235, 65383, 65535
};
-const WebRtc_UWord16 WebRtcIsacfix_kPitchLagCdf3Lo[2] = {
+const uint16_t WebRtcIsacfix_kPitchLagCdf3Lo[2] = {
0, 65535
};
-const WebRtc_UWord16 WebRtcIsacfix_kPitchLagCdf4Lo[10] = {
+const uint16_t WebRtcIsacfix_kPitchLagCdf4Lo[10] = {
0, 2966, 6368, 11182, 19431, 37793, 48532, 55353, 60626, 65535
};
-const WebRtc_UWord16 *WebRtcIsacfix_kPitchLagPtrLo[4] = {
+const uint16_t *WebRtcIsacfix_kPitchLagPtrLo[4] = {
WebRtcIsacfix_kPitchLagCdf1Lo,
WebRtcIsacfix_kPitchLagCdf2Lo,
WebRtcIsacfix_kPitchLagCdf3Lo,
@@ -61,32 +61,32 @@
};
/* size of first cdf table */
-const WebRtc_UWord16 WebRtcIsacfix_kPitchLagSizeLo[1] = {
+const uint16_t WebRtcIsacfix_kPitchLagSizeLo[1] = {
128
};
/* index limits and ranges */
-const WebRtc_Word16 WebRtcIsacfix_kLowerLimitLo[4] = {
+const int16_t WebRtcIsacfix_kLowerLimitLo[4] = {
-140, -9, 0, -4
};
-const WebRtc_Word16 WebRtcIsacfix_kUpperLimitLo[4] = {
+const int16_t WebRtcIsacfix_kUpperLimitLo[4] = {
-20, 9, 0, 4
};
/* initial index for arithmetic decoder */
-const WebRtc_UWord16 WebRtcIsacfix_kInitIndLo[3] = {
+const uint16_t WebRtcIsacfix_kInitIndLo[3] = {
10, 1, 5
};
/* mean values of pitch filter lags in Q10 */
-const WebRtc_Word16 WebRtcIsacfix_kMeanLag2Lo[19] = {
+const int16_t WebRtcIsacfix_kMeanLag2Lo[19] = {
-17627, -16207, -14409, -12319, -10253, -8200, -6054, -3986, -1948, -19,
1937, 3974, 6064, 8155, 10229, 12270, 14296, 16127, 17520
};
-const WebRtc_Word16 WebRtcIsacfix_kMeanLag4Lo[9] = {
+const int16_t WebRtcIsacfix_kMeanLag4Lo[9] = {
-7949, -6063, -4036, -1941, 38, 1977, 4060, 6059
};
@@ -95,7 +95,7 @@
/* tables for use with medium pitch gain */
/* cdf for quantized pitch filter lags */
-const WebRtc_UWord16 WebRtcIsacfix_kPitchLagCdf1Mid[255] = {
+const uint16_t WebRtcIsacfix_kPitchLagCdf1Mid[255] = {
0, 28, 61, 88, 121, 149, 233, 331, 475, 559,
624, 661, 689, 712, 745, 791, 815, 843, 866, 922,
959, 1024, 1061, 1117, 1178, 1238, 1280, 1350, 1453, 1513,
@@ -124,23 +124,23 @@
65535, 65535, 65535, 65535, 65535
};
-const WebRtc_UWord16 WebRtcIsacfix_kPitchLagCdf2Mid[36] = {
+const uint16_t WebRtcIsacfix_kPitchLagCdf2Mid[36] = {
0, 71, 335, 581, 836, 1039, 1323, 1795, 2258, 2608,
3005, 3591, 4243, 5344, 7163, 10583, 16848, 28078, 49448, 57007,
60357, 61850, 62837, 63437, 63872, 64188, 64377, 64614, 64774, 64949,
65039, 65115, 65223, 65360, 65474, 65535
};
-const WebRtc_UWord16 WebRtcIsacfix_kPitchLagCdf3Mid[2] = {
+const uint16_t WebRtcIsacfix_kPitchLagCdf3Mid[2] = {
0, 65535
};
-const WebRtc_UWord16 WebRtcIsacfix_kPitchLagCdf4Mid[20] = {
+const uint16_t WebRtcIsacfix_kPitchLagCdf4Mid[20] = {
0, 28, 246, 459, 667, 1045, 1523, 2337, 4337, 11347,
44231, 56709, 60781, 62243, 63161, 63969, 64608, 65062, 65502, 65535
};
-const WebRtc_UWord16 *WebRtcIsacfix_kPitchLagPtrMid[4] = {
+const uint16_t *WebRtcIsacfix_kPitchLagPtrMid[4] = {
WebRtcIsacfix_kPitchLagCdf1Mid,
WebRtcIsacfix_kPitchLagCdf2Mid,
WebRtcIsacfix_kPitchLagCdf3Mid,
@@ -148,27 +148,27 @@
};
/* size of first cdf table */
-const WebRtc_UWord16 WebRtcIsacfix_kPitchLagSizeMid[1] = {
+const uint16_t WebRtcIsacfix_kPitchLagSizeMid[1] = {
256
};
/* index limits and ranges */
-const WebRtc_Word16 WebRtcIsacfix_kLowerLimitMid[4] = {
+const int16_t WebRtcIsacfix_kLowerLimitMid[4] = {
-280, -17, 0, -9
};
-const WebRtc_Word16 WebRtcIsacfix_kUpperLimitMid[4] = {
+const int16_t WebRtcIsacfix_kUpperLimitMid[4] = {
-40, 17, 0, 9
};
/* initial index for arithmetic decoder */
-const WebRtc_UWord16 WebRtcIsacfix_kInitIndMid[3] = {
+const uint16_t WebRtcIsacfix_kInitIndMid[3] = {
18, 1, 10
};
/* mean values of pitch filter lags in Q10 */
-const WebRtc_Word16 WebRtcIsacfix_kMeanLag2Mid[35] = {
+const int16_t WebRtcIsacfix_kMeanLag2Mid[35] = {
-17297, -16250, -15416, -14343, -13341, -12363, -11270,
-10355, -9122, -8217, -7172, -6083, -5102, -4004, -3060,
-1982, -952, -18, 935, 1976, 3040, 4032,
@@ -177,7 +177,7 @@
};
-const WebRtc_Word16 WebRtcIsacfix_kMeanLag4Mid[19] = {
+const int16_t WebRtcIsacfix_kMeanLag4Mid[19] = {
-8811, -8081, -7203, -6003, -5057, -4025, -2983, -1964,
-891, 29, 921, 1920, 2988, 4064, 5187, 6079, 7173, 8074, 8849
};
@@ -186,7 +186,7 @@
/* tables for use with large pitch gain */
/* cdf for quantized pitch filter lags */
-const WebRtc_UWord16 WebRtcIsacfix_kPitchLagCdf1Hi[511] = {
+const uint16_t WebRtcIsacfix_kPitchLagCdf1Hi[511] = {
0, 7, 18, 33, 69, 105, 156, 228, 315, 612,
680, 691, 709, 724, 735, 738, 742, 746, 749, 753,
756, 760, 764, 774, 782, 785, 789, 796, 800, 803,
@@ -241,7 +241,7 @@
65535
};
-const WebRtc_UWord16 WebRtcIsacfix_kPitchLagCdf2Hi[68] = {
+const uint16_t WebRtcIsacfix_kPitchLagCdf2Hi[68] = {
0, 7, 11, 22, 37, 52, 56, 59, 81, 85,
89, 96, 115, 130, 137, 152, 170, 181, 193, 200,
207, 233, 237, 259, 289, 318, 363, 433, 592, 992,
@@ -251,18 +251,18 @@
65483, 65491, 65498, 65505, 65516, 65520, 65528, 65535
};
-const WebRtc_UWord16 WebRtcIsacfix_kPitchLagCdf3Hi[2] = {
+const uint16_t WebRtcIsacfix_kPitchLagCdf3Hi[2] = {
0, 65535
};
-const WebRtc_UWord16 WebRtcIsacfix_kPitchLagCdf4Hi[35] = {
+const uint16_t WebRtcIsacfix_kPitchLagCdf4Hi[35] = {
0, 7, 19, 30, 41, 48, 63, 74, 82, 96,
122, 152, 215, 330, 701, 2611, 10931, 48106, 61177, 64341,
65112, 65238, 65309, 65338, 65364, 65379, 65401, 65427, 65453,
65465, 65476, 65490, 65509, 65528, 65535
};
-const WebRtc_UWord16 *WebRtcIsacfix_kPitchLagPtrHi[4] = {
+const uint16_t *WebRtcIsacfix_kPitchLagPtrHi[4] = {
WebRtcIsacfix_kPitchLagCdf1Hi,
WebRtcIsacfix_kPitchLagCdf2Hi,
WebRtcIsacfix_kPitchLagCdf3Hi,
@@ -270,27 +270,27 @@
};
/* size of first cdf table */
-const WebRtc_UWord16 WebRtcIsacfix_kPitchLagSizeHi[1] = {
+const uint16_t WebRtcIsacfix_kPitchLagSizeHi[1] = {
512
};
/* index limits and ranges */
-const WebRtc_Word16 WebRtcIsacfix_kLowerLimitHi[4] = {
+const int16_t WebRtcIsacfix_kLowerLimitHi[4] = {
-552, -34, 0, -16
};
-const WebRtc_Word16 WebRtcIsacfix_kUpperLimitHi[4] = {
+const int16_t WebRtcIsacfix_kUpperLimitHi[4] = {
-80, 32, 0, 17
};
/* initial index for arithmetic decoder */
-const WebRtc_UWord16 WebRtcIsacfix_kInitIndHi[3] = {
+const uint16_t WebRtcIsacfix_kInitIndHi[3] = {
34, 1, 18
};
/* mean values of pitch filter lags */
-const WebRtc_Word16 WebRtcIsacfix_kMeanLag2Hi[67] = {
+const int16_t WebRtcIsacfix_kMeanLag2Hi[67] = {
-17482, -16896, -16220, -15929, -15329, -14848, -14336, -13807, -13312, -12800, -12218, -11720,
-11307, -10649, -10396, -9742, -9148, -8668, -8297, -7718, -7155, -6656, -6231, -5600, -5129,
-4610, -4110, -3521, -3040, -2525, -2016, -1506, -995, -477, -5, 469, 991, 1510, 2025, 2526, 3079,
@@ -299,7 +299,7 @@
};
-const WebRtc_Word16 WebRtcIsacfix_kMeanLag4Hi[34] = {
+const int16_t WebRtcIsacfix_kMeanLag4Hi[34] = {
-8175, -7659, -7205, -6684, -6215, -5651, -5180, -4566, -4087, -3536, -3096,
-2532, -1990, -1482, -959, -440, 11, 451, 954, 1492, 2020, 2562, 3059,
3577, 4113, 4618, 5134, 5724, 6060, 6758, 7015, 7716, 8066, 8741
diff --git a/webrtc/modules/audio_coding/codecs/isac/fix/source/pitch_lag_tables.h b/webrtc/modules/audio_coding/codecs/isac/fix/source/pitch_lag_tables.h
index 9517c29..bb8b39a 100644
--- a/webrtc/modules/audio_coding/codecs/isac/fix/source/pitch_lag_tables.h
+++ b/webrtc/modules/audio_coding/codecs/isac/fix/source/pitch_lag_tables.h
@@ -27,77 +27,77 @@
/* tables for use with small pitch gain */
/* cdfs for quantized pitch lags */
-extern const WebRtc_UWord16 WebRtcIsacfix_kPitchLagCdf1Lo[127];
-extern const WebRtc_UWord16 WebRtcIsacfix_kPitchLagCdf2Lo[20];
-extern const WebRtc_UWord16 WebRtcIsacfix_kPitchLagCdf3Lo[2];
-extern const WebRtc_UWord16 WebRtcIsacfix_kPitchLagCdf4Lo[10];
+extern const uint16_t WebRtcIsacfix_kPitchLagCdf1Lo[127];
+extern const uint16_t WebRtcIsacfix_kPitchLagCdf2Lo[20];
+extern const uint16_t WebRtcIsacfix_kPitchLagCdf3Lo[2];
+extern const uint16_t WebRtcIsacfix_kPitchLagCdf4Lo[10];
-extern const WebRtc_UWord16 *WebRtcIsacfix_kPitchLagPtrLo[4];
+extern const uint16_t *WebRtcIsacfix_kPitchLagPtrLo[4];
/* size of first cdf table */
-extern const WebRtc_UWord16 WebRtcIsacfix_kPitchLagSizeLo[1];
+extern const uint16_t WebRtcIsacfix_kPitchLagSizeLo[1];
/* index limits and ranges */
-extern const WebRtc_Word16 WebRtcIsacfix_kLowerLimitLo[4];
-extern const WebRtc_Word16 WebRtcIsacfix_kUpperLimitLo[4];
+extern const int16_t WebRtcIsacfix_kLowerLimitLo[4];
+extern const int16_t WebRtcIsacfix_kUpperLimitLo[4];
/* initial index for arithmetic decoder */
-extern const WebRtc_UWord16 WebRtcIsacfix_kInitIndLo[3];
+extern const uint16_t WebRtcIsacfix_kInitIndLo[3];
/* mean values of pitch filter lags */
-extern const WebRtc_Word16 WebRtcIsacfix_kMeanLag2Lo[19];
-extern const WebRtc_Word16 WebRtcIsacfix_kMeanLag4Lo[9];
+extern const int16_t WebRtcIsacfix_kMeanLag2Lo[19];
+extern const int16_t WebRtcIsacfix_kMeanLag4Lo[9];
/* tables for use with medium pitch gain */
/* cdfs for quantized pitch lags */
-extern const WebRtc_UWord16 WebRtcIsacfix_kPitchLagCdf1Mid[255];
-extern const WebRtc_UWord16 WebRtcIsacfix_kPitchLagCdf2Mid[36];
-extern const WebRtc_UWord16 WebRtcIsacfix_kPitchLagCdf3Mid[2];
-extern const WebRtc_UWord16 WebRtcIsacfix_kPitchLagCdf4Mid[20];
+extern const uint16_t WebRtcIsacfix_kPitchLagCdf1Mid[255];
+extern const uint16_t WebRtcIsacfix_kPitchLagCdf2Mid[36];
+extern const uint16_t WebRtcIsacfix_kPitchLagCdf3Mid[2];
+extern const uint16_t WebRtcIsacfix_kPitchLagCdf4Mid[20];
-extern const WebRtc_UWord16 *WebRtcIsacfix_kPitchLagPtrMid[4];
+extern const uint16_t *WebRtcIsacfix_kPitchLagPtrMid[4];
/* size of first cdf table */
-extern const WebRtc_UWord16 WebRtcIsacfix_kPitchLagSizeMid[1];
+extern const uint16_t WebRtcIsacfix_kPitchLagSizeMid[1];
/* index limits and ranges */
-extern const WebRtc_Word16 WebRtcIsacfix_kLowerLimitMid[4];
-extern const WebRtc_Word16 WebRtcIsacfix_kUpperLimitMid[4];
+extern const int16_t WebRtcIsacfix_kLowerLimitMid[4];
+extern const int16_t WebRtcIsacfix_kUpperLimitMid[4];
/* initial index for arithmetic decoder */
-extern const WebRtc_UWord16 WebRtcIsacfix_kInitIndMid[3];
+extern const uint16_t WebRtcIsacfix_kInitIndMid[3];
/* mean values of pitch filter lags */
-extern const WebRtc_Word16 WebRtcIsacfix_kMeanLag2Mid[35];
-extern const WebRtc_Word16 WebRtcIsacfix_kMeanLag4Mid[19];
+extern const int16_t WebRtcIsacfix_kMeanLag2Mid[35];
+extern const int16_t WebRtcIsacfix_kMeanLag4Mid[19];
/* tables for use with large pitch gain */
/* cdfs for quantized pitch lags */
-extern const WebRtc_UWord16 WebRtcIsacfix_kPitchLagCdf1Hi[511];
-extern const WebRtc_UWord16 WebRtcIsacfix_kPitchLagCdf2Hi[68];
-extern const WebRtc_UWord16 WebRtcIsacfix_kPitchLagCdf3Hi[2];
-extern const WebRtc_UWord16 WebRtcIsacfix_kPitchLagCdf4Hi[35];
+extern const uint16_t WebRtcIsacfix_kPitchLagCdf1Hi[511];
+extern const uint16_t WebRtcIsacfix_kPitchLagCdf2Hi[68];
+extern const uint16_t WebRtcIsacfix_kPitchLagCdf3Hi[2];
+extern const uint16_t WebRtcIsacfix_kPitchLagCdf4Hi[35];
-extern const WebRtc_UWord16 *WebRtcIsacfix_kPitchLagPtrHi[4];
+extern const uint16_t *WebRtcIsacfix_kPitchLagPtrHi[4];
/* size of first cdf table */
-extern const WebRtc_UWord16 WebRtcIsacfix_kPitchLagSizeHi[1];
+extern const uint16_t WebRtcIsacfix_kPitchLagSizeHi[1];
/* index limits and ranges */
-extern const WebRtc_Word16 WebRtcIsacfix_kLowerLimitHi[4];
-extern const WebRtc_Word16 WebRtcIsacfix_kUpperLimitHi[4];
+extern const int16_t WebRtcIsacfix_kLowerLimitHi[4];
+extern const int16_t WebRtcIsacfix_kUpperLimitHi[4];
/* initial index for arithmetic decoder */
-extern const WebRtc_UWord16 WebRtcIsacfix_kInitIndHi[3];
+extern const uint16_t WebRtcIsacfix_kInitIndHi[3];
/* mean values of pitch filter lags */
-extern const WebRtc_Word16 WebRtcIsacfix_kMeanLag2Hi[67];
-extern const WebRtc_Word16 WebRtcIsacfix_kMeanLag4Hi[34];
+extern const int16_t WebRtcIsacfix_kMeanLag2Hi[67];
+extern const int16_t WebRtcIsacfix_kMeanLag4Hi[34];
#endif /* WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_PITCH_LAG_TABLES_H_ */
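Illustrative note (sketch only, not from the patch): the lag tables above are 16-bit cumulative distributions for the arithmetic coder — each starts at 0, is non-decreasing, and ends at 65535. A simplified sketch of how a decoder maps a 16-bit target to a symbol by locating its CDF interval; linear search is used for clarity and this is not the actual WebRtcIsacfix decode routine:
#include <stdint.h>
/* Illustrative only: return the symbol i such that cdf[i] <= target < cdf[i+1].
 * num_symbols is the table length minus one. */
static int cdf_lookup(const uint16_t* cdf, int num_symbols, uint16_t target) {
  int i = 0;
  while (i + 1 < num_symbols && cdf[i + 1] <= target)
    ++i;
  return i;
}
/* Example with WebRtcIsacfix_kPitchLagCdf2Lo (20 entries, 19 symbols):
 * a target of 30000 lies between cdf[8] = 24773 and cdf[9] = 31983,
 * so cdf_lookup(WebRtcIsacfix_kPitchLagCdf2Lo, 19, 30000) returns 8. */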
diff --git a/webrtc/modules/audio_coding/codecs/isac/fix/source/settings.h b/webrtc/modules/audio_coding/codecs/isac/fix/source/settings.h
index da88ba2..2149480 100644
--- a/webrtc/modules/audio_coding/codecs/isac/fix/source/settings.h
+++ b/webrtc/modules/audio_coding/codecs/isac/fix/source/settings.h
@@ -22,11 +22,11 @@
/* sampling frequency (Hz) */
#define FS 16000
/* 1.5 times Sampling frequency */
-#define FS_1_HALF (WebRtc_UWord32) 24000
+#define FS_1_HALF (uint32_t) 24000
/* Three times Sampling frequency */
-#define FS3 (WebRtc_UWord32) 48000
+#define FS3 (uint32_t) 48000
/* Eight times Sampling frequency */
-#define FS8 (WebRtc_UWord32) 128000
+#define FS8 (uint32_t) 128000
/* number of samples per frame (either 480 (30ms) or 960 (60ms)) */
#define INITIAL_FRAMESAMPLES 960
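Illustrative note (sketch only, not from the patch): quick arithmetic checks of the constants above — with FS = 16000 the frame lengths quoted in the comment and the scaled rates follow directly:
#include <assert.h>
#include <stdint.h>
#include "settings.h"
static void settings_sanity_check(void) {
  assert(FS * 30 / 1000 == 480);               /* samples in a 30 ms frame */
  assert(FS * 60 / 1000 == 960);               /* samples in a 60 ms frame */
  assert(FS_1_HALF == (uint32_t)(3 * FS / 2));
  assert(FS3 == (uint32_t)(3 * FS));
  assert(FS8 == (uint32_t)(8 * FS));
}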
diff --git a/webrtc/modules/audio_coding/codecs/isac/fix/source/spectrum_ar_model_tables.c b/webrtc/modules/audio_coding/codecs/isac/fix/source/spectrum_ar_model_tables.c
index 81b932f..cf2dea7 100644
--- a/webrtc/modules/audio_coding/codecs/isac/fix/source/spectrum_ar_model_tables.c
+++ b/webrtc/modules/audio_coding/codecs/isac/fix/source/spectrum_ar_model_tables.c
@@ -22,84 +22,84 @@
/********************* AR Coefficient Tables ************************/
/* cdf for quantized reflection coefficient 1 */
-const WebRtc_UWord16 WebRtcIsacfix_kRc1Cdf[12] = {
+const uint16_t WebRtcIsacfix_kRc1Cdf[12] = {
0, 2, 4, 129, 7707, 57485, 65495, 65527, 65529, 65531,
65533, 65535
};
/* cdf for quantized reflection coefficient 2 */
-const WebRtc_UWord16 WebRtcIsacfix_kRc2Cdf[12] = {
+const uint16_t WebRtcIsacfix_kRc2Cdf[12] = {
0, 2, 4, 7, 531, 25298, 64525, 65526, 65529, 65531,
65533, 65535
};
/* cdf for quantized reflection coefficient 3 */
-const WebRtc_UWord16 WebRtcIsacfix_kRc3Cdf[12] = {
+const uint16_t WebRtcIsacfix_kRc3Cdf[12] = {
0, 2, 4, 6, 620, 22898, 64843, 65527, 65529, 65531,
65533, 65535
};
/* cdf for quantized reflection coefficient 4 */
-const WebRtc_UWord16 WebRtcIsacfix_kRc4Cdf[12] = {
+const uint16_t WebRtcIsacfix_kRc4Cdf[12] = {
0, 2, 4, 6, 35, 10034, 60733, 65506, 65529, 65531,
65533, 65535
};
/* cdf for quantized reflection coefficient 5 */
-const WebRtc_UWord16 WebRtcIsacfix_kRc5Cdf[12] = {
+const uint16_t WebRtcIsacfix_kRc5Cdf[12] = {
0, 2, 4, 6, 36, 7567, 56727, 65385, 65529, 65531,
65533, 65535
};
/* cdf for quantized reflection coefficient 6 */
-const WebRtc_UWord16 WebRtcIsacfix_kRc6Cdf[12] = {
+const uint16_t WebRtcIsacfix_kRc6Cdf[12] = {
0, 2, 4, 6, 14, 6579, 57360, 65409, 65529, 65531,
65533, 65535
};
/* representation levels for quantized reflection coefficient 1 */
-const WebRtc_Word16 WebRtcIsacfix_kRc1Levels[11] = {
+const int16_t WebRtcIsacfix_kRc1Levels[11] = {
-32104, -29007, -23202, -15496, -9279, -2577, 5934, 17535, 24512, 29503, 32104
};
/* representation levels for quantized reflection coefficient 2 */
-const WebRtc_Word16 WebRtcIsacfix_kRc2Levels[11] = {
+const int16_t WebRtcIsacfix_kRc2Levels[11] = {
-32104, -29503, -23494, -15261, -7309, -1399, 6158, 16381, 24512, 29503, 32104
};
/* representation levels for quantized reflection coefficient 3 */
-const WebRtc_Word16 WebRtcIsacfix_kRc3Levels[11] = {
+const int16_t WebRtcIsacfix_kRc3Levels[11] = {
-32104, -29503, -23157, -15186, -7347, -1359, 5829, 17535, 24512, 29503, 32104
};
/* representation levels for quantized reflection coefficient 4 */
-const WebRtc_Word16 WebRtcIsacfix_kRc4Levels[11] = {
+const int16_t WebRtcIsacfix_kRc4Levels[11] = {
-32104, -29503, -24512, -15362, -6665, -342, 6596, 14585, 24512, 29503, 32104
};
/* representation levels for quantized reflection coefficient 5 */
-const WebRtc_Word16 WebRtcIsacfix_kRc5Levels[11] = {
+const int16_t WebRtcIsacfix_kRc5Levels[11] = {
-32104, -29503, -24512, -15005, -6564, -106, 7123, 14920, 24512, 29503, 32104
};
/* representation levels for quantized reflection coefficient 6 */
-const WebRtc_Word16 WebRtcIsacfix_kRc6Levels[11] = {
+const int16_t WebRtcIsacfix_kRc6Levels[11] = {
-32104, -29503, -24512, -15096, -6656, -37, 7036, 14847, 24512, 29503, 32104
};
/* quantization boundary levels for reflection coefficients */
-const WebRtc_Word16 WebRtcIsacfix_kRcBound[12] = {
+const int16_t WebRtcIsacfix_kRcBound[12] = {
-32768, -31441, -27566, -21458, -13612, -4663,
4663, 13612, 21458, 27566, 31441, 32767
};
/* initial index for AR reflection coefficient quantizer and cdf table search */
-const WebRtc_UWord16 WebRtcIsacfix_kRcInitInd[6] = {
+const uint16_t WebRtcIsacfix_kRcInitInd[6] = {
5, 5, 5, 5, 5, 5
};
/* pointers to AR cdf tables */
-const WebRtc_UWord16 *WebRtcIsacfix_kRcCdfPtr[AR_ORDER] = {
+const uint16_t *WebRtcIsacfix_kRcCdfPtr[AR_ORDER] = {
WebRtcIsacfix_kRc1Cdf,
WebRtcIsacfix_kRc2Cdf,
WebRtcIsacfix_kRc3Cdf,
@@ -109,7 +109,7 @@
};
/* pointers to AR representation levels tables */
-const WebRtc_Word16 *WebRtcIsacfix_kRcLevPtr[AR_ORDER] = {
+const int16_t *WebRtcIsacfix_kRcLevPtr[AR_ORDER] = {
WebRtcIsacfix_kRc1Levels,
WebRtcIsacfix_kRc2Levels,
WebRtcIsacfix_kRc3Levels,
@@ -122,30 +122,30 @@
/******************** GAIN Coefficient Tables ***********************/
/* cdf for Gain coefficient */
-const WebRtc_UWord16 WebRtcIsacfix_kGainCdf[19] = {
+const uint16_t WebRtcIsacfix_kGainCdf[19] = {
0, 2, 4, 6, 8, 10, 12, 14, 16, 1172,
11119, 29411, 51699, 64445, 65527, 65529, 65531, 65533, 65535
};
/* representation levels for quantized squared Gain coefficient */
-const WebRtc_Word32 WebRtcIsacfix_kGain2Lev[18] = {
+const int32_t WebRtcIsacfix_kGain2Lev[18] = {
128, 128, 128, 128, 128, 215, 364, 709, 1268,
1960, 3405, 6078, 11286, 17827, 51918, 134498, 487432, 2048000
};
/* quantization boundary levels for squared Gain coefficient */
-const WebRtc_Word32 WebRtcIsacfix_kGain2Bound[19] = {
+const int32_t WebRtcIsacfix_kGain2Bound[19] = {
0, 21, 35, 59, 99, 166, 280, 475, 815, 1414,
2495, 4505, 8397, 16405, 34431, 81359, 240497, 921600, 0x7FFFFFFF
};
/* pointers to Gain cdf table */
-const WebRtc_UWord16 *WebRtcIsacfix_kGainPtr[1] = {
+const uint16_t *WebRtcIsacfix_kGainPtr[1] = {
WebRtcIsacfix_kGainCdf
};
/* gain initial index for gain quantizer and cdf table search */
-const WebRtc_UWord16 WebRtcIsacfix_kGainInitInd[1] = {
+const uint16_t WebRtcIsacfix_kGainInitInd[1] = {
11
};
@@ -153,7 +153,7 @@
/************************* Cosine Tables ****************************/
/* cosine table */
-const WebRtc_Word16 WebRtcIsacfix_kCos[6][60] = {
+const int16_t WebRtcIsacfix_kCos[6][60] = {
{ 512, 512, 511, 510, 508, 507, 505, 502, 499, 496,
493, 489, 485, 480, 476, 470, 465, 459, 453, 447,
440, 433, 426, 418, 410, 402, 394, 385, 376, 367,
diff --git a/webrtc/modules/audio_coding/codecs/isac/fix/source/spectrum_ar_model_tables.h b/webrtc/modules/audio_coding/codecs/isac/fix/source/spectrum_ar_model_tables.h
index b506d0e..115509a 100644
--- a/webrtc/modules/audio_coding/codecs/isac/fix/source/spectrum_ar_model_tables.h
+++ b/webrtc/modules/audio_coding/codecs/isac/fix/source/spectrum_ar_model_tables.h
@@ -25,72 +25,72 @@
/********************* AR Coefficient Tables ************************/
/* cdf for quantized reflection coefficient 1 */
-extern const WebRtc_UWord16 WebRtcIsacfix_kRc1Cdf[12];
+extern const uint16_t WebRtcIsacfix_kRc1Cdf[12];
/* cdf for quantized reflection coefficient 2 */
-extern const WebRtc_UWord16 WebRtcIsacfix_kRc2Cdf[12];
+extern const uint16_t WebRtcIsacfix_kRc2Cdf[12];
/* cdf for quantized reflection coefficient 3 */
-extern const WebRtc_UWord16 WebRtcIsacfix_kRc3Cdf[12];
+extern const uint16_t WebRtcIsacfix_kRc3Cdf[12];
/* cdf for quantized reflection coefficient 4 */
-extern const WebRtc_UWord16 WebRtcIsacfix_kRc4Cdf[12];
+extern const uint16_t WebRtcIsacfix_kRc4Cdf[12];
/* cdf for quantized reflection coefficient 5 */
-extern const WebRtc_UWord16 WebRtcIsacfix_kRc5Cdf[12];
+extern const uint16_t WebRtcIsacfix_kRc5Cdf[12];
/* cdf for quantized reflection coefficient 6 */
-extern const WebRtc_UWord16 WebRtcIsacfix_kRc6Cdf[12];
+extern const uint16_t WebRtcIsacfix_kRc6Cdf[12];
/* representation levels for quantized reflection coefficient 1 */
-extern const WebRtc_Word16 WebRtcIsacfix_kRc1Levels[11];
+extern const int16_t WebRtcIsacfix_kRc1Levels[11];
/* representation levels for quantized reflection coefficient 2 */
-extern const WebRtc_Word16 WebRtcIsacfix_kRc2Levels[11];
+extern const int16_t WebRtcIsacfix_kRc2Levels[11];
/* representation levels for quantized reflection coefficient 3 */
-extern const WebRtc_Word16 WebRtcIsacfix_kRc3Levels[11];
+extern const int16_t WebRtcIsacfix_kRc3Levels[11];
/* representation levels for quantized reflection coefficient 4 */
-extern const WebRtc_Word16 WebRtcIsacfix_kRc4Levels[11];
+extern const int16_t WebRtcIsacfix_kRc4Levels[11];
/* representation levels for quantized reflection coefficient 5 */
-extern const WebRtc_Word16 WebRtcIsacfix_kRc5Levels[11];
+extern const int16_t WebRtcIsacfix_kRc5Levels[11];
/* representation levels for quantized reflection coefficient 6 */
-extern const WebRtc_Word16 WebRtcIsacfix_kRc6Levels[11];
+extern const int16_t WebRtcIsacfix_kRc6Levels[11];
/* quantization boundary levels for reflection coefficients */
-extern const WebRtc_Word16 WebRtcIsacfix_kRcBound[12];
+extern const int16_t WebRtcIsacfix_kRcBound[12];
/* initial indices for AR reflection coefficient quantizer and cdf table search */
-extern const WebRtc_UWord16 WebRtcIsacfix_kRcInitInd[AR_ORDER];
+extern const uint16_t WebRtcIsacfix_kRcInitInd[AR_ORDER];
/* pointers to AR cdf tables */
-extern const WebRtc_UWord16 *WebRtcIsacfix_kRcCdfPtr[AR_ORDER];
+extern const uint16_t *WebRtcIsacfix_kRcCdfPtr[AR_ORDER];
/* pointers to AR representation levels tables */
-extern const WebRtc_Word16 *WebRtcIsacfix_kRcLevPtr[AR_ORDER];
+extern const int16_t *WebRtcIsacfix_kRcLevPtr[AR_ORDER];
/******************** GAIN Coefficient Tables ***********************/
/* cdf for Gain coefficient */
-extern const WebRtc_UWord16 WebRtcIsacfix_kGainCdf[19];
+extern const uint16_t WebRtcIsacfix_kGainCdf[19];
/* representation levels for quantized Gain coefficient */
-extern const WebRtc_Word32 WebRtcIsacfix_kGain2Lev[18];
+extern const int32_t WebRtcIsacfix_kGain2Lev[18];
/* squared quantization boundary levels for Gain coefficient */
-extern const WebRtc_Word32 WebRtcIsacfix_kGain2Bound[19];
+extern const int32_t WebRtcIsacfix_kGain2Bound[19];
/* pointer to Gain cdf table */
-extern const WebRtc_UWord16 *WebRtcIsacfix_kGainPtr[1];
+extern const uint16_t *WebRtcIsacfix_kGainPtr[1];
/* Gain initial index for gain quantizer and cdf table search */
-extern const WebRtc_UWord16 WebRtcIsacfix_kGainInitInd[1];
+extern const uint16_t WebRtcIsacfix_kGainInitInd[1];
/************************* Cosine Tables ****************************/
/* Cosine table */
-extern const WebRtc_Word16 WebRtcIsacfix_kCos[6][60];
+extern const int16_t WebRtcIsacfix_kCos[6][60];
#endif /* WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_SPECTRUM_AR_MODEL_TABLES_H_ */
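Illustrative note (sketch only, not from the patch): each reflection coefficient gets a matched triple of tables above — a CDF for the entropy coder, 11 representation levels in Q15, and the shared 12-entry boundary table WebRtcIsacfix_kRcBound. A simplified scalar-quantization sketch against the shared boundaries; the real encoder additionally uses WebRtcIsacfix_kRcInitInd and the per-coefficient CDFs, which are not reproduced here:
#include <stdint.h>
#include "spectrum_ar_model_tables.h"
/* Illustrative only: index of the quantization cell containing a Q15
 * reflection coefficient, i.e. the i with bound[i] <= rc < bound[i+1]. */
static int quantize_rc_q15(int16_t rc_q15) {
  int i = 0;
  while (i + 2 < 12 && WebRtcIsacfix_kRcBound[i + 1] <= rc_q15)
    ++i;
  return i;  /* e.g. representation level WebRtcIsacfix_kRc1Levels[i] */
}
/* Example: rc_q15 = 16384 (0.5 in Q15) falls between 13612 and 21458, so
 * index 7 is selected; the matching level kRc1Levels[7] = 17535 (~0.535). */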
diff --git a/webrtc/modules/audio_coding/codecs/isac/fix/source/structs.h b/webrtc/modules/audio_coding/codecs/isac/fix/source/structs.h
index c038a43..4d04356 100644
--- a/webrtc/modules/audio_coding/codecs/isac/fix/source/structs.h
+++ b/webrtc/modules/audio_coding/codecs/isac/fix/source/structs.h
@@ -26,11 +26,11 @@
/* Bitstream struct for decoder */
typedef struct Bitstreamstruct_dec {
- WebRtc_UWord16 *stream; /* Pointer to bytestream to decode */
- WebRtc_UWord32 W_upper; /* Upper boundary of interval W */
- WebRtc_UWord32 streamval;
- WebRtc_UWord16 stream_index; /* Index to the current position in bytestream */
- WebRtc_Word16 full; /* 0 - first byte in memory filled, second empty*/
+ uint16_t *stream; /* Pointer to bytestream to decode */
+ uint32_t W_upper; /* Upper boundary of interval W */
+ uint32_t streamval;
+ uint16_t stream_index; /* Index to the current position in bytestream */
+ int16_t full; /* 0 - first byte in memory filled, second empty*/
/* 1 - both bytes are empty (we just filled the previous memory */
} Bitstr_dec;
@@ -38,11 +38,11 @@
/* Bitstream struct for encoder */
typedef struct Bitstreamstruct_enc {
- WebRtc_UWord16 stream[STREAM_MAXW16_60MS]; /* Vector for adding encoded bytestream */
- WebRtc_UWord32 W_upper; /* Upper boundary of interval W */
- WebRtc_UWord32 streamval;
- WebRtc_UWord16 stream_index; /* Index to the current position in bytestream */
- WebRtc_Word16 full; /* 0 - first byte in memory filled, second empty*/
+ uint16_t stream[STREAM_MAXW16_60MS]; /* Vector for adding encoded bytestream */
+ uint32_t W_upper; /* Upper boundary of interval W */
+ uint32_t streamval;
+ uint16_t stream_index; /* Index to the current position in bytestream */
+ int16_t full; /* 0 - first byte in memory filled, second empty*/
/* 1 - both bytes are empty (we just filled the previous memory */
} Bitstr_enc;
@@ -50,19 +50,19 @@
typedef struct {
- WebRtc_Word16 DataBufferLoQ0[WINLEN];
- WebRtc_Word16 DataBufferHiQ0[WINLEN];
+ int16_t DataBufferLoQ0[WINLEN];
+ int16_t DataBufferHiQ0[WINLEN];
- WebRtc_Word32 CorrBufLoQQ[ORDERLO+1];
- WebRtc_Word32 CorrBufHiQQ[ORDERHI+1];
+ int32_t CorrBufLoQQ[ORDERLO+1];
+ int32_t CorrBufHiQQ[ORDERHI+1];
- WebRtc_Word16 CorrBufLoQdom[ORDERLO+1];
- WebRtc_Word16 CorrBufHiQdom[ORDERHI+1];
+ int16_t CorrBufLoQdom[ORDERLO+1];
+ int16_t CorrBufHiQdom[ORDERHI+1];
- WebRtc_Word32 PreStateLoGQ15[ORDERLO+1];
- WebRtc_Word32 PreStateHiGQ15[ORDERHI+1];
+ int32_t PreStateLoGQ15[ORDERLO+1];
+ int32_t PreStateHiGQ15[ORDERHI+1];
- WebRtc_UWord32 OldEnergy;
+ uint32_t OldEnergy;
} MaskFiltstr_enc;
@@ -70,10 +70,10 @@
typedef struct {
- WebRtc_Word16 PostStateLoGQ0[ORDERLO+1];
- WebRtc_Word16 PostStateHiGQ0[ORDERHI+1];
+ int16_t PostStateLoGQ0[ORDERLO+1];
+ int16_t PostStateHiGQ0[ORDERHI+1];
- WebRtc_UWord32 OldEnergy;
+ uint32_t OldEnergy;
} MaskFiltstr_dec;
@@ -88,13 +88,13 @@
//state vectors for each of the two analysis filters
- WebRtc_Word32 INSTAT1_fix[2*(QORDER-1)];
- WebRtc_Word32 INSTAT2_fix[2*(QORDER-1)];
- WebRtc_Word16 INLABUF1_fix[QLOOKAHEAD];
- WebRtc_Word16 INLABUF2_fix[QLOOKAHEAD];
+ int32_t INSTAT1_fix[2*(QORDER-1)];
+ int32_t INSTAT2_fix[2*(QORDER-1)];
+ int16_t INLABUF1_fix[QLOOKAHEAD];
+ int16_t INLABUF2_fix[QLOOKAHEAD];
/* High pass filter */
- WebRtc_Word32 HPstates_fix[HPORDER];
+ int32_t HPstates_fix[HPORDER];
} PreFiltBankstr;
@@ -102,13 +102,13 @@
typedef struct {
//state vectors for each of the two analysis filters
- WebRtc_Word32 STATE_0_LOWER_fix[2*POSTQORDER];
- WebRtc_Word32 STATE_0_UPPER_fix[2*POSTQORDER];
+ int32_t STATE_0_LOWER_fix[2*POSTQORDER];
+ int32_t STATE_0_UPPER_fix[2*POSTQORDER];
/* High pass filter */
- WebRtc_Word32 HPstates1_fix[HPORDER];
- WebRtc_Word32 HPstates2_fix[HPORDER];
+ int32_t HPstates1_fix[HPORDER];
+ int32_t HPstates2_fix[HPORDER];
} PostFiltBankstr;
@@ -116,14 +116,14 @@
/* data buffer for pitch filter */
- WebRtc_Word16 ubufQQ[PITCH_BUFFSIZE];
+ int16_t ubufQQ[PITCH_BUFFSIZE];
/* low pass state vector */
- WebRtc_Word16 ystateQQ[PITCH_DAMPORDER];
+ int16_t ystateQQ[PITCH_DAMPORDER];
/* old lag and gain */
- WebRtc_Word16 oldlagQ7;
- WebRtc_Word16 oldgainQ12;
+ int16_t oldlagQ7;
+ int16_t oldgainQ12;
} PitchFiltstr;
@@ -132,9 +132,9 @@
typedef struct {
//for initial estimator
- WebRtc_Word16 dec_buffer16[PITCH_CORR_LEN2+PITCH_CORR_STEP2+PITCH_MAX_LAG/2-PITCH_FRAME_LEN/2+2];
- WebRtc_Word32 decimator_state32[2*ALLPASSSECTIONS+1];
- WebRtc_Word16 inbuf[QLOOKAHEAD];
+ int16_t dec_buffer16[PITCH_CORR_LEN2+PITCH_CORR_STEP2+PITCH_MAX_LAG/2-PITCH_FRAME_LEN/2+2];
+ int32_t decimator_state32[2*ALLPASSSECTIONS+1];
+ int16_t inbuf[QLOOKAHEAD];
PitchFiltstr PFstr_wght;
PitchFiltstr PFstr;
@@ -147,41 +147,41 @@
/* Parameters used in PLC to avoid re-computation */
/* --- residual signals --- */
- WebRtc_Word16 prevPitchInvIn[FRAMESAMPLES/2];
- WebRtc_Word16 prevPitchInvOut[PITCH_MAX_LAG + 10]; // [FRAMESAMPLES/2]; save 90
- WebRtc_Word32 prevHP[PITCH_MAX_LAG + 10]; // [FRAMESAMPLES/2]; save 90
+ int16_t prevPitchInvIn[FRAMESAMPLES/2];
+ int16_t prevPitchInvOut[PITCH_MAX_LAG + 10]; // [FRAMESAMPLES/2]; save 90
+ int32_t prevHP[PITCH_MAX_LAG + 10]; // [FRAMESAMPLES/2]; save 90
- WebRtc_Word16 decayCoeffPriodic; /* how much to supress a sample */
- WebRtc_Word16 decayCoeffNoise;
- WebRtc_Word16 used; /* if PLC is used */
+ int16_t decayCoeffPriodic; /* how much to suppress a sample */
+ int16_t decayCoeffNoise;
+ int16_t used; /* if PLC is used */
- WebRtc_Word16 *lastPitchLP; // [FRAMESAMPLES/2]; saved 240;
+ int16_t *lastPitchLP; // [FRAMESAMPLES/2]; saved 240;
/* --- LPC side info --- */
- WebRtc_Word16 lofilt_coefQ15[ ORDERLO ];
- WebRtc_Word16 hifilt_coefQ15[ ORDERHI ];
- WebRtc_Word32 gain_lo_hiQ17[2];
+ int16_t lofilt_coefQ15[ ORDERLO ];
+ int16_t hifilt_coefQ15[ ORDERHI ];
+ int32_t gain_lo_hiQ17[2];
/* --- LTP side info --- */
- WebRtc_Word16 AvgPitchGain_Q12;
- WebRtc_Word16 lastPitchGain_Q12;
- WebRtc_Word16 lastPitchLag_Q7;
+ int16_t AvgPitchGain_Q12;
+ int16_t lastPitchGain_Q12;
+ int16_t lastPitchLag_Q7;
/* --- Add-overlap in recovery packet --- */
- WebRtc_Word16 overlapLP[ RECOVERY_OVERLAP ]; // [FRAMESAMPLES/2]; saved 160
+ int16_t overlapLP[ RECOVERY_OVERLAP ]; // [FRAMESAMPLES/2]; saved 160
- WebRtc_Word16 pitchCycles;
- WebRtc_Word16 A;
- WebRtc_Word16 B;
- WebRtc_Word16 pitchIndex;
- WebRtc_Word16 stretchLag;
- WebRtc_Word16 *prevPitchLP; // [ FRAMESAMPLES/2 ]; saved 240
- WebRtc_Word16 seed;
+ int16_t pitchCycles;
+ int16_t A;
+ int16_t B;
+ int16_t pitchIndex;
+ int16_t stretchLag;
+ int16_t *prevPitchLP; // [ FRAMESAMPLES/2 ]; saved 240
+ int16_t seed;
- WebRtc_Word16 std;
+ int16_t std;
} PLCstr;
@@ -189,60 +189,60 @@
/* Have instance of struct together with other iSAC structs */
typedef struct {
- WebRtc_Word16 prevFrameSizeMs; /* Previous frame size (in ms) */
- WebRtc_UWord16 prevRtpNumber; /* Previous RTP timestamp from received packet */
+ int16_t prevFrameSizeMs; /* Previous frame size (in ms) */
+ uint16_t prevRtpNumber; /* Previous RTP timestamp from received packet */
/* (in samples relative beginning) */
- WebRtc_UWord32 prevSendTime; /* Send time for previous packet, from RTP header */
- WebRtc_UWord32 prevArrivalTime; /* Arrival time for previous packet (in ms using timeGetTime()) */
- WebRtc_UWord16 prevRtpRate; /* rate of previous packet, derived from RTP timestamps (in bits/s) */
- WebRtc_UWord32 lastUpdate; /* Time since the last update of the Bottle Neck estimate (in samples) */
- WebRtc_UWord32 lastReduction; /* Time sinse the last reduction (in samples) */
- WebRtc_Word32 countUpdates; /* How many times the estimate was update in the beginning */
+ uint32_t prevSendTime; /* Send time for previous packet, from RTP header */
+ uint32_t prevArrivalTime; /* Arrival time for previous packet (in ms using timeGetTime()) */
+ uint16_t prevRtpRate; /* rate of previous packet, derived from RTP timestamps (in bits/s) */
+ uint32_t lastUpdate; /* Time since the last update of the Bottle Neck estimate (in samples) */
+ uint32_t lastReduction; /* Time since the last reduction (in samples) */
+ int32_t countUpdates; /* How many times the estimate was updated in the beginning */
/* The estimated bottle neck rate from there to here (in bits/s) */
- WebRtc_UWord32 recBw;
- WebRtc_UWord32 recBwInv;
- WebRtc_UWord32 recBwAvg;
- WebRtc_UWord32 recBwAvgQ;
+ uint32_t recBw;
+ uint32_t recBwInv;
+ uint32_t recBwAvg;
+ uint32_t recBwAvgQ;
- WebRtc_UWord32 minBwInv;
- WebRtc_UWord32 maxBwInv;
+ uint32_t minBwInv;
+ uint32_t maxBwInv;
/* The estimated mean absolute jitter value, as seen on this side (in ms) */
- WebRtc_Word32 recJitter;
- WebRtc_Word32 recJitterShortTerm;
- WebRtc_Word32 recJitterShortTermAbs;
- WebRtc_Word32 recMaxDelay;
- WebRtc_Word32 recMaxDelayAvgQ;
+ int32_t recJitter;
+ int32_t recJitterShortTerm;
+ int32_t recJitterShortTermAbs;
+ int32_t recMaxDelay;
+ int32_t recMaxDelayAvgQ;
- WebRtc_Word16 recHeaderRate; /* (assumed) bitrate for headers (bps) */
+ int16_t recHeaderRate; /* (assumed) bitrate for headers (bps) */
- WebRtc_UWord32 sendBwAvg; /* The estimated bottle neck rate from here to there (in bits/s) */
- WebRtc_Word32 sendMaxDelayAvg; /* The estimated mean absolute jitter value, as seen on the other siee (in ms) */
+ uint32_t sendBwAvg; /* The estimated bottle neck rate from here to there (in bits/s) */
+ int32_t sendMaxDelayAvg; /* The estimated mean absolute jitter value, as seen on the other side (in ms) */
- WebRtc_Word16 countRecPkts; /* number of packets received since last update */
- WebRtc_Word16 highSpeedRec; /* flag for marking that a high speed network has been detected downstream */
+ int16_t countRecPkts; /* number of packets received since last update */
+ int16_t highSpeedRec; /* flag for marking that a high speed network has been detected downstream */
/* number of consecutive pkts sent during which the bwe estimate has
remained at a value greater than the downstream threshold for determining highspeed network */
- WebRtc_Word16 countHighSpeedRec;
+ int16_t countHighSpeedRec;
/* flag indicating bwe should not adjust down immediately for very late pckts */
- WebRtc_Word16 inWaitPeriod;
+ int16_t inWaitPeriod;
/* variable holding the time of the start of a window of time when
bwe should not adjust down immediately for very late pckts */
- WebRtc_UWord32 startWaitPeriod;
+ uint32_t startWaitPeriod;
/* number of consecutive pkts sent during which the bwe estimate has
remained at a value greater than the upstream threshold for determining highspeed network */
- WebRtc_Word16 countHighSpeedSent;
+ int16_t countHighSpeedSent;
/* flag indicating the desired number of packets over threshold rate have been sent and
bwe will assume the connection is over broadband network */
- WebRtc_Word16 highSpeedSend;
+ int16_t highSpeedSend;
@@ -253,15 +253,15 @@
typedef struct {
/* boolean, flags if previous packet exceeded B.N. */
- WebRtc_Word16 PrevExceed;
+ int16_t PrevExceed;
/* ms */
- WebRtc_Word16 ExceedAgo;
+ int16_t ExceedAgo;
/* packets left to send in current burst */
- WebRtc_Word16 BurstCounter;
+ int16_t BurstCounter;
/* packets */
- WebRtc_Word16 InitCounter;
+ int16_t InitCounter;
/* ms remaining in buffer when next packet will be sent */
- WebRtc_Word16 StillBuffered;
+ int16_t StillBuffered;
} RateModel;
@@ -276,24 +276,24 @@
int startIdx;
/* Frame length in samples */
- WebRtc_Word16 framelength;
+ int16_t framelength;
/* Pitch Gain */
- WebRtc_Word16 pitchGain_index[2];
+ int16_t pitchGain_index[2];
/* Pitch Lag */
- WebRtc_Word32 meanGain[2];
- WebRtc_Word16 pitchIndex[PITCH_SUBFRAMES*2];
+ int32_t meanGain[2];
+ int16_t pitchIndex[PITCH_SUBFRAMES*2];
/* LPC */
- WebRtc_Word32 LPCcoeffs_g[12*2]; /* KLT_ORDER_GAIN = 12 */
- WebRtc_Word16 LPCindex_s[108*2]; /* KLT_ORDER_SHAPE = 108 */
- WebRtc_Word16 LPCindex_g[12*2]; /* KLT_ORDER_GAIN = 12 */
+ int32_t LPCcoeffs_g[12*2]; /* KLT_ORDER_GAIN = 12 */
+ int16_t LPCindex_s[108*2]; /* KLT_ORDER_SHAPE = 108 */
+ int16_t LPCindex_g[12*2]; /* KLT_ORDER_GAIN = 12 */
/* Encode Spec */
- WebRtc_Word16 fre[FRAMESAMPLES];
- WebRtc_Word16 fim[FRAMESAMPLES];
- WebRtc_Word16 AvgPitchGain[2];
+ int16_t fre[FRAMESAMPLES];
+ int16_t fim[FRAMESAMPLES];
+ int16_t AvgPitchGain[2];
/* Used in adaptive mode only */
int minBytes;
@@ -309,29 +309,29 @@
PitchAnalysisStruct pitchanalysisstr_obj;
RateModel rate_data_obj;
- WebRtc_Word16 buffer_index;
- WebRtc_Word16 current_framesamples;
+ int16_t buffer_index;
+ int16_t current_framesamples;
- WebRtc_Word16 data_buffer_fix[FRAMESAMPLES]; // the size was MAX_FRAMESAMPLES
+ int16_t data_buffer_fix[FRAMESAMPLES]; // the size was MAX_FRAMESAMPLES
- WebRtc_Word16 frame_nb;
- WebRtc_Word16 BottleNeck;
- WebRtc_Word16 MaxDelay;
- WebRtc_Word16 new_framelength;
- WebRtc_Word16 s2nr;
- WebRtc_UWord16 MaxBits;
+ int16_t frame_nb;
+ int16_t BottleNeck;
+ int16_t MaxDelay;
+ int16_t new_framelength;
+ int16_t s2nr;
+ uint16_t MaxBits;
- WebRtc_Word16 bitstr_seed;
+ int16_t bitstr_seed;
#ifdef WEBRTC_ISAC_FIX_NB_CALLS_ENABLED
PostFiltBankstr interpolatorstr_obj;
#endif
ISAC_SaveEncData_t *SaveEnc_ptr;
- WebRtc_Word16 payloadLimitBytes30; /* Maximum allowed number of bits for a 30 msec packet */
- WebRtc_Word16 payloadLimitBytes60; /* Maximum allowed number of bits for a 30 msec packet */
- WebRtc_Word16 maxPayloadBytes; /* Maximum allowed number of bits for both 30 and 60 msec packet */
- WebRtc_Word16 maxRateInBytes; /* Maximum allowed rate in bytes per 30 msec packet */
- WebRtc_Word16 enforceFrameSize; /* If set iSAC will never change packet size */
+ int16_t payloadLimitBytes30; /* Maximum allowed number of bits for a 30 msec packet */
+ int16_t payloadLimitBytes60; /* Maximum allowed number of bits for a 60 msec packet */
+ int16_t maxPayloadBytes; /* Maximum allowed number of bits for both 30 and 60 msec packet */
+ int16_t maxRateInBytes; /* Maximum allowed rate in bytes per 30 msec packet */
+ int16_t enforceFrameSize; /* If set iSAC will never change packet size */
} ISACFIX_EncInst_t;
@@ -357,23 +357,23 @@
ISACFIX_EncInst_t ISACenc_obj;
ISACFIX_DecInst_t ISACdec_obj;
BwEstimatorstr bwestimator_obj;
- WebRtc_Word16 CodingMode; /* 0 = adaptive; 1 = instantaneous */
- WebRtc_Word16 errorcode;
- WebRtc_Word16 initflag; /* 0 = nothing initiated; 1 = encoder or decoder */
+ int16_t CodingMode; /* 0 = adaptive; 1 = instantaneous */
+ int16_t errorcode;
+ int16_t initflag; /* 0 = nothing initiated; 1 = encoder or decoder */
/* not initiated; 2 = all initiated */
} ISACFIX_SubStruct;
typedef struct {
- WebRtc_Word32 lpcGains[12]; /* 6 lower-band & 6 upper-band we may need to double it for 60*/
+ int32_t lpcGains[12]; /* 6 lower-band & 6 upper-band we may need to double it for 60*/
/* */
- WebRtc_UWord32 W_upper; /* Upper boundary of interval W */
- WebRtc_UWord32 streamval;
- WebRtc_UWord16 stream_index; /* Index to the current position in bytestream */
- WebRtc_Word16 full; /* 0 - first byte in memory filled, second empty*/
+ uint32_t W_upper; /* Upper boundary of interval W */
+ uint32_t streamval;
+ uint16_t stream_index; /* Index to the current position in bytestream */
+ int16_t full; /* 0 - first byte in memory filled, second empty*/
/* 1 - both bytes are empty (we just filled the previous memory */
- WebRtc_UWord16 beforeLastWord;
- WebRtc_UWord16 lastWord;
+ uint16_t beforeLastWord;
+ uint16_t lastWord;
} transcode_obj;
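Illustrative note (sketch only, not from the patch): almost every field converted in this header encodes its Q-domain in its name (Q0, Q7, Q12, Q15, Q17, ...), meaning the integer holds the real value scaled by 2^n. A small helper for reading such fields; the example values below are made up for illustration:
#include <stdint.h>
/* Convert a Qn fixed-point integer to double: real = value / 2^n. */
static double q_to_double(int32_t value, int n) {
  return (double)value / (double)((int32_t)1 << n);
}
/* Examples (values are illustrative only):
 *   lastPitchGain_Q12 = 1843  ->  q_to_double(1843, 12) ~= 0.45
 *   oldlagQ7          = 8960  ->  q_to_double(8960, 7)  =  70.0 samples */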
diff --git a/webrtc/modules/audio_coding/codecs/isac/fix/source/transform.c b/webrtc/modules/audio_coding/codecs/isac/fix/source/transform.c
index c7a3e80..67e513c 100644
--- a/webrtc/modules/audio_coding/codecs/isac/fix/source/transform.c
+++ b/webrtc/modules/audio_coding/codecs/isac/fix/source/transform.c
@@ -22,14 +22,14 @@
#if (defined WEBRTC_DETECT_ARM_NEON || defined WEBRTC_ARCH_ARM_NEON)
/* Tables are defined in ARM assembly files. */
/* Cosine table 1 in Q14 */
-extern const WebRtc_Word16 WebRtcIsacfix_kCosTab1[FRAMESAMPLES/2];
+extern const int16_t WebRtcIsacfix_kCosTab1[FRAMESAMPLES/2];
/* Sine table 1 in Q14 */
-extern const WebRtc_Word16 WebRtcIsacfix_kSinTab1[FRAMESAMPLES/2];
+extern const int16_t WebRtcIsacfix_kSinTab1[FRAMESAMPLES/2];
/* Sine table 2 in Q14 */
-extern const WebRtc_Word16 WebRtcIsacfix_kSinTab2[FRAMESAMPLES/4];
+extern const int16_t WebRtcIsacfix_kSinTab2[FRAMESAMPLES/4];
#else
/* Cosine table 1 in Q14 */
-static const WebRtc_Word16 WebRtcIsacfix_kCosTab1[FRAMESAMPLES/2] = {
+static const int16_t WebRtcIsacfix_kCosTab1[FRAMESAMPLES/2] = {
16384, 16383, 16378, 16371, 16362, 16349, 16333, 16315, 16294, 16270,
16244, 16214, 16182, 16147, 16110, 16069, 16026, 15980, 15931, 15880,
15826, 15769, 15709, 15647, 15582, 15515, 15444, 15371, 15296, 15218,
@@ -58,7 +58,7 @@
};
/* Sine table 1 in Q14 */
-static const WebRtc_Word16 WebRtcIsacfix_kSinTab1[FRAMESAMPLES/2] = {
+static const int16_t WebRtcIsacfix_kSinTab1[FRAMESAMPLES/2] = {
0, 214, 429, 643, 857, 1072, 1285, 1499, 1713, 1926,
2139, 2351, 2563, 2775, 2986, 3196, 3406, 3616, 3825, 4033,
4240, 4447, 4653, 4859, 5063, 5266, 5469, 5671, 5872, 6071,
@@ -87,7 +87,7 @@
/* Sine table 2 in Q14 */
-static const WebRtc_Word16 WebRtcIsacfix_kSinTab2[FRAMESAMPLES/4] = {
+static const int16_t WebRtcIsacfix_kSinTab2[FRAMESAMPLES/4] = {
16384, -16381, 16375, -16367, 16356, -16342, 16325, -16305, 16283, -16257,
16229, -16199, 16165, -16129, 16090, -16048, 16003, -15956, 15906, -15853,
15798, -15739, 15679, -15615, 15549, -15480, 15408, -15334, 15257, -15178,
@@ -103,18 +103,18 @@
};
#endif // WEBRTC_DETECT_ARM_NEON || WEBRTC_ARCH_ARM_NEON
-void WebRtcIsacfix_Time2SpecC(WebRtc_Word16 *inre1Q9,
- WebRtc_Word16 *inre2Q9,
- WebRtc_Word16 *outreQ7,
- WebRtc_Word16 *outimQ7)
+void WebRtcIsacfix_Time2SpecC(int16_t *inre1Q9,
+ int16_t *inre2Q9,
+ int16_t *outreQ7,
+ int16_t *outimQ7)
{
int k;
- WebRtc_Word32 tmpreQ16[FRAMESAMPLES/2], tmpimQ16[FRAMESAMPLES/2];
- WebRtc_Word16 tmp1rQ14, tmp1iQ14;
- WebRtc_Word32 xrQ16, xiQ16, yrQ16, yiQ16;
- WebRtc_Word32 v1Q16, v2Q16;
- WebRtc_Word16 factQ19, sh;
+ int32_t tmpreQ16[FRAMESAMPLES/2], tmpimQ16[FRAMESAMPLES/2];
+ int16_t tmp1rQ14, tmp1iQ14;
+ int32_t xrQ16, xiQ16, yrQ16, yiQ16;
+ int32_t v1Q16, v2Q16;
+ int16_t factQ19, sh;
/* Multiply with complex exponentials and combine into one complex vector */
factQ19 = 16921; // 0.5/sqrt(240) in Q19 is round(.5/sqrt(240)*(2^19)) = 16921
@@ -141,14 +141,14 @@
//"Fastest" vectors
if (sh>=0) {
for (k=0; k<FRAMESAMPLES/2; k++) {
- inre1Q9[k] = (WebRtc_Word16) WEBRTC_SPL_LSHIFT_W32(tmpreQ16[k], sh); //Q(16+sh)
- inre2Q9[k] = (WebRtc_Word16) WEBRTC_SPL_LSHIFT_W32(tmpimQ16[k], sh); //Q(16+sh)
+ inre1Q9[k] = (int16_t) WEBRTC_SPL_LSHIFT_W32(tmpreQ16[k], sh); //Q(16+sh)
+ inre2Q9[k] = (int16_t) WEBRTC_SPL_LSHIFT_W32(tmpimQ16[k], sh); //Q(16+sh)
}
} else {
- WebRtc_Word32 round = WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)1, -sh-1);
+ int32_t round = WEBRTC_SPL_LSHIFT_W32((int32_t)1, -sh-1);
for (k=0; k<FRAMESAMPLES/2; k++) {
- inre1Q9[k] = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(tmpreQ16[k]+round, -sh); //Q(16+sh)
- inre2Q9[k] = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(tmpimQ16[k]+round, -sh); //Q(16+sh)
+ inre1Q9[k] = (int16_t) WEBRTC_SPL_RSHIFT_W32(tmpreQ16[k]+round, -sh); //Q(16+sh)
+ inre2Q9[k] = (int16_t) WEBRTC_SPL_RSHIFT_W32(tmpimQ16[k]+round, -sh); //Q(16+sh)
}
}
@@ -158,13 +158,13 @@
//"Fastest" vectors
if (sh>=0) {
for (k=0; k<FRAMESAMPLES/2; k++) {
- tmpreQ16[k] = WEBRTC_SPL_RSHIFT_W32((WebRtc_Word32)inre1Q9[k], sh); //Q(16+sh) -> Q16
- tmpimQ16[k] = WEBRTC_SPL_RSHIFT_W32((WebRtc_Word32)inre2Q9[k], sh); //Q(16+sh) -> Q16
+ tmpreQ16[k] = WEBRTC_SPL_RSHIFT_W32((int32_t)inre1Q9[k], sh); //Q(16+sh) -> Q16
+ tmpimQ16[k] = WEBRTC_SPL_RSHIFT_W32((int32_t)inre2Q9[k], sh); //Q(16+sh) -> Q16
}
} else {
for (k=0; k<FRAMESAMPLES/2; k++) {
- tmpreQ16[k] = WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)inre1Q9[k], -sh); //Q(16+sh) -> Q16
- tmpimQ16[k] = WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)inre2Q9[k], -sh); //Q(16+sh) -> Q16
+ tmpreQ16[k] = WEBRTC_SPL_LSHIFT_W32((int32_t)inre1Q9[k], -sh); //Q(16+sh) -> Q16
+ tmpimQ16[k] = WEBRTC_SPL_LSHIFT_W32((int32_t)inre2Q9[k], -sh); //Q(16+sh) -> Q16
}
}
@@ -179,36 +179,36 @@
tmp1iQ14 = WebRtcIsacfix_kSinTab2[k];
v1Q16 = WEBRTC_SPL_MUL_16_32_RSFT14(tmp1rQ14, xrQ16) - WEBRTC_SPL_MUL_16_32_RSFT14(tmp1iQ14, xiQ16);
v2Q16 = WEBRTC_SPL_MUL_16_32_RSFT14(tmp1iQ14, xrQ16) + WEBRTC_SPL_MUL_16_32_RSFT14(tmp1rQ14, xiQ16);
- outreQ7[k] = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(v1Q16, 9);
- outimQ7[k] = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(v2Q16, 9);
+ outreQ7[k] = (int16_t) WEBRTC_SPL_RSHIFT_W32(v1Q16, 9);
+ outimQ7[k] = (int16_t) WEBRTC_SPL_RSHIFT_W32(v2Q16, 9);
v1Q16 = -WEBRTC_SPL_MUL_16_32_RSFT14(tmp1iQ14, yrQ16) - WEBRTC_SPL_MUL_16_32_RSFT14(tmp1rQ14, yiQ16);
v2Q16 = -WEBRTC_SPL_MUL_16_32_RSFT14(tmp1rQ14, yrQ16) + WEBRTC_SPL_MUL_16_32_RSFT14(tmp1iQ14, yiQ16);
- outreQ7[FRAMESAMPLES/2 - 1 - k] = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(v1Q16, 9); //CalcLrIntQ(v1Q16, 9);
- outimQ7[FRAMESAMPLES/2 - 1 - k] = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(v2Q16, 9); //CalcLrIntQ(v2Q16, 9);
+ outreQ7[FRAMESAMPLES/2 - 1 - k] = (int16_t)WEBRTC_SPL_RSHIFT_W32(v1Q16, 9); //CalcLrIntQ(v1Q16, 9);
+ outimQ7[FRAMESAMPLES/2 - 1 - k] = (int16_t)WEBRTC_SPL_RSHIFT_W32(v2Q16, 9); //CalcLrIntQ(v2Q16, 9);
}
}
-void WebRtcIsacfix_Spec2TimeC(WebRtc_Word16 *inreQ7, WebRtc_Word16 *inimQ7, WebRtc_Word32 *outre1Q16, WebRtc_Word32 *outre2Q16)
+void WebRtcIsacfix_Spec2TimeC(int16_t *inreQ7, int16_t *inimQ7, int32_t *outre1Q16, int32_t *outre2Q16)
{
int k;
- WebRtc_Word16 tmp1rQ14, tmp1iQ14;
- WebRtc_Word32 xrQ16, xiQ16, yrQ16, yiQ16;
- WebRtc_Word32 tmpInRe, tmpInIm, tmpInRe2, tmpInIm2;
- WebRtc_Word16 factQ11;
- WebRtc_Word16 sh;
+ int16_t tmp1rQ14, tmp1iQ14;
+ int32_t xrQ16, xiQ16, yrQ16, yiQ16;
+ int32_t tmpInRe, tmpInIm, tmpInRe2, tmpInIm2;
+ int16_t factQ11;
+ int16_t sh;
for (k = 0; k < FRAMESAMPLES/4; k++) {
/* Move zero in time to beginning of frames */
tmp1rQ14 = -WebRtcIsacfix_kSinTab2[FRAMESAMPLES/4 - 1 - k];
tmp1iQ14 = WebRtcIsacfix_kSinTab2[k];
- tmpInRe = WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32) inreQ7[k], 9); // Q7 -> Q16
- tmpInIm = WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32) inimQ7[k], 9); // Q7 -> Q16
- tmpInRe2 = WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32) inreQ7[FRAMESAMPLES/2 - 1 - k], 9); // Q7 -> Q16
- tmpInIm2 = WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32) inimQ7[FRAMESAMPLES/2 - 1 - k], 9); // Q7 -> Q16
+ tmpInRe = WEBRTC_SPL_LSHIFT_W32((int32_t) inreQ7[k], 9); // Q7 -> Q16
+ tmpInIm = WEBRTC_SPL_LSHIFT_W32((int32_t) inimQ7[k], 9); // Q7 -> Q16
+ tmpInRe2 = WEBRTC_SPL_LSHIFT_W32((int32_t) inreQ7[FRAMESAMPLES/2 - 1 - k], 9); // Q7 -> Q16
+ tmpInIm2 = WEBRTC_SPL_LSHIFT_W32((int32_t) inimQ7[FRAMESAMPLES/2 - 1 - k], 9); // Q7 -> Q16
xrQ16 = WEBRTC_SPL_MUL_16_32_RSFT14(tmp1rQ14, tmpInRe) + WEBRTC_SPL_MUL_16_32_RSFT14(tmp1iQ14, tmpInIm);
xiQ16 = WEBRTC_SPL_MUL_16_32_RSFT14(tmp1rQ14, tmpInIm) - WEBRTC_SPL_MUL_16_32_RSFT14(tmp1iQ14, tmpInRe);
@@ -236,14 +236,14 @@
//"Fastest" vectors
if (sh>=0) {
for (k=0; k<240; k++) {
- inreQ7[k] = (WebRtc_Word16) WEBRTC_SPL_LSHIFT_W32(outre1Q16[k], sh); //Q(16+sh)
- inimQ7[k] = (WebRtc_Word16) WEBRTC_SPL_LSHIFT_W32(outre2Q16[k], sh); //Q(16+sh)
+ inreQ7[k] = (int16_t) WEBRTC_SPL_LSHIFT_W32(outre1Q16[k], sh); //Q(16+sh)
+ inimQ7[k] = (int16_t) WEBRTC_SPL_LSHIFT_W32(outre2Q16[k], sh); //Q(16+sh)
}
} else {
- WebRtc_Word32 round = WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)1, -sh-1);
+ int32_t round = WEBRTC_SPL_LSHIFT_W32((int32_t)1, -sh-1);
for (k=0; k<240; k++) {
- inreQ7[k] = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(outre1Q16[k]+round, -sh); //Q(16+sh)
- inimQ7[k] = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(outre2Q16[k]+round, -sh); //Q(16+sh)
+ inreQ7[k] = (int16_t) WEBRTC_SPL_RSHIFT_W32(outre1Q16[k]+round, -sh); //Q(16+sh)
+ inimQ7[k] = (int16_t) WEBRTC_SPL_RSHIFT_W32(outre2Q16[k]+round, -sh); //Q(16+sh)
}
}
@@ -252,13 +252,13 @@
//"Fastest" vectors
if (sh>=0) {
for (k=0; k<240; k++) {
- outre1Q16[k] = WEBRTC_SPL_RSHIFT_W32((WebRtc_Word32)inreQ7[k], sh); //Q(16+sh) -> Q16
- outre2Q16[k] = WEBRTC_SPL_RSHIFT_W32((WebRtc_Word32)inimQ7[k], sh); //Q(16+sh) -> Q16
+ outre1Q16[k] = WEBRTC_SPL_RSHIFT_W32((int32_t)inreQ7[k], sh); //Q(16+sh) -> Q16
+ outre2Q16[k] = WEBRTC_SPL_RSHIFT_W32((int32_t)inimQ7[k], sh); //Q(16+sh) -> Q16
}
} else {
for (k=0; k<240; k++) {
- outre1Q16[k] = WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)inreQ7[k], -sh); //Q(16+sh) -> Q16
- outre2Q16[k] = WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)inimQ7[k], -sh); //Q(16+sh) -> Q16
+ outre1Q16[k] = WEBRTC_SPL_LSHIFT_W32((int32_t)inreQ7[k], -sh); //Q(16+sh) -> Q16
+ outre2Q16[k] = WEBRTC_SPL_LSHIFT_W32((int32_t)inimQ7[k], -sh); //Q(16+sh) -> Q16
}
}
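Note on the rescaling idiom used throughout the transform code above: values are moved between Q-formats by a signed shift sh, left-shifting when sh >= 0 and otherwise adding round = 1 << (-sh - 1) before right-shifting, so the narrowing to int16_t rounds to nearest instead of truncating. A minimal standalone sketch of that idiom follows, using plain shifts instead of the WEBRTC_SPL_* macros; the helper name rescale_q16 is illustrative and not part of the codebase.

#include <stdint.h>
#include <stdio.h>

/* Rescale a Q16 value by a signed shift `sh` and narrow to int16_t.
 * sh >= 0: plain left shift.
 * sh <  0: add half of the final LSB first so the right shift rounds
 *          to nearest rather than simply truncating. */
static int16_t rescale_q16(int32_t xQ16, int sh) {
  if (sh >= 0)
    return (int16_t)(xQ16 << sh);
  int32_t round = (int32_t)1 << (-sh - 1);
  return (int16_t)((xQ16 + round) >> -sh);
}

int main(void) {
  /* 0.5 in Q16 is 32768; shifting down by 10 gives 32 (0.5 in Q6). */
  printf("%d\n", rescale_q16(32768, -10));        /* prints 32 */
  printf("%d\n", rescale_q16(32768 + 511, -10));  /* still 32 */
  printf("%d\n", rescale_q16(32768 + 512, -10));  /* rounds up to 33 */
  return 0;
}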
diff --git a/webrtc/modules/audio_coding/codecs/isac/fix/source/transform.h b/webrtc/modules/audio_coding/codecs/isac/fix/source/transform.h
index d9bd462..564385e 100644
--- a/webrtc/modules/audio_coding/codecs/isac/fix/source/transform.h
+++ b/webrtc/modules/audio_coding/codecs/isac/fix/source/transform.h
@@ -19,16 +19,16 @@
#include "webrtc/typedefs.h"
/* Cosine table 1 in Q14 */
-extern const WebRtc_Word16 kCosTab1[FRAMESAMPLES/2];
+extern const int16_t kCosTab1[FRAMESAMPLES/2];
/* Sine table 1 in Q14 */
-extern const WebRtc_Word16 kSinTab1[FRAMESAMPLES/2];
+extern const int16_t kSinTab1[FRAMESAMPLES/2];
/* Cosine table 2 in Q14 */
-extern const WebRtc_Word16 kCosTab2[FRAMESAMPLES/4];
+extern const int16_t kCosTab2[FRAMESAMPLES/4];
/* Sine table 2 in Q14 */
-extern const WebRtc_Word16 kSinTab2[FRAMESAMPLES/4];
+extern const int16_t kSinTab2[FRAMESAMPLES/4];
#ifdef __cplusplus
} /* extern "C" */
diff --git a/webrtc/modules/audio_coding/codecs/isac/fix/source/transform_neon.S b/webrtc/modules/audio_coding/codecs/isac/fix/source/transform_neon.S
index 135f130..46682ac 100644
--- a/webrtc/modules/audio_coding/codecs/isac/fix/source/transform_neon.S
+++ b/webrtc/modules/audio_coding/codecs/isac/fix/source/transform_neon.S
@@ -20,10 +20,10 @@
GLOBAL_LABEL WebRtcIsacfix_kCosTab1
GLOBAL_LABEL WebRtcIsacfix_kSinTab2
-@ void WebRtcIsacfix_Time2SpecNeon(WebRtc_Word16* inre1Q9,
-@ WebRtc_Word16* inre2Q9,
-@ WebRtc_Word16* outreQ7,
-@ WebRtc_Word16* outimQ7);
+@ void WebRtcIsacfix_Time2SpecNeon(int16_t* inre1Q9,
+@ int16_t* inre2Q9,
+@ int16_t* outreQ7,
+@ int16_t* outimQ7);
DEFINE_FUNCTION WebRtcIsacfix_Time2SpecNeon
.align 2
@@ -317,10 +317,10 @@
.short 3406, 3196, 2986, 2775, 2563, 2351, 2139, 1926
.short 1713, 1499, 1285, 1072, 857, 643, 429, 214
-@ void WebRtcIsacfix_Spec2TimeNeon(WebRtc_Word16 *inreQ7,
-@ WebRtc_Word16 *inimQ7,
-@ WebRtc_Word32 *outre1Q16,
-@ WebRtc_Word32 *outre2Q16);
+@ void WebRtcIsacfix_Spec2TimeNeon(int16_t *inreQ7,
+@ int16_t *inimQ7,
+@ int32_t *outre1Q16,
+@ int32_t *outre2Q16);
DEFINE_FUNCTION WebRtcIsacfix_Spec2TimeNeon
.align 2
diff --git a/webrtc/modules/audio_coding/codecs/isac/fix/test/Isac_test.cc b/webrtc/modules/audio_coding/codecs/isac/fix/test/Isac_test.cc
index 2791db4..61d79e4 100644
--- a/webrtc/modules/audio_coding/codecs/isac/fix/test/Isac_test.cc
+++ b/webrtc/modules/audio_coding/codecs/isac/fix/test/Isac_test.cc
@@ -25,9 +25,9 @@
typedef struct {
- WebRtc_UWord32 arrival_time; /* samples */
- WebRtc_UWord32 sample_count; /* samples */
- WebRtc_UWord16 rtp_number;
+ uint32_t arrival_time; /* samples */
+ uint32_t sample_count; /* samples */
+ uint16_t rtp_number;
} BottleNeckModel;
void get_arrival_time(int current_framesamples, /* samples */
@@ -60,18 +60,18 @@
/* Parameters */
FILE *pInFile, *pOutFile, *pChcFile;
- WebRtc_Word8 inFile[40];
- WebRtc_Word8 outFile[40];
- WebRtc_Word8 chcFile[40];
- WebRtc_Word8 codec[10];
- WebRtc_Word16 bitrt, spType, size;
- WebRtc_UWord16 frameLen;
- WebRtc_Word16 sigOut[1000], sigIn[1000];
- WebRtc_UWord16 bitStream[500]; /* double to 32 kbps for 60 ms */
+ int8_t inFile[40];
+ int8_t outFile[40];
+ int8_t chcFile[40];
+ int8_t codec[10];
+ int16_t bitrt, spType, size;
+ uint16_t frameLen;
+ int16_t sigOut[1000], sigIn[1000];
+ uint16_t bitStream[500]; /* double to 32 kbps for 60 ms */
- WebRtc_Word16 chc, ok;
+ int16_t chc, ok;
int noOfCalls, cdlen;
- WebRtc_Word16 noOfLostFrames;
+ int16_t noOfLostFrames;
int err, errtype;
BottleNeckModel BN_data;
@@ -170,12 +170,12 @@
exit(EXIT_FAILURE);
}
/* loop over frame */
- while (fread(sigIn,sizeof(WebRtc_Word16),frameLen,pInFile) == frameLen) {
+ while (fread(sigIn,sizeof(int16_t),frameLen,pInFile) == frameLen) {
noOfCalls=0;
cdlen=0;
while (cdlen<=0) {
- cdlen=WebRtcIsacfix_Encode(ISACfix_inst,&sigIn[noOfCalls*160],(WebRtc_Word16*)bitStream);
+ cdlen=WebRtcIsacfix_Encode(ISACfix_inst,&sigIn[noOfCalls*160],(int16_t*)bitStream);
if(cdlen==-1){
errtype=WebRtcIsacfix_GetErrorCode(ISACfix_inst);
printf("\n\nError in encoder: %d.\n\n", errtype);
@@ -186,7 +186,7 @@
if(_stricmp("none", chcFile)){
- if (fread(&chc,sizeof(WebRtc_Word16),1,pChcFile)!=1) /* packet may be lost */
+ if (fread(&chc,sizeof(int16_t),1,pChcFile)!=1) /* packet may be lost */
break;
} else {
chc = 1; /* packets never lost */
diff --git a/webrtc/modules/audio_coding/codecs/isac/fix/test/kenny.cc b/webrtc/modules/audio_coding/codecs/isac/fix/test/kenny.cc
index ee70eb0..c3c6f13 100644
--- a/webrtc/modules/audio_coding/codecs/isac/fix/test/kenny.cc
+++ b/webrtc/modules/audio_coding/codecs/isac/fix/test/kenny.cc
@@ -27,11 +27,11 @@
#define FS 16000 /* sampling frequency (Hz) */
/* Function for reading audio data from PCM file */
-int readframe(WebRtc_Word16 *data, FILE *inp, int length) {
+int readframe(int16_t *data, FILE *inp, int length) {
short k, rlen, status = 0;
- rlen = fread(data, sizeof(WebRtc_Word16), length, inp);
+ rlen = fread(data, sizeof(int16_t), length, inp);
if (rlen < length) {
for (k = rlen; k < length; k++)
data[k] = 0;
@@ -43,10 +43,10 @@
/* Struct for bottleneck model */
typedef struct {
- WebRtc_UWord32 send_time; /* samples */
- WebRtc_UWord32 arrival_time; /* samples */
- WebRtc_UWord32 sample_count; /* samples */
- WebRtc_UWord16 rtp_number;
+ uint32_t send_time; /* samples */
+ uint32_t arrival_time; /* samples */
+ uint32_t sample_count; /* samples */
+ uint16_t rtp_number;
} BottleNeckModel;
void get_arrival_time(int current_framesamples, /* samples */
@@ -99,25 +99,25 @@
int endfile;
int i, errtype, h = 0, k, packetLossPercent = 0;
- WebRtc_Word16 CodingMode;
- WebRtc_Word16 bottleneck;
- WebRtc_Word16 framesize = 30; /* ms */
+ int16_t CodingMode;
+ int16_t bottleneck;
+ int16_t framesize = 30; /* ms */
int cur_framesmpls, err = 0, lostPackets = 0;
/* Runtime statistics */
double starttime, runtime, length_file;
- WebRtc_Word16 stream_len = 0;
- WebRtc_Word16 framecnt, declen = 0;
- WebRtc_Word16 shortdata[FRAMESAMPLES_10ms];
- WebRtc_Word16 decoded[MAX_FRAMESAMPLES];
- WebRtc_UWord16 streamdata[500];
- WebRtc_Word16 speechType[1];
- WebRtc_Word16 prevFrameSize = 1;
- WebRtc_Word16 rateBPS = 0;
- WebRtc_Word16 fixedFL = 0;
- WebRtc_Word16 payloadSize = 0;
- WebRtc_Word32 payloadRate = 0;
+ int16_t stream_len = 0;
+ int16_t framecnt, declen = 0;
+ int16_t shortdata[FRAMESAMPLES_10ms];
+ int16_t decoded[MAX_FRAMESAMPLES];
+ uint16_t streamdata[500];
+ int16_t speechType[1];
+ int16_t prevFrameSize = 1;
+ int16_t rateBPS = 0;
+ int16_t fixedFL = 0;
+ int16_t payloadSize = 0;
+ int32_t payloadRate = 0;
int setControlBWE = 0;
int readLoss;
FILE *plFile = NULL;
@@ -127,7 +127,7 @@
int totalbits =0;
int totalsmpls =0;
- WebRtc_Word16 testNum, testCE;
+ int16_t testNum, testCE;
FILE *fp_gns = NULL;
int gns = 0;
@@ -135,7 +135,7 @@
char gns_file[100];
int nbTest = 0;
- WebRtc_Word16 lostFrame;
+ int16_t lostFrame;
float scale = (float)0.7;
/* only one structure used for ISAC encoder */
ISACFIX_MainStruct *ISAC_main_inst = NULL;
@@ -388,7 +388,7 @@
exit(0);
}
}
- bottleneck = (WebRtc_Word16)aux_var;
+ bottleneck = (int16_t)aux_var;
/* Bottleneck is a cosine function
* Matlab code for writing the bottleneck file:
* BottleNeck_10ms = 20e3 + 10e3 * cos((0:5999)/5999*2*pi);
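The comment above gives the Matlab one-liner used to generate a time-varying bottleneck trace (20 kbps mean, +/-10 kbps cosine over 6000 steps of 10 ms). A C sketch that writes an equivalent trace is shown below; the output file name and the one-value-per-line text format are illustrative assumptions only and should be checked against how the test tool actually reads its bottleneck file.

#include <math.h>
#include <stdio.h>

#ifndef M_PI
#define M_PI 3.14159265358979323846
#endif

/* Write a bottleneck trace matching the Matlab comment above:
 *   BottleNeck_10ms = 20e3 + 10e3 * cos((0:5999)/5999*2*pi);
 * One value per 10 ms step, 6000 steps total. File name and text
 * format are assumptions for illustration. */
int main(void) {
  FILE* f = fopen("bottleneck.txt", "w");
  if (f == NULL) return 1;
  for (int k = 0; k < 6000; ++k) {
    double bps = 20e3 + 10e3 * cos((double)k / 5999.0 * 2.0 * M_PI);
    fprintf(f, "%d\n", (int)(bps + 0.5));
  }
  fclose(f);
  return 0;
}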
@@ -565,18 +565,18 @@
/* Encode */
stream_len = WebRtcIsacfix_Encode(ISAC_main_inst,
shortdata,
- (WebRtc_Word16*)streamdata);
+ (int16_t*)streamdata);
/* If packet is ready, and CE testing, call the different API
functions from the internal API. */
if (stream_len>0) {
if (testCE == 1) {
- err = WebRtcIsacfix_ReadBwIndex((WebRtc_Word16*)streamdata, &bwe);
+ err = WebRtcIsacfix_ReadBwIndex((int16_t*)streamdata, &bwe);
stream_len = WebRtcIsacfix_GetNewBitStream(
ISAC_main_inst,
bwe,
scale,
- (WebRtc_Word16*)streamdata);
+ (int16_t*)streamdata);
} else if (testCE == 2) {
/* transcode function not supported */
} else if (testCE == 3) {
@@ -637,7 +637,7 @@
exit(0);
}
}
- bottleneck = (WebRtc_Word16)aux_var;
+ bottleneck = (int16_t)aux_var;
if (CodingMode == 1) {
WebRtcIsacfix_Control(ISAC_main_inst, bottleneck, framesize);
}
@@ -712,7 +712,7 @@
}
if( readLoss == 1 ) {
- if( fread( &lostFrame, sizeof(WebRtc_Word16), 1, plFile ) != 1 ) {
+ if( fread( &lostFrame, sizeof(int16_t), 1, plFile ) != 1 ) {
rewind( plFile );
}
lostFrame = !lostFrame;
@@ -740,7 +740,7 @@
if (nbTest !=2 ) {
short FL;
/* Call getFramelen, only used here for function test */
- err = WebRtcIsacfix_ReadFrameLen((WebRtc_Word16*)streamdata, &FL);
+ err = WebRtcIsacfix_ReadFrameLen((int16_t*)streamdata, &FL);
declen = WebRtcIsacfix_Decode( ISAC_main_inst, streamdata, stream_len,
decoded, speechType );
/* Error check */
@@ -768,7 +768,7 @@
}
/* Write decoded speech frame to file */
- if (fwrite(decoded, sizeof(WebRtc_Word16),
+ if (fwrite(decoded, sizeof(int16_t),
declen, outp) != (size_t)declen) {
return -1;
}
diff --git a/webrtc/modules/audio_coding/codecs/isac/fix/test/test_iSACfixfloat.c b/webrtc/modules/audio_coding/codecs/isac/fix/test/test_iSACfixfloat.c
index 57c30ca..d5682b2 100644
--- a/webrtc/modules/audio_coding/codecs/isac/fix/test/test_iSACfixfloat.c
+++ b/webrtc/modules/audio_coding/codecs/isac/fix/test/test_iSACfixfloat.c
@@ -42,11 +42,11 @@
/* function for reading audio data from PCM file */
-int readframe(WebRtc_Word16 *data, FILE *inp, int length) {
+int readframe(int16_t *data, FILE *inp, int length) {
short k, rlen, status = 0;
- rlen = fread(data, sizeof(WebRtc_Word16), length, inp);
+ rlen = fread(data, sizeof(int16_t), length, inp);
if (rlen < length) {
for (k = rlen; k < length; k++)
data[k] = 0;
@@ -57,10 +57,10 @@
}
typedef struct {
- WebRtc_UWord32 send_time; /* samples */
- WebRtc_UWord32 arrival_time; /* samples */
- WebRtc_UWord32 sample_count; /* samples */
- WebRtc_UWord16 rtp_number;
+ uint32_t send_time; /* samples */
+ uint32_t arrival_time; /* samples */
+ uint32_t sample_count; /* samples */
+ uint16_t rtp_number;
} BottleNeckModel;
void get_arrival_time(int current_framesamples, /* samples */
@@ -96,11 +96,11 @@
int i,j,errtype, plc=0;
- WebRtc_Word16 CodingMode;
- WebRtc_Word16 bottleneck;
+ int16_t CodingMode;
+ int16_t bottleneck;
- WebRtc_Word16 framesize = 30; /* ms */
- //WebRtc_Word16 framesize = 60; /* To invoke cisco complexity case at frame 2252 */
+ int16_t framesize = 30; /* ms */
+ //int16_t framesize = 60; /* To invoke cisco complexity case at frame 2252 */
int cur_framesmpls, err;
@@ -109,15 +109,15 @@
double runtime;
double length_file;
- WebRtc_Word16 stream_len = 0;
- WebRtc_Word16 declen;
+ int16_t stream_len = 0;
+ int16_t declen;
- WebRtc_Word16 shortdata[FRAMESAMPLES_10ms];
- WebRtc_Word16 decoded[MAX_FRAMESAMPLES];
- WebRtc_UWord16 streamdata[600];
- WebRtc_Word16 speechType[1];
+ int16_t shortdata[FRAMESAMPLES_10ms];
+ int16_t decoded[MAX_FRAMESAMPLES];
+ uint16_t streamdata[600];
+ int16_t speechType[1];
-// WebRtc_Word16 *iSACstruct;
+// int16_t *iSACstruct;
char version_number[20];
int mode=-1, tmp, nbTest=0; /*,sss;*/
@@ -641,7 +641,7 @@
}
/* Write decoded speech frame to file */
- fwrite(decoded, sizeof(WebRtc_Word16), declen, outp);
+ fwrite(decoded, sizeof(int16_t), declen, outp);
}
fprintf(stderr," \rframe = %d", framecnt);
diff --git a/webrtc/modules/audio_coding/codecs/isac/main/interface/isac.h b/webrtc/modules/audio_coding/codecs/isac/main/interface/isac.h
index 54c630a..f937b34 100644
--- a/webrtc/modules/audio_coding/codecs/isac/main/interface/isac.h
+++ b/webrtc/modules/audio_coding/codecs/isac/main/interface/isac.h
@@ -39,7 +39,7 @@
* -1 - Error
*/
- WebRtc_Word16 WebRtcIsac_AssignSize(
+ int16_t WebRtcIsac_AssignSize(
int* sizeinbytes);
@@ -58,7 +58,7 @@
* -1 - Error
*/
- WebRtc_Word16 WebRtcIsac_Assign(
+ int16_t WebRtcIsac_Assign(
ISACStruct** ISAC_main_inst,
void* ISAC_inst_Addr);
@@ -76,7 +76,7 @@
* -1 - Error
*/
- WebRtc_Word16 WebRtcIsac_Create(
+ int16_t WebRtcIsac_Create(
ISACStruct** ISAC_main_inst);
@@ -92,7 +92,7 @@
* -1 - Error
*/
- WebRtc_Word16 WebRtcIsac_Free(
+ int16_t WebRtcIsac_Free(
ISACStruct* ISAC_main_inst);
@@ -115,9 +115,9 @@
* -1 - Error
*/
- WebRtc_Word16 WebRtcIsac_EncoderInit(
+ int16_t WebRtcIsac_EncoderInit(
ISACStruct* ISAC_main_inst,
- WebRtc_Word16 CodingMode);
+ int16_t CodingMode);
/******************************************************************************
@@ -144,10 +144,10 @@
* : -1 - Error
*/
- WebRtc_Word16 WebRtcIsac_Encode(
+ int16_t WebRtcIsac_Encode(
ISACStruct* ISAC_main_inst,
- const WebRtc_Word16* speechIn,
- WebRtc_Word16* encoded);
+ const int16_t* speechIn,
+ int16_t* encoded);
/******************************************************************************
@@ -163,7 +163,7 @@
* -1 - Error
*/
- WebRtc_Word16 WebRtcIsac_DecoderInit(
+ int16_t WebRtcIsac_DecoderInit(
ISACStruct* ISAC_main_inst);
@@ -185,13 +185,13 @@
* -1 - Error
*/
- WebRtc_Word16 WebRtcIsac_UpdateBwEstimate(
+ int16_t WebRtcIsac_UpdateBwEstimate(
ISACStruct* ISAC_main_inst,
- const WebRtc_UWord16* encoded,
- WebRtc_Word32 packet_size,
- WebRtc_UWord16 rtp_seq_number,
- WebRtc_UWord32 send_ts,
- WebRtc_UWord32 arr_ts);
+ const uint16_t* encoded,
+ int32_t packet_size,
+ uint16_t rtp_seq_number,
+ uint32_t send_ts,
+ uint32_t arr_ts);
/******************************************************************************
@@ -214,12 +214,12 @@
* -1 - Error.
*/
- WebRtc_Word16 WebRtcIsac_Decode(
+ int16_t WebRtcIsac_Decode(
ISACStruct* ISAC_main_inst,
- const WebRtc_UWord16* encoded,
- WebRtc_Word16 len,
- WebRtc_Word16* decoded,
- WebRtc_Word16* speechType);
+ const uint16_t* encoded,
+ int16_t len,
+ int16_t* decoded,
+ int16_t* speechType);
/******************************************************************************
@@ -241,10 +241,10 @@
* -1 - Error
*/
- WebRtc_Word16 WebRtcIsac_DecodePlc(
+ int16_t WebRtcIsac_DecodePlc(
ISACStruct* ISAC_main_inst,
- WebRtc_Word16* decoded,
- WebRtc_Word16 noOfLostFrames);
+ int16_t* decoded,
+ int16_t noOfLostFrames);
/******************************************************************************
@@ -266,10 +266,10 @@
* -1 - Error
*/
- WebRtc_Word16 WebRtcIsac_Control(
+ int16_t WebRtcIsac_Control(
ISACStruct* ISAC_main_inst,
- WebRtc_Word32 rate,
- WebRtc_Word16 framesize);
+ int32_t rate,
+ int16_t framesize);
/******************************************************************************
@@ -297,11 +297,11 @@
* -1 - Error
*/
- WebRtc_Word16 WebRtcIsac_ControlBwe(
+ int16_t WebRtcIsac_ControlBwe(
ISACStruct* ISAC_main_inst,
- WebRtc_Word32 rateBPS,
- WebRtc_Word16 frameSizeMs,
- WebRtc_Word16 enforceFrameSize);
+ int32_t rateBPS,
+ int16_t frameSizeMs,
+ int16_t enforceFrameSize);
/******************************************************************************
@@ -317,10 +317,10 @@
*
*/
- WebRtc_Word16 WebRtcIsac_ReadFrameLen(
+ int16_t WebRtcIsac_ReadFrameLen(
ISACStruct* ISAC_main_inst,
- const WebRtc_Word16* encoded,
- WebRtc_Word16* frameLength);
+ const int16_t* encoded,
+ int16_t* frameLength);
/******************************************************************************
@@ -351,7 +351,7 @@
* Return value : Error code
*/
- WebRtc_Word16 WebRtcIsac_GetErrorCode(
+ int16_t WebRtcIsac_GetErrorCode(
ISACStruct* ISAC_main_inst);
@@ -379,9 +379,9 @@
* 0 bit-rates computed correctly.
*/
- WebRtc_Word16 WebRtcIsac_GetUplinkBw(
+ int16_t WebRtcIsac_GetUplinkBw(
ISACStruct* ISAC_main_inst,
- WebRtc_Word32* bottleneck);
+ int32_t* bottleneck);
/******************************************************************************
@@ -419,9 +419,9 @@
* -1 if error happens
*/
- WebRtc_Word16 WebRtcIsac_SetMaxPayloadSize(
+ int16_t WebRtcIsac_SetMaxPayloadSize(
ISACStruct* ISAC_main_inst,
- WebRtc_Word16 maxPayloadBytes);
+ int16_t maxPayloadBytes);
/******************************************************************************
@@ -463,9 +463,9 @@
* -1 if error happens
*/
- WebRtc_Word16 WebRtcIsac_SetMaxRate(
+ int16_t WebRtcIsac_SetMaxRate(
ISACStruct* ISAC_main_inst,
- WebRtc_Word32 maxRate);
+ int32_t maxRate);
/******************************************************************************
@@ -479,7 +479,7 @@
*
*/
- WebRtc_UWord16 WebRtcIsac_DecSampRate(ISACStruct* ISAC_main_inst);
+ uint16_t WebRtcIsac_DecSampRate(ISACStruct* ISAC_main_inst);
/******************************************************************************
@@ -492,7 +492,7 @@
*
*/
- WebRtc_UWord16 WebRtcIsac_EncSampRate(ISACStruct* ISAC_main_inst);
+ uint16_t WebRtcIsac_EncSampRate(ISACStruct* ISAC_main_inst);
/******************************************************************************
@@ -509,8 +509,8 @@
* -1 if failed.
*/
- WebRtc_Word16 WebRtcIsac_SetDecSampRate(ISACStruct* ISAC_main_inst,
- WebRtc_UWord16 samp_rate_hz);
+ int16_t WebRtcIsac_SetDecSampRate(ISACStruct* ISAC_main_inst,
+ uint16_t samp_rate_hz);
/******************************************************************************
@@ -529,8 +529,8 @@
* -1 if failed.
*/
- WebRtc_Word16 WebRtcIsac_SetEncSampRate(ISACStruct* ISAC_main_inst,
- WebRtc_UWord16 sample_rate_hz);
+ int16_t WebRtcIsac_SetEncSampRate(ISACStruct* ISAC_main_inst,
+ uint16_t sample_rate_hz);
@@ -569,13 +569,13 @@
* the struct since it is only allowed to read
* the struct.
*/
- WebRtc_Word16 WebRtcIsac_GetNewBitStream(
+ int16_t WebRtcIsac_GetNewBitStream(
ISACStruct* ISAC_main_inst,
- WebRtc_Word16 bweIndex,
- WebRtc_Word16 jitterInfo,
- WebRtc_Word32 rate,
- WebRtc_Word16* encoded,
- WebRtc_Word16 isRCU);
+ int16_t bweIndex,
+ int16_t jitterInfo,
+ int32_t rate,
+ int16_t* encoded,
+ int16_t isRCU);
@@ -593,10 +593,10 @@
*
*/
- WebRtc_Word16 WebRtcIsac_GetDownLinkBwIndex(
+ int16_t WebRtcIsac_GetDownLinkBwIndex(
ISACStruct* ISAC_main_inst,
- WebRtc_Word16* bweIndex,
- WebRtc_Word16* jitterInfo);
+ int16_t* bweIndex,
+ int16_t* jitterInfo);
/****************************************************************************
@@ -611,9 +611,9 @@
*
*/
- WebRtc_Word16 WebRtcIsac_UpdateUplinkBw(
+ int16_t WebRtcIsac_UpdateUplinkBw(
ISACStruct* ISAC_main_inst,
- WebRtc_Word16 bweIndex);
+ int16_t bweIndex);
/****************************************************************************
@@ -630,9 +630,9 @@
*
*/
- WebRtc_Word16 WebRtcIsac_ReadBwIndex(
- const WebRtc_Word16* encoded,
- WebRtc_Word16* bweIndex);
+ int16_t WebRtcIsac_ReadBwIndex(
+ const int16_t* encoded,
+ int16_t* bweIndex);
@@ -652,7 +652,7 @@
*
*/
- WebRtc_Word16 WebRtcIsac_GetNewFrameLen(
+ int16_t WebRtcIsac_GetNewFrameLen(
ISACStruct* ISAC_main_inst);
@@ -677,9 +677,9 @@
*
*
*/
- WebRtc_Word16 WebRtcIsac_GetRedPayload(
+ int16_t WebRtcIsac_GetRedPayload(
ISACStruct* ISAC_main_inst,
- WebRtc_Word16* encoded);
+ int16_t* encoded);
/****************************************************************************
@@ -701,12 +701,12 @@
* Return value : >0 - number of samples in decoded vector
* -1 - Error
*/
- WebRtc_Word16 WebRtcIsac_DecodeRcu(
+ int16_t WebRtcIsac_DecodeRcu(
ISACStruct* ISAC_main_inst,
- const WebRtc_UWord16* encoded,
- WebRtc_Word16 len,
- WebRtc_Word16* decoded,
- WebRtc_Word16* speechType);
+ const uint16_t* encoded,
+ int16_t len,
+ int16_t* decoded,
+ int16_t* speechType);
#if defined(__cplusplus)
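The header above exposes the floating-point iSAC API in terms of plain int16_t/int32_t buffers. A hedged sketch of a typical call sequence is given below, pieced together from these declarations and from the fixed-point test drivers elsewhere in this patch (which feed 10 ms chunks to Encode until a full packet is reported); error handling is abbreviated, and the instantaneous-mode bitrate of 32000 bps is an arbitrary example value, not a recommendation.

#include <stdint.h>
#include "webrtc/modules/audio_coding/codecs/isac/main/interface/isac.h"

/* Encode one 30 ms frame of 16 kHz audio and decode it again.
 * `pcm_in` must hold at least 480 samples (3 x 160-sample 10 ms chunks). */
int isac_round_trip(const int16_t* pcm_in, int16_t* pcm_out) {
  ISACStruct* inst = NULL;
  uint16_t stream[600];       /* payload buffer, as in the test code */
  int16_t speech_type[1];
  int16_t stream_len = 0;

  if (WebRtcIsac_Create(&inst) < 0) return -1;
  WebRtcIsac_EncoderInit(inst, 1 /* instantaneous mode */);
  WebRtcIsac_Control(inst, 32000 /* bps */, 30 /* ms frame */);
  WebRtcIsac_DecoderInit(inst);

  /* Encode returns 0 while buffering 10 ms chunks and >0 (bytes) once a
   * packet covering the whole frame is ready. */
  for (int chunk = 0; chunk < 3 && stream_len == 0; ++chunk) {
    stream_len = WebRtcIsac_Encode(inst, &pcm_in[chunk * 160],
                                   (int16_t*)stream);
    if (stream_len < 0) { WebRtcIsac_Free(inst); return -1; }
  }

  int16_t dec_len = WebRtcIsac_Decode(inst, stream, stream_len,
                                      pcm_out, speech_type);
  WebRtcIsac_Free(inst);
  return dec_len;  /* number of decoded samples, or -1 on error */
}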
diff --git a/webrtc/modules/audio_coding/codecs/isac/main/source/arith_routines.c b/webrtc/modules/audio_coding/codecs/isac/main/source/arith_routines.c
index 31c441a..5c901bb 100644
--- a/webrtc/modules/audio_coding/codecs/isac/main/source/arith_routines.c
+++ b/webrtc/modules/audio_coding/codecs/isac/main/source/arith_routines.c
@@ -18,7 +18,7 @@
*/
int WebRtcIsac_EncTerminate(Bitstr *streamdata) /* in-/output struct containing bitstream */
{
- WebRtc_UWord8 *stream_ptr;
+ uint8_t *stream_ptr;
/* point to the right place in the stream buffer */
@@ -37,7 +37,7 @@
stream_ptr = streamdata->stream + streamdata->stream_index;
}
/* write remaining data to bitstream */
- *stream_ptr++ = (WebRtc_UWord8) (streamdata->streamval >> 24);
+ *stream_ptr++ = (uint8_t) (streamdata->streamval >> 24);
}
else
{
@@ -51,8 +51,8 @@
stream_ptr = streamdata->stream + streamdata->stream_index;
}
/* write remaining data to bitstream */
- *stream_ptr++ = (WebRtc_UWord8) (streamdata->streamval >> 24);
- *stream_ptr++ = (WebRtc_UWord8) ((streamdata->streamval >> 16) & 0x00FF);
+ *stream_ptr++ = (uint8_t) (streamdata->streamval >> 24);
+ *stream_ptr++ = (uint8_t) ((streamdata->streamval >> 16) & 0x00FF);
}
/* calculate stream length */
diff --git a/webrtc/modules/audio_coding/codecs/isac/main/source/arith_routines.h b/webrtc/modules/audio_coding/codecs/isac/main/source/arith_routines.h
index 8e5f496..43ba40e 100644
--- a/webrtc/modules/audio_coding/codecs/isac/main/source/arith_routines.h
+++ b/webrtc/modules/audio_coding/codecs/isac/main/source/arith_routines.h
@@ -23,41 +23,41 @@
int WebRtcIsac_EncLogisticMulti2(
Bitstr *streamdata, /* in-/output struct containing bitstream */
- WebRtc_Word16 *dataQ7, /* input: data vector */
- const WebRtc_UWord16 *env, /* input: side info vector defining the width of the pdf */
+ int16_t *dataQ7, /* input: data vector */
+ const uint16_t *env, /* input: side info vector defining the width of the pdf */
const int N, /* input: data vector length */
- const WebRtc_Word16 isSWB12kHz); /* if the codec is working in 12kHz bandwidth */
+ const int16_t isSWB12kHz); /* if the codec is working in 12kHz bandwidth */
/* returns the number of bytes in the stream */
int WebRtcIsac_EncTerminate(Bitstr *streamdata); /* in-/output struct containing bitstream */
/* returns the number of bytes in the stream so far */
int WebRtcIsac_DecLogisticMulti2(
- WebRtc_Word16 *data, /* output: data vector */
+ int16_t *data, /* output: data vector */
Bitstr *streamdata, /* in-/output struct containing bitstream */
- const WebRtc_UWord16 *env, /* input: side info vector defining the width of the pdf */
- const WebRtc_Word16 *dither, /* input: dither vector */
+ const uint16_t *env, /* input: side info vector defining the width of the pdf */
+ const int16_t *dither, /* input: dither vector */
const int N, /* input: data vector length */
- const WebRtc_Word16 isSWB12kHz); /* if the codec is working in 12kHz bandwidth */
+ const int16_t isSWB12kHz); /* if the codec is working in 12kHz bandwidth */
void WebRtcIsac_EncHistMulti(
Bitstr *streamdata, /* in-/output struct containing bitstream */
const int *data, /* input: data vector */
- const WebRtc_UWord16 **cdf, /* input: array of cdf arrays */
+ const uint16_t **cdf, /* input: array of cdf arrays */
const int N); /* input: data vector length */
int WebRtcIsac_DecHistBisectMulti(
int *data, /* output: data vector */
Bitstr *streamdata, /* in-/output struct containing bitstream */
- const WebRtc_UWord16 **cdf, /* input: array of cdf arrays */
- const WebRtc_UWord16 *cdf_size, /* input: array of cdf table sizes+1 (power of two: 2^k) */
+ const uint16_t **cdf, /* input: array of cdf arrays */
+ const uint16_t *cdf_size, /* input: array of cdf table sizes+1 (power of two: 2^k) */
const int N); /* input: data vector length */
int WebRtcIsac_DecHistOneStepMulti(
int *data, /* output: data vector */
Bitstr *streamdata, /* in-/output struct containing bitstream */
- const WebRtc_UWord16 **cdf, /* input: array of cdf arrays */
- const WebRtc_UWord16 *init_index,/* input: vector of initial cdf table search entries */
+ const uint16_t **cdf, /* input: array of cdf arrays */
+ const uint16_t *init_index,/* input: vector of initial cdf table search entries */
const int N); /* input: data vector length */
#endif /* WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_ARITH_ROUTINES_H_ */
diff --git a/webrtc/modules/audio_coding/codecs/isac/main/source/arith_routines_hist.c b/webrtc/modules/audio_coding/codecs/isac/main/source/arith_routines_hist.c
index f4a13d6..63e4928 100644
--- a/webrtc/modules/audio_coding/codecs/isac/main/source/arith_routines_hist.c
+++ b/webrtc/modules/audio_coding/codecs/isac/main/source/arith_routines_hist.c
@@ -17,14 +17,14 @@
*/
void WebRtcIsac_EncHistMulti(Bitstr *streamdata, /* in-/output struct containing bitstream */
const int *data, /* input: data vector */
- const WebRtc_UWord16 **cdf, /* input: array of cdf arrays */
+ const uint16_t **cdf, /* input: array of cdf arrays */
const int N) /* input: data vector length */
{
- WebRtc_UWord32 W_lower, W_upper;
- WebRtc_UWord32 W_upper_LSB, W_upper_MSB;
- WebRtc_UWord8 *stream_ptr;
- WebRtc_UWord8 *stream_ptr_carry;
- WebRtc_UWord32 cdf_lo, cdf_hi;
+ uint32_t W_lower, W_upper;
+ uint32_t W_upper_LSB, W_upper_MSB;
+ uint8_t *stream_ptr;
+ uint8_t *stream_ptr_carry;
+ uint32_t cdf_lo, cdf_hi;
int k;
@@ -35,8 +35,8 @@
for (k=N; k>0; k--)
{
/* fetch cdf_lower and cdf_upper from cdf tables */
- cdf_lo = (WebRtc_UWord32) *(*cdf + *data);
- cdf_hi = (WebRtc_UWord32) *(*cdf++ + *data++ + 1);
+ cdf_lo = (uint32_t) *(*cdf + *data);
+ cdf_hi = (uint32_t) *(*cdf++ + *data++ + 1);
/* update interval */
W_upper_LSB = W_upper & 0x0000FFFF;
@@ -64,7 +64,7 @@
while ( !(W_upper & 0xFF000000) ) /* W_upper < 2^24 */
{
W_upper <<= 8;
- *stream_ptr++ = (WebRtc_UWord8) (streamdata->streamval >> 24);
+ *stream_ptr++ = (uint8_t) (streamdata->streamval >> 24);
streamdata->streamval <<= 8;
}
}
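The loop above is the byte-wise renormalization step of the arithmetic coder: whenever the interval width W_upper has shrunk below 2^24, the top byte of streamval is fully settled, so it is emitted and both quantities are shifted up by 8 bits. A small self-contained sketch of the same idea follows, independent of the iSAC structs and with hypothetical names.

#include <stdint.h>
#include <stddef.h>

/* Emit settled bytes of a 32-bit range-coder state.
 * `width` is the current interval width, `value` the code value; both are
 * shifted left by 8 for every byte written, mirroring the loop above. */
static size_t renormalize(uint32_t* width, uint32_t* value,
                          uint8_t* out, size_t pos) {
  while (!(*width & 0xFF000000)) {   /* width < 2^24: top byte is final */
    out[pos++] = (uint8_t)(*value >> 24);
    *value <<= 8;
    *width <<= 8;
  }
  return pos;
}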
@@ -84,16 +84,16 @@
*/
int WebRtcIsac_DecHistBisectMulti(int *data, /* output: data vector */
Bitstr *streamdata, /* in-/output struct containing bitstream */
- const WebRtc_UWord16 **cdf, /* input: array of cdf arrays */
- const WebRtc_UWord16 *cdf_size, /* input: array of cdf table sizes+1 (power of two: 2^k) */
+ const uint16_t **cdf, /* input: array of cdf arrays */
+ const uint16_t *cdf_size, /* input: array of cdf table sizes+1 (power of two: 2^k) */
const int N) /* input: data vector length */
{
- WebRtc_UWord32 W_lower, W_upper;
- WebRtc_UWord32 W_tmp;
- WebRtc_UWord32 W_upper_LSB, W_upper_MSB;
- WebRtc_UWord32 streamval;
- const WebRtc_UWord8 *stream_ptr;
- const WebRtc_UWord16 *cdf_ptr;
+ uint32_t W_lower, W_upper;
+ uint32_t W_tmp;
+ uint32_t W_upper_LSB, W_upper_MSB;
+ uint32_t streamval;
+ const uint8_t *stream_ptr;
+ const uint16_t *cdf_ptr;
int size_tmp;
int k;
@@ -192,16 +192,16 @@
*/
int WebRtcIsac_DecHistOneStepMulti(int *data, /* output: data vector */
Bitstr *streamdata, /* in-/output struct containing bitstream */
- const WebRtc_UWord16 **cdf, /* input: array of cdf arrays */
- const WebRtc_UWord16 *init_index, /* input: vector of initial cdf table search entries */
+ const uint16_t **cdf, /* input: array of cdf arrays */
+ const uint16_t *init_index, /* input: vector of initial cdf table search entries */
const int N) /* input: data vector length */
{
- WebRtc_UWord32 W_lower, W_upper;
- WebRtc_UWord32 W_tmp;
- WebRtc_UWord32 W_upper_LSB, W_upper_MSB;
- WebRtc_UWord32 streamval;
- const WebRtc_UWord8 *stream_ptr;
- const WebRtc_UWord16 *cdf_ptr;
+ uint32_t W_lower, W_upper;
+ uint32_t W_tmp;
+ uint32_t W_upper_LSB, W_upper_MSB;
+ uint32_t streamval;
+ const uint8_t *stream_ptr;
+ const uint16_t *cdf_ptr;
int k;
diff --git a/webrtc/modules/audio_coding/codecs/isac/main/source/arith_routines_logist.c b/webrtc/modules/audio_coding/codecs/isac/main/source/arith_routines_logist.c
index 422855a..eeed7ae 100644
--- a/webrtc/modules/audio_coding/codecs/isac/main/source/arith_routines_logist.c
+++ b/webrtc/modules/audio_coding/codecs/isac/main/source/arith_routines_logist.c
@@ -21,7 +21,7 @@
-static const WebRtc_Word32 kHistEdgesQ15[51] = {
+static const int32_t kHistEdgesQ15[51] = {
-327680, -314573, -301466, -288359, -275252, -262144, -249037, -235930, -222823, -209716,
-196608, -183501, -170394, -157287, -144180, -131072, -117965, -104858, -91751, -78644,
-65536, -52429, -39322, -26215, -13108, 0, 13107, 26214, 39321, 52428,
@@ -49,10 +49,10 @@
/* function to be converted to fixed point */
-static __inline WebRtc_UWord32 piecewise(WebRtc_Word32 xinQ15) {
+static __inline uint32_t piecewise(int32_t xinQ15) {
- WebRtc_Word32 ind, qtmp1, qtmp2, qtmp3;
- WebRtc_UWord32 tmpUW32;
+ int32_t ind, qtmp1, qtmp2, qtmp3;
+ uint32_t tmpUW32;
qtmp2 = xinQ15;
@@ -79,17 +79,17 @@
int WebRtcIsac_EncLogisticMulti2(
Bitstr *streamdata, /* in-/output struct containing bitstream */
- WebRtc_Word16 *dataQ7, /* input: data vector */
- const WebRtc_UWord16 *envQ8, /* input: side info vector defining the width of the pdf */
+ int16_t *dataQ7, /* input: data vector */
+ const uint16_t *envQ8, /* input: side info vector defining the width of the pdf */
const int N, /* input: data vector length / 2 */
- const WebRtc_Word16 isSWB12kHz)
+ const int16_t isSWB12kHz)
{
- WebRtc_UWord32 W_lower, W_upper;
- WebRtc_UWord32 W_upper_LSB, W_upper_MSB;
- WebRtc_UWord8 *stream_ptr;
- WebRtc_UWord8 *maxStreamPtr;
- WebRtc_UWord8 *stream_ptr_carry;
- WebRtc_UWord32 cdf_lo, cdf_hi;
+ uint32_t W_lower, W_upper;
+ uint32_t W_upper_LSB, W_upper_MSB;
+ uint8_t *stream_ptr;
+ uint8_t *maxStreamPtr;
+ uint8_t *stream_ptr_carry;
+ uint32_t cdf_lo, cdf_hi;
int k;
/* point to beginning of stream buffer */
@@ -149,7 +149,7 @@
while ( !(W_upper & 0xFF000000) ) /* W_upper < 2^24 */
{
W_upper <<= 8;
- *stream_ptr++ = (WebRtc_UWord8) (streamdata->streamval >> 24);
+ *stream_ptr++ = (uint8_t) (streamdata->streamval >> 24);
if(stream_ptr > maxStreamPtr)
{
@@ -169,20 +169,20 @@
int WebRtcIsac_DecLogisticMulti2(
- WebRtc_Word16 *dataQ7, /* output: data vector */
+ int16_t *dataQ7, /* output: data vector */
Bitstr *streamdata, /* in-/output struct containing bitstream */
- const WebRtc_UWord16 *envQ8, /* input: side info vector defining the width of the pdf */
- const WebRtc_Word16 *ditherQ7,/* input: dither vector */
+ const uint16_t *envQ8, /* input: side info vector defining the width of the pdf */
+ const int16_t *ditherQ7,/* input: dither vector */
const int N, /* input: data vector length */
- const WebRtc_Word16 isSWB12kHz)
+ const int16_t isSWB12kHz)
{
- WebRtc_UWord32 W_lower, W_upper;
- WebRtc_UWord32 W_tmp;
- WebRtc_UWord32 W_upper_LSB, W_upper_MSB;
- WebRtc_UWord32 streamval;
- const WebRtc_UWord8 *stream_ptr;
- WebRtc_UWord32 cdf_tmp;
- WebRtc_Word16 candQ7;
+ uint32_t W_lower, W_upper;
+ uint32_t W_tmp;
+ uint32_t W_upper_LSB, W_upper_MSB;
+ uint32_t streamval;
+ const uint8_t *stream_ptr;
+ uint32_t cdf_tmp;
+ int16_t candQ7;
int k;
stream_ptr = streamdata->stream + streamdata->stream_index;
diff --git a/webrtc/modules/audio_coding/codecs/isac/main/source/bandwidth_estimator.c b/webrtc/modules/audio_coding/codecs/isac/main/source/bandwidth_estimator.c
index d0a50c5..c4ceb59 100644
--- a/webrtc/modules/audio_coding/codecs/isac/main/source/bandwidth_estimator.c
+++ b/webrtc/modules/audio_coding/codecs/isac/main/source/bandwidth_estimator.c
@@ -41,7 +41,7 @@
-WebRtc_Word32 WebRtcIsac_InitBandwidthEstimator(
+int32_t WebRtcIsac_InitBandwidthEstimator(
BwEstimatorstr* bwest_str,
enum IsacSamplingRate encoderSampRate,
enum IsacSamplingRate decoderSampRate)
@@ -67,7 +67,7 @@
bwest_str->prev_frame_length = INIT_FRAME_LEN_WB;
bwest_str->rec_bw_inv = 1.0f /
(INIT_BN_EST_WB + INIT_HDR_RATE_WB);
- bwest_str->rec_bw = (WebRtc_Word32)INIT_BN_EST_WB;
+ bwest_str->rec_bw = (int32_t)INIT_BN_EST_WB;
bwest_str->rec_bw_avg_Q = INIT_BN_EST_WB;
bwest_str->rec_bw_avg = INIT_BN_EST_WB + INIT_HDR_RATE_WB;
bwest_str->rec_header_rate = INIT_HDR_RATE_WB;
@@ -78,7 +78,7 @@
bwest_str->prev_frame_length = INIT_FRAME_LEN_SWB;
bwest_str->rec_bw_inv = 1.0f /
(INIT_BN_EST_SWB + INIT_HDR_RATE_SWB);
- bwest_str->rec_bw = (WebRtc_Word32)INIT_BN_EST_SWB;
+ bwest_str->rec_bw = (int32_t)INIT_BN_EST_SWB;
bwest_str->rec_bw_avg_Q = INIT_BN_EST_SWB;
bwest_str->rec_bw_avg = INIT_BN_EST_SWB + INIT_HDR_RATE_SWB;
bwest_str->rec_header_rate = INIT_HDR_RATE_SWB;
@@ -131,14 +131,14 @@
/* pksize - size of packet in bytes, from NetEq */
/* Index - integer (range 0...23) indicating bottle neck & jitter as estimated by other side */
/* returns 0 if everything went fine, -1 otherwise */
-WebRtc_Word16 WebRtcIsac_UpdateBandwidthEstimator(
+int16_t WebRtcIsac_UpdateBandwidthEstimator(
BwEstimatorstr *bwest_str,
- const WebRtc_UWord16 rtp_number,
- const WebRtc_Word32 frame_length,
- const WebRtc_UWord32 send_ts,
- const WebRtc_UWord32 arr_ts,
- const WebRtc_Word32 pksize
- /*, const WebRtc_UWord16 Index*/)
+ const uint16_t rtp_number,
+ const int32_t frame_length,
+ const uint32_t send_ts,
+ const uint32_t arr_ts,
+ const int32_t pksize
+ /*, const uint16_t Index*/)
{
float weight = 0.0f;
float curr_bw_inv = 0.0f;
@@ -207,7 +207,7 @@
// that strict -DH
{
/* if not been updated for a long time, reduce the BN estimate */
- if((WebRtc_UWord32)(arr_ts - bwest_str->last_update_ts) *
+ if((uint32_t)(arr_ts - bwest_str->last_update_ts) *
1000.0f / FS > 3000)
{
//how many frames should have been received since the last
@@ -222,7 +222,7 @@
0.9)
{
float inv_bitrate = (float) pow( 0.99995,
- (double)((WebRtc_UWord32)(arr_ts -
+ (double)((uint32_t)(arr_ts -
bwest_str->last_reduction_ts)*1000.0f/FS) );
if ( inv_bitrate )
@@ -303,7 +303,7 @@
float averageLatencyMs = latencyMs / bwest_str->numConsecLatePkts;
delay_correction_factor = frame_length / (frame_length + averageLatencyMs);
immediate_set = 1;
- bwest_str->inWaitLatePkts = (WebRtc_Word16)((bwest_str->consecLatency/(FS/1000)) / 30);// + 150;
+ bwest_str->inWaitLatePkts = (int16_t)((bwest_str->consecLatency/(FS/1000)) / 30);// + 150;
bwest_str->start_wait_period = arr_ts;
}
///////////////////////////////////////////////
@@ -466,17 +466,17 @@
bwest_str->prev_rec_send_ts = send_ts;
/* Replace bwest_str->rec_bw by the new value (atomic operation) */
- bwest_str->rec_bw = (WebRtc_Word32)(1.0f / bwest_str->rec_bw_inv -
+ bwest_str->rec_bw = (int32_t)(1.0f / bwest_str->rec_bw_inv -
bwest_str->rec_header_rate);
if (immediate_set)
{
- bwest_str->rec_bw = (WebRtc_Word32) (delay_correction_factor *
+ bwest_str->rec_bw = (int32_t) (delay_correction_factor *
(float) bwest_str->rec_bw);
- if (bwest_str->rec_bw < (WebRtc_Word32) MIN_ISAC_BW)
+ if (bwest_str->rec_bw < (int32_t) MIN_ISAC_BW)
{
- bwest_str->rec_bw = (WebRtc_Word32) MIN_ISAC_BW;
+ bwest_str->rec_bw = (int32_t) MIN_ISAC_BW;
}
bwest_str->rec_bw_avg = bwest_str->rec_bw +
@@ -503,9 +503,9 @@
/* This function updates the send bottle neck rate */
/* Index - integer (range 0...23) indicating bottle neck & jitter as estimated by other side */
/* returns 0 if everything went fine, -1 otherwise */
-WebRtc_Word16 WebRtcIsac_UpdateUplinkBwImpl(
+int16_t WebRtcIsac_UpdateUplinkBwImpl(
BwEstimatorstr* bwest_str,
- WebRtc_Word16 index,
+ int16_t index,
enum IsacSamplingRate encoderSamplingFreq)
{
if((index < 0) || (index > 23))
@@ -560,9 +560,9 @@
// called when there is upper-band bit-stream to update jitter
// statistics.
-WebRtc_Word16 WebRtcIsac_UpdateUplinkJitter(
+int16_t WebRtcIsac_UpdateUplinkJitter(
BwEstimatorstr* bwest_str,
- WebRtc_Word32 index)
+ int32_t index)
{
if((index < 0) || (index > 23))
{
@@ -589,25 +589,25 @@
// Returns the bandwidth/jitter estimation code (integer 0...23)
// to put in the sending iSAC payload
-WebRtc_UWord16
+uint16_t
WebRtcIsac_GetDownlinkBwJitIndexImpl(
BwEstimatorstr* bwest_str,
- WebRtc_Word16* bottleneckIndex,
- WebRtc_Word16* jitterInfo,
+ int16_t* bottleneckIndex,
+ int16_t* jitterInfo,
enum IsacSamplingRate decoderSamplingFreq)
{
float MaxDelay;
- //WebRtc_UWord16 MaxDelayBit;
+ //uint16_t MaxDelayBit;
float rate;
float r;
float e1, e2;
const float weight = 0.1f;
const float* ptrQuantizationTable;
- WebRtc_Word16 addJitterInfo;
- WebRtc_Word16 minInd;
- WebRtc_Word16 maxInd;
- WebRtc_Word16 midInd;
+ int16_t addJitterInfo;
+ int16_t minInd;
+ int16_t maxInd;
+ int16_t midInd;
/* Get Max Delay Bit */
/* get unquantized max delay */
@@ -691,9 +691,9 @@
/* get the bottle neck rate from far side to here, as estimated on this side */
-WebRtc_Word32 WebRtcIsac_GetDownlinkBandwidth( const BwEstimatorstr *bwest_str)
+int32_t WebRtcIsac_GetDownlinkBandwidth( const BwEstimatorstr *bwest_str)
{
- WebRtc_Word32 rec_bw;
+ int32_t rec_bw;
float jitter_sign;
float bw_adjust;
@@ -705,7 +705,7 @@
bw_adjust = 1.0f - jitter_sign * (0.15f + 0.15f * jitter_sign * jitter_sign);
/* adjust Rate if jitter sign is mostly constant */
- rec_bw = (WebRtc_Word32)(bwest_str->rec_bw * bw_adjust);
+ rec_bw = (int32_t)(bwest_str->rec_bw * bw_adjust);
/* limit range of bottle neck rate */
if (rec_bw < MIN_ISAC_BW)
@@ -720,12 +720,12 @@
}
/* Returns the max delay (in ms) */
-WebRtc_Word32
+int32_t
WebRtcIsac_GetDownlinkMaxDelay(const BwEstimatorstr *bwest_str)
{
- WebRtc_Word32 rec_max_delay;
+ int32_t rec_max_delay;
- rec_max_delay = (WebRtc_Word32)(bwest_str->rec_max_delay);
+ rec_max_delay = (int32_t)(bwest_str->rec_max_delay);
/* limit range of jitter estimate */
if (rec_max_delay < MIN_ISAC_MD)
@@ -743,7 +743,7 @@
void
WebRtcIsac_GetUplinkBandwidth(
const BwEstimatorstr* bwest_str,
- WebRtc_Word32* bitRate)
+ int32_t* bitRate)
{
/* limit range of bottle neck rate */
if (bwest_str->send_bw_avg < MIN_ISAC_BW)
@@ -756,18 +756,18 @@
}
else
{
- *bitRate = (WebRtc_Word32)(bwest_str->send_bw_avg);
+ *bitRate = (int32_t)(bwest_str->send_bw_avg);
}
return;
}
/* Returns the max delay value from the other side in ms */
-WebRtc_Word32
+int32_t
WebRtcIsac_GetUplinkMaxDelay(const BwEstimatorstr *bwest_str)
{
- WebRtc_Word32 send_max_delay;
+ int32_t send_max_delay;
- send_max_delay = (WebRtc_Word32)(bwest_str->send_max_delay_avg);
+ send_max_delay = (int32_t)(bwest_str->send_max_delay_avg);
/* limit range of jitter estimate */
if (send_max_delay < MIN_ISAC_MD)
@@ -793,7 +793,7 @@
const double BottleNeck, /* bottle neck rate; excl headers (bps) */
const double DelayBuildUp, /* max delay from bottleneck buffering (ms) */
enum ISACBandwidth bandwidth
- /*,WebRtc_Word16 frequentLargePackets*/)
+ /*,int16_t frequentLargePackets*/)
{
double MinRate = 0.0;
int MinBytes;
diff --git a/webrtc/modules/audio_coding/codecs/isac/main/source/bandwidth_estimator.h b/webrtc/modules/audio_coding/codecs/isac/main/source/bandwidth_estimator.h
index 5604d7b..edabdff 100644
--- a/webrtc/modules/audio_coding/codecs/isac/main/source/bandwidth_estimator.h
+++ b/webrtc/modules/audio_coding/codecs/isac/main/source/bandwidth_estimator.h
@@ -75,7 +75,7 @@
/* This function initializes the struct */
/* to be called before using the struct for anything else */
/* returns 0 if everything went fine, -1 otherwise */
- WebRtc_Word32 WebRtcIsac_InitBandwidthEstimator(
+ int32_t WebRtcIsac_InitBandwidthEstimator(
BwEstimatorstr* bwest_str,
enum IsacSamplingRate encoderSampRate,
enum IsacSamplingRate decoderSampRate);
@@ -89,42 +89,42 @@
/* pksize - size of packet in bytes, from NetEq */
/* Index - integer (range 0...23) indicating bottle neck & jitter as estimated by other side */
/* returns 0 if everything went fine, -1 otherwise */
- WebRtc_Word16 WebRtcIsac_UpdateBandwidthEstimator(
+ int16_t WebRtcIsac_UpdateBandwidthEstimator(
BwEstimatorstr* bwest_str,
- const WebRtc_UWord16 rtp_number,
- const WebRtc_Word32 frame_length,
- const WebRtc_UWord32 send_ts,
- const WebRtc_UWord32 arr_ts,
- const WebRtc_Word32 pksize);
+ const uint16_t rtp_number,
+ const int32_t frame_length,
+ const uint32_t send_ts,
+ const uint32_t arr_ts,
+ const int32_t pksize);
/* Update receiving estimates. Used when we only receive BWE index, no iSAC data packet. */
- WebRtc_Word16 WebRtcIsac_UpdateUplinkBwImpl(
+ int16_t WebRtcIsac_UpdateUplinkBwImpl(
BwEstimatorstr* bwest_str,
- WebRtc_Word16 Index,
+ int16_t Index,
enum IsacSamplingRate encoderSamplingFreq);
/* Returns the bandwidth/jitter estimation code (integer 0...23) to put in the sending iSAC payload */
- WebRtc_UWord16 WebRtcIsac_GetDownlinkBwJitIndexImpl(
+ uint16_t WebRtcIsac_GetDownlinkBwJitIndexImpl(
BwEstimatorstr* bwest_str,
- WebRtc_Word16* bottleneckIndex,
- WebRtc_Word16* jitterInfo,
+ int16_t* bottleneckIndex,
+ int16_t* jitterInfo,
enum IsacSamplingRate decoderSamplingFreq);
/* Returns the bandwidth estimation (in bps) */
- WebRtc_Word32 WebRtcIsac_GetDownlinkBandwidth(
+ int32_t WebRtcIsac_GetDownlinkBandwidth(
const BwEstimatorstr *bwest_str);
/* Returns the max delay (in ms) */
- WebRtc_Word32 WebRtcIsac_GetDownlinkMaxDelay(
+ int32_t WebRtcIsac_GetDownlinkMaxDelay(
const BwEstimatorstr *bwest_str);
/* Returns the bandwidth that iSAC should send with in bps */
void WebRtcIsac_GetUplinkBandwidth(
const BwEstimatorstr* bwest_str,
- WebRtc_Word32* bitRate);
+ int32_t* bitRate);
/* Returns the max delay value from the other side in ms */
- WebRtc_Word32 WebRtcIsac_GetUplinkMaxDelay(
+ int32_t WebRtcIsac_GetUplinkMaxDelay(
const BwEstimatorstr *bwest_str);
@@ -139,7 +139,7 @@
const double BottleNeck, /* bottle neck rate; excl headers (bps) */
const double DelayBuildUp, /* max delay from bottleneck buffering (ms) */
enum ISACBandwidth bandwidth
- /*,WebRtc_Word16 frequentLargePackets*/);
+ /*,int16_t frequentLargePackets*/);
/*
* update long-term average bitrate and amount of data in buffer
@@ -165,9 +165,9 @@
int new_framelength);
- WebRtc_Word16 WebRtcIsac_UpdateUplinkJitter(
+ int16_t WebRtcIsac_UpdateUplinkJitter(
BwEstimatorstr* bwest_str,
- WebRtc_Word32 index);
+ int32_t index);
#if defined(__cplusplus)
}
diff --git a/webrtc/modules/audio_coding/codecs/isac/main/source/codec.h b/webrtc/modules/audio_coding/codecs/isac/main/source/codec.h
index 0b4d862..845f357 100644
--- a/webrtc/modules/audio_coding/codecs/isac/main/source/codec.h
+++ b/webrtc/modules/audio_coding/codecs/isac/main/source/codec.h
@@ -25,21 +25,21 @@
void WebRtcIsac_ResetBitstream(Bitstr* bit_stream);
int WebRtcIsac_EstimateBandwidth(BwEstimatorstr* bwest_str, Bitstr* streamdata,
- WebRtc_Word32 packet_size,
- WebRtc_UWord16 rtp_seq_number,
- WebRtc_UWord32 send_ts, WebRtc_UWord32 arr_ts,
+ int32_t packet_size,
+ uint16_t rtp_seq_number,
+ uint32_t send_ts, uint32_t arr_ts,
enum IsacSamplingRate encoderSampRate,
enum IsacSamplingRate decoderSampRate);
int WebRtcIsac_DecodeLb(float* signal_out, ISACLBDecStruct* ISACdec_obj,
- WebRtc_Word16* current_framesamples,
- WebRtc_Word16 isRCUPayload);
+ int16_t* current_framesamples,
+ int16_t isRCUPayload);
int WebRtcIsac_DecodeRcuLb(float* signal_out, ISACLBDecStruct* ISACdec_obj,
- WebRtc_Word16* current_framesamples);
+ int16_t* current_framesamples);
int WebRtcIsac_EncodeLb(float* in, ISACLBEncStruct* ISACencLB_obj,
- WebRtc_Word16 codingMode, WebRtc_Word16
+ int16_t codingMode, int16_t
bottleneckIndex);
int WebRtcIsac_EncodeStoredDataLb(const ISAC_SaveEncData_t* ISACSavedEnc_obj,
@@ -48,9 +48,9 @@
int WebRtcIsac_EncodeStoredDataUb(
const ISACUBSaveEncDataStruct* ISACSavedEnc_obj, Bitstr* bitStream,
- WebRtc_Word32 jitterInfo, float scale, enum ISACBandwidth bandwidth);
+ int32_t jitterInfo, float scale, enum ISACBandwidth bandwidth);
-WebRtc_Word16 WebRtcIsac_GetRedPayloadUb(
+int16_t WebRtcIsac_GetRedPayloadUb(
const ISACUBSaveEncDataStruct* ISACSavedEncObj, Bitstr* bitStreamObj,
enum ISACBandwidth bandwidth);
@@ -72,10 +72,10 @@
* -1 if failed to allocate rates.
*/
-WebRtc_Word16 WebRtcIsac_RateAllocation(WebRtc_Word32 inRateBitPerSec,
- double* rateLBBitPerSec,
- double* rateUBBitPerSec,
- enum ISACBandwidth* bandwidthKHz);
+int16_t WebRtcIsac_RateAllocation(int32_t inRateBitPerSec,
+ double* rateLBBitPerSec,
+ double* rateUBBitPerSec,
+ enum ISACBandwidth* bandwidthKHz);
/******************************************************************************
@@ -94,7 +94,7 @@
* <0 if an error occurred.
*/
int WebRtcIsac_DecodeUb16(float* signal_out, ISACUBDecStruct* ISACdec_obj,
- WebRtc_Word16 isRCUPayload);
+ int16_t isRCUPayload);
/******************************************************************************
@@ -113,7 +113,7 @@
* <0 if an error occurred.
*/
int WebRtcIsac_DecodeUb12(float* signal_out, ISACUBDecStruct* ISACdec_obj,
- WebRtc_Word16 isRCUPayload);
+ int16_t isRCUPayload);
/******************************************************************************
@@ -132,7 +132,7 @@
* <0 if an error occurred.
*/
int WebRtcIsac_EncodeUb16(float* in, ISACUBEncStruct* ISACenc_obj,
- WebRtc_Word32 jitterInfo);
+ int32_t jitterInfo);
/******************************************************************************
@@ -151,7 +151,7 @@
* <0 if an error occurred.
*/
int WebRtcIsac_EncodeUb12(float* in, ISACUBEncStruct* ISACenc_obj,
- WebRtc_Word32 jitterInfo);
+ int32_t jitterInfo);
/************************** initialization functions *************************/
@@ -170,8 +170,8 @@
void WebRtcIsac_InitTransform();
-void WebRtcIsac_Time2Spec(double* inre1, double* inre2, WebRtc_Word16* outre,
- WebRtc_Word16* outim, FFTstr* fftstr_obj);
+void WebRtcIsac_Time2Spec(double* inre1, double* inre2, int16_t* outre,
+ int16_t* outim, FFTstr* fftstr_obj);
void WebRtcIsac_Spec2time(double* inre, double* inim, double* outre1,
double* outre2, FFTstr* fftstr_obj);
diff --git a/webrtc/modules/audio_coding/codecs/isac/main/source/crc.c b/webrtc/modules/audio_coding/codecs/isac/main/source/crc.c
index 098e4b7..1d36ff0 100644
--- a/webrtc/modules/audio_coding/codecs/isac/main/source/crc.c
+++ b/webrtc/modules/audio_coding/codecs/isac/main/source/crc.c
@@ -15,7 +15,7 @@
#define POLYNOMIAL 0x04c11db7L
-static const WebRtc_UWord32 kCrcTable[256] = {
+static const uint32_t kCrcTable[256] = {
0, 0x4c11db7, 0x9823b6e, 0xd4326d9, 0x130476dc, 0x17c56b6b,
0x1a864db2, 0x1e475005, 0x2608edb8, 0x22c9f00f, 0x2f8ad6d6, 0x2b4bcb61,
0x350c9b64, 0x31cd86d3, 0x3c8ea00a, 0x384fbdbd, 0x4c11db70, 0x48d0c6c7,
@@ -80,12 +80,12 @@
* -1 - Error
*/
-WebRtc_Word16 WebRtcIsac_GetCrc(const WebRtc_Word16* bitstream,
- WebRtc_Word16 len_bitstream_in_bytes,
- WebRtc_UWord32* crc)
+int16_t WebRtcIsac_GetCrc(const int16_t* bitstream,
+ int16_t len_bitstream_in_bytes,
+ uint32_t* crc)
{
- WebRtc_UWord8* bitstream_ptr_uw8;
- WebRtc_UWord32 crc_state;
+ uint8_t* bitstream_ptr_uw8;
+ uint32_t crc_state;
int byte_cntr;
int crc_tbl_indx;
@@ -94,7 +94,7 @@
return -1;
}
/* cast to UWord8 pointer */
- bitstream_ptr_uw8 = (WebRtc_UWord8 *)bitstream;
+ bitstream_ptr_uw8 = (uint8_t *)bitstream;
/* initialize */
crc_state = 0xFFFFFFFF;
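WebRtcIsac_GetCrc above initializes crc_state to 0xFFFFFFFF and then walks the payload byte by byte against the 256-entry kCrcTable built from the polynomial 0x04c11db7. The per-byte update itself is not visible in this hunk, so the sketch below shows the standard MSB-first table-driven form such a loop typically takes; treat it as an illustration of the technique rather than the exact iSAC loop.

#include <stdint.h>
#include <stddef.h>

/* MSB-first table-driven CRC-32 update for polynomial 0x04c11db7.
 * `table` stands in for a 256-entry table like kCrcTable above. */
static uint32_t crc32_update(uint32_t state, const uint8_t* data, size_t len,
                             const uint32_t table[256]) {
  for (size_t i = 0; i < len; ++i) {
    unsigned idx = (unsigned)((state >> 24) ^ data[i]) & 0xFFu;
    state = (state << 8) ^ table[idx];
  }
  return state;
}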
diff --git a/webrtc/modules/audio_coding/codecs/isac/main/source/crc.h b/webrtc/modules/audio_coding/codecs/isac/main/source/crc.h
index 0151278..dba8749 100644
--- a/webrtc/modules/audio_coding/codecs/isac/main/source/crc.h
+++ b/webrtc/modules/audio_coding/codecs/isac/main/source/crc.h
@@ -36,10 +36,10 @@
* -1 - Error
*/
-WebRtc_Word16 WebRtcIsac_GetCrc(
- const WebRtc_Word16* encoded,
- WebRtc_Word16 no_of_word8s,
- WebRtc_UWord32* crc);
+int16_t WebRtcIsac_GetCrc(
+ const int16_t* encoded,
+ int16_t no_of_word8s,
+ uint32_t* crc);
diff --git a/webrtc/modules/audio_coding/codecs/isac/main/source/decode.c b/webrtc/modules/audio_coding/codecs/isac/main/source/decode.c
index 2b06cd0..e23765b 100644
--- a/webrtc/modules/audio_coding/codecs/isac/main/source/decode.c
+++ b/webrtc/modules/audio_coding/codecs/isac/main/source/decode.c
@@ -36,11 +36,11 @@
* returns the total number of bytes in the stream
*/
int WebRtcIsac_DecodeLb(float* signal_out, ISACLBDecStruct* ISACdecLB_obj,
- WebRtc_Word16* current_framesamples,
- WebRtc_Word16 isRCUPayload) {
+ int16_t* current_framesamples,
+ int16_t isRCUPayload) {
int k;
int len, err;
- WebRtc_Word16 bandwidthInd;
+ int16_t bandwidthInd;
float LP_dec_float[FRAMESAMPLES_HALF];
float HP_dec_float[FRAMESAMPLES_HALF];
@@ -58,8 +58,8 @@
double PitchLags[4];
double PitchGains[4];
double AvgPitchGain;
- WebRtc_Word16 PitchGains_Q12[4];
- WebRtc_Word16 AvgPitchGain_Q12;
+ int16_t PitchGains_Q12[4];
+ int16_t AvgPitchGain_Q12;
float gain;
@@ -182,7 +182,7 @@
* frequency, but split to 12 sub-frames, i.e. twice as lower-band.
*/
int WebRtcIsac_DecodeUb16(float* signal_out, ISACUBDecStruct* ISACdecUB_obj,
- WebRtc_Word16 isRCUPayload) {
+ int16_t isRCUPayload) {
int len, err;
double halfFrameFirst[FRAMESAMPLES_HALF];
@@ -193,7 +193,7 @@
double real_f[FRAMESAMPLES_HALF];
double imag_f[FRAMESAMPLES_HALF];
- const WebRtc_Word16 kAveragePitchGain = 0; /* No pitch-gain for upper-band. */
+ const int16_t kAveragePitchGain = 0; /* No pitch-gain for upper-band. */
len = 0;
/* Decode & de-quantize filter coefficients. */
@@ -246,7 +246,7 @@
* are combined, to reconstruct the upperband 8-16 kHz.
*/
int WebRtcIsac_DecodeUb12(float* signal_out, ISACUBDecStruct* ISACdecUB_obj,
- WebRtc_Word16 isRCUPayload) {
+ int16_t isRCUPayload) {
int len, err;
float LP_dec_float[FRAMESAMPLES_HALF];
@@ -259,7 +259,7 @@
double real_f[FRAMESAMPLES_HALF];
double imag_f[FRAMESAMPLES_HALF];
- const WebRtc_Word16 kAveragePitchGain = 0; /* No pitch-gain for upper-band. */
+ const int16_t kAveragePitchGain = 0; /* No pitch-gain for upper-band. */
len = 0;
/* Decode & dequantize filter coefficients. */
diff --git a/webrtc/modules/audio_coding/codecs/isac/main/source/decode_bwe.c b/webrtc/modules/audio_coding/codecs/isac/main/source/decode_bwe.c
index a92b9b9..5abe204 100644
--- a/webrtc/modules/audio_coding/codecs/isac/main/source/decode_bwe.c
+++ b/webrtc/modules/audio_coding/codecs/isac/main/source/decode_bwe.c
@@ -18,19 +18,19 @@
WebRtcIsac_EstimateBandwidth(
BwEstimatorstr* bwest_str,
Bitstr* streamdata,
- WebRtc_Word32 packet_size,
- WebRtc_UWord16 rtp_seq_number,
- WebRtc_UWord32 send_ts,
- WebRtc_UWord32 arr_ts,
+ int32_t packet_size,
+ uint16_t rtp_seq_number,
+ uint32_t send_ts,
+ uint32_t arr_ts,
enum IsacSamplingRate encoderSampRate,
enum IsacSamplingRate decoderSampRate)
{
- WebRtc_Word16 index;
- WebRtc_Word16 frame_samples;
- WebRtc_UWord32 sendTimestampIn16kHz;
- WebRtc_UWord32 arrivalTimestampIn16kHz;
- WebRtc_UWord32 diffSendTime;
- WebRtc_UWord32 diffArrivalTime;
+ int16_t index;
+ int16_t frame_samples;
+ uint32_t sendTimestampIn16kHz;
+ uint32_t arrivalTimestampIn16kHz;
+ uint32_t diffSendTime;
+ uint32_t diffArrivalTime;
int err;
/* decode framelength and BW estimation */
@@ -55,26 +55,26 @@
// We like BWE to work at 16 kHz sampling rate,
// therefore, we have to change the timestamps accordingly.
// translate the send timestamp if required
- diffSendTime = (WebRtc_UWord32)((WebRtc_UWord32)send_ts -
- (WebRtc_UWord32)bwest_str->senderTimestamp);
+ diffSendTime = (uint32_t)((uint32_t)send_ts -
+ (uint32_t)bwest_str->senderTimestamp);
bwest_str->senderTimestamp = send_ts;
- diffArrivalTime = (WebRtc_UWord32)((WebRtc_UWord32)arr_ts -
- (WebRtc_UWord32)bwest_str->receiverTimestamp);
+ diffArrivalTime = (uint32_t)((uint32_t)arr_ts -
+ (uint32_t)bwest_str->receiverTimestamp);
bwest_str->receiverTimestamp = arr_ts;
if(decoderSampRate == kIsacSuperWideband)
{
- diffArrivalTime = (WebRtc_UWord32)diffArrivalTime >> 1;
- diffSendTime = (WebRtc_UWord32)diffSendTime >> 1;
+ diffArrivalTime = (uint32_t)diffArrivalTime >> 1;
+ diffSendTime = (uint32_t)diffSendTime >> 1;
}
// arrival timestamp in 16 kHz
- arrivalTimestampIn16kHz = (WebRtc_UWord32)((WebRtc_UWord32)
- bwest_str->prev_rec_arr_ts + (WebRtc_UWord32)diffArrivalTime);
+ arrivalTimestampIn16kHz = (uint32_t)((uint32_t)
+ bwest_str->prev_rec_arr_ts + (uint32_t)diffArrivalTime);
// send timestamp in 16 kHz
- sendTimestampIn16kHz = (WebRtc_UWord32)((WebRtc_UWord32)
- bwest_str->prev_rec_send_ts + (WebRtc_UWord32)diffSendTime);
+ sendTimestampIn16kHz = (uint32_t)((uint32_t)
+ bwest_str->prev_rec_send_ts + (uint32_t)diffSendTime);
err = WebRtcIsac_UpdateBandwidthEstimator(bwest_str, rtp_seq_number,
(frame_samples * 1000) / FS, sendTimestampIn16kHz,
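[review note] The explicit uint32_t casts above are what make the send/arrival deltas robust to RTP timestamp wrap-around: unsigned subtraction is modulo 2^32, so the difference is correct even when the newer timestamp has wrapped past zero. The >> 1 then halves the deltas when the codec runs super-wideband, so the estimator keeps working in 16 kHz units, as the comment above notes. A tiny illustration of the wrap-safe delta:

#include <stdint.h>

/* Sketch: wrap-safe timestamp delta; relies on modular uint32_t arithmetic.
 * E.g. TimestampDiff(0x00000010u, 0xFFFFFFF0u) == 0x20 despite the wrap. */
static uint32_t TimestampDiff(uint32_t newer, uint32_t older) {
  return newer - older;
}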
diff --git a/webrtc/modules/audio_coding/codecs/isac/main/source/encode.c b/webrtc/modules/audio_coding/codecs/isac/main/source/encode.c
index f6bdc17..5d8fa38 100644
--- a/webrtc/modules/audio_coding/codecs/isac/main/source/encode.c
+++ b/webrtc/modules/audio_coding/codecs/isac/main/source/encode.c
@@ -70,15 +70,15 @@
*/
/* 38 39.17 40.33 41.5 42.67 43.83 45 */
-static const WebRtc_Word16 kLowerBandBitRate12[7] = {
+static const int16_t kLowerBandBitRate12[7] = {
29000, 30000, 30000, 31000, 31000, 32000, 32000 };
-static const WebRtc_Word16 kUpperBandBitRate12[7] = {
+static const int16_t kUpperBandBitRate12[7] = {
25000, 25000, 27000, 27000, 29000, 29000, 32000 };
/* 50 51.2 52.4 53.6 54.8 56 */
-static const WebRtc_Word16 kLowerBandBitRate16[6] = {
+static const int16_t kLowerBandBitRate16[6] = {
31000, 31000, 32000, 32000, 32000, 32000 };
-static const WebRtc_Word16 kUpperBandBitRate16[6] = {
+static const int16_t kUpperBandBitRate16[6] = {
28000, 29000, 29000, 30000, 31000, 32000 };
/******************************************************************************
@@ -99,18 +99,18 @@
* -1 if failed to allocate rates.
*/
-WebRtc_Word16 WebRtcIsac_RateAllocation(WebRtc_Word32 inRateBitPerSec,
+int16_t WebRtcIsac_RateAllocation(int32_t inRateBitPerSec,
double* rateLBBitPerSec,
double* rateUBBitPerSec,
enum ISACBandwidth* bandwidthKHz) {
- WebRtc_Word16 idx;
+ int16_t idx;
double idxD;
double idxErr;
if (inRateBitPerSec < 38000) {
/* If the given overall bottleneck is less than 38000 then
* then codec has to operate in wideband mode, i.e. 8 kHz
* bandwidth. */
- *rateLBBitPerSec = (WebRtc_Word16)((inRateBitPerSec > 32000) ?
+ *rateLBBitPerSec = (int16_t)((inRateBitPerSec > 32000) ?
32000 : inRateBitPerSec);
*rateUBBitPerSec = 0;
*bandwidthKHz = isac8kHz;
@@ -123,15 +123,15 @@
* step is (45000 - 38000)/6.0 we use the inverse of it. */
const double stepSizeInv = 8.5714286e-4;
idxD = (inRateBitPerSec - 38000) * stepSizeInv;
- idx = (idxD >= 6) ? 6 : ((WebRtc_Word16)idxD);
+ idx = (idxD >= 6) ? 6 : ((int16_t)idxD);
idxErr = idxD - idx;
*rateLBBitPerSec = kLowerBandBitRate12[idx];
*rateUBBitPerSec = kUpperBandBitRate12[idx];
if (idx < 6) {
- *rateLBBitPerSec += (WebRtc_Word16)(
+ *rateLBBitPerSec += (int16_t)(
idxErr * (kLowerBandBitRate12[idx + 1] - kLowerBandBitRate12[idx]));
- *rateUBBitPerSec += (WebRtc_Word16)(
+ *rateUBBitPerSec += (int16_t)(
idxErr * (kUpperBandBitRate12[idx + 1] - kUpperBandBitRate12[idx]));
}
*bandwidthKHz = isac12kHz;
@@ -144,17 +144,17 @@
* step is (56000 - 50000)/5 we use the inverse of it. */
const double stepSizeInv = 8.3333333e-4;
idxD = (inRateBitPerSec - 50000) * stepSizeInv;
- idx = (idxD >= 5) ? 5 : ((WebRtc_Word16)idxD);
+ idx = (idxD >= 5) ? 5 : ((int16_t)idxD);
idxErr = idxD - idx;
*rateLBBitPerSec = kLowerBandBitRate16[idx];
*rateUBBitPerSec = kUpperBandBitRate16[idx];
if (idx < 5) {
- *rateLBBitPerSec += (WebRtc_Word16)(idxErr *
+ *rateLBBitPerSec += (int16_t)(idxErr *
(kLowerBandBitRate16[idx + 1] -
kLowerBandBitRate16[idx]));
- *rateUBBitPerSec += (WebRtc_Word16)(idxErr *
+ *rateUBBitPerSec += (int16_t)(idxErr *
(kUpperBandBitRate16[idx + 1] -
kUpperBandBitRate16[idx]));
}
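[review note] Both branches above do the same thing: map the overall bottleneck onto a fractional table index (idxD), clamp the integer part to the last entry, and linearly interpolate the lower- and upper-band rates between neighbouring table entries using the fractional part (idxErr). A generic sketch of that lookup, ignoring the int16_t truncation of the correction term in the real code (table and step_inv are placeholders, not the iSAC tables):

#include <stdint.h>

/* Sketch: piecewise-linear interpolation over a small bit-rate table. */
static double InterpolateRate(const int16_t* table, int last_idx,
                              double rate_offset, double step_inv) {
  double idxD = rate_offset * step_inv;          /* fractional table index */
  int idx = (idxD >= last_idx) ? last_idx : (int)idxD;
  double frac = idxD - idx;
  double rate = table[idx];
  if (idx < last_idx)
    rate += frac * (table[idx + 1] - table[idx]);
  return rate;
}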
@@ -178,8 +178,8 @@
}
int WebRtcIsac_EncodeLb(float* in, ISACLBEncStruct* ISACencLB_obj,
- WebRtc_Word16 codingMode,
- WebRtc_Word16 bottleneckIndex) {
+ int16_t codingMode,
+ int16_t bottleneckIndex) {
int stream_length = 0;
int err;
int k;
@@ -197,20 +197,20 @@
double HPw[FRAMESAMPLES_HALF];
double LPw_pf[FRAMESAMPLES_HALF];
- WebRtc_Word16 fre[FRAMESAMPLES_HALF]; /* Q7 */
- WebRtc_Word16 fim[FRAMESAMPLES_HALF]; /* Q7 */
+ int16_t fre[FRAMESAMPLES_HALF]; /* Q7 */
+ int16_t fim[FRAMESAMPLES_HALF]; /* Q7 */
double PitchLags[4];
double PitchGains[4];
- WebRtc_Word16 PitchGains_Q12[4];
- WebRtc_Word16 AvgPitchGain_Q12;
+ int16_t PitchGains_Q12[4];
+ int16_t AvgPitchGain_Q12;
int frame_mode; /* 0 for 30ms, 1 for 60ms */
int status = 0;
int my_index;
transcode_obj transcodingParam;
double bytesLeftSpecCoding;
- WebRtc_UWord16 payloadLimitBytes;
+ uint16_t payloadLimitBytes;
/* Copy new frame-length and bottleneck rate only for the first 10 ms data */
if (ISACencLB_obj->buffer_index == 0) {
@@ -292,7 +292,7 @@
/* Convert PitchGain to Fixed point. */
for (k = 0; k < PITCH_SUBFRAMES; k++) {
- PitchGains_Q12[k] = (WebRtc_Word16)(PitchGains[k] * 4096.0);
+ PitchGains_Q12[k] = (int16_t)(PitchGains[k] * 4096.0);
}
/* Set where to store data in multiple packets memory. */
@@ -458,8 +458,8 @@
/* Scale DFT coefficients. */
for (k = 0; k < FRAMESAMPLES_HALF; k++) {
- fre[k] = (WebRtc_Word16)(fre[k] * transcodeScale);
- fim[k] = (WebRtc_Word16)(fim[k] * transcodeScale);
+ fre[k] = (int16_t)(fre[k] * transcodeScale);
+ fim[k] = (int16_t)(fim[k] * transcodeScale);
}
/* Save data for multiple packets memory. */
@@ -531,17 +531,17 @@
static int LimitPayloadUb(ISACUBEncStruct* ISACencUB_obj,
- WebRtc_UWord16 payloadLimitBytes,
+ uint16_t payloadLimitBytes,
double bytesLeftSpecCoding,
transcode_obj* transcodingParam,
- WebRtc_Word16* fre, WebRtc_Word16* fim,
+ int16_t* fre, int16_t* fim,
double* lpcGains, enum ISACBand band, int status) {
int iterCntr = 0;
int k;
double bytesSpecCoderUsed;
double transcodeScale;
- const WebRtc_Word16 kAveragePitchGain = 0.0;
+ const int16_t kAveragePitchGain = 0.0;
do {
if (iterCntr >= MAX_PAYLOAD_LIMIT_ITERATION) {
@@ -580,8 +580,8 @@
/* Scale DFT coefficients. */
for (k = 0; k < FRAMESAMPLES_HALF; k++) {
- fre[k] = (WebRtc_Word16)(fre[k] * transcodeScale + 0.5);
- fim[k] = (WebRtc_Word16)(fim[k] * transcodeScale + 0.5);
+ fre[k] = (int16_t)(fre[k] * transcodeScale + 0.5);
+ fim[k] = (int16_t)(fim[k] * transcodeScale + 0.5);
}
/* Store FFT coefficients for multiple encoding. */
memcpy(ISACencUB_obj->SaveEnc_obj.realFFT, fre,
@@ -642,7 +642,7 @@
}
int WebRtcIsac_EncodeUb16(float* in, ISACUBEncStruct* ISACencUB_obj,
- WebRtc_Word32 jitterInfo) {
+ int32_t jitterInfo) {
int err;
int k;
@@ -651,8 +651,8 @@
(1 + UB_LPC_ORDER)];
double LP_lookahead[FRAMESAMPLES];
- WebRtc_Word16 fre[FRAMESAMPLES_HALF]; /* Q7 */
- WebRtc_Word16 fim[FRAMESAMPLES_HALF]; /* Q7 */
+ int16_t fre[FRAMESAMPLES_HALF]; /* Q7 */
+ int16_t fim[FRAMESAMPLES_HALF]; /* Q7 */
int status = 0;
@@ -660,9 +660,9 @@
double corr[SUBFRAMES << 1][UB_LPC_ORDER + 1];
double lpcGains[SUBFRAMES << 1];
transcode_obj transcodingParam;
- WebRtc_UWord16 payloadLimitBytes;
+ uint16_t payloadLimitBytes;
double s2nr;
- const WebRtc_Word16 kAveragePitchGain = 0.0;
+ const int16_t kAveragePitchGain = 0.0;
int bytesLeftSpecCoding;
/* Buffer speech samples (by 10ms packet) until the frame-length is */
@@ -827,7 +827,7 @@
int WebRtcIsac_EncodeUb12(float* in, ISACUBEncStruct* ISACencUB_obj,
- WebRtc_Word32 jitterInfo) {
+ int32_t jitterInfo) {
int err;
int k;
@@ -842,8 +842,8 @@
double LPw[FRAMESAMPLES_HALF];
double HPw[FRAMESAMPLES_HALF];
- WebRtc_Word16 fre[FRAMESAMPLES_HALF]; /* Q7 */
- WebRtc_Word16 fim[FRAMESAMPLES_HALF]; /* Q7 */
+ int16_t fre[FRAMESAMPLES_HALF]; /* Q7 */
+ int16_t fim[FRAMESAMPLES_HALF]; /* Q7 */
int status = 0;
@@ -852,9 +852,9 @@
double corr[UB_LPC_GAIN_DIM][UB_LPC_ORDER + 1];
double lpcGains[SUBFRAMES];
transcode_obj transcodingParam;
- WebRtc_UWord16 payloadLimitBytes;
+ uint16_t payloadLimitBytes;
double s2nr;
- const WebRtc_Word16 kAveragePitchGain = 0.0;
+ const int16_t kAveragePitchGain = 0.0;
double bytesLeftSpecCoding;
/* Buffer speech samples (by 10ms packet) until the framelength is */
@@ -1011,13 +1011,13 @@
int status;
int BWno = BWnumber;
- const WebRtc_UWord16* WebRtcIsac_kQPitchGainCdf_ptr[1];
- const WebRtc_UWord16** cdf;
+ const uint16_t* WebRtcIsac_kQPitchGainCdf_ptr[1];
+ const uint16_t** cdf;
double tmpLPCcoeffs_lo[(ORDERLO + 1)*SUBFRAMES * 2];
double tmpLPCcoeffs_hi[(ORDERHI + 1)*SUBFRAMES * 2];
int tmpLPCindex_g[12 * 2];
- WebRtc_Word16 tmp_fre[FRAMESAMPLES], tmp_fim[FRAMESAMPLES];
+ int16_t tmp_fre[FRAMESAMPLES], tmp_fim[FRAMESAMPLES];
const int kModel = 0;
/* Sanity Check - possible values for BWnumber is 0 - 23. */
@@ -1053,8 +1053,8 @@
for (ii = 0;
ii < (FRAMESAMPLES_HALF * (1 + ISACSavedEnc_obj->startIdx));
ii++) {
- tmp_fre[ii] = (WebRtc_Word16)((scale) * (float)ISACSavedEnc_obj->fre[ii]);
- tmp_fim[ii] = (WebRtc_Word16)((scale) * (float)ISACSavedEnc_obj->fim[ii]);
+ tmp_fre[ii] = (int16_t)((scale) * (float)ISACSavedEnc_obj->fre[ii]);
+ tmp_fim[ii] = (int16_t)((scale) * (float)ISACSavedEnc_obj->fim[ii]);
}
} else {
for (ii = 0;
@@ -1134,17 +1134,17 @@
int WebRtcIsac_EncodeStoredDataUb(
const ISACUBSaveEncDataStruct* ISACSavedEnc_obj,
Bitstr* bitStream,
- WebRtc_Word32 jitterInfo,
+ int32_t jitterInfo,
float scale,
enum ISACBandwidth bandwidth) {
int n;
int err;
double lpcGain[SUBFRAMES];
- WebRtc_Word16 realFFT[FRAMESAMPLES_HALF];
- WebRtc_Word16 imagFFT[FRAMESAMPLES_HALF];
- const WebRtc_UWord16** shape_cdf;
+ int16_t realFFT[FRAMESAMPLES_HALF];
+ int16_t imagFFT[FRAMESAMPLES_HALF];
+ const uint16_t** shape_cdf;
int shape_len;
- const WebRtc_Word16 kAveragePitchGain = 0.0;
+ const int16_t kAveragePitchGain = 0.0;
enum ISACBand band;
/* Reset bitstream. */
WebRtcIsac_ResetBitstream(bitStream);
@@ -1201,9 +1201,9 @@
}
for (n = 0; n < FRAMESAMPLES_HALF; n++) {
- realFFT[n] = (WebRtc_Word16)(scale * (float)ISACSavedEnc_obj->realFFT[n] +
+ realFFT[n] = (int16_t)(scale * (float)ISACSavedEnc_obj->realFFT[n] +
0.5f);
- imagFFT[n] = (WebRtc_Word16)(scale * (float)ISACSavedEnc_obj->imagFFT[n] +
+ imagFFT[n] = (int16_t)(scale * (float)ISACSavedEnc_obj->imagFFT[n] +
0.5f);
}
/* Store FFT coefficients. */
@@ -1219,24 +1219,24 @@
return WebRtcIsac_EncTerminate(bitStream);
}
-WebRtc_Word16 WebRtcIsac_GetRedPayloadUb(
+int16_t WebRtcIsac_GetRedPayloadUb(
const ISACUBSaveEncDataStruct* ISACSavedEncObj,
Bitstr* bitStreamObj,
enum ISACBandwidth bandwidth) {
int n;
- WebRtc_Word16 status;
- WebRtc_Word16 realFFT[FRAMESAMPLES_HALF];
- WebRtc_Word16 imagFFT[FRAMESAMPLES_HALF];
+ int16_t status;
+ int16_t realFFT[FRAMESAMPLES_HALF];
+ int16_t imagFFT[FRAMESAMPLES_HALF];
enum ISACBand band;
- const WebRtc_Word16 kAveragePitchGain = 0.0;
+ const int16_t kAveragePitchGain = 0.0;
/* Store bit-stream object. */
memcpy(bitStreamObj, &ISACSavedEncObj->bitStreamObj, sizeof(Bitstr));
/* Scale FFT coefficients. */
for (n = 0; n < FRAMESAMPLES_HALF; n++) {
- realFFT[n] = (WebRtc_Word16)((float)ISACSavedEncObj->realFFT[n] *
+ realFFT[n] = (int16_t)((float)ISACSavedEncObj->realFFT[n] *
RCU_TRANSCODING_SCALE_UB + 0.5);
- imagFFT[n] = (WebRtc_Word16)((float)ISACSavedEncObj->imagFFT[n] *
+ imagFFT[n] = (int16_t)((float)ISACSavedEncObj->imagFFT[n] *
RCU_TRANSCODING_SCALE_UB + 0.5);
}
diff --git a/webrtc/modules/audio_coding/codecs/isac/main/source/encode_lpc_swb.c b/webrtc/modules/audio_coding/codecs/isac/main/source/encode_lpc_swb.c
index 2bf4c36..67e77b5 100644
--- a/webrtc/modules/audio_coding/codecs/isac/main/source/encode_lpc_swb.c
+++ b/webrtc/modules/audio_coding/codecs/isac/main/source/encode_lpc_swb.c
@@ -44,14 +44,14 @@
*
*
*/
-WebRtc_Word16
+int16_t
WebRtcIsac_RemoveLarMean(
double* lar,
- WebRtc_Word16 bandwidth)
+ int16_t bandwidth)
{
- WebRtc_Word16 coeffCntr;
- WebRtc_Word16 vecCntr;
- WebRtc_Word16 numVec;
+ int16_t coeffCntr;
+ int16_t vecCntr;
+ int16_t numVec;
const double* meanLAR;
switch(bandwidth)
{
@@ -98,18 +98,18 @@
* Output:
* -out : decorrelated LAR vectors.
*/
-WebRtc_Word16
+int16_t
WebRtcIsac_DecorrelateIntraVec(
const double* data,
double* out,
- WebRtc_Word16 bandwidth)
+ int16_t bandwidth)
{
const double* ptrData;
const double* ptrRow;
- WebRtc_Word16 rowCntr;
- WebRtc_Word16 colCntr;
- WebRtc_Word16 larVecCntr;
- WebRtc_Word16 numVec;
+ int16_t rowCntr;
+ int16_t colCntr;
+ int16_t larVecCntr;
+ int16_t numVec;
const double* decorrMat;
switch(bandwidth)
{
@@ -172,17 +172,17 @@
* Output:
* -out : decorrelated LAR vectors.
*/
-WebRtc_Word16
+int16_t
WebRtcIsac_DecorrelateInterVec(
const double* data,
double* out,
- WebRtc_Word16 bandwidth)
+ int16_t bandwidth)
{
- WebRtc_Word16 coeffCntr;
- WebRtc_Word16 rowCntr;
- WebRtc_Word16 colCntr;
+ int16_t coeffCntr;
+ int16_t rowCntr;
+ int16_t colCntr;
const double* decorrMat;
- WebRtc_Word16 interVecDim;
+ int16_t interVecDim;
switch(bandwidth)
{
@@ -245,14 +245,14 @@
WebRtcIsac_QuantizeUncorrLar(
double* data,
int* recIdx,
- WebRtc_Word16 bandwidth)
+ int16_t bandwidth)
{
- WebRtc_Word16 cntr;
- WebRtc_Word32 idx;
- WebRtc_Word16 interVecDim;
+ int16_t cntr;
+ int32_t idx;
+ int16_t interVecDim;
const double* leftRecPoint;
double quantizationStepSize;
- const WebRtc_Word16* numQuantCell;
+ const int16_t* numQuantCell;
switch(bandwidth)
{
case isac12kHz:
@@ -280,7 +280,7 @@
//
for(cntr = 0; cntr < UB_LPC_ORDER * interVecDim; cntr++)
{
- idx = (WebRtc_Word32)floor((*data - leftRecPoint[cntr]) /
+ idx = (int32_t)floor((*data - leftRecPoint[cntr]) /
quantizationStepSize + 0.5);
if(idx < 0)
{
@@ -311,14 +311,14 @@
* Output:
* -out : pointer to quantized values.
*/
-WebRtc_Word16
+int16_t
WebRtcIsac_DequantizeLpcParam(
const int* idx,
double* out,
- WebRtc_Word16 bandwidth)
+ int16_t bandwidth)
{
- WebRtc_Word16 cntr;
- WebRtc_Word16 interVecDim;
+ int16_t cntr;
+ int16_t interVecDim;
const double* leftRecPoint;
double quantizationStepSize;
@@ -367,16 +367,16 @@
* Output:
 * -out : correlated parameters.
*/
-WebRtc_Word16
+int16_t
WebRtcIsac_CorrelateIntraVec(
const double* data,
double* out,
- WebRtc_Word16 bandwidth)
+ int16_t bandwidth)
{
- WebRtc_Word16 vecCntr;
- WebRtc_Word16 rowCntr;
- WebRtc_Word16 colCntr;
- WebRtc_Word16 numVec;
+ int16_t vecCntr;
+ int16_t rowCntr;
+ int16_t colCntr;
+ int16_t numVec;
const double* ptrData;
const double* intraVecDecorrMat;
@@ -430,16 +430,16 @@
* Output:
 * -out : correlated parameters.
*/
-WebRtc_Word16
+int16_t
WebRtcIsac_CorrelateInterVec(
const double* data,
double* out,
- WebRtc_Word16 bandwidth)
+ int16_t bandwidth)
{
- WebRtc_Word16 coeffCntr;
- WebRtc_Word16 rowCntr;
- WebRtc_Word16 colCntr;
- WebRtc_Word16 interVecDim;
+ int16_t coeffCntr;
+ int16_t rowCntr;
+ int16_t colCntr;
+ int16_t interVecDim;
double myVec[UB16_LPC_VEC_PER_FRAME];
const double* interVecDecorrMat;
@@ -495,14 +495,14 @@
* Output:
* -data : pointer to LARs.
*/
-WebRtc_Word16
+int16_t
WebRtcIsac_AddLarMean(
double* data,
- WebRtc_Word16 bandwidth)
+ int16_t bandwidth)
{
- WebRtc_Word16 coeffCntr;
- WebRtc_Word16 vecCntr;
- WebRtc_Word16 numVec;
+ int16_t coeffCntr;
+ int16_t vecCntr;
+ int16_t numVec;
const double* meanLAR;
switch(bandwidth)
@@ -544,11 +544,11 @@
* Output:
* -lpcGain : mean-removed in log domain.
*/
-WebRtc_Word16
+int16_t
WebRtcIsac_ToLogDomainRemoveMean(
double* data)
{
- WebRtc_Word16 coeffCntr;
+ int16_t coeffCntr;
for(coeffCntr = 0; coeffCntr < UB_LPC_GAIN_DIM; coeffCntr++)
{
data[coeffCntr] = log(data[coeffCntr]) - WebRtcIsac_kMeanLpcGain;
@@ -569,12 +569,12 @@
* Output:
* -out : decorrelated parameters.
*/
-WebRtc_Word16 WebRtcIsac_DecorrelateLPGain(
+int16_t WebRtcIsac_DecorrelateLPGain(
const double* data,
double* out)
{
- WebRtc_Word16 rowCntr;
- WebRtc_Word16 colCntr;
+ int16_t rowCntr;
+ int16_t colCntr;
for(colCntr = 0; colCntr < UB_LPC_GAIN_DIM; colCntr++)
{
@@ -604,7 +604,7 @@
double* data,
int* idx)
{
- WebRtc_Word16 coeffCntr;
+ int16_t coeffCntr;
for(coeffCntr = 0; coeffCntr < UB_LPC_GAIN_DIM; coeffCntr++)
{
*idx = (int)floor((*data - WebRtcIsac_kLeftRecPointLpcGain[coeffCntr]) /
@@ -638,11 +638,11 @@
* Output:
 * -lpcGains : quantized values of the given parameters.
*/
-WebRtc_Word16 WebRtcIsac_DequantizeLpcGain(
+int16_t WebRtcIsac_DequantizeLpcGain(
const int* idx,
double* out)
{
- WebRtc_Word16 coeffCntr;
+ int16_t coeffCntr;
for(coeffCntr = 0; coeffCntr < UB_LPC_GAIN_DIM; coeffCntr++)
{
*out = WebRtcIsac_kLeftRecPointLpcGain[coeffCntr] + *idx *
@@ -664,12 +664,12 @@
* Output:
* -out : correlated parameters.
*/
-WebRtc_Word16 WebRtcIsac_CorrelateLpcGain(
+int16_t WebRtcIsac_CorrelateLpcGain(
const double* data,
double* out)
{
- WebRtc_Word16 rowCntr;
- WebRtc_Word16 colCntr;
+ int16_t rowCntr;
+ int16_t colCntr;
for(rowCntr = 0; rowCntr < UB_LPC_GAIN_DIM; rowCntr++)
{
@@ -696,10 +696,10 @@
* Output:
* -lpcGain : LPC gain in normal domain.
*/
-WebRtc_Word16 WebRtcIsac_AddMeanToLinearDomain(
+int16_t WebRtcIsac_AddMeanToLinearDomain(
double* lpcGains)
{
- WebRtc_Word16 coeffCntr;
+ int16_t coeffCntr;
for(coeffCntr = 0; coeffCntr < UB_LPC_GAIN_DIM; coeffCntr++)
{
lpcGains[coeffCntr] = exp(lpcGains[coeffCntr] + WebRtcIsac_kMeanLpcGain);
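[review note] The gain path in this file is: move to the log domain and remove a fixed mean, (de)correlate, quantize uniformly against a left reconstruction point and step size, and apply the exact inverse on the way back. A compact sketch of the quantize/dequantize round trip only (it deliberately skips the decorrelation stage, and kLeftRecPoint / kStep / kMeanLog / kNumCells are placeholder parameters, not the real iSAC tables):

#include <math.h>

/* Sketch: log-domain mean removal + uniform scalar quantization. */
static int QuantizeLogGain(double gain, double kLeftRecPoint, double kStep,
                           double kMeanLog, int kNumCells) {
  double x = log(gain) - kMeanLog;           /* to log domain, remove mean */
  int idx = (int)floor((x - kLeftRecPoint) / kStep + 0.5);
  if (idx < 0) idx = 0;                      /* clamp to the codebook range */
  if (idx >= kNumCells) idx = kNumCells - 1;
  return idx;
}

static double DequantizeLogGain(int idx, double kLeftRecPoint, double kStep,
                                double kMeanLog) {
  double x = kLeftRecPoint + idx * kStep;    /* uniform reconstruction */
  return exp(x + kMeanLog);                  /* back to the linear domain */
}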
diff --git a/webrtc/modules/audio_coding/codecs/isac/main/source/encode_lpc_swb.h b/webrtc/modules/audio_coding/codecs/isac/main/source/encode_lpc_swb.h
index e7f1a76..eab98c1 100644
--- a/webrtc/modules/audio_coding/codecs/isac/main/source/encode_lpc_swb.h
+++ b/webrtc/modules/audio_coding/codecs/isac/main/source/encode_lpc_swb.h
@@ -40,9 +40,9 @@
*
*
*/
-WebRtc_Word16 WebRtcIsac_RemoveLarMean(
+int16_t WebRtcIsac_RemoveLarMean(
double* lar,
- WebRtc_Word16 bandwidth);
+ int16_t bandwidth);
/******************************************************************************
* WebRtcIsac_DecorrelateIntraVec()
@@ -60,10 +60,10 @@
* Output:
* -out : decorrelated LAR vectors.
*/
-WebRtc_Word16 WebRtcIsac_DecorrelateIntraVec(
+int16_t WebRtcIsac_DecorrelateIntraVec(
const double* inLAR,
double* out,
- WebRtc_Word16 bandwidth);
+ int16_t bandwidth);
/******************************************************************************
@@ -83,10 +83,10 @@
* Output:
* -out : decorrelated LAR vectors.
*/
-WebRtc_Word16 WebRtcIsac_DecorrelateInterVec(
+int16_t WebRtcIsac_DecorrelateInterVec(
const double* data,
double* out,
- WebRtc_Word16 bandwidth);
+ int16_t bandwidth);
/******************************************************************************
@@ -106,7 +106,7 @@
double WebRtcIsac_QuantizeUncorrLar(
double* data,
int* idx,
- WebRtc_Word16 bandwidth);
+ int16_t bandwidth);
/******************************************************************************
@@ -122,10 +122,10 @@
* Output:
 * -out : correlated parameters.
*/
-WebRtc_Word16 WebRtcIsac_CorrelateIntraVec(
+int16_t WebRtcIsac_CorrelateIntraVec(
const double* data,
double* out,
- WebRtc_Word16 bandwidth);
+ int16_t bandwidth);
/******************************************************************************
@@ -141,10 +141,10 @@
* Output:
 * -out : correlated parameters.
*/
-WebRtc_Word16 WebRtcIsac_CorrelateInterVec(
+int16_t WebRtcIsac_CorrelateInterVec(
const double* data,
double* out,
- WebRtc_Word16 bandwidth);
+ int16_t bandwidth);
/******************************************************************************
@@ -160,9 +160,9 @@
* Output:
* -data : pointer to LARs.
*/
-WebRtc_Word16 WebRtcIsac_AddLarMean(
+int16_t WebRtcIsac_AddLarMean(
double* data,
- WebRtc_Word16 bandwidth);
+ int16_t bandwidth);
/******************************************************************************
@@ -178,10 +178,10 @@
* Output:
* -out : pointer to quantized values.
*/
-WebRtc_Word16 WebRtcIsac_DequantizeLpcParam(
+int16_t WebRtcIsac_DequantizeLpcParam(
const int* idx,
double* out,
- WebRtc_Word16 bandwidth);
+ int16_t bandwidth);
/******************************************************************************
@@ -195,7 +195,7 @@
* Output:
* -lpcGain : mean-removed in log domain.
*/
-WebRtc_Word16 WebRtcIsac_ToLogDomainRemoveMean(
+int16_t WebRtcIsac_ToLogDomainRemoveMean(
double* lpGains);
@@ -211,7 +211,7 @@
* Output:
* -out : decorrelated parameters.
*/
-WebRtc_Word16 WebRtcIsac_DecorrelateLPGain(
+int16_t WebRtcIsac_DecorrelateLPGain(
const double* data,
double* out);
@@ -244,7 +244,7 @@
* Output:
 * -lpcGains : quantized values of the given parameters.
*/
-WebRtc_Word16 WebRtcIsac_DequantizeLpcGain(
+int16_t WebRtcIsac_DequantizeLpcGain(
const int* idx,
double* lpGains);
@@ -260,7 +260,7 @@
* Output:
* -out : correlated parameters.
*/
-WebRtc_Word16 WebRtcIsac_CorrelateLpcGain(
+int16_t WebRtcIsac_CorrelateLpcGain(
const double* data,
double* out);
@@ -276,7 +276,7 @@
* Output:
* -lpcGain : LPC gain in normal domain.
*/
-WebRtc_Word16 WebRtcIsac_AddMeanToLinearDomain(
+int16_t WebRtcIsac_AddMeanToLinearDomain(
double* lpcGains);
diff --git a/webrtc/modules/audio_coding/codecs/isac/main/source/entropy_coding.c b/webrtc/modules/audio_coding/codecs/isac/main/source/entropy_coding.c
index 0ef1b38..66bf06d 100644
--- a/webrtc/modules/audio_coding/codecs/isac/main/source/entropy_coding.c
+++ b/webrtc/modules/audio_coding/codecs/isac/main/source/entropy_coding.c
@@ -34,32 +34,32 @@
#include <math.h>
#include <string.h>
-static const WebRtc_UWord16 kLpcVecPerSegmentUb12 = 5;
-static const WebRtc_UWord16 kLpcVecPerSegmentUb16 = 4;
+static const uint16_t kLpcVecPerSegmentUb12 = 5;
+static const uint16_t kLpcVecPerSegmentUb16 = 4;
/* CDF array for encoder bandwidth (12 vs 16 kHz) indicator. */
-static const WebRtc_UWord16 kOneBitEqualProbCdf[3] = {
+static const uint16_t kOneBitEqualProbCdf[3] = {
0, 32768, 65535 };
/* Pointer to cdf array for encoder bandwidth (12 vs 16 kHz) indicator. */
-static const WebRtc_UWord16* kOneBitEqualProbCdf_ptr[1] = {
+static const uint16_t* kOneBitEqualProbCdf_ptr[1] = {
kOneBitEqualProbCdf };
/*
* Initial cdf index for decoder of encoded bandwidth
* (12 vs 16 kHz) indicator.
*/
-static const WebRtc_UWord16 kOneBitEqualProbInitIndex[1] = { 1 };
+static const uint16_t kOneBitEqualProbInitIndex[1] = { 1 };
static const int kIsSWB12 = 1;
/* compute correlation from power spectrum */
-static void FindCorrelation(WebRtc_Word32* PSpecQ12, WebRtc_Word32* CorrQ7) {
- WebRtc_Word32 summ[FRAMESAMPLES / 8];
- WebRtc_Word32 diff[FRAMESAMPLES / 8];
- const WebRtc_Word16* CS_ptrQ9;
- WebRtc_Word32 sum;
+static void FindCorrelation(int32_t* PSpecQ12, int32_t* CorrQ7) {
+ int32_t summ[FRAMESAMPLES / 8];
+ int32_t diff[FRAMESAMPLES / 8];
+ const int16_t* CS_ptrQ9;
+ int32_t sum;
int k, n;
for (k = 0; k < FRAMESAMPLES / 8; k++) {
@@ -92,15 +92,15 @@
/* compute inverse AR power spectrum */
/* Changed to the function used in iSAC FIX for compatibility reasons */
-static void FindInvArSpec(const WebRtc_Word16* ARCoefQ12,
- const WebRtc_Word32 gainQ10,
- WebRtc_Word32* CurveQ16) {
- WebRtc_Word32 CorrQ11[AR_ORDER + 1];
- WebRtc_Word32 sum, tmpGain;
- WebRtc_Word32 diffQ16[FRAMESAMPLES / 8];
- const WebRtc_Word16* CS_ptrQ9;
+static void FindInvArSpec(const int16_t* ARCoefQ12,
+ const int32_t gainQ10,
+ int32_t* CurveQ16) {
+ int32_t CorrQ11[AR_ORDER + 1];
+ int32_t sum, tmpGain;
+ int32_t diffQ16[FRAMESAMPLES / 8];
+ const int16_t* CS_ptrQ9;
int k, n;
- WebRtc_Word16 round, shftVal = 0, sh;
+ int16_t round, shftVal = 0, sh;
sum = 0;
for (n = 0; n < AR_ORDER + 1; n++) {
@@ -174,10 +174,10 @@
}
/* Generate array of dither samples in Q7. */
-static void GenerateDitherQ7Lb(WebRtc_Word16* bufQ7, WebRtc_UWord32 seed,
- int length, WebRtc_Word16 AvgPitchGain_Q12) {
+static void GenerateDitherQ7Lb(int16_t* bufQ7, uint32_t seed,
+ int length, int16_t AvgPitchGain_Q12) {
int k, shft;
- WebRtc_Word16 dither1_Q7, dither2_Q7, dither_gain_Q14;
+ int16_t dither1_Q7, dither2_Q7, dither_gain_Q14;
/* This threshold should be equal to that in decode_spec(). */
if (AvgPitchGain_Q12 < 614) {
@@ -187,13 +187,13 @@
/* Fixed-point dither sample between -64 and 64 (Q7). */
/* dither = seed * 128 / 4294967295 */
- dither1_Q7 = (WebRtc_Word16)(((int)seed + 16777216) >> 25);
+ dither1_Q7 = (int16_t)(((int)seed + 16777216) >> 25);
/* New random unsigned int. */
seed = (seed * 196314165) + 907633515;
/* Fixed-point dither sample between -64 and 64. */
- dither2_Q7 = (WebRtc_Word16)(((int)seed + 16777216) >> 25);
+ dither2_Q7 = (int16_t)(((int)seed + 16777216) >> 25);
shft = (seed >> 25) & 15;
if (shft < 5) {
@@ -211,7 +211,7 @@
}
}
} else {
- dither_gain_Q14 = (WebRtc_Word16)(22528 - 10 * AvgPitchGain_Q12);
+ dither_gain_Q14 = (int16_t)(22528 - 10 * AvgPitchGain_Q12);
/* Dither on half of the coefficients. */
for (k = 0; k < length - 1; k += 2) {
@@ -219,7 +219,7 @@
seed = (seed * 196314165) + 907633515;
/* Fixed-point dither sample between -64 and 64. */
- dither1_Q7 = (WebRtc_Word16)(((int)seed + 16777216) >> 25);
+ dither1_Q7 = (int16_t)(((int)seed + 16777216) >> 25);
/* Dither sample is placed in either even or odd index. */
shft = (seed >> 25) & 1; /* Either 0 or 1 */
@@ -249,8 +249,8 @@
* -bufQ7 : pointer to a buffer where dithers are written to.
*/
static void GenerateDitherQ7LbUB(
- WebRtc_Word16* bufQ7,
- WebRtc_UWord32 seed,
+ int16_t* bufQ7,
+ uint32_t seed,
int length) {
int k;
for (k = 0; k < length; k++) {
@@ -259,10 +259,10 @@
/* Fixed-point dither sample between -64 and 64 (Q7). */
/* bufQ7 = seed * 128 / 4294967295 */
- bufQ7[k] = (WebRtc_Word16)(((int)seed + 16777216) >> 25);
+ bufQ7[k] = (int16_t)(((int)seed + 16777216) >> 25);
/* Scale by 0.35. */
- bufQ7[k] = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(bufQ7[k], 2048, 13);
+ bufQ7[k] = (int16_t)WEBRTC_SPL_MUL_16_16_RSFT(bufQ7[k], 2048, 13);
}
}
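[review note] Both dither generators above use the same 32-bit linear congruential generator, seed = seed * 196314165 + 907633515, and turn the state into a Q7 sample in roughly [-64, 64]; the + 16777216 term adds half an LSB before the arithmetic shift by 25 (the "seed * 128 / 4294967295" comment). A standalone sketch of that mapping (the ordering of state update vs. sample extraction differs slightly between the two functions):

#include <stdint.h>

/* Sketch: one LCG-driven Q7 dither sample, as in GenerateDitherQ7Lb/LbUB. */
static int16_t NextDitherQ7(uint32_t* seed) {
  *seed = (*seed) * 196314165u + 907633515u;   /* LCG state update */
  /* Map the 32-bit state to [-64, 64] in Q7: add half an LSB, then
   * arithmetic-shift the signed reinterpretation right by 25 bits. */
  return (int16_t)(((int)*seed + 16777216) >> 25);
}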
@@ -270,18 +270,18 @@
* Function to decode the complex spectrum from the bit stream
* returns the total number of bytes in the stream.
*/
-int WebRtcIsac_DecodeSpec(Bitstr* streamdata, WebRtc_Word16 AvgPitchGain_Q12,
+int WebRtcIsac_DecodeSpec(Bitstr* streamdata, int16_t AvgPitchGain_Q12,
enum ISACBand band, double* fr, double* fi) {
- WebRtc_Word16 DitherQ7[FRAMESAMPLES];
- WebRtc_Word16 data[FRAMESAMPLES];
- WebRtc_Word32 invARSpec2_Q16[FRAMESAMPLES_QUARTER];
- WebRtc_UWord16 invARSpecQ8[FRAMESAMPLES_QUARTER];
- WebRtc_Word16 ARCoefQ12[AR_ORDER + 1];
- WebRtc_Word16 RCQ15[AR_ORDER];
- WebRtc_Word16 gainQ10;
- WebRtc_Word32 gain2_Q10, res;
- WebRtc_Word32 in_sqrt;
- WebRtc_Word32 newRes;
+ int16_t DitherQ7[FRAMESAMPLES];
+ int16_t data[FRAMESAMPLES];
+ int32_t invARSpec2_Q16[FRAMESAMPLES_QUARTER];
+ uint16_t invARSpecQ8[FRAMESAMPLES_QUARTER];
+ int16_t ARCoefQ12[AR_ORDER + 1];
+ int16_t RCQ15[AR_ORDER];
+ int16_t gainQ10;
+ int32_t gain2_Q10, res;
+ int32_t in_sqrt;
+ int32_t newRes;
int k, len, i;
int is_12khz = !kIsSWB12;
int num_dft_coeff = FRAMESAMPLES;
@@ -326,7 +326,7 @@
newRes = (in_sqrt / res + res) >> 1;
} while (newRes != res && i-- > 0);
- invARSpecQ8[k] = (WebRtc_Word16)newRes;
+ invARSpecQ8[k] = (int16_t)newRes;
}
len = WebRtcIsac_DecLogisticMulti2(data, streamdata, invARSpecQ8, DitherQ7,
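[review note] The do/while loop feeding invARSpecQ8 is the Babylonian (Newton) iteration for an integer square root of in_sqrt: it averages res with in_sqrt / res until the estimate stops changing or the iteration budget runs out. The same idea in isolation (the real code derives its initial guess from the preceding computation; this sketch just picks a crude one):

#include <stdint.h>

/* Sketch: integer square root by Newton/Babylonian iteration. */
static int32_t IntSqrt(int32_t in_sqrt) {
  int32_t res, newRes;
  int i = 30;                                    /* generous iteration budget */
  if (in_sqrt <= 0)
    return 0;
  newRes = (in_sqrt >> 1) ? (in_sqrt >> 1) : 1;  /* crude initial guess */
  do {
    res = newRes;
    newRes = (in_sqrt / res + res) >> 1;         /* Newton step for x*x = in_sqrt */
  } while (newRes != res && i-- > 0);
  return newRes;                                 /* may be off by one if oscillating */
}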
@@ -339,8 +339,8 @@
switch (band) {
case kIsacLowerBand: {
/* Scale down spectral samples with low SNR. */
- WebRtc_Word32 p1;
- WebRtc_Word32 p2;
+ int32_t p1;
+ int32_t p2;
if (AvgPitchGain_Q12 <= 614) {
p1 = 30 << 10;
p2 = 32768 + (33 << 16);
@@ -349,7 +349,7 @@
p2 = 32768 + (40 << 16);
}
for (k = 0; k < FRAMESAMPLES; k += 4) {
- gainQ10 = WebRtcSpl_DivW32W16ResW16(p1, (WebRtc_Word16)(
+ gainQ10 = WebRtcSpl_DivW32W16ResW16(p1, (int16_t)(
(invARSpec2_Q16[k >> 2] + p2) >> 16));
*fr++ = (double)((data[ k ] * gainQ10 + 512) >> 10) / 128.0;
*fi++ = (double)((data[k + 1] * gainQ10 + 512) >> 10) / 128.0;
@@ -391,26 +391,26 @@
}
-int WebRtcIsac_EncodeSpec(const WebRtc_Word16* fr, const WebRtc_Word16* fi,
- WebRtc_Word16 AvgPitchGain_Q12, enum ISACBand band,
+int WebRtcIsac_EncodeSpec(const int16_t* fr, const int16_t* fi,
+ int16_t AvgPitchGain_Q12, enum ISACBand band,
Bitstr* streamdata) {
- WebRtc_Word16 ditherQ7[FRAMESAMPLES];
- WebRtc_Word16 dataQ7[FRAMESAMPLES];
- WebRtc_Word32 PSpec[FRAMESAMPLES_QUARTER];
- WebRtc_Word32 invARSpec2_Q16[FRAMESAMPLES_QUARTER];
- WebRtc_UWord16 invARSpecQ8[FRAMESAMPLES_QUARTER];
- WebRtc_Word32 CorrQ7[AR_ORDER + 1];
- WebRtc_Word32 CorrQ7_norm[AR_ORDER + 1];
- WebRtc_Word16 RCQ15[AR_ORDER];
- WebRtc_Word16 ARCoefQ12[AR_ORDER + 1];
- WebRtc_Word32 gain2_Q10;
- WebRtc_Word16 val;
- WebRtc_Word32 nrg, res;
- WebRtc_UWord32 sum;
- WebRtc_Word32 in_sqrt;
- WebRtc_Word32 newRes;
- WebRtc_Word16 err;
- WebRtc_UWord32 nrg_u32;
+ int16_t ditherQ7[FRAMESAMPLES];
+ int16_t dataQ7[FRAMESAMPLES];
+ int32_t PSpec[FRAMESAMPLES_QUARTER];
+ int32_t invARSpec2_Q16[FRAMESAMPLES_QUARTER];
+ uint16_t invARSpecQ8[FRAMESAMPLES_QUARTER];
+ int32_t CorrQ7[AR_ORDER + 1];
+ int32_t CorrQ7_norm[AR_ORDER + 1];
+ int16_t RCQ15[AR_ORDER];
+ int16_t ARCoefQ12[AR_ORDER + 1];
+ int32_t gain2_Q10;
+ int16_t val;
+ int32_t nrg, res;
+ uint32_t sum;
+ int32_t in_sqrt;
+ int32_t newRes;
+ int16_t err;
+ uint32_t nrg_u32;
int shift_var;
int k, n, j, i;
int is_12khz = !kIsSWB12;
@@ -542,7 +542,7 @@
}
}
- nrg_u32 = (WebRtc_UWord32)nrg;
+ nrg_u32 = (uint32_t)nrg;
if (shift_var > 0) {
nrg_u32 = nrg_u32 >> shift_var;
} else {
@@ -551,7 +551,7 @@
if (nrg_u32 > 0x7FFFFFFF) {
nrg = 0x7FFFFFFF;
} else {
- nrg = (WebRtc_Word32)nrg_u32;
+ nrg = (int32_t)nrg_u32;
}
/* Also shifts 31 bits to the left! */
gain2_Q10 = WebRtcSpl_DivResultInQ31(FRAMESAMPLES_QUARTER, nrg);
@@ -579,7 +579,7 @@
newRes = (in_sqrt / res + res) >> 1;
} while (newRes != res && i-- > 0);
- invARSpecQ8[k] = (WebRtc_Word16)newRes;
+ invARSpecQ8[k] = (int16_t)newRes;
}
/* arithmetic coding of spectrum */
err = WebRtcIsac_EncLogisticMulti2(streamdata, dataQ7, invARSpecQ8,
@@ -682,13 +682,13 @@
}
-WebRtc_Word16 WebRtcIsac_Poly2LarUB(double* lpcVecs, WebRtc_Word16 bandwidth) {
+int16_t WebRtcIsac_Poly2LarUB(double* lpcVecs, int16_t bandwidth) {
double poly[MAX_ORDER];
double rc[MAX_ORDER];
double* ptrIO;
- WebRtc_Word16 vecCntr;
- WebRtc_Word16 vecSize;
- WebRtc_Word16 numVec;
+ int16_t vecCntr;
+ int16_t vecSize;
+ int16_t numVec;
vecSize = UB_LPC_ORDER;
switch (bandwidth) {
@@ -791,16 +791,16 @@
return 0;
}
-WebRtc_Word16 WebRtcIsac_DecodeInterpolLpcUb(Bitstr* streamdata,
- double* percepFilterParams,
- WebRtc_Word16 bandwidth) {
+int16_t WebRtcIsac_DecodeInterpolLpcUb(Bitstr* streamdata,
+ double* percepFilterParams,
+ int16_t bandwidth) {
double lpcCoeff[UB_LPC_ORDER * UB16_LPC_VEC_PER_FRAME];
int err;
int interpolCntr;
int subframeCntr;
- WebRtc_Word16 numSegments;
- WebRtc_Word16 numVecPerSegment;
- WebRtc_Word16 numGains;
+ int16_t numSegments;
+ int16_t numVecPerSegment;
+ int16_t numGains;
double percepFilterGains[SUBFRAMES << 1];
double* ptrOutParam = percepFilterParams;
@@ -1181,9 +1181,9 @@
}
-WebRtc_Word16 WebRtcIsac_EncodeLpcUB(double* lpcVecs, Bitstr* streamdata,
- double* interpolLPCCoeff,
- WebRtc_Word16 bandwidth,
+int16_t WebRtcIsac_EncodeLpcUB(double* lpcVecs, Bitstr* streamdata,
+ double* interpolLPCCoeff,
+ int16_t bandwidth,
ISACUBSaveEncDataStruct* encData) {
double U[UB_LPC_ORDER * UB16_LPC_VEC_PER_FRAME];
int idx[UB_LPC_ORDER * UB16_LPC_VEC_PER_FRAME];
@@ -1402,7 +1402,7 @@
-WebRtc_Word16 WebRtcIsac_DecodeLpcGainUb(double* lpGains, Bitstr* streamdata) {
+int16_t WebRtcIsac_DecodeLpcGainUb(double* lpGains, Bitstr* streamdata) {
double U[UB_LPC_GAIN_DIM];
int idx[UB_LPC_GAIN_DIM];
int err;
@@ -1422,7 +1422,7 @@
/* decode & dequantize RC */
-int WebRtcIsac_DecodeRc(Bitstr* streamdata, WebRtc_Word16* RCQ15) {
+int WebRtcIsac_DecodeRc(Bitstr* streamdata, int16_t* RCQ15) {
int k, err;
int index[AR_ORDER];
@@ -1442,7 +1442,7 @@
/* quantize & code RC */
-void WebRtcIsac_EncodeRc(WebRtc_Word16* RCQ15, Bitstr* streamdata) {
+void WebRtcIsac_EncodeRc(int16_t* RCQ15, Bitstr* streamdata) {
int k;
int index[AR_ORDER];
@@ -1466,7 +1466,7 @@
/* decode & dequantize squared Gain */
-int WebRtcIsac_DecodeGain2(Bitstr* streamdata, WebRtc_Word32* gainQ10) {
+int WebRtcIsac_DecodeGain2(Bitstr* streamdata, int32_t* gainQ10) {
int index, err;
/* entropy decoding of quantization index */
@@ -1483,7 +1483,7 @@
/* quantize & code squared Gain */
-int WebRtcIsac_EncodeGain2(WebRtc_Word32* gainQ10, Bitstr* streamdata) {
+int WebRtcIsac_EncodeGain2(int32_t* gainQ10, Bitstr* streamdata) {
int index;
/* find quantization index */
@@ -1508,9 +1508,9 @@
/* decode & dequantize Pitch Gains */
int WebRtcIsac_DecodePitchGain(Bitstr* streamdata,
- WebRtc_Word16* PitchGains_Q12) {
+ int16_t* PitchGains_Q12) {
int index_comb, err;
- const WebRtc_UWord16* WebRtcIsac_kQPitchGainCdf_ptr[1];
+ const uint16_t* WebRtcIsac_kQPitchGainCdf_ptr[1];
/* Entropy decoding of quantization indices */
*WebRtcIsac_kQPitchGainCdf_ptr = WebRtcIsac_kQPitchGainCdf;
@@ -1531,7 +1531,7 @@
/* Quantize & code Pitch Gains. */
-void WebRtcIsac_EncodePitchGain(WebRtc_Word16* PitchGains_Q12,
+void WebRtcIsac_EncodePitchGain(int16_t* PitchGains_Q12,
Bitstr* streamdata,
ISAC_SaveEncData_t* encData) {
int k, j;
@@ -1539,7 +1539,7 @@
double S[PITCH_SUBFRAMES];
int index[3];
int index_comb;
- const WebRtc_UWord16* WebRtcIsac_kQPitchGainCdf_ptr[1];
+ const uint16_t* WebRtcIsac_kQPitchGainCdf_ptr[1];
double PitchGains[PITCH_SUBFRAMES] = {0, 0, 0, 0};
/* Take the asin. */
@@ -1589,7 +1589,7 @@
/* Pitch LAG */
/* Decode & de-quantize Pitch Lags. */
-int WebRtcIsac_DecodePitchLag(Bitstr* streamdata, WebRtc_Word16* PitchGain_Q12,
+int WebRtcIsac_DecodePitchLag(Bitstr* streamdata, int16_t* PitchGain_Q12,
double* PitchLags) {
int k, err;
double StepSize;
@@ -1597,10 +1597,10 @@
int index[PITCH_SUBFRAMES];
double mean_gain;
const double* mean_val2, *mean_val3, *mean_val4;
- const WebRtc_Word16* lower_limit;
- const WebRtc_UWord16* init_index;
- const WebRtc_UWord16* cdf_size;
- const WebRtc_UWord16** cdf;
+ const int16_t* lower_limit;
+ const uint16_t* init_index;
+ const uint16_t* cdf_size;
+ const uint16_t** cdf;
double PitchGain[4] = {0, 0, 0, 0};
/* compute mean pitch gain */
@@ -1676,7 +1676,7 @@
/* Quantize & code pitch lags. */
-void WebRtcIsac_EncodePitchLag(double* PitchLags, WebRtc_Word16* PitchGain_Q12,
+void WebRtcIsac_EncodePitchLag(double* PitchLags, int16_t* PitchGain_Q12,
Bitstr* streamdata,
ISAC_SaveEncData_t* encData) {
int k, j;
@@ -1685,8 +1685,8 @@
int index[PITCH_SUBFRAMES];
double mean_gain;
const double* mean_val2, *mean_val3, *mean_val4;
- const WebRtc_Word16* lower_limit, *upper_limit;
- const WebRtc_UWord16** cdf;
+ const int16_t* lower_limit, *upper_limit;
+ const uint16_t** cdf;
double PitchGain[4] = {0, 0, 0, 0};
/* compute mean pitch gain */
@@ -1777,18 +1777,18 @@
/* cdf array for frame length indicator */
-const WebRtc_UWord16 WebRtcIsac_kFrameLengthCdf[4] = {
+const uint16_t WebRtcIsac_kFrameLengthCdf[4] = {
0, 21845, 43690, 65535 };
/* pointer to cdf array for frame length indicator */
-const WebRtc_UWord16* WebRtcIsac_kFrameLengthCdf_ptr[1] = {
+const uint16_t* WebRtcIsac_kFrameLengthCdf_ptr[1] = {
WebRtcIsac_kFrameLengthCdf };
/* initial cdf index for decoder of frame length indicator */
-const WebRtc_UWord16 WebRtcIsac_kFrameLengthInitIndex[1] = { 1 };
+const uint16_t WebRtcIsac_kFrameLengthInitIndex[1] = { 1 };
-int WebRtcIsac_DecodeFrameLen(Bitstr* streamdata, WebRtc_Word16* framesamples) {
+int WebRtcIsac_DecodeFrameLen(Bitstr* streamdata, int16_t* framesamples) {
int frame_mode, err;
err = 0;
/* entropy decoding of frame length [1:30ms,2:60ms] */
@@ -1811,7 +1811,7 @@
return err;
}
-int WebRtcIsac_EncodeFrameLen(WebRtc_Word16 framesamples, Bitstr* streamdata) {
+int WebRtcIsac_EncodeFrameLen(int16_t framesamples, Bitstr* streamdata) {
int frame_mode, status;
status = 0;
@@ -1837,19 +1837,19 @@
}
/* cdf array for estimated bandwidth */
-static const WebRtc_UWord16 kBwCdf[25] = {
+static const uint16_t kBwCdf[25] = {
0, 2731, 5461, 8192, 10923, 13653, 16384, 19114, 21845, 24576, 27306, 30037,
32768, 35498, 38229, 40959, 43690, 46421, 49151, 51882, 54613, 57343, 60074,
62804, 65535 };
/* pointer to cdf array for estimated bandwidth */
-static const WebRtc_UWord16* kBwCdfPtr[1] = { kBwCdf };
+static const uint16_t* kBwCdfPtr[1] = { kBwCdf };
/* initial cdf index for decoder of estimated bandwidth*/
-static const WebRtc_UWord16 kBwInitIndex[1] = { 7 };
+static const uint16_t kBwInitIndex[1] = { 7 };
-int WebRtcIsac_DecodeSendBW(Bitstr* streamdata, WebRtc_Word16* BWno) {
+int WebRtcIsac_DecodeSendBW(Bitstr* streamdata, int16_t* BWno) {
int BWno32, err;
/* entropy decoding of sender's BW estimation [0..23] */
@@ -1858,7 +1858,7 @@
if (err < 0) {
return -ISAC_RANGE_ERROR_DECODE_BANDWIDTH;
}
- *BWno = (WebRtc_Word16)BWno32;
+ *BWno = (int16_t)BWno32;
return err;
}
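[review note] kBwCdf above is simply a uniform CDF over the 24 possible bandwidth indices, each symbol getting 65535 / 24 ≈ 2731 of the probability mass, just as kOneBitEqualProbCdf earlier splits 65535 into two equal halves. A sketch of how such a table can be generated (rounding to nearest reproduces the values listed above for 2 and 24 symbols):

#include <stdint.h>

/* Sketch: uniform CDF over num_symbols equiprobable symbols, scaled to the
 * 16-bit range used by the entropy coder (writes num_symbols + 1 entries). */
static void BuildUniformCdf(uint16_t* cdf, uint32_t num_symbols) {
  uint32_t k;
  for (k = 0; k <= num_symbols; k++) {
    cdf[k] = (uint16_t)((2u * 65535u * k + num_symbols) / (2u * num_symbols));
  }
}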
@@ -1950,7 +1950,7 @@
/* Decode & de-quantize LPC Coefficients. */
int WebRtcIsac_DecodeLpcCoefUB(Bitstr* streamdata, double* lpcVecs,
double* percepFilterGains,
- WebRtc_Word16 bandwidth) {
+ int16_t bandwidth) {
int index_s[KLT_ORDER_SHAPE];
double U[UB_LPC_ORDER * UB16_LPC_VEC_PER_FRAME];
@@ -1993,8 +1993,8 @@
return 0;
}
-WebRtc_Word16 WebRtcIsac_EncodeBandwidth(enum ISACBandwidth bandwidth,
- Bitstr* streamData) {
+int16_t WebRtcIsac_EncodeBandwidth(enum ISACBandwidth bandwidth,
+ Bitstr* streamData) {
int bandwidthMode;
switch (bandwidth) {
case isac12kHz: {
@@ -2013,8 +2013,8 @@
return 0;
}
-WebRtc_Word16 WebRtcIsac_DecodeBandwidth(Bitstr* streamData,
- enum ISACBandwidth* bandwidth) {
+int16_t WebRtcIsac_DecodeBandwidth(Bitstr* streamData,
+ enum ISACBandwidth* bandwidth) {
int bandwidthMode;
if (WebRtcIsac_DecHistOneStepMulti(&bandwidthMode, streamData,
kOneBitEqualProbCdf_ptr,
@@ -2036,8 +2036,8 @@
return 0;
}
-WebRtc_Word16 WebRtcIsac_EncodeJitterInfo(WebRtc_Word32 jitterIndex,
- Bitstr* streamData) {
+int16_t WebRtcIsac_EncodeJitterInfo(int32_t jitterIndex,
+ Bitstr* streamData) {
/* This is to avoid LINUX warning until we change 'int' to 'Word32'. */
int intVar;
@@ -2051,8 +2051,8 @@
return 0;
}
-WebRtc_Word16 WebRtcIsac_DecodeJitterInfo(Bitstr* streamData,
- WebRtc_Word32* jitterInfo) {
+int16_t WebRtcIsac_DecodeJitterInfo(Bitstr* streamData,
+ int32_t* jitterInfo) {
int intVar;
/* Use the same CDF table as for bandwidth
* both take two values with equal probability. */
@@ -2061,6 +2061,6 @@
kOneBitEqualProbInitIndex, 1) < 0) {
return -ISAC_RANGE_ERROR_DECODE_BANDWITH;
}
- *jitterInfo = (WebRtc_Word16)(intVar);
+ *jitterInfo = (int16_t)(intVar);
return 0;
}
diff --git a/webrtc/modules/audio_coding/codecs/isac/main/source/entropy_coding.h b/webrtc/modules/audio_coding/codecs/isac/main/source/entropy_coding.h
index 1a46953..1a86ce2 100644
--- a/webrtc/modules/audio_coding/codecs/isac/main/source/entropy_coding.h
+++ b/webrtc/modules/audio_coding/codecs/isac/main/source/entropy_coding.h
@@ -46,7 +46,7 @@
 * Return value : < 0 if an error occurs
* 0 if succeeded.
*/
-int WebRtcIsac_DecodeSpec(Bitstr* streamdata, WebRtc_Word16 AvgPitchGain_Q12,
+int WebRtcIsac_DecodeSpec(Bitstr* streamdata, int16_t AvgPitchGain_Q12,
enum ISACBand band, double* fr, double* fi);
/******************************************************************************
@@ -72,15 +72,15 @@
 * Return value : < 0 if an error occurs
* 0 if succeeded.
*/
-int WebRtcIsac_EncodeSpec(const WebRtc_Word16* fr, const WebRtc_Word16* fi,
- WebRtc_Word16 AvgPitchGain_Q12, enum ISACBand band,
+int WebRtcIsac_EncodeSpec(const int16_t* fr, const int16_t* fi,
+ int16_t AvgPitchGain_Q12, enum ISACBand band,
Bitstr* streamdata);
/* decode & dequantize LPC Coef */
int WebRtcIsac_DecodeLpcCoef(Bitstr* streamdata, double* LPCCoef);
int WebRtcIsac_DecodeLpcCoefUB(Bitstr* streamdata, double* lpcVecs,
double* percepFilterGains,
- WebRtc_Word16 bandwidth);
+ int16_t bandwidth);
int WebRtcIsac_DecodeLpc(Bitstr* streamdata, double* LPCCoef_lo,
double* LPCCoef_hi);
@@ -126,10 +126,10 @@
* Return value : 0 if encoding is successful,
* <0 if failed to encode.
*/
-WebRtc_Word16 WebRtcIsac_EncodeLpcUB(double* lpcCoeff, Bitstr* streamdata,
- double* interpolLPCCoeff,
- WebRtc_Word16 bandwidth,
- ISACUBSaveEncDataStruct* encData);
+int16_t WebRtcIsac_EncodeLpcUB(double* lpcCoeff, Bitstr* streamdata,
+ double* interpolLPCCoeff,
+ int16_t bandwidth,
+ ISACUBSaveEncDataStruct* encData);
/******************************************************************************
* WebRtcIsac_DecodeInterpolLpcUb()
@@ -159,37 +159,37 @@
* Return value : 0 if encoding is successful,
* <0 if failed to encode.
*/
-WebRtc_Word16 WebRtcIsac_DecodeInterpolLpcUb(Bitstr* streamdata,
- double* percepFilterParam,
- WebRtc_Word16 bandwidth);
+int16_t WebRtcIsac_DecodeInterpolLpcUb(Bitstr* streamdata,
+ double* percepFilterParam,
+ int16_t bandwidth);
/* Decode & dequantize RC */
-int WebRtcIsac_DecodeRc(Bitstr* streamdata, WebRtc_Word16* RCQ15);
+int WebRtcIsac_DecodeRc(Bitstr* streamdata, int16_t* RCQ15);
/* Quantize & code RC */
-void WebRtcIsac_EncodeRc(WebRtc_Word16* RCQ15, Bitstr* streamdata);
+void WebRtcIsac_EncodeRc(int16_t* RCQ15, Bitstr* streamdata);
/* Decode & dequantize squared Gain */
-int WebRtcIsac_DecodeGain2(Bitstr* streamdata, WebRtc_Word32* Gain2);
+int WebRtcIsac_DecodeGain2(Bitstr* streamdata, int32_t* Gain2);
/* Quantize & code squared Gain (input is squared gain) */
-int WebRtcIsac_EncodeGain2(WebRtc_Word32* gain2, Bitstr* streamdata);
+int WebRtcIsac_EncodeGain2(int32_t* gain2, Bitstr* streamdata);
-void WebRtcIsac_EncodePitchGain(WebRtc_Word16* PitchGains_Q12,
+void WebRtcIsac_EncodePitchGain(int16_t* PitchGains_Q12,
Bitstr* streamdata,
ISAC_SaveEncData_t* encData);
-void WebRtcIsac_EncodePitchLag(double* PitchLags, WebRtc_Word16* PitchGain_Q12,
+void WebRtcIsac_EncodePitchLag(double* PitchLags, int16_t* PitchGain_Q12,
Bitstr* streamdata, ISAC_SaveEncData_t* encData);
int WebRtcIsac_DecodePitchGain(Bitstr* streamdata,
- WebRtc_Word16* PitchGain_Q12);
-int WebRtcIsac_DecodePitchLag(Bitstr* streamdata, WebRtc_Word16* PitchGain_Q12,
+ int16_t* PitchGain_Q12);
+int WebRtcIsac_DecodePitchLag(Bitstr* streamdata, int16_t* PitchGain_Q12,
double* PitchLag);
-int WebRtcIsac_DecodeFrameLen(Bitstr* streamdata, WebRtc_Word16* framelength);
-int WebRtcIsac_EncodeFrameLen(WebRtc_Word16 framelength, Bitstr* streamdata);
-int WebRtcIsac_DecodeSendBW(Bitstr* streamdata, WebRtc_Word16* BWno);
+int WebRtcIsac_DecodeFrameLen(Bitstr* streamdata, int16_t* framelength);
+int WebRtcIsac_EncodeFrameLen(int16_t framelength, Bitstr* streamdata);
+int WebRtcIsac_DecodeSendBW(Bitstr* streamdata, int16_t* BWno);
void WebRtcIsac_EncodeReceiveBw(int* BWno, Bitstr* streamdata);
/* Step-down */
@@ -253,7 +253,7 @@
* Return value : 0 if succeeded.
* <0 if failed.
*/
-WebRtc_Word16 WebRtcIsac_DecodeLpcGainUb(double* lpGains, Bitstr* streamdata);
+int16_t WebRtcIsac_DecodeLpcGainUb(double* lpGains, Bitstr* streamdata);
/******************************************************************************
@@ -272,8 +272,8 @@
* Return value : 0 if succeeded.
* <0 if failed.
*/
-WebRtc_Word16 WebRtcIsac_EncodeBandwidth(enum ISACBandwidth bandwidth,
- Bitstr* streamData);
+int16_t WebRtcIsac_EncodeBandwidth(enum ISACBandwidth bandwidth,
+ Bitstr* streamData);
/******************************************************************************
@@ -293,8 +293,8 @@
* Return value : 0 if succeeded.
* <0 if failed.
*/
-WebRtc_Word16 WebRtcIsac_DecodeBandwidth(Bitstr* streamData,
- enum ISACBandwidth* bandwidth);
+int16_t WebRtcIsac_DecodeBandwidth(Bitstr* streamData,
+ enum ISACBandwidth* bandwidth);
/******************************************************************************
@@ -314,8 +314,8 @@
* Return value : 0 if succeeded.
* <0 if failed.
*/
-WebRtc_Word16 WebRtcIsac_EncodeJitterInfo(WebRtc_Word32 jitterIndex,
- Bitstr* streamData);
+int16_t WebRtcIsac_EncodeJitterInfo(int32_t jitterIndex,
+ Bitstr* streamData);
/******************************************************************************
@@ -335,7 +335,7 @@
* Return value : 0 if succeeded.
* <0 if failed.
*/
-WebRtc_Word16 WebRtcIsac_DecodeJitterInfo(Bitstr* streamData,
- WebRtc_Word32* jitterInfo);
+int16_t WebRtcIsac_DecodeJitterInfo(Bitstr* streamData,
+ int32_t* jitterInfo);
#endif /* WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_ENTROPY_CODING_H_ */
diff --git a/webrtc/modules/audio_coding/codecs/isac/main/source/isac.c b/webrtc/modules/audio_coding/codecs/isac/main/source/isac.c
index 428fda8..ed794a5 100644
--- a/webrtc/modules/audio_coding/codecs/isac/main/source/isac.c
+++ b/webrtc/modules/audio_coding/codecs/isac/main/source/isac.c
@@ -55,10 +55,10 @@
*
*/
static void UpdatePayloadSizeLimit(ISACMainStruct* instISAC) {
- WebRtc_Word16 lim30MsPayloadBytes = WEBRTC_SPL_MIN(
+ int16_t lim30MsPayloadBytes = WEBRTC_SPL_MIN(
(instISAC->maxPayloadSizeBytes),
(instISAC->maxRateBytesPer30Ms));
- WebRtc_Word16 lim60MsPayloadBytes = WEBRTC_SPL_MIN(
+ int16_t lim60MsPayloadBytes = WEBRTC_SPL_MIN(
(instISAC->maxPayloadSizeBytes),
(instISAC->maxRateBytesPer30Ms << 1));
@@ -112,7 +112,7 @@
if ((instISAC->codingMode == 0) &&
(instISAC->instLB.ISACencLB_obj.buffer_index == 0) &&
(instISAC->instLB.ISACencLB_obj.frame_nb == 0)) {
- WebRtc_Word32 bottleneck;
+ int32_t bottleneck;
WebRtcIsac_GetUplinkBandwidth(&(instISAC->bwestimator_obj),
&bottleneck);
@@ -190,8 +190,8 @@
*
*/
static void GetSendBandwidthInfo(ISACMainStruct* instISAC,
- WebRtc_Word16* bandwidthIndex,
- WebRtc_Word16* jitterInfo) {
+ int16_t* bandwidthIndex,
+ int16_t* jitterInfo) {
if ((instISAC->instLB.ISACencLB_obj.buffer_index ==
(FRAMESAMPLES_10ms << 1)) &&
(instISAC->instLB.ISACencLB_obj.frame_nb == 0)) {
@@ -216,8 +216,8 @@
* Return value : 0 - Ok
* -1 - Error
*/
-WebRtc_Word16 WebRtcIsac_AssignSize(int* sizeInBytes) {
- *sizeInBytes = sizeof(ISACMainStruct) * 2 / sizeof(WebRtc_Word16);
+int16_t WebRtcIsac_AssignSize(int* sizeInBytes) {
+ *sizeInBytes = sizeof(ISACMainStruct) * 2 / sizeof(int16_t);
return 0;
}
@@ -235,8 +235,8 @@
* Return value : 0 - Ok
* -1 - Error
*/
-WebRtc_Word16 WebRtcIsac_Assign(ISACStruct** ISAC_main_inst,
- void* instISAC_Addr) {
+int16_t WebRtcIsac_Assign(ISACStruct** ISAC_main_inst,
+ void* instISAC_Addr) {
if (instISAC_Addr != NULL) {
ISACMainStruct* instISAC = (ISACMainStruct*)instISAC_Addr;
instISAC->errorCode = 0;
@@ -269,7 +269,7 @@
* Return value : 0 - Ok
* -1 - Error
*/
-WebRtc_Word16 WebRtcIsac_Create(ISACStruct** ISAC_main_inst) {
+int16_t WebRtcIsac_Create(ISACStruct** ISAC_main_inst) {
ISACMainStruct* instISAC;
instISAC = (ISACMainStruct*)WEBRTC_SPL_VNEW(ISACMainStruct, 1);
@@ -300,7 +300,7 @@
* Return value : 0 - Ok
* -1 - Error
*/
-WebRtc_Word16 WebRtcIsac_Free(ISACStruct* ISAC_main_inst) {
+int16_t WebRtcIsac_Free(ISACStruct* ISAC_main_inst) {
ISACMainStruct* instISAC = (ISACMainStruct*)ISAC_main_inst;
WEBRTC_SPL_FREE(instISAC);
return 0;
@@ -329,10 +329,10 @@
* Return value : 0 - Ok
* -1 - Error
*/
-static WebRtc_Word16 EncoderInitLb(ISACLBStruct* instLB,
- WebRtc_Word16 codingMode,
- enum IsacSamplingRate sampRate) {
- WebRtc_Word16 statusInit = 0;
+static int16_t EncoderInitLb(ISACLBStruct* instLB,
+ int16_t codingMode,
+ enum IsacSamplingRate sampRate) {
+ int16_t statusInit = 0;
int k;
/* Init stream vector to zero */
@@ -371,9 +371,9 @@
return statusInit;
}
-static WebRtc_Word16 EncoderInitUb(ISACUBStruct* instUB,
- WebRtc_Word16 bandwidth) {
- WebRtc_Word16 statusInit = 0;
+static int16_t EncoderInitUb(ISACUBStruct* instUB,
+ int16_t bandwidth) {
+ int16_t statusInit = 0;
int k;
/* Init stream vector to zero. */
@@ -406,10 +406,10 @@
}
-WebRtc_Word16 WebRtcIsac_EncoderInit(ISACStruct* ISAC_main_inst,
- WebRtc_Word16 codingMode) {
+int16_t WebRtcIsac_EncoderInit(ISACStruct* ISAC_main_inst,
+ int16_t codingMode) {
ISACMainStruct* instISAC = (ISACMainStruct*)ISAC_main_inst;
- WebRtc_Word16 status;
+ int16_t status;
if ((codingMode != 0) && (codingMode != 1)) {
instISAC->errorCode = ISAC_DISALLOWED_CODING_MODE;
@@ -449,9 +449,9 @@
if (instISAC->encoderSamplingRateKHz == kIsacSuperWideband) {
/* Initialize encoder filter-bank. */
memset(instISAC->analysisFBState1, 0,
- FB_STATE_SIZE_WORD32 * sizeof(WebRtc_Word32));
+ FB_STATE_SIZE_WORD32 * sizeof(int32_t));
memset(instISAC->analysisFBState2, 0,
- FB_STATE_SIZE_WORD32 * sizeof(WebRtc_Word32));
+ FB_STATE_SIZE_WORD32 * sizeof(int32_t));
status = EncoderInitUb(&(instISAC->instUB),
instISAC->bandwidthKHz);
@@ -489,21 +489,21 @@
* samples.
* : -1 - Error
*/
-WebRtc_Word16 WebRtcIsac_Encode(ISACStruct* ISAC_main_inst,
- const WebRtc_Word16* speechIn,
- WebRtc_Word16* encoded) {
+int16_t WebRtcIsac_Encode(ISACStruct* ISAC_main_inst,
+ const int16_t* speechIn,
+ int16_t* encoded) {
float inFrame[FRAMESAMPLES_10ms];
- WebRtc_Word16 speechInLB[FRAMESAMPLES_10ms];
- WebRtc_Word16 speechInUB[FRAMESAMPLES_10ms];
- WebRtc_Word16 streamLenLB = 0;
- WebRtc_Word16 streamLenUB = 0;
- WebRtc_Word16 streamLen = 0;
- WebRtc_Word16 k = 0;
- WebRtc_UWord8* ptrEncodedUW8 = (WebRtc_UWord8*)encoded;
+ int16_t speechInLB[FRAMESAMPLES_10ms];
+ int16_t speechInUB[FRAMESAMPLES_10ms];
+ int16_t streamLenLB = 0;
+ int16_t streamLenUB = 0;
+ int16_t streamLen = 0;
+ int16_t k = 0;
+ uint8_t* ptrEncodedUW8 = (uint8_t*)encoded;
int garbageLen = 0;
- WebRtc_Word32 bottleneck = 0;
- WebRtc_Word16 bottleneckIdx = 0;
- WebRtc_Word16 jitterInfo = 0;
+ int32_t bottleneck = 0;
+ int16_t bottleneckIdx = 0;
+ int16_t jitterInfo = 0;
ISACMainStruct* instISAC = (ISACMainStruct*)ISAC_main_inst;
ISACLBStruct* instLB = &(instISAC->instLB);
@@ -641,7 +641,7 @@
memcpy(ptrEncodedUW8, instLB->ISACencLB_obj.bitstr_obj.stream, streamLenLB);
streamLen = streamLenLB;
if (streamLenUB > 0) {
- ptrEncodedUW8[streamLenLB] = (WebRtc_UWord8)(streamLenUB + 1 +
+ ptrEncodedUW8[streamLenLB] = (uint8_t)(streamLenUB + 1 +
LEN_CHECK_SUM_WORD8);
memcpy(&ptrEncodedUW8[streamLenLB + 1],
instUB->ISACencUB_obj.bitstr_obj.stream, streamLenUB);
@@ -664,7 +664,7 @@
if (instISAC->codingMode == 0) {
int minBytes;
int limit;
- WebRtc_UWord8* ptrGarbage;
+ uint8_t* ptrGarbage;
instISAC->MaxDelay = (double)WebRtcIsac_GetUplinkMaxDelay(
&instISAC->bwestimator_obj);
@@ -706,20 +706,20 @@
/* If bit-stream too short then add garbage at the end. */
if (garbageLen > 0) {
for (k = 0; k < garbageLen; k++) {
- ptrGarbage[k] = (WebRtc_UWord8)(rand() & 0xFF);
+ ptrGarbage[k] = (uint8_t)(rand() & 0xFF);
}
/* For a correct length of the upper-band bit-stream together
 * with the garbage. Garbage is embedded in the upper-band bit-stream.
* That is the only way to preserve backward compatibility. */
if ((instISAC->bandwidthKHz == isac8kHz) ||
(streamLenUB == 0)) {
- ptrEncodedUW8[streamLenLB] = (WebRtc_UWord8)garbageLen;
+ ptrEncodedUW8[streamLenLB] = (uint8_t)garbageLen;
} else {
- ptrEncodedUW8[streamLenLB] += (WebRtc_UWord8)garbageLen;
+ ptrEncodedUW8[streamLenLB] += (uint8_t)garbageLen;
/* Write the length of the garbage at the end of the upper-band
* bit-stream, if exists. This helps for sanity check. */
ptrEncodedUW8[streamLenLB + 1 + streamLenUB] =
- (WebRtc_UWord8)garbageLen;
+ (uint8_t)garbageLen;
}
streamLen += garbageLen;
@@ -734,14 +734,14 @@
/* Generate CRC if required. */
if ((instISAC->bandwidthKHz != isac8kHz) && (streamLenUB > 0)) {
- WebRtc_UWord32 crc;
+ uint32_t crc;
- WebRtcIsac_GetCrc((WebRtc_Word16*)(&(ptrEncodedUW8[streamLenLB + 1])),
+ WebRtcIsac_GetCrc((int16_t*)(&(ptrEncodedUW8[streamLenLB + 1])),
streamLenUB + garbageLen, &crc);
#ifndef WEBRTC_BIG_ENDIAN
for (k = 0; k < LEN_CHECK_SUM_WORD8; k++) {
ptrEncodedUW8[streamLen - LEN_CHECK_SUM_WORD8 + k] =
- (WebRtc_UWord8)((crc >> (24 - k * 8)) & 0xFF);
+ (uint8_t)((crc >> (24 - k * 8)) & 0xFF);
}
#else
memcpy(&ptrEncodedUW8[streamLenLB + streamLenUB + 1], &crc,
@@ -782,27 +782,27 @@
* the struct since it is only allowed to read
* the struct.
*/
-WebRtc_Word16 WebRtcIsac_GetNewBitStream(ISACStruct* ISAC_main_inst,
- WebRtc_Word16 bweIndex,
- WebRtc_Word16 jitterInfo,
- WebRtc_Word32 rate,
- WebRtc_Word16* encoded,
- WebRtc_Word16 isRCU) {
+int16_t WebRtcIsac_GetNewBitStream(ISACStruct* ISAC_main_inst,
+ int16_t bweIndex,
+ int16_t jitterInfo,
+ int32_t rate,
+ int16_t* encoded,
+ int16_t isRCU) {
Bitstr iSACBitStreamInst; /* Local struct for bitstream handling */
- WebRtc_Word16 streamLenLB;
- WebRtc_Word16 streamLenUB;
- WebRtc_Word16 totalStreamLen;
+ int16_t streamLenLB;
+ int16_t streamLenUB;
+ int16_t totalStreamLen;
double gain2;
double gain1;
float scale;
enum ISACBandwidth bandwidthKHz;
double rateLB;
double rateUB;
- WebRtc_Word32 currentBN;
- WebRtc_UWord8* encodedPtrUW8 = (WebRtc_UWord8*)encoded;
- WebRtc_UWord32 crc;
+ int32_t currentBN;
+ uint8_t* encodedPtrUW8 = (uint8_t*)encoded;
+ uint32_t crc;
#ifndef WEBRTC_BIG_ENDIAN
- WebRtc_Word16 k;
+ int16_t k;
#endif
ISACMainStruct* instISAC = (ISACMainStruct*)ISAC_main_inst;
@@ -849,7 +849,7 @@
return -1;
}
- /* Convert from bytes to WebRtc_Word16. */
+ /* Convert from bytes to int16_t. */
memcpy(encoded, iSACBitStreamInst.stream, streamLenLB);
if (bandwidthKHz == isac8kHz) {
@@ -890,12 +890,12 @@
memcpy(&encodedPtrUW8[streamLenLB + 1], iSACBitStreamInst.stream,
streamLenUB);
- WebRtcIsac_GetCrc((WebRtc_Word16*)(&(encodedPtrUW8[streamLenLB + 1])),
+ WebRtcIsac_GetCrc((int16_t*)(&(encodedPtrUW8[streamLenLB + 1])),
streamLenUB, &crc);
#ifndef WEBRTC_BIG_ENDIAN
for (k = 0; k < LEN_CHECK_SUM_WORD8; k++) {
encodedPtrUW8[totalStreamLen - LEN_CHECK_SUM_WORD8 + k] =
- (WebRtc_UWord8)((crc >> (24 - k * 8)) & 0xFF);
+ (uint8_t)((crc >> (24 - k * 8)) & 0xFF);
}
#else
memcpy(&encodedPtrUW8[streamLenLB + streamLenUB + 1], &crc,
@@ -921,7 +921,7 @@
* : 0 - Ok
* -1 - Error
*/
-static WebRtc_Word16 DecoderInitLb(ISACLBStruct* instISAC) {
+static int16_t DecoderInitLb(ISACLBStruct* instISAC) {
int i;
/* Initialize stream vector to zero. */
for (i = 0; i < STREAM_SIZE_MAX_60; i++) {
@@ -935,7 +935,7 @@
return 0;
}
-static WebRtc_Word16 DecoderInitUb(ISACUBStruct* instISAC) {
+static int16_t DecoderInitUb(ISACUBStruct* instISAC) {
int i;
/* Init stream vector to zero */
for (i = 0; i < STREAM_SIZE_MAX_60; i++) {
@@ -948,7 +948,7 @@
return (0);
}
-WebRtc_Word16 WebRtcIsac_DecoderInit(ISACStruct* ISAC_main_inst) {
+int16_t WebRtcIsac_DecoderInit(ISACStruct* ISAC_main_inst) {
ISACMainStruct* instISAC = (ISACMainStruct*)ISAC_main_inst;
if (DecoderInitLb(&instISAC->instLB) < 0) {
@@ -956,9 +956,9 @@
}
if (instISAC->decoderSamplingRateKHz == kIsacSuperWideband) {
memset(instISAC->synthesisFBState1, 0,
- FB_STATE_SIZE_WORD32 * sizeof(WebRtc_Word32));
+ FB_STATE_SIZE_WORD32 * sizeof(int32_t));
memset(instISAC->synthesisFBState2, 0,
- FB_STATE_SIZE_WORD32 * sizeof(WebRtc_Word32));
+ FB_STATE_SIZE_WORD32 * sizeof(int32_t));
if (DecoderInitUb(&(instISAC->instUB)) < 0) {
return -1;
@@ -996,18 +996,18 @@
* Return value : 0 - Ok
* -1 - Error
*/
-WebRtc_Word16 WebRtcIsac_UpdateBwEstimate(ISACStruct* ISAC_main_inst,
- const WebRtc_UWord16* encoded,
- WebRtc_Word32 packet_size,
- WebRtc_UWord16 rtp_seq_number,
- WebRtc_UWord32 send_ts,
- WebRtc_UWord32 arr_ts) {
+int16_t WebRtcIsac_UpdateBwEstimate(ISACStruct* ISAC_main_inst,
+ const uint16_t* encoded,
+ int32_t packet_size,
+ uint16_t rtp_seq_number,
+ uint32_t send_ts,
+ uint32_t arr_ts) {
ISACMainStruct* instISAC = (ISACMainStruct*)ISAC_main_inst;
Bitstr streamdata;
#ifndef WEBRTC_BIG_ENDIAN
int k;
#endif
- WebRtc_Word16 err;
+ int16_t err;
/* Check if decoder initiated. */
if ((instISAC->initFlag & BIT_MASK_DEC_INIT) != BIT_MASK_DEC_INIT) {
@@ -1025,7 +1025,7 @@
#ifndef WEBRTC_BIG_ENDIAN
for (k = 0; k < 10; k++) {
- streamdata.stream[k] = (WebRtc_UWord8)((encoded[k >> 1] >>
+ streamdata.stream[k] = (uint8_t)((encoded[k >> 1] >>
((k & 1) << 3)) & 0xFF);
}
#else
@@ -1044,30 +1044,30 @@
return 0;
}
-static WebRtc_Word16 Decode(ISACStruct* ISAC_main_inst,
- const WebRtc_UWord16* encoded,
- WebRtc_Word16 lenEncodedBytes,
- WebRtc_Word16* decoded,
- WebRtc_Word16* speechType,
- WebRtc_Word16 isRCUPayload) {
+static int16_t Decode(ISACStruct* ISAC_main_inst,
+ const uint16_t* encoded,
+ int16_t lenEncodedBytes,
+ int16_t* decoded,
+ int16_t* speechType,
+ int16_t isRCUPayload) {
/* Number of samples (480 or 960) output from the decoder that were
actually used in the encoder/decoder (determined on the fly). */
- WebRtc_Word16 numSamplesLB;
- WebRtc_Word16 numSamplesUB;
- WebRtc_Word16 speechIdx;
+ int16_t numSamplesLB;
+ int16_t numSamplesUB;
+ int16_t speechIdx;
float outFrame[MAX_FRAMESAMPLES];
- WebRtc_Word16 outFrameLB[MAX_FRAMESAMPLES];
- WebRtc_Word16 outFrameUB[MAX_FRAMESAMPLES];
- WebRtc_Word16 numDecodedBytesLB;
- WebRtc_Word16 numDecodedBytesUB;
- WebRtc_Word16 lenEncodedLBBytes;
- WebRtc_Word16 validChecksum = 1;
- WebRtc_Word16 k;
- WebRtc_UWord8* ptrEncodedUW8 = (WebRtc_UWord8*)encoded;
- WebRtc_UWord16 numLayer;
- WebRtc_Word16 totSizeBytes;
- WebRtc_Word16 err;
+ int16_t outFrameLB[MAX_FRAMESAMPLES];
+ int16_t outFrameUB[MAX_FRAMESAMPLES];
+ int16_t numDecodedBytesLB;
+ int16_t numDecodedBytesUB;
+ int16_t lenEncodedLBBytes;
+ int16_t validChecksum = 1;
+ int16_t k;
+ uint8_t* ptrEncodedUW8 = (uint8_t*)encoded;
+ uint16_t numLayer;
+ int16_t totSizeBytes;
+ int16_t err;
ISACMainStruct* instISAC = (ISACMainStruct*)ISAC_main_inst;
ISACUBDecStruct* decInstUB = &(instISAC->instUB.ISACdecUB_obj);
@@ -1131,12 +1131,12 @@
} else if (outFrame[k] < -32768) {
decoded[k] = -32768;
} else {
- decoded[k] = (WebRtc_Word16)WebRtcIsac_lrint(outFrame[k]);
+ decoded[k] = (int16_t)WebRtcIsac_lrint(outFrame[k]);
}
}
numSamplesUB = 0;
} else {
- WebRtc_UWord32 crc;
+ uint32_t crc;
/* We don't accept a frame size larger than 30 ms (480 samples at the
* lower band). */
for (k = 0; k < numSamplesLB; k++) {
@@ -1145,7 +1145,7 @@
} else if (outFrame[k] < -32768) {
outFrameLB[k] = -32768;
} else {
- outFrameLB[k] = (WebRtc_Word16)WebRtcIsac_lrint(outFrame[k]);
+ outFrameLB[k] = (int16_t)WebRtcIsac_lrint(outFrame[k]);
}
}
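Illustrative sketch, not part of this patch: the pattern above turns a float sample into an int16_t by clamping first and only then rounding with WebRtcIsac_lrint, so the narrowing cast cannot overflow. A standalone version, assuming the upper clamp (outside this hunk) mirrors the lower one at +32767; the helper name is made up:

static int16_t SaturateAndRound(float sample) {
  if (sample > 32767.0f)
    return 32767;
  if (sample < -32768.0f)
    return -32768;
  return (int16_t)WebRtcIsac_lrint(sample);  /* Round to nearest integer. */
}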
@@ -1153,13 +1153,13 @@
if (numDecodedBytesLB == lenEncodedBytes) {
/* Decoding was successful. No super-wideband bit-stream exists. */
numSamplesUB = numSamplesLB;
- memset(outFrameUB, 0, sizeof(WebRtc_Word16) * numSamplesUB);
+ memset(outFrameUB, 0, sizeof(int16_t) * numSamplesUB);
/* Prepare for the potential increase of signal bandwidth. */
instISAC->resetFlag_8kHz = 2;
} else {
/* This includes the checksum and the bytes that stores the length. */
- WebRtc_Word16 lenNextStream = ptrEncodedUW8[numDecodedBytesLB];
+ int16_t lenNextStream = ptrEncodedUW8[numDecodedBytesLB];
/* Is this garbage or a valid super-wideband bit-stream?
* Check whether the checksum is valid. */
@@ -1169,7 +1169,7 @@
validChecksum = 0;
} else {
/* Run the CRC to see if the checksum matches. */
- WebRtcIsac_GetCrc((WebRtc_Word16*)(
+ WebRtcIsac_GetCrc((int16_t*)(
&ptrEncodedUW8[numDecodedBytesLB + 1]),
lenNextStream - LEN_CHECK_SUM_WORD8 - 1, &crc);
@@ -1185,11 +1185,11 @@
/* This is garbage; we have received a wideband
* bit-stream with garbage. */
numSamplesUB = numSamplesLB;
- memset(outFrameUB, 0, sizeof(WebRtc_Word16) * numSamplesUB);
+ memset(outFrameUB, 0, sizeof(int16_t) * numSamplesUB);
} else {
/* A valid super-wideband bit-stream exists. */
enum ISACBandwidth bandwidthKHz;
- WebRtc_Word32 maxDelayBit;
+ int32_t maxDelayBit;
/* If we have super-wideband bit-stream, we cannot
* have 60 ms frame-size. */
@@ -1298,7 +1298,7 @@
} else if (outFrame[k] < -32768) {
outFrameUB[k] = -32768;
} else {
- outFrameUB[k] = (WebRtc_Word16)WebRtcIsac_lrint(
+ outFrameUB[k] = (int16_t)WebRtcIsac_lrint(
outFrame[k]);
}
}
@@ -1344,12 +1344,12 @@
* -1 - Error
*/
-WebRtc_Word16 WebRtcIsac_Decode(ISACStruct* ISAC_main_inst,
- const WebRtc_UWord16* encoded,
- WebRtc_Word16 lenEncodedBytes,
- WebRtc_Word16* decoded,
- WebRtc_Word16* speechType) {
- WebRtc_Word16 isRCUPayload = 0;
+int16_t WebRtcIsac_Decode(ISACStruct* ISAC_main_inst,
+ const uint16_t* encoded,
+ int16_t lenEncodedBytes,
+ int16_t* decoded,
+ int16_t* speechType) {
+ int16_t isRCUPayload = 0;
return Decode(ISAC_main_inst, encoded, lenEncodedBytes, decoded,
speechType, isRCUPayload);
}
@@ -1376,12 +1376,12 @@
-WebRtc_Word16 WebRtcIsac_DecodeRcu(ISACStruct* ISAC_main_inst,
- const WebRtc_UWord16* encoded,
- WebRtc_Word16 lenEncodedBytes,
- WebRtc_Word16* decoded,
- WebRtc_Word16* speechType) {
- WebRtc_Word16 isRCUPayload = 1;
+int16_t WebRtcIsac_DecodeRcu(ISACStruct* ISAC_main_inst,
+ const uint16_t* encoded,
+ int16_t lenEncodedBytes,
+ int16_t* decoded,
+ int16_t* speechType) {
+ int16_t isRCUPayload = 1;
return Decode(ISAC_main_inst, encoded, lenEncodedBytes, decoded,
speechType, isRCUPayload);
}
@@ -1404,10 +1404,10 @@
* Return value : >0 - number of samples in decoded PLC vector
* -1 - Error
*/
-WebRtc_Word16 WebRtcIsac_DecodePlc(ISACStruct* ISAC_main_inst,
- WebRtc_Word16* decoded,
- WebRtc_Word16 noOfLostFrames) {
- WebRtc_Word16 numSamples = 0;
+int16_t WebRtcIsac_DecodePlc(ISACStruct* ISAC_main_inst,
+ int16_t* decoded,
+ int16_t noOfLostFrames) {
+ int16_t numSamples = 0;
ISACMainStruct* instISAC = (ISACMainStruct*)ISAC_main_inst;
/* Limit the number of frames to two = 60 milliseconds.
@@ -1429,7 +1429,7 @@
}
/* Set output samples to zero. */
- memset(decoded, 0, numSamples * sizeof(WebRtc_Word16));
+ memset(decoded, 0, numSamples * sizeof(int16_t));
return numSamples;
}
@@ -1451,8 +1451,8 @@
* Return value : 0 - ok
* -1 - Error
*/
-static WebRtc_Word16 ControlLb(ISACLBStruct* instISAC, double rate,
- WebRtc_Word16 frameSize) {
+static int16_t ControlLb(ISACLBStruct* instISAC, double rate,
+ int16_t frameSize) {
if ((rate >= 10000) && (rate <= 32000)) {
instISAC->ISACencLB_obj.bottleneck = rate;
} else {
@@ -1468,7 +1468,7 @@
return 0;
}
-static WebRtc_Word16 ControlUb(ISACUBStruct* instISAC, double rate) {
+static int16_t ControlUb(ISACUBStruct* instISAC, double rate) {
if ((rate >= 10000) && (rate <= 32000)) {
instISAC->ISACencUB_obj.bottleneck = rate;
} else {
@@ -1477,11 +1477,11 @@
return 0;
}
-WebRtc_Word16 WebRtcIsac_Control(ISACStruct* ISAC_main_inst,
- WebRtc_Word32 bottleneckBPS,
- WebRtc_Word16 frameSize) {
+int16_t WebRtcIsac_Control(ISACStruct* ISAC_main_inst,
+ int32_t bottleneckBPS,
+ int16_t frameSize) {
ISACMainStruct* instISAC = (ISACMainStruct*)ISAC_main_inst;
- WebRtc_Word16 status;
+ int16_t status;
double rateLB;
double rateUB;
enum ISACBandwidth bandwidthKHz;
@@ -1586,10 +1586,10 @@
* Return value : 0 - ok
* -1 - Error
*/
-WebRtc_Word16 WebRtcIsac_ControlBwe(ISACStruct* ISAC_main_inst,
- WebRtc_Word32 bottleneckBPS,
- WebRtc_Word16 frameSizeMs,
- WebRtc_Word16 enforceFrameSize) {
+int16_t WebRtcIsac_ControlBwe(ISACStruct* ISAC_main_inst,
+ int32_t bottleneckBPS,
+ int16_t frameSizeMs,
+ int16_t enforceFrameSize) {
ISACMainStruct* instISAC = (ISACMainStruct*)ISAC_main_inst;
enum ISACBandwidth bandwidth;
@@ -1659,9 +1659,9 @@
* - bweIndex : Bandwidth estimate to transmit to other side.
*
*/
-WebRtc_Word16 WebRtcIsac_GetDownLinkBwIndex(ISACStruct* ISAC_main_inst,
- WebRtc_Word16* bweIndex,
- WebRtc_Word16* jitterInfo) {
+int16_t WebRtcIsac_GetDownLinkBwIndex(ISACStruct* ISAC_main_inst,
+ int16_t* bweIndex,
+ int16_t* jitterInfo) {
ISACMainStruct* instISAC = (ISACMainStruct*)ISAC_main_inst;
/* Check if encoder initialized. */
@@ -1692,10 +1692,10 @@
* Return value : 0 - ok
* -1 - index out of range
*/
-WebRtc_Word16 WebRtcIsac_UpdateUplinkBw(ISACStruct* ISAC_main_inst,
- WebRtc_Word16 bweIndex) {
+int16_t WebRtcIsac_UpdateUplinkBw(ISACStruct* ISAC_main_inst,
+ int16_t bweIndex) {
ISACMainStruct* instISAC = (ISACMainStruct*)ISAC_main_inst;
- WebRtc_Word16 returnVal;
+ int16_t returnVal;
/* Check if encoder initiated. */
if ((instISAC->initFlag & BIT_MASK_ENC_INIT) !=
@@ -1732,19 +1732,19 @@
* - bweIndex : Bandwidth estimate in bit-stream
*
*/
-WebRtc_Word16 WebRtcIsac_ReadBwIndex(const WebRtc_Word16* encoded,
- WebRtc_Word16* bweIndex) {
+int16_t WebRtcIsac_ReadBwIndex(const int16_t* encoded,
+ int16_t* bweIndex) {
Bitstr streamdata;
#ifndef WEBRTC_BIG_ENDIAN
int k;
#endif
- WebRtc_Word16 err;
+ int16_t err;
WebRtcIsac_ResetBitstream(&(streamdata));
#ifndef WEBRTC_BIG_ENDIAN
for (k = 0; k < 10; k++) {
- streamdata.stream[k] = (WebRtc_UWord8)((encoded[k >> 1] >>
+ streamdata.stream[k] = (uint8_t)((encoded[k >> 1] >>
((k & 1) << 3)) & 0xFF);
}
#else
@@ -1780,21 +1780,21 @@
* - frameLength : Length of frame in packet (in samples)
*
*/
-WebRtc_Word16 WebRtcIsac_ReadFrameLen(ISACStruct* ISAC_main_inst,
- const WebRtc_Word16* encoded,
- WebRtc_Word16* frameLength) {
+int16_t WebRtcIsac_ReadFrameLen(ISACStruct* ISAC_main_inst,
+ const int16_t* encoded,
+ int16_t* frameLength) {
Bitstr streamdata;
#ifndef WEBRTC_BIG_ENDIAN
int k;
#endif
- WebRtc_Word16 err;
+ int16_t err;
ISACMainStruct* instISAC;
WebRtcIsac_ResetBitstream(&(streamdata));
#ifndef WEBRTC_BIG_ENDIAN
for (k = 0; k < 10; k++) {
- streamdata.stream[k] = (WebRtc_UWord8)((encoded[k >> 1] >>
+ streamdata.stream[k] = (uint8_t)((encoded[k >> 1] >>
((k & 1) << 3)) & 0xFF);
}
#else
@@ -1834,7 +1834,7 @@
* Return Value : frame length in samples
*
*/
-WebRtc_Word16 WebRtcIsac_GetNewFrameLen(ISACStruct* ISAC_main_inst) {
+int16_t WebRtcIsac_GetNewFrameLen(ISACStruct* ISAC_main_inst) {
ISACMainStruct* instISAC = (ISACMainStruct*)ISAC_main_inst;
/* Return new frame length. */
@@ -1860,7 +1860,7 @@
*
* Return value : Error code
*/
-WebRtc_Word16 WebRtcIsac_GetErrorCode(ISACStruct* ISAC_main_inst) {
+int16_t WebRtcIsac_GetErrorCode(ISACStruct* ISAC_main_inst) {
return ((ISACMainStruct*)ISAC_main_inst)->errorCode;
}
@@ -1886,13 +1886,13 @@
* Return value : -1 if error happens
* 0 bit-rates computed correctly.
*/
-WebRtc_Word16 WebRtcIsac_GetUplinkBw(ISACStruct* ISAC_main_inst,
- WebRtc_Word32* bottleneck) {
+int16_t WebRtcIsac_GetUplinkBw(ISACStruct* ISAC_main_inst,
+ int32_t* bottleneck) {
ISACMainStruct* instISAC = (ISACMainStruct*)ISAC_main_inst;
if (instISAC->codingMode == 0) {
/* We are in adaptive mode, so get the bottleneck from the BWE. */
- *bottleneck = (WebRtc_Word32)instISAC->bwestimator_obj.send_bw_avg;
+ *bottleneck = (int32_t)instISAC->bwestimator_obj.send_bw_avg;
} else {
*bottleneck = instISAC->bottleneck;
}
@@ -1939,10 +1939,10 @@
* Return value : 0 if successful
* -1 if error happens
*/
-WebRtc_Word16 WebRtcIsac_SetMaxPayloadSize(ISACStruct* ISAC_main_inst,
- WebRtc_Word16 maxPayloadBytes) {
+int16_t WebRtcIsac_SetMaxPayloadSize(ISACStruct* ISAC_main_inst,
+ int16_t maxPayloadBytes) {
ISACMainStruct* instISAC = (ISACMainStruct*)ISAC_main_inst;
- WebRtc_Word16 status = 0;
+ int16_t status = 0;
/* Check if encoder initiated */
if ((instISAC->initFlag & BIT_MASK_ENC_INIT) !=
@@ -2022,11 +2022,11 @@
* Return value : 0 if successful
* -1 if error happens
*/
-WebRtc_Word16 WebRtcIsac_SetMaxRate(ISACStruct* ISAC_main_inst,
- WebRtc_Word32 maxRate) {
+int16_t WebRtcIsac_SetMaxRate(ISACStruct* ISAC_main_inst,
+ int32_t maxRate) {
ISACMainStruct* instISAC = (ISACMainStruct*)ISAC_main_inst;
- WebRtc_Word16 maxRateInBytesPer30Ms;
- WebRtc_Word16 status = 0;
+ int16_t maxRateInBytesPer30Ms;
+ int16_t status = 0;
/* check if encoder initiated */
if ((instISAC->initFlag & BIT_MASK_ENC_INIT) != BIT_MASK_ENC_INIT) {
@@ -2037,7 +2037,7 @@
given maximum rate. Multiply by 30/1000 to get the number of
bits per 30 ms, then divide by 8 to get the number of bytes per 30 ms:
maxRateInBytes = floor((maxRate * 30/1000) / 8); */
- maxRateInBytesPer30Ms = (WebRtc_Word16)(maxRate * 3 / 800);
+ maxRateInBytesPer30Ms = (int16_t)(maxRate * 3 / 800);
if (instISAC->encoderSamplingRateKHz == kIsacWideband) {
if (maxRate < 32000) {
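Illustrative worked example, not part of this patch: the comment's formula and the simplified expression in the code agree, since maxRate * 30/1000 / 8 reduces to maxRate * 3 / 800 and integer division supplies the floor. For instance, with maxRate chosen as 32000 bps:

int32_t maxRate = 32000;                                       /* bits per second */
int16_t maxRateInBytesPer30Ms = (int16_t)(maxRate * 3 / 800);  /* 96000 / 800 = 120 bytes */
/* Same as floor((32000 * 30 / 1000) / 8) = floor(960 / 8) = 120. */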
@@ -2093,14 +2093,14 @@
* Return value : >0 - Length (in bytes) of coded data
* : -1 - Error
*/
-WebRtc_Word16 WebRtcIsac_GetRedPayload(ISACStruct* ISAC_main_inst,
- WebRtc_Word16* encoded) {
+int16_t WebRtcIsac_GetRedPayload(ISACStruct* ISAC_main_inst,
+ int16_t* encoded) {
Bitstr iSACBitStreamInst;
- WebRtc_Word16 streamLenLB;
- WebRtc_Word16 streamLenUB;
- WebRtc_Word16 streamLen;
- WebRtc_Word16 totalLenUB;
- WebRtc_UWord8* ptrEncodedUW8 = (WebRtc_UWord8*)encoded;
+ int16_t streamLenLB;
+ int16_t streamLenUB;
+ int16_t streamLen;
+ int16_t totalLenUB;
+ uint8_t* ptrEncodedUW8 = (uint8_t*)encoded;
ISACMainStruct* instISAC = (ISACMainStruct*)ISAC_main_inst;
#ifndef WEBRTC_BIG_ENDIAN
int k;
@@ -2122,7 +2122,7 @@
return -1;
}
- /* convert from bytes to WebRtc_Word16. */
+ /* convert from bytes to int16_t. */
memcpy(ptrEncodedUW8, iSACBitStreamInst.stream, streamLenLB);
streamLen = streamLenLB;
if (instISAC->bandwidthKHz == isac8kHz) {
@@ -2150,18 +2150,18 @@
/* Generate CRC if required. */
if ((instISAC->bandwidthKHz != isac8kHz) &&
(streamLenUB > 0)) {
- WebRtc_UWord32 crc;
+ uint32_t crc;
streamLen += totalLenUB;
- ptrEncodedUW8[streamLenLB] = (WebRtc_UWord8)totalLenUB;
+ ptrEncodedUW8[streamLenLB] = (uint8_t)totalLenUB;
memcpy(&ptrEncodedUW8[streamLenLB + 1], iSACBitStreamInst.stream,
streamLenUB);
- WebRtcIsac_GetCrc((WebRtc_Word16*)(&(ptrEncodedUW8[streamLenLB + 1])),
+ WebRtcIsac_GetCrc((int16_t*)(&(ptrEncodedUW8[streamLenLB + 1])),
streamLenUB, &crc);
#ifndef WEBRTC_BIG_ENDIAN
for (k = 0; k < LEN_CHECK_SUM_WORD8; k++) {
ptrEncodedUW8[streamLen - LEN_CHECK_SUM_WORD8 + k] =
- (WebRtc_UWord8)((crc >> (24 - k * 8)) & 0xFF);
+ (uint8_t)((crc >> (24 - k * 8)) & 0xFF);
}
#else
memcpy(&ptrEncodedUW8[streamLenLB + streamLenUB + 1], &crc,
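Illustrative layout sketch, not part of this patch; the offsets below are read off the index arithmetic shown in this hunk. When an upper-band stream is present, the redundant payload assembled here appears to be laid out as:

/*
 * bytes [0 .. streamLenLB-1]                            lower-band bit-stream
 * byte  [streamLenLB]                                   totalLenUB (appears to cover this byte,
 *                                                       the upper-band stream, and the checksum)
 * bytes [streamLenLB+1 .. streamLenLB+streamLenUB]      upper-band bit-stream
 * bytes [streamLen-LEN_CHECK_SUM_WORD8 .. streamLen-1]  CRC computed over the upper-band bytes
 */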
@@ -2209,8 +2209,8 @@
* Return value : 0 if successful
* -1 if failed.
*/
-WebRtc_Word16 WebRtcIsac_SetEncSampRate(ISACStruct* ISAC_main_inst,
- WebRtc_UWord16 sample_rate_hz) {
+int16_t WebRtcIsac_SetEncSampRate(ISACStruct* ISAC_main_inst,
+ uint16_t sample_rate_hz) {
ISACMainStruct* instISAC = (ISACMainStruct*)ISAC_main_inst;
enum IsacSamplingRate encoder_operational_rate;
@@ -2238,9 +2238,9 @@
ISACLBStruct* instLB = &(instISAC->instLB);
double bottleneckLB;
double bottleneckUB;
- WebRtc_Word32 bottleneck = instISAC->bottleneck;
- WebRtc_Word16 codingMode = instISAC->codingMode;
- WebRtc_Word16 frameSizeMs = instLB->ISACencLB_obj.new_framelength /
+ int32_t bottleneck = instISAC->bottleneck;
+ int16_t codingMode = instISAC->codingMode;
+ int16_t frameSizeMs = instLB->ISACencLB_obj.new_framelength /
(FS / 1000);
if ((encoder_operational_rate == kIsacWideband) &&
@@ -2269,9 +2269,9 @@
EncoderInitUb(instUB, instISAC->bandwidthKHz);
memset(instISAC->analysisFBState1, 0,
- FB_STATE_SIZE_WORD32 * sizeof(WebRtc_Word32));
+ FB_STATE_SIZE_WORD32 * sizeof(int32_t));
memset(instISAC->analysisFBState2, 0,
- FB_STATE_SIZE_WORD32 * sizeof(WebRtc_Word32));
+ FB_STATE_SIZE_WORD32 * sizeof(int32_t));
if (codingMode == 1) {
instISAC->bottleneck = bottleneck;
@@ -2306,8 +2306,8 @@
* Return value : 0 if successful
* -1 if failed.
*/
-WebRtc_Word16 WebRtcIsac_SetDecSampRate(ISACStruct* ISAC_main_inst,
- WebRtc_UWord16 sample_rate_hz) {
+int16_t WebRtcIsac_SetDecSampRate(ISACStruct* ISAC_main_inst,
+ uint16_t sample_rate_hz) {
ISACMainStruct* instISAC = (ISACMainStruct*)ISAC_main_inst;
enum IsacSamplingRate decoder_operational_rate;
@@ -2326,9 +2326,9 @@
/* When switching from wideband to super-wideband at the decoder,
* we need to reset the filter-bank and initialize the upper-band decoder. */
memset(instISAC->synthesisFBState1, 0,
- FB_STATE_SIZE_WORD32 * sizeof(WebRtc_Word32));
+ FB_STATE_SIZE_WORD32 * sizeof(int32_t));
memset(instISAC->synthesisFBState2, 0,
- FB_STATE_SIZE_WORD32 * sizeof(WebRtc_Word32));
+ FB_STATE_SIZE_WORD32 * sizeof(int32_t));
if (DecoderInitUb(&(instISAC->instUB)) < 0) {
return -1;
@@ -2349,7 +2349,7 @@
* is expected to be sampled at this rate.
*
*/
-WebRtc_UWord16 WebRtcIsac_EncSampRate(ISACStruct* ISAC_main_inst) {
+uint16_t WebRtcIsac_EncSampRate(ISACStruct* ISAC_main_inst) {
ISACMainStruct* instISAC = (ISACMainStruct*)ISAC_main_inst;
return instISAC->in_sample_rate_hz;
}
@@ -2366,7 +2366,7 @@
* sampled at this rate.
*
*/
-WebRtc_UWord16 WebRtcIsac_DecSampRate(ISACStruct* ISAC_main_inst) {
+uint16_t WebRtcIsac_DecSampRate(ISACStruct* ISAC_main_inst) {
ISACMainStruct* instISAC = (ISACMainStruct*)ISAC_main_inst;
return instISAC->decoderSamplingRateKHz == kIsacWideband ? 16000 : 32000;
}
diff --git a/webrtc/modules/audio_coding/codecs/isac/main/source/lpc_analysis.c b/webrtc/modules/audio_coding/codecs/isac/main/source/lpc_analysis.c
index 854b2d7..5198ebf 100644
--- a/webrtc/modules/audio_coding/codecs/isac/main/source/lpc_analysis.c
+++ b/webrtc/modules/audio_coding/codecs/isac/main/source/lpc_analysis.c
@@ -111,7 +111,7 @@
//was static before, but didn't work with MEX file
-void WebRtcIsac_GetVars(const double *input, const WebRtc_Word16 *pitchGains_Q12,
+void WebRtcIsac_GetVars(const double *input, const int16_t *pitchGains_Q12,
double *oldEnergy, double *varscale)
{
double nrg[4], chng, pg;
@@ -206,7 +206,7 @@
}
void WebRtcIsac_GetLpcCoefLb(double *inLo, double *inHi, MaskFiltstr *maskdata,
- double signal_noise_ratio, const WebRtc_Word16 *pitchGains_Q12,
+ double signal_noise_ratio, const int16_t *pitchGains_Q12,
double *lo_coeff, double *hi_coeff)
{
int k, n, j, pos1, pos2;
@@ -388,12 +388,12 @@
double* lpCoeff,
double corrMat[][UB_LPC_ORDER + 1],
double* varscale,
- WebRtc_Word16 bandwidth)
+ int16_t bandwidth)
{
int frameCntr, activeFrameCntr, n, pos1, pos2;
- WebRtc_Word16 criterion1;
- WebRtc_Word16 criterion2;
- WebRtc_Word16 numSubFrames = SUBFRAMES * (1 + (bandwidth == isac16kHz));
+ int16_t criterion1;
+ int16_t criterion2;
+ int16_t numSubFrames = SUBFRAMES * (1 + (bandwidth == isac16kHz));
double data[WINLEN];
double corrSubFrame[UB_LPC_ORDER+2];
double reflecCoeff[UB_LPC_ORDER];
@@ -492,8 +492,8 @@
double corrMat[][UB_LPC_ORDER + 1],
const double* varscale)
{
- WebRtc_Word16 j, n;
- WebRtc_Word16 subFrameCntr;
+ int16_t j, n;
+ int16_t subFrameCntr;
double aPolynom[ORDERLO + 1];
double res_nrg;
diff --git a/webrtc/modules/audio_coding/codecs/isac/main/source/lpc_analysis.h b/webrtc/modules/audio_coding/codecs/isac/main/source/lpc_analysis.h
index 4eafeac..866c76d 100644
--- a/webrtc/modules/audio_coding/codecs/isac/main/source/lpc_analysis.h
+++ b/webrtc/modules/audio_coding/codecs/isac/main/source/lpc_analysis.h
@@ -23,11 +23,11 @@
double WebRtcIsac_LevDurb(double *a, double *k, double *r, int order);
-void WebRtcIsac_GetVars(const double *input, const WebRtc_Word16 *pitchGains_Q12,
+void WebRtcIsac_GetVars(const double *input, const int16_t *pitchGains_Q12,
double *oldEnergy, double *varscale);
void WebRtcIsac_GetLpcCoefLb(double *inLo, double *inHi, MaskFiltstr *maskdata,
- double signal_noise_ratio, const WebRtc_Word16 *pitchGains_Q12,
+ double signal_noise_ratio, const int16_t *pitchGains_Q12,
double *lo_coeff, double *hi_coeff);
@@ -45,6 +45,6 @@
double* lpCoeff,
double corr[][UB_LPC_ORDER + 1],
double* varscale,
- WebRtc_Word16 bandwidth);
+ int16_t bandwidth);
#endif /* WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_LPC_ANALYIS_H_ */
diff --git a/webrtc/modules/audio_coding/codecs/isac/main/source/lpc_gain_swb_tables.c b/webrtc/modules/audio_coding/codecs/isac/main/source/lpc_gain_swb_tables.c
index 25c69cb..8ce004b 100644
--- a/webrtc/modules/audio_coding/codecs/isac/main/source/lpc_gain_swb_tables.c
+++ b/webrtc/modules/audio_coding/codecs/isac/main/source/lpc_gain_swb_tables.c
@@ -36,7 +36,7 @@
/*
* Number of reconstruction points of quantizers for LPC Gains.
*/
-const WebRtc_Word16 WebRtcIsac_kNumQCellLpcGain[SUBFRAMES] =
+const int16_t WebRtcIsac_kNumQCellLpcGain[SUBFRAMES] =
{
17, 20, 25, 45, 77, 170
};
@@ -44,7 +44,7 @@
* Starting index for entropy decoder to search for the right interval,
* one entry per LAR coefficient
*/
-const WebRtc_UWord16 WebRtcIsac_kLpcGainEntropySearch[SUBFRAMES] =
+const uint16_t WebRtcIsac_kLpcGainEntropySearch[SUBFRAMES] =
{
8, 10, 12, 22, 38, 85
};
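Minimal sketch, not part of this patch and not the codec's actual decoder: the "starting index" tables give the entropy decoder a hint of where to begin looking for the CDF interval that contains its current target value. One plausible shape of such a search over a monotone CDF (FindCdfInterval is a hypothetical helper):

static int FindCdfInterval(const uint16_t* cdf, int start_hint, uint16_t target) {
  int i = start_hint;  /* e.g. WebRtcIsac_kLpcGainEntropySearch[subframe] */
  if (cdf[i] <= target) {
    while (cdf[i + 1] <= target)
      i++;                            /* Walk up from the hint. */
  } else {
    while (i > 0 && cdf[i] > target)
      i--;                            /* Walk down from the hint. */
  }
  return i;  /* cdf[i] <= target < cdf[i + 1], assuming target < cdf[last]. */
}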
@@ -53,26 +53,26 @@
* The following 6 vectors define CDF of 6 decorrelated LPC
* gains.
*/
-const WebRtc_UWord16 WebRtcIsac_kLpcGainCdfVec0[18] =
+const uint16_t WebRtcIsac_kLpcGainCdfVec0[18] =
{
0, 10, 27, 83, 234, 568, 1601, 4683, 16830, 57534, 63437,
64767, 65229, 65408, 65483, 65514, 65527, 65535
};
-const WebRtc_UWord16 WebRtcIsac_kLpcGainCdfVec1[21] =
+const uint16_t WebRtcIsac_kLpcGainCdfVec1[21] =
{
0, 15, 33, 84, 185, 385, 807, 1619, 3529, 7850, 19488,
51365, 62437, 64548, 65088, 65304, 65409, 65484, 65507, 65522, 65535
};
-const WebRtc_UWord16 WebRtcIsac_kLpcGainCdfVec2[26] =
+const uint16_t WebRtcIsac_kLpcGainCdfVec2[26] =
{
0, 15, 29, 54, 89, 145, 228, 380, 652, 1493, 4260,
12359, 34133, 50749, 57224, 60814, 62927, 64078, 64742, 65103, 65311, 65418,
65473, 65509, 65521, 65535
};
-const WebRtc_UWord16 WebRtcIsac_kLpcGainCdfVec3[46] =
+const uint16_t WebRtcIsac_kLpcGainCdfVec3[46] =
{
0, 8, 12, 16, 26, 42, 56, 76, 111, 164, 247,
366, 508, 693, 1000, 1442, 2155, 3188, 4854, 7387, 11249, 17617,
@@ -81,7 +81,7 @@
65523, 65535
};
-const WebRtc_UWord16 WebRtcIsac_kLpcGainCdfVec4[78] =
+const uint16_t WebRtcIsac_kLpcGainCdfVec4[78] =
{
0, 17, 29, 39, 51, 70, 104, 154, 234, 324, 443,
590, 760, 971, 1202, 1494, 1845, 2274, 2797, 3366, 4088, 4905,
@@ -93,7 +93,7 @@
65535
};
-const WebRtc_UWord16 WebRtcIsac_kLpcGainCdfVec5[171] =
+const uint16_t WebRtcIsac_kLpcGainCdfVec5[171] =
{
0, 10, 12, 14, 16, 18, 23, 29, 35, 42, 51,
58, 65, 72, 78, 87, 96, 103, 111, 122, 134, 150,
@@ -116,7 +116,7 @@
/*
* An array of pointers to CDFs of decorrelated LPC Gains
*/
-const WebRtc_UWord16* WebRtcIsac_kLpcGainCdfMat[SUBFRAMES] =
+const uint16_t* WebRtcIsac_kLpcGainCdfMat[SUBFRAMES] =
{
WebRtcIsac_kLpcGainCdfVec0, WebRtcIsac_kLpcGainCdfVec1,
WebRtcIsac_kLpcGainCdfVec2, WebRtcIsac_kLpcGainCdfVec3,
diff --git a/webrtc/modules/audio_coding/codecs/isac/main/source/lpc_gain_swb_tables.h b/webrtc/modules/audio_coding/codecs/isac/main/source/lpc_gain_swb_tables.h
index 1eba97c..121d05e 100644
--- a/webrtc/modules/audio_coding/codecs/isac/main/source/lpc_gain_swb_tables.h
+++ b/webrtc/modules/audio_coding/codecs/isac/main/source/lpc_gain_swb_tables.h
@@ -26,23 +26,23 @@
extern const double WebRtcIsac_kLeftRecPointLpcGain[SUBFRAMES];
-extern const WebRtc_Word16 WebRtcIsac_kNumQCellLpcGain[SUBFRAMES];
+extern const int16_t WebRtcIsac_kNumQCellLpcGain[SUBFRAMES];
-extern const WebRtc_UWord16 WebRtcIsac_kLpcGainEntropySearch[SUBFRAMES];
+extern const uint16_t WebRtcIsac_kLpcGainEntropySearch[SUBFRAMES];
-extern const WebRtc_UWord16 WebRtcIsac_kLpcGainCdfVec0[18];
+extern const uint16_t WebRtcIsac_kLpcGainCdfVec0[18];
-extern const WebRtc_UWord16 WebRtcIsac_kLpcGainCdfVec1[21];
+extern const uint16_t WebRtcIsac_kLpcGainCdfVec1[21];
-extern const WebRtc_UWord16 WebRtcIsac_kLpcGainCdfVec2[26];
+extern const uint16_t WebRtcIsac_kLpcGainCdfVec2[26];
-extern const WebRtc_UWord16 WebRtcIsac_kLpcGainCdfVec3[46];
+extern const uint16_t WebRtcIsac_kLpcGainCdfVec3[46];
-extern const WebRtc_UWord16 WebRtcIsac_kLpcGainCdfVec4[78];
+extern const uint16_t WebRtcIsac_kLpcGainCdfVec4[78];
-extern const WebRtc_UWord16 WebRtcIsac_kLpcGainCdfVec5[171];
+extern const uint16_t WebRtcIsac_kLpcGainCdfVec5[171];
-extern const WebRtc_UWord16* WebRtcIsac_kLpcGainCdfMat[SUBFRAMES];
+extern const uint16_t* WebRtcIsac_kLpcGainCdfMat[SUBFRAMES];
extern const double WebRtcIsac_kLpcGainDecorrMat[SUBFRAMES][SUBFRAMES];
diff --git a/webrtc/modules/audio_coding/codecs/isac/main/source/lpc_shape_swb12_tables.c b/webrtc/modules/audio_coding/codecs/isac/main/source/lpc_shape_swb12_tables.c
index 695d583..2c5698f 100644
--- a/webrtc/modules/audio_coding/codecs/isac/main/source/lpc_shape_swb12_tables.c
+++ b/webrtc/modules/audio_coding/codecs/isac/main/source/lpc_shape_swb12_tables.c
@@ -72,7 +72,7 @@
/*
* Number of reconstruction points of quantizers for LAR coefficients.
*/
-const WebRtc_Word16 WebRtcIsac_kLpcShapeNumRecPointUb12
+const int16_t WebRtcIsac_kLpcShapeNumRecPointUb12
[UB_LPC_ORDER * UB_LPC_VEC_PER_FRAME] =
{
13, 15, 19, 27, 19, 24, 32, 48
@@ -82,7 +82,7 @@
* Starting index for entropy decoder to search for the right interval,
* one entry per LAR coefficient
*/
-const WebRtc_UWord16 WebRtcIsac_kLpcShapeEntropySearchUb12
+const uint16_t WebRtcIsac_kLpcShapeEntropySearchUb12
[UB_LPC_ORDER * UB_LPC_VEC_PER_FRAME] =
{
6, 7, 9, 13, 9, 12, 16, 24
@@ -92,52 +92,52 @@
* The following 8 vectors define CDF of 8 decorrelated LAR
* coefficients.
*/
-const WebRtc_UWord16 WebRtcIsac_kLpcShapeCdfVec0Ub12[14] =
+const uint16_t WebRtcIsac_kLpcShapeCdfVec0Ub12[14] =
{
0, 13, 95, 418, 1687, 6498, 21317, 44200, 59029, 63849, 65147,
65449, 65525, 65535
};
-const WebRtc_UWord16 WebRtcIsac_kLpcShapeCdfVec1Ub12[16] =
+const uint16_t WebRtcIsac_kLpcShapeCdfVec1Ub12[16] =
{
0, 10, 59, 255, 858, 2667, 8200, 22609, 42988, 57202, 62947,
64743, 65308, 65476, 65522, 65535
};
-const WebRtc_UWord16 WebRtcIsac_kLpcShapeCdfVec2Ub12[20] =
+const uint16_t WebRtcIsac_kLpcShapeCdfVec2Ub12[20] =
{
0, 18, 40, 118, 332, 857, 2017, 4822, 11321, 24330, 41279,
54342, 60637, 63394, 64659, 65184, 65398, 65482, 65518, 65535
};
-const WebRtc_UWord16 WebRtcIsac_kLpcShapeCdfVec3Ub12[28] =
+const uint16_t WebRtcIsac_kLpcShapeCdfVec3Ub12[28] =
{
0, 21, 38, 90, 196, 398, 770, 1400, 2589, 4650, 8211,
14933, 26044, 39592, 50814, 57452, 60971, 62884, 63995, 64621, 65019, 65273,
65410, 65480, 65514, 65522, 65531, 65535
};
-const WebRtc_UWord16 WebRtcIsac_kLpcShapeCdfVec4Ub12[20] =
+const uint16_t WebRtcIsac_kLpcShapeCdfVec4Ub12[20] =
{
0, 7, 46, 141, 403, 969, 2132, 4649, 10633, 24902, 43254,
54665, 59928, 62674, 64173, 64938, 65293, 65464, 65523, 65535
};
-const WebRtc_UWord16 WebRtcIsac_kLpcShapeCdfVec5Ub12[25] =
+const uint16_t WebRtcIsac_kLpcShapeCdfVec5Ub12[25] =
{
0, 7, 22, 72, 174, 411, 854, 1737, 3545, 6774, 13165,
25221, 40980, 52821, 58714, 61706, 63472, 64437, 64989, 65287, 65430, 65503,
65525, 65529, 65535
};
-const WebRtc_UWord16 WebRtcIsac_kLpcShapeCdfVec6Ub12[33] =
+const uint16_t WebRtcIsac_kLpcShapeCdfVec6Ub12[33] =
{
0, 11, 21, 36, 65, 128, 228, 401, 707, 1241, 2126,
3589, 6060, 10517, 18853, 31114, 42477, 49770, 54271, 57467, 59838, 61569,
62831, 63772, 64433, 64833, 65123, 65306, 65419, 65466, 65499, 65519, 65535
};
-const WebRtc_UWord16 WebRtcIsac_kLpcShapeCdfVec7Ub12[49] =
+const uint16_t WebRtcIsac_kLpcShapeCdfVec7Ub12[49] =
{
0, 14, 34, 67, 107, 167, 245, 326, 449, 645, 861,
1155, 1508, 2003, 2669, 3544, 4592, 5961, 7583, 9887, 13256, 18765,
@@ -149,7 +149,7 @@
/*
* An array of pointers to CDFs of decorrelated LARs
*/
-const WebRtc_UWord16* WebRtcIsac_kLpcShapeCdfMatUb12
+const uint16_t* WebRtcIsac_kLpcShapeCdfMatUb12
[UB_LPC_ORDER * UB_LPC_VEC_PER_FRAME] =
{
WebRtcIsac_kLpcShapeCdfVec0Ub12, WebRtcIsac_kLpcShapeCdfVec1Ub12,
diff --git a/webrtc/modules/audio_coding/codecs/isac/main/source/lpc_shape_swb12_tables.h b/webrtc/modules/audio_coding/codecs/isac/main/source/lpc_shape_swb12_tables.h
index 1e93847..cef885a 100644
--- a/webrtc/modules/audio_coding/codecs/isac/main/source/lpc_shape_swb12_tables.h
+++ b/webrtc/modules/audio_coding/codecs/isac/main/source/lpc_shape_swb12_tables.h
@@ -37,29 +37,29 @@
[UB_LPC_ORDER*UB_LPC_VEC_PER_FRAME];
-extern const WebRtc_Word16 WebRtcIsac_kLpcShapeNumRecPointUb12
+extern const int16_t WebRtcIsac_kLpcShapeNumRecPointUb12
[UB_LPC_ORDER * UB_LPC_VEC_PER_FRAME];
-extern const WebRtc_UWord16 WebRtcIsac_kLpcShapeEntropySearchUb12
+extern const uint16_t WebRtcIsac_kLpcShapeEntropySearchUb12
[UB_LPC_ORDER * UB_LPC_VEC_PER_FRAME];
-extern const WebRtc_UWord16 WebRtcIsac_kLpcShapeCdfVec0Ub12[14];
+extern const uint16_t WebRtcIsac_kLpcShapeCdfVec0Ub12[14];
-extern const WebRtc_UWord16 WebRtcIsac_kLpcShapeCdfVec1Ub12[16];
+extern const uint16_t WebRtcIsac_kLpcShapeCdfVec1Ub12[16];
-extern const WebRtc_UWord16 WebRtcIsac_kLpcShapeCdfVec2Ub12[20];
+extern const uint16_t WebRtcIsac_kLpcShapeCdfVec2Ub12[20];
-extern const WebRtc_UWord16 WebRtcIsac_kLpcShapeCdfVec3Ub12[28];
+extern const uint16_t WebRtcIsac_kLpcShapeCdfVec3Ub12[28];
-extern const WebRtc_UWord16 WebRtcIsac_kLpcShapeCdfVec4Ub12[20];
+extern const uint16_t WebRtcIsac_kLpcShapeCdfVec4Ub12[20];
-extern const WebRtc_UWord16 WebRtcIsac_kLpcShapeCdfVec5Ub12[25];
+extern const uint16_t WebRtcIsac_kLpcShapeCdfVec5Ub12[25];
-extern const WebRtc_UWord16 WebRtcIsac_kLpcShapeCdfVec6Ub12[33];
+extern const uint16_t WebRtcIsac_kLpcShapeCdfVec6Ub12[33];
-extern const WebRtc_UWord16 WebRtcIsac_kLpcShapeCdfVec7Ub12[49];
+extern const uint16_t WebRtcIsac_kLpcShapeCdfVec7Ub12[49];
-extern const WebRtc_UWord16* WebRtcIsac_kLpcShapeCdfMatUb12
+extern const uint16_t* WebRtcIsac_kLpcShapeCdfMatUb12
[UB_LPC_ORDER * UB_LPC_VEC_PER_FRAME];
#endif // WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_LPC_SHAPE_SWB12_TABLES_H_
diff --git a/webrtc/modules/audio_coding/codecs/isac/main/source/lpc_shape_swb16_tables.c b/webrtc/modules/audio_coding/codecs/isac/main/source/lpc_shape_swb16_tables.c
index 89f4523..0f567ed 100644
--- a/webrtc/modules/audio_coding/codecs/isac/main/source/lpc_shape_swb16_tables.c
+++ b/webrtc/modules/audio_coding/codecs/isac/main/source/lpc_shape_swb16_tables.c
@@ -60,26 +60,26 @@
* The following 16 vectors define CDF of 16 decorrelated LAR
* coefficients.
*/
-const WebRtc_UWord16 WebRtcIsac_kLpcShapeCdfVec01Ub16[14] =
+const uint16_t WebRtcIsac_kLpcShapeCdfVec01Ub16[14] =
{
0, 2, 20, 159, 1034, 5688, 20892, 44653,
59849, 64485, 65383, 65518, 65534, 65535
};
-const WebRtc_UWord16 WebRtcIsac_kLpcShapeCdfVec1Ub16[16] =
+const uint16_t WebRtcIsac_kLpcShapeCdfVec1Ub16[16] =
{
0, 1, 7, 43, 276, 1496, 6681, 21653,
43891, 58859, 64022, 65248, 65489, 65529, 65534, 65535
};
-const WebRtc_UWord16 WebRtcIsac_kLpcShapeCdfVec2Ub16[18] =
+const uint16_t WebRtcIsac_kLpcShapeCdfVec2Ub16[18] =
{
0, 1, 9, 54, 238, 933, 3192, 9461,
23226, 42146, 56138, 62413, 64623, 65300, 65473, 65521,
65533, 65535
};
-const WebRtc_UWord16 WebRtcIsac_kLpcShapeCdfVec3Ub16[30] =
+const uint16_t WebRtcIsac_kLpcShapeCdfVec3Ub16[30] =
{
0, 2, 4, 8, 17, 36, 75, 155,
329, 683, 1376, 2662, 5047, 9508, 17526, 29027,
@@ -87,27 +87,27 @@
65273, 65429, 65497, 65526, 65534, 65535
};
-const WebRtc_UWord16 WebRtcIsac_kLpcShapeCdfVec4Ub16[16] =
+const uint16_t WebRtcIsac_kLpcShapeCdfVec4Ub16[16] =
{
0, 1, 10, 63, 361, 1785, 7407, 22242,
43337, 58125, 63729, 65181, 65472, 65527, 65534, 65535
};
-const WebRtc_UWord16 WebRtcIsac_kLpcShapeCdfVec5Ub16[17] =
+const uint16_t WebRtcIsac_kLpcShapeCdfVec5Ub16[17] =
{
0, 1, 7, 29, 134, 599, 2443, 8590,
22962, 42635, 56911, 63060, 64940, 65408, 65513, 65531,
65535
};
-const WebRtc_UWord16 WebRtcIsac_kLpcShapeCdfVec6Ub16[21] =
+const uint16_t WebRtcIsac_kLpcShapeCdfVec6Ub16[21] =
{
0, 1, 5, 16, 57, 191, 611, 1808,
4847, 11755, 24612, 40910, 53789, 60698, 63729, 64924,
65346, 65486, 65523, 65532, 65535
};
-const WebRtc_UWord16 WebRtcIsac_kLpcShapeCdfVec7Ub16[36] =
+const uint16_t WebRtcIsac_kLpcShapeCdfVec7Ub16[36] =
{
0, 1, 4, 12, 25, 55, 104, 184,
314, 539, 926, 1550, 2479, 3861, 5892, 8845,
@@ -116,21 +116,21 @@
65518, 65530, 65534, 65535
};
-const WebRtc_UWord16 WebRtcIsac_kLpcShapeCdfVec8Ub16[21] =
+const uint16_t WebRtcIsac_kLpcShapeCdfVec8Ub16[21] =
{
0, 1, 2, 7, 26, 103, 351, 1149,
3583, 10204, 23846, 41711, 55361, 61917, 64382, 65186,
65433, 65506, 65528, 65534, 65535
};
-const WebRtc_UWord16 WebRtcIsac_kLpcShapeCdfVec01Ub160[21] =
+const uint16_t WebRtcIsac_kLpcShapeCdfVec01Ub160[21] =
{
0, 6, 19, 63, 205, 638, 1799, 4784,
11721, 24494, 40803, 53805, 60886, 63822, 64931, 65333,
65472, 65517, 65530, 65533, 65535
};
-const WebRtc_UWord16 WebRtcIsac_kLpcShapeCdfVec01Ub161[28] =
+const uint16_t WebRtcIsac_kLpcShapeCdfVec01Ub161[28] =
{
0, 1, 3, 11, 31, 86, 221, 506,
1101, 2296, 4486, 8477, 15356, 26079, 38941, 49952,
@@ -138,7 +138,7 @@
65526, 65532, 65534, 65535
};
-const WebRtc_UWord16 WebRtcIsac_kLpcShapeCdfVec01Ub162[55] =
+const uint16_t WebRtcIsac_kLpcShapeCdfVec01Ub162[55] =
{
0, 3, 12, 23, 42, 65, 89, 115,
150, 195, 248, 327, 430, 580, 784, 1099,
@@ -149,7 +149,7 @@
65527, 65529, 65531, 65532, 65533, 65534, 65535
};
-const WebRtc_UWord16 WebRtcIsac_kLpcShapeCdfVec01Ub163[26] =
+const uint16_t WebRtcIsac_kLpcShapeCdfVec01Ub163[26] =
{
0, 2, 4, 10, 21, 48, 114, 280,
701, 1765, 4555, 11270, 24267, 41213, 54285, 61003,
@@ -157,7 +157,7 @@
65534, 65535
};
-const WebRtc_UWord16 WebRtcIsac_kLpcShapeCdfVec01Ub164[28] =
+const uint16_t WebRtcIsac_kLpcShapeCdfVec01Ub164[28] =
{
0, 1, 3, 6, 15, 36, 82, 196,
453, 1087, 2557, 5923, 13016, 25366, 40449, 52582,
@@ -165,7 +165,7 @@
65529, 65533, 65534, 65535
};
-const WebRtc_UWord16 WebRtcIsac_kLpcShapeCdfVec01Ub165[34] =
+const uint16_t WebRtcIsac_kLpcShapeCdfVec01Ub165[34] =
{
0, 2, 4, 8, 18, 35, 73, 146,
279, 524, 980, 1789, 3235, 5784, 10040, 16998,
@@ -174,7 +174,7 @@
65534, 65535
};
-const WebRtc_UWord16 WebRtcIsac_kLpcShapeCdfVec01Ub166[71] =
+const uint16_t WebRtcIsac_kLpcShapeCdfVec01Ub166[71] =
{
0, 1, 2, 6, 13, 26, 55, 92,
141, 191, 242, 296, 355, 429, 522, 636,
@@ -190,7 +190,7 @@
/*
* An array of pointers to CDFs of decorrelated LARs
*/
-const WebRtc_UWord16* WebRtcIsac_kLpcShapeCdfMatUb16
+const uint16_t* WebRtcIsac_kLpcShapeCdfMatUb16
[UB_LPC_ORDER * UB16_LPC_VEC_PER_FRAME] = {
WebRtcIsac_kLpcShapeCdfVec01Ub16,
WebRtcIsac_kLpcShapeCdfVec1Ub16,
@@ -224,7 +224,7 @@
/*
* Number of reconstruction points of quantizers for LAR coefficients.
*/
-const WebRtc_Word16 WebRtcIsac_kLpcShapeNumRecPointUb16
+const int16_t WebRtcIsac_kLpcShapeNumRecPointUb16
[UB_LPC_ORDER * UB16_LPC_VEC_PER_FRAME] =
{
13, 15, 17, 29, 15, 16, 20, 35, 20,
@@ -235,7 +235,7 @@
* Starting index for entropy decoder to search for the right interval,
* one entry per LAR coefficient
*/
-const WebRtc_UWord16 WebRtcIsac_kLpcShapeEntropySearchUb16
+const uint16_t WebRtcIsac_kLpcShapeEntropySearchUb16
[UB_LPC_ORDER * UB16_LPC_VEC_PER_FRAME] =
{
6, 7, 8, 14, 7, 8, 10, 17, 10,
diff --git a/webrtc/modules/audio_coding/codecs/isac/main/source/lpc_shape_swb16_tables.h b/webrtc/modules/audio_coding/codecs/isac/main/source/lpc_shape_swb16_tables.h
index 68d08b2..a4b3a59 100644
--- a/webrtc/modules/audio_coding/codecs/isac/main/source/lpc_shape_swb16_tables.h
+++ b/webrtc/modules/audio_coding/codecs/isac/main/source/lpc_shape_swb16_tables.h
@@ -30,48 +30,48 @@
extern const double WebRtcIsac_kInterVecDecorrMatUb16
[UB16_LPC_VEC_PER_FRAME][UB16_LPC_VEC_PER_FRAME];
-extern const WebRtc_UWord16 WebRtcIsac_kLpcShapeCdfVec01Ub16[14];
+extern const uint16_t WebRtcIsac_kLpcShapeCdfVec01Ub16[14];
-extern const WebRtc_UWord16 WebRtcIsac_kLpcShapeCdfVec1Ub16[16];
+extern const uint16_t WebRtcIsac_kLpcShapeCdfVec1Ub16[16];
-extern const WebRtc_UWord16 WebRtcIsac_kLpcShapeCdfVec2Ub16[18];
+extern const uint16_t WebRtcIsac_kLpcShapeCdfVec2Ub16[18];
-extern const WebRtc_UWord16 WebRtcIsac_kLpcShapeCdfVec3Ub16[30];
+extern const uint16_t WebRtcIsac_kLpcShapeCdfVec3Ub16[30];
-extern const WebRtc_UWord16 WebRtcIsac_kLpcShapeCdfVec4Ub16[16];
+extern const uint16_t WebRtcIsac_kLpcShapeCdfVec4Ub16[16];
-extern const WebRtc_UWord16 WebRtcIsac_kLpcShapeCdfVec5Ub16[17];
+extern const uint16_t WebRtcIsac_kLpcShapeCdfVec5Ub16[17];
-extern const WebRtc_UWord16 WebRtcIsac_kLpcShapeCdfVec6Ub16[21];
+extern const uint16_t WebRtcIsac_kLpcShapeCdfVec6Ub16[21];
-extern const WebRtc_UWord16 WebRtcIsac_kLpcShapeCdfVec7Ub16[36];
+extern const uint16_t WebRtcIsac_kLpcShapeCdfVec7Ub16[36];
-extern const WebRtc_UWord16 WebRtcIsac_kLpcShapeCdfVec8Ub16[21];
+extern const uint16_t WebRtcIsac_kLpcShapeCdfVec8Ub16[21];
-extern const WebRtc_UWord16 WebRtcIsac_kLpcShapeCdfVec01Ub160[21];
+extern const uint16_t WebRtcIsac_kLpcShapeCdfVec01Ub160[21];
-extern const WebRtc_UWord16 WebRtcIsac_kLpcShapeCdfVec01Ub161[28];
+extern const uint16_t WebRtcIsac_kLpcShapeCdfVec01Ub161[28];
-extern const WebRtc_UWord16 WebRtcIsac_kLpcShapeCdfVec01Ub162[55];
+extern const uint16_t WebRtcIsac_kLpcShapeCdfVec01Ub162[55];
-extern const WebRtc_UWord16 WebRtcIsac_kLpcShapeCdfVec01Ub163[26];
+extern const uint16_t WebRtcIsac_kLpcShapeCdfVec01Ub163[26];
-extern const WebRtc_UWord16 WebRtcIsac_kLpcShapeCdfVec01Ub164[28];
+extern const uint16_t WebRtcIsac_kLpcShapeCdfVec01Ub164[28];
-extern const WebRtc_UWord16 WebRtcIsac_kLpcShapeCdfVec01Ub165[34];
+extern const uint16_t WebRtcIsac_kLpcShapeCdfVec01Ub165[34];
-extern const WebRtc_UWord16 WebRtcIsac_kLpcShapeCdfVec01Ub166[71];
+extern const uint16_t WebRtcIsac_kLpcShapeCdfVec01Ub166[71];
-extern const WebRtc_UWord16* WebRtcIsac_kLpcShapeCdfMatUb16
+extern const uint16_t* WebRtcIsac_kLpcShapeCdfMatUb16
[UB_LPC_ORDER * UB16_LPC_VEC_PER_FRAME];
extern const double WebRtcIsac_kLpcShapeLeftRecPointUb16
[UB_LPC_ORDER * UB16_LPC_VEC_PER_FRAME];
-extern const WebRtc_Word16 WebRtcIsac_kLpcShapeNumRecPointUb16
+extern const int16_t WebRtcIsac_kLpcShapeNumRecPointUb16
[UB_LPC_ORDER * UB16_LPC_VEC_PER_FRAME];
-extern const WebRtc_UWord16 WebRtcIsac_kLpcShapeEntropySearchUb16
+extern const uint16_t WebRtcIsac_kLpcShapeEntropySearchUb16
[UB_LPC_ORDER * UB16_LPC_VEC_PER_FRAME];
extern const double WebRtcIsac_kLpcShapeQStepSizeUb16;
diff --git a/webrtc/modules/audio_coding/codecs/isac/main/source/lpc_tables.c b/webrtc/modules/audio_coding/codecs/isac/main/source/lpc_tables.c
index 985e43c..909809b 100644
--- a/webrtc/modules/audio_coding/codecs/isac/main/source/lpc_tables.c
+++ b/webrtc/modules/audio_coding/codecs/isac/main/source/lpc_tables.c
@@ -14,15 +14,15 @@
#include "settings.h"
/* cdf array for model indicator */
-const WebRtc_UWord16 WebRtcIsac_kQKltModelCdf[4] = {
+const uint16_t WebRtcIsac_kQKltModelCdf[4] = {
0, 15434, 37548, 65535 };
/* pointer to cdf array for model indicator */
-const WebRtc_UWord16 *WebRtcIsac_kQKltModelCdfPtr[1] = {
+const uint16_t *WebRtcIsac_kQKltModelCdfPtr[1] = {
WebRtcIsac_kQKltModelCdf };
/* initial cdf index for decoder of model indicator */
-const WebRtc_UWord16 WebRtcIsac_kQKltModelInitIndex[1] = { 1 };
+const uint16_t WebRtcIsac_kQKltModelInitIndex[1] = { 1 };
/* offset to go from rounded value to quantization index */
const short WebRtcIsac_kQKltQuantMinGain[12] = {
@@ -43,10 +43,10 @@
5, 6, 7, 11, 9, 13, 12, 26 };
/* maximum quantization index */
-const WebRtc_UWord16 WebRtcIsac_kQKltMaxIndGain[12] = {
+const uint16_t WebRtcIsac_kQKltMaxIndGain[12] = {
6, 12, 8, 14, 10, 19, 12, 31, 22, 56, 52, 138 };
-const WebRtc_UWord16 WebRtcIsac_kQKltMaxIndShape[108] = {
+const uint16_t WebRtcIsac_kQKltMaxIndShape[108] = {
0, 0, 0, 0, 0, 0, 0, 0, 0, 1,
2, 2, 2, 2, 4, 4, 5, 6, 0, 0,
0, 0, 1, 0, 0, 0, 0, 1, 2, 2,
@@ -60,10 +60,10 @@
9, 10, 13, 19, 17, 23, 25, 49 };
/* index offset */
-const WebRtc_UWord16 WebRtcIsac_kQKltOffsetGain[12] = {
+const uint16_t WebRtcIsac_kQKltOffsetGain[12] = {
0, 7, 20, 29, 44, 55, 75, 88, 120, 143, 200, 253 };
-const WebRtc_UWord16 WebRtcIsac_kQKltOffsetShape[108] = {
+const uint16_t WebRtcIsac_kQKltOffsetShape[108] = {
0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
11, 14, 17, 20, 23, 28, 33, 39, 46, 47,
48, 49, 50, 52, 53, 54, 55, 56, 58, 61,
@@ -77,10 +77,10 @@
405, 415, 426, 440, 460, 478, 502, 528 };
/* initial cdf index for KLT coefficients */
-const WebRtc_UWord16 WebRtcIsac_kQKltInitIndexGain[12] = {
+const uint16_t WebRtcIsac_kQKltInitIndexGain[12] = {
3, 6, 4, 7, 5, 10, 6, 16, 11, 28, 26, 69};
-const WebRtc_UWord16 WebRtcIsac_kQKltInitIndexShape[108] = {
+const uint16_t WebRtcIsac_kQKltInitIndexShape[108] = {
0, 0, 0, 0, 0, 0, 0, 0, 0, 1,
1, 1, 1, 1, 2, 2, 3, 3, 0, 0,
0, 0, 1, 0, 0, 0, 0, 1, 1, 1,
@@ -296,7 +296,7 @@
/* cdf tables for quantizer indices */
-const WebRtc_UWord16 WebRtcIsac_kQKltCdfGain[404] = {
+const uint16_t WebRtcIsac_kQKltCdfGain[404] = {
0, 13, 301, 3730, 61784, 65167, 65489, 65535, 0, 17,
142, 314, 929, 2466, 7678, 56450, 63463, 64740, 65204, 65426,
65527, 65535, 0, 8, 100, 724, 6301, 60105, 65125, 65510,
@@ -340,7 +340,7 @@
65514, 65516, 65518, 65522, 65531, 65533, 65535 };
-const WebRtc_UWord16 WebRtcIsac_kQKltCdfShape[686] = {
+const uint16_t WebRtcIsac_kQKltCdfShape[686] = {
0, 65535, 0, 65535, 0, 65535, 0, 65535, 0, 65535,
0, 65535, 0, 65535, 0, 65535, 0, 65535, 0, 4,
65535, 0, 8, 65514, 65535, 0, 29, 65481, 65535, 0,
@@ -413,7 +413,7 @@
/* pointers to cdf tables for quantizer indices */
-const WebRtc_UWord16 *WebRtcIsac_kQKltCdfPtrGain[12] = {
+const uint16_t *WebRtcIsac_kQKltCdfPtrGain[12] = {
WebRtcIsac_kQKltCdfGain +0 +0, WebRtcIsac_kQKltCdfGain +0 +8,
WebRtcIsac_kQKltCdfGain +0 +22, WebRtcIsac_kQKltCdfGain +0 +32,
WebRtcIsac_kQKltCdfGain +0 +48, WebRtcIsac_kQKltCdfGain +0 +60,
@@ -421,7 +421,7 @@
WebRtcIsac_kQKltCdfGain +0 +128, WebRtcIsac_kQKltCdfGain +0 +152,
WebRtcIsac_kQKltCdfGain +0 +210, WebRtcIsac_kQKltCdfGain +0 +264 };
-const WebRtc_UWord16 *WebRtcIsac_kQKltCdfPtrShape[108] = {
+const uint16_t *WebRtcIsac_kQKltCdfPtrShape[108] = {
WebRtcIsac_kQKltCdfShape +0 +0, WebRtcIsac_kQKltCdfShape +0 +2,
WebRtcIsac_kQKltCdfShape +0 +4, WebRtcIsac_kQKltCdfShape +0 +6,
WebRtcIsac_kQKltCdfShape +0 +8, WebRtcIsac_kQKltCdfShape +0 +10,
diff --git a/webrtc/modules/audio_coding/codecs/isac/main/source/lpc_tables.h b/webrtc/modules/audio_coding/codecs/isac/main/source/lpc_tables.h
index c217804..51f6316 100644
--- a/webrtc/modules/audio_coding/codecs/isac/main/source/lpc_tables.h
+++ b/webrtc/modules/audio_coding/codecs/isac/main/source/lpc_tables.h
@@ -39,13 +39,13 @@
#define KLT_ORDER_SHAPE (LPC_SHAPE_ORDER * SUBFRAMES)
/* cdf array for model indicator */
-extern const WebRtc_UWord16 WebRtcIsac_kQKltModelCdf[KLT_NUM_MODELS+1];
+extern const uint16_t WebRtcIsac_kQKltModelCdf[KLT_NUM_MODELS+1];
/* pointer to cdf array for model indicator */
-extern const WebRtc_UWord16 *WebRtcIsac_kQKltModelCdfPtr[1];
+extern const uint16_t *WebRtcIsac_kQKltModelCdfPtr[1];
/* initial cdf index for decoder of model indicator */
-extern const WebRtc_UWord16 WebRtcIsac_kQKltModelInitIndex[1];
+extern const uint16_t WebRtcIsac_kQKltModelInitIndex[1];
/* offset to go from rounded value to quantization index */
extern const short WebRtcIsac_kQKltQuantMinGain[12];
@@ -53,19 +53,19 @@
extern const short WebRtcIsac_kQKltQuantMinShape[108];
/* maximum quantization index */
-extern const WebRtc_UWord16 WebRtcIsac_kQKltMaxIndGain[12];
+extern const uint16_t WebRtcIsac_kQKltMaxIndGain[12];
-extern const WebRtc_UWord16 WebRtcIsac_kQKltMaxIndShape[108];
+extern const uint16_t WebRtcIsac_kQKltMaxIndShape[108];
/* index offset */
-extern const WebRtc_UWord16 WebRtcIsac_kQKltOffsetGain[12];
+extern const uint16_t WebRtcIsac_kQKltOffsetGain[12];
-extern const WebRtc_UWord16 WebRtcIsac_kQKltOffsetShape[108];
+extern const uint16_t WebRtcIsac_kQKltOffsetShape[108];
/* initial cdf index for KLT coefficients */
-extern const WebRtc_UWord16 WebRtcIsac_kQKltInitIndexGain[12];
+extern const uint16_t WebRtcIsac_kQKltInitIndexGain[12];
-extern const WebRtc_UWord16 WebRtcIsac_kQKltInitIndexShape[108];
+extern const uint16_t WebRtcIsac_kQKltInitIndexShape[108];
/* quantizer representation levels */
extern const double WebRtcIsac_kQKltLevelsGain[392];
@@ -73,14 +73,14 @@
extern const double WebRtcIsac_kQKltLevelsShape[578];
/* cdf tables for quantizer indices */
-extern const WebRtc_UWord16 WebRtcIsac_kQKltCdfGain[404];
+extern const uint16_t WebRtcIsac_kQKltCdfGain[404];
-extern const WebRtc_UWord16 WebRtcIsac_kQKltCdfShape[686];
+extern const uint16_t WebRtcIsac_kQKltCdfShape[686];
/* pointers to cdf tables for quantizer indices */
-extern const WebRtc_UWord16 *WebRtcIsac_kQKltCdfPtrGain[12];
+extern const uint16_t *WebRtcIsac_kQKltCdfPtrGain[12];
-extern const WebRtc_UWord16 *WebRtcIsac_kQKltCdfPtrShape[108];
+extern const uint16_t *WebRtcIsac_kQKltCdfPtrShape[108];
/* left KLT transforms */
extern const double WebRtcIsac_kKltT1Gain[4];
diff --git a/webrtc/modules/audio_coding/codecs/isac/main/source/pitch_gain_tables.c b/webrtc/modules/audio_coding/codecs/isac/main/source/pitch_gain_tables.c
index 5d998a2..947d3e7 100644
--- a/webrtc/modules/audio_coding/codecs/isac/main/source/pitch_gain_tables.c
+++ b/webrtc/modules/audio_coding/codecs/isac/main/source/pitch_gain_tables.c
@@ -15,7 +15,7 @@
/* header file for coding tables for the pitch filter side-info in the entropy coder */
/********************* Pitch Filter Gain Coefficient Tables ************************/
/* cdf for quantized pitch filter gains */
-const WebRtc_UWord16 WebRtcIsac_kQPitchGainCdf[255] = {
+const uint16_t WebRtcIsac_kQPitchGainCdf[255] = {
0, 2, 4, 6, 64, 901, 903, 905, 16954, 16956,
16961, 17360, 17362, 17364, 17366, 17368, 17370, 17372, 17374, 17411,
17514, 17516, 17583, 18790, 18796, 18802, 20760, 20777, 20782, 21722,
@@ -44,22 +44,22 @@
65535, 65535, 65535, 65535, 65535};
/* index limits and ranges */
-const WebRtc_Word16 WebRtcIsac_kIndexLowerLimitGain[3] = {
+const int16_t WebRtcIsac_kIndexLowerLimitGain[3] = {
-7, -2, -1};
-const WebRtc_Word16 WebRtcIsac_kIndexUpperLimitGain[3] = {
+const int16_t WebRtcIsac_kIndexUpperLimitGain[3] = {
0, 3, 1};
-const WebRtc_UWord16 WebRtcIsac_kIndexMultsGain[2] = {
+const uint16_t WebRtcIsac_kIndexMultsGain[2] = {
18, 3};
/* size of cdf table */
-const WebRtc_UWord16 WebRtcIsac_kQCdfTableSizeGain[1] = {
+const uint16_t WebRtcIsac_kQCdfTableSizeGain[1] = {
256};
///////////////////////////FIXED POINT
/* mean values of pitch filter gains in FIXED point */
-const WebRtc_Word16 WebRtcIsac_kQMeanGain1Q12[144] = {
+const int16_t WebRtcIsac_kQMeanGain1Q12[144] = {
843, 1092, 1336, 1222, 1405, 1656, 1500, 1815, 1843, 1838, 1839, 1843, 1843, 1843, 1843, 1843,
1843, 1843, 814, 846, 1092, 1013, 1174, 1383, 1391, 1511, 1584, 1734, 1753, 1843, 1843, 1843,
1843, 1843, 1843, 1843, 524, 689, 777, 845, 947, 1069, 1090, 1263, 1380, 1447, 1559, 1676,
@@ -70,7 +70,7 @@
112, 120, 190, 283, 442, 343, 526, 809, 684, 935, 1134, 1020, 1265, 1506, 0, 0,
0, 0, 0, 0, 0, 111, 256, 87, 373, 597, 430, 684, 935, 770, 1020, 1265};
-const WebRtc_Word16 WebRtcIsac_kQMeanGain2Q12[144] = {
+const int16_t WebRtcIsac_kQMeanGain2Q12[144] = {
1760, 1525, 1285, 1747, 1671, 1393, 1843, 1826, 1555, 1843, 1784, 1606, 1843, 1843, 1711, 1843,
1843, 1814, 1389, 1275, 1040, 1564, 1414, 1252, 1610, 1495, 1343, 1753, 1592, 1405, 1804, 1720,
1475, 1843, 1814, 1581, 1208, 1061, 856, 1349, 1148, 994, 1390, 1253, 1111, 1495, 1343, 1178,
@@ -81,7 +81,7 @@
222, 38, 513, 271, 124, 624, 325, 157, 737, 484, 233, 849, 597, 343, 27, 0,
0, 141, 0, 0, 256, 69, 0, 370, 87, 0, 484, 229, 0, 597, 343, 87};
-const WebRtc_Word16 WebRtcIsac_kQMeanGain3Q12[144] = {
+const int16_t WebRtcIsac_kQMeanGain3Q12[144] = {
1843, 1843, 1711, 1843, 1818, 1606, 1843, 1827, 1511, 1814, 1639, 1393, 1760, 1525, 1285, 1656,
1419, 1176, 1835, 1718, 1475, 1841, 1650, 1387, 1648, 1498, 1287, 1600, 1411, 1176, 1522, 1299,
1040, 1419, 1176, 928, 1773, 1461, 1128, 1532, 1355, 1202, 1429, 1260, 1115, 1398, 1151, 1025,
@@ -93,7 +93,7 @@
0, 370, 57, 0, 256, 43, 0, 141, 0, 0, 27, 0, 0, 0, 0, 0};
-const WebRtc_Word16 WebRtcIsac_kQMeanGain4Q12[144] = {
+const int16_t WebRtcIsac_kQMeanGain4Q12[144] = {
1843, 1843, 1843, 1843, 1841, 1843, 1500, 1821, 1843, 1222, 1434, 1656, 843, 1092, 1336, 504,
757, 1007, 1843, 1843, 1843, 1838, 1791, 1843, 1265, 1505, 1599, 965, 1219, 1425, 730, 821,
1092, 249, 504, 757, 1783, 1819, 1843, 1351, 1567, 1727, 1096, 1268, 1409, 805, 961, 1131,
diff --git a/webrtc/modules/audio_coding/codecs/isac/main/source/pitch_gain_tables.h b/webrtc/modules/audio_coding/codecs/isac/main/source/pitch_gain_tables.h
index f958f5d..e7518b1 100644
--- a/webrtc/modules/audio_coding/codecs/isac/main/source/pitch_gain_tables.h
+++ b/webrtc/modules/audio_coding/codecs/isac/main/source/pitch_gain_tables.h
@@ -23,23 +23,23 @@
/* header file for coding tables for the pitch filter side-info in the entropy coder */
/********************* Pitch Filter Gain Coefficient Tables ************************/
/* cdf for quantized pitch filter gains */
-extern const WebRtc_UWord16 WebRtcIsac_kQPitchGainCdf[255];
+extern const uint16_t WebRtcIsac_kQPitchGainCdf[255];
/* index limits and ranges */
-extern const WebRtc_Word16 WebRtcIsac_kIndexLowerLimitGain[3];
+extern const int16_t WebRtcIsac_kIndexLowerLimitGain[3];
-extern const WebRtc_Word16 WebRtcIsac_kIndexUpperLimitGain[3];
-extern const WebRtc_UWord16 WebRtcIsac_kIndexMultsGain[2];
+extern const int16_t WebRtcIsac_kIndexUpperLimitGain[3];
+extern const uint16_t WebRtcIsac_kIndexMultsGain[2];
/* mean values of pitch filter gains */
//(Y)
-extern const WebRtc_Word16 WebRtcIsac_kQMeanGain1Q12[144];
-extern const WebRtc_Word16 WebRtcIsac_kQMeanGain2Q12[144];
-extern const WebRtc_Word16 WebRtcIsac_kQMeanGain3Q12[144];
-extern const WebRtc_Word16 WebRtcIsac_kQMeanGain4Q12[144];
+extern const int16_t WebRtcIsac_kQMeanGain1Q12[144];
+extern const int16_t WebRtcIsac_kQMeanGain2Q12[144];
+extern const int16_t WebRtcIsac_kQMeanGain3Q12[144];
+extern const int16_t WebRtcIsac_kQMeanGain4Q12[144];
//(Y)
/* size of cdf table */
-extern const WebRtc_UWord16 WebRtcIsac_kQCdfTableSizeGain[1];
+extern const uint16_t WebRtcIsac_kQCdfTableSizeGain[1];
#endif /* WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_PITCH_GAIN_TABLES_H_ */
diff --git a/webrtc/modules/audio_coding/codecs/isac/main/source/pitch_lag_tables.c b/webrtc/modules/audio_coding/codecs/isac/main/source/pitch_lag_tables.c
index 72a031e..f845a22 100644
--- a/webrtc/modules/audio_coding/codecs/isac/main/source/pitch_lag_tables.c
+++ b/webrtc/modules/audio_coding/codecs/isac/main/source/pitch_lag_tables.c
@@ -17,7 +17,7 @@
/* tables for use with small pitch gain */
/* cdf for quantized pitch filter lags */
-const WebRtc_UWord16 WebRtcIsac_kQPitchLagCdf1Lo[127] = {
+const uint16_t WebRtcIsac_kQPitchLagCdf1Lo[127] = {
0, 134, 336, 549, 778, 998, 1264, 1512, 1777, 2070,
2423, 2794, 3051, 3361, 3708, 3979, 4315, 4610, 4933, 5269,
5575, 5896, 6155, 6480, 6816, 7129, 7477, 7764, 8061, 8358,
@@ -32,30 +32,30 @@
59288, 60179, 61076, 61806, 62474, 63129, 63656, 64160, 64533, 64856,
65152, 65535, 65535, 65535, 65535, 65535, 65535};
-const WebRtc_UWord16 WebRtcIsac_kQPitchLagCdf2Lo[20] = {
+const uint16_t WebRtcIsac_kQPitchLagCdf2Lo[20] = {
0, 429, 3558, 5861, 8558, 11639, 15210, 19502, 24773, 31983,
42602, 48567, 52601, 55676, 58160, 60172, 61889, 63235, 65383, 65535};
-const WebRtc_UWord16 WebRtcIsac_kQPitchLagCdf3Lo[2] = {
+const uint16_t WebRtcIsac_kQPitchLagCdf3Lo[2] = {
0, 65535};
-const WebRtc_UWord16 WebRtcIsac_kQPitchLagCdf4Lo[10] = {
+const uint16_t WebRtcIsac_kQPitchLagCdf4Lo[10] = {
0, 2966, 6368, 11182, 19431, 37793, 48532, 55353, 60626, 65535};
-const WebRtc_UWord16 *WebRtcIsac_kQPitchLagCdfPtrLo[4] = {WebRtcIsac_kQPitchLagCdf1Lo, WebRtcIsac_kQPitchLagCdf2Lo, WebRtcIsac_kQPitchLagCdf3Lo, WebRtcIsac_kQPitchLagCdf4Lo};
+const uint16_t *WebRtcIsac_kQPitchLagCdfPtrLo[4] = {WebRtcIsac_kQPitchLagCdf1Lo, WebRtcIsac_kQPitchLagCdf2Lo, WebRtcIsac_kQPitchLagCdf3Lo, WebRtcIsac_kQPitchLagCdf4Lo};
/* size of first cdf table */
-const WebRtc_UWord16 WebRtcIsac_kQPitchLagCdfSizeLo[1] = {128};
+const uint16_t WebRtcIsac_kQPitchLagCdfSizeLo[1] = {128};
/* index limits and ranges */
-const WebRtc_Word16 WebRtcIsac_kQIndexLowerLimitLagLo[4] = {
+const int16_t WebRtcIsac_kQIndexLowerLimitLagLo[4] = {
-140, -9, 0, -4};
-const WebRtc_Word16 WebRtcIsac_kQIndexUpperLimitLagLo[4] = {
+const int16_t WebRtcIsac_kQIndexUpperLimitLagLo[4] = {
-20, 9, 0, 4};
/* initial index for arithmetic decoder */
-const WebRtc_UWord16 WebRtcIsac_kQInitIndexLagLo[3] = {
+const uint16_t WebRtcIsac_kQInitIndexLagLo[3] = {
10, 1, 5};
/* mean values of pitch filter lags */
@@ -75,7 +75,7 @@
/* tables for use with medium pitch gain */
/* cdf for quantized pitch filter lags */
-const WebRtc_UWord16 WebRtcIsac_kQPitchLagCdf1Mid[255] = {
+const uint16_t WebRtcIsac_kQPitchLagCdf1Mid[255] = {
0, 28, 61, 88, 121, 149, 233, 331, 475, 559,
624, 661, 689, 712, 745, 791, 815, 843, 866, 922,
959, 1024, 1061, 1117, 1178, 1238, 1280, 1350, 1453, 1513,
@@ -103,33 +103,33 @@
65414, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535,
65535, 65535, 65535, 65535, 65535};
-const WebRtc_UWord16 WebRtcIsac_kQPitchLagCdf2Mid[36] = {
+const uint16_t WebRtcIsac_kQPitchLagCdf2Mid[36] = {
0, 71, 335, 581, 836, 1039, 1323, 1795, 2258, 2608,
3005, 3591, 4243, 5344, 7163, 10583, 16848, 28078, 49448, 57007,
60357, 61850, 62837, 63437, 63872, 64188, 64377, 64614, 64774, 64949,
65039, 65115, 65223, 65360, 65474, 65535};
-const WebRtc_UWord16 WebRtcIsac_kQPitchLagCdf3Mid[2] = {
+const uint16_t WebRtcIsac_kQPitchLagCdf3Mid[2] = {
0, 65535};
-const WebRtc_UWord16 WebRtcIsac_kQPitchLagCdf4Mid[20] = {
+const uint16_t WebRtcIsac_kQPitchLagCdf4Mid[20] = {
0, 28, 246, 459, 667, 1045, 1523, 2337, 4337, 11347,
44231, 56709, 60781, 62243, 63161, 63969, 64608, 65062, 65502, 65535};
-const WebRtc_UWord16 *WebRtcIsac_kQPitchLagCdfPtrMid[4] = {WebRtcIsac_kQPitchLagCdf1Mid, WebRtcIsac_kQPitchLagCdf2Mid, WebRtcIsac_kQPitchLagCdf3Mid, WebRtcIsac_kQPitchLagCdf4Mid};
+const uint16_t *WebRtcIsac_kQPitchLagCdfPtrMid[4] = {WebRtcIsac_kQPitchLagCdf1Mid, WebRtcIsac_kQPitchLagCdf2Mid, WebRtcIsac_kQPitchLagCdf3Mid, WebRtcIsac_kQPitchLagCdf4Mid};
/* size of first cdf table */
-const WebRtc_UWord16 WebRtcIsac_kQPitchLagCdfSizeMid[1] = {256};
+const uint16_t WebRtcIsac_kQPitchLagCdfSizeMid[1] = {256};
/* index limits and ranges */
-const WebRtc_Word16 WebRtcIsac_kQIndexLowerLimitLagMid[4] = {
+const int16_t WebRtcIsac_kQIndexLowerLimitLagMid[4] = {
-280, -17, 0, -9};
-const WebRtc_Word16 WebRtcIsac_kQIndexUpperLimitLagMid[4] = {
+const int16_t WebRtcIsac_kQIndexUpperLimitLagMid[4] = {
-40, 17, 0, 9};
/* initial index for arithmetic decoder */
-const WebRtc_UWord16 WebRtcIsac_kQInitIndexLagMid[3] = {
+const uint16_t WebRtcIsac_kQInitIndexLagMid[3] = {
18, 1, 10};
/* mean values of pitch filter lags */
@@ -152,7 +152,7 @@
/* tables for use with large pitch gain */
/* cdf for quantized pitch filter lags */
-const WebRtc_UWord16 WebRtcIsac_kQPitchLagCdf1Hi[511] = {
+const uint16_t WebRtcIsac_kQPitchLagCdf1Hi[511] = {
0, 7, 18, 33, 69, 105, 156, 228, 315, 612,
680, 691, 709, 724, 735, 738, 742, 746, 749, 753,
756, 760, 764, 774, 782, 785, 789, 796, 800, 803,
@@ -206,7 +206,7 @@
65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535, 65535,
65535};
-const WebRtc_UWord16 WebRtcIsac_kQPitchLagCdf2Hi[68] = {
+const uint16_t WebRtcIsac_kQPitchLagCdf2Hi[68] = {
0, 7, 11, 22, 37, 52, 56, 59, 81, 85,
89, 96, 115, 130, 137, 152, 170, 181, 193, 200,
207, 233, 237, 259, 289, 318, 363, 433, 592, 992,
@@ -215,29 +215,29 @@
65413, 65420, 65428, 65435, 65439, 65450, 65454, 65468, 65472, 65476,
65483, 65491, 65498, 65505, 65516, 65520, 65528, 65535};
-const WebRtc_UWord16 WebRtcIsac_kQPitchLagCdf3Hi[2] = {
+const uint16_t WebRtcIsac_kQPitchLagCdf3Hi[2] = {
0, 65535};
-const WebRtc_UWord16 WebRtcIsac_kQPitchLagCdf4Hi[35] = {
+const uint16_t WebRtcIsac_kQPitchLagCdf4Hi[35] = {
0, 7, 19, 30, 41, 48, 63, 74, 82, 96,
122, 152, 215, 330, 701, 2611, 10931, 48106, 61177, 64341,
65112, 65238, 65309, 65338, 65364, 65379, 65401, 65427, 65453, 65465,
65476, 65490, 65509, 65528, 65535};
-const WebRtc_UWord16 *WebRtcIsac_kQPitchLagCdfPtrHi[4] = {WebRtcIsac_kQPitchLagCdf1Hi, WebRtcIsac_kQPitchLagCdf2Hi, WebRtcIsac_kQPitchLagCdf3Hi, WebRtcIsac_kQPitchLagCdf4Hi};
+const uint16_t *WebRtcIsac_kQPitchLagCdfPtrHi[4] = {WebRtcIsac_kQPitchLagCdf1Hi, WebRtcIsac_kQPitchLagCdf2Hi, WebRtcIsac_kQPitchLagCdf3Hi, WebRtcIsac_kQPitchLagCdf4Hi};
/* size of first cdf table */
-const WebRtc_UWord16 WebRtcIsac_kQPitchLagCdfSizeHi[1] = {512};
+const uint16_t WebRtcIsac_kQPitchLagCdfSizeHi[1] = {512};
/* index limits and ranges */
-const WebRtc_Word16 WebRtcIsac_kQindexLowerLimitLagHi[4] = {
+const int16_t WebRtcIsac_kQindexLowerLimitLagHi[4] = {
-552, -34, 0, -16};
-const WebRtc_Word16 WebRtcIsac_kQindexUpperLimitLagHi[4] = {
+const int16_t WebRtcIsac_kQindexUpperLimitLagHi[4] = {
-80, 32, 0, 17};
/* initial index for arithmetic decoder */
-const WebRtc_UWord16 WebRtcIsac_kQInitIndexLagHi[3] = {
+const uint16_t WebRtcIsac_kQInitIndexLagHi[3] = {
34, 1, 18};
/* mean values of pitch filter lags */
diff --git a/webrtc/modules/audio_coding/codecs/isac/main/source/pitch_lag_tables.h b/webrtc/modules/audio_coding/codecs/isac/main/source/pitch_lag_tables.h
index 67b02e5..7c72571 100644
--- a/webrtc/modules/audio_coding/codecs/isac/main/source/pitch_lag_tables.h
+++ b/webrtc/modules/audio_coding/codecs/isac/main/source/pitch_lag_tables.h
@@ -25,22 +25,22 @@
/* tables for use with small pitch gain */
/* cdfs for quantized pitch lags */
-extern const WebRtc_UWord16 WebRtcIsac_kQPitchLagCdf1Lo[127];
-extern const WebRtc_UWord16 WebRtcIsac_kQPitchLagCdf2Lo[20];
-extern const WebRtc_UWord16 WebRtcIsac_kQPitchLagCdf3Lo[2];
-extern const WebRtc_UWord16 WebRtcIsac_kQPitchLagCdf4Lo[10];
+extern const uint16_t WebRtcIsac_kQPitchLagCdf1Lo[127];
+extern const uint16_t WebRtcIsac_kQPitchLagCdf2Lo[20];
+extern const uint16_t WebRtcIsac_kQPitchLagCdf3Lo[2];
+extern const uint16_t WebRtcIsac_kQPitchLagCdf4Lo[10];
-extern const WebRtc_UWord16 *WebRtcIsac_kQPitchLagCdfPtrLo[4];
+extern const uint16_t *WebRtcIsac_kQPitchLagCdfPtrLo[4];
/* size of first cdf table */
-extern const WebRtc_UWord16 WebRtcIsac_kQPitchLagCdfSizeLo[1];
+extern const uint16_t WebRtcIsac_kQPitchLagCdfSizeLo[1];
/* index limits and ranges */
-extern const WebRtc_Word16 WebRtcIsac_kQIndexLowerLimitLagLo[4];
-extern const WebRtc_Word16 WebRtcIsac_kQIndexUpperLimitLagLo[4];
+extern const int16_t WebRtcIsac_kQIndexLowerLimitLagLo[4];
+extern const int16_t WebRtcIsac_kQIndexUpperLimitLagLo[4];
/* initial index for arithmetic decoder */
-extern const WebRtc_UWord16 WebRtcIsac_kQInitIndexLagLo[3];
+extern const uint16_t WebRtcIsac_kQInitIndexLagLo[3];
/* mean values of pitch filter lags */
extern const double WebRtcIsac_kQMeanLag2Lo[19];
@@ -53,22 +53,22 @@
/* tables for use with medium pitch gain */
/* cdfs for quantized pitch lags */
-extern const WebRtc_UWord16 WebRtcIsac_kQPitchLagCdf1Mid[255];
-extern const WebRtc_UWord16 WebRtcIsac_kQPitchLagCdf2Mid[36];
-extern const WebRtc_UWord16 WebRtcIsac_kQPitchLagCdf3Mid[2];
-extern const WebRtc_UWord16 WebRtcIsac_kQPitchLagCdf4Mid[20];
+extern const uint16_t WebRtcIsac_kQPitchLagCdf1Mid[255];
+extern const uint16_t WebRtcIsac_kQPitchLagCdf2Mid[36];
+extern const uint16_t WebRtcIsac_kQPitchLagCdf3Mid[2];
+extern const uint16_t WebRtcIsac_kQPitchLagCdf4Mid[20];
-extern const WebRtc_UWord16 *WebRtcIsac_kQPitchLagCdfPtrMid[4];
+extern const uint16_t *WebRtcIsac_kQPitchLagCdfPtrMid[4];
/* size of first cdf table */
-extern const WebRtc_UWord16 WebRtcIsac_kQPitchLagCdfSizeMid[1];
+extern const uint16_t WebRtcIsac_kQPitchLagCdfSizeMid[1];
/* index limits and ranges */
-extern const WebRtc_Word16 WebRtcIsac_kQIndexLowerLimitLagMid[4];
-extern const WebRtc_Word16 WebRtcIsac_kQIndexUpperLimitLagMid[4];
+extern const int16_t WebRtcIsac_kQIndexLowerLimitLagMid[4];
+extern const int16_t WebRtcIsac_kQIndexUpperLimitLagMid[4];
/* initial index for arithmetic decoder */
-extern const WebRtc_UWord16 WebRtcIsac_kQInitIndexLagMid[3];
+extern const uint16_t WebRtcIsac_kQInitIndexLagMid[3];
/* mean values of pitch filter lags */
extern const double WebRtcIsac_kQMeanLag2Mid[35];
@@ -81,22 +81,22 @@
/* tables for use with large pitch gain */
/* cdfs for quantized pitch lags */
-extern const WebRtc_UWord16 WebRtcIsac_kQPitchLagCdf1Hi[511];
-extern const WebRtc_UWord16 WebRtcIsac_kQPitchLagCdf2Hi[68];
-extern const WebRtc_UWord16 WebRtcIsac_kQPitchLagCdf3Hi[2];
-extern const WebRtc_UWord16 WebRtcIsac_kQPitchLagCdf4Hi[35];
+extern const uint16_t WebRtcIsac_kQPitchLagCdf1Hi[511];
+extern const uint16_t WebRtcIsac_kQPitchLagCdf2Hi[68];
+extern const uint16_t WebRtcIsac_kQPitchLagCdf3Hi[2];
+extern const uint16_t WebRtcIsac_kQPitchLagCdf4Hi[35];
-extern const WebRtc_UWord16 *WebRtcIsac_kQPitchLagCdfPtrHi[4];
+extern const uint16_t *WebRtcIsac_kQPitchLagCdfPtrHi[4];
/* size of first cdf table */
-extern const WebRtc_UWord16 WebRtcIsac_kQPitchLagCdfSizeHi[1];
+extern const uint16_t WebRtcIsac_kQPitchLagCdfSizeHi[1];
/* index limits and ranges */
-extern const WebRtc_Word16 WebRtcIsac_kQindexLowerLimitLagHi[4];
-extern const WebRtc_Word16 WebRtcIsac_kQindexUpperLimitLagHi[4];
+extern const int16_t WebRtcIsac_kQindexLowerLimitLagHi[4];
+extern const int16_t WebRtcIsac_kQindexUpperLimitLagHi[4];
/* initial index for arithmetic decoder */
-extern const WebRtc_UWord16 WebRtcIsac_kQInitIndexLagHi[3];
+extern const uint16_t WebRtcIsac_kQInitIndexLagHi[3];
/* mean values of pitch filter lags */
extern const double WebRtcIsac_kQMeanLag2Hi[67];
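The high-band tables mirror the low- and mid-band layout: four cdfs, a pointer array to select among them, per-parameter index limits, and an initial index for the arithmetic decoder. A hedged sketch of how such a table set is typically consumed; decode_index() is a hypothetical stand-in for the arithmetic-decoder call, and the clamping is illustrative, not the exact iSAC routine:

/* Hedged sketch, not part of the patch. */
int k = 1;                                        /* which pitch-lag parameter */
const uint16_t* cdf = WebRtcIsac_kQPitchLagCdfPtrHi[k];
int idx = decode_index(cdf);                      /* hypothetical decoder call */
if (idx < WebRtcIsac_kQindexLowerLimitLagHi[k])
  idx = WebRtcIsac_kQindexLowerLimitLagHi[k];
if (idx > WebRtcIsac_kQindexUpperLimitLagHi[k])
  idx = WebRtcIsac_kQindexUpperLimitLagHi[k];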
diff --git a/webrtc/modules/audio_coding/codecs/isac/main/source/spectrum_ar_model_tables.c b/webrtc/modules/audio_coding/codecs/isac/main/source/spectrum_ar_model_tables.c
index 92b9c4d..9eae055 100644
--- a/webrtc/modules/audio_coding/codecs/isac/main/source/spectrum_ar_model_tables.c
+++ b/webrtc/modules/audio_coding/codecs/isac/main/source/spectrum_ar_model_tables.c
@@ -13,82 +13,82 @@
/********************* AR Coefficient Tables ************************/
/* cdf for quantized reflection coefficient 1 */
-const WebRtc_UWord16 WebRtcIsac_kQArRc1Cdf[12] = {
+const uint16_t WebRtcIsac_kQArRc1Cdf[12] = {
0, 2, 4, 129, 7707, 57485, 65495, 65527, 65529, 65531,
65533, 65535};
/* cdf for quantized reflection coefficient 2 */
-const WebRtc_UWord16 WebRtcIsac_kQArRc2Cdf[12] = {
+const uint16_t WebRtcIsac_kQArRc2Cdf[12] = {
0, 2, 4, 7, 531, 25298, 64525, 65526, 65529, 65531,
65533, 65535};
/* cdf for quantized reflection coefficient 3 */
-const WebRtc_UWord16 WebRtcIsac_kQArRc3Cdf[12] = {
+const uint16_t WebRtcIsac_kQArRc3Cdf[12] = {
0, 2, 4, 6, 620, 22898, 64843, 65527, 65529, 65531,
65533, 65535};
/* cdf for quantized reflection coefficient 4 */
-const WebRtc_UWord16 WebRtcIsac_kQArRc4Cdf[12] = {
+const uint16_t WebRtcIsac_kQArRc4Cdf[12] = {
0, 2, 4, 6, 35, 10034, 60733, 65506, 65529, 65531,
65533, 65535};
/* cdf for quantized reflection coefficient 5 */
-const WebRtc_UWord16 WebRtcIsac_kQArRc5Cdf[12] = {
+const uint16_t WebRtcIsac_kQArRc5Cdf[12] = {
0, 2, 4, 6, 36, 7567, 56727, 65385, 65529, 65531,
65533, 65535};
/* cdf for quantized reflection coefficient 6 */
-const WebRtc_UWord16 WebRtcIsac_kQArRc6Cdf[12] = {
+const uint16_t WebRtcIsac_kQArRc6Cdf[12] = {
0, 2, 4, 6, 14, 6579, 57360, 65409, 65529, 65531,
65533, 65535};
/* representation levels for quantized reflection coefficient 1 */
-const WebRtc_Word16 WebRtcIsac_kQArRc1Levels[11] = {
+const int16_t WebRtcIsac_kQArRc1Levels[11] = {
-32104, -29007, -23202, -15496, -9279, -2577, 5934, 17535, 24512, 29503, 32104
};
/* representation levels for quantized reflection coefficient 2 */
-const WebRtc_Word16 WebRtcIsac_kQArRc2Levels[11] = {
+const int16_t WebRtcIsac_kQArRc2Levels[11] = {
-32104, -29503, -23494, -15261, -7309, -1399, 6158, 16381, 24512, 29503, 32104
};
/* representation levels for quantized reflection coefficient 3 */
-const WebRtc_Word16 WebRtcIsac_kQArRc3Levels[11] = {
+const int16_t WebRtcIsac_kQArRc3Levels[11] = {
-32104, -29503, -23157, -15186, -7347, -1359, 5829, 17535, 24512, 29503, 32104
};
/* representation levels for quantized reflection coefficient 4 */
-const WebRtc_Word16 WebRtcIsac_kQArRc4Levels[11] = {
+const int16_t WebRtcIsac_kQArRc4Levels[11] = {
-32104, -29503, -24512, -15362, -6665, -342, 6596, 14585, 24512, 29503, 32104
};
/* representation levels for quantized reflection coefficient 5 */
-const WebRtc_Word16 WebRtcIsac_kQArRc5Levels[11] = {
+const int16_t WebRtcIsac_kQArRc5Levels[11] = {
-32104, -29503, -24512, -15005, -6564, -106, 7123, 14920, 24512, 29503, 32104
};
/* representation levels for quantized reflection coefficient 6 */
-const WebRtc_Word16 WebRtcIsac_kQArRc6Levels[11] = {
+const int16_t WebRtcIsac_kQArRc6Levels[11] = {
-32104, -29503, -24512, -15096, -6656, -37, 7036, 14847, 24512, 29503, 32104
};
/* quantization boundary levels for reflection coefficients */
-const WebRtc_Word16 WebRtcIsac_kQArBoundaryLevels[12] = {
+const int16_t WebRtcIsac_kQArBoundaryLevels[12] = {
-32768, -31441, -27566, -21458, -13612, -4663, 4663, 13612, 21458, 27566, 31441, 32767
};
/* initial index for AR reflection coefficient quantizer and cdf table search */
-const WebRtc_UWord16 WebRtcIsac_kQArRcInitIndex[6] = {
+const uint16_t WebRtcIsac_kQArRcInitIndex[6] = {
5, 5, 5, 5, 5, 5};
/* pointers to AR cdf tables */
-const WebRtc_UWord16 *WebRtcIsac_kQArRcCdfPtr[AR_ORDER] = {
+const uint16_t *WebRtcIsac_kQArRcCdfPtr[AR_ORDER] = {
WebRtcIsac_kQArRc1Cdf, WebRtcIsac_kQArRc2Cdf, WebRtcIsac_kQArRc3Cdf,
WebRtcIsac_kQArRc4Cdf, WebRtcIsac_kQArRc5Cdf, WebRtcIsac_kQArRc6Cdf
};
/* pointers to AR representation levels tables */
-const WebRtc_Word16 *WebRtcIsac_kQArRcLevelsPtr[AR_ORDER] = {
+const int16_t *WebRtcIsac_kQArRcLevelsPtr[AR_ORDER] = {
WebRtcIsac_kQArRc1Levels, WebRtcIsac_kQArRc2Levels, WebRtcIsac_kQArRc3Levels,
WebRtcIsac_kQArRc4Levels, WebRtcIsac_kQArRc5Levels, WebRtcIsac_kQArRc6Levels
};
@@ -96,27 +96,27 @@
/******************** GAIN Coefficient Tables ***********************/
/* cdf for Gain coefficient */
-const WebRtc_UWord16 WebRtcIsac_kQGainCdf[19] = {
+const uint16_t WebRtcIsac_kQGainCdf[19] = {
0, 2, 4, 6, 8, 10, 12, 14, 16, 1172,
11119, 29411, 51699, 64445, 65527, 65529, 65531, 65533, 65535};
/* representation levels for quantized squared Gain coefficient */
-const WebRtc_Word32 WebRtcIsac_kQGain2Levels[18] = {
+const int32_t WebRtcIsac_kQGain2Levels[18] = {
// 17, 28, 46, 76, 128, 215, 364, 709, 1268, 1960, 3405, 6078, 11286, 17827, 51918, 134498, 487432, 2048000};
128, 128, 128, 128, 128, 215, 364, 709, 1268, 1960, 3405, 6078, 11286, 17827, 51918, 134498, 487432, 2048000};
/* quantization boundary levels for squared Gain coefficient */
-const WebRtc_Word32 WebRtcIsac_kQGain2BoundaryLevels[19] = {
+const int32_t WebRtcIsac_kQGain2BoundaryLevels[19] = {
0, 21, 35, 59, 99, 166, 280, 475, 815, 1414, 2495, 4505, 8397, 16405, 34431, 81359, 240497, 921600, 0x7FFFFFFF};
/* pointers to Gain cdf table */
-const WebRtc_UWord16 *WebRtcIsac_kQGainCdf_ptr[1] = {WebRtcIsac_kQGainCdf};
+const uint16_t *WebRtcIsac_kQGainCdf_ptr[1] = {WebRtcIsac_kQGainCdf};
/* Gain initial index for gain quantizer and cdf table search */
-const WebRtc_UWord16 WebRtcIsac_kQGainInitIndex[1] = {11};
+const uint16_t WebRtcIsac_kQGainInitIndex[1] = {11};
/************************* Cosine Tables ****************************/
/* Cosine table */
-const WebRtc_Word16 WebRtcIsac_kCos[6][60] = {
+const int16_t WebRtcIsac_kCos[6][60] = {
{512, 512, 511, 510, 508, 507, 505, 502, 499, 496, 493, 489, 485, 480, 476, 470, 465, 459, 453, 447,
440, 433, 426, 418, 410, 402, 394, 385, 376, 367, 357, 348, 338, 327, 317, 306, 295, 284, 273, 262,
250, 238, 226, 214, 202, 190, 177, 165, 152, 139, 126, 113, 100, 87, 73, 60, 47, 33, 20, 7},
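Every cdf above spans the full 16-bit range (the last entry is 65535). As a hedged illustration of the general technique such tables support, not necessarily the exact iSAC routine, a symbol is recovered by locating the interval that contains a 16-bit target value:

/* Hedged sketch, not part of the patch: returns i such that
 * cdf[i] <= target < cdf[i + 1], e.g. for WebRtcIsac_kQGainCdf (19 entries). */
static int FindInterval(const uint16_t* cdf, int num_entries, uint16_t target) {
  int i;
  for (i = 0; i + 1 < num_entries; ++i) {
    if (target < cdf[i + 1])
      return i;
  }
  return num_entries - 2;  /* target == 65535 falls in the last interval */
}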
diff --git a/webrtc/modules/audio_coding/codecs/isac/main/source/spectrum_ar_model_tables.h b/webrtc/modules/audio_coding/codecs/isac/main/source/spectrum_ar_model_tables.h
index 159245b..22fe6a2 100644
--- a/webrtc/modules/audio_coding/codecs/isac/main/source/spectrum_ar_model_tables.h
+++ b/webrtc/modules/audio_coding/codecs/isac/main/source/spectrum_ar_model_tables.h
@@ -23,54 +23,54 @@
/********************* AR Coefficient Tables ************************/
/* cdf for quantized reflection coefficient 1 */
-extern const WebRtc_UWord16 WebRtcIsac_kQArRc1Cdf[12];
+extern const uint16_t WebRtcIsac_kQArRc1Cdf[12];
/* cdf for quantized reflection coefficient 2 */
-extern const WebRtc_UWord16 WebRtcIsac_kQArRc2Cdf[12];
+extern const uint16_t WebRtcIsac_kQArRc2Cdf[12];
/* cdf for quantized reflection coefficient 3 */
-extern const WebRtc_UWord16 WebRtcIsac_kQArRc3Cdf[12];
+extern const uint16_t WebRtcIsac_kQArRc3Cdf[12];
/* cdf for quantized reflection coefficient 4 */
-extern const WebRtc_UWord16 WebRtcIsac_kQArRc4Cdf[12];
+extern const uint16_t WebRtcIsac_kQArRc4Cdf[12];
/* cdf for quantized reflection coefficient 5 */
-extern const WebRtc_UWord16 WebRtcIsac_kQArRc5Cdf[12];
+extern const uint16_t WebRtcIsac_kQArRc5Cdf[12];
/* cdf for quantized reflection coefficient 6 */
-extern const WebRtc_UWord16 WebRtcIsac_kQArRc6Cdf[12];
+extern const uint16_t WebRtcIsac_kQArRc6Cdf[12];
/* quantization boundary levels for reflection coefficients */
-extern const WebRtc_Word16 WebRtcIsac_kQArBoundaryLevels[12];
+extern const int16_t WebRtcIsac_kQArBoundaryLevels[12];
/* initial indices for AR reflection coefficient quantizer and cdf table search */
-extern const WebRtc_UWord16 WebRtcIsac_kQArRcInitIndex[AR_ORDER];
+extern const uint16_t WebRtcIsac_kQArRcInitIndex[AR_ORDER];
/* pointers to AR cdf tables */
-extern const WebRtc_UWord16 *WebRtcIsac_kQArRcCdfPtr[AR_ORDER];
+extern const uint16_t *WebRtcIsac_kQArRcCdfPtr[AR_ORDER];
/* pointers to AR representation levels tables */
-extern const WebRtc_Word16 *WebRtcIsac_kQArRcLevelsPtr[AR_ORDER];
+extern const int16_t *WebRtcIsac_kQArRcLevelsPtr[AR_ORDER];
/******************** GAIN Coefficient Tables ***********************/
/* cdf for Gain coefficient */
-extern const WebRtc_UWord16 WebRtcIsac_kQGainCdf[19];
+extern const uint16_t WebRtcIsac_kQGainCdf[19];
/* representation levels for quantized Gain coefficient */
-extern const WebRtc_Word32 WebRtcIsac_kQGain2Levels[18];
+extern const int32_t WebRtcIsac_kQGain2Levels[18];
/* squared quantization boundary levels for Gain coefficient */
-extern const WebRtc_Word32 WebRtcIsac_kQGain2BoundaryLevels[19];
+extern const int32_t WebRtcIsac_kQGain2BoundaryLevels[19];
/* pointer to Gain cdf table */
-extern const WebRtc_UWord16 *WebRtcIsac_kQGainCdf_ptr[1];
+extern const uint16_t *WebRtcIsac_kQGainCdf_ptr[1];
/* Gain initial index for gain quantizer and cdf table search */
-extern const WebRtc_UWord16 WebRtcIsac_kQGainInitIndex[1];
+extern const uint16_t WebRtcIsac_kQGainInitIndex[1];
/************************* Cosine Tables ****************************/
/* Cosine table */
-extern const WebRtc_Word16 WebRtcIsac_kCos[6][60];
+extern const int16_t WebRtcIsac_kCos[6][60];
#endif /* WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_SPECTRUM_AR_MODEL_TABLES_H_ */
diff --git a/webrtc/modules/audio_coding/codecs/isac/main/source/structs.h b/webrtc/modules/audio_coding/codecs/isac/main/source/structs.h
index 62dc204..1bd73e7 100644
--- a/webrtc/modules/audio_coding/codecs/isac/main/source/structs.h
+++ b/webrtc/modules/audio_coding/codecs/isac/main/source/structs.h
@@ -25,10 +25,10 @@
typedef struct Bitstreamstruct {
- WebRtc_UWord8 stream[STREAM_SIZE_MAX];
- WebRtc_UWord32 W_upper;
- WebRtc_UWord32 streamval;
- WebRtc_UWord32 stream_index;
+ uint8_t stream[STREAM_SIZE_MAX];
+ uint32_t W_upper;
+ uint32_t streamval;
+ uint32_t stream_index;
} Bitstr;
@@ -149,32 +149,32 @@
typedef struct {
/* Previous frame length (in ms) */
- WebRtc_Word32 prev_frame_length;
+ int32_t prev_frame_length;
/* Previous RTP timestamp from received
packet (in samples relative beginning) */
- WebRtc_Word32 prev_rec_rtp_number;
+ int32_t prev_rec_rtp_number;
/* Send timestamp for previous packet (in ms using timeGetTime()) */
- WebRtc_UWord32 prev_rec_send_ts;
+ uint32_t prev_rec_send_ts;
/* Arrival time for previous packet (in ms using timeGetTime()) */
- WebRtc_UWord32 prev_rec_arr_ts;
+ uint32_t prev_rec_arr_ts;
/* rate of previous packet, derived from RTP timestamps (in bits/s) */
float prev_rec_rtp_rate;
/* Time since the last update of the BN estimate (in ms) */
- WebRtc_UWord32 last_update_ts;
+ uint32_t last_update_ts;
/* Time since the last reduction (in ms) */
- WebRtc_UWord32 last_reduction_ts;
+ uint32_t last_reduction_ts;
/* How many times the estimate was updated in the beginning */
- WebRtc_Word32 count_tot_updates_rec;
+ int32_t count_tot_updates_rec;
/* The estimated bottle neck rate from there to here (in bits/s) */
- WebRtc_Word32 rec_bw;
+ int32_t rec_bw;
float rec_bw_inv;
float rec_bw_avg;
float rec_bw_avg_Q;
@@ -212,18 +212,18 @@
// been detected upstream
int hsn_detect_snd;
- WebRtc_UWord32 start_wait_period;
+ uint32_t start_wait_period;
int in_wait_period;
int change_to_WB;
- WebRtc_UWord32 senderTimestamp;
- WebRtc_UWord32 receiverTimestamp;
+ uint32_t senderTimestamp;
+ uint32_t receiverTimestamp;
//enum IsacSamplingRate incomingStreamSampFreq;
- WebRtc_UWord16 numConsecLatePkts;
+ uint16_t numConsecLatePkts;
float consecLatency;
- WebRtc_Word16 inWaitLatePkts;
+ int16_t inWaitLatePkts;
} BwEstimatorstr;
@@ -268,7 +268,7 @@
int startIdx;
/* Frame length in samples */
- WebRtc_Word16 framelength;
+ int16_t framelength;
/* Pitch Gain */
int pitchGain_index[2];
@@ -284,9 +284,9 @@
double LPCcoeffs_hi[(ORDERHI+1)*SUBFRAMES*2];
/* Encode Spec */
- WebRtc_Word16 fre[FRAMESAMPLES];
- WebRtc_Word16 fim[FRAMESAMPLES];
- WebRtc_Word16 AvgPitchGain[2];
+ int16_t fre[FRAMESAMPLES];
+ int16_t fim[FRAMESAMPLES];
+ int16_t AvgPitchGain[2];
/* Used in adaptive mode only */
int minBytes;
@@ -302,8 +302,8 @@
Bitstr bitStreamObj;
- WebRtc_Word16 realFFT[FRAMESAMPLES_HALF];
- WebRtc_Word16 imagFFT[FRAMESAMPLES_HALF];
+ int16_t realFFT[FRAMESAMPLES_HALF];
+ int16_t imagFFT[FRAMESAMPLES_HALF];
} ISACUBSaveEncDataStruct;
@@ -319,29 +319,29 @@
ISAC_SaveEncData_t SaveEnc_obj;
int buffer_index;
- WebRtc_Word16 current_framesamples;
+ int16_t current_framesamples;
float data_buffer_float[FRAMESAMPLES_30ms];
int frame_nb;
double bottleneck;
- WebRtc_Word16 new_framelength;
+ int16_t new_framelength;
double s2nr;
/* Maximum allowed number of bits for a 30 msec packet */
- WebRtc_Word16 payloadLimitBytes30;
+ int16_t payloadLimitBytes30;
/* Maximum allowed number of bits for a 30 msec packet */
- WebRtc_Word16 payloadLimitBytes60;
+ int16_t payloadLimitBytes60;
/* Maximum allowed number of bits for both 30 and 60 msec packet */
- WebRtc_Word16 maxPayloadBytes;
+ int16_t maxPayloadBytes;
/* Maximum allowed rate in bytes per 30 msec packet */
- WebRtc_Word16 maxRateInBytes;
+ int16_t maxRateInBytes;
/*---
If set to 1 iSAC will not adapt the frame-size, if used in
channel-adaptive mode. The initial value will be used for all rates.
---*/
- WebRtc_Word16 enforceFrameSize;
+ int16_t enforceFrameSize;
/*-----
This records the BWE index the encoder injected into the bit-stream.
@@ -350,7 +350,7 @@
a recursive procedure (WebRtcIsac_GetDownlinkBwJitIndexImpl) and has to be
called only once per each encode.
-----*/
- WebRtc_Word16 lastBWIdx;
+ int16_t lastBWIdx;
} ISACLBEncStruct;
typedef struct {
@@ -366,14 +366,14 @@
LB_TOTAL_DELAY_SAMPLES];
double bottleneck;
/* Maximum allowed number of bits for a 30 msec packet */
- //WebRtc_Word16 payloadLimitBytes30;
+ //int16_t payloadLimitBytes30;
/* Maximum allowed number of bits for both 30 and 60 msec packet */
- //WebRtc_Word16 maxPayloadBytes;
- WebRtc_Word16 maxPayloadSizeBytes;
+ //int16_t maxPayloadBytes;
+ int16_t maxPayloadSizeBytes;
double lastLPCVec[UB_LPC_ORDER];
- WebRtc_Word16 numBytesUsed;
- WebRtc_Word16 lastJitterInfo;
+ int16_t numBytesUsed;
+ int16_t lastJitterInfo;
} ISACUBEncStruct;
@@ -422,11 +422,11 @@
double loFiltGain[SUBFRAMES];
double hiFiltGain[SUBFRAMES];
/* Upper boundary of interval W */
- WebRtc_UWord32 W_upper;
- WebRtc_UWord32 streamval;
+ uint32_t W_upper;
+ uint32_t streamval;
/* Index to the current position in bytestream */
- WebRtc_UWord32 stream_index;
- WebRtc_UWord8 stream[3];
+ uint32_t stream_index;
+ uint8_t stream[3];
} transcode_obj;
@@ -442,19 +442,19 @@
double MaxDelay;
/* 0 = adaptive; 1 = instantaneous */
- WebRtc_Word16 codingMode;
+ int16_t codingMode;
// overall bottleneck of the codec
- WebRtc_Word32 bottleneck;
+ int32_t bottleneck;
// QMF Filter state
- WebRtc_Word32 analysisFBState1[FB_STATE_SIZE_WORD32];
- WebRtc_Word32 analysisFBState2[FB_STATE_SIZE_WORD32];
- WebRtc_Word32 synthesisFBState1[FB_STATE_SIZE_WORD32];
- WebRtc_Word32 synthesisFBState2[FB_STATE_SIZE_WORD32];
+ int32_t analysisFBState1[FB_STATE_SIZE_WORD32];
+ int32_t analysisFBState2[FB_STATE_SIZE_WORD32];
+ int32_t synthesisFBState1[FB_STATE_SIZE_WORD32];
+ int32_t synthesisFBState2[FB_STATE_SIZE_WORD32];
// Error Code
- WebRtc_Word16 errorCode;
+ int16_t errorCode;
// bandwidth of the encoded audio 8, 12 or 16 kHz
enum ISACBandwidth bandwidthKHz;
@@ -463,19 +463,19 @@
enum IsacSamplingRate decoderSamplingRateKHz;
// Flag to keep track of initializations, lower & upper-band
// encoder and decoder.
- WebRtc_Word16 initFlag;
+ int16_t initFlag;
// Flag to indicate signal bandwidth switch
- WebRtc_Word16 resetFlag_8kHz;
+ int16_t resetFlag_8kHz;
// Maximum allowed rate, measured in Bytes per 30 ms.
- WebRtc_Word16 maxRateBytesPer30Ms;
+ int16_t maxRateBytesPer30Ms;
// Maximum allowed payload-size, measured in Bytes.
- WebRtc_Word16 maxPayloadSizeBytes;
+ int16_t maxPayloadSizeBytes;
/* The expected sampling rate of the input signal. Valid values are 16000,
* 32000 and 48000. This is not the operation sampling rate of the codec.
* Input signals at 48 kHz are resampled to 32 kHz, then encoded. */
- WebRtc_UWord16 in_sample_rate_hz;
+ uint16_t in_sample_rate_hz;
/* State for the input-resampler. It is only used for 48 kHz input signals. */
int16_t state_in_resampler[SIZE_RESAMPLER_STATE];
} ISACMainStruct;
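Bitstr now carries the arithmetic-coder state as plain stdint types. A hedged reset sketch; the W_upper start value is an assumed convention (full interval), not something this patch establishes:

/* Hedged sketch, not part of the patch; needs <string.h> for memset. */
Bitstr streamobj;
memset(&streamobj, 0, sizeof(streamobj));  /* stream[], streamval, stream_index = 0 */
streamobj.W_upper = 0xFFFFFFFF;            /* assumed: start with the full interval */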
diff --git a/webrtc/modules/audio_coding/codecs/isac/main/source/transform.c b/webrtc/modules/audio_coding/codecs/isac/main/source/transform.c
index 97b801a..ea6b579 100644
--- a/webrtc/modules/audio_coding/codecs/isac/main/source/transform.c
+++ b/webrtc/modules/audio_coding/codecs/isac/main/source/transform.c
@@ -44,8 +44,8 @@
void WebRtcIsac_Time2Spec(double *inre1,
double *inre2,
- WebRtc_Word16 *outreQ7,
- WebRtc_Word16 *outimQ7,
+ int16_t *outreQ7,
+ int16_t *outimQ7,
FFTstr *fftstr_obj)
{
@@ -80,10 +80,10 @@
tmp1r = costab2[k];
tmp1i = sintab2[k];
- outreQ7[k] = (WebRtc_Word16)WebRtcIsac_lrint((xr * tmp1r - xi * tmp1i) * 128.0);
- outimQ7[k] = (WebRtc_Word16)WebRtcIsac_lrint((xr * tmp1i + xi * tmp1r) * 128.0);
- outreQ7[FRAMESAMPLES_HALF - 1 - k] = (WebRtc_Word16)WebRtcIsac_lrint((-yr * tmp1i - yi * tmp1r) * 128.0);
- outimQ7[FRAMESAMPLES_HALF - 1 - k] = (WebRtc_Word16)WebRtcIsac_lrint((-yr * tmp1r + yi * tmp1i) * 128.0);
+ outreQ7[k] = (int16_t)WebRtcIsac_lrint((xr * tmp1r - xi * tmp1i) * 128.0);
+ outimQ7[k] = (int16_t)WebRtcIsac_lrint((xr * tmp1i + xi * tmp1r) * 128.0);
+ outreQ7[FRAMESAMPLES_HALF - 1 - k] = (int16_t)WebRtcIsac_lrint((-yr * tmp1i - yi * tmp1r) * 128.0);
+ outimQ7[FRAMESAMPLES_HALF - 1 - k] = (int16_t)WebRtcIsac_lrint((-yr * tmp1r + yi * tmp1i) * 128.0);
}
}
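The Q7 outputs are simply the double-precision spectrum scaled by 2^7 = 128 and rounded into int16_t. A self-contained sketch of that conversion, using lrint() from <math.h> in place of WebRtcIsac_lrint():

/* Hedged sketch, not part of the patch. */
#include <math.h>
#include <stdint.h>

static int16_t DoubleToQ7(double x) {
  return (int16_t)lrint(x * 128.0);  /* e.g. 1.0 -> 128, -0.5 -> -64 */
}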
diff --git a/webrtc/modules/audio_coding/codecs/isac/main/test/ReleaseTest-API/ReleaseTest-API.cc b/webrtc/modules/audio_coding/codecs/isac/main/test/ReleaseTest-API/ReleaseTest-API.cc
index db34602..5506bd4 100644
--- a/webrtc/modules/audio_coding/codecs/isac/main/test/ReleaseTest-API/ReleaseTest-API.cc
+++ b/webrtc/modules/audio_coding/codecs/isac/main/test/ReleaseTest-API/ReleaseTest-API.cc
@@ -48,32 +48,32 @@
int framecnt, endfile;
int i, errtype, VADusage = 0, packetLossPercent = 0;
- WebRtc_Word16 CodingMode;
- WebRtc_Word32 bottleneck;
- WebRtc_Word16 framesize = 30; /* ms */
+ int16_t CodingMode;
+ int32_t bottleneck;
+ int16_t framesize = 30; /* ms */
int cur_framesmpls, err;
/* Runtime statistics */
double starttime, runtime, length_file;
- WebRtc_Word16 stream_len = 0;
- WebRtc_Word16 declen, lostFrame = 0, declenTC = 0;
+ int16_t stream_len = 0;
+ int16_t declen, lostFrame = 0, declenTC = 0;
- WebRtc_Word16 shortdata[SWBFRAMESAMPLES_10ms];
- WebRtc_Word16 vaddata[SWBFRAMESAMPLES_10ms*3];
- WebRtc_Word16 decoded[MAX_FRAMESAMPLES << 1];
- WebRtc_Word16 decodedTC[MAX_FRAMESAMPLES << 1];
- WebRtc_UWord16 streamdata[500];
- WebRtc_Word16 speechType[1];
- WebRtc_Word16 rateBPS = 0;
- WebRtc_Word16 fixedFL = 0;
- WebRtc_Word16 payloadSize = 0;
- WebRtc_Word32 payloadRate = 0;
+ int16_t shortdata[SWBFRAMESAMPLES_10ms];
+ int16_t vaddata[SWBFRAMESAMPLES_10ms*3];
+ int16_t decoded[MAX_FRAMESAMPLES << 1];
+ int16_t decodedTC[MAX_FRAMESAMPLES << 1];
+ uint16_t streamdata[500];
+ int16_t speechType[1];
+ int16_t rateBPS = 0;
+ int16_t fixedFL = 0;
+ int16_t payloadSize = 0;
+ int32_t payloadRate = 0;
int setControlBWE = 0;
short FL, testNum;
char version_number[20];
FILE *plFile;
- WebRtc_Word32 sendBN;
+ int32_t sendBN;
#ifdef _DEBUG
FILE *fy;
@@ -92,12 +92,12 @@
short useAssign = 0;
//FILE logFile;
bool doTransCoding = false;
- WebRtc_Word32 rateTransCoding = 0;
- WebRtc_UWord16 streamDataTransCoding[600];
- WebRtc_Word16 streamLenTransCoding = 0;
+ int32_t rateTransCoding = 0;
+ uint16_t streamDataTransCoding[600];
+ int16_t streamLenTransCoding = 0;
FILE* transCodingFile = NULL;
FILE* transcodingBitstream = NULL;
- WebRtc_UWord32 numTransCodingBytes = 0;
+ uint32_t numTransCodingBytes = 0;
/* only one structure used for ISAC encoder */
ISACStruct* ISAC_main_inst = NULL;
@@ -180,14 +180,14 @@
useAssign = 0;
//logFile = NULL;
char transCodingFileName[500];
- WebRtc_Word16 totFileLoop = 0;
- WebRtc_Word16 numFileLoop = 0;
+ int16_t totFileLoop = 0;
+ int16_t numFileLoop = 0;
for (i = 1; i < argc-2;i++)
{
if(!strcmp("-LOOP", argv[i]))
{
i++;
- totFileLoop = (WebRtc_Word16)atol(argv[i]);
+ totFileLoop = (int16_t)atol(argv[i]);
if(totFileLoop <= 0)
{
fprintf(stderr, "Invalid number of runs for the given input file, %d.", totFileLoop);
@@ -609,8 +609,8 @@
cout << "\n" << flush;
length_file = 0;
- WebRtc_Word16 bnIdxTC;
- WebRtc_Word16 jitterInfoTC;
+ int16_t bnIdxTC;
+ int16_t jitterInfoTC;
while (endfile == 0)
{
/* Call init functions at random, fault test number 7 */
@@ -663,7 +663,7 @@
{
stream_len = WebRtcIsac_Encode(ISAC_main_inst,
shortdata,
- (WebRtc_Word16*)streamdata);
+ (int16_t*)streamdata);
if((payloadSize != 0) && (stream_len > payloadSize))
{
if(testNum == 0)
@@ -681,40 +681,40 @@
{
if(doTransCoding)
{
- WebRtc_Word16 indexStream;
- WebRtc_UWord8 auxUW8;
+ int16_t indexStream;
+ uint8_t auxUW8;
/************************* Main Transcoding stream *******************************/
WebRtcIsac_GetDownLinkBwIndex(ISAC_main_inst, &bnIdxTC, &jitterInfoTC);
streamLenTransCoding = WebRtcIsac_GetNewBitStream(
ISAC_main_inst, bnIdxTC, jitterInfoTC, rateTransCoding,
- (WebRtc_Word16*)streamDataTransCoding, false);
+ (int16_t*)streamDataTransCoding, false);
if(streamLenTransCoding < 0)
{
fprintf(stderr, "Error in trans-coding\n");
exit(0);
}
- auxUW8 = (WebRtc_UWord8)(((streamLenTransCoding & 0xFF00) >> 8) & 0x00FF);
- if (fwrite(&auxUW8, sizeof(WebRtc_UWord8), 1,
+ auxUW8 = (uint8_t)(((streamLenTransCoding & 0xFF00) >> 8) & 0x00FF);
+ if (fwrite(&auxUW8, sizeof(uint8_t), 1,
transcodingBitstream) != 1) {
return -1;
}
- auxUW8 = (WebRtc_UWord8)(streamLenTransCoding & 0x00FF);
- if (fwrite(&auxUW8, sizeof(WebRtc_UWord8),
+ auxUW8 = (uint8_t)(streamLenTransCoding & 0x00FF);
+ if (fwrite(&auxUW8, sizeof(uint8_t),
1, transcodingBitstream) != 1) {
return -1;
}
- if (fwrite((WebRtc_UWord8*)streamDataTransCoding,
- sizeof(WebRtc_UWord8),
+ if (fwrite((uint8_t*)streamDataTransCoding,
+ sizeof(uint8_t),
streamLenTransCoding,
transcodingBitstream) !=
static_cast<size_t>(streamLenTransCoding)) {
return -1;
}
- WebRtcIsac_ReadBwIndex((WebRtc_Word16*)streamDataTransCoding, &indexStream);
+ WebRtcIsac_ReadBwIndex((int16_t*)streamDataTransCoding, &indexStream);
if(indexStream != bnIdxTC)
{
fprintf(stderr, "Error in inserting Bandwidth index into transcoding stream.\n");
@@ -781,13 +781,13 @@
if(lostFrame)
{
stream_len = WebRtcIsac_GetRedPayload(ISAC_main_inst,
- (WebRtc_Word16*)streamdata);
+ (int16_t*)streamdata);
if(doTransCoding)
{
streamLenTransCoding = WebRtcIsac_GetNewBitStream(
ISAC_main_inst, bnIdxTC, jitterInfoTC, rateTransCoding,
- (WebRtc_Word16*)streamDataTransCoding, true);
+ (int16_t*)streamDataTransCoding, true);
if(streamLenTransCoding < 0)
{
fprintf(stderr, "Error in RED trans-coding\n");
@@ -873,7 +873,7 @@
/* Call getFramelen, only used here for function test */
err = WebRtcIsac_ReadFrameLen(ISAC_main_inst,
- (WebRtc_Word16*)streamdata, &FL);
+ (int16_t*)streamdata, &FL);
if(err < 0)
{
/* exit if returned with error */
@@ -951,7 +951,7 @@
/* Write decoded speech frame to file */
if((declen > 0) && (numFileLoop == 0))
{
- if (fwrite(decoded, sizeof(WebRtc_Word16), declen,
+ if (fwrite(decoded, sizeof(int16_t), declen,
outp) != static_cast<size_t>(declen)) {
return -1;
}
@@ -959,7 +959,7 @@
if((declenTC > 0) && (numFileLoop == 0))
{
- if (fwrite(decodedTC, sizeof(WebRtc_Word16), declen,
+ if (fwrite(decodedTC, sizeof(int16_t), declen,
transCodingFile) != static_cast<size_t>(declen)) {
return -1;
}
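The transcoding bitstream file written above uses a simple framing: the 16-bit stream length goes out high byte first, then low byte, then the payload bytes. A hedged sketch of that framing gathered into one helper:

/* Hedged sketch, not part of the patch. */
#include <stdio.h>
#include <stdint.h>

static int WriteFramedPayload(FILE* f, const uint8_t* payload, int16_t len) {
  uint8_t hi = (uint8_t)((len >> 8) & 0xFF);
  uint8_t lo = (uint8_t)(len & 0xFF);
  if (fwrite(&hi, 1, 1, f) != 1) return -1;
  if (fwrite(&lo, 1, 1, f) != 1) return -1;
  if (fwrite(payload, 1, (size_t)len, f) != (size_t)len) return -1;
  return 0;
}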
diff --git a/webrtc/modules/audio_coding/codecs/isac/main/test/SwitchingSampRate/SwitchingSampRate.cc b/webrtc/modules/audio_coding/codecs/isac/main/test/SwitchingSampRate/SwitchingSampRate.cc
index 196eb11..fd70eca 100644
--- a/webrtc/modules/audio_coding/codecs/isac/main/test/SwitchingSampRate/SwitchingSampRate.cc
+++ b/webrtc/modules/audio_coding/codecs/isac/main/test/SwitchingSampRate/SwitchingSampRate.cc
@@ -36,7 +36,7 @@
FILE* outFile[MAX_NUM_CLIENTS];
ISACStruct* codecInstance[MAX_NUM_CLIENTS];
- WebRtc_Word32 resamplerState[MAX_NUM_CLIENTS][8];
+ int32_t resamplerState[MAX_NUM_CLIENTS][8];
int encoderSampRate[MAX_NUM_CLIENTS];
@@ -141,7 +141,7 @@
return -1;
}
memset(packetData[clientCntr], 0, sizeof(BottleNeckModel));
- memset(resamplerState[clientCntr], 0, sizeof(WebRtc_Word32) * 8);
+ memset(resamplerState[clientCntr], 0, sizeof(int32_t) * 8);
}
for(clientCntr = 0; clientCntr < NUM_CLIENTS; clientCntr++)
@@ -284,7 +284,7 @@
streamLen = WebRtcIsac_Encode(codecInstance[senderIdx],
audioBuff10ms, (short*)bitStream);
- WebRtc_Word16 ggg;
+ int16_t ggg;
if (streamLen > 0) {
if(( WebRtcIsac_ReadFrameLen(codecInstance[receiverIdx],
(short *) bitStream, &ggg))<0)
@@ -319,12 +319,12 @@
if(codingMode == 0)
{
- WebRtc_Word32 bn;
+ int32_t bn;
WebRtcIsac_GetUplinkBw(codecInstance[senderIdx], &bn);
printf("[%d] ", bn);
}
- //WebRtc_Word16 rateIndexLB;
- //WebRtc_Word16 rateIndexUB;
+ //int16_t rateIndexLB;
+ //int16_t rateIndexUB;
//WebRtcIsac_GetDownLinkBwIndex(codecInstance[receiverIdx],
// &rateIndexLB, &rateIndexUB);
//printf(" (%2d, %2d) ", rateIndexLB, rateIndexUB);
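A small aside on the memset above: taking the size from the array itself avoids restating the element type after renames like this one.

/* Hedged alternative spelling, not part of the patch. */
memset(resamplerState[clientCntr], 0, sizeof(resamplerState[clientCntr]));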
diff --git a/webrtc/modules/audio_coding/codecs/isac/main/test/simpleKenny.c b/webrtc/modules/audio_coding/codecs/isac/main/test/simpleKenny.c
index f1b78c2..4175890 100644
--- a/webrtc/modules/audio_coding/codecs/isac/main/test/simpleKenny.c
+++ b/webrtc/modules/audio_coding/codecs/isac/main/test/simpleKenny.c
@@ -62,21 +62,21 @@
unsigned long totalBitsRCU = 0;
unsigned long totalsmpls =0;
- WebRtc_Word32 bottleneck = 39;
- WebRtc_Word16 frameSize = 30; /* ms */
- WebRtc_Word16 codingMode = 1;
- WebRtc_Word16 shortdata[FRAMESAMPLES_SWB_10ms];
- WebRtc_Word16 decoded[MAX_FRAMESAMPLES_SWB];
- //WebRtc_UWord16 streamdata[1000];
- WebRtc_Word16 speechType[1];
- WebRtc_Word16 payloadLimit;
- WebRtc_Word32 rateLimit;
+ int32_t bottleneck = 39;
+ int16_t frameSize = 30; /* ms */
+ int16_t codingMode = 1;
+ int16_t shortdata[FRAMESAMPLES_SWB_10ms];
+ int16_t decoded[MAX_FRAMESAMPLES_SWB];
+ //uint16_t streamdata[1000];
+ int16_t speechType[1];
+ int16_t payloadLimit;
+ int32_t rateLimit;
ISACStruct* ISAC_main_inst;
- WebRtc_Word16 stream_len = 0;
- WebRtc_Word16 declen;
- WebRtc_Word16 err;
- WebRtc_Word16 cur_framesmpls;
+ int16_t stream_len = 0;
+ int16_t declen;
+ int16_t err;
+ int16_t cur_framesmpls;
int endfile;
#ifdef WIN32
double length_file;
@@ -95,17 +95,17 @@
FILE* averageFile;
int sampFreqKHz;
int samplesIn10Ms;
- WebRtc_Word16 maxStreamLen = 0;
+ int16_t maxStreamLen = 0;
char histFileName[500];
char averageFileName[500];
unsigned int hist[600];
unsigned int tmpSumStreamLen = 0;
unsigned int packetCntr = 0;
unsigned int lostPacketCntr = 0;
- WebRtc_UWord16 payload[600];
- WebRtc_UWord16 payloadRCU[600];
- WebRtc_UWord16 packetLossPercent = 0;
- WebRtc_Word16 rcuStreamLen = 0;
+ uint16_t payload[600];
+ uint16_t payloadRCU[600];
+ uint16_t packetLossPercent = 0;
+ int16_t rcuStreamLen = 0;
int onlyEncode;
int onlyDecode;
@@ -164,7 +164,7 @@
sscanf(argv[1], "%s", inname);
sscanf(argv[2], "%s", outname);
codingMode = readSwitch(argc, argv, "-I");
- sampFreqKHz = (WebRtc_Word16)readParamInt(argc, argv, "-fs", 32);
+ sampFreqKHz = (int16_t)readParamInt(argc, argv, "-fs", 32);
if(readParamString(argc, argv, "-h", histFileName, 500) > 0)
{
histFile = fopen(histFileName, "a");
@@ -218,7 +218,7 @@
valid values are 8 and 16.\n", sampFreqKHz);
exit(-1);
}
- payloadLimit = (WebRtc_Word16)readParamInt(argc, argv, "-plim", 400);
+ payloadLimit = (int16_t)readParamInt(argc, argv, "-plim", 400);
rateLimit = readParamInt(argc, argv, "-rlim", 106800);
if ((inp = fopen(inname,"rb")) == NULL) {
@@ -279,7 +279,7 @@
}
//{
- // WebRtc_Word32 b1, b2;
+ // int32_t b1, b2;
// FILE* fileID = fopen("GetBNTest.txt", "w");
// b2 = 32100;
// while(b2 <= 52000)
@@ -341,18 +341,18 @@
if(onlyDecode)
{
- WebRtc_UWord8 auxUW8;
+ uint8_t auxUW8;
size_t auxSizet;
- if(fread(&auxUW8, sizeof(WebRtc_UWord8), 1, inp) < 1)
+ if(fread(&auxUW8, sizeof(uint8_t), 1, inp) < 1)
{
break;
}
- stream_len = ((WebRtc_UWord8)auxUW8) << 8;
- if(fread(&auxUW8, sizeof(WebRtc_UWord8), 1, inp) < 1)
+ stream_len = ((uint8_t)auxUW8) << 8;
+ if(fread(&auxUW8, sizeof(uint8_t), 1, inp) < 1)
{
break;
}
- stream_len |= (WebRtc_UWord16)auxUW8;
+ stream_len |= (uint16_t)auxUW8;
auxSizet = (size_t)stream_len;
if(fread(payload, 1, auxSizet, inp) < auxSizet)
{
@@ -374,7 +374,7 @@
//-------- iSAC encoding ---------
stream_len = WebRtcIsac_Encode(ISAC_main_inst, shortdata,
- (WebRtc_Word16*)payload);
+ (int16_t*)payload);
if(stream_len < 0)
{
@@ -393,7 +393,7 @@
break;
}
- rcuStreamLen = WebRtcIsac_GetRedPayload(ISAC_main_inst, (WebRtc_Word16*)payloadRCU);
+ rcuStreamLen = WebRtcIsac_GetRedPayload(ISAC_main_inst, (int16_t*)payloadRCU);
get_arrival_time(cur_framesmpls, stream_len, bottleneck, &packetData,
sampFreqKHz * 1000, sampFreqKHz * 1000);
@@ -430,14 +430,14 @@
if(onlyEncode)
{
- WebRtc_UWord8 auxUW8;
- auxUW8 = (WebRtc_UWord8)(((stream_len & 0x7F00) >> 8) & 0xFF);
- if (fwrite(&auxUW8, sizeof(WebRtc_UWord8), 1, outp) != 1) {
+ uint8_t auxUW8;
+ auxUW8 = (uint8_t)(((stream_len & 0x7F00) >> 8) & 0xFF);
+ if (fwrite(&auxUW8, sizeof(uint8_t), 1, outp) != 1) {
return -1;
}
- auxUW8 = (WebRtc_UWord8)(stream_len & 0xFF);
- if (fwrite(&auxUW8, sizeof(WebRtc_UWord8), 1, outp) != 1) {
+ auxUW8 = (uint8_t)(stream_len & 0xFF);
+ if (fwrite(&auxUW8, sizeof(uint8_t), 1, outp) != 1) {
return -1;
}
if (fwrite(payload, 1, stream_len,
@@ -470,7 +470,7 @@
}
// Write decoded speech frame to file
- if (fwrite(decoded, sizeof(WebRtc_Word16),
+ if (fwrite(decoded, sizeof(int16_t),
declen, outp) != (size_t)declen) {
return -1;
}
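simpleKenny's decode-only path reads the same two-byte, high-byte-first length framing back (its encode-only path masks the high byte with 0x7F, so the length stays non-negative in an int16_t). A hedged reader-side sketch mirroring those lines:

/* Hedged sketch, not part of the patch. */
#include <stdio.h>
#include <stdint.h>

static int ReadFramedPayload(FILE* f, uint16_t* payload, int16_t* stream_len) {
  uint8_t b;
  if (fread(&b, 1, 1, f) < 1) return -1;
  *stream_len = (int16_t)(b << 8);
  if (fread(&b, 1, 1, f) < 1) return -1;
  *stream_len |= (uint16_t)b;
  if (fread(payload, 1, (size_t)*stream_len, f) < (size_t)*stream_len) return -1;
  return 0;
}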
diff --git a/webrtc/modules/audio_coding/codecs/pcm16b/include/pcm16b.h b/webrtc/modules/audio_coding/codecs/pcm16b/include/pcm16b.h
index e3cac4d..76ba2ff 100644
--- a/webrtc/modules/audio_coding/codecs/pcm16b/include/pcm16b.h
+++ b/webrtc/modules/audio_coding/codecs/pcm16b/include/pcm16b.h
@@ -35,9 +35,9 @@
* Returned value : Size in bytes of speechOut16b
*/
-WebRtc_Word16 WebRtcPcm16b_EncodeW16(WebRtc_Word16 *speechIn16b,
- WebRtc_Word16 len,
- WebRtc_Word16 *speechOut16b);
+int16_t WebRtcPcm16b_EncodeW16(int16_t *speechIn16b,
+ int16_t len,
+ int16_t *speechOut16b);
/****************************************************************************
* WebRtcPcm16b_Encode(...)
@@ -54,9 +54,9 @@
* Returned value : Size in bytes of speech8b
*/
-WebRtc_Word16 WebRtcPcm16b_Encode(WebRtc_Word16 *speech16b,
- WebRtc_Word16 len,
- unsigned char *speech8b);
+int16_t WebRtcPcm16b_Encode(int16_t *speech16b,
+ int16_t len,
+ unsigned char *speech8b);
/****************************************************************************
* WebRtcPcm16b_DecodeW16(...)
@@ -73,11 +73,11 @@
* Returned value : Samples in speechOut16b
*/
-WebRtc_Word16 WebRtcPcm16b_DecodeW16(void *inst,
- WebRtc_Word16 *speechIn16b,
- WebRtc_Word16 len,
- WebRtc_Word16 *speechOut16b,
- WebRtc_Word16* speechType);
+int16_t WebRtcPcm16b_DecodeW16(void *inst,
+ int16_t *speechIn16b,
+ int16_t len,
+ int16_t *speechOut16b,
+ int16_t* speechType);
/****************************************************************************
* WebRtcPcm16b_Decode(...)
@@ -95,9 +95,9 @@
*/
-WebRtc_Word16 WebRtcPcm16b_Decode(unsigned char *speech8b,
- WebRtc_Word16 len,
- WebRtc_Word16 *speech16b);
+int16_t WebRtcPcm16b_Decode(unsigned char *speech8b,
+ int16_t len,
+ int16_t *speech16b);
#ifdef __cplusplus
}
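A hedged usage sketch of the renamed byte-oriented pair; the 160-sample frame (10 ms at 16 kHz) and the buffer sizes are assumptions for illustration:

/* Hedged sketch, not part of the patch. */
int16_t pcm_in[160];   /* filled with captured audio elsewhere */
unsigned char packed[320];
int16_t pcm_out[160];

int16_t num_bytes = WebRtcPcm16b_Encode(pcm_in, 160, packed);           /* 320 */
int16_t num_samples = WebRtcPcm16b_Decode(packed, num_bytes, pcm_out);  /* 160 */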
diff --git a/webrtc/modules/audio_coding/codecs/pcm16b/pcm16b.c b/webrtc/modules/audio_coding/codecs/pcm16b/pcm16b.c
index 0cff5dd..04814b7 100644
--- a/webrtc/modules/audio_coding/codecs/pcm16b/pcm16b.c
+++ b/webrtc/modules/audio_coding/codecs/pcm16b/pcm16b.c
@@ -24,17 +24,17 @@
-/* Encoder with WebRtc_Word16 Output */
-WebRtc_Word16 WebRtcPcm16b_EncodeW16(WebRtc_Word16 *speechIn16b,
- WebRtc_Word16 len,
- WebRtc_Word16 *speechOut16b)
+/* Encoder with int16_t Output */
+int16_t WebRtcPcm16b_EncodeW16(int16_t *speechIn16b,
+ int16_t len,
+ int16_t *speechOut16b)
{
#ifdef WEBRTC_BIG_ENDIAN
WEBRTC_SPL_MEMCPY_W16(speechOut16b, speechIn16b, len);
#else
int i;
for (i=0;i<len;i++) {
- speechOut16b[i]=(((WebRtc_UWord16)speechIn16b[i])>>8)|((((WebRtc_UWord16)speechIn16b[i])<<8)&0xFF00);
+ speechOut16b[i]=(((uint16_t)speechIn16b[i])>>8)|((((uint16_t)speechIn16b[i])<<8)&0xFF00);
}
#endif
return(len<<1);
@@ -42,14 +42,14 @@
/* Encoder with char Output (old version) */
-WebRtc_Word16 WebRtcPcm16b_Encode(WebRtc_Word16 *speech16b,
- WebRtc_Word16 len,
- unsigned char *speech8b)
+int16_t WebRtcPcm16b_Encode(int16_t *speech16b,
+ int16_t len,
+ unsigned char *speech8b)
{
- WebRtc_Word16 samples=len*2;
- WebRtc_Word16 pos;
- WebRtc_Word16 short1;
- WebRtc_Word16 short2;
+ int16_t samples=len*2;
+ int16_t pos;
+ int16_t short1;
+ int16_t short2;
for (pos=0;pos<len;pos++) {
short1=HIGHEND & speech16b[pos];
short2=LOWEND & speech16b[pos];
@@ -61,21 +61,21 @@
}
-/* Decoder with WebRtc_Word16 Input instead of char when the WebRtc_Word16 Encoder is used */
-WebRtc_Word16 WebRtcPcm16b_DecodeW16(void *inst,
- WebRtc_Word16 *speechIn16b,
- WebRtc_Word16 len,
- WebRtc_Word16 *speechOut16b,
- WebRtc_Word16* speechType)
+/* Decoder with int16_t Input instead of char when the int16_t Encoder is used */
+int16_t WebRtcPcm16b_DecodeW16(void *inst,
+ int16_t *speechIn16b,
+ int16_t len,
+ int16_t *speechOut16b,
+ int16_t* speechType)
{
#ifdef WEBRTC_BIG_ENDIAN
- WEBRTC_SPL_MEMCPY_W8(speechOut16b, speechIn16b, ((len*sizeof(WebRtc_Word16)+1)>>1));
+ WEBRTC_SPL_MEMCPY_W8(speechOut16b, speechIn16b, ((len*sizeof(int16_t)+1)>>1));
#else
int i;
int samples=len>>1;
for (i=0;i<samples;i++) {
- speechOut16b[i]=(((WebRtc_UWord16)speechIn16b[i])>>8)|(((WebRtc_UWord16)(speechIn16b[i]&0xFF))<<8);
+ speechOut16b[i]=(((uint16_t)speechIn16b[i])>>8)|(((uint16_t)(speechIn16b[i]&0xFF))<<8);
}
#endif
@@ -88,13 +88,13 @@
}
/* "old" version of the decoder that uses char as input (not used in NetEq any more) */
-WebRtc_Word16 WebRtcPcm16b_Decode(unsigned char *speech8b,
- WebRtc_Word16 len,
- WebRtc_Word16 *speech16b)
+int16_t WebRtcPcm16b_Decode(unsigned char *speech8b,
+ int16_t len,
+ int16_t *speech16b)
{
- WebRtc_Word16 samples=len>>1;
- WebRtc_Word16 pos;
- WebRtc_Word16 shortval;
+ int16_t samples=len>>1;
+ int16_t pos;
+ int16_t shortval;
for (pos=0;pos<samples;pos++) {
shortval=((unsigned short) speech8b[pos*2]);
shortval=(shortval<<8)&HIGHEND;
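On little-endian targets the W16 encoder and decoder are plain per-sample byte swaps, so a sample round-trips through its byte-swapped form. A hedged illustration:

/* Hedged sketch, not part of the patch. */
uint16_t x = 0x1234;
uint16_t swapped = (uint16_t)((x >> 8) | ((x << 8) & 0xFF00));           /* 0x3412 */
uint16_t back = (uint16_t)((swapped >> 8) | ((swapped << 8) & 0xFF00));  /* 0x1234 */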
diff --git a/webrtc/modules/audio_coding/main/interface/audio_coding_module.h b/webrtc/modules/audio_coding/main/interface/audio_coding_module.h
index 11c2556..a2e7efe 100644
--- a/webrtc/modules/audio_coding/main/interface/audio_coding_module.h
+++ b/webrtc/modules/audio_coding/main/interface/audio_coding_module.h
@@ -31,12 +31,12 @@
public:
virtual ~AudioPacketizationCallback() {}
- virtual WebRtc_Word32 SendData(
+ virtual int32_t SendData(
FrameType frame_type,
- WebRtc_UWord8 payload_type,
- WebRtc_UWord32 timestamp,
- const WebRtc_UWord8* payload_data,
- WebRtc_UWord16 payload_len_bytes,
+ uint8_t payload_type,
+ uint32_t timestamp,
+ const uint8_t* payload_data,
+ uint16_t payload_len_bytes,
const RTPFragmentationHeader* fragmentation) = 0;
};
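A hedged sketch of a minimal implementation of this callback against the new signature; the class name and the body are illustrative only:

// Hedged sketch, not part of the patch.
#include <stdio.h>

class LoggingPacketizationCallback : public AudioPacketizationCallback {
 public:
  virtual int32_t SendData(FrameType frame_type,
                           uint8_t payload_type,
                           uint32_t timestamp,
                           const uint8_t* payload_data,
                           uint16_t payload_len_bytes,
                           const RTPFragmentationHeader* fragmentation) {
    printf("pt=%u ts=%u bytes=%u\n", (unsigned)payload_type,
           (unsigned)timestamp, (unsigned)payload_len_bytes);
    return 0;
  }
};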
@@ -45,8 +45,8 @@
public:
virtual ~AudioCodingFeedback() {}
- virtual WebRtc_Word32 IncomingDtmf(const WebRtc_UWord8 digit_dtmf,
- const bool end) = 0;
+ virtual int32_t IncomingDtmf(const uint8_t digit_dtmf,
+ const bool end) = 0;
};
// Callback class used for reporting VAD decision
@@ -54,7 +54,7 @@
public:
virtual ~ACMVADCallback() {}
- virtual WebRtc_Word32 InFrameType(WebRtc_Word16 frameType) = 0;
+ virtual int32_t InFrameType(int16_t frameType) = 0;
};
// Callback class used for reporting receiver statistics
@@ -62,12 +62,12 @@
public:
virtual ~ACMVQMonCallback() {}
- virtual WebRtc_Word32 NetEqStatistics(
- const WebRtc_Word32 id, // current ACM id
- const WebRtc_UWord16 MIUsValid, // valid voice duration in ms
- const WebRtc_UWord16 MIUsReplaced, // concealed voice duration in ms
- const WebRtc_UWord8 eventFlags, // concealed voice flags
- const WebRtc_UWord16 delayMS) = 0; // average delay in ms
+ virtual int32_t NetEqStatistics(
+ const int32_t id, // current ACM id
+ const uint16_t MIUsValid, // valid voice duration in ms
+ const uint16_t MIUsReplaced, // concealed voice duration in ms
+ const uint8_t eventFlags, // concealed voice flags
+ const uint16_t delayMS) = 0; // average delay in ms
};
class AudioCodingModule: public Module {
@@ -79,7 +79,7 @@
///////////////////////////////////////////////////////////////////////////
// Creation and destruction of a ACM
//
- static AudioCodingModule* Create(const WebRtc_Word32 id);
+ static AudioCodingModule* Create(const int32_t id);
static void Destroy(AudioCodingModule* module);
@@ -88,16 +88,16 @@
//
///////////////////////////////////////////////////////////////////////////
- // WebRtc_UWord8 NumberOfCodecs()
+ // uint8_t NumberOfCodecs()
// Returns number of supported codecs.
//
// Return value:
// number of supported codecs.
///
- static WebRtc_UWord8 NumberOfCodecs();
+ static uint8_t NumberOfCodecs();
///////////////////////////////////////////////////////////////////////////
- // WebRtc_Word32 Codec()
+ // int32_t Codec()
// Get supported codec with list number.
//
// Input:
@@ -111,10 +111,10 @@
// -1 if the list number (list_id) is invalid.
// 0 if succeeded.
//
- static WebRtc_Word32 Codec(WebRtc_UWord8 list_id, CodecInst* codec);
+ static int32_t Codec(uint8_t list_id, CodecInst* codec);
///////////////////////////////////////////////////////////////////////////
- // WebRtc_Word32 Codec()
+ // int32_t Codec()
// Get supported codec with the given codec name, sampling frequency, and
// a given number of channels.
//
@@ -132,11 +132,11 @@
// -1 if no codec matches the given parameters.
// 0 if succeeded.
//
- static WebRtc_Word32 Codec(const char* payload_name, CodecInst* codec,
- int sampling_freq_hz, int channels);
+ static int32_t Codec(const char* payload_name, CodecInst* codec,
+ int sampling_freq_hz, int channels);
///////////////////////////////////////////////////////////////////////////
- // WebRtc_Word32 Codec()
+ // int32_t Codec()
//
// Returns the list number of the given codec name, sampling frequency, and
// a given number of channels.
@@ -151,7 +151,7 @@
// if the codec is found, the index of the codec in the list,
// -1 if the codec is not found.
//
- static WebRtc_Word32 Codec(const char* payload_name, int sampling_freq_hz,
+ static int32_t Codec(const char* payload_name, int sampling_freq_hz,
int channels);
///////////////////////////////////////////////////////////////////////////
@@ -173,7 +173,7 @@
//
///////////////////////////////////////////////////////////////////////////
- // WebRtc_Word32 InitializeSender()
+ // int32_t InitializeSender()
// Any encoder-related state of ACM will be initialized to the
// same state when ACM is created. This will not interrupt or
// affect decoding functionality of ACM. ACM will lose all the
@@ -184,10 +184,10 @@
// -1 if failed to initialize,
// 0 if succeeded.
//
- virtual WebRtc_Word32 InitializeSender() = 0;
+ virtual int32_t InitializeSender() = 0;
///////////////////////////////////////////////////////////////////////////
- // WebRtc_Word32 ResetEncoder()
+ // int32_t ResetEncoder()
// This API resets the states of encoder. All the encoder settings, such as
// send-codec or VAD/DTX, will be preserved.
//
@@ -195,10 +195,10 @@
// -1 if failed to initialize,
// 0 if succeeded.
//
- virtual WebRtc_Word32 ResetEncoder() = 0;
+ virtual int32_t ResetEncoder() = 0;
///////////////////////////////////////////////////////////////////////////
- // WebRtc_Word32 RegisterSendCodec()
+ // int32_t RegisterSendCodec()
// Registers a codec, specified by |send_codec|, as sending codec.
// This API can be called multiple times to register Codec. The last codec
// registered overwrites the previous ones.
@@ -224,7 +224,7 @@
// -1 if failed to initialize,
// 0 if succeeded.
//
- virtual WebRtc_Word32 RegisterSendCodec(const CodecInst& send_codec) = 0;
+ virtual int32_t RegisterSendCodec(const CodecInst& send_codec) = 0;
///////////////////////////////////////////////////////////////////////////
// int RegisterSecondarySendCodec()
@@ -254,7 +254,7 @@
virtual void UnregisterSecondarySendCodec() = 0;
///////////////////////////////////////////////////////////////////////////
- // WebRtc_Word32 SendCodec()
+ // int32_t SendCodec()
// Get parameters for the codec currently registered as send codec.
//
// Output:
@@ -264,7 +264,7 @@
// -1 if failed to get send codec,
// 0 if succeeded.
//
- virtual WebRtc_Word32 SendCodec(CodecInst* current_send_codec) const = 0;
+ virtual int32_t SendCodec(CodecInst* current_send_codec) const = 0;
///////////////////////////////////////////////////////////////////////////
// int SecondarySendCodec()
@@ -280,27 +280,27 @@
virtual int SecondarySendCodec(CodecInst* secondary_codec) const = 0;
///////////////////////////////////////////////////////////////////////////
- // WebRtc_Word32 SendFrequency()
+ // int32_t SendFrequency()
// Get the sampling frequency of the current encoder in Hertz.
//
// Return value:
// positive; sampling frequency [Hz] of the current encoder.
// -1 if an error has happened.
//
- virtual WebRtc_Word32 SendFrequency() const = 0;
+ virtual int32_t SendFrequency() const = 0;
///////////////////////////////////////////////////////////////////////////
- // WebRtc_Word32 Bitrate()
+ // int32_t Bitrate()
// Get encoding bit-rate in bits per second.
//
// Return value:
// positive; encoding rate in bits/sec,
// -1 if an error has happened.
//
- virtual WebRtc_Word32 SendBitrate() const = 0;
+ virtual int32_t SendBitrate() const = 0;
///////////////////////////////////////////////////////////////////////////
- // WebRtc_Word32 SetReceivedEstimatedBandwidth()
+ // int32_t SetReceivedEstimatedBandwidth()
// Set available bandwidth [bits/sec] of the up-link channel.
// This information is used for traffic shaping, and is currently only
// supported if iSAC is the send codec.
@@ -312,11 +312,11 @@
// -1 if error occurred in setting the bandwidth,
// 0 bandwidth is set successfully.
//
- virtual WebRtc_Word32 SetReceivedEstimatedBandwidth(
- const WebRtc_Word32 bw) = 0;
+ virtual int32_t SetReceivedEstimatedBandwidth(
+ const int32_t bw) = 0;
///////////////////////////////////////////////////////////////////////////
- // WebRtc_Word32 RegisterTransportCallback()
+ // int32_t RegisterTransportCallback()
// Register a transport callback which will be called to deliver
// the encoded buffers whenever Process() is called and a
// bit-stream is ready.
@@ -331,11 +331,11 @@
// -1 if the transport callback could not be registered
// 0 if registration is successful.
//
- virtual WebRtc_Word32 RegisterTransportCallback(
+ virtual int32_t RegisterTransportCallback(
AudioPacketizationCallback* transport) = 0;
///////////////////////////////////////////////////////////////////////////
- // WebRtc_Word32 Add10MsData()
+ // int32_t Add10MsData()
// Add 10MS of raw (PCM) audio data to the encoder. If the sampling
// frequency of the audio does not match the sampling frequency of the
// current encoder ACM will resample the audio.
@@ -352,14 +352,14 @@
// < -1 to add the frame to the buffer n samples had to be
// overwritten, -n is the return value in this case.
//
- virtual WebRtc_Word32 Add10MsData(const AudioFrame& audio_frame) = 0;
+ virtual int32_t Add10MsData(const AudioFrame& audio_frame) = 0;
///////////////////////////////////////////////////////////////////////////
// (FEC) Forward Error Correction
//
///////////////////////////////////////////////////////////////////////////
- // WebRtc_Word32 SetFECStatus(const bool enable)
+ // int32_t SetFECStatus(const bool enable)
// configure FEC status i.e. on/off.
//
// RFC 2198 describes a solution which has a single payload type which
@@ -380,7 +380,7 @@
// -1 if failed to set FEC status,
// 0 if succeeded.
//
- virtual WebRtc_Word32 SetFECStatus(const bool enable_fec) = 0;
+ virtual int32_t SetFECStatus(const bool enable_fec) = 0;
///////////////////////////////////////////////////////////////////////////
// bool FECStatus()
@@ -397,7 +397,7 @@
//
///////////////////////////////////////////////////////////////////////////
- // WebRtc_Word32 SetVAD()
+ // int32_t SetVAD()
// If DTX is enabled & the codec does not have internal DTX/VAD
// WebRtc VAD will be automatically enabled and |enable_vad| is ignored.
//
@@ -422,12 +422,12 @@
// -1 if failed to set up VAD/DTX,
// 0 if succeeded.
//
- virtual WebRtc_Word32 SetVAD(const bool enable_dtx = true,
+ virtual int32_t SetVAD(const bool enable_dtx = true,
const bool enable_vad = false,
const ACMVADMode vad_mode = VADNormal) = 0;
///////////////////////////////////////////////////////////////////////////
- // WebRtc_Word32 VAD()
+ // int32_t VAD()
// Get VAD status.
//
// Outputs:
@@ -441,11 +441,11 @@
// -1 if fails to retrieve the setting of DTX/VAD,
// 0 if succeeded.
//
- virtual WebRtc_Word32 VAD(bool* dtx_enabled, bool* vad_enabled,
+ virtual int32_t VAD(bool* dtx_enabled, bool* vad_enabled,
ACMVADMode* vad_mode) const = 0;
///////////////////////////////////////////////////////////////////////////
- // WebRtc_Word32 ReplaceInternalDTXWithWebRtc()
+ // int32_t ReplaceInternalDTXWithWebRtc()
// Used to replace codec internal DTX scheme with WebRtc. This is only
// supported for G729, where this call replaces AnnexB with WebRtc DTX.
//
@@ -458,11 +458,11 @@
// -1 if failed to replace codec internal DTX with WebRtc,
// 0 if succeeded.
//
- virtual WebRtc_Word32 ReplaceInternalDTXWithWebRtc(
+ virtual int32_t ReplaceInternalDTXWithWebRtc(
const bool use_webrtc_dtx = false) = 0;
///////////////////////////////////////////////////////////////////////////
- // WebRtc_Word32 IsInternalDTXReplacedWithWebRtc()
+ // int32_t IsInternalDTXReplacedWithWebRtc()
// Get status if the codec internal DTX (when such exists) is replaced with
// WebRtc DTX. This is only supported for G729.
//
@@ -475,11 +475,11 @@
// -1 if failed to determine if codec internal DTX is replaced with WebRtc,
// 0 if succeeded.
//
- virtual WebRtc_Word32 IsInternalDTXReplacedWithWebRtc(
+ virtual int32_t IsInternalDTXReplacedWithWebRtc(
bool* uses_webrtc_dtx) = 0;
///////////////////////////////////////////////////////////////////////////
- // WebRtc_Word32 RegisterVADCallback()
+ // int32_t RegisterVADCallback()
// Call this method to register a callback function which is called
// any time that ACM encounters an empty frame. That is a frame which is
// recognized inactive. Depending on the codec WebRtc VAD or internal codec
@@ -492,14 +492,14 @@
// -1 if failed to register the callback function.
// 0 if the callback function is registered successfully.
//
- virtual WebRtc_Word32 RegisterVADCallback(ACMVADCallback* vad_callback) = 0;
+ virtual int32_t RegisterVADCallback(ACMVADCallback* vad_callback) = 0;
///////////////////////////////////////////////////////////////////////////
// Receiver
//
///////////////////////////////////////////////////////////////////////////
- // WebRtc_Word32 InitializeReceiver()
+ // int32_t InitializeReceiver()
// Any decoder-related state of ACM will be initialized to the
// same state when ACM is created. This will not interrupt or
// affect encoding functionality of ACM. ACM would lose all the
@@ -511,10 +511,10 @@
// -1 if failed to initialize,
// 0 if succeeded.
//
- virtual WebRtc_Word32 InitializeReceiver() = 0;
+ virtual int32_t InitializeReceiver() = 0;
///////////////////////////////////////////////////////////////////////////
- // WebRtc_Word32 ResetDecoder()
+ // int32_t ResetDecoder()
// This API resets the states of decoders. ACM will not lose any
// decoder-related settings, such as registered codecs.
//
@@ -522,29 +522,29 @@
// -1 if failed to initialize,
// 0 if succeeded.
//
- virtual WebRtc_Word32 ResetDecoder() = 0;
+ virtual int32_t ResetDecoder() = 0;
///////////////////////////////////////////////////////////////////////////
- // WebRtc_Word32 ReceiveFrequency()
+ // int32_t ReceiveFrequency()
// Get sampling frequency of the last received payload.
//
// Return value:
// non-negative the sampling frequency in Hertz.
// -1 if an error has occurred.
//
- virtual WebRtc_Word32 ReceiveFrequency() const = 0;
+ virtual int32_t ReceiveFrequency() const = 0;
///////////////////////////////////////////////////////////////////////////
- // WebRtc_Word32 PlayoutFrequency()
+ // int32_t PlayoutFrequency()
// Get sampling frequency of audio played out.
//
// Return value:
// the sampling frequency in Hertz.
//
- virtual WebRtc_Word32 PlayoutFrequency() const = 0;
+ virtual int32_t PlayoutFrequency() const = 0;
///////////////////////////////////////////////////////////////////////////
- // WebRtc_Word32 RegisterReceiveCodec()
+ // int32_t RegisterReceiveCodec()
// Register possible decoders, can be called multiple times for
// codecs, CNG-NB, CNG-WB, CNG-SWB, AVT and RED.
//
@@ -557,11 +557,11 @@
// -1 if failed to register the codec
// 0 if the codec registered successfully.
//
- virtual WebRtc_Word32 RegisterReceiveCodec(
+ virtual int32_t RegisterReceiveCodec(
const CodecInst& receive_codec) = 0;
///////////////////////////////////////////////////////////////////////////
- // WebRtc_Word32 UnregisterReceiveCodec()
+ // int32_t UnregisterReceiveCodec()
// Unregister the codec currently registered with a specific payload type
// from the list of possible receive codecs.
//
@@ -573,11 +573,11 @@
// -1 if fails to unregister.
// 0 if the given codec is successfully unregistered.
//
- virtual WebRtc_Word32 UnregisterReceiveCodec(
- const WebRtc_Word16 payload_type) = 0;
+ virtual int32_t UnregisterReceiveCodec(
+ const int16_t payload_type) = 0;
///////////////////////////////////////////////////////////////////////////
- // WebRtc_Word32 ReceiveCodec()
+ // int32_t ReceiveCodec()
// Get the codec associated with last received payload.
//
// Output:
@@ -589,10 +589,10 @@
// -1 if failed to retrieve the codec,
// 0 if the codec is successfully retrieved.
//
- virtual WebRtc_Word32 ReceiveCodec(CodecInst* curr_receive_codec) const = 0;
+ virtual int32_t ReceiveCodec(CodecInst* curr_receive_codec) const = 0;
///////////////////////////////////////////////////////////////////////////
- // WebRtc_Word32 IncomingPacket()
+ // int32_t IncomingPacket()
// Call this function to insert a parsed RTP packet into ACM.
//
// Inputs:
@@ -605,12 +605,12 @@
// -1 if failed to push in the payload
// 0 if payload is successfully pushed in.
//
- virtual WebRtc_Word32 IncomingPacket(const WebRtc_UWord8* incoming_payload,
- const WebRtc_Word32 payload_len_bytes,
+ virtual int32_t IncomingPacket(const uint8_t* incoming_payload,
+ const int32_t payload_len_bytes,
const WebRtcRTPHeader& rtp_info) = 0;
///////////////////////////////////////////////////////////////////////////
- // WebRtc_Word32 IncomingPayload()
+ // int32_t IncomingPayload()
// Call this API to push incoming payloads when there is no rtp-info.
// The rtp-info will be created in ACM. One usage for this API is when
// pre-encoded files are pushed in ACM
@@ -633,13 +633,13 @@
// -1 if failed to push in the payload
// 0 if payload is successfully pushed in.
//
- virtual WebRtc_Word32 IncomingPayload(const WebRtc_UWord8* incoming_payload,
- const WebRtc_Word32 payload_len_byte,
- const WebRtc_UWord8 payload_type,
- const WebRtc_UWord32 timestamp = 0) = 0;
+ virtual int32_t IncomingPayload(const uint8_t* incoming_payload,
+ const int32_t payload_len_byte,
+ const uint8_t payload_type,
+ const uint32_t timestamp = 0) = 0;
///////////////////////////////////////////////////////////////////////////
- // WebRtc_Word32 SetMinimumPlayoutDelay()
+ // int32_t SetMinimumPlayoutDelay()
// Set Minimum playout delay, used for lip-sync.
//
// Input:
@@ -649,10 +649,10 @@
// -1 if failed to set the delay,
// 0 if the minimum delay is set.
//
- virtual WebRtc_Word32 SetMinimumPlayoutDelay(const WebRtc_Word32 time_ms) = 0;
+ virtual int32_t SetMinimumPlayoutDelay(const int32_t time_ms) = 0;
///////////////////////////////////////////////////////////////////////////
- // WebRtc_Word32 RegisterIncomingMessagesCallback()
+ // int32_t RegisterIncomingMessagesCallback()
// Used by the module to deliver messages to the codec module/application
// when a DTMF tone is detected, as well as when it stopped.
//
@@ -669,13 +669,13 @@
// -1 if the message callback could not be registered
// 0 if registration is successful.
//
- virtual WebRtc_Word32
+ virtual int32_t
RegisterIncomingMessagesCallback(
AudioCodingFeedback* in_message_callback,
const ACMCountries cpt = ACMDisableCountryDetection) = 0;
///////////////////////////////////////////////////////////////////////////
- // WebRtc_Word32 SetDtmfPlayoutStatus()
+ // int32_t SetDtmfPlayoutStatus()
// Configure DTMF playout, i.e. whether out-of-band
// DTMF tones are played or not.
//
@@ -687,7 +687,7 @@
// -1 if the method fails, e.g. DTMF playout is not supported.
// 0 if the status is set successfully.
//
- virtual WebRtc_Word32 SetDtmfPlayoutStatus(const bool enable) = 0;
+ virtual int32_t SetDtmfPlayoutStatus(const bool enable) = 0;
///////////////////////////////////////////////////////////////////////////
// bool DtmfPlayoutStatus()
@@ -700,7 +700,7 @@
virtual bool DtmfPlayoutStatus() const = 0;
///////////////////////////////////////////////////////////////////////////
- // WebRtc_Word32 SetBackgroundNoiseMode()
+ // int32_t SetBackgroundNoiseMode()
// Sets the mode of the background noise playout in an event of long
// packet loss burst. For the valid modes see the declaration of
// ACMBackgroundNoiseMode in audio_coding_module_typedefs.h.
@@ -712,11 +712,11 @@
// -1 if failed to set the mode.
// 0 if succeeded in setting the mode.
//
- virtual WebRtc_Word32 SetBackgroundNoiseMode(
+ virtual int32_t SetBackgroundNoiseMode(
const ACMBackgroundNoiseMode mode) = 0;
///////////////////////////////////////////////////////////////////////////
- // WebRtc_Word32 BackgroundNoiseMode()
+ // int32_t BackgroundNoiseMode()
// Call this method to get the mode of the background noise playout.
// Playout of background noise is a result of a long packet loss burst.
// See ACMBackgroundNoiseMode in audio_coding_module_typedefs.h for
@@ -730,26 +730,26 @@
// -1 if ACM failed to output a valid mode.
//
// TODO(tlegrand): Change function to return the mode.
- virtual WebRtc_Word32 BackgroundNoiseMode(ACMBackgroundNoiseMode* mode) = 0;
+ virtual int32_t BackgroundNoiseMode(ACMBackgroundNoiseMode* mode) = 0;
///////////////////////////////////////////////////////////////////////////
- // WebRtc_Word32 PlayoutTimestamp()
+ // int32_t PlayoutTimestamp()
// The send timestamp of an RTP packet is associated with the decoded
// audio of the packet in question. This function returns the timestamp of
// the latest audio obtained by calling PlayoutData10ms().
//
// Input:
- // -timestamp : a reference to a WebRtc_UWord32 to receive the
+ // -timestamp : a reference to a uint32_t to receive the
// timestamp.
// Return value:
// 0 if the output is a correct timestamp.
// -1 if failed to output the correct timestamp.
//
// TODO(tlegrand): Change function to return the timestamp.
- virtual WebRtc_Word32 PlayoutTimestamp(WebRtc_UWord32* timestamp) = 0;
+ virtual int32_t PlayoutTimestamp(uint32_t* timestamp) = 0;
///////////////////////////////////////////////////////////////////////////
- // WebRtc_Word32 DecoderEstimatedBandwidth()
+ // int32_t DecoderEstimatedBandwidth()
// Get the estimate of the bandwidth, in bits/second, based on the incoming
// stream. This API is useful in one-way communication scenarios, where
// the bandwidth information is sent in an out-of-band fashion.
@@ -759,10 +759,10 @@
// >0 bandwidth in bits/second.
// -1 if failed to get a bandwidth estimate.
//
- virtual WebRtc_Word32 DecoderEstimatedBandwidth() const = 0;
+ virtual int32_t DecoderEstimatedBandwidth() const = 0;
///////////////////////////////////////////////////////////////////////////
- // WebRtc_Word32 SetPlayoutMode()
+ // int32_t SetPlayoutMode()
// Call this API to set the playout mode. Playout mode could be optimized
// for i) voice, ii) FAX or iii) streaming. In Voice mode, NetEQ is
// optimized to deliver highest audio quality while maintaining a minimum
@@ -781,7 +781,7 @@
// -1 if failed to set the mode,
// 0 if succeeding.
//
- virtual WebRtc_Word32 SetPlayoutMode(const AudioPlayoutMode mode) = 0;
+ virtual int32_t SetPlayoutMode(const AudioPlayoutMode mode) = 0;
///////////////////////////////////////////////////////////////////////////
// AudioPlayoutMode PlayoutMode()
@@ -799,7 +799,7 @@
virtual AudioPlayoutMode PlayoutMode() const = 0;
///////////////////////////////////////////////////////////////////////////
- // WebRtc_Word32 PlayoutData10Ms(
+ // int32_t PlayoutData10Ms(
// Get 10 milliseconds of raw audio data for playout, at the given sampling
// frequency. ACM will perform resampling if required.
//
@@ -818,7 +818,7 @@
// -1 if the function fails,
// 0 if the function succeeds.
//
- virtual WebRtc_Word32 PlayoutData10Ms(WebRtc_Word32 desired_freq_hz,
+ virtual int32_t PlayoutData10Ms(int32_t desired_freq_hz,
AudioFrame* audio_frame) = 0;
///////////////////////////////////////////////////////////////////////////
@@ -827,7 +827,7 @@
//
///////////////////////////////////////////////////////////////////////////
- // WebRtc_Word16 SetReceiveVADMode()
+ // int16_t SetReceiveVADMode()
// Configure VAD aggressiveness on the incoming stream.
//
// Input:
@@ -840,7 +840,7 @@
// -1 if fails to set the mode,
// 0 if the mode is set successfully.
//
- virtual WebRtc_Word16 SetReceiveVADMode(const ACMVADMode mode) = 0;
+ virtual int16_t SetReceiveVADMode(const ACMVADMode mode) = 0;
///////////////////////////////////////////////////////////////////////////
// ACMVADMode ReceiveVADMode()
@@ -859,7 +859,7 @@
//
///////////////////////////////////////////////////////////////////////////
- // WebRtc_Word32 SetISACMaxRate()
+ // int32_t SetISACMaxRate()
// Set the maximum instantaneous rate of iSAC. For a payload of B bits
// with a frame-size of T sec the instantaneous rate is B/T bits per
// second. Therefore, (B/T < |max_rate_bps|) and
@@ -873,11 +873,11 @@
// -1 if failed to set the maximum rate.
// 0 if the maximum rate is set successfully.
//
- virtual WebRtc_Word32 SetISACMaxRate(
- const WebRtc_UWord32 max_rate_bps) = 0;
+ virtual int32_t SetISACMaxRate(
+ const uint32_t max_rate_bps) = 0;
///////////////////////////////////////////////////////////////////////////
- // WebRtc_Word32 SetISACMaxPayloadSize()
+ // int32_t SetISACMaxPayloadSize()
// Set the maximum payload size of iSAC packets. No iSAC payload,
// regardless of its frame-size, may exceed the given limit. For
// an iSAC payload of size B bits and frame-size T seconds we have;
@@ -891,11 +891,11 @@
// -1 if failed to set the maximum payload-size.
// 0 if the given length is set successfully.
//
- virtual WebRtc_Word32 SetISACMaxPayloadSize(
- const WebRtc_UWord16 max_payload_len_bytes) = 0;
+ virtual int32_t SetISACMaxPayloadSize(
+ const uint16_t max_payload_len_bytes) = 0;
///////////////////////////////////////////////////////////////////////////
- // WebRtc_Word32 ConfigISACBandwidthEstimator()
+ // int32_t ConfigISACBandwidthEstimator()
// Call this function to configure the bandwidth estimator of ISAC.
// During the adaptation of bit-rate, iSAC automatically adjusts the
// frame-size (either 30 or 60 ms) to save on RTP header. The initial
@@ -918,9 +918,9 @@
// -1 if failed to configure the bandwidth estimator,
// 0 if the configuration was successfully applied.
//
- virtual WebRtc_Word32 ConfigISACBandwidthEstimator(
- const WebRtc_UWord8 init_frame_size_ms,
- const WebRtc_UWord16 init_rate_bps,
+ virtual int32_t ConfigISACBandwidthEstimator(
+ const uint8_t init_frame_size_ms,
+ const uint16_t init_rate_bps,
const bool enforce_frame_size = false) = 0;
///////////////////////////////////////////////////////////////////////////
@@ -928,7 +928,7 @@
//
///////////////////////////////////////////////////////////////////////////
- // WebRtc_Word32 NetworkStatistics()
+ // int32_t NetworkStatistics()
// Get network statistics.
//
// Input:
@@ -938,7 +938,7 @@
// -1 if failed to set the network statistics,
// 0 if statistics are set successfully.
//
- virtual WebRtc_Word32 NetworkStatistics(
+ virtual int32_t NetworkStatistics(
ACMNetworkStatistics* network_statistics) const = 0;
//
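To make the migrated receive-side signatures above concrete, a minimal usage sketch follows. It is not part of the commit: it assumes the interface class is AudioCodingModule (as the header name suggests), that a valid module pointer, a parsed WebRtcRTPHeader and an AudioFrame already exist, and it only exercises calls whose signatures appear in the hunks above (namespace qualifiers omitted).

// Sketch only; acm, payload, payload_len, rtp_header and frame are assumed
// to be set up elsewhere -- creating the module is not part of this change.
int32_t PushPacketAndPullAudio(AudioCodingModule* acm,
                               const uint8_t* payload,
                               int32_t payload_len,
                               const WebRtcRTPHeader& rtp_header,
                               AudioFrame* frame) {
  // Insert the parsed RTP packet; 0 on success, -1 on failure.
  if (acm->IncomingPacket(payload, payload_len, rtp_header) != 0)
    return -1;
  // Ask for at least 80 ms of playout delay (e.g. for lip-sync).
  if (acm->SetMinimumPlayoutDelay(80) != 0)
    return -1;
  // Pull 10 ms of audio; ACM resamples to 16 kHz if required.
  if (acm->PlayoutData10Ms(16000, frame) != 0)
    return -1;
  // Timestamp of the latest audio delivered by PlayoutData10Ms().
  uint32_t playout_timestamp = 0;
  return acm->PlayoutTimestamp(&playout_timestamp);
}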
diff --git a/webrtc/modules/audio_coding/main/interface/audio_coding_module_typedefs.h b/webrtc/modules/audio_coding/main/interface/audio_coding_module_typedefs.h
index dace4b8..03a7df8 100644
--- a/webrtc/modules/audio_coding/main/interface/audio_coding_module_typedefs.h
+++ b/webrtc/modules/audio_coding/main/interface/audio_coding_module_typedefs.h
@@ -167,14 +167,14 @@
// -maxWaitingTimeMs : max packet waiting time in the buffer
// -addedSamples : samples inserted because of packet loss in off mode
typedef struct {
- WebRtc_UWord16 currentBufferSize;
- WebRtc_UWord16 preferredBufferSize;
+ uint16_t currentBufferSize;
+ uint16_t preferredBufferSize;
bool jitterPeaksFound;
- WebRtc_UWord16 currentPacketLossRate;
- WebRtc_UWord16 currentDiscardRate;
- WebRtc_UWord16 currentExpandRate;
- WebRtc_UWord16 currentPreemptiveRate;
- WebRtc_UWord16 currentAccelerateRate;
+ uint16_t currentPacketLossRate;
+ uint16_t currentDiscardRate;
+ uint16_t currentExpandRate;
+ uint16_t currentPreemptiveRate;
+ uint16_t currentAccelerateRate;
int32_t clockDriftPPM;
int meanWaitingTimeMs;
int medianWaitingTimeMs;
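As a hypothetical illustration of how the renamed uint16_t statistics fields above would be consumed through NetworkStatistics() (declared in the previous file), here is a short sketch; the struct name ACMNetworkStatistics is taken from that declaration, everything else is assumed and not part of the commit.

#include <cstdio>

// Sketch only: acm is assumed to be a valid AudioCodingModule pointer.
void PrintJitterBufferStats(AudioCodingModule* acm) {
  ACMNetworkStatistics stats;
  if (acm->NetworkStatistics(&stats) != 0)
    return;  // -1: no valid statistics available.
  std::printf("buffer: current=%d preferred=%d jitter_peaks=%d\n",
              static_cast<int>(stats.currentBufferSize),
              static_cast<int>(stats.preferredBufferSize),
              stats.jitterPeaksFound ? 1 : 0);
  std::printf("loss=%d discard=%d expand=%d accelerate=%d drift_ppm=%d\n",
              static_cast<int>(stats.currentPacketLossRate),
              static_cast<int>(stats.currentDiscardRate),
              static_cast<int>(stats.currentExpandRate),
              static_cast<int>(stats.currentAccelerateRate),
              static_cast<int>(stats.clockDriftPPM));
  std::printf("waiting time: mean=%d ms median=%d ms\n",
              stats.meanWaitingTimeMs, stats.medianWaitingTimeMs);
}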
diff --git a/webrtc/modules/audio_coding/main/source/acm_amr.cc b/webrtc/modules/audio_coding/main/source/acm_amr.cc
index 249fe7b..8e8d6d5 100644
--- a/webrtc/modules/audio_coding/main/source/acm_amr.cc
+++ b/webrtc/modules/audio_coding/main/source/acm_amr.cc
@@ -50,7 +50,7 @@
namespace webrtc {
#ifndef WEBRTC_CODEC_AMR
-ACMAMR::ACMAMR(WebRtc_Word16 /* codec_id */)
+ACMAMR::ACMAMR(int16_t /* codec_id */)
: encoder_inst_ptr_(NULL),
decoder_inst_ptr_(NULL),
encoding_mode_(-1), // Invalid value.
@@ -64,39 +64,39 @@
return;
}
-WebRtc_Word16 ACMAMR::InternalEncode(WebRtc_UWord8* /* bitstream */,
- WebRtc_Word16* /* bitstream_len_byte */) {
+int16_t ACMAMR::InternalEncode(uint8_t* /* bitstream */,
+ int16_t* /* bitstream_len_byte */) {
return -1;
}
-WebRtc_Word16 ACMAMR::DecodeSafe(WebRtc_UWord8* /* bitstream */,
- WebRtc_Word16 /* bitstream_len_byte */,
- WebRtc_Word16* /* audio */,
- WebRtc_Word16* /* audio_samples */,
- WebRtc_Word8* /* speech_type */) {
+int16_t ACMAMR::DecodeSafe(uint8_t* /* bitstream */,
+ int16_t /* bitstream_len_byte */,
+ int16_t* /* audio */,
+ int16_t* /* audio_samples */,
+ int8_t* /* speech_type */) {
return -1;
}
-WebRtc_Word16 ACMAMR::EnableDTX() {
+int16_t ACMAMR::EnableDTX() {
return -1;
}
-WebRtc_Word16 ACMAMR::DisableDTX() {
+int16_t ACMAMR::DisableDTX() {
return -1;
}
-WebRtc_Word16 ACMAMR::InternalInitEncoder(
+int16_t ACMAMR::InternalInitEncoder(
WebRtcACMCodecParams* /* codec_params */) {
return -1;
}
-WebRtc_Word16 ACMAMR::InternalInitDecoder(
+int16_t ACMAMR::InternalInitDecoder(
WebRtcACMCodecParams* /* codec_params */) {
return -1;
}
-WebRtc_Word32 ACMAMR::CodecDef(WebRtcNetEQ_CodecDef& /* codec_def */,
- const CodecInst& /* codec_inst */) {
+int32_t ACMAMR::CodecDef(WebRtcNetEQ_CodecDef& /* codec_def */,
+ const CodecInst& /* codec_inst */) {
return -1;
}
@@ -104,7 +104,7 @@
return NULL;
}
-WebRtc_Word16 ACMAMR::InternalCreateEncoder() {
+int16_t ACMAMR::InternalCreateEncoder() {
return -1;
}
@@ -112,7 +112,7 @@
return;
}
-WebRtc_Word16 ACMAMR::InternalCreateDecoder() {
+int16_t ACMAMR::InternalCreateDecoder() {
return -1;
}
@@ -120,7 +120,7 @@
return;
}
-WebRtc_Word16 ACMAMR::SetBitRateSafe(const WebRtc_Word32 /* rate */) {
+int16_t ACMAMR::SetBitRateSafe(const int32_t /* rate */) {
return -1;
}
@@ -128,7 +128,7 @@
return;
}
-WebRtc_Word16 ACMAMR::SetAMREncoderPackingFormat(
+int16_t ACMAMR::SetAMREncoderPackingFormat(
ACMAMRPackingFormat /* packing_format */) {
return -1;
}
@@ -137,7 +137,7 @@
return AMRUndefined;
}
-WebRtc_Word16 ACMAMR::SetAMRDecoderPackingFormat(
+int16_t ACMAMR::SetAMRDecoderPackingFormat(
ACMAMRPackingFormat /* packing_format */) {
return -1;
}
@@ -157,7 +157,7 @@
#define WEBRTC_AMR_MR102 6
#define WEBRTC_AMR_MR122 7
-ACMAMR::ACMAMR(WebRtc_Word16 codec_id)
+ACMAMR::ACMAMR(int16_t codec_id)
: encoder_inst_ptr_(NULL),
decoder_inst_ptr_(NULL),
encoding_mode_(-1), // invalid value
@@ -181,9 +181,9 @@
return;
}
-WebRtc_Word16 ACMAMR::InternalEncode(WebRtc_UWord8* bitstream,
- WebRtc_Word16* bitstream_len_byte) {
- WebRtc_Word16 vad_decision = 1;
+int16_t ACMAMR::InternalEncode(uint8_t* bitstream,
+ int16_t* bitstream_len_byte) {
+ int16_t vad_decision = 1;
// Sanity check that the rate is set correctly. We might skip this
// check; if the rate is not set correctly, the initialization flag
// should be false and we should not get here.
@@ -195,7 +195,7 @@
*bitstream_len_byte = WebRtcAmr_Encode(encoder_inst_ptr_,
&in_audio_[in_audio_ix_read_],
frame_len_smpl_,
- (WebRtc_Word16*)bitstream,
+ (int16_t*)bitstream,
encoding_mode_);
// Update VAD, if internal DTX is used
@@ -203,7 +203,7 @@
if (*bitstream_len_byte <= (7 * frame_len_smpl_ / 160)) {
vad_decision = 0;
}
- for (WebRtc_Word16 n = 0; n < MAX_FRAME_SIZE_10MSEC; n++) {
+ for (int16_t n = 0; n < MAX_FRAME_SIZE_10MSEC; n++) {
vad_label_[n] = vad_decision;
}
}
@@ -212,15 +212,15 @@
return *bitstream_len_byte;
}
-WebRtc_Word16 ACMAMR::DecodeSafe(WebRtc_UWord8* /* bitstream */,
- WebRtc_Word16 /* bitstream_len_byte */,
- WebRtc_Word16* /* audio */,
- WebRtc_Word16* /* audio_samples */,
- WebRtc_Word8* /* speech_type */) {
+int16_t ACMAMR::DecodeSafe(uint8_t* /* bitstream */,
+ int16_t /* bitstream_len_byte */,
+ int16_t* /* audio */,
+ int16_t* /* audio_samples */,
+ int8_t* /* speech_type */) {
return 0;
}
-WebRtc_Word16 ACMAMR::EnableDTX() {
+int16_t ACMAMR::EnableDTX() {
if (dtx_enabled_) {
return 0;
} else if (encoder_exist_) { // check if encoder exist
@@ -235,7 +235,7 @@
}
}
-WebRtc_Word16 ACMAMR::DisableDTX() {
+int16_t ACMAMR::DisableDTX() {
if (!dtx_enabled_) {
return 0;
} else if (encoder_exist_) { // check if encoder exist
@@ -251,8 +251,8 @@
}
}
-WebRtc_Word16 ACMAMR::InternalInitEncoder(WebRtcACMCodecParams* codec_params) {
- WebRtc_Word16 status = SetBitRateSafe((codec_params->codec_inst).rate);
+int16_t ACMAMR::InternalInitEncoder(WebRtcACMCodecParams* codec_params) {
+ int16_t status = SetBitRateSafe((codec_params->codec_inst).rate);
status += (WebRtcAmr_EncoderInit(
encoder_inst_ptr_, ((codec_params->enable_dtx) ? 1 : 0)) < 0) ? -1 : 0;
status += (WebRtcAmr_EncodeBitmode(
@@ -260,16 +260,16 @@
return (status < 0) ? -1 : 0;
}
-WebRtc_Word16 ACMAMR::InternalInitDecoder(
+int16_t ACMAMR::InternalInitDecoder(
WebRtcACMCodecParams* /* codec_params */) {
- WebRtc_Word16 status =
+ int16_t status =
((WebRtcAmr_DecoderInit(decoder_inst_ptr_) < 0) ? -1 : 0);
status += WebRtcAmr_DecodeBitmode(decoder_inst_ptr_, decoder_packing_format_);
return (status < 0) ? -1 : 0;
}
-WebRtc_Word32 ACMAMR::CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst) {
+int32_t ACMAMR::CodecDef(WebRtcNetEQ_CodecDef& codec_def,
+ const CodecInst& codec_inst) {
if (!decoder_initialized_) {
// Todo:
// log error
@@ -289,7 +289,7 @@
return NULL;
}
-WebRtc_Word16 ACMAMR::InternalCreateEncoder() {
+int16_t ACMAMR::InternalCreateEncoder() {
return WebRtcAmr_CreateEnc(&encoder_inst_ptr_);
}
@@ -305,7 +305,7 @@
encoding_rate_ = 0; // invalid value
}
-WebRtc_Word16 ACMAMR::InternalCreateDecoder() {
+int16_t ACMAMR::InternalCreateDecoder() {
return WebRtcAmr_CreateDec(&decoder_inst_ptr_);
}
@@ -319,7 +319,7 @@
decoder_initialized_ = false;
}
-WebRtc_Word16 ACMAMR::SetBitRateSafe(const WebRtc_Word32 rate) {
+int16_t ACMAMR::SetBitRateSafe(const int32_t rate) {
switch (rate) {
case 4750: {
encoding_mode_ = WEBRTC_AMR_MR475;
@@ -376,7 +376,7 @@
return;
}
-WebRtc_Word16 ACMAMR::SetAMREncoderPackingFormat(
+int16_t ACMAMR::SetAMREncoderPackingFormat(
ACMAMRPackingFormat packing_format) {
if ((packing_format != AMRBandwidthEfficient) &&
(packing_format != AMROctetAlligned) &&
@@ -398,7 +398,7 @@
return encoder_packing_format_;
}
-WebRtc_Word16 ACMAMR::SetAMRDecoderPackingFormat(
+int16_t ACMAMR::SetAMRDecoderPackingFormat(
ACMAMRPackingFormat packing_format) {
if ((packing_format != AMRBandwidthEfficient) &&
(packing_format != AMROctetAlligned) &&
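One detail of the AMR hunks above that is easy to miss: with internal DTX enabled, InternalEncode marks the whole frame as non-speech whenever the encoder produced at most 7 bytes per 160-sample (20 ms) block, presumably because AMR SID/no-data frames are only a few bytes long. A standalone restatement of that heuristic, as a sketch rather than code from the commit:

// Mirrors the check in ACMAMR::InternalEncode: a tiny payload is treated
// as a DTX/SID frame. frame_len_smpl is the frame length in samples at
// 8 kHz (AMR-NB), payload_len_bytes the size returned by WebRtcAmr_Encode.
static bool AmrPayloadLooksLikeDtx(int16_t frame_len_smpl,
                                   int16_t payload_len_bytes) {
  return payload_len_bytes <= (7 * frame_len_smpl / 160);
}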
diff --git a/webrtc/modules/audio_coding/main/source/acm_amr.h b/webrtc/modules/audio_coding/main/source/acm_amr.h
index ebff0bb..9c87434 100644
--- a/webrtc/modules/audio_coding/main/source/acm_amr.h
+++ b/webrtc/modules/audio_coding/main/source/acm_amr.h
@@ -23,58 +23,58 @@
class ACMAMR: public ACMGenericCodec {
public:
- explicit ACMAMR(WebRtc_Word16 codec_id);
+ explicit ACMAMR(int16_t codec_id);
~ACMAMR();
// for FEC
ACMGenericCodec* CreateInstance(void);
- WebRtc_Word16 InternalEncode(WebRtc_UWord8* bitstream,
- WebRtc_Word16* bitstream_len_byte);
+ int16_t InternalEncode(uint8_t* bitstream,
+ int16_t* bitstream_len_byte);
- WebRtc_Word16 InternalInitEncoder(WebRtcACMCodecParams *codec_params);
+ int16_t InternalInitEncoder(WebRtcACMCodecParams *codec_params);
- WebRtc_Word16 InternalInitDecoder(WebRtcACMCodecParams *codec_params);
+ int16_t InternalInitDecoder(WebRtcACMCodecParams *codec_params);
- WebRtc_Word16 SetAMREncoderPackingFormat(
+ int16_t SetAMREncoderPackingFormat(
const ACMAMRPackingFormat packing_format);
ACMAMRPackingFormat AMREncoderPackingFormat() const;
- WebRtc_Word16 SetAMRDecoderPackingFormat(
+ int16_t SetAMRDecoderPackingFormat(
const ACMAMRPackingFormat packing_format);
ACMAMRPackingFormat AMRDecoderPackingFormat() const;
protected:
- WebRtc_Word16 DecodeSafe(WebRtc_UWord8* bitstream,
- WebRtc_Word16 bitstream_len_byte,
- WebRtc_Word16* audio, WebRtc_Word16* audio_samples,
- WebRtc_Word8* speech_type);
+ int16_t DecodeSafe(uint8_t* bitstream,
+ int16_t bitstream_len_byte,
+ int16_t* audio, int16_t* audio_samples,
+ int8_t* speech_type);
- WebRtc_Word32 CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst);
+ int32_t CodecDef(WebRtcNetEQ_CodecDef& codec_def,
+ const CodecInst& codec_inst);
void DestructEncoderSafe();
void DestructDecoderSafe();
- WebRtc_Word16 InternalCreateEncoder();
+ int16_t InternalCreateEncoder();
- WebRtc_Word16 InternalCreateDecoder();
+ int16_t InternalCreateDecoder();
void InternalDestructEncoderInst(void* ptr_inst);
- WebRtc_Word16 SetBitRateSafe(const WebRtc_Word32 rate);
+ int16_t SetBitRateSafe(const int32_t rate);
- WebRtc_Word16 EnableDTX();
+ int16_t EnableDTX();
- WebRtc_Word16 DisableDTX();
+ int16_t DisableDTX();
AMR_encinst_t_* encoder_inst_ptr_;
AMR_decinst_t_* decoder_inst_ptr_;
- WebRtc_Word16 encoding_mode_;
- WebRtc_Word16 encoding_rate_;
+ int16_t encoding_mode_;
+ int16_t encoding_rate_;
ACMAMRPackingFormat encoder_packing_format_;
ACMAMRPackingFormat decoder_packing_format_;
};
diff --git a/webrtc/modules/audio_coding/main/source/acm_amrwb.cc b/webrtc/modules/audio_coding/main/source/acm_amrwb.cc
index caa9494..fb86a3b 100644
--- a/webrtc/modules/audio_coding/main/source/acm_amrwb.cc
+++ b/webrtc/modules/audio_coding/main/source/acm_amrwb.cc
@@ -47,7 +47,7 @@
namespace webrtc {
#ifndef WEBRTC_CODEC_AMRWB
-ACMAMRwb::ACMAMRwb(WebRtc_Word16 /* codec_id */)
+ACMAMRwb::ACMAMRwb(int16_t /* codec_id */)
: encoder_inst_ptr_(NULL),
decoder_inst_ptr_(NULL),
encoding_mode_(-1), // invalid value
@@ -59,40 +59,40 @@
ACMAMRwb::~ACMAMRwb() {
}
-WebRtc_Word16 ACMAMRwb::InternalEncode(
- WebRtc_UWord8* /* bitstream */,
- WebRtc_Word16* /* bitstream_len_byte */) {
+int16_t ACMAMRwb::InternalEncode(
+ uint8_t* /* bitstream */,
+ int16_t* /* bitstream_len_byte */) {
return -1;
}
-WebRtc_Word16 ACMAMRwb::DecodeSafe(WebRtc_UWord8* /* bitstream */,
- WebRtc_Word16 /* bitstream_len_byte */,
- WebRtc_Word16* /* audio */,
- WebRtc_Word16* /* audio_samples */,
- WebRtc_Word8* /* speech_type */) {
+int16_t ACMAMRwb::DecodeSafe(uint8_t* /* bitstream */,
+ int16_t /* bitstream_len_byte */,
+ int16_t* /* audio */,
+ int16_t* /* audio_samples */,
+ int8_t* /* speech_type */) {
return -1;
}
-WebRtc_Word16 ACMAMRwb::EnableDTX() {
+int16_t ACMAMRwb::EnableDTX() {
return -1;
}
-WebRtc_Word16 ACMAMRwb::DisableDTX() {
+int16_t ACMAMRwb::DisableDTX() {
return -1;
}
-WebRtc_Word16 ACMAMRwb::InternalInitEncoder(
+int16_t ACMAMRwb::InternalInitEncoder(
WebRtcACMCodecParams* /* codec_params */) {
return -1;
}
-WebRtc_Word16 ACMAMRwb::InternalInitDecoder(
+int16_t ACMAMRwb::InternalInitDecoder(
WebRtcACMCodecParams* /* codec_params */) {
return -1;
}
-WebRtc_Word32 ACMAMRwb::CodecDef(WebRtcNetEQ_CodecDef& /* codec_def */,
- const CodecInst& /* codec_inst */) {
+int32_t ACMAMRwb::CodecDef(WebRtcNetEQ_CodecDef& /* codec_def */,
+ const CodecInst& /* codec_inst */) {
return -1;
}
@@ -101,7 +101,7 @@
return NULL;
}
-WebRtc_Word16 ACMAMRwb::InternalCreateEncoder() {
+int16_t ACMAMRwb::InternalCreateEncoder() {
return -1;
}
@@ -109,7 +109,7 @@
return;
}
-WebRtc_Word16 ACMAMRwb::InternalCreateDecoder() {
+int16_t ACMAMRwb::InternalCreateDecoder() {
return -1;
}
@@ -117,7 +117,7 @@
return;
}
-WebRtc_Word16 ACMAMRwb::SetBitRateSafe(const WebRtc_Word32 /* rate */) {
+int16_t ACMAMRwb::SetBitRateSafe(const int32_t /* rate */) {
return -1;
}
@@ -125,7 +125,7 @@
return;
}
-WebRtc_Word16 ACMAMRwb::SetAMRwbEncoderPackingFormat(
+int16_t ACMAMRwb::SetAMRwbEncoderPackingFormat(
ACMAMRPackingFormat /* packing_format */) {
return -1;
}
@@ -134,7 +134,7 @@
return AMRUndefined;
}
-WebRtc_Word16 ACMAMRwb::SetAMRwbDecoderPackingFormat(
+int16_t ACMAMRwb::SetAMRwbDecoderPackingFormat(
ACMAMRPackingFormat /* packing_format */) {
return -1;
}
@@ -155,7 +155,7 @@
#define AMRWB_MODE_23k 7
#define AMRWB_MODE_24k 8
-ACMAMRwb::ACMAMRwb(WebRtc_Word16 codec_id)
+ACMAMRwb::ACMAMRwb(int16_t codec_id)
: encoder_inst_ptr_(NULL),
decoder_inst_ptr_(NULL),
encoding_mode_(-1), // invalid value
@@ -179,9 +179,9 @@
return;
}
-WebRtc_Word16 ACMAMRwb::InternalEncode(WebRtc_UWord8* bitstream,
- WebRtc_Word16* bitstream_len_byte) {
- WebRtc_Word16 vad_decision = 1;
+int16_t ACMAMRwb::InternalEncode(uint8_t* bitstream,
+ int16_t* bitstream_len_byte) {
+ int16_t vad_decision = 1;
// Sanity check that the rate is set correctly. We might skip this
// check; if the rate is not set correctly, the initialization flag
// should be false and we should not get here.
@@ -192,7 +192,7 @@
*bitstream_len_byte = WebRtcAmrWb_Encode(encoder_inst_ptr_,
&in_audio_[in_audio_ix_read_],
frame_len_smpl_,
- (WebRtc_Word16*)bitstream,
+ (int16_t*)bitstream,
encoding_mode_);
// Update VAD, if internal DTX is used
@@ -200,7 +200,7 @@
if (*bitstream_len_byte <= (7 * frame_len_smpl_ / 160)) {
vad_decision = 0;
}
- for (WebRtc_Word16 n = 0; n < MAX_FRAME_SIZE_10MSEC; n++) {
+ for (int16_t n = 0; n < MAX_FRAME_SIZE_10MSEC; n++) {
vad_label_[n] = vad_decision;
}
}
@@ -210,15 +210,15 @@
return *bitstream_len_byte;
}
-WebRtc_Word16 ACMAMRwb::DecodeSafe(WebRtc_UWord8* /* bitstream */,
- WebRtc_Word16 /* bitstream_len_byte */,
- WebRtc_Word16* /* audio */,
- WebRtc_Word16* /* audio_samples */,
- WebRtc_Word8* /* speech_type */) {
+int16_t ACMAMRwb::DecodeSafe(uint8_t* /* bitstream */,
+ int16_t /* bitstream_len_byte */,
+ int16_t* /* audio */,
+ int16_t* /* audio_samples */,
+ int8_t* /* speech_type */) {
return 0;
}
-WebRtc_Word16 ACMAMRwb::EnableDTX() {
+int16_t ACMAMRwb::EnableDTX() {
if (dtx_enabled_) {
return 0;
} else if (encoder_exist_) { // check if encoder exist
@@ -233,7 +233,7 @@
}
}
-WebRtc_Word16 ACMAMRwb::DisableDTX() {
+int16_t ACMAMRwb::DisableDTX() {
if (!dtx_enabled_) {
return 0;
} else if (encoder_exist_) { // check if encoder exist
@@ -249,14 +249,14 @@
}
}
-WebRtc_Word16 ACMAMRwb::InternalInitEncoder(
+int16_t ACMAMRwb::InternalInitEncoder(
WebRtcACMCodecParams* codec_params) {
// sanity check
if (encoder_inst_ptr_ == NULL) {
return -1;
}
- WebRtc_Word16 status = SetBitRateSafe((codec_params->codec_inst).rate);
+ int16_t status = SetBitRateSafe((codec_params->codec_inst).rate);
status += (WebRtcAmrWb_EncoderInit(
encoder_inst_ptr_, ((codec_params->enable_dtx) ? 1 : 0)) < 0) ? -1 : 0;
status += (WebRtcAmrWb_EncodeBitmode(
@@ -264,16 +264,16 @@
return (status < 0) ? -1 : 0;
}
-WebRtc_Word16 ACMAMRwb::InternalInitDecoder(
+int16_t ACMAMRwb::InternalInitDecoder(
WebRtcACMCodecParams* /* codec_params */) {
- WebRtc_Word16 status = WebRtcAmrWb_DecodeBitmode(decoder_inst_ptr_,
+ int16_t status = WebRtcAmrWb_DecodeBitmode(decoder_inst_ptr_,
decoder_packing_format_);
status += ((WebRtcAmrWb_DecoderInit(decoder_inst_ptr_) < 0) ? -1 : 0);
return (status < 0) ? -1 : 0;
}
-WebRtc_Word32 ACMAMRwb::CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst) {
+int32_t ACMAMRwb::CodecDef(WebRtcNetEQ_CodecDef& codec_def,
+ const CodecInst& codec_inst) {
if (!decoder_initialized_) {
return -1;
}
@@ -292,7 +292,7 @@
return NULL;
}
-WebRtc_Word16 ACMAMRwb::InternalCreateEncoder() {
+int16_t ACMAMRwb::InternalCreateEncoder() {
return WebRtcAmrWb_CreateEnc(&encoder_inst_ptr_);
}
@@ -308,7 +308,7 @@
encoding_rate_ = 0;
}
-WebRtc_Word16 ACMAMRwb::InternalCreateDecoder() {
+int16_t ACMAMRwb::InternalCreateDecoder() {
return WebRtcAmrWb_CreateDec(&decoder_inst_ptr_);
}
@@ -322,7 +322,7 @@
decoder_initialized_ = false;
}
-WebRtc_Word16 ACMAMRwb::SetBitRateSafe(const WebRtc_Word32 rate) {
+int16_t ACMAMRwb::SetBitRateSafe(const int32_t rate) {
switch (rate) {
case 7000: {
encoding_mode_ = AMRWB_MODE_7k;
@@ -383,7 +383,7 @@
return;
}
-WebRtc_Word16 ACMAMRwb::SetAMRwbEncoderPackingFormat(
+int16_t ACMAMRwb::SetAMRwbEncoderPackingFormat(
ACMAMRPackingFormat packing_format) {
if ((packing_format != AMRBandwidthEfficient) &&
(packing_format != AMROctetAlligned) &&
@@ -405,7 +405,7 @@
return encoder_packing_format_;
}
-WebRtc_Word16 ACMAMRwb::SetAMRwbDecoderPackingFormat(
+int16_t ACMAMRwb::SetAMRwbDecoderPackingFormat(
ACMAMRPackingFormat packing_format) {
if ((packing_format != AMRBandwidthEfficient) &&
(packing_format != AMROctetAlligned) &&
diff --git a/webrtc/modules/audio_coding/main/source/acm_amrwb.h b/webrtc/modules/audio_coding/main/source/acm_amrwb.h
index 0f8d0bb..2cd301a 100644
--- a/webrtc/modules/audio_coding/main/source/acm_amrwb.h
+++ b/webrtc/modules/audio_coding/main/source/acm_amrwb.h
@@ -21,59 +21,59 @@
class ACMAMRwb: public ACMGenericCodec {
public:
- explicit ACMAMRwb(WebRtc_Word16 codec_id);
+ explicit ACMAMRwb(int16_t codec_id);
~ACMAMRwb();
// for FEC
ACMGenericCodec* CreateInstance(void);
- WebRtc_Word16 InternalEncode(WebRtc_UWord8* bitstream,
- WebRtc_Word16* bitstream_len_byte);
+ int16_t InternalEncode(uint8_t* bitstream,
+ int16_t* bitstream_len_byte);
- WebRtc_Word16 InternalInitEncoder(WebRtcACMCodecParams* codec_params);
+ int16_t InternalInitEncoder(WebRtcACMCodecParams* codec_params);
- WebRtc_Word16 InternalInitDecoder(WebRtcACMCodecParams* codec_params);
+ int16_t InternalInitDecoder(WebRtcACMCodecParams* codec_params);
- WebRtc_Word16 SetAMRwbEncoderPackingFormat(
+ int16_t SetAMRwbEncoderPackingFormat(
const ACMAMRPackingFormat packing_format);
ACMAMRPackingFormat AMRwbEncoderPackingFormat() const;
- WebRtc_Word16 SetAMRwbDecoderPackingFormat(
+ int16_t SetAMRwbDecoderPackingFormat(
const ACMAMRPackingFormat packing_format);
ACMAMRPackingFormat AMRwbDecoderPackingFormat() const;
protected:
- WebRtc_Word16 DecodeSafe(WebRtc_UWord8* bitstream,
- WebRtc_Word16 bitstream_len_byte,
- WebRtc_Word16* audio, WebRtc_Word16* audio_samples,
- WebRtc_Word8* speech_type);
+ int16_t DecodeSafe(uint8_t* bitstream,
+ int16_t bitstream_len_byte,
+ int16_t* audio, int16_t* audio_samples,
+ int8_t* speech_type);
- WebRtc_Word32 CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst);
+ int32_t CodecDef(WebRtcNetEQ_CodecDef& codec_def,
+ const CodecInst& codec_inst);
void DestructEncoderSafe();
void DestructDecoderSafe();
- WebRtc_Word16 InternalCreateEncoder();
+ int16_t InternalCreateEncoder();
- WebRtc_Word16 InternalCreateDecoder();
+ int16_t InternalCreateDecoder();
void InternalDestructEncoderInst(void* ptr_inst);
- WebRtc_Word16 SetBitRateSafe(const WebRtc_Word32 rate);
+ int16_t SetBitRateSafe(const int32_t rate);
- WebRtc_Word16 EnableDTX();
+ int16_t EnableDTX();
- WebRtc_Word16 DisableDTX();
+ int16_t DisableDTX();
AMRWB_encinst_t_* encoder_inst_ptr_;
AMRWB_decinst_t_* decoder_inst_ptr_;
- WebRtc_Word16 encoding_mode_;
- WebRtc_Word16 encoding_rate_;
+ int16_t encoding_mode_;
+ int16_t encoding_rate_;
ACMAMRPackingFormat encoder_packing_format_;
ACMAMRPackingFormat decoder_packing_format_;
};
diff --git a/webrtc/modules/audio_coding/main/source/acm_celt.cc b/webrtc/modules/audio_coding/main/source/acm_celt.cc
index e6ceda4..31d9e37 100644
--- a/webrtc/modules/audio_coding/main/source/acm_celt.cc
+++ b/webrtc/modules/audio_coding/main/source/acm_celt.cc
@@ -49,7 +49,7 @@
int16_t /* bitstream_len_byte */,
int16_t* /* audio */,
int16_t* /* audio_samples */,
- WebRtc_Word8* /* speech_type */) {
+ int8_t* /* speech_type */) {
return -1;
}
@@ -156,7 +156,7 @@
int16_t /* bitstream_len_byte */,
int16_t* /* audio */,
int16_t* /* audio_samples */,
- WebRtc_Word8* /* speech_type */) {
+ int8_t* /* speech_type */) {
return 0;
}
diff --git a/webrtc/modules/audio_coding/main/source/acm_celt.h b/webrtc/modules/audio_coding/main/source/acm_celt.h
index 4f6953e..ee68044 100644
--- a/webrtc/modules/audio_coding/main/source/acm_celt.h
+++ b/webrtc/modules/audio_coding/main/source/acm_celt.h
@@ -33,14 +33,12 @@
int16_t InternalInitDecoder(WebRtcACMCodecParams *codec_params);
protected:
- WebRtc_Word16 DecodeSafe(
+ int16_t DecodeSafe(
uint8_t* /* bitstream */,
int16_t /* bitstream_len_byte */,
int16_t* /* audio */,
int16_t* /* audio_samples */,
- // TODO(leozwang): use int8_t here when WebRtc_Word8 is properly typed.
- // http://code.google.com/p/webrtc/issues/detail?id=311
- WebRtc_Word8* /* speech_type */);
+ int8_t* /* speech_type */);
int32_t CodecDef(WebRtcNetEQ_CodecDef& codec_def,
const CodecInst& codec_inst);
diff --git a/webrtc/modules/audio_coding/main/source/acm_cng.cc b/webrtc/modules/audio_coding/main/source/acm_cng.cc
index ff8cea0..3549122 100644
--- a/webrtc/modules/audio_coding/main/source/acm_cng.cc
+++ b/webrtc/modules/audio_coding/main/source/acm_cng.cc
@@ -20,7 +20,7 @@
namespace webrtc {
-ACMCNG::ACMCNG(WebRtc_Word16 codec_id) {
+ACMCNG::ACMCNG(int16_t codec_id) {
encoder_inst_ptr_ = NULL;
decoder_inst_ptr_ = NULL;
codec_id_ = codec_id;
@@ -44,16 +44,16 @@
// should not be called normally
// instead the following function is called from inside
// ACMGenericCodec::ProcessFrameVADDTX
-WebRtc_Word16 ACMCNG::InternalEncode(WebRtc_UWord8* /* bitstream */,
- WebRtc_Word16* /* bitstream_len_byte */) {
+int16_t ACMCNG::InternalEncode(uint8_t* /* bitstream */,
+ int16_t* /* bitstream_len_byte */) {
return -1;
}
-WebRtc_Word16 ACMCNG::DecodeSafe(WebRtc_UWord8* /* bitstream */,
- WebRtc_Word16 /* bitstream_len_byte */,
- WebRtc_Word16* /* audio */,
- WebRtc_Word16* /* audio_samples */,
- WebRtc_Word8* /* speech_type */) {
+int16_t ACMCNG::DecodeSafe(uint8_t* /* bitstream */,
+ int16_t /* bitstream_len_byte */,
+ int16_t* /* audio */,
+ int16_t* /* audio_samples */,
+ int8_t* /* speech_type */) {
return 0;
}
@@ -61,18 +61,18 @@
// this function should not be called normally
// instead the following function is called from inside
// ACMGenericCodec::ProcessFrameVADDTX
-WebRtc_Word16 ACMCNG::InternalInitEncoder(
+int16_t ACMCNG::InternalInitEncoder(
WebRtcACMCodecParams* /* codec_params */) {
return -1;
}
-WebRtc_Word16 ACMCNG::InternalInitDecoder(
+int16_t ACMCNG::InternalInitDecoder(
WebRtcACMCodecParams* /* codec_params */) {
return WebRtcCng_InitDec(decoder_inst_ptr_);
}
-WebRtc_Word32 ACMCNG::CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst) {
+int32_t ACMCNG::CodecDef(WebRtcNetEQ_CodecDef& codec_def,
+ const CodecInst& codec_inst) {
if (!decoder_initialized_) {
// TODO(tlegrand): log error
return -1;
@@ -97,7 +97,7 @@
return NULL;
}
-WebRtc_Word16 ACMCNG::InternalCreateEncoder() {
+int16_t ACMCNG::InternalCreateEncoder() {
if (WebRtcCng_CreateEnc(&encoder_inst_ptr_) < 0) {
encoder_inst_ptr_ = NULL;
return -1;
@@ -115,7 +115,7 @@
encoder_initialized_ = false;
}
-WebRtc_Word16 ACMCNG::InternalCreateDecoder() {
+int16_t ACMCNG::InternalCreateDecoder() {
if (WebRtcCng_CreateDec(&decoder_inst_ptr_) < 0) {
decoder_inst_ptr_ = NULL;
return -1;
diff --git a/webrtc/modules/audio_coding/main/source/acm_cng.h b/webrtc/modules/audio_coding/main/source/acm_cng.h
index 3f77a85..7221fbe 100644
--- a/webrtc/modules/audio_coding/main/source/acm_cng.h
+++ b/webrtc/modules/audio_coding/main/source/acm_cng.h
@@ -21,49 +21,49 @@
class ACMCNG: public ACMGenericCodec {
public:
- explicit ACMCNG(WebRtc_Word16 codec_id);
+ explicit ACMCNG(int16_t codec_id);
~ACMCNG();
// for FEC
ACMGenericCodec* CreateInstance(void);
- WebRtc_Word16 InternalEncode(WebRtc_UWord8* bitstream,
- WebRtc_Word16* bitstream_len_byte);
+ int16_t InternalEncode(uint8_t* bitstream,
+ int16_t* bitstream_len_byte);
- WebRtc_Word16 InternalInitEncoder(WebRtcACMCodecParams *codec_params);
+ int16_t InternalInitEncoder(WebRtcACMCodecParams *codec_params);
- WebRtc_Word16 InternalInitDecoder(WebRtcACMCodecParams *codec_params);
+ int16_t InternalInitDecoder(WebRtcACMCodecParams *codec_params);
protected:
- WebRtc_Word16 DecodeSafe(WebRtc_UWord8* bitstream,
- WebRtc_Word16 bitstream_len_byte,
- WebRtc_Word16* audio, WebRtc_Word16* audio_samples,
- WebRtc_Word8* speech_type);
+ int16_t DecodeSafe(uint8_t* bitstream,
+ int16_t bitstream_len_byte,
+ int16_t* audio, int16_t* audio_samples,
+ int8_t* speech_type);
- WebRtc_Word32 CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst);
+ int32_t CodecDef(WebRtcNetEQ_CodecDef& codec_def,
+ const CodecInst& codec_inst);
void DestructEncoderSafe();
void DestructDecoderSafe();
- WebRtc_Word16 InternalCreateEncoder();
+ int16_t InternalCreateEncoder();
- WebRtc_Word16 InternalCreateDecoder();
+ int16_t InternalCreateDecoder();
void InternalDestructEncoderInst(void* ptr_inst);
- WebRtc_Word16 EnableDTX() {
+ int16_t EnableDTX() {
return -1;
}
- WebRtc_Word16 DisableDTX() {
+ int16_t DisableDTX() {
return -1;
}
WebRtcCngEncInst* encoder_inst_ptr_;
WebRtcCngDecInst* decoder_inst_ptr_;
- WebRtc_UWord16 samp_freq_hz_;
+ uint16_t samp_freq_hz_;
};
} // namespace webrtc
diff --git a/webrtc/modules/audio_coding/main/source/acm_common_defs.h b/webrtc/modules/audio_coding/main/source/acm_common_defs.h
index 61fe279..db901c1 100644
--- a/webrtc/modules/audio_coding/main/source/acm_common_defs.h
+++ b/webrtc/modules/audio_coding/main/source/acm_common_defs.h
@@ -103,13 +103,13 @@
// last_in_timestamp: same as AudioCodingModuleImpl::last_in_timestamp_
//
struct WebRtcACMAudioBuff {
- WebRtc_Word16 in_audio[AUDIO_BUFFER_SIZE_W16];
- WebRtc_Word16 in_audio_ix_read;
- WebRtc_Word16 in_audio_ix_write;
- WebRtc_UWord32 in_timestamp[TIMESTAMP_BUFFER_SIZE_W32];
- WebRtc_Word16 in_timestamp_ix_write;
- WebRtc_UWord32 last_timestamp;
- WebRtc_UWord32 last_in_timestamp;
+ int16_t in_audio[AUDIO_BUFFER_SIZE_W16];
+ int16_t in_audio_ix_read;
+ int16_t in_audio_ix_write;
+ uint32_t in_timestamp[TIMESTAMP_BUFFER_SIZE_W32];
+ int16_t in_timestamp_ix_write;
+ uint32_t last_timestamp;
+ uint32_t last_in_timestamp;
};
} // namespace webrtc
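The WebRtcACMAudioBuff hunk above is representative of the whole commit: each WebRtc_Word*/WebRtc_UWord* member or argument is mechanically replaced by the matching <stdint.h> type (Word8 -> int8_t, UWord8 -> uint8_t, Word16 -> int16_t, UWord16 -> uint16_t, Word32 -> int32_t, UWord32 -> uint32_t). A compile-time sanity check of the widths involved, included only as an illustration and not part of the change:

#include <stdint.h>

// Illustration only (C++11): the fixed-width replacements keep the sizes
// the old WebRtc_* names implied, so struct layouts such as
// WebRtcACMAudioBuff should be unaffected by the rename.
static_assert(sizeof(int8_t) == 1 && sizeof(uint8_t) == 1, "8-bit types");
static_assert(sizeof(int16_t) == 2 && sizeof(uint16_t) == 2, "16-bit types");
static_assert(sizeof(int32_t) == 4 && sizeof(uint32_t) == 4, "32-bit types");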
diff --git a/webrtc/modules/audio_coding/main/source/acm_dtmf_detection.cc b/webrtc/modules/audio_coding/main/source/acm_dtmf_detection.cc
index 6271eae..5820bc4 100644
--- a/webrtc/modules/audio_coding/main/source/acm_dtmf_detection.cc
+++ b/webrtc/modules/audio_coding/main/source/acm_dtmf_detection.cc
@@ -18,20 +18,20 @@
ACMDTMFDetection::~ACMDTMFDetection() {}
-WebRtc_Word16 ACMDTMFDetection::Enable(ACMCountries /* cpt */) {
+int16_t ACMDTMFDetection::Enable(ACMCountries /* cpt */) {
return -1;
}
-WebRtc_Word16 ACMDTMFDetection::Disable() {
+int16_t ACMDTMFDetection::Disable() {
return -1;
}
-WebRtc_Word16 ACMDTMFDetection::Detect(
- const WebRtc_Word16* /* in_audio_buff */,
- const WebRtc_UWord16 /* in_buff_len_word16 */,
- const WebRtc_Word32 /* in_freq_hz */,
+int16_t ACMDTMFDetection::Detect(
+ const int16_t* /* in_audio_buff */,
+ const uint16_t /* in_buff_len_word16 */,
+ const int32_t /* in_freq_hz */,
bool& /* tone_detected */,
- WebRtc_Word16& /* tone */) {
+ int16_t& /* tone */) {
return -1;
}
diff --git a/webrtc/modules/audio_coding/main/source/acm_dtmf_detection.h b/webrtc/modules/audio_coding/main/source/acm_dtmf_detection.h
index 4251d00..cd265fe 100644
--- a/webrtc/modules/audio_coding/main/source/acm_dtmf_detection.h
+++ b/webrtc/modules/audio_coding/main/source/acm_dtmf_detection.h
@@ -21,13 +21,13 @@
public:
ACMDTMFDetection();
~ACMDTMFDetection();
- WebRtc_Word16 Enable(ACMCountries cpt = ACMDisableCountryDetection);
- WebRtc_Word16 Disable();
- WebRtc_Word16 Detect(const WebRtc_Word16* in_audio_buff,
- const WebRtc_UWord16 in_buff_len_word16,
- const WebRtc_Word32 in_freq_hz,
- bool& tone_detected,
- WebRtc_Word16& tone);
+ int16_t Enable(ACMCountries cpt = ACMDisableCountryDetection);
+ int16_t Disable();
+ int16_t Detect(const int16_t* in_audio_buff,
+ const uint16_t in_buff_len_word16,
+ const int32_t in_freq_hz,
+ bool& tone_detected,
+ int16_t& tone);
private:
ACMResampler resampler_;
diff --git a/webrtc/modules/audio_coding/main/source/acm_dtmf_playout.cc b/webrtc/modules/audio_coding/main/source/acm_dtmf_playout.cc
index bf4f63b..6b91db9 100644
--- a/webrtc/modules/audio_coding/main/source/acm_dtmf_playout.cc
+++ b/webrtc/modules/audio_coding/main/source/acm_dtmf_playout.cc
@@ -21,7 +21,7 @@
#ifndef WEBRTC_CODEC_AVT
ACMDTMFPlayout::ACMDTMFPlayout(
- WebRtc_Word16 /* codec_id */) {
+ int16_t /* codec_id */) {
return;
}
@@ -29,33 +29,33 @@
return;
}
-WebRtc_Word16 ACMDTMFPlayout::InternalEncode(
- WebRtc_UWord8* /* bitstream */,
- WebRtc_Word16* /* bitstream_len_byte */) {
+int16_t ACMDTMFPlayout::InternalEncode(
+ uint8_t* /* bitstream */,
+ int16_t* /* bitstream_len_byte */) {
return -1;
}
-WebRtc_Word16 ACMDTMFPlayout::DecodeSafe(
- WebRtc_UWord8* /* bitstream */,
- WebRtc_Word16 /* bitstream_len_byte */,
- WebRtc_Word16* /* audio */,
- WebRtc_Word16* /* audio_samples */,
- WebRtc_Word8* /* speech_type */) {
+int16_t ACMDTMFPlayout::DecodeSafe(
+ uint8_t* /* bitstream */,
+ int16_t /* bitstream_len_byte */,
+ int16_t* /* audio */,
+ int16_t* /* audio_samples */,
+ int8_t* /* speech_type */) {
return -1;
}
-WebRtc_Word16 ACMDTMFPlayout::InternalInitEncoder(
+int16_t ACMDTMFPlayout::InternalInitEncoder(
WebRtcACMCodecParams* /* codec_params */) {
return -1;
}
-WebRtc_Word16 ACMDTMFPlayout::InternalInitDecoder(
+int16_t ACMDTMFPlayout::InternalInitDecoder(
WebRtcACMCodecParams* /* codec_params */) {
return -1;
}
-WebRtc_Word32 ACMDTMFPlayout::CodecDef(WebRtcNetEQ_CodecDef& /* codec_def */,
- const CodecInst& /* codec_inst */) {
+int32_t ACMDTMFPlayout::CodecDef(WebRtcNetEQ_CodecDef& /* codec_def */,
+ const CodecInst& /* codec_inst */) {
return -1;
}
@@ -63,11 +63,11 @@
return NULL;
}
-WebRtc_Word16 ACMDTMFPlayout::InternalCreateEncoder() {
+int16_t ACMDTMFPlayout::InternalCreateEncoder() {
return -1;
}
-WebRtc_Word16 ACMDTMFPlayout::InternalCreateDecoder() {
+int16_t ACMDTMFPlayout::InternalCreateDecoder() {
return -1;
}
@@ -85,7 +85,7 @@
#else //===================== Actual Implementation =======================
-ACMDTMFPlayout::ACMDTMFPlayout(WebRtc_Word16 codec_id) {
+ACMDTMFPlayout::ACMDTMFPlayout(int16_t codec_id) {
codec_id_ = codec_id;
}
@@ -93,37 +93,37 @@
return;
}
-WebRtc_Word16 ACMDTMFPlayout::InternalEncode(
- WebRtc_UWord8* /* bitstream */,
- WebRtc_Word16* /* bitstream_len_byte */) {
+int16_t ACMDTMFPlayout::InternalEncode(
+ uint8_t* /* bitstream */,
+ int16_t* /* bitstream_len_byte */) {
return 0;
}
-WebRtc_Word16 ACMDTMFPlayout::DecodeSafe(
- WebRtc_UWord8* /* bitstream */,
- WebRtc_Word16 /* bitstream_len_byte */,
- WebRtc_Word16* /* audio */,
- WebRtc_Word16* /* audio_samples */,
- WebRtc_Word8* /* speech_type */) {
+int16_t ACMDTMFPlayout::DecodeSafe(
+ uint8_t* /* bitstream */,
+ int16_t /* bitstream_len_byte */,
+ int16_t* /* audio */,
+ int16_t* /* audio_samples */,
+ int8_t* /* speech_type */) {
return 0;
}
-WebRtc_Word16 ACMDTMFPlayout::InternalInitEncoder(
+int16_t ACMDTMFPlayout::InternalInitEncoder(
WebRtcACMCodecParams* /* codec_params */) {
// This codec does not need initialization,
// DTMFPlayout has no instance
return 0;
}
-WebRtc_Word16 ACMDTMFPlayout::InternalInitDecoder(
+int16_t ACMDTMFPlayout::InternalInitDecoder(
WebRtcACMCodecParams* /* codec_params */) {
// This codec does not need initialization,
// DTMFPlayout has no instance
return 0;
}
-WebRtc_Word32 ACMDTMFPlayout::CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst) {
+int32_t ACMDTMFPlayout::CodecDef(WebRtcNetEQ_CodecDef& codec_def,
+ const CodecInst& codec_inst) {
// Fill up the structure by calling
// "SET_CODEC_PAR" & "SET_AVT_FUNCTION."
// Then call NetEQ to add the codec to its
@@ -137,12 +137,12 @@
return NULL;
}
-WebRtc_Word16 ACMDTMFPlayout::InternalCreateEncoder() {
+int16_t ACMDTMFPlayout::InternalCreateEncoder() {
// DTMFPlayout has no instance
return 0;
}
-WebRtc_Word16 ACMDTMFPlayout::InternalCreateDecoder() {
+int16_t ACMDTMFPlayout::InternalCreateDecoder() {
// DTMFPlayout has no instance
return 0;
}
diff --git a/webrtc/modules/audio_coding/main/source/acm_dtmf_playout.h b/webrtc/modules/audio_coding/main/source/acm_dtmf_playout.h
index 2607a2f..d07da47 100644
--- a/webrtc/modules/audio_coding/main/source/acm_dtmf_playout.h
+++ b/webrtc/modules/audio_coding/main/source/acm_dtmf_playout.h
@@ -17,35 +17,34 @@
class ACMDTMFPlayout: public ACMGenericCodec {
public:
- explicit ACMDTMFPlayout(WebRtc_Word16 codec_id);
+ explicit ACMDTMFPlayout(int16_t codec_id);
~ACMDTMFPlayout();
// for FEC
ACMGenericCodec* CreateInstance(void);
- WebRtc_Word16 InternalEncode(WebRtc_UWord8* bitstream,
- WebRtc_Word16* bitstream_len_byte);
+ int16_t InternalEncode(uint8_t* bitstream, int16_t* bitstream_len_byte);
- WebRtc_Word16 InternalInitEncoder(WebRtcACMCodecParams *codec_params);
+ int16_t InternalInitEncoder(WebRtcACMCodecParams *codec_params);
- WebRtc_Word16 InternalInitDecoder(WebRtcACMCodecParams *codec_params);
+ int16_t InternalInitDecoder(WebRtcACMCodecParams *codec_params);
protected:
- WebRtc_Word16 DecodeSafe(WebRtc_UWord8* bitstream,
- WebRtc_Word16 bitstream_len_byte,
- WebRtc_Word16* audio, WebRtc_Word16* audio_samples,
- WebRtc_Word8* speech_type);
+ int16_t DecodeSafe(uint8_t* bitstream,
+ int16_t bitstream_len_byte,
+ int16_t* audio, int16_t* audio_samples,
+ int8_t* speech_type);
- WebRtc_Word32 CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst);
+ int32_t CodecDef(WebRtcNetEQ_CodecDef& codec_def,
+ const CodecInst& codec_inst);
void DestructEncoderSafe();
void DestructDecoderSafe();
- WebRtc_Word16 InternalCreateEncoder();
+ int16_t InternalCreateEncoder();
- WebRtc_Word16 InternalCreateDecoder();
+ int16_t InternalCreateDecoder();
void InternalDestructEncoderInst(void* ptr_inst);
};
diff --git a/webrtc/modules/audio_coding/main/source/acm_g722.cc b/webrtc/modules/audio_coding/main/source/acm_g722.cc
index cc93801..1a023db 100644
--- a/webrtc/modules/audio_coding/main/source/acm_g722.cc
+++ b/webrtc/modules/audio_coding/main/source/acm_g722.cc
@@ -22,7 +22,7 @@
#ifndef WEBRTC_CODEC_G722
-ACMG722::ACMG722(WebRtc_Word16 /* codec_id */)
+ACMG722::ACMG722(int16_t /* codec_id */)
: ptr_enc_str_(NULL),
ptr_dec_str_(NULL),
encoder_inst_ptr_(NULL),
@@ -31,40 +31,40 @@
ACMG722::~ACMG722() {}
-WebRtc_Word32 ACMG722::Add10MsDataSafe(
- const WebRtc_UWord32 /* timestamp */,
- const WebRtc_Word16* /* data */,
- const WebRtc_UWord16 /* length_smpl */,
- const WebRtc_UWord8 /* audio_channel */) {
+int32_t ACMG722::Add10MsDataSafe(
+ const uint32_t /* timestamp */,
+ const int16_t* /* data */,
+ const uint16_t /* length_smpl */,
+ const uint8_t /* audio_channel */) {
return -1;
}
-WebRtc_Word16 ACMG722::InternalEncode(
- WebRtc_UWord8* /* bitstream */,
- WebRtc_Word16* /* bitstream_len_byte */) {
+int16_t ACMG722::InternalEncode(
+ uint8_t* /* bitstream */,
+ int16_t* /* bitstream_len_byte */) {
return -1;
}
-WebRtc_Word16 ACMG722::DecodeSafe(WebRtc_UWord8* /* bitstream */,
- WebRtc_Word16 /* bitstream_len_byte */,
- WebRtc_Word16* /* audio */,
- WebRtc_Word16* /* audio_samples */,
- WebRtc_Word8* /* speech_type */) {
+int16_t ACMG722::DecodeSafe(uint8_t* /* bitstream */,
+ int16_t /* bitstream_len_byte */,
+ int16_t* /* audio */,
+ int16_t* /* audio_samples */,
+ int8_t* /* speech_type */) {
return -1;
}
-WebRtc_Word16 ACMG722::InternalInitEncoder(
+int16_t ACMG722::InternalInitEncoder(
WebRtcACMCodecParams* /* codec_params */) {
return -1;
}
-WebRtc_Word16 ACMG722::InternalInitDecoder(
+int16_t ACMG722::InternalInitDecoder(
WebRtcACMCodecParams* /* codec_params */) {
return -1;
}
-WebRtc_Word32 ACMG722::CodecDef(WebRtcNetEQ_CodecDef& /* codec_def */,
- const CodecInst& /* codec_inst */) {
+int32_t ACMG722::CodecDef(WebRtcNetEQ_CodecDef& /* codec_def */,
+ const CodecInst& /* codec_inst */) {
return -1;
}
@@ -72,7 +72,7 @@
return NULL;
}
-WebRtc_Word16 ACMG722::InternalCreateEncoder() {
+int16_t ACMG722::InternalCreateEncoder() {
return -1;
}
@@ -80,7 +80,7 @@
return;
}
-WebRtc_Word16 ACMG722::InternalCreateDecoder() {
+int16_t ACMG722::InternalCreateDecoder() {
return -1;
}
@@ -107,7 +107,7 @@
G722DecInst* inst_right; // instance for right channel in case of stereo
};
-ACMG722::ACMG722(WebRtc_Word16 codec_id)
+ACMG722::ACMG722(int16_t codec_id)
: encoder_inst_ptr_(NULL),
encoder_inst_ptr_right_(NULL),
decoder_inst_ptr_(NULL) {
@@ -157,33 +157,33 @@
return;
}
-WebRtc_Word32 ACMG722::Add10MsDataSafe(const WebRtc_UWord32 timestamp,
- const WebRtc_Word16* data,
- const WebRtc_UWord16 length_smpl,
- const WebRtc_UWord8 audio_channel) {
+int32_t ACMG722::Add10MsDataSafe(const uint32_t timestamp,
+ const int16_t* data,
+ const uint16_t length_smpl,
+ const uint8_t audio_channel) {
return ACMGenericCodec::Add10MsDataSafe((timestamp >> 1), data, length_smpl,
audio_channel);
}
-WebRtc_Word16 ACMG722::InternalEncode(WebRtc_UWord8* bitstream,
- WebRtc_Word16* bitstream_len_byte) {
+int16_t ACMG722::InternalEncode(uint8_t* bitstream,
+ int16_t* bitstream_len_byte) {
// If stereo, split input signal in left and right channel before encoding
if (num_channels_ == 2) {
- WebRtc_Word16 left_channel[960];
- WebRtc_Word16 right_channel[960];
- WebRtc_UWord8 out_left[480];
- WebRtc_UWord8 out_right[480];
- WebRtc_Word16 len_in_bytes;
+ int16_t left_channel[960];
+ int16_t right_channel[960];
+ uint8_t out_left[480];
+ uint8_t out_right[480];
+ int16_t len_in_bytes;
for (int i = 0, j = 0; i < frame_len_smpl_ * 2; i += 2, j++) {
left_channel[j] = in_audio_[in_audio_ix_read_ + i];
right_channel[j] = in_audio_[in_audio_ix_read_ + i + 1];
}
len_in_bytes = WebRtcG722_Encode(encoder_inst_ptr_, left_channel,
frame_len_smpl_,
- (WebRtc_Word16*)out_left);
+ (int16_t*)out_left);
len_in_bytes += WebRtcG722_Encode(encoder_inst_ptr_right_, right_channel,
frame_len_smpl_,
- (WebRtc_Word16*)out_right);
+ (int16_t*)out_right);
*bitstream_len_byte = len_in_bytes;
// Interleave the 4 bits per sample from left and right channel
@@ -195,7 +195,7 @@
*bitstream_len_byte = WebRtcG722_Encode(encoder_inst_ptr_,
&in_audio_[in_audio_ix_read_],
frame_len_smpl_,
- (WebRtc_Word16*)bitstream);
+ (int16_t*)bitstream);
}
// Increment the read index; this tells the caller how far
@@ -204,15 +204,15 @@
return *bitstream_len_byte;
}
-WebRtc_Word16 ACMG722::DecodeSafe(WebRtc_UWord8* /* bitstream */,
- WebRtc_Word16 /* bitstream_len_byte */,
- WebRtc_Word16* /* audio */,
- WebRtc_Word16* /* audio_samples */,
- WebRtc_Word8* /* speech_type */) {
+int16_t ACMG722::DecodeSafe(uint8_t* /* bitstream */,
+ int16_t /* bitstream_len_byte */,
+ int16_t* /* audio */,
+ int16_t* /* audio_samples */,
+ int8_t* /* speech_type */) {
return 0;
}
-WebRtc_Word16 ACMG722::InternalInitEncoder(WebRtcACMCodecParams* codec_params) {
+int16_t ACMG722::InternalInitEncoder(WebRtcACMCodecParams* codec_params) {
if (codec_params->codec_inst.channels == 2) {
// Create codec struct for right channel
if (ptr_enc_str_->inst_right == NULL) {
@@ -230,13 +230,13 @@
return WebRtcG722_EncoderInit(encoder_inst_ptr_);
}
-WebRtc_Word16 ACMG722::InternalInitDecoder(
+int16_t ACMG722::InternalInitDecoder(
WebRtcACMCodecParams* /* codec_params */) {
return WebRtcG722_DecoderInit(decoder_inst_ptr_);
}
-WebRtc_Word32 ACMG722::CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst) {
+int32_t ACMG722::CodecDef(WebRtcNetEQ_CodecDef& codec_def,
+ const CodecInst& codec_inst) {
if (!decoder_initialized_) {
// TODO(turajs): log error
return -1;
@@ -260,7 +260,7 @@
return NULL;
}
-WebRtc_Word16 ACMG722::InternalCreateEncoder() {
+int16_t ACMG722::InternalCreateEncoder() {
if (ptr_enc_str_ == NULL) {
// This structure must be created in the constructor.
// If it is still NULL then there is a problem and
@@ -286,7 +286,7 @@
encoder_initialized_ = false;
}
-WebRtc_Word16 ACMG722::InternalCreateDecoder() {
+int16_t ACMG722::InternalCreateDecoder() {
if (ptr_dec_str_ == NULL) {
// This structure must be created in the constructor.
// If it is still NULL then there is a problem and
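A side note on the G.722 hunks above: ACMG722::Add10MsDataSafe forwards (timestamp >> 1) to the base class because G.722 is sampled at 16 kHz while its RTP clock rate is, for historical reasons, 8 kHz, so capture timestamps advance twice as fast as RTP timestamps. A trivial sketch of that conversion (assuming the input counts 16 kHz samples):

// Sketch: convert a 16 kHz capture timestamp to G.722's 8 kHz RTP clock,
// matching the (timestamp >> 1) seen in ACMG722::Add10MsDataSafe above.
static uint32_t G722RtpTimestamp(uint32_t capture_timestamp_16khz) {
  return capture_timestamp_16khz >> 1;
}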
diff --git a/webrtc/modules/audio_coding/main/source/acm_g722.h b/webrtc/modules/audio_coding/main/source/acm_g722.h
index 60ea2b4..d4273ab 100644
--- a/webrtc/modules/audio_coding/main/source/acm_g722.h
+++ b/webrtc/modules/audio_coding/main/source/acm_g722.h
@@ -24,40 +24,39 @@
class ACMG722: public ACMGenericCodec {
public:
- explicit ACMG722(WebRtc_Word16 codec_id);
+ explicit ACMG722(int16_t codec_id);
~ACMG722();
// for FEC
ACMGenericCodec* CreateInstance(void);
- WebRtc_Word16 InternalEncode(WebRtc_UWord8* bitstream,
- WebRtc_Word16* bitstream_len_byte);
+ int16_t InternalEncode(uint8_t* bitstream, int16_t* bitstream_len_byte);
- WebRtc_Word16 InternalInitEncoder(WebRtcACMCodecParams *codec_params);
+ int16_t InternalInitEncoder(WebRtcACMCodecParams *codec_params);
- WebRtc_Word16 InternalInitDecoder(WebRtcACMCodecParams *codec_params);
+ int16_t InternalInitDecoder(WebRtcACMCodecParams *codec_params);
protected:
- WebRtc_Word16 DecodeSafe(WebRtc_UWord8* bitstream,
- WebRtc_Word16 bitstream_len_byte,
- WebRtc_Word16* audio, WebRtc_Word16* audio_samples,
- WebRtc_Word8* speech_type);
+ int16_t DecodeSafe(uint8_t* bitstream,
+ int16_t bitstream_len_byte,
+ int16_t* audio, int16_t* audio_samples,
+ int8_t* speech_type);
- WebRtc_Word32 CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst);
+ int32_t CodecDef(WebRtcNetEQ_CodecDef& codec_def,
+ const CodecInst& codec_inst);
- WebRtc_Word32 Add10MsDataSafe(const WebRtc_UWord32 timestamp,
- const WebRtc_Word16* data,
- const WebRtc_UWord16 length_smpl,
- const WebRtc_UWord8 audio_channel);
+ int32_t Add10MsDataSafe(const uint32_t timestamp,
+ const int16_t* data,
+ const uint16_t length_smpl,
+ const uint8_t audio_channel);
void DestructEncoderSafe();
void DestructDecoderSafe();
- WebRtc_Word16 InternalCreateEncoder();
+ int16_t InternalCreateEncoder();
- WebRtc_Word16 InternalCreateDecoder();
+ int16_t InternalCreateDecoder();
void InternalDestructEncoderInst(void* ptr_inst);
diff --git a/webrtc/modules/audio_coding/main/source/acm_g7221.cc b/webrtc/modules/audio_coding/main/source/acm_g7221.cc
index 48c058c..f784b62 100644
--- a/webrtc/modules/audio_coding/main/source/acm_g7221.cc
+++ b/webrtc/modules/audio_coding/main/source/acm_g7221.cc
@@ -88,7 +88,7 @@
#ifndef WEBRTC_CODEC_G722_1
-ACMG722_1::ACMG722_1(WebRtc_Word16 /* codec_id */)
+ACMG722_1::ACMG722_1(int16_t /* codec_id */)
: operational_rate_(-1),
encoder_inst_ptr_(NULL),
encoder_inst_ptr_right_(NULL),
@@ -109,32 +109,32 @@
return;
}
-WebRtc_Word16 ACMG722_1::InternalEncode(
- WebRtc_UWord8* /* bitstream */,
- WebRtc_Word16* /* bitstream_len_byte */) {
+int16_t ACMG722_1::InternalEncode(
+ uint8_t* /* bitstream */,
+ int16_t* /* bitstream_len_byte */) {
return -1;
}
-WebRtc_Word16 ACMG722_1::DecodeSafe(WebRtc_UWord8* /* bitstream */,
- WebRtc_Word16 /* bitstream_len_byte */,
- WebRtc_Word16* /* audio */,
- WebRtc_Word16* /* audio_samples */,
- WebRtc_Word8* /* speech_type */) {
+int16_t ACMG722_1::DecodeSafe(uint8_t* /* bitstream */,
+ int16_t /* bitstream_len_byte */,
+ int16_t* /* audio */,
+ int16_t* /* audio_samples */,
+ int8_t* /* speech_type */) {
return -1;
}
-WebRtc_Word16 ACMG722_1::InternalInitEncoder(
+int16_t ACMG722_1::InternalInitEncoder(
WebRtcACMCodecParams* /* codec_params */) {
return -1;
}
-WebRtc_Word16 ACMG722_1::InternalInitDecoder(
+int16_t ACMG722_1::InternalInitDecoder(
WebRtcACMCodecParams* /* codec_params */) {
return -1;
}
-WebRtc_Word32 ACMG722_1::CodecDef(WebRtcNetEQ_CodecDef& /* codec_def */,
- const CodecInst& /* codec_inst */) {
+int32_t ACMG722_1::CodecDef(WebRtcNetEQ_CodecDef& /* codec_def */,
+ const CodecInst& /* codec_inst */) {
return -1;
}
@@ -142,7 +142,7 @@
return NULL;
}
-WebRtc_Word16 ACMG722_1::InternalCreateEncoder() {
+int16_t ACMG722_1::InternalCreateEncoder() {
return -1;
}
@@ -150,7 +150,7 @@
return;
}
-WebRtc_Word16 ACMG722_1::InternalCreateDecoder() {
+int16_t ACMG722_1::InternalCreateDecoder() {
return -1;
}
@@ -163,7 +163,7 @@
}
#else //===================== Actual Implementation =======================
-ACMG722_1::ACMG722_1(WebRtc_Word16 codec_id)
+ACMG722_1::ACMG722_1(int16_t codec_id)
: encoder_inst_ptr_(NULL),
encoder_inst_ptr_right_(NULL),
decoder_inst_ptr_(NULL),
@@ -229,12 +229,12 @@
return;
}
-WebRtc_Word16 ACMG722_1::InternalEncode(WebRtc_UWord8* bitstream,
- WebRtc_Word16* bitstream_len_byte) {
- WebRtc_Word16 left_channel[320];
- WebRtc_Word16 right_channel[320];
- WebRtc_Word16 len_in_bytes;
- WebRtc_Word16 out_bits[160];
+int16_t ACMG722_1::InternalEncode(uint8_t* bitstream,
+ int16_t* bitstream_len_byte) {
+ int16_t left_channel[320];
+ int16_t right_channel[320];
+ int16_t len_in_bytes;
+ int16_t out_bits[160];
// If stereo, split input signal in left and right channel before encoding
if (num_channels_ == 2) {
@@ -292,17 +292,17 @@
return *bitstream_len_byte;
}
-WebRtc_Word16 ACMG722_1::DecodeSafe(WebRtc_UWord8* /* bitstream */,
- WebRtc_Word16 /* bitstream_len_byte */,
- WebRtc_Word16* /* audio */,
- WebRtc_Word16* /* audio_samples */,
- WebRtc_Word8* /* speech_type */) {
+int16_t ACMG722_1::DecodeSafe(uint8_t* /* bitstream */,
+ int16_t /* bitstream_len_byte */,
+ int16_t* /* audio */,
+ int16_t* /* audio_samples */,
+ int8_t* /* speech_type */) {
return 0;
}
-WebRtc_Word16 ACMG722_1::InternalInitEncoder(
+int16_t ACMG722_1::InternalInitEncoder(
WebRtcACMCodecParams* codec_params) {
- WebRtc_Word16 ret;
+ int16_t ret;
switch (operational_rate_) {
case 16000: {
@@ -334,7 +334,7 @@
}
}
-WebRtc_Word16 ACMG722_1::InternalInitDecoder(
+int16_t ACMG722_1::InternalInitDecoder(
WebRtcACMCodecParams* /* codec_params */) {
switch (operational_rate_) {
case 16000: {
@@ -354,8 +354,8 @@
}
}
-WebRtc_Word32 ACMG722_1::CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst) {
+int32_t ACMG722_1::CodecDef(WebRtcNetEQ_CodecDef& codec_def,
+ const CodecInst& codec_inst) {
if (!decoder_initialized_) {
// Todo:
// log error
@@ -401,7 +401,7 @@
return NULL;
}
-WebRtc_Word16 ACMG722_1::InternalCreateEncoder() {
+int16_t ACMG722_1::InternalCreateEncoder() {
if ((encoder_inst_ptr_ == NULL) || (encoder_inst_ptr_right_ == NULL)) {
return -1;
}
@@ -446,7 +446,7 @@
encoder_inst32_ptr_ = NULL;
}
-WebRtc_Word16 ACMG722_1::InternalCreateDecoder() {
+int16_t ACMG722_1::InternalCreateDecoder() {
if (decoder_inst_ptr_ == NULL) {
return -1;
}
diff --git a/webrtc/modules/audio_coding/main/source/acm_g7221.h b/webrtc/modules/audio_coding/main/source/acm_g7221.h
index 9b4cb91..4e35476 100644
--- a/webrtc/modules/audio_coding/main/source/acm_g7221.h
+++ b/webrtc/modules/audio_coding/main/source/acm_g7221.h
@@ -26,39 +26,38 @@
class ACMG722_1: public ACMGenericCodec {
public:
- explicit ACMG722_1(WebRtc_Word16 codec_id);
+ explicit ACMG722_1(int16_t codec_id);
~ACMG722_1();
// for FEC
ACMGenericCodec* CreateInstance(void);
- WebRtc_Word16 InternalEncode(WebRtc_UWord8* bitstream,
- WebRtc_Word16* bitstream_len_byte);
+ int16_t InternalEncode(uint8_t* bitstream, int16_t* bitstream_len_byte);
- WebRtc_Word16 InternalInitEncoder(WebRtcACMCodecParams *codec_params);
+ int16_t InternalInitEncoder(WebRtcACMCodecParams *codec_params);
- WebRtc_Word16 InternalInitDecoder(WebRtcACMCodecParams *codec_params);
+ int16_t InternalInitDecoder(WebRtcACMCodecParams *codec_params);
protected:
- WebRtc_Word16 DecodeSafe(WebRtc_UWord8* bitstream,
- WebRtc_Word16 bitstream_len_byte,
- WebRtc_Word16* audio, WebRtc_Word16* audio_samples,
- WebRtc_Word8* speech_type);
+ int16_t DecodeSafe(uint8_t* bitstream,
+ int16_t bitstream_len_byte,
+ int16_t* audio, int16_t* audio_samples,
+ int8_t* speech_type);
- WebRtc_Word32 CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst);
+ int32_t CodecDef(WebRtcNetEQ_CodecDef& codec_def,
+ const CodecInst& codec_inst);
void DestructEncoderSafe();
void DestructDecoderSafe();
- WebRtc_Word16 InternalCreateEncoder();
+ int16_t InternalCreateEncoder();
- WebRtc_Word16 InternalCreateDecoder();
+ int16_t InternalCreateDecoder();
void InternalDestructEncoderInst(void* ptr_inst);
- WebRtc_Word32 operational_rate_;
+ int32_t operational_rate_;
G722_1_Inst_t_* encoder_inst_ptr_;
G722_1_Inst_t_* encoder_inst_ptr_right_; // Used in stereo mode
diff --git a/webrtc/modules/audio_coding/main/source/acm_g7221c.cc b/webrtc/modules/audio_coding/main/source/acm_g7221c.cc
index 3930ffa..a0d9483 100644
--- a/webrtc/modules/audio_coding/main/source/acm_g7221c.cc
+++ b/webrtc/modules/audio_coding/main/source/acm_g7221c.cc
@@ -89,7 +89,7 @@
#ifndef WEBRTC_CODEC_G722_1C
-ACMG722_1C::ACMG722_1C(WebRtc_Word16 /* codec_id */)
+ACMG722_1C::ACMG722_1C(int16_t /* codec_id */)
: operational_rate_(-1),
encoder_inst_ptr_(NULL),
encoder_inst_ptr_right_(NULL),
@@ -110,32 +110,32 @@
return;
}
-WebRtc_Word16 ACMG722_1C::InternalEncode(
- WebRtc_UWord8* /* bitstream */,
- WebRtc_Word16* /* bitstream_len_byte */) {
+int16_t ACMG722_1C::InternalEncode(
+ uint8_t* /* bitstream */,
+ int16_t* /* bitstream_len_byte */) {
return -1;
}
-WebRtc_Word16 ACMG722_1C::DecodeSafe(WebRtc_UWord8* /* bitstream */,
- WebRtc_Word16 /* bitstream_len_byte */,
- WebRtc_Word16* /* audio */,
- WebRtc_Word16* /* audio_samples */,
- WebRtc_Word8* /* speech_type */) {
+int16_t ACMG722_1C::DecodeSafe(uint8_t* /* bitstream */,
+ int16_t /* bitstream_len_byte */,
+ int16_t* /* audio */,
+ int16_t* /* audio_samples */,
+ int8_t* /* speech_type */) {
return -1;
}
-WebRtc_Word16 ACMG722_1C::InternalInitEncoder(
+int16_t ACMG722_1C::InternalInitEncoder(
WebRtcACMCodecParams* /* codec_params */) {
return -1;
}
-WebRtc_Word16 ACMG722_1C::InternalInitDecoder(
+int16_t ACMG722_1C::InternalInitDecoder(
WebRtcACMCodecParams* /* codec_params */) {
return -1;
}
-WebRtc_Word32 ACMG722_1C::CodecDef(WebRtcNetEQ_CodecDef& /* codec_def */,
- const CodecInst& /* codec_inst */) {
+int32_t ACMG722_1C::CodecDef(WebRtcNetEQ_CodecDef& /* codec_def */,
+ const CodecInst& /* codec_inst */) {
return -1;
}
@@ -143,7 +143,7 @@
return NULL;
}
-WebRtc_Word16 ACMG722_1C::InternalCreateEncoder() {
+int16_t ACMG722_1C::InternalCreateEncoder() {
return -1;
}
@@ -151,7 +151,7 @@
return;
}
-WebRtc_Word16 ACMG722_1C::InternalCreateDecoder() {
+int16_t ACMG722_1C::InternalCreateDecoder() {
return -1;
}
@@ -164,7 +164,7 @@
}
#else //===================== Actual Implementation =======================
-ACMG722_1C::ACMG722_1C(WebRtc_Word16 codec_id)
+ACMG722_1C::ACMG722_1C(int16_t codec_id)
: encoder_inst_ptr_(NULL),
encoder_inst_ptr_right_(NULL),
decoder_inst_ptr_(NULL),
@@ -234,12 +234,12 @@
return;
}
-WebRtc_Word16 ACMG722_1C::InternalEncode(WebRtc_UWord8* bitstream,
- WebRtc_Word16* bitstream_len_byte) {
- WebRtc_Word16 left_channel[640];
- WebRtc_Word16 right_channel[640];
- WebRtc_Word16 len_in_bytes;
- WebRtc_Word16 out_bits[240];
+int16_t ACMG722_1C::InternalEncode(uint8_t* bitstream,
+ int16_t* bitstream_len_byte) {
+ int16_t left_channel[640];
+ int16_t right_channel[640];
+ int16_t len_in_bytes;
+ int16_t out_bits[240];
// If stereo, split input signal in left and right channel before encoding
if (num_channels_ == 2) {
@@ -299,17 +299,17 @@
return *bitstream_len_byte;
}
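The stereo branch above (num_channels_ == 2) deinterleaves the input into left_channel and right_channel before each half is passed to its own mono encoder instance. The hunk elides that loop; a minimal sketch of such a split, assuming interleaved L/R int16_t samples, is:

// Hypothetical helper, not the actual WebRTC code: split interleaved stereo
// (L0 R0 L1 R1 ...) into two mono buffers of num_frames samples each.
static void DeinterleaveStereo(const int16_t* interleaved, int num_frames,
                               int16_t* left, int16_t* right) {
  for (int i = 0; i < num_frames; ++i) {
    left[i] = interleaved[2 * i];       // even positions -> left channel
    right[i] = interleaved[2 * i + 1];  // odd positions  -> right channel
  }
}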
-WebRtc_Word16 ACMG722_1C::DecodeSafe(WebRtc_UWord8* /* bitstream */,
- WebRtc_Word16 /* bitstream_len_byte */,
- WebRtc_Word16* /* audio */,
- WebRtc_Word16* /* audio_samples */,
- WebRtc_Word8* /* speech_type */) {
+int16_t ACMG722_1C::DecodeSafe(uint8_t* /* bitstream */,
+ int16_t /* bitstream_len_byte */,
+ int16_t* /* audio */,
+ int16_t* /* audio_samples */,
+ int8_t* /* speech_type */) {
return 0;
}
-WebRtc_Word16 ACMG722_1C::InternalInitEncoder(
+int16_t ACMG722_1C::InternalInitEncoder(
WebRtcACMCodecParams* codec_params) {
- WebRtc_Word16 ret;
+ int16_t ret;
switch (operational_rate_) {
case 24000: {
@@ -341,7 +341,7 @@
}
}
-WebRtc_Word16 ACMG722_1C::InternalInitDecoder(
+int16_t ACMG722_1C::InternalInitDecoder(
WebRtcACMCodecParams* /* codec_params */) {
switch (operational_rate_) {
case 24000: {
@@ -361,8 +361,8 @@
}
}
-WebRtc_Word32 ACMG722_1C::CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst) {
+int32_t ACMG722_1C::CodecDef(WebRtcNetEQ_CodecDef& codec_def,
+ const CodecInst& codec_inst) {
if (!decoder_initialized_) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
"CodeDef: decoder not initialized for G722_1c");
@@ -409,7 +409,7 @@
return NULL;
}
-WebRtc_Word16 ACMG722_1C::InternalCreateEncoder() {
+int16_t ACMG722_1C::InternalCreateEncoder() {
if ((encoder_inst_ptr_ == NULL) || (encoder_inst_ptr_right_ == NULL)) {
return -1;
}
@@ -454,7 +454,7 @@
encoder_inst48_ptr_ = NULL;
}
-WebRtc_Word16 ACMG722_1C::InternalCreateDecoder() {
+int16_t ACMG722_1C::InternalCreateDecoder() {
if (decoder_inst_ptr_ == NULL) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
"InternalCreateEncoder: cannot create decoder");
diff --git a/webrtc/modules/audio_coding/main/source/acm_g7221c.h b/webrtc/modules/audio_coding/main/source/acm_g7221c.h
index 4ce2d84..1b4e756 100644
--- a/webrtc/modules/audio_coding/main/source/acm_g7221c.h
+++ b/webrtc/modules/audio_coding/main/source/acm_g7221c.h
@@ -26,31 +26,31 @@
class ACMG722_1C : public ACMGenericCodec {
public:
- explicit ACMG722_1C(WebRtc_Word16 codec_id);
+ explicit ACMG722_1C(int16_t codec_id);
~ACMG722_1C();
// for FEC
ACMGenericCodec* CreateInstance(void);
- WebRtc_Word16 InternalEncode(
- WebRtc_UWord8* bitstream,
- WebRtc_Word16* bitstream_len_byte);
+ int16_t InternalEncode(
+ uint8_t* bitstream,
+ int16_t* bitstream_len_byte);
- WebRtc_Word16 InternalInitEncoder(
+ int16_t InternalInitEncoder(
WebRtcACMCodecParams *codec_params);
- WebRtc_Word16 InternalInitDecoder(
+ int16_t InternalInitDecoder(
WebRtcACMCodecParams *codec_params);
protected:
- WebRtc_Word16 DecodeSafe(
- WebRtc_UWord8* bitstream,
- WebRtc_Word16 bitstream_len_byte,
- WebRtc_Word16* audio,
- WebRtc_Word16* audio_samples,
- WebRtc_Word8* speech_type);
+ int16_t DecodeSafe(
+ uint8_t* bitstream,
+ int16_t bitstream_len_byte,
+ int16_t* audio,
+ int16_t* audio_samples,
+ int8_t* speech_type);
- WebRtc_Word32 CodecDef(
+ int32_t CodecDef(
WebRtcNetEQ_CodecDef& codec_def,
const CodecInst& codec_inst);
@@ -58,14 +58,14 @@
void DestructDecoderSafe();
- WebRtc_Word16 InternalCreateEncoder();
+ int16_t InternalCreateEncoder();
- WebRtc_Word16 InternalCreateDecoder();
+ int16_t InternalCreateDecoder();
void InternalDestructEncoderInst(
void* ptr_inst);
- WebRtc_Word32 operational_rate_;
+ int32_t operational_rate_;
G722_1_Inst_t_* encoder_inst_ptr_;
G722_1_Inst_t_* encoder_inst_ptr_right_; // Used in stereo mode
diff --git a/webrtc/modules/audio_coding/main/source/acm_g729.cc b/webrtc/modules/audio_coding/main/source/acm_g729.cc
index 031ccb7..67611cb 100644
--- a/webrtc/modules/audio_coding/main/source/acm_g729.cc
+++ b/webrtc/modules/audio_coding/main/source/acm_g729.cc
@@ -27,7 +27,7 @@
#ifndef WEBRTC_CODEC_G729
-ACMG729::ACMG729(WebRtc_Word16 /* codec_id */)
+ACMG729::ACMG729(int16_t /* codec_id */)
: encoder_inst_ptr_(NULL),
decoder_inst_ptr_(NULL) {
return;
@@ -37,50 +37,50 @@
return;
}
-WebRtc_Word16 ACMG729::InternalEncode(
- WebRtc_UWord8* /* bitstream */,
- WebRtc_Word16* /* bitstream_len_byte */) {
+int16_t ACMG729::InternalEncode(
+ uint8_t* /* bitstream */,
+ int16_t* /* bitstream_len_byte */) {
return -1;
}
-WebRtc_Word16 ACMG729::EnableDTX() {
+int16_t ACMG729::EnableDTX() {
return -1;
}
-WebRtc_Word16 ACMG729::DisableDTX() {
+int16_t ACMG729::DisableDTX() {
return -1;
}
-WebRtc_Word32 ACMG729::ReplaceInternalDTXSafe(
+int32_t ACMG729::ReplaceInternalDTXSafe(
const bool /*replace_internal_dtx */) {
return -1;
}
-WebRtc_Word32 ACMG729::IsInternalDTXReplacedSafe(
+int32_t ACMG729::IsInternalDTXReplacedSafe(
bool* /* internal_dtx_replaced */) {
return -1;
}
-WebRtc_Word16 ACMG729::DecodeSafe(WebRtc_UWord8* /* bitstream */,
- WebRtc_Word16 /* bitstream_len_byte */,
- WebRtc_Word16* /* audio */,
- WebRtc_Word16* /* audio_samples */,
- WebRtc_Word8* /* speech_type */) {
+int16_t ACMG729::DecodeSafe(uint8_t* /* bitstream */,
+ int16_t /* bitstream_len_byte */,
+ int16_t* /* audio */,
+ int16_t* /* audio_samples */,
+ int8_t* /* speech_type */) {
return -1;
}
-WebRtc_Word16 ACMG729::InternalInitEncoder(
+int16_t ACMG729::InternalInitEncoder(
WebRtcACMCodecParams* /* codec_params */) {
return -1;
}
-WebRtc_Word16 ACMG729::InternalInitDecoder(
+int16_t ACMG729::InternalInitDecoder(
WebRtcACMCodecParams* /* codec_params */) {
return -1;
}
-WebRtc_Word32 ACMG729::CodecDef(WebRtcNetEQ_CodecDef& /* codec_def */,
- const CodecInst& /* codec_inst */) {
+int32_t ACMG729::CodecDef(WebRtcNetEQ_CodecDef& /* codec_def */,
+ const CodecInst& /* codec_inst */) {
return -1;
}
@@ -88,7 +88,7 @@
return NULL;
}
-WebRtc_Word16 ACMG729::InternalCreateEncoder() {
+int16_t ACMG729::InternalCreateEncoder() {
return -1;
}
@@ -96,7 +96,7 @@
return;
}
-WebRtc_Word16 ACMG729::InternalCreateDecoder() {
+int16_t ACMG729::InternalCreateDecoder() {
return -1;
}
@@ -109,7 +109,7 @@
}
#else //===================== Actual Implementation =======================
-ACMG729::ACMG729(WebRtc_Word16 codec_id)
+ACMG729::ACMG729(int16_t codec_id)
: encoder_inst_ptr_(NULL),
decoder_inst_ptr_(NULL) {
codec_id_ = codec_id;
@@ -131,19 +131,19 @@
return;
}
-WebRtc_Word16 ACMG729::InternalEncode(WebRtc_UWord8* bitstream,
- WebRtc_Word16* bitstream_len_byte) {
+int16_t ACMG729::InternalEncode(uint8_t* bitstream,
+ int16_t* bitstream_len_byte) {
// Initialize before entering the loop
- WebRtc_Word16 num_encoded_samples = 0;
- WebRtc_Word16 tmp_len_byte = 0;
- WebRtc_Word16 vad_decision = 0;
+ int16_t num_encoded_samples = 0;
+ int16_t tmp_len_byte = 0;
+ int16_t vad_decision = 0;
*bitstream_len_byte = 0;
while (num_encoded_samples < frame_len_smpl_) {
// Call G.729 encoder with pointer to encoder memory, input
// audio, number of samples and bitsream
tmp_len_byte = WebRtcG729_Encode(
encoder_inst_ptr_, &in_audio_[in_audio_ix_read_], 80,
- (WebRtc_Word16*)(&(bitstream[*bitstream_len_byte])));
+ (int16_t*)(&(bitstream[*bitstream_len_byte])));
// Increment the read index. This tells the caller how far
// we have gone forward in reading the audio buffer.
@@ -173,7 +173,7 @@
// check if G.729 internal DTX is enabled
if (has_internal_dtx_ && dtx_enabled_) {
vad_decision = 0;
- for (WebRtc_Word16 n = 0; n < MAX_FRAME_SIZE_10MSEC; n++) {
+ for (int16_t n = 0; n < MAX_FRAME_SIZE_10MSEC; n++) {
vad_label_[n] = vad_decision;
}
}
@@ -197,7 +197,7 @@
// update VAD decision vector
if (has_internal_dtx_ && !vad_decision && dtx_enabled_) {
- for (WebRtc_Word16 n = 0; n < MAX_FRAME_SIZE_10MSEC; n++) {
+ for (int16_t n = 0; n < MAX_FRAME_SIZE_10MSEC; n++) {
vad_label_[n] = vad_decision;
}
}
@@ -206,7 +206,7 @@
return *bitstream_len_byte;
}
-WebRtc_Word16 ACMG729::EnableDTX() {
+int16_t ACMG729::EnableDTX() {
if (dtx_enabled_) {
// DTX already enabled, do nothing
return 0;
@@ -222,7 +222,7 @@
}
}
-WebRtc_Word16 ACMG729::DisableDTX() {
+int16_t ACMG729::DisableDTX() {
if (!dtx_enabled_) {
// DTX already disabled, do nothing
return 0;
@@ -239,7 +239,7 @@
}
}
-WebRtc_Word32 ACMG729::ReplaceInternalDTXSafe(const bool replace_internal_dtx) {
+int32_t ACMG729::ReplaceInternalDTXSafe(const bool replace_internal_dtx) {
// This function is used to disable the G.729 built-in DTX and use an
// external one instead.
@@ -256,7 +256,7 @@
ACMGenericCodec::DisableDTX();
}
has_internal_dtx_ = !replace_internal_dtx;
- WebRtc_Word16 status = SetVADSafe(old_enable_dtx, old_enable_vad, old_mode);
+ int16_t status = SetVADSafe(old_enable_dtx, old_enable_vad, old_mode);
// Check if VAD status has changed from inactive to active, or if error was
// reported
if (status == 1) {
@@ -270,35 +270,35 @@
return 0;
}
-WebRtc_Word32 ACMG729::IsInternalDTXReplacedSafe(bool* internal_dtx_replaced) {
+int32_t ACMG729::IsInternalDTXReplacedSafe(bool* internal_dtx_replaced) {
// Get status of whether DTX is replaced or not
*internal_dtx_replaced = !has_internal_dtx_;
return 0;
}
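A short usage sketch for the DTX-replacement pair above; per the ACMGenericCodec header comments, the public ReplaceInternalDTX()/IsInternalDTXReplaced() wrappers end up in these *Safe() methods:

// Illustrative only: switch a G.729 codec from its built-in Annex B DTX to
// the common WebRtc VAD/DTX, then check that the switch took effect.
static bool UseWebRtcDtxInsteadOfAnnexB(ACMGenericCodec* g729_codec) {
  bool replaced = false;
  if (g729_codec->ReplaceInternalDTX(true) != 0)
    return false;  // Failed to replace the internal DTX.
  if (g729_codec->IsInternalDTXReplaced(&replaced) != 0)
    return false;
  return replaced;  // true: WebRtc DTX now runs instead of G.729 Annex B.
}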
-WebRtc_Word16 ACMG729::DecodeSafe(WebRtc_UWord8* /* bitstream */,
- WebRtc_Word16 /* bitstream_len_byte */,
- WebRtc_Word16* /* audio */,
- WebRtc_Word16* /* audio_samples */,
- WebRtc_Word8* /* speech_type */) {
+int16_t ACMG729::DecodeSafe(uint8_t* /* bitstream */,
+ int16_t /* bitstream_len_byte */,
+ int16_t* /* audio */,
+ int16_t* /* audio_samples */,
+ int8_t* /* speech_type */) {
// This function is not used. G.729 decoder is called from inside NetEQ
return 0;
}
-WebRtc_Word16 ACMG729::InternalInitEncoder(WebRtcACMCodecParams* codec_params) {
+int16_t ACMG729::InternalInitEncoder(WebRtcACMCodecParams* codec_params) {
// Init G.729 encoder
return WebRtcG729_EncoderInit(encoder_inst_ptr_,
((codec_params->enable_dtx) ? 1 : 0));
}
-WebRtc_Word16 ACMG729::InternalInitDecoder(
+int16_t ACMG729::InternalInitDecoder(
WebRtcACMCodecParams* /* codec_params */) {
// Init G.729 decoder
return WebRtcG729_DecoderInit(decoder_inst_ptr_);
}
-WebRtc_Word32 ACMG729::CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst) {
+int32_t ACMG729::CodecDef(WebRtcNetEQ_CodecDef& codec_def,
+ const CodecInst& codec_inst) {
if (!decoder_initialized_) {
// Todo:
// log error
@@ -320,7 +320,7 @@
return NULL;
}
-WebRtc_Word16 ACMG729::InternalCreateEncoder() {
+int16_t ACMG729::InternalCreateEncoder() {
// Create encoder memory
return WebRtcG729_CreateEnc(&encoder_inst_ptr_);
}
@@ -335,7 +335,7 @@
}
}
-WebRtc_Word16 ACMG729::InternalCreateDecoder() {
+int16_t ACMG729::InternalCreateDecoder() {
// Create decoder memory
return WebRtcG729_CreateDec(&decoder_inst_ptr_);
}
diff --git a/webrtc/modules/audio_coding/main/source/acm_g729.h b/webrtc/modules/audio_coding/main/source/acm_g729.h
index e923a32..d50aa5f 100644
--- a/webrtc/modules/audio_coding/main/source/acm_g729.h
+++ b/webrtc/modules/audio_coding/main/source/acm_g729.h
@@ -21,46 +21,46 @@
class ACMG729 : public ACMGenericCodec {
public:
- explicit ACMG729(WebRtc_Word16 codec_id);
+ explicit ACMG729(int16_t codec_id);
~ACMG729();
// for FEC
ACMGenericCodec* CreateInstance(void);
- WebRtc_Word16 InternalEncode(WebRtc_UWord8* bitstream,
- WebRtc_Word16* bitstream_len_byte);
+ int16_t InternalEncode(uint8_t* bitstream,
+ int16_t* bitstream_len_byte);
- WebRtc_Word16 InternalInitEncoder(WebRtcACMCodecParams *codec_params);
+ int16_t InternalInitEncoder(WebRtcACMCodecParams *codec_params);
- WebRtc_Word16 InternalInitDecoder(WebRtcACMCodecParams *codec_params);
+ int16_t InternalInitDecoder(WebRtcACMCodecParams *codec_params);
protected:
- WebRtc_Word16 DecodeSafe(WebRtc_UWord8* bitstream,
- WebRtc_Word16 bitstream_len_byte,
- WebRtc_Word16* audio,
- WebRtc_Word16* audio_samples,
- WebRtc_Word8* speech_type);
+ int16_t DecodeSafe(uint8_t* bitstream,
+ int16_t bitstream_len_byte,
+ int16_t* audio,
+ int16_t* audio_samples,
+ int8_t* speech_type);
- WebRtc_Word32 CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst);
+ int32_t CodecDef(WebRtcNetEQ_CodecDef& codec_def,
+ const CodecInst& codec_inst);
void DestructEncoderSafe();
void DestructDecoderSafe();
- WebRtc_Word16 InternalCreateEncoder();
+ int16_t InternalCreateEncoder();
- WebRtc_Word16 InternalCreateDecoder();
+ int16_t InternalCreateDecoder();
void InternalDestructEncoderInst(void* ptr_inst);
- WebRtc_Word16 EnableDTX();
+ int16_t EnableDTX();
- WebRtc_Word16 DisableDTX();
+ int16_t DisableDTX();
- WebRtc_Word32 ReplaceInternalDTXSafe(const bool replace_internal_dtx);
+ int32_t ReplaceInternalDTXSafe(const bool replace_internal_dtx);
- WebRtc_Word32 IsInternalDTXReplacedSafe(bool* internal_dtx_replaced);
+ int32_t IsInternalDTXReplacedSafe(bool* internal_dtx_replaced);
G729_encinst_t_* encoder_inst_ptr_;
G729_decinst_t_* decoder_inst_ptr_;
diff --git a/webrtc/modules/audio_coding/main/source/acm_g7291.cc b/webrtc/modules/audio_coding/main/source/acm_g7291.cc
index 89c984a..da473ca 100644
--- a/webrtc/modules/audio_coding/main/source/acm_g7291.cc
+++ b/webrtc/modules/audio_coding/main/source/acm_g7291.cc
@@ -26,7 +26,7 @@
#ifndef WEBRTC_CODEC_G729_1
-ACMG729_1::ACMG729_1(WebRtc_Word16 /* codec_id */)
+ACMG729_1::ACMG729_1(int16_t /* codec_id */)
: encoder_inst_ptr_(NULL),
decoder_inst_ptr_(NULL),
my_rate_(32000),
@@ -39,32 +39,32 @@
return;
}
-WebRtc_Word16 ACMG729_1::InternalEncode(
- WebRtc_UWord8* /* bitstream */,
- WebRtc_Word16* /* bitstream_len_byte */) {
+int16_t ACMG729_1::InternalEncode(
+ uint8_t* /* bitstream */,
+ int16_t* /* bitstream_len_byte */) {
return -1;
}
-WebRtc_Word16 ACMG729_1::DecodeSafe(WebRtc_UWord8* /* bitstream */,
- WebRtc_Word16 /* bitstream_len_byte */,
- WebRtc_Word16* /* audio */,
- WebRtc_Word16* /* audio_samples */,
- WebRtc_Word8* /* speech_type */) {
+int16_t ACMG729_1::DecodeSafe(uint8_t* /* bitstream */,
+ int16_t /* bitstream_len_byte */,
+ int16_t* /* audio */,
+ int16_t* /* audio_samples */,
+ int8_t* /* speech_type */) {
return -1;
}
-WebRtc_Word16 ACMG729_1::InternalInitEncoder(
+int16_t ACMG729_1::InternalInitEncoder(
WebRtcACMCodecParams* /* codec_params */) {
return -1;
}
-WebRtc_Word16 ACMG729_1::InternalInitDecoder(
+int16_t ACMG729_1::InternalInitDecoder(
WebRtcACMCodecParams* /* codec_params */) {
return -1;
}
-WebRtc_Word32 ACMG729_1::CodecDef(WebRtcNetEQ_CodecDef& /* codec_def */,
- const CodecInst& /* codec_inst */) {
+int32_t ACMG729_1::CodecDef(WebRtcNetEQ_CodecDef& /* codec_def */,
+ const CodecInst& /* codec_inst */) {
return -1;
}
@@ -72,7 +72,7 @@
return NULL;
}
-WebRtc_Word16 ACMG729_1::InternalCreateEncoder() {
+int16_t ACMG729_1::InternalCreateEncoder() {
return -1;
}
@@ -80,7 +80,7 @@
return;
}
-WebRtc_Word16 ACMG729_1::InternalCreateDecoder() {
+int16_t ACMG729_1::InternalCreateDecoder() {
return -1;
}
@@ -92,7 +92,7 @@
return;
}
-WebRtc_Word16 ACMG729_1::SetBitRateSafe(const WebRtc_Word32 /*rate*/) {
+int16_t ACMG729_1::SetBitRateSafe(const int32_t /*rate*/) {
return -1;
}
@@ -100,7 +100,7 @@
struct G729_1_inst_t_;
-ACMG729_1::ACMG729_1(WebRtc_Word16 codec_id)
+ACMG729_1::ACMG729_1(int16_t codec_id)
: encoder_inst_ptr_(NULL),
decoder_inst_ptr_(NULL),
my_rate_(32000), // Default rate.
@@ -124,18 +124,18 @@
return;
}
-WebRtc_Word16 ACMG729_1::InternalEncode(WebRtc_UWord8* bitstream,
- WebRtc_Word16* bitstream_len_byte) {
+int16_t ACMG729_1::InternalEncode(uint8_t* bitstream,
+ int16_t* bitstream_len_byte) {
// Initialize before entering the loop
- WebRtc_Word16 num_encoded_samples = 0;
+ int16_t num_encoded_samples = 0;
*bitstream_len_byte = 0;
- WebRtc_Word16 byte_length_frame = 0;
+ int16_t byte_length_frame = 0;
// Derive number of 20ms frames per encoded packet.
// [1,2,3] <=> [20,40,60]ms <=> [320,640,960] samples
- WebRtc_Word16 num_20ms_frames = (frame_len_smpl_ / 320);
+ int16_t num_20ms_frames = (frame_len_smpl_ / 320);
// Byte length for the frame. +1 is for rate information.
byte_length_frame = my_rate_ / (8 * 50) * num_20ms_frames + (1 -
flag_g729_mode_);
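A worked example of the packet-size arithmetic above, with illustrative values:

// my_rate_ = 32000 bps: bytes per 20 ms frame = 32000 / (8 * 50) = 80.
// A 60 ms packet (num_20ms_frames = 3) therefore carries 80 * 3 = 240 bytes,
// plus one rate-information byte when flag_g729_mode_ == 0,
// giving byte_length_frame = 241.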
@@ -144,7 +144,7 @@
do {
*bitstream_len_byte = WebRtcG7291_Encode(encoder_inst_ptr_,
&in_audio_[in_audio_ix_read_],
- (WebRtc_Word16*) bitstream,
+ (int16_t*) bitstream,
my_rate_, num_20ms_frames);
// Increment the read index. This tells the caller how far
@@ -179,22 +179,22 @@
return *bitstream_len_byte;
}
-WebRtc_Word16 ACMG729_1::DecodeSafe(WebRtc_UWord8* /* bitstream */,
- WebRtc_Word16 /* bitstream_len_byte */,
- WebRtc_Word16* /* audio */,
- WebRtc_Word16* /* audio_samples */,
- WebRtc_Word8* /* speech_type */) {
+int16_t ACMG729_1::DecodeSafe(uint8_t* /* bitstream */,
+ int16_t /* bitstream_len_byte */,
+ int16_t* /* audio */,
+ int16_t* /* audio_samples */,
+ int8_t* /* speech_type */) {
return 0;
}
-WebRtc_Word16 ACMG729_1::InternalInitEncoder(
+int16_t ACMG729_1::InternalInitEncoder(
WebRtcACMCodecParams* codec_params) {
//set the bit rate and initialize
my_rate_ = codec_params->codec_inst.rate;
- return SetBitRateSafe((WebRtc_UWord32) my_rate_);
+ return SetBitRateSafe((uint32_t) my_rate_);
}
-WebRtc_Word16 ACMG729_1::InternalInitDecoder(
+int16_t ACMG729_1::InternalInitDecoder(
WebRtcACMCodecParams* /* codec_params */) {
if (WebRtcG7291_DecoderInit(decoder_inst_ptr_) < 0) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
@@ -204,8 +204,8 @@
return 0;
}
-WebRtc_Word32 ACMG729_1::CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst) {
+int32_t ACMG729_1::CodecDef(WebRtcNetEQ_CodecDef& codec_def,
+ const CodecInst& codec_inst) {
if (!decoder_initialized_) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
"CodeDef: Decoder uninitialized for G729_1");
@@ -226,7 +226,7 @@
return NULL;
}
-WebRtc_Word16 ACMG729_1::InternalCreateEncoder() {
+int16_t ACMG729_1::InternalCreateEncoder() {
if (WebRtcG7291_Create(&encoder_inst_ptr_) < 0) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
"InternalCreateEncoder: create encoder failed for G729_1");
@@ -244,7 +244,7 @@
}
}
-WebRtc_Word16 ACMG729_1::InternalCreateDecoder() {
+int16_t ACMG729_1::InternalCreateDecoder() {
if (WebRtcG7291_Create(&decoder_inst_ptr_) < 0) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
"InternalCreateDecoder: create decoder failed for G729_1");
@@ -269,7 +269,7 @@
return;
}
-WebRtc_Word16 ACMG729_1::SetBitRateSafe(const WebRtc_Word32 rate) {
+int16_t ACMG729_1::SetBitRateSafe(const int32_t rate) {
// allowed rates: { 8000, 12000, 14000, 16000, 18000, 20000,
// 22000, 24000, 26000, 28000, 30000, 32000};
// TODO(tlegrand): This check exists in one other place too. Should be
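A minimal sketch of the rate check the comment above describes; the helper name is illustrative, and the real SetBitRateSafe() is also expected to store the accepted rate and update the related mode flags:

// Illustrative only: G.729.1 accepts 8000 bps and 12000..32000 bps in
// 2000 bps steps.
static bool IsAllowedG7291Rate(int32_t rate) {
  static const int32_t kAllowedRates[] = {8000,  12000, 14000, 16000,
                                          18000, 20000, 22000, 24000,
                                          26000, 28000, 30000, 32000};
  for (int i = 0; i < 12; ++i) {
    if (rate == kAllowedRates[i])
      return true;
  }
  return false;
}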
diff --git a/webrtc/modules/audio_coding/main/source/acm_g7291.h b/webrtc/modules/audio_coding/main/source/acm_g7291.h
index d55c51b..433b2fd 100644
--- a/webrtc/modules/audio_coding/main/source/acm_g7291.h
+++ b/webrtc/modules/audio_coding/main/source/acm_g7291.h
@@ -21,47 +21,46 @@
class ACMG729_1 : public ACMGenericCodec {
public:
- explicit ACMG729_1(WebRtc_Word16 codec_id);
+ explicit ACMG729_1(int16_t codec_id);
~ACMG729_1();
// for FEC
ACMGenericCodec* CreateInstance(void);
- WebRtc_Word16 InternalEncode(WebRtc_UWord8* bitstream,
- WebRtc_Word16* bitstream_len_byte);
+ int16_t InternalEncode(uint8_t* bitstream, int16_t* bitstream_len_byte);
- WebRtc_Word16 InternalInitEncoder(WebRtcACMCodecParams *codec_params);
+ int16_t InternalInitEncoder(WebRtcACMCodecParams *codec_params);
- WebRtc_Word16 InternalInitDecoder(WebRtcACMCodecParams *codec_params);
+ int16_t InternalInitDecoder(WebRtcACMCodecParams *codec_params);
protected:
- WebRtc_Word16 DecodeSafe(WebRtc_UWord8* bitstream,
- WebRtc_Word16 bitstream_len_byte,
- WebRtc_Word16* audio,
- WebRtc_Word16* audio_samples,
- WebRtc_Word8* speech_type);
+ int16_t DecodeSafe(uint8_t* bitstream,
+ int16_t bitstream_len_byte,
+ int16_t* audio,
+ int16_t* audio_samples,
+ int8_t* speech_type);
- WebRtc_Word32 CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst);
+ int32_t CodecDef(WebRtcNetEQ_CodecDef& codec_def,
+ const CodecInst& codec_inst);
void DestructEncoderSafe();
void DestructDecoderSafe();
- WebRtc_Word16 InternalCreateEncoder();
+ int16_t InternalCreateEncoder();
- WebRtc_Word16 InternalCreateDecoder();
+ int16_t InternalCreateDecoder();
void InternalDestructEncoderInst(void* ptr_inst);
- WebRtc_Word16 SetBitRateSafe(const WebRtc_Word32 rate);
+ int16_t SetBitRateSafe(const int32_t rate);
G729_1_inst_t_* encoder_inst_ptr_;
G729_1_inst_t_* decoder_inst_ptr_;
- WebRtc_UWord16 my_rate_;
- WebRtc_Word16 flag_8khz_;
- WebRtc_Word16 flag_g729_mode_;
+ uint16_t my_rate_;
+ int16_t flag_8khz_;
+ int16_t flag_g729_mode_;
};
} // namespace webrtc
diff --git a/webrtc/modules/audio_coding/main/source/acm_generic_codec.h b/webrtc/modules/audio_coding/main/source/acm_generic_codec.h
index 681f56e..945967e 100644
--- a/webrtc/modules/audio_coding/main/source/acm_generic_codec.h
+++ b/webrtc/modules/audio_coding/main/source/acm_generic_codec.h
@@ -48,7 +48,7 @@
virtual ACMGenericCodec* CreateInstance() = 0;
///////////////////////////////////////////////////////////////////////////
- // WebRtc_Word16 Encode()
+ // int16_t Encode()
// The function is called to perform an encoding of the audio stored in
// audio buffer. An encoding is performed only if enough audio, i.e. equal
// to the frame-size of the codec, exist. The audio frame will be processed
@@ -100,13 +100,13 @@
// -1 if an error occurred, otherwise the length of the bit-stream in
// bytes.
//
- WebRtc_Word16 Encode(WebRtc_UWord8* bitstream,
- WebRtc_Word16* bitstream_len_byte,
- WebRtc_UWord32* timestamp,
- WebRtcACMEncodingType* encoding_type);
+ int16_t Encode(uint8_t* bitstream,
+ int16_t* bitstream_len_byte,
+ uint32_t* timestamp,
+ WebRtcACMEncodingType* encoding_type);
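A hedged sketch of how the audio coding module drives Encode(); the wrapper function is illustrative and not part of the module:

// Illustrative caller-side sketch, assuming 'payload' has enough capacity.
int16_t EncodePendingAudio(ACMGenericCodec* codec, uint8_t* payload) {
  int16_t payload_len_bytes = 0;
  uint32_t rtp_timestamp = 0;
  WebRtcACMEncodingType encoding_type;

  int16_t len = codec->Encode(payload, &payload_len_bytes,
                              &rtp_timestamp, &encoding_type);
  // len < 0 on error; otherwise the payload length in bytes (0 means the
  // codec did not yet have a full frame of audio buffered).
  return len;
}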
///////////////////////////////////////////////////////////////////////////
- // WebRtc_Word16 Decode()
+ // int16_t Decode()
// This function is used to decode a given bit-stream, without engaging
// NetEQ.
//
@@ -127,11 +127,11 @@
// -1 if failed to decode,
// 0 if succeeded.
//
- WebRtc_Word16 Decode(WebRtc_UWord8* bitstream,
- WebRtc_Word16 bitstream_len_byte,
- WebRtc_Word16* audio,
- WebRtc_Word16* audio_samples,
- WebRtc_Word8* speech_type);
+ int16_t Decode(uint8_t* bitstream,
+ int16_t bitstream_len_byte,
+ int16_t* audio,
+ int16_t* audio_samples,
+ int8_t* speech_type);
///////////////////////////////////////////////////////////////////////////
// void SplitStereoPacket()
@@ -149,8 +149,8 @@
// we simply copy the data and return it both for
// left channel and right channel decoding.
//
- virtual void SplitStereoPacket(WebRtc_UWord8* /* payload */,
- WebRtc_Word32* /* payload_length */) {}
+ virtual void SplitStereoPacket(uint8_t* /* payload */,
+ int32_t* /* payload_length */) {}
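For a codec where left and right channels share one encoded payload (the case the comment above describes), an override can simply duplicate the data; a hypothetical sketch, written as a free function for brevity:

#include <stdint.h>
#include <string.h>

// Reuse the single payload for both channels by duplicating the bytes and
// doubling the reported length, so each channel's decoder instance receives
// identical data. Assumes the buffer can hold 2 * *payload_length bytes.
static void SplitSharedPayloadForStereo(uint8_t* payload,
                                        int32_t* payload_length) {
  memcpy(&payload[*payload_length], payload, *payload_length);
  *payload_length *= 2;
}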
///////////////////////////////////////////////////////////////////////////
// bool EncoderInitialized();
@@ -171,7 +171,7 @@
bool DecoderInitialized();
///////////////////////////////////////////////////////////////////////////
- // WebRtc_Word16 EncoderParams()
+ // int16_t EncoderParams()
// It is called to get encoder parameters. It will call
// EncoderParamsSafe() in turn.
//
@@ -184,10 +184,10 @@
// -1 if the encoder is not initialized,
// 0 otherwise.
//
- WebRtc_Word16 EncoderParams(WebRtcACMCodecParams *enc_params);
+ int16_t EncoderParams(WebRtcACMCodecParams *enc_params);
///////////////////////////////////////////////////////////////////////////
- // WebRtc_Word16 DecoderParams(...)
+ // int16_t DecoderParams(...)
// It is called to get decoder parameters. It will call DecoderParamsSafe()
// in turn.
//
@@ -202,10 +202,10 @@
//
//
bool DecoderParams(WebRtcACMCodecParams *dec_params,
- const WebRtc_UWord8 payload_type);
+ const uint8_t payload_type);
///////////////////////////////////////////////////////////////////////////
- // WebRtc_Word16 InitEncoder(...)
+ // int16_t InitEncoder(...)
// This function is called to initialize the encoder with the given
// parameters.
//
@@ -220,11 +220,11 @@
// -1 if failed to initialize.
//
//
- WebRtc_Word16 InitEncoder(WebRtcACMCodecParams* codec_params,
- bool force_initialization);
+ int16_t InitEncoder(WebRtcACMCodecParams* codec_params,
+ bool force_initialization);
///////////////////////////////////////////////////////////////////////////
- // WebRtc_Word16 InitDecoder()
+ // int16_t InitDecoder()
// This function is called to initialize the decoder with the given
// parameters. (c.f. acm_common_defs.h & common_types.h for the
// definition of the structure)
@@ -240,11 +240,11 @@
// -1 if failed to initialize.
//
//
- WebRtc_Word16 InitDecoder(WebRtcACMCodecParams* codec_params,
- bool force_initialization);
+ int16_t InitDecoder(WebRtcACMCodecParams* codec_params,
+ bool force_initialization);
///////////////////////////////////////////////////////////////////////////
- // WebRtc_Word32 RegisterInNetEq(...)
+ // int32_t RegisterInNetEq(...)
// This function is called to register the decoder in NetEq, with the given
// payload type.
//
@@ -256,10 +256,10 @@
// -1 if failed to register,
// 0 if successfully initialized.
//
- WebRtc_Word32 RegisterInNetEq(ACMNetEQ* neteq, const CodecInst& codec_inst);
+ int32_t RegisterInNetEq(ACMNetEQ* neteq, const CodecInst& codec_inst);
///////////////////////////////////////////////////////////////////////////
- // WebRtc_Word32 Add10MsData(...)
+ // int32_t Add10MsData(...)
// This function is called to add 10 ms of audio to the audio buffer of
// the codec.
//
@@ -277,13 +277,13 @@
// -1 if failed
// 0 otherwise.
//
- WebRtc_Word32 Add10MsData(const WebRtc_UWord32 timestamp,
- const WebRtc_Word16* data,
- const WebRtc_UWord16 length,
- const WebRtc_UWord8 audio_channel);
+ int32_t Add10MsData(const uint32_t timestamp,
+ const int16_t* data,
+ const uint16_t length,
+ const uint8_t audio_channel);
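A small usage sketch for Add10MsData(), assuming 16 kHz mono input (one 10 ms block is then 160 samples) and that the last argument is the channel count; the wrapper is illustrative:

// Push one 10 ms block of mono audio into the codec's input buffer.
int32_t PushTenMilliseconds(ACMGenericCodec* codec, const int16_t* pcm,
                            uint32_t timestamp) {
  const uint16_t kSamplesPer10Ms = 160;  // 10 ms at 16 kHz, one channel
  return codec->Add10MsData(timestamp, pcm, kSamplesPer10Ms, 1);
}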
///////////////////////////////////////////////////////////////////////////
- // WebRtc_UWord32 NoMissedSamples()
+ // uint32_t NoMissedSamples()
// This function returns the number of samples which are overwritten in
// the audio buffer. The audio samples are overwritten if the input audio
// buffer is full, but Add10MsData() is called. (We might remove this
@@ -292,7 +292,7 @@
// Return Value:
// Number of samples which are overwritten.
//
- WebRtc_UWord32 NoMissedSamples() const;
+ uint32_t NoMissedSamples() const;
///////////////////////////////////////////////////////////////////////////
// void ResetNoMissedSamples()
@@ -302,7 +302,7 @@
void ResetNoMissedSamples();
///////////////////////////////////////////////////////////////////////////
- // WebRtc_Word16 SetBitRate()
+ // int16_t SetBitRate()
// The function is called to set the encoding rate.
//
// Input:
@@ -313,7 +313,7 @@
// codec is not rate-adjustable.
// 0 if the rate is adjusted successfully
//
- WebRtc_Word16 SetBitRate(const WebRtc_Word32 bitrate_bps);
+ int16_t SetBitRate(const int32_t bitrate_bps);
///////////////////////////////////////////////////////////////////////////
// DestructEncoderInst()
@@ -328,7 +328,7 @@
void DestructEncoderInst(void* ptr_inst);
///////////////////////////////////////////////////////////////////////////
- // WebRtc_Word16 AudioBuffer()
+ // int16_t AudioBuffer()
// This is used when synchronization of codecs is required. There are cases
// where the audio buffers of two codecs have to be synched. By calling this
// function one can get the audio buffer and other related parameters, such
@@ -342,20 +342,20 @@
// -1 if fails to copy the audio buffer,
// 0 if succeeded.
//
- WebRtc_Word16 AudioBuffer(WebRtcACMAudioBuff& audio_buff);
+ int16_t AudioBuffer(WebRtcACMAudioBuff& audio_buff);
///////////////////////////////////////////////////////////////////////////
- // WebRtc_UWord32 EarliestTimestamp()
+ // uint32_t EarliestTimestamp()
// Returns the timestamp of the first 10 ms in audio buffer. This is used
// to identify if a synchronization of two encoders is required.
//
// Return value:
// timestamp of the first 10 ms audio in the audio buffer.
//
- WebRtc_UWord32 EarliestTimestamp() const;
+ uint32_t EarliestTimestamp() const;
///////////////////////////////////////////////////////////////////////////
- // WebRtc_Word16 SetAudioBuffer()
+ // int16_t SetAudioBuffer()
// This function is called to set the audio buffer and the associated
// parameters to a given value.
//
@@ -363,10 +363,10 @@
// -1 if fails to copy the audio buffer,
// 0 if succeeded.
//
- WebRtc_Word16 SetAudioBuffer(WebRtcACMAudioBuff& audio_buff);
+ int16_t SetAudioBuffer(WebRtcACMAudioBuff& audio_buff);
///////////////////////////////////////////////////////////////////////////
- // WebRtc_Word16 SetVAD()
+ // int16_t SetVAD()
// This is called to set VAD & DTX. If the codec has internal DTX that will
// be used. If DTX is enabled and the codec does not have internal DTX,
// WebRtc-VAD will be used to decide if the frame is active. If DTX is
@@ -392,12 +392,12 @@
// -1 if failed to set DTX & VAD as specified,
// 0 if succeeded.
//
- WebRtc_Word16 SetVAD(const bool enable_dtx = true,
- const bool enable_vad = false,
- const ACMVADMode mode = VADNormal);
+ int16_t SetVAD(const bool enable_dtx = true,
+ const bool enable_vad = false,
+ const ACMVADMode mode = VADNormal);
///////////////////////////////////////////////////////////////////////////
- // WebRtc_Word32 ReplaceInternalDTX()
+ // int32_t ReplaceInternalDTX()
// This is called to replace the codec internal DTX with WebRtc DTX.
// This is only valid for G729 where the user has the possibility to replace
// AnnexB with WebRtc DTX. For other codecs this function has no effect.
@@ -409,10 +409,10 @@
// -1 if failed to replace internal DTX,
// 0 if succeeded.
//
- WebRtc_Word32 ReplaceInternalDTX(const bool replace_internal_dtx);
+ int32_t ReplaceInternalDTX(const bool replace_internal_dtx);
///////////////////////////////////////////////////////////////////////////
- // WebRtc_Word32 IsInternalDTXReplaced()
+ // int32_t IsInternalDTXReplaced()
// This is called to check if the codec internal DTX is replaced by WebRtc
// DTX. This is only valid for G729 where the user has possibility to replace
// AnnexB with WebRtc DTX. For other codecs this function has no effect.
@@ -424,7 +424,7 @@
// -1 if failed to check
// 0 if succeeded.
//
- WebRtc_Word32 IsInternalDTXReplaced(bool* internal_dtx_replaced);
+ int32_t IsInternalDTXReplaced(bool* internal_dtx_replaced);
///////////////////////////////////////////////////////////////////////////
// void SetNetEqDecodeLock()
@@ -450,7 +450,7 @@
}
///////////////////////////////////////////////////////////////////////////
- // WebRtc_Word32 GetEstimatedBandwidth()
+ // int32_t GetEstimatedBandwidth()
// Used to get decoder estimated bandwidth. Only iSAC will provide a value.
//
//
@@ -458,10 +458,10 @@
// -1 if fails to get decoder estimated bandwidth,
// >0 estimated bandwidth in bits/sec.
//
- WebRtc_Word32 GetEstimatedBandwidth();
+ int32_t GetEstimatedBandwidth();
///////////////////////////////////////////////////////////////////////////
- // WebRtc_Word32 SetEstimatedBandwidth()
+ // int32_t SetEstimatedBandwidth()
// Used to set estimated bandwidth sent out of band from the other side. Only
// iSAC will have use for the value.
//
@@ -472,10 +472,10 @@
// -1 if fails to set estimated bandwidth,
// 0 on success.
//
- WebRtc_Word32 SetEstimatedBandwidth(WebRtc_Word32 estimated_bandwidth);
+ int32_t SetEstimatedBandwidth(int32_t estimated_bandwidth);
///////////////////////////////////////////////////////////////////////////
- // WebRtc_Word32 GetRedPayload()
+ // int32_t GetRedPayload()
// Used to get codec specific RED payload (if such is implemented).
// Currently only done in iSAC.
//
@@ -487,11 +487,11 @@
// -1 if fails to get codec specific RED,
// 0 if succeeded.
//
- WebRtc_Word32 GetRedPayload(WebRtc_UWord8* red_payload,
- WebRtc_Word16* payload_bytes);
+ int32_t GetRedPayload(uint8_t* red_payload,
+ int16_t* payload_bytes);
///////////////////////////////////////////////////////////////////////////
- // WebRtc_Word16 ResetEncoder()
+ // int16_t ResetEncoder()
// By calling this function you would re-initialize the encoder with the
// current parameters. All the settings, e.g. VAD/DTX, frame-size... should
// remain unchanged. (In case of iSAC we don't want to lose BWE history.)
@@ -500,10 +500,10 @@
// -1 if failed,
// 0 if succeeded.
//
- WebRtc_Word16 ResetEncoder();
+ int16_t ResetEncoder();
///////////////////////////////////////////////////////////////////////////
- // WebRtc_Word16 ResetEncoder()
+ // int16_t ResetDecoder()
// By calling this function you would re-initialize the decoder with the
// current parameters.
//
@@ -511,7 +511,7 @@
// -1 if failed,
// 0 if succeeded.
//
- WebRtc_Word16 ResetDecoder(WebRtc_Word16 payload_type);
+ int16_t ResetDecoder(int16_t payload_type);
///////////////////////////////////////////////////////////////////////////
// void DestructEncoder()
@@ -533,22 +533,22 @@
void DestructDecoder();
///////////////////////////////////////////////////////////////////////////
- // WebRtc_Word16 SamplesLeftToEncode()
+ // int16_t SamplesLeftToEncode()
// Returns the number of samples required to be able to do encoding.
//
// Return value:
// Number of samples.
//
- WebRtc_Word16 SamplesLeftToEncode();
+ int16_t SamplesLeftToEncode();
///////////////////////////////////////////////////////////////////////////
- // WebRtc_UWord32 LastEncodedTimestamp()
+ // uint32_t LastEncodedTimestamp()
// Returns the timestamp of the last frame it encoded.
//
// Return value:
// Timestamp.
//
- WebRtc_UWord32 LastEncodedTimestamp() const;
+ uint32_t LastEncodedTimestamp() const;
///////////////////////////////////////////////////////////////////////////
// SetUniqueID()
@@ -557,7 +557,7 @@
// Input
// -id : A number to identify the codec.
//
- void SetUniqueID(const WebRtc_UWord32 id);
+ void SetUniqueID(const uint32_t id);
///////////////////////////////////////////////////////////////////////////
// IsAudioBufferFresh()
@@ -594,7 +594,7 @@
// 0 if succeeded in updating the decoder.
// -1 if failed to update.
//
- virtual WebRtc_Word16 UpdateDecoderSampFreq(WebRtc_Word16 /* codec_id */) {
+ virtual int16_t UpdateDecoderSampFreq(int16_t /* codec_id */) {
return 0;
}
@@ -614,8 +614,8 @@
// -1 if failed, or if this is meaningless for the given codec.
// 0 if succeeded.
//
- virtual WebRtc_Word16 UpdateEncoderSampFreq(
- WebRtc_UWord16 samp_freq_hz);
+ virtual int16_t UpdateEncoderSampFreq(
+ uint16_t samp_freq_hz);
///////////////////////////////////////////////////////////////////////////
// EncoderSampFreq()
@@ -629,10 +629,10 @@
// -1 if failed to output sampling rate.
// 0 if the sample rate is returned successfully.
//
- virtual WebRtc_Word16 EncoderSampFreq(WebRtc_UWord16& samp_freq_hz);
+ virtual int16_t EncoderSampFreq(uint16_t& samp_freq_hz);
///////////////////////////////////////////////////////////////////////////
- // WebRtc_Word32 ConfigISACBandwidthEstimator()
+ // int32_t ConfigISACBandwidthEstimator()
// Call this function to configure the bandwidth estimator of ISAC.
// During the adaptation of bit-rate, iSAC automatically adjusts the
// frame-size (either 30 or 60 ms) to save on RTP header. The initial
@@ -655,9 +655,9 @@
// -1 if failed to configure the bandwidth estimator,
// 0 if the configuration was successfully applied.
//
- virtual WebRtc_Word32 ConfigISACBandwidthEstimator(
- const WebRtc_UWord8 init_frame_size_msec,
- const WebRtc_UWord16 init_rate_bps,
+ virtual int32_t ConfigISACBandwidthEstimator(
+ const uint8_t init_frame_size_msec,
+ const uint16_t init_rate_bps,
const bool enforce_frame_size);
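A usage sketch for the iSAC-specific hook above, with illustrative values (30 ms initial frames at 32000 bps, frame size not enforced):

// Illustrative only; codecs other than iSAC are expected to fail this call.
int32_t SeedIsacBandwidthEstimator(ACMGenericCodec* codec) {
  return codec->ConfigISACBandwidthEstimator(30, 32000, false);
}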
///////////////////////////////////////////////////////////////////////////
@@ -675,8 +675,8 @@
// -1 if failed to set the maximum payload-size.
// 0 if the given length is set successfully.
//
- virtual WebRtc_Word32 SetISACMaxPayloadSize(
- const WebRtc_UWord16 max_payload_len_bytes);
+ virtual int32_t SetISACMaxPayloadSize(
+ const uint16_t max_payload_len_bytes);
///////////////////////////////////////////////////////////////////////////
// SetISACMaxRate()
@@ -693,7 +693,7 @@
// -1 if failed to set the maximum rate.
// 0 if the maximum rate is set successfully.
//
- virtual WebRtc_Word32 SetISACMaxRate(const WebRtc_UWord32 max_rate_bps);
+ virtual int32_t SetISACMaxRate(const uint32_t max_rate_bps);
///////////////////////////////////////////////////////////////////////////
// SaveDecoderParamS()
@@ -705,7 +705,7 @@
//
void SaveDecoderParam(const WebRtcACMCodecParams* codec_params);
- WebRtc_Word32 FrameSize() {
+ int32_t FrameSize() {
return frame_len_smpl_;
}
@@ -734,10 +734,10 @@
// -1 if an error occurs, otherwise the length of the payload (in Bytes)
// is returned.
//
- virtual WebRtc_Word16 REDPayloadISAC(const WebRtc_Word32 isac_rate,
- const WebRtc_Word16 isac_bw_estimate,
- WebRtc_UWord8* payload,
- WebRtc_Word16* payload_len_bytes);
+ virtual int16_t REDPayloadISAC(const int32_t isac_rate,
+ const int16_t isac_bw_estimate,
+ uint8_t* payload,
+ int16_t* payload_len_bytes);
///////////////////////////////////////////////////////////////////////////
// IsTrueStereoCodec()
@@ -771,33 +771,33 @@
// See Decode() for the description of function, input(s)/output(s) and
// return value.
//
- virtual WebRtc_Word16 DecodeSafe(WebRtc_UWord8* bitstream,
- WebRtc_Word16 bitstream_len_byte,
- WebRtc_Word16* audio,
- WebRtc_Word16* audio_samples,
- WebRtc_Word8* speech_type) = 0;
+ virtual int16_t DecodeSafe(uint8_t* bitstream,
+ int16_t bitstream_len_byte,
+ int16_t* audio,
+ int16_t* audio_samples,
+ int8_t* speech_type) = 0;
///////////////////////////////////////////////////////////////////////////
// See Add10MsSafe() for the description of function, input(s)/output(s)
// and return value.
//
- virtual WebRtc_Word32 Add10MsDataSafe(const WebRtc_UWord32 timestamp,
- const WebRtc_Word16* data,
- const WebRtc_UWord16 length,
- const WebRtc_UWord8 audio_channel);
+ virtual int32_t Add10MsDataSafe(const uint32_t timestamp,
+ const int16_t* data,
+ const uint16_t length,
+ const uint8_t audio_channel);
///////////////////////////////////////////////////////////////////////////
// See RegisterInNetEq() for the description of function,
// input(s)/output(s) and return value.
//
- virtual WebRtc_Word32 CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst) = 0;
+ virtual int32_t CodecDef(WebRtcNetEQ_CodecDef& codec_def,
+ const CodecInst& codec_inst) = 0;
///////////////////////////////////////////////////////////////////////////
// See EncoderParam() for the description of function, input(s)/output(s)
// and return value.
//
- WebRtc_Word16 EncoderParamsSafe(WebRtcACMCodecParams *enc_params);
+ int16_t EncoderParamsSafe(WebRtcACMCodecParams *enc_params);
///////////////////////////////////////////////////////////////////////////
// See DecoderParam for the description of function, input(s)/output(s)
@@ -814,33 +814,33 @@
// with a single codec instance.
//
virtual bool DecoderParamsSafe(WebRtcACMCodecParams *dec_params,
- const WebRtc_UWord8 payload_type);
+ const uint8_t payload_type);
///////////////////////////////////////////////////////////////////////////
// See ResetEncoder() for the description of function, input(s)/output(s)
// and return value.
//
- WebRtc_Word16 ResetEncoderSafe();
+ int16_t ResetEncoderSafe();
///////////////////////////////////////////////////////////////////////////
// See InitEncoder() for the description of function, input(s)/output(s)
// and return value.
//
- WebRtc_Word16 InitEncoderSafe(WebRtcACMCodecParams *codec_params,
- bool force_initialization);
+ int16_t InitEncoderSafe(WebRtcACMCodecParams *codec_params,
+ bool force_initialization);
///////////////////////////////////////////////////////////////////////////
// See InitDecoder() for the description of function, input(s)/output(s)
// and return value.
//
- WebRtc_Word16 InitDecoderSafe(WebRtcACMCodecParams *codec_params,
- bool force_initialization);
+ int16_t InitDecoderSafe(WebRtcACMCodecParams *codec_params,
+ bool force_initialization);
///////////////////////////////////////////////////////////////////////////
// See ResetDecoder() for the description of function, input(s)/output(s)
// and return value.
//
- WebRtc_Word16 ResetDecoderSafe(WebRtc_Word16 payload_type);
+ int16_t ResetDecoderSafe(int16_t payload_type);
///////////////////////////////////////////////////////////////////////////
// See DestructEncoder() for the description of function,
@@ -860,70 +860,70 @@
//
// Any codec that can change the bit-rate has to implement this.
//
- virtual WebRtc_Word16 SetBitRateSafe(const WebRtc_Word32 bitrate_bps);
+ virtual int16_t SetBitRateSafe(const int32_t bitrate_bps);
///////////////////////////////////////////////////////////////////////////
// See GetEstimatedBandwidth() for the description of function,
// input(s)/output(s) and return value.
//
- virtual WebRtc_Word32 GetEstimatedBandwidthSafe();
+ virtual int32_t GetEstimatedBandwidthSafe();
///////////////////////////////////////////////////////////////////////////
// See SetEstimatedBandwidth() for the description of function,
// input(s)/output(s) and return value.
//
- virtual WebRtc_Word32 SetEstimatedBandwidthSafe(
- WebRtc_Word32 estimated_bandwidth);
+ virtual int32_t SetEstimatedBandwidthSafe(
+ int32_t estimated_bandwidth);
///////////////////////////////////////////////////////////////////////////
// See GetRedPayload() for the description of function, input(s)/output(s)
// and return value.
//
- virtual WebRtc_Word32 GetRedPayloadSafe(WebRtc_UWord8* red_payload,
- WebRtc_Word16* payload_bytes);
+ virtual int32_t GetRedPayloadSafe(uint8_t* red_payload,
+ int16_t* payload_bytes);
///////////////////////////////////////////////////////////////////////////
// See SetVAD() for the description of function, input(s)/output(s) and
// return value.
//
- WebRtc_Word16 SetVADSafe(const bool enable_dtx = true,
- const bool enable_vad = false,
- const ACMVADMode mode = VADNormal);
+ int16_t SetVADSafe(const bool enable_dtx = true,
+ const bool enable_vad = false,
+ const ACMVADMode mode = VADNormal);
///////////////////////////////////////////////////////////////////////////
// See ReplaceInternalDTX() for the description of function, input and
// return value.
//
- virtual WebRtc_Word32 ReplaceInternalDTXSafe(const bool replace_internal_dtx);
+ virtual int32_t ReplaceInternalDTXSafe(const bool replace_internal_dtx);
///////////////////////////////////////////////////////////////////////////
// See IsInternalDTXReplaced() for the description of function, input and
// return value.
//
- virtual WebRtc_Word32 IsInternalDTXReplacedSafe(bool* internal_dtx_replaced);
+ virtual int32_t IsInternalDTXReplacedSafe(bool* internal_dtx_replaced);
///////////////////////////////////////////////////////////////////////////
- // WebRtc_Word16 CreateEncoder()
+ // int16_t CreateEncoder()
// Creates the encoder instance.
//
// Return value:
// -1 if failed,
// 0 if succeeded.
//
- WebRtc_Word16 CreateEncoder();
+ int16_t CreateEncoder();
///////////////////////////////////////////////////////////////////////////
- // WebRtc_Word16 CreateDecoder()
+ // int16_t CreateDecoder()
// Creates the decoder instance.
//
// Return value:
// -1 if failed,
// 0 if succeeded.
//
- WebRtc_Word16 CreateDecoder();
+ int16_t CreateDecoder();
///////////////////////////////////////////////////////////////////////////
- // WebRtc_Word16 EnableVAD();
+ // int16_t EnableVAD();
// Enables VAD with the given mode. The VAD instance will be created if
// it does not exist.
//
@@ -935,20 +935,20 @@
// -1 if failed,
// 0 if succeeded.
//
- WebRtc_Word16 EnableVAD(ACMVADMode mode);
+ int16_t EnableVAD(ACMVADMode mode);
///////////////////////////////////////////////////////////////////////////
- // WebRtc_Word16 DisableVAD()
+ // int16_t DisableVAD()
// Disables VAD.
//
// Return value:
// -1 if failed,
// 0 if succeeded.
//
- WebRtc_Word16 DisableVAD();
+ int16_t DisableVAD();
///////////////////////////////////////////////////////////////////////////
- // WebRtc_Word16 EnableDTX()
+ // int16_t EnableDTX()
// Enables DTX. This method should be overwritten for codecs which have
// internal DTX.
//
@@ -956,10 +956,10 @@
// -1 if failed,
// 0 if succeeded.
//
- virtual WebRtc_Word16 EnableDTX();
+ virtual int16_t EnableDTX();
///////////////////////////////////////////////////////////////////////////
- // WebRtc_Word16 DisableDTX()
+ // int16_t DisableDTX()
// Disables usage of DTX. This method should be overwritten for codecs which
// have internal DTX.
//
@@ -967,10 +967,10 @@
// -1 if failed,
// 0 if succeeded.
//
- virtual WebRtc_Word16 DisableDTX();
+ virtual int16_t DisableDTX();
///////////////////////////////////////////////////////////////////////////
- // WebRtc_Word16 InternalEncode()
+ // int16_t InternalEncode()
// This is a codec-specific function called in EncodeSafe() to actually
// encode a frame of audio.
//
@@ -984,11 +984,11 @@
// -1 if failed,
// otherwise the length of the bit-stream is returned.
//
- virtual WebRtc_Word16 InternalEncode(WebRtc_UWord8* bitstream,
- WebRtc_Word16* bitstream_len_byte) = 0;
+ virtual int16_t InternalEncode(uint8_t* bitstream,
+ int16_t* bitstream_len_byte) = 0;
///////////////////////////////////////////////////////////////////////////
- // WebRtc_Word16 InternalInitEncoder()
+ // int16_t InternalInitEncoder()
// This is a codec-specific function called in InitEncoderSafe(); it has to
// do all codec-specific operations to initialize the encoder given the
// encoder parameters.
@@ -1006,11 +1006,11 @@
// -1 if failed,
// 0 if succeeded.
//
- virtual WebRtc_Word16 InternalInitEncoder(
+ virtual int16_t InternalInitEncoder(
WebRtcACMCodecParams *codec_params) = 0;
///////////////////////////////////////////////////////////////////////////
- // WebRtc_Word16 InternalInitDecoder()
+ // int16_t InternalInitDecoder()
// This is a codec-specific function called in InitDecoderSafe(); it has to
// do all codec-specific operations to initialize the decoder given the
// decoder parameters.
@@ -1023,7 +1023,7 @@
// -1 if failed,
// 0 if succeeded.
//
- virtual WebRtc_Word16 InternalInitDecoder(
+ virtual int16_t InternalInitDecoder(
WebRtcACMCodecParams *codec_params) = 0;
///////////////////////////////////////////////////////////////////////////
@@ -1035,10 +1035,10 @@
// -num_samples : the number of overwritten samples is incremented
// by this value.
//
- void IncreaseNoMissedSamples(const WebRtc_Word16 num_samples);
+ void IncreaseNoMissedSamples(const int16_t num_samples);
///////////////////////////////////////////////////////////////////////////
- // WebRtc_Word16 InternalCreateEncoder()
+ // int16_t InternalCreateEncoder()
// This is a codec-specific method called in CreateEncoderSafe(); it is
// supposed to perform all codec-specific operations to create encoder
// instance.
@@ -1047,10 +1047,10 @@
// -1 if failed,
// 0 if succeeded.
//
- virtual WebRtc_Word16 InternalCreateEncoder() = 0;
+ virtual int16_t InternalCreateEncoder() = 0;
///////////////////////////////////////////////////////////////////////////
- // WebRtc_Word16 InternalCreateDecoder()
+ // int16_t InternalCreateDecoder()
// This is a codec-specific method called in CreateDecoderSafe(); it is
// supposed to perform all codec-specific operations to create decoder
// instance.
@@ -1059,7 +1059,7 @@
// -1 if failed,
// 0 if succeeded.
//
- virtual WebRtc_Word16 InternalCreateDecoder() = 0;
+ virtual int16_t InternalCreateDecoder() = 0;
///////////////////////////////////////////////////////////////////////////
// void InternalDestructEncoderInst()
@@ -1079,7 +1079,7 @@
virtual void InternalDestructEncoderInst(void* ptr_inst) = 0;
///////////////////////////////////////////////////////////////////////////
- // WebRtc_Word16 InternalResetEncoder()
+ // int16_t InternalResetEncoder()
// This method is called to reset the states of encoder. However, the
// current parameters, e.g. frame-length, should remain as they are. For
// most of the codecs a re-initialization of the encoder is what needs to
@@ -1091,10 +1091,10 @@
// -1 if failed,
// 0 if succeeded.
//
- virtual WebRtc_Word16 InternalResetEncoder();
+ virtual int16_t InternalResetEncoder();
///////////////////////////////////////////////////////////////////////////
- // WebRtc_Word16 ProcessFrameVADDTX()
+ // int16_t ProcessFrameVADDTX()
// This function is called when a full frame of audio is available. It will
// break the audio frame into blocks such that each block could be processed
// by VAD & CN/DTX. If a frame is divided into two blocks then there are two
@@ -1123,9 +1123,9 @@
// -1 if failed,
// 0 if succeeded.
//
- WebRtc_Word16 ProcessFrameVADDTX(WebRtc_UWord8* bitstream,
- WebRtc_Word16* bitstream_len_byte,
- WebRtc_Word16* samples_processed);
+ int16_t ProcessFrameVADDTX(uint8_t* bitstream,
+ int16_t* bitstream_len_byte,
+ int16_t* samples_processed);
///////////////////////////////////////////////////////////////////////////
// CanChangeEncodingParam()
@@ -1148,7 +1148,7 @@
// Output:
// -rate_bps : the current target rate of the codec.
//
- virtual void CurrentRate(WebRtc_Word32& /* rate_bps */) {
+ virtual void CurrentRate(int32_t& /* rate_bps */) {
return;
}
@@ -1156,30 +1156,30 @@
// &in_audio_[in_audio_ix_write_] always points to where new audio can be
// written to
- WebRtc_Word16 in_audio_ix_write_;
+ int16_t in_audio_ix_write_;
// &in_audio_[in_audio_ix_read_] points to where audio has to be read from
- WebRtc_Word16 in_audio_ix_read_;
+ int16_t in_audio_ix_read_;
- WebRtc_Word16 in_timestamp_ix_write_;
+ int16_t in_timestamp_ix_write_;
// Where the audio is stored before encoding,
// To save memory the following buffer can be allocated
// dynamically for 80 ms depending on the sampling frequency
// of the codec.
- WebRtc_Word16* in_audio_;
- WebRtc_UWord32* in_timestamp_;
+ int16_t* in_audio_;
+ uint32_t* in_timestamp_;
- WebRtc_Word16 frame_len_smpl_;
- WebRtc_UWord16 num_channels_;
+ int16_t frame_len_smpl_;
+ uint16_t num_channels_;
// This will point to a static database of the supported codecs
- WebRtc_Word16 codec_id_;
+ int16_t codec_id_;
// This will account for the number of samples that were not encoded.
// The case is rare: either samples are missed due to overwrite
// at the input buffer or due to an encoding error.
- WebRtc_UWord32 num_missed_samples_;
+ uint32_t num_missed_samples_;
// True if the encoder instance created
bool encoder_exist_;
@@ -1195,10 +1195,10 @@
WebRtcVadInst* ptr_vad_inst_;
bool vad_enabled_;
ACMVADMode vad_mode_;
- WebRtc_Word16 vad_label_[MAX_FRAME_SIZE_10MSEC];
+ int16_t vad_label_[MAX_FRAME_SIZE_10MSEC];
bool dtx_enabled_;
WebRtcCngEncInst* ptr_dtx_inst_;
- WebRtc_UWord8 num_lpc_params_;
+ uint8_t num_lpc_params_;
bool sent_cn_previous_;
bool is_master_;
int16_t prev_frame_cng_;
@@ -1213,10 +1213,10 @@
// such as buffers and state variables.
RWLockWrapper& codec_wrapper_lock_;
- WebRtc_UWord32 last_encoded_timestamp_;
- WebRtc_UWord32 last_timestamp_;
+ uint32_t last_encoded_timestamp_;
+ uint32_t last_timestamp_;
bool is_audio_buff_fresh_;
- WebRtc_UWord32 unique_id_;
+ uint32_t unique_id_;
};
} // namespace webrtc
diff --git a/webrtc/modules/audio_coding/main/source/acm_gsmfr.cc b/webrtc/modules/audio_coding/main/source/acm_gsmfr.cc
index 5b59ed3..22bbbd8 100644
--- a/webrtc/modules/audio_coding/main/source/acm_gsmfr.cc
+++ b/webrtc/modules/audio_coding/main/source/acm_gsmfr.cc
@@ -26,7 +26,7 @@
#ifndef WEBRTC_CODEC_GSMFR
-ACMGSMFR::ACMGSMFR(WebRtc_Word16 /* codec_id */)
+ACMGSMFR::ACMGSMFR(int16_t /* codec_id */)
: encoder_inst_ptr_(NULL),
decoder_inst_ptr_(NULL) {
return;
@@ -36,40 +36,40 @@
return;
}
-WebRtc_Word16 ACMGSMFR::InternalEncode(
- WebRtc_UWord8* /* bitstream */,
- WebRtc_Word16* /* bitstream_len_byte */) {
+int16_t ACMGSMFR::InternalEncode(
+ uint8_t* /* bitstream */,
+ int16_t* /* bitstream_len_byte */) {
return -1;
}
-WebRtc_Word16 ACMGSMFR::DecodeSafe(WebRtc_UWord8* /* bitstream */,
- WebRtc_Word16 /* bitstream_len_byte */,
- WebRtc_Word16* /* audio */,
- WebRtc_Word16* /* audio_samples */,
- WebRtc_Word8* /* speech_type */) {
+int16_t ACMGSMFR::DecodeSafe(uint8_t* /* bitstream */,
+ int16_t /* bitstream_len_byte */,
+ int16_t* /* audio */,
+ int16_t* /* audio_samples */,
+ int8_t* /* speech_type */) {
return -1;
}
-WebRtc_Word16 ACMGSMFR::EnableDTX() {
+int16_t ACMGSMFR::EnableDTX() {
return -1;
}
-WebRtc_Word16 ACMGSMFR::DisableDTX() {
+int16_t ACMGSMFR::DisableDTX() {
return -1;
}
-WebRtc_Word16 ACMGSMFR::InternalInitEncoder(
+int16_t ACMGSMFR::InternalInitEncoder(
WebRtcACMCodecParams* /* codec_params */) {
return -1;
}
-WebRtc_Word16 ACMGSMFR::InternalInitDecoder(
+int16_t ACMGSMFR::InternalInitDecoder(
WebRtcACMCodecParams* /* codec_params */) {
return -1;
}
-WebRtc_Word32 ACMGSMFR::CodecDef(WebRtcNetEQ_CodecDef& /* codec_def */,
- const CodecInst& /* codec_inst */) {
+int32_t ACMGSMFR::CodecDef(WebRtcNetEQ_CodecDef& /* codec_def */,
+ const CodecInst& /* codec_inst */) {
return -1;
}
@@ -77,7 +77,7 @@
return NULL;
}
-WebRtc_Word16 ACMGSMFR::InternalCreateEncoder() {
+int16_t ACMGSMFR::InternalCreateEncoder() {
return -1;
}
@@ -85,7 +85,7 @@
return;
}
-WebRtc_Word16 ACMGSMFR::InternalCreateDecoder() {
+int16_t ACMGSMFR::InternalCreateDecoder() {
return -1;
}
@@ -99,7 +99,7 @@
#else //===================== Actual Implementation =======================
-ACMGSMFR::ACMGSMFR(WebRtc_Word16 codec_id)
+ACMGSMFR::ACMGSMFR(int16_t codec_id)
: encoder_inst_ptr_(NULL),
decoder_inst_ptr_(NULL) {
codec_id_ = codec_id;
@@ -119,27 +119,27 @@
return;
}
-WebRtc_Word16 ACMGSMFR::InternalEncode(WebRtc_UWord8* bitstream,
- WebRtc_Word16* bitstream_len_byte) {
+int16_t ACMGSMFR::InternalEncode(uint8_t* bitstream,
+ int16_t* bitstream_len_byte) {
*bitstream_len_byte = WebRtcGSMFR_Encode(encoder_inst_ptr_,
&in_audio_[in_audio_ix_read_],
frame_len_smpl_,
- (WebRtc_Word16*)bitstream);
+ (int16_t*)bitstream);
// Increment the read index. This tells the caller how far
// we have gone forward in reading the audio buffer.
in_audio_ix_read_ += frame_len_smpl_;
return *bitstream_len_byte;
}
-WebRtc_Word16 ACMGSMFR::DecodeSafe(WebRtc_UWord8* /* bitstream */,
- WebRtc_Word16 /* bitstream_len_byte */,
- WebRtc_Word16* /* audio */,
- WebRtc_Word16* /* audio_samples */,
- WebRtc_Word8* /* speech_type */) {
+int16_t ACMGSMFR::DecodeSafe(uint8_t* /* bitstream */,
+ int16_t /* bitstream_len_byte */,
+ int16_t* /* audio */,
+ int16_t* /* audio_samples */,
+ int8_t* /* speech_type */) {
return 0;
}
-WebRtc_Word16 ACMGSMFR::EnableDTX() {
+int16_t ACMGSMFR::EnableDTX() {
if (dtx_enabled_) {
return 0;
} else if (encoder_exist_) {
@@ -155,7 +155,7 @@
}
}
-WebRtc_Word16 ACMGSMFR::DisableDTX() {
+int16_t ACMGSMFR::DisableDTX() {
if (!dtx_enabled_) {
return 0;
} else if (encoder_exist_) {
@@ -172,7 +172,7 @@
}
}
-WebRtc_Word16 ACMGSMFR::InternalInitEncoder(
+int16_t ACMGSMFR::InternalInitEncoder(
WebRtcACMCodecParams* codec_params) {
if (WebRtcGSMFR_EncoderInit(encoder_inst_ptr_,
((codec_params->enable_dtx) ? 1 : 0)) < 0) {
@@ -182,7 +182,7 @@
return 0;
}
-WebRtc_Word16 ACMGSMFR::InternalInitDecoder(
+int16_t ACMGSMFR::InternalInitDecoder(
WebRtcACMCodecParams* /* codec_params */) {
if (WebRtcGSMFR_DecoderInit(decoder_inst_ptr_) < 0) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
@@ -192,8 +192,8 @@
return 0;
}
-WebRtc_Word32 ACMGSMFR::CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst) {
+int32_t ACMGSMFR::CodecDef(WebRtcNetEQ_CodecDef& codec_def,
+ const CodecInst& codec_inst) {
if (!decoder_initialized_) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
"CodecDef: decoder is not initialized for GSMFR");
@@ -213,7 +213,7 @@
return NULL;
}
-WebRtc_Word16 ACMGSMFR::InternalCreateEncoder() {
+int16_t ACMGSMFR::InternalCreateEncoder() {
if (WebRtcGSMFR_CreateEnc(&encoder_inst_ptr_) < 0) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
"InternalCreateEncoder: cannot create instance for GSMFR "
@@ -232,7 +232,7 @@
encoder_initialized_ = false;
}
-WebRtc_Word16 ACMGSMFR::InternalCreateDecoder() {
+int16_t ACMGSMFR::InternalCreateDecoder() {
if (WebRtcGSMFR_CreateDec(&decoder_inst_ptr_) < 0) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
"InternalCreateDecoder: cannot create instance for GSMFR "
diff --git a/webrtc/modules/audio_coding/main/source/acm_gsmfr.h b/webrtc/modules/audio_coding/main/source/acm_gsmfr.h
index b841711..61f5768 100644
--- a/webrtc/modules/audio_coding/main/source/acm_gsmfr.h
+++ b/webrtc/modules/audio_coding/main/source/acm_gsmfr.h
@@ -21,42 +21,42 @@
class ACMGSMFR : public ACMGenericCodec {
public:
- explicit ACMGSMFR(WebRtc_Word16 codec_id);
+ explicit ACMGSMFR(int16_t codec_id);
~ACMGSMFR();
// for FEC
ACMGenericCodec* CreateInstance(void);
- WebRtc_Word16 InternalEncode(WebRtc_UWord8* bitstream,
- WebRtc_Word16* bitstream_len_byte);
+ int16_t InternalEncode(uint8_t* bitstream,
+ int16_t* bitstream_len_byte);
- WebRtc_Word16 InternalInitEncoder(WebRtcACMCodecParams *codec_params);
+ int16_t InternalInitEncoder(WebRtcACMCodecParams *codec_params);
- WebRtc_Word16 InternalInitDecoder(WebRtcACMCodecParams *codec_params);
+ int16_t InternalInitDecoder(WebRtcACMCodecParams *codec_params);
protected:
- WebRtc_Word16 DecodeSafe(WebRtc_UWord8* bitstream,
- WebRtc_Word16 bitstream_len_byte,
- WebRtc_Word16* audio,
- WebRtc_Word16* audio_samples,
- WebRtc_Word8* speech_type);
+ int16_t DecodeSafe(uint8_t* bitstream,
+ int16_t bitstream_len_byte,
+ int16_t* audio,
+ int16_t* audio_samples,
+ int8_t* speech_type);
- WebRtc_Word32 CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst);
+ int32_t CodecDef(WebRtcNetEQ_CodecDef& codec_def,
+ const CodecInst& codec_inst);
void DestructEncoderSafe();
void DestructDecoderSafe();
- WebRtc_Word16 InternalCreateEncoder();
+ int16_t InternalCreateEncoder();
- WebRtc_Word16 InternalCreateDecoder();
+ int16_t InternalCreateDecoder();
void InternalDestructEncoderInst(void* ptr_inst);
- WebRtc_Word16 EnableDTX();
+ int16_t EnableDTX();
- WebRtc_Word16 DisableDTX();
+ int16_t DisableDTX();
GSMFR_encinst_t_* encoder_inst_ptr_;
GSMFR_decinst_t_* decoder_inst_ptr_;
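Every hunk in these wrapper classes is the same mechanical substitution: the legacy WebRtc_Word*/WebRtc_UWord* names are replaced by the <stdint.h> fixed-width types. A minimal compile-time sketch of the presumed one-to-one mapping follows; the typedef definitions are assumed for illustration (they presumably lived in typedefs.h) and are not quoted from this change.

#include <cstdint>

// Assumed legacy aliases, shown only to illustrate that the rename is textual.
typedef int8_t   WebRtc_Word8;
typedef int16_t  WebRtc_Word16;
typedef int32_t  WebRtc_Word32;
typedef uint8_t  WebRtc_UWord8;
typedef uint16_t WebRtc_UWord16;
typedef uint32_t WebRtc_UWord32;

// If the aliases really are one-to-one, the substitution is ABI-neutral.
static_assert(sizeof(WebRtc_Word16) == sizeof(int16_t), "16-bit width must match");
static_assert(sizeof(WebRtc_UWord32) == sizeof(uint32_t), "32-bit width must match");

int main() { return 0; }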
diff --git a/webrtc/modules/audio_coding/main/source/acm_ilbc.cc b/webrtc/modules/audio_coding/main/source/acm_ilbc.cc
index 963201b..a2a294e 100644
--- a/webrtc/modules/audio_coding/main/source/acm_ilbc.cc
+++ b/webrtc/modules/audio_coding/main/source/acm_ilbc.cc
@@ -23,7 +23,7 @@
#ifndef WEBRTC_CODEC_ILBC
-ACMILBC::ACMILBC(WebRtc_Word16 /* codec_id */)
+ACMILBC::ACMILBC(int16_t /* codec_id */)
: encoder_inst_ptr_(NULL),
decoder_inst_ptr_(NULL) {
return;
@@ -33,32 +33,32 @@
return;
}
-WebRtc_Word16 ACMILBC::InternalEncode(
- WebRtc_UWord8* /* bitstream */,
- WebRtc_Word16* /* bitstream_len_byte */) {
+int16_t ACMILBC::InternalEncode(
+ uint8_t* /* bitstream */,
+ int16_t* /* bitstream_len_byte */) {
return -1;
}
-WebRtc_Word16 ACMILBC::DecodeSafe(WebRtc_UWord8* /* bitstream */,
- WebRtc_Word16 /* bitstream_len_byte */,
- WebRtc_Word16* /* audio */,
- WebRtc_Word16* /* audio_samples */,
- WebRtc_Word8* /* speech_type */) {
+int16_t ACMILBC::DecodeSafe(uint8_t* /* bitstream */,
+ int16_t /* bitstream_len_byte */,
+ int16_t* /* audio */,
+ int16_t* /* audio_samples */,
+ int8_t* /* speech_type */) {
return -1;
}
-WebRtc_Word16 ACMILBC::InternalInitEncoder(
+int16_t ACMILBC::InternalInitEncoder(
WebRtcACMCodecParams* /* codec_params */) {
return -1;
}
-WebRtc_Word16 ACMILBC::InternalInitDecoder(
+int16_t ACMILBC::InternalInitDecoder(
WebRtcACMCodecParams* /* codec_params */) {
return -1;
}
-WebRtc_Word32 ACMILBC::CodecDef(WebRtcNetEQ_CodecDef& /* codec_def */,
- const CodecInst& /* codec_inst */) {
+int32_t ACMILBC::CodecDef(WebRtcNetEQ_CodecDef& /* codec_def */,
+ const CodecInst& /* codec_inst */) {
return -1;
}
@@ -66,7 +66,7 @@
return NULL;
}
-WebRtc_Word16 ACMILBC::InternalCreateEncoder() {
+int16_t ACMILBC::InternalCreateEncoder() {
return -1;
}
@@ -74,7 +74,7 @@
return;
}
-WebRtc_Word16 ACMILBC::InternalCreateDecoder() {
+int16_t ACMILBC::InternalCreateDecoder() {
return -1;
}
@@ -86,13 +86,13 @@
return;
}
-WebRtc_Word16 ACMILBC::SetBitRateSafe(const WebRtc_Word32 /* rate */) {
+int16_t ACMILBC::SetBitRateSafe(const int32_t /* rate */) {
return -1;
}
#else //===================== Actual Implementation =======================
-ACMILBC::ACMILBC(WebRtc_Word16 codec_id)
+ACMILBC::ACMILBC(int16_t codec_id)
: encoder_inst_ptr_(NULL),
decoder_inst_ptr_(NULL) {
codec_id_ = codec_id;
@@ -111,12 +111,12 @@
return;
}
-WebRtc_Word16 ACMILBC::InternalEncode(WebRtc_UWord8* bitstream,
- WebRtc_Word16* bitstream_len_byte) {
+int16_t ACMILBC::InternalEncode(uint8_t* bitstream,
+ int16_t* bitstream_len_byte) {
*bitstream_len_byte = WebRtcIlbcfix_Encode(encoder_inst_ptr_,
&in_audio_[in_audio_ix_read_],
frame_len_smpl_,
- (WebRtc_Word16*)bitstream);
+ (int16_t*)bitstream);
if (*bitstream_len_byte < 0) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
"InternalEncode: error in encode for ILBC");
@@ -128,15 +128,15 @@
return *bitstream_len_byte;
}
-WebRtc_Word16 ACMILBC::DecodeSafe(WebRtc_UWord8* /* bitstream */,
- WebRtc_Word16 /* bitstream_len_byte */,
- WebRtc_Word16* /* audio */,
- WebRtc_Word16* /* audio_samples */,
- WebRtc_Word8* /* speech_type */) {
+int16_t ACMILBC::DecodeSafe(uint8_t* /* bitstream */,
+ int16_t /* bitstream_len_byte */,
+ int16_t* /* audio */,
+ int16_t* /* audio_samples */,
+ int8_t* /* speech_type */) {
return 0;
}
-WebRtc_Word16 ACMILBC::InternalInitEncoder(WebRtcACMCodecParams* codec_params) {
+int16_t ACMILBC::InternalInitEncoder(WebRtcACMCodecParams* codec_params) {
// initialize with a correct processing block length
if ((160 == (codec_params->codec_inst).pacsize) ||
(320 == (codec_params->codec_inst).pacsize)) {
@@ -153,7 +153,7 @@
}
}
-WebRtc_Word16 ACMILBC::InternalInitDecoder(WebRtcACMCodecParams* codec_params) {
+int16_t ACMILBC::InternalInitDecoder(WebRtcACMCodecParams* codec_params) {
// initialize with a correct processing block length
if ((160 == (codec_params->codec_inst).pacsize) ||
(320 == (codec_params->codec_inst).pacsize)) {
@@ -170,8 +170,8 @@
}
}
-WebRtc_Word32 ACMILBC::CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst) {
+int32_t ACMILBC::CodecDef(WebRtcNetEQ_CodecDef& codec_def,
+ const CodecInst& codec_inst) {
if (!decoder_initialized_) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
"CodeDef: decoder not initialized for ILBC");
@@ -191,7 +191,7 @@
return NULL;
}
-WebRtc_Word16 ACMILBC::InternalCreateEncoder() {
+int16_t ACMILBC::InternalCreateEncoder() {
if (WebRtcIlbcfix_EncoderCreate(&encoder_inst_ptr_) < 0) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
"InternalCreateEncoder: cannot create instance for ILBC "
@@ -210,7 +210,7 @@
}
}
-WebRtc_Word16 ACMILBC::InternalCreateDecoder() {
+int16_t ACMILBC::InternalCreateDecoder() {
if (WebRtcIlbcfix_DecoderCreate(&decoder_inst_ptr_) < 0) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
"InternalCreateDecoder: cannot create instance for ILBC "
@@ -236,7 +236,7 @@
return;
}
-WebRtc_Word16 ACMILBC::SetBitRateSafe(const WebRtc_Word32 rate) {
+int16_t ACMILBC::SetBitRateSafe(const int32_t rate) {
// Check that rate is valid. No need to store the value
if (rate == 13300) {
WebRtcIlbcfix_EncoderInit(encoder_inst_ptr_, 30);
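The SetBitRateSafe() hunk above keeps iLBC's rate check: 13300 bit/s selects 30 ms frames. A small sketch of the full rate-to-frame-size mapping follows; the 15200 bit/s / 20 ms branch is taken from RFC 3951 rather than from the lines shown here, and the helper name is illustrative, not part of the WebRTC API.

#include <cstdint>
#include <cstdio>

// Maps an iLBC target rate to its frame length in milliseconds (RFC 3951 modes).
int16_t IlbcFrameMsForRate(int32_t rate_bps) {
  if (rate_bps == 13300) return 30;  // 13.33 kbps mode, as in the hunk above
  if (rate_bps == 15200) return 20;  // 15.2 kbps mode (assumed counterpart)
  return -1;                         // any other rate is rejected
}

int main() {
  std::printf("13300 -> %d ms, 15200 -> %d ms\n",
              IlbcFrameMsForRate(13300), IlbcFrameMsForRate(15200));
  return 0;
}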
diff --git a/webrtc/modules/audio_coding/main/source/acm_ilbc.h b/webrtc/modules/audio_coding/main/source/acm_ilbc.h
index a61e34e..eb619f0 100644
--- a/webrtc/modules/audio_coding/main/source/acm_ilbc.h
+++ b/webrtc/modules/audio_coding/main/source/acm_ilbc.h
@@ -21,38 +21,38 @@
class ACMILBC : public ACMGenericCodec {
public:
- explicit ACMILBC(WebRtc_Word16 codec_id);
+ explicit ACMILBC(int16_t codec_id);
~ACMILBC();
// for FEC
ACMGenericCodec* CreateInstance(void);
- WebRtc_Word16 InternalEncode(WebRtc_UWord8* bitstream,
- WebRtc_Word16* bitstream_len_byte);
+ int16_t InternalEncode(uint8_t* bitstream,
+ int16_t* bitstream_len_byte);
- WebRtc_Word16 InternalInitEncoder(WebRtcACMCodecParams *codec_params);
+ int16_t InternalInitEncoder(WebRtcACMCodecParams *codec_params);
- WebRtc_Word16 InternalInitDecoder(WebRtcACMCodecParams *codec_params);
+ int16_t InternalInitDecoder(WebRtcACMCodecParams *codec_params);
protected:
- WebRtc_Word16 DecodeSafe(WebRtc_UWord8* bitstream,
- WebRtc_Word16 bitstream_len_byte,
- WebRtc_Word16* audio,
- WebRtc_Word16* audio_samples,
- WebRtc_Word8* speech_type);
+ int16_t DecodeSafe(uint8_t* bitstream,
+ int16_t bitstream_len_byte,
+ int16_t* audio,
+ int16_t* audio_samples,
+ int8_t* speech_type);
- WebRtc_Word32 CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst);
+ int32_t CodecDef(WebRtcNetEQ_CodecDef& codec_def,
+ const CodecInst& codec_inst);
- WebRtc_Word16 SetBitRateSafe(const WebRtc_Word32 rate);
+ int16_t SetBitRateSafe(const int32_t rate);
void DestructEncoderSafe();
void DestructDecoderSafe();
- WebRtc_Word16 InternalCreateEncoder();
+ int16_t InternalCreateEncoder();
- WebRtc_Word16 InternalCreateDecoder();
+ int16_t InternalCreateDecoder();
void InternalDestructEncoderInst(void* ptr_inst);
diff --git a/webrtc/modules/audio_coding/main/source/acm_isac.cc b/webrtc/modules/audio_coding/main/source/acm_isac.cc
index defcb8b..e22d3f6 100644
--- a/webrtc/modules/audio_coding/main/source/acm_isac.cc
+++ b/webrtc/modules/audio_coding/main/source/acm_isac.cc
@@ -41,14 +41,14 @@
// Tables for bandwidth estimates
#define NR_ISAC_BANDWIDTHS 24
-static const WebRtc_Word32 kIsacRatesWb[NR_ISAC_BANDWIDTHS] = {
+static const int32_t kIsacRatesWb[NR_ISAC_BANDWIDTHS] = {
10000, 11100, 12300, 13700, 15200, 16900,
18800, 20900, 23300, 25900, 28700, 31900,
10100, 11200, 12400, 13800, 15300, 17000,
18900, 21000, 23400, 26000, 28800, 32000
};
-static const WebRtc_Word32 kIsacRatesSwb[NR_ISAC_BANDWIDTHS] = {
+static const int32_t kIsacRatesSwb[NR_ISAC_BANDWIDTHS] = {
10000, 11000, 12400, 13800, 15300, 17000,
18900, 21000, 23200, 25400, 27600, 29800,
32000, 34100, 36300, 38500, 40700, 42900,
@@ -57,7 +57,7 @@
#if (!defined(WEBRTC_CODEC_ISAC) && !defined(WEBRTC_CODEC_ISACFX))
-ACMISAC::ACMISAC(WebRtc_Word16 /* codec_id */)
+ACMISAC::ACMISAC(int16_t /* codec_id */)
: codec_inst_ptr_(NULL),
is_enc_initialized_(false),
isac_coding_mode_(CHANNEL_INDEPENDENT),
@@ -79,31 +79,31 @@
return NULL;
}
-WebRtc_Word16 ACMISAC::InternalEncode(
- WebRtc_UWord8* /* bitstream */,
- WebRtc_Word16* /* bitstream_len_byte */) {
+int16_t ACMISAC::InternalEncode(
+ uint8_t* /* bitstream */,
+ int16_t* /* bitstream_len_byte */) {
return -1;
}
-WebRtc_Word16 ACMISAC::DecodeSafe(WebRtc_UWord8* /* bitstream */,
- WebRtc_Word16 /* bitstream_len_byte */,
- WebRtc_Word16* /* audio */,
- WebRtc_Word16* /* audio_samples */,
- WebRtc_Word8* /* speech_type */) {
+int16_t ACMISAC::DecodeSafe(uint8_t* /* bitstream */,
+ int16_t /* bitstream_len_byte */,
+ int16_t* /* audio */,
+ int16_t* /* audio_samples */,
+ int8_t* /* speech_type */) {
return 0;
}
-WebRtc_Word16 ACMISAC::InternalInitEncoder(
+int16_t ACMISAC::InternalInitEncoder(
WebRtcACMCodecParams* /* codec_params */) {
return -1;
}
-WebRtc_Word16 ACMISAC::InternalInitDecoder(
+int16_t ACMISAC::InternalInitDecoder(
WebRtcACMCodecParams* /* codec_params */) {
return -1;
}
-WebRtc_Word16 ACMISAC::InternalCreateDecoder() {
+int16_t ACMISAC::InternalCreateDecoder() {
return -1;
}
@@ -111,7 +111,7 @@
return;
}
-WebRtc_Word16 ACMISAC::InternalCreateEncoder() {
+int16_t ACMISAC::InternalCreateEncoder() {
return -1;
}
@@ -119,8 +119,8 @@
return;
}
-WebRtc_Word32 ACMISAC::CodecDef(WebRtcNetEQ_CodecDef& /* codec_def */,
- const CodecInst& /* codec_inst */) {
+int32_t ACMISAC::CodecDef(WebRtcNetEQ_CodecDef& /* codec_def */,
+ const CodecInst& /* codec_inst */) {
return -1;
}
@@ -128,69 +128,69 @@
return;
}
-WebRtc_Word16 ACMISAC::DeliverCachedIsacData(
- WebRtc_UWord8* /* bitstream */,
- WebRtc_Word16* /* bitstream_len_byte */,
- WebRtc_UWord32* /* timestamp */,
+int16_t ACMISAC::DeliverCachedIsacData(
+ uint8_t* /* bitstream */,
+ int16_t* /* bitstream_len_byte */,
+ uint32_t* /* timestamp */,
WebRtcACMEncodingType* /* encoding_type */,
- const WebRtc_UWord16 /* isac_rate */,
- const WebRtc_UWord8 /* isac_bw_estimate */) {
+ const uint16_t /* isac_rate */,
+ const uint8_t /* isac_bw_estimate */) {
return -1;
}
-WebRtc_Word16 ACMISAC::Transcode(WebRtc_UWord8* /* bitstream */,
- WebRtc_Word16* /* bitstream_len_byte */,
- WebRtc_Word16 /* q_bwe */,
- WebRtc_Word32 /* scale */,
- bool /* is_red */) {
+int16_t ACMISAC::Transcode(uint8_t* /* bitstream */,
+ int16_t* /* bitstream_len_byte */,
+ int16_t /* q_bwe */,
+ int32_t /* scale */,
+ bool /* is_red */) {
return -1;
}
-WebRtc_Word16 ACMISAC::SetBitRateSafe(WebRtc_Word32 /* bit_rate */) {
+int16_t ACMISAC::SetBitRateSafe(int32_t /* bit_rate */) {
return -1;
}
-WebRtc_Word32 ACMISAC::GetEstimatedBandwidthSafe() {
+int32_t ACMISAC::GetEstimatedBandwidthSafe() {
return -1;
}
-WebRtc_Word32 ACMISAC::SetEstimatedBandwidthSafe(
- WebRtc_Word32 /* estimated_bandwidth */) {
+int32_t ACMISAC::SetEstimatedBandwidthSafe(
+ int32_t /* estimated_bandwidth */) {
return -1;
}
-WebRtc_Word32 ACMISAC::GetRedPayloadSafe(WebRtc_UWord8* /* red_payload */,
- WebRtc_Word16* /* payload_bytes */) {
+int32_t ACMISAC::GetRedPayloadSafe(uint8_t* /* red_payload */,
+ int16_t* /* payload_bytes */) {
return -1;
}
-WebRtc_Word16 ACMISAC::UpdateDecoderSampFreq(WebRtc_Word16 /* codec_id */) {
+int16_t ACMISAC::UpdateDecoderSampFreq(int16_t /* codec_id */) {
return -1;
}
-WebRtc_Word16 ACMISAC::UpdateEncoderSampFreq(
- WebRtc_UWord16 /* encoder_samp_freq_hz */) {
+int16_t ACMISAC::UpdateEncoderSampFreq(
+ uint16_t /* encoder_samp_freq_hz */) {
return -1;
}
-WebRtc_Word16 ACMISAC::EncoderSampFreq(WebRtc_UWord16& /* samp_freq_hz */) {
+int16_t ACMISAC::EncoderSampFreq(uint16_t& /* samp_freq_hz */) {
return -1;
}
-WebRtc_Word32 ACMISAC::ConfigISACBandwidthEstimator(
- const WebRtc_UWord8 /* init_frame_size_msec */,
- const WebRtc_UWord16 /* init_rate_bit_per_sec */,
+int32_t ACMISAC::ConfigISACBandwidthEstimator(
+ const uint8_t /* init_frame_size_msec */,
+ const uint16_t /* init_rate_bit_per_sec */,
const bool /* enforce_frame_size */) {
return -1;
}
-WebRtc_Word32 ACMISAC::SetISACMaxPayloadSize(
- const WebRtc_UWord16 /* max_payload_len_bytes */) {
+int32_t ACMISAC::SetISACMaxPayloadSize(
+ const uint16_t /* max_payload_len_bytes */) {
return -1;
}
-WebRtc_Word32 ACMISAC::SetISACMaxRate(
- const WebRtc_UWord32 /* max_rate_bit_per_sec */) {
+int32_t ACMISAC::SetISACMaxRate(
+ const uint32_t /* max_rate_bit_per_sec */) {
return -1;
}
@@ -198,14 +198,14 @@
return;
}
-void ACMISAC::CurrentRate(WebRtc_Word32& /*rate_bit_per_sec */) {
+void ACMISAC::CurrentRate(int32_t& /*rate_bit_per_sec */) {
return;
}
bool
ACMISAC::DecoderParamsSafe(
WebRtcACMCodecParams* /* dec_params */,
- const WebRtc_UWord8 /* payload_type */) {
+ const uint8_t /* payload_type */) {
return false;
}
@@ -215,11 +215,11 @@
return;
}
-WebRtc_Word16 ACMISAC::REDPayloadISAC(
- const WebRtc_Word32 /* isac_rate */,
- const WebRtc_Word16 /* isac_bw_estimate */,
- WebRtc_UWord8* /* payload */,
- WebRtc_Word16* /* payload_len_bytes */) {
+int16_t ACMISAC::REDPayloadISAC(
+ const int32_t /* isac_rate */,
+ const int16_t /* isac_bw_estimate */,
+ uint8_t* /* payload */,
+ int16_t* /* payload_len_bytes */) {
return -1;
}
@@ -243,7 +243,7 @@
#define ISAC_NUM_SUPPORTED_RATES 9
-static const WebRtc_UWord16 kIsacSuportedRates[ISAC_NUM_SUPPORTED_RATES] = {
+static const uint16_t kIsacSuportedRates[ISAC_NUM_SUPPORTED_RATES] = {
32000, 30000, 26000, 23000, 21000,
19000, 17000, 15000, 12000
};
@@ -258,11 +258,11 @@
kIsacSuperWideband = 32
};
-static float ACMISACFixTranscodingScale(WebRtc_UWord16 rate) {
+static float ACMISACFixTranscodingScale(uint16_t rate) {
// find the scale for transcoding; the scale is rounded
// downward
float scale = -1;
- for (WebRtc_Word16 n = 0; n < ISAC_NUM_SUPPORTED_RATES; n++) {
+ for (int16_t n = 0; n < ISAC_NUM_SUPPORTED_RATES; n++) {
if (rate >= kIsacSuportedRates[n]) {
scale = kIsacScale[n];
break;
@@ -272,29 +272,29 @@
}
static void ACMISACFixGetSendBitrate(ACM_ISAC_STRUCT* inst,
- WebRtc_Word32* bottleneck) {
+ int32_t* bottleneck) {
*bottleneck = WebRtcIsacfix_GetUplinkBw(inst);
}
-static WebRtc_Word16 ACMISACFixGetNewBitstream(ACM_ISAC_STRUCT* inst,
- WebRtc_Word16 bwe_index,
- WebRtc_Word16 /* jitter_index */,
- WebRtc_Word32 rate,
- WebRtc_Word16* bitstream,
+static int16_t ACMISACFixGetNewBitstream(ACM_ISAC_STRUCT* inst,
+ int16_t bwe_index,
+ int16_t /* jitter_index */,
+ int32_t rate,
+ int16_t* bitstream,
bool is_red) {
if (is_red) {
// RED not supported with iSACFIX
return -1;
}
- float scale = ACMISACFixTranscodingScale((WebRtc_UWord16) rate);
+ float scale = ACMISACFixTranscodingScale((uint16_t) rate);
return WebRtcIsacfix_GetNewBitStream(inst, bwe_index, scale, bitstream);
}
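ACMISACFixTranscodingScale() above walks the descending kIsacSuportedRates table and returns the scale of the first supported rate not exceeding the requested one, which is how the downward rounding happens. A standalone sketch of that lookup follows; the scale values are illustrative placeholders, not the real kIsacScale entries.

#include <cstddef>
#include <cstdint>
#include <cstdio>

static const uint16_t kRates[] = {32000, 30000, 26000, 23000, 21000,
                                  19000, 17000, 15000, 12000};
static const float kScales[]   = {1.00f, 0.94f, 0.81f, 0.72f, 0.66f,
                                  0.59f, 0.53f, 0.47f, 0.38f};  // assumed values

// Returns the scale of the highest supported rate that does not exceed |rate|.
float TranscodingScale(uint16_t rate) {
  for (size_t n = 0; n < sizeof(kRates) / sizeof(kRates[0]); ++n) {
    if (rate >= kRates[n]) return kScales[n];
  }
  return -1.0f;  // below the lowest supported rate
}

int main() {
  std::printf("scale for 24000 bit/s: %.2f\n", TranscodingScale(24000));  // 0.72
  return 0;
}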
-static WebRtc_Word16 ACMISACFixGetSendBWE(ACM_ISAC_STRUCT* inst,
- WebRtc_Word16* rate_index,
- WebRtc_Word16* /* dummy */) {
- WebRtc_Word16 local_rate_index;
- WebRtc_Word16 status = WebRtcIsacfix_GetDownLinkBwIndex(inst,
+static int16_t ACMISACFixGetSendBWE(ACM_ISAC_STRUCT* inst,
+ int16_t* rate_index,
+ int16_t* /* dummy */) {
+ int16_t local_rate_index;
+ int16_t status = WebRtcIsacfix_GetDownLinkBwIndex(inst,
&local_rate_index);
if (status < 0) {
return -1;
@@ -304,34 +304,34 @@
}
}
-static WebRtc_Word16 ACMISACFixControlBWE(ACM_ISAC_STRUCT* inst,
- WebRtc_Word32 rate_bps,
- WebRtc_Word16 frame_size_ms,
- WebRtc_Word16 enforce_frame_size) {
- return WebRtcIsacfix_ControlBwe(inst, (WebRtc_Word16) rate_bps, frame_size_ms,
+static int16_t ACMISACFixControlBWE(ACM_ISAC_STRUCT* inst,
+ int32_t rate_bps,
+ int16_t frame_size_ms,
+ int16_t enforce_frame_size) {
+ return WebRtcIsacfix_ControlBwe(inst, (int16_t) rate_bps, frame_size_ms,
enforce_frame_size);
}
-static WebRtc_Word16 ACMISACFixControl(ACM_ISAC_STRUCT* inst,
- WebRtc_Word32 rate_bps,
- WebRtc_Word16 frame_size_ms) {
- return WebRtcIsacfix_Control(inst, (WebRtc_Word16) rate_bps, frame_size_ms);
+static int16_t ACMISACFixControl(ACM_ISAC_STRUCT* inst,
+ int32_t rate_bps,
+ int16_t frame_size_ms) {
+ return WebRtcIsacfix_Control(inst, (int16_t) rate_bps, frame_size_ms);
}
// The following two functions should have the same signature as their
// counterparts in iSAC floating-point, i.e. WebRtcIsac_EncSampRate &
// WebRtcIsac_DecSampRate.
-static WebRtc_UWord16 ACMISACFixGetEncSampRate(ACM_ISAC_STRUCT* /* inst */) {
+static uint16_t ACMISACFixGetEncSampRate(ACM_ISAC_STRUCT* /* inst */) {
return 16000;
}
-static WebRtc_UWord16 ACMISACFixGetDecSampRate(ACM_ISAC_STRUCT* /* inst */) {
+static uint16_t ACMISACFixGetDecSampRate(ACM_ISAC_STRUCT* /* inst */) {
return 16000;
}
#endif
-ACMISAC::ACMISAC(WebRtc_Word16 codec_id)
+ACMISAC::ACMISAC(int16_t codec_id)
: is_enc_initialized_(false),
isac_coding_mode_(CHANNEL_INDEPENDENT),
enforce_frame_size_(false),
@@ -372,8 +372,8 @@
return NULL;
}
-WebRtc_Word16 ACMISAC::InternalEncode(WebRtc_UWord8* bitstream,
- WebRtc_Word16* bitstream_len_byte) {
+int16_t ACMISAC::InternalEncode(uint8_t* bitstream,
+ int16_t* bitstream_len_byte) {
// ISAC takes 10 ms of audio every time we call the encoder; therefore,
// it should be treated like codecs with 'basic coding block'
// non-zero, and the following 'while-loop' should not be necessary.
@@ -396,7 +396,7 @@
}
*bitstream_len_byte = ACM_ISAC_ENCODE(codec_inst_ptr_->inst,
&in_audio_[in_audio_ix_read_],
- (WebRtc_Word16*)bitstream);
+ (int16_t*)bitstream);
// increment the read index; this tells the caller how far
// we have gone forward in reading the audio buffer
in_audio_ix_read_ += samples_in_10ms_audio_;
@@ -417,15 +417,15 @@
return *bitstream_len_byte;
}
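The comment at the top of InternalEncode() notes that iSAC consumes 10 ms of audio per call and only produces a payload once a whole frame has been accumulated. The toy encoder below illustrates that accumulate-then-emit behaviour; the frame length and payload size are assumptions for illustration, and the class is a stand-in, not the iSAC API.

#include <cstdint>
#include <cstdio>

struct ToyFrameEncoder {
  int buffered_ms = 0;
  // Returns a payload length in bytes, or 0 while a frame is still being buffered.
  int16_t Encode10ms() {
    buffered_ms += 10;
    if (buffered_ms < 30) return 0;  // assumed 30 ms frame; not complete yet
    buffered_ms = 0;
    return 100;                      // assumed payload size, for illustration only
  }
};

int main() {
  ToyFrameEncoder enc;
  for (int call = 1; call <= 3; ++call)
    std::printf("call %d -> %d bytes\n", call, enc.Encode10ms());  // 0, 0, 100
  return 0;
}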
-WebRtc_Word16 ACMISAC::DecodeSafe(WebRtc_UWord8* /* bitstream */,
- WebRtc_Word16 /* bitstream_len_byte */,
- WebRtc_Word16* /* audio */,
- WebRtc_Word16* /* audio_sample */,
- WebRtc_Word8* /* speech_type */) {
+int16_t ACMISAC::DecodeSafe(uint8_t* /* bitstream */,
+ int16_t /* bitstream_len_byte */,
+ int16_t* /* audio */,
+ int16_t* /* audio_sample */,
+ int8_t* /* speech_type */) {
return 0;
}
-WebRtc_Word16 ACMISAC::InternalInitEncoder(WebRtcACMCodecParams* codec_params) {
+int16_t ACMISAC::InternalInitEncoder(WebRtcACMCodecParams* codec_params) {
// if rate is set to -1 then iSAC has to be in adaptive mode
if (codec_params->codec_inst.rate == -1) {
isac_coding_mode_ = ADAPTIVE;
@@ -439,7 +439,7 @@
}
// we need to set the encoder sampling frequency.
- if (UpdateEncoderSampFreq((WebRtc_UWord16) codec_params->codec_inst.plfreq)
+ if (UpdateEncoderSampFreq((uint16_t) codec_params->codec_inst.plfreq)
< 0) {
return -1;
}
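InternalInitEncoder() above reads the target rate out of CodecInst: -1 selects iSAC's bandwidth-adaptive mode, any other value selects channel-independent coding at that fixed rate. A compact sketch of that decision follows; the enum and struct are local stand-ins, not the WebRTC definitions.

#include <cstdint>

enum IsacCodingMode { ADAPTIVE, CHANNEL_INDEPENDENT };

struct CodecSettings { int32_t rate; };  // stand-in for the rate field of CodecInst

IsacCodingMode SelectMode(const CodecSettings& inst) {
  // rate == -1 means "let the channel drive the rate", i.e. adaptive mode.
  return (inst.rate == -1) ? ADAPTIVE : CHANNEL_INDEPENDENT;
}

int main() {
  CodecSettings adaptive{-1};
  CodecSettings fixed{32000};
  return (SelectMode(adaptive) == ADAPTIVE &&
          SelectMode(fixed) == CHANNEL_INDEPENDENT) ? 0 : 1;
}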
@@ -465,7 +465,7 @@
return 0;
}
-WebRtc_Word16 ACMISAC::InternalInitDecoder(WebRtcACMCodecParams* codec_params) {
+int16_t ACMISAC::InternalInitDecoder(WebRtcACMCodecParams* codec_params) {
if (codec_inst_ptr_ == NULL) {
return -1;
}
@@ -496,11 +496,11 @@
return ACM_ISAC_DECODERINIT(codec_inst_ptr_->inst);
}
-WebRtc_Word16 ACMISAC::InternalCreateDecoder() {
+int16_t ACMISAC::InternalCreateDecoder() {
if (codec_inst_ptr_ == NULL) {
return -1;
}
- WebRtc_Word16 status = ACM_ISAC_CREATE(&(codec_inst_ptr_->inst));
+ int16_t status = ACM_ISAC_CREATE(&(codec_inst_ptr_->inst));
// specific to codecs with one instance for encoding and decoding
encoder_initialized_ = false;
@@ -518,11 +518,11 @@
return;
}
-WebRtc_Word16 ACMISAC::InternalCreateEncoder() {
+int16_t ACMISAC::InternalCreateEncoder() {
if (codec_inst_ptr_ == NULL) {
return -1;
}
- WebRtc_Word16 status = ACM_ISAC_CREATE(&(codec_inst_ptr_->inst));
+ int16_t status = ACM_ISAC_CREATE(&(codec_inst_ptr_->inst));
// specific to codecs with one instance for encoding and decoding
decoder_initialized_ = false;
@@ -540,8 +540,8 @@
return;
}
-WebRtc_Word32 ACMISAC::CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst) {
+int32_t ACMISAC::CodecDef(WebRtcNetEQ_CodecDef& codec_def,
+ const CodecInst& codec_inst) {
// Sanity checks
if (codec_inst_ptr_ == NULL) {
return -1;
@@ -588,12 +588,12 @@
return;
}
-WebRtc_Word16 ACMISAC::Transcode(WebRtc_UWord8* bitstream,
- WebRtc_Word16* bitstream_len_byte,
- WebRtc_Word16 q_bwe,
- WebRtc_Word32 rate,
- bool is_red) {
- WebRtc_Word16 jitter_info = 0;
+int16_t ACMISAC::Transcode(uint8_t* bitstream,
+ int16_t* bitstream_len_byte,
+ int16_t q_bwe,
+ int32_t rate,
+ bool is_red) {
+ int16_t jitter_info = 0;
// transcode from a higher rate to a lower rate; sanity check
if (codec_inst_ptr_ == NULL) {
return -1;
@@ -601,7 +601,7 @@
*bitstream_len_byte = ACM_ISAC_GETNEWBITSTREAM(codec_inst_ptr_->inst, q_bwe,
jitter_info, rate,
- (WebRtc_Word16*)bitstream,
+ (int16_t*)bitstream,
(is_red) ? 1 : 0);
if (*bitstream_len_byte < 0) {
@@ -613,11 +613,11 @@
}
}
-WebRtc_Word16 ACMISAC::SetBitRateSafe(WebRtc_Word32 bit_rate) {
+int16_t ACMISAC::SetBitRateSafe(int32_t bit_rate) {
if (codec_inst_ptr_ == NULL) {
return -1;
}
- WebRtc_UWord16 encoder_samp_freq;
+ uint16_t encoder_samp_freq;
EncoderSampFreq(encoder_samp_freq);
bool reinit = false;
// change the BN of iSAC
@@ -640,13 +640,13 @@
reinit = true;
}
// store the bottleneck
- isac_current_bn_ = (WebRtc_UWord16) bit_rate;
+ isac_current_bn_ = (uint16_t) bit_rate;
} else {
// invalid rate
return -1;
}
- WebRtc_Word16 status = 0;
+ int16_t status = 0;
if (reinit) {
// initialize and check if it is successful
if (ACM_ISAC_ENCODERINIT(codec_inst_ptr_->inst, isac_coding_mode_) < 0) {
@@ -671,9 +671,9 @@
return status;
}
-WebRtc_Word32 ACMISAC::GetEstimatedBandwidthSafe() {
- WebRtc_Word16 bandwidth_index = 0;
- WebRtc_Word16 delay_index = 0;
+int32_t ACMISAC::GetEstimatedBandwidthSafe() {
+ int16_t bandwidth_index = 0;
+ int16_t delay_index = 0;
int samp_rate;
// Get bandwidth information
@@ -693,10 +693,10 @@
}
}
-WebRtc_Word32 ACMISAC::SetEstimatedBandwidthSafe(
- WebRtc_Word32 estimated_bandwidth) {
+int32_t ACMISAC::SetEstimatedBandwidthSafe(
+ int32_t estimated_bandwidth) {
int samp_rate;
- WebRtc_Word16 bandwidth_index;
+ int16_t bandwidth_index;
// Check sample frequency and choose appropriate table
samp_rate = ACM_ISAC_GETENCSAMPRATE(codec_inst_ptr_->inst);
@@ -734,14 +734,14 @@
return 0;
}
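GetEstimatedBandwidthSafe() and SetEstimatedBandwidthSafe() translate between a bit rate and an index into the 24-entry kIsacRatesWb/kIsacRatesSwb tables declared near the top of this file, choosing the table by the encoder's sample rate. The real index comes from inside the iSAC library; the linear scan below is only a plausible illustration over a truncated copy of the wideband table.

#include <cstddef>
#include <cstdint>
#include <cstdio>

static const int32_t kRatesWb[] = {10000, 11100, 12300, 13700, 15200, 16900,
                                   18800, 20900, 23300, 25900, 28700, 31900};

// Returns the index of the largest table rate not exceeding |estimated_bps|.
int16_t NearestIndexBelow(int32_t estimated_bps) {
  int16_t index = 0;
  for (size_t n = 0; n < sizeof(kRatesWb) / sizeof(kRatesWb[0]); ++n) {
    if (kRatesWb[n] <= estimated_bps) index = static_cast<int16_t>(n);
  }
  return index;
}

int main() {
  std::printf("index for 20000 bit/s: %d\n", NearestIndexBelow(20000));  // 6
  return 0;
}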
-WebRtc_Word32 ACMISAC::GetRedPayloadSafe(
+int32_t ACMISAC::GetRedPayloadSafe(
#if (!defined(WEBRTC_CODEC_ISAC))
- WebRtc_UWord8* /* red_payload */, WebRtc_Word16* /* payload_bytes */) {
+ uint8_t* /* red_payload */, int16_t* /* payload_bytes */) {
return -1;
#else
- WebRtc_UWord8* red_payload, WebRtc_Word16* payload_bytes) {
- WebRtc_Word16 bytes = WebRtcIsac_GetRedPayload(codec_inst_ptr_->inst,
- (WebRtc_Word16*)red_payload);
+ uint8_t* red_payload, int16_t* payload_bytes) {
+ int16_t bytes = WebRtcIsac_GetRedPayload(codec_inst_ptr_->inst,
+ (int16_t*)red_payload);
if (bytes < 0) {
return -1;
}
@@ -750,9 +750,9 @@
#endif
}
-WebRtc_Word16 ACMISAC::UpdateDecoderSampFreq(
+int16_t ACMISAC::UpdateDecoderSampFreq(
#ifdef WEBRTC_CODEC_ISAC
- WebRtc_Word16 codec_id) {
+ int16_t codec_id) {
// The decoder supports only wideband and super-wideband.
if (ACMCodecDB::kISAC == codec_id) {
return WebRtcIsac_SetDecSampRate(codec_inst_ptr_->inst, 16000);
@@ -763,15 +763,15 @@
return -1;
}
#else
- WebRtc_Word16 /* codec_id */) {
+ int16_t /* codec_id */) {
return 0;
#endif
}
-WebRtc_Word16 ACMISAC::UpdateEncoderSampFreq(
+int16_t ACMISAC::UpdateEncoderSampFreq(
#ifdef WEBRTC_CODEC_ISAC
- WebRtc_UWord16 encoder_samp_freq_hz) {
- WebRtc_UWord16 current_samp_rate_hz;
+ uint16_t encoder_samp_freq_hz) {
+ uint16_t current_samp_rate_hz;
EncoderSampFreq(current_samp_rate_hz);
if (current_samp_rate_hz != encoder_samp_freq_hz) {
@@ -795,23 +795,23 @@
}
}
#else
- WebRtc_UWord16 /* codec_id */) {
+ uint16_t /* codec_id */) {
#endif
return 0;
}
-WebRtc_Word16 ACMISAC::EncoderSampFreq(WebRtc_UWord16& samp_freq_hz) {
+int16_t ACMISAC::EncoderSampFreq(uint16_t& samp_freq_hz) {
samp_freq_hz = ACM_ISAC_GETENCSAMPRATE(codec_inst_ptr_->inst);
return 0;
}
-WebRtc_Word32 ACMISAC::ConfigISACBandwidthEstimator(
- const WebRtc_UWord8 init_frame_size_msec,
- const WebRtc_UWord16 init_rate_bit_per_sec,
+int32_t ACMISAC::ConfigISACBandwidthEstimator(
+ const uint8_t init_frame_size_msec,
+ const uint16_t init_rate_bit_per_sec,
const bool enforce_frame_size) {
- WebRtc_Word16 status;
+ int16_t status;
{
- WebRtc_UWord16 samp_freq_hz;
+ uint16_t samp_freq_hz;
EncoderSampFreq(samp_freq_hz);
// TODO(turajs): at 32kHz we hardcode calling with 30ms and enforce
// the frame-size otherwise we might get error. Revise if
@@ -836,14 +836,14 @@
return 0;
}
-WebRtc_Word32 ACMISAC::SetISACMaxPayloadSize(
- const WebRtc_UWord16 max_payload_len_bytes) {
+int32_t ACMISAC::SetISACMaxPayloadSize(
+ const uint16_t max_payload_len_bytes) {
return ACM_ISAC_SETMAXPAYLOADSIZE(codec_inst_ptr_->inst,
max_payload_len_bytes);
}
-WebRtc_Word32 ACMISAC::SetISACMaxRate(
- const WebRtc_UWord32 max_rate_bit_per_sec) {
+int32_t ACMISAC::SetISACMaxRate(
+ const uint32_t max_rate_bit_per_sec) {
return ACM_ISAC_SETMAXRATE(codec_inst_ptr_->inst, max_rate_bit_per_sec);
}
@@ -852,14 +852,14 @@
encoder_params_.codec_inst.pacsize = frame_len_smpl_;
}
-void ACMISAC::CurrentRate(WebRtc_Word32& rate_bit_per_sec) {
+void ACMISAC::CurrentRate(int32_t& rate_bit_per_sec) {
if (isac_coding_mode_ == ADAPTIVE) {
ACM_ISAC_GETSENDBITRATE(codec_inst_ptr_->inst, &rate_bit_per_sec);
}
}
bool ACMISAC::DecoderParamsSafe(WebRtcACMCodecParams* dec_params,
- const WebRtc_UWord8 payload_type) {
+ const uint8_t payload_type) {
if (decoder_initialized_) {
if (payload_type == decoder_params_.codec_inst.pltype) {
memcpy(dec_params, &decoder_params_, sizeof(WebRtcACMCodecParams));
@@ -883,11 +883,11 @@
}
}
-WebRtc_Word16 ACMISAC::REDPayloadISAC(const WebRtc_Word32 isac_rate,
- const WebRtc_Word16 isac_bw_estimate,
- WebRtc_UWord8* payload,
- WebRtc_Word16* payload_len_bytes) {
- WebRtc_Word16 status;
+int16_t ACMISAC::REDPayloadISAC(const int32_t isac_rate,
+ const int16_t isac_bw_estimate,
+ uint8_t* payload,
+ int16_t* payload_len_bytes) {
+ int16_t status;
ReadLockScoped rl(codec_wrapper_lock_);
status = Transcode(payload, payload_len_bytes, isac_bw_estimate, isac_rate,
true);
diff --git a/webrtc/modules/audio_coding/main/source/acm_isac.h b/webrtc/modules/audio_coding/main/source/acm_isac.h
index 1724cf8..6255477 100644
--- a/webrtc/modules/audio_coding/main/source/acm_isac.h
+++ b/webrtc/modules/audio_coding/main/source/acm_isac.h
@@ -24,95 +24,95 @@
class ACMISAC : public ACMGenericCodec {
public:
- explicit ACMISAC(WebRtc_Word16 codec_id);
+ explicit ACMISAC(int16_t codec_id);
~ACMISAC();
// for FEC
ACMGenericCodec* CreateInstance(void);
- WebRtc_Word16 InternalEncode(WebRtc_UWord8* bitstream,
- WebRtc_Word16* bitstream_len_byte);
+ int16_t InternalEncode(uint8_t* bitstream,
+ int16_t* bitstream_len_byte);
- WebRtc_Word16 InternalInitEncoder(WebRtcACMCodecParams *codec_params);
+ int16_t InternalInitEncoder(WebRtcACMCodecParams *codec_params);
- WebRtc_Word16 InternalInitDecoder(WebRtcACMCodecParams *codec_params);
+ int16_t InternalInitDecoder(WebRtcACMCodecParams *codec_params);
- WebRtc_Word16 DeliverCachedIsacData(WebRtc_UWord8* bitstream,
- WebRtc_Word16* bitstream_len_byte,
- WebRtc_UWord32* timestamp,
- WebRtcACMEncodingType* encoding_type,
- const WebRtc_UWord16 isac_rate,
- const WebRtc_UWord8 isac_bwestimate);
+ int16_t DeliverCachedIsacData(uint8_t* bitstream,
+ int16_t* bitstream_len_byte,
+ uint32_t* timestamp,
+ WebRtcACMEncodingType* encoding_type,
+ const uint16_t isac_rate,
+ const uint8_t isac_bwestimate);
- WebRtc_Word16 DeliverCachedData(WebRtc_UWord8* /* bitstream */,
- WebRtc_Word16* /* bitstream_len_byte */,
- WebRtc_UWord32* /* timestamp */,
- WebRtcACMEncodingType* /* encoding_type */) {
+ int16_t DeliverCachedData(uint8_t* /* bitstream */,
+ int16_t* /* bitstream_len_byte */,
+ uint32_t* /* timestamp */,
+ WebRtcACMEncodingType* /* encoding_type */) {
return -1;
}
- WebRtc_Word16 UpdateDecoderSampFreq(WebRtc_Word16 codec_id);
+ int16_t UpdateDecoderSampFreq(int16_t codec_id);
- WebRtc_Word16 UpdateEncoderSampFreq(WebRtc_UWord16 samp_freq_hz);
+ int16_t UpdateEncoderSampFreq(uint16_t samp_freq_hz);
- WebRtc_Word16 EncoderSampFreq(WebRtc_UWord16& samp_freq_hz);
+ int16_t EncoderSampFreq(uint16_t& samp_freq_hz);
- WebRtc_Word32 ConfigISACBandwidthEstimator(
- const WebRtc_UWord8 init_frame_size_msec,
- const WebRtc_UWord16 init_rate_bit_per_sec,
+ int32_t ConfigISACBandwidthEstimator(
+ const uint8_t init_frame_size_msec,
+ const uint16_t init_rate_bit_per_sec,
const bool enforce_frame_size);
- WebRtc_Word32 SetISACMaxPayloadSize(
- const WebRtc_UWord16 max_payload_len_bytes);
+ int32_t SetISACMaxPayloadSize(
+ const uint16_t max_payload_len_bytes);
- WebRtc_Word32 SetISACMaxRate(const WebRtc_UWord32 max_rate_bit_per_sec);
+ int32_t SetISACMaxRate(const uint32_t max_rate_bit_per_sec);
- WebRtc_Word16 REDPayloadISAC(const WebRtc_Word32 isac_rate,
- const WebRtc_Word16 isac_bw_estimate,
- WebRtc_UWord8* payload,
- WebRtc_Word16* payload_len_bytes);
+ int16_t REDPayloadISAC(const int32_t isac_rate,
+ const int16_t isac_bw_estimate,
+ uint8_t* payload,
+ int16_t* payload_len_bytes);
protected:
- WebRtc_Word16 DecodeSafe(WebRtc_UWord8* bitstream,
- WebRtc_Word16 bitstream_len_byte,
- WebRtc_Word16* audio,
- WebRtc_Word16* audio_samples,
- WebRtc_Word8* speech_type);
+ int16_t DecodeSafe(uint8_t* bitstream,
+ int16_t bitstream_len_byte,
+ int16_t* audio,
+ int16_t* audio_samples,
+ int8_t* speech_type);
- WebRtc_Word32 CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst);
+ int32_t CodecDef(WebRtcNetEQ_CodecDef& codec_def,
+ const CodecInst& codec_inst);
void DestructEncoderSafe();
void DestructDecoderSafe();
- WebRtc_Word16 SetBitRateSafe(const WebRtc_Word32 bit_rate);
+ int16_t SetBitRateSafe(const int32_t bit_rate);
- WebRtc_Word32 GetEstimatedBandwidthSafe();
+ int32_t GetEstimatedBandwidthSafe();
- WebRtc_Word32 SetEstimatedBandwidthSafe(WebRtc_Word32 estimated_bandwidth);
+ int32_t SetEstimatedBandwidthSafe(int32_t estimated_bandwidth);
- WebRtc_Word32 GetRedPayloadSafe(WebRtc_UWord8* red_payload,
- WebRtc_Word16* payload_bytes);
+ int32_t GetRedPayloadSafe(uint8_t* red_payload,
+ int16_t* payload_bytes);
- WebRtc_Word16 InternalCreateEncoder();
+ int16_t InternalCreateEncoder();
- WebRtc_Word16 InternalCreateDecoder();
+ int16_t InternalCreateDecoder();
void InternalDestructEncoderInst(void* ptr_inst);
- WebRtc_Word16 Transcode(WebRtc_UWord8* bitstream,
- WebRtc_Word16* bitstream_len_byte,
- WebRtc_Word16 q_bwe,
- WebRtc_Word32 rate,
- bool is_red);
+ int16_t Transcode(uint8_t* bitstream,
+ int16_t* bitstream_len_byte,
+ int16_t q_bwe,
+ int32_t rate,
+ bool is_red);
- void CurrentRate(WebRtc_Word32& rate_bit_per_sec);
+ void CurrentRate(int32_t& rate_bit_per_sec);
void UpdateFrameLen();
bool DecoderParamsSafe(WebRtcACMCodecParams *dec_params,
- const WebRtc_UWord8 payload_type);
+ const uint8_t payload_type);
void SaveDecoderParamSafe(const WebRtcACMCodecParams* codec_params);
@@ -120,8 +120,8 @@
bool is_enc_initialized_;
IsacCodingMode isac_coding_mode_;
bool enforce_frame_size_;
- WebRtc_Word32 isac_current_bn_;
- WebRtc_UWord16 samples_in_10ms_audio_;
+ int32_t isac_current_bn_;
+ uint16_t samples_in_10ms_audio_;
WebRtcACMCodecParams decoder_params_32khz_;
};
diff --git a/webrtc/modules/audio_coding/main/source/acm_neteq.cc b/webrtc/modules/audio_coding/main/source/acm_neteq.cc
index ce80184..75e73eb 100644
--- a/webrtc/modules/audio_coding/main/source/acm_neteq.cc
+++ b/webrtc/modules/audio_coding/main/source/acm_neteq.cc
@@ -77,10 +77,10 @@
}
}
-WebRtc_Word32 ACMNetEQ::Init() {
+int32_t ACMNetEQ::Init() {
CriticalSectionScoped lock(neteq_crit_sect_);
- for (WebRtc_Word16 idx = 0; idx < num_slaves_ + 1; idx++) {
+ for (int16_t idx = 0; idx < num_slaves_ + 1; idx++) {
if (InitByIdxSafe(idx) < 0) {
return -1;
}
@@ -117,7 +117,7 @@
return 0;
}
-WebRtc_Word16 ACMNetEQ::InitByIdxSafe(const WebRtc_Word16 idx) {
+int16_t ACMNetEQ::InitByIdxSafe(const int16_t idx) {
int memory_size_bytes;
if (WebRtcNetEQ_AssignSize(&memory_size_bytes) != 0) {
LogError("AssignSize", idx);
@@ -163,7 +163,7 @@
return 0;
}
-WebRtc_Word16 ACMNetEQ::EnableVADByIdxSafe(const WebRtc_Word16 idx) {
+int16_t ACMNetEQ::EnableVADByIdxSafe(const int16_t idx) {
if (ptr_vadinst_[idx] == NULL) {
if (WebRtcVad_Create(&ptr_vadinst_[idx]) < 0) {
ptr_vadinst_[idx] = NULL;
@@ -196,15 +196,15 @@
return 0;
}
-WebRtc_Word32 ACMNetEQ::AllocatePacketBuffer(
+int32_t ACMNetEQ::AllocatePacketBuffer(
const WebRtcNetEQDecoder* used_codecs,
- WebRtc_Word16 num_codecs) {
+ int16_t num_codecs) {
// Due to WebRtcNetEQ_GetRecommendedBufferSize
// the following has to be int, otherwise we will get a compiler error
// if it is not cast
CriticalSectionScoped lock(neteq_crit_sect_);
- for (WebRtc_Word16 idx = 0; idx < num_slaves_ + 1; idx++) {
+ for (int16_t idx = 0; idx < num_slaves_ + 1; idx++) {
if (AllocatePacketBufferByIdxSafe(used_codecs, num_codecs, idx) < 0) {
return -1;
}
@@ -212,10 +212,10 @@
return 0;
}
-WebRtc_Word16 ACMNetEQ::AllocatePacketBufferByIdxSafe(
+int16_t ACMNetEQ::AllocatePacketBufferByIdxSafe(
const WebRtcNetEQDecoder* used_codecs,
- WebRtc_Word16 num_codecs,
- const WebRtc_Word16 idx) {
+ int16_t num_codecs,
+ const int16_t idx) {
int max_num_packets;
int buffer_size_in_bytes;
int per_packet_overhead_bytes;
@@ -249,7 +249,7 @@
neteq_packet_buffer_[idx] = NULL;
}
- neteq_packet_buffer_[idx] = (WebRtc_Word16 *) malloc(buffer_size_in_bytes);
+ neteq_packet_buffer_[idx] = (int16_t *) malloc(buffer_size_in_bytes);
if (neteq_packet_buffer_[idx] == NULL) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
"AllocatePacketBufferByIdxSafe: NetEq Initialization error: "
@@ -269,10 +269,10 @@
return 0;
}
-WebRtc_Word32 ACMNetEQ::SetExtraDelay(const WebRtc_Word32 delay_in_ms) {
+int32_t ACMNetEQ::SetExtraDelay(const int32_t delay_in_ms) {
CriticalSectionScoped lock(neteq_crit_sect_);
- for (WebRtc_Word16 idx = 0; idx < num_slaves_ + 1; idx++) {
+ for (int16_t idx = 0; idx < num_slaves_ + 1; idx++) {
if (!is_initialized_[idx]) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
"SetExtraDelay: NetEq is not initialized.");
@@ -287,10 +287,10 @@
return 0;
}
-WebRtc_Word32 ACMNetEQ::SetAVTPlayout(const bool enable) {
+int32_t ACMNetEQ::SetAVTPlayout(const bool enable) {
CriticalSectionScoped lock(neteq_crit_sect_);
if (avt_playout_ != enable) {
- for (WebRtc_Word16 idx = 0; idx < num_slaves_ + 1; idx++) {
+ for (int16_t idx = 0; idx < num_slaves_ + 1; idx++) {
if (!is_initialized_[idx]) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
"SetAVTPlayout: NetEq is not initialized.");
@@ -311,17 +311,17 @@
return avt_playout_;
}
-WebRtc_Word32 ACMNetEQ::CurrentSampFreqHz() const {
+int32_t ACMNetEQ::CurrentSampFreqHz() const {
CriticalSectionScoped lock(neteq_crit_sect_);
if (!is_initialized_[0]) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
"CurrentSampFreqHz: NetEq is not initialized.");
return -1;
}
- return (WebRtc_Word32)(1000 * current_samp_freq_khz_);
+ return (int32_t)(1000 * current_samp_freq_khz_);
}
-WebRtc_Word32 ACMNetEQ::SetPlayoutMode(const AudioPlayoutMode mode) {
+int32_t ACMNetEQ::SetPlayoutMode(const AudioPlayoutMode mode) {
CriticalSectionScoped lock(neteq_crit_sect_);
if (playout_mode_ == mode)
return 0;
@@ -375,7 +375,7 @@
return playout_mode_;
}
-WebRtc_Word32 ACMNetEQ::NetworkStatistics(
+int32_t ACMNetEQ::NetworkStatistics(
ACMNetworkStatistics* statistics) const {
WebRtcNetEQ_NetworkStatistics stats;
CriticalSectionScoped lock(neteq_crit_sect_);
@@ -435,10 +435,10 @@
return 0;
}
-WebRtc_Word32 ACMNetEQ::RecIn(const WebRtc_UWord8* incoming_payload,
- const WebRtc_Word32 length_payload,
- const WebRtcRTPHeader& rtp_info) {
- WebRtc_Word16 payload_length = static_cast<WebRtc_Word16>(length_payload);
+int32_t ACMNetEQ::RecIn(const uint8_t* incoming_payload,
+ const int32_t length_payload,
+ const WebRtcRTPHeader& rtp_info) {
+ int16_t payload_length = static_cast<int16_t>(length_payload);
// translate to NetEq struct
WebRtcNetEQ_RTPInfo neteq_rtpinfo;
@@ -453,10 +453,10 @@
// the least significant bits. (32-6) bits cover 2^(32-6) = 67108864 ms.
// we mask off the 6 most significant bits of the 32-bit value so we don't lose
// resolution when doing the following multiplication.
- const WebRtc_UWord32 now_in_ms =
- static_cast<WebRtc_UWord32>(
+ const uint32_t now_in_ms =
+ static_cast<uint32_t>(
TickTime::MillisecondTimestamp() & 0x03ffffff);
- WebRtc_UWord32 recv_timestamp = static_cast<WebRtc_UWord32>(
+ uint32_t recv_timestamp = static_cast<uint32_t>(
current_samp_freq_khz_ * now_in_ms);
int status;
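The RecIn() hunk above converts a wall-clock reading into a receive timestamp in samples: the millisecond clock is masked to its 26 least-significant bits (2^26 ms of range before wrap, matching the comment) and then multiplied by the sampling rate in kHz. A worked example with assumed input values:

#include <cstdint>
#include <cstdio>

int main() {
  const int64_t wall_clock_ms = 123456789;                // assumed clock reading
  const uint32_t now_in_ms =
      static_cast<uint32_t>(wall_clock_ms & 0x03ffffff);  // keep the low 26 bits
  const float samp_freq_khz = 16.0f;                      // e.g. wideband iSAC
  const uint32_t recv_timestamp =
      static_cast<uint32_t>(samp_freq_khz * now_in_ms);   // samples, not ms
  std::printf("masked ms = %u, timestamp = %u samples\n", now_in_ms, recv_timestamp);
  return 0;
}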
@@ -505,13 +505,13 @@
return 0;
}
-WebRtc_Word32 ACMNetEQ::RecOut(AudioFrame& audio_frame) {
+int32_t ACMNetEQ::RecOut(AudioFrame& audio_frame) {
enum WebRtcNetEQOutputType type;
- WebRtc_Word16 payload_len_sample;
+ int16_t payload_len_sample;
enum WebRtcNetEQOutputType type_master;
enum WebRtcNetEQOutputType type_slave;
- WebRtc_Word16 payload_len_sample_slave;
+ int16_t payload_len_sample_slave;
CriticalSectionScoped lockNetEq(neteq_crit_sect_);
@@ -546,8 +546,8 @@
"RecOut: NetEq is not initialized.");
return -1;
}
- WebRtc_Word16 payload_master[480];
- WebRtc_Word16 payload_slave[480];
+ int16_t payload_master[480];
+ int16_t payload_slave[480];
{
WriteLockScoped lockCodec(*decode_lock_);
if (WebRtcNetEQ_RecOutMasterSlave(inst_[0], payload_master,
@@ -590,11 +590,11 @@
if (payload_len_sample > payload_len_sample_slave) {
memset(&payload_slave[payload_len_sample_slave], 0,
(payload_len_sample - payload_len_sample_slave) *
- sizeof(WebRtc_Word16));
+ sizeof(int16_t));
}
}
- for (WebRtc_Word16 n = 0; n < payload_len_sample; n++) {
+ for (int16_t n = 0; n < payload_len_sample; n++) {
audio_frame.data_[n << 1] = payload_master[n];
audio_frame.data_[(n << 1) + 1] = payload_slave[n];
}
@@ -610,7 +610,7 @@
}
audio_frame.samples_per_channel_ =
- static_cast<WebRtc_UWord16>(payload_len_sample);
+ static_cast<uint16_t>(payload_len_sample);
// NetEq always returns 10 ms of audio.
current_samp_freq_khz_ =
static_cast<float>(audio_frame.samples_per_channel_) / 10.0f;
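In the stereo path of RecOut() above, the two mono 10 ms decoder outputs are zipped into one interleaved buffer (master in the even slots, slave in the odd slots), and the sample rate in kHz is then recovered as samples-per-channel divided by 10 ms. A standalone sketch of the interleaving; buffer sizes are illustrative.

#include <cstdint>
#include <vector>

// Interleaves two mono buffers into L, R, L, R, ... order.
std::vector<int16_t> InterleaveStereo(const int16_t* master, const int16_t* slave,
                                      int16_t samples_per_channel) {
  std::vector<int16_t> out(2 * samples_per_channel);
  for (int16_t n = 0; n < samples_per_channel; ++n) {
    out[n << 1] = master[n];       // left channel from the master NetEQ
    out[(n << 1) + 1] = slave[n];  // right channel from the slave NetEQ
  }
  return out;
}

int main() {
  int16_t master[160] = {0};  // 10 ms at 16 kHz
  int16_t slave[160] = {0};
  return InterleaveStereo(master, slave, 160).size() == 320 ? 0 : 1;
}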
@@ -660,8 +660,8 @@
// When ACMGenericCodec has set the codec specific parameters in codec_def
// it calls AddCodec() to add the new codec to the NetEQ database.
-WebRtc_Word32 ACMNetEQ::AddCodec(WebRtcNetEQ_CodecDef* codec_def,
- bool to_master) {
+int32_t ACMNetEQ::AddCodec(WebRtcNetEQ_CodecDef* codec_def,
+ bool to_master) {
if (codec_def == NULL) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
"ACMNetEQ::AddCodec: error, codec_def is NULL");
@@ -669,7 +669,7 @@
}
CriticalSectionScoped lock(neteq_crit_sect_);
- WebRtc_Word16 idx;
+ int16_t idx;
if (to_master) {
idx = 0;
} else {
@@ -693,11 +693,11 @@
// Creates a Word16 RTP packet out of a Word8 payload and an rtp info struct.
// Must be byte order safe.
-void ACMNetEQ::RTPPack(WebRtc_Word16* rtp_packet, const WebRtc_Word8* payload,
- const WebRtc_Word32 payload_length_bytes,
+void ACMNetEQ::RTPPack(int16_t* rtp_packet, const int8_t* payload,
+ const int32_t payload_length_bytes,
const WebRtcRTPHeader& rtp_info) {
- WebRtc_Word32 idx = 0;
- WEBRTC_SPL_SET_BYTE(rtp_packet, (WebRtc_Word8) 0x80, idx);
+ int32_t idx = 0;
+ WEBRTC_SPL_SET_BYTE(rtp_packet, (int8_t) 0x80, idx);
idx++;
WEBRTC_SPL_SET_BYTE(rtp_packet, rtp_info.header.payloadType, idx);
idx++;
@@ -737,7 +737,7 @@
WEBRTC_SPL_SET_BYTE(rtp_packet, WEBRTC_SPL_GET_BYTE(&(rtp_info.header.ssrc),
0), idx);
idx++;
- for (WebRtc_Word16 i = 0; i < payload_length_bytes; i++) {
+ for (int16_t i = 0; i < payload_length_bytes; i++) {
WEBRTC_SPL_SET_BYTE(rtp_packet, payload[i], idx);
idx++;
}
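RTPPack() above serialises a minimal 12-byte RTP header byte by byte (0x80 for version 2 with no padding, extension or CSRC, then payload type, sequence number, timestamp and SSRC in network byte order) before appending the payload. Below is a hedged sketch of that fixed header layout; the marker-bit handling follows RFC 3550 and the field values are illustrative, so details may differ from RTPPack() itself.

#include <cstdint>
#include <cstdio>

void PackRtpHeader(uint8_t out[12], bool marker, uint8_t payload_type,
                   uint16_t seq, uint32_t timestamp, uint32_t ssrc) {
  out[0] = 0x80;                                                   // V=2, P=0, X=0, CC=0
  out[1] = static_cast<uint8_t>((marker ? 0x80 : 0x00) | (payload_type & 0x7f));
  out[2] = seq >> 8;                out[3] = seq & 0xff;           // sequence number
  out[4] = timestamp >> 24;         out[5] = (timestamp >> 16) & 0xff;
  out[6] = (timestamp >> 8) & 0xff; out[7] = timestamp & 0xff;
  out[8] = ssrc >> 24;              out[9] = (ssrc >> 16) & 0xff;
  out[10] = (ssrc >> 8) & 0xff;     out[11] = ssrc & 0xff;
}

int main() {
  uint8_t header[12];
  PackRtpHeader(header, false, 103, 1234, 160000, 0x12345678u);
  std::printf("first header byte: 0x%02x\n", header[0]);  // prints 0x80
  return 0;
}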
@@ -748,12 +748,12 @@
}
}
-WebRtc_Word16 ACMNetEQ::EnableVAD() {
+int16_t ACMNetEQ::EnableVAD() {
CriticalSectionScoped lock(neteq_crit_sect_);
if (vad_status_) {
return 0;
}
- for (WebRtc_Word16 idx = 0; idx < num_slaves_ + 1; idx++) {
+ for (int16_t idx = 0; idx < num_slaves_ + 1; idx++) {
if (!is_initialized_[idx]) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
"SetVADStatus: NetEq is not initialized.");
@@ -776,7 +776,7 @@
return vad_mode_;
}
-WebRtc_Word16 ACMNetEQ::SetVADMode(const ACMVADMode mode) {
+int16_t ACMNetEQ::SetVADMode(const ACMVADMode mode) {
CriticalSectionScoped lock(neteq_crit_sect_);
if ((mode < VADNormal) || (mode > VADVeryAggr)) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
@@ -784,7 +784,7 @@
"supported");
return -1;
} else {
- for (WebRtc_Word16 idx = 0; idx < num_slaves_ + 1; idx++) {
+ for (int16_t idx = 0; idx < num_slaves_ + 1; idx++) {
if (!is_initialized_[idx]) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
"SetVADMode: NetEq is not initialized.");
@@ -800,9 +800,9 @@
}
}
-WebRtc_Word32 ACMNetEQ::FlushBuffers() {
+int32_t ACMNetEQ::FlushBuffers() {
CriticalSectionScoped lock(neteq_crit_sect_);
- for (WebRtc_Word16 idx = 0; idx < num_slaves_ + 1; idx++) {
+ for (int16_t idx = 0; idx < num_slaves_ + 1; idx++) {
if (!is_initialized_[idx]) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
"FlushBuffers: NetEq is not initialized.");
@@ -816,8 +816,8 @@
return 0;
}
-WebRtc_Word16 ACMNetEQ::RemoveCodec(WebRtcNetEQDecoder codec_idx,
- bool is_stereo) {
+int16_t ACMNetEQ::RemoveCodec(WebRtcNetEQDecoder codec_idx,
+ bool is_stereo) {
// sanity check
if ((codec_idx <= kDecoderReservedStart) ||
(codec_idx >= kDecoderReservedEnd)) {
@@ -848,10 +848,10 @@
return 0;
}
-WebRtc_Word16 ACMNetEQ::SetBackgroundNoiseMode(
+int16_t ACMNetEQ::SetBackgroundNoiseMode(
const ACMBackgroundNoiseMode mode) {
CriticalSectionScoped lock(neteq_crit_sect_);
- for (WebRtc_Word16 idx = 0; idx < num_slaves_ + 1; idx++) {
+ for (int16_t idx = 0; idx < num_slaves_ + 1; idx++) {
if (!is_initialized_[idx]) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
"SetBackgroundNoiseMode: NetEq is not initialized.");
@@ -865,7 +865,7 @@
return 0;
}
-WebRtc_Word16 ACMNetEQ::BackgroundNoiseMode(ACMBackgroundNoiseMode& mode) {
+int16_t ACMNetEQ::BackgroundNoiseMode(ACMBackgroundNoiseMode& mode) {
WebRtcNetEQBGNMode my_mode;
CriticalSectionScoped lock(neteq_crit_sect_);
if (!is_initialized_[0]) {
@@ -882,13 +882,13 @@
return 0;
}
-void ACMNetEQ::set_id(WebRtc_Word32 id) {
+void ACMNetEQ::set_id(int32_t id) {
CriticalSectionScoped lock(neteq_crit_sect_);
id_ = id;
}
void ACMNetEQ::LogError(const char* neteq_func_name,
- const WebRtc_Word16 idx) const {
+ const int16_t idx) const {
char error_name[NETEQ_ERR_MSG_LEN_BYTE];
char my_func_name[50];
int neteq_error_code = WebRtcNetEQ_GetErrorCode(inst_[idx]);
@@ -902,7 +902,7 @@
" %s", idx, my_func_name, neteq_error_code, error_name);
}
-WebRtc_Word32 ACMNetEQ::PlayoutTimestamp(WebRtc_UWord32& timestamp) {
+int32_t ACMNetEQ::PlayoutTimestamp(uint32_t& timestamp) {
CriticalSectionScoped lock(neteq_crit_sect_);
if (WebRtcNetEQ_GetSpeechTimeStamp(inst_[0], &timestamp) < 0) {
LogError("GetSpeechTimeStamp", 0);
@@ -944,10 +944,10 @@
}
}
-WebRtc_Word16 ACMNetEQ::AddSlave(const WebRtcNetEQDecoder* used_codecs,
- WebRtc_Word16 num_codecs) {
+int16_t ACMNetEQ::AddSlave(const WebRtcNetEQDecoder* used_codecs,
+ int16_t num_codecs) {
CriticalSectionScoped lock(neteq_crit_sect_);
- const WebRtc_Word16 slave_idx = 1;
+ const int16_t slave_idx = 1;
if (num_slaves_ < 1) {
// initialize the receiver, this also sets up VAD.
if (InitByIdxSafe(slave_idx) < 0) {
@@ -1050,7 +1050,7 @@
received_stereo_ = received_stereo;
}
-WebRtc_UWord8 ACMNetEQ::num_slaves() {
+uint8_t ACMNetEQ::num_slaves() {
CriticalSectionScoped lock(neteq_crit_sect_);
return num_slaves_;
}
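Throughout ACMNetEQ, instance 0 is the master NetEQ and instance 1 is the slave added by AddSlave() for stereo playout, so every per-instance operation iterates over num_slaves_ + 1 entries and bails out if any instance is uninitialised. A minimal sketch of that pattern with stand-in types:

#include <cstdint>

struct FakeNetEqInstance { bool initialized; };  // stand-in for the opaque NetEQ handle

// Mirrors the "loop over master + slaves, fail if one is uninitialised" pattern above.
int16_t ForEachInstance(const FakeNetEqInstance* instances, uint8_t num_slaves) {
  for (int16_t idx = 0; idx < num_slaves + 1; ++idx) {
    if (!instances[idx].initialized) return -1;
    // ...the per-instance NetEQ call (flush, set mode, etc.) would go here...
  }
  return 0;
}

int main() {
  FakeNetEqInstance instances[2] = {{true}, {true}};  // master + one slave (stereo)
  return ForEachInstance(instances, 1);
}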
diff --git a/webrtc/modules/audio_coding/main/source/acm_neteq.h b/webrtc/modules/audio_coding/main/source/acm_neteq.h
index 06a1148..ac91f9f 100644
--- a/webrtc/modules/audio_coding/main/source/acm_neteq.h
+++ b/webrtc/modules/audio_coding/main/source/acm_neteq.h
@@ -48,7 +48,7 @@
// -1 if NetEQ or VAD returned an error or
// if out of memory.
//
- WebRtc_Word32 Init();
+ int32_t Init();
//
// RecIn()
@@ -64,9 +64,9 @@
// Return value : 0 if ok.
// <0 if NetEQ returned an error.
//
- WebRtc_Word32 RecIn(const WebRtc_UWord8* incoming_payload,
- const WebRtc_Word32 length_payload,
- const WebRtcRTPHeader& rtp_info);
+ int32_t RecIn(const uint8_t* incoming_payload,
+ const int32_t length_payload,
+ const WebRtcRTPHeader& rtp_info);
//
// RecOut()
@@ -79,7 +79,7 @@
// Return value : 0 if ok.
// -1 if NetEQ returned an error.
//
- WebRtc_Word32 RecOut(AudioFrame& audio_frame);
+ int32_t RecOut(AudioFrame& audio_frame);
//
// AddCodec()
@@ -94,8 +94,8 @@
// Return value : 0 if ok.
// <0 if NetEQ returned an error.
//
- WebRtc_Word32 AddCodec(WebRtcNetEQ_CodecDef *codec_def,
- bool to_master = true);
+ int32_t AddCodec(WebRtcNetEQ_CodecDef *codec_def,
+ bool to_master = true);
//
// AllocatePacketBuffer()
@@ -108,8 +108,8 @@
// Return value : 0 if ok.
// <0 if NetEQ returned an error.
//
- WebRtc_Word32 AllocatePacketBuffer(const WebRtcNetEQDecoder* used_codecs,
- WebRtc_Word16 num_codecs);
+ int32_t AllocatePacketBuffer(const WebRtcNetEQDecoder* used_codecs,
+ int16_t num_codecs);
//
// SetExtraDelay()
@@ -121,7 +121,7 @@
// Return value : 0 if ok.
// <0 if NetEQ returned an error.
//
- WebRtc_Word32 SetExtraDelay(const WebRtc_Word32 delay_in_ms);
+ int32_t SetExtraDelay(const int32_t delay_in_ms);
//
// SetAVTPlayout()
@@ -133,7 +133,7 @@
// Return value : 0 if ok.
// <0 if NetEQ returned an error.
//
- WebRtc_Word32 SetAVTPlayout(const bool enable);
+ int32_t SetAVTPlayout(const bool enable);
//
// AVTPlayout()
@@ -150,7 +150,7 @@
//
// Return value : Sampling frequency in Hz.
//
- WebRtc_Word32 CurrentSampFreqHz() const;
+ int32_t CurrentSampFreqHz() const;
//
// SetPlayoutMode()
@@ -163,7 +163,7 @@
// Return value : 0 if ok.
// <0 if NetEQ returned an error.
//
- WebRtc_Word32 SetPlayoutMode(const AudioPlayoutMode mode);
+ int32_t SetPlayoutMode(const AudioPlayoutMode mode);
//
// PlayoutMode()
@@ -183,7 +183,7 @@
// Return value : 0 if ok.
// <0 if NetEQ returned an error.
//
- WebRtc_Word32 NetworkStatistics(ACMNetworkStatistics* statistics) const;
+ int32_t NetworkStatistics(ACMNetworkStatistics* statistics) const;
//
// VADMode()
@@ -203,7 +203,7 @@
// Return value : 0 if ok.
// -1 if an error occurred.
//
- WebRtc_Word16 SetVADMode(const ACMVADMode mode);
+ int16_t SetVADMode(const ACMVADMode mode);
//
// DecodeLock()
@@ -222,7 +222,7 @@
// Return value : 0 if ok.
// -1 if NetEQ returned an error.
//
- WebRtc_Word32 FlushBuffers();
+ int32_t FlushBuffers();
//
// RemoveCodec()
@@ -234,8 +234,8 @@
// Return value : 0 if ok.
// -1 if an error occurred.
//
- WebRtc_Word16 RemoveCodec(WebRtcNetEQDecoder codec_idx,
- bool is_stereo = false);
+ int16_t RemoveCodec(WebRtcNetEQDecoder codec_idx,
+ bool is_stereo = false);
//
// SetBackgroundNoiseMode()
@@ -248,7 +248,7 @@
// Return value : 0 if succeeded,
// -1 if failed to set the mode.
//
- WebRtc_Word16 SetBackgroundNoiseMode(const ACMBackgroundNoiseMode mode);
+ int16_t SetBackgroundNoiseMode(const ACMBackgroundNoiseMode mode);
//
// BackgroundNoiseMode()
@@ -256,21 +256,21 @@
//
// Return value : The mode of background noise.
//
- WebRtc_Word16 BackgroundNoiseMode(ACMBackgroundNoiseMode& mode);
+ int16_t BackgroundNoiseMode(ACMBackgroundNoiseMode& mode);
- void set_id(WebRtc_Word32 id);
+ void set_id(int32_t id);
- WebRtc_Word32 PlayoutTimestamp(WebRtc_UWord32& timestamp);
+ int32_t PlayoutTimestamp(uint32_t& timestamp);
void set_received_stereo(bool received_stereo);
- WebRtc_UWord8 num_slaves();
+ uint8_t num_slaves();
// Delete all slaves.
void RemoveSlaves();
- WebRtc_Word16 AddSlave(const WebRtcNetEQDecoder* used_codecs,
- WebRtc_Word16 num_codecs);
+ int16_t AddSlave(const WebRtcNetEQDecoder* used_codecs,
+ int16_t num_codecs);
void BufferSpec(int& num_packets, int& size_bytes, int& overhead_bytes) {
num_packets = min_of_max_num_packets_;
@@ -292,13 +292,13 @@
// Output:
// - rtp_packet : The RTP packet.
//
- static void RTPPack(WebRtc_Word16* rtp_packet, const WebRtc_Word8* payload,
- const WebRtc_Word32 payload_length_bytes,
+ static void RTPPack(int16_t* rtp_packet, const int8_t* payload,
+ const int32_t payload_length_bytes,
const WebRtcRTPHeader& rtp_info);
- void LogError(const char* neteq_func_name, const WebRtc_Word16 idx) const;
+ void LogError(const char* neteq_func_name, const int16_t idx) const;
- WebRtc_Word16 InitByIdxSafe(const WebRtc_Word16 idx);
+ int16_t InitByIdxSafe(const int16_t idx);
//
// EnableVAD()
@@ -307,14 +307,14 @@
// Return value : 0 if ok.
// -1 if an error occurred.
//
- WebRtc_Word16 EnableVAD();
+ int16_t EnableVAD();
- WebRtc_Word16 EnableVADByIdxSafe(const WebRtc_Word16 idx);
+ int16_t EnableVADByIdxSafe(const int16_t idx);
- WebRtc_Word16 AllocatePacketBufferByIdxSafe(
+ int16_t AllocatePacketBufferByIdxSafe(
const WebRtcNetEQDecoder* used_codecs,
- WebRtc_Word16 num_codecs,
- const WebRtc_Word16 idx);
+ int16_t num_codecs,
+ const int16_t idx);
// Delete the NetEQ corresponding to |index|.
void RemoveNetEQSafe(int index);
@@ -324,9 +324,9 @@
void* inst_[MAX_NUM_SLAVE_NETEQ + 1];
void* inst_mem_[MAX_NUM_SLAVE_NETEQ + 1];
- WebRtc_Word16* neteq_packet_buffer_[MAX_NUM_SLAVE_NETEQ + 1];
+ int16_t* neteq_packet_buffer_[MAX_NUM_SLAVE_NETEQ + 1];
- WebRtc_Word32 id_;
+ int32_t id_;
float current_samp_freq_khz_;
bool avt_playout_;
AudioPlayoutMode playout_mode_;
@@ -338,11 +338,11 @@
ACMVADMode vad_mode_;
RWLockWrapper* decode_lock_;
bool is_initialized_[MAX_NUM_SLAVE_NETEQ + 1];
- WebRtc_UWord8 num_slaves_;
+ uint8_t num_slaves_;
bool received_stereo_;
void* master_slave_info_;
AudioFrame::VADActivity previous_audio_activity_;
- WebRtc_Word32 extra_delay_;
+ int32_t extra_delay_;
CriticalSectionWrapper* callback_crit_sect_;
// Minimum of "max number of packets," among all NetEq instances.
diff --git a/webrtc/modules/audio_coding/main/source/acm_neteq_unittest.cc b/webrtc/modules/audio_coding/main/source/acm_neteq_unittest.cc
index d7f3f83..9cf280b 100644
--- a/webrtc/modules/audio_coding/main/source/acm_neteq_unittest.cc
+++ b/webrtc/modules/audio_coding/main/source/acm_neteq_unittest.cc
@@ -68,7 +68,7 @@
rtp_header.header.payloadType = payload_type;
rtp_header.header.markerBit = marker_bit;
rtp_header.type.Audio.channel = 1;
- ASSERT_EQ(0, neteq_.RecIn(reinterpret_cast<WebRtc_UWord8*>(payload),
+ ASSERT_EQ(0, neteq_.RecIn(reinterpret_cast<uint8_t*>(payload),
len_payload_bytes, rtp_header));
}
diff --git a/webrtc/modules/audio_coding/main/source/acm_pcm16b.cc b/webrtc/modules/audio_coding/main/source/acm_pcm16b.cc
index f341295..91cb9e0 100644
--- a/webrtc/modules/audio_coding/main/source/acm_pcm16b.cc
+++ b/webrtc/modules/audio_coding/main/source/acm_pcm16b.cc
@@ -25,7 +25,7 @@
#ifndef WEBRTC_CODEC_PCM16
-ACMPCM16B::ACMPCM16B(WebRtc_Word16 /* codec_id */) {
+ACMPCM16B::ACMPCM16B(int16_t /* codec_id */) {
return;
}
@@ -33,32 +33,32 @@
return;
}
-WebRtc_Word16 ACMPCM16B::InternalEncode(
- WebRtc_UWord8* /* bitstream */,
- WebRtc_Word16* /* bitstream_len_byte */) {
+int16_t ACMPCM16B::InternalEncode(
+ uint8_t* /* bitstream */,
+ int16_t* /* bitstream_len_byte */) {
return -1;
}
-WebRtc_Word16 ACMPCM16B::DecodeSafe(WebRtc_UWord8* /* bitstream */,
- WebRtc_Word16 /* bitstream_len_byte */,
- WebRtc_Word16* /* audio */,
- WebRtc_Word16* /* audio_samples */,
- WebRtc_Word8* /* speech_type */) {
+int16_t ACMPCM16B::DecodeSafe(uint8_t* /* bitstream */,
+ int16_t /* bitstream_len_byte */,
+ int16_t* /* audio */,
+ int16_t* /* audio_samples */,
+ int8_t* /* speech_type */) {
return -1;
}
-WebRtc_Word16 ACMPCM16B::InternalInitEncoder(
+int16_t ACMPCM16B::InternalInitEncoder(
WebRtcACMCodecParams* /* codec_params */) {
return -1;
}
-WebRtc_Word16 ACMPCM16B::InternalInitDecoder(
+int16_t ACMPCM16B::InternalInitDecoder(
WebRtcACMCodecParams* /* codec_params */) {
return -1;
}
-WebRtc_Word32 ACMPCM16B::CodecDef(WebRtcNetEQ_CodecDef& /* codec_def */,
- const CodecInst& /* codec_inst */) {
+int32_t ACMPCM16B::CodecDef(WebRtcNetEQ_CodecDef& /* codec_def */,
+ const CodecInst& /* codec_inst */) {
return -1;
}
@@ -66,11 +66,11 @@
return NULL;
}
-WebRtc_Word16 ACMPCM16B::InternalCreateEncoder() {
+int16_t ACMPCM16B::InternalCreateEncoder() {
return -1;
}
-WebRtc_Word16 ACMPCM16B::InternalCreateDecoder() {
+int16_t ACMPCM16B::InternalCreateDecoder() {
return -1;
}
@@ -91,7 +91,7 @@
}
#else //===================== Actual Implementation =======================
-ACMPCM16B::ACMPCM16B(WebRtc_Word16 codec_id) {
+ACMPCM16B::ACMPCM16B(int16_t codec_id) {
codec_id_ = codec_id;
sampling_freq_hz_ = ACMCodecDB::CodecFreq(codec_id_);
}
@@ -100,8 +100,8 @@
return;
}
-WebRtc_Word16 ACMPCM16B::InternalEncode(WebRtc_UWord8* bitstream,
- WebRtc_Word16* bitstream_len_byte) {
+int16_t ACMPCM16B::InternalEncode(uint8_t* bitstream,
+ int16_t* bitstream_len_byte) {
*bitstream_len_byte = WebRtcPcm16b_Encode(&in_audio_[in_audio_ix_read_],
frame_len_smpl_ * num_channels_,
bitstream);
@@ -111,28 +111,28 @@
return *bitstream_len_byte;
}
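InternalEncode() above hands frame_len_smpl_ * num_channels_ samples to WebRtcPcm16b_Encode() and reports the returned byte count. Since PCM16B is uncompressed 16-bit PCM, that count is simply two bytes per sample; the sketch below shows the size relationship with a host-byte-order copy, which is illustrative only and may differ from the real encoder's byte ordering.

#include <cstdint>
#include <cstdio>
#include <cstring>

// Copies |samples| 16-bit samples into |encoded| and returns the length in bytes.
int16_t EncodePcm16Sketch(const int16_t* speech, int16_t samples, uint8_t* encoded) {
  std::memcpy(encoded, speech, samples * sizeof(int16_t));
  return static_cast<int16_t>(samples * sizeof(int16_t));
}

int main() {
  int16_t frame[320] = {0};  // 10 ms stereo at 16 kHz: 160 samples x 2 channels
  uint8_t payload[640];
  std::printf("payload bytes: %d\n", EncodePcm16Sketch(frame, 320, payload));  // 640
  return 0;
}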
-WebRtc_Word16 ACMPCM16B::DecodeSafe(WebRtc_UWord8* /* bitstream */,
- WebRtc_Word16 /* bitstream_len_byte */,
- WebRtc_Word16* /* audio */,
- WebRtc_Word16* /* audio_samples */,
- WebRtc_Word8* /* speech_type */) {
+int16_t ACMPCM16B::DecodeSafe(uint8_t* /* bitstream */,
+ int16_t /* bitstream_len_byte */,
+ int16_t* /* audio */,
+ int16_t* /* audio_samples */,
+ int8_t* /* speech_type */) {
return 0;
}
-WebRtc_Word16 ACMPCM16B::InternalInitEncoder(
+int16_t ACMPCM16B::InternalInitEncoder(
WebRtcACMCodecParams* /* codec_params */) {
// This codec does not need initialization, PCM has no instance.
return 0;
}
-WebRtc_Word16 ACMPCM16B::InternalInitDecoder(
+int16_t ACMPCM16B::InternalInitDecoder(
WebRtcACMCodecParams* /* codec_params */) {
// This codec does not need initialization, PCM has no instance.
return 0;
}
-WebRtc_Word32 ACMPCM16B::CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst) {
+int32_t ACMPCM16B::CodecDef(WebRtcNetEQ_CodecDef& codec_def,
+ const CodecInst& codec_inst) {
// Fill up the structure by calling "SET_CODEC_PAR" & "SET_PCMU_FUNCTION".
// Then call NetEQ to add the codec to its database.
if (codec_inst.channels == 1) {
@@ -190,12 +190,12 @@
return NULL;
}
-WebRtc_Word16 ACMPCM16B::InternalCreateEncoder() {
+int16_t ACMPCM16B::InternalCreateEncoder() {
// PCM has no instance.
return 0;
}
-WebRtc_Word16 ACMPCM16B::InternalCreateDecoder() {
+int16_t ACMPCM16B::InternalCreateDecoder() {
// PCM has no instance.
return 0;
}
diff --git a/webrtc/modules/audio_coding/main/source/acm_pcm16b.h b/webrtc/modules/audio_coding/main/source/acm_pcm16b.h
index 314e3bb..5368fe7 100644
--- a/webrtc/modules/audio_coding/main/source/acm_pcm16b.h
+++ b/webrtc/modules/audio_coding/main/source/acm_pcm16b.h
@@ -17,42 +17,42 @@
class ACMPCM16B : public ACMGenericCodec {
public:
- explicit ACMPCM16B(WebRtc_Word16 codec_id);
+ explicit ACMPCM16B(int16_t codec_id);
~ACMPCM16B();
// for FEC
ACMGenericCodec* CreateInstance(void);
- WebRtc_Word16 InternalEncode(WebRtc_UWord8* bitstream,
- WebRtc_Word16* bitstream_len_byte);
+ int16_t InternalEncode(uint8_t* bitstream,
+ int16_t* bitstream_len_byte);
- WebRtc_Word16 InternalInitEncoder(WebRtcACMCodecParams *codec_params);
+ int16_t InternalInitEncoder(WebRtcACMCodecParams *codec_params);
- WebRtc_Word16 InternalInitDecoder(WebRtcACMCodecParams *codec_params);
+ int16_t InternalInitDecoder(WebRtcACMCodecParams *codec_params);
protected:
- WebRtc_Word16 DecodeSafe(WebRtc_UWord8* bitstream,
- WebRtc_Word16 bitstream_len_byte,
- WebRtc_Word16* audio,
- WebRtc_Word16* audio_samples,
- WebRtc_Word8* speech_type);
+ int16_t DecodeSafe(uint8_t* bitstream,
+ int16_t bitstream_len_byte,
+ int16_t* audio,
+ int16_t* audio_samples,
+ int8_t* speech_type);
- WebRtc_Word32 CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst);
+ int32_t CodecDef(WebRtcNetEQ_CodecDef& codec_def,
+ const CodecInst& codec_inst);
void DestructEncoderSafe();
void DestructDecoderSafe();
- WebRtc_Word16 InternalCreateEncoder();
+ int16_t InternalCreateEncoder();
- WebRtc_Word16 InternalCreateDecoder();
+ int16_t InternalCreateDecoder();
void InternalDestructEncoderInst(void* ptr_inst);
void SplitStereoPacket(uint8_t* payload, int32_t* payload_length);
- WebRtc_Word32 sampling_freq_hz_;
+ int32_t sampling_freq_hz_;
};
} // namespace webrtc
diff --git a/webrtc/modules/audio_coding/main/source/acm_pcma.cc b/webrtc/modules/audio_coding/main/source/acm_pcma.cc
index 24a6fe2..83c1249 100644
--- a/webrtc/modules/audio_coding/main/source/acm_pcma.cc
+++ b/webrtc/modules/audio_coding/main/source/acm_pcma.cc
@@ -21,7 +21,7 @@
namespace webrtc {
-ACMPCMA::ACMPCMA(WebRtc_Word16 codec_id) {
+ACMPCMA::ACMPCMA(int16_t codec_id) {
codec_id_ = codec_id;
}
@@ -29,39 +29,39 @@
return;
}
-WebRtc_Word16 ACMPCMA::InternalEncode(WebRtc_UWord8* bitstream,
- WebRtc_Word16* bitstream_len_byte) {
+int16_t ACMPCMA::InternalEncode(uint8_t* bitstream,
+ int16_t* bitstream_len_byte) {
*bitstream_len_byte = WebRtcG711_EncodeA(NULL, &in_audio_[in_audio_ix_read_],
frame_len_smpl_ * num_channels_,
- (WebRtc_Word16*) bitstream);
+ (int16_t*) bitstream);
// Increment the read index; this tells the caller how far
// we have gone forward in reading the audio buffer.
in_audio_ix_read_ += frame_len_smpl_ * num_channels_;
return *bitstream_len_byte;
}
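G.711, by contrast, compresses each 16-bit sample to a single byte (A-law here, and the analogous u-law path in ACMPCMU below), so the returned length equals the number of input samples. A hypothetical sketch, not part of this patch:

  // G.711: 1 byte per sample, independent of A-law or u-law. A 20 ms mono
  // frame at 8 kHz (160 samples) encodes to 160 bytes.
  int16_t ExpectedG711PayloadBytes(int16_t frame_len_smpl, uint8_t num_channels) {
    return static_cast<int16_t>(frame_len_smpl * num_channels);
  }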
-WebRtc_Word16 ACMPCMA::DecodeSafe(WebRtc_UWord8* /* bitstream */,
- WebRtc_Word16 /* bitstream_len_byte */,
- WebRtc_Word16* /* audio */,
- WebRtc_Word16* /* audio_samples */,
- WebRtc_Word8* /* speech_type */) {
+int16_t ACMPCMA::DecodeSafe(uint8_t* /* bitstream */,
+ int16_t /* bitstream_len_byte */,
+ int16_t* /* audio */,
+ int16_t* /* audio_samples */,
+ int8_t* /* speech_type */) {
return 0;
}
-WebRtc_Word16 ACMPCMA::InternalInitEncoder(
+int16_t ACMPCMA::InternalInitEncoder(
WebRtcACMCodecParams* /* codec_params */) {
// This codec does not need initialization, PCM has no instance.
return 0;
}
-WebRtc_Word16 ACMPCMA::InternalInitDecoder(
+int16_t ACMPCMA::InternalInitDecoder(
WebRtcACMCodecParams* /* codec_params */) {
// This codec does not need initialization, PCM has no instance.
return 0;
}
-WebRtc_Word32 ACMPCMA::CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst) {
+int32_t ACMPCMA::CodecDef(WebRtcNetEQ_CodecDef& codec_def,
+ const CodecInst& codec_inst) {
// Fill up the structure by calling
// "SET_CODEC_PAR" & "SET_PCMA_FUNCTION."
// Then call NetEQ to add the codec to its database.
@@ -80,12 +80,12 @@
return NULL;
}
-WebRtc_Word16 ACMPCMA::InternalCreateEncoder() {
+int16_t ACMPCMA::InternalCreateEncoder() {
// PCM has no instance.
return 0;
}
-WebRtc_Word16 ACMPCMA::InternalCreateDecoder() {
+int16_t ACMPCMA::InternalCreateDecoder() {
// PCM has no instance.
return 0;
}
diff --git a/webrtc/modules/audio_coding/main/source/acm_pcma.h b/webrtc/modules/audio_coding/main/source/acm_pcma.h
index cf12dca..59c3389 100644
--- a/webrtc/modules/audio_coding/main/source/acm_pcma.h
+++ b/webrtc/modules/audio_coding/main/source/acm_pcma.h
@@ -17,36 +17,36 @@
class ACMPCMA : public ACMGenericCodec {
public:
- explicit ACMPCMA(WebRtc_Word16 codec_id);
+ explicit ACMPCMA(int16_t codec_id);
~ACMPCMA();
// for FEC
ACMGenericCodec* CreateInstance(void);
- WebRtc_Word16 InternalEncode(WebRtc_UWord8* bitstream,
- WebRtc_Word16* bitstream_len_byte);
+ int16_t InternalEncode(uint8_t* bitstream,
+ int16_t* bitstream_len_byte);
- WebRtc_Word16 InternalInitEncoder(WebRtcACMCodecParams *codec_params);
+ int16_t InternalInitEncoder(WebRtcACMCodecParams *codec_params);
- WebRtc_Word16 InternalInitDecoder(WebRtcACMCodecParams *codec_params);
+ int16_t InternalInitDecoder(WebRtcACMCodecParams *codec_params);
protected:
- WebRtc_Word16 DecodeSafe(WebRtc_UWord8* bitstream,
- WebRtc_Word16 bitstream_len_byte,
- WebRtc_Word16* audio,
- WebRtc_Word16* audio_samples,
- WebRtc_Word8* speech_type);
+ int16_t DecodeSafe(uint8_t* bitstream,
+ int16_t bitstream_len_byte,
+ int16_t* audio,
+ int16_t* audio_samples,
+ int8_t* speech_type);
- WebRtc_Word32 CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst);
+ int32_t CodecDef(WebRtcNetEQ_CodecDef& codec_def,
+ const CodecInst& codec_inst);
void DestructEncoderSafe();
void DestructDecoderSafe();
- WebRtc_Word16 InternalCreateEncoder();
+ int16_t InternalCreateEncoder();
- WebRtc_Word16 InternalCreateDecoder();
+ int16_t InternalCreateDecoder();
void InternalDestructEncoderInst(void* ptr_inst);
diff --git a/webrtc/modules/audio_coding/main/source/acm_pcmu.cc b/webrtc/modules/audio_coding/main/source/acm_pcmu.cc
index 919826d..61a64ac 100644
--- a/webrtc/modules/audio_coding/main/source/acm_pcmu.cc
+++ b/webrtc/modules/audio_coding/main/source/acm_pcmu.cc
@@ -21,7 +21,7 @@
namespace webrtc {
-ACMPCMU::ACMPCMU(WebRtc_Word16 codec_id) {
+ACMPCMU::ACMPCMU(int16_t codec_id) {
codec_id_ = codec_id;
}
@@ -29,39 +29,39 @@
return;
}
-WebRtc_Word16 ACMPCMU::InternalEncode(WebRtc_UWord8* bitstream,
- WebRtc_Word16* bitstream_len_byte) {
+int16_t ACMPCMU::InternalEncode(uint8_t* bitstream,
+ int16_t* bitstream_len_byte) {
*bitstream_len_byte = WebRtcG711_EncodeU(NULL, &in_audio_[in_audio_ix_read_],
frame_len_smpl_ * num_channels_,
- (WebRtc_Word16*)bitstream);
+ (int16_t*)bitstream);
// Increment the read index; this tells the caller how far
// we have gone forward in reading the audio buffer.
in_audio_ix_read_ += frame_len_smpl_ * num_channels_;
return *bitstream_len_byte;
}
-WebRtc_Word16 ACMPCMU::DecodeSafe(WebRtc_UWord8* /* bitstream */,
- WebRtc_Word16 /* bitstream_len_byte */,
- WebRtc_Word16* /* audio */,
- WebRtc_Word16* /* audio_samples */,
- WebRtc_Word8* /* speech_type */) {
+int16_t ACMPCMU::DecodeSafe(uint8_t* /* bitstream */,
+ int16_t /* bitstream_len_byte */,
+ int16_t* /* audio */,
+ int16_t* /* audio_samples */,
+ int8_t* /* speech_type */) {
return 0;
}
-WebRtc_Word16 ACMPCMU::InternalInitEncoder(
+int16_t ACMPCMU::InternalInitEncoder(
WebRtcACMCodecParams* /* codec_params */) {
// This codec does not need initialization, PCM has no instance.
return 0;
}
-WebRtc_Word16 ACMPCMU::InternalInitDecoder(
+int16_t ACMPCMU::InternalInitDecoder(
WebRtcACMCodecParams* /* codec_params */) {
// This codec does not need initialization, PCM has no instance.
return 0;
}
-WebRtc_Word32 ACMPCMU::CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst) {
+int32_t ACMPCMU::CodecDef(WebRtcNetEQ_CodecDef& codec_def,
+ const CodecInst& codec_inst) {
// Fill up the structure by calling
// "SET_CODEC_PAR" & "SET_PCMU_FUNCTION."
// Then call NetEQ to add the codec to its database.
@@ -80,12 +80,12 @@
return NULL;
}
-WebRtc_Word16 ACMPCMU::InternalCreateEncoder() {
+int16_t ACMPCMU::InternalCreateEncoder() {
// PCM has no instance.
return 0;
}
-WebRtc_Word16 ACMPCMU::InternalCreateDecoder() {
+int16_t ACMPCMU::InternalCreateDecoder() {
// PCM has no instance.
return 0;
}
diff --git a/webrtc/modules/audio_coding/main/source/acm_pcmu.h b/webrtc/modules/audio_coding/main/source/acm_pcmu.h
index f75e7aa..5169f13 100644
--- a/webrtc/modules/audio_coding/main/source/acm_pcmu.h
+++ b/webrtc/modules/audio_coding/main/source/acm_pcmu.h
@@ -17,36 +17,36 @@
class ACMPCMU : public ACMGenericCodec {
public:
- explicit ACMPCMU(WebRtc_Word16 codec_id);
+ explicit ACMPCMU(int16_t codec_id);
~ACMPCMU();
// for FEC
ACMGenericCodec* CreateInstance(void);
- WebRtc_Word16 InternalEncode(WebRtc_UWord8* bitstream,
- WebRtc_Word16* bitstream_len_byte);
+ int16_t InternalEncode(uint8_t* bitstream,
+ int16_t* bitstream_len_byte);
- WebRtc_Word16 InternalInitEncoder(WebRtcACMCodecParams *codec_params);
+ int16_t InternalInitEncoder(WebRtcACMCodecParams *codec_params);
- WebRtc_Word16 InternalInitDecoder(WebRtcACMCodecParams *codec_params);
+ int16_t InternalInitDecoder(WebRtcACMCodecParams *codec_params);
protected:
- WebRtc_Word16 DecodeSafe(WebRtc_UWord8* bitstream,
- WebRtc_Word16 bitstream_len_byte,
- WebRtc_Word16* audio,
- WebRtc_Word16* audio_samples,
- WebRtc_Word8* speech_type);
+ int16_t DecodeSafe(uint8_t* bitstream,
+ int16_t bitstream_len_byte,
+ int16_t* audio,
+ int16_t* audio_samples,
+ int8_t* speech_type);
- WebRtc_Word32 CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst);
+ int32_t CodecDef(WebRtcNetEQ_CodecDef& codec_def,
+ const CodecInst& codec_inst);
void DestructEncoderSafe();
void DestructDecoderSafe();
- WebRtc_Word16 InternalCreateEncoder();
+ int16_t InternalCreateEncoder();
- WebRtc_Word16 InternalCreateDecoder();
+ int16_t InternalCreateDecoder();
void InternalDestructEncoderInst(void* ptr_inst);
diff --git a/webrtc/modules/audio_coding/main/source/acm_red.cc b/webrtc/modules/audio_coding/main/source/acm_red.cc
index a249449..6e7ae9f 100644
--- a/webrtc/modules/audio_coding/main/source/acm_red.cc
+++ b/webrtc/modules/audio_coding/main/source/acm_red.cc
@@ -18,7 +18,7 @@
namespace webrtc {
-ACMRED::ACMRED(WebRtc_Word16 codec_id) {
+ACMRED::ACMRED(int16_t codec_id) {
codec_id_ = codec_id;
}
@@ -26,37 +26,37 @@
return;
}
-WebRtc_Word16 ACMRED::InternalEncode(WebRtc_UWord8* /* bitstream */,
- WebRtc_Word16* /* bitstream_len_byte */) {
+int16_t ACMRED::InternalEncode(uint8_t* /* bitstream */,
+ int16_t* /* bitstream_len_byte */) {
// RED is never used as an encoder
// RED has no instance
return 0;
}
-WebRtc_Word16 ACMRED::DecodeSafe(WebRtc_UWord8* /* bitstream */,
- WebRtc_Word16 /* bitstream_len_byte */,
- WebRtc_Word16* /* audio */,
- WebRtc_Word16* /* audio_samples */,
- WebRtc_Word8* /* speech_type */) {
+int16_t ACMRED::DecodeSafe(uint8_t* /* bitstream */,
+ int16_t /* bitstream_len_byte */,
+ int16_t* /* audio */,
+ int16_t* /* audio_samples */,
+ int8_t* /* speech_type */) {
return 0;
}
-WebRtc_Word16 ACMRED::InternalInitEncoder(
+int16_t ACMRED::InternalInitEncoder(
WebRtcACMCodecParams* /* codec_params */) {
// This codec does not need initialization,
// RED has no instance
return 0;
}
-WebRtc_Word16 ACMRED::InternalInitDecoder(
+int16_t ACMRED::InternalInitDecoder(
WebRtcACMCodecParams* /* codec_params */) {
// This codec does not need initialization,
// RED has no instance
return 0;
}
-WebRtc_Word32 ACMRED::CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst) {
+int32_t ACMRED::CodecDef(WebRtcNetEQ_CodecDef& codec_def,
+ const CodecInst& codec_inst) {
if (!decoder_initialized_) {
// Todo:
// log error
@@ -76,12 +76,12 @@
return NULL;
}
-WebRtc_Word16 ACMRED::InternalCreateEncoder() {
+int16_t ACMRED::InternalCreateEncoder() {
// RED has no instance
return 0;
}
-WebRtc_Word16 ACMRED::InternalCreateDecoder() {
+int16_t ACMRED::InternalCreateDecoder() {
// RED has no instance
return 0;
}
diff --git a/webrtc/modules/audio_coding/main/source/acm_red.h b/webrtc/modules/audio_coding/main/source/acm_red.h
index 5f4cb9b..e19e335 100644
--- a/webrtc/modules/audio_coding/main/source/acm_red.h
+++ b/webrtc/modules/audio_coding/main/source/acm_red.h
@@ -17,36 +17,36 @@
class ACMRED : public ACMGenericCodec {
public:
- explicit ACMRED(WebRtc_Word16 codec_id);
+ explicit ACMRED(int16_t codec_id);
~ACMRED();
// for FEC
ACMGenericCodec* CreateInstance(void);
- WebRtc_Word16 InternalEncode(WebRtc_UWord8* bitstream,
- WebRtc_Word16* bitstream_len_byte);
+ int16_t InternalEncode(uint8_t* bitstream,
+ int16_t* bitstream_len_byte);
- WebRtc_Word16 InternalInitEncoder(WebRtcACMCodecParams *codec_params);
+ int16_t InternalInitEncoder(WebRtcACMCodecParams *codec_params);
- WebRtc_Word16 InternalInitDecoder(WebRtcACMCodecParams *codec_params);
+ int16_t InternalInitDecoder(WebRtcACMCodecParams *codec_params);
protected:
- WebRtc_Word16 DecodeSafe(WebRtc_UWord8* bitstream,
- WebRtc_Word16 bitstream_len_byte,
- WebRtc_Word16* audio,
- WebRtc_Word16* audio_samples,
- WebRtc_Word8* speech_type);
+ int16_t DecodeSafe(uint8_t* bitstream,
+ int16_t bitstream_len_byte,
+ int16_t* audio,
+ int16_t* audio_samples,
+ int8_t* speech_type);
- WebRtc_Word32 CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst);
+ int32_t CodecDef(WebRtcNetEQ_CodecDef& codec_def,
+ const CodecInst& codec_inst);
void DestructEncoderSafe();
void DestructDecoderSafe();
- WebRtc_Word16 InternalCreateEncoder();
+ int16_t InternalCreateEncoder();
- WebRtc_Word16 InternalCreateDecoder();
+ int16_t InternalCreateDecoder();
void InternalDestructEncoderInst(void* ptr_inst);
};
diff --git a/webrtc/modules/audio_coding/main/source/acm_resampler.cc b/webrtc/modules/audio_coding/main/source/acm_resampler.cc
index a776052..2618649 100644
--- a/webrtc/modules/audio_coding/main/source/acm_resampler.cc
+++ b/webrtc/modules/audio_coding/main/source/acm_resampler.cc
@@ -27,25 +27,25 @@
delete resampler_crit_sect_;
}
-WebRtc_Word16 ACMResampler::Resample10Msec(const WebRtc_Word16* in_audio,
- WebRtc_Word32 in_freq_hz,
- WebRtc_Word16* out_audio,
- WebRtc_Word32 out_freq_hz,
- WebRtc_UWord8 num_audio_channels) {
+int16_t ACMResampler::Resample10Msec(const int16_t* in_audio,
+ int32_t in_freq_hz,
+ int16_t* out_audio,
+ int32_t out_freq_hz,
+ uint8_t num_audio_channels) {
CriticalSectionScoped cs(resampler_crit_sect_);
if (in_freq_hz == out_freq_hz) {
size_t length = static_cast<size_t>(in_freq_hz * num_audio_channels / 100);
- memcpy(out_audio, in_audio, length * sizeof(WebRtc_Word16));
- return static_cast<WebRtc_Word16>(in_freq_hz / 100);
+ memcpy(out_audio, in_audio, length * sizeof(int16_t));
+ return static_cast<int16_t>(in_freq_hz / 100);
}
// |max_len| is the maximum number of samples for 10 ms at 48 kHz.
int max_len = 480 * num_audio_channels;
- int length_in = (WebRtc_Word16)(in_freq_hz / 100) * num_audio_channels;
+ int length_in = (int16_t)(in_freq_hz / 100) * num_audio_channels;
int out_len;
- WebRtc_Word32 ret;
+ int32_t ret;
ResamplerType type;
type = (num_audio_channels == 1) ? kResamplerSynchronous :
kResamplerSynchronousStereo;
@@ -64,7 +64,7 @@
return -1;
}
- WebRtc_Word16 out_audio_len_smpl = (WebRtc_Word16) out_len /
+ int16_t out_audio_len_smpl = (int16_t) out_len /
num_audio_channels;
return out_audio_len_smpl;
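All of the length arithmetic in Resample10Msec follows from the 10 ms framing: a 10 ms block at f Hz holds f / 100 samples per channel, which is also where the 480-sample worst case in |max_len| comes from (48000 / 100). A rough usage sketch under those assumptions, not part of this patch:

  // Illustrative: resample one 10 ms mono block from 16 kHz to 48 kHz.
  int16_t in_audio[160] = {0};   // 16000 / 100 = 160 samples per 10 ms
  int16_t out_audio[480];        // 48000 / 100 = 480 samples per 10 ms
  ACMResampler resampler;
  int16_t out_samples = resampler.Resample10Msec(in_audio, 16000,
                                                 out_audio, 48000, 1);
  // Expect out_samples == 480 on success; a negative value signals failure.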
diff --git a/webrtc/modules/audio_coding/main/source/acm_resampler.h b/webrtc/modules/audio_coding/main/source/acm_resampler.h
index aacd222..ddb0094 100644
--- a/webrtc/modules/audio_coding/main/source/acm_resampler.h
+++ b/webrtc/modules/audio_coding/main/source/acm_resampler.h
@@ -23,11 +23,11 @@
ACMResampler();
~ACMResampler();
- WebRtc_Word16 Resample10Msec(const WebRtc_Word16* in_audio,
- const WebRtc_Word32 in_freq_hz,
- WebRtc_Word16* out_audio,
- const WebRtc_Word32 out_freq_hz,
- WebRtc_UWord8 num_audio_channels);
+ int16_t Resample10Msec(const int16_t* in_audio,
+ const int32_t in_freq_hz,
+ int16_t* out_audio,
+ const int32_t out_freq_hz,
+ uint8_t num_audio_channels);
private:
// Use the Resampler class.
diff --git a/webrtc/modules/audio_coding/main/source/acm_speex.cc b/webrtc/modules/audio_coding/main/source/acm_speex.cc
index 73ab7fc..ce20526 100644
--- a/webrtc/modules/audio_coding/main/source/acm_speex.cc
+++ b/webrtc/modules/audio_coding/main/source/acm_speex.cc
@@ -26,7 +26,7 @@
namespace webrtc {
#ifndef WEBRTC_CODEC_SPEEX
-ACMSPEEX::ACMSPEEX(WebRtc_Word16 /* codec_id */)
+ACMSPEEX::ACMSPEEX(int16_t /* codec_id */)
: encoder_inst_ptr_(NULL),
decoder_inst_ptr_(NULL),
compl_mode_(0),
@@ -41,40 +41,40 @@
return;
}
-WebRtc_Word16 ACMSPEEX::InternalEncode(
- WebRtc_UWord8* /* bitstream */,
- WebRtc_Word16* /* bitstream_len_byte */) {
+int16_t ACMSPEEX::InternalEncode(
+ uint8_t* /* bitstream */,
+ int16_t* /* bitstream_len_byte */) {
return -1;
}
-WebRtc_Word16 ACMSPEEX::DecodeSafe(WebRtc_UWord8* /* bitstream */,
- WebRtc_Word16 /* bitstream_len_byte */,
- WebRtc_Word16* /* audio */,
- WebRtc_Word16* /* audio_samples */,
- WebRtc_Word8* /* speech_type */) {
+int16_t ACMSPEEX::DecodeSafe(uint8_t* /* bitstream */,
+ int16_t /* bitstream_len_byte */,
+ int16_t* /* audio */,
+ int16_t* /* audio_samples */,
+ int8_t* /* speech_type */) {
return -1;
}
-WebRtc_Word16 ACMSPEEX::EnableDTX() {
+int16_t ACMSPEEX::EnableDTX() {
return -1;
}
-WebRtc_Word16 ACMSPEEX::DisableDTX() {
+int16_t ACMSPEEX::DisableDTX() {
return -1;
}
-WebRtc_Word16 ACMSPEEX::InternalInitEncoder(
+int16_t ACMSPEEX::InternalInitEncoder(
WebRtcACMCodecParams* /* codec_params */) {
return -1;
}
-WebRtc_Word16 ACMSPEEX::InternalInitDecoder(
+int16_t ACMSPEEX::InternalInitDecoder(
WebRtcACMCodecParams* /* codec_params */) {
return -1;
}
-WebRtc_Word32 ACMSPEEX::CodecDef(WebRtcNetEQ_CodecDef& /* codec_def */,
- const CodecInst& /* codec_inst */) {
+int32_t ACMSPEEX::CodecDef(WebRtcNetEQ_CodecDef& /* codec_def */,
+ const CodecInst& /* codec_inst */) {
return -1;
}
@@ -82,7 +82,7 @@
return NULL;
}
-WebRtc_Word16 ACMSPEEX::InternalCreateEncoder() {
+int16_t ACMSPEEX::InternalCreateEncoder() {
return -1;
}
@@ -90,7 +90,7 @@
return;
}
-WebRtc_Word16 ACMSPEEX::InternalCreateDecoder() {
+int16_t ACMSPEEX::InternalCreateDecoder() {
return -1;
}
@@ -98,7 +98,7 @@
return;
}
-WebRtc_Word16 ACMSPEEX::SetBitRateSafe(const WebRtc_Word32 /* rate */) {
+int16_t ACMSPEEX::SetBitRateSafe(const int32_t /* rate */) {
return -1;
}
@@ -107,22 +107,22 @@
}
#ifdef UNUSEDSPEEX
-WebRtc_Word16 ACMSPEEX::EnableVBR() {
+int16_t ACMSPEEX::EnableVBR() {
return -1;
}
-WebRtc_Word16 ACMSPEEX::DisableVBR() {
+int16_t ACMSPEEX::DisableVBR() {
return -1;
}
-WebRtc_Word16 ACMSPEEX::SetComplMode(WebRtc_Word16 mode) {
+int16_t ACMSPEEX::SetComplMode(int16_t mode) {
return -1;
}
#endif
#else //===================== Actual Implementation =======================
-ACMSPEEX::ACMSPEEX(WebRtc_Word16 codec_id)
+ACMSPEEX::ACMSPEEX(int16_t codec_id)
: encoder_inst_ptr_(NULL),
decoder_inst_ptr_(NULL) {
codec_id_ = codec_id;
@@ -165,11 +165,11 @@
return;
}
-WebRtc_Word16 ACMSPEEX::InternalEncode(WebRtc_UWord8* bitstream,
- WebRtc_Word16* bitstream_len_byte) {
- WebRtc_Word16 status;
- WebRtc_Word16 num_encoded_samples = 0;
- WebRtc_Word16 n = 0;
+int16_t ACMSPEEX::InternalEncode(uint8_t* bitstream,
+ int16_t* bitstream_len_byte) {
+ int16_t status;
+ int16_t num_encoded_samples = 0;
+ int16_t n = 0;
while (num_encoded_samples < frame_len_smpl_) {
status = WebRtcSpeex_Encode(encoder_inst_ptr_,
@@ -196,25 +196,25 @@
// This frame is detected as inactive. We need to send whatever
// has been encoded so far.
*bitstream_len_byte = WebRtcSpeex_GetBitstream(encoder_inst_ptr_,
- (WebRtc_Word16*)bitstream);
+ (int16_t*)bitstream);
return *bitstream_len_byte;
}
}
*bitstream_len_byte = WebRtcSpeex_GetBitstream(encoder_inst_ptr_,
- (WebRtc_Word16*)bitstream);
+ (int16_t*)bitstream);
return *bitstream_len_byte;
}
-WebRtc_Word16 ACMSPEEX::DecodeSafe(WebRtc_UWord8* /* bitstream */,
- WebRtc_Word16 /* bitstream_len_byte */,
- WebRtc_Word16* /* audio */,
- WebRtc_Word16* /* audio_samples */,
- WebRtc_Word8* /* speech_type */) {
+int16_t ACMSPEEX::DecodeSafe(uint8_t* /* bitstream */,
+ int16_t /* bitstream_len_byte */,
+ int16_t* /* audio */,
+ int16_t* /* audio_samples */,
+ int8_t* /* speech_type */) {
return 0;
}
-WebRtc_Word16 ACMSPEEX::EnableDTX() {
+int16_t ACMSPEEX::EnableDTX() {
if (dtx_enabled_) {
return 0;
} else if (encoder_exist_) { // check if encoder exists
@@ -234,7 +234,7 @@
return 0;
}
-WebRtc_Word16 ACMSPEEX::DisableDTX() {
+int16_t ACMSPEEX::DisableDTX() {
if (!dtx_enabled_) {
return 0;
} else if (encoder_exist_) { // check if encoder exists
@@ -255,7 +255,7 @@
return 0;
}
-WebRtc_Word16 ACMSPEEX::InternalInitEncoder(
+int16_t ACMSPEEX::InternalInitEncoder(
WebRtcACMCodecParams* codec_params) {
// sanity check
if (encoder_inst_ptr_ == NULL) {
@@ -264,7 +264,7 @@
return -1;
}
- WebRtc_Word16 status = SetBitRateSafe((codec_params->codecInstant).rate);
+ int16_t status = SetBitRateSafe((codec_params->codecInstant).rate);
status +=
(WebRtcSpeex_EncoderInit(encoder_inst_ptr_, vbr_enabled_, compl_mode_,
((codec_params->enable_dtx) ? 1 : 0)) < 0) ?
@@ -279,9 +279,9 @@
}
}
-WebRtc_Word16 ACMSPEEX::InternalInitDecoder(
+int16_t ACMSPEEX::InternalInitDecoder(
WebRtcACMCodecParams* /* codec_params */) {
- WebRtc_Word16 status;
+ int16_t status;
// sanity check
if (decoder_inst_ptr_ == NULL) {
@@ -300,8 +300,8 @@
}
}
-WebRtc_Word32 ACMSPEEX::CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst) {
+int32_t ACMSPEEX::CodecDef(WebRtcNetEQ_CodecDef& codec_def,
+ const CodecInst& codec_inst) {
if (!decoder_initialized_) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
"Error, Speex decoder is not initialized");
@@ -340,7 +340,7 @@
return NULL;
}
-WebRtc_Word16 ACMSPEEX::InternalCreateEncoder() {
+int16_t ACMSPEEX::InternalCreateEncoder() {
return WebRtcSpeex_CreateEnc(&encoder_inst_ptr_, sampling_frequency_);
}
@@ -355,7 +355,7 @@
encoding_rate_ = 0;
}
-WebRtc_Word16 ACMSPEEX::InternalCreateDecoder() {
+int16_t ACMSPEEX::InternalCreateDecoder() {
return WebRtcSpeex_CreateDec(&decoder_inst_ptr_, sampling_frequency_, 1);
}
@@ -369,7 +369,7 @@
decoder_initialized_ = false;
}
-WebRtc_Word16 ACMSPEEX::SetBitRateSafe(const WebRtc_Word32 rate) {
+int16_t ACMSPEEX::SetBitRateSafe(const int32_t rate) {
// Check if changed rate
if (rate == encoding_rate_) {
return 0;
@@ -397,7 +397,7 @@
// This API is currently not in use. If the ability to enable/disable
// VBR is requested, an ACM API needs to be added.
-WebRtc_Word16 ACMSPEEX::EnableVBR() {
+int16_t ACMSPEEX::EnableVBR() {
if (vbr_enabled_) {
return 0;
} else if (encoder_exist_) { // check if encoder exists
@@ -418,7 +418,7 @@
// This API is currently not in use. If the ability to enable/disable
// VBR is requested, an ACM API needs to be added.
-WebRtc_Word16 ACMSPEEX::DisableVBR() {
+int16_t ACMSPEEX::DisableVBR() {
if (!vbr_enabled_) {
return 0;
} else if (encoder_exist_) { // check if encoder exists
@@ -440,7 +440,7 @@
// This API is currently not in use. If the ability to set complexity
// is requested, an ACM API needs to be added.
-WebRtc_Word16 ACMSPEEX::SetComplMode(WebRtc_Word16 mode) {
+int16_t ACMSPEEX::SetComplMode(int16_t mode) {
// Check if new mode
if (mode == compl_mode_) {
return 0;
diff --git a/webrtc/modules/audio_coding/main/source/acm_speex.h b/webrtc/modules/audio_coding/main/source/acm_speex.h
index d1cd840..0f62ea3 100644
--- a/webrtc/modules/audio_coding/main/source/acm_speex.h
+++ b/webrtc/modules/audio_coding/main/source/acm_speex.h
@@ -21,60 +21,60 @@
class ACMSPEEX : public ACMGenericCodec {
public:
- explicit ACMSPEEX(WebRtc_Word16 codec_id);
+ explicit ACMSPEEX(int16_t codec_id);
~ACMSPEEX();
// for FEC
ACMGenericCodec* CreateInstance(void);
- WebRtc_Word16 InternalEncode(WebRtc_UWord8* bitstream,
- WebRtc_Word16* bitstream_len_byte);
+ int16_t InternalEncode(uint8_t* bitstream,
+ int16_t* bitstream_len_byte);
- WebRtc_Word16 InternalInitEncoder(WebRtcACMCodecParams *codec_params);
+ int16_t InternalInitEncoder(WebRtcACMCodecParams *codec_params);
- WebRtc_Word16 InternalInitDecoder(WebRtcACMCodecParams *codec_params);
+ int16_t InternalInitDecoder(WebRtcACMCodecParams *codec_params);
protected:
- WebRtc_Word16 DecodeSafe(WebRtc_UWord8* bitstream,
- WebRtc_Word16 bitstream_len_byte,
- WebRtc_Word16* audio,
- WebRtc_Word16* audio_samples,
- WebRtc_Word8* speech_type);
+ int16_t DecodeSafe(uint8_t* bitstream,
+ int16_t bitstream_len_byte,
+ int16_t* audio,
+ int16_t* audio_samples,
+ int8_t* speech_type);
- WebRtc_Word32 CodecDef(WebRtcNetEQ_CodecDef& codec_def,
- const CodecInst& codec_inst);
+ int32_t CodecDef(WebRtcNetEQ_CodecDef& codec_def,
+ const CodecInst& codec_inst);
void DestructEncoderSafe();
void DestructDecoderSafe();
- WebRtc_Word16 InternalCreateEncoder();
+ int16_t InternalCreateEncoder();
- WebRtc_Word16 InternalCreateDecoder();
+ int16_t InternalCreateDecoder();
void InternalDestructEncoderInst(void* ptr_inst);
- WebRtc_Word16 SetBitRateSafe(const WebRtc_Word32 rate);
+ int16_t SetBitRateSafe(const int32_t rate);
- WebRtc_Word16 EnableDTX();
+ int16_t EnableDTX();
- WebRtc_Word16 DisableDTX();
+ int16_t DisableDTX();
#ifdef UNUSEDSPEEX
- WebRtc_Word16 EnableVBR();
+ int16_t EnableVBR();
- WebRtc_Word16 DisableVBR();
+ int16_t DisableVBR();
- WebRtc_Word16 SetComplMode(WebRtc_Word16 mode);
+ int16_t SetComplMode(int16_t mode);
#endif
SPEEX_encinst_t_* encoder_inst_ptr_;
SPEEX_decinst_t_* decoder_inst_ptr_;
- WebRtc_Word16 compl_mode_;
+ int16_t compl_mode_;
bool vbr_enabled_;
- WebRtc_Word32 encoding_rate_;
- WebRtc_Word16 sampling_frequency_;
- WebRtc_UWord16 samples_in_20ms_audio_;
+ int32_t encoding_rate_;
+ int16_t sampling_frequency_;
+ uint16_t samples_in_20ms_audio_;
};
} // namespace webrtc
diff --git a/webrtc/modules/audio_coding/main/source/audio_coding_module.cc b/webrtc/modules/audio_coding/main/source/audio_coding_module.cc
index 91620b3..159fbb8 100644
--- a/webrtc/modules/audio_coding/main/source/audio_coding_module.cc
+++ b/webrtc/modules/audio_coding/main/source/audio_coding_module.cc
@@ -19,7 +19,7 @@
namespace webrtc {
// Create module
-AudioCodingModule* AudioCodingModule::Create(const WebRtc_Word32 id) {
+AudioCodingModule* AudioCodingModule::Create(const int32_t id) {
return new AudioCodingModuleImpl(id);
}
@@ -29,21 +29,21 @@
}
// Get number of supported codecs
-WebRtc_UWord8 AudioCodingModule::NumberOfCodecs() {
- return static_cast<WebRtc_UWord8>(ACMCodecDB::kNumCodecs);
+uint8_t AudioCodingModule::NumberOfCodecs() {
+ return static_cast<uint8_t>(ACMCodecDB::kNumCodecs);
}
// Get supported codec param with id
-WebRtc_Word32 AudioCodingModule::Codec(WebRtc_UWord8 list_id,
- CodecInst* codec) {
+int32_t AudioCodingModule::Codec(uint8_t list_id,
+ CodecInst* codec) {
// Get the codec settings for the codec with the given list ID
return ACMCodecDB::Codec(list_id, codec);
}
// Get supported codec Param with name, frequency and number of channels.
-WebRtc_Word32 AudioCodingModule::Codec(const char* payload_name,
- CodecInst* codec, int sampling_freq_hz,
- int channels) {
+int32_t AudioCodingModule::Codec(const char* payload_name,
+ CodecInst* codec, int sampling_freq_hz,
+ int channels) {
int codec_id;
// Get the id of the codec from the database.
@@ -70,8 +70,8 @@
}
// Get supported codec Index with name, frequency and number of channels.
-WebRtc_Word32 AudioCodingModule::Codec(const char* payload_name,
- int sampling_freq_hz, int channels) {
+int32_t AudioCodingModule::Codec(const char* payload_name,
+ int sampling_freq_hz, int channels) {
return ACMCodecDB::CodecId(payload_name, sampling_freq_hz, channels);
}
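These static lookups are typically the first thing a client calls before configuring the module. A hedged usage sketch (the codec name, id and error handling are illustrative, not taken from this patch):

  // Illustrative: create a module, look up PCMU (8 kHz, mono) in the codec
  // database, and register it as the send codec.
  AudioCodingModule* acm = AudioCodingModule::Create(0);  // arbitrary id
  CodecInst codec;
  if (AudioCodingModule::Codec("PCMU", &codec, 8000, 1) >= 0) {  // non-negative assumed to mean success
    acm->InitializeSender();
    acm->RegisterSendCodec(codec);
  }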
diff --git a/webrtc/modules/audio_coding/main/source/audio_coding_module_impl.cc b/webrtc/modules/audio_coding/main/source/audio_coding_module_impl.cc
index f8e966d..8f16bf3 100644
--- a/webrtc/modules/audio_coding/main/source/audio_coding_module_impl.cc
+++ b/webrtc/modules/audio_coding/main/source/audio_coding_module_impl.cc
@@ -102,7 +102,7 @@
} // namespace
-AudioCodingModuleImpl::AudioCodingModuleImpl(const WebRtc_Word32 id)
+AudioCodingModuleImpl::AudioCodingModuleImpl(const int32_t id)
: packetization_callback_(NULL),
id_(id),
last_timestamp_(0xD87F3F9F),
@@ -170,7 +170,7 @@
neteq_.set_id(id_);
// Allocate memory for RED.
- red_buffer_ = new WebRtc_UWord8[MAX_PAYLOAD_SIZE_BYTE];
+ red_buffer_ = new uint8_t[MAX_PAYLOAD_SIZE_BYTE];
// TODO(turajs): This might not be exactly how this class is supposed to work.
// The external usage might be that |fragmentationVectorSize| has to match
@@ -270,7 +270,7 @@
"Destroyed");
}
-WebRtc_Word32 AudioCodingModuleImpl::ChangeUniqueId(const WebRtc_Word32 id) {
+int32_t AudioCodingModuleImpl::ChangeUniqueId(const int32_t id) {
{
CriticalSectionScoped lock(acm_crit_sect_);
id_ = id;
@@ -288,7 +288,7 @@
// Returns the number of milliseconds until the module want a
// worker thread to call Process.
-WebRtc_Word32 AudioCodingModuleImpl::TimeUntilNextProcess() {
+int32_t AudioCodingModuleImpl::TimeUntilNextProcess() {
CriticalSectionScoped lock(acm_crit_sect_);
if (!HaveValidEncoder("TimeUntilNextProcess")) {
@@ -298,7 +298,7 @@
(send_codec_inst_.plfreq / 1000);
}
-WebRtc_Word32 AudioCodingModuleImpl::Process() {
+int32_t AudioCodingModuleImpl::Process() {
bool dual_stream;
{
CriticalSectionScoped lock(acm_crit_sect_);
@@ -327,7 +327,7 @@
fragmentation_.fragmentationLength[fragmentation_index] = len_bytes;
fragmentation_.fragmentationPlType[fragmentation_index] = payload_type;
fragmentation_.fragmentationTimeDiff[fragmentation_index] =
- static_cast<WebRtc_UWord16>(current_timestamp - rtp_timestamp);
+ static_cast<uint16_t>(current_timestamp - rtp_timestamp);
fragmentation_.fragmentationVectorSize++;
return len_bytes;
}
@@ -448,7 +448,7 @@
fragmentation_.fragmentationPlType[index_previous_secondary] =
secondary_send_codec_inst_.pltype;
fragmentation_.fragmentationTimeDiff[index_previous_secondary] =
- static_cast<WebRtc_UWord16>(current_timestamp - last_fec_timestamp_);
+ static_cast<uint16_t>(current_timestamp - last_fec_timestamp_);
fragmentation_.fragmentationVectorSize++;
}
@@ -512,14 +512,14 @@
// Process any pending tasks such as timeouts.
int AudioCodingModuleImpl::ProcessSingleStream() {
// Make room for 1 RED payload.
- WebRtc_UWord8 stream[2 * MAX_PAYLOAD_SIZE_BYTE];
- WebRtc_Word16 length_bytes = 2 * MAX_PAYLOAD_SIZE_BYTE;
- WebRtc_Word16 red_length_bytes = length_bytes;
- WebRtc_UWord32 rtp_timestamp;
- WebRtc_Word16 status;
+ uint8_t stream[2 * MAX_PAYLOAD_SIZE_BYTE];
+ int16_t length_bytes = 2 * MAX_PAYLOAD_SIZE_BYTE;
+ int16_t red_length_bytes = length_bytes;
+ uint32_t rtp_timestamp;
+ int16_t status;
WebRtcACMEncodingType encoding_type;
FrameType frame_type = kAudioFrameSpeech;
- WebRtc_UWord8 current_payload_type = 0;
+ uint8_t current_payload_type = 0;
bool has_data_to_send = false;
bool fec_active = false;
RTPFragmentationHeader my_fragmentation;
@@ -553,7 +553,7 @@
}
case kActiveNormalEncoded:
case kPassiveNormalEncoded: {
- current_payload_type = (WebRtc_UWord8) send_codec_inst_.pltype;
+ current_payload_type = (uint8_t) send_codec_inst_.pltype;
frame_type = kAudioFrameSpeech;
break;
}
@@ -641,7 +641,7 @@
fragmentation_.fragmentationLength[1]);
// Update the fragmentation time difference vector, in number of
// timestamps.
- WebRtc_UWord16 time_since_last = WebRtc_UWord16(
+ uint16_t time_since_last = uint16_t(
rtp_timestamp - last_fec_timestamp_);
// Update fragmentation vectors.
@@ -665,7 +665,7 @@
// RFC 2198 (RTP Payload for Redundant Audio Data) will be used.
// First fragment is the current data (new).
// Second fragment is the previous data (old).
- length_bytes = static_cast<WebRtc_Word16>(
+ length_bytes = static_cast<int16_t>(
fragmentation_.fragmentationLength[0] +
fragmentation_.fragmentationLength[1]);
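Since RFC 2198 is what lets the new and old fragments share one packet, its per-block header layout is worth recalling: each redundant block gets a 4-byte header (F bit, 7-bit payload type, 14-bit timestamp offset, 10-bit block length) and the primary block a single byte (F = 0 plus payload type). A purely illustrative packing helper, not part of the ACM code:

  // Packs one RFC 2198 redundant-block header into 4 bytes, network byte order.
  // (uint8_t/uint16_t from <stdint.h>; values assumed to fit their bit widths.)
  static void PackRedBlockHeader(uint8_t* out, uint8_t pt,
                                 uint16_t ts_offset, uint16_t block_len) {
    out[0] = 0x80 | (pt & 0x7F);                  // F = 1, 7-bit payload type
    out[1] = (ts_offset >> 6) & 0xFF;             // upper 8 of 14 offset bits
    out[2] = ((ts_offset & 0x3F) << 2) | ((block_len >> 8) & 0x03);
    out[3] = block_len & 0xFF;                    // lower 8 of 10 length bits
  }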
@@ -714,7 +714,7 @@
if (vad_callback_ != NULL) {
// Callback with VAD decision.
- vad_callback_->InFrameType(((WebRtc_Word16) encoding_type));
+ vad_callback_->InFrameType(((int16_t) encoding_type));
}
}
return length_bytes;
@@ -725,7 +725,7 @@
//
// Initialize send codec.
-WebRtc_Word32 AudioCodingModuleImpl::InitializeSender() {
+int32_t AudioCodingModuleImpl::InitializeSender() {
CriticalSectionScoped lock(acm_crit_sect_);
// Start with invalid values.
@@ -756,7 +756,7 @@
return 0;
}
-WebRtc_Word32 AudioCodingModuleImpl::ResetEncoder() {
+int32_t AudioCodingModuleImpl::ResetEncoder() {
CriticalSectionScoped lock(acm_crit_sect_);
if (!HaveValidEncoder("ResetEncoder")) {
return -1;
@@ -921,7 +921,7 @@
}
// Can be called multiple times for Codec, CNG, RED.
-WebRtc_Word32 AudioCodingModuleImpl::RegisterSendCodec(
+int32_t AudioCodingModuleImpl::RegisterSendCodec(
const CodecInst& send_codec) {
int mirror_id;
int codec_id = IsValidSendCodec(send_codec, true, id_, &mirror_id);
@@ -1038,7 +1038,7 @@
}
ACMGenericCodec* codec_ptr = codecs_[codec_id];
- WebRtc_Word16 status;
+ int16_t status;
WebRtcACMCodecParams codec_params;
memcpy(&(codec_params.codec_inst), &send_codec, sizeof(CodecInst));
@@ -1171,7 +1171,7 @@
}
// Get current send codec.
-WebRtc_Word32 AudioCodingModuleImpl::SendCodec(
+int32_t AudioCodingModuleImpl::SendCodec(
CodecInst* current_codec) const {
WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceAudioCoding, id_,
"SendCodec()");
@@ -1193,7 +1193,7 @@
}
// Get current send frequency.
-WebRtc_Word32 AudioCodingModuleImpl::SendFrequency() const {
+int32_t AudioCodingModuleImpl::SendFrequency() const {
WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceAudioCoding, id_,
"SendFrequency()");
CriticalSectionScoped lock(acm_crit_sect_);
@@ -1211,7 +1211,7 @@
// Get encode bitrate.
// Adaptive rate codecs return their current encode target rate, while other
// codecs return their long-term average or their fixed rate.
-WebRtc_Word32 AudioCodingModuleImpl::SendBitrate() const {
+int32_t AudioCodingModuleImpl::SendBitrate() const {
CriticalSectionScoped lock(acm_crit_sect_);
if (!send_codec_registered_) {
@@ -1229,14 +1229,14 @@
// Set available bandwidth, inform the encoder about the estimated bandwidth
// received from the remote party.
-WebRtc_Word32 AudioCodingModuleImpl::SetReceivedEstimatedBandwidth(
- const WebRtc_Word32 bw) {
+int32_t AudioCodingModuleImpl::SetReceivedEstimatedBandwidth(
+ const int32_t bw) {
return codecs_[current_send_codec_idx_]->SetEstimatedBandwidth(bw);
}
// Register a transport callback which will be called to deliver
// the encoded buffers.
-WebRtc_Word32 AudioCodingModuleImpl::RegisterTransportCallback(
+int32_t AudioCodingModuleImpl::RegisterTransportCallback(
AudioPacketizationCallback* transport) {
CriticalSectionScoped lock(callback_crit_sect_);
packetization_callback_ = transport;
@@ -1245,7 +1245,7 @@
// Used by the module to deliver messages to the codec module/application
// AVT(DTMF).
-WebRtc_Word32 AudioCodingModuleImpl::RegisterIncomingMessagesCallback(
+int32_t AudioCodingModuleImpl::RegisterIncomingMessagesCallback(
#ifndef WEBRTC_DTMF_DETECTION
AudioCodingFeedback* /* incoming_message */,
const ACMCountries /* cpt */) {
@@ -1253,7 +1253,7 @@
#else
AudioCodingFeedback* incoming_message,
const ACMCountries cpt) {
- WebRtc_Word16 status = 0;
+ int16_t status = 0;
// Enter the critical section for callback.
{
@@ -1302,7 +1302,7 @@
}
// Add 10 ms of raw (PCM) audio data to the encoder.
-WebRtc_Word32 AudioCodingModuleImpl::Add10MsData(
+int32_t AudioCodingModuleImpl::Add10MsData(
const AudioFrame& audio_frame) {
if (audio_frame.samples_per_channel_ <= 0) {
assert(false);
@@ -1412,7 +1412,7 @@
assert((secondary_encoder_.get() != NULL) ?
secondary_send_codec_inst_.plfreq == send_codec_inst_.plfreq : true);
- bool resample = ((WebRtc_Word32) in_frame.sample_rate_hz_
+ bool resample = ((int32_t) in_frame.sample_rate_hz_
!= send_codec_inst_.plfreq);
// This variable is true if primary codec and secondary codec (if exists)
@@ -1465,7 +1465,7 @@
// Calculate the timestamp of this frame.
if (last_in_timestamp_ > in_frame.timestamp_) {
// A wrap around has happened.
- timestamp_diff = ((WebRtc_UWord32) 0xFFFFFFFF - last_in_timestamp_)
+ timestamp_diff = ((uint32_t) 0xFFFFFFFF - last_in_timestamp_)
+ in_frame.timestamp_;
} else {
timestamp_diff = in_frame.timestamp_ - last_in_timestamp_;
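Note that on uint32_t values a single subtraction already wraps modulo 2^32; the explicit branch above yields a result one tick smaller in the wrap case, since 0xFFFFFFFF is 2^32 - 1. A small illustrative helper showing the modulo form, not part of the patch:

  // Unsigned subtraction wraps modulo 2^32, covering both branches above.
  uint32_t TimestampDiff(uint32_t current, uint32_t last) {
    return current - last;  // e.g. 0x00000005u - 0xFFFFFFFEu == 7
  }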
@@ -1502,7 +1502,7 @@
}
// Configure FEC status i.e on/off.
-WebRtc_Word32
+int32_t
AudioCodingModuleImpl::SetFECStatus(
#ifdef WEBRTC_CODEC_RED
const bool enable_fec) {
@@ -1532,9 +1532,9 @@
/////////////////////////////////////////
// (VAD) Voice Activity Detection
//
-WebRtc_Word32 AudioCodingModuleImpl::SetVAD(const bool enable_dtx,
- const bool enable_vad,
- const ACMVADMode mode) {
+int32_t AudioCodingModuleImpl::SetVAD(const bool enable_dtx,
+ const bool enable_vad,
+ const ACMVADMode mode) {
CriticalSectionScoped lock(acm_crit_sect_);
return SetVADSafe(enable_dtx, enable_vad, mode);
}
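The VAD/DTX switches are normally flipped right after the send codec is registered; a minimal usage sketch, assuming the VADNormal default declared in the header below:

  // Illustrative: enable DTX together with VAD in normal mode, then read back.
  acm->SetVAD(true /* enable_dtx */, true /* enable_vad */, VADNormal);
  bool dtx_enabled, vad_enabled;
  ACMVADMode vad_mode;
  acm->VAD(&dtx_enabled, &vad_enabled, &vad_mode);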
@@ -1569,7 +1569,7 @@
// If a send codec is registered, set VAD/DTX for the codec.
if (HaveValidEncoder("SetVAD")) {
- WebRtc_Word16 status = codecs_[current_send_codec_idx_]->SetVAD(enable_dtx,
+ int16_t status = codecs_[current_send_codec_idx_]->SetVAD(enable_dtx,
enable_vad,
mode);
if (status == 1) {
@@ -1600,8 +1600,8 @@
// Get VAD/DTX settings.
// TODO(tlegrand): Change this method to void.
-WebRtc_Word32 AudioCodingModuleImpl::VAD(bool* dtx_enabled, bool* vad_enabled,
- ACMVADMode* mode) const {
+int32_t AudioCodingModuleImpl::VAD(bool* dtx_enabled, bool* vad_enabled,
+ ACMVADMode* mode) const {
CriticalSectionScoped lock(acm_crit_sect_);
*dtx_enabled = dtx_enabled_;
@@ -1615,13 +1615,13 @@
// Receiver
//
-WebRtc_Word32 AudioCodingModuleImpl::InitializeReceiver() {
+int32_t AudioCodingModuleImpl::InitializeReceiver() {
CriticalSectionScoped lock(acm_crit_sect_);
return InitializeReceiverSafe();
}
// Initialize receiver, resets codec database etc.
-WebRtc_Word32 AudioCodingModuleImpl::InitializeReceiverSafe() {
+int32_t AudioCodingModuleImpl::InitializeReceiverSafe() {
initial_delay_ms_ = 0;
num_packets_accumulated_ = 0;
num_bytes_accumulated_ = 0;
@@ -1673,7 +1673,7 @@
}
// Reset the decoder state.
-WebRtc_Word32 AudioCodingModuleImpl::ResetDecoder() {
+int32_t AudioCodingModuleImpl::ResetDecoder() {
CriticalSectionScoped lock(acm_crit_sect_);
for (int id = 0; id < ACMCodecDB::kMaxNumCodecs; id++) {
@@ -1689,7 +1689,7 @@
}
// Get current receive frequency.
-WebRtc_Word32 AudioCodingModuleImpl::ReceiveFrequency() const {
+int32_t AudioCodingModuleImpl::ReceiveFrequency() const {
WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceAudioCoding, id_,
"ReceiveFrequency()");
WebRtcACMCodecParams codec_params;
@@ -1706,7 +1706,7 @@
}
// Get current playout frequency.
-WebRtc_Word32 AudioCodingModuleImpl::PlayoutFrequency() const {
+int32_t AudioCodingModuleImpl::PlayoutFrequency() const {
WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceAudioCoding, id_,
"PlayoutFrequency()");
@@ -1717,7 +1717,7 @@
// Register possible receive codecs, can be called multiple times,
// for codecs, CNG (NB, WB and SWB), DTMF, RED.
-WebRtc_Word32 AudioCodingModuleImpl::RegisterReceiveCodec(
+int32_t AudioCodingModuleImpl::RegisterReceiveCodec(
const CodecInst& receive_codec) {
CriticalSectionScoped lock(acm_crit_sect_);
@@ -1859,9 +1859,9 @@
return 0;
}
-WebRtc_Word32 AudioCodingModuleImpl::RegisterRecCodecMSSafe(
- const CodecInst& receive_codec, WebRtc_Word16 codec_id,
- WebRtc_Word16 mirror_id, ACMNetEQ::JitterBuffer jitter_buffer) {
+int32_t AudioCodingModuleImpl::RegisterRecCodecMSSafe(
+ const CodecInst& receive_codec, int16_t codec_id,
+ int16_t mirror_id, ACMNetEQ::JitterBuffer jitter_buffer) {
ACMGenericCodec** codecs;
if (jitter_buffer == ACMNetEQ::kMasterJb) {
codecs = &codecs_[0];
@@ -1896,7 +1896,7 @@
codecs[codec_id]->SetIsMaster(jitter_buffer == ACMNetEQ::kMasterJb);
- WebRtc_Word16 status = 0;
+ int16_t status = 0;
WebRtcACMCodecParams codec_params;
memcpy(&(codec_params.codec_inst), &receive_codec, sizeof(CodecInst));
codec_params.enable_vad = false;
@@ -1933,7 +1933,7 @@
}
// Get current received codec.
-WebRtc_Word32 AudioCodingModuleImpl::ReceiveCodec(
+int32_t AudioCodingModuleImpl::ReceiveCodec(
CodecInst* current_codec) const {
WebRtcACMCodecParams decoder_param;
CriticalSectionScoped lock(acm_crit_sect_);
@@ -1958,9 +1958,9 @@
}
// Incoming packet from network parsed and ready for decode.
-WebRtc_Word32 AudioCodingModuleImpl::IncomingPacket(
- const WebRtc_UWord8* incoming_payload,
- const WebRtc_Word32 payload_length,
+int32_t AudioCodingModuleImpl::IncomingPacket(
+ const uint8_t* incoming_payload,
+ const int32_t payload_length,
const WebRtcRTPHeader& rtp_info) {
WebRtcRTPHeader rtp_header;
@@ -1977,7 +1977,7 @@
// and "received frequency."
CriticalSectionScoped lock(acm_crit_sect_);
- WebRtc_UWord8 my_payload_type;
+ uint8_t my_payload_type;
// Check if this is a RED payload.
if (rtp_info.header.payloadType == receive_red_pltype_) {
@@ -2037,8 +2037,8 @@
if (expected_channels_ == 2) {
if (!rtp_info.type.Audio.isCNG) {
// Create a new vector for the payload, maximum payload size.
- WebRtc_Word32 length = payload_length;
- WebRtc_UWord8 payload[kMaxPacketSize];
+ int32_t length = payload_length;
+ uint8_t payload[kMaxPacketSize];
assert(payload_length <= kMaxPacketSize);
memcpy(payload, incoming_payload, payload_length);
codecs_[current_receive_codec_idx_]->SplitStereoPacket(payload, &length);
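For context, the caller of IncomingPacket supplies an already-parsed RTP header along with the payload; the sketch below uses only fields referenced elsewhere in this file, with illustrative values, and is not part of the patch:

  // Illustrative: hand one parsed RTP payload to the module.
  uint8_t payload[160] = {0};
  int32_t payload_length_bytes = 160;
  WebRtcRTPHeader rtp_info;
  rtp_info.header.payloadType = 0;       // e.g. PCMU
  rtp_info.header.sequenceNumber = 1234;
  rtp_info.header.timestamp = 160;
  rtp_info.type.Audio.channel = 1;
  rtp_info.type.Audio.isCNG = false;
  acm->IncomingPacket(payload, payload_length_bytes, rtp_info);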
@@ -2133,8 +2133,8 @@
}
// Minimum playout delay (Used for lip-sync).
-WebRtc_Word32 AudioCodingModuleImpl::SetMinimumPlayoutDelay(
- const WebRtc_Word32 time_ms) {
+int32_t AudioCodingModuleImpl::SetMinimumPlayoutDelay(
+ const int32_t time_ms) {
if ((time_ms < 0) || (time_ms > 10000)) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
"Delay must be in the range of 0-10000 milliseconds.");
@@ -2160,7 +2160,7 @@
// Configure DTMF playout status, i.e. whether to play out the incoming
// out-of-band DTMF tone.
-WebRtc_Word32 AudioCodingModuleImpl::SetDtmfPlayoutStatus(
+int32_t AudioCodingModuleImpl::SetDtmfPlayoutStatus(
#ifndef WEBRTC_CODEC_AVT
const bool /* enable */) {
WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioCoding, id_,
@@ -2175,9 +2175,9 @@
// Estimate the bandwidth based on the incoming stream; needed for one-way
// audio where RTCP sends the BW estimate.
// This is also done in the RTP module.
-WebRtc_Word32 AudioCodingModuleImpl::DecoderEstimatedBandwidth() const {
+int32_t AudioCodingModuleImpl::DecoderEstimatedBandwidth() const {
CodecInst codec;
- WebRtc_Word16 codec_id = -1;
+ int16_t codec_id = -1;
int pltype_wb;
int pltype_swb;
@@ -2212,7 +2212,7 @@
}
// Set playout mode for: voice, fax, or streaming.
-WebRtc_Word32 AudioCodingModuleImpl::SetPlayoutMode(
+int32_t AudioCodingModuleImpl::SetPlayoutMode(
const AudioPlayoutMode mode) {
if ((mode != voice) && (mode != fax) && (mode != streaming) &&
(mode != off)) {
@@ -2230,10 +2230,9 @@
// Get 10 milliseconds of raw audio data to play out.
// Automatically resample to the requested frequency.
-WebRtc_Word32 AudioCodingModuleImpl::PlayoutData10Ms(
- WebRtc_Word32 desired_freq_hz, AudioFrame* audio_frame) {
+int32_t AudioCodingModuleImpl::PlayoutData10Ms(
+ int32_t desired_freq_hz, AudioFrame* audio_frame) {
TRACE_EVENT0("webrtc_voe", "ACM::PlayoutData10Ms");
-
bool stereo_mode;
if (GetSilence(desired_freq_hz, audio_frame))
@@ -2254,11 +2253,11 @@
// For stereo playout:
// Master and Slave samples are interleaved starting with Master.
- const WebRtc_UWord16 receive_freq =
- static_cast<WebRtc_UWord16>(audio_frame_.sample_rate_hz_);
+ const uint16_t receive_freq =
+ static_cast<uint16_t>(audio_frame_.sample_rate_hz_);
bool tone_detected = false;
- WebRtc_Word16 last_detected_tone;
- WebRtc_Word16 tone;
+ int16_t last_detected_tone;
+ int16_t tone;
// Limit the scope of ACM Critical section.
{
@@ -2266,7 +2265,7 @@
if ((receive_freq != desired_freq_hz) && (desired_freq_hz != -1)) {
// Resample payload_data.
- WebRtc_Word16 temp_len = output_resampler_.Resample10Msec(
+ int16_t temp_len = output_resampler_.Resample10Msec(
audio_frame_.data_, receive_freq, audio_frame->data_,
desired_freq_hz, audio_frame_.num_channels_);
@@ -2277,13 +2276,13 @@
}
// Set the payload data length from the resampler.
- audio_frame->samples_per_channel_ = (WebRtc_UWord16) temp_len;
+ audio_frame->samples_per_channel_ = (uint16_t) temp_len;
// Set the sampling frequency.
audio_frame->sample_rate_hz_ = desired_freq_hz;
} else {
memcpy(audio_frame->data_, audio_frame_.data_,
audio_frame_.samples_per_channel_ * audio_frame->num_channels_
- * sizeof(WebRtc_Word16));
+ * sizeof(int16_t));
// Set the payload length.
audio_frame->samples_per_channel_ =
audio_frame_.samples_per_channel_;
@@ -2304,7 +2303,7 @@
tone);
} else {
// We are in 8 kHz so the master channel needs only 80 samples.
- WebRtc_Word16 master_channel[80];
+ int16_t master_channel[80];
for (int n = 0; n < 80; n++) {
master_channel[n] = audio_frame->data_[n << 1];
}
@@ -2320,7 +2319,7 @@
audio_frame_.samples_per_channel_,
receive_freq, tone_detected, tone);
} else {
- WebRtc_Word16 master_channel[WEBRTC_10MS_PCM_AUDIO];
+ int16_t master_channel[WEBRTC_10MS_PCM_AUDIO];
for (int n = 0; n < audio_frame_.samples_per_channel_; n++) {
master_channel[n] = audio_frame_.data_[n << 1];
}
@@ -2348,11 +2347,11 @@
if (dtmf_callback_ != NULL) {
if (tone != kACMToneEnd) {
// just a tone
- dtmf_callback_->IncomingDtmf((WebRtc_UWord8) tone, false);
+ dtmf_callback_->IncomingDtmf((uint8_t) tone, false);
} else if ((tone == kACMToneEnd) && (last_detected_tone != kACMToneEnd)) {
// The tone is "END" and the previously detected tone is
// not "END," so call fir an end.
- dtmf_callback_->IncomingDtmf((WebRtc_UWord8) last_detected_tone, true);
+ dtmf_callback_->IncomingDtmf((uint8_t) last_detected_tone, true);
}
}
}
@@ -2375,7 +2374,7 @@
}
// Configure VAD aggressiveness on the incoming stream.
-WebRtc_Word16 AudioCodingModuleImpl::SetReceiveVADMode(const ACMVADMode mode) {
+int16_t AudioCodingModuleImpl::SetReceiveVADMode(const ACMVADMode mode) {
return neteq_.SetVADMode(mode);
}
@@ -2383,9 +2382,9 @@
// Statistics
//
-WebRtc_Word32 AudioCodingModuleImpl::NetworkStatistics(
+int32_t AudioCodingModuleImpl::NetworkStatistics(
ACMNetworkStatistics* statistics) const {
- WebRtc_Word32 status;
+ int32_t status;
status = neteq_.NetworkStatistics(statistics);
return status;
}
@@ -2400,7 +2399,7 @@
codecs_[current_send_codec_idx_]->DestructEncoderInst(inst);
}
-WebRtc_Word16 AudioCodingModuleImpl::AudioBuffer(
+int16_t AudioCodingModuleImpl::AudioBuffer(
WebRtcACMAudioBuff& buffer) {
WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceAudioCoding, id_,
"AudioBuffer()");
@@ -2411,7 +2410,7 @@
return codecs_[current_send_codec_idx_]->AudioBuffer(buffer);
}
-WebRtc_Word16 AudioCodingModuleImpl::SetAudioBuffer(
+int16_t AudioCodingModuleImpl::SetAudioBuffer(
WebRtcACMAudioBuff& buffer) {
WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceAudioCoding, id_,
"SetAudioBuffer()");
@@ -2421,7 +2420,7 @@
return codecs_[current_send_codec_idx_]->SetAudioBuffer(buffer);
}
-WebRtc_UWord32 AudioCodingModuleImpl::EarliestTimestamp() const {
+uint32_t AudioCodingModuleImpl::EarliestTimestamp() const {
WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceAudioCoding, id_,
"EarliestTimestamp()");
if (!HaveValidEncoder("EarliestTimestamp")) {
@@ -2430,7 +2429,7 @@
return codecs_[current_send_codec_idx_]->EarliestTimestamp();
}
-WebRtc_Word32 AudioCodingModuleImpl::RegisterVADCallback(
+int32_t AudioCodingModuleImpl::RegisterVADCallback(
ACMVADCallback* vad_callback) {
WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceAudioCoding, id_,
"RegisterVADCallback()");
@@ -2440,9 +2439,9 @@
}
// TODO(tlegrand): Modify this function to work for stereo, and add tests.
-WebRtc_Word32 AudioCodingModuleImpl::IncomingPayload(
- const WebRtc_UWord8* incoming_payload, const WebRtc_Word32 payload_length,
- const WebRtc_UWord8 payload_type, const WebRtc_UWord32 timestamp) {
+int32_t AudioCodingModuleImpl::IncomingPayload(
+ const uint8_t* incoming_payload, const int32_t payload_length,
+ const uint8_t payload_type, const uint32_t timestamp) {
if (payload_length < 0) {
// Log error in trace file.
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
@@ -2467,8 +2466,8 @@
// Start with random numbers.
dummy_rtp_header_->header.sequenceNumber = rand();
dummy_rtp_header_->header.timestamp =
- (static_cast<WebRtc_UWord32>(rand()) << 16) +
- static_cast<WebRtc_UWord32>(rand());
+ (static_cast<uint32_t>(rand()) << 16) +
+ static_cast<uint32_t>(rand());
dummy_rtp_header_->type.Audio.channel = 1;
if (DecoderParamByPlType(payload_type, codec_params) < 0) {
@@ -2513,11 +2512,11 @@
return 0;
}
-WebRtc_Word16 AudioCodingModuleImpl::DecoderParamByPlType(
- const WebRtc_UWord8 payload_type,
+int16_t AudioCodingModuleImpl::DecoderParamByPlType(
+ const uint8_t payload_type,
WebRtcACMCodecParams& codec_params) const {
CriticalSectionScoped lock(acm_crit_sect_);
- for (WebRtc_Word16 id = 0; id < ACMCodecDB::kMaxNumCodecs;
+ for (int16_t id = 0; id < ACMCodecDB::kMaxNumCodecs;
id++) {
if (codecs_[id] != NULL) {
if (codecs_[id]->DecoderInitialized()) {
@@ -2537,17 +2536,17 @@
return -1;
}
-WebRtc_Word16 AudioCodingModuleImpl::DecoderListIDByPlName(
- const char* name, const WebRtc_UWord16 frequency) const {
+int16_t AudioCodingModuleImpl::DecoderListIDByPlName(
+ const char* name, const uint16_t frequency) const {
WebRtcACMCodecParams codec_params;
CriticalSectionScoped lock(acm_crit_sect_);
- for (WebRtc_Word16 id = 0; id < ACMCodecDB::kMaxNumCodecs; id++) {
+ for (int16_t id = 0; id < ACMCodecDB::kMaxNumCodecs; id++) {
if ((codecs_[id] != NULL)) {
if (codecs_[id]->DecoderInitialized()) {
assert(registered_pltypes_[id] >= 0);
assert(registered_pltypes_[id] <= 255);
codecs_[id]->DecoderParams(
- &codec_params, (WebRtc_UWord8) registered_pltypes_[id]);
+ &codec_params, (uint8_t) registered_pltypes_[id]);
if (!STR_CASE_CMP(codec_params.codec_inst.plname, name)) {
// Check if the given sampling frequency matches.
// A zero sampling frequency means we matching the names
@@ -2568,8 +2567,8 @@
return -1;
}
-WebRtc_Word32 AudioCodingModuleImpl::LastEncodedTimestamp(
- WebRtc_UWord32& timestamp) const {
+int32_t AudioCodingModuleImpl::LastEncodedTimestamp(
+ uint32_t& timestamp) const {
CriticalSectionScoped lock(acm_crit_sect_);
if (!HaveValidEncoder("LastEncodedTimestamp")) {
return -1;
@@ -2578,7 +2577,7 @@
return 0;
}
-WebRtc_Word32 AudioCodingModuleImpl::ReplaceInternalDTXWithWebRtc(
+int32_t AudioCodingModuleImpl::ReplaceInternalDTXWithWebRtc(
bool use_webrtc_dtx) {
CriticalSectionScoped lock(acm_crit_sect_);
@@ -2589,7 +2588,7 @@
return -1;
}
- WebRtc_Word32 res = codecs_[current_send_codec_idx_]->ReplaceInternalDTX(
+ int32_t res = codecs_[current_send_codec_idx_]->ReplaceInternalDTX(
use_webrtc_dtx);
// Check if VAD is turned on, or if there is any error.
if (res == 1) {
@@ -2604,7 +2603,7 @@
return 0;
}
-WebRtc_Word32 AudioCodingModuleImpl::IsInternalDTXReplacedWithWebRtc(
+int32_t AudioCodingModuleImpl::IsInternalDTXReplacedWithWebRtc(
bool* uses_webrtc_dtx) {
CriticalSectionScoped lock(acm_crit_sect_);
@@ -2618,8 +2617,8 @@
return 0;
}
-WebRtc_Word32 AudioCodingModuleImpl::SetISACMaxRate(
- const WebRtc_UWord32 max_bit_per_sec) {
+int32_t AudioCodingModuleImpl::SetISACMaxRate(
+ const uint32_t max_bit_per_sec) {
CriticalSectionScoped lock(acm_crit_sect_);
if (!HaveValidEncoder("SetISACMaxRate")) {
@@ -2629,8 +2628,8 @@
return codecs_[current_send_codec_idx_]->SetISACMaxRate(max_bit_per_sec);
}
-WebRtc_Word32 AudioCodingModuleImpl::SetISACMaxPayloadSize(
- const WebRtc_UWord16 max_size_bytes) {
+int32_t AudioCodingModuleImpl::SetISACMaxPayloadSize(
+ const uint16_t max_size_bytes) {
CriticalSectionScoped lock(acm_crit_sect_);
if (!HaveValidEncoder("SetISACMaxPayloadSize")) {
@@ -2641,9 +2640,9 @@
max_size_bytes);
}
-WebRtc_Word32 AudioCodingModuleImpl::ConfigISACBandwidthEstimator(
- const WebRtc_UWord8 frame_size_ms,
- const WebRtc_UWord16 rate_bit_per_sec,
+int32_t AudioCodingModuleImpl::ConfigISACBandwidthEstimator(
+ const uint8_t frame_size_ms,
+ const uint16_t rate_bit_per_sec,
const bool enforce_frame_size) {
CriticalSectionScoped lock(acm_crit_sect_);
@@ -2655,7 +2654,7 @@
frame_size_ms, rate_bit_per_sec, enforce_frame_size);
}
-WebRtc_Word32 AudioCodingModuleImpl::SetBackgroundNoiseMode(
+int32_t AudioCodingModuleImpl::SetBackgroundNoiseMode(
const ACMBackgroundNoiseMode mode) {
if ((mode < On) || (mode > Off)) {
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, id_,
@@ -2665,13 +2664,13 @@
return neteq_.SetBackgroundNoiseMode(mode);
}
-WebRtc_Word32 AudioCodingModuleImpl::BackgroundNoiseMode(
+int32_t AudioCodingModuleImpl::BackgroundNoiseMode(
ACMBackgroundNoiseMode* mode) {
return neteq_.BackgroundNoiseMode(*mode);
}
-WebRtc_Word32 AudioCodingModuleImpl::PlayoutTimestamp(
- WebRtc_UWord32* timestamp) {
+int32_t AudioCodingModuleImpl::PlayoutTimestamp(
+ uint32_t* timestamp) {
WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceAudioCoding, id_,
"PlayoutTimestamp()");
{
@@ -2705,8 +2704,8 @@
return true;
}
-WebRtc_Word32 AudioCodingModuleImpl::UnregisterReceiveCodec(
- const WebRtc_Word16 payload_type) {
+int32_t AudioCodingModuleImpl::UnregisterReceiveCodec(
+ const int16_t payload_type) {
CriticalSectionScoped lock(acm_crit_sect_);
int id;
@@ -2727,10 +2726,10 @@
return UnregisterReceiveCodecSafe(id);
}
-WebRtc_Word32 AudioCodingModuleImpl::UnregisterReceiveCodecSafe(
- const WebRtc_Word16 codec_id) {
+int32_t AudioCodingModuleImpl::UnregisterReceiveCodecSafe(
+ const int16_t codec_id) {
const WebRtcNetEQDecoder *neteq_decoder = ACMCodecDB::NetEQDecoders();
- WebRtc_Word16 mirror_id = ACMCodecDB::MirrorID(codec_id);
+ int16_t mirror_id = ACMCodecDB::MirrorID(codec_id);
bool stereo_receiver = false;
if (codecs_[codec_id] != NULL) {
@@ -2798,13 +2797,13 @@
return 0;
}
-WebRtc_Word32 AudioCodingModuleImpl::REDPayloadISAC(
- const WebRtc_Word32 isac_rate, const WebRtc_Word16 isac_bw_estimate,
- WebRtc_UWord8* payload, WebRtc_Word16* length_bytes) {
+int32_t AudioCodingModuleImpl::REDPayloadISAC(
+ const int32_t isac_rate, const int16_t isac_bw_estimate,
+ uint8_t* payload, int16_t* length_bytes) {
if (!HaveValidEncoder("EncodeData")) {
return -1;
}
- WebRtc_Word16 status;
+ int16_t status;
status = codecs_[current_send_codec_idx_]->REDPayloadISAC(isac_rate,
isac_bw_estimate,
payload,
@@ -2823,7 +2822,7 @@
memset(fragmentation_.fragmentationPlType, 0, kMaxNumFragmentationVectors *
sizeof(fragmentation_.fragmentationPlType[0]));
fragmentation_.fragmentationVectorSize =
- static_cast<WebRtc_UWord16>(vector_size);
+ static_cast<uint16_t>(vector_size);
}
int AudioCodingModuleImpl::SetInitialPlayoutDelay(int delay_ms) {
diff --git a/webrtc/modules/audio_coding/main/source/audio_coding_module_impl.h b/webrtc/modules/audio_coding/main/source/audio_coding_module_impl.h
index 6fb40d5..7ce32dc 100644
--- a/webrtc/modules/audio_coding/main/source/audio_coding_module_impl.h
+++ b/webrtc/modules/audio_coding/main/source/audio_coding_module_impl.h
@@ -28,33 +28,33 @@
class AudioCodingModuleImpl : public AudioCodingModule {
public:
// Constructor
- explicit AudioCodingModuleImpl(const WebRtc_Word32 id);
+ explicit AudioCodingModuleImpl(const int32_t id);
// Destructor
~AudioCodingModuleImpl();
// Change the unique identifier of this object.
- virtual WebRtc_Word32 ChangeUniqueId(const WebRtc_Word32 id);
+ virtual int32_t ChangeUniqueId(const int32_t id);
// Returns the number of milliseconds until the module want a worker thread
// to call Process.
- WebRtc_Word32 TimeUntilNextProcess();
+ int32_t TimeUntilNextProcess();
// Process any pending tasks such as timeouts.
- WebRtc_Word32 Process();
+ int32_t Process();
/////////////////////////////////////////
// Sender
//
// Initialize send codec.
- WebRtc_Word32 InitializeSender();
+ int32_t InitializeSender();
// Reset send codec.
- WebRtc_Word32 ResetEncoder();
+ int32_t ResetEncoder();
// Can be called multiple times for Codec, CNG, RED.
- WebRtc_Word32 RegisterSendCodec(const CodecInst& send_codec);
+ int32_t RegisterSendCodec(const CodecInst& send_codec);
// Register Secondary codec for dual-streaming. Dual-streaming is activated
// right after the secondary codec is registered.
@@ -68,45 +68,45 @@
int SecondarySendCodec(CodecInst* secondary_codec) const;
// Get current send codec.
- WebRtc_Word32 SendCodec(CodecInst* current_codec) const;
+ int32_t SendCodec(CodecInst* current_codec) const;
// Get current send frequency.
- WebRtc_Word32 SendFrequency() const;
+ int32_t SendFrequency() const;
// Get encode bit-rate.
// Adaptive rate codecs return their current encode target rate, while other
// codecs return their long-term average or their fixed rate.
- WebRtc_Word32 SendBitrate() const;
+ int32_t SendBitrate() const;
// Set available bandwidth, inform the encoder about the
// estimated bandwidth received from the remote party.
- virtual WebRtc_Word32 SetReceivedEstimatedBandwidth(const WebRtc_Word32 bw);
+ virtual int32_t SetReceivedEstimatedBandwidth(const int32_t bw);
// Register a transport callback which will be
// called to deliver the encoded buffers.
- WebRtc_Word32 RegisterTransportCallback(
+ int32_t RegisterTransportCallback(
AudioPacketizationCallback* transport);
// Used by the module to deliver messages to the codec module/application
// AVT(DTMF).
- WebRtc_Word32 RegisterIncomingMessagesCallback(
+ int32_t RegisterIncomingMessagesCallback(
AudioCodingFeedback* incoming_message, const ACMCountries cpt);
// Add 10 ms of raw (PCM) audio data to the encoder.
- WebRtc_Word32 Add10MsData(const AudioFrame& audio_frame);
+ int32_t Add10MsData(const AudioFrame& audio_frame);
// Set background noise mode for NetEQ, on, off or fade.
- WebRtc_Word32 SetBackgroundNoiseMode(const ACMBackgroundNoiseMode mode);
+ int32_t SetBackgroundNoiseMode(const ACMBackgroundNoiseMode mode);
// Get current background noise mode.
- WebRtc_Word32 BackgroundNoiseMode(ACMBackgroundNoiseMode* mode);
+ int32_t BackgroundNoiseMode(ACMBackgroundNoiseMode* mode);
/////////////////////////////////////////
// (FEC) Forward Error Correction
//
// Configure FEC status, i.e. on/off.
- WebRtc_Word32 SetFECStatus(const bool enable_fec);
+ int32_t SetFECStatus(const bool enable_fec);
// Get FEC status.
bool FECStatus() const;
@@ -117,62 +117,62 @@
// (CNG) Comfort Noise Generation
//
- WebRtc_Word32 SetVAD(const bool enable_dtx = true,
- const bool enable_vad = false,
- const ACMVADMode mode = VADNormal);
+ int32_t SetVAD(const bool enable_dtx = true,
+ const bool enable_vad = false,
+ const ACMVADMode mode = VADNormal);
- WebRtc_Word32 VAD(bool* dtx_enabled, bool* vad_enabled,
- ACMVADMode* mode) const;
+ int32_t VAD(bool* dtx_enabled, bool* vad_enabled,
+ ACMVADMode* mode) const;
- WebRtc_Word32 RegisterVADCallback(ACMVADCallback* vad_callback);
+ int32_t RegisterVADCallback(ACMVADCallback* vad_callback);
// Get VAD aggressiveness on the incoming stream.
ACMVADMode ReceiveVADMode() const;
// Configure VAD aggressiveness on the incoming stream.
- WebRtc_Word16 SetReceiveVADMode(const ACMVADMode mode);
+ int16_t SetReceiveVADMode(const ACMVADMode mode);
/////////////////////////////////////////
// Receiver
//
// Initialize receiver, resets codec database etc.
- WebRtc_Word32 InitializeReceiver();
+ int32_t InitializeReceiver();
// Reset the decoder state.
- WebRtc_Word32 ResetDecoder();
+ int32_t ResetDecoder();
// Get current receive frequency.
- WebRtc_Word32 ReceiveFrequency() const;
+ int32_t ReceiveFrequency() const;
// Get current playout frequency.
- WebRtc_Word32 PlayoutFrequency() const;
+ int32_t PlayoutFrequency() const;
// Register possible receive codecs, can be called multiple times,
// for codecs, CNG, DTMF, RED.
- WebRtc_Word32 RegisterReceiveCodec(const CodecInst& receive_codec);
+ int32_t RegisterReceiveCodec(const CodecInst& receive_codec);
// Get current received codec.
- WebRtc_Word32 ReceiveCodec(CodecInst* current_codec) const;
+ int32_t ReceiveCodec(CodecInst* current_codec) const;
// Incoming packet from network parsed and ready for decode.
- WebRtc_Word32 IncomingPacket(const WebRtc_UWord8* incoming_payload,
- const WebRtc_Word32 payload_length,
- const WebRtcRTPHeader& rtp_info);
+ int32_t IncomingPacket(const uint8_t* incoming_payload,
+ const int32_t payload_length,
+ const WebRtcRTPHeader& rtp_info);
// Incoming payloads without rtp-info; the rtp-info will be created in ACM.
// One usage of this API is when pre-encoded files are pushed into ACM.
- WebRtc_Word32 IncomingPayload(const WebRtc_UWord8* incoming_payload,
- const WebRtc_Word32 payload_length,
- const WebRtc_UWord8 payload_type,
- const WebRtc_UWord32 timestamp = 0);
+ int32_t IncomingPayload(const uint8_t* incoming_payload,
+ const int32_t payload_length,
+ const uint8_t payload_type,
+ const uint32_t timestamp = 0);
// Minimum playout delay (used for lip-sync).
- WebRtc_Word32 SetMinimumPlayoutDelay(const WebRtc_Word32 time_ms);
+ int32_t SetMinimumPlayoutDelay(const int32_t time_ms);
// Configure Dtmf playout status, i.e. on/off playout of the incoming out-of-band Dtmf
// tone.
- WebRtc_Word32 SetDtmfPlayoutStatus(const bool enable);
+ int32_t SetDtmfPlayoutStatus(const bool enable);
// Get Dtmf playout status.
bool DtmfPlayoutStatus() const;
@@ -180,81 +180,81 @@
// Estimate the bandwidth based on the incoming stream; needed
// for one-way audio where RTCP sends the BW estimate.
// This is also done in the RTP module.
- WebRtc_Word32 DecoderEstimatedBandwidth() const;
+ int32_t DecoderEstimatedBandwidth() const;
// Set playout mode voice, fax.
- WebRtc_Word32 SetPlayoutMode(const AudioPlayoutMode mode);
+ int32_t SetPlayoutMode(const AudioPlayoutMode mode);
// Get playout mode voice, fax.
AudioPlayoutMode PlayoutMode() const;
// Get playout timestamp.
- WebRtc_Word32 PlayoutTimestamp(WebRtc_UWord32* timestamp);
+ int32_t PlayoutTimestamp(uint32_t* timestamp);
// Get 10 milliseconds of raw audio data to play out, and
// automatic resample to the requested frequency if > 0.
- WebRtc_Word32 PlayoutData10Ms(WebRtc_Word32 desired_freq_hz,
- AudioFrame* audio_frame);
+ int32_t PlayoutData10Ms(int32_t desired_freq_hz,
+ AudioFrame* audio_frame);
/////////////////////////////////////////
// Statistics
//
- WebRtc_Word32 NetworkStatistics(ACMNetworkStatistics* statistics) const;
+ int32_t NetworkStatistics(ACMNetworkStatistics* statistics) const;
void DestructEncoderInst(void* inst);
- WebRtc_Word16 AudioBuffer(WebRtcACMAudioBuff& buffer);
+ int16_t AudioBuffer(WebRtcACMAudioBuff& buffer);
// Get RED payload for iSAC. The method is called when 'this' ACM is
// the default ACM.
- WebRtc_Word32 REDPayloadISAC(const WebRtc_Word32 isac_rate,
- const WebRtc_Word16 isac_bw_estimate,
- WebRtc_UWord8* payload,
- WebRtc_Word16* length_bytes);
+ int32_t REDPayloadISAC(const int32_t isac_rate,
+ const int16_t isac_bw_estimate,
+ uint8_t* payload,
+ int16_t* length_bytes);
- WebRtc_Word16 SetAudioBuffer(WebRtcACMAudioBuff& buffer);
+ int16_t SetAudioBuffer(WebRtcACMAudioBuff& buffer);
- WebRtc_UWord32 EarliestTimestamp() const;
+ uint32_t EarliestTimestamp() const;
- WebRtc_Word32 LastEncodedTimestamp(WebRtc_UWord32& timestamp) const;
+ int32_t LastEncodedTimestamp(uint32_t& timestamp) const;
- WebRtc_Word32 ReplaceInternalDTXWithWebRtc(const bool use_webrtc_dtx);
+ int32_t ReplaceInternalDTXWithWebRtc(const bool use_webrtc_dtx);
- WebRtc_Word32 IsInternalDTXReplacedWithWebRtc(bool* uses_webrtc_dtx);
+ int32_t IsInternalDTXReplacedWithWebRtc(bool* uses_webrtc_dtx);
- WebRtc_Word32 SetISACMaxRate(const WebRtc_UWord32 max_bit_per_sec);
+ int32_t SetISACMaxRate(const uint32_t max_bit_per_sec);
- WebRtc_Word32 SetISACMaxPayloadSize(const WebRtc_UWord16 max_size_bytes);
+ int32_t SetISACMaxPayloadSize(const uint16_t max_size_bytes);
- WebRtc_Word32 ConfigISACBandwidthEstimator(
- const WebRtc_UWord8 frame_size_ms,
- const WebRtc_UWord16 rate_bit_per_sec,
+ int32_t ConfigISACBandwidthEstimator(
+ const uint8_t frame_size_ms,
+ const uint16_t rate_bit_per_sec,
const bool enforce_frame_size = false);
- WebRtc_Word32 UnregisterReceiveCodec(const WebRtc_Word16 payload_type);
+ int32_t UnregisterReceiveCodec(const int16_t payload_type);
protected:
void UnregisterSendCodec();
- WebRtc_Word32 UnregisterReceiveCodecSafe(const WebRtc_Word16 id);
+ int32_t UnregisterReceiveCodecSafe(const int16_t id);
ACMGenericCodec* CreateCodec(const CodecInst& codec);
- WebRtc_Word16 DecoderParamByPlType(const WebRtc_UWord8 payload_type,
- WebRtcACMCodecParams& codec_params) const;
+ int16_t DecoderParamByPlType(const uint8_t payload_type,
+ WebRtcACMCodecParams& codec_params) const;
- WebRtc_Word16 DecoderListIDByPlName(
- const char* name, const WebRtc_UWord16 frequency = 0) const;
+ int16_t DecoderListIDByPlName(
+ const char* name, const uint16_t frequency = 0) const;
- WebRtc_Word32 InitializeReceiverSafe();
+ int32_t InitializeReceiverSafe();
bool HaveValidEncoder(const char* caller_name) const;
- WebRtc_Word32 RegisterRecCodecMSSafe(const CodecInst& receive_codec,
- WebRtc_Word16 codec_id,
- WebRtc_Word16 mirror_id,
- ACMNetEQ::JitterBuffer jitter_buffer);
+ int32_t RegisterRecCodecMSSafe(const CodecInst& receive_codec,
+ int16_t codec_id,
+ int16_t mirror_id,
+ ACMNetEQ::JitterBuffer jitter_buffer);
// Set VAD/DTX status. This function does not acquire a lock, and it is
// created to be called only from inside a critical section.
@@ -313,9 +313,9 @@
bool GetSilence(int desired_sample_rate_hz, AudioFrame* frame);
AudioPacketizationCallback* packetization_callback_;
- WebRtc_Word32 id_;
- WebRtc_UWord32 last_timestamp_;
- WebRtc_UWord32 last_in_timestamp_;
+ int32_t id_;
+ uint32_t last_timestamp_;
+ uint32_t last_in_timestamp_;
CodecInst send_codec_inst_;
uint8_t cng_nb_pltype_;
uint8_t cng_wb_pltype_;
@@ -327,13 +327,13 @@
ACMVADMode vad_mode_;
ACMGenericCodec* codecs_[ACMCodecDB::kMaxNumCodecs];
ACMGenericCodec* slave_codecs_[ACMCodecDB::kMaxNumCodecs];
- WebRtc_Word16 mirror_codec_idx_[ACMCodecDB::kMaxNumCodecs];
+ int16_t mirror_codec_idx_[ACMCodecDB::kMaxNumCodecs];
bool stereo_receive_[ACMCodecDB::kMaxNumCodecs];
bool stereo_receive_registered_;
bool stereo_send_;
int prev_received_channel_;
int expected_channels_;
- WebRtc_Word32 current_send_codec_idx_;
+ int32_t current_send_codec_idx_;
int current_receive_codec_idx_;
bool send_codec_registered_;
ACMResampler input_resampler_;
@@ -341,7 +341,7 @@
ACMNetEQ neteq_;
CriticalSectionWrapper* acm_crit_sect_;
ACMVADCallback* vad_callback_;
- WebRtc_UWord8 last_recv_audio_codec_pltype_;
+ uint8_t last_recv_audio_codec_pltype_;
// RED/FEC.
bool is_first_red_;
@@ -350,35 +350,35 @@
// as pointers and not an array. If concerned about the memory, then make a
// set-up function to allocate them only when they are going to be used, i.e.
// FEC or Dual-streaming is enabled.
- WebRtc_UWord8* red_buffer_;
+ uint8_t* red_buffer_;
// TODO(turajs): we actually don't need |fragmentation_| as a member variable.
// It is sufficient to keep the length & payload type of previous payload in
// member variables.
RTPFragmentationHeader fragmentation_;
- WebRtc_UWord32 last_fec_timestamp_;
+ uint32_t last_fec_timestamp_;
// If no RED is registered as receive codec this
// will have an invalid value.
- WebRtc_UWord8 receive_red_pltype_;
+ uint8_t receive_red_pltype_;
// This is to keep track of CN instances where we can send DTMFs.
- WebRtc_UWord8 previous_pltype_;
+ uint8_t previous_pltype_;
// This keeps track of payload types associated with codecs_[].
// We define it as a signed variable and initialize it with -1 to indicate
// unused elements.
- WebRtc_Word16 registered_pltypes_[ACMCodecDB::kMaxNumCodecs];
+ int16_t registered_pltypes_[ACMCodecDB::kMaxNumCodecs];
// Used when payloads are pushed into ACM without any RTP info.
// One example is when a pre-encoded bit-stream is pushed from
// a file.
WebRtcRTPHeader* dummy_rtp_header_;
- WebRtc_UWord16 recv_pl_frame_size_smpls_;
+ uint16_t recv_pl_frame_size_smpls_;
bool receiver_initialized_;
ACMDTMFDetection* dtmf_detector_;
AudioCodingFeedback* dtmf_callback_;
- WebRtc_Word16 last_detected_tone_;
+ int16_t last_detected_tone_;
CriticalSectionWrapper* callback_crit_sect_;
AudioFrame audio_frame_;
diff --git a/webrtc/modules/audio_coding/main/test/APITest.cc b/webrtc/modules/audio_coding/main/test/APITest.cc
index 97376a2..6e4115d 100644
--- a/webrtc/modules/audio_coding/main/test/APITest.cc
+++ b/webrtc/modules/audio_coding/main/test/APITest.cc
@@ -50,7 +50,7 @@
void
-APITest::Wait(WebRtc_UWord32 waitLengthMs)
+APITest::Wait(uint32_t waitLengthMs)
{
if(_randomTest)
{
@@ -160,19 +160,19 @@
-//WebRtc_Word16
-//APITest::SetInFile(char* fileName, WebRtc_UWord16 frequencyHz)
+//int16_t
+//APITest::SetInFile(char* fileName, uint16_t frequencyHz)
//{
// return _inFile.Open(fileName, frequencyHz, "rb");
//}
//
-//WebRtc_Word16
-//APITest::SetOutFile(char* fileName, WebRtc_UWord16 frequencyHz)
+//int16_t
+//APITest::SetOutFile(char* fileName, uint16_t frequencyHz)
//{
// return _outFile.Open(fileName, frequencyHz, "wb");
//}
-WebRtc_Word16
+int16_t
APITest::SetUp()
{
_acmA = AudioCodingModule::Create(1);
@@ -181,8 +181,8 @@
CodecInst dummyCodec;
int lastPayloadType = 0;
- WebRtc_Word16 numCodecs = _acmA->NumberOfCodecs();
- for(WebRtc_UWord8 n = 0; n < numCodecs; n++)
+ int16_t numCodecs = _acmA->NumberOfCodecs();
+ for(uint8_t n = 0; n < numCodecs; n++)
{
AudioCodingModule::Codec(n, &dummyCodec);
if((STR_CASE_CMP(dummyCodec.plname, "CN") == 0) &&
@@ -250,15 +250,15 @@
_thereIsDecoderB = true;
// Register Send Codec
- AudioCodingModule::Codec((WebRtc_UWord8)_codecCntrA, &dummyCodec);
+ AudioCodingModule::Codec((uint8_t)_codecCntrA, &dummyCodec);
CHECK_ERROR_MT(_acmA->RegisterSendCodec(dummyCodec));
_thereIsEncoderA = true;
//
- AudioCodingModule::Codec((WebRtc_UWord8)_codecCntrB, &dummyCodec);
+ AudioCodingModule::Codec((uint8_t)_codecCntrB, &dummyCodec);
CHECK_ERROR_MT(_acmB->RegisterSendCodec(dummyCodec));
_thereIsEncoderB = true;
- WebRtc_UWord16 frequencyHz;
+ uint16_t frequencyHz;
printf("\n\nAPI Test\n");
printf("========\n");
@@ -747,8 +747,8 @@
// Keep main thread waiting for sender/receiver
// threads to complete
EventWrapper* completeEvent = EventWrapper::Create();
- WebRtc_UWord64 startTime = TickTime::MillisecondTimestamp();
- WebRtc_UWord64 currentTime;
+ uint64_t startTime = TickTime::MillisecondTimestamp();
+ uint64_t currentTime;
do
{
{
@@ -891,11 +891,11 @@
{
AudioCodingModule* myACM;
Channel* myChannel;
- WebRtc_Word32* myMinDelay;
+ int32_t* myMinDelay;
EventWrapper* myEvent = EventWrapper::Create();
- WebRtc_UWord32 inTimestamp = 0;
- WebRtc_UWord32 outTimestamp = 0;
+ uint32_t inTimestamp = 0;
+ uint32_t outTimestamp = 0;
double estimDelay = 0;
double averageEstimDelay = 0;
@@ -937,7 +937,7 @@
CHECK_ERROR_MT(myACM->PlayoutTimestamp(&outTimestamp));
//std::cout << outTimestamp << std::endl << std::flush;
- estimDelay = (double)((WebRtc_UWord32)(inTimestamp - outTimestamp)) /
+ estimDelay = (double)((uint32_t)(inTimestamp - outTimestamp)) /
((double)myACM->ReceiveFrequency() / 1000.0);
estimDelayCB.Update(estimDelay);
@@ -1063,7 +1063,7 @@
if(!FixedPayloadTypeCodec(myCodec.plname))
{
- WebRtc_Word32 i;
+ int32_t i;
for(i = 0; i < 32; i++)
{
if(!_payloadUsed[i])
@@ -1172,8 +1172,8 @@
receiveACM = _acmA;
}
- WebRtc_Word32 receiveFreqHz = receiveACM->ReceiveFrequency();
- WebRtc_Word32 playoutFreqHz = receiveACM->PlayoutFrequency();
+ int32_t receiveFreqHz = receiveACM->ReceiveFrequency();
+ int32_t playoutFreqHz = receiveACM->PlayoutFrequency();
CHECK_ERROR_MT(receiveFreqHz);
CHECK_ERROR_MT(playoutFreqHz);
@@ -1437,7 +1437,7 @@
{
CodecInst myCodec;
AudioCodingModule* myACM;
- WebRtc_UWord8* codecCntr;
+ uint8_t* codecCntr;
bool* thereIsEncoder;
bool* vad;
bool* dtx;
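Editorial note (not part of the patch): the receiver-side delay estimate computed in APITest.cc above divides an RTP timestamp difference (in samples) by samples-per-millisecond. A minimal stand-alone sketch of that arithmetic, with hypothetical names:

    #include <stdint.h>
    // The timestamp difference is in samples; dividing by samples/ms yields ms.
    // The unsigned subtraction stays well defined across 32-bit wrap-around.
    static double EstimateDelayMs(uint32_t in_timestamp, uint32_t out_timestamp,
                                  int receive_frequency_hz) {
      uint32_t diff_samples = in_timestamp - out_timestamp;
      return static_cast<double>(diff_samples) /
             (static_cast<double>(receive_frequency_hz) / 1000.0);
    }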
diff --git a/webrtc/modules/audio_coding/main/test/APITest.h b/webrtc/modules/audio_coding/main/test/APITest.h
index ee3f5e6..ead3134 100644
--- a/webrtc/modules/audio_coding/main/test/APITest.h
+++ b/webrtc/modules/audio_coding/main/test/APITest.h
@@ -29,7 +29,7 @@
void Perform();
private:
- WebRtc_Word16 SetUp();
+ int16_t SetUp();
static bool PushAudioThreadA(void* obj);
static bool PullAudioThreadA(void* obj);
@@ -63,7 +63,7 @@
void ChangeCodec(char side);
- void Wait(WebRtc_UWord32 waitLengthMs);
+ void Wait(uint32_t waitLengthMs);
void LookForDTMF(char side);
@@ -99,9 +99,9 @@
//--- I/O params
// A
- WebRtc_Word32 _outFreqHzA;
+ int32_t _outFreqHzA;
// B
- WebRtc_Word32 _outFreqHzB;
+ int32_t _outFreqHzB;
// Should we write to file.
// we might skip writing to file if we
@@ -120,8 +120,8 @@
EventWrapper* _apiEventB; // API calls
// keep track of the codec in either side.
- WebRtc_UWord8 _codecCntrA;
- WebRtc_UWord8 _codecCntrB;
+ uint8_t _codecCntrA;
+ uint8_t _codecCntrB;
// Set to true if there is an encoder on the corresponding side.
bool _thereIsEncoderA;
@@ -137,8 +137,8 @@
bool _sendDTXB;
ACMVADMode _sendVADModeB;
- WebRtc_Word32 _minDelayA;
- WebRtc_Word32 _minDelayB;
+ int32_t _minDelayA;
+ int32_t _minDelayB;
bool _payloadUsed[32];
AudioPlayoutMode _playoutModeA;
diff --git a/webrtc/modules/audio_coding/main/test/Channel.cc b/webrtc/modules/audio_coding/main/test/Channel.cc
index 55ffef5..c06e452 100644
--- a/webrtc/modules/audio_coding/main/test/Channel.cc
+++ b/webrtc/modules/audio_coding/main/test/Channel.cc
@@ -19,18 +19,18 @@
namespace webrtc {
-WebRtc_Word32
+int32_t
Channel::SendData(
const FrameType frameType,
- const WebRtc_UWord8 payloadType,
- const WebRtc_UWord32 timeStamp,
- const WebRtc_UWord8* payloadData,
- const WebRtc_UWord16 payloadSize,
+ const uint8_t payloadType,
+ const uint32_t timeStamp,
+ const uint8_t* payloadData,
+ const uint16_t payloadSize,
const RTPFragmentationHeader* fragmentation)
{
WebRtcRTPHeader rtpInfo;
- WebRtc_Word32 status;
- WebRtc_UWord16 payloadDataSize = payloadSize;
+ int32_t status;
+ uint16_t payloadDataSize = payloadSize;
rtpInfo.header.markerBit = false;
rtpInfo.header.ssrc = 0;
@@ -60,10 +60,10 @@
{
// only 0x80 if we have multiple blocks
_payloadData[0] = 0x80 + fragmentation->fragmentationPlType[1];
- WebRtc_UWord32 REDheader = (((WebRtc_UWord32)fragmentation->fragmentationTimeDiff[1]) << 10) + fragmentation->fragmentationLength[1];
- _payloadData[1] = WebRtc_UWord8((REDheader >> 16) & 0x000000FF);
- _payloadData[2] = WebRtc_UWord8((REDheader >> 8) & 0x000000FF);
- _payloadData[3] = WebRtc_UWord8(REDheader & 0x000000FF);
+ uint32_t REDheader = (((uint32_t)fragmentation->fragmentationTimeDiff[1]) << 10) + fragmentation->fragmentationLength[1];
+ _payloadData[1] = uint8_t((REDheader >> 16) & 0x000000FF);
+ _payloadData[2] = uint8_t((REDheader >> 8) & 0x000000FF);
+ _payloadData[3] = uint8_t(REDheader & 0x000000FF);
_payloadData[4] = fragmentation->fragmentationPlType[0];
// copy the RED data
@@ -81,7 +81,7 @@
memcpy(_payloadData,
payloadData + fragmentation->fragmentationOffset[0],
fragmentation->fragmentationLength[0]);
- payloadDataSize = WebRtc_UWord16(fragmentation->fragmentationLength[0]);
+ payloadDataSize = uint16_t(fragmentation->fragmentationLength[0]);
rtpInfo.header.payloadType = fragmentation->fragmentationPlType[0];
}
}
@@ -108,7 +108,7 @@
_channelCritSect->Enter();
if(_saveBitStream)
{
- //fwrite(payloadData, sizeof(WebRtc_UWord8), payloadSize, _bitStreamFile);
+ //fwrite(payloadData, sizeof(uint8_t), payloadSize, _bitStreamFile);
}
if(!_isStereo)
@@ -138,7 +138,7 @@
void
Channel::CalcStatistics(
WebRtcRTPHeader& rtpInfo,
- WebRtc_UWord16 payloadSize)
+ uint16_t payloadSize)
{
int n;
if((rtpInfo.header.payloadType != _lastPayloadType) &&
@@ -175,8 +175,8 @@
{
if(!currentPayloadStr->newPacket)
{
- WebRtc_UWord32 lastFrameSizeSample = (WebRtc_UWord32)((WebRtc_UWord32)rtpInfo.header.timestamp -
- (WebRtc_UWord32)currentPayloadStr->lastTimestamp);
+ uint32_t lastFrameSizeSample = (uint32_t)((uint32_t)rtpInfo.header.timestamp -
+ (uint32_t)currentPayloadStr->lastTimestamp);
assert(lastFrameSizeSample > 0);
int k = 0;
while((currentPayloadStr->frameSizeStats[k].frameSizeSample !=
@@ -187,7 +187,7 @@
}
ACMTestFrameSizeStats* currentFrameSizeStats =
&(currentPayloadStr->frameSizeStats[k]);
- currentFrameSizeStats->frameSizeSample = (WebRtc_Word16)lastFrameSizeSample;
+ currentFrameSizeStats->frameSizeSample = (int16_t)lastFrameSizeSample;
// increment the number of encoded samples.
currentFrameSizeStats->totalEncodedSamples +=
@@ -235,7 +235,7 @@
}
}
-Channel::Channel(WebRtc_Word16 chID) :
+Channel::Channel(int16_t chID) :
_receiverACM(NULL),
_seqNo(0),
_channelCritSect(CriticalSectionWrapper::CreateCriticalSection()),
@@ -315,7 +315,7 @@
_channelCritSect->Leave();
}
-WebRtc_Word16
+int16_t
Channel::Stats(CodecInst& codecInst, ACMTestPayloadStats& payloadStats)
{
_channelCritSect->Enter();
@@ -355,12 +355,12 @@
}
void
-Channel::Stats(WebRtc_UWord32* numPackets)
+Channel::Stats(uint32_t* numPackets)
{
_channelCritSect->Enter();
int k;
int n;
- memset(numPackets, 0, MAX_NUM_PAYLOADS * sizeof(WebRtc_UWord32));
+ memset(numPackets, 0, MAX_NUM_PAYLOADS * sizeof(uint32_t));
for(k = 0; k < MAX_NUM_PAYLOADS; k++)
{
if(_payloadStats[k].payloadType == -1)
@@ -382,20 +382,20 @@
}
void
-Channel::Stats(WebRtc_UWord8* payloadType, WebRtc_UWord32* payloadLenByte)
+Channel::Stats(uint8_t* payloadType, uint32_t* payloadLenByte)
{
_channelCritSect->Enter();
int k;
int n;
- memset(payloadLenByte, 0, MAX_NUM_PAYLOADS * sizeof(WebRtc_UWord32));
+ memset(payloadLenByte, 0, MAX_NUM_PAYLOADS * sizeof(uint32_t));
for(k = 0; k < MAX_NUM_PAYLOADS; k++)
{
if(_payloadStats[k].payloadType == -1)
{
break;
}
- payloadType[k] = (WebRtc_UWord8)_payloadStats[k].payloadType;
+ payloadType[k] = (uint8_t)_payloadStats[k].payloadType;
payloadLenByte[k] = 0;
for(n = 0; n < MAX_NUM_FRAMESIZES; n++)
{
@@ -403,7 +403,7 @@
{
break;
}
- payloadLenByte[k] += (WebRtc_UWord16)
+ payloadLenByte[k] += (uint16_t)
_payloadStats[k].frameSizeStats[n].totalPayloadLenByte;
}
}
@@ -453,10 +453,10 @@
}
-WebRtc_UWord32
+uint32_t
Channel::LastInTimestamp()
{
- WebRtc_UWord32 timestamp;
+ uint32_t timestamp;
_channelCritSect->Enter();
timestamp = _lastInTimestamp;
_channelCritSect->Leave();
@@ -467,7 +467,7 @@
Channel::BitRate()
{
double rate;
- WebRtc_UWord64 currTime = TickTime::MillisecondTimestamp();
+ uint64_t currTime = TickTime::MillisecondTimestamp();
_channelCritSect->Enter();
rate = ((double)_totalBytes * 8.0)/ (double)(currTime - _beginTime);
_channelCritSect->Leave();
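Editorial note (not part of the patch): the Channel::SendData changes above keep the RED (RFC 2198) block-header packing intact — a set F bit plus payload type in the first byte, then a 14-bit timestamp offset and a 10-bit block length spread over the next three bytes. A minimal sketch of that packing, with hypothetical names:

    #include <stdint.h>
    // Packs the 4-byte RED block header: F bit | 7-bit payload type, then a
    // 14-bit timestamp offset and a 10-bit block length (24 bits in 3 bytes).
    static void PackRedBlockHeader(uint8_t* out, uint8_t block_payload_type,
                                   uint16_t timestamp_offset_samples,
                                   uint16_t block_length_bytes) {
      out[0] = 0x80 | (block_payload_type & 0x7F);
      uint32_t rest = (static_cast<uint32_t>(timestamp_offset_samples) << 10) |
                      (block_length_bytes & 0x3FF);
      out[1] = static_cast<uint8_t>((rest >> 16) & 0xFF);
      out[2] = static_cast<uint8_t>((rest >> 8) & 0xFF);
      out[3] = static_cast<uint8_t>(rest & 0xFF);
    }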
diff --git a/webrtc/modules/audio_coding/main/test/Channel.h b/webrtc/modules/audio_coding/main/test/Channel.h
index 7484584..c0bf7f3 100644
--- a/webrtc/modules/audio_coding/main/test/Channel.h
+++ b/webrtc/modules/audio_coding/main/test/Channel.h
@@ -26,11 +26,11 @@
struct ACMTestFrameSizeStats
{
- WebRtc_UWord16 frameSizeSample;
- WebRtc_Word16 maxPayloadLen;
- WebRtc_UWord32 numPackets;
- WebRtc_UWord64 totalPayloadLenByte;
- WebRtc_UWord64 totalEncodedSamples;
+ uint16_t frameSizeSample;
+ int16_t maxPayloadLen;
+ uint32_t numPackets;
+ uint64_t totalPayloadLenByte;
+ uint64_t totalEncodedSamples;
double rateBitPerSec;
double usageLenSec;
@@ -39,9 +39,9 @@
struct ACMTestPayloadStats
{
bool newPacket;
- WebRtc_Word16 payloadType;
- WebRtc_Word16 lastPayloadLenByte;
- WebRtc_UWord32 lastTimestamp;
+ int16_t payloadType;
+ int16_t lastPayloadLenByte;
+ uint32_t lastTimestamp;
ACMTestFrameSizeStats frameSizeStats[MAX_NUM_FRAMESIZES];
};
@@ -50,15 +50,15 @@
public:
Channel(
- WebRtc_Word16 chID = -1);
+ int16_t chID = -1);
~Channel();
- WebRtc_Word32 SendData(
+ int32_t SendData(
const FrameType frameType,
- const WebRtc_UWord8 payloadType,
- const WebRtc_UWord32 timeStamp,
- const WebRtc_UWord8* payloadData,
- const WebRtc_UWord16 payloadSize,
+ const uint8_t payloadType,
+ const uint32_t timeStamp,
+ const uint8_t* payloadData,
+ const uint16_t payloadSize,
const RTPFragmentationHeader* fragmentation);
void RegisterReceiverACM(
@@ -66,16 +66,16 @@
void ResetStats();
- WebRtc_Word16 Stats(
+ int16_t Stats(
CodecInst& codecInst,
ACMTestPayloadStats& payloadStats);
void Stats(
- WebRtc_UWord32* numPackets);
+ uint32_t* numPackets);
void Stats(
- WebRtc_UWord8* payloadLenByte,
- WebRtc_UWord32* payloadType);
+ uint8_t* payloadLenByte,
+ uint32_t* payloadType);
void PrintStats(
CodecInst& codecInst);
@@ -85,7 +85,7 @@
_isStereo = isStereo;
}
- WebRtc_UWord32 LastInTimestamp();
+ uint32_t LastInTimestamp();
void SetFECTestWithPacketLoss(bool usePacketLoss)
{
@@ -97,27 +97,27 @@
private:
void CalcStatistics(
WebRtcRTPHeader& rtpInfo,
- WebRtc_UWord16 payloadSize);
+ uint16_t payloadSize);
AudioCodingModule* _receiverACM;
- WebRtc_UWord16 _seqNo;
+ uint16_t _seqNo;
// 60msec * 32 sample(max)/msec * 2 description (maybe) * 2 bytes/sample
- WebRtc_UWord8 _payloadData[60 * 32 * 2 * 2];
+ uint8_t _payloadData[60 * 32 * 2 * 2];
CriticalSectionWrapper* _channelCritSect;
FILE* _bitStreamFile;
bool _saveBitStream;
- WebRtc_Word16 _lastPayloadType;
+ int16_t _lastPayloadType;
ACMTestPayloadStats _payloadStats[MAX_NUM_PAYLOADS];
bool _isStereo;
WebRtcRTPHeader _rtpInfo;
bool _leftChannel;
- WebRtc_UWord32 _lastInTimestamp;
+ uint32_t _lastInTimestamp;
// FEC Test variables
- WebRtc_Word16 _packetLoss;
+ int16_t _packetLoss;
bool _useFECTestWithPacketLoss;
- WebRtc_UWord64 _beginTime;
- WebRtc_UWord64 _totalBytes;
+ uint64_t _beginTime;
+ uint64_t _totalBytes;
};
} // namespace webrtc
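Editorial note (not part of the patch): the _payloadData buffer declared above is sized per its comment's worst case — 60 ms of audio at 32 samples/ms (32 kHz), up to 2 payload descriptions (e.g. RED), 2 bytes per sample — i.e. 60 * 32 * 2 * 2 = 7680 bytes.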
diff --git a/webrtc/modules/audio_coding/main/test/EncodeDecodeTest.cc b/webrtc/modules/audio_coding/main/test/EncodeDecodeTest.cc
index c4f9a47..58e6299 100644
--- a/webrtc/modules/audio_coding/main/test/EncodeDecodeTest.cc
+++ b/webrtc/modules/audio_coding/main/test/EncodeDecodeTest.cc
@@ -28,7 +28,7 @@
namespace webrtc {
TestPacketization::TestPacketization(RTPStream *rtpStream,
- WebRtc_UWord16 frequency)
+ uint16_t frequency)
: _rtpStream(rtpStream),
_frequency(frequency),
_seqNo(0) {
@@ -36,12 +36,12 @@
TestPacketization::~TestPacketization() { }
-WebRtc_Word32 TestPacketization::SendData(
+int32_t TestPacketization::SendData(
const FrameType /* frameType */,
- const WebRtc_UWord8 payloadType,
- const WebRtc_UWord32 timeStamp,
- const WebRtc_UWord8* payloadData,
- const WebRtc_UWord16 payloadSize,
+ const uint8_t payloadType,
+ const uint32_t timeStamp,
+ const uint8_t* payloadData,
+ const uint16_t payloadSize,
const RTPFragmentationHeader* /* fragmentation */) {
_rtpStream->Write(payloadType, timeStamp, _seqNo++, payloadData, payloadSize,
_frequency);
@@ -103,7 +103,7 @@
bool Sender::Add10MsData() {
if (!_pcmFile.EndOfFile()) {
_pcmFile.Read10MsData(_audioFrame);
- WebRtc_Word32 ok = _acm->Add10MsData(_audioFrame);
+ int32_t ok = _acm->Add10MsData(_audioFrame);
if (ok != 0) {
printf("Error calling Add10MsData: for run: codecId: %d\n", codeId);
exit(1);
@@ -114,7 +114,7 @@
}
bool Sender::Process() {
- WebRtc_Word32 ok = _acm->Process();
+ int32_t ok = _acm->Process();
if (ok < 0) {
printf("Error calling Add10MsData: for run: codecId: %d\n", codeId);
exit(1);
@@ -145,7 +145,7 @@
noOfCodecs = acm->NumberOfCodecs();
for (int i = 0; i < noOfCodecs; i++) {
- acm->Codec((WebRtc_UWord8) i, &recvCodec);
+ acm->Codec((uint8_t) i, &recvCodec);
if (acm->RegisterReceiveCodec(recvCodec) != 0) {
printf("Unable to register codec: for run: codecId: %d\n", codeId);
exit(1);
@@ -177,7 +177,7 @@
}
_realPayloadSizeBytes = 0;
- _playoutBuffer = new WebRtc_Word16[WEBRTC_10MS_PCM_AUDIO];
+ _playoutBuffer = new int16_t[WEBRTC_10MS_PCM_AUDIO];
_frequency = playSampFreq;
_acm = acm;
_firstTime = true;
@@ -207,7 +207,7 @@
}
}
- WebRtc_Word32 ok = _acm->IncomingPacket(_incomingPayload,
+ int32_t ok = _acm->IncomingPacket(_incomingPayload,
_realPayloadSizeBytes, _rtpInfo);
if (ok != 0) {
printf("Error when inserting packet to ACM, for run: codecId: %d\n",
@@ -239,8 +239,8 @@
}
void Receiver::Run() {
- WebRtc_UWord8 counter500Ms = 50;
- WebRtc_UWord32 clock = 0;
+ uint8_t counter500Ms = 50;
+ uint32_t clock = 0;
while (counter500Ms > 0) {
if (clock == 0 || clock >= _nextTime) {
diff --git a/webrtc/modules/audio_coding/main/test/EncodeDecodeTest.h b/webrtc/modules/audio_coding/main/test/EncodeDecodeTest.h
index f407a6b..9b58d4d 100644
--- a/webrtc/modules/audio_coding/main/test/EncodeDecodeTest.h
+++ b/webrtc/modules/audio_coding/main/test/EncodeDecodeTest.h
@@ -26,22 +26,22 @@
// TestPacketization callback which writes the encoded payloads to file
class TestPacketization: public AudioPacketizationCallback {
public:
- TestPacketization(RTPStream *rtpStream, WebRtc_UWord16 frequency);
+ TestPacketization(RTPStream *rtpStream, uint16_t frequency);
~TestPacketization();
- virtual WebRtc_Word32 SendData(const FrameType frameType,
- const WebRtc_UWord8 payloadType,
- const WebRtc_UWord32 timeStamp,
- const WebRtc_UWord8* payloadData,
- const WebRtc_UWord16 payloadSize,
- const RTPFragmentationHeader* fragmentation);
+ virtual int32_t SendData(const FrameType frameType,
+ const uint8_t payloadType,
+ const uint32_t timeStamp,
+ const uint8_t* payloadData,
+ const uint16_t payloadSize,
+ const RTPFragmentationHeader* fragmentation);
private:
- static void MakeRTPheader(WebRtc_UWord8* rtpHeader, WebRtc_UWord8 payloadType,
- WebRtc_Word16 seqNo, WebRtc_UWord32 timeStamp,
- WebRtc_UWord32 ssrc);
+ static void MakeRTPheader(uint8_t* rtpHeader, uint8_t payloadType,
+ int16_t seqNo, uint32_t timeStamp,
+ uint32_t ssrc);
RTPStream* _rtpStream;
- WebRtc_Word32 _frequency;
- WebRtc_Word16 _seqNo;
+ int32_t _frequency;
+ int16_t _seqNo;
};
class Sender {
@@ -54,8 +54,8 @@
bool Process();
//for auto_test and logging
- WebRtc_UWord8 testMode;
- WebRtc_UWord8 codeId;
+ uint8_t testMode;
+ uint8_t codeId;
private:
AudioCodingModule* _acm;
@@ -74,22 +74,22 @@
bool PlayoutData();
//for auto_test and logging
- WebRtc_UWord8 codeId;
- WebRtc_UWord8 testMode;
+ uint8_t codeId;
+ uint8_t testMode;
private:
AudioCodingModule* _acm;
RTPStream* _rtpStream;
PCMFile _pcmFile;
- WebRtc_Word16* _playoutBuffer;
- WebRtc_UWord16 _playoutLengthSmpls;
- WebRtc_UWord8 _incomingPayload[MAX_INCOMING_PAYLOAD];
- WebRtc_UWord16 _payloadSizeBytes;
- WebRtc_UWord16 _realPayloadSizeBytes;
- WebRtc_Word32 _frequency;
+ int16_t* _playoutBuffer;
+ uint16_t _playoutLengthSmpls;
+ uint8_t _incomingPayload[MAX_INCOMING_PAYLOAD];
+ uint16_t _payloadSizeBytes;
+ uint16_t _realPayloadSizeBytes;
+ int32_t _frequency;
bool _firstTime;
WebRtcRTPHeader _rtpInfo;
- WebRtc_UWord32 _nextTime;
+ uint32_t _nextTime;
};
class EncodeDecodeTest: public ACMTest {
@@ -98,8 +98,8 @@
EncodeDecodeTest(int testMode);
virtual void Perform();
- WebRtc_UWord16 _playoutFreq;
- WebRtc_UWord8 _testMode;
+ uint16_t _playoutFreq;
+ uint8_t _testMode;
private:
void EncodeToFile(int fileType, int codeId, int* codePars, int testMode);
diff --git a/webrtc/modules/audio_coding/main/test/PCMFile.cc b/webrtc/modules/audio_coding/main/test/PCMFile.cc
index fbe73f5..0b61bbb 100644
--- a/webrtc/modules/audio_coding/main/test/PCMFile.cc
+++ b/webrtc/modules/audio_coding/main/test/PCMFile.cc
@@ -30,11 +30,11 @@
rewinded_(false),
read_stereo_(false),
save_stereo_(false) {
- timestamp_ = (((WebRtc_UWord32)rand() & 0x0000FFFF) << 16) |
- ((WebRtc_UWord32)rand() & 0x0000FFFF);
+ timestamp_ = (((uint32_t)rand() & 0x0000FFFF) << 16) |
+ ((uint32_t)rand() & 0x0000FFFF);
}
-PCMFile::PCMFile(WebRtc_UWord32 timestamp)
+PCMFile::PCMFile(uint32_t timestamp)
: pcm_file_(NULL),
samples_10ms_(160),
frequency_(16000),
@@ -46,13 +46,12 @@
timestamp_ = timestamp;
}
-WebRtc_Word16 PCMFile::ChooseFile(std::string* file_name,
- WebRtc_Word16 max_len) {
+int16_t PCMFile::ChooseFile(std::string* file_name, int16_t max_len) {
char tmp_name[MAX_FILE_NAME_LENGTH_BYTE];
EXPECT_TRUE(fgets(tmp_name, MAX_FILE_NAME_LENGTH_BYTE, stdin) != NULL);
tmp_name[MAX_FILE_NAME_LENGTH_BYTE - 1] = '\0';
- WebRtc_Word16 n = 0;
+ int16_t n = 0;
// Removing leading spaces.
while ((isspace(tmp_name[n]) || iscntrl(tmp_name[n])) && (tmp_name[n] != 0)
@@ -64,7 +63,7 @@
}
// Removing trailing spaces.
- n = (WebRtc_Word16)(strlen(tmp_name) - 1);
+ n = (int16_t)(strlen(tmp_name) - 1);
if (n >= 0) {
while ((isspace(tmp_name[n]) || iscntrl(tmp_name[n])) && (n >= 0)) {
n--;
@@ -74,7 +73,7 @@
tmp_name[n + 1] = '\0';
}
- WebRtc_Word16 len = (WebRtc_Word16) strlen(tmp_name);
+ int16_t len = (int16_t) strlen(tmp_name);
if (len > max_len) {
return -1;
}
@@ -85,14 +84,14 @@
return 0;
}
-WebRtc_Word16 PCMFile::ChooseFile(std::string* file_name,
- WebRtc_Word16 max_len,
- WebRtc_UWord16* frequency_hz) {
+int16_t PCMFile::ChooseFile(std::string* file_name,
+ int16_t max_len,
+ uint16_t* frequency_hz) {
char tmp_name[MAX_FILE_NAME_LENGTH_BYTE];
EXPECT_TRUE(fgets(tmp_name, MAX_FILE_NAME_LENGTH_BYTE, stdin) != NULL);
tmp_name[MAX_FILE_NAME_LENGTH_BYTE - 1] = '\0';
- WebRtc_Word16 n = 0;
+ int16_t n = 0;
// Removing leading spaces.
while ((isspace(tmp_name[n]) || iscntrl(tmp_name[n])) && (tmp_name[n] != 0)
@@ -104,7 +103,7 @@
}
// Removing trailing spaces.
- n = (WebRtc_Word16)(strlen(tmp_name) - 1);
+ n = (int16_t)(strlen(tmp_name) - 1);
if (n >= 0) {
while ((isspace(tmp_name[n]) || iscntrl(tmp_name[n])) && (n >= 0)) {
n--;
@@ -114,7 +113,7 @@
tmp_name[n + 1] = '\0';
}
- WebRtc_Word16 len = (WebRtc_Word16) strlen(tmp_name);
+ int16_t len = (int16_t) strlen(tmp_name);
if (len > max_len) {
return -1;
}
@@ -125,42 +124,42 @@
printf("Enter the sampling frequency (in Hz) of the above file [%u]: ",
*frequency_hz);
EXPECT_TRUE(fgets(tmp_name, 10, stdin) != NULL);
- WebRtc_UWord16 tmp_frequency = (WebRtc_UWord16) atoi(tmp_name);
+ uint16_t tmp_frequency = (uint16_t) atoi(tmp_name);
if (tmp_frequency > 0) {
*frequency_hz = tmp_frequency;
}
return 0;
}
-void PCMFile::Open(const std::string& file_name, WebRtc_UWord16 frequency,
+void PCMFile::Open(const std::string& file_name, uint16_t frequency,
const char* mode, bool auto_rewind) {
if ((pcm_file_ = fopen(file_name.c_str(), mode)) == NULL) {
printf("Cannot open file %s.\n", file_name.c_str());
ADD_FAILURE() << "Unable to read file";
}
frequency_ = frequency;
- samples_10ms_ = (WebRtc_UWord16)(frequency_ / 100);
+ samples_10ms_ = (uint16_t)(frequency_ / 100);
auto_rewind_ = auto_rewind;
end_of_file_ = false;
rewinded_ = false;
}
-WebRtc_Word32 PCMFile::SamplingFrequency() const {
+int32_t PCMFile::SamplingFrequency() const {
return frequency_;
}
-WebRtc_UWord16 PCMFile::PayloadLength10Ms() const {
+uint16_t PCMFile::PayloadLength10Ms() const {
return samples_10ms_;
}
-WebRtc_Word32 PCMFile::Read10MsData(AudioFrame& audio_frame) {
- WebRtc_UWord16 channels = 1;
+int32_t PCMFile::Read10MsData(AudioFrame& audio_frame) {
+ uint16_t channels = 1;
if (read_stereo_) {
channels = 2;
}
- WebRtc_Word32 payload_size = (WebRtc_Word32) fread(audio_frame.data_,
- sizeof(WebRtc_UWord16),
+ int32_t payload_size = (int32_t) fread(audio_frame.data_,
+ sizeof(uint16_t),
samples_10ms_ * channels,
pcm_file_);
if (payload_size < samples_10ms_ * channels) {
@@ -185,20 +184,20 @@
void PCMFile::Write10MsData(AudioFrame& audio_frame) {
if (audio_frame.num_channels_ == 1) {
if (!save_stereo_) {
- if (fwrite(audio_frame.data_, sizeof(WebRtc_UWord16),
+ if (fwrite(audio_frame.data_, sizeof(uint16_t),
audio_frame.samples_per_channel_, pcm_file_) !=
static_cast<size_t>(audio_frame.samples_per_channel_)) {
return;
}
} else {
- WebRtc_Word16* stereo_audio =
- new WebRtc_Word16[2 * audio_frame.samples_per_channel_];
+ int16_t* stereo_audio =
+ new int16_t[2 * audio_frame.samples_per_channel_];
int k;
for (k = 0; k < audio_frame.samples_per_channel_; k++) {
stereo_audio[k << 1] = audio_frame.data_[k];
stereo_audio[(k << 1) + 1] = audio_frame.data_[k];
}
- if (fwrite(stereo_audio, sizeof(WebRtc_Word16),
+ if (fwrite(stereo_audio, sizeof(int16_t),
2 * audio_frame.samples_per_channel_, pcm_file_) !=
static_cast<size_t>(2 * audio_frame.samples_per_channel_)) {
return;
@@ -206,7 +205,7 @@
delete[] stereo_audio;
}
} else {
- if (fwrite(audio_frame.data_, sizeof(WebRtc_Word16),
+ if (fwrite(audio_frame.data_, sizeof(int16_t),
audio_frame.num_channels_ * audio_frame.samples_per_channel_,
pcm_file_) != static_cast<size_t>(
audio_frame.num_channels_ * audio_frame.samples_per_channel_)) {
@@ -215,9 +214,9 @@
}
}
-void PCMFile::Write10MsData(WebRtc_Word16* playout_buffer,
- WebRtc_UWord16 length_smpls) {
- if (fwrite(playout_buffer, sizeof(WebRtc_UWord16),
+void PCMFile::Write10MsData(int16_t* playout_buffer,
+ uint16_t length_smpls) {
+ if (fwrite(playout_buffer, sizeof(uint16_t),
length_smpls, pcm_file_) != length_smpls) {
return;
}
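Editorial note (not part of the patch): PCMFile::Write10MsData above duplicates each mono sample into an interleaved left/right pair when stereo output is requested. A small stand-alone sketch of that loop, with hypothetical names:

    #include <stdint.h>
    #include <vector>
    // Duplicate a mono buffer into interleaved stereo (L, R, L, R, ...).
    static std::vector<int16_t> DuplicateToStereo(const int16_t* mono,
                                                  int samples_per_channel) {
      std::vector<int16_t> stereo(2 * samples_per_channel);
      for (int k = 0; k < samples_per_channel; ++k) {
        stereo[2 * k] = mono[k];      // left
        stereo[2 * k + 1] = mono[k];  // right
      }
      return stereo;
    }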
diff --git a/webrtc/modules/audio_coding/main/test/PCMFile.h b/webrtc/modules/audio_coding/main/test/PCMFile.h
index 2dbfecd..bd2ce21 100644
--- a/webrtc/modules/audio_coding/main/test/PCMFile.h
+++ b/webrtc/modules/audio_coding/main/test/PCMFile.h
@@ -23,45 +23,45 @@
class PCMFile {
public:
PCMFile();
- PCMFile(WebRtc_UWord32 timestamp);
+ PCMFile(uint32_t timestamp);
~PCMFile() {
if (pcm_file_ != NULL) {
fclose(pcm_file_);
}
}
- void Open(const std::string& filename, WebRtc_UWord16 frequency,
+ void Open(const std::string& filename, uint16_t frequency,
const char* mode, bool auto_rewind = false);
- WebRtc_Word32 Read10MsData(AudioFrame& audio_frame);
+ int32_t Read10MsData(AudioFrame& audio_frame);
- void Write10MsData(WebRtc_Word16 *playout_buffer,
- WebRtc_UWord16 length_smpls);
+ void Write10MsData(int16_t *playout_buffer,
+ uint16_t length_smpls);
void Write10MsData(AudioFrame& audio_frame);
- WebRtc_UWord16 PayloadLength10Ms() const;
- WebRtc_Word32 SamplingFrequency() const;
+ uint16_t PayloadLength10Ms() const;
+ int32_t SamplingFrequency() const;
void Close();
bool EndOfFile() const {
return end_of_file_;
}
void Rewind();
- static WebRtc_Word16 ChooseFile(std::string* file_name,
- WebRtc_Word16 max_len,
- WebRtc_UWord16* frequency_hz);
- static WebRtc_Word16 ChooseFile(std::string* file_name,
- WebRtc_Word16 max_len);
+ static int16_t ChooseFile(std::string* file_name,
+ int16_t max_len,
+ uint16_t* frequency_hz);
+ static int16_t ChooseFile(std::string* file_name,
+ int16_t max_len);
bool Rewinded();
void SaveStereo(bool is_stereo = true);
void ReadStereo(bool is_stereo = true);
private:
FILE* pcm_file_;
- WebRtc_UWord16 samples_10ms_;
- WebRtc_Word32 frequency_;
+ uint16_t samples_10ms_;
+ int32_t frequency_;
bool end_of_file_;
bool auto_rewind_;
bool rewinded_;
- WebRtc_UWord32 timestamp_;
+ uint32_t timestamp_;
bool read_stereo_;
bool save_stereo_;
};
diff --git a/webrtc/modules/audio_coding/main/test/RTPFile.cc b/webrtc/modules/audio_coding/main/test/RTPFile.cc
index 37f9d3c..47850ae 100644
--- a/webrtc/modules/audio_coding/main/test/RTPFile.cc
+++ b/webrtc/modules/audio_coding/main/test/RTPFile.cc
@@ -25,23 +25,23 @@
namespace webrtc {
-void RTPStream::ParseRTPHeader(WebRtcRTPHeader* rtpInfo, const WebRtc_UWord8* rtpHeader)
+void RTPStream::ParseRTPHeader(WebRtcRTPHeader* rtpInfo, const uint8_t* rtpHeader)
{
rtpInfo->header.payloadType = rtpHeader[1];
- rtpInfo->header.sequenceNumber = (static_cast<WebRtc_UWord16>(rtpHeader[2])<<8) | rtpHeader[3];
- rtpInfo->header.timestamp = (static_cast<WebRtc_UWord32>(rtpHeader[4])<<24) |
- (static_cast<WebRtc_UWord32>(rtpHeader[5])<<16) |
- (static_cast<WebRtc_UWord32>(rtpHeader[6])<<8) |
+ rtpInfo->header.sequenceNumber = (static_cast<uint16_t>(rtpHeader[2])<<8) | rtpHeader[3];
+ rtpInfo->header.timestamp = (static_cast<uint32_t>(rtpHeader[4])<<24) |
+ (static_cast<uint32_t>(rtpHeader[5])<<16) |
+ (static_cast<uint32_t>(rtpHeader[6])<<8) |
rtpHeader[7];
- rtpInfo->header.ssrc = (static_cast<WebRtc_UWord32>(rtpHeader[8])<<24) |
- (static_cast<WebRtc_UWord32>(rtpHeader[9])<<16) |
- (static_cast<WebRtc_UWord32>(rtpHeader[10])<<8) |
+ rtpInfo->header.ssrc = (static_cast<uint32_t>(rtpHeader[8])<<24) |
+ (static_cast<uint32_t>(rtpHeader[9])<<16) |
+ (static_cast<uint32_t>(rtpHeader[10])<<8) |
rtpHeader[11];
}
-void RTPStream::MakeRTPheader(WebRtc_UWord8* rtpHeader,
- WebRtc_UWord8 payloadType, WebRtc_Word16 seqNo,
- WebRtc_UWord32 timeStamp, WebRtc_UWord32 ssrc)
+void RTPStream::MakeRTPheader(uint8_t* rtpHeader,
+ uint8_t payloadType, int16_t seqNo,
+ uint32_t timeStamp, uint32_t ssrc)
{
rtpHeader[0]=(unsigned char)0x80;
rtpHeader[1]=(unsigned char)(payloadType & 0xFF);
@@ -61,9 +61,9 @@
}
-RTPPacket::RTPPacket(WebRtc_UWord8 payloadType, WebRtc_UWord32 timeStamp,
- WebRtc_Word16 seqNo, const WebRtc_UWord8* payloadData,
- WebRtc_UWord16 payloadSize, WebRtc_UWord32 frequency)
+RTPPacket::RTPPacket(uint8_t payloadType, uint32_t timeStamp,
+ int16_t seqNo, const uint8_t* payloadData,
+ uint16_t payloadSize, uint32_t frequency)
:
payloadType(payloadType),
timeStamp(timeStamp),
@@ -73,7 +73,7 @@
{
if (payloadSize > 0)
{
- this->payloadData = new WebRtc_UWord8[payloadSize];
+ this->payloadData = new uint8_t[payloadSize];
memcpy(this->payloadData, payloadData, payloadSize);
}
}
@@ -94,9 +94,9 @@
}
void
-RTPBuffer::Write(const WebRtc_UWord8 payloadType, const WebRtc_UWord32 timeStamp,
- const WebRtc_Word16 seqNo, const WebRtc_UWord8* payloadData,
- const WebRtc_UWord16 payloadSize, WebRtc_UWord32 frequency)
+RTPBuffer::Write(const uint8_t payloadType, const uint32_t timeStamp,
+ const int16_t seqNo, const uint8_t* payloadData,
+ const uint16_t payloadSize, uint32_t frequency)
{
RTPPacket *packet = new RTPPacket(payloadType, timeStamp, seqNo, payloadData, payloadSize, frequency);
_queueRWLock->AcquireLockExclusive();
@@ -104,11 +104,11 @@
_queueRWLock->ReleaseLockExclusive();
}
-WebRtc_UWord16
+uint16_t
RTPBuffer::Read(WebRtcRTPHeader* rtpInfo,
- WebRtc_UWord8* payloadData,
- WebRtc_UWord16 payloadSize,
- WebRtc_UWord32* offset)
+ uint8_t* payloadData,
+ uint16_t payloadSize,
+ uint32_t* offset)
{
_queueRWLock->AcquireLockShared();
RTPPacket *packet = _rtpQueue.front();
@@ -165,7 +165,7 @@
{
// Write data in a format that NetEQ and RTP Play can parse
fprintf(_rtpFile, "#!RTPencode%s\n", "1.0");
- WebRtc_UWord32 dummy_variable = 0;
+ uint32_t dummy_variable = 0;
// should be converted to network endian format, but does not matter when 0
if (fwrite(&dummy_variable, 4, 1, _rtpFile) != 1) {
return;
@@ -187,8 +187,8 @@
void RTPFile::ReadHeader()
{
- WebRtc_UWord32 start_sec, start_usec, source;
- WebRtc_UWord16 port, padding;
+ uint32_t start_sec, start_usec, source;
+ uint16_t port, padding;
char fileHeader[40];
EXPECT_TRUE(fgets(fileHeader, 40, _rtpFile) != 0);
EXPECT_EQ(1u, fread(&start_sec, 4, 1, _rtpFile));
@@ -203,16 +203,16 @@
padding=ntohs(padding);
}
-void RTPFile::Write(const WebRtc_UWord8 payloadType, const WebRtc_UWord32 timeStamp,
- const WebRtc_Word16 seqNo, const WebRtc_UWord8* payloadData,
- const WebRtc_UWord16 payloadSize, WebRtc_UWord32 frequency)
+void RTPFile::Write(const uint8_t payloadType, const uint32_t timeStamp,
+ const int16_t seqNo, const uint8_t* payloadData,
+ const uint16_t payloadSize, uint32_t frequency)
{
/* write RTP packet to file */
- WebRtc_UWord8 rtpHeader[12];
+ uint8_t rtpHeader[12];
MakeRTPheader(rtpHeader, payloadType, seqNo, timeStamp, 0);
- WebRtc_UWord16 lengthBytes = htons(12 + payloadSize + 8);
- WebRtc_UWord16 plen = htons(12 + payloadSize);
- WebRtc_UWord32 offsetMs;
+ uint16_t lengthBytes = htons(12 + payloadSize + 8);
+ uint16_t plen = htons(12 + payloadSize);
+ uint32_t offsetMs;
offsetMs = (timeStamp/(frequency/1000));
offsetMs = htonl(offsetMs);
@@ -233,14 +233,14 @@
}
}
-WebRtc_UWord16 RTPFile::Read(WebRtcRTPHeader* rtpInfo,
- WebRtc_UWord8* payloadData,
- WebRtc_UWord16 payloadSize,
- WebRtc_UWord32* offset)
+uint16_t RTPFile::Read(WebRtcRTPHeader* rtpInfo,
+ uint8_t* payloadData,
+ uint16_t payloadSize,
+ uint32_t* offset)
{
- WebRtc_UWord16 lengthBytes;
- WebRtc_UWord16 plen;
- WebRtc_UWord8 rtpHeader[12];
+ uint16_t lengthBytes;
+ uint16_t plen;
+ uint8_t rtpHeader[12];
size_t read_len = fread(&lengthBytes, 2, 1, _rtpFile);
/* Check if we have reached end of file. */
if ((read_len == 0) && feof(_rtpFile))
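Editorial note (not part of the patch): RTPFile::Write/Read above serialize packets in an rtpplay-style dump format — an 8-byte record header followed by the 12-byte RTP header and the payload. A sketch of the per-record header as written by the code above (all fields in network byte order):

    #include <stdint.h>
    // Per-packet record header preceding each dumped RTP packet:
    //   length    = 8 + 12 + payload size  (record header + RTP packet)
    //   plen      = 12 + payload size      (RTP packet only)
    //   offset_ms = timestamp / (frequency / 1000), arrival offset in ms
    struct RtpDumpRecordHeader {
      uint16_t length;     // htons() before writing
      uint16_t plen;       // htons() before writing
      uint32_t offset_ms;  // htonl() before writing
    };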
diff --git a/webrtc/modules/audio_coding/main/test/RTPFile.h b/webrtc/modules/audio_coding/main/test/RTPFile.h
index b5f5299..b6bbf87 100644
--- a/webrtc/modules/audio_coding/main/test/RTPFile.h
+++ b/webrtc/modules/audio_coding/main/test/RTPFile.h
@@ -25,38 +25,38 @@
public:
virtual ~RTPStream(){}
- virtual void Write(const WebRtc_UWord8 payloadType, const WebRtc_UWord32 timeStamp,
- const WebRtc_Word16 seqNo, const WebRtc_UWord8* payloadData,
- const WebRtc_UWord16 payloadSize, WebRtc_UWord32 frequency) = 0;
+ virtual void Write(const uint8_t payloadType, const uint32_t timeStamp,
+ const int16_t seqNo, const uint8_t* payloadData,
+ const uint16_t payloadSize, uint32_t frequency) = 0;
// Returns the packet's payload size. Zero should be treated as an
// end-of-stream (in the case that EndOfFile() is true) or an error.
- virtual WebRtc_UWord16 Read(WebRtcRTPHeader* rtpInfo,
- WebRtc_UWord8* payloadData,
- WebRtc_UWord16 payloadSize,
- WebRtc_UWord32* offset) = 0;
+ virtual uint16_t Read(WebRtcRTPHeader* rtpInfo,
+ uint8_t* payloadData,
+ uint16_t payloadSize,
+ uint32_t* offset) = 0;
virtual bool EndOfFile() const = 0;
protected:
- void MakeRTPheader(WebRtc_UWord8* rtpHeader,
- WebRtc_UWord8 payloadType, WebRtc_Word16 seqNo,
- WebRtc_UWord32 timeStamp, WebRtc_UWord32 ssrc);
- void ParseRTPHeader(WebRtcRTPHeader* rtpInfo, const WebRtc_UWord8* rtpHeader);
+ void MakeRTPheader(uint8_t* rtpHeader,
+ uint8_t payloadType, int16_t seqNo,
+ uint32_t timeStamp, uint32_t ssrc);
+ void ParseRTPHeader(WebRtcRTPHeader* rtpInfo, const uint8_t* rtpHeader);
};
class RTPPacket
{
public:
- RTPPacket(WebRtc_UWord8 payloadType, WebRtc_UWord32 timeStamp,
- WebRtc_Word16 seqNo, const WebRtc_UWord8* payloadData,
- WebRtc_UWord16 payloadSize, WebRtc_UWord32 frequency);
+ RTPPacket(uint8_t payloadType, uint32_t timeStamp,
+ int16_t seqNo, const uint8_t* payloadData,
+ uint16_t payloadSize, uint32_t frequency);
~RTPPacket();
- WebRtc_UWord8 payloadType;
- WebRtc_UWord32 timeStamp;
- WebRtc_Word16 seqNo;
- WebRtc_UWord8* payloadData;
- WebRtc_UWord16 payloadSize;
- WebRtc_UWord32 frequency;
+ uint8_t payloadType;
+ uint32_t timeStamp;
+ int16_t seqNo;
+ uint8_t* payloadData;
+ uint16_t payloadSize;
+ uint32_t frequency;
};
class RTPBuffer : public RTPStream
@@ -64,13 +64,13 @@
public:
RTPBuffer();
~RTPBuffer();
- void Write(const WebRtc_UWord8 payloadType, const WebRtc_UWord32 timeStamp,
- const WebRtc_Word16 seqNo, const WebRtc_UWord8* payloadData,
- const WebRtc_UWord16 payloadSize, WebRtc_UWord32 frequency);
- WebRtc_UWord16 Read(WebRtcRTPHeader* rtpInfo,
- WebRtc_UWord8* payloadData,
- WebRtc_UWord16 payloadSize,
- WebRtc_UWord32* offset);
+ void Write(const uint8_t payloadType, const uint32_t timeStamp,
+ const int16_t seqNo, const uint8_t* payloadData,
+ const uint16_t payloadSize, uint32_t frequency);
+ uint16_t Read(WebRtcRTPHeader* rtpInfo,
+ uint8_t* payloadData,
+ uint16_t payloadSize,
+ uint32_t* offset);
virtual bool EndOfFile() const;
private:
RWLockWrapper* _queueRWLock;
@@ -86,13 +86,13 @@
void Close();
void WriteHeader();
void ReadHeader();
- void Write(const WebRtc_UWord8 payloadType, const WebRtc_UWord32 timeStamp,
- const WebRtc_Word16 seqNo, const WebRtc_UWord8* payloadData,
- const WebRtc_UWord16 payloadSize, WebRtc_UWord32 frequency);
- WebRtc_UWord16 Read(WebRtcRTPHeader* rtpInfo,
- WebRtc_UWord8* payloadData,
- WebRtc_UWord16 payloadSize,
- WebRtc_UWord32* offset);
+ void Write(const uint8_t payloadType, const uint32_t timeStamp,
+ const int16_t seqNo, const uint8_t* payloadData,
+ const uint16_t payloadSize, uint32_t frequency);
+ uint16_t Read(WebRtcRTPHeader* rtpInfo,
+ uint8_t* payloadData,
+ uint16_t payloadSize,
+ uint32_t* offset);
bool EndOfFile() const { return _rtpEOF; }
private:
FILE* _rtpFile;
diff --git a/webrtc/modules/audio_coding/main/test/SpatialAudio.cc b/webrtc/modules/audio_coding/main/test/SpatialAudio.cc
index 15875ee..ec8dd1f 100644
--- a/webrtc/modules/audio_coding/main/test/SpatialAudio.cc
+++ b/webrtc/modules/audio_coding/main/test/SpatialAudio.cc
@@ -38,7 +38,7 @@
_outFile.Close();
}
-WebRtc_Word16
+int16_t
SpatialAudio::Setup()
{
// Create ACMs and the Channel;
@@ -53,7 +53,7 @@
// Register the receiver ACM in channel
_channel->RegisterReceiverACM(_acmReceiver);
- WebRtc_UWord16 sampFreqHz = 32000;
+ uint16_t sampFreqHz = 32000;
const std::string file_name =
webrtc::test::ResourcePath("audio_coding/testfile32kHz", "pcm");
@@ -79,9 +79,9 @@
// Register all available codecs as receiving codecs.
CodecInst codecInst;
int status;
- WebRtc_UWord8 num_encoders = _acmReceiver->NumberOfCodecs();
+ uint8_t num_encoders = _acmReceiver->NumberOfCodecs();
// Register all available codecs as receiving codecs once more.
- for (WebRtc_UWord8 n = 0; n < num_encoders; n++) {
+ for (uint8_t n = 0; n < num_encoders; n++) {
status = _acmReceiver->Codec(n, &codecInst);
if (status < 0) {
printf("Error in Codec(), no matching codec found");
@@ -109,11 +109,11 @@
Setup();
CodecInst codecInst;
- _acmLeft->Codec((WebRtc_UWord8)1, &codecInst);
+ _acmLeft->Codec((uint8_t)1, &codecInst);
CHECK_ERROR(_acmLeft->RegisterSendCodec(codecInst));
EncodeDecode();
- WebRtc_Word16 pannCntr = 0;
+ int16_t pannCntr = 0;
double leftPanning[NUM_PANN_COEFFS] =
{1.00, 0.95, 0.90, 0.85, 0.80, 0.75, 0.70, 0.60, 0.55, 0.50};
@@ -122,7 +122,7 @@
while((pannCntr + 1) < NUM_PANN_COEFFS)
{
- _acmLeft->Codec((WebRtc_UWord8)0, &codecInst);
+ _acmLeft->Codec((uint8_t)0, &codecInst);
codecInst.pacsize = 480;
CHECK_ERROR(_acmLeft->RegisterSendCodec(codecInst));
CHECK_ERROR(_acmRight->RegisterSendCodec(codecInst));
@@ -131,7 +131,7 @@
pannCntr++;
// Change codec
- _acmLeft->Codec((WebRtc_UWord8)3, &codecInst);
+ _acmLeft->Codec((uint8_t)3, &codecInst);
codecInst.pacsize = 320;
CHECK_ERROR(_acmLeft->RegisterSendCodec(codecInst));
CHECK_ERROR(_acmRight->RegisterSendCodec(codecInst));
@@ -144,11 +144,11 @@
}
}
- _acmLeft->Codec((WebRtc_UWord8)4, &codecInst);
+ _acmLeft->Codec((uint8_t)4, &codecInst);
CHECK_ERROR(_acmLeft->RegisterSendCodec(codecInst));
EncodeDecode();
- _acmLeft->Codec((WebRtc_UWord8)0, &codecInst);
+ _acmLeft->Codec((uint8_t)0, &codecInst);
codecInst.pacsize = 480;
CHECK_ERROR(_acmLeft->RegisterSendCodec(codecInst));
CHECK_ERROR(_acmRight->RegisterSendCodec(codecInst));
@@ -174,7 +174,7 @@
const double rightPanning)
{
AudioFrame audioFrame;
- WebRtc_Word32 outFileSampFreq = _outFile.SamplingFrequency();
+ int32_t outFileSampFreq = _outFile.SamplingFrequency();
const double rightToLeftRatio = rightPanning / leftPanning;
@@ -185,14 +185,14 @@
_inFile.Read10MsData(audioFrame);
for(int n = 0; n < audioFrame.samples_per_channel_; n++)
{
- audioFrame.data_[n] = (WebRtc_Word16)floor(
+ audioFrame.data_[n] = (int16_t)floor(
audioFrame.data_[n] * leftPanning + 0.5);
}
CHECK_ERROR(_acmLeft->Add10MsData(audioFrame));
for(int n = 0; n < audioFrame.samples_per_channel_; n++)
{
- audioFrame.data_[n] = (WebRtc_Word16)floor(
+ audioFrame.data_[n] = (int16_t)floor(
audioFrame.data_[n] * rightToLeftRatio + 0.5);
}
CHECK_ERROR(_acmRight->Add10MsData(audioFrame));
@@ -211,7 +211,7 @@
SpatialAudio::EncodeDecode()
{
AudioFrame audioFrame;
- WebRtc_Word32 outFileSampFreq = _outFile.SamplingFrequency();
+ int32_t outFileSampFreq = _outFile.SamplingFrequency();
_channel->SetIsStereo(false);
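Editorial note (not part of the patch): in SpatialAudio::EncodeDecode above, the frame is first scaled by the left gain and fed to the left encoder, then scaled again by rightPanning / leftPanning, so the right encoder effectively receives leftPanning * (rightPanning / leftPanning) = rightPanning times the input. A minimal sketch of the per-sample gain step, with a hypothetical helper name:

    #include <math.h>
    #include <stdint.h>
    // Scale one 16-bit sample by |gain|, rounding to the nearest integer.
    static int16_t ApplyGain(int16_t sample, double gain) {
      return static_cast<int16_t>(floor(sample * gain + 0.5));
    }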
diff --git a/webrtc/modules/audio_coding/main/test/SpatialAudio.h b/webrtc/modules/audio_coding/main/test/SpatialAudio.h
index 6a88327..051d596 100644
--- a/webrtc/modules/audio_coding/main/test/SpatialAudio.h
+++ b/webrtc/modules/audio_coding/main/test/SpatialAudio.h
@@ -29,7 +29,7 @@
void Perform();
private:
- WebRtc_Word16 Setup();
+ int16_t Setup();
void EncodeDecode(double leftPanning, double rightPanning);
void EncodeDecode();
diff --git a/webrtc/modules/audio_coding/main/test/TestFEC.cc b/webrtc/modules/audio_coding/main/test/TestFEC.cc
index 9f5f022..3aad3fe 100644
--- a/webrtc/modules/audio_coding/main/test/TestFEC.cc
+++ b/webrtc/modules/audio_coding/main/test/TestFEC.cc
@@ -71,13 +71,13 @@
_acmA->InitializeReceiver();
_acmB->InitializeReceiver();
- WebRtc_UWord8 numEncoders = _acmA->NumberOfCodecs();
+ uint8_t numEncoders = _acmA->NumberOfCodecs();
CodecInst myCodecParam;
if(_testMode != 0)
{
printf("Registering codecs at receiver... \n");
}
- for(WebRtc_UWord8 n = 0; n < numEncoders; n++)
+ for(uint8_t n = 0; n < numEncoders; n++)
{
_acmB->Codec(n, &myCodecParam);
if(_testMode != 0)
@@ -503,19 +503,19 @@
}
}
-WebRtc_Word32 TestFEC::SetVAD(bool enableDTX, bool enableVAD, ACMVADMode vadMode)
+int32_t TestFEC::SetVAD(bool enableDTX, bool enableVAD, ACMVADMode vadMode)
{
if(_testMode != 0)
{
printf("DTX %s; VAD %s; VAD-Mode %d\n",
enableDTX? "ON":"OFF",
enableVAD? "ON":"OFF",
- (WebRtc_Word16)vadMode);
+ (int16_t)vadMode);
}
return _acmA->SetVAD(enableDTX, enableVAD, vadMode);
}
-WebRtc_Word16 TestFEC::RegisterSendCodec(char side, char* codecName, WebRtc_Word32 samplingFreqHz)
+int16_t TestFEC::RegisterSendCodec(char side, char* codecName, int32_t samplingFreqHz)
{
if(_testMode != 0)
{
@@ -566,9 +566,9 @@
{
AudioFrame audioFrame;
- WebRtc_UWord16 msecPassed = 0;
- WebRtc_UWord32 secPassed = 0;
- WebRtc_Word32 outFreqHzB = _outFileB.SamplingFrequency();
+ uint16_t msecPassed = 0;
+ uint32_t secPassed = 0;
+ int32_t outFreqHzB = _outFileB.SamplingFrequency();
while(!_inFileA.EndOfFile())
{
@@ -599,7 +599,7 @@
_inFileA.Rewind();
}
-void TestFEC::OpenOutFile(WebRtc_Word16 test_number) {
+void TestFEC::OpenOutFile(int16_t test_number) {
std::string file_name;
std::stringstream file_stream;
file_stream << webrtc::test::OutputPath();
diff --git a/webrtc/modules/audio_coding/main/test/TestFEC.h b/webrtc/modules/audio_coding/main/test/TestFEC.h
index 00e951f..666058c 100644
--- a/webrtc/modules/audio_coding/main/test/TestFEC.h
+++ b/webrtc/modules/audio_coding/main/test/TestFEC.h
@@ -28,11 +28,11 @@
// The default value of '-1' indicates that the registration is based only on codec name,
// and a matching sampling frequency is not required. This is useful for codecs which support
// several sampling frequencies.
- WebRtc_Word16 RegisterSendCodec(char side, char* codecName, WebRtc_Word32 sampFreqHz = -1);
+ int16_t RegisterSendCodec(char side, char* codecName, int32_t sampFreqHz = -1);
void Run();
- void OpenOutFile(WebRtc_Word16 testNumber);
+ void OpenOutFile(int16_t testNumber);
void DisplaySendReceiveCodec();
- WebRtc_Word32 SetVAD(bool enableDTX, bool enableVAD, ACMVADMode vadMode);
+ int32_t SetVAD(bool enableDTX, bool enableVAD, ACMVADMode vadMode);
AudioCodingModule* _acmA;
AudioCodingModule* _acmB;
@@ -40,7 +40,7 @@
PCMFile _inFileA;
PCMFile _outFileB;
- WebRtc_Word16 _testCntr;
+ int16_t _testCntr;
int _testMode;
};
diff --git a/webrtc/modules/audio_coding/main/test/TestStereo.cc b/webrtc/modules/audio_coding/main/test/TestStereo.cc
index e1186ba..d692703 100644
--- a/webrtc/modules/audio_coding/main/test/TestStereo.cc
+++ b/webrtc/modules/audio_coding/main/test/TestStereo.cc
@@ -42,15 +42,15 @@
return;
}
-WebRtc_Word32 TestPackStereo::SendData(
+int32_t TestPackStereo::SendData(
const FrameType frame_type,
- const WebRtc_UWord8 payload_type,
- const WebRtc_UWord32 timestamp,
- const WebRtc_UWord8* payload_data,
- const WebRtc_UWord16 payload_size,
+ const uint8_t payload_type,
+ const uint32_t timestamp,
+ const uint8_t* payload_data,
+ const uint16_t payload_size,
const RTPFragmentationHeader* fragmentation) {
WebRtcRTPHeader rtp_info;
- WebRtc_Word32 status = 0;
+ int32_t status = 0;
rtp_info.header.markerBit = false;
rtp_info.header.ssrc = 0;
@@ -86,11 +86,11 @@
return status;
}
-WebRtc_UWord16 TestPackStereo::payload_size() {
+uint16_t TestPackStereo::payload_size() {
return payload_size_;
}
-WebRtc_UWord32 TestPackStereo::timestamp_diff() {
+uint32_t TestPackStereo::timestamp_diff() {
return timestamp_diff_;
}
@@ -145,7 +145,7 @@
}
void TestStereo::Perform() {
- WebRtc_UWord16 frequency_hz;
+ uint16_t frequency_hz;
int audio_channels;
int codec_channels;
bool dtx;
@@ -179,21 +179,21 @@
EXPECT_EQ(0, acm_b_->InitializeReceiver());
// Register all available codecs as receiving codecs.
- WebRtc_UWord8 num_encoders = acm_a_->NumberOfCodecs();
+ uint8_t num_encoders = acm_a_->NumberOfCodecs();
CodecInst my_codec_param;
- for (WebRtc_UWord8 n = 0; n < num_encoders; n++) {
+ for (uint8_t n = 0; n < num_encoders; n++) {
EXPECT_EQ(0, acm_b_->Codec(n, &my_codec_param));
EXPECT_EQ(0, acm_b_->RegisterReceiveCodec(my_codec_param));
}
// Test that unregister all receive codecs works.
- for (WebRtc_UWord8 n = 0; n < num_encoders; n++) {
+ for (uint8_t n = 0; n < num_encoders; n++) {
EXPECT_EQ(0, acm_b_->Codec(n, &my_codec_param));
EXPECT_EQ(0, acm_b_->UnregisterReceiveCodec(my_codec_param.pltype));
}
// Register all available codecs as receiving codecs once more.
- for (WebRtc_UWord8 n = 0; n < num_encoders; n++) {
+ for (uint8_t n = 0; n < num_encoders; n++) {
EXPECT_EQ(0, acm_b_->Codec(n, &my_codec_param));
EXPECT_EQ(0, acm_b_->RegisterReceiveCodec(my_codec_param));
}
@@ -686,7 +686,7 @@
RegisterSendCodec('A', codec_opus, 48000, 32000, 960, codec_channels,
opus_pltype_);
CodecInst opus_codec_param;
- for (WebRtc_UWord8 n = 0; n < num_encoders; n++) {
+ for (uint8_t n = 0; n < num_encoders; n++) {
EXPECT_EQ(0, acm_b_->Codec(n, &opus_codec_param));
if (!strcmp(opus_codec_param.plname, "opus")) {
opus_codec_param.channels = 1;
@@ -777,7 +777,7 @@
// channels - number of channels; 1 for mono, 2 for stereo
// payload_type - payload type for the codec
void TestStereo::RegisterSendCodec(char side, char* codec_name,
- WebRtc_Word32 sampling_freq_hz, int rate,
+ int32_t sampling_freq_hz, int rate,
int pack_size, int channels,
int payload_type) {
if (test_mode_ != 0) {
@@ -793,12 +793,12 @@
// packet. Add 0.875 to always round up to a whole byte.
// For Celt the packet size in bytes is already counting the stereo part.
if (!strcmp(codec_name, "CELT")) {
- pack_size_bytes_ = (WebRtc_UWord16)(
+ pack_size_bytes_ = (uint16_t)(
static_cast<float>(pack_size * rate) /
static_cast<float>(sampling_freq_hz * 8) + 0.875)
/ channels;
} else {
- pack_size_bytes_ = (WebRtc_UWord16)(
+ pack_size_bytes_ = (uint16_t)(
static_cast<float>(pack_size * rate) /
static_cast<float>(sampling_freq_hz * 8) + 0.875);
}
@@ -834,9 +834,9 @@
int percent_loss) {
AudioFrame audio_frame;
- WebRtc_Word32 out_freq_hz_b = out_file_.SamplingFrequency();
- WebRtc_UWord16 rec_size;
- WebRtc_UWord32 time_stamp_diff;
+ int32_t out_freq_hz_b = out_file_.SamplingFrequency();
+ uint16_t rec_size;
+ uint32_t time_stamp_diff;
channel->reset_payload_size();
int error_count = 0;
@@ -908,7 +908,7 @@
channel->set_lost_packet(false);
}
-void TestStereo::OpenOutFile(WebRtc_Word16 test_number) {
+void TestStereo::OpenOutFile(int16_t test_number) {
std::string file_name;
std::stringstream file_stream;
file_stream << webrtc::test::OutputPath() << "teststereo_out_"
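
Aside: the RegisterSendCodec hunk above derives pack_size_bytes_ from the packetization size (in samples), the bit rate, and the sampling frequency, adding 0.875 before truncation so the result rounds up to a whole byte, and dividing by the channel count for CELT, whose rate already covers both channels. A standalone sketch of the same arithmetic (PacketSizeBytes and the example parameters are illustrative, not part of the patch):

#include <stdint.h>
#include <stdio.h>

/* Per-packet payload size in bytes:
 *   bytes = pack_size_samples * rate_bps / (sample_rate_hz * 8), rounded up
 * by adding 0.875 before the cast. If the rate already covers all channels
 * (as for CELT in the test), the result is divided by the channel count. */
static uint16_t PacketSizeBytes(int pack_size_samples, int rate_bps,
                                int sample_rate_hz, int channels,
                                int rate_covers_all_channels) {
  float bytes = (float)(pack_size_samples * rate_bps) /
                (float)(sample_rate_hz * 8) + 0.875f;
  if (rate_covers_all_channels)
    return (uint16_t)((uint16_t)bytes / channels);
  return (uint16_t)bytes;
}

int main(void) {
  /* 30 ms of a 16 kHz codec at 32 kbps: 480 samples -> 120 bytes. */
  printf("%u bytes\n", (unsigned)PacketSizeBytes(480, 32000, 16000, 1, 0));
  return 0;
}
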
diff --git a/webrtc/modules/audio_coding/main/test/TestStereo.h b/webrtc/modules/audio_coding/main/test/TestStereo.h
index e990515..ecfce5e 100644
--- a/webrtc/modules/audio_coding/main/test/TestStereo.h
+++ b/webrtc/modules/audio_coding/main/test/TestStereo.h
@@ -32,26 +32,26 @@
void RegisterReceiverACM(AudioCodingModule* acm);
- virtual WebRtc_Word32 SendData(const FrameType frame_type,
- const WebRtc_UWord8 payload_type,
- const WebRtc_UWord32 timestamp,
- const WebRtc_UWord8* payload_data,
- const WebRtc_UWord16 payload_size,
- const RTPFragmentationHeader* fragmentation);
+ virtual int32_t SendData(const FrameType frame_type,
+ const uint8_t payload_type,
+ const uint32_t timestamp,
+ const uint8_t* payload_data,
+ const uint16_t payload_size,
+ const RTPFragmentationHeader* fragmentation);
- WebRtc_UWord16 payload_size();
- WebRtc_UWord32 timestamp_diff();
+ uint16_t payload_size();
+ uint32_t timestamp_diff();
void reset_payload_size();
void set_codec_mode(StereoMonoMode mode);
void set_lost_packet(bool lost);
private:
AudioCodingModule* receiver_acm_;
- WebRtc_Word16 seq_no_;
- WebRtc_UWord32 timestamp_diff_;
- WebRtc_UWord32 last_in_timestamp_;
- WebRtc_UWord64 total_bytes_;
- WebRtc_UWord16 payload_size_;
+ int16_t seq_no_;
+ uint32_t timestamp_diff_;
+ uint32_t last_in_timestamp_;
+ uint64_t total_bytes_;
+ uint16_t payload_size_;
StereoMonoMode codec_mode_;
// Simulate packet losses
bool lost_packet_;
@@ -68,20 +68,20 @@
// codec name and a sampling frequency matching is not required. This is useful
// for codecs which support several sampling frequencies.
void RegisterSendCodec(char side, char* codec_name,
- WebRtc_Word32 samp_freq_hz, int rate, int pack_size,
+ int32_t samp_freq_hz, int rate, int pack_size,
int channels, int payload_type);
void Run(TestPackStereo* channel, int in_channels, int out_channels,
int percent_loss = 0);
- void OpenOutFile(WebRtc_Word16 test_number);
+ void OpenOutFile(int16_t test_number);
void DisplaySendReceiveCodec();
- WebRtc_Word32 SendData(const FrameType frame_type,
- const WebRtc_UWord8 payload_type,
- const WebRtc_UWord32 timestamp,
- const WebRtc_UWord8* payload_data,
- const WebRtc_UWord16 payload_size,
- const RTPFragmentationHeader* fragmentation);
+ int32_t SendData(const FrameType frame_type,
+ const uint8_t payload_type,
+ const uint32_t timestamp,
+ const uint8_t* payload_data,
+ const uint16_t payload_size,
+ const RTPFragmentationHeader* fragmentation);
int test_mode_;
@@ -93,9 +93,9 @@
PCMFile* in_file_stereo_;
PCMFile* in_file_mono_;
PCMFile out_file_;
- WebRtc_Word16 test_cntr_;
- WebRtc_UWord16 pack_size_samp_;
- WebRtc_UWord16 pack_size_bytes_;
+ int16_t test_cntr_;
+ uint16_t pack_size_samp_;
+ uint16_t pack_size_bytes_;
int counter_;
char* send_codec_name_;
diff --git a/webrtc/modules/audio_coding/main/test/TestVADDTX.cc b/webrtc/modules/audio_coding/main/test/TestVADDTX.cc
index 9832565..bd89dd5 100644
--- a/webrtc/modules/audio_coding/main/test/TestVADDTX.cc
+++ b/webrtc/modules/audio_coding/main/test/TestVADDTX.cc
@@ -71,13 +71,13 @@
_acmA->InitializeReceiver();
_acmB->InitializeReceiver();
- WebRtc_UWord8 numEncoders = _acmA->NumberOfCodecs();
+ uint8_t numEncoders = _acmA->NumberOfCodecs();
CodecInst myCodecParam;
if(_testMode != 0)
{
printf("Registering codecs at receiver... \n");
}
- for(WebRtc_UWord8 n = 0; n < numEncoders; n++)
+ for(uint8_t n = 0; n < numEncoders; n++)
{
_acmB->Codec(n, &myCodecParam);
if(_testMode != 0)
@@ -99,8 +99,8 @@
_acmA->RegisterVADCallback(&_monitor);
- WebRtc_Word16 testCntr = 1;
- WebRtc_Word16 testResults = 0;
+ int16_t testCntr = 1;
+ int16_t testResults = 0;
#ifdef WEBRTC_CODEC_ISAC
// Open output file
@@ -232,7 +232,7 @@
_testResults += VerifyTest();
}
-void TestVADDTX::SetVAD(bool statusDTX, bool statusVAD, WebRtc_Word16 vadMode)
+void TestVADDTX::SetVAD(bool statusDTX, bool statusVAD, int16_t vadMode)
{
bool dtxEnabled, vadEnabled;
ACMVADMode vadModeSet;
@@ -261,7 +261,7 @@
if(vadModeSet != vadMode)
{
printf("VAD mode: %d not the same as requested: %d\n",
- (WebRtc_Word16)vadModeSet, (WebRtc_Word16)vadMode);
+ (int16_t)vadModeSet, (int16_t)vadMode);
}
}
@@ -293,10 +293,10 @@
return retStruct;
}
-WebRtc_Word16 TestVADDTX::RegisterSendCodec(char side,
- char* codecName,
- WebRtc_Word32 samplingFreqHz,
- WebRtc_Word32 rateKbps)
+int16_t TestVADDTX::RegisterSendCodec(char side,
+ char* codecName,
+ int32_t samplingFreqHz,
+ int32_t rateKbps)
{
if(_testMode != 0)
{
@@ -326,10 +326,10 @@
}
CodecInst myCodecParam;
- for(WebRtc_Word16 codecCntr = 0; codecCntr < myACM->NumberOfCodecs();
+ for(int16_t codecCntr = 0; codecCntr < myACM->NumberOfCodecs();
codecCntr++)
{
- CHECK_ERROR(myACM->Codec((WebRtc_UWord8)codecCntr, &myCodecParam));
+ CHECK_ERROR(myACM->Codec((uint8_t)codecCntr, &myCodecParam));
if(!STR_CASE_CMP(myCodecParam.plname, codecName))
{
if((samplingFreqHz == -1) || (myCodecParam.plfreq == samplingFreqHz))
@@ -354,9 +354,9 @@
{
AudioFrame audioFrame;
- WebRtc_UWord16 SamplesIn10MsecA = _inFileA.PayloadLength10Ms();
- WebRtc_UWord32 timestampA = 1;
- WebRtc_Word32 outFreqHzB = _outFileB.SamplingFrequency();
+ uint16_t SamplesIn10MsecA = _inFileA.PayloadLength10Ms();
+ uint32_t timestampA = 1;
+ int32_t outFreqHzB = _outFileB.SamplingFrequency();
while(!_inFileA.EndOfFile())
{
@@ -378,7 +378,7 @@
_monitor.ResetStatistics();
}
-void TestVADDTX::OpenOutFile(WebRtc_Word16 test_number) {
+void TestVADDTX::OpenOutFile(int16_t test_number) {
std::string file_name;
std::stringstream file_stream;
file_stream << webrtc::test::OutputPath();
@@ -393,12 +393,12 @@
}
-WebRtc_Word16 TestVADDTX::VerifyTest()
+int16_t TestVADDTX::VerifyTest()
{
// Verify empty frame result
- WebRtc_UWord8 statusEF = 0;
- WebRtc_UWord8 vadPattern = 0;
- WebRtc_UWord8 emptyFramePattern[6];
+ uint8_t statusEF = 0;
+ uint8_t vadPattern = 0;
+ uint8_t emptyFramePattern[6];
CodecInst myCodecParam;
_acmA->SendCodec(&myCodecParam);
bool dtxInUse = true;
@@ -493,7 +493,7 @@
{
}
-WebRtc_Word32 ActivityMonitor::InFrameType(WebRtc_Word16 frameType)
+int32_t ActivityMonitor::InFrameType(int16_t frameType)
{
_counter[frameType]++;
return 0;
@@ -522,7 +522,7 @@
_counter[0] = _counter[1] = _counter[2] = _counter[3] = _counter[4] = _counter[5] = 0;
}
-void ActivityMonitor::GetStatistics(WebRtc_UWord32* getCounter)
+void ActivityMonitor::GetStatistics(uint32_t* getCounter)
{
for (int ii = 0; ii < 6; ii++)
{
diff --git a/webrtc/modules/audio_coding/main/test/TestVADDTX.h b/webrtc/modules/audio_coding/main/test/TestVADDTX.h
index e8f9e1e..f85cff3 100644
--- a/webrtc/modules/audio_coding/main/test/TestVADDTX.h
+++ b/webrtc/modules/audio_coding/main/test/TestVADDTX.h
@@ -29,10 +29,10 @@
public:
ActivityMonitor();
~ActivityMonitor();
- WebRtc_Word32 InFrameType(WebRtc_Word16 frameType);
+ int32_t InFrameType(int16_t frameType);
void PrintStatistics(int testMode);
void ResetStatistics();
- void GetStatistics(WebRtc_UWord32* getCounter);
+ void GetStatistics(uint32_t* getCounter);
private:
// counting according to
/*enum WebRtcACMEncodingType
@@ -44,7 +44,7 @@
kPassiveDTXWB,
kPassiveDTXSWB
};*/
- WebRtc_UWord32 _counter[6];
+ uint32_t _counter[6];
};
class TestVADDTX : public ACMTest
@@ -57,17 +57,17 @@
private:
// Registration can be based on codec name only, codec name and sampling frequency, or
// codec name, sampling frequency and rate.
- WebRtc_Word16 RegisterSendCodec(char side,
+ int16_t RegisterSendCodec(char side,
char* codecName,
- WebRtc_Word32 samplingFreqHz = -1,
- WebRtc_Word32 rateKhz = -1);
+ int32_t samplingFreqHz = -1,
+ int32_t rateKhz = -1);
void Run();
- void OpenOutFile(WebRtc_Word16 testNumber);
+ void OpenOutFile(int16_t testNumber);
void runTestCases();
void runTestInternalDTX();
- void SetVAD(bool statusDTX, bool statusVAD, WebRtc_Word16 vadMode);
+ void SetVAD(bool statusDTX, bool statusVAD, int16_t vadMode);
VADDTXstruct GetVAD();
- WebRtc_Word16 VerifyTest();//VADDTXstruct setDTX, VADDTXstruct getDTX);
+ int16_t VerifyTest();//VADDTXstruct setDTX, VADDTXstruct getDTX);
AudioCodingModule* _acmA;
AudioCodingModule* _acmB;
@@ -77,7 +77,7 @@
PCMFile _outFileB;
ActivityMonitor _monitor;
- WebRtc_UWord32 _statCounter[6];
+ uint32_t _statCounter[6];
int _testMode;
int _testResults;
diff --git a/webrtc/modules/audio_coding/main/test/TimedTrace.cc b/webrtc/modules/audio_coding/main/test/TimedTrace.cc
index 6bf301f..dbfe4e7 100644
--- a/webrtc/modules/audio_coding/main/test/TimedTrace.cc
+++ b/webrtc/modules/audio_coding/main/test/TimedTrace.cc
@@ -28,7 +28,7 @@
_timedTraceFile = NULL;
}
-WebRtc_Word16
+int16_t
TimedTrace::SetUp(char* fileName)
{
if(_timedTraceFile == NULL)
@@ -62,8 +62,8 @@
void
TimedTrace::TimedLogg(char* message)
-{
- unsigned int minutes = (WebRtc_UWord32)floor(_timeEllapsedSec / 60.0);
+{
+ unsigned int minutes = (uint32_t)floor(_timeEllapsedSec / 60.0);
double seconds = _timeEllapsedSec - minutes * 60;
//char myFormat[100] = "%8.2f, %3u:%05.2f: %s\n";
if(_timedTraceFile != NULL)
diff --git a/webrtc/modules/audio_coding/main/test/TimedTrace.h b/webrtc/modules/audio_coding/main/test/TimedTrace.h
index d37d287..fd0790f 100644
--- a/webrtc/modules/audio_coding/main/test/TimedTrace.h
+++ b/webrtc/modules/audio_coding/main/test/TimedTrace.h
@@ -26,7 +26,7 @@
void SetTimeEllapsed(double myTime);
double TimeEllapsed();
void Tick10Msec();
- WebRtc_Word16 SetUp(char* fileName);
+ int16_t SetUp(char* fileName);
void TimedLogg(char* message);
private:
diff --git a/webrtc/modules/audio_coding/main/test/TwoWayCommunication.cc b/webrtc/modules/audio_coding/main/test/TwoWayCommunication.cc
index 6b569fa..45ae525 100644
--- a/webrtc/modules/audio_coding/main/test/TwoWayCommunication.cc
+++ b/webrtc/modules/audio_coding/main/test/TwoWayCommunication.cc
@@ -67,16 +67,16 @@
}
-WebRtc_UWord8
-TwoWayCommunication::ChooseCodec(WebRtc_UWord8* codecID_A,
- WebRtc_UWord8* codecID_B)
+uint8_t
+TwoWayCommunication::ChooseCodec(uint8_t* codecID_A,
+ uint8_t* codecID_B)
{
AudioCodingModule* tmpACM = AudioCodingModule::Create(0);
- WebRtc_UWord8 noCodec = tmpACM->NumberOfCodecs();
+ uint8_t noCodec = tmpACM->NumberOfCodecs();
CodecInst codecInst;
printf("List of Supported Codecs\n");
printf("========================\n");
- for(WebRtc_UWord8 codecCntr = 0; codecCntr < noCodec; codecCntr++)
+ for(uint8_t codecCntr = 0; codecCntr < noCodec; codecCntr++)
{
tmpACM->Codec(codecCntr, &codecInst);
printf("%d- %s\n", codecCntr, codecInst.plname);
@@ -84,18 +84,18 @@
printf("\nChoose a send codec for side A [0]: ");
char myStr[15] = "";
EXPECT_TRUE(fgets(myStr, 10, stdin) != NULL);
- *codecID_A = (WebRtc_UWord8)atoi(myStr);
+ *codecID_A = (uint8_t)atoi(myStr);
printf("\nChoose a send codec for side B [0]: ");
EXPECT_TRUE(fgets(myStr, 10, stdin) != NULL);
- *codecID_B = (WebRtc_UWord8)atoi(myStr);
+ *codecID_B = (uint8_t)atoi(myStr);
AudioCodingModule::Destroy(tmpACM);
printf("\n");
return 0;
}
-WebRtc_Word16 TwoWayCommunication::SetUp()
+int16_t TwoWayCommunication::SetUp()
{
_acmA = AudioCodingModule::Create(1);
_acmB = AudioCodingModule::Create(2);
@@ -103,8 +103,8 @@
_acmRefA = AudioCodingModule::Create(3);
_acmRefB = AudioCodingModule::Create(4);
- WebRtc_UWord8 codecID_A;
- WebRtc_UWord8 codecID_B;
+ uint8_t codecID_A;
+ uint8_t codecID_B;
ChooseCodec(&codecID_A, &codecID_B);
CodecInst codecInst_A;
@@ -140,8 +140,8 @@
CHECK_ERROR(_acmRefB->RegisterSendCodec(codecInst_B));
CHECK_ERROR(_acmRefB->RegisterReceiveCodec(codecInst_A));
- WebRtc_UWord16 frequencyHz;
-
+ uint16_t frequencyHz;
+
//--- Input A
std::string in_file_name =
webrtc::test::ResourcePath("audio_coding/testfile32kHz", "pcm");
@@ -202,7 +202,7 @@
return 0;
}
-WebRtc_Word16 TwoWayCommunication::SetUpAutotest()
+int16_t TwoWayCommunication::SetUpAutotest()
{
_acmA = AudioCodingModule::Create(1);
_acmB = AudioCodingModule::Create(2);
@@ -244,7 +244,7 @@
CHECK_ERROR(_acmRefB->RegisterSendCodec(codecInst_B));
CHECK_ERROR(_acmRefB->RegisterReceiveCodec(codecInst_A));
- WebRtc_UWord16 frequencyHz;
+ uint16_t frequencyHz;
//--- Input A and B
std::string in_file_name =
@@ -312,8 +312,8 @@
unsigned int msecPassed = 0;
unsigned int secPassed = 0;
- WebRtc_Word32 outFreqHzA = _outFileA.SamplingFrequency();
- WebRtc_Word32 outFreqHzB = _outFileB.SamplingFrequency();
+ int32_t outFreqHzA = _outFileA.SamplingFrequency();
+ int32_t outFreqHzB = _outFileB.SamplingFrequency();
AudioFrame audioFrame;
diff --git a/webrtc/modules/audio_coding/main/test/TwoWayCommunication.h b/webrtc/modules/audio_coding/main/test/TwoWayCommunication.h
index fe53532..f955f3e 100644
--- a/webrtc/modules/audio_coding/main/test/TwoWayCommunication.h
+++ b/webrtc/modules/audio_coding/main/test/TwoWayCommunication.h
@@ -27,9 +27,9 @@
void Perform();
private:
- WebRtc_UWord8 ChooseCodec(WebRtc_UWord8* codecID_A, WebRtc_UWord8* codecID_B);
- WebRtc_Word16 SetUp();
- WebRtc_Word16 SetUpAutotest();
+ uint8_t ChooseCodec(uint8_t* codecID_A, uint8_t* codecID_B);
+ int16_t SetUp();
+ int16_t SetUpAutotest();
AudioCodingModule* _acmA;
AudioCodingModule* _acmB;
diff --git a/webrtc/modules/audio_coding/main/test/delay_test.cc b/webrtc/modules/audio_coding/main/test/delay_test.cc
index ff63312..cd0367d 100644
--- a/webrtc/modules/audio_coding/main/test/delay_test.cc
+++ b/webrtc/modules/audio_coding/main/test/delay_test.cc
@@ -106,7 +106,7 @@
ASSERT_EQ(0, acm_b_->SetMinimumPlayoutDelay(FLAGS_delay));
}
- WebRtc_UWord8 num_encoders = acm_a_->NumberOfCodecs();
+ uint8_t num_encoders = acm_a_->NumberOfCodecs();
CodecInst my_codec_param;
for(int n = 0; n < num_encoders; n++) {
acm_b_->Codec(n, &my_codec_param);
diff --git a/webrtc/modules/audio_coding/main/test/dual_stream_unittest.cc b/webrtc/modules/audio_coding/main/test/dual_stream_unittest.cc
index 1e3d08e..d36e770 100644
--- a/webrtc/modules/audio_coding/main/test/dual_stream_unittest.cc
+++ b/webrtc/modules/audio_coding/main/test/dual_stream_unittest.cc
@@ -26,11 +26,11 @@
DualStreamTest();
~DualStreamTest();
- WebRtc_Word32 SendData(FrameType frameType, WebRtc_UWord8 payload_type,
- WebRtc_UWord32 timestamp,
- const WebRtc_UWord8* payload_data,
- WebRtc_UWord16 payload_size,
- const RTPFragmentationHeader* fragmentation);
+ int32_t SendData(FrameType frameType, uint8_t payload_type,
+ uint32_t timestamp,
+ const uint8_t* payload_data,
+ uint16_t payload_size,
+ const RTPFragmentationHeader* fragmentation);
void Perform(bool start_in_sync, int num_channels_input);
@@ -282,9 +282,9 @@
}
}
-WebRtc_Word32 DualStreamTest::SendData(
- FrameType frameType, WebRtc_UWord8 payload_type, WebRtc_UWord32 timestamp,
- const WebRtc_UWord8* payload_data, WebRtc_UWord16 payload_size,
+int32_t DualStreamTest::SendData(
+ FrameType frameType, uint8_t payload_type, uint32_t timestamp,
+ const uint8_t* payload_data, uint16_t payload_size,
const RTPFragmentationHeader* fragmentation) {
int position;
int stream_index;
diff --git a/webrtc/modules/audio_coding/main/test/iSACTest.cc b/webrtc/modules/audio_coding/main/test/iSACTest.cc
index a40f2b7..ce1e0ea 100644
--- a/webrtc/modules/audio_coding/main/test/iSACTest.cc
+++ b/webrtc/modules/audio_coding/main/test/iSACTest.cc
@@ -47,7 +47,7 @@
}
-WebRtc_Word16 SetISAConfig(
+int16_t SetISAConfig(
ACMTestISACConfig& isacConfig,
AudioCodingModule* acm,
int testMode)
@@ -112,8 +112,8 @@
(isacConfig.initRateBitPerSec != 0))
{
CHECK_ERROR(acm->ConfigISACBandwidthEstimator(
- (WebRtc_UWord8)isacConfig.initFrameSizeInMsec,
- (WebRtc_UWord16)isacConfig.initRateBitPerSec,
+ (uint8_t)isacConfig.initFrameSizeInMsec,
+ (uint16_t)isacConfig.initRateBitPerSec,
isacConfig.enforceFrameSize));
if((isacConfig.initFrameSizeInMsec != 0) && (testMode != 0))
{
@@ -146,7 +146,7 @@
}
-WebRtc_Word16
+int16_t
ISACTest::Setup()
{
int codecCntr;
@@ -245,7 +245,7 @@
Setup();
- WebRtc_Word16 testNr = 0;
+ int16_t testNr = 0;
ACMTestISACConfig wbISACConfig;
ACMTestISACConfig swbISACConfig;
@@ -290,21 +290,21 @@
SetISACConfigDefault(swbISACConfig);
testNr++;
EncodeDecode(testNr, wbISACConfig, swbISACConfig);
-
+
int user_input;
if((_testMode == 0) || (_testMode == 1))
{
- swbISACConfig.maxPayloadSizeByte = (WebRtc_UWord16)200;
- wbISACConfig.maxPayloadSizeByte = (WebRtc_UWord16)200;
+ swbISACConfig.maxPayloadSizeByte = (uint16_t)200;
+ wbISACConfig.maxPayloadSizeByte = (uint16_t)200;
}
else
{
printf("Enter the max payload-size for side A: ");
CHECK_ERROR(scanf("%d", &user_input));
- swbISACConfig.maxPayloadSizeByte = (WebRtc_UWord16)user_input;
+ swbISACConfig.maxPayloadSizeByte = (uint16_t)user_input;
printf("Enter the max payload-size for side B: ");
CHECK_ERROR(scanf("%d", &user_input));
- wbISACConfig.maxPayloadSizeByte = (WebRtc_UWord16)user_input;
+ wbISACConfig.maxPayloadSizeByte = (uint16_t)user_input;
}
testNr++;
EncodeDecode(testNr, wbISACConfig, swbISACConfig);
@@ -316,17 +316,17 @@
if((_testMode == 0) || (_testMode == 1))
{
- swbISACConfig.maxRateBitPerSec = (WebRtc_UWord32)48000;
- wbISACConfig.maxRateBitPerSec = (WebRtc_UWord32)48000;
+ swbISACConfig.maxRateBitPerSec = (uint32_t)48000;
+ wbISACConfig.maxRateBitPerSec = (uint32_t)48000;
}
else
{
printf("Enter the max rate for side A: ");
CHECK_ERROR(scanf("%d", &user_input));
- swbISACConfig.maxRateBitPerSec = (WebRtc_UWord32)user_input;
+ swbISACConfig.maxRateBitPerSec = (uint32_t)user_input;
printf("Enter the max rate for side B: ");
CHECK_ERROR(scanf("%d", &user_input));
- wbISACConfig.maxRateBitPerSec = (WebRtc_UWord32)user_input;
+ wbISACConfig.maxRateBitPerSec = (uint32_t)user_input;
}
testNr++;
diff --git a/webrtc/modules/audio_coding/main/test/iSACTest.h b/webrtc/modules/audio_coding/main/test/iSACTest.h
index 96d3fb6..548ecb6 100644
--- a/webrtc/modules/audio_coding/main/test/iSACTest.h
+++ b/webrtc/modules/audio_coding/main/test/iSACTest.h
@@ -27,13 +27,13 @@
struct ACMTestISACConfig
{
- WebRtc_Word32 currentRateBitPerSec;
- WebRtc_Word16 currentFrameSizeMsec;
- WebRtc_UWord32 maxRateBitPerSec;
- WebRtc_Word16 maxPayloadSizeByte;
- WebRtc_Word16 encodingMode;
- WebRtc_UWord32 initRateBitPerSec;
- WebRtc_Word16 initFrameSizeInMsec;
+ int32_t currentRateBitPerSec;
+ int16_t currentFrameSizeMsec;
+ uint32_t maxRateBitPerSec;
+ int16_t maxPayloadSizeByte;
+ int16_t encodingMode;
+ uint32_t initRateBitPerSec;
+ int16_t initFrameSizeInMsec;
bool enforceFrameSize;
};
@@ -47,10 +47,10 @@
void Perform();
private:
- WebRtc_Word16 Setup();
- WebRtc_Word16 SetupConference();
- WebRtc_Word16 RunConference();
-
+ int16_t Setup();
+ int16_t SetupConference();
+ int16_t RunConference();
+
void Run10ms();
@@ -78,8 +78,8 @@
PCMFile _outFileA;
PCMFile _outFileB;
- WebRtc_UWord8 _idISAC16kHz;
- WebRtc_UWord8 _idISAC32kHz;
+ uint8_t _idISAC16kHz;
+ uint8_t _idISAC32kHz;
CodecInst _paramISAC16kHz;
CodecInst _paramISAC32kHz;
diff --git a/webrtc/modules/audio_coding/main/test/utility.cc b/webrtc/modules/audio_coding/main/test/utility.cc
index b727ccd..74d99fc 100644
--- a/webrtc/modules/audio_coding/main/test/utility.cc
+++ b/webrtc/modules/audio_coding/main/test/utility.cc
@@ -116,15 +116,15 @@
}
-WebRtc_Word16
+int16_t
ChooseCodec(
CodecInst& codecInst)
{
PrintCodecs();
//AudioCodingModule* tmpACM = AudioCodingModule::Create(0);
- WebRtc_UWord8 noCodec = AudioCodingModule::NumberOfCodecs();
- WebRtc_Word8 codecID;
+ uint8_t noCodec = AudioCodingModule::NumberOfCodecs();
+ int8_t codecID;
bool outOfRange = false;
char myStr[15] = "";
do
@@ -139,18 +139,18 @@
}
} while(outOfRange);
- CHECK_ERROR(AudioCodingModule::Codec((WebRtc_UWord8)codecID, &codecInst));
+ CHECK_ERROR(AudioCodingModule::Codec((uint8_t)codecID, &codecInst));
return 0;
}
void
PrintCodecs()
{
- WebRtc_UWord8 noCodec = AudioCodingModule::NumberOfCodecs();
-
+ uint8_t noCodec = AudioCodingModule::NumberOfCodecs();
+
CodecInst codecInst;
printf("No Name [Hz] [bps]\n");
- for(WebRtc_UWord8 codecCntr = 0; codecCntr < noCodec; codecCntr++)
+ for(uint8_t codecCntr = 0; codecCntr < noCodec; codecCntr++)
{
AudioCodingModule::Codec(codecCntr, &codecInst);
printf("%2d- %-18s %5d %6d\n",
@@ -159,7 +159,7 @@
}
-CircularBuffer::CircularBuffer(WebRtc_UWord32 len):
+CircularBuffer::CircularBuffer(uint32_t len):
_buff(NULL),
_idx(0),
_buffIsFull(false),
@@ -175,7 +175,7 @@
}
else
{
- for(WebRtc_UWord32 n = 0; n < len; n++)
+ for(uint32_t n = 0; n < len; n++)
{
_buff[n] = 0;
}
@@ -239,7 +239,7 @@
if(enable && !_calcAvg)
{
- WebRtc_UWord32 lim;
+ uint32_t lim;
if(_buffIsFull)
{
lim = _buffLen;
@@ -249,7 +249,7 @@
lim = _idx;
}
_sum = 0;
- for(WebRtc_UWord32 n = 0; n < lim; n++)
+ for(uint32_t n = 0; n < lim; n++)
{
_sum += _buff[n];
}
@@ -265,7 +265,7 @@
if(enable && !_calcVar)
{
- WebRtc_UWord32 lim;
+ uint32_t lim;
if(_buffIsFull)
{
lim = _buffLen;
@@ -275,7 +275,7 @@
lim = _idx;
}
_sumSqr = 0;
- for(WebRtc_UWord32 n = 0; n < lim; n++)
+ for(uint32_t n = 0; n < lim; n++)
{
_sumSqr += _buff[n] * _buff[n];
}
@@ -283,7 +283,7 @@
_calcAvg = enable;
}
-WebRtc_Word16
+int16_t
CircularBuffer::ArithMean(double& mean)
{
assert(_buffLen > 0);
@@ -309,7 +309,7 @@
}
}
-WebRtc_Word16
+int16_t
CircularBuffer::Variance(double& var)
{
assert(_buffLen > 0);
@@ -366,7 +366,7 @@
DTMFDetector::DTMFDetector()
{
- for(WebRtc_Word16 n = 0; n < 1000; n++)
+ for(int16_t n = 0; n < 1000; n++)
{
_toneCntr[n] = 0;
}
@@ -376,7 +376,7 @@
{
}
-WebRtc_Word32 DTMFDetector::IncomingDtmf(const WebRtc_UWord8 digitDtmf, const bool /* toneEnded */)
+int32_t DTMFDetector::IncomingDtmf(const uint8_t digitDtmf, const bool /* toneEnded */)
{
fprintf(stdout, "%d-",digitDtmf);
_toneCntr[digitDtmf]++;
@@ -385,7 +385,7 @@
void DTMFDetector::PrintDetectedDigits()
{
- for(WebRtc_Word16 n = 0; n < 1000; n++)
+ for(int16_t n = 0; n < 1000; n++)
{
if(_toneCntr[n] > 0)
{
@@ -424,9 +424,9 @@
fprintf(stdout, "Passive DTX super-wideband... %d\n", _numFrameTypes[5]);
}
-WebRtc_Word32
+int32_t
VADCallback::InFrameType(
- WebRtc_Word16 frameType)
+ int16_t frameType)
{
_numFrameTypes[frameType]++;
return 0;
diff --git a/webrtc/modules/audio_coding/main/test/utility.h b/webrtc/modules/audio_coding/main/test/utility.h
index 82935a5..a4a89d1 100644
--- a/webrtc/modules/audio_coding/main/test/utility.h
+++ b/webrtc/modules/audio_coding/main/test/utility.h
@@ -105,7 +105,7 @@
class CircularBuffer
{
public:
- CircularBuffer(WebRtc_UWord32 len);
+ CircularBuffer(uint32_t len);
~CircularBuffer();
void SetArithMean(
@@ -116,14 +116,14 @@
void Update(
const double newVal);
void IsBufferFull();
-
- WebRtc_Word16 Variance(double& var);
- WebRtc_Word16 ArithMean(double& mean);
+
+ int16_t Variance(double& var);
+ int16_t ArithMean(double& mean);
protected:
double* _buff;
- WebRtc_UWord32 _idx;
- WebRtc_UWord32 _buffLen;
+ uint32_t _idx;
+ uint32_t _buffLen;
bool _buffIsFull;
bool _calcAvg;
@@ -136,7 +136,7 @@
-WebRtc_Word16 ChooseCodec(
+int16_t ChooseCodec(
CodecInst& codecInst);
void PrintCodecs();
@@ -152,11 +152,11 @@
DTMFDetector();
~DTMFDetector();
// used for inband DTMF detection
- WebRtc_Word32 IncomingDtmf(const WebRtc_UWord8 digitDtmf, const bool toneEnded);
+ int32_t IncomingDtmf(const uint8_t digitDtmf, const bool toneEnded);
void PrintDetectedDigits();
private:
- WebRtc_UWord32 _toneCntr[1000];
+ uint32_t _toneCntr[1000];
};
@@ -169,14 +169,14 @@
VADCallback();
~VADCallback(){}
- WebRtc_Word32 InFrameType(
- WebRtc_Word16 frameType);
+ int32_t InFrameType(
+ int16_t frameType);
void PrintFrameTypes();
void Reset();
private:
- WebRtc_UWord32 _numFrameTypes[6];
+ uint32_t _numFrameTypes[6];
};
} // namespace webrtc
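
Aside: CircularBuffer above maintains running _sum and _sumSqr accumulators alongside the stored window, which is the usual way to answer ArithMean and Variance from sum/N and sumSqr/N - mean^2 without rescanning the buffer. A minimal sketch of that bookkeeping under those assumptions (the RunningStats names are hypothetical; plain doubles are used, like the buffer itself):

#include <stdio.h>

/* Running mean and (population) variance over n samples, kept as a
 * sum and a sum of squares. */
typedef struct {
  double sum;
  double sum_sqr;
  unsigned n;
} RunningStats;

static void StatsUpdate(RunningStats* s, double x) {
  s->sum += x;
  s->sum_sqr += x * x;
  s->n++;
}

static double StatsMean(const RunningStats* s) {
  return s->n ? s->sum / s->n : 0.0;
}

static double StatsVariance(const RunningStats* s) {
  double mean = StatsMean(s);
  return s->n ? s->sum_sqr / s->n - mean * mean : 0.0;
}

int main(void) {
  RunningStats s = {0.0, 0.0, 0};
  double samples[] = {2.0, 4.0, 4.0, 4.0, 5.0, 5.0, 7.0, 9.0};
  for (unsigned i = 0; i < sizeof(samples) / sizeof(samples[0]); i++)
    StatsUpdate(&s, samples[i]);
  printf("mean=%.2f var=%.2f\n", StatsMean(&s), StatsVariance(&s)); /* 5.00, 4.00 */
  return 0;
}
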
diff --git a/webrtc/modules/audio_coding/neteq/accelerate.c b/webrtc/modules/audio_coding/neteq/accelerate.c
index 285de4d..ce3f3b2 100644
--- a/webrtc/modules/audio_coding/neteq/accelerate.c
+++ b/webrtc/modules/audio_coding/neteq/accelerate.c
@@ -28,9 +28,9 @@
/* Scratch usage:
Type Name size startpos endpos
- WebRtc_Word16 pw16_downSampSpeech 110 0 109
- WebRtc_Word32 pw32_corr 2*50 110 209
- WebRtc_Word16 pw16_corr 50 0 49
+ int16_t pw16_downSampSpeech 110 0 109
+ int32_t pw32_corr 2*50 110 209
+ int16_t pw16_corr 50 0 49
Total: 110+2*50
*/
@@ -67,45 +67,45 @@
int WebRtcNetEQ_Accelerate(DSPInst_t *inst,
#ifdef SCRATCH
- WebRtc_Word16 *pw16_scratchPtr,
+ int16_t *pw16_scratchPtr,
#endif
- const WebRtc_Word16 *pw16_decoded, int len,
- WebRtc_Word16 *pw16_outData, WebRtc_Word16 *pw16_len,
- WebRtc_Word16 BGNonly)
+ const int16_t *pw16_decoded, int len,
+ int16_t *pw16_outData, int16_t *pw16_len,
+ int16_t BGNonly)
{
#ifdef SCRATCH
/* Use scratch memory for internal temporary vectors */
- WebRtc_Word16 *pw16_downSampSpeech = pw16_scratchPtr + SCRATCH_PW16_DS_SPEECH;
- WebRtc_Word32 *pw32_corr = (WebRtc_Word32*) (pw16_scratchPtr + SCRATCH_PW32_CORR);
- WebRtc_Word16 *pw16_corr = pw16_scratchPtr + SCRATCH_PW16_CORR;
+ int16_t *pw16_downSampSpeech = pw16_scratchPtr + SCRATCH_PW16_DS_SPEECH;
+ int32_t *pw32_corr = (int32_t*) (pw16_scratchPtr + SCRATCH_PW32_CORR);
+ int16_t *pw16_corr = pw16_scratchPtr + SCRATCH_PW16_CORR;
#else
/* Allocate memory for temporary vectors */
- WebRtc_Word16 pw16_downSampSpeech[ACCELERATE_DOWNSAMPLED_LEN];
- WebRtc_Word32 pw32_corr[ACCELERATE_CORR_LEN];
- WebRtc_Word16 pw16_corr[ACCELERATE_CORR_LEN];
+ int16_t pw16_downSampSpeech[ACCELERATE_DOWNSAMPLED_LEN];
+ int32_t pw32_corr[ACCELERATE_CORR_LEN];
+ int16_t pw16_corr[ACCELERATE_CORR_LEN];
#endif
- WebRtc_Word16 w16_decodedMax = 0;
- WebRtc_Word16 w16_tmp;
- WebRtc_Word16 w16_tmp2;
- WebRtc_Word32 w32_tmp;
- WebRtc_Word32 w32_tmp2;
+ int16_t w16_decodedMax = 0;
+ int16_t w16_tmp;
+ int16_t w16_tmp2;
+ int32_t w32_tmp;
+ int32_t w32_tmp2;
- const WebRtc_Word16 w16_startLag = ACCELERATE_MIN_LAG;
- const WebRtc_Word16 w16_endLag = ACCELERATE_MAX_LAG;
- const WebRtc_Word16 w16_corrLen = ACCELERATE_CORR_LEN;
- const WebRtc_Word16 *pw16_vec1, *pw16_vec2;
- WebRtc_Word16 *pw16_vectmp;
- WebRtc_Word16 w16_inc, w16_startfact;
- WebRtc_Word16 w16_bestIndex, w16_bestVal;
- WebRtc_Word16 w16_VAD = 1;
- WebRtc_Word16 fsMult;
- WebRtc_Word16 fsMult120;
- WebRtc_Word32 w32_en1, w32_en2, w32_cc;
- WebRtc_Word16 w16_en1, w16_en2;
- WebRtc_Word16 w16_en1Scale, w16_en2Scale;
- WebRtc_Word16 w16_sqrtEn1En2;
- WebRtc_Word16 w16_bestCorr = 0;
+ const int16_t w16_startLag = ACCELERATE_MIN_LAG;
+ const int16_t w16_endLag = ACCELERATE_MAX_LAG;
+ const int16_t w16_corrLen = ACCELERATE_CORR_LEN;
+ const int16_t *pw16_vec1, *pw16_vec2;
+ int16_t *pw16_vectmp;
+ int16_t w16_inc, w16_startfact;
+ int16_t w16_bestIndex, w16_bestVal;
+ int16_t w16_VAD = 1;
+ int16_t fsMult;
+ int16_t fsMult120;
+ int32_t w32_en1, w32_en2, w32_cc;
+ int16_t w16_en1, w16_en2;
+ int16_t w16_en1Scale, w16_en2Scale;
+ int16_t w16_sqrtEn1En2;
+ int16_t w16_bestCorr = 0;
int ok;
#ifdef NETEQ_STEREO
@@ -115,20 +115,20 @@
fsMult = WebRtcNetEQ_CalcFsMult(inst->fs); /* Calculate fs/8000 */
/* Pre-calculate common multiplication with fsMult */
- fsMult120 = (WebRtc_Word16) WEBRTC_SPL_MUL_16_16(fsMult, 120); /* 15 ms */
+ fsMult120 = (int16_t) WEBRTC_SPL_MUL_16_16(fsMult, 120); /* 15 ms */
inst->ExpandInst.w16_consecExp = 0; /* Last was not expand any more */
/* Sanity check for len variable; must be (almost) 30 ms
(120*fsMult + max(bestIndex)) */
- if (len < (WebRtc_Word16) WEBRTC_SPL_MUL_16_16((120 + 119), fsMult))
+ if (len < (int16_t) WEBRTC_SPL_MUL_16_16((120 + 119), fsMult))
{
/* Length of decoded data too short */
inst->w16_mode = MODE_UNSUCCESS_ACCELERATE;
*pw16_len = len;
/* simply move all data from decoded to outData */
- WEBRTC_SPL_MEMMOVE_W16(pw16_outData, pw16_decoded, (WebRtc_Word16) len);
+ WEBRTC_SPL_MEMMOVE_W16(pw16_outData, pw16_decoded, (int16_t) len);
return NETEQ_OTHER_ERROR;
}
@@ -150,7 +150,7 @@
*pw16_len = len;
/* simply move all data from decoded to outData */
- WEBRTC_SPL_MEMMOVE_W16(pw16_outData, pw16_decoded, (WebRtc_Word16) len);
+ WEBRTC_SPL_MEMMOVE_W16(pw16_outData, pw16_decoded, (int16_t) len);
return NETEQ_OTHER_ERROR;
}
@@ -192,7 +192,7 @@
/****************************************************************/
/* find maximum absolute value */
- w16_decodedMax = WebRtcSpl_MaxAbsValueW16(pw16_decoded, (WebRtc_Word16) len);
+ w16_decodedMax = WebRtcSpl_MaxAbsValueW16(pw16_decoded, (int16_t) len);
/* downsample the decoded speech to 4 kHz */
ok = WebRtcNetEQ_DownSampleTo4kHz(pw16_decoded, len, inst->fs, pw16_downSampSpeech,
@@ -203,7 +203,7 @@
inst->w16_mode = MODE_UNSUCCESS_ACCELERATE;
*pw16_len = len;
/* simply move all data from decoded to outData */
- WEBRTC_SPL_MEMMOVE_W16(pw16_outData, pw16_decoded, (WebRtc_Word16) len);
+ WEBRTC_SPL_MEMMOVE_W16(pw16_outData, pw16_decoded, (int16_t) len);
return NETEQ_OTHER_ERROR;
}
@@ -218,9 +218,9 @@
WebRtcNetEQ_CrossCorr(
pw32_corr, &pw16_downSampSpeech[w16_endLag],
&pw16_downSampSpeech[w16_endLag - w16_startLag], w16_corrLen,
- (WebRtc_Word16) (w16_endLag - w16_startLag), w16_tmp, -1);
+ (int16_t) (w16_endLag - w16_startLag), w16_tmp, -1);
- /* Normalize correlation to 14 bits and put in a WebRtc_Word16 vector */
+ /* Normalize correlation to 14 bits and put in an int16_t vector */
w32_tmp = WebRtcSpl_MaxAbsValueW32(pw32_corr, w16_corrLen);
w16_tmp = 17 - WebRtcSpl_NormW32(w32_tmp);
w16_tmp = WEBRTC_SPL_MAX(0, w16_tmp);
@@ -233,7 +233,7 @@
if ((msInfo->msMode == NETEQ_MASTER) || (msInfo->msMode == NETEQ_MONO))
{
/* Find the strongest correlation peak by using the parabolic fit method */
- WebRtcNetEQ_PeakDetection(pw16_corr, (WebRtc_Word16) w16_corrLen, 1, fsMult,
+ WebRtcNetEQ_PeakDetection(pw16_corr, (int16_t) w16_corrLen, 1, fsMult,
&w16_bestIndex, &w16_bestVal);
/* 0 <= bestIndex <= (2*corrLen - 1)*fsMult = 99*fsMult */
@@ -265,7 +265,7 @@
#else /* NETEQ_STEREO */
/* Find the strongest correlation peak by using the parabolic fit method */
- WebRtcNetEQ_PeakDetection(pw16_corr, (WebRtc_Word16) w16_corrLen, 1, fsMult,
+ WebRtcNetEQ_PeakDetection(pw16_corr, (int16_t) w16_corrLen, 1, fsMult,
&w16_bestIndex, &w16_bestVal);
/* 0 <= bestIndex <= (2*corrLen - 1)*fsMult = 99*fsMult */
@@ -304,13 +304,13 @@
pw16_vec2 = &pw16_decoded[fsMult120];
/* Calculate energies for vec1 and vec2 */
- w32_en1 = WebRtcNetEQ_DotW16W16((WebRtc_Word16*) pw16_vec1,
- (WebRtc_Word16*) pw16_vec1, w16_bestIndex, w16_tmp);
- w32_en2 = WebRtcNetEQ_DotW16W16((WebRtc_Word16*) pw16_vec2,
- (WebRtc_Word16*) pw16_vec2, w16_bestIndex, w16_tmp);
+ w32_en1 = WebRtcNetEQ_DotW16W16((int16_t*) pw16_vec1,
+ (int16_t*) pw16_vec1, w16_bestIndex, w16_tmp);
+ w32_en2 = WebRtcNetEQ_DotW16W16((int16_t*) pw16_vec2,
+ (int16_t*) pw16_vec2, w16_bestIndex, w16_tmp);
/* Calculate cross-correlation at the found lag */
- w32_cc = WebRtcNetEQ_DotW16W16((WebRtc_Word16*) pw16_vec1, (WebRtc_Word16*) pw16_vec2,
+ w32_cc = WebRtcNetEQ_DotW16W16((int16_t*) pw16_vec1, (int16_t*) pw16_vec2,
w16_bestIndex, w16_tmp);
/* Check VAD constraint
@@ -328,7 +328,7 @@
w16_tmp2 = 16 - WebRtcSpl_NormW32(w32_tmp2);
w16_tmp2 = WEBRTC_SPL_MAX(0, w16_tmp2);
w32_tmp = WEBRTC_SPL_RSHIFT_W32(w32_tmp, w16_tmp2);
- w16_tmp2 = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(w32_tmp2, w16_tmp2);
+ w16_tmp2 = (int16_t) WEBRTC_SPL_RSHIFT_W32(w32_tmp2, w16_tmp2);
w32_tmp2 = WEBRTC_SPL_MUL_16_16(w16_bestIndex, w16_tmp2);
/* Scale w32_tmp properly before comparing with w32_tmp2 */
@@ -336,7 +336,7 @@
if (WebRtcSpl_NormW32(w32_tmp) < WEBRTC_SPL_LSHIFT_W32(w16_tmp,1))
{
/* Cannot scale only w32_tmp, must scale w32_temp2 too */
- WebRtc_Word16 tempshift = WebRtcSpl_NormW32(w32_tmp);
+ int16_t tempshift = WebRtcSpl_NormW32(w32_tmp);
w32_tmp = WEBRTC_SPL_LSHIFT_W32(w32_tmp, tempshift);
w32_tmp2 = WEBRTC_SPL_RSHIFT_W32(w32_tmp2,
WEBRTC_SPL_LSHIFT_W32(w16_tmp,1) - tempshift);
@@ -372,21 +372,21 @@
w16_en1Scale += 1;
}
- /* Convert energies to WebRtc_Word16 */
- w16_en1 = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(w32_en1, w16_en1Scale);
- w16_en2 = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(w32_en2, w16_en2Scale);
+ /* Convert energies to int16_t */
+ w16_en1 = (int16_t) WEBRTC_SPL_RSHIFT_W32(w32_en1, w16_en1Scale);
+ w16_en2 = (int16_t) WEBRTC_SPL_RSHIFT_W32(w32_en2, w16_en2Scale);
/* Calculate energy product */
w32_tmp = WEBRTC_SPL_MUL_16_16(w16_en1, w16_en2);
/* Calculate square-root of energy product */
- w16_sqrtEn1En2 = (WebRtc_Word16) WebRtcSpl_SqrtFloor(w32_tmp);
+ w16_sqrtEn1En2 = (int16_t) WebRtcSpl_SqrtFloor(w32_tmp);
/* Calculate cc/sqrt(en1*en2) in Q14 */
w16_tmp = 14 - WEBRTC_SPL_RSHIFT_W16(w16_en1Scale+w16_en2Scale, 1);
w32_cc = WEBRTC_SPL_SHIFT_W32(w32_cc, w16_tmp);
w32_cc = WEBRTC_SPL_MAX(0, w32_cc); /* Don't divide with negative number */
- w16_bestCorr = (WebRtc_Word16) WebRtcSpl_DivW32W16(w32_cc, w16_sqrtEn1En2);
+ w16_bestCorr = (int16_t) WebRtcSpl_DivW32W16(w32_cc, w16_sqrtEn1En2);
w16_bestCorr = WEBRTC_SPL_MIN(16384, w16_bestCorr); /* set maximum to 1.0 */
}
@@ -414,8 +414,8 @@
* Calculate cross-fading slope so that the fading factor goes from
* 1 (16384 in Q14) to 0 in one pitch period (bestIndex).
*/
- w16_inc = (WebRtc_Word16) WebRtcSpl_DivW32W16((WebRtc_Word32) 16384,
- (WebRtc_Word16) (w16_bestIndex + 1)); /* in Q14 */
+ w16_inc = (int16_t) WebRtcSpl_DivW32W16((int32_t) 16384,
+ (int16_t) (w16_bestIndex + 1)); /* in Q14 */
/* Initiate fading factor */
w16_startfact = 16384 - w16_inc;
@@ -432,14 +432,14 @@
/* Generate interpolated part of length bestIndex (1 pitch period) */
pw16_vectmp = pw16_outData + w16_tmp; /* start of interpolation output */
/* Reuse mixing function from Expand */
- WebRtcNetEQ_MixVoiceUnvoice(pw16_vectmp, (WebRtc_Word16*) pw16_vec1,
- (WebRtc_Word16*) pw16_vec2, &w16_startfact, w16_inc, w16_bestIndex);
+ WebRtcNetEQ_MixVoiceUnvoice(pw16_vectmp, (int16_t*) pw16_vec1,
+ (int16_t*) pw16_vec2, &w16_startfact, w16_inc, w16_bestIndex);
/* Move the last part (also unmodified) */
/* Take from decoded at 15 ms + 1 pitch period */
pw16_vec2 = &pw16_decoded[fsMult120 + w16_bestIndex];
WEBRTC_SPL_MEMMOVE_W16(&pw16_outData[fsMult120], pw16_vec2,
- (WebRtc_Word16) (len - fsMult120 - w16_bestIndex));
+ (int16_t) (len - fsMult120 - w16_bestIndex));
/* Set the mode flag */
if (w16_VAD)
@@ -478,7 +478,7 @@
*pw16_len = len;
/* Simply move all data from decoded to outData */
- WEBRTC_SPL_MEMMOVE_W16(pw16_outData, pw16_decoded, (WebRtc_Word16) len);
+ WEBRTC_SPL_MEMMOVE_W16(pw16_outData, pw16_decoded, (int16_t) len);
return 0;
}
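
Aside: the accelerate hunks above shorten the signal by one pitch period. Once the best lag (w16_bestIndex) is found, w16_inc = 16384/(bestIndex+1) sets a Q14 slope so the mixing factor ramps from roughly 1.0 down to 0 across that period, and WebRtcNetEQ_MixVoiceUnvoice blends the two overlapping segments with it. A hedged sketch of such a Q14 linear cross-fade, folded into one loop (CrossFadeQ14 is an illustrative name, not a function from the patch):

#include <stdint.h>

/* Linear cross-fade in Q14:
 *   out[i] = (fact * vec1[i] + (16384 - fact) * vec2[i]) >> 14
 * with fact starting just below 1.0 (16384) and decreasing by `inc` per
 * sample, so the output slides from vec1 to vec2 over `len` samples. */
static void CrossFadeQ14(int16_t* out, const int16_t* vec1, const int16_t* vec2,
                         int len) {
  int16_t inc = (int16_t)(16384 / (len + 1));  /* slope, Q14 */
  int16_t fact = (int16_t)(16384 - inc);       /* initial mixing factor */
  for (int i = 0; i < len; i++) {
    int32_t mixed = (int32_t)fact * vec1[i] + (int32_t)(16384 - fact) * vec2[i];
    out[i] = (int16_t)(mixed >> 14);
    fact = (int16_t)(fact - inc);
  }
}

In the patch the factor and increment are computed once (w16_startfact, w16_inc) and handed to the mixing routine reused from Expand; the sketch only shows the arithmetic.
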
diff --git a/webrtc/modules/audio_coding/neteq/automode.c b/webrtc/modules/audio_coding/neteq/automode.c
index 78933cc..edee98e 100644
--- a/webrtc/modules/audio_coding/neteq/automode.c
+++ b/webrtc/modules/audio_coding/neteq/automode.c
@@ -30,15 +30,15 @@
int WebRtcNetEQ_UpdateIatStatistics(AutomodeInst_t *inst, int maxBufLen,
- WebRtc_UWord16 seqNumber, WebRtc_UWord32 timeStamp,
- WebRtc_Word32 fsHz, int mdCodec, int streamingMode)
+ uint16_t seqNumber, uint32_t timeStamp,
+ int32_t fsHz, int mdCodec, int streamingMode)
{
- WebRtc_UWord32 timeIat; /* inter-arrival time */
+ uint32_t timeIat; /* inter-arrival time */
int i;
- WebRtc_Word32 tempsum = 0; /* temp summation */
- WebRtc_Word32 tempvar; /* temporary variable */
+ int32_t tempsum = 0; /* temp summation */
+ int32_t tempvar; /* temporary variable */
int retval = 0; /* return value */
- WebRtc_Word16 packetLenSamp; /* packet speech length in samples */
+ int16_t packetLenSamp; /* packet speech length in samples */
/****************/
/* Sanity check */
@@ -63,7 +63,7 @@
else
{
/* calculate timestamps per packet */
- packetLenSamp = (WebRtc_Word16) WebRtcSpl_DivU32U16(timeStamp - inst->lastTimeStamp,
+ packetLenSamp = (int16_t) WebRtcSpl_DivU32U16(timeStamp - inst->lastTimeStamp,
seqNumber - inst->lastSeqNo);
}
@@ -81,7 +81,7 @@
* Calculate IAT in Q8, including fractions of a packet (i.e., more accurate
* than timeIat).
*/
- WebRtc_Word16 timeIatQ8 = (WebRtc_Word16) WebRtcSpl_DivW32W16(
+ int16_t timeIatQ8 = (int16_t) WebRtcSpl_DivW32W16(
WEBRTC_SPL_LSHIFT_W32(inst->packetIatCountSamp, 8), packetLenSamp);
/*
@@ -105,7 +105,7 @@
}
/* too long since the last maximum was observed; decrease max value */
- if (inst->maxCSumUpdateTimer > (WebRtc_UWord32) WEBRTC_SPL_MUL_32_16(fsHz,
+ if (inst->maxCSumUpdateTimer > (uint32_t) WEBRTC_SPL_MUL_32_16(fsHz,
MAX_STREAMING_PEAK_PERIOD))
{
inst->maxCSumIatQ8 -= 4; /* remove 1000*4/256 = 15.6 ms/s */
@@ -120,12 +120,12 @@
* the IAT is not negative.
*/
timeIat -= WEBRTC_SPL_MIN(timeIat,
- (WebRtc_UWord32) (seqNumber - inst->lastSeqNo - 1));
+ (uint32_t) (seqNumber - inst->lastSeqNo - 1));
}
else if (seqNumber < inst->lastSeqNo)
{
/* compensate for re-ordering */
- timeIat += (WebRtc_UWord32) (inst->lastSeqNo + 1 - seqNumber);
+ timeIat += (uint32_t) (inst->lastSeqNo + 1 - seqNumber);
}
/* saturate IAT at maximum value */
@@ -134,7 +134,7 @@
/* update iatProb = forgetting_factor * iatProb for all elements */
for (i = 0; i <= MAX_IAT; i++)
{
- WebRtc_Word32 tempHi, tempLo; /* Temporary variables */
+ int32_t tempHi, tempLo; /* Temporary variables */
/*
* Multiply iatProbFact (Q15) with iatProb (Q30) and right-shift 15 steps
@@ -143,12 +143,12 @@
/*
* 1) Multiply the high 16 bits (15 bits + sign) of iatProb. Shift iatProb
- * 16 steps right to get the high 16 bits in a WebRtc_Word16 prior to
+ * 16 steps right to get the high 16 bits in an int16_t prior to
* multiplication, and left-shift with 1 afterwards to come back to
* Q30 = (Q15 * (Q30>>16)) << 1.
*/
tempHi = WEBRTC_SPL_MUL_16_16(inst->iatProbFact,
- (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(inst->iatProb[i], 16));
+ (int16_t) WEBRTC_SPL_RSHIFT_W32(inst->iatProb[i], 16));
tempHi = WEBRTC_SPL_LSHIFT_W32(tempHi, 1); /* left-shift 1 step */
/*
@@ -157,7 +157,7 @@
*/
tempLo = inst->iatProb[i] & 0x0000FFFF; /* sift out the 16 low bits */
tempLo = WEBRTC_SPL_MUL_16_U16(inst->iatProbFact,
- (WebRtc_UWord16) tempLo);
+ (uint16_t) tempLo);
tempLo = WEBRTC_SPL_RSHIFT_W32(tempLo, 15);
/* Finally, add the high and low parts */
@@ -212,7 +212,7 @@
}
/* Calculate optimal buffer level based on updated statistics */
- tempvar = (WebRtc_Word32) WebRtcNetEQ_CalcOptimalBufLvl(inst, fsHz, mdCodec, timeIat,
+ tempvar = (int32_t) WebRtcNetEQ_CalcOptimalBufLvl(inst, fsHz, mdCodec, timeIat,
streamingMode);
if (tempvar > 0)
{
@@ -254,8 +254,8 @@
/* Calculate inter-arrival time in ms = packetIatCountSamp / (fsHz / 1000) */
timeIat = WEBRTC_SPL_UDIV(
- WEBRTC_SPL_UMUL_32_16(inst->packetIatCountSamp, (WebRtc_Word16) 1000),
- (WebRtc_UWord32) fsHz);
+ WEBRTC_SPL_UMUL_32_16(inst->packetIatCountSamp, (int16_t) 1000),
+ (uint32_t) fsHz);
/* Increase counter corresponding to current inter-arrival time */
if (timeIat > 2000)
@@ -291,16 +291,16 @@
}
-WebRtc_Word16 WebRtcNetEQ_CalcOptimalBufLvl(AutomodeInst_t *inst, WebRtc_Word32 fsHz,
- int mdCodec, WebRtc_UWord32 timeIatPkts,
- int streamingMode)
+int16_t WebRtcNetEQ_CalcOptimalBufLvl(AutomodeInst_t *inst, int32_t fsHz,
+ int mdCodec, uint32_t timeIatPkts,
+ int streamingMode)
{
- WebRtc_Word32 sum1 = 1 << 30; /* assign to 1 in Q30 */
- WebRtc_Word16 B;
- WebRtc_UWord16 Bopt;
+ int32_t sum1 = 1 << 30; /* assign to 1 in Q30 */
+ int16_t B;
+ uint16_t Bopt;
int i;
- WebRtc_Word32 betaInv; /* optimization parameter */
+ int32_t betaInv; /* optimization parameter */
#ifdef NETEQ_DELAY_LOGGING
/* special code for offline delay logging */
@@ -362,7 +362,7 @@
* Do not have to re-calculate all points, just back off a few steps from
* previous value of B.
*/
- WebRtc_Word32 sum2 = sum1; /* copy sum1 */
+ int32_t sum2 = sum1; /* copy sum1 */
while ((sum2 <= betaInv + inst->iatProb[Bopt]) && (Bopt > 0))
{
@@ -426,8 +426,8 @@
* If IAT > optimal level + threshold (+1 for MD codecs)
* or if IAT > 2 * optimal level (note: optimal level is in Q8):
*/
- if (timeIatPkts > (WebRtc_UWord32) (Bopt + inst->peakThresholdPkt + (mdCodec != 0))
- || timeIatPkts > (WebRtc_UWord32) WEBRTC_SPL_LSHIFT_U16(Bopt, 1))
+ if (timeIatPkts > (uint32_t) (Bopt + inst->peakThresholdPkt + (mdCodec != 0))
+ || timeIatPkts > (uint32_t) WEBRTC_SPL_LSHIFT_U16(Bopt, 1))
{
/* A peak is observed */
@@ -440,7 +440,7 @@
}
else if (inst->peakIatCountSamp
<=
- (WebRtc_UWord32) WEBRTC_SPL_MUL_32_16(fsHz, MAX_PEAK_PERIOD))
+ (uint32_t) WEBRTC_SPL_MUL_32_16(fsHz, MAX_PEAK_PERIOD))
{
/* This is not the first peak and the period time is valid */
@@ -450,7 +450,7 @@
/* saturate height to 16 bits */
inst->peakHeightPkt[inst->peakIndex]
=
- (WebRtc_Word16) WEBRTC_SPL_MIN(timeIatPkts, WEBRTC_SPL_WORD16_MAX);
+ (int16_t) WEBRTC_SPL_MIN(timeIatPkts, WEBRTC_SPL_WORD16_MAX);
/* increment peakIndex and wrap/modulo */
inst->peakIndex = (inst->peakIndex + 1) & PEAK_INDEX_MASK;
@@ -472,7 +472,7 @@
inst->peakModeDisabled >>= 1; /* decrease mode-disable "counter" */
}
- else if (inst->peakIatCountSamp > (WebRtc_UWord32) WEBRTC_SPL_MUL_32_16(fsHz,
+ else if (inst->peakIatCountSamp > (uint32_t) WEBRTC_SPL_MUL_32_16(fsHz,
WEBRTC_SPL_LSHIFT_W16(MAX_PEAK_PERIOD, 1)))
{
/*
@@ -535,11 +535,11 @@
}
-int WebRtcNetEQ_BufferLevelFilter(WebRtc_Word32 curSizeMs8, AutomodeInst_t *inst,
- int sampPerCall, WebRtc_Word16 fsMult)
+int WebRtcNetEQ_BufferLevelFilter(int32_t curSizeMs8, AutomodeInst_t *inst,
+ int sampPerCall, int16_t fsMult)
{
- WebRtc_Word16 curSizeFrames;
+ int16_t curSizeFrames;
/****************/
/* Sanity check */
@@ -558,7 +558,7 @@
* Current buffer level in packet lengths
* = (curSizeMs8 * fsMult) / packetSpeechLenSamp
*/
- curSizeFrames = (WebRtc_Word16) WebRtcSpl_DivW32W16(
+ curSizeFrames = (int16_t) WebRtcSpl_DivW32W16(
WEBRTC_SPL_MUL_32_16(curSizeMs8, fsMult), inst->packetSpeechLenSamp);
}
else
@@ -613,8 +613,8 @@
}
-int WebRtcNetEQ_SetPacketSpeechLen(AutomodeInst_t *inst, WebRtc_Word16 newLenSamp,
- WebRtc_Word32 fsHz)
+int WebRtcNetEQ_SetPacketSpeechLen(AutomodeInst_t *inst, int16_t newLenSamp,
+ int32_t fsHz)
{
/* Sanity check for newLenSamp and fsHz */
@@ -635,9 +635,9 @@
* the (fractional) number of packets that corresponds to PEAK_HEIGHT
* (in Q8 seconds). That is, threshold = PEAK_HEIGHT/256 * fsHz / packLen.
*/
- inst->peakThresholdPkt = (WebRtc_UWord16) WebRtcSpl_DivW32W16ResW16(
+ inst->peakThresholdPkt = (uint16_t) WebRtcSpl_DivW32W16ResW16(
WEBRTC_SPL_MUL_16_16_RSFT(PEAK_HEIGHT,
- (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(fsHz, 6), 2), inst->packetSpeechLenSamp);
+ (int16_t) WEBRTC_SPL_RSHIFT_W32(fsHz, 6), 2), inst->packetSpeechLenSamp);
return 0;
}
@@ -647,7 +647,7 @@
{
int i;
- WebRtc_UWord16 tempprob = 0x4002; /* 16384 + 2 = 100000000000010 binary; */
+ uint16_t tempprob = 0x4002; /* 16384 + 2 = 100000000000010 binary; */
/* Sanity check for maxBufLenPackets */
if (maxBufLenPackets <= 1)
@@ -690,7 +690,7 @@
/* iatProb[i] = 0.5^(i+1) = iatProb[i-1] / 2 */
tempprob = WEBRTC_SPL_RSHIFT_U16(tempprob, 1);
/* store in PDF vector */
- inst->iatProb[i] = WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32) tempprob, 16);
+ inst->iatProb[i] = WEBRTC_SPL_LSHIFT_W32((int32_t) tempprob, 16);
}
/*
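
Aside: the iatProb update above applies the Q15 forgetting factor to each Q30 probability using only 16x16-bit multiplies, by splitting the Q30 word into its high and low 16-bit halves: Q30 = (Q15 * (Q30>>16)) << 1 plus the low part shifted down 15 steps. A standalone sketch of that split multiply (MulQ30Q15 is an illustrative name):

#include <stdint.h>

/* Multiply a Q30 value by a Q15 factor using only 16x16 multiplies:
 *   result = (fact_q15 * prob_q30) >> 15
 *          = ((fact * (prob >> 16)) << 1) + ((fact * (prob & 0xFFFF)) >> 15)
 * since prob = hi * 2^16 + lo and 2^16 / 2^15 = 2. */
static int32_t MulQ30Q15(int32_t prob_q30, int16_t fact_q15) {
  int32_t hi = (int32_t)fact_q15 * (int16_t)(prob_q30 >> 16);       /* high 16 bits */
  int32_t lo = (int32_t)fact_q15 * (uint16_t)(prob_q30 & 0xFFFF);   /* low 16 bits */
  return (hi << 1) + (lo >> 15);
}

Keeping everything in 16x16 products matches the WEBRTC_SPL_MUL_16_16 / WEBRTC_SPL_MUL_16_U16 primitives used throughout this fixed-point code.
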
diff --git a/webrtc/modules/audio_coding/neteq/automode.h b/webrtc/modules/audio_coding/neteq/automode.h
index 2e6b514..5996a51 100644
--- a/webrtc/modules/audio_coding/neteq/automode.h
+++ b/webrtc/modules/audio_coding/neteq/automode.h
@@ -64,57 +64,57 @@
{
/* Filtered current buffer level */
- WebRtc_UWord16 levelFiltFact; /* filter forgetting factor in Q8 */
+ uint16_t levelFiltFact; /* filter forgetting factor in Q8 */
int buffLevelFilt; /* filtered buffer level in Q8 */
/* Inter-arrival time (iat) statistics */
- WebRtc_Word32 iatProb[MAX_IAT + 1]; /* iat probabilities in Q30 */
- WebRtc_Word16 iatProbFact; /* iat forgetting factor in Q15 */
- WebRtc_UWord32 packetIatCountSamp; /* time (in timestamps) elapsed since last
+ int32_t iatProb[MAX_IAT + 1]; /* iat probabilities in Q30 */
+ int16_t iatProbFact; /* iat forgetting factor in Q15 */
+ uint32_t packetIatCountSamp; /* time (in timestamps) elapsed since last
packet arrival, based on RecOut calls */
int optBufLevel; /* current optimal buffer level in Q8 */
/* Packet related information */
- WebRtc_Word16 packetSpeechLenSamp; /* speech samples per incoming packet */
- WebRtc_Word16 lastPackCNGorDTMF; /* indicates that the last received packet
+ int16_t packetSpeechLenSamp; /* speech samples per incoming packet */
+ int16_t lastPackCNGorDTMF; /* indicates that the last received packet
contained special information */
- WebRtc_UWord16 lastSeqNo; /* sequence number for last packet received */
- WebRtc_UWord32 lastTimeStamp; /* timestamp for the last packet received */
- WebRtc_Word32 sampleMemory; /* memory position for keeping track of how many
+ uint16_t lastSeqNo; /* sequence number for last packet received */
+ uint32_t lastTimeStamp; /* timestamp for the last packet received */
+ int32_t sampleMemory; /* memory position for keeping track of how many
samples we cut during expand */
- WebRtc_Word16 prevTimeScale; /* indicates that the last mode was an accelerate
+ int16_t prevTimeScale; /* indicates that the last mode was an accelerate
or pre-emptive expand operation */
- WebRtc_UWord32 timescaleHoldOff; /* counter that is shifted one step right each
+ uint32_t timescaleHoldOff; /* counter that is shifted one step right each
RecOut call; time-scaling allowed when it has
reached 0 */
- WebRtc_Word16 extraDelayMs; /* extra delay for sync with video */
+ int16_t extraDelayMs; /* extra delay for sync with video */
/* Peak-detection */
/* vector with the latest peak periods (peak spacing in samples) */
- WebRtc_UWord32 peakPeriodSamp[NUM_PEAKS];
+ uint32_t peakPeriodSamp[NUM_PEAKS];
/* vector with the latest peak heights (in packets) */
- WebRtc_Word16 peakHeightPkt[NUM_PEAKS];
- WebRtc_Word16 peakIndex; /* index for the vectors peakPeriodSamp and peakHeightPkt;
+ int16_t peakHeightPkt[NUM_PEAKS];
+ int16_t peakIndex; /* index for the vectors peakPeriodSamp and peakHeightPkt;
-1 if still waiting for first peak */
- WebRtc_UWord16 peakThresholdPkt; /* definition of peak (in packets);
+ uint16_t peakThresholdPkt; /* definition of peak (in packets);
calculated from PEAK_HEIGHT */
- WebRtc_UWord32 peakIatCountSamp; /* samples elapsed since last peak was observed */
- WebRtc_UWord32 curPeakPeriod; /* current maximum of peakPeriodSamp vector */
- WebRtc_Word16 curPeakHeight; /* derived from peakHeightPkt vector;
+ uint32_t peakIatCountSamp; /* samples elapsed since last peak was observed */
+ uint32_t curPeakPeriod; /* current maximum of peakPeriodSamp vector */
+ int16_t curPeakHeight; /* derived from peakHeightPkt vector;
used as optimal buffer level in peak mode */
- WebRtc_Word16 peakModeDisabled; /* ==0 if peak mode can be engaged; >0 if not */
+ int16_t peakModeDisabled; /* ==0 if peak mode can be engaged; >0 if not */
uint16_t peakFound; /* 1 if peaks are detected and extra delay is applied;
* 0 otherwise. */
/* Post-call statistics */
- WebRtc_UWord32 countIAT500ms; /* number of times we got small network outage */
- WebRtc_UWord32 countIAT1000ms; /* number of times we got medium network outage */
- WebRtc_UWord32 countIAT2000ms; /* number of times we got large network outage */
- WebRtc_UWord32 longestIATms; /* mSec duration of longest network outage */
+ uint32_t countIAT500ms; /* number of times we got small network outage */
+ uint32_t countIAT1000ms; /* number of times we got medium network outage */
+ uint32_t countIAT2000ms; /* number of times we got large network outage */
+ uint32_t longestIATms; /* mSec duration of longest network outage */
- WebRtc_Word16 cSumIatQ8; /* cumulative sum of inter-arrival times */
- WebRtc_Word16 maxCSumIatQ8; /* max cumulative sum IAT */
- WebRtc_UWord32 maxCSumUpdateTimer;/* time elapsed since maximum was observed */
+ int16_t cSumIatQ8; /* cumulative sum of inter-arrival times */
+ int16_t maxCSumIatQ8; /* max cumulative sum IAT */
+ uint32_t maxCSumUpdateTimer;/* time elapsed since maximum was observed */
} AutomodeInst_t;
@@ -148,8 +148,8 @@
*/
int WebRtcNetEQ_UpdateIatStatistics(AutomodeInst_t *inst, int maxBufLen,
- WebRtc_UWord16 seqNumber, WebRtc_UWord32 timeStamp,
- WebRtc_Word32 fsHz, int mdCodec, int streamingMode);
+ uint16_t seqNumber, uint32_t timeStamp,
+ int32_t fsHz, int mdCodec, int streamingMode);
/****************************************************************************
* WebRtcNetEQ_CalcOptimalBufLvl(...)
@@ -172,9 +172,9 @@
* <0 - Error
*/
-WebRtc_Word16 WebRtcNetEQ_CalcOptimalBufLvl(AutomodeInst_t *inst, WebRtc_Word32 fsHz,
- int mdCodec, WebRtc_UWord32 timeIatPkts,
- int streamingMode);
+int16_t WebRtcNetEQ_CalcOptimalBufLvl(AutomodeInst_t *inst, int32_t fsHz,
+ int mdCodec, uint32_t timeIatPkts,
+ int streamingMode);
/****************************************************************************
* WebRtcNetEQ_BufferLevelFilter(...)
@@ -197,8 +197,8 @@
* : <0 - Error
*/
-int WebRtcNetEQ_BufferLevelFilter(WebRtc_Word32 curSizeMs8, AutomodeInst_t *inst,
- int sampPerCall, WebRtc_Word16 fsMult);
+int WebRtcNetEQ_BufferLevelFilter(int32_t curSizeMs8, AutomodeInst_t *inst,
+ int sampPerCall, int16_t fsMult);
/****************************************************************************
* WebRtcNetEQ_SetPacketSpeechLen(...)
@@ -220,8 +220,8 @@
* <0 - Error
*/
-int WebRtcNetEQ_SetPacketSpeechLen(AutomodeInst_t *inst, WebRtc_Word16 newLenSamp,
- WebRtc_Word32 fsHz);
+int WebRtcNetEQ_SetPacketSpeechLen(AutomodeInst_t *inst, int16_t newLenSamp,
+ int32_t fsHz);
/****************************************************************************
* WebRtcNetEQ_ResetAutomode(...)
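
Aside: WebRtcNetEQ_SetPacketSpeechLen, whose implementation appears in the automode.c hunks above, converts PEAK_HEIGHT (a duration in Q8 seconds) into a peak threshold in packets as threshold = PEAK_HEIGHT/256 * fsHz / packetLen, with the division by 256 split into >>6 and >>2. A sketch of that conversion, taking the height as a parameter since the numeric value of PEAK_HEIGHT is not visible in these hunks (PeakThresholdPackets is an illustrative name):

#include <stdint.h>

/* Peak threshold in packets from a peak height given in Q8 seconds:
 *   threshold_pkt = (height_q8_sec / 256) * fs_hz / packet_len_samples
 * computed, as in the patch, as ((height_q8_sec * (fs_hz >> 6)) >> 2) / packet_len_samples.
 * packet_len_samples must be positive. */
static uint16_t PeakThresholdPackets(int16_t height_q8_sec, int32_t fs_hz,
                                     int16_t packet_len_samples) {
  int32_t height_samples = ((int32_t)height_q8_sec * (fs_hz >> 6)) >> 2;
  return (uint16_t)(height_samples / packet_len_samples);
}

For example, a height of 0.7 s (179 in Q8) at 16 kHz with 320-sample packets gives 34, roughly 0.7 s worth of 20 ms frames.
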
diff --git a/webrtc/modules/audio_coding/neteq/bgn_update.c b/webrtc/modules/audio_coding/neteq/bgn_update.c
index 05956c2..4d660ff 100644
--- a/webrtc/modules/audio_coding/neteq/bgn_update.c
+++ b/webrtc/modules/audio_coding/neteq/bgn_update.c
@@ -22,10 +22,10 @@
Designed for BGN_LPC_ORDER <= 10
Type Name size startpos endpos
- WebRtc_Word32 pw32_autoCorr 22 0 21 (Length (BGN_LPC_ORDER + 1)*2)
- WebRtc_Word16 pw16_tempVec 10 22 31 (Length BGN_LPC_ORDER)
- WebRtc_Word16 pw16_rc 10 32 41 (Length BGN_LPC_ORDER)
- WebRtc_Word16 pw16_outVec 74 0 73 (Length BGN_LPC_ORDER + 64)
+ int32_t pw32_autoCorr 22 0 21 (Length (BGN_LPC_ORDER + 1)*2)
+ int16_t pw16_tempVec 10 22 31 (Length BGN_LPC_ORDER)
+ int16_t pw16_rc 10 32 41 (Length BGN_LPC_ORDER)
+ int16_t pw16_outVec 74 0 73 (Length BGN_LPC_ORDER + 64)
Total: 74
*/
@@ -58,34 +58,34 @@
void WebRtcNetEQ_BGNUpdate(
#ifdef SCRATCH
- DSPInst_t *inst, WebRtc_Word16 *pw16_scratchPtr
+ DSPInst_t *inst, int16_t *pw16_scratchPtr
#else
DSPInst_t *inst
#endif
)
{
- const WebRtc_Word16 w16_vecLen = 256;
+ const int16_t w16_vecLen = 256;
BGNInst_t *BGN_Inst = &(inst->BGNInst);
#ifdef SCRATCH
- WebRtc_Word32 *pw32_autoCorr = (WebRtc_Word32*) (pw16_scratchPtr + SCRATCH_PW32_AUTO_CORR);
- WebRtc_Word16 *pw16_tempVec = pw16_scratchPtr + SCRATCH_PW16_TEMP_VEC;
- WebRtc_Word16 *pw16_rc = pw16_scratchPtr + SCRATCH_PW16_RC;
- WebRtc_Word16 *pw16_outVec = pw16_scratchPtr + SCRATCH_PW16_OUT_VEC;
+ int32_t *pw32_autoCorr = (int32_t*) (pw16_scratchPtr + SCRATCH_PW32_AUTO_CORR);
+ int16_t *pw16_tempVec = pw16_scratchPtr + SCRATCH_PW16_TEMP_VEC;
+ int16_t *pw16_rc = pw16_scratchPtr + SCRATCH_PW16_RC;
+ int16_t *pw16_outVec = pw16_scratchPtr + SCRATCH_PW16_OUT_VEC;
#else
- WebRtc_Word32 pw32_autoCorr[BGN_LPC_ORDER + 1];
- WebRtc_Word16 pw16_tempVec[BGN_LPC_ORDER];
- WebRtc_Word16 pw16_outVec[BGN_LPC_ORDER + 64];
- WebRtc_Word16 pw16_rc[BGN_LPC_ORDER];
+ int32_t pw32_autoCorr[BGN_LPC_ORDER + 1];
+ int16_t pw16_tempVec[BGN_LPC_ORDER];
+ int16_t pw16_outVec[BGN_LPC_ORDER + 64];
+ int16_t pw16_rc[BGN_LPC_ORDER];
#endif
- WebRtc_Word16 pw16_A[BGN_LPC_ORDER + 1];
- WebRtc_Word32 w32_tmp;
- WebRtc_Word16 *pw16_vec;
- WebRtc_Word16 w16_maxSample;
- WebRtc_Word16 w16_tmp, w16_tmp2;
- WebRtc_Word16 w16_enSampleShift;
- WebRtc_Word32 w32_en, w32_enBGN;
- WebRtc_Word32 w32_enUpdateThreashold;
- WebRtc_Word16 stability;
+ int16_t pw16_A[BGN_LPC_ORDER + 1];
+ int32_t w32_tmp;
+ int16_t *pw16_vec;
+ int16_t w16_maxSample;
+ int16_t w16_tmp, w16_tmp2;
+ int16_t w16_enSampleShift;
+ int32_t w32_en, w32_enBGN;
+ int32_t w32_enUpdateThreashold;
+ int16_t stability;
pw16_vec = inst->pw16_speechHistory + inst->w16_speechHistoryLen - w16_vecLen;
@@ -188,7 +188,7 @@
w32_enBGN = WEBRTC_SPL_SHIFT_W32(w32_enBGN, w16_tmp2);
/* Calculate scale and shift factor */
- BGN_Inst->w16_scale = (WebRtc_Word16) WebRtcSpl_SqrtFloor(w32_enBGN);
+ BGN_Inst->w16_scale = (int16_t) WebRtcSpl_SqrtFloor(w32_enBGN);
BGN_Inst->w16_scaleShift = 13 + ((6 + w16_tmp2) >> 1); /* RANDN table is in Q13, */
/* 6=log2(64) */
@@ -207,13 +207,13 @@
w32_tmp = WEBRTC_SPL_MUL_16_16_RSFT(NETEQFIX_BGNFRAQINCQ16,
BGN_Inst->w32_energyUpdateLow, 16);
w32_tmp += WEBRTC_SPL_MUL_16_16(NETEQFIX_BGNFRAQINCQ16,
- (WebRtc_Word16)(BGN_Inst->w32_energyUpdate & 0xFF));
+ (int16_t)(BGN_Inst->w32_energyUpdate & 0xFF));
w32_tmp += (WEBRTC_SPL_MUL_16_16(NETEQFIX_BGNFRAQINCQ16,
- (WebRtc_Word16)((BGN_Inst->w32_energyUpdate>>8) & 0xFF)) << 8);
+ (int16_t)((BGN_Inst->w32_energyUpdate>>8) & 0xFF)) << 8);
BGN_Inst->w32_energyUpdateLow += w32_tmp;
BGN_Inst->w32_energyUpdate += WEBRTC_SPL_MUL_16_16(NETEQFIX_BGNFRAQINCQ16,
- (WebRtc_Word16)(BGN_Inst->w32_energyUpdate>>16));
+ (int16_t)(BGN_Inst->w32_energyUpdate>>16));
BGN_Inst->w32_energyUpdate += BGN_Inst->w32_energyUpdateLow >> 16;
BGN_Inst->w32_energyUpdateLow = (BGN_Inst->w32_energyUpdateLow & 0x0FFFF);
diff --git a/webrtc/modules/audio_coding/neteq/buffer_stats.h b/webrtc/modules/audio_coding/neteq/buffer_stats.h
index 9820519..722f477 100644
--- a/webrtc/modules/audio_coding/neteq/buffer_stats.h
+++ b/webrtc/modules/audio_coding/neteq/buffer_stats.h
@@ -51,13 +51,13 @@
{
/* store statistical data here */
- WebRtc_Word16 w16_cngOn; /* remember if CNG is interrupted by other event (e.g. DTMF) */
- WebRtc_Word16 w16_noExpand;
- WebRtc_Word32 uw32_CNGplayedTS;
+ int16_t w16_cngOn; /* remember if CNG is interrupted by other event (e.g. DTMF) */
+ int16_t w16_noExpand;
+ int32_t uw32_CNGplayedTS;
/* VQmon data */
- WebRtc_UWord16 avgDelayMsQ8;
- WebRtc_Word16 maxDelayMs;
+ uint16_t avgDelayMsQ8;
+ int16_t maxDelayMs;
AutomodeInst_t Automode_inst;
@@ -83,13 +83,13 @@
*
*/
-WebRtc_UWord16 WebRtcNetEQ_BufstatsDecision(BufstatsInst_t *inst, WebRtc_Word16 frameSize,
- WebRtc_Word32 cur_size, WebRtc_UWord32 targetTS,
- WebRtc_UWord32 availableTS, int noPacket,
- int cngPacket, int prevPlayMode,
- enum WebRtcNetEQPlayoutMode playoutMode,
- int timestampsPerCall, int NoOfExpandCalls,
- WebRtc_Word16 fs_mult,
- WebRtc_Word16 lastModeBGNonly, int playDtmf);
+uint16_t WebRtcNetEQ_BufstatsDecision(BufstatsInst_t *inst, int16_t frameSize,
+ int32_t cur_size, uint32_t targetTS,
+ uint32_t availableTS, int noPacket,
+ int cngPacket, int prevPlayMode,
+ enum WebRtcNetEQPlayoutMode playoutMode,
+ int timestampsPerCall, int NoOfExpandCalls,
+ int16_t fs_mult,
+ int16_t lastModeBGNonly, int playDtmf);
#endif
diff --git a/webrtc/modules/audio_coding/neteq/bufstats_decision.c b/webrtc/modules/audio_coding/neteq/bufstats_decision.c
index cf7c0b0..352e050 100644
--- a/webrtc/modules/audio_coding/neteq/bufstats_decision.c
+++ b/webrtc/modules/audio_coding/neteq/bufstats_decision.c
@@ -26,22 +26,22 @@
#define NETEQ_BUFSTAT_20MS_Q7 2560 /* = 20 ms in Q7 */
-WebRtc_UWord16 WebRtcNetEQ_BufstatsDecision(BufstatsInst_t *inst, WebRtc_Word16 frameSize,
- WebRtc_Word32 cur_size, WebRtc_UWord32 targetTS,
- WebRtc_UWord32 availableTS, int noPacket,
- int cngPacket, int prevPlayMode,
- enum WebRtcNetEQPlayoutMode playoutMode,
- int timestampsPerCall, int NoOfExpandCalls,
- WebRtc_Word16 fs_mult,
- WebRtc_Word16 lastModeBGNonly, int playDtmf)
+uint16_t WebRtcNetEQ_BufstatsDecision(BufstatsInst_t *inst, int16_t frameSize,
+ int32_t cur_size, uint32_t targetTS,
+ uint32_t availableTS, int noPacket,
+ int cngPacket, int prevPlayMode,
+ enum WebRtcNetEQPlayoutMode playoutMode,
+ int timestampsPerCall, int NoOfExpandCalls,
+ int16_t fs_mult,
+ int16_t lastModeBGNonly, int playDtmf)
{
int currentDelayMs;
- WebRtc_Word32 currSizeSamples = cur_size;
+ int32_t currSizeSamples = cur_size;
int extraDelayPacketsQ8 = 0;
/* Avoid overflow if the buffer size should be really large (cur_size is limited 256ms) */
- WebRtc_Word32 curr_sizeQ7 = WEBRTC_SPL_LSHIFT_W32(cur_size, 4);
+ int32_t curr_sizeQ7 = WEBRTC_SPL_LSHIFT_W32(cur_size, 4);
int level_limit_hi, level_limit_lo;
inst->Automode_inst.prevTimeScale &= (prevPlayMode == MODE_SUCCESS_ACCELERATE
@@ -68,7 +68,7 @@
/* Calculate VQmon related variables */
/* avgDelay = avgDelay*(511/512) + currentDelay*(1/512) (sample ms delay in Q8) */
- inst->avgDelayMsQ8 = (WebRtc_Word16) (WEBRTC_SPL_MUL_16_16_RSFT(inst->avgDelayMsQ8,511,9)
+ inst->avgDelayMsQ8 = (int16_t) (WEBRTC_SPL_MUL_16_16_RSFT(inst->avgDelayMsQ8,511,9)
+ (cur_size >> 9));
/* Update maximum delay if needed */
@@ -106,7 +106,7 @@
if (cngPacket)
{
/* signed difference between wanted and available TS */
- WebRtc_Word32 diffTS = (inst->uw32_CNGplayedTS + targetTS) - availableTS;
+ int32_t diffTS = (inst->uw32_CNGplayedTS + targetTS) - availableTS;
int32_t optimal_level_samp = (inst->Automode_inst.optBufLevel *
inst->Automode_inst.packetSpeechLenSamp) >> 8;
int32_t excess_waiting_time_samp = -diffTS - optimal_level_samp;
@@ -225,13 +225,13 @@
/* Check that we do not play a packet "too early" */
if ((prevPlayMode == MODE_EXPAND)
&& (availableTS - targetTS
- < (WebRtc_UWord32) WEBRTC_SPL_MUL_16_16((WebRtc_Word16)timestampsPerCall,
- (WebRtc_Word16)REINIT_AFTER_EXPANDS))
+ < (uint32_t) WEBRTC_SPL_MUL_16_16((int16_t)timestampsPerCall,
+ (int16_t)REINIT_AFTER_EXPANDS))
&& (NoOfExpandCalls < MAX_WAIT_FOR_PACKET)
&& (availableTS
> targetTS
- + WEBRTC_SPL_MUL_16_16((WebRtc_Word16)timestampsPerCall,
- (WebRtc_Word16)NoOfExpandCalls))
+ + WEBRTC_SPL_MUL_16_16((int16_t)timestampsPerCall,
+ (int16_t)NoOfExpandCalls))
&& (inst->Automode_inst.buffLevelFilt <= inst->Automode_inst.optBufLevel
+ extraDelayPacketsQ8))
{
@@ -256,7 +256,7 @@
* precaution), but make sure that the number of samples in buffer is no
* higher than 4 times the optimal level.
*/
- WebRtc_Word32 diffTS = (inst->uw32_CNGplayedTS + targetTS) - availableTS;
+ int32_t diffTS = (inst->uw32_CNGplayedTS + targetTS) - availableTS;
int val = ((inst->Automode_inst.optBufLevel +
extraDelayPacketsQ8) *
inst->Automode_inst.packetSpeechLenSamp) >> 6;
@@ -310,7 +310,7 @@
{ /* kPlayoutOff or kPlayoutFax */
if (cngPacket)
{
- if (((WebRtc_Word32) ((inst->uw32_CNGplayedTS + targetTS) - availableTS)) >= 0)
+ if (((int32_t) ((inst->uw32_CNGplayedTS + targetTS) - availableTS)) >= 0)
{
/* time to play this packet now */
return BUFSTATS_DO_RFC3389CNG_PACKET;
@@ -363,7 +363,7 @@
}
else
{
- if (((WebRtc_Word32) ((inst->uw32_CNGplayedTS + targetTS) - availableTS)) >= 0)
+ if (((int32_t) ((inst->uw32_CNGplayedTS + targetTS) - availableTS)) >= 0)
{
return BUFSTATS_DO_NORMAL;
}
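Both branches above use the standard trick for comparing 32-bit RTP timestamps that may wrap around: subtract and reinterpret the difference as signed, so a result >= 0 means the first timestamp has reached the second, as long as the two are within 2^31 ticks of each other. A minimal sketch of that predicate:

    /* Wraparound-safe "a is at or after b" test for 32-bit timestamps;
       valid while |a - b| < 2^31. */
    static int timestamp_reached(uint32_t a, uint32_t b) {
      return (int32_t)(a - b) >= 0;
    }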
diff --git a/webrtc/modules/audio_coding/neteq/cng_internal.c b/webrtc/modules/audio_coding/neteq/cng_internal.c
index f3a10dc..cb4878f 100644
--- a/webrtc/modules/audio_coding/neteq/cng_internal.c
+++ b/webrtc/modules/audio_coding/neteq/cng_internal.c
@@ -43,12 +43,12 @@
#ifdef NETEQ_CNG_CODEC
/* Must compile NetEQ with CNG support to enable this function */
-int WebRtcNetEQ_Cng(DSPInst_t *inst, WebRtc_Word16 *pw16_outData, int len)
+int WebRtcNetEQ_Cng(DSPInst_t *inst, int16_t *pw16_outData, int len)
{
- WebRtc_Word16 w16_winMute = 0; /* mixing factor for overlap data */
- WebRtc_Word16 w16_winUnMute = 0; /* mixing factor for comfort noise */
- WebRtc_Word16 w16_winMuteInc = 0; /* mixing factor increment (negative) */
- WebRtc_Word16 w16_winUnMuteInc = 0; /* mixing factor increment */
+ int16_t w16_winMute = 0; /* mixing factor for overlap data */
+ int16_t w16_winUnMute = 0; /* mixing factor for comfort noise */
+ int16_t w16_winMuteInc = 0; /* mixing factor increment (negative) */
+ int16_t w16_winUnMuteInc = 0; /* mixing factor increment */
int i;
/*
@@ -61,7 +61,7 @@
/* Generate len samples + overlap */
if (WebRtcCng_Generate(inst->CNG_Codec_inst, pw16_outData,
- (WebRtc_Word16) (len + inst->ExpandInst.w16_overlap), 1) < 0)
+ (int16_t) (len + inst->ExpandInst.w16_overlap), 1) < 0)
{
/* error returned */
return -WebRtcCng_GetErrorCodeDec(inst->CNG_Codec_inst);
@@ -116,7 +116,7 @@
for (i = 0; i < inst->ExpandInst.w16_overlap; i++)
{
/* overlapVec[i] = WinMute * overlapVec[i] + WinUnMute * outData[i] */
- inst->ExpandInst.pw16_overlapVec[i] = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(
+ inst->ExpandInst.pw16_overlapVec[i] = (int16_t) WEBRTC_SPL_RSHIFT_W32(
WEBRTC_SPL_MUL_16_16(
inst->ExpandInst.pw16_overlapVec[i], w16_winMute) +
WEBRTC_SPL_MUL_16_16(pw16_outData[i], w16_winUnMute)
@@ -140,7 +140,7 @@
/* This is a subsequent CNG call; no special overlap needed */
/* Generate len samples */
- if (WebRtcCng_Generate(inst->CNG_Codec_inst, pw16_outData, (WebRtc_Word16) len, 0) < 0)
+ if (WebRtcCng_Generate(inst->CNG_Codec_inst, pw16_outData, (int16_t) len, 0) < 0)
{
/* error returned */
return -WebRtcCng_GetErrorCodeDec(inst->CNG_Codec_inst);
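On the first CNG call the generated noise is cross-faded with the tail of the previous output: the overlap region is mixed with a ramp that mutes the old signal (w16_winMute) while unmuting the comfort noise (w16_winUnMute). A simplified sketch of such a linear crossfade in Q14; the actual window values and shift come from NetEQ's mute/unmute tables, so treat the ramp shape here as an assumption:

    /* Linear crossfade over 'overlap' samples, gains in Q14 summing to 16384. */
    static void crossfade_q14(int16_t* old_tail, const int16_t* new_sig,
                              int overlap) {
      for (int i = 0; i < overlap; i++) {
        int32_t w_new = ((i + 1) << 14) / (overlap + 1); /* ramp up   */
        int32_t w_old = 16384 - w_new;                   /* ramp down */
        old_tail[i] = (int16_t)((w_old * old_tail[i] +
                                 w_new * new_sig[i] + 8192) >> 14);
      }
    }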
diff --git a/webrtc/modules/audio_coding/neteq/codec_db.c b/webrtc/modules/audio_coding/neteq/codec_db.c
index c3fc940..8d8389a 100644
--- a/webrtc/modules/audio_coding/neteq/codec_db.c
+++ b/webrtc/modules/audio_coding/neteq/codec_db.c
@@ -28,8 +28,8 @@
{
int i;
- WebRtcSpl_MemSetW16((WebRtc_Word16*) inst, 0,
- sizeof(CodecDbInst_t) / sizeof(WebRtc_Word16));
+ WebRtcSpl_MemSetW16((int16_t*) inst, 0,
+ sizeof(CodecDbInst_t) / sizeof(int16_t));
for (i = 0; i < NUM_TOTAL_CODECS; i++)
{
@@ -54,13 +54,13 @@
*/
int WebRtcNetEQ_DbAdd(CodecDbInst_t *inst, enum WebRtcNetEQDecoder codec,
- WebRtc_Word16 payloadType, FuncDecode funcDecode,
+ int16_t payloadType, FuncDecode funcDecode,
FuncDecode funcDecodeRCU, FuncDecodePLC funcDecodePLC,
FuncDecodeInit funcDecodeInit, FuncAddLatePkt funcAddLatePkt,
FuncGetMDinfo funcGetMDinfo, FuncGetPitchInfo funcGetPitch,
FuncUpdBWEst funcUpdBWEst, FuncDurationEst funcDurationEst,
FuncGetErrorCode funcGetErrorCode, void* codec_state,
- WebRtc_UWord16 codec_fs)
+ uint16_t codec_fs)
{
int temp;
@@ -404,8 +404,8 @@
}
else
{
- WebRtcSpl_MemSetW16((WebRtc_Word16*) ptr_inst, 0,
- sizeof(CodecFuncInst_t) / sizeof(WebRtc_Word16));
+ WebRtcSpl_MemSetW16((int16_t*) ptr_inst, 0,
+ sizeof(CodecFuncInst_t) / sizeof(int16_t));
return CODEC_DB_NOT_EXIST1;
}
}
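The error path clears the returned codec entry by treating the struct as an array of 16-bit words, the same idiom used in the first hunk of this file. A plain-C equivalent (with <string.h>), assuming the struct needs nothing other than all-zero bits and has an even size:

    memset(ptr_inst, 0, sizeof(CodecFuncInst_t));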
@@ -723,7 +723,7 @@
/*
* Return the sample rate for the codec with the given payload type, 0 if error
*/
-WebRtc_UWord16 WebRtcNetEQ_DbGetSampleRate(CodecDbInst_t *inst, int payloadType)
+uint16_t WebRtcNetEQ_DbGetSampleRate(CodecDbInst_t *inst, int payloadType)
{
int i;
CodecFuncInst_t codecInst;
diff --git a/webrtc/modules/audio_coding/neteq/codec_db.h b/webrtc/modules/audio_coding/neteq/codec_db.h
index 102dec8..cc4b48e 100644
--- a/webrtc/modules/audio_coding/neteq/codec_db.h
+++ b/webrtc/modules/audio_coding/neteq/codec_db.h
@@ -34,10 +34,10 @@
typedef struct
{
- WebRtc_Word16 position[NUM_TOTAL_CODECS];
- WebRtc_Word16 nrOfCodecs;
+ int16_t position[NUM_TOTAL_CODECS];
+ int16_t nrOfCodecs;
- WebRtc_Word16 payloadType[NUM_CODECS];
+ int16_t payloadType[NUM_CODECS];
FuncDecode funcDecode[NUM_CODECS];
FuncDecode funcDecodeRCU[NUM_CODECS];
FuncDecodePLC funcDecodePLC[NUM_CODECS];
@@ -49,8 +49,8 @@
FuncDurationEst funcDurationEst[NUM_CODECS];
FuncGetErrorCode funcGetErrorCode[NUM_CODECS];
void * codec_state[NUM_CODECS];
- WebRtc_UWord16 codec_fs[NUM_CODECS];
- WebRtc_Word16 CNGpayloadType[NUM_CNG_CODECS];
+ uint16_t codec_fs[NUM_CODECS];
+ int16_t CNGpayloadType[NUM_CNG_CODECS];
} CodecDbInst_t;
@@ -58,8 +58,8 @@
typedef struct
{
- WebRtc_Word16 deltaBytes;
- WebRtc_Word16 deltaTime;
+ int16_t deltaBytes;
+ int16_t deltaTime;
} SplitInfo_t;
/*
@@ -71,13 +71,13 @@
* Adds a new codec to the database.
*/
int WebRtcNetEQ_DbAdd(CodecDbInst_t *inst, enum WebRtcNetEQDecoder codec,
- WebRtc_Word16 payloadType, FuncDecode funcDecode,
+ int16_t payloadType, FuncDecode funcDecode,
FuncDecode funcDecodeRCU, FuncDecodePLC funcDecodePLC,
FuncDecodeInit funcDecodeInit, FuncAddLatePkt funcAddLatePkt,
FuncGetMDinfo funcGetMDinfo, FuncGetPitchInfo funcGetPitch,
FuncUpdBWEst funcUpdBWEst, FuncDurationEst funcDurationEst,
FuncGetErrorCode funcGetErrorCode, void* codec_state,
- WebRtc_UWord16 codec_fs);
+ uint16_t codec_fs);
/*
* Removes a codec from the database.
@@ -122,7 +122,7 @@
/*
* Return the sample rate for the codec with the given payload type, 0 if error.
*/
-WebRtc_UWord16 WebRtcNetEQ_DbGetSampleRate(CodecDbInst_t *inst, int payloadType);
+uint16_t WebRtcNetEQ_DbGetSampleRate(CodecDbInst_t *inst, int payloadType);
#endif
diff --git a/webrtc/modules/audio_coding/neteq/codec_db_defines.h b/webrtc/modules/audio_coding/neteq/codec_db_defines.h
index ffe1e85..d97306a 100644
--- a/webrtc/modules/audio_coding/neteq/codec_db_defines.h
+++ b/webrtc/modules/audio_coding/neteq/codec_db_defines.h
@@ -23,45 +23,45 @@
/*
* Pointer to decoder function.
*/
-typedef WebRtc_Word16 (*FuncDecode)(void* state, WebRtc_Word16* encoded, WebRtc_Word16 len,
- WebRtc_Word16* decoded, WebRtc_Word16* speechType);
+typedef int16_t (*FuncDecode)(void* state, int16_t* encoded, int16_t len,
+ int16_t* decoded, int16_t* speechType);
/*
* Pointer to PLC function.
*/
-typedef WebRtc_Word16 (*FuncDecodePLC)(void* state, WebRtc_Word16* decodec,
- WebRtc_Word16 frames);
+typedef int16_t (*FuncDecodePLC)(void* state, int16_t* decodec,
+ int16_t frames);
/*
* Pointer to decoder init function.
*/
-typedef WebRtc_Word16 (*FuncDecodeInit)(void* state);
+typedef int16_t (*FuncDecodeInit)(void* state);
/*
* Pointer to add late packet function.
*/
-typedef WebRtc_Word16
- (*FuncAddLatePkt)(void* state, WebRtc_Word16* encoded, WebRtc_Word16 len);
+typedef int16_t
+ (*FuncAddLatePkt)(void* state, int16_t* encoded, int16_t len);
/*
* Pointer to get MD info function.
*/
-typedef WebRtc_Word16 (*FuncGetMDinfo)(void* state);
+typedef int16_t (*FuncGetMDinfo)(void* state);
/*
* Pointer to pitch info function.
* Return 0 for unvoiced, -1 if pitch not available.
*/
-typedef WebRtc_Word16 (*FuncGetPitchInfo)(void* state, WebRtc_Word16* encoded,
- WebRtc_Word16* length);
+typedef int16_t (*FuncGetPitchInfo)(void* state, int16_t* encoded,
+ int16_t* length);
/*
* Pointer to the update bandwidth estimate function
*/
-typedef WebRtc_Word16 (*FuncUpdBWEst)(void* state, const WebRtc_UWord16 *encoded,
- WebRtc_Word32 packet_size,
- WebRtc_UWord16 rtp_seq_number, WebRtc_UWord32 send_ts,
- WebRtc_UWord32 arr_ts);
+typedef int16_t (*FuncUpdBWEst)(void* state, const uint16_t *encoded,
+ int32_t packet_size,
+ uint16_t rtp_seq_number, uint32_t send_ts,
+ uint32_t arr_ts);
/*
* Pointer to the frame size estimate function.
@@ -73,7 +73,7 @@
/*
* Pointer to error code function
*/
-typedef WebRtc_Word16 (*FuncGetErrorCode)(void* state);
+typedef int16_t (*FuncGetErrorCode)(void* state);
typedef struct CodecFuncInst_t_
{
@@ -88,8 +88,8 @@
FuncDurationEst funcDurationEst;
FuncGetErrorCode funcGetErrorCode;
void * codec_state;
- WebRtc_UWord16 codec_fs;
- WebRtc_UWord32 timeStamp;
+ uint16_t codec_fs;
+ uint32_t timeStamp;
} CodecFuncInst_t;
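The codec database stores one set of these function pointers per registered decoder. As an illustration only (the pass-through "decoder" below is hypothetical and not part of this patch), a decode callback matching the FuncDecode typedef above looks like this and would be passed as the funcDecode argument of WebRtcNetEQ_DbAdd together with its init/PLC companions:

    /* Hypothetical FuncDecode implementation that just copies 16-bit PCM
       through; speechType = 1 is assumed to mean normal speech here. */
    static int16_t PassthroughDecode(void* state, int16_t* encoded, int16_t len,
                                     int16_t* decoded, int16_t* speechType) {
      (void)state;
      for (int16_t i = 0; i < len; i++) decoded[i] = encoded[i];
      *speechType = 1;
      return len;               /* number of decoded samples */
    }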
diff --git a/webrtc/modules/audio_coding/neteq/correlator.c b/webrtc/modules/audio_coding/neteq/correlator.c
index 97c41da..0a4404a 100644
--- a/webrtc/modules/audio_coding/neteq/correlator.c
+++ b/webrtc/modules/audio_coding/neteq/correlator.c
@@ -17,9 +17,9 @@
/* Scratch usage:
Type Name size startpos endpos
- WebRtc_Word16 pw16_corrVec 62 0 61
- WebRtc_Word16 pw16_data_ds 124 0 123
- WebRtc_Word32 pw32_corr 2*54 124 231
+ int16_t pw16_corrVec 62 0 61
+ int16_t pw16_data_ds 124 0 123
+ int32_t pw32_corr 2*54 124 231
Total: 232
*/
@@ -30,48 +30,48 @@
#define NETEQ_CORRELATOR_DSVECLEN 124 /* 124 = 60 + 10 + 54 */
-WebRtc_Word16 WebRtcNetEQ_Correlator(DSPInst_t *inst,
+int16_t WebRtcNetEQ_Correlator(DSPInst_t *inst,
#ifdef SCRATCH
- WebRtc_Word16 *pw16_scratchPtr,
+ int16_t *pw16_scratchPtr,
#endif
- WebRtc_Word16 *pw16_data,
- WebRtc_Word16 w16_dataLen,
- WebRtc_Word16 *pw16_corrOut,
- WebRtc_Word16 *pw16_corrScale)
+ int16_t *pw16_data,
+ int16_t w16_dataLen,
+ int16_t *pw16_corrOut,
+ int16_t *pw16_corrScale)
{
- WebRtc_Word16 w16_corrLen = 60;
+ int16_t w16_corrLen = 60;
#ifdef SCRATCH
- WebRtc_Word16 *pw16_data_ds = pw16_scratchPtr + SCRATCH_pw16_corrVec;
- WebRtc_Word32 *pw32_corr = (WebRtc_Word32*) (pw16_scratchPtr + SCRATCH_pw32_corr);
- /* WebRtc_Word16 *pw16_corrVec = pw16_scratchPtr + SCRATCH_pw16_corrVec;*/
+ int16_t *pw16_data_ds = pw16_scratchPtr + SCRATCH_pw16_corrVec;
+ int32_t *pw32_corr = (int32_t*) (pw16_scratchPtr + SCRATCH_pw32_corr);
+ /* int16_t *pw16_corrVec = pw16_scratchPtr + SCRATCH_pw16_corrVec;*/
#else
- WebRtc_Word16 pw16_data_ds[NETEQ_CORRELATOR_DSVECLEN];
- WebRtc_Word32 pw32_corr[54];
- /* WebRtc_Word16 pw16_corrVec[4+54+4];*/
+ int16_t pw16_data_ds[NETEQ_CORRELATOR_DSVECLEN];
+ int32_t pw32_corr[54];
+ /* int16_t pw16_corrVec[4+54+4];*/
#endif
- /* WebRtc_Word16 *pw16_corr=&pw16_corrVec[4];*/
- WebRtc_Word16 w16_maxVal;
- WebRtc_Word32 w32_maxVal;
- WebRtc_Word16 w16_normVal;
- WebRtc_Word16 w16_normVal2;
- /* WebRtc_Word16 w16_corrUpsLen;*/
- WebRtc_Word16 *pw16_B = NULL;
- WebRtc_Word16 w16_Blen = 0;
- WebRtc_Word16 w16_factor = 0;
+ /* int16_t *pw16_corr=&pw16_corrVec[4];*/
+ int16_t w16_maxVal;
+ int32_t w32_maxVal;
+ int16_t w16_normVal;
+ int16_t w16_normVal2;
+ /* int16_t w16_corrUpsLen;*/
+ int16_t *pw16_B = NULL;
+ int16_t w16_Blen = 0;
+ int16_t w16_factor = 0;
/* Set constants depending on frequency used */
if (inst->fs == 8000)
{
w16_Blen = 3;
w16_factor = 2;
- pw16_B = (WebRtc_Word16*) WebRtcNetEQ_kDownsample8kHzTbl;
+ pw16_B = (int16_t*) WebRtcNetEQ_kDownsample8kHzTbl;
#ifdef NETEQ_WIDEBAND
}
else if (inst->fs==16000)
{
w16_Blen = 5;
w16_factor = 4;
- pw16_B = (WebRtc_Word16*)WebRtcNetEQ_kDownsample16kHzTbl;
+ pw16_B = (int16_t*)WebRtcNetEQ_kDownsample16kHzTbl;
#endif
#ifdef NETEQ_32KHZ_WIDEBAND
}
@@ -79,7 +79,7 @@
{
w16_Blen = 7;
w16_factor = 8;
- pw16_B = (WebRtc_Word16*)WebRtcNetEQ_kDownsample32kHzTbl;
+ pw16_B = (int16_t*)WebRtcNetEQ_kDownsample32kHzTbl;
#endif
#ifdef NETEQ_48KHZ_WIDEBAND
}
@@ -87,19 +87,19 @@
{
w16_Blen = 7;
w16_factor = 12;
- pw16_B = (WebRtc_Word16*)WebRtcNetEQ_kDownsample48kHzTbl;
+ pw16_B = (int16_t*)WebRtcNetEQ_kDownsample48kHzTbl;
#endif
}
/* Downsample data in order to work on a 4 kHz sampled signal */
WebRtcSpl_DownsampleFast(
pw16_data + w16_dataLen - (NETEQ_CORRELATOR_DSVECLEN * w16_factor),
- (WebRtc_Word16) (NETEQ_CORRELATOR_DSVECLEN * w16_factor), pw16_data_ds,
- NETEQ_CORRELATOR_DSVECLEN, pw16_B, w16_Blen, w16_factor, (WebRtc_Word16) 0);
+ (int16_t) (NETEQ_CORRELATOR_DSVECLEN * w16_factor), pw16_data_ds,
+ NETEQ_CORRELATOR_DSVECLEN, pw16_B, w16_Blen, w16_factor, (int16_t) 0);
/* Normalize downsampled vector to use the entire 16-bit range */
/* Normalize downsampled vector to use the entire 16-bit range */
w16_maxVal = WebRtcSpl_MaxAbsValueW16(pw16_data_ds, 124);
- w16_normVal = 16 - WebRtcSpl_NormW32((WebRtc_Word32) w16_maxVal);
+ w16_normVal = 16 - WebRtcSpl_NormW32((int32_t) w16_maxVal);
WebRtcSpl_VectorBitShiftW16(pw16_data_ds, NETEQ_CORRELATOR_DSVECLEN, pw16_data_ds,
w16_normVal);
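The normalization step above rescales the downsampled vector so that its largest magnitude lands in the [2^14, 2^15) range, which keeps precision while leaving headroom for the correlation sums that follow. A rough plain-C sketch of the same idea (the behaviour of the WebRtcSpl helpers is read off the code above, not from their documentation):

    /* Shift so that max|x| ends up in [2^14, 2^15); a positive shift moves
       right (attenuates), a negative one moves left (amplifies). */
    static void normalize_to_15bits(int16_t* x, int len) {
      int32_t max_abs = 1;
      for (int i = 0; i < len; i++) {
        int32_t a = x[i] >= 0 ? x[i] : -(int32_t)x[i];
        if (a > max_abs) max_abs = a;
      }
      int shift = -14;                          /* shift = floor(log2(max)) - 14 */
      while (max_abs > 1) { max_abs >>= 1; shift++; }
      for (int i = 0; i < len; i++)
        x[i] = (int16_t)(shift >= 0 ? x[i] >> shift : (int32_t)x[i] << -shift);
    }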
diff --git a/webrtc/modules/audio_coding/neteq/dsp.c b/webrtc/modules/audio_coding/neteq/dsp.c
index 82c54ea..e840ad1 100644
--- a/webrtc/modules/audio_coding/neteq/dsp.c
+++ b/webrtc/modules/audio_coding/neteq/dsp.c
@@ -24,40 +24,40 @@
Coefficients are in Q12. */
/* {0.3, 0.4, 0.3} */
-const WebRtc_Word16 WebRtcNetEQ_kDownsample8kHzTbl[] = { 1229, 1638, 1229 };
+const int16_t WebRtcNetEQ_kDownsample8kHzTbl[] = { 1229, 1638, 1229 };
#ifdef NETEQ_WIDEBAND
/* {0.15, 0.2, 0.3, 0.2, 0.15} */
-const WebRtc_Word16 WebRtcNetEQ_kDownsample16kHzTbl[] =
+const int16_t WebRtcNetEQ_kDownsample16kHzTbl[] =
{ 614, 819, 1229, 819, 614};
#endif
#ifdef NETEQ_32KHZ_WIDEBAND
/* {0.1425, 0.1251, 0.1525, 0.1628, 0.1525, 0.1251, 0.1425} */
-const WebRtc_Word16 WebRtcNetEQ_kDownsample32kHzTbl[] =
+const int16_t WebRtcNetEQ_kDownsample32kHzTbl[] =
{ 584, 512, 625, 667, 625, 512, 584};
#endif
#ifdef NETEQ_48KHZ_WIDEBAND
/* {0.2487, 0.0952, 0.1042, 0.1074, 0.1042, 0.0952, 0.2487} */
-const WebRtc_Word16 WebRtcNetEQ_kDownsample48kHzTbl[] =
+const int16_t WebRtcNetEQ_kDownsample48kHzTbl[] =
{ 1019, 390, 427, 440, 427, 390, 1019};
#endif
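The Q12 coefficients are the floating-point taps multiplied by 2^12 = 4096 and rounded: 0.3*4096 = 1228.8 -> 1229 and 0.4*4096 = 1638.4 -> 1638, which is exactly the {1229, 1638, 1229} narrowband table above; the wideband tables follow the same conversion (e.g. 0.15*4096 -> 614, 0.2*4096 -> 819).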
/* Constants used in expand function WebRtcNetEQ_Expand */
/* Q12: -1.264421 + 4.8659148*x - 4.0092827*x^2 + 1.4100529*x^3 */
-const WebRtc_Word16 WebRtcNetEQ_kMixFractionFuncTbl[4] = { -5179, 19931, -16422, 5776 };
+const int16_t WebRtcNetEQ_kMixFractionFuncTbl[4] = { -5179, 19931, -16422, 5776 };
/* Tabulated divisions to save complexity */
/* 1049/{0, .., 6} */
-const WebRtc_Word16 WebRtcNetEQ_k1049div[7] = { 0, 1049, 524, 349, 262, 209, 174 };
+const int16_t WebRtcNetEQ_k1049div[7] = { 0, 1049, 524, 349, 262, 209, 174 };
/* 2097/{0, .., 6} */
-const WebRtc_Word16 WebRtcNetEQ_k2097div[7] = { 0, 2097, 1048, 699, 524, 419, 349 };
+const int16_t WebRtcNetEQ_k2097div[7] = { 0, 2097, 1048, 699, 524, 419, 349 };
/* 5243/{0, .., 6} */
-const WebRtc_Word16 WebRtcNetEQ_k5243div[7] = { 0, 5243, 2621, 1747, 1310, 1048, 873 };
+const int16_t WebRtcNetEQ_k5243div[7] = { 0, 5243, 2621, 1747, 1310, 1048, 873 };
#ifdef WEBRTC_NETEQ_40BITACC_TEST
/*
@@ -89,17 +89,17 @@
* - crossCorr : The cross correlation in Q-rShift
*/
-void WebRtcNetEQ_40BitAccCrossCorr(WebRtc_Word32 *crossCorr,
- WebRtc_Word16 *seq1,
- WebRtc_Word16 *seq2,
- WebRtc_Word16 dimSeq,
- WebRtc_Word16 dimCrossCorr,
- WebRtc_Word16 rShift,
- WebRtc_Word16 step_seq2)
+void WebRtcNetEQ_40BitAccCrossCorr(int32_t *crossCorr,
+ int16_t *seq1,
+ int16_t *seq2,
+ int16_t dimSeq,
+ int16_t dimCrossCorr,
+ int16_t rShift,
+ int16_t step_seq2)
{
int i, j;
- WebRtc_Word16 *seq1Ptr, *seq2Ptr;
- WebRtc_Word64 acc;
+ int16_t *seq1Ptr, *seq2Ptr;
+ int64_t acc;
for (i = 0; i < dimCrossCorr; i++)
{
@@ -117,7 +117,7 @@
seq2Ptr++;
}
- (*crossCorr) = (WebRtc_Word32) (acc >> rShift);
+ (*crossCorr) = (int32_t) (acc >> rShift);
crossCorr++;
}
}
@@ -125,7 +125,7 @@
/****************************************************************************
* WebRtcNetEQ_40BitAccDotW16W16(...)
*
- * Calculates the dot product between two vectors (WebRtc_Word16)
+ * Calculates the dot product between two vectors (int16_t)
*
* Input:
* - vector1 : Vector 1
@@ -137,14 +137,14 @@
* Return value : The dot product
*/
-WebRtc_Word32 WebRtcNetEQ_40BitAccDotW16W16(WebRtc_Word16 *vector1,
- WebRtc_Word16 *vector2,
+int32_t WebRtcNetEQ_40BitAccDotW16W16(int16_t *vector1,
+ int16_t *vector2,
int len,
int scaling)
{
- WebRtc_Word32 sum;
+ int32_t sum;
int i;
- WebRtc_Word64 acc;
+ int64_t acc;
acc = 0;
for (i = 0; i < len; i++)
@@ -152,7 +152,7 @@
acc += WEBRTC_SPL_MUL_16_16(*vector1++, *vector2++);
}
- sum = (WebRtc_Word32) (acc >> scaling);
+ sum = (int32_t) (acc >> scaling);
return(sum);
}
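The 64-bit accumulator is what makes these "40-bit" test variants safe: each 16x16 product can reach 2^30 (32768*32768), so a plain 32-bit signed sum can already overflow after two worst-case terms, and a 256-term sum needs up to 38 bits. Accumulating in int64_t and shifting down afterwards preserves the full sum.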
@@ -175,11 +175,11 @@
* : non-zero - error
*/
-int WebRtcNetEQ_DSPInit(DSPInst_t *inst, WebRtc_UWord16 fs)
+int WebRtcNetEQ_DSPInit(DSPInst_t *inst, uint16_t fs)
{
int res = 0;
- WebRtc_Word16 fs_mult;
+ int16_t fs_mult;
/* Pointers and values to save before clearing the instance */
#ifdef NETEQ_CNG_CODEC
@@ -193,22 +193,22 @@
VADInitFunction savedVADinit = inst->VADInst.initFunction;
VADSetmodeFunction savedVADsetmode = inst->VADInst.setmodeFunction;
VADFunction savedVADfunc = inst->VADInst.VADFunction;
- WebRtc_Word16 savedVADEnabled = inst->VADInst.VADEnabled;
+ int16_t savedVADEnabled = inst->VADInst.VADEnabled;
int savedVADMode = inst->VADInst.VADMode;
#endif /* NETEQ_VAD */
DSPStats_t saveStats;
- WebRtc_Word16 saveMsPerCall = inst->millisecondsPerCall;
+ int16_t saveMsPerCall = inst->millisecondsPerCall;
enum BGNMode saveBgnMode = inst->BGNInst.bgnMode;
#ifdef NETEQ_STEREO
MasterSlaveInfo saveMSinfo;
#endif
/* copy contents of statInst to avoid clearing */WEBRTC_SPL_MEMCPY_W16(&saveStats, &(inst->statInst),
- sizeof(DSPStats_t)/sizeof(WebRtc_Word16));
+ sizeof(DSPStats_t)/sizeof(int16_t));
#ifdef NETEQ_STEREO
/* copy contents of msInfo to avoid clearing */WEBRTC_SPL_MEMCPY_W16(&saveMSinfo, &(inst->msInfo),
- sizeof(MasterSlaveInfo)/sizeof(WebRtc_Word16));
+ sizeof(MasterSlaveInfo)/sizeof(int16_t));
#endif
/* check that the sample rate is valid */
@@ -232,14 +232,14 @@
fs_mult = WebRtcSpl_DivW32W16ResW16(fs, 8000);
/* Set everything to zero since most variables should be zero at start */
- WebRtcSpl_MemSetW16((WebRtc_Word16 *) inst, 0, sizeof(DSPInst_t) / sizeof(WebRtc_Word16));
+ WebRtcSpl_MemSetW16((int16_t *) inst, 0, sizeof(DSPInst_t) / sizeof(int16_t));
/* Restore saved pointers */
#ifdef NETEQ_CNG_CODEC
inst->CNG_Codec_inst = (CNG_dec_inst *)savedPtr1;
#endif
- inst->pw16_readAddress = (WebRtc_Word16 *) savedPtr2;
- inst->pw16_writeAddress = (WebRtc_Word16 *) savedPtr3;
+ inst->pw16_readAddress = (int16_t *) savedPtr2;
+ inst->pw16_writeAddress = (int16_t *) savedPtr3;
inst->main_inst = savedPtr4;
#ifdef NETEQ_VAD
inst->VADInst.VADState = savedVADptr;
@@ -289,11 +289,11 @@
inst->BGNInst.bgnMode = saveBgnMode;
/* Recreate statistics counters */WEBRTC_SPL_MEMCPY_W16(&(inst->statInst), &saveStats,
- sizeof(DSPStats_t)/sizeof(WebRtc_Word16));
+ sizeof(DSPStats_t)/sizeof(int16_t));
#ifdef NETEQ_STEREO
/* Recreate MSinfo */WEBRTC_SPL_MEMCPY_W16(&(inst->msInfo), &saveMSinfo,
- sizeof(MasterSlaveInfo)/sizeof(WebRtc_Word16));
+ sizeof(MasterSlaveInfo)/sizeof(int16_t));
#endif
#ifdef NETEQ_CNG_CODEC
@@ -335,8 +335,8 @@
{
/* set shared-memory addresses in the DSP instance */
- inst->pw16_readAddress = (WebRtc_Word16 *) data2DspAddress;
- inst->pw16_writeAddress = (WebRtc_Word16 *) data2McuAddress;
+ inst->pw16_readAddress = (int16_t *) data2DspAddress;
+ inst->pw16_writeAddress = (int16_t *) data2McuAddress;
/* set pointer to main NetEQ instance */
inst->main_inst = (void *) mainInst;
@@ -414,7 +414,7 @@
* -1 - Error
*/
-int WebRtcNetEQ_InitVAD(PostDecodeVAD_t *VADInst, WebRtc_UWord16 fs)
+int WebRtcNetEQ_InitVAD(PostDecodeVAD_t *VADInst, uint16_t fs)
{
int res = 0;
@@ -505,7 +505,7 @@
int WebRtcNetEQ_FlushSpeechBuffer(DSPInst_t *inst)
{
- WebRtc_Word16 fs_mult;
+ int16_t fs_mult;
/* calculate fs/8000 */
fs_mult = WebRtcSpl_DivW32W16ResW16(inst->fs, 8000);
diff --git a/webrtc/modules/audio_coding/neteq/dsp.h b/webrtc/modules/audio_coding/neteq/dsp.h
index 40b7831..5452272 100644
--- a/webrtc/modules/audio_coding/neteq/dsp.h
+++ b/webrtc/modules/audio_coding/neteq/dsp.h
@@ -92,15 +92,15 @@
/* Constant tables */
/*******************/
-extern const WebRtc_Word16 WebRtcNetEQ_kDownsample8kHzTbl[];
-extern const WebRtc_Word16 WebRtcNetEQ_kDownsample16kHzTbl[];
-extern const WebRtc_Word16 WebRtcNetEQ_kDownsample32kHzTbl[];
-extern const WebRtc_Word16 WebRtcNetEQ_kDownsample48kHzTbl[];
-extern const WebRtc_Word16 WebRtcNetEQ_kRandnTbl[];
-extern const WebRtc_Word16 WebRtcNetEQ_kMixFractionFuncTbl[];
-extern const WebRtc_Word16 WebRtcNetEQ_k1049div[];
-extern const WebRtc_Word16 WebRtcNetEQ_k2097div[];
-extern const WebRtc_Word16 WebRtcNetEQ_k5243div[];
+extern const int16_t WebRtcNetEQ_kDownsample8kHzTbl[];
+extern const int16_t WebRtcNetEQ_kDownsample16kHzTbl[];
+extern const int16_t WebRtcNetEQ_kDownsample32kHzTbl[];
+extern const int16_t WebRtcNetEQ_kDownsample48kHzTbl[];
+extern const int16_t WebRtcNetEQ_kRandnTbl[];
+extern const int16_t WebRtcNetEQ_kMixFractionFuncTbl[];
+extern const int16_t WebRtcNetEQ_k1049div[];
+extern const int16_t WebRtcNetEQ_k2097div[];
+extern const int16_t WebRtcNetEQ_k5243div[];
@@ -142,16 +142,16 @@
typedef struct BGNInst_t_
{
- WebRtc_Word32 w32_energy;
- WebRtc_Word32 w32_energyMax;
- WebRtc_Word32 w32_energyUpdate;
- WebRtc_Word32 w32_energyUpdateLow;
- WebRtc_Word16 pw16_filterState[BGN_LPC_ORDER];
- WebRtc_Word16 pw16_filter[BGN_LPC_ORDER + 1];
- WebRtc_Word16 w16_mutefactor;
- WebRtc_Word16 w16_scale;
- WebRtc_Word16 w16_scaleShift;
- WebRtc_Word16 w16_initialized;
+ int32_t w32_energy;
+ int32_t w32_energyMax;
+ int32_t w32_energyUpdate;
+ int32_t w32_energyUpdateLow;
+ int16_t pw16_filterState[BGN_LPC_ORDER];
+ int16_t pw16_filter[BGN_LPC_ORDER + 1];
+ int16_t w16_mutefactor;
+ int16_t w16_scale;
+ int16_t w16_scaleShift;
+ int16_t w16_initialized;
enum BGNMode bgnMode;
} BGNInst_t;
@@ -160,24 +160,24 @@
typedef struct ExpandInst_t_
{
- WebRtc_Word16 w16_overlap; /* Constant, 5 for NB and 10 for WB */
- WebRtc_Word16 w16_consecExp; /* Number of consecutive expand calls */
- WebRtc_Word16 *pw16_arFilter; /* length [UNVOICED_LPC_ORDER+1] */
- WebRtc_Word16 *pw16_arState; /* length [UNVOICED_LPC_ORDER] */
- WebRtc_Word16 w16_arGain;
- WebRtc_Word16 w16_arGainScale;
- WebRtc_Word16 w16_vFraction; /* Q14 */
- WebRtc_Word16 w16_currentVFraction; /* Q14 */
- WebRtc_Word16 *pw16_expVecs[2];
- WebRtc_Word16 w16_lags[3];
- WebRtc_Word16 w16_maxLag;
- WebRtc_Word16 *pw16_overlapVec; /* last samples of speech history */
- WebRtc_Word16 w16_lagsDirection;
- WebRtc_Word16 w16_lagsPosition;
- WebRtc_Word16 w16_expandMuteFactor; /* Q14 */
- WebRtc_Word16 w16_stopMuting;
- WebRtc_Word16 w16_onset;
- WebRtc_Word16 w16_muteSlope; /* Q20 */
+ int16_t w16_overlap; /* Constant, 5 for NB and 10 for WB */
+ int16_t w16_consecExp; /* Number of consecutive expand calls */
+ int16_t *pw16_arFilter; /* length [UNVOICED_LPC_ORDER+1] */
+ int16_t *pw16_arState; /* length [UNVOICED_LPC_ORDER] */
+ int16_t w16_arGain;
+ int16_t w16_arGainScale;
+ int16_t w16_vFraction; /* Q14 */
+ int16_t w16_currentVFraction; /* Q14 */
+ int16_t *pw16_expVecs[2];
+ int16_t w16_lags[3];
+ int16_t w16_maxLag;
+ int16_t *pw16_overlapVec; /* last samples of speech history */
+ int16_t w16_lagsDirection;
+ int16_t w16_lagsPosition;
+ int16_t w16_expandMuteFactor; /* Q14 */
+ int16_t w16_stopMuting;
+ int16_t w16_onset;
+ int16_t w16_muteSlope; /* Q20 */
} ExpandInst_t;
@@ -190,7 +190,7 @@
*/
typedef int (*VADInitFunction)(void *VAD_inst);
typedef int (*VADSetmodeFunction)(void *VAD_inst, int mode);
-typedef int (*VADFunction)(void *VAD_inst, int fs, WebRtc_Word16 *frame,
+typedef int (*VADFunction)(void *VAD_inst, int fs, int16_t *frame,
int frameLen);
/* Post-decode VAD instance (sub-instance of NETEQDSP_inst) */
@@ -199,10 +199,10 @@
void *VADState; /* pointer to a VAD instance */
- WebRtc_Word16 VADEnabled; /* 1 if enabled, 0 if disabled */
+ int16_t VADEnabled; /* 1 if enabled, 0 if disabled */
int VADMode; /* mode parameter to pass to the VAD function */
int VADDecision; /* 1 for active, 0 for passive */
- WebRtc_Word16 SIDintervalCounter; /* reset when decoding CNG/SID frame,
+ int16_t SIDintervalCounter; /* reset when decoding CNG/SID frame,
increment for each recout call */
/* Function pointers */
@@ -224,13 +224,13 @@
enum MasterSlaveExtraInfo extraInfo;
- WebRtc_UWord16 instruction;
- WebRtc_Word16 distLag;
- WebRtc_Word16 corrLag;
- WebRtc_Word16 bestIndex;
+ uint16_t instruction;
+ int16_t distLag;
+ int16_t corrLag;
+ int16_t bestIndex;
- WebRtc_UWord32 endTimestamp;
- WebRtc_UWord16 samplesLeftWithOverlap;
+ uint32_t endTimestamp;
+ uint16_t samplesLeftWithOverlap;
} MasterSlaveInfo;
#endif
@@ -241,13 +241,13 @@
{
/* MCU/DSP Communication layer */
- WebRtc_Word16 *pw16_readAddress;
- WebRtc_Word16 *pw16_writeAddress;
+ int16_t *pw16_readAddress;
+ int16_t *pw16_writeAddress;
void *main_inst;
/* Output frame size in ms and samples */
- WebRtc_Word16 millisecondsPerCall;
- WebRtc_Word16 timestampsPerCall;
+ int16_t millisecondsPerCall;
+ int16_t timestampsPerCall;
/*
* Example of speech buffer
@@ -263,27 +263,27 @@
* new data at the end.
*/
- WebRtc_Word16 speechBuffer[SPEECH_BUF_SIZE]; /* History/future speech buffer */
+ int16_t speechBuffer[SPEECH_BUF_SIZE]; /* History/future speech buffer */
int curPosition; /* Next sample to play */
int endPosition; /* Position that ends future data */
- WebRtc_UWord32 endTimestamp; /* Timestamp value at end of future data */
- WebRtc_UWord32 videoSyncTimestamp; /* (Estimated) timestamp of the last
+ uint32_t endTimestamp; /* Timestamp value at end of future data */
+ uint32_t videoSyncTimestamp; /* (Estimated) timestamp of the last
played sample (usually same as
endTimestamp-(endPosition-curPosition)
except during Expand and CNG) */
- WebRtc_UWord16 fs; /* sample rate in Hz */
- WebRtc_Word16 w16_frameLen; /* decoder frame length in samples */
- WebRtc_Word16 w16_mode; /* operation used during last RecOut call */
- WebRtc_Word16 w16_muteFactor; /* speech mute factor in Q14 */
- WebRtc_Word16 *pw16_speechHistory; /* beginning of speech history during Expand */
- WebRtc_Word16 w16_speechHistoryLen; /* 256 for NB and 512 for WB */
+ uint16_t fs; /* sample rate in Hz */
+ int16_t w16_frameLen; /* decoder frame length in samples */
+ int16_t w16_mode; /* operation used during last RecOut call */
+ int16_t w16_muteFactor; /* speech mute factor in Q14 */
+ int16_t *pw16_speechHistory; /* beginning of speech history during Expand */
+ int16_t w16_speechHistoryLen; /* 256 for NB and 512 for WB */
/* random noise seed parameters */
- WebRtc_Word16 w16_seedInc;
- WebRtc_UWord32 uw16_seed;
+ int16_t w16_seedInc;
+ uint32_t uw16_seed;
/* VQmon related variable */
- WebRtc_Word16 w16_concealedTS;
+ int16_t w16_concealedTS;
/*****************/
/* Sub-instances */
@@ -344,7 +344,7 @@
* : non-zero - error
*/
-int WebRtcNetEQ_DSPInit(DSPInst_t *inst, WebRtc_UWord16 fs);
+int WebRtcNetEQ_DSPInit(DSPInst_t *inst, uint16_t fs);
/****************************************************************************
* WebRtcNetEQ_AddressInit(...)
@@ -420,8 +420,8 @@
* -1 - Error
*/
-int WebRtcNetEQ_RecOutInternal(DSPInst_t *inst, WebRtc_Word16 *pw16_outData, WebRtc_Word16 *pw16_len,
- WebRtc_Word16 BGNonly);
+int WebRtcNetEQ_RecOutInternal(DSPInst_t *inst, int16_t *pw16_outData, int16_t *pw16_len,
+ int16_t BGNonly);
/****************************************************************************
* WebRtcNetEQ_Normal(...)
@@ -447,10 +447,10 @@
int WebRtcNetEQ_Normal(DSPInst_t *inst,
#ifdef SCRATCH
- WebRtc_Word16 *pw16_scratchPtr,
+ int16_t *pw16_scratchPtr,
#endif
- WebRtc_Word16 *pw16_decoded, WebRtc_Word16 len,
- WebRtc_Word16 *pw16_outData, WebRtc_Word16 *pw16_len);
+ int16_t *pw16_decoded, int16_t len,
+ int16_t *pw16_outData, int16_t *pw16_len);
/****************************************************************************
* WebRtcNetEQ_Expand(...)
@@ -477,10 +477,10 @@
int WebRtcNetEQ_Expand(DSPInst_t *inst,
#ifdef SCRATCH
- WebRtc_Word16 *pw16_scratchPtr,
+ int16_t *pw16_scratchPtr,
#endif
- WebRtc_Word16 *pw16_outData, WebRtc_Word16 *pw16_len,
- WebRtc_Word16 BGNonly);
+ int16_t *pw16_outData, int16_t *pw16_len,
+ int16_t BGNonly);
/****************************************************************************
* WebRtcNetEQ_GenerateBGN(...)
@@ -506,9 +506,9 @@
int WebRtcNetEQ_GenerateBGN(DSPInst_t *inst,
#ifdef SCRATCH
- WebRtc_Word16 *pw16_scratchPtr,
+ int16_t *pw16_scratchPtr,
#endif
- WebRtc_Word16 *pw16_outData, WebRtc_Word16 len);
+ int16_t *pw16_outData, int16_t len);
/****************************************************************************
* WebRtcNetEQ_PreEmptiveExpand(...)
@@ -543,11 +543,11 @@
int WebRtcNetEQ_PreEmptiveExpand(DSPInst_t *inst,
#ifdef SCRATCH
- WebRtc_Word16 *pw16_scratchPtr,
+ int16_t *pw16_scratchPtr,
#endif
- const WebRtc_Word16 *pw16_decoded, int len, int oldDataLen,
- WebRtc_Word16 *pw16_outData, WebRtc_Word16 *pw16_len,
- WebRtc_Word16 BGNonly);
+ const int16_t *pw16_decoded, int len, int oldDataLen,
+ int16_t *pw16_outData, int16_t *pw16_len,
+ int16_t BGNonly);
/****************************************************************************
* WebRtcNetEQ_Accelerate(...)
@@ -578,11 +578,11 @@
int WebRtcNetEQ_Accelerate(DSPInst_t *inst,
#ifdef SCRATCH
- WebRtc_Word16 *pw16_scratchPtr,
+ int16_t *pw16_scratchPtr,
#endif
- const WebRtc_Word16 *pw16_decoded, int len,
- WebRtc_Word16 *pw16_outData, WebRtc_Word16 *pw16_len,
- WebRtc_Word16 BGNonly);
+ const int16_t *pw16_decoded, int len,
+ int16_t *pw16_outData, int16_t *pw16_len,
+ int16_t BGNonly);
/****************************************************************************
* WebRtcNetEQ_Merge(...)
@@ -611,10 +611,10 @@
int WebRtcNetEQ_Merge(DSPInst_t *inst,
#ifdef SCRATCH
- WebRtc_Word16 *pw16_scratchPtr,
+ int16_t *pw16_scratchPtr,
#endif
- WebRtc_Word16 *pw16_decoded, int len, WebRtc_Word16 *pw16_outData,
- WebRtc_Word16 *pw16_len);
+ int16_t *pw16_decoded, int len, int16_t *pw16_outData,
+ int16_t *pw16_len);
/****************************************************************************
* WebRtcNetEQ_Cng(...)
@@ -635,7 +635,7 @@
#ifdef NETEQ_CNG_CODEC
/* Must compile NetEQ with CNG support to enable this function */
-int WebRtcNetEQ_Cng(DSPInst_t *inst, WebRtc_Word16 *pw16_outData, int len);
+int WebRtcNetEQ_Cng(DSPInst_t *inst, int16_t *pw16_outData, int len);
#endif /* NETEQ_CNG_CODEC */
@@ -656,7 +656,7 @@
void WebRtcNetEQ_BGNUpdate(
#ifdef SCRATCH
- DSPInst_t *inst, WebRtc_Word16 *pw16_scratchPtr
+ DSPInst_t *inst, int16_t *pw16_scratchPtr
#else
DSPInst_t *inst
#endif
@@ -681,7 +681,7 @@
* -1 - Error
*/
-int WebRtcNetEQ_InitVAD(PostDecodeVAD_t *VADInst, WebRtc_UWord16 fs);
+int WebRtcNetEQ_InitVAD(PostDecodeVAD_t *VADInst, uint16_t fs);
/****************************************************************************
* WebRtcNetEQ_SetVADModeInternal(...)
@@ -760,15 +760,15 @@
* - crossCorr : The cross correlation in Q-rShift
*/
-void WebRtcNetEQ_40BitAccCrossCorr(WebRtc_Word32 *crossCorr, WebRtc_Word16 *seq1,
- WebRtc_Word16 *seq2, WebRtc_Word16 dimSeq,
- WebRtc_Word16 dimCrossCorr, WebRtc_Word16 rShift,
- WebRtc_Word16 step_seq2);
+void WebRtcNetEQ_40BitAccCrossCorr(int32_t *crossCorr, int16_t *seq1,
+ int16_t *seq2, int16_t dimSeq,
+ int16_t dimCrossCorr, int16_t rShift,
+ int16_t step_seq2);
/****************************************************************************
* WebRtcNetEQ_40BitAccDotW16W16(...)
*
- * Calculates the dot product between two vectors (WebRtc_Word16)
+ * Calculates the dot product between two vectors (int16_t)
*
* Input:
* - vector1 : Vector 1
@@ -780,8 +780,8 @@
* Return value : The dot product
*/
-WebRtc_Word32 WebRtcNetEQ_40BitAccDotW16W16(WebRtc_Word16 *vector1, WebRtc_Word16 *vector2,
- int len, int scaling);
+int32_t WebRtcNetEQ_40BitAccDotW16W16(int16_t *vector1, int16_t *vector2,
+ int len, int scaling);
#endif /* WEBRTC_NETEQ_40BITACC_TEST */
diff --git a/webrtc/modules/audio_coding/neteq/dsp_helpfunctions.c b/webrtc/modules/audio_coding/neteq/dsp_helpfunctions.c
index 6e9a283..ef721d5 100644
--- a/webrtc/modules/audio_coding/neteq/dsp_helpfunctions.c
+++ b/webrtc/modules/audio_coding/neteq/dsp_helpfunctions.c
@@ -15,7 +15,7 @@
#include "dsp_helpfunctions.h"
-WebRtc_Word16 WebRtcNetEQ_CalcFsMult(WebRtc_UWord16 fsHz)
+int16_t WebRtcNetEQ_CalcFsMult(uint16_t fsHz)
{
switch (fsHz)
{
@@ -43,13 +43,13 @@
}
-int WebRtcNetEQ_DownSampleTo4kHz(const WebRtc_Word16 *in, int inLen, WebRtc_UWord16 inFsHz,
- WebRtc_Word16 *out, int outLen, int compensateDelay)
+int WebRtcNetEQ_DownSampleTo4kHz(const int16_t *in, int inLen, uint16_t inFsHz,
+ int16_t *out, int outLen, int compensateDelay)
{
- WebRtc_Word16 *B; /* filter coefficients */
- WebRtc_Word16 Blen; /* number of coefficients */
- WebRtc_Word16 filterDelay; /* phase delay in samples */
- WebRtc_Word16 factor; /* conversion rate (inFsHz/8000) */
+ int16_t *B; /* filter coefficients */
+ int16_t Blen; /* number of coefficients */
+ int16_t filterDelay; /* phase delay in samples */
+ int16_t factor; /* conversion rate (inFsHz/8000) */
int ok;
/* Set constants depending on frequency used */
@@ -62,7 +62,7 @@
{
Blen = 3;
factor = 2;
- B = (WebRtc_Word16*) WebRtcNetEQ_kDownsample8kHzTbl;
+ B = (int16_t*) WebRtcNetEQ_kDownsample8kHzTbl;
filterDelay = 1 + 1;
break;
}
@@ -71,7 +71,7 @@
{
Blen = 5;
factor = 4;
- B = (WebRtc_Word16*) WebRtcNetEQ_kDownsample16kHzTbl;
+ B = (int16_t*) WebRtcNetEQ_kDownsample16kHzTbl;
filterDelay = 2 + 1;
break;
}
@@ -81,7 +81,7 @@
{
Blen = 7;
factor = 8;
- B = (WebRtc_Word16*) WebRtcNetEQ_kDownsample32kHzTbl;
+ B = (int16_t*) WebRtcNetEQ_kDownsample32kHzTbl;
filterDelay = 3 + 1;
break;
}
@@ -91,7 +91,7 @@
{
Blen = 7;
factor = 12;
- B = (WebRtc_Word16*) WebRtcNetEQ_kDownsample48kHzTbl;
+ B = (int16_t*) WebRtcNetEQ_kDownsample48kHzTbl;
filterDelay = 3 + 1;
break;
}
@@ -109,9 +109,9 @@
filterDelay = 0;
}
- ok = WebRtcSpl_DownsampleFast((WebRtc_Word16*) &in[Blen - 1],
- (WebRtc_Word16) (inLen - (Blen - 1)), /* number of input samples */
- out, (WebRtc_Word16) outLen, /* number of output samples to produce */
+ ok = WebRtcSpl_DownsampleFast((int16_t*) &in[Blen - 1],
+ (int16_t) (inLen - (Blen - 1)), /* number of input samples */
+ out, (int16_t) outLen, /* number of output samples to produce */
B, Blen, factor, filterDelay); /* filter parameters */
return ok; /* return value is -1 if input signal is too short */
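A small usage sketch (the buffer sizes are illustrative assumptions, not taken from the patch): to produce 10 ms at 4 kHz from 16 kHz input the caller supplies a bit more than outLen*factor samples so the FIR history is covered, otherwise the -1 error path above is taken.

    int16_t in16k[180];   /* roughly 40*4 samples plus filter history (assumed) */
    int16_t out4k[40];    /* 10 ms at 4 kHz */
    int ok = WebRtcNetEQ_DownSampleTo4kHz(in16k, 180, 16000,
                                          out4k, 40, 1 /* compensate delay */);
    if (ok < 0) { /* input too short */ }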
diff --git a/webrtc/modules/audio_coding/neteq/dsp_helpfunctions.h b/webrtc/modules/audio_coding/neteq/dsp_helpfunctions.h
index f728c09..11119f1 100644
--- a/webrtc/modules/audio_coding/neteq/dsp_helpfunctions.h
+++ b/webrtc/modules/audio_coding/neteq/dsp_helpfunctions.h
@@ -36,13 +36,13 @@
* Return value : Length of correlated data
*/
-WebRtc_Word16 WebRtcNetEQ_Correlator(DSPInst_t *inst,
+int16_t WebRtcNetEQ_Correlator(DSPInst_t *inst,
#ifdef SCRATCH
- WebRtc_Word16 *pw16_scratchPtr,
+ int16_t *pw16_scratchPtr,
#endif
- WebRtc_Word16 *pw16_data, WebRtc_Word16 w16_dataLen,
- WebRtc_Word16 *pw16_corrOut,
- WebRtc_Word16 *pw16_corrScale);
+ int16_t *pw16_data, int16_t w16_dataLen,
+ int16_t *pw16_corrOut,
+ int16_t *pw16_corrScale);
/****************************************************************************
* WebRtcNetEQ_PeakDetection(...)
@@ -62,10 +62,10 @@
* Return value : 0 for ok
*/
-WebRtc_Word16 WebRtcNetEQ_PeakDetection(WebRtc_Word16 *pw16_data, WebRtc_Word16 w16_dataLen,
- WebRtc_Word16 w16_nmbPeaks, WebRtc_Word16 fs_mult,
- WebRtc_Word16 *pw16_corrIndex,
- WebRtc_Word16 *pw16_winners);
+int16_t WebRtcNetEQ_PeakDetection(int16_t *pw16_data, int16_t w16_dataLen,
+ int16_t w16_nmbPeaks, int16_t fs_mult,
+ int16_t *pw16_corrIndex,
+ int16_t *pw16_winners);
/****************************************************************************
* WebRtcNetEQ_PrblFit(...)
@@ -83,8 +83,8 @@
* Return value : 0 for ok
*/
-WebRtc_Word16 WebRtcNetEQ_PrblFit(WebRtc_Word16 *pw16_3pts, WebRtc_Word16 *pw16_Ind,
- WebRtc_Word16 *pw16_outVal, WebRtc_Word16 fs_mult);
+int16_t WebRtcNetEQ_PrblFit(int16_t *pw16_3pts, int16_t *pw16_Ind,
+ int16_t *pw16_outVal, int16_t fs_mult);
/****************************************************************************
* WebRtcNetEQ_MinDistortion(...)
@@ -104,9 +104,9 @@
* Return value : Lag for minimum distortion
*/
-WebRtc_Word16 WebRtcNetEQ_MinDistortion(const WebRtc_Word16 *pw16_data,
- WebRtc_Word16 w16_minLag, WebRtc_Word16 w16_maxLag,
- WebRtc_Word16 len, WebRtc_Word32 *pw16_dist);
+int16_t WebRtcNetEQ_MinDistortion(const int16_t *pw16_data,
+ int16_t w16_minLag, int16_t w16_maxLag,
+ int16_t len, int32_t *pw16_dist);
/****************************************************************************
* WebRtcNetEQ_RandomVec(...)
@@ -122,8 +122,8 @@
* - randVec : Generated random vector
*/
-void WebRtcNetEQ_RandomVec(WebRtc_UWord32 *w32_seed, WebRtc_Word16 *pw16_randVec,
- WebRtc_Word16 w16_len, WebRtc_Word16 w16_incval);
+void WebRtcNetEQ_RandomVec(uint32_t *w32_seed, int16_t *pw16_randVec,
+ int16_t w16_len, int16_t w16_incval);
/****************************************************************************
* WebRtcNetEQ_MixVoiceUnvoice(...)
@@ -141,10 +141,10 @@
* - outData : Mixed signal
*/
-void WebRtcNetEQ_MixVoiceUnvoice(WebRtc_Word16 *pw16_outData, WebRtc_Word16 *pw16_voicedVec,
- WebRtc_Word16 *pw16_unvoicedVec,
- WebRtc_Word16 *w16_current_vfraction,
- WebRtc_Word16 w16_vfraction_change, WebRtc_Word16 N);
+void WebRtcNetEQ_MixVoiceUnvoice(int16_t *pw16_outData, int16_t *pw16_voicedVec,
+ int16_t *pw16_unvoicedVec,
+ int16_t *w16_current_vfraction,
+ int16_t w16_vfraction_change, int16_t N);
/****************************************************************************
* WebRtcNetEQ_UnmuteSignal(...)
@@ -161,9 +161,9 @@
* - outVec : Output signal
*/
-void WebRtcNetEQ_UnmuteSignal(WebRtc_Word16 *pw16_inVec, WebRtc_Word16 *startMuteFact,
- WebRtc_Word16 *pw16_outVec, WebRtc_Word16 unmuteFact,
- WebRtc_Word16 N);
+void WebRtcNetEQ_UnmuteSignal(int16_t *pw16_inVec, int16_t *startMuteFact,
+ int16_t *pw16_outVec, int16_t unmuteFact,
+ int16_t N);
/****************************************************************************
* WebRtcNetEQ_MuteSignal(...)
@@ -176,8 +176,8 @@
* - N : Number of samples
*/
-void WebRtcNetEQ_MuteSignal(WebRtc_Word16 *pw16_inout, WebRtc_Word16 muteSlope,
- WebRtc_Word16 N);
+void WebRtcNetEQ_MuteSignal(int16_t *pw16_inout, int16_t muteSlope,
+ int16_t N);
/****************************************************************************
* WebRtcNetEQ_CalcFsMult(...)
@@ -190,7 +190,7 @@
* Return value : fsHz/8000 for the valid values, 1 for other inputs
*/
-WebRtc_Word16 WebRtcNetEQ_CalcFsMult(WebRtc_UWord16 fsHz);
+int16_t WebRtcNetEQ_CalcFsMult(uint16_t fsHz);
/****************************************************************************
* WebRtcNetEQ_DownSampleTo4kHz(...)
@@ -213,8 +213,8 @@
*
*/
-int WebRtcNetEQ_DownSampleTo4kHz(const WebRtc_Word16 *in, int inLen, WebRtc_UWord16 inFsHz,
- WebRtc_Word16 *out, int outLen, int compensateDelay);
+int WebRtcNetEQ_DownSampleTo4kHz(const int16_t *in, int inLen, uint16_t inFsHz,
+ int16_t *out, int outLen, int compensateDelay);
#endif
diff --git a/webrtc/modules/audio_coding/neteq/dtmf_buffer.c b/webrtc/modules/audio_coding/neteq/dtmf_buffer.c
index f00f9c994..9e32126 100644
--- a/webrtc/modules/audio_coding/neteq/dtmf_buffer.c
+++ b/webrtc/modules/audio_coding/neteq/dtmf_buffer.c
@@ -22,7 +22,7 @@
#ifdef NETEQ_ATEVENT_DECODE
-WebRtc_Word16 WebRtcNetEQ_DtmfRemoveEvent(dtmf_inst_t *DTMFdec_inst)
+int16_t WebRtcNetEQ_DtmfRemoveEvent(dtmf_inst_t *DTMFdec_inst)
{
int i;
@@ -44,8 +44,8 @@
return 0;
}
-WebRtc_Word16 WebRtcNetEQ_DtmfDecoderInit(dtmf_inst_t *DTMFdec_inst, WebRtc_UWord16 fs,
- WebRtc_Word16 MaxPLCtime)
+int16_t WebRtcNetEQ_DtmfDecoderInit(dtmf_inst_t *DTMFdec_inst, uint16_t fs,
+ int16_t MaxPLCtime)
{
int i;
if (((fs != 8000) && (fs != 16000) && (fs != 32000) && (fs != 48000)) || (MaxPLCtime < 0))
@@ -76,18 +76,18 @@
return 0;
}
-WebRtc_Word16 WebRtcNetEQ_DtmfInsertEvent(dtmf_inst_t *DTMFdec_inst,
- const WebRtc_Word16 *encoded, WebRtc_Word16 len,
- WebRtc_UWord32 timeStamp)
+int16_t WebRtcNetEQ_DtmfInsertEvent(dtmf_inst_t *DTMFdec_inst,
+ const int16_t *encoded, int16_t len,
+ uint32_t timeStamp)
{
int i;
- WebRtc_Word16 value;
- const WebRtc_Word16 *EventStart;
- WebRtc_Word16 endEvent;
- WebRtc_Word16 Volume;
- WebRtc_Word16 Duration;
- WebRtc_Word16 position = -1;
+ int16_t value;
+ const int16_t *EventStart;
+ int16_t endEvent;
+ int16_t Volume;
+ int16_t Duration;
+ int16_t position = -1;
/* Extract event */
if (len == 4)
@@ -102,8 +102,8 @@
value = ((*EventStart) & 0xFF);
endEvent = ((*EventStart) & 0x8000) >> 15;
Volume = ((*EventStart) & 0x3F00) >> 8;
- Duration = (((((WebRtc_UWord16) EventStart[1]) >> 8) & 0xFF)
- | (((WebRtc_UWord16) (EventStart[1] & 0xFF)) << 8));
+ Duration = (((((uint16_t) EventStart[1]) >> 8) & 0xFF)
+ | (((uint16_t) (EventStart[1] & 0xFF)) << 8));
#endif
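The bit twiddling above unpacks the RFC 4733 (formerly RFC 2833) telephone-event payload out of 16-bit words: byte 0 is the event code, byte 1 carries the end bit (MSB) and the 6-bit volume, and bytes 2-3 hold the 16-bit duration in network byte order, which is why the duration is byte-swapped here. Reading the same fields directly from the payload bytes would look like this sketch:

    /* Sketch: unpack the first 4 bytes of an RFC 4733 telephone-event payload. */
    static void parse_dtmf_event(const uint8_t* p, int* event, int* end_bit,
                                 int* volume, int* duration) {
      *event    = p[0];                        /* 0..15 for DTMF digits      */
      *end_bit  = (p[1] & 0x80) >> 7;          /* set in the final packets   */
      *volume   = p[1] & 0x3F;                 /* attenuation in -dBm0       */
      *duration = (p[2] << 8) | p[3];          /* network byte order         */
    }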
/* Only events between 0-15 are supported (DTMF tones) */
if ((value < 0) || (value > 15))
@@ -153,8 +153,8 @@
return DTMF_INSERT_ERROR;
}
-WebRtc_Word16 WebRtcNetEQ_DtmfDecode(dtmf_inst_t *DTMFdec_inst, WebRtc_Word16 *event,
- WebRtc_Word16 *volume, WebRtc_UWord32 currTimeStamp)
+int16_t WebRtcNetEQ_DtmfDecode(dtmf_inst_t *DTMFdec_inst, int16_t *event,
+ int16_t *volume, uint32_t currTimeStamp)
{
if (DTMFdec_inst->EventBufferSize < 1) return 0; /* No events to play */
@@ -204,7 +204,7 @@
else
{
/* Less than frameLen to play and not end of event. */
- DTMFdec_inst->CurrentPLCtime = (WebRtc_Word16) (currTimeStamp
+ DTMFdec_inst->CurrentPLCtime = (int16_t) (currTimeStamp
- DTMFdec_inst->EventQueueEndTime[0]);
if ((DTMFdec_inst->CurrentPLCtime > DTMFdec_inst->MaxPLCtime)
diff --git a/webrtc/modules/audio_coding/neteq/dtmf_buffer.h b/webrtc/modules/audio_coding/neteq/dtmf_buffer.h
index e185411..99c9e6a 100644
--- a/webrtc/modules/audio_coding/neteq/dtmf_buffer.h
+++ b/webrtc/modules/audio_coding/neteq/dtmf_buffer.h
@@ -26,15 +26,15 @@
typedef struct dtmf_inst_t_
{
- WebRtc_Word16 MaxPLCtime;
- WebRtc_Word16 CurrentPLCtime;
- WebRtc_Word16 EventQueue[MAX_DTMF_QUEUE_SIZE];
- WebRtc_Word16 EventQueueVolume[MAX_DTMF_QUEUE_SIZE];
- WebRtc_Word16 EventQueueEnded[MAX_DTMF_QUEUE_SIZE];
- WebRtc_UWord32 EventQueueStartTime[MAX_DTMF_QUEUE_SIZE];
- WebRtc_UWord32 EventQueueEndTime[MAX_DTMF_QUEUE_SIZE];
- WebRtc_Word16 EventBufferSize;
- WebRtc_Word16 framelen;
+ int16_t MaxPLCtime;
+ int16_t CurrentPLCtime;
+ int16_t EventQueue[MAX_DTMF_QUEUE_SIZE];
+ int16_t EventQueueVolume[MAX_DTMF_QUEUE_SIZE];
+ int16_t EventQueueEnded[MAX_DTMF_QUEUE_SIZE];
+ uint32_t EventQueueStartTime[MAX_DTMF_QUEUE_SIZE];
+ uint32_t EventQueueEndTime[MAX_DTMF_QUEUE_SIZE];
+ int16_t EventBufferSize;
+ int16_t framelen;
} dtmf_inst_t;
/****************************************************************************
@@ -51,8 +51,8 @@
* -1 - Error
*/
-WebRtc_Word16 WebRtcNetEQ_DtmfDecoderInit(dtmf_inst_t *DTMFdec_inst, WebRtc_UWord16 fs,
- WebRtc_Word16 MaxPLCtime);
+int16_t WebRtcNetEQ_DtmfDecoderInit(dtmf_inst_t *DTMFdec_inst, uint16_t fs,
+ int16_t MaxPLCtime);
/****************************************************************************
* WebRtcNetEQ_DtmfInsertEvent(...)
@@ -69,9 +69,9 @@
* -1 - Error
*/
-WebRtc_Word16 WebRtcNetEQ_DtmfInsertEvent(dtmf_inst_t *DTMFdec_inst,
- const WebRtc_Word16 *encoded, WebRtc_Word16 len,
- WebRtc_UWord32 timeStamp);
+int16_t WebRtcNetEQ_DtmfInsertEvent(dtmf_inst_t *DTMFdec_inst,
+ const int16_t *encoded, int16_t len,
+ uint32_t timeStamp);
/****************************************************************************
* WebRtcNetEQ_DtmfDecode(...)
@@ -92,8 +92,8 @@
* -1 - Error
*/
-WebRtc_Word16 WebRtcNetEQ_DtmfDecode(dtmf_inst_t *DTMFdec_inst, WebRtc_Word16 *event,
- WebRtc_Word16 *volume, WebRtc_UWord32 currTimeStamp);
+int16_t WebRtcNetEQ_DtmfDecode(dtmf_inst_t *DTMFdec_inst, int16_t *event,
+ int16_t *volume, uint32_t currTimeStamp);
#endif /* NETEQ_ATEVENT_DECODE */
diff --git a/webrtc/modules/audio_coding/neteq/dtmf_tonegen.c b/webrtc/modules/audio_coding/neteq/dtmf_tonegen.c
index c9ad232..8ea413c 100644
--- a/webrtc/modules/audio_coding/neteq/dtmf_tonegen.c
+++ b/webrtc/modules/audio_coding/neteq/dtmf_tonegen.c
@@ -65,14 +65,14 @@
* Table values in Q14.
*/
-const WebRtc_Word16 WebRtcNetEQ_dtfm_aTbl8Khz[8] =
+const int16_t WebRtcNetEQ_dtfm_aTbl8Khz[8] =
{
27980, 26956, 25701, 24219,
19073, 16325, 13085, 9315
};
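These values are consistent with a second-order digital resonator: for each DTMF row/column frequency f, the Q14 coefficient is 2*cos(2*pi*f/fs)*16384, and the matching start value in WebRtcNetEQ_dtfm_yInitTab8Khz further down is sin(2*pi*f/fs)*16384. For example, the 697 Hz row tone at fs = 8000 Hz gives 2*cos(2*pi*697/8000)*16384 ~= 27980 and sin(2*pi*697/8000)*16384 ~= 8528, matching the first entries of the two tables; the wideband tables repeat the computation for 16, 32 and 48 kHz.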
#ifdef NETEQ_WIDEBAND
-const WebRtc_Word16 WebRtcNetEQ_dtfm_aTbl16Khz[8]=
+const int16_t WebRtcNetEQ_dtfm_aTbl16Khz[8]=
{
31548, 31281, 30951, 30556,
29144, 28361, 27409, 26258
@@ -80,7 +80,7 @@
#endif
#ifdef NETEQ_32KHZ_WIDEBAND
-const WebRtc_Word16 WebRtcNetEQ_dtfm_aTbl32Khz[8]=
+const int16_t WebRtcNetEQ_dtfm_aTbl32Khz[8]=
{
32462, 32394, 32311, 32210,
31849, 31647, 31400, 31098
@@ -88,7 +88,7 @@
#endif
#ifdef NETEQ_48KHZ_WIDEBAND
-const WebRtc_Word16 WebRtcNetEQ_dtfm_aTbl48Khz[8]=
+const int16_t WebRtcNetEQ_dtfm_aTbl48Khz[8]=
{
32632, 32602, 32564, 32520,
32359, 32268, 32157, 32022
@@ -100,14 +100,14 @@
* Table values in Q14.
*/
-const WebRtc_Word16 WebRtcNetEQ_dtfm_yInitTab8Khz[8] =
+const int16_t WebRtcNetEQ_dtfm_yInitTab8Khz[8] =
{
8528, 9315, 10163, 11036,
13323, 14206,15021, 15708
};
#ifdef NETEQ_WIDEBAND
-const WebRtc_Word16 WebRtcNetEQ_dtfm_yInitTab16Khz[8]=
+const int16_t WebRtcNetEQ_dtfm_yInitTab16Khz[8]=
{
4429, 4879, 5380, 5918,
7490, 8207, 8979, 9801
@@ -115,7 +115,7 @@
#endif
#ifdef NETEQ_32KHZ_WIDEBAND
-const WebRtc_Word16 WebRtcNetEQ_dtfm_yInitTab32Khz[8]=
+const int16_t WebRtcNetEQ_dtfm_yInitTab32Khz[8]=
{
2235, 2468, 2728, 3010,
3853, 4249, 4685, 5164
@@ -123,7 +123,7 @@
#endif
#ifdef NETEQ_48KHZ_WIDEBAND
-const WebRtc_Word16 WebRtcNetEQ_dtfm_yInitTab48Khz[8]=
+const int16_t WebRtcNetEQ_dtfm_yInitTab48Khz[8]=
{
1493, 1649, 1823, 2013,
2582, 2851, 3148, 3476
@@ -135,7 +135,7 @@
Table entries are in Q14.
*/
-const WebRtc_Word16 WebRtcNetEQ_dtfm_dBm0[37] = { 16141, 14386, 12821, 11427, 10184, 9077, 8090,
+const int16_t WebRtcNetEQ_dtfm_dBm0[37] = { 16141, 14386, 12821, 11427, 10184, 9077, 8090,
7210, 6426, 5727, 5104, 4549, 4054, 3614,
3221, 2870, 2558, 2280, 2032, 1811, 1614,
1439, 1282, 1143, 1018, 908, 809, 721, 643,
@@ -163,21 +163,21 @@
* : <0 - error
*/
-WebRtc_Word16 WebRtcNetEQ_DTMFGenerate(dtmf_tone_inst_t *DTMFdecInst, WebRtc_Word16 value,
- WebRtc_Word16 volume, WebRtc_Word16 *signal,
- WebRtc_UWord16 sampFreq, WebRtc_Word16 extFrameLen)
+int16_t WebRtcNetEQ_DTMFGenerate(dtmf_tone_inst_t *DTMFdecInst, int16_t value,
+ int16_t volume, int16_t *signal,
+ uint16_t sampFreq, int16_t extFrameLen)
{
- const WebRtc_Word16 *aTbl; /* pointer to a-coefficient table */
- const WebRtc_Word16 *yInitTable; /* pointer to initialization value table */
- WebRtc_Word16 a1 = 0; /* a-coefficient for first tone (low tone) */
- WebRtc_Word16 a2 = 0; /* a-coefficient for second tone (high tone) */
+ const int16_t *aTbl; /* pointer to a-coefficient table */
+ const int16_t *yInitTable; /* pointer to initialization value table */
+ int16_t a1 = 0; /* a-coefficient for first tone (low tone) */
+ int16_t a2 = 0; /* a-coefficient for second tone (high tone) */
int i;
int frameLen; /* number of samples to generate */
int lowIndex = 0; /* Default to avoid compiler warnings. */
int highIndex = 4; /* Default to avoid compiler warnings. */
- WebRtc_Word32 tempVal;
- WebRtc_Word16 tempValLow;
- WebRtc_Word16 tempValHigh;
+ int32_t tempVal;
+ int16_t tempValLow;
+ int16_t tempValHigh;
/* Sanity check for volume */
if ((volume < 0) || (volume > 36))
@@ -333,10 +333,10 @@
/* Use recursion formula y[n] = a*y[n-1] - y[n-2] */
tempValLow
- = (WebRtc_Word16) (((WEBRTC_SPL_MUL_16_16(a1, DTMFdecInst->oldOutputLow[1])
+ = (int16_t) (((WEBRTC_SPL_MUL_16_16(a1, DTMFdecInst->oldOutputLow[1])
+ 8192) >> 14) - DTMFdecInst->oldOutputLow[0]);
tempValHigh
- = (WebRtc_Word16) (((WEBRTC_SPL_MUL_16_16(a2, DTMFdecInst->oldOutputHigh[1])
+ = (int16_t) (((WEBRTC_SPL_MUL_16_16(a2, DTMFdecInst->oldOutputHigh[1])
+ 8192) >> 14) - DTMFdecInst->oldOutputHigh[0]);
/* Update recursion memory */
@@ -348,13 +348,13 @@
/* scale high tone with 32768 (15 left shifts)
and low tone with 23171 (3dB lower than high tone) */
tempVal = WEBRTC_SPL_MUL_16_16(DTMF_AMP_LOW, tempValLow)
- + WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)tempValHigh, 15);
+ + WEBRTC_SPL_LSHIFT_W32((int32_t)tempValHigh, 15);
/* Norm the signal to Q14 (with proper rounding) */
tempVal = (tempVal + 16384) >> 15;
/* Scale the signal to correct dbM0 value */
- signal[i] = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(
+ signal[i] = (int16_t) WEBRTC_SPL_RSHIFT_W32(
(WEBRTC_SPL_MUL_16_16(tempVal, WebRtcNetEQ_dtfm_dBm0[volume])
+ 8192), 14); /* volume value is in Q14; use proper rounding */
}
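Taken together, the loop above runs two of these resonators (one per DTMF tone), mixes the high tone at full scale and the low tone 3 dB lower (23171/32768 ~= 1/sqrt(2), about -3.01 dB), and finally applies the dBm0 volume scaling. A floating-point sketch of the same generator, stripped of the Q14 bookkeeping:

    #include <math.h>

    /* Floating-point sketch of the dual second-order oscillator above; the
       fixed-point code keeps a, y[n-1] and y[n-2] in Q14 and rounds. */
    static void dtmf_tone_sketch(double f_low, double f_high, double fs,
                                 int n, double* out) {
      const double kPi = 3.14159265358979323846;
      double a1 = 2.0 * cos(2.0 * kPi * f_low / fs);
      double a2 = 2.0 * cos(2.0 * kPi * f_high / fs);
      /* Start state y[-2] = 0, y[-1] = sin(w) yields a unit-amplitude sine. */
      double low1 = sin(2.0 * kPi * f_low / fs), low2 = 0.0;
      double high1 = sin(2.0 * kPi * f_high / fs), high2 = 0.0;
      for (int i = 0; i < n; i++) {
        double low = a1 * low1 - low2;      /* y[n] = a*y[n-1] - y[n-2] */
        double high = a2 * high1 - high2;
        low2 = low1;   low1 = low;
        high2 = high1; high1 = high;
        out[i] = high + low / sqrt(2.0);    /* low tone 3 dB below high */
      }
    }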
diff --git a/webrtc/modules/audio_coding/neteq/dtmf_tonegen.h b/webrtc/modules/audio_coding/neteq/dtmf_tonegen.h
index add6eb1..5f44899 100644
--- a/webrtc/modules/audio_coding/neteq/dtmf_tonegen.h
+++ b/webrtc/modules/audio_coding/neteq/dtmf_tonegen.h
@@ -28,10 +28,10 @@
typedef struct dtmf_tone_inst_t_
{
- WebRtc_Word16 reinit; /* non-zero if the oscillator model should
+ int16_t reinit; /* non-zero if the oscillator model should
be reinitialized for next event */
- WebRtc_Word16 oldOutputLow[2]; /* oscillator recursion history (low tone) */
- WebRtc_Word16 oldOutputHigh[2]; /* oscillator recursion history (high tone) */
+ int16_t oldOutputLow[2]; /* oscillator recursion history (low tone) */
+ int16_t oldOutputHigh[2]; /* oscillator recursion history (high tone) */
int lastDtmfSample; /* index to the first non-DTMF sample in the
speech history, if non-negative */
@@ -59,12 +59,12 @@
* : <0 - Error
*/
-WebRtc_Word16 WebRtcNetEQ_DTMFGenerate(dtmf_tone_inst_t *DTMFdecInst,
- WebRtc_Word16 value,
- WebRtc_Word16 volume,
- WebRtc_Word16 *signal,
- WebRtc_UWord16 sampFreq,
- WebRtc_Word16 frameLen
+int16_t WebRtcNetEQ_DTMFGenerate(dtmf_tone_inst_t *DTMFdecInst,
+ int16_t value,
+ int16_t volume,
+ int16_t *signal,
+ uint16_t sampFreq,
+ int16_t frameLen
);
#endif /* NETEQ_ATEVENT_DECODE */
diff --git a/webrtc/modules/audio_coding/neteq/expand.c b/webrtc/modules/audio_coding/neteq/expand.c
index 3db7a2a..6a69925 100644
--- a/webrtc/modules/audio_coding/neteq/expand.c
+++ b/webrtc/modules/audio_coding/neteq/expand.c
@@ -30,22 +30,22 @@
Type Name size startpos endpos
(First part of first expand)
- WebRtc_Word16 pw16_bestCorrIndex 3 0 2
- WebRtc_Word16 pw16_bestCorr 3 3 5
- WebRtc_Word16 pw16_bestDistIndex 3 6 8
- WebRtc_Word16 pw16_bestDist 3 9 11
- WebRtc_Word16 pw16_corrVec 102*fs/8000 12 11+102*fs/8000
+ int16_t pw16_bestCorrIndex 3 0 2
+ int16_t pw16_bestCorr 3 3 5
+ int16_t pw16_bestDistIndex 3 6 8
+ int16_t pw16_bestDist 3 9 11
+ int16_t pw16_corrVec 102*fs/8000 12 11+102*fs/8000
func WebRtcNetEQ_Correlator 232 12+102*fs/8000 243+102*fs/8000
(Second part of first expand)
- WebRtc_Word32 pw32_corr2 99*fs/8000+1 0 99*fs/8000
- WebRtc_Word32 pw32_autoCorr 2*7 0 13
- WebRtc_Word16 pw16_rc 6 14 19
+ int32_t pw32_corr2 99*fs/8000+1 0 99*fs/8000
+ int32_t pw32_autoCorr 2*7 0 13
+ int16_t pw16_rc 6 14 19
Signal combination:
- WebRtc_Word16 pw16_randVec 30+120*fs/8000 0 29+120*fs/8000
- WebRtc_Word16 pw16_scaledRandVec 125*fs/8000 30+120*fs/8000 29+245*fs/8000
- WebRtc_Word16 pw16_unvoicedVecSpace 10+125*fs/8000 30+245*fs/8000 39+370*fs/8000
+ int16_t pw16_randVec 30+120*fs/8000 0 29+120*fs/8000
+ int16_t pw16_scaledRandVec 125*fs/8000 30+120*fs/8000 29+245*fs/8000
+ int16_t pw16_unvoicedVecSpace 10+125*fs/8000 30+245*fs/8000 39+370*fs/8000
Total: 40+370*fs/8000 (size depends on UNVOICED_LPC_ORDER and BGN_LPC_ORDER)
*/
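For scale, the total works out to 40 + 370*(fs/8000) int16_t words: 410 words (820 bytes) at 8 kHz, 780 at 16 kHz, and 2260 at 48 kHz; this is presumably the amount the caller reserves in the shared scratch area when SCRATCH is defined.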
@@ -106,59 +106,59 @@
int WebRtcNetEQ_Expand(DSPInst_t *inst,
#ifdef SCRATCH
- WebRtc_Word16 *pw16_scratchPtr,
+ int16_t *pw16_scratchPtr,
#endif
- WebRtc_Word16 *pw16_outData, WebRtc_Word16 *pw16_len,
- WebRtc_Word16 BGNonly)
+ int16_t *pw16_outData, int16_t *pw16_len,
+ int16_t BGNonly)
{
- WebRtc_Word16 fs_mult;
+ int16_t fs_mult;
ExpandInst_t *ExpandState = &(inst->ExpandInst);
BGNInst_t *BGNState = &(inst->BGNInst);
int i;
#ifdef SCRATCH
- WebRtc_Word16 *pw16_randVec = pw16_scratchPtr + SCRATCH_PW16_RAND_VEC;
- WebRtc_Word16 *pw16_scaledRandVec = pw16_scratchPtr + SCRATCH_PW16_SCALED_RAND_VEC;
- WebRtc_Word16 *pw16_unvoicedVecSpace = pw16_scratchPtr + SCRATCH_PW16_UNVOICED_VEC_SPACE;
+ int16_t *pw16_randVec = pw16_scratchPtr + SCRATCH_PW16_RAND_VEC;
+ int16_t *pw16_scaledRandVec = pw16_scratchPtr + SCRATCH_PW16_SCALED_RAND_VEC;
+ int16_t *pw16_unvoicedVecSpace = pw16_scratchPtr + SCRATCH_PW16_UNVOICED_VEC_SPACE;
#else
- WebRtc_Word16 pw16_randVec[FSMULT * 120 + 30]; /* 150 for NB and 270 for WB */
- WebRtc_Word16 pw16_scaledRandVec[FSMULT * 125]; /* 125 for NB and 250 for WB */
- WebRtc_Word16 pw16_unvoicedVecSpace[BGN_LPC_ORDER + FSMULT * 125];
+ int16_t pw16_randVec[FSMULT * 120 + 30]; /* 150 for NB and 270 for WB */
+ int16_t pw16_scaledRandVec[FSMULT * 125]; /* 125 for NB and 250 for WB */
+ int16_t pw16_unvoicedVecSpace[BGN_LPC_ORDER + FSMULT * 125];
#endif
/* 125 for NB and 250 for WB etc. Reuse pw16_outData[] for this vector */
- WebRtc_Word16 *pw16_voicedVecStorage = pw16_outData;
- WebRtc_Word16 *pw16_voicedVec = &pw16_voicedVecStorage[ExpandState->w16_overlap];
- WebRtc_Word16 *pw16_unvoicedVec = pw16_unvoicedVecSpace + UNVOICED_LPC_ORDER;
- WebRtc_Word16 *pw16_cngVec = pw16_unvoicedVecSpace + BGN_LPC_ORDER;
- WebRtc_Word16 w16_expVecsLen, w16_lag = 0, w16_expVecPos;
- WebRtc_Word16 w16_randLen;
- WebRtc_Word16 w16_vfractionChange; /* in Q14 */
- WebRtc_Word16 w16_winMute = 0, w16_winMuteInc = 0, w16_winUnMute = 0, w16_winUnMuteInc = 0;
- WebRtc_Word32 w32_tmp;
- WebRtc_Word16 w16_tmp, w16_tmp2;
- WebRtc_Word16 stability;
+ int16_t *pw16_voicedVecStorage = pw16_outData;
+ int16_t *pw16_voicedVec = &pw16_voicedVecStorage[ExpandState->w16_overlap];
+ int16_t *pw16_unvoicedVec = pw16_unvoicedVecSpace + UNVOICED_LPC_ORDER;
+ int16_t *pw16_cngVec = pw16_unvoicedVecSpace + BGN_LPC_ORDER;
+ int16_t w16_expVecsLen, w16_lag = 0, w16_expVecPos;
+ int16_t w16_randLen;
+ int16_t w16_vfractionChange; /* in Q14 */
+ int16_t w16_winMute = 0, w16_winMuteInc = 0, w16_winUnMute = 0, w16_winUnMuteInc = 0;
+ int32_t w32_tmp;
+ int16_t w16_tmp, w16_tmp2;
+ int16_t stability;
enum BGNMode bgnMode = inst->BGNInst.bgnMode;
/* Pre-calculate common multiplications with fs_mult */
- WebRtc_Word16 fsMult4;
- WebRtc_Word16 fsMult20;
- WebRtc_Word16 fsMult120;
- WebRtc_Word16 fsMultDistLen;
- WebRtc_Word16 fsMultLPCAnalasysLen;
+ int16_t fsMult4;
+ int16_t fsMult20;
+ int16_t fsMult120;
+ int16_t fsMultDistLen;
+ int16_t fsMultLPCAnalasysLen;
#ifdef NETEQ_STEREO
MasterSlaveInfo *msInfo = inst->msInfo;
#endif
- /* fs is WebRtc_UWord16 (to hold fs=48000) */
+ /* fs is uint16_t (to hold fs=48000) */
fs_mult = WebRtcNetEQ_CalcFsMult(inst->fs); /* calculate fs/8000 */
/* Pre-calculate common multiplications with fs_mult */
- fsMult4 = (WebRtc_Word16) WEBRTC_SPL_MUL_16_16(fs_mult, 4);
- fsMult20 = (WebRtc_Word16) WEBRTC_SPL_MUL_16_16(fs_mult, 20);
- fsMult120 = (WebRtc_Word16) WEBRTC_SPL_MUL_16_16(fs_mult, 120);
- fsMultDistLen = (WebRtc_Word16) WEBRTC_SPL_MUL_16_16(fs_mult, DISTLEN);
- fsMultLPCAnalasysLen = (WebRtc_Word16) WEBRTC_SPL_MUL_16_16(fs_mult, LPCANALASYSLEN);
+ fsMult4 = (int16_t) WEBRTC_SPL_MUL_16_16(fs_mult, 4);
+ fsMult20 = (int16_t) WEBRTC_SPL_MUL_16_16(fs_mult, 20);
+ fsMult120 = (int16_t) WEBRTC_SPL_MUL_16_16(fs_mult, 120);
+ fsMultDistLen = (int16_t) WEBRTC_SPL_MUL_16_16(fs_mult, DISTLEN);
+ fsMultLPCAnalasysLen = (int16_t) WEBRTC_SPL_MUL_16_16(fs_mult, LPCANALASYSLEN);
/*
* Perform all the initial setup if it's the first expansion.
@@ -168,47 +168,47 @@
{
/* Setup more variables */
#ifdef SCRATCH
- WebRtc_Word32 *pw32_autoCorr = (WebRtc_Word32*) (pw16_scratchPtr
+ int32_t *pw32_autoCorr = (int32_t*) (pw16_scratchPtr
+ SCRATCH_PW32_AUTO_CORR);
- WebRtc_Word16 *pw16_rc = pw16_scratchPtr + SCRATCH_PW16_RC;
- WebRtc_Word16 *pw16_bestCorrIndex = pw16_scratchPtr + SCRATCH_PW16_BEST_CORR_INDEX;
- WebRtc_Word16 *pw16_bestCorr = pw16_scratchPtr + SCRATCH_PW16_BEST_CORR;
- WebRtc_Word16 *pw16_bestDistIndex = pw16_scratchPtr + SCRATCH_PW16_BEST_DIST_INDEX;
- WebRtc_Word16 *pw16_bestDist = pw16_scratchPtr + SCRATCH_PW16_BEST_DIST;
- WebRtc_Word16 *pw16_corrVec = pw16_scratchPtr + SCRATCH_PW16_CORR_VEC;
- WebRtc_Word32 *pw32_corr2 = (WebRtc_Word32*) (pw16_scratchPtr + SCRATCH_PW16_CORR2);
+ int16_t *pw16_rc = pw16_scratchPtr + SCRATCH_PW16_RC;
+ int16_t *pw16_bestCorrIndex = pw16_scratchPtr + SCRATCH_PW16_BEST_CORR_INDEX;
+ int16_t *pw16_bestCorr = pw16_scratchPtr + SCRATCH_PW16_BEST_CORR;
+ int16_t *pw16_bestDistIndex = pw16_scratchPtr + SCRATCH_PW16_BEST_DIST_INDEX;
+ int16_t *pw16_bestDist = pw16_scratchPtr + SCRATCH_PW16_BEST_DIST;
+ int16_t *pw16_corrVec = pw16_scratchPtr + SCRATCH_PW16_CORR_VEC;
+ int32_t *pw32_corr2 = (int32_t*) (pw16_scratchPtr + SCRATCH_PW16_CORR2);
#else
- WebRtc_Word32 pw32_autoCorr[UNVOICED_LPC_ORDER+1];
- WebRtc_Word16 pw16_rc[UNVOICED_LPC_ORDER];
- WebRtc_Word16 pw16_corrVec[FSMULT*102]; /* 102 for NB */
- WebRtc_Word16 pw16_bestCorrIndex[CHECK_NO_OF_CORRMAX];
- WebRtc_Word16 pw16_bestCorr[CHECK_NO_OF_CORRMAX];
- WebRtc_Word16 pw16_bestDistIndex[CHECK_NO_OF_CORRMAX];
- WebRtc_Word16 pw16_bestDist[CHECK_NO_OF_CORRMAX];
- WebRtc_Word32 pw32_corr2[(99*FSMULT)+1];
+ int32_t pw32_autoCorr[UNVOICED_LPC_ORDER+1];
+ int16_t pw16_rc[UNVOICED_LPC_ORDER];
+ int16_t pw16_corrVec[FSMULT*102]; /* 102 for NB */
+ int16_t pw16_bestCorrIndex[CHECK_NO_OF_CORRMAX];
+ int16_t pw16_bestCorr[CHECK_NO_OF_CORRMAX];
+ int16_t pw16_bestDistIndex[CHECK_NO_OF_CORRMAX];
+ int16_t pw16_bestDist[CHECK_NO_OF_CORRMAX];
+ int32_t pw32_corr2[(99*FSMULT)+1];
#endif
- WebRtc_Word32 pw32_bestDist[CHECK_NO_OF_CORRMAX];
- WebRtc_Word16 w16_ind = 0;
- WebRtc_Word16 w16_corrVecLen;
- WebRtc_Word16 w16_corrScale;
- WebRtc_Word16 w16_distScale;
- WebRtc_Word16 w16_indMin, w16_indMax;
- WebRtc_Word16 w16_len;
- WebRtc_Word32 w32_en1, w32_en2, w32_cc;
- WebRtc_Word16 w16_en1Scale, w16_en2Scale;
- WebRtc_Word16 w16_en1, w16_en2;
- WebRtc_Word32 w32_en1_mul_en2;
- WebRtc_Word16 w16_sqrt_en1en2;
- WebRtc_Word16 w16_ccShiftL;
- WebRtc_Word16 w16_bestcorr; /* Correlation in Q14 */
- WebRtc_Word16 *pw16_vec1, *pw16_vec2;
- WebRtc_Word16 w16_factor;
- WebRtc_Word16 w16_DistLag, w16_CorrLag, w16_diffLag;
- WebRtc_Word16 w16_energyLen;
- WebRtc_Word16 w16_slope;
- WebRtc_Word16 w16_startInd;
- WebRtc_Word16 w16_noOfcorr2;
- WebRtc_Word16 w16_scale;
+ int32_t pw32_bestDist[CHECK_NO_OF_CORRMAX];
+ int16_t w16_ind = 0;
+ int16_t w16_corrVecLen;
+ int16_t w16_corrScale;
+ int16_t w16_distScale;
+ int16_t w16_indMin, w16_indMax;
+ int16_t w16_len;
+ int32_t w32_en1, w32_en2, w32_cc;
+ int16_t w16_en1Scale, w16_en2Scale;
+ int16_t w16_en1, w16_en2;
+ int32_t w32_en1_mul_en2;
+ int16_t w16_sqrt_en1en2;
+ int16_t w16_ccShiftL;
+ int16_t w16_bestcorr; /* Correlation in Q14 */
+ int16_t *pw16_vec1, *pw16_vec2;
+ int16_t w16_factor;
+ int16_t w16_DistLag, w16_CorrLag, w16_diffLag;
+ int16_t w16_energyLen;
+ int16_t w16_slope;
+ int16_t w16_startInd;
+ int16_t w16_noOfcorr2;
+ int16_t w16_scale;
/* Initialize some variables */
ExpandState->w16_lagsDirection = 1;
@@ -273,7 +273,7 @@
}
- /* Shift the distortion values to fit in WebRtc_Word16 */
+ /* Shift the distortion values to fit in int16_t */
WebRtcSpl_VectorBitShiftW32ToW16(pw16_bestDist, CHECK_NO_OF_CORRMAX, pw32_bestDist,
w16_distScale);
@@ -282,15 +282,15 @@
* Do this by a cross multiplication.
*/
- w32_en1 = WEBRTC_SPL_MUL_16_16((WebRtc_Word32) pw16_bestCorr[0],pw16_bestDist[1]);
- w32_en2 = WEBRTC_SPL_MUL_16_16((WebRtc_Word32) pw16_bestCorr[1],pw16_bestDist[0]);
+ w32_en1 = WEBRTC_SPL_MUL_16_16((int32_t) pw16_bestCorr[0],pw16_bestDist[1]);
+ w32_en2 = WEBRTC_SPL_MUL_16_16((int32_t) pw16_bestCorr[1],pw16_bestDist[0]);
if (w32_en1 >= w32_en2)
{
/* 0 wins over 1 */
w32_en1
- = WEBRTC_SPL_MUL_16_16((WebRtc_Word32) pw16_bestCorr[0], pw16_bestDist[2]);
+ = WEBRTC_SPL_MUL_16_16((int32_t) pw16_bestCorr[0], pw16_bestDist[2]);
w32_en2
- = WEBRTC_SPL_MUL_16_16((WebRtc_Word32) pw16_bestCorr[2], pw16_bestDist[0]);
+ = WEBRTC_SPL_MUL_16_16((int32_t) pw16_bestCorr[2], pw16_bestDist[0]);
if (w32_en1 >= w32_en2)
{
/* 0 wins over 2 */
@@ -306,10 +306,10 @@
{
/* 1 wins over 0 */
w32_en1
- = WEBRTC_SPL_MUL_16_16((WebRtc_Word32) pw16_bestCorr[1],pw16_bestDist[2]);
+ = WEBRTC_SPL_MUL_16_16((int32_t) pw16_bestCorr[1],pw16_bestDist[2]);
w32_en2
- = WEBRTC_SPL_MUL_16_16((WebRtc_Word32) pw16_bestCorr[2],pw16_bestDist[1]);
- if ((WebRtc_Word32) w32_en1 >= (WebRtc_Word32) w32_en2)
+ = WEBRTC_SPL_MUL_16_16((int32_t) pw16_bestCorr[2],pw16_bestDist[1]);
+ if ((int32_t) w32_en1 >= (int32_t) w32_en2)
{
/* 1 wins over 2 */
w16_ind = 1;
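The nested comparisons above rank three lag candidates by their correlation-to-distortion ratio without dividing, using the fact that for positive values a/b >= c/d exactly when a*d >= c*b. A standalone sketch of the same idea (illustrative only; the names are not from the patch):

    #include <stdint.h>

    /* Pick the index with the largest corr[i]/dist[i] ratio, division-free:
       corr[i]/dist[i] > corr[best]/dist[best]  <=>  corr[i]*dist[best] > corr[best]*dist[i]. */
    static int best_ratio_index(const int16_t corr[3], const int16_t dist[3])
    {
        int best = 0;
        for (int i = 1; i < 3; i++) {
            int32_t lhs = (int32_t)corr[i] * dist[best];
            int32_t rhs = (int32_t)corr[best] * dist[i];
            if (lhs > rhs)
                best = i;
        }
        return best;
    }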
@@ -376,7 +376,7 @@
= WebRtcSpl_MaxAbsValueW16(
&inst->pw16_speechHistory[inst->w16_speechHistoryLen - w16_len - w16_startInd
- w16_noOfcorr2],
- (WebRtc_Word16) (w16_len + w16_startInd + w16_noOfcorr2 - 1));
+ (int16_t) (w16_len + w16_startInd + w16_noOfcorr2 - 1));
w16_corrScale = ((31 - WebRtcSpl_NormW32(WEBRTC_SPL_MUL_16_16(w16_tmp, w16_tmp)))
+ (31 - WebRtcSpl_NormW32(w16_len))) - 31;
w16_corrScale = WEBRTC_SPL_MAX(0, w16_corrScale);
@@ -418,15 +418,15 @@
/* if sum is odd */
w16_en1Scale += 1;
}
- w16_en1 = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(w32_en1, w16_en1Scale);
- w16_en2 = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(w32_en2, w16_en2Scale);
+ w16_en1 = (int16_t) WEBRTC_SPL_RSHIFT_W32(w32_en1, w16_en1Scale);
+ w16_en2 = (int16_t) WEBRTC_SPL_RSHIFT_W32(w32_en2, w16_en2Scale);
w32_en1_mul_en2 = WEBRTC_SPL_MUL_16_16(w16_en1, w16_en2);
- w16_sqrt_en1en2 = (WebRtc_Word16) WebRtcSpl_SqrtFloor(w32_en1_mul_en2);
+ w16_sqrt_en1en2 = (int16_t) WebRtcSpl_SqrtFloor(w32_en1_mul_en2);
/* Calculate cc/sqrt(en1*en2) in Q14 */
w16_ccShiftL = 14 - ((w16_en1Scale + w16_en2Scale) >> 1);
w32_cc = WEBRTC_SPL_SHIFT_W32(w32_cc, w16_ccShiftL);
- w16_bestcorr = (WebRtc_Word16) WebRtcSpl_DivW32W16(w32_cc, w16_sqrt_en1en2);
+ w16_bestcorr = (int16_t) WebRtcSpl_DivW32W16(w32_cc, w16_sqrt_en1en2);
w16_bestcorr = WEBRTC_SPL_MIN(16384, w16_bestcorr); /* set maximum to 1.0 */
}
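The block above keeps the normalized correlation cc/sqrt(en1*en2) in Q14, where 16384 stands for 1.0; the energies are pre-shifted into 16 bits so their product fits in 32 bits. A floating-point reference of the value being computed (a sketch, not the patch code):

    #include <math.h>
    #include <stdint.h>

    /* Normalized correlation, returned in Q14 and capped at 1.0 (16384). */
    static int16_t norm_corr_q14(double cc, double en1, double en2)
    {
        if (en1 <= 0.0 || en2 <= 0.0)
            return 0;
        double r = cc / sqrt(en1 * en2);
        if (r > 1.0)
            r = 1.0;
        return (int16_t)(r * 16384.0);
    }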
@@ -462,10 +462,10 @@
/* calculate w32_en1/w32_en2 in Q13 */
w32_en1_mul_en2 = WebRtcSpl_DivW32W16(
WEBRTC_SPL_SHIFT_W32(w32_en1, -w16_en1Scale),
- (WebRtc_Word16) (WEBRTC_SPL_RSHIFT_W32(w32_en2, w16_en2Scale)));
+ (int16_t) (WEBRTC_SPL_RSHIFT_W32(w32_en2, w16_en2Scale)));
/* calculate factor in Q13 (sqrt of en1/en2 in Q26) */
- w16_factor = (WebRtc_Word16) WebRtcSpl_SqrtFloor(
+ w16_factor = (int16_t) WebRtcSpl_SqrtFloor(
WEBRTC_SPL_LSHIFT_W32(w32_en1_mul_en2, 13));
/* Copy the two vectors and give them the same energy */
@@ -587,17 +587,17 @@
if (w16_randLen <= RANDVEC_NO_OF_SAMPLES)
{
WEBRTC_SPL_MEMCPY_W16(pw16_randVec,
- (WebRtc_Word16*) WebRtcNetEQ_kRandnTbl, w16_randLen);
+ (int16_t*) WebRtcNetEQ_kRandnTbl, w16_randLen);
}
else
{ /* only applies to SWB where length could be larger than 256 */
#if FSMULT >= 2 /* Makes pw16_randVec longer than RANDVEC_NO_OF_SAMPLES. */
- WEBRTC_SPL_MEMCPY_W16(pw16_randVec, (WebRtc_Word16*) WebRtcNetEQ_kRandnTbl,
+ WEBRTC_SPL_MEMCPY_W16(pw16_randVec, (int16_t*) WebRtcNetEQ_kRandnTbl,
RANDVEC_NO_OF_SAMPLES);
inst->w16_seedInc = (inst->w16_seedInc + 2) & (RANDVEC_NO_OF_SAMPLES - 1);
assert(w16_randLen <= FSMULT * 120 + 30);
WebRtcNetEQ_RandomVec(&inst->uw16_seed, &pw16_randVec[RANDVEC_NO_OF_SAMPLES],
- (WebRtc_Word16) (w16_randLen - RANDVEC_NO_OF_SAMPLES), inst->w16_seedInc);
+ (int16_t) (w16_randLen - RANDVEC_NO_OF_SAMPLES), inst->w16_seedInc);
#else
assert(0);
#endif
@@ -630,7 +630,7 @@
w32_tmp = WEBRTC_SPL_SHIFT_W32(w32_tmp, w16_tmp);
w32_tmp = WebRtcSpl_SqrtFloor(w32_tmp);
ExpandState->w16_arGainScale = 13 + ((w16_tmp + 7 - w16_scale) >> 1);
- ExpandState->w16_arGain = (WebRtc_Word16) w32_tmp;
+ ExpandState->w16_arGain = (int16_t) w32_tmp;
/********************************************************************
* Calculate vfraction from bestcorr *
@@ -647,21 +647,21 @@
if (w16_bestcorr > 7875)
{
/* if x>0.480665 */
- WebRtc_Word16 w16_x1, w16_x2, w16_x3;
+ int16_t w16_x1, w16_x2, w16_x3;
w16_x1 = w16_bestcorr;
- w32_tmp = WEBRTC_SPL_MUL_16_16((WebRtc_Word32) w16_x1, w16_x1);
- w16_x2 = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(w32_tmp, 14);
+ w32_tmp = WEBRTC_SPL_MUL_16_16((int32_t) w16_x1, w16_x1);
+ w16_x2 = (int16_t) WEBRTC_SPL_RSHIFT_W32(w32_tmp, 14);
w32_tmp = WEBRTC_SPL_MUL_16_16(w16_x1, w16_x2);
- w16_x3 = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(w32_tmp, 14);
+ w16_x3 = (int16_t) WEBRTC_SPL_RSHIFT_W32(w32_tmp, 14);
w32_tmp
- = (WebRtc_Word32) WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32) WebRtcNetEQ_kMixFractionFuncTbl[0], 14);
+ = (int32_t) WEBRTC_SPL_LSHIFT_W32((int32_t) WebRtcNetEQ_kMixFractionFuncTbl[0], 14);
w32_tmp
- += (WebRtc_Word32) WEBRTC_SPL_MUL_16_16(WebRtcNetEQ_kMixFractionFuncTbl[1], w16_x1);
+ += (int32_t) WEBRTC_SPL_MUL_16_16(WebRtcNetEQ_kMixFractionFuncTbl[1], w16_x1);
w32_tmp
- += (WebRtc_Word32) WEBRTC_SPL_MUL_16_16(WebRtcNetEQ_kMixFractionFuncTbl[2], w16_x2);
+ += (int32_t) WEBRTC_SPL_MUL_16_16(WebRtcNetEQ_kMixFractionFuncTbl[2], w16_x2);
w32_tmp
- += (WebRtc_Word32) WEBRTC_SPL_MUL_16_16(WebRtcNetEQ_kMixFractionFuncTbl[3], w16_x3);
- ExpandState->w16_vFraction = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(w32_tmp, 12);
+ += (int32_t) WEBRTC_SPL_MUL_16_16(WebRtcNetEQ_kMixFractionFuncTbl[3], w16_x3);
+ ExpandState->w16_vFraction = (int16_t) WEBRTC_SPL_RSHIFT_W32(w32_tmp, 12);
ExpandState->w16_vFraction = WEBRTC_SPL_MIN(ExpandState->w16_vFraction, 16384);
ExpandState->w16_vFraction = WEBRTC_SPL_MAX(ExpandState->w16_vFraction, 0);
}
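The voiced fraction above is a cubic polynomial in the best correlation, evaluated with Q14 multiplies and clamped to [0, 16384]. A compact sketch mirroring the same shifts (c[] stands in for WebRtcNetEQ_kMixFractionFuncTbl, which is defined elsewhere):

    #include <stdint.h>

    /* x is the best correlation in Q14; x^2 and x^3 are kept in Q14,
       the terms are accumulated and shifted down by 12 as in the patch. */
    static int16_t vfraction_q14(int16_t x, const int16_t c[4])
    {
        int16_t x2 = (int16_t)(((int32_t)x * x) >> 14);   /* x^2, Q14 */
        int16_t x3 = (int16_t)(((int32_t)x * x2) >> 14);  /* x^3, Q14 */
        int32_t acc = ((int32_t)c[0]) << 14;
        acc += (int32_t)c[1] * x;
        acc += (int32_t)c[2] * x2;
        acc += (int32_t)c[3] * x3;
        int32_t y = acc >> 12;
        if (y > 16384) y = 16384;   /* clamp to [0, 1.0] in Q14 */
        if (y < 0) y = 0;
        return (int16_t)y;
    }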
@@ -681,9 +681,9 @@
/* Calculate (1-(1/slope))/w16_DistLag = (slope-1)/(w16_DistLag*slope) */
w32_tmp = w16_slope - 8192;
w32_tmp = WEBRTC_SPL_LSHIFT_W32(w32_tmp, 12); /* Value in Q25 (13+12=25) */
- w16_tmp = (WebRtc_Word16) WEBRTC_SPL_MUL_16_16_RSFT(w16_DistLag,
+ w16_tmp = (int16_t) WEBRTC_SPL_MUL_16_16_RSFT(w16_DistLag,
w16_slope, 8); /* Value in Q5 (13-8=5) */
- w16_tmp = (WebRtc_Word16) WebRtcSpl_DivW32W16(w32_tmp,
+ w16_tmp = (int16_t) WebRtcSpl_DivW32W16(w32_tmp,
w16_tmp); /* Res in Q20 (25-5=20) */
if (w16_slope > 14746)
@@ -709,7 +709,7 @@
/* Calculate (1-slope)/w16_DistLag */
w32_tmp = 8192 - w16_slope;
w32_tmp = WEBRTC_SPL_LSHIFT_W32(w32_tmp, 7); /* Value in Q20 (13+7=20) */
- ExpandState->w16_muteSlope = (WebRtc_Word16) WebRtcSpl_DivW32W16(w32_tmp,
+ ExpandState->w16_muteSlope = (int16_t) WebRtcSpl_DivW32W16(w32_tmp,
w16_DistLag); /* Res in Q20 (20-0=20) */
}
ExpandState->w16_onset = 0;
@@ -723,7 +723,7 @@
w32_tmp = 8192 - w16_slope;
w32_tmp = WEBRTC_SPL_LSHIFT_W32(w32_tmp, 7); /* Value in Q20 (13+7=20) */
w32_tmp = WEBRTC_SPL_MAX(w32_tmp, 0);
- ExpandState->w16_muteSlope = (WebRtc_Word16) WebRtcSpl_DivW32W16(w32_tmp,
+ ExpandState->w16_muteSlope = (int16_t) WebRtcSpl_DivW32W16(w32_tmp,
w16_DistLag); /* Res in Q20 (20-0=20) */
w16_tmp = WebRtcNetEQ_k5243div[fs_mult]; /* 0.005/fs_mult = 5243/fs_mult */
ExpandState->w16_muteSlope = WEBRTC_SPL_MAX(w16_tmp, ExpandState->w16_muteSlope);
@@ -772,7 +772,7 @@
inst->w16_seedInc = (inst->w16_seedInc + 2) & (RANDVEC_NO_OF_SAMPLES - 1);
assert(w16_randLen <= FSMULT * 120 + 30);
WebRtcNetEQ_RandomVec(&inst->uw16_seed, &pw16_randVec[RANDVEC_NO_OF_SAMPLES],
- (WebRtc_Word16) (w16_randLen - RANDVEC_NO_OF_SAMPLES), inst->w16_seedInc);
+ (int16_t) (w16_randLen - RANDVEC_NO_OF_SAMPLES), inst->w16_seedInc);
#else
assert(0);
#endif
@@ -880,7 +880,7 @@
for (i = 0; i < ExpandState->w16_overlap; i++)
{
/* Do overlap add between new vector and overlap */
- ExpandState->pw16_overlapVec[i] = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(
+ ExpandState->pw16_overlapVec[i] = (int16_t) WEBRTC_SPL_RSHIFT_W32(
WEBRTC_SPL_MUL_16_16(ExpandState->pw16_overlapVec[i], w16_winMute) +
WEBRTC_SPL_MUL_16_16(
WEBRTC_SPL_MUL_16_16_RSFT(ExpandState->w16_expandMuteFactor,
@@ -913,7 +913,7 @@
UNVOICED_LPC_ORDER);
if (ExpandState->w16_arGainScale > 0)
{
- w32_tmp = ((WebRtc_Word32) 1) << (ExpandState->w16_arGainScale - 1);
+ w32_tmp = ((int32_t) 1) << (ExpandState->w16_arGainScale - 1);
}
else
{
@@ -941,7 +941,7 @@
>=64*fs_mult => go from 1 to 0 in about 32 ms
*/
w16_tmp = (31 - WebRtcSpl_NormW32(ExpandState->w16_maxLag)) - 5; /* getbits(w16_maxLag) -5 */
- w16_vfractionChange = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(256, w16_tmp);
+ w16_vfractionChange = (int16_t) WEBRTC_SPL_RSHIFT_W32(256, w16_tmp);
if (ExpandState->w16_stopMuting == 1)
{
w16_vfractionChange = 0;
@@ -963,7 +963,7 @@
w16_tmp2 = 16384 - ExpandState->w16_currentVFraction;
WebRtcSpl_ScaleAndAddVectorsWithRound(pw16_voicedVec + w16_tmp,
ExpandState->w16_currentVFraction, pw16_unvoicedVec + w16_tmp, w16_tmp2, 14,
- pw16_outData + w16_tmp, (WebRtc_Word16) (w16_lag - w16_tmp));
+ pw16_outData + w16_tmp, (int16_t) (w16_lag - w16_tmp));
}
/* Select muting factor */
@@ -991,9 +991,9 @@
{
WebRtcNetEQ_MuteSignal(pw16_outData, ExpandState->w16_muteSlope, w16_lag);
- w16_tmp = 16384 - (WebRtc_Word16) ((WEBRTC_SPL_MUL_16_16(w16_lag,
+ w16_tmp = 16384 - (int16_t) ((WEBRTC_SPL_MUL_16_16(w16_lag,
ExpandState->w16_muteSlope) + 8192) >> 6); /* 20-14 = 6 */
- w16_tmp = (WebRtc_Word16) ((WEBRTC_SPL_MUL_16_16(w16_tmp,
+ w16_tmp = (int16_t) ((WEBRTC_SPL_MUL_16_16(w16_tmp,
ExpandState->w16_expandMuteFactor) + 8192) >> 14);
/* Guard against getting stuck with very small (but sometimes audible) gain */
@@ -1025,7 +1025,7 @@
if (BGNState->w16_scaleShift > 1)
{
- w32_tmp = ((WebRtc_Word32) 1) << (BGNState->w16_scaleShift - 1);
+ w32_tmp = ((int32_t) 1) << (BGNState->w16_scaleShift - 1);
}
else
{
@@ -1051,7 +1051,7 @@
{
/* fade BGN to zero */
/* calculate muting slope, approx 2^18/fsHz */
- WebRtc_Word16 muteFactor;
+ int16_t muteFactor;
if (fs_mult == 1)
{
muteFactor = -32;
@@ -1136,7 +1136,7 @@
* Only do this if StopMuting != 1 or if explicitly BGNonly, otherwise Expand is
* called from Merge or Normal and special measures must be taken.
*/
- inst->statInst.expandLength += (WebRtc_UWord32) *pw16_len;
+ inst->statInst.expandLength += (uint32_t) *pw16_len;
if (ExpandState->w16_expandMuteFactor == 0 || BGNonly)
{
/* Only noise expansion */
@@ -1177,13 +1177,13 @@
int WebRtcNetEQ_GenerateBGN(DSPInst_t *inst,
#ifdef SCRATCH
- WebRtc_Word16 *pw16_scratchPtr,
+ int16_t *pw16_scratchPtr,
#endif
- WebRtc_Word16 *pw16_outData, WebRtc_Word16 len)
+ int16_t *pw16_outData, int16_t len)
{
- WebRtc_Word16 pos = 0;
- WebRtc_Word16 tempLen = len;
+ int16_t pos = 0;
+ int16_t tempLen = len;
while (tempLen > 0)
{
diff --git a/webrtc/modules/audio_coding/neteq/interface/webrtc_neteq.h b/webrtc/modules/audio_coding/neteq/interface/webrtc_neteq.h
index 10adfa0..d6c68fd 100644
--- a/webrtc/modules/audio_coding/neteq/interface/webrtc_neteq.h
+++ b/webrtc/modules/audio_coding/neteq/interface/webrtc_neteq.h
@@ -111,25 +111,25 @@
* API function calls for each codec
*/
-typedef WebRtc_Word16 (*WebRtcNetEQ_FuncDecode)(void* state, WebRtc_Word16* encoded,
- WebRtc_Word16 len, WebRtc_Word16* decoded,
- WebRtc_Word16* speechType);
-typedef WebRtc_Word16 (*WebRtcNetEQ_FuncDecodePLC)(void* state, WebRtc_Word16* decoded,
- WebRtc_Word16 frames);
-typedef WebRtc_Word16 (*WebRtcNetEQ_FuncDecodeInit)(void* state);
-typedef WebRtc_Word16 (*WebRtcNetEQ_FuncAddLatePkt)(void* state, WebRtc_Word16* encoded,
- WebRtc_Word16 len);
-typedef WebRtc_Word16 (*WebRtcNetEQ_FuncGetMDinfo)(void* state);
-typedef WebRtc_Word16 (*WebRtcNetEQ_FuncGetPitchInfo)(void* state, WebRtc_Word16* encoded,
- WebRtc_Word16* length);
-typedef WebRtc_Word16 (*WebRtcNetEQ_FuncUpdBWEst)(void* state, const WebRtc_UWord16 *encoded,
- WebRtc_Word32 packet_size,
- WebRtc_UWord16 rtp_seq_number,
- WebRtc_UWord32 send_ts,
- WebRtc_UWord32 arr_ts);
+typedef int16_t (*WebRtcNetEQ_FuncDecode)(void* state, int16_t* encoded,
+ int16_t len, int16_t* decoded,
+ int16_t* speechType);
+typedef int16_t (*WebRtcNetEQ_FuncDecodePLC)(void* state, int16_t* decoded,
+ int16_t frames);
+typedef int16_t (*WebRtcNetEQ_FuncDecodeInit)(void* state);
+typedef int16_t (*WebRtcNetEQ_FuncAddLatePkt)(void* state, int16_t* encoded,
+ int16_t len);
+typedef int16_t (*WebRtcNetEQ_FuncGetMDinfo)(void* state);
+typedef int16_t (*WebRtcNetEQ_FuncGetPitchInfo)(void* state, int16_t* encoded,
+ int16_t* length);
+typedef int16_t (*WebRtcNetEQ_FuncUpdBWEst)(void* state, const uint16_t *encoded,
+ int32_t packet_size,
+ uint16_t rtp_seq_number,
+ uint32_t send_ts,
+ uint32_t arr_ts);
typedef int (*WebRtcNetEQ_FuncDurationEst)(void* state, const uint8_t* payload,
int payload_length_bytes);
-typedef WebRtc_Word16 (*WebRtcNetEQ_FuncGetErrorCode)(void* state);
+typedef int16_t (*WebRtcNetEQ_FuncGetErrorCode)(void* state);
/**********************************************************
* Structures
@@ -138,7 +138,7 @@
typedef struct
{
enum WebRtcNetEQDecoder codec;
- WebRtc_Word16 payloadType;
+ int16_t payloadType;
WebRtcNetEQ_FuncDecode funcDecode;
WebRtcNetEQ_FuncDecode funcDecodeRCU;
WebRtcNetEQ_FuncDecodePLC funcDecodePLC;
@@ -150,15 +150,15 @@
WebRtcNetEQ_FuncDurationEst funcDurationEst;
WebRtcNetEQ_FuncGetErrorCode funcGetErrorCode;
void* codec_state;
- WebRtc_UWord16 codec_fs;
+ uint16_t codec_fs;
} WebRtcNetEQ_CodecDef;
typedef struct
{
- WebRtc_UWord16 fraction_lost;
- WebRtc_UWord32 cum_lost;
- WebRtc_UWord32 ext_max;
- WebRtc_UWord32 jitter;
+ uint16_t fraction_lost;
+ uint32_t cum_lost;
+ uint32_t ext_max;
+ uint32_t jitter;
} WebRtcNetEQ_RTCPStat;
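To show how the decoder typedefs and the WebRtcNetEQ_CodecDef struct above fit together, here is a minimal registration sketch. It is not part of the patch: MyDecode/MyDecodeInit are placeholder callbacks, kDecoderPCMu is an assumed entry of enum WebRtcNetEQDecoder, and only the struct fields and WebRtcNetEQ_CodecDbAdd() come from this header.

    #include <string.h>
    #include "webrtc_neteq.h"

    /* Placeholder decoder callbacks matching the typedefs above. */
    static int16_t MyDecodeInit(void* state) { (void)state; return 0; }
    static int16_t MyDecode(void* state, int16_t* encoded, int16_t len,
                            int16_t* decoded, int16_t* speechType) {
        (void)state; (void)encoded; (void)len; (void)decoded;
        *speechType = 1;
        return 0;  /* number of decoded samples */
    }

    static int RegisterMyCodec(void* neteq_inst, void* my_state) {
        WebRtcNetEQ_CodecDef def;
        memset(&def, 0, sizeof(def));
        def.codec          = kDecoderPCMu;  /* assumed enum entry; use the real one */
        def.payloadType    = 0;
        def.funcDecode     = MyDecode;
        def.funcDecodeInit = MyDecodeInit;
        def.codec_state    = my_state;
        def.codec_fs       = 8000;
        return WebRtcNetEQ_CodecDbAdd(neteq_inst, &def);
    }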
/**********************************************************
@@ -184,7 +184,7 @@
/* Init functions */
-int WebRtcNetEQ_Init(void *inst, WebRtc_UWord16 fs);
+int WebRtcNetEQ_Init(void *inst, uint16_t fs);
int WebRtcNetEQ_SetAVTPlayout(void *inst, int PlayoutAVTon);
int WebRtcNetEQ_SetExtraDelay(void *inst, int DelayInMs);
int WebRtcNetEQ_SetPlayoutMode(void *inst, enum WebRtcNetEQPlayoutMode playoutMode);
@@ -196,29 +196,29 @@
int WebRtcNetEQ_CodecDbReset(void *inst);
int WebRtcNetEQ_CodecDbAdd(void *inst, WebRtcNetEQ_CodecDef *codecInst);
int WebRtcNetEQ_CodecDbRemove(void *inst, enum WebRtcNetEQDecoder codec);
-int WebRtcNetEQ_CodecDbGetSizeInfo(void *inst, WebRtc_Word16 *UsedEntries,
- WebRtc_Word16 *MaxEntries);
-int WebRtcNetEQ_CodecDbGetCodecInfo(void *inst, WebRtc_Word16 Entry,
+int WebRtcNetEQ_CodecDbGetSizeInfo(void *inst, int16_t *UsedEntries,
+ int16_t *MaxEntries);
+int WebRtcNetEQ_CodecDbGetCodecInfo(void *inst, int16_t Entry,
enum WebRtcNetEQDecoder *codec);
/* Real-time functions */
-int WebRtcNetEQ_RecIn(void *inst, WebRtc_Word16 *p_w16datagramstart, WebRtc_Word16 w16_RTPlen,
- WebRtc_UWord32 uw32_timeRec);
-int WebRtcNetEQ_RecOut(void *inst, WebRtc_Word16 *pw16_outData, WebRtc_Word16 *pw16_len);
+int WebRtcNetEQ_RecIn(void *inst, int16_t *p_w16datagramstart, int16_t w16_RTPlen,
+ uint32_t uw32_timeRec);
+int WebRtcNetEQ_RecOut(void *inst, int16_t *pw16_outData, int16_t *pw16_len);
int WebRtcNetEQ_GetRTCPStats(void *inst, WebRtcNetEQ_RTCPStat *RTCP_inst);
int WebRtcNetEQ_GetRTCPStatsNoReset(void *inst, WebRtcNetEQ_RTCPStat *RTCP_inst);
-int WebRtcNetEQ_GetSpeechTimeStamp(void *inst, WebRtc_UWord32 *timestamp);
+int WebRtcNetEQ_GetSpeechTimeStamp(void *inst, uint32_t *timestamp);
int WebRtcNetEQ_GetSpeechOutputType(void *inst, enum WebRtcNetEQOutputType *outputType);
/* VQmon related functions */
-int WebRtcNetEQ_VQmonRecOutStatistics(void *inst, WebRtc_UWord16 *validVoiceDurationMs,
- WebRtc_UWord16 *concealedVoiceDurationMs,
- WebRtc_UWord8 *concealedVoiceFlags);
-int WebRtcNetEQ_VQmonGetConfiguration(void *inst, WebRtc_UWord16 *absMaxDelayMs,
- WebRtc_UWord8 *adaptationRate);
-int WebRtcNetEQ_VQmonGetRxStatistics(void *inst, WebRtc_UWord16 *avgDelayMs,
- WebRtc_UWord16 *maxDelayMs);
+int WebRtcNetEQ_VQmonRecOutStatistics(void *inst, uint16_t *validVoiceDurationMs,
+ uint16_t *concealedVoiceDurationMs,
+ uint8_t *concealedVoiceFlags);
+int WebRtcNetEQ_VQmonGetConfiguration(void *inst, uint16_t *absMaxDelayMs,
+ uint8_t *adaptationRate);
+int WebRtcNetEQ_VQmonGetRxStatistics(void *inst, uint16_t *avgDelayMs,
+ uint16_t *maxDelayMs);
#ifdef __cplusplus
}
diff --git a/webrtc/modules/audio_coding/neteq/interface/webrtc_neteq_internal.h b/webrtc/modules/audio_coding/neteq/interface/webrtc_neteq_internal.h
index 3509cf7..c124e26f 100644
--- a/webrtc/modules/audio_coding/neteq/interface/webrtc_neteq_internal.h
+++ b/webrtc/modules/audio_coding/neteq/interface/webrtc_neteq_internal.h
@@ -24,11 +24,11 @@
typedef struct
{
- WebRtc_UWord8 payloadType;
- WebRtc_UWord16 sequenceNumber;
- WebRtc_UWord32 timeStamp;
- WebRtc_UWord32 SSRC;
- WebRtc_UWord8 markerBit;
+ uint8_t payloadType;
+ uint16_t sequenceNumber;
+ uint32_t timeStamp;
+ uint32_t SSRC;
+ uint8_t markerBit;
} WebRtcNetEQ_RTPInfo;
/****************************************************************************
@@ -48,8 +48,8 @@
* -1 - Error
*/
int WebRtcNetEQ_RecInRTPStruct(void *inst, WebRtcNetEQ_RTPInfo *rtpInfo,
- const WebRtc_UWord8 *payloadPtr, WebRtc_Word16 payloadLenBytes,
- WebRtc_UWord32 timeRec);
+ const uint8_t *payloadPtr, int16_t payloadLenBytes,
+ uint32_t timeRec);
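A similarly minimal sketch of feeding one RTP payload through the struct-based API above; the field values are made up and the helper name is arbitrary.

    #include <stdint.h>
    #include "webrtc_neteq_internal.h"

    static int FeedOnePacket(void* neteq_inst, const uint8_t* payload,
                             int16_t payload_bytes, uint32_t recv_time) {
        WebRtcNetEQ_RTPInfo rtp;
        rtp.payloadType    = 0;           /* example payload type */
        rtp.sequenceNumber = 1234;
        rtp.timeStamp      = 160000;
        rtp.SSRC           = 0x12345678;
        rtp.markerBit      = 0;
        return WebRtcNetEQ_RecInRTPStruct(neteq_inst, &rtp, payload,
                                          payload_bytes, recv_time);
    }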
/****************************************************************************
* WebRtcNetEQ_GetMasterSlaveInfoSize(...)
@@ -86,9 +86,9 @@
* -1 - Error
*/
-int WebRtcNetEQ_RecOutMasterSlave(void *inst, WebRtc_Word16 *pw16_outData,
- WebRtc_Word16 *pw16_len, void *msInfo,
- WebRtc_Word16 isMaster);
+int WebRtcNetEQ_RecOutMasterSlave(void *inst, int16_t *pw16_outData,
+ int16_t *pw16_len, void *msInfo,
+ int16_t isMaster);
typedef struct
{
@@ -143,7 +143,7 @@
typedef int (*WebRtcNetEQ_VADInitFunction)(void *VAD_inst);
typedef int (*WebRtcNetEQ_VADSetmodeFunction)(void *VAD_inst, int mode);
typedef int (*WebRtcNetEQ_VADFunction)(void *VAD_inst, int fs,
- WebRtc_Word16 *frame, int frameLen);
+ int16_t *frame, int frameLen);
/****************************************************************************
* WebRtcNetEQ_SetVADInstance(...)
@@ -210,8 +210,8 @@
* -1 - Error
*/
-int WebRtcNetEQ_RecOutNoDecode(void *inst, WebRtc_Word16 *pw16_outData,
- WebRtc_Word16 *pw16_len);
+int WebRtcNetEQ_RecOutNoDecode(void *inst, int16_t *pw16_outData,
+ int16_t *pw16_len);
/****************************************************************************
* WebRtcNetEQ_FlushBuffers(...)
diff --git a/webrtc/modules/audio_coding/neteq/mcu.h b/webrtc/modules/audio_coding/neteq/mcu.h
index 499684a..6994a36 100644
--- a/webrtc/modules/audio_coding/neteq/mcu.h
+++ b/webrtc/modules/audio_coding/neteq/mcu.h
@@ -43,19 +43,19 @@
typedef struct
{
- WebRtc_Word16 current_Codec;
- WebRtc_Word16 current_Payload;
- WebRtc_UWord32 timeStamp; /* Next timestamp that should be played */
- WebRtc_Word16 millisecondsPerCall;
- WebRtc_UWord16 timestampsPerCall; /* Output chunk size */
- WebRtc_UWord16 fs;
- WebRtc_UWord32 ssrc; /* Current ssrc */
- WebRtc_Word16 new_codec;
- WebRtc_Word16 first_packet;
+ int16_t current_Codec;
+ int16_t current_Payload;
+ uint32_t timeStamp; /* Next timestamp that should be played */
+ int16_t millisecondsPerCall;
+ uint16_t timestampsPerCall; /* Output chunk size */
+ uint16_t fs;
+ uint32_t ssrc; /* Current ssrc */
+ int16_t new_codec;
+ int16_t first_packet;
/* MCU/DSP Communication layer */
- WebRtc_Word16 *pw16_readAddress;
- WebRtc_Word16 *pw16_writeAddress;
+ int16_t *pw16_readAddress;
+ int16_t *pw16_writeAddress;
void *main_inst;
CodecDbInst_t codec_DB_inst; /* Information about all the codecs, i.e. which
@@ -71,21 +71,21 @@
dtmf_inst_t DTMF_inst;
#endif
int NoOfExpandCalls;
- WebRtc_Word16 AVT_PlayoutOn;
+ int16_t AVT_PlayoutOn;
enum WebRtcNetEQPlayoutMode NetEqPlayoutMode;
- WebRtc_Word16 one_desc; /* Number of times running on one desc */
+ int16_t one_desc; /* Number of times running on one desc */
- WebRtc_UWord32 lostTS; /* Number of timestamps lost */
- WebRtc_UWord32 lastReportTS; /* Timestamp elapsed since last report was given */
+ uint32_t lostTS; /* Number of timestamps lost */
+ uint32_t lastReportTS; /* Timestamp elapsed since last report was given */
int waiting_times[kLenWaitingTimes]; /* Waiting time statistics storage. */
int len_waiting_times;
int next_waiting_time_index;
- WebRtc_UWord32 externalTS;
- WebRtc_UWord32 internalTS;
- WebRtc_Word16 TSscalingInitialized;
+ uint32_t externalTS;
+ uint32_t internalTS;
+ int16_t TSscalingInitialized;
enum TsScaling scalingFactor;
#ifdef NETEQ_STEREO
@@ -187,7 +187,7 @@
* Return value : 0 - Ok
* <0 - Error
*/
-int WebRtcNetEQ_McuSetFs(MCUInst_t *inst, WebRtc_UWord16 fs_hz);
+int WebRtcNetEQ_McuSetFs(MCUInst_t *inst, uint16_t fs_hz);
/****************************************************************************
* WebRtcNetEQ_SignalMcu(...)
@@ -217,7 +217,7 @@
*/
int WebRtcNetEQ_RecInInternal(MCUInst_t *MCU_inst, RTPPacket_t *RTPpacket,
- WebRtc_UWord32 uw32_timeRec);
+ uint32_t uw32_timeRec);
/****************************************************************************
* WebRtcNetEQ_RecInInternal(...)
@@ -234,7 +234,7 @@
* -1 - Error
*/
int WebRtcNetEQ_SplitAndInsertPayload(RTPPacket_t *packet, PacketBuf_t *Buffer_inst,
- SplitInfo_t *split_inst, WebRtc_Word16 *flushed);
+ SplitInfo_t *split_inst, int16_t *flushed);
/****************************************************************************
* WebRtcNetEQ_GetTimestampScaling(...)
@@ -264,8 +264,8 @@
* Return value : Internal timestamp
*/
-WebRtc_UWord32 WebRtcNetEQ_ScaleTimestampExternalToInternal(const MCUInst_t *MCU_inst,
- WebRtc_UWord32 externalTS);
+uint32_t WebRtcNetEQ_ScaleTimestampExternalToInternal(const MCUInst_t *MCU_inst,
+ uint32_t externalTS);
/****************************************************************************
* WebRtcNetEQ_ScaleTimestampInternalToExternal(...)
@@ -279,6 +279,6 @@
* Return value : External timestamp
*/
-WebRtc_UWord32 WebRtcNetEQ_ScaleTimestampInternalToExternal(const MCUInst_t *MCU_inst,
- WebRtc_UWord32 internalTS);
+uint32_t WebRtcNetEQ_ScaleTimestampInternalToExternal(const MCUInst_t *MCU_inst,
+ uint32_t internalTS);
#endif
diff --git a/webrtc/modules/audio_coding/neteq/mcu_address_init.c b/webrtc/modules/audio_coding/neteq/mcu_address_init.c
index 0306a85..666ecc8 100644
--- a/webrtc/modules/audio_coding/neteq/mcu_address_init.c
+++ b/webrtc/modules/audio_coding/neteq/mcu_address_init.c
@@ -19,8 +19,8 @@
void * Data2DspAddress, void *main_inst)
{
- inst->pw16_readAddress = (WebRtc_Word16*) Data2McuAddress;
- inst->pw16_writeAddress = (WebRtc_Word16*) Data2DspAddress;
+ inst->pw16_readAddress = (int16_t*) Data2McuAddress;
+ inst->pw16_writeAddress = (int16_t*) Data2DspAddress;
inst->main_inst = main_inst;
inst->millisecondsPerCall = 10;
diff --git a/webrtc/modules/audio_coding/neteq/mcu_dsp_common.c b/webrtc/modules/audio_coding/neteq/mcu_dsp_common.c
index 13025d4..744a131 100644
--- a/webrtc/modules/audio_coding/neteq/mcu_dsp_common.c
+++ b/webrtc/modules/audio_coding/neteq/mcu_dsp_common.c
@@ -29,7 +29,7 @@
}
/* The DSP side will call this function to interrupt the MCU side */
-int WebRtcNetEQ_DSP2MCUinterrupt(MainInst_t *inst, WebRtc_Word16 *pw16_shared_mem)
+int WebRtcNetEQ_DSP2MCUinterrupt(MainInst_t *inst, int16_t *pw16_shared_mem)
{
inst->MCUinst.pw16_readAddress = pw16_shared_mem;
inst->MCUinst.pw16_writeAddress = pw16_shared_mem;
diff --git a/webrtc/modules/audio_coding/neteq/mcu_dsp_common.h b/webrtc/modules/audio_coding/neteq/mcu_dsp_common.h
index e3f4213..badffa1 100644
--- a/webrtc/modules/audio_coding/neteq/mcu_dsp_common.h
+++ b/webrtc/modules/audio_coding/neteq/mcu_dsp_common.h
@@ -36,26 +36,26 @@
{
DSPInst_t DSPinst; /* DSP part of the NetEQ instance */
MCUInst_t MCUinst; /* MCU part of the NetEQ instance */
- WebRtc_Word16 ErrorCode; /* Store last error code */
+ int16_t ErrorCode; /* Store last error code */
#ifdef NETEQ_STEREO
- WebRtc_Word16 masterSlave; /* 0 = not set, 1 = master, 2 = slave */
+ int16_t masterSlave; /* 0 = not set, 1 = master, 2 = slave */
#endif /* NETEQ_STEREO */
} MainInst_t;
/* Struct used for communication between DSP and MCU sides of NetEQ */
typedef struct
{
- WebRtc_UWord32 playedOutTS; /* Timestamp position at end of DSP data */
- WebRtc_UWord16 samplesLeft; /* Number of samples stored */
- WebRtc_Word16 MD; /* Multiple description codec information */
- WebRtc_Word16 lastMode; /* Latest mode of NetEQ playout */
- WebRtc_Word16 frameLen; /* Frame length of previously decoded packet */
+ uint32_t playedOutTS; /* Timestamp position at end of DSP data */
+ uint16_t samplesLeft; /* Number of samples stored */
+ int16_t MD; /* Multiple description codec information */
+ int16_t lastMode; /* Latest mode of NetEQ playout */
+ int16_t frameLen; /* Frame length of previously decoded packet */
} DSP2MCU_info_t;
/* Initialize instances with read and write address */
int WebRtcNetEQ_DSPinit(MainInst_t *inst);
/* The DSP side will call this function to interrupt the MCU side */
-int WebRtcNetEQ_DSP2MCUinterrupt(MainInst_t *inst, WebRtc_Word16 *pw16_shared_mem);
+int WebRtcNetEQ_DSP2MCUinterrupt(MainInst_t *inst, int16_t *pw16_shared_mem);
#endif
diff --git a/webrtc/modules/audio_coding/neteq/merge.c b/webrtc/modules/audio_coding/neteq/merge.c
index 5f020a9..bd5239c 100644
--- a/webrtc/modules/audio_coding/neteq/merge.c
+++ b/webrtc/modules/audio_coding/neteq/merge.c
@@ -44,11 +44,11 @@
/* Scratch usage:
Type Name size startpos endpos
- WebRtc_Word16 pw16_expanded 210*fs/8000 0 209*fs/8000
- WebRtc_Word16 pw16_expandedLB 100 210*fs/8000 99+210*fs/8000
- WebRtc_Word16 pw16_decodedLB 40 100+210*fs/8000 139+210*fs/8000
- WebRtc_Word32 pw32_corr 2*60 140+210*fs/8000 260+210*fs/8000
- WebRtc_Word16 pw16_corrVec 68 210*fs/8000 67+210*fs/8000
+ int16_t pw16_expanded 210*fs/8000 0 209*fs/8000
+ int16_t pw16_expandedLB 100 210*fs/8000 99+210*fs/8000
+ int16_t pw16_decodedLB 40 100+210*fs/8000 139+210*fs/8000
+ int32_t pw32_corr 2*60 140+210*fs/8000 260+210*fs/8000
+ int16_t pw16_corrVec 68 210*fs/8000 67+210*fs/8000
[gap in scratch vector]
@@ -86,40 +86,40 @@
int WebRtcNetEQ_Merge(DSPInst_t *inst,
#ifdef SCRATCH
- WebRtc_Word16 *pw16_scratchPtr,
+ int16_t *pw16_scratchPtr,
#endif
- WebRtc_Word16 *pw16_decoded, int len, WebRtc_Word16 *pw16_outData,
- WebRtc_Word16 *pw16_len)
+ int16_t *pw16_decoded, int len, int16_t *pw16_outData,
+ int16_t *pw16_len)
{
- WebRtc_Word16 fs_mult;
- WebRtc_Word16 fs_shift;
- WebRtc_Word32 w32_En_new_frame, w32_En_old_frame;
- WebRtc_Word16 w16_expmax, w16_newmax;
- WebRtc_Word16 w16_tmp, w16_tmp2;
- WebRtc_Word32 w32_tmp;
+ int16_t fs_mult;
+ int16_t fs_shift;
+ int32_t w32_En_new_frame, w32_En_old_frame;
+ int16_t w16_expmax, w16_newmax;
+ int16_t w16_tmp, w16_tmp2;
+ int32_t w32_tmp;
#ifdef SCRATCH
- WebRtc_Word16 *pw16_expanded = pw16_scratchPtr + SCRATCH_pw16_expanded;
- WebRtc_Word16 *pw16_expandedLB = pw16_scratchPtr + SCRATCH_pw16_expandedLB;
- WebRtc_Word16 *pw16_decodedLB = pw16_scratchPtr + SCRATCH_pw16_decodedLB;
- WebRtc_Word32 *pw32_corr = (WebRtc_Word32*) (pw16_scratchPtr + SCRATCH_pw32_corr);
- WebRtc_Word16 *pw16_corrVec = pw16_scratchPtr + SCRATCH_pw16_corrVec;
+ int16_t *pw16_expanded = pw16_scratchPtr + SCRATCH_pw16_expanded;
+ int16_t *pw16_expandedLB = pw16_scratchPtr + SCRATCH_pw16_expandedLB;
+ int16_t *pw16_decodedLB = pw16_scratchPtr + SCRATCH_pw16_decodedLB;
+ int32_t *pw32_corr = (int32_t*) (pw16_scratchPtr + SCRATCH_pw32_corr);
+ int16_t *pw16_corrVec = pw16_scratchPtr + SCRATCH_pw16_corrVec;
#else
- WebRtc_Word16 pw16_expanded[(125+80+5)*FSMULT];
- WebRtc_Word16 pw16_expandedLB[100];
- WebRtc_Word16 pw16_decodedLB[40];
- WebRtc_Word32 pw32_corr[60];
- WebRtc_Word16 pw16_corrVec[4+60+4];
+ int16_t pw16_expanded[(125+80+5)*FSMULT];
+ int16_t pw16_expandedLB[100];
+ int16_t pw16_decodedLB[40];
+ int32_t pw32_corr[60];
+ int16_t pw16_corrVec[4+60+4];
#endif
- WebRtc_Word16 *pw16_corr = &pw16_corrVec[4];
- WebRtc_Word16 w16_stopPos = 0, w16_bestIndex, w16_interpLen;
- WebRtc_Word16 w16_bestVal; /* bestVal is dummy */
- WebRtc_Word16 w16_startfact, w16_inc;
- WebRtc_Word16 w16_expandedLen;
- WebRtc_Word16 w16_startPos;
- WebRtc_Word16 w16_expLen, w16_newLen = 0;
- WebRtc_Word16 *pw16_decodedOut;
- WebRtc_Word16 w16_muted;
+ int16_t *pw16_corr = &pw16_corrVec[4];
+ int16_t w16_stopPos = 0, w16_bestIndex, w16_interpLen;
+ int16_t w16_bestVal; /* bestVal is dummy */
+ int16_t w16_startfact, w16_inc;
+ int16_t w16_expandedLen;
+ int16_t w16_startPos;
+ int16_t w16_expLen, w16_newLen = 0;
+ int16_t *pw16_decodedOut;
+ int16_t w16_muted;
int w16_decodedLen = len;
@@ -209,13 +209,13 @@
/* Adjust muting factor (main muting factor times expand muting factor) */
inst->w16_muteFactor
- = (WebRtc_Word16) WEBRTC_SPL_MUL_16_16_RSFT(inst->w16_muteFactor,
+ = (int16_t) WEBRTC_SPL_MUL_16_16_RSFT(inst->w16_muteFactor,
inst->ExpandInst.w16_expandMuteFactor, 14);
/* Adjust muting factor if new vector is more or less of the BGN energy */
len = WEBRTC_SPL_MIN(64*fs_mult, w16_decodedLen);
- w16_expmax = WebRtcSpl_MaxAbsValueW16(pw16_expanded, (WebRtc_Word16) len);
- w16_newmax = WebRtcSpl_MaxAbsValueW16(pw16_decoded, (WebRtc_Word16) len);
+ w16_expmax = WebRtcSpl_MaxAbsValueW16(pw16_expanded, (int16_t) len);
+ w16_newmax = WebRtcSpl_MaxAbsValueW16(pw16_decoded, (int16_t) len);
/* Calculate energy of old data */
w16_tmp = 6 + fs_shift - WebRtcSpl_NormW32(WEBRTC_SPL_MUL_16_16(w16_expmax, w16_expmax));
@@ -251,10 +251,10 @@
w16_tmp = w16_tmp + 14;
w32_En_old_frame = WEBRTC_SPL_SHIFT_W32(w32_En_old_frame, w16_tmp);
w16_tmp
- = WebRtcSpl_DivW32W16ResW16(w32_En_old_frame, (WebRtc_Word16) w32_En_new_frame);
+ = WebRtcSpl_DivW32W16ResW16(w32_En_old_frame, (int16_t) w32_En_new_frame);
/* Calculate sqrt(w32_En_old_frame/w32_En_new_frame) in Q14 */
- w16_muted = (WebRtc_Word16) WebRtcSpl_SqrtFloor(
- WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)w16_tmp,14));
+ w16_muted = (int16_t) WebRtcSpl_SqrtFloor(
+ WEBRTC_SPL_LSHIFT_W32((int32_t)w16_tmp,14));
}
else
{
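The branch above computes the merge mute factor as roughly sqrt(E_old/E_new) in Q14, so a decoded frame that comes in louder than the expanded signal is pulled back towards its energy. A floating-point reference under that reading (a sketch only; the exact condition and the else branch lie outside this hunk):

    #include <math.h>
    #include <stdint.h>

    static int16_t merge_mute_q14(double en_old, double en_new)
    {
        double g = 1.0;
        if (en_new > en_old && en_new > 0.0)
            g = sqrt(en_old / en_new);   /* < 1.0, attenuate the new frame */
        return (int16_t)(g * 16384.0);   /* Q14 */
    }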
@@ -288,56 +288,56 @@
/* Downsample to 4 kHz */
if (inst->fs == 8000)
{
- WebRtcSpl_DownsampleFast(&pw16_expanded[2], (WebRtc_Word16) (w16_expandedLen - 2),
- pw16_expandedLB, (WebRtc_Word16) (100),
- (WebRtc_Word16*) WebRtcNetEQ_kDownsample8kHzTbl, (WebRtc_Word16) 3,
- (WebRtc_Word16) 2, (WebRtc_Word16) 0);
+ WebRtcSpl_DownsampleFast(&pw16_expanded[2], (int16_t) (w16_expandedLen - 2),
+ pw16_expandedLB, (int16_t) (100),
+ (int16_t*) WebRtcNetEQ_kDownsample8kHzTbl, (int16_t) 3,
+ (int16_t) 2, (int16_t) 0);
if (w16_decodedLen <= 80)
{
/* Not quite long enough, so we have to cheat a bit... */
- WebRtc_Word16 temp_len = w16_decodedLen - 2;
+ int16_t temp_len = w16_decodedLen - 2;
w16_tmp = temp_len / 2;
WebRtcSpl_DownsampleFast(&pw16_decoded[2], temp_len,
pw16_decodedLB, w16_tmp,
- (WebRtc_Word16*) WebRtcNetEQ_kDownsample8kHzTbl,
- (WebRtc_Word16) 3, (WebRtc_Word16) 2, (WebRtc_Word16) 0);
+ (int16_t*) WebRtcNetEQ_kDownsample8kHzTbl,
+ (int16_t) 3, (int16_t) 2, (int16_t) 0);
WebRtcSpl_MemSetW16(&pw16_decodedLB[w16_tmp], 0, (40 - w16_tmp));
}
else
{
WebRtcSpl_DownsampleFast(&pw16_decoded[2],
- (WebRtc_Word16) (w16_decodedLen - 2), pw16_decodedLB,
- (WebRtc_Word16) (40), (WebRtc_Word16*) WebRtcNetEQ_kDownsample8kHzTbl,
- (WebRtc_Word16) 3, (WebRtc_Word16) 2, (WebRtc_Word16) 0);
+ (int16_t) (w16_decodedLen - 2), pw16_decodedLB,
+ (int16_t) (40), (int16_t*) WebRtcNetEQ_kDownsample8kHzTbl,
+ (int16_t) 3, (int16_t) 2, (int16_t) 0);
}
#ifdef NETEQ_WIDEBAND
}
else if (inst->fs==16000)
{
WebRtcSpl_DownsampleFast(
- &pw16_expanded[4], (WebRtc_Word16)(w16_expandedLen-4),
- pw16_expandedLB, (WebRtc_Word16)(100),
- (WebRtc_Word16*)WebRtcNetEQ_kDownsample16kHzTbl, (WebRtc_Word16)5,
- (WebRtc_Word16)4, (WebRtc_Word16)0);
+ &pw16_expanded[4], (int16_t)(w16_expandedLen-4),
+ pw16_expandedLB, (int16_t)(100),
+ (int16_t*)WebRtcNetEQ_kDownsample16kHzTbl, (int16_t)5,
+ (int16_t)4, (int16_t)0);
if (w16_decodedLen<=160)
{
/* Not quite long enough, so we have to cheat a bit... */
- WebRtc_Word16 temp_len = w16_decodedLen - 4;
+ int16_t temp_len = w16_decodedLen - 4;
w16_tmp = temp_len / 4;
WebRtcSpl_DownsampleFast(
&pw16_decoded[4], temp_len,
pw16_decodedLB, w16_tmp,
- (WebRtc_Word16*)WebRtcNetEQ_kDownsample16kHzTbl, (WebRtc_Word16)5,
- (WebRtc_Word16)4, (WebRtc_Word16)0);
+ (int16_t*)WebRtcNetEQ_kDownsample16kHzTbl, (int16_t)5,
+ (int16_t)4, (int16_t)0);
WebRtcSpl_MemSetW16(&pw16_decodedLB[w16_tmp], 0, (40-w16_tmp));
}
else
{
WebRtcSpl_DownsampleFast(
- &pw16_decoded[4], (WebRtc_Word16)(w16_decodedLen-4),
- pw16_decodedLB, (WebRtc_Word16)(40),
- (WebRtc_Word16*)WebRtcNetEQ_kDownsample16kHzTbl, (WebRtc_Word16)5,
- (WebRtc_Word16)4, (WebRtc_Word16)0);
+ &pw16_decoded[4], (int16_t)(w16_decodedLen-4),
+ pw16_decodedLB, (int16_t)(40),
+ (int16_t*)WebRtcNetEQ_kDownsample16kHzTbl, (int16_t)5,
+ (int16_t)4, (int16_t)0);
}
#endif
#ifdef NETEQ_32KHZ_WIDEBAND
@@ -348,29 +348,29 @@
* TODO(hlundin) Why is the offset into pw16_expanded 6?
*/
WebRtcSpl_DownsampleFast(
- &pw16_expanded[6], (WebRtc_Word16)(w16_expandedLen-6),
- pw16_expandedLB, (WebRtc_Word16)(100),
- (WebRtc_Word16*)WebRtcNetEQ_kDownsample32kHzTbl, (WebRtc_Word16)7,
- (WebRtc_Word16)8, (WebRtc_Word16)0);
+ &pw16_expanded[6], (int16_t)(w16_expandedLen-6),
+ pw16_expandedLB, (int16_t)(100),
+ (int16_t*)WebRtcNetEQ_kDownsample32kHzTbl, (int16_t)7,
+ (int16_t)8, (int16_t)0);
if (w16_decodedLen<=320)
{
/* Not quite long enough, so we have to cheat a bit... */
- WebRtc_Word16 temp_len = w16_decodedLen - 6;
+ int16_t temp_len = w16_decodedLen - 6;
w16_tmp = temp_len / 8;
WebRtcSpl_DownsampleFast(
&pw16_decoded[6], temp_len,
pw16_decodedLB, w16_tmp,
- (WebRtc_Word16*)WebRtcNetEQ_kDownsample32kHzTbl, (WebRtc_Word16)7,
- (WebRtc_Word16)8, (WebRtc_Word16)0);
+ (int16_t*)WebRtcNetEQ_kDownsample32kHzTbl, (int16_t)7,
+ (int16_t)8, (int16_t)0);
WebRtcSpl_MemSetW16(&pw16_decodedLB[w16_tmp], 0, (40-w16_tmp));
}
else
{
WebRtcSpl_DownsampleFast(
- &pw16_decoded[6], (WebRtc_Word16)(w16_decodedLen-6),
- pw16_decodedLB, (WebRtc_Word16)(40),
- (WebRtc_Word16*)WebRtcNetEQ_kDownsample32kHzTbl, (WebRtc_Word16)7,
- (WebRtc_Word16)8, (WebRtc_Word16)0);
+ &pw16_decoded[6], (int16_t)(w16_decodedLen-6),
+ pw16_decodedLB, (int16_t)(40),
+ (int16_t*)WebRtcNetEQ_kDownsample32kHzTbl, (int16_t)7,
+ (int16_t)8, (int16_t)0);
}
#endif
#ifdef NETEQ_48KHZ_WIDEBAND
@@ -381,10 +381,10 @@
* TODO(hlundin) Why is the offset into pw16_expanded 6?
*/
WebRtcSpl_DownsampleFast(
- &pw16_expanded[6], (WebRtc_Word16)(w16_expandedLen-6),
- pw16_expandedLB, (WebRtc_Word16)(100),
- (WebRtc_Word16*)WebRtcNetEQ_kDownsample48kHzTbl, (WebRtc_Word16)7,
- (WebRtc_Word16)12, (WebRtc_Word16)0);
+ &pw16_expanded[6], (int16_t)(w16_expandedLen-6),
+ pw16_expandedLB, (int16_t)(100),
+ (int16_t*)WebRtcNetEQ_kDownsample48kHzTbl, (int16_t)7,
+ (int16_t)12, (int16_t)0);
if (w16_decodedLen<=320)
{
/* Not quite long enough, so we have to cheat a bit... */
@@ -393,29 +393,29 @@
* but w16_tmp = temp_len / 8.
* (Was w16_tmp = ((w16_decodedLen-6)>>3) before re-write.)
*/
- WebRtc_Word16 temp_len = w16_decodedLen - 6;
+ int16_t temp_len = w16_decodedLen - 6;
w16_tmp = temp_len / 8;
WebRtcSpl_DownsampleFast(
&pw16_decoded[6], temp_len,
pw16_decodedLB, w16_tmp,
- (WebRtc_Word16*)WebRtcNetEQ_kDownsample48kHzTbl, (WebRtc_Word16)7,
- (WebRtc_Word16)12, (WebRtc_Word16)0);
+ (int16_t*)WebRtcNetEQ_kDownsample48kHzTbl, (int16_t)7,
+ (int16_t)12, (int16_t)0);
WebRtcSpl_MemSetW16(&pw16_decodedLB[w16_tmp], 0, (40-w16_tmp));
}
else
{
WebRtcSpl_DownsampleFast(
- &pw16_decoded[6], (WebRtc_Word16)(w16_decodedLen-6),
- pw16_decodedLB, (WebRtc_Word16)(40),
- (WebRtc_Word16*)WebRtcNetEQ_kDownsample48kHzTbl, (WebRtc_Word16)7,
- (WebRtc_Word16)12, (WebRtc_Word16)0);
+ &pw16_decoded[6], (int16_t)(w16_decodedLen-6),
+ pw16_decodedLB, (int16_t)(40),
+ (int16_t*)WebRtcNetEQ_kDownsample48kHzTbl, (int16_t)7,
+ (int16_t)12, (int16_t)0);
}
#endif
}
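All rates are first brought down to a common 4 kHz search domain, i.e. decimation by fs/4000 (2, 4, 8 or 12), using WebRtcSpl_DownsampleFast with rate-specific filter tables. A naive reference of the decimation step only, without the anti-alias filtering the real routine applies (sketch):

    #include <stdint.h>

    static void decimate(const int16_t* in, int in_len,
                         int16_t* out, int out_len, int factor)
    {
        for (int i = 0; i < out_len && i * factor < in_len; i++)
            out[i] = in[i * factor];   /* keep every factor-th sample */
    }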
/* Calculate correlation without any normalization (40 samples) */
- w16_tmp = WebRtcSpl_DivW32W16ResW16((WebRtc_Word32) inst->ExpandInst.w16_maxLag,
- (WebRtc_Word16) (fs_mult * 2)) + 1;
+ w16_tmp = WebRtcSpl_DivW32W16ResW16((int32_t) inst->ExpandInst.w16_maxLag,
+ (int16_t) (fs_mult * 2)) + 1;
w16_stopPos = WEBRTC_SPL_MIN(60, w16_tmp);
w32_tmp = WEBRTC_SPL_MUL_16_16(w16_expmax, w16_newmax);
if (w32_tmp > 26843546)
@@ -428,9 +428,9 @@
}
WebRtcNetEQ_CrossCorr(pw32_corr, pw16_decodedLB, pw16_expandedLB, 40,
- (WebRtc_Word16) w16_stopPos, w16_tmp, 1);
+ (int16_t) w16_stopPos, w16_tmp, 1);
- /* Normalize correlation to 14 bits and put in a WebRtc_Word16 vector */
+ /* Normalize correlation to 14 bits and put in an int16_t vector */
WebRtcSpl_MemSetW16(pw16_corrVec, 0, (4 + 60 + 4));
w32_tmp = WebRtcSpl_MaxAbsValueW32(pw32_corr, w16_stopPos);
w16_tmp = 17 - WebRtcSpl_NormW32(w32_tmp);
@@ -445,8 +445,8 @@
w16_tmp = WEBRTC_SPL_MAX(0, WEBRTC_SPL_MAX(w16_startPos,
inst->timestampsPerCall+inst->ExpandInst.w16_overlap) - w16_decodedLen);
/* Downscale starting index to 4kHz domain */
- w16_tmp2 = WebRtcSpl_DivW32W16ResW16((WebRtc_Word32) w16_tmp,
- (WebRtc_Word16) (fs_mult << 1));
+ w16_tmp2 = WebRtcSpl_DivW32W16ResW16((int32_t) w16_tmp,
+ (int16_t) (fs_mult << 1));
#ifdef NETEQ_STEREO
} /* end if (msInfo->msMode != NETEQ_SLAVE) */
@@ -500,10 +500,10 @@
if (inst->w16_muteFactor < 16384)
{
WebRtcNetEQ_UnmuteSignal(pw16_decoded, &inst->w16_muteFactor, pw16_decoded, w16_inc,
- (WebRtc_Word16) w16_interpLen);
+ (int16_t) w16_interpLen);
WebRtcNetEQ_UnmuteSignal(&pw16_decoded[w16_interpLen], &inst->w16_muteFactor,
&pw16_decodedOut[w16_interpLen], w16_inc,
- (WebRtc_Word16) (w16_decodedLen - w16_interpLen));
+ (int16_t) (w16_decodedLen - w16_interpLen));
}
else
{
@@ -514,7 +514,7 @@
}
/* Do overlap and interpolate linearly */
- w16_inc = WebRtcSpl_DivW32W16ResW16(16384, (WebRtc_Word16) (w16_interpLen + 1)); /* Q14 */
+ w16_inc = WebRtcSpl_DivW32W16ResW16(16384, (int16_t) (w16_interpLen + 1)); /* Q14 */
w16_startfact = (16384 - w16_inc);
WEBRTC_SPL_MEMMOVE_W16(pw16_outData, pw16_expanded, w16_bestIndex);
WebRtcNetEQ_MixVoiceUnvoice(pw16_decodedOut, &pw16_expanded[w16_bestIndex], pw16_decoded,
diff --git a/webrtc/modules/audio_coding/neteq/min_distortion.c b/webrtc/modules/audio_coding/neteq/min_distortion.c
index 4c9ee1c..47e2b44 100644
--- a/webrtc/modules/audio_coding/neteq/min_distortion.c
+++ b/webrtc/modules/audio_coding/neteq/min_distortion.c
@@ -16,17 +16,17 @@
#include "signal_processing_library.h"
-WebRtc_Word16 WebRtcNetEQ_MinDistortion(const WebRtc_Word16 *pw16_data,
- WebRtc_Word16 w16_minLag, WebRtc_Word16 w16_maxLag,
- WebRtc_Word16 len, WebRtc_Word32 *pw16_dist)
+int16_t WebRtcNetEQ_MinDistortion(const int16_t *pw16_data,
+ int16_t w16_minLag, int16_t w16_maxLag,
+ int16_t len, int32_t *pw16_dist)
{
int i, j;
- const WebRtc_Word16 *pw16_data1;
- const WebRtc_Word16 *pw16_data2;
- WebRtc_Word32 w32_diff;
- WebRtc_Word32 w32_sumdiff;
- WebRtc_Word16 bestIndex = -1;
- WebRtc_Word32 minDist = WEBRTC_SPL_WORD32_MAX;
+ const int16_t *pw16_data1;
+ const int16_t *pw16_data2;
+ int32_t w32_diff;
+ int32_t w32_sumdiff;
+ int16_t bestIndex = -1;
+ int32_t minDist = WEBRTC_SPL_WORD32_MAX;
for (i = w16_minLag; i <= w16_maxLag; i++)
{
diff --git a/webrtc/modules/audio_coding/neteq/mix_voice_unvoice.c b/webrtc/modules/audio_coding/neteq/mix_voice_unvoice.c
index 9895630..6c70d49 100644
--- a/webrtc/modules/audio_coding/neteq/mix_voice_unvoice.c
+++ b/webrtc/modules/audio_coding/neteq/mix_voice_unvoice.c
@@ -17,19 +17,19 @@
#include "signal_processing_library.h"
-void WebRtcNetEQ_MixVoiceUnvoice(WebRtc_Word16 *pw16_outData, WebRtc_Word16 *pw16_voicedVec,
- WebRtc_Word16 *pw16_unvoicedVec,
- WebRtc_Word16 *w16_current_vfraction,
- WebRtc_Word16 w16_vfraction_change, WebRtc_Word16 N)
+void WebRtcNetEQ_MixVoiceUnvoice(int16_t *pw16_outData, int16_t *pw16_voicedVec,
+ int16_t *pw16_unvoicedVec,
+ int16_t *w16_current_vfraction,
+ int16_t w16_vfraction_change, int16_t N)
{
int i;
- WebRtc_Word16 w16_tmp2;
- WebRtc_Word16 vfraction = *w16_current_vfraction;
+ int16_t w16_tmp2;
+ int16_t vfraction = *w16_current_vfraction;
w16_tmp2 = 16384 - vfraction;
for (i = 0; i < N; i++)
{
- pw16_outData[i] = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(
+ pw16_outData[i] = (int16_t) WEBRTC_SPL_RSHIFT_W32(
WEBRTC_SPL_MUL_16_16(vfraction, pw16_voicedVec[i]) +
WEBRTC_SPL_MUL_16_16(w16_tmp2, pw16_unvoicedVec[i]) + 8192,
14);
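The loop body above is a Q14 crossfade, out[i] = f*voiced[i] + (1 - f)*unvoiced[i], with f in Q14 (16384 == 1.0) and the +8192 giving round-to-nearest before the shift. As a one-sample sketch:

    #include <stdint.h>

    static int16_t mix_q14(int16_t voiced, int16_t unvoiced, int16_t f_q14)
    {
        int32_t acc = (int32_t)f_q14 * voiced
                    + (int32_t)(16384 - f_q14) * unvoiced
                    + 8192;              /* rounding offset */
        return (int16_t)(acc >> 14);
    }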
diff --git a/webrtc/modules/audio_coding/neteq/mute_signal.c b/webrtc/modules/audio_coding/neteq/mute_signal.c
index ee899cf..767a71d 100644
--- a/webrtc/modules/audio_coding/neteq/mute_signal.c
+++ b/webrtc/modules/audio_coding/neteq/mute_signal.c
@@ -16,16 +16,16 @@
#include "signal_processing_library.h"
-void WebRtcNetEQ_MuteSignal(WebRtc_Word16 *pw16_inout, WebRtc_Word16 muteSlope,
- WebRtc_Word16 N)
+void WebRtcNetEQ_MuteSignal(int16_t *pw16_inout, int16_t muteSlope,
+ int16_t N)
{
int i;
- WebRtc_Word32 w32_tmp = 1048608; /* (16384<<6 + 32) */
+ int32_t w32_tmp = 1048608; /* (16384<<6 + 32) */
for (i = 0; i < N; i++)
{
pw16_inout[i]
- = (WebRtc_Word16) ((WEBRTC_SPL_MUL_16_16((WebRtc_Word16)(w32_tmp>>6), pw16_inout[i])
+ = (int16_t) ((WEBRTC_SPL_MUL_16_16((int16_t)(w32_tmp>>6), pw16_inout[i])
+ 8192) >> 14);
w32_tmp -= muteSlope;
}
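WebRtcNetEQ_MuteSignal applies a linear fade: the gain starts at 1.0 held in a Q14<<6 accumulator (plus 32 as a rounding bias) and muteSlope is subtracted once per sample, so smaller slopes give longer fades. A sketch of the same ramp:

    #include <stdint.h>

    static void fade_out(int16_t* x, int n, int16_t slope)
    {
        int32_t gain = (16384 << 6) + 32;            /* 1.0 plus rounding bias */
        for (int i = 0; i < n; i++) {
            int16_t g_q14 = (int16_t)(gain >> 6);
            x[i] = (int16_t)(((int32_t)g_q14 * x[i] + 8192) >> 14);
            gain -= slope;
        }
    }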
diff --git a/webrtc/modules/audio_coding/neteq/neteq_defines.h b/webrtc/modules/audio_coding/neteq/neteq_defines.h
index 1f092df..9390f2b 100644
--- a/webrtc/modules/audio_coding/neteq/neteq_defines.h
+++ b/webrtc/modules/audio_coding/neteq/neteq_defines.h
@@ -143,8 +143,8 @@
/* Payload data will contain the SID frame if there is one*/
#define DSP_INSTR_DTMF_GENERATE 0x6000
-/* Payload data will be one WebRtc_Word16 with the current DTMF value and one
- * WebRtc_Word16 with the current volume value
+/* Payload data will be one int16_t with the current DTMF value and one
+ * int16_t with the current volume value
*/
#define DSP_INSTR_NORMAL_ONE_DESC 0x7000
/* No encoded frames */
diff --git a/webrtc/modules/audio_coding/neteq/neteq_statistics.h b/webrtc/modules/audio_coding/neteq/neteq_statistics.h
index 727b109..f355b58 100644
--- a/webrtc/modules/audio_coding/neteq/neteq_statistics.h
+++ b/webrtc/modules/audio_coding/neteq/neteq_statistics.h
@@ -24,15 +24,15 @@
{
/* variables for in-call statistics; queried through WebRtcNetEQ_GetNetworkStatistics */
- WebRtc_UWord32 expandLength; /* number of samples produced through expand */
- WebRtc_UWord32 preemptiveLength; /* number of samples produced through pre-emptive
+ uint32_t expandLength; /* number of samples produced through expand */
+ uint32_t preemptiveLength; /* number of samples produced through pre-emptive
expand */
- WebRtc_UWord32 accelerateLength; /* number of samples removed through accelerate */
+ uint32_t accelerateLength; /* number of samples removed through accelerate */
int addedSamples; /* number of samples inserted in off mode */
/* variables for post-call statistics; queried through WebRtcNetEQ_GetJitterStatistics */
- WebRtc_UWord32 expandedVoiceSamples; /* number of voice samples produced through expand */
- WebRtc_UWord32 expandedNoiseSamples; /* number of noise (background) samples produced
+ uint32_t expandedVoiceSamples; /* number of voice samples produced through expand */
+ uint32_t expandedNoiseSamples; /* number of noise (background) samples produced
through expand */
} DSPStats_t;
diff --git a/webrtc/modules/audio_coding/neteq/normal.c b/webrtc/modules/audio_coding/neteq/normal.c
index b33940a..8cbda52 100644
--- a/webrtc/modules/audio_coding/neteq/normal.c
+++ b/webrtc/modules/audio_coding/neteq/normal.c
@@ -20,7 +20,7 @@
/* Scratch usage:
Type Name size startpos endpos
- WebRtc_Word16 pw16_expanded 125*fs/8000 0 125*fs/8000-1
+ int16_t pw16_expanded 125*fs/8000 0 125*fs/8000-1
func WebRtcNetEQ_Expand 40+370*fs/8000 125*fs/8000 39+495*fs/8000
@@ -66,21 +66,21 @@
int WebRtcNetEQ_Normal(DSPInst_t *inst,
#ifdef SCRATCH
- WebRtc_Word16 *pw16_scratchPtr,
+ int16_t *pw16_scratchPtr,
#endif
- WebRtc_Word16 *pw16_decoded, WebRtc_Word16 len,
- WebRtc_Word16 *pw16_outData, WebRtc_Word16 *pw16_len)
+ int16_t *pw16_decoded, int16_t len,
+ int16_t *pw16_outData, int16_t *pw16_len)
{
int i;
- WebRtc_Word16 fs_mult;
- WebRtc_Word16 fs_shift;
- WebRtc_Word32 w32_En_speech;
- WebRtc_Word16 enLen;
- WebRtc_Word16 w16_muted;
- WebRtc_Word16 w16_inc, w16_frac;
- WebRtc_Word16 w16_tmp;
- WebRtc_Word32 w32_tmp;
+ int16_t fs_mult;
+ int16_t fs_shift;
+ int32_t w32_En_speech;
+ int16_t enLen;
+ int16_t w16_muted;
+ int16_t w16_inc, w16_frac;
+ int16_t w16_tmp;
+ int32_t w32_tmp;
/* Sanity check */
if (len < 0)
@@ -108,15 +108,15 @@
/* Define memory where temporary result from Expand algorithm can be stored. */
#ifdef SCRATCH
- WebRtc_Word16 *pw16_expanded = pw16_scratchPtr + SCRATCH_PW16_EXPANDED;
+ int16_t *pw16_expanded = pw16_scratchPtr + SCRATCH_PW16_EXPANDED;
#else
- WebRtc_Word16 pw16_expanded[FSMULT * 125];
+ int16_t pw16_expanded[FSMULT * 125];
#endif
- WebRtc_Word16 expandedLen = 0;
- WebRtc_Word16 w16_decodedMax;
+ int16_t expandedLen = 0;
+ int16_t w16_decodedMax;
/* Find largest value in new data */
- w16_decodedMax = WebRtcSpl_MaxAbsValueW16(pw16_decoded, (WebRtc_Word16) len);
+ w16_decodedMax = WebRtcSpl_MaxAbsValueW16(pw16_decoded, (int16_t) len);
/* Generate interpolation data using Expand */
/* First, set Expand parameters to appropriate values. */
@@ -129,7 +129,7 @@
#ifdef SCRATCH
pw16_scratchPtr + SCRATCH_NETEQ_EXPAND,
#endif
- pw16_expanded, &expandedLen, (WebRtc_Word16) (inst->w16_mode == MODE_FADE_TO_BGN));
+ pw16_expanded, &expandedLen, (int16_t) (inst->w16_mode == MODE_FADE_TO_BGN));
inst->ExpandInst.w16_stopMuting = 0; /* Restore value */
inst->ExpandInst.w16_consecExp = 0; /* Last was not Expand any more */
@@ -144,7 +144,7 @@
{
/* w16_muteFactor * w16_expandMuteFactor */
inst->w16_muteFactor
- = (WebRtc_Word16) WEBRTC_SPL_MUL_16_16_RSFT(inst->w16_muteFactor,
+ = (int16_t) WEBRTC_SPL_MUL_16_16_RSFT(inst->w16_muteFactor,
inst->ExpandInst.w16_expandMuteFactor, 14);
}
@@ -154,7 +154,7 @@
WEBRTC_SPL_MUL_16_16(w16_decodedMax, w16_decodedMax));
w16_tmp = WEBRTC_SPL_MAX(w16_tmp, 0);
w32_En_speech = WebRtcNetEQ_DotW16W16(pw16_decoded, pw16_decoded, enLen, w16_tmp);
- w32_En_speech = WebRtcSpl_DivW32W16(w32_En_speech, (WebRtc_Word16) (enLen >> w16_tmp));
+ w32_En_speech = WebRtcSpl_DivW32W16(w32_En_speech, (int16_t) (enLen >> w16_tmp));
if ((w32_En_speech != 0) && (w32_En_speech > inst->BGNInst.w32_energy))
{
@@ -162,10 +162,10 @@
w16_tmp = WebRtcSpl_NormW32(w32_En_speech) - 16;
/* we want inst->BGNInst.energy/En_speech in Q14 */
w32_tmp = WEBRTC_SPL_SHIFT_W32(inst->BGNInst.w32_energy, (w16_tmp+14));
- w16_tmp = (WebRtc_Word16) WEBRTC_SPL_SHIFT_W32(w32_En_speech, w16_tmp);
- w16_tmp = (WebRtc_Word16) WebRtcSpl_DivW32W16(w32_tmp, w16_tmp);
- w16_muted = (WebRtc_Word16) WebRtcSpl_SqrtFloor(
- WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32) w16_tmp,
+ w16_tmp = (int16_t) WEBRTC_SPL_SHIFT_W32(w32_En_speech, w16_tmp);
+ w16_tmp = (int16_t) WebRtcSpl_DivW32W16(w32_tmp, w16_tmp);
+ w16_muted = (int16_t) WebRtcSpl_SqrtFloor(
+ WEBRTC_SPL_LSHIFT_W32((int32_t) w16_tmp,
14)); /* w16_muted in Q14 (sqrt(Q28)) */
}
else
@@ -184,7 +184,7 @@
/* scale with mute factor */
w32_tmp = WEBRTC_SPL_MUL_16_16(pw16_decoded[i], inst->w16_muteFactor);
/* shift 14 with proper rounding */
- pw16_decoded[i] = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32((w32_tmp + 8192), 14);
+ pw16_decoded[i] = (int16_t) WEBRTC_SPL_RSHIFT_W32((w32_tmp + 8192), 14);
/* increase mute_factor towards 16384 */
inst->w16_muteFactor = WEBRTC_SPL_MIN(16384, (inst->w16_muteFactor+w16_inc));
}
@@ -198,7 +198,7 @@
w16_frac = w16_inc;
for (i = 0; i < 8 * fs_mult; i++)
{
- pw16_decoded[i] = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(
+ pw16_decoded[i] = (int16_t) WEBRTC_SPL_RSHIFT_W32(
(WEBRTC_SPL_MUL_16_16(w16_frac, pw16_decoded[i]) +
WEBRTC_SPL_MUL_16_16((32 - w16_frac), pw16_expanded[i]) + 8),
5);
@@ -209,7 +209,7 @@
}
else if (inst->w16_mode==MODE_RFC3389CNG)
{ /* previous was RFC 3389 CNG...*/
- WebRtc_Word16 pw16_CngInterp[32];
+ int16_t pw16_CngInterp[32];
/* Reset mute factor and start up fresh */
inst->w16_muteFactor = 16384;
if (inst->CNG_Codec_inst != NULL)
@@ -238,7 +238,7 @@
w16_frac = w16_inc;
for (i = 0; i < 8 * fs_mult; i++)
{
- pw16_decoded[i] = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(
+ pw16_decoded[i] = (int16_t) WEBRTC_SPL_RSHIFT_W32(
(WEBRTC_SPL_MUL_16_16(w16_frac, pw16_decoded[i]) +
WEBRTC_SPL_MUL_16_16((32-w16_frac), pw16_CngInterp[i]) + 8),
5);
@@ -260,7 +260,7 @@
/* scale with mute factor */
w32_tmp = WEBRTC_SPL_MUL_16_16(pw16_decoded[i], inst->w16_muteFactor);
/* shift 14 with proper rounding */
- pw16_decoded[i] = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32((w32_tmp + 8192), 14);
+ pw16_decoded[i] = (int16_t) WEBRTC_SPL_RSHIFT_W32((w32_tmp + 8192), 14);
/* increase mute_factor towards 16384 */
inst->w16_muteFactor = WEBRTC_SPL_MIN(16384, (inst->w16_muteFactor+w16_inc));
}
diff --git a/webrtc/modules/audio_coding/neteq/packet_buffer.c b/webrtc/modules/audio_coding/neteq/packet_buffer.c
index c51805e..39f40ef 100644
--- a/webrtc/modules/audio_coding/neteq/packet_buffer.c
+++ b/webrtc/modules/audio_coding/neteq/packet_buffer.c
@@ -26,12 +26,12 @@
#include <stdio.h>
extern FILE *delay_fid2; /* file pointer to delay log file */
-extern WebRtc_UWord32 tot_received_packets;
+extern uint32_t tot_received_packets;
#endif /* NETEQ_DELAY_LOGGING */
int WebRtcNetEQ_PacketBufferInit(PacketBuf_t *bufferInst, int maxNoOfPackets,
- WebRtc_Word16 *pw16_memory, int memorySize)
+ int16_t *pw16_memory, int memorySize)
{
int i;
int pos = 0;
@@ -45,11 +45,11 @@
}
/* Clear the buffer instance */
- WebRtcSpl_MemSetW16((WebRtc_Word16*) bufferInst, 0,
- sizeof(PacketBuf_t) / sizeof(WebRtc_Word16));
+ WebRtcSpl_MemSetW16((int16_t*) bufferInst, 0,
+ sizeof(PacketBuf_t) / sizeof(int16_t));
/* Clear the buffer memory */
- WebRtcSpl_MemSetW16((WebRtc_Word16*) pw16_memory, 0, memorySize);
+ WebRtcSpl_MemSetW16((int16_t*) pw16_memory, 0, memorySize);
/* Set maximum number of packets */
bufferInst->maxInsertPositions = maxNoOfPackets;
@@ -57,26 +57,26 @@
/* Initialize array pointers */
/* After each pointer has been set, the index pos is advanced to point immediately
* after the recently allocated vector. Note that one step for the pos index
- * corresponds to a WebRtc_Word16.
+ * corresponds to an int16_t.
*/
- bufferInst->timeStamp = (WebRtc_UWord32*) &pw16_memory[pos];
- pos += maxNoOfPackets << 1; /* advance maxNoOfPackets * WebRtc_UWord32 */
+ bufferInst->timeStamp = (uint32_t*) &pw16_memory[pos];
+ pos += maxNoOfPackets << 1; /* advance maxNoOfPackets * uint32_t */
- bufferInst->payloadLocation = (WebRtc_Word16**) &pw16_memory[pos];
- pos += maxNoOfPackets * (sizeof(WebRtc_Word16*) / sizeof(WebRtc_Word16)); /* advance */
+ bufferInst->payloadLocation = (int16_t**) &pw16_memory[pos];
+ pos += maxNoOfPackets * (sizeof(int16_t*) / sizeof(int16_t)); /* advance */
- bufferInst->seqNumber = (WebRtc_UWord16*) &pw16_memory[pos];
- pos += maxNoOfPackets; /* advance maxNoOfPackets * WebRtc_UWord16 */
+ bufferInst->seqNumber = (uint16_t*) &pw16_memory[pos];
+ pos += maxNoOfPackets; /* advance maxNoOfPackets * uint16_t */
bufferInst->payloadType = &pw16_memory[pos];
- pos += maxNoOfPackets; /* advance maxNoOfPackets * WebRtc_Word16 */
+ pos += maxNoOfPackets; /* advance maxNoOfPackets * int16_t */
bufferInst->payloadLengthBytes = &pw16_memory[pos];
- pos += maxNoOfPackets; /* advance maxNoOfPackets * WebRtc_Word16 */
+ pos += maxNoOfPackets; /* advance maxNoOfPackets * int16_t */
bufferInst->rcuPlCntr = &pw16_memory[pos];
- pos += maxNoOfPackets; /* advance maxNoOfPackets * WebRtc_Word16 */
+ pos += maxNoOfPackets; /* advance maxNoOfPackets * int16_t */
bufferInst->waitingTime = (int*) (&pw16_memory[pos]);
/* Advance maxNoOfPackets * sizeof(waitingTime element). */
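The initializer carves all per-slot arrays out of the single int16_t block supplied by the caller, advancing a running index by each array's size measured in int16_t units (so 32-bit arrays advance it by two per slot). A reduced sketch of the same carving, covering three fields only; like the real code it assumes the caller provides suitably aligned memory:

    #include <stdint.h>

    typedef struct {
        uint32_t* timeStamp;
        uint16_t* seqNumber;
        int16_t*  payloadType;
    } MiniBuf;

    static void carve(MiniBuf* b, int16_t* mem, int slots)
    {
        int pos = 0;
        b->timeStamp   = (uint32_t*)&mem[pos];  pos += slots * 2;  /* 32-bit entries */
        b->seqNumber   = (uint16_t*)&mem[pos];  pos += slots;      /* 16-bit entries */
        b->payloadType = &mem[pos];             pos += slots;
    }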
@@ -140,7 +140,7 @@
int WebRtcNetEQ_PacketBufferInsert(PacketBuf_t *bufferInst, const RTPPacket_t *RTPpacket,
- WebRtc_Word16 *flushed)
+ int16_t *flushed)
{
int nextPos;
int i;
@@ -161,7 +161,7 @@
}
/* Sanity check for payload length
- (payloadLen in bytes and memory size in WebRtc_Word16) */
+ (payloadLen in bytes and memory size in int16_t) */
if ((RTPpacket->payloadLen > (bufferInst->memorySizeW16 << 1)) || (RTPpacket->payloadLen
<= 0))
{
@@ -184,7 +184,7 @@
if (bufferInst->currentMemoryPos + ((RTPpacket->payloadLen + 1) >> 1)
>= &bufferInst->startPayloadMemory[bufferInst->memorySizeW16])
{
- WebRtc_Word16 *tempMemAddress;
+ int16_t *tempMemAddress;
/*
* Payload does not fit at the end of the memory, put it in the beginning
@@ -323,13 +323,13 @@
temp_var = NETEQ_DELAY_LOGGING_SIGNAL_RECIN;
if ((fwrite(&temp_var, sizeof(int),
1, delay_fid2) != 1) ||
- (fwrite(&RTPpacket->timeStamp, sizeof(WebRtc_UWord32),
+ (fwrite(&RTPpacket->timeStamp, sizeof(uint32_t),
1, delay_fid2) != 1) ||
- (fwrite(&RTPpacket->seqNumber, sizeof(WebRtc_UWord16),
+ (fwrite(&RTPpacket->seqNumber, sizeof(uint16_t),
1, delay_fid2) != 1) ||
(fwrite(&RTPpacket->payloadType, sizeof(int),
1, delay_fid2) != 1) ||
- (fwrite(&RTPpacket->payloadLen, sizeof(WebRtc_Word16),
+ (fwrite(&RTPpacket->payloadLen, sizeof(int16_t),
1, delay_fid2) != 1)) {
return -1;
}
@@ -369,9 +369,9 @@
/* Copy the actual data payload to RTP packet struct */
- WEBRTC_SPL_MEMCPY_W16((WebRtc_Word16*) RTPpacket->payload,
+ WEBRTC_SPL_MEMCPY_W16((int16_t*) RTPpacket->payload,
bufferInst->payloadLocation[bufferPosition],
- (bufferInst->payloadLengthBytes[bufferPosition] + 1) >> 1); /*length in WebRtc_Word16*/
+ (bufferInst->payloadLengthBytes[bufferPosition] + 1) >> 1); /*length in int16_t*/
/* Copy payload parameters */
RTPpacket->payloadLen = bufferInst->payloadLengthBytes[bufferPosition];
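Payload lengths travel in bytes while the payload memory is addressed in int16_t words, so the copy above rounds the byte count up with `(payloadLen + 1) >> 1`. A one-line illustration of that conversion:

    /* Sketch: store a byte length in int16_t words, rounding up for odd sizes. */
    static int bytes_to_w16(int payload_len_bytes)
    {
        return (payload_len_bytes + 1) >> 1;  /* e.g. 5 bytes -> 3 words */
    }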
@@ -503,14 +503,13 @@
buffer_inst->payloadLengthBytes[buffer_pos]);
}
-WebRtc_Word32 WebRtcNetEQ_PacketBufferGetSize(const PacketBuf_t* buffer_inst,
- const CodecDbInst_t*
- codec_database) {
+int32_t WebRtcNetEQ_PacketBufferGetSize(const PacketBuf_t* buffer_inst,
+ const CodecDbInst_t* codec_database) {
int i, count;
int last_duration;
int last_codec_pos;
int last_payload_type;
- WebRtc_Word32 size_samples;
+ int32_t size_samples;
count = 0;
last_duration = buffer_inst->packSizeSamples;
@@ -583,9 +582,9 @@
{
int i;
int ok = 0;
- WebRtc_Word16 w16_tmp;
- WebRtc_Word16 codecBytes;
- WebRtc_Word16 codecBuffers;
+ int16_t w16_tmp;
+ int16_t codecBytes;
+ int16_t codecBuffers;
/* Initialize return variables to zero */
*maxBytes = 0;
@@ -786,12 +785,12 @@
* Add size needed by the additional pointers for each slot inside struct,
* as indicated on each line below.
*/
- w16_tmp = (sizeof(WebRtc_UWord32) /* timeStamp */
- + sizeof(WebRtc_Word16*) /* payloadLocation */
- + sizeof(WebRtc_UWord16) /* seqNumber */
- + sizeof(WebRtc_Word16) /* payloadType */
- + sizeof(WebRtc_Word16) /* payloadLengthBytes */
- + sizeof(WebRtc_Word16) /* rcuPlCntr */
+ w16_tmp = (sizeof(uint32_t) /* timeStamp */
+ + sizeof(int16_t*) /* payloadLocation */
+ + sizeof(uint16_t) /* seqNumber */
+ + sizeof(int16_t) /* payloadType */
+ + sizeof(int16_t) /* payloadLengthBytes */
+ + sizeof(int16_t) /* rcuPlCntr */
+ sizeof(int)); /* waitingTime */
/* Add the extra size per slot to the memory count */
*maxBytes += w16_tmp * (*maxSlots);
diff --git a/webrtc/modules/audio_coding/neteq/packet_buffer.h b/webrtc/modules/audio_coding/neteq/packet_buffer.h
index 1aa435f..afd74db 100644
--- a/webrtc/modules/audio_coding/neteq/packet_buffer.h
+++ b/webrtc/modules/audio_coding/neteq/packet_buffer.h
@@ -31,10 +31,10 @@
{
/* Variables common to the entire buffer */
- WebRtc_UWord16 packSizeSamples; /* packet size in samples of last decoded packet */
- WebRtc_Word16 *startPayloadMemory; /* pointer to the payload memory */
- int memorySizeW16; /* the size (in WebRtc_Word16) of the payload memory */
- WebRtc_Word16 *currentMemoryPos; /* The memory position to insert next payload */
+ uint16_t packSizeSamples; /* packet size in samples of last decoded packet */
+ int16_t *startPayloadMemory; /* pointer to the payload memory */
+ int memorySizeW16; /* the size (in int16_t) of the payload memory */
+ int16_t *currentMemoryPos; /* The memory position to insert next payload */
int numPacketsInBuffer; /* The number of packets in the buffer */
int insertPosition; /* The position to insert next packet */
int maxInsertPositions; /* Maximum number of packets allowed */
@@ -42,18 +42,18 @@
/* Arrays with one entry per packet slot */
/* NOTE: If these are changed, the changes must be accounted for at the end of
the function WebRtcNetEQ_GetDefaultCodecSettings(). */
- WebRtc_UWord32 *timeStamp; /* Timestamp in slot n */
- WebRtc_Word16 **payloadLocation; /* Memory location of payload in slot n */
- WebRtc_UWord16 *seqNumber; /* Sequence number in slot n */
- WebRtc_Word16 *payloadType; /* Payload type of packet in slot n */
- WebRtc_Word16 *payloadLengthBytes; /* Payload length of packet in slot n */
- WebRtc_Word16 *rcuPlCntr; /* zero for non-RCU payload, 1 for main payload
+ uint32_t *timeStamp; /* Timestamp in slot n */
+ int16_t **payloadLocation; /* Memory location of payload in slot n */
+ uint16_t *seqNumber; /* Sequence number in slot n */
+ int16_t *payloadType; /* Payload type of packet in slot n */
+ int16_t *payloadLengthBytes; /* Payload length of packet in slot n */
+ int16_t *rcuPlCntr; /* zero for non-RCU payload, 1 for main payload
2 for redundant payload */
int *waitingTime;
/* Statistics counter */
- WebRtc_UWord16 discardedPackets; /* Number of discarded packets */
+ uint16_t discardedPackets; /* Number of discarded packets */
} PacketBuf_t;
@@ -70,7 +70,7 @@
* - bufferInst : Buffer instance to be initialized
* - noOfPackets : Maximum number of packets that buffer should hold
* - memory : Pointer to the storage memory for the payloads
- * - memorySize : The size of the payload memory (in WebRtc_Word16)
+ * - memorySize : The size of the payload memory (in int16_t)
*
* Output:
* - bufferInst : Updated buffer instance
@@ -80,7 +80,7 @@
*/
int WebRtcNetEQ_PacketBufferInit(PacketBuf_t *bufferInst, int maxNoOfPackets,
- WebRtc_Word16 *pw16_memory, int memorySize);
+ int16_t *pw16_memory, int memorySize);
/****************************************************************************
* WebRtcNetEQ_PacketBufferFlush(...)
@@ -117,7 +117,7 @@
*/
int WebRtcNetEQ_PacketBufferInsert(PacketBuf_t *bufferInst, const RTPPacket_t *RTPpacket,
- WebRtc_Word16 *flushed);
+ int16_t *flushed);
/****************************************************************************
* WebRtcNetEQ_PacketBufferExtract(...)
@@ -208,9 +208,8 @@
* Return value : The buffer size in samples
*/
-WebRtc_Word32 WebRtcNetEQ_PacketBufferGetSize(const PacketBuf_t* buffer_inst,
- const CodecDbInst_t*
- codec_database);
+int32_t WebRtcNetEQ_PacketBufferGetSize(const PacketBuf_t* buffer_inst,
+ const CodecDbInst_t* codec_database);
/****************************************************************************
* WebRtcNetEQ_IncrementWaitingTimes(...)
diff --git a/webrtc/modules/audio_coding/neteq/peak_detection.c b/webrtc/modules/audio_coding/neteq/peak_detection.c
index 678c7f9..8c85d2a 100644
--- a/webrtc/modules/audio_coding/neteq/peak_detection.c
+++ b/webrtc/modules/audio_coding/neteq/peak_detection.c
@@ -17,7 +17,7 @@
#include "signal_processing_library.h"
/* Table of constants used in parabolic fit function WebRtcNetEQ_PrblFit */
-const WebRtc_Word16 WebRtcNetEQ_kPrblCf[17][3] = { { 120, 32, 64 }, { 140, 44, 75 },
+const int16_t WebRtcNetEQ_kPrblCf[17][3] = { { 120, 32, 64 }, { 140, 44, 75 },
{ 150, 50, 80 }, { 160, 57, 85 },
{ 180, 72, 96 }, { 200, 89, 107 },
{ 210, 98, 112 }, { 220, 108, 117 },
@@ -27,17 +27,17 @@
{ 330, 242, 176 }, { 340, 257, 181 },
{ 360, 288, 192 } };
-WebRtc_Word16 WebRtcNetEQ_PeakDetection(WebRtc_Word16 *pw16_data, WebRtc_Word16 w16_dataLen,
- WebRtc_Word16 w16_nmbPeaks, WebRtc_Word16 fs_mult,
- WebRtc_Word16 *pw16_winIndex,
- WebRtc_Word16 *pw16_winValue)
+int16_t WebRtcNetEQ_PeakDetection(int16_t *pw16_data, int16_t w16_dataLen,
+ int16_t w16_nmbPeaks, int16_t fs_mult,
+ int16_t *pw16_winIndex,
+ int16_t *pw16_winValue)
{
/* Local variables */
int i;
- WebRtc_Word16 w16_tmp;
- WebRtc_Word16 w16_tmp2;
- WebRtc_Word16 indMin = 0;
- WebRtc_Word16 indMax = 0;
+ int16_t w16_tmp;
+ int16_t w16_tmp2;
+ int16_t indMin = 0;
+ int16_t indMax = 0;
/* Peak detection */
@@ -53,7 +53,7 @@
w16_dataLen++;
}
- pw16_winIndex[i] = WebRtcSpl_MaxIndexW16(pw16_data, (WebRtc_Word16) (w16_dataLen - 1));
+ pw16_winIndex[i] = WebRtcSpl_MaxIndexW16(pw16_data, (int16_t) (w16_dataLen - 1));
if (i != w16_nmbPeaks - 1)
{
@@ -103,14 +103,14 @@
return 0;
}
-WebRtc_Word16 WebRtcNetEQ_PrblFit(WebRtc_Word16 *pw16_3pts, WebRtc_Word16 *pw16_Ind,
- WebRtc_Word16 *pw16_outVal, WebRtc_Word16 fs_mult)
+int16_t WebRtcNetEQ_PrblFit(int16_t *pw16_3pts, int16_t *pw16_Ind,
+ int16_t *pw16_outVal, int16_t fs_mult)
{
/* Variables */
- WebRtc_Word32 Num, Den;
- WebRtc_Word32 temp;
- WebRtc_Word16 flag, stp, strt, lmt;
- WebRtc_UWord16 PFind[13];
+ int32_t Num, Den;
+ int32_t temp;
+ int16_t flag, stp, strt, lmt;
+ uint16_t PFind[13];
if (fs_mult == 1)
{
@@ -162,24 +162,24 @@
Den = pw16_3pts[0] + WEBRTC_SPL_MUL_16_16(pw16_3pts[1],-2) + pw16_3pts[2];
- temp = (WebRtc_Word32) WEBRTC_SPL_MUL(Num, (WebRtc_Word32)120); /* need 32_16 really */
+ temp = (int32_t) WEBRTC_SPL_MUL(Num, (int32_t)120); /* need 32_16 really */
flag = 1;
stp = WebRtcNetEQ_kPrblCf[PFind[fs_mult]][0] - WebRtcNetEQ_kPrblCf[PFind[fs_mult - 1]][0];
strt = (WebRtcNetEQ_kPrblCf[PFind[fs_mult]][0]
+ WebRtcNetEQ_kPrblCf[PFind[fs_mult - 1]][0]) >> 1;
- if (temp < (WebRtc_Word32) WEBRTC_SPL_MUL(-Den,(WebRtc_Word32)strt))
+ if (temp < (int32_t) WEBRTC_SPL_MUL(-Den,(int32_t)strt))
{
lmt = strt - stp;
while (flag)
{
if ((flag == fs_mult) || (temp
- > (WebRtc_Word32) WEBRTC_SPL_MUL(-Den,(WebRtc_Word32)lmt)))
+ > (int32_t) WEBRTC_SPL_MUL(-Den,(int32_t)lmt)))
{
*pw16_outVal
- = (WebRtc_Word16)
- (((WebRtc_Word32) ((WebRtc_Word32) WEBRTC_SPL_MUL(Den,(WebRtc_Word32)WebRtcNetEQ_kPrblCf[PFind[fs_mult-flag]][1])
- + (WebRtc_Word32) WEBRTC_SPL_MUL(Num,(WebRtc_Word32)WebRtcNetEQ_kPrblCf[PFind[fs_mult-flag]][2])
+ = (int16_t)
+ (((int32_t) ((int32_t) WEBRTC_SPL_MUL(Den,(int32_t)WebRtcNetEQ_kPrblCf[PFind[fs_mult-flag]][1])
+ + (int32_t) WEBRTC_SPL_MUL(Num,(int32_t)WebRtcNetEQ_kPrblCf[PFind[fs_mult-flag]][2])
+ WEBRTC_SPL_MUL_16_16(pw16_3pts[0],256))) >> 8);
*pw16_Ind = (*pw16_Ind) * (fs_mult << 1) - flag;
flag = 0;
@@ -191,24 +191,24 @@
}
}
}
- else if (temp > (WebRtc_Word32) WEBRTC_SPL_MUL(-Den,(WebRtc_Word32)(strt+stp)))
+ else if (temp > (int32_t) WEBRTC_SPL_MUL(-Den,(int32_t)(strt+stp)))
{
lmt = strt + (stp << 1);
while (flag)
{
if ((flag == fs_mult) || (temp
- < (WebRtc_Word32) WEBRTC_SPL_MUL(-Den,(WebRtc_Word32)lmt)))
+ < (int32_t) WEBRTC_SPL_MUL(-Den,(int32_t)lmt)))
{
- WebRtc_Word32 temp_term_1, temp_term_2, temp_term_3;
+ int32_t temp_term_1, temp_term_2, temp_term_3;
temp_term_1 = WEBRTC_SPL_MUL(Den,
- (WebRtc_Word32) WebRtcNetEQ_kPrblCf[PFind[fs_mult+flag]][1]);
+ (int32_t) WebRtcNetEQ_kPrblCf[PFind[fs_mult+flag]][1]);
temp_term_2 = WEBRTC_SPL_MUL(Num,
- (WebRtc_Word32) WebRtcNetEQ_kPrblCf[PFind[fs_mult+flag]][2]);
+ (int32_t) WebRtcNetEQ_kPrblCf[PFind[fs_mult+flag]][2]);
temp_term_3 = WEBRTC_SPL_MUL_16_16(pw16_3pts[0],256);
*pw16_outVal
- = (WebRtc_Word16) ((temp_term_1 + temp_term_2 + temp_term_3) >> 8);
+ = (int16_t) ((temp_term_1 + temp_term_2 + temp_term_3) >> 8);
*pw16_Ind = (*pw16_Ind) * (fs_mult << 1) + flag;
flag = 0;
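The fixed-point fit above forms the second difference `Den` from the three points around the peak and appears to avoid a division by comparing against the tabulated WebRtcNetEQ_kPrblCf thresholds. The underlying relation is the textbook three-point parabolic peak interpolation; a floating-point sketch of that formula (not what the fixed-point code literally computes) is:

    /* Sketch: parabolic interpolation of a peak from samples y[-1], y[0], y[1].
     * den matches the Den computed above (first + last - 2*middle); num is
     * assumed to be the corresponding first/last difference. */
    static double parabolic_peak_offset(double ym1, double y0, double yp1)
    {
        double num = ym1 - yp1;
        double den = ym1 - 2.0 * y0 + yp1;
        return (den != 0.0) ? 0.5 * num / den : 0.0;  /* offset in [-0.5, 0.5] */
    }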
diff --git a/webrtc/modules/audio_coding/neteq/preemptive_expand.c b/webrtc/modules/audio_coding/neteq/preemptive_expand.c
index 167bc3a..172a171 100644
--- a/webrtc/modules/audio_coding/neteq/preemptive_expand.c
+++ b/webrtc/modules/audio_coding/neteq/preemptive_expand.c
@@ -28,9 +28,9 @@
/* Scratch usage:
Type Name size startpos endpos
- WebRtc_Word16 pw16_downSampSpeech 110 0 109
- WebRtc_Word32 pw32_corr 2*50 110 209
- WebRtc_Word16 pw16_corr 50 0 49
+ int16_t pw16_downSampSpeech 110 0 109
+ int32_t pw32_corr 2*50 110 209
+ int16_t pw16_corr 50 0 49
Total: 110+2*50
*/
@@ -72,45 +72,45 @@
int WebRtcNetEQ_PreEmptiveExpand(DSPInst_t *inst,
#ifdef SCRATCH
- WebRtc_Word16 *pw16_scratchPtr,
+ int16_t *pw16_scratchPtr,
#endif
- const WebRtc_Word16 *pw16_decoded, int len, int oldDataLen,
- WebRtc_Word16 *pw16_outData, WebRtc_Word16 *pw16_len,
- WebRtc_Word16 BGNonly)
+ const int16_t *pw16_decoded, int len, int oldDataLen,
+ int16_t *pw16_outData, int16_t *pw16_len,
+ int16_t BGNonly)
{
#ifdef SCRATCH
/* Use scratch memory for internal temporary vectors */
- WebRtc_Word16 *pw16_downSampSpeech = pw16_scratchPtr + SCRATCH_PW16_DS_SPEECH;
- WebRtc_Word32 *pw32_corr = (WebRtc_Word32*) (pw16_scratchPtr + SCRATCH_PW32_CORR);
- WebRtc_Word16 *pw16_corr = pw16_scratchPtr + SCRATCH_PW16_CORR;
+ int16_t *pw16_downSampSpeech = pw16_scratchPtr + SCRATCH_PW16_DS_SPEECH;
+ int32_t *pw32_corr = (int32_t*) (pw16_scratchPtr + SCRATCH_PW32_CORR);
+ int16_t *pw16_corr = pw16_scratchPtr + SCRATCH_PW16_CORR;
#else
/* Allocate memory for temporary vectors */
- WebRtc_Word16 pw16_downSampSpeech[PREEMPTIVE_DOWNSAMPLED_LEN];
- WebRtc_Word32 pw32_corr[PREEMPTIVE_CORR_LEN];
- WebRtc_Word16 pw16_corr[PREEMPTIVE_CORR_LEN];
+ int16_t pw16_downSampSpeech[PREEMPTIVE_DOWNSAMPLED_LEN];
+ int32_t pw32_corr[PREEMPTIVE_CORR_LEN];
+ int16_t pw16_corr[PREEMPTIVE_CORR_LEN];
#endif
- WebRtc_Word16 w16_decodedMax = 0;
- WebRtc_Word16 w16_tmp = 0;
- WebRtc_Word16 w16_tmp2;
- WebRtc_Word32 w32_tmp;
- WebRtc_Word32 w32_tmp2;
+ int16_t w16_decodedMax = 0;
+ int16_t w16_tmp = 0;
+ int16_t w16_tmp2;
+ int32_t w32_tmp;
+ int32_t w32_tmp2;
- const WebRtc_Word16 w16_startLag = PREEMPTIVE_MIN_LAG;
- const WebRtc_Word16 w16_endLag = PREEMPTIVE_MAX_LAG;
- const WebRtc_Word16 w16_corrLen = PREEMPTIVE_CORR_LEN;
- const WebRtc_Word16 *pw16_vec1, *pw16_vec2;
- WebRtc_Word16 *pw16_vectmp;
- WebRtc_Word16 w16_inc, w16_startfact;
- WebRtc_Word16 w16_bestIndex, w16_bestVal;
- WebRtc_Word16 w16_VAD = 1;
- WebRtc_Word16 fsMult;
- WebRtc_Word16 fsMult120;
- WebRtc_Word32 w32_en1, w32_en2, w32_cc;
- WebRtc_Word16 w16_en1, w16_en2;
- WebRtc_Word16 w16_en1Scale, w16_en2Scale;
- WebRtc_Word16 w16_sqrtEn1En2;
- WebRtc_Word16 w16_bestCorr = 0;
+ const int16_t w16_startLag = PREEMPTIVE_MIN_LAG;
+ const int16_t w16_endLag = PREEMPTIVE_MAX_LAG;
+ const int16_t w16_corrLen = PREEMPTIVE_CORR_LEN;
+ const int16_t *pw16_vec1, *pw16_vec2;
+ int16_t *pw16_vectmp;
+ int16_t w16_inc, w16_startfact;
+ int16_t w16_bestIndex, w16_bestVal;
+ int16_t w16_VAD = 1;
+ int16_t fsMult;
+ int16_t fsMult120;
+ int32_t w32_en1, w32_en2, w32_cc;
+ int16_t w16_en1, w16_en2;
+ int16_t w16_en1Scale, w16_en2Scale;
+ int16_t w16_sqrtEn1En2;
+ int16_t w16_bestCorr = 0;
int ok;
#ifdef NETEQ_STEREO
@@ -120,7 +120,7 @@
fsMult = WebRtcNetEQ_CalcFsMult(inst->fs); /* Calculate fs/8000 */
/* Pre-calculate common multiplication with fsMult */
- fsMult120 = (WebRtc_Word16) WEBRTC_SPL_MUL_16_16(fsMult, 120); /* 15 ms */
+ fsMult120 = (int16_t) WEBRTC_SPL_MUL_16_16(fsMult, 120); /* 15 ms */
inst->ExpandInst.w16_consecExp = 0; /* Last was not expand any more */
@@ -128,7 +128,7 @@
* Sanity check for len variable; must be (almost) 30 ms (120*fsMult + max(bestIndex)).
* Also, the new part must be at least .625 ms (w16_overlap).
*/
- if (len < (WebRtc_Word16) WEBRTC_SPL_MUL_16_16((120 + 119), fsMult) || oldDataLen >= len
+ if (len < (int16_t) WEBRTC_SPL_MUL_16_16((120 + 119), fsMult) || oldDataLen >= len
- inst->ExpandInst.w16_overlap)
{
/* Length of decoded data too short */
@@ -138,7 +138,7 @@
/* simply move all data from decoded to outData */
- WEBRTC_SPL_MEMMOVE_W16(pw16_outData, pw16_decoded, (WebRtc_Word16) len);
+ WEBRTC_SPL_MEMMOVE_W16(pw16_outData, pw16_decoded, (int16_t) len);
return NETEQ_OTHER_ERROR;
}
@@ -162,7 +162,7 @@
/* simply move all data from decoded to outData */
- WEBRTC_SPL_MEMMOVE_W16(pw16_outData, pw16_decoded, (WebRtc_Word16) len);
+ WEBRTC_SPL_MEMMOVE_W16(pw16_outData, pw16_decoded, (int16_t) len);
return NETEQ_OTHER_ERROR;
}
@@ -207,7 +207,7 @@
/****************************************************************/
/* find maximum absolute value */
- w16_decodedMax = WebRtcSpl_MaxAbsValueW16(pw16_decoded, (WebRtc_Word16) len);
+ w16_decodedMax = WebRtcSpl_MaxAbsValueW16(pw16_decoded, (int16_t) len);
/* downsample the decoded speech to 4 kHz */
ok = WebRtcNetEQ_DownSampleTo4kHz(pw16_decoded, len, inst->fs, pw16_downSampSpeech,
@@ -221,7 +221,7 @@
/* simply move all data from decoded to outData */
- WEBRTC_SPL_MEMMOVE_W16(pw16_outData, pw16_decoded, (WebRtc_Word16) len);
+ WEBRTC_SPL_MEMMOVE_W16(pw16_outData, pw16_decoded, (int16_t) len);
return NETEQ_OTHER_ERROR;
}
@@ -236,9 +236,9 @@
/* Perform correlation from lag 10 to lag 60 in 4 kHz domain */
WebRtcNetEQ_CrossCorr(
pw32_corr, &pw16_downSampSpeech[w16_endLag],
&pw16_downSampSpeech[w16_endLag - w16_startLag], w16_corrLen,
- (WebRtc_Word16) (w16_endLag - w16_startLag), w16_tmp, -1);
+ (int16_t) (w16_endLag - w16_startLag), w16_tmp, -1);
- /* Normalize correlation to 14 bits and put in a WebRtc_Word16 vector */
+ /* Normalize correlation to 14 bits and put in an int16_t vector */
w32_tmp = WebRtcSpl_MaxAbsValueW32(pw32_corr, w16_corrLen);
w16_tmp = 17 - WebRtcSpl_NormW32(w32_tmp);
w16_tmp = WEBRTC_SPL_MAX(0, w16_tmp);
@@ -247,8 +247,8 @@
/* Find limits for peak finding, in order to avoid overfilling the NetEQ algorithm buffer. */
/* Calculate difference between MAX_OUTPUT_SIZE and len in 4 kHz domain. */
- w16_tmp = WebRtcSpl_DivW32W16ResW16((WebRtc_Word32) (NETEQ_MAX_OUTPUT_SIZE - len),
- (WebRtc_Word16) (fsMult << 1)) - w16_startLag;
+ w16_tmp = WebRtcSpl_DivW32W16ResW16((int32_t) (NETEQ_MAX_OUTPUT_SIZE - len),
+ (int16_t) (fsMult << 1)) - w16_startLag;
w16_tmp = WEBRTC_SPL_MIN(w16_corrLen, w16_tmp); /* no more than corrLen = 50 */
#ifdef NETEQ_STEREO
@@ -326,13 +326,13 @@
pw16_vec2 = &pw16_decoded[fsMult120];
/* Calculate energies for vec1 and vec2 */
- w32_en1 = WebRtcNetEQ_DotW16W16((WebRtc_Word16*) pw16_vec1,
- (WebRtc_Word16*) pw16_vec1, w16_bestIndex, w16_tmp);
- w32_en2 = WebRtcNetEQ_DotW16W16((WebRtc_Word16*) pw16_vec2,
- (WebRtc_Word16*) pw16_vec2, w16_bestIndex, w16_tmp);
+ w32_en1 = WebRtcNetEQ_DotW16W16((int16_t*) pw16_vec1,
+ (int16_t*) pw16_vec1, w16_bestIndex, w16_tmp);
+ w32_en2 = WebRtcNetEQ_DotW16W16((int16_t*) pw16_vec2,
+ (int16_t*) pw16_vec2, w16_bestIndex, w16_tmp);
/* Calculate cross-correlation at the found lag */
- w32_cc = WebRtcNetEQ_DotW16W16((WebRtc_Word16*) pw16_vec1, (WebRtc_Word16*) pw16_vec2,
+ w32_cc = WebRtcNetEQ_DotW16W16((int16_t*) pw16_vec1, (int16_t*) pw16_vec2,
w16_bestIndex, w16_tmp);
/* Check VAD constraint
@@ -350,7 +350,7 @@
w16_tmp2 = 16 - WebRtcSpl_NormW32(w32_tmp2);
w16_tmp2 = WEBRTC_SPL_MAX(0, w16_tmp2);
w32_tmp = WEBRTC_SPL_RSHIFT_W32(w32_tmp, w16_tmp2);
- w16_tmp2 = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(w32_tmp2, w16_tmp2);
+ w16_tmp2 = (int16_t) WEBRTC_SPL_RSHIFT_W32(w32_tmp2, w16_tmp2);
w32_tmp2 = WEBRTC_SPL_MUL_16_16(w16_bestIndex, w16_tmp2);
/* Scale w32_tmp properly before comparing with w32_tmp2 */
@@ -358,7 +358,7 @@
if (WebRtcSpl_NormW32(w32_tmp) < WEBRTC_SPL_LSHIFT_W32(w16_tmp,1))
{
/* Cannot scale only w32_tmp, must scale w32_temp2 too */
- WebRtc_Word16 tempshift = WebRtcSpl_NormW32(w32_tmp);
+ int16_t tempshift = WebRtcSpl_NormW32(w32_tmp);
w32_tmp = WEBRTC_SPL_LSHIFT_W32(w32_tmp, tempshift);
w32_tmp2 = WEBRTC_SPL_RSHIFT_W32(w32_tmp2,
WEBRTC_SPL_LSHIFT_W32(w16_tmp,1) - tempshift);
@@ -398,21 +398,21 @@
w16_en1Scale += 1;
}
- /* Convert energies to WebRtc_Word16 */
- w16_en1 = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(w32_en1, w16_en1Scale);
- w16_en2 = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(w32_en2, w16_en2Scale);
+ /* Convert energies to int16_t */
+ w16_en1 = (int16_t) WEBRTC_SPL_RSHIFT_W32(w32_en1, w16_en1Scale);
+ w16_en2 = (int16_t) WEBRTC_SPL_RSHIFT_W32(w32_en2, w16_en2Scale);
/* Calculate energy product */
w32_tmp = WEBRTC_SPL_MUL_16_16(w16_en1, w16_en2);
/* Calculate square-root of energy product */
- w16_sqrtEn1En2 = (WebRtc_Word16) WebRtcSpl_SqrtFloor(w32_tmp);
+ w16_sqrtEn1En2 = (int16_t) WebRtcSpl_SqrtFloor(w32_tmp);
/* Calculate cc/sqrt(en1*en2) in Q14 */
w16_tmp = 14 - ((w16_en1Scale + w16_en2Scale) >> 1);
w32_cc = WEBRTC_SPL_SHIFT_W32(w32_cc, w16_tmp);
w32_cc = WEBRTC_SPL_MAX(0, w32_cc); /* Don't divide with negative number */
- w16_bestCorr = (WebRtc_Word16) WebRtcSpl_DivW32W16(w32_cc, w16_sqrtEn1En2);
+ w16_bestCorr = (int16_t) WebRtcSpl_DivW32W16(w32_cc, w16_sqrtEn1En2);
w16_bestCorr = WEBRTC_SPL_MIN(16384, w16_bestCorr); /* set maximum to 1.0 */
}
@@ -440,14 +440,14 @@
/* Do expand operation by overlap add */
/* Set length of the first part, not to be modified */
- WebRtc_Word16 w16_startIndex = WEBRTC_SPL_MAX(oldDataLen, fsMult120);
+ int16_t w16_startIndex = WEBRTC_SPL_MAX(oldDataLen, fsMult120);
/*
* Calculate cross-fading slope so that the fading factor goes from
* 1 (16384 in Q14) to 0 in one pitch period (bestIndex).
*/
- w16_inc = (WebRtc_Word16) WebRtcSpl_DivW32W16((WebRtc_Word32) 16384,
- (WebRtc_Word16) (w16_bestIndex + 1)); /* in Q14 */
+ w16_inc = (int16_t) WebRtcSpl_DivW32W16((int32_t) 16384,
+ (int16_t) (w16_bestIndex + 1)); /* in Q14 */
/* Initiate fading factor */
w16_startfact = 16384 - w16_inc;
@@ -465,14 +465,14 @@
/* Generate interpolated part of length bestIndex (1 pitch period) */
pw16_vectmp = pw16_outData + w16_startIndex;
/* Reuse mixing function from Expand */
- WebRtcNetEQ_MixVoiceUnvoice(pw16_vectmp, (WebRtc_Word16*) pw16_vec2,
- (WebRtc_Word16*) pw16_vec1, &w16_startfact, w16_inc, w16_bestIndex);
+ WebRtcNetEQ_MixVoiceUnvoice(pw16_vectmp, (int16_t*) pw16_vec2,
+ (int16_t*) pw16_vec1, &w16_startfact, w16_inc, w16_bestIndex);
/* Move the last part (also unmodified) */
/* Take from decoded at 15 ms */
pw16_vec2 = &pw16_decoded[w16_startIndex];
WEBRTC_SPL_MEMMOVE_W16(&pw16_outData[w16_startIndex + w16_bestIndex], pw16_vec2,
- (WebRtc_Word16) (len - w16_startIndex));
+ (int16_t) (len - w16_startIndex));
/* Set the mode flag */
if (w16_VAD)
@@ -513,7 +513,7 @@
/* Simply move all data from decoded to outData */
- WEBRTC_SPL_MEMMOVE_W16(pw16_outData, pw16_decoded, (WebRtc_Word16) len);
+ WEBRTC_SPL_MEMMOVE_W16(pw16_outData, pw16_decoded, (int16_t) len);
return 0;
}
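The overlap-add above fades one vector out and the other in over one pitch period using a Q14 factor that starts near 16384 and is decreased by `w16_inc = 16384 / (bestIndex + 1)` per sample. A plain-C sketch of that kind of mix, with hypothetical names and not the shared WebRtcNetEQ_MixVoiceUnvoice routine itself:

    #include <stdint.h>

    /* Sketch: Q14 cross-fade over len samples; fact starts near 1.0 (16384)
     * and steps down by inc each sample. */
    static void crossfade_q14(int16_t *out, const int16_t *fade_out,
                              const int16_t *fade_in, int len,
                              int16_t start_fact, int16_t inc)
    {
        int32_t fact = start_fact;
        for (int i = 0; i < len; i++) {
            out[i] = (int16_t) ((fact * fade_out[i] +
                                 (16384 - fact) * fade_in[i]) >> 14);
            fact -= inc;
            if (fact < 0) fact = 0;
        }
    }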
diff --git a/webrtc/modules/audio_coding/neteq/random_vector.c b/webrtc/modules/audio_coding/neteq/random_vector.c
index 217bacd..c168ab5 100644
--- a/webrtc/modules/audio_coding/neteq/random_vector.c
+++ b/webrtc/modules/audio_coding/neteq/random_vector.c
@@ -18,7 +18,7 @@
* Values are normalized so that
* sqrt(dot(pw16_NETEQFIX_RANDN_TBL,pw16_NETEQFIX_RANDN_TBL)/256)=2^13
*/
-const WebRtc_Word16 WebRtcNetEQ_kRandnTbl[RANDVEC_NO_OF_SAMPLES] =
+const int16_t WebRtcNetEQ_kRandnTbl[RANDVEC_NO_OF_SAMPLES] =
{
2680, 5532, 441, 5520, 16170, -5146, -1024, -8733, 3115, 9598, -10380, -4959, -1280, -21716, 7133, -1522,
13458, -3902, 2789, -675, 3441, 5016, -13599, -4003, -2739, 3922, -7209, 13352, -11617, -7241, 12905, -2314,
@@ -39,15 +39,15 @@
};
-void WebRtcNetEQ_RandomVec(WebRtc_UWord32 *w32_seed, WebRtc_Word16 *pw16_randVec,
- WebRtc_Word16 w16_len, WebRtc_Word16 w16_incval)
+void WebRtcNetEQ_RandomVec(uint32_t *w32_seed, int16_t *pw16_randVec,
+ int16_t w16_len, int16_t w16_incval)
{
int i;
- WebRtc_Word16 w16_pos;
+ int16_t w16_pos;
for (i = 0; i < w16_len; i++)
{
*w32_seed = (*w32_seed) + w16_incval;
- w16_pos = (WebRtc_Word16) ((*w32_seed) & (RANDVEC_NO_OF_SAMPLES - 1));
+ w16_pos = (int16_t) ((*w32_seed) & (RANDVEC_NO_OF_SAMPLES - 1));
pw16_randVec[i] = WebRtcNetEQ_kRandnTbl[w16_pos];
}
}
diff --git a/webrtc/modules/audio_coding/neteq/recin.c b/webrtc/modules/audio_coding/neteq/recin.c
index c2f0d2d..75733b2 100644
--- a/webrtc/modules/audio_coding/neteq/recin.c
+++ b/webrtc/modules/audio_coding/neteq/recin.c
@@ -26,16 +26,16 @@
int WebRtcNetEQ_RecInInternal(MCUInst_t *MCU_inst, RTPPacket_t *RTPpacketInput,
- WebRtc_UWord32 uw32_timeRec)
+ uint32_t uw32_timeRec)
{
RTPPacket_t RTPpacket[2];
int i_k;
int i_ok = 0, i_No_Of_Payloads = 1;
- WebRtc_Word16 flushed = 0;
- WebRtc_Word16 codecPos;
+ int16_t flushed = 0;
+ int16_t codecPos;
int curr_Codec;
- WebRtc_Word16 isREDPayload = 0;
- WebRtc_Word32 temp_bufsize;
+ int16_t isREDPayload = 0;
+ int32_t temp_bufsize;
#ifdef NETEQ_RED_CODEC
RTPPacket_t* RTPpacketPtr[2]; /* Support for redundancy up to 2 payloads */
RTPpacketPtr[0] = &RTPpacket[0];
@@ -158,7 +158,7 @@
/* Adjust timestamp if timestamp scaling is needed (e.g. SILK or G.722) */
if (MCU_inst->TSscalingInitialized == 1)
{
- WebRtc_UWord32 newTS = WebRtcNetEQ_ScaleTimestampExternalToInternal(MCU_inst,
+ uint32_t newTS = WebRtcNetEQ_ScaleTimestampExternalToInternal(MCU_inst,
RTPpacket[i_k].timeStamp);
/* save the incoming timestamp for next time */
@@ -202,7 +202,7 @@
/* Is this a CNG packet? How should we handle this? */
#ifdef NETEQ_CNG_CODEC
/* Get CNG sample rate */
- WebRtc_UWord16 fsCng = WebRtcNetEQ_DbGetSampleRate(&MCU_inst->codec_DB_inst,
+ uint16_t fsCng = WebRtcNetEQ_DbGetSampleRate(&MCU_inst->codec_DB_inst,
RTPpacket[i_k].payloadType);
/* Force the sampling frequency to 32000 Hz for 48000 Hz CNG. */
@@ -301,9 +301,9 @@
MCU_inst->codec_DB_inst.funcUpdBWEst[codecPos](
MCU_inst->codec_DB_inst.codec_state[codecPos],
- (G_CONST WebRtc_UWord16 *) RTPpacket[0].payload,
- (WebRtc_Word32) RTPpacket[0].payloadLen, RTPpacket[0].seqNumber,
- (WebRtc_UWord32) RTPpacket[0].timeStamp, (WebRtc_UWord32) uw32_timeRec);
+ (G_CONST uint16_t *) RTPpacket[0].payload,
+ (int32_t) RTPpacket[0].payloadLen, RTPpacket[0].seqNumber,
+ (uint32_t) RTPpacket[0].timeStamp, (uint32_t) uw32_timeRec);
}
}
@@ -320,11 +320,11 @@
{
/* Change the auto-mode parameters if packet length has changed */
WebRtcNetEQ_SetPacketSpeechLen(&(MCU_inst->BufferStat_inst.Automode_inst),
- (WebRtc_Word16) temp_bufsize, MCU_inst->fs);
+ (int16_t) temp_bufsize, MCU_inst->fs);
}
/* update statistics */
- if ((WebRtc_Word32) (RTPpacket[0].timeStamp - MCU_inst->timeStamp) >= 0
+ if ((int32_t) (RTPpacket[0].timeStamp - MCU_inst->timeStamp) >= 0
&& !MCU_inst->new_codec)
{
/*
@@ -395,7 +395,7 @@
{
/* TODO(tlegrand): remove scaling once ACM has full 48 kHz
* support. */
- WebRtc_UWord16 sample_freq =
+ uint16_t sample_freq =
WebRtcNetEQ_DbGetSampleRate(&MCU_inst->codec_DB_inst,
rtpPayloadType);
if (sample_freq == 48000) {
@@ -416,11 +416,11 @@
return 0;
}
-WebRtc_UWord32 WebRtcNetEQ_ScaleTimestampExternalToInternal(const MCUInst_t *MCU_inst,
- WebRtc_UWord32 externalTS)
+uint32_t WebRtcNetEQ_ScaleTimestampExternalToInternal(const MCUInst_t *MCU_inst,
+ uint32_t externalTS)
{
- WebRtc_Word32 timestampDiff;
- WebRtc_UWord32 internalTS;
+ int32_t timestampDiff;
+ uint32_t internalTS;
/* difference between this and last incoming timestamp */
timestampDiff = externalTS - MCU_inst->externalTS;
@@ -459,14 +459,14 @@
return internalTS;
}
-WebRtc_UWord32 WebRtcNetEQ_ScaleTimestampInternalToExternal(const MCUInst_t *MCU_inst,
- WebRtc_UWord32 internalTS)
+uint32_t WebRtcNetEQ_ScaleTimestampInternalToExternal(const MCUInst_t *MCU_inst,
+ uint32_t internalTS)
{
- WebRtc_Word32 timestampDiff;
- WebRtc_UWord32 externalTS;
+ int32_t timestampDiff;
+ uint32_t externalTS;
/* difference between this and last incoming timestamp */
- timestampDiff = (WebRtc_Word32) internalTS - MCU_inst->internalTS;
+ timestampDiff = (int32_t) internalTS - MCU_inst->internalTS;
switch (MCU_inst->scalingFactor)
{
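Both scaling functions, and the statistics update above, take timestamp differences as a signed 32-bit value so that ordering survives the unsigned 32-bit wrap-around. A minimal illustration of that idiom:

    #include <stdint.h>

    /* Sketch: wrap-safe RTP timestamp comparison. The unsigned subtraction
     * wraps modulo 2^32, and the cast to int32_t then yields a signed
     * distance, so a timestamp just past the wrap point still compares
     * as "newer". */
    static int is_newer_timestamp(uint32_t ts, uint32_t reference)
    {
        return (int32_t) (ts - reference) > 0;
    }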
diff --git a/webrtc/modules/audio_coding/neteq/recout.c b/webrtc/modules/audio_coding/neteq/recout.c
index 8eb49e3..63abbd1 100644
--- a/webrtc/modules/audio_coding/neteq/recout.c
+++ b/webrtc/modules/audio_coding/neteq/recout.c
@@ -41,7 +41,7 @@
/* Scratch usage:
Type Name size startpos endpos
- WebRtc_Word16 pw16_NetEqAlgorithm_buffer 1080*fs/8000 0 1080*fs/8000-1
+ int16_t pw16_NetEqAlgorithm_buffer 1080*fs/8000 0 1080*fs/8000-1
struct dspInfo 6 1080*fs/8000 1085*fs/8000
func WebRtcNetEQ_Normal 40+495*fs/8000 0 39+495*fs/8000
@@ -91,49 +91,49 @@
#ifdef NETEQ_DELAY_LOGGING
extern FILE *delay_fid2; /* file pointer to delay log file */
-extern WebRtc_UWord32 tot_received_packets;
+extern uint32_t tot_received_packets;
#endif
-int WebRtcNetEQ_RecOutInternal(DSPInst_t *inst, WebRtc_Word16 *pw16_outData,
- WebRtc_Word16 *pw16_len, WebRtc_Word16 BGNonly)
+int WebRtcNetEQ_RecOutInternal(DSPInst_t *inst, int16_t *pw16_outData,
+ int16_t *pw16_len, int16_t BGNonly)
{
- WebRtc_Word16 blockLen, payloadLen, len = 0, pos;
- WebRtc_Word16 w16_tmp1, w16_tmp2, w16_tmp3, DataEnough;
- WebRtc_Word16 *blockPtr;
- WebRtc_Word16 MD = 0;
+ int16_t blockLen, payloadLen, len = 0, pos;
+ int16_t w16_tmp1, w16_tmp2, w16_tmp3, DataEnough;
+ int16_t *blockPtr;
+ int16_t MD = 0;
- WebRtc_Word16 speechType = TYPE_SPEECH;
- WebRtc_UWord16 instr;
- WebRtc_UWord16 uw16_tmp;
+ int16_t speechType = TYPE_SPEECH;
+ uint16_t instr;
+ uint16_t uw16_tmp;
#ifdef SCRATCH
char pw8_ScratchBuffer[((SIZE_SCRATCH_BUFFER + 1) * 2)];
- WebRtc_Word16 *pw16_scratchPtr = (WebRtc_Word16*) pw8_ScratchBuffer;
+ int16_t *pw16_scratchPtr = (int16_t*) pw8_ScratchBuffer;
/* pad with 240*fs_mult to match the overflow guard below */
- WebRtc_Word16 pw16_decoded_buffer[NETEQ_MAX_FRAME_SIZE+240*6];
- WebRtc_Word16 *pw16_NetEqAlgorithm_buffer = pw16_scratchPtr
+ int16_t pw16_decoded_buffer[NETEQ_MAX_FRAME_SIZE+240*6];
+ int16_t *pw16_NetEqAlgorithm_buffer = pw16_scratchPtr
+ SCRATCH_ALGORITHM_BUFFER;
DSP2MCU_info_t *dspInfo = (DSP2MCU_info_t*) (pw16_scratchPtr + SCRATCH_DSP_INFO);
#else
/* pad with 240*fs_mult to match the overflow guard below */
- WebRtc_Word16 pw16_decoded_buffer[NETEQ_MAX_FRAME_SIZE+240*6];
- WebRtc_Word16 pw16_NetEqAlgorithm_buffer[NETEQ_MAX_OUTPUT_SIZE+240*6];
+ int16_t pw16_decoded_buffer[NETEQ_MAX_FRAME_SIZE+240*6];
+ int16_t pw16_NetEqAlgorithm_buffer[NETEQ_MAX_OUTPUT_SIZE+240*6];
DSP2MCU_info_t dspInfoStruct;
DSP2MCU_info_t *dspInfo = &dspInfoStruct;
#endif
- WebRtc_Word16 fs_mult;
+ int16_t fs_mult;
int borrowedSamples;
int oldBorrowedSamples;
int return_value = 0;
- WebRtc_Word16 lastModeBGNonly = (inst->w16_mode & MODE_BGN_ONLY) != 0; /* check BGN flag */
+ int16_t lastModeBGNonly = (inst->w16_mode & MODE_BGN_ONLY) != 0; /* check BGN flag */
void *mainInstBackup = inst->main_inst;
#ifdef NETEQ_DELAY_LOGGING
int temp_var;
#endif
- WebRtc_Word16 dtmfValue = -1;
- WebRtc_Word16 dtmfVolume = -1;
+ int16_t dtmfValue = -1;
+ int16_t dtmfVolume = -1;
int playDtmf = 0;
#ifdef NETEQ_ATEVENT_DECODE
int dtmfSwitch = 0;
@@ -141,7 +141,7 @@
#ifdef NETEQ_STEREO
MasterSlaveInfo *msInfo = inst->msInfo;
#endif
- WebRtc_Word16 *sharedMem = pw16_NetEqAlgorithm_buffer; /* Reuse memory SHARED_MEM_SIZE size */
+ int16_t *sharedMem = pw16_NetEqAlgorithm_buffer; /* Reuse memory of size SHARED_MEM_SIZE */
inst->pw16_readAddress = sharedMem;
inst->pw16_writeAddress = sharedMem;
@@ -164,8 +164,8 @@
*/
/* Get the information from master to correct synchronization */
- WebRtc_UWord32 currentMasterTimestamp;
- WebRtc_UWord32 currentSlaveTimestamp;
+ uint32_t currentMasterTimestamp;
+ uint32_t currentSlaveTimestamp;
currentMasterTimestamp = msInfo->endTimestamp - msInfo->samplesLeftWithOverlap;
currentSlaveTimestamp = inst->endTimestamp - (inst->endPosition - inst->curPosition);
@@ -244,8 +244,8 @@
if (msInfo->msMode == NETEQ_MASTER)
{
/* clear info to slave */
- WebRtcSpl_MemSetW16((WebRtc_Word16 *) msInfo, 0,
- sizeof(MasterSlaveInfo) / sizeof(WebRtc_Word16));
+ WebRtcSpl_MemSetW16((int16_t *) msInfo, 0,
+ sizeof(MasterSlaveInfo) / sizeof(int16_t));
/* re-set mode */
msInfo->msMode = NETEQ_MASTER;
@@ -263,7 +263,7 @@
return_value = WebRtcNetEQ_DSP2MCUinterrupt((MainInst_t *) inst->main_inst, sharedMem);
/* Read MCU data and instructions */
- instr = (WebRtc_UWord16) (inst->pw16_readAddress[0] & 0xf000);
+ instr = (uint16_t) (inst->pw16_readAddress[0] & 0xf000);
#ifdef NETEQ_STEREO
if (msInfo->msMode == NETEQ_MASTER)
@@ -303,7 +303,7 @@
#endif
}
- blockLen = (((*blockPtr) & DSP_CODEC_MASK_RED_FLAG) + 1) >> 1; /* In # of WebRtc_Word16 */
+ blockLen = (((*blockPtr) & DSP_CODEC_MASK_RED_FLAG) + 1) >> 1; /* In # of int16_t */
payloadLen = ((*blockPtr) & DSP_CODEC_MASK_RED_FLAG);
blockPtr++;
@@ -322,7 +322,7 @@
temp_var = NETEQ_DELAY_LOGGING_SIGNAL_CHANGE_FS;
if ((fwrite(&temp_var, sizeof(int),
1, delay_fid2) != 1) ||
- (fwrite(&inst->fs, sizeof(WebRtc_UWord16),
+ (fwrite(&inst->fs, sizeof(uint16_t),
1, delay_fid2) != 1)) {
return -1;
}
@@ -410,7 +410,7 @@
{
if (inst->codec_ptr_inst.funcDecode != NULL)
{
- WebRtc_Word16 dec_Len;
+ int16_t dec_Len;
if (!BGNonly)
{
/* Do decoding as normal
@@ -517,11 +517,11 @@
if (fwrite(&temp_var, sizeof(int), 1, delay_fid2) != 1) {
return -1;
}
- if (fwrite(&inst->endTimestamp, sizeof(WebRtc_UWord32),
+ if (fwrite(&inst->endTimestamp, sizeof(uint32_t),
1, delay_fid2) != 1) {
return -1;
}
- if (fwrite(&dspInfo->samplesLeft, sizeof(WebRtc_UWord16),
+ if (fwrite(&dspInfo->samplesLeft, sizeof(uint16_t),
1, delay_fid2) != 1) {
return -1;
}
@@ -625,7 +625,7 @@
/* call VAD with new decoded data */
inst->VADInst.VADDecision |= inst->VADInst.VADFunction(
inst->VADInst.VADState, (int) inst->fs,
- (WebRtc_Word16 *) &pw16_decoded_buffer[VADSamplePtr],
+ (int16_t *) &pw16_decoded_buffer[VADSamplePtr],
(VADframeSize * fs_mult * 8));
VADSamplePtr += VADframeSize * fs_mult * 8; /* increment sample counter */
@@ -642,9 +642,9 @@
#endif /* NETEQ_VAD */
/* Adjust timestamp if needed */
- uw16_tmp = (WebRtc_UWord16) inst->pw16_readAddress[1];
- inst->endTimestamp += (((WebRtc_UWord32) uw16_tmp) << 16);
- uw16_tmp = (WebRtc_UWord16) inst->pw16_readAddress[2];
+ uw16_tmp = (uint16_t) inst->pw16_readAddress[1];
+ inst->endTimestamp += (((uint32_t) uw16_tmp) << 16);
+ uw16_tmp = (uint16_t) inst->pw16_readAddress[2];
inst->endTimestamp += uw16_tmp;
if (BGNonly && len > 0)
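The hunk above reassembles a 32-bit timestamp adjustment from two 16-bit words in the shared read address (high half in slot 1, low half in slot 2); the writer side, shown later in this diff in signal_mcu.c, splits the value the same way. A tiny sketch of the packing convention, with hypothetical names:

    #include <stdint.h>

    /* Sketch: pass a 32-bit timestamp jump through two 16-bit shared-memory
     * words, high half first; the reader rebuilds it as (hi << 16) | lo. */
    static void split_u32(uint32_t value, uint16_t *hi, uint16_t *lo)
    {
        *hi = (uint16_t) (value >> 16);
        *lo = (uint16_t) (value & 0xFFFF);
    }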
@@ -907,7 +907,7 @@
#ifdef NETEQ_CNG_CODEC
if (blockLen > 0)
{
- if (WebRtcCng_UpdateSid(inst->CNG_Codec_inst, (WebRtc_UWord8*) blockPtr,
+ if (WebRtcCng_UpdateSid(inst->CNG_Codec_inst, (uint8_t*) blockPtr,
payloadLen) < 0)
{
/* error returned from CNG function */
@@ -1022,7 +1022,7 @@
* Generate extra DTMF data to fill the space between
* curPosition and endPosition
*/
- WebRtc_Word16 tempLen;
+ int16_t tempLen;
tempLen = WebRtcNetEQ_DTMFGenerate(&inst->DTMFInst, dtmfValue, dtmfVolume,
&pw16_NetEqAlgorithm_buffer[len], inst->fs,
@@ -1269,7 +1269,7 @@
{
inst->speechBuffer[inst->curPosition + pos]
=
- (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(
+ (int16_t) WEBRTC_SPL_RSHIFT_W32(
WEBRTC_SPL_MUL_16_16( inst->speechBuffer[inst->endPosition - w16_tmp3 + pos],
16384-w16_tmp1 ) +
WEBRTC_SPL_MUL_16_16( pw16_NetEqAlgorithm_buffer[pos], w16_tmp1 ),
@@ -1336,9 +1336,9 @@
if (playDtmf != 0)
{
#ifdef NETEQ_ATEVENT_DECODE
- WebRtc_Word16 outDataIndex = 0;
- WebRtc_Word16 overdubLen = -1; /* default len */
- WebRtc_Word16 dtmfLen;
+ int16_t outDataIndex = 0;
+ int16_t overdubLen = -1; /* default len */
+ int16_t dtmfLen;
/*
* Overdub the output with DTMF. Note that this is not executed if the
@@ -1455,9 +1455,9 @@
*/
if ((inst->w16_mode != MODE_EXPAND) && (inst->w16_mode != MODE_RFC3389CNG))
{
- WebRtc_UWord32 uw32_tmpTS;
+ uint32_t uw32_tmpTS;
uw32_tmpTS = inst->endTimestamp - (inst->endPosition - inst->curPosition);
- if ((WebRtc_Word32) (uw32_tmpTS - inst->videoSyncTimestamp) > 0)
+ if ((int32_t) (uw32_tmpTS - inst->videoSyncTimestamp) > 0)
{
inst->videoSyncTimestamp = uw32_tmpTS;
}
diff --git a/webrtc/modules/audio_coding/neteq/rtcp.c b/webrtc/modules/audio_coding/neteq/rtcp.c
index 35f73da..d1ce934 100644
--- a/webrtc/modules/audio_coding/neteq/rtcp.c
+++ b/webrtc/modules/audio_coding/neteq/rtcp.c
@@ -18,24 +18,24 @@
#include "signal_processing_library.h"
-int WebRtcNetEQ_RTCPInit(WebRtcNetEQ_RTCP_t *RTCP_inst, WebRtc_UWord16 uw16_seqNo)
+int WebRtcNetEQ_RTCPInit(WebRtcNetEQ_RTCP_t *RTCP_inst, uint16_t uw16_seqNo)
{
/*
* Initialize everything to zero and then set the start values for the RTP packet stream.
*/
- WebRtcSpl_MemSetW16((WebRtc_Word16*) RTCP_inst, 0,
- sizeof(WebRtcNetEQ_RTCP_t) / sizeof(WebRtc_Word16));
+ WebRtcSpl_MemSetW16((int16_t*) RTCP_inst, 0,
+ sizeof(WebRtcNetEQ_RTCP_t) / sizeof(int16_t));
RTCP_inst->base_seq = uw16_seqNo;
RTCP_inst->max_seq = uw16_seqNo;
return 0;
}
-int WebRtcNetEQ_RTCPUpdate(WebRtcNetEQ_RTCP_t *RTCP_inst, WebRtc_UWord16 uw16_seqNo,
- WebRtc_UWord32 uw32_timeStamp, WebRtc_UWord32 uw32_recTime)
+int WebRtcNetEQ_RTCPUpdate(WebRtcNetEQ_RTCP_t *RTCP_inst, uint16_t uw16_seqNo,
+ uint32_t uw32_timeStamp, uint32_t uw32_recTime)
{
- WebRtc_Word16 w16_SeqDiff;
- WebRtc_Word32 w32_TimeDiff;
- WebRtc_Word32 w32_JitterDiff;
+ int16_t w16_SeqDiff;
+ int32_t w32_TimeDiff;
+ int32_t w32_JitterDiff;
/*
* Update number of received packets, and largest packet number received.
@@ -66,16 +66,16 @@
}
int WebRtcNetEQ_RTCPGetStats(WebRtcNetEQ_RTCP_t *RTCP_inst,
- WebRtc_UWord16 *puw16_fraction_lost,
- WebRtc_UWord32 *puw32_cum_lost, WebRtc_UWord32 *puw32_ext_max,
- WebRtc_UWord32 *puw32_jitter, WebRtc_Word16 doNotReset)
+ uint16_t *puw16_fraction_lost,
+ uint32_t *puw32_cum_lost, uint32_t *puw32_ext_max,
+ uint32_t *puw32_jitter, int16_t doNotReset)
{
- WebRtc_UWord32 uw32_exp_nr, uw32_exp_interval, uw32_rec_interval;
- WebRtc_Word32 w32_lost;
+ uint32_t uw32_exp_nr, uw32_exp_interval, uw32_rec_interval;
+ int32_t w32_lost;
/* Extended highest sequence number received */
*puw32_ext_max
- = (WebRtc_UWord32) WEBRTC_SPL_LSHIFT_W32((WebRtc_UWord32)RTCP_inst->cycles, 16)
+ = (uint32_t) WEBRTC_SPL_LSHIFT_W32((uint32_t)RTCP_inst->cycles, 16)
+ RTCP_inst->max_seq;
/*
@@ -91,7 +91,7 @@
else if (uw32_exp_nr > RTCP_inst->received)
{
*puw32_cum_lost = uw32_exp_nr - RTCP_inst->received;
- if (*puw32_cum_lost > (WebRtc_UWord32) 0xFFFFFF)
+ if (*puw32_cum_lost > (uint32_t) 0xFFFFFF)
{
*puw32_cum_lost = 0xFFFFFF;
}
@@ -112,14 +112,14 @@
{
RTCP_inst->rec_prior = RTCP_inst->received;
}
- w32_lost = (WebRtc_Word32) (uw32_exp_interval - uw32_rec_interval);
+ w32_lost = (int32_t) (uw32_exp_interval - uw32_rec_interval);
if (uw32_exp_interval == 0 || w32_lost <= 0 || RTCP_inst->received == 0)
{
*puw16_fraction_lost = 0;
}
else
{
- *puw16_fraction_lost = (WebRtc_UWord16) (WEBRTC_SPL_LSHIFT_W32(w32_lost, 8)
+ *puw16_fraction_lost = (uint16_t) (WEBRTC_SPL_LSHIFT_W32(w32_lost, 8)
/ uw32_exp_interval);
}
if (*puw16_fraction_lost > 0xFF)
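The statistics hunk above computes the RFC 3550 "fraction lost" as an 8-bit fixed-point ratio of packets lost in the reporting interval to packets expected, clamped to 0xFF. A plain-arithmetic sketch of that calculation, with hypothetical names:

    #include <stdint.h>

    /* Sketch: RFC 3550 fraction lost as an 8-bit fixed-point value (0..255),
     * mirroring the (lost << 8) / expected_interval expression above. */
    static uint8_t fraction_lost(uint32_t expected_interval,
                                 uint32_t received_interval)
    {
        int32_t lost = (int32_t) (expected_interval - received_interval);
        if (expected_interval == 0 || lost <= 0)
            return 0;
        uint32_t fraction = ((uint32_t) lost << 8) / expected_interval;
        return (uint8_t) (fraction > 0xFF ? 0xFF : fraction);
    }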
diff --git a/webrtc/modules/audio_coding/neteq/rtcp.h b/webrtc/modules/audio_coding/neteq/rtcp.h
index 009e019..5e066eb 100644
--- a/webrtc/modules/audio_coding/neteq/rtcp.h
+++ b/webrtc/modules/audio_coding/neteq/rtcp.h
@@ -19,16 +19,16 @@
typedef struct
{
- WebRtc_UWord16 cycles; /* The number of wrap-arounds for the sequence number */
- WebRtc_UWord16 max_seq; /* The maximum sequence number received
+ uint16_t cycles; /* The number of wrap-arounds for the sequence number */
+ uint16_t max_seq; /* The maximum sequence number received
(starts from 0 again after wrap around) */
- WebRtc_UWord16 base_seq; /* The sequence number of the first packet that arrived */
- WebRtc_UWord32 received; /* The number of packets that has been received */
- WebRtc_UWord32 rec_prior; /* Number of packets received when last report was generated */
- WebRtc_UWord32 exp_prior; /* Number of packets that should have been received if no
+ uint16_t base_seq; /* The sequence number of the first packet that arrived */
+ uint32_t received; /* The number of packets that have been received */
+ uint32_t rec_prior; /* Number of packets received when last report was generated */
+ uint32_t exp_prior; /* Number of packets that should have been received if no
packets were lost. Stored value from last report. */
- WebRtc_UWord32 jitter; /* Jitter statistics at this instance (calculated according to RFC) */
- WebRtc_Word32 transit; /* Clock difference for previous packet (RTPtimestamp - LOCALtime_rec) */
+ uint32_t jitter; /* Jitter statistics at this instance (calculated according to RFC) */
+ int32_t transit; /* Clock difference for previous packet (RTPtimestamp - LOCALtime_rec) */
} WebRtcNetEQ_RTCP_t;
/****************************************************************************
@@ -46,7 +46,7 @@
* -1 - Error
*/
-int WebRtcNetEQ_RTCPInit(WebRtcNetEQ_RTCP_t *RTCP_inst, WebRtc_UWord16 uw16_seqNo);
+int WebRtcNetEQ_RTCPInit(WebRtcNetEQ_RTCP_t *RTCP_inst, uint16_t uw16_seqNo);
/****************************************************************************
* WebRtcNetEQ_RTCPUpdate(...)
@@ -65,8 +65,8 @@
* -1 - Error
*/
-int WebRtcNetEQ_RTCPUpdate(WebRtcNetEQ_RTCP_t *RTCP_inst, WebRtc_UWord16 uw16_seqNo,
- WebRtc_UWord32 uw32_timeStamp, WebRtc_UWord32 uw32_recTime);
+int WebRtcNetEQ_RTCPUpdate(WebRtcNetEQ_RTCP_t *RTCP_inst, uint16_t uw16_seqNo,
+ uint32_t uw32_timeStamp, uint32_t uw32_recTime);
/****************************************************************************
* WebRtcNetEQ_RTCPGetStats(...)
@@ -95,8 +95,8 @@
*/
int WebRtcNetEQ_RTCPGetStats(WebRtcNetEQ_RTCP_t *RTCP_inst,
- WebRtc_UWord16 *puw16_fraction_lost,
- WebRtc_UWord32 *puw32_cum_lost, WebRtc_UWord32 *puw32_ext_max,
- WebRtc_UWord32 *puw32_jitter, WebRtc_Word16 doNotReset);
+ uint16_t *puw16_fraction_lost,
+ uint32_t *puw32_cum_lost, uint32_t *puw32_ext_max,
+ uint32_t *puw32_jitter, int16_t doNotReset);
#endif
diff --git a/webrtc/modules/audio_coding/neteq/rtp.c b/webrtc/modules/audio_coding/neteq/rtp.c
index 63cdf65..f23f351 100644
--- a/webrtc/modules/audio_coding/neteq/rtp.c
+++ b/webrtc/modules/audio_coding/neteq/rtp.c
@@ -18,7 +18,7 @@
#include "neteq_error_codes.h"
-int WebRtcNetEQ_RTPPayloadInfo(WebRtc_Word16* pw16_Datagram, int i_DatagramLen,
+int WebRtcNetEQ_RTPPayloadInfo(int16_t* pw16_Datagram, int i_DatagramLen,
RTPPacket_t* RTPheader)
{
int i_P, i_X, i_CC, i_startPosition;
@@ -32,20 +32,20 @@
}
#ifdef WEBRTC_BIG_ENDIAN
- i_IPver = (((WebRtc_UWord16) (pw16_Datagram[0] & 0xC000)) >> 14); /* Extract the version */
- i_P = (((WebRtc_UWord16) (pw16_Datagram[0] & 0x2000)) >> 13); /* Extract the P bit */
- i_X = (((WebRtc_UWord16) (pw16_Datagram[0] & 0x1000)) >> 12); /* Extract the X bit */
- i_CC = ((WebRtc_UWord16) (pw16_Datagram[0] >> 8) & 0xF); /* Get the CC number */
+ i_IPver = (((uint16_t) (pw16_Datagram[0] & 0xC000)) >> 14); /* Extract the version */
+ i_P = (((uint16_t) (pw16_Datagram[0] & 0x2000)) >> 13); /* Extract the P bit */
+ i_X = (((uint16_t) (pw16_Datagram[0] & 0x1000)) >> 12); /* Extract the X bit */
+ i_CC = ((uint16_t) (pw16_Datagram[0] >> 8) & 0xF); /* Get the CC number */
RTPheader->payloadType = pw16_Datagram[0] & 0x7F; /* Get the coder type */
RTPheader->seqNumber = pw16_Datagram[1]; /* Get the sequence number */
- RTPheader->timeStamp = ((((WebRtc_UWord32) ((WebRtc_UWord16) pw16_Datagram[2])) << 16)
- | (WebRtc_UWord16) (pw16_Datagram[3])); /* Get timestamp */
- RTPheader->ssrc = (((WebRtc_UWord32) pw16_Datagram[4]) << 16)
- + (((WebRtc_UWord32) pw16_Datagram[5])); /* Get the SSRC */
+ RTPheader->timeStamp = ((((uint32_t) ((uint16_t) pw16_Datagram[2])) << 16)
+ | (uint16_t) (pw16_Datagram[3])); /* Get timestamp */
+ RTPheader->ssrc = (((uint32_t) pw16_Datagram[4]) << 16)
+ + (((uint32_t) pw16_Datagram[5])); /* Get the SSRC */
if (i_X == 1)
{
- /* Extension header exists. Find out how many WebRtc_Word32 it consists of. */
+ /* Extension header exists. Find out how many int32_t it consists of. */
i_extlength = pw16_Datagram[7 + 2 * i_CC];
}
if (i_P == 1)
@@ -54,7 +54,7 @@
if (i_DatagramLen & 0x1)
{
/* odd number of bytes => last byte in higher byte */
- i_padlength = (((WebRtc_UWord16) pw16_Datagram[i_DatagramLen >> 1]) >> 8);
+ i_padlength = (((uint16_t) pw16_Datagram[i_DatagramLen >> 1]) >> 8);
}
else
{
@@ -63,27 +63,27 @@
}
}
#else /* WEBRTC_LITTLE_ENDIAN */
- i_IPver = (((WebRtc_UWord16) (pw16_Datagram[0] & 0xC0)) >> 6); /* Extract the IP version */
- i_P = (((WebRtc_UWord16) (pw16_Datagram[0] & 0x20)) >> 5); /* Extract the P bit */
- i_X = (((WebRtc_UWord16) (pw16_Datagram[0] & 0x10)) >> 4); /* Extract the X bit */
- i_CC = (WebRtc_UWord16) (pw16_Datagram[0] & 0xF); /* Get the CC number */
+ i_IPver = (((uint16_t) (pw16_Datagram[0] & 0xC0)) >> 6); /* Extract the IP version */
+ i_P = (((uint16_t) (pw16_Datagram[0] & 0x20)) >> 5); /* Extract the P bit */
+ i_X = (((uint16_t) (pw16_Datagram[0] & 0x10)) >> 4); /* Extract the X bit */
+ i_CC = (uint16_t) (pw16_Datagram[0] & 0xF); /* Get the CC number */
RTPheader->payloadType = (pw16_Datagram[0] >> 8) & 0x7F; /* Get the coder type */
- RTPheader->seqNumber = (((((WebRtc_UWord16) pw16_Datagram[1]) >> 8) & 0xFF)
- | (((WebRtc_UWord16) (pw16_Datagram[1] & 0xFF)) << 8)); /* Get the packet number */
- RTPheader->timeStamp = ((((WebRtc_UWord16) pw16_Datagram[2]) & 0xFF) << 24)
- | ((((WebRtc_UWord16) pw16_Datagram[2]) & 0xFF00) << 8)
- | ((((WebRtc_UWord16) pw16_Datagram[3]) >> 8) & 0xFF)
- | ((((WebRtc_UWord16) pw16_Datagram[3]) & 0xFF) << 8); /* Get timestamp */
- RTPheader->ssrc = ((((WebRtc_UWord16) pw16_Datagram[4]) & 0xFF) << 24)
- | ((((WebRtc_UWord16) pw16_Datagram[4]) & 0xFF00) << 8)
- | ((((WebRtc_UWord16) pw16_Datagram[5]) >> 8) & 0xFF)
- | ((((WebRtc_UWord16) pw16_Datagram[5]) & 0xFF) << 8); /* Get the SSRC */
+ RTPheader->seqNumber = (((((uint16_t) pw16_Datagram[1]) >> 8) & 0xFF)
+ | (((uint16_t) (pw16_Datagram[1] & 0xFF)) << 8)); /* Get the packet number */
+ RTPheader->timeStamp = ((((uint16_t) pw16_Datagram[2]) & 0xFF) << 24)
+ | ((((uint16_t) pw16_Datagram[2]) & 0xFF00) << 8)
+ | ((((uint16_t) pw16_Datagram[3]) >> 8) & 0xFF)
+ | ((((uint16_t) pw16_Datagram[3]) & 0xFF) << 8); /* Get timestamp */
+ RTPheader->ssrc = ((((uint16_t) pw16_Datagram[4]) & 0xFF) << 24)
+ | ((((uint16_t) pw16_Datagram[4]) & 0xFF00) << 8)
+ | ((((uint16_t) pw16_Datagram[5]) >> 8) & 0xFF)
+ | ((((uint16_t) pw16_Datagram[5]) & 0xFF) << 8); /* Get the SSRC */
if (i_X == 1)
{
- /* Extension header exists. Find out how many WebRtc_Word32 it consists of. */
- i_extlength = (((((WebRtc_UWord16) pw16_Datagram[7 + 2 * i_CC]) >> 8) & 0xFF)
- | (((WebRtc_UWord16) (pw16_Datagram[7 + 2 * i_CC] & 0xFF)) << 8));
+ /* Extension header exists. Find out how many int32_t it consists of. */
+ i_extlength = (((((uint16_t) pw16_Datagram[7 + 2 * i_CC]) >> 8) & 0xFF)
+ | (((uint16_t) (pw16_Datagram[7 + 2 * i_CC] & 0xFF)) << 8));
}
if (i_P == 1)
{
@@ -96,7 +96,7 @@
else
{
/* even number of bytes => last byte in lower byte */
- i_padlength = (((WebRtc_UWord16) pw16_Datagram[(i_DatagramLen >> 1) - 1]) >> 8);
+ i_padlength = (((uint16_t) pw16_Datagram[(i_DatagramLen >> 1) - 1]) >> 8);
}
}
#endif
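The parser above reads the RTP fixed header out of 16-bit words, which is why it needs separate big- and little-endian paths. For reference, the same fields read byte-by-byte need no per-endian code; a sketch (hypothetical names, not the NetEQ parser) assuming at least 12 bytes of header:

    #include <stdint.h>

    /* Sketch: RTP fixed-header fields (RFC 3550) extracted byte-by-byte. */
    typedef struct {
        int version, padding, extension, cc, payload_type;
        uint16_t seq;
        uint32_t timestamp, ssrc;
    } RtpHeaderSketch;

    static void parse_rtp_header(const uint8_t *buf, RtpHeaderSketch *h)
    {
        h->version      = buf[0] >> 6;
        h->padding      = (buf[0] >> 5) & 0x1;
        h->extension    = (buf[0] >> 4) & 0x1;
        h->cc           = buf[0] & 0xF;
        h->payload_type = buf[1] & 0x7F;
        h->seq          = (uint16_t) ((buf[2] << 8) | buf[3]);
        h->timestamp    = ((uint32_t) buf[4] << 24) | ((uint32_t) buf[5] << 16) |
                          ((uint32_t) buf[6] << 8)  | buf[7];
        h->ssrc         = ((uint32_t) buf[8] << 24) | ((uint32_t) buf[9] << 16) |
                          ((uint32_t) buf[10] << 8) | buf[11];
    }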
@@ -120,8 +120,8 @@
int WebRtcNetEQ_RedundancySplit(RTPPacket_t* RTPheader[], int i_MaximumPayloads,
int *i_No_Of_Payloads)
{
- const WebRtc_Word16 *pw16_data = RTPheader[0]->payload; /* Pointer to the data */
- WebRtc_UWord16 uw16_offsetTimeStamp = 65535, uw16_secondPayload = 65535;
+ const int16_t *pw16_data = RTPheader[0]->payload; /* Pointer to the data */
+ uint16_t uw16_offsetTimeStamp = 65535, uw16_secondPayload = 65535;
int i_blockLength, i_k;
int i_discardedBlockLength = 0;
int singlePayload = 0;
@@ -133,7 +133,7 @@
singlePayload = 1;
/* set the blocklength to -4 to deduct the non-existent 4-byte RED header */
i_blockLength = -4;
- RTPheader[0]->payloadType = ((((WebRtc_UWord16)pw16_data[0]) & 0x7F00) >> 8);
+ RTPheader[0]->payloadType = ((((uint16_t)pw16_data[0]) & 0x7F00) >> 8);
}
else
{
@@ -141,7 +141,7 @@
while (((pw16_data[2] & 0x8000) != 0) &&
(pw16_data<((RTPheader[0]->payload)+((RTPheader[0]->payloadLen+1)>>1))))
{
- i_discardedBlockLength += (4+(((WebRtc_UWord16)pw16_data[1]) & 0x3FF));
+ i_discardedBlockLength += (4+(((uint16_t)pw16_data[1]) & 0x3FF));
pw16_data+=2;
}
if (pw16_data>=(RTPheader[0]->payload+((RTPheader[0]->payloadLen+1)>>1)))
@@ -149,11 +149,11 @@
return RED_SPLIT_ERROR2; /* Error, we are outside the packet */
}
singlePayload = 0; /* the packet contains more than one payload */
- uw16_secondPayload = ((((WebRtc_UWord16)pw16_data[0]) & 0x7F00) >> 8);
- RTPheader[0]->payloadType = ((((WebRtc_UWord16)pw16_data[2]) & 0x7F00) >> 8);
- uw16_offsetTimeStamp = ((((WebRtc_UWord16)pw16_data[0]) & 0xFF) << 6) +
- ((((WebRtc_UWord16)pw16_data[1]) & 0xFC00) >> 10);
- i_blockLength = (((WebRtc_UWord16)pw16_data[1]) & 0x3FF);
+ uw16_secondPayload = ((((uint16_t)pw16_data[0]) & 0x7F00) >> 8);
+ RTPheader[0]->payloadType = ((((uint16_t)pw16_data[2]) & 0x7F00) >> 8);
+ uw16_offsetTimeStamp = ((((uint16_t)pw16_data[0]) & 0xFF) << 6) +
+ ((((uint16_t)pw16_data[1]) & 0xFC00) >> 10);
+ i_blockLength = (((uint16_t)pw16_data[1]) & 0x3FF);
}
#else /* WEBRTC_LITTLE_ENDIAN */
if ((pw16_data[0] & 0x80) == 0)
@@ -162,7 +162,7 @@
singlePayload = 1;
/* set the blocklength to -4 to deduct the non-existent 4-byte RED header */
i_blockLength = -4;
- RTPheader[0]->payloadType = (((WebRtc_UWord16) pw16_data[0]) & 0x7F);
+ RTPheader[0]->payloadType = (((uint16_t) pw16_data[0]) & 0x7F);
}
else
{
@@ -170,8 +170,8 @@
while (((pw16_data[2] & 0x80) != 0) && (pw16_data < ((RTPheader[0]->payload)
+ ((RTPheader[0]->payloadLen + 1) >> 1))))
{
- i_discardedBlockLength += (4 + ((((WebRtc_UWord16) pw16_data[1]) & 0x3) << 8)
- + ((((WebRtc_UWord16) pw16_data[1]) & 0xFF00) >> 8));
+ i_discardedBlockLength += (4 + ((((uint16_t) pw16_data[1]) & 0x3) << 8)
+ + ((((uint16_t) pw16_data[1]) & 0xFF00) >> 8));
pw16_data += 2;
}
if (pw16_data >= (RTPheader[0]->payload + ((RTPheader[0]->payloadLen + 1) >> 1)))
@@ -179,12 +179,12 @@
return RED_SPLIT_ERROR2; /* Error, we are outside the packet */
}
singlePayload = 0; /* the packet contains more than one payload */
- uw16_secondPayload = (((WebRtc_UWord16) pw16_data[0]) & 0x7F);
- RTPheader[0]->payloadType = (((WebRtc_UWord16) pw16_data[2]) & 0x7F);
- uw16_offsetTimeStamp = ((((WebRtc_UWord16) pw16_data[0]) & 0xFF00) >> 2)
- + ((((WebRtc_UWord16) pw16_data[1]) & 0xFC) >> 2);
- i_blockLength = ((((WebRtc_UWord16) pw16_data[1]) & 0x3) << 8)
- + ((((WebRtc_UWord16) pw16_data[1]) & 0xFF00) >> 8);
+ uw16_secondPayload = (((uint16_t) pw16_data[0]) & 0x7F);
+ RTPheader[0]->payloadType = (((uint16_t) pw16_data[2]) & 0x7F);
+ uw16_offsetTimeStamp = ((((uint16_t) pw16_data[0]) & 0xFF00) >> 2)
+ + ((((uint16_t) pw16_data[1]) & 0xFC) >> 2);
+ i_blockLength = ((((uint16_t) pw16_data[1]) & 0x3) << 8)
+ + ((((uint16_t) pw16_data[1]) & 0xFF00) >> 8);
}
#endif
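The redundancy-split code above walks 4-byte RED block headers (RFC 2198): an F bit marking whether more blocks follow, a 7-bit payload type, a 14-bit timestamp offset, and a 10-bit block length. A byte-oriented sketch of one such header, matching the bit masks used above:

    #include <stdint.h>

    /* Sketch: parse one 4-byte RFC 2198 RED block header. */
    static void parse_red_block_header(const uint8_t *hdr, int *more_follows,
                                       int *payload_type, uint16_t *ts_offset,
                                       uint16_t *block_len)
    {
        *more_follows = hdr[0] >> 7;                                  /* F bit */
        *payload_type = hdr[0] & 0x7F;
        *ts_offset    = (uint16_t) ((hdr[1] << 6) | (hdr[2] >> 2));   /* 14 bits */
        *block_len    = (uint16_t) (((hdr[2] & 0x3) << 8) | hdr[3]);  /* 10 bits */
    }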
diff --git a/webrtc/modules/audio_coding/neteq/rtp.h b/webrtc/modules/audio_coding/neteq/rtp.h
index 8490d62..4642eae 100644
--- a/webrtc/modules/audio_coding/neteq/rtp.h
+++ b/webrtc/modules/audio_coding/neteq/rtp.h
@@ -21,14 +21,14 @@
typedef struct
{
- WebRtc_UWord16 seqNumber;
- WebRtc_UWord32 timeStamp;
- WebRtc_UWord32 ssrc;
+ uint16_t seqNumber;
+ uint32_t timeStamp;
+ uint32_t ssrc;
int payloadType;
- const WebRtc_Word16 *payload;
- WebRtc_Word16 payloadLen;
- WebRtc_Word16 starts_byte1;
- WebRtc_Word16 rcuPlCntr;
+ const int16_t *payload;
+ int16_t payloadLen;
+ int16_t starts_byte1;
+ int16_t rcuPlCntr;
} RTPPacket_t;
/****************************************************************************
@@ -47,7 +47,7 @@
* -1 - Error
*/
-int WebRtcNetEQ_RTPPayloadInfo(WebRtc_Word16* pw16_Datagram, int i_DatagramLen,
+int WebRtcNetEQ_RTPPayloadInfo(int16_t* pw16_Datagram, int i_DatagramLen,
RTPPacket_t* RTPheader);
/****************************************************************************
diff --git a/webrtc/modules/audio_coding/neteq/set_fs.c b/webrtc/modules/audio_coding/neteq/set_fs.c
index b2ad5ca..ac97454 100644
--- a/webrtc/modules/audio_coding/neteq/set_fs.c
+++ b/webrtc/modules/audio_coding/neteq/set_fs.c
@@ -17,9 +17,9 @@
#include "dtmf_buffer.h"
#include "neteq_error_codes.h"
-int WebRtcNetEQ_McuSetFs(MCUInst_t *inst, WebRtc_UWord16 fs)
+int WebRtcNetEQ_McuSetFs(MCUInst_t *inst, uint16_t fs)
{
- WebRtc_Word16 ok = 0;
+ int16_t ok = 0;
switch (fs)
{
diff --git a/webrtc/modules/audio_coding/neteq/signal_mcu.c b/webrtc/modules/audio_coding/neteq/signal_mcu.c
index e51d5f2..ebe035d 100644
--- a/webrtc/modules/audio_coding/neteq/signal_mcu.c
+++ b/webrtc/modules/audio_coding/neteq/signal_mcu.c
@@ -59,21 +59,21 @@
{
int i_bufferpos, i_res;
- WebRtc_UWord16 uw16_instr;
+ uint16_t uw16_instr;
DSP2MCU_info_t dspInfo;
- WebRtc_Word16 *blockPtr, blockLen;
- WebRtc_UWord32 uw32_availableTS;
+ int16_t *blockPtr, blockLen;
+ uint32_t uw32_availableTS;
RTPPacket_t temp_pkt;
- WebRtc_Word32 w32_bufsize, w32_tmp;
- WebRtc_Word16 payloadType = -1;
- WebRtc_Word16 wantedNoOfTimeStamps;
- WebRtc_Word32 totalTS;
- WebRtc_Word16 oldPT, latePacketExist = 0;
- WebRtc_UWord32 oldTS, prevTS, uw32_tmp;
- WebRtc_UWord16 prevSeqNo;
- WebRtc_Word16 nextSeqNoAvail;
- WebRtc_Word16 fs_mult, w16_tmp;
- WebRtc_Word16 lastModeBGNonly = 0;
+ int32_t w32_bufsize, w32_tmp;
+ int16_t payloadType = -1;
+ int16_t wantedNoOfTimeStamps;
+ int32_t totalTS;
+ int16_t oldPT, latePacketExist = 0;
+ uint32_t oldTS, prevTS, uw32_tmp;
+ uint16_t prevSeqNo;
+ int16_t nextSeqNoAvail;
+ int16_t fs_mult, w16_tmp;
+ int16_t lastModeBGNonly = 0;
#ifdef NETEQ_DELAY_LOGGING
int temp_var;
#endif
@@ -94,7 +94,7 @@
/* Set blockPtr to first payload block */
blockPtr = &inst->pw16_writeAddress[3];
- /* Clear instruction word and number of lost samples (2*WebRtc_Word16) */
+ /* Clear instruction word and number of lost samples (2*int16_t) */
inst->pw16_writeAddress[0] = 0;
inst->pw16_writeAddress[1] = 0;
inst->pw16_writeAddress[2] = 0;
@@ -342,7 +342,7 @@
if (WebRtcNetEQ_DbIsCNGPayload(&inst->codec_DB_inst, payloadType))
{
/* The currently extracted packet is CNG; get CNG fs */
- WebRtc_UWord16 tempFs;
+ uint16_t tempFs;
tempFs = WebRtcNetEQ_DbGetSampleRate(&inst->codec_DB_inst, payloadType);
/* TODO(tlegrand): Remove this limitation once ACM has full
@@ -356,12 +356,12 @@
inst->fs = tempFs;
}
}
- WebRtcSpl_MemSetW16((WebRtc_Word16*) &cinst, 0,
- sizeof(CodecFuncInst_t) / sizeof(WebRtc_Word16));
+ WebRtcSpl_MemSetW16((int16_t*) &cinst, 0,
+ sizeof(CodecFuncInst_t) / sizeof(int16_t));
cinst.codec_fs = inst->fs;
}
cinst.timeStamp = inst->timeStamp;
- blockLen = (sizeof(CodecFuncInst_t)) >> (sizeof(WebRtc_Word16) - 1); /* in Word16 */
+ blockLen = (sizeof(CodecFuncInst_t)) >> (sizeof(int16_t) - 1); /* in int16_t */
*blockPtr = blockLen * 2;
blockPtr++;
WEBRTC_SPL_MEMCPY_W8(blockPtr,&cinst,sizeof(CodecFuncInst_t));
@@ -395,7 +395,7 @@
else
{
/* CNG exists */
- blockLen = (sizeof(cinst.codec_state)) >> (sizeof(WebRtc_Word16) - 1);
+ blockLen = (sizeof(cinst.codec_state)) >> (sizeof(int16_t) - 1);
*blockPtr = blockLen * 2;
blockPtr++;
WEBRTC_SPL_MEMCPY_W8(blockPtr,&cinst.codec_state,sizeof(cinst.codec_state));
@@ -500,15 +500,15 @@
/* Do DTMF without extracting any new packets from buffer */
if (uw16_instr == BUFSTATS_DO_DTMF_ONLY)
{
- WebRtc_UWord32 timeStampJump = 0;
+ uint32_t timeStampJump = 0;
/* Update timestamp */
if ((inst->BufferStat_inst.uw32_CNGplayedTS > 0) && (dspInfo.lastMode != MODE_DTMF))
{
/* Jump in timestamps if needed */
timeStampJump = inst->BufferStat_inst.uw32_CNGplayedTS;
- inst->pw16_writeAddress[1] = (WebRtc_UWord16) (timeStampJump >> 16);
- inst->pw16_writeAddress[2] = (WebRtc_UWord16) (timeStampJump & 0xFFFF);
+ inst->pw16_writeAddress[1] = (uint16_t) (timeStampJump >> 16);
+ inst->pw16_writeAddress[2] = (uint16_t) (timeStampJump & 0xFFFF);
}
inst->timeStamp = dspInfo.playedOutTS + timeStampJump;
@@ -532,7 +532,7 @@
| DSP_INSTR_ACCELERATE;
*blockPtr = 0;
inst->BufferStat_inst.Automode_inst.sampleMemory
- = (WebRtc_Word32) dspInfo.samplesLeft;
+ = (int32_t) dspInfo.samplesLeft;
inst->BufferStat_inst.Automode_inst.prevTimeScale = 1;
return 0;
}
@@ -576,7 +576,7 @@
| DSP_INSTR_PREEMPTIVE_EXPAND;
*blockPtr = 0;
inst->BufferStat_inst.Automode_inst.sampleMemory
- = (WebRtc_Word32) dspInfo.samplesLeft;
+ = (int32_t) dspInfo.samplesLeft;
inst->BufferStat_inst.Automode_inst.prevTimeScale = 1;
return 0;
}
@@ -591,7 +591,7 @@
| DSP_INSTR_PREEMPTIVE_EXPAND;
*blockPtr = 0;
inst->BufferStat_inst.Automode_inst.sampleMemory
- = (WebRtc_Word32) dspInfo.samplesLeft;
+ = (int32_t) dspInfo.samplesLeft;
inst->BufferStat_inst.Automode_inst.prevTimeScale = 1;
return 0;
}
@@ -631,8 +631,8 @@
&& (uw16_instr != BUFSTATS_DO_AUDIO_REPETITION_INC_TS))
{
uw32_tmp = (uw32_availableTS - dspInfo.playedOutTS);
- inst->pw16_writeAddress[1] = (WebRtc_UWord16) (uw32_tmp >> 16);
- inst->pw16_writeAddress[2] = (WebRtc_UWord16) (uw32_tmp & 0xFFFF);
+ inst->pw16_writeAddress[1] = (uint16_t) (uw32_tmp >> 16);
+ inst->pw16_writeAddress[2] = (uint16_t) (uw32_tmp & 0xFFFF);
if (inst->BufferStat_inst.w16_cngOn == CNG_OFF)
{
/*
@@ -682,9 +682,9 @@
temp_var = NETEQ_DELAY_LOGGING_SIGNAL_DECODE;
if ((fwrite(&temp_var, sizeof(int),
1, delay_fid2) != 1) ||
- (fwrite(&temp_pkt.timeStamp, sizeof(WebRtc_UWord32),
+ (fwrite(&temp_pkt.timeStamp, sizeof(uint32_t),
1, delay_fid2) != 1) ||
- (fwrite(&dspInfo.samplesLeft, sizeof(WebRtc_UWord16),
+ (fwrite(&dspInfo.samplesLeft, sizeof(uint16_t),
1, delay_fid2) != 1)) {
return -1;
}
@@ -744,7 +744,7 @@
else
{
inst->BufferStat_inst.Automode_inst.sampleMemory
- = (WebRtc_Word32) dspInfo.samplesLeft + totalTS;
+ = (int32_t) dspInfo.samplesLeft + totalTS;
inst->BufferStat_inst.Automode_inst.prevTimeScale = 1;
}
}
diff --git a/webrtc/modules/audio_coding/neteq/split_and_insert.c b/webrtc/modules/audio_coding/neteq/split_and_insert.c
index 03c1569..ce2e821 100644
--- a/webrtc/modules/audio_coding/neteq/split_and_insert.c
+++ b/webrtc/modules/audio_coding/neteq/split_and_insert.c
@@ -21,15 +21,15 @@
#include "neteq_error_codes.h"
int WebRtcNetEQ_SplitAndInsertPayload(RTPPacket_t *packet, PacketBuf_t *Buffer_inst,
- SplitInfo_t *split_inst, WebRtc_Word16 *flushed)
+ SplitInfo_t *split_inst, int16_t *flushed)
{
int i_ok;
int len;
int i;
RTPPacket_t temp_packet;
- WebRtc_Word16 localFlushed = 0;
- const WebRtc_Word16 *pw16_startPayload;
+ int16_t localFlushed = 0;
+ const int16_t *pw16_startPayload;
*flushed = 0;
len = packet->payloadLen;
diff --git a/webrtc/modules/audio_coding/neteq/test/NETEQTEST_CodecClass.cc b/webrtc/modules/audio_coding/neteq/test/NETEQTEST_CodecClass.cc
index 25f10b0..838f3d9 100644
--- a/webrtc/modules/audio_coding/neteq/test/NETEQTEST_CodecClass.cc
+++ b/webrtc/modules/audio_coding/neteq/test/NETEQTEST_CodecClass.cc
@@ -14,7 +14,7 @@
#include "webrtc_neteq_help_macros.h"
-NETEQTEST_Decoder::NETEQTEST_Decoder(enum WebRtcNetEQDecoder type, WebRtc_UWord16 fs, const char * name, WebRtc_UWord8 pt)
+NETEQTEST_Decoder::NETEQTEST_Decoder(enum WebRtcNetEQDecoder type, uint16_t fs, const char * name, uint8_t pt)
:
_decoder(NULL),
_decoderType(type),
@@ -42,11 +42,11 @@
#ifdef CODEC_ISAC
#include "isac.h"
-decoder_iSAC::decoder_iSAC(WebRtc_UWord8 pt)
+decoder_iSAC::decoder_iSAC(uint8_t pt)
:
NETEQTEST_Decoder(kDecoderISAC, 16000, "iSAC", pt)
{
- WebRtc_Word16 err = WebRtcIsac_Create((ISACStruct **) &_decoder);
+ int16_t err = WebRtcIsac_Create((ISACStruct **) &_decoder);
if (err)
{
exit(EXIT_FAILURE);
@@ -79,11 +79,11 @@
#endif
#ifdef CODEC_ISAC_SWB
-decoder_iSACSWB::decoder_iSACSWB(WebRtc_UWord8 pt)
+decoder_iSACSWB::decoder_iSACSWB(uint8_t pt)
:
NETEQTEST_Decoder(kDecoderISACswb, 32000, "iSAC swb", pt)
{
- WebRtc_Word16 err = WebRtcIsac_Create((ISACStruct **) &_decoder);
+ int16_t err = WebRtcIsac_Create((ISACStruct **) &_decoder);
if (err)
{
exit(EXIT_FAILURE);
@@ -114,9 +114,9 @@
#endif
#ifdef CODEC_ISAC_FB
-decoder_iSACFB::decoder_iSACFB(WebRtc_UWord8 pt)
+decoder_iSACFB::decoder_iSACFB(uint8_t pt)
: NETEQTEST_Decoder(kDecoderISACfb, 32000, "iSAC fb", pt) {
- WebRtc_Word16 err = WebRtcIsac_Create((ISACStruct **) &_decoder);
+ int16_t err = WebRtcIsac_Create((ISACStruct **) &_decoder);
if (err) {
exit(EXIT_FAILURE);
}
@@ -143,7 +143,7 @@
#ifdef CODEC_G711
#include "g711_interface.h"
-decoder_PCMU::decoder_PCMU(WebRtc_UWord8 pt)
+decoder_PCMU::decoder_PCMU(uint8_t pt)
:
NETEQTEST_Decoder(kDecoderPCMu, 8000, "G.711-u", pt)
{
@@ -160,7 +160,7 @@
}
-decoder_PCMA::decoder_PCMA(WebRtc_UWord8 pt)
+decoder_PCMA::decoder_PCMA(uint8_t pt)
:
NETEQTEST_Decoder(kDecoderPCMa, 8000, "G.711-A", pt)
{
@@ -229,11 +229,11 @@
#ifdef CODEC_ILBC
#include "ilbc.h"
-decoder_ILBC::decoder_ILBC(WebRtc_UWord8 pt)
+decoder_ILBC::decoder_ILBC(uint8_t pt)
:
NETEQTEST_Decoder(kDecoderILBC, 8000, "iLBC", pt)
{
- WebRtc_Word16 err = WebRtcIlbcfix_DecoderCreate((iLBC_decinst_t **) &_decoder);
+ int16_t err = WebRtcIlbcfix_DecoderCreate((iLBC_decinst_t **) &_decoder);
if (err)
{
exit(EXIT_FAILURE);
@@ -257,11 +257,11 @@
#ifdef CODEC_G729
#include "G729Interface.h"
-decoder_G729::decoder_G729(WebRtc_UWord8 pt)
+decoder_G729::decoder_G729(uint8_t pt)
:
NETEQTEST_Decoder(kDecoderG729, 8000, "G.729", pt)
{
- WebRtc_Word16 err = WebRtcG729_CreateDec((G729_decinst_t **) &_decoder);
+ int16_t err = WebRtcG729_CreateDec((G729_decinst_t **) &_decoder);
if (err)
{
exit(EXIT_FAILURE);
@@ -285,11 +285,11 @@
#ifdef CODEC_G729_1
#include "G729_1Interface.h"
-decoder_G729_1::decoder_G729_1(WebRtc_UWord8 pt)
+decoder_G729_1::decoder_G729_1(uint8_t pt)
:
NETEQTEST_Decoder(kDecoderG729_1, 16000, "G.729.1", pt)
{
- WebRtc_Word16 err = WebRtcG7291_Create((G729_1_inst_t **) &_decoder);
+ int16_t err = WebRtcG7291_Create((G729_1_inst_t **) &_decoder);
if (err)
{
exit(EXIT_FAILURE);
@@ -313,11 +313,11 @@
#ifdef CODEC_G722
#include "g722_interface.h"
-decoder_G722::decoder_G722(WebRtc_UWord8 pt)
+decoder_G722::decoder_G722(uint8_t pt)
:
NETEQTEST_Decoder(kDecoderG722, 16000, "G.722", pt)
{
- WebRtc_Word16 err = WebRtcG722_CreateDecoder((G722DecInst **) &_decoder);
+ int16_t err = WebRtcG722_CreateDecoder((G722DecInst **) &_decoder);
if (err)
{
exit(EXIT_FAILURE);
@@ -346,7 +346,7 @@
#endif
#ifdef CODEC_G722_1_16
-decoder_G722_1_16::decoder_G722_1_16(WebRtc_UWord8 pt)
+decoder_G722_1_16::decoder_G722_1_16(uint8_t pt)
:
NETEQTEST_Decoder(kDecoderG722_1_16, 16000, "G.722.1 (16 kbps)", pt)
{
@@ -372,7 +372,7 @@
#endif
#ifdef CODEC_G722_1_24
-decoder_G722_1_24::decoder_G722_1_24(WebRtc_UWord8 pt)
+decoder_G722_1_24::decoder_G722_1_24(uint8_t pt)
:
NETEQTEST_Decoder(kDecoderG722_1_24, 16000, "G.722.1 (24 kbps)", pt)
{
@@ -398,7 +398,7 @@
#endif
#ifdef CODEC_G722_1_32
-decoder_G722_1_32::decoder_G722_1_32(WebRtc_UWord8 pt)
+decoder_G722_1_32::decoder_G722_1_32(uint8_t pt)
:
NETEQTEST_Decoder(kDecoderG722_1_32, 16000, "G.722.1 (32 kbps)", pt)
{
@@ -424,7 +424,7 @@
#endif
#ifdef CODEC_G722_1C_24
-decoder_G722_1C_24::decoder_G722_1C_24(WebRtc_UWord8 pt)
+decoder_G722_1C_24::decoder_G722_1C_24(uint8_t pt)
:
NETEQTEST_Decoder(kDecoderG722_1C_24, 32000, "G.722.1C (24 kbps)", pt)
{
@@ -448,7 +448,7 @@
#endif
#ifdef CODEC_G722_1C_32
-decoder_G722_1C_32::decoder_G722_1C_32(WebRtc_UWord8 pt)
+decoder_G722_1C_32::decoder_G722_1C_32(uint8_t pt)
:
NETEQTEST_Decoder(kDecoderG722_1C_32, 32000, "G.722.1C (32 kbps)", pt)
{
@@ -472,7 +472,7 @@
#endif
#ifdef CODEC_G722_1C_48
-decoder_G722_1C_48::decoder_G722_1C_48(WebRtc_UWord8 pt)
+decoder_G722_1C_48::decoder_G722_1C_48(uint8_t pt)
:
NETEQTEST_Decoder(kDecoderG722_1C_48, 32000, "G.722.1C (48 kbps)", pt)
{
@@ -498,7 +498,7 @@
#ifdef CODEC_AMR
#include "AMRInterface.h"
#include "AMRCreation.h"
-decoder_AMR::decoder_AMR(WebRtc_UWord8 pt)
+decoder_AMR::decoder_AMR(uint8_t pt)
:
NETEQTEST_Decoder(kDecoderAMR, 8000, "AMR", pt)
{
@@ -526,7 +526,7 @@
#ifdef CODEC_AMRWB
#include "AMRWBInterface.h"
#include "AMRWBCreation.h"
-decoder_AMRWB::decoder_AMRWB(WebRtc_UWord8 pt)
+decoder_AMRWB::decoder_AMRWB(uint8_t pt)
:
NETEQTEST_Decoder(kDecoderAMRWB, 16000, "AMR wb", pt)
{
@@ -554,7 +554,7 @@
#ifdef CODEC_GSMFR
#include "GSMFRInterface.h"
#include "GSMFRCreation.h"
-decoder_GSMFR::decoder_GSMFR(WebRtc_UWord8 pt)
+decoder_GSMFR::decoder_GSMFR(uint8_t pt)
:
NETEQTEST_Decoder(kDecoderGSMFR, 8000, "GSM-FR", pt)
{
@@ -579,7 +579,7 @@
#if (defined(CODEC_SPEEX_8) || defined (CODEC_SPEEX_16))
#include "SpeexInterface.h"
-decoder_SPEEX::decoder_SPEEX(WebRtc_UWord8 pt, WebRtc_UWord16 fs)
+decoder_SPEEX::decoder_SPEEX(uint8_t pt, uint16_t fs)
:
NETEQTEST_Decoder(fs == 8000 ? kDecoderSPEEX_8 : kDecoderSPEEX_16,
fs, "SPEEX", pt)
@@ -608,7 +608,7 @@
#ifdef CODEC_CELT_32
#include "celt_interface.h"
-decoder_CELT::decoder_CELT(WebRtc_UWord8 pt, WebRtc_UWord16 fs)
+decoder_CELT::decoder_CELT(uint8_t pt, uint16_t fs)
:
NETEQTEST_Decoder(kDecoderCELT_32, fs, "CELT", pt)
{
@@ -630,7 +630,7 @@
return(NETEQTEST_Decoder::loadToNetEQ(neteq, codecInst));
}
-decoder_CELTslave::decoder_CELTslave(WebRtc_UWord8 pt, WebRtc_UWord16 fs)
+decoder_CELTslave::decoder_CELTslave(uint8_t pt, uint16_t fs)
:
NETEQTEST_Decoder(kDecoderCELT_32, fs, "CELT", pt)
{
@@ -677,7 +677,7 @@
#if (defined(CODEC_CNGCODEC8) || defined(CODEC_CNGCODEC16) || \
defined(CODEC_CNGCODEC32) || defined(CODEC_CNGCODEC48))
#include "webrtc_cng.h"
-decoder_CNG::decoder_CNG(WebRtc_UWord8 pt, WebRtc_UWord16 fs)
+decoder_CNG::decoder_CNG(uint8_t pt, uint16_t fs)
:
NETEQTEST_Decoder(kDecoderCNG, fs, "CNG", pt)
{
diff --git a/webrtc/modules/audio_coding/neteq/test/NETEQTEST_CodecClass.h b/webrtc/modules/audio_coding/neteq/test/NETEQTEST_CodecClass.h
index 43f16a5..a58bf0d 100644
--- a/webrtc/modules/audio_coding/neteq/test/NETEQTEST_CodecClass.h
+++ b/webrtc/modules/audio_coding/neteq/test/NETEQTEST_CodecClass.h
@@ -21,25 +21,25 @@
class NETEQTEST_Decoder
{
public:
- NETEQTEST_Decoder(enum WebRtcNetEQDecoder type, WebRtc_UWord16 fs, const char * name, WebRtc_UWord8 pt = 0);
+ NETEQTEST_Decoder(enum WebRtcNetEQDecoder type, uint16_t fs, const char * name, uint8_t pt = 0);
virtual ~NETEQTEST_Decoder() {};
virtual int loadToNetEQ(NETEQTEST_NetEQClass & neteq) = 0;
int getName(char * name, int maxLen) const { strncpy( name, _name.c_str(), maxLen ); return 0;};
- void setPT(WebRtc_UWord8 pt) { _pt = pt; };
- WebRtc_UWord16 getFs() const { return (_fs); };
+ void setPT(uint8_t pt) { _pt = pt; };
+ uint16_t getFs() const { return (_fs); };
enum WebRtcNetEQDecoder getType() const { return (_decoderType); };
- WebRtc_UWord8 getPT() const { return (_pt); };
+ uint8_t getPT() const { return (_pt); };
protected:
int loadToNetEQ(NETEQTEST_NetEQClass & neteq, WebRtcNetEQ_CodecDef & codecInst);
void * _decoder;
enum WebRtcNetEQDecoder _decoderType;
- WebRtc_UWord8 _pt;
- WebRtc_UWord16 _fs;
+ uint8_t _pt;
+ uint16_t _fs;
std::string _name;
private:
@@ -49,7 +49,7 @@
class decoder_iSAC : public NETEQTEST_Decoder
{
public:
- decoder_iSAC(WebRtc_UWord8 pt = 0);
+ decoder_iSAC(uint8_t pt = 0);
virtual ~decoder_iSAC();
int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
};
@@ -58,7 +58,7 @@
class decoder_iSACSWB : public NETEQTEST_Decoder
{
public:
- decoder_iSACSWB(WebRtc_UWord8 pt = 0);
+ decoder_iSACSWB(uint8_t pt = 0);
virtual ~decoder_iSACSWB();
int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
};
@@ -66,7 +66,7 @@
class decoder_iSACFB : public NETEQTEST_Decoder {
public:
- decoder_iSACFB(WebRtc_UWord8 pt = 0);
+ decoder_iSACFB(uint8_t pt = 0);
virtual ~decoder_iSACFB();
int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
};
@@ -75,7 +75,7 @@
class decoder_PCMU : public NETEQTEST_Decoder
{
public:
- decoder_PCMU(WebRtc_UWord8 pt = 0);
+ decoder_PCMU(uint8_t pt = 0);
virtual ~decoder_PCMU() {};
int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
};
@@ -84,7 +84,7 @@
class decoder_PCMA : public NETEQTEST_Decoder
{
public:
- decoder_PCMA(WebRtc_UWord8 pt = 0);
+ decoder_PCMA(uint8_t pt = 0);
virtual ~decoder_PCMA() {};
int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
};
@@ -92,28 +92,28 @@
class decoder_PCM16B_NB : public NETEQTEST_Decoder
{
public:
- decoder_PCM16B_NB(WebRtc_UWord8 pt = 0) : NETEQTEST_Decoder(kDecoderPCM16B, 8000, "PCM16 nb", pt) {};
+ decoder_PCM16B_NB(uint8_t pt = 0) : NETEQTEST_Decoder(kDecoderPCM16B, 8000, "PCM16 nb", pt) {};
virtual ~decoder_PCM16B_NB() {};
virtual int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
};
class decoder_PCM16B_WB : public NETEQTEST_Decoder
{
public:
- decoder_PCM16B_WB(WebRtc_UWord8 pt = 0) : NETEQTEST_Decoder(kDecoderPCM16Bwb, 16000, "PCM16 wb", pt) {};
+ decoder_PCM16B_WB(uint8_t pt = 0) : NETEQTEST_Decoder(kDecoderPCM16Bwb, 16000, "PCM16 wb", pt) {};
virtual ~decoder_PCM16B_WB() {};
virtual int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
};
class decoder_PCM16B_SWB32 : public NETEQTEST_Decoder
{
public:
- decoder_PCM16B_SWB32(WebRtc_UWord8 pt = 0) : NETEQTEST_Decoder(kDecoderPCM16Bswb32kHz, 32000, "PCM16 swb32", pt) {};
+ decoder_PCM16B_SWB32(uint8_t pt = 0) : NETEQTEST_Decoder(kDecoderPCM16Bswb32kHz, 32000, "PCM16 swb32", pt) {};
virtual ~decoder_PCM16B_SWB32() {};
virtual int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
};
class decoder_PCM16B_SWB48 : public NETEQTEST_Decoder
{
public:
- decoder_PCM16B_SWB48(WebRtc_UWord8 pt = 0) : NETEQTEST_Decoder(kDecoderPCM16Bswb48kHz, 48000, "PCM16 swb48", pt) {};
+ decoder_PCM16B_SWB48(uint8_t pt = 0) : NETEQTEST_Decoder(kDecoderPCM16Bswb48kHz, 48000, "PCM16 swb48", pt) {};
virtual ~decoder_PCM16B_SWB48() {};
virtual int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
};
@@ -122,7 +122,7 @@
class decoder_ILBC : public NETEQTEST_Decoder
{
public:
- decoder_ILBC(WebRtc_UWord8 pt = 0);
+ decoder_ILBC(uint8_t pt = 0);
virtual ~decoder_ILBC();
int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
};
@@ -131,7 +131,7 @@
class decoder_G729 : public NETEQTEST_Decoder
{
public:
- decoder_G729(WebRtc_UWord8 pt = 0);
+ decoder_G729(uint8_t pt = 0);
virtual ~decoder_G729();
int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
};
@@ -139,7 +139,7 @@
class decoder_G729_1 : public NETEQTEST_Decoder
{
public:
- decoder_G729_1(WebRtc_UWord8 pt = 0);
+ decoder_G729_1(uint8_t pt = 0);
virtual ~decoder_G729_1();
int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
};
@@ -148,7 +148,7 @@
class decoder_G722 : public NETEQTEST_Decoder
{
public:
- decoder_G722(WebRtc_UWord8 pt = 0);
+ decoder_G722(uint8_t pt = 0);
virtual ~decoder_G722();
int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
};
@@ -157,7 +157,7 @@
class decoder_G722_1_16 : public NETEQTEST_Decoder
{
public:
- decoder_G722_1_16(WebRtc_UWord8 pt = 0);
+ decoder_G722_1_16(uint8_t pt = 0);
virtual ~decoder_G722_1_16();
int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
};
@@ -165,7 +165,7 @@
class decoder_G722_1_24 : public NETEQTEST_Decoder
{
public:
- decoder_G722_1_24(WebRtc_UWord8 pt = 0);
+ decoder_G722_1_24(uint8_t pt = 0);
virtual ~decoder_G722_1_24();
int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
};
@@ -173,7 +173,7 @@
class decoder_G722_1_32 : public NETEQTEST_Decoder
{
public:
- decoder_G722_1_32(WebRtc_UWord8 pt = 0);
+ decoder_G722_1_32(uint8_t pt = 0);
virtual ~decoder_G722_1_32();
int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
};
@@ -182,7 +182,7 @@
class decoder_G722_1C_24 : public NETEQTEST_Decoder
{
public:
- decoder_G722_1C_24(WebRtc_UWord8 pt = 0);
+ decoder_G722_1C_24(uint8_t pt = 0);
virtual ~decoder_G722_1C_24();
int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
};
@@ -190,7 +190,7 @@
class decoder_G722_1C_32 : public NETEQTEST_Decoder
{
public:
- decoder_G722_1C_32(WebRtc_UWord8 pt = 0);
+ decoder_G722_1C_32(uint8_t pt = 0);
virtual ~decoder_G722_1C_32();
int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
};
@@ -198,7 +198,7 @@
class decoder_G722_1C_48 : public NETEQTEST_Decoder
{
public:
- decoder_G722_1C_48(WebRtc_UWord8 pt = 0);
+ decoder_G722_1C_48(uint8_t pt = 0);
virtual ~decoder_G722_1C_48();
int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
};
@@ -207,7 +207,7 @@
class decoder_AMR : public NETEQTEST_Decoder
{
public:
- decoder_AMR(WebRtc_UWord8 pt = 0);
+ decoder_AMR(uint8_t pt = 0);
virtual ~decoder_AMR();
int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
};
@@ -215,7 +215,7 @@
class decoder_AMRWB : public NETEQTEST_Decoder
{
public:
- decoder_AMRWB(WebRtc_UWord8 pt = 0);
+ decoder_AMRWB(uint8_t pt = 0);
virtual ~decoder_AMRWB();
int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
};
@@ -223,7 +223,7 @@
class decoder_GSMFR : public NETEQTEST_Decoder
{
public:
- decoder_GSMFR(WebRtc_UWord8 pt = 0);
+ decoder_GSMFR(uint8_t pt = 0);
virtual ~decoder_GSMFR();
int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
};
@@ -231,8 +231,8 @@
class decoder_G726 : public NETEQTEST_Decoder
{
public:
- //virtual decoder_G726(WebRtc_UWord8 pt = 0) = 0;
- decoder_G726(enum WebRtcNetEQDecoder type, const char * name, WebRtc_UWord8 pt = 0);
+ //virtual decoder_G726(uint8_t pt = 0) = 0;
+ decoder_G726(enum WebRtcNetEQDecoder type, const char * name, uint8_t pt = 0);
virtual ~decoder_G726();
virtual int loadToNetEQ(NETEQTEST_NetEQClass & neteq) = 0;
};
@@ -240,35 +240,35 @@
class decoder_G726_16 : public decoder_G726
{
public:
- decoder_G726_16(WebRtc_UWord8 pt = 0) : decoder_G726(kDecoderG726_16, "G.726 (16 kbps)", pt) {};
+ decoder_G726_16(uint8_t pt = 0) : decoder_G726(kDecoderG726_16, "G.726 (16 kbps)", pt) {};
int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
};
class decoder_G726_24 : public decoder_G726
{
public:
- decoder_G726_24(WebRtc_UWord8 pt = 0) : decoder_G726(kDecoderG726_24, "G.726 (24 kbps)", pt) {};
+ decoder_G726_24(uint8_t pt = 0) : decoder_G726(kDecoderG726_24, "G.726 (24 kbps)", pt) {};
int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
};
class decoder_G726_32 : public decoder_G726
{
public:
- decoder_G726_32(WebRtc_UWord8 pt = 0) : decoder_G726(kDecoderG726_32, "G.726 (32 kbps)", pt) {};
+ decoder_G726_32(uint8_t pt = 0) : decoder_G726(kDecoderG726_32, "G.726 (32 kbps)", pt) {};
int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
};
class decoder_G726_40 : public decoder_G726
{
public:
- decoder_G726_40(WebRtc_UWord8 pt = 0) : decoder_G726(kDecoderG726_40, "G.726 (40 kbps)", pt) {};
+ decoder_G726_40(uint8_t pt = 0) : decoder_G726(kDecoderG726_40, "G.726 (40 kbps)", pt) {};
int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
};
class decoder_SPEEX : public NETEQTEST_Decoder
{
public:
- decoder_SPEEX(WebRtc_UWord8 pt = 0, WebRtc_UWord16 fs = 8000);
+ decoder_SPEEX(uint8_t pt = 0, uint16_t fs = 8000);
virtual ~decoder_SPEEX();
int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
};
@@ -276,14 +276,14 @@
class decoder_CELT : public NETEQTEST_Decoder
{
public:
- decoder_CELT(WebRtc_UWord8 pt = 0, WebRtc_UWord16 fs = 32000);
+ decoder_CELT(uint8_t pt = 0, uint16_t fs = 32000);
virtual ~decoder_CELT();
int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
};
class decoder_CELTslave : public NETEQTEST_Decoder
{
public:
- decoder_CELTslave(WebRtc_UWord8 pt = 0, WebRtc_UWord16 fs = 32000);
+ decoder_CELTslave(uint8_t pt = 0, uint16_t fs = 32000);
virtual ~decoder_CELTslave();
int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
};
@@ -291,7 +291,7 @@
class decoder_RED : public NETEQTEST_Decoder
{
public:
- decoder_RED(WebRtc_UWord8 pt = 0) : NETEQTEST_Decoder(kDecoderRED, 8000, "RED", pt) {};
+ decoder_RED(uint8_t pt = 0) : NETEQTEST_Decoder(kDecoderRED, 8000, "RED", pt) {};
virtual ~decoder_RED() {};
int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
};
@@ -299,7 +299,7 @@
class decoder_AVT : public NETEQTEST_Decoder
{
public:
- decoder_AVT(WebRtc_UWord8 pt = 0) : NETEQTEST_Decoder(kDecoderAVT, 8000, "AVT", pt) {};
+ decoder_AVT(uint8_t pt = 0) : NETEQTEST_Decoder(kDecoderAVT, 8000, "AVT", pt) {};
virtual ~decoder_AVT() {};
int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
};
@@ -308,7 +308,7 @@
class decoder_CNG : public NETEQTEST_Decoder
{
public:
- decoder_CNG(WebRtc_UWord8 pt = 0, WebRtc_UWord16 fs = 8000);
+ decoder_CNG(uint8_t pt = 0, uint16_t fs = 8000);
virtual ~decoder_CNG();
int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
};
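Every wrapper in this header follows the same pattern: the constructor forwards the NetEQ decoder enum, sample rate, display name and RTP payload type to NETEQTEST_Decoder, and loadToNetEQ registers the codec with a NetEQ instance. A hypothetical wrapper written to that pattern (decoder_Example is an illustrative name, not a class in the test code; it borrows kDecoderPCM16B only so the sketch matches the declarations above):

class decoder_Example : public NETEQTEST_Decoder
{
public:
    // Payload type defaults to 0, like the other wrappers in this header.
    decoder_Example(uint8_t pt = 0) : NETEQTEST_Decoder(kDecoderPCM16B, 8000, "Example", pt) {};
    virtual ~decoder_Example() {};
    int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
};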
diff --git a/webrtc/modules/audio_coding/neteq/test/NETEQTEST_DummyRTPpacket.cc b/webrtc/modules/audio_coding/neteq/test/NETEQTEST_DummyRTPpacket.cc
index d37cd64..5c28d49 100644
--- a/webrtc/modules/audio_coding/neteq/test/NETEQTEST_DummyRTPpacket.cc
+++ b/webrtc/modules/audio_coding/neteq/test/NETEQTEST_DummyRTPpacket.cc
@@ -29,8 +29,8 @@
return -1;
}
- WebRtc_UWord16 length, plen;
- WebRtc_UWord32 offset;
+ uint16_t length, plen;
+ uint32_t offset;
if (fread(&length, 2, 1, fp) == 0)
{
@@ -52,10 +52,10 @@
return -1;
}
// Store in local variable until we have passed the reset below.
- WebRtc_UWord32 receiveTime = ntohl(offset);
+ uint32_t receiveTime = ntohl(offset);
// Use length here because a plen of 0 specifies rtcp.
- length = (WebRtc_UWord16) (length - _kRDHeaderLen);
+ length = (uint16_t) (length - _kRDHeaderLen);
// check buffer size
if (_datagram && _memSize < length + 1)
@@ -66,7 +66,7 @@
if (!_datagram)
{
// Add one extra byte, to be able to fake a dummy payload of one byte.
- _datagram = new WebRtc_UWord8[length + 1];
+ _datagram = new uint8_t[length + 1];
_memSize = length + 1;
}
memset(_datagram, 0, length + 1);
@@ -154,8 +154,8 @@
return -1;
}
- WebRtc_UWord16 length, plen;
- WebRtc_UWord32 offset;
+ uint16_t length, plen;
+ uint32_t offset;
// length including RTPplay header
length = htons(_datagramLen + _kRDHeaderLen);
diff --git a/webrtc/modules/audio_coding/neteq/test/NETEQTEST_NetEQClass.cc b/webrtc/modules/audio_coding/neteq/test/NETEQTEST_NetEQClass.cc
index d175c76..b77c305 100644
--- a/webrtc/modules/audio_coding/neteq/test/NETEQTEST_NetEQClass.cc
+++ b/webrtc/modules/audio_coding/neteq/test/NETEQTEST_NetEQClass.cc
@@ -30,7 +30,7 @@
}
NETEQTEST_NetEQClass::NETEQTEST_NetEQClass(enum WebRtcNetEQDecoder *usedCodec, int noOfCodecs,
- WebRtc_UWord16 fs, WebRtcNetEQNetworkType nwType)
+ uint16_t fs, WebRtcNetEQNetworkType nwType)
:
_inst(NULL),
_instMem(NULL),
@@ -84,7 +84,7 @@
_instMem = NULL;
}
- _instMem = new WebRtc_Word8[memSize];
+ _instMem = new int8_t[memSize];
int ret = WebRtcNetEQ_Assign(&_inst, _instMem);
@@ -97,7 +97,7 @@
}
-int NETEQTEST_NetEQClass::init(WebRtc_UWord16 fs)
+int NETEQTEST_NetEQClass::init(uint16_t fs)
{
int ret;
@@ -165,7 +165,7 @@
_bufferMem = NULL;
}
- _bufferMem = new WebRtc_Word8[memSize];
+ _bufferMem = new int8_t[memSize];
memset(_bufferMem, -1, memSize);
@@ -253,7 +253,7 @@
QueryPerformanceCounter(&countA); // get start count for processor
#endif
- err = WebRtcNetEQ_RecIn(_inst, (WebRtc_Word16 *) rtp.datagram(), rtp.dataLen(), rtp.time() * _fsmult * 8);
+ err = WebRtcNetEQ_RecIn(_inst, (int16_t *) rtp.datagram(), rtp.dataLen(), rtp.time() * _fsmult * 8);
#ifdef WINDOWS_TIMING
QueryPerformanceCounter(&countB); // get stop count for processor
@@ -272,10 +272,10 @@
}
-WebRtc_Word16 NETEQTEST_NetEQClass::recOut(WebRtc_Word16 *outData, void *msInfo, enum WebRtcNetEQOutputType *outputType)
+int16_t NETEQTEST_NetEQClass::recOut(int16_t *outData, void *msInfo, enum WebRtcNetEQOutputType *outputType)
{
int err;
- WebRtc_Word16 outLen = 0;
+ int16_t outLen = 0;
#ifdef WINDOWS_TIMING
LARGE_INTEGER countA, countB;
#endif
@@ -299,7 +299,7 @@
else
{
// master/slave mode
- err = WebRtcNetEQ_RecOutMasterSlave(_inst, outData, &outLen, msInfo, static_cast<WebRtc_Word16>(_isMaster));
+ err = WebRtcNetEQ_RecOutMasterSlave(_inst, outData, &outLen, msInfo, static_cast<int16_t>(_isMaster));
}
#ifdef WINDOWS_TIMING
@@ -338,10 +338,10 @@
}
-WebRtc_UWord32 NETEQTEST_NetEQClass::getSpeechTimeStamp()
+uint32_t NETEQTEST_NetEQClass::getSpeechTimeStamp()
{
- WebRtc_UWord32 ts = 0;
+ uint32_t ts = 0;
int err;
err = WebRtcNetEQ_GetSpeechTimeStamp(_inst, &ts);
@@ -379,7 +379,7 @@
//}
//
//NETEQTEST_NetEQVector::NETEQTEST_NetEQVector(int numChannels, enum WebRtcNetEQDecoder *usedCodec, int noOfCodecs,
-// WebRtc_UWord16 fs, WebRtcNetEQNetworkType nwType)
+// uint16_t fs, WebRtcNetEQNetworkType nwType)
// :
//channels(numChannels, new NETEQTEST_NetEQClass(usedCodec, noOfCodecs, fs, nwType))
//{
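The test wrapper above owns NetEQ's memory itself: it allocates raw int8_t buffers and hands them to the library through the assign and packet-buffer calls instead of letting NetEQ allocate internally. A condensed sketch of that pattern, assuming the NetEQ API header used elsewhere in these tests is included and that memSize was obtained beforehand from the corresponding size query (not shown in these hunks):

// Allocate the instance memory and hand it to NetEQ; on failure the raw
// buffer is released again. Returns 0 on success, -1 on error.
static int AssignNetEqInstance(void** inst, int8_t** instMem, int memSize) {
  *instMem = new int8_t[memSize];
  if (WebRtcNetEQ_Assign(inst, *instMem) != 0) {
    delete [] *instMem;   // assignment failed; release the raw buffer
    *instMem = NULL;
    return -1;
  }
  return 0;
}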
diff --git a/webrtc/modules/audio_coding/neteq/test/NETEQTEST_NetEQClass.h b/webrtc/modules/audio_coding/neteq/test/NETEQTEST_NetEQClass.h
index 3e43125..8e987b8 100644
--- a/webrtc/modules/audio_coding/neteq/test/NETEQTEST_NetEQClass.h
+++ b/webrtc/modules/audio_coding/neteq/test/NETEQTEST_NetEQClass.h
@@ -30,16 +30,16 @@
public:
NETEQTEST_NetEQClass();
NETEQTEST_NetEQClass(enum WebRtcNetEQDecoder *usedCodec, int noOfCodecs,
- WebRtc_UWord16 fs = 8000, WebRtcNetEQNetworkType nwType = kTCPLargeJitter);
+ uint16_t fs = 8000, WebRtcNetEQNetworkType nwType = kTCPLargeJitter);
~NETEQTEST_NetEQClass();
int assign();
- int init(WebRtc_UWord16 fs = 8000);
+ int init(uint16_t fs = 8000);
int assignBuffer(enum WebRtcNetEQDecoder *usedCodec, int noOfCodecs, WebRtcNetEQNetworkType nwType = kTCPLargeJitter);
int loadCodec(WebRtcNetEQ_CodecDef & codecInst);
int recIn(NETEQTEST_RTPpacket & rtp);
- WebRtc_Word16 recOut(WebRtc_Word16 *outData, void *msInfo = NULL, enum WebRtcNetEQOutputType *outputType = NULL);
- WebRtc_UWord32 getSpeechTimeStamp();
+ int16_t recOut(int16_t *outData, void *msInfo = NULL, enum WebRtcNetEQOutputType *outputType = NULL);
+ uint32_t getSpeechTimeStamp();
WebRtcNetEQOutputType getOutputType();
void * instance() { return (_inst); };
@@ -66,8 +66,8 @@
private:
void * _inst;
- WebRtc_Word8 * _instMem;
- WebRtc_Word8 * _bufferMem;
+ int8_t * _instMem;
+ int8_t * _bufferMem;
bool _preparseRTP;
int _fsmult;
bool _isMaster;
diff --git a/webrtc/modules/audio_coding/neteq/test/NETEQTEST_RTPpacket.cc b/webrtc/modules/audio_coding/neteq/test/NETEQTEST_RTPpacket.cc
index ecca018..49ce02d 100644
--- a/webrtc/modules/audio_coding/neteq/test/NETEQTEST_RTPpacket.cc
+++ b/webrtc/modules/audio_coding/neteq/test/NETEQTEST_RTPpacket.cc
@@ -105,8 +105,8 @@
return(-1);
}
- WebRtc_UWord16 length, plen;
- WebRtc_UWord32 offset;
+ uint16_t length, plen;
+ uint32_t offset;
if (fread(&length,2,1,fp)==0)
{
@@ -127,10 +127,10 @@
reset();
return(-1);
}
- WebRtc_UWord32 receiveTime = ntohl(offset); // store in local variable until we have passed the reset below
+ uint32_t receiveTime = ntohl(offset); // store in local variable until we have passed the reset below
// Use length here because a plen of 0 specifies rtcp
- length = (WebRtc_UWord16) (length - _kRDHeaderLen);
+ length = (uint16_t) (length - _kRDHeaderLen);
// check buffer size
if (_datagram && _memSize < length)
@@ -140,7 +140,7 @@
if (!_datagram)
{
- _datagram = new WebRtc_UWord8[length];
+ _datagram = new uint8_t[length];
_memSize = length;
}
@@ -185,7 +185,7 @@
if (!_datagram)
{
- _datagram = new WebRtc_UWord8[length];
+ _datagram = new uint8_t[length];
_memSize = length;
}
@@ -216,8 +216,8 @@
return -1;
}
- WebRtc_UWord16 length, plen;
- WebRtc_UWord32 offset;
+ uint16_t length, plen;
+ uint32_t offset;
// length including RTPplay header
length = htons(_datagramLen + _kRDHeaderLen);
@@ -253,7 +253,7 @@
}
-void NETEQTEST_RTPpacket::blockPT(WebRtc_UWord8 pt)
+void NETEQTEST_RTPpacket::blockPT(uint8_t pt)
{
_blockList[pt] = true;
}
@@ -309,7 +309,7 @@
}
}
-WebRtc_UWord8 * NETEQTEST_RTPpacket::datagram() const
+uint8_t * NETEQTEST_RTPpacket::datagram() const
{
if (_datagramLen > 0)
{
@@ -321,7 +321,7 @@
}
}
-WebRtc_UWord8 * NETEQTEST_RTPpacket::payload() const
+uint8_t * NETEQTEST_RTPpacket::payload() const
{
if (_payloadLen > 0)
{
@@ -333,13 +333,13 @@
}
}
-WebRtc_Word16 NETEQTEST_RTPpacket::payloadLen()
+int16_t NETEQTEST_RTPpacket::payloadLen()
{
parseHeader();
return _payloadLen;
}
-WebRtc_Word16 NETEQTEST_RTPpacket::dataLen() const
+int16_t NETEQTEST_RTPpacket::dataLen() const
{
return _datagramLen;
}
@@ -354,7 +354,7 @@
return _lost;
}
-WebRtc_UWord8 NETEQTEST_RTPpacket::payloadType() const
+uint8_t NETEQTEST_RTPpacket::payloadType() const
{
WebRtcNetEQ_RTPInfo tempRTPinfo;
@@ -370,7 +370,7 @@
return tempRTPinfo.payloadType;
}
-WebRtc_UWord16 NETEQTEST_RTPpacket::sequenceNumber() const
+uint16_t NETEQTEST_RTPpacket::sequenceNumber() const
{
WebRtcNetEQ_RTPInfo tempRTPinfo;
@@ -386,7 +386,7 @@
return tempRTPinfo.sequenceNumber;
}
-WebRtc_UWord32 NETEQTEST_RTPpacket::timeStamp() const
+uint32_t NETEQTEST_RTPpacket::timeStamp() const
{
WebRtcNetEQ_RTPInfo tempRTPinfo;
@@ -402,7 +402,7 @@
return tempRTPinfo.timeStamp;
}
-WebRtc_UWord32 NETEQTEST_RTPpacket::SSRC() const
+uint32_t NETEQTEST_RTPpacket::SSRC() const
{
WebRtcNetEQ_RTPInfo tempRTPinfo;
@@ -418,7 +418,7 @@
return tempRTPinfo.SSRC;
}
-WebRtc_UWord8 NETEQTEST_RTPpacket::markerBit() const
+uint8_t NETEQTEST_RTPpacket::markerBit() const
{
WebRtcNetEQ_RTPInfo tempRTPinfo;
@@ -436,7 +436,7 @@
-int NETEQTEST_RTPpacket::setPayloadType(WebRtc_UWord8 pt)
+int NETEQTEST_RTPpacket::setPayloadType(uint8_t pt)
{
if (_datagramLen < 12)
@@ -455,7 +455,7 @@
}
-int NETEQTEST_RTPpacket::setSequenceNumber(WebRtc_UWord16 sn)
+int NETEQTEST_RTPpacket::setSequenceNumber(uint16_t sn)
{
if (_datagramLen < 12)
@@ -475,7 +475,7 @@
}
-int NETEQTEST_RTPpacket::setTimeStamp(WebRtc_UWord32 ts)
+int NETEQTEST_RTPpacket::setTimeStamp(uint32_t ts)
{
if (_datagramLen < 12)
@@ -497,7 +497,7 @@
}
-int NETEQTEST_RTPpacket::setSSRC(WebRtc_UWord32 ssrc)
+int NETEQTEST_RTPpacket::setSSRC(uint32_t ssrc)
{
if (_datagramLen < 12)
@@ -519,7 +519,7 @@
}
-int NETEQTEST_RTPpacket::setMarkerBit(WebRtc_UWord8 mb)
+int NETEQTEST_RTPpacket::setMarkerBit(uint8_t mb)
{
if (_datagramLen < 12)
@@ -628,7 +628,7 @@
}
-void NETEQTEST_RTPpacket::makeRTPheader(unsigned char* rtp_data, WebRtc_UWord8 payloadType, WebRtc_UWord16 seqNo, WebRtc_UWord32 timestamp, WebRtc_UWord32 ssrc, WebRtc_UWord8 markerBit) const
+void NETEQTEST_RTPpacket::makeRTPheader(unsigned char* rtp_data, uint8_t payloadType, uint16_t seqNo, uint32_t timestamp, uint32_t ssrc, uint8_t markerBit) const
{
rtp_data[0]=(unsigned char)0x80;
if (markerBit)
@@ -655,11 +655,11 @@
rtp_data[11]=(unsigned char)(ssrc & 0xFF);
}
-WebRtc_UWord16
+uint16_t
NETEQTEST_RTPpacket::parseRTPheader(WebRtcNetEQ_RTPInfo *RTPinfo,
- WebRtc_UWord8 **payloadPtr) const
+ uint8_t **payloadPtr) const
{
- WebRtc_Word16 *rtp_data = (WebRtc_Word16 *) _datagram;
+ int16_t *rtp_data = (int16_t *) _datagram;
int i_P, i_X, i_CC;
assert(_datagramLen >= 12);
@@ -671,59 +671,59 @@
if (payloadPtr)
{
- *payloadPtr = (WebRtc_UWord8*) &rtp_data[i_startPosition >> 1];
+ *payloadPtr = (uint8_t*) &rtp_data[i_startPosition >> 1];
}
- return (WebRtc_UWord16) (_datagramLen - i_startPosition - i_padlength);
+ return (uint16_t) (_datagramLen - i_startPosition - i_padlength);
}
void NETEQTEST_RTPpacket::parseBasicHeader(WebRtcNetEQ_RTPInfo *RTPinfo,
int *i_P, int *i_X, int *i_CC) const
{
- WebRtc_Word16 *rtp_data = (WebRtc_Word16 *) _datagram;
+ int16_t *rtp_data = (int16_t *) _datagram;
if (_datagramLen < 12)
{
assert(false);
return;
}
- *i_P=(((WebRtc_UWord16)(rtp_data[0] & 0x20))>>5); /* Extract the P bit */
- *i_X=(((WebRtc_UWord16)(rtp_data[0] & 0x10))>>4); /* Extract the X bit */
- *i_CC=(WebRtc_UWord16)(rtp_data[0] & 0xF); /* Get the CC number */
+ *i_P=(((uint16_t)(rtp_data[0] & 0x20))>>5); /* Extract the P bit */
+ *i_X=(((uint16_t)(rtp_data[0] & 0x10))>>4); /* Extract the X bit */
+ *i_CC=(uint16_t)(rtp_data[0] & 0xF); /* Get the CC number */
/* Get the marker bit */
- RTPinfo->markerBit = (WebRtc_UWord8) ((rtp_data[0] >> 15) & 0x01);
+ RTPinfo->markerBit = (uint8_t) ((rtp_data[0] >> 15) & 0x01);
/* Get the coder type */
- RTPinfo->payloadType = (WebRtc_UWord8) ((rtp_data[0] >> 8) & 0x7F);
+ RTPinfo->payloadType = (uint8_t) ((rtp_data[0] >> 8) & 0x7F);
/* Get the packet number */
- RTPinfo->sequenceNumber = ((( ((WebRtc_UWord16)rtp_data[1]) >> 8) & 0xFF) |
- ( ((WebRtc_UWord16)(rtp_data[1] & 0xFF)) << 8));
+ RTPinfo->sequenceNumber = ((( ((uint16_t)rtp_data[1]) >> 8) & 0xFF) |
+ ( ((uint16_t)(rtp_data[1] & 0xFF)) << 8));
/* Get timestamp */
- RTPinfo->timeStamp = ((((WebRtc_UWord16)rtp_data[2]) & 0xFF) << 24) |
- ((((WebRtc_UWord16)rtp_data[2]) & 0xFF00) << 8) |
- ((((WebRtc_UWord16)rtp_data[3]) >> 8) & 0xFF) |
- ((((WebRtc_UWord16)rtp_data[3]) & 0xFF) << 8);
+ RTPinfo->timeStamp = ((((uint16_t)rtp_data[2]) & 0xFF) << 24) |
+ ((((uint16_t)rtp_data[2]) & 0xFF00) << 8) |
+ ((((uint16_t)rtp_data[3]) >> 8) & 0xFF) |
+ ((((uint16_t)rtp_data[3]) & 0xFF) << 8);
/* Get the SSRC */
- RTPinfo->SSRC=((((WebRtc_UWord16)rtp_data[4]) & 0xFF) << 24) |
- ((((WebRtc_UWord16)rtp_data[4]) & 0xFF00) << 8) |
- ((((WebRtc_UWord16)rtp_data[5]) >> 8) & 0xFF) |
- ((((WebRtc_UWord16)rtp_data[5]) & 0xFF) << 8);
+ RTPinfo->SSRC=((((uint16_t)rtp_data[4]) & 0xFF) << 24) |
+ ((((uint16_t)rtp_data[4]) & 0xFF00) << 8) |
+ ((((uint16_t)rtp_data[5]) >> 8) & 0xFF) |
+ ((((uint16_t)rtp_data[5]) & 0xFF) << 8);
}
int NETEQTEST_RTPpacket::calcHeaderLength(int i_X, int i_CC) const
{
int i_extlength = 0;
- WebRtc_Word16 *rtp_data = (WebRtc_Word16 *) _datagram;
+ int16_t *rtp_data = (int16_t *) _datagram;
if (i_X == 1)
{
// Extension header exists.
- // Find out how many WebRtc_Word32 it consists of.
+ // Find out how many int32_t it consists of.
assert(_datagramLen > 2 * (7 + 2 * i_CC));
if (_datagramLen > 2 * (7 + 2 * i_CC))
{
- i_extlength = (((((WebRtc_UWord16) rtp_data[7 + 2 * i_CC]) >> 8)
- & 0xFF) | (((WebRtc_UWord16) (rtp_data[7 + 2 * i_CC] & 0xFF))
+ i_extlength = (((((uint16_t) rtp_data[7 + 2 * i_CC]) >> 8)
+ & 0xFF) | (((uint16_t) (rtp_data[7 + 2 * i_CC] & 0xFF))
<< 8)) + 1;
}
}
@@ -733,7 +733,7 @@
int NETEQTEST_RTPpacket::calcPadLength(int i_P) const
{
- WebRtc_Word16 *rtp_data = (WebRtc_Word16 *) _datagram;
+ int16_t *rtp_data = (int16_t *) _datagram;
if (i_P == 1)
{
/* Padding exists. Find out how many bytes the padding consists of. */
@@ -745,7 +745,7 @@
else
{
/* even number of bytes => last byte in lower byte */
- return ((WebRtc_UWord16) rtp_data[(_datagramLen >> 1) - 1]) >> 8;
+ return ((uint16_t) rtp_data[(_datagramLen >> 1) - 1]) >> 8;
}
}
return 0;
@@ -760,9 +760,9 @@
return;
}
- WebRtc_UWord8 *readDataPtr = _payloadPtr;
- WebRtc_UWord8 *writeDataPtr = _payloadPtr;
- WebRtc_UWord8 *slaveData = slaveRtp->_payloadPtr;
+ uint8_t *readDataPtr = _payloadPtr;
+ uint8_t *writeDataPtr = _payloadPtr;
+ uint8_t *slaveData = slaveRtp->_payloadPtr;
while (readDataPtr - _payloadPtr < _payloadLen)
{
@@ -829,8 +829,8 @@
parseHeader();
- WebRtc_UWord8* ptr = payload();
- WebRtc_UWord8* payloadEndPtr = ptr + payloadLen();
+ uint8_t* ptr = payload();
+ uint8_t* payloadEndPtr = ptr + payloadLen();
int num_encodings = 0;
int total_len = 0;
@@ -841,7 +841,7 @@
{
// Header found.
red.payloadType = ptr[0] & 0x7F;
- WebRtc_UWord32 offset = (ptr[1] << 6) + ((ptr[2] & 0xFC) >> 2);
+ uint32_t offset = (ptr[1] << 6) + ((ptr[2] & 0xFC) >> 2);
red.sequenceNumber = sequenceNumber();
red.timeStamp = timeStamp() - offset;
red.markerBit = markerBit();
@@ -873,6 +873,6 @@
for (int i = 0; i < _payloadLen; ++i)
{
- _payloadPtr[i] = static_cast<WebRtc_UWord8>(rand());
+ _payloadPtr[i] = static_cast<uint8_t>(rand());
}
}
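parseBasicHeader above reads the fixed 12-byte RTP header through an int16_t view of the datagram, so each field is assembled from byte-swapped half words (which lines up with the wire format only on a little-endian host). The same extraction written directly over bytes, which may be easier to check against RFC 3550 (a sketch, not the in-tree code):

#include <stdint.h>

/* hdr must point at the fixed 12-byte RTP header. */
static void ParseRtpFixedHeader(const uint8_t* hdr,
                                int* P, int* X, int* CC,
                                uint8_t* markerBit, uint8_t* payloadType,
                                uint16_t* sequenceNumber,
                                uint32_t* timeStamp, uint32_t* ssrc) {
  *P = (hdr[0] >> 5) & 0x01;            /* padding bit */
  *X = (hdr[0] >> 4) & 0x01;            /* extension bit */
  *CC = hdr[0] & 0x0F;                  /* CSRC count */
  *markerBit = (hdr[1] >> 7) & 0x01;    /* marker bit */
  *payloadType = hdr[1] & 0x7F;         /* payload type */
  *sequenceNumber = (uint16_t)(((uint16_t)hdr[2] << 8) | hdr[3]);
  *timeStamp = ((uint32_t)hdr[4] << 24) | ((uint32_t)hdr[5] << 16) |
               ((uint32_t)hdr[6] << 8) | hdr[7];
  *ssrc = ((uint32_t)hdr[8] << 24) | ((uint32_t)hdr[9] << 16) |
          ((uint32_t)hdr[10] << 8) | hdr[11];
}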
diff --git a/webrtc/modules/audio_coding/neteq/test/NETEQTEST_RTPpacket.h b/webrtc/modules/audio_coding/neteq/test/NETEQTEST_RTPpacket.h
index 8ce04c5..fda7b95 100644
--- a/webrtc/modules/audio_coding/neteq/test/NETEQTEST_RTPpacket.h
+++ b/webrtc/modules/audio_coding/neteq/test/NETEQTEST_RTPpacket.h
@@ -35,32 +35,32 @@
virtual int readFromFile(FILE *fp);
int readFixedFromFile(FILE *fp, size_t len);
virtual int writeToFile(FILE *fp);
- void blockPT(WebRtc_UWord8 pt);
+ void blockPT(uint8_t pt);
void selectSSRC(uint32_t ssrc);
- //WebRtc_Word16 payloadType();
+ //int16_t payloadType();
void parseHeader();
void parseHeader(WebRtcNetEQ_RTPInfo & rtpInfo);
WebRtcNetEQ_RTPInfo const * RTPinfo() const;
- WebRtc_UWord8 * datagram() const;
- WebRtc_UWord8 * payload() const;
- WebRtc_Word16 payloadLen();
- WebRtc_Word16 dataLen() const;
+ uint8_t * datagram() const;
+ uint8_t * payload() const;
+ int16_t payloadLen();
+ int16_t dataLen() const;
bool isParsed() const;
bool isLost() const;
- WebRtc_UWord32 time() const { return _receiveTime; };
+ uint32_t time() const { return _receiveTime; };
- WebRtc_UWord8 payloadType() const;
- WebRtc_UWord16 sequenceNumber() const;
- WebRtc_UWord32 timeStamp() const;
- WebRtc_UWord32 SSRC() const;
- WebRtc_UWord8 markerBit() const;
+ uint8_t payloadType() const;
+ uint16_t sequenceNumber() const;
+ uint32_t timeStamp() const;
+ uint32_t SSRC() const;
+ uint8_t markerBit() const;
- int setPayloadType(WebRtc_UWord8 pt);
- int setSequenceNumber(WebRtc_UWord16 sn);
- int setTimeStamp(WebRtc_UWord32 ts);
- int setSSRC(WebRtc_UWord32 ssrc);
- int setMarkerBit(WebRtc_UWord8 mb);
- void setTime(WebRtc_UWord32 receiveTime) { _receiveTime = receiveTime; };
+ int setPayloadType(uint8_t pt);
+ int setSequenceNumber(uint16_t sn);
+ int setTimeStamp(uint32_t ts);
+ int setSSRC(uint32_t ssrc);
+ int setMarkerBit(uint8_t mb);
+ void setTime(uint32_t receiveTime) { _receiveTime = receiveTime; };
int setRTPheader(const WebRtcNetEQ_RTPInfo *RTPinfo);
@@ -70,16 +70,16 @@
void scramblePayload(void);
- WebRtc_UWord8 * _datagram;
- WebRtc_UWord8 * _payloadPtr;
+ uint8_t * _datagram;
+ uint8_t * _payloadPtr;
int _memSize;
- WebRtc_Word16 _datagramLen;
- WebRtc_Word16 _payloadLen;
+ int16_t _datagramLen;
+ int16_t _payloadLen;
WebRtcNetEQ_RTPInfo _rtpInfo;
bool _rtpParsed;
- WebRtc_UWord32 _receiveTime;
+ uint32_t _receiveTime;
bool _lost;
- std::map<WebRtc_UWord8, bool> _blockList;
+ std::map<uint8_t, bool> _blockList;
uint32_t _selectSSRC;
bool _filterSSRC;
@@ -92,12 +92,12 @@
int calcHeaderLength(int i_X, int i_CC) const;
private:
- void makeRTPheader(unsigned char* rtp_data, WebRtc_UWord8 payloadType,
- WebRtc_UWord16 seqNo, WebRtc_UWord32 timestamp,
- WebRtc_UWord32 ssrc, WebRtc_UWord8 markerBit) const;
- WebRtc_UWord16 parseRTPheader(WebRtcNetEQ_RTPInfo *RTPinfo,
- WebRtc_UWord8 **payloadPtr = NULL) const;
- WebRtc_UWord16 parseRTPheader(WebRtc_UWord8 **payloadPtr = NULL)
+ void makeRTPheader(unsigned char* rtp_data, uint8_t payloadType,
+ uint16_t seqNo, uint32_t timestamp,
+ uint32_t ssrc, uint8_t markerBit) const;
+ uint16_t parseRTPheader(WebRtcNetEQ_RTPInfo *RTPinfo,
+ uint8_t **payloadPtr = NULL) const;
+ uint16_t parseRTPheader(uint8_t **payloadPtr = NULL)
{ return parseRTPheader(&_rtpInfo, payloadPtr);};
int calcPadLength(int i_P) const;
void splitStereoSample(NETEQTEST_RTPpacket* slaveRtp, int stride);
diff --git a/webrtc/modules/audio_coding/neteq/test/NetEqRTPplay.cc b/webrtc/modules/audio_coding/neteq/test/NetEqRTPplay.cc
index c8b742d..3ad8e05 100644
--- a/webrtc/modules/audio_coding/neteq/test/NetEqRTPplay.cc
+++ b/webrtc/modules/audio_coding/neteq/test/NetEqRTPplay.cc
@@ -113,19 +113,19 @@
/* Function declarations */
/*************************/
-void stereoInterleave(WebRtc_Word16 *data, WebRtc_Word16 totalLen);
-int getNextRecoutTime(FILE *fp, WebRtc_UWord32 *nextTime);
-void getNextExtraDelay(FILE *fp, WebRtc_UWord32 *t, int *d);
+void stereoInterleave(int16_t *data, int16_t totalLen);
+int getNextRecoutTime(FILE *fp, uint32_t *nextTime);
+void getNextExtraDelay(FILE *fp, uint32_t *t, int *d);
bool splitStereo(NETEQTEST_RTPpacket* rtp, NETEQTEST_RTPpacket* rtpSlave,
- const WebRtc_Word16 *stereoPtype, const enum stereoModes *stereoMode, int noOfStereoCodecs,
- const WebRtc_Word16 *cngPtype, int noOfCngCodecs,
+ const int16_t *stereoPtype, const enum stereoModes *stereoMode, int noOfStereoCodecs,
+ const int16_t *cngPtype, int noOfCngCodecs,
bool *isStereo);
-void parsePtypeFile(FILE *ptypeFile, std::map<WebRtc_UWord8, decoderStruct>* decoders);
-int populateUsedCodec(std::map<WebRtc_UWord8, decoderStruct>* decoders, enum WebRtcNetEQDecoder *usedCodec);
-void createAndInsertDecoders (NETEQTEST_NetEQClass *neteq, std::map<WebRtc_UWord8, decoderStruct>* decoders, int channelNumber);
-void free_coders(std::map<WebRtc_UWord8, decoderStruct> & decoders);
+void parsePtypeFile(FILE *ptypeFile, std::map<uint8_t, decoderStruct>* decoders);
+int populateUsedCodec(std::map<uint8_t, decoderStruct>* decoders, enum WebRtcNetEQDecoder *usedCodec);
+void createAndInsertDecoders (NETEQTEST_NetEQClass *neteq, std::map<uint8_t, decoderStruct>* decoders, int channelNumber);
+void free_coders(std::map<uint8_t, decoderStruct> & decoders);
int doAPItest();
-bool changeStereoMode(NETEQTEST_RTPpacket & rtp, std::map<WebRtc_UWord8, decoderStruct> & decoders, enum stereoModes *stereoMode);
+bool changeStereoMode(NETEQTEST_RTPpacket & rtp, std::map<uint8_t, decoderStruct> & decoders, enum stereoModes *stereoMode);
@@ -133,13 +133,13 @@
/* Global variables */
/********************/
-WebRtc_Word16 NetEqPacketBuffer[MAX_NETEQ_BUFFERSIZE>>1];
-WebRtc_Word16 NetEqPacketBufferSlave[MAX_NETEQ_BUFFERSIZE>>1];
+int16_t NetEqPacketBuffer[MAX_NETEQ_BUFFERSIZE>>1];
+int16_t NetEqPacketBufferSlave[MAX_NETEQ_BUFFERSIZE>>1];
#ifdef NETEQ_DELAY_LOGGING
extern "C" {
FILE *delay_fid2; /* file pointer */
- WebRtc_UWord32 tot_received_packets=0;
+ uint32_t tot_received_packets=0;
}
#endif
@@ -147,8 +147,8 @@
extern char BUILD_DATE;
#endif
-WebRtc_UWord32 writtenSamples = 0;
-WebRtc_UWord32 simClock=0;
+uint32_t writtenSamples = 0;
+uint32_t simClock=0;
int main(int argc, char* argv[])
{
@@ -157,8 +157,8 @@
enum WebRtcNetEQDecoder usedCodec[kDecoderReservedEnd-1];
int noOfCodecs;
int ok;
- WebRtc_Word16 out_data[640*2];
- WebRtc_Word16 outLen, writeLen;
+ int16_t out_data[640*2];
+ int16_t outLen, writeLen;
int fs = 8000;
WebRtcNetEQ_RTCPStat RTCPstat;
#ifdef WIN32
@@ -182,7 +182,7 @@
bool rtpOnly = false;
int packetLen = 0;
int packetCount = 0;
- std::map<WebRtc_UWord8, decoderStruct> decoders;
+ std::map<uint8_t, decoderStruct> decoders;
bool dummyRtp = false;
bool noDecode = false;
bool filterSSRC = false;
@@ -489,7 +489,7 @@
for (int i = 0; i < numInst; i++)
{
// create memory, allocate, initialize, and allocate packet buffer memory
- NetEQvector.push_back (new NETEQTEST_NetEQClass(usedCodec, noOfCodecs, static_cast<WebRtc_UWord16>(fs), kTCPLargeJitter));
+ NetEQvector.push_back (new NETEQTEST_NetEQClass(usedCodec, noOfCodecs, static_cast<uint16_t>(fs), kTCPLargeJitter));
createAndInsertDecoders (NetEQvector[i], &decoders, i /* channel */);
@@ -519,10 +519,10 @@
#ifdef ZERO_TS_START
- WebRtc_UWord32 firstTS = rtp->timeStamp();
+ uint32_t firstTS = rtp->timeStamp();
rtp->setTimeStamp(0);
#else
- WebRtc_UWord32 firstTS = 0;
+ uint32_t firstTS = 0;
#endif
// check stereo mode
@@ -535,8 +535,8 @@
}
#ifdef PLAY_CLEAN
- WebRtc_UWord32 prevTS = rtp->timeStamp();
- WebRtc_UWord32 currTS, prev_time;
+ uint32_t prevTS = rtp->timeStamp();
+ uint32_t currTS, prev_time;
#endif
#ifdef JUNK_DATA
@@ -552,7 +552,7 @@
}
#endif
- WebRtc_UWord32 nextRecoutTime;
+ uint32_t nextRecoutTime;
int lastRecout = getNextRecoutTime(recoutTimes, &nextRecoutTime); // does nothing if recoutTimes == NULL
if (recoutTimes)
@@ -560,9 +560,9 @@
else
simClock = rtp->time(); // start immediately with first packet
- WebRtc_UWord32 start_clock = simClock;
+ uint32_t start_clock = simClock;
- WebRtc_UWord32 nextExtraDelayTime;
+ uint32_t nextExtraDelayTime;
int extraDelay = -1;
getNextExtraDelay(extraDelays, &nextExtraDelayTime, &extraDelay);
@@ -660,7 +660,7 @@
if (stereoMode > stereoModeMono)
{
// stereo
- WebRtc_Word16 tempLen;
+ int16_t tempLen;
tempLen = NetEQvector[0]->recOut( out_data, msInfo ); // master
outLen = NetEQvector[1]->recOut( &out_data[tempLen], msInfo ); // slave
@@ -703,7 +703,7 @@
if (fwrite(&temp_var, sizeof(int), 1, delay_fid2) != 1) {
return -1;
}
- if (fwrite(&tot_received_packets, sizeof(WebRtc_UWord32),
+ if (fwrite(&tot_received_packets, sizeof(uint32_t),
1, delay_fid2) != 1) {
return -1;
}
@@ -767,8 +767,8 @@
/****************/
bool splitStereo(NETEQTEST_RTPpacket* rtp, NETEQTEST_RTPpacket* rtpSlave,
- const WebRtc_Word16 *stereoPtype, const enum stereoModes *stereoMode, int noOfStereoCodecs,
- const WebRtc_Word16 *cngPtype, int noOfCngCodecs,
+ const int16_t *stereoPtype, const enum stereoModes *stereoMode, int noOfStereoCodecs,
+ const int16_t *cngPtype, int noOfCngCodecs,
bool *isStereo)
{
@@ -784,7 +784,7 @@
}
// check payload type
- WebRtc_Word16 ptype = rtp->payloadType();
+ int16_t ptype = rtp->payloadType();
// is this a cng payload?
for (int k = 0; k < noOfCngCodecs; k++) {
@@ -824,19 +824,19 @@
}
-void stereoInterleave(WebRtc_Word16 *data, WebRtc_Word16 totalLen)
+void stereoInterleave(int16_t *data, int16_t totalLen)
{
int k;
for(k = totalLen/2; k < totalLen; k++) {
- WebRtc_Word16 temp = data[k];
- memmove(&data[2*k - totalLen + 2], &data[2*k - totalLen + 1], (totalLen - k -1) * sizeof(WebRtc_Word16));
+ int16_t temp = data[k];
+ memmove(&data[2*k - totalLen + 2], &data[2*k - totalLen + 1], (totalLen - k -1) * sizeof(int16_t));
data[2*k - totalLen + 1] = temp;
}
}
-int getNextRecoutTime(FILE *fp, WebRtc_UWord32 *nextTime) {
+int getNextRecoutTime(FILE *fp, uint32_t *nextTime) {
float tempTime;
@@ -846,7 +846,7 @@
if (fread(&tempTime, sizeof(float), 1, fp) != 0) {
// not end of file
- *nextTime = (WebRtc_UWord32) tempTime;
+ *nextTime = (uint32_t) tempTime;
return 0;
}
@@ -856,7 +856,7 @@
return 1;
}
-void getNextExtraDelay(FILE *fp, WebRtc_UWord32 *t, int *d) {
+void getNextExtraDelay(FILE *fp, uint32_t *t, int *d) {
float temp[2];
@@ -867,7 +867,7 @@
if (fread(&temp, sizeof(float), 2, fp) != 0) {
// not end of file
- *t = (WebRtc_UWord32) temp[0];
+ *t = (uint32_t) temp[0];
*d = (int) temp[1];
return;
}
@@ -879,7 +879,7 @@
}
-void parsePtypeFile(FILE *ptypeFile, std::map<WebRtc_UWord8, decoderStruct>* decoders)
+void parsePtypeFile(FILE *ptypeFile, std::map<uint8_t, decoderStruct>* decoders)
{
int n, pt;
char codec[100];
@@ -1264,7 +1264,7 @@
}
// insert into codecs map
- (*decoders)[static_cast<WebRtc_UWord8>(pt)] = tempDecoder;
+ (*decoders)[static_cast<uint8_t>(pt)] = tempDecoder;
}
@@ -1274,7 +1274,7 @@
}
-bool changeStereoMode(NETEQTEST_RTPpacket & rtp, std::map<WebRtc_UWord8, decoderStruct> & decoders, enum stereoModes *stereoMode)
+bool changeStereoMode(NETEQTEST_RTPpacket & rtp, std::map<uint8_t, decoderStruct> & decoders, enum stereoModes *stereoMode)
{
if (decoders.count(rtp.payloadType()) > 0
&& decoders[rtp.payloadType()].codec != kDecoderRED
@@ -1292,11 +1292,11 @@
}
-int populateUsedCodec(std::map<WebRtc_UWord8, decoderStruct>* decoders, enum WebRtcNetEQDecoder *usedCodec)
+int populateUsedCodec(std::map<uint8_t, decoderStruct>* decoders, enum WebRtcNetEQDecoder *usedCodec)
{
int numCodecs = 0;
- std::map<WebRtc_UWord8, decoderStruct>::iterator it;
+ std::map<uint8_t, decoderStruct>::iterator it;
it = decoders->begin();
@@ -1310,9 +1310,9 @@
}
-void createAndInsertDecoders (NETEQTEST_NetEQClass *neteq, std::map<WebRtc_UWord8, decoderStruct>* decoders, int channelNumber)
+void createAndInsertDecoders (NETEQTEST_NetEQClass *neteq, std::map<uint8_t, decoderStruct>* decoders, int channelNumber)
{
- std::map<WebRtc_UWord8, decoderStruct>::iterator it;
+ std::map<uint8_t, decoderStruct>::iterator it;
for (it = decoders->begin(); it != decoders->end(); it++)
{
@@ -1320,7 +1320,7 @@
((*it).second.stereo > stereoModeMono ))
{
// create decoder instance
- WebRtc_UWord8 pt = static_cast<WebRtc_UWord8>( (*it).first );
+ uint8_t pt = static_cast<uint8_t>( (*it).first );
NETEQTEST_Decoder **dec = &((*it).second.decoder[channelNumber]);
enum WebRtcNetEQDecoder type = (*it).second.codec;
@@ -1524,7 +1524,7 @@
#if (defined(CODEC_CNGCODEC8) || defined(CODEC_CNGCODEC16) || \
defined(CODEC_CNGCODEC32) || defined(CODEC_CNGCODEC48))
case kDecoderCNG:
- *dec = new decoder_CNG( pt, static_cast<WebRtc_UWord16>((*it).second.fs) );
+ *dec = new decoder_CNG( pt, static_cast<uint16_t>((*it).second.fs) );
break;
#endif
#ifdef CODEC_ISACLC
@@ -1577,9 +1577,9 @@
}
-void free_coders(std::map<WebRtc_UWord8, decoderStruct> & decoders)
+void free_coders(std::map<uint8_t, decoderStruct> & decoders)
{
- std::map<WebRtc_UWord8, decoderStruct>::iterator it;
+ std::map<uint8_t, decoderStruct>::iterator it;
for (it = decoders.begin(); it != decoders.end(); it++)
{
@@ -1608,7 +1608,7 @@
int NetEqBufferMaxPackets, BufferSizeInBytes;
WebRtcNetEQ_CodecDef codecInst;
WebRtcNetEQ_RTCPStat RTCPstat;
- WebRtc_UWord32 timestamp;
+ uint32_t timestamp;
int memorySize;
int ok;
int overhead_bytes;
@@ -1635,7 +1635,7 @@
CHECK_MINUS_ONE(WebRtcNetEQ_CodecDbReset(inst))
CHECK_MINUS_ONE(WebRtcNetEQ_CodecDbAdd(inst, &codecInst))
CHECK_MINUS_ONE(WebRtcNetEQ_CodecDbRemove(inst, usedCodec))
- WebRtc_Word16 temp1, temp2;
+ int16_t temp1, temp2;
CHECK_MINUS_ONE(WebRtcNetEQ_CodecDbGetSizeInfo(inst, &temp1, &temp2))
CHECK_MINUS_ONE(WebRtcNetEQ_CodecDbGetCodecInfo(inst, 0, &usedCodec))
@@ -1646,8 +1646,8 @@
WebRtcNetEQOutputType temptype;
CHECK_MINUS_ONE(WebRtcNetEQ_GetSpeechOutputType(inst, &temptype))
- WebRtc_UWord8 tempFlags;
- WebRtc_UWord16 utemp1, utemp2;
+ uint8_t tempFlags;
+ uint16_t utemp1, utemp2;
CHECK_MINUS_ONE(WebRtcNetEQ_VQmonRecOutStatistics(inst, &utemp1, &utemp2, &tempFlags))
CHECK_MINUS_ONE(WebRtcNetEQ_VQmonGetRxStatistics(inst, &utemp1, &utemp2))
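The int16_t stereoInterleave defined earlier in this file turns the concatenated master/slave output, all left samples followed by all right samples, into sample-interleaved L/R order in place, shifting the tail with memmove on every pass. An out-of-place equivalent that makes the intent easier to see (a sketch using a scratch buffer, not a drop-in replacement for the in-place version):

#include <stdint.h>
#include <stdlib.h>
#include <string.h>

/* in:  [L0 L1 ... L(n-1) R0 R1 ... R(n-1)], totalLen = 2*n samples
 * out: [L0 R0 L1 R1 ...], written back over the input buffer */
static void StereoInterleaveCopy(int16_t* data, int totalLen) {
  const int n = totalLen / 2;
  int16_t* scratch = (int16_t*)malloc(totalLen * sizeof(int16_t));
  if (scratch == NULL)
    return;  /* leave the input untouched if allocation fails */
  for (int k = 0; k < n; k++) {
    scratch[2 * k] = data[k];          /* left channel sample */
    scratch[2 * k + 1] = data[n + k];  /* right channel sample */
  }
  memcpy(data, scratch, totalLen * sizeof(int16_t));
  free(scratch);
}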
diff --git a/webrtc/modules/audio_coding/neteq/test/RTPencode.cc b/webrtc/modules/audio_coding/neteq/test/RTPencode.cc
index 32b0bcc..45313be 100644
--- a/webrtc/modules/audio_coding/neteq/test/RTPencode.cc
+++ b/webrtc/modules/audio_coding/neteq/test/RTPencode.cc
@@ -73,12 +73,12 @@
int NetEQTest_init_coders(enum WebRtcNetEQDecoder coder, int enc_frameSize, int bitrate, int sampfreq , int vad, int numChannels);
void defineCodecs(enum WebRtcNetEQDecoder *usedCodec, int *noOfCodecs );
int NetEQTest_free_coders(enum WebRtcNetEQDecoder coder, int numChannels);
-int NetEQTest_encode(int coder, WebRtc_Word16 *indata, int frameLen, unsigned char * encoded,int sampleRate , int * vad, int useVAD, int bitrate, int numChannels);
-void makeRTPheader(unsigned char* rtp_data, int payloadType, int seqNo, WebRtc_UWord32 timestamp, WebRtc_UWord32 ssrc);
-int makeRedundantHeader(unsigned char* rtp_data, int *payloadType, int numPayloads, WebRtc_UWord32 *timestamp, WebRtc_UWord16 *blockLen,
- int seqNo, WebRtc_UWord32 ssrc);
+int NetEQTest_encode(int coder, int16_t *indata, int frameLen, unsigned char * encoded,int sampleRate , int * vad, int useVAD, int bitrate, int numChannels);
+void makeRTPheader(unsigned char* rtp_data, int payloadType, int seqNo, uint32_t timestamp, uint32_t ssrc);
+int makeRedundantHeader(unsigned char* rtp_data, int *payloadType, int numPayloads, uint32_t *timestamp, uint16_t *blockLen,
+ int seqNo, uint32_t ssrc);
int makeDTMFpayload(unsigned char* payload_data, int Event, int End, int Volume, int Duration);
-void stereoDeInterleave(WebRtc_Word16* audioSamples, int numSamples);
+void stereoDeInterleave(int16_t* audioSamples, int numSamples);
void stereoInterleave(unsigned char* data, int dataLen, int stride);
/*********************/
@@ -199,11 +199,11 @@
#endif
#ifdef CODEC_AMR
AMR_encinst_t *AMRenc_inst[2];
- WebRtc_Word16 AMR_bitrate;
+ int16_t AMR_bitrate;
#endif
#ifdef CODEC_AMRWB
AMRWB_encinst_t *AMRWBenc_inst[2];
- WebRtc_Word16 AMRWB_bitrate;
+ int16_t AMRWB_bitrate;
#endif
#ifdef CODEC_ILBC
iLBC_encinst_t *iLBCenc_inst[2];
@@ -250,21 +250,21 @@
int useVAD, vad;
int useRed=0;
int len, enc_len;
- WebRtc_Word16 org_data[4000];
+ int16_t org_data[4000];
unsigned char rtp_data[8000];
- WebRtc_Word16 seqNo=0xFFF;
- WebRtc_UWord32 ssrc=1235412312;
- WebRtc_UWord32 timestamp=0xAC1245;
- WebRtc_UWord16 length, plen;
- WebRtc_UWord32 offset;
+ int16_t seqNo=0xFFF;
+ uint32_t ssrc=1235412312;
+ uint32_t timestamp=0xAC1245;
+ uint16_t length, plen;
+ uint32_t offset;
double sendtime = 0;
int red_PT[2] = {0};
- WebRtc_UWord32 red_TS[2] = {0};
- WebRtc_UWord16 red_len[2] = {0};
+ uint32_t red_TS[2] = {0};
+ uint16_t red_len[2] = {0};
int RTPheaderLen=12;
unsigned char red_data[8000];
#ifdef INSERT_OLD_PACKETS
- WebRtc_UWord16 old_length, old_plen;
+ uint16_t old_length, old_plen;
int old_enc_len;
int first_old_packet=1;
unsigned char old_rtp_data[8000];
@@ -273,7 +273,7 @@
#ifdef INSERT_DTMF_PACKETS
int NTone = 1;
int DTMFfirst = 1;
- WebRtc_UWord32 DTMFtimestamp;
+ uint32_t DTMFtimestamp;
bool dtmfSent = false;
#endif
bool usingStereo = false;
@@ -553,7 +553,7 @@
/* write file header */
//fprintf(out_file, "#!RTPencode%s\n", "1.0");
fprintf(out_file, "#!rtpplay%s \n", "1.0"); // this is the string that rtpplay needs
- WebRtc_UWord32 dummy_variable = 0; // should be converted to network endian format, but does not matter when 0
+ uint32_t dummy_variable = 0; // should be converted to network endian format, but does not matter when 0
if (fwrite(&dummy_variable, 4, 1, out_file) != 1) {
return -1;
}
@@ -615,7 +615,7 @@
/* write RTP packet to file */
length = htons(12 + enc_len + 8);
plen = htons(12 + enc_len);
- offset = (WebRtc_UWord32) sendtime; //(timestamp/(fs/1000));
+ offset = (uint32_t) sendtime; //(timestamp/(fs/1000));
offset = htonl(offset);
if (fwrite(&length, 2, 1, out_file) != 1) {
return -1;
@@ -710,7 +710,7 @@
/* write RTP packet to file */
length = htons(12 + enc_len + 8);
plen = htons(12 + enc_len);
- offset = (WebRtc_UWord32) sendtime;
+ offset = (uint32_t) sendtime;
//(timestamp/(fs/1000));
offset = htonl(offset);
if (fwrite(&length, 2, 1, out_file) != 1) {
@@ -778,7 +778,7 @@
if(usedCodec==kDecoderISAC)
{
assert(!usingStereo); // Cannot handle stereo yet
- red_len[0] = WebRtcIsac_GetRedPayload(ISAC_inst[0], (WebRtc_Word16*)red_data);
+ red_len[0] = WebRtcIsac_GetRedPayload(ISAC_inst[0], (int16_t*)red_data);
}
else
{
@@ -1726,13 +1726,13 @@
-int NetEQTest_encode(int coder, WebRtc_Word16 *indata, int frameLen, unsigned char * encoded,int sampleRate ,
+int NetEQTest_encode(int coder, int16_t *indata, int frameLen, unsigned char * encoded,int sampleRate ,
int * vad, int useVAD, int bitrate, int numChannels){
short cdlen = 0;
- WebRtc_Word16 *tempdata;
+ int16_t *tempdata;
static int first_cng=1;
- WebRtc_Word16 tempLen;
+ int16_t tempLen;
*vad =1;
@@ -1797,91 +1797,91 @@
/* Encode with the selected coder type */
if (coder==kDecoderPCMu) { /*g711 u-law */
#ifdef CODEC_G711
- cdlen = WebRtcG711_EncodeU(G711state[k], indata, frameLen, (WebRtc_Word16*) encoded);
+ cdlen = WebRtcG711_EncodeU(G711state[k], indata, frameLen, (int16_t*) encoded);
#endif
}
else if (coder==kDecoderPCMa) { /*g711 A-law */
#ifdef CODEC_G711
- cdlen = WebRtcG711_EncodeA(G711state[k], indata, frameLen, (WebRtc_Word16*) encoded);
+ cdlen = WebRtcG711_EncodeA(G711state[k], indata, frameLen, (int16_t*) encoded);
}
#endif
#ifdef CODEC_PCM16B
else if ((coder==kDecoderPCM16B)||(coder==kDecoderPCM16Bwb)||
(coder==kDecoderPCM16Bswb32kHz)||(coder==kDecoderPCM16Bswb48kHz)) { /*pcm16b (8kHz, 16kHz, 32kHz or 48kHz) */
- cdlen = WebRtcPcm16b_EncodeW16(indata, frameLen, (WebRtc_Word16*) encoded);
+ cdlen = WebRtcPcm16b_EncodeW16(indata, frameLen, (int16_t*) encoded);
}
#endif
#ifdef CODEC_G722
else if (coder==kDecoderG722) { /*g722 */
- cdlen=WebRtcG722_Encode(g722EncState[k], indata, frameLen, (WebRtc_Word16*)encoded);
+ cdlen=WebRtcG722_Encode(g722EncState[k], indata, frameLen, (int16_t*)encoded);
cdlen=frameLen>>1;
}
#endif
#ifdef CODEC_G722_1_16
else if (coder==kDecoderG722_1_16) { /* g722.1 16kbit/s mode */
- cdlen=WebRtcG7221_Encode16((G722_1_16_encinst_t*)G722_1_16enc_inst[k], indata, frameLen, (WebRtc_Word16*)encoded);
+ cdlen=WebRtcG7221_Encode16((G722_1_16_encinst_t*)G722_1_16enc_inst[k], indata, frameLen, (int16_t*)encoded);
}
#endif
#ifdef CODEC_G722_1_24
else if (coder==kDecoderG722_1_24) { /* g722.1 24kbit/s mode*/
- cdlen=WebRtcG7221_Encode24((G722_1_24_encinst_t*)G722_1_24enc_inst[k], indata, frameLen, (WebRtc_Word16*)encoded);
+ cdlen=WebRtcG7221_Encode24((G722_1_24_encinst_t*)G722_1_24enc_inst[k], indata, frameLen, (int16_t*)encoded);
}
#endif
#ifdef CODEC_G722_1_32
else if (coder==kDecoderG722_1_32) { /* g722.1 32kbit/s mode */
- cdlen=WebRtcG7221_Encode32((G722_1_32_encinst_t*)G722_1_32enc_inst[k], indata, frameLen, (WebRtc_Word16*)encoded);
+ cdlen=WebRtcG7221_Encode32((G722_1_32_encinst_t*)G722_1_32enc_inst[k], indata, frameLen, (int16_t*)encoded);
}
#endif
#ifdef CODEC_G722_1C_24
else if (coder==kDecoderG722_1C_24) { /* g722.1 32 kHz 24kbit/s mode*/
- cdlen=WebRtcG7221C_Encode24((G722_1C_24_encinst_t*)G722_1C_24enc_inst[k], indata, frameLen, (WebRtc_Word16*)encoded);
+ cdlen=WebRtcG7221C_Encode24((G722_1C_24_encinst_t*)G722_1C_24enc_inst[k], indata, frameLen, (int16_t*)encoded);
}
#endif
#ifdef CODEC_G722_1C_32
else if (coder==kDecoderG722_1C_32) { /* g722.1 32 kHz 32kbit/s mode */
- cdlen=WebRtcG7221C_Encode32((G722_1C_32_encinst_t*)G722_1C_32enc_inst[k], indata, frameLen, (WebRtc_Word16*)encoded);
+ cdlen=WebRtcG7221C_Encode32((G722_1C_32_encinst_t*)G722_1C_32enc_inst[k], indata, frameLen, (int16_t*)encoded);
}
#endif
#ifdef CODEC_G722_1C_48
else if (coder==kDecoderG722_1C_48) { /* g722.1 32 kHz 48kbit/s mode */
- cdlen=WebRtcG7221C_Encode48((G722_1C_48_encinst_t*)G722_1C_48enc_inst[k], indata, frameLen, (WebRtc_Word16*)encoded);
+ cdlen=WebRtcG7221C_Encode48((G722_1C_48_encinst_t*)G722_1C_48enc_inst[k], indata, frameLen, (int16_t*)encoded);
}
#endif
#ifdef CODEC_G729
else if (coder==kDecoderG729) { /*g729 */
- WebRtc_Word16 dataPos=0;
- WebRtc_Word16 len=0;
+ int16_t dataPos=0;
+ int16_t len=0;
cdlen = 0;
for (dataPos=0;dataPos<frameLen;dataPos+=80) {
- len=WebRtcG729_Encode(G729enc_inst[k], &indata[dataPos], 80, (WebRtc_Word16*)(&encoded[cdlen]));
+ len=WebRtcG729_Encode(G729enc_inst[k], &indata[dataPos], 80, (int16_t*)(&encoded[cdlen]));
cdlen += len;
}
}
#endif
#ifdef CODEC_G729_1
else if (coder==kDecoderG729_1) { /*g729.1 */
- WebRtc_Word16 dataPos=0;
- WebRtc_Word16 len=0;
+ int16_t dataPos=0;
+ int16_t len=0;
cdlen = 0;
for (dataPos=0;dataPos<frameLen;dataPos+=160) {
- len=WebRtcG7291_Encode(G729_1_inst[k], &indata[dataPos], (WebRtc_Word16*)(&encoded[cdlen]), bitrate, frameLen/320 /* num 20ms frames*/);
+ len=WebRtcG7291_Encode(G729_1_inst[k], &indata[dataPos], (int16_t*)(&encoded[cdlen]), bitrate, frameLen/320 /* num 20ms frames*/);
cdlen += len;
}
}
#endif
#ifdef CODEC_AMR
else if (coder==kDecoderAMR) { /*AMR */
- cdlen=WebRtcAmr_Encode(AMRenc_inst[k], indata, frameLen, (WebRtc_Word16*)encoded, AMR_bitrate);
+ cdlen=WebRtcAmr_Encode(AMRenc_inst[k], indata, frameLen, (int16_t*)encoded, AMR_bitrate);
}
#endif
#ifdef CODEC_AMRWB
else if (coder==kDecoderAMRWB) { /*AMR-wb */
- cdlen=WebRtcAmrWb_Encode(AMRWBenc_inst[k], indata, frameLen, (WebRtc_Word16*)encoded, AMRWB_bitrate);
+ cdlen=WebRtcAmrWb_Encode(AMRWBenc_inst[k], indata, frameLen, (int16_t*)encoded, AMRWB_bitrate);
}
#endif
#ifdef CODEC_ILBC
else if (coder==kDecoderILBC) { /*iLBC */
- cdlen=WebRtcIlbcfix_Encode(iLBCenc_inst[k], indata,frameLen,(WebRtc_Word16*)encoded);
+ cdlen=WebRtcIlbcfix_Encode(iLBCenc_inst[k], indata,frameLen,(int16_t*)encoded);
}
#endif
#if (defined(CODEC_ISAC) || defined(NETEQ_ISACFIX_CODEC)) // TODO(hlundin): remove all NETEQ_ISACFIX_CODEC
@@ -1890,9 +1890,9 @@
cdlen=0;
while (cdlen<=0) {
#ifdef CODEC_ISAC /* floating point */
- cdlen=WebRtcIsac_Encode(ISAC_inst[k],&indata[noOfCalls*160],(WebRtc_Word16*)encoded);
+ cdlen=WebRtcIsac_Encode(ISAC_inst[k],&indata[noOfCalls*160],(int16_t*)encoded);
#else /* fixed point */
- cdlen=WebRtcIsacfix_Encode(ISAC_inst[k],&indata[noOfCalls*160],(WebRtc_Word16*)encoded);
+ cdlen=WebRtcIsacfix_Encode(ISAC_inst[k],&indata[noOfCalls*160],(int16_t*)encoded);
#endif
noOfCalls++;
}
@@ -1903,7 +1903,7 @@
int noOfCalls=0;
cdlen=0;
while (cdlen<=0) {
- cdlen=WebRtcIsac_Encode(ISACSWB_inst[k],&indata[noOfCalls*320],(WebRtc_Word16*)encoded);
+ cdlen=WebRtcIsac_Encode(ISACSWB_inst[k],&indata[noOfCalls*320],(int16_t*)encoded);
noOfCalls++;
}
}
@@ -1915,14 +1915,14 @@
while (cdlen <= 0) {
cdlen = WebRtcIsac_Encode(ISACFB_inst[k],
&indata[noOfCalls * 480],
- (WebRtc_Word16*)encoded);
+ (int16_t*)encoded);
noOfCalls++;
}
}
#endif
#ifdef CODEC_GSMFR
else if (coder==kDecoderGSMFR) { /* GSM FR */
- cdlen=WebRtcGSMFR_Encode(GSMFRenc_inst[k], indata, frameLen, (WebRtc_Word16*)encoded);
+ cdlen=WebRtcGSMFR_Encode(GSMFRenc_inst[k], indata, frameLen, (int16_t*)encoded);
}
#endif
#ifdef CODEC_SPEEX_8
@@ -1937,7 +1937,7 @@
printf("Error encoding speex frame!\n");
exit(0);
}
- cdlen=WebRtcSpeex_GetBitstream(SPEEX8enc_inst[k], (WebRtc_Word16*)encoded);
+ cdlen=WebRtcSpeex_GetBitstream(SPEEX8enc_inst[k], (int16_t*)encoded);
}
#endif
#ifdef CODEC_SPEEX_16
@@ -1952,7 +1952,7 @@
printf("Error encoding speex frame!\n");
exit(0);
}
- cdlen=WebRtcSpeex_GetBitstream(SPEEX16enc_inst[k], (WebRtc_Word16*)encoded);
+ cdlen=WebRtcSpeex_GetBitstream(SPEEX16enc_inst[k], (int16_t*)encoded);
}
#endif
#ifdef CODEC_CELT_32
@@ -1982,7 +1982,7 @@
-void makeRTPheader(unsigned char* rtp_data, int payloadType, int seqNo, WebRtc_UWord32 timestamp, WebRtc_UWord32 ssrc){
+void makeRTPheader(unsigned char* rtp_data, int payloadType, int seqNo, uint32_t timestamp, uint32_t ssrc){
rtp_data[0]=(unsigned char)0x80;
rtp_data[1]=(unsigned char)(payloadType & 0xFF);
@@ -2002,13 +2002,13 @@
}
-int makeRedundantHeader(unsigned char* rtp_data, int *payloadType, int numPayloads, WebRtc_UWord32 *timestamp, WebRtc_UWord16 *blockLen,
- int seqNo, WebRtc_UWord32 ssrc)
+int makeRedundantHeader(unsigned char* rtp_data, int *payloadType, int numPayloads, uint32_t *timestamp, uint16_t *blockLen,
+ int seqNo, uint32_t ssrc)
{
int i;
unsigned char *rtpPointer;
- WebRtc_UWord16 offset;
+ uint16_t offset;
/* first create "standard" RTP header */
makeRTPheader(rtp_data, NETEQ_CODEC_RED_PT, seqNo, timestamp[numPayloads-1], ssrc);
@@ -2018,7 +2018,7 @@
/* add one sub-header for each redundant payload (not the primary) */
for(i=0; i<numPayloads-1; i++) { /* |0 1 2 3 4 5 6 7| */
if(blockLen[i] > 0) {
- offset = (WebRtc_UWord16) (timestamp[numPayloads-1] - timestamp[i]);
+ offset = (uint16_t) (timestamp[numPayloads-1] - timestamp[i]);
rtpPointer[0] = (unsigned char) ( 0x80 | (0x7F & payloadType[i]) ); /* |F| block PT | */
rtpPointer[1] = (unsigned char) ((offset >> 6) & 0xFF); /* | timestamp- | */
@@ -2056,22 +2056,22 @@
return(4);
}
-void stereoDeInterleave(WebRtc_Word16* audioSamples, int numSamples)
+void stereoDeInterleave(int16_t* audioSamples, int numSamples)
{
- WebRtc_Word16 *tempVec;
- WebRtc_Word16 *readPtr, *writeL, *writeR;
+ int16_t *tempVec;
+ int16_t *readPtr, *writeL, *writeR;
if (numSamples <= 0)
return;
- tempVec = (WebRtc_Word16 *) malloc(sizeof(WebRtc_Word16) * numSamples);
+ tempVec = (int16_t *) malloc(sizeof(int16_t) * numSamples);
if (tempVec == NULL) {
printf("Error allocating memory\n");
exit(0);
}
- memcpy(tempVec, audioSamples, numSamples*sizeof(WebRtc_Word16));
+ memcpy(tempVec, audioSamples, numSamples*sizeof(int16_t));
writeL = audioSamples;
writeR = &audioSamples[numSamples/2];
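
The remainder of stereoDeInterleave lies outside the hunk, but the copy loop it sets up is straightforward: left samples go to the first half of the buffer and right samples to the second half. A minimal self-contained sketch of that loop (std::vector stands in for the malloc/memcpy scratch buffer):

#include <cstdint>
#include <vector>

// Sketch: split interleaved samples L0 R0 L1 R1 ... into
// L0 L1 ... | R0 R1 ... in place, as stereoDeInterleave does.
static void DeInterleaveStereo(int16_t* samples, int num_samples) {
  if (num_samples <= 0 || (num_samples % 2) != 0)
    return;
  std::vector<int16_t> tmp(samples, samples + num_samples);
  int16_t* write_left = samples;                     // first half: left channel
  int16_t* write_right = samples + num_samples / 2;  // second half: right channel
  for (int i = 0; i < num_samples; i += 2) {
    *write_left++ = tmp[i];
    *write_right++ = tmp[i + 1];
  }
}
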
diff --git a/webrtc/modules/audio_coding/neteq/test/RTPjitter.cc b/webrtc/modules/audio_coding/neteq/test/RTPjitter.cc
index e3270be..3f94350 100644
--- a/webrtc/modules/audio_coding/neteq/test/RTPjitter.cc
+++ b/webrtc/modules/audio_coding/neteq/test/RTPjitter.cc
@@ -35,7 +35,7 @@
struct arr_time {
float time;
- WebRtc_UWord32 ix;
+ uint32_t ix;
};
int filelen(FILE *fid)
@@ -64,8 +64,8 @@
char firstline[FIRSTLINELEN];
unsigned char *rtp_vec = NULL, **packet_ptr, *temp_packet;
const unsigned int kRtpDumpHeaderSize = 4 + 4 + 4 + 2 + 2;
- WebRtc_UWord16 len;
- WebRtc_UWord32 *offset;
+ uint16_t len;
+ uint32_t *offset;
/* check number of parameters */
if (argc != 4) {
@@ -131,9 +131,9 @@
// read all RTP packets into vector
rtp_len=0;
Npack=0;
- len=(WebRtc_UWord16) fread(&rtp_vec[rtp_len], sizeof(unsigned char), 2, in_file); // read length of first packet
+ len=(uint16_t) fread(&rtp_vec[rtp_len], sizeof(unsigned char), 2, in_file); // read length of first packet
while(len==2) {
- len = ntohs(*((WebRtc_UWord16 *)(rtp_vec + rtp_len)));
+ len = ntohs(*((uint16_t *)(rtp_vec + rtp_len)));
rtp_len += 2;
if(fread(&rtp_vec[rtp_len], sizeof(unsigned char), len-2, in_file)!=(unsigned) (len-2)) {
fprintf(stderr,"Error: currupt packet length\n");
@@ -141,7 +141,7 @@
}
rtp_len += len-2;
Npack++;
- len=(WebRtc_UWord16) fread(&rtp_vec[rtp_len], sizeof(unsigned char), 2, in_file); // read length of next packet
+ len=(uint16_t) fread(&rtp_vec[rtp_len], sizeof(unsigned char), 2, in_file); // read length of next packet
}
packet_ptr = (unsigned char **) malloc(Npack*sizeof(unsigned char*));
@@ -149,7 +149,7 @@
packet_ptr[0]=rtp_vec;
k=1;
while(k<Npack) {
- len = ntohs(*((WebRtc_UWord16 *) packet_ptr[k-1]));
+ len = ntohs(*((uint16_t *) packet_ptr[k-1]));
packet_ptr[k]=packet_ptr[k-1]+len;
k++;
}
@@ -157,20 +157,20 @@
for(k=0; k<dat_len && k<Npack; k++) {
if(time_vec[k].time < FLT_MAX && time_vec[k].ix < Npack){
temp_packet = packet_ptr[time_vec[k].ix];
- offset = (WebRtc_UWord32 *) (temp_packet+4);
+ offset = (uint32_t *) (temp_packet+4);
if ( time_vec[k].time >= 0 ) {
- *offset = htonl((WebRtc_UWord32) time_vec[k].time);
+ *offset = htonl((uint32_t) time_vec[k].time);
}
else {
- *offset = htonl((WebRtc_UWord32) 0);
+ *offset = htonl((uint32_t) 0);
fprintf(stderr, "Warning: negative receive time in dat file transformed to 0.\n");
}
// write packet to file
if (fwrite(temp_packet, sizeof(unsigned char),
- ntohs(*((WebRtc_UWord16*) temp_packet)),
+ ntohs(*((uint16_t*) temp_packet)),
out_file) !=
- ntohs(*((WebRtc_UWord16*) temp_packet))) {
+ ntohs(*((uint16_t*) temp_packet))) {
return -1;
}
}
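
The read loop above follows the rtpplay packet-record layout: a 16-bit big-endian record length (header included), a 16-bit payload length, and a 32-bit receive-time offset in milliseconds, followed by the raw RTP packet. A hedged sketch of reading one record with that layout (the field handling is illustrative; the real tool keeps everything in one large buffer):

#include <arpa/inet.h>  // ntohs, ntohl
#include <cstdint>
#include <cstdio>
#include <cstring>
#include <vector>

// Sketch: read one rtpplay record (8-byte record header + packet bytes).
// Returns an empty vector at end of file or on a short read.
static std::vector<uint8_t> ReadRtpDumpRecord(FILE* fp, uint32_t* recv_time_ms) {
  uint8_t header[8];
  if (fread(header, 1, sizeof(header), fp) != sizeof(header))
    return {};
  uint16_t length;  // total record length, including this 8-byte header
  uint32_t offset;  // receive time, in ms, relative to the file start
  memcpy(&length, header, 2);
  memcpy(&offset, header + 4, 4);
  length = ntohs(length);
  *recv_time_ms = ntohl(offset);
  if (length < sizeof(header))
    return {};
  std::vector<uint8_t> packet(length - sizeof(header));
  if (fread(packet.data(), 1, packet.size(), fp) != packet.size())
    return {};
  return packet;
}
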
diff --git a/webrtc/modules/audio_coding/neteq/test/RTPtimeshift.cc b/webrtc/modules/audio_coding/neteq/test/RTPtimeshift.cc
index 66a00bd..1a0fc672f 100644
--- a/webrtc/modules/audio_coding/neteq/test/RTPtimeshift.cc
+++ b/webrtc/modules/audio_coding/neteq/test/RTPtimeshift.cc
@@ -60,9 +60,9 @@
}
// get new start TS and start SeqNo from arguments
- WebRtc_UWord32 TSdiff = atoi(argv[3]) - packet.timeStamp();
- WebRtc_UWord16 SNdiff = 0;
- WebRtc_UWord32 ATdiff = 0;
+ uint32_t TSdiff = atoi(argv[3]) - packet.timeStamp();
+ uint16_t SNdiff = 0;
+ uint32_t ATdiff = 0;
if (argc > 4) {
SNdiff = atoi(argv[4]) - packet.sequenceNumber();
if (argc > 5) {
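
TSdiff, SNdiff and ATdiff are deliberately computed with unsigned arithmetic so that adding them to every packet's timestamp, sequence number and arrival time shifts the whole trace, wrapping correctly modulo 2^32 and 2^16 even when the requested start value is smaller than the original. A small sketch of that wrap-safe shift (struct and function names are illustrative):

#include <cstdint>

// Sketch: shift a packet's RTP fields by precomputed unsigned deltas.
// Unsigned addition wraps naturally at 2^32 / 2^16, which is exactly
// what the tool relies on when the delta is effectively "negative".
struct PacketFields {
  uint32_t timestamp;
  uint16_t sequence_number;
  uint32_t arrival_time_ms;
};

static void ShiftPacket(PacketFields* p, uint32_t ts_diff,
                        uint16_t sn_diff, uint32_t at_diff) {
  p->timestamp += ts_diff;        // wraps mod 2^32
  p->sequence_number += sn_diff;  // wraps mod 2^16
  p->arrival_time_ms += at_diff;
}
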
diff --git a/webrtc/modules/audio_coding/neteq/unmute_signal.c b/webrtc/modules/audio_coding/neteq/unmute_signal.c
index ee9daa8..3128f21 100644
--- a/webrtc/modules/audio_coding/neteq/unmute_signal.c
+++ b/webrtc/modules/audio_coding/neteq/unmute_signal.c
@@ -17,25 +17,25 @@
#include "signal_processing_library.h"
-void WebRtcNetEQ_UnmuteSignal(WebRtc_Word16 *pw16_inVec, WebRtc_Word16 *startMuteFact,
- WebRtc_Word16 *pw16_outVec, WebRtc_Word16 unmuteFact,
- WebRtc_Word16 N)
+void WebRtcNetEQ_UnmuteSignal(int16_t *pw16_inVec, int16_t *startMuteFact,
+ int16_t *pw16_outVec, int16_t unmuteFact,
+ int16_t N)
{
int i;
- WebRtc_UWord16 w16_tmp;
- WebRtc_Word32 w32_tmp;
+ uint16_t w16_tmp;
+ int32_t w32_tmp;
- w16_tmp = (WebRtc_UWord16) *startMuteFact;
- w32_tmp = WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)w16_tmp,6) + 32;
+ w16_tmp = (uint16_t) *startMuteFact;
+ w32_tmp = WEBRTC_SPL_LSHIFT_W32((int32_t)w16_tmp,6) + 32;
for (i = 0; i < N; i++)
{
pw16_outVec[i]
- = (WebRtc_Word16) ((WEBRTC_SPL_MUL_16_16(w16_tmp, pw16_inVec[i]) + 8192) >> 14);
+ = (int16_t) ((WEBRTC_SPL_MUL_16_16(w16_tmp, pw16_inVec[i]) + 8192) >> 14);
w32_tmp += unmuteFact;
w32_tmp = WEBRTC_SPL_MAX(0, w32_tmp);
- w16_tmp = (WebRtc_UWord16) WEBRTC_SPL_RSHIFT_W32(w32_tmp, 6); /* 20 - 14 = 6 */
+ w16_tmp = (uint16_t) WEBRTC_SPL_RSHIFT_W32(w32_tmp, 6); /* 20 - 14 = 6 */
w16_tmp = WEBRTC_SPL_MIN(16384, w16_tmp);
}
- *startMuteFact = (WebRtc_Word16) w16_tmp;
+ *startMuteFact = (int16_t) w16_tmp;
}
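
The unmute ramp keeps the gain in two fixed-point resolutions: w32_tmp is the gain in Q20 (plus a rounding offset of 32), w16_tmp is the same gain in Q14, and each output sample is the input scaled by the Q14 gain with round-to-nearest. The gain grows by unmuteFact (a Q20 step) per sample and saturates at 16384, i.e. unity. A plain C++ sketch of the same ramp without the SPL macros:

#include <algorithm>
#include <cstdint>

// Sketch of the fixed-point unmute ramp: the gain is tracked in Q20 and
// applied in Q14, rising by 'increment_q20' per sample up to 1.0 (16384).
static void UnmuteRamp(const int16_t* in, int16_t* out, int n,
                       int16_t* gain_q14, int32_t increment_q20) {
  uint16_t g14 = static_cast<uint16_t>(*gain_q14);
  int32_t g20 = (static_cast<int32_t>(g14) << 6) + 32;  // Q14 -> Q20, +rounding
  for (int i = 0; i < n; ++i) {
    out[i] = static_cast<int16_t>((g14 * in[i] + 8192) >> 14);  // apply Q14 gain
    g20 = std::max<int32_t>(0, g20 + increment_q20);
    g14 = static_cast<uint16_t>(std::min<int32_t>(16384, g20 >> 6));  // clamp at unity
  }
  *gain_q14 = static_cast<int16_t>(g14);
}
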
diff --git a/webrtc/modules/audio_coding/neteq/webrtc_neteq.c b/webrtc/modules/audio_coding/neteq/webrtc_neteq.c
index 477b0d0..38b3ce4 100644
--- a/webrtc/modules/audio_coding/neteq/webrtc_neteq.c
+++ b/webrtc/modules/audio_coding/neteq/webrtc_neteq.c
@@ -30,7 +30,7 @@
if ((macroExpr) == -1) { \
(macroInstPtr)->ErrorCode = - (NETEQ_OTHER_ERROR); \
} else { \
- (macroInstPtr)->ErrorCode = -((WebRtc_Word16) (macroExpr)); \
+ (macroInstPtr)->ErrorCode = -((int16_t) (macroExpr)); \
} \
return(-1); \
} }
@@ -280,7 +280,7 @@
/* Assign functions (create not allowed in order to avoid malloc in lib) */
int WebRtcNetEQ_AssignSize(int *sizeinbytes)
{
- *sizeinbytes = (sizeof(MainInst_t) * 2) / sizeof(WebRtc_Word16);
+ *sizeinbytes = (sizeof(MainInst_t) * 2) / sizeof(int16_t);
return (0);
}
@@ -294,8 +294,8 @@
WebRtcSpl_Init();
/* Clear memory */
- WebRtcSpl_MemSetW16((WebRtc_Word16*) NetEqMainInst, 0,
- (sizeof(MainInst_t) / sizeof(WebRtc_Word16)));
+ WebRtcSpl_MemSetW16((int16_t*) NetEqMainInst, 0,
+ (sizeof(MainInst_t) / sizeof(int16_t)));
ok = WebRtcNetEQ_McuReset(&NetEqMainInst->MCUinst);
if (ok != 0)
{
@@ -361,7 +361,7 @@
MainInst_t *NetEqMainInst = (MainInst_t*) inst;
if (NetEqMainInst == NULL) return (-1);
ok = WebRtcNetEQ_PacketBufferInit(&NetEqMainInst->MCUinst.PacketBuffer_inst,
- MaxNoOfPackets, (WebRtc_Word16*) NETEQ_Buffer_Addr, (sizeinbytes >> 1));
+ MaxNoOfPackets, (int16_t*) NETEQ_Buffer_Addr, (sizeinbytes >> 1));
if (ok != 0)
{
NetEqMainInst->ErrorCode = -ok;
@@ -390,7 +390,7 @@
* -1 - Error
*/
-int WebRtcNetEQ_Init(void *inst, WebRtc_UWord16 fs)
+int WebRtcNetEQ_Init(void *inst, uint16_t fs)
{
int ok = 0;
@@ -450,8 +450,8 @@
WebRtcNetEQ_RTCPInit(&(NetEqMainInst->MCUinst.RTCP_inst), 0);
/* set BufferStat struct to zero */
- WebRtcSpl_MemSetW16((WebRtc_Word16*) &(NetEqMainInst->MCUinst.BufferStat_inst), 0,
- sizeof(BufstatsInst_t) / sizeof(WebRtc_Word16));
+ WebRtcSpl_MemSetW16((int16_t*) &(NetEqMainInst->MCUinst.BufferStat_inst), 0,
+ sizeof(BufstatsInst_t) / sizeof(int16_t));
/* reset automode */
WebRtcNetEQ_ResetAutomode(&(NetEqMainInst->MCUinst.BufferStat_inst.Automode_inst),
@@ -607,8 +607,8 @@
return (0);
}
-int WebRtcNetEQ_CodecDbGetSizeInfo(void *inst, WebRtc_Word16 *UsedEntries,
- WebRtc_Word16 *MaxEntries)
+int WebRtcNetEQ_CodecDbGetSizeInfo(void *inst, int16_t *UsedEntries,
+ int16_t *MaxEntries)
{
MainInst_t *NetEqMainInst = (MainInst_t*) inst;
if (NetEqMainInst == NULL) return (-1);
@@ -617,7 +617,7 @@
return (0);
}
-int WebRtcNetEQ_CodecDbGetCodecInfo(void *inst, WebRtc_Word16 Entry,
+int WebRtcNetEQ_CodecDbGetCodecInfo(void *inst, int16_t Entry,
enum WebRtcNetEQDecoder *codec)
{
int i;
@@ -668,7 +668,7 @@
if (NetEqMainInst == NULL) return (-1);
/* check if currently used codec is being removed */
- if (NetEqMainInst->MCUinst.current_Codec == (WebRtc_Word16) codec)
+ if (NetEqMainInst->MCUinst.current_Codec == (int16_t) codec)
{
/* set function pointers to NULL to prevent RecOut from using the codec */
NetEqMainInst->DSPinst.codec_ptr_inst.funcDecode = NULL;
@@ -695,8 +695,8 @@
* Real-time functions
*/
-int WebRtcNetEQ_RecIn(void *inst, WebRtc_Word16 *p_w16datagramstart, WebRtc_Word16 w16_RTPlen,
- WebRtc_UWord32 uw32_timeRec)
+int WebRtcNetEQ_RecIn(void *inst, int16_t *p_w16datagramstart, int16_t w16_RTPlen,
+ uint32_t uw32_timeRec)
{
int ok = 0;
RTPPacket_t RTPpacket;
@@ -745,8 +745,8 @@
* -1 - Error
*/
int WebRtcNetEQ_RecInRTPStruct(void *inst, WebRtcNetEQ_RTPInfo *rtpInfo,
- const WebRtc_UWord8 *payloadPtr, WebRtc_Word16 payloadLenBytes,
- WebRtc_UWord32 uw32_timeRec)
+ const uint8_t *payloadPtr, int16_t payloadLenBytes,
+ uint32_t uw32_timeRec)
{
int ok = 0;
RTPPacket_t RTPpacket;
@@ -769,7 +769,7 @@
RTPpacket.seqNumber = rtpInfo->sequenceNumber;
RTPpacket.timeStamp = rtpInfo->timeStamp;
RTPpacket.ssrc = rtpInfo->SSRC;
- RTPpacket.payload = (const WebRtc_Word16*) payloadPtr;
+ RTPpacket.payload = (const int16_t*) payloadPtr;
RTPpacket.payloadLen = payloadLenBytes;
RTPpacket.starts_byte1 = 0;
@@ -782,7 +782,7 @@
return (ok);
}
-int WebRtcNetEQ_RecOut(void *inst, WebRtc_Word16 *pw16_outData, WebRtc_Word16 *pw16_len)
+int WebRtcNetEQ_RecOut(void *inst, int16_t *pw16_outData, int16_t *pw16_len)
{
int ok = 0;
MainInst_t *NetEqMainInst = (MainInst_t*) inst;
@@ -836,9 +836,9 @@
* -1 - Error
*/
-int WebRtcNetEQ_RecOutMasterSlave(void *inst, WebRtc_Word16 *pw16_outData,
- WebRtc_Word16 *pw16_len, void *msInfo,
- WebRtc_Word16 isMaster)
+int WebRtcNetEQ_RecOutMasterSlave(void *inst, int16_t *pw16_outData,
+ int16_t *pw16_len, void *msInfo,
+ int16_t isMaster)
{
#ifndef NETEQ_STEREO
/* Stereo not supported */
@@ -914,8 +914,8 @@
}
/* Special RecOut that does not do any decoding. */
-int WebRtcNetEQ_RecOutNoDecode(void *inst, WebRtc_Word16 *pw16_outData,
- WebRtc_Word16 *pw16_len)
+int WebRtcNetEQ_RecOutNoDecode(void *inst, int16_t *pw16_outData,
+ int16_t *pw16_len)
{
int ok = 0;
MainInst_t *NetEqMainInst = (MainInst_t*) inst;
@@ -999,7 +999,7 @@
return (ok);
}
-int WebRtcNetEQ_GetSpeechTimeStamp(void *inst, WebRtc_UWord32 *timestamp)
+int WebRtcNetEQ_GetSpeechTimeStamp(void *inst, uint32_t *timestamp)
{
MainInst_t *NetEqMainInst = (MainInst_t*) inst;
if (NetEqMainInst == NULL) return (-1);
@@ -1101,18 +1101,18 @@
#define WEBRTC_NETEQ_CONCEALMENTFLAG_SUPRESS 0x04
#define WEBRTC_NETEQ_CONCEALMENTFLAG_CNGACTIVE 0x80
-int WebRtcNetEQ_VQmonRecOutStatistics(void *inst, WebRtc_UWord16 *validVoiceDurationMs,
- WebRtc_UWord16 *concealedVoiceDurationMs,
- WebRtc_UWord8 *concealedVoiceFlags)
+int WebRtcNetEQ_VQmonRecOutStatistics(void *inst, uint16_t *validVoiceDurationMs,
+ uint16_t *concealedVoiceDurationMs,
+ uint8_t *concealedVoiceFlags)
{
MainInst_t *NetEqMainInst = (MainInst_t*) inst;
- WebRtc_Word16 fs_mult;
- WebRtc_Word16 ms_lost;
+ int16_t fs_mult;
+ int16_t ms_lost;
if (NetEqMainInst == NULL) return (-1);
fs_mult = WebRtcSpl_DivW32W16ResW16(NetEqMainInst->MCUinst.fs, 8000);
ms_lost = WebRtcSpl_DivW32W16ResW16(
- (WebRtc_Word32) NetEqMainInst->DSPinst.w16_concealedTS, (WebRtc_Word16) (8 * fs_mult));
+ (int32_t) NetEqMainInst->DSPinst.w16_concealedTS, (int16_t) (8 * fs_mult));
if (ms_lost > NetEqMainInst->DSPinst.millisecondsPerCall) ms_lost
= NetEqMainInst->DSPinst.millisecondsPerCall;
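
ms_lost converts the concealed-timestamp counter to milliseconds: at fs = 8000 * fs_mult Hz there are 8 * fs_mult timestamp ticks per millisecond, which is the divisor passed to WebRtcSpl_DivW32W16ResW16 above. The same conversion in plain C++:

#include <cstdint>

// Sketch: concealed timestamp ticks -> milliseconds at fs = 8000 * fs_mult Hz.
static int16_t ConcealedTimestampsToMs(int32_t concealed_ts, int16_t fs_mult) {
  return static_cast<int16_t>(concealed_ts / (8 * fs_mult));
}
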
@@ -1131,8 +1131,8 @@
return (0);
}
-int WebRtcNetEQ_VQmonGetConfiguration(void *inst, WebRtc_UWord16 *absMaxDelayMs,
- WebRtc_UWord8 *adaptationRate)
+int WebRtcNetEQ_VQmonGetConfiguration(void *inst, uint16_t *absMaxDelayMs,
+ uint8_t *adaptationRate)
{
/* Dummy check the inst, just to avoid compiler warnings. */
if (inst == NULL)
@@ -1146,13 +1146,13 @@
return (0);
}
-int WebRtcNetEQ_VQmonGetRxStatistics(void *inst, WebRtc_UWord16 *avgDelayMs,
- WebRtc_UWord16 *maxDelayMs)
+int WebRtcNetEQ_VQmonGetRxStatistics(void *inst, uint16_t *avgDelayMs,
+ uint16_t *maxDelayMs)
{
MainInst_t *NetEqMainInst = (MainInst_t*) inst;
if (NetEqMainInst == NULL) return (-1);
- *avgDelayMs = (WebRtc_UWord16) (NetEqMainInst->MCUinst.BufferStat_inst.avgDelayMsQ8 >> 8);
- *maxDelayMs = (WebRtc_UWord16) NetEqMainInst->MCUinst.BufferStat_inst.maxDelayMs;
+ *avgDelayMs = (uint16_t) (NetEqMainInst->MCUinst.BufferStat_inst.avgDelayMsQ8 >> 8);
+ *maxDelayMs = (uint16_t) NetEqMainInst->MCUinst.BufferStat_inst.maxDelayMs;
return (0);
}
@@ -1166,8 +1166,8 @@
{
- WebRtc_UWord16 tempU16;
- WebRtc_UWord32 tempU32, tempU32_2;
+ uint16_t tempU16;
+ uint32_t tempU32, tempU32_2;
int numShift;
MainInst_t *NetEqMainInst = (MainInst_t*) inst;
@@ -1182,7 +1182,7 @@
if (NetEqMainInst->MCUinst.fs != 0)
{
- WebRtc_Word32 temp32;
+ int32_t temp32;
/* Query packet buffer for number of samples. */
temp32 = WebRtcNetEQ_PacketBufferGetSize(
&NetEqMainInst->MCUinst.PacketBuffer_inst,
@@ -1190,13 +1190,13 @@
/* Divide by sample rate.
* Calculate temp32 * 1000 / fs to get result in ms. */
- stats->currentBufferSize = (WebRtc_UWord16)
+ stats->currentBufferSize = (uint16_t)
WebRtcSpl_DivU32U16(temp32 * 1000, NetEqMainInst->MCUinst.fs);
/* Add number of samples yet to play in sync buffer. */
- temp32 = (WebRtc_Word32) (NetEqMainInst->DSPinst.endPosition -
+ temp32 = (int32_t) (NetEqMainInst->DSPinst.endPosition -
NetEqMainInst->DSPinst.curPosition);
- stats->currentBufferSize += (WebRtc_UWord16)
+ stats->currentBufferSize += (uint16_t)
WebRtcSpl_DivU32U16(temp32 * 1000, NetEqMainInst->MCUinst.fs);
}
else
@@ -1213,11 +1213,11 @@
{
/* preferredBufferSize = Bopt * packSizeSamples / (fs/1000) */
stats->preferredBufferSize
- = (WebRtc_UWord16) WEBRTC_SPL_MUL_16_16(
- (WebRtc_Word16) ((NetEqMainInst->MCUinst.BufferStat_inst.Automode_inst.optBufLevel) >> 8), /* optimal buffer level in packets shifted to Q0 */
+ = (uint16_t) WEBRTC_SPL_MUL_16_16(
+ (int16_t) ((NetEqMainInst->MCUinst.BufferStat_inst.Automode_inst.optBufLevel) >> 8), /* optimal buffer level in packets shifted to Q0 */
WebRtcSpl_DivW32W16ResW16(
- (WebRtc_Word32) NetEqMainInst->MCUinst.BufferStat_inst.Automode_inst.packetSpeechLenSamp, /* samples per packet */
- WebRtcSpl_DivW32W16ResW16( (WebRtc_Word32) NetEqMainInst->MCUinst.fs, (WebRtc_Word16) 1000 ) /* samples per ms */
+ (int32_t) NetEqMainInst->MCUinst.BufferStat_inst.Automode_inst.packetSpeechLenSamp, /* samples per packet */
+ WebRtcSpl_DivW32W16ResW16( (int32_t) NetEqMainInst->MCUinst.fs, (int16_t) 1000 ) /* samples per ms */
) );
/* add extra delay */
@@ -1281,13 +1281,13 @@
tempU32 >>= 1; /* right-shift 1 step */
numShift--; /* compensate in numerator */
}
- tempU16 = (WebRtc_UWord16) tempU32;
+ tempU16 = (uint16_t) tempU32;
/* do the shift of numerator */
tempU32
- = WEBRTC_SPL_SHIFT_W32( (WebRtc_UWord32) NetEqMainInst->MCUinst.lostTS, numShift);
+ = WEBRTC_SPL_SHIFT_W32( (uint32_t) NetEqMainInst->MCUinst.lostTS, numShift);
- stats->currentPacketLossRate = (WebRtc_UWord16) WebRtcSpl_DivU32U16(tempU32,
+ stats->currentPacketLossRate = (uint16_t) WebRtcSpl_DivU32U16(tempU32,
tempU16);
}
}
@@ -1307,7 +1307,7 @@
/* number of discarded samples */
tempU32_2
- = WEBRTC_SPL_MUL_16_U16( (WebRtc_Word16) NetEqMainInst->MCUinst.PacketBuffer_inst.packSizeSamples,
+ = WEBRTC_SPL_MUL_16_U16( (int16_t) NetEqMainInst->MCUinst.PacketBuffer_inst.packSizeSamples,
NetEqMainInst->MCUinst.PacketBuffer_inst.discardedPackets);
if (tempU32_2 == 0)
@@ -1344,12 +1344,12 @@
tempU32 >>= 1; /* right-shift 1 step */
numShift--; /* compensate in numerator */
}
- tempU16 = (WebRtc_UWord16) tempU32;
+ tempU16 = (uint16_t) tempU32;
/* do the shift of numerator */
tempU32 = WEBRTC_SPL_SHIFT_W32( tempU32_2, numShift);
- stats->currentDiscardRate = (WebRtc_UWord16) WebRtcSpl_DivU32U16(tempU32, tempU16);
+ stats->currentDiscardRate = (uint16_t) WebRtcSpl_DivU32U16(tempU32, tempU16);
}
}
else
@@ -1400,13 +1400,13 @@
tempU32 >>= 1; /* right-shift 1 step */
numShift--; /* compensate in numerator */
}
- tempU16 = (WebRtc_UWord16) tempU32;
+ tempU16 = (uint16_t) tempU32;
/* do the shift of numerator */
tempU32
= WEBRTC_SPL_SHIFT_W32( NetEqMainInst->DSPinst.statInst.accelerateLength, numShift);
- stats->currentAccelerateRate = (WebRtc_UWord16) WebRtcSpl_DivU32U16(tempU32,
+ stats->currentAccelerateRate = (uint16_t) WebRtcSpl_DivU32U16(tempU32,
tempU16);
}
}
@@ -1454,13 +1454,13 @@
tempU32 >>= 1; /* right-shift 1 step */
numShift--; /* compensate in numerator */
}
- tempU16 = (WebRtc_UWord16) tempU32;
+ tempU16 = (uint16_t) tempU32;
/* do the shift of numerator */
tempU32
= WEBRTC_SPL_SHIFT_W32( NetEqMainInst->DSPinst.statInst.expandLength, numShift);
- stats->currentExpandRate = (WebRtc_UWord16) WebRtcSpl_DivU32U16(tempU32, tempU16);
+ stats->currentExpandRate = (uint16_t) WebRtcSpl_DivU32U16(tempU32, tempU16);
}
}
else
@@ -1507,13 +1507,13 @@
tempU32 >>= 1; /* right-shift 1 step */
numShift--; /* compensate in numerator */
}
- tempU16 = (WebRtc_UWord16) tempU32;
+ tempU16 = (uint16_t) tempU32;
/* do the shift of numerator */
tempU32
= WEBRTC_SPL_SHIFT_W32( NetEqMainInst->DSPinst.statInst.preemptiveLength, numShift);
- stats->currentPreemptiveRate = (WebRtc_UWord16) WebRtcSpl_DivU32U16(tempU32,
+ stats->currentPreemptiveRate = (uint16_t) WebRtcSpl_DivU32U16(tempU32,
tempU16);
}
}
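
Every rate statistic in this function (loss, discard, accelerate, expand, preemptive) uses the same normalization before WebRtcSpl_DivU32U16: the denominator is right-shifted until it fits in 16 bits, the shift applied to the numerator is reduced by the same amount, and a 32-by-16-bit division produces the rate. The initial shift is chosen outside the hunks shown here, so the sketch below simply assumes the common Q14 scaling and a non-zero denominator:

#include <cstdint>

// Sketch: numerator / denominator in Q14 using only a 32-by-16-bit divide.
// Assumes denominator > 0 and that (numerator << shift) does not overflow;
// the real code picks the initial shift so that this holds.
static uint16_t RatioQ14(uint32_t numerator, uint32_t denominator) {
  int shift = 14;                 // target Q14 scaling of the result
  while (denominator > 0xFFFF) {  // normalize the denominator to 16 bits
    denominator >>= 1;
    --shift;                      // compensate in the numerator
  }
  uint32_t scaled = (shift >= 0) ? (numerator << shift)
                                 : (numerator >> -shift);
  return static_cast<uint16_t>(scaled / static_cast<uint16_t>(denominator));
}
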
diff --git a/webrtc/modules/audio_coding/neteq4/neteq_impl.cc b/webrtc/modules/audio_coding/neteq4/neteq_impl.cc
index 08a8272..7c485c6 100644
--- a/webrtc/modules/audio_coding/neteq4/neteq_impl.cc
+++ b/webrtc/modules/audio_coding/neteq4/neteq_impl.cc
@@ -548,7 +548,7 @@
}
// Update statistics.
- if ((WebRtc_Word32) (main_header.timestamp - timestamp_) >= 0 &&
+ if ((int32_t) (main_header.timestamp - timestamp_) >= 0 &&
!new_codec_) {
// Only update statistics if incoming packet is not older than last played
// out packet, and if new codec flag is not set.
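
Casting the unsigned difference to int32_t is the standard wrap-aware ordering test for 32-bit RTP timestamps: the incoming packet counts as "not older" whenever the modular difference, reinterpreted as signed, is non-negative. As a one-liner:

#include <cstdint>

// Sketch: wrap-aware "a is not older than b" for 32-bit RTP timestamps.
// Valid as long as the two timestamps are less than 2^31 ticks apart.
static bool IsNewerOrEqualTimestamp(uint32_t a, uint32_t b) {
  return static_cast<int32_t>(a - b) >= 0;
}
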
diff --git a/webrtc/modules/audio_coding/neteq4/preemptive_expand.cc b/webrtc/modules/audio_coding/neteq4/preemptive_expand.cc
index 9929078..8522368 100644
--- a/webrtc/modules/audio_coding/neteq4/preemptive_expand.cc
+++ b/webrtc/modules/audio_coding/neteq4/preemptive_expand.cc
@@ -53,7 +53,7 @@
}
PreemptiveExpand::ReturnCodes PreemptiveExpand::CheckCriteriaAndStretch(
- const WebRtc_Word16 *input, int input_length, size_t peak_index,
+ const int16_t *input, int input_length, size_t peak_index,
int16_t best_correlation, bool active_speech,
AudioMultiVector<int16_t>* output) const {
// Pre-calculate common multiplication with |fs_mult_|.
diff --git a/webrtc/modules/audio_coding/neteq4/preemptive_expand.h b/webrtc/modules/audio_coding/neteq4/preemptive_expand.h
index 96a8511..9e22f47 100644
--- a/webrtc/modules/audio_coding/neteq4/preemptive_expand.h
+++ b/webrtc/modules/audio_coding/neteq4/preemptive_expand.h
@@ -43,7 +43,7 @@
  // |output|. The number of samples added through time-stretching
  // is provided in the output |length_change_samples|. The method returns
// the outcome of the operation as an enumerator value.
- ReturnCodes Process(const WebRtc_Word16 *pw16_decoded,
+ ReturnCodes Process(const int16_t *pw16_decoded,
int len,
int oldDataLen,
AudioMultiVector<int16_t>* output,
@@ -59,7 +59,7 @@
// Checks the criteria for performing the time-stretching operation and,
// if possible, performs the time-stretching.
virtual ReturnCodes CheckCriteriaAndStretch(
- const WebRtc_Word16 *pw16_decoded, int len, size_t w16_bestIndex,
+ const int16_t *pw16_decoded, int len, size_t w16_bestIndex,
int16_t w16_bestCorr, bool w16_VAD,
AudioMultiVector<int16_t>* output) const;
diff --git a/webrtc/modules/audio_coding/neteq4/test/NETEQTEST_DummyRTPpacket.cc b/webrtc/modules/audio_coding/neteq4/test/NETEQTEST_DummyRTPpacket.cc
index b215bd3..5c47f2c 100644
--- a/webrtc/modules/audio_coding/neteq4/test/NETEQTEST_DummyRTPpacket.cc
+++ b/webrtc/modules/audio_coding/neteq4/test/NETEQTEST_DummyRTPpacket.cc
@@ -27,8 +27,8 @@
return -1;
}
- WebRtc_UWord16 length, plen;
- WebRtc_UWord32 offset;
+ uint16_t length, plen;
+ uint32_t offset;
if (fread(&length, 2, 1, fp) == 0)
{
@@ -50,10 +50,10 @@
return -1;
}
// Store in local variable until we have passed the reset below.
- WebRtc_UWord32 receiveTime = ntohl(offset);
+ uint32_t receiveTime = ntohl(offset);
// Use length here because a plen of 0 specifies rtcp.
- length = (WebRtc_UWord16) (length - _kRDHeaderLen);
+ length = (uint16_t) (length - _kRDHeaderLen);
// check buffer size
if (_datagram && _memSize < length)
@@ -63,7 +63,7 @@
if (!_datagram)
{
- _datagram = new WebRtc_UWord8[length];
+ _datagram = new uint8_t[length];
_memSize = length;
}
memset(_datagram, 0, length);
@@ -142,8 +142,8 @@
return -1;
}
- WebRtc_UWord16 length, plen;
- WebRtc_UWord32 offset;
+ uint16_t length, plen;
+ uint32_t offset;
// length including RTPplay header
length = htons(_datagramLen + _kRDHeaderLen);
diff --git a/webrtc/modules/audio_coding/neteq4/test/NETEQTEST_RTPpacket.cc b/webrtc/modules/audio_coding/neteq4/test/NETEQTEST_RTPpacket.cc
index 49c8bc9..c6d3270 100644
--- a/webrtc/modules/audio_coding/neteq4/test/NETEQTEST_RTPpacket.cc
+++ b/webrtc/modules/audio_coding/neteq4/test/NETEQTEST_RTPpacket.cc
@@ -103,8 +103,8 @@
return(-1);
}
- WebRtc_UWord16 length, plen;
- WebRtc_UWord32 offset;
+ uint16_t length, plen;
+ uint32_t offset;
if (fread(&length,2,1,fp)==0)
{
@@ -125,10 +125,10 @@
reset();
return(-1);
}
- WebRtc_UWord32 receiveTime = ntohl(offset); // store in local variable until we have passed the reset below
+ uint32_t receiveTime = ntohl(offset); // store in local variable until we have passed the reset below
// Use length here because a plen of 0 specifies rtcp
- length = (WebRtc_UWord16) (length - _kRDHeaderLen);
+ length = (uint16_t) (length - _kRDHeaderLen);
// check buffer size
if (_datagram && _memSize < length)
@@ -138,7 +138,7 @@
if (!_datagram)
{
- _datagram = new WebRtc_UWord8[length];
+ _datagram = new uint8_t[length];
_memSize = length;
}
@@ -178,7 +178,7 @@
if (!_datagram)
{
- _datagram = new WebRtc_UWord8[length];
+ _datagram = new uint8_t[length];
_memSize = length;
}
@@ -210,8 +210,8 @@
return -1;
}
- WebRtc_UWord16 length, plen;
- WebRtc_UWord32 offset;
+ uint16_t length, plen;
+ uint32_t offset;
// length including RTPplay header
length = htons(_datagramLen + _kRDHeaderLen);
@@ -247,7 +247,7 @@
}
-void NETEQTEST_RTPpacket::blockPT(WebRtc_UWord8 pt)
+void NETEQTEST_RTPpacket::blockPT(uint8_t pt)
{
_blockList[pt] = true;
}
@@ -300,7 +300,7 @@
}
}
-WebRtc_UWord8 * NETEQTEST_RTPpacket::datagram() const
+uint8_t * NETEQTEST_RTPpacket::datagram() const
{
if (_datagramLen > 0)
{
@@ -312,7 +312,7 @@
}
}
-WebRtc_UWord8 * NETEQTEST_RTPpacket::payload() const
+uint8_t * NETEQTEST_RTPpacket::payload() const
{
if (_payloadLen > 0)
{
@@ -324,13 +324,13 @@
}
}
-WebRtc_Word16 NETEQTEST_RTPpacket::payloadLen()
+int16_t NETEQTEST_RTPpacket::payloadLen()
{
parseHeader();
return _payloadLen;
}
-WebRtc_Word16 NETEQTEST_RTPpacket::dataLen() const
+int16_t NETEQTEST_RTPpacket::dataLen() const
{
return _datagramLen;
}
@@ -345,7 +345,7 @@
return _lost;
}
-WebRtc_UWord8 NETEQTEST_RTPpacket::payloadType() const
+uint8_t NETEQTEST_RTPpacket::payloadType() const
{
webrtc::WebRtcRTPHeader tempRTPinfo;
@@ -361,7 +361,7 @@
return tempRTPinfo.header.payloadType;
}
-WebRtc_UWord16 NETEQTEST_RTPpacket::sequenceNumber() const
+uint16_t NETEQTEST_RTPpacket::sequenceNumber() const
{
webrtc::WebRtcRTPHeader tempRTPinfo;
@@ -377,7 +377,7 @@
return tempRTPinfo.header.sequenceNumber;
}
-WebRtc_UWord32 NETEQTEST_RTPpacket::timeStamp() const
+uint32_t NETEQTEST_RTPpacket::timeStamp() const
{
webrtc::WebRtcRTPHeader tempRTPinfo;
@@ -393,7 +393,7 @@
return tempRTPinfo.header.timestamp;
}
-WebRtc_UWord32 NETEQTEST_RTPpacket::SSRC() const
+uint32_t NETEQTEST_RTPpacket::SSRC() const
{
webrtc::WebRtcRTPHeader tempRTPinfo;
@@ -409,7 +409,7 @@
return tempRTPinfo.header.ssrc;
}
-WebRtc_UWord8 NETEQTEST_RTPpacket::markerBit() const
+uint8_t NETEQTEST_RTPpacket::markerBit() const
{
webrtc::WebRtcRTPHeader tempRTPinfo;
@@ -427,7 +427,7 @@
-int NETEQTEST_RTPpacket::setPayloadType(WebRtc_UWord8 pt)
+int NETEQTEST_RTPpacket::setPayloadType(uint8_t pt)
{
if (_datagramLen < 12)
@@ -446,7 +446,7 @@
}
-int NETEQTEST_RTPpacket::setSequenceNumber(WebRtc_UWord16 sn)
+int NETEQTEST_RTPpacket::setSequenceNumber(uint16_t sn)
{
if (_datagramLen < 12)
@@ -466,7 +466,7 @@
}
-int NETEQTEST_RTPpacket::setTimeStamp(WebRtc_UWord32 ts)
+int NETEQTEST_RTPpacket::setTimeStamp(uint32_t ts)
{
if (_datagramLen < 12)
@@ -488,7 +488,7 @@
}
-int NETEQTEST_RTPpacket::setSSRC(WebRtc_UWord32 ssrc)
+int NETEQTEST_RTPpacket::setSSRC(uint32_t ssrc)
{
if (_datagramLen < 12)
@@ -510,7 +510,7 @@
}
-int NETEQTEST_RTPpacket::setMarkerBit(WebRtc_UWord8 mb)
+int NETEQTEST_RTPpacket::setMarkerBit(uint8_t mb)
{
if (_datagramLen < 12)
@@ -619,7 +619,7 @@
}
-void NETEQTEST_RTPpacket::makeRTPheader(unsigned char* rtp_data, WebRtc_UWord8 payloadType, WebRtc_UWord16 seqNo, WebRtc_UWord32 timestamp, WebRtc_UWord32 ssrc, WebRtc_UWord8 markerBit) const
+void NETEQTEST_RTPpacket::makeRTPheader(unsigned char* rtp_data, uint8_t payloadType, uint16_t seqNo, uint32_t timestamp, uint32_t ssrc, uint8_t markerBit) const
{
rtp_data[0]=(unsigned char)0x80;
if (markerBit)
@@ -646,11 +646,11 @@
rtp_data[11]=(unsigned char)(ssrc & 0xFF);
}
-WebRtc_UWord16
+uint16_t
NETEQTEST_RTPpacket::parseRTPheader(webrtc::WebRtcRTPHeader* RTPinfo,
- WebRtc_UWord8 **payloadPtr) const
+ uint8_t **payloadPtr) const
{
- WebRtc_Word16 *rtp_data = (WebRtc_Word16 *) _datagram;
+ int16_t *rtp_data = (int16_t *) _datagram;
int i_P, i_X, i_CC;
assert(_datagramLen >= 12);
@@ -662,60 +662,60 @@
if (payloadPtr)
{
- *payloadPtr = (WebRtc_UWord8*) &rtp_data[i_startPosition >> 1];
+ *payloadPtr = (uint8_t*) &rtp_data[i_startPosition >> 1];
}
- return (WebRtc_UWord16) (_datagramLen - i_startPosition - i_padlength);
+ return (uint16_t) (_datagramLen - i_startPosition - i_padlength);
}
void NETEQTEST_RTPpacket::parseBasicHeader(webrtc::WebRtcRTPHeader* RTPinfo,
int *i_P, int *i_X, int *i_CC) const
{
- WebRtc_Word16 *rtp_data = (WebRtc_Word16 *) _datagram;
+ int16_t *rtp_data = (int16_t *) _datagram;
if (_datagramLen < 12)
{
assert(false);
return;
}
- *i_P=(((WebRtc_UWord16)(rtp_data[0] & 0x20))>>5); /* Extract the P bit */
- *i_X=(((WebRtc_UWord16)(rtp_data[0] & 0x10))>>4); /* Extract the X bit */
- *i_CC=(WebRtc_UWord16)(rtp_data[0] & 0xF); /* Get the CC number */
+ *i_P=(((uint16_t)(rtp_data[0] & 0x20))>>5); /* Extract the P bit */
+ *i_X=(((uint16_t)(rtp_data[0] & 0x10))>>4); /* Extract the X bit */
+ *i_CC=(uint16_t)(rtp_data[0] & 0xF); /* Get the CC number */
/* Get the marker bit */
- RTPinfo->header.markerBit = (WebRtc_UWord8) ((rtp_data[0] >> 15) & 0x01);
+ RTPinfo->header.markerBit = (uint8_t) ((rtp_data[0] >> 15) & 0x01);
/* Get the coder type */
- RTPinfo->header.payloadType = (WebRtc_UWord8) ((rtp_data[0] >> 8) & 0x7F);
+ RTPinfo->header.payloadType = (uint8_t) ((rtp_data[0] >> 8) & 0x7F);
/* Get the packet number */
RTPinfo->header.sequenceNumber =
- ((( ((WebRtc_UWord16)rtp_data[1]) >> 8) & 0xFF) |
- ( ((WebRtc_UWord16)(rtp_data[1] & 0xFF)) << 8));
+ ((( ((uint16_t)rtp_data[1]) >> 8) & 0xFF) |
+ ( ((uint16_t)(rtp_data[1] & 0xFF)) << 8));
/* Get timestamp */
- RTPinfo->header.timestamp = ((((WebRtc_UWord16)rtp_data[2]) & 0xFF) << 24) |
- ((((WebRtc_UWord16)rtp_data[2]) & 0xFF00) << 8) |
- ((((WebRtc_UWord16)rtp_data[3]) >> 8) & 0xFF) |
- ((((WebRtc_UWord16)rtp_data[3]) & 0xFF) << 8);
+ RTPinfo->header.timestamp = ((((uint16_t)rtp_data[2]) & 0xFF) << 24) |
+ ((((uint16_t)rtp_data[2]) & 0xFF00) << 8) |
+ ((((uint16_t)rtp_data[3]) >> 8) & 0xFF) |
+ ((((uint16_t)rtp_data[3]) & 0xFF) << 8);
/* Get the SSRC */
- RTPinfo->header.ssrc = ((((WebRtc_UWord16)rtp_data[4]) & 0xFF) << 24) |
- ((((WebRtc_UWord16)rtp_data[4]) & 0xFF00) << 8) |
- ((((WebRtc_UWord16)rtp_data[5]) >> 8) & 0xFF) |
- ((((WebRtc_UWord16)rtp_data[5]) & 0xFF) << 8);
+ RTPinfo->header.ssrc = ((((uint16_t)rtp_data[4]) & 0xFF) << 24) |
+ ((((uint16_t)rtp_data[4]) & 0xFF00) << 8) |
+ ((((uint16_t)rtp_data[5]) >> 8) & 0xFF) |
+ ((((uint16_t)rtp_data[5]) & 0xFF) << 8);
}
int NETEQTEST_RTPpacket::calcHeaderLength(int i_X, int i_CC) const
{
int i_extlength = 0;
- WebRtc_Word16 *rtp_data = (WebRtc_Word16 *) _datagram;
+ int16_t *rtp_data = (int16_t *) _datagram;
if (i_X == 1)
{
// Extension header exists.
- // Find out how many WebRtc_Word32 it consists of.
+ // Find out how many int32_t it consists of.
assert(_datagramLen > 2 * (7 + 2 * i_CC));
if (_datagramLen > 2 * (7 + 2 * i_CC))
{
- i_extlength = (((((WebRtc_UWord16) rtp_data[7 + 2 * i_CC]) >> 8)
- & 0xFF) | (((WebRtc_UWord16) (rtp_data[7 + 2 * i_CC] & 0xFF))
+ i_extlength = (((((uint16_t) rtp_data[7 + 2 * i_CC]) >> 8)
+ & 0xFF) | (((uint16_t) (rtp_data[7 + 2 * i_CC] & 0xFF))
<< 8)) + 1;
}
}
@@ -725,7 +725,7 @@
int NETEQTEST_RTPpacket::calcPadLength(int i_P) const
{
- WebRtc_Word16 *rtp_data = (WebRtc_Word16 *) _datagram;
+ int16_t *rtp_data = (int16_t *) _datagram;
if (i_P == 1)
{
/* Padding exists. Find out how many bytes the padding consists of. */
@@ -737,7 +737,7 @@
else
{
/* even number of bytes => last byte in lower byte */
- return ((WebRtc_UWord16) rtp_data[(_datagramLen >> 1) - 1]) >> 8;
+ return ((uint16_t) rtp_data[(_datagramLen >> 1) - 1]) >> 8;
}
}
return 0;
@@ -752,9 +752,9 @@
return;
}
- WebRtc_UWord8 *readDataPtr = _payloadPtr;
- WebRtc_UWord8 *writeDataPtr = _payloadPtr;
- WebRtc_UWord8 *slaveData = slaveRtp->_payloadPtr;
+ uint8_t *readDataPtr = _payloadPtr;
+ uint8_t *writeDataPtr = _payloadPtr;
+ uint8_t *slaveData = slaveRtp->_payloadPtr;
while (readDataPtr - _payloadPtr < _payloadLen)
{
@@ -821,8 +821,8 @@
parseHeader();
- WebRtc_UWord8* ptr = payload();
- WebRtc_UWord8* payloadEndPtr = ptr + payloadLen();
+ uint8_t* ptr = payload();
+ uint8_t* payloadEndPtr = ptr + payloadLen();
int num_encodings = 0;
int total_len = 0;
@@ -833,7 +833,7 @@
{
// Header found.
red.header.payloadType = ptr[0] & 0x7F;
- WebRtc_UWord32 offset = (ptr[1] << 6) + ((ptr[2] & 0xFC) >> 2);
+ uint32_t offset = (ptr[1] << 6) + ((ptr[2] & 0xFC) >> 2);
red.header.sequenceNumber = sequenceNumber();
red.header.timestamp = timeStamp() - offset;
red.header.markerBit = markerBit();
@@ -865,6 +865,6 @@
for (int i = 0; i < _payloadLen; ++i)
{
- _payloadPtr[i] = static_cast<WebRtc_UWord8>(rand());
+ _payloadPtr[i] = static_cast<uint8_t>(rand());
}
}
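
parseBasicHeader and parseRTPheader read the header through an int16_t view of the datagram, which is why every field needs the byte swizzling above. For comparison, a byte-oriented sketch of the same fixed 12-byte RTP header parse (the struct and field names are illustrative):

#include <cstddef>
#include <cstdint>

struct RtpHeaderFields {
  bool marker;
  uint8_t payload_type;
  uint16_t sequence_number;
  uint32_t timestamp;
  uint32_t ssrc;
};

// Sketch: parse the fixed part of an RTP header (RFC 3550) from raw bytes.
// Returns false if the buffer is too short to contain the 12-byte header.
static bool ParseRtpHeader(const uint8_t* data, size_t len, RtpHeaderFields* h) {
  if (len < 12) return false;
  h->marker = (data[1] & 0x80) != 0;
  h->payload_type = data[1] & 0x7F;
  h->sequence_number = static_cast<uint16_t>((data[2] << 8) | data[3]);
  h->timestamp = (static_cast<uint32_t>(data[4]) << 24) |
                 (static_cast<uint32_t>(data[5]) << 16) |
                 (static_cast<uint32_t>(data[6]) << 8) |
                  static_cast<uint32_t>(data[7]);
  h->ssrc = (static_cast<uint32_t>(data[8]) << 24) |
            (static_cast<uint32_t>(data[9]) << 16) |
            (static_cast<uint32_t>(data[10]) << 8) |
             static_cast<uint32_t>(data[11]);
  return true;
}
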
diff --git a/webrtc/modules/audio_coding/neteq4/test/NETEQTEST_RTPpacket.h b/webrtc/modules/audio_coding/neteq4/test/NETEQTEST_RTPpacket.h
index 1199d97..684a160 100644
--- a/webrtc/modules/audio_coding/neteq4/test/NETEQTEST_RTPpacket.h
+++ b/webrtc/modules/audio_coding/neteq4/test/NETEQTEST_RTPpacket.h
@@ -35,31 +35,31 @@
virtual int readFromFile(FILE *fp);
int readFixedFromFile(FILE *fp, size_t len);
virtual int writeToFile(FILE *fp);
- void blockPT(WebRtc_UWord8 pt);
- //WebRtc_Word16 payloadType();
+ void blockPT(uint8_t pt);
+ //int16_t payloadType();
void parseHeader();
void parseHeader(webrtc::WebRtcRTPHeader* rtp_header);
const webrtc::WebRtcRTPHeader* RTPinfo() const;
- WebRtc_UWord8 * datagram() const;
- WebRtc_UWord8 * payload() const;
- WebRtc_Word16 payloadLen();
- WebRtc_Word16 dataLen() const;
+ uint8_t * datagram() const;
+ uint8_t * payload() const;
+ int16_t payloadLen();
+ int16_t dataLen() const;
bool isParsed() const;
bool isLost() const;
- WebRtc_UWord32 time() const { return _receiveTime; };
+ uint32_t time() const { return _receiveTime; };
- WebRtc_UWord8 payloadType() const;
- WebRtc_UWord16 sequenceNumber() const;
- WebRtc_UWord32 timeStamp() const;
- WebRtc_UWord32 SSRC() const;
- WebRtc_UWord8 markerBit() const;
+ uint8_t payloadType() const;
+ uint16_t sequenceNumber() const;
+ uint32_t timeStamp() const;
+ uint32_t SSRC() const;
+ uint8_t markerBit() const;
- int setPayloadType(WebRtc_UWord8 pt);
- int setSequenceNumber(WebRtc_UWord16 sn);
- int setTimeStamp(WebRtc_UWord32 ts);
- int setSSRC(WebRtc_UWord32 ssrc);
- int setMarkerBit(WebRtc_UWord8 mb);
- void setTime(WebRtc_UWord32 receiveTime) { _receiveTime = receiveTime; };
+ int setPayloadType(uint8_t pt);
+ int setSequenceNumber(uint16_t sn);
+ int setTimeStamp(uint32_t ts);
+ int setSSRC(uint32_t ssrc);
+ int setMarkerBit(uint8_t mb);
+ void setTime(uint32_t receiveTime) { _receiveTime = receiveTime; };
int setRTPheader(const webrtc::WebRtcRTPHeader* RTPinfo);
@@ -69,16 +69,16 @@
void scramblePayload(void);
- WebRtc_UWord8 * _datagram;
- WebRtc_UWord8 * _payloadPtr;
+ uint8_t * _datagram;
+ uint8_t * _payloadPtr;
int _memSize;
- WebRtc_Word16 _datagramLen;
- WebRtc_Word16 _payloadLen;
+ int16_t _datagramLen;
+ int16_t _payloadLen;
webrtc::WebRtcRTPHeader _rtpInfo;
bool _rtpParsed;
- WebRtc_UWord32 _receiveTime;
+ uint32_t _receiveTime;
bool _lost;
- std::map<WebRtc_UWord8, bool> _blockList;
+ std::map<uint8_t, bool> _blockList;
protected:
static const int _kRDHeaderLen;
@@ -89,12 +89,12 @@
int calcHeaderLength(int i_X, int i_CC) const;
private:
- void makeRTPheader(unsigned char* rtp_data, WebRtc_UWord8 payloadType,
- WebRtc_UWord16 seqNo, WebRtc_UWord32 timestamp,
- WebRtc_UWord32 ssrc, WebRtc_UWord8 markerBit) const;
- WebRtc_UWord16 parseRTPheader(webrtc::WebRtcRTPHeader* RTPinfo,
- WebRtc_UWord8 **payloadPtr = NULL) const;
- WebRtc_UWord16 parseRTPheader(WebRtc_UWord8 **payloadPtr = NULL)
+ void makeRTPheader(unsigned char* rtp_data, uint8_t payloadType,
+ uint16_t seqNo, uint32_t timestamp,
+ uint32_t ssrc, uint8_t markerBit) const;
+ uint16_t parseRTPheader(webrtc::WebRtcRTPHeader* RTPinfo,
+ uint8_t **payloadPtr = NULL) const;
+ uint16_t parseRTPheader(uint8_t **payloadPtr = NULL)
{ return parseRTPheader(&_rtpInfo, payloadPtr);};
int calcPadLength(int i_P) const;
void splitStereoSample(NETEQTEST_RTPpacket* slaveRtp, int stride);
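
For orientation, the typical use of this helper in the test tools is to read packets from an rtpplay dump and inspect the header fields. A minimal sketch against the declarations above, assuming (as the error paths in the .cc suggest) that readFromFile() returns a negative value at end of file; error handling and the file-header skip are elided:

#include <cstdio>
// #include "NETEQTEST_RTPpacket.h"  // declarations shown in the diff above

// Sketch: iterate over an rtpplay file and print per-packet header fields.
static void DumpPackets(FILE* fp) {
  NETEQTEST_RTPpacket packet;
  while (packet.readFromFile(fp) >= 0) {
    packet.parseHeader();
    printf("pt=%d seq=%d ts=%u len=%d recv=%u ms\n",
           static_cast<int>(packet.payloadType()),
           static_cast<int>(packet.sequenceNumber()),
           packet.timeStamp(),
           static_cast<int>(packet.payloadLen()),
           packet.time());
  }
}
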
diff --git a/webrtc/modules/audio_coding/neteq4/test/RTPencode.cc b/webrtc/modules/audio_coding/neteq4/test/RTPencode.cc
index c79d5db..2c38556 100644
--- a/webrtc/modules/audio_coding/neteq4/test/RTPencode.cc
+++ b/webrtc/modules/audio_coding/neteq4/test/RTPencode.cc
@@ -75,12 +75,12 @@
int NetEQTest_init_coders(webrtc::NetEqDecoder coder, int enc_frameSize, int bitrate, int sampfreq , int vad, int numChannels);
void defineCodecs(webrtc::NetEqDecoder *usedCodec, int *noOfCodecs );
int NetEQTest_free_coders(webrtc::NetEqDecoder coder, int numChannels);
-int NetEQTest_encode(int coder, WebRtc_Word16 *indata, int frameLen, unsigned char * encoded,int sampleRate , int * vad, int useVAD, int bitrate, int numChannels);
-void makeRTPheader(unsigned char* rtp_data, int payloadType, int seqNo, WebRtc_UWord32 timestamp, WebRtc_UWord32 ssrc);
-int makeRedundantHeader(unsigned char* rtp_data, int *payloadType, int numPayloads, WebRtc_UWord32 *timestamp, WebRtc_UWord16 *blockLen,
- int seqNo, WebRtc_UWord32 ssrc);
+int NetEQTest_encode(int coder, int16_t *indata, int frameLen, unsigned char * encoded,int sampleRate , int * vad, int useVAD, int bitrate, int numChannels);
+void makeRTPheader(unsigned char* rtp_data, int payloadType, int seqNo, uint32_t timestamp, uint32_t ssrc);
+int makeRedundantHeader(unsigned char* rtp_data, int *payloadType, int numPayloads, uint32_t *timestamp, uint16_t *blockLen,
+ int seqNo, uint32_t ssrc);
int makeDTMFpayload(unsigned char* payload_data, int Event, int End, int Volume, int Duration);
-void stereoDeInterleave(WebRtc_Word16* audioSamples, int numSamples);
+void stereoDeInterleave(int16_t* audioSamples, int numSamples);
void stereoInterleave(unsigned char* data, int dataLen, int stride);
/*********************/
@@ -201,11 +201,11 @@
#endif
#ifdef CODEC_AMR
AMR_encinst_t *AMRenc_inst[2];
- WebRtc_Word16 AMR_bitrate;
+ int16_t AMR_bitrate;
#endif
#ifdef CODEC_AMRWB
AMRWB_encinst_t *AMRWBenc_inst[2];
- WebRtc_Word16 AMRWB_bitrate;
+ int16_t AMRWB_bitrate;
#endif
#ifdef CODEC_ILBC
iLBC_encinst_t *iLBCenc_inst[2];
@@ -249,21 +249,21 @@
int useVAD, vad;
int useRed=0;
int len, enc_len;
- WebRtc_Word16 org_data[4000];
+ int16_t org_data[4000];
unsigned char rtp_data[8000];
- WebRtc_Word16 seqNo=0xFFF;
- WebRtc_UWord32 ssrc=1235412312;
- WebRtc_UWord32 timestamp=0xAC1245;
- WebRtc_UWord16 length, plen;
- WebRtc_UWord32 offset;
+ int16_t seqNo=0xFFF;
+ uint32_t ssrc=1235412312;
+ uint32_t timestamp=0xAC1245;
+ uint16_t length, plen;
+ uint32_t offset;
double sendtime = 0;
int red_PT[2] = {0};
- WebRtc_UWord32 red_TS[2] = {0};
- WebRtc_UWord16 red_len[2] = {0};
+ uint32_t red_TS[2] = {0};
+ uint16_t red_len[2] = {0};
int RTPheaderLen=12;
unsigned char red_data[8000];
#ifdef INSERT_OLD_PACKETS
- WebRtc_UWord16 old_length, old_plen;
+ uint16_t old_length, old_plen;
int old_enc_len;
int first_old_packet=1;
unsigned char old_rtp_data[8000];
@@ -272,7 +272,7 @@
#ifdef INSERT_DTMF_PACKETS
int NTone = 1;
int DTMFfirst = 1;
- WebRtc_UWord32 DTMFtimestamp;
+ uint32_t DTMFtimestamp;
bool dtmfSent = false;
#endif
bool usingStereo = false;
@@ -530,7 +530,7 @@
/* write file header */
//fprintf(out_file, "#!RTPencode%s\n", "1.0");
fprintf(out_file, "#!rtpplay%s \n", "1.0"); // this is the string that rtpplay needs
- WebRtc_UWord32 dummy_variable = 0; // should be converted to network endian format, but does not matter when 0
+ uint32_t dummy_variable = 0; // should be converted to network endian format, but does not matter when 0
if (fwrite(&dummy_variable, 4, 1, out_file) != 1) {
return -1;
}
@@ -592,7 +592,7 @@
/* write RTP packet to file */
length = htons(12 + enc_len + 8);
plen = htons(12 + enc_len);
- offset = (WebRtc_UWord32) sendtime; //(timestamp/(fs/1000));
+ offset = (uint32_t) sendtime; //(timestamp/(fs/1000));
offset = htonl(offset);
if (fwrite(&length, 2, 1, out_file) != 1) {
return -1;
@@ -687,7 +687,7 @@
/* write RTP packet to file */
length = htons(12 + enc_len + 8);
plen = htons(12 + enc_len);
- offset = (WebRtc_UWord32) sendtime;
+ offset = (uint32_t) sendtime;
//(timestamp/(fs/1000));
offset = htonl(offset);
if (fwrite(&length, 2, 1, out_file) != 1) {
@@ -755,7 +755,7 @@
if(usedCodec==webrtc::kDecoderISAC)
{
assert(!usingStereo); // Cannot handle stereo yet
- red_len[0] = WebRtcIsac_GetRedPayload(ISAC_inst[0], (WebRtc_Word16*)red_data);
+ red_len[0] = WebRtcIsac_GetRedPayload(ISAC_inst[0], (int16_t*)red_data);
}
else
{
@@ -1532,13 +1532,13 @@
-int NetEQTest_encode(int coder, WebRtc_Word16 *indata, int frameLen, unsigned char * encoded,int sampleRate ,
+int NetEQTest_encode(int coder, int16_t *indata, int frameLen, unsigned char * encoded,int sampleRate ,
int * vad, int useVAD, int bitrate, int numChannels){
short cdlen = 0;
- WebRtc_Word16 *tempdata;
+ int16_t *tempdata;
static int first_cng=1;
- WebRtc_Word16 tempLen;
+ int16_t tempLen;
*vad =1;
@@ -1601,29 +1601,29 @@
/* Encode with the selected coder type */
if (coder==webrtc::kDecoderPCMu) { /*g711 u-law */
#ifdef CODEC_G711
- cdlen = WebRtcG711_EncodeU(G711state[k], indata, frameLen, (WebRtc_Word16*) encoded);
+ cdlen = WebRtcG711_EncodeU(G711state[k], indata, frameLen, (int16_t*) encoded);
#endif
}
else if (coder==webrtc::kDecoderPCMa) { /*g711 A-law */
#ifdef CODEC_G711
- cdlen = WebRtcG711_EncodeA(G711state[k], indata, frameLen, (WebRtc_Word16*) encoded);
+ cdlen = WebRtcG711_EncodeA(G711state[k], indata, frameLen, (int16_t*) encoded);
}
#endif
#ifdef CODEC_PCM16B
else if ((coder==webrtc::kDecoderPCM16B)||(coder==webrtc::kDecoderPCM16Bwb)||
(coder==webrtc::kDecoderPCM16Bswb32kHz)||(coder==webrtc::kDecoderPCM16Bswb48kHz)) { /*pcm16b (8kHz, 16kHz, 32kHz or 48kHz) */
- cdlen = WebRtcPcm16b_EncodeW16(indata, frameLen, (WebRtc_Word16*) encoded);
+ cdlen = WebRtcPcm16b_EncodeW16(indata, frameLen, (int16_t*) encoded);
}
#endif
#ifdef CODEC_G722
else if (coder==webrtc::kDecoderG722) { /*g722 */
- cdlen=WebRtcG722_Encode(g722EncState[k], indata, frameLen, (WebRtc_Word16*)encoded);
+ cdlen=WebRtcG722_Encode(g722EncState[k], indata, frameLen, (int16_t*)encoded);
cdlen=frameLen>>1;
}
#endif
#ifdef CODEC_ILBC
else if (coder==webrtc::kDecoderILBC) { /*iLBC */
- cdlen=WebRtcIlbcfix_Encode(iLBCenc_inst[k], indata,frameLen,(WebRtc_Word16*)encoded);
+ cdlen=WebRtcIlbcfix_Encode(iLBCenc_inst[k], indata,frameLen,(int16_t*)encoded);
}
#endif
#if (defined(CODEC_ISAC) || defined(NETEQ_ISACFIX_CODEC)) // TODO(hlundin): remove all NETEQ_ISACFIX_CODEC
@@ -1632,9 +1632,9 @@
cdlen=0;
while (cdlen<=0) {
#ifdef CODEC_ISAC /* floating point */
- cdlen=WebRtcIsac_Encode(ISAC_inst[k],&indata[noOfCalls*160],(WebRtc_Word16*)encoded);
+ cdlen=WebRtcIsac_Encode(ISAC_inst[k],&indata[noOfCalls*160],(int16_t*)encoded);
#else /* fixed point */
- cdlen=WebRtcIsacfix_Encode(ISAC_inst[k],&indata[noOfCalls*160],(WebRtc_Word16*)encoded);
+ cdlen=WebRtcIsacfix_Encode(ISAC_inst[k],&indata[noOfCalls*160],(int16_t*)encoded);
#endif
noOfCalls++;
}
@@ -1645,7 +1645,7 @@
int noOfCalls=0;
cdlen=0;
while (cdlen<=0) {
- cdlen=WebRtcIsac_Encode(ISACSWB_inst[k],&indata[noOfCalls*320],(WebRtc_Word16*)encoded);
+ cdlen=WebRtcIsac_Encode(ISACSWB_inst[k],&indata[noOfCalls*320],(int16_t*)encoded);
noOfCalls++;
}
}
@@ -1677,7 +1677,7 @@
-void makeRTPheader(unsigned char* rtp_data, int payloadType, int seqNo, WebRtc_UWord32 timestamp, WebRtc_UWord32 ssrc){
+void makeRTPheader(unsigned char* rtp_data, int payloadType, int seqNo, uint32_t timestamp, uint32_t ssrc){
rtp_data[0]=(unsigned char)0x80;
rtp_data[1]=(unsigned char)(payloadType & 0xFF);
@@ -1697,13 +1697,13 @@
}
-int makeRedundantHeader(unsigned char* rtp_data, int *payloadType, int numPayloads, WebRtc_UWord32 *timestamp, WebRtc_UWord16 *blockLen,
- int seqNo, WebRtc_UWord32 ssrc)
+int makeRedundantHeader(unsigned char* rtp_data, int *payloadType, int numPayloads, uint32_t *timestamp, uint16_t *blockLen,
+ int seqNo, uint32_t ssrc)
{
int i;
unsigned char *rtpPointer;
- WebRtc_UWord16 offset;
+ uint16_t offset;
/* first create "standard" RTP header */
makeRTPheader(rtp_data, NETEQ_CODEC_RED_PT, seqNo, timestamp[numPayloads-1], ssrc);
@@ -1713,7 +1713,7 @@
/* add one sub-header for each redundant payload (not the primary) */
for(i=0; i<numPayloads-1; i++) { /* |0 1 2 3 4 5 6 7| */
if(blockLen[i] > 0) {
- offset = (WebRtc_UWord16) (timestamp[numPayloads-1] - timestamp[i]);
+ offset = (uint16_t) (timestamp[numPayloads-1] - timestamp[i]);
rtpPointer[0] = (unsigned char) ( 0x80 | (0x7F & payloadType[i]) ); /* |F| block PT | */
rtpPointer[1] = (unsigned char) ((offset >> 6) & 0xFF); /* | timestamp- | */
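
Each redundant block in the RED header (RFC 2198) is described by a 4-byte sub-header: 1 F bit, 7 bits of block payload type, a 14-bit timestamp offset and a 10-bit block length, which is exactly what the shifts above assemble. A sketch that writes one such sub-header from those three fields:

#include <cstdint>

// Sketch: one 4-byte RED sub-block header (RFC 2198).
// F=1 marks "more blocks follow"; offset is the 14-bit timestamp offset
// from the primary encoding and block_len the 10-bit payload length.
static void WriteRedSubHeader(uint8_t* out, uint8_t block_pt,
                              uint16_t offset, uint16_t block_len) {
  out[0] = 0x80 | (block_pt & 0x7F);                        // |F| block PT |
  out[1] = static_cast<uint8_t>((offset >> 6) & 0xFF);      // offset, high 8 bits
  out[2] = static_cast<uint8_t>(((offset & 0x3F) << 2) |    // offset, low 6 bits
                                ((block_len >> 8) & 0x03)); //  + length, high 2 bits
  out[3] = static_cast<uint8_t>(block_len & 0xFF);          // length, low 8 bits
}
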
@@ -1751,22 +1751,22 @@
return(4);
}
-void stereoDeInterleave(WebRtc_Word16* audioSamples, int numSamples)
+void stereoDeInterleave(int16_t* audioSamples, int numSamples)
{
- WebRtc_Word16 *tempVec;
- WebRtc_Word16 *readPtr, *writeL, *writeR;
+ int16_t *tempVec;
+ int16_t *readPtr, *writeL, *writeR;
if (numSamples <= 0)
return;
- tempVec = (WebRtc_Word16 *) malloc(sizeof(WebRtc_Word16) * numSamples);
+ tempVec = (int16_t *) malloc(sizeof(int16_t) * numSamples);
if (tempVec == NULL) {
printf("Error allocating memory\n");
exit(0);
}
- memcpy(tempVec, audioSamples, numSamples*sizeof(WebRtc_Word16));
+ memcpy(tempVec, audioSamples, numSamples*sizeof(int16_t));
writeL = audioSamples;
writeR = &audioSamples[numSamples/2];
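
makeRTPheader, whose full body lies outside the hunks shown, writes the fixed 12-byte RTP header in network byte order: version 2 in the first byte, then the payload type, and big-endian sequence number, timestamp and SSRC. A standalone sketch of that layout (the marker-bit parameter is an addition for illustration; the encoder tool itself leaves it cleared):

#include <cstdint>

// Sketch: serialize a fixed 12-byte RTP header (RFC 3550), big-endian fields.
static void WriteRtpHeader(uint8_t* rtp, uint8_t payload_type, uint16_t seq,
                           uint32_t timestamp, uint32_t ssrc, bool marker) {
  rtp[0] = 0x80;                                   // V=2, P=0, X=0, CC=0
  rtp[1] = (marker ? 0x80 : 0x00) | (payload_type & 0x7F);
  rtp[2] = seq >> 8;                 rtp[3] = seq & 0xFF;
  rtp[4] = timestamp >> 24;          rtp[5] = (timestamp >> 16) & 0xFF;
  rtp[6] = (timestamp >> 8) & 0xFF;  rtp[7] = timestamp & 0xFF;
  rtp[8] = ssrc >> 24;               rtp[9] = (ssrc >> 16) & 0xFF;
  rtp[10] = (ssrc >> 8) & 0xFF;      rtp[11] = ssrc & 0xFF;
}
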
diff --git a/webrtc/modules/audio_coding/neteq4/test/RTPjitter.cc b/webrtc/modules/audio_coding/neteq4/test/RTPjitter.cc
index 77b29ec..301ceb6 100644
--- a/webrtc/modules/audio_coding/neteq4/test/RTPjitter.cc
+++ b/webrtc/modules/audio_coding/neteq4/test/RTPjitter.cc
@@ -37,7 +37,7 @@
struct arr_time {
float time;
- WebRtc_UWord32 ix;
+ uint32_t ix;
};
int filelen(FILE *fid)
@@ -66,8 +66,8 @@
char firstline[FIRSTLINELEN];
unsigned char *rtp_vec = NULL, **packet_ptr, *temp_packet;
const unsigned int kRtpDumpHeaderSize = 4 + 4 + 4 + 2 + 2;
- WebRtc_UWord16 len;
- WebRtc_UWord32 *offset;
+ uint16_t len;
+ uint32_t *offset;
/* check number of parameters */
if (argc != 4) {
@@ -133,9 +133,9 @@
// read all RTP packets into vector
rtp_len=0;
Npack=0;
- len=(WebRtc_UWord16) fread(&rtp_vec[rtp_len], sizeof(unsigned char), 2, in_file); // read length of first packet
+ len=(uint16_t) fread(&rtp_vec[rtp_len], sizeof(unsigned char), 2, in_file); // read length of first packet
while(len==2) {
- len = ntohs(*((WebRtc_UWord16 *)(rtp_vec + rtp_len)));
+ len = ntohs(*((uint16_t *)(rtp_vec + rtp_len)));
rtp_len += 2;
if(fread(&rtp_vec[rtp_len], sizeof(unsigned char), len-2, in_file)!=(unsigned) (len-2)) {
fprintf(stderr,"Error: currupt packet length\n");
@@ -143,7 +143,7 @@
}
rtp_len += len-2;
Npack++;
- len=(WebRtc_UWord16) fread(&rtp_vec[rtp_len], sizeof(unsigned char), 2, in_file); // read length of next packet
+ len=(uint16_t) fread(&rtp_vec[rtp_len], sizeof(unsigned char), 2, in_file); // read length of next packet
}
packet_ptr = (unsigned char **) malloc(Npack*sizeof(unsigned char*));
@@ -151,7 +151,7 @@
packet_ptr[0]=rtp_vec;
k=1;
while(k<Npack) {
- len = ntohs(*((WebRtc_UWord16 *) packet_ptr[k-1]));
+ len = ntohs(*((uint16_t *) packet_ptr[k-1]));
packet_ptr[k]=packet_ptr[k-1]+len;
k++;
}
@@ -159,20 +159,20 @@
for(k=0; k<dat_len && k<Npack; k++) {
if(time_vec[k].time < FLT_MAX && time_vec[k].ix < Npack){
temp_packet = packet_ptr[time_vec[k].ix];
- offset = (WebRtc_UWord32 *) (temp_packet+4);
+ offset = (uint32_t *) (temp_packet+4);
if ( time_vec[k].time >= 0 ) {
- *offset = htonl((WebRtc_UWord32) time_vec[k].time);
+ *offset = htonl((uint32_t) time_vec[k].time);
}
else {
- *offset = htonl((WebRtc_UWord32) 0);
+ *offset = htonl((uint32_t) 0);
fprintf(stderr, "Warning: negative receive time in dat file transformed to 0.\n");
}
// write packet to file
if (fwrite(temp_packet, sizeof(unsigned char),
- ntohs(*((WebRtc_UWord16*) temp_packet)),
+ ntohs(*((uint16_t*) temp_packet)),
out_file) !=
- ntohs(*((WebRtc_UWord16*) temp_packet))) {
+ ntohs(*((uint16_t*) temp_packet))) {
return -1;
}
}
diff --git a/webrtc/modules/audio_coding/neteq4/test/RTPtimeshift.cc b/webrtc/modules/audio_coding/neteq4/test/RTPtimeshift.cc
index dc7ff9f..ba3a08e 100644
--- a/webrtc/modules/audio_coding/neteq4/test/RTPtimeshift.cc
+++ b/webrtc/modules/audio_coding/neteq4/test/RTPtimeshift.cc
@@ -63,9 +63,9 @@
}
// get new start TS and start SeqNo from arguments
- WebRtc_UWord32 TSdiff = atoi(argv[3]) - packet.timeStamp();
- WebRtc_UWord16 SNdiff = 0;
- WebRtc_UWord32 ATdiff = 0;
+ uint32_t TSdiff = atoi(argv[3]) - packet.timeStamp();
+ uint16_t SNdiff = 0;
+ uint32_t ATdiff = 0;
if (argc > 4)
{
if (argv[4] >= 0)