Use suffixed {uint,int}{8,16,32,64}_t types.
Removes the use of the legacy uint8, int64, etc. typedefs in favor of the standard uint8_t, int64_t, etc. fixed-width types.
BUG=webrtc:5024
R=henrik.lundin@webrtc.org, henrikg@webrtc.org, perkj@webrtc.org, solenberg@webrtc.org, stefan@webrtc.org, tina.legrand@webrtc.org
Review URL: https://codereview.webrtc.org/1362503003 .
Cr-Commit-Position: refs/heads/master@{#10196}
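
The rename is purely mechanical: every use of the legacy suffix-less integer typedefs is replaced with the corresponding <stdint.h> fixed-width type, with no change in size or signedness. A minimal sketch of the pattern follows (the variable names are illustrative only, chosen to match the kinds of fields touched by this patch, not taken verbatim from any one file):

  #include <stdint.h>

  // Before: legacy typedefs (uint32, int64, ...) pulled in via the rtc base
  // headers:
  //   uint32 ssrc = 0;
  //   int64 time_stamp_ns = 0;

  // After: standard fixed-width types from <stdint.h>.
  uint32_t ssrc = 0;          // SSRCs stay 32-bit unsigned.
  int64_t time_stamp_ns = 0;  // Timestamps stay 64-bit signed.

  static_assert(sizeof(uint32_t) == 4 && sizeof(int64_t) == 8,
                "the suffixed types keep the sizes of the old typedefs");
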
diff --git a/talk/app/webrtc/androidvideocapturer.cc b/talk/app/webrtc/androidvideocapturer.cc
index 618fcb3..0ee60c2 100644
--- a/talk/app/webrtc/androidvideocapturer.cc
+++ b/talk/app/webrtc/androidvideocapturer.cc
@@ -50,13 +50,13 @@
captured_frame_.pixel_width = 1;
captured_frame_.data = nullptr;
captured_frame_.data_size = cricket::CapturedFrame::kUnknownDataSize;
- captured_frame_.fourcc = static_cast<uint32>(cricket::FOURCC_ANY);
+ captured_frame_.fourcc = static_cast<uint32_t>(cricket::FOURCC_ANY);
}
void UpdateCapturedFrame(
const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& buffer,
int rotation,
- int64 time_stamp_in_ns) {
+ int64_t time_stamp_in_ns) {
buffer_ = buffer;
captured_frame_.width = buffer->width();
captured_frame_.height = buffer->height();
@@ -169,7 +169,7 @@
return running_;
}
-bool AndroidVideoCapturer::GetPreferredFourccs(std::vector<uint32>* fourccs) {
+bool AndroidVideoCapturer::GetPreferredFourccs(std::vector<uint32_t>* fourccs) {
RTC_CHECK(thread_checker_.CalledOnValidThread());
fourccs->push_back(cricket::FOURCC_YV12);
return true;
@@ -192,7 +192,7 @@
void AndroidVideoCapturer::OnIncomingFrame(
rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer,
int rotation,
- int64 time_stamp) {
+ int64_t time_stamp) {
RTC_CHECK(thread_checker_.CalledOnValidThread());
frame_factory_->UpdateCapturedFrame(buffer, rotation, time_stamp);
SignalFrameCaptured(this, frame_factory_->GetCapturedFrame());
diff --git a/talk/app/webrtc/androidvideocapturer.h b/talk/app/webrtc/androidvideocapturer.h
index ad45004..fdc8629 100644
--- a/talk/app/webrtc/androidvideocapturer.h
+++ b/talk/app/webrtc/androidvideocapturer.h
@@ -69,7 +69,7 @@
// Argument |buffer| is intentionally by value, for use with rtc::Bind.
void OnIncomingFrame(rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer,
int rotation,
- int64 time_stamp);
+ int64_t time_stamp);
// Called from JNI to request a new video format.
void OnOutputFormatRequest(int width, int height, int fps);
@@ -89,7 +89,7 @@
void Stop() override;
bool IsRunning() override;
bool IsScreencast() const override { return false; }
- bool GetPreferredFourccs(std::vector<uint32>* fourccs) override;
+ bool GetPreferredFourccs(std::vector<uint32_t>* fourccs) override;
bool running_;
rtc::scoped_refptr<AndroidVideoCapturerDelegate> delegate_;
diff --git a/talk/app/webrtc/datachannel.cc b/talk/app/webrtc/datachannel.cc
index 690ee65..2028dc9 100644
--- a/talk/app/webrtc/datachannel.cc
+++ b/talk/app/webrtc/datachannel.cc
@@ -193,7 +193,7 @@
}
}
-uint64 DataChannel::buffered_amount() const {
+uint64_t DataChannel::buffered_amount() const {
return queued_send_data_.byte_count();
}
@@ -239,7 +239,7 @@
return true;
}
-void DataChannel::SetReceiveSsrc(uint32 receive_ssrc) {
+void DataChannel::SetReceiveSsrc(uint32_t receive_ssrc) {
ASSERT(data_channel_type_ == cricket::DCT_RTP);
if (receive_ssrc_set_) {
@@ -276,7 +276,7 @@
}
}
-void DataChannel::SetSendSsrc(uint32 send_ssrc) {
+void DataChannel::SetSendSsrc(uint32_t send_ssrc) {
ASSERT(data_channel_type_ == cricket::DCT_RTP);
if (send_ssrc_set_) {
return;
@@ -304,7 +304,7 @@
void DataChannel::OnDataReceived(cricket::DataChannel* channel,
const cricket::ReceiveDataParams& params,
const rtc::Buffer& payload) {
- uint32 expected_ssrc =
+ uint32_t expected_ssrc =
(data_channel_type_ == cricket::DCT_RTP) ? receive_ssrc_ : config_.id;
if (params.ssrc != expected_ssrc) {
return;
@@ -476,7 +476,7 @@
ASSERT(state_ == kOpen || state_ == kClosing);
- uint64 start_buffered_amount = buffered_amount();
+ uint64_t start_buffered_amount = buffered_amount();
while (!queued_send_data_.Empty()) {
DataBuffer* buffer = queued_send_data_.Front();
if (!SendDataMessage(*buffer, false)) {
diff --git a/talk/app/webrtc/datachannel.h b/talk/app/webrtc/datachannel.h
index 8e58d06..4506f71 100644
--- a/talk/app/webrtc/datachannel.h
+++ b/talk/app/webrtc/datachannel.h
@@ -114,16 +114,14 @@
virtual std::string label() const { return label_; }
virtual bool reliable() const;
virtual bool ordered() const { return config_.ordered; }
- virtual uint16 maxRetransmitTime() const {
+ virtual uint16_t maxRetransmitTime() const {
return config_.maxRetransmitTime;
}
- virtual uint16 maxRetransmits() const {
- return config_.maxRetransmits;
- }
+ virtual uint16_t maxRetransmits() const { return config_.maxRetransmits; }
virtual std::string protocol() const { return config_.protocol; }
virtual bool negotiated() const { return config_.negotiated; }
virtual int id() const { return config_.id; }
- virtual uint64 buffered_amount() const;
+ virtual uint64_t buffered_amount() const;
virtual void Close();
virtual DataState state() const { return state_; }
virtual bool Send(const DataBuffer& buffer);
@@ -160,10 +158,10 @@
// Set the SSRC this channel should use to send data on the
// underlying data engine. |send_ssrc| == 0 means that the channel is no
// longer part of the session negotiation.
- void SetSendSsrc(uint32 send_ssrc);
+ void SetSendSsrc(uint32_t send_ssrc);
// Set the SSRC this channel should use to receive data from the
// underlying data engine.
- void SetReceiveSsrc(uint32 receive_ssrc);
+ void SetReceiveSsrc(uint32_t receive_ssrc);
cricket::DataChannelType data_channel_type() const {
return data_channel_type_;
@@ -240,8 +238,8 @@
bool send_ssrc_set_;
bool receive_ssrc_set_;
bool writable_;
- uint32 send_ssrc_;
- uint32 receive_ssrc_;
+ uint32_t send_ssrc_;
+ uint32_t receive_ssrc_;
// Control messages that always have to get sent out before any queued
// data.
PacketQueue queued_control_data_;
@@ -266,13 +264,13 @@
PROXY_CONSTMETHOD0(std::string, label)
PROXY_CONSTMETHOD0(bool, reliable)
PROXY_CONSTMETHOD0(bool, ordered)
- PROXY_CONSTMETHOD0(uint16, maxRetransmitTime)
- PROXY_CONSTMETHOD0(uint16, maxRetransmits)
+ PROXY_CONSTMETHOD0(uint16_t, maxRetransmitTime)
+ PROXY_CONSTMETHOD0(uint16_t, maxRetransmits)
PROXY_CONSTMETHOD0(std::string, protocol)
PROXY_CONSTMETHOD0(bool, negotiated)
PROXY_CONSTMETHOD0(int, id)
PROXY_CONSTMETHOD0(DataState, state)
- PROXY_CONSTMETHOD0(uint64, buffered_amount)
+ PROXY_CONSTMETHOD0(uint64_t, buffered_amount)
PROXY_METHOD0(void, Close)
PROXY_METHOD1(bool, Send, const DataBuffer&)
END_PROXY()
diff --git a/talk/app/webrtc/datachannel_unittest.cc b/talk/app/webrtc/datachannel_unittest.cc
index e3c290b..b4f611e 100644
--- a/talk/app/webrtc/datachannel_unittest.cc
+++ b/talk/app/webrtc/datachannel_unittest.cc
@@ -43,7 +43,7 @@
++on_state_change_count_;
}
- void OnBufferedAmountChange(uint64 previous_amount) {
+ void OnBufferedAmountChange(uint64_t previous_amount) {
++on_buffered_amount_change_count_;
}
@@ -215,7 +215,7 @@
EXPECT_GE(webrtc_data_channel_->id(), 0);
EXPECT_EQ(cricket::DMT_CONTROL, provider_.last_send_data_params().type);
EXPECT_EQ(provider_.last_send_data_params().ssrc,
- static_cast<uint32>(webrtc_data_channel_->id()));
+ static_cast<uint32_t>(webrtc_data_channel_->id()));
}
TEST_F(SctpDataChannelTest, QueuedOpenMessageSent) {
@@ -225,7 +225,7 @@
EXPECT_EQ(cricket::DMT_CONTROL, provider_.last_send_data_params().type);
EXPECT_EQ(provider_.last_send_data_params().ssrc,
- static_cast<uint32>(webrtc_data_channel_->id()));
+ static_cast<uint32_t>(webrtc_data_channel_->id()));
}
// Tests that the DataChannel created after transport gets ready can enter OPEN
diff --git a/talk/app/webrtc/datachannelinterface.h b/talk/app/webrtc/datachannelinterface.h
index 9d2cd44..d70972f 100644
--- a/talk/app/webrtc/datachannelinterface.h
+++ b/talk/app/webrtc/datachannelinterface.h
@@ -92,7 +92,7 @@
// A data buffer was successfully received.
virtual void OnMessage(const DataBuffer& buffer) = 0;
// The data channel's buffered_amount has changed.
- virtual void OnBufferedAmountChange(uint64 previous_amount){};
+ virtual void OnBufferedAmountChange(uint64_t previous_amount){};
protected:
virtual ~DataChannelObserver() {}
@@ -135,8 +135,8 @@
// implemented these APIs. They should all just return the values the
// DataChannel was created with.
virtual bool ordered() const { return false; }
- virtual uint16 maxRetransmitTime() const { return 0; }
- virtual uint16 maxRetransmits() const { return 0; }
+ virtual uint16_t maxRetransmitTime() const { return 0; }
+ virtual uint16_t maxRetransmits() const { return 0; }
virtual std::string protocol() const { return std::string(); }
virtual bool negotiated() const { return false; }
@@ -145,7 +145,7 @@
// The buffered_amount returns the number of bytes of application data
// (UTF-8 text and binary data) that have been queued using SendBuffer but
// have not yet been transmitted to the network.
- virtual uint64 buffered_amount() const = 0;
+ virtual uint64_t buffered_amount() const = 0;
virtual void Close() = 0;
// Sends |data| to the remote peer.
virtual bool Send(const DataBuffer& buffer) = 0;
diff --git a/talk/app/webrtc/dtmfsender_unittest.cc b/talk/app/webrtc/dtmfsender_unittest.cc
index 64f43e4..f686aa2 100644
--- a/talk/app/webrtc/dtmfsender_unittest.cc
+++ b/talk/app/webrtc/dtmfsender_unittest.cc
@@ -132,7 +132,7 @@
private:
std::set<std::string> can_insert_dtmf_tracks_;
std::vector<DtmfInfo> dtmf_info_queue_;
- int64 last_insert_dtmf_call_;
+ int64_t last_insert_dtmf_call_;
sigslot::signal0<> SignalDestroyed;
};
diff --git a/talk/app/webrtc/java/jni/androidmediadecoder_jni.cc b/talk/app/webrtc/java/jni/androidmediadecoder_jni.cc
index f859410..1f63131 100644
--- a/talk/app/webrtc/java/jni/androidmediadecoder_jni.cc
+++ b/talk/app/webrtc/java/jni/androidmediadecoder_jni.cc
@@ -519,10 +519,10 @@
// Copy encoded data to Java ByteBuffer.
jobject j_input_buffer = input_buffers_[j_input_buffer_index];
- uint8* buffer =
- reinterpret_cast<uint8*>(jni->GetDirectBufferAddress(j_input_buffer));
+ uint8_t* buffer =
+ reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(j_input_buffer));
RTC_CHECK(buffer) << "Indirect buffer??";
- int64 buffer_capacity = jni->GetDirectBufferCapacity(j_input_buffer);
+ int64_t buffer_capacity = jni->GetDirectBufferCapacity(j_input_buffer);
if (CheckException(jni) || buffer_capacity < inputImage._length) {
ALOGE("Input frame size %d is bigger than buffer size %d.",
inputImage._length, buffer_capacity);
diff --git a/talk/app/webrtc/java/jni/androidmediaencoder_jni.cc b/talk/app/webrtc/java/jni/androidmediaencoder_jni.cc
index a25a3cc..8817df4 100644
--- a/talk/app/webrtc/java/jni/androidmediaencoder_jni.cc
+++ b/talk/app/webrtc/java/jni/androidmediaencoder_jni.cc
@@ -487,7 +487,7 @@
for (size_t i = 0; i < num_input_buffers; ++i) {
input_buffers_[i] =
jni->NewGlobalRef(jni->GetObjectArrayElement(input_buffers, i));
- int64 yuv_buffer_capacity =
+ int64_t yuv_buffer_capacity =
jni->GetDirectBufferCapacity(input_buffers_[i]);
CHECK_EXCEPTION(jni);
RTC_CHECK(yuv_buffer_capacity >= yuv_size_) << "Insufficient capacity";
@@ -572,8 +572,8 @@
frames_received_ - 1, current_timestamp_us_ / 1000, frames_in_queue_);
jobject j_input_buffer = input_buffers_[j_input_buffer_index];
- uint8* yuv_buffer =
- reinterpret_cast<uint8*>(jni->GetDirectBufferAddress(j_input_buffer));
+ uint8_t* yuv_buffer =
+ reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(j_input_buffer));
CHECK_EXCEPTION(jni);
RTC_CHECK(yuv_buffer) << "Indirect buffer??";
RTC_CHECK(!libyuv::ConvertFromI420(
@@ -726,7 +726,7 @@
// Extract payload.
size_t payload_size = jni->GetDirectBufferCapacity(j_output_buffer);
- uint8* payload = reinterpret_cast<uint8_t*>(
+ uint8_t* payload = reinterpret_cast<uint8_t*>(
jni->GetDirectBufferAddress(j_output_buffer));
CHECK_EXCEPTION(jni);
diff --git a/talk/app/webrtc/java/jni/androidvideocapturer_jni.cc b/talk/app/webrtc/java/jni/androidvideocapturer_jni.cc
index 93b1695..9ac6406 100644
--- a/talk/app/webrtc/java/jni/androidvideocapturer_jni.cc
+++ b/talk/app/webrtc/java/jni/androidvideocapturer_jni.cc
@@ -126,7 +126,7 @@
invoker_->AsyncInvoke<void>(rtc::Bind(method, capturer_, args...));
}
-void AndroidVideoCapturerJni::ReturnBuffer(int64 time_stamp) {
+void AndroidVideoCapturerJni::ReturnBuffer(int64_t time_stamp) {
jmethodID m = GetMethodID(jni(), *j_video_capturer_class_,
"returnBuffer", "(J)V");
jni()->CallVoidMethod(*j_capturer_global_, m, time_stamp);
@@ -155,7 +155,7 @@
int width,
int height,
int rotation,
- int64 time_stamp) {
+ int64_t time_stamp) {
const uint8_t* y_plane = static_cast<uint8_t*>(video_frame);
// Android guarantees that the stride is a multiple of 16.
// http://developer.android.com/reference/android/hardware/Camera.Parameters.html#setPreviewFormat%28int%29
diff --git a/talk/app/webrtc/java/jni/androidvideocapturer_jni.h b/talk/app/webrtc/java/jni/androidvideocapturer_jni.h
index 9a356d8..cd3dd9a 100644
--- a/talk/app/webrtc/java/jni/androidvideocapturer_jni.h
+++ b/talk/app/webrtc/java/jni/androidvideocapturer_jni.h
@@ -61,14 +61,14 @@
int width,
int height,
int rotation,
- int64 time_stamp);
+ int64_t time_stamp);
void OnOutputFormatRequest(int width, int height, int fps);
protected:
~AndroidVideoCapturerJni();
private:
- void ReturnBuffer(int64 time_stamp);
+ void ReturnBuffer(int64_t time_stamp);
JNIEnv* jni();
// Helper function to make safe asynchronous calls to |capturer_|. The calls
diff --git a/talk/app/webrtc/java/jni/peerconnection_jni.cc b/talk/app/webrtc/java/jni/peerconnection_jni.cc
index 33b0890..fc6ce50c 100644
--- a/talk/app/webrtc/java/jni/peerconnection_jni.cc
+++ b/talk/app/webrtc/java/jni/peerconnection_jni.cc
@@ -606,7 +606,7 @@
virtual ~DataChannelObserverWrapper() {}
- void OnBufferedAmountChange(uint64 previous_amount) override {
+ void OnBufferedAmountChange(uint64_t previous_amount) override {
ScopedLocalRefFrame local_ref_frame(jni());
jni()->CallVoidMethod(*j_observer_global_, j_on_buffered_amount_change_mid_,
previous_amount);
@@ -806,13 +806,13 @@
strides_array[2] = frame->GetVPitch();
jni()->ReleaseIntArrayElements(strides, strides_array, 0);
jobjectArray planes = jni()->NewObjectArray(3, *j_byte_buffer_class_, NULL);
- jobject y_buffer = jni()->NewDirectByteBuffer(
- const_cast<uint8*>(frame->GetYPlane()),
- frame->GetYPitch() * frame->GetHeight());
+ jobject y_buffer =
+ jni()->NewDirectByteBuffer(const_cast<uint8_t*>(frame->GetYPlane()),
+ frame->GetYPitch() * frame->GetHeight());
jobject u_buffer = jni()->NewDirectByteBuffer(
- const_cast<uint8*>(frame->GetUPlane()), frame->GetChromaSize());
+ const_cast<uint8_t*>(frame->GetUPlane()), frame->GetChromaSize());
jobject v_buffer = jni()->NewDirectByteBuffer(
- const_cast<uint8*>(frame->GetVPlane()), frame->GetChromaSize());
+ const_cast<uint8_t*>(frame->GetVPlane()), frame->GetChromaSize());
jni()->SetObjectArrayElement(planes, 0, y_buffer);
jni()->SetObjectArrayElement(planes, 1, u_buffer);
jni()->SetObjectArrayElement(planes, 2, v_buffer);
@@ -880,8 +880,8 @@
}
JOW(jlong, DataChannel_bufferedAmount)(JNIEnv* jni, jobject j_dc) {
- uint64 buffered_amount = ExtractNativeDC(jni, j_dc)->buffered_amount();
- RTC_CHECK_LE(buffered_amount, std::numeric_limits<int64>::max())
+ uint64_t buffered_amount = ExtractNativeDC(jni, j_dc)->buffered_amount();
+ RTC_CHECK_LE(buffered_amount, std::numeric_limits<int64_t>::max())
<< "buffered_amount overflowed jlong!";
return static_cast<jlong>(buffered_amount);
}
diff --git a/talk/app/webrtc/mediastreamprovider.h b/talk/app/webrtc/mediastreamprovider.h
index 7e25b66..b80f6b2 100644
--- a/talk/app/webrtc/mediastreamprovider.h
+++ b/talk/app/webrtc/mediastreamprovider.h
@@ -55,17 +55,19 @@
class AudioProviderInterface {
public:
// Enable/disable the audio playout of a remote audio track with |ssrc|.
- virtual void SetAudioPlayout(uint32 ssrc, bool enable,
+ virtual void SetAudioPlayout(uint32_t ssrc,
+ bool enable,
cricket::AudioRenderer* renderer) = 0;
// Enable/disable sending audio on the local audio track with |ssrc|.
// When |enable| is true |options| should be applied to the audio track.
- virtual void SetAudioSend(uint32 ssrc, bool enable,
+ virtual void SetAudioSend(uint32_t ssrc,
+ bool enable,
const cricket::AudioOptions& options,
cricket::AudioRenderer* renderer) = 0;
// Sets the audio playout volume of a remote audio track with |ssrc|.
// |volume| is in the range of [0, 10].
- virtual void SetAudioPlayoutVolume(uint32 ssrc, double volume) = 0;
+ virtual void SetAudioPlayoutVolume(uint32_t ssrc, double volume) = 0;
protected:
virtual ~AudioProviderInterface() {}
@@ -76,13 +78,15 @@
// PeerConnection.
class VideoProviderInterface {
public:
- virtual bool SetCaptureDevice(uint32 ssrc,
+ virtual bool SetCaptureDevice(uint32_t ssrc,
cricket::VideoCapturer* camera) = 0;
// Enable/disable the video playout of a remote video track with |ssrc|.
- virtual void SetVideoPlayout(uint32 ssrc, bool enable,
+ virtual void SetVideoPlayout(uint32_t ssrc,
+ bool enable,
cricket::VideoRenderer* renderer) = 0;
// Enable sending video on the local video track with |ssrc|.
- virtual void SetVideoSend(uint32 ssrc, bool enable,
+ virtual void SetVideoSend(uint32_t ssrc,
+ bool enable,
const cricket::VideoOptions* options) = 0;
protected:
diff --git a/talk/app/webrtc/mediastreamsignaling.cc b/talk/app/webrtc/mediastreamsignaling.cc
index 4f2615f..c12471c 100644
--- a/talk/app/webrtc/mediastreamsignaling.cc
+++ b/talk/app/webrtc/mediastreamsignaling.cc
@@ -612,7 +612,7 @@
// track id.
const std::string& stream_label = it->sync_label;
const std::string& track_id = it->id;
- uint32 ssrc = it->first_ssrc();
+ uint32_t ssrc = it->first_ssrc();
rtc::scoped_refptr<MediaStreamInterface> stream =
remote_streams_->find(stream_label);
@@ -634,7 +634,7 @@
void MediaStreamSignaling::OnRemoteTrackSeen(const std::string& stream_label,
const std::string& track_id,
- uint32 ssrc,
+ uint32_t ssrc,
cricket::MediaType media_type) {
MediaStreamInterface* stream = remote_streams_->find(stream_label);
@@ -801,7 +801,7 @@
// track id.
const std::string& stream_label = it->sync_label;
const std::string& track_id = it->id;
- uint32 ssrc = it->first_ssrc();
+ uint32_t ssrc = it->first_ssrc();
const TrackInfo* track_info = FindTrackInfo(*current_tracks,
stream_label,
track_id);
@@ -814,7 +814,7 @@
void MediaStreamSignaling::OnLocalTrackSeen(const std::string& stream_label,
const std::string& track_id,
- uint32 ssrc,
+ uint32_t ssrc,
cricket::MediaType media_type) {
MediaStreamInterface* stream = local_streams_->find(stream_label);
if (!stream) {
@@ -844,11 +844,10 @@
}
}
-void MediaStreamSignaling::OnLocalTrackRemoved(
- const std::string& stream_label,
- const std::string& track_id,
- uint32 ssrc,
- cricket::MediaType media_type) {
+void MediaStreamSignaling::OnLocalTrackRemoved(const std::string& stream_label,
+ const std::string& track_id,
+ uint32_t ssrc,
+ cricket::MediaType media_type) {
MediaStreamInterface* stream = local_streams_->find(stream_label);
if (!stream) {
// This is the normal case. Ie RemoveLocalStream has been called and the
@@ -953,7 +952,7 @@
}
void MediaStreamSignaling::CreateRemoteDataChannel(const std::string& label,
- uint32 remote_ssrc) {
+ uint32_t remote_ssrc) {
if (!data_channel_factory_) {
LOG(LS_WARNING) << "Remote peer requested a DataChannel but DataChannels "
<< "are not supported.";
@@ -991,8 +990,7 @@
}
}
-
-void MediaStreamSignaling::OnRemoteSctpDataChannelClosed(uint32 sid) {
+void MediaStreamSignaling::OnRemoteSctpDataChannelClosed(uint32_t sid) {
int index = FindDataChannelBySid(sid);
if (index < 0) {
LOG(LS_WARNING) << "Unexpected sid " << sid
diff --git a/talk/app/webrtc/mediastreamsignaling.h b/talk/app/webrtc/mediastreamsignaling.h
index 08f9cba..b858b5b 100644
--- a/talk/app/webrtc/mediastreamsignaling.h
+++ b/talk/app/webrtc/mediastreamsignaling.h
@@ -66,12 +66,12 @@
// Triggered when the remote SessionDescription has a new audio track.
virtual void OnAddRemoteAudioTrack(MediaStreamInterface* stream,
AudioTrackInterface* audio_track,
- uint32 ssrc) = 0;
+ uint32_t ssrc) = 0;
// Triggered when the remote SessionDescription has a new video track.
virtual void OnAddRemoteVideoTrack(MediaStreamInterface* stream,
VideoTrackInterface* video_track,
- uint32 ssrc) = 0;
+ uint32_t ssrc) = 0;
// Triggered when the remote SessionDescription has removed an audio track.
virtual void OnRemoveRemoteAudioTrack(MediaStreamInterface* stream,
@@ -84,17 +84,17 @@
// Triggered when the local SessionDescription has a new audio track.
virtual void OnAddLocalAudioTrack(MediaStreamInterface* stream,
AudioTrackInterface* audio_track,
- uint32 ssrc) = 0;
+ uint32_t ssrc) = 0;
// Triggered when the local SessionDescription has a new video track.
virtual void OnAddLocalVideoTrack(MediaStreamInterface* stream,
VideoTrackInterface* video_track,
- uint32 ssrc) = 0;
+ uint32_t ssrc) = 0;
// Triggered when the local SessionDescription has removed an audio track.
virtual void OnRemoveLocalAudioTrack(MediaStreamInterface* stream,
AudioTrackInterface* audio_track,
- uint32 ssrc) = 0;
+ uint32_t ssrc) = 0;
// Triggered when the local SessionDescription has removed a video track.
virtual void OnRemoveLocalVideoTrack(MediaStreamInterface* stream,
@@ -254,7 +254,7 @@
}
void OnDataTransportCreatedForSctp();
void OnDtlsRoleReadyForSctp(rtc::SSLRole role);
- void OnRemoteSctpDataChannelClosed(uint32 sid);
+ void OnRemoteSctpDataChannelClosed(uint32_t sid);
const SctpDataChannels& sctp_data_channels() const {
return sctp_data_channels_;
@@ -286,11 +286,11 @@
TrackInfo() : ssrc(0) {}
TrackInfo(const std::string& stream_label,
const std::string track_id,
- uint32 ssrc)
+ uint32_t ssrc)
: stream_label(stream_label), track_id(track_id), ssrc(ssrc) {}
std::string stream_label;
std::string track_id;
- uint32 ssrc;
+ uint32_t ssrc;
};
typedef std::vector<TrackInfo> TrackInfos;
@@ -309,7 +309,7 @@
// MediaStreamSignaling::OnAddRemoteVideoTrack.
void OnRemoteTrackSeen(const std::string& stream_label,
const std::string& track_id,
- uint32 ssrc,
+ uint32_t ssrc,
cricket::MediaType media_type);
// Triggered when a remote track has been removed from a remote session
@@ -350,7 +350,7 @@
// |local_streams_|
void OnLocalTrackSeen(const std::string& stream_label,
const std::string& track_id,
- uint32 ssrc,
+ uint32_t ssrc,
cricket::MediaType media_type);
// Triggered when a local track has been removed from a local session
@@ -361,14 +361,14 @@
// MediaStreamTrack in a MediaStream in |local_streams_|.
void OnLocalTrackRemoved(const std::string& stream_label,
const std::string& track_id,
- uint32 ssrc,
+ uint32_t ssrc,
cricket::MediaType media_type);
void UpdateLocalRtpDataChannels(const cricket::StreamParamsVec& streams);
void UpdateRemoteRtpDataChannels(const cricket::StreamParamsVec& streams);
void UpdateClosingDataChannels(
const std::vector<std::string>& active_channels, bool is_local_update);
- void CreateRemoteDataChannel(const std::string& label, uint32 remote_ssrc);
+ void CreateRemoteDataChannel(const std::string& label, uint32_t remote_ssrc);
const TrackInfo* FindTrackInfo(const TrackInfos& infos,
const std::string& stream_label,
diff --git a/talk/app/webrtc/mediastreamsignaling_unittest.cc b/talk/app/webrtc/mediastreamsignaling_unittest.cc
index 4f54df4..2333705 100644
--- a/talk/app/webrtc/mediastreamsignaling_unittest.cc
+++ b/talk/app/webrtc/mediastreamsignaling_unittest.cc
@@ -311,19 +311,19 @@
virtual void OnAddLocalAudioTrack(MediaStreamInterface* stream,
AudioTrackInterface* audio_track,
- uint32 ssrc) {
+ uint32_t ssrc) {
AddTrack(&local_audio_tracks_, stream, audio_track, ssrc);
}
virtual void OnAddLocalVideoTrack(MediaStreamInterface* stream,
VideoTrackInterface* video_track,
- uint32 ssrc) {
+ uint32_t ssrc) {
AddTrack(&local_video_tracks_, stream, video_track, ssrc);
}
virtual void OnRemoveLocalAudioTrack(MediaStreamInterface* stream,
AudioTrackInterface* audio_track,
- uint32 ssrc) {
+ uint32_t ssrc) {
RemoveTrack(&local_audio_tracks_, stream, audio_track);
}
@@ -334,13 +334,13 @@
virtual void OnAddRemoteAudioTrack(MediaStreamInterface* stream,
AudioTrackInterface* audio_track,
- uint32 ssrc) {
+ uint32_t ssrc) {
AddTrack(&remote_audio_tracks_, stream, audio_track, ssrc);
}
virtual void OnAddRemoteVideoTrack(MediaStreamInterface* stream,
VideoTrackInterface* video_track,
- uint32 ssrc) {
+ uint32_t ssrc) {
AddTrack(&remote_video_tracks_, stream, video_track, ssrc);
}
@@ -369,7 +369,7 @@
void VerifyRemoteAudioTrack(const std::string& stream_label,
const std::string& track_id,
- uint32 ssrc) {
+ uint32_t ssrc) {
VerifyTrack(remote_audio_tracks_, stream_label, track_id, ssrc);
}
@@ -377,14 +377,14 @@
void VerifyRemoteVideoTrack(const std::string& stream_label,
const std::string& track_id,
- uint32 ssrc) {
+ uint32_t ssrc) {
VerifyTrack(remote_video_tracks_, stream_label, track_id, ssrc);
}
size_t NumberOfLocalAudioTracks() { return local_audio_tracks_.size(); }
void VerifyLocalAudioTrack(const std::string& stream_label,
const std::string& track_id,
- uint32 ssrc) {
+ uint32_t ssrc) {
VerifyTrack(local_audio_tracks_, stream_label, track_id, ssrc);
}
@@ -392,7 +392,7 @@
void VerifyLocalVideoTrack(const std::string& stream_label,
const std::string& track_id,
- uint32 ssrc) {
+ uint32_t ssrc) {
VerifyTrack(local_video_tracks_, stream_label, track_id, ssrc);
}
@@ -401,18 +401,18 @@
TrackInfo() {}
TrackInfo(const std::string& stream_label,
const std::string track_id,
- uint32 ssrc)
+ uint32_t ssrc)
: stream_label(stream_label), track_id(track_id), ssrc(ssrc) {}
std::string stream_label;
std::string track_id;
- uint32 ssrc;
+ uint32_t ssrc;
};
typedef std::vector<TrackInfo> TrackInfos;
void AddTrack(TrackInfos* track_infos,
MediaStreamInterface* stream,
MediaStreamTrackInterface* track,
- uint32 ssrc) {
+ uint32_t ssrc) {
(*track_infos).push_back(TrackInfo(stream->label(), track->id(), ssrc));
}
@@ -442,7 +442,7 @@
void VerifyTrack(const TrackInfos& track_infos,
const std::string& stream_label,
const std::string& track_id,
- uint32 ssrc) {
+ uint32_t ssrc) {
const TrackInfo* track_info = FindTrackInfo(track_infos,
stream_label,
track_id);
diff --git a/talk/app/webrtc/objc/RTCDataChannel.mm b/talk/app/webrtc/objc/RTCDataChannel.mm
index 8a9b6b6..fdb5c99a 100644
--- a/talk/app/webrtc/objc/RTCDataChannel.mm
+++ b/talk/app/webrtc/objc/RTCDataChannel.mm
@@ -43,7 +43,7 @@
[_channel.delegate channelDidChangeState:_channel];
}
- void OnBufferedAmountChange(uint64 previousAmount) override {
+ void OnBufferedAmountChange(uint64_t previousAmount) override {
RTCDataChannel* channel = _channel;
id<RTCDataChannelDelegate> delegate = channel.delegate;
if ([delegate
diff --git a/talk/app/webrtc/objc/avfoundationvideocapturer.h b/talk/app/webrtc/objc/avfoundationvideocapturer.h
index dd32909..ded80f6 100644
--- a/talk/app/webrtc/objc/avfoundationvideocapturer.h
+++ b/talk/app/webrtc/objc/avfoundationvideocapturer.h
@@ -49,7 +49,7 @@
bool IsScreencast() const override {
return false;
}
- bool GetPreferredFourccs(std::vector<uint32>* fourccs) override {
+ bool GetPreferredFourccs(std::vector<uint32_t>* fourccs) override {
fourccs->push_back(cricket::FOURCC_NV12);
return true;
}
diff --git a/talk/app/webrtc/objc/avfoundationvideocapturer.mm b/talk/app/webrtc/objc/avfoundationvideocapturer.mm
index b4d7ee2..e1b0f88 100644
--- a/talk/app/webrtc/objc/avfoundationvideocapturer.mm
+++ b/talk/app/webrtc/objc/avfoundationvideocapturer.mm
@@ -415,13 +415,13 @@
uvPlaneAddress == yPlaneAddress + yPlaneHeight * yPlaneBytesPerRow);
// Stuff data into a cricket::CapturedFrame.
- int64 currentTime = rtc::TimeNanos();
+ int64_t currentTime = rtc::TimeNanos();
cricket::CapturedFrame frame;
frame.width = yPlaneWidth;
frame.height = yPlaneHeight;
frame.pixel_width = 1;
frame.pixel_height = 1;
- frame.fourcc = static_cast<uint32>(cricket::FOURCC_NV12);
+ frame.fourcc = static_cast<uint32_t>(cricket::FOURCC_NV12);
frame.time_stamp = currentTime;
frame.data = yPlaneAddress;
frame.data_size = frameSize;
diff --git a/talk/app/webrtc/peerconnection.cc b/talk/app/webrtc/peerconnection.cc
index bf9a80d..86902b0 100644
--- a/talk/app/webrtc/peerconnection.cc
+++ b/talk/app/webrtc/peerconnection.cc
@@ -862,13 +862,13 @@
void PeerConnection::OnAddRemoteAudioTrack(MediaStreamInterface* stream,
AudioTrackInterface* audio_track,
- uint32 ssrc) {
+ uint32_t ssrc) {
receivers_.push_back(new AudioRtpReceiver(audio_track, ssrc, session_.get()));
}
void PeerConnection::OnAddRemoteVideoTrack(MediaStreamInterface* stream,
VideoTrackInterface* video_track,
- uint32 ssrc) {
+ uint32_t ssrc) {
receivers_.push_back(new VideoRtpReceiver(video_track, ssrc, session_.get()));
}
@@ -902,14 +902,14 @@
void PeerConnection::OnAddLocalAudioTrack(MediaStreamInterface* stream,
AudioTrackInterface* audio_track,
- uint32 ssrc) {
+ uint32_t ssrc) {
senders_.push_back(new AudioRtpSender(audio_track, ssrc, session_.get()));
stats_->AddLocalAudioTrack(audio_track, ssrc);
}
void PeerConnection::OnAddLocalVideoTrack(MediaStreamInterface* stream,
VideoTrackInterface* video_track,
- uint32 ssrc) {
+ uint32_t ssrc) {
senders_.push_back(new VideoRtpSender(video_track, ssrc, session_.get()));
}
@@ -917,7 +917,7 @@
// description.
void PeerConnection::OnRemoveLocalAudioTrack(MediaStreamInterface* stream,
AudioTrackInterface* audio_track,
- uint32 ssrc) {
+ uint32_t ssrc) {
auto it = FindSenderForTrack(audio_track);
if (it == senders_.end()) {
LOG(LS_WARNING) << "RtpSender for track with id " << audio_track->id()
diff --git a/talk/app/webrtc/peerconnection.h b/talk/app/webrtc/peerconnection.h
index 8a87019..3d6ce1b 100644
--- a/talk/app/webrtc/peerconnection.h
+++ b/talk/app/webrtc/peerconnection.h
@@ -133,23 +133,23 @@
void OnAddDataChannel(DataChannelInterface* data_channel) override;
void OnAddRemoteAudioTrack(MediaStreamInterface* stream,
AudioTrackInterface* audio_track,
- uint32 ssrc) override;
+ uint32_t ssrc) override;
void OnAddRemoteVideoTrack(MediaStreamInterface* stream,
VideoTrackInterface* video_track,
- uint32 ssrc) override;
+ uint32_t ssrc) override;
void OnRemoveRemoteAudioTrack(MediaStreamInterface* stream,
AudioTrackInterface* audio_track) override;
void OnRemoveRemoteVideoTrack(MediaStreamInterface* stream,
VideoTrackInterface* video_track) override;
void OnAddLocalAudioTrack(MediaStreamInterface* stream,
AudioTrackInterface* audio_track,
- uint32 ssrc) override;
+ uint32_t ssrc) override;
void OnAddLocalVideoTrack(MediaStreamInterface* stream,
VideoTrackInterface* video_track,
- uint32 ssrc) override;
+ uint32_t ssrc) override;
void OnRemoveLocalAudioTrack(MediaStreamInterface* stream,
AudioTrackInterface* audio_track,
- uint32 ssrc) override;
+ uint32_t ssrc) override;
void OnRemoveLocalVideoTrack(MediaStreamInterface* stream,
VideoTrackInterface* video_track) override;
void OnRemoveLocalStream(MediaStreamInterface* stream) override;
diff --git a/talk/app/webrtc/peerconnectioninterface_unittest.cc b/talk/app/webrtc/peerconnectioninterface_unittest.cc
index 5e135df..8b7c9cf 100644
--- a/talk/app/webrtc/peerconnectioninterface_unittest.cc
+++ b/talk/app/webrtc/peerconnectioninterface_unittest.cc
@@ -58,7 +58,7 @@
static const char kTurnUsername[] = "user";
static const char kTurnPassword[] = "password";
static const char kTurnHostname[] = "turn.example.org";
-static const uint32 kTimeout = 10000U;
+static const uint32_t kTimeout = 10000U;
#define MAYBE_SKIP_TEST(feature) \
if (!(feature())) { \
diff --git a/talk/app/webrtc/remotevideocapturer.cc b/talk/app/webrtc/remotevideocapturer.cc
index 439e52c..b0c9f9f 100644
--- a/talk/app/webrtc/remotevideocapturer.cc
+++ b/talk/app/webrtc/remotevideocapturer.cc
@@ -65,7 +65,7 @@
return capture_state() == cricket::CS_RUNNING;
}
-bool RemoteVideoCapturer::GetPreferredFourccs(std::vector<uint32>* fourccs) {
+bool RemoteVideoCapturer::GetPreferredFourccs(std::vector<uint32_t>* fourccs) {
if (!fourccs)
return false;
fourccs->push_back(cricket::FOURCC_I420);
diff --git a/talk/app/webrtc/remotevideocapturer.h b/talk/app/webrtc/remotevideocapturer.h
index 1429ffb..b5298d9 100644
--- a/talk/app/webrtc/remotevideocapturer.h
+++ b/talk/app/webrtc/remotevideocapturer.h
@@ -51,7 +51,7 @@
const cricket::VideoFormat& capture_format) override;
void Stop() override;
bool IsRunning() override;
- bool GetPreferredFourccs(std::vector<uint32>* fourccs) override;
+ bool GetPreferredFourccs(std::vector<uint32_t>* fourccs) override;
bool GetBestCaptureFormat(const cricket::VideoFormat& desired,
cricket::VideoFormat* best_format) override;
bool IsScreencast() const override;
diff --git a/talk/app/webrtc/remotevideocapturer_unittest.cc b/talk/app/webrtc/remotevideocapturer_unittest.cc
index 2ba4a1a..88277b6 100644
--- a/talk/app/webrtc/remotevideocapturer_unittest.cc
+++ b/talk/app/webrtc/remotevideocapturer_unittest.cc
@@ -104,7 +104,7 @@
TEST_F(RemoteVideoCapturerTest, GetPreferredFourccs) {
EXPECT_FALSE(capturer_.GetPreferredFourccs(NULL));
- std::vector<uint32> fourccs;
+ std::vector<uint32_t> fourccs;
EXPECT_TRUE(capturer_.GetPreferredFourccs(&fourccs));
EXPECT_EQ(1u, fourccs.size());
EXPECT_EQ(cricket::FOURCC_I420, fourccs.at(0));
diff --git a/talk/app/webrtc/rtpreceiver.cc b/talk/app/webrtc/rtpreceiver.cc
index b8eca30..faf3de3 100644
--- a/talk/app/webrtc/rtpreceiver.cc
+++ b/talk/app/webrtc/rtpreceiver.cc
@@ -32,7 +32,7 @@
namespace webrtc {
AudioRtpReceiver::AudioRtpReceiver(AudioTrackInterface* track,
- uint32 ssrc,
+ uint32_t ssrc,
AudioProviderInterface* provider)
: id_(track->id()),
track_(track),
@@ -82,7 +82,7 @@
}
VideoRtpReceiver::VideoRtpReceiver(VideoTrackInterface* track,
- uint32 ssrc,
+ uint32_t ssrc,
VideoProviderInterface* provider)
: id_(track->id()), track_(track), ssrc_(ssrc), provider_(provider) {
provider_->SetVideoPlayout(ssrc_, true, track_->GetSource()->FrameInput());
diff --git a/talk/app/webrtc/rtpreceiver.h b/talk/app/webrtc/rtpreceiver.h
index f5bcb2e..a93ccbc 100644
--- a/talk/app/webrtc/rtpreceiver.h
+++ b/talk/app/webrtc/rtpreceiver.h
@@ -45,7 +45,7 @@
public rtc::RefCountedObject<RtpReceiverInterface> {
public:
AudioRtpReceiver(AudioTrackInterface* track,
- uint32 ssrc,
+ uint32_t ssrc,
AudioProviderInterface* provider);
virtual ~AudioRtpReceiver();
@@ -70,7 +70,7 @@
std::string id_;
rtc::scoped_refptr<AudioTrackInterface> track_;
- uint32 ssrc_;
+ uint32_t ssrc_;
AudioProviderInterface* provider_;
bool cached_track_enabled_;
};
@@ -78,7 +78,7 @@
class VideoRtpReceiver : public rtc::RefCountedObject<RtpReceiverInterface> {
public:
VideoRtpReceiver(VideoTrackInterface* track,
- uint32 ssrc,
+ uint32_t ssrc,
VideoProviderInterface* provider);
virtual ~VideoRtpReceiver();
@@ -95,7 +95,7 @@
private:
std::string id_;
rtc::scoped_refptr<VideoTrackInterface> track_;
- uint32 ssrc_;
+ uint32_t ssrc_;
VideoProviderInterface* provider_;
};
diff --git a/talk/app/webrtc/rtpsender.cc b/talk/app/webrtc/rtpsender.cc
index 28ba073..3a78f45 100644
--- a/talk/app/webrtc/rtpsender.cc
+++ b/talk/app/webrtc/rtpsender.cc
@@ -59,7 +59,7 @@
}
AudioRtpSender::AudioRtpSender(AudioTrackInterface* track,
- uint32 ssrc,
+ uint32_t ssrc,
AudioProviderInterface* provider)
: id_(track->id()),
track_(track),
@@ -136,7 +136,7 @@
}
VideoRtpSender::VideoRtpSender(VideoTrackInterface* track,
- uint32 ssrc,
+ uint32_t ssrc,
VideoProviderInterface* provider)
: id_(track->id()),
track_(track),
diff --git a/talk/app/webrtc/rtpsender.h b/talk/app/webrtc/rtpsender.h
index a0eae5d..3741909 100644
--- a/talk/app/webrtc/rtpsender.h
+++ b/talk/app/webrtc/rtpsender.h
@@ -71,7 +71,7 @@
public rtc::RefCountedObject<RtpSenderInterface> {
public:
AudioRtpSender(AudioTrackInterface* track,
- uint32 ssrc,
+ uint32_t ssrc,
AudioProviderInterface* provider);
virtual ~AudioRtpSender();
@@ -94,7 +94,7 @@
std::string id_;
rtc::scoped_refptr<AudioTrackInterface> track_;
- uint32 ssrc_;
+ uint32_t ssrc_;
AudioProviderInterface* provider_;
bool cached_track_enabled_;
@@ -107,7 +107,7 @@
public rtc::RefCountedObject<RtpSenderInterface> {
public:
VideoRtpSender(VideoTrackInterface* track,
- uint32 ssrc,
+ uint32_t ssrc,
VideoProviderInterface* provider);
virtual ~VideoRtpSender();
@@ -130,7 +130,7 @@
std::string id_;
rtc::scoped_refptr<VideoTrackInterface> track_;
- uint32 ssrc_;
+ uint32_t ssrc_;
VideoProviderInterface* provider_;
bool cached_track_enabled_;
};
diff --git a/talk/app/webrtc/rtpsenderreceiver_unittest.cc b/talk/app/webrtc/rtpsenderreceiver_unittest.cc
index 973b854..b69221b 100644
--- a/talk/app/webrtc/rtpsenderreceiver_unittest.cc
+++ b/talk/app/webrtc/rtpsenderreceiver_unittest.cc
@@ -47,8 +47,8 @@
static const char kStreamLabel1[] = "local_stream_1";
static const char kVideoTrackId[] = "video_1";
static const char kAudioTrackId[] = "audio_1";
-static const uint32 kVideoSsrc = 98;
-static const uint32 kAudioSsrc = 99;
+static const uint32_t kVideoSsrc = 98;
+static const uint32_t kAudioSsrc = 99;
namespace webrtc {
@@ -57,15 +57,15 @@
public:
virtual ~MockAudioProvider() {}
MOCK_METHOD3(SetAudioPlayout,
- void(uint32 ssrc,
+ void(uint32_t ssrc,
bool enable,
cricket::AudioRenderer* renderer));
MOCK_METHOD4(SetAudioSend,
- void(uint32 ssrc,
+ void(uint32_t ssrc,
bool enable,
const cricket::AudioOptions& options,
cricket::AudioRenderer* renderer));
- MOCK_METHOD2(SetAudioPlayoutVolume, void(uint32 ssrc, double volume));
+ MOCK_METHOD2(SetAudioPlayoutVolume, void(uint32_t ssrc, double volume));
};
// Helper class to test RtpSender/RtpReceiver.
@@ -73,13 +73,13 @@
public:
virtual ~MockVideoProvider() {}
MOCK_METHOD2(SetCaptureDevice,
- bool(uint32 ssrc, cricket::VideoCapturer* camera));
+ bool(uint32_t ssrc, cricket::VideoCapturer* camera));
MOCK_METHOD3(SetVideoPlayout,
- void(uint32 ssrc,
+ void(uint32_t ssrc,
bool enable,
cricket::VideoRenderer* renderer));
MOCK_METHOD3(SetVideoSend,
- void(uint32 ssrc,
+ void(uint32_t ssrc,
bool enable,
const cricket::VideoOptions* options));
};
diff --git a/talk/app/webrtc/sctputils.cc b/talk/app/webrtc/sctputils.cc
index 21174c3..a643837 100644
--- a/talk/app/webrtc/sctputils.cc
+++ b/talk/app/webrtc/sctputils.cc
@@ -36,8 +36,8 @@
// Format defined at
// http://tools.ietf.org/html/draft-ietf-rtcweb-data-protocol-01#section
-static const uint8 DATA_CHANNEL_OPEN_MESSAGE_TYPE = 0x03;
-static const uint8 DATA_CHANNEL_OPEN_ACK_MESSAGE_TYPE = 0x02;
+static const uint8_t DATA_CHANNEL_OPEN_MESSAGE_TYPE = 0x03;
+static const uint8_t DATA_CHANNEL_OPEN_ACK_MESSAGE_TYPE = 0x02;
enum DataChannelOpenMessageChannelType {
DCOMCT_ORDERED_RELIABLE = 0x00,
@@ -55,7 +55,7 @@
// http://tools.ietf.org/html/draft-jesup-rtcweb-data-protocol-04
rtc::ByteBuffer buffer(payload);
- uint8 message_type;
+ uint8_t message_type;
if (!buffer.ReadUInt8(&message_type)) {
LOG(LS_WARNING) << "Could not read OPEN message type.";
return false;
@@ -66,28 +66,28 @@
return false;
}
- uint8 channel_type;
+ uint8_t channel_type;
if (!buffer.ReadUInt8(&channel_type)) {
LOG(LS_WARNING) << "Could not read OPEN message channel type.";
return false;
}
- uint16 priority;
+ uint16_t priority;
if (!buffer.ReadUInt16(&priority)) {
LOG(LS_WARNING) << "Could not read OPEN message reliabilility prioirty.";
return false;
}
- uint32 reliability_param;
+ uint32_t reliability_param;
if (!buffer.ReadUInt32(&reliability_param)) {
LOG(LS_WARNING) << "Could not read OPEN message reliabilility param.";
return false;
}
- uint16 label_length;
+ uint16_t label_length;
if (!buffer.ReadUInt16(&label_length)) {
LOG(LS_WARNING) << "Could not read OPEN message label length.";
return false;
}
- uint16 protocol_length;
+ uint16_t protocol_length;
if (!buffer.ReadUInt16(&protocol_length)) {
LOG(LS_WARNING) << "Could not read OPEN message protocol length.";
return false;
@@ -126,7 +126,7 @@
bool ParseDataChannelOpenAckMessage(const rtc::Buffer& payload) {
rtc::ByteBuffer buffer(payload);
- uint8 message_type;
+ uint8_t message_type;
if (!buffer.ReadUInt8(&message_type)) {
LOG(LS_WARNING) << "Could not read OPEN_ACK message type.";
return false;
@@ -144,9 +144,9 @@
rtc::Buffer* payload) {
// Format defined at
// http://tools.ietf.org/html/draft-ietf-rtcweb-data-protocol-00#section-6.1
- uint8 channel_type = 0;
- uint32 reliability_param = 0;
- uint16 priority = 0;
+ uint8_t channel_type = 0;
+ uint32_t reliability_param = 0;
+ uint16_t priority = 0;
if (config.ordered) {
if (config.maxRetransmits > -1) {
channel_type = DCOMCT_ORDERED_PARTIAL_RTXS;
@@ -176,8 +176,8 @@
buffer.WriteUInt8(channel_type);
buffer.WriteUInt16(priority);
buffer.WriteUInt32(reliability_param);
- buffer.WriteUInt16(static_cast<uint16>(label.length()));
- buffer.WriteUInt16(static_cast<uint16>(config.protocol.length()));
+ buffer.WriteUInt16(static_cast<uint16_t>(label.length()));
+ buffer.WriteUInt16(static_cast<uint16_t>(config.protocol.length()));
buffer.WriteString(label);
buffer.WriteString(config.protocol);
payload->SetData(buffer.Data(), buffer.Length());
diff --git a/talk/app/webrtc/sctputils_unittest.cc b/talk/app/webrtc/sctputils_unittest.cc
index 164f6dd..e5f323a 100644
--- a/talk/app/webrtc/sctputils_unittest.cc
+++ b/talk/app/webrtc/sctputils_unittest.cc
@@ -34,12 +34,12 @@
void VerifyOpenMessageFormat(const rtc::Buffer& packet,
const std::string& label,
const webrtc::DataChannelInit& config) {
- uint8 message_type;
- uint8 channel_type;
- uint32 reliability;
- uint16 priority;
- uint16 label_length;
- uint16 protocol_length;
+ uint8_t message_type;
+ uint8_t channel_type;
+ uint32_t reliability;
+ uint16_t priority;
+ uint16_t label_length;
+ uint16_t protocol_length;
rtc::ByteBuffer buffer(packet.data(), packet.length());
ASSERT_TRUE(buffer.ReadUInt8(&message_type));
@@ -152,7 +152,7 @@
rtc::Buffer packet;
webrtc::WriteDataChannelOpenAckMessage(&packet);
- uint8 message_type;
+ uint8_t message_type;
rtc::ByteBuffer buffer(packet.data(), packet.length());
ASSERT_TRUE(buffer.ReadUInt8(&message_type));
EXPECT_EQ(0x02, message_type);
diff --git a/talk/app/webrtc/statscollector.cc b/talk/app/webrtc/statscollector.cc
index 70cc44d..5b527ec 100644
--- a/talk/app/webrtc/statscollector.cc
+++ b/talk/app/webrtc/statscollector.cc
@@ -66,7 +66,7 @@
typedef TypeForAdd<bool> BoolForAdd;
typedef TypeForAdd<float> FloatForAdd;
-typedef TypeForAdd<int64> Int64ForAdd;
+typedef TypeForAdd<int64_t> Int64ForAdd;
typedef TypeForAdd<int> IntForAdd;
StatsReport::Id GetTransportIdFromProxy(const cricket::ProxyTransportMap& map,
@@ -301,7 +301,7 @@
StatsCollector* collector,
StatsReport::Direction direction) {
for (const auto& d : data) {
- uint32 ssrc = d.ssrc();
+ uint32_t ssrc = d.ssrc();
// Each track can have stats for both local and remote objects.
// TODO(hta): Handle the case of multiple SSRCs per object.
StatsReport* report = collector->PrepareReport(true, ssrc, transport_id,
@@ -383,7 +383,7 @@
}
void StatsCollector::AddLocalAudioTrack(AudioTrackInterface* audio_track,
- uint32 ssrc) {
+ uint32_t ssrc) {
RTC_DCHECK(session_->signaling_thread()->IsCurrent());
RTC_DCHECK(audio_track != NULL);
#if (!defined(NDEBUG) || defined(DCHECK_ALWAYS_ON))
@@ -405,7 +405,7 @@
}
void StatsCollector::RemoveLocalAudioTrack(AudioTrackInterface* audio_track,
- uint32 ssrc) {
+ uint32_t ssrc) {
RTC_DCHECK(audio_track != NULL);
local_audio_tracks_.erase(std::remove_if(local_audio_tracks_.begin(),
local_audio_tracks_.end(),
@@ -482,16 +482,15 @@
}
}
-StatsReport* StatsCollector::PrepareReport(
- bool local,
- uint32 ssrc,
- const StatsReport::Id& transport_id,
- StatsReport::Direction direction) {
+StatsReport* StatsCollector::PrepareReport(bool local,
+ uint32_t ssrc,
+ const StatsReport::Id& transport_id,
+ StatsReport::Direction direction) {
RTC_DCHECK(session_->signaling_thread()->IsCurrent());
StatsReport::Id id(StatsReport::NewIdWithDirection(
- local ? StatsReport::kStatsReportTypeSsrc :
- StatsReport::kStatsReportTypeRemoteSsrc,
- rtc::ToString<uint32>(ssrc), direction));
+ local ? StatsReport::kStatsReportTypeSsrc
+ : StatsReport::kStatsReportTypeRemoteSsrc,
+ rtc::ToString<uint32_t>(ssrc), direction));
StatsReport* report = reports_.Find(id);
// Use the ID of the track that is currently mapped to the SSRC, if any.
@@ -861,10 +860,10 @@
// Loop through the existing local audio tracks.
for (const auto& it : local_audio_tracks_) {
AudioTrackInterface* track = it.first;
- uint32 ssrc = it.second;
- StatsReport* report = GetReport(StatsReport::kStatsReportTypeSsrc,
- rtc::ToString<uint32>(ssrc),
- StatsReport::kSend);
+ uint32_t ssrc = it.second;
+ StatsReport* report =
+ GetReport(StatsReport::kStatsReportTypeSsrc,
+ rtc::ToString<uint32_t>(ssrc), StatsReport::kSend);
if (report == NULL) {
// This can happen if a local audio track is added to a stream on the
// fly and the report has not been set up yet. Do nothing in this case.
@@ -905,7 +904,8 @@
stats.echo_delay_std_ms);
}
-bool StatsCollector::GetTrackIdBySsrc(uint32 ssrc, std::string* track_id,
+bool StatsCollector::GetTrackIdBySsrc(uint32_t ssrc,
+ std::string* track_id,
StatsReport::Direction direction) {
RTC_DCHECK(session_->signaling_thread()->IsCurrent());
if (direction == StatsReport::kSend) {
diff --git a/talk/app/webrtc/statscollector.h b/talk/app/webrtc/statscollector.h
index 99130a3..add26c6 100644
--- a/talk/app/webrtc/statscollector.h
+++ b/talk/app/webrtc/statscollector.h
@@ -67,11 +67,11 @@
void AddStream(MediaStreamInterface* stream);
// Adds a local audio track that is used for getting some voice statistics.
- void AddLocalAudioTrack(AudioTrackInterface* audio_track, uint32 ssrc);
+ void AddLocalAudioTrack(AudioTrackInterface* audio_track, uint32_t ssrc);
// Removes a local audio tracks that is used for getting some voice
// statistics.
- void RemoveLocalAudioTrack(AudioTrackInterface* audio_track, uint32 ssrc);
+ void RemoveLocalAudioTrack(AudioTrackInterface* audio_track, uint32_t ssrc);
// Gather statistics from the session and store them for future use.
void UpdateStats(PeerConnectionInterface::StatsOutputLevel level);
@@ -89,8 +89,10 @@
// Prepare a local or remote SSRC report for the given ssrc. Used internally
// in the ExtractStatsFromList template.
- StatsReport* PrepareReport(bool local, uint32 ssrc,
- const StatsReport::Id& transport_id, StatsReport::Direction direction);
+ StatsReport* PrepareReport(bool local,
+ uint32_t ssrc,
+ const StatsReport::Id& transport_id,
+ StatsReport::Direction direction);
// Method used by the unittest to force a update of stats since UpdateStats()
// that occur less than kMinGatherStatsPeriod number of ms apart will be
@@ -139,7 +141,8 @@
// Helper method to get the id for the track identified by ssrc.
// |direction| tells if the track is for sending or receiving.
- bool GetTrackIdBySsrc(uint32 ssrc, std::string* track_id,
+ bool GetTrackIdBySsrc(uint32_t ssrc,
+ std::string* track_id,
StatsReport::Direction direction);
// Helper method to update the timestamp of track records.
@@ -155,7 +158,7 @@
// TODO(tommi): We appear to be holding on to raw pointers to reference
// counted objects? We should be using scoped_refptr here.
- typedef std::vector<std::pair<AudioTrackInterface*, uint32> >
+ typedef std::vector<std::pair<AudioTrackInterface*, uint32_t> >
LocalAudioTrackVector;
LocalAudioTrackVector local_audio_tracks_;
};
diff --git a/talk/app/webrtc/statscollector_unittest.cc b/talk/app/webrtc/statscollector_unittest.cc
index 34a1c90..21f9df8 100644
--- a/talk/app/webrtc/statscollector_unittest.cc
+++ b/talk/app/webrtc/statscollector_unittest.cc
@@ -78,7 +78,7 @@
// Constant names for track identification.
const char kLocalTrackId[] = "local_track_id";
const char kRemoteTrackId[] = "remote_track_id";
-const uint32 kSsrcOfTrack = 1234;
+const uint32_t kSsrcOfTrack = 1234;
class MockWebRtcSession : public webrtc::WebRtcSession {
public:
@@ -91,8 +91,8 @@
MOCK_CONST_METHOD0(mediastream_signaling, const MediaStreamSignaling*());
// Libjingle uses "local" for a outgoing track, and "remote" for a incoming
// track.
- MOCK_METHOD2(GetLocalTrackIdBySsrc, bool(uint32, std::string*));
- MOCK_METHOD2(GetRemoteTrackIdBySsrc, bool(uint32, std::string*));
+ MOCK_METHOD2(GetLocalTrackIdBySsrc, bool(uint32_t, std::string*));
+ MOCK_METHOD2(GetRemoteTrackIdBySsrc, bool(uint32_t, std::string*));
MOCK_METHOD1(GetTransportStats, bool(cricket::SessionStats*));
MOCK_METHOD2(GetLocalCertificate,
bool(const std::string& transport_name,
@@ -301,7 +301,7 @@
EXPECT_EQ(rtc::ToString<int>(info.audio_level), value_in_report);
EXPECT_TRUE(GetValue(
report, StatsReport::kStatsValueNameBytesReceived, &value_in_report));
- EXPECT_EQ(rtc::ToString<int64>(info.bytes_rcvd), value_in_report);
+ EXPECT_EQ(rtc::ToString<int64_t>(info.bytes_rcvd), value_in_report);
EXPECT_TRUE(GetValue(
report, StatsReport::kStatsValueNameJitterReceived, &value_in_report));
EXPECT_EQ(rtc::ToString<int>(info.jitter_ms), value_in_report);
@@ -367,7 +367,7 @@
EXPECT_EQ(sinfo.codec_name, value_in_report);
EXPECT_TRUE(GetValue(
report, StatsReport::kStatsValueNameBytesSent, &value_in_report));
- EXPECT_EQ(rtc::ToString<int64>(sinfo.bytes_sent), value_in_report);
+ EXPECT_EQ(rtc::ToString<int64_t>(sinfo.bytes_sent), value_in_report);
EXPECT_TRUE(GetValue(
report, StatsReport::kStatsValueNamePacketsSent, &value_in_report));
EXPECT_EQ(rtc::ToString<int>(sinfo.packets_sent), value_in_report);
@@ -610,7 +610,7 @@
EXPECT_EQ(audio_track->id(), track_id);
std::string ssrc_id = ExtractSsrcStatsValue(
*reports, StatsReport::kStatsValueNameSsrc);
- EXPECT_EQ(rtc::ToString<uint32>(kSsrcOfTrack), ssrc_id);
+ EXPECT_EQ(rtc::ToString<uint32_t>(kSsrcOfTrack), ssrc_id);
// Verifies the values in the track report.
if (voice_sender_info) {
@@ -633,7 +633,7 @@
EXPECT_EQ(audio_track->id(), track_id);
ssrc_id = ExtractSsrcStatsValue(track_reports,
StatsReport::kStatsValueNameSsrc);
- EXPECT_EQ(rtc::ToString<uint32>(kSsrcOfTrack), ssrc_id);
+ EXPECT_EQ(rtc::ToString<uint32_t>(kSsrcOfTrack), ssrc_id);
if (voice_sender_info)
VerifyVoiceSenderInfoReport(track_report, *voice_sender_info);
if (voice_receiver_info)
@@ -775,9 +775,8 @@
EXPECT_EQ(label, ExtractStatsValue(StatsReport::kStatsReportTypeDataChannel,
reports,
StatsReport::kStatsValueNameLabel));
- EXPECT_EQ(rtc::ToString<int64>(id),
- ExtractStatsValue(StatsReport::kStatsReportTypeDataChannel,
- reports,
+ EXPECT_EQ(rtc::ToString<int64_t>(id),
+ ExtractStatsValue(StatsReport::kStatsReportTypeDataChannel, reports,
StatsReport::kStatsValueNameDataChannelId));
EXPECT_EQ(state, ExtractStatsValue(StatsReport::kStatsReportTypeDataChannel,
reports,
@@ -810,7 +809,7 @@
cricket::VideoSenderInfo video_sender_info;
cricket::VideoMediaInfo stats_read;
// The number of bytes must be larger than 0xFFFFFFFF for this test.
- const int64 kBytesSent = 12345678901234LL;
+ const int64_t kBytesSent = 12345678901234LL;
const std::string kBytesSentString("12345678901234");
AddOutgoingVideoTrackStats();
@@ -858,7 +857,7 @@
cricket::VideoMediaInfo stats_read;
// Set up an SSRC just to test that we get both kinds of stats back: SSRC and
// BWE.
- const int64 kBytesSent = 12345678901234LL;
+ const int64_t kBytesSent = 12345678901234LL;
const std::string kBytesSentString("12345678901234");
AddOutgoingVideoTrackStats();
@@ -973,7 +972,7 @@
// Constructs an ssrc stats update.
cricket::VideoSenderInfo video_sender_info;
cricket::VideoMediaInfo stats_read;
- const int64 kBytesSent = 12345678901234LL;
+ const int64_t kBytesSent = 12345678901234LL;
// Construct a stats value to read.
video_sender_info.add_ssrc(1234);
@@ -1009,7 +1008,7 @@
std::string ssrc_id = ExtractSsrcStatsValue(
reports, StatsReport::kStatsValueNameSsrc);
- EXPECT_EQ(rtc::ToString<uint32>(kSsrcOfTrack), ssrc_id);
+ EXPECT_EQ(rtc::ToString<uint32_t>(kSsrcOfTrack), ssrc_id);
std::string track_id = ExtractSsrcStatsValue(
reports, StatsReport::kStatsValueNameTrackId);
@@ -1037,7 +1036,7 @@
// Constructs an ssrc stats update.
cricket::VideoSenderInfo video_sender_info;
cricket::VideoMediaInfo stats_read;
- const int64 kBytesSent = 12345678901234LL;
+ const int64_t kBytesSent = 12345678901234LL;
// Construct a stats value to read.
video_sender_info.add_ssrc(1234);
@@ -1179,7 +1178,7 @@
// Constructs an ssrc stats update.
cricket::VideoReceiverInfo video_receiver_info;
cricket::VideoMediaInfo stats_read;
- const int64 kNumOfPacketsConcealed = 54321;
+ const int64_t kNumOfPacketsConcealed = 54321;
// Construct a stats value to read.
video_receiver_info.add_ssrc(1234);
@@ -1205,7 +1204,7 @@
std::string ssrc_id = ExtractSsrcStatsValue(
reports, StatsReport::kStatsValueNameSsrc);
- EXPECT_EQ(rtc::ToString<uint32>(kSsrcOfTrack), ssrc_id);
+ EXPECT_EQ(rtc::ToString<uint32_t>(kSsrcOfTrack), ssrc_id);
std::string track_id = ExtractSsrcStatsValue(
reports, StatsReport::kStatsValueNameTrackId);
@@ -1227,7 +1226,7 @@
rtc::SocketAddress local_address(local_ip, local_port);
rtc::SocketAddress remote_address(remote_ip, remote_port);
rtc::AdapterType network_type = rtc::ADAPTER_TYPE_ETHERNET;
- uint32 priority = 1000;
+ uint32_t priority = 1000;
cricket::Candidate c;
ASSERT(c.id().length() > 0);
@@ -1590,7 +1589,7 @@
EXPECT_EQ(kLocalTrackId, track_id);
std::string ssrc_id = ExtractSsrcStatsValue(
reports, StatsReport::kStatsValueNameSsrc);
- EXPECT_EQ(rtc::ToString<uint32>(kSsrcOfTrack), ssrc_id);
+ EXPECT_EQ(rtc::ToString<uint32_t>(kSsrcOfTrack), ssrc_id);
// Verifies the values in the track report, no value will be changed by the
// AudioTrackInterface::GetSignalValue() and
diff --git a/talk/app/webrtc/statstypes.cc b/talk/app/webrtc/statstypes.cc
index 51ec7fd..e45833c 100644
--- a/talk/app/webrtc/statstypes.cc
+++ b/talk/app/webrtc/statstypes.cc
@@ -229,7 +229,7 @@
return other.type_ == type_;
}
-StatsReport::Value::Value(StatsValueName name, int64 value, Type int_type)
+StatsReport::Value::Value(StatsValueName name, int64_t value, Type int_type)
: name(name), type_(int_type) {
RTC_DCHECK(type_ == kInt || type_ == kInt64);
type_ == kInt ? value_.int_ = static_cast<int>(value) : value_.int64_ = value;
@@ -331,7 +331,7 @@
return value == value_.static_string_;
}
-bool StatsReport::Value::operator==(int64 value) const {
+bool StatsReport::Value::operator==(int64_t value) const {
return type_ == kInt ? value_.int_ == static_cast<int>(value) :
(type_ == kInt64 ? value_.int64_ == value : false);
}
@@ -353,7 +353,7 @@
return value_.int_;
}
-int64 StatsReport::Value::int64_val() const {
+int64_t StatsReport::Value::int64_val() const {
RTC_DCHECK(type_ == kInt64);
return value_.int64_;
}
@@ -682,7 +682,7 @@
values_[name] = ValuePtr(new Value(name, value));
}
-void StatsReport::AddInt64(StatsReport::StatsValueName name, int64 value) {
+void StatsReport::AddInt64(StatsReport::StatsValueName name, int64_t value) {
const Value* found = FindValue(name);
if (!found || !(*found == value))
values_[name] = ValuePtr(new Value(name, value, Value::kInt64));
@@ -690,7 +690,7 @@
void StatsReport::AddInt(StatsReport::StatsValueName name, int value) {
const Value* found = FindValue(name);
- if (!found || !(*found == static_cast<int64>(value)))
+ if (!found || !(*found == static_cast<int64_t>(value)))
values_[name] = ValuePtr(new Value(name, value, Value::kInt));
}
diff --git a/talk/app/webrtc/statstypes.h b/talk/app/webrtc/statstypes.h
index 33b2fa7..7fa9f32 100644
--- a/talk/app/webrtc/statstypes.h
+++ b/talk/app/webrtc/statstypes.h
@@ -250,16 +250,16 @@
struct Value {
enum Type {
- kInt, // int.
- kInt64, // int64.
- kFloat, // float.
- kString, // std::string
+ kInt, // int.
+ kInt64, // int64_t.
+ kFloat, // float.
+ kString, // std::string
kStaticString, // const char*.
- kBool, // bool.
- kId, // Id.
+ kBool, // bool.
+ kId, // Id.
};
- Value(StatsValueName name, int64 value, Type int_type);
+ Value(StatsValueName name, int64_t value, Type int_type);
Value(StatsValueName name, float f);
Value(StatsValueName name, const std::string& value);
Value(StatsValueName name, const char* value);
@@ -281,7 +281,7 @@
// kString and kStaticString too.
bool operator==(const std::string& value) const;
bool operator==(const char* value) const;
- bool operator==(int64 value) const;
+ bool operator==(int64_t value) const;
bool operator==(bool value) const;
bool operator==(float value) const;
bool operator==(const Id& value) const;
@@ -289,7 +289,7 @@
// Getters that allow getting the native value directly.
// The caller must know the type beforehand or else hit a check.
int int_val() const;
- int64 int64_val() const;
+ int64_t int64_val() const;
float float_val() const;
const char* static_string_val() const;
const std::string& string_val() const;
@@ -312,7 +312,7 @@
// TODO(tommi): Use C++ 11 union and make value_ const.
union InternalType {
int int_;
- int64 int64_;
+ int64_t int64_;
float float_;
bool bool_;
std::string* string_;
@@ -355,7 +355,7 @@
void AddString(StatsValueName name, const std::string& value);
void AddString(StatsValueName name, const char* value);
- void AddInt64(StatsValueName name, int64 value);
+ void AddInt64(StatsValueName name, int64_t value);
void AddInt(StatsValueName name, int value);
void AddFloat(StatsValueName name, float value);
void AddBoolean(StatsValueName name, bool value);
diff --git a/talk/app/webrtc/test/fakeaudiocapturemodule.cc b/talk/app/webrtc/test/fakeaudiocapturemodule.cc
index 32f9c84..3564d28 100644
--- a/talk/app/webrtc/test/fakeaudiocapturemodule.cc
+++ b/talk/app/webrtc/test/fakeaudiocapturemodule.cc
@@ -40,7 +40,7 @@
// Same value as src/modules/audio_device/main/source/audio_device_config.h in
// https://code.google.com/p/webrtc/
-static const uint32 kAdmMaxIdleTimeProcess = 1000;
+static const uint32_t kAdmMaxIdleTimeProcess = 1000;
// Constants here are derived by running VoE using a real ADM.
// The constants correspond to 10ms of mono audio at 44kHz.
@@ -90,12 +90,12 @@
}
int64_t FakeAudioCaptureModule::TimeUntilNextProcess() {
- const uint32 current_time = rtc::Time();
+ const uint32_t current_time = rtc::Time();
if (current_time < last_process_time_ms_) {
// TODO: wraparound could be handled more gracefully.
return 0;
}
- const uint32 elapsed_time = current_time - last_process_time_ms_;
+ const uint32_t elapsed_time = current_time - last_process_time_ms_;
if (kAdmMaxIdleTimeProcess < elapsed_time) {
return 0;
}
@@ -684,9 +684,9 @@
}
next_frame_time_ += kTimePerFrameMs;
- const uint32 current_time = rtc::Time();
- const uint32 wait_time = (next_frame_time_ > current_time) ?
- next_frame_time_ - current_time : 0;
+ const uint32_t current_time = rtc::Time();
+ const uint32_t wait_time =
+ (next_frame_time_ > current_time) ? next_frame_time_ - current_time : 0;
process_thread_->PostDelayed(wait_time, this, MSG_RUN_PROCESS);
}
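The TimeUntilNextProcess() hunk above returns 0 when the millisecond clock has wrapped (see the TODO about handling wraparound more gracefully). As a side note, one reason the uint32_t types help here is that unsigned subtraction is well defined modulo 2^32, so a single wrap still yields the right elapsed time. A small sketch of that property, not part of the patch and not what the module currently does:

  #include <cassert>
  #include <cstdint>

  // With uint32_t, (now - then) wraps instead of overflowing, so one wrap of
  // the millisecond clock still produces the correct elapsed interval.
  uint32_t ElapsedMs(uint32_t now_ms, uint32_t then_ms) {
    return now_ms - then_ms;
  }

  int main() {
    // Clock wrapped: "now" is numerically smaller than "then".
    const uint32_t then_ms = 0xFFFFFFF0u;
    const uint32_t now_ms = 0x00000010u;
    assert(ElapsedMs(now_ms, then_ms) == 0x20u);  // 16 + 16 ms elapsed.
  }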
diff --git a/talk/app/webrtc/test/fakeaudiocapturemodule.h b/talk/app/webrtc/test/fakeaudiocapturemodule.h
index 65a03c8..4284b9e 100644
--- a/talk/app/webrtc/test/fakeaudiocapturemodule.h
+++ b/talk/app/webrtc/test/fakeaudiocapturemodule.h
@@ -53,7 +53,7 @@
: public webrtc::AudioDeviceModule,
public rtc::MessageHandler {
public:
- typedef uint16 Sample;
+ typedef uint16_t Sample;
// The value for the following constants have been derived by running VoE
// using a real ADM. The constants correspond to 10ms of mono audio at 44kHz.
@@ -242,7 +242,7 @@
// The time in milliseconds when Process() was last called or 0 if no call
// has been made.
- uint32 last_process_time_ms_;
+ uint32_t last_process_time_ms_;
// Callback for playout and recording.
webrtc::AudioTransport* audio_callback_;
@@ -262,7 +262,7 @@
// wall clock time the next frame should be generated and received. started_
// ensures that next_frame_time_ can be initialized properly on first call.
bool started_;
- uint32 next_frame_time_;
+ uint32_t next_frame_time_;
rtc::scoped_ptr<rtc::Thread> process_thread_;
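The header's comment derives its constants from "10ms of mono audio at 44kHz" with 16-bit samples (the Sample typedef above). A quick back-of-the-envelope check of that arithmetic, using ad hoc names rather than the module's actual constants:

  #include <cassert>
  #include <cstddef>
  #include <cstdint>

  // 44 kHz mono, 10 ms per callback, 16-bit samples:
  // 44000 * 10 / 1000 = 440 samples, 440 * 2 = 880 bytes per frame.
  int main() {
    typedef uint16_t Sample;
    const int kSamplesPerSecond = 44000;
    const int kFrameDurationMs = 10;
    const int kSamplesPerFrame = kSamplesPerSecond * kFrameDurationMs / 1000;
    const size_t kBytesPerFrame = kSamplesPerFrame * sizeof(Sample);
    assert(kSamplesPerFrame == 440);
    assert(kBytesPerFrame == 880);
  }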
diff --git a/talk/app/webrtc/test/fakedatachannelprovider.h b/talk/app/webrtc/test/fakedatachannelprovider.h
index eb86873..9a8352e 100644
--- a/talk/app/webrtc/test/fakedatachannelprovider.h
+++ b/talk/app/webrtc/test/fakedatachannelprovider.h
@@ -137,11 +137,11 @@
return connected_channels_.find(data_channel) != connected_channels_.end();
}
- bool IsSendStreamAdded(uint32 stream) const {
+ bool IsSendStreamAdded(uint32_t stream) const {
return send_ssrcs_.find(stream) != send_ssrcs_.end();
}
- bool IsRecvStreamAdded(uint32 stream) const {
+ bool IsRecvStreamAdded(uint32_t stream) const {
return recv_ssrcs_.find(stream) != recv_ssrcs_.end();
}
@@ -152,6 +152,6 @@
bool ready_to_send_;
bool transport_error_;
std::set<webrtc::DataChannel*> connected_channels_;
- std::set<uint32> send_ssrcs_;
- std::set<uint32> recv_ssrcs_;
+ std::set<uint32_t> send_ssrcs_;
+ std::set<uint32_t> recv_ssrcs_;
};
diff --git a/talk/app/webrtc/test/fakemediastreamsignaling.h b/talk/app/webrtc/test/fakemediastreamsignaling.h
index c98a24d..562c4ad 100644
--- a/talk/app/webrtc/test/fakemediastreamsignaling.h
+++ b/talk/app/webrtc/test/fakemediastreamsignaling.h
@@ -90,16 +90,16 @@
virtual void OnAddDataChannel(webrtc::DataChannelInterface* data_channel) {}
virtual void OnAddLocalAudioTrack(webrtc::MediaStreamInterface* stream,
webrtc::AudioTrackInterface* audio_track,
- uint32 ssrc) {}
+ uint32_t ssrc) {}
virtual void OnAddLocalVideoTrack(webrtc::MediaStreamInterface* stream,
webrtc::VideoTrackInterface* video_track,
- uint32 ssrc) {}
+ uint32_t ssrc) {}
virtual void OnAddRemoteAudioTrack(webrtc::MediaStreamInterface* stream,
webrtc::AudioTrackInterface* audio_track,
- uint32 ssrc) {}
+ uint32_t ssrc) {}
virtual void OnAddRemoteVideoTrack(webrtc::MediaStreamInterface* stream,
webrtc::VideoTrackInterface* video_track,
- uint32 ssrc) {}
+ uint32_t ssrc) {}
virtual void OnRemoveRemoteAudioTrack(
webrtc::MediaStreamInterface* stream,
webrtc::AudioTrackInterface* audio_track) {}
@@ -108,7 +108,7 @@
webrtc::VideoTrackInterface* video_track) {}
virtual void OnRemoveLocalAudioTrack(webrtc::MediaStreamInterface* stream,
webrtc::AudioTrackInterface* audio_track,
- uint32 ssrc) {}
+ uint32_t ssrc) {}
virtual void OnRemoveLocalVideoTrack(
webrtc::MediaStreamInterface* stream,
webrtc::VideoTrackInterface* video_track) {}
diff --git a/talk/app/webrtc/test/mockpeerconnectionobservers.h b/talk/app/webrtc/test/mockpeerconnectionobservers.h
index d2697b4..f1bdbee 100644
--- a/talk/app/webrtc/test/mockpeerconnectionobservers.h
+++ b/talk/app/webrtc/test/mockpeerconnectionobservers.h
@@ -98,7 +98,7 @@
channel_->UnregisterObserver();
}
- void OnBufferedAmountChange(uint64 previous_amount) override {}
+ void OnBufferedAmountChange(uint64_t previous_amount) override {}
void OnStateChange() override { state_ = channel_->state(); }
void OnMessage(const DataBuffer& buffer) override {
diff --git a/talk/app/webrtc/videosource.cc b/talk/app/webrtc/videosource.cc
index af5f628..b33f5f9 100644
--- a/talk/app/webrtc/videosource.cc
+++ b/talk/app/webrtc/videosource.cc
@@ -250,10 +250,10 @@
std::vector<cricket::VideoFormat>::const_iterator it = formats.begin();
std::vector<cricket::VideoFormat>::const_iterator best_it = formats.begin();
int best_diff_area = std::abs(default_area - it->width * it->height);
- int64 best_diff_interval = kDefaultFormat.interval;
+ int64_t best_diff_interval = kDefaultFormat.interval;
for (; it != formats.end(); ++it) {
int diff_area = std::abs(default_area - it->width * it->height);
- int64 diff_interval = std::abs(kDefaultFormat.interval - it->interval);
+ int64_t diff_interval = std::abs(kDefaultFormat.interval - it->interval);
if (diff_area < best_diff_area ||
(diff_area == best_diff_area && diff_interval < best_diff_interval)) {
best_diff_area = diff_area;
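The videosource.cc hunk above is part of a nearest-format search: prefer the format whose pixel area is closest to the default, and break ties on the smallest difference in frame interval (now an int64_t). A simplified, standalone restatement of that rule follows; it is not line-for-line the WebRTC code, and the types and names are reduced for illustration.

  #include <cassert>
  #include <cstdint>
  #include <cstdlib>
  #include <vector>

  struct Format {
    int width;
    int height;
    int64_t interval;  // Nanoseconds per frame, as in cricket::VideoFormat.
  };

  // Returns the index of the format closest to the requested area, with the
  // frame-interval difference as the tie breaker.
  size_t ClosestFormat(const std::vector<Format>& formats,
                       int default_area,
                       int64_t default_interval) {
    size_t best = 0;
    int best_diff_area =
        std::abs(default_area - formats[0].width * formats[0].height);
    int64_t best_diff_interval =
        std::abs(default_interval - formats[0].interval);
    for (size_t i = 1; i < formats.size(); ++i) {
      int diff_area =
          std::abs(default_area - formats[i].width * formats[i].height);
      int64_t diff_interval = std::abs(default_interval - formats[i].interval);
      if (diff_area < best_diff_area ||
          (diff_area == best_diff_area && diff_interval < best_diff_interval)) {
        best = i;
        best_diff_area = diff_area;
        best_diff_interval = diff_interval;
      }
    }
    return best;
  }

  int main() {
    const std::vector<Format> formats = {
        {320, 240, 66666666}, {640, 480, 33333333}, {1280, 720, 33333333}};
    // Asking for 640x480 at 30 fps picks the middle entry.
    assert(ClosestFormat(formats, 640 * 480, 33333333) == 1);
  }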
diff --git a/talk/app/webrtc/webrtcsdp.cc b/talk/app/webrtc/webrtcsdp.cc
index 28d4e9e..9618335 100644
--- a/talk/app/webrtc/webrtcsdp.cc
+++ b/talk/app/webrtc/webrtcsdp.cc
@@ -231,7 +231,7 @@
// Create random string (which will be used as track label later)?
msid_appdata(rtc::CreateRandomString(8)) {
}
- uint32 ssrc_id;
+ uint32_t ssrc_id;
std::string cname;
std::string msid_identifier;
std::string msid_appdata;
@@ -525,8 +525,10 @@
return (line.compare(kLinePrefixLength, attribute.size(), attribute) == 0);
}
-static bool AddSsrcLine(uint32 ssrc_id, const std::string& attribute,
- const std::string& value, std::string* message) {
+static bool AddSsrcLine(uint32_t ssrc_id,
+ const std::string& attribute,
+ const std::string& value,
+ std::string* message) {
// RFC 5576
// a=ssrc:<ssrc-id> <attribute>:<value>
std::ostringstream os;
@@ -1004,7 +1006,7 @@
return false;
}
const std::string& transport = fields[2];
- uint32 priority = 0;
+ uint32_t priority = 0;
if (!GetValueFromString(first_line, fields[3], &priority, error)) {
return false;
}
@@ -1078,7 +1080,7 @@
// kept for backwards compatibility.
std::string username;
std::string password;
- uint32 generation = 0;
+ uint32_t generation = 0;
for (size_t i = current_position; i + 1 < fields.size(); ++i) {
// RFC 5245
// *(SP extension-att-name SP extension-att-value)
@@ -1441,16 +1443,16 @@
std::ostringstream os;
InitAttrLine(kAttributeSsrcGroup, &os);
os << kSdpDelimiterColon << track->ssrc_groups[i].semantics;
- std::vector<uint32>::const_iterator ssrc =
+ std::vector<uint32_t>::const_iterator ssrc =
track->ssrc_groups[i].ssrcs.begin();
for (; ssrc != track->ssrc_groups[i].ssrcs.end(); ++ssrc) {
- os << kSdpDelimiterSpace << rtc::ToString<uint32>(*ssrc);
+ os << kSdpDelimiterSpace << rtc::ToString<uint32_t>(*ssrc);
}
AddLine(os.str(), message);
}
// Build the ssrc lines for each ssrc.
for (size_t i = 0; i < track->ssrcs.size(); ++i) {
- uint32 ssrc = track->ssrcs[i];
+ uint32_t ssrc = track->ssrcs[i];
// RFC 5576
// a=ssrc:<ssrc-id> cname:<value>
AddSsrcLine(ssrc, kSsrcAttributeCname,
@@ -2634,7 +2636,7 @@
if (ssrc_group->ssrcs.empty()) {
continue;
}
- uint32 ssrc = ssrc_group->ssrcs.front();
+ uint32_t ssrc = ssrc_group->ssrcs.front();
for (StreamParamsVec::iterator track = tracks.begin();
track != tracks.end(); ++track) {
if (track->has_ssrc(ssrc)) {
@@ -2706,7 +2708,7 @@
if (!GetValue(field1, kAttributeSsrc, &ssrc_id_s, error)) {
return false;
}
- uint32 ssrc_id = 0;
+ uint32_t ssrc_id = 0;
if (!GetValueFromString(line, ssrc_id_s, &ssrc_id, error)) {
return false;
}
@@ -2783,9 +2785,9 @@
if (!GetValue(fields[0], kAttributeSsrcGroup, &semantics, error)) {
return false;
}
- std::vector<uint32> ssrcs;
+ std::vector<uint32_t> ssrcs;
for (size_t i = 1; i < fields.size(); ++i) {
- uint32 ssrc = 0;
+ uint32_t ssrc = 0;
if (!GetValueFromString(line, fields[i], &ssrc, error)) {
return false;
}
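AddSsrcLine in the hunk above serializes the RFC 5576 "a=ssrc:<ssrc-id> <attribute>:<value>" shape, with the ssrc-id now carried as uint32_t end to end. A stripped-down sketch of that serialization, using ad hoc names rather than WebRTC's helpers:

  #include <cstdint>
  #include <iostream>
  #include <sstream>
  #include <string>

  // Builds one "a=ssrc:<ssrc-id> <attribute>:<value>" line (RFC 5576).
  std::string BuildSsrcLine(uint32_t ssrc_id,
                            const std::string& attribute,
                            const std::string& value) {
    std::ostringstream os;
    os << "a=ssrc:" << ssrc_id << ' ' << attribute << ':' << value;
    return os.str();
  }

  int main() {
    // Prints "a=ssrc:1 cname:stream_1_cname", matching the unittest fixtures.
    std::cout << BuildSsrcLine(1, "cname", "stream_1_cname") << '\n';
  }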
diff --git a/talk/app/webrtc/webrtcsdp_unittest.cc b/talk/app/webrtc/webrtcsdp_unittest.cc
index a534c23..a972daa 100644
--- a/talk/app/webrtc/webrtcsdp_unittest.cc
+++ b/talk/app/webrtc/webrtcsdp_unittest.cc
@@ -75,9 +75,9 @@
typedef std::vector<AudioCodec> AudioCodecs;
typedef std::vector<Candidate> Candidates;
-static const uint32 kDefaultSctpPort = 5000;
+static const uint32_t kDefaultSctpPort = 5000;
static const char kSessionTime[] = "t=0 0\r\n";
-static const uint32 kCandidatePriority = 2130706432U; // pref = 1.0
+static const uint32_t kCandidatePriority = 2130706432U; // pref = 1.0
static const char kCandidateUfragVoice[] = "ufrag_voice";
static const char kCandidatePwdVoice[] = "pwd_voice";
static const char kAttributeIcePwdVoice[] = "a=ice-pwd:pwd_voice\r\n";
@@ -86,7 +86,7 @@
static const char kCandidateUfragData[] = "ufrag_data";
static const char kCandidatePwdData[] = "pwd_data";
static const char kAttributeIcePwdVideo[] = "a=ice-pwd:pwd_video\r\n";
-static const uint32 kCandidateGeneration = 2;
+static const uint32_t kCandidateGeneration = 2;
static const char kCandidateFoundation1[] = "a0+B/1";
static const char kCandidateFoundation2[] = "a0+B/2";
static const char kCandidateFoundation3[] = "a0+B/3";
@@ -107,11 +107,9 @@
static const char kExtmapWithDirectionAndAttribute[] =
"a=extmap:1/sendrecv http://example.com/082005/ext.htm#ttime a1 a2\r\n";
-static const uint8 kIdentityDigest[] = {0x4A, 0xAD, 0xB9, 0xB1,
- 0x3F, 0x82, 0x18, 0x3B,
- 0x54, 0x02, 0x12, 0xDF,
- 0x3E, 0x5D, 0x49, 0x6B,
- 0x19, 0xE5, 0x7C, 0xAB};
+static const uint8_t kIdentityDigest[] = {
+ 0x4A, 0xAD, 0xB9, 0xB1, 0x3F, 0x82, 0x18, 0x3B, 0x54, 0x02,
+ 0x12, 0xDF, 0x3E, 0x5D, 0x49, 0x6B, 0x19, 0xE5, 0x7C, 0xAB};
static const char kDtlsSctp[] = "DTLS/SCTP";
static const char kUdpDtlsSctp[] = "UDP/DTLS/SCTP";
@@ -409,26 +407,26 @@
static const char kStreamLabel1[] = "local_stream_1";
static const char kStream1Cname[] = "stream_1_cname";
static const char kAudioTrackId1[] = "audio_track_id_1";
-static const uint32 kAudioTrack1Ssrc = 1;
+static const uint32_t kAudioTrack1Ssrc = 1;
static const char kVideoTrackId1[] = "video_track_id_1";
-static const uint32 kVideoTrack1Ssrc = 2;
+static const uint32_t kVideoTrack1Ssrc = 2;
static const char kVideoTrackId2[] = "video_track_id_2";
-static const uint32 kVideoTrack2Ssrc = 3;
+static const uint32_t kVideoTrack2Ssrc = 3;
// MediaStream 2
static const char kStreamLabel2[] = "local_stream_2";
static const char kStream2Cname[] = "stream_2_cname";
static const char kAudioTrackId2[] = "audio_track_id_2";
-static const uint32 kAudioTrack2Ssrc = 4;
+static const uint32_t kAudioTrack2Ssrc = 4;
static const char kVideoTrackId3[] = "video_track_id_3";
-static const uint32 kVideoTrack3Ssrc = 5;
-static const uint32 kVideoTrack4Ssrc = 6;
+static const uint32_t kVideoTrack3Ssrc = 5;
+static const uint32_t kVideoTrack4Ssrc = 6;
// DataChannel
static const char kDataChannelLabel[] = "data_channel";
static const char kDataChannelMsid[] = "data_channeld0";
static const char kDataChannelCname[] = "data_channel_cname";
-static const uint32 kDataChannelSsrc = 10;
+static const uint32_t kDataChannelSsrc = 10;
// Candidate
static const char kDummyMid[] = "dummy_mid";
@@ -2157,7 +2155,7 @@
TEST_F(WebRtcSdpTest, DeserializeSdpWithSctpDataChannelAndNewPort) {
AddSctpDataChannel();
- const uint16 kUnusualSctpPort = 9556;
+ const uint16_t kUnusualSctpPort = 9556;
char default_portstr[16];
char unusual_portstr[16];
rtc::sprintfn(default_portstr, sizeof(default_portstr), "%d",
diff --git a/talk/app/webrtc/webrtcsession.cc b/talk/app/webrtc/webrtcsession.cc
index 15ddc28..2ab9a1e 100644
--- a/talk/app/webrtc/webrtcsession.cc
+++ b/talk/app/webrtc/webrtcsession.cc
@@ -265,9 +265,9 @@
}
}
-static bool GetAudioSsrcByTrackId(
- const SessionDescription* session_description,
- const std::string& track_id, uint32 *ssrc) {
+static bool GetAudioSsrcByTrackId(const SessionDescription* session_description,
+ const std::string& track_id,
+ uint32_t* ssrc) {
const cricket::ContentInfo* audio_info =
cricket::GetFirstAudioContent(session_description);
if (!audio_info) {
@@ -289,7 +289,8 @@
}
static bool GetTrackIdBySsrc(const SessionDescription* session_description,
- uint32 ssrc, std::string* track_id) {
+ uint32_t ssrc,
+ std::string* track_id) {
ASSERT(track_id != NULL);
const cricket::ContentInfo* audio_info =
@@ -461,7 +462,7 @@
}
}
-uint32 ConvertIceTransportTypeToCandidateFilter(
+uint32_t ConvertIceTransportTypeToCandidateFilter(
PeerConnectionInterface::IceTransportsType type) {
switch (type) {
case PeerConnectionInterface::kNone:
@@ -1212,13 +1213,15 @@
ConvertIceTransportTypeToCandidateFilter(type));
}
-bool WebRtcSession::GetLocalTrackIdBySsrc(uint32 ssrc, std::string* track_id) {
+bool WebRtcSession::GetLocalTrackIdBySsrc(uint32_t ssrc,
+ std::string* track_id) {
if (!base_local_description())
return false;
return webrtc::GetTrackIdBySsrc(base_local_description(), ssrc, track_id);
}
-bool WebRtcSession::GetRemoteTrackIdBySsrc(uint32 ssrc, std::string* track_id) {
+bool WebRtcSession::GetRemoteTrackIdBySsrc(uint32_t ssrc,
+ std::string* track_id) {
if (!base_remote_description())
return false;
return webrtc::GetTrackIdBySsrc(base_remote_description(), ssrc, track_id);
@@ -1230,7 +1233,8 @@
return desc.str();
}
-void WebRtcSession::SetAudioPlayout(uint32 ssrc, bool enable,
+void WebRtcSession::SetAudioPlayout(uint32_t ssrc,
+ bool enable,
cricket::AudioRenderer* renderer) {
ASSERT(signaling_thread()->IsCurrent());
if (!voice_channel_) {
@@ -1250,7 +1254,8 @@
}
}
-void WebRtcSession::SetAudioSend(uint32 ssrc, bool enable,
+void WebRtcSession::SetAudioSend(uint32_t ssrc,
+ bool enable,
const cricket::AudioOptions& options,
cricket::AudioRenderer* renderer) {
ASSERT(signaling_thread()->IsCurrent());
@@ -1263,7 +1268,7 @@
}
}
-void WebRtcSession::SetAudioPlayoutVolume(uint32 ssrc, double volume) {
+void WebRtcSession::SetAudioPlayoutVolume(uint32_t ssrc, double volume) {
ASSERT(signaling_thread()->IsCurrent());
ASSERT(volume >= 0 && volume <= 10);
if (!voice_channel_) {
@@ -1276,7 +1281,7 @@
}
}
-bool WebRtcSession::SetCaptureDevice(uint32 ssrc,
+bool WebRtcSession::SetCaptureDevice(uint32_t ssrc,
cricket::VideoCapturer* camera) {
ASSERT(signaling_thread()->IsCurrent());
@@ -1296,7 +1301,7 @@
return true;
}
-void WebRtcSession::SetVideoPlayout(uint32 ssrc,
+void WebRtcSession::SetVideoPlayout(uint32_t ssrc,
bool enable,
cricket::VideoRenderer* renderer) {
ASSERT(signaling_thread()->IsCurrent());
@@ -1312,7 +1317,8 @@
}
}
-void WebRtcSession::SetVideoSend(uint32 ssrc, bool enable,
+void WebRtcSession::SetVideoSend(uint32_t ssrc,
+ bool enable,
const cricket::VideoOptions* options) {
ASSERT(signaling_thread()->IsCurrent());
if (!video_channel_) {
@@ -1333,7 +1339,7 @@
LOG(LS_ERROR) << "CanInsertDtmf: No audio channel exists.";
return false;
}
- uint32 send_ssrc = 0;
+ uint32_t send_ssrc = 0;
// The Dtmf is negotiated per channel not ssrc, so we only check if the ssrc
// exists.
if (!GetAudioSsrcByTrackId(base_local_description(), track_id,
@@ -1351,7 +1357,7 @@
LOG(LS_ERROR) << "InsertDtmf: No audio channel exists.";
return false;
}
- uint32 send_ssrc = 0;
+ uint32_t send_ssrc = 0;
if (!VERIFY(GetAudioSsrcByTrackId(base_local_description(),
track_id, &send_ssrc))) {
LOG(LS_ERROR) << "InsertDtmf: Track does not exist: " << track_id;
diff --git a/talk/app/webrtc/webrtcsession.h b/talk/app/webrtc/webrtcsession.h
index b3d76bf..6e4a9e9 100644
--- a/talk/app/webrtc/webrtcsession.h
+++ b/talk/app/webrtc/webrtcsession.h
@@ -198,25 +198,25 @@
}
// Get the id used as a media stream track's "id" field from ssrc.
- virtual bool GetLocalTrackIdBySsrc(uint32 ssrc, std::string* track_id);
- virtual bool GetRemoteTrackIdBySsrc(uint32 ssrc, std::string* track_id);
+ virtual bool GetLocalTrackIdBySsrc(uint32_t ssrc, std::string* track_id);
+ virtual bool GetRemoteTrackIdBySsrc(uint32_t ssrc, std::string* track_id);
// AudioMediaProviderInterface implementation.
- void SetAudioPlayout(uint32 ssrc,
+ void SetAudioPlayout(uint32_t ssrc,
bool enable,
cricket::AudioRenderer* renderer) override;
- void SetAudioSend(uint32 ssrc,
+ void SetAudioSend(uint32_t ssrc,
bool enable,
const cricket::AudioOptions& options,
cricket::AudioRenderer* renderer) override;
- void SetAudioPlayoutVolume(uint32 ssrc, double volume) override;
+ void SetAudioPlayoutVolume(uint32_t ssrc, double volume) override;
// Implements VideoMediaProviderInterface.
- bool SetCaptureDevice(uint32 ssrc, cricket::VideoCapturer* camera) override;
- void SetVideoPlayout(uint32 ssrc,
+ bool SetCaptureDevice(uint32_t ssrc, cricket::VideoCapturer* camera) override;
+ void SetVideoPlayout(uint32_t ssrc,
bool enable,
cricket::VideoRenderer* renderer) override;
- void SetVideoSend(uint32 ssrc,
+ void SetVideoSend(uint32_t ssrc,
bool enable,
const cricket::VideoOptions* options) override;
diff --git a/talk/app/webrtc/webrtcsession_unittest.cc b/talk/app/webrtc/webrtcsession_unittest.cc
index dbe485c..2853ca4 100644
--- a/talk/app/webrtc/webrtcsession_unittest.cc
+++ b/talk/app/webrtc/webrtcsession_unittest.cc
@@ -1465,8 +1465,8 @@
// Verify the session id is the same and the session version is
// increased.
EXPECT_EQ(session_id_orig, offer->session_id());
- EXPECT_LT(rtc::FromString<uint64>(session_version_orig),
- rtc::FromString<uint64>(offer->session_version()));
+ EXPECT_LT(rtc::FromString<uint64_t>(session_version_orig),
+ rtc::FromString<uint64_t>(offer->session_version()));
SetLocalDescriptionWithoutError(offer);
EXPECT_EQ(0u, video_channel_->send_streams().size());
@@ -1525,8 +1525,8 @@
// Verify the session id is the same and the session version is
// increased.
EXPECT_EQ(session_id_orig, answer->session_id());
- EXPECT_LT(rtc::FromString<uint64>(session_version_orig),
- rtc::FromString<uint64>(answer->session_version()));
+ EXPECT_LT(rtc::FromString<uint64_t>(session_version_orig),
+ rtc::FromString<uint64_t>(answer->session_version()));
SetLocalDescriptionWithoutError(answer);
ASSERT_EQ(2u, video_channel_->recv_streams().size());
@@ -3107,7 +3107,7 @@
cricket::FakeVoiceMediaChannel* channel = media_engine_->GetVoiceChannel(0);
ASSERT_TRUE(channel != NULL);
ASSERT_EQ(1u, channel->recv_streams().size());
- uint32 receive_ssrc = channel->recv_streams()[0].first_ssrc();
+ uint32_t receive_ssrc = channel->recv_streams()[0].first_ssrc();
double left_vol, right_vol;
EXPECT_TRUE(channel->GetOutputScaling(receive_ssrc, &left_vol, &right_vol));
EXPECT_EQ(1, left_vol);
@@ -3132,7 +3132,7 @@
cricket::FakeVoiceMediaChannel* channel = media_engine_->GetVoiceChannel(0);
ASSERT_TRUE(channel != NULL);
ASSERT_EQ(1u, channel->send_streams().size());
- uint32 send_ssrc = channel->send_streams()[0].first_ssrc();
+ uint32_t send_ssrc = channel->send_streams()[0].first_ssrc();
EXPECT_FALSE(channel->IsStreamMuted(send_ssrc));
cricket::AudioOptions options;
@@ -3162,7 +3162,7 @@
cricket::FakeVoiceMediaChannel* channel = media_engine_->GetVoiceChannel(0);
ASSERT_TRUE(channel != NULL);
ASSERT_EQ(1u, channel->send_streams().size());
- uint32 send_ssrc = channel->send_streams()[0].first_ssrc();
+ uint32_t send_ssrc = channel->send_streams()[0].first_ssrc();
rtc::scoped_ptr<FakeAudioRenderer> renderer(new FakeAudioRenderer());
cricket::AudioOptions options;
@@ -3187,7 +3187,7 @@
ASSERT_LT(0u, channel->renderers().size());
EXPECT_TRUE(channel->renderers().begin()->second == NULL);
ASSERT_EQ(1u, channel->recv_streams().size());
- uint32 receive_ssrc = channel->recv_streams()[0].first_ssrc();
+ uint32_t receive_ssrc = channel->recv_streams()[0].first_ssrc();
cricket::FakeVideoRenderer renderer;
session_->SetVideoPlayout(receive_ssrc, true, &renderer);
EXPECT_TRUE(channel->renderers().begin()->second == &renderer);
@@ -3202,7 +3202,7 @@
cricket::FakeVideoMediaChannel* channel = media_engine_->GetVideoChannel(0);
ASSERT_TRUE(channel != NULL);
ASSERT_EQ(1u, channel->send_streams().size());
- uint32 send_ssrc = channel->send_streams()[0].first_ssrc();
+ uint32_t send_ssrc = channel->send_streams()[0].first_ssrc();
EXPECT_FALSE(channel->IsStreamMuted(send_ssrc));
cricket::VideoOptions* options = NULL;
session_->SetVideoSend(send_ssrc, false, options);
@@ -3236,7 +3236,7 @@
// Verify
ASSERT_EQ(3U, channel->dtmf_info_queue().size());
- const uint32 send_ssrc = channel->send_streams()[0].first_ssrc();
+ const uint32_t send_ssrc = channel->send_streams()[0].first_ssrc();
EXPECT_TRUE(CompareDtmfInfo(channel->dtmf_info_queue()[0], send_ssrc, 0,
expected_duration, expected_flags));
EXPECT_TRUE(CompareDtmfInfo(channel->dtmf_info_queue()[1], send_ssrc, 1,
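The session-version checks earlier in this file parse the "o=" line version with rtc::FromString<uint64_t> before comparing, because a string comparison stops being monotonic once the version gains a digit. A tiny sketch of that point, using std::stoull in place of rtc::FromString<uint64_t>:

  #include <cassert>
  #include <string>

  int main() {
    const std::string before = "9";
    const std::string after = "10";
    assert(!(before < after));                         // Lexicographic: "10" < "9".
    assert(std::stoull(before) < std::stoull(after));  // Numeric: 9 < 10.
  }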
diff --git a/talk/app/webrtc/webrtcsessiondescriptionfactory.cc b/talk/app/webrtc/webrtcsessiondescriptionfactory.cc
index f6414d3..8769315 100644
--- a/talk/app/webrtc/webrtcsessiondescriptionfactory.cc
+++ b/talk/app/webrtc/webrtcsessiondescriptionfactory.cc
@@ -44,7 +44,7 @@
static const char kFailedDueToSessionShutdown[] =
" failed because the session was shut down";
-static const uint64 kInitSessionVersion = 2;
+static const uint64_t kInitSessionVersion = 2;
static bool CompareStream(const MediaSessionOptions::Stream& stream1,
const MediaSessionOptions::Stream& stream2) {
@@ -415,7 +415,7 @@
// Just increase the version number by one each time when a new offer
// is created regardless if it's identical to the previous one or not.
- // The |session_version_| is a uint64, the wrap around should not happen.
+ // The |session_version_| is a uint64_t, so wraparound should not happen.
ASSERT(session_version_ + 1 > session_version_);
JsepSessionDescription* offer(new JsepSessionDescription(
JsepSessionDescription::kOffer));
@@ -459,7 +459,7 @@
// In that case, the version number in the "o=" line of the answer is
// unrelated to the version number in the o line of the offer.
// Get a new version number by increasing the |session_version_answer_|.
- // The |session_version_| is a uint64, the wrap around should not happen.
+ // The |session_version_| is a uint64_t, so wraparound should not happen.
ASSERT(session_version_ + 1 > session_version_);
JsepSessionDescription* answer(new JsepSessionDescription(
JsepSessionDescription::kAnswer));
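The comments in the factory above describe the version bookkeeping: start at kInitSessionVersion (2), bump a uint64_t once per generated offer or answer, and assert that no wrap occurs. A minimal sketch of that bookkeeping, with the ASSERT replaced by assert and the SDP plumbing reduced to a placeholder string:

  #include <cassert>
  #include <cstdint>
  #include <string>

  int main() {
    uint64_t session_version = 2;  // kInitSessionVersion in the patch above.
    for (int i = 0; i < 3; ++i) {
      assert(session_version + 1 > session_version);  // No wrap expected.
      ++session_version;
      const std::string o_line_version = std::to_string(session_version);
      (void)o_line_version;  // Would feed the SDP "o=" line's version field.
    }
    assert(session_version == 5);
  }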
diff --git a/talk/app/webrtc/webrtcsessiondescriptionfactory.h b/talk/app/webrtc/webrtcsessiondescriptionfactory.h
index 52b8da5..95fab63 100644
--- a/talk/app/webrtc/webrtcsessiondescriptionfactory.h
+++ b/talk/app/webrtc/webrtcsessiondescriptionfactory.h
@@ -186,7 +186,7 @@
MediaStreamSignaling* const mediastream_signaling_;
cricket::TransportDescriptionFactory transport_desc_factory_;
cricket::MediaSessionDescriptionFactory session_desc_factory_;
- uint64 session_version_;
+ uint64_t session_version_;
const rtc::scoped_ptr<DtlsIdentityStoreInterface> dtls_identity_store_;
const rtc::scoped_refptr<WebRtcIdentityRequestObserver>
identity_request_observer_;