Fix the number of frames used when interleaving in AudioBuffer::InterleaveTo()

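InterleaveTo() writes the processed audio into the output AudioFrame, which
holds output_num_frames_ samples per channel. When the output rate differs
from the internal processing rate, proc_num_frames_ and output_num_frames_
differ and the interleaved copy uses the wrong length, so switch both the
stereo path and the mono-upmix path to output_num_frames_. The fixed-profile
reference data and the unit-test sample-rate list are updated to match.

For reference, a minimal sketch of what the planar-to-interleaved copy does
(hypothetical helper, not the actual webrtc::Interleave); the num_frames
argument has to describe the destination frame:

  #include <cstddef>
  #include <cstdint>

  void InterleaveSketch(const int16_t* const* planar,
                        size_t num_frames,    // samples per channel in the
                                              // destination AudioFrame
                        size_t num_channels,
                        int16_t* interleaved) {
    for (size_t ch = 0; ch < num_channels; ++ch) {
      int16_t* dst = &interleaved[ch];
      for (size_t i = 0; i < num_frames; ++i) {
        *dst = planar[ch][i];   // frame i of channel ch
        dst += num_channels;    // next interleaved slot for this channel
      }
    }
  }
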
R=henrik.lundin@webrtc.org, peah@webrtc.org
TBR=tina.legrand@webrtc.org

Review URL: https://codereview.webrtc.org/1862553002 .

Cr-Commit-Position: refs/heads/master@{#12249}
diff --git a/data/audio_processing/output_data_fixed.pb b/data/audio_processing/output_data_fixed.pb
index 0e9307a..ae5b812 100644
--- a/data/audio_processing/output_data_fixed.pb
+++ b/data/audio_processing/output_data_fixed.pb
Binary files differ
diff --git a/webrtc/modules/audio_processing/audio_buffer.cc b/webrtc/modules/audio_processing/audio_buffer.cc
index f300bb8..f9b9f63 100644
--- a/webrtc/modules/audio_processing/audio_buffer.cc
+++ b/webrtc/modules/audio_processing/audio_buffer.cc
@@ -430,10 +430,10 @@
   }
 
   if (frame->num_channels_ == num_channels_) {
-    Interleave(data_ptr->ibuf()->channels(), proc_num_frames_, num_channels_,
+    Interleave(data_ptr->ibuf()->channels(), output_num_frames_, num_channels_,
                frame->data_);
   } else {
-    UpmixMonoToInterleaved(data_ptr->ibuf()->channels()[0], proc_num_frames_,
+    UpmixMonoToInterleaved(data_ptr->ibuf()->channels()[0], output_num_frames_,
                            frame->num_channels_, frame->data_);
   }
 }
diff --git a/webrtc/modules/audio_processing/test/audio_processing_unittest.cc b/webrtc/modules/audio_processing/test/audio_processing_unittest.cc
index 5dbfc14..359cd03 100644
--- a/webrtc/modules/audio_processing/test/audio_processing_unittest.cc
+++ b/webrtc/modules/audio_processing/test/audio_processing_unittest.cc
@@ -54,12 +54,7 @@
 const google::protobuf::int32 kChannels[] = {1, 2};
 const int kSampleRates[] = {8000, 16000, 32000, 48000};
 
-#if defined(WEBRTC_AUDIOPROC_FIXED_PROFILE)
-// Android doesn't support 48kHz.
-const int kProcessSampleRates[] = {8000, 16000, 32000};
-#elif defined(WEBRTC_AUDIOPROC_FLOAT_PROFILE)
 const int kProcessSampleRates[] = {8000, 16000, 32000, 48000};
-#endif
 
 enum StreamDirection { kForward = 0, kReverse };