Overview
status_t AudioALSACaptureHandlerAEC::open()
{
if (mStreamAttributeTarget->input_device == AUDIO_DEVICE_IN_BLUETOOTH_SCO_HEADSET)
{
// open BT data provider
if (WCNChipController::GetInstance()->IsBTMergeInterfaceSupported() == true)
{
mCaptureDataClient = new AudioALSACaptureDataClient(AudioALSACaptureDataProviderBTSCO::getInstance(),
mStreamAttributeTarget);
} else
{
mCaptureDataClient = new AudioALSACaptureDataClient(AudioALSACaptureDataProviderBTCVSD::getInstance(),
mStreamAttributeTarget);
}
} else // create the data-read client for the input device
{
mCaptureDataClient = new AudioALSACaptureDataClient(AudioALSACaptureDataProviderNormal::getInstance(),
mStreamAttributeTarget);
}
// open the output-device feedback (echo reference) signal for echo cancellation
if (mCaptureDataClient != NULL)
{
memcpy(&mStreamAttributeTargetEchoRef, mStreamAttributeTarget, sizeof(stream_attribute_t));
if (mStreamAttributeTarget->input_device == AUDIO_DEVICE_IN_BLUETOOTH_SCO_HEADSET)
{
// open BT echoref data provider
mCaptureDataClient->AddEchoRefDataProvider(AudioALSACaptureDataProviderEchoRefBTSCO::getInstance(),
&mStreamAttributeTargetEchoRef);
} else {
if (mStreamAttributeTarget->output_devices == AUDIO_DEVICE_OUT_SPEAKER &&
AudioSmartPaController::getInstance()->isEchoReferenceSupport()) // echo cancellation for speaker output
{
mCaptureDataClient->AddEchoRefDataProvider(AudioALSACaptureDataProviderEchoRefExt::getInstance(),
&mStreamAttributeTargetEchoRef);
} else // echo cancellation for receiver (earpiece) output
{
mCaptureDataClient->AddEchoRefDataProvider(AudioALSACaptureDataProviderEchoRef::getInstance(),
&mStreamAttributeTargetEchoRef);
}
}
}
if (mStreamAttributeTarget->input_device != AUDIO_DEVICE_IN_BLUETOOTH_SCO_HEADSET)
{
//no need to config analog part while BT case
mHardwareResourceManager->startInputDevice(mStreamAttributeTarget->input_device);
}
return NO_ERROR;
}
Open the output-device feedback signal (echo reference) for echo cancellation
void AudioALSACaptureDataClient::AddEchoRefDataProvider(AudioALSACaptureDataProviderBase *pCaptureDataProvider,
stream_attribute_t *stream_attribute_target)
{
mStreamAttributeTargetEchoRef = stream_attribute_target;
mCaptureDataProviderEchoRef = pCaptureDataProvider;
mStreamAttributeSourceEchoRef = mCaptureDataProviderEchoRef->getStreamAttributeSource();
// fix the channel count of echo reference data to stereo since native echo_reference_itfe supports stereo only
mStreamAttributeTargetEchoRef->num_channels = 2;
mStreamAttributeTargetEchoRef->audio_channel_mask = AUDIO_CHANNEL_IN_STEREO;
//check SRC needed and created
// raw data
memset((void *)&mEchoRefRawDataBuf, 0, sizeof(mEchoRefRawDataBuf));
mEchoRefRawDataBuf.pBufBase = new char[kClientBufferSize];
mEchoRefRawDataBuf.bufLen = kClientBufferSize;
mEchoRefRawDataBuf.pRead = mEchoRefRawDataBuf.pBufBase;
mEchoRefRawDataBuf.pWrite = mEchoRefRawDataBuf.pBufBase;
// src data
memset((void *)&mEchoRefSrcDataBuf, 0, sizeof(mEchoRefSrcDataBuf));
mEchoRefSrcDataBuf.pBufBase = new char[kClientBufferSize];
mEchoRefSrcDataBuf.bufLen = kClientBufferSize;
mEchoRefSrcDataBuf.pRead = mEchoRefSrcDataBuf.pBufBase;
mEchoRefSrcDataBuf.pWrite = mEchoRefSrcDataBuf.pBufBase;
ASSERT(mEchoRefSrcDataBuf.pBufBase != NULL);
// attach client to capture EchoRef data provider
mCaptureDataProviderEchoRef->attach(this); // mStreamAttributeSource will be updated when first client attached
//assume starts after PCM open
mSPELayer->SetOutputStreamRunning(true, true);
mSPELayer->SetEchoRefStartTime(GetSystemTime(false));
mSPELayer->SetDownLinkLatencyTime(mStreamAttributeSourceEchoRef->latency);
// init SRC, this SRC is for Android Native.
if (mStreamAttributeSourceEchoRef->sample_rate != mStreamAttributeTargetEchoRef->sample_rate ||
mStreamAttributeSourceEchoRef->num_channels != mStreamAttributeTargetEchoRef->num_channels ||
mStreamAttributeSourceEchoRef->audio_format != mStreamAttributeTargetEchoRef->audio_format)
{
mBliSrcEchoRef = newMtkAudioSrc(
mStreamAttributeSourceEchoRef->sample_rate, mStreamAttributeSourceEchoRef->num_channels,
mStreamAttributeTargetEchoRef->sample_rate, mStreamAttributeTargetEchoRef->num_channels,
SRC_IN_Q1P15_OUT_Q1P15); // TODO(Harvey, Ship): 24bit
mBliSrcEchoRef->open();
}
// init SRC, this SRC is for MTK VoIP
if ((mStreamAttributeTargetEchoRef->sample_rate != 16000) || (mStreamAttributeTargetEchoRef->num_channels != 1))
{
mBliSrcEchoRefBesRecord = newMtkAudioSrc(
mStreamAttributeTargetEchoRef->sample_rate, mStreamAttributeTargetEchoRef->num_channels,
16000, 1,
SRC_IN_Q1P15_OUT_Q1P15);
mBliSrcEchoRefBesRecord->open();
}
}
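Two sample-rate converters may be created here: mBliSrcEchoRef converts the echo reference into the stream-in target format for the native (Android) AEC path, while mBliSrcEchoRefBesRecord converts it to 16 kHz mono for the MTK VoIP path. The lines below are only a usage sketch that mirrors the newMtkAudioSrc()/open()/process() calls seen in this file; the 48 kHz stereo input rate and the buffer names and sizes (pRawBuf, pDstBuf, rawBytes, dstCapacity) are illustrative assumptions, not values taken from the HAL.
// Illustrative only: buffers and rates are assumptions; only the SRC calls mirror the real code.
char pRawBuf[3840];                       // hypothetical raw echo-reference bytes
char pDstBuf[1280];                       // hypothetical destination buffer
uint32_t rawBytes    = sizeof(pRawBuf);   // bytes available in pRawBuf
uint32_t dstCapacity = sizeof(pDstBuf);   // free space in pDstBuf
auto *src = newMtkAudioSrc(48000, 2,      // assumed source: 48 kHz stereo
                           16000, 1,      // MTK VoIP target: 16 kHz mono
                           SRC_IN_Q1P15_OUT_Q1P15);
src->open();
uint32_t inBytes  = rawBytes;
uint32_t outBytes = dstCapacity;
src->process((int16_t *)pRawBuf, &inBytes,     // updated to the bytes left unconsumed
             (int16_t *)pDstBuf, &outBytes);   // updated to the bytes actually produced
uint32_t consumed = rawBytes - inBytes;        // same accounting as copyEchoRefCaptureDataToClient()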
void AudioALSACaptureDataProviderBase::attach(AudioALSACaptureDataClient *pCaptureDataClient)
{
pCaptureDataClient->setIdentity(mCaptureDataClientIndex);
mCaptureDataClientVector.add(pCaptureDataClient->getIdentity(), pCaptureDataClient);
mCaptureDataClientIndex++;
// open pcm interface when 1st attach
if (mCaptureDataClientVector.size() == 1)
{
mOpenIndex++;
open();
}
}
status_t AudioALSACaptureDataProviderEchoRefExt::open()
{
AudioALSASampleRateController *pAudioALSASampleRateController = AudioALSASampleRateController::getInstance();
pAudioALSASampleRateController->setScenarioStatus(PLAYBACK_SCENARIO_ECHO_REF_EXT);
// config attribute (will be used later in client SRC/Enh/...) // TODO(Sam): query the mConfig?
mStreamAttributeSource.audio_format = AUDIO_FORMAT_PCM_16_BIT;
mStreamAttributeSource.audio_channel_mask = AUDIO_CHANNEL_IN_STEREO;
mStreamAttributeSource.num_channels =
android_audio_legacy::AudioSystem::popCount(mStreamAttributeSource.audio_channel_mask);
mStreamAttributeSource.sample_rate =
AudioALSASampleRateController::getInstance()->getPrimaryStreamOutSampleRate();
// Reset the frames-read counter
mStreamAttributeSource.Time_Info.total_frames_readed = 0;
uint32_t latency = getLatencyTime();
mConfig.rate = mStreamAttributeSource.sample_rate;
mConfig.channels = mStreamAttributeSource.num_channels;
mConfig.format = PCM_FORMAT_S16_LE;
kReadBufferSize =
(((uint32_t)((mStreamAttributeSource.sample_rate / 1000) * latency * mConfig.channels *
(pcm_format_to_bits(mConfig.format) / 8))) & 0xFFFFFFC0); // e.g. DL1: 44.1 kHz * 20 ms * stereo * 2 bytes, aligned down to 64 bytes
mConfig.period_size = kReadBufferSize / mConfig.channels / (pcm_format_to_bits(mConfig.format) / 8);
mConfig.period_count = 2;
if (latency == UPLINK_LOW_LATENCY_MS)
{
mConfig.period_count = 8; // 2*(20ms/5ms);
}
mConfig.start_threshold = 0;
mConfig.stop_threshold = 0;
mConfig.silence_threshold = 0;
//latency time, set as hardware buffer size
mStreamAttributeSource.latency = (mConfig.period_size * mConfig.period_count * 1000) / mConfig.rate;
OpenPCMDump(LOG_TAG);
// enable pcm
int pcmIdx = AudioALSADeviceParser::getInstance()->GetPcmIndexByString(keypcmI2SAwbCapture);
int cardIdx = AudioALSADeviceParser::getInstance()->GetCardIndexByString(keypcmI2SAwbCapture);
mPcm = pcm_open(cardIdx, pcmIdx, PCM_IN | PCM_MONOTONIC, &mConfig);
pcm_start(mPcm);
// create reading thread
mEnable = true;
int ret = pthread_create(&hReadThread, NULL,
AudioALSACaptureDataProviderEchoRefExt::readThread, (void *)this);
return NO_ERROR;
}
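To make the kReadBufferSize formula above concrete, the short sketch below reproduces the arithmetic for an assumed 44.1 kHz stereo 16-bit stream with a 20 ms period (the values hinted at by the comment); it is illustrative only and not part of the HAL.
#include <cstdint>
#include <cstdio>

int main() {
    uint32_t sample_rate = 44100;   // assumed primary-out rate
    uint32_t latency_ms  = 20;      // assumed normal-latency period
    uint32_t channels    = 2;       // stereo echo reference
    uint32_t bytes_per_sample = 2;  // PCM_FORMAT_S16_LE

    uint32_t raw     = (sample_rate / 1000) * latency_ms * channels * bytes_per_sample;
    uint32_t aligned = raw & 0xFFFFFFC0;   // align down to a multiple of 64 bytes

    // raw = 44 * 20 * 2 * 2 = 3520 bytes, already 64-byte aligned,
    // so period_size = 3520 / 2 / 2 = 880 frames.
    printf("raw=%u aligned=%u period_size=%u\n",
           raw, aligned, aligned / channels / bytes_per_sample);
    return 0;
}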
The thread that reads the downlink (echo reference) data
void *AudioALSACaptureDataProviderEchoRef::readThread(void *arg)
{
status_t retval = NO_ERROR;
AudioALSACaptureDataProviderEchoRef *pDataProvider = static_cast<AudioALSACaptureDataProviderEchoRef *>(arg);
uint32_t open_index = pDataProvider->mOpenIndex;
char nameset[32];
sprintf(nameset, "%s%d", __FUNCTION__, pDataProvider->mCaptureDataProviderType);
prctl(PR_SET_NAME, (unsigned long)nameset, 0, 0, 0);
// read raw data from alsa driver
char linear_buffer[kReadBufferSize];
while (pDataProvider->mEnable == true)
{
clock_gettime(CLOCK_REALTIME, &pDataProvider->mNewtime);
pDataProvider->timerec[0] = calc_time_diff(pDataProvider->mNewtime, pDataProvider->mOldtime);
pDataProvider->mOldtime = pDataProvider->mNewtime;
int retval = pcm_read(pDataProvider->mPcm, linear_buffer, kReadBufferSize);
clock_gettime(CLOCK_REALTIME, &pDataProvider->mNewtime);
pDataProvider->timerec[1] = calc_time_diff(pDataProvider->mNewtime, pDataProvider->mOldtime);
pDataProvider->mOldtime = pDataProvider->mNewtime;
pDataProvider->GetCaptureTimeStamp(&pDataProvider->mStreamAttributeSource.Time_Info, kReadBufferSize);
// use ringbuf format to save buffer info
pDataProvider->mPcmReadBuf.pBufBase = linear_buffer;
//linear_buffer>>mPcmReadBuf
pDataProvider->mPcmReadBuf.bufLen = kReadBufferSize + 1; // +1: avoid pRead == pWrite
pDataProvider->mPcmReadBuf.pRead = linear_buffer;
pDataProvider->mPcmReadBuf.pWrite = linear_buffer + kReadBufferSize;
//Provide EchoRef data
pDataProvider->provideEchoRefCaptureDataToAllClients(open_index);
clock_gettime(CLOCK_REALTIME, &pDataProvider->mNewtime);
pDataProvider->timerec[2] = calc_time_diff(pDataProvider->mNewtime, pDataProvider->mOldtime);
pDataProvider->mOldtime = pDataProvider->mNewtime;
}
pthread_exit(NULL);
return NULL;
}
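Note how the PCM read buffer is wrapped above: bufLen is set to kReadBufferSize + 1 so that pWrite can sit a full buffer ahead of pRead without colliding with the "empty" condition. A minimal sketch of that convention (RingBuf_getDataCount is assumed to follow it; this helper is illustrative only):
// With the usual convention, pRead == pWrite means "empty", so a full ring must
// leave one slot unused; bufLen = kReadBufferSize + 1 lets the whole
// linear_buffer be reported as data.
static uint32_t ring_data_count(const char *pRead, const char *pWrite, uint32_t bufLen) {
    return (uint32_t)((pWrite - pRead + bufLen) % bufLen);
}
// Here pWrite - pRead == kReadBufferSize and bufLen == kReadBufferSize + 1,
// so the reported data count is exactly kReadBufferSize.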
void AudioALSACaptureDataProviderBase::provideEchoRefCaptureDataToAllClients(const uint32_t open_index)
{
AudioALSACaptureDataClient *pCaptureDataClient = NULL;
WritePcmDumpData();
for (size_t i = 0; i < mCaptureDataClientVector.size(); i++)
{
pCaptureDataClient = mCaptureDataClientVector[i];
pCaptureDataClient->copyEchoRefCaptureDataToClient(mPcmReadBuf);
}
}
uint32_t AudioALSACaptureDataClient::copyEchoRefCaptureDataToClient(RingBuf pcm_read_buf)
{
uint32_t freeSpace = RingBuf_getFreeSpace(&mEchoRefRawDataBuf);
uint32_t dataSize = RingBuf_getDataCount(&pcm_read_buf);
//pcm_read_buf>>mEchoRefRawDataBuf
if (freeSpace < dataSize)
{
RingBuf_copyFromRingBuf(&mEchoRefRawDataBuf, &pcm_read_buf, freeSpace);
} else
{
RingBuf_copyFromRingBuf(&mEchoRefRawDataBuf, &pcm_read_buf, dataSize);
}
// SRC to the format needed by native AEC (the stream-in target format, since the AWB data may still match the DL1 format here)
const uint32_t kNumRawData = RingBuf_getDataCount(&mEchoRefRawDataBuf);
uint32_t num_free_space = RingBuf_getFreeSpace(&mEchoRefSrcDataBuf);
if (mBliSrcEchoRef == NULL) // No need SRC, mEchoRefRawDataBuf>>mEchoRefSrcDataBuf
{
if (num_free_space < kNumRawData)
{
RingBuf_copyFromRingBuf(&mEchoRefSrcDataBuf, &mEchoRefRawDataBuf, num_free_space);
} else
{
RingBuf_copyFromRingBuf(&mEchoRefSrcDataBuf, &mEchoRefRawDataBuf, kNumRawData);
}
} else // Need SRC, mEchoRefRawDataBuf>>pEchoRefRawDataLinearBuf
{
char *pEchoRefRawDataLinearBuf = new char[kNumRawData];
RingBuf_copyToLinear(pEchoRefRawDataLinearBuf, &mEchoRefRawDataBuf, kNumRawData);
char *pEchoRefSrcDataLinearBuf = new char[num_free_space];
char *p_read = pEchoRefRawDataLinearBuf;
uint32_t num_raw_data_left = kNumRawData;
uint32_t num_converted_data = num_free_space; // max convert num_free_space
uint32_t consumed = num_raw_data_left;
//pEchoRefRawDataLinearBuf>>pEchoRefSrcDataLinearBuf
mBliSrcEchoRef->process((int16_t *)p_read, &num_raw_data_left,
(int16_t *)pEchoRefSrcDataLinearBuf, &num_converted_data);
consumed -= num_raw_data_left;
p_read += consumed;
//pEchoRefSrcDataLinearBuf>>mEchoRefSrcDataBuf
RingBuf_copyFromLinear(&mEchoRefSrcDataBuf, pEchoRefSrcDataLinearBuf, num_converted_data);
}
//for Preprocess
const uint32_t kNumEchoRefSrcData = RingBuf_getDataCount(&mEchoRefSrcDataBuf);
char *pEchoRefProcessDataLinearBuf = new char[kNumEchoRefSrcData];
RingBuf_copyToLinear(pEchoRefProcessDataLinearBuf, &mEchoRefSrcDataBuf, kNumEchoRefSrcData);
#ifdef BOOST_ECHOREF
if((mStreamAttributeTarget->output_devices & AUDIO_DEVICE_OUT_SPEAKER))
{
for(int i=0; i<kNumEchoRefSrcData/2 ; i++) {
// over flow protection
int16_t temp = *((int16_t*)(pEchoRefProcessDataLinearBuf+(i*2)));
if(temp >8191)
temp = 8191;
else if(temp <-8192)
temp =-8192;
temp = temp <<2;
pEchoRefProcessDataLinearBuf[2*i]= (char)temp;
pEchoRefProcessDataLinearBuf[2*i+1]= (char)(temp>>8);
}
}
#endif
//queue the EchoRef data to the native effect here, since no further SRC is needed at this point
if ((mAudioPreProcessEffect->num_preprocessors > 0))
//&& echoref is enabled
{
//copy pEchoRefProcessDataLinearBuf to native preprocess for echo ref
mAudioPreProcessEffect->WriteEchoRefData(pEchoRefProcessDataLinearBuf, kNumEchoRefSrcData,
&mStreamAttributeSourceEchoRef->Time_Info);
}
//If need MTK VoIP process
if ((mStreamAttributeTarget->BesRecord_Info.besrecord_enable) && !mBypassBesRecord)
{
struct InBufferInfo BufInfo;
//for MTK native SRC
if (mBliSrcEchoRefBesRecord == NULL) // No need SRC
{
BufInfo.pBufBase = (short *)pEchoRefProcessDataLinearBuf;
BufInfo.BufLen = kNumEchoRefSrcData;
BufInfo.time_stamp_queued = GetSystemTime(false);
BufInfo.bHasRemainInfo = true;
BufInfo.time_stamp_predict = GetEchoRefTimeStamp();
#ifdef SRC_DROP_DATA
if (mFirstEchoSRC == true)
{
mFirstEchoSRC = false;
delete[] pEchoRefProcessDataLinearBuf;
return 0;
}
#endif
mSPELayer->WriteReferenceBuffer(&BufInfo);
} else // Need SRC
{
char *pEchoRefProcessSRCDataLinearBuf = new char[kNumEchoRefSrcData];
char *p_read = pEchoRefProcessDataLinearBuf;
uint32_t num_raw_data_left = kNumEchoRefSrcData;
uint32_t num_converted_data = kNumEchoRefSrcData; // at most kNumEchoRefSrcData bytes of output
uint32_t consumed = num_raw_data_left;
//pEchoRefProcessDataLinearBuf>>pEchoRefProcessSRCDataLinearBuf
mBliSrcEchoRefBesRecord->process((int16_t *)p_read, &num_raw_data_left,
(int16_t *)pEchoRefProcessSRCDataLinearBuf, &num_converted_data);
consumed -= num_raw_data_left;
p_read += consumed;
//TODO: (Sam )copy pEchoRefSrcDataLinearBuf to MTK VoIP write echo ref data
BufInfo.pBufBase = (short *)pEchoRefProcessSRCDataLinearBuf;
BufInfo.BufLen = num_converted_data;
BufInfo.time_stamp_queued = GetSystemTime(false);
BufInfo.bHasRemainInfo = true;
BufInfo.time_stamp_predict = GetEchoRefTimeStamp();
#ifdef SRC_DROP_DATA
if (mFirstEchoSRC == true)
{
mFirstEchoSRC = false;
delete[] pEchoRefProcessSRCDataLinearBuf;
delete[] pEchoRefProcessDataLinearBuf;
return 0;
}
#endif
mSPELayer->WriteReferenceBuffer(&BufInfo);
delete[] pEchoRefProcessSRCDataLinearBuf;
}
}
delete[] pEchoRefProcessDataLinearBuf;
return 0;
}
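The BOOST_ECHOREF branch above amplifies the speaker echo reference by a left shift of 2 (a gain of 4, about 12 dB) and first clamps each sample to [-8192, 8191] so the shifted value cannot overflow a 16-bit sample. A stand-alone version of that per-sample operation, for illustration only:
#include <cstdint>

// Clamp to [-8192, 8191] so that <<2 stays within int16_t range:
// 8191 << 2 = 32764 and -8192 << 2 = -32768.
static inline int16_t boost_echo_sample(int16_t s) {
    if (s > 8191)       s = 8191;
    else if (s < -8192) s = -8192;
    return (int16_t)(s << 2);   // x4 gain, ~12 dB
}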
Read the downlink data for echo cancellation
void SPELayer::WriteReferenceBuffer(struct InBufferInfo *Binfo)
{
struct timespec entertime;
struct timespec leavetime;
unsigned long long timediff = 0;
entertime = GetSystemTime();
mNewReferenceBufferComes = true;
//the normal running-VoIP case, and the path-routing case
if (((mState == SPE_STATE_RUNNING) && ((mMode == SPE_MODE_VOIP) || (mMode == SPE_MODE_AECREC)))
|| mVoIPRunningbefore)
{
AddtoInputBuffer(DOWNLINK, Binfo);// queue the unprocessed downlink data
} else if ((mState != SPE_STATE_CLEANING) && (mMode != SPE_MODE_REC))
{
AddtoInputBuffer(DOWNLINK, Binfo, true);// queue the unprocessed downlink data (pre-queue)
}
mNewReferenceBufferComes = false;
leavetime = GetSystemTime();
timediff = TimeDifference(leavetime, entertime);
}
void SPELayer::AddtoInputBuffer(SPE_DATA_DIRECTION dir, struct InBufferInfo *BInputInfo, bool prequeue)
{
//pthread_mutex_lock(&mBufMutex );
int inBufLen = BInputInfo->BufLen;
short *inBufAddr = BInputInfo->pBufBase;
bool bRemainInfo = BInputInfo->bHasRemainInfo;
bool bPreQueue = prequeue;
Dump_PCM_In(dir, inBufAddr, inBufLen);
BufferInfo *newInBuffer = new BufferInfo;
memset(newInBuffer, 0, sizeof(BufferInfo));
struct timespec tstamp_queue;
newInBuffer->pBufBase = (short *) malloc(inBufLen);
memcpy(newInBuffer->pBufBase, inBufAddr, inBufLen);
tstamp_queue = BInputInfo->time_stamp_queued;
newInBuffer->BufLen = inBufLen;
newInBuffer->pRead = newInBuffer->pBufBase;
newInBuffer->pWrite = newInBuffer->pBufBase;
newInBuffer->time_stamp_queued = tstamp_queue;
newInBuffer->time_stamp_process = {0};
newInBuffer->DLfirstBuf = false;
if ((dir == UPLINK) && ((mMode == SPE_MODE_VOIP) || (mMode == SPE_MODE_AECREC)))// uplink data in VoIP / AEC-recording mode
{
if (mFirstVoIPUplink)
{
mFirstVoIPUplink = false;
if (bRemainInfo)
{
mPreUplinkEstTime.tv_sec = BInputInfo->time_stamp_predict.tv_sec;
mPreUplinkEstTime.tv_nsec = BInputInfo->time_stamp_predict.tv_nsec;
} else
{
mPreUplinkEstTime.tv_sec = mUplinkIntrStartTime.tv_sec;
if (mUplinkIntrStartTime.tv_nsec + mULDropTime * 1000000 >= 1000000000)
{
mPreUplinkEstTime.tv_sec++;
mPreUplinkEstTime.tv_nsec = mUplinkIntrStartTime.tv_nsec
+ mULDropTime * 1000000 - 1000000000;
} else
{
mPreUplinkEstTime.tv_nsec = mUplinkIntrStartTime.tv_nsec + mULDropTime * 1000000;
}
}
newInBuffer->time_stamp_estimate.tv_sec = mPreUplinkEstTime.tv_sec;
newInBuffer->time_stamp_estimate.tv_nsec = mPreUplinkEstTime.tv_nsec;
mPreULBufLen = inBufLen;
} else
{
if (bRemainInfo)
{
struct timespec tempTime;
tempTime.tv_sec = BInputInfo->time_stamp_predict.tv_sec;
tempTime.tv_nsec = BInputInfo->time_stamp_predict.tv_nsec;
mPreUplinkEstTime.tv_sec = BInputInfo->time_stamp_predict.tv_sec;
mPreUplinkEstTime.tv_nsec = BInputInfo->time_stamp_predict.tv_nsec;
newInBuffer->time_stamp_estimate.tv_sec = BInputInfo->time_stamp_predict.tv_sec;
newInBuffer->time_stamp_estimate.tv_nsec = BInputInfo->time_stamp_predict.tv_nsec;
} else
{
struct timespec Esttstamp;
unsigned long long ns = ((mPreULBufLen * (unsigned long long)1000000) / 64);
Esttstamp.tv_sec = mPreUplinkEstTime.tv_sec;
if (mPreUplinkEstTime.tv_nsec + ns >= 1000000000)
{
Esttstamp.tv_sec++;
Esttstamp.tv_nsec = mPreUplinkEstTime.tv_nsec + ns - 1000000000;
} else
{
Esttstamp.tv_nsec = mPreUplinkEstTime.tv_nsec + ns;
}
newInBuffer->time_stamp_estimate.tv_sec = Esttstamp.tv_sec;
newInBuffer->time_stamp_estimate.tv_nsec = Esttstamp.tv_nsec;
mPreUplinkEstTime.tv_sec = Esttstamp.tv_sec;
mPreUplinkEstTime.tv_nsec = Esttstamp.tv_nsec;
}
mPreULBufLen = inBufLen;
}
}
if (dir == DOWNLINK)//VoIP
{
if (mFirstVoIPDownlink)
{
mFirstVoIPDownlink = false;
//downlink starts for the first time; the first DL buffer is queued before the interrupt is enabled,
//which happens when the output starts after the input stream is created
if (mDLNewStart) //
{
newInBuffer->DLfirstBuf = true;
//need to modify the estimate start time again when downlink Interrupt set.
newInBuffer->time_stamp_estimate.tv_sec = BInputInfo->time_stamp_queued.tv_sec;
newInBuffer->time_stamp_estimate.tv_nsec = BInputInfo->time_stamp_queued.tv_nsec;
if (mDLLatencyTime * 1000000 + newInBuffer->time_stamp_estimate.tv_nsec >= 1000000000)
{
newInBuffer->time_stamp_estimate.tv_sec++;
newInBuffer->time_stamp_estimate.tv_nsec = mDLLatencyTime * 1000000
+ newInBuffer->time_stamp_estimate.tv_nsec - 1000000000;
}
mPreDownlinkEstTime.tv_sec = newInBuffer->time_stamp_estimate.tv_sec;
mPreDownlinkEstTime.tv_nsec = newInBuffer->time_stamp_estimate.tv_nsec;
} else
{
//the first DL buffer is queued after downlink has already started,
//which happens when the input stream is created while the output is running
if (bRemainInfo)
{
newInBuffer->time_stamp_estimate.tv_sec = BInputInfo->time_stamp_predict.tv_sec;
newInBuffer->time_stamp_estimate.tv_nsec = BInputInfo->time_stamp_predict.tv_nsec;
} else
{
//use DL hardware buffer latency for estimate? or buffer length?
newInBuffer->time_stamp_estimate.tv_sec = BInputInfo->time_stamp_queued.tv_sec;
newInBuffer->time_stamp_estimate.tv_nsec = BInputInfo->time_stamp_queued.tv_nsec;
ALOGD("mDLLatencyTime=%d", mDLLatencyTime);
if ((mDLLatencyTime / 2) * 1000000 + newInBuffer->time_stamp_estimate.tv_nsec
>= 1000000000)
{
newInBuffer->time_stamp_estimate.tv_sec++;
newInBuffer->time_stamp_estimate.tv_nsec = (mDLLatencyTime / 2) * 1000000
+ newInBuffer->time_stamp_estimate.tv_nsec - 1000000000;
} else
{
newInBuffer->time_stamp_estimate.tv_nsec = (mDLLatencyTime / 2) * 1000000
+ newInBuffer->time_stamp_estimate.tv_nsec;
}
}
mPreDownlinkEstTime.tv_sec = newInBuffer->time_stamp_estimate.tv_sec;
mPreDownlinkEstTime.tv_nsec = newInBuffer->time_stamp_estimate.tv_nsec;
mPreDownlinkQueueTime.tv_sec = BInputInfo->time_stamp_queued.tv_sec;
mPreDownlinkQueueTime.tv_nsec = BInputInfo->time_stamp_queued.tv_nsec;
}
mPreDLBufLen = inBufLen;
} else
//not the first DL buffer queued; continuous queueing
{
if (bRemainInfo)
{
newInBuffer->time_stamp_estimate.tv_sec = BInputInfo->time_stamp_predict.tv_sec;
newInBuffer->time_stamp_estimate.tv_nsec = BInputInfo->time_stamp_predict.tv_nsec;
} else
{
struct timespec Esttstamp;
unsigned long long diffns = 0;
unsigned long long ns = ((mPreDLBufLen * (unsigned long long)1000000) / 32);
newInBuffer->time_stamp_estimate.tv_sec = BInputInfo->time_stamp_queued.tv_sec;
newInBuffer->time_stamp_estimate.tv_nsec = BInputInfo->time_stamp_queued.tv_nsec;
if (TimeDifference(BInputInfo->time_stamp_queued, mPreDownlinkQueueTime)
> (mDLLatencyTime * (unsigned long long)1000000))
{
//the interval between two downlink queues is larger than the hardware buffer latency,
//so this buffer is played directly since no previous data is left in the hardware buffer
ALOGD("downlink late time queue sec=%ld, nsec=%ld, mPreDownlinkQueueTime sec=%ld, nsec=%ld",
BInputInfo->time_stamp_queued.tv_sec, BInputInfo->time_stamp_queued.tv_nsec,
mPreDownlinkQueueTime.tv_sec, mPreDownlinkQueueTime.tv_nsec);
} else
{
if ((mDLLatencyTime / 2) * 1000000 + newInBuffer->time_stamp_estimate.tv_nsec
>= 1000000000)
{
newInBuffer->time_stamp_estimate.tv_sec++;
newInBuffer->time_stamp_estimate.tv_nsec = (mDLLatencyTime / 2) * 1000000
+ newInBuffer->time_stamp_estimate.tv_nsec - 1000000000;
} else
{
newInBuffer->time_stamp_estimate.tv_nsec = (mDLLatencyTime / 2) * 1000000
+ newInBuffer->time_stamp_estimate.tv_nsec;
}
}
}
mPreDownlinkQueueTime.tv_sec = BInputInfo->time_stamp_queued.tv_sec;
mPreDownlinkQueueTime.tv_nsec = BInputInfo->time_stamp_queued.tv_nsec;
mPreDownlinkEstTime.tv_sec = newInBuffer->time_stamp_estimate.tv_sec;
mPreDownlinkEstTime.tv_nsec = newInBuffer->time_stamp_estimate.tv_nsec;
mPreDLBufLen = inBufLen;
}
}
if (dir == UPLINK)// uplink: queue the raw data into the uplink input buffer queue
{
mULInBufferQ.add(newInBuffer);
mULInBufQLenTotal += inBufLen;
} else
{
//queue to the downlink input buffer queue, downlink data channel is mono
mDLInBufferQ.add(newInBuffer);
mDLInBufQLenTotal += inBufLen;
//also add to delay buffer queue
newInBuffer->BufLen4Delay = inBufLen;
newInBuffer->pRead4Delay = newInBuffer->pBufBase;
newInBuffer->pWrite4Delay = newInBuffer->pBufBase;
mDLDelayBufferQ.add(newInBuffer);
mDLDelayBufQLenTotal += inBufLen;
if (bPreQueue)
{
//while waiting for uplink to start, only keep mDLPreQnum buffers queued for reference
if ((mDLPreQLimit) || (!mDLPreQLimit && mFirstVoIPUplink))
{
while (mDLInBufferQ.size() > mDLPreQnum)
{
mDLInBufQLenTotal -= mDLInBufferQ[0]->BufLen;
mDLInBufferQ.removeAt(0);
}
} else
//uplink interrupt starts, remove previous queue
{
//for(int i; i<mDLInBufferQ.size(); i++)
while (!mDLInBufferQ.isEmpty())
{
uint32_t tempSec = mDLInBufferQ[0]->time_stamp_estimate.tv_sec;
unsigned long long tempNSec = mDLInBufferQ[0]->time_stamp_estimate.tv_nsec;
uint32_t tempsample = mDLInBufferQ[0]->BufLen / 2;
unsigned long long tempdeltaNSec = tempsample * (unsigned long long)1000000 / 16;
unsigned long long tempEndNSec = tempNSec + tempdeltaNSec;
unsigned long long tempFinalNSec = 0;
uint32_t tempFinalSec = tempSec;
if (tempEndNSec > 1000000000)
{
tempFinalNSec = tempEndNSec - 1000000000;
tempFinalSec = tempFinalSec + 1;
} else
{
tempFinalNSec = tempEndNSec;
}
if (mUplinkIntrStartTime.tv_sec > tempFinalSec)
{
mDLInBufQLenTotal -= mDLInBufferQ[0]->BufLen;
mDLInBufferQ.removeAt(0);
} else if (mUplinkIntrStartTime.tv_sec == tempFinalSec)
{
if (mUplinkIntrStartTime.tv_nsec >= tempFinalNSec)
{
mDLInBufQLenTotal -= mDLInBufferQ[0]->BufLen;
mDLInBufferQ.removeAt(0);
} else
{
//remove previous data in this buffer queue, will do it in the prepare data?
ALOGD("remove DL pre queue finish 1");
break;
}
} else
{
ALOGD("remove DL pre queue finish 2");
break;
}
}
}
//while waiting for uplink to start, only keep mDLPreQnum buffers queued for reference
if (mDLPreQLimit || (!mDLPreQLimit && mFirstVoIPUplink))
{
while (mDLDelayBufferQ.size() > mDLPreQnum)
{
mDLDelayBufQLenTotal -= mDLDelayBufferQ[0]->BufLen4Delay;
free(mDLDelayBufferQ[0]->pBufBase);
delete mDLDelayBufferQ[0];
mDLDelayBufferQ.removeAt(0);
}
} else
//uplink interrupt starts, remove previous queue
{
while (!mDLDelayBufferQ.isEmpty())
{
uint32_t tempSec = mDLDelayBufferQ[0]->time_stamp_estimate.tv_sec;
unsigned long long tempNSec = mDLDelayBufferQ[0]->time_stamp_estimate.tv_nsec;
uint32_t tempsample = mDLDelayBufferQ[0]->BufLen / 2;
unsigned long long tempdeltaNSec = tempsample * (unsigned long long)1000000 / 16;
unsigned long long tempEndNSec = tempNSec + tempdeltaNSec;
unsigned long long tempFinalNSec = 0;
uint32_t tempFinalSec = tempSec;
if (tempEndNSec > 1000000000)
{
tempFinalNSec = tempEndNSec - 1000000000;
tempFinalSec = tempFinalSec + 1;
} else
{
tempFinalNSec = tempEndNSec;
}
if (mUplinkIntrStartTime.tv_sec > tempFinalSec)
{
mDLDelayBufQLenTotal -= mDLDelayBufferQ[0]->BufLen;
free(mDLDelayBufferQ[0]->pBufBase);
delete mDLDelayBufferQ[0];
mDLDelayBufferQ.removeAt(0);
} else if (mUplinkIntrStartTime.tv_sec == tempFinalSec)
{
if (mUplinkIntrStartTime.tv_nsec >= tempFinalNSec)
{
mDLDelayBufQLenTotal -= mDLDelayBufferQ[0]->BufLen;
free(mDLDelayBufferQ[0]->pBufBase);
delete mDLDelayBufferQ[0];
mDLDelayBufferQ.removeAt(0);
} else
{
//remove previous data in this buffer queue, will do it in the prepare data?
ALOGD("remove DL delay pre queue finish 1");
break;
}
} else
{
ALOGD("remove DL delay pre queue finish 2");
break;
}
}
}
}
mBuf_Cond.signal();
}
}
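The constants 64 and 32 in the timestamp estimation above come from the byte rate of the streams, assuming 16 kHz 16-bit data: the uplink is stereo (16 samples/ms * 2 bytes * 2 channels = 64 bytes per ms) and the downlink reference is mono (32 bytes per ms). A small sketch of that conversion, with the assumptions spelled out:
// Duration covered by a buffer of buf_len_bytes, assuming 16 kHz 16-bit data.
static unsigned long long BufferDurationNs(unsigned int buf_len_bytes, bool stereo) {
    unsigned int bytes_per_ms = stereo ? 64 : 32;          // 16 samples/ms * 2 bytes * channels
    return (buf_len_bytes * 1000000ULL) / bytes_per_ms;    // matches the /64 and /32 above
}
// Example: a 640-byte mono DL buffer covers 640 * 1e6 / 32 = 20,000,000 ns = 20 ms.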
Create the data-read client for the input device: read the uplink data and perform echo cancellation on it
void *AudioALSACaptureDataProviderNormal::readThread(void *arg)
void AudioALSACaptureDataProviderBase::provideCaptureDataToAllClients(const uint32_t open_index)
uint32_t AudioALSACaptureDataClient::copyCaptureDataToClient(RingBuf pcm_read_buf)
uint32_t AudioALSACaptureDataClient::BesRecordPreprocess(void *buffer , uint32_t bytes)
int SPELayer::Process(InBufferInfo *InBufinfo)
{
mState = SPE_STATE_RUNNING;
AddtoInputBuffer(UPLINK, InBufinfo);// queue the unprocessed uplink data
int inBufLength = InBufinfo->BufLen;
short *inBuf = InBufinfo->pBufBase;
int retSize = inBufLength;
//process the input buffer queue
if (mMode == SPE_MODE_REC)
//normal recording
{
mVoIPRunningbefore = false;
retSize = Process_Record(inBuf, inBufLength);// process the recording data
}
else
//VoIP
{
mVoIPRunningbefore = true;
Process_VoIP(inBuf, inBufLength);// process the VoIP data
}
Dump_PCM_Out(UPLINK, inBuf, retSize);
return retSize;
}
bool SPELayer::Process_VoIP(short *inBuf, int inBufLength)
{
if (mULInBufQLenTotal < mSPEProcessBufSize) //not enough UL input buffer for process
{
int tmpInBufLength = inBufLength;
if (mULOutBufferQ.isEmpty() || mULOutBufQLenTotal < inBufLength) //TODO:fixme, return data we have?
{
ALOGD("not enough UL output buffer, inBuf=%p,inBufLength=%d", inBuf, inBufLength);
memset(inBuf, 0, inBufLength); //reset data
return true;
}
int count = 0;
int tempULCopy = mULOutBufferQ[0]->BufLen >> 2;
while (tmpInBufLength)
{
if (mULOutBufferQ.isEmpty())
{
break;
}
if (tempULCopy > 0) //get the buffer data from the first uplink output buffer queue
{
*(inBuf + count) = *(mULOutBufferQ[0]->pRead);
*(inBuf + count + 1) = *(mULOutBufferQ[0]->pRead + 1);
mULOutBufferQ[0]->pRead += 2;
tmpInBufLength -= 4; //int and short transform
tempULCopy--;
count += 2;
mULOutBufQLenTotal -= 4; //int and short transform
mULOutBufferQ[0]->BufLen -= 4;
} else
//consume all the data in first queue buffer
{
free(mULOutBufferQ[0]->pBufBase);
delete mULOutBufferQ[0];
mULOutBufferQ.removeAt(0);
if (!mULOutBufferQ.isEmpty())
{
tempULCopy = mULOutBufferQ[0]->BufLen >> 2;
} else
{
ALOGD("%s, mULOutBufferQ is empty!!! size=%d", __FUNCTION__, mULOutBufferQ.size());
}
}
}
return true;
}
//FIXME: process when UL data is enough; missing DL data needs to be compensated with zeros
//process only if there is enough input UL data (UL is stereo, DL is mono)
if (mULInBufQLenTotal >= mSPEProcessBufSize)
{
while (mULInBufQLenTotal >= mSPEProcessBufSize)
{
if (mDLInBufQLenTotal < mSPEProcessBufSize / 2) //not enough downlink data to process
{
if (mULOutBufQLenTotal >= inBufLength)
{
ALOGD("Process_VoIP have enough uplink processed data, skip this time");
break;
}
}
if (PrepareProcessData())
//sync ok, could start process
{
if (mDLInBufQLenTotal < mSPEProcessBufSize / 2) //not enough downlink data to process
{
if (WaitforDownlinkData())
//got new DL data queue
{
if (mDLInBufQLenTotal < mSPEProcessBufSize / 2) //not enough data to process
{
ALOGD("got new DL buffer, but still not enough data to process");
continue;
}
} else
//no new DL data queue
{
ALOGD("no new DL buffer queue, process directly");
}
} else //has enough downlink data to process
{
if (mNewReferenceBufferComes)
{
InsertDownlinkData();
}
}
} else
//no sync yet, no need to check or wait for downlink data
{
if (mNewReferenceBufferComes)
{
ALOGD("also check if new downlink data comes even the sync is not ready");
InsertDownlinkData();
}
}
//fill in the data to process buffer
int tmpSPEProcessBufSize = mSPEProcessBufSize;
int indexIn = 0;
int ULIncopysize = mULInBufferQ[0]->BufLen >> 2;
struct timespec tstamp_process;
struct timespec DLtstamp_compen;
if (mDLInBufferQ.isEmpty())
{
DLtstamp_compen = mULInBufferQ[0]->time_stamp_estimate;
}
while (tmpSPEProcessBufSize)
{
if (mULInBufferQ.isEmpty()) //||mDLInBufferQ.isEmpty()||mDLDelayBufferQ.isEmpty())
{
ALOGD("%s,input buffer queue is empty, something wrong!!", __FUNCTION__);
mError = true;
break;
}
tstamp_process = GetSystemTime();
if (ULIncopysize > 0) //get the buffer data from the first uplink input buffer queue
{
//fill in uplink data
*(mpSPEBufferUL1 + indexIn) = *(mULInBufferQ[0]->pRead);
*(mpSPEBufferUL2 + indexIn) = *(mULInBufferQ[0]->pRead + 1);
mULInBufferQ[0]->pRead += 2;
mULInBufQLenTotal -= 4; //int and short transform
mULInBufferQ[0]->BufLen -= 4; //record the buffer you consumed
mULInBufferQ[0]->time_stamp_process = tstamp_process;
//update the estimated time as the corresponding samples are consumed
mULInBufferQ[0]->time_stamp_estimate.tv_sec = mULInBufferQ[0]->time_stamp_estimate.tv_sec
+ (mULInBufferQ[0]->time_stamp_estimate.tv_nsec + mNsecPerSample) / 1000000000;
mULInBufferQ[0]->time_stamp_estimate.tv_nsec = (mULInBufferQ[0]->time_stamp_estimate.tv_nsec
+ mNsecPerSample) % 1000000000;
//fill in downlink data
if (mDLInBufferQ.isEmpty())
{
CompensateBuffer(tmpSPEProcessBufSize / 2, DLtstamp_compen);
}
if (mDLInBufferQ[0]->BufLen <= 0) //run out of DL queue0 buffer
{
mDLInBufferQ.removeAt(0);
if (mDLInBufferQ.isEmpty())
{
CompensateBuffer(tmpSPEProcessBufSize / 2, DLtstamp_compen);
}
}
mDLInBufferQ[0]->time_stamp_process = tstamp_process;
*(mpSPEBufferDL + indexIn) = *(mDLInBufferQ[0]->pRead); //already mono data
mDLInBufferQ[0]->pRead++;
mDLInBufQLenTotal -= 2; //int and short transform
mDLInBufferQ[0]->BufLen -= 2; //record the buffer you consumed
mDLInBufferQ[0]->time_stamp_estimate.tv_sec = mDLInBufferQ[0]->time_stamp_estimate.tv_sec
+ (mDLInBufferQ[0]->time_stamp_estimate.tv_nsec + mNsecPerSample) / 1000000000;
mDLInBufferQ[0]->time_stamp_estimate.tv_nsec = (mDLInBufferQ[0]->time_stamp_estimate.tv_nsec
+ mNsecPerSample) % 1000000000;
DLtstamp_compen = mDLInBufferQ[0]->time_stamp_estimate;
if (mDLInBufferQ[0]->BufLen <= 0) //run out of DL queue0 buffer
{
//do not free the buffer here since the data is still queued in the DLDelay buffer
mDLInBufferQ.removeAt(0);
}
//fill in delay latency data
if (mDLDelayBufferQ[0]->BufLen4Delay <= 0) //run out of DL delay queue0 buffer
{
//ALOGD("DL delay consume");
free(mDLDelayBufferQ[0]->pBufBase);
delete mDLDelayBufferQ[0];
mDLDelayBufferQ.removeAt(0);
if (mDLDelayBufferQ.isEmpty())
{
ALOGD("no DL delay buffer, should already compensate something wrong");
mError = true;
break;
}
}
mDLDelayBufferQ[0]->time_stamp_process = tstamp_process;
*(mpSPEBufferDLDelay + indexIn) = *(mDLDelayBufferQ[0]->pRead4Delay);
mDLDelayBufferQ[0]->pRead4Delay++;
mDLDelayBufQLenTotal -= 2; //int and short transform
mDLDelayBufferQ[0]->BufLen4Delay -= 2; //record the buffer you consumed
if (mDLDelayBufferQ[0]->BufLen4Delay <= 0) //run out of DL delay queue0 buffer
{
free(mDLDelayBufferQ[0]->pBufBase);
delete mDLDelayBufferQ[0];
mDLDelayBufferQ.removeAt(0);
}
ULIncopysize--;
indexIn++;
tmpSPEProcessBufSize -= 4;
} else
//consume all the data in first queue buffer
{
free(mULInBufferQ[0]->pBufBase);
delete mULInBufferQ[0];
mULInBufferQ.removeAt(0);
ULIncopysize = mULInBufferQ[0]->BufLen >> 2;
}
}
//after fill buffer, process
mSphEnhOps.ENH_API_Process(&mSph_Enh_ctrl);
Dump_EPL(&mSph_Enh_ctrl.EPL_buffer, EPLBufSize * sizeof(short));
EPLTransVMDump();
BufferInfo *newULOutBuffer = new BufferInfo;
newULOutBuffer->pBufBase = (short *) malloc(mSPEProcessBufSize);
newULOutBuffer->BufLen = mSPEProcessBufSize;
newULOutBuffer->pRead = newULOutBuffer->pBufBase;
newULOutBuffer->pWrite = newULOutBuffer->pBufBase;
int indexOut = 0;
int copysizetest = newULOutBuffer->BufLen >> 2;
while (copysizetest)
{
*(newULOutBuffer->pWrite) = *(mpSPEBufferNE + indexOut);
*(newULOutBuffer->pWrite + 1) = *(mpSPEBufferNE + indexOut);
newULOutBuffer->pWrite += 2;
indexOut++;
copysizetest--;
}
mULOutBufferQ.add(newULOutBuffer);
mULOutBufQLenTotal += newULOutBuffer->BufLen;
}
} else
//not enough UL data, skip processing
{
ALOGD("not enough uplink data, not process");
}
//process the uplink output processed buffer queue
memset(inBuf, 0, inBufLength); //clean the buffer will be used
if (mULOutBufferQ.isEmpty() || mULOutBufQLenTotal < inBufLength) //fixme, return data we have?
{
ALOGD("SPELayer not enought UL output buffer return size");
return true;
} else
{
int tmpInBufLength = inBufLength;
if (mULOutBufQLenTotal < inBufLength)
{
ALOGD("Process_VoIP mULOutBufQLenTotal<inBufLength");
tmpInBufLength = mULOutBufQLenTotal;
}
int count = 0;
int tempULCopy = mULOutBufferQ[0]->BufLen >> 2;
while (tmpInBufLength)
{
if (mULOutBufferQ.isEmpty())
{
ALOGD("Process_VoIP run out of
output buffer queue");
break;
}
if (tempULCopy > 0) //get the buffer data from the first uplink output buffer queue
{
*(inBuf + count) = *(mULOutBufferQ[0]->pRead);
*(inBuf + count + 1) = *(mULOutBufferQ[0]->pRead + 1);
mULOutBufferQ[0]->pRead += 2;
tmpInBufLength -= 4; //int and short transform
tempULCopy--;
count += 2;
mULOutBufQLenTotal -= 4; //int and short transform
mULOutBufferQ[0]->BufLen -= 4;
} else
//consume all the data in first queue buffer
{
free(mULOutBufferQ[0]->pBufBase);
delete mULOutBufferQ[0];
mULOutBufferQ.removeAt(0);
if (!mULOutBufferQ.isEmpty())
{
tempULCopy = mULOutBufferQ[0]->BufLen >> 2;
} else
{
ALOGD("Process_VoIP mULOutBufferQ empty no more data 2, size=%d", mULOutBufferQ.size());
}
}
}
}
return true;
}
Align the timestamps of the uplink and downlink data
bool SPELayer::PrepareProcessData()
{
bool bRet = false;
if (mPrepareProcessDataReady)
{
bRet = true;
return bRet;
}
if (mDLNewStart || (DLdataPrepareCount > 0))
{
//compensate data to DL and DL delay as zero data for first uplink buffer process
BypassDLBuffer();
if (mDLNewStart)
{
CalPrepareCount(); // 2 * 4;
} else
{
DLdataPrepareCount--;
}
bRet = false;
} else
//when all data is ready, check the estimated time so that DL/UL can start together
{
if (mDLInBufferQ.isEmpty() || mDLDelayBufferQ.isEmpty())
{
ALOGD("no downlink data, no need to sync");
return bRet;
}
if (DLdataPrepareCount > 0)
{
DLdataPrepareCount--;
ALOGD("prepare data DLdataPrepareCount=%d", DLdataPrepareCount);
return bRet;
}
if (TimeCompare(mDownlinkIntrStartTime, mULInBufferQ[0]->time_stamp_estimate))
{
//compensate data to DL and DL delay as zero data for first uplink buffer process;
BypassDLBuffer();
return bRet;
}
bool bULlate = false;
int deltaSec = 0;
unsigned long long deltaNSec = 0;
//the downlink data arrived earlier; this portion needs to be dropped
if (TimeStampCompare(mULInBufferQ[0], mDLInBufferQ[0], 0))
//drop downlink and downlink delay data
{
//remove previously queued downlink data so that the nearest DL buffer timestamp matches the uplink one
while ((!mDLInBufferQ.isEmpty()) && (TimeStampCompare(mULInBufferQ[0], mDLInBufferQ[0], 1)))
{
//drop DL data
uint32_t droplength = mDLInBufferQ[0]->BufLen;
mDLInBufQLenTotal -= mDLInBufferQ[0]->BufLen;
mDLInBufferQ.removeAt(0);
//drop DL delay data
while (droplength > 0)
{
if (droplength < mDLDelayBufferQ[0]->BufLen4Delay)
{
mDLDelayBufferQ[0]->BufLen4Delay -= droplength;
mDLDelayBufQLenTotal -= droplength;
mDLDelayBufferQ[0]->pRead4Delay = mDLDelayBufferQ[0]->pRead4Delay + droplength / 2;
droplength = 0;
} else
{
droplength -= mDLDelayBufferQ[0]->BufLen4Delay;
mDLDelayBufQLenTotal -= mDLDelayBufferQ[0]->BufLen4Delay;
free(mDLDelayBufferQ[0]->pBufBase);
delete mDLDelayBufferQ[0];
mDLDelayBufferQ.removeAt(0);
}
}
if (mDLInBufferQ.isEmpty())
{
ALOGD("%s, something wrong? no DL buffer data, sync again", __FUNCTION__);
return false;
}
}
if (TimeStampCompare(mULInBufferQ[0], mDLInBufferQ[0], 0))
{
ALOGD("%s, calculate drop downlink data time", __FUNCTION__);
bULlate = true; //calculate drop downlink data time
if (mULInBufferQ[0]->time_stamp_estimate.tv_nsec >= mDLInBufferQ[0]->time_stamp_estimate.tv_nsec)
{
deltaSec = mULInBufferQ[0]->time_stamp_estimate.tv_sec
- mDLInBufferQ[0]->time_stamp_estimate.tv_sec;
deltaNSec = mULInBufferQ[0]->time_stamp_estimate.tv_nsec
- mDLInBufferQ[0]->time_stamp_estimate.tv_nsec;
} else
{
deltaSec = mULInBufferQ[0]->time_stamp_estimate.tv_sec
- mDLInBufferQ[0]->time_stamp_estimate.tv_sec - 1;
deltaNSec = 1000000000 + mULInBufferQ[0]->time_stamp_estimate.tv_nsec
- mDLInBufferQ[0]->time_stamp_estimate.tv_nsec;
}
} else
{
bULlate = false;
ALOGD("%s, actually uplink is earlier!!! need compensate downlink as zero", __FUNCTION__);
if (mDLInBufferQ[0]->time_stamp_estimate.tv_nsec >= mULInBufferQ[0]->time_stamp_estimate.tv_nsec)
{
deltaSec = mDLInBufferQ[0]->time_stamp_estimate.tv_sec
- mULInBufferQ[0]->time_stamp_estimate.tv_sec;
deltaNSec = mDLInBufferQ[0]->time_stamp_estimate.tv_nsec
- mULInBufferQ[0]->time_stamp_estimate.tv_nsec;
} else
{
deltaSec = mDLInBufferQ[0]->time_stamp_estimate.tv_sec
- mULInBufferQ[0]->time_stamp_estimate.tv_sec - 1;
deltaNSec = 1000000000 + mDLInBufferQ[0]->time_stamp_estimate.tv_nsec
- mULInBufferQ[0]->time_stamp_estimate.tv_nsec;
}
}
} else
//the uplink data arrived earlier
{
//fill the missing downlink portion with zeros
bULlate = false;
if (mDLInBufferQ[0]->time_stamp_estimate.tv_nsec >= mULInBufferQ[0]->time_stamp_estimate.tv_nsec)
{
deltaSec = mDLInBufferQ[0]->time_stamp_estimate.tv_sec
- mULInBufferQ[0]->time_stamp_estimate.tv_sec;
deltaNSec = mDLInBufferQ[0]->time_stamp_estimate.tv_nsec
- mULInBufferQ[0]->time_stamp_estimate.tv_nsec;
} else
{
deltaSec = mDLInBufferQ[0]->time_stamp_estimate.tv_sec
- mULInBufferQ[0]->time_stamp_estimate.tv_sec - 1;
deltaNSec = 1000000000 + mDLInBufferQ[0]->time_stamp_estimate.tv_nsec
- mULInBufferQ[0]->time_stamp_estimate.tv_nsec;
}
}
if (deltaSec < 0)
{
ALOGW("%s, deltaSec < 0? sync again", __FUNCTION__);
return false;
}
unsigned long long diffnsec = deltaSec * 1000000000 + deltaNSec;
uint32_t diffSample = 16 * diffnsec / 1000000;
uint32_t diffBufLength = diffSample * sizeof(short);
while (diffBufLength > 0)
{
if (bULlate == true) //drop DL data and DL delay data
{
if (mDLInBufferQ.isEmpty() || mDLDelayBufferQ.isEmpty())
{
ALOGW("%s, no mDLInBufferQ data, something wrong? sync again", __FUNCTION__);
return false;
}
if ((diffBufLength > mDLInBufQLenTotal) || (diffBufLength > mDLDelayBufQLenTotal))
{
//time diff more than DL preQ data
ALOGW("%s, something wrong happened?, sync again", __FUNCTION__);
diffBufLength = mDLInBufQLenTotal;
return false;
}
if (diffBufLength >= mDLInBufferQ[0]->BufLen)
{
//drop DL data
uint32_t droplength = mDLInBufferQ[0]->BufLen;
diffBufLength -= mDLInBufferQ[0]->BufLen;
mDLInBufQLenTotal -= mDLInBufferQ[0]->BufLen;
mDLInBufferQ.removeAt(0);
//drop DL delay data
while (droplength > 0)
{
if (droplength < mDLDelayBufferQ[0]->BufLen4Delay)
{
mDLDelayBufferQ[0]->BufLen4Delay -= droplength;
mDLDelayBufQLenTotal -= droplength;
mDLDelayBufferQ[0]->pRead4Delay = mDLDelayBufferQ[0]->pRead4Delay + droplength / 2;
droplength = 0;
} else
{
droplength -= mDLDelayBufferQ[0]->BufLen4Delay;
mDLDelayBufQLenTotal -= mDLDelayBufferQ[0]->BufLen4Delay;
free(mDLDelayBufferQ[0]->pBufBase);
delete mDLDelayBufferQ[0];
mDLDelayBufferQ.removeAt(0);
}
}
} else
{
uint32_t droplength = diffBufLength;
mDLInBufferQ[0]->BufLen -= diffBufLength;
//record the buffer you consumed
mDLInBufQLenTotal -= diffBufLength;
mDLInBufferQ[0]->pRead = mDLInBufferQ[0]->pRead + diffBufLength / 2;
uint32_t adjustsample = diffBufLength / 2;
unsigned long long updateDLnsecdiff = 0;
updateDLnsecdiff = (adjustsample * (unsigned long long)1000000) / 16;
mDLInBufferQ[0]->time_stamp_estimate.tv_sec = mDLInBufferQ[0]->time_stamp_estimate.tv_sec
+ (mDLInBufferQ[0]->time_stamp_estimate.tv_nsec + updateDLnsecdiff) / 1000000000;
mDLInBufferQ[0]->time_stamp_estimate.tv_nsec = (mDLInBufferQ[0]->time_stamp_estimate.tv_nsec
+ updateDLnsecdiff) % 1000000000;
diffBufLength = 0;
//drop DL delay data
while (droplength > 0)
{
if (droplength < mDLDelayBufferQ[0]->BufLen4Delay)
{
mDLDelayBufferQ[0]->BufLen4Delay -= droplength;
mDLDelayBufQLenTotal -= droplength;
mDLDelayBufferQ[0]->pRead4Delay = mDLDelayBufferQ[0]->pRead4Delay + droplength / 2;
droplength = 0;
} else
{
droplength -= mDLDelayBufferQ[0]->BufLen4Delay;
mDLDelayBufQLenTotal -= mDLDelayBufferQ[0]->BufLen4Delay;
free(mDLDelayBufferQ[0]->pBufBase);
delete mDLDelayBufferQ[0];
mDLDelayBufferQ.removeAt(0);
}
}
}
} else
//add DL zero data at the beginning
{
BufferInfo *newInBuffer = new BufferInfo;
struct timespec tstamp;
uint32_t BufLength = diffBufLength;
newInBuffer->pBufBase = (short *) malloc(BufLength);
ALOGD("%s, data is ready but need adjust", __FUNCTION__);
memset(newInBuffer->pBufBase, 0xEE, BufLength);
tstamp = GetSystemTime();
newInBuffer->BufLen = BufLength;
newInBuffer->pRead = newInBuffer->pBufBase;
newInBuffer->pWrite = newInBuffer->pBufBase;
newInBuffer->time_stamp_queued = tstamp;
newInBuffer->time_stamp_estimate = tstamp;
newInBuffer->time_stamp_process = {0};
//queue to the beginning of the downlink input buffer queue, downlink data channel is mono
mDLInBufferQ.push_front(newInBuffer);
mDLInBufQLenTotal += BufLength;
newInBuffer->BufLen4Delay = BufLength;
newInBuffer->pRead4Delay = newInBuffer->pBufBase;
newInBuffer->pWrite4Delay = newInBuffer->pBufBase;
mDLDelayBufferQ.push_front(newInBuffer);
mDLDelayBufQLenTotal += BufLength;
diffBufLength = 0;
}
}
mPrepareProcessDataReady = true;
bRet = true;
if (mNeedJitterBuffer && (mJitterSampleCount != 0))
{
mNeedJitterBuffer = false;
BufferInfo *newJitterBuffer = new BufferInfo;
newJitterBuffer->pBufBase = (short *) malloc(mJitterSampleCount * sizeof(short));
newJitterBuffer->BufLen = mJitterSampleCount * sizeof(short);
newJitterBuffer->pRead = newJitterBuffer->pBufBase;
newJitterBuffer->pWrite = newJitterBuffer->pBufBase;
newJitterBuffer->BufLen4Delay = mJitterSampleCount * sizeof(short);
newJitterBuffer->pRead4Delay = newJitterBuffer->pBufBase;
newJitterBuffer->pWrite4Delay = newJitterBuffer->pBufBase;
memset(newJitterBuffer->pBufBase, 0, newJitterBuffer->BufLen);
newJitterBuffer->time_stamp_process = {0};
mDLInBufferQ.push_front(newJitterBuffer);
mDLInBufQLenTotal += newJitterBuffer->BufLen;
mDLDelayBufferQ.push_front(newJitterBuffer);
mDLDelayBufQLenTotal += newJitterBuffer->BufLen;
}
if (mNeedDelayLatency && (mLatencySampleCount != 0))
{
if (mLatencyDir == true)
{
BufferInfo *newDelayBuffer = new BufferInfo;
newDelayBuffer->pBufBase = (short *) malloc(mLatencySampleCount * sizeof(short));
newDelayBuffer->BufLen = mLatencySampleCount * sizeof(short);
newDelayBuffer->pRead = newDelayBuffer->pBufBase;
newDelayBuffer->pWrite = newDelayBuffer->pBufBase;
newDelayBuffer->BufLen4Delay = mLatencySampleCount * sizeof(short);
newDelayBuffer->pRead4Delay = newDelayBuffer->pBufBase;
newDelayBuffer->pWrite4Delay = newDelayBuffer->pBufBase;
memset(newDelayBuffer->pBufBase, 0, newDelayBuffer->BufLen);
newDelayBuffer->time_stamp_process = {0};
mDLDelayBufferQ.push_front(newDelayBuffer);
mDLDelayBufQLenTotal += newDelayBuffer->BufLen;
} else
{
uint32_t diffLatencyBufLength = mLatencySampleCount * sizeof(short);
while (diffLatencyBufLength > 0)
{
if (mDLInBufferQ.isEmpty() || mDLDelayBufferQ.isEmpty())
{
ALOGW("adjust downlink data no mDLInBufferQ data");
break;
}
if ((diffLatencyBufLength > mDLInBufQLenTotal)
|| (diffLatencyBufLength > mDLDelayBufQLenTotal))
{
//time diff more than DL preQ data
ALOGW("adjust downlink data something wrong happened?");
diffLatencyBufLength = mDLInBufQLenTotal;
//break;
}
if (diffLatencyBufLength >= mDLInBufferQ[0]->BufLen)
{
//drop DL data
uint32_t droplength = mDLInBufferQ[0]->BufLen;
diffLatencyBufLength -= mDLInBufferQ[0]->BufLen;
mDLInBufQLenTotal -= mDLInBufferQ[0]->BufLen;
mDLInBufferQ.removeAt(0);
//drop DL delay data
while (droplength > 0)
{
if (droplength < mDLDelayBufferQ[0]->BufLen4Delay)
{
mDLDelayBufferQ[0]->BufLen4Delay -= droplength;
mDLDelayBufQLenTotal -= droplength;
mDLDelayBufferQ[0]->pRead4Delay = mDLDelayBufferQ[0]->pRead4Delay + droplength / 2;
droplength = 0;
} else
{
droplength -= mDLDelayBufferQ[0]->BufLen4Delay;
mDLDelayBufQLenTotal -= mDLDelayBufferQ[0]->BufLen4Delay;
free(mDLDelayBufferQ[0]->pBufBase);
delete mDLDelayBufferQ[0];
mDLDelayBufferQ.removeAt(0);
}
}
} else
{
uint32_t droplength = diffLatencyBufLength;
mDLInBufferQ[0]->BufLen -= diffLatencyBufLength;
//record the buffer you consumed
mDLInBufQLenTotal -= diffLatencyBufLength;
mDLInBufferQ[0]->pRead = mDLInBufferQ[0]->pRead + diffLatencyBufLength / 2;
uint32_t adjustsample = diffLatencyBufLength / 2;
unsigned long long updateDLnsecdiff = 0;
updateDLnsecdiff = (adjustsample * (unsigned long long)1000000) / 16;
mDLInBufferQ[0]->time_stamp_estimate.tv_sec = mDLInBufferQ[0]->time_stamp_estimate.tv_sec
+ (mDLInBufferQ[0]->time_stamp_estimate.tv_nsec + updateDLnsecdiff) / 1000000000;
mDLInBufferQ[0]->time_stamp_estimate.tv_nsec = (mDLInBufferQ[0]->time_stamp_estimate.tv_nsec
+ updateDLnsecdiff) % 1000000000;
diffLatencyBufLength = 0;
//drop DL delay data
while (droplength > 0)
{
if (droplength < mDLDelayBufferQ[0]->BufLen4Delay)
{
mDLDelayBufferQ[0]->BufLen4Delay -= droplength;
mDLDelayBufQLenTotal -= droplength;
mDLDelayBufferQ[0]->pRead4Delay = mDLDelayBufferQ[0]->pRead4Delay + droplength / 2;
droplength = 0;
} else
{
droplength -= mDLDelayBufferQ[0]->BufLen4Delay;
mDLDelayBufQLenTotal -= mDLDelayBufferQ[0]->BufLen4Delay;
free(mDLDelayBufferQ[0]->pBufBase);
delete mDLDelayBufferQ[0];
mDLDelayBufferQ.removeAt(0);
}
}
}
}
}
}
}
return bRet;
}
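The drift between the uplink and downlink estimated timestamps is converted to a byte count with diffSample = 16 * diffnsec / 1000000, i.e. 16 samples per millisecond at 16 kHz, then to bytes of mono 16-bit data. A worked sketch of that conversion, assuming the same 16 kHz mono format:
// Convert a UL/DL timestamp offset into the number of downlink bytes to drop
// or to zero-fill, assuming 16 kHz mono 16-bit data as in PrepareProcessData().
static uint32_t DriftBytes(int deltaSec, unsigned long long deltaNSec) {
    unsigned long long diffnsec = (unsigned long long)deltaSec * 1000000000ULL + deltaNSec;
    uint32_t diffSample = (uint32_t)(16ULL * diffnsec / 1000000ULL);  // 16 samples per ms
    return diffSample * sizeof(short);                                // bytes
}
// Example: a 5 ms offset -> 16 * 5,000,000 / 1,000,000 = 80 samples = 160 bytes.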