Mirror of https://github.com/mozilla/gecko-dev.git, synced 2024-11-30 16:22:00 +00:00
Bug 932112: Webrtc updated to 5041, pull made Mon Oct 28 12:17:00 EDT 2013 rs=jesup
--HG--
rename : media/webrtc/trunk/webrtc/modules/audio_coding/main/source/acm_common_defs.h => media/webrtc/trunk/webrtc/modules/audio_coding/main/acm2/acm_common_defs.h
rename : media/webrtc/trunk/webrtc/modules/audio_device/android/org/webrtc/voiceengine/WebRTCAudioDevice.java => media/webrtc/trunk/webrtc/modules/audio_device/android/java/src/org/webrtc/voiceengine/WebRTCAudioDevice.java
rename : media/webrtc/trunk/webrtc/modules/audio_processing/test/unit_test.cc => media/webrtc/trunk/webrtc/modules/audio_processing/test/audio_processing_unittest.cc
rename : media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtp_payload_registry.h => media/webrtc/trunk/webrtc/modules/rtp_rtcp/interface/rtp_payload_registry.h
rename : media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/receiver_fec_unittest.cc => media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/fec_receiver_unittest.cc
rename : media/webrtc/trunk/webrtc/modules/video_capture/android/java/org/webrtc/videoengine/CaptureCapabilityAndroid.java => media/webrtc/trunk/webrtc/modules/video_capture/android/java/src/org/webrtc/videoengine/CaptureCapabilityAndroid.java
rename : media/webrtc/trunk/webrtc/modules/video_capture/android/java/org/webrtc/videoengine/VideoCaptureDeviceInfoAndroid.java => media/webrtc/trunk/webrtc/modules/video_capture/android/java/src/org/webrtc/videoengine/VideoCaptureDeviceInfoAndroid.java
rename : media/webrtc/trunk/webrtc/modules/video_coding/main/source/stream_generator.cc => media/webrtc/trunk/webrtc/modules/video_coding/main/source/test/stream_generator.cc
rename : media/webrtc/trunk/webrtc/modules/video_coding/main/source/stream_generator.h => media/webrtc/trunk/webrtc/modules/video_coding/main/source/test/stream_generator.h
rename : media/webrtc/trunk/webrtc/modules/video_processing/main/test/unit_test/unit_test.cc => media/webrtc/trunk/webrtc/modules/video_processing/main/test/unit_test/video_processing_unittest.cc
rename : media/webrtc/trunk/webrtc/modules/video_processing/main/test/unit_test/unit_test.h => media/webrtc/trunk/webrtc/modules/video_processing/main/test/unit_test/video_processing_unittest.h
rename : media/webrtc/trunk/webrtc/modules/video_render/android/java/org/webrtc/videoengine/ViEAndroidGLES20.java => media/webrtc/trunk/webrtc/modules/video_render/android/java/src/org/webrtc/videoengine/ViEAndroidGLES20.java
rename : media/webrtc/trunk/webrtc/modules/video_render/android/java/org/webrtc/videoengine/ViERenderer.java => media/webrtc/trunk/webrtc/modules/video_render/android/java/src/org/webrtc/videoengine/ViERenderer.java
rename : media/webrtc/trunk/webrtc/modules/video_render/android/java/org/webrtc/videoengine/ViESurfaceRenderer.java => media/webrtc/trunk/webrtc/modules/video_render/android/java/src/org/webrtc/videoengine/ViESurfaceRenderer.java
This commit is contained in:
parent aa20a25d38
commit 8338325b64
45
media/webrtc/trunk/webrtc/PRESUBMIT.py
Normal file
@ -0,0 +1,45 @@
# Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
# that can be found in the LICENSE file in the root of the source
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.

def _LicenseHeader(input_api):
  """Returns the license header regexp."""
  # Accept any year number from 2011 to the current year
  current_year = int(input_api.time.strftime('%Y'))
  allowed_years = (str(s) for s in reversed(xrange(2011, current_year + 1)))
  years_re = '(' + '|'.join(allowed_years) + ')'
  license_header = (
      r'.*? Copyright \(c\) %(year)s The WebRTC project authors\. '
      r'All Rights Reserved\.\n'
      r'.*?\n'
      r'.*? Use of this source code is governed by a BSD-style license\n'
      r'.*? that can be found in the LICENSE file in the root of the source\n'
      r'.*? tree\. An additional intellectual property rights grant can be '
      r'found\n'
      r'.*? in the file PATENTS\. All contributing project authors may\n'
      r'.*? be found in the AUTHORS file in the root of the source tree\.\n'
  ) % {
      'year': years_re,
  }
  return license_header

def _CommonChecks(input_api, output_api):
  """Checks common to both upload and commit."""
  results = []
  results.extend(input_api.canned_checks.CheckLicense(
      input_api, output_api, _LicenseHeader(input_api)))
  return results

def CheckChangeOnUpload(input_api, output_api):
  results = []
  results.extend(_CommonChecks(input_api, output_api))
  return results

def CheckChangeOnCommit(input_api, output_api):
  results = []
  results.extend(_CommonChecks(input_api, output_api))
  return results
192
media/webrtc/trunk/webrtc/build/apk_tests.gyp
Normal file
@ -0,0 +1,192 @@
# Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
# that can be found in the LICENSE file in the root of the source
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.

# This file exists in two versions. A no-op version under
# webrtc/build/apk_tests_noop.gyp and this one. This gyp file builds the apk
# unit tests (for Android) assuming that WebRTC is built inside a Chromium
# workspace. The no-op version is included when building WebRTC without
# Chromium. This is a workaround for the fact that 'includes' don't expand
# variables and that the relative location of apk_test.gypi is different for
# WebRTC when built as part of Chromium and when it is built without Chromium.
{
  'includes': [
    'common.gypi',
  ],
  'targets': [
    {
      'target_name': 'audio_decoder_unittests_apk',
      'type': 'none',
      'variables': {
        'test_suite_name': 'audio_decoder_unittests',
        'input_shlib_path': '<(SHARED_LIB_DIR)/<(SHARED_LIB_PREFIX)audio_decoder_unittests<(SHARED_LIB_SUFFIX)',
      },
      'dependencies': [
        '<(webrtc_root)/modules/modules.gyp:audio_decoder_unittests',
      ],
      'includes': [
        '../../../build/apk_test.gypi',
      ],
    },
    {
      'target_name': 'common_audio_unittests_apk',
      'type': 'none',
      'variables': {
        'test_suite_name': 'common_audio_unittests',
        'input_shlib_path': '<(SHARED_LIB_DIR)/<(SHARED_LIB_PREFIX)common_audio_unittests<(SHARED_LIB_SUFFIX)',
      },
      'dependencies': [
        '<(webrtc_root)/common_audio/common_audio.gyp:common_audio_unittests',
      ],
      'includes': [
        '../../../build/apk_test.gypi',
      ],
    },
    {
      'target_name': 'common_video_unittests_apk',
      'type': 'none',
      'variables': {
        'test_suite_name': 'common_video_unittests',
        'input_shlib_path': '<(SHARED_LIB_DIR)/<(SHARED_LIB_PREFIX)common_video_unittests<(SHARED_LIB_SUFFIX)',
      },
      'dependencies': [
        '<(webrtc_root)/common_video/common_video.gyp:common_video_unittests',
      ],
      'includes': [
        '../../../build/apk_test.gypi',
      ],
    },
    {
      'target_name': 'metrics_unittests_apk',
      'type': 'none',
      'variables': {
        'test_suite_name': 'metrics_unittests',
        'input_shlib_path': '<(SHARED_LIB_DIR)/<(SHARED_LIB_PREFIX)metrics_unittests<(SHARED_LIB_SUFFIX)',
      },
      'dependencies': [
        '<(webrtc_root)/test/metrics.gyp:metrics_unittests',
      ],
      'includes': [
        '../../../build/apk_test.gypi',
      ],
    },
    {
      'target_name': 'modules_tests_apk',
      'type': 'none',
      'variables': {
        'test_suite_name': 'modules_tests',
        'input_shlib_path': '<(SHARED_LIB_DIR)/<(SHARED_LIB_PREFIX)modules_tests<(SHARED_LIB_SUFFIX)',
      },
      'dependencies': [
        '<(webrtc_root)/modules/modules.gyp:modules_tests',
      ],
      'includes': [
        '../../../build/apk_test.gypi',
      ],
    },
    {
      'target_name': 'modules_unittests_apk',
      'type': 'none',
      'variables': {
        'test_suite_name': 'modules_unittests',
        'input_shlib_path': '<(SHARED_LIB_DIR)/<(SHARED_LIB_PREFIX)modules_unittests<(SHARED_LIB_SUFFIX)',
      },
      'dependencies': [
        '<(webrtc_root)/modules/modules.gyp:modules_unittests',
      ],
      'includes': [
        '../../../build/apk_test.gypi',
      ],
    },
    {
      'target_name': 'neteq_unittests_apk',
      'type': 'none',
      'variables': {
        'test_suite_name': 'neteq_unittests',
        'input_shlib_path': '<(SHARED_LIB_DIR)/<(SHARED_LIB_PREFIX)neteq_unittests<(SHARED_LIB_SUFFIX)',
      },
      'dependencies': [
        '<(webrtc_root)/modules/modules.gyp:neteq_unittests',
      ],
      'includes': [
        '../../../build/apk_test.gypi',
      ],
    },
    {
      'target_name': 'system_wrappers_unittests_apk',
      'type': 'none',
      'variables': {
        'test_suite_name': 'system_wrappers_unittests',
        'input_shlib_path': '<(SHARED_LIB_DIR)/<(SHARED_LIB_PREFIX)system_wrappers_unittests<(SHARED_LIB_SUFFIX)',
      },
      'dependencies': [
        '<(webrtc_root)/system_wrappers/source/system_wrappers_tests.gyp:system_wrappers_unittests',
      ],
      'includes': [
        '../../../build/apk_test.gypi',
      ],
    },
    {
      'target_name': 'test_support_unittests_apk',
      'type': 'none',
      'variables': {
        'test_suite_name': 'test_support_unittests',
        'input_shlib_path': '<(SHARED_LIB_DIR)/<(SHARED_LIB_PREFIX)test_support_unittests<(SHARED_LIB_SUFFIX)',
      },
      'dependencies': [
        '<(webrtc_root)/test/test.gyp:test_support_unittests',
      ],
      'includes': [
        '../../../build/apk_test.gypi',
      ],
    },
    {
      'target_name': 'tools_unittests_apk',
      'type': 'none',
      'variables': {
        'test_suite_name': 'tools_unittests',
        'input_shlib_path': '<(SHARED_LIB_DIR)/<(SHARED_LIB_PREFIX)tools_unittests<(SHARED_LIB_SUFFIX)',
      },
      'dependencies': [
        '<(webrtc_root)/tools/tools.gyp:tools_unittests',
      ],
      'includes': [
        '../../../build/apk_test.gypi',
      ],
    },
    {
      'target_name': 'video_engine_core_unittests_apk',
      'type': 'none',
      'variables': {
        'test_suite_name': 'video_engine_core_unittests',
        'input_shlib_path': '<(SHARED_LIB_DIR)/<(SHARED_LIB_PREFIX)video_engine_core_unittests<(SHARED_LIB_SUFFIX)',
      },
      'dependencies': [
        '<(webrtc_root)/video_engine/video_engine.gyp:video_engine_core_unittests',
      ],
      'includes': [
        '../../../build/apk_test.gypi',
      ],
    },
    {
      'target_name': 'voice_engine_unittests_apk',
      'type': 'none',
      'variables': {
        'test_suite_name': 'voice_engine_unittests',
        'input_shlib_path': '<(SHARED_LIB_DIR)/<(SHARED_LIB_PREFIX)voice_engine_unittests<(SHARED_LIB_SUFFIX)',
      },
      'dependencies': [
        '<(webrtc_root)/voice_engine/voice_engine.gyp:voice_engine_unittests',
      ],
      'includes': [
        '../../../build/apk_test.gypi',
      ],
    },
  ],
}

61
media/webrtc/trunk/webrtc/build/apk_tests_noop.gyp
Normal file
@ -0,0 +1,61 @@
# Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
# that can be found in the LICENSE file in the root of the source
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.

# See webrtc/build/apk_tests.gyp for more information about this file.
{
  'targets': [
    {
      'target_name': 'audio_decoder_unittests_apk',
      'type': 'none',
    },
    {
      'target_name': 'common_audio_unittests_apk',
      'type': 'none',
    },
    {
      'target_name': 'common_video_unittests_apk',
      'type': 'none',
    },
    {
      'target_name': 'metrics_unittests',
      'type': 'none',
    },
    {
      'target_name': 'modules_tests_apk',
      'type': 'none',
    },
    {
      'target_name': 'modules_unittests_apk',
      'type': 'none',
    },
    {
      'target_name': 'neteq_unittests_apk',
      'type': 'none',
    },
    {
      'target_name': 'system_wrappers_unittests_apk',
      'type': 'none',
    },
    {
      'target_name': 'test_support_unittests_apk',
      'type': 'none',
    },
    {
      'target_name': 'tools_unittests_apk',
      'type': 'none',
    },
    {
      'target_name': 'video_engine_core_unittests_apk',
      'type': 'none',
    },
    {
      'target_name': 'voice_engine_unittests_apk',
      'type': 'none',
    },
  ],
}
@ -23,30 +23,8 @@
|
||||
'cflags!': [
|
||||
'-mfpu=vfpv3-d16',
|
||||
],
|
||||
'cflags_mozilla!': [
|
||||
'-mfpu=vfpv3-d16',
|
||||
],
|
||||
'cflags': [
|
||||
'-mfpu=neon',
|
||||
'-flax-vector-conversions',
|
||||
],
|
||||
'cflags_mozilla': [
|
||||
'-mfpu=neon',
|
||||
'-flax-vector-conversions',
|
||||
],
|
||||
'asflags!': [
|
||||
'-mfpu=vfpv3-d16',
|
||||
],
|
||||
'asflags_mozilla!': [
|
||||
'-mfpu=vfpv3-d16',
|
||||
],
|
||||
'asflags': [
|
||||
'-mfpu=neon',
|
||||
'-flax-vector-conversions',
|
||||
],
|
||||
'asflags_mozilla': [
|
||||
'-mfpu=neon',
|
||||
'-flax-vector-conversions',
|
||||
],
|
||||
|
||||
}
|
||||
|
@ -17,42 +17,45 @@
|
||||
# This will be set to zero in the supplement.gypi triggered by a
|
||||
# gclient hook in the standalone build.
|
||||
'build_with_chromium%': 1,
|
||||
'build_with_libjingle%': 0,
|
||||
},
|
||||
'build_with_chromium%': '<(build_with_chromium)',
|
||||
'build_with_libjingle%': '<(build_with_libjingle)',
|
||||
|
||||
'conditions': [
|
||||
['build_with_chromium==1 or build_with_libjingle==1', {
|
||||
['build_with_chromium==1', {
|
||||
'build_with_libjingle': 1,
|
||||
'webrtc_root%': '<(DEPTH)/third_party/webrtc',
|
||||
'apk_tests_path%': '<(DEPTH)/third_party/webrtc/build/apk_tests.gyp',
|
||||
'import_isolate_path%': '<(DEPTH)/third_party/webrtc/build/import_isolate_chromium.gyp',
|
||||
'modules_java_gyp_path%': '<(DEPTH)/third_party/webrtc/modules/modules_java_chromium.gyp',
|
||||
}, {
|
||||
'build_with_libjingle%': 0,
|
||||
'webrtc_root%': '<(DEPTH)/webrtc',
|
||||
'apk_tests_path%': '<(DEPTH)/webrtc/build/apk_test_noop.gyp',
|
||||
'import_isolate_path%': '<(DEPTH)/webrtc/build/import_isolate_webrtc.gyp',
|
||||
'modules_java_gyp_path%': '<(DEPTH)/webrtc/modules/modules_java.gyp',
|
||||
}],
|
||||
],
|
||||
},
|
||||
'build_with_chromium%': '<(build_with_chromium)',
|
||||
'build_with_libjingle%': '<(build_with_libjingle)',
|
||||
'webrtc_root%': '<(webrtc_root)',
|
||||
'apk_tests_path%': '<(apk_tests_path)',
|
||||
'import_isolate_path%': '<(import_isolate_path)',
|
||||
'modules_java_gyp_path%': '<(modules_java_gyp_path)',
|
||||
|
||||
'webrtc_vp8_dir%': '<(webrtc_root)/modules/video_coding/codecs/vp8',
|
||||
'include_g711%': 1,
|
||||
'include_g722%': 1,
|
||||
'include_ilbc%': 1,
|
||||
'rbe_components_path%': '<(webrtc_root)/modules/remote_bitrate_estimator',
|
||||
'include_opus%': 1,
|
||||
'include_isac%': 1,
|
||||
'include_pcm16b%': 1,
|
||||
},
|
||||
'build_with_chromium%': '<(build_with_chromium)',
|
||||
'build_with_libjingle%': '<(build_with_libjingle)',
|
||||
'webrtc_root%': '<(webrtc_root)',
|
||||
'apk_tests_path%': '<(apk_tests_path)',
|
||||
'import_isolate_path%': '<(import_isolate_path)',
|
||||
'modules_java_gyp_path%': '<(modules_java_gyp_path)',
|
||||
'webrtc_vp8_dir%': '<(webrtc_vp8_dir)',
|
||||
|
||||
'include_g711%': '<(include_g711)',
|
||||
'include_g722%': '<(include_g722)',
|
||||
'include_ilbc%': '<(include_ilbc)',
|
||||
'include_opus%': '<(include_opus)',
|
||||
'include_isac%': '<(include_isac)',
|
||||
'include_pcm16b%': '<(include_pcm16b)',
|
||||
'rbe_components_path%': '<(rbe_components_path)',
|
||||
|
||||
# The Chromium common.gypi we use treats all gyp files without
|
||||
# chromium_code==1 as third party code. This disables many of the
|
||||
@ -108,57 +111,37 @@
|
||||
# Exclude internal video render module in Chromium build.
|
||||
'include_internal_video_render%': 0,
|
||||
|
||||
'include_tests%': 0,
|
||||
|
||||
'enable_tracing%': 0,
|
||||
# lazily allocate the ~4MB of trace message buffers if set
|
||||
'enable_lazy_trace_alloc%': 0,
|
||||
|
||||
'enable_android_opensl%': 0,
|
||||
# Include ndk cpu features in Chromium build.
|
||||
'include_ndk_cpu_features%': 1,
|
||||
}, { # Settings for the standalone (not-in-Chromium) build.
|
||||
'include_pulse_audio%': 1,
|
||||
'include_internal_audio_device%': 1,
|
||||
'include_internal_video_capture%': 1,
|
||||
'include_internal_video_render%': 1,
|
||||
'enable_tracing%': 1,
|
||||
'include_tests%': 1,
|
||||
|
||||
# TODO(andrew): For now, disable the Chrome plugins, which causes a
|
||||
# flood of chromium-style warnings. Investigate enabling them:
|
||||
# http://code.google.com/p/webrtc/issues/detail?id=163
|
||||
'clang_use_chrome_plugins%': 0,
|
||||
|
||||
'include_pulse_audio%': 1,
|
||||
'include_internal_audio_device%': 1,
|
||||
'include_internal_video_capture%': 1,
|
||||
'include_internal_video_render%': 1,
|
||||
'include_ndk_cpu_features%': 0,
|
||||
}],
|
||||
['build_with_libjingle==1', {
|
||||
'include_tests%': 0,
|
||||
'enable_tracing%': 0,
|
||||
'enable_android_opensl%': 0,
|
||||
}, {
|
||||
'include_tests%': 1,
|
||||
'enable_tracing%': 1,
|
||||
# Switch between Android audio device OpenSL ES implementation
|
||||
# and Java Implementation
|
||||
'enable_android_opensl%': 0,
|
||||
}],
|
||||
['OS=="linux"', {
|
||||
'include_alsa_audio%': 1,
|
||||
}, {
|
||||
'include_alsa_audio%': 0,
|
||||
}],
|
||||
['OS=="solaris" or os_bsd==1', {
|
||||
'include_pulse_audio%': 1,
|
||||
}, {
|
||||
'include_pulse_audio%': 0,
|
||||
}],
|
||||
['OS=="linux" or OS=="solaris" or os_bsd==1', {
|
||||
'include_v4l2_video_capture%': 1,
|
||||
}, {
|
||||
'include_v4l2_video_capture%': 0,
|
||||
}],
|
||||
['OS=="ios"', {
|
||||
'enable_video%': 0,
|
||||
'enable_protobuf%': 0,
|
||||
'build_libjpeg%': 0,
|
||||
'build_libyuv%': 0,
|
||||
'build_libvpx%': 0,
|
||||
'enable_protobuf%': 0,
|
||||
'include_tests%': 0,
|
||||
}],
|
||||
['build_with_libjingle==1', {
|
||||
'include_tests%': 0,
|
||||
}],
|
||||
['target_arch=="arm"', {
|
||||
['target_arch=="arm" or target_arch=="armv7"', {
|
||||
'prefer_fixed_point%': 1,
|
||||
}],
|
||||
], # conditions
|
||||
@ -177,15 +160,10 @@
|
||||
'defines': [
|
||||
# TODO(leozwang): Run this as a gclient hook rather than at build-time:
|
||||
# http://code.google.com/p/webrtc/issues/detail?id=687
|
||||
'WEBRTC_SVNREVISION="\\\"Unavailable_issue687\\\""',
|
||||
'WEBRTC_SVNREVISION="Unavailable(issue687)"',
|
||||
#'WEBRTC_SVNREVISION="<!(python <(webrtc_root)/build/version.py)"',
|
||||
],
|
||||
'conditions': [
|
||||
['moz_widget_toolkit_gonk==1', {
|
||||
'defines' : [
|
||||
'WEBRTC_GONK',
|
||||
],
|
||||
}],
|
||||
['enable_tracing==1', {
|
||||
'defines': ['WEBRTC_LOGGING',],
|
||||
}],
|
||||
@ -217,14 +195,13 @@
|
||||
}],
|
||||
],
|
||||
}],
|
||||
['target_arch=="arm"', {
|
||||
['target_arch=="arm" or target_arch=="armv7"', {
|
||||
'defines': [
|
||||
'WEBRTC_ARCH_ARM',
|
||||
],
|
||||
'conditions': [
|
||||
['armv7==1', {
|
||||
'defines': ['WEBRTC_ARCH_ARM_V7',
|
||||
'WEBRTC_BUILD_NEON_LIBS'],
|
||||
'defines': ['WEBRTC_ARCH_ARM_V7',],
|
||||
'conditions': [
|
||||
['arm_neon==1', {
|
||||
'defines': ['WEBRTC_ARCH_ARM_NEON',],
|
||||
@ -235,19 +212,6 @@
|
||||
}],
|
||||
],
|
||||
}],
|
||||
['os_bsd==1', {
|
||||
'defines': [
|
||||
'WEBRTC_BSD',
|
||||
'WEBRTC_THREAD_RR',
|
||||
],
|
||||
}],
|
||||
['OS=="dragonfly" or OS=="netbsd"', {
|
||||
'defines': [
|
||||
# doesn't support pthread_condattr_setclock
|
||||
'WEBRTC_CLOCK_TYPE_REALTIME',
|
||||
],
|
||||
}],
|
||||
# Mozilla: if we support Mozilla on MIPS, we'll need to mod the cflags entries here
|
||||
['target_arch=="mipsel"', {
|
||||
'defines': [
|
||||
'MIPS32_LE',
|
||||
@ -308,13 +272,6 @@
|
||||
],
|
||||
}],
|
||||
['OS=="linux"', {
|
||||
# 'conditions': [
|
||||
# ['have_clock_monotonic==1', {
|
||||
# 'defines': [
|
||||
# 'WEBRTC_CLOCK_TYPE_REALTIME',
|
||||
# ],
|
||||
# }],
|
||||
# ],
|
||||
'defines': [
|
||||
'WEBRTC_LINUX',
|
||||
],
|
||||
|
22
media/webrtc/trunk/webrtc/build/import_isolate_chromium.gyp
Normal file
@ -0,0 +1,22 @@
# Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
# that can be found in the LICENSE file in the root of the source
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.

# See webrtc/build/import_isolate_webrtc.gyp for information about this file.
{
  'targets': [
    {
      'target_name': 'import_isolate_gypi',
      'type': 'none',
      'includes': [
        # Relative path to isolate.gypi when WebRTC is built from inside
        # Chromium (i.e. the webrtc/ folder is checked out into third_party/).
        '../../../build/apk_test.gypi',
      ],
    },
  ],
}
25
media/webrtc/trunk/webrtc/build/import_isolate_webrtc.gyp
Normal file
@ -0,0 +1,25 @@
# Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
# that can be found in the LICENSE file in the root of the source
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.

# This file exists so we can find the isolate.gypi both when WebRTC is built
# stand-alone and when built as a part of Chrome.
# This is needed since GYP does not support evaluating variables in the
# includes sections of a target, so we cannot use <(DEPTH) or <(webrtc_root).
{
  'targets': [
    {
      'target_name': 'import_isolate_gypi',
      'type': 'none',
      'includes': [
        # Relative path to isolate.gypi when WebRTC built as a stand-alone
        # project (i.e. Chromium's build/ folder is checked out into the root).
        '../../build/isolate.gypi',
      ],
    },
  ],
}
@ -1,4 +1,4 @@
|
||||
#!/bin/bash
|
||||
#!/bin/bash -e
|
||||
|
||||
# Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
|
||||
#
|
||||
@ -8,66 +8,18 @@
|
||||
# in the file PATENTS. All contributing project authors may
|
||||
# be found in the AUTHORS file in the root of the source tree.
|
||||
|
||||
function build_project() {
|
||||
# make the target string
|
||||
if [[ -z "$2" ]]; then
|
||||
target_string=""
|
||||
else
|
||||
declare -a arg_target=("${!2}")
|
||||
# Work in trunk/.
|
||||
cd "$(dirname $0)/../.."
|
||||
|
||||
for item in ${arg_target[*]}
|
||||
do
|
||||
temp_string="-target $item "
|
||||
target_string=$target_string$temp_string
|
||||
done
|
||||
fi
|
||||
export GYP_DEFINES="build_with_libjingle=1 build_with_chromium=0"
|
||||
GYP_DEFINES="$GYP_DEFINES OS=ios target_arch=armv7 key_id=\"\""
|
||||
export GYP_GENERATORS="ninja"
|
||||
export GYP_CROSSCOMPILE=1
|
||||
|
||||
# xcodebuild
|
||||
xcodebuild -project "$1" -sdk iphoneos \
|
||||
-configuration ${CONFIGURATION} \
|
||||
-CONFIGURATION_BUILD_DIR=${CONFIGURATION_BUILD_DIR} $target_string
|
||||
|
||||
if [ "$?" != "0" ]; then
|
||||
echo "[Error] build $1 failed!" 1>&2
|
||||
echo "@@@STEP_FAILURE@@@"
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
|
||||
# change the working directory to trunk
|
||||
cd "$( dirname "${BASH_SOURCE[0]}" )/../.."
|
||||
|
||||
# build setting
|
||||
CONFIGURATION_BUILD_DIR=./xcodebuild
|
||||
CONFIGURATION=Debug
|
||||
GYPDEF="OS=ios target_arch=arm armv7=1 arm_neon=1 enable_video=0 include_opus=1"
|
||||
|
||||
export GYP_DEFINES=$GYPDEF
|
||||
echo "[Running gclient runhooks...]"
|
||||
echo "@@@BUILD_STEP runhooks@@@"
|
||||
gclient runhooks
|
||||
if [ "$?" != "0" ]; then
|
||||
echo "[Error] gclient runhooks failed!" 1>&2
|
||||
echo "@@@STEP_FAILURE@@@"
|
||||
exit 2
|
||||
fi
|
||||
echo "[Projects updated]\n"
|
||||
gclient runhooks || { echo "@@@STEP_FAILURE@@@"; exit 2; }
|
||||
|
||||
echo "@@@BUILD_STEP compile@@@"
|
||||
echo "[Building XCode projects...]"
|
||||
array_target_module=(
|
||||
"bitrate_controller" "media_file" "paced_sender" "remote_bitrate_estimator"
|
||||
"webrtc_utility" "rtp_rtcp" "CNG" "G711" "G722" "iLBC" "iSACFix" "PCM16B"
|
||||
"audio_coding_module" "NetEq" "audio_conference_mixer" "audio_device"
|
||||
"audio_processing" "iSAC" "isac_neon" "audio_processing_neon" "webrtc_opus"
|
||||
)
|
||||
array_target_opus=("opus")
|
||||
|
||||
build_project "webrtc/common_audio/common_audio.xcodeproj"
|
||||
build_project "webrtc/modules/modules.xcodeproj" array_target_module[@]
|
||||
build_project "webrtc/system_wrappers/source/system_wrappers.xcodeproj"
|
||||
build_project "webrtc/voice_engine/voice_engine.xcodeproj"
|
||||
build_project "third_party/opus/opus.xcodeproj" array_target_opus[@]
|
||||
echo "[Building XCode projects is successful]\n"
|
||||
ninja -C out/Debug || { echo "@@@STEP_FAILURE@@@"; exit 2; }
|
||||
|
||||
exit 0
|
||||
|
@ -44,7 +44,5 @@
|
||||
},
|
||||
],
|
||||
},
|
||||
# }],
|
||||
# ],
|
||||
],
|
||||
}
|
||||
|
69
media/webrtc/trunk/webrtc/build/vie-webrtc.sh
Executable file
@ -0,0 +1,69 @@
|
||||
#!/bin/sh
|
||||
|
||||
# Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
|
||||
#
|
||||
# Use of this source code is governed by a BSD-style license
|
||||
# that can be found in the LICENSE file in the root of the source
|
||||
# tree. An additional intellectual property rights grant can be found
|
||||
# in the file PATENTS. All contributing project authors may
|
||||
# be found in the AUTHORS file in the root of the source tree.
|
||||
set -e
|
||||
|
||||
# TODO(sjlee): remove this whole script file.
|
||||
# (https://code.google.com/p/webrtc/issues/detail?id=2028)
|
||||
function build_project() {
|
||||
# make the target string
|
||||
local target_string=""
|
||||
if [[ -n "$2" ]]; then
|
||||
target_string="-target $2"
|
||||
fi
|
||||
|
||||
xcodebuild -project "$1" -sdk iphoneos -arch armv7 \
|
||||
-configuration ${CONFIGURATION} \
|
||||
-CONFIGURATION_BUILD_DIR=${CONFIGURATION_BUILD_DIR} $target_string
|
||||
}
|
||||
|
||||
# change the working directory to trunk
|
||||
cd "$( dirname "$0" )/../.."
|
||||
|
||||
# build setting
|
||||
CONFIGURATION_BUILD_DIR=./xcodebuild
|
||||
CONFIGURATION=Debug
|
||||
export GYP_DEFINES="OS=ios target_arch=arm armv7=1 arm_neon=1"
|
||||
# TODO(sjlee): remove this script.
|
||||
# (https://webrtc-codereview.appspot.com/1874005)
|
||||
|
||||
# update gyp settings
|
||||
echo '[Updating gyp settings...]'
|
||||
gclient runhooks
|
||||
./build/gyp_chromium --depth=. \
|
||||
webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_components.gyp
|
||||
./build/gyp_chromium --depth=. \
|
||||
webrtc/modules/video_coding/utility/video_coding_utility.gyp
|
||||
./build/gyp_chromium --depth=. third_party/opus/opus.gyp
|
||||
./build/gyp_chromium --depth=. third_party/libyuv/libyuv.gyp
|
||||
./build/gyp_chromium --depth=. third_party/libjpeg/libjpeg.gyp
|
||||
|
||||
# build the xcode projects
|
||||
echo '[Building xcode projects...]'
|
||||
|
||||
build_project "webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_components.xcodeproj"
|
||||
build_project "webrtc/modules/video_coding/utility/video_coding_utility.xcodeproj"
|
||||
build_project "third_party/opus/opus.xcodeproj" "opus"
|
||||
build_project "third_party/libjpeg/libjpeg.xcodeproj"
|
||||
build_project "third_party/libyuv/libyuv.xcodeproj"
|
||||
|
||||
# build the libvpx
|
||||
cd third_party/libvpx/source/libvpx
|
||||
|
||||
./configure --target=armv7-darwin-gcc --disable-vp9 \
|
||||
--libc=/Applications/Xcode.app/Contents/Developer/Platforms/iPhoneOS.platform/Developer/SDKs/iPhoneOS6.1.sdk
|
||||
|
||||
make
|
||||
|
||||
cd -
|
||||
|
||||
cp third_party/libvpx/source/libvpx/libvpx.a \
|
||||
${CONFIGURATION_BUILD_DIR}/${CONFIGURATION}-iphoneos
|
||||
|
||||
echo "[Building xcode projects is success...]\n"
|
@ -8,8 +8,8 @@
 * be found in the AUTHORS file in the root of the source tree.
 */

#ifndef WEBRTC_COMMON_H
#define WEBRTC_COMMON_H
#ifndef WEBRTC_COMMON_H_
#define WEBRTC_COMMON_H_

#include <map>

@ -38,14 +38,14 @@ namespace webrtc {
class Config {
 public:
  // Returns the option if set or a default constructed one.
  // Callers that access options to often are encouraged to cache the result.
  // Callers that access options too often are encouraged to cache the result.
  // Returned references are owned by this.
  //
  // Requires std::is_default_constructible<T>
  template<typename T> const T& Get() const;

  // Set the option, deleting any previous instance of the same.
  // This instance gets ownership of the newly setted value.
  // This instance gets ownership of the newly set value.
  template<typename T> void Set(T* value);

  Config() {}
@ -116,5 +116,7 @@ void Config::Set(T* value) {
  delete it;
  it = new Option<T>(value);
}

} // namespace webrtc
#endif // WEBRTC_COMMON_H

#endif // WEBRTC_COMMON_H_
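As a rough usage sketch of the Config interface declared above (not part of the patch): MyEchoOptions and ConfigureEcho are made-up names chosen for illustration, and the snippet assumes webrtc/common.h is on the include path. It shows the ownership contract spelled out in the comments: Set() takes ownership, Get() returns a reference owned by the Config.

#include "webrtc/common.h"

// Hypothetical option struct; any default-constructible type can be stored.
struct MyEchoOptions {
  MyEchoOptions() : enabled(false), tail_ms(0) {}  // Get() falls back to this default
  bool enabled;
  int tail_ms;
};

void ConfigureEcho(webrtc::Config* config) {
  // Set() takes ownership of the heap-allocated value and deletes any
  // previously stored instance of the same option type.
  MyEchoOptions* opts = new MyEchoOptions();
  opts->enabled = true;
  opts->tail_ms = 120;
  config->Set(opts);

  // Get() returns the stored option, or a default-constructed one if the
  // option was never set; the returned reference stays owned by |config|.
  const MyEchoOptions& current = config->Get<MyEchoOptions>();
  (void)current;
}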
@ -20,6 +20,20 @@ void ExpectArraysEq(const int16_t* ref, const int16_t* test, int length) {
  }
}

TEST(AudioUtilTest, Clamp) {
  EXPECT_EQ(1000.f, ClampInt16(1000.f));
  EXPECT_EQ(32767.f, ClampInt16(32767.5f));
  EXPECT_EQ(-32768.f, ClampInt16(-32768.5f));
}

TEST(AudioUtilTest, Round) {
  EXPECT_EQ(0, RoundToInt16(0.f));
  EXPECT_EQ(0, RoundToInt16(0.4f));
  EXPECT_EQ(1, RoundToInt16(0.5f));
  EXPECT_EQ(0, RoundToInt16(-0.4f));
  EXPECT_EQ(-1, RoundToInt16(-0.5f));
}

TEST(AudioUtilTest, InterleavingStereo) {
  const int16_t kInterleaved[] = {2, 3, 4, 9, 8, 27, 16, 81};
  const int kSamplesPerChannel = 4;
@ -93,7 +93,7 @@
|
||||
['target_arch=="ia32" or target_arch=="x64"', {
|
||||
'dependencies': ['common_audio_sse2',],
|
||||
}],
|
||||
['target_arch=="arm"', {
|
||||
['target_arch=="arm" or target_arch=="armv7"', {
|
||||
'sources': [
|
||||
'signal_processing/complex_bit_reverse_arm.S',
|
||||
'signal_processing/spl_sqrt_floor_arm.S',
|
||||
@ -116,17 +116,28 @@
|
||||
}],
|
||||
['target_arch=="mipsel"', {
|
||||
'sources': [
|
||||
'signal_processing/include/spl_inl_mips.h',
|
||||
'signal_processing/complex_bit_reverse_mips.c',
|
||||
'signal_processing/complex_fft_mips.c',
|
||||
'signal_processing/cross_correlation_mips.c',
|
||||
'signal_processing/downsample_fast_mips.c',
|
||||
'signal_processing/filter_ar_fast_q12_mips.c',
|
||||
'signal_processing/min_max_operations_mips.c',
|
||||
'signal_processing/resample_by_2_mips.c',
|
||||
'signal_processing/spl_sqrt_floor_mips.c',
|
||||
],
|
||||
'sources!': [
|
||||
'signal_processing/complex_bit_reverse.c',
|
||||
'signal_processing/complex_fft.c',
|
||||
'signal_processing/filter_ar_fast_q12.c',
|
||||
'signal_processing/spl_sqrt_floor.c',
|
||||
],
|
||||
'conditions': [
|
||||
['mips_dsp_rev>0', {
|
||||
'sources': [
|
||||
'signal_processing/vector_scaling_operations_mips.c',
|
||||
],
|
||||
}],
|
||||
],
|
||||
}],
|
||||
], # conditions
|
||||
@ -144,14 +155,13 @@
|
||||
'resampler/sinc_resampler_sse.cc',
|
||||
],
|
||||
'cflags': ['-msse2',],
|
||||
'cflags_mozilla': ['-msse2',],
|
||||
'xcode_settings': {
|
||||
'OTHER_CFLAGS': ['-msse2',],
|
||||
},
|
||||
},
|
||||
], # targets
|
||||
}],
|
||||
['target_arch=="arm" and armv7==1', {
|
||||
['(target_arch=="arm" and armv7==1) or target_arch=="armv7"', {
|
||||
'targets': [
|
||||
{
|
||||
'target_name': 'common_audio_neon',
|
||||
@ -171,7 +181,7 @@
|
||||
'targets' : [
|
||||
{
|
||||
'target_name': 'common_audio_unittests',
|
||||
'type': 'executable',
|
||||
'type': '<(gtest_target_type)',
|
||||
'dependencies': [
|
||||
'common_audio',
|
||||
'<(webrtc_root)/test/test.gyp:test_support_main',
|
||||
@ -195,8 +205,50 @@
|
||||
'vad/vad_unittest.cc',
|
||||
'vad/vad_unittest.h',
|
||||
],
|
||||
'conditions': [
|
||||
# TODO(henrike): remove build_with_chromium==1 when the bots are
|
||||
# using Chromium's buildbots.
|
||||
['build_with_chromium==1 and OS=="android" and gtest_target_type=="shared_library"', {
|
||||
'dependencies': [
|
||||
'<(DEPTH)/testing/android/native_test.gyp:native_test_native_code',
|
||||
],
|
||||
}],
|
||||
],
|
||||
},
|
||||
], # targets
|
||||
'conditions': [
|
||||
# TODO(henrike): remove build_with_chromium==1 when the bots are using
|
||||
# Chromium's buildbots.
|
||||
['build_with_chromium==1 and OS=="android" and gtest_target_type=="shared_library"', {
|
||||
'targets': [
|
||||
{
|
||||
'target_name': 'common_audio_unittests_apk_target',
|
||||
'type': 'none',
|
||||
'dependencies': [
|
||||
'<(apk_tests_path):common_audio_unittests_apk',
|
||||
],
|
||||
},
|
||||
],
|
||||
}],
|
||||
['test_isolation_mode != "noop"', {
|
||||
'targets': [
|
||||
{
|
||||
'target_name': 'common_audio_unittests_run',
|
||||
'type': 'none',
|
||||
'dependencies': [
|
||||
'<(import_isolate_path):import_isolate_gypi',
|
||||
'common_audio_unittests',
|
||||
],
|
||||
'includes': [
|
||||
'common_audio_unittests.isolate',
|
||||
],
|
||||
'sources': [
|
||||
'common_audio_unittests.isolate',
|
||||
],
|
||||
},
|
||||
],
|
||||
}],
|
||||
],
|
||||
}],
|
||||
], # conditions
|
||||
}
|
||||
|
@ -0,0 +1,37 @@
|
||||
# Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
|
||||
#
|
||||
# Use of this source code is governed by a BSD-style license
|
||||
# that can be found in the LICENSE file in the root of the source
|
||||
# tree. An additional intellectual property rights grant can be found
|
||||
# in the file PATENTS. All contributing project authors may
|
||||
# be found in the AUTHORS file in the root of the source tree.
|
||||
{
|
||||
'conditions': [
|
||||
['OS=="android"', {
|
||||
# When doing Android builds, the WebRTC code is put in third_party/webrtc
|
||||
# of a Chromium checkout, this is one level above the standalone build.
|
||||
'variables': {
|
||||
'isolate_dependency_untracked': [
|
||||
'../../../data/',
|
||||
'../../../resources/',
|
||||
],
|
||||
},
|
||||
}],
|
||||
['OS=="linux" or OS=="mac" or OS=="win"', {
|
||||
'variables': {
|
||||
'command': [
|
||||
'../../testing/test_env.py',
|
||||
'../../tools/swarm_client/googletest/run_test_cases.py',
|
||||
'<(PRODUCT_DIR)/common_audio_unittests<(EXECUTABLE_SUFFIX)',
|
||||
],
|
||||
'isolate_dependency_tracked': [
|
||||
'../../testing/test_env.py',
|
||||
'../../tools/swarm_client/run_isolated.py',
|
||||
'../../tools/swarm_client/googletest/run_test_cases.py',
|
||||
'../../tools/swarm_client/third_party/upload.py',
|
||||
'<(PRODUCT_DIR)/common_audio_unittests<(EXECUTABLE_SUFFIX)',
|
||||
],
|
||||
},
|
||||
}],
|
||||
],
|
||||
}
|
@ -15,6 +15,20 @@

namespace webrtc {

// Clamp the floating |value| to the range representable by an int16_t.
static inline float ClampInt16(float value) {
  const float kMaxInt16 = 32767.f;
  const float kMinInt16 = -32768.f;
  return value < kMinInt16 ? kMinInt16 :
      (value > kMaxInt16 ? kMaxInt16 : value);
}

// Return a rounded int16_t of the floating |value|. Doesn't handle overflow;
// use ClampInt16 if necessary.
static inline int16_t RoundToInt16(float value) {
  return static_cast<int16_t>(value < 0.f ? value - 0.5f : value + 0.5f);
}

// Deinterleave audio from |interleaved| to the channel buffers pointed to
// by |deinterleaved|. There must be sufficient space allocated in the
// |deinterleaved| buffers (|num_channel| buffers with |samples_per_channel|
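As a quick sanity check (editor's sketch, not part of the patch), the standalone program below copies the two inline helpers added above and asserts exactly the values that the new AudioUtilTest.Clamp and AudioUtilTest.Round cases shown earlier expect.

#include <assert.h>
#include <stdint.h>
#include <stdio.h>

// Copies of the inline helpers from webrtc/common_audio/include/audio_util.h.
static inline float ClampInt16(float value) {
  const float kMaxInt16 = 32767.f;
  const float kMinInt16 = -32768.f;
  return value < kMinInt16 ? kMinInt16 :
      (value > kMaxInt16 ? kMaxInt16 : value);
}

static inline int16_t RoundToInt16(float value) {
  return static_cast<int16_t>(value < 0.f ? value - 0.5f : value + 0.5f);
}

int main() {
  // Same expectations as the AudioUtilTest cases in the diff above.
  assert(ClampInt16(1000.f) == 1000.f);
  assert(ClampInt16(32767.5f) == 32767.f);
  assert(ClampInt16(-32768.5f) == -32768.f);
  assert(RoundToInt16(0.f) == 0);
  assert(RoundToInt16(0.4f) == 0);
  assert(RoundToInt16(0.5f) == 1);
  assert(RoundToInt16(-0.4f) == 0);
  assert(RoundToInt16(-0.5f) == -1);
  printf("audio_util sanity checks passed\n");
  return 0;
}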
@ -17,50 +17,100 @@
|
||||
#define WEBRTC_RESAMPLER_RESAMPLER_H_
|
||||
|
||||
#include "webrtc/typedefs.h"
|
||||
#include "speex/speex_resampler.h"
|
||||
|
||||
namespace webrtc
|
||||
{
|
||||
|
||||
#define FIXED_RATE_RESAMPLER 0x10
|
||||
// TODO(andrew): the implementation depends on the exact values of this enum.
|
||||
// It should be rewritten in a less fragile way.
|
||||
enum ResamplerType
|
||||
{
|
||||
kResamplerSynchronous = 0x00,
|
||||
kResamplerSynchronousStereo = 0x01,
|
||||
kResamplerFixedSynchronous = 0x00 | FIXED_RATE_RESAMPLER,
|
||||
kResamplerFixedSynchronousStereo = 0x01 | FIXED_RATE_RESAMPLER,
|
||||
// 4 MSB = Number of channels
|
||||
// 4 LSB = Synchronous or asynchronous
|
||||
|
||||
kResamplerSynchronous = 0x10,
|
||||
kResamplerAsynchronous = 0x11,
|
||||
kResamplerSynchronousStereo = 0x20,
|
||||
kResamplerAsynchronousStereo = 0x21,
|
||||
kResamplerInvalid = 0xff
|
||||
};
|
||||
|
||||
// TODO(andrew): doesn't need to be part of the interface.
|
||||
enum ResamplerMode
|
||||
{
|
||||
kResamplerMode1To1,
|
||||
kResamplerMode1To2,
|
||||
kResamplerMode1To3,
|
||||
kResamplerMode1To4,
|
||||
kResamplerMode1To6,
|
||||
kResamplerMode1To12,
|
||||
kResamplerMode2To3,
|
||||
kResamplerMode2To11,
|
||||
kResamplerMode4To11,
|
||||
kResamplerMode8To11,
|
||||
kResamplerMode11To16,
|
||||
kResamplerMode11To32,
|
||||
kResamplerMode2To1,
|
||||
kResamplerMode3To1,
|
||||
kResamplerMode4To1,
|
||||
kResamplerMode6To1,
|
||||
kResamplerMode12To1,
|
||||
kResamplerMode3To2,
|
||||
kResamplerMode11To2,
|
||||
kResamplerMode11To4,
|
||||
kResamplerMode11To8
|
||||
};
|
||||
|
||||
class Resampler
|
||||
{
|
||||
|
||||
public:
|
||||
Resampler();
|
||||
// TODO(andrew): use an init function instead.
|
||||
Resampler(int in_freq, int out_freq, ResamplerType type);
|
||||
Resampler(int inFreq, int outFreq, ResamplerType type);
|
||||
~Resampler();
|
||||
|
||||
// Reset all states
|
||||
int Reset(int in_freq, int out_freq, ResamplerType type);
|
||||
int Reset(int inFreq, int outFreq, ResamplerType type);
|
||||
|
||||
// Reset all states if any parameter has changed
|
||||
int ResetIfNeeded(int in_freq, int out_freq, ResamplerType type);
|
||||
int ResetIfNeeded(int inFreq, int outFreq, ResamplerType type);
|
||||
|
||||
// Synchronous resampling, all output samples are written to samplesOut
|
||||
int Push(const int16_t* samples_in, int length_in,
|
||||
int16_t* samples_out, int max_len, int &out_len);
|
||||
int Push(const int16_t* samplesIn, int lengthIn, int16_t* samplesOut,
|
||||
int maxLen, int &outLen);
|
||||
|
||||
// Asynchronous resampling, input
|
||||
int Insert(int16_t* samplesIn, int lengthIn);
|
||||
|
||||
// Asynchronous resampling output, remaining samples are buffered
|
||||
int Pull(int16_t* samplesOut, int desiredLen, int &outLen);
|
||||
|
||||
private:
|
||||
bool IsFixedRate() { return !!(type_ & FIXED_RATE_RESAMPLER); }
|
||||
// Generic pointers since we don't know what states we'll need
|
||||
void* state1_;
|
||||
void* state2_;
|
||||
void* state3_;
|
||||
|
||||
SpeexResamplerState* state_;
|
||||
// Storage if needed
|
||||
int16_t* in_buffer_;
|
||||
int16_t* out_buffer_;
|
||||
int in_buffer_size_;
|
||||
int out_buffer_size_;
|
||||
int in_buffer_size_max_;
|
||||
int out_buffer_size_max_;
|
||||
|
||||
// State
|
||||
int in_freq_;
|
||||
int out_freq_;
|
||||
int channels_;
|
||||
ResamplerType type_;
|
||||
int my_in_frequency_khz_;
|
||||
int my_out_frequency_khz_;
|
||||
ResamplerMode my_mode_;
|
||||
ResamplerType my_type_;
|
||||
|
||||
// Extra instance for stereo
|
||||
Resampler* slave_left_;
|
||||
Resampler* slave_right_;
|
||||
};
|
||||
|
||||
} // namespace webrtc
|
||||
} // namespace webrtc
|
||||
|
||||
#endif // WEBRTC_RESAMPLER_RESAMPLER_H_
|
||||
|
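For contrast with the sinc-based push resampler introduced later in this commit, the legacy Resampler declared in the hunk above is driven through Push(). The following is a hedged sketch only: the 32 kHz to 16 kHz rates and 10 ms block sizes are example values, the helper name Downsample10ms is made up, and the zero return value of Push() on success is assumed from the surrounding code rather than stated in this diff. Normally one Resampler instance would be constructed once and reused across blocks.

#include "webrtc/common_audio/resampler/include/resampler.h"
#include "webrtc/typedefs.h"

// Downsample one 10 ms mono block from 32 kHz (320 samples) to 16 kHz
// (160 samples) with the legacy resampler declared above.
int Downsample10ms(const int16_t* in_320, int16_t* out_160) {
  webrtc::Resampler resampler(32000, 16000, webrtc::kResamplerSynchronous);

  int out_len = 0;
  // Push() performs synchronous resampling: all input samples are consumed
  // and the produced samples (at most 160 here) are written to |out_160|.
  if (resampler.Push(in_320, 320, out_160, 160, out_len) != 0)
    return -1;  // assumed failure path, e.g. unsupported rate combination
  return out_len;  // number of samples actually produced
}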
@ -10,9 +10,10 @@
|
||||
|
||||
#include "webrtc/common_audio/resampler/include/push_resampler.h"
|
||||
|
||||
#include <cstring>
|
||||
#include <string.h>
|
||||
|
||||
#include "webrtc/common_audio/include/audio_util.h"
|
||||
#include "webrtc/common_audio/resampler/include/resampler.h"
|
||||
#include "webrtc/common_audio/resampler/push_sinc_resampler.h"
|
||||
|
||||
namespace webrtc {
|
||||
@ -37,15 +38,13 @@ int PushResampler::InitializeIfNeeded(int src_sample_rate_hz,
|
||||
int num_channels) {
|
||||
if (src_sample_rate_hz == src_sample_rate_hz_ &&
|
||||
dst_sample_rate_hz == dst_sample_rate_hz_ &&
|
||||
num_channels == num_channels_) {
|
||||
num_channels == num_channels_)
|
||||
// No-op if settings haven't changed.
|
||||
return 0;
|
||||
}
|
||||
|
||||
if (src_sample_rate_hz <= 0 || dst_sample_rate_hz <= 0 ||
|
||||
num_channels <= 0 || num_channels > 2) {
|
||||
num_channels <= 0 || num_channels > 2)
|
||||
return -1;
|
||||
}
|
||||
|
||||
src_sample_rate_hz_ = src_sample_rate_hz;
|
||||
dst_sample_rate_hz_ = dst_sample_rate_hz;
|
||||
@ -71,9 +70,8 @@ int PushResampler::Resample(const int16_t* src, int src_length,
|
||||
int16_t* dst, int dst_capacity) {
|
||||
const int src_size_10ms = src_sample_rate_hz_ * num_channels_ / 100;
|
||||
const int dst_size_10ms = dst_sample_rate_hz_ * num_channels_ / 100;
|
||||
if (src_length != src_size_10ms || dst_capacity < dst_size_10ms) {
|
||||
if (src_length != src_size_10ms || dst_capacity < dst_size_10ms)
|
||||
return -1;
|
||||
}
|
||||
|
||||
if (src_sample_rate_hz_ == dst_sample_rate_hz_) {
|
||||
// The old resampler provides this memcpy facility in the case of matching
|
||||
|
@ -8,23 +8,23 @@
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#include "webrtc/common_audio/include/audio_util.h"
|
||||
#include "webrtc/common_audio/resampler/push_sinc_resampler.h"
|
||||
|
||||
#include <cmath>
|
||||
|
||||
#include <algorithm>
|
||||
#include <string.h>
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
PushSincResampler::PushSincResampler(int src_block_size,
|
||||
int dst_block_size)
|
||||
PushSincResampler::PushSincResampler(int source_frames,
|
||||
int destination_frames)
|
||||
: resampler_(NULL),
|
||||
float_buffer_(NULL),
|
||||
float_buffer_(new float[destination_frames]),
|
||||
source_ptr_(NULL),
|
||||
dst_size_(dst_block_size) {
|
||||
resampler_.reset(new SincResampler(src_block_size * 1.0 / dst_block_size,
|
||||
this, src_block_size));
|
||||
float_buffer_.reset(new float[dst_block_size]);
|
||||
destination_frames_(destination_frames),
|
||||
first_pass_(true),
|
||||
source_available_(0) {
|
||||
resampler_.reset(new SincResampler(source_frames * 1.0 / destination_frames,
|
||||
source_frames, this));
|
||||
}
|
||||
|
||||
PushSincResampler::~PushSincResampler() {
|
||||
@ -34,32 +34,51 @@ int PushSincResampler::Resample(const int16_t* source,
|
||||
int source_length,
|
||||
int16_t* destination,
|
||||
int destination_capacity) {
|
||||
assert(source_length == resampler_->BlockSize());
|
||||
assert(destination_capacity >= dst_size_);
|
||||
assert(source_length == resampler_->request_frames());
|
||||
assert(destination_capacity >= destination_frames_);
|
||||
// Cache the source pointer. Calling Resample() will immediately trigger
|
||||
// the Run() callback whereupon we provide the cached value.
|
||||
source_ptr_ = source;
|
||||
resampler_->Resample(float_buffer_.get(), dst_size_);
|
||||
for (int i = 0; i < dst_size_; ++i) {
|
||||
float clipped = std::max(std::min(float_buffer_[i], 32767.0f), -32768.0f);
|
||||
destination[i] = static_cast<int16_t>(std::floor(clipped + 0.5));
|
||||
}
|
||||
source_available_ = source_length;
|
||||
|
||||
// On the first pass, we call Resample() twice. During the first call, we
|
||||
// provide dummy input and discard the output. This is done to prime the
|
||||
// SincResampler buffer with the correct delay (half the kernel size), thereby
|
||||
// ensuring that all later Resample() calls will only result in one input
|
||||
// request through Run().
|
||||
//
|
||||
// If this wasn't done, SincResampler would call Run() twice on the first
|
||||
// pass, and we'd have to introduce an entire |source_frames| of delay, rather
|
||||
// than the minimum half kernel.
|
||||
//
|
||||
// It works out that ChunkSize() is exactly the amount of output we need to
|
||||
// request in order to prime the buffer with a single Run() request for
|
||||
// |source_frames|.
|
||||
if (first_pass_)
|
||||
resampler_->Resample(resampler_->ChunkSize(), float_buffer_.get());
|
||||
|
||||
resampler_->Resample(destination_frames_, float_buffer_.get());
|
||||
for (int i = 0; i < destination_frames_; ++i)
|
||||
destination[i] = RoundToInt16(ClampInt16(float_buffer_[i]));
|
||||
source_ptr_ = NULL;
|
||||
return dst_size_;
|
||||
return destination_frames_;
|
||||
}
|
||||
|
||||
void PushSincResampler::Run(float* destination, int frames) {
|
||||
void PushSincResampler::Run(int frames, float* destination) {
|
||||
assert(source_ptr_ != NULL);
|
||||
assert(frames >= resampler_->BlockSize());
|
||||
// We will have exactly |BlockSize| number of source samples available. If
|
||||
// the resampler asks for more, zero pad the beginning. This will only happen
|
||||
// on the first call while priming the buffer.
|
||||
int i = 0;
|
||||
for (; i < frames - resampler_->BlockSize(); ++i) {
|
||||
destination[i] = 0;
|
||||
}
|
||||
for (int j = 0; i < frames; ++i, ++j) {
|
||||
destination[i] = static_cast<float>(source_ptr_[j]);
|
||||
// Ensure we are only asked for the available samples. This would fail if
|
||||
// Run() was triggered more than once per Resample() call.
|
||||
assert(source_available_ == frames);
|
||||
|
||||
if (first_pass_) {
|
||||
// Provide dummy input on the first pass, the output of which will be
|
||||
// discarded, as described in Resample().
|
||||
memset(destination, 0, frames * sizeof(float));
|
||||
first_pass_ = false;
|
||||
} else {
|
||||
for (int i = 0; i < frames; ++i)
|
||||
destination[i] = static_cast<float>(source_ptr_[i]);
|
||||
source_available_ -= frames;
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -25,25 +25,33 @@ class PushSincResampler : public SincResamplerCallback {
  // Provide the size of the source and destination blocks in samples. These
  // must correspond to the same time duration (typically 10 ms) as the sample
  // ratio is inferred from them.
  PushSincResampler(int src_block_size, int dst_block_size);
  PushSincResampler(int source_frames, int destination_frames);
  virtual ~PushSincResampler();

  // Perform the resampling. |source_length| must always equal the
  // |src_block_size| provided at construction. |destination_capacity| must be
  // at least as large as |dst_block_size|. Returns the number of samples
  // Perform the resampling. |source_frames| must always equal the
  // |source_frames| provided at construction. |destination_capacity| must be
  // at least as large as |destination_frames|. Returns the number of samples
  // provided in destination (for convenience, since this will always be equal
  // to |dst_block_size|).
  int Resample(const int16_t* source, int source_length,
  // to |destination_frames|).
  int Resample(const int16_t* source, int source_frames,
               int16_t* destination, int destination_capacity);

  // Implements SincResamplerCallback.
  virtual void Run(float* destination, int frames);
  virtual void Run(int frames, float* destination) OVERRIDE;

  SincResampler* get_resampler_for_testing() { return resampler_.get(); }

 private:
  scoped_ptr<SincResampler> resampler_;
  scoped_array<float> float_buffer_;
  const int16_t* source_ptr_;
  const int dst_size_;
  const int destination_frames_;

  // True on the first call to Resample(), to prime the SincResampler buffer.
  bool first_pass_;

  // Used to assert we are only requested for as much data as is available.
  int source_available_;

  DISALLOW_COPY_AND_ASSIGN(PushSincResampler);
};
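The reworked PushSincResampler interface above operates on fixed blocks whose sizes imply the conversion ratio. A minimal usage sketch (editor's illustration, assuming the WebRTC headers are available; the 48 kHz to 16 kHz rates and the function name Process10msBlocks are example choices, not part of the patch):

#include "webrtc/common_audio/resampler/push_sinc_resampler.h"
#include "webrtc/typedefs.h"

// Resample successive 10 ms mono blocks from 48 kHz (480 samples) to
// 16 kHz (160 samples). The block sizes must describe the same duration,
// because the ratio is inferred from them at construction:
//   webrtc::PushSincResampler resampler(480, 160);
// One instance is created per stream and reused across blocks, so that the
// internal priming described in push_sinc_resampler.cc happens only once.
void Process10msBlocks(webrtc::PushSincResampler* resampler,
                       const int16_t* in_480, int16_t* out_160) {
  // Resample() requires the input length to equal the source_frames given at
  // construction (480) and the destination to hold at least
  // destination_frames (160) samples; it always returns destination_frames.
  int written = resampler->Resample(in_480, 480, out_160, 160);
  (void)written;
}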
@ -8,13 +8,14 @@
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#include <cmath>
|
||||
#include <math.h>
|
||||
|
||||
#include "testing/gmock/include/gmock/gmock.h"
|
||||
#include "testing/gtest/include/gtest/gtest.h"
|
||||
#include "webrtc/common_audio/resampler/push_sinc_resampler.h"
|
||||
#include "webrtc/common_audio/resampler/sinusoidal_linear_chirp_source.h"
|
||||
#include "webrtc/system_wrappers/interface/scoped_ptr.h"
|
||||
#include "webrtc/system_wrappers/interface/tick_util.h"
|
||||
#include "webrtc/typedefs.h"
|
||||
|
||||
namespace webrtc {
|
||||
@ -39,6 +40,59 @@ class PushSincResamplerTest
|
||||
double low_freq_error_;
|
||||
};
|
||||
|
||||
class ZeroSource : public SincResamplerCallback {
|
||||
public:
|
||||
void Run(int frames, float* destination) {
|
||||
memset(destination, 0, sizeof(float) * frames);
|
||||
}
|
||||
};
|
||||
|
||||
// Disabled because it takes too long to run routinely. Use for performance
|
||||
// benchmarking when needed.
|
||||
TEST_P(PushSincResamplerTest, DISABLED_ResampleBenchmark) {
|
||||
const int input_samples = input_rate_ / 100;
|
||||
const int output_samples = output_rate_ / 100;
|
||||
const int kResampleIterations = 200000;
|
||||
|
||||
// Source for data to be resampled.
|
||||
ZeroSource resampler_source;
|
||||
|
||||
scoped_array<float> resampled_destination(new float[output_samples]);
|
||||
scoped_array<float> source(new float[input_samples]);
|
||||
scoped_array<int16_t> source_int(new int16_t[input_samples]);
|
||||
scoped_array<int16_t> destination_int(new int16_t[output_samples]);
|
||||
|
||||
resampler_source.Run(input_samples, source.get());
|
||||
for (int i = 0; i < input_samples; ++i) {
|
||||
source_int[i] = static_cast<int16_t>(floor(32767 * source[i] + 0.5));
|
||||
}
|
||||
|
||||
printf("Benchmarking %d iterations of %d Hz -> %d Hz:\n",
|
||||
kResampleIterations, input_rate_, output_rate_);
|
||||
const double io_ratio = input_rate_ / static_cast<double>(output_rate_);
|
||||
SincResampler sinc_resampler(io_ratio, SincResampler::kDefaultRequestSize,
|
||||
&resampler_source);
|
||||
TickTime start = TickTime::Now();
|
||||
for (int i = 0; i < kResampleIterations; ++i) {
|
||||
sinc_resampler.Resample(output_samples, resampled_destination.get());
|
||||
}
|
||||
double total_time_sinc_us = (TickTime::Now() - start).Microseconds();
|
||||
printf("SincResampler took %.2f us per frame.\n",
|
||||
total_time_sinc_us / kResampleIterations);
|
||||
|
||||
PushSincResampler resampler(input_samples, output_samples);
|
||||
start = TickTime::Now();
|
||||
for (int i = 0; i < kResampleIterations; ++i) {
|
||||
EXPECT_EQ(output_samples,
|
||||
resampler.Resample(source_int.get(), input_samples,
|
||||
destination_int.get(), output_samples));
|
||||
}
|
||||
double total_time_us = (TickTime::Now() - start).Microseconds();
|
||||
printf("PushSincResampler took %.2f us per frame; which is a %.1f%% overhead "
|
||||
"on SincResampler.\n\n", total_time_us / kResampleIterations,
|
||||
(total_time_us - total_time_sinc_us) / total_time_sinc_us * 100);
|
||||
}
|
||||
|
||||
// Tests resampling using a given input and output sample rate.
|
||||
TEST_P(PushSincResamplerTest, Resample) {
|
||||
// Make comparisons using one second of data.
|
||||
@ -67,13 +121,22 @@ TEST_P(PushSincResamplerTest, Resample) {
|
||||
scoped_array<int16_t> source_int(new int16_t[input_block_size]);
|
||||
scoped_array<int16_t> destination_int(new int16_t[output_block_size]);
|
||||
|
||||
// The sinc resampler has an implicit delay of approximately half the kernel
|
||||
// size at the input sample rate. By moving to a push model, this delay
|
||||
// becomes explicit and is managed by zero-stuffing in PushSincResampler. We
|
||||
// deal with it in the test by delaying the "pure" source to match. It must be
|
||||
// checked before the first call to Resample(), because ChunkSize() will
|
||||
// change afterwards.
|
||||
const int output_delay_samples = output_block_size -
|
||||
resampler.get_resampler_for_testing()->ChunkSize();
|
||||
|
||||
// Generate resampled signal.
|
||||
// With the PushSincResampler, we produce the signal block-by-10ms-block
|
||||
// rather than in a single pass, to exercise how it will be used in WebRTC.
|
||||
resampler_source.Run(source.get(), input_samples);
|
||||
resampler_source.Run(input_samples, source.get());
|
||||
for (int i = 0; i < kNumBlocks; ++i) {
|
||||
for (int j = 0; j < input_block_size; ++j) {
|
||||
source_int[j] = static_cast<int16_t>(std::floor(32767 *
|
||||
source_int[j] = static_cast<int16_t>(floor(32767 *
|
||||
source[i * input_block_size + j] + 0.5));
|
||||
}
|
||||
EXPECT_EQ(output_block_size,
|
||||
@ -86,17 +149,9 @@ TEST_P(PushSincResamplerTest, Resample) {
|
||||
}
|
||||
|
||||
// Generate pure signal.
|
||||
// The sinc resampler has an implicit delay of half the kernel size (32) at
|
||||
// the input sample rate. By moving to a push model, this delay becomes
|
||||
// explicit and is managed by zero-stuffing in PushSincResampler. This delay
|
||||
// can be a fractional sample amount, so we deal with it in the test by
|
||||
// delaying the "pure" source to match.
|
||||
static const int kInputKernelDelaySamples = 16;
|
||||
double output_delay_samples = static_cast<double>(output_rate_)
|
||||
/ input_rate_ * kInputKernelDelaySamples;
|
||||
SinusoidalLinearChirpSource pure_source(
|
||||
output_rate_, output_samples, input_nyquist_freq, output_delay_samples);
|
||||
pure_source.Run(pure_destination.get(), output_samples);
|
||||
pure_source.Run(output_samples, pure_destination.get());
|
||||
|
||||
// Range of the Nyquist frequency (0.5 * min(input rate, output_rate)) which
|
||||
// we refer to as low and high.
|
||||
@ -216,17 +271,17 @@ INSTANTIATE_TEST_CASE_P(
|
||||
std::tr1::make_tuple(8000, 16000, kResamplingRMSError, -70.30),
|
||||
std::tr1::make_tuple(16000, 16000, kResamplingRMSError, -75.51),
|
||||
std::tr1::make_tuple(32000, 16000, -18.48, -28.59),
|
||||
std::tr1::make_tuple(44100, 16000, -19.59, -19.77),
|
||||
std::tr1::make_tuple(48000, 16000, -20.01, -18.11),
|
||||
std::tr1::make_tuple(96000, 16000, -20.95, -10.99),
|
||||
std::tr1::make_tuple(44100, 16000, -19.30, -19.67),
|
||||
std::tr1::make_tuple(48000, 16000, -19.81, -18.11),
|
||||
std::tr1::make_tuple(96000, 16000, -20.95, -10.96),
|
||||
|
||||
// To 32 kHz
|
||||
std::tr1::make_tuple(8000, 32000, kResamplingRMSError, -70.30),
|
||||
std::tr1::make_tuple(16000, 32000, kResamplingRMSError, -75.51),
|
||||
std::tr1::make_tuple(32000, 32000, kResamplingRMSError, -75.56),
|
||||
std::tr1::make_tuple(44100, 32000, -16.52, -51.10),
|
||||
std::tr1::make_tuple(48000, 32000, -16.90, -44.17),
|
||||
std::tr1::make_tuple(96000, 32000, -19.80, -18.05),
|
||||
std::tr1::make_tuple(44100, 32000, -16.44, -51.10),
|
||||
std::tr1::make_tuple(48000, 32000, -16.90, -44.03),
|
||||
std::tr1::make_tuple(96000, 32000, -19.61, -18.04),
|
||||
std::tr1::make_tuple(192000, 32000, -21.02, -10.94)));
|
||||
|
||||
} // namespace webrtc
|
||||
|
File diff suppressed because it is too large
@ -8,8 +8,6 @@
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#include <math.h>
|
||||
|
||||
#include "testing/gtest/include/gtest/gtest.h"
|
||||
|
||||
#include "webrtc/common_audio/resampler/include/resampler.h"
|
||||
@ -20,7 +18,10 @@ namespace webrtc {
|
||||
namespace {
|
||||
const ResamplerType kTypes[] = {
|
||||
kResamplerSynchronous,
|
||||
kResamplerAsynchronous,
|
||||
kResamplerSynchronousStereo,
|
||||
kResamplerAsynchronousStereo
|
||||
// kResamplerInvalid excluded
|
||||
};
|
||||
const size_t kTypesSize = sizeof(kTypes) / sizeof(*kTypes);
|
||||
|
||||
@ -30,7 +31,7 @@ const int kRates[] = {
|
||||
8000,
|
||||
16000,
|
||||
32000,
|
||||
44100,
|
||||
44000,
|
||||
48000,
|
||||
kMaxRate
|
||||
};
|
||||
@ -38,19 +39,26 @@ const size_t kRatesSize = sizeof(kRates) / sizeof(*kRates);
|
||||
const int kMaxChannels = 2;
|
||||
const size_t kDataSize = static_cast<size_t> (kMaxChannels * kMaxRate / 100);
|
||||
|
||||
// TODO(andrew): should we be supporting these combinations?
|
||||
bool ValidRates(int in_rate, int out_rate) {
|
||||
// Not the most compact notation, for clarity.
|
||||
if ((in_rate == 44000 && (out_rate == 48000 || out_rate == 96000)) ||
|
||||
(out_rate == 44000 && (in_rate == 48000 || in_rate == 96000))) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
class ResamplerTest : public testing::Test {
|
||||
protected:
|
||||
ResamplerTest();
|
||||
virtual void SetUp();
|
||||
virtual void TearDown();
|
||||
void RunResampleTest(int channels,
|
||||
int src_sample_rate_hz,
|
||||
int dst_sample_rate_hz);
|
||||
|
||||
Resampler rs_;
|
||||
int16_t data_in_[kDataSize];
|
||||
int16_t data_out_[kDataSize];
|
||||
int16_t data_reference_[kDataSize];
|
||||
};
|
||||
|
||||
ResamplerTest::ResamplerTest() {}
|
||||
@ -75,119 +83,34 @@ TEST_F(ResamplerTest, Reset) {
|
||||
ss << "Input rate: " << kRates[i] << ", output rate: " << kRates[j]
|
||||
<< ", type: " << kTypes[k];
|
||||
SCOPED_TRACE(ss.str());
|
||||
EXPECT_EQ(0, rs_.Reset(kRates[i], kRates[j], kTypes[k]));
|
||||
if (ValidRates(kRates[i], kRates[j]))
|
||||
EXPECT_EQ(0, rs_.Reset(kRates[i], kRates[j], kTypes[k]));
|
||||
else
|
||||
EXPECT_EQ(-1, rs_.Reset(kRates[i], kRates[j], kTypes[k]));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Sets the signal value to increase by |data| with every sample. Floats are
|
||||
// used so non-integer values result in rounding error, but not an accumulating
|
||||
// error.
|
||||
void SetMonoFrame(int16_t* buffer, float data, int sample_rate_hz) {
|
||||
for (int i = 0; i < sample_rate_hz / 100; i++) {
|
||||
buffer[i] = data * i;
|
||||
}
|
||||
}
|
||||
|
||||
// Sets the signal value to increase by |left| and |right| with every sample in
|
||||
// each channel respectively.
|
||||
void SetStereoFrame(int16_t* buffer, float left, float right,
|
||||
int sample_rate_hz) {
|
||||
for (int i = 0; i < sample_rate_hz / 100; i++) {
|
||||
buffer[i * 2] = left * i;
|
||||
buffer[i * 2 + 1] = right * i;
|
||||
}
|
||||
}
|
||||
|
||||
// Computes the best SNR based on the error between |reference| and |test|.
// It allows for a sample delay between the signals to compensate for the
// resampling delay.
float ComputeSNR(const int16_t* reference, const int16_t* test,
                 int sample_rate_hz, int channels, int max_delay) {
float best_snr = 0;
|
||||
int best_delay = 0;
|
||||
int samples_per_channel = sample_rate_hz/100;
|
||||
for (int delay = 0; delay < max_delay; delay++) {
|
||||
float mse = 0;
|
||||
float variance = 0;
|
||||
for (int i = 0; i < samples_per_channel * channels - delay; i++) {
|
||||
int error = reference[i] - test[i + delay];
|
||||
mse += error * error;
|
||||
variance += reference[i] * reference[i];
|
||||
}
|
||||
float snr = 100; // We assign 100 dB to the zero-error case.
|
||||
if (mse > 0)
|
||||
snr = 10 * log10(variance / mse);
|
||||
if (snr > best_snr) {
|
||||
best_snr = snr;
|
||||
best_delay = delay;
|
||||
}
|
||||
}
|
||||
printf("SNR=%.1f dB at delay=%d\n", best_snr, best_delay);
|
||||
return best_snr;
|
||||
}
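// Quick sanity check on the numbers (illustrative, not from the original
// test): since SNR = 10 * log10(variance / mse), the 40 dB threshold used
// further below requires the error energy to be at least 10^4 times smaller
// than the energy of the reference signal.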
|
||||
|
||||
void ResamplerTest::RunResampleTest(int channels,
|
||||
int src_sample_rate_hz,
|
||||
int dst_sample_rate_hz) {
|
||||
Resampler resampler; // Create a new one with every test.
|
||||
const int16_t kSrcLeft = 60; // Shouldn't overflow for any used sample rate.
|
||||
const int16_t kSrcRight = 30;
|
||||
const float kResamplingFactor = (1.0 * src_sample_rate_hz) /
|
||||
dst_sample_rate_hz;
|
||||
const float kDstLeft = kResamplingFactor * kSrcLeft;
|
||||
const float kDstRight = kResamplingFactor * kSrcRight;
|
||||
if (channels == 1)
|
||||
SetMonoFrame(data_in_, kSrcLeft, src_sample_rate_hz);
|
||||
else
|
||||
SetStereoFrame(data_in_, kSrcLeft, kSrcRight, src_sample_rate_hz);
|
||||
|
||||
if (channels == 1) {
|
||||
SetMonoFrame(data_out_, 0, dst_sample_rate_hz);
|
||||
SetMonoFrame(data_reference_, kDstLeft, dst_sample_rate_hz);
|
||||
} else {
|
||||
SetStereoFrame(data_out_, 0, 0, dst_sample_rate_hz);
|
||||
SetStereoFrame(data_reference_, kDstLeft, kDstRight, dst_sample_rate_hz);
|
||||
}
|
||||
|
||||
// The speex resampler has a known delay dependent on quality and rates,
|
||||
// which we approximate here. Multiplying by two gives us a crude maximum
|
||||
// for any resampling, as the old resampler typically (but not always)
|
||||
// has lower delay. The actual delay is calculated internally based on the
|
||||
// filter length in the QualityMap.
|
||||
static const int kInputKernelDelaySamples = 16*3;
|
||||
const int max_delay = std::min(1.0f, 1/kResamplingFactor) *
|
||||
kInputKernelDelaySamples * channels * 2;
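// Worked example (illustrative): for 48 kHz -> 16 kHz stereo,
// kResamplingFactor = 3, so max_delay = (1 / 3) * 48 * 2 * 2, about 64
// samples of allowed misalignment when searching for the best SNR.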
|
||||
printf("(%d, %d Hz) -> (%d, %d Hz) ", // SNR reported on the same line later.
|
||||
channels, src_sample_rate_hz, channels, dst_sample_rate_hz);
|
||||
|
||||
int in_length = channels * src_sample_rate_hz / 100;
|
||||
int out_length = 0;
|
||||
EXPECT_EQ(0, rs_.Reset(src_sample_rate_hz, dst_sample_rate_hz,
|
||||
(channels == 1 ?
|
||||
kResamplerSynchronous :
|
||||
kResamplerSynchronousStereo)));
|
||||
EXPECT_EQ(0, rs_.Push(data_in_, in_length, data_out_, kDataSize,
|
||||
out_length));
|
||||
EXPECT_EQ(channels * dst_sample_rate_hz / 100, out_length);
|
||||
|
||||
// EXPECT_EQ(0, Resample(src_frame_, &resampler, &dst_frame_));
|
||||
EXPECT_GT(ComputeSNR(data_reference_, data_out_, dst_sample_rate_hz,
|
||||
channels, max_delay), 40.0f);
|
||||
}
|
||||
|
||||
// TODO(tlegrand): Replace code inside the two tests below with a function
|
||||
// with number of channels and ResamplerType as input.
|
||||
TEST_F(ResamplerTest, Synchronous) {
|
||||
// Number of channels is 1, mono mode.
|
||||
const int kChannels = 1;
|
||||
// We don't attempt to be exhaustive here, but just get good coverage. Some
|
||||
// combinations of rates will not be resampled, and some give an odd
|
||||
// resampling factor which makes it more difficult to evaluate.
|
||||
const int kSampleRates[] = {16000, 32000, 44100, 48000};
|
||||
const int kSampleRatesSize = sizeof(kSampleRates) / sizeof(*kSampleRates);
|
||||
for (int src_rate = 0; src_rate < kSampleRatesSize; src_rate++) {
|
||||
for (int dst_rate = 0; dst_rate < kSampleRatesSize; dst_rate++) {
|
||||
RunResampleTest(kChannels, kSampleRates[src_rate], kSampleRates[dst_rate]);
|
||||
for (size_t i = 0; i < kRatesSize; ++i) {
|
||||
for (size_t j = 0; j < kRatesSize; ++j) {
|
||||
std::ostringstream ss;
|
||||
ss << "Input rate: " << kRates[i] << ", output rate: " << kRates[j];
|
||||
SCOPED_TRACE(ss.str());
|
||||
|
||||
if (ValidRates(kRates[i], kRates[j])) {
|
||||
int in_length = kRates[i] / 100;
|
||||
int out_length = 0;
|
||||
EXPECT_EQ(0, rs_.Reset(kRates[i], kRates[j], kResamplerSynchronous));
|
||||
EXPECT_EQ(0, rs_.Push(data_in_, in_length, data_out_, kDataSize,
|
||||
out_length));
|
||||
EXPECT_EQ(kRates[j] / 100, out_length);
|
||||
} else {
|
||||
EXPECT_EQ(-1, rs_.Reset(kRates[i], kRates[j], kResamplerSynchronous));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -195,14 +118,24 @@ TEST_F(ResamplerTest, Synchronous) {
|
||||
TEST_F(ResamplerTest, SynchronousStereo) {
|
||||
// Number of channels is 2, stereo mode.
|
||||
const int kChannels = 2;
|
||||
// We don't attempt to be exhaustive here, but just get good coverage. Some
|
||||
// combinations of rates will not be resampled, and some give an odd
|
||||
// resampling factor which makes it more difficult to evaluate.
|
||||
const int kSampleRates[] = {16000, 32000, 44100, 48000};
|
||||
const int kSampleRatesSize = sizeof(kSampleRates) / sizeof(*kSampleRates);
|
||||
for (int src_rate = 0; src_rate < kSampleRatesSize; src_rate++) {
|
||||
for (int dst_rate = 0; dst_rate < kSampleRatesSize; dst_rate++) {
|
||||
RunResampleTest(kChannels, kSampleRates[src_rate], kSampleRates[dst_rate]);
|
||||
for (size_t i = 0; i < kRatesSize; ++i) {
|
||||
for (size_t j = 0; j < kRatesSize; ++j) {
|
||||
std::ostringstream ss;
|
||||
ss << "Input rate: " << kRates[i] << ", output rate: " << kRates[j];
|
||||
SCOPED_TRACE(ss.str());
|
||||
|
||||
if (ValidRates(kRates[i], kRates[j])) {
|
||||
int in_length = kChannels * kRates[i] / 100;
|
||||
int out_length = 0;
|
||||
EXPECT_EQ(0, rs_.Reset(kRates[i], kRates[j],
|
||||
kResamplerSynchronousStereo));
|
||||
EXPECT_EQ(0, rs_.Push(data_in_, in_length, data_out_, kDataSize,
|
||||
out_length));
|
||||
EXPECT_EQ(kChannels * kRates[j] / 100, out_length);
|
||||
} else {
|
||||
EXPECT_EQ(-1, rs_.Reset(kRates[i], kRates[j],
|
||||
kResamplerSynchronousStereo));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -11,31 +11,73 @@
|
||||
// Modified from the Chromium original:
|
||||
// src/media/base/sinc_resampler.cc
|
||||
|
||||
// Input buffer layout, dividing the total buffer into regions (r0_ - r5_):
|
||||
// Initial input buffer layout, dividing into regions r0_ to r4_ (note: r0_, r3_
|
||||
// and r4_ will move after the first load):
|
||||
//
|
||||
// |----------------|-----------------------------------------|----------------|
|
||||
//
|
||||
// kBlockSize + kKernelSize / 2
|
||||
// request_frames_
|
||||
// <--------------------------------------------------------->
|
||||
// r0_
|
||||
// r0_ (during first load)
|
||||
//
|
||||
// kKernelSize / 2 kKernelSize / 2 kKernelSize / 2 kKernelSize / 2
|
||||
// <---------------> <---------------> <---------------> <--------------->
|
||||
// r1_ r2_ r3_ r4_
|
||||
//
|
||||
// kBlockSize
|
||||
// <--------------------------------------->
|
||||
// r5_
|
||||
// block_size_ == r4_ - r2_
|
||||
// <--------------------------------------->
|
||||
//
|
||||
// request_frames_
|
||||
// <------------------ ... ----------------->
|
||||
// r0_ (during second load)
|
||||
//
|
||||
// On the second request r0_ slides to the right by kKernelSize / 2 and r3_, r4_
|
||||
// and block_size_ are reinitialized via step (3) in the algorithm below.
|
||||
//
|
||||
// These new regions remain constant until a Flush() occurs. While complicated,
|
||||
// this allows us to reduce jitter by always requesting the same amount from the
|
||||
// provided callback.
|
||||
//
|
||||
// The algorithm:
|
||||
//
|
||||
// 1) Consume input frames into r0_ (r1_ is zero-initialized).
|
||||
// 2) Position kernel centered at start of r0_ (r2_) and generate output frames
|
||||
// until kernel is centered at start of r4_ or we've finished generating all
|
||||
// the output frames.
|
||||
// 3) Copy r3_ to r1_ and r4_ to r2_.
|
||||
// 4) Consume input frames into r5_ (zero-pad if we run out of input).
|
||||
// 5) Goto (2) until all of input is consumed.
|
||||
// 1) Allocate input_buffer of size: request_frames_ + kKernelSize; this ensures
//    there's enough room to read request_frames_ from the callback into region
//    r0_ (which will move between the first and subsequent passes).
//
// 2) Let r1_, r2_ each represent half the kernel centered around r0_:
//
//        r0_ = input_buffer_ + kKernelSize / 2
//        r1_ = input_buffer_
//        r2_ = r0_
//
//    r0_ is always request_frames_ in size.  r1_, r2_ are kKernelSize / 2 in
//    size.  r1_ must be zero initialized to avoid convolution with garbage (see
//    step (5) for why).
//
// 3) Let r3_, r4_ each represent half the kernel right aligned with the end of
//    r0_ and choose block_size_ as the distance in frames between r4_ and r2_:
//
//        r3_ = r0_ + request_frames_ - kKernelSize
//        r4_ = r0_ + request_frames_ - kKernelSize / 2
//        block_size_ = r4_ - r2_ = request_frames_ - kKernelSize / 2
//
// 4) Consume request_frames_ frames into r0_.
//
// 5) Position kernel centered at start of r2_ and generate output frames until
//    the kernel is centered at the start of r4_ or we've finished generating
//    all the output frames.
//
// 6) Wrap left over data from the r3_ to r1_ and r4_ to r2_.
//
// 7) If we're on the second load, in order to avoid overwriting the frames we
//    just wrapped from r4_ we need to slide r0_ to the right by the size of
//    r4_, which is kKernelSize / 2:
//
//        r0_ = r0_ + kKernelSize / 2 = input_buffer_ + kKernelSize
//
//    r3_, r4_, and block_size_ then need to be reinitialized, so goto (3).
//
// 8) Else, if we're not on the second load, goto (4).
//
// Note: we're glossing over how the sub-sample handling works with
// |virtual_source_idx_|, etc.
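To make the region arithmetic concrete, here is a worked example using the default sizes (request_frames_ = kDefaultRequestSize = 512, kKernelSize = 32); the numbers follow directly from steps (2), (3) and (7) above and are illustrative only:

// Offsets are frames relative to input_buffer_ (total size 512 + 32 = 544):
//   First pass:   r1_ = 0, r0_ = r2_ = 16, r3_ = 496, r4_ = 512,
//                 block_size_ = r4_ - r2_ = 496.
//   Later passes: r0_ slides right by kKernelSize / 2, so
//                 r0_ = 32, r3_ = 512, r4_ = 528, block_size_ = 512.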
@ -48,8 +90,9 @@
|
||||
#include "webrtc/system_wrappers/interface/cpu_features_wrapper.h"
|
||||
#include "webrtc/typedefs.h"
|
||||
|
||||
#include <cmath>
|
||||
#include <cstring>
|
||||
#include <math.h>
|
||||
#include <string.h>
|
||||
|
||||
#include <limits>
|
||||
|
||||
namespace webrtc {
|
||||
@ -70,49 +113,49 @@ static double SincScaleFactor(double io_ratio) {
|
||||
return sinc_scale_factor;
|
||||
}
|
||||
|
||||
SincResampler::SincResampler(double io_sample_rate_ratio,
|
||||
SincResamplerCallback* read_cb,
|
||||
int block_size)
|
||||
: io_sample_rate_ratio_(io_sample_rate_ratio),
|
||||
virtual_source_idx_(0),
|
||||
buffer_primed_(false),
|
||||
read_cb_(read_cb),
|
||||
block_size_(block_size),
|
||||
buffer_size_(block_size_ + kKernelSize),
|
||||
// Create input buffers with a 16-byte alignment for SSE optimizations.
|
||||
kernel_storage_(static_cast<float*>(
|
||||
AlignedMalloc(sizeof(float) * kKernelStorageSize, 16))),
|
||||
kernel_pre_sinc_storage_(static_cast<float*>(
|
||||
AlignedMalloc(sizeof(float) * kKernelStorageSize, 16))),
|
||||
kernel_window_storage_(static_cast<float*>(
|
||||
AlignedMalloc(sizeof(float) * kKernelStorageSize, 16))),
|
||||
input_buffer_(static_cast<float*>(
|
||||
AlignedMalloc(sizeof(float) * buffer_size_, 16))),
|
||||
#if defined(WEBRTC_ARCH_X86_FAMILY) && !defined(__SSE__)
|
||||
convolve_proc_(WebRtc_GetCPUInfo(kSSE2) ? Convolve_SSE : Convolve_C),
|
||||
#elif defined(WEBRTC_ARCH_ARM_V7) && !defined(WEBRTC_ARCH_ARM_NEON)
|
||||
convolve_proc_(WebRtc_GetCPUFeaturesARM() & kCPUFeatureNEON ?
|
||||
Convolve_NEON : Convolve_C),
|
||||
#endif
|
||||
// Setup various region pointers in the buffer (see diagram above).
|
||||
r0_(input_buffer_.get() + kKernelSize / 2),
|
||||
r1_(input_buffer_.get()),
|
||||
r2_(r0_),
|
||||
r3_(r0_ + block_size_ - kKernelSize / 2),
|
||||
r4_(r0_ + block_size_),
|
||||
r5_(r0_ + kKernelSize / 2) {
|
||||
Initialize();
|
||||
InitializeKernel();
|
||||
// If we know the minimum architecture at compile time, avoid CPU detection.
|
||||
// iOS lies about its architecture, so we also need to exclude it here.
|
||||
#if defined(WEBRTC_ARCH_X86_FAMILY) && !defined(WEBRTC_IOS)
|
||||
#if defined(__SSE__)
|
||||
#define CONVOLVE_FUNC Convolve_SSE
|
||||
void SincResampler::InitializeCPUSpecificFeatures() {}
|
||||
#else
|
||||
// X86 CPU detection required. Function will be set by
|
||||
// InitializeCPUSpecificFeatures().
|
||||
// TODO(dalecurtis): Once Chrome moves to an SSE baseline this can be removed.
|
||||
#define CONVOLVE_FUNC convolve_proc_
|
||||
|
||||
void SincResampler::InitializeCPUSpecificFeatures() {
|
||||
convolve_proc_ = WebRtc_GetCPUInfo(kSSE2) ? Convolve_SSE : Convolve_C;
|
||||
}
|
||||
#endif
|
||||
#elif defined(WEBRTC_ARCH_ARM_V7)
|
||||
#if defined(WEBRTC_ARCH_ARM_NEON)
|
||||
#define CONVOLVE_FUNC Convolve_NEON
|
||||
void SincResampler::InitializeCPUSpecificFeatures() {}
|
||||
#else
|
||||
// NEON CPU detection required. Function will be set by
|
||||
// InitializeCPUSpecificFeatures().
|
||||
#define CONVOLVE_FUNC convolve_proc_
|
||||
|
||||
void SincResampler::InitializeCPUSpecificFeatures() {
|
||||
convolve_proc_ = WebRtc_GetCPUFeaturesARM() & kCPUFeatureNEON ?
|
||||
Convolve_NEON : Convolve_C;
|
||||
}
|
||||
#endif
|
||||
#else
|
||||
// Unknown architecture.
|
||||
#define CONVOLVE_FUNC Convolve_C
|
||||
void SincResampler::InitializeCPUSpecificFeatures() {}
|
||||
#endif
|
||||
|
||||
SincResampler::SincResampler(double io_sample_rate_ratio,
|
||||
int request_frames,
|
||||
SincResamplerCallback* read_cb)
|
||||
: io_sample_rate_ratio_(io_sample_rate_ratio),
|
||||
virtual_source_idx_(0),
|
||||
buffer_primed_(false),
|
||||
read_cb_(read_cb),
|
||||
block_size_(kDefaultBlockSize),
|
||||
buffer_size_(kDefaultBufferSize),
|
||||
request_frames_(request_frames),
|
||||
input_buffer_size_(request_frames_ + kKernelSize),
|
||||
// Create input buffers with a 16-byte alignment for SSE optimizations.
|
||||
kernel_storage_(static_cast<float*>(
|
||||
AlignedMalloc(sizeof(float) * kKernelStorageSize, 16))),
|
||||
@ -121,45 +164,19 @@ SincResampler::SincResampler(double io_sample_rate_ratio,
|
||||
kernel_window_storage_(static_cast<float*>(
|
||||
AlignedMalloc(sizeof(float) * kKernelStorageSize, 16))),
|
||||
input_buffer_(static_cast<float*>(
|
||||
AlignedMalloc(sizeof(float) * buffer_size_, 16))),
|
||||
#if defined(WEBRTC_ARCH_X86_FAMILY) && !defined(__SSE__)
|
||||
convolve_proc_(WebRtc_GetCPUInfo(kSSE2) ? Convolve_SSE : Convolve_C),
|
||||
#elif defined(WEBRTC_ARCH_ARM_V7) && !defined(WEBRTC_ARCH_ARM_NEON)
|
||||
convolve_proc_(WebRtc_GetCPUFeaturesARM() & kCPUFeatureNEON ?
|
||||
Convolve_NEON : Convolve_C),
|
||||
AlignedMalloc(sizeof(float) * input_buffer_size_, 16))),
|
||||
#if defined(WEBRTC_RESAMPLER_CPU_DETECTION)
|
||||
convolve_proc_(NULL),
|
||||
#endif
|
||||
// Setup various region pointers in the buffer (see diagram above).
|
||||
r0_(input_buffer_.get() + kKernelSize / 2),
|
||||
r1_(input_buffer_.get()),
|
||||
r2_(r0_),
|
||||
r3_(r0_ + block_size_ - kKernelSize / 2),
|
||||
r4_(r0_ + block_size_),
|
||||
r5_(r0_ + kKernelSize / 2) {
|
||||
Initialize();
|
||||
InitializeKernel();
|
||||
}
|
||||
|
||||
SincResampler::~SincResampler() {}
|
||||
|
||||
void SincResampler::Initialize() {
|
||||
// Ensure kKernelSize is a multiple of 32 for easy SSE optimizations; causes
|
||||
// r0_ and r5_ (used for input) to always be 16-byte aligned by virtue of
|
||||
// input_buffer_ being 16-byte aligned.
|
||||
COMPILE_ASSERT(kKernelSize % 32 == 0);
|
||||
r2_(input_buffer_.get() + kKernelSize / 2) {
|
||||
#if defined(WEBRTC_RESAMPLER_CPU_DETECTION)
|
||||
InitializeCPUSpecificFeatures();
|
||||
assert(convolve_proc_);
|
||||
#endif
|
||||
assert(request_frames_ > 0);
|
||||
Flush();
|
||||
assert(block_size_ > kKernelSize);
|
||||
// Basic sanity checks to ensure buffer regions are laid out correctly:
|
||||
// r0_ and r2_ should always be the same position.
|
||||
assert(r0_ == r2_);
|
||||
// r1_ at the beginning of the buffer.
|
||||
assert(r1_ == input_buffer_.get());
|
||||
// r1_ left of r2_, r2_ left of r5_ and r1_, r2_ size correct.
|
||||
assert(r2_ - r1_ == r5_ - r2_);
|
||||
// r3_ left of r4_, r5_ left of r0_ and r3_ size correct.
|
||||
assert(r4_ - r3_ == r5_ - r0_);
|
||||
// r3_, r4_ size correct and r4_ at the end of the buffer.
|
||||
assert(r4_ + (r4_ - r3_) == r1_ + buffer_size_);
|
||||
// r5_ size correct and at the end of the buffer.
|
||||
assert(r5_ + block_size_ == r1_ + buffer_size_);
|
||||
|
||||
memset(kernel_storage_.get(), 0,
|
||||
sizeof(*kernel_storage_.get()) * kKernelStorageSize);
|
||||
@ -167,7 +184,26 @@ void SincResampler::Initialize() {
|
||||
sizeof(*kernel_pre_sinc_storage_.get()) * kKernelStorageSize);
|
||||
memset(kernel_window_storage_.get(), 0,
|
||||
sizeof(*kernel_window_storage_.get()) * kKernelStorageSize);
|
||||
memset(input_buffer_.get(), 0, sizeof(*input_buffer_.get()) * buffer_size_);
|
||||
|
||||
InitializeKernel();
|
||||
}
|
||||
|
||||
SincResampler::~SincResampler() {}
|
||||
|
||||
void SincResampler::UpdateRegions(bool second_load) {
|
||||
// Setup various region pointers in the buffer (see diagram above). If we're
|
||||
// on the second load we need to slide r0_ to the right by kKernelSize / 2.
|
||||
r0_ = input_buffer_.get() + (second_load ? kKernelSize : kKernelSize / 2);
|
||||
r3_ = r0_ + request_frames_ - kKernelSize;
|
||||
r4_ = r0_ + request_frames_ - kKernelSize / 2;
|
||||
block_size_ = r4_ - r2_;
|
||||
|
||||
// r1_ at the beginning of the buffer.
|
||||
assert(r1_ == input_buffer_.get());
|
||||
// r1_ left of r2_, r4_ left of r3_ and size correct.
|
||||
assert(r2_ - r1_ == r4_ - r3_);
|
||||
// r2_ left of r3.
|
||||
assert(r2_ < r3_);
|
||||
}
|
||||
|
||||
void SincResampler::InitializeKernel() {
|
||||
@ -234,67 +270,59 @@ void SincResampler::SetRatio(double io_sample_rate_ratio) {
|
||||
}
|
||||
}
|
||||
|
||||
// If we know the minimum architecture avoid function hopping for CPU detection.
|
||||
#if defined(WEBRTC_ARCH_X86_FAMILY)
|
||||
#if defined(__SSE__)
|
||||
#define CONVOLVE_FUNC Convolve_SSE
|
||||
#else
|
||||
// X86 CPU detection required. |convolve_proc_| will be set upon construction.
|
||||
// TODO(dalecurtis): Once Chrome moves to a SSE baseline this can be removed.
|
||||
#define CONVOLVE_FUNC convolve_proc_
|
||||
#endif
|
||||
#elif defined(WEBRTC_ARCH_ARM_V7)
|
||||
#if defined(WEBRTC_ARCH_ARM_NEON)
|
||||
#define CONVOLVE_FUNC Convolve_NEON
|
||||
#else
|
||||
// NEON CPU detection required. |convolve_proc_| will be set upon construction.
|
||||
#define CONVOLVE_FUNC convolve_proc_
|
||||
#endif
|
||||
#else
|
||||
// Unknown architecture.
|
||||
#define CONVOLVE_FUNC Convolve_C
|
||||
#endif
|
||||
|
||||
void SincResampler::Resample(float* destination, int frames) {
|
||||
void SincResampler::Resample(int frames, float* destination) {
|
||||
int remaining_frames = frames;
|
||||
|
||||
// Step (1) -- Prime the input buffer at the start of the input stream.
|
||||
if (!buffer_primed_) {
|
||||
read_cb_->Run(r0_, block_size_ + kKernelSize / 2);
|
||||
if (!buffer_primed_ && remaining_frames) {
|
||||
read_cb_->Run(request_frames_, r0_);
|
||||
buffer_primed_ = true;
|
||||
}
|
||||
|
||||
// Step (2) -- Resample!
|
||||
// Step (2) -- Resample! const what we can outside of the loop for speed. It
|
||||
// actually has an impact on ARM performance. See inner loop comment below.
|
||||
const double current_io_ratio = io_sample_rate_ratio_;
|
||||
const float* const kernel_ptr = kernel_storage_.get();
|
||||
while (remaining_frames) {
|
||||
while (virtual_source_idx_ < block_size_) {
|
||||
// |i| may be negative if the last Resample() call ended on an iteration
|
||||
// that put |virtual_source_idx_| over the limit.
|
||||
//
|
||||
// Note: The loop construct here can severely impact performance on ARM
|
||||
// or when built with clang. See https://codereview.chromium.org/18566009/
|
||||
for (int i = ceil((block_size_ - virtual_source_idx_) / current_io_ratio);
|
||||
i > 0; --i) {
|
||||
assert(virtual_source_idx_ < block_size_);
|
||||
|
||||
// |virtual_source_idx_| lies in between two kernel offsets so figure out
|
||||
// what they are.
|
||||
int source_idx = static_cast<int>(virtual_source_idx_);
|
||||
double subsample_remainder = virtual_source_idx_ - source_idx;
|
||||
const int source_idx = virtual_source_idx_;
|
||||
const double subsample_remainder = virtual_source_idx_ - source_idx;
|
||||
|
||||
double virtual_offset_idx = subsample_remainder * kKernelOffsetCount;
|
||||
int offset_idx = static_cast<int>(virtual_offset_idx);
|
||||
const double virtual_offset_idx =
|
||||
subsample_remainder * kKernelOffsetCount;
|
||||
const int offset_idx = virtual_offset_idx;
|
||||
|
||||
// We'll compute "convolutions" for the two kernels which straddle
|
||||
// |virtual_source_idx_|.
|
||||
float* k1 = kernel_storage_.get() + offset_idx * kKernelSize;
|
||||
float* k2 = k1 + kKernelSize;
|
||||
const float* const k1 = kernel_ptr + offset_idx * kKernelSize;
|
||||
const float* const k2 = k1 + kKernelSize;
|
||||
|
||||
// Ensure |k1|, |k2| are 16-byte aligned for SIMD usage. Should always be
|
||||
// true so long as kKernelSize is a multiple of 16.
|
||||
assert((reinterpret_cast<uintptr_t>(k1) & 0x0F) == 0u);
|
||||
assert((reinterpret_cast<uintptr_t>(k2) & 0x0F) == 0u);
|
||||
assert(0u == (reinterpret_cast<uintptr_t>(k1) & 0x0F));
|
||||
assert(0u == (reinterpret_cast<uintptr_t>(k2) & 0x0F));
|
||||
|
||||
// Initialize input pointer based on quantized |virtual_source_idx_|.
|
||||
float* input_ptr = r1_ + source_idx;
|
||||
const float* const input_ptr = r1_ + source_idx;
|
||||
|
||||
// Figure out how much to weight each kernel's "convolution".
|
||||
double kernel_interpolation_factor = virtual_offset_idx - offset_idx;
|
||||
const double kernel_interpolation_factor =
|
||||
virtual_offset_idx - offset_idx;
|
||||
*destination++ = CONVOLVE_FUNC(
|
||||
input_ptr, k1, k2, kernel_interpolation_factor);
|
||||
|
||||
// Advance the virtual index.
|
||||
virtual_source_idx_ += io_sample_rate_ratio_;
|
||||
virtual_source_idx_ += current_io_ratio;
|
||||
|
||||
if (!--remaining_frames)
|
||||
return;
|
||||
@ -303,31 +331,31 @@ void SincResampler::Resample(float* destination, int frames) {
|
||||
// Wrap back around to the start.
|
||||
virtual_source_idx_ -= block_size_;
|
||||
|
||||
// Step (3) Copy r3_ to r1_ and r4_ to r2_.
|
||||
// Step (3) -- Copy r3_, r4_ to r1_, r2_.
|
||||
// This wraps the last input frames back to the start of the buffer.
|
||||
memcpy(r1_, r3_, sizeof(*input_buffer_.get()) * (kKernelSize / 2));
|
||||
memcpy(r2_, r4_, sizeof(*input_buffer_.get()) * (kKernelSize / 2));
|
||||
memcpy(r1_, r3_, sizeof(*input_buffer_.get()) * kKernelSize);
|
||||
|
||||
// Step (4)
|
||||
// Refresh the buffer with more input.
|
||||
read_cb_->Run(r5_, block_size_);
|
||||
// Step (4) -- Reinitialize regions if necessary.
|
||||
if (r0_ == r2_)
|
||||
UpdateRegions(true);
|
||||
|
||||
// Step (5) -- Refresh the buffer with more input.
|
||||
read_cb_->Run(request_frames_, r0_);
|
||||
}
|
||||
}
|
||||
|
||||
#undef CONVOLVE_FUNC
|
||||
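For reference, a scalar sketch of the convolution step dispatched through CONVOLVE_FUNC above; it mirrors what Convolve_C computes, while the SSE and NEON variants compute the same dot products four floats at a time (with slightly different rounding, as the unit test notes). Illustrative only:

float ConvolveScalarSketch(const float* input_ptr, const float* k1,
                           const float* k2,
                           double kernel_interpolation_factor) {
  // Dot products against the two kernels straddling |virtual_source_idx_|.
  float sum1 = 0.0f;
  float sum2 = 0.0f;
  for (int i = 0; i < SincResampler::kKernelSize; ++i) {
    sum1 += input_ptr[i] * k1[i];
    sum2 += input_ptr[i] * k2[i];
  }
  // Blend the two convolutions by the sub-sample position between offsets.
  return static_cast<float>((1.0 - kernel_interpolation_factor) * sum1 +
                            kernel_interpolation_factor * sum2);
}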
|
||||
int SincResampler::ChunkSize() {
|
||||
int SincResampler::ChunkSize() const {
|
||||
return block_size_ / io_sample_rate_ratio_;
|
||||
}
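// Illustration (not in the source): with the default request size of 512
// frames the initial block_size_ is 496, so for io_sample_rate_ratio_ = 3
// (48 kHz -> 16 kHz) ChunkSize() is 165 output frames. Requesting at most
// that many frames per Resample() call triggers a single read callback.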
|
||||
|
||||
int SincResampler::BlockSize() {
|
||||
return block_size_;
|
||||
}
|
||||
|
||||
void SincResampler::Flush() {
|
||||
virtual_source_idx_ = 0;
|
||||
buffer_primed_ = false;
|
||||
memset(input_buffer_.get(), 0, sizeof(*input_buffer_.get()) * buffer_size_);
|
||||
memset(input_buffer_.get(), 0,
|
||||
sizeof(*input_buffer_.get()) * input_buffer_size_);
|
||||
UpdateRegions(false);
|
||||
}
|
||||
|
||||
float SincResampler::Convolve_C(const float* input_ptr, const float* k1,
|
||||
|
@ -20,6 +20,13 @@
|
||||
#include "webrtc/test/testsupport/gtest_prod_util.h"
|
||||
#include "webrtc/typedefs.h"
|
||||
|
||||
#if (defined(WEBRTC_ARCH_X86_FAMILY) && !defined(WEBRTC_IOS) && \
|
||||
!defined(__SSE__)) || \
|
||||
(defined(WEBRTC_ARCH_ARM_V7) && !defined(WEBRTC_ARCH_ARM_NEON))
|
||||
// Convenience define.
|
||||
#define WEBRTC_RESAMPLER_CPU_DETECTION
|
||||
#endif
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
// Callback class for providing more data into the resampler. Expects |frames|
|
||||
@ -28,7 +35,7 @@ namespace webrtc {
|
||||
class SincResamplerCallback {
|
||||
public:
|
||||
virtual ~SincResamplerCallback() {}
|
||||
virtual void Run(float* destination, int frames) = 0;
|
||||
virtual void Run(int frames, float* destination) = 0;
|
||||
};
|
||||
|
||||
// SincResampler is a high-quality single-channel sample-rate converter.
|
||||
@ -40,43 +47,36 @@ class SincResampler {
|
||||
// TODO(dalecurtis): Test performance to see if we can jack this up to 64+.
|
||||
kKernelSize = 32,
|
||||
|
||||
// The number of destination frames generated per processing pass. Affects
|
||||
// how often and for how much SincResampler calls back for input. Must be
|
||||
// greater than kKernelSize.
|
||||
kDefaultBlockSize = 512,
|
||||
// Default request size. Affects how often and for how much SincResampler
|
||||
// calls back for input. Must be greater than kKernelSize.
|
||||
kDefaultRequestSize = 512,
|
||||
|
||||
// The kernel offset count is used for interpolation and is the number of
|
||||
// sub-sample kernel shifts. Can be adjusted for quality (higher is better)
|
||||
// at the expense of allocating more memory.
|
||||
kKernelOffsetCount = 32,
|
||||
kKernelStorageSize = kKernelSize * (kKernelOffsetCount + 1),
|
||||
|
||||
// The size (in samples) of the internal buffer used by the resampler.
|
||||
kDefaultBufferSize = kDefaultBlockSize + kKernelSize,
|
||||
};
|
||||
|
||||
// Constructs a SincResampler with the specified |read_cb|, which is used to
|
||||
// acquire audio data for resampling. |io_sample_rate_ratio| is the ratio of
|
||||
// input / output sample rates. If desired, the number of destination frames
|
||||
// generated per processing pass can be specified through |block_size|.
|
||||
// acquire audio data for resampling. |io_sample_rate_ratio| is the ratio
|
||||
// of input / output sample rates. |request_frames| controls the size in
|
||||
// frames of the buffer requested by each |read_cb| call. The value must be
|
||||
// greater than kKernelSize. Specify kDefaultRequestSize if there are no
|
||||
// request size constraints.
|
||||
SincResampler(double io_sample_rate_ratio,
|
||||
int request_frames,
|
||||
SincResamplerCallback* read_cb);
|
||||
SincResampler(double io_sample_rate_ratio,
|
||||
SincResamplerCallback* read_cb,
|
||||
int block_size);
|
||||
virtual ~SincResampler();
|
||||
|
||||
// Resample |frames| of data from |read_cb_| into |destination|.
|
||||
void Resample(float* destination, int frames);
|
||||
void Resample(int frames, float* destination);
|
||||
|
||||
// The maximum size in frames that guarantees Resample() will only make a
|
||||
// single call to |read_cb_| for more data.
|
||||
int ChunkSize();
|
||||
int ChunkSize() const;
|
||||
|
||||
// The number of source frames requested per processing pass (and equal to
|
||||
// |block_size| if provided at construction). The first pass will request
|
||||
// more to prime the buffer.
|
||||
int BlockSize();
|
||||
int request_frames() const { return request_frames_; }
|
||||
|
||||
// Flush all buffered data and reset internal indices. Not thread safe, do
|
||||
// not call while Resample() is in progress.
|
||||
@ -86,8 +86,8 @@ class SincResampler {
|
||||
// the kernels used for resampling. Not thread safe, do not call while
|
||||
// Resample() is in progress.
|
||||
//
|
||||
// TODO(ajm): use this in PushSincResampler rather than reconstructing
|
||||
// SincResampler.
|
||||
// TODO(ajm): Use this in PushSincResampler rather than reconstructing
|
||||
// SincResampler. We would also need a way to update |request_frames_|.
|
||||
void SetRatio(double io_sample_rate_ratio);
|
||||
|
||||
float* get_kernel_for_testing() { return kernel_storage_.get(); }
|
||||
@ -96,8 +96,14 @@ class SincResampler {
|
||||
FRIEND_TEST_ALL_PREFIXES(SincResamplerTest, Convolve);
|
||||
FRIEND_TEST_ALL_PREFIXES(SincResamplerTest, ConvolveBenchmark);
|
||||
|
||||
void Initialize();
|
||||
void InitializeKernel();
|
||||
void UpdateRegions(bool second_load);
|
||||
|
||||
// Selects runtime specific CPU features like SSE. Must be called before
|
||||
// using SincResampler.
|
||||
// TODO(ajm): Currently managed by the class internally. See the note with
|
||||
// |convolve_proc_| below.
|
||||
void InitializeCPUSpecificFeatures();
|
||||
|
||||
// Compute convolution of |k1| and |k2| over |input_ptr|, resultant sums are
|
||||
// linearly interpolated using |kernel_interpolation_factor|. On x86, the
|
||||
@ -128,11 +134,14 @@ class SincResampler {
|
||||
// Source of data for resampling.
|
||||
SincResamplerCallback* read_cb_;
|
||||
|
||||
// See kDefaultBlockSize.
|
||||
// The size (in samples) to request from each |read_cb_| execution.
|
||||
const int request_frames_;
|
||||
|
||||
// The number of source frames processed per pass.
|
||||
int block_size_;
|
||||
|
||||
// See kDefaultBufferSize.
|
||||
int buffer_size_;
|
||||
// The size (in samples) of the internal buffer used by the resampler.
|
||||
const int input_buffer_size_;
|
||||
|
||||
// Contains kKernelOffsetCount kernels back-to-back, each of size kKernelSize.
|
||||
// The kernel offsets are sub-sample shifts of a windowed sinc shifted from
|
||||
@ -145,21 +154,22 @@ class SincResampler {
|
||||
scoped_ptr_malloc<float, AlignedFree> input_buffer_;
|
||||
|
||||
// Stores the runtime selection of which Convolve function to use.
|
||||
#if (defined(WEBRTC_ARCH_X86_FAMILY) && !defined(__SSE__)) || \
|
||||
(defined(WEBRTC_ARCH_ARM_V7) && !defined(WEBRTC_ARCH_ARM_NEON))
|
||||
// TODO(ajm): Move to using a global static which must only be initialized
|
||||
// once by the user. We're not doing this initially, because we don't have
|
||||
// e.g. a LazyInstance helper in webrtc.
|
||||
#if defined(WEBRTC_RESAMPLER_CPU_DETECTION)
|
||||
typedef float (*ConvolveProc)(const float*, const float*, const float*,
|
||||
double);
|
||||
const ConvolveProc convolve_proc_;
|
||||
ConvolveProc convolve_proc_;
|
||||
#endif
|
||||
|
||||
// Pointers to the various regions inside |input_buffer_|. See the diagram at
|
||||
// the top of the .cc file for more information.
|
||||
float* const r0_;
|
||||
float* r0_;
|
||||
float* const r1_;
|
||||
float* const r2_;
|
||||
float* const r3_;
|
||||
float* const r4_;
|
||||
float* const r5_;
|
||||
float* r3_;
|
||||
float* r4_;
|
||||
|
||||
DISALLOW_COPY_AND_ASSIGN(SincResampler);
|
||||
};
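A minimal usage sketch of the updated interface (illustrative only; it assumes the caller supplies the read callback and uses the default request size):

#include <cstring>

class ZeroSource : public SincResamplerCallback {
 public:
  // Fills the requested frames; a real caller would copy input-rate audio.
  virtual void Run(int frames, float* destination) {
    memset(destination, 0, sizeof(*destination) * frames);
  }
};

void DownsampleSketch() {
  ZeroSource source;
  // 48 kHz -> 16 kHz, so the input/output sample rate ratio is 3.
  SincResampler resampler(48000.0 / 16000.0,
                          SincResampler::kDefaultRequestSize, &source);
  float output[512];
  // Requesting at most ChunkSize() frames causes a single read callback.
  resampler.Resample(resampler.ChunkSize(), output);
}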
|
||||
|
@ -26,11 +26,11 @@ float SincResampler::Convolve_NEON(const float* input_ptr, const float* k1,
|
||||
|
||||
const float* upper = input_ptr + kKernelSize;
|
||||
for (; input_ptr < upper; ) {
|
||||
m_input = vld1q_f32((const float32_t *) input_ptr);
|
||||
m_input = vld1q_f32(input_ptr);
|
||||
input_ptr += 4;
|
||||
m_sums1 = vmlaq_f32(m_sums1, m_input, vld1q_f32((const float32_t *) k1));
|
||||
m_sums1 = vmlaq_f32(m_sums1, m_input, vld1q_f32(k1));
|
||||
k1 += 4;
|
||||
m_sums2 = vmlaq_f32(m_sums2, m_input, vld1q_f32((const float32_t *) k2));
|
||||
m_sums2 = vmlaq_f32(m_sums2, m_input, vld1q_f32(k2));
|
||||
k2 += 4;
|
||||
}
|
||||
|
||||
|
@ -14,7 +14,7 @@
|
||||
// MSVC++ requires this to be set before any other includes to get M_PI.
|
||||
#define _USE_MATH_DEFINES
|
||||
|
||||
#include <cmath>
|
||||
#include <math.h>
|
||||
|
||||
#include "testing/gmock/include/gmock/gmock.h"
|
||||
#include "testing/gtest/include/gtest/gtest.h"
|
||||
@ -36,18 +36,18 @@ static const double kKernelInterpolationFactor = 0.5;
|
||||
// Helper class to ensure ChunkedResample() functions properly.
|
||||
class MockSource : public SincResamplerCallback {
|
||||
public:
|
||||
MOCK_METHOD2(Run, void(float* destination, int frames));
|
||||
MOCK_METHOD2(Run, void(int frames, float* destination));
|
||||
};
|
||||
|
||||
ACTION(ClearBuffer) {
|
||||
memset(arg0, 0, arg1 * sizeof(float));
|
||||
memset(arg1, 0, arg0 * sizeof(float));
|
||||
}
|
||||
|
||||
ACTION(FillBuffer) {
|
||||
// Value chosen arbitrarily such that SincResampler resamples it to something
|
||||
// easily representable on all platforms; e.g., using kSampleRateRatio this
|
||||
// becomes 1.81219.
|
||||
memset(arg0, 64, arg1 * sizeof(float));
|
||||
memset(arg1, 64, arg0 * sizeof(float));
|
||||
}
|
||||
|
||||
// Test requesting multiples of ChunkSize() frames results in the proper number
|
||||
@ -57,7 +57,8 @@ TEST(SincResamplerTest, ChunkedResample) {
|
||||
|
||||
// Choose a high ratio of input to output samples which will result in quick
|
||||
// exhaustion of SincResampler's internal buffers.
|
||||
SincResampler resampler(kSampleRateRatio, &mock_source);
|
||||
SincResampler resampler(kSampleRateRatio, SincResampler::kDefaultRequestSize,
|
||||
&mock_source);
|
||||
|
||||
static const int kChunks = 2;
|
||||
int max_chunk_size = resampler.ChunkSize() * kChunks;
|
||||
@ -66,25 +67,26 @@ TEST(SincResamplerTest, ChunkedResample) {
|
||||
// Verify requesting ChunkSize() frames causes a single callback.
|
||||
EXPECT_CALL(mock_source, Run(_, _))
|
||||
.Times(1).WillOnce(ClearBuffer());
|
||||
resampler.Resample(resampled_destination.get(), resampler.ChunkSize());
|
||||
resampler.Resample(resampler.ChunkSize(), resampled_destination.get());
|
||||
|
||||
// Verify requesting kChunks * ChunkSize() frames causes kChunks callbacks.
|
||||
testing::Mock::VerifyAndClear(&mock_source);
|
||||
EXPECT_CALL(mock_source, Run(_, _))
|
||||
.Times(kChunks).WillRepeatedly(ClearBuffer());
|
||||
resampler.Resample(resampled_destination.get(), max_chunk_size);
|
||||
resampler.Resample(max_chunk_size, resampled_destination.get());
|
||||
}
|
||||
|
||||
// Test flush resets the internal state properly.
|
||||
TEST(SincResamplerTest, Flush) {
|
||||
MockSource mock_source;
|
||||
SincResampler resampler(kSampleRateRatio, &mock_source);
|
||||
SincResampler resampler(kSampleRateRatio, SincResampler::kDefaultRequestSize,
|
||||
&mock_source);
|
||||
scoped_array<float> resampled_destination(new float[resampler.ChunkSize()]);
|
||||
|
||||
// Fill the resampler with junk data.
|
||||
EXPECT_CALL(mock_source, Run(_, _))
|
||||
.Times(1).WillOnce(FillBuffer());
|
||||
resampler.Resample(resampled_destination.get(), resampler.ChunkSize() / 2);
|
||||
resampler.Resample(resampler.ChunkSize() / 2, resampled_destination.get());
|
||||
ASSERT_NE(resampled_destination[0], 0);
|
||||
|
||||
// Flush and request more data, which should all be zeros now.
|
||||
@ -92,11 +94,25 @@ TEST(SincResamplerTest, Flush) {
|
||||
testing::Mock::VerifyAndClear(&mock_source);
|
||||
EXPECT_CALL(mock_source, Run(_, _))
|
||||
.Times(1).WillOnce(ClearBuffer());
|
||||
resampler.Resample(resampled_destination.get(), resampler.ChunkSize() / 2);
|
||||
resampler.Resample(resampler.ChunkSize() / 2, resampled_destination.get());
|
||||
for (int i = 0; i < resampler.ChunkSize() / 2; ++i)
|
||||
ASSERT_FLOAT_EQ(resampled_destination[i], 0);
|
||||
}
|
||||
|
||||
// Benchmark the cost of SetRatio(), which reinitializes the resampling kernels.
TEST(SincResamplerTest, DISABLED_SetRatioBench) {
|
||||
MockSource mock_source;
|
||||
SincResampler resampler(kSampleRateRatio, SincResampler::kDefaultRequestSize,
|
||||
&mock_source);
|
||||
|
||||
TickTime start = TickTime::Now();
|
||||
for (int i = 1; i < 10000; ++i)
|
||||
resampler.SetRatio(1.0 / i);
|
||||
double total_time_c_us = (TickTime::Now() - start).Microseconds();
|
||||
printf("SetRatio() took %.2fms.\n", total_time_c_us / 1000);
|
||||
}
|
||||
|
||||
|
||||
// Define platform independent function name for Convolve* tests.
|
||||
#if defined(WEBRTC_ARCH_X86_FAMILY)
|
||||
#define CONVOLVE_FUNC Convolve_SSE
|
||||
@ -117,7 +133,8 @@ TEST(SincResamplerTest, Convolve) {
|
||||
|
||||
// Initialize a dummy resampler.
|
||||
MockSource mock_source;
|
||||
SincResampler resampler(kSampleRateRatio, &mock_source);
|
||||
SincResampler resampler(kSampleRateRatio, SincResampler::kDefaultRequestSize,
|
||||
&mock_source);
|
||||
|
||||
// The optimized Convolve methods are slightly more precise than Convolve_C(),
|
||||
// so comparison must be done using an epsilon.
|
||||
@ -150,7 +167,8 @@ TEST(SincResamplerTest, Convolve) {
|
||||
TEST(SincResamplerTest, ConvolveBenchmark) {
|
||||
// Initialize a dummy resampler.
|
||||
MockSource mock_source;
|
||||
SincResampler resampler(kSampleRateRatio, &mock_source);
|
||||
SincResampler resampler(kSampleRateRatio, SincResampler::kDefaultRequestSize,
|
||||
&mock_source);
|
||||
|
||||
// Retrieve benchmark iterations from command line.
|
||||
// TODO(ajm): Reintroduce this as a command line option.
|
||||
@ -243,9 +261,8 @@ TEST_P(SincResamplerTest, Resample) {
|
||||
input_rate_, input_samples, input_nyquist_freq, 0);
|
||||
|
||||
const double io_ratio = input_rate_ / static_cast<double>(output_rate_);
|
||||
SincResampler resampler(
|
||||
io_ratio,
|
||||
&resampler_source);
|
||||
SincResampler resampler(io_ratio, SincResampler::kDefaultRequestSize,
|
||||
&resampler_source);
|
||||
|
||||
// Force an update to the sample rate ratio to ensure dynamic sample rate
// changes are working correctly.
|
||||
@ -265,12 +282,12 @@ TEST_P(SincResamplerTest, Resample) {
|
||||
scoped_array<float> pure_destination(new float[output_samples]);
|
||||
|
||||
// Generate resampled signal.
|
||||
resampler.Resample(resampled_destination.get(), output_samples);
|
||||
resampler.Resample(output_samples, resampled_destination.get());
|
||||
|
||||
// Generate pure signal.
|
||||
SinusoidalLinearChirpSource pure_source(
|
||||
output_rate_, output_samples, input_nyquist_freq, 0);
|
||||
pure_source.Run(pure_destination.get(), output_samples);
|
||||
pure_source.Run(output_samples, pure_destination.get());
|
||||
|
||||
// Range of the Nyquist frequency (0.5 * min(input rate, output_rate)) which
|
||||
// we refer to as low and high.
|
||||
|
@ -13,7 +13,7 @@
|
||||
|
||||
#include "webrtc/common_audio/resampler/sinusoidal_linear_chirp_source.h"
|
||||
|
||||
#include <cmath>
|
||||
#include <math.h>
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
@ -29,7 +29,7 @@ SinusoidalLinearChirpSource::SinusoidalLinearChirpSource(int sample_rate,
|
||||
k_ = (max_frequency_ - kMinFrequency) / duration;
|
||||
}
|
||||
|
||||
void SinusoidalLinearChirpSource::Run(float* destination, int frames) {
|
||||
void SinusoidalLinearChirpSource::Run(int frames, float* destination) {
|
||||
for (int i = 0; i < frames; ++i, ++current_index_) {
|
||||
// Filter out frequencies higher than Nyquist.
|
||||
if (Frequency(current_index_) > 0.5 * sample_rate_) {
|
||||
|
@ -31,7 +31,7 @@ class SinusoidalLinearChirpSource : public SincResamplerCallback {
|
||||
|
||||
virtual ~SinusoidalLinearChirpSource() {}
|
||||
|
||||
virtual void Run(float* destination, int frames);
|
||||
virtual void Run(int frames, float* destination) OVERRIDE;
|
||||
|
||||
double Frequency(int position);
|
||||
|
||||
|
@ -174,4 +174,3 @@ void WebRtcSpl_ComplexBitReverse(int16_t frfi[], int stages) {
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -15,8 +15,8 @@
|
||||
*
|
||||
*/
|
||||
|
||||
#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
|
||||
#include "webrtc/common_audio/signal_processing/complex_fft_tables.h"
|
||||
#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
|
||||
|
||||
#define CFFTSFT 14
|
||||
#define CFFTRND 1
|
||||
|
@ -9,8 +9,8 @@
|
||||
*/
|
||||
|
||||
|
||||
#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
|
||||
#include "webrtc/common_audio/signal_processing/complex_fft_tables.h"
|
||||
#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
|
||||
|
||||
#define CFFTSFT 14
|
||||
#define CFFTRND 1
|
||||
@ -320,4 +320,3 @@ int WebRtcSpl_ComplexIFFT(int16_t frfi[], int stages, int mode) {
|
||||
return scale;
|
||||
|
||||
}
|
||||
|
||||
|
@ -0,0 +1,104 @@
|
||||
/*
|
||||
* Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
|
||||
|
||||
void WebRtcSpl_CrossCorrelation_mips(int32_t* cross_correlation,
|
||||
const int16_t* seq1,
|
||||
const int16_t* seq2,
|
||||
int16_t dim_seq,
|
||||
int16_t dim_cross_correlation,
|
||||
int16_t right_shifts,
|
||||
int16_t step_seq2) {
|
||||
|
||||
int32_t t0 = 0, t1 = 0, t2 = 0, t3 = 0, sum = 0;
|
||||
int16_t *pseq2 = NULL;
|
||||
int16_t *pseq1 = NULL;
|
||||
int16_t *pseq1_0 = (int16_t*)&seq1[0];
|
||||
int16_t *pseq2_0 = (int16_t*)&seq2[0];
|
||||
int k = 0;
|
||||
|
||||
__asm __volatile (
|
||||
".set push \n\t"
|
||||
".set noreorder \n\t"
|
||||
"sll %[step_seq2], %[step_seq2], 1 \n\t"
|
||||
"andi %[t0], %[dim_seq], 1 \n\t"
|
||||
"bgtz %[t0], 3f \n\t"
|
||||
" nop \n\t"
|
||||
"1: \n\t"
|
||||
"move %[pseq1], %[pseq1_0] \n\t"
|
||||
"move %[pseq2], %[pseq2_0] \n\t"
|
||||
"sra %[k], %[dim_seq], 1 \n\t"
|
||||
"addiu %[dim_cc], %[dim_cc], -1 \n\t"
|
||||
"xor %[sum], %[sum], %[sum] \n\t"
|
||||
"2: \n\t"
|
||||
"lh %[t0], 0(%[pseq1]) \n\t"
|
||||
"lh %[t1], 0(%[pseq2]) \n\t"
|
||||
"lh %[t2], 2(%[pseq1]) \n\t"
|
||||
"lh %[t3], 2(%[pseq2]) \n\t"
|
||||
"mul %[t0], %[t0], %[t1] \n\t"
|
||||
"addiu %[k], %[k], -1 \n\t"
|
||||
"mul %[t2], %[t2], %[t3] \n\t"
|
||||
"addiu %[pseq1], %[pseq1], 4 \n\t"
|
||||
"addiu %[pseq2], %[pseq2], 4 \n\t"
|
||||
"srav %[t0], %[t0], %[right_shifts] \n\t"
|
||||
"addu %[sum], %[sum], %[t0] \n\t"
|
||||
"srav %[t2], %[t2], %[right_shifts] \n\t"
|
||||
"bgtz %[k], 2b \n\t"
|
||||
" addu %[sum], %[sum], %[t2] \n\t"
|
||||
"addu %[pseq2_0], %[pseq2_0], %[step_seq2] \n\t"
|
||||
"sw %[sum], 0(%[cc]) \n\t"
|
||||
"bgtz %[dim_cc], 1b \n\t"
|
||||
" addiu %[cc], %[cc], 4 \n\t"
|
||||
"b 6f \n\t"
|
||||
" nop \n\t"
|
||||
"3: \n\t"
|
||||
"move %[pseq1], %[pseq1_0] \n\t"
|
||||
"move %[pseq2], %[pseq2_0] \n\t"
|
||||
"sra %[k], %[dim_seq], 1 \n\t"
|
||||
"addiu %[dim_cc], %[dim_cc], -1 \n\t"
|
||||
"beqz %[k], 5f \n\t"
|
||||
" xor %[sum], %[sum], %[sum] \n\t"
|
||||
"4: \n\t"
|
||||
"lh %[t0], 0(%[pseq1]) \n\t"
|
||||
"lh %[t1], 0(%[pseq2]) \n\t"
|
||||
"lh %[t2], 2(%[pseq1]) \n\t"
|
||||
"lh %[t3], 2(%[pseq2]) \n\t"
|
||||
"mul %[t0], %[t0], %[t1] \n\t"
|
||||
"addiu %[k], %[k], -1 \n\t"
|
||||
"mul %[t2], %[t2], %[t3] \n\t"
|
||||
"addiu %[pseq1], %[pseq1], 4 \n\t"
|
||||
"addiu %[pseq2], %[pseq2], 4 \n\t"
|
||||
"srav %[t0], %[t0], %[right_shifts] \n\t"
|
||||
"addu %[sum], %[sum], %[t0] \n\t"
|
||||
"srav %[t2], %[t2], %[right_shifts] \n\t"
|
||||
"bgtz %[k], 4b \n\t"
|
||||
" addu %[sum], %[sum], %[t2] \n\t"
|
||||
"5: \n\t"
|
||||
"lh %[t0], 0(%[pseq1]) \n\t"
|
||||
"lh %[t1], 0(%[pseq2]) \n\t"
|
||||
"mul %[t0], %[t0], %[t1] \n\t"
|
||||
"srav %[t0], %[t0], %[right_shifts] \n\t"
|
||||
"addu %[sum], %[sum], %[t0] \n\t"
|
||||
"addu %[pseq2_0], %[pseq2_0], %[step_seq2] \n\t"
|
||||
"sw %[sum], 0(%[cc]) \n\t"
|
||||
"bgtz %[dim_cc], 3b \n\t"
|
||||
" addiu %[cc], %[cc], 4 \n\t"
|
||||
"6: \n\t"
|
||||
".set pop \n\t"
|
||||
: [step_seq2] "+r" (step_seq2), [t0] "=&r" (t0), [t1] "=&r" (t1),
|
||||
[t2] "=&r" (t2), [t3] "=&r" (t3), [pseq1] "=&r" (pseq1),
|
||||
[pseq2] "=&r" (pseq2), [pseq1_0] "+r" (pseq1_0), [pseq2_0] "+r" (pseq2_0),
|
||||
[k] "=&r" (k), [dim_cc] "+r" (dim_cross_correlation), [sum] "=&r" (sum),
|
||||
[cc] "+r" (cross_correlation)
|
||||
: [dim_seq] "r" (dim_seq), [right_shifts] "r" (right_shifts)
|
||||
: "hi", "lo", "memory"
|
||||
);
|
||||
}
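For readers who do not want to trace the MIPS assembly, the routine computes each output element as the right-shifted dot product of seq1 with a window of seq2 that advances by step_seq2 elements per lag; a simplified reference sketch (illustrative, not part of the patch):

void CrossCorrelationSketch(int32_t* cross_correlation,
                            const int16_t* seq1,
                            const int16_t* seq2,
                            int16_t dim_seq,
                            int16_t dim_cross_correlation,
                            int16_t right_shifts,
                            int16_t step_seq2) {
  int i, j;
  for (i = 0; i < dim_cross_correlation; ++i) {
    int32_t sum = 0;
    for (j = 0; j < dim_seq; ++j) {
      /* Each product is shifted before accumulation, as in the assembly. */
      sum += (seq1[j] * seq2[j]) >> right_shifts;
    }
    cross_correlation[i] = sum;
    seq2 += step_seq2;
  }
}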
|
@ -167,4 +167,3 @@ int WebRtcSpl_DownsampleFast_mips(const int16_t* data_in,
|
||||
#endif // #if defined(MIPS_DSP_R2_LE)
|
||||
return 0;
|
||||
}
|
||||
|
||||
|
@ -9,7 +9,7 @@
|
||||
*/
|
||||
#include <assert.h>
|
||||
|
||||
#include "signal_processing_library.h"
|
||||
#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
|
||||
|
||||
void WebRtcSpl_FilterARFastQ12(const int16_t* data_in,
|
||||
int16_t* data_out,
|
||||
|
@ -13,70 +13,112 @@
|
||||
|
||||
#include "webrtc/typedefs.h"
|
||||
|
||||
// For ComplexFFT(), the maximum fft order is 10;
|
||||
// for OpenMax FFT in ARM, it is 12;
|
||||
// WebRTC APM uses orders of only 7 and 8.
|
||||
enum {kMaxFFTOrder = 10};
|
||||
|
||||
struct RealFFT;
|
||||
|
||||
#ifdef __cplusplus
|
||||
extern "C" {
|
||||
#endif
|
||||
|
||||
typedef struct RealFFT* (*CreateRealFFT)(int order);
|
||||
typedef void (*FreeRealFFT)(struct RealFFT* self);
|
||||
typedef int (*RealForwardFFT)(struct RealFFT* self,
|
||||
const int16_t* data_in,
|
||||
int16_t* data_out);
|
||||
const int16_t* real_data_in,
|
||||
int16_t* complex_data_out);
|
||||
typedef int (*RealInverseFFT)(struct RealFFT* self,
|
||||
const int16_t* data_in,
|
||||
int16_t* data_out);
|
||||
const int16_t* complex_data_in,
|
||||
int16_t* real_data_out);
|
||||
|
||||
extern CreateRealFFT WebRtcSpl_CreateRealFFT;
|
||||
extern FreeRealFFT WebRtcSpl_FreeRealFFT;
|
||||
extern RealForwardFFT WebRtcSpl_RealForwardFFT;
|
||||
extern RealInverseFFT WebRtcSpl_RealInverseFFT;
|
||||
|
||||
struct RealFFT* WebRtcSpl_CreateRealFFT(int order);
|
||||
void WebRtcSpl_FreeRealFFT(struct RealFFT* self);
|
||||
struct RealFFT* WebRtcSpl_CreateRealFFTC(int order);
|
||||
void WebRtcSpl_FreeRealFFTC(struct RealFFT* self);
|
||||
|
||||
// TODO(kma): Implement FFT functions for real signals.
|
||||
#if (defined WEBRTC_DETECT_ARM_NEON) || (defined WEBRTC_ARCH_ARM_NEON)
|
||||
struct RealFFT* WebRtcSpl_CreateRealFFTNeon(int order);
|
||||
void WebRtcSpl_FreeRealFFTNeon(struct RealFFT* self);
|
||||
#endif
|
||||
|
||||
// Compute the forward FFT for a complex signal of length 2^order.
// Compute an FFT for a real-valued signal of length 2^order, where
// 1 < order <= kMaxFFTOrder. Transform length is determined by the
// specification structure, which must be initialized prior to calling the FFT
// function with WebRtcSpl_CreateRealFFT().
// The relationship between the input and output sequences can
// be expressed in terms of the DFT, i.e.:
//     x[n] = (2^(-scalefactor)/N) * SUM[k=0,...,N-1] X[k] * e^(j*n*k*2*pi/N)
//     n = 0, 1, 2, ..., N-1
//     N = 2^order.
// The conjugate-symmetric output sequence is represented using a CCS vector,
// which is of length N+2, and is organized as follows:
//     Index:      0   1   2   3   4   5   ...   N-2       N-1       N       N+1
//     Component:  R0  0   R1  I1  R2  I2  ...   R[N/2-1]  I[N/2-1]  R[N/2]  0
// where R[n] and I[n], respectively, denote the real and imaginary components
// for FFT bin 'n'. Bins are numbered from 0 to N/2, where N is the FFT length.
// Bin index 0 corresponds to the DC component, and bin index N/2 corresponds to
// the foldover frequency.
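As a concrete illustration of the CCS layout (order = 3, so N = 8 and the vector holds N + 2 = 10 elements):

// Index:      0   1   2   3   4   5   6   7   8   9
// Component:  R0  0   R1  I1  R2  I2  R3  I3  R4  0
// R0 is the DC bin and R4 (= R[N/2]) is the foldover (Nyquist) bin; both have
// zero imaginary parts for a real-valued input.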
//
|
||||
// Input Arguments:
|
||||
// self - pointer to preallocated and initialized FFT specification structure.
|
||||
// data_in - the input signal.
|
||||
// real_data_in - the input signal. For an ARM Neon platform, it must be
|
||||
// aligned on a 32-byte boundary.
|
||||
//
|
||||
// Output Arguments:
|
||||
// data_out - the output signal; must be different to data_in.
|
||||
// complex_data_out - the output complex signal with (2^order + 2) 16-bit
|
||||
// elements. For an ARM Neon platform, it must be different
|
||||
// from real_data_in, and aligned on a 32-byte boundary.
|
||||
//
|
||||
// Return Value:
|
||||
// 0 - FFT calculation is successful.
|
||||
// -1 - Error
|
||||
//
|
||||
// -1 - Error with bad arguments (NULL pointers).
|
||||
int WebRtcSpl_RealForwardFFTC(struct RealFFT* self,
|
||||
const int16_t* data_in,
|
||||
int16_t* data_out);
|
||||
const int16_t* real_data_in,
|
||||
int16_t* complex_data_out);
|
||||
|
||||
#if (defined WEBRTC_DETECT_ARM_NEON) || (defined WEBRTC_ARCH_ARM_NEON)
|
||||
int WebRtcSpl_RealForwardFFTNeon(struct RealFFT* self,
|
||||
const int16_t* data_in,
|
||||
int16_t* data_out);
|
||||
const int16_t* real_data_in,
|
||||
int16_t* complex_data_out);
|
||||
#endif
|
||||
|
||||
// Compute the inverse FFT for a complex signal of length 2^order.
|
||||
// Compute the inverse FFT for a conjugate-symmetric input sequence of length of
|
||||
// 2^order, where 1 < order <= MAX_FFT_ORDER. Transform length is determined by
|
||||
// the specification structure, which must be initialized prior to calling the
|
||||
// FFT function with WebRtcSpl_CreateRealFFT().
|
||||
// For a transform of length M, the input sequence is represented using a packed
|
||||
// CCS vector of length M+2, which is explained in the comments for
|
||||
// WebRtcSpl_RealForwardFFTC above.
|
||||
//
|
||||
// Input Arguments:
|
||||
// self - pointer to preallocated and initialized FFT specification structure.
|
||||
// data_in - the input signal.
|
||||
// complex_data_in - the input complex signal with (2^order + 2) 16-bit
|
||||
// elements. For an ARM Neon platform, it must be aligned on
|
||||
// a 32-byte boundary.
|
||||
//
|
||||
// Output Arguments:
|
||||
// data_out - the output signal; must be different to data_in.
|
||||
// real_data_out - the output real signal. For an ARM Neon platform, it must
|
||||
// be different to complex_data_in, and aligned on a 32-byte
|
||||
// boundary.
|
||||
//
|
||||
// Return Value:
|
||||
// 0 or a positive number - a value that the elements in the |data_out| should
|
||||
// be shifted left with in order to get correct
|
||||
// physical values.
|
||||
// -1 - Error
|
||||
// 0 or a positive number - a value that the elements in the |real_data_out|
|
||||
// should be shifted left with in order to get
|
||||
// correct physical values.
|
||||
// -1 - Error with bad arguments (NULL pointers).
|
||||
int WebRtcSpl_RealInverseFFTC(struct RealFFT* self,
|
||||
const int16_t* data_in,
|
||||
int16_t* data_out);
|
||||
const int16_t* complex_data_in,
|
||||
int16_t* real_data_out);
|
||||
|
||||
#if (defined WEBRTC_DETECT_ARM_NEON) || (defined WEBRTC_ARCH_ARM_NEON)
|
||||
int WebRtcSpl_RealInverseFFTNeon(struct RealFFT* self,
|
||||
const int16_t* data_in,
|
||||
int16_t* data_out);
|
||||
const int16_t* complex_data_in,
|
||||
int16_t* real_data_out);
|
||||
#endif
|
||||
|
||||
#ifdef __cplusplus
|
||||
|
@ -73,6 +73,8 @@
|
||||
|
||||
#ifndef WEBRTC_ARCH_ARM_V7
|
||||
// For ARMv7 platforms, these are inline functions in spl_inl_armv7.h
|
||||
#ifndef MIPS32_LE
|
||||
// For MIPS platforms, these are inline functions in spl_inl_mips.h
|
||||
#define WEBRTC_SPL_MUL_16_16(a, b) \
|
||||
((int32_t) (((int16_t)(a)) * ((int16_t)(b))))
|
||||
#define WEBRTC_SPL_MUL_16_32_RSFT16(a, b) \
|
||||
@ -87,6 +89,7 @@
|
||||
(WEBRTC_SPL_MUL_16_32_RSFT16(( \
|
||||
(int16_t)((a32 & 0x0000FFFF) >> 1)), b32) >> 15)))
|
||||
#endif
|
||||
#endif
|
||||
|
||||
#define WEBRTC_SPL_MUL_16_32_RSFT11(a, b) \
|
||||
((WEBRTC_SPL_MUL_16_16(a, (b) >> 16) << 5) \
|
||||
@ -456,6 +459,15 @@ int WebRtcSpl_ScaleAndAddVectorsWithRoundNeon(const int16_t* in_vector1,
|
||||
int16_t* out_vector,
|
||||
int length);
|
||||
#endif
|
||||
#if defined(MIPS_DSP_R1_LE)
|
||||
int WebRtcSpl_ScaleAndAddVectorsWithRound_mips(const int16_t* in_vector1,
|
||||
int16_t in_vector1_scale,
|
||||
const int16_t* in_vector2,
|
||||
int16_t in_vector2_scale,
|
||||
int right_shifts,
|
||||
int16_t* out_vector,
|
||||
int length);
|
||||
#endif
|
||||
// End: Vector scaling operations.
|
||||
|
||||
// iLBC specific functions. Implementations in ilbc_specific_functions.c.
|
||||
@ -627,6 +639,15 @@ void WebRtcSpl_CrossCorrelationNeon(int32_t* cross_correlation,
|
||||
int16_t right_shifts,
|
||||
int16_t step_seq2);
|
||||
#endif
|
||||
#if defined(MIPS32_LE)
|
||||
void WebRtcSpl_CrossCorrelation_mips(int32_t* cross_correlation,
|
||||
const int16_t* seq1,
|
||||
const int16_t* seq2,
|
||||
int16_t dim_seq,
|
||||
int16_t dim_cross_correlation,
|
||||
int16_t right_shifts,
|
||||
int16_t step_seq2);
|
||||
#endif
|
||||
|
||||
// Creates (the first half of) a Hanning window. Size must be at least 1 and
|
||||
// at most 512.
|
||||
|
@ -19,6 +19,11 @@
|
||||
#include "webrtc/common_audio/signal_processing/include/spl_inl_armv7.h"
|
||||
#else
|
||||
|
||||
#if defined(MIPS32_LE)
|
||||
#include "webrtc/common_audio/signal_processing/include/spl_inl_mips.h"
|
||||
#endif
|
||||
|
||||
#if !defined(MIPS_DSP_R1_LE)
|
||||
static __inline int16_t WebRtcSpl_SatW32ToW16(int32_t value32) {
|
||||
int16_t out16 = (int16_t) value32;
|
||||
|
||||
@ -37,7 +42,9 @@ static __inline int16_t WebRtcSpl_AddSatW16(int16_t a, int16_t b) {
|
||||
static __inline int16_t WebRtcSpl_SubSatW16(int16_t var1, int16_t var2) {
|
||||
return WebRtcSpl_SatW32ToW16((int32_t) var1 - (int32_t) var2);
|
||||
}
|
||||
#endif // #if !defined(MIPS_DSP_R1_LE)
|
||||
|
||||
#if !defined(MIPS32_LE)
|
||||
static __inline int16_t WebRtcSpl_GetSizeInBits(uint32_t n) {
|
||||
int bits;
|
||||
|
||||
@ -121,11 +128,13 @@ static __inline int WebRtcSpl_NormW16(int16_t a) {
|
||||
static __inline int32_t WebRtc_MulAccumW16(int16_t a, int16_t b, int32_t c) {
|
||||
return (a * b + c);
|
||||
}
|
||||
#endif // #if !defined(MIPS32_LE)
|
||||
|
||||
#endif // WEBRTC_ARCH_ARM_V7
|
||||
|
||||
// The following functions have no optimized versions.
|
||||
// TODO(kma): Consider saturating add/sub instructions in X86 platform.
|
||||
#if !defined(MIPS_DSP_R1_LE)
|
||||
static __inline int32_t WebRtcSpl_AddSatW32(int32_t l_var1, int32_t l_var2) {
|
||||
int32_t l_sum;
|
||||
|
||||
@ -163,5 +172,6 @@ static __inline int32_t WebRtcSpl_SubSatW32(int32_t l_var1, int32_t l_var2) {
|
||||
|
||||
return l_diff;
|
||||
}
|
||||
#endif // #if !defined(MIPS_DSP_R1_LE)
|
||||
|
||||
#endif // WEBRTC_SPL_SPL_INL_H_
|
||||
|
@ -0,0 +1,281 @@
|
||||
/*
|
||||
* Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
|
||||
// This header file includes the inline functions in
// the fixed-point signal processing library.

#ifndef WEBRTC_SPL_SPL_INL_MIPS_H_
#define WEBRTC_SPL_SPL_INL_MIPS_H_
|
||||
|
||||
static __inline int32_t WEBRTC_SPL_MUL_16_16(int32_t a,
|
||||
int32_t b) {
|
||||
int32_t value32 = 0;
|
||||
int32_t a1 = 0, b1 = 0;
|
||||
|
||||
__asm __volatile(
|
||||
#if defined(MIPS32_R2_LE)
|
||||
"seh %[a1], %[a] \n\t"
|
||||
"seh %[b1], %[b] \n\t"
|
||||
#else
|
||||
"sll %[a1], %[a], 16 \n\t"
|
||||
"sll %[b1], %[b], 16 \n\t"
|
||||
"sra %[a1], %[a1], 16 \n\t"
|
||||
"sra %[b1], %[b1], 16 \n\t"
|
||||
#endif
|
||||
"mul %[value32], %[a1], %[b1] \n\t"
|
||||
: [value32] "=r" (value32), [a1] "=&r" (a1), [b1] "=&r" (b1)
|
||||
: [a] "r" (a), [b] "r" (b)
|
||||
: "hi", "lo"
|
||||
);
|
||||
return value32;
|
||||
}
|
||||
|
||||
static __inline int32_t WEBRTC_SPL_MUL_16_32_RSFT16(int16_t a,
|
||||
int32_t b) {
|
||||
int32_t value32 = 0, b1 = 0, b2 = 0;
|
||||
int32_t a1 = 0;
|
||||
|
||||
__asm __volatile(
|
||||
#if defined(MIPS32_R2_LE)
|
||||
"seh %[a1], %[a] \n\t"
|
||||
#else
|
||||
"sll %[a1], %[a], 16 \n\t"
|
||||
"sra %[a1], %[a1], 16 \n\t"
|
||||
#endif
|
||||
"andi %[b2], %[b], 0xFFFF \n\t"
|
||||
"sra %[b1], %[b], 16 \n\t"
|
||||
"sra %[b2], %[b2], 1 \n\t"
|
||||
"mul %[value32], %[a1], %[b1] \n\t"
|
||||
"mul %[b2], %[a1], %[b2] \n\t"
|
||||
"addiu %[b2], %[b2], 0x4000 \n\t"
|
||||
"sra %[b2], %[b2], 15 \n\t"
|
||||
"addu %[value32], %[value32], %[b2] \n\t"
|
||||
: [value32] "=&r" (value32), [b1] "=&r" (b1), [b2] "=&r" (b2),
|
||||
[a1] "=&r" (a1)
|
||||
: [a] "r" (a), [b] "r" (b)
|
||||
: "hi", "lo"
|
||||
);
  return value32;
}
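For readers not fluent in MIPS assembly, the routine above performs the same fixed-point arithmetic as the generic macro it replaces: multiply a 16-bit value by a 32-bit value and keep the result shifted right by 16, splitting the 32-bit operand into halves so only 16x16 multiplies are needed. A rough plain-C restatement (an illustration of the intended arithmetic, not project code; the helper name is invented):

/* Sketch: approximately ((int64_t)a * b) >> 16, using only 16x16 -> 32 multiplies. */
static int32_t mul_16_32_rsft16_ref(int16_t a, int32_t b) {
  int32_t b_hi = b >> 16;                /* signed high half of b */
  int32_t b_lo = (b & 0x0000FFFF) >> 1;  /* low half, pre-shifted by 1 */
  return a * b_hi + ((a * b_lo + 0x4000) >> 15);  /* rounded low-half contribution */
}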
|
||||
|
||||
static __inline int32_t WEBRTC_SPL_MUL_32_32_RSFT32BI(int32_t a,
|
||||
int32_t b) {
|
||||
int32_t tmp = 0;
|
||||
|
||||
if ((32767 < a) || (a < 0))
|
||||
tmp = WEBRTC_SPL_MUL_16_32_RSFT16(((int16_t)(a >> 16)), b);
|
||||
tmp += WEBRTC_SPL_MUL_16_32_RSFT16(((int16_t)((a & 0x0000FFFF) >> 1)),
|
||||
b) >> 15;
|
||||
|
||||
return tmp;
|
||||
}
|
||||
|
||||
static __inline int32_t WEBRTC_SPL_MUL_32_32_RSFT32(int16_t a,
|
||||
int16_t b,
|
||||
int32_t c) {
|
||||
int32_t tmp1 = 0, tmp2 = 0, tmp3 = 0, tmp4 = 0;
|
||||
|
||||
__asm __volatile(
|
||||
"sra %[tmp1], %[c], 16 \n\t"
|
||||
"andi %[tmp2], %[c], 0xFFFF \n\t"
|
||||
#if defined(MIPS32_R2_LE)
|
||||
"seh %[a], %[a] \n\t"
|
||||
"seh %[b], %[b] \n\t"
|
||||
#else
|
||||
"sll %[a], %[a], 16 \n\t"
|
||||
"sra %[a], %[a], 16 \n\t"
|
||||
"sll %[b], %[b], 16 \n\t"
|
||||
"sra %[b], %[b], 16 \n\t"
|
||||
#endif
|
||||
"sra %[tmp2], %[tmp2], 1 \n\t"
|
||||
"mul %[tmp3], %[a], %[tmp2] \n\t"
|
||||
"mul %[tmp4], %[b], %[tmp2] \n\t"
|
||||
"mul %[tmp2], %[a], %[tmp1] \n\t"
|
||||
"mul %[tmp1], %[b], %[tmp1] \n\t"
|
||||
#if defined(MIPS_DSP_R1_LE)
|
||||
"shra_r.w %[tmp3], %[tmp3], 15 \n\t"
|
||||
"shra_r.w %[tmp4], %[tmp4], 15 \n\t"
|
||||
#else
|
||||
"addiu %[tmp3], %[tmp3], 0x4000 \n\t"
|
||||
"sra %[tmp3], %[tmp3], 15 \n\t"
|
||||
"addiu %[tmp4], %[tmp4], 0x4000 \n\t"
|
||||
"sra %[tmp4], %[tmp4], 15 \n\t"
|
||||
#endif
|
||||
"addu %[tmp3], %[tmp3], %[tmp2] \n\t"
|
||||
"addu %[tmp4], %[tmp4], %[tmp1] \n\t"
|
||||
"sra %[tmp4], %[tmp4], 16 \n\t"
|
||||
"addu %[tmp1], %[tmp3], %[tmp4] \n\t"
|
||||
: [tmp1] "=&r" (tmp1), [tmp2] "=&r" (tmp2),
|
||||
[tmp3] "=&r" (tmp3), [tmp4] "=&r" (tmp4),
|
||||
[a] "+r" (a), [b] "+r" (b)
|
||||
: [c] "r" (c)
|
||||
: "hi", "lo"
|
||||
);
|
||||
return tmp1;
|
||||
}
|
||||
|
||||
#if defined(MIPS_DSP_R1_LE)
|
||||
static __inline int16_t WebRtcSpl_SatW32ToW16(int32_t value32) {
|
||||
__asm __volatile(
|
||||
"shll_s.w %[value32], %[value32], 16 \n\t"
|
||||
"sra %[value32], %[value32], 16 \n\t"
|
||||
: [value32] "+r" (value32)
|
||||
:
|
||||
);
|
||||
int16_t out16 = (int16_t)value32;
|
||||
return out16;
|
||||
}
|
||||
|
||||
static __inline int16_t WebRtcSpl_AddSatW16(int16_t a, int16_t b) {
|
||||
int32_t value32 = 0;
|
||||
|
||||
__asm __volatile(
|
||||
"addq_s.ph %[value32], %[a], %[b] \n\t"
|
||||
: [value32] "=r" (value32)
|
||||
: [a] "r" (a), [b] "r" (b)
|
||||
);
|
||||
return (int16_t)value32;
|
||||
}
|
||||
|
||||
static __inline int32_t WebRtcSpl_AddSatW32(int32_t l_var1, int32_t l_var2) {
|
||||
int32_t l_sum;
|
||||
|
||||
__asm __volatile(
|
||||
"addq_s.w %[l_sum], %[l_var1], %[l_var2] \n\t"
|
||||
: [l_sum] "=r" (l_sum)
|
||||
: [l_var1] "r" (l_var1), [l_var2] "r" (l_var2)
|
||||
);
|
||||
|
||||
return l_sum;
|
||||
}
|
||||
|
||||
static __inline int16_t WebRtcSpl_SubSatW16(int16_t var1, int16_t var2) {
|
||||
int32_t value32;
|
||||
|
||||
__asm __volatile(
|
||||
"subq_s.ph %[value32], %[var1], %[var2] \n\t"
|
||||
: [value32] "=r" (value32)
|
||||
: [var1] "r" (var1), [var2] "r" (var2)
|
||||
);
|
||||
|
||||
return (int16_t)value32;
|
||||
}
|
||||
|
||||
static __inline int32_t WebRtcSpl_SubSatW32(int32_t l_var1, int32_t l_var2) {
|
||||
int32_t l_diff;
|
||||
|
||||
__asm __volatile(
|
||||
"subq_s.w %[l_diff], %[l_var1], %[l_var2] \n\t"
|
||||
: [l_diff] "=r" (l_diff)
|
||||
: [l_var1] "r" (l_var1), [l_var2] "r" (l_var2)
|
||||
);
|
||||
|
||||
return l_diff;
|
||||
}
|
||||
#endif
|
||||
|
||||
static __inline int16_t WebRtcSpl_GetSizeInBits(uint32_t n) {
|
||||
int bits = 0;
|
||||
int i32 = 32;
|
||||
|
||||
__asm __volatile(
|
||||
"clz %[bits], %[n] \n\t"
|
||||
"subu %[bits], %[i32], %[bits] \n\t"
|
||||
: [bits] "=&r" (bits)
|
||||
: [n] "r" (n), [i32] "r" (i32)
|
||||
);
|
||||
|
||||
return bits;
|
||||
}
|
||||
|
||||
static __inline int WebRtcSpl_NormW32(int32_t a) {
|
||||
int zeros = 0;
|
||||
|
||||
__asm __volatile(
|
||||
".set push \n\t"
|
||||
".set noreorder \n\t"
|
||||
"bnez %[a], 1f \n\t"
|
||||
" sra %[zeros], %[a], 31 \n\t"
|
||||
"b 2f \n\t"
|
||||
" move %[zeros], $zero \n\t"
|
||||
"1: \n\t"
|
||||
"xor %[zeros], %[a], %[zeros] \n\t"
|
||||
"clz %[zeros], %[zeros] \n\t"
|
||||
"addiu %[zeros], %[zeros], -1 \n\t"
|
||||
"2: \n\t"
|
||||
".set pop \n\t"
|
||||
: [zeros]"=&r"(zeros)
|
||||
: [a] "r" (a)
|
||||
);
|
||||
|
||||
return zeros;
|
||||
}
|
||||
|
||||
static __inline int WebRtcSpl_NormU32(uint32_t a) {
|
||||
int zeros = 0;
|
||||
|
||||
__asm __volatile(
|
||||
"clz %[zeros], %[a] \n\t"
|
||||
: [zeros] "=r" (zeros)
|
||||
: [a] "r" (a)
|
||||
);
|
||||
|
||||
return (zeros & 0x1f);
|
||||
}
|
||||
|
||||
static __inline int WebRtcSpl_NormW16(int16_t a) {
|
||||
int zeros = 0;
|
||||
int a0 = a << 16;
|
||||
|
||||
__asm __volatile(
|
||||
".set push \n\t"
|
||||
".set noreorder \n\t"
|
||||
"bnez %[a0], 1f \n\t"
|
||||
" sra %[zeros], %[a0], 31 \n\t"
|
||||
"b 2f \n\t"
|
||||
" move %[zeros], $zero \n\t"
|
||||
"1: \n\t"
|
||||
"xor %[zeros], %[a0], %[zeros] \n\t"
|
||||
"clz %[zeros], %[zeros] \n\t"
|
||||
"addiu %[zeros], %[zeros], -1 \n\t"
|
||||
"2: \n\t"
|
||||
".set pop \n\t"
|
||||
: [zeros]"=&r"(zeros)
|
||||
: [a0] "r" (a0)
|
||||
);
|
||||
|
||||
return zeros;
|
||||
}
|
||||
|
||||
static __inline int32_t WebRtc_MulAccumW16(int16_t a,
|
||||
int16_t b,
|
||||
int32_t c) {
|
||||
int32_t res = 0, c1 = 0;
|
||||
__asm __volatile(
|
||||
#if defined(MIPS32_R2_LE)
|
||||
"seh %[a], %[a] \n\t"
|
||||
"seh %[b], %[b] \n\t"
|
||||
#else
|
||||
"sll %[a], %[a], 16 \n\t"
|
||||
"sll %[b], %[b], 16 \n\t"
|
||||
"sra %[a], %[a], 16 \n\t"
|
||||
"sra %[b], %[b], 16 \n\t"
|
||||
#endif
|
||||
"mul %[res], %[a], %[b] \n\t"
|
||||
"addu %[c1], %[c], %[res] \n\t"
|
||||
: [c1] "=r" (c1), [res] "=&r" (res)
|
||||
: [a] "r" (a), [b] "r" (b), [c] "r" (c)
|
||||
: "hi", "lo"
|
||||
);
|
||||
return (c1);
}

#endif // WEBRTC_SPL_SPL_INL_MIPS_H_
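The addq_s / subq_s style instructions used throughout this header perform in hardware the saturating arithmetic that the portable fallbacks spell out by hand. As a reference for what saturation means here, a plain-C sketch of the 16-bit case, mirroring the generic WebRtcSpl_SatW32ToW16 / WebRtcSpl_AddSatW16 fallbacks shown elsewhere in this diff (helper names below are invented):

/* Sketch: clamp a 32-bit intermediate into int16_t range, then build a
 * saturating 16-bit add on top of it, as the DSP instructions do directly. */
static int16_t sat_w32_to_w16_ref(int32_t v) {
  if (v > 32767) return 32767;
  if (v < -32768) return -32768;
  return (int16_t)v;
}

static int16_t add_sat_w16_ref(int16_t a, int16_t b) {
  return sat_w32_to_w16_ref((int32_t)a + (int32_t)b);
}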
|
@ -18,55 +18,109 @@ struct RealFFT {
|
||||
int order;
|
||||
};
|
||||
|
||||
struct RealFFT* WebRtcSpl_CreateRealFFT(int order) {
|
||||
struct RealFFT* WebRtcSpl_CreateRealFFTC(int order) {
|
||||
struct RealFFT* self = NULL;
|
||||
|
||||
// This constraint comes from ComplexFFT().
|
||||
if (order > 10 || order < 0) {
|
||||
if (order > kMaxFFTOrder || order < 0) {
|
||||
return NULL;
|
||||
}
|
||||
|
||||
self = malloc(sizeof(struct RealFFT));
|
||||
if (self == NULL) {
|
||||
return NULL;
|
||||
}
|
||||
self->order = order;
|
||||
|
||||
return self;
|
||||
}
|
||||
|
||||
void WebRtcSpl_FreeRealFFT(struct RealFFT* self) {
|
||||
free(self);
|
||||
void WebRtcSpl_FreeRealFFTC(struct RealFFT* self) {
|
||||
if (self != NULL) {
|
||||
free(self);
|
||||
}
|
||||
}
|
||||
|
||||
// WebRtcSpl_ComplexFFT and WebRtcSpl_ComplexIFFT use in-place algorithm,
// so copy data from data_in to data_out in the next two functions.
// The C version FFT functions (i.e. WebRtcSpl_RealForwardFFTC and
// WebRtcSpl_RealInverseFFTC) are real-valued FFT wrappers for complex-valued
// FFT implementation in SPL.
|
||||
|
||||
int WebRtcSpl_RealForwardFFTC(struct RealFFT* self,
|
||||
const int16_t* data_in,
|
||||
int16_t* data_out) {
|
||||
memcpy(data_out, data_in, sizeof(int16_t) * (1 << (self->order + 1)));
|
||||
WebRtcSpl_ComplexBitReverse(data_out, self->order);
|
||||
return WebRtcSpl_ComplexFFT(data_out, self->order, 1);
|
||||
const int16_t* real_data_in,
|
||||
int16_t* complex_data_out) {
|
||||
int i = 0;
|
||||
int j = 0;
|
||||
int result = 0;
|
||||
int n = 1 << self->order;
|
||||
// The complex-valued FFT implementation needs a buffer to hold 2^order
|
||||
// 16-bit COMPLEX numbers, for both time and frequency data.
|
||||
int16_t complex_buffer[2 << kMaxFFTOrder];
|
||||
|
||||
// Insert zeros to the imaginary parts for complex forward FFT input.
|
||||
for (i = 0, j = 0; i < n; i += 1, j += 2) {
|
||||
complex_buffer[j] = real_data_in[i];
|
||||
complex_buffer[j + 1] = 0;
|
||||
};
|
||||
|
||||
WebRtcSpl_ComplexBitReverse(complex_buffer, self->order);
|
||||
result = WebRtcSpl_ComplexFFT(complex_buffer, self->order, 1);
|
||||
|
||||
// For real FFT output, use only the first N + 2 elements from
|
||||
// complex forward FFT.
|
||||
memcpy(complex_data_out, complex_buffer, sizeof(int16_t) * (n + 2));
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
int WebRtcSpl_RealInverseFFTC(struct RealFFT* self,
|
||||
const int16_t* data_in,
|
||||
int16_t* data_out) {
|
||||
memcpy(data_out, data_in, sizeof(int16_t) * (1 << (self->order + 1)));
|
||||
WebRtcSpl_ComplexBitReverse(data_out, self->order);
|
||||
return WebRtcSpl_ComplexIFFT(data_out, self->order, 1);
|
||||
const int16_t* complex_data_in,
|
||||
int16_t* real_data_out) {
|
||||
int i = 0;
|
||||
int j = 0;
|
||||
int result = 0;
|
||||
int n = 1 << self->order;
|
||||
// Create the buffer specific to complex-valued FFT implementation.
|
||||
int16_t complex_buffer[2 << kMaxFFTOrder];
|
||||
|
||||
// For n-point FFT, first copy the first n + 2 elements into complex
// FFT, then construct the remaining n - 2 elements by real FFT's
// conjugate-symmetric properties.
|
||||
memcpy(complex_buffer, complex_data_in, sizeof(int16_t) * (n + 2));
|
||||
for (i = n + 2; i < 2 * n; i += 2) {
|
||||
complex_buffer[i] = complex_data_in[2 * n - i];
|
||||
complex_buffer[i + 1] = -complex_data_in[2 * n - i + 1];
|
||||
}
|
||||
|
||||
WebRtcSpl_ComplexBitReverse(complex_buffer, self->order);
|
||||
result = WebRtcSpl_ComplexIFFT(complex_buffer, self->order, 1);
|
||||
|
||||
// Strip out the imaginary parts of the complex inverse FFT output.
|
||||
for (i = 0, j = 0; i < n; i += 1, j += 2) {
|
||||
real_data_out[i] = complex_buffer[j];
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
#if defined(WEBRTC_DETECT_ARM_NEON) || defined(WEBRTC_ARCH_ARM_NEON)
|
||||
// TODO(kma): Replace the following function bodies into optimized functions
|
||||
// for ARM Neon.
|
||||
struct RealFFT* WebRtcSpl_CreateRealFFTNeon(int order) {
|
||||
return WebRtcSpl_CreateRealFFTC(order);
|
||||
}
|
||||
|
||||
void WebRtcSpl_FreeRealFFTNeon(struct RealFFT* self) {
|
||||
WebRtcSpl_FreeRealFFTC(self);
|
||||
}
|
||||
|
||||
int WebRtcSpl_RealForwardFFTNeon(struct RealFFT* self,
|
||||
const int16_t* data_in,
|
||||
int16_t* data_out) {
|
||||
return WebRtcSpl_RealForwardFFTC(self, data_in, data_out);
|
||||
const int16_t* real_data_in,
|
||||
int16_t* complex_data_out) {
|
||||
return WebRtcSpl_RealForwardFFTC(self, real_data_in, complex_data_out);
|
||||
}
|
||||
|
||||
int WebRtcSpl_RealInverseFFTNeon(struct RealFFT* self,
|
||||
const int16_t* data_in,
|
||||
int16_t* data_out) {
|
||||
return WebRtcSpl_RealInverseFFTC(self, data_in, data_out);
|
||||
const int16_t* complex_data_in,
|
||||
int16_t* real_data_out) {
|
||||
return WebRtcSpl_RealInverseFFTC(self, complex_data_in, real_data_out);
|
||||
}
|
||||
#endif
|
||||
#endif // WEBRTC_DETECT_ARM_NEON || WEBRTC_ARCH_ARM_NEON
|
||||
|
@ -17,9 +17,17 @@
|
||||
namespace webrtc {
|
||||
namespace {
|
||||
|
||||
const int kOrder = 4;
|
||||
const int kLength = 1 << (kOrder + 1); // +1 to hold complex data.
|
||||
const int16_t kRefData[kLength] = {
|
||||
// FFT order.
|
||||
const int kOrder = 5;
|
||||
// Lengths for real FFT's time and frequency buffers.
|
||||
// For N-point FFT, the length requirements from API are N and N+2 respectively.
|
||||
const int kTimeDataLength = 1 << kOrder;
|
||||
const int kFreqDataLength = (1 << kOrder) + 2;
|
||||
// For complex FFT's time and freq buffer. The implementation requires
|
||||
// 2*N 16-bit words.
|
||||
const int kComplexFftDataLength = 2 << kOrder;
|
||||
// Reference data for time signal.
|
||||
const int16_t kRefData[kTimeDataLength] = {
|
||||
11739, 6848, -8688, 31980, -30295, 25242, 27085, 19410,
|
||||
-26299, 15607, -10791, 11778, -23819, 14498, -25772, 10076,
|
||||
1173, 6848, -8688, 31980, -30295, 2522, 27085, 19410,
|
||||
@ -40,36 +48,58 @@ TEST_F(RealFFTTest, CreateFailsOnBadInput) {
|
||||
EXPECT_TRUE(fft == NULL);
|
||||
}
|
||||
|
||||
// TODO(andrew): This won't always be the case, but verifies the current code
|
||||
// at least.
|
||||
TEST_F(RealFFTTest, RealAndComplexAreIdentical) {
|
||||
int16_t real_data[kLength] = {0};
|
||||
int16_t real_data_out[kLength] = {0};
|
||||
int16_t complex_data[kLength] = {0};
|
||||
memcpy(real_data, kRefData, sizeof(kRefData));
|
||||
memcpy(complex_data, kRefData, sizeof(kRefData));
|
||||
TEST_F(RealFFTTest, RealAndComplexMatch) {
|
||||
int i = 0;
|
||||
int j = 0;
|
||||
int16_t real_fft_time[kTimeDataLength] = {0};
|
||||
int16_t real_fft_freq[kFreqDataLength] = {0};
|
||||
// One common buffer for complex FFT's time and frequency data.
|
||||
int16_t complex_fft_buff[kComplexFftDataLength] = {0};
|
||||
|
||||
// Prepare the inputs to forward FFT's.
|
||||
memcpy(real_fft_time, kRefData, sizeof(kRefData));
|
||||
for (i = 0, j = 0; i < kTimeDataLength; i += 1, j += 2) {
|
||||
complex_fft_buff[j] = kRefData[i];
|
||||
complex_fft_buff[j + 1] = 0; // Insert zero's to imaginary parts.
|
||||
};
|
||||
|
||||
// Create and run real forward FFT.
|
||||
RealFFT* fft = WebRtcSpl_CreateRealFFT(kOrder);
|
||||
EXPECT_TRUE(fft != NULL);
|
||||
EXPECT_EQ(0, WebRtcSpl_RealForwardFFT(fft, real_fft_time, real_fft_freq));
|
||||
|
||||
EXPECT_EQ(0, WebRtcSpl_RealForwardFFT(fft, real_data, real_data_out));
|
||||
WebRtcSpl_ComplexBitReverse(complex_data, kOrder);
|
||||
EXPECT_EQ(0, WebRtcSpl_ComplexFFT(complex_data, kOrder, 1));
|
||||
// Run complex forward FFT.
|
||||
WebRtcSpl_ComplexBitReverse(complex_fft_buff, kOrder);
|
||||
EXPECT_EQ(0, WebRtcSpl_ComplexFFT(complex_fft_buff, kOrder, 1));
|
||||
|
||||
for (int i = 0; i < kLength; i++) {
|
||||
EXPECT_EQ(real_data_out[i], complex_data[i]);
|
||||
// Verify the results between complex and real forward FFT.
|
||||
for (i = 0; i < kFreqDataLength; i++) {
|
||||
EXPECT_EQ(real_fft_freq[i], complex_fft_buff[i]);
|
||||
}
|
||||
|
||||
memcpy(complex_data, kRefData, sizeof(kRefData));
|
||||
// Prepare the inputs to inverse real FFT.
|
||||
// We use whatever data is in complex_fft_buff[] since we don't care
|
||||
// about data contents. Only kFreqDataLength 16-bit words are copied
|
||||
// from complex_fft_buff to real_fft_freq since remaining words (2nd half)
|
||||
// are conjugate-symmetric to the first half in theory.
|
||||
memcpy(real_fft_freq, complex_fft_buff, sizeof(real_fft_freq));
|
||||
|
||||
int real_scale = WebRtcSpl_RealInverseFFT(fft, real_data, real_data_out);
|
||||
// Run real inverse FFT.
|
||||
int real_scale = WebRtcSpl_RealInverseFFT(fft, real_fft_freq, real_fft_time);
|
||||
EXPECT_GE(real_scale, 0);
|
||||
WebRtcSpl_ComplexBitReverse(complex_data, kOrder);
|
||||
int complex_scale = WebRtcSpl_ComplexIFFT(complex_data, kOrder, 1);
|
||||
|
||||
// Run complex inverse FFT.
|
||||
WebRtcSpl_ComplexBitReverse(complex_fft_buff, kOrder);
|
||||
int complex_scale = WebRtcSpl_ComplexIFFT(complex_fft_buff, kOrder, 1);
|
||||
|
||||
// Verify the results between complex and real inverse FFT.
|
||||
// They are not bit-exact, since complex IFFT doesn't produce
|
||||
// exactly conjugate-symmetric data (between first and second half).
|
||||
EXPECT_EQ(real_scale, complex_scale);
|
||||
for (int i = 0; i < kLength; i++) {
|
||||
EXPECT_EQ(real_data_out[i], complex_data[i]);
|
||||
for (i = 0, j = 0; i < kTimeDataLength; i += 1, j += 2) {
|
||||
EXPECT_LE(abs(real_fft_time[i] - complex_fft_buff[j]), 1);
|
||||
}
|
||||
|
||||
WebRtcSpl_FreeRealFFT(fft);
|
||||
}
|
||||
|
||||
|
@ -529,12 +529,14 @@ TEST_F(SplTest, CrossCorrelationTest) {
|
||||
// are not bit-exact.
|
||||
const int32_t kExpected[kCrossCorrelationDimension] =
|
||||
{-266947903, -15579555, -171282001};
|
||||
const int32_t* expected = kExpected;
|
||||
#if !defined(MIPS32_LE)
|
||||
const int32_t kExpectedNeon[kCrossCorrelationDimension] =
|
||||
{-266947901, -15579553, -171281999};
|
||||
const int32_t* expected = kExpected;
|
||||
if (WebRtcSpl_CrossCorrelation != WebRtcSpl_CrossCorrelationC) {
|
||||
expected = kExpectedNeon;
|
||||
}
|
||||
#endif
|
||||
for (int i = 0; i < kCrossCorrelationDimension; ++i) {
|
||||
EXPECT_EQ(expected[i], vector32[i]);
|
||||
}
|
||||
|
@ -28,6 +28,8 @@ MinValueW32 WebRtcSpl_MinValueW32;
|
||||
CrossCorrelation WebRtcSpl_CrossCorrelation;
|
||||
DownsampleFast WebRtcSpl_DownsampleFast;
|
||||
ScaleAndAddVectorsWithRound WebRtcSpl_ScaleAndAddVectorsWithRound;
|
||||
CreateRealFFT WebRtcSpl_CreateRealFFT;
|
||||
FreeRealFFT WebRtcSpl_FreeRealFFT;
|
||||
RealForwardFFT WebRtcSpl_RealForwardFFT;
|
||||
RealInverseFFT WebRtcSpl_RealInverseFFT;
|
||||
|
||||
@ -45,6 +47,8 @@ static void InitPointersToC() {
|
||||
WebRtcSpl_DownsampleFast = WebRtcSpl_DownsampleFastC;
|
||||
WebRtcSpl_ScaleAndAddVectorsWithRound =
|
||||
WebRtcSpl_ScaleAndAddVectorsWithRoundC;
|
||||
WebRtcSpl_CreateRealFFT = WebRtcSpl_CreateRealFFTC;
|
||||
WebRtcSpl_FreeRealFFT = WebRtcSpl_FreeRealFFTC;
|
||||
WebRtcSpl_RealForwardFFT = WebRtcSpl_RealForwardFFTC;
|
||||
WebRtcSpl_RealInverseFFT = WebRtcSpl_RealInverseFFTC;
|
||||
}
|
||||
@ -63,6 +67,8 @@ static void InitPointersToNeon() {
|
||||
WebRtcSpl_DownsampleFast = WebRtcSpl_DownsampleFastNeon;
|
||||
WebRtcSpl_ScaleAndAddVectorsWithRound =
|
||||
WebRtcSpl_ScaleAndAddVectorsWithRoundNeon;
|
||||
WebRtcSpl_CreateRealFFT = WebRtcSpl_CreateRealFFTNeon;
|
||||
WebRtcSpl_FreeRealFFT = WebRtcSpl_FreeRealFFTNeon;
|
||||
WebRtcSpl_RealForwardFFT = WebRtcSpl_RealForwardFFTNeon;
|
||||
WebRtcSpl_RealInverseFFT = WebRtcSpl_RealInverseFFTNeon;
|
||||
}
|
||||
@ -76,16 +82,20 @@ static void InitPointersToMIPS() {
|
||||
WebRtcSpl_MaxValueW32 = WebRtcSpl_MaxValueW32_mips;
|
||||
WebRtcSpl_MinValueW16 = WebRtcSpl_MinValueW16_mips;
|
||||
WebRtcSpl_MinValueW32 = WebRtcSpl_MinValueW32_mips;
|
||||
WebRtcSpl_CrossCorrelation = WebRtcSpl_CrossCorrelationC;
|
||||
WebRtcSpl_CrossCorrelation = WebRtcSpl_CrossCorrelation_mips;
|
||||
WebRtcSpl_DownsampleFast = WebRtcSpl_DownsampleFast_mips;
|
||||
WebRtcSpl_ScaleAndAddVectorsWithRound =
|
||||
WebRtcSpl_ScaleAndAddVectorsWithRoundC;
|
||||
WebRtcSpl_CreateRealFFT = WebRtcSpl_CreateRealFFTC;
|
||||
WebRtcSpl_FreeRealFFT = WebRtcSpl_FreeRealFFTC;
|
||||
WebRtcSpl_RealForwardFFT = WebRtcSpl_RealForwardFFTC;
|
||||
WebRtcSpl_RealInverseFFT = WebRtcSpl_RealInverseFFTC;
|
||||
#if defined(MIPS_DSP_R1_LE)
|
||||
WebRtcSpl_MaxAbsValueW32 = WebRtcSpl_MaxAbsValueW32_mips;
|
||||
WebRtcSpl_ScaleAndAddVectorsWithRound =
|
||||
WebRtcSpl_ScaleAndAddVectorsWithRound_mips;
|
||||
#else
  WebRtcSpl_MaxAbsValueW32 = WebRtcSpl_MaxAbsValueW32C;
  WebRtcSpl_ScaleAndAddVectorsWithRound =
      WebRtcSpl_ScaleAndAddVectorsWithRoundC;
#endif
}
#endif
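The InitPointersToC / InitPointersToNeon / InitPointersToMIPS changes above all follow the same run-time dispatch pattern: each optimizable routine is exposed as a global function pointer, and init code binds it once to the C, NEON, or MIPS variant. Stripped of the WebRTC specifics, the pattern looks roughly like this (all names below are illustrative only, not the library's):

#include <stdint.h>

/* Hypothetical miniature of the dispatch pattern used by spl_init.c. */
typedef int (*ScaleAndAddFn)(const int16_t* a, const int16_t* b,
                             int16_t* out, int length);

static int ScaleAndAdd_C(const int16_t* a, const int16_t* b,
                         int16_t* out, int length) {
  for (int i = 0; i < length; ++i) out[i] = (int16_t)(a[i] + b[i]);
  return 0;
}

ScaleAndAddFn ScaleAndAdd = ScaleAndAdd_C;  /* default binding */

void InitDispatch(int have_dsp) {
  /* A real build tests CPU features (or uses #ifdefs) and rebinds the
   * pointer to an optimized implementation when one is available. */
  if (have_dsp) {
    /* ScaleAndAdd = ScaleAndAdd_mips;  -- optimized variant, if present */
  }
}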
|
||||
|
@ -0,0 +1,207 @@
|
||||
/*
|
||||
* Written by Wilco Dijkstra, 1996. The following email exchange establishes the
|
||||
* license.
|
||||
*
|
||||
* From: Wilco Dijkstra <Wilco.Dijkstra@ntlworld.com>
|
||||
* Date: Fri, Jun 24, 2011 at 3:20 AM
|
||||
* Subject: Re: sqrt routine
|
||||
* To: Kevin Ma <kma@google.com>
|
||||
* Hi Kevin,
|
||||
* Thanks for asking. Those routines are public domain (originally posted to
|
||||
* comp.sys.arm a long time ago), so you can use them freely for any purpose.
|
||||
* Cheers,
|
||||
* Wilco
|
||||
*
|
||||
* ----- Original Message -----
|
||||
* From: "Kevin Ma" <kma@google.com>
|
||||
* To: <Wilco.Dijkstra@ntlworld.com>
|
||||
* Sent: Thursday, June 23, 2011 11:44 PM
|
||||
* Subject: Fwd: sqrt routine
|
||||
* Hi Wilco,
|
||||
* I saw your sqrt routine from several web sites, including
|
||||
* http://www.finesse.demon.co.uk/steven/sqrt.html.
|
||||
* Just wonder if there's any copyright information with your Successive
|
||||
* approximation routines, or if I can freely use it for any purpose.
|
||||
* Thanks.
|
||||
* Kevin
|
||||
*/
|
||||
|
||||
// Minor modifications in code style for WebRTC, 2012.
|
||||
// Code optimizations for MIPS, 2013.
|
||||
|
||||
#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
|
||||
|
||||
/*
 * Algorithm:
 *      Successive approximation of the equation (root + delta) ^ 2 = N
 *      until delta < 1. If delta < 1 we have the integer part of SQRT (N).
 *      Use delta = 2^i for i = 15 .. 0.
 *
 *      Output precision is 16 bits. Note for large input values (close to
 *      0x7FFFFFFF), bit 15 (the highest bit of the low 16-bit half word)
 *      contains the MSB information (a non-sign value). Do with caution
 *      if you need to cast the output to int16_t type.
 *
 *      If the input value is negative, it returns 0.
 */
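Before the MIPS-specific routine below, it may help to see the algorithm from the comment written out in plain C: try delta = 2^i from the highest bit down, and keep each trial bit whenever the squared candidate still fits under N. This is only a sketch of the generic idea (the helper name is invented), not the optimized code that follows:

#include <stdint.h>

/* Sketch: integer floor(sqrt(value)) by successive approximation, as described above. */
static uint32_t sqrt_floor_ref(int32_t value) {
  uint32_t n = (value < 0) ? 0u : (uint32_t)value;  /* negative input -> 0 */
  uint32_t root = 0;
  for (int i = 15; i >= 0; --i) {
    uint32_t trial = root + (1u << i);
    if ((uint64_t)trial * trial <= n) {
      root = trial;  /* keep the bit: (root + 2^i)^2 is still <= N */
    }
  }
  return root;
}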
|
||||
|
||||
|
||||
int32_t WebRtcSpl_SqrtFloor(int32_t value)
|
||||
{
|
||||
int32_t root = 0, tmp1, tmp2, tmp3, tmp4;
|
||||
|
||||
__asm __volatile(
|
||||
".set push \n\t"
|
||||
".set noreorder \n\t"
|
||||
|
||||
"lui %[tmp1], 0x4000 \n\t"
|
||||
"slt %[tmp2], %[value], %[tmp1] \n\t"
|
||||
"sub %[tmp3], %[value], %[tmp1] \n\t"
|
||||
"lui %[tmp1], 0x1 \n\t"
|
||||
"or %[tmp4], %[root], %[tmp1] \n\t"
|
||||
"movz %[value], %[tmp3], %[tmp2] \n\t"
|
||||
"movz %[root], %[tmp4], %[tmp2] \n\t"
|
||||
|
||||
"addiu %[tmp1], $0, 0x4000 \n\t"
|
||||
"addu %[tmp1], %[tmp1], %[root] \n\t"
|
||||
"sll %[tmp1], 14 \n\t"
|
||||
"slt %[tmp2], %[value], %[tmp1] \n\t"
|
||||
"subu %[tmp3], %[value], %[tmp1] \n\t"
|
||||
"ori %[tmp4], %[root], 0x8000 \n\t"
|
||||
"movz %[value], %[tmp3], %[tmp2] \n\t"
|
||||
"movz %[root], %[tmp4], %[tmp2] \n\t"
|
||||
|
||||
"addiu %[tmp1], $0, 0x2000 \n\t"
|
||||
"addu %[tmp1], %[tmp1], %[root] \n\t"
|
||||
"sll %[tmp1], 13 \n\t"
|
||||
"slt %[tmp2], %[value], %[tmp1] \n\t"
|
||||
"subu %[tmp3], %[value], %[tmp1] \n\t"
|
||||
"ori %[tmp4], %[root], 0x4000 \n\t"
|
||||
"movz %[value], %[tmp3], %[tmp2] \n\t"
|
||||
"movz %[root], %[tmp4], %[tmp2] \n\t"
|
||||
|
||||
"addiu %[tmp1], $0, 0x1000 \n\t"
|
||||
"addu %[tmp1], %[tmp1], %[root] \n\t"
|
||||
"sll %[tmp1], 12 \n\t"
|
||||
"slt %[tmp2], %[value], %[tmp1] \n\t"
|
||||
"subu %[tmp3], %[value], %[tmp1] \n\t"
|
||||
"ori %[tmp4], %[root], 0x2000 \n\t"
|
||||
"movz %[value], %[tmp3], %[tmp2] \n\t"
|
||||
"movz %[root], %[tmp4], %[tmp2] \n\t"
|
||||
|
||||
"addiu %[tmp1], $0, 0x800 \n\t"
|
||||
"addu %[tmp1], %[tmp1], %[root] \n\t"
|
||||
"sll %[tmp1], 11 \n\t"
|
||||
"slt %[tmp2], %[value], %[tmp1] \n\t"
|
||||
"subu %[tmp3], %[value], %[tmp1] \n\t"
|
||||
"ori %[tmp4], %[root], 0x1000 \n\t"
|
||||
"movz %[value], %[tmp3], %[tmp2] \n\t"
|
||||
"movz %[root], %[tmp4], %[tmp2] \n\t"
|
||||
|
||||
"addiu %[tmp1], $0, 0x400 \n\t"
|
||||
"addu %[tmp1], %[tmp1], %[root] \n\t"
|
||||
"sll %[tmp1], 10 \n\t"
|
||||
"slt %[tmp2], %[value], %[tmp1] \n\t"
|
||||
"subu %[tmp3], %[value], %[tmp1] \n\t"
|
||||
"ori %[tmp4], %[root], 0x800 \n\t"
|
||||
"movz %[value], %[tmp3], %[tmp2] \n\t"
|
||||
"movz %[root], %[tmp4], %[tmp2] \n\t"
|
||||
|
||||
"addiu %[tmp1], $0, 0x200 \n\t"
|
||||
"addu %[tmp1], %[tmp1], %[root] \n\t"
|
||||
"sll %[tmp1], 9 \n\t"
|
||||
"slt %[tmp2], %[value], %[tmp1] \n\t"
|
||||
"subu %[tmp3], %[value], %[tmp1] \n\t"
|
||||
"ori %[tmp4], %[root], 0x400 \n\t"
|
||||
"movz %[value], %[tmp3], %[tmp2] \n\t"
|
||||
"movz %[root], %[tmp4], %[tmp2] \n\t"
|
||||
|
||||
"addiu %[tmp1], $0, 0x100 \n\t"
|
||||
"addu %[tmp1], %[tmp1], %[root] \n\t"
|
||||
"sll %[tmp1], 8 \n\t"
|
||||
"slt %[tmp2], %[value], %[tmp1] \n\t"
|
||||
"subu %[tmp3], %[value], %[tmp1] \n\t"
|
||||
"ori %[tmp4], %[root], 0x200 \n\t"
|
||||
"movz %[value], %[tmp3], %[tmp2] \n\t"
|
||||
"movz %[root], %[tmp4], %[tmp2] \n\t"
|
||||
|
||||
"addiu %[tmp1], $0, 0x80 \n\t"
|
||||
"addu %[tmp1], %[tmp1], %[root] \n\t"
|
||||
"sll %[tmp1], 7 \n\t"
|
||||
"slt %[tmp2], %[value], %[tmp1] \n\t"
|
||||
"subu %[tmp3], %[value], %[tmp1] \n\t"
|
||||
"ori %[tmp4], %[root], 0x100 \n\t"
|
||||
"movz %[value], %[tmp3], %[tmp2] \n\t"
|
||||
"movz %[root], %[tmp4], %[tmp2] \n\t"
|
||||
|
||||
"addiu %[tmp1], $0, 0x40 \n\t"
|
||||
"addu %[tmp1], %[tmp1], %[root] \n\t"
|
||||
"sll %[tmp1], 6 \n\t"
|
||||
"slt %[tmp2], %[value], %[tmp1] \n\t"
|
||||
"subu %[tmp3], %[value], %[tmp1] \n\t"
|
||||
"ori %[tmp4], %[root], 0x80 \n\t"
|
||||
"movz %[value], %[tmp3], %[tmp2] \n\t"
|
||||
"movz %[root], %[tmp4], %[tmp2] \n\t"
|
||||
|
||||
"addiu %[tmp1], $0, 0x20 \n\t"
|
||||
"addu %[tmp1], %[tmp1], %[root] \n\t"
|
||||
"sll %[tmp1], 5 \n\t"
|
||||
"slt %[tmp2], %[value], %[tmp1] \n\t"
|
||||
"subu %[tmp3], %[value], %[tmp1] \n\t"
|
||||
"ori %[tmp4], %[root], 0x40 \n\t"
|
||||
"movz %[value], %[tmp3], %[tmp2] \n\t"
|
||||
"movz %[root], %[tmp4], %[tmp2] \n\t"
|
||||
|
||||
"addiu %[tmp1], $0, 0x10 \n\t"
|
||||
"addu %[tmp1], %[tmp1], %[root] \n\t"
|
||||
"sll %[tmp1], 4 \n\t"
|
||||
"slt %[tmp2], %[value], %[tmp1] \n\t"
|
||||
"subu %[tmp3], %[value], %[tmp1] \n\t"
|
||||
"ori %[tmp4], %[root], 0x20 \n\t"
|
||||
"movz %[value], %[tmp3], %[tmp2] \n\t"
|
||||
"movz %[root], %[tmp4], %[tmp2] \n\t"
|
||||
|
||||
"addiu %[tmp1], $0, 0x8 \n\t"
|
||||
"addu %[tmp1], %[tmp1], %[root] \n\t"
|
||||
"sll %[tmp1], 3 \n\t"
|
||||
"slt %[tmp2], %[value], %[tmp1] \n\t"
|
||||
"subu %[tmp3], %[value], %[tmp1] \n\t"
|
||||
"ori %[tmp4], %[root], 0x10 \n\t"
|
||||
"movz %[value], %[tmp3], %[tmp2] \n\t"
|
||||
"movz %[root], %[tmp4], %[tmp2] \n\t"
|
||||
|
||||
"addiu %[tmp1], $0, 0x4 \n\t"
|
||||
"addu %[tmp1], %[tmp1], %[root] \n\t"
|
||||
"sll %[tmp1], 2 \n\t"
|
||||
"slt %[tmp2], %[value], %[tmp1] \n\t"
|
||||
"subu %[tmp3], %[value], %[tmp1] \n\t"
|
||||
"ori %[tmp4], %[root], 0x8 \n\t"
|
||||
"movz %[value], %[tmp3], %[tmp2] \n\t"
|
||||
"movz %[root], %[tmp4], %[tmp2] \n\t"
|
||||
|
||||
"addiu %[tmp1], $0, 0x2 \n\t"
|
||||
"addu %[tmp1], %[tmp1], %[root] \n\t"
|
||||
"sll %[tmp1], 1 \n\t"
|
||||
"slt %[tmp2], %[value], %[tmp1] \n\t"
|
||||
"subu %[tmp3], %[value], %[tmp1] \n\t"
|
||||
"ori %[tmp4], %[root], 0x4 \n\t"
|
||||
"movz %[value], %[tmp3], %[tmp2] \n\t"
|
||||
"movz %[root], %[tmp4], %[tmp2] \n\t"
|
||||
|
||||
"addiu %[tmp1], $0, 0x1 \n\t"
|
||||
"addu %[tmp1], %[tmp1], %[root] \n\t"
|
||||
"slt %[tmp2], %[value], %[tmp1] \n\t"
|
||||
"ori %[tmp4], %[root], 0x2 \n\t"
|
||||
"movz %[root], %[tmp4], %[tmp2] \n\t"
|
||||
|
||||
".set pop \n\t"
|
||||
|
||||
: [root] "+r" (root), [value] "+r" (value),
|
||||
[tmp1] "=&r" (tmp1), [tmp2] "=&r" (tmp2),
|
||||
[tmp3] "=&r" (tmp3), [tmp4] "=&r" (tmp4)
|
||||
:
|
||||
);
|
||||
|
||||
return root >> 1;
|
||||
}
|
||||
|
@ -0,0 +1,56 @@
|
||||
/*
|
||||
* Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
|
||||
/*
|
||||
* This file contains implementations of the functions
|
||||
* WebRtcSpl_ScaleAndAddVectorsWithRound_mips()
|
||||
*/
|
||||
|
||||
#include "webrtc/common_audio/signal_processing/include/signal_processing_library.h"
|
||||
|
||||
int WebRtcSpl_ScaleAndAddVectorsWithRound_mips(const int16_t* in_vector1,
|
||||
int16_t in_vector1_scale,
|
||||
const int16_t* in_vector2,
|
||||
int16_t in_vector2_scale,
|
||||
int right_shifts,
|
||||
int16_t* out_vector,
|
||||
int length) {
|
||||
int16_t r0 = 0, r1 = 0;
|
||||
int16_t *in1 = (int16_t*)in_vector1;
|
||||
int16_t *in2 = (int16_t*)in_vector2;
|
||||
int16_t *out = out_vector;
|
||||
int i = 0, value32 = 0;
|
||||
|
||||
if (in_vector1 == NULL || in_vector2 == NULL || out_vector == NULL ||
|
||||
length <= 0 || right_shifts < 0) {
|
||||
return -1;
|
||||
}
|
||||
for (i = 0; i < length; i++) {
|
||||
__asm __volatile (
|
||||
"lh %[r0], 0(%[in1]) \n\t"
|
||||
"lh %[r1], 0(%[in2]) \n\t"
|
||||
"mult %[r0], %[in_vector1_scale] \n\t"
|
||||
"madd %[r1], %[in_vector2_scale] \n\t"
|
||||
"extrv_r.w %[value32], $ac0, %[right_shifts] \n\t"
|
||||
"addiu %[in1], %[in1], 2 \n\t"
|
||||
"addiu %[in2], %[in2], 2 \n\t"
|
||||
"sh %[value32], 0(%[out]) \n\t"
|
||||
"addiu %[out], %[out], 2 \n\t"
|
||||
: [value32] "=&r" (value32), [out] "+r" (out), [in1] "+r" (in1),
|
||||
[in2] "+r" (in2), [r0] "=&r" (r0), [r1] "=&r" (r1)
|
||||
: [in_vector1_scale] "r" (in_vector1_scale),
|
||||
[in_vector2_scale] "r" (in_vector2_scale),
|
||||
[right_shifts] "r" (right_shifts)
|
||||
: "hi", "lo", "memory"
    );
  }
  return 0;
}
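Per element, the DSP loop above computes a rounded, right-shifted weighted sum of the two input vectors: mult/madd accumulate the two products, and extrv_r.w extracts the accumulator with a rounding right shift. A plain-C restatement of that per-element arithmetic, mirroring the generic C fallback and shown only as a reference (helper name invented, right_shifts assumed >= 0 as the argument check above enforces):

/* Sketch: out[i] = round((in1[i] * scale1 + in2[i] * scale2) >> right_shifts). */
static void scale_add_round_ref(const int16_t* in1, int16_t scale1,
                                const int16_t* in2, int16_t scale2,
                                int right_shifts, int16_t* out, int length) {
  const int32_t round_value = (1 << right_shifts) >> 1;  /* 0 when right_shifts == 0 */
  for (int i = 0; i < length; ++i) {
    out[i] = (int16_t)(((int32_t)in1[i] * scale1 + (int32_t)in2[i] * scale2 +
                        round_value) >> right_shifts);
  }
}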
|
@ -1,704 +0,0 @@
|
||||
/*
|
||||
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
|
||||
/*
|
||||
* This file contains the Q14 radix-8 tables used in ARM9e optimizations.
|
||||
*
|
||||
*/
|
||||
|
||||
extern const int s_Q14S_8;
|
||||
const int s_Q14S_8 = 1024;
|
||||
extern const unsigned short t_Q14S_8[2032];
|
||||
const unsigned short t_Q14S_8[2032] = {
|
||||
0x4000,0x0000 ,0x4000,0x0000 ,0x4000,0x0000 ,
|
||||
0x22a3,0x187e ,0x3249,0x0c7c ,0x11a8,0x238e ,
|
||||
0x0000,0x2d41 ,0x22a3,0x187e ,0xdd5d,0x3b21 ,
|
||||
0xdd5d,0x3b21 ,0x11a8,0x238e ,0xb4be,0x3ec5 ,
|
||||
0xc000,0x4000 ,0x0000,0x2d41 ,0xa57e,0x2d41 ,
|
||||
0xac61,0x3b21 ,0xee58,0x3537 ,0xb4be,0x0c7c ,
|
||||
0xa57e,0x2d41 ,0xdd5d,0x3b21 ,0xdd5d,0xe782 ,
|
||||
0xac61,0x187e ,0xcdb7,0x3ec5 ,0x11a8,0xcac9 ,
|
||||
0x4000,0x0000 ,0x4000,0x0000 ,0x4000,0x0000 ,
|
||||
0x396b,0x0646 ,0x3cc8,0x0324 ,0x35eb,0x0964 ,
|
||||
0x3249,0x0c7c ,0x396b,0x0646 ,0x2aaa,0x1294 ,
|
||||
0x2aaa,0x1294 ,0x35eb,0x0964 ,0x1e7e,0x1b5d ,
|
||||
0x22a3,0x187e ,0x3249,0x0c7c ,0x11a8,0x238e ,
|
||||
0x1a46,0x1e2b ,0x2e88,0x0f8d ,0x0471,0x2afb ,
|
||||
0x11a8,0x238e ,0x2aaa,0x1294 ,0xf721,0x3179 ,
|
||||
0x08df,0x289a ,0x26b3,0x1590 ,0xea02,0x36e5 ,
|
||||
0x0000,0x2d41 ,0x22a3,0x187e ,0xdd5d,0x3b21 ,
|
||||
0xf721,0x3179 ,0x1e7e,0x1b5d ,0xd178,0x3e15 ,
|
||||
0xee58,0x3537 ,0x1a46,0x1e2b ,0xc695,0x3fb1 ,
|
||||
0xe5ba,0x3871 ,0x15fe,0x20e7 ,0xbcf0,0x3fec ,
|
||||
0xdd5d,0x3b21 ,0x11a8,0x238e ,0xb4be,0x3ec5 ,
|
||||
0xd556,0x3d3f ,0x0d48,0x2620 ,0xae2e,0x3c42 ,
|
||||
0xcdb7,0x3ec5 ,0x08df,0x289a ,0xa963,0x3871 ,
|
||||
0xc695,0x3fb1 ,0x0471,0x2afb ,0xa678,0x3368 ,
|
||||
0xc000,0x4000 ,0x0000,0x2d41 ,0xa57e,0x2d41 ,
|
||||
0xba09,0x3fb1 ,0xfb8f,0x2f6c ,0xa678,0x2620 ,
|
||||
0xb4be,0x3ec5 ,0xf721,0x3179 ,0xa963,0x1e2b ,
|
||||
0xb02d,0x3d3f ,0xf2b8,0x3368 ,0xae2e,0x1590 ,
|
||||
0xac61,0x3b21 ,0xee58,0x3537 ,0xb4be,0x0c7c ,
|
||||
0xa963,0x3871 ,0xea02,0x36e5 ,0xbcf0,0x0324 ,
|
||||
0xa73b,0x3537 ,0xe5ba,0x3871 ,0xc695,0xf9ba ,
|
||||
0xa5ed,0x3179 ,0xe182,0x39db ,0xd178,0xf073 ,
|
||||
0xa57e,0x2d41 ,0xdd5d,0x3b21 ,0xdd5d,0xe782 ,
|
||||
0xa5ed,0x289a ,0xd94d,0x3c42 ,0xea02,0xdf19 ,
|
||||
0xa73b,0x238e ,0xd556,0x3d3f ,0xf721,0xd766 ,
|
||||
0xa963,0x1e2b ,0xd178,0x3e15 ,0x0471,0xd094 ,
|
||||
0xac61,0x187e ,0xcdb7,0x3ec5 ,0x11a8,0xcac9 ,
|
||||
0xb02d,0x1294 ,0xca15,0x3f4f ,0x1e7e,0xc625 ,
|
||||
0xb4be,0x0c7c ,0xc695,0x3fb1 ,0x2aaa,0xc2c1 ,
|
||||
0xba09,0x0646 ,0xc338,0x3fec ,0x35eb,0xc0b1 ,
|
||||
0x4000,0x0000 ,0x4000,0x0000 ,0x4000,0x0000 ,
|
||||
0x3e69,0x0192 ,0x3f36,0x00c9 ,0x3d9a,0x025b ,
|
||||
0x3cc8,0x0324 ,0x3e69,0x0192 ,0x3b1e,0x04b5 ,
|
||||
0x3b1e,0x04b5 ,0x3d9a,0x025b ,0x388e,0x070e ,
|
||||
0x396b,0x0646 ,0x3cc8,0x0324 ,0x35eb,0x0964 ,
|
||||
0x37af,0x07d6 ,0x3bf4,0x03ed ,0x3334,0x0bb7 ,
|
||||
0x35eb,0x0964 ,0x3b1e,0x04b5 ,0x306c,0x0e06 ,
|
||||
0x341e,0x0af1 ,0x3a46,0x057e ,0x2d93,0x1050 ,
|
||||
0x3249,0x0c7c ,0x396b,0x0646 ,0x2aaa,0x1294 ,
|
||||
0x306c,0x0e06 ,0x388e,0x070e ,0x27b3,0x14d2 ,
|
||||
0x2e88,0x0f8d ,0x37af,0x07d6 ,0x24ae,0x1709 ,
|
||||
0x2c9d,0x1112 ,0x36ce,0x089d ,0x219c,0x1937 ,
|
||||
0x2aaa,0x1294 ,0x35eb,0x0964 ,0x1e7e,0x1b5d ,
|
||||
0x28b2,0x1413 ,0x3505,0x0a2b ,0x1b56,0x1d79 ,
|
||||
0x26b3,0x1590 ,0x341e,0x0af1 ,0x1824,0x1f8c ,
|
||||
0x24ae,0x1709 ,0x3334,0x0bb7 ,0x14ea,0x2193 ,
|
||||
0x22a3,0x187e ,0x3249,0x0c7c ,0x11a8,0x238e ,
|
||||
0x2093,0x19ef ,0x315b,0x0d41 ,0x0e61,0x257e ,
|
||||
0x1e7e,0x1b5d ,0x306c,0x0e06 ,0x0b14,0x2760 ,
|
||||
0x1c64,0x1cc6 ,0x2f7b,0x0eca ,0x07c4,0x2935 ,
|
||||
0x1a46,0x1e2b ,0x2e88,0x0f8d ,0x0471,0x2afb ,
|
||||
0x1824,0x1f8c ,0x2d93,0x1050 ,0x011c,0x2cb2 ,
|
||||
0x15fe,0x20e7 ,0x2c9d,0x1112 ,0xfdc7,0x2e5a ,
|
||||
0x13d5,0x223d ,0x2ba4,0x11d3 ,0xfa73,0x2ff2 ,
|
||||
0x11a8,0x238e ,0x2aaa,0x1294 ,0xf721,0x3179 ,
|
||||
0x0f79,0x24da ,0x29af,0x1354 ,0xf3d2,0x32ef ,
|
||||
0x0d48,0x2620 ,0x28b2,0x1413 ,0xf087,0x3453 ,
|
||||
0x0b14,0x2760 ,0x27b3,0x14d2 ,0xed41,0x35a5 ,
|
||||
0x08df,0x289a ,0x26b3,0x1590 ,0xea02,0x36e5 ,
|
||||
0x06a9,0x29ce ,0x25b1,0x164c ,0xe6cb,0x3812 ,
|
||||
0x0471,0x2afb ,0x24ae,0x1709 ,0xe39c,0x392b ,
|
||||
0x0239,0x2c21 ,0x23a9,0x17c4 ,0xe077,0x3a30 ,
|
||||
0x0000,0x2d41 ,0x22a3,0x187e ,0xdd5d,0x3b21 ,
|
||||
0xfdc7,0x2e5a ,0x219c,0x1937 ,0xda4f,0x3bfd ,
|
||||
0xfb8f,0x2f6c ,0x2093,0x19ef ,0xd74e,0x3cc5 ,
|
||||
0xf957,0x3076 ,0x1f89,0x1aa7 ,0xd45c,0x3d78 ,
|
||||
0xf721,0x3179 ,0x1e7e,0x1b5d ,0xd178,0x3e15 ,
|
||||
0xf4ec,0x3274 ,0x1d72,0x1c12 ,0xcea5,0x3e9d ,
|
||||
0xf2b8,0x3368 ,0x1c64,0x1cc6 ,0xcbe2,0x3f0f ,
|
||||
0xf087,0x3453 ,0x1b56,0x1d79 ,0xc932,0x3f6b ,
|
||||
0xee58,0x3537 ,0x1a46,0x1e2b ,0xc695,0x3fb1 ,
|
||||
0xec2b,0x3612 ,0x1935,0x1edc ,0xc40c,0x3fe1 ,
|
||||
0xea02,0x36e5 ,0x1824,0x1f8c ,0xc197,0x3ffb ,
|
||||
0xe7dc,0x37b0 ,0x1711,0x203a ,0xbf38,0x3fff ,
|
||||
0xe5ba,0x3871 ,0x15fe,0x20e7 ,0xbcf0,0x3fec ,
|
||||
0xe39c,0x392b ,0x14ea,0x2193 ,0xbabf,0x3fc4 ,
|
||||
0xe182,0x39db ,0x13d5,0x223d ,0xb8a6,0x3f85 ,
|
||||
0xdf6d,0x3a82 ,0x12bf,0x22e7 ,0xb6a5,0x3f30 ,
|
||||
0xdd5d,0x3b21 ,0x11a8,0x238e ,0xb4be,0x3ec5 ,
|
||||
0xdb52,0x3bb6 ,0x1091,0x2435 ,0xb2f2,0x3e45 ,
|
||||
0xd94d,0x3c42 ,0x0f79,0x24da ,0xb140,0x3daf ,
|
||||
0xd74e,0x3cc5 ,0x0e61,0x257e ,0xafa9,0x3d03 ,
|
||||
0xd556,0x3d3f ,0x0d48,0x2620 ,0xae2e,0x3c42 ,
|
||||
0xd363,0x3daf ,0x0c2e,0x26c1 ,0xacd0,0x3b6d ,
|
||||
0xd178,0x3e15 ,0x0b14,0x2760 ,0xab8e,0x3a82 ,
|
||||
0xcf94,0x3e72 ,0x09fa,0x27fe ,0xaa6a,0x3984 ,
|
||||
0xcdb7,0x3ec5 ,0x08df,0x289a ,0xa963,0x3871 ,
|
||||
0xcbe2,0x3f0f ,0x07c4,0x2935 ,0xa87b,0x374b ,
|
||||
0xca15,0x3f4f ,0x06a9,0x29ce ,0xa7b1,0x3612 ,
|
||||
0xc851,0x3f85 ,0x058d,0x2a65 ,0xa705,0x34c6 ,
|
||||
0xc695,0x3fb1 ,0x0471,0x2afb ,0xa678,0x3368 ,
|
||||
0xc4e2,0x3fd4 ,0x0355,0x2b8f ,0xa60b,0x31f8 ,
|
||||
0xc338,0x3fec ,0x0239,0x2c21 ,0xa5bc,0x3076 ,
|
||||
0xc197,0x3ffb ,0x011c,0x2cb2 ,0xa58d,0x2ee4 ,
|
||||
0xc000,0x4000 ,0x0000,0x2d41 ,0xa57e,0x2d41 ,
|
||||
0xbe73,0x3ffb ,0xfee4,0x2dcf ,0xa58d,0x2b8f ,
|
||||
0xbcf0,0x3fec ,0xfdc7,0x2e5a ,0xa5bc,0x29ce ,
|
||||
0xbb77,0x3fd4 ,0xfcab,0x2ee4 ,0xa60b,0x27fe ,
|
||||
0xba09,0x3fb1 ,0xfb8f,0x2f6c ,0xa678,0x2620 ,
|
||||
0xb8a6,0x3f85 ,0xfa73,0x2ff2 ,0xa705,0x2435 ,
|
||||
0xb74d,0x3f4f ,0xf957,0x3076 ,0xa7b1,0x223d ,
|
||||
0xb600,0x3f0f ,0xf83c,0x30f9 ,0xa87b,0x203a ,
|
||||
0xb4be,0x3ec5 ,0xf721,0x3179 ,0xa963,0x1e2b ,
|
||||
0xb388,0x3e72 ,0xf606,0x31f8 ,0xaa6a,0x1c12 ,
|
||||
0xb25e,0x3e15 ,0xf4ec,0x3274 ,0xab8e,0x19ef ,
|
||||
0xb140,0x3daf ,0xf3d2,0x32ef ,0xacd0,0x17c4 ,
|
||||
0xb02d,0x3d3f ,0xf2b8,0x3368 ,0xae2e,0x1590 ,
|
||||
0xaf28,0x3cc5 ,0xf19f,0x33df ,0xafa9,0x1354 ,
|
||||
0xae2e,0x3c42 ,0xf087,0x3453 ,0xb140,0x1112 ,
|
||||
0xad41,0x3bb6 ,0xef6f,0x34c6 ,0xb2f2,0x0eca ,
|
||||
0xac61,0x3b21 ,0xee58,0x3537 ,0xb4be,0x0c7c ,
|
||||
0xab8e,0x3a82 ,0xed41,0x35a5 ,0xb6a5,0x0a2b ,
|
||||
0xaac8,0x39db ,0xec2b,0x3612 ,0xb8a6,0x07d6 ,
|
||||
0xaa0f,0x392b ,0xeb16,0x367d ,0xbabf,0x057e ,
|
||||
0xa963,0x3871 ,0xea02,0x36e5 ,0xbcf0,0x0324 ,
|
||||
0xa8c5,0x37b0 ,0xe8ef,0x374b ,0xbf38,0x00c9 ,
|
||||
0xa834,0x36e5 ,0xe7dc,0x37b0 ,0xc197,0xfe6e ,
|
||||
0xa7b1,0x3612 ,0xe6cb,0x3812 ,0xc40c,0xfc13 ,
|
||||
0xa73b,0x3537 ,0xe5ba,0x3871 ,0xc695,0xf9ba ,
|
||||
0xa6d3,0x3453 ,0xe4aa,0x38cf ,0xc932,0xf763 ,
|
||||
0xa678,0x3368 ,0xe39c,0x392b ,0xcbe2,0xf50f ,
|
||||
0xa62c,0x3274 ,0xe28e,0x3984 ,0xcea5,0xf2bf ,
|
||||
0xa5ed,0x3179 ,0xe182,0x39db ,0xd178,0xf073 ,
|
||||
0xa5bc,0x3076 ,0xe077,0x3a30 ,0xd45c,0xee2d ,
|
||||
0xa599,0x2f6c ,0xdf6d,0x3a82 ,0xd74e,0xebed ,
|
||||
0xa585,0x2e5a ,0xde64,0x3ad3 ,0xda4f,0xe9b4 ,
|
||||
0xa57e,0x2d41 ,0xdd5d,0x3b21 ,0xdd5d,0xe782 ,
|
||||
0xa585,0x2c21 ,0xdc57,0x3b6d ,0xe077,0xe559 ,
|
||||
0xa599,0x2afb ,0xdb52,0x3bb6 ,0xe39c,0xe33a ,
|
||||
0xa5bc,0x29ce ,0xda4f,0x3bfd ,0xe6cb,0xe124 ,
|
||||
0xa5ed,0x289a ,0xd94d,0x3c42 ,0xea02,0xdf19 ,
|
||||
0xa62c,0x2760 ,0xd84d,0x3c85 ,0xed41,0xdd19 ,
|
||||
0xa678,0x2620 ,0xd74e,0x3cc5 ,0xf087,0xdb26 ,
|
||||
0xa6d3,0x24da ,0xd651,0x3d03 ,0xf3d2,0xd93f ,
|
||||
0xa73b,0x238e ,0xd556,0x3d3f ,0xf721,0xd766 ,
|
||||
0xa7b1,0x223d ,0xd45c,0x3d78 ,0xfa73,0xd59b ,
|
||||
0xa834,0x20e7 ,0xd363,0x3daf ,0xfdc7,0xd3df ,
|
||||
0xa8c5,0x1f8c ,0xd26d,0x3de3 ,0x011c,0xd231 ,
|
||||
0xa963,0x1e2b ,0xd178,0x3e15 ,0x0471,0xd094 ,
|
||||
0xaa0f,0x1cc6 ,0xd085,0x3e45 ,0x07c4,0xcf07 ,
|
||||
0xaac8,0x1b5d ,0xcf94,0x3e72 ,0x0b14,0xcd8c ,
|
||||
0xab8e,0x19ef ,0xcea5,0x3e9d ,0x0e61,0xcc21 ,
|
||||
0xac61,0x187e ,0xcdb7,0x3ec5 ,0x11a8,0xcac9 ,
|
||||
0xad41,0x1709 ,0xcccc,0x3eeb ,0x14ea,0xc983 ,
|
||||
0xae2e,0x1590 ,0xcbe2,0x3f0f ,0x1824,0xc850 ,
|
||||
0xaf28,0x1413 ,0xcafb,0x3f30 ,0x1b56,0xc731 ,
|
||||
0xb02d,0x1294 ,0xca15,0x3f4f ,0x1e7e,0xc625 ,
|
||||
0xb140,0x1112 ,0xc932,0x3f6b ,0x219c,0xc52d ,
|
||||
0xb25e,0x0f8d ,0xc851,0x3f85 ,0x24ae,0xc44a ,
|
||||
0xb388,0x0e06 ,0xc772,0x3f9c ,0x27b3,0xc37b ,
|
||||
0xb4be,0x0c7c ,0xc695,0x3fb1 ,0x2aaa,0xc2c1 ,
|
||||
0xb600,0x0af1 ,0xc5ba,0x3fc4 ,0x2d93,0xc21d ,
|
||||
0xb74d,0x0964 ,0xc4e2,0x3fd4 ,0x306c,0xc18e ,
|
||||
0xb8a6,0x07d6 ,0xc40c,0x3fe1 ,0x3334,0xc115 ,
|
||||
0xba09,0x0646 ,0xc338,0x3fec ,0x35eb,0xc0b1 ,
|
||||
0xbb77,0x04b5 ,0xc266,0x3ff5 ,0x388e,0xc064 ,
|
||||
0xbcf0,0x0324 ,0xc197,0x3ffb ,0x3b1e,0xc02c ,
|
||||
0xbe73,0x0192 ,0xc0ca,0x3fff ,0x3d9a,0xc00b ,
|
||||
0x4000,0x0000 ,0x3f9b,0x0065 ,0x3f36,0x00c9 ,
|
||||
0x3ed0,0x012e ,0x3e69,0x0192 ,0x3e02,0x01f7 ,
|
||||
0x3d9a,0x025b ,0x3d31,0x02c0 ,0x3cc8,0x0324 ,
|
||||
0x3c5f,0x0388 ,0x3bf4,0x03ed ,0x3b8a,0x0451 ,
|
||||
0x3b1e,0x04b5 ,0x3ab2,0x051a ,0x3a46,0x057e ,
|
||||
0x39d9,0x05e2 ,0x396b,0x0646 ,0x38fd,0x06aa ,
|
||||
0x388e,0x070e ,0x381f,0x0772 ,0x37af,0x07d6 ,
|
||||
0x373f,0x0839 ,0x36ce,0x089d ,0x365d,0x0901 ,
|
||||
0x35eb,0x0964 ,0x3578,0x09c7 ,0x3505,0x0a2b ,
|
||||
0x3492,0x0a8e ,0x341e,0x0af1 ,0x33a9,0x0b54 ,
|
||||
0x3334,0x0bb7 ,0x32bf,0x0c1a ,0x3249,0x0c7c ,
|
||||
0x31d2,0x0cdf ,0x315b,0x0d41 ,0x30e4,0x0da4 ,
|
||||
0x306c,0x0e06 ,0x2ff4,0x0e68 ,0x2f7b,0x0eca ,
|
||||
0x2f02,0x0f2b ,0x2e88,0x0f8d ,0x2e0e,0x0fee ,
|
||||
0x2d93,0x1050 ,0x2d18,0x10b1 ,0x2c9d,0x1112 ,
|
||||
0x2c21,0x1173 ,0x2ba4,0x11d3 ,0x2b28,0x1234 ,
|
||||
0x2aaa,0x1294 ,0x2a2d,0x12f4 ,0x29af,0x1354 ,
|
||||
0x2931,0x13b4 ,0x28b2,0x1413 ,0x2833,0x1473 ,
|
||||
0x27b3,0x14d2 ,0x2733,0x1531 ,0x26b3,0x1590 ,
|
||||
0x2632,0x15ee ,0x25b1,0x164c ,0x252f,0x16ab ,
|
||||
0x24ae,0x1709 ,0x242b,0x1766 ,0x23a9,0x17c4 ,
|
||||
0x2326,0x1821 ,0x22a3,0x187e ,0x221f,0x18db ,
|
||||
0x219c,0x1937 ,0x2117,0x1993 ,0x2093,0x19ef ,
|
||||
0x200e,0x1a4b ,0x1f89,0x1aa7 ,0x1f04,0x1b02 ,
|
||||
0x1e7e,0x1b5d ,0x1df8,0x1bb8 ,0x1d72,0x1c12 ,
|
||||
0x1ceb,0x1c6c ,0x1c64,0x1cc6 ,0x1bdd,0x1d20 ,
|
||||
0x1b56,0x1d79 ,0x1ace,0x1dd3 ,0x1a46,0x1e2b ,
|
||||
0x19be,0x1e84 ,0x1935,0x1edc ,0x18ad,0x1f34 ,
|
||||
0x1824,0x1f8c ,0x179b,0x1fe3 ,0x1711,0x203a ,
|
||||
0x1688,0x2091 ,0x15fe,0x20e7 ,0x1574,0x213d ,
|
||||
0x14ea,0x2193 ,0x145f,0x21e8 ,0x13d5,0x223d ,
|
||||
0x134a,0x2292 ,0x12bf,0x22e7 ,0x1234,0x233b ,
|
||||
0x11a8,0x238e ,0x111d,0x23e2 ,0x1091,0x2435 ,
|
||||
0x1005,0x2488 ,0x0f79,0x24da ,0x0eed,0x252c ,
|
||||
0x0e61,0x257e ,0x0dd4,0x25cf ,0x0d48,0x2620 ,
|
||||
0x0cbb,0x2671 ,0x0c2e,0x26c1 ,0x0ba1,0x2711 ,
|
||||
0x0b14,0x2760 ,0x0a87,0x27af ,0x09fa,0x27fe ,
|
||||
0x096d,0x284c ,0x08df,0x289a ,0x0852,0x28e7 ,
|
||||
0x07c4,0x2935 ,0x0736,0x2981 ,0x06a9,0x29ce ,
|
||||
0x061b,0x2a1a ,0x058d,0x2a65 ,0x04ff,0x2ab0 ,
|
||||
0x0471,0x2afb ,0x03e3,0x2b45 ,0x0355,0x2b8f ,
|
||||
0x02c7,0x2bd8 ,0x0239,0x2c21 ,0x01aa,0x2c6a ,
|
||||
0x011c,0x2cb2 ,0x008e,0x2cfa ,0x0000,0x2d41 ,
|
||||
0xff72,0x2d88 ,0xfee4,0x2dcf ,0xfe56,0x2e15 ,
|
||||
0xfdc7,0x2e5a ,0xfd39,0x2e9f ,0xfcab,0x2ee4 ,
|
||||
0xfc1d,0x2f28 ,0xfb8f,0x2f6c ,0xfb01,0x2faf ,
|
||||
0xfa73,0x2ff2 ,0xf9e5,0x3034 ,0xf957,0x3076 ,
|
||||
0xf8ca,0x30b8 ,0xf83c,0x30f9 ,0xf7ae,0x3139 ,
|
||||
0xf721,0x3179 ,0xf693,0x31b9 ,0xf606,0x31f8 ,
|
||||
0xf579,0x3236 ,0xf4ec,0x3274 ,0xf45f,0x32b2 ,
|
||||
0xf3d2,0x32ef ,0xf345,0x332c ,0xf2b8,0x3368 ,
|
||||
0xf22c,0x33a3 ,0xf19f,0x33df ,0xf113,0x3419 ,
|
||||
0xf087,0x3453 ,0xeffb,0x348d ,0xef6f,0x34c6 ,
|
||||
0xeee3,0x34ff ,0xee58,0x3537 ,0xedcc,0x356e ,
|
||||
0xed41,0x35a5 ,0xecb6,0x35dc ,0xec2b,0x3612 ,
|
||||
0xeba1,0x3648 ,0xeb16,0x367d ,0xea8c,0x36b1 ,
|
||||
0xea02,0x36e5 ,0xe978,0x3718 ,0xe8ef,0x374b ,
|
||||
0xe865,0x377e ,0xe7dc,0x37b0 ,0xe753,0x37e1 ,
|
||||
0xe6cb,0x3812 ,0xe642,0x3842 ,0xe5ba,0x3871 ,
|
||||
0xe532,0x38a1 ,0xe4aa,0x38cf ,0xe423,0x38fd ,
|
||||
0xe39c,0x392b ,0xe315,0x3958 ,0xe28e,0x3984 ,
|
||||
0xe208,0x39b0 ,0xe182,0x39db ,0xe0fc,0x3a06 ,
|
||||
0xe077,0x3a30 ,0xdff2,0x3a59 ,0xdf6d,0x3a82 ,
|
||||
0xdee9,0x3aab ,0xde64,0x3ad3 ,0xdde1,0x3afa ,
|
||||
0xdd5d,0x3b21 ,0xdcda,0x3b47 ,0xdc57,0x3b6d ,
|
||||
0xdbd5,0x3b92 ,0xdb52,0x3bb6 ,0xdad1,0x3bda ,
|
||||
0xda4f,0x3bfd ,0xd9ce,0x3c20 ,0xd94d,0x3c42 ,
|
||||
0xd8cd,0x3c64 ,0xd84d,0x3c85 ,0xd7cd,0x3ca5 ,
|
||||
0xd74e,0x3cc5 ,0xd6cf,0x3ce4 ,0xd651,0x3d03 ,
|
||||
0xd5d3,0x3d21 ,0xd556,0x3d3f ,0xd4d8,0x3d5b ,
|
||||
0xd45c,0x3d78 ,0xd3df,0x3d93 ,0xd363,0x3daf ,
|
||||
0xd2e8,0x3dc9 ,0xd26d,0x3de3 ,0xd1f2,0x3dfc ,
|
||||
0xd178,0x3e15 ,0xd0fe,0x3e2d ,0xd085,0x3e45 ,
|
||||
0xd00c,0x3e5c ,0xcf94,0x3e72 ,0xcf1c,0x3e88 ,
|
||||
0xcea5,0x3e9d ,0xce2e,0x3eb1 ,0xcdb7,0x3ec5 ,
|
||||
0xcd41,0x3ed8 ,0xcccc,0x3eeb ,0xcc57,0x3efd ,
|
||||
0xcbe2,0x3f0f ,0xcb6e,0x3f20 ,0xcafb,0x3f30 ,
|
||||
0xca88,0x3f40 ,0xca15,0x3f4f ,0xc9a3,0x3f5d ,
|
||||
0xc932,0x3f6b ,0xc8c1,0x3f78 ,0xc851,0x3f85 ,
|
||||
0xc7e1,0x3f91 ,0xc772,0x3f9c ,0xc703,0x3fa7 ,
|
||||
0xc695,0x3fb1 ,0xc627,0x3fbb ,0xc5ba,0x3fc4 ,
|
||||
0xc54e,0x3fcc ,0xc4e2,0x3fd4 ,0xc476,0x3fdb ,
|
||||
0xc40c,0x3fe1 ,0xc3a1,0x3fe7 ,0xc338,0x3fec ,
|
||||
0xc2cf,0x3ff1 ,0xc266,0x3ff5 ,0xc1fe,0x3ff8 ,
|
||||
0xc197,0x3ffb ,0xc130,0x3ffd ,0xc0ca,0x3fff ,
|
||||
0xc065,0x4000 ,0xc000,0x4000 ,0xbf9c,0x4000 ,
|
||||
0xbf38,0x3fff ,0xbed5,0x3ffd ,0xbe73,0x3ffb ,
|
||||
0xbe11,0x3ff8 ,0xbdb0,0x3ff5 ,0xbd50,0x3ff1 ,
|
||||
0xbcf0,0x3fec ,0xbc91,0x3fe7 ,0xbc32,0x3fe1 ,
|
||||
0xbbd4,0x3fdb ,0xbb77,0x3fd4 ,0xbb1b,0x3fcc ,
|
||||
0xbabf,0x3fc4 ,0xba64,0x3fbb ,0xba09,0x3fb1 ,
|
||||
0xb9af,0x3fa7 ,0xb956,0x3f9c ,0xb8fd,0x3f91 ,
|
||||
0xb8a6,0x3f85 ,0xb84f,0x3f78 ,0xb7f8,0x3f6b ,
|
||||
0xb7a2,0x3f5d ,0xb74d,0x3f4f ,0xb6f9,0x3f40 ,
|
||||
0xb6a5,0x3f30 ,0xb652,0x3f20 ,0xb600,0x3f0f ,
|
||||
0xb5af,0x3efd ,0xb55e,0x3eeb ,0xb50e,0x3ed8 ,
|
||||
0xb4be,0x3ec5 ,0xb470,0x3eb1 ,0xb422,0x3e9d ,
|
||||
0xb3d5,0x3e88 ,0xb388,0x3e72 ,0xb33d,0x3e5c ,
|
||||
0xb2f2,0x3e45 ,0xb2a7,0x3e2d ,0xb25e,0x3e15 ,
|
||||
0xb215,0x3dfc ,0xb1cd,0x3de3 ,0xb186,0x3dc9 ,
|
||||
0xb140,0x3daf ,0xb0fa,0x3d93 ,0xb0b5,0x3d78 ,
|
||||
0xb071,0x3d5b ,0xb02d,0x3d3f ,0xafeb,0x3d21 ,
|
||||
0xafa9,0x3d03 ,0xaf68,0x3ce4 ,0xaf28,0x3cc5 ,
|
||||
0xaee8,0x3ca5 ,0xaea9,0x3c85 ,0xae6b,0x3c64 ,
|
||||
0xae2e,0x3c42 ,0xadf2,0x3c20 ,0xadb6,0x3bfd ,
|
||||
0xad7b,0x3bda ,0xad41,0x3bb6 ,0xad08,0x3b92 ,
|
||||
0xacd0,0x3b6d ,0xac98,0x3b47 ,0xac61,0x3b21 ,
|
||||
0xac2b,0x3afa ,0xabf6,0x3ad3 ,0xabc2,0x3aab ,
|
||||
0xab8e,0x3a82 ,0xab5b,0x3a59 ,0xab29,0x3a30 ,
|
||||
0xaaf8,0x3a06 ,0xaac8,0x39db ,0xaa98,0x39b0 ,
|
||||
0xaa6a,0x3984 ,0xaa3c,0x3958 ,0xaa0f,0x392b ,
|
||||
0xa9e3,0x38fd ,0xa9b7,0x38cf ,0xa98d,0x38a1 ,
|
||||
0xa963,0x3871 ,0xa93a,0x3842 ,0xa912,0x3812 ,
|
||||
0xa8eb,0x37e1 ,0xa8c5,0x37b0 ,0xa89f,0x377e ,
|
||||
0xa87b,0x374b ,0xa857,0x3718 ,0xa834,0x36e5 ,
|
||||
0xa812,0x36b1 ,0xa7f1,0x367d ,0xa7d0,0x3648 ,
|
||||
0xa7b1,0x3612 ,0xa792,0x35dc ,0xa774,0x35a5 ,
|
||||
0xa757,0x356e ,0xa73b,0x3537 ,0xa71f,0x34ff ,
|
||||
0xa705,0x34c6 ,0xa6eb,0x348d ,0xa6d3,0x3453 ,
|
||||
0xa6bb,0x3419 ,0xa6a4,0x33df ,0xa68e,0x33a3 ,
|
||||
0xa678,0x3368 ,0xa664,0x332c ,0xa650,0x32ef ,
|
||||
0xa63e,0x32b2 ,0xa62c,0x3274 ,0xa61b,0x3236 ,
|
||||
0xa60b,0x31f8 ,0xa5fb,0x31b9 ,0xa5ed,0x3179 ,
|
||||
0xa5e0,0x3139 ,0xa5d3,0x30f9 ,0xa5c7,0x30b8 ,
|
||||
0xa5bc,0x3076 ,0xa5b2,0x3034 ,0xa5a9,0x2ff2 ,
|
||||
0xa5a1,0x2faf ,0xa599,0x2f6c ,0xa593,0x2f28 ,
|
||||
0xa58d,0x2ee4 ,0xa588,0x2e9f ,0xa585,0x2e5a ,
|
||||
0xa581,0x2e15 ,0xa57f,0x2dcf ,0xa57e,0x2d88 ,
|
||||
0xa57e,0x2d41 ,0xa57e,0x2cfa ,0xa57f,0x2cb2 ,
|
||||
0xa581,0x2c6a ,0xa585,0x2c21 ,0xa588,0x2bd8 ,
|
||||
0xa58d,0x2b8f ,0xa593,0x2b45 ,0xa599,0x2afb ,
|
||||
0xa5a1,0x2ab0 ,0xa5a9,0x2a65 ,0xa5b2,0x2a1a ,
|
||||
0xa5bc,0x29ce ,0xa5c7,0x2981 ,0xa5d3,0x2935 ,
|
||||
0xa5e0,0x28e7 ,0xa5ed,0x289a ,0xa5fb,0x284c ,
|
||||
0xa60b,0x27fe ,0xa61b,0x27af ,0xa62c,0x2760 ,
|
||||
0xa63e,0x2711 ,0xa650,0x26c1 ,0xa664,0x2671 ,
|
||||
0xa678,0x2620 ,0xa68e,0x25cf ,0xa6a4,0x257e ,
|
||||
0xa6bb,0x252c ,0xa6d3,0x24da ,0xa6eb,0x2488 ,
|
||||
0xa705,0x2435 ,0xa71f,0x23e2 ,0xa73b,0x238e ,
|
||||
0xa757,0x233b ,0xa774,0x22e7 ,0xa792,0x2292 ,
|
||||
0xa7b1,0x223d ,0xa7d0,0x21e8 ,0xa7f1,0x2193 ,
|
||||
0xa812,0x213d ,0xa834,0x20e7 ,0xa857,0x2091 ,
|
||||
0xa87b,0x203a ,0xa89f,0x1fe3 ,0xa8c5,0x1f8c ,
|
||||
0xa8eb,0x1f34 ,0xa912,0x1edc ,0xa93a,0x1e84 ,
|
||||
0xa963,0x1e2b ,0xa98d,0x1dd3 ,0xa9b7,0x1d79 ,
|
||||
0xa9e3,0x1d20 ,0xaa0f,0x1cc6 ,0xaa3c,0x1c6c ,
|
||||
0xaa6a,0x1c12 ,0xaa98,0x1bb8 ,0xaac8,0x1b5d ,
|
||||
0xaaf8,0x1b02 ,0xab29,0x1aa7 ,0xab5b,0x1a4b ,
|
||||
0xab8e,0x19ef ,0xabc2,0x1993 ,0xabf6,0x1937 ,
|
||||
0xac2b,0x18db ,0xac61,0x187e ,0xac98,0x1821 ,
|
||||
0xacd0,0x17c4 ,0xad08,0x1766 ,0xad41,0x1709 ,
|
||||
0xad7b,0x16ab ,0xadb6,0x164c ,0xadf2,0x15ee ,
|
||||
0xae2e,0x1590 ,0xae6b,0x1531 ,0xaea9,0x14d2 ,
|
||||
0xaee8,0x1473 ,0xaf28,0x1413 ,0xaf68,0x13b4 ,
|
||||
0xafa9,0x1354 ,0xafeb,0x12f4 ,0xb02d,0x1294 ,
|
||||
0xb071,0x1234 ,0xb0b5,0x11d3 ,0xb0fa,0x1173 ,
|
||||
0xb140,0x1112 ,0xb186,0x10b1 ,0xb1cd,0x1050 ,
|
||||
0xb215,0x0fee ,0xb25e,0x0f8d ,0xb2a7,0x0f2b ,
|
||||
0xb2f2,0x0eca ,0xb33d,0x0e68 ,0xb388,0x0e06 ,
|
||||
0xb3d5,0x0da4 ,0xb422,0x0d41 ,0xb470,0x0cdf ,
|
||||
0xb4be,0x0c7c ,0xb50e,0x0c1a ,0xb55e,0x0bb7 ,
|
||||
0xb5af,0x0b54 ,0xb600,0x0af1 ,0xb652,0x0a8e ,
|
||||
0xb6a5,0x0a2b ,0xb6f9,0x09c7 ,0xb74d,0x0964 ,
|
||||
0xb7a2,0x0901 ,0xb7f8,0x089d ,0xb84f,0x0839 ,
|
||||
0xb8a6,0x07d6 ,0xb8fd,0x0772 ,0xb956,0x070e ,
|
||||
0xb9af,0x06aa ,0xba09,0x0646 ,0xba64,0x05e2 ,
|
||||
0xbabf,0x057e ,0xbb1b,0x051a ,0xbb77,0x04b5 ,
|
||||
0xbbd4,0x0451 ,0xbc32,0x03ed ,0xbc91,0x0388 ,
|
||||
0xbcf0,0x0324 ,0xbd50,0x02c0 ,0xbdb0,0x025b ,
|
||||
0xbe11,0x01f7 ,0xbe73,0x0192 ,0xbed5,0x012e ,
|
||||
0xbf38,0x00c9 ,0xbf9c,0x0065 };
|
||||
|
||||
|
||||
extern const int s_Q14R_8;
|
||||
const int s_Q14R_8 = 1024;
|
||||
extern const unsigned short t_Q14R_8[2032];
|
||||
const unsigned short t_Q14R_8[2032] = {
|
||||
0x4000,0x0000 ,0x4000,0x0000 ,0x4000,0x0000 ,
|
||||
0x3b21,0x187e ,0x3ec5,0x0c7c ,0x3537,0x238e ,
|
||||
0x2d41,0x2d41 ,0x3b21,0x187e ,0x187e,0x3b21 ,
|
||||
0x187e,0x3b21 ,0x3537,0x238e ,0xf384,0x3ec5 ,
|
||||
0x0000,0x4000 ,0x2d41,0x2d41 ,0xd2bf,0x2d41 ,
|
||||
0xe782,0x3b21 ,0x238e,0x3537 ,0xc13b,0x0c7c ,
|
||||
0xd2bf,0x2d41 ,0x187e,0x3b21 ,0xc4df,0xe782 ,
|
||||
0xc4df,0x187e ,0x0c7c,0x3ec5 ,0xdc72,0xcac9 ,
|
||||
0x4000,0x0000 ,0x4000,0x0000 ,0x4000,0x0000 ,
|
||||
0x3fb1,0x0646 ,0x3fec,0x0324 ,0x3f4f,0x0964 ,
|
||||
0x3ec5,0x0c7c ,0x3fb1,0x0646 ,0x3d3f,0x1294 ,
|
||||
0x3d3f,0x1294 ,0x3f4f,0x0964 ,0x39db,0x1b5d ,
|
||||
0x3b21,0x187e ,0x3ec5,0x0c7c ,0x3537,0x238e ,
|
||||
0x3871,0x1e2b ,0x3e15,0x0f8d ,0x2f6c,0x2afb ,
|
||||
0x3537,0x238e ,0x3d3f,0x1294 ,0x289a,0x3179 ,
|
||||
0x3179,0x289a ,0x3c42,0x1590 ,0x20e7,0x36e5 ,
|
||||
0x2d41,0x2d41 ,0x3b21,0x187e ,0x187e,0x3b21 ,
|
||||
0x289a,0x3179 ,0x39db,0x1b5d ,0x0f8d,0x3e15 ,
|
||||
0x238e,0x3537 ,0x3871,0x1e2b ,0x0646,0x3fb1 ,
|
||||
0x1e2b,0x3871 ,0x36e5,0x20e7 ,0xfcdc,0x3fec ,
|
||||
0x187e,0x3b21 ,0x3537,0x238e ,0xf384,0x3ec5 ,
|
||||
0x1294,0x3d3f ,0x3368,0x2620 ,0xea70,0x3c42 ,
|
||||
0x0c7c,0x3ec5 ,0x3179,0x289a ,0xe1d5,0x3871 ,
|
||||
0x0646,0x3fb1 ,0x2f6c,0x2afb ,0xd9e0,0x3368 ,
|
||||
0x0000,0x4000 ,0x2d41,0x2d41 ,0xd2bf,0x2d41 ,
|
||||
0xf9ba,0x3fb1 ,0x2afb,0x2f6c ,0xcc98,0x2620 ,
|
||||
0xf384,0x3ec5 ,0x289a,0x3179 ,0xc78f,0x1e2b ,
|
||||
0xed6c,0x3d3f ,0x2620,0x3368 ,0xc3be,0x1590 ,
|
||||
0xe782,0x3b21 ,0x238e,0x3537 ,0xc13b,0x0c7c ,
|
||||
0xe1d5,0x3871 ,0x20e7,0x36e5 ,0xc014,0x0324 ,
|
||||
0xdc72,0x3537 ,0x1e2b,0x3871 ,0xc04f,0xf9ba ,
|
||||
0xd766,0x3179 ,0x1b5d,0x39db ,0xc1eb,0xf073 ,
|
||||
0xd2bf,0x2d41 ,0x187e,0x3b21 ,0xc4df,0xe782 ,
|
||||
0xce87,0x289a ,0x1590,0x3c42 ,0xc91b,0xdf19 ,
|
||||
0xcac9,0x238e ,0x1294,0x3d3f ,0xce87,0xd766 ,
|
||||
0xc78f,0x1e2b ,0x0f8d,0x3e15 ,0xd505,0xd094 ,
|
||||
0xc4df,0x187e ,0x0c7c,0x3ec5 ,0xdc72,0xcac9 ,
|
||||
0xc2c1,0x1294 ,0x0964,0x3f4f ,0xe4a3,0xc625 ,
|
||||
0xc13b,0x0c7c ,0x0646,0x3fb1 ,0xed6c,0xc2c1 ,
|
||||
0xc04f,0x0646 ,0x0324,0x3fec ,0xf69c,0xc0b1 ,
|
||||
0x4000,0x0000 ,0x4000,0x0000 ,0x4000,0x0000 ,
|
||||
0x3ffb,0x0192 ,0x3fff,0x00c9 ,0x3ff5,0x025b ,
|
||||
0x3fec,0x0324 ,0x3ffb,0x0192 ,0x3fd4,0x04b5 ,
|
||||
0x3fd4,0x04b5 ,0x3ff5,0x025b ,0x3f9c,0x070e ,
|
||||
0x3fb1,0x0646 ,0x3fec,0x0324 ,0x3f4f,0x0964 ,
|
||||
0x3f85,0x07d6 ,0x3fe1,0x03ed ,0x3eeb,0x0bb7 ,
|
||||
0x3f4f,0x0964 ,0x3fd4,0x04b5 ,0x3e72,0x0e06 ,
|
||||
0x3f0f,0x0af1 ,0x3fc4,0x057e ,0x3de3,0x1050 ,
|
||||
0x3ec5,0x0c7c ,0x3fb1,0x0646 ,0x3d3f,0x1294 ,
|
||||
0x3e72,0x0e06 ,0x3f9c,0x070e ,0x3c85,0x14d2 ,
|
||||
0x3e15,0x0f8d ,0x3f85,0x07d6 ,0x3bb6,0x1709 ,
|
||||
0x3daf,0x1112 ,0x3f6b,0x089d ,0x3ad3,0x1937 ,
|
||||
0x3d3f,0x1294 ,0x3f4f,0x0964 ,0x39db,0x1b5d ,
|
||||
0x3cc5,0x1413 ,0x3f30,0x0a2b ,0x38cf,0x1d79 ,
|
||||
0x3c42,0x1590 ,0x3f0f,0x0af1 ,0x37b0,0x1f8c ,
|
||||
0x3bb6,0x1709 ,0x3eeb,0x0bb7 ,0x367d,0x2193 ,
|
||||
0x3b21,0x187e ,0x3ec5,0x0c7c ,0x3537,0x238e ,
|
||||
0x3a82,0x19ef ,0x3e9d,0x0d41 ,0x33df,0x257e ,
|
||||
0x39db,0x1b5d ,0x3e72,0x0e06 ,0x3274,0x2760 ,
|
||||
0x392b,0x1cc6 ,0x3e45,0x0eca ,0x30f9,0x2935 ,
|
||||
0x3871,0x1e2b ,0x3e15,0x0f8d ,0x2f6c,0x2afb ,
|
||||
0x37b0,0x1f8c ,0x3de3,0x1050 ,0x2dcf,0x2cb2 ,
|
||||
0x36e5,0x20e7 ,0x3daf,0x1112 ,0x2c21,0x2e5a ,
|
||||
0x3612,0x223d ,0x3d78,0x11d3 ,0x2a65,0x2ff2 ,
|
||||
0x3537,0x238e ,0x3d3f,0x1294 ,0x289a,0x3179 ,
|
||||
0x3453,0x24da ,0x3d03,0x1354 ,0x26c1,0x32ef ,
|
||||
0x3368,0x2620 ,0x3cc5,0x1413 ,0x24da,0x3453 ,
|
||||
0x3274,0x2760 ,0x3c85,0x14d2 ,0x22e7,0x35a5 ,
|
||||
0x3179,0x289a ,0x3c42,0x1590 ,0x20e7,0x36e5 ,
|
||||
0x3076,0x29ce ,0x3bfd,0x164c ,0x1edc,0x3812 ,
|
||||
0x2f6c,0x2afb ,0x3bb6,0x1709 ,0x1cc6,0x392b ,
|
||||
0x2e5a,0x2c21 ,0x3b6d,0x17c4 ,0x1aa7,0x3a30 ,
|
||||
0x2d41,0x2d41 ,0x3b21,0x187e ,0x187e,0x3b21 ,
|
||||
0x2c21,0x2e5a ,0x3ad3,0x1937 ,0x164c,0x3bfd ,
|
||||
0x2afb,0x2f6c ,0x3a82,0x19ef ,0x1413,0x3cc5 ,
|
||||
0x29ce,0x3076 ,0x3a30,0x1aa7 ,0x11d3,0x3d78 ,
|
||||
0x289a,0x3179 ,0x39db,0x1b5d ,0x0f8d,0x3e15 ,
|
||||
0x2760,0x3274 ,0x3984,0x1c12 ,0x0d41,0x3e9d ,
|
||||
0x2620,0x3368 ,0x392b,0x1cc6 ,0x0af1,0x3f0f ,
|
||||
0x24da,0x3453 ,0x38cf,0x1d79 ,0x089d,0x3f6b ,
|
||||
0x238e,0x3537 ,0x3871,0x1e2b ,0x0646,0x3fb1 ,
|
||||
0x223d,0x3612 ,0x3812,0x1edc ,0x03ed,0x3fe1 ,
|
||||
0x20e7,0x36e5 ,0x37b0,0x1f8c ,0x0192,0x3ffb ,
|
||||
0x1f8c,0x37b0 ,0x374b,0x203a ,0xff37,0x3fff ,
|
||||
0x1e2b,0x3871 ,0x36e5,0x20e7 ,0xfcdc,0x3fec ,
|
||||
0x1cc6,0x392b ,0x367d,0x2193 ,0xfa82,0x3fc4 ,
|
||||
0x1b5d,0x39db ,0x3612,0x223d ,0xf82a,0x3f85 ,
|
||||
0x19ef,0x3a82 ,0x35a5,0x22e7 ,0xf5d5,0x3f30 ,
|
||||
0x187e,0x3b21 ,0x3537,0x238e ,0xf384,0x3ec5 ,
|
||||
0x1709,0x3bb6 ,0x34c6,0x2435 ,0xf136,0x3e45 ,
|
||||
0x1590,0x3c42 ,0x3453,0x24da ,0xeeee,0x3daf ,
|
||||
0x1413,0x3cc5 ,0x33df,0x257e ,0xecac,0x3d03 ,
|
||||
0x1294,0x3d3f ,0x3368,0x2620 ,0xea70,0x3c42 ,
|
||||
0x1112,0x3daf ,0x32ef,0x26c1 ,0xe83c,0x3b6d ,
|
||||
0x0f8d,0x3e15 ,0x3274,0x2760 ,0xe611,0x3a82 ,
|
||||
0x0e06,0x3e72 ,0x31f8,0x27fe ,0xe3ee,0x3984 ,
|
||||
0x0c7c,0x3ec5 ,0x3179,0x289a ,0xe1d5,0x3871 ,
|
||||
0x0af1,0x3f0f ,0x30f9,0x2935 ,0xdfc6,0x374b ,
|
||||
0x0964,0x3f4f ,0x3076,0x29ce ,0xddc3,0x3612 ,
|
||||
0x07d6,0x3f85 ,0x2ff2,0x2a65 ,0xdbcb,0x34c6 ,
|
||||
0x0646,0x3fb1 ,0x2f6c,0x2afb ,0xd9e0,0x3368 ,
|
||||
0x04b5,0x3fd4 ,0x2ee4,0x2b8f ,0xd802,0x31f8 ,
|
||||
0x0324,0x3fec ,0x2e5a,0x2c21 ,0xd632,0x3076 ,
|
||||
0x0192,0x3ffb ,0x2dcf,0x2cb2 ,0xd471,0x2ee4 ,
|
||||
0x0000,0x4000 ,0x2d41,0x2d41 ,0xd2bf,0x2d41 ,
|
||||
0xfe6e,0x3ffb ,0x2cb2,0x2dcf ,0xd11c,0x2b8f ,
|
||||
0xfcdc,0x3fec ,0x2c21,0x2e5a ,0xcf8a,0x29ce ,
|
||||
0xfb4b,0x3fd4 ,0x2b8f,0x2ee4 ,0xce08,0x27fe ,
|
||||
0xf9ba,0x3fb1 ,0x2afb,0x2f6c ,0xcc98,0x2620 ,
|
||||
0xf82a,0x3f85 ,0x2a65,0x2ff2 ,0xcb3a,0x2435 ,
|
||||
0xf69c,0x3f4f ,0x29ce,0x3076 ,0xc9ee,0x223d ,
|
||||
0xf50f,0x3f0f ,0x2935,0x30f9 ,0xc8b5,0x203a ,
|
||||
0xf384,0x3ec5 ,0x289a,0x3179 ,0xc78f,0x1e2b ,
|
||||
0xf1fa,0x3e72 ,0x27fe,0x31f8 ,0xc67c,0x1c12 ,
|
||||
0xf073,0x3e15 ,0x2760,0x3274 ,0xc57e,0x19ef ,
|
||||
0xeeee,0x3daf ,0x26c1,0x32ef ,0xc493,0x17c4 ,
|
||||
0xed6c,0x3d3f ,0x2620,0x3368 ,0xc3be,0x1590 ,
|
||||
0xebed,0x3cc5 ,0x257e,0x33df ,0xc2fd,0x1354 ,
|
||||
0xea70,0x3c42 ,0x24da,0x3453 ,0xc251,0x1112 ,
|
||||
0xe8f7,0x3bb6 ,0x2435,0x34c6 ,0xc1bb,0x0eca ,
|
||||
0xe782,0x3b21 ,0x238e,0x3537 ,0xc13b,0x0c7c ,
|
||||
0xe611,0x3a82 ,0x22e7,0x35a5 ,0xc0d0,0x0a2b ,
|
||||
0xe4a3,0x39db ,0x223d,0x3612 ,0xc07b,0x07d6 ,
|
||||
0xe33a,0x392b ,0x2193,0x367d ,0xc03c,0x057e ,
|
||||
0xe1d5,0x3871 ,0x20e7,0x36e5 ,0xc014,0x0324 ,
|
||||
0xe074,0x37b0 ,0x203a,0x374b ,0xc001,0x00c9 ,
|
||||
0xdf19,0x36e5 ,0x1f8c,0x37b0 ,0xc005,0xfe6e ,
|
||||
0xddc3,0x3612 ,0x1edc,0x3812 ,0xc01f,0xfc13 ,
|
||||
0xdc72,0x3537 ,0x1e2b,0x3871 ,0xc04f,0xf9ba ,
|
||||
0xdb26,0x3453 ,0x1d79,0x38cf ,0xc095,0xf763 ,
|
||||
0xd9e0,0x3368 ,0x1cc6,0x392b ,0xc0f1,0xf50f ,
|
||||
0xd8a0,0x3274 ,0x1c12,0x3984 ,0xc163,0xf2bf ,
|
||||
0xd766,0x3179 ,0x1b5d,0x39db ,0xc1eb,0xf073 ,
|
||||
0xd632,0x3076 ,0x1aa7,0x3a30 ,0xc288,0xee2d ,
|
||||
0xd505,0x2f6c ,0x19ef,0x3a82 ,0xc33b,0xebed ,
|
||||
0xd3df,0x2e5a ,0x1937,0x3ad3 ,0xc403,0xe9b4 ,
|
||||
0xd2bf,0x2d41 ,0x187e,0x3b21 ,0xc4df,0xe782 ,
|
||||
0xd1a6,0x2c21 ,0x17c4,0x3b6d ,0xc5d0,0xe559 ,
|
||||
0xd094,0x2afb ,0x1709,0x3bb6 ,0xc6d5,0xe33a ,
|
||||
0xcf8a,0x29ce ,0x164c,0x3bfd ,0xc7ee,0xe124 ,
|
||||
0xce87,0x289a ,0x1590,0x3c42 ,0xc91b,0xdf19 ,
|
||||
0xcd8c,0x2760 ,0x14d2,0x3c85 ,0xca5b,0xdd19 ,
|
||||
0xcc98,0x2620 ,0x1413,0x3cc5 ,0xcbad,0xdb26 ,
|
||||
0xcbad,0x24da ,0x1354,0x3d03 ,0xcd11,0xd93f ,
|
||||
0xcac9,0x238e ,0x1294,0x3d3f ,0xce87,0xd766 ,
|
||||
0xc9ee,0x223d ,0x11d3,0x3d78 ,0xd00e,0xd59b ,
|
||||
0xc91b,0x20e7 ,0x1112,0x3daf ,0xd1a6,0xd3df ,
|
||||
0xc850,0x1f8c ,0x1050,0x3de3 ,0xd34e,0xd231 ,
|
||||
0xc78f,0x1e2b ,0x0f8d,0x3e15 ,0xd505,0xd094 ,
|
||||
0xc6d5,0x1cc6 ,0x0eca,0x3e45 ,0xd6cb,0xcf07 ,
|
||||
0xc625,0x1b5d ,0x0e06,0x3e72 ,0xd8a0,0xcd8c ,
|
||||
0xc57e,0x19ef ,0x0d41,0x3e9d ,0xda82,0xcc21 ,
|
||||
0xc4df,0x187e ,0x0c7c,0x3ec5 ,0xdc72,0xcac9 ,
|
||||
0xc44a,0x1709 ,0x0bb7,0x3eeb ,0xde6d,0xc983 ,
|
||||
0xc3be,0x1590 ,0x0af1,0x3f0f ,0xe074,0xc850 ,
|
||||
0xc33b,0x1413 ,0x0a2b,0x3f30 ,0xe287,0xc731 ,
|
||||
0xc2c1,0x1294 ,0x0964,0x3f4f ,0xe4a3,0xc625 ,
|
||||
0xc251,0x1112 ,0x089d,0x3f6b ,0xe6c9,0xc52d ,
|
||||
0xc1eb,0x0f8d ,0x07d6,0x3f85 ,0xe8f7,0xc44a ,
|
||||
0xc18e,0x0e06 ,0x070e,0x3f9c ,0xeb2e,0xc37b ,
|
||||
0xc13b,0x0c7c ,0x0646,0x3fb1 ,0xed6c,0xc2c1 ,
|
||||
0xc0f1,0x0af1 ,0x057e,0x3fc4 ,0xefb0,0xc21d ,
|
||||
0xc0b1,0x0964 ,0x04b5,0x3fd4 ,0xf1fa,0xc18e ,
|
||||
0xc07b,0x07d6 ,0x03ed,0x3fe1 ,0xf449,0xc115 ,
|
||||
0xc04f,0x0646 ,0x0324,0x3fec ,0xf69c,0xc0b1 ,
|
||||
0xc02c,0x04b5 ,0x025b,0x3ff5 ,0xf8f2,0xc064 ,
|
||||
0xc014,0x0324 ,0x0192,0x3ffb ,0xfb4b,0xc02c ,
|
||||
0xc005,0x0192 ,0x00c9,0x3fff ,0xfda5,0xc00b ,
|
||||
0x4000,0x0000 ,0x4000,0x0065 ,0x3fff,0x00c9 ,
|
||||
0x3ffd,0x012e ,0x3ffb,0x0192 ,0x3ff8,0x01f7 ,
|
||||
0x3ff5,0x025b ,0x3ff1,0x02c0 ,0x3fec,0x0324 ,
|
||||
0x3fe7,0x0388 ,0x3fe1,0x03ed ,0x3fdb,0x0451 ,
|
||||
0x3fd4,0x04b5 ,0x3fcc,0x051a ,0x3fc4,0x057e ,
|
||||
0x3fbb,0x05e2 ,0x3fb1,0x0646 ,0x3fa7,0x06aa ,
|
||||
0x3f9c,0x070e ,0x3f91,0x0772 ,0x3f85,0x07d6 ,
|
||||
0x3f78,0x0839 ,0x3f6b,0x089d ,0x3f5d,0x0901 ,
|
||||
0x3f4f,0x0964 ,0x3f40,0x09c7 ,0x3f30,0x0a2b ,
|
||||
0x3f20,0x0a8e ,0x3f0f,0x0af1 ,0x3efd,0x0b54 ,
|
||||
0x3eeb,0x0bb7 ,0x3ed8,0x0c1a ,0x3ec5,0x0c7c ,
|
||||
0x3eb1,0x0cdf ,0x3e9d,0x0d41 ,0x3e88,0x0da4 ,
|
||||
0x3e72,0x0e06 ,0x3e5c,0x0e68 ,0x3e45,0x0eca ,
|
||||
0x3e2d,0x0f2b ,0x3e15,0x0f8d ,0x3dfc,0x0fee ,
|
||||
0x3de3,0x1050 ,0x3dc9,0x10b1 ,0x3daf,0x1112 ,
|
||||
0x3d93,0x1173 ,0x3d78,0x11d3 ,0x3d5b,0x1234 ,
|
||||
0x3d3f,0x1294 ,0x3d21,0x12f4 ,0x3d03,0x1354 ,
|
||||
0x3ce4,0x13b4 ,0x3cc5,0x1413 ,0x3ca5,0x1473 ,
|
||||
0x3c85,0x14d2 ,0x3c64,0x1531 ,0x3c42,0x1590 ,
|
||||
0x3c20,0x15ee ,0x3bfd,0x164c ,0x3bda,0x16ab ,
|
||||
0x3bb6,0x1709 ,0x3b92,0x1766 ,0x3b6d,0x17c4 ,
|
||||
0x3b47,0x1821 ,0x3b21,0x187e ,0x3afa,0x18db ,
|
||||
0x3ad3,0x1937 ,0x3aab,0x1993 ,0x3a82,0x19ef ,
|
||||
0x3a59,0x1a4b ,0x3a30,0x1aa7 ,0x3a06,0x1b02 ,
|
||||
0x39db,0x1b5d ,0x39b0,0x1bb8 ,0x3984,0x1c12 ,
|
||||
0x3958,0x1c6c ,0x392b,0x1cc6 ,0x38fd,0x1d20 ,
|
||||
0x38cf,0x1d79 ,0x38a1,0x1dd3 ,0x3871,0x1e2b ,
|
||||
0x3842,0x1e84 ,0x3812,0x1edc ,0x37e1,0x1f34 ,
|
||||
0x37b0,0x1f8c ,0x377e,0x1fe3 ,0x374b,0x203a ,
|
||||
0x3718,0x2091 ,0x36e5,0x20e7 ,0x36b1,0x213d ,
|
||||
0x367d,0x2193 ,0x3648,0x21e8 ,0x3612,0x223d ,
|
||||
0x35dc,0x2292 ,0x35a5,0x22e7 ,0x356e,0x233b ,
|
||||
0x3537,0x238e ,0x34ff,0x23e2 ,0x34c6,0x2435 ,
|
||||
0x348d,0x2488 ,0x3453,0x24da ,0x3419,0x252c ,
|
||||
0x33df,0x257e ,0x33a3,0x25cf ,0x3368,0x2620 ,
|
||||
0x332c,0x2671 ,0x32ef,0x26c1 ,0x32b2,0x2711 ,
|
||||
0x3274,0x2760 ,0x3236,0x27af ,0x31f8,0x27fe ,
|
||||
0x31b9,0x284c ,0x3179,0x289a ,0x3139,0x28e7 ,
|
||||
0x30f9,0x2935 ,0x30b8,0x2981 ,0x3076,0x29ce ,
|
||||
0x3034,0x2a1a ,0x2ff2,0x2a65 ,0x2faf,0x2ab0 ,
|
||||
0x2f6c,0x2afb ,0x2f28,0x2b45 ,0x2ee4,0x2b8f ,
|
||||
0x2e9f,0x2bd8 ,0x2e5a,0x2c21 ,0x2e15,0x2c6a ,
|
||||
0x2dcf,0x2cb2 ,0x2d88,0x2cfa ,0x2d41,0x2d41 ,
|
||||
0x2cfa,0x2d88 ,0x2cb2,0x2dcf ,0x2c6a,0x2e15 ,
|
||||
0x2c21,0x2e5a ,0x2bd8,0x2e9f ,0x2b8f,0x2ee4 ,
|
||||
0x2b45,0x2f28 ,0x2afb,0x2f6c ,0x2ab0,0x2faf ,
|
||||
0x2a65,0x2ff2 ,0x2a1a,0x3034 ,0x29ce,0x3076 ,
|
||||
0x2981,0x30b8 ,0x2935,0x30f9 ,0x28e7,0x3139 ,
|
||||
0x289a,0x3179 ,0x284c,0x31b9 ,0x27fe,0x31f8 ,
|
||||
0x27af,0x3236 ,0x2760,0x3274 ,0x2711,0x32b2 ,
|
||||
0x26c1,0x32ef ,0x2671,0x332c ,0x2620,0x3368 ,
|
||||
0x25cf,0x33a3 ,0x257e,0x33df ,0x252c,0x3419 ,
|
||||
0x24da,0x3453 ,0x2488,0x348d ,0x2435,0x34c6 ,
|
||||
0x23e2,0x34ff ,0x238e,0x3537 ,0x233b,0x356e ,
|
||||
0x22e7,0x35a5 ,0x2292,0x35dc ,0x223d,0x3612 ,
|
||||
0x21e8,0x3648 ,0x2193,0x367d ,0x213d,0x36b1 ,
|
||||
0x20e7,0x36e5 ,0x2091,0x3718 ,0x203a,0x374b ,
|
||||
0x1fe3,0x377e ,0x1f8c,0x37b0 ,0x1f34,0x37e1 ,
|
||||
0x1edc,0x3812 ,0x1e84,0x3842 ,0x1e2b,0x3871 ,
|
||||
0x1dd3,0x38a1 ,0x1d79,0x38cf ,0x1d20,0x38fd ,
|
||||
0x1cc6,0x392b ,0x1c6c,0x3958 ,0x1c12,0x3984 ,
|
||||
0x1bb8,0x39b0 ,0x1b5d,0x39db ,0x1b02,0x3a06 ,
|
||||
0x1aa7,0x3a30 ,0x1a4b,0x3a59 ,0x19ef,0x3a82 ,
|
||||
0x1993,0x3aab ,0x1937,0x3ad3 ,0x18db,0x3afa ,
|
||||
0x187e,0x3b21 ,0x1821,0x3b47 ,0x17c4,0x3b6d ,
|
||||
0x1766,0x3b92 ,0x1709,0x3bb6 ,0x16ab,0x3bda ,
|
||||
0x164c,0x3bfd ,0x15ee,0x3c20 ,0x1590,0x3c42 ,
|
||||
0x1531,0x3c64 ,0x14d2,0x3c85 ,0x1473,0x3ca5 ,
|
||||
0x1413,0x3cc5 ,0x13b4,0x3ce4 ,0x1354,0x3d03 ,
|
||||
0x12f4,0x3d21 ,0x1294,0x3d3f ,0x1234,0x3d5b ,
|
||||
0x11d3,0x3d78 ,0x1173,0x3d93 ,0x1112,0x3daf ,
|
||||
0x10b1,0x3dc9 ,0x1050,0x3de3 ,0x0fee,0x3dfc ,
|
||||
0x0f8d,0x3e15 ,0x0f2b,0x3e2d ,0x0eca,0x3e45 ,
|
||||
0x0e68,0x3e5c ,0x0e06,0x3e72 ,0x0da4,0x3e88 ,
|
||||
0x0d41,0x3e9d ,0x0cdf,0x3eb1 ,0x0c7c,0x3ec5 ,
|
||||
0x0c1a,0x3ed8 ,0x0bb7,0x3eeb ,0x0b54,0x3efd ,
|
||||
0x0af1,0x3f0f ,0x0a8e,0x3f20 ,0x0a2b,0x3f30 ,
|
||||
0x09c7,0x3f40 ,0x0964,0x3f4f ,0x0901,0x3f5d ,
|
||||
0x089d,0x3f6b ,0x0839,0x3f78 ,0x07d6,0x3f85 ,
|
||||
0x0772,0x3f91 ,0x070e,0x3f9c ,0x06aa,0x3fa7 ,
|
||||
0x0646,0x3fb1 ,0x05e2,0x3fbb ,0x057e,0x3fc4 ,
|
||||
0x051a,0x3fcc ,0x04b5,0x3fd4 ,0x0451,0x3fdb ,
|
||||
0x03ed,0x3fe1 ,0x0388,0x3fe7 ,0x0324,0x3fec ,
|
||||
0x02c0,0x3ff1 ,0x025b,0x3ff5 ,0x01f7,0x3ff8 ,
|
||||
0x0192,0x3ffb ,0x012e,0x3ffd ,0x00c9,0x3fff ,
|
||||
0x0065,0x4000 ,0x0000,0x4000 ,0xff9b,0x4000 ,
|
||||
0xff37,0x3fff ,0xfed2,0x3ffd ,0xfe6e,0x3ffb ,
|
||||
0xfe09,0x3ff8 ,0xfda5,0x3ff5 ,0xfd40,0x3ff1 ,
|
||||
0xfcdc,0x3fec ,0xfc78,0x3fe7 ,0xfc13,0x3fe1 ,
|
||||
0xfbaf,0x3fdb ,0xfb4b,0x3fd4 ,0xfae6,0x3fcc ,
|
||||
0xfa82,0x3fc4 ,0xfa1e,0x3fbb ,0xf9ba,0x3fb1 ,
|
||||
0xf956,0x3fa7 ,0xf8f2,0x3f9c ,0xf88e,0x3f91 ,
|
||||
0xf82a,0x3f85 ,0xf7c7,0x3f78 ,0xf763,0x3f6b ,
|
||||
0xf6ff,0x3f5d ,0xf69c,0x3f4f ,0xf639,0x3f40 ,
|
||||
0xf5d5,0x3f30 ,0xf572,0x3f20 ,0xf50f,0x3f0f ,
|
||||
0xf4ac,0x3efd ,0xf449,0x3eeb ,0xf3e6,0x3ed8 ,
|
||||
0xf384,0x3ec5 ,0xf321,0x3eb1 ,0xf2bf,0x3e9d ,
|
||||
0xf25c,0x3e88 ,0xf1fa,0x3e72 ,0xf198,0x3e5c ,
|
||||
0xf136,0x3e45 ,0xf0d5,0x3e2d ,0xf073,0x3e15 ,
|
||||
0xf012,0x3dfc ,0xefb0,0x3de3 ,0xef4f,0x3dc9 ,
|
||||
0xeeee,0x3daf ,0xee8d,0x3d93 ,0xee2d,0x3d78 ,
|
||||
0xedcc,0x3d5b ,0xed6c,0x3d3f ,0xed0c,0x3d21 ,
|
||||
0xecac,0x3d03 ,0xec4c,0x3ce4 ,0xebed,0x3cc5 ,
|
||||
0xeb8d,0x3ca5 ,0xeb2e,0x3c85 ,0xeacf,0x3c64 ,
|
||||
0xea70,0x3c42 ,0xea12,0x3c20 ,0xe9b4,0x3bfd ,
|
||||
0xe955,0x3bda ,0xe8f7,0x3bb6 ,0xe89a,0x3b92 ,
|
||||
0xe83c,0x3b6d ,0xe7df,0x3b47 ,0xe782,0x3b21 ,
|
||||
0xe725,0x3afa ,0xe6c9,0x3ad3 ,0xe66d,0x3aab ,
|
||||
0xe611,0x3a82 ,0xe5b5,0x3a59 ,0xe559,0x3a30 ,
|
||||
0xe4fe,0x3a06 ,0xe4a3,0x39db ,0xe448,0x39b0 ,
|
||||
0xe3ee,0x3984 ,0xe394,0x3958 ,0xe33a,0x392b ,
|
||||
0xe2e0,0x38fd ,0xe287,0x38cf ,0xe22d,0x38a1 ,
|
||||
0xe1d5,0x3871 ,0xe17c,0x3842 ,0xe124,0x3812 ,
|
||||
0xe0cc,0x37e1 ,0xe074,0x37b0 ,0xe01d,0x377e ,
|
||||
0xdfc6,0x374b ,0xdf6f,0x3718 ,0xdf19,0x36e5 ,
|
||||
0xdec3,0x36b1 ,0xde6d,0x367d ,0xde18,0x3648 ,
|
||||
0xddc3,0x3612 ,0xdd6e,0x35dc ,0xdd19,0x35a5 ,
|
||||
0xdcc5,0x356e ,0xdc72,0x3537 ,0xdc1e,0x34ff ,
|
||||
0xdbcb,0x34c6 ,0xdb78,0x348d ,0xdb26,0x3453 ,
|
||||
0xdad4,0x3419 ,0xda82,0x33df ,0xda31,0x33a3 ,
|
||||
0xd9e0,0x3368 ,0xd98f,0x332c ,0xd93f,0x32ef ,
|
||||
0xd8ef,0x32b2 ,0xd8a0,0x3274 ,0xd851,0x3236 ,
|
||||
0xd802,0x31f8 ,0xd7b4,0x31b9 ,0xd766,0x3179 ,
|
||||
0xd719,0x3139 ,0xd6cb,0x30f9 ,0xd67f,0x30b8 ,
|
||||
0xd632,0x3076 ,0xd5e6,0x3034 ,0xd59b,0x2ff2 ,
|
||||
0xd550,0x2faf ,0xd505,0x2f6c ,0xd4bb,0x2f28 ,
|
||||
0xd471,0x2ee4 ,0xd428,0x2e9f ,0xd3df,0x2e5a ,
|
||||
0xd396,0x2e15 ,0xd34e,0x2dcf ,0xd306,0x2d88 ,
|
||||
0xd2bf,0x2d41 ,0xd278,0x2cfa ,0xd231,0x2cb2 ,
|
||||
0xd1eb,0x2c6a ,0xd1a6,0x2c21 ,0xd161,0x2bd8 ,
|
||||
0xd11c,0x2b8f ,0xd0d8,0x2b45 ,0xd094,0x2afb ,
|
||||
0xd051,0x2ab0 ,0xd00e,0x2a65 ,0xcfcc,0x2a1a ,
|
||||
0xcf8a,0x29ce ,0xcf48,0x2981 ,0xcf07,0x2935 ,
|
||||
0xcec7,0x28e7 ,0xce87,0x289a ,0xce47,0x284c ,
|
||||
0xce08,0x27fe ,0xcdca,0x27af ,0xcd8c,0x2760 ,
|
||||
0xcd4e,0x2711 ,0xcd11,0x26c1 ,0xccd4,0x2671 ,
|
||||
0xcc98,0x2620 ,0xcc5d,0x25cf ,0xcc21,0x257e ,
|
||||
0xcbe7,0x252c ,0xcbad,0x24da ,0xcb73,0x2488 ,
|
||||
0xcb3a,0x2435 ,0xcb01,0x23e2 ,0xcac9,0x238e ,
|
||||
0xca92,0x233b ,0xca5b,0x22e7 ,0xca24,0x2292 ,
|
||||
0xc9ee,0x223d ,0xc9b8,0x21e8 ,0xc983,0x2193 ,
|
||||
0xc94f,0x213d ,0xc91b,0x20e7 ,0xc8e8,0x2091 ,
|
||||
0xc8b5,0x203a ,0xc882,0x1fe3 ,0xc850,0x1f8c ,
|
||||
0xc81f,0x1f34 ,0xc7ee,0x1edc ,0xc7be,0x1e84 ,
|
||||
0xc78f,0x1e2b ,0xc75f,0x1dd3 ,0xc731,0x1d79 ,
|
||||
0xc703,0x1d20 ,0xc6d5,0x1cc6 ,0xc6a8,0x1c6c ,
|
||||
0xc67c,0x1c12 ,0xc650,0x1bb8 ,0xc625,0x1b5d ,
|
||||
0xc5fa,0x1b02 ,0xc5d0,0x1aa7 ,0xc5a7,0x1a4b ,
|
||||
0xc57e,0x19ef ,0xc555,0x1993 ,0xc52d,0x1937 ,
|
||||
0xc506,0x18db ,0xc4df,0x187e ,0xc4b9,0x1821 ,
|
||||
0xc493,0x17c4 ,0xc46e,0x1766 ,0xc44a,0x1709 ,
|
||||
0xc426,0x16ab ,0xc403,0x164c ,0xc3e0,0x15ee ,
|
||||
0xc3be,0x1590 ,0xc39c,0x1531 ,0xc37b,0x14d2 ,
|
||||
0xc35b,0x1473 ,0xc33b,0x1413 ,0xc31c,0x13b4 ,
|
||||
0xc2fd,0x1354 ,0xc2df,0x12f4 ,0xc2c1,0x1294 ,
|
||||
0xc2a5,0x1234 ,0xc288,0x11d3 ,0xc26d,0x1173 ,
|
||||
0xc251,0x1112 ,0xc237,0x10b1 ,0xc21d,0x1050 ,
|
||||
0xc204,0x0fee ,0xc1eb,0x0f8d ,0xc1d3,0x0f2b ,
|
||||
0xc1bb,0x0eca ,0xc1a4,0x0e68 ,0xc18e,0x0e06 ,
|
||||
0xc178,0x0da4 ,0xc163,0x0d41 ,0xc14f,0x0cdf ,
|
||||
0xc13b,0x0c7c ,0xc128,0x0c1a ,0xc115,0x0bb7 ,
|
||||
0xc103,0x0b54 ,0xc0f1,0x0af1 ,0xc0e0,0x0a8e ,
|
||||
0xc0d0,0x0a2b ,0xc0c0,0x09c7 ,0xc0b1,0x0964 ,
|
||||
0xc0a3,0x0901 ,0xc095,0x089d ,0xc088,0x0839 ,
|
||||
0xc07b,0x07d6 ,0xc06f,0x0772 ,0xc064,0x070e ,
|
||||
0xc059,0x06aa ,0xc04f,0x0646 ,0xc045,0x05e2 ,
|
||||
0xc03c,0x057e ,0xc034,0x051a ,0xc02c,0x04b5 ,
|
||||
0xc025,0x0451 ,0xc01f,0x03ed ,0xc019,0x0388 ,
|
||||
0xc014,0x0324 ,0xc00f,0x02c0 ,0xc00b,0x025b ,
|
||||
0xc008,0x01f7 ,0xc005,0x0192 ,0xc003,0x012e ,
|
||||
0xc001,0x00c9 ,0xc000,0x0065 };
|
@@ -1,27 +0,0 @@
|
||||
/*
|
||||
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
|
||||
/*
|
||||
* This file contains the Q14 radix-2 tables used in ARM9E optimization routines.
|
||||
*
|
||||
*/
|
||||
|
||||
extern const unsigned short t_Q14S_rad8[2];
|
||||
const unsigned short t_Q14S_rad8[2] = { 0x0000,0x2d41 };
|
||||
|
||||
//extern const int t_Q30S_rad8[2];
|
||||
//const int t_Q30S_rad8[2] = { 0x00000000,0x2d413ccd };
|
||||
|
||||
extern const unsigned short t_Q14R_rad8[2];
|
||||
const unsigned short t_Q14R_rad8[2] = { 0x2d41,0x2d41 };
|
||||
|
||||
//extern const int t_Q30R_rad8[2];
|
||||
//const int t_Q30R_rad8[2] = { 0x2d413ccd,0x2d413ccd };
|
@@ -8,10 +8,10 @@
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifndef WEBRTC_COMMON_TYPES_H
|
||||
#define WEBRTC_COMMON_TYPES_H
|
||||
#ifndef WEBRTC_COMMON_TYPES_H_
|
||||
#define WEBRTC_COMMON_TYPES_H_
|
||||
|
||||
#include "typedefs.h"
|
||||
#include "webrtc/typedefs.h"
|
||||
|
||||
#if defined(_MSC_VER)
|
||||
// Disable "new behavior: elements of array will be default initialized"
|
||||
@@ -33,6 +33,16 @@
|
||||
|
||||
#define RTP_PAYLOAD_NAME_SIZE 32
|
||||
|
||||
#if defined(WEBRTC_WIN)
|
||||
// Compares two strings without regard to case.
|
||||
#define STR_CASE_CMP(s1, s2) ::_stricmp(s1, s2)
|
||||
// Compares characters of two strings without regard to case.
|
||||
#define STR_NCASE_CMP(s1, s2, n) ::_strnicmp(s1, s2, n)
|
||||
#else
|
||||
#define STR_CASE_CMP(s1, s2) ::strcasecmp(s1, s2)
|
||||
#define STR_NCASE_CMP(s1, s2, n) ::strncasecmp(s1, s2, n)
|
||||
#endif
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
class Config;
|
||||
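The STR_CASE_CMP / STR_NCASE_CMP macros added in the hunk above wrap the platform-specific case-insensitive string comparisons (_stricmp/_strnicmp on Windows, strcasecmp/strncasecmp elsewhere). A minimal sketch of how calling code might use them, for example when matching RTP payload names; the helper name and the "opus" literal are illustrative only, not part of this change:

#include "webrtc/common_types.h"

// Returns true if two payload names match, ignoring case.
static bool PayloadNamesMatch(const char* a, const char* b) {
  // Expands to ::_stricmp on Windows and ::strcasecmp elsewhere.
  return STR_CASE_CMP(a, b) == 0;
}

// Usage: PayloadNamesMatch("OPUS", "opus") evaluates to true.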
@@ -227,7 +237,7 @@ struct CodecInst
|
||||
int plfreq;
|
||||
int pacsize;
|
||||
int channels;
|
||||
int rate;
|
||||
int rate; // bits/sec unlike {start,min,max}Bitrate elsewhere in this file!
|
||||
};
|
||||
|
||||
enum FrameType
|
||||
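The comment added to CodecInst::rate above notes that this field is expressed in bits per second, unlike the {start,min,max}Bitrate fields later in the file, which are in kilobits per second. A hedged sketch of filling in the struct with that convention; pltype and plname come from the full struct definition, which the hunk does not show, and all values are illustrative:

#include <cstring>

#include "webrtc/common_types.h"

webrtc::CodecInst MakeOpusInst() {
  webrtc::CodecInst inst;
  std::memset(&inst, 0, sizeof(inst));
  inst.pltype = 111;                                  // illustrative payload type
  std::strncpy(inst.plname, "opus", RTP_PAYLOAD_NAME_SIZE - 1);
  inst.plfreq = 48000;                                // Hz
  inst.pacsize = 960;                                 // samples per packet
  inst.channels = 2;
  inst.rate = 64000;                                  // bits/sec, per the new comment
  return inst;
}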
@@ -331,7 +341,7 @@ enum NsModes    // type of Noise Suppression
|
||||
kNsLowSuppression, // lowest suppression
|
||||
kNsModerateSuppression,
|
||||
kNsHighSuppression,
|
||||
kNsVeryHighSuppression // highest suppression
|
||||
kNsVeryHighSuppression, // highest suppression
|
||||
};
|
||||
|
||||
enum AgcModes // type of Automatic Gain Control
|
||||
@@ -356,7 +366,7 @@ enum EcModes    // type of Echo Control
|
||||
kEcDefault, // platform default
|
||||
kEcConference, // conferencing default (aggressive AEC)
|
||||
kEcAec, // Acoustic Echo Cancellation
|
||||
kEcAecm // AEC mobile
|
||||
kEcAecm, // AEC mobile
|
||||
};
|
||||
|
||||
// AECM modes
|
||||
@@ -408,7 +418,7 @@ enum NetEqModes             // NetEQ playout configurations
|
||||
kNetEqFax = 2,
|
||||
// Minimal buffer management. Inserts zeros for lost packets and during
|
||||
// buffer increases.
|
||||
kNetEqOff = 3
|
||||
kNetEqOff = 3,
|
||||
};
|
||||
|
||||
enum OnHoldModes // On Hold direction
|
||||
@@ -422,7 +432,7 @@ enum AmrMode
|
||||
{
|
||||
kRfc3267BwEfficient = 0,
|
||||
kRfc3267OctetAligned = 1,
|
||||
kRfc3267FileStorage = 2
|
||||
kRfc3267FileStorage = 2,
|
||||
};
|
||||
|
||||
// ==================================================================
|
||||
@@ -525,9 +535,9 @@ struct SimulcastStream
|
||||
unsigned short width;
|
||||
unsigned short height;
|
||||
unsigned char numberOfTemporalLayers;
|
||||
unsigned int maxBitrate;
|
||||
unsigned int targetBitrate;
|
||||
unsigned int minBitrate;
|
||||
unsigned int maxBitrate; // kilobits/sec.
|
||||
unsigned int targetBitrate; // kilobits/sec.
|
||||
unsigned int minBitrate; // kilobits/sec.
|
||||
unsigned int qpMax; // minimum quality
|
||||
};
|
||||
|
||||
@@ -546,9 +556,9 @@ struct VideoCodec
|
||||
unsigned short width;
|
||||
unsigned short height;
|
||||
|
||||
unsigned int startBitrate;
|
||||
unsigned int maxBitrate;
|
||||
unsigned int minBitrate;
|
||||
unsigned int startBitrate; // kilobits/sec.
|
||||
unsigned int maxBitrate; // kilobits/sec.
|
||||
unsigned int minBitrate; // kilobits/sec.
|
||||
unsigned char maxFramerate;
|
||||
|
||||
VideoCodecUnion codecSpecific;
|
||||
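The comments added above record that SimulcastStream and VideoCodec carry their bitrates in kilobits per second, in contrast to CodecInst::rate. A small, illustrative sketch of setting the VideoCodec fields shown in the hunk with that unit in mind (all numbers are examples only):

#include "webrtc/common_types.h"

void ConfigureSendCodec(webrtc::VideoCodec* codec) {
  codec->width = 1280;
  codec->height = 720;
  codec->startBitrate = 800;   // kilobits/sec, per the new comments
  codec->minBitrate = 100;     // kilobits/sec
  codec->maxBitrate = 2000;    // kilobits/sec
  codec->maxFramerate = 30;
}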
@@ -590,5 +600,8 @@ struct OverUseDetectorOptions {
|
||||
double initial_var_noise;
|
||||
double initial_threshold;
|
||||
};
|
||||
|
||||
} // namespace webrtc
|
||||
#endif // WEBRTC_COMMON_TYPES_H
|
||||
|
||||
#endif // WEBRTC_COMMON_TYPES_H_
|
||||
|
||||
|
@@ -7,16 +7,16 @@
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
#include "common.h" // NOLINT
|
||||
#include "webrtc/common.h"
|
||||
|
||||
#include "gtest/gtest.h"
|
||||
#include "testing/gtest/include/gtest/gtest.h"
|
||||
|
||||
namespace webrtc {
|
||||
namespace {
|
||||
|
||||
struct MyExperiment {
|
||||
enum { kDefaultFactor = 1 };
|
||||
enum { kDefaultOffset = 2 };
|
||||
static const int kDefaultFactor;
|
||||
static const int kDefaultOffset;
|
||||
|
||||
MyExperiment()
|
||||
: factor(kDefaultFactor), offset(kDefaultOffset) {}
|
||||
@@ -28,6 +28,9 @@ struct MyExperiment {
|
||||
int offset;
|
||||
};
|
||||
|
||||
const int MyExperiment::kDefaultFactor = 1;
|
||||
const int MyExperiment::kDefaultOffset = 2;
|
||||
|
||||
TEST(Config, ReturnsDefaultInstanceIfNotConfigured) {
|
||||
Config config;
|
||||
const MyExperiment& my_exp = config.Get<MyExperiment>();
|
||||
@@ -68,7 +71,7 @@ struct SqrCost : Algo1_CostFunction {
|
||||
}
|
||||
};
|
||||
|
||||
TEST(Config, SupportsPolimorphism) {
|
||||
TEST(Config, SupportsPolymorphism) {
|
||||
Config config;
|
||||
config.Set<Algo1_CostFunction>(new SqrCost());
|
||||
EXPECT_EQ(25, config.Get<Algo1_CostFunction>().cost(5));
|
||||
|
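The config_unittest change above swaps MyExperiment's enum constants for static const int members with out-of-class definitions (and fixes the SupportsPolimorphism test name). The out-of-class definitions matter because a static const int that is odr-used, for instance bound to a const int& by a gtest comparison macro, needs exactly one definition to link against. A self-contained sketch of the pattern; the names are illustrative, not the test's:

struct MyFeature {
  static const int kDefaultFactor;   // in-class declaration
  int factor;
  MyFeature() : factor(kDefaultFactor) {}
};

// Out-of-class definition: required once the constant is odr-used,
// e.g. when a test macro takes its address or binds a reference to it.
const int MyFeature::kDefaultFactor = 1;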
@@ -7,16 +7,6 @@
|
||||
# be found in the AUTHORS file in the root of the source tree.
|
||||
|
||||
{
|
||||
'variables': {
|
||||
'use_libjpeg_turbo%': '<(use_libjpeg_turbo)',
|
||||
'conditions': [
|
||||
['use_libjpeg_turbo==1', {
|
||||
'libjpeg_include_dir%': [ '<(DEPTH)/third_party/libjpeg_turbo', ],
|
||||
}, {
|
||||
'libjpeg_include_dir%': [ '<(DEPTH)/third_party/libjpeg', ],
|
||||
}],
|
||||
],
|
||||
},
|
||||
'includes': ['../build/common.gypi'],
|
||||
'targets': [
|
||||
{
|
||||
@@ -25,7 +15,6 @@
|
||||
'include_dirs': [
|
||||
'<(webrtc_root)/modules/interface/',
|
||||
'interface',
|
||||
'jpeg/include',
|
||||
'libyuv/include',
|
||||
],
|
||||
'dependencies': [
|
||||
@@ -34,17 +23,10 @@
|
||||
'direct_dependent_settings': {
|
||||
'include_dirs': [
|
||||
'interface',
|
||||
'jpeg/include',
|
||||
'libyuv/include',
|
||||
],
|
||||
},
|
||||
'conditions': [
|
||||
['build_libjpeg==1', {
|
||||
'dependencies': ['<(libjpeg_gyp_path):libjpeg',],
|
||||
}, {
|
||||
# Need to add a directory normally exported by libjpeg.gyp.
|
||||
'include_dirs': ['<(libjpeg_include_dir)'],
|
||||
}],
|
||||
['build_libyuv==1', {
|
||||
'dependencies': ['<(DEPTH)/third_party/libyuv/libyuv.gyp:libyuv',],
|
||||
}, {
|
||||
@@ -54,28 +36,36 @@
|
||||
],
|
||||
'sources': [
|
||||
'interface/i420_video_frame.h',
|
||||
'interface/native_handle.h',
|
||||
'interface/texture_video_frame.h',
|
||||
'i420_video_frame.cc',
|
||||
'jpeg/include/jpeg.h',
|
||||
'jpeg/data_manager.cc',
|
||||
'jpeg/data_manager.h',
|
||||
'jpeg/jpeg.cc',
|
||||
'libyuv/include/webrtc_libyuv.h',
|
||||
'libyuv/include/scaler.h',
|
||||
'libyuv/webrtc_libyuv.cc',
|
||||
'libyuv/scaler.cc',
|
||||
'plane.h',
|
||||
'plane.cc',
|
||||
'texture_video_frame.cc'
|
||||
],
|
||||
# Silence jpeg struct padding warnings.
|
||||
'msvs_disabled_warnings': [ 4324, ],
|
||||
},
|
||||
], # targets
|
||||
'conditions': [
|
||||
['include_tests==1', {
|
||||
'targets': [
|
||||
{
|
||||
'target_name': 'frame_generator',
|
||||
'type': 'static_library',
|
||||
'sources': [
|
||||
'test/frame_generator.h',
|
||||
'test/frame_generator.cc',
|
||||
],
|
||||
'dependencies': [
|
||||
'common_video',
|
||||
],
|
||||
},
|
||||
{
|
||||
'target_name': 'common_video_unittests',
|
||||
'type': 'executable',
|
||||
'type': '<(gtest_target_type)',
|
||||
'dependencies': [
|
||||
'common_video',
|
||||
'<(DEPTH)/testing/gtest.gyp:gtest',
|
||||
@@ -84,17 +74,59 @@
|
||||
],
|
||||
'sources': [
|
||||
'i420_video_frame_unittest.cc',
|
||||
'jpeg/jpeg_unittest.cc',
|
||||
'libyuv/libyuv_unittest.cc',
|
||||
'libyuv/scaler_unittest.cc',
|
||||
'plane_unittest.cc',
|
||||
'texture_video_frame_unittest.cc'
|
||||
],
|
||||
# Disable warnings to enable Win64 build, issue 1323.
|
||||
'msvs_disabled_warnings': [
|
||||
4267, # size_t to int truncation.
|
||||
],
|
||||
'conditions': [
|
||||
# TODO(henrike): remove build_with_chromium==1 when the bots are
|
||||
# using Chromium's buildbots.
|
||||
['build_with_chromium==1 and OS=="android" and gtest_target_type=="shared_library"', {
|
||||
'dependencies': [
|
||||
'<(DEPTH)/testing/android/native_test.gyp:native_test_native_code',
|
||||
],
|
||||
}],
|
||||
],
|
||||
},
|
||||
], # targets
|
||||
'conditions': [
|
||||
# TODO(henrike): remove build_with_chromium==1 when the bots are using
|
||||
# Chromium's buildbots.
|
||||
['build_with_chromium==1 and OS=="android" and gtest_target_type=="shared_library"', {
|
||||
'targets': [
|
||||
{
|
||||
'target_name': 'common_video_unittests_apk_target',
|
||||
'type': 'none',
|
||||
'dependencies': [
|
||||
'<(apk_tests_path):common_video_unittests_apk',
|
||||
],
|
||||
},
|
||||
],
|
||||
}],
|
||||
['test_isolation_mode != "noop"', {
|
||||
'targets': [
|
||||
{
|
||||
'target_name': 'common_video_unittests_run',
|
||||
'type': 'none',
|
||||
'dependencies': [
|
||||
'<(import_isolate_path):import_isolate_gypi',
|
||||
'common_video_unittests',
|
||||
],
|
||||
'includes': [
|
||||
'common_video_unittests.isolate',
|
||||
],
|
||||
'sources': [
|
||||
'common_video_unittests.isolate',
|
||||
],
|
||||
},
|
||||
],
|
||||
}],
|
||||
],
|
||||
}], # include_tests
|
||||
],
|
||||
}
|
||||
}
|
||||
|
@@ -0,0 +1,39 @@
|
||||
# Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
|
||||
#
|
||||
# Use of this source code is governed by a BSD-style license
|
||||
# that can be found in the LICENSE file in the root of the source
|
||||
# tree. An additional intellectual property rights grant can be found
|
||||
# in the file PATENTS. All contributing project authors may
|
||||
# be found in the AUTHORS file in the root of the source tree.
|
||||
{
|
||||
'conditions': [
|
||||
['OS=="android"', {
|
||||
# When doing Android builds, the WebRTC code is put in third_party/webrtc
|
||||
# of a Chromium checkout, this is one level above the standalone build.
|
||||
'variables': {
|
||||
'isolate_dependency_untracked': [
|
||||
'../../../data/',
|
||||
'../../../resources/',
|
||||
],
|
||||
},
|
||||
}],
|
||||
['OS=="linux" or OS=="mac" or OS=="win"', {
|
||||
'variables': {
|
||||
'command': [
|
||||
'../../testing/test_env.py',
|
||||
'../../tools/swarm_client/googletest/run_test_cases.py',
|
||||
'<(PRODUCT_DIR)/common_video_unittests<(EXECUTABLE_SUFFIX)',
|
||||
],
|
||||
'isolate_dependency_tracked': [
|
||||
'../../DEPS',
|
||||
'../../resources/foreman_cif.yuv',
|
||||
'../../testing/test_env.py',
|
||||
'../../tools/swarm_client/googletest/run_test_cases.py',
|
||||
'../../tools/swarm_client/run_isolated.py',
|
||||
'../../tools/swarm_client/third_party/upload.py',
|
||||
'<(PRODUCT_DIR)/common_video_unittests<(EXECUTABLE_SUFFIX)',
|
||||
],
|
||||
},
|
||||
}],
|
||||
],
|
||||
}
|
@@ -142,6 +142,8 @@ void I420VideoFrame::ResetSize() {
|
||||
v_plane_.ResetSize();
|
||||
}
|
||||
|
||||
void* I420VideoFrame::native_handle() const { return NULL; }
|
||||
|
||||
int I420VideoFrame::CheckDimensions(int width, int height,
|
||||
int stride_y, int stride_u, int stride_v) {
|
||||
int half_width = (width + 1) / 2;
|
||||
@@ -179,5 +181,4 @@ Plane* I420VideoFrame::GetPlane(PlaneType type) {
|
||||
return NULL;
|
||||
}
|
||||
|
||||
|
||||
} // namespace webrtc
|
||||
|
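The i420_video_frame.cc hunks above add a native_handle() accessor that returns NULL for a plain, memory-backed I420VideoFrame. That lets callers distinguish ordinary frames from texture-backed subclasses without knowing the concrete type. A hedged sketch; RenderTexture and RenderI420 are hypothetical sinks declared only to keep the example self-contained:

#include "webrtc/common_video/interface/i420_video_frame.h"

// Hypothetical output paths, not WebRTC APIs.
void RenderTexture(void* texture_handle);
void RenderI420(const webrtc::I420VideoFrame& frame);

void RenderFrame(const webrtc::I420VideoFrame& frame) {
  if (frame.native_handle() != NULL) {
    // Texture-backed frame (e.g. a TextureVideoFrame): the planes are not
    // meaningful, so hand the opaque handle to the GPU path.
    RenderTexture(frame.native_handle());
  } else {
    // Ordinary memory-backed I420 frame.
    RenderI420(frame);
  }
}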
@@ -16,6 +16,7 @@
|
||||
// Storing and handling of YUV (I420) video frames.
|
||||
|
||||
#include "webrtc/common_video/plane.h"
|
||||
#include "webrtc/system_wrappers/interface/scoped_refptr.h"
|
||||
#include "webrtc/typedefs.h"
|
||||
|
||||
/*
|
||||
@@ -49,74 +50,81 @@ class I420VideoFrame {
|
||||
// If required size is bigger than the allocated one, new buffers of adequate
|
||||
// size will be allocated.
|
||||
// Return value: 0 on success ,-1 on error.
|
||||
int CreateEmptyFrame(int width, int height,
|
||||
int stride_y, int stride_u, int stride_v);
|
||||
virtual int CreateEmptyFrame(int width, int height,
|
||||
int stride_y, int stride_u, int stride_v);
|
||||
|
||||
// CreateFrame: Sets the frame's members and buffers. If required size is
|
||||
// bigger than allocated one, new buffers of adequate size will be allocated.
|
||||
// Return value: 0 on success ,-1 on error.
|
||||
int CreateFrame(int size_y, const uint8_t* buffer_y,
|
||||
int size_u, const uint8_t* buffer_u,
|
||||
int size_v, const uint8_t* buffer_v,
|
||||
int width, int height,
|
||||
int stride_y, int stride_u, int stride_v);
|
||||
virtual int CreateFrame(int size_y, const uint8_t* buffer_y,
|
||||
int size_u, const uint8_t* buffer_u,
|
||||
int size_v, const uint8_t* buffer_v,
|
||||
int width, int height,
|
||||
int stride_y, int stride_u, int stride_v);
|
||||
|
||||
// Copy frame: If required size is bigger than allocated one, new buffers of
|
||||
// adequate size will be allocated.
|
||||
// Return value: 0 on success ,-1 on error.
|
||||
int CopyFrame(const I420VideoFrame& videoFrame);
|
||||
virtual int CopyFrame(const I420VideoFrame& videoFrame);
|
||||
|
||||
// Swap Frame.
|
||||
void SwapFrame(I420VideoFrame* videoFrame);
|
||||
virtual void SwapFrame(I420VideoFrame* videoFrame);
|
||||
|
||||
// Get pointer to buffer per plane.
|
||||
uint8_t* buffer(PlaneType type);
|
||||
virtual uint8_t* buffer(PlaneType type);
|
||||
// Overloading with const.
|
||||
const uint8_t* buffer(PlaneType type) const;
|
||||
virtual const uint8_t* buffer(PlaneType type) const;
|
||||
|
||||
// Get allocated size per plane.
|
||||
int allocated_size(PlaneType type) const;
|
||||
virtual int allocated_size(PlaneType type) const;
|
||||
|
||||
// Get allocated stride per plane.
|
||||
int stride(PlaneType type) const;
|
||||
virtual int stride(PlaneType type) const;
|
||||
|
||||
// Set frame width.
|
||||
int set_width(int width);
|
||||
virtual int set_width(int width);
|
||||
|
||||
// Set frame height.
|
||||
int set_height(int height);
|
||||
virtual int set_height(int height);
|
||||
|
||||
// Get frame width.
|
||||
int width() const {return width_;}
|
||||
virtual int width() const {return width_;}
|
||||
|
||||
// Get frame height.
|
||||
int height() const {return height_;}
|
||||
virtual int height() const {return height_;}
|
||||
|
||||
// Set frame timestamp (90kHz).
|
||||
void set_timestamp(uint32_t timestamp) {timestamp_ = timestamp;}
|
||||
virtual void set_timestamp(uint32_t timestamp) {timestamp_ = timestamp;}
|
||||
|
||||
// Get frame timestamp (90kHz).
|
||||
uint32_t timestamp() const {return timestamp_;}
|
||||
virtual uint32_t timestamp() const {return timestamp_;}
|
||||
|
||||
// Set render time in miliseconds.
|
||||
void set_render_time_ms(int64_t render_time_ms) {render_time_ms_ =
|
||||
virtual void set_render_time_ms(int64_t render_time_ms) {render_time_ms_ =
|
||||
render_time_ms;}
|
||||
|
||||
// Get render time in miliseconds.
|
||||
int64_t render_time_ms() const {return render_time_ms_;}
|
||||
virtual int64_t render_time_ms() const {return render_time_ms_;}
|
||||
|
||||
// Return true if underlying plane buffers are of zero size, false if not.
|
||||
bool IsZeroSize() const;
|
||||
virtual bool IsZeroSize() const;
|
||||
|
||||
// Reset underlying plane buffers sizes to 0. This function doesn't
|
||||
// clear memory.
|
||||
void ResetSize();
|
||||
virtual void ResetSize();
|
||||
|
||||
// Return the handle of the underlying video frame. This is used when the
|
||||
// frame is backed by a texture. The object should be destroyed when it is no
|
||||
// longer in use, so the underlying resource can be freed.
|
||||
virtual void* native_handle() const;
|
||||
|
||||
protected:
|
||||
// Verifies legality of parameters.
|
||||
// Return value: 0 on success, -1 on error.
|
||||
virtual int CheckDimensions(int width, int height,
|
||||
int stride_y, int stride_u, int stride_v);
|
||||
|
||||
private:
|
||||
// Verifies legality of parameters.
|
||||
// Return value: 0 on success ,-1 on error.
|
||||
int CheckDimensions(int width, int height,
|
||||
int stride_y, int stride_u, int stride_v);
|
||||
// Get the pointer to a specific plane.
|
||||
const Plane* GetPlane(PlaneType type) const;
|
||||
// Overloading with non-const.
|
||||
|
@@ -0,0 +1,36 @@
|
||||
/*
|
||||
* Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifndef COMMON_VIDEO_INTERFACE_NATIVEHANDLE_H_
|
||||
#define COMMON_VIDEO_INTERFACE_NATIVEHANDLE_H_
|
||||
|
||||
#include "webrtc/typedefs.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
// A class to store an opaque handle of the underlying video frame. This is used
|
||||
// when the frame is backed by a texture. WebRTC carries the handle in
|
||||
// TextureVideoFrame. This object keeps a reference to the handle. The reference
|
||||
// is cleared when the object is destroyed. It is important to destroy the
|
||||
// object as soon as possible so the texture can be recycled.
|
||||
class NativeHandle {
|
||||
public:
|
||||
virtual ~NativeHandle() {}
|
||||
// For scoped_refptr
|
||||
virtual int32_t AddRef() = 0;
|
||||
virtual int32_t Release() = 0;
|
||||
|
||||
// Gets the handle.
|
||||
virtual void* GetHandle() = 0;
|
||||
};
|
||||
|
||||
} // namespace webrtc
|
||||
|
||||
#endif // COMMON_VIDEO_INTERFACE_NATIVEHANDLE_H_
|
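NativeHandle above is a pure interface with manual reference counting so that scoped_refptr can hold it, as TextureVideoFrame does below. A minimal sketch of a concrete implementation wrapping an opaque platform texture pointer; the class is illustrative and not part of this change, and a production version would use atomic reference counting:

#include "webrtc/common_video/interface/native_handle.h"

class MyTextureHandle : public webrtc::NativeHandle {
 public:
  explicit MyTextureHandle(void* texture) : ref_count_(0), texture_(texture) {}
  virtual ~MyTextureHandle() {}

  virtual int32_t AddRef() { return ++ref_count_; }
  virtual int32_t Release() {
    const int32_t count = --ref_count_;
    if (count == 0)
      delete this;  // lifetime is managed through scoped_refptr
    return count;
  }
  virtual void* GetHandle() { return texture_; }

 private:
  int32_t ref_count_;
  void* texture_;  // opaque handle, e.g. a pointer to a GL texture wrapper
};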
@@ -0,0 +1,72 @@
|
||||
/*
|
||||
* Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifndef COMMON_VIDEO_INTERFACE_TEXTURE_VIDEO_FRAME_H
|
||||
#define COMMON_VIDEO_INTERFACE_TEXTURE_VIDEO_FRAME_H
|
||||
|
||||
// TextureVideoFrame class
|
||||
//
|
||||
// Storing and handling of video frames backed by textures.
|
||||
|
||||
#include "webrtc/common_video/interface/i420_video_frame.h"
|
||||
#include "webrtc/common_video/interface/native_handle.h"
|
||||
#include "webrtc/system_wrappers/interface/scoped_refptr.h"
|
||||
#include "webrtc/typedefs.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
class TextureVideoFrame : public I420VideoFrame {
|
||||
public:
|
||||
TextureVideoFrame(NativeHandle* handle,
|
||||
int width,
|
||||
int height,
|
||||
uint32_t timestamp,
|
||||
int64_t render_time_ms);
|
||||
virtual ~TextureVideoFrame();
|
||||
|
||||
// I420VideoFrame implementation
|
||||
virtual int CreateEmptyFrame(int width,
|
||||
int height,
|
||||
int stride_y,
|
||||
int stride_u,
|
||||
int stride_v) OVERRIDE;
|
||||
virtual int CreateFrame(int size_y,
|
||||
const uint8_t* buffer_y,
|
||||
int size_u,
|
||||
const uint8_t* buffer_u,
|
||||
int size_v,
|
||||
const uint8_t* buffer_v,
|
||||
int width,
|
||||
int height,
|
||||
int stride_y,
|
||||
int stride_u,
|
||||
int stride_v) OVERRIDE;
|
||||
virtual int CopyFrame(const I420VideoFrame& videoFrame) OVERRIDE;
|
||||
virtual void SwapFrame(I420VideoFrame* videoFrame) OVERRIDE;
|
||||
virtual uint8_t* buffer(PlaneType type) OVERRIDE;
|
||||
virtual const uint8_t* buffer(PlaneType type) const OVERRIDE;
|
||||
virtual int allocated_size(PlaneType type) const OVERRIDE;
|
||||
virtual int stride(PlaneType type) const OVERRIDE;
|
||||
virtual bool IsZeroSize() const OVERRIDE;
|
||||
virtual void ResetSize() OVERRIDE;
|
||||
virtual void* native_handle() const OVERRIDE;
|
||||
|
||||
protected:
|
||||
virtual int CheckDimensions(
|
||||
int width, int height, int stride_y, int stride_u, int stride_v) OVERRIDE;
|
||||
|
||||
private:
|
||||
// An opaque handle that stores the underlying video frame.
|
||||
scoped_refptr<NativeHandle> handle_;
|
||||
};
|
||||
|
||||
} // namespace webrtc
|
||||
|
||||
#endif // COMMON_VIDEO_INTERFACE_TEXTURE_VIDEO_FRAME_H
|
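TextureVideoFrame overrides the now-virtual I420VideoFrame methods and exposes its handle through native_handle(). A hedged sketch of wrapping a NativeHandle (such as the illustrative MyTextureHandle above) so that code written against I420VideoFrame can carry the texture; the timestamp and render time values are placeholders:

#include "webrtc/common_video/interface/texture_video_frame.h"

webrtc::I420VideoFrame* WrapTexture(webrtc::NativeHandle* handle,
                                    int width, int height) {
  // timestamp = 0 and render_time_ms = 0 are illustrative defaults.
  return new webrtc::TextureVideoFrame(handle, width, height, 0, 0);
}

// A caller that only sees I420VideoFrame* can still detect the texture:
// frame->native_handle() != NULL means there are no planes to read.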
@@ -64,6 +64,6 @@ public:
|
||||
bool _completeFrame;
|
||||
};
|
||||
|
||||
} // namespace webrtc
|
||||
} // namespace webrtc
|
||||
|
||||
#endif // COMMON_VIDEO_INTERFACE_VIDEO_IMAGE_H
|
||||
|
@@ -1,42 +0,0 @@
|
||||
# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
|
||||
#
|
||||
# Use of this source code is governed by a BSD-style license
|
||||
# that can be found in the LICENSE file in the root of the source
|
||||
# tree. An additional intellectual property rights grant can be found
|
||||
# in the file PATENTS. All contributing project authors may
|
||||
# be found in the AUTHORS file in the root of the source tree.
|
||||
|
||||
LOCAL_PATH := $(call my-dir)
|
||||
|
||||
include $(CLEAR_VARS)
|
||||
|
||||
include $(LOCAL_PATH)/../../../android-webrtc.mk
|
||||
|
||||
LOCAL_MODULE_CLASS := STATIC_LIBRARIES
|
||||
LOCAL_MODULE := libwebrtc_jpeg
|
||||
LOCAL_MODULE_TAGS := optional
|
||||
LOCAL_CPP_EXTENSION := .cc
|
||||
LOCAL_SRC_FILES := \
|
||||
jpeg.cc \
|
||||
data_manager.cc
|
||||
|
||||
# Flags passed to both C and C++ files.
|
||||
LOCAL_CFLAGS := \
|
||||
$(MY_WEBRTC_COMMON_DEFS)
|
||||
|
||||
LOCAL_C_INCLUDES := \
|
||||
$(LOCAL_PATH)/include \
|
||||
$(LOCAL_PATH)/../../ \
|
||||
$(LOCAL_PATH)/../interface \
|
||||
$(LOCAL_PATH)/../../../../ \
|
||||
external/jpeg
|
||||
|
||||
LOCAL_SHARED_LIBRARIES := \
|
||||
libcutils \
|
||||
libdl \
|
||||
libstlport
|
||||
|
||||
ifndef NDK_ROOT
|
||||
include external/stlport/libstlport.mk
|
||||
endif
|
||||
include $(BUILD_STATIC_LIBRARY)
|
@@ -1,86 +0,0 @@
|
||||
/*
|
||||
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#include "webrtc/common_video/jpeg/data_manager.h"
|
||||
|
||||
namespace webrtc
|
||||
{
|
||||
|
||||
typedef struct
|
||||
{
|
||||
jpeg_source_mgr mgr;
|
||||
JOCTET* next_input_byte;
|
||||
size_t bytes_in_buffer; /* # of byte spaces remaining in buffer */
|
||||
} DataSrcMgr;
|
||||
|
||||
void
|
||||
jpegSetSrcBuffer(j_decompress_ptr cinfo, JOCTET* srcBuffer, size_t bufferSize)
|
||||
{
|
||||
DataSrcMgr* src;
|
||||
if (cinfo->src == NULL)
|
||||
{ /* first time for this JPEG object? */
|
||||
cinfo->src = (struct jpeg_source_mgr *)
|
||||
(*cinfo->mem->alloc_small) ((j_common_ptr) cinfo,
|
||||
JPOOL_PERMANENT, sizeof(DataSrcMgr));
|
||||
}
|
||||
|
||||
// Setting required functionality
|
||||
src = (DataSrcMgr*) cinfo->src;
|
||||
src->mgr.init_source = initSrc;;
|
||||
src->mgr.fill_input_buffer = fillInputBuffer;
|
||||
src->mgr.skip_input_data = skipInputData;
|
||||
src->mgr.resync_to_restart = jpeg_resync_to_restart; // use default
|
||||
src->mgr.term_source = termSource;
|
||||
// setting buffer/src
|
||||
src->bytes_in_buffer = bufferSize;
|
||||
src->next_input_byte = srcBuffer;
|
||||
|
||||
}
|
||||
|
||||
|
||||
void
|
||||
initSrc(j_decompress_ptr cinfo)
|
||||
{
|
||||
DataSrcMgr *src = (DataSrcMgr*)cinfo->src;
|
||||
src->mgr.next_input_byte = src->next_input_byte;
|
||||
src->mgr.bytes_in_buffer = src->bytes_in_buffer;
|
||||
}
|
||||
|
||||
boolean
|
||||
fillInputBuffer(j_decompress_ptr cinfo)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
|
||||
void
|
||||
skipInputData(j_decompress_ptr cinfo, long num_bytes)
|
||||
{
|
||||
DataSrcMgr* src = (DataSrcMgr*)cinfo->src;
|
||||
if (num_bytes > 0)
|
||||
{
|
||||
if ((unsigned long)num_bytes > src->mgr.bytes_in_buffer)
|
||||
src->mgr.bytes_in_buffer = 0;
|
||||
else
|
||||
{
|
||||
src->mgr.next_input_byte += num_bytes;
|
||||
src->mgr.bytes_in_buffer -= num_bytes;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
void
|
||||
termSource (j_decompress_ptr cinfo)
|
||||
{
|
||||
//
|
||||
}
|
||||
|
||||
} // end of namespace webrtc
|
@@ -1,68 +0,0 @@
|
||||
/*
|
||||
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
/*
|
||||
* Jpeg source data manager
|
||||
*/
|
||||
|
||||
#ifndef WEBRTC_COMMON_VIDEO_JPEG_DATA_MANAGER
|
||||
#define WEBRTC_COMMON_VIDEO_JPEG_DATA_MANAGER
|
||||
|
||||
#include <stdio.h>
|
||||
extern "C" {
|
||||
#if defined(USE_SYSTEM_LIBJPEG)
|
||||
#include <jpeglib.h>
|
||||
#else
|
||||
#include "jpeglib.h"
|
||||
#endif
|
||||
}
|
||||
|
||||
namespace webrtc
|
||||
{
|
||||
|
||||
// Source manager:
|
||||
|
||||
|
||||
// a general function that will set these values
|
||||
void
|
||||
jpegSetSrcBuffer(j_decompress_ptr cinfo, JOCTET* srcBuffer, size_t bufferSize);
|
||||
|
||||
|
||||
// Initialize source. This is called by jpeg_read_header() before any
|
||||
// data is actually read.
|
||||
|
||||
void
|
||||
initSrc(j_decompress_ptr cinfo);
|
||||
|
||||
|
||||
// Fill input buffer
|
||||
// This is called whenever bytes_in_buffer has reached zero and more
|
||||
// data is wanted.
|
||||
|
||||
boolean
|
||||
fillInputBuffer(j_decompress_ptr cinfo);
|
||||
|
||||
// Skip input data
|
||||
// Skip num_bytes worth of data.
|
||||
|
||||
void
|
||||
skipInputData(j_decompress_ptr cinfo, long num_bytes);
|
||||
|
||||
|
||||
|
||||
|
||||
// Terminate source
|
||||
void
|
||||
termSource (j_decompress_ptr cinfo);
|
||||
|
||||
} // end of namespace webrtc
|
||||
|
||||
|
||||
#endif /* WEBRTC_COMMON_VIDEO_JPEG_DATA_MANAGER */
|
@@ -1,72 +0,0 @@
|
||||
/*
|
||||
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifndef WEBRTC_COMMON_VIDEO_JPEG
|
||||
#define WEBRTC_COMMON_VIDEO_JPEG
|
||||
|
||||
#include "webrtc/common_video/interface/i420_video_frame.h"
|
||||
#include "webrtc/common_video/interface/video_image.h" // EncodedImage
|
||||
#include "webrtc/typedefs.h"
|
||||
|
||||
// jpeg forward declaration
|
||||
struct jpeg_compress_struct;
|
||||
|
||||
namespace webrtc
|
||||
{
|
||||
|
||||
// TODO(mikhal): Move this to LibYuv wrapper, when LibYuv will have a JPG
|
||||
// Encode.
|
||||
class JpegEncoder
|
||||
{
|
||||
public:
|
||||
JpegEncoder();
|
||||
~JpegEncoder();
|
||||
|
||||
// SetFileName
|
||||
// Input:
|
||||
// - fileName - Pointer to input vector (should be less than 256) to which the
|
||||
// compressed file will be written to
|
||||
// Output:
|
||||
// - 0 : OK
|
||||
// - (-1) : Error
|
||||
int32_t SetFileName(const char* fileName);
|
||||
|
||||
// Encode an I420 image. The encoded image is saved to a file
|
||||
//
|
||||
// Input:
|
||||
// - inputImage : Image to be encoded
|
||||
//
|
||||
// Output:
|
||||
// - 0 : OK
|
||||
// - (-1) : Error
|
||||
int32_t Encode(const I420VideoFrame& inputImage);
|
||||
|
||||
private:
|
||||
|
||||
jpeg_compress_struct* _cinfo;
|
||||
char _fileName[257];
|
||||
};
|
||||
|
||||
// Decodes a JPEG-stream
|
||||
// Supports 1 image component. 3 interleaved image components,
|
||||
// YCbCr sub-sampling 4:4:4, 4:2:2, 4:2:0.
|
||||
//
|
||||
// Input:
|
||||
// - input_image : encoded image to be decoded.
|
||||
// - output_image : VideoFrame to store decoded output.
|
||||
//
|
||||
// Output:
|
||||
// - 0 : OK
|
||||
// - (-1) : Error
|
||||
// - (-2) : Unsupported format
|
||||
int ConvertJpegToI420(const EncodedImage& input_image,
|
||||
I420VideoFrame* output_image);
|
||||
}
|
||||
#endif /* WEBRTC_COMMON_VIDEO_JPEG */
|
@@ -1,234 +0,0 @@
|
||||
/*
|
||||
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#if defined(WIN32)
|
||||
#include <basetsd.h>
|
||||
#endif
|
||||
#include <setjmp.h>
|
||||
#include <stdio.h>
|
||||
#include <string.h>
|
||||
|
||||
// NOTE(ajm): Path provided by gyp.
|
||||
#include "libyuv.h" // NOLINT
|
||||
#include "libyuv/mjpeg_decoder.h" // NOLINT
|
||||
|
||||
#include "webrtc/common_video/jpeg/data_manager.h"
|
||||
#include "webrtc/common_video/jpeg/include/jpeg.h"
|
||||
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
|
||||
|
||||
extern "C" {
|
||||
#if defined(USE_SYSTEM_LIBJPEG)
|
||||
#include <jpeglib.h>
|
||||
#else
|
||||
#include "jpeglib.h"
|
||||
#endif
|
||||
}
|
||||
|
||||
|
||||
namespace webrtc
|
||||
{
|
||||
|
||||
// Error handler
|
||||
struct myErrorMgr {
|
||||
|
||||
struct jpeg_error_mgr pub;
|
||||
jmp_buf setjmp_buffer;
|
||||
};
|
||||
typedef struct myErrorMgr * myErrorPtr;
|
||||
|
||||
METHODDEF(void)
|
||||
MyErrorExit (j_common_ptr cinfo)
|
||||
{
|
||||
myErrorPtr myerr = (myErrorPtr) cinfo->err;
|
||||
|
||||
// Return control to the setjmp point
|
||||
longjmp(myerr->setjmp_buffer, 1);
|
||||
}
|
||||
|
||||
JpegEncoder::JpegEncoder()
|
||||
{
|
||||
_cinfo = new jpeg_compress_struct;
|
||||
strcpy(_fileName, "Snapshot.jpg");
|
||||
}
|
||||
|
||||
JpegEncoder::~JpegEncoder()
|
||||
{
|
||||
if (_cinfo != NULL)
|
||||
{
|
||||
delete _cinfo;
|
||||
_cinfo = NULL;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
int32_t
|
||||
JpegEncoder::SetFileName(const char* fileName)
|
||||
{
|
||||
if (!fileName)
|
||||
{
|
||||
return -1;
|
||||
}
|
||||
|
||||
if (fileName)
|
||||
{
|
||||
strncpy(_fileName, fileName, 256);
|
||||
_fileName[256] = 0;
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
|
||||
int32_t
|
||||
JpegEncoder::Encode(const I420VideoFrame& inputImage)
|
||||
{
|
||||
if (inputImage.IsZeroSize())
|
||||
{
|
||||
return -1;
|
||||
}
|
||||
if (inputImage.width() < 1 || inputImage.height() < 1)
|
||||
{
|
||||
return -1;
|
||||
}
|
||||
|
||||
FILE* outFile = NULL;
|
||||
|
||||
const int width = inputImage.width();
|
||||
const int height = inputImage.height();
|
||||
|
||||
// Set error handler
|
||||
myErrorMgr jerr;
|
||||
_cinfo->err = jpeg_std_error(&jerr.pub);
|
||||
jerr.pub.error_exit = MyErrorExit;
|
||||
// Establish the setjmp return context
|
||||
if (setjmp(jerr.setjmp_buffer))
|
||||
{
|
||||
// If we get here, the JPEG code has signaled an error.
|
||||
jpeg_destroy_compress(_cinfo);
|
||||
if (outFile != NULL)
|
||||
{
|
||||
fclose(outFile);
|
||||
}
|
||||
return -1;
|
||||
}
|
||||
|
||||
if ((outFile = fopen(_fileName, "wb")) == NULL)
|
||||
{
|
||||
return -2;
|
||||
}
|
||||
// Create a compression object
|
||||
jpeg_create_compress(_cinfo);
|
||||
|
||||
// Setting destination file
|
||||
jpeg_stdio_dest(_cinfo, outFile);
|
||||
|
||||
// Set parameters for compression
|
||||
_cinfo->in_color_space = JCS_YCbCr;
|
||||
jpeg_set_defaults(_cinfo);
|
||||
|
||||
_cinfo->image_width = width;
|
||||
_cinfo->image_height = height;
|
||||
_cinfo->input_components = 3;
|
||||
|
||||
_cinfo->comp_info[0].h_samp_factor = 2; // Y
|
||||
_cinfo->comp_info[0].v_samp_factor = 2;
|
||||
_cinfo->comp_info[1].h_samp_factor = 1; // U
|
||||
_cinfo->comp_info[1].v_samp_factor = 1;
|
||||
_cinfo->comp_info[2].h_samp_factor = 1; // V
|
||||
_cinfo->comp_info[2].v_samp_factor = 1;
|
||||
_cinfo->raw_data_in = TRUE;
|
||||
// Converting to a buffer
|
||||
// TODO(mikhal): This is a tmp implementation. Will update to use LibYuv
|
||||
// Encode when that becomes available.
|
||||
unsigned int length = CalcBufferSize(kI420, width, height);
|
||||
scoped_array<uint8_t> image_buffer(new uint8_t[length]);
|
||||
ExtractBuffer(inputImage, length, image_buffer.get());
|
||||
int height16 = (height + 15) & ~15;
|
||||
uint8_t* imgPtr = image_buffer.get();
|
||||
|
||||
uint8_t* origImagePtr = NULL;
|
||||
if (height16 != height)
|
||||
{
|
||||
// Copy image to an adequate size buffer
|
||||
uint32_t requiredSize = CalcBufferSize(kI420, width, height16);
|
||||
origImagePtr = new uint8_t[requiredSize];
|
||||
memset(origImagePtr, 0, requiredSize);
|
||||
memcpy(origImagePtr, image_buffer.get(), length);
|
||||
imgPtr = origImagePtr;
|
||||
}
|
||||
|
||||
jpeg_start_compress(_cinfo, TRUE);
|
||||
|
||||
JSAMPROW y[16],u[8],v[8];
|
||||
JSAMPARRAY data[3];
|
||||
|
||||
data[0] = y;
|
||||
data[1] = u;
|
||||
data[2] = v;
|
||||
|
||||
int i, j;
|
||||
|
||||
for (j = 0; j < height; j += 16)
|
||||
{
|
||||
for (i = 0; i < 16; i++)
|
||||
{
|
||||
y[i] = (JSAMPLE*)imgPtr + width * (i + j);
|
||||
|
||||
if (i % 2 == 0)
|
||||
{
|
||||
u[i / 2] = (JSAMPLE*) imgPtr + width * height +
|
||||
width / 2 * ((i + j) / 2);
|
||||
v[i / 2] = (JSAMPLE*) imgPtr + width * height +
|
||||
width * height / 4 + width / 2 * ((i + j) / 2);
|
||||
}
|
||||
}
|
||||
jpeg_write_raw_data(_cinfo, data, 16);
|
||||
}
|
||||
|
||||
jpeg_finish_compress(_cinfo);
|
||||
jpeg_destroy_compress(_cinfo);
|
||||
|
||||
fclose(outFile);
|
||||
|
||||
if (origImagePtr != NULL)
|
||||
{
|
||||
delete [] origImagePtr;
|
||||
}
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
int ConvertJpegToI420(const EncodedImage& input_image,
|
||||
I420VideoFrame* output_image) {
|
||||
|
||||
if (output_image == NULL)
|
||||
return -1;
|
||||
// TODO(mikhal): Update to use latest API from LibYuv when that becomes
|
||||
// available.
|
||||
libyuv::MJpegDecoder jpeg_decoder;
|
||||
bool ret = jpeg_decoder.LoadFrame(input_image._buffer, input_image._size);
|
||||
if (ret == false)
|
||||
return -1;
|
||||
if (jpeg_decoder.GetNumComponents() == 4)
|
||||
return -2; // not supported.
|
||||
int width = jpeg_decoder.GetWidth();
|
||||
int height = jpeg_decoder.GetHeight();
|
||||
output_image->CreateEmptyFrame(width, height, width,
|
||||
(width + 1) / 2, (width + 1) / 2);
|
||||
return ConvertToI420(kMJPG,
|
||||
input_image._buffer,
|
||||
0, 0, // no cropping
|
||||
width, height,
|
||||
input_image._size,
|
||||
kRotateNone,
|
||||
output_image);
|
||||
}
|
||||
|
||||
|
||||
}
|
@@ -1,114 +0,0 @@
|
||||
/*
|
||||
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#include <cstdio>
|
||||
#include <string>
|
||||
|
||||
#include "testing/gtest/include/gtest/gtest.h"
|
||||
#include "webrtc/common_video/interface/video_image.h"
|
||||
#include "webrtc/common_video/jpeg/include/jpeg.h"
|
||||
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
|
||||
#include "webrtc/modules/interface/module_common_types.h"
|
||||
#include "webrtc/test/testsupport/fileutils.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
const int kImageWidth = 640;
|
||||
const int kImageHeight = 480;
|
||||
|
||||
class JpegTest: public testing::Test {
|
||||
protected:
|
||||
JpegTest()
|
||||
: input_filename_(webrtc::test::ProjectRootPath() +
|
||||
"data/common_video/jpeg/webrtc_logo.jpg"),
|
||||
decoded_filename_(webrtc::test::OutputPath() + "TestJpegDec.yuv"),
|
||||
encoded_filename_(webrtc::test::OutputPath() + "TestJpegEnc.jpg"),
|
||||
encoded_buffer_(NULL) {}
|
||||
virtual ~JpegTest() {}
|
||||
|
||||
void SetUp() {
|
||||
encoder_ = new JpegEncoder();
|
||||
}
|
||||
|
||||
void TearDown() {
|
||||
if (encoded_buffer_ != NULL) {
|
||||
if (encoded_buffer_->_buffer != NULL) {
|
||||
delete [] encoded_buffer_->_buffer;
|
||||
}
|
||||
delete encoded_buffer_;
|
||||
}
|
||||
delete encoder_;
|
||||
}
|
||||
|
||||
// Reads an encoded image. Caller will have to deallocate the memory of this
|
||||
// object and it's _buffer byte array.
|
||||
EncodedImage* ReadEncodedImage(std::string input_filename) {
|
||||
FILE* open_file = fopen(input_filename.c_str(), "rb");
|
||||
assert(open_file != NULL);
|
||||
size_t length = webrtc::test::GetFileSize(input_filename);
|
||||
EncodedImage* encoded_buffer = new EncodedImage();
|
||||
encoded_buffer->_buffer = new uint8_t[length];
|
||||
encoded_buffer->_size = length;
|
||||
encoded_buffer->_length = length;
|
||||
if (fread(encoded_buffer->_buffer, 1, length, open_file) != length) {
|
||||
ADD_FAILURE() << "Error reading file:" << input_filename;
|
||||
}
|
||||
fclose(open_file);
|
||||
return encoded_buffer;
|
||||
}
|
||||
|
||||
std::string input_filename_;
|
||||
std::string decoded_filename_;
|
||||
std::string encoded_filename_;
|
||||
EncodedImage* encoded_buffer_;
|
||||
JpegEncoder* encoder_;
|
||||
};
|
||||
|
||||
TEST_F(JpegTest, Decode) {
|
||||
encoded_buffer_ = ReadEncodedImage(input_filename_);
|
||||
I420VideoFrame image_buffer;
|
||||
EXPECT_EQ(0, ConvertJpegToI420(*encoded_buffer_, &image_buffer));
|
||||
EXPECT_FALSE(image_buffer.IsZeroSize());
|
||||
EXPECT_EQ(kImageWidth, image_buffer.width());
|
||||
EXPECT_EQ(kImageHeight, image_buffer.height());
|
||||
}
|
||||
|
||||
TEST_F(JpegTest, EncodeInvalidInputs) {
|
||||
I420VideoFrame empty;
|
||||
empty.set_width(164);
|
||||
empty.set_height(164);
|
||||
EXPECT_EQ(-1, encoder_->SetFileName(0));
|
||||
// Test empty (null) frame.
|
||||
EXPECT_EQ(-1, encoder_->Encode(empty));
|
||||
// Create empty frame (allocate memory) - arbitrary dimensions.
|
||||
empty.CreateEmptyFrame(10, 10, 10, 5, 5);
|
||||
empty.ResetSize();
|
||||
EXPECT_EQ(-1, encoder_->Encode(empty));
|
||||
}
|
||||
|
||||
TEST_F(JpegTest, Encode) {
|
||||
// Decode our input image then encode it again to a new file:
|
||||
encoded_buffer_ = ReadEncodedImage(input_filename_);
|
||||
I420VideoFrame image_buffer;
|
||||
EXPECT_EQ(0, ConvertJpegToI420(*encoded_buffer_, &image_buffer));
|
||||
|
||||
EXPECT_EQ(0, encoder_->SetFileName(encoded_filename_.c_str()));
|
||||
EXPECT_EQ(0, encoder_->Encode(image_buffer));
|
||||
|
||||
// Save decoded image to file.
|
||||
FILE* save_file = fopen(decoded_filename_.c_str(), "wb");
|
||||
if (PrintI420VideoFrame(image_buffer, save_file)) {
|
||||
return;
|
||||
}
|
||||
fclose(save_file);
|
||||
|
||||
}
|
||||
|
||||
} // namespace webrtc
|
@@ -172,17 +172,6 @@ double I420PSNR(const I420VideoFrame* ref_frame,
|
||||
// Compute SSIM for an I420 frame (all planes).
|
||||
double I420SSIM(const I420VideoFrame* ref_frame,
|
||||
const I420VideoFrame* test_frame);
|
||||
|
||||
// TODO(mikhal): Remove these functions and keep only the above functionality.
|
||||
// Compute PSNR for an I420 buffer (all planes).
|
||||
// Returns the PSNR in decibel, to a maximum of kInfinitePSNR.
|
||||
double I420PSNR(const uint8_t* ref_frame,
|
||||
const uint8_t* test_frame,
|
||||
int width, int height);
|
||||
// Compute SSIM for an I420 buffer (all planes).
|
||||
double I420SSIM(const uint8_t* ref_frame,
|
||||
const uint8_t* test_frame,
|
||||
int width, int height);
|
||||
}
|
||||
|
||||
#endif // WEBRTC_COMMON_VIDEO_LIBYUV_INCLUDE_WEBRTC_LIBYUV_H_
|
||||
|
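With the raw-buffer I420PSNR/I420SSIM overloads removed above, callers are expected to wrap their buffers in I420VideoFrame objects and use the frame-based functions, which is what the test changes below do. A hedged sketch of that conversion, assuming a packed I420 buffer with a luma stride of width and chroma strides of (width + 1) / 2:

#include "webrtc/common_video/interface/i420_video_frame.h"
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"

double BufferPsnr(const uint8_t* ref, const uint8_t* test,
                  int width, int height) {
  const int half_width = (width + 1) / 2;
  const int half_height = (height + 1) / 2;
  const int size_y = width * height;
  const int size_uv = half_width * half_height;

  webrtc::I420VideoFrame ref_frame;
  webrtc::I420VideoFrame test_frame;
  ref_frame.CreateFrame(size_y, ref,
                        size_uv, ref + size_y,
                        size_uv, ref + size_y + size_uv,
                        width, height, width, half_width, half_width);
  test_frame.CreateFrame(size_y, test,
                         size_uv, test + size_y,
                         size_uv, test + size_y + size_uv,
                         width, height, width, half_width, half_width);
  return webrtc::I420PSNR(&ref_frame, &test_frame);
}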
@@ -211,7 +211,9 @@ TEST_F(TestLibYuv, ConvertTest) {
|
||||
return;
|
||||
}
|
||||
|
||||
psnr = I420PSNR(orig_buffer_.get(), res_i420_buffer.get(), width_, height_);
|
||||
ConvertToI420(kI420, res_i420_buffer.get(), 0, 0,
|
||||
width_, height_, 0, kRotateNone, &res_i420_frame);
|
||||
psnr = I420PSNR(&orig_frame_, &res_i420_frame);
|
||||
EXPECT_EQ(48.0, psnr);
|
||||
j++;
|
||||
|
||||
|
@@ -55,7 +55,6 @@ int Scaler::Scale(const I420VideoFrame& src_frame,
|
||||
|
||||
// Making sure that destination frame is of sufficient size.
|
||||
// Aligning stride values based on width.
|
||||
|
||||
dst_frame->CreateEmptyFrame(dst_width_, dst_height_,
|
||||
dst_width_, (dst_width_ + 1) / 2,
|
||||
(dst_width_ + 1) / 2);
|
||||
@@ -77,7 +76,6 @@ int Scaler::Scale(const I420VideoFrame& src_frame,
|
||||
libyuv::FilterMode(method_));
|
||||
}
|
||||
|
||||
// TODO(mikhal): Add support for more types.
|
||||
bool Scaler::SupportedVideoType(VideoType src_video_type,
|
||||
VideoType dst_video_type) {
|
||||
if (src_video_type != dst_video_type)
|
||||
|
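The Scaler hunks above touch Scaler::Scale(), which allocates the destination frame itself (CreateEmptyFrame with strides aligned to the destination width). A hedged usage sketch; Scaler::Set's exact signature is assumed from the scaler header, which is not shown in this diff:

#include "webrtc/common_video/libyuv/include/scaler.h"

int DownscaleByHalf(const webrtc::I420VideoFrame& src,
                    webrtc::I420VideoFrame* dst) {
  webrtc::Scaler scaler;
  const int ret = scaler.Set(src.width(), src.height(),
                             src.width() / 2, src.height() / 2,
                             webrtc::kI420, webrtc::kI420, webrtc::kScaleBox);
  if (ret < 0)
    return ret;
  // Scale() sizes |*dst| to the destination dimensions internally.
  return scaler.Scale(src, dst);
}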
@@ -15,6 +15,7 @@
|
||||
#include "webrtc/common_video/libyuv/include/scaler.h"
|
||||
#include "webrtc/system_wrappers/interface/tick_util.h"
|
||||
#include "webrtc/test/testsupport/fileutils.h"
|
||||
#include "webrtc/test/testsupport/gtest_disable.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
@@ -28,9 +29,6 @@ class TestScaler : public ::testing::Test {
|
||||
FILE* source_file, std::string out_name,
|
||||
int src_width, int src_height,
|
||||
int dst_width, int dst_height);
|
||||
|
||||
// TODO(mikhal): add a sequence reader to libyuv.
|
||||
|
||||
// Computes the sequence average PSNR between an input sequence in
|
||||
// |input_file| and an output sequence with filename |out_name|. |width| and
|
||||
// |height| are the frame sizes of both sequences.
|
||||
@@ -117,7 +115,7 @@ TEST_F(TestScaler, ScaleSendingBufferTooSmall) {
|
||||
}
|
||||
|
||||
//TODO (mikhal): Converge the test into one function that accepts the method.
|
||||
TEST_F(TestScaler, PointScaleTest) {
|
||||
TEST_F(TestScaler, DISABLED_ON_ANDROID(PointScaleTest)) {
|
||||
double avg_psnr;
|
||||
FILE* source_file2;
|
||||
ScaleMethod method = kScalePoint;
|
||||
@@ -162,7 +160,7 @@ TEST_F(TestScaler, PointScaleTest) {
|
||||
source_file_, out_name,
|
||||
width_, height_,
|
||||
400, 300);
|
||||
// Dowsample to odd size frame and scale back up.
|
||||
// Down-sample to odd size frame and scale back up.
|
||||
out_name = webrtc::test::OutputPath() + "LibYuvTest_PointScale_282_231.yuv";
|
||||
ScaleSequence(method,
|
||||
source_file_, out_name,
|
||||
@@ -182,7 +180,7 @@ TEST_F(TestScaler, PointScaleTest) {
|
||||
// average PSNR under same conditions.
|
||||
ASSERT_GT(avg_psnr, 25.8);
|
||||
ASSERT_EQ(0, fclose(source_file2));
|
||||
// Upsample to odd size frame and scale back down.
|
||||
// Up-sample to odd size frame and scale back down.
|
||||
out_name = webrtc::test::OutputPath() + "LibYuvTest_PointScale_699_531.yuv";
|
||||
ScaleSequence(method,
|
||||
source_file_, out_name,
|
||||
@@ -204,7 +202,7 @@ TEST_F(TestScaler, PointScaleTest) {
|
||||
ASSERT_EQ(0, fclose(source_file2));
|
||||
}
|
||||
|
||||
TEST_F(TestScaler, BiLinearScaleTest) {
|
||||
TEST_F(TestScaler, DISABLED_ON_ANDROID(BiLinearScaleTest)) {
|
||||
double avg_psnr;
|
||||
FILE* source_file2;
|
||||
ScaleMethod method = kScaleBilinear;
|
||||
@@ -214,7 +212,7 @@ TEST_F(TestScaler, BiLinearScaleTest) {
|
||||
source_file_, out_name,
|
||||
width_, height_,
|
||||
width_ / 2, height_ / 2);
|
||||
// Upsample back up and check PSNR.
|
||||
// Up-sample back up and check PSNR.
|
||||
source_file2 = fopen(out_name.c_str(), "rb");
|
||||
out_name = webrtc::test::OutputPath() + "LibYuvTest_BilinearScale_352_288_"
|
||||
"upfrom_176_144.yuv";
|
||||
@@ -254,7 +252,7 @@ TEST_F(TestScaler, BiLinearScaleTest) {
|
||||
source_file_, out_name,
|
||||
width_, height_,
|
||||
400, 300);
|
||||
// Downsample to odd size frame and scale back up.
|
||||
// Down-sample to odd size frame and scale back up.
|
||||
out_name = webrtc::test::OutputPath() +
|
||||
"LibYuvTest_BilinearScale_282_231.yuv";
|
||||
ScaleSequence(method,
|
||||
@ -298,7 +296,7 @@ TEST_F(TestScaler, BiLinearScaleTest) {
|
||||
ASSERT_EQ(0, fclose(source_file2));
|
||||
}
|
||||
|
||||
TEST_F(TestScaler, BoxScaleTest) {
|
||||
TEST_F(TestScaler, DISABLED_ON_ANDROID(BoxScaleTest)) {
|
||||
double avg_psnr;
|
||||
FILE* source_file2;
|
||||
ScaleMethod method = kScaleBox;
|
||||
@ -308,7 +306,7 @@ TEST_F(TestScaler, BoxScaleTest) {
|
||||
source_file_, out_name,
|
||||
width_, height_,
|
||||
width_ / 2, height_ / 2);
|
||||
// Upsample back up and check PSNR.
|
||||
// Up-sample back up and check PSNR.
|
||||
source_file2 = fopen(out_name.c_str(), "rb");
|
||||
out_name = webrtc::test::OutputPath() + "LibYuvTest_BoxScale_352_288_"
|
||||
"upfrom_176_144.yuv";
|
||||
@ -343,7 +341,7 @@ TEST_F(TestScaler, BoxScaleTest) {
|
||||
source_file_, out_name,
|
||||
width_, height_,
|
||||
400, 300);
|
||||
// Downsample to odd size frame and scale back up.
|
||||
// Down-sample to odd size frame and scale back up.
|
||||
out_name = webrtc::test::OutputPath() + "LibYuvTest_BoxScale_282_231.yuv";
|
||||
ScaleSequence(method,
|
||||
source_file_, out_name,
|
||||
@ -363,7 +361,7 @@ TEST_F(TestScaler, BoxScaleTest) {
|
||||
// average PSNR under same conditions.
|
||||
ASSERT_GT(avg_psnr, 29.7);
|
||||
ASSERT_EQ(0, fclose(source_file2));
|
||||
// Upsample to odd size frame and scale back down.
|
||||
// Up-sample to odd size frame and scale back down.
|
||||
out_name = webrtc::test::OutputPath() + "LibYuvTest_BoxScale_699_531.yuv";
|
||||
ScaleSequence(method,
|
||||
source_file_, out_name,
|
||||
@ -400,6 +398,7 @@ double TestScaler::ComputeAvgSequencePSNR(FILE* input_file,
|
||||
|
||||
int frame_count = 0;
|
||||
double avg_psnr = 0;
|
||||
I420VideoFrame in_frame, out_frame;
|
||||
while (feof(input_file) == 0) {
|
||||
if ((size_t)required_size !=
|
||||
fread(input_buffer, 1, required_size, input_file)) {
|
||||
@ -410,7 +409,9 @@ double TestScaler::ComputeAvgSequencePSNR(FILE* input_file,
|
||||
break;
|
||||
}
|
||||
frame_count++;
|
||||
double psnr = I420PSNR(input_buffer, output_buffer, width, height);
|
||||
ConvertFromI420(in_frame, kI420, 0, input_buffer);
|
||||
ConvertFromI420(out_frame, kI420, 0, output_buffer);
|
||||
double psnr = I420PSNR(&in_frame, &out_frame);
|
||||
avg_psnr += psnr;
|
||||
}
|
||||
avg_psnr = avg_psnr / frame_count;
|
||||
|
@ -383,60 +383,4 @@ double I420SSIM(const I420VideoFrame* ref_frame,
|
||||
test_frame->stride(kVPlane),
|
||||
test_frame->width(), test_frame->height());
|
||||
}
|
||||
|
||||
// Compute PSNR for an I420 frame (all planes)
|
||||
double I420PSNR(const uint8_t* ref_frame,
|
||||
const uint8_t* test_frame,
|
||||
int width, int height) {
|
||||
if (!ref_frame || !test_frame)
|
||||
return -1;
|
||||
else if (height < 0 || width < 0)
|
||||
return -1;
|
||||
int half_width = (width + 1) >> 1;
|
||||
int half_height = (height + 1) >> 1;
|
||||
const uint8_t* src_y_a = ref_frame;
|
||||
const uint8_t* src_u_a = src_y_a + width * height;
|
||||
const uint8_t* src_v_a = src_u_a + half_width * half_height;
|
||||
const uint8_t* src_y_b = test_frame;
|
||||
const uint8_t* src_u_b = src_y_b + width * height;
|
||||
const uint8_t* src_v_b = src_u_b + half_width * half_height;
|
||||
// In the following: stride is determined by width.
|
||||
double psnr = libyuv::I420Psnr(src_y_a, width,
|
||||
src_u_a, half_width,
|
||||
src_v_a, half_width,
|
||||
src_y_b, width,
|
||||
src_u_b, half_width,
|
||||
src_v_b, half_width,
|
||||
width, height);
|
||||
  // LibYuv sets the max psnr value to 128, we restrict it here.
  // In case of 0 mse in one frame, 128 can skew the results significantly.
  return (psnr > kPerfectPSNR) ? kPerfectPSNR : psnr;
}
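The pointer arithmetic in I420PSNR/I420SSIM above assumes a packed I420 buffer; the helper below is an editor's sketch of that layout, not part of the patch. For 352x288 it gives a Y plane of 101376 bytes and U/V planes of 25344 bytes each, 152064 bytes in total.

/* Sketch only: plane offsets in a packed I420 buffer, mirroring the code above. */
static void I420PlaneOffsets(int width, int height,
                             size_t* u_offset, size_t* v_offset, size_t* total) {
  int half_width = (width + 1) >> 1;
  int half_height = (height + 1) >> 1;
  *u_offset = (size_t)width * height;                        /* U follows Y. */
  *v_offset = *u_offset + (size_t)half_width * half_height;  /* V follows U. */
  *total = *v_offset + (size_t)half_width * half_height;
}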
// Compute SSIM for an I420 frame (all planes)
|
||||
double I420SSIM(const uint8_t* ref_frame,
|
||||
const uint8_t* test_frame,
|
||||
int width, int height) {
|
||||
if (!ref_frame || !test_frame)
|
||||
return -1;
|
||||
else if (height < 0 || width < 0)
|
||||
return -1;
|
||||
int half_width = (width + 1) >> 1;
|
||||
int half_height = (height + 1) >> 1;
|
||||
const uint8_t* src_y_a = ref_frame;
|
||||
const uint8_t* src_u_a = src_y_a + width * height;
|
||||
const uint8_t* src_v_a = src_u_a + half_width * half_height;
|
||||
const uint8_t* src_y_b = test_frame;
|
||||
const uint8_t* src_u_b = src_y_b + width * height;
|
||||
const uint8_t* src_v_b = src_u_b + half_width * half_height;
|
||||
int stride_y = width;
|
||||
int stride_uv = half_width;
|
||||
return libyuv::I420Ssim(src_y_a, stride_y,
|
||||
src_u_a, stride_uv,
|
||||
src_v_a, stride_uv,
|
||||
src_y_b, stride_y,
|
||||
src_u_b, stride_uv,
|
||||
src_v_b, stride_uv,
|
||||
width, height);
|
||||
}
|
||||
|
||||
} // namespace webrtc
|
||||
|
@ -10,8 +10,9 @@
|
||||
|
||||
#include "webrtc/common_video/plane.h"
|
||||
|
||||
#include <string.h> // memcpy
|
||||
|
||||
#include <algorithm> // swap
|
||||
#include <cstring> // memcpy
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
|
@ -0,0 +1,80 @@
|
||||
/*
|
||||
* Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
#include "webrtc/common_video/test/frame_generator.h"
|
||||
|
||||
#include <stdio.h>
|
||||
|
||||
#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
|
||||
|
||||
namespace webrtc {
|
||||
namespace test {
|
||||
namespace {
|
||||
|
||||
class YuvFileGenerator : public FrameGenerator {
|
||||
public:
|
||||
YuvFileGenerator(FILE* file, size_t width, size_t height)
|
||||
: file_(file), width_(width), height_(height) {
|
||||
assert(file);
|
||||
assert(width > 0);
|
||||
assert(height > 0);
|
||||
frame_size_ = CalcBufferSize(
|
||||
kI420, static_cast<int>(width_), static_cast<int>(height_));
|
||||
frame_buffer_ = new uint8_t[frame_size_];
|
||||
frame_.CreateEmptyFrame(static_cast<int>(width),
|
||||
static_cast<int>(height),
|
||||
static_cast<int>(width),
|
||||
static_cast<int>((width + 1) / 2),
|
||||
static_cast<int>((width + 1) / 2));
|
||||
}
|
||||
|
||||
virtual ~YuvFileGenerator() {
|
||||
fclose(file_);
|
||||
delete[] frame_buffer_;
|
||||
}
|
||||
|
||||
virtual I420VideoFrame& NextFrame() OVERRIDE {
|
||||
size_t count = fread(frame_buffer_, 1, frame_size_, file_);
|
||||
if (count < frame_size_) {
|
||||
rewind(file_);
|
||||
return NextFrame();
|
||||
}
|
||||
|
||||
ConvertToI420(kI420,
|
||||
frame_buffer_,
|
||||
0,
|
||||
0,
|
||||
static_cast<int>(width_),
|
||||
static_cast<int>(height_),
|
||||
0,
|
||||
kRotateNone,
|
||||
&frame_);
|
||||
return frame_;
|
||||
}
|
||||
|
||||
private:
|
||||
FILE* file_;
|
||||
size_t width_;
|
||||
size_t height_;
|
||||
size_t frame_size_;
|
||||
uint8_t* frame_buffer_;
|
||||
I420VideoFrame frame_;
|
||||
};
|
||||
} // namespace
|
||||
|
||||
FrameGenerator* FrameGenerator::CreateFromYuvFile(const char* file,
                                                  size_t width,
                                                  size_t height) {
  FILE* file_handle = fopen(file, "r");
  assert(file_handle);
  return new YuvFileGenerator(file_handle, width, height);
}
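A minimal usage sketch for this factory (editor's illustration; the file name and CIF dimensions are placeholders, not taken from the patch):

// Hypothetical caller: pull looping frames from a YUV file for a test.
webrtc::test::FrameGenerator* generator =
    webrtc::test::FrameGenerator::CreateFromYuvFile("input_cif.yuv", 352, 288);
webrtc::I420VideoFrame& frame = generator->NextFrame();  // valid until the next call
// ... feed |frame| to the component under test ...
delete generator;  // The YuvFileGenerator destructor closes the file.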
|
||||
|
||||
} // namespace test
|
||||
} // namespace webrtc
|
@ -0,0 +1,34 @@
|
||||
/*
|
||||
* Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
#ifndef WEBRTC_COMMON_VIDEO_TEST_FRAME_GENERATOR_H_
|
||||
#define WEBRTC_COMMON_VIDEO_TEST_FRAME_GENERATOR_H_
|
||||
|
||||
#include "webrtc/common_video/interface/i420_video_frame.h"
|
||||
#include "webrtc/typedefs.h"
|
||||
|
||||
namespace webrtc {
|
||||
namespace test {
|
||||
|
||||
class FrameGenerator {
|
||||
public:
|
||||
FrameGenerator() {}
|
||||
virtual ~FrameGenerator() {}
|
||||
|
||||
// Returns video frame that remains valid until next call.
|
||||
virtual I420VideoFrame& NextFrame() = 0;
|
||||
|
||||
static FrameGenerator* CreateFromYuvFile(const char* file,
|
||||
size_t width,
|
||||
size_t height);
|
||||
};
|
||||
} // namespace test
|
||||
} // namespace webrtc
|
||||
|
||||
#endif // WEBRTC_COMMON_VIDEO_TEST_FRAME_GENERATOR_H_
|
108
media/webrtc/trunk/webrtc/common_video/texture_video_frame.cc
Normal file
@ -0,0 +1,108 @@
|
||||
/*
|
||||
* Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#include "webrtc/common_video/interface/texture_video_frame.h"
|
||||
|
||||
#include <assert.h>
|
||||
|
||||
#include "webrtc/system_wrappers/interface/trace.h"
|
||||
|
||||
#define NOTREACHED() \
  do { \
    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, -1, "Not reached"); \
    assert(false); \
  } while (0)
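The do/while (0) wrapper is the standard trick that makes a multi-statement macro behave like a single statement; a sketch of why it matters (editor's illustration, the surrounding calls are hypothetical):

/* Without the do/while (0), the two statements would break an un-braced if/else: */
if (type != kYPlane)
  NOTREACHED();     /* expands to one statement; the caller's ';' terminates it */
else
  HandleYPlane();   /* hypothetical call, shown only to illustrate the else binding */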
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
TextureVideoFrame::TextureVideoFrame(NativeHandle* handle,
|
||||
int width,
|
||||
int height,
|
||||
uint32_t timestamp,
|
||||
int64_t render_time_ms)
|
||||
: handle_(handle) {
|
||||
set_width(width);
|
||||
set_height(height);
|
||||
set_timestamp(timestamp);
|
||||
set_render_time_ms(render_time_ms);
|
||||
}
|
||||
|
||||
TextureVideoFrame::~TextureVideoFrame() {}
|
||||
|
||||
int TextureVideoFrame::CreateEmptyFrame(int width,
|
||||
int height,
|
||||
int stride_y,
|
||||
int stride_u,
|
||||
int stride_v) {
|
||||
NOTREACHED();
|
||||
return -1;
|
||||
}
|
||||
|
||||
int TextureVideoFrame::CreateFrame(int size_y,
|
||||
const uint8_t* buffer_y,
|
||||
int size_u,
|
||||
const uint8_t* buffer_u,
|
||||
int size_v,
|
||||
const uint8_t* buffer_v,
|
||||
int width,
|
||||
int height,
|
||||
int stride_y,
|
||||
int stride_u,
|
||||
int stride_v) {
|
||||
NOTREACHED();
|
||||
return -1;
|
||||
}
|
||||
|
||||
int TextureVideoFrame::CopyFrame(const I420VideoFrame& videoFrame) {
|
||||
NOTREACHED();
|
||||
return -1;
|
||||
}
|
||||
|
||||
void TextureVideoFrame::SwapFrame(I420VideoFrame* videoFrame) {
|
||||
NOTREACHED();
|
||||
}
|
||||
|
||||
uint8_t* TextureVideoFrame::buffer(PlaneType type) {
|
||||
NOTREACHED();
|
||||
return NULL;
|
||||
}
|
||||
|
||||
const uint8_t* TextureVideoFrame::buffer(PlaneType type) const {
|
||||
NOTREACHED();
|
||||
return NULL;
|
||||
}
|
||||
|
||||
int TextureVideoFrame::allocated_size(PlaneType type) const {
|
||||
NOTREACHED();
|
||||
return -1;
|
||||
}
|
||||
|
||||
int TextureVideoFrame::stride(PlaneType type) const {
|
||||
NOTREACHED();
|
||||
return -1;
|
||||
}
|
||||
|
||||
bool TextureVideoFrame::IsZeroSize() const {
|
||||
NOTREACHED();
|
||||
return true;
|
||||
}
|
||||
|
||||
void TextureVideoFrame::ResetSize() {
|
||||
NOTREACHED();
|
||||
}
|
||||
|
||||
void* TextureVideoFrame::native_handle() const { return handle_.get(); }
|
||||
|
||||
int TextureVideoFrame::CheckDimensions(
|
||||
int width, int height, int stride_y, int stride_u, int stride_v) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
} // namespace webrtc
|
@ -0,0 +1,58 @@
|
||||
/*
|
||||
* Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#include "testing/gtest/include/gtest/gtest.h"
|
||||
#include "webrtc/common_video/interface/native_handle.h"
|
||||
#include "webrtc/common_video/interface/texture_video_frame.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
class NativeHandleImpl : public NativeHandle {
|
||||
public:
|
||||
NativeHandleImpl() : ref_count_(0) {}
|
||||
virtual ~NativeHandleImpl() {}
|
||||
virtual int32_t AddRef() { return ++ref_count_; }
|
||||
virtual int32_t Release() { return --ref_count_; }
|
||||
virtual void* GetHandle() { return NULL; }
|
||||
|
||||
int32_t ref_count() { return ref_count_; }
|
||||
private:
|
||||
int32_t ref_count_;
|
||||
};
|
||||
|
||||
TEST(TestTextureVideoFrame, InitialValues) {
|
||||
NativeHandleImpl handle;
|
||||
TextureVideoFrame frame(&handle, 640, 480, 100, 10);
|
||||
EXPECT_EQ(640, frame.width());
|
||||
EXPECT_EQ(480, frame.height());
|
||||
EXPECT_EQ(100u, frame.timestamp());
|
||||
EXPECT_EQ(10, frame.render_time_ms());
|
||||
EXPECT_EQ(&handle, frame.native_handle());
|
||||
|
||||
EXPECT_EQ(0, frame.set_width(320));
|
||||
EXPECT_EQ(320, frame.width());
|
||||
EXPECT_EQ(0, frame.set_height(240));
|
||||
EXPECT_EQ(240, frame.height());
|
||||
frame.set_timestamp(200);
|
||||
EXPECT_EQ(200u, frame.timestamp());
|
||||
frame.set_render_time_ms(20);
|
||||
EXPECT_EQ(20, frame.render_time_ms());
|
||||
}
|
||||
|
||||
TEST(TestTextureVideoFrame, RefCount) {
|
||||
NativeHandleImpl handle;
|
||||
EXPECT_EQ(0, handle.ref_count());
|
||||
TextureVideoFrame *frame = new TextureVideoFrame(&handle, 640, 480, 100, 200);
|
||||
EXPECT_EQ(1, handle.ref_count());
|
||||
delete frame;
|
||||
EXPECT_EQ(0, handle.ref_count());
|
||||
}
|
||||
|
||||
} // namespace webrtc
|
@ -11,6 +11,8 @@
|
||||
#ifndef WEBRTC_ENGINE_CONFIGURATIONS_H_
|
||||
#define WEBRTC_ENGINE_CONFIGURATIONS_H_
|
||||
|
||||
#include "webrtc/typedefs.h"
|
||||
|
||||
// ============================================================================
|
||||
// Voice and Video
|
||||
// ============================================================================
|
||||
@ -32,12 +34,14 @@
|
||||
// (which are mandatory and don't have any defines).
|
||||
#define WEBRTC_CODEC_AVT
|
||||
|
||||
// iLBC, G.722, PCM16B and Redundancy coding are excluded from Chromium and
|
||||
// Mozilla builds.
|
||||
// PCM16 is useful for testing and incurs only a small binary size cost.
|
||||
#define WEBRTC_CODEC_PCM16
|
||||
|
||||
// iLBC, G.722, and Redundancy coding are excluded from Chromium and Mozilla
|
||||
// builds to reduce binary size.
|
||||
#if !defined(WEBRTC_CHROMIUM_BUILD) && !defined(WEBRTC_MOZILLA_BUILD)
|
||||
#define WEBRTC_CODEC_ILBC
|
||||
#define WEBRTC_CODEC_G722
|
||||
#define WEBRTC_CODEC_PCM16
|
||||
#define WEBRTC_CODEC_RED
|
||||
#endif // !WEBRTC_CHROMIUM_BUILD && !WEBRTC_MOZILLA_BUILD
|
||||
|
||||
@ -61,7 +65,7 @@
|
||||
#define WEBRTC_VOICE_ENGINE_NR // Near-end NS
|
||||
#define WEBRTC_VOE_EXTERNAL_REC_AND_PLAYOUT
|
||||
|
||||
#ifndef WEBRTC_CHROMIUM_BUILD
|
||||
#if !defined(WEBRTC_ANDROID) && !defined(WEBRTC_IOS)
|
||||
#define WEBRTC_VOICE_ENGINE_TYPING_DETECTION // Typing detection
|
||||
#endif
|
||||
|
||||
|
@ -10,7 +10,7 @@
|
||||
#include <string>
|
||||
|
||||
#include "gtest/gtest.h"
|
||||
#include "testsupport/fileutils.h"
|
||||
#include "webrtc/test/testsupport/fileutils.h"
|
||||
#include "webrtc_cng.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
@ -72,7 +72,7 @@
|
||||
'WEBRTC_LINUX',
|
||||
],
|
||||
}],
|
||||
['target_arch=="arm" and armv7==1', {
|
||||
['(target_arch=="arm" and armv7==1) or target_arch=="armv7"', {
|
||||
'dependencies': [ 'isac_neon', ],
|
||||
'sources': [
|
||||
'lattice_armv7.S',
|
||||
@ -87,7 +87,7 @@
|
||||
},
|
||||
],
|
||||
'conditions': [
|
||||
['target_arch=="arm" and armv7==1', {
|
||||
['(target_arch=="arm" and armv7==1) or target_arch=="armv7"', {
|
||||
'targets': [
|
||||
{
|
||||
'target_name': 'isac_neon',
|
||||
|
@ -1,37 +0,0 @@
|
||||
/*
|
||||
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_TRANSFORM_H_
|
||||
#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_TRANSFORM_H_
|
||||
|
||||
#ifdef __cplusplus
|
||||
extern "C" {
|
||||
#endif
|
||||
|
||||
#include "webrtc/modules/audio_coding/codecs/isac/fix/source/settings.h"
|
||||
#include "webrtc/typedefs.h"
|
||||
|
||||
/* Cosine table 1 in Q14 */
|
||||
extern const int16_t kCosTab1[FRAMESAMPLES/2];
|
||||
|
||||
/* Sine table 1 in Q14 */
|
||||
extern const int16_t kSinTab1[FRAMESAMPLES/2];
|
||||
|
||||
/* Cosine table 2 in Q14 */
|
||||
extern const int16_t kCosTab2[FRAMESAMPLES/4];
|
||||
|
||||
/* Sine table 2 in Q14 */
|
||||
extern const int16_t kSinTab2[FRAMESAMPLES/4];
|
||||
|
||||
#ifdef __cplusplus
|
||||
} /* extern "C" */
|
||||
#endif
|
||||
|
||||
#endif /* WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_TRANSFORM_H_ */
|
@ -1,173 +0,0 @@
|
||||
/*
|
||||
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#include <stdio.h>
|
||||
#include <stdlib.h>
|
||||
#include <string.h>
|
||||
#include <ctype.h>
|
||||
|
||||
//#include "isac_codec.h"
|
||||
//#include "isac_structs.h"
|
||||
#include "isacfix.h"
|
||||
|
||||
|
||||
#define NUM_CODECS 1
|
||||
|
||||
int main(int argc, char* argv[])
|
||||
{
|
||||
FILE *inFileList;
|
||||
FILE *audioFile;
|
||||
FILE *outFile;
|
||||
char audioFileName[501];
|
||||
short audioBuff[960];
|
||||
short encoded[600];
|
||||
short startAudio;
|
||||
short encodedLen;
|
||||
ISACFIX_MainStruct *isac_struct;
|
||||
unsigned long int hist[601];
|
||||
|
||||
// reset the histogram
|
||||
for(short n=0; n < 601; n++)
|
||||
{
|
||||
hist[n] = 0;
|
||||
}
|
||||
|
||||
|
||||
inFileList = fopen(argv[1], "r");
|
||||
if(inFileList == NULL)
|
||||
{
|
||||
printf("Could not open the input file.\n");
|
||||
getchar();
|
||||
exit(-1);
|
||||
}
|
||||
outFile = fopen(argv[2], "w");
|
||||
if(outFile == NULL)
|
||||
{
|
||||
printf("Could not open the histogram file.\n");
|
||||
getchar();
|
||||
exit(-1);
|
||||
}
|
||||
|
||||
short frameSizeMsec = 30;
|
||||
if(argc > 3)
|
||||
{
|
||||
frameSizeMsec = atoi(argv[3]);
|
||||
}
|
||||
|
||||
short audioOffset = 0;
|
||||
if(argc > 4)
|
||||
{
|
||||
audioOffset = atoi(argv[4]);
|
||||
}
|
||||
int ok;
|
||||
ok = WebRtcIsacfix_Create(&isac_struct);
|
||||
// instantaneous mode
|
||||
ok |= WebRtcIsacfix_EncoderInit(isac_struct, 1);
|
||||
// is not used but initialize
|
||||
ok |= WebRtcIsacfix_DecoderInit(isac_struct);
|
||||
ok |= WebRtcIsacfix_Control(isac_struct, 32000, frameSizeMsec);
|
||||
|
||||
if(ok != 0)
|
||||
{
|
||||
printf("\nProblem in seting up iSAC\n");
|
||||
exit(-1);
|
||||
}
|
||||
|
||||
while( fgets(audioFileName, 500, inFileList) != NULL )
|
||||
{
|
||||
// remove trailing white-spaces and any Cntrl character
|
||||
if(strlen(audioFileName) == 0)
|
||||
{
|
||||
continue;
|
||||
}
|
||||
short n = strlen(audioFileName) - 1;
|
||||
while(isspace(audioFileName[n]) || iscntrl(audioFileName[n]))
|
||||
{
|
||||
audioFileName[n] = '\0';
|
||||
n--;
|
||||
if(n < 0)
|
||||
{
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
// remove leading spaces
|
||||
if(strlen(audioFileName) == 0)
|
||||
{
|
||||
continue;
|
||||
}
|
||||
n = 0;
|
||||
while((isspace(audioFileName[n]) || iscntrl(audioFileName[n])) &&
|
||||
(audioFileName[n] != '\0'))
|
||||
{
|
||||
n++;
|
||||
}
|
||||
memmove(audioFileName, &audioFileName[n], 500 - n);
|
||||
if(strlen(audioFileName) == 0)
|
||||
{
|
||||
continue;
|
||||
}
|
||||
audioFile = fopen(audioFileName, "rb");
|
||||
if(audioFile == NULL)
|
||||
{
|
||||
printf("\nCannot open %s!!!!!\n", audioFileName);
|
||||
exit(0);
|
||||
}
|
||||
|
||||
if(audioOffset > 0)
|
||||
{
|
||||
fseek(audioFile, (audioOffset<<1), SEEK_SET);
|
||||
}
|
||||
|
||||
while(fread(audioBuff, sizeof(short), (480*frameSizeMsec/30), audioFile) >= (480*frameSizeMsec/30))
|
||||
{
|
||||
startAudio = 0;
|
||||
do
|
||||
{
|
||||
encodedLen = WebRtcIsacfix_Encode(isac_struct,
|
||||
&audioBuff[startAudio], encoded);
|
||||
startAudio += 160;
|
||||
} while(encodedLen == 0);
|
||||
|
||||
if(encodedLen < 0)
|
||||
{
|
||||
printf("\nEncoding Error!!!\n");
|
||||
exit(0);
|
||||
}
|
||||
hist[encodedLen]++;
|
||||
}
|
||||
fclose(audioFile);
|
||||
}
|
||||
fclose(inFileList);
|
||||
unsigned long totalFrames = 0;
|
||||
for(short n=0; n < 601; n++)
|
||||
{
|
||||
totalFrames += hist[n];
|
||||
fprintf(outFile, "%10lu\n", hist[n]);
|
||||
}
|
||||
fclose(outFile);
|
||||
|
||||
short topTenCntr = 0;
|
||||
printf("\nTotal number of Frames %lu\n\n", totalFrames);
|
||||
printf("Payload Len # occurences\n");
|
||||
printf("=========== ============\n");
|
||||
|
||||
for(short n = 600; (n >= 0) && (topTenCntr < 10); n--)
|
||||
{
|
||||
if(hist[n] > 0)
|
||||
{
|
||||
topTenCntr++;
|
||||
printf(" %3d %3d\n", n, hist[n]);
|
||||
}
|
||||
}
|
||||
WebRtcIsacfix_Free(isac_struct);
|
||||
return 0;
|
||||
}
|
||||
|
@ -1,260 +0,0 @@
|
||||
/*
|
||||
* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
/******************************************************************
|
||||
Stand Alone test application for ISACFIX and ISAC LC
|
||||
|
||||
******************************************************************/
|
||||
|
||||
#include <string.h>
|
||||
#include <stdio.h>
|
||||
#include <stdlib.h>
|
||||
#include "typedefs.h"
|
||||
|
||||
#include "isacfix.h"
|
||||
ISACFIX_MainStruct *ISACfix_inst;
|
||||
|
||||
#define FS 16000
|
||||
|
||||
|
||||
typedef struct {
|
||||
uint32_t arrival_time; /* samples */
|
||||
uint32_t sample_count; /* samples */
|
||||
uint16_t rtp_number;
|
||||
} BottleNeckModel;
|
||||
|
||||
void get_arrival_time(int current_framesamples, /* samples */
|
||||
int packet_size, /* bytes */
|
||||
int bottleneck, /* excluding headers; bits/s */
|
||||
BottleNeckModel *BN_data)
|
||||
{
|
||||
const int HeaderSize = 35;
|
||||
int HeaderRate;
|
||||
|
||||
HeaderRate = HeaderSize * 8 * FS / current_framesamples; /* bits/s */
|
||||
|
||||
/* everything in samples */
|
||||
BN_data->sample_count = BN_data->sample_count + current_framesamples;
|
||||
|
||||
  BN_data->arrival_time += ((packet_size + HeaderSize) * 8 * FS) / (bottleneck + HeaderRate);

  if (BN_data->arrival_time < BN_data->sample_count)
    BN_data->arrival_time = BN_data->sample_count;

  BN_data->rtp_number++;
}
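A worked example of this bottleneck model (editor's illustration with made-up numbers): a 30 ms frame at FS = 16000 gives current_framesamples = 480, so a 100-byte payload over a 32000 bit/s bottleneck works out as follows.

/* Illustration only: values are hypothetical. */
int header_rate = 35 * 8 * 16000 / 480;                        /* = 9333 bit/s  */
int delta = ((100 + 35) * 8 * 16000) / (32000 + header_rate);  /* = 418 samples */
/* i.e. the packet is modeled as arriving about 26 ms after the previous one. */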
|
||||
|
||||
/*
|
||||
#ifdef __cplusplus
|
||||
extern "C" {
|
||||
#endif
|
||||
*/
|
||||
int main(int argc, char* argv[]){
|
||||
|
||||
/* Parameters */
|
||||
FILE *pInFile, *pOutFile, *pChcFile;
|
||||
int8_t inFile[40];
|
||||
int8_t outFile[40];
|
||||
int8_t chcFile[40];
|
||||
int8_t codec[10];
|
||||
int16_t bitrt, spType, size;
|
||||
uint16_t frameLen;
|
||||
int16_t sigOut[1000], sigIn[1000];
|
||||
uint16_t bitStream[500]; /* double to 32 kbps for 60 ms */
|
||||
|
||||
int16_t chc, ok;
|
||||
int noOfCalls, cdlen;
|
||||
int16_t noOfLostFrames;
|
||||
int err, errtype;
|
||||
|
||||
BottleNeckModel BN_data;
|
||||
|
||||
int totalbits =0;
|
||||
int totalsmpls =0;
|
||||
|
||||
/*End Parameters*/
|
||||
|
||||
|
||||
if ((argc==6)||(argc==7) ){
|
||||
|
||||
strcpy(codec,argv[5]);
|
||||
|
||||
if(argc==7){
|
||||
if (!_stricmp("isac",codec)){
|
||||
bitrt = atoi(argv[6]);
|
||||
if ( (bitrt<10000)&&(bitrt>32000)){
|
||||
printf("Error: Supported bit rate in the range 10000-32000 bps!\n");
|
||||
exit(-1);
|
||||
}
|
||||
|
||||
}else{
|
||||
printf("Error: Codec not recognized. Check spelling!\n");
|
||||
exit(-1);
|
||||
}
|
||||
|
||||
} else {
|
||||
printf("Error: Codec not recognized. Check spelling!\n");
|
||||
exit(-1);
|
||||
}
|
||||
} else {
|
||||
printf("Error: Wrong number of input parameter!\n\n");
|
||||
exit(-1);
|
||||
}
|
||||
|
||||
frameLen = atoi(argv[4]);
|
||||
strcpy(chcFile,argv[3]);
|
||||
strcpy(outFile,argv[2]);
|
||||
strcpy(inFile,argv[1]);
|
||||
|
||||
/* Open file streams */
|
||||
if( (pInFile = fopen(inFile,"rb")) == NULL ) {
|
||||
printf( "Error: Did not find input file!\n" );
|
||||
exit(-1);
|
||||
}
|
||||
strcat(outFile,"_");
|
||||
strcat(outFile, argv[4]);
|
||||
strcat(outFile,"_");
|
||||
strcat(outFile, codec);
|
||||
|
||||
if (argc==7){
|
||||
strcat(outFile,"_");
|
||||
strcat(outFile, argv[6]);
|
||||
}
|
||||
if (_stricmp("none", chcFile)){
|
||||
strcat(outFile,"_");
|
||||
strcat(outFile, "plc");
|
||||
}
|
||||
|
||||
strcat(outFile, ".otp");
|
||||
|
||||
if (_stricmp("none", chcFile)){
|
||||
if( (pChcFile = fopen(chcFile,"rb")) == NULL ) {
|
||||
printf( "Error: Did not find channel file!\n" );
|
||||
exit(-1);
|
||||
}
|
||||
}
|
||||
/******************************************************************/
|
||||
if (!_stricmp("isac", codec)){ /* ISAC */
|
||||
if ((frameLen!=480)&&(frameLen!=960)) {
|
||||
printf("Error: ISAC only supports 480 and 960 samples per frame (not %d)\n", frameLen);
|
||||
exit(-1);
|
||||
}
|
||||
if( (pOutFile = fopen(outFile,"wb")) == NULL ) {
|
||||
printf( "Could not open output file!\n" );
|
||||
exit(-1);
|
||||
}
|
||||
ok=WebRtcIsacfix_Create(&ISACfix_inst);
|
||||
if (ok!=0) {
|
||||
printf("Couldn't allocate memory for iSAC fix instance\n");
|
||||
exit(-1);
|
||||
}
|
||||
|
||||
BN_data.arrival_time = 0;
|
||||
BN_data.sample_count = 0;
|
||||
BN_data.rtp_number = 0;
|
||||
|
||||
WebRtcIsacfix_EncoderInit(ISACfix_inst,1);
|
||||
WebRtcIsacfix_DecoderInit(ISACfix_inst);
|
||||
err = WebRtcIsacfix_Control(ISACfix_inst, bitrt, (frameLen>>4));
|
||||
if (err < 0) {
|
||||
/* exit if returned with error */
|
||||
errtype=WebRtcIsacfix_GetErrorCode(ISACfix_inst);
|
||||
printf("\n\n Error in initialization: %d.\n\n", errtype);
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
/* loop over frame */
|
||||
while (fread(sigIn,sizeof(int16_t),frameLen,pInFile) == frameLen) {
|
||||
|
||||
noOfCalls=0;
|
||||
cdlen=0;
|
||||
while (cdlen<=0) {
|
||||
cdlen=WebRtcIsacfix_Encode(ISACfix_inst,&sigIn[noOfCalls*160],(int16_t*)bitStream);
|
||||
if(cdlen==-1){
|
||||
errtype=WebRtcIsacfix_GetErrorCode(ISACfix_inst);
|
||||
printf("\n\nError in encoder: %d.\n\n", errtype);
|
||||
exit(-1);
|
||||
}
|
||||
noOfCalls++;
|
||||
}
|
||||
|
||||
|
||||
if(_stricmp("none", chcFile)){
|
||||
if (fread(&chc,sizeof(int16_t),1,pChcFile)!=1) /* packet may be lost */
|
||||
break;
|
||||
} else {
|
||||
chc = 1; /* packets never lost */
|
||||
}
|
||||
|
||||
/* simulate packet handling through NetEq and the modem */
|
||||
get_arrival_time(frameLen, cdlen, bitrt, &BN_data);
|
||||
|
||||
if (chc){ /* decode */
|
||||
|
||||
err = WebRtcIsacfix_UpdateBwEstimate1(ISACfix_inst,
|
||||
bitStream,
|
||||
cdlen,
|
||||
BN_data.rtp_number,
|
||||
BN_data.arrival_time);
|
||||
|
||||
if (err < 0) {
|
||||
/* exit if returned with error */
|
||||
errtype=WebRtcIsacfix_GetErrorCode(ISACfix_inst);
|
||||
printf("\n\nError in decoder: %d.\n\n", errtype);
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
size = WebRtcIsacfix_Decode(ISACfix_inst, bitStream, cdlen, sigOut, &spType);
|
||||
if(size<=0){
|
||||
/* exit if returned with error */
|
||||
errtype=WebRtcIsacfix_GetErrorCode(ISACfix_inst);
|
||||
printf("\n\nError in decoder: %d.\n\n", errtype);
|
||||
exit(-1);
|
||||
}
|
||||
} else { /* PLC */
|
||||
if (frameLen == 480){
|
||||
noOfLostFrames = 1;
|
||||
} else{
|
||||
noOfLostFrames = 2;
|
||||
}
|
||||
size = WebRtcIsacfix_DecodePlc(ISACfix_inst, sigOut, noOfLostFrames );
|
||||
if(size<=0){
|
||||
errtype=WebRtcIsacfix_GetErrorCode(ISACfix_inst);
|
||||
printf("\n\nError in decoder: %d.\n\n", errtype);
|
||||
exit(-1);
|
||||
}
|
||||
}
|
||||
|
||||
/* Write decoded speech to file */
|
||||
fwrite(sigOut,sizeof(short),size,pOutFile);
|
||||
|
||||
totalbits += 8 * cdlen;
|
||||
totalsmpls += size;
|
||||
|
||||
}
|
||||
/******************************************************************/
|
||||
}
|
||||
|
||||
// printf("\n\ntotal bits = %d bits", totalbits);
|
||||
printf("\nmeasured average bitrate = %0.3f kbits/s", (double)totalbits * 16 / totalsmpls);
|
||||
printf("\n");
|
||||
|
||||
|
||||
fclose(pInFile);
|
||||
fclose(pOutFile);
|
||||
if (_stricmp("none", chcFile)){
|
||||
fclose(pChcFile);
|
||||
}
|
||||
|
||||
if (!_stricmp("isac", codec)){
|
||||
WebRtcIsacfix_Free(ISACfix_inst);
|
||||
}
|
||||
|
||||
return 0;
|
||||
|
||||
}
|
@ -78,6 +78,11 @@
|
||||
'defines': [
|
||||
'WEBRTC_LINUX',
|
||||
],
|
||||
'link_settings': {
|
||||
'libraries': [
|
||||
'-lm',
|
||||
],
|
||||
},
|
||||
}],
|
||||
],
|
||||
},
|
||||
|
@ -33,7 +33,7 @@ int16_t WebRtcOpus_EncoderFree(OpusEncInst* inst);
|
||||
* Input:
|
||||
* - inst : Encoder context
|
||||
* - audio_in : Input speech data buffer
|
||||
* - samples : Samples in audio_in
|
||||
* - samples : Samples per channel in audio_in
|
||||
* - length_encoded_buffer : Output buffer size
|
||||
*
|
||||
* Output:
|
||||
@ -101,7 +101,7 @@ int16_t WebRtcOpus_DecoderInitSlave(OpusDecInst* inst);
|
||||
* always return 1 since we're not using Opus's
|
||||
* built-in DTX/CNG scheme)
|
||||
*
|
||||
* Return value : >0 - Samples in decoded vector
|
||||
* Return value : >0 - Samples per channel in decoded vector
|
||||
* -1 - Error
|
||||
*/
|
||||
int16_t WebRtcOpus_DecodeNew(OpusDecInst* inst, const uint8_t* encoded,
|
||||
@ -115,8 +115,11 @@ int16_t WebRtcOpus_DecodeSlave(OpusDecInst* inst, const int16_t* encoded,
|
||||
int16_t* audio_type);
|
||||
/****************************************************************************
|
||||
* WebRtcOpus_DecodePlc(...)
|
||||
* TODO(tlegrand): Remove master and slave functions when NetEq4 is in place.
|
||||
* WebRtcOpus_DecodePlcMaster(...)
|
||||
* WebRtcOpus_DecodePlcSlave(...)
|
||||
*
|
||||
* This function precesses PLC for opus frame(s).
|
||||
* This function processes PLC for opus frame(s).
|
||||
* Input:
|
||||
* - inst : Decoder context
|
||||
* - number_of_lost_frames : Number of PLC frames to produce
|
||||
@ -129,6 +132,10 @@ int16_t WebRtcOpus_DecodeSlave(OpusDecInst* inst, const int16_t* encoded,
|
||||
*/
|
||||
int16_t WebRtcOpus_DecodePlc(OpusDecInst* inst, int16_t* decoded,
|
||||
int16_t number_of_lost_frames);
|
||||
int16_t WebRtcOpus_DecodePlcMaster(OpusDecInst* inst, int16_t* decoded,
|
||||
int16_t number_of_lost_frames);
|
||||
int16_t WebRtcOpus_DecodePlcSlave(OpusDecInst* inst, int16_t* decoded,
|
||||
int16_t number_of_lost_frames);
|
||||
|
||||
/****************************************************************************
|
||||
* WebRtcOpus_DurationEst(...)
|
||||
|
@ -22,18 +22,28 @@ enum {
|
||||
/* Maximum supported frame size in WebRTC is 60 ms. */
|
||||
kWebRtcOpusMaxEncodeFrameSizeMs = 60,
|
||||
|
||||
/* The format allows up to 120ms frames. Since we
|
||||
* don't control the other side, we must allow
|
||||
* for packets that large. NetEq is currently
|
||||
* limited to 60 ms on the receive side.
|
||||
*/
|
||||
/* The format allows up to 120 ms frames. Since we don't control the other
|
||||
* side, we must allow for packets of that size. NetEq is currently limited
|
||||
* to 60 ms on the receive side. */
|
||||
kWebRtcOpusMaxDecodeFrameSizeMs = 120,
|
||||
|
||||
/* Sample count is 48 kHz * samples per frame * stereo. */
|
||||
kWebRtcOpusMaxFrameSize = 48 * kWebRtcOpusMaxDecodeFrameSizeMs * 2,
|
||||
/* Maximum sample count per channel is 48 kHz * maximum frame size in
|
||||
* milliseconds. */
|
||||
kWebRtcOpusMaxFrameSizePerChannel = 48 * kWebRtcOpusMaxDecodeFrameSizeMs,
|
||||
|
||||
/* Maximum sample count per frame is 48 kHz * maximum frame size in
|
||||
* milliseconds * maximum number of channels. */
|
||||
kWebRtcOpusMaxFrameSize = kWebRtcOpusMaxFrameSizePerChannel * 2,
|
||||
|
||||
/* Maximum sample count per channel for output resampled to 32 kHz,
|
||||
* 32 kHz * maximum frame size in milliseconds. */
|
||||
kWebRtcOpusMaxFrameSizePerChannel32kHz = 32 * kWebRtcOpusMaxDecodeFrameSizeMs,
|
||||
|
||||
  /* Number of samples in resampler state. */
  kWebRtcOpusStateSize = 7,

  /* Default frame size, 20 ms @ 48 kHz, in samples (for one channel). */
  kWebRtcOpusDefaultFrameSize = 960,
};
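For reference, the derived constants above reduce to the following concrete values (editor's arithmetic check, not part of the patch):

/* kWebRtcOpusMaxFrameSizePerChannel      = 48 * 120          = 5760 samples  */
/* kWebRtcOpusMaxFrameSize                = 5760 * 2 channels = 11520 samples */
/* kWebRtcOpusMaxFrameSizePerChannel32kHz = 32 * 120          = 3840 samples  */
/* kWebRtcOpusDefaultFrameSize            = 48 * 20           = 960 samples   */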
|
||||
|
||||
struct WebRtcOpusEncInst {
|
||||
@ -47,8 +57,8 @@ int16_t WebRtcOpus_EncoderCreate(OpusEncInst** inst, int32_t channels) {
|
||||
if (state) {
|
||||
int error;
|
||||
/* Default to VoIP application for mono, and AUDIO for stereo. */
|
||||
int application =
|
||||
(channels == 1) ? OPUS_APPLICATION_VOIP : OPUS_APPLICATION_AUDIO;
|
||||
int application = (channels == 1) ? OPUS_APPLICATION_VOIP :
|
||||
OPUS_APPLICATION_AUDIO;
|
||||
|
||||
state->encoder = opus_encoder_create(48000, channels, application,
|
||||
&error);
|
||||
@ -104,6 +114,7 @@ struct WebRtcOpusDecInst {
|
||||
int16_t state_48_32_right[8];
|
||||
OpusDecoder* decoder_left;
|
||||
OpusDecoder* decoder_right;
|
||||
int prev_decoded_samples;
|
||||
int channels;
|
||||
};
|
||||
|
||||
@ -113,7 +124,7 @@ int16_t WebRtcOpus_DecoderCreate(OpusDecInst** inst, int channels) {
|
||||
OpusDecInst* state;
|
||||
|
||||
if (inst != NULL) {
|
||||
/* Create Opus decoder memory. */
|
||||
/* Create Opus decoder state. */
|
||||
state = (OpusDecInst*) calloc(1, sizeof(OpusDecInst));
|
||||
if (state == NULL) {
|
||||
return -1;
|
||||
@ -126,6 +137,7 @@ int16_t WebRtcOpus_DecoderCreate(OpusDecInst** inst, int channels) {
|
||||
&& state->decoder_right != NULL) {
|
||||
/* Creation of memory all ok. */
|
||||
state->channels = channels;
|
||||
state->prev_decoded_samples = kWebRtcOpusDefaultFrameSize;
|
||||
*inst = state;
|
||||
return 0;
|
||||
}
|
||||
@ -185,14 +197,17 @@ int16_t WebRtcOpus_DecoderInitSlave(OpusDecInst* inst) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
/* |frame_size| is set to maximum Opus frame size in the normal case, and
|
||||
* is set to the number of samples needed for PLC in case of losses.
|
||||
* It is up to the caller to make sure the value is correct. */
|
||||
static int DecodeNative(OpusDecoder* inst, const int16_t* encoded,
|
||||
int16_t encoded_bytes, int16_t* decoded,
|
||||
int16_t* audio_type) {
|
||||
int16_t encoded_bytes, int frame_size,
|
||||
int16_t* decoded, int16_t* audio_type) {
|
||||
unsigned char* coded = (unsigned char*) encoded;
|
||||
opus_int16* audio = (opus_int16*) decoded;
|
||||
|
||||
int res = opus_decode(inst, coded, encoded_bytes, audio,
|
||||
kWebRtcOpusMaxFrameSize, 0);
|
||||
int res = opus_decode(inst, coded, encoded_bytes, audio, frame_size, 0);
|
||||
|
||||
/* TODO(tlegrand): set to DTX for zero-length packets? */
|
||||
*audio_type = 0;
|
||||
|
||||
@ -210,7 +225,7 @@ static int WebRtcOpus_Resample48to32(const int16_t* samples_in, int length,
|
||||
int i;
|
||||
int blocks;
|
||||
int16_t output_samples;
|
||||
int32_t buffer32[kWebRtcOpusMaxFrameSize + kWebRtcOpusStateSize];
|
||||
int32_t buffer32[kWebRtcOpusMaxFrameSizePerChannel + kWebRtcOpusStateSize];
|
||||
|
||||
/* Resample from 48 kHz to 32 kHz. */
|
||||
for (i = 0; i < kWebRtcOpusStateSize; i++) {
|
||||
@ -232,81 +247,91 @@ static int WebRtcOpus_Resample48to32(const int16_t* samples_in, int length,
|
||||
return output_samples;
|
||||
}
|
||||
|
||||
static int WebRtcOpus_DeInterleaveResample(OpusDecInst* inst, int16_t* input,
|
||||
int sample_pairs, int16_t* output) {
|
||||
int i;
|
||||
int16_t buffer_left[kWebRtcOpusMaxFrameSizePerChannel];
|
||||
int16_t buffer_right[kWebRtcOpusMaxFrameSizePerChannel];
|
||||
int16_t buffer_out[kWebRtcOpusMaxFrameSizePerChannel32kHz];
|
||||
int resampled_samples;
|
||||
|
||||
/* De-interleave the signal in left and right channel. */
|
||||
for (i = 0; i < sample_pairs; i++) {
|
||||
/* Take every second sample, starting at the first sample. */
|
||||
buffer_left[i] = input[i * 2];
|
||||
buffer_right[i] = input[i * 2 + 1];
|
||||
}
|
||||
|
||||
/* Resample from 48 kHz to 32 kHz for left channel. */
|
||||
resampled_samples = WebRtcOpus_Resample48to32(
|
||||
buffer_left, sample_pairs, inst->state_48_32_left, buffer_out);
|
||||
|
||||
/* Add samples interleaved to output vector. */
|
||||
for (i = 0; i < resampled_samples; i++) {
|
||||
output[i * 2] = buffer_out[i];
|
||||
}
|
||||
|
||||
/* Resample from 48 kHz to 32 kHz for right channel. */
|
||||
resampled_samples = WebRtcOpus_Resample48to32(
|
||||
buffer_right, sample_pairs, inst->state_48_32_right, buffer_out);
|
||||
|
||||
  /* Add samples interleaved to output vector. */
  for (i = 0; i < resampled_samples; i++) {
    output[i * 2 + 1] = buffer_out[i];
  }

  return resampled_samples;
}
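The sample-count bookkeeping here is easy to sanity-check (editor's note): the 48-to-32 kHz resampling keeps 2 of every 3 samples per channel, so a 20 ms decode of 960 sample pairs comes back as 640 pairs per channel, which is the value the Opus unit tests further down expect.

/* Illustration only: per-channel sample count after 48 kHz -> 32 kHz. */
int out_per_channel = 960 * 2 / 3;  /* 20 ms: 960 -> 640 samples */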
|
||||
|
||||
int16_t WebRtcOpus_DecodeNew(OpusDecInst* inst, const uint8_t* encoded,
|
||||
int16_t encoded_bytes, int16_t* decoded,
|
||||
int16_t* audio_type) {
|
||||
/* Enough for 120 ms (the largest Opus packet size) of mono audio at 48 kHz
|
||||
* and resampler overlap. This will need to be enlarged for stereo decoding.
|
||||
*/
|
||||
int16_t buffer16_left[kWebRtcOpusMaxFrameSize];
|
||||
int16_t buffer16_right[kWebRtcOpusMaxFrameSize];
|
||||
int16_t buffer_out[kWebRtcOpusMaxFrameSize];
|
||||
int16_t* coded = (int16_t*) encoded;
|
||||
/* |buffer| is big enough for 120 ms (the largest Opus packet size) of stereo
|
||||
* audio at 48 kHz. */
|
||||
int16_t buffer[kWebRtcOpusMaxFrameSize];
|
||||
int16_t* coded = (int16_t*)encoded;
|
||||
int decoded_samples;
|
||||
int resampled_samples;
|
||||
int i;
|
||||
|
||||
/* If mono case, just do a regular call to the decoder.
|
||||
* If stereo, we need to de-interleave the stereo output in to blocks with
|
||||
* If stereo, we need to de-interleave the stereo output into blocks with
|
||||
* left and right channel. Each block is resampled to 32 kHz, and then
|
||||
* interleaved again. */
|
||||
|
||||
/* Decode to a temporary buffer. */
|
||||
decoded_samples = DecodeNative(inst->decoder_left, coded, encoded_bytes,
|
||||
buffer16_left, audio_type);
|
||||
kWebRtcOpusMaxFrameSizePerChannel,
|
||||
buffer, audio_type);
|
||||
if (decoded_samples < 0) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
/* De-interleave if stereo. */
|
||||
if (inst->channels == 2) {
|
||||
/* The parameter |decoded_samples| holds the number of samples pairs, in
|
||||
* case of stereo. Number of samples in |buffer16| equals |decoded_samples|
|
||||
* times 2. */
|
||||
for (i = 0; i < decoded_samples; i++) {
|
||||
/* Take every second sample, starting at the first sample. */
|
||||
buffer16_left[i] = buffer16_left[i * 2];
|
||||
buffer16_right[i] = buffer16_left[i * 2 + 1];
|
||||
}
|
||||
|
||||
/* Resample from 48 kHz to 32 kHz for left channel. */
|
||||
resampled_samples = WebRtcOpus_Resample48to32(buffer16_left,
|
||||
decoded_samples,
|
||||
inst->state_48_32_left,
|
||||
buffer_out);
|
||||
|
||||
/* Add samples interleaved to output vector. */
|
||||
for (i = 0; i < resampled_samples; i++) {
|
||||
decoded[i * 2] = buffer_out[i];
|
||||
}
|
||||
|
||||
/* Resample from 48 kHz to 32 kHz for right channel. */
|
||||
resampled_samples = WebRtcOpus_Resample48to32(buffer16_right,
|
||||
decoded_samples,
|
||||
inst->state_48_32_right,
|
||||
buffer_out);
|
||||
|
||||
/* Add samples interleaved to output vector. */
|
||||
for (i = 0; i < decoded_samples; i++) {
|
||||
decoded[i * 2 + 1] = buffer_out[i];
|
||||
}
|
||||
/* De-interleave and resample. */
|
||||
resampled_samples = WebRtcOpus_DeInterleaveResample(inst,
|
||||
buffer,
|
||||
decoded_samples,
|
||||
decoded);
|
||||
} else {
|
||||
/* Resample from 48 kHz to 32 kHz for left channel. */
|
||||
resampled_samples = WebRtcOpus_Resample48to32(buffer16_left,
|
||||
/* Resample from 48 kHz to 32 kHz. Filter state memory for left channel is
|
||||
* used for mono signals. */
|
||||
resampled_samples = WebRtcOpus_Resample48to32(buffer,
|
||||
decoded_samples,
|
||||
inst->state_48_32_left,
|
||||
decoded);
|
||||
}
|
||||
|
||||
/* Update decoded sample memory, to be used by the PLC in case of losses. */
|
||||
inst->prev_decoded_samples = decoded_samples;
|
||||
|
||||
return resampled_samples;
|
||||
}
|
||||
|
||||
|
||||
int16_t WebRtcOpus_Decode(OpusDecInst* inst, const int16_t* encoded,
|
||||
int16_t encoded_bytes, int16_t* decoded,
|
||||
int16_t* audio_type) {
|
||||
/* Enough for 120 ms (the largest Opus packet size) of mono audio at 48 kHz
|
||||
* and resampler overlap. This will need to be enlarged for stereo decoding.
|
||||
*/
|
||||
/* |buffer16| is big enough for 120 ms (the largest Opus packet size) of
|
||||
* stereo audio at 48 kHz. */
|
||||
int16_t buffer16[kWebRtcOpusMaxFrameSize];
|
||||
int decoded_samples;
|
||||
int16_t output_samples;
|
||||
@ -320,7 +345,8 @@ int16_t WebRtcOpus_Decode(OpusDecInst* inst, const int16_t* encoded,
|
||||
|
||||
/* Decode to a temporary buffer. */
|
||||
decoded_samples = DecodeNative(inst->decoder_left, encoded, encoded_bytes,
|
||||
buffer16, audio_type);
|
||||
kWebRtcOpusMaxFrameSizePerChannel, buffer16,
|
||||
audio_type);
|
||||
if (decoded_samples < 0) {
|
||||
return -1;
|
||||
}
|
||||
@ -339,15 +365,17 @@ int16_t WebRtcOpus_Decode(OpusDecInst* inst, const int16_t* encoded,
|
||||
output_samples = WebRtcOpus_Resample48to32(buffer16, decoded_samples,
|
||||
inst->state_48_32_left, decoded);
|
||||
|
||||
/* Update decoded sample memory, to be used by the PLC in case of losses. */
|
||||
inst->prev_decoded_samples = decoded_samples;
|
||||
|
||||
return output_samples;
|
||||
}
|
||||
|
||||
int16_t WebRtcOpus_DecodeSlave(OpusDecInst* inst, const int16_t* encoded,
|
||||
int16_t encoded_bytes, int16_t* decoded,
|
||||
int16_t* audio_type) {
|
||||
/* Enough for 120 ms (the largest Opus packet size) of mono audio at 48 kHz
|
||||
* and resampler overlap. This will need to be enlarged for stereo decoding.
|
||||
*/
|
||||
/* |buffer16| is big enough for 120 ms (the largest Opus packet size) of
|
||||
* stereo audio at 48 kHz. */
|
||||
int16_t buffer16[kWebRtcOpusMaxFrameSize];
|
||||
int decoded_samples;
|
||||
int16_t output_samples;
|
||||
@ -355,7 +383,8 @@ int16_t WebRtcOpus_DecodeSlave(OpusDecInst* inst, const int16_t* encoded,
|
||||
|
||||
/* Decode to a temporary buffer. */
|
||||
decoded_samples = DecodeNative(inst->decoder_right, encoded, encoded_bytes,
|
||||
buffer16, audio_type);
|
||||
kWebRtcOpusMaxFrameSizePerChannel, buffer16,
|
||||
audio_type);
|
||||
if (decoded_samples < 0) {
|
||||
return -1;
|
||||
}
|
||||
@ -381,16 +410,141 @@ int16_t WebRtcOpus_DecodeSlave(OpusDecInst* inst, const int16_t* encoded,
|
||||
|
||||
int16_t WebRtcOpus_DecodePlc(OpusDecInst* inst, int16_t* decoded,
|
||||
int16_t number_of_lost_frames) {
|
||||
/* TODO(tlegrand): We can pass NULL to opus_decode to activate packet
|
||||
* loss concealment, but I don't know how many samples
|
||||
* number_of_lost_frames corresponds to. */
|
||||
return -1;
|
||||
int16_t buffer[kWebRtcOpusMaxFrameSize];
|
||||
int16_t audio_type = 0;
|
||||
int decoded_samples;
|
||||
int resampled_samples;
|
||||
int plc_samples;
|
||||
|
||||
/* If mono case, just do a regular call to the plc function, before
|
||||
* resampling.
|
||||
* If stereo, we need to de-interleave the stereo output into blocks with
|
||||
* left and right channel. Each block is resampled to 32 kHz, and then
|
||||
* interleaved again. */
|
||||
|
||||
/* Decode to a temporary buffer. The number of samples we ask for is
|
||||
* |number_of_lost_frames| times |prev_decoded_samples_|. Limit the number
|
||||
* of samples to maximum |kWebRtcOpusMaxFrameSizePerChannel|. */
|
||||
plc_samples = number_of_lost_frames * inst->prev_decoded_samples;
|
||||
plc_samples = (plc_samples <= kWebRtcOpusMaxFrameSizePerChannel) ?
|
||||
plc_samples : kWebRtcOpusMaxFrameSizePerChannel;
|
||||
decoded_samples = DecodeNative(inst->decoder_left, NULL, 0, plc_samples,
|
||||
buffer, &audio_type);
|
||||
if (decoded_samples < 0) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
if (inst->channels == 2) {
|
||||
/* De-interleave and resample. */
|
||||
resampled_samples = WebRtcOpus_DeInterleaveResample(inst,
|
||||
buffer,
|
||||
decoded_samples,
|
||||
decoded);
|
||||
} else {
|
||||
/* Resample from 48 kHz to 32 kHz. Filter state memory for left channel is
|
||||
* used for mono signals. */
|
||||
    resampled_samples = WebRtcOpus_Resample48to32(buffer,
                                                  decoded_samples,
                                                  inst->state_48_32_left,
                                                  decoded);
  }

  return resampled_samples;
}
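A quick check of the PLC sizing logic above (editor's illustration): with the default 20 ms frame, prev_decoded_samples is 960, so one lost frame asks the decoder for 960 samples per channel and two lost frames for 1920; the clamp to kWebRtcOpusMaxFrameSizePerChannel (5760) only kicks in once more than six frames are lost.

/* Illustration only: PLC request sizing for the default 20 ms frame. */
int number_of_lost_frames = 2;                         /* hypothetical loss run */
int plc_samples = number_of_lost_frames * 960;         /* prev_decoded_samples  */
if (plc_samples > kWebRtcOpusMaxFrameSizePerChannel)   /* cap at 5760           */
  plc_samples = kWebRtcOpusMaxFrameSizePerChannel;     /* here: stays at 1920   */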
|
||||
|
||||
int16_t WebRtcOpus_DecodePlcMaster(OpusDecInst* inst, int16_t* decoded,
|
||||
int16_t number_of_lost_frames) {
|
||||
int16_t buffer[kWebRtcOpusMaxFrameSize];
|
||||
int decoded_samples;
|
||||
int resampled_samples;
|
||||
int16_t audio_type = 0;
|
||||
int plc_samples;
|
||||
int i;
|
||||
|
||||
/* If mono case, just do a regular call to the decoder.
|
||||
* If stereo, call to WebRtcOpus_DecodePlcMaster() gives left channel as
|
||||
* output, and calls to WebRtcOpus_DecodePlcSlave() give right channel as
|
||||
* output. This is to make stereo work with the current setup of NetEQ, which
|
||||
* requires two calls to the decoder to produce stereo. */
|
||||
|
||||
/* Decode to a temporary buffer. The number of samples we ask for is
|
||||
* |number_of_lost_frames| times |prev_decoded_samples_|. Limit the number
|
||||
* of samples to maximum |kWebRtcOpusMaxFrameSizePerChannel|. */
|
||||
plc_samples = number_of_lost_frames * inst->prev_decoded_samples;
|
||||
plc_samples = (plc_samples <= kWebRtcOpusMaxFrameSizePerChannel) ?
|
||||
plc_samples : kWebRtcOpusMaxFrameSizePerChannel;
|
||||
decoded_samples = DecodeNative(inst->decoder_left, NULL, 0, plc_samples,
|
||||
buffer, &audio_type);
|
||||
if (decoded_samples < 0) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
if (inst->channels == 2) {
|
||||
/* The parameter |decoded_samples| holds the number of sample pairs, in
|
||||
* case of stereo. The original number of samples in |buffer| equals
|
||||
* |decoded_samples| times 2. */
|
||||
for (i = 0; i < decoded_samples; i++) {
|
||||
/* Take every second sample, starting at the first sample. This gives
|
||||
* the left channel. */
|
||||
buffer[i] = buffer[i * 2];
|
||||
}
|
||||
}
|
||||
|
||||
/* Resample from 48 kHz to 32 kHz for left channel. */
|
||||
resampled_samples = WebRtcOpus_Resample48to32(buffer,
|
||||
decoded_samples,
|
||||
inst->state_48_32_left,
|
||||
decoded);
|
||||
return resampled_samples;
|
||||
}
|
||||
|
||||
int16_t WebRtcOpus_DecodePlcSlave(OpusDecInst* inst, int16_t* decoded,
|
||||
int16_t number_of_lost_frames) {
|
||||
int16_t buffer[kWebRtcOpusMaxFrameSize];
|
||||
int decoded_samples;
|
||||
int resampled_samples;
|
||||
int16_t audio_type = 0;
|
||||
int plc_samples;
|
||||
int i;
|
||||
|
||||
/* Calls to WebRtcOpus_DecodePlcSlave() give right channel as output.
|
||||
* The function should never be called in the mono case. */
|
||||
if (inst->channels != 2) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
/* Decode to a temporary buffer. The number of samples we ask for is
|
||||
* |number_of_lost_frames| times |prev_decoded_samples_|. Limit the number
|
||||
* of samples to maximum |kWebRtcOpusMaxFrameSizePerChannel|. */
|
||||
plc_samples = number_of_lost_frames * inst->prev_decoded_samples;
|
||||
plc_samples = (plc_samples <= kWebRtcOpusMaxFrameSizePerChannel)
|
||||
? plc_samples : kWebRtcOpusMaxFrameSizePerChannel;
|
||||
decoded_samples = DecodeNative(inst->decoder_right, NULL, 0, plc_samples,
|
||||
buffer, &audio_type);
|
||||
if (decoded_samples < 0) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
/* The parameter |decoded_samples| holds the number of sample pairs,
|
||||
* The original number of samples in |buffer| equals |decoded_samples|
|
||||
* times 2. */
|
||||
for (i = 0; i < decoded_samples; i++) {
|
||||
/* Take every second sample, starting at the second sample. This gives
|
||||
* the right channel. */
|
||||
buffer[i] = buffer[i * 2 + 1];
|
||||
}
|
||||
|
||||
/* Resample from 48 kHz to 32 kHz for left channel. */
|
||||
resampled_samples = WebRtcOpus_Resample48to32(buffer,
|
||||
decoded_samples,
|
||||
inst->state_48_32_right,
|
||||
decoded);
|
||||
return resampled_samples;
|
||||
}
|
||||
|
||||
int WebRtcOpus_DurationEst(OpusDecInst* inst,
|
||||
const uint8_t* payload,
|
||||
int payload_length_bytes)
|
||||
{
|
||||
int payload_length_bytes) {
|
||||
int frames, samples;
|
||||
frames = opus_packet_get_nb_frames(payload, payload_length_bytes);
|
||||
if (frames < 0) {
|
||||
|
@ -117,8 +117,8 @@ TEST_F(OpusTest, OpusEncodeDecodeMono) {
|
||||
int16_t output_data_decode_new[kOpusNumberOfSamples];
|
||||
int16_t output_data_decode[kOpusNumberOfSamples];
|
||||
int16_t* coded = reinterpret_cast<int16_t*>(bitstream_);
|
||||
encoded_bytes = WebRtcOpus_Encode(opus_mono_encoder_, speech_data_, 960,
|
||||
kMaxBytes, bitstream_);
|
||||
encoded_bytes = WebRtcOpus_Encode(opus_mono_encoder_, speech_data_, 960,
|
||||
kMaxBytes, bitstream_);
|
||||
EXPECT_EQ(640, WebRtcOpus_DecodeNew(opus_mono_decoder_new_, bitstream_,
|
||||
encoded_bytes, output_data_decode_new,
|
||||
&audio_type));
|
||||
@ -158,8 +158,8 @@ TEST_F(OpusTest, OpusEncodeDecodeStereo) {
|
||||
int16_t output_data_decode[kOpusNumberOfSamples];
|
||||
int16_t output_data_decode_slave[kOpusNumberOfSamples];
|
||||
int16_t* coded = reinterpret_cast<int16_t*>(bitstream_);
|
||||
encoded_bytes = WebRtcOpus_Encode(opus_stereo_encoder_, speech_data_, 960,
|
||||
kMaxBytes, bitstream_);
|
||||
encoded_bytes = WebRtcOpus_Encode(opus_stereo_encoder_, speech_data_, 960,
|
||||
kMaxBytes, bitstream_);
|
||||
EXPECT_EQ(640, WebRtcOpus_DecodeNew(opus_stereo_decoder_new_, bitstream_,
|
||||
encoded_bytes, output_data_decode_new,
|
||||
&audio_type));
|
||||
@ -217,8 +217,8 @@ TEST_F(OpusTest, OpusDecodeInit) {
|
||||
int16_t output_data_decode[kOpusNumberOfSamples];
|
||||
int16_t output_data_decode_slave[kOpusNumberOfSamples];
|
||||
int16_t* coded = reinterpret_cast<int16_t*>(bitstream_);
|
||||
encoded_bytes = WebRtcOpus_Encode(opus_stereo_encoder_, speech_data_, 960,
|
||||
kMaxBytes, bitstream_);
|
||||
encoded_bytes = WebRtcOpus_Encode(opus_stereo_encoder_, speech_data_, 960,
|
||||
kMaxBytes, bitstream_);
|
||||
EXPECT_EQ(640, WebRtcOpus_DecodeNew(opus_stereo_decoder_new_, bitstream_,
|
||||
encoded_bytes, output_data_decode_new,
|
||||
&audio_type));
|
||||
@ -265,10 +265,108 @@ TEST_F(OpusTest, OpusDecodeInit) {
|
||||
EXPECT_EQ(0, WebRtcOpus_DecoderFree(opus_stereo_decoder_new_));
|
||||
}
|
||||
|
||||
// PLC not implemented.
|
||||
TEST_F(OpusTest, OpusDecodePlc) {
|
||||
// PLC in mono mode.
|
||||
TEST_F(OpusTest, OpusDecodePlcMono) {
|
||||
// Create encoder memory.
|
||||
EXPECT_EQ(0, WebRtcOpus_EncoderCreate(&opus_mono_encoder_, 1));
|
||||
EXPECT_EQ(0, WebRtcOpus_DecoderCreate(&opus_mono_decoder_, 1));
|
||||
EXPECT_EQ(0, WebRtcOpus_DecoderCreate(&opus_mono_decoder_new_, 1));
|
||||
|
||||
// Set bitrate.
|
||||
EXPECT_EQ(0, WebRtcOpus_SetBitRate(opus_mono_encoder_, 32000));
|
||||
|
||||
// Check number of channels for decoder.
|
||||
EXPECT_EQ(1, WebRtcOpus_DecoderChannels(opus_mono_decoder_));
|
||||
EXPECT_EQ(1, WebRtcOpus_DecoderChannels(opus_mono_decoder_new_));
|
||||
|
||||
// Encode & decode.
|
||||
int16_t encoded_bytes;
|
||||
int16_t audio_type;
|
||||
int16_t output_data_decode_new[kOpusNumberOfSamples];
|
||||
int16_t output_data_decode[kOpusNumberOfSamples];
|
||||
int16_t* coded = reinterpret_cast<int16_t*>(bitstream_);
|
||||
encoded_bytes = WebRtcOpus_Encode(opus_mono_encoder_, speech_data_, 960,
|
||||
kMaxBytes, bitstream_);
|
||||
EXPECT_EQ(640, WebRtcOpus_DecodeNew(opus_mono_decoder_new_, bitstream_,
|
||||
encoded_bytes, output_data_decode_new,
|
||||
&audio_type));
|
||||
EXPECT_EQ(640, WebRtcOpus_Decode(opus_mono_decoder_, coded,
|
||||
encoded_bytes, output_data_decode,
|
||||
&audio_type));
|
||||
|
||||
// Call decoder PLC for both versions of the decoder.
|
||||
int16_t plc_buffer[kOpusNumberOfSamples];
|
||||
EXPECT_EQ(-1, WebRtcOpus_DecodePlc(opus_stereo_decoder_, plc_buffer, 1));
|
||||
int16_t plc_buffer_new[kOpusNumberOfSamples];
|
||||
EXPECT_EQ(640, WebRtcOpus_DecodePlcMaster(opus_mono_decoder_, plc_buffer, 1));
|
||||
EXPECT_EQ(640, WebRtcOpus_DecodePlc(opus_mono_decoder_new_,
|
||||
plc_buffer_new, 1));
|
||||
|
||||
// Data in |plc_buffer| should be the same as in |plc_buffer_new|.
|
||||
for (int i = 0; i < 640; i++) {
|
||||
EXPECT_EQ(plc_buffer[i], plc_buffer_new[i]);
|
||||
}
|
||||
|
||||
// Free memory.
|
||||
EXPECT_EQ(0, WebRtcOpus_EncoderFree(opus_mono_encoder_));
|
||||
EXPECT_EQ(0, WebRtcOpus_DecoderFree(opus_mono_decoder_));
|
||||
EXPECT_EQ(0, WebRtcOpus_DecoderFree(opus_mono_decoder_new_));
|
||||
}
|
||||
|
||||
// PLC in stereo mode.
|
||||
TEST_F(OpusTest, OpusDecodePlcStereo) {
|
||||
// Create encoder memory.
|
||||
EXPECT_EQ(0, WebRtcOpus_EncoderCreate(&opus_stereo_encoder_, 2));
|
||||
EXPECT_EQ(0, WebRtcOpus_DecoderCreate(&opus_stereo_decoder_, 2));
|
||||
EXPECT_EQ(0, WebRtcOpus_DecoderCreate(&opus_stereo_decoder_new_, 2));
|
||||
|
||||
// Set bitrate.
|
||||
EXPECT_EQ(0, WebRtcOpus_SetBitRate(opus_stereo_encoder_, 64000));
|
||||
|
||||
// Check number of channels for decoder.
|
||||
EXPECT_EQ(2, WebRtcOpus_DecoderChannels(opus_stereo_decoder_));
|
||||
EXPECT_EQ(2, WebRtcOpus_DecoderChannels(opus_stereo_decoder_new_));
|
||||
|
||||
// Encode & decode.
|
||||
int16_t encoded_bytes;
|
||||
int16_t audio_type;
|
||||
int16_t output_data_decode_new[kOpusNumberOfSamples];
|
||||
int16_t output_data_decode[kOpusNumberOfSamples];
|
||||
int16_t output_data_decode_slave[kOpusNumberOfSamples];
|
||||
int16_t* coded = reinterpret_cast<int16_t*>(bitstream_);
|
||||
encoded_bytes = WebRtcOpus_Encode(opus_stereo_encoder_, speech_data_, 960,
|
||||
kMaxBytes, bitstream_);
|
||||
EXPECT_EQ(640, WebRtcOpus_DecodeNew(opus_stereo_decoder_new_, bitstream_,
|
||||
encoded_bytes, output_data_decode_new,
|
||||
&audio_type));
|
||||
EXPECT_EQ(640, WebRtcOpus_Decode(opus_stereo_decoder_, coded,
|
||||
encoded_bytes, output_data_decode,
|
||||
&audio_type));
|
||||
EXPECT_EQ(640, WebRtcOpus_DecodeSlave(opus_stereo_decoder_, coded,
|
||||
encoded_bytes,
|
||||
output_data_decode_slave,
|
||||
&audio_type));
|
||||
|
||||
// Call decoder PLC for both versions of the decoder.
|
||||
int16_t plc_buffer_left[kOpusNumberOfSamples];
|
||||
int16_t plc_buffer_right[kOpusNumberOfSamples];
|
||||
int16_t plc_buffer_new[kOpusNumberOfSamples];
|
||||
EXPECT_EQ(640, WebRtcOpus_DecodePlcMaster(opus_stereo_decoder_,
|
||||
plc_buffer_left, 1));
|
||||
EXPECT_EQ(640, WebRtcOpus_DecodePlcSlave(opus_stereo_decoder_,
|
||||
plc_buffer_right, 1));
|
||||
EXPECT_EQ(640, WebRtcOpus_DecodePlc(opus_stereo_decoder_new_, plc_buffer_new,
|
||||
1));
|
||||
// Data in |plc_buffer_left| and |plc_buffer_right| should be the same as the
|
||||
// interleaved samples in |plc_buffer_new|.
|
||||
for (int i = 0, j = 0; i < 640; i++) {
|
||||
EXPECT_EQ(plc_buffer_left[i], plc_buffer_new[j++]);
|
||||
EXPECT_EQ(plc_buffer_right[i], plc_buffer_new[j++]);
|
||||
}
|
||||
|
||||
// Free memory.
|
||||
EXPECT_EQ(0, WebRtcOpus_EncoderFree(opus_stereo_encoder_));
|
||||
EXPECT_EQ(0, WebRtcOpus_DecoderFree(opus_stereo_decoder_));
|
||||
EXPECT_EQ(0, WebRtcOpus_DecoderFree(opus_stereo_decoder_new_));
|
||||
}
|
||||
|
||||
// Duration estimation.
|
||||
@ -281,14 +379,14 @@ TEST_F(OpusTest, OpusDurationEstimation) {
int16_t encoded_bytes;

// 10 ms.
encoded_bytes = WebRtcOpus_Encode(opus_stereo_encoder_, speech_data_, 480,
kMaxBytes, bitstream_);
encoded_bytes = WebRtcOpus_Encode(opus_stereo_encoder_, speech_data_, 480,
kMaxBytes, bitstream_);
EXPECT_EQ(320, WebRtcOpus_DurationEst(opus_stereo_decoder_, bitstream_,
encoded_bytes));

// 20 ms
encoded_bytes = WebRtcOpus_Encode(opus_stereo_encoder_, speech_data_, 960,
kMaxBytes, bitstream_);
encoded_bytes = WebRtcOpus_Encode(opus_stereo_encoder_, speech_data_, 960,
kMaxBytes, bitstream_);
EXPECT_EQ(640, WebRtcOpus_DurationEst(opus_stereo_decoder_, bitstream_,
encoded_bytes));

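// Sanity check on the expected values above (an observation, assuming the
// decoder output rate is 32 kHz as these numbers imply): 10 ms of audio is
// 32000 * 0.010 = 320 samples per channel, and 20 ms is 640, matching the
// EXPECT_EQ values.
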
@@ -15,6 +15,10 @@
|
||||
|
||||
#include "typedefs.h"
|
||||
|
||||
#ifdef WEBRTC_BIG_ENDIAN
|
||||
#include "signal_processing_library.h"
|
||||
#endif
|
||||
|
||||
#define HIGHEND 0xFF00
|
||||
#define LOWEND 0xFF
|
||||
|
||||
@@ -26,7 +30,7 @@ int16_t WebRtcPcm16b_EncodeW16(int16_t *speechIn16b,
|
||||
int16_t *speechOut16b)
|
||||
{
|
||||
#ifdef WEBRTC_BIG_ENDIAN
|
||||
memcpy(speechOut16b, speechIn16b, len * sizeof(int16_t));
|
||||
WEBRTC_SPL_MEMCPY_W16(speechOut16b, speechIn16b, len);
|
||||
#else
|
||||
int i;
|
||||
for (i=0;i<len;i++) {
|
||||
@@ -65,7 +69,7 @@ int16_t WebRtcPcm16b_DecodeW16(void *inst,
|
||||
int16_t* speechType)
|
||||
{
|
||||
#ifdef WEBRTC_BIG_ENDIAN
|
||||
memcpy(speechOut16b, speechIn16b, ((len*sizeof(int16_t)+1)>>1));
|
||||
WEBRTC_SPL_MEMCPY_W8(speechOut16b, speechIn16b, ((len*sizeof(int16_t)+1)>>1));
|
||||
#else
|
||||
int i;
|
||||
int samples=len>>1;
|
||||
|
@@ -0,0 +1,310 @@
|
||||
/*
|
||||
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#include "webrtc/modules/audio_coding/main/acm2/acm_amr.h"
|
||||
|
||||
#ifdef WEBRTC_CODEC_AMR
|
||||
// NOTE! GSM AMR is not included in the open-source package. The following
|
||||
// interface file is needed:
|
||||
#include "webrtc/modules/audio_coding/main/codecs/amr/interface/amr_interface.h"
|
||||
#include "webrtc/modules/audio_coding/main/interface/audio_coding_module_typedefs.h"
|
||||
#include "webrtc/modules/audio_coding/main/acm2/acm_common_defs.h"
|
||||
#include "webrtc/system_wrappers/interface/rw_lock_wrapper.h"
|
||||
#include "webrtc/system_wrappers/interface/trace.h"
|
||||
|
||||
// The API in the header file should match the one below.
|
||||
//
|
||||
// int16_t WebRtcAmr_CreateEnc(AMR_encinst_t_** enc_inst);
|
||||
// int16_t WebRtcAmr_CreateDec(AMR_decinst_t_** dec_inst);
|
||||
// int16_t WebRtcAmr_FreeEnc(AMR_encinst_t_* enc_inst);
|
||||
// int16_t WebRtcAmr_FreeDec(AMR_decinst_t_* dec_inst);
|
||||
// int16_t WebRtcAmr_Encode(AMR_encinst_t_* enc_inst,
|
||||
// int16_t* input,
|
||||
// int16_t len,
|
||||
// int16_t*output,
|
||||
// int16_t mode);
|
||||
// int16_t WebRtcAmr_EncoderInit(AMR_encinst_t_* enc_inst,
|
||||
// int16_t dtx_mode);
|
||||
// int16_t WebRtcAmr_EncodeBitmode(AMR_encinst_t_* enc_inst,
|
||||
// int format);
|
||||
// int16_t WebRtcAmr_Decode(AMR_decinst_t_* dec_inst);
|
||||
// int16_t WebRtcAmr_DecodePlc(AMR_decinst_t_* dec_inst);
|
||||
// int16_t WebRtcAmr_DecoderInit(AMR_decinst_t_* dec_inst);
|
||||
// int16_t WebRtcAmr_DecodeBitmode(AMR_decinst_t_* dec_inst,
|
||||
// int format);
|
||||
#endif
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
#ifndef WEBRTC_CODEC_AMR
|
||||
ACMAMR::ACMAMR(int16_t /* codec_id */)
|
||||
: encoder_inst_ptr_(NULL),
|
||||
encoding_mode_(-1), // Invalid value.
|
||||
encoding_rate_(0), // Invalid value.
|
||||
encoder_packing_format_(AMRBandwidthEfficient) {
|
||||
return;
|
||||
}
|
||||
|
||||
ACMAMR::~ACMAMR() { return; }
|
||||
|
||||
int16_t ACMAMR::InternalEncode(uint8_t* /* bitstream */,
|
||||
int16_t* /* bitstream_len_byte */) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
int16_t ACMAMR::EnableDTX() { return -1; }
|
||||
|
||||
int16_t ACMAMR::DisableDTX() { return -1; }
|
||||
|
||||
int16_t ACMAMR::InternalInitEncoder(WebRtcACMCodecParams* /* codec_params */) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
ACMGenericCodec* ACMAMR::CreateInstance(void) { return NULL; }
|
||||
|
||||
int16_t ACMAMR::InternalCreateEncoder() { return -1; }
|
||||
|
||||
void ACMAMR::DestructEncoderSafe() { return; }
|
||||
|
||||
int16_t ACMAMR::SetBitRateSafe(const int32_t /* rate */) { return -1; }
|
||||
|
||||
void ACMAMR::InternalDestructEncoderInst(void* /* ptr_inst */) { return; }
|
||||
|
||||
int16_t ACMAMR::SetAMREncoderPackingFormat(
|
||||
ACMAMRPackingFormat /* packing_format */) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
ACMAMRPackingFormat ACMAMR::AMREncoderPackingFormat() const {
|
||||
return AMRUndefined;
|
||||
}
|
||||
|
||||
int16_t ACMAMR::SetAMRDecoderPackingFormat(
|
||||
ACMAMRPackingFormat /* packing_format */) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
ACMAMRPackingFormat ACMAMR::AMRDecoderPackingFormat() const {
|
||||
return AMRUndefined;
|
||||
}
|
||||
|
||||
#else //===================== Actual Implementation =======================
|
||||
|
||||
#define WEBRTC_AMR_MR475 0
|
||||
#define WEBRTC_AMR_MR515 1
|
||||
#define WEBRTC_AMR_MR59 2
|
||||
#define WEBRTC_AMR_MR67 3
|
||||
#define WEBRTC_AMR_MR74 4
|
||||
#define WEBRTC_AMR_MR795 5
|
||||
#define WEBRTC_AMR_MR102 6
|
||||
#define WEBRTC_AMR_MR122 7
|
||||
|
||||
ACMAMR::ACMAMR(int16_t codec_id)
|
||||
: encoder_inst_ptr_(NULL),
|
||||
encoding_mode_(-1), // invalid value
|
||||
encoding_rate_(0) { // invalid value
|
||||
codec_id_ = codec_id;
|
||||
has_internal_dtx_ = true;
|
||||
encoder_packing_format_ = AMRBandwidthEfficient;
|
||||
return;
|
||||
}
|
||||
|
||||
ACMAMR::~ACMAMR() {
|
||||
if (encoder_inst_ptr_ != NULL) {
|
||||
WebRtcAmr_FreeEnc(encoder_inst_ptr_);
|
||||
encoder_inst_ptr_ = NULL;
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
int16_t ACMAMR::InternalEncode(uint8_t* bitstream,
|
||||
int16_t* bitstream_len_byte) {
|
||||
int16_t vad_decision = 1;
|
||||
// Sanity check that the rate is set correctly. We might skip this check:
// if the rate is not set correctly, the initialization flag should be false
// and we should not get here.
|
||||
if ((encoding_mode_ < WEBRTC_AMR_MR475) ||
|
||||
(encoding_mode_ > WEBRTC_AMR_MR122)) {
|
||||
*bitstream_len_byte = 0;
|
||||
return -1;
|
||||
}
|
||||
*bitstream_len_byte = WebRtcAmr_Encode(encoder_inst_ptr_,
|
||||
&in_audio_[in_audio_ix_read_],
|
||||
frame_len_smpl_,
|
||||
reinterpret_cast<int16_t*>(bitstream),
|
||||
encoding_mode_);
|
||||
|
||||
// Update VAD, if internal DTX is used
|
||||
if (has_internal_dtx_ && dtx_enabled_) {
|
||||
if (*bitstream_len_byte <= (7 * frame_len_smpl_ / 160)) {
|
||||
vad_decision = 0;
|
||||
}
|
||||
for (int16_t n = 0; n < MAX_FRAME_SIZE_10MSEC; n++) {
|
||||
vad_label_[n] = vad_decision;
|
||||
}
|
||||
}
|
||||
// increment the read index
|
||||
in_audio_ix_read_ += frame_len_smpl_;
|
||||
return *bitstream_len_byte;
|
||||
}
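// A note on the DTX threshold in the function above (an assumption, not a
// documented constant): at 8 kHz, frame_len_smpl_ / 160 is the number of
// 20 ms blocks in the frame, so a payload of at most 7 bytes per 20 ms block
// -- roughly the size of an AMR SID/no-data frame -- is treated as non-speech
// and all VAD labels are set to 0.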
|
||||
|
||||
int16_t ACMAMR::EnableDTX() {
|
||||
if (dtx_enabled_) {
|
||||
return 0;
|
||||
} else if (encoder_exist_) {  // check if encoder exists
|
||||
// enable DTX
|
||||
if (WebRtcAmr_EncoderInit(encoder_inst_ptr_, 1) < 0) {
|
||||
return -1;
|
||||
}
|
||||
dtx_enabled_ = true;
|
||||
return 0;
|
||||
} else {
|
||||
return -1;
|
||||
}
|
||||
}
|
||||
|
||||
int16_t ACMAMR::DisableDTX() {
|
||||
if (!dtx_enabled_) {
|
||||
return 0;
|
||||
} else if (encoder_exist_) {  // check if encoder exists
|
||||
// disable DTX
|
||||
if (WebRtcAmr_EncoderInit(encoder_inst_ptr_, 0) < 0) {
|
||||
return -1;
|
||||
}
|
||||
dtx_enabled_ = false;
|
||||
return 0;
|
||||
} else {
|
||||
// encoder doesn't exist, therefore disabling is harmless
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
|
||||
int16_t ACMAMR::InternalInitEncoder(WebRtcACMCodecParams* codec_params) {
|
||||
int16_t status = SetBitRateSafe((codec_params->codec_inst).rate);
|
||||
status += (WebRtcAmr_EncoderInit(encoder_inst_ptr_,
|
||||
((codec_params->enable_dtx) ? 1 : 0)) < 0)
|
||||
? -1
|
||||
: 0;
|
||||
status +=
|
||||
(WebRtcAmr_EncodeBitmode(encoder_inst_ptr_, encoder_packing_format_) < 0)
|
||||
? -1
|
||||
: 0;
|
||||
return (status < 0) ? -1 : 0;
|
||||
}
|
||||
|
||||
ACMGenericCodec* ACMAMR::CreateInstance(void) { return NULL; }
|
||||
|
||||
int16_t ACMAMR::InternalCreateEncoder() {
|
||||
return WebRtcAmr_CreateEnc(&encoder_inst_ptr_);
|
||||
}
|
||||
|
||||
void ACMAMR::DestructEncoderSafe() {
|
||||
if (encoder_inst_ptr_ != NULL) {
|
||||
WebRtcAmr_FreeEnc(encoder_inst_ptr_);
|
||||
encoder_inst_ptr_ = NULL;
|
||||
}
|
||||
// There is no encoder, so reset the following state.
|
||||
encoder_exist_ = false;
|
||||
encoder_initialized_ = false;
|
||||
encoding_mode_ = -1; // invalid value
|
||||
encoding_rate_ = 0; // invalid value
|
||||
}
|
||||
|
||||
int16_t ACMAMR::SetBitRateSafe(const int32_t rate) {
|
||||
switch (rate) {
|
||||
case 4750: {
|
||||
encoding_mode_ = WEBRTC_AMR_MR475;
|
||||
encoding_rate_ = 4750;
|
||||
break;
|
||||
}
|
||||
case 5150: {
|
||||
encoding_mode_ = WEBRTC_AMR_MR515;
|
||||
encoding_rate_ = 5150;
|
||||
break;
|
||||
}
|
||||
case 5900: {
|
||||
encoding_mode_ = WEBRTC_AMR_MR59;
|
||||
encoding_rate_ = 5900;
|
||||
break;
|
||||
}
|
||||
case 6700: {
|
||||
encoding_mode_ = WEBRTC_AMR_MR67;
|
||||
encoding_rate_ = 6700;
|
||||
break;
|
||||
}
|
||||
case 7400: {
|
||||
encoding_mode_ = WEBRTC_AMR_MR74;
|
||||
encoding_rate_ = 7400;
|
||||
break;
|
||||
}
|
||||
case 7950: {
|
||||
encoding_mode_ = WEBRTC_AMR_MR795;
|
||||
encoding_rate_ = 7950;
|
||||
break;
|
||||
}
|
||||
case 10200: {
|
||||
encoding_mode_ = WEBRTC_AMR_MR102;
|
||||
encoding_rate_ = 10200;
|
||||
break;
|
||||
}
|
||||
case 12200: {
|
||||
encoding_mode_ = WEBRTC_AMR_MR122;
|
||||
encoding_rate_ = 12200;
|
||||
break;
|
||||
}
|
||||
default: {
|
||||
return -1;
|
||||
}
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
void ACMAMR::InternalDestructEncoderInst(void* ptr_inst) {
|
||||
// Free the memory where ptr_inst is pointing to
|
||||
if (ptr_inst != NULL) {
|
||||
WebRtcAmr_FreeEnc(static_cast<AMR_encinst_t_*>(ptr_inst));
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
int16_t ACMAMR::SetAMREncoderPackingFormat(ACMAMRPackingFormat packing_format) {
|
||||
if ((packing_format != AMRBandwidthEfficient) &&
|
||||
(packing_format != AMROctetAlligned) &&
|
||||
(packing_format != AMRFileStorage)) {
|
||||
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
|
||||
"Invalid AMR Encoder packing-format.");
|
||||
return -1;
|
||||
} else {
|
||||
if (WebRtcAmr_EncodeBitmode(encoder_inst_ptr_, packing_format) < 0) {
|
||||
return -1;
|
||||
} else {
|
||||
encoder_packing_format_ = packing_format;
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
ACMAMRPackingFormat ACMAMR::AMREncoderPackingFormat() const {
|
||||
return encoder_packing_format_;
|
||||
}
|
||||
|
||||
int16_t ACMAMR::SetAMRDecoderPackingFormat(
|
||||
ACMAMRPackingFormat /* packing_format */) {
|
||||
// Not implemented.
|
||||
return -1;
|
||||
}
|
||||
|
||||
ACMAMRPackingFormat ACMAMR::AMRDecoderPackingFormat() const {
|
||||
// Not implemented.
|
||||
return AMRUndefined;
|
||||
}
|
||||
|
||||
#endif
|
||||
} // namespace webrtc
|
@@ -0,0 +1,65 @@
|
||||
/*
|
||||
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_ACM2_ACM_AMR_H_
|
||||
#define WEBRTC_MODULES_AUDIO_CODING_MAIN_ACM2_ACM_AMR_H_
|
||||
|
||||
#include "webrtc/modules/audio_coding/main/acm2/acm_generic_codec.h"
|
||||
|
||||
// forward declaration
|
||||
struct AMR_encinst_t_;
|
||||
struct AMR_decinst_t_;
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
enum ACMAMRPackingFormat;
|
||||
|
||||
class ACMAMR : public ACMGenericCodec {
|
||||
public:
|
||||
explicit ACMAMR(int16_t codec_id);
|
||||
~ACMAMR();
|
||||
|
||||
// for FEC
|
||||
ACMGenericCodec* CreateInstance(void);
|
||||
|
||||
int16_t InternalEncode(uint8_t* bitstream, int16_t* bitstream_len_byte);
|
||||
|
||||
int16_t InternalInitEncoder(WebRtcACMCodecParams* codec_params);
|
||||
|
||||
int16_t SetAMREncoderPackingFormat(const ACMAMRPackingFormat packing_format);
|
||||
|
||||
ACMAMRPackingFormat AMREncoderPackingFormat() const;
|
||||
|
||||
int16_t SetAMRDecoderPackingFormat(const ACMAMRPackingFormat packing_format);
|
||||
|
||||
ACMAMRPackingFormat AMRDecoderPackingFormat() const;
|
||||
|
||||
protected:
|
||||
void DestructEncoderSafe();
|
||||
|
||||
int16_t InternalCreateEncoder();
|
||||
|
||||
void InternalDestructEncoderInst(void* ptr_inst);
|
||||
|
||||
int16_t SetBitRateSafe(const int32_t rate);
|
||||
|
||||
int16_t EnableDTX();
|
||||
|
||||
int16_t DisableDTX();
|
||||
|
||||
AMR_encinst_t_* encoder_inst_ptr_;
|
||||
int16_t encoding_mode_;
|
||||
int16_t encoding_rate_;
|
||||
ACMAMRPackingFormat encoder_packing_format_;
|
||||
};
|
||||
|
||||
} // namespace webrtc
|
||||
|
||||
#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_ACM2_ACM_AMR_H_
|
@@ -0,0 +1,316 @@
|
||||
/*
|
||||
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#include "webrtc/modules/audio_coding/main/acm2/acm_amrwb.h"
|
||||
|
||||
#ifdef WEBRTC_CODEC_AMRWB
|
||||
// NOTE! GSM AMR-wb is not included in the open-source package. The
|
||||
// following interface file is needed:
|
||||
#include "webrtc/modules/audio_coding/main/codecs/amrwb/interface/amrwb_interface.h"
|
||||
#include "webrtc/modules/audio_coding/main/interface/audio_coding_module_typedefs.h"
|
||||
#include "webrtc/modules/audio_coding/main/acm2/acm_common_defs.h"
|
||||
#include "webrtc/system_wrappers/interface/rw_lock_wrapper.h"
|
||||
#include "webrtc/system_wrappers/interface/trace.h"
|
||||
|
||||
// The API in the header file should match the one below.
|
||||
//
|
||||
// int16_t WebRtcAmrWb_CreateEnc(AMRWB_encinst_t_** enc_inst);
|
||||
// int16_t WebRtcAmrWb_CreateDec(AMRWB_decinst_t_** dec_inst);
|
||||
// int16_t WebRtcAmrWb_FreeEnc(AMRWB_encinst_t_* enc_inst);
|
||||
// int16_t WebRtcAmrWb_FreeDec(AMRWB_decinst_t_* dec_inst);
|
||||
// int16_t WebRtcAmrWb_Encode(AMRWB_encinst_t_* enc_inst, int16_t* input,
|
||||
// int16_t len, int16_t* output, int16_t mode);
|
||||
// int16_t WebRtcAmrWb_EncoderInit(AMRWB_encinst_t_* enc_inst,
|
||||
// int16_t dtx_mode);
|
||||
// int16_t WebRtcAmrWb_EncodeBitmode(AMRWB_encinst_t_* enc_inst,
|
||||
// int format);
|
||||
// int16_t WebRtcAmrWb_Decode(AMRWB_decinst_t_* dec_inst);
|
||||
// int16_t WebRtcAmrWb_DecodePlc(AMRWB_decinst_t_* dec_inst);
|
||||
// int16_t WebRtcAmrWb_DecoderInit(AMRWB_decinst_t_* dec_inst);
|
||||
// int16_t WebRtcAmrWb_DecodeBitmode(AMRWB_decinst_t_* dec_inst,
|
||||
// int format);
|
||||
#endif
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
#ifndef WEBRTC_CODEC_AMRWB
|
||||
ACMAMRwb::ACMAMRwb(int16_t /* codec_id */)
|
||||
: encoder_inst_ptr_(NULL),
|
||||
encoding_mode_(-1), // invalid value
|
||||
encoding_rate_(0), // invalid value
|
||||
encoder_packing_format_(AMRBandwidthEfficient) {}
|
||||
|
||||
ACMAMRwb::~ACMAMRwb() {}
|
||||
|
||||
int16_t ACMAMRwb::InternalEncode(uint8_t* /* bitstream */,
|
||||
int16_t* /* bitstream_len_byte */) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
int16_t ACMAMRwb::EnableDTX() { return -1; }
|
||||
|
||||
int16_t ACMAMRwb::DisableDTX() { return -1; }
|
||||
|
||||
int16_t ACMAMRwb::InternalInitEncoder(
|
||||
WebRtcACMCodecParams* /* codec_params */) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
ACMGenericCodec* ACMAMRwb::CreateInstance(void) { return NULL; }
|
||||
|
||||
int16_t ACMAMRwb::InternalCreateEncoder() { return -1; }
|
||||
|
||||
void ACMAMRwb::DestructEncoderSafe() { return; }
|
||||
|
||||
int16_t ACMAMRwb::SetBitRateSafe(const int32_t /* rate */) { return -1; }
|
||||
|
||||
void ACMAMRwb::InternalDestructEncoderInst(void* /* ptr_inst */) { return; }
|
||||
|
||||
int16_t ACMAMRwb::SetAMRwbEncoderPackingFormat(
|
||||
ACMAMRPackingFormat /* packing_format */) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
ACMAMRPackingFormat ACMAMRwb::AMRwbEncoderPackingFormat() const {
|
||||
return AMRUndefined;
|
||||
}
|
||||
|
||||
int16_t ACMAMRwb::SetAMRwbDecoderPackingFormat(
|
||||
ACMAMRPackingFormat /* packing_format */) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
ACMAMRPackingFormat ACMAMRwb::AMRwbDecoderPackingFormat() const {
|
||||
return AMRUndefined;
|
||||
}
|
||||
|
||||
#else //===================== Actual Implementation =======================
|
||||
|
||||
#define AMRWB_MODE_7k 0
|
||||
#define AMRWB_MODE_9k 1
|
||||
#define AMRWB_MODE_12k 2
|
||||
#define AMRWB_MODE_14k 3
|
||||
#define AMRWB_MODE_16k 4
|
||||
#define AMRWB_MODE_18k 5
|
||||
#define AMRWB_MODE_20k 6
|
||||
#define AMRWB_MODE_23k 7
|
||||
#define AMRWB_MODE_24k 8
|
||||
|
||||
ACMAMRwb::ACMAMRwb(int16_t codec_id)
|
||||
: encoder_inst_ptr_(NULL),
|
||||
encoding_mode_(-1), // invalid value
|
||||
encoding_rate_(0) { // invalid value
|
||||
codec_id_ = codec_id;
|
||||
has_internal_dtx_ = true;
|
||||
encoder_packing_format_ = AMRBandwidthEfficient;
|
||||
return;
|
||||
}
|
||||
|
||||
ACMAMRwb::~ACMAMRwb() {
|
||||
if (encoder_inst_ptr_ != NULL) {
|
||||
WebRtcAmrWb_FreeEnc(encoder_inst_ptr_);
|
||||
encoder_inst_ptr_ = NULL;
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
int16_t ACMAMRwb::InternalEncode(uint8_t* bitstream,
|
||||
int16_t* bitstream_len_byte) {
|
||||
int16_t vad_decision = 1;
|
||||
// Sanity check that the rate is set correctly. We might skip this check:
// if the rate is not set correctly, the initialization flag should be false
// and we should not get here.
|
||||
if ((encoding_mode_ < AMRWB_MODE_7k) || (encoding_mode_ > AMRWB_MODE_24k)) {
|
||||
*bitstream_len_byte = 0;
|
||||
return -1;
|
||||
}
|
||||
*bitstream_len_byte = WebRtcAmrWb_Encode(
|
||||
encoder_inst_ptr_, &in_audio_[in_audio_ix_read_], frame_len_smpl_,
|
||||
reinterpret_cast<int16_t*>(bitstream), encoding_mode_);
|
||||
|
||||
// Update VAD, if internal DTX is used
|
||||
if (has_internal_dtx_ && dtx_enabled_) {
|
||||
if (*bitstream_len_byte <= (7 * frame_len_smpl_ / 160)) {
|
||||
vad_decision = 0;
|
||||
}
|
||||
for (int16_t n = 0; n < MAX_FRAME_SIZE_10MSEC; n++) {
|
||||
vad_label_[n] = vad_decision;
|
||||
}
|
||||
}
|
||||
// Increment the read index. This tells the caller how far
// we have gone forward in reading the audio buffer.
|
||||
in_audio_ix_read_ += frame_len_smpl_;
|
||||
return *bitstream_len_byte;
|
||||
}
|
||||
|
||||
int16_t ACMAMRwb::EnableDTX() {
|
||||
if (dtx_enabled_) {
|
||||
return 0;
|
||||
} else if (encoder_exist_) {  // check if encoder exists
|
||||
// enable DTX
|
||||
if (WebRtcAmrWb_EncoderInit(encoder_inst_ptr_, 1) < 0) {
|
||||
return -1;
|
||||
}
|
||||
dtx_enabled_ = true;
|
||||
return 0;
|
||||
} else {
|
||||
return -1;
|
||||
}
|
||||
}
|
||||
|
||||
int16_t ACMAMRwb::DisableDTX() {
|
||||
if (!dtx_enabled_) {
|
||||
return 0;
|
||||
} else if (encoder_exist_) {  // check if encoder exists
|
||||
// disable DTX
|
||||
if (WebRtcAmrWb_EncoderInit(encoder_inst_ptr_, 0) < 0) {
|
||||
return -1;
|
||||
}
|
||||
dtx_enabled_ = false;
|
||||
return 0;
|
||||
} else {
|
||||
// encoder doesn't exist, therefore disabling is harmless
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
|
||||
int16_t ACMAMRwb::InternalInitEncoder(WebRtcACMCodecParams* codec_params) {
|
||||
// sanity check
|
||||
if (encoder_inst_ptr_ == NULL) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
int16_t status = SetBitRateSafe((codec_params->codec_inst).rate);
|
||||
status += (WebRtcAmrWb_EncoderInit(encoder_inst_ptr_,
|
||||
((codec_params->enable_dtx) ? 1 : 0)) < 0)
|
||||
? -1
|
||||
: 0;
|
||||
status += (WebRtcAmrWb_EncodeBitmode(encoder_inst_ptr_,
|
||||
encoder_packing_format_) < 0)
|
||||
? -1
|
||||
: 0;
|
||||
return (status < 0) ? -1 : 0;
|
||||
}
|
||||
|
||||
ACMGenericCodec* ACMAMRwb::CreateInstance(void) { return NULL; }
|
||||
|
||||
int16_t ACMAMRwb::InternalCreateEncoder() {
|
||||
return WebRtcAmrWb_CreateEnc(&encoder_inst_ptr_);
|
||||
}
|
||||
|
||||
void ACMAMRwb::DestructEncoderSafe() {
|
||||
if (encoder_inst_ptr_ != NULL) {
|
||||
WebRtcAmrWb_FreeEnc(encoder_inst_ptr_);
|
||||
encoder_inst_ptr_ = NULL;
|
||||
}
|
||||
// There is no encoder, so reset the following state.
|
||||
encoder_exist_ = false;
|
||||
encoder_initialized_ = false;
|
||||
encoding_mode_ = -1; // invalid value
|
||||
encoding_rate_ = 0;
|
||||
}
|
||||
|
||||
int16_t ACMAMRwb::SetBitRateSafe(const int32_t rate) {
|
||||
switch (rate) {
|
||||
case 7000: {
|
||||
encoding_mode_ = AMRWB_MODE_7k;
|
||||
encoding_rate_ = 7000;
|
||||
break;
|
||||
}
|
||||
case 9000: {
|
||||
encoding_mode_ = AMRWB_MODE_9k;
|
||||
encoding_rate_ = 9000;
|
||||
break;
|
||||
}
|
||||
case 12000: {
|
||||
encoding_mode_ = AMRWB_MODE_12k;
|
||||
encoding_rate_ = 12000;
|
||||
break;
|
||||
}
|
||||
case 14000: {
|
||||
encoding_mode_ = AMRWB_MODE_14k;
|
||||
encoding_rate_ = 14000;
|
||||
break;
|
||||
}
|
||||
case 16000: {
|
||||
encoding_mode_ = AMRWB_MODE_16k;
|
||||
encoding_rate_ = 16000;
|
||||
break;
|
||||
}
|
||||
case 18000: {
|
||||
encoding_mode_ = AMRWB_MODE_18k;
|
||||
encoding_rate_ = 18000;
|
||||
break;
|
||||
}
|
||||
case 20000: {
|
||||
encoding_mode_ = AMRWB_MODE_20k;
|
||||
encoding_rate_ = 20000;
|
||||
break;
|
||||
}
|
||||
case 23000: {
|
||||
encoding_mode_ = AMRWB_MODE_23k;
|
||||
encoding_rate_ = 23000;
|
||||
break;
|
||||
}
|
||||
case 24000: {
|
||||
encoding_mode_ = AMRWB_MODE_24k;
|
||||
encoding_rate_ = 24000;
|
||||
break;
|
||||
}
|
||||
default: {
|
||||
return -1;
|
||||
}
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
void ACMAMRwb::InternalDestructEncoderInst(void* ptr_inst) {
|
||||
if (ptr_inst != NULL) {
|
||||
WebRtcAmrWb_FreeEnc(static_cast<AMRWB_encinst_t_*>(ptr_inst));
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
int16_t ACMAMRwb::SetAMRwbEncoderPackingFormat(
|
||||
ACMAMRPackingFormat packing_format) {
|
||||
if ((packing_format != AMRBandwidthEfficient) &&
|
||||
(packing_format != AMROctetAlligned) &&
|
||||
(packing_format != AMRFileStorage)) {
|
||||
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
|
||||
"Invalid AMRwb encoder packing-format.");
|
||||
return -1;
|
||||
} else {
|
||||
if (WebRtcAmrWb_EncodeBitmode(encoder_inst_ptr_, packing_format) < 0) {
|
||||
return -1;
|
||||
} else {
|
||||
encoder_packing_format_ = packing_format;
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
ACMAMRPackingFormat ACMAMRwb::AMRwbEncoderPackingFormat() const {
|
||||
return encoder_packing_format_;
|
||||
}
|
||||
|
||||
int16_t ACMAMRwb::SetAMRwbDecoderPackingFormat(
|
||||
ACMAMRPackingFormat packing_format) {
|
||||
// Not implemented.
|
||||
return -1;
|
||||
}
|
||||
|
||||
ACMAMRPackingFormat ACMAMRwb::AMRwbDecoderPackingFormat() const {
|
||||
// Not implemented.
|
||||
return AMRUndefined;
|
||||
}
|
||||
|
||||
#endif
|
||||
|
||||
} // namespace webrtc
|
@@ -0,0 +1,66 @@
|
||||
/*
|
||||
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_ACM2_ACM_AMRWB_H_
|
||||
#define WEBRTC_MODULES_AUDIO_CODING_MAIN_ACM2_ACM_AMRWB_H_
|
||||
|
||||
#include "webrtc/modules/audio_coding/main/acm2/acm_generic_codec.h"
|
||||
|
||||
// forward declaration
|
||||
struct AMRWB_encinst_t_;
|
||||
struct AMRWB_decinst_t_;
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
class ACMAMRwb : public ACMGenericCodec {
|
||||
public:
|
||||
explicit ACMAMRwb(int16_t codec_id);
|
||||
~ACMAMRwb();
|
||||
|
||||
// for FEC
|
||||
ACMGenericCodec* CreateInstance(void);
|
||||
|
||||
int16_t InternalEncode(uint8_t* bitstream, int16_t* bitstream_len_byte);
|
||||
|
||||
int16_t InternalInitEncoder(WebRtcACMCodecParams* codec_params);
|
||||
|
||||
int16_t SetAMRwbEncoderPackingFormat(
|
||||
const ACMAMRPackingFormat packing_format);
|
||||
|
||||
ACMAMRPackingFormat AMRwbEncoderPackingFormat() const;
|
||||
|
||||
int16_t SetAMRwbDecoderPackingFormat(
|
||||
const ACMAMRPackingFormat packing_format);
|
||||
|
||||
ACMAMRPackingFormat AMRwbDecoderPackingFormat() const;
|
||||
|
||||
protected:
|
||||
void DestructEncoderSafe();
|
||||
|
||||
int16_t InternalCreateEncoder();
|
||||
|
||||
void InternalDestructEncoderInst(void* ptr_inst);
|
||||
|
||||
int16_t SetBitRateSafe(const int32_t rate);
|
||||
|
||||
int16_t EnableDTX();
|
||||
|
||||
int16_t DisableDTX();
|
||||
|
||||
AMRWB_encinst_t_* encoder_inst_ptr_;
|
||||
|
||||
int16_t encoding_mode_;
|
||||
int16_t encoding_rate_;
|
||||
ACMAMRPackingFormat encoder_packing_format_;
|
||||
};
|
||||
|
||||
} // namespace webrtc
|
||||
|
||||
#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_ACM2_ACM_AMRWB_H_
|
@@ -0,0 +1,191 @@
|
||||
/*
|
||||
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#include "webrtc/modules/audio_coding/main/acm2/acm_celt.h"
|
||||
|
||||
#ifdef WEBRTC_CODEC_CELT
|
||||
// NOTE! Celt is not included in the open-source package. Modify this file
// or your codec API to match the function calls and the name of the CELT API
// file that is used.
|
||||
#include "webrtc/modules/audio_coding/codecs/celt/include/celt_interface.h"
|
||||
#include "webrtc/modules/audio_coding/main/acm2/acm_common_defs.h"
|
||||
#include "webrtc/system_wrappers/interface/trace.h"
|
||||
#endif
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
#ifndef WEBRTC_CODEC_CELT
|
||||
|
||||
ACMCELT::ACMCELT(int16_t /* codec_id */)
|
||||
: enc_inst_ptr_(NULL),
|
||||
sampling_freq_(0),
|
||||
bitrate_(0),
|
||||
channels_(1) {
|
||||
return;
|
||||
}
|
||||
|
||||
ACMCELT::~ACMCELT() {
|
||||
return;
|
||||
}
|
||||
|
||||
int16_t ACMCELT::InternalEncode(uint8_t* /* bitstream */,
|
||||
int16_t* /* bitstream_len_byte */) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
int16_t ACMCELT::InternalInitEncoder(WebRtcACMCodecParams* /* codec_params */) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
ACMGenericCodec* ACMCELT::CreateInstance(void) {
|
||||
return NULL;
|
||||
}
|
||||
|
||||
int16_t ACMCELT::InternalCreateEncoder() {
|
||||
return -1;
|
||||
}
|
||||
|
||||
void ACMCELT::DestructEncoderSafe() {
|
||||
return;
|
||||
}
|
||||
|
||||
void ACMCELT::InternalDestructEncoderInst(void* /* ptr_inst */) {
|
||||
return;
|
||||
}
|
||||
|
||||
int16_t ACMCELT::SetBitRateSafe(const int32_t /*rate*/) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
#else //===================== Actual Implementation =======================
|
||||
|
||||
ACMCELT::ACMCELT(int16_t codec_id)
|
||||
: enc_inst_ptr_(NULL),
|
||||
sampling_freq_(32000), // Default sampling frequency.
|
||||
bitrate_(64000), // Default rate.
|
||||
channels_(1) { // Default send mono.
|
||||
// TODO(tlegrand): remove later when ACMGenericCodec has a new constructor.
|
||||
codec_id_ = codec_id;
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
ACMCELT::~ACMCELT() {
|
||||
if (enc_inst_ptr_ != NULL) {
|
||||
WebRtcCelt_FreeEnc(enc_inst_ptr_);
|
||||
enc_inst_ptr_ = NULL;
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
int16_t ACMCELT::InternalEncode(uint8_t* bitstream,
|
||||
int16_t* bitstream_len_byte) {
|
||||
*bitstream_len_byte = 0;
|
||||
|
||||
// Call Encoder.
|
||||
*bitstream_len_byte = WebRtcCelt_Encode(enc_inst_ptr_,
|
||||
&in_audio_[in_audio_ix_read_],
|
||||
bitstream);
|
||||
|
||||
// Increment the read index. This tells the caller how far
// we have gone forward in reading the audio buffer.
|
||||
in_audio_ix_read_ += frame_len_smpl_ * channels_;
|
||||
|
||||
if (*bitstream_len_byte < 0) {
|
||||
// Error reported from the encoder.
|
||||
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
|
||||
"InternalEncode: Encode error for Celt");
|
||||
*bitstream_len_byte = 0;
|
||||
return -1;
|
||||
}
|
||||
|
||||
return *bitstream_len_byte;
|
||||
}
|
||||
|
||||
int16_t ACMCELT::InternalInitEncoder(WebRtcACMCodecParams* codec_params) {
|
||||
// Set bitrate and check that it is within the valid range.
|
||||
int16_t status = SetBitRateSafe((codec_params->codec_inst).rate);
|
||||
if (status < 0) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
// If the number of channels has changed, we need to re-create the encoder memory.
|
||||
if (codec_params->codec_inst.channels != channels_) {
|
||||
WebRtcCelt_FreeEnc(enc_inst_ptr_);
|
||||
enc_inst_ptr_ = NULL;
|
||||
// Store new number of channels.
|
||||
channels_ = codec_params->codec_inst.channels;
|
||||
if (WebRtcCelt_CreateEnc(&enc_inst_ptr_, channels_) < 0) {
|
||||
return -1;
|
||||
}
|
||||
}
|
||||
|
||||
// Initiate encoder.
|
||||
if (WebRtcCelt_EncoderInit(enc_inst_ptr_, channels_, bitrate_) >= 0) {
|
||||
return 0;
|
||||
} else {
|
||||
return -1;
|
||||
}
|
||||
}
|
||||
|
||||
ACMGenericCodec* ACMCELT::CreateInstance(void) {
|
||||
return NULL;
|
||||
}
|
||||
|
||||
int16_t ACMCELT::InternalCreateEncoder() {
|
||||
if (WebRtcCelt_CreateEnc(&enc_inst_ptr_, num_channels_) < 0) {
|
||||
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
|
||||
"InternalCreateEncoder: create encoder failed for Celt");
|
||||
return -1;
|
||||
}
|
||||
channels_ = num_channels_;
|
||||
return 0;
|
||||
}
|
||||
|
||||
void ACMCELT::DestructEncoderSafe() {
|
||||
encoder_exist_ = false;
|
||||
encoder_initialized_ = false;
|
||||
if (enc_inst_ptr_ != NULL) {
|
||||
WebRtcCelt_FreeEnc(enc_inst_ptr_);
|
||||
enc_inst_ptr_ = NULL;
|
||||
}
|
||||
}
|
||||
|
||||
void ACMCELT::InternalDestructEncoderInst(void* ptr_inst) {
|
||||
if (ptr_inst != NULL) {
|
||||
WebRtcCelt_FreeEnc(static_cast<CELT_encinst_t*>(ptr_inst));
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
int16_t ACMCELT::SetBitRateSafe(const int32_t rate) {
|
||||
// Check that rate is in the valid range.
|
||||
if ((rate >= 48000) && (rate <= 128000)) {
|
||||
// Store new rate.
|
||||
bitrate_ = rate;
|
||||
|
||||
// Initiate encoder with new rate.
|
||||
if (WebRtcCelt_EncoderInit(enc_inst_ptr_, channels_, bitrate_) >= 0) {
|
||||
return 0;
|
||||
} else {
|
||||
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
|
||||
"SetBitRateSafe: Failed to initiate Celt with rate %d",
|
||||
rate);
|
||||
return -1;
|
||||
}
|
||||
} else {
|
||||
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
|
||||
"SetBitRateSafe: Invalid rate Celt, %d", rate);
|
||||
return -1;
|
||||
}
|
||||
}
|
||||
|
||||
#endif
|
||||
|
||||
} // namespace webrtc
|
@@ -0,0 +1,50 @@
|
||||
/*
|
||||
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_ACM2_ACM_CELT_H_
|
||||
#define WEBRTC_MODULES_AUDIO_CODING_MAIN_ACM2_ACM_CELT_H_
|
||||
|
||||
#include "webrtc/modules/audio_coding/main/acm2/acm_generic_codec.h"
|
||||
|
||||
// forward declaration
|
||||
struct CELT_encinst_t_;
|
||||
struct CELT_decinst_t_;
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
class ACMCELT : public ACMGenericCodec {
|
||||
public:
|
||||
explicit ACMCELT(int16_t codec_id);
|
||||
~ACMCELT();
|
||||
|
||||
ACMGenericCodec* CreateInstance(void);
|
||||
|
||||
int16_t InternalEncode(uint8_t* bitstream, int16_t* bitstream_len_byte);
|
||||
|
||||
int16_t InternalInitEncoder(WebRtcACMCodecParams *codec_params);
|
||||
|
||||
protected:
|
||||
void DestructEncoderSafe();
|
||||
|
||||
int16_t InternalCreateEncoder();
|
||||
|
||||
void InternalDestructEncoderInst(void* ptr_inst);
|
||||
|
||||
int16_t SetBitRateSafe(const int32_t rate);
|
||||
|
||||
CELT_encinst_t_* enc_inst_ptr_;
|
||||
uint16_t sampling_freq_;
|
||||
int32_t bitrate_;
|
||||
uint16_t channels_;
|
||||
};
|
||||
|
||||
} // namespace webrtc
|
||||
|
||||
#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_ACM2_ACM_CELT_H_
|
@@ -0,0 +1,79 @@
|
||||
/*
|
||||
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#include "webrtc/modules/audio_coding/main/acm2/acm_cng.h"
|
||||
|
||||
#include "webrtc/modules/audio_coding/codecs/cng/include/webrtc_cng.h"
|
||||
#include "webrtc/modules/audio_coding/main/acm2/acm_codec_database.h"
|
||||
#include "webrtc/modules/audio_coding/main/acm2/acm_common_defs.h"
|
||||
#include "webrtc/system_wrappers/interface/trace.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
ACMCNG::ACMCNG(int16_t codec_id) {
|
||||
encoder_inst_ptr_ = NULL;
|
||||
codec_id_ = codec_id;
|
||||
samp_freq_hz_ = ACMCodecDB::CodecFreq(codec_id_);
|
||||
return;
|
||||
}
|
||||
|
||||
ACMCNG::~ACMCNG() {
|
||||
if (encoder_inst_ptr_ != NULL) {
|
||||
WebRtcCng_FreeEnc(encoder_inst_ptr_);
|
||||
encoder_inst_ptr_ = NULL;
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
// CNG is not like a regular encoder; this function should not be called
// under normal operation. Instead, CNG encoding is performed from inside
// ACMGenericCodec::ProcessFrameVADDTX.
|
||||
int16_t ACMCNG::InternalEncode(uint8_t* /* bitstream */,
|
||||
int16_t* /* bitstream_len_byte */) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
// CNG is not like a regular encoder; this function should not be called
// under normal operation. Instead, CNG encoding is performed from inside
// ACMGenericCodec::ProcessFrameVADDTX.
|
||||
int16_t ACMCNG::InternalInitEncoder(WebRtcACMCodecParams* /* codec_params */) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
ACMGenericCodec* ACMCNG::CreateInstance(void) { return NULL; }
|
||||
|
||||
int16_t ACMCNG::InternalCreateEncoder() {
|
||||
if (WebRtcCng_CreateEnc(&encoder_inst_ptr_) < 0) {
|
||||
encoder_inst_ptr_ = NULL;
|
||||
return -1;
|
||||
} else {
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
|
||||
void ACMCNG::DestructEncoderSafe() {
|
||||
if (encoder_inst_ptr_ != NULL) {
|
||||
WebRtcCng_FreeEnc(encoder_inst_ptr_);
|
||||
encoder_inst_ptr_ = NULL;
|
||||
}
|
||||
encoder_exist_ = false;
|
||||
encoder_initialized_ = false;
|
||||
}
|
||||
|
||||
void ACMCNG::InternalDestructEncoderInst(void* ptr_inst) {
|
||||
if (ptr_inst != NULL) {
|
||||
WebRtcCng_FreeEnc(static_cast<CNG_enc_inst*>(ptr_inst));
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
} // namespace webrtc
|
@@ -0,0 +1,56 @@
|
||||
/*
|
||||
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_ACM2_ACM_CNG_H_
|
||||
#define WEBRTC_MODULES_AUDIO_CODING_MAIN_ACM2_ACM_CNG_H_
|
||||
|
||||
#include "webrtc/modules/audio_coding/main/acm2/acm_generic_codec.h"
|
||||
|
||||
// forward declaration
|
||||
struct WebRtcCngEncInst;
|
||||
struct WebRtcCngDecInst;
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
class ACMCNG: public ACMGenericCodec {
|
||||
public:
|
||||
explicit ACMCNG(int16_t codec_id);
|
||||
~ACMCNG();
|
||||
|
||||
// for FEC
|
||||
ACMGenericCodec* CreateInstance(void);
|
||||
|
||||
int16_t InternalEncode(uint8_t* bitstream,
|
||||
int16_t* bitstream_len_byte);
|
||||
|
||||
int16_t InternalInitEncoder(WebRtcACMCodecParams *codec_params);
|
||||
|
||||
protected:
|
||||
void DestructEncoderSafe();
|
||||
|
||||
int16_t InternalCreateEncoder();
|
||||
|
||||
void InternalDestructEncoderInst(void* ptr_inst);
|
||||
|
||||
int16_t EnableDTX() {
|
||||
return -1;
|
||||
}
|
||||
|
||||
int16_t DisableDTX() {
|
||||
return -1;
|
||||
}
|
||||
|
||||
WebRtcCngEncInst* encoder_inst_ptr_;
|
||||
uint16_t samp_freq_hz_;
|
||||
};
|
||||
|
||||
} // namespace webrtc
|
||||
|
||||
#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_ACM2_ACM_CNG_H_
|
@@ -0,0 +1,957 @@
|
||||
/*
|
||||
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
/*
|
||||
* This file generates databases with information about all supported audio
|
||||
* codecs.
|
||||
*/
|
||||
|
||||
// TODO(tlegrand): Change constant input pointers in all functions to constant
|
||||
// references, where appropriate.
|
||||
#include "webrtc/modules/audio_coding/main/acm2/acm_codec_database.h"
|
||||
|
||||
#include <assert.h>
|
||||
|
||||
#include "webrtc/modules/audio_coding/main/acm2/acm_common_defs.h"
|
||||
#include "webrtc/modules/audio_coding/neteq4/interface/audio_decoder.h"
|
||||
#include "webrtc/system_wrappers/interface/trace.h"
|
||||
|
||||
// Includes needed to create the codecs.
|
||||
// G711, PCM mu-law and A-law
|
||||
#include "webrtc/modules/audio_coding/main/acm2/acm_pcma.h"
|
||||
#include "webrtc/modules/audio_coding/main/acm2/acm_pcmu.h"
|
||||
#include "webrtc/modules/audio_coding/codecs/g711/include/g711_interface.h"
|
||||
// CNG
|
||||
#include "webrtc/modules/audio_coding/codecs/cng/include/webrtc_cng.h"
|
||||
#include "webrtc/modules/audio_coding/main/acm2/acm_cng.h"
|
||||
#ifdef WEBRTC_CODEC_ISAC
|
||||
#include "webrtc/modules/audio_coding/codecs/isac/main/interface/isac.h"
|
||||
#endif
|
||||
#ifdef WEBRTC_CODEC_ISACFX
|
||||
#include "webrtc/modules/audio_coding/codecs/isac/fix/interface/isacfix.h"
|
||||
#endif
|
||||
#if (defined WEBRTC_CODEC_ISACFX) || (defined WEBRTC_CODEC_ISAC)
|
||||
#include "webrtc/modules/audio_coding/main/acm2/acm_isac.h"
|
||||
#include "webrtc/modules/audio_coding/main/acm2/acm_isac_macros.h"
|
||||
#endif
|
||||
#ifdef WEBRTC_CODEC_PCM16
|
||||
#include "webrtc/modules/audio_coding/codecs/pcm16b/include/pcm16b.h"
|
||||
#include "webrtc/modules/audio_coding/main/acm2/acm_pcm16b.h"
|
||||
#endif
|
||||
#ifdef WEBRTC_CODEC_ILBC
|
||||
#include "webrtc/modules/audio_coding/codecs/ilbc/interface/ilbc.h"
|
||||
#include "webrtc/modules/audio_coding/main/acm2/acm_ilbc.h"
|
||||
#endif
|
||||
#ifdef WEBRTC_CODEC_AMR
|
||||
#include "webrtc/modules/audio_coding/codecs/amr/include/amr_interface.h"
|
||||
#include "webrtc/modules/audio_coding/main/acm2/acm_amr.h"
|
||||
#endif
|
||||
#ifdef WEBRTC_CODEC_AMRWB
|
||||
#include "webrtc/modules/audio_coding/codecs/amrwb/include/amrwb_interface.h"
|
||||
#include "webrtc/modules/audio_coding/main/acm2/acm_amrwb.h"
|
||||
#endif
|
||||
#ifdef WEBRTC_CODEC_CELT
|
||||
#include "webrtc/modules/audio_coding/codecs/celt/include/celt_interface.h"
|
||||
#include "webrtc/modules/audio_coding/main/acm2/acm_celt.h"
|
||||
#endif
|
||||
#ifdef WEBRTC_CODEC_G722
|
||||
#include "webrtc/modules/audio_coding/codecs/g722/include/g722_interface.h"
|
||||
#include "webrtc/modules/audio_coding/main/acm2/acm_g722.h"
|
||||
#endif
|
||||
#ifdef WEBRTC_CODEC_G722_1
|
||||
#include "webrtc/modules/audio_coding/codecs/g7221/include/g7221_interface.h"
|
||||
#include "webrtc/modules/audio_coding/main/acm2/acm_g7221.h"
|
||||
#endif
|
||||
#ifdef WEBRTC_CODEC_G722_1C
|
||||
#include "webrtc/modules/audio_coding/codecs/g7221c/include/g7221c_interface.h"
|
||||
#include "webrtc/modules/audio_coding/main/acm2/acm_g7221c.h"
|
||||
#endif
|
||||
#ifdef WEBRTC_CODEC_G729
|
||||
#include "webrtc/modules/audio_coding/codecs/g729/include/g729_interface.h"
|
||||
#include "webrtc/modules/audio_coding/main/acm2/acm_g729.h"
|
||||
#endif
|
||||
#ifdef WEBRTC_CODEC_G729_1
|
||||
#include "webrtc/modules/audio_coding/codecs/g7291/include/g7291_interface.h"
|
||||
#include "webrtc/modules/audio_coding/main/acm2/acm_g7291.h"
|
||||
#endif
|
||||
#ifdef WEBRTC_CODEC_GSMFR
|
||||
#include "webrtc/modules/audio_coding/codecs/gsmfr/include/gsmfr_interface.h"
|
||||
#include "webrtc/modules/audio_coding/main/acm2/acm_gsmfr.h"
|
||||
#endif
|
||||
#ifdef WEBRTC_CODEC_OPUS
|
||||
#include "webrtc/modules/audio_coding/codecs/opus/interface/opus_interface.h"
|
||||
#include "webrtc/modules/audio_coding/main/acm2/acm_opus.h"
|
||||
#endif
|
||||
#ifdef WEBRTC_CODEC_SPEEX
|
||||
#include "webrtc/modules/audio_coding/codecs/speex/include/speex_interface.h"
|
||||
#include "webrtc/modules/audio_coding/main/acm2/acm_speex.h"
|
||||
#endif
|
||||
#ifdef WEBRTC_CODEC_AVT
|
||||
#include "webrtc/modules/audio_coding/main/acm2/acm_dtmf_playout.h"
|
||||
#endif
|
||||
#ifdef WEBRTC_CODEC_RED
|
||||
#include "webrtc/modules/audio_coding/main/acm2/acm_red.h"
|
||||
#endif
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
// Not yet used payload-types.
|
||||
// 83, 82, 81, 80, 79, 78, 77, 76, 75, 74, 73, 72, 71, 70, 69, 68,
|
||||
// 67, 66, 65
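// As a reading aid (an assumption based on how the fields are used later in
// this file): each database_[] entry is a CodecInst initializer with, in
// order, payload type, payload name, sampling frequency in Hz, packet size in
// samples, number of channels, and rate in bits per second.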
|
||||
|
||||
const CodecInst ACMCodecDB::database_[] = {
|
||||
#if (defined(WEBRTC_CODEC_ISAC) || defined(WEBRTC_CODEC_ISACFX))
|
||||
{103, "ISAC", 16000, kIsacPacSize480, 1, kIsacWbDefaultRate},
|
||||
# if (defined(WEBRTC_CODEC_ISAC))
|
||||
{104, "ISAC", 32000, kIsacPacSize960, 1, kIsacSwbDefaultRate},
|
||||
{105, "ISAC", 48000, kIsacPacSize1440, 1, kIsacSwbDefaultRate},
|
||||
# endif
|
||||
#endif
|
||||
#ifdef WEBRTC_CODEC_PCM16
|
||||
// Mono
|
||||
{107, "L16", 8000, 80, 1, 128000},
|
||||
{108, "L16", 16000, 160, 1, 256000},
|
||||
{109, "L16", 32000, 320, 1, 512000},
|
||||
// Stereo
|
||||
{111, "L16", 8000, 80, 2, 128000},
|
||||
{112, "L16", 16000, 160, 2, 256000},
|
||||
{113, "L16", 32000, 320, 2, 512000},
|
||||
#endif
|
||||
// G.711, PCM mu-law and A-law.
|
||||
// Mono
|
||||
{0, "PCMU", 8000, 160, 1, 64000},
|
||||
{8, "PCMA", 8000, 160, 1, 64000},
|
||||
// Stereo
|
||||
{110, "PCMU", 8000, 160, 2, 64000},
|
||||
{118, "PCMA", 8000, 160, 2, 64000},
|
||||
#ifdef WEBRTC_CODEC_ILBC
|
||||
{102, "ILBC", 8000, 240, 1, 13300},
|
||||
#endif
|
||||
#ifdef WEBRTC_CODEC_AMR
|
||||
{114, "AMR", 8000, 160, 1, 12200},
|
||||
#endif
|
||||
#ifdef WEBRTC_CODEC_AMRWB
|
||||
{115, "AMR-WB", 16000, 320, 1, 20000},
|
||||
#endif
|
||||
#ifdef WEBRTC_CODEC_CELT
|
||||
// Mono
|
||||
{116, "CELT", 32000, 640, 1, 64000},
|
||||
// Stereo
|
||||
{117, "CELT", 32000, 640, 2, 64000},
|
||||
#endif
|
||||
#ifdef WEBRTC_CODEC_G722
|
||||
// Mono
|
||||
{9, "G722", 16000, 320, 1, 64000},
|
||||
// Stereo
|
||||
{119, "G722", 16000, 320, 2, 64000},
|
||||
#endif
|
||||
#ifdef WEBRTC_CODEC_G722_1
|
||||
{92, "G7221", 16000, 320, 1, 32000},
|
||||
{91, "G7221", 16000, 320, 1, 24000},
|
||||
{90, "G7221", 16000, 320, 1, 16000},
|
||||
#endif
|
||||
#ifdef WEBRTC_CODEC_G722_1C
|
||||
{89, "G7221", 32000, 640, 1, 48000},
|
||||
{88, "G7221", 32000, 640, 1, 32000},
|
||||
{87, "G7221", 32000, 640, 1, 24000},
|
||||
#endif
|
||||
#ifdef WEBRTC_CODEC_G729
|
||||
{18, "G729", 8000, 240, 1, 8000},
|
||||
#endif
|
||||
#ifdef WEBRTC_CODEC_G729_1
|
||||
{86, "G7291", 16000, 320, 1, 32000},
|
||||
#endif
|
||||
#ifdef WEBRTC_CODEC_GSMFR
|
||||
{3, "GSM", 8000, 160, 1, 13200},
|
||||
#endif
|
||||
#ifdef WEBRTC_CODEC_OPUS
|
||||
// Opus internally supports 48, 24, 16, 12, 8 kHz.
|
||||
// Mono and stereo.
|
||||
{120, "opus", 48000, 960, 2, 64000},
|
||||
#endif
|
||||
#ifdef WEBRTC_CODEC_SPEEX
|
||||
{85, "speex", 8000, 160, 1, 11000},
|
||||
{84, "speex", 16000, 320, 1, 22000},
|
||||
#endif
|
||||
// Comfort noise for four different sampling frequencies.
|
||||
{13, "CN", 8000, 240, 1, 0},
|
||||
{98, "CN", 16000, 480, 1, 0},
|
||||
{99, "CN", 32000, 960, 1, 0},
|
||||
#ifdef ENABLE_48000_HZ
|
||||
{100, "CN", 48000, 1440, 1, 0},
|
||||
#endif
|
||||
#ifdef WEBRTC_CODEC_AVT
|
||||
{106, "telephone-event", 8000, 240, 1, 0},
|
||||
#endif
|
||||
#ifdef WEBRTC_CODEC_RED
|
||||
{127, "red", 8000, 0, 1, 0},
|
||||
#endif
|
||||
// To prevent compile errors due to trailing commas.
|
||||
{-1, "Null", -1, -1, -1, -1}
|
||||
};
|
||||
|
||||
// Create database with all codec settings at compile time.
|
||||
// Each entry needs the following parameters in the given order:
|
||||
// Number of allowed packet sizes, a vector with the allowed packet sizes,
|
||||
// Basic block samples, max number of channels that are supported.
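// (Observation, not part of the original comment: each initializer below also
// carries a trailing bool after the four fields listed above, e.g.
// {2, {kIsacPacSize480, kIsacPacSize960}, 0, 1, true} for iSAC; its meaning
// is given by the CodecSettings struct in the header, which is not shown in
// this hunk.)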
|
||||
const ACMCodecDB::CodecSettings ACMCodecDB::codec_settings_[] = {
|
||||
#if (defined(WEBRTC_CODEC_ISAC) || defined(WEBRTC_CODEC_ISACFX))
|
||||
{2, {kIsacPacSize480, kIsacPacSize960}, 0, 1, true},
|
||||
# if (defined(WEBRTC_CODEC_ISAC))
|
||||
{1, {kIsacPacSize960}, 0, 1, false},
|
||||
{1, {kIsacPacSize1440}, 0, 1, true},
|
||||
# endif
|
||||
#endif
|
||||
#ifdef WEBRTC_CODEC_PCM16
|
||||
// Mono
|
||||
{4, {80, 160, 240, 320}, 0, 2, false},
|
||||
{4, {160, 320, 480, 640}, 0, 2, false},
|
||||
{2, {320, 640}, 0, 2, false},
|
||||
// Stereo
|
||||
{4, {80, 160, 240, 320}, 0, 2, false},
|
||||
{4, {160, 320, 480, 640}, 0, 2, false},
|
||||
{2, {320, 640}, 0, 2},
|
||||
#endif
|
||||
// G.711, PCM mu-law and A-law.
|
||||
// Mono
|
||||
{6, {80, 160, 240, 320, 400, 480}, 0, 2, false},
|
||||
{6, {80, 160, 240, 320, 400, 480}, 0, 2, false},
|
||||
// Stereo
|
||||
{6, {80, 160, 240, 320, 400, 480}, 0, 2, false},
|
||||
{6, {80, 160, 240, 320, 400, 480}, 0, 2, false},
|
||||
#ifdef WEBRTC_CODEC_ILBC
|
||||
{4, {160, 240, 320, 480}, 0, 1, false},
|
||||
#endif
|
||||
#ifdef WEBRTC_CODEC_AMR
|
||||
{3, {160, 320, 480}, 0, 1, true},
|
||||
#endif
|
||||
#ifdef WEBRTC_CODEC_AMRWB
|
||||
{3, {320, 640, 960}, 0, 1, true},
|
||||
#endif
|
||||
#ifdef WEBRTC_CODEC_CELT
|
||||
// Mono
|
||||
{1, {640}, 0, 2, false},
|
||||
// Stereo
|
||||
{1, {640}, 0, 2, false},
|
||||
#endif
|
||||
#ifdef WEBRTC_CODEC_G722
|
||||
// Mono
|
||||
{6, {160, 320, 480, 640, 800, 960}, 0, 2, false},
|
||||
// Stereo
|
||||
{6, {160, 320, 480, 640, 800, 960}, 0, 2, false},
|
||||
#endif
|
||||
#ifdef WEBRTC_CODEC_G722_1
|
||||
{1, {320}, 320, 1, false},
|
||||
{1, {320}, 320, 1, false},
|
||||
{1, {320}, 320, 1, false},
|
||||
#endif
|
||||
#ifdef WEBRTC_CODEC_G722_1C
|
||||
{1, {640}, 640, 1, false},
|
||||
{1, {640}, 640, 1, false},
|
||||
{1, {640}, 640, 1, false},
|
||||
#endif
|
||||
#ifdef WEBRTC_CODEC_G729
|
||||
{6, {80, 160, 240, 320, 400, 480}, 0, 1, false},
|
||||
#endif
|
||||
#ifdef WEBRTC_CODEC_G729_1
|
||||
{3, {320, 640, 960}, 0, 1, false},
|
||||
#endif
|
||||
#ifdef WEBRTC_CODEC_GSMFR
|
||||
{3, {160, 320, 480}, 160, 1, false},
|
||||
#endif
|
||||
#ifdef WEBRTC_CODEC_OPUS
|
||||
// Opus supports frames shorter than 10ms,
|
||||
// but it doesn't help us to use them.
|
||||
// Mono and stereo.
|
||||
{1, {960}, 0, 2, false},
|
||||
#endif
|
||||
#ifdef WEBRTC_CODEC_SPEEX
|
||||
{3, {160, 320, 480}, 0, 1, false},
|
||||
{3, {320, 640, 960}, 0, 1, false},
|
||||
#endif
|
||||
// Comfort noise for three different sampling frequencies.
|
||||
{1, {240}, 240, 1, false},
|
||||
{1, {480}, 480, 1, false},
|
||||
{1, {960}, 960, 1, false},
|
||||
#ifdef ENABLE_48000_HZ
|
||||
{1, {1440}, 1440, 1, false},
|
||||
#endif
|
||||
#ifdef WEBRTC_CODEC_AVT
|
||||
{1, {240}, 240, 1, false},
|
||||
#endif
|
||||
#ifdef WEBRTC_CODEC_RED
|
||||
{1, {0}, 0, 1, false},
|
||||
#endif
|
||||
// To prevent compile errors due to trailing commas.
|
||||
{-1, {-1}, -1, -1, false}
|
||||
};
|
||||
|
||||
// Create a database of all NetEQ decoders at compile time.
|
||||
const NetEqDecoder ACMCodecDB::neteq_decoders_[] = {
|
||||
#if (defined(WEBRTC_CODEC_ISAC) || defined(WEBRTC_CODEC_ISACFX))
|
||||
kDecoderISAC,
|
||||
# if (defined(WEBRTC_CODEC_ISAC))
|
||||
kDecoderISACswb,
|
||||
kDecoderISACfb,
|
||||
# endif
|
||||
#endif
|
||||
#ifdef WEBRTC_CODEC_PCM16
|
||||
// Mono
|
||||
kDecoderPCM16B,
|
||||
kDecoderPCM16Bwb,
|
||||
kDecoderPCM16Bswb32kHz,
|
||||
// Stereo
|
||||
kDecoderPCM16B_2ch,
|
||||
kDecoderPCM16Bwb_2ch,
|
||||
kDecoderPCM16Bswb32kHz_2ch,
|
||||
#endif
|
||||
// G.711, PCM mu-law and A-law.
|
||||
// Mono
|
||||
kDecoderPCMu,
|
||||
kDecoderPCMa,
|
||||
// Stereo
|
||||
kDecoderPCMu_2ch,
|
||||
kDecoderPCMa_2ch,
|
||||
#ifdef WEBRTC_CODEC_ILBC
|
||||
kDecoderILBC,
|
||||
#endif
|
||||
#ifdef WEBRTC_CODEC_AMR
|
||||
kDecoderAMR,
|
||||
#endif
|
||||
#ifdef WEBRTC_CODEC_AMRWB
|
||||
kDecoderAMRWB,
|
||||
#endif
|
||||
#ifdef WEBRTC_CODEC_CELT
|
||||
// Mono
|
||||
kDecoderCELT_32,
|
||||
// Stereo
|
||||
kDecoderCELT_32_2ch,
|
||||
#endif
|
||||
#ifdef WEBRTC_CODEC_G722
|
||||
// Mono
|
||||
kDecoderG722,
|
||||
// Stereo
|
||||
kDecoderG722_2ch,
|
||||
#endif
|
||||
#ifdef WEBRTC_CODEC_G722_1
|
||||
kDecoderG722_1_32,
|
||||
kDecoderG722_1_24,
|
||||
kDecoderG722_1_16,
|
||||
#endif
|
||||
#ifdef WEBRTC_CODEC_G722_1C
|
||||
kDecoderG722_1C_48,
|
||||
kDecoderG722_1C_32,
|
||||
kDecoderG722_1C_24,
|
||||
#endif
|
||||
#ifdef WEBRTC_CODEC_G729
|
||||
kDecoderG729,
|
||||
#endif
|
||||
#ifdef WEBRTC_CODEC_G729_1
|
||||
kDecoderG729_1,
|
||||
#endif
|
||||
#ifdef WEBRTC_CODEC_GSMFR
|
||||
kDecoderGSMFR,
|
||||
#endif
|
||||
#ifdef WEBRTC_CODEC_OPUS
|
||||
// Mono and stereo.
|
||||
kDecoderOpus,
|
||||
#endif
|
||||
#ifdef WEBRTC_CODEC_SPEEX
|
||||
kDecoderSPEEX_8,
|
||||
kDecoderSPEEX_16,
|
||||
#endif
|
||||
// Comfort noise for three different sampling frequencies.
|
||||
kDecoderCNGnb,
|
||||
kDecoderCNGwb,
|
||||
kDecoderCNGswb32kHz
|
||||
#ifdef ENABLE_48000_HZ
|
||||
, kDecoderCNGswb48kHz
|
||||
#endif
|
||||
#ifdef WEBRTC_CODEC_AVT
|
||||
, kDecoderAVT
|
||||
#endif
|
||||
#ifdef WEBRTC_CODEC_RED
|
||||
, kDecoderRED
|
||||
#endif
|
||||
};
|
||||
|
||||
// Get codec information from database.
|
||||
// TODO(tlegrand): replace memcpy with a pointer to the data base memory.
|
||||
int ACMCodecDB::Codec(int codec_id, CodecInst* codec_inst) {
|
||||
// Error check to see that codec_id is not out of bounds.
|
||||
if ((codec_id < 0) || (codec_id >= kNumCodecs)) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
// Copy database information for the codec to the output.
|
||||
memcpy(codec_inst, &database_[codec_id], sizeof(CodecInst));
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
// Enumerator for error codes when asking for codec database id.
|
||||
enum {
|
||||
kInvalidCodec = -10,
|
||||
kInvalidPayloadtype = -30,
|
||||
kInvalidPacketSize = -40,
|
||||
kInvalidRate = -50
|
||||
};
|
||||
|
||||
// Gets the codec id number from the database. If there is some mismatch in
|
||||
// the codec settings, the function will return an error code.
|
||||
// NOTE! The first mismatch found will generate the return value.
|
||||
int ACMCodecDB::CodecNumber(const CodecInst& codec_inst, int* mirror_id) {
|
||||
// Look for a matching codec in the database.
|
||||
int codec_id = CodecId(codec_inst);
|
||||
|
||||
// Checks if we found a matching codec.
|
||||
if (codec_id == -1) {
|
||||
return kInvalidCodec;
|
||||
}
|
||||
|
||||
// Checks the validity of payload type
|
||||
if (!ValidPayloadType(codec_inst.pltype)) {
|
||||
return kInvalidPayloadtype;
|
||||
}
|
||||
|
||||
// Comfort Noise is a special case; packet size and rate are not checked.
|
||||
if (STR_CASE_CMP(database_[codec_id].plname, "CN") == 0) {
|
||||
*mirror_id = codec_id;
|
||||
return codec_id;
|
||||
}
|
||||
|
||||
// RED is a special case; packet size and rate are not checked.
|
||||
if (STR_CASE_CMP(database_[codec_id].plname, "red") == 0) {
|
||||
*mirror_id = codec_id;
|
||||
return codec_id;
|
||||
}
|
||||
|
||||
// Checks the validity of packet size.
|
||||
if (codec_settings_[codec_id].num_packet_sizes > 0) {
|
||||
bool packet_size_ok = false;
|
||||
int i;
|
||||
int packet_size_samples;
|
||||
for (i = 0; i < codec_settings_[codec_id].num_packet_sizes; i++) {
|
||||
packet_size_samples =
|
||||
codec_settings_[codec_id].packet_sizes_samples[i];
|
||||
if (codec_inst.pacsize == packet_size_samples) {
|
||||
packet_size_ok = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (!packet_size_ok) {
|
||||
return kInvalidPacketSize;
|
||||
}
|
||||
}
|
||||
|
||||
if (codec_inst.pacsize < 1) {
|
||||
return kInvalidPacketSize;
|
||||
}
|
||||
|
||||
// Check the validity of rate. Codecs with multiple rates have their own
|
||||
// function for this.
|
||||
*mirror_id = codec_id;
|
||||
if (STR_CASE_CMP("isac", codec_inst.plname) == 0) {
|
||||
if (IsISACRateValid(codec_inst.rate)) {
|
||||
// Set mirrorID to iSAC WB which is only created once to be used both for
|
||||
// iSAC WB and SWB, because they need to share the same struct.
|
||||
*mirror_id = kISAC;
|
||||
return codec_id;
|
||||
} else {
|
||||
return kInvalidRate;
|
||||
}
|
||||
} else if (STR_CASE_CMP("ilbc", codec_inst.plname) == 0) {
|
||||
return IsILBCRateValid(codec_inst.rate, codec_inst.pacsize)
|
||||
? codec_id : kInvalidRate;
|
||||
} else if (STR_CASE_CMP("amr", codec_inst.plname) == 0) {
|
||||
return IsAMRRateValid(codec_inst.rate)
|
||||
? codec_id : kInvalidRate;
|
||||
} else if (STR_CASE_CMP("amr-wb", codec_inst.plname) == 0) {
|
||||
return IsAMRwbRateValid(codec_inst.rate)
|
||||
? codec_id : kInvalidRate;
|
||||
} else if (STR_CASE_CMP("g7291", codec_inst.plname) == 0) {
|
||||
return IsG7291RateValid(codec_inst.rate)
|
||||
? codec_id : kInvalidRate;
|
||||
} else if (STR_CASE_CMP("opus", codec_inst.plname) == 0) {
|
||||
return IsOpusRateValid(codec_inst.rate)
|
||||
? codec_id : kInvalidRate;
|
||||
} else if (STR_CASE_CMP("speex", codec_inst.plname) == 0) {
|
||||
return IsSpeexRateValid(codec_inst.rate)
|
||||
? codec_id : kInvalidRate;
|
||||
} else if (STR_CASE_CMP("celt", codec_inst.plname) == 0) {
|
||||
return IsCeltRateValid(codec_inst.rate)
|
||||
? codec_id : kInvalidRate;
|
||||
}
|
||||
|
||||
return IsRateValid(codec_id, codec_inst.rate) ?
|
||||
codec_id : kInvalidRate;
|
||||
}
|
||||
|
||||
// Looks for a matching payload name, frequency, and channels in the
|
||||
// codec list. Need to check all three since some codecs have several codec
|
||||
// entries with different frequencies and/or channels.
|
||||
// Does not check other codec settings, such as payload type and packet size.
|
||||
// Returns the id of the codec, or -1 if no match is found.
|
||||
int ACMCodecDB::CodecId(const CodecInst& codec_inst) {
|
||||
return (CodecId(codec_inst.plname, codec_inst.plfreq,
|
||||
codec_inst.channels));
|
||||
}
|
||||
|
||||
int ACMCodecDB::CodecId(const char* payload_name, int frequency, int channels) {
|
||||
for (int id = 0; id < kNumCodecs; id++) {
|
||||
bool name_match = false;
|
||||
bool frequency_match = false;
|
||||
bool channels_match = false;
|
||||
|
||||
// Payload name, sampling frequency and number of channels need to match.
|
||||
// NOTE! If |frequency| is -1, the frequency is not applicable and the
// frequency check always passes, as for RED.
name_match = (STR_CASE_CMP(database_[id].plname, payload_name) == 0);
|
||||
frequency_match = (frequency == database_[id].plfreq) || (frequency == -1);
|
||||
// The number of channels must match for all codecs but Opus.
|
||||
if (STR_CASE_CMP(payload_name, "opus") != 0) {
|
||||
channels_match = (channels == database_[id].channels);
|
||||
} else {
|
||||
// For opus we just check that number of channels is valid.
|
||||
channels_match = (channels == 1 || channels == 2);
|
||||
}
|
||||
|
||||
if (name_match && frequency_match && channels_match) {
|
||||
// We have found a matching codec in the list.
|
||||
return id;
|
||||
}
|
||||
}
|
||||
|
||||
// We didn't find a matching codec.
|
||||
return -1;
|
||||
}
|
||||
// Gets codec id number, and mirror id, from database for the receiver.
|
||||
int ACMCodecDB::ReceiverCodecNumber(const CodecInst& codec_inst,
|
||||
int* mirror_id) {
|
||||
// Look for a matching codec in the database.
|
||||
int codec_id = CodecId(codec_inst);
|
||||
|
||||
// Set |mirror_id| to |codec_id|, except for iSAC. In case of iSAC we always
|
||||
// set |mirror_id| to iSAC WB (kISAC) which is only created once to be used
|
||||
// both for iSAC WB and SWB, because they need to share the same struct.
if (STR_CASE_CMP(codec_inst.plname, "ISAC") != 0) {
|
||||
*mirror_id = codec_id;
|
||||
} else {
|
||||
*mirror_id = kISAC;
|
||||
}
|
||||
|
||||
return codec_id;
|
||||
}
|
||||
|
||||
// Returns the codec sampling frequency for codec with id = "codec_id" in
|
||||
// database.
|
||||
int ACMCodecDB::CodecFreq(int codec_id) {
|
||||
// Error check to see that codec_id is not out of bounds.
|
||||
if (codec_id < 0 || codec_id >= kNumCodecs) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
return database_[codec_id].plfreq;
|
||||
}
|
||||
|
||||
// Returns the codec's basic coding block size in samples.
|
||||
int ACMCodecDB::BasicCodingBlock(int codec_id) {
|
||||
// Error check to see that codec_id is not out of bounds.
|
||||
if (codec_id < 0 || codec_id >= kNumCodecs) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
return codec_settings_[codec_id].basic_block_samples;
|
||||
}
|
||||
|
||||
// Returns the NetEQ decoder database.
|
||||
const NetEqDecoder* ACMCodecDB::NetEQDecoders() {
|
||||
return neteq_decoders_;
|
||||
}
|
||||
|
||||
// Gets mirror id. The Id is used for codecs sharing struct for settings that
|
||||
// need different payload types.
|
||||
int ACMCodecDB::MirrorID(int codec_id) {
|
||||
if (STR_CASE_CMP(database_[codec_id].plname, "isac") == 0) {
|
||||
return kISAC;
|
||||
} else {
|
||||
return codec_id;
|
||||
}
|
||||
}
|
||||
|
||||
// Creates memory/instance for storing codec state.
|
||||
ACMGenericCodec* ACMCodecDB::CreateCodecInstance(const CodecInst& codec_inst) {
|
||||
// All we have support for right now.
|
||||
if (!STR_CASE_CMP(codec_inst.plname, "ISAC")) {
|
||||
#if (defined(WEBRTC_CODEC_ISAC) || defined(WEBRTC_CODEC_ISACFX))
|
||||
return new ACMISAC(kISAC);
|
||||
#endif
|
||||
} else if (!STR_CASE_CMP(codec_inst.plname, "PCMU")) {
|
||||
if (codec_inst.channels == 1) {
|
||||
return new ACMPCMU(kPCMU);
|
||||
} else {
|
||||
return new ACMPCMU(kPCMU_2ch);
|
||||
}
|
||||
} else if (!STR_CASE_CMP(codec_inst.plname, "PCMA")) {
|
||||
if (codec_inst.channels == 1) {
|
||||
return new ACMPCMA(kPCMA);
|
||||
} else {
|
||||
return new ACMPCMA(kPCMA_2ch);
|
||||
}
|
||||
} else if (!STR_CASE_CMP(codec_inst.plname, "ILBC")) {
|
||||
#ifdef WEBRTC_CODEC_ILBC
|
||||
return new ACMILBC(kILBC);
|
||||
#endif
|
||||
} else if (!STR_CASE_CMP(codec_inst.plname, "AMR")) {
|
||||
#ifdef WEBRTC_CODEC_AMR
|
||||
return new ACMAMR(kGSMAMR);
|
||||
#endif
|
||||
} else if (!STR_CASE_CMP(codec_inst.plname, "AMR-WB")) {
|
||||
#ifdef WEBRTC_CODEC_AMRWB
|
||||
return new ACMAMRwb(kGSMAMRWB);
|
||||
#endif
|
||||
} else if (!STR_CASE_CMP(codec_inst.plname, "CELT")) {
|
||||
#ifdef WEBRTC_CODEC_CELT
|
||||
if (codec_inst.channels == 1) {
|
||||
return new ACMCELT(kCELT32);
|
||||
} else {
|
||||
return new ACMCELT(kCELT32_2ch);
|
||||
}
|
||||
#endif
|
||||
} else if (!STR_CASE_CMP(codec_inst.plname, "G722")) {
|
||||
#ifdef WEBRTC_CODEC_G722
|
||||
if (codec_inst.channels == 1) {
|
||||
return new ACMG722(kG722);
|
||||
} else {
|
||||
return new ACMG722(kG722_2ch);
|
||||
}
|
||||
#endif
|
||||
} else if (!STR_CASE_CMP(codec_inst.plname, "G7221")) {
|
||||
switch (codec_inst.plfreq) {
|
||||
case 16000: {
|
||||
#ifdef WEBRTC_CODEC_G722_1
|
||||
int codec_id;
|
||||
switch (codec_inst.rate) {
case 16000 : {
codec_id = kG722_1_16;
break;
}
case 24000 : {
codec_id = kG722_1_24;
break;
}
case 32000 : {
codec_id = kG722_1_32;
break;
}
default: {
return NULL;
}
}
return new ACMG722_1(codec_id);
|
||||
#endif
|
||||
}
|
||||
case 32000: {
|
||||
#ifdef WEBRTC_CODEC_G722_1C
|
||||
int codec_id;
|
||||
switch (codec_inst.rate) {
case 24000 : {
codec_id = kG722_1C_24;
break;
}
case 32000 : {
codec_id = kG722_1C_32;
break;
}
case 48000 : {
codec_id = kG722_1C_48;
break;
}
default: {
return NULL;
}
}
return new ACMG722_1C(codec_id);
|
||||
#endif
|
||||
}
|
||||
}
|
||||
} else if (!STR_CASE_CMP(codec_inst.plname, "CN")) {
|
||||
// For CN we need to check sampling frequency to know what codec to create.
|
||||
int codec_id;
|
||||
switch (codec_inst.plfreq) {
|
||||
case 8000: {
|
||||
codec_id = kCNNB;
|
||||
break;
|
||||
}
|
||||
case 16000: {
|
||||
codec_id = kCNWB;
|
||||
break;
|
||||
}
|
||||
case 32000: {
|
||||
codec_id = kCNSWB;
|
||||
break;
|
||||
}
|
||||
#ifdef ENABLE_48000_HZ
|
||||
case 48000: {
|
||||
codec_id = kCNFB;
|
||||
break;
|
||||
}
|
||||
#endif
|
||||
default: {
|
||||
return NULL;
|
||||
}
|
||||
}
|
||||
return new ACMCNG(codec_id);
|
||||
} else if (!STR_CASE_CMP(codec_inst.plname, "G729")) {
|
||||
#ifdef WEBRTC_CODEC_G729
|
||||
return new ACMG729(kG729);
|
||||
#endif
|
||||
} else if (!STR_CASE_CMP(codec_inst.plname, "G7291")) {
|
||||
#ifdef WEBRTC_CODEC_G729_1
|
||||
return new ACMG729_1(kG729_1);
|
||||
#endif
|
||||
} else if (!STR_CASE_CMP(codec_inst.plname, "opus")) {
|
||||
#ifdef WEBRTC_CODEC_OPUS
|
||||
return new ACMOpus(kOpus);
|
||||
#endif
|
||||
} else if (!STR_CASE_CMP(codec_inst.plname, "speex")) {
|
||||
#ifdef WEBRTC_CODEC_SPEEX
|
||||
int codec_id;
|
||||
switch (codec_inst.plfreq) {
case 8000: {
|
||||
codec_id = kSPEEX8;
|
||||
break;
|
||||
}
|
||||
case 16000: {
|
||||
codec_id = kSPEEX16;
|
||||
break;
|
||||
}
|
||||
default: {
|
||||
return NULL;
|
||||
}
|
||||
}
|
||||
return new ACMSPEEX(codec_id);
|
||||
#endif
|
||||
} else if (!STR_CASE_CMP(codec_inst.plname, "CN")) {
|
||||
// For CN we need to check sampling frequency to know what codec to create.
|
||||
int codec_id;
|
||||
switch (codec_inst.plfreq) {
|
||||
case 8000: {
|
||||
codec_id = kCNNB;
|
||||
break;
|
||||
}
|
||||
case 16000: {
|
||||
codec_id = kCNWB;
|
||||
break;
|
||||
}
|
||||
case 32000: {
|
||||
codec_id = kCNSWB;
|
||||
break;
|
||||
}
|
||||
#ifdef ENABLE_48000_HZ
|
||||
case 48000: {
|
||||
codec_id = kCNFB;
|
||||
break;
|
||||
}
|
||||
#endif
|
||||
default: {
|
||||
return NULL;
|
||||
}
|
||||
}
|
||||
return new ACMCNG(codec_id);
|
||||
} else if (!STR_CASE_CMP(codec_inst.plname, "L16")) {
|
||||
#ifdef WEBRTC_CODEC_PCM16
|
||||
// For L16 we need to check sampling frequency to know what codec to create.
|
||||
int codec_id;
|
||||
if (codec_inst.channels == 1) {
|
||||
switch (codec_inst.plfreq) {
|
||||
case 8000: {
|
||||
codec_id = kPCM16B;
|
||||
break;
|
||||
}
|
||||
case 16000: {
|
||||
codec_id = kPCM16Bwb;
|
||||
break;
|
||||
}
|
||||
case 32000: {
|
||||
codec_id = kPCM16Bswb32kHz;
|
||||
break;
|
||||
}
|
||||
default: {
|
||||
return NULL;
|
||||
}
|
||||
}
|
||||
} else {
|
||||
switch (codec_inst.plfreq) {
|
||||
case 8000: {
|
||||
codec_id = kPCM16B_2ch;
|
||||
break;
|
||||
}
|
||||
case 16000: {
|
||||
codec_id = kPCM16Bwb_2ch;
|
||||
break;
|
||||
}
|
||||
case 32000: {
|
||||
codec_id = kPCM16Bswb32kHz_2ch;
|
||||
break;
|
||||
}
|
||||
default: {
|
||||
return NULL;
|
||||
}
|
||||
}
|
||||
}
|
||||
return new ACMPCM16B(codec_id);
|
||||
#endif
|
||||
} else if (!STR_CASE_CMP(codec_inst.plname, "telephone-event")) {
|
||||
#ifdef WEBRTC_CODEC_AVT
|
||||
return new ACMDTMFPlayout(kAVT);
|
||||
#endif
|
||||
} else if (!STR_CASE_CMP(codec_inst.plname, "red")) {
|
||||
#ifdef WEBRTC_CODEC_RED
|
||||
return new ACMRED(kRED);
|
||||
#endif
|
||||
}
|
||||
return NULL;
|
||||
}
|
||||
|
||||
// Checks if the bitrate is valid for the codec.
|
||||
bool ACMCodecDB::IsRateValid(int codec_id, int rate) {
|
||||
if (database_[codec_id].rate == rate) {
|
||||
return true;
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
// Checks if the bitrate is valid for iSAC.
|
||||
bool ACMCodecDB::IsISACRateValid(int rate) {
|
||||
if ((rate == -1) || ((rate <= 56000) && (rate >= 10000))) {
|
||||
return true;
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
// Checks if the bitrate is valid for iLBC.
|
||||
bool ACMCodecDB::IsILBCRateValid(int rate, int frame_size_samples) {
|
||||
if (((frame_size_samples == 240) || (frame_size_samples == 480)) &&
|
||||
(rate == 13300)) {
|
||||
return true;
|
||||
} else if (((frame_size_samples == 160) || (frame_size_samples == 320)) &&
|
||||
(rate == 15200)) {
|
||||
return true;
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
// Check if the bitrate is valid for the GSM-AMR.
|
||||
bool ACMCodecDB::IsAMRRateValid(int rate) {
|
||||
switch (rate) {
|
||||
case 4750:
|
||||
case 5150:
|
||||
case 5900:
|
||||
case 6700:
|
||||
case 7400:
|
||||
case 7950:
|
||||
case 10200:
|
||||
case 12200: {
|
||||
return true;
|
||||
}
|
||||
default: {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Check if the bitrate is valid for GSM-AMR-WB.
|
||||
bool ACMCodecDB::IsAMRwbRateValid(int rate) {
|
||||
switch (rate) {
|
||||
case 7000:
|
||||
case 9000:
|
||||
case 12000:
|
||||
case 14000:
|
||||
case 16000:
|
||||
case 18000:
|
||||
case 20000:
|
||||
case 23000:
|
||||
case 24000: {
|
||||
return true;
|
||||
}
|
||||
default: {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Check if the bitrate is valid for G.729.1.
|
||||
bool ACMCodecDB::IsG7291RateValid(int rate) {
|
||||
switch (rate) {
|
||||
case 8000:
|
||||
case 12000:
|
||||
case 14000:
|
||||
case 16000:
|
||||
case 18000:
|
||||
case 20000:
|
||||
case 22000:
|
||||
case 24000:
|
||||
case 26000:
|
||||
case 28000:
|
||||
case 30000:
|
||||
case 32000: {
|
||||
return true;
|
||||
}
|
||||
default: {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Checks if the bitrate is valid for Speex.
|
||||
bool ACMCodecDB::IsSpeexRateValid(int rate) {
|
||||
if (rate > 2000) {
|
||||
return true;
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
// Checks if the bitrate is valid for Opus.
|
||||
bool ACMCodecDB::IsOpusRateValid(int rate) {
|
||||
if ((rate < 6000) || (rate > 510000)) {
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
// Checks if the bitrate is valid for Celt.
|
||||
bool ACMCodecDB::IsCeltRateValid(int rate) {
|
||||
if ((rate >= 48000) && (rate <= 128000)) {
|
||||
return true;
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
// Checks if the payload type is in the valid range.
|
||||
bool ACMCodecDB::ValidPayloadType(int payload_type) {
|
||||
if ((payload_type < 0) || (payload_type > 127)) {
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
bool ACMCodecDB::OwnsDecoder(int codec_id) {
|
||||
assert(codec_id >= 0 && codec_id < ACMCodecDB::kNumCodecs);
|
||||
return ACMCodecDB::codec_settings_[codec_id].owns_decoder;
|
||||
}
|
||||
|
||||
} // namespace webrtc
|
@ -0,0 +1,355 @@
|
||||
/*
|
||||
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
/*
|
||||
* This file generates databases with information about all supported audio
|
||||
* codecs.
|
||||
*/
|
||||
|
||||
#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_ACM2_ACM_CODEC_DATABASE_H_
|
||||
#define WEBRTC_MODULES_AUDIO_CODING_MAIN_ACM2_ACM_CODEC_DATABASE_H_
|
||||
|
||||
#include "webrtc/common_types.h"
|
||||
#include "webrtc/modules/audio_coding/main/acm2/acm_generic_codec.h"
|
||||
#include "webrtc/modules/audio_coding/neteq4/interface/neteq.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
// TODO(tlegrand): replace class ACMCodecDB with a namespace.
|
||||
class ACMCodecDB {
|
||||
public:
|
||||
// Enum with array indexes for the supported codecs. NOTE! The order MUST
|
||||
// be the same as when creating the database in acm_codec_database.cc.
|
||||
enum {
|
||||
kNone = -1
|
||||
#if (defined(WEBRTC_CODEC_ISAC) || defined(WEBRTC_CODEC_ISACFX))
|
||||
, kISAC
|
||||
# if (defined(WEBRTC_CODEC_ISAC))
|
||||
, kISACSWB
|
||||
, kISACFB
|
||||
# endif
|
||||
#endif
|
||||
#ifdef WEBRTC_CODEC_PCM16
|
||||
// Mono
|
||||
, kPCM16B
|
||||
, kPCM16Bwb
|
||||
, kPCM16Bswb32kHz
|
||||
// Stereo
|
||||
, kPCM16B_2ch
|
||||
, kPCM16Bwb_2ch
|
||||
, kPCM16Bswb32kHz_2ch
|
||||
#endif
|
||||
// Mono
|
||||
, kPCMU
|
||||
, kPCMA
|
||||
// Stereo
|
||||
, kPCMU_2ch
|
||||
, kPCMA_2ch
|
||||
#ifdef WEBRTC_CODEC_ILBC
|
||||
, kILBC
|
||||
#endif
|
||||
#ifdef WEBRTC_CODEC_AMR
|
||||
, kGSMAMR
|
||||
#endif
|
||||
#ifdef WEBRTC_CODEC_AMRWB
|
||||
, kGSMAMRWB
|
||||
#endif
|
||||
#ifdef WEBRTC_CODEC_CELT
|
||||
// Mono
|
||||
, kCELT32
|
||||
// Stereo
|
||||
, kCELT32_2ch
|
||||
#endif
|
||||
#ifdef WEBRTC_CODEC_G722
|
||||
// Mono
|
||||
, kG722
|
||||
// Stereo
|
||||
, kG722_2ch
|
||||
#endif
|
||||
#ifdef WEBRTC_CODEC_G722_1
|
||||
, kG722_1_32
|
||||
, kG722_1_24
|
||||
, kG722_1_16
|
||||
#endif
|
||||
#ifdef WEBRTC_CODEC_G722_1C
|
||||
, kG722_1C_48
|
||||
, kG722_1C_32
|
||||
, kG722_1C_24
|
||||
#endif
|
||||
#ifdef WEBRTC_CODEC_G729
|
||||
, kG729
|
||||
#endif
|
||||
#ifdef WEBRTC_CODEC_G729_1
|
||||
, kG729_1
|
||||
#endif
|
||||
#ifdef WEBRTC_CODEC_GSMFR
|
||||
, kGSMFR
|
||||
#endif
|
||||
#ifdef WEBRTC_CODEC_OPUS
|
||||
// Mono and stereo
|
||||
, kOpus
|
||||
#endif
|
||||
#ifdef WEBRTC_CODEC_SPEEX
|
||||
, kSPEEX8
|
||||
, kSPEEX16
|
||||
#endif
|
||||
, kCNNB
|
||||
, kCNWB
|
||||
, kCNSWB
|
||||
#ifdef ENABLE_48000_HZ
|
||||
, kCNFB
|
||||
#endif
|
||||
#ifdef WEBRTC_CODEC_AVT
|
||||
, kAVT
|
||||
#endif
|
||||
#ifdef WEBRTC_CODEC_RED
|
||||
, kRED
|
||||
#endif
|
||||
, kNumCodecs
|
||||
};
|
||||
|
||||
// Set unsupported codecs to -1
|
||||
#ifndef WEBRTC_CODEC_ISAC
|
||||
enum {kISACSWB = -1};
|
||||
enum {kISACFB = -1};
|
||||
# ifndef WEBRTC_CODEC_ISACFX
|
||||
enum {kISAC = -1};
|
||||
# endif
|
||||
#endif
|
||||
#ifndef WEBRTC_CODEC_PCM16
|
||||
// Mono
|
||||
enum {kPCM16B = -1};
|
||||
enum {kPCM16Bwb = -1};
|
||||
enum {kPCM16Bswb32kHz = -1};
|
||||
// Stereo
|
||||
enum {kPCM16B_2ch = -1};
|
||||
enum {kPCM16Bwb_2ch = -1};
|
||||
enum {kPCM16Bswb32kHz_2ch = -1};
|
||||
#endif
|
||||
// 48 kHz not supported, always set to -1.
|
||||
enum {kPCM16Bswb48kHz = -1};
|
||||
#ifndef WEBRTC_CODEC_ILBC
|
||||
enum {kILBC = -1};
|
||||
#endif
|
||||
#ifndef WEBRTC_CODEC_AMR
|
||||
enum {kGSMAMR = -1};
|
||||
#endif
|
||||
#ifndef WEBRTC_CODEC_AMRWB
|
||||
enum {kGSMAMRWB = -1};
|
||||
#endif
|
||||
#ifndef WEBRTC_CODEC_CELT
|
||||
// Mono
|
||||
enum {kCELT32 = -1};
|
||||
// Stereo
|
||||
enum {kCELT32_2ch = -1};
|
||||
#endif
|
||||
#ifndef WEBRTC_CODEC_G722
|
||||
// Mono
|
||||
enum {kG722 = -1};
|
||||
// Stereo
|
||||
enum {kG722_2ch = -1};
|
||||
#endif
|
||||
#ifndef WEBRTC_CODEC_G722_1
|
||||
enum {kG722_1_32 = -1};
|
||||
enum {kG722_1_24 = -1};
|
||||
enum {kG722_1_16 = -1};
|
||||
#endif
|
||||
#ifndef WEBRTC_CODEC_G722_1C
|
||||
enum {kG722_1C_48 = -1};
|
||||
enum {kG722_1C_32 = -1};
|
||||
enum {kG722_1C_24 = -1};
|
||||
#endif
|
||||
#ifndef WEBRTC_CODEC_G729
|
||||
enum {kG729 = -1};
|
||||
#endif
|
||||
#ifndef WEBRTC_CODEC_G729_1
|
||||
enum {kG729_1 = -1};
|
||||
#endif
|
||||
#ifndef WEBRTC_CODEC_GSMFR
|
||||
enum {kGSMFR = -1};
|
||||
#endif
|
||||
#ifndef WEBRTC_CODEC_SPEEX
|
||||
enum {kSPEEX8 = -1};
|
||||
enum {kSPEEX16 = -1};
|
||||
#endif
|
||||
#ifndef WEBRTC_CODEC_OPUS
|
||||
// Mono and stereo
|
||||
enum {kOpus = -1};
|
||||
#endif
|
||||
#ifndef WEBRTC_CODEC_AVT
|
||||
enum {kAVT = -1};
|
||||
#endif
|
||||
#ifndef WEBRTC_CODEC_RED
|
||||
enum {kRED = -1};
|
||||
#endif
|
||||
#ifndef ENABLE_48000_HZ
|
||||
enum { kCNFB = -1 };
|
||||
#endif
|
||||
|
||||
// kMaxNumCodecs - Maximum number of codecs that can be activated in one
|
||||
// build.
|
||||
// kMaxNumPacketSize - Maximum number of allowed packet sizes for one codec.
|
||||
// These might need to be increased if a new codec is added to the database.
static const int kMaxNumCodecs = 50;
|
||||
static const int kMaxNumPacketSize = 6;
|
||||
|
||||
// Codec specific settings
|
||||
//
|
||||
// num_packet_sizes - number of allowed packet sizes.
|
||||
// packet_sizes_samples - list of the allowed packet sizes.
|
||||
// basic_block_samples - assigned a value different from 0 if the codec
|
||||
// requires being fed a specific number of samples
// that can be different from packet size.
|
||||
// channel_support - number of channels supported to encode;
|
||||
// 1 = mono, 2 = stereo, etc.
|
||||
// owns_decoder - if true, it means that the codec should own the
|
||||
// decoder instance. In this case, the codec should
|
||||
// implement ACMGenericCodec::Decoder(), which returns
|
||||
// a pointer to AudioDecoder. This pointer is injected
|
||||
// into NetEq when this codec is registered as receive
|
||||
// codec.
|
||||
struct CodecSettings {
|
||||
int num_packet_sizes;
|
||||
int packet_sizes_samples[kMaxNumPacketSize];
|
||||
int basic_block_samples;
|
||||
int channel_support;
|
||||
bool owns_decoder;
|
||||
};
|
||||
|
||||
// Gets codec information from database at the position in database given by
|
||||
// [codec_id].
|
||||
// Input:
|
||||
// [codec_id] - number that specifies at what position in the database to
|
||||
// get the information.
|
||||
// Output:
|
||||
// [codec_inst] - filled with information about the codec.
|
||||
// Return:
|
||||
// 0 if successful, otherwise -1.
|
||||
static int Codec(int codec_id, CodecInst* codec_inst);
|
||||
|
||||
// Returns codec id and mirror id from database, given the information
|
||||
// received in the input [codec_inst]. Mirror id is a number that tells
|
||||
// where to find the codec's memory (instance). The number is either the
|
||||
// same as codec id (most common), or a number pointing at a different
|
||||
// entry in the database, if the codec has several entries with different
|
||||
// payload types. This is used for codecs that must share one struct even if
|
||||
// the payload type differs.
|
||||
// One example is the codec iSAC which has the same struct for both 16 and
|
||||
// 32 kHz, but they have different entries in the database. Let's say the
// function is called with iSAC 32kHz. The function will return 1 as that is
|
||||
// the entry in the data base, and [mirror_id] = 0, as that is the entry for
|
||||
// iSAC 16 kHz, which holds the shared memory.
|
||||
// Input:
|
||||
// [codec_inst] - Information about the codec for which we require the
|
||||
// database id.
|
||||
// Output:
|
||||
// [mirror_id] - mirror id, which most often is the same as the return
|
||||
// value, see above.
|
||||
// [err_message] - if present, in the event of a mismatch found between the
|
||||
// input and the database, a descriptive error message is
|
||||
// written here.
|
||||
// [max_message_len_byte] - maximum allowed length of the error message in bytes.
// Return:
|
||||
// codec id if successful, otherwise < 0.
|
||||
static int CodecNumber(const CodecInst& codec_inst, int* mirror_id,
|
||||
char* err_message, int max_message_len_byte);
|
||||
static int CodecNumber(const CodecInst& codec_inst, int* mirror_id);
|
||||
static int CodecId(const CodecInst& codec_inst);
|
||||
static int CodecId(const char* payload_name, int frequency, int channels);
|
||||
static int ReceiverCodecNumber(const CodecInst& codec_inst, int* mirror_id);
|
||||
|
||||
// Returns the codec sampling frequency for codec with id = "codec_id" in
|
||||
// database.
|
||||
// TODO(tlegrand): Check if function is needed, or if we can change
|
||||
// to access database directly.
|
||||
// Input:
|
||||
// [codec_id] - number that specifies at what position in the database to
|
||||
// get the information.
|
||||
// Return:
|
||||
// codec sampling frequency if successful, otherwise -1.
|
||||
static int CodecFreq(int codec_id);
|
||||
|
||||
// Return the codec's basic coding block size in samples.
|
||||
// TODO(tlegrand): Check if function is needed, or if we can change
|
||||
// to access database directly.
|
||||
// Input:
|
||||
// [codec_id] - number that specifies at what position in the database to
|
||||
// get the information.
|
||||
// Return:
|
||||
// codec basic block size if successful, otherwise -1.
|
||||
static int BasicCodingBlock(int codec_id);
|
||||
|
||||
// Returns the NetEQ decoder database.
|
||||
static const NetEqDecoder* NetEQDecoders();
|
||||
|
||||
// Returns mirror id, which is a number that tells where to find the codec's
|
||||
// memory (instance). It is either the same as codec id (most common), or a
|
||||
// number pointing at a different entry in the database, if the codec has
// several entries with different payload types. This is used for codecs that
// must share a struct even if the payload type differs.
// TODO(tlegrand): Check if function is needed, or if we can change
|
||||
// to access database directly.
|
||||
// Input:
|
||||
// [codec_id] - number that specifies codec's position in the database.
|
||||
// Return:
|
||||
// Mirror id on success, otherwise -1.
|
||||
static int MirrorID(int codec_id);
|
||||
|
||||
// Create memory/instance for storing codec state.
|
||||
// Input:
|
||||
// [codec_inst] - information about codec. Only name of codec, "plname", is
|
||||
// used in this function.
|
||||
static ACMGenericCodec* CreateCodecInstance(const CodecInst& codec_inst);
|
||||
|
||||
// Specifies if the codec specified by |codec_id| MUST own its own decoder.
|
||||
// This is the case for codecs which *should* share a single codec instance
// between encoder and decoder, or for codecs over whose decoder the ACM should
// have control. For instance, iSAC is such a codec: its encoder and decoder
// share the same codec instance.
static bool OwnsDecoder(int codec_id);
|
||||
|
||||
// Checks if the bitrate is valid for the codec.
|
||||
// Input:
|
||||
// [codec_id] - number that specifies codec's position in the database.
|
||||
// [rate] - bitrate to check.
|
||||
// [frame_size_samples] - (used for iLBC) specifies which frame size to go
|
||||
// with the rate.
|
||||
static bool IsRateValid(int codec_id, int rate);
|
||||
static bool IsISACRateValid(int rate);
|
||||
static bool IsILBCRateValid(int rate, int frame_size_samples);
|
||||
static bool IsAMRRateValid(int rate);
|
||||
static bool IsAMRwbRateValid(int rate);
|
||||
static bool IsG7291RateValid(int rate);
|
||||
static bool IsSpeexRateValid(int rate);
|
||||
static bool IsOpusRateValid(int rate);
|
||||
static bool IsCeltRateValid(int rate);
|
||||
|
||||
// Check if the payload type is valid, meaning that it is in the valid range
|
||||
// of 0 to 127.
|
||||
// Input:
|
||||
// [payload_type] - payload type.
|
||||
static bool ValidPayloadType(int payload_type);
|
||||
|
||||
// Databases with information about the supported codecs
|
||||
// database_ - stored information about all codecs: payload type, name,
|
||||
// sampling frequency, packet size in samples, default channel
|
||||
// support, and default rate.
|
||||
// codec_settings_ - stored codec settings: number of allowed packet sizes,
|
||||
// a vector with the allowed packet sizes, basic block
|
||||
// samples, and max number of channels that are supported.
|
||||
// neteq_decoders_ - list of supported decoders in NetEQ.
|
||||
static const CodecInst database_[kMaxNumCodecs];
|
||||
static const CodecSettings codec_settings_[kMaxNumCodecs];
|
||||
static const NetEqDecoder neteq_decoders_[kMaxNumCodecs];
|
||||
};
|
||||
|
||||
} // namespace webrtc
|
||||
|
||||
#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_ACM2_ACM_CODEC_DATABASE_H_
|
@ -8,8 +8,8 @@
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_COMMON_DEFS_H_
|
||||
#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_COMMON_DEFS_H_
|
||||
#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_ACM2_ACM_COMMON_DEFS_H_
|
||||
#define WEBRTC_MODULES_AUDIO_CODING_MAIN_ACM2_ACM_COMMON_DEFS_H_
|
||||
|
||||
#include <string.h>
|
||||
|
||||
@ -24,20 +24,13 @@
|
||||
#error iSAC and iSACFX codecs cannot be enabled at the same time
|
||||
#endif
|
||||
|
||||
#ifdef WIN32
|
||||
// OS-dependent case-insensitive string comparison
|
||||
#define STR_CASE_CMP(x, y) ::_stricmp(x, y)
|
||||
#else
|
||||
// OS-dependent case-insensitive string comparison
|
||||
#define STR_CASE_CMP(x, y) ::strcasecmp(x, y)
|
||||
#endif
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
// 60 ms is the maximum block size we support. An extra 20 ms is considered
|
||||
// for safety if process() method is not called when it should be, i.e. we
|
||||
// accept 20 ms of jitter. 80 ms @ 48 kHz (full-band) stereo is 7680 samples.
|
||||
#define AUDIO_BUFFER_SIZE_W16 7680
|
||||
#define AUDIO_BUFFER_SIZE_W16 7680
|
||||
|
||||
// There is one timestamp per each 10 ms of audio
|
||||
// the audio buffer, at max, may contain 32 blocks of 10ms
|
||||
@ -91,17 +84,7 @@ struct WebRtcACMCodecParams {
|
||||
ACMVADMode vad_mode;
|
||||
};
|
||||
|
||||
// A structure that encapsulates audio buffer and related parameters
|
||||
// used for synchronization of audio of two ACMs.
|
||||
//
|
||||
// in_audio: same as ACMGenericCodec::in_audio_
|
||||
// in_audio_ix_read: same as ACMGenericCodec::in_audio_ix_read_
|
||||
// in_audio_ix_write: same as ACMGenericCodec::in_audio_ix_write_
|
||||
// in_timestamp: same as ACMGenericCodec::in_timestamp_
|
||||
// in_timestamp_ix_write: same as ACMGenericCodec::in_timestamp_ix_write_
|
||||
// last_timestamp: same as ACMGenericCodec::last_timestamp_
|
||||
// last_in_timestamp: same as AudioCodingModuleImpl::last_in_timestamp_
|
||||
//
|
||||
// TODO(turajs): Remove when ACM1 is removed.
|
||||
struct WebRtcACMAudioBuff {
|
||||
int16_t in_audio[AUDIO_BUFFER_SIZE_W16];
|
||||
int16_t in_audio_ix_read;
|
||||
@ -114,4 +97,4 @@ struct WebRtcACMAudioBuff {
|
||||
|
||||
} // namespace webrtc
|
||||
|
||||
#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_COMMON_DEFS_H_
|
||||
#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_ACM2_ACM_COMMON_DEFS_H_
|
@ -0,0 +1,86 @@
|
||||
/*
|
||||
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#include "webrtc/modules/audio_coding/main/acm2/acm_dtmf_playout.h"
|
||||
|
||||
#ifdef WEBRTC_CODEC_AVT
|
||||
#include "webrtc/modules/audio_coding/main/acm2/acm_common_defs.h"
|
||||
#include "webrtc/modules/audio_coding/main/acm2/acm_receiver.h"
|
||||
#include "webrtc/system_wrappers/interface/trace.h"
|
||||
#endif
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
#ifndef WEBRTC_CODEC_AVT
|
||||
|
||||
ACMDTMFPlayout::ACMDTMFPlayout(int16_t /* codec_id */) { return; }
|
||||
|
||||
ACMDTMFPlayout::~ACMDTMFPlayout() { return; }
|
||||
|
||||
int16_t ACMDTMFPlayout::InternalEncode(uint8_t* /* bitstream */,
|
||||
int16_t* /* bitstream_len_byte */) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
int16_t ACMDTMFPlayout::InternalInitEncoder(
|
||||
WebRtcACMCodecParams* /* codec_params */) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
ACMGenericCodec* ACMDTMFPlayout::CreateInstance(void) { return NULL; }
|
||||
|
||||
int16_t ACMDTMFPlayout::InternalCreateEncoder() { return -1; }
|
||||
|
||||
void ACMDTMFPlayout::InternalDestructEncoderInst(void* /* ptr_inst */) {
|
||||
return;
|
||||
}
|
||||
|
||||
void ACMDTMFPlayout::DestructEncoderSafe() {
|
||||
return;
|
||||
}
|
||||
|
||||
#else //===================== Actual Implementation =======================
|
||||
|
||||
ACMDTMFPlayout::ACMDTMFPlayout(int16_t codec_id) { codec_id_ = codec_id; }
|
||||
|
||||
ACMDTMFPlayout::~ACMDTMFPlayout() { return; }
|
||||
|
||||
int16_t ACMDTMFPlayout::InternalEncode(uint8_t* /* bitstream */,
|
||||
int16_t* /* bitstream_len_byte */) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
int16_t ACMDTMFPlayout::InternalInitEncoder(
|
||||
WebRtcACMCodecParams* /* codec_params */) {
|
||||
// This codec does not need initialization,
|
||||
// DTMFPlayout has no instance
|
||||
return 0;
|
||||
}
|
||||
|
||||
ACMGenericCodec* ACMDTMFPlayout::CreateInstance(void) { return NULL; }
|
||||
|
||||
int16_t ACMDTMFPlayout::InternalCreateEncoder() {
|
||||
// DTMFPlayout has no instance
|
||||
return 0;
|
||||
}
|
||||
|
||||
void ACMDTMFPlayout::InternalDestructEncoderInst(void* /* ptr_inst */) {
|
||||
// DTMFPlayout has no instance
|
||||
return;
|
||||
}
|
||||
|
||||
void ACMDTMFPlayout::DestructEncoderSafe() {
|
||||
// DTMFPlayout has no instance
|
||||
return;
|
||||
}
|
||||
|
||||
#endif
|
||||
|
||||
} // namespace webrtc
|
@ -0,0 +1,40 @@
|
||||
/*
|
||||
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_ACM2_ACM_DTMF_PLAYOUT_H_
|
||||
#define WEBRTC_MODULES_AUDIO_CODING_MAIN_ACM2_ACM_DTMF_PLAYOUT_H_
|
||||
|
||||
#include "webrtc/modules/audio_coding/main/acm2/acm_generic_codec.h"
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
class ACMDTMFPlayout : public ACMGenericCodec {
|
||||
public:
|
||||
explicit ACMDTMFPlayout(int16_t codec_id);
|
||||
~ACMDTMFPlayout();
|
||||
|
||||
// for FEC
|
||||
ACMGenericCodec* CreateInstance(void);
|
||||
|
||||
int16_t InternalEncode(uint8_t* bitstream, int16_t* bitstream_len_byte);
|
||||
|
||||
int16_t InternalInitEncoder(WebRtcACMCodecParams* codec_params);
|
||||
|
||||
protected:
|
||||
void DestructEncoderSafe();
|
||||
|
||||
int16_t InternalCreateEncoder();
|
||||
|
||||
void InternalDestructEncoderInst(void* ptr_inst);
|
||||
};
|
||||
|
||||
} // namespace webrtc
|
||||
|
||||
#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_ACM2_ACM_DTMF_PLAYOUT_H_
|
@ -0,0 +1,197 @@
|
||||
/*
|
||||
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#include "webrtc/modules/audio_coding/main/acm2/acm_g722.h"
|
||||
|
||||
#ifdef WEBRTC_CODEC_G722
|
||||
#include "webrtc/modules/audio_coding/codecs/g722/include/g722_interface.h"
|
||||
#include "webrtc/modules/audio_coding/main/acm2/acm_codec_database.h"
|
||||
#include "webrtc/modules/audio_coding/main/acm2/acm_common_defs.h"
|
||||
#include "webrtc/system_wrappers/interface/trace.h"
|
||||
#endif
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
#ifndef WEBRTC_CODEC_G722
|
||||
|
||||
ACMG722::ACMG722(int16_t /* codec_id */)
|
||||
: ptr_enc_str_(NULL),
|
||||
encoder_inst_ptr_(NULL),
|
||||
encoder_inst_ptr_right_(NULL) {}
|
||||
|
||||
ACMG722::~ACMG722() {}
|
||||
|
||||
int32_t ACMG722::Add10MsDataSafe(const uint32_t /* timestamp */,
|
||||
const int16_t* /* data */,
|
||||
const uint16_t /* length_smpl */,
|
||||
const uint8_t /* audio_channel */) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
int16_t ACMG722::InternalEncode(uint8_t* /* bitstream */,
|
||||
int16_t* /* bitstream_len_byte */) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
int16_t ACMG722::InternalInitEncoder(WebRtcACMCodecParams* /* codec_params */) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
ACMGenericCodec* ACMG722::CreateInstance(void) { return NULL; }
|
||||
|
||||
int16_t ACMG722::InternalCreateEncoder() { return -1; }
|
||||
|
||||
void ACMG722::DestructEncoderSafe() { return; }
|
||||
|
||||
void ACMG722::InternalDestructEncoderInst(void* /* ptr_inst */) { return; }
|
||||
|
||||
#else //===================== Actual Implementation =======================
|
||||
|
||||
// Encoder and decoder memory
|
||||
struct ACMG722EncStr {
|
||||
G722EncInst* inst; // instance for left channel in case of stereo
|
||||
G722EncInst* inst_right; // instance for right channel in case of stereo
|
||||
};
|
||||
struct ACMG722DecStr {
|
||||
G722DecInst* inst; // instance for left channel in case of stereo
|
||||
G722DecInst* inst_right; // instance for right channel in case of stereo
|
||||
};
|
||||
|
||||
ACMG722::ACMG722(int16_t codec_id)
|
||||
: encoder_inst_ptr_(NULL), encoder_inst_ptr_right_(NULL) {
|
||||
ptr_enc_str_ = new ACMG722EncStr;
|
||||
if (ptr_enc_str_ != NULL) {
|
||||
ptr_enc_str_->inst = NULL;
|
||||
ptr_enc_str_->inst_right = NULL;
|
||||
}
|
||||
codec_id_ = codec_id;
|
||||
return;
|
||||
}
|
||||
|
||||
ACMG722::~ACMG722() {
|
||||
// Encoder
|
||||
if (ptr_enc_str_ != NULL) {
|
||||
if (ptr_enc_str_->inst != NULL) {
|
||||
WebRtcG722_FreeEncoder(ptr_enc_str_->inst);
|
||||
ptr_enc_str_->inst = NULL;
|
||||
}
|
||||
if (ptr_enc_str_->inst_right != NULL) {
|
||||
WebRtcG722_FreeEncoder(ptr_enc_str_->inst_right);
|
||||
ptr_enc_str_->inst_right = NULL;
|
||||
}
|
||||
delete ptr_enc_str_;
|
||||
ptr_enc_str_ = NULL;
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
int32_t ACMG722::Add10MsDataSafe(const uint32_t timestamp,
|
||||
const int16_t* data,
|
||||
const uint16_t length_smpl,
|
||||
const uint8_t audio_channel) {
|
||||
return ACMGenericCodec::Add10MsDataSafe(
|
||||
(timestamp >> 1), data, length_smpl, audio_channel);
|
||||
}
|
||||
|
||||
int16_t ACMG722::InternalEncode(uint8_t* bitstream,
|
||||
int16_t* bitstream_len_byte) {
|
||||
// If stereo, split input signal in left and right channel before encoding
|
||||
if (num_channels_ == 2) {
|
||||
int16_t left_channel[960];
|
||||
int16_t right_channel[960];
|
||||
uint8_t out_left[480];
|
||||
uint8_t out_right[480];
|
||||
int16_t len_in_bytes;
|
||||
for (int i = 0, j = 0; i < frame_len_smpl_ * 2; i += 2, j++) {
|
||||
left_channel[j] = in_audio_[in_audio_ix_read_ + i];
|
||||
right_channel[j] = in_audio_[in_audio_ix_read_ + i + 1];
|
||||
}
|
||||
len_in_bytes = WebRtcG722_Encode(
|
||||
encoder_inst_ptr_, left_channel, frame_len_smpl_,
|
||||
reinterpret_cast<int16_t*>(out_left));
|
||||
len_in_bytes += WebRtcG722_Encode(encoder_inst_ptr_right_,
|
||||
right_channel,
|
||||
frame_len_smpl_,
|
||||
reinterpret_cast<int16_t*>(out_right));
|
||||
*bitstream_len_byte = len_in_bytes;
|
||||
|
||||
// Interleave the 4 bits per sample from left and right channel
|
||||
for (int i = 0, j = 0; i < len_in_bytes; i += 2, j++) {
|
||||
bitstream[i] = (out_left[j] & 0xF0) + (out_right[j] >> 4);
|
||||
bitstream[i + 1] = ((out_left[j] & 0x0F) << 4) + (out_right[j] & 0x0F);
|
||||
}
|
||||
} else {
|
||||
*bitstream_len_byte = WebRtcG722_Encode(
|
||||
encoder_inst_ptr_, &in_audio_[in_audio_ix_read_], frame_len_smpl_,
|
||||
reinterpret_cast<int16_t*>(bitstream));
|
||||
}
|
||||
|
||||
// Increment the read index; this tells the caller how far we have gone
// forward in reading the audio buffer.
in_audio_ix_read_ += frame_len_smpl_ * num_channels_;
|
||||
return *bitstream_len_byte;
|
||||
}
|
||||
|
||||
int16_t ACMG722::InternalInitEncoder(WebRtcACMCodecParams* codec_params) {
|
||||
if (codec_params->codec_inst.channels == 2) {
|
||||
// Create codec struct for right channel
|
||||
if (ptr_enc_str_->inst_right == NULL) {
|
||||
WebRtcG722_CreateEncoder(&ptr_enc_str_->inst_right);
|
||||
if (ptr_enc_str_->inst_right == NULL) {
|
||||
return -1;
|
||||
}
|
||||
}
|
||||
encoder_inst_ptr_right_ = ptr_enc_str_->inst_right;
|
||||
if (WebRtcG722_EncoderInit(encoder_inst_ptr_right_) < 0) {
|
||||
return -1;
|
||||
}
|
||||
}
|
||||
|
||||
return WebRtcG722_EncoderInit(encoder_inst_ptr_);
|
||||
}
|
||||
|
||||
ACMGenericCodec* ACMG722::CreateInstance(void) { return NULL; }
|
||||
|
||||
int16_t ACMG722::InternalCreateEncoder() {
|
||||
if (ptr_enc_str_ == NULL) {
|
||||
// This structure must have been created in the constructor. If it is still
// NULL, something is wrong and we do not continue.
return -1;
|
||||
}
|
||||
WebRtcG722_CreateEncoder(&ptr_enc_str_->inst);
|
||||
if (ptr_enc_str_->inst == NULL) {
|
||||
return -1;
|
||||
}
|
||||
encoder_inst_ptr_ = ptr_enc_str_->inst;
|
||||
return 0;
|
||||
}
|
||||
|
||||
void ACMG722::DestructEncoderSafe() {
|
||||
if (ptr_enc_str_ != NULL) {
|
||||
if (ptr_enc_str_->inst != NULL) {
|
||||
WebRtcG722_FreeEncoder(ptr_enc_str_->inst);
|
||||
ptr_enc_str_->inst = NULL;
|
||||
}
|
||||
}
|
||||
encoder_exist_ = false;
|
||||
encoder_initialized_ = false;
|
||||
}
|
||||
|
||||
void ACMG722::InternalDestructEncoderInst(void* ptr_inst) {
|
||||
if (ptr_inst != NULL) {
|
||||
WebRtcG722_FreeEncoder(static_cast<G722EncInst*>(ptr_inst));
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
#endif
|
||||
|
||||
} // namespace webrtc
|
@ -0,0 +1,57 @@
|
||||
/*
|
||||
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_ACM2_ACM_G722_H_
|
||||
#define WEBRTC_MODULES_AUDIO_CODING_MAIN_ACM2_ACM_G722_H_
|
||||
|
||||
#include "webrtc/modules/audio_coding/main/acm2/acm_generic_codec.h"
|
||||
|
||||
typedef struct WebRtcG722EncInst G722EncInst;
|
||||
typedef struct WebRtcG722DecInst G722DecInst;
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
// Forward declaration.
|
||||
struct ACMG722EncStr;
|
||||
struct ACMG722DecStr;
|
||||
|
||||
class ACMG722 : public ACMGenericCodec {
|
||||
public:
|
||||
explicit ACMG722(int16_t codec_id);
|
||||
~ACMG722();
|
||||
|
||||
// For FEC.
|
||||
ACMGenericCodec* CreateInstance(void);
|
||||
|
||||
int16_t InternalEncode(uint8_t* bitstream, int16_t* bitstream_len_byte);
|
||||
|
||||
int16_t InternalInitEncoder(WebRtcACMCodecParams* codec_params);
|
||||
|
||||
protected:
|
||||
int32_t Add10MsDataSafe(const uint32_t timestamp,
|
||||
const int16_t* data,
|
||||
const uint16_t length_smpl,
|
||||
const uint8_t audio_channel);
|
||||
|
||||
void DestructEncoderSafe();
|
||||
|
||||
int16_t InternalCreateEncoder();
|
||||
|
||||
void InternalDestructEncoderInst(void* ptr_inst);
|
||||
|
||||
ACMG722EncStr* ptr_enc_str_;
|
||||
|
||||
G722EncInst* encoder_inst_ptr_;
|
||||
G722EncInst* encoder_inst_ptr_right_; // Prepared for stereo
|
||||
};
|
||||
|
||||
} // namespace webrtc
|
||||
|
||||
#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_ACM2_ACM_G722_H_
|
@ -0,0 +1,326 @@
|
||||
/*
|
||||
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#include "webrtc/modules/audio_coding/main/acm2/acm_g7221.h"
|
||||
|
||||
#ifdef WEBRTC_CODEC_G722_1
|
||||
// NOTE! G.722.1 is not included in the open-source package. The following
|
||||
// interface file is needed:
|
||||
#include "webrtc/modules/audio_coding/main/codecs/g7221/interface/g7221_interface.h"
|
||||
#include "webrtc/modules/audio_coding/main/acm2/acm_codec_database.h"
|
||||
#include "webrtc/modules/audio_coding/main/acm2/acm_common_defs.h"
|
||||
#include "webrtc/system_wrappers/interface/trace.h"
|
||||
|
||||
// The API in the header file should match the one below.
|
||||
//
|
||||
// int16_t WebRtcG7221_CreateEnc16(G722_1_16_encinst_t_** enc_inst);
|
||||
// int16_t WebRtcG7221_CreateEnc24(G722_1_24_encinst_t_** enc_inst);
|
||||
// int16_t WebRtcG7221_CreateEnc32(G722_1_32_encinst_t_** enc_inst);
|
||||
// int16_t WebRtcG7221_CreateDec16(G722_1_16_decinst_t_** dec_inst);
|
||||
// int16_t WebRtcG7221_CreateDec24(G722_1_24_decinst_t_** dec_inst);
|
||||
// int16_t WebRtcG7221_CreateDec32(G722_1_32_decinst_t_** dec_inst);
|
||||
//
|
||||
// int16_t WebRtcG7221_FreeEnc16(G722_1_16_encinst_t_** enc_inst);
|
||||
// int16_t WebRtcG7221_FreeEnc24(G722_1_24_encinst_t_** enc_inst);
|
||||
// int16_t WebRtcG7221_FreeEnc32(G722_1_32_encinst_t_** enc_inst);
|
||||
// int16_t WebRtcG7221_FreeDec16(G722_1_16_decinst_t_** dec_inst);
|
||||
// int16_t WebRtcG7221_FreeDec24(G722_1_24_decinst_t_** dec_inst);
|
||||
// int16_t WebRtcG7221_FreeDec32(G722_1_32_decinst_t_** dec_inst);
|
||||
//
|
||||
// int16_t WebRtcG7221_EncoderInit16(G722_1_16_encinst_t_* enc_inst);
|
||||
// int16_t WebRtcG7221_EncoderInit24(G722_1_24_encinst_t_* enc_inst);
|
||||
// int16_t WebRtcG7221_EncoderInit32(G722_1_32_encinst_t_* enc_inst);
|
||||
// int16_t WebRtcG7221_DecoderInit16(G722_1_16_decinst_t_* dec_inst);
|
||||
// int16_t WebRtcG7221_DecoderInit24(G722_1_24_decinst_t_* dec_inst);
|
||||
// int16_t WebRtcG7221_DecoderInit32(G722_1_32_decinst_t_* dec_inst);
|
||||
//
|
||||
// int16_t WebRtcG7221_Encode16(G722_1_16_encinst_t_* enc_inst,
|
||||
// int16_t* input,
|
||||
// int16_t len,
|
||||
// int16_t* output);
|
||||
// int16_t WebRtcG7221_Encode24(G722_1_24_encinst_t_* enc_inst,
|
||||
// int16_t* input,
|
||||
// int16_t len,
|
||||
// int16_t* output);
|
||||
// int16_t WebRtcG7221_Encode32(G722_1_32_encinst_t_* enc_inst,
|
||||
// int16_t* input,
|
||||
// int16_t len,
|
||||
// int16_t* output);
|
||||
//
|
||||
// int16_t WebRtcG7221_Decode16(G722_1_16_decinst_t_* dec_inst,
|
||||
// int16_t* bitstream,
|
||||
// int16_t len,
|
||||
// int16_t* output);
|
||||
// int16_t WebRtcG7221_Decode24(G722_1_24_decinst_t_* dec_inst,
|
||||
// int16_t* bitstream,
|
||||
// int16_t len,
|
||||
// int16_t* output);
|
||||
// int16_t WebRtcG7221_Decode32(G722_1_32_decinst_t_* dec_inst,
|
||||
// int16_t* bitstream,
|
||||
// int16_t len,
|
||||
// int16_t* output);
|
||||
//
|
||||
// int16_t WebRtcG7221_DecodePlc16(G722_1_16_decinst_t_* dec_inst,
|
||||
// int16_t* output,
|
||||
// int16_t nr_lost_frames);
|
||||
// int16_t WebRtcG7221_DecodePlc24(G722_1_24_decinst_t_* dec_inst,
|
||||
// int16_t* output,
|
||||
// int16_t nr_lost_frames);
|
||||
// int16_t WebRtcG7221_DecodePlc32(G722_1_32_decinst_t_* dec_inst,
|
||||
// int16_t* output,
|
||||
// int16_t nr_lost_frames);
|
||||
#endif
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
#ifndef WEBRTC_CODEC_G722_1
|
||||
|
||||
ACMG722_1::ACMG722_1(int16_t /* codec_id */)
|
||||
: operational_rate_(-1),
|
||||
encoder_inst_ptr_(NULL),
|
||||
encoder_inst_ptr_right_(NULL),
|
||||
encoder_inst16_ptr_(NULL),
|
||||
encoder_inst16_ptr_right_(NULL),
|
||||
encoder_inst24_ptr_(NULL),
|
||||
encoder_inst24_ptr_right_(NULL),
|
||||
encoder_inst32_ptr_(NULL),
|
||||
encoder_inst32_ptr_right_(NULL) {
|
||||
return;
|
||||
}
|
||||
|
||||
ACMG722_1::~ACMG722_1() { return; }
|
||||
|
||||
int16_t ACMG722_1::InternalEncode(uint8_t* /* bitstream */,
|
||||
int16_t* /* bitstream_len_byte */) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
int16_t ACMG722_1::InternalInitEncoder(
|
||||
WebRtcACMCodecParams* /* codec_params */) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
ACMGenericCodec* ACMG722_1::CreateInstance(void) { return NULL; }
|
||||
|
||||
int16_t ACMG722_1::InternalCreateEncoder() { return -1; }
|
||||
|
||||
void ACMG722_1::DestructEncoderSafe() { return; }
|
||||
|
||||
void ACMG722_1::InternalDestructEncoderInst(void* /* ptr_inst */) { return; }
|
||||
|
||||
#else //===================== Actual Implementation =======================
|
||||
ACMG722_1::ACMG722_1(int16_t codec_id)
|
||||
: encoder_inst_ptr_(NULL),
|
||||
encoder_inst_ptr_right_(NULL),
|
||||
encoder_inst16_ptr_(NULL),
|
||||
encoder_inst16_ptr_right_(NULL),
|
||||
encoder_inst24_ptr_(NULL),
|
||||
encoder_inst24_ptr_right_(NULL),
|
||||
encoder_inst32_ptr_(NULL),
|
||||
encoder_inst32_ptr_right_(NULL) {
|
||||
codec_id_ = codec_id;
|
||||
if (codec_id_ == ACMCodecDB::kG722_1_16) {
|
||||
operational_rate_ = 16000;
|
||||
} else if (codec_id_ == ACMCodecDB::kG722_1_24) {
|
||||
operational_rate_ = 24000;
|
||||
} else if (codec_id_ == ACMCodecDB::kG722_1_32) {
|
||||
operational_rate_ = 32000;
|
||||
} else {
|
||||
operational_rate_ = -1;
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
ACMG722_1::~ACMG722_1() {
|
||||
if (encoder_inst_ptr_ != NULL) {
|
||||
delete encoder_inst_ptr_;
|
||||
encoder_inst_ptr_ = NULL;
|
||||
}
|
||||
if (encoder_inst_ptr_right_ != NULL) {
|
||||
delete encoder_inst_ptr_right_;
|
||||
encoder_inst_ptr_right_ = NULL;
|
||||
}
|
||||
|
||||
switch (operational_rate_) {
|
||||
case 16000: {
|
||||
encoder_inst16_ptr_ = NULL;
|
||||
encoder_inst16_ptr_right_ = NULL;
|
||||
break;
|
||||
}
|
||||
case 24000: {
|
||||
encoder_inst24_ptr_ = NULL;
|
||||
encoder_inst24_ptr_right_ = NULL;
|
||||
break;
|
||||
}
|
||||
case 32000: {
|
||||
encoder_inst32_ptr_ = NULL;
|
||||
encoder_inst32_ptr_right_ = NULL;
|
||||
break;
|
||||
}
|
||||
default: {
|
||||
break;
|
||||
}
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
int16_t ACMG722_1::InternalEncode(uint8_t* bitstream,
|
||||
int16_t* bitstream_len_byte) {
|
||||
int16_t left_channel[320];
|
||||
int16_t right_channel[320];
|
||||
int16_t len_in_bytes;
|
||||
int16_t out_bits[160];
|
||||
|
||||
// If stereo, split input signal in left and right channel before encoding
|
||||
if (num_channels_ == 2) {
|
||||
for (int i = 0, j = 0; i < frame_len_smpl_ * 2; i += 2, j++) {
|
||||
left_channel[j] = in_audio_[in_audio_ix_read_ + i];
|
||||
right_channel[j] = in_audio_[in_audio_ix_read_ + i + 1];
|
||||
}
|
||||
} else {
|
||||
memcpy(left_channel, &in_audio_[in_audio_ix_read_], 320);
|
||||
}
|
||||
|
||||
switch (operational_rate_) {
|
||||
case 16000: {
|
||||
len_in_bytes = WebRtcG7221_Encode16(encoder_inst16_ptr_, left_channel,
|
||||
320, &out_bits[0]);
|
||||
if (num_channels_ == 2) {
|
||||
len_in_bytes += WebRtcG7221_Encode16(encoder_inst16_ptr_right_,
|
||||
right_channel, 320,
|
||||
&out_bits[len_in_bytes / 2]);
|
||||
}
|
||||
break;
|
||||
}
|
||||
case 24000: {
|
||||
len_in_bytes = WebRtcG7221_Encode24(encoder_inst24_ptr_, left_channel,
|
||||
320, &out_bits[0]);
|
||||
if (num_channels_ == 2) {
|
||||
len_in_bytes += WebRtcG7221_Encode24(encoder_inst24_ptr_right_,
|
||||
right_channel, 320,
|
||||
&out_bits[len_in_bytes / 2]);
|
||||
}
|
||||
break;
|
||||
}
|
||||
case 32000: {
|
||||
len_in_bytes = WebRtcG7221_Encode32(encoder_inst32_ptr_, left_channel,
|
||||
320, &out_bits[0]);
|
||||
if (num_channels_ == 2) {
|
||||
len_in_bytes += WebRtcG7221_Encode32(encoder_inst32_ptr_right_,
|
||||
right_channel, 320,
|
||||
&out_bits[len_in_bytes / 2]);
|
||||
}
|
||||
break;
|
||||
}
|
||||
default: {
|
||||
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
|
||||
"InternalInitEncode: Wrong rate for G722_1.");
|
||||
return -1;
|
||||
}
|
||||
}
|
||||
memcpy(bitstream, out_bits, len_in_bytes);
|
||||
*bitstream_len_byte = len_in_bytes;
|
||||
|
||||
// Increment the read index; this tells the caller how far we have gone
// forward in reading the audio buffer.
in_audio_ix_read_ += 320 * num_channels_;
|
||||
return *bitstream_len_byte;
|
||||
}
|
||||
|
||||
int16_t ACMG722_1::InternalInitEncoder(WebRtcACMCodecParams* codec_params) {
|
||||
int16_t ret;
|
||||
|
||||
switch (operational_rate_) {
|
||||
case 16000: {
|
||||
ret = WebRtcG7221_EncoderInit16(encoder_inst16_ptr_right_);
|
||||
if (ret < 0) {
|
||||
return ret;
|
||||
}
|
||||
return WebRtcG7221_EncoderInit16(encoder_inst16_ptr_);
|
||||
}
|
||||
case 24000: {
|
||||
ret = WebRtcG7221_EncoderInit24(encoder_inst24_ptr_right_);
|
||||
if (ret < 0) {
|
||||
return ret;
|
||||
}
|
||||
return WebRtcG7221_EncoderInit24(encoder_inst24_ptr_);
|
||||
}
|
||||
case 32000: {
|
||||
ret = WebRtcG7221_EncoderInit32(encoder_inst32_ptr_right_);
|
||||
if (ret < 0) {
|
||||
return ret;
|
||||
}
|
||||
return WebRtcG7221_EncoderInit32(encoder_inst32_ptr_);
|
||||
}
|
||||
default: {
|
||||
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding,
|
||||
unique_id_, "InternalInitEncoder: Wrong rate for G722_1.");
|
||||
return -1;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
ACMGenericCodec* ACMG722_1::CreateInstance(void) { return NULL; }
|
||||
|
||||
int16_t ACMG722_1::InternalCreateEncoder() {
|
||||
if ((encoder_inst_ptr_ == NULL) || (encoder_inst_ptr_right_ == NULL)) {
|
||||
return -1;
|
||||
}
|
||||
switch (operational_rate_) {
|
||||
case 16000: {
|
||||
WebRtcG7221_CreateEnc16(&encoder_inst16_ptr_);
|
||||
WebRtcG7221_CreateEnc16(&encoder_inst16_ptr_right_);
|
||||
break;
|
||||
}
|
||||
case 24000: {
|
||||
WebRtcG7221_CreateEnc24(&encoder_inst24_ptr_);
|
||||
WebRtcG7221_CreateEnc24(&encoder_inst24_ptr_right_);
|
||||
break;
|
||||
}
|
||||
case 32000: {
|
||||
WebRtcG7221_CreateEnc32(&encoder_inst32_ptr_);
|
||||
WebRtcG7221_CreateEnc32(&encoder_inst32_ptr_right_);
|
||||
break;
|
||||
}
|
||||
default: {
|
||||
WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
|
||||
"InternalCreateEncoder: Wrong rate for G722_1.");
|
||||
return -1;
|
||||
}
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
void ACMG722_1::DestructEncoderSafe() {
|
||||
encoder_exist_ = false;
|
||||
encoder_initialized_ = false;
|
||||
if (encoder_inst_ptr_ != NULL) {
|
||||
delete encoder_inst_ptr_;
|
||||
encoder_inst_ptr_ = NULL;
|
||||
}
|
||||
if (encoder_inst_ptr_right_ != NULL) {
|
||||
delete encoder_inst_ptr_right_;
|
||||
encoder_inst_ptr_right_ = NULL;
|
||||
}
|
||||
encoder_inst16_ptr_ = NULL;
|
||||
encoder_inst24_ptr_ = NULL;
|
||||
encoder_inst32_ptr_ = NULL;
|
||||
}
|
||||
|
||||
void ACMG722_1::InternalDestructEncoderInst(void* ptr_inst) {
|
||||
if (ptr_inst != NULL) {
|
||||
delete ptr_inst;
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
#endif
|
||||
|
||||
} // namespace webrtc
|
@ -0,0 +1,62 @@
|
||||
/*
|
||||
* Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
|
||||
*
|
||||
* Use of this source code is governed by a BSD-style license
|
||||
* that can be found in the LICENSE file in the root of the source
|
||||
* tree. An additional intellectual property rights grant can be found
|
||||
* in the file PATENTS. All contributing project authors may
|
||||
* be found in the AUTHORS file in the root of the source tree.
|
||||
*/
|
||||
|
||||
#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_ACM2_ACM_G7221_H_
|
||||
#define WEBRTC_MODULES_AUDIO_CODING_MAIN_ACM2_ACM_G7221_H_
|
||||
|
||||
#include "webrtc/modules/audio_coding/main/acm2/acm_generic_codec.h"
|
||||
|
||||
// forward declaration
|
||||
struct G722_1_16_encinst_t_;
|
||||
struct G722_1_16_decinst_t_;
|
||||
struct G722_1_24_encinst_t_;
|
||||
struct G722_1_24_decinst_t_;
|
||||
struct G722_1_32_encinst_t_;
|
||||
struct G722_1_32_decinst_t_;
|
||||
struct G722_1_Inst_t_;
|
||||
|
||||
namespace webrtc {
|
||||
|
||||
class ACMG722_1 : public ACMGenericCodec {
|
||||
public:
|
||||
explicit ACMG722_1(int16_t codec_id);
|
||||
~ACMG722_1();
|
||||
|
||||
// for FEC
|
||||
ACMGenericCodec* CreateInstance(void);
|
||||
|
||||
int16_t InternalEncode(uint8_t* bitstream, int16_t* bitstream_len_byte);
|
||||
|
||||
int16_t InternalInitEncoder(WebRtcACMCodecParams* codec_params);
|
||||
|
||||
protected:
|
||||
void DestructEncoderSafe();
|
||||
|
||||
int16_t InternalCreateEncoder();
|
||||
|
||||
void InternalDestructEncoderInst(void* ptr_inst);
|
||||
|
||||
int32_t operational_rate_;
|
||||
|
||||
G722_1_Inst_t_* encoder_inst_ptr_;
|
||||
G722_1_Inst_t_* encoder_inst_ptr_right_; // Used in stereo mode
|
||||
|
||||
// Only one set of these pointers is valid at any given time.
G722_1_16_encinst_t_* encoder_inst16_ptr_;
|
||||
G722_1_16_encinst_t_* encoder_inst16_ptr_right_;
|
||||
G722_1_24_encinst_t_* encoder_inst24_ptr_;
|
||||
G722_1_24_encinst_t_* encoder_inst24_ptr_right_;
|
||||
G722_1_32_encinst_t_* encoder_inst32_ptr_;
|
||||
G722_1_32_encinst_t_* encoder_inst32_ptr_right_;
|
||||
};
|
||||
|
||||
} // namespace webrtc
|
||||
|
||||
#endif // WEBRTC_MODULES_AUDIO_CODING_MAIN_ACM2_ACM_G7221_H_
|
@ -0,0 +1,332 @@
|
||||
/*
 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#include "webrtc/modules/audio_coding/main/acm2/acm_g7221c.h"

#ifdef WEBRTC_CODEC_G722_1C
// NOTE! G.722.1C is not included in the open-source package. The following
// interface file is needed:
#include "webrtc/modules/audio_coding/main/codecs/g7221c/interface/g7221c_interface.h"
#include "webrtc/modules/audio_coding/main/acm2/acm_codec_database.h"
#include "webrtc/modules/audio_coding/main/acm2/acm_common_defs.h"
#include "webrtc/system_wrappers/interface/trace.h"

// The API in the header file should match the one below.
//
// int16_t WebRtcG7221C_CreateEnc24(G722_1C_24_encinst_t_** enc_inst);
// int16_t WebRtcG7221C_CreateEnc32(G722_1C_32_encinst_t_** enc_inst);
// int16_t WebRtcG7221C_CreateEnc48(G722_1C_48_encinst_t_** enc_inst);
// int16_t WebRtcG7221C_CreateDec24(G722_1C_24_decinst_t_** dec_inst);
// int16_t WebRtcG7221C_CreateDec32(G722_1C_32_decinst_t_** dec_inst);
// int16_t WebRtcG7221C_CreateDec48(G722_1C_48_decinst_t_** dec_inst);
//
// int16_t WebRtcG7221C_FreeEnc24(G722_1C_24_encinst_t_** enc_inst);
// int16_t WebRtcG7221C_FreeEnc32(G722_1C_32_encinst_t_** enc_inst);
// int16_t WebRtcG7221C_FreeEnc48(G722_1C_48_encinst_t_** enc_inst);
// int16_t WebRtcG7221C_FreeDec24(G722_1C_24_decinst_t_** dec_inst);
// int16_t WebRtcG7221C_FreeDec32(G722_1C_32_decinst_t_** dec_inst);
// int16_t WebRtcG7221C_FreeDec48(G722_1C_48_decinst_t_** dec_inst);
//
// int16_t WebRtcG7221C_EncoderInit24(G722_1C_24_encinst_t_* enc_inst);
// int16_t WebRtcG7221C_EncoderInit32(G722_1C_32_encinst_t_* enc_inst);
// int16_t WebRtcG7221C_EncoderInit48(G722_1C_48_encinst_t_* enc_inst);
// int16_t WebRtcG7221C_DecoderInit24(G722_1C_24_decinst_t_* dec_inst);
// int16_t WebRtcG7221C_DecoderInit32(G722_1C_32_decinst_t_* dec_inst);
// int16_t WebRtcG7221C_DecoderInit48(G722_1C_48_decinst_t_* dec_inst);
//
// int16_t WebRtcG7221C_Encode24(G722_1C_24_encinst_t_* enc_inst,
//                               int16_t* input,
//                               int16_t len,
//                               int16_t* output);
// int16_t WebRtcG7221C_Encode32(G722_1C_32_encinst_t_* enc_inst,
//                               int16_t* input,
//                               int16_t len,
//                               int16_t* output);
// int16_t WebRtcG7221C_Encode48(G722_1C_48_encinst_t_* enc_inst,
//                               int16_t* input,
//                               int16_t len,
//                               int16_t* output);
//
// int16_t WebRtcG7221C_Decode24(G722_1C_24_decinst_t_* dec_inst,
//                               int16_t* bitstream,
//                               int16_t len,
//                               int16_t* output);
// int16_t WebRtcG7221C_Decode32(G722_1C_32_decinst_t_* dec_inst,
//                               int16_t* bitstream,
//                               int16_t len,
//                               int16_t* output);
// int16_t WebRtcG7221C_Decode48(G722_1C_48_decinst_t_* dec_inst,
//                               int16_t* bitstream,
//                               int16_t len,
//                               int16_t* output);
//
// int16_t WebRtcG7221C_DecodePlc24(G722_1C_24_decinst_t_* dec_inst,
//                                  int16_t* output,
//                                  int16_t nr_lost_frames);
// int16_t WebRtcG7221C_DecodePlc32(G722_1C_32_decinst_t_* dec_inst,
//                                  int16_t* output,
//                                  int16_t nr_lost_frames);
// int16_t WebRtcG7221C_DecodePlc48(G722_1C_48_decinst_t_* dec_inst,
//                                  int16_t* output,
//                                  int16_t nr_lost_frames);
#endif

namespace webrtc {

#ifndef WEBRTC_CODEC_G722_1C

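// Stub implementation, used when the (non-open-source) G.722.1C codec is not
// compiled in. Every operation is a no-op or reports failure.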
ACMG722_1C::ACMG722_1C(int16_t /* codec_id */)
    : operational_rate_(-1),
      encoder_inst_ptr_(NULL),
      encoder_inst_ptr_right_(NULL),
      encoder_inst24_ptr_(NULL),
      encoder_inst24_ptr_right_(NULL),
      encoder_inst32_ptr_(NULL),
      encoder_inst32_ptr_right_(NULL),
      encoder_inst48_ptr_(NULL),
      encoder_inst48_ptr_right_(NULL) {
  return;
}

ACMG722_1C::~ACMG722_1C() { return; }

int16_t ACMG722_1C::InternalEncode(uint8_t* /* bitstream */,
                                   int16_t* /* bitstream_len_byte */) {
  return -1;
}

int16_t ACMG722_1C::InternalInitEncoder(
    WebRtcACMCodecParams* /* codec_params */) {
  return -1;
}

ACMGenericCodec* ACMG722_1C::CreateInstance(void) { return NULL; }

int16_t ACMG722_1C::InternalCreateEncoder() { return -1; }

void ACMG722_1C::DestructEncoderSafe() { return; }

void ACMG722_1C::InternalDestructEncoderInst(void* /* ptr_inst */) { return; }

#else //===================== Actual Implementation =======================
ACMG722_1C::ACMG722_1C(int16_t codec_id)
    : encoder_inst_ptr_(NULL),
      encoder_inst_ptr_right_(NULL),
      encoder_inst24_ptr_(NULL),
      encoder_inst24_ptr_right_(NULL),
      encoder_inst32_ptr_(NULL),
      encoder_inst32_ptr_right_(NULL),
      encoder_inst48_ptr_(NULL),
      encoder_inst48_ptr_right_(NULL) {
  codec_id_ = codec_id;
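  // Map the ACM codec id onto the G.722.1C operational bitrate in bit/s.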
  if (codec_id_ == ACMCodecDB::kG722_1C_24) {
    operational_rate_ = 24000;
  } else if (codec_id_ == ACMCodecDB::kG722_1C_32) {
    operational_rate_ = 32000;
  } else if (codec_id_ == ACMCodecDB::kG722_1C_48) {
    operational_rate_ = 48000;
  } else {
    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
                 "Wrong codec id for G722_1c.");
    operational_rate_ = -1;
  }
  return;
}

ACMG722_1C::~ACMG722_1C() {
  if (encoder_inst_ptr_ != NULL) {
    delete encoder_inst_ptr_;
    encoder_inst_ptr_ = NULL;
  }
  if (encoder_inst_ptr_right_ != NULL) {
    delete encoder_inst_ptr_right_;
    encoder_inst_ptr_right_ = NULL;
  }

  switch (operational_rate_) {
    case 24000: {
      encoder_inst24_ptr_ = NULL;
      encoder_inst24_ptr_right_ = NULL;
      break;
    }
    case 32000: {
      encoder_inst32_ptr_ = NULL;
      encoder_inst32_ptr_right_ = NULL;
      break;
    }
    case 48000: {
      encoder_inst48_ptr_ = NULL;
      encoder_inst48_ptr_right_ = NULL;
      break;
    }
    default: {
      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
                   "Wrong rate for G722_1c.");
      break;
    }
  }
  return;
}

int16_t ACMG722_1C::InternalEncode(uint8_t* bitstream,
                                   int16_t* bitstream_len_byte) {
  int16_t left_channel[640];
  int16_t right_channel[640];
  int16_t len_in_bytes;
  int16_t out_bits[240];

  // If stereo, split the input signal into left and right channels before
  // encoding.
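  // Stereo input is stored interleaved (L, R, L, R, ...), hence the stride of
  // two when copying into the per-channel buffers.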
  if (num_channels_ == 2) {
    for (int i = 0, j = 0; i < frame_len_smpl_ * 2; i += 2, j++) {
      left_channel[j] = in_audio_[in_audio_ix_read_ + i];
      right_channel[j] = in_audio_[in_audio_ix_read_ + i + 1];
    }
  } else {
    memcpy(left_channel, &in_audio_[in_audio_ix_read_], 640 * sizeof(int16_t));
  }

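  // out_bits is addressed as 16-bit words while len_in_bytes counts bytes, so
  // the right-channel payload is appended at out_bits[len_in_bytes / 2].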
  switch (operational_rate_) {
    case 24000: {
      len_in_bytes = WebRtcG7221C_Encode24(encoder_inst24_ptr_, left_channel,
                                           640, &out_bits[0]);
      if (num_channels_ == 2) {
        len_in_bytes += WebRtcG7221C_Encode24(encoder_inst24_ptr_right_,
                                              right_channel, 640,
                                              &out_bits[len_in_bytes / 2]);
      }
      break;
    }
    case 32000: {
      len_in_bytes = WebRtcG7221C_Encode32(encoder_inst32_ptr_, left_channel,
                                           640, &out_bits[0]);
      if (num_channels_ == 2) {
        len_in_bytes += WebRtcG7221C_Encode32(encoder_inst32_ptr_right_,
                                              right_channel, 640,
                                              &out_bits[len_in_bytes / 2]);
      }
      break;
    }
    case 48000: {
      len_in_bytes = WebRtcG7221C_Encode48(encoder_inst48_ptr_, left_channel,
                                           640, &out_bits[0]);
      if (num_channels_ == 2) {
        len_in_bytes += WebRtcG7221C_Encode48(encoder_inst48_ptr_right_,
                                              right_channel, 640,
                                              &out_bits[len_in_bytes / 2]);
      }
      break;
    }
    default: {
      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
                   "InternalEncode: Wrong rate for G722_1c.");
      return -1;
    }
  }

  memcpy(bitstream, out_bits, len_in_bytes);
  *bitstream_len_byte = len_in_bytes;

  // Advance the read index to tell the caller how far we have read into the
  // audio buffer.
  in_audio_ix_read_ += 640 * num_channels_;

  return *bitstream_len_byte;
}

int16_t ACMG722_1C::InternalInitEncoder(WebRtcACMCodecParams* codec_params) {
  int16_t ret;

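  // Initialize the right-channel encoder first; if that fails, propagate the
  // error, otherwise return the result of initializing the left encoder.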
  switch (operational_rate_) {
    case 24000: {
      ret = WebRtcG7221C_EncoderInit24(encoder_inst24_ptr_right_);
      if (ret < 0) {
        return ret;
      }
      return WebRtcG7221C_EncoderInit24(encoder_inst24_ptr_);
    }
    case 32000: {
      ret = WebRtcG7221C_EncoderInit32(encoder_inst32_ptr_right_);
      if (ret < 0) {
        return ret;
      }
      return WebRtcG7221C_EncoderInit32(encoder_inst32_ptr_);
    }
    case 48000: {
      ret = WebRtcG7221C_EncoderInit48(encoder_inst48_ptr_right_);
      if (ret < 0) {
        return ret;
      }
      return WebRtcG7221C_EncoderInit48(encoder_inst48_ptr_);
    }
    default: {
      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
                   "InternalInitEncode: Wrong rate for G722_1c.");
      return -1;
    }
  }
}

ACMGenericCodec* ACMG722_1C::CreateInstance(void) { return NULL; }

int16_t ACMG722_1C::InternalCreateEncoder() {
  if ((encoder_inst_ptr_ == NULL) || (encoder_inst_ptr_right_ == NULL)) {
    return -1;
  }
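  // Only the encoder pair (left and right) matching the configured rate is
  // allocated.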
  switch (operational_rate_) {
    case 24000: {
      WebRtcG7221C_CreateEnc24(&encoder_inst24_ptr_);
      WebRtcG7221C_CreateEnc24(&encoder_inst24_ptr_right_);
      break;
    }
    case 32000: {
      WebRtcG7221C_CreateEnc32(&encoder_inst32_ptr_);
      WebRtcG7221C_CreateEnc32(&encoder_inst32_ptr_right_);
      break;
    }
    case 48000: {
      WebRtcG7221C_CreateEnc48(&encoder_inst48_ptr_);
      WebRtcG7221C_CreateEnc48(&encoder_inst48_ptr_right_);
      break;
    }
    default: {
      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, unique_id_,
                   "InternalCreateEncoder: Wrong rate for G722_1c.");
      return -1;
    }
  }
  return 0;
}

void ACMG722_1C::DestructEncoderSafe() {
  encoder_exist_ = false;
  encoder_initialized_ = false;
  if (encoder_inst_ptr_ != NULL) {
    delete encoder_inst_ptr_;
    encoder_inst_ptr_ = NULL;
  }
  if (encoder_inst_ptr_right_ != NULL) {
    delete encoder_inst_ptr_right_;
    encoder_inst_ptr_right_ = NULL;
  }
  encoder_inst24_ptr_ = NULL;
  encoder_inst32_ptr_ = NULL;
  encoder_inst48_ptr_ = NULL;
}

void ACMG722_1C::InternalDestructEncoderInst(void* ptr_inst) {
  if (ptr_inst != NULL) {
    delete ptr_inst;
  }
  return;
}

#endif

}  // namespace webrtc
Some files were not shown because too many files have changed in this diff.