Qualcomm Audio Architecture (Part 2)

1.2 HAL (Hardware Abstraction Layer)

In the previous article we went through some of the audio code flow in the framework layer; next, let's look at the HAL layer.

For most drivers, the HAL layer plays a pass-through role: it basically just moves data along, does little logic of its own, and leaves the core work to the kernel. For audio it is pretty much the opposite. The audio kernel code holds the platform's common hardware-interaction code, and most audio vendors choose not to put their own core logic into the kernel but into the HAL instead. As a result, for audio the HAL layer ends up being the core of the whole framework, so let's take a quick look at some of this HAL's little secrets.

First, to understand the HAL you need to know where its code lives. The interfaces are generally placed under platform/hardware/interface/audio, with a directory layout like this:

platform/hardware/interface/audio

  • 2.0
  • 4.0
  • 5.0
  • 6.0
  • common
  • core
  • effect
  • policy

The 2.0/4.0/5.0/6.0 folders are the versions of the HAL interface; the framework layer has matching code for them, and the upper layer generally searches starting from the newest version, so in most cases the newest version is what actually gets used. The other folders vary by project: some trees put the interface implementation directly in this directory, others put it somewhere else; in the code I am looking at the implementation lives right here. DevicesFactory.cpp is the class whose interface the upper layer calls directly in order to get at everything else.
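Before diving into the implementation, here is a rough sketch of how a framework-side client could reach this factory through the HIDL proxy. It is an illustration based on the generated HIDL API — the V6_0 version and the helper function name are assumptions for the example, not code copied from this tree:

// Hedged sketch: obtain IDevicesFactory from hwservicemanager and open the
// "primary" module. V6_0 is just an example; as noted above, the framework
// tries the newest interface version first. Transport-error checks omitted.
#include <android/hardware/audio/6.0/IDevicesFactory.h>

using ::android::sp;
using ::android::hardware::audio::V6_0::IDevice;
using ::android::hardware::audio::V6_0::IDevicesFactory;
using ::android::hardware::audio::V6_0::Result;

sp<IDevice> openPrimaryModuleExample() {
    sp<IDevicesFactory> factory = IDevicesFactory::getService();
    if (factory == nullptr) return nullptr;

    sp<IDevice> device;
    // openDevice() is the server entry point shown below; the result comes
    // back through the HIDL callback.
    factory->openDevice("primary", [&](Result r, const sp<IDevice>& d) {
        if (r == Result::OK) device = d;
    });
    return device;
}

The server-side implementation behind that call, in DevicesFactory.cpp, looks like this: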

#if MAJOR_VERSION == 2
Return<void> DevicesFactory::openDevice(IDevicesFactory::Device device, openDevice_cb _hidl_cb) {
    switch (device) {
        case IDevicesFactory::Device::PRIMARY:
            return openDevice<PrimaryDevice>(AUDIO_HARDWARE_MODULE_ID_PRIMARY, _hidl_cb);
        case IDevicesFactory::Device::A2DP:
            return openDevice(AUDIO_HARDWARE_MODULE_ID_A2DP, _hidl_cb);
        case IDevicesFactory::Device::USB:
            return openDevice(AUDIO_HARDWARE_MODULE_ID_USB, _hidl_cb);
        case IDevicesFactory::Device::R_SUBMIX:
            return openDevice(AUDIO_HARDWARE_MODULE_ID_REMOTE_SUBMIX, _hidl_cb);
        case IDevicesFactory::Device::STUB:
            return openDevice(AUDIO_HARDWARE_MODULE_ID_STUB, _hidl_cb);
    }
    _hidl_cb(Result::INVALID_ARGUMENTS, nullptr);
    return Void();
}
#elif MAJOR_VERSION >= 4
Return<void> DevicesFactory::openDevice(const hidl_string& moduleName, openDevice_cb _hidl_cb) {
    if (moduleName == AUDIO_HARDWARE_MODULE_ID_PRIMARY) {
        return openDevice<PrimaryDevice>(moduleName.c_str(), _hidl_cb);
    }
    return openDevice(moduleName.c_str(), _hidl_cb);
}
Return<void> DevicesFactory::openPrimaryDevice(openPrimaryDevice_cb _hidl_cb) {
    return openDevice<PrimaryDevice>(AUDIO_HARDWARE_MODULE_ID_PRIMARY, _hidl_cb);
}
#endif

Return<void> DevicesFactory::openDevice(const char* moduleName, openDevice_cb _hidl_cb) {
    return openDevice<implementation::Device>(moduleName, _hidl_cb);
}

template <class DeviceShim, class Callback>
Return<void> DevicesFactory::openDevice(const char* moduleName, Callback _hidl_cb) {
    audio_hw_device_t* halDevice;
    Result retval(Result::INVALID_ARGUMENTS);
    sp<DeviceShim> result;
    int halStatus = loadAudioInterface(moduleName, &halDevice);
    if (halStatus == OK) {
        result = new DeviceShim(halDevice);
        retval = Result::OK;
    } else if (halStatus == -EINVAL) {
        retval = Result::NOT_INITIALIZED;
    }
    _hidl_cb(retval, result);
    return Void();
}

// static
int DevicesFactory::loadAudioInterface(const char* if_name, audio_hw_device_t** dev) {
    const hw_module_t* mod;
    int rc;

    rc = hw_get_module_by_class(AUDIO_HARDWARE_MODULE_ID, if_name, &mod);
    if (rc) {
        ALOGE("%s couldn't load audio hw module %s.%s (%s)", __func__, AUDIO_HARDWARE_MODULE_ID,
              if_name, strerror(-rc));
        goto out;
    }
    rc = audio_hw_device_open(mod, dev);
    if (rc) {
        ALOGE("%s couldn't open audio hw device in %s.%s (%s)", __func__, AUDIO_HARDWARE_MODULE_ID,
              if_name, strerror(-rc));
        goto out;
    }
    if ((*dev)->common.version < AUDIO_DEVICE_API_VERSION_MIN) {
        ALOGE("%s wrong audio hw device version %04x", __func__, (*dev)->common.version);
        rc = -EINVAL;
        audio_hw_device_close(*dev);
        goto out;
    }
    return OK;

out:
    *dev = NULL;
    return rc;
}

IDevicesFactory* HIDL_FETCH_IDevicesFactory(const char* name) {
    return strcmp(name, "default") == 0 ? new DevicesFactory() : nullptr;
}

This class really only contains two functions, openDevice and loadAudioInterface. openDevice is what the upper layer calls, and when it is invoked it in turn runs loadAudioInterface; that is what connects us to the HIDL server side, locating the server module by AUDIO_HARDWARE_MODULE_ID. Following this into the code, we do indeed find the server side at target/vendor/qcom/opensource/audio-hal/primary-hal/hal/audio_hw.c, which is one place where our audio server side is implemented.
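How does loadAudioInterface get from AUDIO_HARDWARE_MODULE_ID ("audio") plus a module name such as "primary" to that vendor library? Roughly speaking, hw_get_module_by_class in libhardware builds the name "audio.primary", tries a few board/platform property suffixes to find audio.primary.<variant>.so under /vendor/lib/hw or /system/lib/hw (falling back to audio.primary.default.so), dlopen()s it and looks up the exported HAL_MODULE_INFO_SYM structure (symbol name "HMI"). Here is a condensed paraphrase — not the verbatim libhardware code, and with the property-fallback list omitted:

#include <dlfcn.h>
#include <errno.h>
#include <stdio.h>
#include <hardware/hardware.h>

/* Condensed paraphrase of libhardware's module lookup; error handling and the
 * full list of property-based library-name fallbacks are omitted. */
static int hw_get_module_by_class_sketch(const char *class_id, const char *inst,
                                         const struct hw_module_t **module)
{
    char name[64];
    char path[256];

    /* "audio" + "primary" -> "audio.primary" */
    snprintf(name, sizeof(name), "%s.%s", class_id, inst);

    /* The real code tries audio.primary.<ro.board.platform>.so etc. before
     * the default variant, in both /vendor/lib/hw and /system/lib/hw. */
    snprintf(path, sizeof(path), "/vendor/lib/hw/%s.default.so", name);

    void *handle = dlopen(path, RTLD_NOW);
    if (handle == NULL)
        return -EINVAL;

    /* HAL_MODULE_INFO_SYM is exported under the symbol name "HMI". */
    *module = (const struct hw_module_t *)dlsym(handle, "HMI");
    return *module != NULL ? 0 : -EINVAL;
}

The structure that symbol resolves to is exactly the HAL_MODULE_INFO_SYM defined in audio_hw.c: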

static struct hw_module_methods_t hal_module_methods = {
    .open = adev_open,
};

struct audio_module HAL_MODULE_INFO_SYM = {
    .common = {
        .tag = HARDWARE_MODULE_TAG,
        .module_api_version = AUDIO_MODULE_API_VERSION_0_1,
        .hal_api_version = HARDWARE_HAL_API_VERSION,
        .id = AUDIO_HARDWARE_MODULE_ID,
        .name = "QCOM Audio HAL",
        .author = "The Linux Foundation",
        .methods = &hal_module_methods,
    },
};

When hw_get_module_by_class is reached in the HAL, the system uses AUDIO_HARDWARE_MODULE_ID to match exactly this module, and audio_hw_device_open then corresponds to adev_open.
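The reason audio_hw_device_open ends up in adev_open is that it is just a thin inline helper in libhardware's hardware/audio.h that forwards to the module's open method, which hal_module_methods (shown above) binds to adev_open. Paraphrased roughly — treat the cast/macro details as approximate:

/* Paraphrased from hardware/audio.h: the helper simply calls through the
 * module's methods->open; note the AUDIO_HARDWARE_INTERFACE name, which
 * adev_open checks with strcmp() below. */
static inline int audio_hw_device_open(const struct hw_module_t *module,
                                       struct audio_hw_device **device)
{
    return module->methods->open(module, AUDIO_HARDWARE_INTERFACE,
                                 (struct hw_device_t **)device);
}

adev_open itself then fills in the audio_hw_device function table and initializes the device: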

static int adev_open(const hw_module_t *module, const char *name,
                     hw_device_t **device)
{
    int ret;
    char value[PROPERTY_VALUE_MAX] = {0};
    char mixer_ctl_name[128] = {0};
    struct mixer_ctl *ctl = NULL;

    ALOGD("%s: enter", __func__);
    if (strcmp(name, AUDIO_HARDWARE_INTERFACE) != 0) return -EINVAL;

    pthread_mutex_lock(&adev_init_lock);
    if (audio_device_ref_count != 0) {
        *device = &adev->device.common;
        audio_device_ref_count++;
        ALOGD("%s: returning existing instance of adev", __func__);
        ALOGD("%s: exit", __func__);
        pthread_mutex_unlock(&adev_init_lock);
        return 0;
    }

    adev = calloc(1, sizeof(struct audio_device));

    if (!adev) {
        pthread_mutex_unlock(&adev_init_lock);
        return -ENOMEM;
    }

    pthread_mutex_init(&adev->lock, (const pthread_mutexattr_t *) NULL);
 
    // register audio ext hidl at the earliest
    audio_extn_hidl_init();
#ifdef DYNAMIC_LOG_ENABLED
    register_for_dynamic_logging("hal");
#endif
 
    /* default audio HAL major version */
    uint32_t maj_version = 3;
    if(property_get("vendor.audio.hal.maj.version", value, NULL))
        maj_version = atoi(value);
 
    adev->device.common.tag = HARDWARE_DEVICE_TAG;
    adev->device.common.version = HARDWARE_DEVICE_API_VERSION(maj_version, 0);
    adev->device.common.module = (struct hw_module_t *)module;
    adev->device.common.close = adev_close;
 
    adev->device.init_check = adev_init_check;
    adev->device.set_voice_volume = adev_set_voice_volume;
    adev->device.set_master_volume = adev_set_master_volume;
    adev->device.get_master_volume = adev_get_master_volume;
    adev->device.set_master_mute = adev_set_master_mute;
    adev->device.get_master_mute = adev_get_master_mute;
    adev->device.set_mode = adev_set_mode;
    adev->device.set_mic_mute = adev_set_mic_mute;
    adev->device.get_mic_mute = adev_get_mic_mute;
    adev->device.set_parameters = adev_set_parameters;
    adev->device.get_parameters = adev_get_parameters;
    adev->device.get_input_buffer_size = adev_get_input_buffer_size;
    adev->device.open_output_stream = adev_open_output_stream;
    adev->device.close_output_stream = adev_close_output_stream;
    adev->device.open_input_stream = adev_open_input_stream;
    adev->device.close_input_stream = adev_close_input_stream;
    adev->device.create_audio_patch = adev_create_audio_patch;
    adev->device.release_audio_patch = adev_release_audio_patch;
    adev->device.get_audio_port = adev_get_audio_port;
    adev->device.set_audio_port_config = adev_set_audio_port_config;
    adev->device.dump = adev_dump;
    adev->device.get_microphones = adev_get_microphones;
    adev->sub_mic_to_headphones_loopback_mode = false;
    adev->builtin_mic_to_headphones_loopback_mode = false;
    adev->headset_mic_to_headphones_loopback_mode = false;
    /* Set the default route before the PCM stream is opened */
    adev->mode = AUDIO_MODE_NORMAL;
    adev->primary_output = NULL;
    adev->out_device = AUDIO_DEVICE_NONE;
    adev->bluetooth_nrec = true;
    adev->acdb_settings = TTY_MODE_OFF;
    adev->allow_afe_proxy_usage = true;
    adev->bt_sco_on = false;
    ......
}

adev_open assigns this whole series of functions to adev and hands it back to the HAL layer to be called; they correspond to the matching operations in the upper layer, so we won't go through them one by one.

Recall that playback uses an output stream, so let's find the corresponding adev_open_output_stream and look at that path. When the upper layer calls openOutputStream, this function creates a stream_out, stores the stream's audio attributes, and fills in the stream functions the upper layer will use: start/stop/pause/resume/write and so on.

Now look at writing data. When the upper layer calls outputStream.write, it lands in out_write in the HAL service. In short, out_write first checks whether this is a voice call: if so it takes one path, otherwise it goes through start_output_stream. Both paths eventually reach pcm_start (the voice-call path additionally does a pcm_open first), the data is then processed a bit along the way, and finally pcm_write is called to finish the job. From out_write's logic, the write path ultimately converges on PCM: the last steps are all calls into the pcm_* functions.

That brings up a concept: what is PCM? Its full name is Pulse Code Modulation, and its job is to turn a time-continuous, value-continuous analog signal into a time-discrete, value-discrete digital signal that can be transmitted over a channel; in other words, the analog signal is sampled, and the sampled amplitudes are then quantized and encoded. PCM is a communications/encoding concept, whereas WAV is a media concept, i.e. a container: a WAV file can wrap PCM-encoded data, and it can also wrap other encodings such as MP3. The pcm we use here is provided by the user-space ALSA library — in our case tinyalsa, located at external/tinyalsa.
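Before diving into tinyalsa itself, here is a heavily simplified sketch of the out_write playback path just described. It illustrates the control flow only and is not the vendor implementation — field names such as out->lock, out->standby and out->pcm are assumptions for the example, and the compress-offload, voice/VoIP and error-recovery branches are left out:

/* Heavily simplified sketch of the playback write path (illustration only). */
static ssize_t out_write_sketch(struct stream_out *out,
                                const void *buffer, size_t bytes)
{
    pthread_mutex_lock(&out->lock);

    if (out->standby) {
        /* start_output_stream(): select the device/backend, pcm_open() the
         * matching /dev/snd/pcmCxDxp node and pcm_start() it. */
        int ret = start_output_stream(out);
        if (ret != 0) {
            pthread_mutex_unlock(&out->lock);
            return ret;
        }
        out->standby = false;
    }

    /* Hand the (possibly post-processed) PCM frames to tinyalsa. */
    int ret = pcm_write(out->pcm, buffer, bytes);

    pthread_mutex_unlock(&out->lock);
    return ret == 0 ? (ssize_t)bytes : ret;
}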

tinyalsa:

  • mixer.c
  • pcm.c
  • tinycap.c
  • tinyhostless.c
  • tinymix.c
  • tinypcminfo.c
  • tinyplay.c

These are the files in tinyalsa. mixer and pcm handle mixing and pulse code modulation respectively; cap is capture and play is playback. tinyalsa essentially acts as a relay: mixer.c and pcm.c are compiled into a library for code to call, while the others are built into executable binaries that can be used from code or driven directly from the command line. A small usage sketch of the mixer API follows.
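As an illustration of the library side, here is a hedged sketch of driving the mixer API from mixer.c. The control name "RX1 Digital Volume" is a made-up example — real control names come from the platform's codec driver and mixer configuration:

#include <tinyalsa/asoundlib.h>

/* Sketch: set a hypothetical volume control on sound card 0. */
int set_example_volume(int volume)
{
    struct mixer *mixer = mixer_open(0);      /* opens /dev/snd/controlC0 */
    if (mixer == NULL)
        return -1;

    struct mixer_ctl *ctl = mixer_get_ctl_by_name(mixer, "RX1 Digital Volume");
    if (ctl == NULL) {
        mixer_close(mixer);
        return -1;
    }

    int ret = mixer_ctl_set_value(ctl, 0, volume);  /* value index 0 */
    mixer_close(mixer);
    return ret;
}

The tiny-prefixed binaries (tinymix, tinyplay, tinycap) do essentially the same thing from the command line.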

tinyalsa makes the various audio operations easier to organize and further decouples the HAL layer from the kernel layer: the HAL only calls the functions it needs and does not care how they are implemented, so however the HAL changes, the part that talks to the kernel stays untouched. It also fits Linux's "everything is a file" philosophy.

Let's pick pcm and look at open/start/write.

struct pcm *pcm_open(unsigned int card, unsigned int device,
                     unsigned int flags, struct pcm_config *config)
{
    struct pcm *pcm;
    struct snd_pcm_info info;
    struct snd_pcm_hw_params params;
    struct snd_pcm_sw_params sparams;
    char fn[256];
    int rc;

    if (!config) {
        return &bad_pcm; /* TODO: could support default config here */
    }
    pcm = calloc(1, sizeof(struct pcm));
    if (!pcm)
        return &bad_pcm; /* TODO: could support default config here */

    pcm->config = *config;

    snprintf(fn, sizeof(fn), "/dev/snd/pcmC%uD%u%c", card, device,
             flags & PCM_IN ? 'c' : 'p');

    pcm->flags = flags;
    pcm->fd = open(fn, O_RDWR|O_NONBLOCK);
    if (pcm->fd < 0) {
        oops(pcm, errno, "cannot open device '%s'", fn);
        return pcm;
    }

    if (fcntl(pcm->fd, F_SETFL, fcntl(pcm->fd, F_GETFL) &
              ~O_NONBLOCK) < 0) {
        oops(pcm, errno, "failed to reset blocking mode '%s'", fn);
        goto fail_close;
    }

    if (ioctl(pcm->fd, SNDRV_PCM_IOCTL_INFO, &info)) {
        oops(pcm, errno, "cannot get info");
        goto fail_close;
    }
    pcm->subdevice = info.subdevice;

    param_init(&params);
    param_set_mask(&params, SNDRV_PCM_HW_PARAM_FORMAT,
                   pcm_format_to_alsa(config->format));
    param_set_mask(&params, SNDRV_PCM_HW_PARAM_SUBFORMAT,
                   SNDRV_PCM_SUBFORMAT_STD);
    param_set_min(&params, SNDRV_PCM_HW_PARAM_PERIOD_SIZE, config->period_size);
    param_set_int(&params, SNDRV_PCM_HW_PARAM_SAMPLE_BITS,
                  pcm_format_to_bits(config->format));
    param_set_int(&params, SNDRV_PCM_HW_PARAM_FRAME_BITS,
                  pcm_format_to_bits(config->format) * config->channels);
    param_set_int(&params, SNDRV_PCM_HW_PARAM_CHANNELS,
                  config->channels);
    param_set_int(&params, SNDRV_PCM_HW_PARAM_PERIODS, config->period_count);
    param_set_int(&params, SNDRV_PCM_HW_PARAM_RATE, config->rate);

    if (flags & PCM_NOIRQ) {
        if (!(flags & PCM_MMAP)) {
            oops(pcm, EINVAL, "noirq only currently supported with mmap().");
            goto fail_close;
        }

        params.flags |= SNDRV_PCM_HW_PARAMS_NO_PERIOD_WAKEUP;
        pcm->noirq_frames_per_msec = config->rate / 1000;
    }

    if (flags & PCM_MMAP)
        param_set_mask(&params, SNDRV_PCM_HW_PARAM_ACCESS,
                       SNDRV_PCM_ACCESS_MMAP_INTERLEAVED);
    else
        param_set_mask(&params, SNDRV_PCM_HW_PARAM_ACCESS,
                       SNDRV_PCM_ACCESS_RW_INTERLEAVED);

    if (ioctl(pcm->fd, SNDRV_PCM_IOCTL_HW_PARAMS, &params)) {
        oops(pcm, errno, "cannot set hw params");
        goto fail_close;
    }

    /* get our refined hw_params */
    config->period_size = param_get_int(&params, SNDRV_PCM_HW_PARAM_PERIOD_SIZE);
    config->period_count = param_get_int(&params, SNDRV_PCM_HW_PARAM_PERIODS);
    pcm->buffer_size = config->period_count * config->period_size;

    if (flags & PCM_MMAP) {
        pcm->mmap_buffer = mmap(NULL, pcm_frames_to_bytes(pcm, pcm->buffer_size),
                                PROT_READ | PROT_WRITE, MAP_FILE | MAP_SHARED, pcm->fd, 0);
        if (pcm->mmap_buffer == MAP_FAILED) {
            oops(pcm, errno, "failed to mmap buffer %d bytes\n",
                 pcm_frames_to_bytes(pcm, pcm->buffer_size));
            goto fail_close;
        }
    }

    memset(&sparams, 0, sizeof(sparams));
    sparams.tstamp_mode = SNDRV_PCM_TSTAMP_ENABLE;
    sparams.period_step = 1;

    if (!config->start_threshold) {
        if (pcm->flags & PCM_IN)
            pcm->config.start_threshold = sparams.start_threshold = 1;
        else
            pcm->config.start_threshold = sparams.start_threshold =
                config->period_count * config->period_size / 2;
    } else
        sparams.start_threshold = config->start_threshold;

    /* pick a high stop threshold - todo: does this need further tuning */
    if (!config->stop_threshold) {
        if (pcm->flags & PCM_IN)
            pcm->config.stop_threshold = sparams.stop_threshold =
                config->period_count * config->period_size * 10;
        else
            pcm->config.stop_threshold = sparams.stop_threshold =
                config->period_count * config->period_size;
    }
    else
        sparams.stop_threshold = config->stop_threshold;

    if (!pcm->config.avail_min) {
        if (pcm->flags & PCM_MMAP)
            pcm->config.avail_min = sparams.avail_min = pcm->config.period_size;
        else
            pcm->config.avail_min = sparams.avail_min = 1;
    } else
        sparams.avail_min = config->avail_min;

    sparams.xfer_align = config->period_size / 2; /* needed for old kernels */
    sparams.silence_threshold = config->silence_threshold;
    sparams.silence_size = config->silence_size;
    pcm->boundary = sparams.boundary = pcm->buffer_size;

    while (pcm->boundary * 2 <= INT_MAX - pcm->buffer_size)
        pcm->boundary *= 2;

    if (ioctl(pcm->fd, SNDRV_PCM_IOCTL_SW_PARAMS, &sparams)) {
        oops(pcm, errno, "cannot set sw params");
        goto fail;
    }

    rc = pcm_hw_mmap_status(pcm);
    if (rc < 0) {
        oops(pcm, errno, "mmap status failed");
        goto fail;
    }

#ifdef SNDRV_PCM_IOCTL_TTSTAMP
    if (pcm->flags & PCM_MONOTONIC) {
        int arg = SNDRV_PCM_TSTAMP_TYPE_MONOTONIC;
        rc = ioctl(pcm->fd, SNDRV_PCM_IOCTL_TTSTAMP, &arg);
        if (rc < 0) {
            oops(pcm, errno, "cannot set timestamp type");
            goto fail;
        }
    }
#endif

    pcm->underruns = 0;
    return pcm;

fail:
    if (flags & PCM_MMAP)
        munmap(pcm->mmap_buffer, pcm_frames_to_bytes(pcm, pcm->buffer_size));
fail_close:
    close(pcm->fd);
    pcm->fd = -1;
    return pcm;
}

Looking at pcm_open, it first opens a /dev/snd/pcmCxDxp (or ...c) device node. These nodes are created by the kernel and are classified by sound card, device number, and direction; for example /dev/snd/pcmC0D0p is the first playback PCM of the first sound card — the trailing p means playback and c means capture (more on this when we get to the kernel). After opening the node, it configures the hardware parameters through ioctl. A small usage sketch of pcm_open follows, and then we look at pcm_start.
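To make the calling convention concrete, here is a minimal, hedged usage sketch of pcm_open for a playback stream; the card/device numbers and config values are arbitrary examples:

#include <tinyalsa/asoundlib.h>

/* Example only: 48 kHz, stereo, 16-bit playback on card 0, device 0,
 * i.e. the /dev/snd/pcmC0D0p node described above. */
struct pcm *open_playback_example(void)
{
    struct pcm_config config = {
        .channels = 2,
        .rate = 48000,
        .period_size = 1024,
        .period_count = 4,
        .format = PCM_FORMAT_S16_LE,
        /* leaving the thresholds at 0 lets pcm_open() fill in the defaults
         * computed in the code above */
        .start_threshold = 0,
        .stop_threshold = 0,
        .silence_threshold = 0,
    };

    struct pcm *pcm = pcm_open(0 /* card */, 0 /* device */, PCM_OUT, &config);
    if (!pcm_is_ready(pcm)) {
        pcm_close(pcm);
        return NULL;
    }
    return pcm;
}

With the stream open, pcm_start is next: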

int pcm_start(struct pcm *pcm)
{
    int prepare_error = pcm_prepare(pcm);
    if (prepare_error)
        return prepare_error;

    if (pcm->flags & PCM_MMAP)
        pcm_sync_ptr(pcm, 0);

    if (ioctl(pcm->fd, SNDRV_PCM_IOCTL_START) < 0)
        return oops(pcm, errno, "cannot start channel");

    pcm->running = 1;
    return 0;
}

int pcm_prepare(struct pcm *pcm)
{
    if (pcm->prepared)
        return 0;

    if (ioctl(pcm->fd, SNDRV_PCM_IOCTL_PREPARE) < 0)
        return oops(pcm, errno, "cannot prepare channel");

    pcm->prepared = 1;
    return 0;
}

pcm_start first calls pcm_prepare, which does nothing more than send the SNDRV_PCM_IOCTL_PREPARE command to the node; pcm_start then sends SNDRV_PCM_IOCTL_START — and that is all pcm_start does.

int pcm_write(struct pcm *pcm, const void *data, unsigned int count)
{
    struct snd_xferi x;

    if (pcm->flags & PCM_IN)
        return -EINVAL;

    x.buf = (void*)data;
    x.frames = count / (pcm->config.channels *
                        pcm_format_to_bits(pcm->config.format) / 8);

    for (;;) {
        if (!pcm->running) {
            int prepare_error = pcm_prepare(pcm);
            if (prepare_error)
                return prepare_error;
            if (ioctl(pcm->fd, SNDRV_PCM_IOCTL_WRITEI_FRAMES, &x))
                return oops(pcm, errno, "cannot write initial data");
            pcm->running = 1;
            return 0;
        }
        if (ioctl(pcm->fd, SNDRV_PCM_IOCTL_WRITEI_FRAMES, &x)) {
            pcm->prepared = 0;
            pcm->running = 0;
            if (errno == EPIPE) {
                /* we failed to make our window -- try to restart if we are
                 * allowed to do so.  Otherwise, simply allow the EPIPE error to
                 * propagate up to the app level */
                pcm->underruns++;
                if (pcm->flags & PCM_NORESTART)
                    return -EPIPE;
                continue;
            }
            return oops(pcm, errno, "cannot write stream data");
        }
        return 0;
    }
}

pcm_write first works out how many frames it has been asked to write: the byte count divided by the frame size, i.e. channels × bits-per-sample / 8 (for example, with 2 channels of 16-bit samples a frame is 4 bytes, so 960 bytes is 240 frames). It then checks whether the pcm is running; if not it calls pcm_prepare, and finally it pushes the data down with the SNDRV_PCM_IOCTL_WRITEI_FRAMES ioctl.

In short, tinyalsa does not do much here itself; it mainly relays the data, and the real work is still handed to the kernel. That completes the HAL-layer flow with playback as the example, and the other flows can be understood by analogy. mixer is much like pcm, except that the node it operates on is /dev/snd/controlCx — the overall control node for audio, which always exists and of which there is only one. The other tiny-prefixed programs can be thought of as small tools or scripts: they also work by driving mixer and pcm, which provides another convenient way to control audio and makes debugging easier for developers. That is a small slice of the playback HAL flow.


Reposted from blog.csdn.net/wh2526422/article/details/124101204