In the end, speech recognition worked once the recording format was switched to amr:
recorderManager.start({
duration: 60000, // maximum recording length, in ms
sampleRate: 16000, // sample rate
numberOfChannels: 1, // number of recording channels
encodeBitRate: 96000, // encoding bit rate
format: 'amr', // amr is the format that worked for recognition
frameSize: 10 // frame size, in KB
});
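For context, here is a minimal sketch of how the recorded .amr file can then be sent to a speech-recognition endpoint with uni.uploadFile. The URL, form field names, and the assistant_id value are placeholders, not the actual backend used in this post:

// Sketch only: upload the recorded .amr file for recognition.
// The endpoint URL and form fields below are placeholders.
const recorderManager = uni.getRecorderManager();
recorderManager.onStop((res) => {
	uni.uploadFile({
		url: 'https://example.com/api/audio-to-text', // placeholder endpoint
		filePath: res.tempFilePath, // path of the recorded .amr file
		name: 'file', // form field name for the file
		formData: { assistant_id: '1' }, // any extra fields your backend expects
		success: (uploadRes) => {
			// uploadFile returns the response body as a string
			console.log('recognition result:', uploadRes.data);
		},
		fail: (err) => {
			console.error('upload failed:', err);
		}
	});
});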
The complete code is attached below.
<template>
<view class="chat_wrap flex-column">
<headerView :is_back="true" :header_text="header_text"></headerView>
<scroll-view :scroll-into-view="scrollToView" scroll-y="true" class="msg_list">
<block v-for="(item,index) in msg_list" :key="index">
<view :id="'item'+index" class="item my flex-row align-start just-end">
<view class="content flex-column">
<text class="text_val">{{item.message}}</text>
</view>
<image></image>
</view>
<view :id="'item'+index" v-if="item.audio" class="item her flex-row align-start just-start">
<image></image>
<view class="content">
<text @click="handleAudioPlay(item)" @longpress="handleMore(item)">点击播放</text>
</view>
</view>
</block>
</scroll-view>
<view class="btn flex-row align-end just-between">
<view class="add flex-row align-center just-center" @click="changePostType">
<image mode="widthFix" style="width:46rpx" src="@/static/gpt/keywords.png" v-if="post_type!='text'">
</image>
<image mode="widthFix" style="width: 34rpx;" src="@/static/gpt/voice.png" v-else></image>
</view>
<text v-if="post_type!='text'" @touchstart="startRecord"
@touchend="endRecord">{{is_voice_type=='starting'?'松开结束':'按住说话'}}</text>
<textarea v-else auto-height="true" placeholder="请输入描述词" v-model="postText"></textarea>
<view v-if="post_type=='text'" class="add flex-row align-center just-center" @click="handleSend">
<image src="@/static/gpt/post.png"></image>
</view>
</view>
</view>
</template>
<script>
import api from '@/http/api.js'
import headerView from '../../components/header-view.vue'
const recorderManager = uni.getRecorderManager();
const innerAudioContext = uni.createInnerAudioContext();
innerAudioContext.autoplay = true;
export default {
components: {
headerView
},
data() {
return {
header_text: '',
id: '',
msg_list: [],
postText: '',
scrollToView: '',
req: {},
is_show_file: false,
post_type: 'text',
voicePath: '',
is_voice_type: 'ending',
recorderManager: null,
is_requesting: false
}
},
async onLoad(option) {
let self = this;
this.header_text = option.title
this.req.assistant_id = option.id
this.getHistoryMsg()
recorderManager.onStop(function(res) {
self.voicePath = res.tempFilePath;
self.handleSend()
});
},
methods: {
async getHistoryMsg() {
let res = await api.getHistoryMsg(this.req)
this.msg_list = res.data
setTimeout(() => {
this.scrollToView = 'item' + Number(this.msg_list.length - 1);
}, 100)
},
// toggle between text and voice input
changePostType() {
if (this.post_type == 'text') {
this.post_type = 'audio'
} else {
this.post_type = 'text'
}
},
startRecord() {
if (this.changeYyModule()) {
return
}
this.is_voice_type = 'starting'
recorderManager.start({
duration: 60000, // maximum recording length, in ms
sampleRate: 16000, // sample rate
numberOfChannels: 1, // number of recording channels
encodeBitRate: 96000, // encoding bit rate
format: 'amr', // amr is the format that worked for recognition
frameSize: 10 // frame size, in KB
});
},
endRecord() {
console.log('recording ended');
this.is_voice_type = 'ending'
recorderManager.stop();
},
async handleSend() {
let res
if (this.is_requesting) {
return
}
this.is_requesting = true
if (this.post_type == 'text') {
if (this.postText == '') {
uni.showToast({
icon: 'error',
title: '请描述出您的内容'
})
this.is_requesting = false
return
}
// the template renders item.message, so push the same field name
this.msg_list.push({
message: this.postText
})
res = await api.textToAudio({
...this.req,
text: this.postText
})
} else {
if (this.voicePath == '') {
uni.showToast({
icon: 'error',
title: '请描述出您的内容'
})
this.is_requesting = false
return
}
console.log(this.voicePath)
res = await api.audioToText({
formData: {
...this.req
},
file: this.voicePath
})
}
console.log(res)
this.is_requesting = false
if (res.errno != 0) {
uni.showToast({
icon: 'error',
title: res.message
})
return
}
this.postText = ''
this.voicePath = ''
this.getHistoryMsg()
},
// check microphone permission
changeYyModule(e) {
const appAuthorizeSetting = uni.getAppAuthorizeSetting()
let callback = false
if (appAuthorizeSetting.microphoneAuthorized == "denied") {
uni.showModal({
title: '提示',
content: '请先打开麦克风权限',
success: function(res) {
if (res.confirm) {
uni.openAppAuthorizeSetting()
} else if (res.cancel) {
console.log('user tapped cancel');
}
},
});
callback = true
}
return callback
},
handleAudioPlay(item) {
// autoplay is enabled on innerAudioContext above, so setting src starts playback
innerAudioContext.src = item.audio
},
handleMore(item) {
const that = this
uni.showActionSheet({
itemList: ['下载'],
success: function(res) {
if (res.tapIndex == 0) {
that.downLoadAudio(item)
}
},
fail: function(res) {
console.log(res.errMsg);
}
});
},
downLoadAudio(item) {
uni.downloadFile({
url: item.audio, // the message's audio URL
success: (res) => {
if (res.statusCode === 200) {
console.log('下载成功');
}
}
})
}
}
}
</script>
<style scoped lang="less">
.chat_wrap {
background: #F6F5FC;
height: 100vh;
.msg_list {
height: calc(100% - 122rpx - 88rpx - var(--status-bar-height));
overflow-y: scroll;
padding: 20rpx 40rpx;
.item {
margin-bottom: 40rpx;
}
.her {
&>image {
width: 80rpx;
height: 80rpx;
border-radius: 20rpx;
margin-right: 20rpx;
background: url(@/static/logo_96.png);
background-size: 100%;
}
.content {
max-width: 560rpx;
padding: 20rpx;
background: #fff;
border-radius: 12rpx;
.text_val {
font-weight: 400;
font-size: 30rpx;
color: #333333;
word-break: break-word;
}
}
}
.my {
&>image {
width: 80rpx;
height: 80rpx;
border-radius: 20rpx;
margin-left: 20rpx;
background: url(@/static/logo_96.png);
background-size: 100%;
}
.content {
max-width: 560rpx;
padding: 20rpx;
background: #fff;
border-radius: 12rpx;
.text_val {
font-weight: 400;
font-size: 30rpx;
color: #333333;
word-break: break-word;
}
}
}
}
.btn {
width: 750rpx;
min-height: 122rpx;
background: #FFFFFF;
padding: 20rpx 40rpx;
position: fixed;
bottom: 0;
left: 0;
.add {
width: 80rpx;
height: 80rpx;
background: #FFFFFF;
border-radius: 12rpx;
border: 3rpx solid #252634;
image {
width: 44rpx;
height: 44rpx;
}
}
textarea,
text {
flex: 1;
background: #FFFFFF;
border-radius: 12rpx;
border: 3rpx solid #252634;
font-weight: 400;
font-size: 30rpx;
color: #828285;
padding: 17rpx 20rpx;
box-sizing: border-box;
margin: 0 20rpx;
}
text {
text-align: center;
color: #333;
}
textarea:first-child {
margin-left: 0;
}
}
}
</style>
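The @/http/api.js module referenced above is not included in the post. Below is a rough sketch of what the three calls used here (getHistoryMsg, textToAudio, audioToText) might look like, assuming uni.request for the text/history calls and uni.uploadFile for the audio upload. The base URL, endpoint paths, and the { errno, message, data } response shape are assumptions, not the original implementation.

// @/http/api.js — hypothetical sketch, not the original file.
// BASE_URL and the endpoint paths are placeholders.
const BASE_URL = 'https://example.com/api';

export default {
	// fetch chat history for an assistant
	getHistoryMsg(data) {
		return new Promise((resolve, reject) => {
			uni.request({
				url: BASE_URL + '/history',
				data,
				success: (res) => resolve(res.data),
				fail: reject
			});
		});
	},
	// send a text prompt
	textToAudio(data) {
		return new Promise((resolve, reject) => {
			uni.request({
				url: BASE_URL + '/text-to-audio',
				method: 'POST',
				data,
				success: (res) => resolve(res.data),
				fail: reject
			});
		});
	},
	// upload the recorded .amr file for recognition
	audioToText({ formData, file }) {
		return new Promise((resolve, reject) => {
			uni.uploadFile({
				url: BASE_URL + '/audio-to-text',
				filePath: file,
				name: 'file',
				formData,
				// uploadFile returns the body as a string, so parse it here
				success: (res) => resolve(JSON.parse(res.data)),
				fail: reject
			});
		});
	}
};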