Solution: bind the click event to your own buttons and manually trigger the audio context's play and pause methods.

Code snippet:

wxml file

```xml
<!-- Shown for voice calls that have a call record and whose description does not contain 'missed' -->
<view class="reference" wx:if="{{itemList.activity_type === 'phone' && itemList.activity_reference_id && tool.indexOf(itemList.comment, 'missed') === -1}}">
  <!-- Play button: shown before the recording URL has been fetched -->
  <van-button
    class="ref-btn"
    wx:if="{{audioResourceMaps[itemList.activity_reference_id] === undefined}}"
    loading="{{itemList.activity_reference_id === currentGettingReferenceId}}"
    icon="play"
    type="info"
    plain
    data-reference-id="{{itemList.activity_reference_id}}"
    bindtap="getReference">
  </van-button>
  <view wx:else class="audio-box">
    <!-- Pause / resume button: shown once a recording URL exists -->
    <van-button
      class="ref-btn"
      wx:if="{{audioResourceMaps[itemList.activity_reference_id]}}"
      data-reference-id="{{itemList.activity_reference_id}}"
      icon="pause"
      type="info"
      plain
      bindtap="pauseAudio"/>
    <!-- Shown when the request returned no call recording -->
    <span wx:else class="no-audio-text">No call recording found</span>
  </view>
</view>
```
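The wx:if condition above calls tool.indexOf, a helper the article does not show; it presumably comes from a WXS module imported into this WXML file. Below is a minimal sketch of such a helper — the file name tool.wxs and the module name tool are assumptions, not part of the original code:

```js
// tool.wxs — hypothetical WXS helper module (file name and module name are assumptions)
// WXS runs in an ES5-only sandbox, so this simply wraps the native string indexOf
// and guards against an empty or missing comment string.
module.exports = {
  indexOf: function (str, target) {
    if (!str) return -1;
    return str.indexOf(target);
  }
};
```

It would be imported near the top of the WXML file with something like <wxs src="./tool.wxs" module="tool" /> so that tool.indexOf can be used inside data bindings.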
wxss file

```css
.reference {
  margin-top: 20rpx;
  height: 100%;
  padding: 5rpx;
  box-sizing: border-box;
}
.ref-btn {
  width: 80rpx;
  height: 80rpx;
  display: flex;
}
.ref-btn button {
  width: 80rpx;
  height: 80rpx;
  border-radius: 50%;
}
```

js file

```js
/**
 * Initial data of the component
 */
data: {
  currentGettingReferenceId: null, // id of the audio currently being fetched or played
  audioResourceMaps: {},           // map of reference id -> recording URL for clicked audios
  isPause: false,                  // whether playback is currently paused
},

/**
 * Component lifecycle
 */
lifetimes: {
  attached: function () {
    // This is a child component, so create the audio context on the component instance here
    this.audioContext = wx.createInnerAudioContext();
  },
  detached: function () {
    // Stop playback when the component instance is removed from the page node tree
    this.stopAudio()
  },
},

methods: {
  // Fetch the recording URL, then play it
  getReference(e) {
    let id = e.target.dataset.referenceId
    if (id != this.data.currentGettingReferenceId) {
      this.stopAudio()
    }
    this.setData({
      currentGettingReferenceId: id
    })
    // Request the recording URL from the backend; adapt this call to your own request wrapper
    WXAPI.getResourceUrl(`/conversation/conversationsession/${id}/`, {
      data_type: 'all',
    }).then(({ resource_url }) => {
      console.log('audio address ====', resource_url)
      // iOS only plays https addresses reliably, so keep the URL only if it is https
      let url = resource_url && resource_url.indexOf('https://') > -1 ? encodeURI(resource_url) : null
      this.data.audioResourceMaps[id] = url
      if (resource_url) this.playAudio(id, url)
      this.setData({
        audioResourceMaps: this.data.audioResourceMaps
      })
      console.log('Played list =====', this.data.audioResourceMaps)
    }).catch(function (e) {
      console.log(e)
    })
  },

  // Pause / resume
  pauseAudio() {
    this.setData({
      isPause: !this.data.isPause
    })
    let id = this.data.currentGettingReferenceId
    console.log(id, 'id of the paused audio')
    const innerAudioContext = this.audioContext
    if (this.data.isPause) {
      innerAudioContext.pause()
      console.log('Playback paused')
    } else {
      innerAudioContext.play()
      console.log('Playback resumed')
    }
  },

  // Stop playback
  stopAudio() {
    const innerAudioContext = this.audioContext
    innerAudioContext.stop()
    // Clear the cached recording URLs so the play button is shown again
    let obj = this.data.audioResourceMaps
    for (let item in obj) {
      delete obj[item]
    }
    this.setData({
      audioResourceMaps: obj,
      currentGettingReferenceId: null
    })
    console.log('Playback stopped')
  },

  // Play
  playAudio(id, url) {
    const innerAudioContext = this.audioContext
    console.log(url, 'audio address')
    if (url) {
      innerAudioContext.src = url
      innerAudioContext.play()
      innerAudioContext.onPlay(() => {
        console.log('Playback started')
      })
      innerAudioContext.onTimeUpdate(() => {
        console.log(innerAudioContext.duration, 'total duration')
        console.log(innerAudioContext.currentTime, 'current playback position')
      })
      setTimeout(() => {
        console.log(innerAudioContext.duration, 'total duration')
        console.log(innerAudioContext.currentTime, 'current playback position')
      }, 500)
      innerAudioContext.onEnded(() => {
        // Reset state so the play button is shown again after playback finishes
        let obj = this.data.audioResourceMaps
        for (let item in obj) {
          delete obj[item]
        }
        this.setData({
          audioResourceMaps: obj,
          currentGettingReferenceId: null
        })
        console.log('Playback completed')
      })
      innerAudioContext.onError((res) => {
        console.log(res.errMsg)
        console.log(res.errCode)
      })
    }
  }
}
```

Rendering

⚠️ To use Vant in a WeChat mini-program, you must register the components in a .json file, otherwise the tags will not render. Here they are registered in app.json so they are available globally:

```json
"usingComponents": {
  "van-button": "@vant/weapp/button/index",
  "van-icon": "@vant/weapp/icon/index"
}
```

Official website documentation: developers.weixin.qq.com/miniprogram…

Summary

This concludes this article on solving the problem that the WeChat mini-program audio component cannot be played on iOS. For more content about mini-program audio playback on iOS, please search for previous articles on 123WORDPRESS.COM. I hope everyone will continue to support 123WORDPRESS.COM!