前言

“快对讲”是基于音视频技术的对讲业务产品,为客户提供专业对讲、多媒体对讲和可视化调度功能。主要功能包含:

  • 频道与会话
  • 多频道对讲、监听、锁定、强拆
  • 音视频单人、多人呼叫、呼叫调度台
  • 图片、视频上报
  • 视频回传、监看
  • 位置回传
  • 即时消息:文字消息、语音消息、图片消息、视频消息、文件消息、位置消息
  • 文字广播、媒体广播
  • 监控、录像服务、调度台。

功能体验

  • 快对讲官网
  • 快对讲移动端下载
  • 快对讲调度管理平台

iOS 技术分享 | iOS 快对讲调度场景实现

场景功能实现

一、对讲

效果预览

iOS 技术分享 | iOS 快对讲调度场景实现

部分代码实现
/**
 anyRTC cloud-platform talk channel. Exposes push-to-talk, talk level,
 audio quality, muting, and broadcast-stream controls for one channel.
 */
__attribute__((visibility("default"))) @interface ARTalkChannel: NSObject
/// Delegate receiving runtime channel event callbacks. See ARTalkChannelDelegate.
@property (nonatomic, weak, nullable) id<ARTalkChannelDelegate> channelDelegate;
/// Join the channel.
/// - Parameter completionBlock A user may join at most 20 channels at once; exceeding the limit reports error code ARTalkJoinChannelErrorExceedLimit.
- (void)joinWithCompletion:(ARTalkJoinChannelBlock _Nullable)completionBlock;
/// Leave the channel.
/// - Parameter completionBlock ARTalkLeaveChannelBlock callback returning the result of this call.
- (void)leaveWithCompletion:(ARTalkLeaveChannelBlock _Nullable)completionBlock;
/// Set the local user's talk level.
/// - Parameter level Speaking priority; 0 is the highest, larger values mean lower priority.
- (int)setLevel:(int)level;
/// Get the local user's talk level.
/// - returns The current talk level.
- (int)getLevel;
/// Set the published (push) audio quality.
/// - Parameter nQuality: 1-5 — low, medium, high, super high, HD; default is 1.
- (int)setPushAudioQuality:(int)nQuality;
/// Set the subscribed (pull) audio quality. Not available yet.
/// - Parameter nQuality: 1-5 — low, medium, high, super high, HD; default is 1.
- (int)setPullAudioQuality:(int)nQuality;
/// Start talking (push-to-talk).
/// - Parameter nTalkOnTime: Maximum talk duration; 0 means unlimited.
/// - returns 0 on success, a negative value on failure.
- (int)pushToTalk:(int)nTalkOnTime;
/// Stop talking.
/// - returns 0 on success, a negative value on failure.
- (int)stopPushToTalk;
/// Mute or unmute all other audio in the channel.
/// - Parameter mute: mute YES mutes, NO unmutes.
/// - returns 0 on success, a negative value on failure.
- (int)muteAllRemoteAudio:(BOOL)mute;
/// Interrupt the current talker.
/// - returns 0 on success, a negative value on failure.
- (int)breakTalk;
/// Enable or disable receiving the broadcast stream.
/// - Parameter enable: YES to receive, NO to ignore.
- (int)enableAudioStream:(BOOL)enable;
/// Get the channel ID.
- (NSString *)getChannelId;
@end

二、频道与会话

效果预览

iOS 技术分享 | iOS 快对讲调度场景实现

部分代码实现
class ARMainViewCellData: ARUICommonCellData {
    /// Group id
    var groupId: String = ""
    /// Whether monitoring is enabled for this group (server-side flag)
    var isMonitor: Bool = false
    /// Current runtime monitoring state
    var monitorStatus: Bool = false
    /// Group display name
    var groupName: String = ""
    /// Group type (0: group (channel), 1: temporary group (session))
    var groupType: Int = 1
    /// Number of members currently online
    var onlineCount: NSInteger = 0
    /// Total member count
    var memberCount: NSInteger = 0
    /// Local user's permission in the group (default 0; 0: high, 1: medium, 2: low, 3: listen-only)
    var permission: NSInteger = 0
    /// Whether the local user owns the group
    var isOwner: Bool = false
    /// Backing channel item from the server
    var item: ARCahnnelItem!
    /// Talk status text (higher display priority)
    var talkStatus: String?
    /// Broadcast status text (lower display priority)
    var broadCastStatus: String?
    /// Maximum speaking duration per member in seconds (default 60; 0 means unlimited)
    var groupMaxSpeak: NSInteger = 60
    /// Maximum queue size (default 0: no queue, 1: 5 people, 2: 10 people)
    var groupMaxQueue: NSInteger = 0
    /// Group description text
    var groupDesc: String = ""
    /// Group grade (default 0; 0: low, 1: medium, 2: high)
    var groupGrade: NSInteger = 0
    /// IM-side group id
    var groupImId: NSInteger = 0
    /// Latest talk-state notification for this group
    var userDataNofi: ARTalkUserDataNofi?
    /// Unread message count
    var unReadNum: NSInteger = 0
    weak var delegate: ARMainViewCellDataDelegate?
    weak var dataSource: ARMainViewCellDataSource?
    /// Builds a cell model from a server channel item, deriving ownership from
    /// the local user id and the unread count from the IM message store.
    class func getCellData(item: ARCahnnelItem) -> ARMainViewCellData {
        let cellData = ARMainViewCellData()
        cellData.groupId = item.groupId
        cellData.groupImId = item.groupImId
        cellData.isMonitor = (item.isMonitor == 1)
        cellData.groupName = item.groupName
        cellData.groupType = item.groupType
        cellData.onlineCount = item.onlineCount
        cellData.memberCount = item.memberCount
        cellData.permission = item.permission
        cellData.isOwner = (item.ownerId == localUserData.uId)
        cellData.groupMaxSpeak = item.groupMaxSpeak
        cellData.groupMaxQueue = item.groupMaxQueue
        cellData.groupDesc = item.groupDesc
        cellData.groupGrade = item.groupGrade
        cellData.item = item
        cellData.unReadNum = ARIMMessage.queryGroupUnRead(recvId: UInt64(cellData.groupImId))
        return cellData
    }
    /// Toggles the monitoring state of this group.
    /// Channels (groupType == 0) persist the change through the REST API AND the SDK;
    /// sessions go through the SDK only.
    /// - Parameters:
    ///   - monitor: desired monitoring state
    ///   - result: called with the SDK result code (0 on success)
    func updateMonitorStatus(monitor: Bool, result: @escaping (_ code: NSInteger) -> Void) {
        debugPrint("ARUITalking - updateMonitorStatus monitor = \(monitor) monitorStatus = \(monitorStatus)")
        // NOTE(review): when the state is unchanged this returns WITHOUT invoking
        // `result` — callers must not rely on the callback always firing.
        guard monitor != monitorStatus else { return }
        if groupType == 0 {
            // Channels also persist the monitor flag server-side; the API result is
            // only logged and does not gate the SDK call below.
            ARNetWorkManager.shared.updateGroupMonitorStatus(groupId: groupId, isMonitor: monitor ? 1 : 0) {
                print("ARUITalking - updateGroupMonitorStatus api sucess")
            } failed: { _ in
                print("ARUITalking - updateGroupMonitorStatus api failed")
            }
        }
        if !monitor && groupId == ARTalkManager.shared.lockCellData?.groupId {
            // Turning monitoring off on the currently locked channel releases the lock.
            ARTalkManager.shared.lockChannel(data: nil)
        }
        if ARTalkManager.shared.lockCellData != nil && ARTalkManager.shared.lockCellData?.groupId != groupId && monitor {
            // Another channel holds the lock: mute this channel's remote audio —
            // presumably so the locked channel keeps audio focus (TODO confirm).
            ARTalkManager.shared.getTalkChannel(channelId: groupId).muteAllRemoteAudio(true)
        }
        // SDK-side toggle; local state is updated only on success (code == 0).
        ARTalkManager.shared.monitorChannel(channelId: groupId, grpImId: groupImId, isMonitor: monitor) { [weak self] code in
            debugPrint("ARUITalking - updateMonitorStatus sdk result = \(code)")
            guard let self = self else { return }
            if code == 0 {
                self.item.isMonitor = monitor ? 1 : 0
                self.isMonitor = monitor
                self.monitorStatus = monitor
                if self.delegate != nil {
                    self.delegate?.onMonitorStatusChange()
                }
            }
            result(code)
        }
    }
}

三、音视频通话

效果预览

iOS 技术分享 | iOS 快对讲调度场景实现

部分代码实现
/// Entry point for audio/video calling. Singleton facade over call setup,
/// ringtones, mute/float-window options, and RTC token authentication.
@interface ARUICalling : NSObject
/// Basic configuration.
@property(nonatomic, strong) ARUIConfiguration *config;
+ (instancetype)shareInstance;
/// Start a call.
/// @param users User information for the callees.
/// @param type Call type: video or audio.
- (void)call:(NSArray<ARCallUser *> *)users type:(ARUICallingType)type NS_SWIFT_NAME(call(users:type:));
/// Start a call (for a custom channel id and/or a token).
/// @param users User information for the callees.
/// @param type Call type: video or audio.
/// @param chanId Channel id.
/// @param token RTC token (dynamic key). [1] Low security requirement: pass nil. [2] High security requirement: pass the Token — if App Certificate is enabled the Token is mandatory. [3] The App ID used to generate the Token must be the same App ID passed to ARUILogin initWithSdkAppID.
- (void)call:(NSArray<ARCallUser *> *)users type:(ARUICallingType)type chanId:(NSString *_Nullable)chanId token:(NSString *_Nullable)token NS_SWIFT_NAME(call(users:type:chanId:token:));
/// Register the calling event listener.
/// @param listener Callback listener.
- (void)setCallingListener:(id<ARUICallingListerner>)listener NS_SWIFT_NAME(setCallingListener(listener:));
/// Set the ringtone; keep it under 30s. Only local audio files are supported.
/// @param filePath Path of the audio file.
- (void)setCallingBell:(NSString *)filePath NS_SWIFT_NAME(setCallingBell(filePath:));
/// Enable mute mode (off by default).
- (void)enableMuteMode:(BOOL)enable NS_SWIFT_NAME(enableMuteMode(enable:));
/// Enable the floating window (off by default).
- (void)enableFloatWindow:(BOOL)enable NS_SWIFT_NAME(enableFloatWindow(enable:));
/// Enable custom view routing (off by default).
/// @param enable When enabled, the onStart callback delivers the corresponding ViewController object so the app can decide how to present it.
- (void)enableCustomViewRoute:(BOOL)enable NS_SWIFT_NAME(enableCustomViewRoute(enable:));
/// RTC token authentication (call when the onCallInvitedByToken callback fires).
/// @param token Security token.
- (void)updateCallingToken:(NSString *_Nonnull)token NS_SWIFT_NAME(updateCallingToken(token:));
@end

四、视频回传、监看

效果预览

iOS 技术分享 | iOS 快对讲调度场景实现

部分代码实现
    /// Creates and configures the RTC engine for video pass-back:
    /// live-broadcasting profile, broadcaster role, rear camera, and the
    /// locally configured resolution/frame rate; then attaches the local preview.
    func initializeEngine() {
        // init ARtcEngineKit
        rtcEngine = ARtcEngineKit.sharedEngine(withAppId: ARUILogin.getSdkAppID(), delegate: self)
        let rtcConfig = ARCoreConfig.default().rtcConfig
        if (rtcConfig?.addr.count != 0 && rtcConfig?.port != 0) {
            // Private-cloud deployment: point the engine at the configured server.
            let dic: NSDictionary = ["Cmd": "ConfPriCloudAddr", "ServerAdd": rtcConfig?.addr as Any, "Port": rtcConfig?.port as Any] as NSDictionary
            rtcEngine.setParameters(toJSONString(dict: dic))
        }
        rtcEngine.setChannelProfile(.liveBroadcasting)
        // Volume callbacks every 2000 ms, smoothed over 3 samples, with voice detection.
        rtcEngine.enableAudioVolumeIndication(2000, smooth: 3, report_vad: true)
        rtcEngine.setClientRole(.broadcaster)
        let configuration = ARCameraCapturerConfiguration()
        configuration.cameraDirection = .rear
        rtcEngine.setCameraCapturerConfiguration(configuration)
        rtcEngine.enableVideo()
        let fpsValue = ARUIVideoFrameRateFps(rawValue: localConfig.videoFps)?.fps()
        // NOTE(review): the dimension lookup is force-unwrapped — this crashes if
        // localConfig.videoSize has no matching ARUIVideoDimension case; confirm the
        // stored config values are always valid.
        rtcEngine.setVideoEncoderConfiguration(ARVideoEncoderConfiguration(size: (ARUIVideoDimension(rawValue: localConfig.videoSize)?.dimension())!, frameRate: ARVideoFrameRate(rawValue: fpsValue ?? 15)!, bitrate: 500, orientationMode: .adaptative))
        setLocalVideoRender(render: view)
    }
    /// Attaches the local camera feed to `render` (hidden fill mode) and
    /// starts the camera preview.
    func setLocalVideoRender(render: UIView) {
        let canvas = ARtcVideoCanvas()
        canvas.view = render
        canvas.renderMode = .hidden
        rtcEngine.setupLocalVideo(canvas)
        rtcEngine.startPreview()
    }
    /// Joins the RTC channel for video pass-back.
    /// - Parameters:
    ///   - token: RTC token; nil when token authentication is disabled.
    ///   - expire: true when rejoining after a token refresh, in which case the
    ///     timer/reporting pipeline is already running and must not be restarted.
    func joinChannel(token: String?, expire: Bool) {
        rtcEngine.joinChannel(byToken: token, channelId: channelId, uid: userInfo.uId) { [weak self] channel, uid, elapsed in
            guard let self = self else { return }
            if !expire {
                // First join: start the stats timer and the reporting loop.
                self.startTimer()
                self.startReport()
            }
            // Fixed log typo ("sucess" -> "success").
            debugPrint("ARUIPassBack - joinChannel success")
        }
    }

五、图片、视频上报

iOS 技术分享 | iOS 快对讲调度场景实现

部分代码实现
/// Callbacks for media-report uploads; each item is identified by `identification`.
@objc protocol ARReportUploadManagerDelegate: NSObjectProtocol {
    /// Upload progress (0.0 - 1.0 presumably; verify against the upload manager).
    @objc optional func onReportDataProgress(progress: CGFloat, identification: String)
    /// Upload finished successfully.
    @objc optional func onReportDataSucess(identification: String)
    /// Upload failed.
    @objc optional func onReportDataFailure(identification: String)
}
    /// Requests a 100x100 thumbnail for `asset` and, once the full-quality
    /// (non-degraded) result arrives, appends it to the report data source.
    func fetchImage(for asset: PHAsset) {
        let option = PHImageRequestOptions()
        option.resizeMode = .fast
        // Allow iCloud downloads so assets not stored locally still resolve.
        option.isNetworkAccessAllowed = true
        PHImageManager.default().requestImage(for: asset, targetSize: CGSize(width: 100, height: 100), contentMode: .aspectFill, options: option) { [weak self] image, info in
            var downloadFinished = false
            if let info = info {
                // Finished means: not cancelled and no error reported.
                downloadFinished = !(info[PHImageCancelledKey] as? Bool ?? false) && (info[PHImageErrorKey] == nil)
            }
            // The handler may fire twice; skip the low-quality (degraded) pass.
            let isDegraded = (info?[PHImageResultIsDegradedKey] as? Bool ?? false)
            if downloadFinished, !isDegraded {
                // Bind `image` instead of force-unwrapping: a "finished" request can
                // still deliver a nil image in rare failure cases, which previously crashed.
                guard let self = self, let image = image else { return }
                self.addNewData(images: [image], assets: [asset])
            }
        }
    }
    /// Recomputes the total original and compressed sizes of the report
    /// attachments and notifies the delegate. A video entry (type == 2)
    /// takes precedence: its sizes replace any accumulated totals.
    func calculateFileSize() {
        var totalSize = 0
        var compressedTotal = 0
        if let video = reportInfo.detail.first(where: { $0.type == 2 }) {
            // Matches the original break-on-first-video behavior.
            totalSize = video.size
            compressedTotal = video.compressSize
        } else {
            for entry in reportInfo.detail {
                totalSize += entry.size
                compressedTotal += entry.compressSize
            }
        }
        delegate?.onReportDataSourceChange?(fileSize: "\(formatLength(length: totalSize))", compressSize: "\(formatLength(length: compressedTotal))", enable: assets.count != 0)
    }

六、方位回传

iOS 技术分享 | iOS 快对讲调度场景实现

部分代码实现
extension ARUIShareLocationController: MAMapViewDelegate {
    /// Supplies the annotation view (marker + custom callout) for each map annotation.
    func mapView(_ mapView: MAMapView!, viewFor annotation: MAAnnotation!) -> MAAnnotationView! {
        if annotation is MAPointAnnotation {
            let customReuseIndetifier: String = "annotationReuseIndetifier"
            var annotationView = mapView.dequeueReusableAnnotationView(withIdentifier: customReuseIndetifier) as? ARUICustomAnnotationView
            if annotationView == nil {
                annotationView = ARUICustomAnnotationView(annotation: annotation, reuseIdentifier: customReuseIndetifier)
                annotationView?.image = UIImage(named: "icon_direction")
                // Disable the system callout so the custom callout view is used instead.
                annotationView?.canShowCallout = false
                // Offset the center so the bottom-middle of the marker sits on the coordinate.
                annotationView?.centerOffset = CGPoint(x: 0, y: -18)
                annotationView?.isDraggable = false
            }
            if annotation.isKind(of: ARUIMapAnnotation.self) {
                // Remote user marker: show avatar and nickname.
                let mapAnnotation = annotation as! ARUIMapAnnotation
                annotationView?.setHeadUrl(url: mapAnnotation.faceUrl)
                annotationView?.setNickName(text: mapAnnotation.nickName)
            }
            if annotation.isKind(of: MAUserLocation.self) {
                // Local user marker: keep references for heading updates below.
                localAnnotationView = annotationView
                localAnnotationView?.setHeadUrl(url: localUserData.faceUrl)
                localAnnotationView?.setNickName(text: localUserData.nickName)
                localLocation = annotation as? MAUserLocation
            }
            return annotationView
        }
        return nil
    }
    /// Rotates the local marker to follow the device heading, compensating for
    /// the current map rotation.
    func mapView(_ mapView: MAMapView!, didUpdate userLocation: MAUserLocation!, updatingLocation: Bool) {
        // updatingLocation == false: this callback carries a heading-only update.
        if (!updatingLocation && localAnnotationView != nil) {
            UIView.animate(withDuration: 0.1) {
                let degree = userLocation.heading.trueHeading - self.mapView.rotationDegree
                self.localAnnotationView?.imageView.transform = CGAffineTransform(rotationAngle: degree * Double.pi / 180.0)
            }
        }
    }
}

七、即时消息

iOS 技术分享 | iOS 快对讲调度场景实现

部分代码实现
/// Message content type (wire values 100+ for elements, 200+ for routing).
enum ARIMElemType: NSInteger, Codable, HandyJSONEnum {
    /// Unknown message
    case none = 100
    /// Text message
    case text = 101
    /// Picture message
    case picture = 102
    /// Voice message
    case voice = 103
    /// Video message
    case video = 104
    /// Push-to-talk voice
    case pushtotalk = 105
    /// File message
    case file = 106
    /// Text message with @-mentions
    case atText = 107
    /// Merged/forwarded message
    case merger = 108
    /// Contact card message
    case card = 109
    /// Geographic location message
    case location = 110
    /// Custom message
    case custom = 111
    /// Message recall
    case revoke = 112
    /// Read receipt
    case hasReadReceipt = 113
    /// Typing indicator
    case typing = 114
    /// Quoted reply
    case quote = 115
    /// Common (non-chat) message
    case common = 200
    /// Group system message
    case groupMsg = 201
}
/// Message delivery/lifecycle status.
enum ARIMMessageStatus: NSInteger, Codable, HandyJSONEnum {
    /// Send in progress
    case sending = 1
    /// Sent successfully
    case sendSuccess = 2
    /// Send failed
    case sendFailed = 3
    /// Deleted locally
    case hasDeleted = 4
    /// Recalled by the sender
    case revoked = 5
    /// Filtered out (e.g. by content rules — TODO confirm)
    case filtered = 6
}
/// IM message record, deserialized from the wire (HandyJSON) and persisted (GRDB).
/// Example payload:
///"sendId":3578970541,"recvId":2723541307,"clientMsgId":"1653488317610","sessionType":1,"contentType":101,"content":"IM音讯","curSeq":1011,"sendTime":1653488317,"status":2
struct ARIMMessage: HandyJSON, PersistableRecord {
    /// Sequence number
    var curSeq: UInt64!
    /// Message id
    var clientMsgId: String!
    /// Sender id
    var sendId: UInt64!
    /// Receiver id
    var recvId: UInt64!
    /// Session type (1: one-to-one, 2: group)
    var sessionType: ARIMSessionType = .c2c
    /// Content type; see ARIMElemType
    var contentType: ARIMElemType = .none
    /// Message content (base64-encoded JSON for media elements)
    var content: String = ""
    /// Delivery status
    var status: ARIMMessageStatus?
    /// Send time — declared as milliseconds, though the sample payload above
    /// looks like seconds; verify against the server.
    var sendTime: Int?
    /// Sender platform type
    var senderPlatformId: ARIMPlatform?
    /// Extra attached data
    var ex: String?
    /// Sync message id
    var syncMsgId: String = ""
    /// Local save time (seconds)
    var createTime: Int = 0
    /// Owning conversation id
    var msgImId: Int = 0
    /// Read state; unread by default
    var isRead: Bool = false
    /// Played state (voice/PTT read-unread tracking); unplayed by default
    var isPlay: Bool = false
    /////////////////////////////////////////////////////////////////////////////////
    //  Custom message element payloads (decoded from `content`)
    /////////////////////////////////////////////////////////////////////////////////
    /// Picture message element
    var imageElem: ARUIImageElem?
    /// Video message element
    var videoElem: ARUIVideoElem?
    /// Voice message element
    var soundElem: ARUISoundElem?
    /// File message element
    var fileElem: ARUIFileElem?
    /// Location message element
    var locationElem: ARUILocationElem?
}
extension ARIMMessage {
    /// Decodes the picture message element from `content`.
    /// `imageList` is left nil when deserialization fails.
    func getImageElem() -> ARUIImageElem {
        var elem = ARUIImageElem()
        var jsonStr = content.base64Decoded()
        // Backward compatibility with messages sent before base64 encoding:
        // presumably base64Decoded() yields this placeholder string on decode
        // failure — verify against its implementation.
        if content.base64Decoded() == "【不支撑的音讯类型】" {
            jsonStr = content
        }
        elem.imageList = JSONDeserializer<ARUIImageItem>.deserializeModelArrayFrom(json: jsonStr) as? [ARUIImageItem]
        return elem
    }
    /// Decodes the video message element.
    /// NOTE(review): force-unwraps the result — crashes on malformed content.
    func getVideoElem() -> ARUIVideoElem {
        let elem = JSONDeserializer<ARUIVideoElem>.deserializeFrom(json: content.base64Decoded())
        return elem!
    }
    /// Decodes the voice message element.
    /// NOTE(review): force-unwraps the result — crashes on malformed content.
    func getSoundElem() -> ARUISoundElem {
        let elem = JSONDeserializer<ARUISoundElem>.deserializeFrom(json: content.base64Decoded())
        return elem!
    }
    /// Decodes the file message element.
    /// NOTE(review): force-unwraps the result — crashes on malformed content.
    func getFileElem() -> ARUIFileElem {
        let elem = JSONDeserializer<ARUIFileElem>.deserializeFrom(json: content.base64Decoded())
        return elem!
    }
    /// Decodes the location message element.
    /// NOTE(review): force-unwraps the result — crashes on malformed content.
    func getLocationElem() -> ARUILocationElem {
        let elem = JSONDeserializer<ARUILocationElem>.deserializeFrom(json: content.base64Decoded())
        return elem!
    }
}

八、文字广播、媒体广播

iOS 技术分享 | iOS 快对讲调度场景实现

部分代码实现
/// Runtime event callbacks for a talk channel: broadcast stream on/off,
/// local push-to-talk results, and other users' talk state changes.
@protocol ARTalkChannelDelegate <NSObject>
@optional
/// A broadcast stream started.
/// @param channel The channel this event belongs to. See ARTalkChannel.
/// @param uid User id of the broadcaster.
/// @param userData Custom user data.
- (void)channel:(ARTalkChannel * _Nonnull)channel userStreamOn:(NSString *)uid userData: (NSString * _Nullable)userData;
/// A broadcast stream stopped.
/// @param channel The channel this event belongs to. See ARTalkChannel.
/// @param uid User id of the broadcaster.
/// @param userData Custom user data.
- (void)channel:(ARTalkChannel * _Nonnull)channel userStreamOff:(NSString *)uid userData: (NSString * _Nullable)userData;
/// Result of the local pushToTalk call.
/// @param channel The channel this event belongs to. See ARTalkChannel.
/// @param code Error code.
- (void)channel:(ARTalkChannel * _Nonnull)channel pushToTalkResult:(ARTalkPushToTalkErrorCode)code;
/// The local push-to-talk session ended.
/// @param channel The channel this event belongs to. See ARTalkChannel.
/// @param code Error code.
- (void)channel:(ARTalkChannel * _Nonnull)channel pushToTalkEnded:(ARTalkPushToTalkEndErrorCode)code;
/// Another user started talking.
/// @param channel The channel this event belongs to. See ARTalkChannel.
/// @param uid User id.
/// @param userData Custom user data.
/// @param level The user's talk level.
- (void)channel:(ARTalkChannel * _Nonnull)channel userIsTalkOn:(NSString *)uid userData: (NSString * _Nullable)userData userLevel:(NSInteger)level;
/// Another user stopped talking.
/// @param channel The channel this event belongs to. See ARTalkChannel.
/// @param uid User id.
/// @param userData Custom user data.
- (void)channel:(ARTalkChannel * _Nonnull)channel userIsTalkOff:(NSString *)uid userData: (NSString * _Nullable)userData;
@end

快对讲 iOS 端基础库

platform :ios, '11.0'
use_frameworks!
target 'ARUITalking' do
    # anyRTC audio/video (RTC) SDK
    pod 'ARtcKit_iOS', '~> 4.3.0.3'
    # anyRTC real-time messaging (RTM) SDK
    pod 'ARtmKit_iOS', '~> 1.1.0.1'
    # anyRTC push-to-talk SDK
    pod 'ARTalkKit_iOS'
end

以上便是快对讲 iOS 端的基本功能实现。快对讲调度系统将语音、视频、图像、文本消息等信息高度融合于一体,构建综合指挥调度业务,不仅实现企业通讯的数字信息化、通过高效协作提升企业整体形象,也能满足紧急救援、应急决策等要求,达到统一指挥、联合行动的目的。开发者可以基于 ARUICalling 音视频通话开源组件,开发自己的专属快对讲调度系统。

iOS 技术分享 | iOS 快对讲调度场景实现