iOS Tech Share | Implementing a Quick PTT (快对讲) Dispatch Scenario on iOS
Author: anyRTC开发者
2023-01-04, Shanghai
Word count: 9,937
Estimated reading time: about 33 minutes
Preface
"Quick PTT" (快对讲) is an intercom product built on anyRTC audio/video technology. It provides customers with professional push-to-talk, multimedia intercom, and visual dispatching. Its main features include:
Channels and sessions
Multi-channel PTT, monitoring, channel locking, and forced break-in
One-to-one and multi-party audio/video calls, and calls to the dispatch console
Image and video reporting
Video return feed and live monitoring
Location reporting
Instant messaging: text, voice, image, video, file, and location messages
Text broadcast and media broadcast
Surveillance, recording service, and dispatch console.
Feature Demo
Implementing the Scenario Features
1. Push-to-Talk
Effect preview
Partial code implementation
/**
 anyRTC cloud PTT channel methods.
 */
__attribute__((visibility("default"))) @interface ARTalkChannel: NSObject

/// The ARTalkChannelDelegate interface sends callback notifications to the app and reports runtime channel events.
@property (nonatomic, weak, nullable) id<ARTalkChannelDelegate> channelDelegate;

/// Join the channel.
/// - Parameter completionBlock A user can be in at most 20 channels at the same time. When the limit is exceeded, the user receives the error code ARTalkJoinChannelErrorExceedLimit.
- (void)joinWithCompletion:(ARTalkJoinChannelBlock _Nullable)completionBlock;

/// Leave the channel.
/// - Parameter completionBlock The ARTalkLeaveChannelBlock callback returns the result of this call.
- (void)leaveWithCompletion:(ARTalkLeaveChannelBlock _Nullable)completionBlock;

/// Set your own talk level.
/// - Parameter level Speaking level; 0 is the highest, and a larger value means a lower level.
- (int)setLevel:(int)level;

/// Get your own talk level.
/// - returns The talk level.
- (int)getLevel;

/// Set the audio quality for publishing.
/// - Parameter nQuality: 1-5 for low, medium, high, ultra-high, and HD; the default is 1.
- (int)setPushAudioQuality:(int)nQuality;

/// Set the audio quality for subscribing (not available yet).
/// - Parameter nQuality: 1-5 for low, medium, high, ultra-high, and HD; the default is 1.
- (int)setPullAudioQuality:(int)nQuality;

/// Start talking.
/// - Parameter nTalkOnTime: Maximum talk duration in seconds; 0 means unlimited.
/// - returns 0 if the call succeeds; a value less than 0 if it fails.
- (int)pushToTalk:(int)nTalkOnTime;

/// Stop talking.
/// - returns 0 if the call succeeds; a value less than 0 if it fails.
- (int)stopPushToTalk;

/// Whether to receive audio from other members of the channel.
/// - Parameter mute: true to mute, false to unmute.
/// - returns 0 if the call succeeds; a value less than 0 if it fails.
- (int)muteAllRemoteAudio:(BOOL)mute;

/// Interrupt the current talker.
/// - returns 0 if the call succeeds; a value less than 0 if it fails.
- (int)breakTalk;

/// Whether to receive the broadcast stream.
/// - Parameter enable: YES to receive, NO to ignore.
- (int)enableAudioStream:(BOOL)enable;

/// Get the channel ID.
- (NSString *)getChannelId;
@end
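To make the half-duplex flow concrete, here is a minimal press-and-release sketch built only on the interface above. It assumes the channel has already been created and joined elsewhere (for example obtained through ARTalkManager.shared.getTalkChannel(channelId:) as used in the next section, then joined with joinWithCompletion:), that the Swift method names follow the default Objective-C import, and that quality value 3 is a reasonable pick from the 1-5 scale documented above.

// A minimal PTT press/release sketch, assuming `channel` is a joined ARTalkChannel
// whose channelDelegate has already been set.
func talkButtonPressed(on channel: ARTalkChannel) {
    _ = channel.setLevel(0)                // 0 is the highest speaking level
    _ = channel.setPushAudioQuality(3)     // assumed "high" on the 1-5 scale above
    let ret = channel.pushToTalk(60)       // talk for at most 60 s; 0 would mean unlimited
    if ret != 0 { debugPrint("pushToTalk failed, code = \(ret)") }
}

func talkButtonReleased(on channel: ARTalkChannel) {
    _ = channel.stopPushToTalk()           // the end of the talk is also reported via pushToTalkEnded:
}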
2. Channels and Sessions
Effect preview
Partial code implementation
class ARMainViewCellData: ARUICommonCellData {
/// Group id
var groupId: String = ""
/// Whether monitoring is enabled
var isMonitor: Bool = false
/// Monitoring state
var monitorStatus: Bool = false
var groupName: String = ""
/// Group type (0: group (channel), 1: temporary group (session))
var groupType: Int = 1
/// Number of online members
var onlineCount: NSInteger = 0
/// Total number of members
var memberCount: NSInteger = 0
/// The user's permission in the group (default 0; 0: high, 1: medium, 2: low, 3: listen only)
var permission: NSInteger = 0
var isOwner: Bool = false
var item: ARCahnnelItem!
/// Talk state (higher priority)
var talkStatus: String?
/// Broadcast state (lower priority)
var broadCastStatus: String?
/// Maximum speaking duration in the group (seconds); default 60 s, 0 means unlimited
var groupMaxSpeak: NSInteger = 60
/// Maximum queue length (default 0: no queue, 1: 5 people, 2: 10 people)
var groupMaxQueue: NSInteger = 0
var groupDesc: String = ""
/// Group level (default 0; 0: low, 1: medium, 2: high)
var groupGrade: NSInteger = 0
var groupImId: NSInteger = 0
/// Talk state
var userDataNofi: ARTalkUserDataNofi?
/// Unread count
var unReadNum: NSInteger = 0
weak var delegate: ARMainViewCellDataDelegate?
weak var dataSource: ARMainViewCellDataSource?
class func getCellData(item: ARCahnnelItem) -> ARMainViewCellData {
let cellData = ARMainViewCellData()
cellData.groupId = item.groupId
cellData.groupImId = item.groupImId
cellData.isMonitor = (item.isMonitor == 1)
cellData.groupName = item.groupName
cellData.groupType = item.groupType
cellData.onlineCount = item.onlineCount
cellData.memberCount = item.memberCount
cellData.permission = item.permission
cellData.isOwner = (item.ownerId == localUserData.uId)
cellData.groupMaxSpeak = item.groupMaxSpeak
cellData.groupMaxQueue = item.groupMaxQueue
cellData.groupDesc = item.groupDesc
cellData.groupGrade = item.groupGrade
cellData.item = item
cellData.unReadNum = ARIMMessage.queryGroupUnRead(recvId: UInt64(cellData.groupImId))
return cellData
}
func updateMonitorStatus(monitor: Bool, result: @escaping (_ code: NSInteger) -> Void) {
/// Update the monitoring state.
/// Channel: business API + SDK; session: SDK only.
debugPrint("ARUITalking - updateMonitorStatus monitor = \(monitor) monitorStatus = \(monitorStatus)")
guard monitor != monitorStatus else { return }
if groupType == 0 {
/// Business API
ARNetWorkManager.shared.updateGroupMonitorStatus(groupId: groupId, isMonitor: monitor ? 1 : 0) {
print("ARUITalking - updateGroupMonitorStatus api sucess")
} failed: { _ in
print("ARUITalking - updateGroupMonitorStatus api failed")
}
}
if !monitor && groupId == ARTalkManager.shared.lockCellData?.groupId {
/// Unlock the channel
ARTalkManager.shared.lockChannel(data: nil)
}
if ARTalkManager.shared.lockCellData != nil && ARTalkManager.shared.lockCellData?.groupId != groupId && monitor {
ARTalkManager.shared.getTalkChannel(channelId: groupId).muteAllRemoteAudio(true)
}
/// SDK call
ARTalkManager.shared.monitorChannel(channelId: groupId, grpImId: groupImId, isMonitor: monitor) { [weak self] code in
debugPrint("ARUITalking - updateMonitorStatus sdk result = \(code)")
guard let self = self else { return }
if code == 0 {
self.item.isMonitor = monitor ? 1 : 0
self.isMonitor = monitor
self.monitorStatus = monitor
if self.delegate != nil {
self.delegate?.onMonitorStatusChange()
}
}
result(code)
}
}
}
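As a usage sketch (the channelItems array and the tap handler below are placeholders, not part of the component), a channel list page would map the items returned by the business API into cell models and flip monitoring when the user taps the monitor switch:

// Build the cell models for the channel list (channelItems is a placeholder array of ARCahnnelItem).
let cellDataList = channelItems.map { ARMainViewCellData.getCellData(item: $0) }

// Toggle monitoring for one row; the closure receives 0 on success.
func didTapMonitorSwitch(on cellData: ARMainViewCellData) {
    cellData.updateMonitorStatus(monitor: !cellData.isMonitor) { code in
        if code != 0 {
            debugPrint("ARUITalking - toggle monitor failed, code = \(code)")
        }
    }
}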
3. Audio/Video Calls
Effect preview
Partial code implementation
@interface ARUICalling : NSObject
/// Basic configuration
@property(nonatomic, strong) ARUIConfiguration *config;
+ (instancetype)shareInstance;
/// Start a call.
/// @param users User info
/// @param type Call type: video/audio
- (void)call:(NSArray<ARCallUser *> *)users type:(ARUICallingType)type NS_SWIFT_NAME(call(users:type:));
/// Start a call (with a custom chanId or a token).
/// @param users User info
/// @param type Call type: video/audio
/// @param chanId Channel id
/// @param token RTC token (dynamic key). (1) For low security requirements, pass nil. (2) For high security requirements, pass a token; if you have enabled the App Certificate, you must use a token. (3) Make sure the App ID used to generate the token is the same App ID passed to ARUILogin initWithSdkAppID.
- (void)call:(NSArray<ARCallUser *> *)users type:(ARUICallingType)type chanId:(NSString *_Nullable)chanId token:(NSString *_Nullable)token NS_SWIFT_NAME(call(users:type:chanId:token:));
/// Set the call event listener.
/// @param listener The listener
- (void)setCallingListener:(id<ARUICallingListerner>)listener NS_SWIFT_NAME(setCallingListener(listener:));
/// Set the ringtone; keep it within 30 s, only local audio files are supported.
/// @param filePath Path of the audio file
- (void)setCallingBell:(NSString *)filePath NS_SWIFT_NAME(setCallingBell(filePath:));
/// Enable mute mode (off by default).
- (void)enableMuteMode:(BOOL)enable NS_SWIFT_NAME(enableMuteMode(enable:));
/// Enable the floating window (off by default).
- (void)enableFloatWindow:(BOOL)enable NS_SWIFT_NAME(enableFloatWindow(enable:));
/// Enable custom view routing (off by default).
/// @param enable When enabled, the onStart callback delivers the corresponding view controller so you can decide how to present it.
- (void)enableCustomViewRoute:(BOOL)enable NS_SWIFT_NAME(enableCustomViewRoute(enable:));
/// RTC token authentication (call this when the onCallInvitedByToken callback is received).
/// @param token The security token
- (void)updateCallingToken:(NSString *_Nonnull)token NS_SWIFT_NAME(updateCallingToken(token:));
@end
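Below is a hedged sketch of starting a call from the dispatch side. The ARCallUser setup and the .video case of ARUICallingType are assumptions made for illustration, so check the component's generated interface for the exact names.

// A hypothetical dispatch-side helper; the surrounding view controller and the
// fields on ARCallUser are placeholders.
func startVideoCall(to callee: ARCallUser) {
    let calling = ARUICalling.shareInstance()
    calling.setCallingListener(listener: self)   // self conforms to ARUICallingListerner
    calling.enableFloatWindow(enable: true)      // allow shrinking the call UI into a floating window
    calling.call(users: [callee], type: .video)  // .video is an assumed ARUICallingType case
    // When token authentication is enabled, use the chanId/token variant instead and
    // answer the onCallInvitedByToken callback with updateCallingToken(token:).
}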
4. Video Return Feed and Monitoring
Effect preview
Partial code implementation
func initializeEngine() {
// init ARtcEngineKit
rtcEngine = ARtcEngineKit.sharedEngine(withAppId: ARUILogin.getSdkAppID(), delegate: self)
let rtcConfig = ARCoreConfig.default().rtcConfig
if (rtcConfig?.addr.count != 0 && rtcConfig?.port != 0) {
/// Configure the private cloud
let dic: NSDictionary = ["Cmd": "ConfPriCloudAddr", "ServerAdd": rtcConfig?.addr as Any, "Port": rtcConfig?.port as Any] as NSDictionary
rtcEngine.setParameters(toJSONString(dict: dic))
}
rtcEngine.setChannelProfile(.liveBroadcasting)
rtcEngine.enableAudioVolumeIndication(2000, smooth: 3, report_vad: true)
rtcEngine.setClientRole(.broadcaster)
let configuration = ARCameraCapturerConfiguration()
configuration.cameraDirection = .rear
rtcEngine.setCameraCapturerConfiguration(configuration)
rtcEngine.enableVideo()
let fpsValue = ARUIVideoFrameRateFps(rawValue: localConfig.videoFps)?.fps()
rtcEngine.setVideoEncoderConfiguration(ARVideoEncoderConfiguration(size: (ARUIVideoDimension(rawValue: localConfig.videoSize)?.dimension())!, frameRate: ARVideoFrameRate(rawValue: fpsValue ?? 15)!, bitrate: 500, orientationMode: .adaptative))
setLocalVideoRender(render: view)
}
func setLocalVideoRender(render: UIView) {
let videoCanvas = ARtcVideoCanvas()
videoCanvas.view = render
videoCanvas.renderMode = .hidden
rtcEngine.setupLocalVideo(videoCanvas)
rtcEngine.startPreview()
}
func joinChannel(token: String?, expire: Bool) {
rtcEngine.joinChannel(byToken: token, channelId: channelId, uid: userInfo.uId) { [weak self] channel, uid, elapsed in
guard let self = self else { return }
if !expire {
self.startTimer()
self.startReport()
}
debugPrint("ARUIPassBack - joinChannel sucess")
}
}
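The `expire` flag on joinChannel(token:expire:) exists so that re-joining after token expiry does not restart the reporting timers. A hedged sketch of that renewal path follows; fetchRtcToken is a hypothetical call to the business server, not part of the SDK.

// Hypothetical handling of RTC token expiry on the reporting device.
func handleRtcTokenExpired() {
    fetchRtcToken(channelId: channelId, uid: userInfo.uId) { [weak self] newToken in
        // Re-join the same channel with the fresh token; `expire: true` skips
        // startTimer()/startReport(), which are already running from the first join.
        self?.joinChannel(token: newToken, expire: true)
    }
}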
5. Image and Video Reporting
Partial code implementation
@objc protocol ARReportUploadManagerDelegate: NSObjectProtocol {
/// Upload progress
@objc optional func onReportDataProgress(progress: CGFloat, identification: String)
/// Upload succeeded
@objc optional func onReportDataSucess(identification: String)
/// Upload failed
@objc optional func onReportDataFailure(identification: String)
}
func fetchImage(for asset: PHAsset) {
let option = PHImageRequestOptions()
option.resizeMode = .fast
option.isNetworkAccessAllowed = true
PHImageManager.default().requestImage(for: asset, targetSize: CGSize(width: 100, height: 100), contentMode: .aspectFill, options: option) { [weak self] image, info in
var downloadFinished = false
if let info = info {
downloadFinished = !(info[PHImageCancelledKey] as? Bool ?? false) && (info[PHImageErrorKey] == nil)
}
let isDegraded = (info?[PHImageResultIsDegradedKey] as? Bool ?? false)
if downloadFinished, !isDegraded, let image = image {
guard let self = self else { return }
self.addNewData(images: [image], assets: [asset])
}
}
}
func calculateFileSize() {
var fileSize = 0
var sandBoxSize = 0
for detail in reportInfo.detail {
if detail.type == 2 {
fileSize = detail.size
sandBoxSize = detail.compressSize
break
} else {
fileSize += detail.size
sandBoxSize += detail.compressSize
}
}
delegate?.onReportDataSourceChange?(fileSize: "\(formatLength(length: fileSize))", compressSize: "\(formatLength(length: sandBoxSize))", enable: assets.count != 0)
}
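On the reporting page, the upload callbacks can be surfaced in the UI roughly as below; ARReportViewController and progressView are placeholder names used only for illustration.

// Placeholder view controller for the report page; progressView is assumed to be
// a UIProgressView on that page.
extension ARReportViewController: ARReportUploadManagerDelegate {
    func onReportDataProgress(progress: CGFloat, identification: String) {
        // Drive the progress bar for the item identified by `identification`.
        progressView.setProgress(Float(progress), animated: true)
    }
    func onReportDataSucess(identification: String) {
        debugPrint("ARUIReport - upload succeeded, id = \(identification)")
    }
    func onReportDataFailure(identification: String) {
        debugPrint("ARUIReport - upload failed, id = \(identification)")
    }
}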
6. Location Reporting
Partial code implementation
extension ARUIShareLocationController: MAMapViewDelegate {
func mapView(_ mapView: MAMapView!, viewFor annotation: MAAnnotation!) -> MAAnnotationView! {
/// Pin annotation and callout bubble
if annotation is MAPointAnnotation {
let customReuseIndetifier: String = "annotationReuseIndetifier"
var annotationView = mapView.dequeueReusableAnnotationView(withIdentifier: customReuseIndetifier) as? ARUICustomAnnotationView
if annotationView == nil {
annotationView = ARUICustomAnnotationView(annotation: annotation, reuseIdentifier: customReuseIndetifier)
annotationView?.image = UIImage(named: "icon_direction")
// Set to false so that the custom callout view is used instead of the default one
annotationView?.canShowCallout = false
// Offset the center so that the bottom-center of the annotation sits on the coordinate
annotationView?.centerOffset = CGPoint(x: 0, y: -18)
annotationView?.isDraggable = false
}
if annotation.isKind(of: ARUIMapAnnotation.self) {
let mapAnnotation = annotation as! ARUIMapAnnotation
annotationView?.setHeadUrl(url: mapAnnotation.faceUrl)
annotationView?.setNickName(text: mapAnnotation.nickName)
}
if annotation.isKind(of: MAUserLocation.self) {
localAnnotationView = annotationView
localAnnotationView?.setHeadUrl(url: localUserData.faceUrl)
localAnnotationView?.setNickName(text: localUserData.nickName)
localLocation = annotation as? MAUserLocation
}
return annotationView
}
return nil
}
func mapView(_ mapView: MAMapView!, didUpdate userLocation: MAUserLocation!, updatingLocation: Bool) {
if (!updatingLocation && localAnnotationView != nil) {
UIView.animate(withDuration: 0.1) {
let degree = userLocation.heading.trueHeading - self.mapView.rotationDegree
self.localAnnotationView?.imageView.transform = CGAffineTransform(rotationAngle: degree * Double.pi / 180.0)
}
}
}
}
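To plot a team member's reported position, it is enough to add an ARUIMapAnnotation (the subclass checked in mapView(_:viewFor:) above) to the map. A sketch inside ARUIShareLocationController; the parameters are placeholders:

// Drop a member's reported position onto the map; mapView(_:viewFor:) above then
// builds the custom annotation view with the avatar and nickname.
func addMemberAnnotation(latitude: Double, longitude: Double, nickName: String, faceUrl: String) {
    let annotation = ARUIMapAnnotation()
    annotation.coordinate = CLLocationCoordinate2D(latitude: latitude, longitude: longitude)
    annotation.nickName = nickName
    annotation.faceUrl = faceUrl
    mapView.addAnnotation(annotation)
}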
7. Instant Messaging
Partial code implementation
/// Message content type
enum ARIMElemType: NSInteger, Codable, HandyJSONEnum {
/// Unknown message
case none = 100
/// Text message
case text = 101
/// Image message
case picture = 102
/// Voice message
case voice = 103
/// Video message
case video = 104
/// PTT voice message
case pushtotalk = 105
/// File message
case file = 106
case atText = 107
/// Merged (combined-forward) message
case merger = 108
case card = 109
/// Location message
case location = 110
/// Custom message
case custom = 111
case revoke = 112
case hasReadReceipt = 113
case typing = 114
case quote = 115
case common = 200
case groupMsg = 201
}
/// Message status
enum ARIMMessageStatus: NSInteger, Codable, HandyJSONEnum {
case sending = 1
case sendSuccess = 2
case sendFailed = 3
case hasDeleted = 4
case revoked = 5
case filtered = 6
}
/// IM message
/// e.g. "sendId":3578970541,"recvId":2723541307,"clientMsgId":"1653488317610","sessionType":1,"contentType":101,"content":"IM消息","curSeq":1011,"sendTime":1653488317,"status":2
struct ARIMMessage: HandyJSON, PersistableRecord {
/// Sequence number
var curSeq: UInt64!
/// Message id
var clientMsgId: String!
/// Sender id
var sendId: UInt64!
/// Receiver id
var recvId: UInt64!
/// Session type (1: one-to-one message, 2: group message)
var sessionType: ARIMSessionType = .c2c
/// Message content type (the <content type> carried in the extra info)
var contentType: ARIMElemType = .none
/// Message content
var content: String = ""
/// Message status
var status: ARIMMessageStatus?
/// Send time (milliseconds)
var sendTime: Int?
/// Sender platform type
var senderPlatformId: ARIMPlatform?
/// Extra info
var ex: String?
var syncMsgId: String = ""
/// Message storage time (seconds)
var createTime: Int = 0
/// IM id the message belongs to
var msgImId: Int = 0
/// Read state; unread by default
var isRead: Bool = false
/// Whether played (read/unread for voice and PTT); not played by default
var isPlay: Bool = false
/////////////////////////////////////////////////////////////////////////////////
// Custom message bodies
/////////////////////////////////////////////////////////////////////////////////
/// Image message
var imageElem: ARUIImageElem?
/// Video message
var videoElem: ARUIVideoElem?
/// Voice message
var soundElem: ARUISoundElem?
/// File message
var fileElem: ARUIFileElem?
/// Location message
var locationElem: ARUILocationElem?
}
extension ARIMMessage {
func getImageElem() -> ARUIImageElem {
/// Build the image message element
var elem = ARUIImageElem()
var jsonStr = content.base64Decoded()
/// Compatibility with earlier messages whose content was not base64-encoded
if content.base64Decoded() == "【不支持的消息类型】" {
jsonStr = content
}
elem.imageList = JSONDeserializer<ARUIImageItem>.deserializeModelArrayFrom(json: jsonStr) as? [ARUIImageItem]
return elem
}
func getVideoElem() -> ARUIVideoElem {
/// Build the video message element
let elem = JSONDeserializer<ARUIVideoElem>.deserializeFrom(json: content.base64Decoded())
return elem!
}
func getSoundElem() -> ARUISoundElem {
/// Build the voice message element
let elem = JSONDeserializer<ARUISoundElem>.deserializeFrom(json: content.base64Decoded())
return elem!
}
func getFileElem() -> ARUIFileElem {
/// Build the file message element
let elem = JSONDeserializer<ARUIFileElem>.deserializeFrom(json: content.base64Decoded())
return elem!
}
func getLocationElem() -> ARUILocationElem {
/// Build the location message element
let elem = JSONDeserializer<ARUILocationElem>.deserializeFrom(json: content.base64Decoded())
return elem!
}
}
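A hedged sketch of how the chat page routes an incoming message to the right cell by contentType, using the typed getters above; the show…Cell helpers are placeholders:

// showTextCell / showImageCell / showSoundCell / showVideoCell / showLocationCell
// are placeholder UI helpers on the chat page.
func handleIncoming(_ message: ARIMMessage) {
    switch message.contentType {
    case .text:
        showTextCell(text: message.content)
    case .picture:
        showImageCell(elem: message.getImageElem())
    case .voice, .pushtotalk:
        showSoundCell(elem: message.getSoundElem())
    case .video:
        showVideoCell(elem: message.getVideoElem())
    case .location:
        showLocationCell(elem: message.getLocationElem())
    default:
        debugPrint("ARUIChat - unsupported content type: \(message.contentType)")
    }
}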
8. Text Broadcast and Media Broadcast
Partial code implementation
@protocol ARTalkChannelDelegate <NSObject>
@optional
/// A broadcast stream started.
/// @param channel The channel. See ARTalkChannel.
/// @param uid User id
/// @param userData Custom data
- (void)channel:(ARTalkChannel * _Nonnull)channel userStreamOn:(NSString *)uid userData:(NSString * _Nullable)userData;
/// A broadcast stream stopped.
/// @param channel The channel. See ARTalkChannel.
/// @param uid User id
/// @param userData Custom data
- (void)channel:(ARTalkChannel * _Nonnull)channel userStreamOff:(NSString *)uid userData:(NSString * _Nullable)userData;
/// Result of starting to talk.
/// @param channel The channel. See ARTalkChannel.
/// @param code Error code
- (void)channel:(ARTalkChannel * _Nonnull)channel pushToTalkResult:(ARTalkPushToTalkErrorCode)code;
/// The talk ended.
/// @param channel The channel. See ARTalkChannel.
/// @param code Error code
- (void)channel:(ARTalkChannel * _Nonnull)channel pushToTalkEnded:(ARTalkPushToTalkEndErrorCode)code;
/// Another user started talking.
/// @param channel The channel. See ARTalkChannel.
/// @param uid User id
/// @param userData Custom data
/// @param level User level
- (void)channel:(ARTalkChannel * _Nonnull)channel userIsTalkOn:(NSString *)uid userData:(NSString * _Nullable)userData userLevel:(NSInteger)level;
/// Another user stopped talking.
/// @param channel The channel. See ARTalkChannel.
/// @param uid User id
/// @param userData Custom data
- (void)channel:(ARTalkChannel * _Nonnull)channel userIsTalkOff:(NSString *)uid userData:(NSString * _Nullable)userData;
@end
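A Swift conformance sketch for receiving these events on the channel page; ARChannelViewController is a placeholder, and the method signatures assume the default Objective-C-to-Swift import (verify labels and optionality against the generated interface):

// Placeholder view controller conforming to the delegate above.
extension ARChannelViewController: ARTalkChannelDelegate {
    func channel(_ channel: ARTalkChannel, userStreamOn uid: String, userData: String?) {
        // A text/media broadcast started in this channel; show the broadcast banner.
        debugPrint("ARUITalking - broadcast on, uid = \(uid)")
    }
    func channel(_ channel: ARTalkChannel, userStreamOff uid: String, userData: String?) {
        // The broadcast ended; hide the banner.
        debugPrint("ARUITalking - broadcast off, uid = \(uid)")
    }
    func channel(_ channel: ARTalkChannel, userIsTalkOn uid: String, userData: String?, userLevel level: Int) {
        // Another member took the floor; update the "who is talking" indicator.
    }
    func channel(_ channel: ARTalkChannel, userIsTalkOff uid: String, userData: String?) {
        // The floor was released.
    }
}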
Quick PTT iOS base libraries
platform :ios, '11.0'
use_frameworks!
target 'ARUITalking' do
# anyRTC audio/video (RTC) SDK
pod 'ARtcKit_iOS', '~> 4.3.0.3'
# anyRTC real-time messaging SDK
pod 'ARtmKit_iOS', '~> 1.1.0.1'
# anyRTC PTT SDK
pod 'ARTalkKit_iOS'
end
The above covers the basic features of Quick PTT on iOS. The Quick PTT dispatch system fuses voice, video, images, and text messages into a single integrated command-and-dispatch service. It not only digitizes enterprise communication and improves collaboration efficiency and the company's overall image, but also meets the demands of emergency rescue and urgent decision-making, achieving unified command and coordinated action. Developers can build their own Quick PTT dispatch system on top of the open-source ARUICalling audio/video calling component.
Copyright notice: This is an original article by InfoQ author anyRTC开发者.
Original link: http://xie.infoq.cn/article/88ff32a6417454c05ab876405. Please contact the author before reposting.