From dbb7c40e7f3f5510cec1648b134c08eb471529d5 Mon Sep 17 00:00:00 2001 From: Pushpsen Date: Mon, 18 May 2020 16:35:21 +0530 Subject: [PATCH] v2.0.2 --- .../AudioWaveForm/AudioPlayerManager.swift | 191 +++++ .../AudioWaveForm/AudioRecorderManager.swift | 157 ++++ .../Classes/AVAudioFileExtensions.swift | 36 + .../AudioWaveForm/Classes/AudioContext.swift | 210 +++++ .../Classes/AudioVisualizationView.swift | 379 ++++++++ .../Classes/AudioVisualizationView.xib | 35 + .../AudioWaveForm/Classes/BaseNibView.swift | 91 ++ .../AudioWaveForm/Classes/Chronometer.swift | 53 ++ .../Extensions/AudioWaveForm/ViewModel.swift | 101 +++ .../CometChatCallDetailLogView.swift | 1 - .../CallManager/CometChatCallManager.swift | 9 +- .../Message/ChatView/ChatView.swift | 14 +- .../LeftAudioMessageBubble.swift | 90 ++ .../LeftAudioMessageBubble.xib | 192 +++++ .../RightAudioMessageBubble.swift | 95 ++ .../RightAudioMessageBubble.xib | 156 ++++ .../RightFileMessageBubble.xib | 11 +- .../LeftTextMessageBubble.xib | 12 +- .../RightTextMessageBubble.xib | 10 +- .../MessageList/CometChatMessageList.swift | 809 +++++++++++++----- .../MessageList/CometChatMessageList.xib | 174 +++- License.md | 1 + README.md | 1 + Screenshots/audioCall.png | Bin 1688 -> 0 bytes 24 files changed, 2580 insertions(+), 248 deletions(-) create mode 100644 Library/Resources/Extensions/AudioWaveForm/AudioPlayerManager.swift create mode 100644 Library/Resources/Extensions/AudioWaveForm/AudioRecorderManager.swift create mode 100644 Library/Resources/Extensions/AudioWaveForm/Classes/AVAudioFileExtensions.swift create mode 100644 Library/Resources/Extensions/AudioWaveForm/Classes/AudioContext.swift create mode 100644 Library/Resources/Extensions/AudioWaveForm/Classes/AudioVisualizationView.swift create mode 100644 Library/Resources/Extensions/AudioWaveForm/Classes/AudioVisualizationView.xib create mode 100644 Library/Resources/Extensions/AudioWaveForm/Classes/BaseNibView.swift create mode 100644 Library/Resources/Extensions/AudioWaveForm/Classes/Chronometer.swift create mode 100644 Library/Resources/Extensions/AudioWaveForm/ViewModel.swift create mode 100755 Library/UI Components/Message/MessageView/AudioMessageBubble/LeftAudioMessageBubble/LeftAudioMessageBubble.swift create mode 100755 Library/UI Components/Message/MessageView/AudioMessageBubble/LeftAudioMessageBubble/LeftAudioMessageBubble.xib create mode 100755 Library/UI Components/Message/MessageView/AudioMessageBubble/RightAudioMessageBubble/RightAudioMessageBubble.swift create mode 100755 Library/UI Components/Message/MessageView/AudioMessageBubble/RightAudioMessageBubble/RightAudioMessageBubble.xib delete mode 100644 Screenshots/audioCall.png diff --git a/Library/Resources/Extensions/AudioWaveForm/AudioPlayerManager.swift b/Library/Resources/Extensions/AudioWaveForm/AudioPlayerManager.swift new file mode 100644 index 00000000..bd23e956 --- /dev/null +++ b/Library/Resources/Extensions/AudioWaveForm/AudioPlayerManager.swift @@ -0,0 +1,191 @@ +// +// AudioPlayerManager.swift +// ela +// +// Created by Bastien Falcou on 4/14/16. +// Copyright © 2016 Fueled. All rights reserved. +// + +import Foundation +import AVFoundation +import UIKit + +final class AudioPlayerManager: NSObject { + static let shared = AudioPlayerManager() + + var isRunning: Bool { + guard let audioPlayer = self.audioPlayer, audioPlayer.isPlaying else { + return false + } + return true + } + + private var audioPlayer: AVAudioPlayer? + private var audioMeteringLevelTimer: Timer? 
+ + // MARK: - Reinit and play from the beginning + + func play(at url: URL, with audioVisualizationTimeInterval: TimeInterval = 0.05) throws -> TimeInterval { + if AudioRecorderManager.shared.isRunning { + print("Audio Player did fail to start: AVFoundation is recording") + throw AudioErrorType.alreadyRecording + } + + if self.isRunning { + print("Audio Player did fail to start: already playing a file") + throw AudioErrorType.alreadyPlaying + } + + if !URL.checkPath(url.path) { + print("Audio Player did fail to start: file doesn't exist") + throw AudioErrorType.audioFileWrongPath + } + + try self.audioPlayer = AVAudioPlayer(contentsOf: url) + self.setupPlayer(with: audioVisualizationTimeInterval) + print("Started to play sound") + + return self.audioPlayer!.duration + } + + func play(_ data: Data, with audioVisualizationTimeInterval: TimeInterval = 0.05) throws -> TimeInterval { + try self.audioPlayer = AVAudioPlayer(data: data) + self.setupPlayer(with: audioVisualizationTimeInterval) + print("Started to play sound") + + return self.audioPlayer!.duration + } + + private func setupPlayer(with audioVisualizationTimeInterval: TimeInterval) { + if let player = self.audioPlayer { + player.play() + player.isMeteringEnabled = true + player.delegate = self + + self.audioMeteringLevelTimer = Timer.scheduledTimer(timeInterval: audioVisualizationTimeInterval, target: self, + selector: #selector(AudioPlayerManager.timerDidUpdateMeter), userInfo: nil, repeats: true) + } + } + + // MARK: - Resume and pause current if exists + + func resume() throws -> TimeInterval { + if self.audioPlayer?.play() == false { + print("Audio Player did fail to resume for internal reason") + throw AudioErrorType.internalError + } + + print("Resumed sound") + return self.audioPlayer!.duration - self.audioPlayer!.currentTime + } + + func pause() throws { + if !self.isRunning { + print("Audio Player did fail to start: there is nothing currently playing") + throw AudioErrorType.notCurrentlyPlaying + } + + self.audioPlayer?.pause() + print("Paused current playing sound") + } + + func stop() throws { + if !self.isRunning { + print("Audio Player did fail to stop: there is nothing currently playing") + throw AudioErrorType.notCurrentlyPlaying + } + + self.audioPlayer?.stop() + print("Audio player stopped") + } + + // MARK: - Private + + @objc private func timerDidUpdateMeter() { + if self.isRunning { + self.audioPlayer!.updateMeters() + let averagePower = self.audioPlayer!.averagePower(forChannel: 0) + let percentage: Float = pow(10, (0.05 * averagePower)) + NotificationCenter.default.post(name: .audioPlayerManagerMeteringLevelDidUpdateNotification, object: self, userInfo: [audioPercentageUserInfoKey: percentage]) + } + } +} + +extension AudioPlayerManager: AVAudioPlayerDelegate { + func audioPlayerDidFinishPlaying(_ player: AVAudioPlayer, successfully flag: Bool) { + NotificationCenter.default.post(name: .audioPlayerManagerMeteringLevelDidFinishNotification, object: self) + } +} + +extension Notification.Name { + static let audioPlayerManagerMeteringLevelDidUpdateNotification = Notification.Name("AudioPlayerManagerMeteringLevelDidUpdateNotification") + static let audioPlayerManagerMeteringLevelDidFinishNotification = Notification.Name("AudioPlayerManagerMeteringLevelDidFinishNotification") +} + + + +enum AudioErrorType: Error { + case alreadyRecording + case alreadyPlaying + case notCurrentlyPlaying + case audioFileWrongPath + case recordFailed + case playFailed + case recordPermissionNotGranted + case internalError +} + 
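// Illustrative usage sketch: how a view controller might drive AudioPlayerManager and observe its
// metering notifications. `PlaybackViewController` and the "voice_note.m4a" file name are assumptions;
// the helpers and notification names come from this file, and `audioPercentageUserInfoKey` is the key
// declared in AudioRecorderManager.swift.
//
//     final class PlaybackViewController: UIViewController {
//         private var meteringObserver: NSObjectProtocol?
//
//         func startPlayback() {
//             guard let url = URL.documentsPath(forFileName: "voice_note.m4a") else { return }
//             do {
//                 // play(at:with:) returns the duration of the sound that just started playing.
//                 let duration = try AudioPlayerManager.shared.play(at: url, with: 0.05)
//                 print("Playing for \(duration) seconds")
//             } catch {
//                 showAlert(with: error) // UIViewController helper defined below
//             }
//
//             // Metering levels (0.0 to 1.0) are posted every `audioVisualizationTimeInterval` seconds.
//             meteringObserver = NotificationCenter.default.addObserver(
//                 forName: .audioPlayerManagerMeteringLevelDidUpdateNotification,
//                 object: nil, queue: .main) { notification in
//                     if let level = notification.userInfo?[audioPercentageUserInfoKey] as? Float {
//                         print("metering level: \(level)")
//                     }
//             }
//         }
//     }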
+extension AudioErrorType: LocalizedError { + public var errorDescription: String? { + switch self { + case .alreadyRecording: + return "The application is currently recording sounds" + case .alreadyPlaying: + return "The application is already playing a sound" + case .notCurrentlyPlaying: + return "The application is not currently playing" + case .audioFileWrongPath: + return "Invalid path for audio file" + case .recordFailed: + return "Unable to record sound at the moment, please try again" + case .playFailed: + return "Unable to play sound at the moment, please try again" + case .recordPermissionNotGranted: + return "Unable to record sound because the permission has not been granted. This can be changed in your settings." + case .internalError: + return "An error occured while trying to process audio command, please try again" + } + } +} + + +extension URL { + static func checkPath(_ path: String) -> Bool { + let isFileExist = FileManager.default.fileExists(atPath: path) + return isFileExist + } + + static func documentsPath(forFileName fileName: String) -> URL? { + let documents = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)[0] + let writePath = URL(string: documents)!.appendingPathComponent(fileName) + + var directory: ObjCBool = ObjCBool(false) + if FileManager.default.fileExists(atPath: documents, isDirectory:&directory) { + return directory.boolValue ? writePath : nil + } + return nil + } +} + +extension UIViewController { + func showAlert(with error: Error) { + let alertController = UIAlertController(title: "Error", message: error.localizedDescription, preferredStyle: .alert) + alertController.addAction(UIAlertAction(title: "OK", style: .cancel) { _ in + alertController.dismiss(animated: true, completion: nil) + }) + + DispatchQueue.main.async { + self.present(alertController, animated: true, completion: nil) + } + } +} diff --git a/Library/Resources/Extensions/AudioWaveForm/AudioRecorderManager.swift b/Library/Resources/Extensions/AudioWaveForm/AudioRecorderManager.swift new file mode 100644 index 00000000..64856679 --- /dev/null +++ b/Library/Resources/Extensions/AudioWaveForm/AudioRecorderManager.swift @@ -0,0 +1,157 @@ +// +// AudioRecorderManager.swift +// ela +// +// Created by Bastien Falcou on 4/14/16. +// Copyright © 2016 Fueled. All rights reserved. +// + +import Foundation +import AVFoundation + +let audioPercentageUserInfoKey = "percentage" + +final class AudioRecorderManager: NSObject { + let audioFileNamePrefix = "audio.m4a" + let encoderBitRate: Int = 80000 + let numberOfChannels: Int = 2 + let sampleRate: Double = 44100.0 + + static let shared = AudioRecorderManager() + + var isPermissionGranted = false + var isRunning: Bool { + guard let recorder = self.recorder, recorder.isRecording else { + return false + } + return true + } + + var currentRecordPath: URL? + + private var recorder: AVAudioRecorder? + private var audioMeteringLevelTimer: Timer? + + func askPermission(completion: ((Bool) -> Void)? = nil) { + AVAudioSession.sharedInstance().requestRecordPermission { [weak self] granted in + self?.isPermissionGranted = granted + completion?(granted) + print("Audio Recorder did not grant permission") + } + } + + func startRecording(with audioVisualizationTimeInterval: TimeInterval = 0.05, completion: @escaping (URL?, Error?) 
-> Void) { + func startRecordingReturn() { + do { + completion(try internalStartRecording(with: audioVisualizationTimeInterval), nil) + } catch { + completion(nil, error) + } + } + + if !self.isPermissionGranted { + self.askPermission { granted in + startRecordingReturn() + } + } else { + startRecordingReturn() + } + } + + fileprivate func internalStartRecording(with audioVisualizationTimeInterval: TimeInterval) throws -> URL { + if self.isRunning { + throw AudioErrorType.alreadyPlaying + } + + let recordSettings = [ + AVFormatIDKey: NSNumber(value:kAudioFormatMPEG4AAC), + AVEncoderAudioQualityKey : AVAudioQuality.max.rawValue, + AVEncoderBitRateKey : self.encoderBitRate, + AVNumberOfChannelsKey: self.numberOfChannels, + AVSampleRateKey : self.sampleRate + ] as [String : Any] + + guard let path = URL.documentsPath(forFileName: self.audioFileNamePrefix) else { + print("Incorrect path for new audio file") + throw AudioErrorType.audioFileWrongPath + } + + try AVAudioSession.sharedInstance().setCategory(.playAndRecord, mode: .default, options: .defaultToSpeaker) + try AVAudioSession.sharedInstance().setActive(true) + + self.recorder = try AVAudioRecorder(url: path, settings: recordSettings) + self.recorder!.delegate = self + self.recorder!.isMeteringEnabled = true + + if !self.recorder!.prepareToRecord() { + print("Audio Recorder prepare failed") + throw AudioErrorType.recordFailed + } + + if !self.recorder!.record() { + print("Audio Recorder start failed") + throw AudioErrorType.recordFailed + } + + self.audioMeteringLevelTimer = Timer.scheduledTimer(timeInterval: audioVisualizationTimeInterval, target: self, + selector: #selector(AudioRecorderManager.timerDidUpdateMeter), userInfo: nil, repeats: true) + + print("Audio Recorder did start - creating file at index: \(path.absoluteString)") + + self.currentRecordPath = path + return path + } + + func stopRecording() throws { + self.audioMeteringLevelTimer?.invalidate() + self.audioMeteringLevelTimer = nil + + if !self.isRunning { + print("Audio Recorder did fail to stop") + throw AudioErrorType.notCurrentlyPlaying + } + + self.recorder!.stop() + print("Audio Recorder did stop successfully") + } + + func reset() throws { + if self.isRunning { + print("Audio Recorder tried to remove recording before stopping it") + throw AudioErrorType.alreadyRecording + } + + self.recorder?.deleteRecording() + self.recorder = nil + self.currentRecordPath = nil + + print("Audio Recorder did remove current record successfully") + } + + @objc func timerDidUpdateMeter() { + if self.isRunning { + self.recorder!.updateMeters() + let averagePower = recorder!.averagePower(forChannel: 0) + let percentage: Float = pow(10, (0.05 * averagePower)) + NotificationCenter.default.post(name: .audioRecorderManagerMeteringLevelDidUpdateNotification, object: self, userInfo: [audioPercentageUserInfoKey: percentage]) + } + } +} + +extension AudioRecorderManager: AVAudioRecorderDelegate { + func audioRecorderDidFinishRecording(_ recorder: AVAudioRecorder, successfully flag: Bool) { + NotificationCenter.default.post(name: .audioRecorderManagerMeteringLevelDidFinishNotification, object: self) + print("Audio Recorder finished successfully") + } + + func audioRecorderEncodeErrorDidOccur(_ recorder: AVAudioRecorder, error: Error?) 
{ + NotificationCenter.default.post(name: .audioRecorderManagerMeteringLevelDidFailNotification, object: self) + print("Audio Recorder error") + } +} + +extension Notification.Name { + static let audioRecorderManagerMeteringLevelDidUpdateNotification = Notification.Name("AudioRecorderManagerMeteringLevelDidUpdateNotification") + static let audioRecorderManagerMeteringLevelDidFinishNotification = Notification.Name("AudioRecorderManagerMeteringLevelDidFinishNotification") + static let audioRecorderManagerMeteringLevelDidFailNotification = Notification.Name("AudioRecorderManagerMeteringLevelDidFailNotification") +} diff --git a/Library/Resources/Extensions/AudioWaveForm/Classes/AVAudioFileExtensions.swift b/Library/Resources/Extensions/AudioWaveForm/Classes/AVAudioFileExtensions.swift new file mode 100644 index 00000000..10009983 --- /dev/null +++ b/Library/Resources/Extensions/AudioWaveForm/Classes/AVAudioFileExtensions.swift @@ -0,0 +1,36 @@ +// +// AVAudioFileExtensions.swift +// Pods-SoundWave_Example +// +// Created by Bastien Falcou on 4/21/19. +// Inspired from https://stackoverflow.com/a/52280271 +// + +import AVFoundation + +extension AVAudioFile { + func buffer() throws -> [[Float]] { + let format = AVAudioFormat(commonFormat: .pcmFormatFloat32, + sampleRate: self.fileFormat.sampleRate, + channels: self.fileFormat.channelCount, + interleaved: false) + let buffer = AVAudioPCMBuffer(pcmFormat: format!, frameCapacity: UInt32(self.length))! + try self.read(into: buffer, frameCount: UInt32(self.length)) + return self.analyze(buffer: buffer) + } + + private func analyze(buffer: AVAudioPCMBuffer) -> [[Float]] { + let channelCount = Int(buffer.format.channelCount) + let frameLength = Int(buffer.frameLength) + var result = Array(repeating: [Float](repeatElement(0, count: frameLength)), count: channelCount) + for channel in 0.. ()) { + let asset = AVURLAsset(url: audioURL, options: [AVURLAssetPreferPreciseDurationAndTimingKey: NSNumber(value: true as Bool)]) + + guard let assetTrack = asset.tracks(withMediaType: AVMediaType.audio).first else { + fatalError("Couldn't load AVAssetTrack") + } + + asset.loadValuesAsynchronously(forKeys: ["duration"]) { + var error: NSError? + let status = asset.statusOfValue(forKey: "duration", error: &error) + switch status { + case .loaded: + guard let formatDescriptions = assetTrack.formatDescriptions as? [CMAudioFormatDescription], + let audioFormatDesc = formatDescriptions.first, + let asbd = CMAudioFormatDescriptionGetStreamBasicDescription(audioFormatDesc) else + { + break + } + let totalSamples = Int((asbd.pointee.mSampleRate) * Float64(asset.duration.value) / Float64(asset.duration.timescale)) + let audioContext = AudioContext(audioURL: audioURL, totalSamples: totalSamples, asset: asset, assetTrack: assetTrack) + completionHandler(audioContext) + case .failed, .cancelled, .loading, .unknown: + print("Couldn't load asset: \(error?.localizedDescription ?? "Unknown error")") + completionHandler(nil) + @unknown default: + print("Couldn't load asset, unknown default: \(error?.localizedDescription ?? "Unknown error")") + completionHandler(nil) + } + } + } + + public func render(targetSamples: Int = 100) -> [Float] { + let sampleRange: CountableRange = 0..? 
+ CMBlockBufferGetDataPointer(readBuffer, + atOffset: 0, + lengthAtOffsetOut: &readBufferLength, + totalLengthOut: nil, + dataPointerOut: &readBufferPointer) + sampleBuffer.append(UnsafeBufferPointer(start: readBufferPointer, count: readBufferLength)) + CMSampleBufferInvalidate(readSampleBuffer) + + let totalSamples = sampleBuffer.count / MemoryLayout.size + let downSampledLength = totalSamples / samplesPerPixel + let samplesToProcess = downSampledLength * samplesPerPixel + + guard samplesToProcess > 0 else { continue } + + processSamples(fromData: &sampleBuffer, + outputSamples: &outputSamples, + samplesToProcess: samplesToProcess, + downSampledLength: downSampledLength, + samplesPerPixel: samplesPerPixel, + filter: filter) + } + + // Process the remaining samples at the end which didn't fit into samplesPerPixel + let samplesToProcess = sampleBuffer.count / MemoryLayout.size + if samplesToProcess > 0 { + let downSampledLength = 1 + let samplesPerPixel = samplesToProcess + let filter = [Float](repeating: 1.0 / Float(samplesPerPixel), count: samplesPerPixel) + + processSamples(fromData: &sampleBuffer, + outputSamples: &outputSamples, + samplesToProcess: samplesToProcess, + downSampledLength: downSampledLength, + samplesPerPixel: samplesPerPixel, + filter: filter) + } + + guard reader.status == .completed || true else { + fatalError("Couldn't read the audio file") + } + + return self.percentage(outputSamples) + } + + private func processSamples(fromData sampleBuffer: inout Data, + outputSamples: inout [Float], + samplesToProcess: Int, + downSampledLength: Int, + samplesPerPixel: Int, + filter: [Float]) { + sampleBuffer.withUnsafeBytes { (body: UnsafeRawBufferPointer) in + var processingBuffer = [Float](repeating: 0.0, count: samplesToProcess) + let sampleCount = vDSP_Length(samplesToProcess) + + guard let samples = body.bindMemory(to: Int16.self).baseAddress else { + return + } + + // Convert 16bit int samples to floats + vDSP_vflt16(samples, 1, &processingBuffer, 1, sampleCount) + + // Take the absolute values to get amplitude + vDSP_vabs(processingBuffer, 1, &processingBuffer, 1, sampleCount) + + // Get the corresponding dB, and clip the results + getdB(from: &processingBuffer) + + // Downsample and average + var downSampledData = [Float](repeating: 0.0, count: downSampledLength) + vDSP_desamp(processingBuffer, + vDSP_Stride(samplesPerPixel), + filter, &downSampledData, + vDSP_Length(downSampledLength), + vDSP_Length(samplesPerPixel)) + + // Remove processed samples + sampleBuffer.removeFirst(samplesToProcess * MemoryLayout.size) + + outputSamples += downSampledData + } + } + + private func getdB(from normalizedSamples: inout [Float]) { + // Convert samples to a log scale + var zero: Float = 32768.0 + vDSP_vdbcon(normalizedSamples, 1, &zero, &normalizedSamples, 1, vDSP_Length(normalizedSamples.count), 1) + + // Clip to [noiseFloor, 0] + var ceil: Float = 0.0 + var noiseFloorMutable: Float = -80.0 // TODO: CHANGE THIS VALUE + vDSP_vclip(normalizedSamples, 1, &noiseFloorMutable, &ceil, &normalizedSamples, 1, vDSP_Length(normalizedSamples.count)) + } + + private func percentage(_ array: [Float]) -> [Float] { + guard let firstElement = array.first else { + return [] + } + let absArray = array.map { abs($0) } + let minValue = absArray.reduce(firstElement) { min($0, $1) } + let maxValue = absArray.reduce(firstElement) { max($0, $1) } + let delta = maxValue - minValue + return absArray.map { abs(1 - (delta / ($0 - minValue))) } + } +} diff --git 
a/Library/Resources/Extensions/AudioWaveForm/Classes/AudioVisualizationView.swift b/Library/Resources/Extensions/AudioWaveForm/Classes/AudioVisualizationView.swift new file mode 100644 index 00000000..92d7dd86 --- /dev/null +++ b/Library/Resources/Extensions/AudioWaveForm/Classes/AudioVisualizationView.swift @@ -0,0 +1,379 @@ +// +// AudioVisualizationView.swift +// Pods +// +// Created by Bastien Falcou on 12/6/16. +// + +import AVFoundation +import UIKit + +public class AudioVisualizationView: BaseNibView { + public enum AudioVisualizationMode { + case read + case write + } + + private enum LevelBarType { + case upper + case lower + case single + } + + @IBInspectable public var meteringLevelBarWidth: CGFloat = 3.0 { + didSet { + self.setNeedsDisplay() + } + } + @IBInspectable public var meteringLevelBarInterItem: CGFloat = 2.0 { + didSet { + self.setNeedsDisplay() + } + } + @IBInspectable public var meteringLevelBarCornerRadius: CGFloat = 2.0 { + didSet { + self.setNeedsDisplay() + } + } + @IBInspectable public var meteringLevelBarSingleStick: Bool = false { + didSet { + self.setNeedsDisplay() + } + } + + public var audioVisualizationMode: AudioVisualizationMode = .read + + public var audioVisualizationTimeInterval: TimeInterval = 0.05 // Time interval between each metering bar representation + + // Specify a `gradientPercentage` to have the width of gradient be that percentage of the view width (starting from left) + // The rest of the screen will be filled by `self.gradientStartColor` to display nicely. + // Do not specify any `gradientPercentage` for gradient calculating fitting size automatically. + public var currentGradientPercentage: Float? + + private var meteringLevelsArray: [Float] = [] // Mutating recording array (values are percentage: 0.0 to 1.0) + private var meteringLevelsClusteredArray: [Float] = [] // Generated read mode array (values are percentage: 0.0 to 1.0) + + private var currentMeteringLevelsArray: [Float] { + if !self.meteringLevelsClusteredArray.isEmpty { + return meteringLevelsClusteredArray + } + return meteringLevelsArray + } + + private var playChronometer: Chronometer? + + public var meteringLevels: [Float]? 
{ + didSet { + if let meteringLevels = self.meteringLevels { + self.meteringLevelsClusteredArray = meteringLevels + self.currentGradientPercentage = 0.0 + _ = self.scaleSoundDataToFitScreen() + } + } + } + + static var audioVisualizationDefaultGradientStartColor: UIColor { + return #colorLiteral(red: 0.2, green: 0.6, blue: 1, alpha: 1) + + } + static var audioVisualizationDefaultGradientEndColor: UIColor { + return #colorLiteral(red: 0.2, green: 0.7453606592, blue: 1, alpha: 1) + } + + @IBInspectable public var gradientStartColor: UIColor = AudioVisualizationView.audioVisualizationDefaultGradientStartColor { + didSet { + self.setNeedsDisplay() + } + } + @IBInspectable public var gradientEndColor: UIColor = AudioVisualizationView.audioVisualizationDefaultGradientEndColor { + didSet { + self.setNeedsDisplay() + } + } + + override public init(frame: CGRect) { + super.init(frame: frame) + } + + required public init?(coder aDecoder: NSCoder) { + super.init(coder: aDecoder) + } + + override public func draw(_ rect: CGRect) { + super.draw(rect) + + if let context = UIGraphicsGetCurrentContext() { + self.drawLevelBarsMaskAndGradient(inContext: context) + } + } + + public func reset() { + self.meteringLevels = nil + self.currentGradientPercentage = nil + self.meteringLevelsClusteredArray.removeAll() + self.meteringLevelsArray.removeAll() + self.setNeedsDisplay() + } + + // MARK: - Record Mode Handling + + public func add(meteringLevel: Float) { + guard self.audioVisualizationMode == .write else { + fatalError("trying to populate audio visualization view in read mode") + } + + self.meteringLevelsArray.append(meteringLevel) + self.setNeedsDisplay() + } + + public func scaleSoundDataToFitScreen() -> [Float] { + if self.meteringLevelsArray.isEmpty { + return [] + } + + self.meteringLevelsClusteredArray.removeAll() + var lastPosition: Int = 0 + + for index in 0.. 
self.meteringLevelsArray.count && floor(position) != position { + let low: Int = Int(floor(position)) + let high: Int = Int(ceil(position)) + + if high < self.meteringLevelsArray.count { + h = self.meteringLevelsArray[low] + ((position - Float(low)) * (self.meteringLevelsArray[high] - self.meteringLevelsArray[low])) + } else { + h = self.meteringLevelsArray[low] + } + } else { + for nestedIndex in lastPosition...Int(position) { + h += self.meteringLevelsArray[nestedIndex] + } + let stepsNumber = Int(1 + position - Float(lastPosition)) + h = h / Float(stepsNumber) + } + + lastPosition = Int(position) + self.meteringLevelsClusteredArray.append(h) + } + self.setNeedsDisplay() + return self.meteringLevelsClusteredArray + } + + // PRAGMA: - Play Mode Handling + + public func play(from url: URL) { + guard self.audioVisualizationMode == .read else { + fatalError("trying to read audio visualization in write mode") + } + + AudioContext.load(fromAudioURL: url) { audioContext in + guard let audioContext = audioContext else { + fatalError("Couldn't create the audioContext") + } + self.meteringLevels = audioContext.render(targetSamples: 100) + self.play(for: 10) + } + } + + public func play(for duration: TimeInterval) { + guard self.audioVisualizationMode == .read else { + fatalError("trying to read audio visualization in write mode") + } + + guard self.meteringLevels != nil else { + fatalError("trying to read audio visualization of non initialized sound record") + } + + if let currentChronometer = self.playChronometer { + currentChronometer.start() // resume current + return + } + + self.playChronometer = Chronometer(withTimeInterval: self.audioVisualizationTimeInterval) + self.playChronometer?.start(shouldFire: false) + + self.playChronometer?.timerDidUpdate = { [weak self] timerDuration in + guard let this = self else { + return + } + + if timerDuration >= duration { + this.stop() + return + } + + DispatchQueue.main.async { + this.currentGradientPercentage = Float(timerDuration) / Float(duration) + this.setNeedsDisplay() + } + } + } + + public func pause() { + guard let chronometer = self.playChronometer, chronometer.isPlaying else { + fatalError("trying to pause audio visualization view when not playing") + } + self.playChronometer?.pause() + } + + public func stop() { + self.playChronometer?.stop() + self.playChronometer = nil + + self.currentGradientPercentage = 1.0 + self.setNeedsDisplay() + self.currentGradientPercentage = nil + } + + // MARK: - Mask + Gradient + + private func drawLevelBarsMaskAndGradient(inContext context: CGContext) { + if self.currentMeteringLevelsArray.isEmpty { + return + } + + context.saveGState() + + UIGraphicsBeginImageContextWithOptions(self.frame.size, false, 0.0) + + let maskContext = UIGraphicsGetCurrentContext() + UIColor.black.set() + + self.drawMeteringLevelBars(inContext: maskContext!) + + let mask = UIGraphicsGetCurrentContext()?.makeImage() + UIGraphicsEndImageContext() + + context.clip(to: self.bounds, mask: mask!) 
+ + self.drawGradient(inContext: context) + + context.restoreGState() + } + + private func drawGradient(inContext context: CGContext) { + if self.currentMeteringLevelsArray.isEmpty { + return + } + + context.saveGState() + + let startPoint = CGPoint(x: 0.0, y: self.centerY) + var endPoint = CGPoint(x: self.xLeftMostBar() + self.meteringLevelBarWidth, y: self.centerY) + + if let gradientPercentage = self.currentGradientPercentage { + endPoint = CGPoint(x: self.frame.size.width * CGFloat(gradientPercentage), y: self.centerY) + } + + let colorSpace = CGColorSpaceCreateDeviceRGB() + let colorLocations: [CGFloat] = [0.0, 1.0] + let colors = [self.gradientStartColor.cgColor, self.gradientEndColor.cgColor] + + let gradient = CGGradient(colorsSpace: colorSpace, colors: colors as CFArray, locations: colorLocations) + + context.drawLinearGradient(gradient!, start: startPoint, end: endPoint, options: CGGradientDrawingOptions(rawValue: 0)) + + context.restoreGState() + + if self.currentGradientPercentage != nil { + self.drawPlainBackground(inContext: context, fillFromXCoordinate: endPoint.x) + } + } + + private func drawPlainBackground(inContext context: CGContext, fillFromXCoordinate xCoordinate: CGFloat) { + context.saveGState() + + let squarePath = UIBezierPath() + + squarePath.move(to: CGPoint(x: xCoordinate, y: 0.0)) + squarePath.addLine(to: CGPoint(x: self.frame.size.width, y: 0.0)) + squarePath.addLine(to: CGPoint(x: self.frame.size.width, y: self.frame.size.height)) + squarePath.addLine(to: CGPoint(x: xCoordinate, y: self.frame.size.height)) + + squarePath.close() + squarePath.addClip() + + self.gradientStartColor.setFill() + squarePath.fill() + + context.restoreGState() + } + + // MARK: - Bars + + private func drawMeteringLevelBars(inContext context: CGContext) { + let offset = max(self.currentMeteringLevelsArray.count - self.maximumNumberBars, 0) + + for index in offset.. CGFloat { + return self.xPointForMeteringLevel(min(self.maximumNumberBars - 1, self.currentMeteringLevelsArray.count - 1)) + } + + private func heightForMeteringLevel(_ meteringLevel: Float) -> CGFloat { + return CGFloat(meteringLevel) * self.maximumBarHeight + } + + private func xPointForMeteringLevel(_ atIndex: Int) -> CGFloat { + return CGFloat(atIndex) * (self.meteringLevelBarWidth + self.meteringLevelBarInterItem) + } +} diff --git a/Library/Resources/Extensions/AudioWaveForm/Classes/AudioVisualizationView.xib b/Library/Resources/Extensions/AudioWaveForm/Classes/AudioVisualizationView.xib new file mode 100644 index 00000000..6850ae59 --- /dev/null +++ b/Library/Resources/Extensions/AudioWaveForm/Classes/AudioVisualizationView.xib @@ -0,0 +1,35 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/Library/Resources/Extensions/AudioWaveForm/Classes/BaseNibView.swift b/Library/Resources/Extensions/AudioWaveForm/Classes/BaseNibView.swift new file mode 100644 index 00000000..947b0255 --- /dev/null +++ b/Library/Resources/Extensions/AudioWaveForm/Classes/BaseNibView.swift @@ -0,0 +1,91 @@ +// +// BaseNibView.swift +// Pods +// +// Created by Bastien Falcou on 12/6/16. +// + +import UIKit + +/** +* Subclass this class to use +* @note +* Instructions: +* - Subclass this class +* - Associate it with a nib via File's Owner (Whose name is defined by [-nibName]) +* - Bind contentView to the root view of the nib +* - Then you can insert it either in code or in a xib/storyboard, your choice +*/ + +public class BaseNibView: UIView { + @IBOutlet var contentView: UIView! 
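// Root view of the nib bound via File's Owner; createFromNib() below instantiates the nib,
// adds this view as a subview and pins it to all four edges of the receiver.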
+ +/** + * Is called when the nib name associated with the class is going to be loaded. + * + * @return The nib name (Default implementation returns class name: `NSStringFromClass([self class])`) + * You will want to override this method in swift as the class name is prefixed with the module in that case + */ + var nibName: String { + return String(describing: type(of: self)) + } + +/** + * Called when first loading the nib. + * Defaults to `[NSBundle bundleForClass:[self class]]` + * + * @return The bundle in which to find the nib. + */ + var nibBundle: Bundle { + return Bundle(for: type(of: self)) + } + +/** + * Use the 2 methods above to instanciate the correct instance of UINib for the view. + * You can override this if you need more customization. + * + * @return An instance of UINib + */ + var nib: UINib { + return UINib(nibName: self.nibName, bundle: self.nibBundle) + } + + private var shouldAwakeFromNib: Bool = true + + override init(frame: CGRect) { + super.init(frame: frame) + self.createFromNib() + } + + required public init?(coder aDecoder: NSCoder) { + super.init(coder: aDecoder) + } + + override public func awakeFromNib() { + super.awakeFromNib() + + self.shouldAwakeFromNib = false + self.createFromNib() + } + + private func createFromNib() { + if self.contentView == nil { + return + } + + self.nib.instantiate(withOwner: self, options: nil) + assert(self.contentView != nil, "contentView is nil. Did you forgot to link it in IB?") + + if self.shouldAwakeFromNib { + self.awakeFromNib() + } + + self.contentView.translatesAutoresizingMaskIntoConstraints = false + self.addSubview(self.contentView) + + self.contentView.topAnchor.constraint(equalTo: self.topAnchor).isActive = true + self.contentView.bottomAnchor.constraint(equalTo: self.bottomAnchor).isActive = true + self.contentView.leftAnchor.constraint(equalTo: self.leftAnchor).isActive = true + self.contentView.rightAnchor.constraint(equalTo: self.rightAnchor).isActive = true + } +} diff --git a/Library/Resources/Extensions/AudioWaveForm/Classes/Chronometer.swift b/Library/Resources/Extensions/AudioWaveForm/Classes/Chronometer.swift new file mode 100644 index 00000000..eff4ba11 --- /dev/null +++ b/Library/Resources/Extensions/AudioWaveForm/Classes/Chronometer.swift @@ -0,0 +1,53 @@ +// +// Chronometer.swift +// Pods +// +// Created by Bastien Falcou on 12/6/16. +// + +import Foundation + +public final class Chronometer: NSObject { + private var timer: Timer? + private var timeInterval: TimeInterval = 1.0 + + public var isPlaying = false + public var timerCurrentValue: TimeInterval = 0.0 + + public var timerDidUpdate: ((TimeInterval) -> ())? + public var timerDidComplete: (() -> ())? 
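    /// Illustrative usage sketch (the 0.05 s interval and the print statements are assumptions):
    ///
    ///     let chronometer = Chronometer(withTimeInterval: 0.05)
    ///     chronometer.timerDidUpdate = { elapsed in
    ///         print("elapsed: \(elapsed) s")   // fired every 0.05 s on the main run loop
    ///     }
    ///     chronometer.timerDidComplete = {
    ///         print("stopped")                 // invoked from stop()
    ///     }
    ///     chronometer.start()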
+ + public init(withTimeInterval timeInterval: TimeInterval = 0.0) { + super.init() + + self.timeInterval = timeInterval + } + + public func start(shouldFire fire: Bool = true) { + self.timer = Timer(timeInterval: self.timeInterval, target: self, selector: #selector(Chronometer.timerDidTrigger), userInfo: nil, repeats: true) + RunLoop.main.add(self.timer!, forMode: .default) + self.timer?.fire() + self.isPlaying = true + } + + public func pause() { + self.timer?.invalidate() + self.timer = nil + self.isPlaying = false + } + + public func stop() { + self.isPlaying = false + self.timer?.invalidate() + self.timer = nil + self.timerCurrentValue = 0.0 + self.timerDidComplete?() + } + + // MARK: - Private + + @objc fileprivate func timerDidTrigger() { + self.timerDidUpdate?(self.timerCurrentValue) + self.timerCurrentValue += self.timeInterval + } +} diff --git a/Library/Resources/Extensions/AudioWaveForm/ViewModel.swift b/Library/Resources/Extensions/AudioWaveForm/ViewModel.swift new file mode 100644 index 00000000..fe82fb60 --- /dev/null +++ b/Library/Resources/Extensions/AudioWaveForm/ViewModel.swift @@ -0,0 +1,101 @@ +// +// ViewModel.swift +// SoundWave +// +// Created by Bastien Falcou on 12/6/16. +// Copyright © 2016 CocoaPods. All rights reserved. +// + +import Foundation + +struct SoundRecord { + var audioFilePathLocal: URL? + var meteringLevels: [Float]? +} + +final class ViewModel { + var audioVisualizationTimeInterval: TimeInterval = 0.05 // Time interval between each metering bar representation + + var currentAudioRecord: SoundRecord? + private var isPlaying = false + + var audioMeteringLevelUpdate: ((Float) -> ())? + var audioDidFinish: (() -> ())? + + init() { + // notifications update metering levels + NotificationCenter.default.addObserver(self, selector: #selector(ViewModel.didReceiveMeteringLevelUpdate), + name: .audioPlayerManagerMeteringLevelDidUpdateNotification, object: nil) + NotificationCenter.default.addObserver(self, selector: #selector(ViewModel.didReceiveMeteringLevelUpdate), + name: .audioRecorderManagerMeteringLevelDidUpdateNotification, object: nil) + + // notifications audio finished + NotificationCenter.default.addObserver(self, selector: #selector(ViewModel.didFinishRecordOrPlayAudio), + name: .audioPlayerManagerMeteringLevelDidFinishNotification, object: nil) + NotificationCenter.default.addObserver(self, selector: #selector(ViewModel.didFinishRecordOrPlayAudio), + name: .audioRecorderManagerMeteringLevelDidFinishNotification, object: nil) + } + + // MARK: - Recording + + func askAudioRecordingPermission(completion: ((Bool) -> Void)? = nil) { + return AudioRecorderManager.shared.askPermission(completion: completion) + } + + func startRecording(completion: @escaping (SoundRecord?, Error?) -> Void) { + AudioRecorderManager.shared.startRecording(with: self.audioVisualizationTimeInterval, completion: { [weak self] url, error in + guard let url = url else { + completion(nil, error!) 
+ return + } + + self?.currentAudioRecord = SoundRecord(audioFilePathLocal: url, meteringLevels: []) + print("sound record created at url \(url.absoluteString))") + completion(self?.currentAudioRecord, nil) + }) + } + + func stopRecording() throws { + try AudioRecorderManager.shared.stopRecording() + } + + func resetRecording() throws { + try AudioRecorderManager.shared.reset() + self.isPlaying = false + self.currentAudioRecord = nil + } + + // MARK: - Playing + + func startPlaying() throws -> TimeInterval { + guard let currentAudioRecord = self.currentAudioRecord else { + throw AudioErrorType.audioFileWrongPath + } + + if self.isPlaying { + return try AudioPlayerManager.shared.resume() + } else { + guard let audioFilePath = currentAudioRecord.audioFilePathLocal else { + fatalError("tried to unwrap audio file path that is nil") + } + + self.isPlaying = true + return try AudioPlayerManager.shared.play(at: audioFilePath, with: self.audioVisualizationTimeInterval) + } + } + + func pausePlaying() throws { + try AudioPlayerManager.shared.pause() + } + + // MARK: - Notifications Handling + + @objc private func didReceiveMeteringLevelUpdate(_ notification: Notification) { + let percentage = notification.userInfo![audioPercentageUserInfoKey] as! Float + self.audioMeteringLevelUpdate?(percentage) + } + + @objc private func didFinishRecordOrPlayAudio(_ notification: Notification) { + self.audioDidFinish?() + } +} diff --git a/Library/UI Components/Call/CallDetailLogView/CometChatCallDetailLogView.swift b/Library/UI Components/Call/CallDetailLogView/CometChatCallDetailLogView.swift index dcebb1a8..19e2d1c7 100644 --- a/Library/UI Components/Call/CallDetailLogView/CometChatCallDetailLogView.swift +++ b/Library/UI Components/Call/CallDetailLogView/CometChatCallDetailLogView.swift @@ -23,7 +23,6 @@ class CometChatCallDetailLogView: UITableViewCell { // MARK: - Declaration of Variables - var call: BaseMessage! { didSet { diff --git a/Library/UI Components/Managers/CallManager/CometChatCallManager.swift b/Library/UI Components/Managers/CallManager/CometChatCallManager.swift index 1eaf6c05..d662af30 100644 --- a/Library/UI Components/Managers/CallManager/CometChatCallManager.swift +++ b/Library/UI Components/Managers/CallManager/CometChatCallManager.swift @@ -46,11 +46,9 @@ import CometChatPro let snackbar: CometChatSnackbar = CometChatSnackbar.init(message: "Kindly, unblock the user to make a call.", duration: .short) snackbar.show() }else{ - let outgoingCall = CometChatOutgoingCall() - outgoingCall.makeCall(call: call, to: user) - outgoingCall.modalPresentationStyle = .fullScreen DispatchQueue.main.async { let outgoingCall = CometChatOutgoingCall() + outgoingCall.makeCall(call: call, to: user) outgoingCall.modalPresentationStyle = .fullScreen if let window = UIApplication.shared.windows.first , let rootViewController = window.rootViewController { var currentController = rootViewController @@ -63,11 +61,9 @@ import CometChatPro } } if let group = to as? 
Group { - let outgoingCall = CometChatOutgoingCall() - outgoingCall.makeCall(call: call, to: group) - outgoingCall.modalPresentationStyle = .fullScreen DispatchQueue.main.async { let outgoingCall = CometChatOutgoingCall() + outgoingCall.makeCall(call: call, to: group) outgoingCall.modalPresentationStyle = .fullScreen if let window = UIApplication.shared.windows.first , let rootViewController = window.rootViewController { var currentController = rootViewController @@ -79,7 +75,6 @@ import CometChatPro } } } - } /* ----------------------------------------------------------------------------------------- */ diff --git a/Library/UI Components/Message/ChatView/ChatView.swift b/Library/UI Components/Message/ChatView/ChatView.swift index 47a61617..7399f693 100755 --- a/Library/UI Components/Message/ChatView/ChatView.swift +++ b/Library/UI Components/Message/ChatView/ChatView.swift @@ -14,7 +14,7 @@ import CometChatPro // MARK: - Declaration of Protocol protocol ChatViewInternalDelegate { - func didMicrophoneButtonPressed() + func didMicrophoneButtonPressed(with: UILongPressGestureRecognizer) func didSendButtonPressed() func didAttachmentButtonPressed() func didStickerButtonPressed() @@ -33,9 +33,8 @@ import CometChatPro // MARK: - Declaration of IBOutlet @IBOutlet weak var attachment: UIView! - @IBOutlet weak var send: UIView! - @IBOutlet weak var microphone: UIView! - @IBOutlet weak var heightConstraint: NSLayoutConstraint! + @IBOutlet weak var send: UIButton! + @IBOutlet weak var microphone: UIButton! @IBOutlet weak var textView: UITextView! @IBOutlet weak var sticker: UIButton! @@ -43,12 +42,13 @@ import CometChatPro override init(frame: CGRect) { super.init(frame: frame) - } + required init?(coder aDecoder: NSCoder) { super.init(coder: aDecoder) - + + } func loadViewFromNib() { let bundle = Bundle(for: type(of: self)) @@ -63,7 +63,7 @@ import CometChatPro /// This method triggers when microphone button pressed /// - Parameter sender: This specifies the sender Object @IBAction func microphoneButtonPressed(_ sender: Any) { - internalDelegate?.didMicrophoneButtonPressed() + } diff --git a/Library/UI Components/Message/MessageView/AudioMessageBubble/LeftAudioMessageBubble/LeftAudioMessageBubble.swift b/Library/UI Components/Message/MessageView/AudioMessageBubble/LeftAudioMessageBubble/LeftAudioMessageBubble.swift new file mode 100755 index 00000000..9f595683 --- /dev/null +++ b/Library/UI Components/Message/MessageView/AudioMessageBubble/LeftAudioMessageBubble/LeftAudioMessageBubble.swift @@ -0,0 +1,90 @@ +// LeftaudioMessageBubble.swift +// CometChatUIKit +// Created by CometChat Inc. on 20/09/19. +// Copyright © 2020 CometChat Inc. All rights reserved. + + +// MARK: - Importing Frameworks. + +import UIKit +import CometChatPro + +/* ----------------------------------------------------------------------------------------- */ + +class LeftAudioMessageBubble: UITableViewCell { + + // MARK: - Declaration of IBOutlets + + @IBOutlet weak var tintedView: UIView! + @IBOutlet weak var fileName: UILabel! + @IBOutlet weak var size: UILabel! + @IBOutlet weak var icon: UIImageView! + @IBOutlet weak var timeStamp: UILabel! + @IBOutlet weak var avatar: Avatar! + @IBOutlet weak var receiptStack: UIStackView! + @IBOutlet weak var name: UILabel! + @IBOutlet weak var nameView: UIView! 
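    // Illustrative sketch of how a message list might register and dequeue this cell; the
    // "leftAudioMessageBubble" reuse identifier and the surrounding table view code are assumptions:
    //
    //     let bundle = Bundle(for: LeftAudioMessageBubble.self)
    //     tableView.register(UINib(nibName: "LeftAudioMessageBubble", bundle: bundle),
    //                        forCellReuseIdentifier: "leftAudioMessageBubble")
    //     let cell = tableView.dequeueReusableCell(withIdentifier: "leftAudioMessageBubble",
    //                                              for: indexPath) as! LeftAudioMessageBubble
    //     cell.audioMessage = mediaMessage   // a MediaMessage whose messageType is .audio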
+ + // MARK: - Declaration of Variables + var selectionColor: UIColor { + set { + let view = UIView() + view.backgroundColor = newValue + self.selectedBackgroundView = view + } + get { + return self.selectedBackgroundView?.backgroundColor ?? UIColor.clear + } + } + + var audioMessage: MediaMessage! { + didSet { + receiptStack.isHidden = true + if audioMessage.receiverType == .group { + nameView.isHidden = false + }else { + nameView.isHidden = true + } + if let userName = audioMessage.sender?.name { + name.text = userName + ":" + } + + timeStamp.text = String().setMessageTime(time: Int(audioMessage?.sentAt ?? 0)) + fileName.text = "Audio File" + if let fileSize = audioMessage.attachment?.fileSize { + print(Units(bytes: Int64(fileSize)).getReadableUnit()) + size.text = Units(bytes: Int64(fileSize)).getReadableUnit() + } + if let avatarURL = audioMessage.sender?.avatar { + avatar.set(image: avatarURL, with: audioMessage.sender?.name ?? "") + } + } + } + + // MARK: - Initialization of required Methods + + override func awakeFromNib() { + super.awakeFromNib() + + if #available(iOS 13.0, *) { + selectionColor = .systemBackground + } else { + selectionColor = .white + } + } + + override func setSelected(_ selected: Bool, animated: Bool) { + super.setSelected(selected, animated: animated) + switch isEditing { + case true: + switch selected { + case true: self.tintedView.isHidden = false + case false: self.tintedView.isHidden = true + } + case false: break + } + } + +} + +/* ----------------------------------------------------------------------------------------- */ diff --git a/Library/UI Components/Message/MessageView/AudioMessageBubble/LeftAudioMessageBubble/LeftAudioMessageBubble.xib b/Library/UI Components/Message/MessageView/AudioMessageBubble/LeftAudioMessageBubble/LeftAudioMessageBubble.xib new file mode 100755 index 00000000..98e36888 --- /dev/null +++ b/Library/UI Components/Message/MessageView/AudioMessageBubble/LeftAudioMessageBubble/LeftAudioMessageBubble.xib @@ -0,0 +1,192 @@ + + + + + + + + + + + + SFProDisplay-Medium + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/Library/UI Components/Message/MessageView/AudioMessageBubble/RightAudioMessageBubble/RightAudioMessageBubble.swift b/Library/UI Components/Message/MessageView/AudioMessageBubble/RightAudioMessageBubble/RightAudioMessageBubble.swift new file mode 100755 index 00000000..bf7fa8ce --- /dev/null +++ b/Library/UI Components/Message/MessageView/AudioMessageBubble/RightAudioMessageBubble/RightAudioMessageBubble.swift @@ -0,0 +1,95 @@ +// RightaudioMessageBubble.swift +// CometChatUIKit +// Created by CometChat Inc. on 20/09/19. +// Copyright © 2020 CometChat Inc. All rights reserved. + + +// MARK: - Importing Frameworks. + +import UIKit +import CometChatPro + +/* ----------------------------------------------------------------------------------------- */ + +class RightAudioMessageBubble: UITableViewCell { + + // MARK: - Declaration of IBOutlets + + @IBOutlet weak var tintedView: UIView! + @IBOutlet weak var name: UILabel! + @IBOutlet weak var size: UILabel! + @IBOutlet weak var icon: UIImageView! + @IBOutlet weak var timeStamp: UILabel! + @IBOutlet weak var receipt: UIImageView! + @IBOutlet weak var receiptStack: UIStackView! 
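    // The audioMessage observer below derives the receipt icon from the message timestamps:
    // readAt > 0 shows "read", deliveredAt > 0 shows "delivered", sentAt > 0 shows "sent",
    // and sentAt == 0 shows "wait" together with the "SENDING" label.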
+ + // MARK: - Declaration of Variables + var selectionColor: UIColor { + set { + let view = UIView() + view.backgroundColor = newValue + self.selectedBackgroundView = view + } + get { + return self.selectedBackgroundView?.backgroundColor ?? UIColor.clear + } + } + + var audioMessage: MediaMessage! { + didSet { + receiptStack.isHidden = true + if audioMessage.sentAt == 0 { + timeStamp.text = NSLocalizedString("SENDING", comment: "") + name.text = "Audio File" + size.text = NSLocalizedString("calculating...", comment: "") + }else{ + timeStamp.text = String().setMessageTime(time: audioMessage.sentAt) + name.text = "Audio File" + if let fileSize = audioMessage.attachment?.fileSize { + print(Units(bytes: Int64(fileSize)).getReadableUnit()) + size.text = Units(bytes: Int64(fileSize)).getReadableUnit() + } + } + + if audioMessage.readAt > 0 { + receipt.image = #imageLiteral(resourceName: "read") + timeStamp.text = String().setMessageTime(time: Int(audioMessage?.readAt ?? 0)) + }else if audioMessage.deliveredAt > 0 { + receipt.image = #imageLiteral(resourceName: "delivered") + timeStamp.text = String().setMessageTime(time: Int(audioMessage?.deliveredAt ?? 0)) + }else if audioMessage.sentAt > 0 { + receipt.image = #imageLiteral(resourceName: "sent") + timeStamp.text = String().setMessageTime(time: Int(audioMessage?.sentAt ?? 0)) + }else if audioMessage.sentAt == 0 { + receipt.image = #imageLiteral(resourceName: "wait") + timeStamp.text = NSLocalizedString("SENDING", comment: "") + } + } + } + + // MARK: - Initialization of required Methods + + override func awakeFromNib() { + super.awakeFromNib() + if #available(iOS 13.0, *) { + selectionColor = .systemBackground + } else { + selectionColor = .white + } + } + + override func setSelected(_ selected: Bool, animated: Bool) { + super.setSelected(selected, animated: animated) + switch isEditing { + case true: + switch selected { + case true: self.tintedView.isHidden = false + case false: self.tintedView.isHidden = true + } + case false: break + } + } + +} + +/* ----------------------------------------------------------------------------------------- */ diff --git a/Library/UI Components/Message/MessageView/AudioMessageBubble/RightAudioMessageBubble/RightAudioMessageBubble.xib b/Library/UI Components/Message/MessageView/AudioMessageBubble/RightAudioMessageBubble/RightAudioMessageBubble.xib new file mode 100755 index 00000000..251b28b0 --- /dev/null +++ b/Library/UI Components/Message/MessageView/AudioMessageBubble/RightAudioMessageBubble/RightAudioMessageBubble.xib @@ -0,0 +1,156 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/Library/UI Components/Message/MessageView/FileMessageBubble/RightFileMessageBubble/RightFileMessageBubble.xib b/Library/UI Components/Message/MessageView/FileMessageBubble/RightFileMessageBubble/RightFileMessageBubble.xib index 03d4f47d..680a261a 100755 --- a/Library/UI Components/Message/MessageView/FileMessageBubble/RightFileMessageBubble/RightFileMessageBubble.xib +++ b/Library/UI Components/Message/MessageView/FileMessageBubble/RightFileMessageBubble/RightFileMessageBubble.xib @@ -1,16 +1,16 @@ - + - + - + @@ -26,6 +26,7 @@ - + diff --git a/Library/UI Components/Message/MessageView/TextMessageBubble/LeftTextMessageBubble/LeftTextMessageBubble.xib b/Library/UI 
Components/Message/MessageView/TextMessageBubble/LeftTextMessageBubble/LeftTextMessageBubble.xib index c0dc2ccd..fd3171a5 100755 --- a/Library/UI Components/Message/MessageView/TextMessageBubble/LeftTextMessageBubble/LeftTextMessageBubble.xib +++ b/Library/UI Components/Message/MessageView/TextMessageBubble/LeftTextMessageBubble/LeftTextMessageBubble.xib @@ -1,9 +1,9 @@ - + - + @@ -18,7 +18,7 @@ - + @@ -59,7 +59,7 @@ - + @@ -85,7 +85,7 @@ - + @@ -116,7 +116,7 @@ - + diff --git a/Library/UI Components/Message/MessageView/TextMessageBubble/RightTextMessageBubble/RightTextMessageBubble.xib b/Library/UI Components/Message/MessageView/TextMessageBubble/RightTextMessageBubble/RightTextMessageBubble.xib index 80655db2..2a4bf006 100755 --- a/Library/UI Components/Message/MessageView/TextMessageBubble/RightTextMessageBubble/RightTextMessageBubble.xib +++ b/Library/UI Components/Message/MessageView/TextMessageBubble/RightTextMessageBubble/RightTextMessageBubble.xib @@ -1,9 +1,9 @@ - + - + @@ -15,7 +15,7 @@ - + @@ -45,7 +45,7 @@ - + @@ -71,7 +71,7 @@ - + diff --git a/Library/UI Screens/Messages/MessageList/CometChatMessageList.swift b/Library/UI Screens/Messages/MessageList/CometChatMessageList.swift index 3d1d1ea5..476f605c 100755 --- a/Library/UI Screens/Messages/MessageList/CometChatMessageList.swift +++ b/Library/UI Screens/Messages/MessageList/CometChatMessageList.swift @@ -40,10 +40,53 @@ enum CometChatExtension { /* ----------------------------------------------------------------------------------------- */ -public class CometChatMessageList: UIViewController { +public class CometChatMessageList: UIViewController, AVAudioRecorderDelegate, AVAudioPlayerDelegate { + + enum AudioRecodingState { + case ready + case recording + case recorded + case playing + case paused + + var buttonImage: UIImage { + switch self { + case .ready, .recording: + if #available(iOS 13.0, *) { + return UIImage(systemName: "pause.fill") ?? #imageLiteral(resourceName: "play") + } else {} + case .recorded, .paused: + if #available(iOS 13.0, *) { + return UIImage(systemName: "play.fill") ?? #imageLiteral(resourceName: "play") + } else {} + case .playing: + if #available(iOS 13.0, *) { + return UIImage(systemName: "pause.fill") ?? #imageLiteral(resourceName: "play") + } else {} + } + return #imageLiteral(resourceName: "microphone") + } + + var audioVisualizationMode: AudioVisualizationView.AudioVisualizationMode { + switch self { + case .ready, .recording: + return .write + case .paused, .playing, .recorded: + return .read + } + } + } // MARK: - Declaration of Outlets + @IBOutlet weak var microhone: UIButton! + @IBOutlet weak var audioNotePauseButton: UIButton! + @IBOutlet weak var audioNoteSendButton: UIButton! + @IBOutlet weak var audioNoteDeleteButton: UIButton! + @IBOutlet weak var audioNoteActionView: UIView! + @IBOutlet weak var audioNoteTimer: UILabel! + @IBOutlet weak var audioNoteView: UIView! + @IBOutlet private var audioVisualizationView: AudioVisualizationView! @IBOutlet weak var tableView: UITableView? @IBOutlet weak var chatView: ChatView! @IBOutlet weak var messageActionView: UIView! @@ -88,11 +131,31 @@ public class CometChatMessageList: UIViewController { var isGroupIs : Bool = false var refreshControl: UIRefreshControl! var membersCount:String? + var totalHour = Int() + var totalMinut = Int() + var totalSecond = 0 + var timer:Timer? + var isTimerRunning = false var messageMode: MessageMode = .send var selectedIndexPath: IndexPath? var selectedMessage: BaseMessage? 
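// Audio-note recording state: the elapsed-time counters and Timer above, plus the AVAudioRecorder,
// AVAudioPlayer, ViewModel and Chronometer declared below, track recording and playback of voice notes.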
lazy var previewItem = NSURL() var quickLook = QLPreviewController() + var soundRecorder : AVAudioRecorder! + var soundPlayer : AVAudioPlayer! + var isAudioPaused : Bool = false + private let viewModel = ViewModel() + var audioURL:URL? + var fileName : String? + private var chronometer: Chronometer? + + private var currentState: AudioRecodingState = .ready { + didSet { + self.audioNotePauseButton.setImage(self.currentState.buttonImage, for: .normal) + self.audioVisualizationView.audioVisualizationMode = self.currentState.audioVisualizationMode + } + } + let documentPicker: UIDocumentPickerViewController = UIDocumentPickerViewController(documentTypes: ["public.data","public.content","public.audiovisual-content","public.movie","public.audiovisual-content","public.video","public.audio","public.data","public.zip-archive","com.pkware.zip-archive","public.composite-content","public.text"], in: UIDocumentPickerMode.import) // MARK: - View controller lifecycle methods @@ -105,6 +168,7 @@ public class CometChatMessageList: UIViewController { registerCells() setupChatView() setupKeyboard() + setupRecorder() self.addObsevers() } @@ -126,7 +190,7 @@ public class CometChatMessageList: UIViewController { - See Also: [CometChatMessageList Documentation](https://prodocs.cometchat.com/docs/ios-ui-screens#section-4-comet-chat-message-list) */ - @objc public func set(conversationWith: AppEntity, type: CometChat.ReceiverType){ + @objc public func set(conversationWith: AppEntity, type: CometChat.ReceiverType){ switch type { case .user: isGroupIs = false @@ -168,18 +232,18 @@ public class CometChatMessageList: UIViewController { /** - This method group the new message as per timestamp and append it on UI - - Parameters: - - messages: Specifies the group of message containing same timestamp. - - Author: CometChat Team - - Copyright: © 2019 CometChat Inc. - - See Also: - [CometChatMessageList Documentation](https://prodocs.cometchat.com/docs/ios-ui-screens#section-4-comet-chat-message-list) - */ + This method group the new message as per timestamp and append it on UI + - Parameters: + - messages: Specifies the group of message containing same timestamp. + - Author: CometChat Team + - Copyright: © 2019 CometChat Inc. + - See Also: + [CometChatMessageList Documentation](https://prodocs.cometchat.com/docs/ios-ui-screens#section-4-comet-chat-message-list) + */ private func addNewGroupedMessage(messages: [BaseMessage]){ DispatchQueue.main.async { - if messages.isEmpty { self.tableView?.setEmptyMessage("No Messages Found.") - }else{ self.tableView?.restore() } + if messages.isEmpty { self.tableView?.setEmptyMessage("No Messages Found.") + }else{ self.tableView?.restore() } } let groupedMessages = Dictionary(grouping: messages) { (element) -> Date in let date = Date(timeIntervalSince1970: TimeInterval(element.sentAt)) @@ -205,18 +269,18 @@ public class CometChatMessageList: UIViewController { } /** - This method groups the messages as per timestamp. - - Parameters: - - messages: Specifies the group of message containing same timestamp. - - Author: CometChat Team - - Copyright: © 2019 CometChat Inc. - - See Also: - [CometChatMessageList Documentation](https://prodocs.cometchat.com/docs/ios-ui-screens#section-4-comet-chat-message-list) - */ + This method groups the messages as per timestamp. + - Parameters: + - messages: Specifies the group of message containing same timestamp. + - Author: CometChat Team + - Copyright: © 2019 CometChat Inc. 
+ - See Also: + [CometChatMessageList Documentation](https://prodocs.cometchat.com/docs/ios-ui-screens#section-4-comet-chat-message-list) + */ private func groupMessages(messages: [BaseMessage]){ DispatchQueue.main.async { - if messages.isEmpty { self.tableView?.setEmptyMessage("No Messages Found.") - }else{ self.tableView?.restore() } + if messages.isEmpty { self.tableView?.setEmptyMessage("No Messages Found.") + }else{ self.tableView?.restore() } } let groupedMessages = Dictionary(grouping: messages) { (element) -> Date in let date = Date(timeIntervalSince1970: TimeInterval(element.sentAt)) @@ -236,14 +300,14 @@ public class CometChatMessageList: UIViewController { } /** - This method groups the previous messages as per timestamp. - - Parameters: - - messages: Specifies the group of message containing same timestamp. - - Author: CometChat Team - - Copyright: © 2019 CometChat Inc. - - See Also: - [CometChatMessageList Documentation](https://prodocs.cometchat.com/docs/ios-ui-screens#section-4-comet-chat-message-list) - */ + This method groups the previous messages as per timestamp. + - Parameters: + - messages: Specifies the group of message containing same timestamp. + - Author: CometChat Team + - Copyright: © 2019 CometChat Inc. + - See Also: + [CometChatMessageList Documentation](https://prodocs.cometchat.com/docs/ios-ui-screens#section-4-comet-chat-message-list) + */ private func groupPreviousMessages(messages: [BaseMessage]){ let groupedMessages = Dictionary(grouping: messages) { (element) -> Date in let date = Date(timeIntervalSince1970: TimeInterval(element.sentAt)) @@ -279,9 +343,10 @@ public class CometChatMessageList: UIViewController { } } guard let messages = fetchedMessages?.filter({ - ($0 as? TextMessage != nil && $0.messageType == .text) || + ($0 as? TextMessage != nil && $0.messageType == .text) || ($0 as? MediaMessage != nil && $0.messageType == .image) || ($0 as? MediaMessage != nil && $0.messageType == .video) || + ($0 as? MediaMessage != nil && $0.messageType == .audio) || ($0 as? MediaMessage != nil && $0.messageType == .file) || ($0 as? Call != nil && ($0 as? Call)?.callStatus == .initiated) || ($0 as? Call != nil && ($0 as? Call)?.callStatus == .unanswered) || @@ -304,7 +369,7 @@ public class CometChatMessageList: UIViewController { }) { (error) in DispatchQueue.main.async { if let errorMessage = error?.errorDescription { - let snackbar: CometChatSnackbar = CometChatSnackbar.init(message: errorMessage, duration: .short) + let snackbar: CometChatSnackbar = CometChatSnackbar.init(message: errorMessage, duration: .short) snackbar.show() } self.refreshControl.endRefreshing() @@ -332,8 +397,9 @@ public class CometChatMessageList: UIViewController { messageRequest = MessagesRequest.MessageRequestBuilder().set(uid: forID).set(limit: 30).build() messageRequest?.fetchPrevious(onSuccess: { (fetchedMessages) in guard let messages = fetchedMessages?.filter({ - ($0 as? TextMessage != nil && $0.messageType == .text) || + ($0 as? TextMessage != nil && $0.messageType == .text) || ($0 as? MediaMessage != nil && $0.messageType == .image) || + ($0 as? MediaMessage != nil && $0.messageType == .audio) || ($0 as? MediaMessage != nil && $0.messageType == .video) || ($0 as? MediaMessage != nil && $0.messageType == .file) || ($0 as? Call != nil && ($0 as? 
Call)?.callStatus == .initiated) || @@ -365,7 +431,7 @@ public class CometChatMessageList: UIViewController { }, onError: { (error) in DispatchQueue.main.async { if let errorMessage = error?.errorDescription { - let snackbar: CometChatSnackbar = CometChatSnackbar.init(message: errorMessage, duration: .short) + let snackbar: CometChatSnackbar = CometChatSnackbar.init(message: errorMessage, duration: .short) snackbar.show() } } @@ -375,9 +441,10 @@ public class CometChatMessageList: UIViewController { case .group: messageRequest = MessagesRequest.MessageRequestBuilder().set(guid: forID).set(limit: 30).build() messageRequest?.fetchPrevious(onSuccess: { (fetchedMessages) in - guard let messages = fetchedMessages?.filter({ - ($0 as? TextMessage != nil && $0.messageType == .text) || + guard let messages = fetchedMessages?.filter({ + ($0 as? TextMessage != nil && $0.messageType == .text) || ($0 as? MediaMessage != nil && $0.messageType == .image) || + ($0 as? MediaMessage != nil && $0.messageType == .audio) || ($0 as? MediaMessage != nil && $0.messageType == .video) || ($0 as? MediaMessage != nil && $0.messageType == .file) || ($0 as? Call != nil && ($0 as? Call)?.callStatus == .initiated) || @@ -409,7 +476,7 @@ public class CometChatMessageList: UIViewController { }, onError: { (error) in DispatchQueue.main.async { if let errorMessage = error?.errorDescription { - let snackbar: CometChatSnackbar = CometChatSnackbar.init(message: errorMessage, duration: .short) + let snackbar: CometChatSnackbar = CometChatSnackbar.init(message: errorMessage, duration: .short) snackbar.show() } } @@ -552,7 +619,7 @@ public class CometChatMessageList: UIViewController { [CometChatMessageList Documentation](https://prodocs.cometchat.com/docs/ios-ui-screens#section-4-comet-chat-message-list) */ private func setupSuperview() { - UIFont.loadAllFonts(bundleIdentifierString: Bundle.main.bundleIdentifier ?? "") +// UIFont.loadAllFonts(bundleIdentifierString: Bundle.main.bundleIdentifier ?? "") let bundle = Bundle(for: type(of: self)) let nib = UINib(nibName: "CometChatMessageList", bundle: bundle) let view = nib.instantiate(withOwner: self, options: nil)[0] as! UIView @@ -568,7 +635,6 @@ public class CometChatMessageList: UIViewController { [CometChatMessageList Documentation](https://prodocs.cometchat.com/docs/ios-ui-screens#section-4-comet-chat-message-list) */ private func setupDelegates(){ - CometChat.messagedelegate = self CometChat.userdelegate = self CometChat.groupdelegate = self @@ -576,6 +642,21 @@ public class CometChatMessageList: UIViewController { smartRepliesView.smartRepliesDelegate = self quickLook.dataSource = self } + + + func setupRecorder(){ + self.viewModel.askAudioRecordingPermission() + self.viewModel.audioMeteringLevelUpdate = { [weak self] meteringLevel in + guard let self = self, self.audioVisualizationView.audioVisualizationMode == .write else { + return + } + self.audioVisualizationView.add(meteringLevel: meteringLevel) + } + self.viewModel.audioDidFinish = { [weak self] in + self?.currentState = .recorded + self?.audioVisualizationView.stop() + } + } /** This method observers for the notifications of certain events. @@ -630,7 +711,7 @@ public class CometChatMessageList: UIViewController { if let name = notification.userInfo?["name"] as? 
String { self.hide(view: .blockedView, false) blockedMessage.text = - NSLocalizedString("YOU'VE_BLOCKED", comment: "") + "\(String(describing: name.capitalized))" + NSLocalizedString("YOU'VE_BLOCKED", comment: "") + "\(String(describing: name.capitalized))" } } @@ -742,6 +823,15 @@ public class CometChatMessageList: UIViewController { [CometChatMessageList Documentation](https://prodocs.cometchat.com/docs/ios-ui-screens#section-4-comet-chat-message-list) */ @IBAction func didBackButtonPressed(_ sender: UIButton) { + if currentState == .playing { + do { + try self.viewModel.pausePlaying() + self.currentState = .paused + self.audioVisualizationView.pause() + } catch { + self.showAlert(with: error) + } + } switch self.isModal() { case true: self.dismiss(animated: true, completion: nil) @@ -756,12 +846,110 @@ public class CometChatMessageList: UIViewController { } CometChat.endTyping(indicator: indicator) } + } @IBAction func didCancelButtonPressed(_ sender: UIButton) { self.didPreformCancel() } + @IBAction func didAudioNoteDeletePressed(_ sender: UIButton) { + if currentState == .playing { + do { + try self.viewModel.pausePlaying() + self.currentState = .paused + self.audioVisualizationView.pause() + } catch { + self.showAlert(with: error) + } + } + do { + try self.viewModel.resetRecording() + self.audioVisualizationView.reset() + self.currentState = .ready + } catch { + self.showAlert(with: error) + } + + UIView.transition(with: view, duration: 0.5, options: .transitionCrossDissolve, animations: { + AudioServicesPlayAlertSound(SystemSoundID(1519)) + self.audioNoteView.isHidden = true + self.audioNoteActionView.isHidden = true + if #available(iOS 13.0, *) { + self.audioNotePauseButton.setImage(UIImage(systemName: "pause.fill"), for: .normal) + } else { + // Fallback on earlier versions + } + self.isAudioPaused = false + }) + } + + @IBAction func didAudioNoteSendPressed(_ sender: Any) { + if currentState == .playing { + do { + try self.viewModel.pausePlaying() + self.currentState = .paused + self.audioVisualizationView.pause() + } catch { + self.showAlert(with: error) + } + } + if let url = self.viewModel.currentAudioRecord?.audioFilePathLocal?.absoluteURL { + let newURL = "file://" + url.absoluteString + self.sendMedia(withURL: newURL, type: .audio) + UIView.transition(with: view, duration: 0.5, options: .transitionCrossDissolve, animations: { + AudioServicesPlayAlertSound(SystemSoundID(1519)) + self.audioNoteView.isHidden = true + self.audioNoteActionView.isHidden = true + }) + if #available(iOS 13.0, *) { + audioNotePauseButton.setImage(UIImage(systemName: "pause.fill"), for: .normal) + } else { + // Fallback on earlier versions + } + self.isAudioPaused = false + } + } + + + @IBAction func didAudioNotePausePressed(_ sender: UIButton) { + switch self.currentState { + case .recording: + self.chronometer?.stop() + self.chronometer = nil + self.viewModel.currentAudioRecord!.meteringLevels = self.audioVisualizationView.scaleSoundDataToFitScreen() + self.audioVisualizationView.audioVisualizationMode = .read + + do { + try self.viewModel.stopRecording() + self.currentState = .recorded + } catch { + self.currentState = .ready + self.showAlert(with: error) + } + case .recorded, .paused: + do { + let duration = try self.viewModel.startPlaying() + self.currentState = .playing + self.audioVisualizationView.meteringLevels = self.viewModel.currentAudioRecord!.meteringLevels + self.audioVisualizationView.play(for: duration) + } catch { + self.showAlert(with: error) + } + case .playing: + do { + try 
self.viewModel.pausePlaying() + self.currentState = .paused + self.audioVisualizationView.pause() + } catch { + self.showAlert(with: error) + } + default: + break + } + } + + private func didPreformCancel(){ self.selectedMessages.removeAll() self.selectedMessage = nil @@ -827,36 +1015,36 @@ public class CometChatMessageList: UIViewController { @IBAction func didCopyButtonPressed(_ sender: UIButton) { - if selectedMessages.isEmpty { + if selectedMessages.isEmpty { DispatchQueue.main.async { let snackbar: CometChatSnackbar = CometChatSnackbar.init(message: NSLocalizedString("SELECT_A_MESSGE", comment: ""), duration: .short) snackbar.show() } }else{ - var messages = [String]() - for message in selectedMessages { - let name = message.sender?.name?.capitalized ?? "" - let time = String().setMessageTime(time: Int(message.sentAt)) - var messageText = "" - switch message.messageType { - case .text: messageText = (message as? TextMessage)?.text ?? "" - case .image: messageText = (message as? MediaMessage)?.attachment?.fileUrl ?? "" - case .video: messageText = (message as? MediaMessage)?.attachment?.fileUrl ?? "" - case .file: messageText = (message as? MediaMessage)?.attachment?.fileUrl ?? "" - case .custom: messageText = NSLocalizedString("CUSTOM_MESSAGE", comment: "") - case .audio: messageText = (message as? MediaMessage)?.attachment?.fileUrl ?? "" - case .groupMember: break - @unknown default: break + var messages = [String]() + for message in selectedMessages { + let name = message.sender?.name?.capitalized ?? "" + let time = String().setMessageTime(time: Int(message.sentAt)) + var messageText = "" + switch message.messageType { + case .text: messageText = (message as? TextMessage)?.text ?? "" + case .image: messageText = (message as? MediaMessage)?.attachment?.fileUrl ?? "" + case .video: messageText = (message as? MediaMessage)?.attachment?.fileUrl ?? "" + case .file: messageText = (message as? MediaMessage)?.attachment?.fileUrl ?? "" + case .custom: messageText = NSLocalizedString("CUSTOM_MESSAGE", comment: "") + case .audio: messageText = (message as? MediaMessage)?.attachment?.fileUrl ?? "" + case .groupMember: break + @unknown default: break + } + let message = name + "[\(time)]" + ": " + messageText + messages.append(message) + } + UIPasteboard.general.string = messages.joined(separator: "\n\n") + DispatchQueue.main.async { + let snackbar: CometChatSnackbar = CometChatSnackbar.init(message: NSLocalizedString("TEXT_COPIED", comment: ""), duration: .short) + snackbar.show() + self.didPreformCancel() } - let message = name + "[\(time)]" + ": " + messageText - messages.append(message) - } - UIPasteboard.general.string = messages.joined(separator: "\n\n") - DispatchQueue.main.async { - let snackbar: CometChatSnackbar = CometChatSnackbar.init(message: NSLocalizedString("TEXT_COPIED", comment: ""), duration: .short) - snackbar.show() - self.didPreformCancel() - } } } /** @@ -902,6 +1090,7 @@ public class CometChatMessageList: UIViewController { if sender.state == .began { let touchPoint = sender.location(in: self.tableView) if let indexPath = tableView?.indexPathForRow(at: touchPoint) { + self.selectedIndexPath = indexPath tableView?.isEditing = true self.addBackButton(bool: false) @@ -922,6 +1111,16 @@ public class CometChatMessageList: UIViewController { forwardButton.isHidden = false messageActionView.isHidden = false } + + if let selectedCell = tableView?.cellForRow(at: indexPath) as? 
RightAudioMessageBubble { + AudioServicesPlayAlertSound(SystemSoundID(kSystemSoundID_Vibrate)) + self.selectedMessage = selectedCell.audioMessage + editButton.isHidden = true + deleteButton.isHidden = false + forwardButton.isHidden = false + messageActionView.isHidden = false + } + if let selectedCell = tableView?.cellForRow(at: indexPath) as? RightLinkPreviewBubble { AudioServicesPlayAlertSound(SystemSoundID(kSystemSoundID_Vibrate)) self.selectedMessage = selectedCell.linkPreviewMessage @@ -970,6 +1169,15 @@ public class CometChatMessageList: UIViewController { forwardButton.isHidden = false messageActionView.isHidden = false } + + if let selectedCell = tableView?.cellForRow(at: indexPath) as? LeftAudioMessageBubble { + AudioServicesPlayAlertSound(SystemSoundID(kSystemSoundID_Vibrate)) + self.selectedMessage = selectedCell.audioMessage + editButton.isHidden = true + deleteButton.isHidden = true + forwardButton.isHidden = false + messageActionView.isHidden = false + } if (tableView?.cellForRow(at: indexPath) as? ActionMessageBubble) != nil { editButton.isHidden = true deleteButton.isHidden = true @@ -980,6 +1188,95 @@ public class CometChatMessageList: UIViewController { } } + /** + This method triggers when user pressed microphone button in Chat View. + - Author: CometChat Team + - Copyright: © 2020 CometChat Inc. + - See Also: + [CometChatMessageList Documentation](https://prodocs.cometchat.com/docs/ios-ui-screens#section-4-comet-chat-message-list) + */ + @objc func didLongPressedOnMicrophone(sender: UILongPressGestureRecognizer){ + if sender.state == .began { + self.audioNoteView.isHidden = false + self.audioNoteActionView.isHidden = false + if self.currentState == .ready { + AudioServicesPlayAlertSound(SystemSoundID(kSystemSoundID_Vibrate)) + self.viewModel.startRecording { [weak self] soundRecord, error in + if let error = error { + self?.showAlert(with: error) + return + } + self?.audioNoteDeleteButton.tintColor = .systemGray + self?.currentState = .recording + self?.chronometer = Chronometer() + self?.chronometer?.start() + self?.startTimer() + } + } + }else if sender.state == .ended { + switch self.currentState { + case .recording: + AudioServicesPlayAlertSound(SystemSoundID(kSystemSoundID_Vibrate)) + self.chronometer?.stop() + self.chronometer = nil + self.audioNoteDeleteButton.tintColor = .systemRed + self.viewModel.currentAudioRecord!.meteringLevels = self.audioVisualizationView.scaleSoundDataToFitScreen() + self.audioVisualizationView.audioVisualizationMode = .read + do { + try self.viewModel.stopRecording() + self.currentState = .recorded + } catch { + self.currentState = .ready + self.showAlert(with: error) + } + case .recorded, .paused: + do { + self.totalSecond = 0 + self.timer?.invalidate() + self.audioNoteDeleteButton.tintColor = UIColor.systemGray + let duration = try self.viewModel.startPlaying() + self.currentState = .playing + self.audioVisualizationView.meteringLevels = self.viewModel.currentAudioRecord!.meteringLevels + self.audioVisualizationView.play(for: duration) + } catch { + self.showAlert(with: error) + } + case .playing: + do { + self.totalSecond = 0 + self.timer?.invalidate() + self.audioNoteDeleteButton.tintColor = UIColor.systemGray + try self.viewModel.pausePlaying() + self.currentState = .paused + self.audioVisualizationView.pause() + } catch { + self.showAlert(with: error) + } + default: + break + } + } + } + + func startTimer(){ + self.audioNoteTimer.text = "" + timer?.invalidate() + self.totalSecond = 0 + timer = Timer.scheduledTimer(timeInterval: 1, 
target: self, selector: #selector(countdown), userInfo: nil, repeats: true) + } + + @objc func countdown() { + var hours: Int + var minutes: Int + var seconds: Int + hours = totalSecond / 3600 + minutes = totalSecond / 60 + seconds = totalSecond % 60 + totalSecond = totalSecond + 1 + if currentState == .recording{ + audioNoteTimer.text = "\(hours):\(minutes):\(seconds)" + } + } /** This method setup the tableview to load CometChatMessageList. - Author: CometChat Team @@ -997,6 +1294,10 @@ public class CometChatMessageList: UIViewController { // Added Long Press let longPressOnMessage = UILongPressGestureRecognizer(target: self, action: #selector(didLongPressedOnMessage)) tableView?.addGestureRecognizer(longPressOnMessage) + + let longPressOnMicrophone = UILongPressGestureRecognizer(target: self, action: #selector(didLongPressedOnMicrophone)) + microhone.addGestureRecognizer(longPressOnMicrophone) + microhone.isUserInteractionEnabled = true } @@ -1033,6 +1334,12 @@ public class CometChatMessageList: UIViewController { let rightFileMessageBubble = UINib.init(nibName: "RightFileMessageBubble", bundle: nil) self.tableView?.register(rightFileMessageBubble, forCellReuseIdentifier: "rightFileMessageBubble") + let leftAudioMessageBubble = UINib.init(nibName: "LeftAudioMessageBubble", bundle: nil) + self.tableView?.register(leftAudioMessageBubble, forCellReuseIdentifier: "leftAudioMessageBubble") + + let rightAudioMessageBubble = UINib.init(nibName: "RightAudioMessageBubble", bundle: nil) + self.tableView?.register(rightAudioMessageBubble, forCellReuseIdentifier: "rightAudioMessageBubble") + let actionMessageBubble = UINib.init(nibName: "ActionMessageBubble", bundle: nil) self.tableView?.register(actionMessageBubble, forCellReuseIdentifier: "actionMessageBubble") @@ -1206,7 +1513,7 @@ public class CometChatMessageList: UIViewController { }) { (error) in DispatchQueue.main.async { if let errorMessage = error?.errorDescription { - let snackbar: CometChatSnackbar = CometChatSnackbar.init(message: errorMessage, duration: .short) + let snackbar: CometChatSnackbar = CometChatSnackbar.init(message: errorMessage, duration: .short) snackbar.show() } } @@ -1233,25 +1540,25 @@ public class CometChatMessageList: UIViewController { }else{ guard let message = selectedMessages.first else { return } guard let indexPath = selectedIndexPath else { return } - CometChat.delete(messageId: message.id, onSuccess: { (deletedMessage) in - let textMessage:BaseMessage = (deletedMessage as? ActionMessage)?.actionOn as! BaseMessage - if let row = self.chatMessages[indexPath.section].firstIndex(where: {$0.id == textMessage.id}) { - self.chatMessages[indexPath.section][row] = textMessage - } - DispatchQueue.main.async { - self.tableView?.reloadRows(at: [indexPath], with: .automatic) - self.didPreformCancel() - } - }) { (error) in - DispatchQueue.main.async { - let errorMessage = error.errorDescription - let snackbar: CometChatSnackbar = CometChatSnackbar.init(message: errorMessage, duration: .short) - snackbar.show() - self.didPreformCancel() + CometChat.delete(messageId: message.id, onSuccess: { (deletedMessage) in + let textMessage:BaseMessage = (deletedMessage as? ActionMessage)?.actionOn as! 
BaseMessage + if let row = self.chatMessages[indexPath.section].firstIndex(where: {$0.id == textMessage.id}) { + self.chatMessages[indexPath.section][row] = textMessage + } + DispatchQueue.main.async { + self.tableView?.reloadRows(at: [indexPath], with: .automatic) + self.didPreformCancel() + } + }) { (error) in + DispatchQueue.main.async { + let errorMessage = error.errorDescription + let snackbar: CometChatSnackbar = CometChatSnackbar.init(message: errorMessage, duration: .short) + snackbar.show() + self.didPreformCancel() + } + self.selectedIndexPath = nil + print("unable to delete message: \(error.errorDescription)") } - self.selectedIndexPath = nil - print("unable to delete message: \(error.errorDescription)") - } } } @@ -1289,33 +1596,33 @@ public class CometChatMessageList: UIViewController { } }else{ if let message = selectedMessages.first { - var textToShare = "" - messageActionView.isHidden = true - if message.messageType == .text { - if message.receiverType == .user{ - textToShare = (message as? TextMessage)?.text ?? "" - }else{ - if let name = (message as? TextMessage)?.sender?.name , let text = (message as? TextMessage)?.text { - textToShare = name + " : " + text + var textToShare = "" + messageActionView.isHidden = true + if message.messageType == .text { + if message.receiverType == .user{ + textToShare = (message as? TextMessage)?.text ?? "" + }else{ + if let name = (message as? TextMessage)?.sender?.name , let text = (message as? TextMessage)?.text { + textToShare = name + " : " + text + } } - } - }else if message.messageType == .audio || message.messageType == .file || message.messageType == .image || message.messageType == .video { - - if message.receiverType == .user{ - textToShare = (message as? MediaMessage)?.attachment?.fileUrl ?? "" - }else{ - if let name = (message as? MediaMessage)?.sender?.name, let url = (message as? MediaMessage)?.attachment?.fileUrl { - textToShare = name + " : " + url + }else if message.messageType == .audio || message.messageType == .file || message.messageType == .image || message.messageType == .video { + + if message.receiverType == .user{ + textToShare = (message as? MediaMessage)?.attachment?.fileUrl ?? "" + }else{ + if let name = (message as? MediaMessage)?.sender?.name, let url = (message as? 
MediaMessage)?.attachment?.fileUrl { + textToShare = name + " : " + url + } } } + let sendItems = [ textToShare] + let activityViewController = UIActivityViewController(activityItems: sendItems, applicationActivities: nil) + activityViewController.popoverPresentationController?.sourceView = self.view // so that iPads won't crash + activityViewController.excludedActivityTypes = [.airDrop] + self.present(activityViewController, animated: true, completion: nil) + self.didPreformCancel() } - let sendItems = [ textToShare] - let activityViewController = UIActivityViewController(activityItems: sendItems, applicationActivities: nil) - activityViewController.popoverPresentationController?.sourceView = self.view // so that iPads won't crash - activityViewController.excludedActivityTypes = [.airDrop] - self.present(activityViewController, animated: true, completion: nil) - self.didPreformCancel() - } } } @@ -1364,8 +1671,6 @@ public class CometChatMessageList: UIViewController { } - - } /* ----------------------------------------------------------------------------------------- */ @@ -1417,7 +1722,7 @@ extension CometChatMessageList: UIDocumentPickerDelegate { DispatchQueue.main.async { if let errorMessage = error?.errorDescription { - let snackbar: CometChatSnackbar = CometChatSnackbar.init(message: errorMessage, duration: .short) + let snackbar: CometChatSnackbar = CometChatSnackbar.init(message: errorMessage, duration: .short) snackbar.show() } } @@ -1451,7 +1756,7 @@ extension CometChatMessageList: UIDocumentPickerDelegate { }) { (error) in DispatchQueue.main.async { if let errorMessage = error?.errorDescription { - let snackbar: CometChatSnackbar = CometChatSnackbar.init(message: errorMessage, duration: .short) + let snackbar: CometChatSnackbar = CometChatSnackbar.init(message: errorMessage, duration: .short) snackbar.show() } } @@ -1641,7 +1946,7 @@ extension CometChatMessageList: UITableViewDelegate , UITableViewDataSource { } case .video where message.senderUid == CometChat.getLoggedInUser()?.uid: - if let videoMessage = message as? MediaMessage { + if let videoMessage = message as? MediaMessage { let senderCell = tableView.dequeueReusableCell(withIdentifier: "rightVideoMessageBubble", for: indexPath) as! RightVideoMessageBubble senderCell.mediaMessage = videoMessage if chatMessages[indexPath.section][safe: indexPath.row] == filteredMessages.last || tableView.isLast(for: indexPath){ @@ -1651,11 +1956,23 @@ extension CometChatMessageList: UITableViewDelegate , UITableViewDataSource { } return senderCell } - case .audio: - if message is MediaMessage { - let audioMessageCell = tableView.dequeueReusableCell(withIdentifier: "actionMessageBubble", for: indexPath) as! ActionMessageBubble - audioMessageCell.message.text = "Audio Message" - return audioMessageCell + case .audio where message.senderUid != CometChat.getLoggedInUser()?.uid: + + if let audioMessage = message as? MediaMessage { + let receiverCell = tableView.dequeueReusableCell(withIdentifier: "leftAudioMessageBubble", for: indexPath) as! LeftAudioMessageBubble + receiverCell.audioMessage = audioMessage + return receiverCell + } + case .audio where message.senderUid == CometChat.getLoggedInUser()?.uid: + if let audioMessage = message as? MediaMessage { + let senderCell = tableView.dequeueReusableCell(withIdentifier: "rightAudioMessageBubble", for: indexPath) as! 
RightAudioMessageBubble + senderCell.audioMessage = audioMessage + if chatMessages[indexPath.section][safe: indexPath.row] == filteredMessages.last || tableView.isLast(for: indexPath){ + senderCell.receiptStack.isHidden = false + }else{ + senderCell.receiptStack.isHidden = true + } + return senderCell } case .file where message.senderUid != CometChat.getLoggedInUser()?.uid: @@ -1665,7 +1982,7 @@ extension CometChatMessageList: UITableViewDelegate , UITableViewDataSource { return receiverCell } case .file where message.senderUid == CometChat.getLoggedInUser()?.uid: - if let fileMessage = message as? MediaMessage { + if let fileMessage = message as? MediaMessage { let senderCell = tableView.dequeueReusableCell(withIdentifier: "rightFileMessageBubble", for: indexPath) as! RightFileMessageBubble senderCell.fileMessage = fileMessage if chatMessages[indexPath.section][safe: indexPath.row] == filteredMessages.last || tableView.isLast(for: indexPath){ @@ -1858,6 +2175,42 @@ extension CometChatMessageList: UITableViewDelegate , UITableViewDataSource { } } + if let selectedCell = tableView.cellForRow(at: indexPath) as? RightAudioMessageBubble { + selectedCell.receiptStack.isHidden = false + if tableView.isEditing == true{ + if !self.selectedMessages.contains(selectedCell.audioMessage) { + self.selectedMessages.append(selectedCell.audioMessage) + } + }else{ + self.previewMediaMessage(url: selectedCell.audioMessage?.attachment?.fileUrl ?? "", completion: {(success, fileURL) in + if success { + if let url = fileURL { + self.previewItem = url as NSURL + self.presentQuickLook() + } + } + }) + } + } + + if let selectedCell = tableView.cellForRow(at: indexPath) as? LeftAudioMessageBubble { + selectedCell.receiptStack.isHidden = false + if tableView.isEditing == true{ + if !self.selectedMessages.contains(selectedCell.audioMessage) { + self.selectedMessages.append(selectedCell.audioMessage) + } + }else{ + self.previewMediaMessage(url: selectedCell.audioMessage?.attachment?.fileUrl ?? "", completion: {(success, fileURL) in + if success { + if let url = fileURL { + self.previewItem = url as NSURL + self.presentQuickLook() + } + } + }) + } + } + if let selectedCell = tableView.cellForRow(at: indexPath) as? LeftLinkPreviewBubble { if tableView.isEditing == true{ if !self.selectedMessages.contains(selectedCell.linkPreviewMessage) { @@ -1915,7 +2268,7 @@ extension CometChatMessageList: UITableViewDelegate , UITableViewDataSource { self.selectedMessages.remove(at: index) } } - + } if let selectedCell = tableView.cellForRow(at: indexPath) as? RightTextMessageBubble { @@ -1925,7 +2278,7 @@ extension CometChatMessageList: UITableViewDelegate , UITableViewDataSource { self.selectedMessages.remove(at: index) } } - + } if let selectedCell = tableView.cellForRow(at: indexPath) as? LeftTextMessageBubble { selectedCell.receiptStack.isHidden = true @@ -1934,7 +2287,7 @@ extension CometChatMessageList: UITableViewDelegate , UITableViewDataSource { self.selectedMessages.remove(at: index) } } - + } if let selectedCell = tableView.cellForRow(at: indexPath) as? RightImageMessageBubble { @@ -1990,6 +2343,24 @@ extension CometChatMessageList: UITableViewDelegate , UITableViewDataSource { } } } + + if let selectedCell = tableView.cellForRow(at: indexPath) as? 
RightAudioMessageBubble { + selectedCell.receiptStack.isHidden = true + if self.selectedMessages.contains(selectedCell.audioMessage) { + if let index = self.selectedMessages.firstIndex(where: { $0.id == selectedCell.audioMessage.id }) { + self.selectedMessages.remove(at: index) + } + } + } + + if let selectedCell = tableView.cellForRow(at: indexPath) as? LeftAudioMessageBubble { + selectedCell.receiptStack.isHidden = true + if self.selectedMessages.contains(selectedCell.audioMessage) { + if let index = self.selectedMessages.firstIndex(where: { $0.id == selectedCell.audioMessage.id }) { + self.selectedMessages.remove(at: index) + } + } + } },completion: nil) if tableView.isEditing == true { @@ -2018,6 +2389,8 @@ extension CometChatMessageList : UITextViewDelegate { return } CometChat.endTyping(indicator: indicator) + chatView.microphone.isHidden = false + chatView.send.isHidden = true } } @@ -2027,6 +2400,15 @@ extension CometChatMessageList : UITextViewDelegate { guard let indicator = typingIndicator else { return } + if textView.text?.count == 0 { + CometChat.startTyping(indicator: indicator) + chatView.microphone.isHidden = false + chatView.send.isHidden = true + }else{ + CometChat.endTyping(indicator: indicator) + chatView.microphone.isHidden = true + chatView.send.isHidden = false + } CometChat.startTyping(indicator: indicator) } } @@ -2050,6 +2432,7 @@ extension CometChatMessageList:QLPreviewControllerDataSource, QLPreviewControlle let previewController = QLPreviewController() previewController.modalPresentationStyle = .popover previewController.dataSource = self + previewController.navigationController?.title = "" self.present(previewController, animated: true, completion: nil) } } @@ -2072,8 +2455,8 @@ extension CometChatMessageList:QLPreviewControllerDataSource, QLPreviewControlle completion(true, destinationUrl) } else { let snackbar: CometChatSnackbar = CometChatSnackbar.init(message: "Downloading...", duration: .long) - snackbar.animationType = .fadeInFadeOut - snackbar.show() + snackbar.animationType = .fadeInFadeOut + snackbar.show() URLSession.shared.downloadTask(with: itemUrl!, completionHandler: { (location, response, error) -> Void in guard let tempLocation = location, error == nil else { return } do { @@ -2109,6 +2492,12 @@ extension CometChatMessageList:QLPreviewControllerDataSource, QLPreviewControlle extension CometChatMessageList : ChatViewInternalDelegate { + + + func didMicrophoneButtonPressed(with: UILongPressGestureRecognizer) { + + } + /** This method triggers when user pressed attachment button in Chat View. - Author: CometChat Team @@ -2124,11 +2513,11 @@ extension CometChatMessageList : ChatViewInternalDelegate { CameraHandler.shared.imagePickedBlock = {(photoURL) in self.sendMedia(withURL: photoURL, type: .image) } - } + } let photoLibraryAction: UIAlertAction = UIAlertAction(title: NSLocalizedString("PHOTO_&_VIDEO_LIBRARY", comment: ""), style: .default) { action -> Void in CameraHandler.shared.presentPhotoLibrary(for: self) CameraHandler.shared.imagePickedBlock = {(photoURL) in - self.sendMedia(withURL: photoURL, type: .image) + self.sendMedia(withURL: photoURL, type: .image) } CameraHandler.shared.videoPickedBlock = {(videoURL) in self.sendMedia(withURL: videoURL, type: .video) @@ -2160,80 +2549,100 @@ extension CometChatMessageList : ChatViewInternalDelegate { } private func sendMedia(withURL: String, type: CometChat.MessageType){ - var lastSection = 0 - if chatMessages.count == 0 { - lastSection = (self.tableView?.numberOfSections ?? 
0) - }else { - lastSection = (self.tableView?.numberOfSections ?? 0) - 1 - } - CometChatSoundManager().play(sound: .outgoingMessage, bool: true) - var mediaMessage: MediaMessage? - switch self.isGroupIs { - case true: - mediaMessage = MediaMessage(receiverUid: self.currentGroup?.guid ?? "", fileurl: withURL, messageType: type, receiverType: .group) - mediaMessage?.muid = "\(Int(Date().timeIntervalSince1970 * 1000))" - mediaMessage?.sender?.uid = CometChat.getLoggedInUser()?.uid - mediaMessage?.senderUid = CometChat.getLoggedInUser()?.uid ?? "" - if self.chatMessages.count == 0 { - self.addNewGroupedMessage(messages: [mediaMessage!]) - self.filteredMessages.append(mediaMessage!) - }else{ - self.chatMessages[lastSection].append(mediaMessage!) - self.filteredMessages.append(mediaMessage!) - DispatchQueue.main.async { - self.tableView?.beginUpdates() - self.tableView?.insertRows(at: [IndexPath.init(row: self.chatMessages[lastSection].count - 1, section: lastSection)], with: .right) - self.tableView?.endUpdates() - self.tableView?.scrollToBottomRow() - } + var lastSection = 0 + if chatMessages.count == 0 { + lastSection = (self.tableView?.numberOfSections ?? 0) + }else { + lastSection = (self.tableView?.numberOfSections ?? 0) - 1 + } + CometChatSoundManager().play(sound: .outgoingMessage, bool: true) + var mediaMessage: MediaMessage? + switch self.isGroupIs { + case true: + mediaMessage = MediaMessage(receiverUid: self.currentGroup?.guid ?? "", fileurl: withURL, messageType: type, receiverType: .group) + mediaMessage?.muid = "\(Int(Date().timeIntervalSince1970 * 1000))" + mediaMessage?.sender?.uid = CometChat.getLoggedInUser()?.uid + mediaMessage?.senderUid = CometChat.getLoggedInUser()?.uid ?? "" + if self.chatMessages.count == 0 { + self.addNewGroupedMessage(messages: [mediaMessage!]) + self.filteredMessages.append(mediaMessage!) + }else{ + self.chatMessages[lastSection].append(mediaMessage!) + self.filteredMessages.append(mediaMessage!) + DispatchQueue.main.async { + self.tableView?.beginUpdates() + self.tableView?.insertRows(at: [IndexPath.init(row: self.chatMessages[lastSection].count - 1, section: lastSection)], with: .right) + self.tableView?.endUpdates() + self.tableView?.scrollToBottomRow() } - CometChat.sendMediaMessage(message: mediaMessage!, onSuccess: { (message) in - if let row = self.chatMessages[lastSection].firstIndex(where: {$0.muid == message.muid}) { - self.chatMessages[lastSection][row] = message - } - DispatchQueue.main.async{ self.tableView?.reloadData()} - }) { (error) in - DispatchQueue.main.async { - if let errorMessage = error?.errorDescription { - let snackbar: CometChatSnackbar = CometChatSnackbar.init(message: errorMessage, duration: .short) - snackbar.show() + } + CometChat.sendMediaMessage(message: mediaMessage!, onSuccess: { (message) in + if let row = self.chatMessages[lastSection].firstIndex(where: {$0.muid == message.muid}) { + self.chatMessages[lastSection][row] = message + } + DispatchQueue.main.async{ + if message.messageType == .audio || message.messageType == .file { + do { + try self.viewModel.resetRecording() + self.audioVisualizationView.reset() + self.currentState = .ready + } catch { + self.showAlert(with: error) } } - print("sendMediaMessage error: \(String(describing: error?.errorDescription))") - } - case false: - mediaMessage = MediaMessage(receiverUid: self.currentUser?.uid ?? 
"", fileurl: withURL, messageType: type, receiverType: .user) - mediaMessage?.muid = "\(Int(Date().timeIntervalSince1970 * 1000))" - mediaMessage?.sender?.uid = CometChat.getLoggedInUser()?.uid - mediaMessage?.senderUid = CometChat.getLoggedInUser()?.uid ?? "" - if self.chatMessages.count == 0 { - self.addNewGroupedMessage(messages: [mediaMessage!]) - self.filteredMessages.append(mediaMessage!) - }else{ - self.chatMessages[lastSection].append(mediaMessage!) - self.filteredMessages.append(mediaMessage!) - DispatchQueue.main.async { - self.tableView?.beginUpdates() - self.tableView?.insertRows(at: [IndexPath.init(row: self.chatMessages[lastSection].count - 1, section: lastSection)], with: .right) - self.tableView?.endUpdates() - self.tableView?.scrollToBottomRow() + self.tableView?.reloadData()} + }) { (error) in + DispatchQueue.main.async { + if let errorMessage = error?.errorDescription { + let snackbar: CometChatSnackbar = CometChatSnackbar.init(message: errorMessage, duration: .short) + snackbar.show() } } - CometChat.sendMediaMessage(message: mediaMessage!, onSuccess: { (message) in - if let row = self.chatMessages[lastSection].firstIndex(where: {$0.muid == message.muid}) { - self.chatMessages[lastSection][row] = message - } - DispatchQueue.main.async{ self.tableView?.reloadData()} - }) { (error) in - DispatchQueue.main.async { - if let errorMessage = error?.errorDescription { - let snackbar: CometChatSnackbar = CometChatSnackbar.init(message: errorMessage, duration: .short) - snackbar.show() + print("sendMediaMessage error: \(String(describing: error?.errorDescription))") + } + case false: + mediaMessage = MediaMessage(receiverUid: self.currentUser?.uid ?? "", fileurl: withURL, messageType: type, receiverType: .user) + mediaMessage?.muid = "\(Int(Date().timeIntervalSince1970 * 1000))" + mediaMessage?.sender?.uid = CometChat.getLoggedInUser()?.uid + mediaMessage?.senderUid = CometChat.getLoggedInUser()?.uid ?? "" + if self.chatMessages.count == 0 { + self.addNewGroupedMessage(messages: [mediaMessage!]) + self.filteredMessages.append(mediaMessage!) + }else{ + self.chatMessages[lastSection].append(mediaMessage!) + self.filteredMessages.append(mediaMessage!) + DispatchQueue.main.async { + self.tableView?.beginUpdates() + self.tableView?.insertRows(at: [IndexPath.init(row: self.chatMessages[lastSection].count - 1, section: lastSection)], with: .right) + self.tableView?.endUpdates() + self.tableView?.scrollToBottomRow() + } + } + CometChat.sendMediaMessage(message: mediaMessage!, onSuccess: { (message) in + if let row = self.chatMessages[lastSection].firstIndex(where: {$0.muid == message.muid}) { + self.chatMessages[lastSection][row] = message + } + DispatchQueue.main.async{ + if message.messageType == .audio || message.messageType == .file { + do { + try self.viewModel.resetRecording() + self.audioVisualizationView.reset() + self.currentState = .ready + } catch { + self.showAlert(with: error) } } - print("sendMediaMessage error: \(String(describing: error?.errorDescription))") + self.tableView?.reloadData()} + }) { (error) in + DispatchQueue.main.async { + if let errorMessage = error?.errorDescription { + let snackbar: CometChatSnackbar = CometChatSnackbar.init(message: errorMessage, duration: .short) + snackbar.show() + } } + print("sendMediaMessage error: \(String(describing: error?.errorDescription))") } + } } @@ -2249,16 +2658,6 @@ extension CometChatMessageList : ChatViewInternalDelegate { } - /** - This method triggers when user pressed microphone button in Chat View. 
- - Author: CometChat Team - - Copyright: © 2020 CometChat Inc. - - See Also: - [CometChatMessageList Documentation](https://prodocs.cometchat.com/docs/ios-ui-screens#section-4-comet-chat-message-list) - */ - func didMicrophoneButtonPressed() { - - } /** @@ -2352,7 +2751,7 @@ extension CometChatMessageList : ChatViewInternalDelegate { }) { (error) in DispatchQueue.main.async { if let errorMessage = error?.errorDescription { - let snackbar: CometChatSnackbar = CometChatSnackbar.init(message: errorMessage, duration: .short) + let snackbar: CometChatSnackbar = CometChatSnackbar.init(message: errorMessage, duration: .short) snackbar.show() } } @@ -2395,7 +2794,7 @@ extension CometChatMessageList : ChatViewInternalDelegate { }) { (error) in DispatchQueue.main.async { if let errorMessage = error?.errorDescription { - let snackbar: CometChatSnackbar = CometChatSnackbar.init(message: errorMessage, duration: .short) + let snackbar: CometChatSnackbar = CometChatSnackbar.init(message: errorMessage, duration: .short) snackbar.show() } } @@ -2498,7 +2897,7 @@ extension CometChatMessageList : CometChatMessageDelegate { self.hide(view: .smartRepliesView, true) } }else{ - CometChatSoundManager().play(sound: .incomingMessageForOther, bool: true) + CometChatSoundManager().play(sound: .incomingMessageForOther, bool: true) } } } @@ -2893,7 +3292,7 @@ extension CometChatMessageList : SmartRepliesViewDelegate { } CometChat.sendTextMessage(message: textMessage!, onSuccess: { (message) in - CometChatSoundManager().play(sound: .outgoingMessage, bool: true) + CometChatSoundManager().play(sound: .outgoingMessage, bool: true) if let row = self.chatMessages[lastSection].firstIndex(where: {$0.muid == message.muid}) { self.chatMessages[lastSection][row] = message } @@ -2904,7 +3303,7 @@ extension CometChatMessageList : SmartRepliesViewDelegate { }) { (error) in DispatchQueue.main.async { if let errorMessage = error?.errorDescription { - let snackbar: CometChatSnackbar = CometChatSnackbar.init(message: errorMessage, duration: .short) + let snackbar: CometChatSnackbar = CometChatSnackbar.init(message: errorMessage, duration: .short) snackbar.show() } } @@ -2930,7 +3329,7 @@ extension CometChatMessageList : SmartRepliesViewDelegate { self.chatView.textView.text = "" } CometChat.sendTextMessage(message: textMessage!, onSuccess: { (message) in - CometChatSoundManager().play(sound: .outgoingMessage, bool: true) + CometChatSoundManager().play(sound: .outgoingMessage, bool: true) if let row = self.chatMessages[lastSection].firstIndex(where: {$0.muid == message.muid}) { self.chatMessages[lastSection][row] = message } diff --git a/Library/UI Screens/Messages/MessageList/CometChatMessageList.xib b/Library/UI Screens/Messages/MessageList/CometChatMessageList.xib index fb52b102..f880c2a1 100755 --- a/Library/UI Screens/Messages/MessageList/CometChatMessageList.xib +++ b/Library/UI Screens/Messages/MessageList/CometChatMessageList.xib @@ -1,9 +1,9 @@ - + - + @@ -19,8 +19,15 @@ - + + + + + + + + @@ -34,6 +41,7 @@ + @@ -51,7 +59,7 @@ - + @@ -111,7 +119,7 @@ - + + + + + + + + + + + - + - diff --git a/License.md b/License.md index 53054bd9..68cbd42d 100755 --- a/License.md +++ b/License.md @@ -29,3 +29,4 @@ THE SOFTWARE. + diff --git a/README.md b/README.md index 9ad511db..ef05ddd8 100644 --- a/README.md +++ b/README.md @@ -10,6 +10,7 @@

+


















# What is UI Kit

diff --git a/Screenshots/audioCall.png b/Screenshots/audioCall.png
deleted file mode 100644
index 8ee15dd5fd049cd3fcafc2ae1c0166314ddbd2e5..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001
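For orientation on how the updated screen is typically hosted, a minimal sketch of pushing CometChatMessageList for a one-on-one conversation follows. It assumes the CometChat SDK module name CometChatPro, a User object already fetched through the SDK, and an existing navigation controller; only set(conversationWith:type:) is taken from the diff above, and openChat is a hypothetical helper in the host app, not part of this patch.

import UIKit
import CometChatPro

// Hypothetical host-app helper, not part of this patch.
func openChat(with user: User, from navigationController: UINavigationController) {
    let messageList = CometChatMessageList()
    // Entry point shown in the diff: binds the screen to a user or group conversation.
    messageList.set(conversationWith: user, type: .user)
    navigationController.pushViewController(messageList, animated: true)
}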