//
//  ComposeAttachmentTableViewCell.swift
//  Tusker
//
//  Created by Shadowfacts on 3/13/20.
//  Copyright © 2020 Shadowfacts. All rights reserved.
//

import UIKit
import Photos
import AVFoundation
import Vision

protocol ComposeAttachmentTableViewCellDelegate: class {
    func composeAttachment(_ cell: ComposeAttachmentTableViewCell, present viewController: UIViewController, animated: Bool)
    func removeAttachment(_ cell: ComposeAttachmentTableViewCell)
    func attachmentDescriptionChanged(_ cell: ComposeAttachmentTableViewCell)
    func composeAttachmentDescriptionHeightChanged(_ cell: ComposeAttachmentTableViewCell)
}

class ComposeAttachmentTableViewCell: UITableViewCell {
    
    weak var delegate: ComposeAttachmentTableViewCellDelegate?
    
    @IBOutlet weak var assetImageView: UIImageView!
    @IBOutlet weak var descriptionTextView: UITextView!
    @IBOutlet weak var descriptionTextViewHeightConstraint: NSLayoutConstraint!
    @IBOutlet weak var descriptionPlaceholderLabel: UILabel!
    @IBOutlet weak var removeButton: UIButton!
    @IBOutlet weak var activityIndicator: UIActivityIndicatorView!
    
    var attachment: CompositionAttachment!
    
    var state: State = .allowEntry {
        didSet {
            switch state {
            case .allowEntry:
                descriptionTextView.isEditable = true
                updateDescriptionPlaceholderLabel()
                activityIndicator.stopAnimating()
            case .recognizingText:
                descriptionTextView.isEditable = false
                descriptionPlaceholderLabel.isHidden = true
                activityIndicator.startAnimating()
            }
        }
    }
    
    private var textRecognitionRequest: VNRecognizeTextRequest?
    
    override func awakeFromNib() {
        super.awakeFromNib()
        
        assetImageView.layer.masksToBounds = true
        assetImageView.layer.cornerRadius = 8
        
        descriptionTextView.delegate = self
    }
    
    func updateUI(for attachment: CompositionAttachment) {
        self.attachment = attachment
        
        descriptionTextView.text = attachment.attachmentDescription
        updateDescriptionPlaceholderLabel()
        
        assetImageView.contentMode = .scaleAspectFill
        assetImageView.backgroundColor = .secondarySystemBackground
        
        switch attachment.data {
        case let .image(image):
            assetImageView.image = image
            
        case let .asset(asset):
            let size = CGSize(width: 80, height: 80)
            PHImageManager.default().requestImage(for: asset, targetSize: size, contentMode: .aspectFill, options: nil) { (image, _) in
                // The request completes asynchronously, so make sure the cell hasn't been
                // reused for a different attachment in the meantime.
                guard self.attachment == attachment else { return }
                self.assetImageView.image = image
            }
            
        case let .video(url):
            // Use the first frame of the video as the thumbnail.
            let asset = AVURLAsset(url: url)
            let imageGenerator = AVAssetImageGenerator(asset: asset)
            if let cgImage = try? imageGenerator.copyCGImage(at: .zero, actualTime: nil) {
                assetImageView.image = UIImage(cgImage: cgImage)
            }
            
        case let .drawing(drawing):
            // Always render the drawing thumbnail in light mode, on a white background.
            assetImageView.image = drawing.imageInLightMode(from: drawing.bounds)
            assetImageView.contentMode = .scaleAspectFit
            assetImageView.backgroundColor = .white
        }
    }
    
    func updateDescriptionPlaceholderLabel() {
        descriptionPlaceholderLabel.isHidden = !descriptionTextView.text.isEmpty
    }
    
    func setEnabled(_ enabled: Bool) {
        descriptionTextView.isEditable = enabled
        removeButton.isEnabled = enabled
    }
    
    /// Runs Vision text recognition on the image attachment and inserts the recognized
    /// text into the description text view.
    func recognizeTextFromImage() {
        precondition(attachment.data.type == .image)
        
        state = .recognizingText
        
        DispatchQueue.global(qos: .userInitiated).async {
            self.attachment.data.getData { (data, mimeType) in
                let handler = VNImageRequestHandler(data: data, options: [:])
                
                let request = VNRecognizeTextRequest { (request, error) in
                    DispatchQueue.main.async {
                        self.state = .allowEntry
                        
                        if let results = request.results as? [VNRecognizedTextObservation] {
                            var text = ""
                            for observation in results {
                                let result = observation.topCandidates(1).first!
                                text.append(result.string)
                                text.append("\n")
                            }
                            self.descriptionTextView.text = text
                            self.textViewDidChange(self.descriptionTextView)
                        }
                    }
                }
                request.recognitionLevel = .accurate
                request.usesLanguageCorrection = true
                self.textRecognitionRequest = request
                
                DispatchQueue.global(qos: .userInitiated).async {
                    do {
                        try handler.perform([request])
                    } catch {
                        // The perform call throws an error with code 1 if the request is cancelled,
                        // which we don't want to show an alert for.
                        guard (error as NSError).code != 1 else { return }
                        
                        DispatchQueue.main.async {
                            self.state = .allowEntry
                            
                            let title = NSLocalizedString("Text Recognition Failed", comment: "text recognition error alert title")
                            let message = error.localizedDescription
                            let alert = UIAlertController(title: title, message: message, preferredStyle: .alert)
                            // Add a dismiss action so the alert can be closed.
                            alert.addAction(UIAlertAction(title: NSLocalizedString("OK", comment: "ok button"), style: .default, handler: nil))
                            self.delegate?.composeAttachment(self, present: alert, animated: true)
                        }
                    }
                }
            }
        }
    }
    
    override func prepareForReuse() {
        super.prepareForReuse()
        
        assetImageView.image = nil
        descriptionTextViewHeightConstraint.constant = 80
    }
    
    @IBAction func removeButtonPressed(_ sender: Any) {
        textRecognitionRequest?.cancel()
        delegate?.removeAttachment(self)
    }
    
}

extension ComposeAttachmentTableViewCell {
    enum State {
        case allowEntry, recognizingText
    }
}

extension ComposeAttachmentTableViewCell: UITextViewDelegate {
    func textViewDidChange(_ textView: UITextView) {
        attachment.attachmentDescription = textView.text
        updateDescriptionPlaceholderLabel()
        delegate?.attachmentDescriptionChanged(self)
        
        // Grow the text view to fit its content, with a minimum height of 80 points,
        // and notify the delegate when the height actually changes so the table view
        // can update the cell's size.
        let smallestSize = textView.sizeThatFits(CGSize(width: textView.bounds.width, height: .greatestFiniteMagnitude))
        let old = descriptionTextViewHeightConstraint.constant
        descriptionTextViewHeightConstraint.constant = max(80, smallestSize.height)
        if old != descriptionTextViewHeightConstraint.constant {
            delegate?.composeAttachmentDescriptionHeightChanged(self)
        }
    }
}