Compare commits


No commits in common. "cb474436496b1d7fb48db3bc32f4a523d8b9ac2e" and "e4eff2d362b7e7921e82cc35aaaea1bf6244549e" have entirely different histories.

12 changed files with 322 additions and 66 deletions

View File

@@ -1,11 +1,5 @@
 # Changelog
-## 2023.8 (106)
-Bugfixes:
-- Fix being able to set post language to multiple/undefined
-- iPadOS: Fix language picker button not having a pointer effect
-- macOS: Fix Cmd+W sometimes closing the non-foreground window
 ## 2023.8 (105)
 Features/Improvements:
 - Use server-set preference for default post visibility, language, and (on Hometown) local-only

View File

@@ -111,7 +111,7 @@ class PostService: ObservableObject {
         do {
             (data, utType) = try await getData(for: attachment)
             currentStep += 1
-        } catch let error as DraftAttachment.ExportError {
+        } catch let error as AttachmentData.Error {
             throw Error.attachmentData(index: index, cause: error)
         }
         do {
@@ -169,7 +169,7 @@ class PostService: ObservableObject {
     }
     
     enum Error: Swift.Error, LocalizedError {
-        case attachmentData(index: Int, cause: DraftAttachment.ExportError)
+        case attachmentData(index: Int, cause: AttachmentData.Error)
         case attachmentUpload(index: Int, cause: Client.Error)
         case posting(Client.Error)

View File

@@ -20,11 +20,7 @@ public final class ComposeController: ViewController {
     public typealias ReplyContentView = (any StatusProtocol, @escaping (CGFloat) -> Void) -> AnyView
     public typealias EmojiImageView = (Emoji) -> AnyView
     
-    @Published public private(set) var draft: Draft {
-        didSet {
-            assert(draft.managedObjectContext == DraftsPersistentContainer.shared.viewContext)
-        }
-    }
+    @Published public private(set) var draft: Draft
     @Published public var config: ComposeUIConfig
     @Published public var mastodonController: ComposeMastodonContext
     let fetchAvatar: AvatarImageView.FetchAvatar
@@ -110,7 +106,6 @@ public final class ComposeController: ViewController {
         emojiImageView: @escaping EmojiImageView
     ) {
         self.draft = draft
-        assert(draft.managedObjectContext == DraftsPersistentContainer.shared.viewContext)
         self.config = config
         self.mastodonController = mastodonController
         self.fetchAvatar = fetchAvatar

View File

@@ -137,8 +137,6 @@ extension DraftAttachment {
 //private let attachmentTypeIdentifier = "space.vaccor.Tusker.composition-attachment"
 private let imageType = UTType.image.identifier
-private let heifType = UTType.heif.identifier
-private let heicType = UTType.heic.identifier
 private let jpegType = UTType.jpeg.identifier
 private let pngType = UTType.png.identifier
 private let mp4Type = UTType.mpeg4Movie.identifier
@@ -150,7 +148,7 @@ extension DraftAttachment: NSItemProviderReading {
         // todo: is there a better way of handling movies than manually adding all possible UTI types?
         // just using kUTTypeMovie doesn't work, because we need the actually type in order to get the file extension
         // without the file extension, getting the thumbnail and exporting the video for attachment upload fails
-        [/*typeIdentifier, */ gifType, heifType, heicType, jpegType, pngType, imageType, mp4Type, quickTimeType]
+        [/*typeIdentifier, */ gifType, jpegType, pngType, imageType, mp4Type, quickTimeType]
     }
     
     public static func object(withItemProviderData data: Data, typeIdentifier: String) throws -> DraftAttachment {
@@ -275,13 +273,20 @@ extension DraftAttachment {
         var data = data
         var type = type
+        if type != .png && type != .jpeg,
+           let image = UIImage(data: data) {
+            // The quality of 0.8 was chosen completely arbitrarily, it may need to be tuned in the future.
+            data = image.jpegData(compressionQuality: 0.8)!
+            type = .jpeg
+        }
         let image = CIImage(data: data)!
         let needsColorSpaceConversion = features.needsWideColorGamutHack && image.colorSpace?.name != CGColorSpace.sRGB
         // neither Mastodon nor Pleroma handles HEIC well, so convert to JPEG
         // they also do a bad job converting wide color gamut images (they seem to just drop the profile, letting the wide-gamut values be reinterprete as sRGB)
         // if that changes in the future, we'll need to pass the InstanceFeatures in here somehow and gate the conversion
-        if needsColorSpaceConversion || type == .heic || type == .heif {
+        if needsColorSpaceConversion || type == .heic {
             let context = CIContext()
             let colorSpace = needsColorSpaceConversion || image.colorSpace != nil ? CGColorSpace(name: CGColorSpace.sRGB)! : image.colorSpace!
             if type == .png {

View File

@@ -0,0 +1,278 @@
//
//  AttachmentData.swift
//  ComposeUI
//
//  Created by Shadowfacts on 1/1/20.
//  Copyright © 2020 Shadowfacts. All rights reserved.
//

import UIKit
import Photos
import UniformTypeIdentifiers
import PencilKit
import InstanceFeatures

enum AttachmentData {
    case asset(PHAsset)
    case image(Data, originalType: UTType)
    case video(URL)
    case drawing(PKDrawing)
    case gif(Data)
    
    var type: AttachmentType {
        switch self {
        case let .asset(asset):
            return asset.attachmentType!
        case .image(_, originalType: _):
            return .image
        case .video(_):
            return .video
        case .drawing(_):
            return .image
        case .gif(_):
            return .image
        }
    }
    
    var isAsset: Bool {
        switch self {
        case .asset(_):
            return true
        default:
            return false
        }
    }
    
    var canSaveToDraft: Bool {
        switch self {
        case .video(_):
            return false
        default:
            return true
        }
    }
    
    func getData(features: InstanceFeatures, skipAllConversion: Bool = false, completion: @escaping (Result<(Data, UTType), Error>) -> Void) {
        switch self {
        case let .image(originalData, originalType):
            let data: Data
            let type: UTType
            switch originalType {
            case .png, .jpeg:
                data = originalData
                type = originalType
            default:
                let image = UIImage(data: originalData)!
                // The quality of 0.8 was chosen completely arbitrarily, it may need to be tuned in the future.
                data = image.jpegData(compressionQuality: 0.8)!
                type = .jpeg
            }
            let processed = processImageData(data, type: type, features: features, skipAllConversion: skipAllConversion)
            completion(.success(processed))
            
        case let .asset(asset):
            if asset.mediaType == .image {
                let options = PHImageRequestOptions()
                options.version = .current
                options.deliveryMode = .highQualityFormat
                options.resizeMode = .none
                options.isNetworkAccessAllowed = true
                PHImageManager.default().requestImageDataAndOrientation(for: asset, options: options) { (data, dataUTI, orientation, info) in
                    guard let data = data, let dataUTI = dataUTI else {
                        completion(.failure(.missingData))
                        return
                    }
                    let processed = processImageData(data, type: UTType(dataUTI)!, features: features, skipAllConversion: skipAllConversion)
                    completion(.success(processed))
                }
            } else if asset.mediaType == .video {
                let options = PHVideoRequestOptions()
                options.deliveryMode = .automatic
                options.isNetworkAccessAllowed = true
                options.version = .current
                PHImageManager.default().requestExportSession(forVideo: asset, options: options, exportPreset: AVAssetExportPresetHighestQuality) { (exportSession, info) in
                    if let exportSession = exportSession {
                        AttachmentData.exportVideoData(session: exportSession, completion: completion)
                    } else if let error = info?[PHImageErrorKey] as? Error {
                        completion(.failure(.videoExport(error)))
                    } else {
                        completion(.failure(.noVideoExportSession))
                    }
                }
            } else {
                fatalError("assetType must be either image or video")
            }
            
        case let .video(url):
            let asset = AVURLAsset(url: url)
            guard let session = AVAssetExportSession(asset: asset, presetName: AVAssetExportPresetHighestQuality) else {
                completion(.failure(.noVideoExportSession))
                return
            }
            AttachmentData.exportVideoData(session: session, completion: completion)
            
        case let .drawing(drawing):
            let image = drawing.imageInLightMode(from: drawing.bounds, scale: 1)
            completion(.success((image.pngData()!, .png)))
            
        case let .gif(data):
            completion(.success((data, .gif)))
        }
    }
    
    private func processImageData(_ data: Data, type: UTType, features: InstanceFeatures, skipAllConversion: Bool) -> (Data, UTType) {
        guard !skipAllConversion else {
            return (data, type)
        }
        
        var data = data
        var type = type
        
        let image = CIImage(data: data)!
        let needsColorSpaceConversion = features.needsWideColorGamutHack && image.colorSpace?.name != CGColorSpace.sRGB
        
        // neither Mastodon nor Pleroma handles HEIC well, so convert to JPEG
        // they also do a bad job converting wide color gamut images (they seem to just drop the profile, letting the wide-gamut values be reinterprete as sRGB)
        // if that changes in the future, we'll need to pass the InstanceFeatures in here somehow and gate the conversion
        if needsColorSpaceConversion || type == .heic {
            let context = CIContext()
            let colorSpace = needsColorSpaceConversion || image.colorSpace != nil ? CGColorSpace(name: CGColorSpace.sRGB)! : image.colorSpace!
            if type == .png {
                data = context.pngRepresentation(of: image, format: .ARGB8, colorSpace: colorSpace)!
            } else {
                data = context.jpegRepresentation(of: image, colorSpace: colorSpace)!
                type = .jpeg
            }
        }
        return (data, type)
    }
    
    private static func exportVideoData(session: AVAssetExportSession, completion: @escaping (Result<(Data, UTType), Error>) -> Void) {
        session.outputFileType = .mp4
        session.outputURL = FileManager.default.temporaryDirectory.appendingPathComponent("exported_video_\(UUID())").appendingPathExtension("mp4")
        session.exportAsynchronously {
            guard session.status == .completed else {
                completion(.failure(.videoExport(session.error!)))
                return
            }
            do {
                let data = try Data(contentsOf: session.outputURL!)
                completion(.success((data, .mpeg4Movie)))
            } catch {
                completion(.failure(.videoExport(error)))
            }
        }
    }
    
    enum AttachmentType {
        case image, video
    }
    
    enum Error: Swift.Error, LocalizedError {
        case missingData
        case videoExport(Swift.Error)
        case noVideoExportSession
        
        var localizedDescription: String {
            switch self {
            case .missingData:
                return "Missing Data"
            case .videoExport(let error):
                return "Exporting video: \(error)"
            case .noVideoExportSession:
                return "Couldn't create video export session"
            }
        }
    }
}

extension PHAsset {
    var attachmentType: AttachmentData.AttachmentType? {
        switch self.mediaType {
        case .image:
            return .image
        case .video:
            return .video
        default:
            return nil
        }
    }
}

extension AttachmentData: Codable {
    func encode(to encoder: Encoder) throws {
        var container = encoder.container(keyedBy: CodingKeys.self)
        switch self {
        case let .asset(asset):
            try container.encode("asset", forKey: .type)
            try container.encode(asset.localIdentifier, forKey: .assetIdentifier)
        case let .image(originalData, originalType):
            try container.encode("image", forKey: .type)
            try container.encode(originalType, forKey: .imageType)
            try container.encode(originalData, forKey: .imageData)
        case .video(_):
            throw EncodingError.invalidValue(self, EncodingError.Context(codingPath: encoder.codingPath, debugDescription: "video CompositionAttachments cannot be encoded"))
        case let .drawing(drawing):
            try container.encode("drawing", forKey: .type)
            let drawingData = drawing.dataRepresentation()
            try container.encode(drawingData, forKey: .drawing)
        case .gif(_):
            throw EncodingError.invalidValue(self, EncodingError.Context(codingPath: encoder.codingPath, debugDescription: "gif CompositionAttachments cannot be encoded"))
        }
    }
    
    init(from decoder: Decoder) throws {
        let container = try decoder.container(keyedBy: CodingKeys.self)
        switch try container.decode(String.self, forKey: .type) {
        case "asset":
            let identifier = try container.decode(String.self, forKey: .assetIdentifier)
            guard let asset = PHAsset.fetchAssets(withLocalIdentifiers: [identifier], options: nil).firstObject else {
                throw DecodingError.dataCorruptedError(forKey: .assetIdentifier, in: container, debugDescription: "Could not fetch asset with local identifier")
            }
            self = .asset(asset)
        case "image":
            let data = try container.decode(Data.self, forKey: .imageData)
            if let type = try container.decodeIfPresent(UTType.self, forKey: .imageType) {
                self = .image(data, originalType: type)
            } else {
                guard let image = UIImage(data: data) else {
                    throw DecodingError.dataCorruptedError(forKey: .imageData, in: container, debugDescription: "CompositionAttachment data could not be decoded into UIImage")
                }
                let jpegData = image.jpegData(compressionQuality: 1)!
                self = .image(jpegData, originalType: .jpeg)
            }
        case "drawing":
            let drawingData = try container.decode(Data.self, forKey: .drawing)
            let drawing = try PKDrawing(data: drawingData)
            self = .drawing(drawing)
        default:
            throw DecodingError.dataCorruptedError(forKey: .type, in: container, debugDescription: "CompositionAttachment type must be one of image, asset, or drawing")
        }
    }
    
    enum CodingKeys: CodingKey {
        case type
        case imageData
        case imageType
        /// The local identifier of the PHAsset for this attachment
        case assetIdentifier
        /// The PKDrawing object for this attachment.
        case drawing
    }
}

extension AttachmentData: Equatable {
    static func ==(lhs: AttachmentData, rhs: AttachmentData) -> Bool {
        switch (lhs, rhs) {
        case let (.asset(a), .asset(b)):
            return a.localIdentifier == b.localIdentifier
        case let (.image(a, originalType: aType), .image(b, originalType: bType)):
            return a == b && aType == bType
        case let (.video(a), .video(b)):
            return a == b
        case let (.drawing(a), .drawing(b)):
            return a == b
        default:
            return false
        }
    }
}
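
For orientation, here is a minimal sketch of how a caller might use getData(features:completion:) from the file above. The function name, the input data, and the InstanceFeatures value are hypothetical and not part of the diff; it only assumes the types declared above are in scope.

// Hypothetical call site (not from the diff): export an image attachment's
// (Data, UTType) pair before uploading it.
func exportForUpload(_ pngData: Data, features: InstanceFeatures) {
    let attachment = AttachmentData.image(pngData, originalType: .png)
    attachment.getData(features: features) { result in
        switch result {
        case let .success((data, type)):
            // data is now PNG/JPEG (or MP4 for video attachments); type identifies the format
            print("exported \(data.count) bytes as \(type.identifier)")
        case let .failure(error):
            print("export failed: \(error.localizedDescription)")
        }
    }
}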

View File

@@ -30,13 +30,7 @@ struct LanguagePicker: View {
         if maybeIso639Code.last == "-" {
             maybeIso639Code = maybeIso639Code[..<maybeIso639Code.index(before: maybeIso639Code.endIndex)]
         }
-        let identifier = String(maybeIso639Code)
-        // mul (for multiple languages) and unk (unknown) are ISO codes, but not ones that akkoma permits, so we ignore them on all platforms
-        guard identifier != "mul",
-              identifier != "und" else {
-            return nil
-        }
-        let code = Locale.LanguageCode(identifier)
+        let code = Locale.LanguageCode(String(maybeIso639Code))
         if code.isISOLanguage {
             return code
         } else {
@@ -45,13 +39,16 @@
     }
     
     private var codeFromPreferredLanguages: Locale.LanguageCode? {
-        if let identifier = Locale.preferredLanguages.first,
-           case let code = Locale.LanguageCode(identifier),
-           code.isISOLanguage {
-            return code
-        } else {
-            return nil
-        }
+        if let identifier = Locale.preferredLanguages.first {
+            let code = Locale.LanguageCode(identifier)
+            if code.isISOLanguage {
+                return code
+            } else {
+                return nil
+            }
+        } else {
+            return nil
+        }
     }
     
     private var languageCode: Binding<Locale.LanguageCode> {
@@ -69,8 +66,6 @@ struct LanguagePicker: View {
             Text((languageCode.wrappedValue.identifier(.alpha2) ?? languageCode.wrappedValue.identifier).uppercased())
         }
         .accessibilityLabel("Post Language")
-        .padding(5)
-        .hoverEffect()
         .sheet(isPresented: $isShowingSheet) {
             NavigationStack {
                 LanguagePickerList(languageCode: languageCode, hasChangedSelection: $hasChangedSelection, isPresented: $isShowingSheet)
@@ -143,12 +138,10 @@ private struct LanguagePickerList: View {
         // make sure recents always contains the currently selected lang
         let recents = addRecentLang(languageCode)
         recentLangs = recents
-            .filter { $0 != "mul" && $0 != "und" }
             .map { Lang(code: .init($0)) }
             .sorted { $0.name < $1.name }
         langs = Locale.LanguageCode.isoLanguageCodes
-            .filter { $0.identifier != "mul" && $0.identifier != "und" }
             .map { Lang(code: $0) }
             .sorted { $0.name < $1.name }
     }
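
Background on the filters removed above: per the deleted comment, mul (multiple languages) and und (undetermined) are valid ISO 639 codes, so Foundation reports them as ISO languages and includes them in Locale.LanguageCode.isoLanguageCodes even though Akkoma rejects them, which is why the other side of this comparison filters them out explicitly. A small illustrative sketch (not part of the diff):

import Foundation

// "mul" and "und" pass Foundation's ISO check, so the picker code had to
// exclude them by hand before offering the list to the user.
let mul = Locale.LanguageCode("mul")
print(mul.isISOLanguage)                                    // expected: true
print(Locale.LanguageCode.isoLanguageCodes.contains(mul))   // expected: true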

View File

@@ -162,6 +162,13 @@ class AppDelegate: UIResponder, UIApplicationDelegate {
         }
     }
     
+    @objc func closeWindow() {
+        guard let scene = UIApplication.shared.connectedScenes.first(where: { $0.activationState == .foregroundActive }) else {
+            return
+        }
+        UIApplication.shared.requestSceneSessionDestruction(scene.session, options: nil)
+    }
+    
     private func swizzleStatusBar() {
         let selector = Selector(("handleTapAction:"))
         var originalIMP: IMP?
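
Note that closeWindow above destroys whichever foreground-active scene connectedScenes happens to return first, which is not necessarily the window Cmd+W was pressed in; the removed changelog entry "macOS: Fix Cmd+W sometimes closing the non-foreground window" refers to this. The other side of this comparison avoids the problem by reusing the system Close command (see the following MenuController hunk). Purely as an illustration, a hedged sketch of targeting the key window's scene instead; the name closeKeyWindow and the fallback are hypothetical, not from the diff:

// Illustrative variant (not from the diff): prefer the scene whose window is key,
// so Cmd+W closes the window the user is actually interacting with.
@objc func closeKeyWindow() {
    let scenes = UIApplication.shared.connectedScenes.compactMap { $0 as? UIWindowScene }
    guard let scene = scenes.first(where: { $0.windows.contains(where: \.isKeyWindow) })
            ?? scenes.first(where: { $0.activationState == .foregroundActive }) else {
        return
    }
    UIApplication.shared.requestSceneSessionDestruction(scene.session, options: nil)
}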

View File

@@ -41,25 +41,22 @@ struct MenuController {
     static let prevSubTabCommand = UIKeyCommand(title: "Previous Sub Tab", action: #selector(TabbedPageViewController.selectPrevPage), input: "[", modifierFlags: [.command, .shift])
     
     static func buildMainMenu(builder: UIMenuBuilder) {
-        builder.replace(menu: .file, with: buildFileMenu(builder: builder))
+        builder.replace(menu: .file, with: buildFileMenu())
         builder.insertChild(buildSubTabMenu(), atStartOfMenu: .view)
         builder.insertChild(buildSidebarShortcuts(), atStartOfMenu: .view)
     }
     
-    private static func buildFileMenu(builder: UIMenuBuilder) -> UIMenu {
-        var children: [UIMenuElement] = [
-            composeCommand,
-            refreshCommand(discoverabilityTitle: nil),
-        ]
-        if let close = builder.menu(for: .close) {
-            children.append(close)
-        }
+    private static func buildFileMenu() -> UIMenu {
         return UIMenu(
             title: "File",
             image: nil,
             identifier: nil,
             options: [],
-            children: children
+            children: [
+                composeCommand,
+                refreshCommand(discoverabilityTitle: nil),
+                UIKeyCommand(title: "Close", action: #selector(AppDelegate.closeWindow), input: "w", modifierFlags: .command),
+            ]
         )
     }
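
For reference, buildMainMenu(builder:) is driven from UIKit's menu-building hook on a responder; a minimal sketch of the assumed wiring follows (placing it in AppDelegate is an assumption, not something shown in this diff):

// Assumed wiring (not part of the diff): in AppDelegate (a UIResponder),
// UIKit calls buildMenu(with:) whenever the menu bar needs rebuilding.
override func buildMenu(with builder: UIMenuBuilder) {
    super.buildMenu(with: builder)
    // Only customize the main menu bar, not context menus.
    guard builder.system == .main else { return }
    MenuController.buildMainMenu(builder: builder)
}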

View File

@@ -35,7 +35,7 @@ class MainSceneDelegate: UIResponder, UIWindowSceneDelegate, TuskerSceneDelegate
         window = UIWindow(windowScene: windowScene)
         showAppOrOnboardingUI(session: session)
         
-        if !connectionOptions.urlContexts.isEmpty {
+        if connectionOptions.urlContexts.count > 0 {
             self.scene(scene, openURLContexts: connectionOptions.urlContexts)
         }
@@ -50,21 +50,14 @@
     }
     
     func scene(_ scene: UIScene, openURLContexts URLContexts: Set<UIOpenURLContext>) {
-        guard let url = URLContexts.first?.url,
-              var components = URLComponents(url: url, resolvingAgainstBaseURL: false),
-              let rootViewController else {
-            return
-        }
-        if components.host == "compose" {
-            if let mastodonController = window!.windowScene!.session.mastodonController {
-                let draft = mastodonController.createDraft()
-                let text = components.queryItems?.first(where: { $0.name == "text" })?.value
-                draft.text = text ?? ""
-                rootViewController.compose(editing: draft, animated: true, isDucked: false)
-            }
-        } else {
-            // Assume anything else is a search query
+        if URLContexts.count > 1 {
+            fatalError("Cannot open more than 1 URL")
+        }
+        let url = URLContexts.first!.url
+        
+        if var components = URLComponents(url: url, resolvingAgainstBaseURL: false),
+           let rootViewController = rootViewController {
             components.scheme = "https"
             let query = components.string!
             rootViewController.performSearch(query: query)
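
The branch removed above routes URLs with host "compose" to the composer, prefilled from a "text" query item, and treats everything else as a search query rewritten to https. As an illustration only, a sketch of building such a URL; the app's actual URL scheme does not appear in this comparison, so the scheme below is a placeholder:

// Illustrative only (not from the diff): a URL the removed "compose" branch would handle.
var components = URLComponents()
components.scheme = "appscheme"   // placeholder; the real scheme isn't shown in this comparison
components.host = "compose"
components.queryItems = [URLQueryItem(name: "text", value: "Hello, world")]
let composeURL = components.url!  // appscheme://compose?text=Hello,%20world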

View File

@@ -574,11 +574,6 @@ class TimelineViewController: UIViewController, TimelineLikeCollectionViewContro
                 var count = 0
                 while count < 5 {
                     count += 1
-                    
-                    let crumb = Breadcrumb(level: .info, category: "TimelineViewController")
-                    crumb.message = "scrollToItem, attempt=\(count)"
-                    SentrySDK.addBreadcrumb(crumb)
-                    
                     let origOffset = self.collectionView.contentOffset
                     self.collectionView.layoutIfNeeded()
                     self.collectionView.scrollToItem(at: indexPath, at: .centeredVertically, animated: false)

View File

@@ -211,10 +211,9 @@
 extension TimelineLikeCollectionViewController {
     // apply(_:animatingDifferences:) is marked as nonisolated, so just awaiting it doesn't dispatch to the main thread, unlike other async @MainActor methods
     // but we always want to update the data source on the main thread for consistency, so this method does that
+    @MainActor
     func apply(_ snapshot: NSDiffableDataSourceSnapshot<Section, Item>, animatingDifferences: Bool) async {
-        await MainActor.run {
-            dataSource?.apply(snapshot, animatingDifferences: animatingDifferences)
-        }
+        await self.dataSource.apply(snapshot, animatingDifferences: animatingDifferences)
     }
     
     @MainActor

View File

@@ -10,7 +10,7 @@
 // https://help.apple.com/xcode/#/dev745c5c974
 MARKETING_VERSION = 2023.8
-CURRENT_PROJECT_VERSION = 106
+CURRENT_PROJECT_VERSION = 105
 CURRENT_PROJECT_VERSION = $(inherited)$(CURRENT_PROJECT_VERSION_BUILD_SUFFIX_$(CONFIGURATION))
 CURRENT_PROJECT_VERSION_BUILD_SUFFIX_Debug=-dev