iOS Share extension from Notes app, need to collect Text and Images simultaneously
From the iPhone Notes app I need to collect both text and images and share them into my main app. There are three cases:
- The user shares both text and images simultaneously.
- The user shares only text.
- The user shares only images.
My code can handle only an image or only text. When a note contains both images and text, the code does not work and the share extension hangs. I need a way to handle case one: when the user shares a note containing both text and images, I need to collect all of it.
My Code
import UIKit
import SwiftUI
import Vision
import UniformTypeIdentifiers

class ShareViewController: UIViewController {
    override func viewDidLoad() {
        super.viewDidLoad()
        guard
            let extensionItem = extensionContext?.inputItems.first as? NSExtensionItem,
            let itemProvider = extensionItem.attachments?.first else {
            close()
            return
        }
        let textDataType = UTType.plainText.identifier
        if itemProvider.hasItemConformingToTypeIdentifier(textDataType) {
            // Handle text
            itemProvider.loadItem(forTypeIdentifier: textDataType, options: nil) { (providedText, error) in
                if error != nil {
                    self.close()
                    return
                }
                if let text = providedText as? String {
                    DispatchQueue.main.async {
                        self.displayShareView(with: text)
                    }
                }
            }
        } else {
            let group = DispatchGroup()
            // Iterate over input items from the extension context
            for item in self.extensionContext!.inputItems as! [NSExtensionItem] {
                for provider in item.attachments! {
                    let itemProvider = provider
                    // Handle image items (use UTType.image.identifier)
                    if itemProvider.hasItemConformingToTypeIdentifier(UTType.image.identifier) {
                        group.enter()
                        itemProvider.loadItem(forTypeIdentifier: UTType.image.identifier, options: nil) { (result, error) in
                            if let error = error {
                                print("Error loading image: \(error.localizedDescription)")
                                group.leave()
                                return
                            }
                            if let resultURL = result as? NSURL {
                                print("Received image URL: \(resultURL)")
                                if let imageData = try? Data(contentsOf: resultURL as URL), let image = UIImage(data: imageData) {
                                    print("Processing image for text...")
                                    self.processImageForText(image)
                                } else {
                                    print("Failed to load image data.")
                                }
                            } else {
                                print("No image URL found.")
                            }
                            group.leave()
                        }
                    }
                }
            }
            // Once all operations are completed, complete the extension request
            group.notify(queue: .main) {
                // self.extensionContext!.completeRequest(returningItems: [], completionHandler: nil)
            }
        }
    }

    // Process image for text using Vision (OCR)
    private func processImageForText(_ image: UIImage) {
        DispatchQueue.global(qos: .userInitiated).async {
            guard let cgImage = image.cgImage else {
                print("Failed to convert UIImage to CGImage.")
                DispatchQueue.main.async {
                    self.close()
                }
                return
            }
            let requestHandler = VNImageRequestHandler(cgImage: cgImage, options: [:])
            let request = VNRecognizeTextRequest { (request, error) in
                if let error = error {
                    print("OCR error: \(error.localizedDescription)")
                    DispatchQueue.main.async {
                        self.close()
                    }
                    return
                }
                let recognizedText = request.results?.compactMap { result in
                    (result as? VNRecognizedTextObservation)?.topCandidates(1).first?.string
                }.joined(separator: "\n") ?? "No text found in image"
                print("Recognized text from image: \(recognizedText)")
                DispatchQueue.main.async {
                    self.displayShareView(with: recognizedText)
                }
            }
            do {
                try requestHandler.perform([request])
            } catch {
                print("Failed to perform text recognition: \(error.localizedDescription)")
                DispatchQueue.main.async {
                    self.close()
                }
            }
        }
    }

    // Display the SwiftUI view with the given text
    private func displayShareView(with text: String) {
        let contentView = UIHostingController(rootView: ShareExtensionView(text: text, onSave: { [weak self] savedText, title in
            self?.handleSaveAction(with: savedText, title: title)
        }))
        self.addChild(contentView)
        self.view.addSubview(contentView.view)
        // Set up constraints
        contentView.view.translatesAutoresizingMaskIntoConstraints = false
        contentView.view.topAnchor.constraint(equalTo: self.view.topAnchor).isActive = true
        contentView.view.bottomAnchor.constraint(equalTo: self.view.bottomAnchor).isActive = true
        contentView.view.leftAnchor.constraint(equalTo: self.view.leftAnchor).isActive = true
        contentView.view.rightAnchor.constraint(equalTo: self.view.rightAnchor).isActive = true
    }

    // Close the Share Extension
    func close() {
        self.extensionContext!.completeRequest(returningItems: [], completionHandler: nil)
    }

    private func handleSaveAction(with text: String, title: String) {
        // Save text to shared container
        let userDefaults = UserDefaults(suiteName: "group.aibuddy.aibuddyshareextention")
        userDefaults?.set(text, forKey: "sharedTextFromExtensionText")
        userDefaults?.set(title, forKey: "sharedTextFromExtensionTitle")
        userDefaults?.synchronize()
        // Prepare data to pass
        let encodedTitle = title.addingPercentEncoding(withAllowedCharacters: .urlQueryAllowed) ?? ""
        let encodedText = text.addingPercentEncoding(withAllowedCharacters: .urlQueryAllowed) ?? ""
        // Use a URL scheme to open the main app with parameters
        let urlScheme = "aibuddyshare://openFromShareExtension?title=\(encodedTitle)&text=\(encodedText)"
        // Ensure the URL scheme is valid before attempting to open
        if let url = URL(string: urlScheme) {
            print("Attempting to open the app with URL: \(url)") // Debugging log
            // Attempt to open the app
            let _ = openURL(url)
            close()
        } else {
            print("Invalid URL scheme: \(urlScheme)") // Log if URL is invalid
            self.close()
        }
    }

    // Walk the responder chain up to UIApplication, since app extensions
    // cannot call UIApplication.shared directly.
    @objc func openURL(_ url: URL) -> Bool {
        var responder: UIResponder? = self
        while responder != nil {
            if let application = responder as? UIApplication {
                return application.perform(#selector(openURL(_:)), with: url) != nil
            }
            responder = responder?.next
        }
        return false
    }
}
1 Answer
let extensionItems = extensionContext?.inputItems as? [NSExtensionItem] ?? []
let dispatchGroup = DispatchGroup()
var sharedText = ""
var sharedImages: [UIImage] = []
// Loop through all items
for extensionItem in extensionItems {
    if let attachments = extensionItem.attachments {
        for attachment in attachments {
            let itemProvider = attachment
            // Handle text
            if itemProvider.hasItemConformingToTypeIdentifier(UTType.plainText.identifier) {
                dispatchGroup.enter()
                itemProvider.loadItem(forTypeIdentifier: UTType.plainText.identifier, options: nil) { (providedText, error) in
                    if let text = providedText as? String {
                        sharedText = text
                    }
                    dispatchGroup.leave()
                }
            }
            // Handle images
            if itemProvider.hasItemConformingToTypeIdentifier(UTType.image.identifier) {
                dispatchGroup.enter()
                itemProvider.loadItem(forTypeIdentifier: UTType.image.identifier, options: nil) { (result, error) in
                    if let url = result as? URL, let data = try? Data(contentsOf: url), let image = UIImage(data: data) {
                        sharedImages.append(image)
                    }
                    dispatchGroup.leave()
                }
            }
        }
    }
}
Loop through all the items and collect them into the shared text variable and image array. Once every load has finished, OCR the images and display the combined result in the view.
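To round that out, here is a minimal sketch of the completion step, under the assumption that the question's Vision code is refactored into a recognizeText(in:completion:) helper that hands back the recognized string instead of presenting the view itself. That helper name is an illustration, not part of the original answer.

// Runs once every dispatchGroup.enter() has been balanced by a leave(),
// i.e. all item providers have finished loading.
dispatchGroup.notify(queue: .main) {
    var combinedText = sharedText
    let ocrGroup = DispatchGroup()
    for image in sharedImages {
        ocrGroup.enter()
        // recognizeText(in:completion:) is a hypothetical wrapper around
        // VNRecognizeTextRequest that calls back with the recognized string.
        self.recognizeText(in: image) { recognizedText in
            if !recognizedText.isEmpty {
                combinedText += "\n" + recognizedText
            }
            ocrGroup.leave()
        }
    }
    // Present only after every OCR pass has also finished.
    ocrGroup.notify(queue: .main) {
        self.displayShareView(with: combinedText)
    }
}

// One possible shape for that helper, adapted from the question's
// processImageForText(_:); a sketch, not the only way to write it.
func recognizeText(in image: UIImage, completion: @escaping (String) -> Void) {
    guard let cgImage = image.cgImage else {
        completion("")
        return
    }
    DispatchQueue.global(qos: .userInitiated).async {
        let request = VNRecognizeTextRequest { request, _ in
            let text = (request.results as? [VNRecognizedTextObservation])?
                .compactMap { $0.topCandidates(1).first?.string }
                .joined(separator: "\n") ?? ""
            DispatchQueue.main.async { completion(text) }
        }
        let handler = VNImageRequestHandler(cgImage: cgImage, options: [:])
        do {
            try handler.perform([request])
        } catch {
            DispatchQueue.main.async { completion("") }
        }
    }
}

The nested groups make the ordering explicit: the first group waits for all item providers to load, the second waits for all OCR passes, and only then is the view presented. This should avoid the hang in the question's code, where group.leave() was called before the asynchronous OCR work had finished and the text and image paths could never both complete.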