Commit a7decf3

changes
1 parent 78a4790 commit a7decf3

4 files changed: +55 -49 lines changed

mlkit-ios/.gitignore

Lines changed: 6 additions & 1 deletion
@@ -22,5 +22,10 @@ DerivedData/
 *.perspectivev3
 !default.perspectivev3
 
+# Pods
+Pods
+
+# Firebase
+GoogleService-Info.plist
+
 .DS_Store
-Pods/
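
Note: a .gitignore rule does not untrack files that are already committed; GoogleService-Info.plist is still tracked here (it is modified later in this same commit), so the new entries mainly keep untracked copies and the Pods directory out of future commits. If the config were fully untracked, a fail-fast check at launch makes a missing file obvious on a fresh checkout. A minimal sketch (Swift 4-era AppDelegate wiring assumed; not part of this commit):

import UIKit
import Firebase

@UIApplicationMain
class AppDelegate: UIResponder, UIApplicationDelegate {
  var window: UIWindow?

  func application(_ application: UIApplication,
                   didFinishLaunchingWithOptions launchOptions: [UIApplicationLaunchOptionsKey: Any]?) -> Bool {
    // Fail fast if the (now git-ignored) Firebase config is absent from the bundle.
    guard Bundle.main.path(forResource: "GoogleService-Info", ofType: "plist") != nil else {
      fatalError("GoogleService-Info.plist is missing; download it from the Firebase console")
    }
    FirebaseApp.configure()  // reads the bundled plist
    return true
  }
}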

mlkit-ios/text-recognition/final/text-recognition/text-recognition.xcodeproj/project.pbxproj

Lines changed: 4 additions & 4 deletions
@@ -13,7 +13,7 @@
     BB70D4E3209CF98500743DA7 /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = BB70D4E1209CF98500743DA7 /* Main.storyboard */; };
     BB70D4E5209CF98500743DA7 /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = BB70D4E4209CF98500743DA7 /* Assets.xcassets */; };
     BB70D4E8209CF98500743DA7 /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = BB70D4E6209CF98500743DA7 /* LaunchScreen.storyboard */; };
-    BB70D4F0209CF9C500743DA7 /* GoogleService-Info.plist in Resources */ = {isa = PBXBuildFile; fileRef = BB70D4EF209CF9C500743DA7 /* GoogleService-Info.plist */; };
+    BBAAAE9920A210F500FA85D0 /* GoogleService-Info.plist in Resources */ = {isa = PBXBuildFile; fileRef = BBAAAE9820A210F500FA85D0 /* GoogleService-Info.plist */; };
 /* End PBXBuildFile section */
 
 /* Begin PBXFileReference section */
@@ -27,7 +27,7 @@
     BB70D4E4209CF98500743DA7 /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = "<group>"; };
     BB70D4E7209CF98500743DA7 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/LaunchScreen.storyboard; sourceTree = "<group>"; };
     BB70D4E9209CF98500743DA7 /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = "<group>"; };
-    BB70D4EF209CF9C500743DA7 /* GoogleService-Info.plist */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.plist.xml; path = "GoogleService-Info.plist"; sourceTree = "<group>"; };
+    BBAAAE9820A210F500FA85D0 /* GoogleService-Info.plist */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.plist.xml; path = "GoogleService-Info.plist"; sourceTree = "<group>"; };
 /* End PBXFileReference section */
 
 /* Begin PBXFrameworksBuildPhase section */
@@ -84,7 +84,7 @@
       BB70D4DF209CF98500743DA7 /* ViewController.swift */,
       BB70D4F1209D000A00743DA7 /* Supporting Files */,
       BB70D4E9209CF98500743DA7 /* Info.plist */,
-      BB70D4EF209CF9C500743DA7 /* GoogleService-Info.plist */,
+      BBAAAE9820A210F500FA85D0 /* GoogleService-Info.plist */,
     );
     path = "text-recognition";
     sourceTree = "<group>";
@@ -162,7 +162,7 @@
     buildActionMask = 2147483647;
     files = (
       BB70D4E8209CF98500743DA7 /* LaunchScreen.storyboard in Resources */,
-      BB70D4F0209CF9C500743DA7 /* GoogleService-Info.plist in Resources */,
+      BBAAAE9920A210F500FA85D0 /* GoogleService-Info.plist in Resources */,
       BB70D4E5209CF98500743DA7 /* Assets.xcassets in Resources */,
       BB70D4E3209CF98500743DA7 /* Main.storyboard in Resources */,
     );

mlkit-ios/text-recognition/final/text-recognition/text-recognition/GoogleService-Info.plist

Lines changed: 8 additions & 8 deletions
@@ -7,21 +7,21 @@
   <key>AD_UNIT_ID_FOR_INTERSTITIAL_TEST</key>
   <string>ca-app-pub-3940256099942544/4411468910</string>
   <key>CLIENT_ID</key>
-  <string>959916140374-m77p0u9uo2tq8a8p70mng326ijrluhb6.apps.googleusercontent.com</string>
+  <string>83487835625-ug0l17p5f6huffrd0qttf9dtotd91gkk.apps.googleusercontent.com</string>
   <key>REVERSED_CLIENT_ID</key>
-  <string>com.googleusercontent.apps.959916140374-m77p0u9uo2tq8a8p70mng326ijrluhb6</string>
+  <string>com.googleusercontent.apps.83487835625-ug0l17p5f6huffrd0qttf9dtotd91gkk</string>
   <key>API_KEY</key>
-  <string>AIzaSyAOJFq8_p89x7kx2EeL7EUd3pB2SiscXJk</string>
+  <string>AIzaSyBlRcXZE9wvqpV0jxnVd7MbdAES31TlV7s</string>
   <key>GCM_SENDER_ID</key>
-  <string>959916140374</string>
+  <string>83487835625</string>
   <key>PLIST_VERSION</key>
   <string>1</string>
   <key>BUNDLE_ID</key>
   <string>com.google.firebase.codelab.mlkit</string>
   <key>PROJECT_ID</key>
-  <string>ml-kit-ios-codelab</string>
+  <string>ml-kit-day</string>
   <key>STORAGE_BUCKET</key>
-  <string>ml-kit-ios-codelab.appspot.com</string>
+  <string>ml-kit-day.appspot.com</string>
   <key>IS_ADS_ENABLED</key>
   <true></true>
   <key>IS_ANALYTICS_ENABLED</key>
@@ -33,8 +33,8 @@
   <key>IS_SIGNIN_ENABLED</key>
   <true></true>
   <key>GOOGLE_APP_ID</key>
-  <string>1:959916140374:ios:693c00b96c712e8c</string>
+  <string>1:83487835625:ios:693c00b96c712e8c</string>
   <key>DATABASE_URL</key>
-  <string>https://ml-kit-ios-codelab.firebaseio.com</string>
+  <string>https://ml-kit-day.firebaseio.com</string>
 </dict>
 </plist>
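
The edits above re-point the app from the ml-kit-ios-codelab Firebase project to ml-kit-day (new client ID, API key, sender ID, storage bucket, and database URL). FirebaseApp.configure() picks the bundled plist up automatically; when switching between two project configs it can help to load the options explicitly and log which project is active. A minimal sketch (the configureFirebase() helper is hypothetical, not part of this commit):

import Foundation
import Firebase

func configureFirebase() {
  // Load the bundled config explicitly instead of relying on the default lookup.
  guard let path = Bundle.main.path(forResource: "GoogleService-Info", ofType: "plist"),
        let options = FirebaseOptions(contentsOfFile: path) else {
    fatalError("Could not load GoogleService-Info.plist")
  }
  // With the plist from this commit this should print "ml-kit-day".
  print("Configuring Firebase project: \(options.projectID ?? "unknown")")
  FirebaseApp.configure(options: options)
}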

mlkit-ios/text-recognition/final/text-recognition/text-recognition/ViewController.swift

Lines changed: 37 additions & 36 deletions
@@ -35,7 +35,7 @@ class ViewController: UIViewController, UIPickerViewDelegate, UIPickerViewDataSo
   let images = [
     ImageDisplay(file: "do-not-feed-birds", name: "Image 1"),
     ImageDisplay(file: "walk-on-grass", name: "Image 2"),
-]
+  ]
 
   override func viewDidLoad() {
     super.viewDidLoad()
@@ -57,8 +57,6 @@ class ViewController: UIViewController, UIPickerViewDelegate, UIPickerViewDataSo
     runCloudTextRecognition(with: imageView.image!)
   }
 
-
-
   // MARK: Text Recognition
 
   func runTextRecognition(with image: UIImage) {
@@ -71,57 +69,60 @@ class ViewController: UIViewController, UIPickerViewDelegate, UIPickerViewDataSo
   func runCloudTextRecognition(with image: UIImage) {
     let visionImage = VisionImage(image: image)
     cloudTextDetector.detect(in: visionImage) { features, error in
+      if let error = error {
+        print("Received error: \(error)")
+        return
+      }
+
       self.processCloudResult(from: features, error: error)
     }
   }
+
 
   // MARK: Image Drawing
 
   func processResult(from text: [VisionText]?, error: Error?) {
     removeFrames()
-    if let features = text, let image = imageView.image {
-      for text in features {
-        if let block = text as? VisionTextBlock {
-          for line in block.lines {
-            for element in line.elements {
-              self.addFrameView(
-                featureFrame: element.frame,
-                imageSize: image.size,
-                viewFrame: self.imageView.frame,
-                text: element.text
-              )
-            }
+    guard let features = text, let image = imageView.image else {
+      return
+    }
+    for text in features {
+      if let block = text as? VisionTextBlock {
+        for line in block.lines {
+          for element in line.elements {
+            self.addFrameView(
+              featureFrame: element.frame,
+              imageSize: image.size,
+              viewFrame: self.imageView.frame,
+              text: element.text
+            )
           }
         }
       }
     }
   }
+
 
   func processCloudResult(from text: VisionCloudText?, error: Error?) {
     removeFrames()
-    if let features = text, let image = imageView.image, let pages = features.pages {
-      for page in pages {
-        if let blocks = page.blocks {
-          for block in blocks {
-            if let paragraphs = block.paragraphs {
-              for paragraph in paragraphs {
-                if let words = paragraph.words {
-                  for word in words {
-                    self.addFrameView(
-                      featureFrame: word.frame,
-                      imageSize: image.size,
-                      viewFrame: self.imageView.frame
-                    )
-                  }
-                }
-              }
-            }
+    guard let features = text, let image = imageView.image, let pages = features.pages else {
+      return
+    }
+    for page in pages {
+      for block in page.blocks ?? [] {
+        for paragraph in block.paragraphs ?? [] {
+          for word in paragraph.words ?? [] {
+            self.addFrameView(
+              featureFrame: word.frame,
+              imageSize: image.size,
+              viewFrame: self.imageView.frame
+            )
          }
        }
-
      }
    }
  }
+

   /// Converts a feature frame to a frame UIView that is displayed over the image.
   ///
@@ -184,10 +185,10 @@ class ViewController: UIViewController, UIPickerViewDelegate, UIPickerViewDataSo
     textLayer.string = text
     textLayer.fontSize = 12.0
     textLayer.foregroundColor = Constants.lineColor
-    let center = CGPoint(x: rect.midX, y: rect.midX)
+    let center = CGPoint(x: rect.midX, y: rect.midY)
     textLayer.position = center
-    textLayer.isHidden = false
-    textLayer.alignmentMode = kCAAlignmentLeft
+    textLayer.frame = rect
+    textLayer.alignmentMode = kCAAlignmentCenter
     textLayer.contentsScale = UIScreen.main.scale
     frameSublayer.addSublayer(textLayer)
   }
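
The ViewController changes swap two pyramids of nested if let for a guard-based early return plus nil-coalesced iteration (for block in page.blocks ?? []), and add an error check before processing cloud results. A standalone sketch of the same pattern, where Page, Block, Paragraph, and Word are hypothetical stand-ins for the VisionCloudText hierarchy:

struct Word { let text: String }
struct Paragraph { let words: [Word]? }
struct Block { let paragraphs: [Paragraph]? }
struct Page { let blocks: [Block]? }

func allWords(in pages: [Page]?) -> [String] {
  // Early exit replaces wrapping the whole body in `if let`.
  guard let pages = pages else {
    return []
  }
  var result: [String] = []
  for page in pages {
    for block in page.blocks ?? [] {            // `?? []` skips nil arrays
      for paragraph in block.paragraphs ?? [] { // without an `if let` level
        for word in paragraph.words ?? [] {
          result.append(word.text)
        }
      }
    }
  }
  return result
}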
