
Commit 74eef85

add SPM support
1 parent 136c838 commit 74eef85

18 files changed: +1525 −0 lines

Package.swift

Lines changed: 27 additions & 0 deletions
@@ -0,0 +1,27 @@
// swift-tools-version:5.2
// The swift-tools-version declares the minimum version of Swift required to build this package.

import PackageDescription

let package = Package(
    name: "DocumentScanner",
    platforms: [
        .iOS(.v11),
    ],
    products: [
        // Products define the executables and libraries produced by a package, and make them visible to other packages.
        .library(
            name: "DocumentScanner",
            targets: ["DocumentScanner"]),
    ],
    targets: [
        // Targets are the basic building blocks of a package. A target can define a module or a test suite.
        // Targets can depend on other targets in this package, and on products in packages which this package depends on.
        .target(
            name: "DocumentScanner",
            dependencies: []),
        .testTarget(
            name: "DocumentScannerTests",
            dependencies: ["DocumentScanner"]),
    ]
)
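The manifest above only makes the library buildable with Swift Package Manager; the consumer side is not part of this commit. As a rough sketch, an app depending on DocumentScanner might declare it in its own Package.swift roughly like this (the repository URL and version tag are placeholders, not taken from the diff):

// swift-tools-version:5.2
// Consumer-side sketch; URL and version below are illustrative assumptions.
import PackageDescription

let package = Package(
    name: "MyScannerApp",
    platforms: [
        .iOS(.v11),
    ],
    dependencies: [
        // Point this at the real DocumentScanner repository and a published tag.
        .package(url: "https://github.com/<owner>/DocumentScanner.git", from: "1.0.0"),
    ],
    targets: [
        .target(
            name: "MyScannerApp",
            dependencies: ["DocumentScanner"]),
    ]
)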

README.md

Lines changed: 1 addition & 0 deletions
@@ -6,6 +6,7 @@
 [![License](https://img.shields.io/cocoapods/l/DocumentScanner.svg?style=flat)](https://cocoapods.org/pods/DocumentScanner)
 [![Platform](https://img.shields.io/cocoapods/p/DocumentScanner.svg?style=flat)](https://cocoapods.org/pods/DocumentScanner)
 [![Carthage compatible](https://img.shields.io/badge/Carthage-compatible-4BC51D.svg?style=flat)](https://github.com/Carthage/Carthage)
+[![SPM compatible](https://img.shields.io/badge/SPM-compatible-4BC51D.svg?style=flat)](https://swift.org/package-manager)

 ## Example

Lines changed: 7 additions & 0 deletions
@@ -0,0 +1,7 @@
import UIKit

extension CGRect {
    var center: CGPoint {
        return CGPoint(x: midX, y: midY)
    }
}
Lines changed: 205 additions & 0 deletions
@@ -0,0 +1,205 @@
import UIKit
import AVFoundation
import Vision

open class Camera: NSObject {

    private let capturePhotoOutput = AVCapturePhotoOutput()
    private let videoDataOutputQueue = DispatchQueue(
        label: "VideoDataOutputQueue",
        qos: .userInitiated,
        attributes: [],
        autoreleaseFrequency: .workItem)

    private(set) var scannerView = ScannerView()
    private(set) var observationRect = ObservationRectangle()

    private(set) var rectDetector = RectangleDetector()

    var bufferSize: CGSize = .zero

    var onPhotoCapture: ((UIImage) -> Void)?

    lazy var videoDevice: AVCaptureDevice? = {
        let videoDevice = AVCaptureDevice.DiscoverySession(
            deviceTypes: [.builtInWideAngleCamera],
            mediaType: .video,
            position: .back).devices.first

        do {
            try videoDevice?.lockForConfiguration()
            // let dimensions = CMVideoFormatDescriptionGetDimensions((videoDevice?.activeFormat.formatDescription)!)
            // bufferSize.width = CGFloat(dimensions.width)
            // bufferSize.height = CGFloat(dimensions.height)
            videoDevice?.unlockForConfiguration()
        } catch {
            print(error)
        }

        return videoDevice
    }()

    lazy var captureSession: AVCaptureSession = {
        let session = AVCaptureSession()
        session.sessionPreset = .photo
        guard
            let backCamera = videoDevice,
            let input = try? AVCaptureDeviceInput(device: backCamera)
            else { return session }

        session.addInput(input)

        return session
    }()

    lazy var cameraLayer: AVCaptureVideoPreviewLayer = {
        let layer = AVCaptureVideoPreviewLayer(session: captureSession)
        layer.videoGravity = .resizeAspectFill

        return layer
    }()

    init(detector: RectangleDetector) {
        super.init()

        self.rectDetector = detector

        Utils.subscribeToDeviceOrientationNotifications(self, selector: #selector(deviceOrientationDidChange(_:)))
    }

    func prepareForSession(prepared: (AVCaptureVideoPreviewLayer, ScannerView) -> Void) {
        scannerView.cameraView.layer.addSublayer(cameraLayer)

        prepared(cameraLayer, scannerView)
    }

    func configureAndStartSessiion() {

        let videoOutput = AVCaptureVideoDataOutput()
        videoOutput.alwaysDiscardsLateVideoFrames = true
        videoOutput.setSampleBufferDelegate(self, queue: videoDataOutputQueue)
        captureSession.addOutput(videoOutput)

        captureSession.beginConfiguration()

        capturePhotoOutput.isHighResolutionCaptureEnabled = true

        if captureSession.canAddOutput(capturePhotoOutput) {
            captureSession.addOutput(capturePhotoOutput)
        }

        captureSession.commitConfiguration()

        // set initial video orientation
        updateConnectionOrientation()

        startSession()

        observeDetectorOutput()
        observeScannerViewActions()
    }

    func startSession() {
        scannerView.captureButton.isEnabled = true
        captureSession.startRunning()
    }

    func stopSession() {
        captureSession.stopRunning()
    }

    @objc private func deviceOrientationDidChange(_ notification: Notification) {
        if let superView = scannerView.superview {
            scannerView.frame.size = superView.frame.size
            cameraLayer.frame.size = superView.frame.size
        }

        // Change video orientation to always display video in correct orientation
        updateConnectionOrientation()
    }

    private func updateConnectionOrientation() {
        guard let connection = cameraLayer.connection else { return }
        connection.videoOrientation = Utils.videoOrientationFromDeviceOrientation(
            videoOrientation: connection.videoOrientation)
    }

    private func observeDetectorOutput() {
        rectDetector.onRectDetect = { [weak self] rect, newFrame in

            guard let `self` = self else { return }

            self.observationRect = rect

            let flippedRect = rect.flipped

            // Map the detected corners from capture-device coordinates into the preview layer's coordinate space
            let topLeft = self.cameraLayer.layerPointConverted(fromCaptureDevicePoint: flippedRect.topLeft)
            let topRight = self.cameraLayer.layerPointConverted(fromCaptureDevicePoint: flippedRect.topRight)
            let bottomRight = self.cameraLayer.layerPointConverted(fromCaptureDevicePoint: flippedRect.bottomRight)
            let bottomLeft = self.cameraLayer.layerPointConverted(fromCaptureDevicePoint: flippedRect.bottomLeft)

            self.scannerView.observationRect = ObservationRectangle(
                topLeft: topLeft,
                topRight: topRight,
                bottomRight: bottomRight,
                bottomLeft: bottomLeft)
        }
    }

    private func observeScannerViewActions() {
        scannerView.onImageCapture = { [weak self] in

            guard let `self` = self else { return }

            self.capturePhoto()
        }
    }

    private func capturePhoto() {
        let settings = AVCapturePhotoSettings()
        settings.flashMode = .auto
        settings.isHighResolutionPhotoEnabled = true
        settings.isAutoStillImageStabilizationEnabled = true

        // let previewPixelType = settings.availablePreviewPhotoPixelFormatTypes.first!
        // let previewFormat = [
        //     kCVPixelBufferPixelFormatTypeKey as String: previewPixelType,
        //     kCVPixelBufferWidthKey as String: 160,
        //     kCVPixelBufferHeightKey as String: 160
        // ]
        //
        // settings.previewPhotoFormat = previewFormat

        capturePhotoOutput.capturePhoto(with: settings, delegate: self)
    }
}

// MARK: - AVCaptureVideoDataOutputSampleBufferDelegate
extension Camera: AVCaptureVideoDataOutputSampleBufferDelegate {
    open func captureOutput(
        _ output: AVCaptureOutput,
        didOutput sampleBuffer: CMSampleBuffer,
        from connection: AVCaptureConnection) {

        // make sure the pixel buffer can be converted
        guard let pixelBuffer: CVPixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }

        rectDetector.detect(from: pixelBuffer)
    }
}

// MARK: - AVCapturePhotoCaptureDelegate
extension Camera: AVCapturePhotoCaptureDelegate {
    open func photoOutput(
        _ output: AVCapturePhotoOutput,
        didFinishProcessingPhoto photo: AVCapturePhoto,
        error: Error?) {

        guard let data = photo.fileDataRepresentation() else { return }

        guard let image = UIImage(data: data) else { return }

        onPhotoCapture?(image)
    }
}
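None of Camera's call sites appear in this excerpt. For orientation only, here is a minimal sketch of how a view controller inside the package might drive the API shown above; the controller name, the layout code, and what is done with the captured image are assumptions, not part of this commit.

import UIKit
import AVFoundation

// Hypothetical in-module host; only the Camera API visible in this diff is used.
final class ScanViewController: UIViewController {

    // Assumption: RectangleDetector() has a parameterless initializer, as the default above suggests.
    private lazy var camera = Camera(detector: RectangleDetector())

    override func viewDidLoad() {
        super.viewDidLoad()

        // Embed the preview layer and the scanner overlay in this controller's view.
        camera.prepareForSession { cameraLayer, scannerView in
            cameraLayer.frame = view.bounds
            scannerView.frame = view.bounds
            view.addSubview(scannerView)
        }

        // Receive the captured still image.
        camera.onPhotoCapture = { image in
            // e.g. hand off to cropping / perspective correction
            print("captured image:", image.size)
        }

        camera.configureAndStartSessiion() // method name as spelled in the class above
    }

    override func viewWillDisappear(_ animated: Bool) {
        super.viewWillDisappear(animated)
        camera.stopSession()
    }
}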
Lines changed: 27 additions & 0 deletions
@@ -0,0 +1,27 @@
import UIKit

open class CornerView: UIView {

    enum Position {
        case topLeft, topRight, bottomRight, bottomLeft
    }

    let position: Position

    init(position: Position) {
        self.position = position

        super.init(frame: CGRect(x: 0, y: 0, width: 25, height: 25))
        setup()
    }

    required public init?(coder aDecoder: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }

    private func setup() {
        layer.cornerRadius = self.bounds.width / 2
        layer.borderColor = UIColor.white.cgColor
        layer.borderWidth = 1.5
    }
}
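CornerView appears to be the small circular handle drawn at each corner of a detected rectangle. As an illustration (not part of the diff), code inside the package could attach the four handles to an overlay view like this; ObservationRectangle's corner properties are inferred from the initializer used in Camera above, and `overlay` is an assumed container.

import UIKit

// Hypothetical helper: drop one handle on each corner of a detection.
func addCornerHandles(to overlay: UIView, for corners: ObservationRectangle) {
    let handles: [(CornerView.Position, CGPoint)] = [
        (.topLeft, corners.topLeft),
        (.topRight, corners.topRight),
        (.bottomRight, corners.bottomRight),
        (.bottomLeft, corners.bottomLeft),
    ]
    for (position, point) in handles {
        let handle = CornerView(position: position)
        handle.center = point      // the 25×25 circle is centered on the corner point
        overlay.addSubview(handle)
    }
}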

0 commit comments
