This is the official Swift SDK for the Skyle eye tracker sold by the eyeV GmbH. The SDK is based on the Skyle gRPC protos and Apple's Combine framework.
It works on macOS 10.15 (Catalina) and later, and on iPadOS 13 and later.
Important: Update to the latest firmware (>= v3.0) on your eye tracker before using this!
You need to install the following to get RNDIS support on macOS: hoRNDIS
Follow these steps to install on Big Sur.
Some modules are deprecated including:
- MjpegStream -> use Video
- Profiles, Profile -> no replacement
- Reset -> not needed anymore
import SkyleKit
import SwiftUI
#if os(iOS)
import UIKit
#elseif os(macOS)
import AppKit
#endif
/// Converts a `Skyle_RawImage` into a `CGImage`.
/// Assumes a one-channel (grayscale) 8-bit image, i.e. one byte per pixel.
/// - Parameter rawImage: A raw frame from the tracker's video stream.
/// - Returns: A grayscale `CGImage`, or `nil` if the dimensions are invalid,
///   the pixel buffer is smaller than `width * height`, or the data provider
///   could not be created.
func createCGImage(from rawImage: Skyle_RawImage) -> CGImage? {
    let width = Int(rawImage.width)
    let height = Int(rawImage.height)
    let bitsPerComponent = 8
    let bitsPerPixel = 8
    let bytesPerRow = width // since 1 byte per pixel (grayscale, no alpha)
    // Reject empty frames and buffers too small for the declared dimensions —
    // CGImage would otherwise read past the end of the provided data.
    guard width > 0, height > 0, rawImage.data.count >= width * height else {
        return nil
    }
    let colorSpace = CGColorSpaceCreateDeviceGray()
    let bitmapInfo = CGBitmapInfo(rawValue: CGImageAlphaInfo.none.rawValue)
    guard let provider = CGDataProvider(data: rawImage.data as CFData) else {
        return nil
    }
    return CGImage(width: width,
                   height: height,
                   bitsPerComponent: bitsPerComponent,
                   bitsPerPixel: bitsPerPixel,
                   bytesPerRow: bytesPerRow,
                   space: colorSpace,
                   bitmapInfo: bitmapInfo,
                   provider: provider,
                   decode: nil,
                   shouldInterpolate: true,
                   intent: .defaultIntent)
}
#if os(iOS)
/// Returns the main screen's physical size in millimeters and its resolution in pixels.
/// The physical size is derived from a hard-coded PPI of 326 (typical for many
/// iPhones) — adjust this for the actual device if a precise size matters.
func getScreenMetrics() -> (size: CGSize, resolution: CGSize)? {
    let screen = UIScreen.main
    // Pixel resolution = bounds (points) scaled by the screen's scale factor.
    let pointSize = screen.bounds.size
    let resolution = CGSize(width: pointSize.width * screen.scale,
                            height: pointSize.height * screen.scale)
    // Assumed pixels-per-inch; many iPhones use 326 — TODO confirm per device.
    let pixelsPerInch: CGFloat = 326.0
    let millimetersPerInch: CGFloat = 25.4
    // pixels / ppi = inches; inches * 25.4 = millimeters.
    let size = CGSize(width: resolution.width / pixelsPerInch * millimetersPerInch,
                      height: resolution.height / pixelsPerInch * millimetersPerInch)
    return (size, resolution)
}
#elseif os(macOS)
/// Returns the main screen's physical size in millimeters and its resolution in pixels.
/// DPI is read from the screen's device description; falls back to 72 when
/// the system does not report one. Returns `nil` when no main screen exists.
func getScreenMetrics() -> (size: CGSize, resolution: CGSize)? {
    guard let screen = NSScreen.main else { return nil }
    // Pixel resolution = frame (points) scaled by the backing scale factor.
    let pointSize = screen.frame.size
    let resolution = CGSize(width: pointSize.width * screen.backingScaleFactor,
                            height: pointSize.height * screen.backingScaleFactor)
    // Horizontal and vertical DPI are usually identical, so the width is used.
    let dotsPerInch: CGFloat
    if let reported = screen.deviceDescription[NSDeviceDescriptionKey.resolution] as? NSSize {
        dotsPerInch = reported.width
    } else {
        dotsPerInch = 72.0 // conventional fallback when no DPI is reported
    }
    let millimetersPerInch: CGFloat = 25.4
    // pixels / dpi = inches; inches * 25.4 = millimeters.
    let size = CGSize(width: resolution.width / dotsPerInch * millimetersPerInch,
                      height: resolution.height / dotsPerInch * millimetersPerInch)
    return (size, resolution)
}
#endif
/// Demo view that visualizes the Skyle eye tracker's streams: firmware
/// version, gaze point, connectivity, averaged eye distances, per-eye
/// tracking validity, the camera video feed and a live gaze cursor.
struct ContentView: View {
    // NOTE(review): this view creates all of these objects itself in `init`;
    // `@StateObject` would express that ownership better, but it requires
    // macOS 11 / iOS 14 — confirm the deployment target before switching.
    @ObservedObject var et: ET // Connectivity stream
    @ObservedObject var gaze: ET.Gaze // Gaze stream
    @ObservedObject var version: ET.Version
    @ObservedObject var positioning: ET.Positioning // Positioning, Features and Distance stream
    @ObservedObject var control: ET.Control
    @ObservedObject var video: ET.Video
    // Origin of this view in global (screen) coordinates; used to map the
    // tracker's absolute gaze coordinates into local view coordinates.
    @State private var windowOrigin: CGPoint = .zero
    // Eye distances in centimeters, averaged over ~1 s of samples.
    @State private var leftDistanceCM: Double = 0
    @State private var rightDistanceCM: Double = 0

    init() {
        let et = ET()
        self.et = et
        self.gaze = et.gaze // please use makeGaze() instead
        self.version = et.version // please use makeVersion() instead
        self.positioning = et.positioning // please use makePositioning() instead
        self.control = et.control // please use makeControl() instead
        self.video = et.video
    }

    var body: some View {
        GeometryReader { geometry in
            VStack {
                Text("Welcome to Skyle").font(.system(.title)).padding()
                // Fixed typo: "Frimware" -> "Firmware".
                Text("Firmware version: \(self.version.firmware)")
                Text("Current gaze point: x: \(Int(self.gaze.point.x.rounded())) y: \(Int(self.gaze.point.y.rounded()))")
                Text("\(self.et.hardConnectivity ? "Ethernet Connected" : "Ethernet disconnected")")
                Text("\(self.et.connectivity)")
                HStack {
                    Text("\(leftDistanceCM, specifier: "%.1f") cm")
                    Text("\(rightDistanceCM, specifier: "%.1f") cm")
                }
                .onReceive(
                    // Batch up to 1 s (or 60 samples) of face updates and
                    // publish the averaged left/right eye distances.
                    self.positioning.$face
                        .collect(.byTimeOrCount(RunLoop.main, .milliseconds(1000), 60))
                        .compactMap { faces -> (Double, Double)? in
                            // Convert values (in mm) to arrays, filtering out nil faces.
                            let leftValues = faces.compactMap { $0?.eyes.left.distance }
                            let rightValues = faces.compactMap { $0?.eyes.right.distance }
                            guard !leftValues.isEmpty, !rightValues.isEmpty else {
                                return nil
                            }
                            let avgLeft = leftValues.reduce(0, +) / Double(leftValues.count)
                            let avgRight = rightValues.reduce(0, +) / Double(rightValues.count)
                            return (avgLeft, avgRight)
                        }
                ) { (avgLeft, avgRight) in
                    // Millimeters -> centimeters for display.
                    leftDistanceCM = avgLeft / 10
                    rightDistanceCM = avgRight / 10
                }
                // Green/red indicators for whether each eye is currently tracked.
                HStack {
                    Circle()
                        .fill(self.positioning.face?.eyes.left.isValid ?? false ? Color.green : Color.red)
                        .frame(width: 20, height: 20)
                        .padding(20)
                        .animation(.easeInOut, value: self.positioning.face?.eyes.left.isValid)
                    Circle()
                        .fill(self.positioning.face?.eyes.right.isValid ?? false ? Color.green : Color.red)
                        .frame(width: 20, height: 20)
                        .padding(20)
                        .animation(.easeInOut, value: self.positioning.face?.eyes.right.isValid)
                }
                GeometryReader { proxy in
                    ZStack {
                        // Live camera frame, or a gray placeholder until one arrives.
                        if let cgImage = createCGImage(from: self.video.image) {
                            #if os(iOS)
                            Image(uiImage: UIImage(cgImage: cgImage))
                                .resizable()
                                .scaledToFit()
                            #elseif os(macOS)
                            Image(nsImage: NSImage(cgImage: cgImage, size: .zero))
                                .resizable()
                                .scaledToFit()
                            #endif
                        } else {
                            Rectangle()
                                .fill(Color.gray)
                        }
                        // Blue dots marking the detected eye positions in the video frame.
                        if self.positioning.isPresent {
                            if self.positioning.position.left != Point.zero {
                                Circle().fill(Color.blue)
                                    .frame(width: 10, height: 10)
                                    .position(x: self.calculateX(self.positioning.position.left.x, proxy: proxy), y: self.calculateY(self.positioning.position.left.y, proxy: proxy))
                            }
                            if self.positioning.position.right != Point.zero {
                                Circle().fill(Color.blue)
                                    .frame(width: 10, height: 10)
                                    .position(x: self.calculateX(self.positioning.position.right.x, proxy: proxy), y: self.calculateY(self.positioning.position.right.y, proxy: proxy))
                            }
                        }
                        // Red dot at the current gaze point.
                        Circle()
                            .fill(Color.red)
                            .frame(width: 20, height: 20)
                            // Convert absolute screen coordinates to local coordinates:
                            // localPoint = absolutePoint - windowOrigin
                            .position(x: self.gaze.point.x - windowOrigin.x, // subtract windowOrigin just for macos
                                      y: self.gaze.point.y - windowOrigin.y)
                            .animation(.easeInOut, value: self.gaze.point)
                    }
                }
                .frame(width: 300 / self.factor, height: 300)
            }
            .frame(maxWidth: .infinity, maxHeight: .infinity)
            .onReceive(self.et.$connectivity) { connected in
                // Start the data streams once the tracker connection is ready.
                if connected == .ready {
                    self.gaze.start()
                    self.positioning.start()
                    self.video.start()
                }
            }
            // Removed a dead `.onReceive(self.et.$hardConnectivity) { _ in }`
            // subscription whose handler was empty.
            .onAppear {
                // Get the view's frame in global coordinates.
                let frame = geometry.frame(in: .global)
                windowOrigin = frame.origin
                // Report the physical screen dimensions to the tracker so it
                // can map gaze to screen coordinates.
                if let metrics = getScreenMetrics() {
                    print("Pixel Resolution: \(metrics.resolution)")
                    print("Physical Size (mm): \(metrics.size)")
                    et.control.setDisplayDimensions(metrics)
                }
            }
            .onChange(of: geometry.frame(in: .global)) { newFrame in
                // Update windowOrigin if the window moves or resizes.
                windowOrigin = newFrame.origin
            }
        }
    }

    /// Aspect ratio (height / width) of the tracker camera's resolution.
    /// NOTE(review): divides by `et.resolution().width` — presumably never
    /// zero for a connected tracker; verify against the SDK.
    private var factor: CGFloat {
        return CGFloat(et.resolution().height / et.resolution().width)
    }

    /// Maps a camera-space x coordinate into the (horizontally centered)
    /// video area of `proxy`.
    private func calculateX(_ value: Double, proxy: GeometryProxy) -> CGFloat {
        let width = Double(proxy.size.height / self.factor)
        let v = CGFloat(value / et.resolution().width * width)
        // Center horizontally within the available width.
        return v + (proxy.size.width - proxy.size.height / self.factor) / 2
    }

    /// Maps a camera-space y coordinate into the video area of `proxy`.
    private func calculateY(_ value: Double, proxy: GeometryProxy) -> CGFloat {
        let height = Double(proxy.size.height)
        return CGFloat(value / et.resolution().height * height)
    }
}
// Xcode canvas preview of the demo view.
#Preview {
ContentView()
}
You can also include SkyleKit with SPM:
Just add the following to your dependencies
in your Package.swift file.
.package(url: "https://github.com/eyev-de/SkyleKit.git", from: "1.0.0"),
Alternatively, you can add it to your project in Xcode. Go to File -> Swift Packages -> Add Package Dependency...
(c) 2020 eyeV GmbH, written by Konstantin Wachendorff
Distributed under the MIT license. See LICENSE for more information.
Also see our other repos here
If you bought the Skyle eye tracker and need support, please contact support@eyev.de.