Comments (7)
Sure thing.
#import "MPPIrisTracker.h"
#import "mediapipe/objc/MPPGraph.h"
#import "mediapipe/objc/MPPCameraInputSource.h"
#import "mediapipe/objc/MPPLayerRenderer.h"
#include "mediapipe/framework/formats/landmark.pb.h"
static NSString* const kGraphName = @"iris_tracking_gpu";
static const char* kInputStream = "input_video";
static const char* kOutputStream = "output_video";
static const char* kLandmarksOutputStream = "iris_landmarks";
static const char* kVideoQueueLabel = "com.google.mediapipe.example.videoQueue";
/// Input side packet for focal length parameter.
std::map<std::string, mediapipe::Packet> _input_side_packets;
mediapipe::Packet _focal_length_side_packet;
@interface MPPIrisTracker() <MPPGraphDelegate>
@property(nonatomic) MPPGraph* mediapipeGraph;
@end
@implementation MPPIrisTracker { }
#pragma mark - Cleanup methods
- (void)dealloc {
self.mediapipeGraph.delegate = nil;
[self.mediapipeGraph cancel];
// Ignore errors since we're cleaning up.
[self.mediapipeGraph closeAllInputStreamsWithError:nil];
[self.mediapipeGraph waitUntilDoneWithError:nil];
}
#pragma mark - MediaPipe graph methods
// https://google.github.io/mediapipe/getting_started/hello_world_ios.html#using-a-mediapipe-graph-in-ios
+ (MPPGraph*)loadGraphFromResource:(NSString*)resource {
// Load the graph config resource.
NSError* configLoadError = nil;
NSBundle* bundle = [NSBundle bundleForClass:[self class]];
if (!resource || resource.length == 0) {
return nil;
}
NSURL* graphURL = [bundle URLForResource:resource withExtension:@"binarypb"];
NSData* data = [NSData dataWithContentsOfURL:graphURL options:0 error:&configLoadError];
if (!data) {
NSLog(@"Failed to load MediaPipe graph config: %@", configLoadError);
return nil;
}
// Parse the graph config resource into mediapipe::CalculatorGraphConfig proto object.
mediapipe::CalculatorGraphConfig config;
config.ParseFromArray(data.bytes, data.length);
// Create MediaPipe graph with mediapipe::CalculatorGraphConfig proto object.
MPPGraph* newGraph = [[MPPGraph alloc] initWithGraphConfig:config];
_focal_length_side_packet =
mediapipe::MakePacket<std::unique_ptr<float>>(absl::make_unique<float>(0.0));
_input_side_packets = {
{"focal_length_pixel", _focal_length_side_packet},
};
[newGraph addSidePackets:_input_side_packets];
[newGraph addFrameOutputStream:kLandmarksOutputStream outputPacketType:MPPPacketTypeRaw];
[newGraph addFrameOutputStream:kOutputStream outputPacketType:MPPPacketTypePixelBuffer];
return newGraph;
}
- (instancetype)init
{
self = [super init];
if (self) {
self.mediapipeGraph = [[self class] loadGraphFromResource:kGraphName];
self.mediapipeGraph.delegate = self;
self.mediapipeGraph.maxFramesInFlight = 2;
}
return self;
}
- (void)startGraph {
// Start running self.mediapipeGraph.
NSError* error;
if (![self.mediapipeGraph startWithError:&error]) {
NSLog(@"Failed to start graph: %@", error);
}
}
#pragma mark - MPPInputSourceDelegate methods
- (void)processVideoFrame:(CVPixelBufferRef)imageBuffer
timestamp:(CMTime)timestamp {
mediapipe::Timestamp graphTimestamp(static_cast<mediapipe::TimestampBaseType>(
mediapipe::Timestamp::kTimestampUnitsPerSecond * CMTimeGetSeconds(timestamp)));
[self.mediapipeGraph sendPixelBuffer:imageBuffer
intoStream:kInputStream
packetType:MPPPacketTypePixelBuffer
timestamp:graphTimestamp];
}
#pragma mark - MPPGraphDelegate methods
// Receives CVPixelBufferRef from the MediaPipe graph. Invoked on a MediaPipe worker thread.
- (void)mediapipeGraph:(MPPGraph*)graph
didOutputPixelBuffer:(CVPixelBufferRef)pixelBuffer
fromStream:(const std::string&)streamName {
if (streamName == kOutputStream) {
[_delegate irisTracker: self didOutputPixelBuffer: pixelBuffer];
}
}
// Receives a raw packet from the MediaPipe graph. Invoked on a MediaPipe worker thread.
- (void)mediapipeGraph:(MPPGraph*)graph
didOutputPacket:(const ::mediapipe::Packet&)packet
fromStream:(const std::string&)streamName {
if (streamName == kLandmarksOutputStream) {
if (packet.IsEmpty()) {
NSLog(@"[TS:%lld] No iris landmarks", packet.Timestamp().Value());
return;
}
const auto& landmarks = packet.Get<::mediapipe::NormalizedLandmarkList>();
NSLog(@"[TS:%lld] Number of landmarks on iris: %d", packet.Timestamp().Value(),
landmarks.landmark_size());
for (int i = 0; i < landmarks.landmark_size(); ++i) {
NSLog(@"\tLandmark[%d]: (%f, %f, %f)", i, landmarks.landmark(i).x(),
landmarks.landmark(i).y(), landmarks.landmark(i).z());
}
}
}
@end
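For completeness: the .mm above assumes a small public header that isn't shown in this comment. A minimal sketch of what MPPIrisTracker.h would need to declare for the file to compile (the protocol name, property, and selectors are inferred from the calls above, not copied from the actual header):

#import <Foundation/Foundation.h>
#import <CoreMedia/CoreMedia.h>
#import <CoreVideo/CoreVideo.h>

@class MPPIrisTracker;

// Receives the rendered frames that the graph emits on the "output_video" stream.
// Sketch only: the real protocol/property names in the repo may differ.
@protocol MPPIrisTrackerDelegate <NSObject>
- (void)irisTracker:(MPPIrisTracker*)irisTracker
    didOutputPixelBuffer:(CVPixelBufferRef)pixelBuffer;
@end

@interface MPPIrisTracker : NSObject
@property(nonatomic, weak) id<MPPIrisTrackerDelegate> delegate;
- (void)startGraph;
- (void)processVideoFrame:(CVPixelBufferRef)imageBuffer timestamp:(CMTime)timestamp;
@end

Typical usage mirrors the hello_world_ios example linked in the code: create the tracker, set its delegate, call startGraph, and forward each camera frame from an MPPCameraInputSource callback into processVideoFrame:timestamp: on the video queue.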
@kiranscaria The implementation is still in progress; I'll add it to the repo once the pose estimation part is done. Thank you.
@P1xelfehler You can check out the latest commit (05deb55).
+new file: src/ios/hand/BUILD
+new file: src/ios/hand/Info.plist
+new file: src/ios/hand/MPPBHand.h
+new file: src/ios/hand/MPPBHand.mm
+new file: src/ios/iris/BUILD
+new file: src/ios/iris/Info.plist
+new file: src/ios/iris/MPPBIris.h
+new file: src/ios/iris/MPPBIris.mm
Thank you!
Thanks
Looking forward to it.
Could you please upload it to this repo as well?