Skip to content

Instantly share code, notes, and snippets.

// Serial dispatch queue so all CoreBluetooth delegate callbacks arrive in order.
_serialQueue = dispatch_queue_create(BLE_MANAGER_DISPATCH_QUEUE, DISPATCH_QUEUE_SERIAL);
// The options dictionary is never mutated after creation, so a plain literal
// suffices — the previous `-mutableCopy` allocated an NSMutableDictionary for
// nothing (and the pointer was typed NSDictionary * anyway).
NSDictionary *options = @{CBPeripheralManagerOptionShowPowerAlertKey : @YES};
// Show the system power alert if Bluetooth is off when the manager starts.
_peripheralManager = [[CBPeripheralManager alloc] initWithDelegate:self queue:_serialQueue options:options];
@SunXiaoShan
SunXiaoShan / uploadGoogleDrive.sh
Last active June 22, 2018 10:08
GoogleDriveScript
# Step 1 : setup your root folder-id of Google Drive
ROOT_FOLDER_ID=""
# Abort early when no destination folder is configured.
# NOTE: -z alone is the correct test. The old extra clause
#   -a "$ROOT_FOLDER_ID"==""
# had no spaces around ==, so test saw one non-empty string operand and the
# clause was always true — it never contributed anything.
if [ -z "$ROOT_FOLDER_ID" ]; then
    echo "error - ROOT_FOLDER_ID is empty"
    # Exit non-zero so callers (cron, CI) can detect the misconfiguration.
    exit 1
fi
# Step 2 : check backup folder exist
BACKUP=./backup/
if [ ! -d "$BACKUP" ]; then
// Wires up the face-detection pipeline against our preview view once the
// view hierarchy is loaded: create the component, start AV capture, and give
// it the overlay image drawn around detected faces.
- (void)viewDidLoad {
    [super viewDidLoad];
    FaceDetectionComponent *component =
        [[FaceDetectionComponent alloc] initWithPreviewView:self.previewView];
    self.faceDetectionComponent = component;
    [component setupAVCapture];
    [component setBorderImage:[UIImage imageNamed:@"border"]];
}
- (void)drawFaces:(NSArray *)features
forVideoBox:(CGRect)clearAperture
orientation:(UIDeviceOrientation)orientation {
NSArray *sublayers = [NSArray arrayWithArray:[self.previewLayer sublayers]];
NSInteger sublayersCount = [sublayers count], currentSublayer = 0;
NSInteger featuresCount = [features count], currentFeature = 0;
[CATransaction begin];
[CATransaction setValue:(id)kCFBooleanTrue forKey:kCATransactionDisableActions];
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
fromConnection:(AVCaptureConnection *)connection {
CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
CFDictionaryRef attachments = CMCopyDictionaryOfAttachments(kCFAllocatorDefault, sampleBuffer, kCMAttachmentMode_ShouldPropagate);
CIImage *ciImage = [[CIImage alloc] initWithCVPixelBuffer:pixelBuffer
options:(__bridge NSDictionary *)attachments];
if (attachments) {
CFRelease(attachments);
}
- (void)setupAVCapture {
NSError *error = nil;
// Select device
AVCaptureSession *session = [[AVCaptureSession alloc] init];
if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPhone) {
[session setSessionPreset:AVCaptureSessionPreset640x480];
} else {
[session setSessionPreset:AVCaptureSessionPresetPhoto];
}
@SunXiaoShan
SunXiaoShan / SetupFaceDetectingData.m
Created March 22, 2018 03:01
SetupFaceDetectingData
// Builds the Core Image face detector used to analyze captured frames.
// CIDetectorAccuracyLow is a deliberate trade-off: lower accuracy but fast
// enough to keep up with a live video feed.
- (void)setupData {
    // Modern dictionary literal replaces the legacy initWithObjectsAndKeys:.
    NSDictionary *detectorOptions = @{CIDetectorAccuracy : CIDetectorAccuracyLow};
    // nil context: the detector creates its own default CIContext.
    self._faceDetector = [CIDetector detectorOfType:CIDetectorTypeFace
                                            context:nil
                                            options:detectorOptions];
}
@SunXiaoShan
SunXiaoShan / gpx_data_sample.gpx
Created February 13, 2018 07:29
gpx data sample
<?xml version="1.0" encoding="UTF-8" standalone="no" ?>
<gpx>
<metadata>
<link href="http://www.garmin.com">
<text>Garmin International</text>
</link>
<time>2018-02-13T00:33:59Z</time>
<extensions>
<vs:srcid>1479365569</vs:srcid>
</extensions>
// SFSpeechRecognitionTaskDelegate: recognition of all requested utterances has
// finished. We reset local session state regardless of `successfully` —
// resetSRMethod stops the silence/overall timers and fires the interrupt event.
// @see https://developer.apple.com/reference/speech/sfspeechrecognitiontaskdelegate/1649215-speechrecognitiontask
open func speechRecognitionTask(_ task: SFSpeechRecognitionTask, didFinishSuccessfully successfully: Bool) {
resetSRMethod()
}
func resetSRMethod() {
stopNoAudioDurationTimer()
stopTimer()
InterruptEvent()
// SFSpeechRecognitionTaskDelegate: a new partial transcription hypothesis
// arrived. Record the latest text and restart the silence timer — every
// hypothesis is proof that audio is still coming in.
open func speechRecognitionTask(_ task: SFSpeechRecognitionTask, didHypothesizeTranscription transcription: SFTranscription) {
    recognizedText = transcription.formattedString
    // Restart (not merely extend) the no-audio window.
    stopNoAudioDurationTimer()
    startNoAudioDurationTimer()
}
func startNoAudioDurationTimer() {
self.stopTimer()