Alejandro Cotilla (acotilla91)

<component name="GIFDecoder" extends="Task">
    <interface>
        <!-- Callback listener -->
        <field id="delegate" type="node"/>

        <!-- Extracts the frames of the given GIF and stores them locally as individual GIF files.
             - Parameter {String} gifUrl: The url of the GIF to decode. -->
        <function name="decodeGIF"/>
    </interface>

<apps>
    <app id="31012" type="menu" version="1.9.28">FandangoNOW Movies &amp; TV</app>
    <app id="12" subtype="ndka" type="appl" version="4.2.81179030">Netflix</app>
    <app id="13" subtype="ndka" type="appl" version="10.8.2019032615">Prime Video</app>
    <app id="837" subtype="ndka" type="appl" version="1.0.80000284">YouTube</app>
    <app id="46041" subtype="ndka" type="appl" version="5.13.154">Sling TV</app>
    <app id="61322" subtype="rsga" type="appl" version="4.6.227">HBO NOW</app>
    <app id="245916" subtype="rsga" type="appl" version="1.2.6">Pop Now</app>
    <app id="73386" subtype="rsga" type="appl" version="2.11.76">MTV</app>
</apps>
import Foundation

/// Parses the "status" field out of a JSON payload and returns it as an Int.
/// Returns -1 when parsing fails (assumed fallback; the original snippet is truncated here).
func getStatusWithAnyCasting(jsonData: Data) -> Int {
    guard
        let json = try? JSONSerialization.jsonObject(with: jsonData, options: []),
        let dict = json as? [String: Any],
        let statusString = dict["status"] as? String,
        !statusString.isEmpty,
        let status = Int(statusString)
    else {
        return -1 // assumed fallback value
    }
    return status
}
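
A quick usage sketch for the helper above; the sample payloads and the -1 fallback value are illustrative assumptions:

let valid = #"{"status": "200"}"#.data(using: .utf8)!
print(getStatusWithAnyCasting(jsonData: valid))   // 200

let invalid = #"{"status": ""}"#.data(using: .utf8)!
print(getStatusWithAnyCasting(jsonData: invalid)) // -1 (fallback)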

func collectionView(_ collectionView: UICollectionView, didSelectItemAt indexPath: IndexPath) {
    activityIndicator.startAnimating()
    let person = persons[indexPath.item]

    // Clear photos section
    self.photos = []
    collectionView.reloadSections([photosSection])

    // Disable interactions while finding similar images
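    // Hypothetical continuation of this handler -- the lines below are a sketch, not the gist's code.
    // findSimilars(faceId:faceIds:completion:) and allPhotos are assumed names for illustration.
    collectionView.isUserInteractionEnabled = false

    AzureFaceRecognition.shared.findSimilars(faceId: person.faceId, faceIds: allPhotosFaceIds) { similarFaceIds in
        DispatchQueue.main.async {
            // Show only the photos that contain at least one of the matched faces.
            self.photos = self.allPhotos.filter { !Set($0.faceIds).isDisjoint(with: similarFaceIds) }
            collectionView.reloadSections([self.photosSection])
            collectionView.isUserInteractionEnabled = true
            self.activityIndicator.stopAnimating()
        }
    }
}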

func photos(withFaceIds faceIds: [String]) -> [Photo] {
    var filteredPhotos: [Photo] = []
    let faceIdsSet = Set(faceIds)
    for photo in photos {
        // Keep the photo if it contains at least one of the requested face IDs.
        let hasFaceIds = Set(photo.faceIds).intersection(faceIdsSet).isEmpty == false
        if hasFaceIds {
            filteredPhotos.append(photo)
        }
    }
    return filteredPhotos
}
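
Usage sketch; the face IDs below are placeholders:

let matches = photos(withFaceIds: ["<face-id-1>", "<face-id-2>"])
print("Found \(matches.count) photos containing the requested faces.")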

lazy var allPhotosFaceIds: [String] = {
    var allFaceIds: [String] = []
    for photo in photos {
        allFaceIds.append(contentsOf: photo.faceIds)
    }
    return allFaceIds
}()

// Create Persons
for (avatarData, avatarImage) in zip(avatarDatas, avatarImages) {
    if let faceId = AzureFaceRecognition.shared.syncDetectFaceIds(imageData: avatarData).first {
        let person = Person()
        person.faceId = faceId
        person.avatar = avatarImage
        self.persons.append(person)
    }
}
acotilla91 / AzureFaceRecognition.swift
Created August 29, 2018 11:10
Microsoft Azure Face API integrated on iOS
import UIKit

let APIKey = "<API-KEY>" // Ocp-Apim-Subscription-Key
let Region = "eastus"

let FindSimilarsUrl = "https://\(Region).api.cognitive.microsoft.com/face/v1.0/findsimilars"
let DetectUrl = "https://\(Region).api.cognitive.microsoft.com/face/v1.0/detect?returnFaceId=true"

class AzureFaceRecognition: NSObject {
    static let shared = AzureFaceRecognition()
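
    // Sketch of the synchronous detect call used by the snippets above -- not the gist's
    // implementation. Assumes the standard Face API v1.0 detect response: a JSON array of
    // objects that each carry a "faceId" string.
    func syncDetectFaceIds(imageData: Data) -> [String] {
        var faceIds: [String] = []
        guard let url = URL(string: DetectUrl) else { return faceIds }

        var request = URLRequest(url: url)
        request.httpMethod = "POST"
        request.httpBody = imageData
        request.setValue("application/octet-stream", forHTTPHeaderField: "Content-Type")
        request.setValue(APIKey, forHTTPHeaderField: "Ocp-Apim-Subscription-Key")

        // Block until the request completes to match the "sync" naming used by the callers.
        let semaphore = DispatchSemaphore(value: 0)
        URLSession.shared.dataTask(with: request) { data, _, _ in
            if let data = data,
               let json = try? JSONSerialization.jsonObject(with: data, options: []),
               let faces = json as? [[String: Any]] {
                faceIds = faces.compactMap { $0["faceId"] as? String }
            }
            semaphore.signal()
        }.resume()
        semaphore.wait()
        return faceIds
    }
}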

function speechRecognizerDidReceiveTranscript(transcript as String)
    m.label.text = transcript
    m.infoLb.text = "Hold the OK button to start dictation, release it once you're done."
end function