- install npm packages (a sketch of this and the rename step follows the list)
- update brunch-config.js
- rename web/static/css/app.css to web/static/css/app.scss
- update web/static/css/app.scss
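A minimal sketch of the install and rename steps, assuming `sass-brunch` is the npm package in question (the list above doesn't name the packages):

```bash
# Assumed package; swap in whichever Sass plugin you're using
npm install --save-dev sass-brunch

# Rename the stylesheet so Brunch compiles it as SCSS
mv web/static/css/app.css web/static/css/app.scss
```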
```swift
import UIKit
import AVFoundation

protocol FrameExtractorDelegate: class {
    func captured(image: UIImage)
}

class FrameExtractor: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate {
    private let position = AVCaptureDevicePosition.front
```
```bash
# Go to home directory
cd ~

# You can change which Anaconda version you want at
# https://repo.continuum.io/archive/
wget https://repo.continuum.io/archive/Anaconda3-5.0.1-Linux-x86_64.sh
bash Anaconda3-5.0.1-Linux-x86_64.sh -b -p ~/anaconda
rm Anaconda3-5.0.1-Linux-x86_64.sh

# Use $HOME rather than ~ here: tilde isn't expanded inside quotes,
# so a quoted ~ would end up as a literal (broken) entry in PATH
echo 'export PATH="$HOME/anaconda/bin:$PATH"' >> ~/.bashrc
```
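As a quick check (not part of the original steps), reloading the shell config and asking conda for its version confirms the PATH change took effect:

```bash
# Pick up the PATH change made above
source ~/.bashrc

# Prints the conda version if the install is on PATH
conda --version
```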
First, to be clear: this was done on a G2 AWS instance, and I started with working NVIDIA support by following http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/using_cluster_computing.html
Starting from that (or from scratch, if you're feeling bolder), the key step is getting the right binary install package and running it. Look at http://www.nvidia.com/object/unix.html, grab the package you want (at the time of writing I'm using 'Latest Long Lived Branch version: 361.45.11'), and run the file you download. For example, sudo sh ./NVIDIA-Linux-x86_64-361.45.11.run.
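In script form, with the caveat that the filename depends on the driver version you picked (and the `nvidia-smi` check is my addition, not from the guide above):

```bash
# Filename will differ if you picked a different driver version
sudo sh ./NVIDIA-Linux-x86_64-361.45.11.run

# Sanity check: the driver should be loaded and the GPU visible
nvidia-smi
```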
I haven't had time to test on a clean instance, so you may need a bit more setup than I've mentioned, and I make no guarantees anyway since I hardly know what I'm doing here :)
The ffmpeg_build.sh script is mostly copied from the guide at https://trac.ffmpeg.org/wiki/CompilationGuide/Centos, with significant help from folks on FreeNode #ffmpeg (notably furq and JEEB, though there were others).
The steps for adding OpenCL header support were borrowed
```go
package main

import (
	"github.com/gin-gonic/contrib/sessions"
	"github.com/gin-gonic/gin"
	"log"
	"net/http"
	"strings"
)
```
```swift
import Cocoa

// Support Foundation calls on String
public extension String { public var ns: NSString { return self as NSString } }

/// Custom Labeled Playground-Based Drag-and-Drop window
public class DropView: NSTextField {
    // Default action handler
    public var handler: ([String]) -> Void = { paths in Swift.print(paths) }
```
```swift
//: Playground - noun: a place where people can play
import UIKit
import XCPlayground

let containerView = UIView(frame: CGRect(x: 0.0, y: 0.0, width: 375.0, height: 667.0))
containerView.backgroundColor = UIColor.whiteColor()

let stackView = UIStackView()
```
```javascript
/** Adapted from http://leifdenby.svbtle.com/reactjs-and-browser-history-a-historyjs-mixin */
var HistoryJSMixin = {
  _historyjs_recoverState: function(state) {
    var received_state_serialized = state.data;
    if (!$.isEmptyObject(received_state_serialized)) {
      if (this.deserializeState !== undefined) {
        var received_state = this.deserializeState(received_state_serialized);
      }
      else {
```
```swift
func getUIImageForRGBAData(#width: UInt, #height: UInt, #data: NSData) -> UIImage? {
    let pixelData = data.bytes
    let bytesPerPixel: UInt = 4
    let scanWidth = bytesPerPixel * width
    let provider = CGDataProviderCreateWithData(nil, pixelData, height * scanWidth, nil)
    let colorSpaceRef = CGColorSpaceCreateDeviceRGB()
    var bitmapInfo: CGBitmapInfo = .ByteOrderDefault
    bitmapInfo |= CGBitmapInfo(CGImageAlphaInfo.Last.rawValue)
```
```javascript
/**
 * Changes value to past tense.
 * Simple filter; does not support irregular verbs such as eat-ate, fly-flew, etc.
 * http://jsfiddle.net/bryan_k/0xczme2r/
 *
 * @param {String} value The value string.
 */
Vue.filter('past-tense', function(value) {
  // Loosely follows http://www.oxforddictionaries.com/us/words/verb-tenses-adding-ed-and-ing
  var vowels = ['a', 'e', 'i', 'o', 'u'];
```