Trey Huffine (treyhuffine)
import gql from 'graphql-tag';

export const QUERY_LAUNCH_PROFILE = gql`
  query LaunchProfile($id: String!) {
    launch(id: $id) {
      flight_number
      mission_name
      launch_year
      launch_success
      details
    }
  }
`;
import gql from 'graphql-tag';

export const QUERY_LAUNCH_LIST = gql`
  query LaunchList {
    launches {
      flight_number
      mission_name
      launch_year
    }
  }
`;
overwrite: true
schema: 'https://spacexdata.herokuapp.com/graphql'
documents: './src/components/**/*.ts'
generates:
  src/generated/graphql.tsx:
    plugins:
      - 'typescript'
      - 'typescript-operations'
      - 'typescript-react-apollo'
    config:
interface FunctionComponent<P = {}> {
  (props: P & { children?: ReactNode }, context?: any): ReactElement | null;
  propTypes?: WeakValidationMap<P>;
  contextTypes?: ValidationMap<any>;
  defaultProps?: Partial<P>;
  displayName?: string;
}
vector<Point3f> objectPointsForReprojection {
    objectPoints[2],                     // tip of nose
    objectPoints[2] + Point3f(0, 0, 15), // nose and Z-axis
    objectPoints[2] + Point3f(0, 15, 0), // nose and Y-axis
    objectPoints[2] + Point3f(15, 0, 0)  // nose and X-axis
};
//...
vector<Point2f> projectionOutput(objectPointsForReprojection.size());
projectPoints(objectPointsForReprojection, rvec, tvec, K, Mat(), projectionOutput);
arrowedLine(out, projectionOutput[0], projectionOutput[1], Scalar(255, 255, 0));
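The reprojection snippet above uses a head pose (rvec, tvec) and a camera matrix K that are not computed in any of these excerpts. A minimal sketch of how they might be obtained with cv::solvePnP is shown here; the focal-length approximation and the use of points2d and objectPoints (defined in the following snippets) are assumptions, not part of the original gists.

// Sketch (assumed): estimate the head pose from the 2D-3D landmark
// correspondences. Requires <opencv2/calib3d.hpp>. 'objectPoints' holds the
// 3D facial model points and 'points2d' the matching detected landmarks; the
// camera matrix is a rough pinhole approximation that uses the image width
// as the focal length.
Mat rvec, tvec;
float focal = static_cast<float>(img.cols);
Point2f center(img.cols / 2.0f, img.rows / 2.0f);
Mat K = (Mat_<float>(3, 3) << focal, 0.0f,  center.x,
                              0.0f,  focal, center.y,
                              0.0f,  0.0f,  1.0f);
solvePnP(objectPoints, points2d, K, Mat(), rvec, tvec);

With rvec and tvec estimated, projectPoints in the snippet above maps the axis endpoints back into the image for drawing.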
vector<Point3f> objectPoints {
    { 8.27412,   1.33849, 10.63490},  //left eye corner
    {-8.27412,   1.33849, 10.63490},  //right eye corner
    { 0,        -4.47894, 17.73010},  //nose tip
    {-4.61960, -10.14360, 12.27940},  //right mouth corner
    { 4.61960, -10.14360, 12.27940}   //left mouth corner
};
vector<int> landmarksIDsFor3DPoints {45, 36, 30, 48, 54}; // 0-index
// ...
vector<Point2f> points2d;
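points2d is declared above but never filled in these excerpts. A plausible sketch, assuming shapes holds the landmarks returned by facemark->fit in the next snippet, is to pick out the landmarks whose indices are listed in landmarksIDsFor3DPoints so they line up one-to-one with objectPoints.

// Sketch (assumed): gather the 2D landmarks that correspond, in order, to the
// 3D model points above. shapes[0] is the landmark set of the first detected
// face, as produced by facemark->fit in the next snippet.
points2d.clear();
for (int pointId : landmarksIDsFor3DPoints) {
    points2d.push_back(shapes[0][pointId]);
}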
vector<Rect> faces;
faceDetector(img, faces, face_cascade);
// Check whether any faces were detected
if (faces.size() != 0) {
    // We assume a single face, so we only look at the first one
    cv::rectangle(img, faces[0], Scalar(255, 0, 0), 2);
    vector<vector<Point2f>> shapes;
    if (facemark->fit(img, faces, shapes)) {
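        // Sketch (assumed, not part of the original gist): when fit() succeeds,
        // shapes[0] holds the fitted landmark points for the first face; drawing
        // them with the helper from <opencv2/face.hpp> is a quick visual check.
        drawFacemarks(img, shapes[0], Scalar(0, 255, 0));
    }
}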
#include <opencv2/face.hpp>
using namespace cv::face;
// ...
const string facemark_filename = "data/lbfmodel.yaml";
Ptr<Facemark> facemark = createFacemarkLBF();
facemark->loadModel(facemark_filename);
cout << "Loaded facemark LBF model" << endl;
const string cascade_name = "$OPENCV_ROOT/data/haarcascades/haarcascade_frontalface_default.xml";
CascadeClassifier face_cascade;
if (not face_cascade.load(cascade_name)) {
    cerr << "Cannot load cascade classifier from file: " << cascade_name << endl;
    return -1;
}
// ... obtain an image in img
vector<Rect> faces;
faceDetector(img, faces, face_cascade);
// Check whether any faces were detected
void faceDetector(const Mat& image,
                  std::vector<Rect>& faces,
                  CascadeClassifier& face_cascade) {
    Mat gray;
    // The cascade classifier works best on grayscale images
    if (image.channels() > 1) {
        cvtColor(image, gray, COLOR_BGR2GRAY);
    } else {
        gray = image.clone();