class ComboBoxPreference : public EditPreference {
    Q_OBJECT
    Q_PROPERTY(QStringList items READ getItems CONSTANT)
public:
    ComboBoxPreference(const QString& category, const QString& name, Getter getter, Setter setter)
        : EditPreference(category, name, getter, setter) { }
    Type getType() { return ComboBox; }
    // Expose the combo box entries backing the "items" property (not the inherited browse label).
    const QStringList& getItems() { return _items; }

protected:
    QStringList _items;
};
#define GLEW_STATIC      // link against the static GLEW library
#include <GL/glew.h>
#define OVR_OS_WIN32     // target the Win32 build of the Oculus SDK
#include <OVR_CAPI_GL.h>
#include <SDL.h>
#include <tchar.h>       // provides the _tmain entry-point macro
#include <iostream>
#undef main              // SDL.h redefines main to SDL_main; undo that for this entry point
int _tmain() {
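
A minimal sketch of how such an entry point typically begins, assuming an SDL2 OpenGL window plus GLEW initialization; the window title and size are placeholders, and the Oculus-specific setup is not shown:

// Not from the gist: typical SDL2 + GLEW startup before any OVR work.
SDL_Init(SDL_INIT_VIDEO);
SDL_Window* window = SDL_CreateWindow("Rift test",
    SDL_WINDOWPOS_CENTERED, SDL_WINDOWPOS_CENTERED, 1280, 800, SDL_WINDOW_OPENGL);
SDL_GLContext context = SDL_GL_CreateContext(window);
glewExperimental = GL_TRUE;   // needed for core profiles with older GLEW versions
if (GLEW_OK != glewInit()) {
    std::cerr << "Failed to initialize GLEW" << std::endl;
    return -1;
}
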
bdavis@MONOLITH8 ~/git
$ cd hifi
bdavis@MONOLITH8 ~/git/hifi (hmd_mono)
$ mkdir build2
bdavis@MONOLITH8 ~/git/hifi (hmd_mono)
$ cd build2/
bdavis@MONOLITH8 ~/git/hifi/build2 (hmd_mono)
vk::ImageViewCreateInfo colorAttachmentView;
colorAttachmentView.format = colorFormat;
colorAttachmentView.subresourceRange.aspectMask = vk::ImageAspectFlagBits::eColor;
colorAttachmentView.subresourceRange.levelCount = 1;
colorAttachmentView.subresourceRange.layerCount = 1;
colorAttachmentView.viewType = vk::ImageViewType::e2D;
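
A sketch of how this create-info would typically be used with vulkan.hpp, assuming a vk::Device named device and a vk::Image named colorImage that are not shown above:

// Not from the gist: attach the source image, then create and later destroy the view.
colorAttachmentView.image = colorImage;
vk::ImageView view = device.createImageView(colorAttachmentView);
// ... use the view, e.g. as a framebuffer color attachment ...
device.destroyImageView(view);
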
#version 450
struct LightingModel
{
    vec4 _UnlitEmissiveLightmapBackground;
    vec4 _ScatteringDiffuseSpecularAlbedo;
    vec4 _AmbientDirectionalPointSpot;
    vec4 _ShowContourObscuranceWireframe;
    vec4 _Haze_spareyzw;
};
{
    "types" : {
        "LightingModel" : [
            {
                "name" : "_UnlitEmissiveLightmapBackground",
                "type" : "vec4"
            },
            {
                "name" : "_ScatteringDiffuseSpecularAlbedo",
                "type" : "vec4"
function(deviceName) {
    navigator.mediaDevices.getUserMedia({ audio: true, video: false }).then(function(mediaStream) {
        navigator.mediaDevices.enumerateDevices().then(function(devices) {
            devices.forEach(function(device) {
                if (device.kind == "audiooutput") {
                    if (device.label == deviceName) {
                        console.log("Changing HTML audio output to device " + device.label);
                        var deviceId = device.deviceId;
                        var videos = document.getElementsByTagName("video");
                        for (var i = 0; i < videos.length; i++) {
                            // Route each video element's audio to the selected output device.
                            videos[i].setSinkId(deviceId);
                        }
                    }
                }
            });
        });
    });
}
package org.saintandreas.audiotest;
import android.Manifest;
import android.app.Activity;
import android.content.pm.PackageManager;
import android.media.MediaRecorder;
import android.opengl.GLES32;
import android.opengl.GLSurfaceView;
import android.os.Bundle;
import android.support.annotation.NonNull;
case Accessor::Type::eSCALAR:
    switch (componentType) {
        case Accessor::ComponentType::eBYTE:
            return normalized ? vk::Format::eR8Snorm : vk::Format::eR8Sint;
        case Accessor::ComponentType::eUNSIGNED_BYTE:
            return normalized ? vk::Format::eR8Unorm : vk::Format::eR8Uint;
        case Accessor::ComponentType::eSHORT:
            return normalized ? vk::Format::eR16Snorm : vk::Format::eR16Sint;
        case Accessor::ComponentType::eUNSIGNED_SHORT:
            return normalized ? vk::Format::eR16Unorm : vk::Format::eR16Uint;
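
A hedged guess at how the remaining glTF scalar component types would map to Vulkan formats, assuming the Accessor::ComponentType enum continues the naming pattern above (eUNSIGNED_INT and eFLOAT do not appear in the snippet):

        // Not from the gist: the 32-bit glTF component types have no normalized variants.
        case Accessor::ComponentType::eUNSIGNED_INT:
            return vk::Format::eR32Uint;
        case Accessor::ComponentType::eFLOAT:
            return vk::Format::eR32Sfloat;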