Skip to content

Instantly share code, notes, and snippets.

@agrancini-sc
Last active November 1, 2025 00:57
Show Gist options
  • Select an option

  • Save agrancini-sc/b9dfea7a0810b829e3e5f69c9aa9c22d to your computer and use it in GitHub Desktop.

Select an option

Save agrancini-sc/b9dfea7a0810b829e3e5f69c9aa9c22d to your computer and use it in GitHub Desktop.
Upload Assets to Supabase
import { RectangleButton } from 'SpectaclesUIKit.lspkg/Scripts/Components/Button/RectangleButton';
import { SnapCloudRequirements } from './Examples/SnapCloudRequirements';
import { createClient } from 'SupabaseClient.lspkg/supabase-snapcloud';
@component
export class UploadAssetToSupabase extends BaseScriptComponent {
  // --- Inspector inputs -----------------------------------------------------

  @input
  @hint("Reference to SnapCloudRequirements for centralized Supabase configuration")
  public snapCloudRequirements: SnapCloudRequirements;

  @input
  @hint("Storage bucket name")
  bucketName: string = "specs-bucket";

  @input
  @hint("Texture to upload (e.g., from Image component)")
  textureToUpload: Texture;

  @input
  @hint("RenderMeshVisual containing the mesh to upload")
  renderMeshVisual: RenderMeshVisual;

  // Supabase client handle. The bundled SDK ships no Lens Studio typings,
  // so this stays `any`; keep all access to it confined to this class.
  private client: any;

  // Set only after a successful sign-in; guards every storage call.
  private isInitialized: boolean = false;

  onAwake() {
    print("=== UploadAssetToSupabase Starting ===");
    print("Setting up delayed start...");
    // Use DelayedCallbackEvent instead of OnStartEvent for better reliability:
    // inputs and sibling components are ready half a second after awake.
    const delayedEvent = this.createEvent("DelayedCallbackEvent");
    delayedEvent.bind(() => {
      print("DelayedCallback triggered - starting upload");
      this.startUpload();
    });
    (delayedEvent as DelayedCallbackEvent).reset(0.5); // Wait 0.5 seconds
    print("Delayed start configured");
  }

  /**
   * Entry point: verifies connectivity, initializes the Supabase client,
   * then auto-triggers the uploads (testing mode).
   * Never throws — all errors are logged via print().
   */
  async startUpload() {
    print("startUpload called");
    try {
      // Check connectivity FIRST — client creation and sign-in need the
      // network, so initializing while offline would fail anyway.
      if (!global.deviceInfoSystem.isInternetAvailable()) {
        print("WARNING: No internet connection available");
        return;
      }
      print("✓ Internet available");

      // Initialize Supabase client and authenticate.
      print("Initializing Supabase...");
      await this.initializeSupabase();
      // FIX: previously execution continued (and printed "complete") even
      // when initialization failed; abort so uploadAllAssets is never
      // called with a missing or unauthenticated client.
      if (!this.isInitialized) {
        print("ERROR: Supabase initialization failed - aborting upload");
        return;
      }
      print("Supabase initialization complete");
      print("Upload Asset to Supabase - Ready");

      // Automatically trigger upload for testing
      print("=== AUTO-TRIGGERING UPLOAD (TESTING MODE) ===");
      await this.uploadAllAssets();
    } catch (error) {
      print(`ERROR in startUpload: ${error}`);
    }
  }

  /**
   * Create the Supabase client and sign in with the Snapchat identity.
   * On success sets `isInitialized` to true; on any failure it leaves the
   * flag false and logs the reason (it never throws for expected failures).
   */
  private async initializeSupabase(): Promise<void> {
    print("initializeSupabase called");
    if (!this.snapCloudRequirements || !this.snapCloudRequirements.isConfigured()) {
      print("SnapCloudRequirements not configured");
      print("Please assign SnapCloudRequirements component in Inspector");
      return;
    }
    print("Getting Supabase project...");
    const supabaseProject = this.snapCloudRequirements.getSupabaseProject();
    print(`Project URL: ${supabaseProject.url}`);

    // Create Supabase client - match Example1 exactly
    print("Creating Supabase client...");
    const options = {
      realtime: {
        heartbeatIntervalMs: 2500, // Temporary fix for alpha limitation
      },
    };
    this.client = createClient(
      supabaseProject.url,
      supabaseProject.publicToken,
      options
    );
    print("Supabase client created");

    // Sign in with Snapchat ID token.
    // NOTE(review): token is empty — presumably the Snap Cloud runtime
    // injects the Snapchat identity for provider 'snapchat'; confirm
    // against the Snap Cloud / Supabase auth docs.
    print("Signing in with Snapchat ID token...");
    const { data, error } = await this.client.auth.signInWithIdToken({
      provider: 'snapchat',
      token: '',
    });
    print("Sign in completed");
    if (error) {
      print(`Auth error: ${JSON.stringify(error)}`);
    } else {
      print(`✓ Authenticated with Supabase`);
      print(`User: ${JSON.stringify(data.user)}`);
      this.isInitialized = true;
    }
  }

  /**
   * Upload every configured asset, sequentially: texture first (if
   * assigned), then the mesh (if a RenderMeshVisual is assigned).
   */
  async uploadAllAssets() {
    print("Starting upload process...");
    try {
      if (this.textureToUpload) {
        await this.uploadTexture();
      }
      if (this.renderMeshVisual) {
        await this.uploadMesh();
      }
      print("All uploads completed successfully!");
    } catch (error) {
      print(`Upload failed: ${error}`);
    }
  }

  /**
   * Upload raw bytes to the configured bucket at `objectPath`.
   * Shared by uploadTexture/uploadMesh; logs outcome and returns
   * true on success, false on failure.
   */
  private async uploadBytes(objectPath: string, bytes: Uint8Array): Promise<boolean> {
    print(`Uploading: ${objectPath}`);
    print(`Size: ${bytes.byteLength} bytes`);
    const { data, error } = await this.client.storage
      .from(this.bucketName)
      .upload(objectPath, bytes.buffer);
    if (error) {
      print(`ERROR: Upload failed - ${JSON.stringify(error)}`);
      return false;
    }
    print(`✓ Uploaded successfully: ${objectPath}`);
    print(`Data: ${JSON.stringify(data)}`);
    return true;
  }

  /**
   * Extract the input texture's pixels and upload them as a raw RGBA blob
   * under `textures/texture_<timestamp>.raw`.
   */
  async uploadTexture() {
    print("--- Uploading Texture ---");
    if (!this.isInitialized) {
      print("ERROR: Supabase client not initialized");
      return;
    }
    try {
      const texture = this.textureToUpload;
      if (!texture) {
        print("ERROR: No texture provided");
        return;
      }
      print(`Texture name: ${texture.name}`);
      print(`Texture size: ${texture.getWidth()}x${texture.getHeight()}`);

      // Convert texture to bytes (raw RGBA, no container format).
      const textureData = await this.getTextureAsBytes(texture);
      if (!textureData) {
        print("ERROR: Could not extract texture data");
        return;
      }

      // Timestamp keeps object paths unique across runs.
      const timestamp = Date.now();
      const objectPath = `textures/texture_${timestamp}.raw`;
      await this.uploadBytes(objectPath, textureData);
    } catch (error) {
      print(`ERROR uploading texture: ${error}`);
    }
  }

  /**
   * Serialize the RenderMeshVisual's mesh (placeholder encoding, see
   * encodeMeshData) and upload it under `meshes/mesh_<timestamp>.bin`.
   */
  async uploadMesh() {
    print("--- Uploading Mesh ---");
    if (!this.isInitialized) {
      print("ERROR: Supabase client not initialized");
      return;
    }
    try {
      const renderMesh = this.renderMeshVisual.mesh;
      if (!renderMesh) {
        print("ERROR: No mesh found on RenderMeshVisual");
        return;
      }
      print(`Mesh name: ${renderMesh.name}`);
      print(`Mesh topology: ${renderMesh.topology}`);

      const meshData = await this.getMeshAsBytes(renderMesh);
      if (!meshData) {
        print("ERROR: Could not extract mesh data");
        return;
      }

      // Timestamp keeps object paths unique across runs.
      const timestamp = Date.now();
      const objectPath = `meshes/mesh_${timestamp}.bin`;
      await this.uploadBytes(objectPath, meshData);
    } catch (error) {
      print(`ERROR uploading mesh: ${error}`);
    }
  }

  /**
   * Read a texture's pixels into a tightly-packed RGBA Uint8Array
   * (width * height * 4 bytes). Returns null on failure.
   */
  private async getTextureAsBytes(texture: Texture): Promise<Uint8Array | null> {
    try {
      // FIX: copy the *source* texture into a readable procedural texture.
      // The previous code created a blank ProceduralTextureProvider of the
      // same dimensions and read its (empty) pixels, so the uploaded
      // payload was all zeros instead of the actual image.
      const readableTexture = ProceduralTextureProvider.createFromTexture(texture);
      const width = texture.getWidth();
      const height = texture.getHeight();
      const pixelData = new Uint8Array(width * height * 4); // RGBA, 1 byte/channel
      const provider = readableTexture.control as ProceduralTextureProvider;
      provider.getPixels(0, 0, width, height, pixelData);
      return pixelData;
    } catch (error) {
      print(`Error converting texture to bytes: ${error}`);
      return null;
    }
  }

  /**
   * Encode texture to PNG format.
   * Note: This is a placeholder — Lens Studio doesn't have built-in PNG
   * encoding; in practice you'd send raw pixel data or another format.
   * Currently unused; kept for API continuity.
   */
  private encodeTextureToPNG(texture: Texture): Uint8Array {
    const width = texture.getWidth();
    const height = texture.getHeight();
    const size = width * height * 4; // RGBA
    print(`Texture dimensions: ${width}x${height}`);
    print(`Expected data size: ${size} bytes`);
    // Create dummy data for demonstration
    const dummyData = new Uint8Array(size);
    return dummyData;
  }

  /**
   * Convert a mesh to a byte array. Delegates to encodeMeshData, which is
   * currently a placeholder (see below). Returns null on failure.
   */
  private async getMeshAsBytes(
    renderMesh: RenderMesh
  ): Promise<Uint8Array | null> {
    try {
      const topology = renderMesh.topology;
      print(`Mesh topology: ${topology}`);
      // Approximate size only — real vertex/index extraction is not
      // implemented here.
      const vertexCount = this.estimateVertexCount(renderMesh);
      print(`Estimated vertex count: ${vertexCount}`);
      return this.encodeMeshData(renderMesh);
    } catch (error) {
      print(`Error converting mesh to bytes: ${error}`);
      return null;
    }
  }

  /**
   * Estimate vertex count from mesh.
   * Placeholder — actual implementation would need mesh API access.
   */
  private estimateVertexCount(renderMesh: RenderMesh): number {
    return 1000; // Placeholder
  }

  /**
   * Encode mesh data to binary format.
   * Placeholder — in production you'd serialize vertex positions, normals,
   * UVs, and indices; this emits a fixed-size zeroed buffer.
   */
  private encodeMeshData(renderMesh: RenderMesh): Uint8Array {
    const dummySize = 10000; // 10KB placeholder
    const dummyData = new Uint8Array(dummySize);
    print(`Encoded mesh data: ${dummySize} bytes`);
    return dummyData;
  }
}
// SET UP THIS IN YOUR BUCKET
Step 1: Go to Storage Settings
Open your Snap Cloud Dashboard
Navigate to Storage → specs-bucket
Click on Policies tab
Step 2: Configure Bucket Policy
Click "New Policy" and create these policies:
Policy 1: Allow Uploads
Policy name: Allow uploads
Allowed operation: INSERT
Target roles: public (or anon, authenticated)
USING expression: (leave empty)
WITH CHECK expression: bucket_id = 'specs-bucket'
Policy 2: Allow Reads
Policy name: Allow reads
Allowed operation: SELECT
Target roles: public
USING expression: bucket_id = 'specs-bucket'
WITH CHECK expression: (leave empty)
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment