Created
July 31, 2023 05:00
-
-
Save raghavmittal101/f9843b0be9ec5745be928d4f7ddc8760 to your computer and use it in GitHub Desktop.
poc code
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
import numpy as np | |
import bpy | |
import math | |
import mathutils | |
# Root output directory for rendered images and depth CSVs; the trailing slash is
# required because filenames are appended with plain string concatenation below.
base_dir = "/Users/raghavmittal101/Documents/tcs/blenderNerf/"
def getDepthMap(filename):
    """Read the compositor 'Viewer Node' buffer, reorient it, and dump it to a CSV.

    :arg filename: output name (without extension); written to base_dir as <filename>.csv
    :returns: 2-D numpy float32 array of the buffer's first (R) channel
    """
    viewer_img = bpy.data.images['Viewer Node']
    width, height = viewer_img.size
    # Flatten the RGBA float buffer, then keep only channel 0 of each pixel.
    buf = np.asarray(viewer_img.pixels[:], dtype=np.float32)
    depth = buf.reshape(height, width, 4)[..., 0]
    # 180-degree rotation followed by a left/right mirror (net effect: vertical flip).
    depth = np.fliplr(np.rot90(depth, k=2))
    np.savetxt(base_dir + filename + ".csv", depth, delimiter=",")
    return depth
def getDMapImage(filename):
    """Render the scene at 1920x1080 and save the composite 'Render Result' as a PNG.

    :arg filename: output name (without extension); saved to base_dir as <filename>.png
    """
    scene = bpy.context.scene
    # Fix the render resolution and output format before rendering.
    scene.render.resolution_x = 1920
    scene.render.resolution_y = 1080
    scene.render.image_settings.file_format = "PNG"
    bpy.ops.render.render(write_still=True)
    # "Render Result" is the composite output — presumably the normalized depth
    # when the CreateDMapNodes tree is active; verify against the node setup.
    bpy.data.images["Render Result"].save_render(base_dir + filename + ".png")
def getRGBImage(filename):
    """Render the scene at 1920x1080 and save the 'Viewer Node' image as a PNG.

    :arg filename: output name (without extension); saved to base_dir as <filename>.png
    """
    scene = bpy.context.scene
    # Fix the render resolution and output format before rendering.
    scene.render.resolution_x = 1920
    scene.render.resolution_y = 1080
    scene.render.image_settings.file_format = "PNG"
    bpy.ops.render.render(write_still=True)
    # The Viewer node receives the raw render-layer image (see CreateDMapNodes),
    # so this saves the RGB pass rather than the composite.
    bpy.data.images["Viewer Node"].save_render(base_dir + filename + ".png")
def CreateDMapNodes():
    """Rebuild the scene's compositor tree for depth capture.

    Wiring: render layers' RGB image -> Viewer node (read back by getDepthMap /
    getRGBImage), and the Depth pass -> Normalize -> Composite ("Render Result").
    """
    tree = bpy.context.scene.node_tree
    # The original cleared the same node tree twice; once is enough.
    tree.nodes.clear()
    render_layers = tree.nodes.new(type='CompositorNodeRLayers')
    normalize = tree.nodes.new(type='CompositorNodeNormalize')
    composite = tree.nodes.new(type="CompositorNodeComposite")
    viewer = tree.nodes.new(type="CompositorNodeViewer")
    # RGB goes to the Viewer; depth is normalized (to [0, 1]) and composited.
    tree.links.new(render_layers.outputs["Image"], viewer.inputs["Image"])
    tree.links.new(render_layers.outputs["Depth"], normalize.inputs["Value"])
    tree.links.new(normalize.outputs["Value"], composite.inputs["Image"])
def HemispherePosPointsGeneratorBasic(stepsize, oX, oY, oZ, radius, tol=1e-9):
    """Sample grid points on the upper hemisphere of a sphere.

    Scans a cubic grid (spacing `stepsize`) over the upper half of the bounding cube
    of a sphere centred at (oX, oY, oZ) and keeps points whose distance from the
    centre matches `radius`.

    :arg stepsize: grid spacing along each axis
    :arg oX, oY, oZ: sphere centre
    :arg radius: sphere radius
    :arg tol: absolute tolerance for the radius comparison (new, default keeps
        near-exact matching)
    :returns: [xList, yList, zList] of matching coordinates
    """
    xList = []
    yList = []
    zList = []
    for x in np.arange(oX - radius, oX + radius, stepsize):
        for y in np.arange(oY - radius, oY + radius, stepsize):
            # z starts at oZ: only the upper hemisphere is sampled.
            for z in np.arange(oZ, oZ + radius, stepsize):
                dist = math.sqrt((x - oX)**2 + (y - oY)**2 + (z - oZ)**2)
                # Bug fix: the original used exact float equality (dist == radius),
                # which rejects almost every true surface point due to rounding.
                if math.isclose(dist, radius, abs_tol=tol):
                    xList.append(x)
                    yList.append(y)
                    zList.append(z)
    return [xList, yList, zList]
def SpherePosPointsGeneratorFebonacciLattice(pointsCount, oX, oY, oZ, radius):
    """Generate near-evenly distributed points on a sphere via the Fibonacci lattice.

    :arg pointsCount: number of points to generate
    :arg oX, oY, oZ: sphere centre
    :arg radius: sphere radius
    :returns: [x, y, z] — three numpy arrays of length pointsCount
    """
    n = pointsCount
    goldenRatio = (1 + 5**0.5) / 2
    i = np.arange(0, n)
    theta = 2 * np.pi * i / goldenRatio        # azimuth
    phi = np.arccos(1 - 2 * (i + 0.5) / n)     # polar angle; 0.5 offset avoids the poles
    # Bug fix: the original accepted (oX, oY, oZ) but never applied the offset,
    # so every sphere was centred at the origin regardless of the arguments.
    x = oX + np.cos(theta) * np.sin(phi) * radius
    y = oY + np.sin(theta) * np.sin(phi) * radius
    z = oZ + np.cos(phi) * radius
    return [x, y, z]
def placeCameras():
    """Remove all existing camera data-blocks, then place 25 cameras on a
    radius-10 sphere around the origin (Fibonacci-lattice positions), each
    aimed at (0, 0, 0)."""
    scene = bpy.context.scene
    xs, ys, zs = SpherePosPointsGeneratorFebonacciLattice(25, 0, 0, 0, 10)
    # Drop every existing camera data-block before creating the new set.
    for old_cam in list(bpy.data.cameras):
        bpy.data.cameras.remove(old_cam)
    for idx, (px, py, pz) in enumerate(zip(xs, ys, zs)):
        cam_data = bpy.data.cameras.new("Camera " + str(idx))
        cam_data.lens = 18
        cam_obj = bpy.data.objects.new("Camera " + str(idx), cam_data)
        cam_obj.location = (px, py, pz)
        point_at(cam_obj, (0, 0, 0))
        scene.collection.objects.link(cam_obj)
############ | |
def point_at(obj, target, roll=0):
    """
    Rotate obj to look at target

    :arg obj: the object to be rotated. Usually the camera
    :arg target: the location (3-tuple or Vector) to be looked at
    :arg roll: The angle of rotation about the axis from obj to target in radians.

    Based on: https://blender.stackexchange.com/a/5220/12947 (ideasman42)
    """
    # Accept a plain 3-tuple as well as a mathutils.Vector.
    if not isinstance(target, mathutils.Vector):
        target = mathutils.Vector(target)
    loc = obj.location
    # direction points from the object to the target
    direction = target - loc
    # Track/up axes differ by object type:
    tracker, rotator = (('-Z', 'Y'),'Z') if obj.type=='CAMERA' else (('X', 'Z'),'Y') #because new cameras points down(-Z), usually meshes point (-Y)
    quat = direction.to_track_quat(*tracker)
    # Technique adapted from Blender's bundled add-on:
    # /usr/share/blender/scripts/addons/add_advanced_objects_menu/arrange_on_curve.py
    quat = quat.to_matrix().to_4x4()
    # Extra rotation about the look axis to apply the requested roll.
    rollMatrix = mathutils.Matrix.Rotation(roll, 4, rotator)
    # remember the current location, since assigning to obj.matrix_world changes it
    loc = loc.to_tuple()
    #obj.matrix_world = quat * rollMatrix
    # in blender 2.8 and above @ is used to multiply matrices
    # using * still works but results in unexpected behaviour!
    obj.matrix_world = quat @ rollMatrix
    # Restore the translation clobbered by the matrix_world assignment above.
    obj.location = loc
# Script entry point: rebuild the camera rig, then render a depth image and an
# RGB image from every camera in turn.
placeCameras()
for cam_data in bpy.data.cameras:
    try:
        # Activate the camera object that shares the data-block's name.
        bpy.data.scenes['Scene'].camera = bpy.data.objects[cam_data.name]
        # CreateDMapNodes()
        # getDepthMap("dmapData_"+cam_data.name)
        getDMapImage("dmapImage_"+cam_data.name)
        getRGBImage("rgbImage_"+cam_data.name)
    except Exception as exc:
        # Best effort per camera, but never silent: the original bare
        # "except: pass" hid every failure (missing object, render errors).
        print("Skipping camera %r: %s" % (cam_data.name, exc))
# There are two ideas for generating a set of cameras placed around a hemisphere:
# 1. Fill a cube containing the sphere with uniformly distributed 3D points, then remove every point whose distance from the sphere's centre is not equal to the radius.
#    The ISSUE with this approach is that the surviving points are non-uniformly distributed over the sphere's surface: points near the cube's corners that lie close to the surface are mostly lost, because their distance is rarely exactly equal to the radius.
# 2. Generate equidistant, uniformly distributed points on the sphere's surface by ensuring that each point's polar and azimuthal counterparts exist on the opposite side.
# References for equidistant points on a sphere:
# https://www.cmu.edu/biolphys/deserno/pdf/sphere_equi.pdf
# https://extremelearning.com.au/how-to-evenly-distribute-points-on-a-sphere-more-effectively-than-the-canonical-fibonacci-lattice/
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment