mpv user shader to fix the radial distortion commonly found in wide-angle action cameras
*.mp4
*.mov
*.png
*.txt
#!/usr/bin/env python
import cv2
import numpy as np
import os
import sys
import math
# Defining the dimensions of checkerboard
CHECKERBOARD = (6,9)
criteria = (cv2.TERM_CRITERIA_EPS + cv2.TERM_CRITERIA_MAX_ITER, 30, 0.001)
# Creating vector to store vectors of 3D points for each checkerboard image
objpoints = []
# Creating vector to store vectors of 2D points for each checkerboard image
imgpoints = []
# Defining the world coordinates for 3D points
objp = np.zeros((1, CHECKERBOARD[0] * CHECKERBOARD[1], 3), np.float32)
objp[0,:,:2] = np.mgrid[0:CHECKERBOARD[0], 0:CHECKERBOARD[1]].T.reshape(-1, 2)
prev_img_shape = None
# Paths of the calibration images are taken from the command line
images = sys.argv[1:]
width = None
height = None
for fname in images:
    img = cv2.imread(fname)
    if img is None:
        print("file %s not found" % fname)
        sys.exit(-1)
    if width is not None and width != img.shape[1]:
        print("image width does not match")
        sys.exit(-1)
    width = img.shape[1]
    if height is not None and height != img.shape[0]:
        print("image height does not match")
        sys.exit(-1)
    height = img.shape[0]
    gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
    # Find the chessboard corners.
    # If the desired number of corners is found in the image, ret is True.
    ret, corners = cv2.findChessboardCorners(gray, CHECKERBOARD, cv2.CALIB_CB_ADAPTIVE_THRESH + cv2.CALIB_CB_FAST_CHECK + cv2.CALIB_CB_NORMALIZE_IMAGE)
    """
    If the desired number of corners is detected,
    refine the pixel coordinates and display
    them on the checkerboard image.
    """
    if ret == True:
        objpoints.append(objp)
        # Refine pixel coordinates for the given 2D points.
        corners2 = cv2.cornerSubPix(gray, corners, (11, 11), (-1, -1), criteria)
        imgpoints.append(corners2)
        # Draw and display the corners.
        img = cv2.drawChessboardCorners(img, CHECKERBOARD, corners2, ret)
    cv2.imshow('img', img)
    cv2.waitKey(0)
cv2.destroyAllWindows()
h,w = img.shape[:2]
"""
Performing camera calibration by
passing the value of known 3D points (objpoints)
and corresponding pixel coordinates of the
detected corners (imgpoints)
"""
ret, mtx, dist, rvecs, tvecs = cv2.calibrateCamera(objpoints, imgpoints, gray.shape[::-1], None, None)
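# Illustrative addition (uses only what calibrateCamera already returned): the
# first return value is the overall RMS reprojection error in pixels. Report it
# on stderr so it does not end up in the generated shader snippet; values well
# below one pixel usually indicate a usable calibration.
sys.stderr.write("RMS reprojection error: %s px\n" % ret)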
inf = 1e9
eps = 1e-7
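# solve() below inverts the radial model numerically: it runs Newton's method
# from 200 starting points spread over x = r^2 in (0, 2), keeps the smallest
# positive root of f(x) = 0, and returns its square root (or inf if no root
# is found).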
def solve(k1, k2, k3, y):
    # Find the minimal sqrt(x) s.t. y = (k3 * x^3 + k2 * x^2 + k1 * x + 1) * sqrt(x)
    def f(x):
        return (((k3 * x + k2) * x + k1) * x + 1) * math.sqrt(x) - y
    def diff_f(x):
        return (((7 * k3 * x + 5 * k2) * x + 3 * k1) * x + 1) / (2 * math.sqrt(x))
    min_sqrtx = inf
    for i in range(200):
        x = (i + 0.5) / 100.0
        for j in range(30):
            if x < eps:
                break
            x -= f(x) / diff_f(x)
        if x < eps:
            continue
        if abs(f(x)) < eps:
            min_sqrtx = min(min_sqrtx, math.sqrt(x))
    return min_sqrtx
k1, k2, p1, p2, k3 = dist[0]
diag = math.sqrt(width ** 2 + height ** 2)
w = width / diag
h = height / diag
ws = solve(k1, k2, k3, w) / w
hs = solve(k1, k2, k3, h) / h
ds = solve(k1, k2, k3, 1.0)
zoom = min(ws, hs, ds)
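# A minimal self-check, added for illustration only: for every root solve()
# found, pushing the undistorted radius back through the forward radial model
# should reproduce the requested distorted radius.
def _forward_radius(r):
    r2 = r * r
    return r * (1.0 + ((k3 * r2 + k2) * r2 + k1) * r2)
for _y in (w, h, 1.0):
    _r = solve(k1, k2, k3, _y)
    if _r < inf:
        assert abs(_forward_radius(_r) - _y) < 1e-5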
print("#define K1 %s" % k1)
print("#define K2 %s" % k2)
print("#define K3 %s" % k3)
print("#define ZOOM %s" % zoom)
if ws < zoom * 3 and hs < zoom * 3:
    new_width = math.floor(ws / zoom * width + eps)
    new_height = math.floor(hs / zoom * height + eps)
    print("")
    print("For stretched lens fixing, use --video-aspect-override=%d:%d (and --vf=gpu=w=%d:h=%d for encoding):" % (new_width, new_height, new_width, new_height))
    print("")
    print("//!DESC lensfix-stretch")
    print("//!HOOK MAIN")
    print("//!BIND HOOKED")
    print("//!WIDTH %d" % new_width)
    print("//!HEIGHT %d" % new_height)
    print("#define K1 %s" % k1)
    print("#define K2 %s" % k2)
    print("#define K3 %s" % k3)
    print("#define ZOOM vec2(%d, %d) / vec2(%d, %d) * %s" % (new_width, new_height, width, height, zoom))
//!DESC lensfix (with ewa_lanczossharp resampler)
//!HOOK MAIN
//!BIND HOOKED
//!BIND FILTER_LUT
//!WIDTH OUTPUT.width
//!HEIGHT OUTPUT.height
// The following parameters were calibrated with OpenCV.
// See more:
// https://docs.opencv.org/2.4/modules/calib3d/doc/camera_calibration_and_3d_reconstruction.html
// https://www.learnopencv.com/camera-calibration-using-opencv/
// https://docs.opencv.org/2.4/doc/tutorials/calib3d/camera_calibration/camera_calibration.html#cameracalibrationopencv
//
// 1. Print https://github.com/opencv/opencv/blob/master/doc/pattern.png
// 2. Take photos of the pattern with the camera from different angles
// 3. $ ./calibrate.py photo1.png photo2.png ...
// Press any key for each photo shown.
#define K1 -0.36273784475910814
#define K2 0.14395222613195036
#define K3 -0.02752716367238447
#define ZOOM 1.1045972020461248
vec4 hook() {
vec2 unit_rect = HOOKED_size / length(HOOKED_size);
vec2 pos = (HOOKED_pos - 0.5) * 2.0 * unit_rect;
pos *= ZOOM;
float r2 = pos.x * pos.x + pos.y * pos.y;
pos *= 1.0 + (((K3) * r2 + (K2)) * r2 + (K1)) * r2;
pos = pos / unit_rect / 2.0 + 0.5;
vec2 size = HOOKED_size;
vec2 pt = HOOKED_pt;
vec4 color = vec4(0.0);
vec2 fcoord = fract(pos * size - vec2(0.5));
vec2 base = pos - fcoord * pt;
float w, d, wsum = 0.0;
vec4 in0;
vec4 in1;
vec4 in2;
int idx;
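// The taps below implement the ewa_lanczossharp kernel unrolled into explicit
// samples: each tap measures its distance d from the sampling position, looks
// its weight up in FILTER_LUT (with d normalized by the kernel radius
// 3.238315), skips outer taps whose distance exceeds 3.051550, and the
// weighted sum is divided by wsum at the end of hook().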
// scaler samples
d = length(vec2(-2.0, -2.0) - fcoord);
if (d < 3.051550) {
w = tex1D(FILTER_LUT, LUT_POS(d * 1.0/3.238315, 1024.0)).r;
wsum += w;
in0 = HOOKED_tex(base + pt * vec2(-2.0, -2.0));
color += vec4(w) * in0;
}
d = length(vec2(0.0, -3.0) - fcoord);
if (d < 3.051550) {
w = tex1D(FILTER_LUT, LUT_POS(d * 1.0/3.238315, 1024.0)).r;
wsum += w;
in0 = HOOKED_tex(base + pt * vec2(0.0, -3.0));
color += vec4(w) * in0;
}
d = length(vec2(-1.0, -2.0) - fcoord);
if (d < 3.051550) {
w = tex1D(FILTER_LUT, LUT_POS(d * 1.0/3.238315, 1024.0)).r;
wsum += w;
in0 = HOOKED_tex(base + pt * vec2(-1.0, -2.0));
color += vec4(w) * in0;
}
d = length(vec2(0.0, -2.0) - fcoord);
if (d < 3.051550) {
w = tex1D(FILTER_LUT, LUT_POS(d * 1.0/3.238315, 1024.0)).r;
wsum += w;
in0 = HOOKED_tex(base + pt * vec2(0.0, -2.0));
color += vec4(w) * in0;
}
d = length(vec2(1.0, -3.0) - fcoord);
if (d < 3.051550) {
w = tex1D(FILTER_LUT, LUT_POS(d * 1.0/3.238315, 1024.0)).r;
wsum += w;
in0 = HOOKED_tex(base + pt * vec2(1.0, -3.0));
color += vec4(w) * in0;
}
d = length(vec2(1.0, -2.0) - fcoord);
if (d < 3.051550) {
w = tex1D(FILTER_LUT, LUT_POS(d * 1.0/3.238315, 1024.0)).r;
wsum += w;
in0 = HOOKED_tex(base + pt * vec2(1.0, -2.0));
color += vec4(w) * in0;
}
d = length(vec2(2.0, -2.0) - fcoord);
if (d < 3.051550) {
w = tex1D(FILTER_LUT, LUT_POS(d * 1.0/3.238315, 1024.0)).r;
wsum += w;
in0 = HOOKED_tex(base + pt * vec2(2.0, -2.0));
color += vec4(w) * in0;
}
d = length(vec2(3.0, -2.0) - fcoord);
if (d < 3.051550) {
w = tex1D(FILTER_LUT, LUT_POS(d * 1.0/3.238315, 1024.0)).r;
wsum += w;
in0 = HOOKED_tex(base + pt * vec2(3.0, -2.0));
color += vec4(w) * in0;
}
d = length(vec2(-2.0, -1.0) - fcoord);
if (d < 3.051550) {
w = tex1D(FILTER_LUT, LUT_POS(d * 1.0/3.238315, 1024.0)).r;
wsum += w;
in0 = HOOKED_tex(base + pt * vec2(-2.0, -1.0));
color += vec4(w) * in0;
}
d = length(vec2(-3.0, 0.0) - fcoord);
if (d < 3.051550) {
w = tex1D(FILTER_LUT, LUT_POS(d * 1.0/3.238315, 1024.0)).r;
wsum += w;
in0 = HOOKED_tex(base + pt * vec2(-3.0, 0.0));
color += vec4(w) * in0;
}
d = length(vec2(-2.0, 0.0) - fcoord);
if (d < 3.051550) {
w = tex1D(FILTER_LUT, LUT_POS(d * 1.0/3.238315, 1024.0)).r;
wsum += w;
in0 = HOOKED_tex(base + pt * vec2(-2.0, 0.0));
color += vec4(w) * in0;
}
d = length(vec2(-1.0, -1.0) - fcoord);
w = tex1D(FILTER_LUT, LUT_POS(d * 1.0/3.238315, 1024.0)).r;
wsum += w;
in0 = HOOKED_tex(base + pt * vec2(-1.0, -1.0));
color += vec4(w) * in0;
d = length(vec2(0.0, -1.0) - fcoord);
w = tex1D(FILTER_LUT, LUT_POS(d * 1.0/3.238315, 1024.0)).r;
wsum += w;
in0 = HOOKED_tex(base + pt * vec2(0.0, -1.0));
color += vec4(w) * in0;
d = length(vec2(-1.0, 0.0) - fcoord);
w = tex1D(FILTER_LUT, LUT_POS(d * 1.0/3.238315, 1024.0)).r;
wsum += w;
in0 = HOOKED_tex(base + pt * vec2(-1.0, 0.0));
color += vec4(w) * in0;
d = length(vec2(0.0, 0.0) - fcoord);
w = tex1D(FILTER_LUT, LUT_POS(d * 1.0/3.238315, 1024.0)).r;
wsum += w;
in0 = HOOKED_tex(base + pt * vec2(0.0, 0.0));
color += vec4(w) * in0;
d = length(vec2(1.0, -1.0) - fcoord);
w = tex1D(FILTER_LUT, LUT_POS(d * 1.0/3.238315, 1024.0)).r;
wsum += w;
in0 = HOOKED_tex(base + pt * vec2(1.0, -1.0));
color += vec4(w) * in0;
d = length(vec2(2.0, -1.0) - fcoord);
w = tex1D(FILTER_LUT, LUT_POS(d * 1.0/3.238315, 1024.0)).r;
wsum += w;
in0 = HOOKED_tex(base + pt * vec2(2.0, -1.0));
color += vec4(w) * in0;
d = length(vec2(1.0, 0.0) - fcoord);
w = tex1D(FILTER_LUT, LUT_POS(d * 1.0/3.238315, 1024.0)).r;
wsum += w;
in0 = HOOKED_tex(base + pt * vec2(1.0, 0.0));
color += vec4(w) * in0;
d = length(vec2(2.0, 0.0) - fcoord);
w = tex1D(FILTER_LUT, LUT_POS(d * 1.0/3.238315, 1024.0)).r;
wsum += w;
in0 = HOOKED_tex(base + pt * vec2(2.0, 0.0));
color += vec4(w) * in0;
d = length(vec2(3.0, -1.0) - fcoord);
if (d < 3.051550) {
w = tex1D(FILTER_LUT, LUT_POS(d * 1.0/3.238315, 1024.0)).r;
wsum += w;
in0 = HOOKED_tex(base + pt * vec2(3.0, -1.0));
color += vec4(w) * in0;
}
d = length(vec2(3.0, 0.0) - fcoord);
if (d < 3.051550) {
w = tex1D(FILTER_LUT, LUT_POS(d * 1.0/3.238315, 1024.0)).r;
wsum += w;
in0 = HOOKED_tex(base + pt * vec2(3.0, 0.0));
color += vec4(w) * in0;
}
d = length(vec2(4.0, 0.0) - fcoord);
if (d < 3.051550) {
w = tex1D(FILTER_LUT, LUT_POS(d * 1.0/3.238315, 1024.0)).r;
wsum += w;
in0 = HOOKED_tex(base + pt * vec2(4.0, 0.0));
color += vec4(w) * in0;
}
d = length(vec2(-3.0, 1.0) - fcoord);
if (d < 3.051550) {
w = tex1D(FILTER_LUT, LUT_POS(d * 1.0/3.238315, 1024.0)).r;
wsum += w;
in0 = HOOKED_tex(base + pt * vec2(-3.0, 1.0));
color += vec4(w) * in0;
}
d = length(vec2(-2.0, 1.0) - fcoord);
if (d < 3.051550) {
w = tex1D(FILTER_LUT, LUT_POS(d * 1.0/3.238315, 1024.0)).r;
wsum += w;
in0 = HOOKED_tex(base + pt * vec2(-2.0, 1.0));
color += vec4(w) * in0;
}
d = length(vec2(-2.0, 2.0) - fcoord);
if (d < 3.051550) {
w = tex1D(FILTER_LUT, LUT_POS(d * 1.0/3.238315, 1024.0)).r;
wsum += w;
in0 = HOOKED_tex(base + pt * vec2(-2.0, 2.0));
color += vec4(w) * in0;
}
d = length(vec2(-1.0, 1.0) - fcoord);
w = tex1D(FILTER_LUT, LUT_POS(d * 1.0/3.238315, 1024.0)).r;
wsum += w;
in0 = HOOKED_tex(base + pt * vec2(-1.0, 1.0));
color += vec4(w) * in0;
d = length(vec2(0.0, 1.0) - fcoord);
w = tex1D(FILTER_LUT, LUT_POS(d * 1.0/3.238315, 1024.0)).r;
wsum += w;
in0 = HOOKED_tex(base + pt * vec2(0.0, 1.0));
color += vec4(w) * in0;
d = length(vec2(-1.0, 2.0) - fcoord);
w = tex1D(FILTER_LUT, LUT_POS(d * 1.0/3.238315, 1024.0)).r;
wsum += w;
in0 = HOOKED_tex(base + pt * vec2(-1.0, 2.0));
color += vec4(w) * in0;
d = length(vec2(0.0, 2.0) - fcoord);
w = tex1D(FILTER_LUT, LUT_POS(d * 1.0/3.238315, 1024.0)).r;
wsum += w;
in0 = HOOKED_tex(base + pt * vec2(0.0, 2.0));
color += vec4(w) * in0;
d = length(vec2(1.0, 1.0) - fcoord);
w = tex1D(FILTER_LUT, LUT_POS(d * 1.0/3.238315, 1024.0)).r;
wsum += w;
in0 = HOOKED_tex(base + pt * vec2(1.0, 1.0));
color += vec4(w) * in0;
d = length(vec2(2.0, 1.0) - fcoord);
w = tex1D(FILTER_LUT, LUT_POS(d * 1.0/3.238315, 1024.0)).r;
wsum += w;
in0 = HOOKED_tex(base + pt * vec2(2.0, 1.0));
color += vec4(w) * in0;
d = length(vec2(1.0, 2.0) - fcoord);
w = tex1D(FILTER_LUT, LUT_POS(d * 1.0/3.238315, 1024.0)).r;
wsum += w;
in0 = HOOKED_tex(base + pt * vec2(1.0, 2.0));
color += vec4(w) * in0;
d = length(vec2(2.0, 2.0) - fcoord);
w = tex1D(FILTER_LUT, LUT_POS(d * 1.0/3.238315, 1024.0)).r;
wsum += w;
in0 = HOOKED_tex(base + pt * vec2(2.0, 2.0));
color += vec4(w) * in0;
d = length(vec2(3.0, 1.0) - fcoord);
if (d < 3.051550) {
w = tex1D(FILTER_LUT, LUT_POS(d * 1.0/3.238315, 1024.0)).r;
wsum += w;
in0 = HOOKED_tex(base + pt * vec2(3.0, 1.0));
color += vec4(w) * in0;
}
d = length(vec2(4.0, 1.0) - fcoord);
if (d < 3.051550) {
w = tex1D(FILTER_LUT, LUT_POS(d * 1.0/3.238315, 1024.0)).r;
wsum += w;
in0 = HOOKED_tex(base + pt * vec2(4.0, 1.0));
color += vec4(w) * in0;
}
d = length(vec2(3.0, 2.0) - fcoord);
if (d < 3.051550) {
w = tex1D(FILTER_LUT, LUT_POS(d * 1.0/3.238315, 1024.0)).r;
wsum += w;
in0 = HOOKED_tex(base + pt * vec2(3.0, 2.0));
color += vec4(w) * in0;
}
d = length(vec2(-2.0, 3.0) - fcoord);
if (d < 3.051550) {
w = tex1D(FILTER_LUT, LUT_POS(d * 1.0/3.238315, 1024.0)).r;
wsum += w;
in0 = HOOKED_tex(base + pt * vec2(-2.0, 3.0));
color += vec4(w) * in0;
}
d = length(vec2(-1.0, 3.0) - fcoord);
if (d < 3.051550) {
w = tex1D(FILTER_LUT, LUT_POS(d * 1.0/3.238315, 1024.0)).r;
wsum += w;
in0 = HOOKED_tex(base + pt * vec2(-1.0, 3.0));
color += vec4(w) * in0;
}
d = length(vec2(0.0, 3.0) - fcoord);
if (d < 3.051550) {
w = tex1D(FILTER_LUT, LUT_POS(d * 1.0/3.238315, 1024.0)).r;
wsum += w;
in0 = HOOKED_tex(base + pt * vec2(0.0, 3.0));
color += vec4(w) * in0;
}
d = length(vec2(0.0, 4.0) - fcoord);
if (d < 3.051550) {
w = tex1D(FILTER_LUT, LUT_POS(d * 1.0/3.238315, 1024.0)).r;
wsum += w;
in0 = HOOKED_tex(base + pt * vec2(0.0, 4.0));
color += vec4(w) * in0;
}
d = length(vec2(1.0, 3.0) - fcoord);
if (d < 3.051550) {
w = tex1D(FILTER_LUT, LUT_POS(d * 1.0/3.238315, 1024.0)).r;
wsum += w;
in0 = HOOKED_tex(base + pt * vec2(1.0, 3.0));
color += vec4(w) * in0;
}
d = length(vec2(2.0, 3.0) - fcoord);
if (d < 3.051550) {
w = tex1D(FILTER_LUT, LUT_POS(d * 1.0/3.238315, 1024.0)).r;
wsum += w;
in0 = HOOKED_tex(base + pt * vec2(2.0, 3.0));
color += vec4(w) * in0;
}
d = length(vec2(1.0, 4.0) - fcoord);
if (d < 3.051550) {
w = tex1D(FILTER_LUT, LUT_POS(d * 1.0/3.238315, 1024.0)).r;
wsum += w;
in0 = HOOKED_tex(base + pt * vec2(1.0, 4.0));
color += vec4(w) * in0;
}
d = length(vec2(3.0, 3.0) - fcoord);
if (d < 3.051550) {
w = tex1D(FILTER_LUT, LUT_POS(d * 1.0/3.238315, 1024.0)).r;
wsum += w;
in0 = HOOKED_tex(base + pt * vec2(3.0, 3.0));
color += vec4(w) * in0;
}
color = color / vec4(wsum);
return color;
}
//!TEXTURE FILTER_LUT
//!SIZE 1024
//!FORMAT r32f
//!FILTER LINEAR
//!BORDER CLAMP
0000803F0BFF7F3F2DFC7F3F65F77F3FB3F07F3F18E87F3F94DD7F3F27D17F3FD0C27F3F92B27F3F6AA07F3F5B8C7F3F64767F3F865E7F3FC0447F3F15297F3F830B7F3F0BEC7E3FAECA7E3F6DA77E3F48827E3F3F5B7E3F54327E3F86077E3FD8DA7D3F49AC7D3FDA7B7D3F8C497D3F5F157D3F56DF7C3F70A77C3FAE6D7C3F12327C3F9CF47B3F4EB57B3F28747B3F2C317B3F5AEC7A3FB4A57A3F3B5D7A3FF0127A3FD5C6793FEA78793F3129793FABD7783F5A84783F3E2F783F5AD8773FAF7F773F3D25773F08C9763F0F6B763F550B763FDBA9753FA446753FAFE1743F007B743F9812743F78A8733FA23C733F18CF723FDC5F723FF0EE713F557C713F0D08713F1B92703F7F1A703F3DA16F3F56266F3FCBA96E3FA02B6E3FD6AB6D3F6F2A6D3F6DA76C3FD3226C3FA29C6B3FDC146B3F858B6A3F9E006A3F2974693F28E6683F9F56683F8FC5673FFA32673FE39E663F4C09663F3972653FAAD9643FA33F643F26A4633F3507633FD368623F03C9613FC727613F2285603F16E15F3FA63B5F3FD5945E3FA5EC5D3F19435D3F33985C3FF6EB5B3F663E5B3F858F5A3F55DF593FD92D593F147B583F0AC7573FBC11573F2E5B563F63A3553F5DEA543F2030543FAD74533F09B8523F37FA513F383B513F117B503FC4B94F3F54F74E3FC4334E3F186F4D3F51A94C3F75E24B3F851A4B3F84514A3F7787493F5FBC483F40F0473F1D23473FFA54463FDA85453FBFB5443FADE4433FA712433FB03F423FCC6B413FFE96403F49C13F3FB1EA3E3F38133E3FE23A3D3FB2613C3FAC873B3FD2AC3A3F29D1393FB3F4383F7517383F7039373FA85A363F227B353FE09A343FE5B9333F35D8323FD4F5313FC412313F0A2F303FA84A2F3FA1652E3FFB7F2D3FB6992C3FD8B22B3F64CB2A3F5CE3293FC5FA283FA111283FF527273FC33D263F1053253FDE67243F317C233F0C90223F73A3213F69B6203FF2C81F3F11DB1E3FC9EC1D3F1FFE1C3F140F1C3FAE1F1B3FEF2F1A3FDA3F193F744F183FBF5E173FBF6D163F777C153FEB8A143F1F99133F15A7123FD1B4113F56C2103FA8CF0F3FCADC0E3FC0E90D3F8DF60C3F34030C3FB90F0B3F1E1C0A3F6928093F9B34083FB840073FC34C063FC158053FB364043F9E70033F847C023F6A88013F5294003F7E40FF3E6B58FD3E7070FB3E9388F93EDCA0F73E51B9F53EF8D1F33ED7EAF13EF603F03E591DEE3E0937EC3E0B51EA3E656BE83E1E86E63E3CA1E43EC5BCE23EC0D8E03E33F5DE3E2312DD3E972FDB3E964DD93E256CD73E4A8BD53E0BABD33E6FCBD13E7BECCF3E350ECE3EA430CC3ECC53CA3EB577C83E639CC63EDDC1C43E29E8C23E4B0FC13E4B37BF3E2D60BD3EF789BB3EAEB4B93E5AE0B73EFE0CB63EA03AB43E4669B23EF698B03EB4C9AE3E86FBAC3E722EAB3E7C62A93EAB97A73E02CEA53E8805A43E423EA23E3478A03E63B39E3ED6EF9C3E902D9B3E966C993EEFAC973E9DEE953EA731943E1176923EE0BB903E19038F3EBF4B8D3ED8958B3E69E1893E762E883E037D863E15CD843EB01E833EDA71813E298D7F3ECC397C3EA4E9783EBA9C753E1553723EBE0C6F3EBEC96B3E1B8A683EDF4D653E1115623EB9DF5E3EDEAD5B3E887F583EBF54553E892D523EEF094F3EF8E94B3EAACD483E0DB5453E27A0423E008F3F3E9E813C3E0878393E4472363E5970333E4E72303E28782D3EEE812A3EA58F273E54A1243E01B7213EB2D01E3E6BEE1B3E3410193E1036163E0760133E1C8E103E56C00D3EB9F60A3E4A31083E0E70053E0BB3023E89F4FF3D7F8BFA3D012BF53D17D3EF3DCC83EA3D263DE53D2EFFDF3DEDC9DA3D6A9DD53DAC79D03DBD5ECB3DA14CC63D6243C13D0543BC3D924BB73D0F5DB23D8177AD3DF09AA83D62C7A33DDBFC9E3D623B9A3DFC82953DAED3903D7C2D8C3D6D90873D83FC823D89E37C3D69E0733DAFEF6A3D6211623D8945593D2C8C503D50E5473DFB503F3D34CF363D00602E3D6303263D63B91D3D0282153D465D0D3D314B053D8C97FA3C11BEEA3CF509DB3C3B7BCB3CE811BC3CFCCDAC3C7AAF9D3C63B68E3C6DC57F3CE968623C3757453C5390283C38140C3CBCC5DF3B7DF8A73B3B81613B2178E83A78140239AD9EC5BA07964CBB86999ABB5753CEBB4EBC00BCC1041ABC180333BC6CB74BBCD52164BC6E427CBCA90C8ABC4FD395BC3775A1BC72F2ACBC0F4BB8BC207FC3BCB68ECEBCE479D9BCBC40E4BC51E3EEBCB961F9BC03DE01BD28F906BD55020CBD97F910BDF8DE15BD84B21ABD49741FBD512424BDAAC228BD604F2DBD82CA31BD1C3436BD3D8C3ABDF2D23EBD4A0843BD542C47BD1E3F4BBDB9404FBD323153BD9B1057BD02DF5ABD799C5EBD0F4962BDD6E465BDDF6F69BD3BEA6CBDFA5370BD30AD73BDEEF576BD462E7ABD4A567DBD073780BDD2BA81BD903683BD4AAA84BD0B1686BDDB7987BDC5D588BDD4298ABD11768BBD87BA8CBD3FF78DBD462C8FBDA45990BD667
F91BD959D92BD3CB493BD68C394BD22CB95BD76CB96BD6FC497BD18B698BD7EA099BDAB839ABDAB5F9BBD89349CBD52029DBD11C99DBDD3889EBDA3419FBD8DF39FBD9D9EA0BDE042A1BD62E0A1BD2F77A2BD5407A3BDDC90A3BDD613A4BD4C90A4BD4D06A5BDE475A5BD1EDFA5BD0942A6BDB19EA6BD23F5A6BD6C45A7BD998FA7BDB7D3A7BDD311A8BDFB49A8BD3B7CA8BDA2A8A8BD3BCFA8BD16F0A8BD3E0BA9BDC220A9BDAE30A9BD113BA9BDF83FA9BD713FA9BD8839A9BD4C2EA9BDCB1DA9BD1108A9BD2DEDA8BD2CCDA8BD1DA8A8BD0C7EA8BD084FA8BD1E1BA8BD5CE2A7BDD1A4A7BD8962A7BD931BA7BDFCCFA6BDD37FA6BD252BA6BD01D2A5BD7374A5BD8A12A5BD54ACA4BDDE41A4BD37D3A3BD6C60A3BD8CE9A2BDA36EA2BDC1EFA1BDF26CA1BD45E6A0BDC85BA0BD88CD9FBD933B9FBDF6A59EBDC10C9EBD00709DBDC1CF9CBD122C9CBD00859BBD9ADA9ABDED2C9ABD067C99BDF3C798BDC31098BD815697BD3C9996BD02D995BDDF1595BDE14F94BD168793BD8BBB92BD4EED91BD6A1C91BDEF4890BDE9728FBD649A8EBD6FBF8DBD17E28CBD68028CBD6F208BBD393C8ABDD45589BD4C6D88BDAE8287BD079686BD63A785BDCFB684BD57C483BD09D082BDF1D981BD1AE280BD25D17FBDCBDA7DBD3FE17BBD9AE479BDF4E477BD64E275BD04DD73BDEBD471BD31CA6FBDEDBC6DBD37AD6BBD269B69BDD28667BD527065BDBD5763BD293D61BDAE205FBD62025DBD5BE25ABDB0C058BD779D56BDC57854BDB15252BD512B50BDB9024EBD00D94BBD3AAE49BD7D8247BDDD5545BD702843BD49FA40BD7ECB3EBD239C3CBD4C6C3ABD0C3C38BD780B36BDA3DA33BDA1A931BD85782FBD62472DBD4C162BBD54E528BD8DB426BD0B8424BDDF5322BD1B2420BDD2F41DBD15C61BBDF69719BD856A17BDD53D15BDF61113BDFAE610BDF1BC0EBDEB930CBDF86B0ABD2A4508BD901F06BD3AFB03BD36D801BD2C6DFFBCCF2CFBBC74EFF6BC3AB5F2BC3C7EEEBC994AEABC6D1AE6BCD4EDE1BCEAC4DDBCCB9FD9BC927ED5BC5961D1BC3B48CDBC5233C9BCB822C5BC8616C1BCD40EBDBCBD0BB9BC560DB5BCB913B1BCFD1EADBC392FA9BC8344A5BCF25EA1BC9B7E9DBC95A399BCF4CD95BCCEFD91BC35338EBC406E8ABC01AF86BC8BF582BCE4837EBC902877BC3ED96FBC139668BC305F61BCB9345ABCCE1653BC90054CBC200145BC9B093EBC221F37BCD14130BCC67129BC1DAF22BCF1F91BBC5E5215BC7DB80EBC682C08BC38AE01BC077CF6BBC7B7E9BBDC0FDDBB7284D0BBB215C4BBC7C3B7BBD78EABBB08779FBB807C93BB629F87BBA1BF77BBD67B60BBA37349BB44A732BBF3161CBBE5C205BB9F56DFBAC6A0B3BA9C6488BAF6443BBAD36ACDB978D998B8A1247E391537113A1BEE613AE3D6983AD73AC03AB922E73A30C7063BD3BE193B34782C3B47F33E3BFE2F513B522E633B3CEE743BDB37833B60D98B3BAC5B943BC0BE9C3BA002A53B4F27AD3BD02CB53B2B13BD3B65DAC43B8882CC3B9B0BD43BA875DB3BBCC0E23BE2ECE93B28FAF03B9BE8F73B4BB8FE3BA4B4023CD2FD053CB937093C61620C3CD67D0F3C228A123C4F87153C6A75183C7E541B3C99241E3CC6E5203C1398233C8E3B263C46D0283C48562B3CA4CD2D3C6A36303CAA90323C73DC343CD719373CE648393CB3693B3C4F7C3D3CCC803F3C3E77413CB65F433C4A3A453C0C07473C10C6483C6B774A3C321B4C3C7AB14D3C583A4F3CE3B5503C2F24523C5585533C6AD9543C8620563CBF5A573C2E88583CEAA8593C0CBD5A3CACC45B3CE3BF5C3CC9AE5D3C78915E3C0A685F3C9832603C3CF1603C10A4613C2F4B623CB3E6623CB876633C58FB633CAF74643CD7E2643CEE45653C0E9E653C53EB653CDA2D663CC065663C2093663C18B6663CC4CE663C41DD663CACE1663C23DC663CC4CC663CABB3663CF690663CC364663C302F663C5BF0653C61A8653C6257653C7BFD643CCB9A643C702F643C89BB633C343F633C8FBA623CBA2D623CD398613CF9FB603C4B57603CE7AA5F3CEDF65E3C7B3B5E3CB0785D3CACAE5C3C8CDD5B3C71055B3C79265A3CC340593C6E54583C9961573C6268563CE968553C4D63543CAC57533C2546523CD82E513CE211503C62EF4E3C77C74D3C3F9A4C3CDA674B3C65304A3CFEF3483CC5B2473CD66C463C5122453C54D3433CFB7F423C6628413CB1CC3F3CFA6C3E3C60093D3CFFA13B3CF4363A3C5EC8383C5856373C00E1353C7268343CCCEC323C296E313CA7EC2F3C61682E3C73E12C3CFA572B3C11CC293CD43D283C5EAD263CCA1A253C3486233CB7EF213C6E57203C72BD1E3CDF211D3CCF841B3C5BE6193C9E46183CB2A5163CAF03153CB060133CCDBC113C1F18103CBF720E3CC5CC0C3C4A260B3C667F093C31D8073CC130063C3089043C93E1023C023A013C2825FF3BBED6FB3BF388F83BF33BF53BEAEFF13B04A5EE3B695BEB3B4713E83BC5CCE43B0D88E1
3B4845DE3B9F04DB3B39C6D73B3E8AD43BD550D13B241ACE3B50E6CA3B80B5C73BD887C43B7C5DC13B9136BE3B3913BB3B98F3B73BD0D7B43B03C0B13B52ACAE3BDD9CAB3BC591A83B2A8BA53B2B89A23BE68B9F3B7A939C3B05A0993BA2B1963B70C8933B89E4903B0A068E3B0D2D8B3BAD59883B048C853B2BC4823B3A02803B958C7A3BE820753B9BC16F3BDB6E6A3BD528653BB4EF5F3BA3C35A3BC9A4553B5093503B5E8F4B3B1999463BA6B0413B28D63C3BC409383B994B333BC99B2E3B73FA293BB667253BAFE3203B7B6E1C3B3508183BF7B0133BDB680F3BF92F0B3B6906073B3FEC023B25C3FD3AEDCCF53AFEF5ED3A7B3EE63A86A6DE3A412ED73AC9D5CF3A3B9DC83AB184C13A458CBA3A0BB4B33A19FCAC3A8264A63A55ED9F3AA396993A7860933ADF4A8D3AE155873A8681813AA69B773A97756C3AE390613A8BED563A8A8B4C3ADA6A423A708B383A3FED2E3A3490253A3A741C3A3A99133A18FF0A3AB3A5023AD619F5393169E5392739D6395F89C7397A59B93914A9AB39C0779E390DC59139849085394DB37339E13F5D39B3C54739A143333983B81F3921230D397404F73804A9D5384331B6386C9A983849C37938FA074638E2FC1538A337D337DABC813796F1DD360000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000
//!DESC lensfix
//!HOOK MAIN
//!BIND HOOKED
// The following parameters were calibrated with OpenCV.
// See more:
// https://docs.opencv.org/2.4/modules/calib3d/doc/camera_calibration_and_3d_reconstruction.html
// https://www.learnopencv.com/camera-calibration-using-opencv/
// https://docs.opencv.org/2.4/doc/tutorials/calib3d/camera_calibration/camera_calibration.html#cameracalibrationopencv
//
// 1. Print https://github.com/opencv/opencv/blob/master/doc/pattern.png
// 2. Take photos of the pattern with the camera from different angles
// 3. $ ./calibrate.py photo1.png photo2.png ...
// Press any key for each photo shown.
#define K1 -0.36273784475910814
#define K2 0.14395222613195036
#define K3 -0.02752716367238447
#define ZOOM 1.1045972020461248
vec4 hook() {
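// Map each output (undistorted) pixel to its position in the distorted source.
// Coordinates are centered and scaled so that the distance from the frame
// center to a corner is 1 (the same normalization calibrate.py uses), ZOOM
// keeps the output frame filled, and the polynomial applies the radial model
// r_distorted = r * (1 + K1*r^2 + K2*r^4 + K3*r^6) before sampling.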
vec2 unit_rect = HOOKED_size / length(HOOKED_size);
vec2 pos = (HOOKED_pos - 0.5) * 2.0 * unit_rect;
pos *= ZOOM;
float r2 = pos.x * pos.x + pos.y * pos.y;
pos *= 1.0 + (((K3) * r2 + (K2)) * r2 + (K1)) * r2;
pos = pos / unit_rect / 2.0 + 0.5;
return HOOKED_tex(pos);
}
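To try either hook, save it to its own file (any name works, e.g. lensfix.glsl) and load it with mpv's --glsl-shader option, for example: mpv --glsl-shader=lensfix.glsl input.mp4, after replacing the K1/K2/K3/ZOOM defines with the values calibrate.py prints for your camera.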