%%cython -a
# cython: cdivision=True
# cython: boundscheck=False
# cython: nonecheck=False
# cython: wraparound=False
import numpy as np
cimport numpy as cnp
from skimage._shared.transform cimport integrate
from libc.stdlib cimport malloc, free
import time
import xml.etree.ElementTree as ET

cdef:
    # Row/column offsets of the 8 neighbouring rectangles, enumerated
    # clockwise starting from the top-left one.
    Py_ssize_t[::1] mlbp_r_offsets = np.asarray([-1, -1, -1, 0, 1, 1, 1, 0], dtype=np.intp)
    Py_ssize_t[::1] mlbp_c_offsets = np.asarray([-1, 0, 1, 1, 1, 0, -1, -1], dtype=np.intp)

cdef _multiblock_lbp(float[:, ::1] int_image,
                     Py_ssize_t r,
                     Py_ssize_t c,
                     Py_ssize_t width,
                     Py_ssize_t height):
    """Multi-block local binary pattern (MB-LBP) [1]_.

    Parameters
    ----------
    int_image : (N, M) float array
        Integral image.
    r : int
        Row-coordinate of top left corner of a rectangle containing feature.
    c : int
        Column-coordinate of top left corner of a rectangle containing feature.
    width : int
        Width of one of 9 equal rectangles that will be used to compute
        a feature.
    height : int
        Height of one of 9 equal rectangles that will be used to compute
        a feature.

    Returns
    -------
    output : int
        8-bit MB-LBP feature descriptor.

    References
    ----------
    .. [1] Face Detection Based on Multi-Block LBP
           Representation. Lun Zhang, Rufeng Chu, Shiming Xiang, Shengcai Liao,
           Stan Z. Li
           http://www.cbsr.ia.ac.cn/users/scliao/papers/Zhang-ICB07-MBLBP.pdf
    """

    cdef:
        # Top-left coordinates of the central rectangle.
        Py_ssize_t central_rect_r = r + height
        Py_ssize_t central_rect_c = c + width

        Py_ssize_t r_shift = height - 1
        Py_ssize_t c_shift = width - 1

        # Copy the offset arrays to multiply them by width and height later.
        Py_ssize_t[::1] r_offsets = mlbp_r_offsets.copy()
        Py_ssize_t[::1] c_offsets = mlbp_c_offsets.copy()

        Py_ssize_t current_rect_r, current_rect_c
        Py_ssize_t element_num, i

        double current_rect_val

        int has_greater_value
        int lbp_code = 0

    # Pre-multiply the offsets with width and height.
    for i in range(8):
        r_offsets[i] = r_offsets[i] * height
        c_offsets[i] = c_offsets[i] * width

    # Sum of intensity values of the central rectangle.
    cdef float central_rect_val = integrate(int_image, central_rect_r, central_rect_c,
                                            central_rect_r + r_shift,
                                            central_rect_c + c_shift)

    for element_num in range(8):

        current_rect_r = central_rect_r + r_offsets[element_num]
        current_rect_c = central_rect_c + c_offsets[element_num]

        current_rect_val = integrate(int_image, current_rect_r, current_rect_c,
                                     current_rect_r + r_shift,
                                     current_rect_c + c_shift)

        has_greater_value = current_rect_val >= central_rect_val

        # If the current rectangle is at least as bright as the central one,
        # set the corresponding bit of the descriptor to 1.
        lbp_code |= has_greater_value << (7 - element_num)

    return lbp_code

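# ----------------------------------------------------------------------------
# Illustration only (not used by the cascade below): a rough pure-Python
# sketch of the same MB-LBP computation, assuming
# skimage.transform.integral_image for building the integral image.
# The helper names here (_multiblock_lbp_reference, rect_sum) are
# hypothetical and exist only for cross-checking the cdef version above.
# ----------------------------------------------------------------------------
def _multiblock_lbp_reference(image, r, c, width, height):
    """Python sketch of the 9-block LBP code, for sanity checking."""
    from skimage.transform import integral_image

    # Pad the integral image with a leading zero row/column so that rectangle
    # sums with inclusive coordinates need no boundary checks.
    ii = np.pad(integral_image(image.astype(float)), ((1, 0), (1, 0)),
                mode='constant')

    def rect_sum(r0, c0, r1, c1):
        # Sum of image[r0:r1 + 1, c0:c1 + 1] from the padded integral image.
        return ii[r1 + 1, c1 + 1] - ii[r0, c1 + 1] - ii[r1 + 1, c0] + ii[r0, c0]

    # Clockwise neighbour offsets, same order as mlbp_r_offsets/mlbp_c_offsets.
    offsets = [(-1, -1), (-1, 0), (-1, 1), (0, 1),
               (1, 1), (1, 0), (1, -1), (0, -1)]

    central_r = r + height
    central_c = c + width
    central = rect_sum(central_r, central_c,
                       central_r + height - 1, central_c + width - 1)

    code = 0
    for i, (dr, dc) in enumerate(offsets):
        rr = central_r + dr * height
        cc = central_c + dc * width
        neighbour = rect_sum(rr, cc, rr + height - 1, cc + width - 1)
        code |= int(neighbour >= central) << (7 - i)
    return code
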
# One MB-LBP feature: top-left corner of the 3x3 block grid and the size of
# a single block.
cdef struct MBLBP:
    Py_ssize_t r
    Py_ssize_t c
    Py_ssize_t width
    Py_ssize_t height

# Decision stump: the id of the feature it evaluates, its two leaf values,
# and a 256-bit lookup table packed into eight 32-bit words.
cdef struct MBLBPStump:
    Py_ssize_t feature_id
    float left
    float right
    cnp.uint32_t[::1] lut

# Boosted stage: index of its first stump, number of stumps and threshold.
cdef struct Stage:
    Py_ssize_t first_idx
    Py_ssize_t amount
    float threshold

cdef class Cascade:

    cdef:
        public float eps
        public Py_ssize_t stages_amount
        public Py_ssize_t stumps_amount
        public Py_ssize_t features_amount
        Stage * stages
        MBLBPStump * stumps
        MBLBP * features

    def __dealloc__(self):
        # Free the memory that was used for the C arrays.
        free(self.stages)
        free(self.stumps)
        free(self.features)

    cdef evaluate(self, float[:, ::1] int_img):

        cdef:
            float stage_threshold
            float stage_points
            Py_ssize_t stage_number
            Py_ssize_t weak_classifier_number
            Py_ssize_t feature_number
            Py_ssize_t features_amount
            Py_ssize_t stumps_amount
            Py_ssize_t first_stump_idx
            cnp.uint32_t[::1] current_lut
            int lbp_code
            int bit
            Stage current_stage
            MBLBPStump current_stump
            MBLBP current_feature

        for stage_number in range(self.stages_amount):

            current_stage = self.stages[stage_number]
            first_stump_idx = current_stage.first_idx
            stage_points = 0

            for weak_classifier_number in range(current_stage.amount):

                current_stump = self.stumps[first_stump_idx + weak_classifier_number]
                current_feature = self.features[current_stump.feature_id]

                lbp_code = _multiblock_lbp(int_img,
                                           current_feature.r,
                                           current_feature.c,
                                           current_feature.width,
                                           current_feature.height)

                current_lut = current_stump.lut

                # The 256-entry lookup table is packed into eight 32-bit
                # words: word (lbp_code >> 5) holds bit (lbp_code & 31).
                bit = (current_lut[lbp_code >> 5] >> (lbp_code & 31)) & 1

                stage_points += current_stump.left if bit else current_stump.right

            # Reject the window as soon as one stage falls below its threshold.
            if stage_points < (current_stage.threshold - self.eps):
                return False

        return True

    def load_xml(self, filename):

        # # Helper function to parse feature descriptors.
        # def mblbp_from_feature_node(feature_node):
        #     string_params = feature_node[0].text.split()
        #     params = map(lambda x: int(x), string_params)
        #     return MBLBP(params[1], params[0], params[2], params[3])

        cdef:
            Stage * stages_carr
            MBLBPStump * stumps_carr
            MBLBP * features_carr

            float stage_threshold

            Py_ssize_t stage_number
            Py_ssize_t stages_amount
            Py_ssize_t weak_classifiers_amount
            Py_ssize_t weak_classifier_number
            Py_ssize_t feature_number
            Py_ssize_t features_amount
            Py_ssize_t stumps_amount

            cnp.uint32_t[::1] lut

            MBLBP new_feature
            MBLBPStump new_stump
            Stage new_stage

        self.eps = 1e-5

        tree = ET.parse(filename)

        # Load the feature and stage nodes.
        features = tree.find('.//features')
        stages = tree.find('.//stages')

        # Get the respective amounts.
        stages_amount = int(tree.find('.//stageNum').text)
        features_amount = len(features)

        stumps_amount = 0
        for stage_number in range(stages_amount):
            current_stage = stages[stage_number]
            weak_classifiers_amount = int(current_stage.find('maxWeakCount').text)
            stumps_amount += weak_classifiers_amount

        # Allocate memory for the data.
        features_carr = <MBLBP*>malloc(features_amount*sizeof(MBLBP))
        stumps_carr = <MBLBPStump*>malloc(stumps_amount*sizeof(MBLBPStump))
        stages_carr = <Stage*>malloc(stages_amount*sizeof(Stage))

        # Check that memory was allocated.
        if not (features_carr and stumps_carr and stages_carr):
            raise MemoryError()

        # Parse the features and load them into memory.
        for feature_number in range(features_amount):
            params = features[feature_number][0].text.split()
            params = map(lambda x: int(x), params)
            # The feature node stores "x y width height";
            # MBLBP takes (r, c, width, height).
            new_feature = MBLBP(params[1], params[0], params[2], params[3])
            features_carr[feature_number] = new_feature

        stump_counter = 0

        # Parse the stumps and stages and load them into memory.
        for stage_number in range(stages_amount):

            current_stage = stages[stage_number]

            # Parse and load the current stage.
            stage_threshold = float(current_stage.find('stageThreshold').text)
            weak_classifiers_amount = int(current_stage.find('maxWeakCount').text)
            new_stage = Stage(stump_counter, weak_classifiers_amount, stage_threshold)
            stages_carr[stage_number] = new_stage

            weak_classifiers = current_stage.find('weakClassifiers')

            for weak_classifier_number in range(weak_classifiers_amount):

                current_weak_classifier = weak_classifiers[weak_classifier_number]

                # The stump's two leaf values: the first one is added to the
                # stage sum when the LUT bit for the computed LBP code is set,
                # the second one otherwise.
                leaf_values = current_weak_classifier.find('leafValues').text
                leaf_values = map(lambda x: float(x), leaf_values.split())

                # Take the values starting from the third one; the first two
                # are not needed for stumps.
                internal_nodes = current_weak_classifier.find('internalNodes')
                internal_nodes = internal_nodes.text.split()[2:]

                # Extract the feature number and the lookup table that encodes
                # which LBP codes vote for the left leaf.
                feature_number = int(internal_nodes[0])
                lut_array = map(lambda x: int(x), internal_nodes[1:])
                lut = np.asarray(lut_array, dtype='uint32')

                new_stump = MBLBPStump(feature_number, leaf_values[0], leaf_values[1], lut)
                stumps_carr[stump_counter] = new_stump
                stump_counter += 1

        self.features = features_carr
        self.stumps = stumps_carr
        self.stages = stages_carr

        self.stages_amount = stages_amount
        self.features_amount = features_amount
        self.stumps_amount = stumps_amount

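# ----------------------------------------------------------------------------
# For reference, a schematic of the cascade XML layout that load_xml expects.
# Element names are taken from the parsing code above; the value placeholders
# and the <rect> tag (the loader only reads the first child of each feature
# node; "rect" is the usual name in OpenCV LBP cascades) are assumptions.
#
#   <stageNum>N</stageNum>
#   <stages>
#     <_>                                   <!-- one node per stage -->
#       <maxWeakCount>K</maxWeakCount>
#       <stageThreshold>T</stageThreshold>
#       <weakClassifiers>
#         <_>                               <!-- one node per stump -->
#           <internalNodes>skip skip feature-idx lut-word-0 ... lut-word-7</internalNodes>
#           <leafValues>left right</leafValues>
#         </_>
#       </weakClassifiers>
#     </_>
#   </stages>
#   <features>
#     <_><rect>x y width height</rect></_>  <!-- one node per MB-LBP feature -->
#   </features>
# ----------------------------------------------------------------------------
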
sample = Cascade()
begin = time.time()
sample.load_xml('lbpcascade_frontalface.xml')
end = time.time()
print end - begin
print sample.stumps_amount
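
# ----------------------------------------------------------------------------
# Illustration only: how a stump's 256-entry lookup table is packed into eight
# uint32 words. Bit number `code` (0..255) lives in word (code >> 5) at bit
# position (code & 31), which is exactly what Cascade.evaluate reads back.
# The helpers pack_lut/lut_bit are hypothetical sanity-check sketches, not
# part of the cascade itself.
# ----------------------------------------------------------------------------
def pack_lut(bits):
    # `bits` is a length-256 sequence of 0/1 flags; returns eight uint32 words.
    words = [0] * 8
    for code, flag in enumerate(bits):
        if flag:
            words[code >> 5] |= 1 << (code & 31)
    return np.asarray(words, dtype=np.uint32)

def lut_bit(words, code):
    # Same bit extraction as in Cascade.evaluate.
    return (int(words[code >> 5]) >> (code & 31)) & 1

_test_bits = np.random.randint(0, 2, 256)
_packed = pack_lut(_test_bits)
assert all(lut_bit(_packed, code) == _test_bits[code] for code in range(256))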