Created
July 7, 2015 13:56
-
-
Save ebonical/d5441f8df2ae0d32c0e8 to your computer and use it in GitHub Desktop.
Encode depthmap metadata inside a JPEG
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
#!/usr/bin/env ruby | |
require "optparse" | |
require "base64" | |
require "digest" | |
require "ostruct" | |
require "fileutils" | |
# Command-line argument handling for the encoder script.
class Options
  # Print usage information (OptionParser exits after showing help).
  def self.help
    parse %w[--help]
  end

  # Parse +args+ (an ARGV-style array) into an OpenStruct.
  #
  # Recognised fields: depth_path (-d), output_path (-o), backup (-b),
  # verbose (-v) and original_path — the trailing positional argument
  # left over after OptionParser#parse! strips the switches from +args+.
  def self.parse(args)
    options = OpenStruct.new

    parser = OptionParser.new do |opts|
      opts.banner = "Usage: encode_gdepth [options] IMAGE"
      opts.separator ""

      opts.on("-d", "--depth=IMAGE", String,
              "Path to PNG depth image (Required)") { |value| options.depth_path = value }

      opts.on("-o", "--output=IMAGE", String,
              "Write output to <image> instead of overwriting original") { |value| options.output_path = value }

      opts.on("-b", "--backup", "Create backup of original image") { |value| options.backup = value }

      opts.on("-v", "--[no-]verbose", "Run verbosely") { |value| options.verbose = value }

      opts.on_tail("-h", "--help", "Show this message") do
        puts opts
        exit
      end
    end

    parser.parse!(args)
    # Whatever survives switch removal is the positional image path.
    options.original_path = args.last
    options
  end
end
# Entry point: show usage when called with no arguments, otherwise parse
# the command line. An unrecognised or malformed switch previously
# escaped as an uncaught OptionParser::InvalidOption and dumped a
# backtrace; report it like any other usage error instead.
if ARGV.empty?
  Options.help
else
  begin
    options = Options.parse(ARGV)
  rescue OptionParser::ParseError => e
    puts e.message
    Options.help
  end
end
# Both the original image (positional argument) and the depth image (-d)
# are mandatory; complain and show usage when either is missing.
begin
  { "Original image" => options.original_path,
    "Depth image" => options.depth_path }.each do |label, path|
    raise OptionParser::MissingArgument, label if path.nil?
  end
rescue => e
  puts e.message
  Options.help
end
# Global verbosity flag captured from the parsed options.
VERBOSE = options.verbose

# Print +message+ only when the script is running verbosely.
def vputs(message)
  puts message if VERBOSE
end
# | |
# Ported from original JavaScript source by Rafał Lindemann: | |
# https://github.com/panrafal/depthy/blob/master/app/scripts/classes/GDepthEncoder.js | |
# | |
# Embeds a Google GDepth depthmap into a JPEG by rewriting its APP1/XMP
# metadata segments. Walks the JPEG marker stream byte-by-byte, drops any
# pre-existing XMP, and splices in freshly built standard + extended XMP.
class GDepthEncoder
  # APP1 namespace identifiers used to recognise and emit XMP segments.
  XMP_HEADER = 'http://ns.adobe.com/xap/1.0/'
  XMP_EXTENSION_HEADER = 'http://ns.adobe.com/xmp/extension/'
  # XML namespaces declared on the rdf:Description element.
  XMLNS = {
    'GFocus' => 'http://ns.google.com/photos/1.0/focus/',
    'GImage' => 'http://ns.google.com/photos/1.0/image/',
    'GDepth' => 'http://ns.google.com/photos/1.0/depthmap/',
    'xmpNote' => 'http://ns.adobe.com/xmp/note/'
  }

  # Encode a depthmap into +image+.
  #
  # image    - IO-like object for the JPEG (anything responding to #read).
  # depthmap - optional Hash with :data (base64-encoded PNG bytes),
  #            :min_depth and :max_depth.
  #
  # Returns the re-assembled JPEG as a binary String.
  def encode_depthmap(image, depthmap = nil)
    props, extProps = {}, {}
    if depthmap
      props["GDepth:Format"] = "RangeInverse"
      props["GDepth:Mime"] = "image/png"
      props["GDepth:Near"] = depthmap[:min_depth]
      props["GDepth:Far"] = depthmap[:max_depth]
      # The (large) base64 payload goes into the extended XMP packet; the
      # small scalar properties ride in the standard packet.
      extProps["GDepth:Data"] = depthmap[:data]
      vputs "Depthmap size #{depthmap.fetch(:data, '').size}"
    end
    standard_xmp = build_xmp(props)
    extended_xmp = build_xmp(extProps)
    encode_xmp image, standard_xmp, extended_xmp
  end

  # Build a minimal XMP packet: all +props+ are emitted as XML attributes
  # on a single rdf:Description element, with +xmlns+ (default XMLNS)
  # declared alongside them. Returns the packet as one String.
  def build_xmp(props, xmlns = nil)
    xmp = []
    xmlns ||= XMLNS
    xmp << '<x:xmpmeta xmlns:x="adobe:ns:meta/" x:xmptk="Adobe XMP Core 5.1.0-jc003">'
    xmp << '<rdf:RDF xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#">'
    xmp << '<rdf:Description rdf:about=""'
    xmlns.each { |k,v| xmp << %{ xmlns:#{k}="#{v}"} }
    props.each { |k,v| xmp << %{ #{k}="#{v}"} }
    xmp << ' /></rdf:RDF></x:xmpmeta>'
    xmp.join
  end

  # Walk the JPEG segment stream from +image+, strip any pre-existing XMP
  # APP1 segments, and insert the new XMP exactly once — right after the
  # EXIF APP1 segment when one exists, otherwise just before SOF/SOS.
  # Returns the rebuilt file as a binary String.
  def encode_xmp(image, standard_xmp, extended_xmp)
    data = image.read.unpack("C*") # whole file as an array of byte values
    offset = 0
    parts = []
    xmp_written = false
    # Lambda so the "emit once" guard can fire from two places below.
    write_xmp = -> {
      unless xmp_written
        parts << build_xmp_segments(standard_xmp, extended_xmp)
        vputs "XMP written!"
        xmp_written = true
      end
    }
    while offset < data.size
      seg_start = offset
      seg_type = nil
      # Every segment must begin with a 0xFF marker byte.
      if (b = data[offset]) != 0xFF
        raise "Bad JPG Format, 0xFF expected, got #{b}"
      end
      offset += 1
      # Skip (but preserve) optional 0xFF fill bytes before the marker code.
      while true
        seg_type = data[offset]
        offset += 1
        if seg_type == 0xFF
          vputs "Padding 0xFF found"
          parts << 0xFF
        else
          break
        end
      end
      if seg_type == 0xC0 || seg_type == 0xC2 || seg_type == 0xDA
        write_xmp.() # right before SOF / SOS
      end
      if seg_type == 0xDA
        # copy the rest on SOS... no XMP should exist beyond that point
        remaining = data.size - seg_start
        vputs "SOS found, copy remaining bytes #{remaining}"
        parts << data[seg_start, remaining]
        break
      end
      # Stand-alone markers (TEM, RSTn, SOI, EOI) carry no length field:
      # copy the two marker bytes and continue.
      if seg_type == 0x00 || (seg_type >= 0xD0 && seg_type <= 0xD9)
        parts << data[seg_start, 2]
        vputs "Found ctrl segment #{seg_type}"
        next
      end
      # Big-endian 16-bit segment length (includes the two length bytes).
      seg_size = data[offset, 2].pack("CC").unpack("n").first
      offset += 2
      if seg_type == 0xE1
        # read header: the NUL-terminated namespace string that opens APP1
        app_header = ""
        offset -= 1
        while (b = data[offset += 1]) != 0
          app_header << b.chr
        end
        vputs "Found APP1 #{app_header}"
        # Ignore any existing XMP
        if app_header == XMP_HEADER || app_header == XMP_EXTENSION_HEADER
          vputs "Found old XMP, skipping"
          # Jump to the end of this segment without copying it out.
          offset += seg_size - (offset - seg_start - 2)
          next
        end
      end
      # Copying segment
      remaining = seg_size - (offset - seg_start - 2)
      vputs "Copying segment #{seg_type}, size: #{seg_size}, left: #{remaining}"
      offset += remaining
      parts << data[seg_start, 2 + seg_size]
      if seg_type == 0xE1
        write_xmp.() # right after EXIF
      end
    end
    # parts mixes byte arrays, lone byte values and Strings (from the XMP
    # builder); normalise everything to bytes and repack into one String.
    parts = parts.flatten.map { |part| String === part ? part.unpack("C*") : part }
    parts.flatten.pack("C*")
  end

  # Serialise the standard XMP as one APP1 segment and, when present, the
  # extended XMP across as many APP1 segments as needed. Each extended
  # chunk carries the MD5 digest of the whole extended packet plus its
  # total size and this chunk's offset (Adobe ExtendedXMP scheme); the
  # standard packet gets an xmpNote:HasExtendedXMP attribute linking to it.
  # Returns a flat array of byte values and Strings.
  def build_xmp_segments(standard_xmp, extended_xmp)
    parts = []
    if extended_xmp
      extended_uid = Digest::MD5.hexdigest(extended_xmp).upcase
      vputs "Extended UID: #{extended_uid}"
      standard_xmp.sub!(/(<rdf:Description) /, %{\\1 xmpNote:HasExtendedXMP="#{extended_uid}" })
    end
    vputs "StandardXMP: #{(standard_xmp || '').size}"
    vputs "ExtendedXMP: #{(extended_xmp || '').size}"
    parts << [0xFF, 0xE1]
    # Segment length = length field itself + NUL-terminated header + payload.
    parts << make_uint16_buffer(2 + XMP_HEADER.size + 1 + standard_xmp.size)
    parts << [XMP_HEADER, 0]
    parts << standard_xmp
    vputs "Written standard XMP"
    if extended_xmp
      offset = 0
      while offset < extended_xmp.size
        # Chunk size bounded so the segment fits a 16-bit length field.
        chunk_size = [65383, extended_xmp.size - offset].min
        parts << [0xFF, 0xE1]
        parts << make_uint16_buffer(2 + XMP_EXTENSION_HEADER.size + 1 + 32 + 4 + 4 + chunk_size)
        parts << [XMP_EXTENSION_HEADER, 0]
        # 32-byte GUID, then uint32 total size and uint32 chunk offset.
        parts << [extended_uid, make_uint32_buffer([extended_xmp.size, offset])]
        parts << extended_xmp[offset, chunk_size]
        vputs "Written extended XMP chunk #{offset} #{chunk_size}b of #{extended_xmp.size}"
        offset += chunk_size
      end
    end
    parts.flatten
  end

  # Pack value(s) as big-endian unsigned 16-bit integers, returned as a
  # flat array of byte values.
  def make_uint16_buffer(array)
    buffer = []
    [array].flatten.each_with_index do |value, i|
      buffer << [value].pack("n").unpack("C*")
    end
    buffer.flatten
  end

  # Pack value(s) as big-endian unsigned 32-bit integers, returned as a
  # flat array of byte values.
  def make_uint32_buffer(array)
    buffer = []
    [array].flatten.each_with_index do |value, i|
      buffer << [value].pack("N").unpack("C*")
    end
    buffer.flatten
  end
end
# Check existence of input files. Use abort so the message goes to
# stderr and the process exits with a nonzero status — plain `exit`
# reported success (status 0) to calling scripts even on failure.
unless File.exist?(options.original_path)
  abort "Could not find original image file"
end
unless File.exist?(options.depth_path)
  abort "Could not find depth image file"
end
encoder = GDepthEncoder.new

# Optionally keep a ".bak" copy of the original image before touching it.
if options.backup
  vputs "Making backup of original image..."
  FileUtils.cp(options.original_path, "#{options.original_path}.bak", verbose: options.verbose)
end
vputs "---Encoding---"

# Read both inputs in binary mode — the original "r"/"w" text modes
# corrupt JPEG/PNG bytes on platforms that translate line endings — and
# use the block form of File.open so the original image handle is
# closed (it previously leaked until process exit).
depthmap_data = Base64.strict_encode64(File.binread(options.depth_path))
encoded_data = File.open(options.original_path, "rb") do |original_image|
  encoder.encode_depthmap(original_image,
                          data: depthmap_data,
                          min_depth: 0.4,
                          max_depth: 11)
end

# Write final image (defaults to overwriting the original in place).
output_path = options.output_path || options.original_path
bytes = File.binwrite(output_path, encoded_data)
puts "Written #{bytes} bytes"
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment