Sort a temporary ordering and then replace m_index once the sort completes:

template <typename Compare>
void stableSort(Compare compare)
{
    // Sort a copy so m_index is only replaced after a successful sort.
    auto order = m_index;
    std::stable_sort(order.begin(), order.end(), compare);
    m_index = std::move(order);
}
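As a usage sketch, here is a self-contained toy (the View class and its int index are stand-ins for whatever container actually owns m_index; all names here are assumptions) showing the member driven by a lambda comparator:

#include <algorithm>
#include <iostream>
#include <vector>

// Toy stand-in for the container that owns m_index.
struct View
{
    std::vector<int> m_index{3, 1, 2};

    template <typename Compare>
    void stableSort(Compare compare)
    {
        auto order = m_index;
        std::stable_sort(order.begin(), order.end(), compare);
        m_index = std::move(order);
    }
};

int main()
{
    View v;
    v.stableSort([](int a, int b) { return a < b; });
    for (int i : v.m_index)
        std::cout << i << ' ';    // prints: 1 2 3
    std::cout << '\n';
}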
#include <iostream>
#include <map>
#include <nlohmann/json.hpp>
using json = nlohmann::json;
using StringMap = std::map<std::string, std::string>;

int main()
{
    // From null: [json.exception.type_error.302] type must be object, but is null
    try { StringMap m = json(); }
    catch (std::exception& e) { std::cout << "From null: " << e.what() << std::endl; }
    // From non-object: [json.exception.type_error.302] type must be object, but is number
    try { StringMap m = json(42); }
    catch (std::exception& e) { std::cout << "From number: " << e.what() << std::endl; }
}
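For contrast, converting from a value that actually is a JSON object succeeds; a minimal sketch, meant to be appended inside the same main() as above (same includes and aliases assumed):

    // An actual object converts cleanly.
    json j = {{"one", "1"}, {"two", "2"}};
    StringMap ok = j;                          // no exception: j is an object
    std::cout << "ok: " << ok.at("one") << std::endl;   // prints: 1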
Areas of interest from the recently published Geiger-mode LiDAR dataset collected by the USGS around Chicago. The data comes from the USGS 3D Elevation Program, with computation and data hosting provided in partnership with AWS Public Datasets, and was processed into EPT with the Entwine software by Hobu, Inc.
See the rest of the data (over 13 trillion points) at usgs.entwine.io.
Query and manipulate the data with PDAL's EPT reader, pointed at the EPT endpoint.
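For instance, here is a minimal PDAL pipeline sketch using readers.ept; the ept:// endpoint URL and the bounds values are placeholders, not the actual Chicago endpoint:

{
    "pipeline": [
        {
            "type": "readers.ept",
            "filename": "ept://https://usgs.entwine.io/data/your-resource",
            "bounds": "([0, 1000], [0, 1000])"
        },
        {
            "type": "writers.las",
            "filename": "subset.las"
        }
    ]
}

The bounds option restricts the query to a window of interest, so only the relevant portion of the remote EPT octree is fetched.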
https://entwine.io/data/red-rocks.laz and https://entwine.io/data/red-rocks.bpf contain the same point cloud data and the same SRS info: UTM zone 13N (EPSG:26913).
In this sample program, the following pipeline options are applied in order (a pipeline sketch follows the list):
readers.bpf.spatialreference set to EPSG:26912
readers.laz.spatialreference set to EPSG:26912
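A pipeline sketch of those overrides (readers.las handles the .laz file; the filters.merge stage, tags, and output filename are illustrative additions):

{
    "pipeline": [
        {
            "type": "readers.bpf",
            "filename": "https://entwine.io/data/red-rocks.bpf",
            "spatialreference": "EPSG:26912",
            "tag": "bpf"
        },
        {
            "type": "readers.las",
            "filename": "https://entwine.io/data/red-rocks.laz",
            "spatialreference": "EPSG:26912",
            "tag": "laz"
        },
        {
            "type": "filters.merge",
            "inputs": ["bpf", "laz"]
        },
        "merged.las"
    ]
}

Note that spatialreference only overrides the SRS metadata; it does not transform coordinates. Use filters.reprojection for an actual coordinate transformation.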
set lines=65
set columns=175
set nocompatible
set number
set expandtab
set tabstop=4
set shiftwidth=4
set hlsearch
set ignorecase
set smartcase
{
    "partitions": [
        {
            "defaults": {
                "hostname": "{service}.{region}.{dnsSuffix}",
                "protocols": [
                    "https"
                ],
                "signatureVersions": [
                    "v4"
                ]
            }
        }
    ]
}
#include <iostream>
#include <pdal/util/Utils.hpp>

int main()
{
    const auto bt(pdal::Utils::backtrace());
    if (bt.empty())
        std::cout << "No backtrace!" << std::endl;
    else
    {
        std::cout << "Backtrace:" << std::endl;
        // Print each captured frame on its own line.
        for (const auto& frame : bt)
            std::cout << "  " << frame << std::endl;
    }
    return 0;
}
Assuming ~/data will be the greyhound data directory, index the data:

docker run -it -v ~/data:/opt/data connormanning/entwine build \
    -i https://entwine.io/sample-data/red-rocks.laz \
    -o /opt/data/red-rocks

Create ~/data/config.json to allow the /write greyhound endpoint:
docker run -d \
    --name gdb \
    --security-opt seccomp=unconfined \
    --cap-add=SYS_PTRACE \
    -p 80:80 \
    -v /whatever/volume/mappings:/something \
    -v `pwd`:/opt/home \
    connormanning/greyhound:dev \
    -c "(nohup greyhound -c /opt/home/config.json > /var/log/greyhound.txt &) && tail -f /var/log/greyhound.txt"