Skip to content
Commits on Source (6)
Metadata-Version: 1.2
Name: PDAL
Version: 2.1.8
Version: 2.2.0
Summary: Point cloud data processing
Home-page: http://pdal.io
Author: Howard Butler
......@@ -60,6 +60,9 @@ Description: ===================================================================
.. image:: https://travis-ci.org/PDAL/python.svg?branch=master
:target: https://travis-ci.org/PDAL/python
.. image:: https://ci.appveyor.com/api/projects/status/of4kecyahpo8892d
:target: https://ci.appveyor.com/project/hobu/python/
Requirements
================================================================================
......@@ -93,5 +96,5 @@ Classifier: Operating System :: OS Independent
Classifier: Programming Language :: Python :: 2.7
Classifier: Programming Language :: Python :: 3
Classifier: Topic :: Scientific/Engineering :: GIS
Requires: Python (>=2.7)
Requires: Python (>=3.0)
Requires: Numpy
......@@ -50,6 +50,9 @@ sorts it by the ``X`` dimension:
.. image:: https://travis-ci.org/PDAL/python.svg?branch=master
:target: https://travis-ci.org/PDAL/python
.. image:: https://ci.appveyor.com/api/projects/status/of4kecyahpo8892d
:target: https://ci.appveyor.com/project/hobu/python/
Requirements
================================================================================
......
2.1.8
\ No newline at end of file
2.2.0
\ No newline at end of file
python-pdal (2.2.0+ds-1) unstable; urgency=medium
* New upstream release.
* Update copyright years for copyright holders.
* Refresh patches.
-- Bas Couwenberg <sebastic@debian.org> Sat, 07 Sep 2019 08:13:12 +0200
python-pdal (2.1.8+ds-3) unstable; urgency=medium
* Add filenamemangle to distinguish it from pdal releases.
......
......@@ -7,9 +7,8 @@ Comment: The upstream sources are repacked to excluded the .egg-info
Files-Excluded: PDAL.egg-info/*
Files: *
Copyright: 2015, Hobu, Inc. <howard@hobu.co>
Copyright: 2015, 2019, Hobu, Inc. <howard@hobu.co>
2016, 2018, Howard Butler <howard@hobu.co>
2011, Michael P. Gerlek <mpg@flaxen.com>
License: BSD-3-Clause
Files: setup.py
......
Description: Don't append library in clean target.
Description: Fix clean target.
Author: Bas Couwenberg <sebastic@debian.org>
Forwarded: https://github.com/PDAL/python/pull/24
Applied-Upstream: https://github.com/PDAL/python/commit/b8b192925814828cabdb4e527697b77c30edd943
Forwarded: https://github.com/PDAL/python/pull/32
--- a/setup.py
+++ b/setup.py
@@ -157,7 +157,7 @@ if DEBUG:
@@ -156,7 +156,7 @@ if DEBUG:
if os.name != 'nt':
extra_compile_args += ['-g','-O0']
# readers.numpy doesn't exist until PDAL 1.8
-if PDALVERSION >= Version('1.8'):
+if PDALVERSION is not None and PDALVERSION >= Version('1.8'):
libraries.append('pdal_plugin_reader_numpy')
-if PDALVERSION < Version('2.0.0'):
+if PDALVERSION is not None and PDALVERSION < Version('2.0.0'):
raise Exception("PDAL version '%s' is not compatible with PDAL Python library version '%s'"%(PDALVERSION, module_version))
if os.name in ['nt']:
/******************************************************************************
* Copyright (c) 2019, Hobu Inc. (info@hobu.co)
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following
* conditions are met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided
* with the distribution.
* * Neither the name of Hobu, Inc. or Flaxen Geo Consulting nor the
* names of its contributors may be used to endorse or promote
* products derived from this software without specific prior
* written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
* COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
* OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
* AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
* OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY
* OF SUCH DAMAGE.
****************************************************************************/
#include "PyArray.hpp"
#include <pdal/io/MemoryViewReader.hpp>
#include <numpy/arrayobject.h>
namespace pdal
{
namespace python
{
namespace
{
// Map a numpy type number (NPY_TYPES) to the corresponding PDAL
// dimension type.  Returns Type::None for numpy types PDAL can't
// represent (strings, objects, complex, ...) so callers can report
// a useful error.
Dimension::Type pdalType(int t)
{
    using namespace Dimension;

    switch (t)
    {
    case NPY_FLOAT32:
        return Type::Float;
    case NPY_FLOAT64:
        return Type::Double;
    case NPY_INT8:
        return Type::Signed8;
    case NPY_INT16:
        return Type::Signed16;
    case NPY_INT32:
        return Type::Signed32;
    case NPY_INT64:
        return Type::Signed64;
    case NPY_UINT8:
        return Type::Unsigned8;
    case NPY_UINT16:
        return Type::Unsigned16;
    case NPY_UINT32:
        return Type::Unsigned32;
    case NPY_UINT64:
        return Type::Unsigned64;
    default:
        return Type::None;
    }
    // NOTE: the trailing assert(0)/return that used to follow the switch
    // was unreachable (every case, including default, returns) and has
    // been removed.
}
// Return the str() representation of an arbitrary Python object.
// Throws pdal_error if the object can't be stringified or isn't
// convertible to UTF-8.
std::string toString(PyObject *pname)
{
    // PyObject_Str returns a NEW reference that we must release.
    PyObject* r = PyObject_Str(pname);
    if (!r)
        throw pdal_error("couldn't make string representation value");
    Py_ssize_t size;
    const char *data = PyUnicode_AsUTF8AndSize(r, &size);
    if (!data)
    {
        // Conversion failed (e.g. lone surrogates); don't leak 'r'.
        Py_DECREF(r);
        throw pdal_error("couldn't make string representation value");
    }
    // Copy out (using the reported size, so embedded NULs survive)
    // before dropping our reference: 'data' points into r's buffer.
    std::string s(data, static_cast<size_t>(size));
    Py_DECREF(r);
    return s;
}
} // unnamed namespace
// Construct an empty Array (no wrapped numpy object yet), making sure
// the numpy C API has been initialized first.
Array::Array() : m_array(nullptr)
{
    const int status = _import_array();
    if (status < 0)
        throw pdal_error("Could not import numpy.core.multiarray.");
}
// Wrap an existing numpy array so PDAL can read from it.  Takes a new
// reference to 'array' (released in the destructor) and extracts the
// field layout / shape PDAL needs.
Array::Array(PyArrayObject* array) : m_array(array), m_rowMajor(true)
{
    if (_import_array() < 0)
        throw pdal_error("Could not import numpy.core.multiarray.");

    Py_XINCREF(array);

    PyArray_Descr *dtype = PyArray_DTYPE(m_array);
    npy_intp ndims = PyArray_NDIM(m_array);
    npy_intp *shape = PyArray_SHAPE(m_array);
    // Structured dtypes carry a fields dict; plain dtypes have Py_None.
    int numFields = (dtype->fields == Py_None) ?
        0 :
        static_cast<int>(PyDict_Size(dtype->fields));

    // Bitmask of which of X (1), Y (2), Z (4) appear as named fields.
    int xyz = 0;
    if (numFields == 0)
    {
        // Unstructured arrays must be 3-D; the single value per cell is
        // exposed to PDAL as the Intensity dimension at offset 0.
        if (ndims != 3)
            throw pdal_error("Array without fields must have 3 dimensions.");
        m_fields.push_back({"Intensity", pdalType(dtype->type_num), 0});
    }
    else
    {
        PyObject *names_dict = dtype->fields;
        PyObject *names = PyDict_Keys(names_dict);
        PyObject *values = PyDict_Values(names_dict);
        if (!names || !values)
            throw pdal_error("Bad field specification in numpy array.");

        for (int i = 0; i < numFields; ++i)
        {
            std::string name = toString(PyList_GetItem(names, i));
            if (name == "X")
                xyz |= 1;
            else if (name == "Y")
                xyz |= 2;
            else if (name == "Z")
                xyz |= 4;

            // Each dtype.fields value is a (descr, offset[, title]) tuple.
            PyObject *tup = PyList_GetItem(values, i);

            // Get offset.
            size_t offset = PyLong_AsLong(PySequence_Fast_GET_ITEM(tup, 1));

            // Get type.
            PyArray_Descr *descriptor =
                (PyArray_Descr *)PySequence_Fast_GET_ITEM(tup, 0);
            Dimension::Type type = pdalType(descriptor->type_num);
            if (type == Dimension::Type::None)
                throw pdal_error("Incompatible type for field '" + name + "'.");

            m_fields.push_back({name, type, offset});
        }
        // Either all three coordinates are named fields (xyz == 7) or
        // none are (xyz == 0); a partial set is ambiguous.
        if (xyz != 0 && xyz != 7)
            throw pdal_error("Array fields must contain all or none "
                "of X, Y and Z");
        if (xyz == 0 && ndims != 3)
            throw pdal_error("Array without named X/Y/Z fields "
                "must have three dimensions.");
    }
    // Without explicit X/Y/Z fields, the 3-D array shape supplies the
    // coordinates.
    if (xyz == 0)
        m_shape = { (size_t)shape[0], (size_t)shape[1], (size_t)shape[2] };

    m_rowMajor = !(PyArray_FLAGS(m_array) & NPY_ARRAY_F_CONTIGUOUS);
}
// Release our reference to the wrapped numpy array, if any.
Array::~Array()
{
    PyObject *obj = (PyObject *)m_array;
    if (obj)
        Py_XDECREF(obj);
}
// Replace the wrapped array with a freshly allocated 1-D structured
// numpy array holding the packed points of 'view' (one record per
// point, fields per buildNumpyDescription()).
void Array::update(PointViewPtr view)
{
    if (m_array)
        Py_XDECREF((PyObject *)m_array);
    m_array = nullptr; // Just in case of an exception.

    Dimension::IdList dims = view->dims();
    npy_intp size = view->size();

    PyObject *dtype_dict = (PyObject*)buildNumpyDescription(view);
    if (!dtype_dict)
        throw pdal_error("Unable to build numpy dtype "
            "description dictionary");

    PyArray_Descr *dtype = nullptr;
    if (PyArray_DescrConverter(dtype_dict, &dtype) == NPY_FAIL)
        throw pdal_error("Unable to build numpy dtype");
    Py_XDECREF(dtype_dict);

    // This is a 1 x size array.
    m_array = (PyArrayObject *)PyArray_NewFromDescr(&PyArray_Type, dtype,
        1, &size, 0, nullptr, NPY_ARRAY_CARRAY, nullptr);

    // copy the data
    DimTypeList types = view->dimTypes();
    for (PointId idx = 0; idx < view->size(); idx++)
    {
        // GETPTR1 yields the address of record 'idx'; PDAL packs the
        // point's dimensions directly into it.
        char *p = (char *)PyArray_GETPTR1(m_array, idx);
        view->getPackedPoint(types, idx, p);
    }
}
// Return the wrapped numpy array without touching its reference count.
//ABELL - Who's responsible for incrementing the ref count?
// NOTE(review): callers that keep the pointer beyond this Array's
// lifetime must Py_INCREF it themselves — confirm call sites.
PyArrayObject *Array::getPythonArray() const
{
    return m_array;
}
// Build a numpy dtype description dictionary for the dimensions of
// 'view', e.g.:
//
// {'formats': ['f8', 'f8', 'f8', 'u2', 'u1', 'u1', 'u1', 'u1', 'u1',
//              'f4', 'u1', 'u2', 'f8', 'u2', 'u2', 'u2'],
//  'names': ['X', 'Y', 'Z', 'Intensity', 'ReturnNumber',
//            'NumberOfReturns', 'ScanDirectionFlag', 'EdgeOfFlightLine',
//            'Classification', 'ScanAngleRank', 'UserData',
//            'PointSourceId', 'GpsTime', 'Red', 'Green', 'Blue']}
//
// Returns a NEW reference to the dictionary; the caller owns it.
PyObject* Array::buildNumpyDescription(PointViewPtr view) const
{
    Dimension::IdList dims = view->dims();

    PyObject* dict = PyDict_New();
    PyObject* formats = PyList_New(dims.size());
    PyObject* titles = PyList_New(dims.size());

    for (size_t i = 0; i < dims.size(); ++i)
    {
        Dimension::Id id = dims[i];
        Dimension::Type t = view->dimType(id);
        npy_intp stride = view->dimSize(id);
        std::string name = view->dimName(id);

        // Map the PDAL base type to a numpy kind character.
        std::string kind("i");
        Dimension::BaseType b = Dimension::base(t);
        if (b == Dimension::BaseType::Unsigned)
            kind = "u";
        else if (b == Dimension::BaseType::Signed)
            kind = "i";
        else if (b == Dimension::BaseType::Floating)
            kind = "f";
        else
            throw pdal_error("Unable to map kind '" + kind +
                "' to PDAL dimension type");

        // Format string is kind + byte width, e.g. "f8", "u2".
        std::stringstream oss;
        oss << kind << stride;
        PyObject* pyTitle = PyUnicode_FromString(name.c_str());
        PyObject* pyFormat = PyUnicode_FromString(oss.str().c_str());

        // PyList_SetItem steals the references to pyTitle/pyFormat.
        PyList_SetItem(titles, i, pyTitle);
        PyList_SetItem(formats, i, pyFormat);
    }
    PyDict_SetItemString(dict, "names", titles);
    PyDict_SetItemString(dict, "formats", formats);

    // PyDict_SetItemString adds its own references; drop ours so the
    // lists aren't leaked.  (The original code also built an unused
    // 'sizes' list that was never attached to the dict and leaked; it
    // has been removed.)
    Py_XDECREF(titles);
    Py_XDECREF(formats);

    return dict;
}
// True when the wrapped array is C-ordered (set in the constructor from
// the array's contiguity flags).
bool Array::rowMajor() const
{
    return m_rowMajor;
}
// The 3-D shape captured at construction; only populated for arrays
// without named X/Y/Z fields.
Array::Shape Array::shape() const
{
    return m_shape;
}
// Field descriptions (name, PDAL type, byte offset) derived from the
// array's dtype at construction.
const Array::Fields& Array::fields() const
{
    return m_fields;
}
// Create a new iterator over this array.  The Array retains ownership;
// all iterators are destroyed along with the Array.
ArrayIter& Array::iterator()
{
    std::unique_ptr<ArrayIter> iter(new ArrayIter(*this));
    ArrayIter& ref = *iter;
    m_iterators.push_back(std::move(iter));
    return ref;
}
// Build a numpy iterator for sequential, read-only traversal of
// 'array' in its native memory order.
ArrayIter::ArrayIter(Array& array)
{
    m_iter = NpyIter_New(array.getPythonArray(),
        NPY_ITER_EXTERNAL_LOOP | NPY_ITER_READONLY | NPY_ITER_REFS_OK,
        NPY_KEEPORDER, NPY_NO_CASTING, NULL);
    if (!m_iter)
        throw pdal_error("Unable to create numpy iterator.");

    char *itererr;
    m_iterNext = NpyIter_GetIterNext(m_iter, &itererr);
    if (!m_iterNext)
    {
        // Clean up the iterator before reporting the failure.
        NpyIter_Deallocate(m_iter);
        throw pdal_error(std::string("Unable to create numpy iterator: ") +
            itererr);
    }
    // Pointers into the iterator's state; m_iterNext refreshes them for
    // each inner loop.
    m_data = NpyIter_GetDataPtrArray(m_iter);
    m_stride = NpyIter_GetInnerStrideArray(m_iter);
    m_size = NpyIter_GetInnerLoopSizePtr(m_iter);
    m_done = false;
}
// Release the numpy iterator allocated in the constructor.
ArrayIter::~ArrayIter()
{
    NpyIter_Deallocate(m_iter);
}
// Advance one element.  Within the current inner loop we step the data
// pointer by the stride; when the inner loop is exhausted, ask the
// iterator for the next one and mark completion if there is none.
ArrayIter& ArrayIter::operator++()
{
    if (m_done)
        return *this;

    if (--(*m_size))
        *m_data += *m_stride;
    else if (!m_iterNext(m_iter))
        m_done = true;
    return *this;
}
// True while there are elements left to consume.
ArrayIter::operator bool () const
{
    return !m_done;
}
// Raw pointer to the current element's bytes within the numpy buffer.
char * ArrayIter::operator * () const
{
    return *m_data;
}
} // namespace python
} // namespace pdal
/******************************************************************************
* Copyright (c) 2011, Michael P. Gerlek (mpg@flaxen.com)
* Copyright (c) 2019, Hobu Inc. (info@hobu.co)
*
* All rights reserved.
*
......@@ -13,7 +13,7 @@
* notice, this list of conditions and the following disclaimer in
* the documentation and/or other materials provided
* with the distribution.
* * Neither the name of Hobu, Inc. or Flaxen Geo Consulting nor the
* * Neither the name of Hobu, Inc. nor the
* names of its contributors may be used to endorse or promote
* products derived from this software without specific prior
* written permission.
......@@ -34,204 +34,69 @@
#pragma once
#include <pdal/PointView.hpp>
#include <algorithm>
#pragma warning(disable: 4127) // conditional expression is constant
#include <Python.h>
#undef toupper
#undef tolower
#undef isspace
#include <numpy/ndarraytypes.h>
#define NPY_NO_DEPRECATED_API NPY_1_7_API_VERSION
#include <numpy/arrayobject.h>
// forward declare PyObject so we don't need the python headers everywhere
// see: http://mail.python.org/pipermail/python-dev/2003-August/037601.html
#ifndef PyObject_HEAD
struct _object;
typedef _object PyObject;
#endif
#include <pdal/PointView.hpp>
#include <pdal/io/MemoryViewReader.hpp>
namespace pdal
{
namespace python
{
class ArrayIter;
class PDAL_DLL Array
{
public:
using Shape = std::array<size_t, 3>;
using Fields = std::vector<MemoryViewReader::Field>;
Array() : m_py_array(0), m_own_array(true)
{
#undef NUMPY_IMPORT_ARRAY_RETVAL
#define NUMPY_IMPORT_ARRAY_RETVAL
import_array();
}
Array(PyObject* array) : m_py_array(array), m_own_array(false)
{
#undef NUMPY_IMPORT_ARRAY_RETVAL
#define NUMPY_IMPORT_ARRAY_RETVAL
import_array();
if (!PyArray_Check(array))
throw pdal::pdal_error("pdal::python::Array constructor object is not a numpy array");
Py_XINCREF(array);
}
~Array()
{
cleanup();
}
inline void update(PointViewPtr view)
{
typedef std::unique_ptr<std::vector<uint8_t>> DataPtr;
cleanup();
int nd = 1;
Dimension::IdList dims = view->dims();
npy_intp mydims = view->size();
npy_intp* ndims = &mydims;
std::vector<npy_intp> strides(dims.size());
DataPtr pdata( new std::vector<uint8_t>(view->pointSize()* view->size(), 0));
PyArray_Descr *dtype = nullptr;
PyObject * dtype_dict = (PyObject*)buildNumpyDescription(view);
if (!dtype_dict)
throw pdal_error("Unable to build numpy dtype description dictionary");
int did_convert = PyArray_DescrConverter(dtype_dict, &dtype);
if (did_convert == NPY_FAIL)
throw pdal_error("Unable to build numpy dtype");
Py_XDECREF(dtype_dict);
#ifdef NPY_ARRAY_CARRAY
int flags = NPY_ARRAY_CARRAY;
#else
int flags = NPY_CARRAY;
#endif
uint8_t* sp = pdata.get()->data();
PyObject * pyArray = PyArray_NewFromDescr(&PyArray_Type,
dtype,
nd,
ndims,
0,
sp,
flags,
NULL);
// copy the data
uint8_t* p(sp);
DimTypeList types = view->dimTypes();
for (PointId idx = 0; idx < view->size(); idx++)
{
p = sp + (view->pointSize() * idx);
view->getPackedPoint(types, idx, (char*)p);
}
// Create an array for reading data from PDAL.
Array();
m_py_array = pyArray;
m_data_array = std::move(pdata);
}
// Create an array for writing data to PDAL.
Array(PyArrayObject* array);
inline PyObject* getPythonArray() const
{
return m_py_array;
}
~Array();
void update(PointViewPtr view);
PyArrayObject *getPythonArray() const;
bool rowMajor() const;
Shape shape() const;
const Fields& fields() const;
ArrayIter& iterator();
private:
inline PyObject* buildNumpyDescription(PointViewPtr view) const;
inline void cleanup()
{
PyObject* p = (PyObject*)(m_py_array);
if (m_own_array)
{
m_data_array.reset();
}
Py_XDECREF(p);
}
inline PyObject* buildNumpyDescription(PointViewPtr view) const
{
// Build up a numpy dtype dictionary
//
// {'formats': ['f8', 'f8', 'f8', 'u2', 'u1', 'u1', 'u1', 'u1', 'u1', 'f4', 'u1', 'u2', 'f8', 'u2', 'u2', 'u2'],
// 'names': ['X', 'Y', 'Z', 'Intensity', 'ReturnNumber', 'NumberOfReturns',
// 'ScanDirectionFlag', 'EdgeOfFlightLine', 'Classification',
// 'ScanAngleRank', 'UserData', 'PointSourceId', 'GpsTime', 'Red', 'Green',
// 'Blue']}
//
std::stringstream oss;
Dimension::IdList dims = view->dims();
PyObject* dict = PyDict_New();
PyObject* sizes = PyList_New(dims.size());
PyObject* formats = PyList_New(dims.size());
PyObject* titles = PyList_New(dims.size());
PyArrayObject* m_array;
Array& operator=(Array const& rhs);
Fields m_fields;
bool m_rowMajor;
Shape m_shape;
std::vector<std::unique_ptr<ArrayIter>> m_iterators;
};
for (Dimension::IdList::size_type i=0; i < dims.size(); ++i)
class ArrayIter
{
Dimension::Id id = (dims[i]);
Dimension::Type t = view->dimType(id);
npy_intp stride = view->dimSize(id);
std::string name = view->dimName(id);
std::string kind("i");
Dimension::BaseType b = Dimension::base(t);
if (b == Dimension::BaseType::Unsigned)
kind = "u";
else if (b == Dimension::BaseType::Signed)
kind = "i";
else if (b == Dimension::BaseType::Floating)
kind = "f";
else
{
std::stringstream o;
oss << "unable to map kind '" << kind <<"' to PDAL dimension type";
throw pdal::pdal_error(o.str());
}
oss << kind << stride;
PyObject* pySize = PyLong_FromLong(stride);
PyObject* pyTitle = PyUnicode_FromString(name.c_str());
PyObject* pyFormat = PyUnicode_FromString(oss.str().c_str());
PyList_SetItem(sizes, i, pySize);
PyList_SetItem(titles, i, pyTitle);
PyList_SetItem(formats, i, pyFormat);
oss.str("");
}
PyDict_SetItemString(dict, "names", titles);
PyDict_SetItemString(dict, "formats", formats);
// PyObject* obj = PyUnicode_AsASCIIString(PyObject_Str(dict));
// const char* s = PyBytes_AsString(obj);
// std::string output(s);
// std::cout << "array: " << output << std::endl;
return dict;
}
public:
ArrayIter(const ArrayIter&) = delete;
ArrayIter(Array& array);
~ArrayIter();
PyObject* m_py_array;
std::unique_ptr<std::vector<uint8_t> > m_data_array;
bool m_own_array;
ArrayIter& operator++();
operator bool () const;
char *operator * () const;
Array& operator=(Array const& rhs);
private:
NpyIter *m_iter;
NpyIter_IterNextFunc *m_iterNext;
char **m_data;
npy_intp *m_size;
npy_intp *m_stride;
bool m_done;
};
} // namespace python
......
......@@ -33,124 +33,125 @@
****************************************************************************/
#include "PyPipeline.hpp"
#ifdef PDAL_HAVE_LIBXML2
#include <pdal/XMLSchema.hpp>
#endif
#ifndef _WIN32
#include <dlfcn.h>
#endif
#include <Python.h>
#define NPY_NO_DEPRECATED_API NPY_1_7_API_VERSION
#include <numpy/arrayobject.h>
#include "PyArray.hpp"
#include <pdal/Stage.hpp>
#include <pdal/pdal_features.hpp>
#include <pdal/PipelineWriter.hpp>
#include <pdal/io/NumpyReader.hpp>
namespace libpdalpython
{
using namespace pdal::python;
#include "PyArray.hpp"
Pipeline::Pipeline(std::string const& json, std::vector<Array*> arrays)
namespace pdal
{
namespace python
{
// Create a pipeline for writing data to PDAL
Pipeline::Pipeline(std::string const& json, std::vector<Array*> arrays) :
m_executor(new PipelineExecutor(json))
{
#ifndef _WIN32
// See comment in alternate constructor below.
::dlopen("libpdal_base.so", RTLD_NOLOAD | RTLD_GLOBAL);
::dlopen("libpdal_plugin_reader_numpy.so", RTLD_NOLOAD | RTLD_GLOBAL);
#endif
#undef NUMPY_IMPORT_ARRAY_RETVAL
#define NUMPY_IMPORT_ARRAY_RETVAL
import_array();
m_executor = std::shared_ptr<pdal::PipelineExecutor>(new pdal::PipelineExecutor(json));
if (_import_array() < 0)
throw pdal_error("Could not import numpy.core.multiarray.");
pdal::PipelineManager& manager = m_executor->getManager();
PipelineManager& manager = m_executor->getManager();
std::stringstream strm(json);
manager.readPipeline(strm);
std::vector<Stage *> roots = manager.roots();
if (roots.size() != 1)
throw pdal_error("Filter pipeline must contain a single root stage.");
pdal::Stage *r = manager.getStage();
if (!r)
throw pdal::pdal_error("pipeline had no stages!");
#if PDAL_VERSION_MAJOR > 1 || PDAL_VERSION_MINOR >=8
int counter = 1;
for (auto array : arrays)
{
// Create numpy reader for each array
pdal::Options options;
std::stringstream tag;
tag << "readers_numpy" << counter;
pdal::StageCreationOptions opts { "", "readers.numpy", nullptr, options, tag.str()};
pdal::Stage& reader = manager.makeReader(opts);
// Options
pdal::NumpyReader* np_reader = dynamic_cast<pdal::NumpyReader*>(&reader);
if (!np_reader)
throw pdal::pdal_error("couldn't cast reader!");
Options options;
options.add("order", array->rowMajor() ?
MemoryViewReader::Order::RowMajor :
MemoryViewReader::Order::ColumnMajor);
options.add("shape", MemoryViewReader::Shape(array->shape()));
PyObject* parray = (PyObject*)array->getPythonArray();
if (!parray)
throw pdal::pdal_error("array was none!");
Stage& s = manager.makeReader("", "readers.memoryview", options);
MemoryViewReader& r = dynamic_cast<MemoryViewReader &>(s);
for (auto f : array->fields())
r.pushField(f);
ArrayIter& iter = array->iterator();
auto incrementer = [&iter](PointId id) -> char *
{
if (! iter)
return nullptr;
np_reader->setArray(parray);
char *c = *iter;
++iter;
return c;
};
r->setInput(reader);
counter++;
r.setIncrementer(incrementer);
PyObject* parray = (PyObject*)array->getPythonArray();
if (!parray)
throw pdal_error("array was none!");
roots[0]->setInput(r);
}
#endif
manager.validateStageOptions();
}
Pipeline::Pipeline(std::string const& json)
// Create a pipeline for reading data from PDAL
Pipeline::Pipeline(std::string const& json) :
m_executor(new PipelineExecutor(json))
{
// Make the symbols in pdal_base global so that they're accessible
// to PDAL plugins. Python dlopen's this extension with RTLD_LOCAL,
// which means that without this, symbols in libpdal_base aren't available
// for resolution of symbols on future runtime linking. This is an issue
// on Apline and other Linux variants that doesn't use UNIQUE symbols
// for C++ template statics. only
// on Alpine and other Linux variants that don't use UNIQUE symbols
// for C++ template statics only. Without this, you end up with multiple
// copies of template statics.
#ifndef _WIN32
::dlopen("libpdal_base.so", RTLD_NOLOAD | RTLD_GLOBAL);
#endif
#undef NUMPY_IMPORT_ARRAY_RETVAL
#define NUMPY_IMPORT_ARRAY_RETVAL
import_array();
m_executor = std::shared_ptr<pdal::PipelineExecutor>(new pdal::PipelineExecutor(json));
if (_import_array() < 0)
throw pdal_error("Could not import numpy.core.multiarray.");
}
Pipeline::~Pipeline()
{
}
{}
void Pipeline::setLogLevel(int level)
{
m_executor->setLogLevel(level);
}
int Pipeline::getLogLevel() const
{
return static_cast<int>(m_executor->getLogLevel());
}
int64_t Pipeline::execute()
{
int64_t count = m_executor->execute();
return count;
return m_executor->execute();
}
bool Pipeline::validate()
{
return m_executor->validate();
auto res = m_executor->validate();
return res;
}
std::vector<Array *> Pipeline::getArrays() const
......@@ -160,16 +161,18 @@ std::vector<Array *> Pipeline::getArrays() const
if (!m_executor->executed())
throw python_error("call execute() before fetching arrays");
const pdal::PointViewSet& pvset = m_executor->getManagerConst().views();
const PointViewSet& pvset = m_executor->getManagerConst().views();
for (auto i: pvset)
{
//ABELL - Leak?
Array *array = new pdal::python::Array;
Array *array = new python::Array;
array->update(i);
output.push_back(array);
}
return output;
}
} //namespace libpdalpython
} // namespace python
} // namespace pdal
......@@ -43,20 +43,12 @@
#include <sstream>
#include <memory>
#undef toupper
#undef tolower
#undef isspace
namespace pdal
{
namespace python
{
class Array;
}
}
namespace libpdalpython
{
class Array;
class python_error : public std::runtime_error
{
......@@ -65,10 +57,12 @@ public:
{}
};
class Pipeline {
class Pipeline
{
public:
Pipeline(std::string const& json);
Pipeline(std::string const& json, std::vector<pdal::python::Array*> arrays);
Pipeline(std::string const& json,
std::vector<pdal::python::Array*> arrays);
~Pipeline();
int64_t execute();
......@@ -98,4 +92,5 @@ private:
std::shared_ptr<pdal::PipelineExecutor> m_executor;
};
}
} // namespace python
} // namespace pdal
__version__='2.1.8'
__version__='2.2.0'
from .pipeline import Pipeline
from .array import Array
......
This diff is collapsed.
......@@ -23,7 +23,6 @@ cdef extern from "pdal/pdal_config.hpp" namespace "pdal::Config":
def getVersionString():
return versionString()
def getVersionMajor():
return versionMajor()
def getVersionMinor():
......@@ -39,10 +38,10 @@ def getPluginInstallPath():
cdef extern from "PyArray.hpp" namespace "pdal::python":
cdef cppclass Array:
Array(object) except +
Array(np.ndarray) except +
void *getPythonArray() except+
cdef extern from "PyPipeline.hpp" namespace "libpdalpython":
cdef extern from "PyPipeline.hpp" namespace "pdal::python":
cdef cppclass Pipeline:
Pipeline(const char* ) except +
Pipeline(const char*, vector[Array*]& ) except +
......@@ -56,11 +55,9 @@ cdef extern from "PyPipeline.hpp" namespace "libpdalpython":
int getLogLevel()
void setLogLevel(int)
cdef class PyArray:
cdef Array *thisptr
def __cinit__(self, object array):
def __cinit__(self, np.ndarray array):
self.thisptr = new Array(array)
def __dealloc__(self):
del self.thisptr
......@@ -109,24 +106,14 @@ cdef class PyPipeline:
cdef Array* a
if arrays is not None:
print("Looping arrays\n")
for array in arrays:
a = new Array(array)
c_arrays.push_back(a)
if PY_MAJOR_VERSION >= 3:
if arrays:
self.thisptr = new Pipeline(json.encode('UTF-8'), c_arrays)
else:
self.thisptr = new Pipeline(json.encode('UTF-8'))
else:
if arrays:
self.thisptr = new Pipeline(json, c_arrays)
else:
self.thisptr = new Pipeline(json)
# if arrays:
# self.thisptr = new Pipeline(json.encode('UTF-8'), c_arrays)
# else:
# self.thisptr = new Pipeline(json.encode('UTF-8'))
def __dealloc__(self):
del self.thisptr
......@@ -158,6 +145,7 @@ cdef class PyPipeline:
return json.loads(j)
property arrays:
def __get__(self):
v = self.thisptr.getArrays()
output = []
......@@ -171,6 +159,7 @@ cdef class PyPipeline:
inc(it)
return output
def execute(self):
if not self.thisptr:
raise Exception("C++ Pipeline object not constructed!")
......
......@@ -156,9 +156,9 @@ if DEBUG:
if os.name != 'nt':
extra_compile_args += ['-g','-O0']
# readers.numpy doesn't exist until PDAL 1.8
if PDALVERSION >= Version('1.8'):
libraries.append('pdal_plugin_reader_numpy')
if PDALVERSION < Version('2.0.0'):
raise Exception("PDAL version '%s' is not compatible with PDAL Python library version '%s'"%(PDALVERSION, module_version))
if os.name in ['nt']:
if os.environ.get('OSGEO4W_ROOT'):
......@@ -168,8 +168,6 @@ if os.name in ['nt']:
library_dirs = ['%s\Library\lib' % prefix]
libraries = ['pdalcpp','pdal_util','ws2_32']
if PDALVERSION >= Version('1.8'):
libraries.append('libpdal_plugin_reader_numpy')
extra_compile_args = ['/DNOMINMAX',]
......@@ -182,7 +180,7 @@ if 'linux' in sys.platform or 'linux2' in sys.platform or 'darwin' in sys.platfo
sources=['pdal/libpdalpython'+ext, "pdal/PyPipeline.cpp" ]
sources=['pdal/libpdalpython'+ext, "pdal/PyPipeline.cpp", "pdal/PyArray.cpp" ]
extensions = [DistutilsExtension("*",
sources,
include_dirs=include_dirs,
......@@ -192,12 +190,12 @@ extensions = [DistutilsExtension("*",
extra_link_args=extra_link_args,)]
if USE_CYTHON and "clean" not in sys.argv:
from Cython.Build import cythonize
extensions= cythonize(extensions, language="c++")
extensions= cythonize(extensions, compiler_directives={'language_level':3})
setup_args = dict(
name = 'PDAL',
version = str(module_version),
requires = ['Python (>=2.7)', 'Numpy'],
requires = ['Python (>=3.0)', 'Numpy'],
description = 'Point cloud data processing',
license = 'BSD',
keywords = 'point cloud spatial',
......
......@@ -31,14 +31,14 @@ class PDALTest(unittest.TestCase):
return output
class TestPipeline(PDALTest):
#
@unittest.skipUnless(os.path.exists(os.path.join(DATADIRECTORY, 'sort.json')),
"missing test data")
def test_construction(self):
"""Can we construct a PDAL pipeline"""
json = self.fetch_json('sort.json')
r = pdal.Pipeline(json)
#
@unittest.skipUnless(os.path.exists(os.path.join(DATADIRECTORY, 'sort.json')),
"missing test data")
def test_execution(self):
......@@ -48,13 +48,13 @@ class TestPipeline(PDALTest):
r.validate()
r.execute()
self.assertGreater(len(r.pipeline), 200)
#
def test_validate(self):
"""Do we complain with bad pipelines"""
r = pdal.Pipeline(bad_json)
with self.assertRaises(RuntimeError):
r.validate()
#
@unittest.skipUnless(os.path.exists(os.path.join(DATADIRECTORY, 'sort.json')),
"missing test data")
def test_array(self):
......@@ -65,11 +65,11 @@ class TestPipeline(PDALTest):
r.execute()
arrays = r.arrays
self.assertEqual(len(arrays), 1)
#
a = arrays[0]
self.assertAlmostEqual(a[0][0], 635619.85, 7)
self.assertAlmostEqual(a[1064][2], 456.92, 7)
#
@unittest.skipUnless(os.path.exists(os.path.join(DATADIRECTORY, 'sort.json')),
"missing test data")
def test_metadata(self):
......@@ -82,8 +82,8 @@ class TestPipeline(PDALTest):
import json
j = json.loads(metadata)
self.assertEqual(j["metadata"]["readers.las"][0]["count"], 1065)
#
#
@unittest.skipUnless(os.path.exists(os.path.join(DATADIRECTORY, 'sort.json')),
"missing test data")
def test_no_execute(self):
......@@ -93,17 +93,17 @@ class TestPipeline(PDALTest):
with self.assertRaises(RuntimeError):
r.arrays
#
@unittest.skipUnless(os.path.exists(os.path.join(DATADIRECTORY, 'reproject.json')),
"missing test data")
def test_logging(self):
"""Can we fetch log output"""
json = self.fetch_json('reproject.json')
r = pdal.Pipeline(json)
r.loglevel = 8
r.validate()
count = r.execute()
self.assertEqual(count, 789)
self.assertEqual(r.log.split()[0], '(pypipeline')
# @unittest.skipUnless(os.path.exists(os.path.join(DATADIRECTORY, 'reproject.json')),
# "missing test data")
# def test_logging(self):
# """Can we fetch log output"""
# json = self.fetch_json('reproject.json')
# r = pdal.Pipeline(json)
# r.loglevel = 8
# r.validate()
# count = r.execute()
# self.assertEqual(count, 789)
# self.assertEqual(r.log.split()[0], '(pypipeline')
#
@unittest.skipUnless(os.path.exists(os.path.join(DATADIRECTORY, 'sort.json')),
"missing test data")
......@@ -114,7 +114,7 @@ class TestPipeline(PDALTest):
r.validate()
r.execute()
self.assertEqual(r.schema['schema']['dimensions'][0]['name'], 'X')
#
@unittest.skipUnless(os.path.exists(os.path.join(DATADIRECTORY, 'chip.json')),
"missing test data")
def test_merged_arrays(self):
......@@ -125,16 +125,17 @@ class TestPipeline(PDALTest):
r.execute()
arrays = r.arrays
self.assertEqual(len(arrays), 43)
#
class TestArrayLoad(PDALTest):
@unittest.skipUnless(os.path.exists(os.path.join(DATADIRECTORY, 'perlin.npy')),
"missing test data")
def test_merged_arrays(self):
"""Can we load data from a a list of arrays to PDAL"""
"""Can we load data from a list of arrays to PDAL"""
if Version(pdal.info.version) < Version('1.8'):
return True
data = np.load(os.path.join(DATADIRECTORY, 'perlin.npy'))
data = np.load(os.path.join(DATADIRECTORY, 'test3d.npy'))
arrays = [data, data, data]
......@@ -143,7 +144,7 @@ class TestArrayLoad(PDALTest):
"pipeline":[
{
"type":"filters.range",
"limits":"Intensity[0:0.10]"
"limits":"Intensity[100:300)"
}
]
}"""
......@@ -154,9 +155,9 @@ class TestArrayLoad(PDALTest):
arrays = p.arrays
self.assertEqual(len(arrays), 3)
data = arrays[0]
self.assertEqual(len(data), 1836)
self.assertEqual(sum([len(i) for i in arrays]), 3*1836)
for data in arrays:
self.assertEqual(len(data), 12)
self.assertEqual(data['Intensity'].sum(), 1926)
class TestDimensions(PDALTest):
def test_fetch_dimensions(self):
......