From f1fe73d1909a2448a004a88362a1a532d0d4f7c3 Mon Sep 17 00:00:00 2001 From: sanine Date: Sun, 12 Feb 2023 23:53:22 -0600 Subject: switch to tinyobj and nanovg from assimp and cairo --- libs/assimp/port/PyAssimp/pyassimp/__init__.py | 1 - libs/assimp/port/PyAssimp/pyassimp/core.py | 556 ---------- libs/assimp/port/PyAssimp/pyassimp/errors.py | 11 - libs/assimp/port/PyAssimp/pyassimp/formats.py | 41 - libs/assimp/port/PyAssimp/pyassimp/helper.py | 283 ----- libs/assimp/port/PyAssimp/pyassimp/material.py | 89 -- libs/assimp/port/PyAssimp/pyassimp/postprocess.py | 530 ---------- libs/assimp/port/PyAssimp/pyassimp/structs.py | 1135 --------------------- 8 files changed, 2646 deletions(-) delete mode 100644 libs/assimp/port/PyAssimp/pyassimp/__init__.py delete mode 100644 libs/assimp/port/PyAssimp/pyassimp/core.py delete mode 100644 libs/assimp/port/PyAssimp/pyassimp/errors.py delete mode 100644 libs/assimp/port/PyAssimp/pyassimp/formats.py delete mode 100644 libs/assimp/port/PyAssimp/pyassimp/helper.py delete mode 100644 libs/assimp/port/PyAssimp/pyassimp/material.py delete mode 100644 libs/assimp/port/PyAssimp/pyassimp/postprocess.py delete mode 100644 libs/assimp/port/PyAssimp/pyassimp/structs.py (limited to 'libs/assimp/port/PyAssimp/pyassimp') diff --git a/libs/assimp/port/PyAssimp/pyassimp/__init__.py b/libs/assimp/port/PyAssimp/pyassimp/__init__.py deleted file mode 100644 index bb67a43..0000000 --- a/libs/assimp/port/PyAssimp/pyassimp/__init__.py +++ /dev/null @@ -1 +0,0 @@ -from .core import * diff --git a/libs/assimp/port/PyAssimp/pyassimp/core.py b/libs/assimp/port/PyAssimp/pyassimp/core.py deleted file mode 100644 index 35ad882..0000000 --- a/libs/assimp/port/PyAssimp/pyassimp/core.py +++ /dev/null @@ -1,556 +0,0 @@ -""" -PyAssimp - -This is the main-module of PyAssimp. -""" - -import sys -if sys.version_info < (2,6): - raise RuntimeError('pyassimp: need python 2.6 or newer') - -# xrange was renamed range in Python 3 and the original range from Python 2 was removed. -# To keep compatibility with both Python 2 and 3, xrange is set to range for version 3.0 and up. -if sys.version_info >= (3,0): - xrange = range - - -try: - import numpy -except ImportError: - numpy = None -import logging -import ctypes -from contextlib import contextmanager -logger = logging.getLogger("pyassimp") -# attach default null handler to logger so it doesn't complain -# even if you don't attach another handler to logger -logger.addHandler(logging.NullHandler()) - -from . import structs -from . import helper -from . import postprocess -from .errors import AssimpError - -class AssimpLib(object): - """ - Assimp-Singleton - """ - load, load_mem, export, export_blob, release, dll = helper.search_library() -_assimp_lib = AssimpLib() - -def make_tuple(ai_obj, type = None): - res = None - - #notes: - # ai_obj._fields_ = [ ("attr", c_type), ... 
] - # getattr(ai_obj, e[0]).__class__ == float - - if isinstance(ai_obj, structs.Matrix4x4): - if numpy: - res = numpy.array([getattr(ai_obj, e[0]) for e in ai_obj._fields_]).reshape((4,4)) - #import pdb;pdb.set_trace() - else: - res = [getattr(ai_obj, e[0]) for e in ai_obj._fields_] - res = [res[i:i+4] for i in xrange(0,16,4)] - elif isinstance(ai_obj, structs.Matrix3x3): - if numpy: - res = numpy.array([getattr(ai_obj, e[0]) for e in ai_obj._fields_]).reshape((3,3)) - else: - res = [getattr(ai_obj, e[0]) for e in ai_obj._fields_] - res = [res[i:i+3] for i in xrange(0,9,3)] - else: - if numpy: - res = numpy.array([getattr(ai_obj, e[0]) for e in ai_obj._fields_]) - else: - res = [getattr(ai_obj, e[0]) for e in ai_obj._fields_] - - return res - -# Returns unicode object for Python 2, and str object for Python 3. -def _convert_assimp_string(assimp_string): - if sys.version_info >= (3, 0): - return str(assimp_string.data, errors='ignore') - else: - return unicode(assimp_string.data, errors='ignore') - -# It is faster and more correct to have an init function for each assimp class -def _init_face(aiFace): - aiFace.indices = [aiFace.mIndices[i] for i in range(aiFace.mNumIndices)] -assimp_struct_inits = { structs.Face : _init_face } - -def call_init(obj, caller = None): - if helper.hasattr_silent(obj,'contents'): #pointer - _init(obj.contents, obj, caller) - else: - _init(obj,parent=caller) - -def _is_init_type(obj): - - if obj and helper.hasattr_silent(obj,'contents'): #pointer - return _is_init_type(obj[0]) - # null-pointer case that arises when we reach a mesh attribute - # like mBitangents which use mNumVertices rather than mNumBitangents - # so it breaks the 'is iterable' check. - # Basically: - # FIXME! - elif not bool(obj): - return False - tname = obj.__class__.__name__ - return not (tname[:2] == 'c_' or tname == 'Structure' \ - or tname == 'POINTER') and not isinstance(obj, (int, str, bytes)) - -def _init(self, target = None, parent = None): - """ - Custom initialize() for C structs, adds safely accessible member functionality. - - :param target: set the object which receive the added methods. Useful when manipulating - pointers, to skip the intermediate 'contents' deferencing. - """ - if not target: - target = self - - dirself = dir(self) - for m in dirself: - - if m.startswith("_"): - continue - - if m.startswith('mNum'): - if 'm' + m[4:] in dirself: - continue # will be processed later on - else: - name = m[1:].lower() - - obj = getattr(self, m) - setattr(target, name, obj) - continue - - if m == 'mName': - target.name = str(_convert_assimp_string(self.mName)) - target.__class__.__repr__ = lambda x: str(x.__class__) + "(" + getattr(x, 'name','') + ")" - target.__class__.__str__ = lambda x: getattr(x, 'name', '') - continue - - name = m[1:].lower() - - obj = getattr(self, m) - - # Create tuples - if isinstance(obj, structs.assimp_structs_as_tuple): - setattr(target, name, make_tuple(obj)) - logger.debug(str(self) + ": Added array " + str(getattr(target, name)) + " as self." + name.lower()) - continue - - if m.startswith('m') and len(m) > 1 and m[1].upper() == m[1]: - - if name == "parent": - setattr(target, name, parent) - logger.debug("Added a parent as self." + name) - continue - - if helper.hasattr_silent(self, 'mNum' + m[1:]): - - length = getattr(self, 'mNum' + m[1:]) - - # -> special case: properties are - # stored as a dict. - if m == 'mProperties': - setattr(target, name, _get_properties(obj, length)) - continue - - - if not length: # empty! 
- setattr(target, name, []) - logger.debug(str(self) + ": " + name + " is an empty list.") - continue - - - try: - if obj._type_ in structs.assimp_structs_as_tuple: - if numpy: - setattr(target, name, numpy.array([make_tuple(obj[i]) for i in range(length)], dtype=numpy.float32)) - - logger.debug(str(self) + ": Added an array of numpy arrays (type "+ str(type(obj)) + ") as self." + name) - else: - setattr(target, name, [make_tuple(obj[i]) for i in range(length)]) - - logger.debug(str(self) + ": Added a list of lists (type "+ str(type(obj)) + ") as self." + name) - - else: - setattr(target, name, [obj[i] for i in range(length)]) #TODO: maybe not necessary to recreate an array? - - logger.debug(str(self) + ": Added list of " + str(obj) + " " + name + " as self." + name + " (type: " + str(type(obj)) + ")") - - # initialize array elements - try: - init = assimp_struct_inits[type(obj[0])] - except KeyError: - if _is_init_type(obj[0]): - for e in getattr(target, name): - call_init(e, target) - else: - for e in getattr(target, name): - init(e) - - - except IndexError: - logger.error("in " + str(self) +" : mismatch between mNum" + name + " and the actual amount of data in m" + name + ". This may be due to version mismatch between libassimp and pyassimp. Quitting now.") - sys.exit(1) - - except ValueError as e: - - logger.error("In " + str(self) + "->" + name + ": " + str(e) + ". Quitting now.") - if "setting an array element with a sequence" in str(e): - logger.error("Note that pyassimp does not currently " - "support meshes with mixed triangles " - "and quads. Try to load your mesh with" - " a post-processing to triangulate your" - " faces.") - raise e - - - - else: # starts with 'm' but not iterable - setattr(target, m, obj) - logger.debug("Added " + name + " as self." + name + " (type: " + str(type(obj)) + ")") - - if _is_init_type(obj): - call_init(obj, target) - - if isinstance(self, structs.Mesh): - _finalize_mesh(self, target) - - if isinstance(self, structs.Texture): - _finalize_texture(self, target) - - if isinstance(self, structs.Metadata): - _finalize_metadata(self, target) - - - return self - - -def pythonize_assimp(type, obj, scene): - """ This method modify the Assimp data structures - to make them easier to work with in Python. - - Supported operations: - - MESH: replace a list of mesh IDs by reference to these meshes - - ADDTRANSFORMATION: add a reference to an object's transformation taken from their associated node. - - :param type: the type of modification to operate (cf above) - :param obj: the input object to modify - :param scene: a reference to the whole scene - """ - - if type == "MESH": - meshes = [] - for i in obj: - meshes.append(scene.meshes[i]) - return meshes - - if type == "ADDTRANSFORMATION": - def getnode(node, name): - if node.name == name: return node - for child in node.children: - n = getnode(child, name) - if n: return n - - node = getnode(scene.rootnode, obj.name) - if not node: - raise AssimpError("Object " + str(obj) + " has no associated node!") - setattr(obj, "transformation", node.transformation) - -def recur_pythonize(node, scene): - ''' - Recursively call pythonize_assimp on - nodes tree to apply several post-processing to - pythonize the assimp datastructures. 
- ''' - node.meshes = pythonize_assimp("MESH", node.meshes, scene) - for mesh in node.meshes: - mesh.material = scene.materials[mesh.materialindex] - for cam in scene.cameras: - pythonize_assimp("ADDTRANSFORMATION", cam, scene) - for c in node.children: - recur_pythonize(c, scene) - -def release(scene): - ''' - Release resources of a loaded scene. - ''' - _assimp_lib.release(ctypes.pointer(scene)) - -@contextmanager -def load(filename, - file_type = None, - processing = postprocess.aiProcess_Triangulate): - ''' - Load a model into a scene. On failure throws AssimpError. - - Arguments - --------- - filename: Either a filename or a file object to load model from. - If a file object is passed, file_type MUST be specified - Otherwise Assimp has no idea which importer to use. - This is named 'filename' so as to not break legacy code. - processing: assimp postprocessing parameters. Verbose keywords are imported - from postprocessing, and the parameters can be combined bitwise to - generate the final processing value. Note that the default value will - triangulate quad faces. Example of generating other possible values: - processing = (pyassimp.postprocess.aiProcess_Triangulate | - pyassimp.postprocess.aiProcess_OptimizeMeshes) - file_type: string of file extension, such as 'stl' - - Returns - --------- - Scene object with model data - ''' - - if hasattr(filename, 'read'): - # This is the case where a file object has been passed to load. - # It is calling the following function: - # const aiScene* aiImportFileFromMemory(const char* pBuffer, - # unsigned int pLength, - # unsigned int pFlags, - # const char* pHint) - if file_type is None: - raise AssimpError('File type must be specified when passing file objects!') - data = filename.read() - model = _assimp_lib.load_mem(data, - len(data), - processing, - file_type) - else: - # a filename string has been passed - model = _assimp_lib.load(filename.encode(sys.getfilesystemencoding()), processing) - - if not model: - raise AssimpError('Could not import file!') - scene = _init(model.contents) - recur_pythonize(scene.rootnode, scene) - try: - yield scene - finally: - release(scene) - -def export(scene, - filename, - file_type = None, - processing = postprocess.aiProcess_Triangulate): - ''' - Export a scene. On failure throws AssimpError. - - Arguments - --------- - scene: scene to export. - filename: Filename that the scene should be exported to. - file_type: string of file exporter to use. For example "collada". - processing: assimp postprocessing parameters. Verbose keywords are imported - from postprocessing, and the parameters can be combined bitwise to - generate the final processing value. Note that the default value will - triangulate quad faces. Example of generating other possible values: - processing = (pyassimp.postprocess.aiProcess_Triangulate | - pyassimp.postprocess.aiProcess_OptimizeMeshes) - - ''' - - exportStatus = _assimp_lib.export(ctypes.pointer(scene), file_type.encode("ascii"), filename.encode(sys.getfilesystemencoding()), processing) - - if exportStatus != 0: - raise AssimpError('Could not export scene!') - -def export_blob(scene, - file_type = None, - processing = postprocess.aiProcess_Triangulate): - ''' - Export a scene and return a blob in the correct format. On failure throws AssimpError. - - Arguments - --------- - scene: scene to export. - file_type: string of file exporter to use. For example "collada". - processing: assimp postprocessing parameters. 
Verbose keywords are imported - from postprocessing, and the parameters can be combined bitwise to - generate the final processing value. Note that the default value will - triangulate quad faces. Example of generating other possible values: - processing = (pyassimp.postprocess.aiProcess_Triangulate | - pyassimp.postprocess.aiProcess_OptimizeMeshes) - Returns - --------- - Pointer to structs.ExportDataBlob - ''' - exportBlobPtr = _assimp_lib.export_blob(ctypes.pointer(scene), file_type.encode("ascii"), processing) - - if exportBlobPtr == 0: - raise AssimpError('Could not export scene to blob!') - return exportBlobPtr - -def _finalize_texture(tex, target): - setattr(target, "achformathint", tex.achFormatHint) - if numpy: - data = numpy.array([make_tuple(getattr(tex, "pcData")[i]) for i in range(tex.mWidth * tex.mHeight)]) - else: - data = [make_tuple(getattr(tex, "pcData")[i]) for i in range(tex.mWidth * tex.mHeight)] - setattr(target, "data", data) - -def _finalize_mesh(mesh, target): - """ Building of meshes is a bit specific. - - We override here the various datasets that can - not be process as regular fields. - - For instance, the length of the normals array is - mNumVertices (no mNumNormals is available) - """ - nb_vertices = getattr(mesh, "mNumVertices") - - def fill(name): - mAttr = getattr(mesh, name) - if numpy: - if mAttr: - data = numpy.array([make_tuple(getattr(mesh, name)[i]) for i in range(nb_vertices)], dtype=numpy.float32) - setattr(target, name[1:].lower(), data) - else: - setattr(target, name[1:].lower(), numpy.array([], dtype="float32")) - else: - if mAttr: - data = [make_tuple(getattr(mesh, name)[i]) for i in range(nb_vertices)] - setattr(target, name[1:].lower(), data) - else: - setattr(target, name[1:].lower(), []) - - def fillarray(name): - mAttr = getattr(mesh, name) - - data = [] - for index, mSubAttr in enumerate(mAttr): - if mSubAttr: - data.append([make_tuple(getattr(mesh, name)[index][i]) for i in range(nb_vertices)]) - - if numpy: - setattr(target, name[1:].lower(), numpy.array(data, dtype=numpy.float32)) - else: - setattr(target, name[1:].lower(), data) - - fill("mNormals") - fill("mTangents") - fill("mBitangents") - - fillarray("mColors") - fillarray("mTextureCoords") - - # prepare faces - if numpy: - faces = numpy.array([f.indices for f in target.faces], dtype=numpy.int32) - else: - faces = [f.indices for f in target.faces] - setattr(target, 'faces', faces) - -def _init_metadata_entry(entry): - entry.type = entry.mType - if entry.type == structs.MetadataEntry.AI_BOOL: - entry.data = ctypes.cast(entry.mData, ctypes.POINTER(ctypes.c_bool)).contents.value - elif entry.type == structs.MetadataEntry.AI_INT32: - entry.data = ctypes.cast(entry.mData, ctypes.POINTER(ctypes.c_int32)).contents.value - elif entry.type == structs.MetadataEntry.AI_UINT64: - entry.data = ctypes.cast(entry.mData, ctypes.POINTER(ctypes.c_uint64)).contents.value - elif entry.type == structs.MetadataEntry.AI_FLOAT: - entry.data = ctypes.cast(entry.mData, ctypes.POINTER(ctypes.c_float)).contents.value - elif entry.type == structs.MetadataEntry.AI_DOUBLE: - entry.data = ctypes.cast(entry.mData, ctypes.POINTER(ctypes.c_double)).contents.value - elif entry.type == structs.MetadataEntry.AI_AISTRING: - assimp_string = ctypes.cast(entry.mData, ctypes.POINTER(structs.String)).contents - entry.data = _convert_assimp_string(assimp_string) - elif entry.type == structs.MetadataEntry.AI_AIVECTOR3D: - assimp_vector = ctypes.cast(entry.mData, ctypes.POINTER(structs.Vector3D)).contents - entry.data = 
make_tuple(assimp_vector) - - return entry - -def _finalize_metadata(metadata, target): - """ Building the metadata object is a bit specific. - - Firstly, there are two separate arrays: one with metadata keys and one - with metadata values, and there are no corresponding mNum* attributes, - so the C arrays are not converted to Python arrays using the generic - code in the _init function. - - Secondly, a metadata entry value has to be cast according to declared - metadata entry type. - """ - length = metadata.mNumProperties - setattr(target, 'keys', [str(_convert_assimp_string(metadata.mKeys[i])) for i in range(length)]) - setattr(target, 'values', [_init_metadata_entry(metadata.mValues[i]) for i in range(length)]) - -class PropertyGetter(dict): - def __getitem__(self, key): - semantic = 0 - if isinstance(key, tuple): - key, semantic = key - - return dict.__getitem__(self, (key, semantic)) - - def keys(self): - for k in dict.keys(self): - yield k[0] - - def __iter__(self): - return self.keys() - - def items(self): - for k, v in dict.items(self): - yield k[0], v - - -def _get_properties(properties, length): - """ - Convenience Function to get the material properties as a dict - and values in a python format. - """ - result = {} - #read all properties - for p in [properties[i] for i in range(length)]: - #the name - p = p.contents - key = str(_convert_assimp_string(p.mKey)) - key = (key.split('.')[1], p.mSemantic) - - #the data - if p.mType == 1: - arr = ctypes.cast(p.mData, - ctypes.POINTER(ctypes.c_float * int(p.mDataLength/ctypes.sizeof(ctypes.c_float))) - ).contents - value = [x for x in arr] - elif p.mType == 3: #string can't be an array - value = _convert_assimp_string(ctypes.cast(p.mData, ctypes.POINTER(structs.MaterialPropertyString)).contents) - - elif p.mType == 4: - arr = ctypes.cast(p.mData, - ctypes.POINTER(ctypes.c_int * int(p.mDataLength/ctypes.sizeof(ctypes.c_int))) - ).contents - value = [x for x in arr] - else: - value = p.mData[:p.mDataLength] - - if len(value) == 1: - [value] = value - - result[key] = value - - return PropertyGetter(result) - -def decompose_matrix(matrix): - if not isinstance(matrix, structs.Matrix4x4): - raise AssimpError("pyassimp.decompose_matrix failed: Not a Matrix4x4!") - - scaling = structs.Vector3D() - rotation = structs.Quaternion() - position = structs.Vector3D() - - _assimp_lib.dll.aiDecomposeMatrix(ctypes.pointer(matrix), - ctypes.byref(scaling), - ctypes.byref(rotation), - ctypes.byref(position)) - return scaling._init(), rotation._init(), position._init() - diff --git a/libs/assimp/port/PyAssimp/pyassimp/errors.py b/libs/assimp/port/PyAssimp/pyassimp/errors.py deleted file mode 100644 index e017b51..0000000 --- a/libs/assimp/port/PyAssimp/pyassimp/errors.py +++ /dev/null @@ -1,11 +0,0 @@ -#-*- coding: UTF-8 -*- - -""" -All possible errors. -""" - -class AssimpError(BaseException): - """ - If an internal error occurs. 
- """ - pass diff --git a/libs/assimp/port/PyAssimp/pyassimp/formats.py b/libs/assimp/port/PyAssimp/pyassimp/formats.py deleted file mode 100644 index 5d454e5..0000000 --- a/libs/assimp/port/PyAssimp/pyassimp/formats.py +++ /dev/null @@ -1,41 +0,0 @@ -FORMATS = ["CSM", - "LWS", - "B3D", - "COB", - "PLY", - "IFC", - "OFF", - "SMD", - "IRRMESH", - "3D", - "DAE", - "MDL", - "HMP", - "TER", - "WRL", - "XML", - "NFF", - "AC", - "OBJ", - "3DS", - "STL", - "IRR", - "Q3O", - "Q3D", - "MS3D", - "Q3S", - "ZGL", - "MD2", - "X", - "BLEND", - "XGL", - "MD5MESH", - "MAX", - "LXO", - "DXF", - "BVH", - "LWO", - "NDO"] - -def available_formats(): - return FORMATS diff --git a/libs/assimp/port/PyAssimp/pyassimp/helper.py b/libs/assimp/port/PyAssimp/pyassimp/helper.py deleted file mode 100644 index 7c14f60..0000000 --- a/libs/assimp/port/PyAssimp/pyassimp/helper.py +++ /dev/null @@ -1,283 +0,0 @@ -#-*- coding: UTF-8 -*- - -""" -Some fancy helper functions. -""" - -import os -import ctypes -import operator - -from distutils.sysconfig import get_python_lib -import re -import sys - -try: import numpy -except ImportError: numpy = None - -import logging;logger = logging.getLogger("pyassimp") - -from .errors import AssimpError - -additional_dirs, ext_whitelist = [],[] - -# populate search directories and lists of allowed file extensions -# depending on the platform we're running on. -if os.name=='posix': - additional_dirs.append('./') - additional_dirs.append('/usr/lib/') - additional_dirs.append('/usr/lib/x86_64-linux-gnu/') - additional_dirs.append('/usr/lib/aarch64-linux-gnu/') - additional_dirs.append('/usr/local/lib/') - - if 'LD_LIBRARY_PATH' in os.environ: - additional_dirs.extend([item for item in os.environ['LD_LIBRARY_PATH'].split(':') if item]) - - # check if running from anaconda. - anaconda_keywords = ("conda", "continuum") - if any(k in sys.version.lower() for k in anaconda_keywords): - cur_path = get_python_lib() - pattern = re.compile('.*\/lib\/') - conda_lib = pattern.match(cur_path).group() - logger.info("Adding Anaconda lib path:"+ conda_lib) - additional_dirs.append(conda_lib) - - # note - this won't catch libassimp.so.N.n, but - # currently there's always a symlink called - # libassimp.so in /usr/local/lib. - ext_whitelist.append('.so') - # libassimp.dylib in /usr/local/lib - ext_whitelist.append('.dylib') - -elif os.name=='nt': - ext_whitelist.append('.dll') - path_dirs = os.environ['PATH'].split(';') - additional_dirs.extend(path_dirs) - -def vec2tuple(x): - """ Converts a VECTOR3D to a Tuple """ - return (x.x, x.y, x.z) - -def transform(vector3, matrix4x4): - """ Apply a transformation matrix on a 3D vector. 
- - :param vector3: array with 3 elements - :param matrix4x4: 4x4 matrix - """ - if numpy: - return numpy.dot(matrix4x4, numpy.append(vector3, 1.)) - else: - m0,m1,m2,m3 = matrix4x4; x,y,z = vector3 - return [ - m0[0]*x + m0[1]*y + m0[2]*z + m0[3], - m1[0]*x + m1[1]*y + m1[2]*z + m1[3], - m2[0]*x + m2[1]*y + m2[2]*z + m2[3], - m3[0]*x + m3[1]*y + m3[2]*z + m3[3] - ] - -def _inv(matrix4x4): - m0,m1,m2,m3 = matrix4x4 - - det = m0[3]*m1[2]*m2[1]*m3[0] - m0[2]*m1[3]*m2[1]*m3[0] - \ - m0[3]*m1[1]*m2[2]*m3[0] + m0[1]*m1[3]*m2[2]*m3[0] + \ - m0[2]*m1[1]*m2[3]*m3[0] - m0[1]*m1[2]*m2[3]*m3[0] - \ - m0[3]*m1[2]*m2[0]*m3[1] + m0[2]*m1[3]*m2[0]*m3[1] + \ - m0[3]*m1[0]*m2[2]*m3[1] - m0[0]*m1[3]*m2[2]*m3[1] - \ - m0[2]*m1[0]*m2[3]*m3[1] + m0[0]*m1[2]*m2[3]*m3[1] + \ - m0[3]*m1[1]*m2[0]*m3[2] - m0[1]*m1[3]*m2[0]*m3[2] - \ - m0[3]*m1[0]*m2[1]*m3[2] + m0[0]*m1[3]*m2[1]*m3[2] + \ - m0[1]*m1[0]*m2[3]*m3[2] - m0[0]*m1[1]*m2[3]*m3[2] - \ - m0[2]*m1[1]*m2[0]*m3[3] + m0[1]*m1[2]*m2[0]*m3[3] + \ - m0[2]*m1[0]*m2[1]*m3[3] - m0[0]*m1[2]*m2[1]*m3[3] - \ - m0[1]*m1[0]*m2[2]*m3[3] + m0[0]*m1[1]*m2[2]*m3[3] - - return[[( m1[2]*m2[3]*m3[1] - m1[3]*m2[2]*m3[1] + m1[3]*m2[1]*m3[2] - m1[1]*m2[3]*m3[2] - m1[2]*m2[1]*m3[3] + m1[1]*m2[2]*m3[3]) /det, - ( m0[3]*m2[2]*m3[1] - m0[2]*m2[3]*m3[1] - m0[3]*m2[1]*m3[2] + m0[1]*m2[3]*m3[2] + m0[2]*m2[1]*m3[3] - m0[1]*m2[2]*m3[3]) /det, - ( m0[2]*m1[3]*m3[1] - m0[3]*m1[2]*m3[1] + m0[3]*m1[1]*m3[2] - m0[1]*m1[3]*m3[2] - m0[2]*m1[1]*m3[3] + m0[1]*m1[2]*m3[3]) /det, - ( m0[3]*m1[2]*m2[1] - m0[2]*m1[3]*m2[1] - m0[3]*m1[1]*m2[2] + m0[1]*m1[3]*m2[2] + m0[2]*m1[1]*m2[3] - m0[1]*m1[2]*m2[3]) /det], - [( m1[3]*m2[2]*m3[0] - m1[2]*m2[3]*m3[0] - m1[3]*m2[0]*m3[2] + m1[0]*m2[3]*m3[2] + m1[2]*m2[0]*m3[3] - m1[0]*m2[2]*m3[3]) /det, - ( m0[2]*m2[3]*m3[0] - m0[3]*m2[2]*m3[0] + m0[3]*m2[0]*m3[2] - m0[0]*m2[3]*m3[2] - m0[2]*m2[0]*m3[3] + m0[0]*m2[2]*m3[3]) /det, - ( m0[3]*m1[2]*m3[0] - m0[2]*m1[3]*m3[0] - m0[3]*m1[0]*m3[2] + m0[0]*m1[3]*m3[2] + m0[2]*m1[0]*m3[3] - m0[0]*m1[2]*m3[3]) /det, - ( m0[2]*m1[3]*m2[0] - m0[3]*m1[2]*m2[0] + m0[3]*m1[0]*m2[2] - m0[0]*m1[3]*m2[2] - m0[2]*m1[0]*m2[3] + m0[0]*m1[2]*m2[3]) /det], - [( m1[1]*m2[3]*m3[0] - m1[3]*m2[1]*m3[0] + m1[3]*m2[0]*m3[1] - m1[0]*m2[3]*m3[1] - m1[1]*m2[0]*m3[3] + m1[0]*m2[1]*m3[3]) /det, - ( m0[3]*m2[1]*m3[0] - m0[1]*m2[3]*m3[0] - m0[3]*m2[0]*m3[1] + m0[0]*m2[3]*m3[1] + m0[1]*m2[0]*m3[3] - m0[0]*m2[1]*m3[3]) /det, - ( m0[1]*m1[3]*m3[0] - m0[3]*m1[1]*m3[0] + m0[3]*m1[0]*m3[1] - m0[0]*m1[3]*m3[1] - m0[1]*m1[0]*m3[3] + m0[0]*m1[1]*m3[3]) /det, - ( m0[3]*m1[1]*m2[0] - m0[1]*m1[3]*m2[0] - m0[3]*m1[0]*m2[1] + m0[0]*m1[3]*m2[1] + m0[1]*m1[0]*m2[3] - m0[0]*m1[1]*m2[3]) /det], - [( m1[2]*m2[1]*m3[0] - m1[1]*m2[2]*m3[0] - m1[2]*m2[0]*m3[1] + m1[0]*m2[2]*m3[1] + m1[1]*m2[0]*m3[2] - m1[0]*m2[1]*m3[2]) /det, - ( m0[1]*m2[2]*m3[0] - m0[2]*m2[1]*m3[0] + m0[2]*m2[0]*m3[1] - m0[0]*m2[2]*m3[1] - m0[1]*m2[0]*m3[2] + m0[0]*m2[1]*m3[2]) /det, - ( m0[2]*m1[1]*m3[0] - m0[1]*m1[2]*m3[0] - m0[2]*m1[0]*m3[1] + m0[0]*m1[2]*m3[1] + m0[1]*m1[0]*m3[2] - m0[0]*m1[1]*m3[2]) /det, - ( m0[1]*m1[2]*m2[0] - m0[2]*m1[1]*m2[0] + m0[2]*m1[0]*m2[1] - m0[0]*m1[2]*m2[1] - m0[1]*m1[0]*m2[2] + m0[0]*m1[1]*m2[2]) /det]] - -def get_bounding_box(scene): - bb_min = [1e10, 1e10, 1e10] # x,y,z - bb_max = [-1e10, -1e10, -1e10] # x,y,z - inv = numpy.linalg.inv if numpy else _inv - return get_bounding_box_for_node(scene.rootnode, bb_min, bb_max, inv(scene.rootnode.transformation)) - -def get_bounding_box_for_node(node, bb_min, bb_max, transformation): - - if numpy: - transformation = 
numpy.dot(transformation, node.transformation) - else: - t0,t1,t2,t3 = transformation - T0,T1,T2,T3 = node.transformation - transformation = [ [ - t0[0]*T0[0] + t0[1]*T1[0] + t0[2]*T2[0] + t0[3]*T3[0], - t0[0]*T0[1] + t0[1]*T1[1] + t0[2]*T2[1] + t0[3]*T3[1], - t0[0]*T0[2] + t0[1]*T1[2] + t0[2]*T2[2] + t0[3]*T3[2], - t0[0]*T0[3] + t0[1]*T1[3] + t0[2]*T2[3] + t0[3]*T3[3] - ],[ - t1[0]*T0[0] + t1[1]*T1[0] + t1[2]*T2[0] + t1[3]*T3[0], - t1[0]*T0[1] + t1[1]*T1[1] + t1[2]*T2[1] + t1[3]*T3[1], - t1[0]*T0[2] + t1[1]*T1[2] + t1[2]*T2[2] + t1[3]*T3[2], - t1[0]*T0[3] + t1[1]*T1[3] + t1[2]*T2[3] + t1[3]*T3[3] - ],[ - t2[0]*T0[0] + t2[1]*T1[0] + t2[2]*T2[0] + t2[3]*T3[0], - t2[0]*T0[1] + t2[1]*T1[1] + t2[2]*T2[1] + t2[3]*T3[1], - t2[0]*T0[2] + t2[1]*T1[2] + t2[2]*T2[2] + t2[3]*T3[2], - t2[0]*T0[3] + t2[1]*T1[3] + t2[2]*T2[3] + t2[3]*T3[3] - ],[ - t3[0]*T0[0] + t3[1]*T1[0] + t3[2]*T2[0] + t3[3]*T3[0], - t3[0]*T0[1] + t3[1]*T1[1] + t3[2]*T2[1] + t3[3]*T3[1], - t3[0]*T0[2] + t3[1]*T1[2] + t3[2]*T2[2] + t3[3]*T3[2], - t3[0]*T0[3] + t3[1]*T1[3] + t3[2]*T2[3] + t3[3]*T3[3] - ] ] - - for mesh in node.meshes: - for v in mesh.vertices: - v = transform(v, transformation) - bb_min[0] = min(bb_min[0], v[0]) - bb_min[1] = min(bb_min[1], v[1]) - bb_min[2] = min(bb_min[2], v[2]) - bb_max[0] = max(bb_max[0], v[0]) - bb_max[1] = max(bb_max[1], v[1]) - bb_max[2] = max(bb_max[2], v[2]) - - - for child in node.children: - bb_min, bb_max = get_bounding_box_for_node(child, bb_min, bb_max, transformation) - - return bb_min, bb_max - -def try_load_functions(library_path, dll): - ''' - Try to bind to aiImportFile and aiReleaseImport - - Arguments - --------- - library_path: path to current lib - dll: ctypes handle to library - - Returns - --------- - If unsuccessful: - None - If successful: - Tuple containing (library_path, - load from filename function, - load from memory function, - export to filename function, - export to blob function, - release function, - ctypes handle to assimp library) - ''' - - try: - load = dll.aiImportFile - release = dll.aiReleaseImport - load_mem = dll.aiImportFileFromMemory - export = dll.aiExportScene - export2blob = dll.aiExportSceneToBlob - except AttributeError: - #OK, this is a library, but it doesn't have the functions we need - return None - - # library found! - from .structs import Scene, ExportDataBlob - load.restype = ctypes.POINTER(Scene) - load_mem.restype = ctypes.POINTER(Scene) - export2blob.restype = ctypes.POINTER(ExportDataBlob) - return (library_path, load, load_mem, export, export2blob, release, dll) - -def search_library(): - ''' - Loads the assimp library. 
- Throws exception AssimpError if no library_path is found - - Returns: tuple, (load from filename function, - load from memory function, - export to filename function, - export to blob function, - release function, - dll) - ''' - #this path - folder = os.path.dirname(__file__) - - # silence 'DLL not found' message boxes on win - try: - ctypes.windll.kernel32.SetErrorMode(0x8007) - except AttributeError: - pass - - candidates = [] - # test every file - for curfolder in [folder]+additional_dirs: - if os.path.isdir(curfolder): - for filename in os.listdir(curfolder): - # our minimum requirement for candidates is that - # they should contain 'assimp' somewhere in - # their name - if filename.lower().find('assimp')==-1 : - continue - is_out=1 - for et in ext_whitelist: - if et in filename.lower(): - is_out=0 - break - if is_out: - continue - - library_path = os.path.join(curfolder, filename) - logger.debug('Try ' + library_path) - try: - dll = ctypes.cdll.LoadLibrary(library_path) - except Exception as e: - logger.warning(str(e)) - # OK, this except is evil. But different OSs will throw different - # errors. So just ignore any errors. - continue - # see if the functions we need are in the dll - loaded = try_load_functions(library_path, dll) - if loaded: candidates.append(loaded) - - if not candidates: - # no library found - raise AssimpError("assimp library not found") - else: - # get the newest library_path - candidates = map(lambda x: (os.lstat(x[0])[-2], x), candidates) - res = max(candidates, key=operator.itemgetter(0))[1] - logger.debug('Using assimp library located at ' + res[0]) - - # XXX: if there are 1000 dll/so files containing 'assimp' - # in their name, do we have all of them in our address - # space now until gc kicks in? - - # XXX: take version postfix of the .so on linux? - return res[1:] - -def hasattr_silent(object, name): - """ - Calls hasttr() with the given parameters and preserves the legacy (pre-Python 3.2) - functionality of silently catching exceptions. - - Returns the result of hasatter() or False if an exception was raised. - """ - - try: - if not object: - return False - return hasattr(object, name) - except AttributeError: - return False diff --git a/libs/assimp/port/PyAssimp/pyassimp/material.py b/libs/assimp/port/PyAssimp/pyassimp/material.py deleted file mode 100644 index a36e50a..0000000 --- a/libs/assimp/port/PyAssimp/pyassimp/material.py +++ /dev/null @@ -1,89 +0,0 @@ -# Dummy value. -# -# No texture, but the value to be used as 'texture semantic' -# (#aiMaterialProperty::mSemantic) for all material properties -# # not* related to textures. -# -aiTextureType_NONE = 0x0 - -# The texture is combined with the result of the diffuse -# lighting equation. -# -aiTextureType_DIFFUSE = 0x1 - -# The texture is combined with the result of the specular -# lighting equation. -# -aiTextureType_SPECULAR = 0x2 - -# The texture is combined with the result of the ambient -# lighting equation. -# -aiTextureType_AMBIENT = 0x3 - -# The texture is added to the result of the lighting -# calculation. It isn't influenced by incoming light. -# -aiTextureType_EMISSIVE = 0x4 - -# The texture is a height map. -# -# By convention, higher gray-scale values stand for -# higher elevations from the base height. -# -aiTextureType_HEIGHT = 0x5 - -# The texture is a (tangent space) normal-map. -# -# Again, there are several conventions for tangent-space -# normal maps. Assimp does (intentionally) not -# distinguish here. 
-# -aiTextureType_NORMALS = 0x6 - -# The texture defines the glossiness of the material. -# -# The glossiness is in fact the exponent of the specular -# (phong) lighting equation. Usually there is a conversion -# function defined to map the linear color values in the -# texture to a suitable exponent. Have fun. -# -aiTextureType_SHININESS = 0x7 - -# The texture defines per-pixel opacity. -# -# Usually 'white' means opaque and 'black' means -# 'transparency'. Or quite the opposite. Have fun. -# -aiTextureType_OPACITY = 0x8 - -# Displacement texture -# -# The exact purpose and format is application-dependent. -# Higher color values stand for higher vertex displacements. -# -aiTextureType_DISPLACEMENT = 0x9 - -# Lightmap texture (aka Ambient Occlusion) -# -# Both 'Lightmaps' and dedicated 'ambient occlusion maps' are -# covered by this material property. The texture contains a -# scaling value for the final color value of a pixel. Its -# intensity is not affected by incoming light. -# -aiTextureType_LIGHTMAP = 0xA - -# Reflection texture -# -# Contains the color of a perfect mirror reflection. -# Rarely used, almost never for real-time applications. -# -aiTextureType_REFLECTION = 0xB - -# Unknown texture -# -# A texture reference that does not match any of the definitions -# above is considered to be 'unknown'. It is still imported -# but is excluded from any further postprocessing. -# -aiTextureType_UNKNOWN = 0xC diff --git a/libs/assimp/port/PyAssimp/pyassimp/postprocess.py b/libs/assimp/port/PyAssimp/pyassimp/postprocess.py deleted file mode 100644 index 0c55d67..0000000 --- a/libs/assimp/port/PyAssimp/pyassimp/postprocess.py +++ /dev/null @@ -1,530 +0,0 @@ -#
Calculates the tangents and bitangents for the imported meshes. -# -# Does nothing if a mesh does not have normals. You might want this post -# processing step to be executed if you plan to use tangent space calculations -# such as normal mapping applied to the meshes. There's a config setting, -# #AI_CONFIG_PP_CT_MAX_SMOOTHING_ANGLE, which allows you to specify -# a maximum smoothing angle for the algorithm. However, usually you'll -# want to leave it at the default value. -# -aiProcess_CalcTangentSpace = 0x1 - -##
Identifies and joins identical vertex data sets within all -# imported meshes. -# -# After this step is run, each mesh contains unique vertices, -# so a vertex may be used by multiple faces. You usually want -# to use this post processing step. If your application deals with -# indexed geometry, this step is compulsory or you'll just waste rendering -# time. If this flag is not specified, no vertices are referenced by -# more than one face and no index buffer is required for rendering. -# -aiProcess_JoinIdenticalVertices = 0x2 - -##
Converts all the imported data to a left-handed coordinate space. -# -# By default the data is returned in a right-handed coordinate space (which -# OpenGL prefers). In this space, +X points to the right, -# +Z points towards the viewer, and +Y points upwards. In the DirectX -# coordinate space +X points to the right, +Y points upwards, and +Z points -# away from the viewer. -# -# You'll probably want to consider this flag if you use Direct3D for -# rendering. The #aiProcess_ConvertToLeftHanded flag supersedes this -# setting and bundles all conversions typically required for D3D-based -# applications. -# -aiProcess_MakeLeftHanded = 0x4 - -##
Triangulates all faces of all meshes. -# -# By default the imported mesh data might contain faces with more than 3 -# indices. For rendering you'll usually want all faces to be triangles. -# This post processing step splits up faces with more than 3 indices into -# triangles. Line and point primitives are #not# modified! If you want -# 'triangles only' with no other kinds of primitives, try the following -# solution: -#
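
[Editor's note: for reference, a minimal usage sketch of the pyassimp API removed by this commit, assembled only from the load() docstring in the deleted core.py and the postprocess flags documented above. It is not part of the patch; the model filename is hypothetical, and the flag combination merely mirrors the bitwise-OR example given in that docstring.]

    # Sketch: load a model with the removed pyassimp wrapper, assuming a local
    # libassimp is discoverable by helper.search_library().
    import pyassimp
    import pyassimp.postprocess as pp

    # Combine postprocess flags bitwise, as the load() docstring describes.
    flags = pp.aiProcess_Triangulate | pp.aiProcess_JoinIdenticalVertices

    # load() is a context manager in this version; the scene is released on exit.
    with pyassimp.load('model.obj', processing=flags) as scene:  # 'model.obj' is a placeholder path
        for mesh in scene.meshes:
            print(len(mesh.vertices), 'vertices,', len(mesh.faces), 'faces')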