Accelerating Maya -> PyOpenGL mesh IO

A few weeks back I posted about exporting a mesh from Maya and drawing it with Python.

Faster export

I recently got back to improving this a little. First I improved the Maya exporter's performance by porting it to a C++ plugin.
I don't want to go over all the details, because it is similar to the Python version posted before, and to explain this code properly I'd have to do a tutorial series on the Maya API in the first place! So here's a little dump of the Visual Studio project instead: plugin.vcxproj!

It is currently very basic and just exports all data it can find. I'm aware certain Maya models can crash some of the functions in Maya's MFnMesh (and related) classes, e.g. empty UV sets, UV sets with UVs for only some vertices/faces, geometry with holes crashing getTriangles, etc. It may be good to write a Python layer that does some validation on the mesh, as well as add flags to explicitly export (or ignore) certain attributes and UV/color sets.
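
A minimal sketch of what such a validation layer could look like, assuming maya.cmds is available; the checks below are illustrative of the idea, not the exporter's actual rules:

from maya import cmds

def validateMeshForExport(shape):
    problems = []
    # empty UV sets are one of the cases that can upset MFnMesh
    for uvSet in cmds.polyUVSet(shape, query=True, allUVSets=True) or []:
        if not cmds.polyEvaluate(shape, uvcoord=True, uvSetName=uvSet):
            problems.append('UV set "%s" on %s is empty' % (uvSet, shape))
    # non-manifold geometry is another common troublemaker
    if cmds.polyInfo(shape, nonManifoldEdges=True):
        problems.append('%s has non-manifold edges' % shape)
    return problems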

Faster import

Next I used the Python mmap (memory map) module to upload the mesh directly from disk to openGL, without first getting (and therefore boxing) the raw data in Python objects. Previously I was loading the binary into Python, which requires Python to copy the bytes into a Python object, which I then wrapped into a ctypes object, allocating and copying huge chunks of memory and constructing tons of Python objects. With mmap I can just cast the file mapping to a void* and hand it to glBufferData.

import os
import mmap
import ctypes
import contextlib


@contextlib.contextmanager
def memoryMap(fileDescriptor, sizeInBytes=0, offsetInBytes=0):
    if isinstance(fileDescriptor, basestring):
        fd = os.open(fileDescriptor, os.O_RDWR | os.O_BINARY)
        ownFd = True
    else:
        fd = fileDescriptor
        ownFd = False
    mfd = None
    try:
        mfd = mmap.mmap(fd, sizeInBytes, offset=offsetInBytes)
        yield MappedReader(mfd)
    finally:
        if mfd is not None:
            mfd.close()
        if ownFd:
            os.close(fd)


class MappedReader(object):
    def __init__(self, memoryMap):
        """Wrap a memory map into a stream that can stream through the file and map sections to ctypes."""
        self.__memoryMap = memoryMap
        self.__offset = 0

    def close(self):
        self.__memoryMap.close()

    def size(self):
        return self.__memoryMap.size()

    def seek(self, offset):
        assert offset >= 0 and offset < self.size(), 'Seek %s beyond file bounds [0, %s)' % (offset, self.size())
        self.__offset = offset

    def tell(self):
        return self.__offset

    def read(self, ctype):
        """
        Map a part of the file memory to a ctypes object (from_buffer, so ctype points directly to file memory).
        Object type is inferred from the given type.
        File cursor is moved to the next unread byte (seek = tell + sizeof(ctype)).
        """
        result = ctype.from_buffer(self.__memoryMap, self.__offset)
        self.__offset += ctypes.sizeof(result)
        return result

    def readValue(self, ctype):
        """
        Utility to read and directly return the data cast as a python value.
        """
        return self.read(ctype).value

The memoryMap context can take a file descriptor (acquired through os.open, which is different from the regular open) or a file path.
It then maps the entire file into memory instead of reading it (the descriptor is opened read-write because ctypes' from_buffer needs a writable buffer).
Finally it yields a MappedReader object, which is a little wrapper around the mmap object that assists in reading chunks as a certain ctype.
This way I can easily read some header data (previously I'd do this by reading n bytes and using struct.unpack) and then read the remainder (or a large chunk) of the file as a ctypes pointer.
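
A minimal usage sketch (cache.bin is just a hypothetical file that starts with a uint32 count followed by that many float32 values):

with memoryMap('cache.bin') as stream:
    count = stream.readValue(ctypes.c_uint32)
    values = stream.read(ctypes.c_float * count)  # a ctypes array pointing straight into the mapped file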

This code is a refactor from what I did in the tutorial mentioned at the top, but using mmap instead! It is mostly identical.
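
One note: the loader below references an AttributeLayout helper that I don't list here; a minimal stand-in would be a namedtuple with the four fields the loader uses:

from collections import namedtuple

AttributeLayout = namedtuple('AttributeLayout', ('location', 'dimensions', 'dataType', 'offset'))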

def _loadMesh_v0(stream, vao, bufs):
    vertexCount = stream.readValue(ctypes.c_uint32)
    vertexSize = stream.readValue(ctypes.c_ubyte)

    indexCount = stream.readValue(ctypes.c_uint32)
    indexSize = stream.readValue(ctypes.c_ubyte)

    assert indexSize in indexTypeFromSize, 'Unknown element data type, element size must be one of %s' % indexTypeFromSize.keys()
    indexType = indexTypeFromSize[indexSize]

    drawMode = stream.readValue(ctypes.c_uint32)
    assert drawMode in (GL_LINES, GL_TRIANGLES), 'Unknown draw mode.'  # TODO: list all render types

    # gather layout
    numAttributes = stream.readValue(ctypes.c_ubyte)

    offset = 0
    layouts = [None] * numAttributes
    for i in xrange(numAttributes):
        location = stream.readValue(ctypes.c_ubyte)
        dimensions = stream.readValue(ctypes.c_ubyte)
        assert dimensions in (1, 2, 3, 4)
        dataType = stream.readValue(ctypes.c_uint32)
        assert dataType in attributeElementTypes, 'Invalid GLenum value for attribute element type.'
        layouts[i] = AttributeLayout(location, dimensions, dataType, offset)
        offset += dimensions * sizeOfType[dataType]

    assert offset == vertexSize, 'File says each chunk of vertex data is %s bytes, but attribute layout used up %s bytes' % (vertexSize, offset)

    # apply layout
    for layout in layouts:
        glVertexAttribPointer(layout.location, layout.dimensions, layout.dataType, GL_FALSE, vertexSize, ctypes.c_void_p(layout.offset))  # total offset is now stride
        glEnableVertexAttribArray(layout.location)

    raw = stream.read(ctypes.c_ubyte * (vertexSize * vertexCount))
    glBufferData(GL_ARRAY_BUFFER, vertexSize * vertexCount, raw, GL_STATIC_DRAW)

    raw = stream.read(ctypes.c_ubyte * (indexSize * indexCount))
    glBufferData(GL_ELEMENT_ARRAY_BUFFER, indexSize * indexCount, raw, GL_STATIC_DRAW)

    if stream.size() - stream.tell() > 0:
        raise RuntimeError('Error reading mesh file, more data in file after we were done reading.')
    
    return Mesh(vao, bufs, drawMode, indexCount, indexType)


def model(filePath):
    vao = glGenVertexArrays(1)
    glBindVertexArray(vao)
    bufs = glGenBuffers(2)
    glBindBuffer(GL_ARRAY_BUFFER, bufs[0])
    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, bufs[1])
    with memoryMap(filePath) as stream:
        fileVersion = stream.readValue(ctypes.c_ubyte)
        if fileVersion == 0:
            return _loadMesh_v0(stream, vao, bufs)
        raise RuntimeError('Unknown mesh file version %s in %s' % (fileVersion, filePath))

Attribute editor in PyQt

I've been working on a particle editor. Though that isn't entirely done yet, I did create something interesting in the process: an attribute editor for arbitrary Python objects.

This is where I’m at right now, I hope to get to work on this more and share details about the particles themselves once it is more complete.

On the right you see an editor for the following object:

class ParticleSettings(OrderedClass):
    def __init__(self):
        super(ParticleSettings, self).__init__()
        # emitter
        self.emitterType = Enum(('Sphere', 'Cone', 'Box'), 0)
        self.emitterSettings = Vec3([0.5, 0.5, 0.5])
        self.emitterIsVolume = True
        self.randomDirection = False
        # not curve based
        self.startSize = RandomFloat()
        self.startSpeed = RandomFloat()
        self.startRotation = RandomFloat()
        self.lifeTime = RandomFloat()
        # curve based on particle alive time / life time
        self.sizeOverTime = RandomChannelFloat()
        self.angularVelocity = RandomChannelFloat()
        self.velocityOverTime = RandomChannelVec3()

I've added some data types so I can visualize them better, but the attribute editing framework I wrote works off the bat on Python's basic types.
I'd like to break down how I got here. Since I wrote a heap of code that still needs a heap of refactoring before it is presentable, I'll demonstrate creating a more basic example instead, which should be more useful because it isn't cluttered with my edge cases.

Preparing edit widgets

First, I made some widgets to edit some basic data types. Because we want to generate and connect our widgets to data, it'd be nice if they all had the same interface, so the rest of our code can stay abstract. I went for the following interface:

class AEComponent(QObject):
    # Attribute editor widget editing a single value type. Note that UI interactions from the user should emit valueChanged.
    valueChanged = pyqtSignal(object)

    def __init__(self):
        # The constructor may accept additional arguments, e.g. default value or enum options
        self._value = None

    def value(self):
        # Return the internal value
        return self._value

    def setValue(self, value):
        # setValue should programmatically adjust the internal value without emitting a signal; this is used when multiple setValue calls could trigger each other, or when a parent widget is already going to send a change event.
        self.blockSignals(True)
        self._value = value
        self.blockSignals(False)

    def editValue(self, value):
        # Set the value and emit a change event
        self._value = value
        self.valueChanged.emit(value)

Note that this is an example of the interface, not a base class. I will not actually use the code above, I’ll just subclass Qt widgets and make them behave the same.

The core data types I want to support are:

int: QSpinBox
float: QDoubleSpinBox
bool: checkable QPushButton
str: QLineEdit
object: recurse into its properties
dict: recurse into its items
list: recurse into its items

Because tuples are immutable (and sets can't be indexed) you can't edit their elements in place; you'd need a widget that sets the entire container at once, so I skip those.
I do not intend to adjust the composition of lists, dicts and objects – so no element insertion / removal.

Int & double
QSpinBox already has a value and setValue, but its setValue emits a signal. Instead I'm adding an editValue that forwards to the super setValue, and I make setValue block the signals. I've also made it so I can construct versions that only support e.g. ctypes.c_char, by adding a bits parameter that is used to infer the limits. It'd be trivial to extend this to unsigned and size-limited variants. The LineEditSelected used here is listed below with the QLineEdit; it simply selects all text on the focus event.

class SpinBox(QSpinBox):
    """
    QSpinBox with right limits & that follows the AEComponent interface.
    """
    def __init__(self, value=0, bits=32):
        super(SpinBox, self).__init__()
        self.setMinimum(-2 ** (bits - 1))
        self.setMaximum(2 ** (bits - 1) - 1)
        self.setValue(value)
        self.setLineEdit(LineEditSelected())

    def setValue(self, value):
        self.blockSignals(True)
        super(SpinBox, self).setValue(value)
        self.blockSignals(False)

    def editValue(self, value):
        super(SpinBox, self).setValue(value)
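
For example, an 8-bit variant to back a ctypes.c_char sized value would simply be constructed like this:

byteSpinner = SpinBox(bits=8)  # limits become -128 .. 127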

Doubles are almost identical.

class DoubleSpinBox(QDoubleSpinBox):
    """
    QDoubleSpinBox with right limits & that follows the AEComponent interface.
    """
    def __init__(self, value=0.0):
        super(DoubleSpinBox, self).__init__()
        self.setMinimum(-float('inf'))
        self.setMaximum(float('inf'))
        self.setValue(value)
        self.setSingleStep(0.01)  # Depending on use case this can be very coarse.
        self.setLineEdit(LineEditSelected())

    def setValue(self, value):
        self.blockSignals(True)
        super(DoubleSpinBox, self).setValue(value)
        self.blockSignals(False)

    def editValue(self, value):
        super(DoubleSpinBox, self).setValue(value)

Booleans with icons
A few more interesting things are needed to make this work, based on a checkable QPushButton: manual valueChanged signal handling and keeping track of which icon to use.

class IconBoolEdit(QPushButton):
    """
    QPushButton with icons to act as a boolean (not tri-state) toggle.
    """
    valueChanged = pyqtSignal(bool)

    def __init__(self, *args):
        super(IconBoolEdit, self).__init__(*args)
        self.__icons = icons.get('Unchecked'), icons.get('Checked')  # Implement your own way to get icons!
        self.setIcon(self.__icons[0])
        self.setCheckable(True)
        self.clicked.connect(self.__updateIcons)
        self.clicked.connect(self.__emitValueChanged)

    def setIcons(self, off, on):
        self.__icons = off, on
        self.__updateIcons(self.isChecked())

    def __updateIcons(self, state):
        self.setIcon(self.__icons[int(state)] or QIcon())

    def __emitValueChanged(self, state):
        self.valueChanged.emit(state)

    def value(self):
        return self.isChecked()

    def setValue(self, state):
        self.setChecked(state)
        self.__updateIcons(state)

    def editValue(self, state):
        self.setChecked(state)
        self.__updateIcons(state)
        self.__emitValueChanged(state)

Strings
This is very similar to the spinbox. One addition is making sure that clicking the line edit selects all of its contents, so a user can start typing a new word immediately.

class LineEdit(QLineEdit):
    valueChanged = pyqtSignal(str)

    def __init__(self, *args):
        super(LineEdit, self).__init__(*args)
        self.textChanged.connect(self.valueChanged.emit)

    def value(self):
        return self.text()

    def setValue(self, text):
        self.blockSignals(True)
        self.setText(text)
        self.blockSignals(False)

    def editValue(self, text):
        self.setText(text)


class LineEditSelected(LineEdit):
    def __init__(self):
        super(LineEditSelected, self).__init__()
        self.__state = False

    def focusInEvent(self, event):
        super(LineEditSelected, self).focusInEvent(event)
        self.selectAll()
        self.__state = True

    def mousePressEvent(self, event):
        super(LineEditSelected, self).mousePressEvent(event)
        if self.__state:
            self.selectAll()
            self.__state = False

Reflecting python objects

Reflection in Python is very easy, and our use case is simple.
Every python object has a __dict__ attribute that contains all the current members of an object (but not methods).
In python we tend to denote protected (internal) data by prefixing variable names with an underscore.
So to find all attributes that we want to inspect we can simply do:

for name in instance.__dict__:
    if name[0] == '_':
        continue

Now to control such an attribute with a widget, we need to construct the right widget and connect its change event to a setter.
In Python we can use the functools module to bind the global setattr function to our instance and attribute name, giving us a callback we can connect to the widget's change signal.

    value = getattr(instance, name)  # get the current value by name, like the dot operator but using a string to get to the right property
    cls = factory._findEditorForType(type(value))  # factory to get the right widget for our data type, more on this later
    widget = cls()  # construct the widget
    widget.setValue(getattr(instance, name))  # set the editor's initial value to match with our data
    widget.valueChanged.connect(functools.partial(setattr, instance, name))  # make the editor update our data
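
To make the setattr binding concrete, here is a tiny standalone sketch (Foo is just a throwaway class for illustration):

import functools

class Foo(object):
    def __init__(self):
        self.bar = 1.0

foo = Foo()
callback = functools.partial(setattr, foo, 'bar')  # behaves like: lambda value: setattr(foo, 'bar', value)
callback(2.5)
print foo.bar  # 2.5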

Widget factory

The last piece of the puzzle is a way to generate widgets based on data types. I wanted to keep this abstract, so I made a class out of it.
We can register data type & widget type relations, and it knows to create a widget if we have one registered for a base class of the type we're querying.

class AEFactory(object):
    def __init__(self):
        self.__typeWidgets = {}

    def registerType(self, dataType, widgetConstructor):
        self.__typeWidgets[dataType] = widgetConstructor

    @staticmethod
    def _allBaseTypes(cls):
        """
        Recurse all base classes and return a list of all bases with most close relatives first.
        https://stackoverflow.com/questions/1401661/list-all-base-classes-in-a-hierarchy-of-given-class
        """
        result = list(cls.__bases__)
        for base in result:
            result.extend(AEFactory._allBaseTypes(base))
        return result

    def _findEditorForType(self, dataType):
        if dataType in self.__typeWidgets:
            return self.__typeWidgets[dataType]

        for baseType in AEFactory._allBaseTypes(dataType):
            if baseType in self.__typeWidgets:
                return self.__typeWidgets[baseType]
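
A quick illustration of that base class fallback, assuming the SpinBox and DoubleSpinBox widgets from above:

factory = AEFactory()
factory.registerType(int, SpinBox)
factory.registerType(float, DoubleSpinBox)
# bool subclasses int in python, so querying bool falls back to the int editor
assert factory._findEditorForType(bool) is SpinBox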

Complex data

Now this will work fine for simple objects with simple data types. But the real fun begins when we have instances whose properties are lists of other instances.
Our _findEditorForType will return None in that case and we get an error. Instead, we should split this up into several steps: first determine what kind of data we're dealing with, then defer the widget creation to a recursive function until we reach simple data types for which we can generate widgets.

from collections import OrderedDict

class AEFactory(object):

    # ... the above code still goes here ...

    def generate(self, data, parent=None, name=None):
        """
        This recursively generates widgets & returns an iterator of every resulting item.
        """
        if isinstance(data, (dict, OrderedDict)):
            generator = self._generateMap(data)
        elif hasattr(data, '__getitem__') and hasattr(data, '__setitem__'):
            generator = self._generateList(data)
        elif hasattr(data, '__dict__'):
            generator = self._generateInstance(data)
        else:
            generator = self._generateField(data, parent, name)

        for widget in generator:
            yield widget

    def _generateField(self, data, parent, name):
        cls = self._findEditorForType(type(data))
        assert cls, 'Error: could not inspect object "%s" (parent: %s, name: %s). No wrapper registered or non-supported compound type.' % (data, parent, name)
        widget = cls()
        widget.setValue(data)
        widget.valueChanged.connect(functools.partial(setattr, parent, name))
        yield widget

    def _generateInstance(self, data):
        for name in data.__dict__:
            if name[0] == '_':
                continue
            yield QLabel(name)
            for widget in self.generate(getattr(data, name), data, name):
                yield widget

    def _generateList(self, data):
        for i in xrange(len(data)):
            yield QLabel(str(i))
            for widget in self.generate(data[i], data, str(i)):
                yield widget

    def _generateMap(self, data):
        for key in data:
            yield QLabel(str(key))
            for widget in self.generate(data[key], data, key):
                yield widget

Formatting

If we have a class that needs a special widget or layout, like my particle editor, we may wish to grab the widgets generated for that class and manipulate them.
One case I have is a random channel, which has a minimum, a maximum and an isRandom flag. If isRandom is turned off then I just want to show the minimum field, because the maximum is unused. To support this I extended the factory with the ability to inject functions that take the group of widgets generated for a certain data type. See registerWrapper, _findWrapperForType and the modifications at the end of generate.

class AEFactory(object):
    def __init__(self):
        self.__typeWidgets = {}
        self.__typeWrappers = {}

    def registerType(self, dataType, widgetConstructor):
        self.__typeWidgets[dataType] = widgetConstructor

    def registerWrapper(self, dataType, wrapperFunction):
        """
        The wrapperFunction must accept a generator of widgets & return a generator of widgets.
        """
        self.__typeWrappers[dataType] = wrapperFunction

    @staticmethod
    def _allBaseTypes(cls):
        """
        Recurse all base classes and return a list of all bases with most close relatives first.
        https://stackoverflow.com/questions/1401661/list-all-base-classes-in-a-hierarchy-of-given-class
        """
        result = list(cls.__bases__)
        for base in result:
            result.extend(AEFactory._allBaseTypes(base))
        return result

    def _findEditorForType(self, dataType):
        if dataType in self.__typeWidgets:
            return self.__typeWidgets[dataType]

        for baseType in AEFactory._allBaseTypes(dataType):
            if baseType in self.__typeWidgets:
                return self.__typeWidgets[baseType]

    def _findWrapperForType(self, dataType):
        if dataType in self.__typeWrappers:
            return self.__typeWrappers[dataType]

        for baseType in AEFactory._allBaseTypes(dataType):
            if baseType in self.__typeWrappers:
                return self.__typeWrappers[baseType]

    def generate(self, data, parent=None, name=None):
        """
        This recursively generates widgets & returns an iterator of every resulting item.
        """
        if isinstance(data, (dict, OrderedDict)):
            generator = self._generateMap(data)
        elif hasattr(data, '__getitem__') and hasattr(data, '__setitem__'):
            generator = self._generateList(data)
        elif hasattr(data, '__dict__'):
            generator = self._generateInstance(data)
        else:
            generator = self._generateField(data, parent, name)

        wrapper = self._findWrapperForType(type(data))
        if wrapper:
            generator = wrapper(generator)
        for widget in generator:
            yield widget

    def _generateField(self, data, parent, name):
        cls = self._findEditorForType(type(data))
        assert cls, 'Error: could not inspect object "%s" (parent: %s, name: %s). No wrapper registered or non-supported compound type.' % (data, parent, name)
        widget = cls()
        widget.setValue(data)
        widget.valueChanged.connect(functools.partial(setattr, parent, name))
        yield widget

    def _generateInstance(self, data):
        for name in data.__dict__:
            if name[0] == '_':
                continue
            yield QLabel(name)
            for widget in self.generate(getattr(data, name), data, name):
                yield widget

    def _generateList(self, data):
        for i in xrange(len(data)):
            yield QLabel(str(i))
            for widget in self.generate(data[i], data, str(i)):
                yield widget

    def _generateMap(self, data):
        for key in data:
            yield QLabel(str(key))
            for widget in self.generate(data[key], data, key):
                yield widget
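
As an illustration of what a wrapper can look like, here is a hedged sketch (not my actual particle editor code) that collapses all widgets generated for a type into a single QGroupBox:

def groupBoxWrapper(title):
    def wrap(widgets):
        box = QGroupBox(title)
        layout = QVBoxLayout()
        for widget in widgets:
            layout.addWidget(widget)
        box.setLayout(layout)
        yield box
    return wrap

# factory.registerWrapper(Compound, groupBoxWrapper('Compound'))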

Note: I currently allow this to work on subclasses, with the risk of that subclass having extra attributes – or a different attribute order – resulting in the widgets being jumbled & my function breaking the layout completely. I'm not sure yet how to validate that a subclass matches the base class' member layout, so maybe I should just allow explicit overrides for a single type without inheritance support.

Constraining class member order

One thing that annoys me, and maybe you noticed already, is that python does not guarantee that dictionaries are ordered.
For this the collections.OrderedDict type exists, but when dealing with class members and the __dict__ attribute we have no control over this.

Now my solution to this is pretty shaky, and I’m definitely not proud of what I came up with, but let me share it anyway!
First I created a class that overrides __setattr__ to keep track of the order in which data is set.
Then I override __getattribute__ so that when the __dict__ attribute is requested we return a wrapper around it that behaves
like the real dict, but implements all iterators to use the ordered keys list instead.

class FakeOrderedDict(object):
    def __init__(self, realDict, order):
        self.realDict = realDict
        self.order = order

    def __getitem__(self, key):
        return self.realDict[key]

    def __setitem__(self, key, value):
        self.realDict[key] = value

    def __iter__(self):
        return iter(self.order)

    def iterkeys(self):
        return iter(self.order)

    def itervalues(self):
        for key in self.order:
            yield self.realDict[key]

    def iteritems(self):
        for key in self.order:
            yield key, self.realDict[key]


class OrderedClass(object):
    def __init__(self):
        self.__dict__['_OrderedClass__attrs'] = []

    def __getattribute__(self, key):
        result = super(OrderedClass, self).__getattribute__(key)
        if key == '__dict__':
            if '_OrderedClass__attrs' in result:
                return FakeOrderedDict(result, result['_OrderedClass__attrs'])
        return result

    def __setattr__(self, key, value):
        order = self.__dict__['_OrderedClass__attrs']
        if key not in order:
            order.append(key)
        return super(OrderedClass, self).__setattr__(key, value)
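
A quick sanity check of the idea (Demo is just a throwaway class for illustration):

class Demo(OrderedClass):
    def __init__(self):
        super(Demo, self).__init__()
        self.zebra = 1.0
        self.apple = 2.0

print list(Demo().__dict__)  # ['zebra', 'apple'], insertion order instead of hash order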

That’s all folks

Example usage:

# create test objects
class Vector(list):
    pass


class Compound(OrderedClass):  # inheriting from OrderedClass to ensure widget order
    def __init__(self):
        super(Compound, self).__init__()
        self.x = 2.0  # note how explicit floats are important now
        self.y = 5.0


class Data(OrderedClass):
    def __init__(self):
        super(Data, self).__init__()
        self.name = 'List test'
        self.value = Vector([1.0, 5, True])
        self.dict = {'A': Compound(), 'B': Compound()}


def groupHLayout(widgets):
    h = QHBoxLayout()
    m = QWidget()
    for w in widgets:
        h.addWidget(w)
    m.setLayout(h)
    yield m


# create test data
data = Data()

# create Qt application
app = QApplication([])
window = QWidget()
main = QVBoxLayout()
window.setLayout(main)

# initialize inspector
factory = AEFactory()
factory.registerType(bool, IconBoolEdit)
factory.registerType(int, SpinBox)
factory.registerType(float, DoubleSpinBox)
factory.registerType(str, LineEdit)
factory.registerWrapper(Vector, groupHLayout)

# inspect the data
for widget in factory.generate(data):
    main.addWidget(widget)

window.show()
app.exec_()

# print the data after closing the editor to show we indeed propagated the changes to the data as they happened
print data.name, data.value, data.dict['A'].x, data.dict['A'].y, data.dict, data.dict['B'].x, data.dict['B'].y

Last, have a full code dump!

from collections import OrderedDict
import functools
from PyQt4.QtCore import *
from PyQt4.QtGui import *


class SpinBox(QSpinBox):
    """
    QSpinBox with right limits & that follows the AEComponent interface.
    """

    def __init__(self, value=0, bits=32):
        super(SpinBox, self).__init__()
        self.setMinimum(-2 ** (bits - 1))
        self.setMaximum(2 ** (bits - 1) - 1)
        self.setValue(value)
        self.setLineEdit(LineEditSelected())

    def setValue(self, value):
        self.blockSignals(True)
        super(SpinBox, self).setValue(value)
        self.blockSignals(False)

    def editValue(self, value):
        super(SpinBox, self).setValue(value)


class DoubleSpinBox(QDoubleSpinBox):
    """
    QDoubleSpinBox with right limits & that follows the AEComponent interface.
    """

    def __init__(self, value=0.0):
        super(DoubleSpinBox, self).__init__()
        self.setMinimum(-float('inf'))
        self.setMaximum(float('inf'))
        self.setValue(value)
        self.setSingleStep(0.01)  # Depending on use case this can be very coarse.
        self.setLineEdit(LineEditSelected())

    def setValue(self, value):
        self.blockSignals(True)
        super(DoubleSpinBox, self).setValue(value)
        self.blockSignals(False)

    def editValue(self, value):
        super(DoubleSpinBox, self).setValue(value)


class IconBoolEdit(QPushButton):
    """
    QPushButton with icons to act as a boolean (not tri-state) toggle.
    """
    valueChanged = pyqtSignal(bool)

    def __init__(self, *args):
        super(IconBoolEdit, self).__init__(*args)
        self.__icons = None, None  # icons.get('Unchecked'), icons.get('Checked')  # Implement your own way to get icons!
        self.setIcon(self.__icons[0] or QIcon())
        self.setCheckable(True)
        self.clicked.connect(self.__updateIcons)
        self.clicked.connect(self.__emitValueChanged)

    def setIcons(self, off, on):
        self.__icons = off, on
        self.__updateIcons(self.isChecked())

    def __updateIcons(self, state):
        self.setIcon(self.__icons[int(state)] or QIcon())

    def __emitValueChanged(self, state):
        self.valueChanged.emit(state)

    def value(self):
        return self.isChecked()

    def setValue(self, state):
        self.setChecked(state)
        self.__updateIcons(state)

    def editValue(self, state):
        self.setChecked(state)
        self.__updateIcons(state)
        self.__emitValueChanged(state)


class LineEdit(QLineEdit):
    valueChanged = pyqtSignal(str)

    def __init__(self, *args):
        super(LineEdit, self).__init__(*args)
        self.textChanged.connect(self.valueChanged.emit)

    def value(self):
        return self.text()

    def setValue(self, text):
        self.blockSignals(True)
        self.setText(text)
        self.blockSignals(False)

    def editValue(self, text):
        self.setText(text)


class LineEditSelected(LineEdit):
    def __init__(self):
        super(LineEditSelected, self).__init__()
        self.__state = False

    def focusInEvent(self, event):
        super(LineEditSelected, self).focusInEvent(event)
        self.selectAll()
        self.__state = True

    def mousePressEvent(self, event):
        super(LineEditSelected, self).mousePressEvent(event)
        if self.__state:
            self.selectAll()
            self.__state = False


class AEFactory(object):
    def __init__(self):
        self.__typeWidgets = {}
        self.__typeWrappers = {}

    def registerType(self, dataType, widgetConstructor):
        self.__typeWidgets[dataType] = widgetConstructor

    def registerWrapper(self, dataType, wrapperFunction):
        """
        The wrapperFunction must accept a generator of widgets & return a generator of widgets.
        """
        self.__typeWrappers[dataType] = wrapperFunction

    @staticmethod
    def _allBaseTypes(cls):
        """
        Recurse all base classes and return a list of all bases with most close relatives first.
        https://stackoverflow.com/questions/1401661/list-all-base-classes-in-a-hierarchy-of-given-class
        """
        result = list(cls.__bases__)
        for base in result:
            result.extend(AEFactory._allBaseTypes(base))
        return result

    def _findEditorForType(self, dataType):
        if dataType in self.__typeWidgets:
            return self.__typeWidgets[dataType]

        for baseType in AEFactory._allBaseTypes(dataType):
            if baseType in self.__typeWidgets:
                return self.__typeWidgets[baseType]

    def _findWrapperForType(self, dataType):
        if dataType in self.__typeWrappers:
            return self.__typeWrappers[dataType]

        for baseType in AEFactory._allBaseTypes(dataType):
            if baseType in self.__typeWrappers:
                return self.__typeWrappers[baseType]

    def generate(self, data, parent=None, name=None):
        """
        This recursively generates widgets & returns an iterator of every resulting item.
        """
        if isinstance(data, (dict, OrderedDict)):
            generator = self._generateMap(data)
        elif hasattr(data, '__getitem__') and hasattr(data, '__setitem__'):
            generator = self._generateList(data)
        elif hasattr(data, '__dict__'):
            generator = self._generateInstance(data)
        else:
            generator = self._generateField(data, parent, name)

        wrapper = self._findWrapperForType(type(data))
        if wrapper:
            generator = wrapper(generator)
        for widget in generator:
            yield widget

    def _generateField(self, data, parent, name):
        cls = self._findEditorForType(type(data))
        assert cls, 'Error: could not inspect object "%s" (parent: %s, name: %s). No wrapper registered or non-supported compound type.' % (data, parent, name)
        widget = cls()
        widget.setValue(data)
        widget.valueChanged.connect(functools.partial(setattr, parent, name))
        yield widget

    def _generateInstance(self, data):
        for name in data.__dict__:
            if name[0] == '_':
                continue
            yield QLabel(name)
            for widget in self.generate(getattr(data, name), data, name):
                yield widget

    def _generateList(self, data):
        for i in xrange(len(data)):
            yield QLabel(str(i))
            for widget in self.generate(data[i], data, str(i)):
                yield widget

    def _generateMap(self, data):
        for key in data:
            yield QLabel(str(key))
            for widget in self.generate(data[key], data, key):
                yield widget


class FakeOrderedDict(object):
    def __init__(self, realDict, order):
        self.realDict = realDict
        self.order = order

    def __getitem__(self, key):
        return self.realDict[key]

    def __setitem__(self, key, value):
        self.realDict[key] = value

    def __iter__(self):
        return iter(self.order)

    def iterkeys(self):
        return iter(self.order)

    def itervalues(self):
        for key in self.order:
            yield self.realDict[key]

    def iteritems(self):
        for key in self.order:
            yield key, self.realDict[key]


class OrderedClass(object):
    def __init__(self):
        self.__dict__['_OrderedClass__attrs'] = []

    def __getattribute__(self, key):
        result = super(OrderedClass, self).__getattribute__(key)
        if key == '__dict__':
            if '_OrderedClass__attrs' in result:
                return FakeOrderedDict(result, result['_OrderedClass__attrs'])
        return result

    def __setattr__(self, key, value):
        order = self.__dict__['_OrderedClass__attrs']
        if key not in order:
            order.append(key)
        return super(OrderedClass, self).__setattr__(key, value)


# create test objects
class Vector(list):
    pass


class Compound(OrderedClass):  # inheriting from OrderedClass to ensure widget order
    def __init__(self):
        super(Compound, self).__init__()
        self.x = 2.0  # note how explicit floats are important now
        self.y = 5.0


class Data(OrderedClass):
    def __init__(self):
        super(Data, self).__init__()
        self.name = 'List test'
        self.value = Vector([1.0, 5, True])
        self.dict = {'A': Compound(), 'B': Compound()}


def groupHLayout(widgets):
    h = QHBoxLayout()
    m = QWidget()
    for w in widgets:
        h.addWidget(w)
    m.setLayout(h)
    yield m


# create test data
data = Data()

# create Qt application
app = QApplication([])
window = QWidget()
main = QVBoxLayout()
window.setLayout(main)

# initialize inspector
factory = AEFactory()
factory.registerType(bool, IconBoolEdit)
factory.registerType(int, SpinBox)
factory.registerType(float, DoubleSpinBox)
factory.registerType(str, LineEdit)
factory.registerWrapper(Vector, groupHLayout)

# inspect the data
for widget in factory.generate(data):
    main.addWidget(widget)

window.show()
app.exec_()

# print the data after closing the editor to show we indeed propagated the changes to the data as they happened
print data.name, data.value, data.dict['A'].x, data.dict['A'].y, data.dict, data.dict['B'].x, data.dict['B'].y

Polygons & textures creeping in my raymarcher

This week I’ve been working on additional features in my 64k toolchain. None of this is yet viable for 64k executables but it enhances the tool quite a bit.

My first step was implementing vertex shader support. A cool thing about vertex shaders in openGL is that they are responsible for outputting the vertex data; nobody said anything about requiring input. So with a function like glDrawArraysInstanced, we have free rein in the vertex shader to generate points based on gl_VertexID and gl_InstanceID.

Here I'm generating a grid of 10×10 quads, with some barycentric coordinates added as per this article.

#version 420

uniform mat4 uV;
uniform mat4 uVi;
uniform mat4 uP;

out vec3 bary;
out vec2 uv;

void main()
{
    vec3 local = vec3(gl_VertexID % 2, gl_VertexID / 2, 0.5) - 0.5;
    vec3 global = vec3(gl_InstanceID % 10, gl_InstanceID / 10, 4.5) - 4.5;

    uv = (local + global).xy * vec2(0.1, 0.1 * 16 / 9) + 0.5;

    bary = vec3(0);
    bary[gl_VertexID % 3] = 1.0;

    gl_Position = uP * vec4(mat3(uVi) * ((local + global - uV[3].xyz) * vec3(1,1,-1)), 1);
}
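
On the Python side the matching attribute-less draw call is tiny. This is a sketch under the assumption of a triangle-strip quad, not the tool's actual code; 'program' is assumed to be the compiled program containing the vertex shader above:

vao = glGenVertexArrays(1)  # a core profile still wants a VAO bound, even without attributes
glBindVertexArray(vao)
glUseProgram(program)
glDrawArraysInstanced(GL_TRIANGLE_STRIP, 0, 4, 100)  # 4 strip vertices per quad, 100 instances for the 10x10 grid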

This was surprisingly easy to implement. In the tool I scan a template definition XML to figure out which shader source files to stitch together and treat as 1 fragment shader. Adding the distinction between .frag and .vert files allowed me to compile the resulting program with a different vertex shader than the default one and it was up and running quite fast.

Next came a more interesting bit: mixing my raymarching things together with this polygonal grid.
There are 2 parts to this: one is matching the projection, two is depth testing, and thus matching the depth output from the raymarcher.

To project a vertex I subtract the ray origin from the vertex and then multiply it by the inverse rotation. Apparently that flips the Z axis, so I had to fix that too. Then I multiply that with the projection matrix. The “u” prefix means uniform variable.

vec4 viewCoord = vec4(uViewInverse * ((vertex - uRayOrigin) * vec3(1, 1, -1)), 1);

My ray direction is based on mixing the corners of a frustum these days; I used to rotate the ray to get a fisheye effect, but that doesn't fly with regular projection matrices. My frustum calculation looks something like this (before going into the shader as a mat4):

tanFov = tan(uniforms.get('uFovBias', 0.5))
horizontalFov = (tanFov * aspectRatio)
uniforms['uFrustum'] = (-horizontalFov, -tanFov, 1.0, 0.0,
                        horizontalFov, -tanFov, 1.0, 0.0,
                        -horizontalFov, tanFov, 1.0, 0.0,
                        horizontalFov, tanFov, 1.0, 0.0)

So I can get a projection matrix from that as well. Additionally I added a uniform for the clipRange so the raymarcher near/far planes match the polygonal ones.

uniforms['uClipRange'] = (0.01, 100.0)
near, far = uniforms['uClipRange']
projection = Mat44.frustum(-horizontalFov * near, horizontalFov * near, -tanFov * near, tanFov * near, near, far)

For reference my raymarching ray looks like this:

vec4 d = mix(mix(uFrustum[0], uFrustum[1], uv.x), mix(uFrustum[2], uFrustum[3],uv.x), uv.y);
Ray ray = Ray(uV[3].xyz, normalize(d.xyz * mat3(uV)));

With this raymarching a 10x10x0.01 box matches up perfectly with the polygonal plane on top! Then the next issue is depth testing. All my render targets are now equipped with a float32 depth buffer, depth testing is enabled and before every frame I clear all depth buffers. Now I find my grid on top of my test scene because the raymarcher does not yet write the depth.

Following this nice article I learned a lot about this topic.
So to get the distance along Z I first define the world-space view axis (0, 0, -1). Dotting that with (intersection - rayOrigin), which is the same as totalDistance * rayDirection, yields the right eye-space Z distance. The rest is explained in the article. It is pretty straightforward to map the Z using the clipping planes previously defined so it matches gl_DepthRange. I first fit to NDC depth (ndcDepth) and then fit that back to gl_DepthRange. One final trick is to fade to the far depth if we have 100% fog.

    vec3 viewForward = vec3(0.0, 0.0, -1.0) * mat3(uV);
    float eyeHitZ = hit.totalDistance * dot(ray.direction, viewForward);
    float ndcDepth = ((uClipRange.y + uClipRange.x) + (2 * uClipRange.y * uClipRange.x) / eyeHitZ) / (uClipRange.y - uClipRange.x);
    float z = ((gl_DepthRange.diff * ndcDepth) + gl_DepthRange.near + gl_DepthRange.far) / 2.0;
    gl_FragDepth = mix(z, gl_DepthRange.far, step(0.999, outColor0.w));

Now as if that wasn't enough cool stuff, I added the option to bind an image file to a shot. Whenever a shot gets drawn its texture list is queried, uploaded and bound to the user defined uniform names. Uploading is cached so every texture is loaded only once; I should probably add file watchers… The cool thing here is that not only can I now texture things, I can also enter storyboards and time them before working on actual 3D scenes!
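
A hedged sketch of that cached upload, assuming a current GL context and PyQt's QImage for decoding; the tool's real loader differs, this just illustrates the caching idea:

_textureCache = {}

def boundTexture(filePath):
    # upload once, then just rebind the cached handle on subsequent calls
    if filePath not in _textureCache:
        img = QImage(filePath).convertToFormat(QImage.Format_ARGB32)
        _textureCache[filePath] = glGenTextures(1)
        glBindTexture(GL_TEXTURE_2D, _textureCache[filePath])
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR)
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR)
        glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA8, img.width(), img.height(), 0,
                     GL_BGRA, GL_UNSIGNED_BYTE, img.bits().asstring(img.byteCount()))
    else:
        glBindTexture(GL_TEXTURE_2D, _textureCache[filePath])
    return _textureCache[filePath]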

Creating a tool to make a 64k demo

In the process of picking up this webpage again, I can talk about something we did quite a while ago. Together with a team, I went through the process of making a 64 kilobyte demo. We happened to win at one of the biggest demoscene events in Europe, Revision 2017. I still feel the afterglow of happiness from that.

If you're not sure what that is, read on; else, scroll down! You program a piece of software that is only 64 kB in size, which shows an audio-visual experience generated in realtime. To stay within such size limits you have to generate everything. We chose to go for a rendering technique called ray marching, which allowed us to put all 3D modeling, texture generation, lighting, etc. as ascii (glsl sources) in the executable. On top of that we used a very minimal (yet versatile) modular synthesizer called 64klang2. Internally it stores a kind of minimal midi data plus the patches, and it can render amazing audio in realtime, so it doesn't need to pre-render the song or anything. All this elementary and small-size data and code compiles to something over 200 kB, which is then compressed using an executable packer like UPX or kkrunchy.

It was called Eidolon. You can watch a video:
https://youtu.be/rsZHBJdaz-Y
Or stress test your GPU / leave a comment here:
http://www.pouet.net/prod.php?which=69669

The technologies used were fairly basic, it’s very old school phong & lambert shading, 2 blur passes for bloom, so all in all pretty low tech and not worth discussing. What I would like to discuss is the evolution of the tool. I’ll keep it high level this time though. Maybe in the future I can talk about specific implementations of things, but just seeing the UI will probably explain a lot of the features and the way things work.

Step 1: Don’t make a tool from scratch

Our initial idea was to leverage existing software. One of our team members, who led the team besides modelling and eventually directing the whole creative result, had some experience with a real-time node based software called Touch Designer. It is a tool where you can do realtime visuals, and it supports exactly what we need: rendering into a 2D texture with a fragment shader.

We wanted to have the same rendering code for all scenes, and just fill in the modeling and material code that is unique per scene. We figured out how to concatenate separate pieces of text and draw them into a buffer. Multiple buffers even. At some point I packed all code and rendering logic of a pass into 1 grouped node and we could design our render pipeline entirely node based.

Here you see the text snippets (1) merged into some buffers (2) and then post processed for the bloom (3). On the right (4) you see the first problem we hit with Touch Designer. The compiler error log is drawn inside this node. There is basically no easy way to have that error visible in the main application somewhere. So the first iteration of the renderer (and coincidentally the main character of Eidolon) looked something like this:

The renderer didn’t really change after this.

In case I sound too negative about touch designer in the next few paragraphs, our use case was rather special, so take this with a grain of salt!

We have a timeline control, borrowing the UI design a little from Maya, so this became the main preview window. That's when we hit some problems though. The software has no concept of window focus, so it'd constantly suffer from hanging keys, or respond to keys while typing in the text editor.

The last issue really killed it though: everything has to be in 1 binary file. There is no native way to reference external text files for the shader code, or to merge node graphs. There is a really weird utility that expands the binary to ascii, but then literally every single node is a text file, so it is just unmergeable.

Step 2: Make a tool

So then this happened:

Over a week's time in the evenings, and then 1 long Saturday, I whipped this up using PyQt and PyOpenGL. This is the first screenshot I made; the curve editor isn't actually an editor yet and there is no concept of camera shots (we use those to get hard cuts).

It has all the same concepts however, separate text files for the shader code, with an XML file determining what render passes use what files and in what buffer they render / what buffers they reference in turn. With the added advantage of the perfect granularity all stored in ascii files.

Some files are template-level, some are scene-level, so creating a new scene actually only copies the scene-level files, which can then be adjusted in a text editor, with a file watcher updating the picture. The CurveEditor feeds right back into the uniforms of the shader (by name) and the time slider at the bottom is the same idea as Maya / what you saw before.

Step 3: Make it better

Render pipeline
The concept was to set up a master render pipeline into which scenes would inject snippets of code. On disk this became a bunch of snippets, and an XML based template definition. This would be the most basic XML file:

<template>
    <pass buffer="0" outputs="1">
        <global path="header.glsl"/>
        <section path="scene.glsl"/>
        <global path="pass.glsl"/>
    </pass>
    <pass input0="0">
        <global path="present.glsl"/>
    </pass>
</template>

This will concatenate 3 files into 1 fragment shader, render into full-screen buffer “0” and then use present.glsl as another fragment shader, which in turn has the previous buffer “0” as input (forwarded to a sampler2D uniform).

This branched out into making static buffers (textures), setting buffer sizes (smaller textures), multiple target buffers (render main and reflection pass at once), setting a buffer's size to a portion of the screen (downsampling for bloom), and 3D texture support (volumetric noise textures for clouds).
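
A hedged sketch of how such a template could be parsed and stitched together; the names here are illustrative, not the tool's actual code:

import os
from xml.etree import cElementTree

def stitchPasses(templatePath, sceneDir):
    for passNode in cElementTree.parse(templatePath).getroot():
        source = []
        for node in passNode:
            if node.tag == 'global':
                path = os.path.join(os.path.dirname(templatePath), node.get('path'))
            else:  # 'section' snippets live next to the scene
                path = os.path.join(sceneDir, node.get('path'))
            with open(path) as fh:
                source.append(fh.read())
        # pass attributes (buffer, outputs, input0, ...) plus the concatenated fragment shader source
        yield dict(passNode.items()), '\n'.join(source)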

Creating a new scene will just copy “scene.glsl” from the template to a new folder, there you can then fill out the necessary function(s) to get a unique scene. Here’s an example from our latest Evoke demo. 6 scenes, under which you see the “section” files for each scene.

Camera control
The second important thing I wanted to tackle was camera control. Basically the demo will control the camera based on some animation data, but it is nice to fly around freely and even use the current camera position as animation keyframe. So this was just using Qt’s event system to hook up the mouse and keyboard to the viewport.

I also created a little widget that displays where the camera is, has an “animation input or user input” toggle as well as a “snap to current animation frame” button.

Animation control
So now to animate the camera, without hard coding values! Or even typing numbers, preferably. I know a lot of people use a tracker-like tool called Rocket; I never used it, and it looks like an odd way to control animation data to me. I come from a 3D background, so I figured I'd just want a curve editor like e.g. Maya has. In Touch Designer we also had a basic curve editor; conveniently you can name a channel the same as a uniform, then just have code evaluate the curve at the current time and send the result to that uniform location.
Some trickery was necessary to pack vec3s: I just look for channels that start with the same name and then end in .x, .y, .z, and possibly .w, as sketched below.
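
A sketch of that vec3 packing, assuming every curve object has an evaluate(time) method (illustrative, not the tool's actual code):

def evaluateUniforms(curves, time):
    uniforms = {}
    for name, curve in curves.iteritems():
        if '.' in name and name.rsplit('.', 1)[1] in ('x', 'y', 'z', 'w'):
            base, component = name.rsplit('.', 1)
            values = uniforms.setdefault(base, [0.0, 0.0, 0.0, 0.0])
            values['xyzw'.index(component)] = curve.evaluate(time)
        else:
            uniforms[name] = curve.evaluate(time)
    return uniforms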

Here’s an excerpt from a long camera shot with lots of movement, showing off our cool hermite splines. At the top right you can see we have several built in tangent modes, we never got around to building custom tangent editing. In the end this is more than enough however. With flat tangents we can create easing/acceleration, with spline tangents we can get continuous paths and with linear tangents we get continuous speed. Next to that are 2 cool buttons that allow us to feed the camera position to another uniform, so you can literally fly to a place where you want to put an object. It’s not as good as actual move/rotate widgets but for the limited times we need to place 3D objects it’s great.

Hard cuts
Apart from being impossible to represent in this interface, we don't support 2 keys at identical times. This means that we can't really have the camera “jump” to a new position instantly: with a tiny amount of curve in between the previous and the next shot position, the time cursor can actually render 1 frame at a random camera position. So we had to solve this. I think it is one of the only big features that you won't see in the initial screenshot above, actually.

Introducing camera shots. A shot has its own “scene it should display” and its own set of animation data. So selecting a different shot yields different curve editor content. Shots are placed on a shared timeline, so scrolling through time will automatically show the right shot, and setting a keyframe will automatically figure out the “shot local time” to put the key at, based on the global demo time. The curve editor has its own playhead that is directly linked to the global timeline as well, so we can adjust the time in multiple places.

When working with lots of people we had issues with people touching other people’s (work in progress) shots. Therefore we introduced “disabling” of shots. This way anyone could just prefix their shots and disable them before submitting, and we could mix and match shots from several people to get a final camera flow we all liked.

Shots are also rendered on the timeline as colored blocks. The grey block underneath those is our “range slider”. It makes the top part apply on only a subsection of the demo, so it is easy to loop a specific time range, or just zoom in far enough to use the mouse to change the time granularly enough.

The devil is in the details
Some things I overlooked in the first implementation, and some useful things I added only recently.
1. Undo/Redo of animation changes. Not unimportant, and luckily not hard to add with Qt.
2. Ctrl click timeline to immediately start animating that shot
3. Right click a shot to find the scene
4. Right click a scene to create a shot for that scene in particular
5. Current time display in minutes:seconds instead of just beats
6. BPM stored per-project instead of globally
7. Lots of hotkeys!

These things make the tool just that much faster to use.

Finally, here’s our tool today. There’s still plenty to be done, but we made 2 demos with it so far and it gets better every time!

Part 3: Importing and drawing a custom mesh file

Part 3: Creating an importer

This is part 3 of a series and it is about getting started with visualizing triangle meshes with Python 2.7 using the libraries PyOpenGL and PyQt4.

Part 1
Part 2
Part 3

I will assume you know Python; you will not need a lot of Qt or OpenGL experience, though I will also not go into the deeper details of how OpenGL works. For that I refer you to the official documentation and the excellent (C++) tutorials at https://open.gl/. Although they are C++, there is a lot of explanation about OpenGL and why to do certain calls in a certain order.

On a final note: I will make generalizations and simplifications when explaining things. If you think something works differently than I say, it probably does; this is to try and convey ideas to beginners, not to explain low level openGL implementations.

3.1 Importing

With our file format resembling openGL so closely, this step is relatively easy. First I'll declare some globals; because openGL does not have real enums, just a bunch of global constants, I make some groups to do testing and data mapping against.

from OpenGL.GL import *

attributeElementTypes = (GL_BYTE,
                        GL_UNSIGNED_BYTE,
                        GL_SHORT,
                        GL_UNSIGNED_SHORT,
                        GL_INT,
                        GL_UNSIGNED_INT,
                        GL_HALF_FLOAT,
                        GL_FLOAT,
                        GL_DOUBLE,
                        GL_FIXED,
                        GL_INT_2_10_10_10_REV,
                        GL_UNSIGNED_INT_2_10_10_10_REV,
                        GL_UNSIGNED_INT_10F_11F_11F_REV)
sizeOfType = {GL_BYTE: 1,
             GL_UNSIGNED_BYTE: 1,
             GL_SHORT: 2,
             GL_UNSIGNED_SHORT: 2,
             GL_INT: 4,
             GL_UNSIGNED_INT: 4,
             GL_HALF_FLOAT: 2,
             GL_FLOAT: 4,
             GL_DOUBLE: 8,
             GL_FIXED: 4,
             GL_INT_2_10_10_10_REV: 4,
             GL_UNSIGNED_INT_2_10_10_10_REV: 4,
             GL_UNSIGNED_INT_10F_11F_11F_REV: 4}
drawModes = (GL_POINTS,
            GL_LINE_STRIP,
            GL_LINE_LOOP,
            GL_LINES,
            GL_LINE_STRIP_ADJACENCY,
            GL_LINES_ADJACENCY,
            GL_TRIANGLE_STRIP,
            GL_TRIANGLE_FAN,
            GL_TRIANGLES,
            GL_TRIANGLE_STRIP_ADJACENCY,
            GL_TRIANGLES_ADJACENCY,
            GL_PATCHES)
indexTypeFromSize = {1: GL_UNSIGNED_BYTE, 2: GL_UNSIGNED_SHORT, 4: GL_UNSIGNED_INT}

Next up is a Mesh class that stores a vertex array object (and corresponding buffers for deletion) along with all info necessary to draw the mesh once it’s on the GPU.

class Mesh(object):
    def __init__(self, vao, bufs, drawMode, indexCount, indexType):
        self.__vao = vao
        self.__bufs = bufs
        self.__drawMode = drawMode
        self.__indexCount = indexCount
        self.__indexType = indexType
 
    def __del__(self):
        glDeleteBuffers(len(self.__bufs), self.__bufs)
        glDeleteVertexArrays(1, [self.__vao])
 
    def draw(self):
        glBindVertexArray(self.__vao)
        glDrawElements(self.__drawMode, self.__indexCount, self.__indexType, None)

Now let’s, given a file path, open up the file and run the importer for the right version (if known).

def model(filePath):
    vao = glGenVertexArrays(1)
    glBindVertexArray(vao)
    bufs = glGenBuffers(2)
    glBindBuffer(GL_ARRAY_BUFFER, bufs[0])
    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, bufs[1])
    with open(filePath, 'rb') as fh:
        fileVersion = struct.unpack('B', fh.read(1))[0]
        if fileVersion == 0:
            return _loadMesh_v0(fh, vao, bufs)
        raise RuntimeError('Unknown mesh file version %s in %s' % (fileVersion, filePath))

Next we can start reading the rest of the file:

    vertexCount = struct.unpack('I', fh.read(4))[0]
    vertexSize = struct.unpack('B', fh.read(1))[0]
    indexCount = struct.unpack('I', fh.read(4))[0]
    indexSize = struct.unpack('B', fh.read(1))[0]
    assert indexSize in indexTypeFromSize, 'Unknown element data type, element size must be one of %s' % indexTypeFromSize.keys()
    indexType = indexTypeFromSize[indexSize]
    drawMode = struct.unpack('I', fh.read(4))[0]
    assert drawMode in (GL_LINES, GL_TRIANGLES), 'Unknown draw mode.'  # TODO: list all render types

Read and apply the attribute layout:

# gather layout
numAttributes = struct.unpack('B', fh.read(1))[0]
offset = 0
layouts = []
for i in xrange(numAttributes):
   location = struct.unpack('B', fh.read(1))[0]
   dimensions = struct.unpack('B', fh.read(1))[0]
   assert dimensions in (1, 2, 3, 4)
   dataType = struct.unpack('I', fh.read(4))[0]
   assert dataType in attributeElementTypes, 'Invalid GLenum value for attribute element type.'
   layouts.append((location, dimensions, dataType, offset))
   offset += dimensions * sizeOfType[dataType]
# apply
for layout in layouts:
   glVertexAttribPointer(layout[0], layout[1], layout[2], GL_FALSE, offset, ctypes.c_void_p(layout[3]))  # total offset is now stride
   glEnableVertexAttribArray(layout[0])

Read and upload the raw buffer data. This step is easy because we can directly copy the bytes as the storage matches exactly with how openGL expects it due to the layout code above.

raw = fh.read(vertexSize * vertexCount)
glBufferData(GL_ARRAY_BUFFER, vertexSize * vertexCount, raw, GL_STATIC_DRAW)
raw = fh.read(indexSize * indexCount)
glBufferData(GL_ELEMENT_ARRAY_BUFFER, indexSize * indexCount, raw, GL_STATIC_DRAW)

3.2 The final code

This is the application code including all the rendering from chapter 1, only the rectangle has been replaced by the loaded mesh.

# the importer
import ctypes
import struct
from OpenGL.GL import *

attributeElementTypes = (GL_BYTE,
                        GL_UNSIGNED_BYTE,
                        GL_SHORT,
                        GL_UNSIGNED_SHORT,
                        GL_INT,
                        GL_UNSIGNED_INT,
                        GL_HALF_FLOAT,
                        GL_FLOAT,
                        GL_DOUBLE,
                        GL_FIXED,
                        GL_INT_2_10_10_10_REV,
                        GL_UNSIGNED_INT_2_10_10_10_REV,
                        GL_UNSIGNED_INT_10F_11F_11F_REV)
sizeOfType = {GL_BYTE: 1,
             GL_UNSIGNED_BYTE: 1,
             GL_SHORT: 2,
             GL_UNSIGNED_SHORT: 2,
             GL_INT: 4,
             GL_UNSIGNED_INT: 4,
             GL_HALF_FLOAT: 2,
             GL_FLOAT: 4,
             GL_DOUBLE: 8,
             GL_FIXED: 4,
             GL_INT_2_10_10_10_REV: 4,
             GL_UNSIGNED_INT_2_10_10_10_REV: 4,
             GL_UNSIGNED_INT_10F_11F_11F_REV: 4}
drawModes = (GL_POINTS,
            GL_LINE_STRIP,
            GL_LINE_LOOP,
            GL_LINES,
            GL_LINE_STRIP_ADJACENCY,
            GL_LINES_ADJACENCY,
            GL_TRIANGLE_STRIP,
            GL_TRIANGLE_FAN,
            GL_TRIANGLES,
            GL_TRIANGLE_STRIP_ADJACENCY,
            GL_TRIANGLES_ADJACENCY,
            GL_PATCHES)
indexTypeFromSize = {1: GL_UNSIGNED_BYTE, 2: GL_UNSIGNED_SHORT, 4: GL_UNSIGNED_INT}


def _loadMesh_v0(fh, vao, bufs):
    vertexCount = struct.unpack('I', fh.read(4))[0]
    vertexSize = struct.unpack('B', fh.read(1))[0]
    indexCount = struct.unpack('I', fh.read(4))[0]
    indexSize = struct.unpack('B', fh.read(1))[0]
    assert indexSize in indexTypeFromSize, 'Unknown element data type, element size must be one of %s' % indexTypeFromSize.keys()
    indexType = indexTypeFromSize[indexSize]
    drawMode = struct.unpack('I', fh.read(4))[0]
    assert drawMode in (GL_LINES, GL_TRIANGLES), 'Unknown draw mode.'  # TODO: list all render types
  
    # gather layout
    numAttributes = struct.unpack('B', fh.read(1))[0]
    offset = 0
    layouts = []
    for i in xrange(numAttributes):
        location = struct.unpack('B', fh.read(1))[0]
        dimensions = struct.unpack('B', fh.read(1))[0]
        assert dimensions in (1, 2, 3, 4)
        dataType = struct.unpack('I', fh.read(4))[0]
        assert dataType in attributeElementTypes, 'Invalid GLenum value for attribute element type.'
        layouts.append((location, dimensions, dataType, offset))
        offset += dimensions * sizeOfType[dataType]
  
    # apply layout
    for layout in layouts:
        glVertexAttribPointer(layout[0], layout[1], layout[2], GL_FALSE, offset, ctypes.c_void_p(layout[3]))  # total offset is now stride
        glEnableVertexAttribArray(layout[0])
  
    raw = fh.read(vertexSize * vertexCount)
    glBufferData(GL_ARRAY_BUFFER, vertexSize * vertexCount, raw, GL_STATIC_DRAW)
    raw = fh.read(indexSize * indexCount)
    glBufferData(GL_ELEMENT_ARRAY_BUFFER, indexSize * indexCount, raw, GL_STATIC_DRAW)
  
    assert len(fh.read()) == 0, 'Expected end of file, but file is longer than it indicates'
    return Mesh(vao, bufs, drawMode, indexCount, indexType)


class Mesh(object):
    def __init__(self, vao, bufs, drawMode, indexCount, indexType):
        self.__vao = vao
        self.__bufs = bufs
        self.__drawMode = drawMode
        self.__indexCount = indexCount
        self.__indexType = indexType
  
    def __del__(self):
        glDeleteBuffers(len(self.__bufs), self.__bufs)
        glDeleteVertexArrays(1, [self.__vao])
  
    def draw(self):
        glBindVertexArray(self.__vao)
        glDrawElements(self.__drawMode, self.__indexCount, self.__indexType, None)


def model(filePath):
    vao = glGenVertexArrays(1)
    glBindVertexArray(vao)
    bufs = glGenBuffers(2)
    glBindBuffer(GL_ARRAY_BUFFER, bufs[0])
    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, bufs[1])
    with open(filePath, 'rb') as fh:
        fileVersion = struct.unpack('B', fh.read(1))[0]
        if fileVersion == 0:
            return _loadMesh_v0(fh, vao, bufs)
        raise RuntimeError('Unknown mesh file version %s in %s' % (fileVersion, filePath))


# import the necessary modules
import time
from PyQt4.QtCore import *  # QTimer
from PyQt4.QtGui import *  # QApplication
from PyQt4.QtOpenGL import *  # QGLWidget
from OpenGL.GL import *  # OpenGL functionality


# this is the basic window
class OpenGLView(QGLWidget):
    def initializeGL(self):
        # set the RGBA values of the background
        glClearColor(0.1, 0.2, 0.3, 1.0)
  
        # set a timer to redraw every 1/60th of a second
        self.__timer = QTimer()
        self.__timer.timeout.connect(self.repaint)
        self.__timer.start(1000 / 60)
  
        # import a model
        self.__mesh = model(r'C:\Users\John\Python\maya\cube.bm')
  
    def resizeGL(self, width, height):
        glViewport(0, 0, width, height)
  
    def paintGL(self):
        glLoadIdentity()
        glScalef(self.height() / float(self.width()), 1.0, 1.0)
        glRotate((time.time() % 36.0) * 10, 0, 0, 1)
  
        glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT)
        self.__mesh.draw()


# this initializes Qt
app = QApplication([])
# this creates the openGL window, but it isn't initialized yet
window = OpenGLView()
# this only schedules the window to be shown on the next Qt update
window.show()
# this starts the Qt main update loop, it avoids python from continuing beyond this line
# and any Qt stuff we did above is now going to actually get executed, along with any future
# events like mouse clicks and window resizes
app.exec_()