First commit
commit a2dd11fbc6
10 changed files with 467 additions and 0 deletions
217
BlenderCOL.py
Normal file
@@ -0,0 +1,217 @@
bl_info = {
    "name": "Export COL with Obj2Col",
    "author": "Blank",
    "version": (1, 0, 0),
    "blender": (2, 71, 0),
    "location": "File > Export > Collision (.col)",
    "description": "This script allows you to export col files quickly using obj2col directly from blender",
    "warning": "Might break, doing this mostly for my own convenience",
    "category": "Import-Export"
}

import bpy
import os
from bpy_extras.io_utils import ExportHelper
from bpy.props import (BoolProperty,
                       FloatProperty,
                       StringProperty,
                       EnumProperty,
                       )
# Struct and Padding come from the btypes package added in this commit; the number
# types are taken from btypes.big_endian (the only concrete types defined here).
# Without some such imports the class definitions below raise NameError.
from btypes import Struct, Padding
from btypes.big_endian import bool8, uint8, uint16, uint32, float32


class Header(Struct):
    vertex_count = uint32
    vertex_offset = uint32
    group_count = uint32
    group_offset = uint32


class Vertex(Struct):
    x = float32
    y = float32
    z = float32

    def __init__(self,x,y,z):
        self.x = x
        self.y = y
        self.z = z


class Group(Struct):
    unknown0 = uint8 # 0,1,2,4,6,7,8,64,128,129,132,135,160,192, bitfield?
    unknown1 = uint8 # 0-12
    triangle_count = uint16
    __padding__ = Padding(1,b'\x00')
    has_unknown4 = bool8
    __padding__ = Padding(2)
    vertex_index_offset = uint32
    unknown2_offset = uint32 # 0-18,20,21,23,24,27-31
    unknown3_offset = uint32 # 0-27
    unknown4_offset = uint32 # 0,1,2,3,4,8,255,6000,7500,7800,8000,8400,9000,10000,10300,12000,14000,17000,19000,20000,21000,22000,27500,30300


class Triangle:

    def __init__(self):
        self.vertex_indices = None
        self.unknown0 = 128
        self.unknown1 = 0
        self.unknown2 = 0
        self.unknown3 = 0
        self.unknown4 = None

    @property
    def has_unknown4(self):
        return self.unknown4 is not None


def pack(stream,vertices,triangles): #pack triangles into col file
    groups = []

    for triangle in triangles:
        for group in groups: #add each triangle to a matching group
            if triangle.unknown0 != group.unknown0: continue #skip to the next group
            if triangle.unknown1 != group.unknown1: continue
            if triangle.has_unknown4 != group.has_unknown4: continue
            group.triangles.append(triangle)
            break
        else: #if no matching group has been found
            group = Group() #create a new group
            group.unknown0 = triangle.unknown0
            group.unknown1 = triangle.unknown1
            group.has_unknown4 = triangle.has_unknown4
            group.triangles = [triangle]
            groups.append(group) #add to list of groups

    header = Header()
    header.vertex_count = len(vertices)
    header.vertex_offset = Header.sizeof() + Group.sizeof()*len(groups)
    header.group_count = len(groups)
    header.group_offset = Header.sizeof()
    Header.pack(stream,header)

    stream.write(b'\x00'*Group.sizeof()*len(groups)) #reserve space for the group table

    for vertex in vertices:
        Vertex.pack(stream,vertex)

    for group in groups:
        group.triangle_count = len(group.triangles)
        group.vertex_index_offset = stream.tell()
        for triangle in group.triangles:
            uint16.pack(stream,triangle.vertex_indices[0])
            uint16.pack(stream,triangle.vertex_indices[1])
            uint16.pack(stream,triangle.vertex_indices[2])

    for group in groups:
        group.unknown2_offset = stream.tell()
        for triangle in group.triangles:
            uint8.pack(stream,triangle.unknown2)

    for group in groups:
        group.unknown3_offset = stream.tell()
        for triangle in group.triangles:
            uint8.pack(stream,triangle.unknown3)

    for group in groups:
        if not group.has_unknown4:
            group.unknown4_offset = 0
        else:
            group.unknown4_offset = stream.tell()
            for triangle in group.triangles:
                uint16.pack(stream,triangle.unknown4)

    stream.seek(header.group_offset) #go back and fill in the group table with the final offsets
    for group in groups:
        Group.pack(stream,group)


def unpack(stream):
    header = Header.unpack(stream)

    stream.seek(header.group_offset)
    groups = [Group.unpack(stream) for _ in range(header.group_count)]

    stream.seek(header.vertex_offset)
    vertices = [Vertex.unpack(stream) for _ in range(header.vertex_count)]

    for group in groups:
        group.triangles = [Triangle() for _ in range(group.triangle_count)]
        for triangle in group.triangles:
            triangle.unknown0 = group.unknown0
            triangle.unknown1 = group.unknown1

    for group in groups:
        stream.seek(group.vertex_index_offset)
        for triangle in group.triangles:
            triangle.vertex_indices = [uint16.unpack(stream) for _ in range(3)]

    for group in groups:
        stream.seek(group.unknown2_offset)
        for triangle in group.triangles:
            triangle.unknown2 = uint8.unpack(stream)

    for group in groups:
        stream.seek(group.unknown3_offset)
        for triangle in group.triangles:
            triangle.unknown3 = uint8.unpack(stream)

    for group in groups:
        if not group.has_unknown4: continue
        stream.seek(group.unknown4_offset)
        for triangle in group.triangles:
            triangle.unknown4 = uint16.unpack(stream)

    triangles = sum((group.triangles for group in groups),[])

    return vertices,triangles


class ExportCOL(bpy.types.Operator, ExportHelper):
    """Save a COL file"""
    bl_idname = "export_mesh.col"
    bl_label = "Export COL"
    filter_glob = StringProperty(
            default="*.col",
            options={'HIDDEN'},
            )

    check_extension = True
    filename_ext = ".col"

    #To do: add material presets

    def execute(self, context): # execute() is called by blender when running the operator.
        Obj = bpy.context.scene.objects.active
        Mesh = Obj.data
        VertexList = []
        Triangles = []
        for Vert in Mesh.vertices:
            VertexList.append(Vertex(Vert.co.x,Vert.co.y,Vert.co.z)) #add in verts

        for Face in Mesh.polygons:
            MyTriangle = Triangle()
            MyTriangle.vertex_indices = [Face.vertices[0],Face.vertices[1],Face.vertices[2]] #add three vertex indices
            Triangles.append(MyTriangle) #add triangles

        ColStream = open(self.filepath,'wb')
        pack(ColStream,VertexList,Triangles)
        ColStream.close() #close the file so the data is flushed to disk
        return {'FINISHED'} # this lets blender know the operator finished successfully.


def register():
    bpy.utils.register_class(ExportCOL)
    bpy.types.INFO_MT_file_export.append(menu_func)

def menu_func(self, context):
    self.layout.operator(ExportCOL.bl_idname, text="Collision (.col)")

def unregister():
    bpy.utils.unregister_class(ExportCOL)
    bpy.types.INFO_MT_file_export.remove(menu_func)


# This allows you to run the script directly from blender's text editor
# to test the addon without having to install it.
if __name__ == "__main__":
    register()
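
Note (not part of the commit): a minimal round-trip sketch of the pack()/unpack() pair above. It assumes Vertex, Triangle, pack and unpack are available without the Blender-specific half of the file, e.g. when run inside Blender's Python console or after splitting the format code into its own module.

    import io

    # Vertex, Triangle, pack and unpack are the names defined in BlenderCOL.py above.
    vertices = [Vertex(0.0, 0.0, 0.0), Vertex(1.0, 0.0, 0.0), Vertex(0.0, 1.0, 0.0)]
    triangle = Triangle()
    triangle.vertex_indices = [0, 1, 2]

    stream = io.BytesIO()
    pack(stream, vertices, [triangle])   # writes header, group table, vertices, indices

    stream.seek(0)
    read_vertices, read_triangles = unpack(stream)
    assert read_triangles[0].vertex_indices == [0, 1, 2]
    assert len(read_vertices) == 3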
23
btypes/__init__.py
Normal file
@@ -0,0 +1,23 @@
"""Module for reading and writing data structures."""

from btypes.types import *


class FormatError(Exception): pass


def align(stream,length,padding=b'This is padding data to alignment.'):
    if stream.tell() % length == 0: return
    n,r = divmod(length - (stream.tell() % length),len(padding))
    stream.write(n*padding + padding[0:r])


SEEK_POS = 0
SEEK_CUR = 1
SEEK_END = 2


NATIVE_ENDIAN = '='
LITTLE_ENDIAN = '<'
BIG_ENDIAN = '>'
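
Note (not part of the commit): a quick illustration of what align() does, using io.BytesIO; the 32-byte alignment is an arbitrary value chosen for the example.

    import io
    from btypes import align

    stream = io.BytesIO()
    stream.write(b'\x01\x02\x03')   # stream position is now 3
    align(stream, 32)               # writes 29 bytes taken from the default padding string
    assert stream.tell() == 32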
BIN
btypes/__pycache__/__init__.cpython-34.pyc
Normal file
Binary file not shown.
BIN
btypes/__pycache__/__init__.cpython-36.pyc
Normal file
Binary file not shown.
BIN
btypes/__pycache__/big_endian.cpython-34.pyc
Normal file
Binary file not shown.
BIN
btypes/__pycache__/big_endian.cpython-36.pyc
Normal file
Binary file not shown.
BIN
btypes/__pycache__/types.cpython-34.pyc
Normal file
Binary file not shown.
BIN
btypes/__pycache__/types.cpython-36.pyc
Normal file
Binary file not shown.
16
btypes/big_endian.py
Normal file
@@ -0,0 +1,16 @@
from btypes import *


bool8 = BasicType('?',BIG_ENDIAN)
sint8 = BasicType('b',BIG_ENDIAN)
uint8 = BasicType('B',BIG_ENDIAN)
sint16 = BasicType('h',BIG_ENDIAN)
uint16 = BasicType('H',BIG_ENDIAN)
sint32 = BasicType('l',BIG_ENDIAN)
uint32 = BasicType('L',BIG_ENDIAN)
sint64 = BasicType('q',BIG_ENDIAN)
uint64 = BasicType('Q',BIG_ENDIAN)
float32 = BasicType('f',BIG_ENDIAN)
float64 = BasicType('d',BIG_ENDIAN)
cstring = CString('ascii')
pstring = PString(uint8,'ascii')
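
Note (not part of the commit): a small check that the big-endian types pack the most significant byte first.

    import io
    from btypes.big_endian import uint16

    stream = io.BytesIO()
    uint16.pack(stream, 0x0102)
    assert stream.getvalue() == b'\x01\x02'   # most significant byte first

    stream.seek(0)
    assert uint16.unpack(stream) == 0x0102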
211
btypes/types.py
Normal file
@@ -0,0 +1,211 @@
import struct as _struct


class BasicType:

    def __init__(self,format_character,endianess):
        self.format_character = format_character
        self.endianess = endianess
        self.format_string = endianess + format_character
        self.size = _struct.calcsize(self.format_string)

    def pack(self,stream,value):
        stream.write(_struct.pack(self.format_string,value))

    def unpack(self,stream):
        return _struct.unpack(self.format_string,stream.read(self.size))[0]

    def sizeof(self):
        return self.size


class FixedPointConverter:

    def __init__(self,integer_type,scale):
        self.integer_type = integer_type
        self.scale = scale

    def pack(self,stream,value):
        self.integer_type.pack(stream,int(value/self.scale))

    def unpack(self,stream):
        return self.integer_type.unpack(stream)*self.scale

    def sizeof(self):
        return self.integer_type.sizeof()


class ByteString:

    def __init__(self,length):
        self.length = length

    def pack(self,stream,string):
        if len(string) != self.length:
            raise ValueError('wrong string length')
        stream.write(string)

    def unpack(self,stream):
        return stream.read(self.length)

    def sizeof(self):
        return self.length


class Array:

    def __init__(self,element_type,length):
        self.element_type = element_type
        self.length = length

    def pack(self,stream,array):
        if len(array) != self.length:
            raise ValueError('wrong array length')
        for value in array:
            self.element_type.pack(stream,value)

    def unpack(self,stream):
        return [self.element_type.unpack(stream) for i in range(self.length)]

    def sizeof(self):
        return self.length*self.element_type.sizeof()


class CString:

    def __init__(self,encoding):
        self.encoding = encoding

    def pack(self,stream,string):
        stream.write((string + '\0').encode(self.encoding))

    def unpack(self,stream):
        #XXX: This might not work for all encodings
        null = '\0'.encode(self.encoding)
        string = b''
        while True:
            c = stream.read(len(null))
            if c == null: break
            string += c
        return string.decode(self.encoding)

    def sizeof(self):
        return None


class PString:

    def __init__(self,length_type,encoding):
        self.length_type = length_type
        self.encoding = encoding

    def pack(self,stream,string):
        string = string.encode(self.encoding)
        self.length_type.pack(stream,len(string))
        stream.write(string)

    def unpack(self,stream):
        length = self.length_type.unpack(stream)
        return stream.read(length).decode(self.encoding)

    def sizeof(self):
        return None


class Field:

    def __init__(self,name,field_type):
        self.name = name
        self.field_type = field_type

    def pack(self,stream,struct):
        self.field_type.pack(stream,getattr(struct,self.name))

    def unpack(self,stream,struct):
        setattr(struct,self.name,self.field_type.unpack(stream))

    def sizeof(self):
        return self.field_type.sizeof()

    def equal(self,struct,other):
        return getattr(struct,self.name) == getattr(other,self.name)


class Padding:

    def __init__(self,length,padding=b'\xFF'):
        self.length = length
        self.padding = padding

    def pack(self,stream,struct):
        stream.write(self.padding*self.length)

    def unpack(self,stream,struct):
        stream.read(self.length)

    def sizeof(self):
        return self.length

    def equal(self,struct,other):
        return True


class StructClassDictionary(dict):
    """Class namespace that records field declarations in definition order."""

    def __init__(self):
        super().__init__()
        self.struct_fields = []

    def __setitem__(self,key,value):
        # Non-dunder, non-descriptor assignments (e.g. x = float32) become Fields;
        # __padding__ entries are appended directly; everything else is stored normally.
        if not key[:2] == key[-2:] == '__' and not hasattr(value,'__get__'):
            self.struct_fields.append(Field(key,value))
        elif key == '__padding__':
            self.struct_fields.append(value)
        else:
            super().__setitem__(key,value)


class StructMetaClass(type):

    @classmethod
    def __prepare__(metacls,cls,bases):
        return StructClassDictionary()

    def __new__(metacls,cls,bases,classdict):
        # A struct containing any variable-sized field has no fixed size.
        if any(field.sizeof() is None for field in classdict.struct_fields):
            struct_size = None
        else:
            struct_size = sum(field.sizeof() for field in classdict.struct_fields)

        struct_class = type.__new__(metacls,cls,bases,classdict)
        struct_class.struct_fields = classdict.struct_fields
        struct_class.struct_size = struct_size
        return struct_class

    def __init__(self,cls,bases,classdict):
        super().__init__(cls,bases,classdict)


class Struct(metaclass=StructMetaClass):

    __slots__ = tuple()

    def __eq__(self,other):
        return all(field.equal(self,other) for field in self.struct_fields)

    @classmethod
    def pack(cls,stream,struct):
        for field in cls.struct_fields:
            field.pack(stream,struct)

    @classmethod
    def unpack(cls,stream):
        struct = cls.__new__(cls) #TODO: what if __init__ does something important?
        for field in cls.struct_fields:
            field.unpack(stream,struct)
        return struct

    @classmethod
    def sizeof(cls):
        return cls.struct_size
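
Note (not part of the commit): a usage sketch of the Struct machinery above; the Example class is hypothetical and only illustrates how class-level declarations become packable fields.

    import io
    from btypes import Struct, Padding
    from btypes.big_endian import uint16, uint32

    class Example(Struct):
        count = uint16                      # becomes a Field via StructClassDictionary
        __padding__ = Padding(2, b'\x00')   # appended to struct_fields, not stored as an attribute
        offset = uint32

    example = Example()
    example.count = 3
    example.offset = 16

    stream = io.BytesIO()
    Example.pack(stream, example)
    assert Example.sizeof() == 8
    assert stream.getvalue() == b'\x00\x03\x00\x00\x00\x00\x00\x10'

    stream.seek(0)
    assert Example.unpack(stream) == example    # Struct.__eq__ compares field by field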