Advertisement
snake5

io_export_ss3dmesh.py

Feb 16th, 2016
483
0
Never
Not a member of Pastebin yet? Sign Up, it unlocks many cool features!
Python 32.96 KB | None | 0 0
  1. import bpy
  2. from bpy.props import *
  3. from mathutils import *
  4. from pprint import pprint
  5. from copy import copy
  6. from itertools import islice
  7. import math, struct, csv, sys
  8. import os.path
  9.  
  10.  
  11. """ FORMAT
  12.    
  13.     BUFFER: (min size = 4)
  14.     - uint32 size
  15.     - uint8 data[size]
  16.    
  17.     SMALLBUF: (min size = 1)
  18.     - uint8 size
  19.     - uint8 data[size]
  20.    
  21.     PART: (min size = 20)
  22.     - uint8 flags
  23.     - uint8 blendmode
  24.     - uint32 voff
  25.     - uint32 vcount
  26.     - uint32 ioff
  27.     - uint32 icount
  28.     - uint8 texcount
  29.     - smallbuf shader
  30.     - smallbuf textures[texcount]
  31.    
  32.     MESH:
  33.     - magic "SS3DMESH"
  34.     - uint32 flags
  35.    
  36.     - float boundsmin[3]
  37.     - float boundsmax[3]
  38.    
  39.     - buffer vdata
  40.     - buffer idata
  41.     - smallbuf format
  42.     - uint8 numparts
  43.     - part parts[numparts]
  44.    
  45.     minimum size = 12+24+10 = 46
  46. """
  47.  
  48.  
# Blender add-on registration metadata, read by Blender's add-on manager.
bl_info = {
    "name": "SS3DMESH Mesh Format (.ssm)",
    "author": "Arvīds Kokins",
    "version": (0, 5, 2),
    # minimum supported Blender version
    "blender": (2, 6, 9),
    "api": 38019,
    "location": "File > Export > SS3DMESH (.ssm)",
    "description": "SS3DMESH Mesh Export (.ssm)",
    "warning": "",
    "wiki_url": "http://cragegames.com",
    "tracker_url": "http://cragegames.com",
    "category": "Import-Export"
}
  62.  
  63.  
  64. def write_smallbuf( f, bytebuf ):
  65.     if len( bytebuf ) > 255:
  66.         raise Exception( "smallbuf too big" )
  67.     if type( bytebuf ) == str:
  68.         bytebuf = bytes( bytebuf, "UTF-8" )
  69.     f.write( struct.pack( "B", len( bytebuf ) ) )
  70.     f.write( bytebuf )
  71. #
  72.  
  73. def write_buffer( f, bytebuf ):
  74.     if len( bytebuf ) > 0xffffffff:
  75.         raise Exception( "buffer too big" )
  76.     if type( bytebuf ) == str:
  77.         bytebuf = bytes( bytebuf, "UTF-8" )
  78.     f.write( struct.pack( "L", len( bytebuf ) ) )
  79.     f.write( bytebuf )
  80. #
  81.  
  82. def serialize_matrix( m ):
  83.     return struct.pack( "16f",
  84.         m[0][0], m[1][0], m[2][0], m[3][0],
  85.         m[0][1], m[1][1], m[2][1], m[3][1],
  86.         m[0][2], m[1][2], m[2][2], m[3][2],
  87.         m[0][3], m[1][3], m[2][3], m[3][3] )
  88.  
  89. def write_part( f, part ):
  90.     if len( part["textures"] ) > 8:
  91.         raise Exception( "too many textures (max. 8 allowed)" )
  92.     f.write( struct.pack( "=BB", part["flags"], part["blendmode"] ) )
  93.     write_smallbuf( f, part["name"] )
  94.     f.write( serialize_matrix( part[ "mtx" ] ) )
  95.     f.write( struct.pack( "=LLLLB", part["voff"], part["vcount"],
  96.         part["ioff"], part["icount"], len( part["textures"] ) ) )
  97.     write_smallbuf( f, bytes( part["shader"], "UTF-8" ) )
  98.     for tex in part["textures"]:
  99.         write_smallbuf( f, tex.replace("\\", "/") )
  100. #
  101.  
def write_mesh( f, meshdata, armdata, boneorder ):
    """Write a complete SS3DMESH blob (see the format comment at the top of
    this file) to the binary file object `f`.

    meshdata  -- dict produced by parse_geometry / mesh_data_add:
                 bbmin, bbmax, vertices, indices, format, parts
    armdata   -- armature data block, or None for an unskinned mesh
    boneorder -- list of bone names defining the skinning palette order
    """
    is_skinned = armdata != None
    bbmin = meshdata["bbmin"]
    bbmax = meshdata["bbmax"]
    vertices = meshdata["vertices"]
    indices = meshdata["indices"]
    format = meshdata["format"]
    parts = meshdata["parts"]

    # part count is written as a single byte below
    if len( parts ) > 255:
        raise Exception( "too many parts (max. 255 allowed)" )

    # switch to 32-bit indices only when 16-bit ones cannot address all vertices
    is_i32 = len( vertices ) > 65535

    print( "--- MESH STATS ---" )
    print( "Vertex count: %d" % ( len(vertices) ) )
    print( "Index count: %d" % ( len(indices) ) )
    print( "Format string: " + format )
    print( "Part count: %d" % ( len(parts) ) )
    for part_id, part in enumerate( parts ):
        print( "- part %d: name=%s voff=%d vcount=%d ioff=%d icount=%d flags=%d blendmode=%d texcount=%d shader='%s'" % (
            part_id, part["name"], part["voff"], part["vcount"], part["ioff"], part["icount"],
            part["flags"], part["blendmode"], len( part["textures"] ), part["shader"] ) )

    f.write( bytes( "SS3DMESH", "UTF-8" ) )
    # mesh data flags, 0x100 = extended mtl data, 0x200 = part name data
    # 0x01 = 32-bit indices, 0x80 = skinned mesh
    # NOTE(review): bare "L" is native-sized (8 bytes on 64-bit Linux) while the
    # format expects uint32 — confirm target platform / consider "=L".
    f.write( struct.pack( "L", 0x100 + 0x200 +
        (1 if is_i32 else 0) * 0x01 + \
        (1 if is_skinned else 0) * 0x80 ) )
    # axis-aligned bounding box: min xyz, then max xyz
    f.write( struct.pack( "6f", bbmin.x, bbmin.y, bbmin.z, bbmax.x, bbmax.y, bbmax.z ) )

    # vertex buffer: vertices are already packed bytes, just concatenated
    vdata = bytes()
    for vertex in vertices:
        vdata += vertex
    write_buffer( f, vdata )

    # index buffer: element width depends on is_i32
    idata = bytes()
    if( is_i32 ):
        for index in indices:
            idata += struct.pack( "L", index )
    else:
        for index in indices:
            idata += struct.pack( "H", index )
    write_buffer( f, idata )

    write_smallbuf( f, format )
    f.write( struct.pack( "B", len(parts) ) )
    for part in parts:
        write_part( f, part )

    # skinning palette: per bone — name, parent palette index (255 = root),
    # and its local (parent-relative) transform
    if is_skinned:
        f.write( struct.pack( "B", len(boneorder) ) )
        for bonename in boneorder:
            bone = armdata.bones[ bonename ]
            print( "Bone found: " + bone.name )
            write_smallbuf( f, bone.name )
            pid = 255
            m = bone.matrix_local
            if bone.parent is not None:
                # matrix_local is armature-space; convert to parent-relative
                m = bone.parent.matrix_local.inverted() * m
                for bpid, pbone in enumerate(boneorder):
                    if bone.parent.name == pbone:
                        pid = bpid
                        break
            #
            # print(m)
            f.write( struct.pack( "B", pid ) )
            f.write( serialize_matrix( m ) )
        #
    #

    return
#
  175.  
def write_anims( f, anims ):
    """Write the legacy SS3DANIM animation container to `f`.

    anims -- list of dicts with keys: name, frames (int), speed (float),
             tracks (dict: track name -> list of per-frame matrices),
             markers (list of dicts with name/frame)
    """
    print( "--- ANIMATIONS' STATS ---" )
    print( "Count: %d" % len( anims ) )

    f.write( bytes( "SS3DANIM", "UTF-8" ) )
    f.write( struct.pack( "L", len( anims ) ) )

    i = 0
    for anim in anims:
        i += 1
        print( "Animation #%d: %s" % ( i, anim["name"] ) )

        a_name = anim["name"]
        a_frames = anim["frames"]
        a_tracks = anim["tracks"]
        a_markers = anim["markers"]
        a_speed = anim["speed"]

        # animation record: smallbuf name, frame count, speed,
        # track count, marker count
        a_name_bytes = bytes( a_name, "UTF-8" )
        animbuf = struct.pack( "B", len(a_name_bytes) ) + a_name_bytes
        animbuf += struct.pack( "=LfBB", a_frames, a_speed, len( a_tracks ), len( a_markers ) )

        # each track: smallbuf name + 10 floats per frame (pos3/rot4/scale3),
        # wrapped with a uint32 byte size
        for track_name, track_matrices in a_tracks.items():
            track_name_bytes = bytes( track_name, "UTF-8" )
            trackbuf = struct.pack( "B", len(track_name_bytes) ) + track_name_bytes

            for fid in range(a_frames):
                mtx = track_matrices[ fid ]

                # decompose the frame matrix into position/rotation/scale
                pos = mtx.to_translation()
                rot = mtx.to_quaternion()
                scl = mtx.to_scale()

                # write
                trackbuf += struct.pack( "10f",
                    pos.x, pos.y, pos.z,
                    rot.x, rot.y, rot.z, rot.w,
                    scl.x, scl.y, scl.z
                )
            #

            animbuf += struct.pack( "L", len(trackbuf) ) + trackbuf
        #

        # markers: fixed 16-byte name + uint32 frame number
        for marker in a_markers:
            animbuf += struct.pack( "=16sL", bytes( marker["name"], "UTF-8" ), marker["frame"] )

        write_buffer( f, animbuf )
    #
#
  228.  
  229. def serialize_vec3_array( arr ):
  230.     out = bytes()
  231.     for item in arr:
  232.         out += struct.pack( "fff", item.x, item.y, item.z )
  233.     return out
  234.  
  235. def serialize_quat_array( arr ):
  236.     out = bytes()
  237.     for item in arr:
  238.         out += struct.pack( "ffff", item.x, item.y, item.z, item.w )
  239.     return out
  240.  
  241. def wrap_small_string( strdata ):
  242.     data = bytes( strdata, "UTF-8" )
  243.     return struct.pack( "B", len(data) ) + data
  244.  
  245. def wrap_chunk( name, data ):
  246.     return bytes( name, "UTF-8" ) + struct.pack( "L", len( data ) ) + data
  247.  
  248. def reduce_array( arr ):
  249.     if len(arr) > 1:
  250.         diff = False
  251.         for item in arr:
  252.             if item != arr[0]:
  253.                 diff = True
  254.                 break
  255.         if diff == False:
  256.             arr = arr[:1]
  257.     return arr
  258.  
def write_anims_anbd( f, anims ):
    """Write animations in the newer SGRXANBD chunked container to `f`.

    Unlike write_anims, this format stores one shared float pool per
    animation and per-track offsets into it, with constant channels
    reduced to a single key via reduce_array.

    anims -- same structure as accepted by write_anims.
    """
    print( "--- ANIMATIONS' STATS ---" )
    print( "Count: %d" % len( anims ) )

    anbd_chunk = bytes()

    i = 0
    for anim in anims:
        i += 1
        print( "Animation #%d: %s" % ( i, anim["name"] ) )

        a_name = anim["name"]
        a_frames = anim["frames"]
        a_tracks = anim["tracks"]
        a_markers = anim["markers"]
        a_speed = anim["speed"]

        anim_chunk = struct.pack( "fH", a_speed, a_frames )

        # parse animation
        float_data = bytes()    # shared pool of floats for all tracks
        track_data = bytes()    # per-track descriptors
        track_count = 0

        for track_name, track_matrices in a_tracks.items():
            # offset of this track's data in the float pool, in floats
            track_offset = len( float_data ) // 4
            pos_arr = []
            rot_arr = []
            scl_arr = []

            # decompose animation matrices into position/rotation/scale
            for fid in range(a_frames):
                mtx = track_matrices[ fid ]

                pos = mtx.to_translation()
                rot = mtx.to_quaternion()
                scl = mtx.to_scale()

                pos_arr.append( pos )
                rot_arr.append( rot )
                scl_arr.append( scl )
            #

            # see if each list is constant and reduce it if so
            pos_arr = reduce_array( pos_arr )
            rot_arr = reduce_array( rot_arr )
            scl_arr = reduce_array( scl_arr )

            # descriptor: name, pool offset, key counts per channel, padding
            track_data += wrap_small_string( track_name )
            track_data += struct.pack( "LHHHH", track_offset, len(pos_arr), len(rot_arr), len(scl_arr), 0 )
            track_count += 1

            # channel data is laid out pos keys, then rot keys, then scale keys
            float_data += serialize_vec3_array( pos_arr )
            float_data += serialize_quat_array( rot_arr )
            float_data += serialize_vec3_array( scl_arr )
        #

        # floats (count is in floats, not bytes)
        anim_chunk += struct.pack( "L", len( float_data ) // 4 ) + float_data

        # tracks
        anim_chunk += struct.pack( "L", track_count ) + track_data

        # markers: fixed 16-byte name + uint16 frame
        anim_chunk += struct.pack( "L", len( a_markers ) )
        for marker in a_markers:
            anim_chunk += struct.pack( "16sH", bytes( marker["name"], "UTF-8" ), marker["frame"] )

        # each animation is an ANIM chunk followed by its name
        anbd_chunk += wrap_chunk( "ANIM", anim_chunk ) + wrap_small_string( a_name )
    #

    anbd_chunk = wrap_chunk( "SGRXANBD", anbd_chunk )
    f.write( anbd_chunk )
#
  334.  
  335. def addCached( olist, o, minpos = 0 ):
  336.     try:
  337.         return olist[ minpos: ].index( o )
  338.     except:
  339.         olist.append( o )
  340.         return len( olist ) - 1 - minpos
  341. #
  342.  
  343. def find_in_userdata( obj, key, default = None ):
  344.     for prop in obj.items():
  345.         if type( prop[ 1 ] ) in ( int, str, float, bool ):
  346.             if prop[ 0 ] == key:
  347.                 return prop[ 1 ]
  348.     return default
  349. #
  350.  
def parse_materials( geom_node, textures ):
    """Build a list of export material dicts from a Blender object's materials.

    geom_node -- Blender object whose mesh data holds the material slots
    textures  -- dict mapping Blender texture name -> exported texture path

    Each output dict has keys: textures, shader, flags, blendmode.
    Shader/blendmode/flags are read from the material's custom properties
    via find_in_userdata.
    """
    materials = []
    print( "Parsing materials... ", end="" )
    for mtl in geom_node.data.materials:
        outmtl = { "textures": [], "shader": "default", "flags": 0, "blendmode": 0 }
        # empty slots become "" so texture indices keep their positions
        for tex in mtl.texture_slots:
            outmtl["textures"].append( textures[ tex.name ] if tex != None else "" )
        # drop trailing empty slots only; gaps in the middle are preserved
        while len(outmtl["textures"]) and outmtl["textures"][-1] == "":
            outmtl["textures"].pop()
        shdr = find_in_userdata( mtl, "shader" )
        if type( shdr ) == str:
            outmtl["shader"] = shdr
        # map the blendmode custom property string onto its numeric code
        bmode = find_in_userdata( mtl, "blendmode" )
        if type( bmode ) == str:
            if bmode == "none":
                outmtl["blendmode"] = 0
            if bmode == "basic":
                outmtl["blendmode"] = 1
            if bmode == "additive":
                outmtl["blendmode"] = 2
            if bmode == "multiply":
                outmtl["blendmode"] = 3
        # flag bits: 1 = unlit, 2 = no backface culling
        if find_in_userdata( mtl, "unlit", False ):
            outmtl["flags"] |= 1
        if find_in_userdata( mtl, "nocull", False ):
            outmtl["flags"] |= 2
        materials.append( outmtl )
    print( "OK!" )
    return materials
  380.  
  381. def parse_geometry( geom_node, textures, opt_boneorder, props ):
  382.    
  383.     if props.apply_modifiers == "NONE":
  384.         MESH = geom_node.to_mesh( bpy.context.scene, False, "PREVIEW" )
  385.     else:
  386.         preview_settings = []
  387.         if props.apply_modifiers == "SKIPARM":
  388.             for mod in geom_node.modifiers:
  389.                 preview_settings.append( mod.show_viewport )
  390.                 if mod.type == "ARMATURE":
  391.                     mod.show_viewport = False
  392.         #
  393.         MESH = geom_node.to_mesh( bpy.context.scene, True, "PREVIEW" )
  394.         if props.apply_modifiers == "SKIPARM":
  395.             for i, mod in enumerate( geom_node.modifiers ):
  396.                 mod.show_viewport = preview_settings[ i ]
  397.         #
  398.     #
  399.        
  400.     opt_vgroups = geom_node.vertex_groups if len(geom_node.vertex_groups) else None
  401.    
  402.     materials = parse_materials( geom_node, textures )
  403.    
  404.     print( "Generating geometry for %s... " % geom_node.name, end="" )
  405.     MESH.calc_normals_split()
  406.    
  407.     # SORT BY MATERIAL
  408.     MID2FACES = {} # material index -> faces
  409.     FACE2MID = {} # face -> material index
  410.    
  411.     for face in MESH.polygons:
  412.        
  413.         if face.material_index not in MID2FACES:
  414.             MID2FACES[ face.material_index ] = []
  415.         MID2FACES[ face.material_index ].append( face )
  416.    
  417.     # GENERATE COMPACT DATA
  418.     Plist = []
  419.     Nlist = []
  420.     Tlists = [ [] for tl in MESH.uv_layers ]
  421.     Clists = [ [] for cl in MESH.vertex_colors ]
  422.     genParts = [] # array of Part ( array of Face ( array of Vertex ( position index, normal index, texcoord indices, color indices ) ) )
  423.     foundMIDs = []
  424.    
  425.     if len( Tlists ) > 2:
  426.         print( "Too many UV layers" )
  427.         Tlists = Tlists[:1]
  428.     if len( Clists ) > 1:
  429.         print( "Too many color layers" )
  430.         Clists = Clists[:1]
  431.    
  432.     for flist_id in MID2FACES:
  433.         genPart = []
  434.         flist = MID2FACES[ flist_id ]
  435.         m_id = 0
  436.         for face in flist:
  437.             m_id = face.material_index
  438.             if flist_id != m_id:
  439.                 continue
  440.            
  441.             genFace = []
  442.            
  443.             for vid in range( len( face.vertices ) ):
  444.                 v_id = face.vertices[ vid ]
  445.                 l_id = face.loop_start + vid
  446.                 VTX = MESH.vertices[ v_id ]
  447.                
  448.                 pos_id = addCached( Plist, VTX.co )
  449.                 if face.use_smooth != False:
  450.                     nrm_id = addCached( Nlist, MESH.loops[ l_id ].normal )
  451.                 else:
  452.                     nrm_id = addCached( Nlist, face.normal )
  453.                
  454.                 genVertex = [ pos_id, nrm_id ]
  455.                
  456.                 for si in range( len( Tlists ) ):
  457.                     txc_id = addCached( Tlists[ si ], MESH.uv_layers[ si ].data[ l_id ].uv )
  458.                     genVertex.append( txc_id )
  459.                 for si in range( len( Clists ) ):
  460.                     col_id = addCached( Clists[ si ], MESH.vertex_colors[ si ].data[ l_id ].color )
  461.                     genVertex.append( col_id )
  462.                
  463.                 if opt_vgroups != None and opt_boneorder != None:
  464.                     groupweights = []
  465.                     for vg in VTX.groups:
  466.                         for grp in opt_vgroups:
  467.                             if vg.group == grp.index:
  468.                                 groupweights.append([ opt_boneorder.index( grp.name ), vg.weight ])
  469.                                 break
  470.                             #
  471.                         #
  472.                     #
  473.                     # sort by importance
  474.                     groupweights.sort( key=lambda x: x[1], reverse = True )
  475.                     # trim useless
  476.                     for gwoff, gw in enumerate(groupweights):
  477.                         if gw[1] < 1.0/256.0:
  478.                             del groupweights[ gwoff: ]
  479.                             break
  480.                     # check if more than 4, warn / trim if so
  481.                     if len(groupweights) > 4:
  482.                         print( "Too many weights (%d > 4) for vertex %d at %s" % ( len(groupweights), v_id, VTX.co ) )
  483.                         del groupweights[ 4: ]
  484.                     if len(groupweights) == 0:
  485.                         groupweights.append([ 0, 1.0 ])
  486.                     # renormalize
  487.                     wsum = 0.0
  488.                     for gw in groupweights:
  489.                         wsum += gw[1]
  490.                     wsum /= 255.0
  491.                     for gw in groupweights:
  492.                         gw[1] = round( gw[1] / wsum )
  493.                     # finish quantization to bytes
  494.                     wsum = 0.0
  495.                     for gw in groupweights:
  496.                         wsum += gw[1]
  497.                     gwoff = 0
  498.                     while wsum != 255:
  499.                         sgnadd = -1 if wsum > 255 else 1
  500.                         groupweights[ gwoff ][1] += sgnadd
  501.                         wsum += sgnadd
  502.                     # compress
  503.                     while len(groupweights) < 4:
  504.                         groupweights.append([ 0, 0 ])
  505.                     gw_groups = struct.pack( "4B", groupweights[0][0], groupweights[1][0], groupweights[2][0], groupweights[3][0] )
  506.                     gw_weights = struct.pack( "4B", groupweights[0][1], groupweights[1][1], groupweights[2][1], groupweights[3][1] )
  507.                    
  508.                     genVertex.append( gw_groups )
  509.                     genVertex.append( gw_weights )
  510.                 #
  511.                
  512.                 genFace.append( genVertex )
  513.             genPart.append( genFace )
  514.         genParts.append( genPart )
  515.         foundMIDs.append( m_id )
  516.     #
  517.    
  518.     # VALIDATION
  519.     if len( Plist ) <= 0:
  520.         raise Exception( "Mesh has no vertices!" )
  521.     #
  522.    
  523.     # CONVERT TO VERTEX BUFFER FORMAT
  524.     vertices = []
  525.     indices = []
  526.     parts = []
  527.     defmtl = { "textures": [], "shader": "default", "flags": 0, "blendmode": 0 }
  528.    
  529.     mtl_num = -1
  530.     for part in genParts:
  531.         mtl_num += 1
  532.         mtl_id = foundMIDs[ mtl_num ]
  533.         vroot = len(vertices)
  534.         outpart = {
  535.             "name": geom_node.name + "#" + str(mtl_num),
  536.             "mtx": geom_node.matrix_world,
  537.             "voff": len(vertices),
  538.             "vcount": 0,
  539.             "ioff": len(indices),
  540.             "icount": 0,
  541.             "flags": materials[ mtl_id ]["flags"] if mtl_id in materials else defmtl["flags"],
  542.             "blendmode": materials[ mtl_id ]["blendmode"] if mtl_id in materials else defmtl["blendmode"],
  543.             "shader": materials[ mtl_id ]["shader"] if mtl_id in materials else defmtl["shader"],
  544.             "textures": materials[ mtl_id ]["textures"] if mtl_id in materials else defmtl["textures"],
  545.         }
  546.        
  547.         for face in part:
  548.             tmpidcs = []
  549.             for vertex in face:
  550.                 P = Plist[ vertex[0] ]
  551.                 N = Nlist[ vertex[1] ]
  552.                 vertexdata = struct.pack( "3f3f", P.x, P.y, P.z, N.x, N.y, N.z )
  553.                 vip = 2
  554.                 for si in range( len( Tlists ) ):
  555.                     T = Tlists[ si ][ vertex[ vip ] ]
  556.                     vip += 1
  557.                     vertexdata += struct.pack( "2f", T.x, 1 - T.y )
  558.                 for si in range( len( Clists ) ):
  559.                     C = Clists[ si ][ vertex[ vip ] ]
  560.                     vip += 1
  561.                     vertexdata += struct.pack( "4B", int(C.r * 255), int(C.g * 255), int(C.b * 255), 255 )
  562.                 if opt_vgroups != None and opt_boneorder != None:
  563.                     vertexdata += vertex[ vip ] # indices (groups)
  564.                     vip += 1
  565.                     vertexdata += vertex[ vip ] # weights
  566.                     vip += 1
  567.                 tmpidcs.append( addCached( vertices, vertexdata, vroot ) )
  568.             #
  569.             for i in range( 2, len( tmpidcs ) ):
  570.                 indices.append( tmpidcs[ 0 ] )
  571.                 indices.append( tmpidcs[ i ] )
  572.                 indices.append( tmpidcs[ i - 1 ] )
  573.             #
  574.         #
  575.        
  576.         outpart["vcount"] = len(vertices) - outpart["voff"]
  577.         outpart["icount"] = len(indices) - outpart["ioff"]
  578.         parts.append( outpart )
  579.     #
  580.    
  581.     # TANGENT SPACE CALC
  582.     if len( Tlists ) > 0:
  583.         tan1list = [ Vector([0,0,0]) for i in range(len(vertices)) ]
  584.         tan2list = [ Vector([0,0,0]) for i in range(len(vertices)) ]
  585.         hitlist = [ 0 for i in range(len(vertices)) ]
  586.        
  587.         for part in parts:
  588.             voff = part["voff"]
  589.             ioff = part["ioff"]
  590.             for i in range( 0, part["icount"], 3 ):
  591.                 i1 = indices[ ioff + i + 0 ] + voff
  592.                 i2 = indices[ ioff + i + 1 ] + voff
  593.                 i3 = indices[ ioff + i + 2 ] + voff
  594.                
  595.                 Pdc1 = struct.unpack( "3f", vertices[ i1 ][ :12 ] )
  596.                 Pdc2 = struct.unpack( "3f", vertices[ i2 ][ :12 ] )
  597.                 Pdc3 = struct.unpack( "3f", vertices[ i3 ][ :12 ] )
  598.                
  599.                 v1 = Vector([ Pdc1[0], Pdc1[1], Pdc1[2] ])
  600.                 v2 = Vector([ Pdc2[0], Pdc2[1], Pdc2[2] ])
  601.                 v3 = Vector([ Pdc3[0], Pdc3[1], Pdc3[2] ])
  602.                
  603.                 Tdc1 = struct.unpack( "2f", vertices[ i1 ][ 24:32 ] )
  604.                 Tdc2 = struct.unpack( "2f", vertices[ i2 ][ 24:32 ] )
  605.                 Tdc3 = struct.unpack( "2f", vertices[ i3 ][ 24:32 ] )
  606.                
  607.                 w1 = Vector([ Tdc1[0], Tdc1[1], 0 ])
  608.                 w2 = Vector([ Tdc2[0], Tdc2[1], 0 ])
  609.                 w3 = Vector([ Tdc3[0], Tdc3[1], 0 ])
  610.                
  611.                 x1 = v2.x - v1.x;
  612.                 x2 = v3.x - v1.x;
  613.                 y1 = v2.y - v1.y;
  614.                 y2 = v3.y - v1.y;
  615.                 z1 = v2.z - v1.z;
  616.                 z2 = v3.z - v1.z;
  617.                
  618.                 s1 = w2.x - w1.x;
  619.                 s2 = w3.x - w1.x;
  620.                 t1 = w2.y - w1.y;
  621.                 t2 = w3.y - w1.y;
  622.                
  623.                 ir = s1 * t2 - s2 * t1
  624.                 if True: # abs( ir ) > 0.000001:
  625.                     r = abs( ir ) # 1.0 / ir
  626.                     sdir = Vector([(t2 * x1 - t1 * x2) * r, (t2 * y1 - t1 * y2) * r, (t2 * z1 - t1 * z2) * r])
  627.                     tdir = Vector([(s1 * x2 - s2 * x1) * r, (s1 * y2 - s2 * y1) * r, (s1 * z2 - s2 * z1) * r])
  628.                    
  629.                     tan1list[ i1 ] += sdir
  630.                     tan1list[ i2 ] += sdir
  631.                     tan1list[ i3 ] += sdir
  632.                    
  633.                     tan2list[ i1 ] += tdir
  634.                     tan2list[ i2 ] += tdir
  635.                     tan2list[ i3 ] += tdir
  636.                    
  637.                     hitlist[ i1 ] += 1
  638.                     hitlist[ i2 ] += 1
  639.                     hitlist[ i3 ] += 1
  640.                 else:
  641.                     print( "Bad triangle UV map!!! invR: %f | UV0: %f;%f | UV1: %f;%f | UV2: %f;%f" % \
  642.                         (ir, Tdc1[0],Tdc1[1],Tdc2[0],Tdc2[1],Tdc3[0],Tdc3[1]))
  643.                 #
  644.             #
  645.         #
  646.        
  647.         for v_id, vertex in enumerate( vertices ):
  648.             Ndc = struct.unpack( "3f", vertex[ 12:24 ] )
  649.             n = Vector([ Ndc[0], Ndc[1], Ndc[2] ])
  650.             t = tan1list[ v_id ]
  651.             t2 = tan2list[ v_id ]
  652.            
  653.             outtan = ( t - n * n.dot( t ) ).normalized()
  654.             if outtan == Vector([0.0,0.0,0.0]):
  655.                 outtan = Vector([0.0,0.0,1.0])
  656.                 print( "Tangent was detected to be 0,0,0 on vertex %d - changed to 0,0,1" % v_id )
  657.                 Tdc = struct.unpack( "2f", vertex[ 24:32 ] )
  658.                 print( "HC: %d, Nrm: %s, Tx1: %s, Tg2: %s" % ( hitlist[ v_id ], n, Tdc, t2 ) )
  659.             sign = -1.0 if n.cross( t ).dot( t2 ) < 0.0 else 1.0
  660.             vertices[ v_id ] = vertex[ :24 ] + struct.pack( "4f", outtan.x, outtan.y, outtan.z, sign ) + vertex[ 24: ]
  661.         #
  662.     #
  663.    
  664.     # AABB
  665.     bbmin = Plist[0].copy()
  666.     bbmax = Plist[0].copy()
  667.     for pos in Plist:
  668.         if bbmin.x > pos.x:
  669.             bbmin.x = pos.x
  670.         if bbmin.y > pos.y:
  671.             bbmin.y = pos.y
  672.         if bbmin.z > pos.z:
  673.             bbmin.z = pos.z
  674.         if bbmax.x < pos.x:
  675.             bbmax.x = pos.x
  676.         if bbmax.y < pos.y:
  677.             bbmax.y = pos.y
  678.         if bbmax.z < pos.z:
  679.             bbmax.z = pos.z
  680.     #
  681.    
  682.     # FORMAT STRING
  683.     format = "pf3nf3"
  684.     if len( Tlists ) > 0:
  685.         format += "tf4"
  686.     for si in range( len( Tlists ) ):
  687.         format += "%df2" % ( si )
  688.     for si in range( len( Clists ) ): # only one expected
  689.         format += "cb4"
  690.     if opt_vgroups != None and opt_boneorder != None:
  691.         format += "ib4wb4"
  692.     #
  693.    
  694.     print( "OK!" )
  695.    
  696.     if MESH is not geom_node.data:
  697.         bpy.data.meshes.remove( MESH )
  698.    
  699.     return {
  700.         "bbmin": bbmin, "bbmax": bbmax,
  701.         "vertices": vertices, "indices": indices,
  702.         "format": format, "parts": parts,
  703.     }
  704. #
  705.  
  706. def gen_empty_mesh_data():
  707.     return {
  708.         "bbmin": Vector([ sys.float_info.max, sys.float_info.max, sys.float_info.max ]),
  709.         "bbmax": Vector([ -sys.float_info.max, -sys.float_info.max, -sys.float_info.max ]),
  710.         "vertices": [], "indices": [], "format": "pf3nf3", "parts": [],
  711.     }
  712. #
  713.  
def mesh_data_add( meshdata, ndata ):
    """Merge mesh data `ndata` (from parse_geometry) into `meshdata` in place.

    Extends the bounding box, appends parts with rebased vertex/index
    offsets, reconciles the two vertex format strings by zero-padding the
    vertices missing an attribute, then concatenates the buffers.
    Raises Exception on an unexpected padding format or a vertex size
    mismatch after reconciliation.
    """
    # AABB: expand meshdata's bounds to include ndata's
    if meshdata["bbmin"].x > ndata["bbmin"].x:
        meshdata["bbmin"].x = ndata["bbmin"].x
    if meshdata["bbmin"].y > ndata["bbmin"].y:
        meshdata["bbmin"].y = ndata["bbmin"].y
    if meshdata["bbmin"].z > ndata["bbmin"].z:
        meshdata["bbmin"].z = ndata["bbmin"].z
    if meshdata["bbmax"].x < ndata["bbmax"].x:
        meshdata["bbmax"].x = ndata["bbmax"].x
    if meshdata["bbmax"].y < ndata["bbmax"].y:
        meshdata["bbmax"].y = ndata["bbmax"].y
    if meshdata["bbmax"].z < ndata["bbmax"].z:
        meshdata["bbmax"].z = ndata["bbmax"].z

    # parts: shallow-copied so the offset rebase does not mutate ndata
    for part in ndata["parts"]:
        part = copy(part)
        part["voff"] += len(meshdata["vertices"])
        part["ioff"] += len(meshdata["indices"])
        meshdata["parts"].append( part )

    # format diff: canonical attribute order; DUMMY is the end sentinel
    # NOTE(review): "2f2" is absent from this list — looks like a typo
    # (a third UV layer would raise ValueError in index()); confirm intent
    format_part_order = [ "pf3", "nf3", "tf4", "0f2", "1f2",
        "3f2", "4f2", "cb4", "ib4", "wb4", "DUMMY",
    ]
    # - split into chunks (every attribute code is exactly 3 chars)
    oldfmt_A = meshdata["format"]
    oldfmt_B = ndata["format"]
    fmt_A = [oldfmt_A[i:i+3] for i in range(0, len(oldfmt_A), 3)] + [ "DUMMY" ]
    fmt_B = [oldfmt_B[i:i+3] for i in range(0, len(oldfmt_B), 3)] + [ "DUMMY" ]

    # - iterate through, inserting missing attributes until both match
    i = 0
    while i < len(fmt_A) or i < len(fmt_B):
        if fmt_A[ i ] == fmt_B[ i ]:
            i += 1
            continue
        # - formats not equal, need to pad one side
        # - pad the side whose index is higher
        if format_part_order.index( fmt_A[ i ] ) > format_part_order.index( fmt_B[ i ] ):
            fmt_tgt = fmt_A
            fmt_src = fmt_B
            data_tgt = meshdata["vertices"]
        else:
            fmt_src = fmt_A
            fmt_tgt = fmt_B
            data_tgt = ndata["vertices"]
        # - pad the format
        curfmt = fmt_src[ i ]
        fmt_tgt.insert( i, curfmt )
        # - calculate padding offset: byte size of the attributes before slot i
        # (code is "<name><f|b><count>": floats are 4 bytes, bytes are 1)
        pad_offset = 0
        for f in islice( fmt_src, i ):
            nominal = 4 if f[1] == "f" else 1
            multiplier = int(f[2])
            pad_offset += nominal * multiplier
        # - calculate padding data
        if curfmt == "pf3" or curfmt == "nf3":
            raise Exception( "UNEXPECTED PADDING FORMAT" )
        pad_core = struct.pack( "f", 0.0 ) if curfmt[1] == "f" else struct.pack( "B", 0 )
        pad_mult = int(curfmt[2])
        pad_data = pad_core * pad_mult
        # print( "pad with %d bytes of data by format %s" % ( len(pad_data), curfmt ) )
        # - perform padding on each vertex
        for v in range( len( data_tgt ) ):
            data_tgt[ v ] = data_tgt[ v ][ : pad_offset ] + pad_data + data_tgt[ v ][ pad_offset : ]
        # - move on because formats have been made equal here
        i += 1
    #

    # - set new format (strip the DUMMY sentinel)
    meshdata["format"] = "".join( fmt_A[:-1] )

    # validate vertex size
    if len(meshdata["vertices"]) > 0 and len(ndata["vertices"]) > 0:
        if len(meshdata["vertices"][0]) != len(ndata["vertices"][0]):
            raise Exception(
                "Vertex sizes not equal: old=%d new=%d fmtA=%s fmtB=%s newfmtA=%s newfmtB=%s" % (
                    len(meshdata["vertices"][0]),
                    len(ndata["vertices"][0]),
                    oldfmt_A,
                    oldfmt_B,
                    "".join( fmt_A[:-1] ),
                    "".join( fmt_B[:-1] ),
                )
            )
    #

    # combine vertex/index data
    meshdata["vertices"] += ndata["vertices"]
    meshdata["indices"] += ndata["indices"]
#
  806. #
  807.  
  808. def parse_armature( node ):
  809.     for mod in node.modifiers:
  810.         if mod.type == "ARMATURE":
  811.             return mod.object
  812.     return None
  813. #
  814.  
  815. def generate_bone_order( armdata ):
  816.     if armdata == None:
  817.         return None
  818.     bonelist = []
  819.     bonequeue = []
  820.     for bone in armdata.bones:
  821.         if bone.parent == None:
  822.             bonequeue.append( bone )
  823.     #
  824.     while len(bonequeue) != 0:
  825.         bone = bonequeue.pop(0)
  826.         bonelist.append( bone.name )
  827.         for bone in bone.children:
  828.             bonequeue.append( bone )
  829.     #
  830.     return bonelist
  831. #
  832.  
def parse_animations( armobj, boneorder, filepath ):
    """Sample every pinned (fake-user) action on *armobj* into per-bone matrix tracks.

    armobj    -- the armature object whose pose is sampled frame by frame
    boneorder -- bone names in export order (see generate_bone_order)
    filepath  -- export target path; its directory is searched for optional
                 "<action>.animlist.csv" files that split an action into named slices
    Returns a list of dicts: { "name", "frames", "tracks", "speed" [, "markers"] }.
    Temporarily switches the armature's active action; restores it before returning.
    """
    animations = []
    if armobj is not None and armobj.animation_data is not None:
        print( "Generating animations... " )
        # remember the active action so it can be restored after sampling
        oldact = armobj.animation_data.action
        for action in bpy.data.actions:
            if action.use_fake_user is False:
                continue # do not export animations that are not pinned (likely to be deleted)
            armobj.animation_data.action = action
            anim_tracks = {}
            for bonename in boneorder:
                anim_tracks[ bonename ] = []
            frame_begin, frame_end = [ int(x) for x in action.frame_range ]
            # sample one matrix per bone per frame by stepping the scene frame
            for frame in range( frame_begin, frame_end + 1 ):
                bpy.context.scene.frame_set( frame )
                for bonename in boneorder:
                    bone = armobj.pose.bones[ bonename ]
                    track = anim_tracks[ bonename ]
                    
                    # input: object space pose/anim matrices (CP, CA, PP, PA)
                    # output: pose space anim matrix (out)
                    # out = inv( inv(PP) * CP ) * ( inv(PA) * CA )
                    mtx_anim = bone.matrix
                    mtx_base = bone.bone.matrix_local
                    if bone.parent is not None:
                        # convert both matrices from object space to parent-bone space
                        mtx_anim = bone.parent.matrix.inverted() * mtx_anim
                        mtx_base = bone.parent.bone.matrix_local.inverted() * mtx_base
                    mtx = mtx_base.inverted() * mtx_anim
                    
                    track.append( mtx )
                #
            #
            # pose markers are exported only for whole-action output (see below)
            anim_markers = []
            for pmrk in action.pose_markers:
                anim_markers.append({ "name": pmrk.name, "frame": pmrk.frame })
            #
            animspeed = bpy.context.scene.render.fps / bpy.context.scene.render.fps_base
            animlistname = os.path.dirname( filepath ) + "/" + action.name + ".animlist.csv"
            print( "Looking for animation descriptor - " + animlistname )
            if os.path.isfile( animlistname ):
                print( "Found it, decoding action..." )
                try:
                    # each CSV row: start frame, end frame, slice name
                    animlist = csv.reader( open( animlistname, "r" ), delimiter = ",", quotechar = '"' )
                    for anim in animlist:
                        tstart = int(anim[0], 10)
                        tend = int(anim[1], 10)
                        tname = anim[2].strip()
                        anim_sliced_tracks = {}
                        for track_name, track_matrices in anim_tracks.items():
                            anim_sliced_tracks[ track_name ] = track_matrices[ tstart : tend ]
                        # NOTE(review): sliced animations carry no "markers" key, and
                        # "frames" is tend - tstart (end-exclusive), unlike the
                        # whole-action branch below which uses an inclusive count —
                        # confirm this asymmetry is intended by the file format.
                        animations.append({ "name": tname, "frames": tend - tstart, "tracks": anim_sliced_tracks, "speed": animspeed })
                    #
                except IOError as e:
                    print( "I/O error({0}): {1}".format(e.errno, e.strerror) )
                except ValueError:
                    print( "Could not convert data to an integer." )
                #
            else:
                print( "Did not find it, will append the whole action." )
                animations.append({ "name": action.name, "frames": frame_end - frame_begin + 1, "tracks": anim_tracks, "markers" : anim_markers, "speed": animspeed })
            #
        #
        armobj.animation_data.action = oldact
        print( "\tOK!" )
    #
    return animations
#
  900.  
  901. def parse_textures():
  902.     textures = {}
  903.     print( "Parsing textures... ", end="" )
  904.     for tex in bpy.data.textures:
  905.         texpath = ""
  906.         if hasattr( tex, "image" ) and tex.image != None:
  907.             texpath = tex.image.filepath[ 2: ]
  908.         textures[ tex.name ] = texpath
  909.     print( "OK!" )
  910.     return textures
  911.  
  912. def write_ss3dmesh( ctx, props ):
  913.     filepath = props.filepath
  914.     print( "\n\\\\\n>>> SS3DMESH Exporter v0.5!\n//\n\n" )
  915.     print( "Exporting..." )
  916.    
  917.     textures = parse_textures()
  918.    
  919.     meshdata = gen_empty_mesh_data()
  920.     armobj = None
  921.     armdata = None
  922.     boneorder = []
  923.     for node in ctx.scene.objects:
  924.         if node.type != "MESH":
  925.             continue
  926.        
  927.         if props.export_selected and not node.select:
  928.             continue
  929.        
  930.         cur_armobj = parse_armature( node )
  931.         # do not allow multiple armatures
  932.         if armobj is not None and cur_armobj is not None and armobj.name != cur_armobj.name:
  933.             props.report( {"ERROR"},
  934.                 "multiple armatures are not supported (curr=%s, new=%s)" % (
  935.                     armobj.name, cur_armobj.name ) )
  936.             return {'CANCELLED'}
  937.         # do this only for the first time
  938.         if armobj is None and cur_armobj is not None:
  939.             armobj = cur_armobj
  940.             armdata = None if armobj is None else armobj.data
  941.             boneorder = generate_bone_order( armdata )
  942.         cur_meshdata = parse_geometry( node, textures, boneorder, props )
  943.         mesh_data_add( meshdata, cur_meshdata )
  944.    
  945.     if props.export_anim:
  946.         print( "Parsing animations..." )
  947.         animations = parse_animations( armobj, boneorder, filepath )
  948.         print( "OK!" )
  949.    
  950.     print( "Writing mesh... " )
  951.     with open( filepath, 'wb' ) as f:
  952.         write_mesh( f, meshdata, armdata, boneorder )
  953.    
  954.     if props.export_anim:
  955.         if len(animations) == 0:
  956.             props.report( {"WARNING"}, "No animations found!" )
  957.         else:
  958.             print( "Writing animations... " )
  959.             with open( filepath + ".anm", 'wb' ) as f:
  960.                 write_anims( f, animations )
  961.             with open( filepath + ".anb", 'wb' ) as f:
  962.                 write_anims_anbd( f, animations )
  963.     #
  964.    
  965.     print( "\n\\\\\n>>> Done!\n//\n\n" )
  966.  
  967.     return {'CANCELLED'}
  968. #
  969.  
  970. def write_sgrxanbd( ctx, props ):
  971.     print( "\n\\\\\n>>> SGRXANBD Exporter v0.5!\n//\n\n" )
  972.     print( "Exporting..." )
  973.    
  974.     filepath = props.filepath
  975.    
  976.     print( "Parsing nodes... ", end="" )
  977.     armobj = None
  978.     geom_node = bpy.context.active_object
  979.     if bpy.context.active_object.type == "ARMATURE":
  980.         armobj = bpy.context.active_object
  981.     for node in ctx.scene.objects:
  982.         if node.type == "ARMATURE":
  983.             armobj = node
  984.             break
  985.     #
  986.     if armobj is None:
  987.         props.report( {"ERROR"}, "No armature found!" )
  988.         return {'CANCELLED'}
  989.     else:
  990.         armdata = armobj.data
  991.     print( "OK!" )
  992.    
  993.     boneorder = generate_bone_order( armdata )
  994.    
  995.     print( "Parsing animations..." )
  996.     animations = parse_animations( armobj, boneorder, filepath )
  997.     print( "OK!" )
  998.    
  999.     if len(animations) == 0:
  1000.         props.report( {"WARNING"}, "No animations found!" )
  1001.     else:
  1002.         print( "Writing animations... " )
  1003.         with open( filepath, 'wb' ) as f:
  1004.             write_anims_anbd( f, animations )
  1005.     #
  1006.    
  1007.     print( "\n\\\\\n>>> Done!\n//\n\n" )
  1008.  
  1009.     return {'CANCELLED'}
  1010.  
  1011. # ExportHelper is a helper class, defines filename and
  1012. # invoke() function which calls the file selector.
  1013. from bpy_extras.io_utils import ExportHelper
  1014. from bpy.props import StringProperty, BoolProperty, EnumProperty
  1015.  
  1016.  
# (identifier, UI label, tooltip) triples for the "Apply modifiers"
# EnumProperty of the mesh export operator.
apply_mod_ui_items = [
    ( "NONE", "None", "Don't apply any modifiers" ),
    ( "SKIPARM", "All except armatures", "Apply non-armature modifiers" ),
    ( "ALL", "All", "Apply all modifiers" ),
]
  1022.  
  1023.  
class ExportSS3DMESH( bpy.types.Operator, ExportHelper ):
    '''SS3DMESH Exporter'''
    bl_idname = "export.ss3dmesh"
    bl_label = "[SGRX] Export .ssm"
    bl_options = {'REGISTER', 'UNDO'}
    
    # ExportHelper mixin class uses this
    filename_ext = ".ssm"
    
    # limit the file browser to .ssm files
    filter_glob = StringProperty(
        default = "*.ssm",
        options = {'HIDDEN'},
    )
    # when enabled, also writes <file>.anm and <file>.anb next to the mesh
    export_anim = BoolProperty(name="Export animation", default=False)
    # when enabled, only selected MESH objects are exported
    export_selected = BoolProperty(name="Export selected mesh only", default=True)
    # which modifiers to apply during export (see apply_mod_ui_items)
    apply_modifiers = EnumProperty(items=apply_mod_ui_items,
        name="Apply modifiers", default="SKIPARM")
    
    def execute( self, ctx ):
        # all export logic lives in write_ss3dmesh; 'self' carries the options
        return write_ss3dmesh( ctx, self )
  1044.  
class ExportSGRXANBD( bpy.types.Operator, ExportHelper ):
    '''SGRXANBD (anim. bundle) Exporter'''
    bl_idname = "export.sgrxanbd"
    bl_label = "[SGRX] Export .anb"
    bl_options = {'REGISTER', 'UNDO'}
    
    # ExportHelper mixin class uses this
    filename_ext = ".anb"
    
    # limit the file browser to .anb files
    filter_glob = StringProperty(
        default = "*.anb",
        options = {'HIDDEN'},
    )
    
    def execute( self, ctx ):
        # all export logic lives in write_sgrxanbd; 'self' carries the options
        return write_sgrxanbd( ctx, self )
  1060.  
  1061.  
  1062. # Only needed if you want to add into a dynamic menu
  1063. def menu_func_export( self, ctx ):
  1064.     self.layout.operator( ExportSS3DMESH.bl_idname, text="SS3DMESH Exporter" )
  1065.     self.layout.operator( ExportSGRXANBD.bl_idname, text="SGRXANBD (anim. bundle) Exporter" )
  1066.  
  1067.  
  1068.  
  1069. def makeMaterial(name, uvlayer):
  1070.     tex = bpy.data.textures.new(name, type = 'IMAGE')
  1071.    
  1072.     mat = bpy.data.materials.new(name)
  1073.     mat.diffuse_color = (1,1,1)
  1074.     mat.diffuse_shader = 'LAMBERT'
  1075.     mat.diffuse_intensity = 1.0
  1076.     mat.specular_color = (1,1,1)
  1077.     mat.specular_shader = 'COOKTORR'
  1078.     mat.specular_intensity = 0.2
  1079.     mat.alpha = 1
  1080.     mat.ambient = 1
  1081.    
  1082.     mtex = mat.texture_slots.add()
  1083.     mtex.texture = tex
  1084.     mtex.texture_coords = 'UV'
  1085.     mtex.use_map_color_diffuse = True
  1086.     mtex.mapping = 'FLAT'
  1087.     mtex.uv_layer = uvlayer
  1088.    
  1089.     return mat
  1090.  
class DialogOperator(bpy.types.Operator):
    '''Pop-up dialog that creates a default textured material on the active object'''
    bl_idname = "object.generate_material"
    bl_label = "Generate Material"
    
    # material name entered by the user in the pop-up dialog
    prop_mtlname = StringProperty(name="Material name", default="newmtl")
    
    def execute(self, context):
        active = bpy.context.active_object.data
        if hasattr(active, "materials"):
            mtlname = self.prop_mtlname
            # NOTE(review): uv_layers.keys() is a bpy collection method returning a
            # list, so [0] works — but it raises IndexError if the mesh has no UV
            # layers; confirm callers always have at least one.
            active.materials.append( makeMaterial( mtlname, active.uv_layers.keys()[0] ) )
            self.report({'INFO'}, "Material created: %s!" % mtlname)
        else:
            self.report({'INFO'}, "Object cannot have materials!")
        #
        return {'FINISHED'}
    
    def invoke(self, context, event):
        # show the property dialog first; execute() runs when it is confirmed
        return context.window_manager.invoke_props_dialog(self)
  1110.  
  1111.  
  1112.  
class DialogPanel(bpy.types.Panel):
    '''3D view sidebar panel exposing the Generate Material operator'''
    bl_label = "Texturing Tools"
    bl_space_type = "VIEW_3D"
    bl_region_type = "UI"
    
    def draw(self, context):
        # single button that invokes DialogOperator
        self.layout.operator("object.generate_material")
  1120.  
  1121.  
  1122. def register():
  1123.     bpy.utils.register_class( ExportSS3DMESH )
  1124.     bpy.utils.register_class( ExportSGRXANBD )
  1125.     bpy.utils.register_class( DialogOperator )
  1126.     bpy.utils.register_class( DialogPanel )
  1127.     bpy.types.INFO_MT_file_export.append( menu_func_export )
  1128.     # bpy.types.VIEW3D_PT_tools_object.append( generate_material_func )
  1129.  
  1130.  
  1131. def unregister():
  1132.     bpy.utils.unregister_class( ExportSS3DMESH )
  1133.     bpy.utils.unregister_class( ExportSGRXANBD )
  1134.     bpy.utils.unregister_class( DialogOperator )
  1135.     bpy.utils.unregister_class( DialogPanel )
  1136.     bpy.types.INFO_MT_file_export.remove( menu_func_export )
  1137.     # bpy.types.VIEW3D_PT_tools_object.remove( generate_material_func )
  1138.  
  1139.  
# Allow running the script directly from Blender's text editor.
if __name__ == "__main__":
    register()
#
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement