[Bf-blender-cvs] SVN commit: /data/svn/bf-blender [21167] branches/soc-2009-kazanbas: * OBJ exporter almost converted
Arystanbek Dyussenov
arystan.d at gmail.com
Fri Jun 26 14:33:07 CEST 2009
Revision: 21167
http://projects.blender.org/plugins/scmsvn/viewcvs.php?view=rev&root=bf-blender&revision=21167
Author: kazanbas
Date: 2009-06-26 14:33:07 +0200 (Fri, 26 Jun 2009)
Log Message:
-----------
* OBJ exporter almost converted
* added MeshEdge.loose property
* changed mask used in Object.create_*_mesh to CD_MASK_MESH.
Modified Paths:
--------------
branches/soc-2009-kazanbas/release/scripts/export_obj-2.5.py
branches/soc-2009-kazanbas/source/blender/makesrna/intern/rna_mesh.c
branches/soc-2009-kazanbas/source/blender/makesrna/intern/rna_object_api.c
Modified: branches/soc-2009-kazanbas/release/scripts/export_obj-2.5.py
===================================================================
--- branches/soc-2009-kazanbas/release/scripts/export_obj-2.5.py 2009-06-26 11:25:19 UTC (rev 21166)
+++ branches/soc-2009-kazanbas/release/scripts/export_obj-2.5.py 2009-06-26 12:33:07 UTC (rev 21167)
@@ -47,7 +47,7 @@
import bpy
-import BPySys
+# import BPySys
# import Blender
# from Blender import Mesh, Scene, Window, sys, Image, Draw
@@ -75,16 +75,20 @@
# (material.name, image.name):matname_imagename # matname_imagename has gaps removed.
MTL_DICT = {}
-def write_mtl(filename):
+def write_mtl(scene, filename):
+
+ world = bpy.data.worlds[0]
+ worldAmb = world.ambient_color
+
+# world = Blender.World.GetCurrent()
+# if world:
+# worldAmb = world.getAmb()
+# else:
+# worldAmb = (0,0,0) # Default value
- world = Blender.World.GetCurrent()
- if world:
- worldAmb = world.getAmb()
- else:
- worldAmb = (0,0,0) # Default value
-
file = open(filename, "w")
- file.write('# Blender3D MTL File: %s\n' % Blender.Get('filename').split('\\')[-1].split('/')[-1])
+ # XXX
+# file.write('# Blender3D MTL File: %s\n' % Blender.Get('filename').split('\\')[-1].split('/')[-1])
file.write('# Material Count: %i\n' % len(MTL_DICT))
# Write material/image combinations we have used.
for key, (mtl_mat_name, mat, img) in MTL_DICT.iteritems():
@@ -261,7 +265,7 @@
return tot_verts
-def write(filename, objects,\
+def write(filename, objects, scene, \
EXPORT_TRI=False, EXPORT_EDGES=False, EXPORT_NORMALS=False, EXPORT_NORMALS_HQ=False,\
EXPORT_UV=True, EXPORT_MTL=True, EXPORT_COPY_IMAGES=False,\
EXPORT_APPLY_MODIFIERS=True, EXPORT_ROTX90=True, EXPORT_BLEN_OBS=True,\
@@ -290,8 +294,10 @@
of vertices is the face's group
"""
weightDict = {}
- for vert in face:
- vWeights = vWeightMap[vert.index]
+ for vert_index in face.verts:
+# for vert in face:
+ vWeights = vWeightMap[vert_index]
+# vWeights = vWeightMap[vert]
for vGroupName, weight in vWeights:
weightDict[vGroupName] = weightDict.get(vGroupName, 0) + weight
@@ -302,7 +308,18 @@
else:
return '(null)'
+ # TODO: implement this in C? dunno how it should be called...
+ def getVertsFromGroup(me, group_index):
+ ret = []
+ for i, v in enumerate(me.verts):
+ for g in v.groups:
+ if g.group == group_index:
+ ret.append((i, g.weight))
+
+ return ret
+
+
print 'OBJ Export path: "%s"' % filename
temp_mesh_name = '~tmp-mesh'
@@ -349,6 +366,7 @@
globalNormals = {}
+ # Get all meshes
for ob_main in objects:
if ob_main.dupli_type != 'NONE':
@@ -375,22 +393,25 @@
# continue
# end nurbs
-
+
+ if ob.type != 'MESH':
+ continue
+
+ # XXX EXPORT_APPLY_MODIFIERS is not used (always true)
+ # we also need influences to be copied... for EXPORT_POLYGROUPS to work
+ # which create_preview_mesh presumably does (CD_MASK_MDEFORMVERT flag)
+ me = ob.create_preview_mesh()
+
# # Will work for non meshes now! :)
# me= BPyMesh.getMeshFromObject(ob, containerMesh, EXPORT_APPLY_MODIFIERS, EXPORT_POLYGROUPS, scn)
# if not me:
# continue
- if ob.type != 'MESH':
- continue
-
if EXPORT_UV:
faceuv = len(me.uv_layers) > 0
else:
faceuv = False
- me = ob.create_render_mesh()
-
# We have a valid mesh
if EXPORT_TRI and me.faces:
# Add a dummy object to it.
@@ -598,19 +619,21 @@
# XXX
if EXPORT_POLYGROUPS:
# Retrieve the list of vertex groups
- vertGroupNames = [g.name for g in ob.vertex_groups]
# vertGroupNames = me.getVertGroupNames()
currentVGroup = ''
# Create a dictionary keyed by face id and listing, for each vertex, the vertex groups it belongs to
vgroupsMap = [[] for _i in range(len(me.verts))]
# vgroupsMap = [[] for _i in xrange(len(me.verts))]
- for vertexGroupName in vertGroupNames:
- for vIdx, vWeight in me.getVertsFromGroup(vertexGroupName, 1):
- vgroupsMap[vIdx].append((vertexGroupName, vWeight))
+ for g in ob.vertex_groups:
+# for vertexGroupName in vertGroupNames:
+ for vIdx, vWeight in getVertsFromGroup(me, g.index):
+# for vIdx, vWeight in me.getVertsFromGroup(vertexGroupName, 1):
+ vgroupsMap[vIdx].append((g.name, vWeight))
for f_index, f in enumerate(faces):
- f_v= f.v
+ f_v = [{"index": index, "vertex": me.verts[index]} for index in f.verts]
+# f_v= f.v
f_smooth= f.smooth
f_mat = min(f.material_index, len(materialNames)-1)
# f_mat = min(f.mat, len(materialNames)-1)
@@ -632,6 +655,14 @@
key = materialNames[f_mat], None # No image, use None instead.
# XXX
+ # Write the vertex group
+ if EXPORT_POLYGROUPS:
+ if len(ob.vertex_groups):
+ # find what vertex group the face belongs to
+ theVGroup = findVertexGroupName(f,vgroupsMap)
+ if theVGroup != currentVGroup:
+ currentVGroup = theVGroup
+ file.write('g %s\n' % theVGroup)
# # Write the vertex group
# if EXPORT_POLYGROUPS:
# if vertGroupNames:
@@ -648,7 +679,9 @@
if key[0] == None and key[1] == None:
# Write a null material, since we know the context has changed.
if EXPORT_GROUP_BY_MAT:
- file.write('g %s_%s\n' % (fixName(ob.name), fixName(ob.getData(1))) ) # can be mat_image or (null)
+ # can be mat_image or (null)
+ file.write('g %s_%s\n' % (fixName(ob.name), fixName(ob.data.name)) )
+# file.write('g %s_%s\n' % (fixName(ob.name), fixName(ob.getData(1))) ) # can be mat_image or (null)
file.write('usemtl (null)\n') # mat, image
else:
@@ -667,7 +700,8 @@
mat_data = MTL_DICT[key] = ('%s_%s' % (fixName(key[0]), fixName(key[1]))), materialItems[f_mat], f_image
if EXPORT_GROUP_BY_MAT:
- file.write('g %s_%s_%s\n' % (fixName(ob.name), fixName(ob.getData(1)), mat_data[0]) ) # can be mat_image or (null)
+ file.write('g %s_%s_%s\n' % (fixName(ob.name), fixName(ob.data.name), mat_data[0]) ) # can be mat_image or (null)
+# file.write('g %s_%s_%s\n' % (fixName(ob.name), fixName(ob.getData(1)), mat_data[0]) ) # can be mat_image or (null)
file.write('usemtl %s\n' % mat_data[0]) # can be mat_image or (null)
@@ -685,24 +719,36 @@
if EXPORT_NORMALS:
if f_smooth: # Smoothed, use vertex normals
for vi, v in enumerate(f_v):
- file.write( ' %d/%d/%d' % (\
- v.index+totverts,\
- totuvco + uv_face_mapping[f_index][vi],\
- globalNormals[ veckey3d(v.no) ])) # vert, uv, normal
+ file.write( ' %d/%d/%d' % \
+ (v["index"] + totverts,
+ totuvco + uv_face_mapping[f_index][vi],
+ globalNormals[ veckey3d(v["vertex"].normal) ]) ) # vert, uv, normal
+# file.write( ' %d/%d/%d' % (\
+# v.index+totverts,\
+# totuvco + uv_face_mapping[f_index][vi],\
+# globalNormals[ veckey3d(v.no) ])) # vert, uv, normal
else: # No smoothing, face normals
- no = globalNormals[ veckey3d(f.no) ]
+ no = globalNormals[ veckey3d(f.normal) ]
+# no = globalNormals[ veckey3d(f.no) ]
for vi, v in enumerate(f_v):
- file.write( ' %d/%d/%d' % (\
- v.index+totverts,\
- totuvco + uv_face_mapping[f_index][vi],\
- no)) # vert, uv, normal
+ file.write( ' %d/%d/%d' % \
+ (v["index"] + totverts,
+ totuvco + uv_face_mapping[f_index][vi],
+ no) ) # vert, uv, normal
+# file.write( ' %d/%d/%d' % (\
+# v.index+totverts,\
+# totuvco + uv_face_mapping[f_index][vi],\
+# no)) # vert, uv, normal
else: # No Normals
for vi, v in enumerate(f_v):
file.write( ' %d/%d' % (\
- v.index+totverts,\
+ v["index"] + totverts,\
totuvco + uv_face_mapping[f_index][vi])) # vert, uv
+# file.write( ' %d/%d' % (\
+# v.index+totverts,\
+# totuvco + uv_face_mapping[f_index][vi])) # vert, uv
face_vert_index += len(f_v)
@@ -710,344 +756,56 @@
if EXPORT_NORMALS:
if f_smooth: # Smoothed, use vertex normals
for v in f_v:
- file.write( ' %d//%d' % (\
- v.index+totverts,\
- globalNormals[ veckey3d(v.no) ]))
+ file.write( ' %d//%d' %
+ (v["index"] + totverts, globalNormals[ veckey3d(v["vertex"].normal) ]) )
+
+# file.write( ' %d//%d' % (\
+# v.index+totverts,\
+# globalNormals[ veckey3d(v.no) ]))
else: # No smoothing, face normals
- no = globalNormals[ veckey3d(f.no) ]
+ no = globalNormals[ veckey3d(f.normal) ]
+# no = globalNormals[ veckey3d(f.no) ]
for v in f_v:
- file.write( ' %d//%d' % (\
- v.index+totverts,\
- no))
+ file.write( ' %d//%d' % (v["index"] + totverts, no) )
+# file.write( ' %d//%d' % (\
+# v.index+totverts,\
+# no))
else: # No Normals
for v in f_v:
- file.write( ' %d' % (\
- v.index+totverts))
+ file.write( ' %d' % (v["index"] + totverts) )
+# file.write( ' %d' % (\
+# v.index+totverts))
file.write('\n')
# Write edges.
if EXPORT_EDGES:
- LOOSE= Mesh.EdgeFlags.LOOSE
for ed in edges:
- if ed.flag & LOOSE:
- file.write('f %d %d\n' % (ed.v1.index+totverts, ed.v2.index+totverts))
+ if ed.loose:
+ file.write('f %d %d\n' % (ed.verts[0] + totverts, ed.verts[1] + totverts))
+# LOOSE= Mesh.EdgeFlags.LOOSE
+# for ed in edges:
+# if ed.flag & LOOSE:
+# file.write('f %d %d\n' % (ed.v1.index+totverts, ed.v2.index+totverts))
# Make the indicies global rather then per mesh
totverts += len(me.verts)
if faceuv:
totuvco += uv_unique_count
- me.verts= None
+ # clean up
+ bpy.data.remove_mesh(me)
+# me.verts= None
+
if ob_main.dupli_type != 'NONE':
ob_main.free_dupli_list()
-
- # Get all meshes
- for ob_main in objects:
- for ob, ob_mat in BPyObject.getDerivedObjects(ob_main):
-
- # Nurbs curve support
- if EXPORT_CURVE_AS_NURBS and test_nurbs_compat(ob):
- if EXPORT_ROTX90:
- ob_mat = ob_mat * mat_xrot90
-
- totverts += write_nurb(file, ob, ob_mat)
-
- continue
- # end nurbs
-
- # Will work for non meshes now! :)
- # getMeshFromObject(ob, container_mesh=None, apply_modifiers=True, vgroups=True, scn=None)
@@ Diff output truncated at 10240 characters. @@
More information about the Bf-blender-cvs
mailing list