[Bf-extensions-cvs] [4b7f9c6] master: Fix T34801: bridge tool, internal faces deletion Internal faces are now correctly deleted and smooth shading is applied.

Bart Crouch noreply at git.blender.org
Wed Dec 18 19:54:57 CET 2013


Commit: 4b7f9c6819318b772767dad19621bbbaf5827177
Author: Bart Crouch
Date:   Wed Dec 18 19:54:45 2013 +0100
http://developer.blender.org/rBA4b7f9c6819318b772767dad19621bbbaf5827177

Fix T34801: bridge tool, internal faces deletion
Internal faces are now correctly deleted and smooth shading is applied.
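
A minimal sketch of the approach taken in the diff below (illustrative
names, 2.69-era bmesh API assumed): keep the BMFace references returned by
bm.faces.new() and select/smooth-shade those directly, instead of
re-indexing bm.faces after the fact.

# Sketch only, not the add-on's exact code.
def create_and_smooth_faces(bm, face_vert_indices, smooth=True):
    new_faces = []
    for vert_indices in face_vert_indices:
        # keep a reference to every face that is created
        new_faces.append(bm.faces.new([bm.verts[i] for i in vert_indices]))
    bm.normal_update()
    for face in new_faces:
        face.select_set(True)  # add the new face to the selection
        face.smooth = smooth   # apply smooth shading
    return new_faces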

Fix: matrix operations
Based on a report in an anonymous forum.
Credits to: http://toro.2ch.net/test/read.cgi/cg/1361851855/570-571n
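
In short, the in-place mathutils Matrix.invert() call is replaced with an
explicit 3x3 adjugate/determinant inversion (matrix_invert() further down in
the diff), and when inversion still fails the plane normal falls back to the
coordinate axis whose row sum has the smallest magnitude. A condensed sketch
of that fallback (illustrative, not the add-on's exact code):

import math
import mathutils

def fallback_normal(mat):
    # pick the axis whose row sums to the smallest absolute value
    row_sums = [math.fabs(sum(mat[i])) for i in range(3)]
    axis = row_sums.index(min(row_sums))
    normal = mathutils.Vector((0.0, 0.0, 0.0))
    normal[axis] = 1.0
    return normal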

Fix: GUI layout, aligned buttons

New functionality: GStretch converts grease pencil strokes to vertices.
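
Roughly: each grease pencil stroke point becomes a new vertex (transformed
into the object's local space) and consecutive points are chained together
with edges. A minimal sketch of that idea (illustrative names, pre-2.8
"matrix * vector" syntax):

import bmesh

def strokes_to_verts(obj, bm, strokes):
    mat_inv = obj.matrix_world.inverted()
    for stroke in strokes:
        # one vertex per stroke point, in the object's local space
        verts = [bm.verts.new(mat_inv * point.co) for point in stroke.points]
        # chain consecutive vertices with edges
        for v1, v2 in zip(verts, verts[1:]):
            bm.edges.new((v1, v2))
    bm.verts.index_update()
    bmesh.update_edit_mesh(obj.data)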

First commit via git; please don't shoot me if I did something wrong.

===================================================================

M	mesh_looptools.py

===================================================================

diff --git a/mesh_looptools.py b/mesh_looptools.py
index 20c9b67..3294c0c 100644
--- a/mesh_looptools.py
+++ b/mesh_looptools.py
@@ -19,8 +19,8 @@
 bl_info = {
     "name": "LoopTools",
     "author": "Bart Crouch",
-    "version": (4, 2, 0),
-    "blender": (2, 63, 0),
+    "version": (4, 5, 0),
+    "blender": (2, 69, 3),
     "location": "View3D > Toolbar and View3D > Specials (W-key)",
     "warning": "",
     "description": "Mesh modelling toolkit. Several tools to aid modelling",
@@ -237,13 +237,19 @@ def calculate_plane(bm_mod, loop, method="best_fit", object=False):
         # calculating the normal to the plane
         normal = False
         try:
-            mat.invert()
+            mat = matrix_invert(mat)
         except:
-            if sum(mat[0]) == 0.0:
+            ax = 2
+            if math.fabs(sum(mat[0])) < math.fabs(sum(mat[1])):
+                if math.fabs(sum(mat[0])) < math.fabs(sum(mat[2])):
+                    ax = 0
+            elif math.fabs(sum(mat[1])) < math.fabs(sum(mat[2])):
+                ax = 1
+            if ax == 0:
                 normal = mathutils.Vector((1.0, 0.0, 0.0))
-            elif sum(mat[1]) == 0.0:
+            elif ax == 1:
                 normal = mathutils.Vector((0.0, 1.0, 0.0))
-            elif sum(mat[2]) == 0.0:
+            else:
                 normal = mathutils.Vector((0.0, 0.0, 1.0))
         if not normal:
             # warning! this is different from .normalize()
@@ -569,6 +575,28 @@ def get_mapping(derived, bm, bm_mod, single_vertices, full_search, loops):
     return(mapping)
 
 
+# calculate the determinant of a matrix
+def matrix_determinant(m):
+    determinant = m[0][0] * m[1][1] * m[2][2] + m[0][1] * m[1][2] * m[2][0] \
+        + m[0][2] * m[1][0] * m[2][1] - m[0][2] * m[1][1] * m[2][0] \
+        - m[0][1] * m[1][0] * m[2][2] - m[0][0] * m[1][2] * m[2][1]
+
+    return(determinant)
+
+
+# custom matrix inversion, to provide higher precision than the built-in one
+def matrix_invert(m):
+    r = mathutils.Matrix((
+        (m[1][1]*m[2][2] - m[1][2]*m[2][1], m[0][2]*m[2][1] - m[0][1]*m[2][2],
+        m[0][1]*m[1][2] - m[0][2]*m[1][1]),
+        (m[1][2]*m[2][0] - m[1][0]*m[2][2], m[0][0]*m[2][2] - m[0][2]*m[2][0],
+        m[0][2]*m[1][0] - m[0][0]*m[1][2]),
+        (m[1][0]*m[2][1] - m[1][1]*m[2][0], m[0][1]*m[2][0] - m[0][0]*m[2][1],
+        m[0][0]*m[1][1] - m[0][1]*m[1][0])))
+    
+    return (r * (1 / matrix_determinant(m)))
+
+
 # returns a list of all loops parallel to the input, input included
 def get_parallel_loops(bm_mod, loops):
     # get required dictionaries
@@ -751,14 +779,12 @@ def settings_write(self):
 
 # clean up and set settings back to original state
 def terminate(global_undo):
-    context = bpy.context
-
     # update editmesh cached data
-    obj = context.active_object
+    obj = bpy.context.active_object
     if obj.mode == 'EDIT':
         bmesh.update_edit_mesh(obj.data, tessface=True, destructive=True)
 
-    context.user_preferences.edit.use_global_undo = global_undo
+    bpy.context.user_preferences.edit.use_global_undo = global_undo
 
 
 ##########################################
@@ -1366,11 +1392,14 @@ def bridge_create_faces(object, bm, faces, twist):
         if faces[i][-1] == faces[i][-2]:
             faces[i] = faces[i][:-1]
     
+    new_faces = []
     for i in range(len(faces)):
-        bm.faces.new([bm.verts[v] for v in faces[i]])
+        new_faces.append(bm.faces.new([bm.verts[v] for v in faces[i]]))
     bm.normal_update()
     object.data.update(calc_edges=True) # calc_edges prevents memory-corruption
 
+    return(new_faces)
+
 
 # calculate input loops
 def bridge_get_input(bm):
@@ -1566,10 +1595,10 @@ def bridge_save_unused_faces(bm, old_selected_faces, loops):
 
 
 # add the newly created faces to the selection
-def bridge_select_new_faces(bm, amount, smooth):
-    for i in range(amount):
-        bm.faces[-(i+1)].select_set(True)
-        bm.faces[-(i+1)].smooth = smooth
+def bridge_select_new_faces(new_faces, smooth):
+    for face in new_faces:
+        face.select_set(True)
+        face.smooth = smooth
 
 
 # sort loops, so they are connected in the correct order when lofting
@@ -1605,6 +1634,20 @@ def bridge_sort_loops(bm, loops, loft_loop):
     return(loops)
 
 
+# remapping old indices to new position in list
+def bridge_update_old_selection(bm, old_selected_faces):
+    #old_indices = old_selected_faces[:]
+    #old_selected_faces = []
+    #for i, face in enumerate(bm.faces):
+    #    if face.index in old_indices:
+    #        old_selected_faces.append(i)
+    
+    old_selected_faces = [i for i, face in enumerate(bm.faces) if face.index \
+        in old_selected_faces]
+    
+    return(old_selected_faces)
+
+
 ##########################################
 ####### Circle functions #################
 ##########################################
@@ -2432,12 +2475,22 @@ def flatten_project(bm, loop, com, normal):
     return(verts_projected)
 
 
-
-
 ##########################################
 ####### Gstretch functions ###############
 ##########################################
 
+# fake stroke class, used to create custom strokes if no GP data is found
+class gstretch_fake_stroke():
+    def __init__(self, points):
+        self.points = [gstretch_fake_stroke_point(p) for p in points]
+
+
+# fake stroke point class, used in fake strokes
+class gstretch_fake_stroke_point():
+    def __init__(self, loc):
+        self.co = loc
+
+
 # flips loops, if necessary, to obtain maximum alignment to stroke
 def gstretch_align_pairs(ls_pairs, object, bm_mod, method):    
     # returns total distance between all verts in loop and corresponding stroke
@@ -2535,6 +2588,86 @@ def gstretch_calculate_verts(loop, stroke, object, bm_mod, method):
     return(move)
 
 
+# create new vertices, based on GP strokes
+def gstretch_create_verts(object, bm_mod, strokes, method, conversion,
+conversion_distance, conversion_max, conversion_min, conversion_vertices):
+    move = []
+    stroke_verts = []
+    mat_world = object.matrix_world.inverted()
+    singles = gstretch_match_single_verts(bm_mod, strokes, mat_world)
+    
+    for stroke in strokes:
+        stroke_verts.append([stroke, []])
+        min_end_point = 0
+        if conversion == 'vertices':
+            min_end_point = conversion_vertices
+            end_point = conversion_vertices
+        elif conversion == 'limit_vertices':
+            min_end_point = conversion_min
+            end_point = conversion_max
+        else:
+            end_point = len(stroke.points)
+        # creation of new vertices at fixed user-defined distances
+        if conversion == 'distance':
+            method = 'project'
+            prev_point = stroke.points[0]
+            stroke_verts[-1][1].append(bm_mod.verts.new(mat_world * \
+                prev_point.co))
+            distance = 0
+            limit = conversion_distance
+            for point in stroke.points:
+                new_distance = distance + (point.co - prev_point.co).length
+                iteration = 0
+                while new_distance > limit:
+                    to_cover = limit - distance + (limit * iteration)
+                    new_loc = prev_point.co + to_cover * \
+                        (point.co - prev_point.co).normalized()
+                    stroke_verts[-1][1].append(bm_mod.verts.new(mat_world * \
+                        new_loc))
+                    new_distance -= limit
+                    iteration += 1
+                distance = new_distance
+                prev_point = point
+        # creation of new vertices for other methods
+        else:
+            # add vertices at stroke points
+            for point in stroke.points[:end_point]:
+                stroke_verts[-1][1].append(bm_mod.verts.new(\
+                    mat_world * point.co))
+            # add more vertices, beyond the points that are available
+            if min_end_point > min(len(stroke.points), end_point):
+                for i in range(min_end_point -
+                (min(len(stroke.points), end_point))):
+                    stroke_verts[-1][1].append(bm_mod.verts.new(\
+                        mat_world * point.co))
+                # force even spreading of points, so they are placed on stroke
+                method = 'regular'
+    bm_mod.verts.index_update()
+    for stroke, verts_seq in stroke_verts:
+        if len(verts_seq) < 2:
+            continue
+        # spread vertices evenly over the stroke
+        if method == 'regular':
+            loop = [[vert.index for vert in verts_seq], False]
+            move += gstretch_calculate_verts(loop, stroke, object, bm_mod,
+                method)
+        # create edges
+        for i, vert in enumerate(verts_seq):
+            if i > 0:
+                bm_mod.edges.new((verts_seq[i-1], verts_seq[i]))
+            vert.select = True
+        # connect single vertices to the closest stroke
+        if singles:
+            for vert, m_stroke, point in singles:
+                if m_stroke != stroke:
+                    continue
+                bm_mod.edges.new((vert, verts_seq[point]))
+                
+    bmesh.update_edit_mesh(object.data)
+
+    return(move)
+
+
 # erases the grease pencil stroke
 def gstretch_erase_stroke(stroke, context):
     # change 3d coordinate into a stroke-point
@@ -2549,6 +2682,10 @@ def gstretch_erase_stroke(stroke, context):
             'time': 0}
         return(lib)
 
+    if type(stroke) != bpy.types.GPencilStroke:
+        # fake stroke, there is nothing to delete
+        return
+
     erase_stroke = [sp(p.co, context) for p in stroke.points]
     if erase_stroke:
         erase_stroke[0]['is_start'] = True
@@ -2589,6 +2726,17 @@ def gstretch_eval_stroke(stroke, distance, stroke_lengths_cache=False):
     return(loc, stroke_lengths_cache)
 
 
+# create fake grease pencil strokes for the active object
+def gstretch_get_fake_strokes(object, bm_mod, loops):
+    strokes = []
+    for loop in loops:
+        p1 = object.matrix_world * bm_mod.verts[loop[0][0]].co
+        p2 = object.matrix_world * bm_mod.verts[loop[0][-1]].co
+        strokes.append(gstretch_fake_stroke([p1, p2]))
+
+    retur

@@ Diff output truncated at 10240 characters. @@


