[Bf-extensions-cvs] SVN commit: /data/svn/bf-extensions [2871] trunk/py/scripts/addons/ netrender: netrender

Martin Poirier theeth at yahoo.com
Mon Jan 9 03:47:55 CET 2012


Revision: 2871
          http://projects.blender.org/scm/viewvc.php?view=rev&root=bf-extensions&revision=2871
Author:   theeth
Date:     2012-01-09 02:47:50 +0000 (Mon, 09 Jan 2012)
Log Message:
-----------
netrender
- distributed point cache baking
- (baking fluids or anything else still needs to be added, but that is not a hard task)
- master supports getting all results as a zip (rendering, baking or otherwise), available as a link on the job's page in the web interface
- framework to support multiple result files per frame/task (needed for baking); see the sketch below
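The pieces fit together roughly like this: the client serializes each baking task into a pipe-delimited command string, the slave runs baking.py in a background Blender, and the baker prints "BAKE FILE[ n ]: path" lines that get scraped and registered as the frame's result files. Below is a minimal standalone sketch (not part of the commit) that re-implements the commit's commandToTask/taskToCommand helpers and its output scraping; the sample task values and cache path are made up for illustration.

    import re

    def taskToCommand(task):
        # ("PARTICLE_SYSTEM", "Emitter", "0") -> "PARTICLE_SYSTEM|Emitter|0"
        return "|".join(task)

    def commandToTask(command):
        # split on the first and last "|" so the object name in the middle
        # may itself contain "|" characters
        i = command.index("|")
        ri = command.rindex("|")
        return (command[:i], command[i+1:ri], command[ri+1:])

    # same pattern the commit uses to scrape the baker's stdout
    result_pattern = re.compile(r"BAKE FILE\[ ([0-9]+) \]: (.*)")

    def resultsFromOutput(lines):
        # equivalent of the commit's resultsFromOuput helper
        results = []
        for line in lines:
            match = result_pattern.match(line)
            if match:
                results.append((int(match.group(1)), match.group(2)))
        return results

    if __name__ == "__main__":
        task = ("PARTICLE_SYSTEM", "Emitter", "0")   # hypothetical task
        command = taskToCommand(task)
        assert commandToTask(command) == task

        # output line as printed by bake_cache() in baking.py (made-up path)
        sample = ["BAKE FILE[ 0 ]: /tmp/cache/particles_000001_00.bphys"]
        print(resultsFromOutput(sample))   # [(0, '/tmp/cache/particles_000001_00.bphys')]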

Modified Paths:
--------------
    trunk/py/scripts/addons/netrender/__init__.py
    trunk/py/scripts/addons/netrender/baking.py
    trunk/py/scripts/addons/netrender/balancing.py
    trunk/py/scripts/addons/netrender/client.py
    trunk/py/scripts/addons/netrender/master.py
    trunk/py/scripts/addons/netrender/master_html.py
    trunk/py/scripts/addons/netrender/model.py
    trunk/py/scripts/addons/netrender/operators.py
    trunk/py/scripts/addons/netrender/slave.py
    trunk/py/scripts/addons/netrender/ui.py
    trunk/py/scripts/addons/netrender/utils.py

Modified: trunk/py/scripts/addons/netrender/__init__.py
===================================================================
--- trunk/py/scripts/addons/netrender/__init__.py	2012-01-06 11:02:03 UTC (rev 2870)
+++ trunk/py/scripts/addons/netrender/__init__.py	2012-01-09 02:47:50 UTC (rev 2871)
@@ -21,7 +21,7 @@
 bl_info = {
     "name": "Network Renderer",
     "author": "Martin Poirier",
-    "version": (1, 7),
+    "version": (1, 8),
     "blender": (2, 6, 0),
     "api": 35011,
     "location": "Render > Engine > Network Render",

Modified: trunk/py/scripts/addons/netrender/baking.py
===================================================================
--- trunk/py/scripts/addons/netrender/baking.py	2012-01-06 11:02:03 UTC (rev 2870)
+++ trunk/py/scripts/addons/netrender/baking.py	2012-01-09 02:47:50 UTC (rev 2871)
@@ -17,10 +17,20 @@
 # ##### END GPL LICENSE BLOCK #####
 
 import bpy
-import sys, subprocess
+import sys, subprocess, re
 
+from netrender.utils import *
+
 BLENDER_PATH = sys.argv[0]
 
+def commandToTask(command):
+    i = command.index("|")
+    ri = command.rindex("|")
+    return (command[:i], command[i+1:ri], command[ri+1:])
+    
+def taskToCommand(task):
+    return "|".join(task)
+                    
 def bake(job, tasks):
     main_file = job.files[0]
     job_full_path = main_file.filepath
@@ -33,43 +43,91 @@
     
     return process
 
-def process_cache(obj, point_cache):
+result_pattern = re.compile("BAKE FILE\[ ([0-9]+) \]: (.*)")
+def resultsFromOuput(lines):
+    results = []
+    for line in lines:
+        match = result_pattern.match(line)
+
+        if match:
+            task_id = int(match.groups()[0])
+            task_filename = match.groups()[1]
+            
+            results.append((task_id, task_filename))
+            
+    return results
+
+def bake_cache(obj, point_cache, task_index):
     if point_cache.is_baked:
         bpy.ops.ptcache.free_bake({"point_cache": point_cache})
         
     point_cache.use_disk_cache = True
+    point_cache.use_external = False
     
     bpy.ops.ptcache.bake({"point_cache": point_cache}, bake=True)
+    
+    results = cache_results(obj, point_cache)
+    
+    print()
+    
+    for filename in results:
+        print("BAKE FILE[", task_index, "]:", filename)
+  
 
-def process_generic(obj, index):
+def cache_results(obj, point_cache):
+    name = cacheName(obj, point_cache)
+    default_path = cachePath(bpy.data.filepath)
+
+    cache_path = bpy.path.abspath(point_cache.filepath) if point_cache.use_external else default_path
+    
+    index = "%02i" % point_cache.index
+
+    if os.path.exists(cache_path):
+        pattern = re.compile(name + "_([0-9]+)_" + index + "\.bphys")
+
+        cache_files = []
+
+        for cache_file in sorted(os.listdir(cache_path)):
+            match = pattern.match(cache_file)
+
+            if match:
+                cache_files.append(os.path.join(cache_path, cache_file))
+
+        cache_files.sort()
+        
+        return cache_files
+    
+    return []
+
+def process_generic(obj, index, task_index):
     modifier = obj.modifiers[index]
     point_cache = modifier.point_cache
-    process_cache(obj, point_cache)
+    bake_cache(obj, point_cache, task_index)
 
-def process_smoke(obj, index):
+def process_smoke(obj, index, task_index):
     modifier = obj.modifiers[index]
     point_cache = modifier.domain_settings.point_cache
-    process_cache(obj, point_cache)
+    bake_cache(obj, point_cache, task_index)
 
-def process_particle(obj, index):
+def process_particle(obj, index, task_index):
     psys = obj.particle_systems[index]
     point_cache = psys.point_cache
-    process_cache(obj, point_cache)
+    bake_cache(obj, point_cache, task_index)
 
-def process_paint(obj, index):
+def process_paint(obj, index, task_index):
     modifier = obj.modifiers[index]
     for surface in modifier.canvas_settings.canvas_surfaces:
-        process_cache(obj, surface.point_cache)
+        bake_cache(obj, surface.point_cache, task_index)
 
-def process_null(obj, index):
+def process_null(obj, index, task_index):
     raise ValueException("No baking possible with arguments: " + " ".join(sys.argv))
 
-bake_funcs = {}
-bake_funcs["CLOTH"] = process_generic
-bake_funcs["SOFT_BODY"] = process_generic
-bake_funcs["PARTICLE_SYSTEM"] = process_particle
-bake_funcs["SMOKE"] = process_smoke
-bake_funcs["DYNAMIC_PAINT"] = process_paint
+process_funcs = {}
+process_funcs["CLOTH"] = process_generic
+process_funcs["SOFT_BODY"] = process_generic
+process_funcs["PARTICLE_SYSTEM"] = process_particle
+process_funcs["SMOKE"] = process_smoke
+process_funcs["DYNAMIC_PAINT"] = process_paint
 
 if __name__ == "__main__":
     try:
@@ -84,4 +142,4 @@
             obj = bpy.data.objects[task_args[i+1]]
             index = int(task_args[i+2])
             
-            bake_funcs.get(bake_type, process_null)(obj, index)
+            process_funcs.get(bake_type, process_null)(obj, index, i)

Modified: trunk/py/scripts/addons/netrender/balancing.py
===================================================================
--- trunk/py/scripts/addons/netrender/balancing.py	2012-01-06 11:02:03 UTC (rev 2870)
+++ trunk/py/scripts/addons/netrender/balancing.py	2012-01-09 02:47:50 UTC (rev 2871)
@@ -149,7 +149,7 @@
         return "Priority to new jobs"
 
     def test(self, job):
-        return job.countFrames(status = DONE) < self.limit
+        return job.countFrames(status = FRAME_DONE) < self.limit
 
 class MinimumTimeBetweenDispatchPriority(PriorityRule):
     def __init__(self, limit = 10):
@@ -166,14 +166,14 @@
         return "Priority to jobs that haven't been dispatched recently"
 
     def test(self, job):
-        return job.countFrames(status = DISPATCHED) == 0 and (time.time() - job.last_dispatched) / 60 > self.limit
+        return job.countFrames(status = FRAME_DISPATCHED) == 0 and (time.time() - job.last_dispatched) / 60 > self.limit
 
 class ExcludeQueuedEmptyJob(ExclusionRule):
     def __str__(self):
         return "Exclude non queued or empty jobs"
 
     def test(self, job):
-        return job.status != JOB_QUEUED or job.countFrames(status = QUEUED) == 0
+        return job.status != JOB_QUEUED or job.countFrames(status = FRAME_QUEUED) == 0
 
 class ExcludeSlavesLimit(ExclusionRule):
     def __init__(self, count_jobs, count_slaves, limit = 0.75):

Modified: trunk/py/scripts/addons/netrender/client.py
===================================================================
--- trunk/py/scripts/addons/netrender/client.py	2012-01-06 11:02:03 UTC (rev 2870)
+++ trunk/py/scripts/addons/netrender/client.py	2012-01-09 02:47:50 UTC (rev 2871)
@@ -224,7 +224,7 @@
 
     for i, task in enumerate(tasks):
         job.addFrame(i + 1)
-        job.frames[-1].command = "|".join(task)
+        job.frames[-1].command = netrender.baking.taskToCommand(task)
         
     # try to send path first
     with ConnectionContext():

Modified: trunk/py/scripts/addons/netrender/master.py
===================================================================
--- trunk/py/scripts/addons/netrender/master.py	2012-01-06 11:02:03 UTC (rev 2870)
+++ trunk/py/scripts/addons/netrender/master.py	2012-01-09 02:47:50 UTC (rev 2871)
@@ -20,6 +20,7 @@
 import http, http.client, http.server, socket, socketserver
 import shutil, time, hashlib
 import pickle
+import zipfile
 import select # for select.error
 import json
 
@@ -131,7 +132,7 @@
 
     def testFinished(self):
         for f in self.frames:
-            if f.status == QUEUED or f.status == DISPATCHED:
+            if f.status == FRAME_QUEUED or f.status == FRAME_DISPATCHED:
                 break
         else:
             self.status = JOB_FINISHED
@@ -175,13 +176,16 @@
     def getFrames(self):
         frames = []
         for f in self.frames:
-            if f.status == QUEUED:
+            if f.status == FRAME_QUEUED:
                 self.last_dispatched = time.time()
                 frames.append(f)
                 if len(frames) >= self.chunks:
                     break
 
         return frames
+    
+    def getResultPath(self, filename):
+        return os.path.join(self.save_path, filename)
 
 class MRenderFrame(netrender.model.RenderFrame):
     def __init__(self, frame, command):
@@ -189,17 +193,23 @@
         self.number = frame
         self.slave = None
         self.time = 0
-        self.status = QUEUED
+        self.status = FRAME_QUEUED
         self.command = command
 
         self.log_path = None
 
+    def addDefaultRenderResult(self):
+        self.results.append(self.getRenderFilename())
+
+    def getRenderFilename(self):
+        return "%06d.exr" % self.number
+
     def reset(self, all):
-        if all or self.status == ERROR:
+        if all or self.status == FRAME_ERROR:
             self.log_path = None
             self.slave = None
             self.time = 0
-            self.status = QUEUED
+            self.status = FRAME_QUEUED
 
 
 # -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=
@@ -208,6 +218,7 @@
 # =-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
 file_pattern = re.compile("/file_([a-zA-Z0-9]+)_([0-9]+)")
 render_pattern = re.compile("/render_([a-zA-Z0-9]+)_([0-9]+).exr")
+result_pattern = re.compile("/result_([a-zA-Z0-9]+).zip")
 thumb_pattern = re.compile("/thumb_([a-zA-Z0-9]+)_([0-9]+).jpg")
 log_pattern = re.compile("/log_([a-zA-Z0-9]+)_([0-9]+).log")
 reset_pattern = re.compile("/reset(all|)_([a-zA-Z0-9]+)_([0-9]+)")
@@ -295,18 +306,18 @@
                     frame = job[frame_number]
 
                     if frame:
-                        if frame.status in (QUEUED, DISPATCHED):
+                        if frame.status in (FRAME_QUEUED, FRAME_DISPATCHED):
                             self.send_head(http.client.ACCEPTED)
-                        elif frame.status == DONE:
+                        elif frame.status == FRAME_DONE:
                             self.server.stats("", "Sending result to client")
 
-                            filename = os.path.join(job.save_path, "%06d.exr" % frame_number)
+                            filename = job.getResultPath(frame.getRenderFilename())
 
                             f = open(filename, 'rb')
                             self.send_head(content = "image/x-exr")
                             shutil.copyfileobj(f, self.wfile)
                             f.close()

@@ Diff output truncated at 10240 characters. @@

