[Bf-extensions-cvs] [9817975] temp-blend-utils: Preparing for new blend-file addon
Campbell Barton
noreply at git.blender.org
Thu Jan 28 06:09:49 CET 2016
Commit: 981797518714c16a737f161d1d8fe32e6a34517e
Author: Campbell Barton
Date: Thu Jan 28 13:29:04 2016 +1100
Branches: temp-blend-utils
https://developer.blender.org/rBA981797518714c16a737f161d1d8fe32e6a34517e
Preparing for new blend-file addon
This takes work done on bam, to be used as a Blender addon.
Currently not in working state - this is just a file copy from bam.
===================================================================
A io_blend_utils/__init__.py
A io_blend_utils/blend/blendfile.py
A io_blend_utils/blend/blendfile_pack.py
A io_blend_utils/blend/blendfile_path_walker.py
A io_blend_utils/pack_op.py
===================================================================
diff --git a/io_blend_utils/__init__.py b/io_blend_utils/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/io_blend_utils/blend/blendfile.py b/io_blend_utils/blend/blendfile.py
new file mode 100644
index 0000000..0739a1b
--- /dev/null
+++ b/io_blend_utils/blend/blendfile.py
@@ -0,0 +1,917 @@
+# ***** BEGIN GPL LICENSE BLOCK *****
+#
+# This program is free software; you can redistribute it and/or
+# modify it under the terms of the GNU General Public License
+# as published by the Free Software Foundation; either version 2
+# of the License, or (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
+#
+# ***** END GPL LICENCE BLOCK *****
+#
+# (c) 2009, At Mind B.V. - Jeroen Bakker
+# (c) 2014, Blender Foundation - Campbell Barton
+
+import os
+import struct
+import logging
+import gzip
+import tempfile
+
+log = logging.getLogger("blendfile")
+log.setLevel(logging.ERROR)
+
+FILE_BUFFER_SIZE = 1024 * 1024
+
+
+# -----------------------------------------------------------------------------
+# module global routines
+#
+# read routines
+# open a filename
+# determine if the file is compressed
+# and returns a handle
+def open_blend(filename, access="rb"):
+ """Opens a blend file for reading or writing pending on the access
+ supports 2 kind of blend files. Uncompressed and compressed.
+ Known issue: does not support packaged blend files
+ """
+ handle = open(filename, access)
+ magic_test = b"BLENDER"
+ magic = handle.read(len(magic_test))
+ if magic == magic_test:
+ log.debug("normal blendfile detected")
+ handle.seek(0, os.SEEK_SET)
+ bfile = BlendFile(handle)
+ bfile.is_compressed = False
+ bfile.filepath_orig = filename
+ return bfile
+ elif magic[:2] == b'\x1f\x8b':
+ log.debug("gzip blendfile detected")
+ handle.close()
+ log.debug("decompressing started")
+ fs = gzip.open(filename, "rb")
+ data = fs.read(FILE_BUFFER_SIZE)
+ magic = data[:len(magic_test)]
+ if magic == magic_test:
+ handle = tempfile.TemporaryFile()
+ while data:
+ handle.write(data)
+ data = fs.read(FILE_BUFFER_SIZE)
+ log.debug("decompressing finished")
+ fs.close()
+ log.debug("resetting decompressed file")
+ handle.seek(os.SEEK_SET, 0)
+ bfile = BlendFile(handle)
+ bfile.is_compressed = True
+ bfile.filepath_orig = filename
+ return bfile
+ else:
+ raise Exception("filetype inside gzip not a blend")
+ else:
+ raise Exception("filetype not a blend or a gzip blend")
+
+
+def align(offset, by):
+ n = by - 1
+ return (offset + n) & ~n
+
+
+# -----------------------------------------------------------------------------
+# module classes
+
+
+class BlendFile:
+ """
+ Blend file.
+ """
+ __slots__ = (
+ # file (result of open())
+ "handle",
+ # str (original name of the file path)
+ "filepath_orig",
+ # BlendFileHeader
+ "header",
+ # struct.Struct
+ "block_header_struct",
+ # BlendFileBlock
+ "blocks",
+ # [DNAStruct, ...]
+ "structs",
+ # dict {b'StructName': sdna_index}
+ # (where the index is an index into 'structs')
+ "sdna_index_from_id",
+ # dict {addr_old: block}
+ "block_from_offset",
+ # int
+ "code_index",
+ # bool (did we make a change)
+ "is_modified",
+ # bool (is file gzipped)
+ "is_compressed",
+ )
+
+ def __init__(self, handle):
+ log.debug("initializing reading blend-file")
+ self.handle = handle
+ self.header = BlendFileHeader(handle)
+ self.block_header_struct = self.header.create_block_header_struct()
+ self.blocks = []
+ self.code_index = {}
+
+ block = BlendFileBlock(handle, self)
+ while block.code != b'ENDB':
+ if block.code == b'DNA1':
+ (self.structs,
+ self.sdna_index_from_id,
+ ) = BlendFile.decode_structs(self.header, block, handle)
+ else:
+ handle.seek(block.size, os.SEEK_CUR)
+
+ self.blocks.append(block)
+ self.code_index.setdefault(block.code, []).append(block)
+
+ block = BlendFileBlock(handle, self)
+ self.is_modified = False
+ self.blocks.append(block)
+
+ # cache (could lazy init, incase we never use?)
+ self.block_from_offset = {block.addr_old: block for block in self.blocks if block.code != b'ENDB'}
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, type, value, traceback):
+ self.close()
+
+ def find_blocks_from_code(self, code):
+ assert(type(code) == bytes)
+ if code not in self.code_index:
+ return []
+ return self.code_index[code]
+
+ def find_block_from_offset(self, offset):
+ # same as looking looping over all blocks,
+ # then checking ``block.addr_old == offset``
+ assert(type(offset) is int)
+ return self.block_from_offset.get(offset)
+
+ def close(self):
+ """
+ Close the blend file
+ writes the blend file to disk if changes has happened
+ """
+ if not self.is_modified:
+ self.handle.close()
+ else:
+ handle = self.handle
+ if self.is_compressed:
+ log.debug("close compressed blend file")
+ handle.seek(os.SEEK_SET, 0)
+ log.debug("compressing started")
+ fs = gzip.open(self.filepath_orig, "wb")
+ data = handle.read(FILE_BUFFER_SIZE)
+ while data:
+ fs.write(data)
+ data = handle.read(FILE_BUFFER_SIZE)
+ fs.close()
+ log.debug("compressing finished")
+
+ handle.close()
+
+ def ensure_subtype_smaller(self, sdna_index_curr, sdna_index_next):
+ # never refine to a smaller type
+ if (self.structs[sdna_index_curr].size >
+ self.structs[sdna_index_next].size):
+
+ raise RuntimeError("cant refine to smaller type (%s -> %s)" %
+ (self.structs[sdna_index_curr].dna_type_id.decode('ascii'),
+ self.structs[sdna_index_next].dna_type_id.decode('ascii')))
+
+ @staticmethod
+ def decode_structs(header, block, handle):
+ """
+ DNACatalog is a catalog of all information in the DNA1 file-block
+ """
+ log.debug("building DNA catalog")
+ shortstruct = DNA_IO.USHORT[header.endian_index]
+ shortstruct2 = struct.Struct(header.endian_str + b'HH')
+ intstruct = DNA_IO.UINT[header.endian_index]
+
+ data = handle.read(block.size)
+ types = []
+ names = []
+
+ structs = []
+ sdna_index_from_id = {}
+
+ offset = 8
+ names_len = intstruct.unpack_from(data, offset)[0]
+ offset += 4
+
+ log.debug("building #%d names" % names_len)
+ for i in range(names_len):
+ tName = DNA_IO.read_data0_offset(data, offset)
+ offset = offset + len(tName) + 1
+ names.append(DNAName(tName))
+ del names_len
+
+ offset = align(offset, 4)
+ offset += 4
+ types_len = intstruct.unpack_from(data, offset)[0]
+ offset += 4
+ log.debug("building #%d types" % types_len)
+ for i in range(types_len):
+ dna_type_id = DNA_IO.read_data0_offset(data, offset)
+ # None will be replaced by the DNAStruct, below
+ types.append(DNAStruct(dna_type_id))
+ offset += len(dna_type_id) + 1
+
+ offset = align(offset, 4)
+ offset += 4
+ log.debug("building #%d type-lengths" % types_len)
+ for i in range(types_len):
+ tLen = shortstruct.unpack_from(data, offset)[0]
+ offset = offset + 2
+ types[i].size = tLen
+ del types_len
+
+ offset = align(offset, 4)
+ offset += 4
+
+ structs_len = intstruct.unpack_from(data, offset)[0]
+ offset += 4
+ log.debug("building #%d structures" % structs_len)
+ for sdna_index in range(structs_len):
+ d = shortstruct2.unpack_from(data, offset)
+ struct_type_index = d[0]
+ offset += 4
+ dna_struct = types[struct_type_index]
+ sdna_index_from_id[dna_struct.dna_type_id] = sdna_index
+ structs.append(dna_struct)
+
+ fields_len = d[1]
+ dna_offset = 0
+
+ for field_index in range(fields_len):
+ d2 = shortstruct2.unpack_from(data, offset)
+ field_type_index = d2[0]
+ field_name_index = d2[1]
+ offset += 4
+ dna_type = types[field_type_index]
+ dna_name = names[field_name_index]
+ if dna_name.is_pointer or dna_name.is_method_pointer:
+ dna_size = header.pointer_size * dna_name.array_size
+ else:
+ dna_size = dna_type.size * dna_name.array_size
+
+ field = DNAField(dna_type, dna_name, dna_size, dna_offset)
+ dna_struct.fields.append(field)
+ dna_struct.field_from_name[dna_name.name_only] = field
+ dna_offset += dna_size
+
+ return structs, sdna_index_from_id
+
+
+class BlendFileBlock:
+ """
+ Instance of a struct.
+ """
+ __slots__ = (
+ # BlendFile
+ "file",
+ "code",
+ "size",
+ "addr_old",
+ "sdna_index",
+
@@ Diff output truncated at 10240 characters. @@
More information about the Bf-extensions-cvs
mailing list